From 94c41d32ced998662addd81b01a9edd1e33e0346 Mon Sep 17 00:00:00 2001 From: Madeline Kalilh Date: Mon, 10 Feb 2025 11:31:51 -0500 Subject: [PATCH 001/270] gopls/internal/golang: add comment about SymbolKind Change-Id: I99bdf283f0ebd63ab1de044ca54fc8ce65136e65 Reviewed-on: https://go-review.googlesource.com/c/tools/+/648096 Reviewed-by: Hongxiang Jiang LUCI-TryBot-Result: Go LUCI --- gopls/internal/protocol/command/interface.go | 3 +++ 1 file changed, 3 insertions(+) diff --git a/gopls/internal/protocol/command/interface.go b/gopls/internal/protocol/command/interface.go index 32e03dd388a..34adf59b38e 100644 --- a/gopls/internal/protocol/command/interface.go +++ b/gopls/internal/protocol/command/interface.go @@ -814,6 +814,9 @@ type PackageSymbol struct { Detail string `json:"detail,omitempty"` + // protocol.SymbolKind maps an integer to an enum: + // https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#symbolKind + // i.e. File = 1 Kind protocol.SymbolKind `json:"kind"` Tags []protocol.SymbolTag `json:"tags,omitempty"` From 91bac86b5c14ba7d5ed6033fe1b85d9c2aed8215 Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Mon, 10 Feb 2025 16:32:24 -0500 Subject: [PATCH 002/270] internal/analysisinternal: add CanImport Add a function that reports whether one package can import another, respecting the Go toolchain's interpretation of path segments named "internal". Change-Id: I66dc90453a178d0e626117f4e3cadf30e61912dc Reviewed-on: https://go-review.googlesource.com/c/tools/+/648255 Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI --- internal/analysisinternal/analysis.go | 27 +++++++++++++++++ internal/analysisinternal/analysis_test.go | 34 ++++++++++++++++++++++ internal/refactor/inline/inline.go | 27 ++--------------- 3 files changed, 63 insertions(+), 25 deletions(-) create mode 100644 internal/analysisinternal/analysis_test.go diff --git a/internal/analysisinternal/analysis.go b/internal/analysisinternal/analysis.go index abf708111bf..a1edabbe84d 100644 --- a/internal/analysisinternal/analysis.go +++ b/internal/analysisinternal/analysis.go @@ -449,3 +449,30 @@ func validateFix(fset *token.FileSet, fix *analysis.SuggestedFix) error { return nil } + +// CanImport reports whether one package is allowed to import another. +// +// TODO(adonovan): allow customization of the accessibility relation +// (e.g. for Bazel). +func CanImport(from, to string) bool { + // TODO(adonovan): better segment hygiene. + if to == "internal" || strings.HasPrefix(to, "internal/") { + // Special case: only std packages may import internal/... + // We can't reliably know whether we're in std, so we + // use a heuristic on the first segment. + first, _, _ := strings.Cut(from, "/") + if strings.Contains(first, ".") { + return false // example.com/foo ∉ std + } + if first == "testdata" { + return false // testdata/foo ∉ std + } + } + if strings.HasSuffix(to, "/internal") { + return strings.HasPrefix(from, to[:len(to)-len("/internal")]) + } + if i := strings.LastIndex(to, "/internal/"); i >= 0 { + return strings.HasPrefix(from, to[:i]) + } + return true +} diff --git a/internal/analysisinternal/analysis_test.go b/internal/analysisinternal/analysis_test.go new file mode 100644 index 00000000000..0b21876d386 --- /dev/null +++ b/internal/analysisinternal/analysis_test.go @@ -0,0 +1,34 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package analysisinternal + +import "testing" + +func TestCanImport(t *testing.T) { + for _, tt := range []struct { + from string + to string + want bool + }{ + {"fmt", "internal", true}, + {"fmt", "internal/foo", true}, + {"a.com/b", "internal", false}, + {"a.com/b", "xinternal", true}, + {"a.com/b", "internal/foo", false}, + {"a.com/b", "xinternal/foo", true}, + {"a.com/b", "a.com/internal", true}, + {"a.com/b", "a.com/b/internal", true}, + {"a.com/b", "a.com/b/internal/foo", true}, + {"a.com/b", "a.com/c/internal", false}, + {"a.com/b", "a.com/c/xinternal", true}, + {"a.com/b", "a.com/c/internal/foo", false}, + {"a.com/b", "a.com/c/xinternal/foo", true}, + } { + got := CanImport(tt.from, tt.to) + if got != tt.want { + t.Errorf("CanImport(%q, %q) = %v, want %v", tt.from, tt.to, got, tt.want) + } + } +} diff --git a/internal/refactor/inline/inline.go b/internal/refactor/inline/inline.go index 2c897c24954..96fbb8f8706 100644 --- a/internal/refactor/inline/inline.go +++ b/internal/refactor/inline/inline.go @@ -23,6 +23,7 @@ import ( "golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/go/types/typeutil" "golang.org/x/tools/imports" + "golang.org/x/tools/internal/analysisinternal" internalastutil "golang.org/x/tools/internal/astutil" "golang.org/x/tools/internal/typeparams" "golang.org/x/tools/internal/typesinternal" @@ -331,7 +332,7 @@ func (st *state) inline() (*Result, error) { for _, imp := range res.newImports { // Check that the new imports are accessible. path, _ := strconv.Unquote(imp.spec.Path.Value) - if !canImport(caller.Types.Path(), path) { + if !analysisinternal.CanImport(caller.Types.Path(), path) { return nil, fmt.Errorf("can't inline function %v as its body refers to inaccessible package %q", callee, path) } importDecl.Specs = append(importDecl.Specs, imp.spec) @@ -3196,30 +3197,6 @@ func last[T any](slice []T) T { return *new(T) } -// canImport reports whether one package is allowed to import another. -// -// TODO(adonovan): allow customization of the accessibility relation -// (e.g. for Bazel). -func canImport(from, to string) bool { - // TODO(adonovan): better segment hygiene. - if strings.HasPrefix(to, "internal/") { - // Special case: only std packages may import internal/... - // We can't reliably know whether we're in std, so we - // use a heuristic on the first segment. - first, _, _ := strings.Cut(from, "/") - if strings.Contains(first, ".") { - return false // example.com/foo ∉ std - } - if first == "testdata" { - return false // testdata/foo ∉ std - } - } - if i := strings.LastIndex(to, "/internal/"); i >= 0 { - return strings.HasPrefix(from, to[:i]) - } - return true -} - // consistentOffsets reports whether the portion of caller.Content // that corresponds to caller.Call can be parsed as a call expression. // If not, the client has provided inconsistent information, possibly From f61b225cbfefb205c92c7ebcd3e25c20c0f3c090 Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Mon, 10 Feb 2025 15:54:38 -0500 Subject: [PATCH 003/270] internal/analysisinternal: AddImport puts new import in a group If AddImport needs to add a new import, and the file's first declaration is a grouped import, then add it to that import. This is one step towards a full implementation of the issue below, and perhaps is good enough. For golang/go#71647. 
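For illustration, mirroring the new "add import to group" test case below:
given a file whose first declaration is a grouped import,

    package a

    import (
        "io"
    )

adding the "fmt" package now produces

    package a

    import (
        "io"
        "fmt"
    )

The new path is inserted just before the group's closing parenthesis, so the
group itself is not re-sorted by this edit (note "fmt" after "io" in the
golden output below).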
Change-Id: I8327b07c21c3efbd189c519e51c339b7aa4751d8 Reviewed-on: https://go-review.googlesource.com/c/tools/+/648136 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan Auto-Submit: Jonathan Amsterdam --- internal/analysisinternal/addimport_test.go | 22 +++++++++++++++++++++ internal/analysisinternal/analysis.go | 22 ++++++++++++++------- 2 files changed, 37 insertions(+), 7 deletions(-) diff --git a/internal/analysisinternal/addimport_test.go b/internal/analysisinternal/addimport_test.go index 145d5861b8f..12423b7c061 100644 --- a/internal/analysisinternal/addimport_test.go +++ b/internal/analysisinternal/addimport_test.go @@ -219,6 +219,28 @@ import . "fmt" func _(Print fmt.Stringer) { fmt +}`, + }, + { + descr: descr("add import to group"), + src: `package a + +import ( + "io" +) + +func _(io.Reader) { + «fmt fmt» +}`, + want: `package a + +import ( + "io" + "fmt" +) + +func _(io.Reader) { + fmt }`, }, } { diff --git a/internal/analysisinternal/analysis.go b/internal/analysisinternal/analysis.go index a1edabbe84d..d96d22982c5 100644 --- a/internal/analysisinternal/analysis.go +++ b/internal/analysisinternal/analysis.go @@ -255,16 +255,16 @@ func AddImport(info *types.Info, file *ast.File, preferredName, pkgpath, member newName = fmt.Sprintf("%s%d", preferredName, i) } - // For now, keep it real simple: create a new import - // declaration before the first existing declaration (which - // must exist), including its comments, and let goimports tidy it up. + // Create a new import declaration either before the first existing + // declaration (which must exist), including its comments; or + // inside the declaration, if it is an import group. // // Use a renaming import whenever the preferred name is not // available, or the chosen name does not match the last // segment of its path. - newText := fmt.Sprintf("import %q\n\n", pkgpath) + newText := fmt.Sprintf("%q", pkgpath) if newName != preferredName || newName != pathpkg.Base(pkgpath) { - newText = fmt.Sprintf("import %s %q\n\n", newName, pkgpath) + newText = fmt.Sprintf("%s %q", newName, pkgpath) } decl0 := file.Decls[0] var before ast.Node = decl0 @@ -278,9 +278,17 @@ func AddImport(info *types.Info, file *ast.File, preferredName, pkgpath, member before = decl0.Doc } } + // If the first decl is an import group, add this new import at the end. + if gd, ok := before.(*ast.GenDecl); ok && gd.Tok == token.IMPORT && gd.Rparen.IsValid() { + pos = gd.Rparen + newText = "\t" + newText + "\n" + } else { + pos = before.Pos() + newText = "import " + newText + "\n\n" + } return newName, newName + ".", []analysis.TextEdit{{ - Pos: before.Pos(), - End: before.Pos(), + Pos: pos, + End: pos, NewText: []byte(newText), }} } From 027eab55ae11cdaf85b7e426cd74249b206070a3 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Fri, 7 Feb 2025 17:34:36 -0500 Subject: [PATCH 004/270] go/analysis/analysistest: RunWithSuggestedFix: 3-way merge This CL adds support for three-way merging to RunWithSuggestedFix, similar to the recent changes in go/analysis/internal/checker's -fix logic. Although unresolved conflicts are still considered a test failure, the diff algorithm may successfully merge identical edits, in particular redundant imports of the same package. NOTE: This does mean that existing tests whose golden files expect redundant imports will fail, and need updating. The test failure messages are improved, again following internal/checker. 
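Concretely, when several fixes in one file add the same import, the identical
edits now coalesce instead of piling up. Abridged from the updated modernize
testdata below, a golden file that previously had to repeat the import once
per applied fix:

    package slicesdelete

    import "slices"

    import "slices"

    import "slices"

now lists it only once:

    package slicesdelete

    import "slices"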
Fixes golang/go#67049 Fixes golang/go#68765 Change-Id: I8ace0ff0cd0b147696ec4dc742b0d63d0d713155 Reviewed-on: https://go-review.googlesource.com/c/tools/+/647798 Reviewed-by: Jonathan Amsterdam LUCI-TryBot-Result: Go LUCI --- go/analysis/analysistest/analysistest.go | 284 ++++++++++-------- go/analysis/internal/checker/fix_test.go | 3 + .../src/slicesdelete/slicesdelete.go.golden | 14 - .../src/sortslice/sortslice.go.golden | 2 - 4 files changed, 164 insertions(+), 139 deletions(-) diff --git a/go/analysis/analysistest/analysistest.go b/go/analysis/analysistest/analysistest.go index 775fd20094d..08981776478 100644 --- a/go/analysis/analysistest/analysistest.go +++ b/go/analysis/analysistest/analysistest.go @@ -7,6 +7,7 @@ package analysistest import ( "bytes" + "cmp" "fmt" "go/format" "go/token" @@ -78,16 +79,24 @@ type Testing interface { Errorf(format string, args ...interface{}) } -// RunWithSuggestedFixes behaves like Run, but additionally verifies suggested fixes. -// It uses golden files placed alongside the source code under analysis: -// suggested fixes for code in example.go will be compared against example.go.golden. +// RunWithSuggestedFixes behaves like Run, but additionally applies +// suggested fixes and verifies their output. // -// Golden files can be formatted in one of two ways: as plain Go source code, or as txtar archives. -// In the first case, all suggested fixes will be applied to the original source, which will then be compared against the golden file. -// In the second case, suggested fixes will be grouped by their messages, and each set of fixes will be applied and tested separately. -// Each section in the archive corresponds to a single message. +// It uses golden files, placed alongside each source file, to express +// the desired output: the expected transformation of file example.go +// is specified in file example.go.golden. // -// A golden file using txtar may look like this: +// Golden files may be of two forms: a plain Go source file, or a +// txtar archive. +// +// A plain Go source file indicates the expected result of applying +// all suggested fixes to the original file. +// +// A txtar archive specifies, in each section, the expected result of +// applying all suggested fixes of a given message to the original +// file; the name of the archive section is the fix's message. In this +// way, the various alternative fixes offered by a single diagnostic +// can be tested independently. Here's an example: // // -- turn into single negation -- // package pkg @@ -109,41 +118,28 @@ type Testing interface { // // # Conflicts // -// A single analysis pass may offer two or more suggested fixes that -// (1) conflict but are nonetheless logically composable, (e.g. -// because both update the import declaration), or (2) are -// fundamentally incompatible (e.g. alternative fixes to the same -// statement). -// -// It is up to the driver to decide how to apply such fixes. A -// sophisticated driver could attempt to resolve conflicts of the -// first kind, but this test driver simply reports the fact of the -// conflict with the expectation that the user will split their tests -// into nonconflicting parts. +// Regardless of the form of the golden file, it is possible for +// multiple fixes to conflict, either because they overlap, or are +// close enough together that the particular diff algorithm cannot +// separate them. 
// -// Conflicts of the second kind can be avoided by giving the -// alternative fixes different names (SuggestedFix.Message) and -// defining the .golden file as a multi-section txtar file with a -// named section for each alternative fix, as shown above. +// RunWithSuggestedFixes uses a simple three-way merge to accumulate +// fixes, similar to a git merge. The merge algorithm may be able to +// coalesce identical edits, for example duplicate imports of the same +// package. (Bear in mind that this is an editorial decision. In +// general, coalescing identical edits may not be correct: consider +// two statements that increment the same counter.) // -// Analyzers that compute fixes from a textual diff of the -// before/after file contents (instead of directly from syntax tree -// positions) may produce fixes that, although logically -// non-conflicting, nonetheless conflict due to the particulars of the -// diff algorithm. In such cases it may suffice to introduce -// sufficient separation of the statements in the test input so that -// the computed diffs do not overlap. If that fails, break the test -// into smaller parts. +// If there are conflicts, the test fails. In any case, the +// non-conflicting edits will be compared against the expected output. +// In this situation, we recommend that you increase the textual +// separation between conflicting parts or, if that fails, split +// your tests into smaller parts. // -// TODO(adonovan): the behavior of RunWithSuggestedFixes as documented -// above is impractical for tests that report multiple diagnostics and -// offer multiple alternative fixes for the same diagnostic, and it is -// inconsistent with the interpretation of multiple diagnostics -// described at Diagnostic.SuggestedFixes. -// We need to rethink the analyzer testing API to better support such -// cases. In the meantime, users of RunWithSuggestedFixes testing -// analyzers that offer alternative fixes are advised to put each fix -// in a separate .go file in the testdata. +// If a diagnostic offers multiple fixes for the same problem, they +// are almost certain to conflict, so in this case you should define +// the expected output using a multi-section txtar file as described +// above. func RunWithSuggestedFixes(t Testing, dir string, a *analysis.Analyzer, patterns ...string) []*Result { results := Run(t, dir, a, patterns...) @@ -173,133 +169,165 @@ func RunWithSuggestedFixes(t Testing, dir string, a *analysis.Analyzer, patterns for _, result := range results { act := result.Action - // file -> message -> edits - // TODO(adonovan): this mapping assumes fix.Messages are unique across analyzers, - // whereas they are only unique within a given Diagnostic. - fileEdits := make(map[*token.File]map[string][]diff.Edit) - - // We may assume that fixes are validated upon creation in Pass.Report. - // Group fixes by file and message. + // For each fix, split its edits by file and convert to diff form. + var ( + // fixEdits: message -> fixes -> filename -> edits + // + // TODO(adonovan): this mapping assumes fix.Messages + // are unique across analyzers, whereas they are only + // unique within a given Diagnostic. + fixEdits = make(map[string][]map[string][]diff.Edit) + allFilenames = make(map[string]bool) + ) for _, diag := range act.Diagnostics { + // Fixes are validated upon creation in Pass.Report. for _, fix := range diag.SuggestedFixes { // Assert that lazy fixes have a Category (#65578, #65087). 
if inTools && len(fix.TextEdits) == 0 && diag.Category == "" { t.Errorf("missing Diagnostic.Category for SuggestedFix without TextEdits (gopls requires the category for the name of the fix command") } + // Convert edits to diff form. + // Group fixes by message and file. + edits := make(map[string][]diff.Edit) for _, edit := range fix.TextEdits { file := act.Package.Fset.File(edit.Pos) - if _, ok := fileEdits[file]; !ok { - fileEdits[file] = make(map[string][]diff.Edit) - } - fileEdits[file][fix.Message] = append(fileEdits[file][fix.Message], diff.Edit{ + allFilenames[file.Name()] = true + edits[file.Name()] = append(edits[file.Name()], diff.Edit{ Start: file.Offset(edit.Pos), End: file.Offset(edit.End), New: string(edit.NewText), }) } + fixEdits[fix.Message] = append(fixEdits[fix.Message], edits) } } - for file, fixes := range fileEdits { - // Get the original file contents. - // TODO(adonovan): plumb pass.ReadFile. - orig, err := os.ReadFile(file.Name()) + merge := func(file, message string, x, y []diff.Edit) []diff.Edit { + z, ok := diff.Merge(x, y) + if !ok { + t.Errorf("in file %s, conflict applying fix %q", file, message) + return x // discard y + } + return z + } + + // Because the checking is driven by original + // filenames, there is no way to express that a fix + // (e.g. extract declaration) creates a new file. + for _, filename := range sortedKeys(allFilenames) { + // Read the original file. + content, err := os.ReadFile(filename) if err != nil { - t.Errorf("error reading %s: %v", file.Name(), err) + t.Errorf("error reading %s: %v", filename, err) continue } - // Get the golden file and read the contents. - ar, err := txtar.ParseFile(file.Name() + ".golden") + // check checks that the accumulated edits applied + // to the original content yield the wanted content. + check := func(prefix string, accumulated []diff.Edit, want []byte) { + if err := applyDiffsAndCompare(filename, content, want, accumulated); err != nil { + t.Errorf("%s: %s", prefix, err) + } + } + + // Read the golden file. It may have one of two forms: + // (1) A txtar archive with one section per fix title, + // including all fixes of just that title. + // (2) The expected output for file.Name after all (?) fixes are applied. + // This form requires that no diagnostic has multiple fixes. + ar, err := txtar.ParseFile(filename + ".golden") if err != nil { - t.Errorf("error reading %s.golden: %v", file.Name(), err) + t.Errorf("error reading %s.golden: %v", filename, err) continue } - if len(ar.Files) > 0 { - // one virtual file per kind of suggested fix - - if len(ar.Comment) != 0 { - // we allow either just the comment, or just virtual - // files, not both. it is not clear how "both" should - // behave. - t.Errorf("%s.golden has leading comment; we don't know what to do with it", file.Name()) + // Form #1: one archive section per kind of suggested fix. + if len(ar.Comment) > 0 { + // Disallow the combination of comment and archive sections. + t.Errorf("%s.golden has leading comment; we don't know what to do with it", filename) continue } - // Sort map keys for determinism in tests. - // TODO(jba): replace with slices.Sorted(maps.Keys(fixes)) when go.mod >= 1.23. - var keys []string - for k := range fixes { - keys = append(keys, k) - } - slices.Sort(keys) - for _, sf := range keys { - edits := fixes[sf] - found := false - for _, vf := range ar.Files { - if vf.Name == sf { - found = true - // the file may contain multiple trailing - // newlines if the user places empty lines - // between files in the archive. 
normalize - // this to a single newline. - golden := append(bytes.TrimRight(vf.Data, "\n"), '\n') - - if err := applyDiffsAndCompare(orig, golden, edits, file.Name()); err != nil { - t.Errorf("%s", err) - } - break - } - } - if !found { - t.Errorf("no section for suggested fix %q in %s.golden", sf, file.Name()) + + // Each archive section is named for a fix.Message. + // Accumulate the parts of the fix that apply to the current file, + // using a simple three-way merge, discarding conflicts, + // then apply the merged edits and compare to the archive section. + for _, section := range ar.Files { + message, want := section.Name, section.Data + var accumulated []diff.Edit + for _, fix := range fixEdits[message] { + accumulated = merge(filename, message, accumulated, fix[filename]) } - } - } else { - // all suggested fixes are represented by a single file - // TODO(adonovan): fix: this makes no sense if len(fixes) > 1. - // Sort map keys for determinism in tests. - // TODO(jba): replace with slices.Sorted(maps.Keys(fixes)) when go.mod >= 1.23. - var keys []string - for k := range fixes { - keys = append(keys, k) - } - slices.Sort(keys) - var catchallEdits []diff.Edit - for _, k := range keys { - catchallEdits = append(catchallEdits, fixes[k]...) + check(fmt.Sprintf("all fixes of message %q", message), accumulated, want) } - if err := applyDiffsAndCompare(orig, ar.Comment, catchallEdits, file.Name()); err != nil { - t.Errorf("%s", err) + } else { + // Form #2: all suggested fixes are represented by a single file. + want := ar.Comment + var accumulated []diff.Edit + for _, message := range sortedKeys(fixEdits) { + for _, fix := range fixEdits[message] { + accumulated = merge(filename, message, accumulated, fix[filename]) + } } + check("all fixes", accumulated, want) } } } + return results } -// applyDiffsAndCompare applies edits to src and compares the results against -// golden after formatting both. fileName is use solely for error reporting. -func applyDiffsAndCompare(src, golden []byte, edits []diff.Edit, fileName string) error { - out, err := diff.ApplyBytes(src, edits) +// applyDiffsAndCompare applies edits to original and compares the results against +// want after formatting both. fileName is use solely for error reporting. +func applyDiffsAndCompare(filename string, original, want []byte, edits []diff.Edit) error { + // Relativize filename, for tidier errors. 
+ if cwd, err := os.Getwd(); err == nil { + if rel, err := filepath.Rel(cwd, filename); err == nil { + filename = rel + } + } + + if len(edits) == 0 { + return fmt.Errorf("%s: no edits", filename) + } + fixedBytes, err := diff.ApplyBytes(original, edits) if err != nil { - return fmt.Errorf("%s: error applying fixes: %v (see possible explanations at RunWithSuggestedFixes)", fileName, err) + return fmt.Errorf("%s: error applying fixes: %v (see possible explanations at RunWithSuggestedFixes)", filename, err) } - wantRaw, err := format.Source(golden) + fixed, err := format.Source(fixedBytes) if err != nil { - return fmt.Errorf("%s.golden: error formatting golden file: %v\n%s", fileName, err, out) + return fmt.Errorf("%s: error formatting resulting source: %v\n%s", filename, err, fixed) } - want := string(wantRaw) - formatted, err := format.Source(out) + want, err = format.Source(want) if err != nil { - return fmt.Errorf("%s: error formatting resulting source: %v\n%s", fileName, err, out) + return fmt.Errorf("%s.golden: error formatting golden file: %v\n%s", filename, err, fixed) + } + + // Keep error reporting logic below consistent with + // TestScript in ../internal/checker/fix_test.go! + + unified := func(xlabel, ylabel string, x, y []byte) string { + x = append(slices.Clip(bytes.TrimSpace(x)), '\n') + y = append(slices.Clip(bytes.TrimSpace(y)), '\n') + return diff.Unified(xlabel, ylabel, string(x), string(y)) } - if got := string(formatted); got != want { - unified := diff.Unified(fileName+".golden", "actual", want, got) - return fmt.Errorf("suggested fixes failed for %s:\n%s", fileName, unified) + + if diff := unified(filename+" (fixed)", filename+" (want)", fixed, want); diff != "" { + return fmt.Errorf("unexpected %s content:\n"+ + "-- original --\n%s\n"+ + "-- fixed --\n%s\n"+ + "-- want --\n%s\n"+ + "-- diff original fixed --\n%s\n"+ + "-- diff fixed want --\n%s", + filename, + original, + fixed, + want, + unified(filename+" (original)", filename+" (fixed)", original, fixed), + diff) } return nil } @@ -740,3 +768,13 @@ func sanitize(gopath, filename string) string { prefix := gopath + string(os.PathSeparator) + "src" + string(os.PathSeparator) return filepath.ToSlash(strings.TrimPrefix(filename, prefix)) } + +// TODO(adonovan): use better stuff from go1.23. +func sortedKeys[K cmp.Ordered, V any](m map[K]V) []K { + keys := make([]K, 0, len(m)) + for k := range m { + keys = append(keys, k) + } + slices.Sort(keys) + return keys +} diff --git a/go/analysis/internal/checker/fix_test.go b/go/analysis/internal/checker/fix_test.go index 8fb7506ac70..8f4e7a3f6a9 100644 --- a/go/analysis/internal/checker/fix_test.go +++ b/go/analysis/internal/checker/fix_test.go @@ -281,6 +281,9 @@ func TestScript(t *testing.T) { t.Logf("%s: $ %s\nstdout:\n%s\nstderr:\n%s", prefix, clean(cmd), stdout, lastStderr) } + // Keep error reporting logic below consistent with + // applyDiffsAndCompare in ../../analysistest/analysistest.go! 
+ unified := func(xlabel, ylabel string, x, y []byte) string { x = append(slices.Clip(bytes.TrimSpace(x)), '\n') y = append(slices.Clip(bytes.TrimSpace(y)), '\n') diff --git a/gopls/internal/analysis/modernize/testdata/src/slicesdelete/slicesdelete.go.golden b/gopls/internal/analysis/modernize/testdata/src/slicesdelete/slicesdelete.go.golden index 9b2ba9a0b80..2d9447af3a3 100644 --- a/gopls/internal/analysis/modernize/testdata/src/slicesdelete/slicesdelete.go.golden +++ b/gopls/internal/analysis/modernize/testdata/src/slicesdelete/slicesdelete.go.golden @@ -2,20 +2,6 @@ package slicesdelete import "slices" -import "slices" - -import "slices" - -import "slices" - -import "slices" - -import "slices" - -import "slices" - -import "slices" - var g struct{ f []int } func slicesdelete(test, other []byte, i int) { diff --git a/gopls/internal/analysis/modernize/testdata/src/sortslice/sortslice.go.golden b/gopls/internal/analysis/modernize/testdata/src/sortslice/sortslice.go.golden index d97636fd311..34af5aad60b 100644 --- a/gopls/internal/analysis/modernize/testdata/src/sortslice/sortslice.go.golden +++ b/gopls/internal/analysis/modernize/testdata/src/sortslice/sortslice.go.golden @@ -2,8 +2,6 @@ package sortslice import "slices" -import "slices" - import "sort" type myint int From 0d16805d3d4f589b6910ab64a4c8e18dc5f02f16 Mon Sep 17 00:00:00 2001 From: Gopher Robot Date: Tue, 11 Feb 2025 10:28:37 -0800 Subject: [PATCH 005/270] internal/stdlib: update stdlib index for Go 1.24.0 For golang/go#38706. Change-Id: Iaa6281ad4c18906c8dc1733df29ccc6b78130fb4 Reviewed-on: https://go-review.googlesource.com/c/tools/+/648556 Reviewed-by: Cherry Mui Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI Auto-Submit: Gopher Robot --- internal/stdlib/manifest.go | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/internal/stdlib/manifest.go b/internal/stdlib/manifest.go index 9f0b871ff6b..e7d0aee2186 100644 --- a/internal/stdlib/manifest.go +++ b/internal/stdlib/manifest.go @@ -2151,6 +2151,8 @@ var PackageSymbols = map[string][]Symbol{ {"(Type).String", Method, 0}, {"(Version).GoString", Method, 0}, {"(Version).String", Method, 0}, + {"(VersionIndex).Index", Method, 24}, + {"(VersionIndex).IsHidden", Method, 24}, {"ARM_MAGIC_TRAMP_NUMBER", Const, 0}, {"COMPRESS_HIOS", Const, 6}, {"COMPRESS_HIPROC", Const, 6}, @@ -3834,6 +3836,7 @@ var PackageSymbols = map[string][]Symbol{ {"SymType", Type, 0}, {"SymVis", Type, 0}, {"Symbol", Type, 0}, + {"Symbol.HasVersion", Field, 24}, {"Symbol.Info", Field, 0}, {"Symbol.Library", Field, 13}, {"Symbol.Name", Field, 0}, @@ -3843,18 +3846,12 @@ var PackageSymbols = map[string][]Symbol{ {"Symbol.Value", Field, 0}, {"Symbol.Version", Field, 13}, {"Symbol.VersionIndex", Field, 24}, - {"Symbol.VersionScope", Field, 24}, - {"SymbolVersionScope", Type, 24}, {"Type", Type, 0}, {"VER_FLG_BASE", Const, 24}, {"VER_FLG_INFO", Const, 24}, {"VER_FLG_WEAK", Const, 24}, {"Version", Type, 0}, - {"VersionScopeGlobal", Const, 24}, - {"VersionScopeHidden", Const, 24}, - {"VersionScopeLocal", Const, 24}, - {"VersionScopeNone", Const, 24}, - {"VersionScopeSpecific", Const, 24}, + {"VersionIndex", Type, 24}, }, "debug/gosym": { {"(*DecodingError).Error", Method, 0}, From d2585c467c83030b6ee984e63dce55e799ff4741 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Fri, 31 Jan 2025 11:39:18 -0500 Subject: [PATCH 006/270] gopls/internal/golang: folding range: remove FoldingRangeInfo Another unnecessary data type. 
Updates golang/go#71489 Change-Id: I374f677afe44abf818a35741202579abfed4aeb3 Reviewed-on: https://go-review.googlesource.com/c/tools/+/645855 Auto-Submit: Alan Donovan LUCI-TryBot-Result: Go LUCI Reviewed-by: Robert Findley --- gopls/internal/golang/folding_range.go | 64 +++++++++++++------------- gopls/internal/server/folding_range.go | 21 +-------- 2 files changed, 34 insertions(+), 51 deletions(-) diff --git a/gopls/internal/golang/folding_range.go b/gopls/internal/golang/folding_range.go index 9d80cc8de29..4352da28151 100644 --- a/gopls/internal/golang/folding_range.go +++ b/gopls/internal/golang/folding_range.go @@ -6,6 +6,7 @@ package golang import ( "bytes" + "cmp" "context" "go/ast" "go/token" @@ -20,14 +21,8 @@ import ( "golang.org/x/tools/gopls/internal/util/safetoken" ) -// FoldingRangeInfo holds range and kind info of folding for an ast.Node -type FoldingRangeInfo struct { - Range protocol.Range - Kind protocol.FoldingRangeKind -} - // FoldingRange gets all of the folding range for f. -func FoldingRange(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, lineFoldingOnly bool) (ranges []*FoldingRangeInfo, err error) { +func FoldingRange(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, lineFoldingOnly bool) ([]protocol.FoldingRange, error) { // TODO(suzmue): consider limiting the number of folding ranges returned, and // implement a way to prioritize folding ranges in that case. pgf, err := snapshot.ParseGo(ctx, fh, parsego.Full) @@ -48,27 +43,29 @@ func FoldingRange(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, } // Get folding ranges for comments separately as they are not walked by ast.Inspect. - ranges = append(ranges, commentsFoldingRange(pgf)...) + ranges := commentsFoldingRange(pgf) - visit := func(n ast.Node) bool { - rng := foldingRangeFunc(pgf, n, lineFoldingOnly) - if rng != nil { + // Walk the ast and collect folding ranges. + ast.Inspect(pgf.File, func(n ast.Node) bool { + if rng, ok := foldingRangeFunc(pgf, n, lineFoldingOnly); ok { ranges = append(ranges, rng) } return true - } - // Walk the ast and collect folding ranges. - ast.Inspect(pgf.File, visit) + }) - slices.SortFunc(ranges, func(x, y *FoldingRangeInfo) int { - return protocol.CompareRange(x.Range, y.Range) + // Sort by start position. + slices.SortFunc(ranges, func(x, y protocol.FoldingRange) int { + if d := cmp.Compare(x.StartLine, y.StartLine); d != 0 { + return d + } + return cmp.Compare(x.StartCharacter, y.StartCharacter) }) return ranges, nil } // foldingRangeFunc calculates the line folding range for ast.Node n -func foldingRangeFunc(pgf *parsego.File, n ast.Node, lineFoldingOnly bool) *FoldingRangeInfo { +func foldingRangeFunc(pgf *parsego.File, n ast.Node, lineFoldingOnly bool) (protocol.FoldingRange, bool) { // TODO(suzmue): include trailing empty lines before the closing // parenthesis/brace. var kind protocol.FoldingRangeKind @@ -109,25 +106,22 @@ func foldingRangeFunc(pgf *parsego.File, n ast.Node, lineFoldingOnly bool) *Fold // Check that folding positions are valid. if !start.IsValid() || !end.IsValid() { - return nil + return protocol.FoldingRange{}, false } if start == end { // Nothing to fold. - return nil + return protocol.FoldingRange{}, false } // in line folding mode, do not fold if the start and end lines are the same. 
if lineFoldingOnly && safetoken.Line(pgf.Tok, start) == safetoken.Line(pgf.Tok, end) { - return nil + return protocol.FoldingRange{}, false } rng, err := pgf.PosRange(start, end) if err != nil { bug.Reportf("failed to create range: %s", err) // can't happen - return nil - } - return &FoldingRangeInfo{ - Range: rng, - Kind: kind, + return protocol.FoldingRange{}, false } + return foldingRange(kind, rng), true } // getLineFoldingRange returns the folding range for nodes with parentheses/braces/brackets @@ -196,7 +190,7 @@ func getLineFoldingRange(pgf *parsego.File, open, close token.Pos, lineFoldingOn // commentsFoldingRange returns the folding ranges for all comment blocks in file. // The folding range starts at the end of the first line of the comment block, and ends at the end of the // comment block and has kind protocol.Comment. -func commentsFoldingRange(pgf *parsego.File) (comments []*FoldingRangeInfo) { +func commentsFoldingRange(pgf *parsego.File) (comments []protocol.FoldingRange) { tokFile := pgf.Tok for _, commentGrp := range pgf.File.Comments { startGrpLine, endGrpLine := safetoken.Line(tokFile, commentGrp.Pos()), safetoken.Line(tokFile, commentGrp.End()) @@ -218,11 +212,19 @@ func commentsFoldingRange(pgf *parsego.File) (comments []*FoldingRangeInfo) { bug.Reportf("failed to create mapped range: %s", err) // can't happen continue } - comments = append(comments, &FoldingRangeInfo{ - // Fold from the end of the first line comment to the end of the comment block. - Range: rng, - Kind: protocol.Comment, - }) + // Fold from the end of the first line comment to the end of the comment block. + comments = append(comments, foldingRange(protocol.Comment, rng)) } return comments } + +func foldingRange(kind protocol.FoldingRangeKind, rng protocol.Range) protocol.FoldingRange { + return protocol.FoldingRange{ + // I have no idea why LSP doesn't use a protocol.Range here. + StartLine: rng.Start.Line, + StartCharacter: rng.Start.Character, + EndLine: rng.End.Line, + EndCharacter: rng.End.Character, + Kind: string(kind), + } +} diff --git a/gopls/internal/server/folding_range.go b/gopls/internal/server/folding_range.go index 95b2ffc0744..b05d5302f10 100644 --- a/gopls/internal/server/folding_range.go +++ b/gopls/internal/server/folding_range.go @@ -26,24 +26,5 @@ func (s *server) FoldingRange(ctx context.Context, params *protocol.FoldingRange if snapshot.FileKind(fh) != file.Go { return nil, nil // empty result } - ranges, err := golang.FoldingRange(ctx, snapshot, fh, snapshot.Options().LineFoldingOnly) - if err != nil { - return nil, err - } - return toProtocolFoldingRanges(ranges) -} - -func toProtocolFoldingRanges(ranges []*golang.FoldingRangeInfo) ([]protocol.FoldingRange, error) { - result := make([]protocol.FoldingRange, 0, len(ranges)) - for _, info := range ranges { - rng := info.Range - result = append(result, protocol.FoldingRange{ - StartLine: rng.Start.Line, - StartCharacter: rng.Start.Character, - EndLine: rng.End.Line, - EndCharacter: rng.End.Character, - Kind: string(info.Kind), - }) - } - return result, nil + return golang.FoldingRange(ctx, snapshot, fh, snapshot.Options().LineFoldingOnly) } From b3c5d108cdc6a215e5a4169c71a1c4dedbc69a83 Mon Sep 17 00:00:00 2001 From: Rob Findley Date: Tue, 11 Feb 2025 20:58:26 +0000 Subject: [PATCH 007/270] gopls: record telemetry counters for settings that are used Instrument telemetry recording which settings are passed to gopls. In some cases, this merely records whether settings were set. In others, it records buckets for the setting value. 
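For example, the recorded counters take the form "gopls/setting/<name>[:bucket]".
The standalone sketch below (an illustration, not code from this CL) reproduces
the naming scheme implemented by the new telemetry.CounterPath type, using
settings exercised by the new test:

    package main

    import (
        "fmt"
        "strings"
    )

    // fullName mirrors CounterPath.FullName: join all but the last element
    // with "/", then append the last element (the bucket) after ":" if it
    // is non-empty.
    func fullName(path []string) string {
        if len(path) == 0 {
            return ""
        }
        name := strings.Join(path[:len(path)-1], "/")
        if bucket := path[len(path)-1]; bucket != "" {
            name += ":" + bucket
        }
        return name
    }

    func main() {
        // A setting recorded only as "was set" (no bucket).
        fmt.Println(fullName([]string{"gopls", "setting", "diagnosticsDelay", ""}))
        // A boolean setting, bucketed by its value.
        fmt.Println(fullName([]string{"gopls", "setting", "staticcheck", "true"}))
        // A map-valued setting: sub-setting name plus a value bucket.
        fmt.Println(fullName([]string{"gopls", "setting", "analyses", "deprecated", "false"}))
        // Output:
        // gopls/setting/diagnosticsDelay
        // gopls/setting/staticcheck:true
        // gopls/setting/analyses/deprecated:false
    }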
Fixes golang/go#71285 Change-Id: I820318fe9cf1b05accb3105e5e2d6ddc3c5e768f Reviewed-on: https://go-review.googlesource.com/c/tools/+/648416 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan Auto-Submit: Robert Findley --- gopls/internal/cache/session_test.go | 3 +- gopls/internal/server/general.go | 17 +- gopls/internal/settings/settings.go | 201 ++++++++++++------- gopls/internal/settings/settings_test.go | 2 +- gopls/internal/telemetry/counterpath.go | 30 +++ gopls/internal/telemetry/counterpath_test.go | 47 +++++ gopls/internal/telemetry/telemetry_test.go | 44 ++++ 7 files changed, 262 insertions(+), 82 deletions(-) create mode 100644 gopls/internal/telemetry/counterpath.go create mode 100644 gopls/internal/telemetry/counterpath_test.go diff --git a/gopls/internal/cache/session_test.go b/gopls/internal/cache/session_test.go index 5f9a59a4945..1b7472af605 100644 --- a/gopls/internal/cache/session_test.go +++ b/gopls/internal/cache/session_test.go @@ -337,7 +337,8 @@ replace ( for _, f := range test.folders { opts := settings.DefaultOptions() if f.options != nil { - for _, err := range opts.Set(f.options(dir)) { + _, errs := opts.Set(f.options(dir)) + for _, err := range errs { t.Fatal(err) } } diff --git a/gopls/internal/server/general.go b/gopls/internal/server/general.go index 35614945f9d..de6b764c79f 100644 --- a/gopls/internal/server/general.go +++ b/gopls/internal/server/general.go @@ -28,6 +28,7 @@ import ( "golang.org/x/tools/gopls/internal/protocol" "golang.org/x/tools/gopls/internal/protocol/semtok" "golang.org/x/tools/gopls/internal/settings" + "golang.org/x/tools/gopls/internal/telemetry" "golang.org/x/tools/gopls/internal/util/bug" "golang.org/x/tools/gopls/internal/util/goversion" "golang.org/x/tools/gopls/internal/util/moremaps" @@ -74,7 +75,11 @@ func (s *server) Initialize(ctx context.Context, params *protocol.ParamInitializ // TODO(rfindley): eliminate this defer. defer func() { s.SetOptions(options) }() - s.handleOptionErrors(ctx, options.Set(params.InitializationOptions)) + // Process initialization options. + { + res, errs := options.Set(params.InitializationOptions) + s.handleOptionResult(ctx, res, errs) + } options.ForClientCapabilities(params.ClientInfo, params.Capabilities) if options.ShowBugReports { @@ -541,7 +546,8 @@ func (s *server) fetchFolderOptions(ctx context.Context, folder protocol.Documen opts = opts.Clone() for _, config := range configs { - s.handleOptionErrors(ctx, opts.Set(config)) + res, errs := opts.Set(config) + s.handleOptionResult(ctx, res, errs) } return opts, nil } @@ -555,7 +561,12 @@ func (s *server) eventuallyShowMessage(ctx context.Context, msg *protocol.ShowMe s.notifications = append(s.notifications, msg) } -func (s *server) handleOptionErrors(ctx context.Context, optionErrors []error) { +func (s *server) handleOptionResult(ctx context.Context, applied []telemetry.CounterPath, optionErrors []error) { + for _, path := range applied { + path = append(settings.CounterPath{"gopls", "setting"}, path...) 
+ counter.Inc(path.FullName()) + } + var warnings, errs []string for _, err := range optionErrors { if err == nil { diff --git a/gopls/internal/settings/settings.go b/gopls/internal/settings/settings.go index 8f33bdae96b..7d64cbef459 100644 --- a/gopls/internal/settings/settings.go +++ b/gopls/internal/settings/settings.go @@ -14,6 +14,7 @@ import ( "golang.org/x/tools/gopls/internal/file" "golang.org/x/tools/gopls/internal/protocol" "golang.org/x/tools/gopls/internal/protocol/semtok" + "golang.org/x/tools/gopls/internal/telemetry" "golang.org/x/tools/gopls/internal/util/frob" ) @@ -822,10 +823,18 @@ const ( // TODO: support "Manual"? ) -// Set updates *options based on the provided JSON value: +type CounterPath = telemetry.CounterPath + +// Set updates *Options based on the provided JSON value: // null, bool, string, number, array, or object. +// +// The applied result describes settings that were applied. Each CounterPath +// contains at least the name of the setting, but may also include sub-setting +// names for settings that are themselves maps, and/or a non-empty bucket name +// when bucketing is desirable. +// // On failure, it returns one or more non-nil errors. -func (o *Options) Set(value any) (errors []error) { +func (o *Options) Set(value any) (applied []CounterPath, errs []error) { switch value := value.(type) { case nil: case map[string]any: @@ -840,19 +849,32 @@ func (o *Options) Set(value any) (errors []error) { name = split[len(split)-1] if _, ok := seen[name]; ok { - errors = append(errors, fmt.Errorf("duplicate value for %s", name)) + errs = append(errs, fmt.Errorf("duplicate value for %s", name)) } seen[name] = struct{}{} - if err := o.setOne(name, value); err != nil { + paths, err := o.setOne(name, value) + if err != nil { err := fmt.Errorf("setting option %q: %w", name, err) - errors = append(errors, err) + errs = append(errs, err) + } + _, soft := err.(*SoftError) + if err == nil || soft { + if len(paths) == 0 { + path := CounterPath{name, ""} + applied = append(applied, path) + } else { + for _, subpath := range paths { + path := append(CounterPath{name}, subpath...) + applied = append(applied, path) + } + } } } default: - errors = append(errors, fmt.Errorf("invalid options type %T (want JSON null or object)", value)) + errs = append(errs, fmt.Errorf("invalid options type %T (want JSON null or object)", value)) } - return errors + return applied, errs } func (o *Options) ForClientCapabilities(clientInfo *protocol.ClientInfo, caps protocol.ClientCapabilities) { @@ -955,14 +977,26 @@ func validateDirectoryFilter(ifilter string) (string, error) { } // setOne updates a field of o based on the name and value. +// +// The applied result describes the counter values to be updated as a result of +// the applied setting. If the result is nil, the default counter for this +// setting should be updated. +// +// For example, if the setting name is "foo", +// - If applied is nil, update the count for "foo". +// - If applied is []CounterPath{{"bucket"}}, update the count for +// foo:bucket. +// - If applied is []CounterPath{{"a","b"}, {"c","d"}}, update foo/a:b and +// foo/c:d. +// // It returns an error if the value was invalid or duplicate. // It is the caller's responsibility to augment the error with 'name'. 
-func (o *Options) setOne(name string, value any) error { +func (o *Options) setOne(name string, value any) (applied []CounterPath, _ error) { switch name { case "env": env, ok := value.(map[string]any) if !ok { - return fmt.Errorf("invalid type %T (want JSON object)", value) + return nil, fmt.Errorf("invalid type %T (want JSON object)", value) } if o.Env == nil { o.Env = make(map[string]string) @@ -973,30 +1007,32 @@ func (o *Options) setOne(name string, value any) error { case string, int: o.Env[k] = fmt.Sprint(v) default: - return fmt.Errorf("invalid map value %T (want string)", v) + return nil, fmt.Errorf("invalid map value %T (want string)", v) } } + return nil, nil case "buildFlags": - return setStringSlice(&o.BuildFlags, value) + return nil, setStringSlice(&o.BuildFlags, value) case "directoryFilters": filterStrings, err := asStringSlice(value) if err != nil { - return err + return nil, err } var filters []string for _, filterStr := range filterStrings { filter, err := validateDirectoryFilter(filterStr) if err != nil { - return err + return nil, err } filters = append(filters, strings.TrimRight(filepath.FromSlash(filter), "/")) } o.DirectoryFilters = filters + return nil, nil case "workspaceFiles": - return setStringSlice(&o.WorkspaceFiles, value) + return nil, setStringSlice(&o.WorkspaceFiles, value) case "completionDocumentation": return setBool(&o.CompletionDocumentation, value) case "usePlaceholders": @@ -1006,7 +1042,7 @@ func (o *Options) setOne(name string, value any) error { case "completeUnimported": return setBool(&o.CompleteUnimported, value) case "completionBudget": - return setDuration(&o.CompletionBudget, value) + return nil, setDuration(&o.CompletionBudget, value) case "importsSource": return setEnum(&o.ImportsSource, value, ImportsSourceOff, @@ -1038,7 +1074,7 @@ func (o *Options) setOne(name string, value any) error { case "hoverKind": if s, ok := value.(string); ok && strings.EqualFold(s, "structured") { - return deprecatedError("the experimental hoverKind='structured' setting was removed in gopls/v0.18.0 (https://go.dev/issue/70233)") + return nil, deprecatedError("the experimental hoverKind='structured' setting was removed in gopls/v0.18.0 (https://go.dev/issue/70233)") } return setEnum(&o.HoverKind, value, NoDocumentation, @@ -1047,7 +1083,7 @@ func (o *Options) setOne(name string, value any) error { FullDocumentation) case "linkTarget": - return setString(&o.LinkTarget, value) + return nil, setString(&o.LinkTarget, value) case "linksInHover": switch value { @@ -1058,9 +1094,9 @@ func (o *Options) setOne(name string, value any) error { case "gopls": o.LinksInHover = LinksInHover_Gopls default: - return fmt.Errorf(`invalid value %s; expect false, true, or "gopls"`, - value) + return nil, fmt.Errorf(`invalid value %s; expect false, true, or "gopls"`, value) } + return nil, nil case "importShortcut": return setEnum(&o.ImportShortcut, value, @@ -1069,18 +1105,20 @@ func (o *Options) setOne(name string, value any) error { DefinitionShortcut) case "analyses": - if err := setBoolMap(&o.Analyses, value); err != nil { - return err + counts, err := setBoolMap(&o.Analyses, value) + if err != nil { + return nil, err } if o.Analyses["fieldalignment"] { - return deprecatedError("the 'fieldalignment' analyzer was removed in gopls/v0.17.0; instead, hover over struct fields to see size/offset information (https://go.dev/issue/66861)") + return counts, deprecatedError("the 'fieldalignment' analyzer was removed in gopls/v0.17.0; instead, hover over struct fields to see size/offset 
information (https://go.dev/issue/66861)") } + return counts, nil case "hints": return setBoolMap(&o.Hints, value) case "annotations": - return deprecatedError("the 'annotations' setting was removed in gopls/v0.18.0; all compiler optimization details are now shown") + return nil, deprecatedError("the 'annotations' setting was removed in gopls/v0.18.0; all compiler optimization details are now shown") case "vulncheck": return setEnum(&o.Vulncheck, value, @@ -1090,7 +1128,7 @@ func (o *Options) setOne(name string, value any) error { case "codelenses", "codelens": lensOverrides, err := asBoolMap[CodeLensSource](value) if err != nil { - return err + return nil, err } if o.Codelenses == nil { o.Codelenses = make(map[CodeLensSource]bool) @@ -1098,15 +1136,21 @@ func (o *Options) setOne(name string, value any) error { o.Codelenses = maps.Clone(o.Codelenses) maps.Copy(o.Codelenses, lensOverrides) + var counts []CounterPath + for k, v := range lensOverrides { + counts = append(counts, CounterPath{string(k), fmt.Sprint(v)}) + } + if name == "codelens" { - return deprecatedError("codelenses") + return counts, deprecatedError("codelenses") } + return counts, nil case "staticcheck": return setBool(&o.Staticcheck, value) case "local": - return setString(&o.Local, value) + return nil, setString(&o.Local, value) case "verboseOutput": return setBool(&o.VerboseOutput, value) @@ -1128,16 +1172,18 @@ func (o *Options) setOne(name string, value any) error { // TODO(hxjiang): deprecate noSemanticString and noSemanticNumber. case "noSemanticString": - if err := setBool(&o.NoSemanticString, value); err != nil { - return err + counts, err := setBool(&o.NoSemanticString, value) + if err != nil { + return nil, err } - return &SoftError{fmt.Sprintf("noSemanticString setting is deprecated, use semanticTokenTypes instead (though you can continue to apply them for the time being).")} + return counts, &SoftError{"noSemanticString setting is deprecated, use semanticTokenTypes instead (though you can continue to apply them for the time being)."} case "noSemanticNumber": - if err := setBool(&o.NoSemanticNumber, value); err != nil { - return nil + counts, err := setBool(&o.NoSemanticNumber, value) + if err != nil { + return nil, err } - return &SoftError{fmt.Sprintf("noSemanticNumber setting is deprecated, use semanticTokenTypes instead (though you can continue to apply them for the time being).")} + return counts, &SoftError{"noSemanticNumber setting is deprecated, use semanticTokenTypes instead (though you can continue to apply them for the time being)."} case "semanticTokenTypes": return setBoolMap(&o.SemanticTokenTypes, value) @@ -1157,15 +1203,16 @@ func (o *Options) setOne(name string, value any) error { case "templateExtensions": switch value := value.(type) { case []any: - return setStringSlice(&o.TemplateExtensions, value) + return nil, setStringSlice(&o.TemplateExtensions, value) case nil: o.TemplateExtensions = nil default: - return fmt.Errorf("unexpected type %T (want JSON array of string)", value) + return nil, fmt.Errorf("unexpected type %T (want JSON array of string)", value) } + return nil, nil case "diagnosticsDelay": - return setDuration(&o.DiagnosticsDelay, value) + return nil, setDuration(&o.DiagnosticsDelay, value) case "diagnosticsTrigger": return setEnum(&o.DiagnosticsTrigger, value, @@ -1175,11 +1222,8 @@ func (o *Options) setOne(name string, value any) error { case "analysisProgressReporting": return setBool(&o.AnalysisProgressReporting, value) - case "allowImplicitNetworkAccess": - return 
deprecatedError("") - case "standaloneTags": - return setStringSlice(&o.StandaloneTags, value) + return nil, setStringSlice(&o.StandaloneTags, value) case "subdirWatchPatterns": return setEnum(&o.SubdirWatchPatterns, value, @@ -1188,7 +1232,7 @@ func (o *Options) setOne(name string, value any) error { SubdirWatchPatternsAuto) case "reportAnalysisProgressAfter": - return setDuration(&o.ReportAnalysisProgressAfter, value) + return nil, setDuration(&o.ReportAnalysisProgressAfter, value) case "telemetryPrompt": return setBool(&o.TelemetryPrompt, value) @@ -1213,50 +1257,54 @@ func (o *Options) setOne(name string, value any) error { // renamed case "experimentalDisabledAnalyses": - return deprecatedError("analyses") + return nil, deprecatedError("analyses") case "disableDeepCompletion": - return deprecatedError("deepCompletion") + return nil, deprecatedError("deepCompletion") case "disableFuzzyMatching": - return deprecatedError("fuzzyMatching") + return nil, deprecatedError("fuzzyMatching") case "wantCompletionDocumentation": - return deprecatedError("completionDocumentation") + return nil, deprecatedError("completionDocumentation") case "wantUnimportedCompletions": - return deprecatedError("completeUnimported") + return nil, deprecatedError("completeUnimported") case "fuzzyMatching": - return deprecatedError("matcher") + return nil, deprecatedError("matcher") case "caseSensitiveCompletion": - return deprecatedError("matcher") + return nil, deprecatedError("matcher") case "experimentalDiagnosticsDelay": - return deprecatedError("diagnosticsDelay") + return nil, deprecatedError("diagnosticsDelay") // deprecated + + case "allowImplicitNetworkAccess": + return nil, deprecatedError("") + case "memoryMode": - return deprecatedError("") + return nil, deprecatedError("") case "tempModFile": - return deprecatedError("") + return nil, deprecatedError("") case "experimentalWorkspaceModule": - return deprecatedError("") + return nil, deprecatedError("") case "experimentalTemplateSupport": - return deprecatedError("") + return nil, deprecatedError("") case "experimentalWatchedFileDelay": - return deprecatedError("") + return nil, deprecatedError("") case "experimentalPackageCacheKey": - return deprecatedError("") + return nil, deprecatedError("") case "allowModfileModifications": - return deprecatedError("") + return nil, deprecatedError("") case "allExperiments": // golang/go#65548: this setting is a no-op, but we fail don't report it as @@ -1265,29 +1313,29 @@ func (o *Options) setOne(name string, value any) error { // If, in the future, VS Code stops injecting this, we could theoretically // report an error here, but it also seems harmless to keep ignoring this // setting forever. + return nil, nil case "experimentalUseInvalidMetadata": - return deprecatedError("") + return nil, deprecatedError("") case "newDiff": - return deprecatedError("") + return nil, deprecatedError("") case "wantSuggestedFixes": - return deprecatedError("") + return nil, deprecatedError("") case "noIncrementalSync": - return deprecatedError("") + return nil, deprecatedError("") case "watchFileChanges": - return deprecatedError("") + return nil, deprecatedError("") case "go-diff": - return deprecatedError("") + return nil, deprecatedError("") default: - return fmt.Errorf("unexpected setting") + return nil, fmt.Errorf("unexpected setting") } - return nil } // EnabledSemanticTokenModifiers returns a map of modifiers to boolean. 
@@ -1323,11 +1371,6 @@ func (e *SoftError) Error() string { return e.msg } -// softErrorf reports a soft error related to the current option. -func softErrorf(format string, args ...any) error { - return &SoftError{fmt.Sprintf(format, args...)} -} - // deprecatedError reports the current setting as deprecated. // The optional replacement is suggested to the user. func deprecatedError(replacement string) error { @@ -1341,13 +1384,13 @@ func deprecatedError(replacement string) error { // setT() and asT() helpers: the setT forms write to the 'dest *T' // variable only on success, to reduce boilerplate in Option.set. -func setBool(dest *bool, value any) error { +func setBool(dest *bool, value any) ([]CounterPath, error) { b, err := asBool(value) if err != nil { - return err + return nil, err } *dest = b - return nil + return []CounterPath{{fmt.Sprint(b)}}, nil } func asBool(value any) (bool, error) { @@ -1371,13 +1414,17 @@ func setDuration(dest *time.Duration, value any) error { return nil } -func setBoolMap[K ~string](dest *map[K]bool, value any) error { +func setBoolMap[K ~string](dest *map[K]bool, value any) ([]CounterPath, error) { m, err := asBoolMap[K](value) if err != nil { - return err + return nil, err } *dest = m - return nil + var counts []CounterPath + for k, v := range m { + counts = append(counts, CounterPath{string(k), fmt.Sprint(v)}) + } + return counts, nil } func asBoolMap[K ~string](value any) (map[K]bool, error) { @@ -1438,13 +1485,13 @@ func asStringSlice(value any) ([]string, error) { return slice, nil } -func setEnum[S ~string](dest *S, value any, options ...S) error { +func setEnum[S ~string](dest *S, value any, options ...S) ([]CounterPath, error) { enum, err := asEnum(value, options...) if err != nil { - return err + return nil, err } *dest = enum - return nil + return []CounterPath{{string(enum)}}, nil } func asEnum[S ~string](value any, options ...S) (S, error) { diff --git a/gopls/internal/settings/settings_test.go b/gopls/internal/settings/settings_test.go index 63b4aded8bd..05afa8ecac3 100644 --- a/gopls/internal/settings/settings_test.go +++ b/gopls/internal/settings/settings_test.go @@ -206,7 +206,7 @@ func TestOptions_Set(t *testing.T) { for _, test := range tests { var opts Options - err := opts.Set(map[string]any{test.name: test.value}) + _, err := opts.Set(map[string]any{test.name: test.value}) if err != nil { if !test.wantError { t.Errorf("Options.set(%q, %v) failed: %v", diff --git a/gopls/internal/telemetry/counterpath.go b/gopls/internal/telemetry/counterpath.go new file mode 100644 index 00000000000..e6d9d84b531 --- /dev/null +++ b/gopls/internal/telemetry/counterpath.go @@ -0,0 +1,30 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package telemetry + +import "strings" + +// A CounterPath represents the components of a telemetry counter name. +// +// By convention, counter names follow the format path/to/counter:bucket. The +// CounterPath holds the '/'-separated components of this path, along with a +// final element representing the bucket. +// +// CounterPaths may be used to build up counters incrementally, such as when a +// set of observed counters shared a common prefix, to be controlled by the +// caller. +type CounterPath []string + +// FullName returns the counter name for the receiver. 
+func (p CounterPath) FullName() string { + if len(p) == 0 { + return "" + } + name := strings.Join([]string(p[:len(p)-1]), "/") + if bucket := p[len(p)-1]; bucket != "" { + name += ":" + bucket + } + return name +} diff --git a/gopls/internal/telemetry/counterpath_test.go b/gopls/internal/telemetry/counterpath_test.go new file mode 100644 index 00000000000..b6ac7478b72 --- /dev/null +++ b/gopls/internal/telemetry/counterpath_test.go @@ -0,0 +1,47 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package telemetry + +import ( + "testing" +) + +// TestCounterPath tests the formatting of various counter paths. +func TestCounterPath(t *testing.T) { + tests := []struct { + path CounterPath + want string + }{ + { + path: CounterPath{}, + want: "", + }, + { + path: CounterPath{"counter"}, + want: ":counter", + }, + { + path: CounterPath{"counter", "bucket"}, + want: "counter:bucket", + }, + { + path: CounterPath{"path", "to", "counter"}, + want: "path/to:counter", + }, + { + path: CounterPath{"multi", "component", "path", "bucket"}, + want: "multi/component/path:bucket", + }, + { + path: CounterPath{"path", ""}, + want: "path", + }, + } + for _, tt := range tests { + if got := tt.path.FullName(); got != tt.want { + t.Errorf("CounterPath(%v).FullName() = %v, want %v", tt.path, got, tt.want) + } + } +} diff --git a/gopls/internal/telemetry/telemetry_test.go b/gopls/internal/telemetry/telemetry_test.go index 7aaca41ab55..4c41cc40dc9 100644 --- a/gopls/internal/telemetry/telemetry_test.go +++ b/gopls/internal/telemetry/telemetry_test.go @@ -119,6 +119,50 @@ func TestTelemetry(t *testing.T) { } } +func TestSettingTelemetry(t *testing.T) { + // counters that should be incremented by each session + sessionCounters := []*counter.Counter{ + counter.New("gopls/setting/diagnosticsDelay"), + counter.New("gopls/setting/staticcheck:true"), + counter.New("gopls/setting/noSemanticString:true"), + counter.New("gopls/setting/analyses/deprecated:false"), + } + + initialCounts := make([]uint64, len(sessionCounters)) + for i, c := range sessionCounters { + count, err := countertest.ReadCounter(c) + if err != nil { + continue // counter db not open, or counter not found + } + initialCounts[i] = count + } + + // Run gopls. + WithOptions( + Modes(Default), + Settings{ + "staticcheck": true, + "analyses": map[string]bool{ + "deprecated": false, + }, + "diagnosticsDelay": "0s", + "noSemanticString": true, + }, + ).Run(t, "", func(_ *testing.T, env *Env) { + }) + + for i, c := range sessionCounters { + count, err := countertest.ReadCounter(c) + if err != nil { + t.Errorf("ReadCounter(%q) failed: %v", c.Name(), err) + continue + } + if count <= initialCounts[i] { + t.Errorf("ReadCounter(%q) = %d, want > %d", c.Name(), count, initialCounts[i]) + } + } +} + func addForwardedCounters(env *Env, names []string, values []int64) { args, err := command.MarshalArgs(command.AddTelemetryCountersArgs{ Names: names, Values: values, From 25932623b63eed2010348abe2dc5ff3e1fe6f86d Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Tue, 11 Feb 2025 16:04:17 -0500 Subject: [PATCH 008/270] gopls/internal/telemetry/cmd/stacks: remove leading \b match A directory separator / does not create word boundaries, so dir/file will not match \bfile. This CL removes the leading word-boundary match from the interpretation of string literals in stacks' claim expression language, which was causing spurious duplicate issues. 
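To illustrate with the updated test inputs (a standalone sketch, not code from
this CL): for the literal "fu+12", the predicate previously compiled the
pattern \bfu\+12\b and now compiles fu\+12\b, so a match no longer needs to
begin at a word boundary but must still end at one:

    package main

    import (
        "fmt"
        "regexp"
    )

    func main() {
        oldRE := regexp.MustCompile(`\bfu\+12\b`) // before this CL
        newRE := regexp.MustCompile(`fu\+12\b`)   // after this CL

        fmt.Println(oldRE.MatchString("snafu+12,")) // false: "fu" is preceded by the word character 'a'
        fmt.Println(newRE.MatchString("snafu+12,")) // true: no leading word boundary required
        fmt.Println(newRE.MatchString("x:fu+123,")) // false: the match must still end at a word boundary
    }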
+ test Change-Id: Ie02be3591096ddf1d38b10873bed02449e35bd56 Reviewed-on: https://go-review.googlesource.com/c/tools/+/648579 LUCI-TryBot-Result: Go LUCI Reviewed-by: Jonathan Amsterdam --- gopls/internal/telemetry/cmd/stacks/stacks.go | 19 +++++++++++++++---- .../telemetry/cmd/stacks/stacks_test.go | 10 ++++++---- 2 files changed, 21 insertions(+), 8 deletions(-) diff --git a/gopls/internal/telemetry/cmd/stacks/stacks.go b/gopls/internal/telemetry/cmd/stacks/stacks.go index 7cb20012657..36a675d0eb0 100644 --- a/gopls/internal/telemetry/cmd/stacks/stacks.go +++ b/gopls/internal/telemetry/cmd/stacks/stacks.go @@ -479,11 +479,20 @@ func parsePredicate(s string) (func(string) bool, error) { if err != nil { return err } - // The literal should match complete words. It may match multiple words, - // if it contains non-word runes like whitespace; but it must match word - // boundaries at each end. + // The end of the literal (usually "symbol", + // "pkg.symbol", or "pkg.symbol:+1") must + // match a word boundary. However, the start + // of the literal need not: an input line such + // as "domain.name/dir/pkg.symbol:+1" should + // match literal "pkg.symbol", but the slash + // is not a word boundary (witness: + // https://go.dev/play/p/w-8ev_VUBSq). + // + // It may match multiple words if it contains + // non-word runes like whitespace. + // // The constructed regular expression is always valid. - literalRegexps[e] = regexp.MustCompile(`\b` + regexp.QuoteMeta(lit) + `\b`) + literalRegexps[e] = regexp.MustCompile(regexp.QuoteMeta(lit) + `\b`) default: return fmt.Errorf("syntax error (%T)", e) @@ -1084,6 +1093,8 @@ type Issue struct { newStacks []string // new stacks to add to existing issue (comments and IDs) } +func (issue *Issue) String() string { return fmt.Sprintf("#%d", issue.Number) } + type User struct { Login string HTMLURL string `json:"html_url"` diff --git a/gopls/internal/telemetry/cmd/stacks/stacks_test.go b/gopls/internal/telemetry/cmd/stacks/stacks_test.go index 452113a1581..9f798aa43a3 100644 --- a/gopls/internal/telemetry/cmd/stacks/stacks_test.go +++ b/gopls/internal/telemetry/cmd/stacks/stacks_test.go @@ -85,13 +85,15 @@ func TestParsePredicate(t *testing.T) { want bool }{ {`"x"`, `"x"`, true}, - {`"x"`, `"axe"`, false}, // literals match whole words + {`"x"`, `"axe"`, false}, // literals must match word ends + {`"xe"`, `"axe"`, true}, {`"x"`, "val:x+5", true}, {`"fu+12"`, "x:fu+12,", true}, - {`"fu+12"`, "snafu+12,", false}, + {`"fu+12"`, "snafu+12,", true}, // literals needn't match word start {`"fu+12"`, "x:fu+123,", false}, - {`"a.*b"`, "a.*b", true}, // regexp metachars are escaped - {`"a.*b"`, "axxb", false}, // ditto + {`"foo:+12"`, "dir/foo:+12,", true}, // literals needn't match word start + {`"a.*b"`, "a.*b", true}, // regexp metachars are escaped + {`"a.*b"`, "axxb", false}, // ditto {`"x"`, `"y"`, false}, {`!"x"`, "x", false}, {`!"x"`, "y", true}, From d98774edc040d4c944774f1b6777522d4d921b54 Mon Sep 17 00:00:00 2001 From: Sean Liao Date: Sun, 9 Feb 2025 13:15:11 +0000 Subject: [PATCH 009/270] cmd/signature-fuzzer/internal/fuzz-generator: update to math/rand/v2 Fixes golang/go#71613 Change-Id: Id69044282568b3564aee82dfe4c1b98c41d16d0f Reviewed-on: https://go-review.googlesource.com/c/tools/+/647896 Reviewed-by: Than McIntosh Reviewed-by: Dmitri Shuralyov Auto-Submit: Dmitri Shuralyov Reviewed-by: Cherry Mui LUCI-TryBot-Result: Go LUCI --- .../internal/fuzz-generator/wraprand.go | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git 
a/cmd/signature-fuzzer/internal/fuzz-generator/wraprand.go b/cmd/signature-fuzzer/internal/fuzz-generator/wraprand.go index bba178dc317..f83a5f22e27 100644 --- a/cmd/signature-fuzzer/internal/fuzz-generator/wraprand.go +++ b/cmd/signature-fuzzer/internal/fuzz-generator/wraprand.go @@ -6,7 +6,7 @@ package generator import ( "fmt" - "math/rand" + "math/rand/v2" "os" "runtime" "strings" @@ -20,8 +20,7 @@ const ( ) func NewWrapRand(seed int64, ctl int) *wraprand { - rand.Seed(seed) - return &wraprand{seed: seed, ctl: ctl} + return &wraprand{seed: seed, ctl: ctl, rand: rand.New(rand.NewPCG(0, uint64(seed)))} } type wraprand struct { @@ -32,6 +31,7 @@ type wraprand struct { tag string calls []string ctl int + rand *rand.Rand } func (w *wraprand) captureCall(tag string, val string) { @@ -59,7 +59,7 @@ func (w *wraprand) captureCall(tag string, val string) { func (w *wraprand) Intn(n int64) int64 { w.intncalls++ - rv := rand.Int63n(n) + rv := w.rand.Int64N(n) if w.ctl&RandCtlCapture != 0 { w.captureCall("Intn", fmt.Sprintf("%d", rv)) } @@ -68,7 +68,7 @@ func (w *wraprand) Intn(n int64) int64 { func (w *wraprand) Float32() float32 { w.f32calls++ - rv := rand.Float32() + rv := w.rand.Float32() if w.ctl&RandCtlCapture != 0 { w.captureCall("Float32", fmt.Sprintf("%f", rv)) } @@ -77,7 +77,7 @@ func (w *wraprand) Float32() float32 { func (w *wraprand) NormFloat64() float64 { w.f64calls++ - rv := rand.NormFloat64() + rv := w.rand.NormFloat64() if w.ctl&RandCtlCapture != 0 { w.captureCall("NormFloat64", fmt.Sprintf("%f", rv)) } @@ -85,7 +85,7 @@ func (w *wraprand) NormFloat64() float64 { } func (w *wraprand) emitCalls(fn string) { - outf, err := os.OpenFile(fn, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0666) + outf, err := os.OpenFile(fn, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0o666) if err != nil { panic(err) } From b752317a21a68f705b3f8845fb2696d6f977cf4e Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Tue, 11 Feb 2025 17:01:42 -0500 Subject: [PATCH 010/270] internal/analysisinternal: disable AddImport test without go command The AddImport test uses the default importer, which calls go list. This fails in environments that can't call the go command, like js/wasm. Add a predicate to testenv that asserts the need for the default importer, and call it from TestAddImport. A subtlety: although this bug manifested itself only for the dot-import cases, in fact all the test cases failed type checking on js/wasm for this reason. But a successful type-check is not a precondition for the test (see the new comment in TestAddImport). What caused the particular test case to fail was a bad diff resulting from how the edit was applied in the presence of that failure. Fixes golang/go#71645. 
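To illustrate the new predicate (a minimal sketch; the test name and body
here are hypothetical, not part of this CL), a test that depends on the
default importer guards itself like this:

    import (
        "go/importer"
        "go/types"
        "testing"

        "golang.org/x/tools/internal/testenv"
    )

    func TestUsesDefaultImporter(t *testing.T) { // hypothetical name
        testenv.NeedsDefaultImporter(t) // skip when the go command is unavailable (e.g. js/wasm)
        conf := types.Config{Importer: importer.Default()}
        _ = conf // ... type-check something with conf ...
    }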
Change-Id: Ib04afad108c323999bb67f329cf8d9cf329fead1 Reviewed-on: https://go-review.googlesource.com/c/tools/+/648580 Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI --- internal/analysisinternal/addimport_test.go | 6 ++++++ internal/testenv/testenv.go | 10 ++++++++++ 2 files changed, 16 insertions(+) diff --git a/internal/analysisinternal/addimport_test.go b/internal/analysisinternal/addimport_test.go index 12423b7c061..da7c7f90114 100644 --- a/internal/analysisinternal/addimport_test.go +++ b/internal/analysisinternal/addimport_test.go @@ -18,9 +18,12 @@ import ( "github.com/google/go-cmp/cmp" "golang.org/x/tools/go/analysis" "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/testenv" ) func TestAddImport(t *testing.T) { + testenv.NeedsDefaultImporter(t) + descr := func(s string) string { if _, _, line, ok := runtime.Caller(1); ok { return fmt.Sprintf("L%d %s", line, s) @@ -270,6 +273,9 @@ func _(io.Reader) { Implicits: make(map[ast.Node]types.Object), } conf := &types.Config{ + // We don't want to fail if there is an error during type checking: + // the error may be because we're missing an import, and adding imports + // is the whole point of AddImport. Error: func(err error) { t.Log(err) }, Importer: importer.Default(), } diff --git a/internal/testenv/testenv.go b/internal/testenv/testenv.go index 144f4f8fd64..5c541b7b19b 100644 --- a/internal/testenv/testenv.go +++ b/internal/testenv/testenv.go @@ -278,6 +278,16 @@ func NeedsGoBuild(t testing.TB) { NeedsTool(t, "go") } +// NeedsDefaultImporter skips t if the test uses the default importer, +// returned by [go/importer.Default]. +func NeedsDefaultImporter(t testing.TB) { + t.Helper() + // The default importer may call `go list` + // (in src/internal/exportdata/exportdata.go:lookupGorootExport), + // so check for the go tool. + NeedsTool(t, "go") +} + // ExitIfSmallMachine emits a helpful diagnostic and calls os.Exit(0) if the // current machine is a builder known to have scarce resources. // From b5a64bbcfd2247d59f40403a28ca8e3a9a417a24 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Fri, 7 Feb 2025 13:18:50 -0500 Subject: [PATCH 011/270] go/analysis/internal/checker: be silent with -fix Just apply the fixes without listing the diagnostics. Also: be verbose with -v. The -v flag exists for historical reasons to do with the vet CLI, but it had no effect. This change makes it effectively an alias for -debug=v, which no-one can remember how to spell. Also, list the number of fixes and fixes updated when -fix and -v. 
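A minimal sketch of the resulting behavior, using a driver built on
singlechecker (the binary name "mychecker" and the choice of analyzer are
illustrative, not part of this CL):

    package main

    import (
        "golang.org/x/tools/go/analysis/passes/unusedresult"
        "golang.org/x/tools/go/analysis/singlechecker"
    )

    func main() { singlechecker.Main(unusedresult.Analyzer) }

With this change, "mychecker -fix ./..." applies any suggested fixes
without printing the diagnostics and exits 0, while "mychecker -fix -v
./..." additionally logs a summary such as "applied 8 fixes, updated 3
files".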
Change-Id: Ic2342ad4868b6c8649077bf13c48e8727dbeba37 Reviewed-on: https://go-review.googlesource.com/c/tools/+/647698 LUCI-TryBot-Result: Go LUCI Auto-Submit: Alan Donovan Reviewed-by: Robert Findley --- go/analysis/internal/checker/checker.go | 65 ++++++++++++++----- go/analysis/internal/checker/checker_test.go | 32 +++++---- go/analysis/internal/checker/fix_test.go | 4 +- go/analysis/internal/checker/start_test.go | 1 + .../internal/checker/testdata/diff.txt | 3 +- .../internal/checker/testdata/fixes.txt | 6 +- .../internal/checker/testdata/importdup.txt | 5 +- .../internal/checker/testdata/importdup2.txt | 5 +- .../internal/checker/testdata/noend.txt | 3 +- .../internal/checker/testdata/overlap.txt | 7 +- 10 files changed, 85 insertions(+), 46 deletions(-) diff --git a/go/analysis/internal/checker/checker.go b/go/analysis/internal/checker/checker.go index fb3c47b1625..2a9ff2931b3 100644 --- a/go/analysis/internal/checker/checker.go +++ b/go/analysis/internal/checker/checker.go @@ -86,7 +86,36 @@ func RegisterFlags() { // It provides most of the logic for the main functions of both the // singlechecker and the multi-analysis commands. // It returns the appropriate exit code. -func Run(args []string, analyzers []*analysis.Analyzer) int { +// +// TODO(adonovan): tests should not call this function directly; +// fiddling with global variables (flags) is error-prone and hostile +// to parallelism. Instead, use unit tests of the actual units (e.g. +// checker.Analyze) and integration tests (e.g. TestScript) of whole +// executables. +func Run(args []string, analyzers []*analysis.Analyzer) (exitcode int) { + // Instead of returning a code directly, + // call this function to monotonically increase the exit code. + // This allows us to keep going in the face of some errors + // without having to remember what code to return. + // + // TODO(adonovan): interpreting exit codes is like reading tea-leaves. + // Insted of wasting effort trying to encode a multidimensional result + // into 7 bits we should just emit structured JSON output, and + // an exit code of 0 or 1 for success or failure. + exitAtLeast := func(code int) { + exitcode = max(code, exitcode) + } + + // When analysisflags is linked in (for {single,multi}checker), + // then the -v flag is registered for complex legacy reasons + // related to cmd/vet CLI. + // Treat it as an undocumented alias for -debug=v. + if v := flag.CommandLine.Lookup("v"); v != nil && + v.Value.(flag.Getter).Get() == true && + !strings.Contains(Debug, "v") { + Debug += "v" + } + if CPUProfile != "" { f, err := os.Create(CPUProfile) if err != nil { @@ -142,17 +171,14 @@ func Run(args []string, analyzers []*analysis.Analyzer) int { initial, err := load(args, allSyntax) if err != nil { log.Print(err) - return 1 + exitAtLeast(1) + return } - // TODO(adonovan): simplify exit code logic by using a single - // exit code variable and applying "code = max(code, X)" each - // time an error of code X occurs. - pkgsExitCode := 0 // Print package and module errors regardless of RunDespiteErrors. // Do not exit if there are errors, yet. 
if n := packages.PrintErrors(initial); n > 0 { - pkgsExitCode = 1 + exitAtLeast(1) } var factLog io.Writer @@ -172,7 +198,8 @@ func Run(args []string, analyzers []*analysis.Analyzer) int { graph, err := checker.Analyze(analyzers, initial, opts) if err != nil { log.Print(err) - return 1 + exitAtLeast(1) + return } // Don't print the diagnostics, @@ -181,22 +208,22 @@ func Run(args []string, analyzers []*analysis.Analyzer) int { if err := applyFixes(graph.Roots, Diff); err != nil { // Fail when applying fixes failed. log.Print(err) - return 1 + exitAtLeast(1) + return } - // TODO(adonovan): don't proceed to print the text or JSON output - // if we applied fixes; stop here. - // - // return pkgsExitCode + // Don't proceed to print text/JSON, + // and don't report an error + // just because there were diagnostics. + return } // Print the results. If !RunDespiteErrors and there // are errors in the packages, this will have 0 exit // code. Otherwise, we prefer to return exit code // indicating diagnostics. - if diagExitCode := printDiagnostics(graph); diagExitCode != 0 { - return diagExitCode // there were diagnostics - } - return pkgsExitCode // package errors but no diagnostics + exitAtLeast(printDiagnostics(graph)) + + return } // printDiagnostics prints diagnostics in text or JSON form @@ -541,6 +568,10 @@ fixloop: } } + if dbg('v') { + log.Printf("applied %d fixes, updated %d files", len(fixes), filesUpdated) + } + return nil } diff --git a/go/analysis/internal/checker/checker_test.go b/go/analysis/internal/checker/checker_test.go index fcf5f66e03e..9ec6e61cd73 100644 --- a/go/analysis/internal/checker/checker_test.go +++ b/go/analysis/internal/checker/checker_test.go @@ -49,8 +49,10 @@ func Foo() { t.Fatal(err) } path := filepath.Join(testdata, "src/rename/test.go") + checker.Fix = true checker.Run([]string{"file=" + path}, []*analysis.Analyzer{renameAnalyzer}) + checker.Fix = false contents, err := os.ReadFile(path) if err != nil { @@ -138,31 +140,33 @@ func NewT1() *T1 { return &T1{T} } // package from source. For the rest, it asks 'go list' for export data, // which fails because the compiler encounters the type error. Since the // errors come from 'go list', the driver doesn't run the analyzer. - {name: "despite-error", pattern: []string{rderrFile}, analyzers: []*analysis.Analyzer{noop}, code: 1}, + {name: "despite-error", pattern: []string{rderrFile}, analyzers: []*analysis.Analyzer{noop}, code: exitCodeFailed}, // The noopfact analyzer does use facts, so the driver loads source for // all dependencies, does type checking itself, recognizes the error as a // type error, and runs the analyzer. 
- {name: "despite-error-fact", pattern: []string{rderrFile}, analyzers: []*analysis.Analyzer{noopWithFact}, code: 1}, + {name: "despite-error-fact", pattern: []string{rderrFile}, analyzers: []*analysis.Analyzer{noopWithFact}, code: exitCodeFailed}, // combination of parse/type errors and no errors - {name: "despite-error-and-no-error", pattern: []string{rderrFile, "sort"}, analyzers: []*analysis.Analyzer{renameAnalyzer, noop}, code: 1}, + {name: "despite-error-and-no-error", pattern: []string{rderrFile, "sort"}, analyzers: []*analysis.Analyzer{renameAnalyzer, noop}, code: exitCodeFailed}, // non-existing package error - {name: "no-package", pattern: []string{"xyz"}, analyzers: []*analysis.Analyzer{renameAnalyzer}, code: 1}, - {name: "no-package-despite-error", pattern: []string{"abc"}, analyzers: []*analysis.Analyzer{noop}, code: 1}, - {name: "no-multi-package-despite-error", pattern: []string{"xyz", "abc"}, analyzers: []*analysis.Analyzer{noop}, code: 1}, + {name: "no-package", pattern: []string{"xyz"}, analyzers: []*analysis.Analyzer{renameAnalyzer}, code: exitCodeFailed}, + {name: "no-package-despite-error", pattern: []string{"abc"}, analyzers: []*analysis.Analyzer{noop}, code: exitCodeFailed}, + {name: "no-multi-package-despite-error", pattern: []string{"xyz", "abc"}, analyzers: []*analysis.Analyzer{noop}, code: exitCodeFailed}, // combination of type/parsing and different errors - {name: "different-errors", pattern: []string{rderrFile, "xyz"}, analyzers: []*analysis.Analyzer{renameAnalyzer, noop}, code: 1}, + {name: "different-errors", pattern: []string{rderrFile, "xyz"}, analyzers: []*analysis.Analyzer{renameAnalyzer, noop}, code: exitCodeFailed}, // non existing dir error - {name: "no-match-dir", pattern: []string{"file=non/existing/dir"}, analyzers: []*analysis.Analyzer{renameAnalyzer, noop}, code: 1}, + {name: "no-match-dir", pattern: []string{"file=non/existing/dir"}, analyzers: []*analysis.Analyzer{renameAnalyzer, noop}, code: exitCodeFailed}, // no errors - {name: "no-errors", pattern: []string{"sort"}, analyzers: []*analysis.Analyzer{renameAnalyzer, noop}, code: 0}, + {name: "no-errors", pattern: []string{"sort"}, analyzers: []*analysis.Analyzer{renameAnalyzer, noop}, code: exitCodeSuccess}, // duplicate list error with no findings - {name: "list-error", pattern: []string{cperrFile}, analyzers: []*analysis.Analyzer{noop}, code: 1}, + {name: "list-error", pattern: []string{cperrFile}, analyzers: []*analysis.Analyzer{noop}, code: exitCodeFailed}, // duplicate list errors with findings (issue #67790) - {name: "list-error-findings", pattern: []string{cperrFile}, analyzers: []*analysis.Analyzer{renameAnalyzer}, code: 3}, + {name: "list-error-findings", pattern: []string{cperrFile}, analyzers: []*analysis.Analyzer{renameAnalyzer}, code: exitCodeDiagnostics}, } { - if got := checker.Run(test.pattern, test.analyzers); got != test.code { - t.Errorf("got incorrect exit code %d for test %s; want %d", got, test.name, test.code) - } + t.Run(test.name, func(t *testing.T) { + if got := checker.Run(test.pattern, test.analyzers); got != test.code { + t.Errorf("got incorrect exit code %d for test %s; want %d", got, test.name, test.code) + } + }) } } diff --git a/go/analysis/internal/checker/fix_test.go b/go/analysis/internal/checker/fix_test.go index 8f4e7a3f6a9..68d965d08d6 100644 --- a/go/analysis/internal/checker/fix_test.go +++ b/go/analysis/internal/checker/fix_test.go @@ -52,9 +52,9 @@ func TestMain(m *testing.M) { } const ( - exitCodeSuccess = 0 // success (no diagnostics) + 
exitCodeSuccess = 0 // success (no diagnostics, or successful -fix) exitCodeFailed = 1 // analysis failed to run - exitCodeDiagnostics = 3 // diagnostics were reported + exitCodeDiagnostics = 3 // diagnostics were reported (and no -fix) ) // TestReportInvalidDiagnostic tests that a call to pass.Report with diff --git a/go/analysis/internal/checker/start_test.go b/go/analysis/internal/checker/start_test.go index 618ccd09b93..c78829a5adf 100644 --- a/go/analysis/internal/checker/start_test.go +++ b/go/analysis/internal/checker/start_test.go @@ -40,6 +40,7 @@ package comment path := filepath.Join(testdata, "src/comment/doc.go") checker.Fix = true checker.Run([]string{"file=" + path}, []*analysis.Analyzer{commentAnalyzer}) + checker.Fix = false contents, err := os.ReadFile(path) if err != nil { diff --git a/go/analysis/internal/checker/testdata/diff.txt b/go/analysis/internal/checker/testdata/diff.txt index 5a0c9c2a3b2..f11f01ad1e4 100644 --- a/go/analysis/internal/checker/testdata/diff.txt +++ b/go/analysis/internal/checker/testdata/diff.txt @@ -8,8 +8,7 @@ skip GOOS=windows checker -rename -fix -diff example.com/p -exit 3 -stderr renaming "bar" to "baz" +exit 0 -- go.mod -- module example.com diff --git a/go/analysis/internal/checker/testdata/fixes.txt b/go/analysis/internal/checker/testdata/fixes.txt index 89f245f9ace..4d906ca3f54 100644 --- a/go/analysis/internal/checker/testdata/fixes.txt +++ b/go/analysis/internal/checker/testdata/fixes.txt @@ -2,9 +2,9 @@ # particular when processing duplicate fixes for overlapping packages # in the same directory ("p", "p [p.test]", "p_test [p.test]"). -checker -rename -fix example.com/p -exit 3 -stderr renaming "bar" to "baz" +checker -rename -fix -v example.com/p +stderr applied 8 fixes, updated 3 files +exit 0 -- go.mod -- module example.com diff --git a/go/analysis/internal/checker/testdata/importdup.txt b/go/analysis/internal/checker/testdata/importdup.txt index e1783777858..4c144a61221 100644 --- a/go/analysis/internal/checker/testdata/importdup.txt +++ b/go/analysis/internal/checker/testdata/importdup.txt @@ -1,8 +1,9 @@ # Test that duplicate imports--and, more generally, duplicate # identical insertions--are coalesced. -checker -marker -fix example.com/a -exit 3 +checker -marker -fix -v example.com/a +stderr applied 2 fixes, updated 1 files +exit 0 -- go.mod -- module example.com diff --git a/go/analysis/internal/checker/testdata/importdup2.txt b/go/analysis/internal/checker/testdata/importdup2.txt index 118fdc0184b..c2da0f33195 100644 --- a/go/analysis/internal/checker/testdata/importdup2.txt +++ b/go/analysis/internal/checker/testdata/importdup2.txt @@ -19,8 +19,9 @@ # In more complex examples, the result # may be more subtly order-dependent. -checker -marker -fix example.com/a example.com/b -exit 3 +checker -marker -fix -v example.com/a example.com/b +stderr applied 6 fixes, updated 2 files +exit 0 -- go.mod -- module example.com diff --git a/go/analysis/internal/checker/testdata/noend.txt b/go/analysis/internal/checker/testdata/noend.txt index 2d6be074565..5ebc5e011ba 100644 --- a/go/analysis/internal/checker/testdata/noend.txt +++ b/go/analysis/internal/checker/testdata/noend.txt @@ -2,8 +2,7 @@ # interpreted as if equal to SuggestedFix.Pos (see issue #64199). 
checker -noend -fix example.com/a -exit 3 -stderr say hello +exit 0 -- go.mod -- module example.com diff --git a/go/analysis/internal/checker/testdata/overlap.txt b/go/analysis/internal/checker/testdata/overlap.txt index f556ef308b9..581f2e18950 100644 --- a/go/analysis/internal/checker/testdata/overlap.txt +++ b/go/analysis/internal/checker/testdata/overlap.txt @@ -15,9 +15,12 @@ # (This is a pretty unlikely situation, but it corresponds # to a historical test, TestOther, that used to check for # a conflict, and it seemed wrong to delete it without explanation.) +# +# The fixes are silently and successfully applied. -checker -rename -marker -fix example.com/a -exit 3 +checker -rename -marker -fix -v example.com/a +stderr applied 2 fixes, updated 1 file +exit 0 -- go.mod -- module example.com From f9aad7054b5ff7461d687469b3329b583093e72e Mon Sep 17 00:00:00 2001 From: Dmitri Shuralyov Date: Wed, 12 Feb 2025 10:43:03 -0500 Subject: [PATCH 012/270] go/types/typeutil: avoid shifting uintptr by 32 on 32-bit archs Shifting by 32 on an uintptr causes vet's check for shifts that equal or exceed the width of the integer to trigger on 32-bit architectures. For example, on 386: $ GOARCH=386 GOOS=linux go vet golang.org/x/tools/go/types/typeutil # golang.org/x/tools/go/types/typeutil go/types/typeutil/map.go:393:24: hash (32 bits) too small for shift of 32 Because this package is vendored into the main Go tree, and go test has a special case to turn on all vet checks there, the finding is promoted to an error (even if the code is otherwise harmless). Fix it. For golang/go#69407. For golang/go#36905. Change-Id: Ib8bf981bbe338db4ba8e9b7add0b373acae7338d Cq-Include-Trybots: luci.golang.try:x_tools-gotip-linux-386-longtest,x_tools-gotip-linux-amd64-longtest Reviewed-on: https://go-review.googlesource.com/c/tools/+/648895 LUCI-TryBot-Result: Go LUCI Reviewed-by: Dmitri Shuralyov Auto-Submit: Dmitri Shuralyov Reviewed-by: Alan Donovan --- go/types/typeutil/map.go | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/go/types/typeutil/map.go b/go/types/typeutil/map.go index 43261147c05..b6d542c64ee 100644 --- a/go/types/typeutil/map.go +++ b/go/types/typeutil/map.go @@ -389,8 +389,13 @@ func (hasher) hashTypeName(tname *types.TypeName) uint32 { // path, and whether or not it is a package-level typename. It // is rare for a package to define multiple local types with // the same name.) - hash := uintptr(unsafe.Pointer(tname)) - return uint32(hash ^ (hash >> 32)) + ptr := uintptr(unsafe.Pointer(tname)) + if unsafe.Sizeof(ptr) == 8 { + hash := uint64(ptr) + return uint32(hash ^ (hash >> 32)) + } else { + return uint32(ptr) + } } // shallowHash computes a hash of t without looking at any of its From 57629448da517f7c9b04e039d70bf2aa06d02ee6 Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Tue, 11 Feb 2025 18:49:54 -0500 Subject: [PATCH 013/270] gopls/internal/analysis/gofix: check package visibility If the RHS of an inlinable constant is in a package that is not visible from the current package, do not report that it can be inlined. For golang/go#32816. 
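Condensed from the new testdata (the import paths follow the testdata's
src layout and are illustrative):

    -- a/internal/d.go --
    package internal

    const D = 1

    -- a/a.go --
    package a

    import "a/internal"

    //go:fix inline
    const D = internal.D

    -- b/b.go --
    package b

    import "a"

    const d = a.D // not reported: "a/internal" is not visible from package b

Because b cannot import a/internal, rewriting a.D to internal.D would not
compile, so the analyzer now leaves such references alone.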
Change-Id: Iff9e18f844aa898beb9f1f8df01142057b341c39 Reviewed-on: https://go-review.googlesource.com/c/tools/+/648581 Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI --- gopls/internal/analysis/gofix/gofix.go | 3 +++ gopls/internal/analysis/gofix/testdata/src/a/a.go | 5 +++++ gopls/internal/analysis/gofix/testdata/src/a/a.go.golden | 5 +++++ gopls/internal/analysis/gofix/testdata/src/a/internal/d.go | 5 +++++ gopls/internal/analysis/gofix/testdata/src/b/b.go | 2 ++ gopls/internal/analysis/gofix/testdata/src/b/b.go.golden | 2 ++ 6 files changed, 22 insertions(+) create mode 100644 gopls/internal/analysis/gofix/testdata/src/a/internal/d.go diff --git a/gopls/internal/analysis/gofix/gofix.go b/gopls/internal/analysis/gofix/gofix.go index 101924366d6..8460286bbe3 100644 --- a/gopls/internal/analysis/gofix/gofix.go +++ b/gopls/internal/analysis/gofix/gofix.go @@ -261,6 +261,9 @@ func run(pass *analysis.Pass) (any, error) { // "B" means something different here than at the inlinable const's scope. continue } + } else if !analysisinternal.CanImport(pass.Pkg.Path(), fcon.RHSPkgPath) { + // If this package can't see the RHS's package, we can't inline. + continue } var ( importPrefix string diff --git a/gopls/internal/analysis/gofix/testdata/src/a/a.go b/gopls/internal/analysis/gofix/testdata/src/a/a.go index ae486746e5b..4f41b9a8c5d 100644 --- a/gopls/internal/analysis/gofix/testdata/src/a/a.go +++ b/gopls/internal/analysis/gofix/testdata/src/a/a.go @@ -1,5 +1,7 @@ package a +import "a/internal" + // Functions. func f() { @@ -75,6 +77,9 @@ const ( in8 = x ) +//go:fix inline +const D = internal.D // want D: `goFixInline const "a/internal".D` + func shadow() { var x int // shadows x at package scope diff --git a/gopls/internal/analysis/gofix/testdata/src/a/a.go.golden b/gopls/internal/analysis/gofix/testdata/src/a/a.go.golden index 7d75a598fb7..9e9cc25996f 100644 --- a/gopls/internal/analysis/gofix/testdata/src/a/a.go.golden +++ b/gopls/internal/analysis/gofix/testdata/src/a/a.go.golden @@ -1,5 +1,7 @@ package a +import "a/internal" + // Functions. func f() { @@ -75,6 +77,9 @@ const ( in8 = x ) +//go:fix inline +const D = internal.D // want D: `goFixInline const "a/internal".D` + func shadow() { var x int // shadows x at package scope diff --git a/gopls/internal/analysis/gofix/testdata/src/a/internal/d.go b/gopls/internal/analysis/gofix/testdata/src/a/internal/d.go new file mode 100644 index 00000000000..3211d7ae3cc --- /dev/null +++ b/gopls/internal/analysis/gofix/testdata/src/a/internal/d.go @@ -0,0 +1,5 @@ +// According to the go toolchain's rule about internal packages, +// this package is visible to package a, but not package b. +package internal + +const D = 1 diff --git a/gopls/internal/analysis/gofix/testdata/src/b/b.go b/gopls/internal/analysis/gofix/testdata/src/b/b.go index 4bf9f0dc650..74876738bea 100644 --- a/gopls/internal/analysis/gofix/testdata/src/b/b.go +++ b/gopls/internal/analysis/gofix/testdata/src/b/b.go @@ -28,3 +28,5 @@ func g() { _ = a _ = x } + +const d = a.D // nope: a.D refers to a constant in a package that is not visible here. diff --git a/gopls/internal/analysis/gofix/testdata/src/b/b.go.golden b/gopls/internal/analysis/gofix/testdata/src/b/b.go.golden index b26a05c3046..b3608d6793e 100644 --- a/gopls/internal/analysis/gofix/testdata/src/b/b.go.golden +++ b/gopls/internal/analysis/gofix/testdata/src/b/b.go.golden @@ -32,3 +32,5 @@ func g() { _ = a _ = x } + +const d = a.D // nope: a.D refers to a constant in a package that is not visible here. 
From 86f13a91fb506bb6aee3a8a398f8a639c5212425 Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Tue, 11 Feb 2025 18:58:26 -0500 Subject: [PATCH 014/270] gopls/internal/analysis/gofix: rename local Trivial: rename a local from fcon (forwardable const) to incon (inlinable const) to match terminology. For golang/go#32816. Change-Id: I7d61f055c7057c30b240c076b8710f47f2bf86d1 Reviewed-on: https://go-review.googlesource.com/c/tools/+/648715 Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI --- gopls/internal/analysis/gofix/gofix.go | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/gopls/internal/analysis/gofix/gofix.go b/gopls/internal/analysis/gofix/gofix.go index 8460286bbe3..8ec31bd4736 100644 --- a/gopls/internal/analysis/gofix/gofix.go +++ b/gopls/internal/analysis/gofix/gofix.go @@ -220,15 +220,15 @@ func run(pass *analysis.Pass) (any, error) { case *ast.Ident: // If the identifier is a use of an inlinable constant, suggest inlining it. if con, ok := pass.TypesInfo.Uses[n].(*types.Const); ok { - fcon, ok := inlinableConsts[con] + incon, ok := inlinableConsts[con] if !ok { var fact goFixInlineConstFact if pass.ImportObjectFact(con, &fact) { - fcon = &fact - inlinableConsts[con] = fcon + incon = &fact + inlinableConsts[con] = incon } } - if fcon == nil { + if incon == nil { continue // nope } @@ -248,20 +248,20 @@ func run(pass *analysis.Pass) (any, error) { // If the RHS is not in the current package, AddImport will handle // shadowing, so we only need to worry about when both expressions // are in the current package. - if pass.Pkg.Path() == fcon.RHSPkgPath { + if pass.Pkg.Path() == incon.RHSPkgPath { // fcon.rhsObj is the object referred to by B in the definition of A. scope := pass.TypesInfo.Scopes[curFile].Innermost(n.Pos()) // n's scope - _, obj := scope.LookupParent(fcon.RHSName, n.Pos()) // what "B" means in n's scope + _, obj := scope.LookupParent(incon.RHSName, n.Pos()) // what "B" means in n's scope if obj == nil { // Should be impossible: if code at n can refer to the LHS, // it can refer to the RHS. - panic(fmt.Sprintf("no object for inlinable const %s RHS %s", n.Name, fcon.RHSName)) + panic(fmt.Sprintf("no object for inlinable const %s RHS %s", n.Name, incon.RHSName)) } - if obj != fcon.rhsObj { + if obj != incon.rhsObj { // "B" means something different here than at the inlinable const's scope. continue } - } else if !analysisinternal.CanImport(pass.Pkg.Path(), fcon.RHSPkgPath) { + } else if !analysisinternal.CanImport(pass.Pkg.Path(), incon.RHSPkgPath) { // If this package can't see the RHS's package, we can't inline. 
continue } @@ -269,9 +269,9 @@ func run(pass *analysis.Pass) (any, error) { importPrefix string edits []analysis.TextEdit ) - if fcon.RHSPkgPath != pass.Pkg.Path() { + if incon.RHSPkgPath != pass.Pkg.Path() { _, importPrefix, edits = analysisinternal.AddImport( - pass.TypesInfo, curFile, fcon.RHSPkgName, fcon.RHSPkgPath, fcon.RHSName, n.Pos()) + pass.TypesInfo, curFile, incon.RHSPkgName, incon.RHSPkgPath, incon.RHSName, n.Pos()) } var ( pos = n.Pos() @@ -287,7 +287,7 @@ func run(pass *analysis.Pass) (any, error) { edits = append(edits, analysis.TextEdit{ Pos: pos, End: end, - NewText: []byte(importPrefix + fcon.RHSName), + NewText: []byte(importPrefix + incon.RHSName), }) pass.Report(analysis.Diagnostic{ Pos: pos, From 2f1b076c4ab654a507fef278b9a1d4a6bae56f04 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Wed, 12 Feb 2025 17:24:58 -0500 Subject: [PATCH 015/270] x/tools: add //go:fix inline This CL adds //go:fix inline annotations to some deprecated functions that may be inlined. Updates golang/go#32816 Change-Id: I2e8e82bee054721f266506af24ea39cf2e8b7983 Reviewed-on: https://go-review.googlesource.com/c/tools/+/649056 LUCI-TryBot-Result: Go LUCI Commit-Queue: Alan Donovan Auto-Submit: Alan Donovan Reviewed-by: Jonathan Amsterdam --- go/ast/astutil/util.go | 2 ++ go/packages/packages.go | 2 ++ 2 files changed, 4 insertions(+) diff --git a/go/ast/astutil/util.go b/go/ast/astutil/util.go index ca71e3e1055..c820b208499 100644 --- a/go/ast/astutil/util.go +++ b/go/ast/astutil/util.go @@ -8,4 +8,6 @@ import "go/ast" // Unparen returns e with any enclosing parentheses stripped. // Deprecated: use [ast.Unparen]. +// +//go:fix inline func Unparen(e ast.Expr) ast.Expr { return ast.Unparen(e) } diff --git a/go/packages/packages.go b/go/packages/packages.go index c3a59b8ebf4..342f019a0f9 100644 --- a/go/packages/packages.go +++ b/go/packages/packages.go @@ -141,6 +141,8 @@ const ( LoadAllSyntax = LoadSyntax | NeedDeps // Deprecated: NeedExportsFile is a historical misspelling of NeedExportFile. + // + //go:fix inline NeedExportsFile = NeedExportFile ) From d0d86e40a80dcab58f5cd2fa5f81e650d0777817 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Wed, 12 Feb 2025 17:26:02 -0500 Subject: [PATCH 016/270] x/tools: run gopls/internal/analysis/gofix/main.go -fix This inlines calls to a number of deprecated functions in both std and x/tools. 
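A representative rewrite (this particular call site is illustrative; the
actual edits appear in the diff below):

    // before: call to a deprecated forwarder marked //go:fix inline
    e = astutil.Unparen(e)

    // after running the gofix analyzer with -fix
    e = ast.Unparen(e)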
Updates golang/go#32816 Change-Id: Id7f89983b1428fd3c042947dbecf07349f0bc134 Reviewed-on: https://go-review.googlesource.com/c/tools/+/649057 LUCI-TryBot-Result: Go LUCI Commit-Queue: Alan Donovan Auto-Submit: Alan Donovan Reviewed-by: Jonathan Amsterdam --- go/analysis/checker/checker.go | 2 +- go/analysis/passes/copylock/copylock.go | 2 +- go/analysis/passes/unusedresult/unusedresult.go | 4 +--- go/analysis/validate.go | 2 +- go/ast/astutil/rewrite.go | 2 +- go/callgraph/vta/propagation_test.go | 2 +- go/internal/gccgoimporter/parser.go | 4 ++-- go/ssa/interp/reflect.go | 4 ++-- go/ssa/util.go | 2 +- go/ssa/wrappers.go | 6 ++---- go/types/objectpath/objectpath_test.go | 2 +- internal/astutil/clone.go | 2 +- internal/gcimporter/ureader_yes.go | 2 +- internal/refactor/inline/inline.go | 2 +- internal/refactor/inline/inline_test.go | 2 +- internal/tool/tool.go | 4 ++-- refactor/eg/match.go | 4 +--- refactor/eg/rewrite.go | 2 +- refactor/rename/util.go | 4 +--- 19 files changed, 23 insertions(+), 31 deletions(-) diff --git a/go/analysis/checker/checker.go b/go/analysis/checker/checker.go index 502ec922179..94808733b9d 100644 --- a/go/analysis/checker/checker.go +++ b/go/analysis/checker/checker.go @@ -594,7 +594,7 @@ func (act *Action) exportPackageFact(fact analysis.Fact) { func factType(fact analysis.Fact) reflect.Type { t := reflect.TypeOf(fact) - if t.Kind() != reflect.Ptr { + if t.Kind() != reflect.Pointer { log.Fatalf("invalid Fact type: got %T, want pointer", fact) } return t diff --git a/go/analysis/passes/copylock/copylock.go b/go/analysis/passes/copylock/copylock.go index a9f02ac62e6..8a215677165 100644 --- a/go/analysis/passes/copylock/copylock.go +++ b/go/analysis/passes/copylock/copylock.go @@ -378,7 +378,7 @@ var lockerType *types.Interface // Construct a sync.Locker interface type. 
func init() { - nullary := types.NewSignature(nil, nil, nil, false) // func() + nullary := types.NewSignatureType(nil, nil, nil, nil, nil, false) // func() methods := []*types.Func{ types.NewFunc(token.NoPos, nil, "Lock", nullary), types.NewFunc(token.NoPos, nil, "Unlock", nullary), diff --git a/go/analysis/passes/unusedresult/unusedresult.go b/go/analysis/passes/unusedresult/unusedresult.go index d7cc1e6ae2c..e298f644277 100644 --- a/go/analysis/passes/unusedresult/unusedresult.go +++ b/go/analysis/passes/unusedresult/unusedresult.go @@ -130,9 +130,7 @@ func run(pass *analysis.Pass) (interface{}, error) { } // func() string -var sigNoArgsStringResult = types.NewSignature(nil, nil, - types.NewTuple(types.NewParam(token.NoPos, nil, "", types.Typ[types.String])), - false) +var sigNoArgsStringResult = types.NewSignatureType(nil, nil, nil, nil, types.NewTuple(types.NewParam(token.NoPos, nil, "", types.Typ[types.String])), false) type stringSetFlag map[string]bool diff --git a/go/analysis/validate.go b/go/analysis/validate.go index 4f2c4045622..14539392116 100644 --- a/go/analysis/validate.go +++ b/go/analysis/validate.go @@ -63,7 +63,7 @@ func Validate(analyzers []*Analyzer) error { return fmt.Errorf("fact type %s registered by two analyzers: %v, %v", t, a, prev) } - if t.Kind() != reflect.Ptr { + if t.Kind() != reflect.Pointer { return fmt.Errorf("%s: fact type %s is not a pointer", a, t) } factTypes[t] = a diff --git a/go/ast/astutil/rewrite.go b/go/ast/astutil/rewrite.go index 58934f76633..5c8dbbb7a35 100644 --- a/go/ast/astutil/rewrite.go +++ b/go/ast/astutil/rewrite.go @@ -183,7 +183,7 @@ type application struct { func (a *application) apply(parent ast.Node, name string, iter *iterator, n ast.Node) { // convert typed nil into untyped nil - if v := reflect.ValueOf(n); v.Kind() == reflect.Ptr && v.IsNil() { + if v := reflect.ValueOf(n); v.Kind() == reflect.Pointer && v.IsNil() { n = nil } diff --git a/go/callgraph/vta/propagation_test.go b/go/callgraph/vta/propagation_test.go index 492258f81e3..3885ef201cb 100644 --- a/go/callgraph/vta/propagation_test.go +++ b/go/callgraph/vta/propagation_test.go @@ -203,7 +203,7 @@ func testSuite() map[string]*vtaGraph { a := newNamedType("A") b := newNamedType("B") c := newNamedType("C") - sig := types.NewSignature(nil, types.NewTuple(), types.NewTuple(), false) + sig := types.NewSignatureType(nil, nil, nil, types.NewTuple(), types.NewTuple(), false) f1 := &ssa.Function{Signature: sig} setName(f1, "F1") diff --git a/go/internal/gccgoimporter/parser.go b/go/internal/gccgoimporter/parser.go index f315ec41004..f70946edbe4 100644 --- a/go/internal/gccgoimporter/parser.go +++ b/go/internal/gccgoimporter/parser.go @@ -619,7 +619,7 @@ func (p *parser) parseNamedType(nlist []interface{}) types.Type { p.skipInlineBody() p.expectEOL() - sig := types.NewSignature(receiver, params, results, isVariadic) + sig := types.NewSignatureType(receiver, nil, nil, params, results, isVariadic) nt.AddMethod(types.NewFunc(token.NoPos, pkg, name, sig)) } } @@ -800,7 +800,7 @@ func (p *parser) parseFunctionType(pkg *types.Package, nlist []interface{}) *typ params, isVariadic := p.parseParamList(pkg) results := p.parseResultList(pkg) - *t = *types.NewSignature(nil, params, results, isVariadic) + *t = *types.NewSignatureType(nil, nil, nil, params, results, isVariadic) return t } diff --git a/go/ssa/interp/reflect.go b/go/ssa/interp/reflect.go index 8259e56d860..22f8cde89c0 100644 --- a/go/ssa/interp/reflect.go +++ b/go/ssa/interp/reflect.go @@ -231,7 +231,7 @@ func reflectKind(t 
types.Type) reflect.Kind { case *types.Map: return reflect.Map case *types.Pointer: - return reflect.Ptr + return reflect.Pointer case *types.Slice: return reflect.Slice case *types.Struct: @@ -510,7 +510,7 @@ func newMethod(pkg *ssa.Package, recvType types.Type, name string) *ssa.Function // that is needed is the "pointerness" of Recv.Type, and for // now, we'll set it to always be false since we're only // concerned with rtype. Encapsulate this better. - sig := types.NewSignature(types.NewParam(token.NoPos, nil, "recv", recvType), nil, nil, false) + sig := types.NewSignatureType(types.NewParam(token.NoPos, nil, "recv", recvType), nil, nil, nil, nil, false) fn := pkg.Prog.NewFunction(name, sig, "fake reflect method") fn.Pkg = pkg return fn diff --git a/go/ssa/util.go b/go/ssa/util.go index 4a056cbe0bd..56638129602 100644 --- a/go/ssa/util.go +++ b/go/ssa/util.go @@ -195,7 +195,7 @@ func makeLen(T types.Type) *Builtin { lenParams := types.NewTuple(anonVar(T)) return &Builtin{ name: "len", - sig: types.NewSignature(nil, lenParams, lenResults, false), + sig: types.NewSignatureType(nil, nil, nil, lenParams, lenResults, false), } } diff --git a/go/ssa/wrappers.go b/go/ssa/wrappers.go index d09b4f250ee..aeb160eff23 100644 --- a/go/ssa/wrappers.go +++ b/go/ssa/wrappers.go @@ -106,9 +106,7 @@ func (b *builder) buildWrapper(fn *Function) { var c Call c.Call.Value = &Builtin{ name: "ssa:wrapnilchk", - sig: types.NewSignature(nil, - types.NewTuple(anonVar(fn.method.recv), anonVar(tString), anonVar(tString)), - types.NewTuple(anonVar(fn.method.recv)), false), + sig: types.NewSignatureType(nil, nil, nil, types.NewTuple(anonVar(fn.method.recv), anonVar(tString), anonVar(tString)), types.NewTuple(anonVar(fn.method.recv)), false), } c.Call.Args = []Value{ v, @@ -262,7 +260,7 @@ func createThunk(prog *Program, sel *selection) *Function { } func changeRecv(s *types.Signature, recv *types.Var) *types.Signature { - return types.NewSignature(recv, s.Params(), s.Results(), s.Variadic()) + return types.NewSignatureType(recv, nil, nil, s.Params(), s.Results(), s.Variadic()) } // A local version of *types.Selection. 
diff --git a/go/types/objectpath/objectpath_test.go b/go/types/objectpath/objectpath_test.go index 0805c9d919a..642d6da4926 100644 --- a/go/types/objectpath/objectpath_test.go +++ b/go/types/objectpath/objectpath_test.go @@ -308,7 +308,7 @@ func (unreachable) F() {} // not reachable in export data if err != nil { t.Fatal(err) } - conf := types.Config{Importer: importer.For("source", nil)} + conf := types.Config{Importer: importer.ForCompiler(token.NewFileSet(), "source", nil)} info := &types.Info{ Defs: make(map[*ast.Ident]types.Object), } diff --git a/internal/astutil/clone.go b/internal/astutil/clone.go index d5ee82c58b2..2c9b6bb4841 100644 --- a/internal/astutil/clone.go +++ b/internal/astutil/clone.go @@ -25,7 +25,7 @@ func cloneNode(n ast.Node) ast.Node { } clone = func(x reflect.Value) reflect.Value { switch x.Kind() { - case reflect.Ptr: + case reflect.Pointer: if x.IsNil() { return x } diff --git a/internal/gcimporter/ureader_yes.go b/internal/gcimporter/ureader_yes.go index 522287d18d6..37b4a39e9e1 100644 --- a/internal/gcimporter/ureader_yes.go +++ b/internal/gcimporter/ureader_yes.go @@ -574,7 +574,7 @@ func (pr *pkgReader) objIdx(idx pkgbits.Index) (*types.Package, string) { recv := types.NewVar(fn.Pos(), fn.Pkg(), "", named) typesinternal.SetVarKind(recv, typesinternal.RecvVar) - methods[i] = types.NewFunc(fn.Pos(), fn.Pkg(), fn.Name(), types.NewSignature(recv, sig.Params(), sig.Results(), sig.Variadic())) + methods[i] = types.NewFunc(fn.Pos(), fn.Pkg(), fn.Name(), types.NewSignatureType(recv, nil, nil, sig.Params(), sig.Results(), sig.Variadic())) } embeds := make([]types.Type, iface.NumEmbeddeds()) diff --git a/internal/refactor/inline/inline.go b/internal/refactor/inline/inline.go index 96fbb8f8706..54308243e1c 100644 --- a/internal/refactor/inline/inline.go +++ b/internal/refactor/inline/inline.go @@ -2981,7 +2981,7 @@ func replaceNode(root ast.Node, from, to ast.Node) { var visit func(reflect.Value) visit = func(v reflect.Value) { switch v.Kind() { - case reflect.Ptr: + case reflect.Pointer: if v.Interface() == from { found = true diff --git a/internal/refactor/inline/inline_test.go b/internal/refactor/inline/inline_test.go index 03fb5ccdb17..3be37d5ecde 100644 --- a/internal/refactor/inline/inline_test.go +++ b/internal/refactor/inline/inline_test.go @@ -1977,7 +1977,7 @@ func deepHash(n ast.Node) any { var visit func(reflect.Value) visit = func(v reflect.Value) { switch v.Kind() { - case reflect.Ptr: + case reflect.Pointer: ptr := v.UnsafePointer() writeUint64(uint64(uintptr(ptr))) if !v.IsNil() { diff --git a/internal/tool/tool.go b/internal/tool/tool.go index 46f5b87fa35..fe2b1c289b8 100644 --- a/internal/tool/tool.go +++ b/internal/tool/tool.go @@ -250,7 +250,7 @@ func addFlags(f *flag.FlagSet, field reflect.StructField, value reflect.Value) * child := value.Type().Field(i) v := value.Field(i) // make sure we have a pointer - if v.Kind() != reflect.Ptr { + if v.Kind() != reflect.Pointer { v = v.Addr() } // check if that field is a flag or contains flags @@ -289,7 +289,7 @@ func addFlag(f *flag.FlagSet, value reflect.Value, flagName string, help string) func resolve(v reflect.Value) reflect.Value { for { switch v.Kind() { - case reflect.Interface, reflect.Ptr: + case reflect.Interface, reflect.Pointer: v = v.Elem() default: return v diff --git a/refactor/eg/match.go b/refactor/eg/match.go index 31f8af28f23..0a109210bc4 100644 --- a/refactor/eg/match.go +++ b/refactor/eg/match.go @@ -13,8 +13,6 @@ import ( "log" "os" "reflect" - - "golang.org/x/tools/go/ast/astutil" ) 
// matchExpr reports whether pattern x matches y. @@ -229,7 +227,7 @@ func (tr *Transformer) matchWildcard(xobj *types.Var, y ast.Expr) bool { // -- utilities -------------------------------------------------------- -func unparen(e ast.Expr) ast.Expr { return astutil.Unparen(e) } +func unparen(e ast.Expr) ast.Expr { return ast.Unparen(e) } // isRef returns the object referred to by this (possibly qualified) // identifier, or nil if the node is not a referring identifier. diff --git a/refactor/eg/rewrite.go b/refactor/eg/rewrite.go index 3f71c53b7bb..6fb1e44ef30 100644 --- a/refactor/eg/rewrite.go +++ b/refactor/eg/rewrite.go @@ -338,7 +338,7 @@ func (tr *Transformer) subst(env map[string]ast.Expr, pattern, pos reflect.Value } return v - case reflect.Ptr: + case reflect.Pointer: v := reflect.New(p.Type()).Elem() if elem := p.Elem(); elem.IsValid() { v.Set(tr.subst(env, elem, pos).Addr()) diff --git a/refactor/rename/util.go b/refactor/rename/util.go index 7c1a634e4ed..a3d998f90e0 100644 --- a/refactor/rename/util.go +++ b/refactor/rename/util.go @@ -14,8 +14,6 @@ import ( "runtime" "strings" "unicode" - - "golang.org/x/tools/go/ast/astutil" ) func objectKind(obj types.Object) string { @@ -93,7 +91,7 @@ func sameFile(x, y string) bool { return false } -func unparen(e ast.Expr) ast.Expr { return astutil.Unparen(e) } +func unparen(e ast.Expr) ast.Expr { return ast.Unparen(e) } func is[T any](x any) bool { _, ok := x.(T) From 44b61a1d174cc06329b20f5de60c2b0c800741a4 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Wed, 12 Feb 2025 17:33:27 -0500 Subject: [PATCH 017/270] x/tools: eliminate various unparen (et al) helpers This was achieved by annotiating them with //go:fix inline, running gopls/internal/analysis/gofix/main.go -fix ./... then deleting them. Update golang/go#32816 Change-Id: If65dbf8bfcad796ef274d80804daa135e8ccabf9 Reviewed-on: https://go-review.googlesource.com/c/tools/+/648976 Reviewed-by: Jonathan Amsterdam LUCI-TryBot-Result: Go LUCI Auto-Submit: Alan Donovan Commit-Queue: Alan Donovan --- go/ssa/builder.go | 20 +++++++-------- go/ssa/emit.go | 2 +- go/ssa/source.go | 2 +- go/ssa/util.go | 2 -- gopls/internal/golang/extract.go | 2 +- gopls/internal/golang/freesymbols.go | 2 +- refactor/eg/match.go | 6 ++--- refactor/rename/spec.go | 4 +-- refactor/rename/util.go | 3 --- refactor/satisfy/find.go | 38 ++++++++++++---------------- 10 files changed, 34 insertions(+), 47 deletions(-) diff --git a/go/ssa/builder.go b/go/ssa/builder.go index 4cd71260b61..1761dcc3068 100644 --- a/go/ssa/builder.go +++ b/go/ssa/builder.go @@ -559,7 +559,7 @@ func (sb *storebuf) emit(fn *Function) { // literal that may reference parts of the LHS. func (b *builder) assign(fn *Function, loc lvalue, e ast.Expr, isZero bool, sb *storebuf) { // Can we initialize it in place? - if e, ok := unparen(e).(*ast.CompositeLit); ok { + if e, ok := ast.Unparen(e).(*ast.CompositeLit); ok { // A CompositeLit never evaluates to a pointer, // so if the type of the location is a pointer, // an &-operation is implied. @@ -614,7 +614,7 @@ func (b *builder) assign(fn *Function, loc lvalue, e ast.Expr, isZero bool, sb * // expr lowers a single-result expression e to SSA form, emitting code // to fn and returning the Value defined by the expression. func (b *builder) expr(fn *Function, e ast.Expr) Value { - e = unparen(e) + e = ast.Unparen(e) tv := fn.info.Types[e] @@ -704,7 +704,7 @@ func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value { return y } // Call to "intrinsic" built-ins, e.g. 
new, make, panic. - if id, ok := unparen(e.Fun).(*ast.Ident); ok { + if id, ok := ast.Unparen(e.Fun).(*ast.Ident); ok { if obj, ok := fn.info.Uses[id].(*types.Builtin); ok { if v := b.builtin(fn, obj, e.Args, fn.typ(tv.Type), e.Lparen); v != nil { return v @@ -721,7 +721,7 @@ func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value { switch e.Op { case token.AND: // &X --- potentially escaping. addr := b.addr(fn, e.X, true) - if _, ok := unparen(e.X).(*ast.StarExpr); ok { + if _, ok := ast.Unparen(e.X).(*ast.StarExpr); ok { // &*p must panic if p is nil (http://golang.org/s/go12nil). // For simplicity, we'll just (suboptimally) rely // on the side effects of a load. @@ -1002,7 +1002,7 @@ func (b *builder) setCallFunc(fn *Function, e *ast.CallExpr, c *CallCommon) { c.pos = e.Lparen // Is this a method call? - if selector, ok := unparen(e.Fun).(*ast.SelectorExpr); ok { + if selector, ok := ast.Unparen(e.Fun).(*ast.SelectorExpr); ok { sel := fn.selection(selector) if sel != nil && sel.kind == types.MethodVal { obj := sel.obj.(*types.Func) @@ -1372,7 +1372,7 @@ func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero // An &-operation may be implied: // map[*struct{}]bool{&struct{}{}: true} wantAddr := false - if _, ok := unparen(e.Key).(*ast.CompositeLit); ok { + if _, ok := ast.Unparen(e.Key).(*ast.CompositeLit); ok { wantAddr = isPointerCore(t.Key()) } @@ -1547,9 +1547,9 @@ func (b *builder) typeSwitchStmt(fn *Function, s *ast.TypeSwitchStmt, label *lbl var x Value switch ass := s.Assign.(type) { case *ast.ExprStmt: // x.(type) - x = b.expr(fn, unparen(ass.X).(*ast.TypeAssertExpr).X) + x = b.expr(fn, ast.Unparen(ass.X).(*ast.TypeAssertExpr).X) case *ast.AssignStmt: // y := x.(type) - x = b.expr(fn, unparen(ass.Rhs[0]).(*ast.TypeAssertExpr).X) + x = b.expr(fn, ast.Unparen(ass.Rhs[0]).(*ast.TypeAssertExpr).X) } done := fn.newBasicBlock("typeswitch.done") @@ -1667,7 +1667,7 @@ func (b *builder) selectStmt(fn *Function, s *ast.SelectStmt, label *lblock) { } case *ast.AssignStmt: // x := <-ch - recv := unparen(comm.Rhs[0]).(*ast.UnaryExpr) + recv := ast.Unparen(comm.Rhs[0]).(*ast.UnaryExpr) st = &SelectState{ Dir: types.RecvOnly, Chan: b.expr(fn, recv.X), @@ -1678,7 +1678,7 @@ func (b *builder) selectStmt(fn *Function, s *ast.SelectStmt, label *lblock) { } case *ast.ExprStmt: // <-ch - recv := unparen(comm.X).(*ast.UnaryExpr) + recv := ast.Unparen(comm.X).(*ast.UnaryExpr) st = &SelectState{ Dir: types.RecvOnly, Chan: b.expr(fn, recv.X), diff --git a/go/ssa/emit.go b/go/ssa/emit.go index a3d41ad95a4..bca79adc4e1 100644 --- a/go/ssa/emit.go +++ b/go/ssa/emit.go @@ -81,7 +81,7 @@ func emitDebugRef(f *Function, e ast.Expr, v Value, isAddr bool) { panic("nil") } var obj types.Object - e = unparen(e) + e = ast.Unparen(e) if id, ok := e.(*ast.Ident); ok { if isBlankIdent(id) { return diff --git a/go/ssa/source.go b/go/ssa/source.go index 055a6b1ef5f..d0cc1f4861a 100644 --- a/go/ssa/source.go +++ b/go/ssa/source.go @@ -153,7 +153,7 @@ func findNamedFunc(pkg *Package, pos token.Pos) *Function { // the ssa.Value.) 
func (f *Function) ValueForExpr(e ast.Expr) (value Value, isAddr bool) { if f.debugInfo() { // (opt) - e = unparen(e) + e = ast.Unparen(e) for _, b := range f.Blocks { for _, instr := range b.Instrs { if ref, ok := instr.(*DebugRef); ok { diff --git a/go/ssa/util.go b/go/ssa/util.go index 56638129602..2a9c9b9d318 100644 --- a/go/ssa/util.go +++ b/go/ssa/util.go @@ -35,8 +35,6 @@ func assert(p bool, msg string) { //// AST utilities -func unparen(e ast.Expr) ast.Expr { return ast.Unparen(e) } - // isBlankIdent returns true iff e is an Ident with name "_". // They have no associated types.Object, and thus no type. func isBlankIdent(e ast.Expr) bool { diff --git a/gopls/internal/golang/extract.go b/gopls/internal/golang/extract.go index 2ce89795a06..8c8758d9f0a 100644 --- a/gopls/internal/golang/extract.go +++ b/gopls/internal/golang/extract.go @@ -1229,7 +1229,7 @@ func collectFreeVars(info *types.Info, file *ast.File, start, end token.Pos, nod // return value acts as an indicator for where it was defined. var sel func(n *ast.SelectorExpr) (types.Object, bool) sel = func(n *ast.SelectorExpr) (types.Object, bool) { - switch x := astutil.Unparen(n.X).(type) { + switch x := ast.Unparen(n.X).(type) { case *ast.SelectorExpr: return sel(x) case *ast.Ident: diff --git a/gopls/internal/golang/freesymbols.go b/gopls/internal/golang/freesymbols.go index 2c9e25165f6..336025367f5 100644 --- a/gopls/internal/golang/freesymbols.go +++ b/gopls/internal/golang/freesymbols.go @@ -342,7 +342,7 @@ func freeRefs(pkg *types.Package, info *types.Info, file *ast.File, start, end t for { suffix = append(suffix, info.Uses[sel.Sel]) - switch x := astutil.Unparen(sel.X).(type) { + switch x := ast.Unparen(sel.X).(type) { case *ast.Ident: return id(x, suffix) default: diff --git a/refactor/eg/match.go b/refactor/eg/match.go index 0a109210bc4..d85a473b978 100644 --- a/refactor/eg/match.go +++ b/refactor/eg/match.go @@ -32,8 +32,8 @@ func (tr *Transformer) matchExpr(x, y ast.Expr) bool { if x == nil || y == nil { return false } - x = unparen(x) - y = unparen(y) + x = ast.Unparen(x) + y = ast.Unparen(y) // Is x a wildcard? (a reference to a 'before' parameter) if xobj, ok := tr.wildcardObj(x); ok { @@ -227,8 +227,6 @@ func (tr *Transformer) matchWildcard(xobj *types.Var, y ast.Expr) bool { // -- utilities -------------------------------------------------------- -func unparen(e ast.Expr) ast.Expr { return ast.Unparen(e) } - // isRef returns the object referred to by this (possibly qualified) // identifier, or nil if the node is not a referring identifier. func isRef(n ast.Node, info *types.Info) types.Object { diff --git a/refactor/rename/spec.go b/refactor/rename/spec.go index 1d8c32c9dc3..99068c13358 100644 --- a/refactor/rename/spec.go +++ b/refactor/rename/spec.go @@ -155,7 +155,7 @@ func parseObjectSpec(spec *spec, main string) error { } if e, ok := e.(*ast.SelectorExpr); ok { - x := unparen(e.X) + x := ast.Unparen(e.X) // Strip off star constructor, if any. if star, ok := x.(*ast.StarExpr); ok { @@ -172,7 +172,7 @@ func parseObjectSpec(spec *spec, main string) error { if x, ok := x.(*ast.SelectorExpr); ok { // field/method of type e.g. ("encoding/json".Decoder).Decode - y := unparen(x.X) + y := ast.Unparen(x.X) if pkg := parseImportPath(y); pkg != "" { spec.pkg = pkg // e.g. "encoding/json" spec.pkgMember = x.Sel.Name // e.g. 
"Decoder" diff --git a/refactor/rename/util.go b/refactor/rename/util.go index a3d998f90e0..cb7cea3a86e 100644 --- a/refactor/rename/util.go +++ b/refactor/rename/util.go @@ -5,7 +5,6 @@ package rename import ( - "go/ast" "go/token" "go/types" "os" @@ -91,8 +90,6 @@ func sameFile(x, y string) bool { return false } -func unparen(e ast.Expr) ast.Expr { return ast.Unparen(e) } - func is[T any](x any) bool { _, ok := x.(T) return ok diff --git a/refactor/satisfy/find.go b/refactor/satisfy/find.go index 3d693aa04ab..a897c3c2fd4 100644 --- a/refactor/satisfy/find.go +++ b/refactor/satisfy/find.go @@ -126,13 +126,13 @@ func (f *Finder) exprN(e ast.Expr) types.Type { case *ast.CallExpr: // x, err := f(args) - sig := coreType(f.expr(e.Fun)).(*types.Signature) + sig := typeparams.CoreType(f.expr(e.Fun)).(*types.Signature) f.call(sig, e.Args) case *ast.IndexExpr: // y, ok := x[i] x := f.expr(e.X) - f.assign(f.expr(e.Index), coreType(x).(*types.Map).Key()) + f.assign(f.expr(e.Index), typeparams.CoreType(x).(*types.Map).Key()) case *ast.TypeAssertExpr: // y, ok := x.(T) @@ -213,7 +213,7 @@ func (f *Finder) builtin(obj *types.Builtin, sig *types.Signature, args []ast.Ex f.expr(args[1]) } else { // append(x, y, z) - tElem := coreType(s).(*types.Slice).Elem() + tElem := typeparams.CoreType(s).(*types.Slice).Elem() for _, arg := range args[1:] { f.assign(tElem, f.expr(arg)) } @@ -222,7 +222,7 @@ func (f *Finder) builtin(obj *types.Builtin, sig *types.Signature, args []ast.Ex case "delete": m := f.expr(args[0]) k := f.expr(args[1]) - f.assign(coreType(m).(*types.Map).Key(), k) + f.assign(typeparams.CoreType(m).(*types.Map).Key(), k) default: // ordinary call @@ -273,7 +273,7 @@ func (f *Finder) assign(lhs, rhs types.Type) { if types.Identical(lhs, rhs) { return } - if !isInterface(lhs) { + if !types.IsInterface(lhs) { return } @@ -354,7 +354,7 @@ func (f *Finder) expr(e ast.Expr) types.Type { f.sig = saved case *ast.CompositeLit: - switch T := coreType(typeparams.Deref(tv.Type)).(type) { + switch T := typeparams.CoreType(typeparams.Deref(tv.Type)).(type) { case *types.Struct: for i, elem := range e.Elts { if kv, ok := elem.(*ast.KeyValueExpr); ok { @@ -405,7 +405,7 @@ func (f *Finder) expr(e ast.Expr) types.Type { // x[i] or m[k] -- index or lookup operation x := f.expr(e.X) i := f.expr(e.Index) - if ux, ok := coreType(x).(*types.Map); ok { + if ux, ok := typeparams.CoreType(x).(*types.Map); ok { f.assign(ux.Key(), i) } } @@ -440,7 +440,7 @@ func (f *Finder) expr(e ast.Expr) types.Type { // unsafe call. Treat calls to functions in unsafe like ordinary calls, // except that their signature cannot be determined by their func obj. // Without this special handling, f.expr(e.Fun) would fail below. 
- if s, ok := unparen(e.Fun).(*ast.SelectorExpr); ok { + if s, ok := ast.Unparen(e.Fun).(*ast.SelectorExpr); ok { if obj, ok := f.info.Uses[s.Sel].(*types.Builtin); ok && obj.Pkg().Path() == "unsafe" { sig := f.info.Types[e.Fun].Type.(*types.Signature) f.call(sig, e.Args) @@ -449,7 +449,7 @@ func (f *Finder) expr(e ast.Expr) types.Type { } // builtin call - if id, ok := unparen(e.Fun).(*ast.Ident); ok { + if id, ok := ast.Unparen(e.Fun).(*ast.Ident); ok { if obj, ok := f.info.Uses[id].(*types.Builtin); ok { sig := f.info.Types[id].Type.(*types.Signature) f.builtin(obj, sig, e.Args) @@ -458,7 +458,7 @@ func (f *Finder) expr(e ast.Expr) types.Type { } // ordinary call - f.call(coreType(f.expr(e.Fun)).(*types.Signature), e.Args) + f.call(typeparams.CoreType(f.expr(e.Fun)).(*types.Signature), e.Args) } case *ast.StarExpr: @@ -518,7 +518,7 @@ func (f *Finder) stmt(s ast.Stmt) { case *ast.SendStmt: ch := f.expr(s.Chan) val := f.expr(s.Value) - f.assign(coreType(ch).(*types.Chan).Elem(), val) + f.assign(typeparams.CoreType(ch).(*types.Chan).Elem(), val) case *ast.IncDecStmt: f.expr(s.X) @@ -622,9 +622,9 @@ func (f *Finder) stmt(s ast.Stmt) { var I types.Type switch ass := s.Assign.(type) { case *ast.ExprStmt: // x.(type) - I = f.expr(unparen(ass.X).(*ast.TypeAssertExpr).X) + I = f.expr(ast.Unparen(ass.X).(*ast.TypeAssertExpr).X) case *ast.AssignStmt: // y := x.(type) - I = f.expr(unparen(ass.Rhs[0]).(*ast.TypeAssertExpr).X) + I = f.expr(ast.Unparen(ass.Rhs[0]).(*ast.TypeAssertExpr).X) } for _, cc := range s.Body.List { cc := cc.(*ast.CaseClause) @@ -668,7 +668,7 @@ func (f *Finder) stmt(s ast.Stmt) { var xelem types.Type // Keys of array, *array, slice, string aren't interesting // since the RHS key type is just an int. - switch ux := coreType(x).(type) { + switch ux := typeparams.CoreType(x).(type) { case *types.Chan: xelem = ux.Elem() case *types.Map: @@ -683,13 +683,13 @@ func (f *Finder) stmt(s ast.Stmt) { var xelem types.Type // Values of type strings aren't interesting because // the RHS value type is just a rune. - switch ux := coreType(x).(type) { + switch ux := typeparams.CoreType(x).(type) { case *types.Array: xelem = ux.Elem() case *types.Map: xelem = ux.Elem() case *types.Pointer: // *array - xelem = coreType(typeparams.Deref(ux)).(*types.Array).Elem() + xelem = typeparams.CoreType(typeparams.Deref(ux)).(*types.Array).Elem() case *types.Slice: xelem = ux.Elem() } @@ -707,12 +707,6 @@ func (f *Finder) stmt(s ast.Stmt) { // -- Plundered from golang.org/x/tools/go/ssa ----------------- -func unparen(e ast.Expr) ast.Expr { return ast.Unparen(e) } - -func isInterface(T types.Type) bool { return types.IsInterface(T) } - -func coreType(T types.Type) types.Type { return typeparams.CoreType(T) } - func instance(info *types.Info, expr ast.Expr) bool { var id *ast.Ident switch x := expr.(type) { From ddd4bde5a0f514414daf47ff0232b601ba01d18f Mon Sep 17 00:00:00 2001 From: Rob Findley Date: Thu, 13 Feb 2025 18:02:22 +0000 Subject: [PATCH 018/270] gopls/internal/golang: avoid PackageSymbols errors with missing packages As reported in golang/vscode-go#3681, spurious errors from gopls.package_symbols can cause very distracting popups in VS Code. For now, err on the side of silence. In the future, we may want to revisit this behavior. 
For golang/vscode-go#3681 Change-Id: I67f8b8e1e299ef88dabbb284a151aada131652f8 Reviewed-on: https://go-review.googlesource.com/c/tools/+/649257 Reviewed-by: Hongxiang Jiang LUCI-TryBot-Result: Go LUCI --- gopls/internal/golang/symbols.go | 32 +++--- gopls/internal/server/command.go | 4 + .../integration/misc/package_symbols_test.go | 101 ++++++++++-------- 3 files changed, 80 insertions(+), 57 deletions(-) diff --git a/gopls/internal/golang/symbols.go b/gopls/internal/golang/symbols.go index 14f2703441c..db31baa69f2 100644 --- a/gopls/internal/golang/symbols.go +++ b/gopls/internal/golang/symbols.go @@ -86,17 +86,22 @@ func PackageSymbols(ctx context.Context, snapshot *cache.Snapshot, uri protocol. ctx, done := event.Start(ctx, "source.PackageSymbols") defer done() - mp, err := NarrowestMetadataForFile(ctx, snapshot, uri) - if err != nil { - return command.PackageSymbolsResult{}, err + pkgFiles := []protocol.DocumentURI{uri} + + // golang/vscode-go#3681: do our best if the file is not in a package. + // TODO(rfindley): revisit this in the future once there is more graceful + // handling in VS Code. + if mp, err := NarrowestMetadataForFile(ctx, snapshot, uri); err == nil { + pkgFiles = mp.CompiledGoFiles } - pkgfiles := mp.CompiledGoFiles - // Maps receiver name to the methods that use it - receiverToMethods := make(map[string][]command.PackageSymbol) - // Maps type symbol name to its index in symbols - typeSymbolToIdx := make(map[string]int) - var symbols []command.PackageSymbol - for fidx, f := range pkgfiles { + + var ( + pkgName string + symbols []command.PackageSymbol + receiverToMethods = make(map[string][]command.PackageSymbol) // receiver name -> methods + typeSymbolToIdx = make(map[string]int) // type name -> index in symbols + ) + for fidx, f := range pkgFiles { fh, err := snapshot.ReadFile(ctx, f) if err != nil { return command.PackageSymbolsResult{}, err @@ -105,6 +110,9 @@ func PackageSymbols(ctx context.Context, snapshot *cache.Snapshot, uri protocol. if err != nil { return command.PackageSymbolsResult{}, err } + if pkgName == "" && pgf.File != nil && pgf.File.Name != nil { + pkgName = pgf.File.Name.Name + } for _, decl := range pgf.File.Decls { switch decl := decl.(type) { case *ast.FuncDecl: @@ -154,8 +162,8 @@ func PackageSymbols(ctx context.Context, snapshot *cache.Snapshot, uri protocol. } } return command.PackageSymbolsResult{ - PackageName: string(mp.Name), - Files: pkgfiles, + PackageName: pkgName, + Files: pkgFiles, Symbols: symbols, }, nil diff --git a/gopls/internal/server/command.go b/gopls/internal/server/command.go index 2b5c282a28f..007b8d5218f 100644 --- a/gopls/internal/server/command.go +++ b/gopls/internal/server/command.go @@ -1741,6 +1741,10 @@ func (c *commandHandler) PackageSymbols(ctx context.Context, args command.Packag err := c.run(ctx, commandConfig{ forURI: args.URI, }, func(ctx context.Context, deps commandDeps) error { + if deps.snapshot.FileKind(deps.fh) != file.Go { + // golang/vscode-go#3681: fail silently, to avoid spurious error popups. 
+ return nil + } res, err := golang.PackageSymbols(ctx, deps.snapshot, args.URI) if err != nil { return err diff --git a/gopls/internal/test/integration/misc/package_symbols_test.go b/gopls/internal/test/integration/misc/package_symbols_test.go index 860264f2bb0..1e06a655935 100644 --- a/gopls/internal/test/integration/misc/package_symbols_test.go +++ b/gopls/internal/test/integration/misc/package_symbols_test.go @@ -16,6 +16,11 @@ import ( func TestPackageSymbols(t *testing.T) { const files = ` +-- go.mod -- +module example.com + +go 1.20 + -- a.go -- package a @@ -33,68 +38,74 @@ func (s *S) M2() {} func (s *S) M3() {} func F() {} +-- unloaded.go -- +//go:build unloaded + +package a + +var Unloaded int ` integration.Run(t, files, func(t *testing.T, env *integration.Env) { - a_uri := env.Sandbox.Workdir.URI("a.go") - b_uri := env.Sandbox.Workdir.URI("b.go") + aURI := env.Sandbox.Workdir.URI("a.go") + bURI := env.Sandbox.Workdir.URI("b.go") args, err := command.MarshalArgs(command.PackageSymbolsArgs{ - URI: a_uri, + URI: aURI, }) if err != nil { - t.Fatalf("failed to MarshalArgs: %v", err) + t.Fatal(err) } var res command.PackageSymbolsResult env.ExecuteCommand(&protocol.ExecuteCommandParams{ - Command: "gopls.package_symbols", + Command: command.PackageSymbols.String(), Arguments: args, }, &res) want := command.PackageSymbolsResult{ PackageName: "a", - Files: []protocol.DocumentURI{a_uri, b_uri}, + Files: []protocol.DocumentURI{aURI, bURI}, Symbols: []command.PackageSymbol{ - { - Name: "A", - Kind: protocol.Variable, - File: 0, - }, - { - Name: "F", - Kind: protocol.Function, - File: 1, - }, - { - Name: "S", - Kind: protocol.Struct, - File: 0, - Children: []command.PackageSymbol{ - { - Name: "M1", - Kind: protocol.Method, - File: 0, - }, - { - Name: "M2", - Kind: protocol.Method, - File: 1, - }, - { - Name: "M3", - Kind: protocol.Method, - File: 1, - }, - }, - }, - { - Name: "b", - Kind: protocol.Variable, - File: 1, - }, + {Name: "A", Kind: protocol.Variable, File: 0}, + {Name: "F", Kind: protocol.Function, File: 1}, + {Name: "S", Kind: protocol.Struct, File: 0, Children: []command.PackageSymbol{ + {Name: "M1", Kind: protocol.Method, File: 0}, + {Name: "M2", Kind: protocol.Method, File: 1}, + {Name: "M3", Kind: protocol.Method, File: 1}, + }}, + {Name: "b", Kind: protocol.Variable, File: 1}, }, } - if diff := cmp.Diff(want, res, cmpopts.IgnoreFields(command.PackageSymbol{}, "Range", "SelectionRange", "Detail")); diff != "" { - t.Errorf("gopls.package_symbols returned unexpected diff (-want +got):\n%s", diff) + ignore := cmpopts.IgnoreFields(command.PackageSymbol{}, "Range", "SelectionRange", "Detail") + if diff := cmp.Diff(want, res, ignore); diff != "" { + t.Errorf("package_symbols returned unexpected diff (-want +got):\n%s", diff) + } + + for file, want := range map[string]command.PackageSymbolsResult{ + "go.mod": {}, + "unloaded.go": { + PackageName: "a", + Files: []protocol.DocumentURI{env.Sandbox.Workdir.URI("unloaded.go")}, + Symbols: []command.PackageSymbol{ + {Name: "Unloaded", Kind: protocol.Variable, File: 0}, + }, + }, + } { + uri := env.Sandbox.Workdir.URI(file) + args, err := command.MarshalArgs(command.PackageSymbolsArgs{ + URI: uri, + }) + if err != nil { + t.Fatal(err) + } + var res command.PackageSymbolsResult + env.ExecuteCommand(&protocol.ExecuteCommandParams{ + Command: command.PackageSymbols.String(), + Arguments: args, + }, &res) + + if diff := cmp.Diff(want, res, ignore); diff != "" { + t.Errorf("package_symbols returned unexpected diff (-want +got):\n%s", diff) + 
} } }) } From ab04c1963f5c2e5425c494f549e018313bdaa817 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Thu, 13 Feb 2025 14:37:56 -0500 Subject: [PATCH 019/270] gopls/internal/analysis/modernize: improve rangeint transformation for i := 0; i < len(slice); i++ {} is currently reduced to for i := range len(slice) {} but this CL delivers the better style of: for i := range slice {} Fixes golang/go#71725 Change-Id: Idb025315047c3be992267f6c1783757798e0c840 Reviewed-on: https://go-review.googlesource.com/c/tools/+/649356 Reviewed-by: Jonathan Amsterdam LUCI-TryBot-Result: Go LUCI Auto-Submit: Alan Donovan --- gopls/internal/analysis/modernize/rangeint.go | 10 ++++++++++ .../modernize/testdata/src/rangeint/rangeint.go | 5 ++++- .../modernize/testdata/src/rangeint/rangeint.go.golden | 5 ++++- 3 files changed, 18 insertions(+), 2 deletions(-) diff --git a/gopls/internal/analysis/modernize/rangeint.go b/gopls/internal/analysis/modernize/rangeint.go index c36203cef06..2d25d6a0a06 100644 --- a/gopls/internal/analysis/modernize/rangeint.go +++ b/gopls/internal/analysis/modernize/rangeint.go @@ -8,10 +8,12 @@ import ( "fmt" "go/ast" "go/token" + "go/types" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/types/typeutil" "golang.org/x/tools/internal/analysisinternal" "golang.org/x/tools/internal/astutil/cursor" "golang.org/x/tools/internal/astutil/edge" @@ -98,6 +100,14 @@ func rangeint(pass *analysis.Pass) { }) } + // If limit is len(slice), + // simplify "range len(slice)" to "range slice". + if call, ok := limit.(*ast.CallExpr); ok && + typeutil.Callee(info, call) == builtinLen && + is[*types.Slice](info.TypeOf(call.Args[0]).Underlying()) { + limit = call.Args[0] + } + pass.Report(analysis.Diagnostic{ Pos: init.Pos(), End: inc.End(), diff --git a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go index e17dccac9d0..a60bd5eac37 100644 --- a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go +++ b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go @@ -1,6 +1,6 @@ package rangeint -func _(i int, s struct{ i int }) { +func _(i int, s struct{ i int }, slice []int) { for i := 0; i < 10; i++ { // want "for loop can be modernized using range over int" println(i) } @@ -9,6 +9,9 @@ func _(i int, s struct{ i int }) { for i := 0; i < 10; i++ { // want "for loop can be modernized using range over int" // i unused within loop } + for i := 0; i < len(slice); i++ { // want "for loop can be modernized using range over int" + println(slice[i]) + } // nope for i := 0; i < 10; { // nope: missing increment diff --git a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden index 5a76229c858..348f77508ac 100644 --- a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden +++ b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden @@ -1,6 +1,6 @@ package rangeint -func _(i int, s struct{ i int }) { +func _(i int, s struct{ i int }, slice []int) { for i := range 10 { // want "for loop can be modernized using range over int" println(i) } @@ -9,6 +9,9 @@ func _(i int, s struct{ i int }) { for range 10 { // want "for loop can be modernized using range over int" // i unused within loop } + for i := range slice { // want "for loop can be modernized using range over int" + 
println(slice[i]) + } // nope for i := 0; i < 10; { // nope: missing increment From 809cde44e486bf9b083f7c68d47e09fc20662b4f Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Thu, 13 Feb 2025 13:37:07 -0500 Subject: [PATCH 020/270] gopls/internal/analysis/modernize: fix bug in minmax Wrong operator. D'oh. + test Fixes golang/go#71721 Change-Id: Ia7fe314df07afa9a9de63c2b6031e678755e9d56 Reviewed-on: https://go-review.googlesource.com/c/tools/+/649357 Reviewed-by: Jonathan Amsterdam Reviewed-by: Alan Donovan Auto-Submit: Alan Donovan LUCI-TryBot-Result: Go LUCI --- gopls/internal/analysis/modernize/minmax.go | 2 +- .../analysis/modernize/testdata/src/minmax/minmax.go | 11 +++++++++++ .../modernize/testdata/src/minmax/minmax.go.golden | 11 +++++++++++ 3 files changed, 23 insertions(+), 1 deletion(-) diff --git a/gopls/internal/analysis/modernize/minmax.go b/gopls/internal/analysis/modernize/minmax.go index 26b12341cad..1466e767fc7 100644 --- a/gopls/internal/analysis/modernize/minmax.go +++ b/gopls/internal/analysis/modernize/minmax.go @@ -57,7 +57,7 @@ func minmax(pass *analysis.Pass) { if equalSyntax(lhs, lhs2) { if equalSyntax(rhs, a) && equalSyntax(rhs2, b) { sign = +sign - } else if equalSyntax(rhs2, a) || equalSyntax(rhs, b) { + } else if equalSyntax(rhs2, a) && equalSyntax(rhs, b) { sign = -sign } else { return diff --git a/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go b/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go index c73bd30139b..8fdc3bc2106 100644 --- a/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go +++ b/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go @@ -92,3 +92,14 @@ func nopeAssignHasIncrementOperator() { } print(y) } + +// Regression test for https://github.com/golang/go/issues/71721. +func nopeNotAMinimum(x, y int) int { + // A value of -1 or 0 will use a default value (30). + if x <= 0 { + y = 30 + } else { + y = x + } + return y +} diff --git a/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go.golden b/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go.golden index 11eac2c1418..48e154729e7 100644 --- a/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go.golden +++ b/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go.golden @@ -69,3 +69,14 @@ func nopeAssignHasIncrementOperator() { } print(y) } + +// Regression test for https://github.com/golang/go/issues/71721. +func nopeNotAMinimum(x, y int) int { + // A value of -1 or 0 will use a default value (30). + if x <= 0 { + y = 30 + } else { + y = x + } + return y +} From 85a3833c521302254b403f7606939863e21f736a Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Wed, 12 Feb 2025 11:18:11 -0500 Subject: [PATCH 021/270] internal/analysis/gofix: simple type aliases Offer to replace inlinable type aliases whose RHS is a named type. Despite the amount of new code, there is little to test, because most of it duplicates constants. When there is a chain of inlinable type aliases, as in: type A = T type AA = A var v AA we don't follow the chain to the end. Instead, the first set of fixes produces: type A = T type AA = T var v A That is, the replacements happen "in parallel." A second round of fixes would rewrite var v A to var v T This case is rare enough that it's not worth doing better. For golang/go#32816. 
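For the simple (non-chained) case, a minimal sketch using the same names as the
testdata in this CL:

    //go:fix inline
    type A = T

    var _ A // this use is reported, with a suggested fix

Applying the fix rewrites the use to:

    var _ T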
Change-Id: Ib6854d60b26a273b592a297cb9a650a31e094392 Reviewed-on: https://go-review.googlesource.com/c/tools/+/649055 Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI --- gopls/internal/analysis/gofix/gofix.go | 264 +++++++++++++----- .../analysis/gofix/testdata/src/a/a.go | 15 + .../analysis/gofix/testdata/src/a/a.go.golden | 15 + .../analysis/gofix/testdata/src/b/b.go | 2 + .../analysis/gofix/testdata/src/b/b.go.golden | 2 + 5 files changed, 225 insertions(+), 73 deletions(-) diff --git a/gopls/internal/analysis/gofix/gofix.go b/gopls/internal/analysis/gofix/gofix.go index 8ec31bd4736..147399d315d 100644 --- a/gopls/internal/analysis/gofix/gofix.go +++ b/gopls/internal/analysis/gofix/gofix.go @@ -63,8 +63,11 @@ func run(pass *analysis.Pass) (any, error) { // Pass 1: find functions and constants annotated with an appropriate "//go:fix" // comment (the syntax proposed by #32816), // and export a fact for each one. - inlinableFuncs := make(map[*types.Func]*inline.Callee) // memoization of fact import (nil => no fact) - inlinableConsts := make(map[*types.Const]*goFixInlineConstFact) + var ( + inlinableFuncs = make(map[*types.Func]*inline.Callee) // memoization of fact import (nil => no fact) + inlinableConsts = make(map[*types.Const]*goFixInlineConstFact) + inlinableAliases = make(map[*types.TypeName]*goFixInlineAliasFact) + ) inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) nodeFilter := []ast.Node{(*ast.FuncDecl)(nil), (*ast.GenDecl)(nil)} @@ -89,52 +92,96 @@ func run(pass *analysis.Pass) (any, error) { inlinableFuncs[fn] = callee case *ast.GenDecl: - if decl.Tok != token.CONST { + if decl.Tok != token.CONST && decl.Tok != token.TYPE { return } declInline := hasFixInline(decl.Doc) // Accept inline directives on the entire decl as well as individual specs. for _, spec := range decl.Specs { - spec := spec.(*ast.ValueSpec) // guaranteed by Tok == CONST - specInline := hasFixInline(spec.Doc) - if declInline || specInline { - for i, name := range spec.Names { - if i >= len(spec.Values) { - // Possible following an iota. - break - } - val := spec.Values[i] - var rhsID *ast.Ident - switch e := val.(type) { - case *ast.Ident: - // Constants defined with the predeclared iota cannot be inlined. - if pass.TypesInfo.Uses[e] == builtinIota { - pass.Reportf(val.Pos(), "invalid //go:fix inline directive: const value is iota") + switch spec := spec.(type) { + case *ast.TypeSpec: // Tok == TYPE + if !declInline && !hasFixInline(spec.Doc) { + continue + } + if !spec.Assign.IsValid() { + pass.Reportf(spec.Pos(), "invalid //go:fix inline directive: not a type alias") + continue + } + if spec.TypeParams != nil { + // TODO(jba): handle generic aliases + continue + } + // The alias must refer to another named type. + // TODO(jba): generalize to more type expressions. + var rhsID *ast.Ident + switch e := ast.Unparen(spec.Type).(type) { + case *ast.Ident: + rhsID = e + case *ast.SelectorExpr: + rhsID = e.Sel + default: + continue + } + lhs := pass.TypesInfo.Defs[spec.Name].(*types.TypeName) + // more (jba): test one alias pointing to another alias + rhs := pass.TypesInfo.Uses[rhsID].(*types.TypeName) + typ := &goFixInlineAliasFact{ + RHSName: rhs.Name(), + RHSPkgName: rhs.Pkg().Name(), + RHSPkgPath: rhs.Pkg().Path(), + } + if rhs.Pkg() == pass.Pkg { + typ.rhsObj = rhs + } + inlinableAliases[lhs] = typ + // Create a fact only if the LHS is exported and defined at top level. + // We create a fact even if the RHS is non-exported, + // so we can warn about uses in other packages. 
+ if lhs.Exported() && typesinternal.IsPackageLevel(lhs) { + pass.ExportObjectFact(lhs, typ) + } + + case *ast.ValueSpec: // Tok == CONST + specInline := hasFixInline(spec.Doc) + if declInline || specInline { + for i, name := range spec.Names { + if i >= len(spec.Values) { + // Possible following an iota. + break + } + val := spec.Values[i] + var rhsID *ast.Ident + switch e := val.(type) { + case *ast.Ident: + // Constants defined with the predeclared iota cannot be inlined. + if pass.TypesInfo.Uses[e] == builtinIota { + pass.Reportf(val.Pos(), "invalid //go:fix inline directive: const value is iota") + continue + } + rhsID = e + case *ast.SelectorExpr: + rhsID = e.Sel + default: + pass.Reportf(val.Pos(), "invalid //go:fix inline directive: const value is not the name of another constant") continue } - rhsID = e - case *ast.SelectorExpr: - rhsID = e.Sel - default: - pass.Reportf(val.Pos(), "invalid //go:fix inline directive: const value is not the name of another constant") - continue - } - lhs := pass.TypesInfo.Defs[name].(*types.Const) - rhs := pass.TypesInfo.Uses[rhsID].(*types.Const) // must be so in a well-typed program - con := &goFixInlineConstFact{ - RHSName: rhs.Name(), - RHSPkgName: rhs.Pkg().Name(), - RHSPkgPath: rhs.Pkg().Path(), - } - if rhs.Pkg() == pass.Pkg { - con.rhsObj = rhs - } - inlinableConsts[lhs] = con - // Create a fact only if the LHS is exported and defined at top level. - // We create a fact even if the RHS is non-exported, - // so we can warn uses in other packages. - if lhs.Exported() && typesinternal.IsPackageLevel(lhs) { - pass.ExportObjectFact(lhs, con) + lhs := pass.TypesInfo.Defs[name].(*types.Const) + rhs := pass.TypesInfo.Uses[rhsID].(*types.Const) // must be so in a well-typed program + con := &goFixInlineConstFact{ + RHSName: rhs.Name(), + RHSPkgName: rhs.Pkg().Name(), + RHSPkgPath: rhs.Pkg().Path(), + } + if rhs.Pkg() == pass.Pkg { + con.rhsObj = rhs + } + inlinableConsts[lhs] = con + // Create a fact only if the LHS is exported and defined at top level. + // We create a fact even if the RHS is non-exported, + // so we can warn about uses in other packages. + if lhs.Exported() && typesinternal.IsPackageLevel(lhs) { + pass.ExportObjectFact(lhs, con) + } } } } @@ -143,7 +190,7 @@ func run(pass *analysis.Pass) (any, error) { }) // Pass 2. Inline each static call to an inlinable function - // and each reference to an inlinable constant. + // and each reference to an inlinable constant or type alias. // // TODO(adonovan): handle multiple diffs that each add the same import. for cur := range cursor.Root(inspect).Preorder((*ast.CallExpr)(nil), (*ast.Ident)(nil)) { @@ -218,6 +265,65 @@ func run(pass *analysis.Pass) (any, error) { } case *ast.Ident: + // If the identifier is a use of an inlinable type alias, suggest inlining it. + // TODO(jba): much of this code is shared with the constant case, below. + // Try to factor more of it out, unless it will change anyway when we move beyond simple RHS's. + if ali, ok := pass.TypesInfo.Uses[n].(*types.TypeName); ok { + inalias, ok := inlinableAliases[ali] + if !ok { + var fact goFixInlineAliasFact + if pass.ImportObjectFact(ali, &fact) { + inalias = &fact + inlinableAliases[ali] = inalias + } + } + if inalias == nil { + continue // nope + } + curFile := currentFile(cur) + + // We have an identifier A here (n), possibly qualified by a package identifier (sel.X, + // where sel is the parent of X), // and an inlinable "type A = B" elsewhere (inali). + // Consider replacing A with B. 
+ + // Check that the expression we are inlining (B) means the same thing + // (refers to the same object) in n's scope as it does in A's scope. + // If the RHS is not in the current package, AddImport will handle + // shadowing, so we only need to worry about when both expressions + // are in the current package. + if pass.Pkg.Path() == inalias.RHSPkgPath { + // fcon.rhsObj is the object referred to by B in the definition of A. + scope := pass.TypesInfo.Scopes[curFile].Innermost(n.Pos()) // n's scope + _, obj := scope.LookupParent(inalias.RHSName, n.Pos()) // what "B" means in n's scope + if obj == nil { + // Should be impossible: if code at n can refer to the LHS, + // it can refer to the RHS. + panic(fmt.Sprintf("no object for inlinable alias %s RHS %s", n.Name, inalias.RHSName)) + } + if obj != inalias.rhsObj { + // "B" means something different here than at the inlinable const's scope. + continue + } + } else if !analysisinternal.CanImport(pass.Pkg.Path(), inalias.RHSPkgPath) { + // If this package can't see the RHS's package, we can't inline. + continue + } + var ( + importPrefix string + edits []analysis.TextEdit + ) + if inalias.RHSPkgPath != pass.Pkg.Path() { + _, importPrefix, edits = analysisinternal.AddImport( + pass.TypesInfo, curFile, inalias.RHSPkgName, inalias.RHSPkgPath, inalias.RHSName, n.Pos()) + } + // If n is qualified by a package identifier, we'll need the full selector expression. + var expr ast.Expr = n + if e, _ := cur.Edge(); e == edge.SelectorExpr_Sel { + expr = cur.Parent().Node().(ast.Expr) + } + reportInline(pass, "type alias", "Type alias", expr, edits, importPrefix+inalias.RHSName) + continue + } // If the identifier is a use of an inlinable constant, suggest inlining it. if con, ok := pass.TypesInfo.Uses[n].(*types.Const); ok { incon, ok := inlinableConsts[con] @@ -233,14 +339,10 @@ func run(pass *analysis.Pass) (any, error) { } // If n is qualified by a package identifier, we'll need the full selector expression. - var sel *ast.SelectorExpr - if e, _ := cur.Edge(); e == edge.SelectorExpr_Sel { - sel = cur.Parent().Node().(*ast.SelectorExpr) - } curFile := currentFile(cur) - // We have an identifier A here (n), possibly qualified by a package identifier (sel.X), - // and an inlinable "const A = B" elsewhere (fcon). + // We have an identifier A here (n), possibly qualified by a package identifier (sel.X, + // where sel is the parent of n), // and an inlinable "const A = B" elsewhere (incon). // Consider replacing A with B. // Check that the expression we are inlining (B) means the same thing @@ -249,7 +351,7 @@ func run(pass *analysis.Pass) (any, error) { // shadowing, so we only need to worry about when both expressions // are in the current package. if pass.Pkg.Path() == incon.RHSPkgPath { - // fcon.rhsObj is the object referred to by B in the definition of A. + // incon.rhsObj is the object referred to by B in the definition of A. scope := pass.TypesInfo.Scopes[curFile].Innermost(n.Pos()) // n's scope _, obj := scope.LookupParent(incon.RHSName, n.Pos()) // what "B" means in n's scope if obj == nil { @@ -273,31 +375,12 @@ func run(pass *analysis.Pass) (any, error) { _, importPrefix, edits = analysisinternal.AddImport( pass.TypesInfo, curFile, incon.RHSPkgName, incon.RHSPkgPath, incon.RHSName, n.Pos()) } - var ( - pos = n.Pos() - end = n.End() - name = n.Name - ) - // Replace the entire SelectorExpr if there is one. - if sel != nil { - pos = sel.Pos() - end = sel.End() - name = sel.X.(*ast.Ident).Name + "." 
+ n.Name + // If n is qualified by a package identifier, we'll need the full selector expression. + var expr ast.Expr = n + if e, _ := cur.Edge(); e == edge.SelectorExpr_Sel { + expr = cur.Parent().Node().(ast.Expr) } - edits = append(edits, analysis.TextEdit{ - Pos: pos, - End: end, - NewText: []byte(importPrefix + incon.RHSName), - }) - pass.Report(analysis.Diagnostic{ - Pos: pos, - End: end, - Message: fmt.Sprintf("Constant %s should be inlined", name), - SuggestedFixes: []analysis.SuggestedFix{{ - Message: fmt.Sprintf("Inline constant %s", name), - TextEdits: edits, - }}, - }) + reportInline(pass, "constant", "Constant", expr, edits, importPrefix+incon.RHSName) } } } @@ -305,6 +388,25 @@ func run(pass *analysis.Pass) (any, error) { return nil, nil } +// reportInline reports a diagnostic for fixing an inlinable name. +func reportInline(pass *analysis.Pass, kind, capKind string, ident ast.Expr, edits []analysis.TextEdit, newText string) { + edits = append(edits, analysis.TextEdit{ + Pos: ident.Pos(), + End: ident.End(), + NewText: []byte(newText), + }) + name := analysisinternal.Format(pass.Fset, ident) + pass.Report(analysis.Diagnostic{ + Pos: ident.Pos(), + End: ident.End(), + Message: fmt.Sprintf("%s %s should be inlined", capKind, name), + SuggestedFixes: []analysis.SuggestedFix{{ + Message: fmt.Sprintf("Inline %s %s", kind, name), + TextEdits: edits, + }}, + }) +} + // hasFixInline reports the presence of a "//go:fix inline" directive // in the comments. func hasFixInline(cg *ast.CommentGroup) bool { @@ -339,6 +441,22 @@ func (c *goFixInlineConstFact) String() string { func (*goFixInlineConstFact) AFact() {} +// A goFixInlineAliasFact is exported for each type alias marked "//go:fix inline". +// It holds information about an inlinable type alias. Gob-serializable. +type goFixInlineAliasFact struct { + // Information about "type LHSName = RHSName". 
+ RHSName string + RHSPkgPath string + RHSPkgName string + rhsObj types.Object // for current package +} + +func (c *goFixInlineAliasFact) String() string { + return fmt.Sprintf("goFixInline alias %q.%s", c.RHSPkgPath, c.RHSName) +} + +func (*goFixInlineAliasFact) AFact() {} + func discard(string, ...any) {} var builtinIota = types.Universe.Lookup("iota") diff --git a/gopls/internal/analysis/gofix/testdata/src/a/a.go b/gopls/internal/analysis/gofix/testdata/src/a/a.go index 4f41b9a8c5d..fb4d8b92172 100644 --- a/gopls/internal/analysis/gofix/testdata/src/a/a.go +++ b/gopls/internal/analysis/gofix/testdata/src/a/a.go @@ -101,3 +101,18 @@ func shadow() { _ = x } + +// Type aliases + +//go:fix inline +type A = T // want A: `goFixInline alias "a".T` + +var _ A // want `Type alias A should be inlined` + +type B = []T // nope: only named RHSs + +//go:fix inline +type AA = // want AA: `goFixInline alias "a".A` +A // want `Type alias A should be inlined` + +var _ AA // want `Type alias AA should be inlined` diff --git a/gopls/internal/analysis/gofix/testdata/src/a/a.go.golden b/gopls/internal/analysis/gofix/testdata/src/a/a.go.golden index 9e9cc25996f..9ab1bcbc652 100644 --- a/gopls/internal/analysis/gofix/testdata/src/a/a.go.golden +++ b/gopls/internal/analysis/gofix/testdata/src/a/a.go.golden @@ -101,3 +101,18 @@ func shadow() { _ = x } + +// Type aliases + +//go:fix inline +type A = T // want A: `goFixInline alias "a".T` + +var _ T // want `Type alias A should be inlined` + +type B = []T // nope: only named RHSs + +//go:fix inline +type AA = // want AA: `goFixInline alias "a".A` +T // want `Type alias A should be inlined` + +var _ A // want `Type alias AA should be inlined` diff --git a/gopls/internal/analysis/gofix/testdata/src/b/b.go b/gopls/internal/analysis/gofix/testdata/src/b/b.go index 74876738bea..d52fd514024 100644 --- a/gopls/internal/analysis/gofix/testdata/src/b/b.go +++ b/gopls/internal/analysis/gofix/testdata/src/b/b.go @@ -30,3 +30,5 @@ func g() { } const d = a.D // nope: a.D refers to a constant in a package that is not visible here. + +var _ a.A // want `Type alias a\.A should be inlined` diff --git a/gopls/internal/analysis/gofix/testdata/src/b/b.go.golden b/gopls/internal/analysis/gofix/testdata/src/b/b.go.golden index b3608d6793e..4228ffeb489 100644 --- a/gopls/internal/analysis/gofix/testdata/src/b/b.go.golden +++ b/gopls/internal/analysis/gofix/testdata/src/b/b.go.golden @@ -34,3 +34,5 @@ func g() { } const d = a.D // nope: a.D refers to a constant in a package that is not visible here. + +var _ a.T // want `Type alias a\.A should be inlined` From c0dbb60e4ba78287d76e623a94ae61616ff3c74c Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Thu, 13 Feb 2025 16:29:14 -0500 Subject: [PATCH 022/270] gopls: tweak release notes Also, move gofix command into a package so that it can be "go run" from the release branch. 
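With the command in its own package, it can be run directly, for example (this
is the same invocation added to the documentation in this CL):

    $ go run golang.org/x/tools/gopls/internal/analysis/gofix/cmd/gofix@latest -test ./...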
Change-Id: I0a75c1ec4b00d22eef6c13c5162dd02ed9ef272f Reviewed-on: https://go-review.googlesource.com/c/tools/+/649318 Auto-Submit: Alan Donovan Reviewed-by: Jonathan Amsterdam LUCI-TryBot-Result: Go LUCI --- gopls/doc/analyzers.md | 38 ++++++++++++++++--- gopls/doc/release/v0.18.0.md | 18 +++++++-- .../analysis/gofix/{ => cmd/gofix}/main.go | 5 +-- gopls/internal/analysis/gofix/doc.go | 4 ++ gopls/internal/analysis/modernize/doc.go | 9 +++++ gopls/internal/analysis/unusedfunc/doc.go | 29 +++++++++++--- gopls/internal/doc/api.json | 8 ++-- 7 files changed, 88 insertions(+), 23 deletions(-) rename gopls/internal/analysis/gofix/{ => cmd/gofix}/main.go (83%) diff --git a/gopls/doc/analyzers.md b/gopls/doc/analyzers.md index 68465f9809d..dde95591718 100644 --- a/gopls/doc/analyzers.md +++ b/gopls/doc/analyzers.md @@ -500,6 +500,15 @@ existing code by using more modern features of Go, such as: - replacing Split in "for range strings.Split(...)" by go1.24's more efficient SplitSeq; +To apply all modernization fixes en masse, you can use the +following command: + + $ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -test ./... + +If the tool warns of conflicting fixes, you may need to run it more +than once until it has applied all fixes cleanly. This command is +not an officially supported interface and may change in the future. + Default: on. Package documentation: [modernize](https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/modernize) @@ -962,12 +971,29 @@ A method is considered unused if it is unexported, not referenced that of any method of an interface type declared within the same package. -The tool may report a false positive for a declaration of an -unexported function that is referenced from another package using -the go:linkname mechanism, if the declaration's doc comment does -not also have a go:linkname comment. (Such code is in any case -strongly discouraged: linkname annotations, if they must be used at -all, should be used on both the declaration and the alias.) +The tool may report false positives in some situations, for +example: + + - For a declaration of an unexported function that is referenced + from another package using the go:linkname mechanism, if the + declaration's doc comment does not also have a go:linkname + comment. + + (Such code is in any case strongly discouraged: linkname + annotations, if they must be used at all, should be used on both + the declaration and the alias.) + + - For compiler intrinsics in the "runtime" package that, though + never referenced, are known to the compiler and are called + indirectly by compiled object code. + + - For functions called only from assembly. + + - For functions called only from files whose build tags are not + selected in the current build configuration. + +See https://github.com/golang/go/issues/71686 for discussion of +these limitations. The unusedfunc algorithm is not as precise as the golang.org/x/tools/cmd/deadcode tool, but it has the advantage that diff --git a/gopls/doc/release/v0.18.0.md b/gopls/doc/release/v0.18.0.md index 8d641a2104f..ba2c0184307 100644 --- a/gopls/doc/release/v0.18.0.md +++ b/gopls/doc/release/v0.18.0.md @@ -37,16 +37,22 @@ details to be reported as diagnostics. For example, it indicates which variables escape to the heap, and which array accesses require bounds checks. +TODO: add links to the complete manual for each item. 
+ ## New `modernize` analyzer Gopls now reports when code could be simplified or clarified by using more modern features of Go, and provides a quick fix to apply the change. -Examples: +For example, a conditional assignment using an if/else statement may +be replaced by a call to the `min` or `max` built-in functions added +in Go 1.18. -- replacement of conditional assignment using an if/else statement by - a call to the `min` or `max` built-in functions added in Go 1.18; +Use this command to apply modernization fixes en masse: +``` +$ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -test ./... +``` ## New `unusedfunc` analyzer @@ -97,6 +103,12 @@ const Ptr = Pointer ``` gopls will suggest replacing `Ptr` in your code with `Pointer`. +Use this command to apply such fixes en masse: + +``` +$ go run golang.org/x/tools/gopls/internal/analysis/gofix/cmd/gofix@latest -test ./... +``` + ## "Implementations" supports generics At long last, the "Go to Implementations" feature now fully supports diff --git a/gopls/internal/analysis/gofix/main.go b/gopls/internal/analysis/gofix/cmd/gofix/main.go similarity index 83% rename from gopls/internal/analysis/gofix/main.go rename to gopls/internal/analysis/gofix/cmd/gofix/main.go index fde633f2f62..d75978f6e59 100644 --- a/gopls/internal/analysis/gofix/main.go +++ b/gopls/internal/analysis/gofix/cmd/gofix/main.go @@ -1,10 +1,7 @@ -// Copyright 2023 The Go Authors. All rights reserved. +// Copyright 2025 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build ignore -// +build ignore - // The inline command applies the inliner to the specified packages of // Go source code. Run with: // diff --git a/gopls/internal/analysis/gofix/doc.go b/gopls/internal/analysis/gofix/doc.go index a0c6a08ded9..ad8b067daa4 100644 --- a/gopls/internal/analysis/gofix/doc.go +++ b/gopls/internal/analysis/gofix/doc.go @@ -77,5 +77,9 @@ or before a group, applying to every constant in the group: ) The proposal https://go.dev/issue/32816 introduces the "//go:fix" directives. + +You can use this (officially unsupported) command to apply gofix fixes en masse: + + $ go run golang.org/x/tools/gopls/internal/analysis/gofix/cmd/gofix@latest -test ./... */ package gofix diff --git a/gopls/internal/analysis/modernize/doc.go b/gopls/internal/analysis/modernize/doc.go index 15aeab64d8d..3759fdb10c5 100644 --- a/gopls/internal/analysis/modernize/doc.go +++ b/gopls/internal/analysis/modernize/doc.go @@ -32,4 +32,13 @@ // for i := range n {}, added in go1.22; // - replacing Split in "for range strings.Split(...)" by go1.24's // more efficient SplitSeq; +// +// To apply all modernization fixes en masse, you can use the +// following command: +// +// $ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -test ./... +// +// If the tool warns of conflicting fixes, you may need to run it more +// than once until it has applied all fixes cleanly. This command is +// not an officially supported interface and may change in the future. package modernize diff --git a/gopls/internal/analysis/unusedfunc/doc.go b/gopls/internal/analysis/unusedfunc/doc.go index 5946ed897bb..9e2fc8145c8 100644 --- a/gopls/internal/analysis/unusedfunc/doc.go +++ b/gopls/internal/analysis/unusedfunc/doc.go @@ -20,12 +20,29 @@ // that of any method of an interface type declared within the same // package. 
// -// The tool may report a false positive for a declaration of an -// unexported function that is referenced from another package using -// the go:linkname mechanism, if the declaration's doc comment does -// not also have a go:linkname comment. (Such code is in any case -// strongly discouraged: linkname annotations, if they must be used at -// all, should be used on both the declaration and the alias.) +// The tool may report false positives in some situations, for +// example: +// +// - For a declaration of an unexported function that is referenced +// from another package using the go:linkname mechanism, if the +// declaration's doc comment does not also have a go:linkname +// comment. +// +// (Such code is in any case strongly discouraged: linkname +// annotations, if they must be used at all, should be used on both +// the declaration and the alias.) +// +// - For compiler intrinsics in the "runtime" package that, though +// never referenced, are known to the compiler and are called +// indirectly by compiled object code. +// +// - For functions called only from assembly. +// +// - For functions called only from files whose build tags are not +// selected in the current build configuration. +// +// See https://github.com/golang/go/issues/71686 for discussion of +// these limitations. // // The unusedfunc algorithm is not as precise as the // golang.org/x/tools/cmd/deadcode tool, but it has the advantage that diff --git a/gopls/internal/doc/api.json b/gopls/internal/doc/api.json index 8f101079a9c..629e45ff766 100644 --- a/gopls/internal/doc/api.json +++ b/gopls/internal/doc/api.json @@ -510,7 +510,7 @@ }, { "Name": "\"modernize\"", - "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code by using more modern features of Go, such as:\n\n - replacing an if/else conditional assignment by a call to the\n built-in min or max functions added in go1.21;\n - replacing sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21;\n - replacing interface{} by the 'any' type added in go1.18;\n - replacing append([]T(nil), s...) by slices.Clone(s) or\n slices.Concat(s), added in go1.21;\n - replacing a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions\n from the maps package, added in go1.21;\n - replacing []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...),\n added in go1.19;\n - replacing uses of context.WithCancel in tests with t.Context, added in\n go1.24;\n - replacing omitempty by omitzero on structs, added in go1.24;\n - replacing append(s[:i], s[i+1]...) by slices.Delete(s, i, i+1),\n added in go1.21\n - replacing a 3-clause for i := 0; i \u003c n; i++ {} loop by\n for i := range n {}, added in go1.22;\n - replacing Split in \"for range strings.Split(...)\" by go1.24's\n more efficient SplitSeq;", + "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code by using more modern features of Go, such as:\n\n - replacing an if/else conditional assignment by a call to the\n built-in min or max functions added in go1.21;\n - replacing sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21;\n - replacing interface{} by the 'any' type added in go1.18;\n - replacing append([]T(nil), s...) 
by slices.Clone(s) or\n slices.Concat(s), added in go1.21;\n - replacing a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions\n from the maps package, added in go1.21;\n - replacing []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...),\n added in go1.19;\n - replacing uses of context.WithCancel in tests with t.Context, added in\n go1.24;\n - replacing omitempty by omitzero on structs, added in go1.24;\n - replacing append(s[:i], s[i+1]...) by slices.Delete(s, i, i+1),\n added in go1.21\n - replacing a 3-clause for i := 0; i \u003c n; i++ {} loop by\n for i := range n {}, added in go1.22;\n - replacing Split in \"for range strings.Split(...)\" by go1.24's\n more efficient SplitSeq;\n\nTo apply all modernization fixes en masse, you can use the\nfollowing command:\n\n\t$ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -test ./...\n\nIf the tool warns of conflicting fixes, you may need to run it more\nthan once until it has applied all fixes cleanly. This command is\nnot an officially supported interface and may change in the future.", "Default": "true" }, { @@ -630,7 +630,7 @@ }, { "Name": "\"unusedfunc\"", - "Doc": "check for unused functions and methods\n\nThe unusedfunc analyzer reports functions and methods that are\nnever referenced outside of their own declaration.\n\nA function is considered unused if it is unexported and not\nreferenced (except within its own declaration).\n\nA method is considered unused if it is unexported, not referenced\n(except within its own declaration), and its name does not match\nthat of any method of an interface type declared within the same\npackage.\n\nThe tool may report a false positive for a declaration of an\nunexported function that is referenced from another package using\nthe go:linkname mechanism, if the declaration's doc comment does\nnot also have a go:linkname comment. 
(Such code is in any case\nstrongly discouraged: linkname annotations, if they must be used at\nall, should be used on both the declaration and the alias.)\n\nThe unusedfunc algorithm is not as precise as the\ngolang.org/x/tools/cmd/deadcode tool, but it has the advantage that\nit runs within the modular analysis framework, enabling near\nreal-time feedback within gopls.", + "Doc": "check for unused functions and methods\n\nThe unusedfunc analyzer reports functions and methods that are\nnever referenced outside of their own declaration.\n\nA function is considered unused if it is unexported and not\nreferenced (except within its own declaration).\n\nA method is considered unused if it is unexported, not referenced\n(except within its own declaration), and its name does not match\nthat of any method of an interface type declared within the same\npackage.\n\nThe tool may report false positives in some situations, for\nexample:\n\n - For a declaration of an unexported function that is referenced\n from another package using the go:linkname mechanism, if the\n declaration's doc comment does not also have a go:linkname\n comment.\n\n (Such code is in any case strongly discouraged: linkname\n annotations, if they must be used at all, should be used on both\n the declaration and the alias.)\n\n - For compiler intrinsics in the \"runtime\" package that, though\n never referenced, are known to the compiler and are called\n indirectly by compiled object code.\n\n - For functions called only from assembly.\n\n - For functions called only from files whose build tags are not\n selected in the current build configuration.\n\nSee https://github.com/golang/go/issues/71686 for discussion of\nthese limitations.\n\nThe unusedfunc algorithm is not as precise as the\ngolang.org/x/tools/cmd/deadcode tool, but it has the advantage that\nit runs within the modular analysis framework, enabling near\nreal-time feedback within gopls.", "Default": "true" }, { @@ -1189,7 +1189,7 @@ }, { "Name": "modernize", - "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code by using more modern features of Go, such as:\n\n - replacing an if/else conditional assignment by a call to the\n built-in min or max functions added in go1.21;\n - replacing sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21;\n - replacing interface{} by the 'any' type added in go1.18;\n - replacing append([]T(nil), s...) by slices.Clone(s) or\n slices.Concat(s), added in go1.21;\n - replacing a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions\n from the maps package, added in go1.21;\n - replacing []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...),\n added in go1.19;\n - replacing uses of context.WithCancel in tests with t.Context, added in\n go1.24;\n - replacing omitempty by omitzero on structs, added in go1.24;\n - replacing append(s[:i], s[i+1]...) 
by slices.Delete(s, i, i+1),\n added in go1.21\n - replacing a 3-clause for i := 0; i \u003c n; i++ {} loop by\n for i := range n {}, added in go1.22;\n - replacing Split in \"for range strings.Split(...)\" by go1.24's\n more efficient SplitSeq;", + "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code by using more modern features of Go, such as:\n\n - replacing an if/else conditional assignment by a call to the\n built-in min or max functions added in go1.21;\n - replacing sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21;\n - replacing interface{} by the 'any' type added in go1.18;\n - replacing append([]T(nil), s...) by slices.Clone(s) or\n slices.Concat(s), added in go1.21;\n - replacing a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions\n from the maps package, added in go1.21;\n - replacing []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...),\n added in go1.19;\n - replacing uses of context.WithCancel in tests with t.Context, added in\n go1.24;\n - replacing omitempty by omitzero on structs, added in go1.24;\n - replacing append(s[:i], s[i+1]...) by slices.Delete(s, i, i+1),\n added in go1.21\n - replacing a 3-clause for i := 0; i \u003c n; i++ {} loop by\n for i := range n {}, added in go1.22;\n - replacing Split in \"for range strings.Split(...)\" by go1.24's\n more efficient SplitSeq;\n\nTo apply all modernization fixes en masse, you can use the\nfollowing command:\n\n\t$ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -test ./...\n\nIf the tool warns of conflicting fixes, you may need to run it more\nthan once until it has applied all fixes cleanly. This command is\nnot an officially supported interface and may change in the future.", "URL": "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/modernize", "Default": true }, @@ -1333,7 +1333,7 @@ }, { "Name": "unusedfunc", - "Doc": "check for unused functions and methods\n\nThe unusedfunc analyzer reports functions and methods that are\nnever referenced outside of their own declaration.\n\nA function is considered unused if it is unexported and not\nreferenced (except within its own declaration).\n\nA method is considered unused if it is unexported, not referenced\n(except within its own declaration), and its name does not match\nthat of any method of an interface type declared within the same\npackage.\n\nThe tool may report a false positive for a declaration of an\nunexported function that is referenced from another package using\nthe go:linkname mechanism, if the declaration's doc comment does\nnot also have a go:linkname comment. 
(Such code is in any case\nstrongly discouraged: linkname annotations, if they must be used at\nall, should be used on both the declaration and the alias.)\n\nThe unusedfunc algorithm is not as precise as the\ngolang.org/x/tools/cmd/deadcode tool, but it has the advantage that\nit runs within the modular analysis framework, enabling near\nreal-time feedback within gopls.", + "Doc": "check for unused functions and methods\n\nThe unusedfunc analyzer reports functions and methods that are\nnever referenced outside of their own declaration.\n\nA function is considered unused if it is unexported and not\nreferenced (except within its own declaration).\n\nA method is considered unused if it is unexported, not referenced\n(except within its own declaration), and its name does not match\nthat of any method of an interface type declared within the same\npackage.\n\nThe tool may report false positives in some situations, for\nexample:\n\n - For a declaration of an unexported function that is referenced\n from another package using the go:linkname mechanism, if the\n declaration's doc comment does not also have a go:linkname\n comment.\n\n (Such code is in any case strongly discouraged: linkname\n annotations, if they must be used at all, should be used on both\n the declaration and the alias.)\n\n - For compiler intrinsics in the \"runtime\" package that, though\n never referenced, are known to the compiler and are called\n indirectly by compiled object code.\n\n - For functions called only from assembly.\n\n - For functions called only from files whose build tags are not\n selected in the current build configuration.\n\nSee https://github.com/golang/go/issues/71686 for discussion of\nthese limitations.\n\nThe unusedfunc algorithm is not as precise as the\ngolang.org/x/tools/cmd/deadcode tool, but it has the advantage that\nit runs within the modular analysis framework, enabling near\nreal-time feedback within gopls.", "URL": "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/unusedfunc", "Default": true }, From 8807101233fe3a3ccf31dca77298fe538436ee20 Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Thu, 13 Feb 2025 17:48:26 -0500 Subject: [PATCH 023/270] gopls/internal/analysis/gofix: one function per pass Split the two passes into two separate functions. Declare a type to hold common state. No behavior changes. For golang/go#32816. Change-Id: I571956859b12687d824f36c80f75deb96db38d92 Reviewed-on: https://go-review.googlesource.com/c/tools/+/649476 Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI --- gopls/internal/analysis/gofix/gofix.go | 224 +++++++++++++------------ 1 file changed, 120 insertions(+), 104 deletions(-) diff --git a/gopls/internal/analysis/gofix/gofix.go b/gopls/internal/analysis/gofix/gofix.go index 147399d315d..35d21c0e05a 100644 --- a/gopls/internal/analysis/gofix/gofix.go +++ b/gopls/internal/analysis/gofix/gofix.go @@ -37,63 +37,60 @@ var Analyzer = &analysis.Analyzer{ Requires: []*analysis.Analyzer{inspect.Analyzer}, } -func run(pass *analysis.Pass) (any, error) { - // Memoize repeated calls for same file. - fileContent := make(map[string][]byte) - readFile := func(node ast.Node) ([]byte, error) { - filename := pass.Fset.File(node.Pos()).Name() - content, ok := fileContent[filename] - if !ok { - var err error - content, err = pass.ReadFile(filename) - if err != nil { - return nil, err - } - fileContent[filename] = content - } - return content, nil - } +// analyzer holds the state for this analysis. 
+type analyzer struct { + pass *analysis.Pass + root cursor.Cursor + // memoization of repeated calls for same file. + fileContent map[string][]byte + // memoization of fact imports (nil => no fact) + inlinableFuncs map[*types.Func]*inline.Callee + inlinableConsts map[*types.Const]*goFixInlineConstFact + inlinableAliases map[*types.TypeName]*goFixInlineAliasFact +} - // Return the unique ast.File for a cursor. - currentFile := func(c cursor.Cursor) *ast.File { - cf, _ := moreiters.First(c.Ancestors((*ast.File)(nil))) - return cf.Node().(*ast.File) +func run(pass *analysis.Pass) (any, error) { + a := &analyzer{ + pass: pass, + root: cursor.Root(pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)), + fileContent: make(map[string][]byte), + inlinableFuncs: make(map[*types.Func]*inline.Callee), + inlinableConsts: make(map[*types.Const]*goFixInlineConstFact), + inlinableAliases: make(map[*types.TypeName]*goFixInlineAliasFact), } + a.find() + a.inline() + return nil, nil +} - // Pass 1: find functions and constants annotated with an appropriate "//go:fix" - // comment (the syntax proposed by #32816), - // and export a fact for each one. - var ( - inlinableFuncs = make(map[*types.Func]*inline.Callee) // memoization of fact import (nil => no fact) - inlinableConsts = make(map[*types.Const]*goFixInlineConstFact) - inlinableAliases = make(map[*types.TypeName]*goFixInlineAliasFact) - ) - - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - nodeFilter := []ast.Node{(*ast.FuncDecl)(nil), (*ast.GenDecl)(nil)} - inspect.Preorder(nodeFilter, func(n ast.Node) { - switch decl := n.(type) { +// find finds functions and constants annotated with an appropriate "//go:fix" +// comment (the syntax proposed by #32816), +// and exports a fact for each one. +func (a *analyzer) find() { + info := a.pass.TypesInfo + for cur := range a.root.Preorder((*ast.FuncDecl)(nil), (*ast.GenDecl)(nil)) { + switch decl := cur.Node().(type) { case *ast.FuncDecl: if !hasFixInline(decl.Doc) { - return + continue } - content, err := readFile(decl) + content, err := a.readFile(decl) if err != nil { - pass.Reportf(decl.Doc.Pos(), "invalid inlining candidate: cannot read source file: %v", err) - return + a.pass.Reportf(decl.Doc.Pos(), "invalid inlining candidate: cannot read source file: %v", err) + continue } - callee, err := inline.AnalyzeCallee(discard, pass.Fset, pass.Pkg, pass.TypesInfo, decl, content) + callee, err := inline.AnalyzeCallee(discard, a.pass.Fset, a.pass.Pkg, info, decl, content) if err != nil { - pass.Reportf(decl.Doc.Pos(), "invalid inlining candidate: %v", err) - return + a.pass.Reportf(decl.Doc.Pos(), "invalid inlining candidate: %v", err) + continue } - fn := pass.TypesInfo.Defs[decl.Name].(*types.Func) - pass.ExportObjectFact(fn, &goFixInlineFuncFact{callee}) - inlinableFuncs[fn] = callee + fn := info.Defs[decl.Name].(*types.Func) + a.pass.ExportObjectFact(fn, &goFixInlineFuncFact{callee}) + a.inlinableFuncs[fn] = callee case *ast.GenDecl: if decl.Tok != token.CONST && decl.Tok != token.TYPE { - return + continue } declInline := hasFixInline(decl.Doc) // Accept inline directives on the entire decl as well as individual specs. 
@@ -104,7 +101,7 @@ func run(pass *analysis.Pass) (any, error) { continue } if !spec.Assign.IsValid() { - pass.Reportf(spec.Pos(), "invalid //go:fix inline directive: not a type alias") + a.pass.Reportf(spec.Pos(), "invalid //go:fix inline directive: not a type alias") continue } if spec.TypeParams != nil { @@ -122,23 +119,23 @@ func run(pass *analysis.Pass) (any, error) { default: continue } - lhs := pass.TypesInfo.Defs[spec.Name].(*types.TypeName) + lhs := info.Defs[spec.Name].(*types.TypeName) // more (jba): test one alias pointing to another alias - rhs := pass.TypesInfo.Uses[rhsID].(*types.TypeName) + rhs := info.Uses[rhsID].(*types.TypeName) typ := &goFixInlineAliasFact{ RHSName: rhs.Name(), RHSPkgName: rhs.Pkg().Name(), RHSPkgPath: rhs.Pkg().Path(), } - if rhs.Pkg() == pass.Pkg { + if rhs.Pkg() == a.pass.Pkg { typ.rhsObj = rhs } - inlinableAliases[lhs] = typ + a.inlinableAliases[lhs] = typ // Create a fact only if the LHS is exported and defined at top level. // We create a fact even if the RHS is non-exported, // so we can warn about uses in other packages. if lhs.Exported() && typesinternal.IsPackageLevel(lhs) { - pass.ExportObjectFact(lhs, typ) + a.pass.ExportObjectFact(lhs, typ) } case *ast.ValueSpec: // Tok == CONST @@ -154,58 +151,65 @@ func run(pass *analysis.Pass) (any, error) { switch e := val.(type) { case *ast.Ident: // Constants defined with the predeclared iota cannot be inlined. - if pass.TypesInfo.Uses[e] == builtinIota { - pass.Reportf(val.Pos(), "invalid //go:fix inline directive: const value is iota") + if info.Uses[e] == builtinIota { + a.pass.Reportf(val.Pos(), "invalid //go:fix inline directive: const value is iota") continue } rhsID = e case *ast.SelectorExpr: rhsID = e.Sel default: - pass.Reportf(val.Pos(), "invalid //go:fix inline directive: const value is not the name of another constant") + a.pass.Reportf(val.Pos(), "invalid //go:fix inline directive: const value is not the name of another constant") continue } - lhs := pass.TypesInfo.Defs[name].(*types.Const) - rhs := pass.TypesInfo.Uses[rhsID].(*types.Const) // must be so in a well-typed program + lhs := info.Defs[name].(*types.Const) + rhs := info.Uses[rhsID].(*types.Const) // must be so in a well-typed program con := &goFixInlineConstFact{ RHSName: rhs.Name(), RHSPkgName: rhs.Pkg().Name(), RHSPkgPath: rhs.Pkg().Path(), } - if rhs.Pkg() == pass.Pkg { + if rhs.Pkg() == a.pass.Pkg { con.rhsObj = rhs } - inlinableConsts[lhs] = con + a.inlinableConsts[lhs] = con // Create a fact only if the LHS is exported and defined at top level. // We create a fact even if the RHS is non-exported, // so we can warn about uses in other packages. if lhs.Exported() && typesinternal.IsPackageLevel(lhs) { - pass.ExportObjectFact(lhs, con) + a.pass.ExportObjectFact(lhs, con) } } } } } } - }) + } +} + +// inline inlines each static call to an inlinable function +// and each reference to an inlinable constant or type alias. +// +// TODO(adonovan): handle multiple diffs that each add the same import. +func (a *analyzer) inline() { + // Return the unique ast.File for a cursor. + currentFile := func(c cursor.Cursor) *ast.File { + cf, _ := moreiters.First(c.Ancestors((*ast.File)(nil))) + return cf.Node().(*ast.File) + } - // Pass 2. Inline each static call to an inlinable function - // and each reference to an inlinable constant or type alias. - // - // TODO(adonovan): handle multiple diffs that each add the same import. 
- for cur := range cursor.Root(inspect).Preorder((*ast.CallExpr)(nil), (*ast.Ident)(nil)) { - n := cur.Node() - switch n := n.(type) { + for cur := range a.root.Preorder((*ast.CallExpr)(nil), (*ast.Ident)(nil)) { + switch n := cur.Node().(type) { case *ast.CallExpr: call := n - if fn := typeutil.StaticCallee(pass.TypesInfo, call); fn != nil { + if fn := typeutil.StaticCallee(a.pass.TypesInfo, call); fn != nil { // Inlinable? - callee, ok := inlinableFuncs[fn] + callee, ok := a.inlinableFuncs[fn] if !ok { var fact goFixInlineFuncFact - if pass.ImportObjectFact(fn, &fact) { + if a.pass.ImportObjectFact(fn, &fact) { callee = fact.Callee - inlinableFuncs[fn] = callee + a.inlinableFuncs[fn] = callee } } if callee == nil { @@ -213,23 +217,23 @@ func run(pass *analysis.Pass) (any, error) { } // Inline the call. - content, err := readFile(call) + content, err := a.readFile(call) if err != nil { - pass.Reportf(call.Lparen, "invalid inlining candidate: cannot read source file: %v", err) + a.pass.Reportf(call.Lparen, "invalid inlining candidate: cannot read source file: %v", err) continue } curFile := currentFile(cur) caller := &inline.Caller{ - Fset: pass.Fset, - Types: pass.Pkg, - Info: pass.TypesInfo, + Fset: a.pass.Fset, + Types: a.pass.Pkg, + Info: a.pass.TypesInfo, File: curFile, Call: call, Content: content, } res, err := inline.Inline(caller, callee, &inline.Options{Logf: discard}) if err != nil { - pass.Reportf(call.Lparen, "%v", err) + a.pass.Reportf(call.Lparen, "%v", err) continue } if res.Literalized { @@ -253,7 +257,7 @@ func run(pass *analysis.Pass) (any, error) { NewText: []byte(edit.New), }) } - pass.Report(analysis.Diagnostic{ + a.pass.Report(analysis.Diagnostic{ Pos: call.Pos(), End: call.End(), Message: fmt.Sprintf("Call of %v should be inlined", callee), @@ -268,13 +272,13 @@ func run(pass *analysis.Pass) (any, error) { // If the identifier is a use of an inlinable type alias, suggest inlining it. // TODO(jba): much of this code is shared with the constant case, below. // Try to factor more of it out, unless it will change anyway when we move beyond simple RHS's. - if ali, ok := pass.TypesInfo.Uses[n].(*types.TypeName); ok { - inalias, ok := inlinableAliases[ali] + if ali, ok := a.pass.TypesInfo.Uses[n].(*types.TypeName); ok { + inalias, ok := a.inlinableAliases[ali] if !ok { var fact goFixInlineAliasFact - if pass.ImportObjectFact(ali, &fact) { + if a.pass.ImportObjectFact(ali, &fact) { inalias = &fact - inlinableAliases[ali] = inalias + a.inlinableAliases[ali] = inalias } } if inalias == nil { @@ -291,10 +295,10 @@ func run(pass *analysis.Pass) (any, error) { // If the RHS is not in the current package, AddImport will handle // shadowing, so we only need to worry about when both expressions // are in the current package. - if pass.Pkg.Path() == inalias.RHSPkgPath { + if a.pass.Pkg.Path() == inalias.RHSPkgPath { // fcon.rhsObj is the object referred to by B in the definition of A. - scope := pass.TypesInfo.Scopes[curFile].Innermost(n.Pos()) // n's scope - _, obj := scope.LookupParent(inalias.RHSName, n.Pos()) // what "B" means in n's scope + scope := a.pass.TypesInfo.Scopes[curFile].Innermost(n.Pos()) // n's scope + _, obj := scope.LookupParent(inalias.RHSName, n.Pos()) // what "B" means in n's scope if obj == nil { // Should be impossible: if code at n can refer to the LHS, // it can refer to the RHS. @@ -304,7 +308,7 @@ func run(pass *analysis.Pass) (any, error) { // "B" means something different here than at the inlinable const's scope. 
continue } - } else if !analysisinternal.CanImport(pass.Pkg.Path(), inalias.RHSPkgPath) { + } else if !analysisinternal.CanImport(a.pass.Pkg.Path(), inalias.RHSPkgPath) { // If this package can't see the RHS's package, we can't inline. continue } @@ -312,26 +316,26 @@ func run(pass *analysis.Pass) (any, error) { importPrefix string edits []analysis.TextEdit ) - if inalias.RHSPkgPath != pass.Pkg.Path() { + if inalias.RHSPkgPath != a.pass.Pkg.Path() { _, importPrefix, edits = analysisinternal.AddImport( - pass.TypesInfo, curFile, inalias.RHSPkgName, inalias.RHSPkgPath, inalias.RHSName, n.Pos()) + a.pass.TypesInfo, curFile, inalias.RHSPkgName, inalias.RHSPkgPath, inalias.RHSName, n.Pos()) } // If n is qualified by a package identifier, we'll need the full selector expression. var expr ast.Expr = n if e, _ := cur.Edge(); e == edge.SelectorExpr_Sel { expr = cur.Parent().Node().(ast.Expr) } - reportInline(pass, "type alias", "Type alias", expr, edits, importPrefix+inalias.RHSName) + a.reportInline("type alias", "Type alias", expr, edits, importPrefix+inalias.RHSName) continue } // If the identifier is a use of an inlinable constant, suggest inlining it. - if con, ok := pass.TypesInfo.Uses[n].(*types.Const); ok { - incon, ok := inlinableConsts[con] + if con, ok := a.pass.TypesInfo.Uses[n].(*types.Const); ok { + incon, ok := a.inlinableConsts[con] if !ok { var fact goFixInlineConstFact - if pass.ImportObjectFact(con, &fact) { + if a.pass.ImportObjectFact(con, &fact) { incon = &fact - inlinableConsts[con] = incon + a.inlinableConsts[con] = incon } } if incon == nil { @@ -350,10 +354,10 @@ func run(pass *analysis.Pass) (any, error) { // If the RHS is not in the current package, AddImport will handle // shadowing, so we only need to worry about when both expressions // are in the current package. - if pass.Pkg.Path() == incon.RHSPkgPath { + if a.pass.Pkg.Path() == incon.RHSPkgPath { // incon.rhsObj is the object referred to by B in the definition of A. - scope := pass.TypesInfo.Scopes[curFile].Innermost(n.Pos()) // n's scope - _, obj := scope.LookupParent(incon.RHSName, n.Pos()) // what "B" means in n's scope + scope := a.pass.TypesInfo.Scopes[curFile].Innermost(n.Pos()) // n's scope + _, obj := scope.LookupParent(incon.RHSName, n.Pos()) // what "B" means in n's scope if obj == nil { // Should be impossible: if code at n can refer to the LHS, // it can refer to the RHS. @@ -363,7 +367,7 @@ func run(pass *analysis.Pass) (any, error) { // "B" means something different here than at the inlinable const's scope. continue } - } else if !analysisinternal.CanImport(pass.Pkg.Path(), incon.RHSPkgPath) { + } else if !analysisinternal.CanImport(a.pass.Pkg.Path(), incon.RHSPkgPath) { // If this package can't see the RHS's package, we can't inline. continue } @@ -371,32 +375,30 @@ func run(pass *analysis.Pass) (any, error) { importPrefix string edits []analysis.TextEdit ) - if incon.RHSPkgPath != pass.Pkg.Path() { + if incon.RHSPkgPath != a.pass.Pkg.Path() { _, importPrefix, edits = analysisinternal.AddImport( - pass.TypesInfo, curFile, incon.RHSPkgName, incon.RHSPkgPath, incon.RHSName, n.Pos()) + a.pass.TypesInfo, curFile, incon.RHSPkgName, incon.RHSPkgPath, incon.RHSName, n.Pos()) } // If n is qualified by a package identifier, we'll need the full selector expression. 
var expr ast.Expr = n if e, _ := cur.Edge(); e == edge.SelectorExpr_Sel { expr = cur.Parent().Node().(ast.Expr) } - reportInline(pass, "constant", "Constant", expr, edits, importPrefix+incon.RHSName) + a.reportInline("constant", "Constant", expr, edits, importPrefix+incon.RHSName) } } } - - return nil, nil } // reportInline reports a diagnostic for fixing an inlinable name. -func reportInline(pass *analysis.Pass, kind, capKind string, ident ast.Expr, edits []analysis.TextEdit, newText string) { +func (a *analyzer) reportInline(kind, capKind string, ident ast.Expr, edits []analysis.TextEdit, newText string) { edits = append(edits, analysis.TextEdit{ Pos: ident.Pos(), End: ident.End(), NewText: []byte(newText), }) - name := analysisinternal.Format(pass.Fset, ident) - pass.Report(analysis.Diagnostic{ + name := analysisinternal.Format(a.pass.Fset, ident) + a.pass.Report(analysis.Diagnostic{ Pos: ident.Pos(), End: ident.End(), Message: fmt.Sprintf("%s %s should be inlined", capKind, name), @@ -407,6 +409,20 @@ func reportInline(pass *analysis.Pass, kind, capKind string, ident ast.Expr, edi }) } +func (a *analyzer) readFile(node ast.Node) ([]byte, error) { + filename := a.pass.Fset.File(node.Pos()).Name() + content, ok := a.fileContent[filename] + if !ok { + var err error + content, err = a.pass.ReadFile(filename) + if err != nil { + return nil, err + } + a.fileContent[filename] = content + } + return content, nil +} + // hasFixInline reports the presence of a "//go:fix inline" directive // in the comments. func hasFixInline(cg *ast.CommentGroup) bool { From 2880aae7521d3d90acfcef88c2efd17fc8f10369 Mon Sep 17 00:00:00 2001 From: Peter Weinberger Date: Thu, 13 Feb 2025 17:16:56 -0500 Subject: [PATCH 024/270] gopls/internal/protocol: Avoid omitempty for integer fields The existing code adds the json tag 'omitempty' to optional protocol fields, but for some integer-valued fields 0 and empty have different semantics. This CL changes the behavior so that optional integer-valued fields are not omitted if they are zero. FIxes golang.go/go#71489 Change-Id: I1f2cd6c6b0d6b7495adb1d8bc0b404ee9ea895f5 Reviewed-on: https://go-review.googlesource.com/c/tools/+/649455 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan --- gopls/internal/protocol/generate/generate.go | 35 +++++++++++--------- gopls/internal/protocol/generate/output.go | 13 ++++++-- gopls/internal/protocol/tsprotocol.go | 22 ++++++------ 3 files changed, 41 insertions(+), 29 deletions(-) diff --git a/gopls/internal/protocol/generate/generate.go b/gopls/internal/protocol/generate/generate.go index 2bb14790940..9c7009113ab 100644 --- a/gopls/internal/protocol/generate/generate.go +++ b/gopls/internal/protocol/generate/generate.go @@ -54,39 +54,44 @@ func generateDoc(out *bytes.Buffer, doc string) { // decide if a property is optional, and if it needs a * // return ",omitempty" if it is optional, and "*" if it needs a pointer -func propStar(name string, t NameType, gotype string) (string, string) { - var opt, star string +func propStar(name string, t NameType, gotype string) (omitempty, indirect bool) { if t.Optional { - star = "*" - opt = ",omitempty" + switch gotype { + case "uint32", "int32": + // in FoldingRange.endLine, 0 and empty have different semantics + // There seem to be no other cases. 
+ default: + indirect = true + omitempty = true + } } if strings.HasPrefix(gotype, "[]") || strings.HasPrefix(gotype, "map[") { - star = "" // passed by reference, so no need for * + indirect = false // passed by reference, so no need for * } else { switch gotype { - case "bool", "uint32", "int32", "string", "interface{}", "any": - star = "" // gopls compatibility if t.Optional + case "bool", "string", "interface{}", "any": + indirect = false // gopls compatibility if t.Optional } } - ostar, oopt := star, opt + oind, oomit := indirect, omitempty if newStar, ok := goplsStar[prop{name, t.Name}]; ok { switch newStar { case nothing: - star, opt = "", "" + indirect, omitempty = false, false case wantStar: - star, opt = "*", "" + indirect, omitempty = false, false case wantOpt: - star, opt = "", ",omitempty" + indirect, omitempty = false, true case wantOptStar: - star, opt = "*", ",omitempty" + indirect, omitempty = true, true } - if star == ostar && opt == oopt { // no change - log.Printf("goplsStar[ {%q, %q} ](%d) useless %s/%s %s/%s", name, t.Name, t.Line, ostar, star, oopt, opt) + if indirect == oind && omitempty == oomit { // no change + log.Printf("goplsStar[ {%q, %q} ](%d) useless %v/%v %v/%v", name, t.Name, t.Line, oind, indirect, oomit, omitempty) } usedGoplsStar[prop{name, t.Name}] = true } - return opt, star + return } func goName(s string) string { diff --git a/gopls/internal/protocol/generate/output.go b/gopls/internal/protocol/generate/output.go index ba9d0cb909f..5eaa0cba969 100644 --- a/gopls/internal/protocol/generate/output.go +++ b/gopls/internal/protocol/generate/output.go @@ -273,10 +273,17 @@ func genProps(out *bytes.Buffer, props []NameType, name string) { tp = newNm } // it's a pointer if it is optional, or for gopls compatibility - opt, star := propStar(name, p, tp) - json := fmt.Sprintf(" `json:\"%s%s\"`", p.Name, opt) + omit, star := propStar(name, p, tp) + json := fmt.Sprintf(" `json:\"%s\"`", p.Name) + if omit { + json = fmt.Sprintf(" `json:\"%s,omitempty\"`", p.Name) + } generateDoc(out, p.Documentation) - fmt.Fprintf(out, "\t%s %s%s %s\n", goName(p.Name), star, tp, json) + if star { + fmt.Fprintf(out, "\t%s *%s %s\n", goName(p.Name), tp, json) + } else { + fmt.Fprintf(out, "\t%s %s %s\n", goName(p.Name), tp, json) + } } } diff --git a/gopls/internal/protocol/tsprotocol.go b/gopls/internal/protocol/tsprotocol.go index 444e51e0717..7306f62a7ad 100644 --- a/gopls/internal/protocol/tsprotocol.go +++ b/gopls/internal/protocol/tsprotocol.go @@ -55,7 +55,7 @@ type ApplyWorkspaceEditResult struct { // Depending on the client's failure handling strategy `failedChange` might // contain the index of the change that failed. This property is only available // if the client signals a `failureHandlingStrategy` in its client capabilities. - FailedChange uint32 `json:"failedChange,omitempty"` + FailedChange uint32 `json:"failedChange"` } // A base for all symbol information. @@ -2377,12 +2377,12 @@ type FoldingRange struct { // To be valid, the end must be zero or larger and smaller than the number of lines in the document. StartLine uint32 `json:"startLine"` // The zero-based character offset from where the folded range starts. If not defined, defaults to the length of the start line. - StartCharacter uint32 `json:"startCharacter,omitempty"` + StartCharacter uint32 `json:"startCharacter"` // The zero-based end line of the range to fold. The folded area ends with the line's last character. 
// To be valid, the end must be zero or larger and smaller than the number of lines in the document. EndLine uint32 `json:"endLine"` // The zero-based character offset before the folded range ends. If not defined, defaults to the length of the end line. - EndCharacter uint32 `json:"endCharacter,omitempty"` + EndCharacter uint32 `json:"endCharacter"` // Describes the kind of the folding range such as 'comment' or 'region'. The kind // is used to categorize folding ranges and used by commands like 'Fold all comments'. // See {@link FoldingRangeKind} for an enumeration of standardized kinds. @@ -2405,7 +2405,7 @@ type FoldingRangeClientCapabilities struct { // The maximum number of folding ranges that the client prefers to receive // per document. The value serves as a hint, servers are free to follow the // limit. - RangeLimit uint32 `json:"rangeLimit,omitempty"` + RangeLimit uint32 `json:"rangeLimit"` // If set, the client signals that it only supports folding complete lines. // If set, client will ignore specified `startCharacter` and `endCharacter` // properties in a FoldingRange. @@ -4148,7 +4148,7 @@ type PublishDiagnosticsParams struct { // Optional the version number of the document the diagnostics are published for. // // @since 3.15.0 - Version int32 `json:"version,omitempty"` + Version int32 `json:"version"` // An array of diagnostic information items. Diagnostics []Diagnostic `json:"diagnostics"` } @@ -4907,7 +4907,7 @@ type SignatureHelp struct { // // In future version of the protocol this property might become // mandatory to better express this. - ActiveSignature uint32 `json:"activeSignature,omitempty"` + ActiveSignature uint32 `json:"activeSignature"` // The active parameter of the active signature. // // If `null`, no parameter of the signature is active (for example a named @@ -4924,7 +4924,7 @@ type SignatureHelp struct { // In future version of the protocol this property might become // mandatory (but still nullable) to better express the active parameter if // the active signature does have any. - ActiveParameter uint32 `json:"activeParameter,omitempty"` + ActiveParameter uint32 `json:"activeParameter"` } // Client Capabilities for a {@link SignatureHelpRequest}. @@ -5036,7 +5036,7 @@ type SignatureInformation struct { // `SignatureHelp.activeParameter`. // // @since 3.16.0 - ActiveParameter uint32 `json:"activeParameter,omitempty"` + ActiveParameter uint32 `json:"activeParameter"` } // An interactive text edit. @@ -5261,7 +5261,7 @@ type TextDocumentContentChangePartial struct { // The optional length of the range that got replaced. // // @deprecated use range instead. - RangeLength uint32 `json:"rangeLength,omitempty"` + RangeLength uint32 `json:"rangeLength"` // The new text for the provided range. Text string `json:"text"` } @@ -5764,7 +5764,7 @@ type WorkDoneProgressBegin struct { // // The value should be steadily rising. Clients are free to ignore values // that are not following this rule. The value range is [0, 100]. - Percentage uint32 `json:"percentage,omitempty"` + Percentage uint32 `json:"percentage"` } // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#workDoneProgressCancelParams @@ -5824,7 +5824,7 @@ type WorkDoneProgressReport struct { // // The value should be steadily rising. Clients are free to ignore values // that are not following this rule. The value range is [0, 100] - Percentage uint32 `json:"percentage,omitempty"` + Percentage uint32 `json:"percentage"` } // Workspace specific client capabilities. 
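
For context on the semantics involved: encoding/json's 'omitempty' drops an integer field whose value is zero, so a protocol field for which zero is meaningful (such as the startCharacter/endCharacter fields of FoldingRange above) becomes indistinguishable from "not set". The standalone sketch below is not part of the gopls code; the foldingRange and foldingRangeFixed structs are hypothetical stand-ins for the generated protocol.FoldingRange type, shown only to illustrate the marshaling difference.

package main

import (
	"encoding/json"
	"fmt"
)

// foldingRange mimics the previously generated struct: 'omitempty' on an
// optional integer field.
type foldingRange struct {
	StartLine    uint32 `json:"startLine"`
	EndCharacter uint32 `json:"endCharacter,omitempty"`
}

// foldingRangeFixed mimics the struct after this CL: no 'omitempty', so a
// zero value is still encoded.
type foldingRangeFixed struct {
	StartLine    uint32 `json:"startLine"`
	EndCharacter uint32 `json:"endCharacter"`
}

func main() {
	before, _ := json.Marshal(foldingRange{StartLine: 3, EndCharacter: 0})
	after, _ := json.Marshal(foldingRangeFixed{StartLine: 3, EndCharacter: 0})
	fmt.Println(string(before)) // {"startLine":3}; the meaningful zero is silently dropped
	fmt.Println(string(after))  // {"startLine":3,"endCharacter":0}
}

The cost of removing 'omitempty' is that such optional integer fields are now always present in the encoded JSON; the CL accepts that in exchange for being able to express a meaningful zero on the wire.
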
From 32ffaa3103522a0b05609e241d1b03b3b1abb9a6 Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Fri, 14 Feb 2025 08:03:39 -0500 Subject: [PATCH 025/270] gopls/internal/analysis/gofix: one function per kind Refactor so that each kind of inlinable (function, type alias, constant) has its own function for each pass. Refactoring only; no behavior changes. For golang/go#32816. Change-Id: I2f09b4020bcf03409664cee3b8379417d8717fa6 Reviewed-on: https://go-review.googlesource.com/c/tools/+/649456 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan --- gopls/internal/analysis/gofix/gofix.go | 593 +++++++++++++------------ 1 file changed, 308 insertions(+), 285 deletions(-) diff --git a/gopls/internal/analysis/gofix/gofix.go b/gopls/internal/analysis/gofix/gofix.go index 35d21c0e05a..ffc64be755b 100644 --- a/gopls/internal/analysis/gofix/gofix.go +++ b/gopls/internal/analysis/gofix/gofix.go @@ -67,26 +67,10 @@ func run(pass *analysis.Pass) (any, error) { // comment (the syntax proposed by #32816), // and exports a fact for each one. func (a *analyzer) find() { - info := a.pass.TypesInfo for cur := range a.root.Preorder((*ast.FuncDecl)(nil), (*ast.GenDecl)(nil)) { switch decl := cur.Node().(type) { case *ast.FuncDecl: - if !hasFixInline(decl.Doc) { - continue - } - content, err := a.readFile(decl) - if err != nil { - a.pass.Reportf(decl.Doc.Pos(), "invalid inlining candidate: cannot read source file: %v", err) - continue - } - callee, err := inline.AnalyzeCallee(discard, a.pass.Fset, a.pass.Pkg, info, decl, content) - if err != nil { - a.pass.Reportf(decl.Doc.Pos(), "invalid inlining candidate: %v", err) - continue - } - fn := info.Defs[decl.Name].(*types.Func) - a.pass.ExportObjectFact(fn, &goFixInlineFuncFact{callee}) - a.inlinableFuncs[fn] = callee + a.findFunc(decl) case *ast.GenDecl: if decl.Tok != token.CONST && decl.Tok != token.TYPE { @@ -97,91 +81,119 @@ func (a *analyzer) find() { for _, spec := range decl.Specs { switch spec := spec.(type) { case *ast.TypeSpec: // Tok == TYPE - if !declInline && !hasFixInline(spec.Doc) { - continue - } - if !spec.Assign.IsValid() { - a.pass.Reportf(spec.Pos(), "invalid //go:fix inline directive: not a type alias") - continue - } - if spec.TypeParams != nil { - // TODO(jba): handle generic aliases - continue - } - // The alias must refer to another named type. - // TODO(jba): generalize to more type expressions. - var rhsID *ast.Ident - switch e := ast.Unparen(spec.Type).(type) { - case *ast.Ident: - rhsID = e - case *ast.SelectorExpr: - rhsID = e.Sel - default: - continue - } - lhs := info.Defs[spec.Name].(*types.TypeName) - // more (jba): test one alias pointing to another alias - rhs := info.Uses[rhsID].(*types.TypeName) - typ := &goFixInlineAliasFact{ - RHSName: rhs.Name(), - RHSPkgName: rhs.Pkg().Name(), - RHSPkgPath: rhs.Pkg().Path(), - } - if rhs.Pkg() == a.pass.Pkg { - typ.rhsObj = rhs - } - a.inlinableAliases[lhs] = typ - // Create a fact only if the LHS is exported and defined at top level. - // We create a fact even if the RHS is non-exported, - // so we can warn about uses in other packages. - if lhs.Exported() && typesinternal.IsPackageLevel(lhs) { - a.pass.ExportObjectFact(lhs, typ) - } + a.findAlias(spec, declInline) case *ast.ValueSpec: // Tok == CONST - specInline := hasFixInline(spec.Doc) - if declInline || specInline { - for i, name := range spec.Names { - if i >= len(spec.Values) { - // Possible following an iota. 
- break - } - val := spec.Values[i] - var rhsID *ast.Ident - switch e := val.(type) { - case *ast.Ident: - // Constants defined with the predeclared iota cannot be inlined. - if info.Uses[e] == builtinIota { - a.pass.Reportf(val.Pos(), "invalid //go:fix inline directive: const value is iota") - continue - } - rhsID = e - case *ast.SelectorExpr: - rhsID = e.Sel - default: - a.pass.Reportf(val.Pos(), "invalid //go:fix inline directive: const value is not the name of another constant") - continue - } - lhs := info.Defs[name].(*types.Const) - rhs := info.Uses[rhsID].(*types.Const) // must be so in a well-typed program - con := &goFixInlineConstFact{ - RHSName: rhs.Name(), - RHSPkgName: rhs.Pkg().Name(), - RHSPkgPath: rhs.Pkg().Path(), - } - if rhs.Pkg() == a.pass.Pkg { - con.rhsObj = rhs - } - a.inlinableConsts[lhs] = con - // Create a fact only if the LHS is exported and defined at top level. - // We create a fact even if the RHS is non-exported, - // so we can warn about uses in other packages. - if lhs.Exported() && typesinternal.IsPackageLevel(lhs) { - a.pass.ExportObjectFact(lhs, con) - } - } - } + a.findConst(spec, declInline) + } + } + } + } +} + +func (a *analyzer) findFunc(decl *ast.FuncDecl) { + if !hasFixInline(decl.Doc) { + return + } + content, err := a.readFile(decl) + if err != nil { + a.pass.Reportf(decl.Doc.Pos(), "invalid inlining candidate: cannot read source file: %v", err) + return + } + callee, err := inline.AnalyzeCallee(discard, a.pass.Fset, a.pass.Pkg, a.pass.TypesInfo, decl, content) + if err != nil { + a.pass.Reportf(decl.Doc.Pos(), "invalid inlining candidate: %v", err) + return + } + fn := a.pass.TypesInfo.Defs[decl.Name].(*types.Func) + a.pass.ExportObjectFact(fn, &goFixInlineFuncFact{callee}) + a.inlinableFuncs[fn] = callee +} + +func (a *analyzer) findAlias(spec *ast.TypeSpec, declInline bool) { + if !declInline && !hasFixInline(spec.Doc) { + return + } + if !spec.Assign.IsValid() { + a.pass.Reportf(spec.Pos(), "invalid //go:fix inline directive: not a type alias") + return + } + if spec.TypeParams != nil { + // TODO(jba): handle generic aliases + return + } + // The alias must refer to another named type. + // TODO(jba): generalize to more type expressions. + var rhsID *ast.Ident + switch e := ast.Unparen(spec.Type).(type) { + case *ast.Ident: + rhsID = e + case *ast.SelectorExpr: + rhsID = e.Sel + default: + return + } + lhs := a.pass.TypesInfo.Defs[spec.Name].(*types.TypeName) + // more (jba): test one alias pointing to another alias + rhs := a.pass.TypesInfo.Uses[rhsID].(*types.TypeName) + typ := &goFixInlineAliasFact{ + RHSName: rhs.Name(), + RHSPkgName: rhs.Pkg().Name(), + RHSPkgPath: rhs.Pkg().Path(), + } + if rhs.Pkg() == a.pass.Pkg { + typ.rhsObj = rhs + } + a.inlinableAliases[lhs] = typ + // Create a fact only if the LHS is exported and defined at top level. + // We create a fact even if the RHS is non-exported, + // so we can warn about uses in other packages. + if lhs.Exported() && typesinternal.IsPackageLevel(lhs) { + a.pass.ExportObjectFact(lhs, typ) + } +} + +func (a *analyzer) findConst(spec *ast.ValueSpec, declInline bool) { + info := a.pass.TypesInfo + specInline := hasFixInline(spec.Doc) + if declInline || specInline { + for i, name := range spec.Names { + if i >= len(spec.Values) { + // Possible following an iota. + break + } + val := spec.Values[i] + var rhsID *ast.Ident + switch e := val.(type) { + case *ast.Ident: + // Constants defined with the predeclared iota cannot be inlined. 
+ if info.Uses[e] == builtinIota { + a.pass.Reportf(val.Pos(), "invalid //go:fix inline directive: const value is iota") + return } + rhsID = e + case *ast.SelectorExpr: + rhsID = e.Sel + default: + a.pass.Reportf(val.Pos(), "invalid //go:fix inline directive: const value is not the name of another constant") + return + } + lhs := info.Defs[name].(*types.Const) + rhs := info.Uses[rhsID].(*types.Const) // must be so in a well-typed program + con := &goFixInlineConstFact{ + RHSName: rhs.Name(), + RHSPkgName: rhs.Pkg().Name(), + RHSPkgPath: rhs.Pkg().Path(), + } + if rhs.Pkg() == a.pass.Pkg { + con.rhsObj = rhs + } + a.inlinableConsts[lhs] = con + // Create a fact only if the LHS is exported and defined at top level. + // We create a fact even if the RHS is non-exported, + // so we can warn about uses in other packages. + if lhs.Exported() && typesinternal.IsPackageLevel(lhs) { + a.pass.ExportObjectFact(lhs, con) } } } @@ -192,204 +204,209 @@ func (a *analyzer) find() { // // TODO(adonovan): handle multiple diffs that each add the same import. func (a *analyzer) inline() { - // Return the unique ast.File for a cursor. - currentFile := func(c cursor.Cursor) *ast.File { - cf, _ := moreiters.First(c.Ancestors((*ast.File)(nil))) - return cf.Node().(*ast.File) - } - for cur := range a.root.Preorder((*ast.CallExpr)(nil), (*ast.Ident)(nil)) { switch n := cur.Node().(type) { case *ast.CallExpr: - call := n - if fn := typeutil.StaticCallee(a.pass.TypesInfo, call); fn != nil { - // Inlinable? - callee, ok := a.inlinableFuncs[fn] - if !ok { - var fact goFixInlineFuncFact - if a.pass.ImportObjectFact(fn, &fact) { - callee = fact.Callee - a.inlinableFuncs[fn] = callee - } - } - if callee == nil { - continue // nope - } - - // Inline the call. - content, err := a.readFile(call) - if err != nil { - a.pass.Reportf(call.Lparen, "invalid inlining candidate: cannot read source file: %v", err) - continue - } - curFile := currentFile(cur) - caller := &inline.Caller{ - Fset: a.pass.Fset, - Types: a.pass.Pkg, - Info: a.pass.TypesInfo, - File: curFile, - Call: call, - Content: content, - } - res, err := inline.Inline(caller, callee, &inline.Options{Logf: discard}) - if err != nil { - a.pass.Reportf(call.Lparen, "%v", err) - continue - } - if res.Literalized { - // Users are not fond of inlinings that literalize - // f(x) to func() { ... }(), so avoid them. - // - // (Unfortunately the inliner is very timid, - // and often literalizes when it cannot prove that - // reducing the call is safe; the user of this tool - // has no indication of what the problem is.) - continue - } - got := res.Content - - // Suggest the "fix". - var textEdits []analysis.TextEdit - for _, edit := range diff.Bytes(content, got) { - textEdits = append(textEdits, analysis.TextEdit{ - Pos: curFile.FileStart + token.Pos(edit.Start), - End: curFile.FileStart + token.Pos(edit.End), - NewText: []byte(edit.New), - }) - } - a.pass.Report(analysis.Diagnostic{ - Pos: call.Pos(), - End: call.End(), - Message: fmt.Sprintf("Call of %v should be inlined", callee), - SuggestedFixes: []analysis.SuggestedFix{{ - Message: fmt.Sprintf("Inline call of %v", callee), - TextEdits: textEdits, - }}, - }) - } + a.inlineCall(n, cur) case *ast.Ident: - // If the identifier is a use of an inlinable type alias, suggest inlining it. - // TODO(jba): much of this code is shared with the constant case, below. - // Try to factor more of it out, unless it will change anyway when we move beyond simple RHS's. 
- if ali, ok := a.pass.TypesInfo.Uses[n].(*types.TypeName); ok { - inalias, ok := a.inlinableAliases[ali] - if !ok { - var fact goFixInlineAliasFact - if a.pass.ImportObjectFact(ali, &fact) { - inalias = &fact - a.inlinableAliases[ali] = inalias - } - } - if inalias == nil { - continue // nope - } - curFile := currentFile(cur) - - // We have an identifier A here (n), possibly qualified by a package identifier (sel.X, - // where sel is the parent of X), // and an inlinable "type A = B" elsewhere (inali). - // Consider replacing A with B. - - // Check that the expression we are inlining (B) means the same thing - // (refers to the same object) in n's scope as it does in A's scope. - // If the RHS is not in the current package, AddImport will handle - // shadowing, so we only need to worry about when both expressions - // are in the current package. - if a.pass.Pkg.Path() == inalias.RHSPkgPath { - // fcon.rhsObj is the object referred to by B in the definition of A. - scope := a.pass.TypesInfo.Scopes[curFile].Innermost(n.Pos()) // n's scope - _, obj := scope.LookupParent(inalias.RHSName, n.Pos()) // what "B" means in n's scope - if obj == nil { - // Should be impossible: if code at n can refer to the LHS, - // it can refer to the RHS. - panic(fmt.Sprintf("no object for inlinable alias %s RHS %s", n.Name, inalias.RHSName)) - } - if obj != inalias.rhsObj { - // "B" means something different here than at the inlinable const's scope. - continue - } - } else if !analysisinternal.CanImport(a.pass.Pkg.Path(), inalias.RHSPkgPath) { - // If this package can't see the RHS's package, we can't inline. - continue - } - var ( - importPrefix string - edits []analysis.TextEdit - ) - if inalias.RHSPkgPath != a.pass.Pkg.Path() { - _, importPrefix, edits = analysisinternal.AddImport( - a.pass.TypesInfo, curFile, inalias.RHSPkgName, inalias.RHSPkgPath, inalias.RHSName, n.Pos()) - } - // If n is qualified by a package identifier, we'll need the full selector expression. - var expr ast.Expr = n - if e, _ := cur.Edge(); e == edge.SelectorExpr_Sel { - expr = cur.Parent().Node().(ast.Expr) - } - a.reportInline("type alias", "Type alias", expr, edits, importPrefix+inalias.RHSName) - continue + switch t := a.pass.TypesInfo.Uses[n].(type) { + case *types.TypeName: + a.inlineAlias(t, cur) + case *types.Const: + a.inlineConst(t, cur) } - // If the identifier is a use of an inlinable constant, suggest inlining it. - if con, ok := a.pass.TypesInfo.Uses[n].(*types.Const); ok { - incon, ok := a.inlinableConsts[con] - if !ok { - var fact goFixInlineConstFact - if a.pass.ImportObjectFact(con, &fact) { - incon = &fact - a.inlinableConsts[con] = incon - } - } - if incon == nil { - continue // nope - } + } + } +} - // If n is qualified by a package identifier, we'll need the full selector expression. - curFile := currentFile(cur) - - // We have an identifier A here (n), possibly qualified by a package identifier (sel.X, - // where sel is the parent of n), // and an inlinable "const A = B" elsewhere (incon). - // Consider replacing A with B. - - // Check that the expression we are inlining (B) means the same thing - // (refers to the same object) in n's scope as it does in A's scope. - // If the RHS is not in the current package, AddImport will handle - // shadowing, so we only need to worry about when both expressions - // are in the current package. - if a.pass.Pkg.Path() == incon.RHSPkgPath { - // incon.rhsObj is the object referred to by B in the definition of A. 
- scope := a.pass.TypesInfo.Scopes[curFile].Innermost(n.Pos()) // n's scope - _, obj := scope.LookupParent(incon.RHSName, n.Pos()) // what "B" means in n's scope - if obj == nil { - // Should be impossible: if code at n can refer to the LHS, - // it can refer to the RHS. - panic(fmt.Sprintf("no object for inlinable const %s RHS %s", n.Name, incon.RHSName)) - } - if obj != incon.rhsObj { - // "B" means something different here than at the inlinable const's scope. - continue - } - } else if !analysisinternal.CanImport(a.pass.Pkg.Path(), incon.RHSPkgPath) { - // If this package can't see the RHS's package, we can't inline. - continue - } - var ( - importPrefix string - edits []analysis.TextEdit - ) - if incon.RHSPkgPath != a.pass.Pkg.Path() { - _, importPrefix, edits = analysisinternal.AddImport( - a.pass.TypesInfo, curFile, incon.RHSPkgName, incon.RHSPkgPath, incon.RHSName, n.Pos()) - } - // If n is qualified by a package identifier, we'll need the full selector expression. - var expr ast.Expr = n - if e, _ := cur.Edge(); e == edge.SelectorExpr_Sel { - expr = cur.Parent().Node().(ast.Expr) - } - a.reportInline("constant", "Constant", expr, edits, importPrefix+incon.RHSName) +// If call is a call to an inlinable func, suggest inlining its use at cur. +func (a *analyzer) inlineCall(call *ast.CallExpr, cur cursor.Cursor) { + if fn := typeutil.StaticCallee(a.pass.TypesInfo, call); fn != nil { + // Inlinable? + callee, ok := a.inlinableFuncs[fn] + if !ok { + var fact goFixInlineFuncFact + if a.pass.ImportObjectFact(fn, &fact) { + callee = fact.Callee + a.inlinableFuncs[fn] = callee } } + if callee == nil { + return // nope + } + + // Inline the call. + content, err := a.readFile(call) + if err != nil { + a.pass.Reportf(call.Lparen, "invalid inlining candidate: cannot read source file: %v", err) + return + } + curFile := currentFile(cur) + caller := &inline.Caller{ + Fset: a.pass.Fset, + Types: a.pass.Pkg, + Info: a.pass.TypesInfo, + File: curFile, + Call: call, + Content: content, + } + res, err := inline.Inline(caller, callee, &inline.Options{Logf: discard}) + if err != nil { + a.pass.Reportf(call.Lparen, "%v", err) + return + } + if res.Literalized { + // Users are not fond of inlinings that literalize + // f(x) to func() { ... }(), so avoid them. + // + // (Unfortunately the inliner is very timid, + // and often literalizes when it cannot prove that + // reducing the call is safe; the user of this tool + // has no indication of what the problem is.) + return + } + got := res.Content + + // Suggest the "fix". + var textEdits []analysis.TextEdit + for _, edit := range diff.Bytes(content, got) { + textEdits = append(textEdits, analysis.TextEdit{ + Pos: curFile.FileStart + token.Pos(edit.Start), + End: curFile.FileStart + token.Pos(edit.End), + NewText: []byte(edit.New), + }) + } + a.pass.Report(analysis.Diagnostic{ + Pos: call.Pos(), + End: call.End(), + Message: fmt.Sprintf("Call of %v should be inlined", callee), + SuggestedFixes: []analysis.SuggestedFix{{ + Message: fmt.Sprintf("Inline call of %v", callee), + TextEdits: textEdits, + }}, + }) } } +// If tn is the TypeName of an inlinable alias, suggest inlining its use at cur. 
+func (a *analyzer) inlineAlias(tn *types.TypeName, cur cursor.Cursor) { + inalias, ok := a.inlinableAliases[tn] + if !ok { + var fact goFixInlineAliasFact + if a.pass.ImportObjectFact(tn, &fact) { + inalias = &fact + a.inlinableAliases[tn] = inalias + } + } + if inalias == nil { + return // nope + } + curFile := currentFile(cur) + + // We have an identifier A here (n), possibly qualified by a package identifier (sel.X, + // where sel is the parent of X), // and an inlinable "type A = B" elsewhere (inali). + // Consider replacing A with B. + + // Check that the expression we are inlining (B) means the same thing + // (refers to the same object) in n's scope as it does in A's scope. + // If the RHS is not in the current package, AddImport will handle + // shadowing, so we only need to worry about when both expressions + // are in the current package. + n := cur.Node().(*ast.Ident) + if a.pass.Pkg.Path() == inalias.RHSPkgPath { + // fcon.rhsObj is the object referred to by B in the definition of A. + scope := a.pass.TypesInfo.Scopes[curFile].Innermost(n.Pos()) // n's scope + _, obj := scope.LookupParent(inalias.RHSName, n.Pos()) // what "B" means in n's scope + if obj == nil { + // Should be impossible: if code at n can refer to the LHS, + // it can refer to the RHS. + panic(fmt.Sprintf("no object for inlinable alias %s RHS %s", n.Name, inalias.RHSName)) + } + if obj != inalias.rhsObj { + // "B" means something different here than at the inlinable const's scope. + return + } + } else if !analysisinternal.CanImport(a.pass.Pkg.Path(), inalias.RHSPkgPath) { + // If this package can't see the RHS's package, we can't inline. + return + } + var ( + importPrefix string + edits []analysis.TextEdit + ) + if inalias.RHSPkgPath != a.pass.Pkg.Path() { + _, importPrefix, edits = analysisinternal.AddImport( + a.pass.TypesInfo, curFile, inalias.RHSPkgName, inalias.RHSPkgPath, inalias.RHSName, n.Pos()) + } + // If n is qualified by a package identifier, we'll need the full selector expression. + var expr ast.Expr = n + if e, _ := cur.Edge(); e == edge.SelectorExpr_Sel { + expr = cur.Parent().Node().(ast.Expr) + } + a.reportInline("type alias", "Type alias", expr, edits, importPrefix+inalias.RHSName) +} + +// If con is an inlinable constant, suggest inlining its use at cur. +func (a *analyzer) inlineConst(con *types.Const, cur cursor.Cursor) { + incon, ok := a.inlinableConsts[con] + if !ok { + var fact goFixInlineConstFact + if a.pass.ImportObjectFact(con, &fact) { + incon = &fact + a.inlinableConsts[con] = incon + } + } + if incon == nil { + return // nope + } + + // If n is qualified by a package identifier, we'll need the full selector expression. + curFile := currentFile(cur) + n := cur.Node().(*ast.Ident) + + // We have an identifier A here (n), possibly qualified by a package identifier (sel.X, + // where sel is the parent of n), // and an inlinable "const A = B" elsewhere (incon). + // Consider replacing A with B. + + // Check that the expression we are inlining (B) means the same thing + // (refers to the same object) in n's scope as it does in A's scope. + // If the RHS is not in the current package, AddImport will handle + // shadowing, so we only need to worry about when both expressions + // are in the current package. + if a.pass.Pkg.Path() == incon.RHSPkgPath { + // incon.rhsObj is the object referred to by B in the definition of A. 
+ scope := a.pass.TypesInfo.Scopes[curFile].Innermost(n.Pos()) // n's scope + _, obj := scope.LookupParent(incon.RHSName, n.Pos()) // what "B" means in n's scope + if obj == nil { + // Should be impossible: if code at n can refer to the LHS, + // it can refer to the RHS. + panic(fmt.Sprintf("no object for inlinable const %s RHS %s", n.Name, incon.RHSName)) + } + if obj != incon.rhsObj { + // "B" means something different here than at the inlinable const's scope. + return + } + } else if !analysisinternal.CanImport(a.pass.Pkg.Path(), incon.RHSPkgPath) { + // If this package can't see the RHS's package, we can't inline. + return + } + var ( + importPrefix string + edits []analysis.TextEdit + ) + if incon.RHSPkgPath != a.pass.Pkg.Path() { + _, importPrefix, edits = analysisinternal.AddImport( + a.pass.TypesInfo, curFile, incon.RHSPkgName, incon.RHSPkgPath, incon.RHSName, n.Pos()) + } + // If n is qualified by a package identifier, we'll need the full selector expression. + var expr ast.Expr = n + if e, _ := cur.Edge(); e == edge.SelectorExpr_Sel { + expr = cur.Parent().Node().(ast.Expr) + } + a.reportInline("constant", "Constant", expr, edits, importPrefix+incon.RHSName) +} + // reportInline reports a diagnostic for fixing an inlinable name. func (a *analyzer) reportInline(kind, capKind string, ident ast.Expr, edits []analysis.TextEdit, newText string) { edits = append(edits, analysis.TextEdit{ @@ -423,6 +440,12 @@ func (a *analyzer) readFile(node ast.Node) ([]byte, error) { return content, nil } +// currentFile returns the unique ast.File for a cursor. +func currentFile(c cursor.Cursor) *ast.File { + cf, _ := moreiters.First(c.Ancestors((*ast.File)(nil))) + return cf.Node().(*ast.File) +} + // hasFixInline reports the presence of a "//go:fix inline" directive // in the comments. func hasFixInline(cg *ast.CommentGroup) bool { From ead62e94e2a9fcb4562a875cad4b3308b2d616ac Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Sat, 15 Feb 2025 08:59:08 -0500 Subject: [PATCH 026/270] gopls/internal/analysis/modernize: handle parens In the maps modernizer, consider the possibility of parentheses surrounding some bits of syntax. Change-Id: I395de81b99f2e9b47dca7f4bbfbed66c0772b6f6 Reviewed-on: https://go-review.googlesource.com/c/tools/+/649975 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan --- gopls/internal/analysis/modernize/maps.go | 4 ++-- .../testdata/src/mapsloop/mapsloop.go | 19 +++++++++++++++++++ .../testdata/src/mapsloop/mapsloop.go.golden | 10 ++++++++++ 3 files changed, 31 insertions(+), 2 deletions(-) diff --git a/gopls/internal/analysis/modernize/maps.go b/gopls/internal/analysis/modernize/maps.go index c93899621ef..91de659d107 100644 --- a/gopls/internal/analysis/modernize/maps.go +++ b/gopls/internal/analysis/modernize/maps.go @@ -87,9 +87,9 @@ func mapsloop(pass *analysis.Pass) { // Have: m = rhs; for k, v := range x { m[k] = v } var newMap bool rhs := assign.Rhs[0] - switch rhs := rhs.(type) { + switch rhs := ast.Unparen(rhs).(type) { case *ast.CallExpr: - if id, ok := rhs.Fun.(*ast.Ident); ok && + if id, ok := ast.Unparen(rhs.Fun).(*ast.Ident); ok && info.Uses[id] == builtinMake { // Have: m = make(...) 
newMap = true diff --git a/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop.go b/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop.go index 769b4c84f60..e4e6963dbae 100644 --- a/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop.go +++ b/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop.go @@ -33,6 +33,25 @@ func useClone(src map[int]string) { for key, value := range src { dst[key] = value // want "Replace m\\[k\\]=v loop with maps.Clone" } + + dst = map[int]string{} + for key, value := range src { + dst[key] = value // want "Replace m\\[k\\]=v loop with maps.Clone" + } + println(dst) +} + +func useCloneParen(src map[int]string) { + // Replace (make)(...) by maps.Clone. + dst := (make)(map[int]string, len(src)) + for key, value := range src { + dst[key] = value // want "Replace m\\[k\\]=v loop with maps.Clone" + } + + dst = (map[int]string{}) + for key, value := range src { + dst[key] = value // want "Replace m\\[k\\]=v loop with maps.Clone" + } println(dst) } diff --git a/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop.go.golden b/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop.go.golden index b9aa39021e8..70b9a28ed5b 100644 --- a/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop.go.golden +++ b/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop.go.golden @@ -26,6 +26,16 @@ func useCopyGeneric[K comparable, V any, M ~map[K]V](dst, src M) { func useClone(src map[int]string) { // Replace make(...) by maps.Clone. dst := maps.Clone(src) + + dst = maps.Clone(src) + println(dst) +} + +func useCloneParen(src map[int]string) { + // Replace (make)(...) by maps.Clone. + dst := maps.Clone(src) + + dst = maps.Clone(src) println(dst) } From 94db7107945332a789135d458a0f0de1d7c00ddb Mon Sep 17 00:00:00 2001 From: Gopher Robot Date: Fri, 14 Feb 2025 21:17:29 +0000 Subject: [PATCH 027/270] all: upgrade go directive to at least 1.23.0 [generated] By now Go 1.24.0 has been released, and Go 1.22 is no longer supported per the Go Release Policy (https://go.dev/doc/devel/release#policy). For golang/go#69095. [git-generate] (cd . && go get go@1.23.0 && go mod tidy && go fix ./... && go mod edit -toolchain=none) (cd gopls/doc/assets && go get go@1.23.0 && go mod tidy && go fix ./... 
&& go mod edit -toolchain=none) (cd gopls && echo 'skipping because it already has go1.23.4 >= go1.23.0, nothing to do') Change-Id: I37dad9abd1457a8a8aa940809b7ee6664fba006d Reviewed-on: https://go-review.googlesource.com/c/tools/+/649321 Auto-Submit: Gopher Robot LUCI-TryBot-Result: Go LUCI Reviewed-by: Dmitri Shuralyov Reviewed-by: Cherry Mui Reviewed-by: Robert Findley --- cmd/callgraph/main_test.go | 1 - cmd/fiximports/main_test.go | 1 - cmd/godex/isAlias18.go | 1 - cmd/godex/isAlias19.go | 1 - cmd/goimports/goimports_gc.go | 1 - cmd/goimports/goimports_not_gc.go | 1 - cmd/gotype/sizesFor18.go | 1 - cmd/gotype/sizesFor19.go | 1 - cmd/splitdwarf/splitdwarf.go | 1 - cmd/stress/stress.go | 1 - go.mod | 2 +- go/analysis/multichecker/multichecker_test.go | 1 - go/analysis/passes/errorsas/errorsas_test.go | 1 - go/analysis/passes/stdversion/main.go | 1 - go/analysis/unitchecker/main.go | 1 - go/buildutil/allpackages_test.go | 1 - go/callgraph/cha/cha_test.go | 1 - go/callgraph/rta/rta_test.go | 1 - go/callgraph/vta/internal/trie/bits_test.go | 1 - go/callgraph/vta/internal/trie/trie_test.go | 1 - go/gcexportdata/example_test.go | 5 ----- go/gcexportdata/main.go | 1 - go/internal/gccgoimporter/newInterface10.go | 1 - go/internal/gccgoimporter/newInterface11.go | 1 - go/loader/loader_test.go | 1 - go/ssa/example_test.go | 3 --- go/ssa/ssautil/switch_test.go | 1 - go/ssa/stdlib_test.go | 1 - godoc/godoc17_test.go | 1 - godoc/static/makestatic.go | 1 - godoc/vfs/fs.go | 1 - gopls/doc/assets/go.mod | 2 +- internal/gcimporter/israce_test.go | 1 - internal/imports/mkindex.go | 1 - internal/jsonrpc2_v2/serve_go116.go | 1 - internal/jsonrpc2_v2/serve_pre116.go | 1 - internal/pprof/main.go | 1 - internal/robustio/copyfiles.go | 1 - internal/robustio/robustio_flaky.go | 1 - internal/robustio/robustio_other.go | 1 - internal/robustio/robustio_plan9.go | 1 - internal/robustio/robustio_posix.go | 1 - internal/stdlib/generate.go | 1 - internal/testenv/testenv_notunix.go | 1 - internal/testenv/testenv_unix.go | 1 - internal/typeparams/copytermlist.go | 1 - playground/socket/socket.go | 1 - refactor/eg/eg_test.go | 1 - refactor/importgraph/graph_test.go | 1 - 49 files changed, 2 insertions(+), 55 deletions(-) diff --git a/cmd/callgraph/main_test.go b/cmd/callgraph/main_test.go index ce634139e68..3b56cd7ffef 100644 --- a/cmd/callgraph/main_test.go +++ b/cmd/callgraph/main_test.go @@ -5,7 +5,6 @@ // No testdata on Android. //go:build !android && go1.11 -// +build !android,go1.11 package main diff --git a/cmd/fiximports/main_test.go b/cmd/fiximports/main_test.go index ebbd7520d2e..69f8726f135 100644 --- a/cmd/fiximports/main_test.go +++ b/cmd/fiximports/main_test.go @@ -5,7 +5,6 @@ // No testdata on Android. //go:build !android -// +build !android package main diff --git a/cmd/godex/isAlias18.go b/cmd/godex/isAlias18.go index 431602b2243..f1f78731d4c 100644 --- a/cmd/godex/isAlias18.go +++ b/cmd/godex/isAlias18.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build !go1.9 -// +build !go1.9 package main diff --git a/cmd/godex/isAlias19.go b/cmd/godex/isAlias19.go index e5889119fa1..db29555fd8c 100644 --- a/cmd/godex/isAlias19.go +++ b/cmd/godex/isAlias19.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. 
//go:build go1.9 -// +build go1.9 package main diff --git a/cmd/goimports/goimports_gc.go b/cmd/goimports/goimports_gc.go index 3326646d035..3a88482fe8d 100644 --- a/cmd/goimports/goimports_gc.go +++ b/cmd/goimports/goimports_gc.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build gc -// +build gc package main diff --git a/cmd/goimports/goimports_not_gc.go b/cmd/goimports/goimports_not_gc.go index 344fe7576b0..21dc77920be 100644 --- a/cmd/goimports/goimports_not_gc.go +++ b/cmd/goimports/goimports_not_gc.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build !gc -// +build !gc package main diff --git a/cmd/gotype/sizesFor18.go b/cmd/gotype/sizesFor18.go index 39e3d9f047e..15d2355ca42 100644 --- a/cmd/gotype/sizesFor18.go +++ b/cmd/gotype/sizesFor18.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build !go1.9 -// +build !go1.9 // This file contains a copy of the implementation of types.SizesFor // since this function is not available in go/types before Go 1.9. diff --git a/cmd/gotype/sizesFor19.go b/cmd/gotype/sizesFor19.go index 34181c8d04d..c46bb777024 100644 --- a/cmd/gotype/sizesFor19.go +++ b/cmd/gotype/sizesFor19.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build go1.9 -// +build go1.9 package main diff --git a/cmd/splitdwarf/splitdwarf.go b/cmd/splitdwarf/splitdwarf.go index 90ff10b6a05..24aa239226c 100644 --- a/cmd/splitdwarf/splitdwarf.go +++ b/cmd/splitdwarf/splitdwarf.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build aix || darwin || dragonfly || freebsd || linux || netbsd || openbsd -// +build aix darwin dragonfly freebsd linux netbsd openbsd /* Splitdwarf uncompresses and copies the DWARF segment of a Mach-O diff --git a/cmd/stress/stress.go b/cmd/stress/stress.go index 6472064f933..e8b8641b387 100644 --- a/cmd/stress/stress.go +++ b/cmd/stress/stress.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build unix || aix || darwin || dragonfly || freebsd || linux || netbsd || openbsd || solaris || windows -// +build unix aix darwin dragonfly freebsd linux netbsd openbsd solaris windows // The stress utility is intended for catching sporadic failures. // It runs a given process in parallel in a loop and collects any failures. diff --git a/go.mod b/go.mod index 8cea866daf8..9edfc58936d 100644 --- a/go.mod +++ b/go.mod @@ -1,6 +1,6 @@ module golang.org/x/tools -go 1.22.0 // => default GODEBUG has gotypesalias=0 +go 1.23.0 // => default GODEBUG has gotypesalias=0 require ( github.com/google/go-cmp v0.6.0 diff --git a/go/analysis/multichecker/multichecker_test.go b/go/analysis/multichecker/multichecker_test.go index 07bf977369b..94a280564ce 100644 --- a/go/analysis/multichecker/multichecker_test.go +++ b/go/analysis/multichecker/multichecker_test.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build go1.12 -// +build go1.12 package multichecker_test diff --git a/go/analysis/passes/errorsas/errorsas_test.go b/go/analysis/passes/errorsas/errorsas_test.go index 6689d8114a7..5414f9e8b6d 100644 --- a/go/analysis/passes/errorsas/errorsas_test.go +++ b/go/analysis/passes/errorsas/errorsas_test.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. 
//go:build go1.13 -// +build go1.13 package errorsas_test diff --git a/go/analysis/passes/stdversion/main.go b/go/analysis/passes/stdversion/main.go index 2156d41e4a9..bf1c3a0b31f 100644 --- a/go/analysis/passes/stdversion/main.go +++ b/go/analysis/passes/stdversion/main.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build ignore -// +build ignore package main diff --git a/go/analysis/unitchecker/main.go b/go/analysis/unitchecker/main.go index 4374e7bf945..246be909249 100644 --- a/go/analysis/unitchecker/main.go +++ b/go/analysis/unitchecker/main.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build ignore -// +build ignore // This file provides an example command for static checkers // conforming to the golang.org/x/tools/go/analysis API. diff --git a/go/buildutil/allpackages_test.go b/go/buildutil/allpackages_test.go index 6af86771104..2df5f27e223 100644 --- a/go/buildutil/allpackages_test.go +++ b/go/buildutil/allpackages_test.go @@ -5,7 +5,6 @@ // Incomplete source tree on Android. //go:build !android -// +build !android package buildutil_test diff --git a/go/callgraph/cha/cha_test.go b/go/callgraph/cha/cha_test.go index 5ac64e17244..7795cb44de0 100644 --- a/go/callgraph/cha/cha_test.go +++ b/go/callgraph/cha/cha_test.go @@ -5,7 +5,6 @@ // No testdata on Android. //go:build !android -// +build !android package cha_test diff --git a/go/callgraph/rta/rta_test.go b/go/callgraph/rta/rta_test.go index 74e77b01291..dcec98d7c5d 100644 --- a/go/callgraph/rta/rta_test.go +++ b/go/callgraph/rta/rta_test.go @@ -5,7 +5,6 @@ // No testdata on Android. //go:build !android -// +build !android package rta_test diff --git a/go/callgraph/vta/internal/trie/bits_test.go b/go/callgraph/vta/internal/trie/bits_test.go index 07784cdffac..f6e510eccd0 100644 --- a/go/callgraph/vta/internal/trie/bits_test.go +++ b/go/callgraph/vta/internal/trie/bits_test.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build go1.13 -// +build go1.13 package trie diff --git a/go/callgraph/vta/internal/trie/trie_test.go b/go/callgraph/vta/internal/trie/trie_test.go index c0651b0ef86..71fd398f12c 100644 --- a/go/callgraph/vta/internal/trie/trie_test.go +++ b/go/callgraph/vta/internal/trie/trie_test.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build go1.13 -// +build go1.13 package trie diff --git a/go/gcexportdata/example_test.go b/go/gcexportdata/example_test.go index 9574f30d32b..852ba5a597c 100644 --- a/go/gcexportdata/example_test.go +++ b/go/gcexportdata/example_test.go @@ -3,11 +3,6 @@ // license that can be found in the LICENSE file. //go:build go1.7 && gc && !android && !ios && (unix || aix || darwin || dragonfly || freebsd || linux || netbsd || openbsd || solaris || plan9 || windows) -// +build go1.7 -// +build gc -// +build !android -// +build !ios -// +build unix aix darwin dragonfly freebsd linux netbsd openbsd solaris plan9 windows package gcexportdata_test diff --git a/go/gcexportdata/main.go b/go/gcexportdata/main.go index e9df4e9a9a5..0b267e33867 100644 --- a/go/gcexportdata/main.go +++ b/go/gcexportdata/main.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build ignore -// +build ignore // The gcexportdata command is a diagnostic tool that displays the // contents of gc export data files. 
diff --git a/go/internal/gccgoimporter/newInterface10.go b/go/internal/gccgoimporter/newInterface10.go index 1b449ef9886..f49c9b067dd 100644 --- a/go/internal/gccgoimporter/newInterface10.go +++ b/go/internal/gccgoimporter/newInterface10.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build !go1.11 -// +build !go1.11 package gccgoimporter diff --git a/go/internal/gccgoimporter/newInterface11.go b/go/internal/gccgoimporter/newInterface11.go index 631546ec66f..c7d5edb4858 100644 --- a/go/internal/gccgoimporter/newInterface11.go +++ b/go/internal/gccgoimporter/newInterface11.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build go1.11 -// +build go1.11 package gccgoimporter diff --git a/go/loader/loader_test.go b/go/loader/loader_test.go index 2276b49ad6f..eb9feb221f0 100644 --- a/go/loader/loader_test.go +++ b/go/loader/loader_test.go @@ -5,7 +5,6 @@ // No testdata on Android. //go:build !android -// +build !android package loader_test diff --git a/go/ssa/example_test.go b/go/ssa/example_test.go index e0fba0be681..03775414df2 100644 --- a/go/ssa/example_test.go +++ b/go/ssa/example_test.go @@ -3,9 +3,6 @@ // license that can be found in the LICENSE file. //go:build !android && !ios && (unix || aix || darwin || dragonfly || freebsd || linux || netbsd || openbsd || solaris || plan9 || windows) -// +build !android -// +build !ios -// +build unix aix darwin dragonfly freebsd linux netbsd openbsd solaris plan9 windows package ssa_test diff --git a/go/ssa/ssautil/switch_test.go b/go/ssa/ssautil/switch_test.go index 081b09010ee..6ff5c9b92c3 100644 --- a/go/ssa/ssautil/switch_test.go +++ b/go/ssa/ssautil/switch_test.go @@ -5,7 +5,6 @@ // No testdata on Android. //go:build !android -// +build !android package ssautil_test diff --git a/go/ssa/stdlib_test.go b/go/ssa/stdlib_test.go index 9b78cfbf839..08df50b9eeb 100644 --- a/go/ssa/stdlib_test.go +++ b/go/ssa/stdlib_test.go @@ -5,7 +5,6 @@ // Incomplete source tree on Android. //go:build !android -// +build !android package ssa_test diff --git a/godoc/godoc17_test.go b/godoc/godoc17_test.go index 82e23e64775..c8bf2d96d42 100644 --- a/godoc/godoc17_test.go +++ b/godoc/godoc17_test.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build go1.7 -// +build go1.7 package godoc diff --git a/godoc/static/makestatic.go b/godoc/static/makestatic.go index a8a652f8ed5..5a7337290ff 100644 --- a/godoc/static/makestatic.go +++ b/godoc/static/makestatic.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build ignore -// +build ignore // Command makestatic writes the generated file buffer to "static.go". // It is intended to be invoked via "go generate" (directive in "gen.go"). diff --git a/godoc/vfs/fs.go b/godoc/vfs/fs.go index f12d653fef2..2bec5886052 100644 --- a/godoc/vfs/fs.go +++ b/godoc/vfs/fs.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build go1.16 -// +build go1.16 package vfs diff --git a/gopls/doc/assets/go.mod b/gopls/doc/assets/go.mod index 73f49695726..9b417f19ed8 100644 --- a/gopls/doc/assets/go.mod +++ b/gopls/doc/assets/go.mod @@ -4,4 +4,4 @@ module golang.org/x/tools/gopls/doc/assets -go 1.19 +go 1.23.0 diff --git a/internal/gcimporter/israce_test.go b/internal/gcimporter/israce_test.go index 885ba1c01c5..c75a16b7a1b 100644 --- a/internal/gcimporter/israce_test.go +++ b/internal/gcimporter/israce_test.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. 
//go:build race -// +build race package gcimporter_test diff --git a/internal/imports/mkindex.go b/internal/imports/mkindex.go index ff006b0cd2e..10e8da5243d 100644 --- a/internal/imports/mkindex.go +++ b/internal/imports/mkindex.go @@ -1,5 +1,4 @@ //go:build ignore -// +build ignore // Copyright 2013 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style diff --git a/internal/jsonrpc2_v2/serve_go116.go b/internal/jsonrpc2_v2/serve_go116.go index 2dac7413f31..19114502d1c 100644 --- a/internal/jsonrpc2_v2/serve_go116.go +++ b/internal/jsonrpc2_v2/serve_go116.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build go1.16 -// +build go1.16 package jsonrpc2 diff --git a/internal/jsonrpc2_v2/serve_pre116.go b/internal/jsonrpc2_v2/serve_pre116.go index ef5477fecb9..9e8ece2ea7b 100644 --- a/internal/jsonrpc2_v2/serve_pre116.go +++ b/internal/jsonrpc2_v2/serve_pre116.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build !go1.16 -// +build !go1.16 package jsonrpc2 diff --git a/internal/pprof/main.go b/internal/pprof/main.go index 5e1ae633b4d..42aa187a6a7 100644 --- a/internal/pprof/main.go +++ b/internal/pprof/main.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build ignore -// +build ignore // The pprof command prints the total time in a pprof profile provided // through the standard input. diff --git a/internal/robustio/copyfiles.go b/internal/robustio/copyfiles.go index 8c93fcd7163..8aace49da8b 100644 --- a/internal/robustio/copyfiles.go +++ b/internal/robustio/copyfiles.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build ignore -// +build ignore // The copyfiles script copies the contents of the internal cmd/go robustio // package to the current directory, with adjustments to make it build. diff --git a/internal/robustio/robustio_flaky.go b/internal/robustio/robustio_flaky.go index d5c241857b4..c56e36ca624 100644 --- a/internal/robustio/robustio_flaky.go +++ b/internal/robustio/robustio_flaky.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build windows || darwin -// +build windows darwin package robustio diff --git a/internal/robustio/robustio_other.go b/internal/robustio/robustio_other.go index 3a20cac6cf8..da9a46e4fac 100644 --- a/internal/robustio/robustio_other.go +++ b/internal/robustio/robustio_other.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build !windows && !darwin -// +build !windows,!darwin package robustio diff --git a/internal/robustio/robustio_plan9.go b/internal/robustio/robustio_plan9.go index 9fa4cacb5a3..3026b9f6321 100644 --- a/internal/robustio/robustio_plan9.go +++ b/internal/robustio/robustio_plan9.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build plan9 -// +build plan9 package robustio diff --git a/internal/robustio/robustio_posix.go b/internal/robustio/robustio_posix.go index cf74865d0b5..6b4beec96fc 100644 --- a/internal/robustio/robustio_posix.go +++ b/internal/robustio/robustio_posix.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build !windows && !plan9 -// +build !windows,!plan9 package robustio diff --git a/internal/stdlib/generate.go b/internal/stdlib/generate.go index d4964f60955..1192885405c 100644 --- a/internal/stdlib/generate.go +++ b/internal/stdlib/generate.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. 
//go:build ignore -// +build ignore // The generate command reads all the GOROOT/api/go1.*.txt files and // generates a single combined manifest.go file containing the Go diff --git a/internal/testenv/testenv_notunix.go b/internal/testenv/testenv_notunix.go index e9ce0d3649d..85b3820e3fb 100644 --- a/internal/testenv/testenv_notunix.go +++ b/internal/testenv/testenv_notunix.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build !(unix || aix || darwin || dragonfly || freebsd || linux || netbsd || openbsd || solaris) -// +build !unix,!aix,!darwin,!dragonfly,!freebsd,!linux,!netbsd,!openbsd,!solaris package testenv diff --git a/internal/testenv/testenv_unix.go b/internal/testenv/testenv_unix.go index bc6af1ff81d..d635b96b31b 100644 --- a/internal/testenv/testenv_unix.go +++ b/internal/testenv/testenv_unix.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build unix || aix || darwin || dragonfly || freebsd || linux || netbsd || openbsd || solaris -// +build unix aix darwin dragonfly freebsd linux netbsd openbsd solaris package testenv diff --git a/internal/typeparams/copytermlist.go b/internal/typeparams/copytermlist.go index 5357f9d2fd7..1edaaa01c9a 100644 --- a/internal/typeparams/copytermlist.go +++ b/internal/typeparams/copytermlist.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build ignore -// +build ignore // copytermlist.go copies the term list algorithm from GOROOT/src/go/types. diff --git a/playground/socket/socket.go b/playground/socket/socket.go index 9e5b4a954d2..378edd4c3a5 100644 --- a/playground/socket/socket.go +++ b/playground/socket/socket.go @@ -3,7 +3,6 @@ // license that can be found in the LICENSE file. //go:build !appengine -// +build !appengine // Package socket implements a WebSocket-based playground backend. // Clients connect to a websocket handler and send run/kill commands, and diff --git a/refactor/eg/eg_test.go b/refactor/eg/eg_test.go index eb54f0b3f95..4dc24f53358 100644 --- a/refactor/eg/eg_test.go +++ b/refactor/eg/eg_test.go @@ -5,7 +5,6 @@ // No testdata on Android. //go:build !android -// +build !android package eg_test diff --git a/refactor/importgraph/graph_test.go b/refactor/importgraph/graph_test.go index f3378a41e86..a07cc633454 100644 --- a/refactor/importgraph/graph_test.go +++ b/refactor/importgraph/graph_test.go @@ -5,7 +5,6 @@ // Incomplete std lib sources on Android. //go:build !android -// +build !android package importgraph_test From c18bffa1b03c9346f0dd1af830b09979e63c7b5f Mon Sep 17 00:00:00 2001 From: Dmitri Shuralyov Date: Sun, 16 Feb 2025 02:16:29 -0500 Subject: [PATCH 028/270] all: delete redundant //go:debug gotypesalias=1 directives [generated] The module's go directive is >= 1.23 now, so these files no longer have any effect. Also drop the outdated comment in go.mod. For golang/go#69772. [git-generate] find . 
-type f -name gotypesalias.go -exec rm {} + sed -i '' 's| // => default GODEBUG has gotypesalias=0||' go.mod Change-Id: Id0fa54f89695991de6c8721aadecfd587826d158 Reviewed-on: https://go-review.googlesource.com/c/tools/+/650095 Reviewed-by: Cherry Mui Auto-Submit: Dmitri Shuralyov Reviewed-by: Funda Secgin Reviewed-by: Robert Findley LUCI-TryBot-Result: Go LUCI --- cmd/bundle/gotypesalias.go | 12 ------------ cmd/callgraph/gotypesalias.go | 12 ------------ cmd/deadcode/gotypesalias.go | 12 ------------ cmd/eg/gotypesalias.go | 12 ------------ cmd/godex/gotypesalias.go | 12 ------------ cmd/godoc/gotypesalias.go | 12 ------------ cmd/goimports/gotypesalias.go | 12 ------------ cmd/gomvpkg/gotypesalias.go | 12 ------------ cmd/gotype/gotypesalias.go | 12 ------------ cmd/ssadump/gotypesalias.go | 12 ------------ cmd/stringer/gotypesalias.go | 12 ------------ go.mod | 2 +- go/analysis/passes/defers/cmd/defers/gotypesalias.go | 12 ------------ .../cmd/fieldalignment/gotypesalias.go | 12 ------------ .../passes/findcall/cmd/findcall/gotypesalias.go | 12 ------------ .../passes/httpmux/cmd/httpmux/gotypesalias.go | 12 ------------ .../ifaceassert/cmd/ifaceassert/gotypesalias.go | 12 ------------ .../passes/lostcancel/cmd/lostcancel/gotypesalias.go | 12 ------------ .../passes/nilness/cmd/nilness/gotypesalias.go | 12 ------------ go/analysis/passes/shadow/cmd/shadow/gotypesalias.go | 12 ------------ .../stringintconv/cmd/stringintconv/gotypesalias.go | 12 ------------ .../passes/unmarshal/cmd/unmarshal/gotypesalias.go | 12 ------------ .../unusedresult/cmd/unusedresult/gotypesalias.go | 12 ------------ go/packages/gopackages/gotypesalias.go | 12 ------------ go/packages/internal/nodecount/gotypesalias.go | 12 ------------ go/types/internal/play/gotypesalias.go | 12 ------------ 26 files changed, 1 insertion(+), 301 deletions(-) delete mode 100644 cmd/bundle/gotypesalias.go delete mode 100644 cmd/callgraph/gotypesalias.go delete mode 100644 cmd/deadcode/gotypesalias.go delete mode 100644 cmd/eg/gotypesalias.go delete mode 100644 cmd/godex/gotypesalias.go delete mode 100644 cmd/godoc/gotypesalias.go delete mode 100644 cmd/goimports/gotypesalias.go delete mode 100644 cmd/gomvpkg/gotypesalias.go delete mode 100644 cmd/gotype/gotypesalias.go delete mode 100644 cmd/ssadump/gotypesalias.go delete mode 100644 cmd/stringer/gotypesalias.go delete mode 100644 go/analysis/passes/defers/cmd/defers/gotypesalias.go delete mode 100644 go/analysis/passes/fieldalignment/cmd/fieldalignment/gotypesalias.go delete mode 100644 go/analysis/passes/findcall/cmd/findcall/gotypesalias.go delete mode 100644 go/analysis/passes/httpmux/cmd/httpmux/gotypesalias.go delete mode 100644 go/analysis/passes/ifaceassert/cmd/ifaceassert/gotypesalias.go delete mode 100644 go/analysis/passes/lostcancel/cmd/lostcancel/gotypesalias.go delete mode 100644 go/analysis/passes/nilness/cmd/nilness/gotypesalias.go delete mode 100644 go/analysis/passes/shadow/cmd/shadow/gotypesalias.go delete mode 100644 go/analysis/passes/stringintconv/cmd/stringintconv/gotypesalias.go delete mode 100644 go/analysis/passes/unmarshal/cmd/unmarshal/gotypesalias.go delete mode 100644 go/analysis/passes/unusedresult/cmd/unusedresult/gotypesalias.go delete mode 100644 go/packages/gopackages/gotypesalias.go delete mode 100644 go/packages/internal/nodecount/gotypesalias.go delete mode 100644 go/types/internal/play/gotypesalias.go diff --git a/cmd/bundle/gotypesalias.go b/cmd/bundle/gotypesalias.go deleted file mode 100644 index 288c10c2d0a..00000000000 --- 
a/cmd/bundle/gotypesalias.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2024 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.23 - -//go:debug gotypesalias=1 - -package main - -// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). -// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/cmd/callgraph/gotypesalias.go b/cmd/callgraph/gotypesalias.go deleted file mode 100644 index 288c10c2d0a..00000000000 --- a/cmd/callgraph/gotypesalias.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2024 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.23 - -//go:debug gotypesalias=1 - -package main - -// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). -// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/cmd/deadcode/gotypesalias.go b/cmd/deadcode/gotypesalias.go deleted file mode 100644 index 288c10c2d0a..00000000000 --- a/cmd/deadcode/gotypesalias.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2024 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.23 - -//go:debug gotypesalias=1 - -package main - -// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). -// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/cmd/eg/gotypesalias.go b/cmd/eg/gotypesalias.go deleted file mode 100644 index 288c10c2d0a..00000000000 --- a/cmd/eg/gotypesalias.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2024 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.23 - -//go:debug gotypesalias=1 - -package main - -// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). -// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/cmd/godex/gotypesalias.go b/cmd/godex/gotypesalias.go deleted file mode 100644 index 288c10c2d0a..00000000000 --- a/cmd/godex/gotypesalias.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2024 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.23 - -//go:debug gotypesalias=1 - -package main - -// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). -// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/cmd/godoc/gotypesalias.go b/cmd/godoc/gotypesalias.go deleted file mode 100644 index 288c10c2d0a..00000000000 --- a/cmd/godoc/gotypesalias.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2024 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.23 - -//go:debug gotypesalias=1 - -package main - -// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). -// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/cmd/goimports/gotypesalias.go b/cmd/goimports/gotypesalias.go deleted file mode 100644 index 288c10c2d0a..00000000000 --- a/cmd/goimports/gotypesalias.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2024 The Go Authors. 
All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.23 - -//go:debug gotypesalias=1 - -package main - -// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). -// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/cmd/gomvpkg/gotypesalias.go b/cmd/gomvpkg/gotypesalias.go deleted file mode 100644 index 288c10c2d0a..00000000000 --- a/cmd/gomvpkg/gotypesalias.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2024 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.23 - -//go:debug gotypesalias=1 - -package main - -// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). -// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/cmd/gotype/gotypesalias.go b/cmd/gotype/gotypesalias.go deleted file mode 100644 index 288c10c2d0a..00000000000 --- a/cmd/gotype/gotypesalias.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2024 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.23 - -//go:debug gotypesalias=1 - -package main - -// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). -// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/cmd/ssadump/gotypesalias.go b/cmd/ssadump/gotypesalias.go deleted file mode 100644 index 288c10c2d0a..00000000000 --- a/cmd/ssadump/gotypesalias.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2024 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.23 - -//go:debug gotypesalias=1 - -package main - -// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). -// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/cmd/stringer/gotypesalias.go b/cmd/stringer/gotypesalias.go deleted file mode 100644 index 288c10c2d0a..00000000000 --- a/cmd/stringer/gotypesalias.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2024 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.23 - -//go:debug gotypesalias=1 - -package main - -// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). -// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/go.mod b/go.mod index 9edfc58936d..bc7636b4cf8 100644 --- a/go.mod +++ b/go.mod @@ -1,6 +1,6 @@ module golang.org/x/tools -go 1.23.0 // => default GODEBUG has gotypesalias=0 +go 1.23.0 require ( github.com/google/go-cmp v0.6.0 diff --git a/go/analysis/passes/defers/cmd/defers/gotypesalias.go b/go/analysis/passes/defers/cmd/defers/gotypesalias.go deleted file mode 100644 index 288c10c2d0a..00000000000 --- a/go/analysis/passes/defers/cmd/defers/gotypesalias.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2024 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.23 - -//go:debug gotypesalias=1 - -package main - -// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). 
-// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/go/analysis/passes/fieldalignment/cmd/fieldalignment/gotypesalias.go b/go/analysis/passes/fieldalignment/cmd/fieldalignment/gotypesalias.go deleted file mode 100644 index 288c10c2d0a..00000000000 --- a/go/analysis/passes/fieldalignment/cmd/fieldalignment/gotypesalias.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2024 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.23 - -//go:debug gotypesalias=1 - -package main - -// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). -// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/go/analysis/passes/findcall/cmd/findcall/gotypesalias.go b/go/analysis/passes/findcall/cmd/findcall/gotypesalias.go deleted file mode 100644 index 288c10c2d0a..00000000000 --- a/go/analysis/passes/findcall/cmd/findcall/gotypesalias.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2024 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.23 - -//go:debug gotypesalias=1 - -package main - -// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). -// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/go/analysis/passes/httpmux/cmd/httpmux/gotypesalias.go b/go/analysis/passes/httpmux/cmd/httpmux/gotypesalias.go deleted file mode 100644 index 288c10c2d0a..00000000000 --- a/go/analysis/passes/httpmux/cmd/httpmux/gotypesalias.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2024 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.23 - -//go:debug gotypesalias=1 - -package main - -// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). -// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/go/analysis/passes/ifaceassert/cmd/ifaceassert/gotypesalias.go b/go/analysis/passes/ifaceassert/cmd/ifaceassert/gotypesalias.go deleted file mode 100644 index 288c10c2d0a..00000000000 --- a/go/analysis/passes/ifaceassert/cmd/ifaceassert/gotypesalias.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2024 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.23 - -//go:debug gotypesalias=1 - -package main - -// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). -// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/go/analysis/passes/lostcancel/cmd/lostcancel/gotypesalias.go b/go/analysis/passes/lostcancel/cmd/lostcancel/gotypesalias.go deleted file mode 100644 index 288c10c2d0a..00000000000 --- a/go/analysis/passes/lostcancel/cmd/lostcancel/gotypesalias.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2024 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.23 - -//go:debug gotypesalias=1 - -package main - -// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). -// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). 
diff --git a/go/analysis/passes/nilness/cmd/nilness/gotypesalias.go b/go/analysis/passes/nilness/cmd/nilness/gotypesalias.go deleted file mode 100644 index 288c10c2d0a..00000000000 --- a/go/analysis/passes/nilness/cmd/nilness/gotypesalias.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2024 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.23 - -//go:debug gotypesalias=1 - -package main - -// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). -// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/go/analysis/passes/shadow/cmd/shadow/gotypesalias.go b/go/analysis/passes/shadow/cmd/shadow/gotypesalias.go deleted file mode 100644 index 288c10c2d0a..00000000000 --- a/go/analysis/passes/shadow/cmd/shadow/gotypesalias.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2024 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.23 - -//go:debug gotypesalias=1 - -package main - -// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). -// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/go/analysis/passes/stringintconv/cmd/stringintconv/gotypesalias.go b/go/analysis/passes/stringintconv/cmd/stringintconv/gotypesalias.go deleted file mode 100644 index 288c10c2d0a..00000000000 --- a/go/analysis/passes/stringintconv/cmd/stringintconv/gotypesalias.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2024 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.23 - -//go:debug gotypesalias=1 - -package main - -// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). -// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/go/analysis/passes/unmarshal/cmd/unmarshal/gotypesalias.go b/go/analysis/passes/unmarshal/cmd/unmarshal/gotypesalias.go deleted file mode 100644 index 288c10c2d0a..00000000000 --- a/go/analysis/passes/unmarshal/cmd/unmarshal/gotypesalias.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2024 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.23 - -//go:debug gotypesalias=1 - -package main - -// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). -// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/go/analysis/passes/unusedresult/cmd/unusedresult/gotypesalias.go b/go/analysis/passes/unusedresult/cmd/unusedresult/gotypesalias.go deleted file mode 100644 index 288c10c2d0a..00000000000 --- a/go/analysis/passes/unusedresult/cmd/unusedresult/gotypesalias.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2024 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.23 - -//go:debug gotypesalias=1 - -package main - -// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). -// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). 
diff --git a/go/packages/gopackages/gotypesalias.go b/go/packages/gopackages/gotypesalias.go deleted file mode 100644 index 288c10c2d0a..00000000000 --- a/go/packages/gopackages/gotypesalias.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2024 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.23 - -//go:debug gotypesalias=1 - -package main - -// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). -// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/go/packages/internal/nodecount/gotypesalias.go b/go/packages/internal/nodecount/gotypesalias.go deleted file mode 100644 index 288c10c2d0a..00000000000 --- a/go/packages/internal/nodecount/gotypesalias.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2024 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.23 - -//go:debug gotypesalias=1 - -package main - -// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). -// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/go/types/internal/play/gotypesalias.go b/go/types/internal/play/gotypesalias.go deleted file mode 100644 index 288c10c2d0a..00000000000 --- a/go/types/internal/play/gotypesalias.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2024 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.23 - -//go:debug gotypesalias=1 - -package main - -// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). -// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). From d115b345e2022d300181300e01d379c4f65da9f6 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Mon, 17 Feb 2025 14:24:33 -0500 Subject: [PATCH 029/270] gopls/internal/analysis: simplify type-error analyzers with Cursor This CL makes a number of simplifications to the type-error analyzers: - Nodes are found using Cursor.FindPos from the error position, which is very fast; - Error position information is read from types.Error instead of formatting the ast.File (!) then invoking the dubious heuristics of analysisinternal.TypeErrorEndPos, which scans the text (!!) assuming well-formattedness (!!!). - plus various minor cleanups. - rename typesinternal.ReadGo116ErrorData to ErrorCodeStartEnd. 
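For illustration, the reworked analyzers now share roughly this shape (a condensed
sketch of the fillreturns change below, not a verbatim excerpt; imports and the
error-specific filtering and fix construction are as in the diff):

    func run(pass *analysis.Pass) (interface{}, error) {
        inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
        for _, typeErr := range pass.TypeErrors {
            // Positions come straight from the types.Error.
            _, start, end, ok := typesinternal.ErrorCodeStartEnd(typeErr)
            if !ok {
                continue // no position information
            }
            // Jump directly to the smallest node covering the error.
            curErr, ok := cursor.Root(inspect).FindPos(start, end)
            if !ok {
                continue // can't find errant node
            }
            // Walk outward to the enclosing node of interest, e.g. a return statement.
            if curRet, ok := moreiters.First(curErr.Ancestors((*ast.ReturnStmt)(nil))); ok {
                ret := curRet.Node().(*ast.ReturnStmt)
                _ = ret // ... construct and report a SuggestedFix here
            }
        }
        return nil, nil
    }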
Updates golang/go#65966 Updates golang/go#71803 Change-Id: Ie4561144040b001b957ef6a3c3631328297d5001 Reviewed-on: https://go-review.googlesource.com/c/tools/+/650217 LUCI-TryBot-Result: Go LUCI Reviewed-by: Jonathan Amsterdam Auto-Submit: Alan Donovan Commit-Queue: Alan Donovan --- .../analysis/fillreturns/fillreturns.go | 147 +++++++----------- .../internal/analysis/nonewvars/nonewvars.go | 81 +++++----- .../analysis/noresultvalues/noresultvalues.go | 61 +++----- gopls/internal/cache/check.go | 5 +- gopls/internal/golang/codeaction.go | 2 +- .../marker/testdata/highlight/controlflow.txt | 3 +- internal/analysisinternal/analysis.go | 2 + internal/typesinternal/types.go | 6 +- 8 files changed, 130 insertions(+), 177 deletions(-) diff --git a/gopls/internal/analysis/fillreturns/fillreturns.go b/gopls/internal/analysis/fillreturns/fillreturns.go index 8a602dc2eef..b6bcc1f24dc 100644 --- a/gopls/internal/analysis/fillreturns/fillreturns.go +++ b/gopls/internal/analysis/fillreturns/fillreturns.go @@ -15,9 +15,12 @@ import ( "strings" "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/gopls/internal/fuzzy" + "golang.org/x/tools/gopls/internal/util/moreiters" "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/astutil/cursor" "golang.org/x/tools/internal/typesinternal" ) @@ -27,105 +30,41 @@ var doc string var Analyzer = &analysis.Analyzer{ Name: "fillreturns", Doc: analysisinternal.MustExtractDoc(doc, "fillreturns"), + Requires: []*analysis.Analyzer{inspect.Analyzer}, Run: run, RunDespiteErrors: true, URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/fillreturns", } func run(pass *analysis.Pass) (interface{}, error) { + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) info := pass.TypesInfo - if info == nil { - return nil, fmt.Errorf("nil TypeInfo") - } outer: for _, typeErr := range pass.TypeErrors { - // Filter out the errors that are not relevant to this analyzer. - if !FixesError(typeErr) { - continue - } - var file *ast.File - for _, f := range pass.Files { - if f.FileStart <= typeErr.Pos && typeErr.Pos <= f.FileEnd { - file = f - break - } - } - if file == nil { - continue - } - - // Get the end position of the error. - // (This heuristic assumes that the buffer is formatted, - // at least up to the end position of the error.) - var buf bytes.Buffer - if err := format.Node(&buf, pass.Fset, file); err != nil { - continue + if !fixesError(typeErr) { + continue // irrelevant type error } - typeErrEndPos := analysisinternal.TypeErrorEndPos(pass.Fset, buf.Bytes(), typeErr.Pos) - - // TODO(rfindley): much of the error handling code below returns, when it - // should probably continue. - - // Get the path for the relevant range. - path, _ := astutil.PathEnclosingInterval(file, typeErr.Pos, typeErrEndPos) - if len(path) == 0 { - return nil, nil - } - - // Find the enclosing return statement. - var ret *ast.ReturnStmt - var retIdx int - for i, n := range path { - if r, ok := n.(*ast.ReturnStmt); ok { - ret = r - retIdx = i - break - } + _, start, end, ok := typesinternal.ErrorCodeStartEnd(typeErr) + if !ok { + continue // no position information } - if ret == nil { - return nil, nil + curErr, ok := cursor.Root(inspect).FindPos(start, end) + if !ok { + continue // can't find node } - // Get the function type that encloses the ReturnStmt. 
- var enclosingFunc *ast.FuncType - for _, n := range path[retIdx+1:] { - switch node := n.(type) { - case *ast.FuncLit: - enclosingFunc = node.Type - case *ast.FuncDecl: - enclosingFunc = node.Type - } - if enclosingFunc != nil { - break - } - } - if enclosingFunc == nil || enclosingFunc.Results == nil { - continue - } - - // Skip any generic enclosing functions, since type parameters don't - // have 0 values. - // TODO(rfindley): We should be able to handle this if the return - // values are all concrete types. - if tparams := enclosingFunc.TypeParams; tparams != nil && tparams.NumFields() > 0 { - return nil, nil - } - - // Find the function declaration that encloses the ReturnStmt. - var outer *ast.FuncDecl - for _, p := range path { - if p, ok := p.(*ast.FuncDecl); ok { - outer = p - break + // Find cursor for enclosing return statement (which may be curErr itself). + curRet := curErr + if _, ok := curRet.Node().(*ast.ReturnStmt); !ok { + curRet, ok = moreiters.First(curErr.Ancestors((*ast.ReturnStmt)(nil))) + if !ok { + continue // no enclosing return } } - if outer == nil { - return nil, nil - } + ret := curRet.Node().(*ast.ReturnStmt) - // Skip any return statements that contain function calls with multiple - // return values. + // Skip if any return argument is a tuple-valued function call. for _, expr := range ret.Results { e, ok := expr.(*ast.CallExpr) if !ok { @@ -136,24 +75,47 @@ outer: } } + // Get type of innermost enclosing function. + var funcType *ast.FuncType + curFunc, _ := enclosingFunc(curRet) // can't fail + switch fn := curFunc.Node().(type) { + case *ast.FuncLit: + funcType = fn.Type + case *ast.FuncDecl: + funcType = fn.Type + + // Skip generic functions since type parameters don't have zero values. + // TODO(rfindley): We should be able to handle this if the return + // values are all concrete types. + if funcType.TypeParams.NumFields() > 0 { + continue + } + } + if funcType.Results == nil { + continue + } + // Duplicate the return values to track which values have been matched. remaining := make([]ast.Expr, len(ret.Results)) copy(remaining, ret.Results) - fixed := make([]ast.Expr, len(enclosingFunc.Results.List)) + fixed := make([]ast.Expr, len(funcType.Results.List)) // For each value in the return function declaration, find the leftmost element // in the return statement that has the desired type. If no such element exists, // fill in the missing value with the appropriate "zero" value. // Beware that type information may be incomplete. 
var retTyps []types.Type - for _, ret := range enclosingFunc.Results.List { + for _, ret := range funcType.Results.List { retTyp := info.TypeOf(ret.Type) if retTyp == nil { return nil, nil } retTyps = append(retTyps, retTyp) } + + curFile, _ := moreiters.First(curRet.Ancestors((*ast.File)(nil))) + file := curFile.Node().(*ast.File) matches := analysisinternal.MatchingIdents(retTyps, file, ret.Pos(), info, pass.Pkg) qual := typesinternal.FileQualifier(file, pass.Pkg) for i, retTyp := range retTyps { @@ -215,8 +177,8 @@ outer: } pass.Report(analysis.Diagnostic{ - Pos: typeErr.Pos, - End: typeErrEndPos, + Pos: start, + End: end, Message: typeErr.Msg, SuggestedFixes: []analysis.SuggestedFix{{ Message: "Fill in return values", @@ -255,7 +217,7 @@ var wrongReturnNumRegexes = []*regexp.Regexp{ regexp.MustCompile(`not enough return values`), } -func FixesError(err types.Error) bool { +func fixesError(err types.Error) bool { msg := strings.TrimSpace(err.Msg) for _, rx := range wrongReturnNumRegexes { if rx.MatchString(msg) { @@ -264,3 +226,12 @@ func FixesError(err types.Error) bool { } return false } + +// enclosingFunc returns the cursor for the innermost Func{Decl,Lit} +// that encloses c, if any. +func enclosingFunc(c cursor.Cursor) (cursor.Cursor, bool) { + for curAncestor := range c.Ancestors((*ast.FuncDecl)(nil), (*ast.FuncLit)(nil)) { + return curAncestor, true + } + return cursor.Cursor{}, false +} diff --git a/gopls/internal/analysis/nonewvars/nonewvars.go b/gopls/internal/analysis/nonewvars/nonewvars.go index 9e5d79df02c..8a3bf502c51 100644 --- a/gopls/internal/analysis/nonewvars/nonewvars.go +++ b/gopls/internal/analysis/nonewvars/nonewvars.go @@ -7,16 +7,17 @@ package nonewvars import ( - "bytes" _ "embed" "go/ast" - "go/format" "go/token" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/gopls/internal/util/moreiters" "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/astutil/cursor" + "golang.org/x/tools/internal/typesinternal" ) //go:embed doc.go @@ -33,57 +34,45 @@ var Analyzer = &analysis.Analyzer{ func run(pass *analysis.Pass) (interface{}, error) { inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - if len(pass.TypeErrors) == 0 { - return nil, nil - } - nodeFilter := []ast.Node{(*ast.AssignStmt)(nil)} - inspect.Preorder(nodeFilter, func(n ast.Node) { - assignStmt, _ := n.(*ast.AssignStmt) - // We only care about ":=". - if assignStmt.Tok != token.DEFINE { - return + for _, typeErr := range pass.TypeErrors { + if typeErr.Msg != "no new variables on left side of :=" { + continue // irrelevant error + } + _, start, end, ok := typesinternal.ErrorCodeStartEnd(typeErr) + if !ok { + continue // can't get position info + } + curErr, ok := cursor.Root(inspect).FindPos(start, end) + if !ok { + continue // can't find errant node } - var file *ast.File - for _, f := range pass.Files { - if f.FileStart <= assignStmt.Pos() && assignStmt.Pos() < f.FileEnd { - file = f - break + // Find enclosing assignment (which may be curErr itself). 
+ assign, ok := curErr.Node().(*ast.AssignStmt) + if !ok { + cur, ok := moreiters.First(curErr.Ancestors((*ast.AssignStmt)(nil))) + if !ok { + continue // no enclosing assignment } + assign = cur.Node().(*ast.AssignStmt) } - if file == nil { - return + if assign.Tok != token.DEFINE { + continue // not a := statement } - for _, err := range pass.TypeErrors { - if !FixesError(err.Msg) { - continue - } - if assignStmt.Pos() > err.Pos || err.Pos >= assignStmt.End() { - continue - } - var buf bytes.Buffer - if err := format.Node(&buf, pass.Fset, file); err != nil { - continue - } - pass.Report(analysis.Diagnostic{ - Pos: err.Pos, - End: analysisinternal.TypeErrorEndPos(pass.Fset, buf.Bytes(), err.Pos), - Message: err.Msg, - SuggestedFixes: []analysis.SuggestedFix{{ - Message: "Change ':=' to '='", - TextEdits: []analysis.TextEdit{{ - Pos: err.Pos, - End: err.Pos + 1, - }}, + pass.Report(analysis.Diagnostic{ + Pos: assign.TokPos, + End: assign.TokPos + token.Pos(len(":=")), + Message: typeErr.Msg, + SuggestedFixes: []analysis.SuggestedFix{{ + Message: "Change ':=' to '='", + TextEdits: []analysis.TextEdit{{ + Pos: assign.TokPos, + End: assign.TokPos + token.Pos(len(":")), }}, - }) - } - }) + }}, + }) + } return nil, nil } - -func FixesError(msg string) bool { - return msg == "no new variables on left side of :=" -} diff --git a/gopls/internal/analysis/noresultvalues/noresultvalues.go b/gopls/internal/analysis/noresultvalues/noresultvalues.go index 118beb4568b..fe979f52aac 100644 --- a/gopls/internal/analysis/noresultvalues/noresultvalues.go +++ b/gopls/internal/analysis/noresultvalues/noresultvalues.go @@ -5,9 +5,8 @@ package noresultvalues import ( - "bytes" "go/ast" - "go/format" + "go/token" "strings" _ "embed" @@ -15,7 +14,10 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/gopls/internal/util/moreiters" "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/astutil/cursor" + "golang.org/x/tools/internal/typesinternal" ) //go:embed doc.go @@ -32,55 +34,40 @@ var Analyzer = &analysis.Analyzer{ func run(pass *analysis.Pass) (interface{}, error) { inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - if len(pass.TypeErrors) == 0 { - return nil, nil - } - - nodeFilter := []ast.Node{(*ast.ReturnStmt)(nil)} - inspect.Preorder(nodeFilter, func(n ast.Node) { - retStmt, _ := n.(*ast.ReturnStmt) - var file *ast.File - for _, f := range pass.Files { - if f.FileStart <= retStmt.Pos() && retStmt.Pos() < f.FileEnd { - file = f - break - } + for _, typErr := range pass.TypeErrors { + if !fixesError(typErr.Msg) { + continue // irrelevant error } - if file == nil { - return + _, start, end, ok := typesinternal.ErrorCodeStartEnd(typErr) + if !ok { + continue // can't get position info } - - for _, err := range pass.TypeErrors { - if !FixesError(err.Msg) { - continue - } - if retStmt.Pos() >= err.Pos || err.Pos >= retStmt.End() { - continue - } - var buf bytes.Buffer - if err := format.Node(&buf, pass.Fset, file); err != nil { - continue - } + curErr, ok := cursor.Root(inspect).FindPos(start, end) + if !ok { + continue // can't find errant node + } + // Find first enclosing return statement, if any. 
+ if curRet, ok := moreiters.First(curErr.Ancestors((*ast.ReturnStmt)(nil))); ok { + ret := curRet.Node() pass.Report(analysis.Diagnostic{ - Pos: err.Pos, - End: analysisinternal.TypeErrorEndPos(pass.Fset, buf.Bytes(), err.Pos), - Message: err.Msg, + Pos: start, + End: end, + Message: typErr.Msg, SuggestedFixes: []analysis.SuggestedFix{{ Message: "Delete return values", TextEdits: []analysis.TextEdit{{ - Pos: retStmt.Pos(), - End: retStmt.End(), - NewText: []byte("return"), + Pos: ret.Pos() + token.Pos(len("return")), + End: ret.End(), }}, }}, }) } - }) + } return nil, nil } -func FixesError(msg string) bool { +func fixesError(msg string) bool { return msg == "no result values expected" || strings.HasPrefix(msg, "too many return values") && strings.Contains(msg, "want ()") } diff --git a/gopls/internal/cache/check.go b/gopls/internal/cache/check.go index d094c535d7a..a3aff5e5475 100644 --- a/gopls/internal/cache/check.go +++ b/gopls/internal/cache/check.go @@ -2001,7 +2001,7 @@ func typeErrorsToDiagnostics(pkg *syntaxPackage, inputs *typeCheckInputs, errs [ batch := func(related []types.Error) { var diags []*Diagnostic for i, e := range related { - code, start, end, ok := typesinternal.ReadGo116ErrorData(e) + code, start, end, ok := typesinternal.ErrorCodeStartEnd(e) if !ok || !start.IsValid() || !end.IsValid() { start, end = e.Pos, e.Pos code = 0 @@ -2075,6 +2075,9 @@ func typeErrorsToDiagnostics(pkg *syntaxPackage, inputs *typeCheckInputs, errs [ if end == start { // Expand the end position to a more meaningful span. + // + // TODO(adonovan): It is the type checker's responsibility + // to ensure that (start, end) are meaningful; see #71803. end = analysisinternal.TypeErrorEndPos(e.Fset, pgf.Src, start) // debugging golang/go#65960 diff --git a/gopls/internal/golang/codeaction.go b/gopls/internal/golang/codeaction.go index 34ac7426019..f82c32d6a9c 100644 --- a/gopls/internal/golang/codeaction.go +++ b/gopls/internal/golang/codeaction.go @@ -309,7 +309,7 @@ func quickFix(ctx context.Context, req *codeActionsRequest) error { for _, typeError := range req.pkg.TypeErrors() { // Does type error overlap with CodeAction range? start, end := typeError.Pos, typeError.Pos - if _, _, endPos, ok := typesinternal.ReadGo116ErrorData(typeError); ok { + if _, _, endPos, ok := typesinternal.ErrorCodeStartEnd(typeError); ok { end = endPos } typeErrorRange, err := req.pgf.PosRange(start, end) diff --git a/gopls/internal/test/marker/testdata/highlight/controlflow.txt b/gopls/internal/test/marker/testdata/highlight/controlflow.txt index c09f748a553..46ec48d030d 100644 --- a/gopls/internal/test/marker/testdata/highlight/controlflow.txt +++ b/gopls/internal/test/marker/testdata/highlight/controlflow.txt @@ -68,7 +68,6 @@ func _() { } func _() () { - // TODO(golang/go#65966): fix the triplicate diagnostics here. - return 0 //@hiloc(ret2, "0", text), diag("0", re"too many return"), diag("0", re"too many return"), diag("0", re"too many return") + return 0 //@hiloc(ret2, "0", text), diag("0", re"too many return") //@highlight(ret2, ret2) } diff --git a/internal/analysisinternal/analysis.go b/internal/analysisinternal/analysis.go index d96d22982c5..aba435fa404 100644 --- a/internal/analysisinternal/analysis.go +++ b/internal/analysisinternal/analysis.go @@ -23,6 +23,8 @@ import ( "golang.org/x/tools/internal/typesinternal" ) +// Deprecated: this heuristic is ill-defined. +// TODO(adonovan): move to sole use in gopls/internal/cache. 
func TypeErrorEndPos(fset *token.FileSet, src []byte, start token.Pos) token.Pos { // Get the end position for the type error. file := fset.File(start) diff --git a/internal/typesinternal/types.go b/internal/typesinternal/types.go index 34534879630..edf0347ec3b 100644 --- a/internal/typesinternal/types.go +++ b/internal/typesinternal/types.go @@ -32,12 +32,14 @@ func SetUsesCgo(conf *types.Config) bool { return true } -// ReadGo116ErrorData extracts additional information from types.Error values +// ErrorCodeStartEnd extracts additional information from types.Error values // generated by Go version 1.16 and later: the error code, start position, and // end position. If all positions are valid, start <= err.Pos <= end. // // If the data could not be read, the final result parameter will be false. -func ReadGo116ErrorData(err types.Error) (code ErrorCode, start, end token.Pos, ok bool) { +// +// TODO(adonovan): eliminate start/end when proposal #71803 is accepted. +func ErrorCodeStartEnd(err types.Error) (code ErrorCode, start, end token.Pos, ok bool) { var data [3]int // By coincidence all of these fields are ints, which simplifies things. v := reflect.ValueOf(err) From fe883a85d1827c1acaed7273c9a10ff0660d9a0d Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Tue, 18 Feb 2025 13:06:42 -0500 Subject: [PATCH 030/270] gopls/internal/analysis/unusedvariable: refine bug.Report golang/go#71812 This CL adds assertions to refine the bug reported in golang/go#71812, in which the analyzer reports an invalid SuggestedFix. Updates golang/go#71812 Change-Id: Ie4a9aac9ba3d16974320d7cd4b48bc4cc76fc3c4 Reviewed-on: https://go-review.googlesource.com/c/tools/+/650395 Commit-Queue: Alan Donovan Reviewed-by: Jonathan Amsterdam Auto-Submit: Alan Donovan LUCI-TryBot-Result: Go LUCI --- .../analysis/unusedvariable/unusedvariable.go | 81 ++++++++++--------- 1 file changed, 45 insertions(+), 36 deletions(-) diff --git a/gopls/internal/analysis/unusedvariable/unusedvariable.go b/gopls/internal/analysis/unusedvariable/unusedvariable.go index 15bcd43d873..5f1c188eb6a 100644 --- a/gopls/internal/analysis/unusedvariable/unusedvariable.go +++ b/gopls/internal/analysis/unusedvariable/unusedvariable.go @@ -13,10 +13,12 @@ import ( "go/token" "go/types" "regexp" + "slices" "strings" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/gopls/internal/util/bug" "golang.org/x/tools/gopls/internal/util/safetoken" ) @@ -165,16 +167,13 @@ func removeVariableFromSpec(pass *analysis.Pass, path []ast.Node, stmt *ast.Valu // Find parent DeclStmt and delete it for _, node := range path { if declStmt, ok := node.(*ast.DeclStmt); ok { - edits := deleteStmtFromBlock(pass.Fset, path, declStmt) - if len(edits) == 0 { - return nil // can this happen? - } - return []analysis.SuggestedFix{ - { + if edits := deleteStmtFromBlock(pass.Fset, path, declStmt); len(edits) > 0 { + return []analysis.SuggestedFix{{ Message: suggestedFixMessage(ident.Name), TextEdits: edits, - }, + }} } + return nil } } } @@ -222,16 +221,13 @@ func removeVariableFromAssignment(fset *token.FileSet, path []ast.Node, stmt *as } // RHS does not have any side effects, delete the whole statement - edits := deleteStmtFromBlock(fset, path, stmt) - if len(edits) == 0 { - return nil // can this happen? 
- } - return []analysis.SuggestedFix{ - { + if edits := deleteStmtFromBlock(fset, path, stmt); len(edits) > 0 { + return []analysis.SuggestedFix{{ Message: suggestedFixMessage(ident.Name), TextEdits: edits, - }, + }} } + return nil } // Otherwise replace ident with `_` @@ -253,34 +249,48 @@ func suggestedFixMessage(name string) string { return fmt.Sprintf("Remove variable %s", name) } +// deleteStmtFromBlock returns the edits to remove stmt if its parent is a BlockStmt. +// (stmt is not necessarily the leaf, path[0].) +// +// It returns nil if the parent is not a block, as in these examples: +// +// switch STMT; {} +// switch { default: STMT } +// select { default: STMT } +// +// TODO(adonovan): handle these cases too. func deleteStmtFromBlock(fset *token.FileSet, path []ast.Node, stmt ast.Stmt) []analysis.TextEdit { - // Find innermost enclosing BlockStmt. - var block *ast.BlockStmt - for i := range path { - if blockStmt, ok := path[i].(*ast.BlockStmt); ok { - block = blockStmt - break - } + // TODO(adonovan): simplify using Cursor API. + i := slices.Index(path, ast.Node(stmt)) // must be present + block, ok := path[i+1].(*ast.BlockStmt) + if !ok { + return nil // parent is not a BlockStmt } - nodeIndex := -1 - for i, blockStmt := range block.List { - if blockStmt == stmt { - nodeIndex = i - break - } + nodeIndex := slices.Index(block.List, stmt) + if nodeIndex == -1 { + bug.Reportf("%s: Stmt not found in BlockStmt.List", safetoken.StartPosition(fset, stmt.Pos())) // refine #71812 + return nil } - // The statement we need to delete was not found in BlockStmt - if nodeIndex == -1 { + if !stmt.Pos().IsValid() { + bug.Reportf("%s: invalid Stmt.Pos", safetoken.StartPosition(fset, stmt.Pos())) // refine #71812 return nil } // Delete until the end of the block unless there is another statement after // the one we are trying to delete end := block.Rbrace + if !end.IsValid() { + bug.Reportf("%s: BlockStmt has no Rbrace", safetoken.StartPosition(fset, block.Pos())) // refine #71812 + return nil + } if nodeIndex < len(block.List)-1 { end = block.List[nodeIndex+1].Pos() + if end < stmt.Pos() { + bug.Reportf("%s: BlockStmt.List[last].Pos > BlockStmt.Rbrace", safetoken.StartPosition(fset, block.Pos())) // refine #71812 + return nil + } } // Account for comments within the block containing the statement @@ -298,7 +308,7 @@ outer: // If a comment exists within the current block, after the unused variable statement, // and before the next statement, we shouldn't delete it. if coLine > stmtEndLine { - end = co.Pos() + end = co.Pos() // preserves invariant stmt.Pos <= end (#71812) break outer } if co.Pos() > end { @@ -308,12 +318,11 @@ outer: } } - return []analysis.TextEdit{ - { - Pos: stmt.Pos(), - End: end, - }, - } + // Delete statement and optional following comment. + return []analysis.TextEdit{{ + Pos: stmt.Pos(), + End: end, + }} } // exprMayHaveSideEffects reports whether the expression may have side effects From 4b3fdfd83f78b8336aef4160183bf5b27febc5b9 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Sun, 16 Feb 2025 22:19:08 -0500 Subject: [PATCH 031/270] go/analysis/passes/printf: suppress diagnostic for Println("...%XX...") A common form of literal string is a URL containing URL-escaped characters. This CL causes the printf checker not to report a "Println call has possible Printf formatting directive" diagnostic for it. 
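For illustration (string literals taken from the test data added below; this
snippet is not part of the change itself):

    package main

    import "fmt"

    func main() {
        fmt.Println("http://foo.com?q%2Fabc")    // no longer flagged: %2F reads as a URL escape
        fmt.Println("http://foo.com?q%2Fabc-%s") // still flagged: possible directive %s
    }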
+ test Fixes golang/go#29854 Change-Id: Ib1dcc44dd8185da17f61296632ad030cb1e58420 Reviewed-on: https://go-review.googlesource.com/c/tools/+/650175 LUCI-TryBot-Result: Go LUCI Auto-Submit: Alan Donovan Commit-Queue: Alan Donovan Reviewed-by: Jonathan Amsterdam --- go/analysis/passes/printf/printf.go | 18 +++++++++++++++--- go/analysis/passes/printf/testdata/src/a/a.go | 2 ++ 2 files changed, 17 insertions(+), 3 deletions(-) diff --git a/go/analysis/passes/printf/printf.go b/go/analysis/passes/printf/printf.go index 81600a283aa..a28ed365d1e 100644 --- a/go/analysis/passes/printf/printf.go +++ b/go/analysis/passes/printf/printf.go @@ -924,9 +924,14 @@ func checkPrint(pass *analysis.Pass, call *ast.CallExpr, name string) { // The % in "abc 0.0%" couldn't be a formatting directive. s = strings.TrimSuffix(s, "%") if strings.Contains(s, "%") { - m := printFormatRE.FindStringSubmatch(s) - if m != nil { - pass.ReportRangef(call, "%s call has possible Printf formatting directive %s", name, m[0]) + for _, m := range printFormatRE.FindAllString(s, -1) { + // Allow %XX where XX are hex digits, + // as this is common in URLs. + if len(m) >= 3 && isHex(m[1]) && isHex(m[2]) { + continue + } + pass.ReportRangef(call, "%s call has possible Printf formatting directive %s", name, m) + break // report only the first one } } } @@ -992,3 +997,10 @@ func (ss stringSet) Set(flag string) error { // // Remove this after the 1.24 release. var suppressNonconstants bool + +// isHex reports whether b is a hex digit. +func isHex(b byte) bool { + return '0' <= b && b <= '9' || + 'A' <= b && b <= 'F' || + 'a' <= b && b <= 'f' +} diff --git a/go/analysis/passes/printf/testdata/src/a/a.go b/go/analysis/passes/printf/testdata/src/a/a.go index 02ce425f8a3..da48f98f0a8 100644 --- a/go/analysis/passes/printf/testdata/src/a/a.go +++ b/go/analysis/passes/printf/testdata/src/a/a.go @@ -154,6 +154,8 @@ func PrintfTests() { fmt.Println("%v", "hi") // want "fmt.Println call has possible Printf formatting directive %v" fmt.Println("%T", "hi") // want "fmt.Println call has possible Printf formatting directive %T" fmt.Println("%s"+" there", "hi") // want "fmt.Println call has possible Printf formatting directive %s" + fmt.Println("http://foo.com?q%2Fabc") // no diagnostic: %XX is excepted + fmt.Println("http://foo.com?q%2Fabc-%s") // want"fmt.Println call has possible Printf formatting directive %s" fmt.Println("0.0%") // correct (trailing % couldn't be a formatting directive) fmt.Printf("%s", "hi", 3) // want "fmt.Printf call needs 1 arg but has 2 args" _ = fmt.Sprintf("%"+("s"), "hi", 3) // want "fmt.Sprintf call needs 1 arg but has 2 args" From ad5dd9875168fe5cb7c43643a34c8cc26411b2f9 Mon Sep 17 00:00:00 2001 From: Rob Findley Date: Tue, 18 Feb 2025 20:49:25 +0000 Subject: [PATCH 032/270] gopls: fix a few bugs related to the new modcache imports source Fix the following bugs related to the new "gopls" imports source and module cache index: - Only construct the modcacheState if the imports source is "gopls", which is not yet the default. This was causing memory regressions, as the modcache table is non-trivial. - Add missing error handling. - Don't call modcacheState.stopTimer if the modcacheState is nil, which may already have been the case with "importsSource": "off". 
For golang/go#71607 Change-Id: I33c90ee4b97c8675b342cb0c045eef183a1ef365 Reviewed-on: https://go-review.googlesource.com/c/tools/+/650397 Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI --- gopls/internal/cache/session.go | 7 ++++++- gopls/internal/cache/view.go | 9 +++++++-- gopls/internal/golang/format.go | 6 ++++++ 3 files changed, 19 insertions(+), 3 deletions(-) diff --git a/gopls/internal/cache/session.go b/gopls/internal/cache/session.go index a7fb618f679..5ae753eb91c 100644 --- a/gopls/internal/cache/session.go +++ b/gopls/internal/cache/session.go @@ -238,7 +238,12 @@ func (s *Session) createView(ctx context.Context, def *viewDefinition) (*View, * viewDefinition: def, importsState: newImportsState(backgroundCtx, s.cache.modCache, pe), } - if def.folder.Options.ImportsSource != settings.ImportsSourceOff { + + // Keep this in sync with golang.computeImportEdits. + // + // TODO(rfindley): encapsulate the imports state logic so that the handling + // for Options.ImportsSource is in a single location. + if def.folder.Options.ImportsSource == settings.ImportsSourceGopls { v.modcacheState = newModcacheState(def.folder.Env.GOMODCACHE) } diff --git a/gopls/internal/cache/view.go b/gopls/internal/cache/view.go index 26f0de86125..6ebf6837ef2 100644 --- a/gopls/internal/cache/view.go +++ b/gopls/internal/cache/view.go @@ -109,7 +109,10 @@ type View struct { // importsState is for the old imports code importsState *importsState - // maintain the current module cache index + // modcacheState is the replacement for importsState, to be used for + // goimports operations when the imports source is "gopls". + // + // It may be nil, if the imports source is not "gopls". modcacheState *modcacheState // pkgIndex is an index of package IDs, for efficient storage of typerefs. @@ -492,7 +495,9 @@ func (v *View) shutdown() { // Cancel the initial workspace load if it is still running. v.cancelInitialWorkspaceLoad() v.importsState.stopTimer() - v.modcacheState.stopTimer() + if v.modcacheState != nil { + v.modcacheState.stopTimer() + } v.snapshotMu.Lock() if v.snapshot != nil { diff --git a/gopls/internal/golang/format.go b/gopls/internal/golang/format.go index de4ec3a642c..acc619eba0c 100644 --- a/gopls/internal/golang/format.go +++ b/gopls/internal/golang/format.go @@ -137,7 +137,13 @@ func computeImportEdits(ctx context.Context, pgf *parsego.File, snapshot *cache. // Build up basic information about the original file. isource, err := imports.NewProcessEnvSource(options.Env, filename, pgf.File.Name.Name) + if err != nil { + return nil, nil, err + } var source imports.Source + + // Keep this in sync with [cache.Session.createView] (see the TODO there: we + // should factor out the handling of the ImportsSource setting). switch snapshot.Options().ImportsSource { case settings.ImportsSourceGopls: source = snapshot.NewGoplsSource(isource) From df7baa073c7b4850753e4f6b6084402fd9cb573b Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Thu, 13 Feb 2025 13:55:48 -0500 Subject: [PATCH 033/270] gopls/internal/analysis/simplifyrange: more precise fix This CL reduces the size of the fix offered by simplifyrange, which makes the cursor jump less. It's also simpler, and handles a missing case. 
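For example, the previously missed both-blank form is now reported, and the fix
deletes only the redundant variables rather than reprinting the whole statement
(an illustrative sketch over a hypothetical map m, mirroring the new test case below):

    func _(m map[int]int) {
        // Before: reported as "simplify range expression".
        for _, _ = range m {
        }
        // After applying "Remove empty value": only "_, _ =" is deleted.
        for range m {
        }
    }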
Change-Id: I8dbff96158e442b2073e86694b61ea1f0b1ea704 Reviewed-on: https://go-review.googlesource.com/c/tools/+/649355 Reviewed-by: Robert Findley LUCI-TryBot-Result: Go LUCI --- .../analysis/simplifyrange/simplifyrange.go | 93 +++++++------------ .../simplifyrange/simplifyrange_test.go | 11 +-- .../simplifyrange/testdata/src/a/a.go | 7 ++ .../simplifyrange/testdata/src/a/a.go.golden | 7 ++ 4 files changed, 51 insertions(+), 67 deletions(-) diff --git a/gopls/internal/analysis/simplifyrange/simplifyrange.go b/gopls/internal/analysis/simplifyrange/simplifyrange.go index 6d079059eb1..fd685ba2c5b 100644 --- a/gopls/internal/analysis/simplifyrange/simplifyrange.go +++ b/gopls/internal/analysis/simplifyrange/simplifyrange.go @@ -5,10 +5,8 @@ package simplifyrange import ( - "bytes" _ "embed" "go/ast" - "go/printer" "go/token" "golang.org/x/tools/go/analysis" @@ -42,73 +40,48 @@ func run(pass *analysis.Pass) (interface{}, error) { (*ast.RangeStmt)(nil), } inspect.Preorder(nodeFilter, func(n ast.Node) { - if _, ok := generated[pass.Fset.File(n.Pos())]; ok { - return // skip checking if it's generated code - } + rng := n.(*ast.RangeStmt) - var copy *ast.RangeStmt // shallow-copy the AST before modifying - { - x := *n.(*ast.RangeStmt) - copy = &x - } - end := newlineIndex(pass.Fset, copy) + kblank := isBlank(rng.Key) + vblank := isBlank(rng.Value) + var start, end token.Pos + switch { + case kblank && (rng.Value == nil || vblank): + // for _ = range x {} + // for _, _ = range x {} + // ^^^^^^^ + start, end = rng.Key.Pos(), rng.Range - // Range statements of the form: for i, _ := range x {} - var old ast.Expr - if isBlank(copy.Value) { - old = copy.Value - copy.Value = nil - } - // Range statements of the form: for _ := range x {} - if isBlank(copy.Key) && copy.Value == nil { - old = copy.Key - copy.Key = nil + case vblank: + // for k, _ := range x {} + // ^^^ + start, end = rng.Key.End(), rng.Value.End() + + default: + return } - // Return early if neither if condition is met. - if old == nil { + + if generated[pass.Fset.File(n.Pos())] { return } + pass.Report(analysis.Diagnostic{ - Pos: old.Pos(), - End: old.End(), - Message: "simplify range expression", - SuggestedFixes: suggestedFixes(pass.Fset, copy, end), + Pos: start, + End: end, + Message: "simplify range expression", + SuggestedFixes: []analysis.SuggestedFix{{ + Message: "Remove empty value", + TextEdits: []analysis.TextEdit{{ + Pos: start, + End: end, + }}, + }}, }) }) return nil, nil } -func suggestedFixes(fset *token.FileSet, rng *ast.RangeStmt, end token.Pos) []analysis.SuggestedFix { - var b bytes.Buffer - printer.Fprint(&b, fset, rng) - stmt := b.Bytes() - index := bytes.Index(stmt, []byte("\n")) - // If there is a new line character, then don't replace the body. 
- if index != -1 { - stmt = stmt[:index] - } - return []analysis.SuggestedFix{{ - Message: "Remove empty value", - TextEdits: []analysis.TextEdit{{ - Pos: rng.Pos(), - End: end, - NewText: stmt[:index], - }}, - }} -} - -func newlineIndex(fset *token.FileSet, rng *ast.RangeStmt) token.Pos { - var b bytes.Buffer - printer.Fprint(&b, fset, rng) - contents := b.Bytes() - index := bytes.Index(contents, []byte("\n")) - if index == -1 { - return rng.End() - } - return rng.Pos() + token.Pos(index) -} - -func isBlank(x ast.Expr) bool { - ident, ok := x.(*ast.Ident) - return ok && ident.Name == "_" +func isBlank(e ast.Expr) bool { + id, ok := e.(*ast.Ident) + return ok && id.Name == "_" } diff --git a/gopls/internal/analysis/simplifyrange/simplifyrange_test.go b/gopls/internal/analysis/simplifyrange/simplifyrange_test.go index 50a600e03bf..089f65df870 100644 --- a/gopls/internal/analysis/simplifyrange/simplifyrange_test.go +++ b/gopls/internal/analysis/simplifyrange/simplifyrange_test.go @@ -5,8 +5,6 @@ package simplifyrange_test import ( - "go/build" - "slices" "testing" "golang.org/x/tools/go/analysis/analysistest" @@ -14,9 +12,8 @@ import ( ) func Test(t *testing.T) { - testdata := analysistest.TestData() - analysistest.RunWithSuggestedFixes(t, testdata, simplifyrange.Analyzer, "a", "generatedcode") - if slices.Contains(build.Default.ReleaseTags, "go1.23") { // uses iter.Seq - analysistest.RunWithSuggestedFixes(t, testdata, simplifyrange.Analyzer, "rangeoverfunc") - } + analysistest.RunWithSuggestedFixes(t, analysistest.TestData(), simplifyrange.Analyzer, + "a", + "generatedcode", + "rangeoverfunc") } diff --git a/gopls/internal/analysis/simplifyrange/testdata/src/a/a.go b/gopls/internal/analysis/simplifyrange/testdata/src/a/a.go index 49face1e968..1d7b1bd58f2 100644 --- a/gopls/internal/analysis/simplifyrange/testdata/src/a/a.go +++ b/gopls/internal/analysis/simplifyrange/testdata/src/a/a.go @@ -13,4 +13,11 @@ func m() { } for _ = range maps { // want "simplify range expression" } + for _, _ = range maps { // want "simplify range expression" + } + for _, v := range maps { // nope + println(v) + } + for range maps { // nope + } } diff --git a/gopls/internal/analysis/simplifyrange/testdata/src/a/a.go.golden b/gopls/internal/analysis/simplifyrange/testdata/src/a/a.go.golden index ec8490ab337..25139bd93f2 100644 --- a/gopls/internal/analysis/simplifyrange/testdata/src/a/a.go.golden +++ b/gopls/internal/analysis/simplifyrange/testdata/src/a/a.go.golden @@ -13,4 +13,11 @@ func m() { } for range maps { // want "simplify range expression" } + for range maps { // want "simplify range expression" + } + for _, v := range maps { // nope + println(v) + } + for range maps { // nope + } } From 776604a9ed881ee1274724fb3a5f058f3ebdf0eb Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Mon, 17 Feb 2025 12:24:06 -0500 Subject: [PATCH 034/270] gopls/internal/analysis/modernize: sortslice: fix crash The sole statement of a comparison func body is not necessarily a return statement. 
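The problematic input looks like this (copied from the regression test added
below; assumes the usual "sort" import): the comparator's only statement is an
if statement rather than a return, which the old code assumed unconditionally.

    func _(sense bool, s2 []struct{ x int }) {
        sort.Slice(s2, func(i, j int) bool {
            if sense {
                return s2[i].x < s2[j].x
            } else {
                return s2[i].x > s2[j].x
            }
        })
    }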
+ Test Fixes golang/go#71786 Change-Id: Ic002035fc9fa303b62ed1828c13f3bdfb8bc6950 Reviewed-on: https://go-review.googlesource.com/c/tools/+/650215 Reviewed-by: Robert Findley Auto-Submit: Alan Donovan LUCI-TryBot-Result: Go LUCI --- .../internal/analysis/modernize/sortslice.go | 75 ++++++++++--------- .../testdata/src/sortslice/sortslice.go | 12 ++- .../src/sortslice/sortslice.go.golden | 12 ++- 3 files changed, 60 insertions(+), 39 deletions(-) diff --git a/gopls/internal/analysis/modernize/sortslice.go b/gopls/internal/analysis/modernize/sortslice.go index 7f695d76495..bbc8befb8ee 100644 --- a/gopls/internal/analysis/modernize/sortslice.go +++ b/gopls/internal/analysis/modernize/sortslice.go @@ -57,45 +57,46 @@ func sortslice(pass *analysis.Pass) { i := sig.Params().At(0) j := sig.Params().At(1) - ret := lit.Body.List[0].(*ast.ReturnStmt) - if compare, ok := ret.Results[0].(*ast.BinaryExpr); ok && compare.Op == token.LSS { - // isIndex reports whether e is s[v]. - isIndex := func(e ast.Expr, v *types.Var) bool { - index, ok := e.(*ast.IndexExpr) - return ok && - equalSyntax(index.X, s) && - is[*ast.Ident](index.Index) && - info.Uses[index.Index.(*ast.Ident)] == v - } - if isIndex(compare.X, i) && isIndex(compare.Y, j) { - // Have: sort.Slice(s, func(i, j int) bool { return s[i] < s[j] }) + if ret, ok := lit.Body.List[0].(*ast.ReturnStmt); ok { + if compare, ok := ret.Results[0].(*ast.BinaryExpr); ok && compare.Op == token.LSS { + // isIndex reports whether e is s[v]. + isIndex := func(e ast.Expr, v *types.Var) bool { + index, ok := e.(*ast.IndexExpr) + return ok && + equalSyntax(index.X, s) && + is[*ast.Ident](index.Index) && + info.Uses[index.Index.(*ast.Ident)] == v + } + if isIndex(compare.X, i) && isIndex(compare.Y, j) { + // Have: sort.Slice(s, func(i, j int) bool { return s[i] < s[j] }) - _, prefix, importEdits := analysisinternal.AddImport( - info, file, "slices", "slices", "Sort", call.Pos()) + _, prefix, importEdits := analysisinternal.AddImport( + info, file, "slices", "slices", "Sort", call.Pos()) - pass.Report(analysis.Diagnostic{ - // Highlight "sort.Slice". - Pos: call.Fun.Pos(), - End: call.Fun.End(), - Category: "sortslice", - Message: fmt.Sprintf("sort.Slice can be modernized using slices.Sort"), - SuggestedFixes: []analysis.SuggestedFix{{ - Message: fmt.Sprintf("Replace sort.Slice call by slices.Sort"), - TextEdits: append(importEdits, []analysis.TextEdit{ - { - // Replace sort.Slice with slices.Sort. - Pos: call.Fun.Pos(), - End: call.Fun.End(), - NewText: []byte(prefix + "Sort"), - }, - { - // Eliminate FuncLit. - Pos: call.Args[0].End(), - End: call.Rparen, - }, - }...), - }}, - }) + pass.Report(analysis.Diagnostic{ + // Highlight "sort.Slice". + Pos: call.Fun.Pos(), + End: call.Fun.End(), + Category: "sortslice", + Message: fmt.Sprintf("sort.Slice can be modernized using slices.Sort"), + SuggestedFixes: []analysis.SuggestedFix{{ + Message: fmt.Sprintf("Replace sort.Slice call by slices.Sort"), + TextEdits: append(importEdits, []analysis.TextEdit{ + { + // Replace sort.Slice with slices.Sort. + Pos: call.Fun.Pos(), + End: call.Fun.End(), + NewText: []byte(prefix + "Sort"), + }, + { + // Eliminate FuncLit. 
+ Pos: call.Args[0].End(), + End: call.Rparen, + }, + }...), + }}, + }) + } } } } diff --git a/gopls/internal/analysis/modernize/testdata/src/sortslice/sortslice.go b/gopls/internal/analysis/modernize/testdata/src/sortslice/sortslice.go index 53d15746839..19242065b24 100644 --- a/gopls/internal/analysis/modernize/testdata/src/sortslice/sortslice.go +++ b/gopls/internal/analysis/modernize/testdata/src/sortslice/sortslice.go @@ -20,6 +20,16 @@ func _(s []int) { sort.Slice(s, func(i, j int) bool { return s[j] < s[i] }) // nope: wrong index var } -func _(s2 []struct{ x int }) { +func _(sense bool, s2 []struct{ x int }) { sort.Slice(s2, func(i, j int) bool { return s2[i].x < s2[j].x }) // nope: not a simple index operation + + // Regression test for a crash: the sole statement of a + // comparison func body is not necessarily a return! + sort.Slice(s2, func(i, j int) bool { + if sense { + return s2[i].x < s2[j].x + } else { + return s2[i].x > s2[j].x + } + }) } diff --git a/gopls/internal/analysis/modernize/testdata/src/sortslice/sortslice.go.golden b/gopls/internal/analysis/modernize/testdata/src/sortslice/sortslice.go.golden index 34af5aad60b..19149b4480a 100644 --- a/gopls/internal/analysis/modernize/testdata/src/sortslice/sortslice.go.golden +++ b/gopls/internal/analysis/modernize/testdata/src/sortslice/sortslice.go.golden @@ -22,6 +22,16 @@ func _(s []int) { sort.Slice(s, func(i, j int) bool { return s[j] < s[i] }) // nope: wrong index var } -func _(s2 []struct{ x int }) { +func _(sense bool, s2 []struct{ x int }) { sort.Slice(s2, func(i, j int) bool { return s2[i].x < s2[j].x }) // nope: not a simple index operation + + // Regression test for a crash: the sole statement of a + // comparison func body is not necessarily a return! + sort.Slice(s2, func(i, j int) bool { + if sense { + return s2[i].x < s2[j].x + } else { + return s2[i].x > s2[j].x + } + }) } From e6754cedb2986a3026a6de6e1460f6a2edbe01f0 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Tue, 18 Feb 2025 14:43:35 -0500 Subject: [PATCH 035/270] gopls/internal/cache/parsego: add File.Cursor, and use it This CL adds a Cursor to the parsego.File. Though the primary motivation is convenience and flexibility, it is expected to be an optimization: though it is computed eagerly, it is retained in the parse cache, and is expected to pay for itself very quickly by allowing us to replace many whole-File ast.Inspect operations with more targeted traversals. The CL replaces all ast.Inspect(file) operations with Cursor, but there remain many more opportunities for using it in narrower traversals, and in places that need to navigate to siblings or ancestors. Also, amend Cursor.FindPos to use the complete range of the File, as CL 637738 recently did for astutil.NodeContains. Also, various clean-ups to InlayHint: - push the traversals down in InlayHint to avoid having to scan a slice for every single node we visit; - simplify the function signature used for each hint algorithm. 
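(Illustration, not part of this CL: the sketch below shows the traversal pattern that the new Cursor field enables, assuming the internal parsego package; identNames is a hypothetical helper, and the older ast.Inspect form is kept in a comment for comparison.)

package example

import (
	"go/ast"

	"golang.org/x/tools/gopls/internal/cache/parsego"
)

// identNames returns the name of every identifier in the file.
func identNames(pgf *parsego.File) []string {
	var names []string

	// Formerly, a whole-file traversal with a type check at every node:
	//
	//	ast.Inspect(pgf.File, func(n ast.Node) bool {
	//		if id, ok := n.(*ast.Ident); ok {
	//			names = append(names, id.Name)
	//		}
	//		return true
	//	})

	// With pgf.Cursor, only *ast.Ident nodes are visited.
	for cur := range pgf.Cursor.Preorder((*ast.Ident)(nil)) {
		names = append(names, cur.Node().(*ast.Ident).Name)
	}
	return names
}

The filtered traversal also lets call sites use plain continue/break in place of the "return true" bookkeeping of ast.Inspect, as the xrefs, references, and link changes below show.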
Change-Id: I64d0c2cae75fd73a4b539ceb81ad9d6f7d80cfb8 Reviewed-on: https://go-review.googlesource.com/c/tools/+/650396 Reviewed-by: Jonathan Amsterdam Auto-Submit: Alan Donovan LUCI-TryBot-Result: Go LUCI Commit-Queue: Alan Donovan Reviewed-by: Robert Findley --- gopls/internal/cache/parsego/file.go | 8 + gopls/internal/cache/parsego/parse.go | 8 + gopls/internal/cache/xrefs/xrefs.go | 11 +- gopls/internal/golang/folding_range.go | 142 ++++---- gopls/internal/golang/implementation.go | 20 +- gopls/internal/golang/inlay_hint.go | 414 ++++++++++++------------ gopls/internal/golang/references.go | 12 +- gopls/internal/server/link.go | 16 +- internal/astutil/cursor/cursor.go | 12 +- 9 files changed, 326 insertions(+), 317 deletions(-) diff --git a/gopls/internal/cache/parsego/file.go b/gopls/internal/cache/parsego/file.go index 41fd1937ec1..2be4ed4b2ca 100644 --- a/gopls/internal/cache/parsego/file.go +++ b/gopls/internal/cache/parsego/file.go @@ -14,6 +14,7 @@ import ( "golang.org/x/tools/gopls/internal/protocol" "golang.org/x/tools/gopls/internal/util/bug" "golang.org/x/tools/gopls/internal/util/safetoken" + "golang.org/x/tools/internal/astutil/cursor" ) // A File contains the results of parsing a Go file. @@ -32,6 +33,8 @@ type File struct { // actual content of the file if we have fixed the AST. Src []byte + Cursor cursor.Cursor // cursor of *ast.File, sans sibling files + // fixedSrc and fixedAST report on "fixing" that occurred during parsing of // this file. // @@ -71,6 +74,11 @@ func (pgf *File) PositionPos(p protocol.Position) (token.Pos, error) { return safetoken.Pos(pgf.Tok, offset) } +// PosPosition returns a protocol Position for the token.Pos in this file. +func (pgf *File) PosPosition(pos token.Pos) (protocol.Position, error) { + return pgf.Mapper.PosPosition(pgf.Tok, pos) +} + // PosRange returns a protocol Range for the token.Pos interval in this file. func (pgf *File) PosRange(start, end token.Pos) (protocol.Range, error) { return pgf.Mapper.PosRange(pgf.Tok, start, end) diff --git a/gopls/internal/cache/parsego/parse.go b/gopls/internal/cache/parsego/parse.go index df167314b04..db6089d8e6d 100644 --- a/gopls/internal/cache/parsego/parse.go +++ b/gopls/internal/cache/parsego/parse.go @@ -23,11 +23,13 @@ import ( "reflect" "slices" + "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/gopls/internal/label" "golang.org/x/tools/gopls/internal/protocol" "golang.org/x/tools/gopls/internal/util/astutil" "golang.org/x/tools/gopls/internal/util/bug" "golang.org/x/tools/gopls/internal/util/safetoken" + "golang.org/x/tools/internal/astutil/cursor" "golang.org/x/tools/internal/diff" "golang.org/x/tools/internal/event" ) @@ -153,6 +155,11 @@ func Parse(ctx context.Context, fset *token.FileSet, uri protocol.DocumentURI, s } assert(file != nil, "nil *ast.File") + // Provide a cursor for fast and convenient navigation. 
+ inspect := inspector.New([]*ast.File{file}) + curFile, _ := cursor.Root(inspect).FirstChild() + _ = curFile.Node().(*ast.File) + return &File{ URI: uri, Mode: mode, @@ -161,6 +168,7 @@ func Parse(ctx context.Context, fset *token.FileSet, uri protocol.DocumentURI, s fixedAST: fixedAST, File: file, Tok: tok, + Cursor: curFile, Mapper: protocol.NewMapper(uri, src), ParseErr: parseErr, }, fixes diff --git a/gopls/internal/cache/xrefs/xrefs.go b/gopls/internal/cache/xrefs/xrefs.go index 2115322bfdc..d9b7051737a 100644 --- a/gopls/internal/cache/xrefs/xrefs.go +++ b/gopls/internal/cache/xrefs/xrefs.go @@ -44,8 +44,8 @@ func Index(files []*parsego.File, pkg *types.Package, info *types.Info) []byte { objectpathFor := new(objectpath.Encoder).For for fileIndex, pgf := range files { - ast.Inspect(pgf.File, func(n ast.Node) bool { - switch n := n.(type) { + for cur := range pgf.Cursor.Preorder((*ast.Ident)(nil), (*ast.ImportSpec)(nil)) { + switch n := cur.Node().(type) { case *ast.Ident: // Report a reference for each identifier that // uses a symbol exported from another package. @@ -68,7 +68,7 @@ func Index(files []*parsego.File, pkg *types.Package, info *types.Info) []byte { if err != nil { // Capitalized but not exported // (e.g. local const/var/type). - return true + continue } gobObj = &gobObject{Path: path} objects[obj] = gobObj @@ -91,7 +91,7 @@ func Index(files []*parsego.File, pkg *types.Package, info *types.Info) []byte { // string to the imported package. pkgname := info.PkgNameOf(n) if pkgname == nil { - return true // missing import + continue // missing import } objects := getObjects(pkgname.Imported()) gobObj, ok := objects[nil] @@ -109,8 +109,7 @@ func Index(files []*parsego.File, pkg *types.Package, info *types.Info) []byte { bug.Reportf("out of bounds import spec %+v", n.Path) } } - return true - }) + } } // Flatten the maps into slices, and sort for determinism. diff --git a/gopls/internal/golang/folding_range.go b/gopls/internal/golang/folding_range.go index 4352da28151..eed31e92944 100644 --- a/gopls/internal/golang/folding_range.go +++ b/gopls/internal/golang/folding_range.go @@ -46,12 +46,84 @@ func FoldingRange(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, ranges := commentsFoldingRange(pgf) // Walk the ast and collect folding ranges. - ast.Inspect(pgf.File, func(n ast.Node) bool { - if rng, ok := foldingRangeFunc(pgf, n, lineFoldingOnly); ok { - ranges = append(ranges, rng) + filter := []ast.Node{ + (*ast.BasicLit)(nil), + (*ast.BlockStmt)(nil), + (*ast.CallExpr)(nil), + (*ast.CaseClause)(nil), + (*ast.CommClause)(nil), + (*ast.CompositeLit)(nil), + (*ast.FieldList)(nil), + (*ast.GenDecl)(nil), + } + for cur := range pgf.Cursor.Preorder(filter...) { + // TODO(suzmue): include trailing empty lines before the closing + // parenthesis/brace. + var kind protocol.FoldingRangeKind + // start and end define the range of content to fold away. + var start, end token.Pos + switch n := cur.Node().(type) { + case *ast.BlockStmt: + // Fold between positions of or lines between "{" and "}". + start, end = getLineFoldingRange(pgf, n.Lbrace, n.Rbrace, lineFoldingOnly) + + case *ast.CaseClause: + // Fold from position of ":" to end. + start, end = n.Colon+1, n.End() + + case *ast.CommClause: + // Fold from position of ":" to end. + start, end = n.Colon+1, n.End() + + case *ast.CallExpr: + // Fold between positions of or lines between "(" and ")". 
+ start, end = getLineFoldingRange(pgf, n.Lparen, n.Rparen, lineFoldingOnly) + + case *ast.FieldList: + // Fold between positions of or lines between opening parenthesis/brace and closing parenthesis/brace. + start, end = getLineFoldingRange(pgf, n.Opening, n.Closing, lineFoldingOnly) + + case *ast.GenDecl: + // If this is an import declaration, set the kind to be protocol.Imports. + if n.Tok == token.IMPORT { + kind = protocol.Imports + } + // Fold between positions of or lines between "(" and ")". + start, end = getLineFoldingRange(pgf, n.Lparen, n.Rparen, lineFoldingOnly) + + case *ast.BasicLit: + // Fold raw string literals from position of "`" to position of "`". + if n.Kind == token.STRING && len(n.Value) >= 2 && n.Value[0] == '`' && n.Value[len(n.Value)-1] == '`' { + start, end = n.Pos(), n.End() + } + + case *ast.CompositeLit: + // Fold between positions of or lines between "{" and "}". + start, end = getLineFoldingRange(pgf, n.Lbrace, n.Rbrace, lineFoldingOnly) + + default: + panic(n) } - return true - }) + + // Check that folding positions are valid. + if !start.IsValid() || !end.IsValid() { + continue + } + if start == end { + // Nothing to fold. + continue + } + // in line folding mode, do not fold if the start and end lines are the same. + if lineFoldingOnly && safetoken.Line(pgf.Tok, start) == safetoken.Line(pgf.Tok, end) { + continue + } + rng, err := pgf.PosRange(start, end) + if err != nil { + bug.Reportf("failed to create range: %s", err) // can't happen + continue + } + ranges = append(ranges, foldingRange(kind, rng)) + } // Sort by start position. slices.SortFunc(ranges, func(x, y protocol.FoldingRange) int { @@ -64,66 +136,6 @@ func FoldingRange(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, return ranges, nil } -// foldingRangeFunc calculates the line folding range for ast.Node n -func foldingRangeFunc(pgf *parsego.File, n ast.Node, lineFoldingOnly bool) (protocol.FoldingRange, bool) { - // TODO(suzmue): include trailing empty lines before the closing - // parenthesis/brace. - var kind protocol.FoldingRangeKind - // start and end define the range of content to fold away. - var start, end token.Pos - switch n := n.(type) { - case *ast.BlockStmt: - // Fold between positions of or lines between "{" and "}". - start, end = getLineFoldingRange(pgf, n.Lbrace, n.Rbrace, lineFoldingOnly) - case *ast.CaseClause: - // Fold from position of ":" to end. - start, end = n.Colon+1, n.End() - case *ast.CommClause: - // Fold from position of ":" to end. - start, end = n.Colon+1, n.End() - case *ast.CallExpr: - // Fold between positions of or lines between "(" and ")". - start, end = getLineFoldingRange(pgf, n.Lparen, n.Rparen, lineFoldingOnly) - case *ast.FieldList: - // Fold between positions of or lines between opening parenthesis/brace and closing parenthesis/brace. - start, end = getLineFoldingRange(pgf, n.Opening, n.Closing, lineFoldingOnly) - case *ast.GenDecl: - // If this is an import declaration, set the kind to be protocol.Imports. - if n.Tok == token.IMPORT { - kind = protocol.Imports - } - // Fold between positions of or lines between "(" and ")". - start, end = getLineFoldingRange(pgf, n.Lparen, n.Rparen, lineFoldingOnly) - case *ast.BasicLit: - // Fold raw string literals from position of "`" to position of "`". - if n.Kind == token.STRING && len(n.Value) >= 2 && n.Value[0] == '`' && n.Value[len(n.Value)-1] == '`' { - start, end = n.Pos(), n.End() - } - case *ast.CompositeLit: - // Fold between positions of or lines between "{" and "}". 
- start, end = getLineFoldingRange(pgf, n.Lbrace, n.Rbrace, lineFoldingOnly) - } - - // Check that folding positions are valid. - if !start.IsValid() || !end.IsValid() { - return protocol.FoldingRange{}, false - } - if start == end { - // Nothing to fold. - return protocol.FoldingRange{}, false - } - // in line folding mode, do not fold if the start and end lines are the same. - if lineFoldingOnly && safetoken.Line(pgf.Tok, start) == safetoken.Line(pgf.Tok, end) { - return protocol.FoldingRange{}, false - } - rng, err := pgf.PosRange(start, end) - if err != nil { - bug.Reportf("failed to create range: %s", err) // can't happen - return protocol.FoldingRange{}, false - } - return foldingRange(kind, rng), true -} - // getLineFoldingRange returns the folding range for nodes with parentheses/braces/brackets // that potentially can take up multiple lines. func getLineFoldingRange(pgf *parsego.File, open, close token.Pos, lineFoldingOnly bool) (token.Pos, token.Pos) { diff --git a/gopls/internal/golang/implementation.go b/gopls/internal/golang/implementation.go index fe0a34a1c80..a7a7e663d44 100644 --- a/gopls/internal/golang/implementation.go +++ b/gopls/internal/golang/implementation.go @@ -352,17 +352,14 @@ func localImplementations(ctx context.Context, snapshot *cache.Snapshot, pkg *ca var locs []protocol.Location var methodLocs []methodsets.Location for _, pgf := range pkg.CompiledGoFiles() { - ast.Inspect(pgf.File, func(n ast.Node) bool { - spec, ok := n.(*ast.TypeSpec) - if !ok { - return true // not a type declaration - } + for cur := range pgf.Cursor.Preorder((*ast.TypeSpec)(nil)) { + spec := cur.Node().(*ast.TypeSpec) def := pkg.TypesInfo().Defs[spec.Name] if def == nil { - return true // "can't happen" for types + continue // "can't happen" for types } if def.(*types.TypeName).IsAlias() { - return true // skip type aliases to avoid duplicate reporting + continue // skip type aliases to avoid duplicate reporting } candidateType := methodsets.EnsurePointer(def.Type()) @@ -373,20 +370,20 @@ func localImplementations(ctx context.Context, snapshot *cache.Snapshot, pkg *ca // TODO(adonovan): UX: report I/I pairs too? // The same question appears in the global algorithm (methodsets). if !concreteImplementsIntf(&msets, candidateType, queryType) { - return true // not assignable + continue // not assignable } // Ignore types with empty method sets. // (No point reporting that every type satisfies 'any'.) mset := msets.MethodSet(candidateType) if mset.Len() == 0 { - return true + continue } if method == nil { // Found matching type. locs = append(locs, mustLocation(pgf, spec.Name)) - return true + continue } // Find corresponding method. @@ -407,8 +404,7 @@ func localImplementations(ctx context.Context, snapshot *cache.Snapshot, pkg *ca break } } - return true - }) + } } // Finally convert method positions to protocol form by reading the files. 
diff --git a/gopls/internal/golang/inlay_hint.go b/gopls/internal/golang/inlay_hint.go index bc85745cb0b..84b18e06781 100644 --- a/gopls/internal/golang/inlay_hint.go +++ b/gopls/internal/golang/inlay_hint.go @@ -14,9 +14,11 @@ import ( "strings" "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/cache/parsego" "golang.org/x/tools/gopls/internal/file" "golang.org/x/tools/gopls/internal/protocol" "golang.org/x/tools/gopls/internal/settings" + "golang.org/x/tools/internal/astutil/cursor" "golang.org/x/tools/internal/event" "golang.org/x/tools/internal/typeparams" "golang.org/x/tools/internal/typesinternal" @@ -47,7 +49,7 @@ func InlayHint(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pR } info := pkg.TypesInfo() - q := typesinternal.FileQualifier(pgf.File, pkg.Types()) + qual := typesinternal.FileQualifier(pgf.File, pkg.Types()) // Set the range to the full file if the range is not valid. start, end := pgf.File.FileStart, pgf.File.FileEnd @@ -63,20 +65,16 @@ func InlayHint(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pR } var hints []protocol.InlayHint - ast.Inspect(pgf.File, func(node ast.Node) bool { - // If not in range, we can stop looking. - if node == nil || node.End() < start || node.Pos() > end { - return false - } + if curSubrange, ok := pgf.Cursor.FindPos(start, end); ok { + add := func(hint protocol.InlayHint) { hints = append(hints, hint) } for _, fn := range enabledHints { - hints = append(hints, fn(node, pgf.Mapper, pgf.Tok, info, &q)...) + fn(info, pgf, qual, curSubrange, add) } - return true - }) + } return hints, nil } -type inlayHintFunc func(node ast.Node, m *protocol.Mapper, tf *token.File, info *types.Info, q *types.Qualifier) []protocol.InlayHint +type inlayHintFunc func(info *types.Info, pgf *parsego.File, qual types.Qualifier, cur cursor.Cursor, add func(protocol.InlayHint)) var allInlayHints = map[settings.InlayHint]inlayHintFunc{ settings.AssignVariableTypes: assignVariableTypes, @@ -88,259 +86,243 @@ var allInlayHints = map[settings.InlayHint]inlayHintFunc{ settings.FunctionTypeParameters: funcTypeParams, } -func parameterNames(node ast.Node, m *protocol.Mapper, tf *token.File, info *types.Info, _ *types.Qualifier) []protocol.InlayHint { - callExpr, ok := node.(*ast.CallExpr) - if !ok { - return nil - } - t := info.TypeOf(callExpr.Fun) - if t == nil { - return nil - } - signature, ok := typeparams.CoreType(t).(*types.Signature) - if !ok { - return nil +func parameterNames(info *types.Info, pgf *parsego.File, qual types.Qualifier, cur cursor.Cursor, add func(protocol.InlayHint)) { + for curCall := range cur.Preorder((*ast.CallExpr)(nil)) { + callExpr := curCall.Node().(*ast.CallExpr) + t := info.TypeOf(callExpr.Fun) + if t == nil { + continue + } + signature, ok := typeparams.CoreType(t).(*types.Signature) + if !ok { + continue + } + + for i, v := range callExpr.Args { + start, err := pgf.PosPosition(v.Pos()) + if err != nil { + continue + } + params := signature.Params() + // When a function has variadic params, we skip args after + // params.Len(). + if i > params.Len()-1 { + break + } + param := params.At(i) + // param.Name is empty for built-ins like append + if param.Name() == "" { + continue + } + // Skip the parameter name hint if the arg matches + // the parameter name. + if i, ok := v.(*ast.Ident); ok && i.Name == param.Name() { + continue + } + + label := param.Name() + if signature.Variadic() && i == params.Len()-1 { + label = label + "..." 
+ } + add(protocol.InlayHint{ + Position: start, + Label: buildLabel(label + ":"), + Kind: protocol.Parameter, + PaddingRight: true, + }) + } } +} - var hints []protocol.InlayHint - for i, v := range callExpr.Args { - start, err := m.PosPosition(tf, v.Pos()) - if err != nil { +func funcTypeParams(info *types.Info, pgf *parsego.File, qual types.Qualifier, cur cursor.Cursor, add func(protocol.InlayHint)) { + for curCall := range cur.Preorder((*ast.CallExpr)(nil)) { + call := curCall.Node().(*ast.CallExpr) + id, ok := call.Fun.(*ast.Ident) + if !ok { continue } - params := signature.Params() - // When a function has variadic params, we skip args after - // params.Len(). - if i > params.Len()-1 { - break - } - param := params.At(i) - // param.Name is empty for built-ins like append - if param.Name() == "" { + inst := info.Instances[id] + if inst.TypeArgs == nil { continue } - // Skip the parameter name hint if the arg matches - // the parameter name. - if i, ok := v.(*ast.Ident); ok && i.Name == param.Name() { + start, err := pgf.PosPosition(id.End()) + if err != nil { continue } - - label := param.Name() - if signature.Variadic() && i == params.Len()-1 { - label = label + "..." + var args []string + for i := 0; i < inst.TypeArgs.Len(); i++ { + args = append(args, inst.TypeArgs.At(i).String()) + } + if len(args) == 0 { + continue } - hints = append(hints, protocol.InlayHint{ - Position: start, - Label: buildLabel(label + ":"), - Kind: protocol.Parameter, - PaddingRight: true, + add(protocol.InlayHint{ + Position: start, + Label: buildLabel("[" + strings.Join(args, ", ") + "]"), + Kind: protocol.Type, }) } - return hints -} - -func funcTypeParams(node ast.Node, m *protocol.Mapper, tf *token.File, info *types.Info, _ *types.Qualifier) []protocol.InlayHint { - ce, ok := node.(*ast.CallExpr) - if !ok { - return nil - } - id, ok := ce.Fun.(*ast.Ident) - if !ok { - return nil - } - inst := info.Instances[id] - if inst.TypeArgs == nil { - return nil - } - start, err := m.PosPosition(tf, id.End()) - if err != nil { - return nil - } - var args []string - for i := 0; i < inst.TypeArgs.Len(); i++ { - args = append(args, inst.TypeArgs.At(i).String()) - } - if len(args) == 0 { - return nil - } - return []protocol.InlayHint{{ - Position: start, - Label: buildLabel("[" + strings.Join(args, ", ") + "]"), - Kind: protocol.Type, - }} } -func assignVariableTypes(node ast.Node, m *protocol.Mapper, tf *token.File, info *types.Info, q *types.Qualifier) []protocol.InlayHint { - stmt, ok := node.(*ast.AssignStmt) - if !ok || stmt.Tok != token.DEFINE { - return nil - } - - var hints []protocol.InlayHint - for _, v := range stmt.Lhs { - if h := variableType(v, m, tf, info, q); h != nil { - hints = append(hints, *h) +func assignVariableTypes(info *types.Info, pgf *parsego.File, qual types.Qualifier, cur cursor.Cursor, add func(protocol.InlayHint)) { + for curAssign := range cur.Preorder((*ast.AssignStmt)(nil)) { + stmt := curAssign.Node().(*ast.AssignStmt) + if stmt.Tok != token.DEFINE { + continue + } + for _, v := range stmt.Lhs { + variableType(info, pgf, qual, v, add) } } - return hints } -func rangeVariableTypes(node ast.Node, m *protocol.Mapper, tf *token.File, info *types.Info, q *types.Qualifier) []protocol.InlayHint { - rStmt, ok := node.(*ast.RangeStmt) - if !ok { - return nil - } - var hints []protocol.InlayHint - if h := variableType(rStmt.Key, m, tf, info, q); h != nil { - hints = append(hints, *h) - } - if h := variableType(rStmt.Value, m, tf, info, q); h != nil { - hints = append(hints, *h) +func 
rangeVariableTypes(info *types.Info, pgf *parsego.File, qual types.Qualifier, cur cursor.Cursor, add func(protocol.InlayHint)) { + for curRange := range cur.Preorder((*ast.RangeStmt)(nil)) { + rStmt := curRange.Node().(*ast.RangeStmt) + variableType(info, pgf, qual, rStmt.Key, add) + variableType(info, pgf, qual, rStmt.Value, add) } - return hints } -func variableType(e ast.Expr, m *protocol.Mapper, tf *token.File, info *types.Info, q *types.Qualifier) *protocol.InlayHint { +func variableType(info *types.Info, pgf *parsego.File, qual types.Qualifier, e ast.Expr, add func(protocol.InlayHint)) { typ := info.TypeOf(e) if typ == nil { - return nil + return } - end, err := m.PosPosition(tf, e.End()) + end, err := pgf.PosPosition(e.End()) if err != nil { - return nil + return } - return &protocol.InlayHint{ + add(protocol.InlayHint{ Position: end, - Label: buildLabel(types.TypeString(typ, *q)), + Label: buildLabel(types.TypeString(typ, qual)), Kind: protocol.Type, PaddingLeft: true, - } + }) } -func constantValues(node ast.Node, m *protocol.Mapper, tf *token.File, info *types.Info, _ *types.Qualifier) []protocol.InlayHint { - genDecl, ok := node.(*ast.GenDecl) - if !ok || genDecl.Tok != token.CONST { - return nil - } - - var hints []protocol.InlayHint - for _, v := range genDecl.Specs { - spec, ok := v.(*ast.ValueSpec) - if !ok { +func constantValues(info *types.Info, pgf *parsego.File, qual types.Qualifier, cur cursor.Cursor, add func(protocol.InlayHint)) { + for curDecl := range cur.Preorder((*ast.GenDecl)(nil)) { + genDecl := curDecl.Node().(*ast.GenDecl) + if genDecl.Tok != token.CONST { continue } - end, err := m.PosPosition(tf, v.End()) - if err != nil { - continue - } - // Show hints when values are missing or at least one value is not - // a basic literal. - showHints := len(spec.Values) == 0 - checkValues := len(spec.Names) == len(spec.Values) - var values []string - for i, w := range spec.Names { - obj, ok := info.ObjectOf(w).(*types.Const) - if !ok || obj.Val().Kind() == constant.Unknown { - return nil + + for _, v := range genDecl.Specs { + spec, ok := v.(*ast.ValueSpec) + if !ok { + continue + } + end, err := pgf.PosPosition(v.End()) + if err != nil { + continue } - if checkValues { - switch spec.Values[i].(type) { - case *ast.BadExpr: - return nil - case *ast.BasicLit: - default: - if obj.Val().Kind() != constant.Bool { - showHints = true + // Show hints when values are missing or at least one value is not + // a basic literal. 
+ showHints := len(spec.Values) == 0 + checkValues := len(spec.Names) == len(spec.Values) + var values []string + for i, w := range spec.Names { + obj, ok := info.ObjectOf(w).(*types.Const) + if !ok || obj.Val().Kind() == constant.Unknown { + continue + } + if checkValues { + switch spec.Values[i].(type) { + case *ast.BadExpr: + continue + case *ast.BasicLit: + default: + if obj.Val().Kind() != constant.Bool { + showHints = true + } } } + values = append(values, fmt.Sprintf("%v", obj.Val())) } - values = append(values, fmt.Sprintf("%v", obj.Val())) - } - if !showHints || len(values) == 0 { - continue + if !showHints || len(values) == 0 { + continue + } + add(protocol.InlayHint{ + Position: end, + Label: buildLabel("= " + strings.Join(values, ", ")), + PaddingLeft: true, + }) } - hints = append(hints, protocol.InlayHint{ - Position: end, - Label: buildLabel("= " + strings.Join(values, ", ")), - PaddingLeft: true, - }) } - return hints } -func compositeLiteralFields(node ast.Node, m *protocol.Mapper, tf *token.File, info *types.Info, _ *types.Qualifier) []protocol.InlayHint { - compLit, ok := node.(*ast.CompositeLit) - if !ok { - return nil - } - typ := info.TypeOf(compLit) - if typ == nil { - return nil - } - typ = typesinternal.Unpointer(typ) - strct, ok := typeparams.CoreType(typ).(*types.Struct) - if !ok { - return nil - } +func compositeLiteralFields(info *types.Info, pgf *parsego.File, qual types.Qualifier, cur cursor.Cursor, add func(protocol.InlayHint)) { + for curCompLit := range cur.Preorder((*ast.CompositeLit)(nil)) { + compLit, ok := curCompLit.Node().(*ast.CompositeLit) + typ := info.TypeOf(compLit) + if typ == nil { + continue + } + typ = typesinternal.Unpointer(typ) + strct, ok := typeparams.CoreType(typ).(*types.Struct) + if !ok { + continue + } - var hints []protocol.InlayHint - var allEdits []protocol.TextEdit - for i, v := range compLit.Elts { - if _, ok := v.(*ast.KeyValueExpr); !ok { - start, err := m.PosPosition(tf, v.Pos()) - if err != nil { - continue - } - if i > strct.NumFields()-1 { - break + var hints []protocol.InlayHint + var allEdits []protocol.TextEdit + for i, v := range compLit.Elts { + if _, ok := v.(*ast.KeyValueExpr); !ok { + start, err := pgf.PosPosition(v.Pos()) + if err != nil { + continue + } + if i > strct.NumFields()-1 { + break + } + hints = append(hints, protocol.InlayHint{ + Position: start, + Label: buildLabel(strct.Field(i).Name() + ":"), + Kind: protocol.Parameter, + PaddingRight: true, + }) + allEdits = append(allEdits, protocol.TextEdit{ + Range: protocol.Range{Start: start, End: start}, + NewText: strct.Field(i).Name() + ": ", + }) } - hints = append(hints, protocol.InlayHint{ - Position: start, - Label: buildLabel(strct.Field(i).Name() + ":"), - Kind: protocol.Parameter, - PaddingRight: true, - }) - allEdits = append(allEdits, protocol.TextEdit{ - Range: protocol.Range{Start: start, End: start}, - NewText: strct.Field(i).Name() + ": ", - }) + } + // It is not allowed to have a mix of keyed and unkeyed fields, so + // have the text edits add keys to all fields. + for i := range hints { + hints[i].TextEdits = allEdits + add(hints[i]) } } - // It is not allowed to have a mix of keyed and unkeyed fields, so - // have the text edits add keys to all fields. 
- for i := range hints { - hints[i].TextEdits = allEdits - } - return hints } -func compositeLiteralTypes(node ast.Node, m *protocol.Mapper, tf *token.File, info *types.Info, q *types.Qualifier) []protocol.InlayHint { - compLit, ok := node.(*ast.CompositeLit) - if !ok { - return nil - } - typ := info.TypeOf(compLit) - if typ == nil { - return nil - } - if compLit.Type != nil { - return nil - } - prefix := "" - if t, ok := typeparams.CoreType(typ).(*types.Pointer); ok { - typ = t.Elem() - prefix = "&" - } - // The type for this composite literal is implicit, add an inlay hint. - start, err := m.PosPosition(tf, compLit.Lbrace) - if err != nil { - return nil +func compositeLiteralTypes(info *types.Info, pgf *parsego.File, qual types.Qualifier, cur cursor.Cursor, add func(protocol.InlayHint)) { + for curCompLit := range cur.Preorder((*ast.CompositeLit)(nil)) { + compLit := curCompLit.Node().(*ast.CompositeLit) + typ := info.TypeOf(compLit) + if typ == nil { + continue + } + if compLit.Type != nil { + continue + } + prefix := "" + if t, ok := typeparams.CoreType(typ).(*types.Pointer); ok { + typ = t.Elem() + prefix = "&" + } + // The type for this composite literal is implicit, add an inlay hint. + start, err := pgf.PosPosition(compLit.Lbrace) + if err != nil { + continue + } + add(protocol.InlayHint{ + Position: start, + Label: buildLabel(fmt.Sprintf("%s%s", prefix, types.TypeString(typ, qual))), + Kind: protocol.Type, + }) } - return []protocol.InlayHint{{ - Position: start, - Label: buildLabel(fmt.Sprintf("%s%s", prefix, types.TypeString(typ, *q))), - Kind: protocol.Type, - }} } func buildLabel(s string) []protocol.InlayHintLabelPart { diff --git a/gopls/internal/golang/references.go b/gopls/internal/golang/references.go index 3ecaab6e3e1..12152453dcd 100644 --- a/gopls/internal/golang/references.go +++ b/gopls/internal/golang/references.go @@ -602,14 +602,12 @@ func localReferences(pkg *cache.Package, targets map[types.Object]bool, correspo // Scan through syntax looking for uses of one of the target objects. for _, pgf := range pkg.CompiledGoFiles() { - ast.Inspect(pgf.File, func(n ast.Node) bool { - if id, ok := n.(*ast.Ident); ok { - if obj, ok := pkg.TypesInfo().Uses[id]; ok && matches(obj) { - report(mustLocation(pgf, id), false) - } + for curId := range pgf.Cursor.Preorder((*ast.Ident)(nil)) { + id := curId.Node().(*ast.Ident) + if obj, ok := pkg.TypesInfo().Uses[id]; ok && matches(obj) { + report(mustLocation(pgf, id), false) } - return true - }) + } } return nil } diff --git a/gopls/internal/server/link.go b/gopls/internal/server/link.go index 13097d89887..c888904baab 100644 --- a/gopls/internal/server/link.go +++ b/gopls/internal/server/link.go @@ -164,17 +164,15 @@ func goLinks(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]p // Gather links found in string literals. 
var str []*ast.BasicLit - ast.Inspect(pgf.File, func(node ast.Node) bool { - switch n := node.(type) { - case *ast.ImportSpec: - return false // don't process import strings again - case *ast.BasicLit: - if n.Kind == token.STRING { - str = append(str, n) + for curLit := range pgf.Cursor.Preorder((*ast.BasicLit)(nil)) { + lit := curLit.Node().(*ast.BasicLit) + if lit.Kind == token.STRING { + if _, ok := curLit.Parent().Node().(*ast.ImportSpec); ok { + continue // ignore import strings } + str = append(str, lit) } - return true - }) + } for _, s := range str { strOffset, err := safetoken.Offset(pgf.Tok, s.Pos()) if err != nil { diff --git a/internal/astutil/cursor/cursor.go b/internal/astutil/cursor/cursor.go index 1052f65acfb..5ed177c9f3d 100644 --- a/internal/astutil/cursor/cursor.go +++ b/internal/astutil/cursor/cursor.go @@ -407,6 +407,8 @@ func (c Cursor) FindNode(n ast.Node) (Cursor, bool) { // FindPos returns the cursor for the innermost node n in the tree // rooted at c such that n.Pos() <= start && end <= n.End(). +// (For an *ast.File, it uses the bounds n.FileStart-n.FileEnd.) +// // It returns zero if none is found. // Precondition: start <= end. // @@ -425,10 +427,16 @@ func (c Cursor) FindPos(start, end token.Pos) (Cursor, bool) { for i, limit := c.indices(); i < limit; i++ { ev := events[i] if ev.index > i { // push? - if ev.node.Pos() > start { + n := ev.node + var nodeStart, nodeEnd token.Pos + if file, ok := n.(*ast.File); ok { + nodeStart, nodeEnd = file.FileStart, file.FileEnd + } else { + nodeStart, nodeEnd = n.Pos(), n.End() + } + if nodeStart > start { break // disjoint, after; stop } - nodeEnd := ev.node.End() if end <= nodeEnd { // node fully contains target range best = i From 3c245dad2c55e3b6e3e1635f1d5bc3da5277be83 Mon Sep 17 00:00:00 2001 From: Cuong Manh Le Date: Wed, 19 Feb 2025 15:19:16 +0700 Subject: [PATCH 036/270] gopls: fix diagnostics integration test CL 649355 improved simplifyrange suggested fix, thus the corresponding diagnostics integration test must be updated, too. Change-Id: Ief33cc4e9ab3d760c1af28c94102638f6d2b69e8 Reviewed-on: https://go-review.googlesource.com/c/tools/+/650556 Reviewed-by: Michael Knyszek Auto-Submit: Alan Donovan LUCI-TryBot-Result: Go LUCI Auto-Submit: Cuong Manh Le Reviewed-by: Alan Donovan --- gopls/internal/test/integration/diagnostics/diagnostics_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gopls/internal/test/integration/diagnostics/diagnostics_test.go b/gopls/internal/test/integration/diagnostics/diagnostics_test.go index c496f6464a3..a97d249e7b5 100644 --- a/gopls/internal/test/integration/diagnostics/diagnostics_test.go +++ b/gopls/internal/test/integration/diagnostics/diagnostics_test.go @@ -562,7 +562,7 @@ func _() { env.OpenFile("main.go") var d protocol.PublishDiagnosticsParams env.AfterChange( - Diagnostics(AtPosition("main.go", 5, 8)), + Diagnostics(AtPosition("main.go", 5, 6)), ReadDiagnostics("main.go", &d), ) if fixes := env.GetQuickFixes("main.go", d.Diagnostics); len(fixes) != 0 { From 44abb0ac312034f5c655a3d815ec385884038027 Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Fri, 14 Feb 2025 10:16:33 -0500 Subject: [PATCH 037/270] go/types/internal/play: display type structure Display the complete recursive structure of a types.Type, in the style of ast.Fprint. 
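(Illustration, not from the CL itself: based on the describeType code added below, the recursive structure printed for a type such as map[string][]int would look roughly like this, one line per type node, with ". " marking each level of nesting.)

*types.Map:
. Key: *types.Basic: Name: "string"
. Elem: *types.Slice:
. . Elem: *types.Basic: Name: "int"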
Change-Id: I408c9b1f1beb214e0184381e97085e606ad8a5a1 Reviewed-on: https://go-review.googlesource.com/c/tools/+/649617 Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI Reviewed-by: Robert Findley --- go/types/internal/play/play.go | 66 +++++++++++++++++++++++++++++++++- 1 file changed, 65 insertions(+), 1 deletion(-) diff --git a/go/types/internal/play/play.go b/go/types/internal/play/play.go index eb9e5794b94..8d3b9d19346 100644 --- a/go/types/internal/play/play.go +++ b/go/types/internal/play/play.go @@ -2,6 +2,8 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. +//go:build go1.23 + // The play program is a playground for go/types: a simple web-based // text editor into which the user can enter a Go program, select a // region, and see type information about it. @@ -35,7 +37,6 @@ import ( // TODO(adonovan): // - show line numbers next to textarea. -// - show a (tree) breakdown of the representation of the expression's type. // - mention this in the go/types tutorial. // - display versions of go/types and go command. @@ -297,6 +298,10 @@ func formatObj(out *strings.Builder, fset *token.FileSet, ref string, obj types. } fmt.Fprintf(out, "\n\n") + fmt.Fprintf(out, "Type:\n") + describeType(out, obj.Type()) + fmt.Fprintf(out, "\n") + // method set if methods := typeutil.IntuitiveMethodSet(obj.Type(), nil); len(methods) > 0 { fmt.Fprintf(out, "Methods:\n") @@ -318,6 +323,65 @@ func formatObj(out *strings.Builder, fset *token.FileSet, ref string, obj types. } } +// describeType formats t to out in a way that makes it clear what methods to call on t to +// get at its parts. +// describeType assumes t was constructed by the type checker, so it doesn't check +// for recursion. The type checker replaces recursive alias types, which are illegal, +// with a BasicType that says as much. Other types that it constructs are recursive +// only via a name, and this function does not traverse names. +func describeType(out *strings.Builder, t types.Type) { + depth := -1 + + var ft func(string, types.Type) + ft = func(prefix string, t types.Type) { + depth++ + defer func() { depth-- }() + + for range depth { + fmt.Fprint(out, ". ") + } + + fmt.Fprintf(out, "%s%T:", prefix, t) + switch t := t.(type) { + case *types.Basic: + fmt.Fprintf(out, " Name: %q\n", t.Name()) + case *types.Pointer: + fmt.Fprintln(out) + ft("Elem: ", t.Elem()) + case *types.Slice: + fmt.Fprintln(out) + ft("Elem: ", t.Elem()) + case *types.Array: + fmt.Fprintf(out, " Len: %d\n", t.Len()) + ft("Elem: ", t.Elem()) + case *types.Map: + fmt.Fprintln(out) + ft("Key: ", t.Key()) + ft("Elem: ", t.Elem()) + case *types.Chan: + fmt.Fprintf(out, " Dir: %s\n", chanDirs[t.Dir()]) + ft("Elem: ", t.Elem()) + case *types.Alias: + fmt.Fprintf(out, " Name: %q\n", t.Obj().Name()) + ft("Rhs: ", t.Rhs()) + default: + // For types we may have missed or which have too much to bother with, + // print their string representation. + // TODO(jba): print more about struct types (their fields) and interface and named + // types (their methods). 
+ fmt.Fprintf(out, " %s\n", t) + } + } + + ft("", t) +} + +var chanDirs = []string{ + "SendRecv", + "SendOnly", + "RecvOnly", +} + func handleRoot(w http.ResponseWriter, req *http.Request) { io.WriteString(w, mainHTML) } func handleJS(w http.ResponseWriter, req *http.Request) { io.WriteString(w, mainJS) } func handleCSS(w http.ResponseWriter, req *http.Request) { io.WriteString(w, mainCSS) } From 877c1d128ba89b146547e8becff924a35cb9b322 Mon Sep 17 00:00:00 2001 From: Rob Findley Date: Wed, 19 Feb 2025 17:50:27 +0000 Subject: [PATCH 038/270] gopls: address various staticcheck findings Fix several real findings uncovered by running staticcheck ./... from the gopls module. Change-Id: Ieffd38bfd98cac24052c3b408907eb197c9e1cda Reviewed-on: https://go-review.googlesource.com/c/tools/+/650643 Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI --- .../internal/analysis/modernize/sortslice.go | 5 ++-- .../analysis/unusedvariable/unusedvariable.go | 2 +- gopls/internal/cache/analysis.go | 3 +-- gopls/internal/cache/check.go | 23 +------------------ .../cache/methodsets/fingerprint_test.go | 5 ---- gopls/internal/cache/snapshot.go | 2 +- gopls/internal/cache/source.go | 13 ++++++----- gopls/internal/cache/view.go | 3 --- gopls/internal/cmd/cmd.go | 9 ++------ gopls/internal/golang/extract.go | 2 +- gopls/internal/golang/inlay_hint.go | 3 +++ gopls/internal/golang/semtok.go | 2 +- gopls/internal/golang/workspace_symbol.go | 2 -- gopls/internal/settings/settings.go | 8 +++---- 14 files changed, 24 insertions(+), 58 deletions(-) diff --git a/gopls/internal/analysis/modernize/sortslice.go b/gopls/internal/analysis/modernize/sortslice.go index bbc8befb8ee..a033be7f635 100644 --- a/gopls/internal/analysis/modernize/sortslice.go +++ b/gopls/internal/analysis/modernize/sortslice.go @@ -5,7 +5,6 @@ package modernize import ( - "fmt" "go/ast" "go/token" "go/types" @@ -78,9 +77,9 @@ func sortslice(pass *analysis.Pass) { Pos: call.Fun.Pos(), End: call.Fun.End(), Category: "sortslice", - Message: fmt.Sprintf("sort.Slice can be modernized using slices.Sort"), + Message: "sort.Slice can be modernized using slices.Sort", SuggestedFixes: []analysis.SuggestedFix{{ - Message: fmt.Sprintf("Replace sort.Slice call by slices.Sort"), + Message: "Replace sort.Slice call by slices.Sort", TextEdits: append(importEdits, []analysis.TextEdit{ { // Replace sort.Slice with slices.Sort. diff --git a/gopls/internal/analysis/unusedvariable/unusedvariable.go b/gopls/internal/analysis/unusedvariable/unusedvariable.go index 5f1c188eb6a..3ea1dbe6953 100644 --- a/gopls/internal/analysis/unusedvariable/unusedvariable.go +++ b/gopls/internal/analysis/unusedvariable/unusedvariable.go @@ -47,7 +47,7 @@ func run(pass *analysis.Pass) (any, error) { if len(match) > 0 { varName := match[1] // Beginning in Go 1.23, go/types began quoting vars as `v'. - varName = strings.Trim(varName, "'`'") + varName = strings.Trim(varName, "`'") err := runForError(pass, typeErr, varName) if err != nil { diff --git a/gopls/internal/cache/analysis.go b/gopls/internal/cache/analysis.go index d570c0a46ae..a0dd322a51e 100644 --- a/gopls/internal/cache/analysis.go +++ b/gopls/internal/cache/analysis.go @@ -822,8 +822,7 @@ func typesLookup(pkg *types.Package) func(string) *types.Package { ) // search scans children the next package in pending, looking for pkgPath. 
- var search func(pkgPath string) (*types.Package, int) - search = func(pkgPath string) (sought *types.Package, numPending int) { + search := func(pkgPath string) (sought *types.Package, numPending int) { mu.Lock() defer mu.Unlock() diff --git a/gopls/internal/cache/check.go b/gopls/internal/cache/check.go index a3aff5e5475..aa1537c8705 100644 --- a/gopls/internal/cache/check.go +++ b/gopls/internal/cache/check.go @@ -44,11 +44,6 @@ import ( "golang.org/x/tools/internal/versions" ) -// Various optimizations that should not affect correctness. -const ( - preserveImportGraph = true // hold on to the import graph for open packages -) - type unit = struct{} // A typeCheckBatch holds data for a logical type-checking operation, which may @@ -97,21 +92,6 @@ func (b *typeCheckBatch) getHandle(id PackageID) *packageHandle { return b._handles[id] } -// A futurePackage is a future result of type checking or importing a package, -// to be cached in a map. -// -// The goroutine that creates the futurePackage is responsible for evaluating -// its value, and closing the done channel. -type futurePackage struct { - done chan unit - v pkgOrErr -} - -type pkgOrErr struct { - pkg *types.Package - err error -} - // TypeCheck parses and type-checks the specified packages, // and returns them in the same order as the ids. // The resulting packages' types may belong to different importers, @@ -701,8 +681,7 @@ func importLookup(mp *metadata.Package, source metadata.Source) func(PackagePath // search scans children the next package in pending, looking for pkgPath. // Invariant: whenever search is called, pkgPath is not yet mapped. - var search func(pkgPath PackagePath) (PackageID, bool) - search = func(pkgPath PackagePath) (id PackageID, found bool) { + search := func(pkgPath PackagePath) (id PackageID, found bool) { pkg := pending[0] pending = pending[1:] for depPath, depID := range pkg.DepsByPkgPath { diff --git a/gopls/internal/cache/methodsets/fingerprint_test.go b/gopls/internal/cache/methodsets/fingerprint_test.go index a9f47c1a2e6..795ddaa965b 100644 --- a/gopls/internal/cache/methodsets/fingerprint_test.go +++ b/gopls/internal/cache/methodsets/fingerprint_test.go @@ -39,11 +39,6 @@ func Test_fingerprint(t *testing.T) { // (Non-tricky types only.) var fingerprints typeutil.Map - type eqclass struct { - class map[types.Type]bool - fp string - } - for _, pkg := range pkgs { switch pkg.Types.Path() { case "unsafe", "builtin": diff --git a/gopls/internal/cache/snapshot.go b/gopls/internal/cache/snapshot.go index c341ac6e85a..578cea61eb7 100644 --- a/gopls/internal/cache/snapshot.go +++ b/gopls/internal/cache/snapshot.go @@ -1301,7 +1301,7 @@ searchOverlays: // where the file is inside a workspace module, but perhaps no packages // were loaded for that module. _, loadedMod := loadedModFiles[goMod] - _, workspaceMod := s.view.viewDefinition.workspaceModFiles[goMod] + _, workspaceMod := s.view.workspaceModFiles[goMod] // If we have a relevant go.mod file, check whether the file is orphaned // due to its go.mod file being inactive. 
We could also offer a // prescriptive diagnostic in the case that there is no go.mod file, but diff --git a/gopls/internal/cache/source.go b/gopls/internal/cache/source.go index 3e21c641651..fa038ec37a6 100644 --- a/gopls/internal/cache/source.go +++ b/gopls/internal/cache/source.go @@ -61,9 +61,7 @@ func (s *goplsSource) ResolveReferences(ctx context.Context, filename string, mi // collect the ones that are still needed := maps.Clone(missing) for _, a := range fromWS { - if _, ok := needed[a.Package.Name]; ok { - delete(needed, a.Package.Name) - } + delete(needed, a.Package.Name) } // when debug (below) is gone, change this to: if len(needed) == 0 {return fromWS, nil} var fromCache []*result @@ -184,10 +182,13 @@ type found struct { func (s *goplsSource) resolveWorkspaceReferences(filename string, missing imports.References) ([]*imports.Result, error) { uri := protocol.URIFromPath(filename) mypkgs, err := s.S.MetadataForFile(s.ctx, uri) - if len(mypkgs) != 1 { - // what does this mean? can it happen? + if err != nil { + return nil, err + } + if len(mypkgs) == 0 { + return nil, nil } - mypkg := mypkgs[0] + mypkg := mypkgs[0] // narrowest package // search the metadata graph for package ids correstponding to missing g := s.S.MetadataGraph() var ids []metadata.PackageID diff --git a/gopls/internal/cache/view.go b/gopls/internal/cache/view.go index 6ebf6837ef2..fc1ac5724ed 100644 --- a/gopls/internal/cache/view.go +++ b/gopls/internal/cache/view.go @@ -20,7 +20,6 @@ import ( "os/exec" "path" "path/filepath" - "regexp" "slices" "sort" "strings" @@ -1253,8 +1252,6 @@ func globsMatchPath(globs, target string) bool { return false } -var modFlagRegexp = regexp.MustCompile(`-mod[ =](\w+)`) - // TODO(rfindley): clean up the redundancy of allFilesExcluded, // pathExcludedByFilterFunc, pathExcludedByFilter, view.filterFunc... func allFilesExcluded(files []string, filterFunc func(protocol.DocumentURI) bool) bool { diff --git a/gopls/internal/cmd/cmd.go b/gopls/internal/cmd/cmd.go index a647b3198df..f7ba04df6a4 100644 --- a/gopls/internal/cmd/cmd.go +++ b/gopls/internal/cmd/cmd.go @@ -310,11 +310,6 @@ func (app *Application) featureCommands() []tool.Application { } } -var ( - internalMu sync.Mutex - internalConnections = make(map[string]*connection) -) - // connect creates and initializes a new in-process gopls session. 
func (app *Application) connect(ctx context.Context) (*connection, error) { client := newClient(app) @@ -377,10 +372,10 @@ func (c *connection) initialize(ctx context.Context, options func(*settings.Opti params.InitializationOptions = map[string]interface{}{ "symbolMatcher": string(opts.SymbolMatcher), } - if c.initializeResult, err = c.Server.Initialize(ctx, params); err != nil { + if c.initializeResult, err = c.Initialize(ctx, params); err != nil { return err } - if err := c.Server.Initialized(ctx, &protocol.InitializedParams{}); err != nil { + if err := c.Initialized(ctx, &protocol.InitializedParams{}); err != nil { return err } return nil diff --git a/gopls/internal/golang/extract.go b/gopls/internal/golang/extract.go index 8c8758d9f0a..b8219562de5 100644 --- a/gopls/internal/golang/extract.go +++ b/gopls/internal/golang/extract.go @@ -375,7 +375,7 @@ func stmtToInsertVarBefore(path []ast.Node, variables []*variable) (ast.Stmt, er } return parent, nil } - return enclosingStmt.(ast.Stmt), nil + return enclosingStmt, nil } // canExtractVariable reports whether the code in the given range can be diff --git a/gopls/internal/golang/inlay_hint.go b/gopls/internal/golang/inlay_hint.go index 84b18e06781..b49ebd85e21 100644 --- a/gopls/internal/golang/inlay_hint.go +++ b/gopls/internal/golang/inlay_hint.go @@ -255,6 +255,9 @@ func constantValues(info *types.Info, pgf *parsego.File, qual types.Qualifier, c func compositeLiteralFields(info *types.Info, pgf *parsego.File, qual types.Qualifier, cur cursor.Cursor, add func(protocol.InlayHint)) { for curCompLit := range cur.Preorder((*ast.CompositeLit)(nil)) { compLit, ok := curCompLit.Node().(*ast.CompositeLit) + if !ok { + continue + } typ := info.TypeOf(compLit) if typ == nil { continue diff --git a/gopls/internal/golang/semtok.go b/gopls/internal/golang/semtok.go index cb3f2cfd478..121531d8280 100644 --- a/gopls/internal/golang/semtok.go +++ b/gopls/internal/golang/semtok.go @@ -616,7 +616,7 @@ func (tv *tokenVisitor) ident(id *ast.Ident) { obj types.Object ok bool ) - if obj, ok = tv.info.Defs[id]; obj != nil { + if obj, _ = tv.info.Defs[id]; obj != nil { // definition mods = append(mods, semtok.ModDefinition) tok, mods = tv.appendObjectModifiers(mods, obj) diff --git a/gopls/internal/golang/workspace_symbol.go b/gopls/internal/golang/workspace_symbol.go index feba6081515..89c144b9230 100644 --- a/gopls/internal/golang/workspace_symbol.go +++ b/gopls/internal/golang/workspace_symbol.go @@ -293,14 +293,12 @@ func (c comboMatcher) match(chunks []string) (int, float64) { func collectSymbols(ctx context.Context, snapshots []*cache.Snapshot, matcherType settings.SymbolMatcher, symbolizer symbolizer, query string) ([]protocol.SymbolInformation, error) { // Extract symbols from all files. var work []symbolFile - var roots []string seen := make(map[protocol.DocumentURI]*metadata.Package) // only scan each file once for _, snapshot := range snapshots { // Use the root view URIs for determining (lexically) // whether a URI is in any open workspace. 
folderURI := snapshot.Folder() - roots = append(roots, strings.TrimRight(string(folderURI), "/")) filters := snapshot.Options().DirectoryFilters filterer := cache.NewFilterer(filters) diff --git a/gopls/internal/settings/settings.go b/gopls/internal/settings/settings.go index 7d64cbef459..393bccac312 100644 --- a/gopls/internal/settings/settings.go +++ b/gopls/internal/settings/settings.go @@ -740,8 +740,8 @@ type ImportsSourceEnum string const ( ImportsSourceOff ImportsSourceEnum = "off" - ImportsSourceGopls = "gopls" - ImportsSourceGoimports = "goimports" + ImportsSourceGopls ImportsSourceEnum = "gopls" + ImportsSourceGoimports ImportsSourceEnum = "goimports" ) type Matcher string @@ -967,7 +967,7 @@ func validateDirectoryFilter(ifilter string) (string, error) { if seg != "**" { for _, op := range unsupportedOps { if strings.Contains(seg, op) { - return "", fmt.Errorf("invalid filter %v, operator %v not supported. If you want to have this operator supported, consider filing an issue.", filter, op) + return "", fmt.Errorf("invalid filter %v, operator %v not supported. If you want to have this operator supported, consider filing an issue", filter, op) } } } @@ -1374,7 +1374,7 @@ func (e *SoftError) Error() string { // deprecatedError reports the current setting as deprecated. // The optional replacement is suggested to the user. func deprecatedError(replacement string) error { - msg := fmt.Sprintf("this setting is deprecated") + msg := "this setting is deprecated" if replacement != "" { msg = fmt.Sprintf("%s, use %q instead", msg, replacement) } From 4991e7daac565a1cb14d843e78a63a1a91f726d4 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Tue, 18 Feb 2025 16:04:35 -0500 Subject: [PATCH 039/270] gopls/internal/golang: use pgf.Cursor in CodeAction fix This CL pushes down the pgf.Cursor into internal interfaces so that it is avaiable where needed. The existing implementations have not been updated to use it. As a rule of thumb, any place that calls PathEnclosingInterval would be better off using Cursor; the exception is when there's a public API that accepts a 'path []Node'. 
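(Illustration, not part of this CL: a minimal sketch of the rule of thumb above. innermostAt is a hypothetical helper; it assumes the internal cursor package and contrasts the familiar PathEnclosingInterval idiom with the Cursor-based equivalent.)

package example

import (
	"go/ast"
	"go/token"

	"golang.org/x/tools/go/ast/astutil"
	"golang.org/x/tools/internal/astutil/cursor"
)

// innermostAt returns the innermost node enclosing [start, end].
func innermostAt(file *ast.File, curFile cursor.Cursor, start, end token.Pos) ast.Node {
	// Old idiom: materialize the entire enclosing path and take path[0].
	if path, _ := astutil.PathEnclosingInterval(file, start, end); len(path) > 0 {
		_ = path[0] // innermost node
	}

	// Cursor idiom: locate the node directly; parents and siblings
	// remain reachable from cur without building a slice.
	if cur, ok := curFile.FindPos(start, end); ok {
		return cur.Node()
	}
	return nil
}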
Change-Id: I18313809808fa83cc1f2a1d51850a9fdcf43ecdb Reviewed-on: https://go-review.googlesource.com/c/tools/+/650398 LUCI-TryBot-Result: Go LUCI Auto-Submit: Alan Donovan Reviewed-by: Robert Findley Commit-Queue: Alan Donovan --- .../analysis/fillstruct/fillstruct.go | 7 +-- gopls/internal/golang/codeaction.go | 20 ++++----- gopls/internal/golang/extract.go | 44 ++++++++++++------- gopls/internal/golang/fix.go | 6 +-- gopls/internal/golang/invertifcondition.go | 9 ++-- gopls/internal/golang/lines.go | 22 ++++++---- gopls/internal/golang/stub.go | 6 +-- .../golang/stubmethods/stubcalledfunc.go | 6 ++- .../golang/stubmethods/stubmethods.go | 9 +++- gopls/internal/golang/undeclared.go | 5 ++- gopls/internal/util/typesutil/typesutil.go | 2 + 11 files changed, 84 insertions(+), 52 deletions(-) diff --git a/gopls/internal/analysis/fillstruct/fillstruct.go b/gopls/internal/analysis/fillstruct/fillstruct.go index a8a861f0651..641b98e6fa7 100644 --- a/gopls/internal/analysis/fillstruct/fillstruct.go +++ b/gopls/internal/analysis/fillstruct/fillstruct.go @@ -28,6 +28,7 @@ import ( "golang.org/x/tools/gopls/internal/fuzzy" "golang.org/x/tools/gopls/internal/util/safetoken" "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/astutil/cursor" "golang.org/x/tools/internal/typeparams" "golang.org/x/tools/internal/typesinternal" ) @@ -129,15 +130,15 @@ const FixCategory = "fillstruct" // recognized by gopls ApplyFix // SuggestedFix computes the suggested fix for the kinds of // diagnostics produced by the Analyzer above. -func SuggestedFix(fset *token.FileSet, start, end token.Pos, content []byte, file *ast.File, pkg *types.Package, info *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) { +func SuggestedFix(fset *token.FileSet, start, end token.Pos, content []byte, curFile cursor.Cursor, pkg *types.Package, info *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) { if info == nil { return nil, nil, fmt.Errorf("nil types.Info") } pos := start // don't use the end - // TODO(rstambler): Using ast.Inspect would probably be more efficient than - // calling PathEnclosingInterval. Switch this approach. + // TODO(adonovan): simplify, using Cursor. + file := curFile.Node().(*ast.File) path, _ := astutil.PathEnclosingInterval(file, pos, pos) if len(path) == 0 { return nil, nil, fmt.Errorf("no enclosing ast.Node") diff --git a/gopls/internal/golang/codeaction.go b/gopls/internal/golang/codeaction.go index f82c32d6a9c..49a861852ff 100644 --- a/gopls/internal/golang/codeaction.go +++ b/gopls/internal/golang/codeaction.go @@ -325,8 +325,7 @@ func quickFix(ctx context.Context, req *codeActionsRequest) error { case strings.Contains(msg, "missing method"), strings.HasPrefix(msg, "cannot convert"), strings.Contains(msg, "not implement"): - path, _ := astutil.PathEnclosingInterval(req.pgf.File, start, end) - si := stubmethods.GetIfaceStubInfo(req.pkg.FileSet(), info, path, start) + si := stubmethods.GetIfaceStubInfo(req.pkg.FileSet(), info, req.pgf, start, end) if si != nil { qual := typesinternal.FileQualifier(req.pgf.File, si.Concrete.Obj().Pkg()) iface := types.TypeString(si.Interface.Type(), qual) @@ -338,8 +337,7 @@ func quickFix(ctx context.Context, req *codeActionsRequest) error { // Offer a "Declare missing method T.f" code action. // See [stubMissingCalledFunctionFixer] for command implementation. 
case strings.Contains(msg, "has no field or method"): - path, _ := astutil.PathEnclosingInterval(req.pgf.File, start, end) - si := stubmethods.GetCallStubInfo(req.pkg.FileSet(), info, path, start) + si := stubmethods.GetCallStubInfo(req.pkg.FileSet(), info, req.pgf, start, end) if si != nil { msg := fmt.Sprintf("Declare missing method %s.%s", si.Receiver.Obj().Name(), si.MethodName) req.addApplyFixAction(msg, fixMissingCalledFunction, req.loc) @@ -462,7 +460,7 @@ func goDoc(ctx context.Context, req *codeActionsRequest) error { // refactorExtractFunction produces "Extract function" code actions. // See [extractFunction] for command implementation. func refactorExtractFunction(ctx context.Context, req *codeActionsRequest) error { - if _, ok, _, _ := canExtractFunction(req.pgf.Tok, req.start, req.end, req.pgf.Src, req.pgf.File); ok { + if _, ok, _, _ := canExtractFunction(req.pgf.Tok, req.start, req.end, req.pgf.Src, req.pgf.Cursor); ok { req.addApplyFixAction("Extract function", fixExtractFunction, req.loc) } return nil @@ -471,7 +469,7 @@ func refactorExtractFunction(ctx context.Context, req *codeActionsRequest) error // refactorExtractMethod produces "Extract method" code actions. // See [extractMethod] for command implementation. func refactorExtractMethod(ctx context.Context, req *codeActionsRequest) error { - if _, ok, methodOK, _ := canExtractFunction(req.pgf.Tok, req.start, req.end, req.pgf.Src, req.pgf.File); ok && methodOK { + if _, ok, methodOK, _ := canExtractFunction(req.pgf.Tok, req.start, req.end, req.pgf.Src, req.pgf.Cursor); ok && methodOK { req.addApplyFixAction("Extract method", fixExtractMethod, req.loc) } return nil @@ -481,7 +479,7 @@ func refactorExtractMethod(ctx context.Context, req *codeActionsRequest) error { // See [extractVariable] for command implementation. func refactorExtractVariable(ctx context.Context, req *codeActionsRequest) error { info := req.pkg.TypesInfo() - if exprs, err := canExtractVariable(info, req.pgf.File, req.start, req.end, false); err == nil { + if exprs, err := canExtractVariable(info, req.pgf.Cursor, req.start, req.end, false); err == nil { // Offer one of refactor.extract.{constant,variable} // based on the constness of the expression; this is a // limitation of the codeActionProducers mechanism. @@ -507,7 +505,7 @@ func refactorExtractVariableAll(ctx context.Context, req *codeActionsRequest) er info := req.pkg.TypesInfo() // Don't suggest if only one expr is found, // otherwise it will duplicate with [refactorExtractVariable] - if exprs, err := canExtractVariable(info, req.pgf.File, req.start, req.end, true); err == nil && len(exprs) > 1 { + if exprs, err := canExtractVariable(info, req.pgf.Cursor, req.start, req.end, true); err == nil && len(exprs) > 1 { start, end, err := req.pgf.NodeOffsets(exprs[0]) if err != nil { return err @@ -664,7 +662,7 @@ func refactorRewriteChangeQuote(ctx context.Context, req *codeActionsRequest) er // refactorRewriteInvertIf produces "Invert 'if' condition" code actions. // See [invertIfCondition] for command implementation. func refactorRewriteInvertIf(ctx context.Context, req *codeActionsRequest) error { - if _, ok, _ := canInvertIfCondition(req.pgf.File, req.start, req.end); ok { + if _, ok, _ := canInvertIfCondition(req.pgf.Cursor, req.start, req.end); ok { req.addApplyFixAction("Invert 'if' condition", fixInvertIfCondition, req.loc) } return nil @@ -674,7 +672,7 @@ func refactorRewriteInvertIf(ctx context.Context, req *codeActionsRequest) error // See [splitLines] for command implementation. 
func refactorRewriteSplitLines(ctx context.Context, req *codeActionsRequest) error { // TODO(adonovan): opt: don't set needPkg just for FileSet. - if msg, ok, _ := canSplitLines(req.pgf.File, req.pkg.FileSet(), req.start, req.end); ok { + if msg, ok, _ := canSplitLines(req.pgf.Cursor, req.pkg.FileSet(), req.start, req.end); ok { req.addApplyFixAction(msg, fixSplitLines, req.loc) } return nil @@ -684,7 +682,7 @@ func refactorRewriteSplitLines(ctx context.Context, req *codeActionsRequest) err // See [joinLines] for command implementation. func refactorRewriteJoinLines(ctx context.Context, req *codeActionsRequest) error { // TODO(adonovan): opt: don't set needPkg just for FileSet. - if msg, ok, _ := canJoinLines(req.pgf.File, req.pkg.FileSet(), req.start, req.end); ok { + if msg, ok, _ := canJoinLines(req.pgf.Cursor, req.pkg.FileSet(), req.start, req.end); ok { req.addApplyFixAction(msg, fixJoinLines, req.loc) } return nil diff --git a/gopls/internal/golang/extract.go b/gopls/internal/golang/extract.go index b8219562de5..3d2b880db2d 100644 --- a/gopls/internal/golang/extract.go +++ b/gopls/internal/golang/extract.go @@ -24,17 +24,18 @@ import ( "golang.org/x/tools/gopls/internal/util/bug" "golang.org/x/tools/gopls/internal/util/safetoken" "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/astutil/cursor" "golang.org/x/tools/internal/typesinternal" ) // extractVariable implements the refactor.extract.{variable,constant} CodeAction command. -func extractVariable(fset *token.FileSet, start, end token.Pos, src []byte, file *ast.File, _ *types.Package, info *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) { - return extractExprs(fset, start, end, src, file, info, false) +func extractVariable(fset *token.FileSet, start, end token.Pos, src []byte, curFile cursor.Cursor, _ *types.Package, info *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) { + return extractExprs(fset, start, end, src, curFile, info, false) } // extractVariableAll implements the refactor.extract.{variable,constant}-all CodeAction command. -func extractVariableAll(fset *token.FileSet, start, end token.Pos, src []byte, file *ast.File, _ *types.Package, info *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) { - return extractExprs(fset, start, end, src, file, info, true) +func extractVariableAll(fset *token.FileSet, start, end token.Pos, src []byte, curFile cursor.Cursor, _ *types.Package, info *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) { + return extractExprs(fset, start, end, src, curFile, info, true) } // extractExprs replaces occurrence(s) of a specified expression within the same function @@ -43,9 +44,11 @@ func extractVariableAll(fset *token.FileSet, start, end token.Pos, src []byte, f // // The new variable/constant is declared as close as possible to the first found expression // within the deepest common scope accessible to all candidate occurrences. -func extractExprs(fset *token.FileSet, start, end token.Pos, src []byte, file *ast.File, info *types.Info, all bool) (*token.FileSet, *analysis.SuggestedFix, error) { +func extractExprs(fset *token.FileSet, start, end token.Pos, src []byte, curFile cursor.Cursor, info *types.Info, all bool) (*token.FileSet, *analysis.SuggestedFix, error) { + file := curFile.Node().(*ast.File) + // TODO(adonovan): simplify, using Cursor. 
tokFile := fset.File(file.FileStart) - exprs, err := canExtractVariable(info, file, start, end, all) + exprs, err := canExtractVariable(info, curFile, start, end, all) if err != nil { return nil, nil, fmt.Errorf("cannot extract: %v", err) } @@ -381,10 +384,12 @@ func stmtToInsertVarBefore(path []ast.Node, variables []*variable) (ast.Stmt, er // canExtractVariable reports whether the code in the given range can be // extracted to a variable (or constant). It returns the selected expression or, if 'all', // all structurally equivalent expressions within the same function body, in lexical order. -func canExtractVariable(info *types.Info, file *ast.File, start, end token.Pos, all bool) ([]ast.Expr, error) { +func canExtractVariable(info *types.Info, curFile cursor.Cursor, start, end token.Pos, all bool) ([]ast.Expr, error) { if start == end { return nil, fmt.Errorf("empty selection") } + file := curFile.Node().(*ast.File) + // TODO(adonovan): simplify, using Cursor. path, exact := astutil.PathEnclosingInterval(file, start, end) if !exact { return nil, fmt.Errorf("selection is not an expression") @@ -571,13 +576,13 @@ type returnVariable struct { } // extractMethod refactors the selected block of code into a new method. -func extractMethod(fset *token.FileSet, start, end token.Pos, src []byte, file *ast.File, pkg *types.Package, info *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) { - return extractFunctionMethod(fset, start, end, src, file, pkg, info, true) +func extractMethod(fset *token.FileSet, start, end token.Pos, src []byte, curFile cursor.Cursor, pkg *types.Package, info *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) { + return extractFunctionMethod(fset, start, end, src, curFile, pkg, info, true) } // extractFunction refactors the selected block of code into a new function. -func extractFunction(fset *token.FileSet, start, end token.Pos, src []byte, file *ast.File, pkg *types.Package, info *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) { - return extractFunctionMethod(fset, start, end, src, file, pkg, info, false) +func extractFunction(fset *token.FileSet, start, end token.Pos, src []byte, curFile cursor.Cursor, pkg *types.Package, info *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) { + return extractFunctionMethod(fset, start, end, src, curFile, pkg, info, false) } // extractFunctionMethod refactors the selected block of code into a new function/method. @@ -588,17 +593,19 @@ func extractFunction(fset *token.FileSet, start, end token.Pos, src []byte, file // and return values of the extracted function/method. Lastly, we construct the call // of the function/method and insert this call as well as the extracted function/method into // their proper locations. -func extractFunctionMethod(fset *token.FileSet, start, end token.Pos, src []byte, file *ast.File, pkg *types.Package, info *types.Info, isMethod bool) (*token.FileSet, *analysis.SuggestedFix, error) { +func extractFunctionMethod(fset *token.FileSet, start, end token.Pos, src []byte, curFile cursor.Cursor, pkg *types.Package, info *types.Info, isMethod bool) (*token.FileSet, *analysis.SuggestedFix, error) { errorPrefix := "extractFunction" if isMethod { errorPrefix = "extractMethod" } + file := curFile.Node().(*ast.File) + // TODO(adonovan): simplify, using Cursor. 
tok := fset.File(file.FileStart) if tok == nil { return nil, nil, bug.Errorf("no file for position") } - p, ok, methodOk, err := canExtractFunction(tok, start, end, src, file) + p, ok, methodOk, err := canExtractFunction(tok, start, end, src, curFile) if (!ok && !isMethod) || (!methodOk && isMethod) { return nil, nil, fmt.Errorf("%s: cannot extract %s: %v", errorPrefix, safetoken.StartPosition(fset, start), err) @@ -1086,7 +1093,10 @@ func moveParamToFrontIfFound(params []ast.Expr, paramTypes []*ast.Field, x, sel // their cursors for whitespace. To support this use case, we must manually adjust the // ranges to match the correct AST node. In this particular example, we would adjust // rng.Start forward to the start of 'if' and rng.End backward to after '}'. -func adjustRangeForCommentsAndWhiteSpace(tok *token.File, start, end token.Pos, content []byte, file *ast.File) (token.Pos, token.Pos, error) { +func adjustRangeForCommentsAndWhiteSpace(tok *token.File, start, end token.Pos, content []byte, curFile cursor.Cursor) (token.Pos, token.Pos, error) { + file := curFile.Node().(*ast.File) + // TODO(adonovan): simplify, using Cursor. + // Adjust the end of the range to after leading whitespace and comments. prevStart := token.NoPos startComment := sort.Search(len(file.Comments), func(i int) bool { @@ -1410,12 +1420,14 @@ type fnExtractParams struct { // canExtractFunction reports whether the code in the given range can be // extracted to a function. -func canExtractFunction(tok *token.File, start, end token.Pos, src []byte, file *ast.File) (*fnExtractParams, bool, bool, error) { +func canExtractFunction(tok *token.File, start, end token.Pos, src []byte, curFile cursor.Cursor) (*fnExtractParams, bool, bool, error) { if start == end { return nil, false, false, fmt.Errorf("start and end are equal") } var err error - start, end, err = adjustRangeForCommentsAndWhiteSpace(tok, start, end, src, file) + file := curFile.Node().(*ast.File) + // TODO(adonovan): simplify, using Cursor. + start, end, err = adjustRangeForCommentsAndWhiteSpace(tok, start, end, src, curFile) if err != nil { return nil, false, false, err } diff --git a/gopls/internal/golang/fix.go b/gopls/internal/golang/fix.go index e812c677541..2c14d09c218 100644 --- a/gopls/internal/golang/fix.go +++ b/gopls/internal/golang/fix.go @@ -7,7 +7,6 @@ package golang import ( "context" "fmt" - "go/ast" "go/token" "go/types" @@ -20,6 +19,7 @@ import ( "golang.org/x/tools/gopls/internal/file" "golang.org/x/tools/gopls/internal/protocol" "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/internal/astutil/cursor" "golang.org/x/tools/internal/imports" ) @@ -47,12 +47,12 @@ type fixer func(ctx context.Context, s *cache.Snapshot, pkg *cache.Package, pgf // TODO(adonovan): move fillstruct and undeclaredname into this // package, so we can remove the import restriction and push // the singleFile wrapper down into each singleFileFixer? -type singleFileFixer func(fset *token.FileSet, start, end token.Pos, src []byte, file *ast.File, pkg *types.Package, info *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) +type singleFileFixer func(fset *token.FileSet, start, end token.Pos, src []byte, curFile cursor.Cursor, pkg *types.Package, info *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) // singleFile adapts a single-file fixer to a Fixer. 
func singleFile(fixer1 singleFileFixer) fixer { return func(ctx context.Context, snapshot *cache.Snapshot, pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) { - return fixer1(pkg.FileSet(), start, end, pgf.Src, pgf.File, pkg.Types(), pkg.TypesInfo()) + return fixer1(pkg.FileSet(), start, end, pgf.Src, pgf.Cursor, pkg.Types(), pkg.TypesInfo()) } } diff --git a/gopls/internal/golang/invertifcondition.go b/gopls/internal/golang/invertifcondition.go index 0fb7d1e4d0a..b26618ebf93 100644 --- a/gopls/internal/golang/invertifcondition.go +++ b/gopls/internal/golang/invertifcondition.go @@ -14,11 +14,12 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/gopls/internal/util/safetoken" + "golang.org/x/tools/internal/astutil/cursor" ) // invertIfCondition is a singleFileFixFunc that inverts an if/else statement -func invertIfCondition(fset *token.FileSet, start, end token.Pos, src []byte, file *ast.File, _ *types.Package, _ *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) { - ifStatement, _, err := canInvertIfCondition(file, start, end) +func invertIfCondition(fset *token.FileSet, start, end token.Pos, src []byte, curFile cursor.Cursor, _ *types.Package, _ *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) { + ifStatement, _, err := canInvertIfCondition(curFile, start, end) if err != nil { return nil, nil, err } @@ -241,7 +242,9 @@ func invertAndOr(fset *token.FileSet, expr *ast.BinaryExpr, src []byte) ([]byte, // canInvertIfCondition reports whether we can do invert-if-condition on the // code in the given range. -func canInvertIfCondition(file *ast.File, start, end token.Pos) (*ast.IfStmt, bool, error) { +func canInvertIfCondition(curFile cursor.Cursor, start, end token.Pos) (*ast.IfStmt, bool, error) { + file := curFile.Node().(*ast.File) + // TODO(adonovan): simplify, using Cursor. path, _ := astutil.PathEnclosingInterval(file, start, end) for _, node := range path { stmt, isIfStatement := node.(*ast.IfStmt) diff --git a/gopls/internal/golang/lines.go b/gopls/internal/golang/lines.go index b6a9823957d..f208e33e0c3 100644 --- a/gopls/internal/golang/lines.go +++ b/gopls/internal/golang/lines.go @@ -20,12 +20,13 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/gopls/internal/util/safetoken" + "golang.org/x/tools/internal/astutil/cursor" ) // canSplitLines checks whether we can split lists of elements inside // an enclosing curly bracket/parens into separate lines. -func canSplitLines(file *ast.File, fset *token.FileSet, start, end token.Pos) (string, bool, error) { - itemType, items, comments, _, _, _ := findSplitJoinTarget(fset, file, nil, start, end) +func canSplitLines(curFile cursor.Cursor, fset *token.FileSet, start, end token.Pos) (string, bool, error) { + itemType, items, comments, _, _, _ := findSplitJoinTarget(fset, curFile, nil, start, end) if itemType == "" { return "", false, nil } @@ -47,8 +48,8 @@ func canSplitLines(file *ast.File, fset *token.FileSet, start, end token.Pos) (s // canJoinLines checks whether we can join lists of elements inside an // enclosing curly bracket/parens into a single line. 
-func canJoinLines(file *ast.File, fset *token.FileSet, start, end token.Pos) (string, bool, error) { - itemType, items, comments, _, _, _ := findSplitJoinTarget(fset, file, nil, start, end) +func canJoinLines(curFile cursor.Cursor, fset *token.FileSet, start, end token.Pos) (string, bool, error) { + itemType, items, comments, _, _, _ := findSplitJoinTarget(fset, curFile, nil, start, end) if itemType == "" { return "", false, nil } @@ -84,8 +85,8 @@ func canSplitJoinLines(items []ast.Node, comments []*ast.CommentGroup) bool { } // splitLines is a singleFile fixer. -func splitLines(fset *token.FileSet, start, end token.Pos, src []byte, file *ast.File, _ *types.Package, _ *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) { - itemType, items, comments, indent, braceOpen, braceClose := findSplitJoinTarget(fset, file, src, start, end) +func splitLines(fset *token.FileSet, start, end token.Pos, src []byte, curFile cursor.Cursor, _ *types.Package, _ *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) { + itemType, items, comments, indent, braceOpen, braceClose := findSplitJoinTarget(fset, curFile, src, start, end) if itemType == "" { return nil, nil, nil // no fix available } @@ -94,8 +95,8 @@ func splitLines(fset *token.FileSet, start, end token.Pos, src []byte, file *ast } // joinLines is a singleFile fixer. -func joinLines(fset *token.FileSet, start, end token.Pos, src []byte, file *ast.File, _ *types.Package, _ *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) { - itemType, items, comments, _, braceOpen, braceClose := findSplitJoinTarget(fset, file, src, start, end) +func joinLines(fset *token.FileSet, start, end token.Pos, src []byte, curFile cursor.Cursor, _ *types.Package, _ *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) { + itemType, items, comments, _, braceOpen, braceClose := findSplitJoinTarget(fset, curFile, src, start, end) if itemType == "" { return nil, nil, nil // no fix available } @@ -166,11 +167,14 @@ func processLines(fset *token.FileSet, items []ast.Node, comments []*ast.Comment } // findSplitJoinTarget returns the first curly bracket/parens that encloses the current cursor. -func findSplitJoinTarget(fset *token.FileSet, file *ast.File, src []byte, start, end token.Pos) (itemType string, items []ast.Node, comments []*ast.CommentGroup, indent string, open, close token.Pos) { +func findSplitJoinTarget(fset *token.FileSet, curFile cursor.Cursor, src []byte, start, end token.Pos) (itemType string, items []ast.Node, comments []*ast.CommentGroup, indent string, open, close token.Pos) { isCursorInside := func(nodePos, nodeEnd token.Pos) bool { return nodePos < start && end < nodeEnd } + file := curFile.Node().(*ast.File) + // TODO(adonovan): simplify, using Cursor. + findTarget := func() (targetType string, target ast.Node, open, close token.Pos) { path, _ := astutil.PathEnclosingInterval(file, start, end) for _, node := range path { diff --git a/gopls/internal/golang/stub.go b/gopls/internal/golang/stub.go index a04a82988c5..c85080f8a0c 100644 --- a/gopls/internal/golang/stub.go +++ b/gopls/internal/golang/stub.go @@ -31,8 +31,7 @@ import ( // methods of the concrete type that is assigned to an interface type // at the cursor position. 
func stubMissingInterfaceMethodsFixer(ctx context.Context, snapshot *cache.Snapshot, pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) { - nodes, _ := astutil.PathEnclosingInterval(pgf.File, start, end) - si := stubmethods.GetIfaceStubInfo(pkg.FileSet(), pkg.TypesInfo(), nodes, start) + si := stubmethods.GetIfaceStubInfo(pkg.FileSet(), pkg.TypesInfo(), pgf, start, end) if si == nil { return nil, nil, fmt.Errorf("nil interface request") } @@ -43,8 +42,7 @@ func stubMissingInterfaceMethodsFixer(ctx context.Context, snapshot *cache.Snaps // method that the user may want to generate based on CallExpr // at the cursor position. func stubMissingCalledFunctionFixer(ctx context.Context, snapshot *cache.Snapshot, pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) { - nodes, _ := astutil.PathEnclosingInterval(pgf.File, start, end) - si := stubmethods.GetCallStubInfo(pkg.FileSet(), pkg.TypesInfo(), nodes, start) + si := stubmethods.GetCallStubInfo(pkg.FileSet(), pkg.TypesInfo(), pgf, start, end) if si == nil { return nil, nil, fmt.Errorf("invalid type request") } diff --git a/gopls/internal/golang/stubmethods/stubcalledfunc.go b/gopls/internal/golang/stubmethods/stubcalledfunc.go index 1b1b6aba7de..b4b59340d83 100644 --- a/gopls/internal/golang/stubmethods/stubcalledfunc.go +++ b/gopls/internal/golang/stubmethods/stubcalledfunc.go @@ -13,6 +13,8 @@ import ( "strings" "unicode" + "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/gopls/internal/cache/parsego" "golang.org/x/tools/gopls/internal/util/typesutil" "golang.org/x/tools/internal/typesinternal" ) @@ -34,7 +36,9 @@ type CallStubInfo struct { // GetCallStubInfo extracts necessary information to generate a method definition from // a CallExpr. -func GetCallStubInfo(fset *token.FileSet, info *types.Info, path []ast.Node, pos token.Pos) *CallStubInfo { +func GetCallStubInfo(fset *token.FileSet, info *types.Info, pgf *parsego.File, start, end token.Pos) *CallStubInfo { + // TODO(adonovan): simplify, using pgf.Cursor. + path, _ := astutil.PathEnclosingInterval(pgf.File, start, end) for i, n := range path { switch n := n.(type) { case *ast.CallExpr: diff --git a/gopls/internal/golang/stubmethods/stubmethods.go b/gopls/internal/golang/stubmethods/stubmethods.go index f380f5b984d..a060993b1ab 100644 --- a/gopls/internal/golang/stubmethods/stubmethods.go +++ b/gopls/internal/golang/stubmethods/stubmethods.go @@ -15,8 +15,10 @@ import ( "go/types" "strings" + "golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/internal/typesinternal" + "golang.org/x/tools/gopls/internal/cache/parsego" "golang.org/x/tools/gopls/internal/util/bug" "golang.org/x/tools/gopls/internal/util/typesutil" ) @@ -49,7 +51,12 @@ type IfaceStubInfo struct { // function call. This is essentially what the refactor/satisfy does, // more generally. Refactor to share logic, after auditing 'satisfy' // for safety on ill-typed code. -func GetIfaceStubInfo(fset *token.FileSet, info *types.Info, path []ast.Node, pos token.Pos) *IfaceStubInfo { +func GetIfaceStubInfo(fset *token.FileSet, info *types.Info, pgf *parsego.File, pos, end token.Pos) *IfaceStubInfo { + // TODO(adonovan): simplify, using Cursor: + // curErr, _ := pgf.Cursor.FindPos(pos, end) + // for cur := range curErr.Ancestors() { + // switch n := cur.Node().(type) {... 
+ path, _ := astutil.PathEnclosingInterval(pgf.File, pos, end) for _, n := range path { switch n := n.(type) { case *ast.ValueSpec: diff --git a/gopls/internal/golang/undeclared.go b/gopls/internal/golang/undeclared.go index 0615386e9bf..f9331348f47 100644 --- a/gopls/internal/golang/undeclared.go +++ b/gopls/internal/golang/undeclared.go @@ -17,6 +17,7 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/gopls/internal/util/typesutil" + "golang.org/x/tools/internal/astutil/cursor" "golang.org/x/tools/internal/typesinternal" ) @@ -69,8 +70,10 @@ func undeclaredFixTitle(path []ast.Node, errMsg string) string { } // createUndeclared generates a suggested declaration for an undeclared variable or function. -func createUndeclared(fset *token.FileSet, start, end token.Pos, content []byte, file *ast.File, pkg *types.Package, info *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) { +func createUndeclared(fset *token.FileSet, start, end token.Pos, content []byte, curFile cursor.Cursor, pkg *types.Package, info *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) { pos := start // don't use the end + file := curFile.Node().(*ast.File) + // TODO(adonovan): simplify, using Cursor. path, _ := astutil.PathEnclosingInterval(file, pos, pos) if len(path) < 2 { return nil, nil, fmt.Errorf("no expression found") diff --git a/gopls/internal/util/typesutil/typesutil.go b/gopls/internal/util/typesutil/typesutil.go index 79042a24901..4b5c5e7fd4f 100644 --- a/gopls/internal/util/typesutil/typesutil.go +++ b/gopls/internal/util/typesutil/typesutil.go @@ -42,6 +42,8 @@ func FormatTypeParams(tparams *types.TypeParamList) string { // the hole that must be filled by EXPR has type (string, int). // // It returns nil on failure. +// +// TODO(adonovan): simplify using Cursor. func TypesFromContext(info *types.Info, path []ast.Node, pos token.Pos) []types.Type { anyType := types.Universe.Lookup("any").Type() var typs []types.Type From cd01e86527e7f9c4cb689b66c5313bf739674c09 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Wed, 19 Feb 2025 12:22:56 -0500 Subject: [PATCH 040/270] gopls/internal/golang: make singleFileFixer like fixer In the past the singleFileFixer has two roles: to simplify the signature for fixers of a single file, and to allow them to be expressed only in terms of go/{ast,types} data types, allowing fixers to be more loosely coupled to gopls. But that latter role seems unimportant now, so this CL simplifies the two functions to make them more alike. 
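For illustration, the before/after shape of the adapter, copied in sketch form
from the fix.go hunks below (this is a summary of the change, not additional code):

	// Before: loosely coupled to gopls, so the adapter had to unpack the package.
	//	type singleFileFixer func(fset *token.FileSet, start, end token.Pos, src []byte,
	//		curFile cursor.Cursor, pkg *types.Package, info *types.Info) (*token.FileSet, *analysis.SuggestedFix, error)

	// After: same parameters as fixer, minus the context and snapshot it does not need.
	type singleFileFixer func(pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*token.FileSet, *analysis.SuggestedFix, error)

	func singleFile(fixer1 singleFileFixer) fixer {
		return func(_ context.Context, _ *cache.Snapshot, pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) {
			return fixer1(pkg, pgf, start, end)
		}
	}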
Change-Id: I42ee9fd275e344fafee0b27b9861f8f599f89e3e Reviewed-on: https://go-review.googlesource.com/c/tools/+/650641 Reviewed-by: Robert Findley Auto-Submit: Alan Donovan LUCI-TryBot-Result: Go LUCI --- .../analysis/fillstruct/fillstruct.go | 21 ++++----- gopls/internal/golang/extract.go | 43 ++++++++++++------- gopls/internal/golang/fix.go | 18 +++----- gopls/internal/golang/invertifcondition.go | 12 ++++-- gopls/internal/golang/lines.go | 17 +++++--- gopls/internal/golang/undeclared.go | 19 +++++--- 6 files changed, 76 insertions(+), 54 deletions(-) diff --git a/gopls/internal/analysis/fillstruct/fillstruct.go b/gopls/internal/analysis/fillstruct/fillstruct.go index 641b98e6fa7..62f7d77f58e 100644 --- a/gopls/internal/analysis/fillstruct/fillstruct.go +++ b/gopls/internal/analysis/fillstruct/fillstruct.go @@ -25,10 +25,11 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/cache/parsego" "golang.org/x/tools/gopls/internal/fuzzy" "golang.org/x/tools/gopls/internal/util/safetoken" "golang.org/x/tools/internal/analysisinternal" - "golang.org/x/tools/internal/astutil/cursor" "golang.org/x/tools/internal/typeparams" "golang.org/x/tools/internal/typesinternal" ) @@ -130,15 +131,15 @@ const FixCategory = "fillstruct" // recognized by gopls ApplyFix // SuggestedFix computes the suggested fix for the kinds of // diagnostics produced by the Analyzer above. -func SuggestedFix(fset *token.FileSet, start, end token.Pos, content []byte, curFile cursor.Cursor, pkg *types.Package, info *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) { - if info == nil { - return nil, nil, fmt.Errorf("nil types.Info") - } - - pos := start // don't use the end - +func SuggestedFix(cpkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) { + var ( + fset = cpkg.FileSet() + pkg = cpkg.Types() + info = cpkg.TypesInfo() + pos = start // don't use end + ) // TODO(adonovan): simplify, using Cursor. - file := curFile.Node().(*ast.File) + file := pgf.Cursor.Node().(*ast.File) path, _ := astutil.PathEnclosingInterval(file, pos, pos) if len(path) == 0 { return nil, nil, fmt.Errorf("no enclosing ast.Node") @@ -235,7 +236,7 @@ func SuggestedFix(fset *token.FileSet, start, end token.Pos, content []byte, cur } // Find the line on which the composite literal is declared. - split := bytes.Split(content, []byte("\n")) + split := bytes.Split(pgf.Src, []byte("\n")) lineNumber := safetoken.StartPosition(fset, expr.Lbrace).Line firstLine := split[lineNumber-1] // lines are 1-indexed diff --git a/gopls/internal/golang/extract.go b/gopls/internal/golang/extract.go index 3d2b880db2d..f73e772e676 100644 --- a/gopls/internal/golang/extract.go +++ b/gopls/internal/golang/extract.go @@ -20,6 +20,8 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/cache/parsego" goplsastutil "golang.org/x/tools/gopls/internal/util/astutil" "golang.org/x/tools/gopls/internal/util/bug" "golang.org/x/tools/gopls/internal/util/safetoken" @@ -29,13 +31,13 @@ import ( ) // extractVariable implements the refactor.extract.{variable,constant} CodeAction command. 
-func extractVariable(fset *token.FileSet, start, end token.Pos, src []byte, curFile cursor.Cursor, _ *types.Package, info *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) { - return extractExprs(fset, start, end, src, curFile, info, false) +func extractVariable(pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) { + return extractExprs(pkg, pgf, start, end, false) } // extractVariableAll implements the refactor.extract.{variable,constant}-all CodeAction command. -func extractVariableAll(fset *token.FileSet, start, end token.Pos, src []byte, curFile cursor.Cursor, _ *types.Package, info *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) { - return extractExprs(fset, start, end, src, curFile, info, true) +func extractVariableAll(pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) { + return extractExprs(pkg, pgf, start, end, true) } // extractExprs replaces occurrence(s) of a specified expression within the same function @@ -44,11 +46,15 @@ func extractVariableAll(fset *token.FileSet, start, end token.Pos, src []byte, c // // The new variable/constant is declared as close as possible to the first found expression // within the deepest common scope accessible to all candidate occurrences. -func extractExprs(fset *token.FileSet, start, end token.Pos, src []byte, curFile cursor.Cursor, info *types.Info, all bool) (*token.FileSet, *analysis.SuggestedFix, error) { - file := curFile.Node().(*ast.File) +func extractExprs(pkg *cache.Package, pgf *parsego.File, start, end token.Pos, all bool) (*token.FileSet, *analysis.SuggestedFix, error) { + var ( + fset = pkg.FileSet() + info = pkg.TypesInfo() + file = pgf.File + ) // TODO(adonovan): simplify, using Cursor. tokFile := fset.File(file.FileStart) - exprs, err := canExtractVariable(info, curFile, start, end, all) + exprs, err := canExtractVariable(info, pgf.Cursor, start, end, all) if err != nil { return nil, nil, fmt.Errorf("cannot extract: %v", err) } @@ -157,7 +163,7 @@ Outer: return nil, nil, fmt.Errorf("cannot find location to insert extraction: %v", err) } // Within function: compute appropriate statement indentation. - indent, err := calculateIndentation(src, tokFile, before) + indent, err := calculateIndentation(pgf.Src, tokFile, before) if err != nil { return nil, nil, err } @@ -576,13 +582,13 @@ type returnVariable struct { } // extractMethod refactors the selected block of code into a new method. -func extractMethod(fset *token.FileSet, start, end token.Pos, src []byte, curFile cursor.Cursor, pkg *types.Package, info *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) { - return extractFunctionMethod(fset, start, end, src, curFile, pkg, info, true) +func extractMethod(pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) { + return extractFunctionMethod(pkg, pgf, start, end, true) } // extractFunction refactors the selected block of code into a new function. 
-func extractFunction(fset *token.FileSet, start, end token.Pos, src []byte, curFile cursor.Cursor, pkg *types.Package, info *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) { - return extractFunctionMethod(fset, start, end, src, curFile, pkg, info, false) +func extractFunction(pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) { + return extractFunctionMethod(pkg, pgf, start, end, false) } // extractFunctionMethod refactors the selected block of code into a new function/method. @@ -593,19 +599,26 @@ func extractFunction(fset *token.FileSet, start, end token.Pos, src []byte, curF // and return values of the extracted function/method. Lastly, we construct the call // of the function/method and insert this call as well as the extracted function/method into // their proper locations. -func extractFunctionMethod(fset *token.FileSet, start, end token.Pos, src []byte, curFile cursor.Cursor, pkg *types.Package, info *types.Info, isMethod bool) (*token.FileSet, *analysis.SuggestedFix, error) { +func extractFunctionMethod(cpkg *cache.Package, pgf *parsego.File, start, end token.Pos, isMethod bool) (*token.FileSet, *analysis.SuggestedFix, error) { + var ( + fset = cpkg.FileSet() + pkg = cpkg.Types() + info = cpkg.TypesInfo() + src = pgf.Src + ) + errorPrefix := "extractFunction" if isMethod { errorPrefix = "extractMethod" } - file := curFile.Node().(*ast.File) + file := pgf.Cursor.Node().(*ast.File) // TODO(adonovan): simplify, using Cursor. tok := fset.File(file.FileStart) if tok == nil { return nil, nil, bug.Errorf("no file for position") } - p, ok, methodOk, err := canExtractFunction(tok, start, end, src, curFile) + p, ok, methodOk, err := canExtractFunction(tok, start, end, src, pgf.Cursor) if (!ok && !isMethod) || (!methodOk && isMethod) { return nil, nil, fmt.Errorf("%s: cannot extract %s: %v", errorPrefix, safetoken.StartPosition(fset, start), err) diff --git a/gopls/internal/golang/fix.go b/gopls/internal/golang/fix.go index 2c14d09c218..dbd83ef071f 100644 --- a/gopls/internal/golang/fix.go +++ b/gopls/internal/golang/fix.go @@ -8,7 +8,6 @@ import ( "context" "fmt" "go/token" - "go/types" "golang.org/x/tools/go/analysis" "golang.org/x/tools/gopls/internal/analysis/embeddirective" @@ -19,7 +18,6 @@ import ( "golang.org/x/tools/gopls/internal/file" "golang.org/x/tools/gopls/internal/protocol" "golang.org/x/tools/gopls/internal/util/bug" - "golang.org/x/tools/internal/astutil/cursor" "golang.org/x/tools/internal/imports" ) @@ -41,18 +39,14 @@ import ( // A fixer may return (nil, nil) if no fix is available. type fixer func(ctx context.Context, s *cache.Snapshot, pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) -// A singleFileFixer is a Fixer that inspects only a single file, -// and does not depend on data types from the cache package. -// -// TODO(adonovan): move fillstruct and undeclaredname into this -// package, so we can remove the import restriction and push -// the singleFile wrapper down into each singleFileFixer? -type singleFileFixer func(fset *token.FileSet, start, end token.Pos, src []byte, curFile cursor.Cursor, pkg *types.Package, info *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) +// A singleFileFixer is a [fixer] that inspects only a single file. +type singleFileFixer func(pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) -// singleFile adapts a single-file fixer to a Fixer. 
+// singleFile adapts a [singleFileFixer] to a [fixer] +// by discarding the snapshot and the context it needs. func singleFile(fixer1 singleFileFixer) fixer { - return func(ctx context.Context, snapshot *cache.Snapshot, pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) { - return fixer1(pkg.FileSet(), start, end, pgf.Src, pgf.Cursor, pkg.Types(), pkg.TypesInfo()) + return func(_ context.Context, _ *cache.Snapshot, pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) { + return fixer1(pkg, pgf, start, end) } } diff --git a/gopls/internal/golang/invertifcondition.go b/gopls/internal/golang/invertifcondition.go index b26618ebf93..012278df79e 100644 --- a/gopls/internal/golang/invertifcondition.go +++ b/gopls/internal/golang/invertifcondition.go @@ -8,18 +8,24 @@ import ( "fmt" "go/ast" "go/token" - "go/types" "strings" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/cache/parsego" "golang.org/x/tools/gopls/internal/util/safetoken" "golang.org/x/tools/internal/astutil/cursor" ) // invertIfCondition is a singleFileFixFunc that inverts an if/else statement -func invertIfCondition(fset *token.FileSet, start, end token.Pos, src []byte, curFile cursor.Cursor, _ *types.Package, _ *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) { - ifStatement, _, err := canInvertIfCondition(curFile, start, end) +func invertIfCondition(pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) { + var ( + fset = pkg.FileSet() + src = pgf.Src + ) + + ifStatement, _, err := canInvertIfCondition(pgf.Cursor, start, end) if err != nil { return nil, nil, err } diff --git a/gopls/internal/golang/lines.go b/gopls/internal/golang/lines.go index f208e33e0c3..cb161671726 100644 --- a/gopls/internal/golang/lines.go +++ b/gopls/internal/golang/lines.go @@ -12,13 +12,14 @@ import ( "bytes" "go/ast" "go/token" - "go/types" "slices" "sort" "strings" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/cache/parsego" "golang.org/x/tools/gopls/internal/util/safetoken" "golang.org/x/tools/internal/astutil/cursor" ) @@ -85,23 +86,25 @@ func canSplitJoinLines(items []ast.Node, comments []*ast.CommentGroup) bool { } // splitLines is a singleFile fixer. -func splitLines(fset *token.FileSet, start, end token.Pos, src []byte, curFile cursor.Cursor, _ *types.Package, _ *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) { - itemType, items, comments, indent, braceOpen, braceClose := findSplitJoinTarget(fset, curFile, src, start, end) +func splitLines(pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) { + fset := pkg.FileSet() + itemType, items, comments, indent, braceOpen, braceClose := findSplitJoinTarget(fset, pgf.Cursor, pgf.Src, start, end) if itemType == "" { return nil, nil, nil // no fix available } - return fset, processLines(fset, items, comments, src, braceOpen, braceClose, ",\n", "\n", ",\n"+indent, indent+"\t"), nil + return fset, processLines(fset, items, comments, pgf.Src, braceOpen, braceClose, ",\n", "\n", ",\n"+indent, indent+"\t"), nil } // joinLines is a singleFile fixer. 
-func joinLines(fset *token.FileSet, start, end token.Pos, src []byte, curFile cursor.Cursor, _ *types.Package, _ *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) { - itemType, items, comments, _, braceOpen, braceClose := findSplitJoinTarget(fset, curFile, src, start, end) +func joinLines(pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) { + fset := pkg.FileSet() + itemType, items, comments, _, braceOpen, braceClose := findSplitJoinTarget(fset, pgf.Cursor, pgf.Src, start, end) if itemType == "" { return nil, nil, nil // no fix available } - return fset, processLines(fset, items, comments, src, braceOpen, braceClose, ", ", "", "", ""), nil + return fset, processLines(fset, items, comments, pgf.Src, braceOpen, braceClose, ", ", "", "", ""), nil } // processLines is the common operation for both split and join lines because this split/join operation is diff --git a/gopls/internal/golang/undeclared.go b/gopls/internal/golang/undeclared.go index f9331348f47..9df8e2bfd2e 100644 --- a/gopls/internal/golang/undeclared.go +++ b/gopls/internal/golang/undeclared.go @@ -16,8 +16,9 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/cache/parsego" "golang.org/x/tools/gopls/internal/util/typesutil" - "golang.org/x/tools/internal/astutil/cursor" "golang.org/x/tools/internal/typesinternal" ) @@ -70,9 +71,13 @@ func undeclaredFixTitle(path []ast.Node, errMsg string) string { } // createUndeclared generates a suggested declaration for an undeclared variable or function. -func createUndeclared(fset *token.FileSet, start, end token.Pos, content []byte, curFile cursor.Cursor, pkg *types.Package, info *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) { - pos := start // don't use the end - file := curFile.Node().(*ast.File) +func createUndeclared(pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) { + var ( + fset = pkg.FileSet() + info = pkg.TypesInfo() + file = pgf.File + pos = start // don't use end + ) // TODO(adonovan): simplify, using Cursor. path, _ := astutil.PathEnclosingInterval(file, pos, pos) if len(path) < 2 { @@ -86,7 +91,7 @@ func createUndeclared(fset *token.FileSet, start, end token.Pos, content []byte, // Check for a possible call expression, in which case we should add a // new function declaration. if isCallPosition(path) { - return newFunctionDeclaration(path, file, pkg, info, fset) + return newFunctionDeclaration(path, file, pkg.Types(), info, fset) } var ( firstRef *ast.Ident // We should insert the new declaration before the first occurrence of the undefined ident. @@ -132,7 +137,7 @@ func createUndeclared(fset *token.FileSet, start, end token.Pos, content []byte, if err != nil { return nil, nil, fmt.Errorf("could not locate insertion point: %v", err) } - indent, err := calculateIndentation(content, fset.File(file.FileStart), insertBeforeStmt) + indent, err := calculateIndentation(pgf.Src, fset.File(file.FileStart), insertBeforeStmt) if err != nil { return nil, nil, err } @@ -141,7 +146,7 @@ func createUndeclared(fset *token.FileSet, start, end token.Pos, content []byte, // Default to 0. 
typs = []types.Type{types.Typ[types.Int]} } - expr, _ := typesinternal.ZeroExpr(typs[0], typesinternal.FileQualifier(file, pkg)) + expr, _ := typesinternal.ZeroExpr(typs[0], typesinternal.FileQualifier(file, pkg.Types())) assignStmt := &ast.AssignStmt{ Lhs: []ast.Expr{ast.NewIdent(ident.Name)}, Tok: token.DEFINE, From 0b693ed05c20dd39478c8afb542bb4473dde7ba7 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Wed, 19 Feb 2025 14:22:09 -0500 Subject: [PATCH 041/270] internal/astutil/cursor: FindPos: don't assume Files are in Pos order + test Change-Id: I75c4c3b7789feda874da7644bda8ba93f87f5efb Reviewed-on: https://go-review.googlesource.com/c/tools/+/650645 Reviewed-by: Robert Findley Auto-Submit: Alan Donovan LUCI-TryBot-Result: Go LUCI --- internal/astutil/cursor/cursor.go | 18 ++++++++++++------ internal/astutil/cursor/cursor_test.go | 26 +++++++++++++++++++++++++- 2 files changed, 37 insertions(+), 7 deletions(-) diff --git a/internal/astutil/cursor/cursor.go b/internal/astutil/cursor/cursor.go index 5ed177c9f3d..83a47e09058 100644 --- a/internal/astutil/cursor/cursor.go +++ b/internal/astutil/cursor/cursor.go @@ -428,15 +428,21 @@ func (c Cursor) FindPos(start, end token.Pos) (Cursor, bool) { ev := events[i] if ev.index > i { // push? n := ev.node - var nodeStart, nodeEnd token.Pos + var nodeEnd token.Pos if file, ok := n.(*ast.File); ok { - nodeStart, nodeEnd = file.FileStart, file.FileEnd + nodeEnd = file.FileEnd + // Note: files may be out of Pos order. + if file.FileStart > start { + i = ev.index // disjoint, after; skip to next file + continue + } } else { - nodeStart, nodeEnd = n.Pos(), n.End() - } - if nodeStart > start { - break // disjoint, after; stop + nodeEnd = n.End() + if n.Pos() > start { + break // disjoint, after; stop + } } + // Inv: node.{Pos,FileStart} <= start if end <= nodeEnd { // node fully contains target range best = i diff --git a/internal/astutil/cursor/cursor_test.go b/internal/astutil/cursor/cursor_test.go index 01f791f2833..67e91544c4d 100644 --- a/internal/astutil/cursor/cursor_test.go +++ b/internal/astutil/cursor/cursor_test.go @@ -14,6 +14,7 @@ import ( "go/token" "iter" "log" + "math/rand" "path/filepath" "reflect" "slices" @@ -332,8 +333,31 @@ func TestCursor_FindNode(t *testing.T) { } } } +} + +// TestCursor_FindPos_order ensures that FindPos does not assume files are in Pos order. +func TestCursor_FindPos_order(t *testing.T) { + // Pick an arbitrary decl. + target := netFiles[7].Decls[0] + + // Find the target decl by its position. + cur, ok := cursor.Root(netInspect).FindPos(target.Pos(), target.End()) + if !ok || cur.Node() != target { + t.Fatalf("unshuffled: FindPos(%T) = (%v, %t)", target, cur, ok) + } - // TODO(adonovan): FindPos needs a test (not just a benchmark). + // Shuffle the files out of Pos order. + files := slices.Clone(netFiles) + rand.Shuffle(len(files), func(i, j int) { + files[i], files[j] = files[j], files[i] + }) + + // Find it again. + inspect := inspector.New(files) + cur, ok = cursor.Root(inspect).FindPos(target.Pos(), target.End()) + if !ok || cur.Node() != target { + t.Fatalf("shuffled: FindPos(%T) = (%v, %t)", target, cur, ok) + } } func TestCursor_Edge(t *testing.T) { From 107c5b255f0e491b4e9f5ce6d3be554e07a38caa Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Wed, 19 Feb 2025 15:28:28 -0500 Subject: [PATCH 042/270] gopls/internal/analysis/modernize: disable unsound maps.Clone fix The fix is sound only if the operand is provably non-nil. My word, these simple cases are subtle. 
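A minimal, self-contained example of the behavioral difference (illustrative
only; not part of the change): the loop-based code always yields a non-nil map,
whereas maps.Clone preserves nilness.

	package main

	import "maps"

	func main() {
		var src map[int]string // nil

		// What the original make+loop (and maps.Copy) produces: a non-nil empty map.
		dst1 := make(map[int]string, len(src))
		maps.Copy(dst1, src)
		println(dst1 == nil) // false

		// What the maps.Clone rewrite would produce: nil, since Clone preserves nilness.
		dst2 := maps.Clone(src)
		println(dst2 == nil) // true
	}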
Fixes golang/go#71844

Change-Id: I5cf414cae41bf9b6445b7a4a7175dbb9c292e4b8
Reviewed-on: https://go-review.googlesource.com/c/tools/+/650756
Reviewed-by: Jonathan Amsterdam
LUCI-TryBot-Result: Go LUCI
Auto-Submit: Alan Donovan
---
 gopls/internal/analysis/modernize/maps.go     | 14 ++++++-
 .../testdata/src/mapsloop/mapsloop.go         | 30 ++++++++++-----
 .../testdata/src/mapsloop/mapsloop.go.golden  | 37 ++++++++++++++-----
 .../testdata/src/mapsloop/mapsloop_dot.go     |  6 ++-
 .../src/mapsloop/mapsloop_dot.go.golden       |  8 ++--
 5 files changed, 69 insertions(+), 26 deletions(-)

diff --git a/gopls/internal/analysis/modernize/maps.go b/gopls/internal/analysis/modernize/maps.go
index 91de659d107..dad329477cd 100644
--- a/gopls/internal/analysis/modernize/maps.go
+++ b/gopls/internal/analysis/modernize/maps.go
@@ -32,7 +32,7 @@ import (
 //
 //	maps.Copy(m, x)	(x is map)
 //	maps.Insert(m, x)	(x is iter.Seq2)
-//	m = maps.Clone(x)	(x is map, m is a new map)
+//	m = maps.Clone(x)	(x is a non-nil map, m is a new map)
 //	m = maps.Collect(x)	(x is iter.Seq2, m is a new map)
 //
 // A map is newly created if the preceding statement has one of these
@@ -77,6 +77,8 @@ func mapsloop(pass *analysis.Pass) {
 		// Is the preceding statement of the form
 		//   m = make(M) or M{}
 		// and can we replace its RHS with slices.{Clone,Collect}?
+		//
+		// Beware: if x may be nil, we cannot use Clone as it preserves nilness.
 		var mrhs ast.Expr // make(M) or M{}, or nil
 		if curPrev, ok := curRange.PrevSibling(); ok {
 			if assign, ok := curPrev.Node().(*ast.AssignStmt); ok &&
@@ -122,6 +124,16 @@
 					mrhs = rhs
 				}
 			}
+
+			// Temporarily disable the transformation to the
+			// (nil-preserving) maps.Clone until we can prove
+			// that x is non-nil. This is rarely possible,
+			// and may require control flow analysis
+			// (e.g. a dominating "if len(x)" check).
+			// See #71844.
+			if xmap {
+				mrhs = nil
+			}
 		}
 	}
 }
diff --git a/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop.go b/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop.go
index e4e6963dbae..68ff9154ffd 100644
--- a/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop.go
+++ b/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop.go
@@ -27,30 +27,34 @@ func useCopyGeneric[K comparable, V any, M ~map[K]V](dst, src M) {
 	}
 }
 
-func useClone(src map[int]string) {
-	// Replace make(...) by maps.Clone.
+func useCopyNotClone(src map[int]string) {
+	// Clone is tempting but wrong when src may be nil; see #71844.
+
+	// Replace make(...) by maps.Copy.
 	dst := make(map[int]string, len(src))
 	for key, value := range src {
-		dst[key] = value // want "Replace m\\[k\\]=v loop with maps.Clone"
+		dst[key] = value // want "Replace m\\[k\\]=v loop with maps.Copy"
 	}
 
 	dst = map[int]string{}
 	for key, value := range src {
-		dst[key] = value // want "Replace m\\[k\\]=v loop with maps.Clone"
+		dst[key] = value // want "Replace m\\[k\\]=v loop with maps.Copy"
 	}
 	println(dst)
 }
 
-func useCloneParen(src map[int]string) {
+func useCopyParen(src map[int]string) {
+	// Clone is tempting but wrong when src may be nil; see #71844.
+
 	// Replace (make)(...) by maps.Clone.
dst := (make)(map[int]string, len(src)) for key, value := range src { - dst[key] = value // want "Replace m\\[k\\]=v loop with maps.Clone" + dst[key] = value // want "Replace m\\[k\\]=v loop with maps.Copy" } dst = (map[int]string{}) for key, value := range src { - dst[key] = value // want "Replace m\\[k\\]=v loop with maps.Clone" + dst[key] = value // want "Replace m\\[k\\]=v loop with maps.Copy" } println(dst) } @@ -74,32 +78,38 @@ func useCopy_typesDiffer2(src map[int]string) { } func useClone_typesDiffer3(src map[int]string) { + // Clone is tempting but wrong when src may be nil; see #71844. + // Replace loop and make(...) as maps.Clone(src) returns map[int]string // which is assignable to M. var dst M dst = make(M, len(src)) for key, value := range src { - dst[key] = value // want "Replace m\\[k\\]=v loop with maps.Clone" + dst[key] = value // want "Replace m\\[k\\]=v loop with maps.Copy" } println(dst) } func useClone_typesDiffer4(src map[int]string) { + // Clone is tempting but wrong when src may be nil; see #71844. + // Replace loop and make(...) as maps.Clone(src) returns map[int]string // which is assignable to M. var dst M dst = make(M, len(src)) for key, value := range src { - dst[key] = value // want "Replace m\\[k\\]=v loop with maps.Clone" + dst[key] = value // want "Replace m\\[k\\]=v loop with maps.Copy" } println(dst) } func useClone_generic[Map ~map[K]V, K comparable, V any](src Map) { + // Clone is tempting but wrong when src may be nil; see #71844. + // Replace loop and make(...) by maps.Clone dst := make(Map, len(src)) for key, value := range src { - dst[key] = value // want "Replace m\\[k\\]=v loop with maps.Clone" + dst[key] = value // want "Replace m\\[k\\]=v loop with maps.Copy" } println(dst) } diff --git a/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop.go.golden b/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop.go.golden index 70b9a28ed5b..be189673d9a 100644 --- a/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop.go.golden +++ b/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop.go.golden @@ -23,19 +23,27 @@ func useCopyGeneric[K comparable, V any, M ~map[K]V](dst, src M) { maps.Copy(dst, src) } -func useClone(src map[int]string) { - // Replace make(...) by maps.Clone. - dst := maps.Clone(src) +func useCopyNotClone(src map[int]string) { + // Clone is tempting but wrong when src may be nil; see #71844. - dst = maps.Clone(src) + // Replace make(...) by maps.Copy. + dst := make(map[int]string, len(src)) + maps.Copy(dst, src) + + dst = map[int]string{} + maps.Copy(dst, src) println(dst) } -func useCloneParen(src map[int]string) { +func useCopyParen(src map[int]string) { + // Clone is tempting but wrong when src may be nil; see #71844. + // Replace (make)(...) by maps.Clone. - dst := maps.Clone(src) + dst := (make)(map[int]string, len(src)) + maps.Copy(dst, src) - dst = maps.Clone(src) + dst = (map[int]string{}) + maps.Copy(dst, src) println(dst) } @@ -54,24 +62,33 @@ func useCopy_typesDiffer2(src map[int]string) { } func useClone_typesDiffer3(src map[int]string) { + // Clone is tempting but wrong when src may be nil; see #71844. + // Replace loop and make(...) as maps.Clone(src) returns map[int]string // which is assignable to M. var dst M - dst = maps.Clone(src) + dst = make(M, len(src)) + maps.Copy(dst, src) println(dst) } func useClone_typesDiffer4(src map[int]string) { + // Clone is tempting but wrong when src may be nil; see #71844. + // Replace loop and make(...) 
as maps.Clone(src) returns map[int]string // which is assignable to M. var dst M - dst = maps.Clone(src) + dst = make(M, len(src)) + maps.Copy(dst, src) println(dst) } func useClone_generic[Map ~map[K]V, K comparable, V any](src Map) { + // Clone is tempting but wrong when src may be nil; see #71844. + // Replace loop and make(...) by maps.Clone - dst := maps.Clone(src) + dst := make(Map, len(src)) + maps.Copy(dst, src) println(dst) } diff --git a/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop_dot.go b/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop_dot.go index c33d43e23ad..ae28f11afda 100644 --- a/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop_dot.go +++ b/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop_dot.go @@ -14,10 +14,12 @@ func useCopyDot(dst, src map[int]string) { } func useCloneDot(src map[int]string) { - // Replace make(...) by maps.Clone. + // Clone is tempting but wrong when src may be nil; see #71844. + + // Replace make(...) by maps.Copy. dst := make(map[int]string, len(src)) for key, value := range src { - dst[key] = value // want "Replace m\\[k\\]=v loop with maps.Clone" + dst[key] = value // want "Replace m\\[k\\]=v loop with maps.Copy" } println(dst) } diff --git a/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop_dot.go.golden b/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop_dot.go.golden index d6a30537645..e992314cf56 100644 --- a/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop_dot.go.golden +++ b/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop_dot.go.golden @@ -12,8 +12,10 @@ func useCopyDot(dst, src map[int]string) { } func useCloneDot(src map[int]string) { - // Replace make(...) by maps.Clone. - dst := Clone(src) + // Clone is tempting but wrong when src may be nil; see #71844. + + // Replace make(...) by maps.Copy. + dst := make(map[int]string, len(src)) + Copy(dst, src) println(dst) } - From 99337ebe7b90918701a41932abf121600b972e34 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Wed, 19 Feb 2025 17:40:54 -0500 Subject: [PATCH 043/270] x/tools: modernize interface{} -> any Produced with a patched version of modernize containing only the efaceany pass: $ go run ./gopls/internal/analysis/modernize/cmd/modernize/main.go -test -fix ./... ./gopls/... This is very safe and forms the bulk of the modernize diff, isolating the other changes for ease of review. 
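For readers unfamiliar with the rewrite, a minimal self-contained example of
what the efaceany pass does (any has been an alias for interface{} since
Go 1.18, so the change never affects behavior):

	package main

	import "fmt"

	// Before: func printAll(args ...interface{})
	func printAll(args ...any) {
		for _, a := range args {
			fmt.Println(a)
		}
	}

	func main() {
		printAll(1, "two", 3.5)
	}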
Change-Id: Iec9352bac5a639a5c03368427531c7842c6e9ad0 Reviewed-on: https://go-review.googlesource.com/c/tools/+/650759 LUCI-TryBot-Result: Go LUCI Reviewed-by: Robert Findley Auto-Submit: Alan Donovan --- blog/blog.go | 2 +- container/intsets/sparse.go | 2 +- go/analysis/analysis.go | 8 +- go/analysis/analysistest/analysistest.go | 2 +- go/analysis/analysistest/analysistest_test.go | 2 +- go/analysis/internal/analysisflags/flags.go | 10 +- go/analysis/internal/checker/checker_test.go | 6 +- go/analysis/internal/checker/start_test.go | 2 +- go/analysis/internal/internal.go | 2 +- .../internal/versiontest/version_test.go | 2 +- go/analysis/multichecker/multichecker_test.go | 2 +- go/analysis/passes/appends/appends.go | 2 +- go/analysis/passes/asmdecl/asmdecl.go | 6 +- go/analysis/passes/buildssa/buildssa.go | 2 +- go/analysis/passes/buildtag/buildtag.go | 2 +- go/analysis/passes/cgocall/cgocall.go | 2 +- go/analysis/passes/composite/composite.go | 2 +- go/analysis/passes/copylock/copylock.go | 2 +- go/analysis/passes/ctrlflow/ctrlflow.go | 2 +- go/analysis/passes/directive/directive.go | 2 +- .../passes/fieldalignment/fieldalignment.go | 2 +- go/analysis/passes/findcall/findcall.go | 2 +- .../passes/framepointer/framepointer.go | 2 +- go/analysis/passes/ifaceassert/ifaceassert.go | 2 +- go/analysis/passes/inspect/inspect.go | 2 +- go/analysis/passes/loopclosure/loopclosure.go | 2 +- go/analysis/passes/lostcancel/lostcancel.go | 2 +- go/analysis/passes/nilfunc/nilfunc.go | 2 +- go/analysis/passes/nilness/nilness.go | 4 +- go/analysis/passes/pkgfact/pkgfact.go | 2 +- .../reflectvaluecompare.go | 2 +- go/analysis/passes/shadow/shadow.go | 2 +- go/analysis/passes/shift/shift.go | 2 +- go/analysis/passes/stdmethods/stdmethods.go | 2 +- go/analysis/passes/stringintconv/string.go | 2 +- go/analysis/passes/structtag/structtag.go | 2 +- .../testinggoroutine/testinggoroutine.go | 2 +- go/analysis/passes/tests/tests.go | 2 +- go/analysis/passes/unreachable/unreachable.go | 2 +- go/analysis/passes/unsafeptr/unsafeptr.go | 2 +- .../passes/unusedresult/unusedresult.go | 2 +- .../passes/usesgenerics/usesgenerics.go | 2 +- go/analysis/unitchecker/unitchecker.go | 4 +- go/analysis/unitchecker/unitchecker_test.go | 2 +- go/analysis/validate_test.go | 4 +- go/buildutil/fakecontext.go | 4 +- go/buildutil/tags.go | 2 +- go/callgraph/rta/rta.go | 4 +- go/callgraph/rta/rta_test.go | 2 +- go/callgraph/vta/internal/trie/builder.go | 20 +- go/callgraph/vta/internal/trie/op_test.go | 48 ++-- go/callgraph/vta/internal/trie/trie.go | 22 +- go/callgraph/vta/internal/trie/trie_test.go | 214 +++++++++--------- go/callgraph/vta/propagation.go | 2 +- go/callgraph/vta/vta_test.go | 2 +- go/expect/expect.go | 8 +- go/expect/expect_test.go | 6 +- go/expect/extract.go | 12 +- go/internal/cgo/cgo.go | 2 +- go/internal/gccgoimporter/parser.go | 32 +-- go/loader/loader.go | 2 +- go/packages/gopackages/main.go | 2 +- go/packages/overlay_test.go | 36 +-- go/packages/packages.go | 10 +- go/packages/packages_test.go | 104 ++++----- go/packages/packagestest/expect.go | 36 +-- go/packages/packagestest/expect_test.go | 2 +- go/packages/packagestest/export.go | 10 +- go/packages/packagestest/export_test.go | 26 +-- go/ssa/const_test.go | 6 +- go/ssa/interp/interp.go | 2 +- go/ssa/interp/map.go | 2 +- go/ssa/interp/value.go | 10 +- go/ssa/mode.go | 2 +- go/ssa/print.go | 2 +- go/ssa/sanity.go | 6 +- go/ssa/ssautil/load_test.go | 2 +- go/ssa/util.go | 2 +- .../analysis/deprecated/deprecated.go | 2 +- 
.../analysis/embeddirective/embeddirective.go | 2 +- .../analysis/fillreturns/fillreturns.go | 2 +- .../internal/analysis/nonewvars/nonewvars.go | 2 +- .../analysis/noresultvalues/noresultvalues.go | 2 +- .../simplifycompositelit.go | 2 +- .../analysis/simplifyrange/simplifyrange.go | 2 +- .../analysis/simplifyslice/simplifyslice.go | 2 +- gopls/internal/analysis/yield/yield.go | 2 +- gopls/internal/cache/analysis.go | 6 +- gopls/internal/cache/load.go | 2 +- gopls/internal/cache/mod.go | 10 +- gopls/internal/cache/mod_tidy.go | 2 +- gopls/internal/cache/mod_vuln.go | 2 +- gopls/internal/cache/parse_cache.go | 6 +- gopls/internal/cmd/cmd.go | 12 +- gopls/internal/cmd/integration_test.go | 4 +- gopls/internal/cmd/stats.go | 2 +- gopls/internal/cmd/symbols.go | 4 +- gopls/internal/debug/log/log.go | 2 +- gopls/internal/debug/rpc.go | 2 +- gopls/internal/debug/serve.go | 22 +- gopls/internal/debug/template_test.go | 2 +- gopls/internal/debug/trace.go | 2 +- gopls/internal/golang/rename_check.go | 2 +- gopls/internal/lsprpc/binder_test.go | 2 +- .../lsprpc/commandinterceptor_test.go | 10 +- gopls/internal/lsprpc/export_test.go | 4 +- gopls/internal/lsprpc/goenv.go | 2 +- gopls/internal/lsprpc/goenv_test.go | 18 +- gopls/internal/lsprpc/lsprpc.go | 12 +- gopls/internal/lsprpc/lsprpc_test.go | 6 +- gopls/internal/lsprpc/middleware_test.go | 2 +- gopls/internal/server/command.go | 2 +- gopls/internal/server/general.go | 4 +- gopls/internal/server/unimplemented.go | 2 +- gopls/internal/template/parse.go | 2 +- .../test/integration/bench/completion_test.go | 2 +- .../test/integration/bench/didchange_test.go | 2 +- gopls/internal/test/integration/env.go | 2 +- gopls/internal/test/integration/env_test.go | 2 +- .../internal/test/integration/expectation.go | 6 +- .../internal/test/integration/fake/client.go | 6 +- .../test/integration/fake/glob/glob.go | 2 +- gopls/internal/test/integration/options.go | 6 +- gopls/internal/util/bug/bug.go | 4 +- gopls/internal/vulncheck/vulntest/report.go | 2 +- internal/event/export/id.go | 2 +- internal/event/export/metric/exporter.go | 4 +- internal/event/export/ocagent/ocagent.go | 4 +- .../event/export/prometheus/prometheus.go | 2 +- internal/event/keys/keys.go | 6 +- internal/event/label/label.go | 6 +- internal/expect/expect.go | 2 +- internal/expect/expect_test.go | 2 +- internal/expect/extract.go | 4 +- internal/facts/facts.go | 2 +- internal/gcimporter/bimport.go | 2 +- internal/gcimporter/iexport.go | 6 +- internal/gcimporter/iimport.go | 2 +- internal/gopathwalk/walk.go | 4 +- internal/imports/fix_test.go | 2 +- internal/jsonrpc2/conn.go | 10 +- internal/jsonrpc2/handler.go | 8 +- internal/jsonrpc2/jsonrpc2_test.go | 12 +- internal/jsonrpc2/messages.go | 8 +- internal/jsonrpc2_v2/conn.go | 14 +- internal/jsonrpc2_v2/jsonrpc2.go | 16 +- internal/jsonrpc2_v2/jsonrpc2_test.go | 28 +-- internal/jsonrpc2_v2/messages.go | 12 +- internal/jsonrpc2_v2/serve_test.go | 4 +- internal/jsonrpc2_v2/wire.go | 2 +- internal/jsonrpc2_v2/wire_test.go | 8 +- internal/memoize/memoize.go | 20 +- internal/memoize/memoize_test.go | 20 +- internal/packagesinternal/packages.go | 2 +- internal/packagestest/expect.go | 36 +-- internal/packagestest/expect_test.go | 2 +- internal/packagestest/export.go | 10 +- internal/packagestest/export_test.go | 26 +-- internal/tool/tool.go | 2 +- internal/typeparams/normalize.go | 2 +- internal/xcontext/xcontext.go | 8 +- 161 files changed, 630 insertions(+), 630 deletions(-) diff --git a/blog/blog.go b/blog/blog.go index 947c60e95a2..901b53f440e 
100644 --- a/blog/blog.go +++ b/blog/blog.go @@ -420,7 +420,7 @@ type rootData struct { BasePath string GodocURL string AnalyticsHTML template.HTML - Data interface{} + Data any } // ServeHTTP serves the front, index, and article pages diff --git a/container/intsets/sparse.go b/container/intsets/sparse.go index c56aacc28bb..b9b4c91ed21 100644 --- a/container/intsets/sparse.go +++ b/container/intsets/sparse.go @@ -267,7 +267,7 @@ func (s *Sparse) init() { // loop. Fail fast before this occurs. // We don't want to call panic here because it prevents the // inlining of this function. - _ = (interface{}(nil)).(to_copy_a_sparse_you_must_call_its_Copy_method) + _ = (any(nil)).(to_copy_a_sparse_you_must_call_its_Copy_method) } } diff --git a/go/analysis/analysis.go b/go/analysis/analysis.go index 3a73084a53c..a7df4d1fe4e 100644 --- a/go/analysis/analysis.go +++ b/go/analysis/analysis.go @@ -45,7 +45,7 @@ type Analyzer struct { // To pass analysis results between packages (and thus // potentially between address spaces), use Facts, which are // serializable. - Run func(*Pass) (interface{}, error) + Run func(*Pass) (any, error) // RunDespiteErrors allows the driver to invoke // the Run method of this analyzer even on a @@ -112,7 +112,7 @@ type Pass struct { // The map keys are the elements of Analysis.Required, // and the type of each corresponding value is the required // analysis's ResultType. - ResultOf map[*Analyzer]interface{} + ResultOf map[*Analyzer]any // ReadFile returns the contents of the named file. // @@ -186,7 +186,7 @@ type ObjectFact struct { // Reportf is a helper function that reports a Diagnostic using the // specified position and formatted error message. -func (pass *Pass) Reportf(pos token.Pos, format string, args ...interface{}) { +func (pass *Pass) Reportf(pos token.Pos, format string, args ...any) { msg := fmt.Sprintf(format, args...) pass.Report(Diagnostic{Pos: pos, Message: msg}) } @@ -201,7 +201,7 @@ type Range interface { // ReportRangef is a helper function that reports a Diagnostic using the // range provided. ast.Node values can be passed in as the range because // they satisfy the Range interface. -func (pass *Pass) ReportRangef(rng Range, format string, args ...interface{}) { +func (pass *Pass) ReportRangef(rng Range, format string, args ...any) { msg := fmt.Sprintf(format, args...) pass.Report(Diagnostic{Pos: rng.Pos(), End: rng.End(), Message: msg}) } diff --git a/go/analysis/analysistest/analysistest.go b/go/analysis/analysistest/analysistest.go index 08981776478..0b5cfe70bfe 100644 --- a/go/analysis/analysistest/analysistest.go +++ b/go/analysis/analysistest/analysistest.go @@ -76,7 +76,7 @@ var TestData = func() string { // Testing is an abstraction of a *testing.T. 
type Testing interface { - Errorf(format string, args ...interface{}) + Errorf(format string, args ...any) } // RunWithSuggestedFixes behaves like Run, but additionally applies diff --git a/go/analysis/analysistest/analysistest_test.go b/go/analysis/analysistest/analysistest_test.go index eedbb5c2a90..88cd8f8f1d5 100644 --- a/go/analysis/analysistest/analysistest_test.go +++ b/go/analysis/analysistest/analysistest_test.go @@ -262,6 +262,6 @@ type T string type errorfunc func(string) -func (f errorfunc) Errorf(format string, args ...interface{}) { +func (f errorfunc) Errorf(format string, args ...any) { f(fmt.Sprintf(format, args...)) } diff --git a/go/analysis/internal/analysisflags/flags.go b/go/analysis/internal/analysisflags/flags.go index c2445575cff..6aefef25815 100644 --- a/go/analysis/internal/analysisflags/flags.go +++ b/go/analysis/internal/analysisflags/flags.go @@ -201,7 +201,7 @@ func addVersionFlag() { type versionFlag struct{} func (versionFlag) IsBoolFlag() bool { return true } -func (versionFlag) Get() interface{} { return nil } +func (versionFlag) Get() any { return nil } func (versionFlag) String() string { return "" } func (versionFlag) Set(s string) error { if s != "full" { @@ -252,7 +252,7 @@ const ( // triState implements flag.Value, flag.Getter, and flag.boolFlag. // They work like boolean flags: we can say vet -printf as well as vet -printf=true -func (ts *triState) Get() interface{} { +func (ts *triState) Get() any { return *ts == setTrue } @@ -340,7 +340,7 @@ func PrintPlain(out io.Writer, fset *token.FileSet, contextLines int, diag analy // A JSONTree is a mapping from package ID to analysis name to result. // Each result is either a jsonError or a list of JSONDiagnostic. -type JSONTree map[string]map[string]interface{} +type JSONTree map[string]map[string]any // A TextEdit describes the replacement of a portion of a file. // Start and End are zero-based half-open indices into the original byte @@ -383,7 +383,7 @@ type JSONRelatedInformation struct { // Add adds the result of analysis 'name' on package 'id'. // The result is either a list of diagnostics or an error. func (tree JSONTree) Add(fset *token.FileSet, id, name string, diags []analysis.Diagnostic, err error) { - var v interface{} + var v any if err != nil { type jsonError struct { Err string `json:"error"` @@ -429,7 +429,7 @@ func (tree JSONTree) Add(fset *token.FileSet, id, name string, diags []analysis. 
if v != nil { m, ok := tree[id] if !ok { - m = make(map[string]interface{}) + m = make(map[string]any) tree[id] = m } m[name] = v diff --git a/go/analysis/internal/checker/checker_test.go b/go/analysis/internal/checker/checker_test.go index 9ec6e61cd73..7d73aa3c6bb 100644 --- a/go/analysis/internal/checker/checker_test.go +++ b/go/analysis/internal/checker/checker_test.go @@ -107,7 +107,7 @@ func NewT1() *T1 { return &T1{T} } Name: "noop", Doc: "noop", Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: func(pass *analysis.Pass) (interface{}, error) { + Run: func(pass *analysis.Pass) (any, error) { return nil, nil }, RunDespiteErrors: true, @@ -119,7 +119,7 @@ func NewT1() *T1 { return &T1{T} } Name: "noopfact", Doc: "noopfact", Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: func(pass *analysis.Pass) (interface{}, error) { + Run: func(pass *analysis.Pass) (any, error) { return nil, nil }, RunDespiteErrors: true, @@ -185,7 +185,7 @@ func TestURL(t *testing.T) { Name: "pkgname", Doc: "trivial analyzer that reports package names", URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/internal/checker", - Run: func(p *analysis.Pass) (interface{}, error) { + Run: func(p *analysis.Pass) (any, error) { for _, f := range p.Files { p.ReportRangef(f.Name, "package name is %s", f.Name.Name) } diff --git a/go/analysis/internal/checker/start_test.go b/go/analysis/internal/checker/start_test.go index c78829a5adf..60ed54464ae 100644 --- a/go/analysis/internal/checker/start_test.go +++ b/go/analysis/internal/checker/start_test.go @@ -62,7 +62,7 @@ var commentAnalyzer = &analysis.Analyzer{ Run: commentRun, } -func commentRun(pass *analysis.Pass) (interface{}, error) { +func commentRun(pass *analysis.Pass) (any, error) { const ( from = "/* Package comment */" to = "// Package comment" diff --git a/go/analysis/internal/internal.go b/go/analysis/internal/internal.go index e7c8247fd33..327c4b50579 100644 --- a/go/analysis/internal/internal.go +++ b/go/analysis/internal/internal.go @@ -9,4 +9,4 @@ import "golang.org/x/tools/go/analysis" // This function is set by the checker package to provide // backdoor access to the private Pass field // of the checker.Action type, for use by analysistest. 
-var Pass func(interface{}) *analysis.Pass +var Pass func(any) *analysis.Pass diff --git a/go/analysis/internal/versiontest/version_test.go b/go/analysis/internal/versiontest/version_test.go index 43c52f565f7..5bd6d3027dd 100644 --- a/go/analysis/internal/versiontest/version_test.go +++ b/go/analysis/internal/versiontest/version_test.go @@ -26,7 +26,7 @@ import ( var analyzer = &analysis.Analyzer{ Name: "versiontest", Doc: "off", - Run: func(pass *analysis.Pass) (interface{}, error) { + Run: func(pass *analysis.Pass) (any, error) { pass.Reportf(pass.Files[0].Package, "goversion=%s", pass.Pkg.GoVersion()) return nil, nil }, diff --git a/go/analysis/multichecker/multichecker_test.go b/go/analysis/multichecker/multichecker_test.go index 94a280564ce..1491df153b9 100644 --- a/go/analysis/multichecker/multichecker_test.go +++ b/go/analysis/multichecker/multichecker_test.go @@ -23,7 +23,7 @@ func main() { fail := &analysis.Analyzer{ Name: "fail", Doc: "always fail on a package 'sort'", - Run: func(pass *analysis.Pass) (interface{}, error) { + Run: func(pass *analysis.Pass) (any, error) { if pass.Pkg.Path() == "sort" { return nil, fmt.Errorf("failed") } diff --git a/go/analysis/passes/appends/appends.go b/go/analysis/passes/appends/appends.go index 6976f0d9090..e554c3cc903 100644 --- a/go/analysis/passes/appends/appends.go +++ b/go/analysis/passes/appends/appends.go @@ -29,7 +29,7 @@ var Analyzer = &analysis.Analyzer{ Run: run, } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) nodeFilter := []ast.Node{ diff --git a/go/analysis/passes/asmdecl/asmdecl.go b/go/analysis/passes/asmdecl/asmdecl.go index a47ecbae731..436b03cb290 100644 --- a/go/analysis/passes/asmdecl/asmdecl.go +++ b/go/analysis/passes/asmdecl/asmdecl.go @@ -150,7 +150,7 @@ var ( abiSuff = re(`^(.+)<(ABI.+)>$`) ) -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { // No work if no assembly files. var sfiles []string for _, fname := range pass.OtherFiles { @@ -226,7 +226,7 @@ Files: for lineno, line := range lines { lineno++ - badf := func(format string, args ...interface{}) { + badf := func(format string, args ...any) { pass.Reportf(analysisutil.LineStart(tf, lineno), "[%s] %s: %s", arch, fnName, fmt.Sprintf(format, args...)) } @@ -646,7 +646,7 @@ func asmParseDecl(pass *analysis.Pass, decl *ast.FuncDecl) map[string]*asmFunc { } // asmCheckVar checks a single variable reference. -func asmCheckVar(badf func(string, ...interface{}), fn *asmFunc, line, expr string, off int, v *asmVar, archDef *asmArch) { +func asmCheckVar(badf func(string, ...any), fn *asmFunc, line, expr string, off int, v *asmVar, archDef *asmArch) { m := asmOpcode.FindStringSubmatch(line) if m == nil { if !strings.HasPrefix(strings.TrimSpace(line), "//") { diff --git a/go/analysis/passes/buildssa/buildssa.go b/go/analysis/passes/buildssa/buildssa.go index f077ea28247..f49fea51762 100644 --- a/go/analysis/passes/buildssa/buildssa.go +++ b/go/analysis/passes/buildssa/buildssa.go @@ -32,7 +32,7 @@ type SSA struct { SrcFuncs []*ssa.Function } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { // We must create a new Program for each Package because the // analysis API provides no place to hang a Program shared by // all Packages. 
Consequently, SSA Packages and Functions do not diff --git a/go/analysis/passes/buildtag/buildtag.go b/go/analysis/passes/buildtag/buildtag.go index e7434e8fed2..6c7a0df585d 100644 --- a/go/analysis/passes/buildtag/buildtag.go +++ b/go/analysis/passes/buildtag/buildtag.go @@ -26,7 +26,7 @@ var Analyzer = &analysis.Analyzer{ Run: runBuildTag, } -func runBuildTag(pass *analysis.Pass) (interface{}, error) { +func runBuildTag(pass *analysis.Pass) (any, error) { for _, f := range pass.Files { checkGoFile(pass, f) } diff --git a/go/analysis/passes/cgocall/cgocall.go b/go/analysis/passes/cgocall/cgocall.go index 4f3bb035d65..d9189b5b696 100644 --- a/go/analysis/passes/cgocall/cgocall.go +++ b/go/analysis/passes/cgocall/cgocall.go @@ -55,7 +55,7 @@ func run(pass *analysis.Pass) (any, error) { return nil, nil } -func checkCgo(fset *token.FileSet, f *ast.File, info *types.Info, reportf func(token.Pos, string, ...interface{})) { +func checkCgo(fset *token.FileSet, f *ast.File, info *types.Info, reportf func(token.Pos, string, ...any)) { ast.Inspect(f, func(n ast.Node) bool { call, ok := n.(*ast.CallExpr) if !ok { diff --git a/go/analysis/passes/composite/composite.go b/go/analysis/passes/composite/composite.go index f56c3e622fb..60c6afe49f0 100644 --- a/go/analysis/passes/composite/composite.go +++ b/go/analysis/passes/composite/composite.go @@ -51,7 +51,7 @@ func init() { // runUnkeyedLiteral checks if a composite literal is a struct literal with // unkeyed fields. -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) nodeFilter := []ast.Node{ diff --git a/go/analysis/passes/copylock/copylock.go b/go/analysis/passes/copylock/copylock.go index 8a215677165..49c14d4980d 100644 --- a/go/analysis/passes/copylock/copylock.go +++ b/go/analysis/passes/copylock/copylock.go @@ -36,7 +36,7 @@ var Analyzer = &analysis.Analyzer{ Run: run, } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) var goversion string // effective file version ("" => unknown) diff --git a/go/analysis/passes/ctrlflow/ctrlflow.go b/go/analysis/passes/ctrlflow/ctrlflow.go index d21adeee900..951aaed00fd 100644 --- a/go/analysis/passes/ctrlflow/ctrlflow.go +++ b/go/analysis/passes/ctrlflow/ctrlflow.go @@ -80,7 +80,7 @@ func (c *CFGs) FuncLit(lit *ast.FuncLit) *cfg.CFG { return c.funcLits[lit].cfg } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) // Because CFG construction consumes and produces noReturn diff --git a/go/analysis/passes/directive/directive.go b/go/analysis/passes/directive/directive.go index b205402388e..bebec891408 100644 --- a/go/analysis/passes/directive/directive.go +++ b/go/analysis/passes/directive/directive.go @@ -40,7 +40,7 @@ var Analyzer = &analysis.Analyzer{ Run: runDirective, } -func runDirective(pass *analysis.Pass) (interface{}, error) { +func runDirective(pass *analysis.Pass) (any, error) { for _, f := range pass.Files { checkGoFile(pass, f) } diff --git a/go/analysis/passes/fieldalignment/fieldalignment.go b/go/analysis/passes/fieldalignment/fieldalignment.go index 93fa39140e6..e2ddc83b604 100644 --- a/go/analysis/passes/fieldalignment/fieldalignment.go +++ b/go/analysis/passes/fieldalignment/fieldalignment.go @@ -65,7 +65,7 @@ var Analyzer = &analysis.Analyzer{ Run: run, 
} -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) nodeFilter := []ast.Node{ (*ast.StructType)(nil), diff --git a/go/analysis/passes/findcall/findcall.go b/go/analysis/passes/findcall/findcall.go index 2671573d1fe..9db4de1c20f 100644 --- a/go/analysis/passes/findcall/findcall.go +++ b/go/analysis/passes/findcall/findcall.go @@ -38,7 +38,7 @@ func init() { Analyzer.Flags.StringVar(&name, "name", name, "name of the function to find") } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { for _, f := range pass.Files { ast.Inspect(f, func(n ast.Node) bool { if call, ok := n.(*ast.CallExpr); ok { diff --git a/go/analysis/passes/framepointer/framepointer.go b/go/analysis/passes/framepointer/framepointer.go index 8012de99daa..ba94fd68ea4 100644 --- a/go/analysis/passes/framepointer/framepointer.go +++ b/go/analysis/passes/framepointer/framepointer.go @@ -113,7 +113,7 @@ var arm64Branch = map[string]bool{ "RET": true, } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { arch, ok := arches[build.Default.GOARCH] if !ok { return nil, nil diff --git a/go/analysis/passes/ifaceassert/ifaceassert.go b/go/analysis/passes/ifaceassert/ifaceassert.go index 5f07ed3ffde..4022dbe7c22 100644 --- a/go/analysis/passes/ifaceassert/ifaceassert.go +++ b/go/analysis/passes/ifaceassert/ifaceassert.go @@ -52,7 +52,7 @@ func assertableTo(free *typeparams.Free, v, t types.Type) *types.Func { return nil } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) nodeFilter := []ast.Node{ (*ast.TypeAssertExpr)(nil), diff --git a/go/analysis/passes/inspect/inspect.go b/go/analysis/passes/inspect/inspect.go index 3b121cb0ce7..ee1972f56df 100644 --- a/go/analysis/passes/inspect/inspect.go +++ b/go/analysis/passes/inspect/inspect.go @@ -44,6 +44,6 @@ var Analyzer = &analysis.Analyzer{ ResultType: reflect.TypeOf(new(inspector.Inspector)), } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { return inspector.New(pass.Files), nil } diff --git a/go/analysis/passes/loopclosure/loopclosure.go b/go/analysis/passes/loopclosure/loopclosure.go index d3181242153..64df1b106a1 100644 --- a/go/analysis/passes/loopclosure/loopclosure.go +++ b/go/analysis/passes/loopclosure/loopclosure.go @@ -30,7 +30,7 @@ var Analyzer = &analysis.Analyzer{ Run: run, } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) nodeFilter := []ast.Node{ diff --git a/go/analysis/passes/lostcancel/lostcancel.go b/go/analysis/passes/lostcancel/lostcancel.go index f8a661aa5db..a7fee180925 100644 --- a/go/analysis/passes/lostcancel/lostcancel.go +++ b/go/analysis/passes/lostcancel/lostcancel.go @@ -47,7 +47,7 @@ var contextPackage = "context" // containing the assignment, we assume that other uses exist. // // checkLostCancel analyzes a single named or literal function. -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { // Fast path: bypass check if file doesn't use context.WithCancel. 
if !analysisinternal.Imports(pass.Pkg, contextPackage) { return nil, nil diff --git a/go/analysis/passes/nilfunc/nilfunc.go b/go/analysis/passes/nilfunc/nilfunc.go index 778f7f1f8f9..3ac2dcd4907 100644 --- a/go/analysis/passes/nilfunc/nilfunc.go +++ b/go/analysis/passes/nilfunc/nilfunc.go @@ -30,7 +30,7 @@ var Analyzer = &analysis.Analyzer{ Run: run, } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) nodeFilter := []ast.Node{ diff --git a/go/analysis/passes/nilness/nilness.go b/go/analysis/passes/nilness/nilness.go index faaf12a9385..af61ae6088d 100644 --- a/go/analysis/passes/nilness/nilness.go +++ b/go/analysis/passes/nilness/nilness.go @@ -28,7 +28,7 @@ var Analyzer = &analysis.Analyzer{ Requires: []*analysis.Analyzer{buildssa.Analyzer}, } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { ssainput := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA) for _, fn := range ssainput.SrcFuncs { runFunc(pass, fn) @@ -37,7 +37,7 @@ func run(pass *analysis.Pass) (interface{}, error) { } func runFunc(pass *analysis.Pass, fn *ssa.Function) { - reportf := func(category string, pos token.Pos, format string, args ...interface{}) { + reportf := func(category string, pos token.Pos, format string, args ...any) { // We ignore nil-checking ssa.Instructions // that don't correspond to syntax. if pos.IsValid() { diff --git a/go/analysis/passes/pkgfact/pkgfact.go b/go/analysis/passes/pkgfact/pkgfact.go index 077c8780815..31748795dac 100644 --- a/go/analysis/passes/pkgfact/pkgfact.go +++ b/go/analysis/passes/pkgfact/pkgfact.go @@ -53,7 +53,7 @@ type pairsFact []string func (f *pairsFact) AFact() {} func (f *pairsFact) String() string { return "pairs(" + strings.Join(*f, ", ") + ")" } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { result := make(map[string]string) // At each import, print the fact from the imported diff --git a/go/analysis/passes/reflectvaluecompare/reflectvaluecompare.go b/go/analysis/passes/reflectvaluecompare/reflectvaluecompare.go index 72435b2fc7a..d0632dbdafe 100644 --- a/go/analysis/passes/reflectvaluecompare/reflectvaluecompare.go +++ b/go/analysis/passes/reflectvaluecompare/reflectvaluecompare.go @@ -28,7 +28,7 @@ var Analyzer = &analysis.Analyzer{ Run: run, } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) nodeFilter := []ast.Node{ diff --git a/go/analysis/passes/shadow/shadow.go b/go/analysis/passes/shadow/shadow.go index 30258c991f3..8f768bb76c5 100644 --- a/go/analysis/passes/shadow/shadow.go +++ b/go/analysis/passes/shadow/shadow.go @@ -36,7 +36,7 @@ func init() { Analyzer.Flags.BoolVar(&strict, "strict", strict, "whether to be strict about shadowing; can be noisy") } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) spans := make(map[types.Object]span) diff --git a/go/analysis/passes/shift/shift.go b/go/analysis/passes/shift/shift.go index 46b5f6d68c6..57987b3d203 100644 --- a/go/analysis/passes/shift/shift.go +++ b/go/analysis/passes/shift/shift.go @@ -34,7 +34,7 @@ var Analyzer = &analysis.Analyzer{ Run: run, } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { inspect := 
pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) // Do a complete pass to compute dead nodes. diff --git a/go/analysis/passes/stdmethods/stdmethods.go b/go/analysis/passes/stdmethods/stdmethods.go index 28f51b1ec9a..a0bdf001abd 100644 --- a/go/analysis/passes/stdmethods/stdmethods.go +++ b/go/analysis/passes/stdmethods/stdmethods.go @@ -66,7 +66,7 @@ var canonicalMethods = map[string]struct{ args, results []string }{ "WriteTo": {[]string{"=io.Writer"}, []string{"int64", "error"}}, // io.WriterTo } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) nodeFilter := []ast.Node{ diff --git a/go/analysis/passes/stringintconv/string.go b/go/analysis/passes/stringintconv/string.go index f56e6ecaa29..a23721cd26f 100644 --- a/go/analysis/passes/stringintconv/string.go +++ b/go/analysis/passes/stringintconv/string.go @@ -70,7 +70,7 @@ func typeName(t types.Type) string { return "" } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) nodeFilter := []ast.Node{ (*ast.File)(nil), diff --git a/go/analysis/passes/structtag/structtag.go b/go/analysis/passes/structtag/structtag.go index 4115ef76943..d926503403d 100644 --- a/go/analysis/passes/structtag/structtag.go +++ b/go/analysis/passes/structtag/structtag.go @@ -34,7 +34,7 @@ var Analyzer = &analysis.Analyzer{ Run: run, } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) nodeFilter := []ast.Node{ diff --git a/go/analysis/passes/testinggoroutine/testinggoroutine.go b/go/analysis/passes/testinggoroutine/testinggoroutine.go index fef5a6014c4..f49ac4eb1a0 100644 --- a/go/analysis/passes/testinggoroutine/testinggoroutine.go +++ b/go/analysis/passes/testinggoroutine/testinggoroutine.go @@ -36,7 +36,7 @@ var Analyzer = &analysis.Analyzer{ Run: run, } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) if !analysisinternal.Imports(pass.Pkg, "testing") { diff --git a/go/analysis/passes/tests/tests.go b/go/analysis/passes/tests/tests.go index 285b34218c3..9f59006ebb2 100644 --- a/go/analysis/passes/tests/tests.go +++ b/go/analysis/passes/tests/tests.go @@ -47,7 +47,7 @@ var acceptedFuzzTypes = []types.Type{ types.NewSlice(types.Universe.Lookup("byte").Type()), } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { for _, f := range pass.Files { if !strings.HasSuffix(pass.Fset.File(f.FileStart).Name(), "_test.go") { continue diff --git a/go/analysis/passes/unreachable/unreachable.go b/go/analysis/passes/unreachable/unreachable.go index b810db7ee95..fcf5fbd9060 100644 --- a/go/analysis/passes/unreachable/unreachable.go +++ b/go/analysis/passes/unreachable/unreachable.go @@ -30,7 +30,7 @@ var Analyzer = &analysis.Analyzer{ Run: run, } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) nodeFilter := []ast.Node{ diff --git a/go/analysis/passes/unsafeptr/unsafeptr.go b/go/analysis/passes/unsafeptr/unsafeptr.go index fb5b944faad..57c6da64ff3 100644 --- a/go/analysis/passes/unsafeptr/unsafeptr.go +++ b/go/analysis/passes/unsafeptr/unsafeptr.go @@ -30,7 
+30,7 @@ var Analyzer = &analysis.Analyzer{ Run: run, } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) nodeFilter := []ast.Node{ diff --git a/go/analysis/passes/unusedresult/unusedresult.go b/go/analysis/passes/unusedresult/unusedresult.go index e298f644277..932f1347e56 100644 --- a/go/analysis/passes/unusedresult/unusedresult.go +++ b/go/analysis/passes/unusedresult/unusedresult.go @@ -85,7 +85,7 @@ func init() { "comma-separated list of names of methods of type func() string whose results must be used") } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) // Split functions into (pkg, name) pairs to save allocation later. diff --git a/go/analysis/passes/usesgenerics/usesgenerics.go b/go/analysis/passes/usesgenerics/usesgenerics.go index 5c5df3a79a0..b7ff3ad6877 100644 --- a/go/analysis/passes/usesgenerics/usesgenerics.go +++ b/go/analysis/passes/usesgenerics/usesgenerics.go @@ -53,7 +53,7 @@ type featuresFact struct { func (f *featuresFact) AFact() {} func (f *featuresFact) String() string { return f.Features.String() } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) direct := genericfeatures.ForPackage(inspect, pass.TypesInfo) diff --git a/go/analysis/unitchecker/unitchecker.go b/go/analysis/unitchecker/unitchecker.go index 82c3db6a39d..a1ee80388b6 100644 --- a/go/analysis/unitchecker/unitchecker.go +++ b/go/analysis/unitchecker/unitchecker.go @@ -287,7 +287,7 @@ func run(fset *token.FileSet, cfg *Config, analyzers []*analysis.Analyzer) ([]re // Also build a map to hold working state and result. type action struct { once sync.Once - result interface{} + result any err error usesFacts bool // (transitively uses) diagnostics []analysis.Diagnostic @@ -337,7 +337,7 @@ func run(fset *token.FileSet, cfg *Config, analyzers []*analysis.Analyzer) ([]re // The inputs to this analysis are the // results of its prerequisites. 
- inputs := make(map[*analysis.Analyzer]interface{}) + inputs := make(map[*analysis.Analyzer]any) var failed []string for _, req := range a.Requires { reqact := exec(req) diff --git a/go/analysis/unitchecker/unitchecker_test.go b/go/analysis/unitchecker/unitchecker_test.go index 173d76348f7..6c3bba6793e 100644 --- a/go/analysis/unitchecker/unitchecker_test.go +++ b/go/analysis/unitchecker/unitchecker_test.go @@ -59,7 +59,7 @@ func testIntegration(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": `package a func _() { diff --git a/go/analysis/validate_test.go b/go/analysis/validate_test.go index 7f4ee2c05b9..b192ef0a3c0 100644 --- a/go/analysis/validate_test.go +++ b/go/analysis/validate_test.go @@ -11,7 +11,7 @@ import ( func TestValidate(t *testing.T) { var ( - run = func(p *Pass) (interface{}, error) { + run = func(p *Pass) (any, error) { return nil, nil } dependsOnSelf = &Analyzer{ @@ -130,7 +130,7 @@ func TestCycleInRequiresGraphErrorMessage(t *testing.T) { func TestValidateEmptyDoc(t *testing.T) { withoutDoc := &Analyzer{ Name: "withoutDoc", - Run: func(p *Pass) (interface{}, error) { + Run: func(p *Pass) (any, error) { return nil, nil }, } diff --git a/go/buildutil/fakecontext.go b/go/buildutil/fakecontext.go index 763d18809b4..1f75141d504 100644 --- a/go/buildutil/fakecontext.go +++ b/go/buildutil/fakecontext.go @@ -95,7 +95,7 @@ func (s byName) Less(i, j int) bool { return s[i].Name() < s[j].Name() } type fakeFileInfo string func (fi fakeFileInfo) Name() string { return string(fi) } -func (fakeFileInfo) Sys() interface{} { return nil } +func (fakeFileInfo) Sys() any { return nil } func (fakeFileInfo) ModTime() time.Time { return time.Time{} } func (fakeFileInfo) IsDir() bool { return false } func (fakeFileInfo) Size() int64 { return 0 } @@ -104,7 +104,7 @@ func (fakeFileInfo) Mode() os.FileMode { return 0644 } type fakeDirInfo string func (fd fakeDirInfo) Name() string { return string(fd) } -func (fakeDirInfo) Sys() interface{} { return nil } +func (fakeDirInfo) Sys() any { return nil } func (fakeDirInfo) ModTime() time.Time { return time.Time{} } func (fakeDirInfo) IsDir() bool { return true } func (fakeDirInfo) Size() int64 { return 0 } diff --git a/go/buildutil/tags.go b/go/buildutil/tags.go index 32c8d1424d2..410c8e72d48 100644 --- a/go/buildutil/tags.go +++ b/go/buildutil/tags.go @@ -51,7 +51,7 @@ func (v *TagsFlag) Set(s string) error { return nil } -func (v *TagsFlag) Get() interface{} { return *v } +func (v *TagsFlag) Get() any { return *v } func splitQuotedFields(s string) ([]string, error) { // See $GOROOT/src/cmd/internal/quoted/quoted.go (Split) diff --git a/go/callgraph/rta/rta.go b/go/callgraph/rta/rta.go index b489b0178c8..224c0b96ce0 100644 --- a/go/callgraph/rta/rta.go +++ b/go/callgraph/rta/rta.go @@ -371,7 +371,7 @@ func (r *rta) interfaces(C types.Type) []*types.Interface { // Ascertain set of interfaces C implements // and update the 'implements' relation. - r.interfaceTypes.Iterate(func(I types.Type, v interface{}) { + r.interfaceTypes.Iterate(func(I types.Type, v any) { iinfo := v.(*interfaceTypeInfo) if I := types.Unalias(I).(*types.Interface); implements(cinfo, iinfo) { iinfo.implementations = append(iinfo.implementations, C) @@ -400,7 +400,7 @@ func (r *rta) implementations(I *types.Interface) []types.Type { // Ascertain set of concrete types that implement I // and update the 'implements' relation. 
- r.concreteTypes.Iterate(func(C types.Type, v interface{}) { + r.concreteTypes.Iterate(func(C types.Type, v any) { cinfo := v.(*concreteTypeInfo) if implements(cinfo, iinfo) { cinfo.implements = append(cinfo.implements, I) diff --git a/go/callgraph/rta/rta_test.go b/go/callgraph/rta/rta_test.go index dcec98d7c5d..6b16484245b 100644 --- a/go/callgraph/rta/rta_test.go +++ b/go/callgraph/rta/rta_test.go @@ -220,7 +220,7 @@ func check(t *testing.T, f *ast.File, pkg *ssa.Package, res *rta.Result) { // Check runtime types. { got := make(stringset) - res.RuntimeTypes.Iterate(func(key types.Type, value interface{}) { + res.RuntimeTypes.Iterate(func(key types.Type, value any) { if !value.(bool) { // accessible to reflection typ := types.TypeString(types.Unalias(key), types.RelativeTo(pkg.Pkg)) got[typ] = true diff --git a/go/callgraph/vta/internal/trie/builder.go b/go/callgraph/vta/internal/trie/builder.go index c814c039f72..bdd39397ec6 100644 --- a/go/callgraph/vta/internal/trie/builder.go +++ b/go/callgraph/vta/internal/trie/builder.go @@ -14,13 +14,13 @@ package trie // // Collisions functions may be applied whenever a value is inserted // or two maps are merged, or intersected. -type Collision func(lhs interface{}, rhs interface{}) interface{} +type Collision func(lhs any, rhs any) any // TakeLhs always returns the left value in a collision. -func TakeLhs(lhs, rhs interface{}) interface{} { return lhs } +func TakeLhs(lhs, rhs any) any { return lhs } // TakeRhs always returns the right hand side in a collision. -func TakeRhs(lhs, rhs interface{}) interface{} { return rhs } +func TakeRhs(lhs, rhs any) any { return rhs } // Builder creates new Map. Each Builder has a unique Scope. // @@ -78,7 +78,7 @@ func (b *Builder) Empty() Map { return Map{b.Scope(), b.empty} } // if _, ok := m[k]; ok { m[k] = c(m[k], v} else { m[k] = v} // // An insertion or update happened whenever Insert(m, ...) != m . -func (b *Builder) InsertWith(c Collision, m Map, k uint64, v interface{}) Map { +func (b *Builder) InsertWith(c Collision, m Map, k uint64, v any) Map { m = b.Clone(m) return Map{b.Scope(), b.insert(c, m.n, b.mkLeaf(key(k), v), false)} } @@ -92,7 +92,7 @@ func (b *Builder) InsertWith(c Collision, m Map, k uint64, v interface{}) Map { // if _, ok := m[k]; ok { m[k] = val } // // This is equivalent to b.Merge(m, b.Create({k: v})). -func (b *Builder) Insert(m Map, k uint64, v interface{}) Map { +func (b *Builder) Insert(m Map, k uint64, v any) Map { return b.InsertWith(TakeLhs, m, k, v) } @@ -100,7 +100,7 @@ func (b *Builder) Insert(m Map, k uint64, v interface{}) Map { // updating a map[uint64]interface{} by: // // m[key] = val -func (b *Builder) Update(m Map, key uint64, val interface{}) Map { +func (b *Builder) Update(m Map, key uint64, val any) Map { return b.InsertWith(TakeRhs, m, key, val) } @@ -185,14 +185,14 @@ func (b *Builder) MutEmpty() MutMap { // Insert an element into the map using the collision function for the builder. // Returns true if the element was inserted. -func (mm *MutMap) Insert(k uint64, v interface{}) bool { +func (mm *MutMap) Insert(k uint64, v any) bool { old := mm.M mm.M = mm.B.Insert(old, k, v) return old != mm.M } // Updates an element in the map. Returns true if the map was updated. 
-func (mm *MutMap) Update(k uint64, v interface{}) bool { +func (mm *MutMap) Update(k uint64, v any) bool { old := mm.M mm.M = mm.B.Update(old, k, v) return old != mm.M @@ -221,7 +221,7 @@ func (mm *MutMap) Intersect(other Map) bool { return old != mm.M } -func (b *Builder) Create(m map[uint64]interface{}) Map { +func (b *Builder) Create(m map[uint64]any) Map { var leaves []*leaf for k, v := range m { leaves = append(leaves, b.mkLeaf(key(k), v)) @@ -259,7 +259,7 @@ func (b *Builder) create(leaves []*leaf) node { } // mkLeaf returns the hash-consed representative of (k, v) in the current scope. -func (b *Builder) mkLeaf(k key, v interface{}) *leaf { +func (b *Builder) mkLeaf(k key, v any) *leaf { rep, ok := b.leaves[leaf{k, v}] if !ok { rep = &leaf{k, v} // heap-allocated copy diff --git a/go/callgraph/vta/internal/trie/op_test.go b/go/callgraph/vta/internal/trie/op_test.go index ba0d5be71a9..b4610d55c22 100644 --- a/go/callgraph/vta/internal/trie/op_test.go +++ b/go/callgraph/vta/internal/trie/op_test.go @@ -21,13 +21,13 @@ import ( // mapCollection is effectively a []map[uint64]interface{}. // These support operations being applied to the i'th maps. type mapCollection interface { - Elements() []map[uint64]interface{} + Elements() []map[uint64]any DeepEqual(l, r int) bool - Lookup(id int, k uint64) (interface{}, bool) + Lookup(id int, k uint64) (any, bool) - Insert(id int, k uint64, v interface{}) - Update(id int, k uint64, v interface{}) + Insert(id int, k uint64, v any) + Update(id int, k uint64, v any) Remove(id int, k uint64) Intersect(l int, r int) Merge(l int, r int) @@ -86,19 +86,19 @@ type trieCollection struct { tries []trie.MutMap } -func (c *trieCollection) Elements() []map[uint64]interface{} { - var maps []map[uint64]interface{} +func (c *trieCollection) Elements() []map[uint64]any { + var maps []map[uint64]any for _, m := range c.tries { maps = append(maps, trie.Elems(m.M)) } return maps } -func (c *trieCollection) Eq(id int, m map[uint64]interface{}) bool { +func (c *trieCollection) Eq(id int, m map[uint64]any) bool { elems := trie.Elems(c.tries[id].M) return !reflect.DeepEqual(elems, m) } -func (c *trieCollection) Lookup(id int, k uint64) (interface{}, bool) { +func (c *trieCollection) Lookup(id int, k uint64) (any, bool) { return c.tries[id].M.Lookup(k) } func (c *trieCollection) DeepEqual(l, r int) bool { @@ -109,11 +109,11 @@ func (c *trieCollection) Add() { c.tries = append(c.tries, c.b.MutEmpty()) } -func (c *trieCollection) Insert(id int, k uint64, v interface{}) { +func (c *trieCollection) Insert(id int, k uint64, v any) { c.tries[id].Insert(k, v) } -func (c *trieCollection) Update(id int, k uint64, v interface{}) { +func (c *trieCollection) Update(id int, k uint64, v any) { c.tries[id].Update(k, v) } @@ -140,7 +140,7 @@ func (c *trieCollection) Assign(l, r int) { c.tries[l] = c.tries[r] } -func average(x interface{}, y interface{}) interface{} { +func average(x any, y any) any { if x, ok := x.(float32); ok { if y, ok := y.(float32); ok { return (x + y) / 2.0 @@ -149,13 +149,13 @@ func average(x interface{}, y interface{}) interface{} { return x } -type builtinCollection []map[uint64]interface{} +type builtinCollection []map[uint64]any -func (c builtinCollection) Elements() []map[uint64]interface{} { +func (c builtinCollection) Elements() []map[uint64]any { return c } -func (c builtinCollection) Lookup(id int, k uint64) (interface{}, bool) { +func (c builtinCollection) Lookup(id int, k uint64) (any, bool) { v, ok := c[id][k] return v, ok } @@ -163,13 +163,13 @@ func 
(c builtinCollection) DeepEqual(l, r int) bool { return reflect.DeepEqual(c[l], c[r]) } -func (c builtinCollection) Insert(id int, k uint64, v interface{}) { +func (c builtinCollection) Insert(id int, k uint64, v any) { if _, ok := c[id][k]; !ok { c[id][k] = v } } -func (c builtinCollection) Update(id int, k uint64, v interface{}) { +func (c builtinCollection) Update(id int, k uint64, v any) { c[id][k] = v } @@ -178,7 +178,7 @@ func (c builtinCollection) Remove(id int, k uint64) { } func (c builtinCollection) Intersect(l int, r int) { - result := map[uint64]interface{}{} + result := map[uint64]any{} for k, v := range c[l] { if _, ok := c[r][k]; ok { result[k] = v @@ -188,7 +188,7 @@ func (c builtinCollection) Intersect(l int, r int) { } func (c builtinCollection) Merge(l int, r int) { - result := map[uint64]interface{}{} + result := map[uint64]any{} for k, v := range c[r] { result[k] = v } @@ -199,7 +199,7 @@ func (c builtinCollection) Merge(l int, r int) { } func (c builtinCollection) Average(l int, r int) { - avg := map[uint64]interface{}{} + avg := map[uint64]any{} for k, lv := range c[l] { if rv, ok := c[r][k]; ok { avg[k] = average(lv, rv) @@ -216,7 +216,7 @@ func (c builtinCollection) Average(l int, r int) { } func (c builtinCollection) Assign(l, r int) { - m := map[uint64]interface{}{} + m := map[uint64]any{} for k, v := range c[r] { m[k] = v } @@ -224,7 +224,7 @@ func (c builtinCollection) Assign(l, r int) { } func (c builtinCollection) Clear(id int) { - c[id] = map[uint64]interface{}{} + c[id] = map[uint64]any{} } func newTriesCollection(size int) *trieCollection { @@ -241,7 +241,7 @@ func newTriesCollection(size int) *trieCollection { func newMapsCollection(size int) *builtinCollection { maps := make(builtinCollection, size) for i := 0; i < size; i++ { - maps[i] = map[uint64]interface{}{} + maps[i] = map[uint64]any{} } return &maps } @@ -255,9 +255,9 @@ type operation struct { } // Apply the operation to maps. -func (op operation) Apply(maps mapCollection) interface{} { +func (op operation) Apply(maps mapCollection) any { type lookupresult struct { - v interface{} + v any ok bool } switch op.code { diff --git a/go/callgraph/vta/internal/trie/trie.go b/go/callgraph/vta/internal/trie/trie.go index 511fde51565..a8480192556 100644 --- a/go/callgraph/vta/internal/trie/trie.go +++ b/go/callgraph/vta/internal/trie/trie.go @@ -55,7 +55,7 @@ func (m Map) Size() int { } return m.n.size() } -func (m Map) Lookup(k uint64) (interface{}, bool) { +func (m Map) Lookup(k uint64) (any, bool) { if m.n != nil { if leaf := m.n.find(key(k)); leaf != nil { return leaf.v, true @@ -68,7 +68,7 @@ func (m Map) Lookup(k uint64) (interface{}, bool) { // %s string conversion for . func (m Map) String() string { var kvs []string - m.Range(func(u uint64, i interface{}) bool { + m.Range(func(u uint64, i any) bool { kvs = append(kvs, fmt.Sprintf("%d: %s", u, i)) return true }) @@ -78,7 +78,7 @@ func (m Map) String() string { // Range over the leaf (key, value) pairs in the map in order and // applies cb(key, value) to each. Stops early if cb returns false. // Returns true if all elements were visited without stopping early. 
-func (m Map) Range(cb func(uint64, interface{}) bool) bool { +func (m Map) Range(cb func(uint64, any) bool) bool { if m.n != nil { return m.n.visit(cb) } @@ -100,9 +100,9 @@ func (m Map) DeepEqual(other Map) bool { } // Elems are the (k,v) elements in the Map as a map[uint64]interface{} -func Elems(m Map) map[uint64]interface{} { - dest := make(map[uint64]interface{}, m.Size()) - m.Range(func(k uint64, v interface{}) bool { +func Elems(m Map) map[uint64]any { + dest := make(map[uint64]any, m.Size()) + m.Range(func(k uint64, v any) bool { dest[k] = v return true }) @@ -117,7 +117,7 @@ type node interface { // visit the leaves (key, value) pairs in the map in order and // applies cb(key, value) to each. Stops early if cb returns false. // Returns true if all elements were visited without stopping early. - visit(cb func(uint64, interface{}) bool) bool + visit(cb func(uint64, any) bool) bool // Two nodes contain the same elements regardless of scope. deepEqual(node) bool @@ -139,7 +139,7 @@ type empty struct { // leaf represents a single pair. type leaf struct { k key - v interface{} + v any } // branch represents a tree node within the Patricia trie. @@ -215,13 +215,13 @@ func (br *branch) deepEqual(m node) bool { return false } -func (*empty) visit(cb func(uint64, interface{}) bool) bool { +func (*empty) visit(cb func(uint64, any) bool) bool { return true } -func (l *leaf) visit(cb func(uint64, interface{}) bool) bool { +func (l *leaf) visit(cb func(uint64, any) bool) bool { return cb(uint64(l.k), l.v) } -func (br *branch) visit(cb func(uint64, interface{}) bool) bool { +func (br *branch) visit(cb func(uint64, any) bool) bool { if !br.left.visit(cb) { return false } diff --git a/go/callgraph/vta/internal/trie/trie_test.go b/go/callgraph/vta/internal/trie/trie_test.go index 71fd398f12c..817cb5c5e28 100644 --- a/go/callgraph/vta/internal/trie/trie_test.go +++ b/go/callgraph/vta/internal/trie/trie_test.go @@ -34,8 +34,8 @@ func TestScope(t *testing.T) { } func TestCollision(t *testing.T) { - var x interface{} = 1 - var y interface{} = 2 + var x any = 1 + var y any = 2 if v := TakeLhs(x, y); v != x { t.Errorf("TakeLhs(%s, %s) got %s. want %s", x, y, v, x) @@ -57,7 +57,7 @@ func TestDefault(t *testing.T) { if v, ok := def.Lookup(123); !(v == nil && !ok) { t.Errorf("Scope{}.Lookup() = (%s, %v) not (nil, false)", v, ok) } - if !def.Range(func(k uint64, v interface{}) bool { + if !def.Range(func(k uint64, v any) bool { t.Errorf("Scope{}.Range() called it callback on %d:%s", k, v) return true }) { @@ -114,7 +114,7 @@ func TestEmpty(t *testing.T) { if l := e.n.find(123); l != nil { t.Errorf("empty.find(123) got %v. want nil", l) } - e.Range(func(k uint64, v interface{}) bool { + e.Range(func(k uint64, v any) bool { t.Errorf("empty.Range() called it callback on %d:%s", k, v) return true }) @@ -129,23 +129,23 @@ func TestCreate(t *testing.T) { // The node orders are printed in lexicographic little-endian. 
b := NewBuilder() for _, c := range []struct { - m map[uint64]interface{} + m map[uint64]any want string }{ { - map[uint64]interface{}{}, + map[uint64]any{}, "{}", }, { - map[uint64]interface{}{1: "a"}, + map[uint64]any{1: "a"}, "{1: a}", }, { - map[uint64]interface{}{2: "b", 1: "a"}, + map[uint64]any{2: "b", 1: "a"}, "{1: a, 2: b}", }, { - map[uint64]interface{}{1: "x", 4: "y", 5: "z"}, + map[uint64]any{1: "x", 4: "y", 5: "z"}, "{1: x, 4: y, 5: z}", }, } { @@ -158,7 +158,7 @@ func TestCreate(t *testing.T) { func TestElems(t *testing.T) { b := NewBuilder() - for _, orig := range []map[uint64]interface{}{ + for _, orig := range []map[uint64]any{ {}, {1: "a"}, {1: "a", 2: "b"}, @@ -174,10 +174,10 @@ func TestElems(t *testing.T) { func TestRange(t *testing.T) { b := NewBuilder() - m := b.Create(map[uint64]interface{}{1: "x", 3: "y", 5: "z", 6: "stop", 8: "a"}) + m := b.Create(map[uint64]any{1: "x", 3: "y", 5: "z", 6: "stop", 8: "a"}) calls := 0 - cb := func(k uint64, v interface{}) bool { + cb := func(k uint64, v any) bool { t.Logf("visiting (%d, %v)", k, v) calls++ return k%2 != 0 // stop after the first even number. @@ -195,7 +195,7 @@ func TestRange(t *testing.T) { } func TestDeepEqual(t *testing.T) { - for _, m := range []map[uint64]interface{}{ + for _, m := range []map[uint64]any{ {}, {1: "x"}, {1: "x", 2: "y"}, @@ -210,32 +210,32 @@ func TestDeepEqual(t *testing.T) { func TestNotDeepEqual(t *testing.T) { for _, c := range []struct { - left map[uint64]interface{} - right map[uint64]interface{} + left map[uint64]any + right map[uint64]any }{ { - map[uint64]interface{}{1: "x"}, - map[uint64]interface{}{}, + map[uint64]any{1: "x"}, + map[uint64]any{}, }, { - map[uint64]interface{}{}, - map[uint64]interface{}{1: "y"}, + map[uint64]any{}, + map[uint64]any{1: "y"}, }, { - map[uint64]interface{}{1: "x"}, - map[uint64]interface{}{1: "y"}, + map[uint64]any{1: "x"}, + map[uint64]any{1: "y"}, }, { - map[uint64]interface{}{1: "x"}, - map[uint64]interface{}{1: "x", 2: "Y"}, + map[uint64]any{1: "x"}, + map[uint64]any{1: "x", 2: "Y"}, }, { - map[uint64]interface{}{1: "x", 2: "Y"}, - map[uint64]interface{}{1: "x"}, + map[uint64]any{1: "x", 2: "Y"}, + map[uint64]any{1: "x"}, }, { - map[uint64]interface{}{1: "x", 2: "y"}, - map[uint64]interface{}{1: "x", 2: "Y"}, + map[uint64]any{1: "x", 2: "y"}, + map[uint64]any{1: "x", 2: "Y"}, }, } { l := NewBuilder().Create(c.left) @@ -249,97 +249,97 @@ func TestNotDeepEqual(t *testing.T) { func TestMerge(t *testing.T) { b := NewBuilder() for _, c := range []struct { - left map[uint64]interface{} - right map[uint64]interface{} + left map[uint64]any + right map[uint64]any want string }{ { - map[uint64]interface{}{}, - map[uint64]interface{}{}, + map[uint64]any{}, + map[uint64]any{}, "{}", }, { - map[uint64]interface{}{}, - map[uint64]interface{}{1: "a"}, + map[uint64]any{}, + map[uint64]any{1: "a"}, "{1: a}", }, { - map[uint64]interface{}{1: "a"}, - map[uint64]interface{}{}, + map[uint64]any{1: "a"}, + map[uint64]any{}, "{1: a}", }, { - map[uint64]interface{}{1: "a", 2: "b"}, - map[uint64]interface{}{}, + map[uint64]any{1: "a", 2: "b"}, + map[uint64]any{}, "{1: a, 2: b}", }, { - map[uint64]interface{}{1: "x"}, - map[uint64]interface{}{1: "y"}, + map[uint64]any{1: "x"}, + map[uint64]any{1: "y"}, "{1: x}", // default collision is left }, { - map[uint64]interface{}{1: "x"}, - map[uint64]interface{}{2: "y"}, + map[uint64]any{1: "x"}, + map[uint64]any{2: "y"}, "{1: x, 2: y}", }, { - map[uint64]interface{}{4: "y", 5: "z"}, - map[uint64]interface{}{1: "x"}, + 
map[uint64]any{4: "y", 5: "z"}, + map[uint64]any{1: "x"}, "{1: x, 4: y, 5: z}", }, { - map[uint64]interface{}{1: "x", 5: "z"}, - map[uint64]interface{}{4: "y"}, + map[uint64]any{1: "x", 5: "z"}, + map[uint64]any{4: "y"}, "{1: x, 4: y, 5: z}", }, { - map[uint64]interface{}{1: "x", 4: "y"}, - map[uint64]interface{}{5: "z"}, + map[uint64]any{1: "x", 4: "y"}, + map[uint64]any{5: "z"}, "{1: x, 4: y, 5: z}", }, { - map[uint64]interface{}{1: "a", 4: "c"}, - map[uint64]interface{}{2: "b", 5: "d"}, + map[uint64]any{1: "a", 4: "c"}, + map[uint64]any{2: "b", 5: "d"}, "{1: a, 2: b, 4: c, 5: d}", }, { - map[uint64]interface{}{1: "a", 4: "c"}, - map[uint64]interface{}{2: "b", 5 + 8: "d"}, + map[uint64]any{1: "a", 4: "c"}, + map[uint64]any{2: "b", 5 + 8: "d"}, "{1: a, 2: b, 4: c, 13: d}", }, { - map[uint64]interface{}{2: "b", 5 + 8: "d"}, - map[uint64]interface{}{1: "a", 4: "c"}, + map[uint64]any{2: "b", 5 + 8: "d"}, + map[uint64]any{1: "a", 4: "c"}, "{1: a, 2: b, 4: c, 13: d}", }, { - map[uint64]interface{}{1: "a", 4: "c"}, - map[uint64]interface{}{2: "b", 5 + 8: "d"}, + map[uint64]any{1: "a", 4: "c"}, + map[uint64]any{2: "b", 5 + 8: "d"}, "{1: a, 2: b, 4: c, 13: d}", }, { - map[uint64]interface{}{2: "b", 5 + 8: "d"}, - map[uint64]interface{}{1: "a", 4: "c"}, + map[uint64]any{2: "b", 5 + 8: "d"}, + map[uint64]any{1: "a", 4: "c"}, "{1: a, 2: b, 4: c, 13: d}", }, { - map[uint64]interface{}{2: "b", 5 + 8: "d"}, - map[uint64]interface{}{2: "", 3: "a"}, + map[uint64]any{2: "b", 5 + 8: "d"}, + map[uint64]any{2: "", 3: "a"}, "{2: b, 3: a, 13: d}", }, { // crafted for `!prefixesOverlap(p, m, q, n)` - left: map[uint64]interface{}{1: "a", 2 + 1: "b"}, - right: map[uint64]interface{}{4 + 1: "c", 4 + 2: "d"}, + left: map[uint64]any{1: "a", 2 + 1: "b"}, + right: map[uint64]any{4 + 1: "c", 4 + 2: "d"}, // p: 5, m: 2 q: 1, n: 2 want: "{1: a, 3: b, 5: c, 6: d}", }, { // crafted for `ord(m, n) && !zeroBit(q, m)` - left: map[uint64]interface{}{8 + 2 + 1: "a", 16 + 4: "b"}, - right: map[uint64]interface{}{16 + 8 + 2 + 1: "c", 16 + 8 + 4 + 2 + 1: "d"}, + left: map[uint64]any{8 + 2 + 1: "a", 16 + 4: "b"}, + right: map[uint64]any{16 + 8 + 2 + 1: "c", 16 + 8 + 4 + 2 + 1: "d"}, // left: p: 15, m: 16 // right: q: 27, n: 4 want: "{11: a, 20: b, 27: c, 31: d}", @@ -347,8 +347,8 @@ func TestMerge(t *testing.T) { { // crafted for `ord(n, m) && !zeroBit(p, n)` // p: 6, m: 1 q: 5, n: 2 - left: map[uint64]interface{}{4 + 2: "b", 4 + 2 + 1: "c"}, - right: map[uint64]interface{}{4: "a", 4 + 2 + 1: "dropped"}, + left: map[uint64]any{4 + 2: "b", 4 + 2 + 1: "c"}, + right: map[uint64]any{4: "a", 4 + 2 + 1: "dropped"}, want: "{4: a, 6: b, 7: c}", }, } { @@ -364,65 +364,65 @@ func TestIntersect(t *testing.T) { // Most of the test cases go after specific branches of intersect. 
b := NewBuilder() for _, c := range []struct { - left map[uint64]interface{} - right map[uint64]interface{} + left map[uint64]any + right map[uint64]any want string }{ { - left: map[uint64]interface{}{10: "a", 39: "b"}, - right: map[uint64]interface{}{10: "A", 39: "B", 75: "C"}, + left: map[uint64]any{10: "a", 39: "b"}, + right: map[uint64]any{10: "A", 39: "B", 75: "C"}, want: "{10: a, 39: b}", }, { - left: map[uint64]interface{}{10: "a", 39: "b"}, - right: map[uint64]interface{}{}, + left: map[uint64]any{10: "a", 39: "b"}, + right: map[uint64]any{}, want: "{}", }, { - left: map[uint64]interface{}{}, - right: map[uint64]interface{}{10: "A", 39: "B", 75: "C"}, + left: map[uint64]any{}, + right: map[uint64]any{10: "A", 39: "B", 75: "C"}, want: "{}", }, { // m == n && p == q && left.(*empty) case - left: map[uint64]interface{}{4: 1, 6: 3, 10: 8, 15: "on left"}, - right: map[uint64]interface{}{0: 8, 7: 6, 11: 0, 15: "on right"}, + left: map[uint64]any{4: 1, 6: 3, 10: 8, 15: "on left"}, + right: map[uint64]any{0: 8, 7: 6, 11: 0, 15: "on right"}, want: "{15: on left}", }, { // m == n && p == q && right.(*empty) case - left: map[uint64]interface{}{0: "on left", 1: 2, 2: 3, 3: 1, 7: 3}, - right: map[uint64]interface{}{0: "on right", 5: 1, 6: 8}, + left: map[uint64]any{0: "on left", 1: 2, 2: 3, 3: 1, 7: 3}, + right: map[uint64]any{0: "on right", 5: 1, 6: 8}, want: "{0: on left}", }, { // m == n && p == q && both left and right are not empty - left: map[uint64]interface{}{1: "a", 2: "b", 3: "c"}, - right: map[uint64]interface{}{0: "A", 1: "B", 2: "C"}, + left: map[uint64]any{1: "a", 2: "b", 3: "c"}, + right: map[uint64]any{0: "A", 1: "B", 2: "C"}, want: "{1: a, 2: b}", }, { // m == n && p == q && both left and right are not empty - left: map[uint64]interface{}{1: "a", 2: "b", 3: "c"}, - right: map[uint64]interface{}{0: "A", 1: "B", 2: "C"}, + left: map[uint64]any{1: "a", 2: "b", 3: "c"}, + right: map[uint64]any{0: "A", 1: "B", 2: "C"}, want: "{1: a, 2: b}", }, { // !prefixesOverlap(p, m, q, n) // p = 1, m = 2, q = 5, n = 2 - left: map[uint64]interface{}{0b001: 1, 0b011: 3}, - right: map[uint64]interface{}{0b100: 4, 0b111: 7}, + left: map[uint64]any{0b001: 1, 0b011: 3}, + right: map[uint64]any{0b100: 4, 0b111: 7}, want: "{}", }, { // ord(m, n) && zeroBit(q, m) // p = 3, m = 4, q = 0, n = 1 - left: map[uint64]interface{}{0b010: 2, 0b101: 5}, - right: map[uint64]interface{}{0b000: 0, 0b001: 1}, + left: map[uint64]any{0b010: 2, 0b101: 5}, + right: map[uint64]any{0b000: 0, 0b001: 1}, want: "{}", }, { // ord(m, n) && !zeroBit(q, m) // p = 29, m = 2, q = 30, n = 1 - left: map[uint64]interface{}{ + left: map[uint64]any{ 0b11101: "29", 0b11110: "30", }, - right: map[uint64]interface{}{ + right: map[uint64]any{ 0b11110: "30 on right", 0b11111: "31", }, @@ -430,14 +430,14 @@ func TestIntersect(t *testing.T) { }, { // ord(n, m) && zeroBit(p, n) // p = 5, m = 2, q = 3, n = 4 - left: map[uint64]interface{}{0b000: 0, 0b001: 1}, - right: map[uint64]interface{}{0b010: 2, 0b101: 5}, + left: map[uint64]any{0b000: 0, 0b001: 1}, + right: map[uint64]any{0b010: 2, 0b101: 5}, want: "{}", }, { // default case // p = 5, m = 2, q = 3, n = 4 - left: map[uint64]interface{}{0b100: 1, 0b110: 3}, - right: map[uint64]interface{}{0b000: 8, 0b111: 6}, + left: map[uint64]any{0b100: 1, 0b110: 3}, + right: map[uint64]any{0b000: 8, 0b111: 6}, want: "{}", }, } { @@ -451,10 +451,10 @@ func TestIntersect(t *testing.T) { func TestIntersectWith(t *testing.T) { b := NewBuilder() - l := b.Create(map[uint64]interface{}{10: 2.0, 39: 32.0}) - r := 
b.Create(map[uint64]interface{}{10: 6.0, 39: 10.0, 75: 1.0}) + l := b.Create(map[uint64]any{10: 2.0, 39: 32.0}) + r := b.Create(map[uint64]any{10: 6.0, 39: 10.0, 75: 1.0}) - prodIfDifferent := func(x interface{}, y interface{}) interface{} { + prodIfDifferent := func(x any, y any) any { if x, ok := x.(float64); ok { if y, ok := y.(float64); ok { if x == y { @@ -478,24 +478,24 @@ func TestRemove(t *testing.T) { // Most of the test cases go after specific branches of intersect. b := NewBuilder() for _, c := range []struct { - m map[uint64]interface{} + m map[uint64]any key uint64 want string }{ - {map[uint64]interface{}{}, 10, "{}"}, - {map[uint64]interface{}{10: "a"}, 10, "{}"}, - {map[uint64]interface{}{39: "b"}, 10, "{39: b}"}, + {map[uint64]any{}, 10, "{}"}, + {map[uint64]any{10: "a"}, 10, "{}"}, + {map[uint64]any{39: "b"}, 10, "{39: b}"}, // Branch cases: // !matchPrefix(kp, br.prefix, br.branching) - {map[uint64]interface{}{10: "a", 39: "b"}, 128, "{10: a, 39: b}"}, + {map[uint64]any{10: "a", 39: "b"}, 128, "{10: a, 39: b}"}, // case: left == br.left && right == br.right - {map[uint64]interface{}{10: "a", 39: "b"}, 16, "{10: a, 39: b}"}, + {map[uint64]any{10: "a", 39: "b"}, 16, "{10: a, 39: b}"}, // left updated and is empty. - {map[uint64]interface{}{10: "a", 39: "b"}, 10, "{39: b}"}, + {map[uint64]any{10: "a", 39: "b"}, 10, "{39: b}"}, // right updated and is empty. - {map[uint64]interface{}{10: "a", 39: "b"}, 39, "{10: a}"}, + {map[uint64]any{10: "a", 39: "b"}, 39, "{10: a}"}, // final b.mkBranch(...) case. - {map[uint64]interface{}{10: "a", 39: "b", 128: "c"}, 39, "{10: a, 128: c}"}, + {map[uint64]any{10: "a", 39: "b", 128: "c"}, 39, "{10: a, 128: c}"}, } { pre := b.Create(c.m) post := b.Remove(pre, c.key) @@ -507,8 +507,8 @@ func TestRemove(t *testing.T) { func TestRescope(t *testing.T) { b := NewBuilder() - l := b.Create(map[uint64]interface{}{10: "a", 39: "b"}) - r := b.Create(map[uint64]interface{}{10: "A", 39: "B", 75: "C"}) + l := b.Create(map[uint64]any{10: "a", 39: "b"}) + r := b.Create(map[uint64]any{10: "A", 39: "B", 75: "C"}) b.Rescope() @@ -526,8 +526,8 @@ func TestRescope(t *testing.T) { func TestSharing(t *testing.T) { b := NewBuilder() - l := b.Create(map[uint64]interface{}{0: "a", 1: "b"}) - r := b.Create(map[uint64]interface{}{1: "B", 2: "C"}) + l := b.Create(map[uint64]any{0: "a", 1: "b"}) + r := b.Create(map[uint64]any{1: "B", 2: "C"}) rleftold := r.n.(*branch).left diff --git a/go/callgraph/vta/propagation.go b/go/callgraph/vta/propagation.go index f448cde1135..6ce1ca9e322 100644 --- a/go/callgraph/vta/propagation.go +++ b/go/callgraph/vta/propagation.go @@ -120,7 +120,7 @@ func (ptm propTypeMap) propTypes(n node) func(yield func(propType) bool) { // (https://go.dev/issue/65237). return func(yield func(propType) bool) { if types := ptm[n]; types != nil { - types.M.Range(func(_ uint64, elem interface{}) bool { + types.M.Range(func(_ uint64, elem any) bool { return yield(elem.(propType)) }) } diff --git a/go/callgraph/vta/vta_test.go b/go/callgraph/vta/vta_test.go index ea7d584d2d9..42610abb139 100644 --- a/go/callgraph/vta/vta_test.go +++ b/go/callgraph/vta/vta_test.go @@ -118,7 +118,7 @@ func TestVTAProgVsFuncSet(t *testing.T) { // available, which can happen when using analysis package. A successful // test simply does not panic. 
func TestVTAPanicMissingDefinitions(t *testing.T) { - run := func(pass *analysis.Pass) (interface{}, error) { + run := func(pass *analysis.Pass) (any, error) { s := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA) CallGraph(ssautil.AllFunctions(s.Pkg.Prog), cha.CallGraph(s.Pkg.Prog)) return nil, nil diff --git a/go/expect/expect.go b/go/expect/expect.go index be0e1dd23e6..1c002d91b60 100644 --- a/go/expect/expect.go +++ b/go/expect/expect.go @@ -66,9 +66,9 @@ import ( // It knows the position of the start of the comment, and the name and // arguments that make up the note. type Note struct { - Pos token.Pos // The position at which the note identifier appears - Name string // the name associated with the note - Args []interface{} // the arguments for the note + Pos token.Pos // The position at which the note identifier appears + Name string // the name associated with the note + Args []any // the arguments for the note } // ReadFile is the type of a function that can provide file contents for a @@ -85,7 +85,7 @@ type ReadFile func(filename string) ([]byte, error) // MatchBefore returns the range of the line that matched the pattern, and // invalid positions if there was no match, or an error if the line could not be // found. -func MatchBefore(fset *token.FileSet, readFile ReadFile, end token.Pos, pattern interface{}) (token.Pos, token.Pos, error) { +func MatchBefore(fset *token.FileSet, readFile ReadFile, end token.Pos, pattern any) (token.Pos, token.Pos, error) { f := fset.File(end) content, err := readFile(f.Name()) if err != nil { diff --git a/go/expect/expect_test.go b/go/expect/expect_test.go index cc585418d1b..d1ce96b868e 100644 --- a/go/expect/expect_test.go +++ b/go/expect/expect_test.go @@ -18,7 +18,7 @@ func TestMarker(t *testing.T) { filename string expectNotes int expectMarkers map[string]string - expectChecks map[string][]interface{} + expectChecks map[string][]any }{ { filename: "testdata/test.go", @@ -36,7 +36,7 @@ func TestMarker(t *testing.T) { "NonIdentifier": "+", "StringMarker": "\"hello\"", }, - expectChecks: map[string][]interface{}{ + expectChecks: map[string][]any{ "αSimpleMarker": nil, "StringAndInt": {"Number %d", int64(12)}, "Bool": {true}, @@ -140,7 +140,7 @@ func TestMarker(t *testing.T) { } } -func checkMarker(t *testing.T, fset *token.FileSet, readFile expect.ReadFile, markers map[string]token.Pos, pos token.Pos, name string, pattern interface{}) { +func checkMarker(t *testing.T, fset *token.FileSet, readFile expect.ReadFile, markers map[string]token.Pos, pos token.Pos, name string, pattern any) { start, end, err := expect.MatchBefore(fset, readFile, pos, pattern) if err != nil { t.Errorf("%v: MatchBefore failed: %v", fset.Position(pos), err) diff --git a/go/expect/extract.go b/go/expect/extract.go index 902b1e806e4..9cc5c8171fd 100644 --- a/go/expect/extract.go +++ b/go/expect/extract.go @@ -32,7 +32,7 @@ type Identifier string // See the package documentation for details about the syntax of those // notes. 
func Parse(fset *token.FileSet, filename string, content []byte) ([]*Note, error) { - var src interface{} + var src any if content != nil { src = content } @@ -220,7 +220,7 @@ func (t *tokens) Pos() token.Pos { return t.base + token.Pos(t.scanner.Position.Offset) } -func (t *tokens) Errorf(msg string, args ...interface{}) { +func (t *tokens) Errorf(msg string, args ...any) { if t.err != nil { return } @@ -280,9 +280,9 @@ func parseNote(t *tokens) *Note { } } -func parseArgumentList(t *tokens) []interface{} { - args := []interface{}{} // @name() is represented by a non-nil empty slice. - t.Consume() // '(' +func parseArgumentList(t *tokens) []any { + args := []any{} // @name() is represented by a non-nil empty slice. + t.Consume() // '(' t.Skip('\n') for t.Token() != ')' { args = append(args, parseArgument(t)) @@ -300,7 +300,7 @@ func parseArgumentList(t *tokens) []interface{} { return args } -func parseArgument(t *tokens) interface{} { +func parseArgument(t *tokens) any { switch t.Token() { case scanner.Ident: v := t.Consume() diff --git a/go/internal/cgo/cgo.go b/go/internal/cgo/cgo.go index 697974bb9b2..735efeb531d 100644 --- a/go/internal/cgo/cgo.go +++ b/go/internal/cgo/cgo.go @@ -203,7 +203,7 @@ func envList(key, def string) []string { // stringList's arguments should be a sequence of string or []string values. // stringList flattens them into a single []string. -func stringList(args ...interface{}) []string { +func stringList(args ...any) []string { var x []string for _, arg := range args { switch arg := arg.(type) { diff --git a/go/internal/gccgoimporter/parser.go b/go/internal/gccgoimporter/parser.go index f70946edbe4..7b0702892c4 100644 --- a/go/internal/gccgoimporter/parser.go +++ b/go/internal/gccgoimporter/parser.go @@ -86,7 +86,7 @@ func (e importError) Error() string { return fmt.Sprintf("import error %s (byte offset = %d): %s", e.pos, e.pos.Offset, e.err) } -func (p *parser) error(err interface{}) { +func (p *parser) error(err any) { if s, ok := err.(string); ok { err = errors.New(s) } @@ -94,7 +94,7 @@ func (p *parser) error(err interface{}) { panic(importError{p.scanner.Pos(), err.(error)}) } -func (p *parser) errorf(format string, args ...interface{}) { +func (p *parser) errorf(format string, args ...any) { p.error(fmt.Errorf(format, args...)) } @@ -492,7 +492,7 @@ func (p *parser) reserve(n int) { // used to resolve named types, or it can be a *types.Pointer, // used to resolve pointers to named types in case they are referenced // by embedded fields. -func (p *parser) update(t types.Type, nlist []interface{}) { +func (p *parser) update(t types.Type, nlist []any) { if t == reserved { p.errorf("internal error: update(%v) invoked on reserved", nlist) } @@ -529,7 +529,7 @@ func (p *parser) update(t types.Type, nlist []interface{}) { // NamedType = TypeName [ "=" ] Type { Method } . // TypeName = ExportedName . // Method = "func" "(" Param ")" Name ParamList ResultList [InlineBody] ";" . -func (p *parser) parseNamedType(nlist []interface{}) types.Type { +func (p *parser) parseNamedType(nlist []any) types.Type { pkg, name := p.parseExportedName() scope := pkg.Scope() obj := scope.Lookup(name) @@ -648,7 +648,7 @@ func (p *parser) parseInt() int { // parseArrayOrSliceType parses an ArrayOrSliceType: // // ArrayOrSliceType = "[" [ int ] "]" Type . 
-func (p *parser) parseArrayOrSliceType(pkg *types.Package, nlist []interface{}) types.Type { +func (p *parser) parseArrayOrSliceType(pkg *types.Package, nlist []any) types.Type { p.expect('[') if p.tok == ']' { p.next() @@ -673,7 +673,7 @@ func (p *parser) parseArrayOrSliceType(pkg *types.Package, nlist []interface{}) // parseMapType parses a MapType: // // MapType = "map" "[" Type "]" Type . -func (p *parser) parseMapType(pkg *types.Package, nlist []interface{}) types.Type { +func (p *parser) parseMapType(pkg *types.Package, nlist []any) types.Type { p.expectKeyword("map") t := new(types.Map) @@ -691,7 +691,7 @@ func (p *parser) parseMapType(pkg *types.Package, nlist []interface{}) types.Typ // parseChanType parses a ChanType: // // ChanType = "chan" ["<-" | "-<"] Type . -func (p *parser) parseChanType(pkg *types.Package, nlist []interface{}) types.Type { +func (p *parser) parseChanType(pkg *types.Package, nlist []any) types.Type { p.expectKeyword("chan") t := new(types.Chan) @@ -720,7 +720,7 @@ func (p *parser) parseChanType(pkg *types.Package, nlist []interface{}) types.Ty // parseStructType parses a StructType: // // StructType = "struct" "{" { Field } "}" . -func (p *parser) parseStructType(pkg *types.Package, nlist []interface{}) types.Type { +func (p *parser) parseStructType(pkg *types.Package, nlist []any) types.Type { p.expectKeyword("struct") t := new(types.Struct) @@ -793,7 +793,7 @@ func (p *parser) parseResultList(pkg *types.Package) *types.Tuple { // parseFunctionType parses a FunctionType: // // FunctionType = ParamList ResultList . -func (p *parser) parseFunctionType(pkg *types.Package, nlist []interface{}) *types.Signature { +func (p *parser) parseFunctionType(pkg *types.Package, nlist []any) *types.Signature { t := new(types.Signature) p.update(t, nlist) @@ -837,7 +837,7 @@ func (p *parser) parseFunc(pkg *types.Package) *types.Func { // parseInterfaceType parses an InterfaceType: // // InterfaceType = "interface" "{" { ("?" Type | Func) ";" } "}" . -func (p *parser) parseInterfaceType(pkg *types.Package, nlist []interface{}) types.Type { +func (p *parser) parseInterfaceType(pkg *types.Package, nlist []any) types.Type { p.expectKeyword("interface") t := new(types.Interface) @@ -868,7 +868,7 @@ func (p *parser) parseInterfaceType(pkg *types.Package, nlist []interface{}) typ // parsePointerType parses a PointerType: // // PointerType = "*" ("any" | Type) . -func (p *parser) parsePointerType(pkg *types.Package, nlist []interface{}) types.Type { +func (p *parser) parsePointerType(pkg *types.Package, nlist []any) types.Type { p.expect('*') if p.tok == scanner.Ident { p.expectKeyword("any") @@ -888,7 +888,7 @@ func (p *parser) parsePointerType(pkg *types.Package, nlist []interface{}) types // parseTypeSpec parses a TypeSpec: // // TypeSpec = NamedType | MapType | ChanType | StructType | InterfaceType | PointerType | ArrayOrSliceType | FunctionType . -func (p *parser) parseTypeSpec(pkg *types.Package, nlist []interface{}) types.Type { +func (p *parser) parseTypeSpec(pkg *types.Package, nlist []any) types.Type { switch p.tok { case scanner.String: return p.parseNamedType(nlist) @@ -980,14 +980,14 @@ func lookupBuiltinType(typ int) types.Type { // Type = "<" "type" ( "-" int | int [ TypeSpec ] ) ">" . // // parseType updates the type map to t for all type numbers n. -func (p *parser) parseType(pkg *types.Package, n ...interface{}) types.Type { +func (p *parser) parseType(pkg *types.Package, n ...any) types.Type { p.expect('<') t, _ := p.parseTypeAfterAngle(pkg, n...) 
return t } // (*parser).Type after reading the "<". -func (p *parser) parseTypeAfterAngle(pkg *types.Package, n ...interface{}) (t types.Type, n1 int) { +func (p *parser) parseTypeAfterAngle(pkg *types.Package, n ...any) (t types.Type, n1 int) { p.expectKeyword("type") n1 = 0 @@ -1030,7 +1030,7 @@ func (p *parser) parseTypeAfterAngle(pkg *types.Package, n ...interface{}) (t ty // parseTypeExtended is identical to parseType, but if the type in // question is a saved type, returns the index as well as the type // pointer (index returned is zero if we parsed a builtin). -func (p *parser) parseTypeExtended(pkg *types.Package, n ...interface{}) (t types.Type, n1 int) { +func (p *parser) parseTypeExtended(pkg *types.Package, n ...any) (t types.Type, n1 int) { p.expect('<') t, n1 = p.parseTypeAfterAngle(pkg, n...) return @@ -1119,7 +1119,7 @@ func (p *parser) parseTypes(pkg *types.Package) { } // parseSavedType parses one saved type definition. -func (p *parser) parseSavedType(pkg *types.Package, i int, nlist []interface{}) { +func (p *parser) parseSavedType(pkg *types.Package, i int, nlist []any) { defer func(s *scanner.Scanner, tok rune, lit string) { p.scanner = s p.tok = tok diff --git a/go/loader/loader.go b/go/loader/loader.go index 2d4865f664f..d06f95ad76c 100644 --- a/go/loader/loader.go +++ b/go/loader/loader.go @@ -215,7 +215,7 @@ func (conf *Config) fset() *token.FileSet { // src specifies the parser input as a string, []byte, or io.Reader, and // filename is its apparent name. If src is nil, the contents of // filename are read from the file system. -func (conf *Config) ParseFile(filename string, src interface{}) (*ast.File, error) { +func (conf *Config) ParseFile(filename string, src any) (*ast.File, error) { // TODO(adonovan): use conf.build() etc like parseFiles does. return parser.ParseFile(conf.fset(), filename, src, conf.ParserMode) } diff --git a/go/packages/gopackages/main.go b/go/packages/gopackages/main.go index 3841ac3410b..7ec0bdc7bdd 100644 --- a/go/packages/gopackages/main.go +++ b/go/packages/gopackages/main.go @@ -248,7 +248,7 @@ func (app *application) print(lpkg *packages.Package) { // e.g. --flag=one --flag=two would produce []string{"one", "two"}. 
type stringListValue []string -func (ss *stringListValue) Get() interface{} { return []string(*ss) } +func (ss *stringListValue) Get() any { return []string(*ss) } func (ss *stringListValue) String() string { return fmt.Sprintf("%q", *ss) } diff --git a/go/packages/overlay_test.go b/go/packages/overlay_test.go index 9edd0d646ed..1108461926f 100644 --- a/go/packages/overlay_test.go +++ b/go/packages/overlay_test.go @@ -32,7 +32,7 @@ func testOverlayChangesPackageName(t *testing.T, exporter packagestest.Exporter) log.SetFlags(log.Lshortfile) exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "fake", - Files: map[string]interface{}{ + Files: map[string]any{ "a.go": "package foo\nfunc f(){}\n", }, Overlay: map[string][]byte{ @@ -62,7 +62,7 @@ func testOverlayChangesBothPackageNames(t *testing.T, exporter packagestest.Expo log.SetFlags(log.Lshortfile) exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "fake", - Files: map[string]interface{}{ + Files: map[string]any{ "a.go": "package foo\nfunc g(){}\n", "a_test.go": "package foo\nfunc f(){}\n", }, @@ -110,7 +110,7 @@ func TestOverlayChangesTestPackageName(t *testing.T) { func testOverlayChangesTestPackageName(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "fake", - Files: map[string]interface{}{ + Files: map[string]any{ "a_test.go": "package foo\nfunc f(){}\n", }, Overlay: map[string][]byte{ @@ -194,7 +194,7 @@ func TestHello(t *testing.T) { // First, get the source of truth by loading the package, all on disk. onDisk := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": aFile, "a/a_test.go": aTestVariant, "a/a_x_test.go": aXTest, @@ -213,7 +213,7 @@ func TestHello(t *testing.T) { exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": aFile, "a/a_test.go": aTestVariant, "a/a_x_test.go": ``, // empty x test on disk @@ -248,7 +248,7 @@ func TestOverlay(t *testing.T) { testAllOrModulesParallel(t, testOverlay) } func testOverlay(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": `package a; import "golang.org/fake/b"; const A = "a" + b.B`, "b/b.go": `package b; import "golang.org/fake/c"; const B = "b" + c.C`, "c/c.go": `package c; const C = "c"`, @@ -316,7 +316,7 @@ func TestOverlayDeps(t *testing.T) { testAllOrModulesParallel(t, testOverlayDeps func testOverlayDeps(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "c/c.go": `package c; const C = "c"`, "c/c_test.go": `package c; import "testing"; func TestC(t *testing.T) {}`, }, @@ -366,7 +366,7 @@ func testNewPackagesInOverlay(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{ { Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": `package a; import "golang.org/fake/b"; const A = "a" + b.B`, "b/b.go": `package b; import "golang.org/fake/c"; const B = "b" + c.C`, "c/c.go": `package c; const C = "c"`, @@ -375,7 +375,7 @@ func testNewPackagesInOverlay(t *testing.T, 
exporter packagestest.Exporter) { }, { Name: "example.com/extramodule", - Files: map[string]interface{}{ + Files: map[string]any{ "pkg/x.go": "package pkg\n", }, }, @@ -471,7 +471,7 @@ func testOverlayNewPackageAndTest(t *testing.T, exporter packagestest.Exporter) exported := packagestest.Export(t, exporter, []packagestest.Module{ { Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "foo.txt": "placeholder", }, }, @@ -623,7 +623,7 @@ func TestOverlayGOPATHVendoring(t *testing.T) { exported := packagestest.Export(t, packagestest.GOPATH, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "vendor/vendor.com/foo/foo.go": `package foo; const X = "hi"`, "user/user.go": `package user`, }, @@ -652,7 +652,7 @@ func TestContainsOverlay(t *testing.T) { testAllOrModulesParallel(t, testContain func testContainsOverlay(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": `package a; import "golang.org/fake/b"`, "b/b.go": `package b; import "golang.org/fake/c"`, "c/c.go": `package c`, @@ -681,7 +681,7 @@ func TestContainsOverlayXTest(t *testing.T) { testAllOrModulesParallel(t, testCo func testContainsOverlayXTest(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": `package a; import "golang.org/fake/b"`, "b/b.go": `package b; import "golang.org/fake/c"`, "c/c.go": `package c`, @@ -717,7 +717,7 @@ func testInvalidFilesBeforeOverlay(t *testing.T, exporter packagestest.Exporter) exported := packagestest.Export(t, exporter, []packagestest.Module{ { Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "d/d.go": ``, "main.go": ``, }, @@ -754,7 +754,7 @@ func testInvalidFilesBeforeOverlayContains(t *testing.T, exporter packagestest.E exported := packagestest.Export(t, exporter, []packagestest.Module{ { Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "d/d.go": `package d; import "net/http"; const Get = http.MethodGet; const Hello = "hello";`, "d/util.go": ``, "d/d_test.go": ``, @@ -861,7 +861,7 @@ func testInvalidXTestInGOPATH(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{ { Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "x/x.go": `package x`, "x/x_test.go": ``, }, @@ -892,7 +892,7 @@ func testAddImportInOverlay(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{ { Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": `package a import ( @@ -961,7 +961,7 @@ func testLoadDifferentPatterns(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{ { Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "foo.txt": "placeholder", "b/b.go": `package b import "golang.org/fake/a" diff --git a/go/packages/packages.go b/go/packages/packages.go index 342f019a0f9..6665a04c173 100644 --- a/go/packages/packages.go +++ b/go/packages/packages.go @@ -163,7 +163,7 @@ type Config struct { // If the user provides a logger, debug logging is enabled. 
// If the GOPACKAGESDEBUG environment variable is set to true, // but the logger is nil, default to log.Printf. - Logf func(format string, args ...interface{}) + Logf func(format string, args ...any) // Dir is the directory in which to run the build system's query tool // that provides information about the packages. @@ -566,13 +566,13 @@ type ModuleError struct { } func init() { - packagesinternal.GetDepsErrors = func(p interface{}) []*packagesinternal.PackageError { + packagesinternal.GetDepsErrors = func(p any) []*packagesinternal.PackageError { return p.(*Package).depsErrors } - packagesinternal.SetModFile = func(config interface{}, value string) { + packagesinternal.SetModFile = func(config any, value string) { config.(*Config).modFile = value } - packagesinternal.SetModFlag = func(config interface{}, value string) { + packagesinternal.SetModFlag = func(config any, value string) { config.(*Config).modFlag = value } packagesinternal.TypecheckCgo = int(typecheckCgo) @@ -741,7 +741,7 @@ func newLoader(cfg *Config) *loader { if debug { ld.Config.Logf = log.Printf } else { - ld.Config.Logf = func(format string, args ...interface{}) {} + ld.Config.Logf = func(format string, args ...any) {} } } if ld.Config.Mode == 0 { diff --git a/go/packages/packages_test.go b/go/packages/packages_test.go index 06fa488d1ed..5678b265561 100644 --- a/go/packages/packages_test.go +++ b/go/packages/packages_test.go @@ -129,7 +129,7 @@ func TestLoadImportsGraph(t *testing.T) { testAllOrModulesParallel(t, testLoadIm func testLoadImportsGraph(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": `package a; const A = 1`, "b/b.go": `package b; import ("golang.org/fake/a"; _ "container/list"); var B = a.A`, "c/c.go": `package c; import (_ "golang.org/fake/b"; _ "unsafe")`, @@ -305,7 +305,7 @@ func TestLoadImportsTestVariants(t *testing.T) { func testLoadImportsTestVariants(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": `package a; import _ "golang.org/fake/b"`, "b/b.go": `package b`, "b/b_test.go": `package b`, @@ -346,11 +346,11 @@ func TestLoadAbsolutePath(t *testing.T) { exported := packagestest.Export(t, packagestest.GOPATH, []packagestest.Module{{ Name: "golang.org/gopatha", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": `package a`, }}, { Name: "golang.org/gopathb", - Files: map[string]interface{}{ + Files: map[string]any{ "b/b.go": `package b`, }}}) defer exported.Cleanup() @@ -381,7 +381,7 @@ func TestLoadArgumentListIsNotTooLong(t *testing.T) { argMax := 1_000_000 exported := packagestest.Export(t, packagestest.GOPATH, []packagestest.Module{{ Name: "golang.org/mod", - Files: map[string]interface{}{ + Files: map[string]any{ "main.go": `package main"`, }}}) defer exported.Cleanup() @@ -402,7 +402,7 @@ func TestVendorImports(t *testing.T) { exported := packagestest.Export(t, packagestest.GOPATH, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": `package a; import _ "b"; import _ "golang.org/fake/c";`, "a/vendor/b/b.go": `package b; import _ "golang.org/fake/c"`, "c/c.go": `package c; import _ "b"`, @@ -463,7 +463,7 @@ func TestConfigDir(t *testing.T) { testAllOrModulesParallel(t, testConfigDir) } func 
testConfigDir(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": `package a; const Name = "a" `, "a/b/b.go": `package b; const Name = "a/b"`, "b/b.go": `package b; const Name = "b"`, @@ -522,7 +522,7 @@ func testConfigFlags(t *testing.T, exporter packagestest.Exporter) { // Test satisfying +build line tags, with -tags flag. exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ // package a "a/a.go": `package a; import _ "golang.org/fake/a/b"`, "a/b.go": `// +build tag @@ -587,7 +587,7 @@ func testLoadTypes(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": `package a; import "golang.org/fake/b"; import "golang.org/fake/c"; const A = "a" + b.B + c.C`, "b/b.go": `package b; const B = "b"`, "c/c.go": `package c; const C = "c" + 1`, @@ -640,7 +640,7 @@ func TestLoadTypesBits(t *testing.T) { testAllOrModulesParallel(t, testLoadTypes func testLoadTypesBits(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": `package a; import "golang.org/fake/b"; const A = "a" + b.B`, "b/b.go": `package b; import "golang.org/fake/c"; const B = "b" + c.C`, "c/c.go": `package c; import "golang.org/fake/d"; const C = "c" + d.D`, @@ -716,7 +716,7 @@ func TestLoadSyntaxOK(t *testing.T) { testAllOrModulesParallel(t, testLoadSyntax func testLoadSyntaxOK(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": `package a; import "golang.org/fake/b"; const A = "a" + b.B`, "b/b.go": `package b; import "golang.org/fake/c"; const B = "b" + c.C`, "c/c.go": `package c; import "golang.org/fake/d"; const C = "c" + d.D`, @@ -807,7 +807,7 @@ func testLoadDiamondTypes(t *testing.T, exporter packagestest.Exporter) { // We make a diamond dependency and check the type d.D is the same through both paths exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": `package a; import ("golang.org/fake/b"; "golang.org/fake/c"); var _ = b.B == c.C`, "b/b.go": `package b; import "golang.org/fake/d"; var B d.D`, "c/c.go": `package c; import "golang.org/fake/d"; var C d.D`, @@ -850,7 +850,7 @@ func testLoadSyntaxError(t *testing.T, exporter packagestest.Exporter) { // should be IllTyped. 
exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": `package a; import "golang.org/fake/b"; const A = "a" + b.B`, "b/b.go": `package b; import "golang.org/fake/c"; const B = "b" + c.C`, "c/c.go": `package c; import "golang.org/fake/d"; const C = "c" + d.D`, @@ -922,7 +922,7 @@ func TestParseFileModifyAST(t *testing.T) { testAllOrModulesParallel(t, testPars func testParseFileModifyAST(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": `package a; const A = "a" `, }}}) defer exported.Cleanup() @@ -1010,7 +1010,7 @@ func testLoadAllSyntaxImportErrors(t *testing.T, exporter packagestest.Exporter) exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "unicycle/unicycle.go": `package unicycle; import _ "unicycle"`, "bicycle1/bicycle1.go": `package bicycle1; import _ "bicycle2"`, "bicycle2/bicycle2.go": `package bicycle2; import _ "bicycle1"`, @@ -1090,7 +1090,7 @@ func TestAbsoluteFilenames(t *testing.T) { testAllOrModulesParallel(t, testAbsol func testAbsoluteFilenames(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": `package a; const A = 1`, "b/b.go": `package b; import ("golang.org/fake/a"; _ "errors"); var B = a.A`, "b/vendor/a/a.go": `package a; const A = 1`, @@ -1180,7 +1180,7 @@ func TestContains(t *testing.T) { testAllOrModulesParallel(t, testContains) } func testContains(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": `package a; import "golang.org/fake/b"`, "b/b.go": `package b; import "golang.org/fake/c"`, "c/c.go": `package c`, @@ -1219,7 +1219,7 @@ func testSizes(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": `package a; import "unsafe"; const WordSize = 8*unsafe.Sizeof(int(0))`, }}}) defer exported.Cleanup() @@ -1257,7 +1257,7 @@ func TestNeedTypeSizesWithBadGOARCH(t *testing.T) { testAllOrModulesParallel(t, func(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "testdata", - Files: map[string]interface{}{"a/a.go": `package a`}}}) + Files: map[string]any{"a/a.go": `package a`}}}) defer exported.Cleanup() exported.Config.Mode = packages.NeedTypesSizes // or {,Info,Sizes} @@ -1280,7 +1280,7 @@ func TestContainsFallbackSticks(t *testing.T) { func testContainsFallbackSticks(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": `package a; import "golang.org/fake/b"`, "b/b.go": `package b; import "golang.org/fake/c"`, "c/c.go": `package c`, @@ -1313,7 +1313,7 @@ func TestNoPatterns(t *testing.T) { testAllOrModulesParallel(t, testNoPatterns) func testNoPatterns(t *testing.T, exporter packagestest.Exporter) { exported := 
packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": `package a;`, "a/b/b.go": `package b;`, }}}) @@ -1336,7 +1336,7 @@ func testJSON(t *testing.T, exporter packagestest.Exporter) { // TODO: add in some errors exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": `package a; const A = 1`, "b/b.go": `package b; import "golang.org/fake/a"; var B = a.A`, "c/c.go": `package c; import "golang.org/fake/b" ; var C = b.B`, @@ -1503,7 +1503,7 @@ func TestPatternPassthrough(t *testing.T) { testAllOrModulesParallel(t, testPatt func testPatternPassthrough(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": `package a;`, }}}) defer exported.Cleanup() @@ -1563,7 +1563,7 @@ EOF } exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "bin/gopackagesdriver": driverScript, "golist/golist.go": "package golist", }}}) @@ -1639,7 +1639,7 @@ func TestBasicXTest(t *testing.T) { testAllOrModulesParallel(t, testBasicXTest) func testBasicXTest(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": `package a;`, "a/a_test.go": `package a_test;`, }}}) @@ -1657,7 +1657,7 @@ func TestErrorMissingFile(t *testing.T) { testAllOrModulesParallel(t, testErrorM func testErrorMissingFile(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a_test.go": `package a;`, }}}) defer exported.Cleanup() @@ -1685,11 +1685,11 @@ func TestReturnErrorWhenUsingNonGoFiles(t *testing.T) { func testReturnErrorWhenUsingNonGoFiles(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/gopatha", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": `package a`, }}, { Name: "golang.org/gopathb", - Files: map[string]interface{}{ + Files: map[string]any{ "b/b.c": `package b`, }}}) defer exported.Cleanup() @@ -1713,7 +1713,7 @@ func TestReturnErrorWhenUsingGoFilesInMultipleDirectories(t *testing.T) { func testReturnErrorWhenUsingGoFilesInMultipleDirectories(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/gopatha", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": `package a`, "b/b.go": `package b`, }}}) @@ -1745,7 +1745,7 @@ func TestReturnErrorForUnexpectedDirectoryLayout(t *testing.T) { func testReturnErrorForUnexpectedDirectoryLayout(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/gopatha", - Files: map[string]interface{}{ + Files: map[string]any{ "a/testdata/a.go": `package a; import _ "b"`, "a/vendor/b/b.go": `package b; import _ "fmt"`, }}}) @@ -1774,7 +1774,7 @@ func TestMissingDependency(t *testing.T) { testAllOrModulesParallel(t, testMissi func testMissingDependency(t *testing.T, exporter packagestest.Exporter) 
{ exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": `package a; import _ "this/package/doesnt/exist"`, }}}) defer exported.Cleanup() @@ -1796,7 +1796,7 @@ func TestAdHocContains(t *testing.T) { testAllOrModulesParallel(t, testAdHocCont func testAdHocContains(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": `package a;`, }}}) defer exported.Cleanup() @@ -1839,7 +1839,7 @@ func testCgoNoCcompiler(t *testing.T, exporter packagestest.Exporter) { testenv.NeedsTool(t, "cgo") exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": `package a import "net/http" const A = http.MethodGet @@ -1873,7 +1873,7 @@ func testCgoMissingFile(t *testing.T, exporter packagestest.Exporter) { testenv.NeedsTool(t, "cgo") exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": `package a // #include "foo.h" @@ -1962,7 +1962,7 @@ func testCgoNoSyntax(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "c/c.go": `package c; import "C"`, }, }}) @@ -2005,7 +2005,7 @@ func testCgoBadPkgConfig(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "c/c.go": `package c // #cgo pkg-config: --cflags -- foo @@ -2074,7 +2074,7 @@ func TestIssue32814(t *testing.T) { testAllOrModulesParallel(t, testIssue32814) func testIssue32814(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{}}}) + Files: map[string]any{}}}) defer exported.Cleanup() exported.Config.Mode = packages.NeedName | packages.NeedTypes | packages.NeedSyntax | packages.NeedTypesInfo | packages.NeedTypesSizes @@ -2103,7 +2103,7 @@ func TestLoadTypesInfoWithoutNeedDeps(t *testing.T) { func testLoadTypesInfoWithoutNeedDeps(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": `package a; import _ "golang.org/fake/b"`, "b/b.go": `package b`, }}}) @@ -2130,7 +2130,7 @@ func TestLoadWithNeedDeps(t *testing.T) { func testLoadWithNeedDeps(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": `package a; import _ "golang.org/fake/b"`, "b/b.go": `package b; import _ "golang.org/fake/c"`, "c/c.go": `package c`, @@ -2174,7 +2174,7 @@ func TestImpliedLoadMode(t *testing.T) { func testImpliedLoadMode(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": `package a; import _ "golang.org/fake/b"`, "b/b.go": `package b`, }}}) @@ -2243,7 
+2243,7 @@ func TestMultiplePackageVersionsIssue36188(t *testing.T) { func testMultiplePackageVersionsIssue36188(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": `package a; import _ "golang.org/fake/b"`, "b/b.go": `package main`, }}}) @@ -2363,7 +2363,7 @@ func TestCycleImportStack(t *testing.T) { func testCycleImportStack(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": `package a; import _ "golang.org/fake/b"`, "b/b.go": `package b; import _ "golang.org/fake/a"`, }}}) @@ -2393,7 +2393,7 @@ func TestForTestField(t *testing.T) { func testForTestField(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": `package a; func hello() {};`, "a/a_test.go": `package a; import "testing"; func TestA1(t *testing.T) {};`, "a/x_test.go": `package a_test; import "testing"; func TestA2(t *testing.T) {};`, @@ -2499,7 +2499,7 @@ func testIssue37098(t *testing.T, exporter packagestest.Exporter) { // file. exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ // The "package" statement must be included for SWIG sources to // be generated. "a/a.go": "package a", @@ -2550,7 +2550,7 @@ func TestIssue56632(t *testing.T) { exported := packagestest.Export(t, packagestest.GOPATH, []packagestest.Module{{ Name: "golang.org/issue56632", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": `package a`, "a/a_cgo.go": `package a @@ -2593,7 +2593,7 @@ func testInvalidFilesInXTest(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{ { Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "d/d.go": `package d; import "net/http"; const d = http.MethodGet; func Get() string { return d; }`, "d/d2.go": ``, // invalid file "d/d_test.go": `package d_test; import "testing"; import "golang.org/fake/d"; func TestD(t *testing.T) { d.Get(); }`, @@ -2628,7 +2628,7 @@ func testTypecheckCgo(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{ { Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "cgo/cgo.go": cgo, }, }, @@ -2662,7 +2662,7 @@ func testIssue48226(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{ { Name: "golang.org/fake/syntax", - Files: map[string]interface{}{ + Files: map[string]any{ "syntax.go": `package test`, }, }, @@ -2697,7 +2697,7 @@ func TestModule(t *testing.T) { func testModule(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{"a/a.go": `package a`}}}) + Files: map[string]any{"a/a.go": `package a`}}}) exported.Config.Mode = packages.NeedModule rootDir := filepath.Dir(filepath.Dir(exported.File("golang.org/fake", "a/a.go"))) @@ -2746,7 +2746,7 @@ func testExternal_NotHandled(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, 
exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": `package a`, "empty_driver/main.go": `package main @@ -2825,7 +2825,7 @@ func TestInvalidPackageName(t *testing.T) { func testInvalidPackageName(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "main.go": `package default func main() { @@ -3206,7 +3206,7 @@ func TestLoadTypesInfoWithoutSyntaxOrTypes(t *testing.T) { func testLoadTypesInfoWithoutSyntaxOrTypes(t *testing.T, exporter packagestest.Exporter) { exported := packagestest.Export(t, exporter, []packagestest.Module{{ Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "a/a.go": `package a; func foo() int { diff --git a/go/packages/packagestest/expect.go b/go/packages/packagestest/expect.go index dc41894a6ed..4be34191e62 100644 --- a/go/packages/packagestest/expect.go +++ b/go/packages/packagestest/expect.go @@ -72,7 +72,7 @@ const ( // // It is safe to call this repeatedly with different method sets, but it is // not safe to call it concurrently. -func (e *Exported) Expect(methods map[string]interface{}) error { +func (e *Exported) Expect(methods map[string]any) error { if err := e.getNotes(); err != nil { return err } @@ -98,7 +98,7 @@ func (e *Exported) Expect(methods map[string]interface{}) error { n = &expect.Note{ Pos: n.Pos, Name: markMethod, - Args: []interface{}{n.Name, n.Name}, + Args: []any{n.Name, n.Name}, } } mi, ok := ms[n.Name] @@ -222,7 +222,7 @@ func (e *Exported) getMarkers() error { } // set markers early so that we don't call getMarkers again from Expect e.markers = make(map[string]Range) - return e.Expect(map[string]interface{}{ + return e.Expect(map[string]any{ markMethod: e.Mark, }) } @@ -243,7 +243,7 @@ var ( // It takes the args remaining, and returns the args it did not consume. // This allows a converter to consume 0 args for well known types, or multiple // args for compound types. -type converter func(*expect.Note, []interface{}) (reflect.Value, []interface{}, error) +type converter func(*expect.Note, []any) (reflect.Value, []any, error) // method is used to track information about Invoke methods that is expensive to // calculate so that we can work it out once rather than per marker. 
@@ -259,19 +259,19 @@ type method struct { func (e *Exported) buildConverter(pt reflect.Type) (converter, error) { switch { case pt == noteType: - return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) { + return func(n *expect.Note, args []any) (reflect.Value, []any, error) { return reflect.ValueOf(n), args, nil }, nil case pt == fsetType: - return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) { + return func(n *expect.Note, args []any) (reflect.Value, []any, error) { return reflect.ValueOf(e.ExpectFileSet), args, nil }, nil case pt == exportedType: - return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) { + return func(n *expect.Note, args []any) (reflect.Value, []any, error) { return reflect.ValueOf(e), args, nil }, nil case pt == posType: - return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) { + return func(n *expect.Note, args []any) (reflect.Value, []any, error) { r, remains, err := e.rangeConverter(n, args) if err != nil { return reflect.Value{}, nil, err @@ -279,7 +279,7 @@ func (e *Exported) buildConverter(pt reflect.Type) (converter, error) { return reflect.ValueOf(r.Start), remains, nil }, nil case pt == positionType: - return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) { + return func(n *expect.Note, args []any) (reflect.Value, []any, error) { r, remains, err := e.rangeConverter(n, args) if err != nil { return reflect.Value{}, nil, err @@ -287,7 +287,7 @@ func (e *Exported) buildConverter(pt reflect.Type) (converter, error) { return reflect.ValueOf(e.ExpectFileSet.Position(r.Start)), remains, nil }, nil case pt == rangeType: - return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) { + return func(n *expect.Note, args []any) (reflect.Value, []any, error) { r, remains, err := e.rangeConverter(n, args) if err != nil { return reflect.Value{}, nil, err @@ -295,7 +295,7 @@ func (e *Exported) buildConverter(pt reflect.Type) (converter, error) { return reflect.ValueOf(r), remains, nil }, nil case pt == identifierType: - return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) { + return func(n *expect.Note, args []any) (reflect.Value, []any, error) { if len(args) < 1 { return reflect.Value{}, nil, fmt.Errorf("missing argument") } @@ -310,7 +310,7 @@ func (e *Exported) buildConverter(pt reflect.Type) (converter, error) { }, nil case pt == regexType: - return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) { + return func(n *expect.Note, args []any) (reflect.Value, []any, error) { if len(args) < 1 { return reflect.Value{}, nil, fmt.Errorf("missing argument") } @@ -323,7 +323,7 @@ func (e *Exported) buildConverter(pt reflect.Type) (converter, error) { }, nil case pt.Kind() == reflect.String: - return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) { + return func(n *expect.Note, args []any) (reflect.Value, []any, error) { if len(args) < 1 { return reflect.Value{}, nil, fmt.Errorf("missing argument") } @@ -339,7 +339,7 @@ func (e *Exported) buildConverter(pt reflect.Type) (converter, error) { } }, nil case pt.Kind() == reflect.Int64: - return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) { + return func(n *expect.Note, args []any) (reflect.Value, []any, error) { if len(args) < 1 { return reflect.Value{}, nil, fmt.Errorf("missing argument") } @@ -353,7 +353,7 @@ func (e 
*Exported) buildConverter(pt reflect.Type) (converter, error) { } }, nil case pt.Kind() == reflect.Bool: - return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) { + return func(n *expect.Note, args []any) (reflect.Value, []any, error) { if len(args) < 1 { return reflect.Value{}, nil, fmt.Errorf("missing argument") } @@ -366,7 +366,7 @@ func (e *Exported) buildConverter(pt reflect.Type) (converter, error) { return reflect.ValueOf(b), args, nil }, nil case pt.Kind() == reflect.Slice: - return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) { + return func(n *expect.Note, args []any) (reflect.Value, []any, error) { converter, err := e.buildConverter(pt.Elem()) if err != nil { return reflect.Value{}, nil, err @@ -384,7 +384,7 @@ func (e *Exported) buildConverter(pt reflect.Type) (converter, error) { }, nil default: if pt.Kind() == reflect.Interface && pt.NumMethod() == 0 { - return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) { + return func(n *expect.Note, args []any) (reflect.Value, []any, error) { if len(args) < 1 { return reflect.Value{}, nil, fmt.Errorf("missing argument") } @@ -395,7 +395,7 @@ func (e *Exported) buildConverter(pt reflect.Type) (converter, error) { } } -func (e *Exported) rangeConverter(n *expect.Note, args []interface{}) (Range, []interface{}, error) { +func (e *Exported) rangeConverter(n *expect.Note, args []any) (Range, []any, error) { tokFile := e.ExpectFileSet.File(n.Pos) if len(args) < 1 { return Range{}, nil, fmt.Errorf("missing argument") diff --git a/go/packages/packagestest/expect_test.go b/go/packages/packagestest/expect_test.go index 46d96d61fb9..70ff6656012 100644 --- a/go/packages/packagestest/expect_test.go +++ b/go/packages/packagestest/expect_test.go @@ -19,7 +19,7 @@ func TestExpect(t *testing.T) { }}) defer exported.Cleanup() checkCount := 0 - if err := exported.Expect(map[string]interface{}{ + if err := exported.Expect(map[string]any{ "check": func(src, target token.Position) { checkCount++ }, diff --git a/go/packages/packagestest/export.go b/go/packages/packagestest/export.go index 47e6d11b94b..4ac4967b46b 100644 --- a/go/packages/packagestest/export.go +++ b/go/packages/packagestest/export.go @@ -101,7 +101,7 @@ type Module struct { // The keys are the file fragment that follows the module name, the value can // be a string or byte slice, in which case it is the contents of the // file, otherwise it must be a Writer function. - Files map[string]interface{} + Files map[string]any // Overlay is the set of source file overlays for the module. // The keys are the file fragment as in the Files configuration. @@ -483,7 +483,7 @@ func GroupFilesByModules(root string) ([]Module, error) { primarymod := &Module{ Name: root, - Files: make(map[string]interface{}), + Files: make(map[string]any), Overlay: make(map[string][]byte), } mods := map[string]*Module{ @@ -573,7 +573,7 @@ func GroupFilesByModules(root string) ([]Module, error) { } mods[path] = &Module{ Name: filepath.ToSlash(module), - Files: make(map[string]interface{}), + Files: make(map[string]any), Overlay: make(map[string][]byte), } currentModule = path @@ -591,8 +591,8 @@ func GroupFilesByModules(root string) ([]Module, error) { // This is to enable the common case in tests where you have a full copy of the // package in your testdata. // This will panic if there is any kind of error trying to walk the file tree. 
-func MustCopyFileTree(root string) map[string]interface{} { - result := map[string]interface{}{} +func MustCopyFileTree(root string) map[string]any { + result := map[string]any{} if err := filepath.Walk(filepath.FromSlash(root), func(path string, info os.FileInfo, err error) error { if err != nil { return err diff --git a/go/packages/packagestest/export_test.go b/go/packages/packagestest/export_test.go index eb13f560916..e3e4658efb6 100644 --- a/go/packages/packagestest/export_test.go +++ b/go/packages/packagestest/export_test.go @@ -16,7 +16,7 @@ import ( var testdata = []packagestest.Module{{ Name: "golang.org/fake1", - Files: map[string]interface{}{ + Files: map[string]any{ "a.go": packagestest.Symlink("testdata/a.go"), // broken symlink "b.go": "invalid file contents", }, @@ -26,22 +26,22 @@ var testdata = []packagestest.Module{{ }, }, { Name: "golang.org/fake2", - Files: map[string]interface{}{ + Files: map[string]any{ "other/a.go": "package fake2", }, }, { Name: "golang.org/fake2/v2", - Files: map[string]interface{}{ + Files: map[string]any{ "other/a.go": "package fake2", }, }, { Name: "golang.org/fake3@v1.0.0", - Files: map[string]interface{}{ + Files: map[string]any{ "other/a.go": "package fake3", }, }, { Name: "golang.org/fake3@v1.1.0", - Files: map[string]interface{}{ + Files: map[string]any{ "other/a.go": "package fake3", }, }} @@ -97,13 +97,13 @@ func TestGroupFilesByModules(t *testing.T) { want: []packagestest.Module{ { Name: "testdata/groups/one", - Files: map[string]interface{}{ + Files: map[string]any{ "main.go": true, }, }, { Name: "example.com/extra", - Files: map[string]interface{}{ + Files: map[string]any{ "help.go": true, }, }, @@ -114,7 +114,7 @@ func TestGroupFilesByModules(t *testing.T) { want: []packagestest.Module{ { Name: "testdata/groups/two", - Files: map[string]interface{}{ + Files: map[string]any{ "main.go": true, "expect/yo.go": true, "expect/yo_test.go": true, @@ -122,33 +122,33 @@ func TestGroupFilesByModules(t *testing.T) { }, { Name: "example.com/extra", - Files: map[string]interface{}{ + Files: map[string]any{ "yo.go": true, "geez/help.go": true, }, }, { Name: "example.com/extra/v2", - Files: map[string]interface{}{ + Files: map[string]any{ "me.go": true, "geez/help.go": true, }, }, { Name: "example.com/tempmod", - Files: map[string]interface{}{ + Files: map[string]any{ "main.go": true, }, }, { Name: "example.com/what@v1.0.0", - Files: map[string]interface{}{ + Files: map[string]any{ "main.go": true, }, }, { Name: "example.com/what@v1.1.0", - Files: map[string]interface{}{ + Files: map[string]any{ "main.go": true, }, }, diff --git a/go/ssa/const_test.go b/go/ssa/const_test.go index 6738f07b2ef..6097bd93757 100644 --- a/go/ssa/const_test.go +++ b/go/ssa/const_test.go @@ -39,9 +39,9 @@ func TestConstString(t *testing.T) { } for _, test := range []struct { - expr string // type expression - constant interface{} // constant value - want string // expected String() value + expr string // type expression + constant any // constant value + want string // expected String() value }{ {"int", int64(0), "0:int"}, {"int64", int64(0), "0:int64"}, diff --git a/go/ssa/interp/interp.go b/go/ssa/interp/interp.go index f80db0676c7..7bd06120f6c 100644 --- a/go/ssa/interp/interp.go +++ b/go/ssa/interp/interp.go @@ -109,7 +109,7 @@ type frame struct { defers *deferred result value panicking bool - panic interface{} + panic any phitemps []value // temporaries for parallel phi assignment } diff --git a/go/ssa/interp/map.go b/go/ssa/interp/map.go index f5d5f230b73..e96e44df2b9 
100644 --- a/go/ssa/interp/map.go +++ b/go/ssa/interp/map.go @@ -17,7 +17,7 @@ import ( type hashable interface { hash(t types.Type) int - eq(t types.Type, x interface{}) bool + eq(t types.Type, x any) bool } type entry struct { diff --git a/go/ssa/interp/value.go b/go/ssa/interp/value.go index bd681cb6152..4d65aa6c83e 100644 --- a/go/ssa/interp/value.go +++ b/go/ssa/interp/value.go @@ -48,7 +48,7 @@ import ( "golang.org/x/tools/go/types/typeutil" ) -type value interface{} +type value any type tuple []value @@ -123,7 +123,7 @@ func usesBuiltinMap(t types.Type) bool { panic(fmt.Sprintf("invalid map key type: %T", t)) } -func (x array) eq(t types.Type, _y interface{}) bool { +func (x array) eq(t types.Type, _y any) bool { y := _y.(array) tElt := t.Underlying().(*types.Array).Elem() for i, xi := range x { @@ -143,7 +143,7 @@ func (x array) hash(t types.Type) int { return h } -func (x structure) eq(t types.Type, _y interface{}) bool { +func (x structure) eq(t types.Type, _y any) bool { y := _y.(structure) tStruct := t.Underlying().(*types.Struct) for i, n := 0, tStruct.NumFields(); i < n; i++ { @@ -175,7 +175,7 @@ func sameType(x, y types.Type) bool { return y != nil && types.Identical(x, y) } -func (x iface) eq(t types.Type, _y interface{}) bool { +func (x iface) eq(t types.Type, _y any) bool { y := _y.(iface) return sameType(x.t, y.t) && (x.t == nil || equals(x.t, x.v, y.v)) } @@ -188,7 +188,7 @@ func (x rtype) hash(_ types.Type) int { return hashType(x.t) } -func (x rtype) eq(_ types.Type, y interface{}) bool { +func (x rtype) eq(_ types.Type, y any) bool { return types.Identical(x.t, y.(rtype).t) } diff --git a/go/ssa/mode.go b/go/ssa/mode.go index 8381639a585..61c91452ce2 100644 --- a/go/ssa/mode.go +++ b/go/ssa/mode.go @@ -108,4 +108,4 @@ func (m *BuilderMode) Set(s string) error { } // Get returns m. -func (m BuilderMode) Get() interface{} { return m } +func (m BuilderMode) Get() any { return m } diff --git a/go/ssa/print.go b/go/ssa/print.go index 432c4b05b6d..8b92d08463a 100644 --- a/go/ssa/print.go +++ b/go/ssa/print.go @@ -387,7 +387,7 @@ func (s *MapUpdate) String() string { func (s *DebugRef) String() string { p := s.Parent().Prog.Fset.Position(s.Pos()) - var descr interface{} + var descr any if s.object != nil { descr = s.object // e.g. "var x int" } else { diff --git a/go/ssa/sanity.go b/go/ssa/sanity.go index e35e4d79357..97ef886e3cf 100644 --- a/go/ssa/sanity.go +++ b/go/ssa/sanity.go @@ -48,7 +48,7 @@ func mustSanityCheck(fn *Function, reporter io.Writer) { } } -func (s *sanity) diagnostic(prefix, format string, args ...interface{}) { +func (s *sanity) diagnostic(prefix, format string, args ...any) { fmt.Fprintf(s.reporter, "%s: function %s", prefix, s.fn) if s.block != nil { fmt.Fprintf(s.reporter, ", block %s", s.block) @@ -58,12 +58,12 @@ func (s *sanity) diagnostic(prefix, format string, args ...interface{}) { io.WriteString(s.reporter, "\n") } -func (s *sanity) errorf(format string, args ...interface{}) { +func (s *sanity) errorf(format string, args ...any) { s.insane = true s.diagnostic("Error", format, args...) } -func (s *sanity) warnf(format string, args ...interface{}) { +func (s *sanity) warnf(format string, args ...any) { s.diagnostic("Warning", format, args...) 
} diff --git a/go/ssa/ssautil/load_test.go b/go/ssa/ssautil/load_test.go index 10375a3227f..cf157fe4401 100644 --- a/go/ssa/ssautil/load_test.go +++ b/go/ssa/ssautil/load_test.go @@ -154,7 +154,7 @@ func TestIssue53604(t *testing.T) { e := packagestest.Export(t, packagestest.Modules, []packagestest.Module{ { Name: "golang.org/fake", - Files: map[string]interface{}{ + Files: map[string]any{ "x/x.go": `package x; import "golang.org/fake/y"; var V = y.F()`, "y/y.go": `package y; import "golang.org/fake/z"; var F = func () *int { return &z.Z } `, "z/z.go": `package z; var Z int`, diff --git a/go/ssa/util.go b/go/ssa/util.go index 2a9c9b9d318..9a73984a6a0 100644 --- a/go/ssa/util.go +++ b/go/ssa/util.go @@ -166,7 +166,7 @@ func declaredWithin(obj types.Object, fn *types.Func) bool { // returns a closure that prints the corresponding "end" message. // Call using 'defer logStack(...)()' to show builder stack on panic. // Don't forget trailing parens! -func logStack(format string, args ...interface{}) func() { +func logStack(format string, args ...any) func() { msg := fmt.Sprintf(format, args...) io.WriteString(os.Stderr, msg) io.WriteString(os.Stderr, "\n") diff --git a/gopls/internal/analysis/deprecated/deprecated.go b/gopls/internal/analysis/deprecated/deprecated.go index c6df00b4f50..400041ba088 100644 --- a/gopls/internal/analysis/deprecated/deprecated.go +++ b/gopls/internal/analysis/deprecated/deprecated.go @@ -36,7 +36,7 @@ var Analyzer = &analysis.Analyzer{ } // checkDeprecated is a simplified copy of staticcheck.CheckDeprecated. -func checkDeprecated(pass *analysis.Pass) (interface{}, error) { +func checkDeprecated(pass *analysis.Pass) (any, error) { inspector := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) deprs, err := collectDeprecatedNames(pass, inspector) diff --git a/gopls/internal/analysis/embeddirective/embeddirective.go b/gopls/internal/analysis/embeddirective/embeddirective.go index e623587cc68..7590cba9ad8 100644 --- a/gopls/internal/analysis/embeddirective/embeddirective.go +++ b/gopls/internal/analysis/embeddirective/embeddirective.go @@ -28,7 +28,7 @@ var Analyzer = &analysis.Analyzer{ const FixCategory = "addembedimport" // recognized by gopls ApplyFix -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { for _, f := range pass.Files { comments := embedDirectiveComments(f) if len(comments) == 0 { diff --git a/gopls/internal/analysis/fillreturns/fillreturns.go b/gopls/internal/analysis/fillreturns/fillreturns.go index b6bcc1f24dc..184aac5ea1f 100644 --- a/gopls/internal/analysis/fillreturns/fillreturns.go +++ b/gopls/internal/analysis/fillreturns/fillreturns.go @@ -36,7 +36,7 @@ var Analyzer = &analysis.Analyzer{ URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/fillreturns", } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) info := pass.TypesInfo diff --git a/gopls/internal/analysis/nonewvars/nonewvars.go b/gopls/internal/analysis/nonewvars/nonewvars.go index 8a3bf502c51..b7f861ba7f1 100644 --- a/gopls/internal/analysis/nonewvars/nonewvars.go +++ b/gopls/internal/analysis/nonewvars/nonewvars.go @@ -32,7 +32,7 @@ var Analyzer = &analysis.Analyzer{ URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/nonewvars", } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { inspect := 
pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) for _, typeErr := range pass.TypeErrors { diff --git a/gopls/internal/analysis/noresultvalues/noresultvalues.go b/gopls/internal/analysis/noresultvalues/noresultvalues.go index fe979f52aac..6b8f9d895e4 100644 --- a/gopls/internal/analysis/noresultvalues/noresultvalues.go +++ b/gopls/internal/analysis/noresultvalues/noresultvalues.go @@ -32,7 +32,7 @@ var Analyzer = &analysis.Analyzer{ URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/noresultvalues", } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) for _, typErr := range pass.TypeErrors { diff --git a/gopls/internal/analysis/simplifycompositelit/simplifycompositelit.go b/gopls/internal/analysis/simplifycompositelit/simplifycompositelit.go index 15176cef1c8..b38ccf4d5ed 100644 --- a/gopls/internal/analysis/simplifycompositelit/simplifycompositelit.go +++ b/gopls/internal/analysis/simplifycompositelit/simplifycompositelit.go @@ -33,7 +33,7 @@ var Analyzer = &analysis.Analyzer{ URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/simplifycompositelit", } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { // Gather information whether file is generated or not generated := make(map[*token.File]bool) for _, file := range pass.Files { diff --git a/gopls/internal/analysis/simplifyrange/simplifyrange.go b/gopls/internal/analysis/simplifyrange/simplifyrange.go index fd685ba2c5b..594ebd1f55a 100644 --- a/gopls/internal/analysis/simplifyrange/simplifyrange.go +++ b/gopls/internal/analysis/simplifyrange/simplifyrange.go @@ -26,7 +26,7 @@ var Analyzer = &analysis.Analyzer{ URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/simplifyrange", } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { // Gather information whether file is generated or not generated := make(map[*token.File]bool) for _, file := range pass.Files { diff --git a/gopls/internal/analysis/simplifyslice/simplifyslice.go b/gopls/internal/analysis/simplifyslice/simplifyslice.go index 6755187afe5..28cc266d713 100644 --- a/gopls/internal/analysis/simplifyslice/simplifyslice.go +++ b/gopls/internal/analysis/simplifyslice/simplifyslice.go @@ -37,7 +37,7 @@ var Analyzer = &analysis.Analyzer{ // An example where it does not: // x, y := b[:n], b[n:] -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { // Gather information whether file is generated or not generated := make(map[*token.File]bool) for _, file := range pass.Files { diff --git a/gopls/internal/analysis/yield/yield.go b/gopls/internal/analysis/yield/yield.go index ccd30045f97..354cf372186 100644 --- a/gopls/internal/analysis/yield/yield.go +++ b/gopls/internal/analysis/yield/yield.go @@ -44,7 +44,7 @@ var Analyzer = &analysis.Analyzer{ URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/yield", } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { inspector := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) // Find all calls to yield of the right type. 
diff --git a/gopls/internal/cache/analysis.go b/gopls/internal/cache/analysis.go index a0dd322a51e..4083f49d2d6 100644 --- a/gopls/internal/cache/analysis.go +++ b/gopls/internal/cache/analysis.go @@ -885,7 +885,7 @@ type action struct { vdeps map[PackageID]*analysisNode // vertical dependencies // results of action.exec(): - result interface{} // result of Run function, of type a.ResultType + result any // result of Run function, of type a.ResultType summary *actionSummary err error } @@ -964,7 +964,7 @@ func (act *action) exec(ctx context.Context) (any, *actionSummary, error) { } // Gather analysis Result values from horizontal dependencies. - inputs := make(map[*analysis.Analyzer]interface{}) + inputs := make(map[*analysis.Analyzer]any) for _, dep := range act.hdeps { inputs[dep.a] = dep.result } @@ -1178,7 +1178,7 @@ func (act *action) exec(ctx context.Context) (any, *actionSummary, error) { // Recover from panics (only) within the analyzer logic. // (Use an anonymous function to limit the recover scope.) - var result interface{} + var result any func() { start := time.Now() defer func() { diff --git a/gopls/internal/cache/load.go b/gopls/internal/cache/load.go index 140cbc45490..e15e0cef0b6 100644 --- a/gopls/internal/cache/load.go +++ b/gopls/internal/cache/load.go @@ -365,7 +365,7 @@ func (s *Snapshot) config(ctx context.Context, allowNetwork AllowNetwork) *packa packages.NeedForTest, Fset: nil, // we do our own parsing Overlay: s.buildOverlays(), - Logf: func(format string, args ...interface{}) { + Logf: func(format string, args ...any) { if s.view.folder.Options.VerboseOutput { event.Log(ctx, fmt.Sprintf(format, args...)) } diff --git a/gopls/internal/cache/mod.go b/gopls/internal/cache/mod.go index f16cfbfe1af..f6dd22754cc 100644 --- a/gopls/internal/cache/mod.go +++ b/gopls/internal/cache/mod.go @@ -45,14 +45,14 @@ func (s *Snapshot) ParseMod(ctx context.Context, fh file.Handle) (*ParsedModule, // cache miss? if !hit { - promise, release := s.store.Promise(parseModKey(fh.Identity()), func(ctx context.Context, _ interface{}) interface{} { + promise, release := s.store.Promise(parseModKey(fh.Identity()), func(ctx context.Context, _ any) any { parsed, err := parseModImpl(ctx, fh) return parseModResult{parsed, err} }) entry = promise s.mu.Lock() - s.parseModHandles.Set(uri, entry, func(_, _ interface{}) { release() }) + s.parseModHandles.Set(uri, entry, func(_, _ any) { release() }) s.mu.Unlock() } @@ -131,14 +131,14 @@ func (s *Snapshot) ParseWork(ctx context.Context, fh file.Handle) (*ParsedWorkFi // cache miss? if !hit { - handle, release := s.store.Promise(parseWorkKey(fh.Identity()), func(ctx context.Context, _ interface{}) interface{} { + handle, release := s.store.Promise(parseWorkKey(fh.Identity()), func(ctx context.Context, _ any) any { parsed, err := parseWorkImpl(ctx, fh) return parseWorkResult{parsed, err} }) entry = handle s.mu.Lock() - s.parseWorkHandles.Set(uri, entry, func(_, _ interface{}) { release() }) + s.parseWorkHandles.Set(uri, entry, func(_, _ any) { release() }) s.mu.Unlock() } @@ -212,7 +212,7 @@ func (s *Snapshot) ModWhy(ctx context.Context, fh file.Handle) (map[string]strin // cache miss? 
if !hit { - handle := memoize.NewPromise("modWhy", func(ctx context.Context, arg interface{}) interface{} { + handle := memoize.NewPromise("modWhy", func(ctx context.Context, arg any) any { why, err := modWhyImpl(ctx, arg.(*Snapshot), fh) return modWhyResult{why, err} }) diff --git a/gopls/internal/cache/mod_tidy.go b/gopls/internal/cache/mod_tidy.go index 4d473d39b12..6d9a3e56b81 100644 --- a/gopls/internal/cache/mod_tidy.go +++ b/gopls/internal/cache/mod_tidy.go @@ -76,7 +76,7 @@ func (s *Snapshot) ModTidy(ctx context.Context, pm *ParsedModule) (*TidiedModule return nil, err } - handle := memoize.NewPromise("modTidy", func(ctx context.Context, arg interface{}) interface{} { + handle := memoize.NewPromise("modTidy", func(ctx context.Context, arg any) any { tidied, err := modTidyImpl(ctx, arg.(*Snapshot), pm) return modTidyResult{tidied, err} }) diff --git a/gopls/internal/cache/mod_vuln.go b/gopls/internal/cache/mod_vuln.go index a92f5b5abe1..a48b18e4ba4 100644 --- a/gopls/internal/cache/mod_vuln.go +++ b/gopls/internal/cache/mod_vuln.go @@ -40,7 +40,7 @@ func (s *Snapshot) ModVuln(ctx context.Context, modURI protocol.DocumentURI) (*v // Cache miss? if !hit { - handle := memoize.NewPromise("modVuln", func(ctx context.Context, arg interface{}) interface{} { + handle := memoize.NewPromise("modVuln", func(ctx context.Context, arg any) any { result, err := modVulnImpl(ctx, arg.(*Snapshot)) return modVuln{result, err} }) diff --git a/gopls/internal/cache/parse_cache.go b/gopls/internal/cache/parse_cache.go index 8586f655d28..015510b881d 100644 --- a/gopls/internal/cache/parse_cache.go +++ b/gopls/internal/cache/parse_cache.go @@ -195,7 +195,7 @@ func (c *parseCache) startParse(mode parser.Mode, purgeFuncBodies bool, fhs ...f } uri := fh.URI() - promise := memoize.NewPromise("parseCache.parse", func(ctx context.Context, _ interface{}) interface{} { + promise := memoize.NewPromise("parseCache.parse", func(ctx context.Context, _ any) any { // Allocate 2*len(content)+parsePadding to allow for re-parsing once // inside of parseGoSrc without exceeding the allocated space. 
base, nextBase := c.allocateSpace(2*len(content) + parsePadding) @@ -404,13 +404,13 @@ func (q queue) Swap(i, j int) { q[j].lruIndex = j } -func (q *queue) Push(x interface{}) { +func (q *queue) Push(x any) { e := x.(*parseCacheEntry) e.lruIndex = len(*q) *q = append(*q, e) } -func (q *queue) Pop() interface{} { +func (q *queue) Pop() any { last := len(*q) - 1 e := (*q)[last] (*q)[last] = nil // aid GC diff --git a/gopls/internal/cmd/cmd.go b/gopls/internal/cmd/cmd.go index f7ba04df6a4..2a161ad0fc8 100644 --- a/gopls/internal/cmd/cmd.go +++ b/gopls/internal/cmd/cmd.go @@ -369,7 +369,7 @@ func (c *connection) initialize(ctx context.Context, options func(*settings.Opti } params.Capabilities.Window.WorkDoneProgress = true - params.InitializationOptions = map[string]interface{}{ + params.InitializationOptions = map[string]any{ "symbolMatcher": string(opts.SymbolMatcher), } if c.initializeResult, err = c.Initialize(ctx, params); err != nil { @@ -468,7 +468,7 @@ func (c *cmdClient) LogMessage(ctx context.Context, p *protocol.LogMessageParams return nil } -func (c *cmdClient) Event(ctx context.Context, t *interface{}) error { return nil } +func (c *cmdClient) Event(ctx context.Context, t *any) error { return nil } func (c *cmdClient) RegisterCapability(ctx context.Context, p *protocol.RegistrationParams) error { return nil @@ -482,13 +482,13 @@ func (c *cmdClient) WorkspaceFolders(ctx context.Context) ([]protocol.WorkspaceF return nil, nil } -func (c *cmdClient) Configuration(ctx context.Context, p *protocol.ParamConfiguration) ([]interface{}, error) { - results := make([]interface{}, len(p.Items)) +func (c *cmdClient) Configuration(ctx context.Context, p *protocol.ParamConfiguration) ([]any, error) { + results := make([]any, len(p.Items)) for i, item := range p.Items { if item.Section != "gopls" { continue } - m := map[string]interface{}{ + m := map[string]any{ "analyses": map[string]any{ "fillreturns": true, "nonewvars": true, @@ -658,7 +658,7 @@ func (c *cmdClient) PublishDiagnostics(ctx context.Context, p *protocol.PublishD // TODO(golang/go#60122): replace the gopls.diagnose_files // command with support for textDocument/diagnostic, // so that we don't need to do this de-duplication. - type key [6]interface{} + type key [6]any seen := make(map[key]bool) out := file.diagnostics[:0] for _, d := range file.diagnostics { diff --git a/gopls/internal/cmd/integration_test.go b/gopls/internal/cmd/integration_test.go index 42812a870a4..986453253f8 100644 --- a/gopls/internal/cmd/integration_test.go +++ b/gopls/internal/cmd/integration_test.go @@ -930,7 +930,7 @@ package foo res3 := goplsWithEnv(t, tree, []string{"GOPACKAGESDRIVER=off"}, "stats", "-anon") res3.checkExit(true) - var statsAsMap3 map[string]interface{} + var statsAsMap3 map[string]any if err := json.Unmarshal([]byte(res3.stdout), &statsAsMap3); err != nil { t.Fatalf("failed to unmarshal JSON output of stats command: %v", err) } @@ -1212,7 +1212,7 @@ func (res *result) checkOutput(pattern, name, content string) { } // toJSON decodes res.stdout as JSON into to *ptr and reports its success. 
-func (res *result) toJSON(ptr interface{}) bool { +func (res *result) toJSON(ptr any) bool { if err := json.Unmarshal([]byte(res.stdout), ptr); err != nil { res.t.Errorf("invalid JSON %v", err) return false diff --git a/gopls/internal/cmd/stats.go b/gopls/internal/cmd/stats.go index cc19a94fb84..1ba43ccee83 100644 --- a/gopls/internal/cmd/stats.go +++ b/gopls/internal/cmd/stats.go @@ -164,7 +164,7 @@ func (s *stats) Run(ctx context.Context, args ...string) error { } // Filter JSON output to fields that are consistent with s.Anon. - okFields := make(map[string]interface{}) + okFields := make(map[string]any) { v := reflect.ValueOf(stats) t := v.Type() diff --git a/gopls/internal/cmd/symbols.go b/gopls/internal/cmd/symbols.go index 663a08f4be1..15c593b0e74 100644 --- a/gopls/internal/cmd/symbols.go +++ b/gopls/internal/cmd/symbols.go @@ -53,7 +53,7 @@ func (r *symbols) Run(ctx context.Context, args ...string) error { return err } for _, s := range symbols { - if m, ok := s.(map[string]interface{}); ok { + if m, ok := s.(map[string]any); ok { s, err = mapToSymbol(m) if err != nil { return err @@ -69,7 +69,7 @@ func (r *symbols) Run(ctx context.Context, args ...string) error { return nil } -func mapToSymbol(m map[string]interface{}) (interface{}, error) { +func mapToSymbol(m map[string]any) (any, error) { b, err := json.Marshal(m) if err != nil { return nil, err diff --git a/gopls/internal/debug/log/log.go b/gopls/internal/debug/log/log.go index d015f9bfdd3..9e7efa7bf17 100644 --- a/gopls/internal/debug/log/log.go +++ b/gopls/internal/debug/log/log.go @@ -33,7 +33,7 @@ func (l Level) Log(ctx context.Context, msg string) { } // Logf formats and exports a log event labeled with level l. -func (l Level) Logf(ctx context.Context, format string, args ...interface{}) { +func (l Level) Logf(ctx context.Context, format string, args ...any) { l.Log(ctx, fmt.Sprintf(format, args...)) } diff --git a/gopls/internal/debug/rpc.go b/gopls/internal/debug/rpc.go index 8a696f848d0..5b8e1dbbbd0 100644 --- a/gopls/internal/debug/rpc.go +++ b/gopls/internal/debug/rpc.go @@ -209,7 +209,7 @@ func getStatusCode(span *export.Span) string { return "" } -func (r *Rpcs) getData(req *http.Request) interface{} { +func (r *Rpcs) getData(req *http.Request) any { return r } diff --git a/gopls/internal/debug/serve.go b/gopls/internal/debug/serve.go index 058254b755b..c471f488cd1 100644 --- a/gopls/internal/debug/serve.go +++ b/gopls/internal/debug/serve.go @@ -280,23 +280,23 @@ func cmdline(w http.ResponseWriter, r *http.Request) { pprof.Cmdline(fake, r) } -func (i *Instance) getCache(r *http.Request) interface{} { +func (i *Instance) getCache(r *http.Request) any { return i.State.Cache(path.Base(r.URL.Path)) } -func (i *Instance) getAnalysis(r *http.Request) interface{} { +func (i *Instance) getAnalysis(r *http.Request) any { return i.State.Analysis() } -func (i *Instance) getSession(r *http.Request) interface{} { +func (i *Instance) getSession(r *http.Request) any { return i.State.Session(path.Base(r.URL.Path)) } -func (i *Instance) getClient(r *http.Request) interface{} { +func (i *Instance) getClient(r *http.Request) any { return i.State.Client(path.Base(r.URL.Path)) } -func (i *Instance) getServer(r *http.Request) interface{} { +func (i *Instance) getServer(r *http.Request) any { i.State.mu.Lock() defer i.State.mu.Unlock() id := path.Base(r.URL.Path) @@ -308,7 +308,7 @@ func (i *Instance) getServer(r *http.Request) interface{} { return nil } -func (i *Instance) getFile(r *http.Request) interface{} { +func (i *Instance) 
getFile(r *http.Request) any { identifier := path.Base(r.URL.Path) sid := path.Base(path.Dir(r.URL.Path)) s := i.State.Session(sid) @@ -324,7 +324,7 @@ func (i *Instance) getFile(r *http.Request) interface{} { return nil } -func (i *Instance) getInfo(r *http.Request) interface{} { +func (i *Instance) getInfo(r *http.Request) any { buf := &bytes.Buffer{} i.PrintServerInfo(r.Context(), buf) return template.HTML(buf.String()) @@ -340,7 +340,7 @@ func (i *Instance) AddService(s protocol.Server, session *cache.Session) { stdlog.Printf("unable to find a Client to add the protocol.Server to") } -func getMemory(_ *http.Request) interface{} { +func getMemory(_ *http.Request) any { var m runtime.MemStats runtime.ReadMemStats(&m) return m @@ -439,7 +439,7 @@ func (i *Instance) Serve(ctx context.Context, addr string) (string, error) { event.Log(ctx, "Debug serving", label1.Port.Of(port)) go func() { mux := http.NewServeMux() - mux.HandleFunc("/", render(MainTmpl, func(*http.Request) interface{} { return i })) + mux.HandleFunc("/", render(MainTmpl, func(*http.Request) any { return i })) mux.HandleFunc("/debug/", render(DebugTmpl, nil)) mux.HandleFunc("/debug/pprof/", pprof.Index) mux.HandleFunc("/debug/pprof/cmdline", cmdline) @@ -594,11 +594,11 @@ func makeInstanceExporter(i *Instance) event.Exporter { return exporter } -type dataFunc func(*http.Request) interface{} +type dataFunc func(*http.Request) any func render(tmpl *template.Template, fun dataFunc) func(http.ResponseWriter, *http.Request) { return func(w http.ResponseWriter, r *http.Request) { - var data interface{} + var data any if fun != nil { data = fun(r) } diff --git a/gopls/internal/debug/template_test.go b/gopls/internal/debug/template_test.go index d4d9071c140..52c60244776 100644 --- a/gopls/internal/debug/template_test.go +++ b/gopls/internal/debug/template_test.go @@ -29,7 +29,7 @@ import ( var templates = map[string]struct { tmpl *template.Template - data interface{} // a value of the needed type + data any // a value of the needed type }{ "MainTmpl": {debug.MainTmpl, &debug.Instance{}}, "DebugTmpl": {debug.DebugTmpl, nil}, diff --git a/gopls/internal/debug/trace.go b/gopls/internal/debug/trace.go index 9314a04d241..e6ff9697b67 100644 --- a/gopls/internal/debug/trace.go +++ b/gopls/internal/debug/trace.go @@ -277,7 +277,7 @@ func (t *traces) addRecentLocked(span *traceSpan, start bool) { } // getData returns the TraceResults rendered by TraceTmpl for the /trace[/name] endpoint. -func (t *traces) getData(req *http.Request) interface{} { +func (t *traces) getData(req *http.Request) any { // TODO(adonovan): the HTTP request doesn't acquire the mutex // for t or for each span! Audit and fix. diff --git a/gopls/internal/golang/rename_check.go b/gopls/internal/golang/rename_check.go index 280795abe5e..97423fe87a7 100644 --- a/gopls/internal/golang/rename_check.go +++ b/gopls/internal/golang/rename_check.go @@ -51,7 +51,7 @@ import ( ) // errorf reports an error (e.g. conflict) and prevents file modification. 
-func (r *renamer) errorf(pos token.Pos, format string, args ...interface{}) { +func (r *renamer) errorf(pos token.Pos, format string, args ...any) { // Conflict error messages in the old gorename tool (whence this // logic originated) contain rich information associated with // multiple source lines, such as: diff --git a/gopls/internal/lsprpc/binder_test.go b/gopls/internal/lsprpc/binder_test.go index 042056e7777..07a8b2cdf99 100644 --- a/gopls/internal/lsprpc/binder_test.go +++ b/gopls/internal/lsprpc/binder_test.go @@ -56,7 +56,7 @@ func (b *ServerBinder) Bind(ctx context.Context, conn *jsonrpc2_v2.Connection) j serverHandler := protocol.ServerHandlerV2(server) // Wrap the server handler to inject the client into each request context, so // that log events are reflected back to the client. - wrapped := jsonrpc2_v2.HandlerFunc(func(ctx context.Context, req *jsonrpc2_v2.Request) (interface{}, error) { + wrapped := jsonrpc2_v2.HandlerFunc(func(ctx context.Context, req *jsonrpc2_v2.Request) (any, error) { ctx = protocol.WithClient(ctx, client) return serverHandler.Handle(ctx, req) }) diff --git a/gopls/internal/lsprpc/commandinterceptor_test.go b/gopls/internal/lsprpc/commandinterceptor_test.go index 7c83ef993f0..3cfa2e35a7f 100644 --- a/gopls/internal/lsprpc/commandinterceptor_test.go +++ b/gopls/internal/lsprpc/commandinterceptor_test.go @@ -15,9 +15,9 @@ import ( . "golang.org/x/tools/gopls/internal/lsprpc" ) -func CommandInterceptor(command string, run func(*protocol.ExecuteCommandParams) (interface{}, error)) Middleware { +func CommandInterceptor(command string, run func(*protocol.ExecuteCommandParams) (any, error)) Middleware { return BindHandler(func(delegate jsonrpc2_v2.Handler) jsonrpc2_v2.Handler { - return jsonrpc2_v2.HandlerFunc(func(ctx context.Context, req *jsonrpc2_v2.Request) (interface{}, error) { + return jsonrpc2_v2.HandlerFunc(func(ctx context.Context, req *jsonrpc2_v2.Request) (any, error) { if req.Method == "workspace/executeCommand" { var params protocol.ExecuteCommandParams if err := json.Unmarshal(req.Params, &params); err == nil { @@ -35,9 +35,9 @@ func CommandInterceptor(command string, run func(*protocol.ExecuteCommandParams) func TestCommandInterceptor(t *testing.T) { const command = "foo" caught := false - intercept := func(_ *protocol.ExecuteCommandParams) (interface{}, error) { + intercept := func(_ *protocol.ExecuteCommandParams) (any, error) { caught = true - return map[string]interface{}{}, nil + return map[string]any{}, nil } ctx := context.Background() @@ -50,7 +50,7 @@ func TestCommandInterceptor(t *testing.T) { params := &protocol.ExecuteCommandParams{ Command: command, } - var res interface{} + var res any err := conn.Call(ctx, "workspace/executeCommand", params).Await(ctx, &res) if err != nil { t.Fatal(err) diff --git a/gopls/internal/lsprpc/export_test.go b/gopls/internal/lsprpc/export_test.go index 509129870dc..8cbdecc98a2 100644 --- a/gopls/internal/lsprpc/export_test.go +++ b/gopls/internal/lsprpc/export_test.go @@ -26,7 +26,7 @@ type Canceler struct { Conn *jsonrpc2_v2.Connection } -func (c *Canceler) Preempt(ctx context.Context, req *jsonrpc2_v2.Request) (interface{}, error) { +func (c *Canceler) Preempt(ctx context.Context, req *jsonrpc2_v2.Request) (any, error) { if req.Method != "$/cancelRequest" { return nil, jsonrpc2_v2.ErrNotHandled } @@ -65,7 +65,7 @@ func (b *ForwardBinder) Bind(ctx context.Context, conn *jsonrpc2_v2.Connection) serverConn, err := jsonrpc2_v2.Dial(context.Background(), b.dialer, clientBinder) if err != nil { return 
jsonrpc2_v2.ConnectionOptions{ - Handler: jsonrpc2_v2.HandlerFunc(func(context.Context, *jsonrpc2_v2.Request) (interface{}, error) { + Handler: jsonrpc2_v2.HandlerFunc(func(context.Context, *jsonrpc2_v2.Request) (any, error) { return nil, fmt.Errorf("%w: %v", jsonrpc2_v2.ErrInternal, err) }), } diff --git a/gopls/internal/lsprpc/goenv.go b/gopls/internal/lsprpc/goenv.go index 52ec08ff7eb..2b8b94345ca 100644 --- a/gopls/internal/lsprpc/goenv.go +++ b/gopls/internal/lsprpc/goenv.go @@ -12,7 +12,7 @@ import ( "golang.org/x/tools/internal/gocommand" ) -func getGoEnv(ctx context.Context, env map[string]interface{}) (map[string]string, error) { +func getGoEnv(ctx context.Context, env map[string]any) (map[string]string, error) { var runEnv []string for k, v := range env { runEnv = append(runEnv, fmt.Sprintf("%s=%s", k, v)) } diff --git a/gopls/internal/lsprpc/goenv_test.go b/gopls/internal/lsprpc/goenv_test.go index 6c41540fafb..bc39228c614 100644 --- a/gopls/internal/lsprpc/goenv_test.go +++ b/gopls/internal/lsprpc/goenv_test.go @@ -21,7 +21,7 @@ import ( func GoEnvMiddleware() (Middleware, error) { return BindHandler(func(delegate jsonrpc2_v2.Handler) jsonrpc2_v2.Handler { - return jsonrpc2_v2.HandlerFunc(func(ctx context.Context, req *jsonrpc2_v2.Request) (interface{}, error) { + return jsonrpc2_v2.HandlerFunc(func(ctx context.Context, req *jsonrpc2_v2.Request) (any, error) { if req.Method == "initialize" { if err := addGoEnvToInitializeRequestV2(ctx, req); err != nil { event.Error(ctx, "adding go env to initialize", err) @@ -39,20 +39,20 @@ func addGoEnvToInitializeRequestV2(ctx context.Context, req *jsonrpc2_v2.Request if err := json.Unmarshal(req.Params, &params); err != nil { return err } - var opts map[string]interface{} + var opts map[string]any switch v := params.InitializationOptions.(type) { case nil: - opts = make(map[string]interface{}) - case map[string]interface{}: + opts = make(map[string]any) + case map[string]any: opts = v default: return fmt.Errorf("unexpected type for InitializationOptions: %T", v) } envOpt, ok := opts["env"] if !ok { - envOpt = make(map[string]interface{}) + envOpt = make(map[string]any) } - env, ok := envOpt.(map[string]interface{}) + env, ok := envOpt.(map[string]any) if !ok { return fmt.Errorf("env option is %T, expected a map", envOpt) } @@ -108,8 +108,8 @@ func TestGoEnvMiddleware(t *testing.T) { conn := env.dial(ctx, t, l.Dialer(), noopBinder, true) dispatch := protocol.ServerDispatcherV2(conn) initParams := &protocol.ParamInitialize{} - initParams.InitializationOptions = map[string]interface{}{ - "env": map[string]interface{}{ + initParams.InitializationOptions = map[string]any{ + "env": map[string]any{ "GONOPROXY": "example.com", }, } @@ -120,7 +120,7 @@ func TestGoEnvMiddleware(t *testing.T) { if server.params == nil { t.Fatalf("initialize params are unset") } - envOpts := server.params.InitializationOptions.(map[string]interface{})["env"].(map[string]interface{}) + envOpts := server.params.InitializationOptions.(map[string]any)["env"].(map[string]any) // Check for an arbitrary Go variable. It should be set. 
if _, ok := envOpts["GOPRIVATE"]; !ok { diff --git a/gopls/internal/lsprpc/lsprpc.go b/gopls/internal/lsprpc/lsprpc.go index b77557c9a4b..9255f9176bc 100644 --- a/gopls/internal/lsprpc/lsprpc.go +++ b/gopls/internal/lsprpc/lsprpc.go @@ -323,20 +323,20 @@ func addGoEnvToInitializeRequest(ctx context.Context, r jsonrpc2.Request) (jsonr if err := json.Unmarshal(r.Params(), &params); err != nil { return nil, err } - var opts map[string]interface{} + var opts map[string]any switch v := params.InitializationOptions.(type) { case nil: - opts = make(map[string]interface{}) - case map[string]interface{}: + opts = make(map[string]any) + case map[string]any: opts = v default: return nil, fmt.Errorf("unexpected type for InitializationOptions: %T", v) } envOpt, ok := opts["env"] if !ok { - envOpt = make(map[string]interface{}) + envOpt = make(map[string]any) } - env, ok := envOpt.(map[string]interface{}) + env, ok := envOpt.(map[string]any) if !ok { return nil, fmt.Errorf(`env option is %T, expected a map`, envOpt) } @@ -368,7 +368,7 @@ func (f *forwarder) replyWithDebugAddress(outerCtx context.Context, r jsonrpc2.R event.Log(outerCtx, "no debug instance to start") return r } - return func(ctx context.Context, result interface{}, outerErr error) error { + return func(ctx context.Context, result any, outerErr error) error { if outerErr != nil { return r(ctx, result, outerErr) } diff --git a/gopls/internal/lsprpc/lsprpc_test.go b/gopls/internal/lsprpc/lsprpc_test.go index c4ccab71a3e..1a259bbd646 100644 --- a/gopls/internal/lsprpc/lsprpc_test.go +++ b/gopls/internal/lsprpc/lsprpc_test.go @@ -302,8 +302,8 @@ func TestEnvForwarding(t *testing.T) { conn.Go(ctx, jsonrpc2.MethodNotFound) dispatch := protocol.ServerDispatcher(conn) initParams := &protocol.ParamInitialize{} - initParams.InitializationOptions = map[string]interface{}{ - "env": map[string]interface{}{ + initParams.InitializationOptions = map[string]any{ + "env": map[string]any{ "GONOPROXY": "example.com", }, } @@ -314,7 +314,7 @@ func TestEnvForwarding(t *testing.T) { if server.params == nil { t.Fatalf("initialize params are unset") } - env := server.params.InitializationOptions.(map[string]interface{})["env"].(map[string]interface{}) + env := server.params.InitializationOptions.(map[string]any)["env"].(map[string]any) // Check for an arbitrary Go variable. It should be set. if _, ok := env["GOPRIVATE"]; !ok { diff --git a/gopls/internal/lsprpc/middleware_test.go b/gopls/internal/lsprpc/middleware_test.go index 526c7343b78..afa6ae78d2f 100644 --- a/gopls/internal/lsprpc/middleware_test.go +++ b/gopls/internal/lsprpc/middleware_test.go @@ -154,7 +154,7 @@ func (h *Handshaker) Middleware(inner jsonrpc2_v2.Binder) jsonrpc2_v2.Binder { // Wrap the delegated handler to accept the handshake. 
delegate := opts.Handler - opts.Handler = jsonrpc2_v2.HandlerFunc(func(ctx context.Context, req *jsonrpc2_v2.Request) (interface{}, error) { + opts.Handler = jsonrpc2_v2.HandlerFunc(func(ctx context.Context, req *jsonrpc2_v2.Request) (any, error) { if req.Method == HandshakeMethod { var peerInfo PeerInfo if err := json.Unmarshal(req.Params, &peerInfo); err != nil { diff --git a/gopls/internal/server/command.go b/gopls/internal/server/command.go index 007b8d5218f..0142de532c3 100644 --- a/gopls/internal/server/command.go +++ b/gopls/internal/server/command.go @@ -46,7 +46,7 @@ import ( "golang.org/x/tools/internal/xcontext" ) -func (s *server) ExecuteCommand(ctx context.Context, params *protocol.ExecuteCommandParams) (interface{}, error) { +func (s *server) ExecuteCommand(ctx context.Context, params *protocol.ExecuteCommandParams) (any, error) { ctx, done := event.Start(ctx, "lsp.Server.executeCommand") defer done() diff --git a/gopls/internal/server/general.go b/gopls/internal/server/general.go index de6b764c79f..b7b69931103 100644 --- a/gopls/internal/server/general.go +++ b/gopls/internal/server/general.go @@ -104,7 +104,7 @@ func (s *server) Initialize(ctx context.Context, params *protocol.ParamInitializ } s.pendingFolders = append(s.pendingFolders, folders...) - var codeActionProvider interface{} = true + var codeActionProvider any = true if ca := params.Capabilities.TextDocument.CodeAction; len(ca.CodeActionLiteralSupport.CodeActionKind.ValueSet) > 0 { // If the client has specified CodeActionLiteralSupport, // send the code actions we support. @@ -126,7 +126,7 @@ func (s *server) Initialize(ctx context.Context, params *protocol.ParamInitializ } } - var renameOpts interface{} = true + var renameOpts any = true if r := params.Capabilities.TextDocument.Rename; r != nil && r.PrepareSupport { renameOpts = protocol.RenameOptions{ PrepareProvider: r.PrepareSupport, diff --git a/gopls/internal/server/unimplemented.go b/gopls/internal/server/unimplemented.go index 470a7cbb0ee..7375dc4bb1b 100644 --- a/gopls/internal/server/unimplemented.go +++ b/gopls/internal/server/unimplemented.go @@ -114,7 +114,7 @@ func (s *server) ResolveWorkspaceSymbol(context.Context, *protocol.WorkspaceSymb return nil, notImplemented("ResolveWorkspaceSymbol") } -func (s *server) SemanticTokensFullDelta(context.Context, *protocol.SemanticTokensDeltaParams) (interface{}, error) { +func (s *server) SemanticTokensFullDelta(context.Context, *protocol.SemanticTokensDeltaParams) (any, error) { return nil, notImplemented("SemanticTokensFullDelta") } diff --git a/gopls/internal/template/parse.go b/gopls/internal/template/parse.go index 448a5ab51e8..f1b26bbb14f 100644 --- a/gopls/internal/template/parse.go +++ b/gopls/internal/template/parse.go @@ -114,7 +114,7 @@ func parseBuffer(buf []byte) *Parsed { matches := parseErrR.FindStringSubmatch(err.Error()) if len(matches) == 2 { // suppress the error by giving it a function with the right name - funcs[matches[1]] = func() interface{} { return nil } + funcs[matches[1]] = func() any { return nil } t, err = template.New("").Funcs(funcs).Parse(string(ans.buf)) continue } diff --git a/gopls/internal/test/integration/bench/completion_test.go b/gopls/internal/test/integration/bench/completion_test.go index bbbba0e3fd1..d84512d1f8f 100644 --- a/gopls/internal/test/integration/bench/completion_test.go +++ b/gopls/internal/test/integration/bench/completion_test.go @@ -282,7 +282,7 @@ func runCompletion(b *testing.B, test completionTest, followingEdit, completeUni env := repo.newEnv(b, 
fake.EditorConfig{ Env: envvars, - Settings: map[string]interface{}{ + Settings: map[string]any{ "completeUnimported": completeUnimported, "completionBudget": budget, }, diff --git a/gopls/internal/test/integration/bench/didchange_test.go b/gopls/internal/test/integration/bench/didchange_test.go index 57ed01bbcd6..b1613bb1b03 100644 --- a/gopls/internal/test/integration/bench/didchange_test.go +++ b/gopls/internal/test/integration/bench/didchange_test.go @@ -118,7 +118,7 @@ func runChangeDiagnosticsBenchmark(b *testing.B, test changeTest, save bool, ope Env: map[string]string{ "GOPATH": sharedEnv.Sandbox.GOPATH(), }, - Settings: map[string]interface{}{ + Settings: map[string]any{ "diagnosticsDelay": "0s", }, } diff --git a/gopls/internal/test/integration/env.go b/gopls/internal/test/integration/env.go index 64344d0d146..c8a1b5043aa 100644 --- a/gopls/internal/test/integration/env.go +++ b/gopls/internal/test/integration/env.go @@ -282,7 +282,7 @@ func (a *Awaiter) onProgress(_ context.Context, m *protocol.ProgressParams) erro if !ok { panic(fmt.Sprintf("got progress report for unknown report %v: %v", m.Token, m)) } - v := m.Value.(map[string]interface{}) + v := m.Value.(map[string]any) switch kind := v["kind"]; kind { case "begin": work.title = v["title"].(string) diff --git a/gopls/internal/test/integration/env_test.go b/gopls/internal/test/integration/env_test.go index 32203f7cb83..1fa68676b5c 100644 --- a/gopls/internal/test/integration/env_test.go +++ b/gopls/internal/test/integration/env_test.go @@ -33,7 +33,7 @@ func TestProgressUpdating(t *testing.T) { } updates := []struct { token string - value interface{} + value any }{ {"foo", protocol.WorkDoneProgressBegin{Kind: "begin", Title: "foo work"}}, {"bar", protocol.WorkDoneProgressBegin{Kind: "begin", Title: "bar work"}}, diff --git a/gopls/internal/test/integration/expectation.go b/gopls/internal/test/integration/expectation.go index ad41423d098..fdfca90796e 100644 --- a/gopls/internal/test/integration/expectation.go +++ b/gopls/internal/test/integration/expectation.go @@ -677,7 +677,7 @@ func checkFileWatch(re string, onMatch, onNoMatch Verdict) func(State) Verdict { rec := regexp.MustCompile(re) return func(s State) Verdict { r := s.registeredCapabilities["workspace/didChangeWatchedFiles"] - watchers := jsonProperty(r.RegisterOptions, "watchers").([]interface{}) + watchers := jsonProperty(r.RegisterOptions, "watchers").([]any) for _, watcher := range watchers { pattern := jsonProperty(watcher, "globPattern").(string) if rec.MatchString(pattern) { @@ -699,11 +699,11 @@ func checkFileWatch(re string, onMatch, onNoMatch Verdict) func(State) Verdict { // } // // Then jsonProperty(obj, "foo", "bar") will be 3. -func jsonProperty(obj interface{}, path ...string) interface{} { +func jsonProperty(obj any, path ...string) any { if len(path) == 0 || obj == nil { return obj } - m := obj.(map[string]interface{}) + m := obj.(map[string]any) return jsonProperty(m[path[0]], path[1:]...) 
} diff --git a/gopls/internal/test/integration/fake/client.go b/gopls/internal/test/integration/fake/client.go index 93eeab4a8af..aee6c1cfc3e 100644 --- a/gopls/internal/test/integration/fake/client.go +++ b/gopls/internal/test/integration/fake/client.go @@ -103,7 +103,7 @@ func (c *Client) LogMessage(ctx context.Context, params *protocol.LogMessagePara return nil } -func (c *Client) Event(ctx context.Context, event *interface{}) error { +func (c *Client) Event(ctx context.Context, event *any) error { return nil } @@ -118,8 +118,8 @@ func (c *Client) WorkspaceFolders(context.Context) ([]protocol.WorkspaceFolder, return []protocol.WorkspaceFolder{}, nil } -func (c *Client) Configuration(_ context.Context, p *protocol.ParamConfiguration) ([]interface{}, error) { - results := make([]interface{}, len(p.Items)) +func (c *Client) Configuration(_ context.Context, p *protocol.ParamConfiguration) ([]any, error) { + results := make([]any, len(p.Items)) for i, item := range p.Items { if item.ScopeURI != nil && *item.ScopeURI == "" { return nil, fmt.Errorf(`malformed ScopeURI ""`) diff --git a/gopls/internal/test/integration/fake/glob/glob.go b/gopls/internal/test/integration/fake/glob/glob.go index a540ebefac5..3bda93bee6d 100644 --- a/gopls/internal/test/integration/fake/glob/glob.go +++ b/gopls/internal/test/integration/fake/glob/glob.go @@ -217,7 +217,7 @@ func (g *Glob) Match(input string) bool { } func match(elems []element, input string) (ok bool) { - var elem interface{} + var elem any for len(elems) > 0 { elem, elems = elems[0], elems[1:] switch elem := elem.(type) { diff --git a/gopls/internal/test/integration/options.go b/gopls/internal/test/integration/options.go index 8090388e17d..11824aa7c16 100644 --- a/gopls/internal/test/integration/options.go +++ b/gopls/internal/test/integration/options.go @@ -25,7 +25,7 @@ type runConfig struct { func defaultConfig() runConfig { return runConfig{ editor: fake.EditorConfig{ - Settings: map[string]interface{}{ + Settings: map[string]any{ // Shorten the diagnostic delay to speed up test execution (else we'd add // the default delay to each assertion about diagnostics) "diagnosticsDelay": "10ms", @@ -109,11 +109,11 @@ func CapabilitiesJSON(capabilities []byte) RunOption { // // As a special case, the env setting must not be provided via Settings: use // EnvVars instead. -type Settings map[string]interface{} +type Settings map[string]any func (s Settings) set(opts *runConfig) { if opts.editor.Settings == nil { - opts.editor.Settings = make(map[string]interface{}) + opts.editor.Settings = make(map[string]any) } for k, v := range s { opts.editor.Settings[k] = v diff --git a/gopls/internal/util/bug/bug.go b/gopls/internal/util/bug/bug.go index dcd242d4856..265ec9dac10 100644 --- a/gopls/internal/util/bug/bug.go +++ b/gopls/internal/util/bug/bug.go @@ -50,13 +50,13 @@ type Bug struct { } // Reportf reports a formatted bug message. -func Reportf(format string, args ...interface{}) { +func Reportf(format string, args ...any) { report(fmt.Sprintf(format, args...)) } // Errorf calls fmt.Errorf for the given arguments, and reports the resulting // error message as a bug. -func Errorf(format string, args ...interface{}) error { +func Errorf(format string, args ...any) error { err := fmt.Errorf(format, args...) 
report(err.Error()) return err diff --git a/gopls/internal/vulncheck/vulntest/report.go b/gopls/internal/vulncheck/vulntest/report.go index 6aa87221866..3b1bfcc5c96 100644 --- a/gopls/internal/vulncheck/vulntest/report.go +++ b/gopls/internal/vulncheck/vulntest/report.go @@ -134,7 +134,7 @@ func (v Version) Canonical() string { // single-element mapping of type to URL. type Reference osv.Reference -func (r *Reference) MarshalYAML() (interface{}, error) { +func (r *Reference) MarshalYAML() (any, error) { return map[string]string{ strings.ToLower(string(r.Type)): r.URL, }, nil diff --git a/internal/event/export/id.go b/internal/event/export/id.go index bf9938b38c1..fb6026462c1 100644 --- a/internal/event/export/id.go +++ b/internal/event/export/id.go @@ -39,7 +39,7 @@ var ( func initGenerator() { var rngSeed int64 - for _, p := range []interface{}{ + for _, p := range []any{ &rngSeed, &traceIDAdd, &nextSpanID, &spanIDInc, } { binary.Read(crand.Reader, binary.LittleEndian, p) diff --git a/internal/event/export/metric/exporter.go b/internal/event/export/metric/exporter.go index 4cafaa52928..588b8a108c7 100644 --- a/internal/event/export/metric/exporter.go +++ b/internal/event/export/metric/exporter.go @@ -19,14 +19,14 @@ import ( var Entries = keys.New("metric_entries", "The set of metrics calculated for an event") type Config struct { - subscribers map[interface{}][]subscriber + subscribers map[any][]subscriber } type subscriber func(time.Time, label.Map, label.Label) Data func (e *Config) subscribe(key label.Key, s subscriber) { if e.subscribers == nil { - e.subscribers = make(map[interface{}][]subscriber) + e.subscribers = make(map[any][]subscriber) } e.subscribers[key] = append(e.subscribers[key], s) } diff --git a/internal/event/export/ocagent/ocagent.go b/internal/event/export/ocagent/ocagent.go index 722a7446939..d86c4aed0cf 100644 --- a/internal/event/export/ocagent/ocagent.go +++ b/internal/event/export/ocagent/ocagent.go @@ -167,7 +167,7 @@ func (cfg *Config) buildNode() *wire.Node { } } -func (e *Exporter) send(endpoint string, message interface{}) { +func (e *Exporter) send(endpoint string, message any) { blob, err := json.Marshal(message) if err != nil { errorInExport("ocagent failed to marshal message for %v: %v", endpoint, err) @@ -190,7 +190,7 @@ func (e *Exporter) send(endpoint string, message interface{}) { } } -func errorInExport(message string, args ...interface{}) { +func errorInExport(message string, args ...any) { // This function is useful when debugging the exporter, but in general we // want to just drop any export } diff --git a/internal/event/export/prometheus/prometheus.go b/internal/event/export/prometheus/prometheus.go index 0281f60a35f..82bb6c15dfc 100644 --- a/internal/event/export/prometheus/prometheus.go +++ b/internal/event/export/prometheus/prometheus.go @@ -66,7 +66,7 @@ func (e *Exporter) header(w http.ResponseWriter, name, description string, isGau fmt.Fprintf(w, "# TYPE %s %s\n", name, kind) } -func (e *Exporter) row(w http.ResponseWriter, name string, group []label.Label, extra string, value interface{}) { +func (e *Exporter) row(w http.ResponseWriter, name string, group []label.Label, extra string, value any) { fmt.Fprint(w, name) buf := &bytes.Buffer{} fmt.Fprint(buf, group) diff --git a/internal/event/keys/keys.go b/internal/event/keys/keys.go index a02206e3015..4cfa51b6123 100644 --- a/internal/event/keys/keys.go +++ b/internal/event/keys/keys.go @@ -32,7 +32,7 @@ func (k *Value) Format(w io.Writer, buf []byte, l label.Label) { } // Get can be used 
to get a label for the key from a label.Map. -func (k *Value) Get(lm label.Map) interface{} { +func (k *Value) Get(lm label.Map) any { if t := lm.Find(k); t.Valid() { return k.From(t) } @@ -40,10 +40,10 @@ func (k *Value) Get(lm label.Map) interface{} { } // From can be used to get a value from a Label. -func (k *Value) From(t label.Label) interface{} { return t.UnpackValue() } +func (k *Value) From(t label.Label) any { return t.UnpackValue() } // Of creates a new Label with this key and the supplied value. -func (k *Value) Of(value interface{}) label.Label { return label.OfValue(k, value) } +func (k *Value) Of(value any) label.Label { return label.OfValue(k, value) } // Tag represents a key for tagging labels that have no value. // These are used when the existence of the label is the entire information it diff --git a/internal/event/label/label.go b/internal/event/label/label.go index 0f526e1f9ab..7c00ca2a6da 100644 --- a/internal/event/label/label.go +++ b/internal/event/label/label.go @@ -32,7 +32,7 @@ type Key interface { type Label struct { key Key packed uint64 - untyped interface{} + untyped any } // Map is the interface to a collection of Labels indexed by key. @@ -76,13 +76,13 @@ type mapChain struct { // OfValue creates a new label from the key and value. // This method is for implementing new key types, label creation should // normally be done with the Of method of the key. -func OfValue(k Key, value interface{}) Label { return Label{key: k, untyped: value} } +func OfValue(k Key, value any) Label { return Label{key: k, untyped: value} } // UnpackValue assumes the label was built using LabelOfValue and returns the value // that was passed to that constructor. // This method is for implementing new key types, for type safety normal // access should be done with the From method of the key. -func (t Label) UnpackValue() interface{} { return t.untyped } +func (t Label) UnpackValue() any { return t.untyped } // Of64 creates a new label from a key and a uint64. This is often // used for non uint64 values that can be packed into a uint64. diff --git a/internal/expect/expect.go b/internal/expect/expect.go index d977ea4e262..69875cd6585 100644 --- a/internal/expect/expect.go +++ b/internal/expect/expect.go @@ -86,7 +86,7 @@ type ReadFile func(filename string) ([]byte, error) // MatchBefore returns the range of the line that matched the pattern, and // invalid positions if there was no match, or an error if the line could not be // found. 
-func MatchBefore(fset *token.FileSet, readFile ReadFile, end token.Pos, pattern interface{}) (token.Pos, token.Pos, error) { +func MatchBefore(fset *token.FileSet, readFile ReadFile, end token.Pos, pattern any) (token.Pos, token.Pos, error) { f := fset.File(end) content, err := readFile(f.Name()) if err != nil { diff --git a/internal/expect/expect_test.go b/internal/expect/expect_test.go index 3ad8d1a74fa..e8f8b6a7a07 100644 --- a/internal/expect/expect_test.go +++ b/internal/expect/expect_test.go @@ -155,7 +155,7 @@ func TestMarker(t *testing.T) { } } -func checkMarker(t *testing.T, fset *token.FileSet, readFile expect.ReadFile, markers map[string]token.Pos, pos token.Pos, name string, pattern interface{}) { +func checkMarker(t *testing.T, fset *token.FileSet, readFile expect.ReadFile, markers map[string]token.Pos, pos token.Pos, name string, pattern any) { start, end, err := expect.MatchBefore(fset, readFile, pos, pattern) if err != nil { t.Errorf("%v: MatchBefore failed: %v", fset.Position(pos), err) diff --git a/internal/expect/extract.go b/internal/expect/extract.go index 1fb4349c48e..150a2afbbf6 100644 --- a/internal/expect/extract.go +++ b/internal/expect/extract.go @@ -32,7 +32,7 @@ type Identifier string // See the package documentation for details about the syntax of those // notes. func Parse(fset *token.FileSet, filename string, content []byte) ([]*Note, error) { - var src interface{} + var src any if content != nil { src = content } @@ -220,7 +220,7 @@ func (t *tokens) Pos() token.Pos { return t.base + token.Pos(t.scanner.Position.Offset) } -func (t *tokens) Errorf(msg string, args ...interface{}) { +func (t *tokens) Errorf(msg string, args ...any) { if t.err != nil { return } diff --git a/internal/facts/facts.go b/internal/facts/facts.go index e1c18d373c3..8e2997e6def 100644 --- a/internal/facts/facts.go +++ b/internal/facts/facts.go @@ -209,7 +209,7 @@ func (d *Decoder) Decode(read func(pkgPath string) ([]byte, error)) (*Set, error // Facts may describe indirectly imported packages, or their objects. m := make(map[key]analysis.Fact) // one big bucket for _, imp := range d.pkg.Imports() { - logf := func(format string, args ...interface{}) { + logf := func(format string, args ...any) { if debug { prefix := fmt.Sprintf("in %s, importing %s: ", d.pkg.Path(), imp.Path()) diff --git a/internal/gcimporter/bimport.go b/internal/gcimporter/bimport.go index d79a605ed13..734c46198df 100644 --- a/internal/gcimporter/bimport.go +++ b/internal/gcimporter/bimport.go @@ -14,7 +14,7 @@ import ( "sync" ) -func errorf(format string, args ...interface{}) { +func errorf(format string, args ...any) { panic(fmt.Sprintf(format, args...)) } diff --git a/internal/gcimporter/iexport.go b/internal/gcimporter/iexport.go index 7dfc31a37d7..253d6493c21 100644 --- a/internal/gcimporter/iexport.go +++ b/internal/gcimporter/iexport.go @@ -310,7 +310,7 @@ func IImportShallow(fset *token.FileSet, getPackages GetPackagesFunc, data []byt } // ReportFunc is the type of a function used to report formatted bugs. -type ReportFunc = func(string, ...interface{}) +type ReportFunc = func(string, ...any) // Current bundled export format version. Increase with each format change. 
// 0: initial implementation @@ -597,7 +597,7 @@ type filePositions struct { needed []uint64 // unordered list of needed file offsets } -func (p *iexporter) trace(format string, args ...interface{}) { +func (p *iexporter) trace(format string, args ...any) { if !trace { // Call sites should also be guarded, but having this check here allows // easily enabling/disabling debug trace statements. @@ -1583,6 +1583,6 @@ func (e internalError) Error() string { return "gcimporter: " + string(e) } // "internalErrorf" as the former is used for bugs, whose cause is // internal inconsistency, whereas the latter is used for ordinary // situations like bad input, whose cause is external. -func internalErrorf(format string, args ...interface{}) error { +func internalErrorf(format string, args ...any) error { return internalError(fmt.Sprintf(format, args...)) } diff --git a/internal/gcimporter/iimport.go b/internal/gcimporter/iimport.go index 12943927159..bc6c9741e7d 100644 --- a/internal/gcimporter/iimport.go +++ b/internal/gcimporter/iimport.go @@ -400,7 +400,7 @@ type iimporter struct { indent int // for tracing support } -func (p *iimporter) trace(format string, args ...interface{}) { +func (p *iimporter) trace(format string, args ...any) { if !trace { // Call sites should also be guarded, but having this check here allows // easily enabling/disabling debug trace statements. diff --git a/internal/gopathwalk/walk.go b/internal/gopathwalk/walk.go index 8361515519f..984b79c2a07 100644 --- a/internal/gopathwalk/walk.go +++ b/internal/gopathwalk/walk.go @@ -22,7 +22,7 @@ import ( // Options controls the behavior of a Walk call. type Options struct { // If Logf is non-nil, debug logging is enabled through this function. - Logf func(format string, args ...interface{}) + Logf func(format string, args ...any) // Search module caches. Also disables legacy goimports ignore rules. ModulesEnabled bool @@ -81,7 +81,7 @@ func WalkSkip(roots []Root, add func(root Root, dir string), skip func(root Root // walkDir creates a walker and starts fastwalk with this walker. func walkDir(root Root, add func(Root, string), skip func(root Root, dir string) bool, opts Options) { if opts.Logf == nil { - opts.Logf = func(format string, args ...interface{}) {} + opts.Logf = func(format string, args ...any) {} } if _, err := os.Stat(root.Path); os.IsNotExist(err) { opts.Logf("skipping nonexistent directory: %v", root.Path) diff --git a/internal/imports/fix_test.go b/internal/imports/fix_test.go index 02ddd480dfd..478313aec7f 100644 --- a/internal/imports/fix_test.go +++ b/internal/imports/fix_test.go @@ -1680,7 +1680,7 @@ type testConfig struct { } // fm is the type for a packagestest.Module's Files, abbreviated for shorter lines. -type fm map[string]interface{} +type fm map[string]any func (c testConfig) test(t *testing.T, fn func(*goimportTest)) { t.Helper() diff --git a/internal/jsonrpc2/conn.go b/internal/jsonrpc2/conn.go index 1d76ef9726b..6e8625208d9 100644 --- a/internal/jsonrpc2/conn.go +++ b/internal/jsonrpc2/conn.go @@ -25,12 +25,12 @@ type Conn interface { // The response will be unmarshaled from JSON into the result. // The id returned will be unique from this connection, and can be used for // logging or tracking. - Call(ctx context.Context, method string, params, result interface{}) (ID, error) + Call(ctx context.Context, method string, params, result any) (ID, error) // Notify invokes the target method but does not wait for a response. 
// The params will be marshaled to JSON before sending over the wire, and will // be handed to the method invoked. - Notify(ctx context.Context, method string, params interface{}) error + Notify(ctx context.Context, method string, params any) error // Go starts a goroutine to handle the connection. // It must be called exactly once for each Conn. @@ -76,7 +76,7 @@ func NewConn(s Stream) Conn { return conn } -func (c *conn) Notify(ctx context.Context, method string, params interface{}) (err error) { +func (c *conn) Notify(ctx context.Context, method string, params any) (err error) { notify, err := NewNotification(method, params) if err != nil { return fmt.Errorf("marshaling notify parameters: %v", err) @@ -96,7 +96,7 @@ func (c *conn) Notify(ctx context.Context, method string, params interface{}) (e return err } -func (c *conn) Call(ctx context.Context, method string, params, result interface{}) (_ ID, err error) { +func (c *conn) Call(ctx context.Context, method string, params, result any) (_ ID, err error) { // generate a new request identifier id := ID{number: atomic.AddInt64(&c.seq, 1)} call, err := NewCall(id, method, params) @@ -153,7 +153,7 @@ func (c *conn) Call(ctx context.Context, method string, params, result interface } func (c *conn) replier(req Request, spanDone func()) Replier { - return func(ctx context.Context, result interface{}, err error) error { + return func(ctx context.Context, result any, err error) error { defer func() { recordStatus(ctx, err) spanDone() diff --git a/internal/jsonrpc2/handler.go b/internal/jsonrpc2/handler.go index 27cb108922a..317b94f8ac1 100644 --- a/internal/jsonrpc2/handler.go +++ b/internal/jsonrpc2/handler.go @@ -18,7 +18,7 @@ type Handler func(ctx context.Context, reply Replier, req Request) error // Replier is passed to handlers to allow them to reply to the request. // If err is set then result will be ignored. -type Replier func(ctx context.Context, result interface{}, err error) error +type Replier func(ctx context.Context, result any, err error) error // MethodNotFound is a Handler that replies to all call requests with the // standard method not found response. 
@@ -32,7 +32,7 @@ func MethodNotFound(ctx context.Context, reply Replier, req Request) error { func MustReplyHandler(handler Handler) Handler { return func(ctx context.Context, reply Replier, req Request) error { called := false - err := handler(ctx, func(ctx context.Context, result interface{}, err error) error { + err := handler(ctx, func(ctx context.Context, result any, err error) error { if called { panic(fmt.Errorf("request %q replied to more than once", req.Method())) } @@ -59,7 +59,7 @@ func CancelHandler(handler Handler) (Handler, func(id ID)) { handling[call.ID()] = cancel mu.Unlock() innerReply := reply - reply = func(ctx context.Context, result interface{}, err error) error { + reply = func(ctx context.Context, result any, err error) error { mu.Lock() delete(handling, call.ID()) mu.Unlock() @@ -92,7 +92,7 @@ func AsyncHandler(handler Handler) Handler { nextRequest = make(chan struct{}) releaser := &releaser{ch: nextRequest} innerReply := reply - reply = func(ctx context.Context, result interface{}, err error) error { + reply = func(ctx context.Context, result any, err error) error { releaser.release(true) return innerReply(ctx, result, err) } diff --git a/internal/jsonrpc2/jsonrpc2_test.go b/internal/jsonrpc2/jsonrpc2_test.go index f62977edfce..b7688bc2334 100644 --- a/internal/jsonrpc2/jsonrpc2_test.go +++ b/internal/jsonrpc2/jsonrpc2_test.go @@ -23,8 +23,8 @@ var logRPC = flag.Bool("logrpc", false, "Enable jsonrpc2 communication logging") type callTest struct { method string - params interface{} - expect interface{} + params any + expect any } var callTests = []callTest{ @@ -35,10 +35,10 @@ var callTests = []callTest{ //TODO: expand the test cases } -func (test *callTest) newResults() interface{} { +func (test *callTest) newResults() any { switch e := test.expect.(type) { - case []interface{}: - var r []interface{} + case []any: + var r []any for _, v := range e { r = append(r, reflect.New(reflect.TypeOf(v)).Interface()) } @@ -50,7 +50,7 @@ func (test *callTest) newResults() interface{} { } } -func (test *callTest) verifyResults(t *testing.T, results interface{}) { +func (test *callTest) verifyResults(t *testing.T, results any) { if results == nil { return } diff --git a/internal/jsonrpc2/messages.go b/internal/jsonrpc2/messages.go index e87d772f398..5078b88f4ae 100644 --- a/internal/jsonrpc2/messages.go +++ b/internal/jsonrpc2/messages.go @@ -65,7 +65,7 @@ type Response struct { // NewNotification constructs a new Notification message for the supplied // method and parameters. -func NewNotification(method string, params interface{}) (*Notification, error) { +func NewNotification(method string, params any) (*Notification, error) { p, merr := marshalToRaw(params) return &Notification{method: method, params: p}, merr } @@ -98,7 +98,7 @@ func (n *Notification) UnmarshalJSON(data []byte) error { // NewCall constructs a new Call message for the supplied ID, method and // parameters. -func NewCall(id ID, method string, params interface{}) (*Call, error) { +func NewCall(id ID, method string, params any) (*Call, error) { p, merr := marshalToRaw(params) return &Call{id: id, method: method, params: p}, merr } @@ -135,7 +135,7 @@ func (c *Call) UnmarshalJSON(data []byte) error { // NewResponse constructs a new Response message that is a reply to the // supplied. If err is set result may be ignored. 
-func NewResponse(id ID, result interface{}, err error) (*Response, error) { +func NewResponse(id ID, result any, err error) (*Response, error) { r, merr := marshalToRaw(result) return &Response{id: id, result: r, err: err}, merr } @@ -229,7 +229,7 @@ func DecodeMessage(data []byte) (Message, error) { return call, nil } -func marshalToRaw(obj interface{}) (json.RawMessage, error) { +func marshalToRaw(obj any) (json.RawMessage, error) { data, err := json.Marshal(obj) if err != nil { return json.RawMessage{}, err diff --git a/internal/jsonrpc2_v2/conn.go b/internal/jsonrpc2_v2/conn.go index df885bfa4c3..4c52a1fd34b 100644 --- a/internal/jsonrpc2_v2/conn.go +++ b/internal/jsonrpc2_v2/conn.go @@ -260,7 +260,7 @@ func newConnection(bindCtx context.Context, rwc io.ReadWriteCloser, binder Binde // Notify invokes the target method but does not wait for a response. // The params will be marshaled to JSON before sending over the wire, and will // be handed to the method invoked. -func (c *Connection) Notify(ctx context.Context, method string, params interface{}) (err error) { +func (c *Connection) Notify(ctx context.Context, method string, params any) (err error) { ctx, done := event.Start(ctx, method, jsonrpc2.Method.Of(method), jsonrpc2.RPCDirection.Of(jsonrpc2.Outbound), @@ -309,7 +309,7 @@ func (c *Connection) Notify(ctx context.Context, method string, params interface // be handed to the method invoked. // You do not have to wait for the response, it can just be ignored if not needed. // If sending the call failed, the response will be ready and have the error in it. -func (c *Connection) Call(ctx context.Context, method string, params interface{}) *AsyncCall { +func (c *Connection) Call(ctx context.Context, method string, params any) *AsyncCall { // Generate a new request identifier. id := Int64ID(atomic.AddInt64(&c.seq, 1)) ctx, endSpan := event.Start(ctx, method, @@ -410,7 +410,7 @@ func (ac *AsyncCall) retire(response *Response) { // Await waits for (and decodes) the results of a Call. // The response will be unmarshaled from JSON into the result. -func (ac *AsyncCall) Await(ctx context.Context, result interface{}) error { +func (ac *AsyncCall) Await(ctx context.Context, result any) error { select { case <-ctx.Done(): return ctx.Err() @@ -429,7 +429,7 @@ func (ac *AsyncCall) Await(ctx context.Context, result interface{}) error { // // Respond must be called exactly once for any message for which a handler // returns ErrAsyncResponse. It must not be called for any other message. -func (c *Connection) Respond(id ID, result interface{}, err error) error { +func (c *Connection) Respond(id ID, result any, err error) error { var req *incomingRequest c.updateInFlight(func(s *inFlightState) { req = s.incomingByID[id] @@ -678,7 +678,7 @@ func (c *Connection) handleAsync() { } // processResult processes the result of a request and, if appropriate, sends a response. -func (c *Connection) processResult(from interface{}, req *incomingRequest, result interface{}, err error) error { +func (c *Connection) processResult(from any, req *incomingRequest, result any, err error) error { switch err { case ErrAsyncResponse: if !req.IsCall() { @@ -781,7 +781,7 @@ func (c *Connection) write(ctx context.Context, msg Message) error { // internalErrorf reports an internal error. By default it panics, but if // c.onInternalError is non-nil it instead calls that and returns an error // wrapping ErrInternal. 
-func (c *Connection) internalErrorf(format string, args ...interface{}) error { +func (c *Connection) internalErrorf(format string, args ...any) error { err := fmt.Errorf(format, args...) if c.onInternalError == nil { panic("jsonrpc2: " + err.Error()) @@ -803,7 +803,7 @@ func labelStatus(ctx context.Context, err error) { // notDone is a context.Context wrapper that returns a nil Done channel. type notDone struct{ ctx context.Context } -func (ic notDone) Value(key interface{}) interface{} { +func (ic notDone) Value(key any) any { return ic.ctx.Value(key) } diff --git a/internal/jsonrpc2_v2/jsonrpc2.go b/internal/jsonrpc2_v2/jsonrpc2.go index 9d775de0603..270f4f341d8 100644 --- a/internal/jsonrpc2_v2/jsonrpc2.go +++ b/internal/jsonrpc2_v2/jsonrpc2.go @@ -44,13 +44,13 @@ type Preempter interface { // Otherwise, the result and error are processed as if returned by Handle. // // Preempt must not block. (The Context passed to it is for Values only.) - Preempt(ctx context.Context, req *Request) (result interface{}, err error) + Preempt(ctx context.Context, req *Request) (result any, err error) } // A PreempterFunc implements the Preempter interface for a standalone Preempt function. -type PreempterFunc func(ctx context.Context, req *Request) (interface{}, error) +type PreempterFunc func(ctx context.Context, req *Request) (any, error) -func (f PreempterFunc) Preempt(ctx context.Context, req *Request) (interface{}, error) { +func (f PreempterFunc) Preempt(ctx context.Context, req *Request) (any, error) { return f(ctx, req) } @@ -71,23 +71,23 @@ type Handler interface { // connection is broken or the request is canceled or completed. // (If Handle returns ErrAsyncResponse, ctx will remain uncanceled // until either Cancel or Respond is called for the request's ID.) - Handle(ctx context.Context, req *Request) (result interface{}, err error) + Handle(ctx context.Context, req *Request) (result any, err error) } type defaultHandler struct{} -func (defaultHandler) Preempt(context.Context, *Request) (interface{}, error) { +func (defaultHandler) Preempt(context.Context, *Request) (any, error) { return nil, ErrNotHandled } -func (defaultHandler) Handle(context.Context, *Request) (interface{}, error) { +func (defaultHandler) Handle(context.Context, *Request) (any, error) { return nil, ErrNotHandled } // A HandlerFunc implements the Handler interface for a standalone Handle function. 
-type HandlerFunc func(ctx context.Context, req *Request) (interface{}, error) +type HandlerFunc func(ctx context.Context, req *Request) (any, error) -func (f HandlerFunc) Handle(ctx context.Context, req *Request) (interface{}, error) { +func (f HandlerFunc) Handle(ctx context.Context, req *Request) (any, error) { return f(ctx, req) } diff --git a/internal/jsonrpc2_v2/jsonrpc2_test.go b/internal/jsonrpc2_v2/jsonrpc2_test.go index d75a20739e8..e42f63736c0 100644 --- a/internal/jsonrpc2_v2/jsonrpc2_test.go +++ b/internal/jsonrpc2_v2/jsonrpc2_test.go @@ -87,24 +87,24 @@ type invoker interface { type notify struct { method string - params interface{} + params any } type call struct { method string - params interface{} - expect interface{} + params any + expect any } type async struct { name string method string - params interface{} + params any } type collect struct { name string - expect interface{} + expect any fails bool } @@ -180,7 +180,7 @@ func (test call) Invoke(t *testing.T, ctx context.Context, h *handler) { func (test echo) Invoke(t *testing.T, ctx context.Context, h *handler) { results := newResults(test.expect) - if err := h.conn.Call(ctx, "echo", []interface{}{test.method, test.params}).Await(ctx, results); err != nil { + if err := h.conn.Call(ctx, "echo", []any{test.method, test.params}).Await(ctx, results); err != nil { t.Fatalf("%v:Echo failed: %v", test.method, err) } verifyResults(t, test.method, results, test.expect) @@ -221,10 +221,10 @@ func (test sequence) Invoke(t *testing.T, ctx context.Context, h *handler) { } // newResults makes a new empty copy of the expected type to put the results into -func newResults(expect interface{}) interface{} { +func newResults(expect any) any { switch e := expect.(type) { - case []interface{}: - var r []interface{} + case []any: + var r []any for _, v := range e { r = append(r, reflect.New(reflect.TypeOf(v)).Interface()) } @@ -237,7 +237,7 @@ func newResults(expect interface{}) interface{} { } // verifyResults compares the results to the expected values -func verifyResults(t *testing.T, method string, results interface{}, expect interface{}) { +func verifyResults(t *testing.T, method string, results any, expect any) { if expect == nil { if results != nil { t.Errorf("%v:Got results %+v where none expected", method, expect) @@ -278,7 +278,7 @@ func (h *handler) waiter(name string) chan struct{} { return waiter } -func (h *handler) Preempt(ctx context.Context, req *jsonrpc2.Request) (interface{}, error) { +func (h *handler) Preempt(ctx context.Context, req *jsonrpc2.Request) (any, error) { switch req.Method { case "unblock": var name string @@ -304,7 +304,7 @@ func (h *handler) Preempt(ctx context.Context, req *jsonrpc2.Request) (interface } } -func (h *handler) Handle(ctx context.Context, req *jsonrpc2.Request) (interface{}, error) { +func (h *handler) Handle(ctx context.Context, req *jsonrpc2.Request) (any, error) { switch req.Method { case "no_args": if len(req.Params) > 0 { @@ -349,11 +349,11 @@ func (h *handler) Handle(ctx context.Context, req *jsonrpc2.Request) (interface{ } return path.Join(v...), nil case "echo": - var v []interface{} + var v []any if err := json.Unmarshal(req.Params, &v); err != nil { return nil, fmt.Errorf("%w: %s", jsonrpc2.ErrParse, err) } - var result interface{} + var result any err := h.conn.Call(ctx, v[0].(string), v[1]).Await(ctx, &result) return result, err case "wait": diff --git a/internal/jsonrpc2_v2/messages.go b/internal/jsonrpc2_v2/messages.go index f02b879c3f2..9cfe6e70fe5 100644 --- 
a/internal/jsonrpc2_v2/messages.go +++ b/internal/jsonrpc2_v2/messages.go @@ -12,7 +12,7 @@ import ( // ID is a Request identifier. type ID struct { - value interface{} + value any } // Message is the interface to all jsonrpc2 message types. @@ -59,18 +59,18 @@ func Int64ID(i int64) ID { return ID{value: i} } func (id ID) IsValid() bool { return id.value != nil } // Raw returns the underlying value of the ID. -func (id ID) Raw() interface{} { return id.value } +func (id ID) Raw() any { return id.value } // NewNotification constructs a new Notification message for the supplied // method and parameters. -func NewNotification(method string, params interface{}) (*Request, error) { +func NewNotification(method string, params any) (*Request, error) { p, merr := marshalToRaw(params) return &Request{Method: method, Params: p}, merr } // NewCall constructs a new Call message for the supplied ID, method and // parameters. -func NewCall(id ID, method string, params interface{}) (*Request, error) { +func NewCall(id ID, method string, params any) (*Request, error) { p, merr := marshalToRaw(params) return &Request{ID: id, Method: method, Params: p}, merr } @@ -85,7 +85,7 @@ func (msg *Request) marshal(to *wireCombined) { // NewResponse constructs a new Response message that is a reply to the // supplied. If err is set result may be ignored. -func NewResponse(id ID, result interface{}, rerr error) (*Response, error) { +func NewResponse(id ID, result any, rerr error) (*Response, error) { r, merr := marshalToRaw(result) return &Response{ID: id, Result: r, Error: rerr}, merr } @@ -169,7 +169,7 @@ func DecodeMessage(data []byte) (Message, error) { return resp, nil } -func marshalToRaw(obj interface{}) (json.RawMessage, error) { +func marshalToRaw(obj any) (json.RawMessage, error) { if obj == nil { return nil, nil } diff --git a/internal/jsonrpc2_v2/serve_test.go b/internal/jsonrpc2_v2/serve_test.go index c5c41e201cd..8eb572c9d01 100644 --- a/internal/jsonrpc2_v2/serve_test.go +++ b/internal/jsonrpc2_v2/serve_test.go @@ -148,7 +148,7 @@ type msg struct { type fakeHandler struct{} -func (fakeHandler) Handle(ctx context.Context, req *jsonrpc2.Request) (interface{}, error) { +func (fakeHandler) Handle(ctx context.Context, req *jsonrpc2.Request) (any, error) { switch req.Method { case "ping": return &msg{"pong"}, nil @@ -296,7 +296,7 @@ func TestCloseCallRace(t *testing.T) { pokec := make(chan *jsonrpc2.AsyncCall, 1) s := jsonrpc2.NewServer(ctx, listener, jsonrpc2.BinderFunc(func(_ context.Context, srvConn *jsonrpc2.Connection) jsonrpc2.ConnectionOptions { - h := jsonrpc2.HandlerFunc(func(ctx context.Context, _ *jsonrpc2.Request) (interface{}, error) { + h := jsonrpc2.HandlerFunc(func(ctx context.Context, _ *jsonrpc2.Request) (any, error) { // Start a concurrent call from the server to the client. // The point of this test is to ensure this doesn't deadlock // if the client shuts down the connection concurrently. diff --git a/internal/jsonrpc2_v2/wire.go b/internal/jsonrpc2_v2/wire.go index 8f60fc62766..bc56951b5c3 100644 --- a/internal/jsonrpc2_v2/wire.go +++ b/internal/jsonrpc2_v2/wire.go @@ -45,7 +45,7 @@ const wireVersion = "2.0" // We can decode this and then work out which it is. 
type wireCombined struct { VersionTag string `json:"jsonrpc"` - ID interface{} `json:"id,omitempty"` + ID any `json:"id,omitempty"` Method string `json:"method,omitempty"` Params json.RawMessage `json:"params,omitempty"` Result json.RawMessage `json:"result,omitempty"` diff --git a/internal/jsonrpc2_v2/wire_test.go b/internal/jsonrpc2_v2/wire_test.go index e9337373239..c155c92f287 100644 --- a/internal/jsonrpc2_v2/wire_test.go +++ b/internal/jsonrpc2_v2/wire_test.go @@ -63,7 +63,7 @@ func TestWireMessage(t *testing.T) { } } -func newNotification(method string, params interface{}) jsonrpc2.Message { +func newNotification(method string, params any) jsonrpc2.Message { msg, err := jsonrpc2.NewNotification(method, params) if err != nil { panic(err) @@ -71,7 +71,7 @@ func newNotification(method string, params interface{}) jsonrpc2.Message { return msg } -func newID(id interface{}) jsonrpc2.ID { +func newID(id any) jsonrpc2.ID { switch v := id.(type) { case nil: return jsonrpc2.ID{} @@ -86,7 +86,7 @@ func newID(id interface{}) jsonrpc2.ID { } } -func newCall(id interface{}, method string, params interface{}) jsonrpc2.Message { +func newCall(id any, method string, params any) jsonrpc2.Message { msg, err := jsonrpc2.NewCall(newID(id), method, params) if err != nil { panic(err) @@ -94,7 +94,7 @@ func newCall(id interface{}, method string, params interface{}) jsonrpc2.Message return msg } -func newResponse(id interface{}, result interface{}, rerr error) jsonrpc2.Message { +func newResponse(id any, result any, rerr error) jsonrpc2.Message { msg, err := jsonrpc2.NewResponse(newID(id), result, rerr) if err != nil { panic(err) diff --git a/internal/memoize/memoize.go b/internal/memoize/memoize.go index e56af3bb45b..e49942a8827 100644 --- a/internal/memoize/memoize.go +++ b/internal/memoize/memoize.go @@ -42,7 +42,7 @@ import ( // The main purpose of the argument is to avoid the Function closure // needing to retain large objects (in practice: the snapshot) in // memory that can be supplied at call time by any caller. -type Function func(ctx context.Context, arg interface{}) interface{} +type Function func(ctx context.Context, arg any) any // A RefCounted is a value whose functional lifetime is determined by // reference counting. @@ -94,7 +94,7 @@ type Promise struct { // the function that will be used to populate the value function Function // value is set in completed state. - value interface{} + value any } // NewPromise returns a promise for the future result of calling the @@ -124,7 +124,7 @@ const ( // // It will never cause the value to be generated. // It will return the cached value, if present. -func (p *Promise) Cached() interface{} { +func (p *Promise) Cached() any { p.mu.Lock() defer p.mu.Unlock() if p.state == stateCompleted { @@ -144,7 +144,7 @@ func (p *Promise) Cached() interface{} { // If all concurrent calls to Get are cancelled, the context provided // to the function is cancelled. A later call to Get may attempt to // call the function again. -func (p *Promise) Get(ctx context.Context, arg interface{}) (interface{}, error) { +func (p *Promise) Get(ctx context.Context, arg any) (any, error) { if ctx.Err() != nil { return nil, ctx.Err() } @@ -163,7 +163,7 @@ func (p *Promise) Get(ctx context.Context, arg interface{}) (interface{}, error) } // run starts p.function and returns the result. p.mu must be locked. 
-func (p *Promise) run(ctx context.Context, arg interface{}) (interface{}, error) { +func (p *Promise) run(ctx context.Context, arg any) (any, error) { childCtx, cancel := context.WithCancel(xcontext.Detach(ctx)) p.cancel = cancel p.state = stateRunning @@ -210,7 +210,7 @@ func (p *Promise) run(ctx context.Context, arg interface{}) (interface{}, error) } // wait waits for the value to be computed, or ctx to be cancelled. p.mu must be locked. -func (p *Promise) wait(ctx context.Context) (interface{}, error) { +func (p *Promise) wait(ctx context.Context) (any, error) { p.waiters++ done := p.done p.mu.Unlock() @@ -258,7 +258,7 @@ type Store struct { evictionPolicy EvictionPolicy promisesMu sync.Mutex - promises map[interface{}]*Promise + promises map[any]*Promise } // NewStore creates a new store with the given eviction policy. @@ -276,13 +276,13 @@ func NewStore(policy EvictionPolicy) *Store { // // Once the last reference has been released, the promise is removed from the // store. -func (store *Store) Promise(key interface{}, function Function) (*Promise, func()) { +func (store *Store) Promise(key any, function Function) (*Promise, func()) { store.promisesMu.Lock() p, ok := store.promises[key] if !ok { p = NewPromise(reflect.TypeOf(key).String(), function) if store.promises == nil { - store.promises = map[interface{}]*Promise{} + store.promises = map[any]*Promise{} } store.promises[key] = p } @@ -323,7 +323,7 @@ func (s *Store) Stats() map[reflect.Type]int { // DebugOnlyIterate iterates through the store and, for each completed // promise, calls f(k, v) for the map key k and function result v. It // should only be used for debugging purposes. -func (s *Store) DebugOnlyIterate(f func(k, v interface{})) { +func (s *Store) DebugOnlyIterate(f func(k, v any)) { s.promisesMu.Lock() defer s.promisesMu.Unlock() diff --git a/internal/memoize/memoize_test.go b/internal/memoize/memoize_test.go index c54572d59ca..08b097eb081 100644 --- a/internal/memoize/memoize_test.go +++ b/internal/memoize/memoize_test.go @@ -18,7 +18,7 @@ func TestGet(t *testing.T) { evaled := 0 - h, release := store.Promise("key", func(context.Context, interface{}) interface{} { + h, release := store.Promise("key", func(context.Context, any) any { evaled++ return "res" }) @@ -30,7 +30,7 @@ func TestGet(t *testing.T) { } } -func expectGet(t *testing.T, h *memoize.Promise, wantV interface{}) { +func expectGet(t *testing.T, h *memoize.Promise, wantV any) { t.Helper() gotV, gotErr := h.Get(context.Background(), nil) if gotV != wantV || gotErr != nil { @@ -40,7 +40,7 @@ func expectGet(t *testing.T, h *memoize.Promise, wantV interface{}) { func TestNewPromise(t *testing.T) { calls := 0 - f := func(context.Context, interface{}) interface{} { + f := func(context.Context, any) any { calls++ return calls } @@ -63,10 +63,10 @@ func TestStoredPromiseRefCounting(t *testing.T) { var store memoize.Store v1 := false v2 := false - p1, release1 := store.Promise("key1", func(context.Context, interface{}) interface{} { + p1, release1 := store.Promise("key1", func(context.Context, any) any { return &v1 }) - p2, release2 := store.Promise("key2", func(context.Context, interface{}) interface{} { + p2, release2 := store.Promise("key2", func(context.Context, any) any { return &v2 }) expectGet(t, p1, &v1) @@ -75,7 +75,7 @@ func TestStoredPromiseRefCounting(t *testing.T) { expectGet(t, p1, &v1) expectGet(t, p2, &v2) - p2Copy, release2Copy := store.Promise("key2", func(context.Context, interface{}) interface{} { + p2Copy, release2Copy := 
store.Promise("key2", func(context.Context, any) any { return &v1 }) if p2 != p2Copy { @@ -93,7 +93,7 @@ func TestStoredPromiseRefCounting(t *testing.T) { } release1() - p2Copy, release2Copy = store.Promise("key2", func(context.Context, interface{}) interface{} { + p2Copy, release2Copy = store.Promise("key2", func(context.Context, any) any { return &v2 }) if p2 == p2Copy { @@ -109,7 +109,7 @@ func TestPromiseDestroyedWhileRunning(t *testing.T) { c := make(chan int) var v int - h, release := store.Promise("key", func(ctx context.Context, _ interface{}) interface{} { + h, release := store.Promise("key", func(ctx context.Context, _ any) any { <-c <-c if err := ctx.Err(); err != nil { @@ -123,7 +123,7 @@ func TestPromiseDestroyedWhileRunning(t *testing.T) { var wg sync.WaitGroup wg.Add(1) - var got interface{} + var got any var err error go func() { got, err = h.Get(ctx, nil) @@ -146,7 +146,7 @@ func TestPromiseDestroyedWhileRunning(t *testing.T) { func TestDoubleReleasePanics(t *testing.T) { var store memoize.Store - _, release := store.Promise("key", func(ctx context.Context, _ interface{}) interface{} { return 0 }) + _, release := store.Promise("key", func(ctx context.Context, _ any) any { return 0 }) panicked := false diff --git a/internal/packagesinternal/packages.go b/internal/packagesinternal/packages.go index 784605914e0..25ebab663ba 100644 --- a/internal/packagesinternal/packages.go +++ b/internal/packagesinternal/packages.go @@ -17,4 +17,4 @@ var TypecheckCgo int var DepsErrors int // must be set as a LoadMode to call GetDepsErrors var SetModFlag = func(config any, value string) {} -var SetModFile = func(config interface{}, value string) {} +var SetModFile = func(config any, value string) {} diff --git a/internal/packagestest/expect.go b/internal/packagestest/expect.go index e3e3509579d..a5f76f55686 100644 --- a/internal/packagestest/expect.go +++ b/internal/packagestest/expect.go @@ -72,7 +72,7 @@ const ( // // It is safe to call this repeatedly with different method sets, but it is // not safe to call it concurrently. -func (e *Exported) Expect(methods map[string]interface{}) error { +func (e *Exported) Expect(methods map[string]any) error { if err := e.getNotes(); err != nil { return err } @@ -98,7 +98,7 @@ func (e *Exported) Expect(methods map[string]interface{}) error { n = &expect.Note{ Pos: n.Pos, Name: markMethod, - Args: []interface{}{n.Name, n.Name}, + Args: []any{n.Name, n.Name}, } } mi, ok := ms[n.Name] @@ -222,7 +222,7 @@ func (e *Exported) getMarkers() error { } // set markers early so that we don't call getMarkers again from Expect e.markers = make(map[string]Range) - return e.Expect(map[string]interface{}{ + return e.Expect(map[string]any{ markMethod: e.Mark, }) } @@ -243,7 +243,7 @@ var ( // It takes the args remaining, and returns the args it did not consume. // This allows a converter to consume 0 args for well known types, or multiple // args for compound types. -type converter func(*expect.Note, []interface{}) (reflect.Value, []interface{}, error) +type converter func(*expect.Note, []any) (reflect.Value, []any, error) // method is used to track information about Invoke methods that is expensive to // calculate so that we can work it out once rather than per marker. 
@@ -259,19 +259,19 @@ type method struct { func (e *Exported) buildConverter(pt reflect.Type) (converter, error) { switch { case pt == noteType: - return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) { + return func(n *expect.Note, args []any) (reflect.Value, []any, error) { return reflect.ValueOf(n), args, nil }, nil case pt == fsetType: - return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) { + return func(n *expect.Note, args []any) (reflect.Value, []any, error) { return reflect.ValueOf(e.ExpectFileSet), args, nil }, nil case pt == exportedType: - return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) { + return func(n *expect.Note, args []any) (reflect.Value, []any, error) { return reflect.ValueOf(e), args, nil }, nil case pt == posType: - return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) { + return func(n *expect.Note, args []any) (reflect.Value, []any, error) { r, remains, err := e.rangeConverter(n, args) if err != nil { return reflect.Value{}, nil, err @@ -279,7 +279,7 @@ func (e *Exported) buildConverter(pt reflect.Type) (converter, error) { return reflect.ValueOf(r.Start), remains, nil }, nil case pt == positionType: - return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) { + return func(n *expect.Note, args []any) (reflect.Value, []any, error) { r, remains, err := e.rangeConverter(n, args) if err != nil { return reflect.Value{}, nil, err @@ -287,7 +287,7 @@ func (e *Exported) buildConverter(pt reflect.Type) (converter, error) { return reflect.ValueOf(e.ExpectFileSet.Position(r.Start)), remains, nil }, nil case pt == rangeType: - return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) { + return func(n *expect.Note, args []any) (reflect.Value, []any, error) { r, remains, err := e.rangeConverter(n, args) if err != nil { return reflect.Value{}, nil, err @@ -295,7 +295,7 @@ func (e *Exported) buildConverter(pt reflect.Type) (converter, error) { return reflect.ValueOf(r), remains, nil }, nil case pt == identifierType: - return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) { + return func(n *expect.Note, args []any) (reflect.Value, []any, error) { if len(args) < 1 { return reflect.Value{}, nil, fmt.Errorf("missing argument") } @@ -310,7 +310,7 @@ func (e *Exported) buildConverter(pt reflect.Type) (converter, error) { }, nil case pt == regexType: - return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) { + return func(n *expect.Note, args []any) (reflect.Value, []any, error) { if len(args) < 1 { return reflect.Value{}, nil, fmt.Errorf("missing argument") } @@ -323,7 +323,7 @@ func (e *Exported) buildConverter(pt reflect.Type) (converter, error) { }, nil case pt.Kind() == reflect.String: - return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) { + return func(n *expect.Note, args []any) (reflect.Value, []any, error) { if len(args) < 1 { return reflect.Value{}, nil, fmt.Errorf("missing argument") } @@ -339,7 +339,7 @@ func (e *Exported) buildConverter(pt reflect.Type) (converter, error) { } }, nil case pt.Kind() == reflect.Int64: - return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) { + return func(n *expect.Note, args []any) (reflect.Value, []any, error) { if len(args) < 1 { return reflect.Value{}, nil, fmt.Errorf("missing argument") } @@ -353,7 +353,7 @@ func (e 
*Exported) buildConverter(pt reflect.Type) (converter, error) { } }, nil case pt.Kind() == reflect.Bool: - return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) { + return func(n *expect.Note, args []any) (reflect.Value, []any, error) { if len(args) < 1 { return reflect.Value{}, nil, fmt.Errorf("missing argument") } @@ -366,7 +366,7 @@ func (e *Exported) buildConverter(pt reflect.Type) (converter, error) { return reflect.ValueOf(b), args, nil }, nil case pt.Kind() == reflect.Slice: - return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) { + return func(n *expect.Note, args []any) (reflect.Value, []any, error) { converter, err := e.buildConverter(pt.Elem()) if err != nil { return reflect.Value{}, nil, err @@ -384,7 +384,7 @@ func (e *Exported) buildConverter(pt reflect.Type) (converter, error) { }, nil default: if pt.Kind() == reflect.Interface && pt.NumMethod() == 0 { - return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) { + return func(n *expect.Note, args []any) (reflect.Value, []any, error) { if len(args) < 1 { return reflect.Value{}, nil, fmt.Errorf("missing argument") } @@ -395,7 +395,7 @@ func (e *Exported) buildConverter(pt reflect.Type) (converter, error) { } } -func (e *Exported) rangeConverter(n *expect.Note, args []interface{}) (Range, []interface{}, error) { +func (e *Exported) rangeConverter(n *expect.Note, args []any) (Range, []any, error) { tokFile := e.ExpectFileSet.File(n.Pos) if len(args) < 1 { return Range{}, nil, fmt.Errorf("missing argument") diff --git a/internal/packagestest/expect_test.go b/internal/packagestest/expect_test.go index d155f5fe9e2..4f148b4183e 100644 --- a/internal/packagestest/expect_test.go +++ b/internal/packagestest/expect_test.go @@ -19,7 +19,7 @@ func TestExpect(t *testing.T) { }}) defer exported.Cleanup() checkCount := 0 - if err := exported.Expect(map[string]interface{}{ + if err := exported.Expect(map[string]any{ "check": func(src, target token.Position) { checkCount++ }, diff --git a/internal/packagestest/export.go b/internal/packagestest/export.go index f8d10718c09..ce992e17a90 100644 --- a/internal/packagestest/export.go +++ b/internal/packagestest/export.go @@ -97,7 +97,7 @@ type Module struct { // The keys are the file fragment that follows the module name, the value can // be a string or byte slice, in which case it is the contents of the // file, otherwise it must be a Writer function. - Files map[string]interface{} + Files map[string]any // Overlay is the set of source file overlays for the module. // The keys are the file fragment as in the Files configuration. @@ -479,7 +479,7 @@ func GroupFilesByModules(root string) ([]Module, error) { primarymod := &Module{ Name: root, - Files: make(map[string]interface{}), + Files: make(map[string]any), Overlay: make(map[string][]byte), } mods := map[string]*Module{ @@ -569,7 +569,7 @@ func GroupFilesByModules(root string) ([]Module, error) { } mods[path] = &Module{ Name: filepath.ToSlash(module), - Files: make(map[string]interface{}), + Files: make(map[string]any), Overlay: make(map[string][]byte), } currentModule = path @@ -587,8 +587,8 @@ func GroupFilesByModules(root string) ([]Module, error) { // This is to enable the common case in tests where you have a full copy of the // package in your testdata. // This will panic if there is any kind of error trying to walk the file tree. 
-func MustCopyFileTree(root string) map[string]interface{} { - result := map[string]interface{}{} +func MustCopyFileTree(root string) map[string]any { + result := map[string]any{} if err := filepath.Walk(filepath.FromSlash(root), func(path string, info os.FileInfo, err error) error { if err != nil { return err diff --git a/internal/packagestest/export_test.go b/internal/packagestest/export_test.go index 6c074216fbe..fae8bd2d5ba 100644 --- a/internal/packagestest/export_test.go +++ b/internal/packagestest/export_test.go @@ -16,7 +16,7 @@ import ( var testdata = []packagestest.Module{{ Name: "golang.org/fake1", - Files: map[string]interface{}{ + Files: map[string]any{ "a.go": packagestest.Symlink("testdata/a.go"), // broken symlink "b.go": "invalid file contents", }, @@ -26,22 +26,22 @@ var testdata = []packagestest.Module{{ }, }, { Name: "golang.org/fake2", - Files: map[string]interface{}{ + Files: map[string]any{ "other/a.go": "package fake2", }, }, { Name: "golang.org/fake2/v2", - Files: map[string]interface{}{ + Files: map[string]any{ "other/a.go": "package fake2", }, }, { Name: "golang.org/fake3@v1.0.0", - Files: map[string]interface{}{ + Files: map[string]any{ "other/a.go": "package fake3", }, }, { Name: "golang.org/fake3@v1.1.0", - Files: map[string]interface{}{ + Files: map[string]any{ "other/a.go": "package fake3", }, }} @@ -97,13 +97,13 @@ func TestGroupFilesByModules(t *testing.T) { want: []packagestest.Module{ { Name: "testdata/groups/one", - Files: map[string]interface{}{ + Files: map[string]any{ "main.go": true, }, }, { Name: "example.com/extra", - Files: map[string]interface{}{ + Files: map[string]any{ "help.go": true, }, }, @@ -114,7 +114,7 @@ func TestGroupFilesByModules(t *testing.T) { want: []packagestest.Module{ { Name: "testdata/groups/two", - Files: map[string]interface{}{ + Files: map[string]any{ "main.go": true, "expect/yo.go": true, "expect/yo_test.go": true, @@ -122,33 +122,33 @@ func TestGroupFilesByModules(t *testing.T) { }, { Name: "example.com/extra", - Files: map[string]interface{}{ + Files: map[string]any{ "yo.go": true, "geez/help.go": true, }, }, { Name: "example.com/extra/v2", - Files: map[string]interface{}{ + Files: map[string]any{ "me.go": true, "geez/help.go": true, }, }, { Name: "example.com/tempmod", - Files: map[string]interface{}{ + Files: map[string]any{ "main.go": true, }, }, { Name: "example.com/what@v1.0.0", - Files: map[string]interface{}{ + Files: map[string]any{ "main.go": true, }, }, { Name: "example.com/what@v1.1.0", - Files: map[string]interface{}{ + Files: map[string]any{ "main.go": true, }, }, diff --git a/internal/tool/tool.go b/internal/tool/tool.go index fe2b1c289b8..6420c9667d9 100644 --- a/internal/tool/tool.go +++ b/internal/tool/tool.go @@ -81,7 +81,7 @@ func (e commandLineError) Error() string { return string(e) } // CommandLineErrorf is like fmt.Errorf except that it returns a value that // triggers printing of the command line help. // In general you should use this when generating command line validation errors. 
-func CommandLineErrorf(message string, args ...interface{}) error { +func CommandLineErrorf(message string, args ...any) error { return commandLineError(fmt.Sprintf(message, args...)) } diff --git a/internal/typeparams/normalize.go b/internal/typeparams/normalize.go index 93c80fdc96c..f49802b8ef7 100644 --- a/internal/typeparams/normalize.go +++ b/internal/typeparams/normalize.go @@ -120,7 +120,7 @@ type termSet struct { terms termlist } -func indentf(depth int, format string, args ...interface{}) { +func indentf(depth int, format string, args ...any) { fmt.Fprintf(os.Stderr, strings.Repeat(".", depth)+format+"\n", args...) } diff --git a/internal/xcontext/xcontext.go b/internal/xcontext/xcontext.go index ff8ed4ebb95..641dfe5a102 100644 --- a/internal/xcontext/xcontext.go +++ b/internal/xcontext/xcontext.go @@ -17,7 +17,7 @@ func Detach(ctx context.Context) context.Context { return detachedContext{ctx} } type detachedContext struct{ parent context.Context } -func (v detachedContext) Deadline() (time.Time, bool) { return time.Time{}, false } -func (v detachedContext) Done() <-chan struct{} { return nil } -func (v detachedContext) Err() error { return nil } -func (v detachedContext) Value(key interface{}) interface{} { return v.parent.Value(key) } +func (v detachedContext) Deadline() (time.Time, bool) { return time.Time{}, false } +func (v detachedContext) Done() <-chan struct{} { return nil } +func (v detachedContext) Err() error { return nil } +func (v detachedContext) Value(key any) any { return v.parent.Value(key) } From 8a39d47f70846bf278b4bfb793f04b76e478e37b Mon Sep 17 00:00:00 2001 From: Viktor Blomqvist Date: Mon, 13 Jan 2025 20:25:31 +0100 Subject: [PATCH 044/270] gopls/internal/golang: Add "Eliminate dot import" code action. The code action will qualify identifiers if possible. If there are names in scope which will shadow the package name then the code action fails. Updates golang/go#70319. Change-Id: I7c1ff1c60d592cb6f1093ab653c04a44d7092607 Reviewed-on: https://go-review.googlesource.com/c/tools/+/642016 Auto-Submit: Robert Findley Reviewed-by: Robert Findley Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI --- gopls/doc/features/transformation.md | 9 ++ gopls/doc/release/v0.19.0.md | 7 ++ gopls/internal/golang/codeaction.go | 100 ++++++++++++++++++ gopls/internal/settings/codeactionkind.go | 19 ++-- .../codeaction/eliminate_dot_import.txt | 40 +++++++ 5 files changed, 166 insertions(+), 9 deletions(-) create mode 100644 gopls/doc/release/v0.19.0.md create mode 100644 gopls/internal/test/marker/testdata/codeaction/eliminate_dot_import.txt diff --git a/gopls/doc/features/transformation.md b/gopls/doc/features/transformation.md index caf13221cfa..a72ff676832 100644 --- a/gopls/doc/features/transformation.md +++ b/gopls/doc/features/transformation.md @@ -814,3 +814,12 @@ which HTML documents are composed: ![Before "Add cases for Addr"](../assets/fill-switch-enum-before.png) ![After "Add cases for Addr"](../assets/fill-switch-enum-after.png) + + + +### `refactor.rewrite.eliminateDotImport`: Eliminate dot import + +When the cursor is on a dot import gopls can offer the "Eliminate dot import" +code action, which removes the dot from the import and qualifies uses of the +package throughout the file. This code action is offered only if +each use of the package can be qualified without collisions with existing names. 
diff --git a/gopls/doc/release/v0.19.0.md b/gopls/doc/release/v0.19.0.md new file mode 100644 index 00000000000..0b3ea64c305 --- /dev/null +++ b/gopls/doc/release/v0.19.0.md @@ -0,0 +1,7 @@ +# New features + +## "Eliminate dot import" code action + +This code action, available on a dotted import, will offer to replace +the import with a regular one and qualify each use of the package +with its name. diff --git a/gopls/internal/golang/codeaction.go b/gopls/internal/golang/codeaction.go index 49a861852ff..587ae3e2de3 100644 --- a/gopls/internal/golang/codeaction.go +++ b/gopls/internal/golang/codeaction.go @@ -260,6 +260,7 @@ var codeActionProducers = [...]codeActionProducer{ {kind: settings.RefactorRewriteMoveParamLeft, fn: refactorRewriteMoveParamLeft, needPkg: true}, {kind: settings.RefactorRewriteMoveParamRight, fn: refactorRewriteMoveParamRight, needPkg: true}, {kind: settings.RefactorRewriteSplitLines, fn: refactorRewriteSplitLines, needPkg: true}, + {kind: settings.RefactorRewriteEliminateDotImport, fn: refactorRewriteEliminateDotImport, needPkg: true}, // Note: don't forget to update the allow-list in Server.CodeAction // when adding new query operations like GoTest and GoDoc that @@ -678,6 +679,105 @@ func refactorRewriteSplitLines(ctx context.Context, req *codeActionsRequest) err return nil } +func refactorRewriteEliminateDotImport(ctx context.Context, req *codeActionsRequest) error { + // Figure out if the request is placed over a dot import. + var importSpec *ast.ImportSpec + for _, imp := range req.pgf.File.Imports { + if posRangeContains(imp.Pos(), imp.End(), req.start, req.end) { + importSpec = imp + break + } + } + if importSpec == nil { + return nil + } + if importSpec.Name == nil || importSpec.Name.Name != "." { + return nil + } + + // dotImported package path and its imported name after removing the dot. + imported := req.pkg.TypesInfo().PkgNameOf(importSpec).Imported() + newName := imported.Name() + + rng, err := req.pgf.PosRange(importSpec.Name.Pos(), importSpec.Path.Pos()) + if err != nil { + return err + } + // Delete the '.' part of the import. + edits := []protocol.TextEdit{{ + Range: rng, + }} + + fileScope, ok := req.pkg.TypesInfo().Scopes[req.pgf.File] + if !ok { + return nil + } + + // Go through each use of the dot imported package, checking its scope for + // shadowing and calculating an edit to qualify the identifier. + var stack []ast.Node + ast.Inspect(req.pgf.File, func(n ast.Node) bool { + if n == nil { + stack = stack[:len(stack)-1] // pop + return false + } + stack = append(stack, n) // push + + ident, ok := n.(*ast.Ident) + if !ok { + return true + } + // Only keep identifiers that use a symbol from the + // dot imported package. + use := req.pkg.TypesInfo().Uses[ident] + if use == nil || use.Pkg() == nil { + return true + } + if use.Pkg() != imported { + return true + } + + // Only qualify unqualified identifiers (due to dot imports). + // All other references to a symbol imported from another package + // are nested within a select expression (pkg.Foo, v.Method, v.Field). + if is[*ast.SelectorExpr](stack[len(stack)-2]) { + return true + } + + // Make sure that the package name will not be shadowed by something else in scope. + // If it is then we cannot offer this particular code action. + // + // TODO: If the object found in scope is the package imported without a + // dot, or some builtin not used in the file, the code action could be + // allowed to go through. 
+ sc := fileScope.Innermost(ident.Pos()) + if sc == nil { + return true + } + _, obj := sc.LookupParent(newName, ident.Pos()) + if obj != nil { + return true + } + + rng, err := req.pgf.PosRange(ident.Pos(), ident.Pos()) // sic, zero-width range before ident + if err != nil { + return true + } + edits = append(edits, protocol.TextEdit{ + Range: rng, + NewText: newName + ".", + }) + + return true + }) + + req.addEditAction("Eliminate dot import", nil, protocol.DocumentChangeEdit( + req.fh, + edits, + )) + return nil +} + // refactorRewriteJoinLines produces "Join ITEMS into one line" code actions. // See [joinLines] for command implementation. func refactorRewriteJoinLines(ctx context.Context, req *codeActionsRequest) error { diff --git a/gopls/internal/settings/codeactionkind.go b/gopls/internal/settings/codeactionkind.go index fcce7cd2682..09d9d419567 100644 --- a/gopls/internal/settings/codeactionkind.go +++ b/gopls/internal/settings/codeactionkind.go @@ -86,15 +86,16 @@ const ( GoplsDocFeatures protocol.CodeActionKind = "gopls.doc.features" // refactor.rewrite - RefactorRewriteChangeQuote protocol.CodeActionKind = "refactor.rewrite.changeQuote" - RefactorRewriteFillStruct protocol.CodeActionKind = "refactor.rewrite.fillStruct" - RefactorRewriteFillSwitch protocol.CodeActionKind = "refactor.rewrite.fillSwitch" - RefactorRewriteInvertIf protocol.CodeActionKind = "refactor.rewrite.invertIf" - RefactorRewriteJoinLines protocol.CodeActionKind = "refactor.rewrite.joinLines" - RefactorRewriteRemoveUnusedParam protocol.CodeActionKind = "refactor.rewrite.removeUnusedParam" - RefactorRewriteMoveParamLeft protocol.CodeActionKind = "refactor.rewrite.moveParamLeft" - RefactorRewriteMoveParamRight protocol.CodeActionKind = "refactor.rewrite.moveParamRight" - RefactorRewriteSplitLines protocol.CodeActionKind = "refactor.rewrite.splitLines" + RefactorRewriteChangeQuote protocol.CodeActionKind = "refactor.rewrite.changeQuote" + RefactorRewriteFillStruct protocol.CodeActionKind = "refactor.rewrite.fillStruct" + RefactorRewriteFillSwitch protocol.CodeActionKind = "refactor.rewrite.fillSwitch" + RefactorRewriteInvertIf protocol.CodeActionKind = "refactor.rewrite.invertIf" + RefactorRewriteJoinLines protocol.CodeActionKind = "refactor.rewrite.joinLines" + RefactorRewriteRemoveUnusedParam protocol.CodeActionKind = "refactor.rewrite.removeUnusedParam" + RefactorRewriteMoveParamLeft protocol.CodeActionKind = "refactor.rewrite.moveParamLeft" + RefactorRewriteMoveParamRight protocol.CodeActionKind = "refactor.rewrite.moveParamRight" + RefactorRewriteSplitLines protocol.CodeActionKind = "refactor.rewrite.splitLines" + RefactorRewriteEliminateDotImport protocol.CodeActionKind = "refactor.rewrite.eliminateDotImport" // refactor.inline RefactorInlineCall protocol.CodeActionKind = "refactor.inline.call" diff --git a/gopls/internal/test/marker/testdata/codeaction/eliminate_dot_import.txt b/gopls/internal/test/marker/testdata/codeaction/eliminate_dot_import.txt new file mode 100644 index 00000000000..e72d8bd5417 --- /dev/null +++ b/gopls/internal/test/marker/testdata/codeaction/eliminate_dot_import.txt @@ -0,0 +1,40 @@ +This test checks the behavior of the 'remove dot import' code action. + +-- go.mod -- +module golang.org/lsptests/removedotimport + +go 1.18 + +-- a.go -- +package dotimport + +// Base case: action is OK. + +import ( + . "fmt" //@codeaction(`.`, "refactor.rewrite.eliminateDotImport", edit=a1) + . 
"bytes" //@codeaction(`.`, "refactor.rewrite.eliminateDotImport", edit=a2) +) + +var _ = a + +func a() { + Println("hello") + + buf := NewBuffer(nil) + buf.Grow(10) +} + +-- @a1/a.go -- +@@ -6 +6 @@ +- . "fmt" //@codeaction(`.`, "refactor.rewrite.eliminateDotImport", edit=a1) ++ "fmt" //@codeaction(`.`, "refactor.rewrite.eliminateDotImport", edit=a1) +@@ -13 +13 @@ +- Println("hello") ++ fmt.Println("hello") +-- @a2/a.go -- +@@ -7 +7 @@ +- . "bytes" //@codeaction(`.`, "refactor.rewrite.eliminateDotImport", edit=a2) ++ "bytes" //@codeaction(`.`, "refactor.rewrite.eliminateDotImport", edit=a2) +@@ -15 +15 @@ +- buf := NewBuffer(nil) ++ buf := bytes.NewBuffer(nil) From 300465cc970af3836a5368d587764267a8f4d77e Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Wed, 19 Feb 2025 18:33:23 -0500 Subject: [PATCH 045/270] gopls/internal/analysis/modernize: fix rangeint bug info.Defs[v] is nil if the loop variable is not declared (for i = 0 instead of for i := 0). + test Updates golang/go#71847 Change-Id: I28f82188e813f2d4f1ddc9335f0c13bd90c31ec1 Reviewed-on: https://go-review.googlesource.com/c/tools/+/650815 Auto-Submit: Alan Donovan LUCI-TryBot-Result: Go LUCI Reviewed-by: Robert Findley --- gopls/internal/analysis/modernize/rangeint.go | 2 +- .../modernize/testdata/src/rangeint/rangeint.go | 13 +++++++++++++ .../testdata/src/rangeint/rangeint.go.golden | 13 +++++++++++++ 3 files changed, 27 insertions(+), 1 deletion(-) diff --git a/gopls/internal/analysis/modernize/rangeint.go b/gopls/internal/analysis/modernize/rangeint.go index 2d25d6a0a06..273c13877bd 100644 --- a/gopls/internal/analysis/modernize/rangeint.go +++ b/gopls/internal/analysis/modernize/rangeint.go @@ -75,7 +75,7 @@ func rangeint(pass *analysis.Pass) { // Have: for i = 0; i < limit; i++ {} // Find references to i within the loop body. 
- v := info.Defs[index] + v := info.ObjectOf(index) used := false for curId := range curLoop.Child(loop.Body).Preorder((*ast.Ident)(nil)) { id := curId.Node().(*ast.Ident) diff --git a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go index a60bd5eac37..6c30f183340 100644 --- a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go +++ b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go @@ -12,6 +12,9 @@ func _(i int, s struct{ i int }, slice []int) { for i := 0; i < len(slice); i++ { // want "for loop can be modernized using range over int" println(slice[i]) } + for i := 0; i < len(""); i++ { // want "for loop can be modernized using range over int" + // NB: not simplified to range "" + } // nope for i := 0; i < 10; { // nope: missing increment @@ -38,3 +41,13 @@ func _(i int, s struct{ i int }, slice []int) { } func f() int { return 0 } + +// Repro for part of #71847: ("for range n is invalid if the loop body contains i++"): +func _(s string) { + var i int // (this is necessary) + for i = 0; i < len(s); i++ { // nope: loop body increments i + if true { + i++ // nope + } + } +} diff --git a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden index 348f77508ac..52f16347b1e 100644 --- a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden +++ b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden @@ -12,6 +12,9 @@ func _(i int, s struct{ i int }, slice []int) { for i := range slice { // want "for loop can be modernized using range over int" println(slice[i]) } + for range len("") { // want "for loop can be modernized using range over int" + // NB: not simplified to range "" + } // nope for i := 0; i < 10; { // nope: missing increment @@ -38,3 +41,13 @@ func _(i int, s struct{ i int }, slice []int) { } func f() int { return 0 } + +// Repro for part of #71847: ("for range n is invalid if the loop body contains i++"): +func _(s string) { + var i int // (this is necessary) + for i = 0; i < len(s); i++ { // nope: loop body increments i + if true { + i++ // nope + } + } +} From f0af81c3ddded0970b2ffe7922f269b53e1a63bb Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Fri, 14 Feb 2025 14:24:21 -0500 Subject: [PATCH 046/270] gopls/internal/goasm: support Definition in Go *.s assembly This CL provides a minimal implementation of the Definition query within Go assembly files, plus a test. For now it only works for references to package-level symbols in the same package or a dependency. Details: - add file.Kind Asm and protocol.LanguageKind "go.s". - include .s files in metadata.Graph.IDs mapping. - set LanguageKind correctly in gopls CLI. Also: - add String() method to file.Handle. - add convenient forward deps iterator to Graph. - internal/extract: extract notes from .s files too. 
Updates golang/go#71754 Change-Id: I0c518c3279f825411221ebe23dc04654e129fc56 Reviewed-on: https://go-review.googlesource.com/c/tools/+/649461 Auto-Submit: Alan Donovan LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan Reviewed-by: Robert Findley Commit-Queue: Alan Donovan --- gopls/internal/cache/fs_memoized.go | 2 + gopls/internal/cache/fs_overlay.go | 2 + gopls/internal/cache/metadata/graph.go | 36 +++++ gopls/internal/cache/parse_cache_test.go | 4 + gopls/internal/cache/session.go | 1 + gopls/internal/cache/snapshot.go | 21 +++ gopls/internal/cmd/cmd.go | 17 ++- gopls/internal/cmd/definition.go | 2 +- gopls/internal/file/file.go | 2 + gopls/internal/file/kind.go | 8 +- gopls/internal/goasm/definition.go | 125 ++++++++++++++++++ gopls/internal/golang/snapshot.go | 14 +- gopls/internal/server/definition.go | 3 + .../internal/test/integration/fake/editor.go | 14 +- .../test/marker/testdata/definition/asm.txt | 33 +++++ internal/expect/extract.go | 46 ++++++- 16 files changed, 304 insertions(+), 26 deletions(-) create mode 100644 gopls/internal/goasm/definition.go create mode 100644 gopls/internal/test/marker/testdata/definition/asm.txt diff --git a/gopls/internal/cache/fs_memoized.go b/gopls/internal/cache/fs_memoized.go index 9f156e3e153..a179b0ce7f5 100644 --- a/gopls/internal/cache/fs_memoized.go +++ b/gopls/internal/cache/fs_memoized.go @@ -41,6 +41,8 @@ type diskFile struct { err error } +func (h *diskFile) String() string { return h.uri.Path() } + func (h *diskFile) URI() protocol.DocumentURI { return h.uri } func (h *diskFile) Identity() file.Identity { diff --git a/gopls/internal/cache/fs_overlay.go b/gopls/internal/cache/fs_overlay.go index 265598bb967..b18d6d3f154 100644 --- a/gopls/internal/cache/fs_overlay.go +++ b/gopls/internal/cache/fs_overlay.go @@ -64,6 +64,8 @@ type overlay struct { saved bool } +func (o *overlay) String() string { return o.uri.Path() } + func (o *overlay) URI() protocol.DocumentURI { return o.uri } func (o *overlay) Identity() file.Identity { diff --git a/gopls/internal/cache/metadata/graph.go b/gopls/internal/cache/metadata/graph.go index 4b846df53be..716b767e37b 100644 --- a/gopls/internal/cache/metadata/graph.go +++ b/gopls/internal/cache/metadata/graph.go @@ -5,7 +5,9 @@ package metadata import ( + "iter" "sort" + "strings" "golang.org/x/tools/go/packages" "golang.org/x/tools/gopls/internal/protocol" @@ -99,6 +101,11 @@ func newGraph(pkgs map[PackageID]*Package) *Graph { for _, uri := range mp.GoFiles { uris[uri] = struct{}{} } + for _, uri := range mp.OtherFiles { + if strings.HasSuffix(string(uri), ".s") { // assembly + uris[uri] = struct{}{} + } + } for uri := range uris { uriIDs[uri] = append(uriIDs[uri], id) } @@ -160,6 +167,35 @@ func (g *Graph) ReverseReflexiveTransitiveClosure(ids ...PackageID) map[PackageI return seen } +// ForwardReflexiveTransitiveClosure returns an iterator over the +// specified nodes and all their forward dependencies, in an arbitrary +// topological (dependencies-first) order. The order may vary. 
+func (g *Graph) ForwardReflexiveTransitiveClosure(ids ...PackageID) iter.Seq[*Package] { + return func(yield func(*Package) bool) { + seen := make(map[PackageID]bool) + var visit func(PackageID) bool + visit = func(id PackageID) bool { + if !seen[id] { + seen[id] = true + if mp := g.Packages[id]; mp != nil { + for _, depID := range mp.DepsByPkgPath { + if !visit(depID) { + return false + } + } + if !yield(mp) { + return false + } + } + } + return true + } + for _, id := range ids { + visit(id) + } + } +} + // breakImportCycles breaks import cycles in the metadata by deleting // Deps* edges. It modifies only metadata present in the 'updates' // subset. This function has an internal test. diff --git a/gopls/internal/cache/parse_cache_test.go b/gopls/internal/cache/parse_cache_test.go index 7aefac77c38..fe0548aa20d 100644 --- a/gopls/internal/cache/parse_cache_test.go +++ b/gopls/internal/cache/parse_cache_test.go @@ -218,6 +218,10 @@ type fakeFileHandle struct { hash file.Hash } +func (h fakeFileHandle) String() string { + return h.uri.Path() +} + func (h fakeFileHandle) URI() protocol.DocumentURI { return h.uri } diff --git a/gopls/internal/cache/session.go b/gopls/internal/cache/session.go index 5ae753eb91c..c2f57e985f7 100644 --- a/gopls/internal/cache/session.go +++ b/gopls/internal/cache/session.go @@ -1084,6 +1084,7 @@ type brokenFile struct { err error } +func (b brokenFile) String() string { return b.uri.Path() } func (b brokenFile) URI() protocol.DocumentURI { return b.uri } func (b brokenFile) Identity() file.Identity { return file.Identity{URI: b.uri} } func (b brokenFile) SameContentsOnDisk() bool { return false } diff --git a/gopls/internal/cache/snapshot.go b/gopls/internal/cache/snapshot.go index 578cea61eb7..754389c7008 100644 --- a/gopls/internal/cache/snapshot.go +++ b/gopls/internal/cache/snapshot.go @@ -323,6 +323,8 @@ func fileKind(fh file.Handle) file.Kind { return file.Sum case ".work": return file.Work + case ".s": + return file.Asm } return file.UnknownKind } @@ -645,6 +647,21 @@ func (s *Snapshot) Tests(ctx context.Context, ids ...PackageID) ([]*testfuncs.In return indexes, s.forEachPackage(ctx, ids, pre, post) } +// NarrowestMetadataForFile returns metadata for the narrowest package +// (the one with the fewest files) that encloses the specified file. +// The result may be a test variant, but never an intermediate test variant. +func (snapshot *Snapshot) NarrowestMetadataForFile(ctx context.Context, uri protocol.DocumentURI) (*metadata.Package, error) { + mps, err := snapshot.MetadataForFile(ctx, uri) + if err != nil { + return nil, err + } + metadata.RemoveIntermediateTestVariants(&mps) + if len(mps) == 0 { + return nil, fmt.Errorf("no package metadata for file %s", uri) + } + return mps[0], nil +} + // MetadataForFile returns a new slice containing metadata for each // package containing the Go file identified by uri, ordered by the // number of CompiledGoFiles (i.e. "narrowest" to "widest" package), @@ -652,6 +669,10 @@ func (s *Snapshot) Tests(ctx context.Context, ids ...PackageID) ([]*testfuncs.In // The result may include tests and intermediate test variants of // importable packages. // It returns an error if the context was cancelled. +// +// TODO(adonovan): in nearly all cases the caller must use +// [metadata.RemoveIntermediateTestVariants]. Make this a parameter to +// force the caller to consider it (and reduce code). 
func (s *Snapshot) MetadataForFile(ctx context.Context, uri protocol.DocumentURI) ([]*metadata.Package, error) { if s.view.typ == AdHocView { // As described in golang/go#57209, in ad-hoc workspaces (where we load ./ diff --git a/gopls/internal/cmd/cmd.go b/gopls/internal/cmd/cmd.go index 2a161ad0fc8..8bd7d7b899f 100644 --- a/gopls/internal/cmd/cmd.go +++ b/gopls/internal/cmd/cmd.go @@ -773,10 +773,25 @@ func (c *connection) openFile(ctx context.Context, uri protocol.DocumentURI) (*c return nil, file.err } + // Choose language ID from file extension. + var langID protocol.LanguageKind // "" eventually maps to file.UnknownKind + switch filepath.Ext(uri.Path()) { + case ".go": + langID = "go" + case ".mod": + langID = "go.mod" + case ".sum": + langID = "go.sum" + case ".work": + langID = "go.work" + case ".s": + langID = "go.s" + } + p := &protocol.DidOpenTextDocumentParams{ TextDocument: protocol.TextDocumentItem{ URI: uri, - LanguageID: "go", + LanguageID: langID, Version: 1, Text: string(file.mapper.Content), }, diff --git a/gopls/internal/cmd/definition.go b/gopls/internal/cmd/definition.go index d9cd98554e3..71e8b1511bd 100644 --- a/gopls/internal/cmd/definition.go +++ b/gopls/internal/cmd/definition.go @@ -96,7 +96,7 @@ func (d *definition) Run(ctx context.Context, args ...string) error { } if len(locs) == 0 { - return fmt.Errorf("%v: not an identifier", from) + return fmt.Errorf("%v: no definition location (not an identifier?)", from) } file, err = conn.openFile(ctx, locs[0].URI) if err != nil { diff --git a/gopls/internal/file/file.go b/gopls/internal/file/file.go index 5f8be06cf60..b817306aa07 100644 --- a/gopls/internal/file/file.go +++ b/gopls/internal/file/file.go @@ -49,6 +49,8 @@ type Handle interface { // Content returns the contents of a file. // If the file is not available, returns a nil slice and an error. Content() ([]byte, error) + // String returns the file's path. + String() string } // A Source maps URIs to Handles. diff --git a/gopls/internal/file/kind.go b/gopls/internal/file/kind.go index 087a57f32d0..6a0ed009ed5 100644 --- a/gopls/internal/file/kind.go +++ b/gopls/internal/file/kind.go @@ -28,6 +28,8 @@ const ( Tmpl // Work is a go.work file. Work + // Asm is a Go assembly (.s) file. + Asm ) func (k Kind) String() string { @@ -42,13 +44,15 @@ func (k Kind) String() string { return "tmpl" case Work: return "go.work" + case Asm: + return "Go assembly" default: return fmt.Sprintf("internal error: unknown file kind %d", k) } } // KindForLang returns the gopls file [Kind] associated with the given LSP -// LanguageKind string from protocol.TextDocumentItem.LanguageID, +// LanguageKind string from the LanguageID field of [protocol.TextDocumentItem], // or UnknownKind if the language is not one recognized by gopls. func KindForLang(langID protocol.LanguageKind) Kind { switch langID { @@ -62,6 +66,8 @@ func KindForLang(langID protocol.LanguageKind) Kind { return Tmpl case "go.work": return Work + case "go.s": + return Asm default: return UnknownKind } diff --git a/gopls/internal/goasm/definition.go b/gopls/internal/goasm/definition.go new file mode 100644 index 00000000000..4403e7cac7f --- /dev/null +++ b/gopls/internal/goasm/definition.go @@ -0,0 +1,125 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package goasm + +import ( + "bytes" + "context" + "fmt" + "go/token" + "strings" + "unicode" + + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/cache/metadata" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/util/morestrings" + "golang.org/x/tools/internal/event" +) + +// Definition handles the textDocument/definition request for Go assembly files. +func Definition(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, position protocol.Position) ([]protocol.Location, error) { + ctx, done := event.Start(ctx, "goasm.Definition") + defer done() + + mp, err := snapshot.NarrowestMetadataForFile(ctx, fh.URI()) + if err != nil { + return nil, err + } + + // Read the file. + content, err := fh.Content() + if err != nil { + return nil, err + } + mapper := protocol.NewMapper(fh.URI(), content) + offset, err := mapper.PositionOffset(position) + if err != nil { + return nil, err + } + + // Figure out the selected symbol. + // For now, just find the identifier around the cursor. + // + // TODO(adonovan): use a real asm parser; see cmd/asm/internal/asm/parse.go. + // Ideally this would just be just another attribute of the + // type-checked cache.Package. + nonIdentRune := func(r rune) bool { return !isIdentRune(r) } + i := bytes.LastIndexFunc(content[:offset], nonIdentRune) + j := bytes.IndexFunc(content[offset:], nonIdentRune) + if j < 0 || j == 0 { + return nil, nil // identifier runs to EOF, or not an identifier + } + sym := string(content[i+1 : offset+j]) + sym = strings.ReplaceAll(sym, "·", ".") // (U+00B7 MIDDLE DOT) + sym = strings.ReplaceAll(sym, "∕", "/") // (U+2215 DIVISION SLASH) + if sym != "" && sym[0] == '.' { + sym = string(mp.PkgPath) + sym + } + + // package-qualified symbol? + if pkgpath, name, ok := morestrings.CutLast(sym, "."); ok { + // Find declaring package among dependencies. + // + // TODO(adonovan): assembly may legally reference + // non-dependencies. For example, sync/atomic calls + // internal/runtime/atomic. Perhaps we should search + // the entire metadata graph, but that's path-dependent. + var declaring *metadata.Package + for pkg := range snapshot.MetadataGraph().ForwardReflexiveTransitiveClosure(mp.ID) { + if pkg.PkgPath == metadata.PackagePath(pkgpath) { + declaring = pkg + break + } + } + if declaring == nil { + return nil, fmt.Errorf("package %q is not a dependency", pkgpath) + } + + pkgs, err := snapshot.TypeCheck(ctx, declaring.ID) + if err != nil { + return nil, err + } + pkg := pkgs[0] + def := pkg.Types().Scope().Lookup(name) + if def == nil { + return nil, fmt.Errorf("no symbol %q in package %q", name, pkgpath) + } + loc, err := mapPosition(ctx, pkg.FileSet(), snapshot, def.Pos(), def.Pos()) + if err == nil { + return []protocol.Location{loc}, nil + } + } + + // TODO(adonovan): support jump to var, block label, and other + // TEXT, DATA, and GLOBAL symbols in the same file. Needs asm parser. + + return nil, nil +} + +// The assembler allows center dot (· U+00B7) and +// division slash (∕ U+2215) to work as identifier characters. +func isIdentRune(r rune) bool { + return unicode.IsLetter(r) || unicode.IsDigit(r) || r == '_' || r == '·' || r == '∕' +} + +// TODO(rfindley): avoid the duplicate column mapping here, by associating a +// column mapper with each file handle. +// TODO(adonovan): plundered from ../golang; factor. 
+func mapPosition(ctx context.Context, fset *token.FileSet, s file.Source, start, end token.Pos) (protocol.Location, error) { + file := fset.File(start) + uri := protocol.URIFromPath(file.Name()) + fh, err := s.ReadFile(ctx, uri) + if err != nil { + return protocol.Location{}, err + } + content, err := fh.Content() + if err != nil { + return protocol.Location{}, err + } + m := protocol.NewMapper(fh.URI(), content) + return m.PosLocation(file, start, end) +} diff --git a/gopls/internal/golang/snapshot.go b/gopls/internal/golang/snapshot.go index c381c962d08..30199d45463 100644 --- a/gopls/internal/golang/snapshot.go +++ b/gopls/internal/golang/snapshot.go @@ -14,19 +14,9 @@ import ( "golang.org/x/tools/gopls/internal/protocol" ) -// NarrowestMetadataForFile returns metadata for the narrowest package -// (the one with the fewest files) that encloses the specified file. -// The result may be a test variant, but never an intermediate test variant. +//go:fix inline func NarrowestMetadataForFile(ctx context.Context, snapshot *cache.Snapshot, uri protocol.DocumentURI) (*metadata.Package, error) { - mps, err := snapshot.MetadataForFile(ctx, uri) - if err != nil { - return nil, err - } - metadata.RemoveIntermediateTestVariants(&mps) - if len(mps) == 0 { - return nil, fmt.Errorf("no package metadata for file %s", uri) - } - return mps[0], nil + return snapshot.NarrowestMetadataForFile(ctx, uri) } // NarrowestPackageForFile is a convenience function that selects the narrowest diff --git a/gopls/internal/server/definition.go b/gopls/internal/server/definition.go index 7b4df3c7c07..5a9c020cfc5 100644 --- a/gopls/internal/server/definition.go +++ b/gopls/internal/server/definition.go @@ -9,6 +9,7 @@ import ( "fmt" "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/goasm" "golang.org/x/tools/gopls/internal/golang" "golang.org/x/tools/gopls/internal/label" "golang.org/x/tools/gopls/internal/protocol" @@ -37,6 +38,8 @@ func (s *server) Definition(ctx context.Context, params *protocol.DefinitionPara return template.Definition(snapshot, fh, params.Position) case file.Go: return golang.Definition(ctx, snapshot, fh, params.Position) + case file.Asm: + return goasm.Definition(ctx, snapshot, fh, params.Position) default: return nil, fmt.Errorf("can't find definitions for file type %s", kind) } diff --git a/gopls/internal/test/integration/fake/editor.go b/gopls/internal/test/integration/fake/editor.go index adc9df6c17d..170a9823cad 100644 --- a/gopls/internal/test/integration/fake/editor.go +++ b/gopls/internal/test/integration/fake/editor.go @@ -113,11 +113,12 @@ type EditorConfig struct { // Map of language ID -> regexp to match, used to set the file type of new // buffers. Applied as an overlay on top of the following defaults: - // "go" -> ".*\.go" + // "go" -> ".*\.go" // "go.mod" -> "go\.mod" // "go.sum" -> "go\.sum" // "gotmpl" -> ".*tmpl" - FileAssociations map[string]string + // "go.s" -> ".*\.s" + FileAssociations map[protocol.LanguageKind]string // Settings holds user-provided configuration for the LSP server. Settings map[string]any @@ -619,27 +620,28 @@ func (e *Editor) sendDidOpen(ctx context.Context, item protocol.TextDocumentItem return nil } -var defaultFileAssociations = map[string]*regexp.Regexp{ +var defaultFileAssociations = map[protocol.LanguageKind]*regexp.Regexp{ "go": regexp.MustCompile(`^.*\.go$`), // '$' is important: don't match .gotmpl! 
"go.mod": regexp.MustCompile(`^go\.mod$`), "go.sum": regexp.MustCompile(`^go(\.work)?\.sum$`), "go.work": regexp.MustCompile(`^go\.work$`), "gotmpl": regexp.MustCompile(`^.*tmpl$`), + "go.s": regexp.MustCompile(`\.s$`), } // languageID returns the language identifier for the path p given the user // configured fileAssociations. -func languageID(p string, fileAssociations map[string]string) protocol.LanguageKind { +func languageID(p string, fileAssociations map[protocol.LanguageKind]string) protocol.LanguageKind { base := path.Base(p) for lang, re := range fileAssociations { re := regexp.MustCompile(re) if re.MatchString(base) { - return protocol.LanguageKind(lang) + return lang } } for lang, re := range defaultFileAssociations { if re.MatchString(base) { - return protocol.LanguageKind(lang) + return lang } } return "" diff --git a/gopls/internal/test/marker/testdata/definition/asm.txt b/gopls/internal/test/marker/testdata/definition/asm.txt new file mode 100644 index 00000000000..f0187d7e24a --- /dev/null +++ b/gopls/internal/test/marker/testdata/definition/asm.txt @@ -0,0 +1,33 @@ +This test exercises the Definition request in a Go assembly file. + +For now we support only references to package-level symbols defined in +the same package or a dependency. + +Repeatedly jumping to Definition on ff ping-pongs between the Go and +assembly declarations. + +-- go.mod -- +module example.com +go 1.18 + +-- a/a.go -- +package a + +import _ "fmt" +import _ "example.com/b" + +func ff() //@ loc(ffgo, re"()ff"), def("ff", ffasm) + +var _ = ff // pacify unusedfunc analyzer + +-- a/asm.s -- +// portable assembly + +TEXT ·ff(SB), $16 //@ loc(ffasm, "ff"), def("ff", ffgo) + CALL example·com∕b·B //@ def("com", bB) + JMP ·ff //@ def("ff", ffgo) + +-- b/b.go -- +package b + +func B() {} //@ loc(bB, re"()B") diff --git a/internal/expect/extract.go b/internal/expect/extract.go index 150a2afbbf6..8ad1cb259e5 100644 --- a/internal/expect/extract.go +++ b/internal/expect/extract.go @@ -8,7 +8,9 @@ import ( "fmt" "go/ast" "go/parser" + goscanner "go/scanner" "go/token" + "os" "path/filepath" "regexp" "strconv" @@ -32,21 +34,54 @@ type Identifier string // See the package documentation for details about the syntax of those // notes. func Parse(fset *token.FileSet, filename string, content []byte) ([]*Note, error) { - var src any - if content != nil { - src = content + if content == nil { + data, err := os.ReadFile(filename) + if err != nil { + return nil, err + } + content = data } + switch filepath.Ext(filename) { + case ".s": + // The assembler uses a custom scanner, + // but the go/scanner package is close + // enough: we only want the comments. + file := fset.AddFile(filename, -1, len(content)) + var scan goscanner.Scanner + scan.Init(file, content, nil, goscanner.ScanComments) + + var notes []*Note + for { + pos, tok, lit := scan.Scan() + if tok == token.EOF { + break + } + if tok == token.COMMENT { + text, adjust := getAdjustedNote(lit) + if text == "" { + continue + } + parsed, err := parse(fset, pos+token.Pos(adjust), text) + if err != nil { + return nil, err + } + notes = append(notes, parsed...) + } + } + return notes, nil + case ".go": - // TODO: We should write this in terms of the scanner. + // TODO: We should write this in terms of the scanner, like the .s case above. // there are ways you can break the parser such that it will not add all the // comments to the ast, which may result in files where the tests are silently // not run. 
- file, err := parser.ParseFile(fset, filename, src, parser.ParseComments|parser.AllErrors|parser.SkipObjectResolution) + file, err := parser.ParseFile(fset, filename, content, parser.ParseComments|parser.AllErrors|parser.SkipObjectResolution) if file == nil { return nil, err } return ExtractGo(fset, file) + case ".mod": file, err := modfile.Parse(filename, content, nil) if err != nil { @@ -64,6 +99,7 @@ func Parse(fset *token.FileSet, filename string, content []byte) ([]*Note, error note.Pos += token.Pos(f.Base()) } return notes, nil + case ".work": file, err := modfile.ParseWork(filename, content, nil) if err != nil { From 9f7a2b618a10d26b9bc935167355490a7c32a20b Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Thu, 20 Feb 2025 16:01:31 -0500 Subject: [PATCH 047/270] gopls/doc/features: tweak markdown Previously the seems to cause underlining of what follows; see https://github.com/golang/tools/blob/master/gopls/doc/features/diagnostics.md. This fix is kind of a stab in the dark. Change-Id: Ic552faae8d03b3d49c1a913ef7e3a145add5cfc4 Reviewed-on: https://go-review.googlesource.com/c/tools/+/651096 Auto-Submit: Alan Donovan LUCI-TryBot-Result: Go LUCI Reviewed-by: Robert Findley --- gopls/doc/features/diagnostics.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gopls/doc/features/diagnostics.md b/gopls/doc/features/diagnostics.md index ceec607c123..6be7a43493a 100644 --- a/gopls/doc/features/diagnostics.md +++ b/gopls/doc/features/diagnostics.md @@ -51,7 +51,7 @@ build`. Gopls doesn't actually run the compiler; that would be too There is an optional third source of diagnostics: - + - **Compiler optimization details** are diagnostics that report details relevant to optimization decisions made by the Go From 33f1ed9242128736ca381ce86d10a5fc479aab4c Mon Sep 17 00:00:00 2001 From: Gopher Robot Date: Thu, 20 Feb 2025 10:41:48 -0800 Subject: [PATCH 048/270] gopls/go.mod: update dependencies following the v0.18.0 release This is an automated CL which updates the go.mod and go.sum. 
For golang/go#71607 Change-Id: Ic4d3e8174be60eca3f4799c0d3a99dd8f9017320 Reviewed-on: https://go-review.googlesource.com/c/tools/+/651116 Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI Reviewed-by: Michael Knyszek Auto-Submit: Gopher Robot --- gopls/go.mod | 12 ++++++------ gopls/go.sum | 24 ++++++++++++------------ 2 files changed, 18 insertions(+), 18 deletions(-) diff --git a/gopls/go.mod b/gopls/go.mod index 83620720ae6..f6a2b0a1e9a 100644 --- a/gopls/go.mod +++ b/gopls/go.mod @@ -10,20 +10,20 @@ require ( golang.org/x/mod v0.23.0 golang.org/x/sync v0.11.0 golang.org/x/sys v0.30.0 - golang.org/x/telemetry v0.0.0-20241220003058-cc96b6e0d3d9 + golang.org/x/telemetry v0.0.0-20250220152412-165e2f84edbc golang.org/x/text v0.22.0 - golang.org/x/tools v0.28.0 - golang.org/x/vuln v1.1.3 + golang.org/x/tools v0.30.0 + golang.org/x/vuln v1.1.4 gopkg.in/yaml.v3 v3.0.1 - honnef.co/go/tools v0.5.1 + honnef.co/go/tools v0.6.0 mvdan.cc/gofumpt v0.7.0 - mvdan.cc/xurls/v2 v2.5.0 + mvdan.cc/xurls/v2 v2.6.0 ) require ( github.com/BurntSushi/toml v1.4.1-0.20240526193622-a339e1f7089c // indirect github.com/google/safehtml v0.1.0 // indirect - golang.org/x/exp/typeparams v0.0.0-20241210194714-1829a127f884 // indirect + golang.org/x/exp/typeparams v0.0.0-20250218142911-aa4b98e5adaa // indirect gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 // indirect ) diff --git a/gopls/go.sum b/gopls/go.sum index b2b3d925a78..ef93b2c4601 100644 --- a/gopls/go.sum +++ b/gopls/go.sum @@ -12,13 +12,13 @@ github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8= -github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= +github.com/rogpeppe/go-internal v1.13.2-0.20241226121412-a5dc8ff20d0a h1:w3tdWGKbLGBPtR/8/oO74W6hmz0qE5q0z9aqSAewaaM= +github.com/rogpeppe/go-internal v1.13.2-0.20241226121412-a5dc8ff20d0a/go.mod h1:S8kfXMp+yh77OxPD4fdM6YUknrZpQxLhvxzS4gDHENY= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= golang.org/x/crypto v0.33.0/go.mod h1:bVdXmD7IV/4GdElGPozy6U7lWdRXA4qyRVGJV57uQ5M= -golang.org/x/exp/typeparams v0.0.0-20241210194714-1829a127f884 h1:1xaZTydL5Gsg78QharTwKfA9FY9CZ1VQj6D/AZEvHR0= -golang.org/x/exp/typeparams v0.0.0-20241210194714-1829a127f884/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= +golang.org/x/exp/typeparams v0.0.0-20250218142911-aa4b98e5adaa h1:Br3+0EZZohShrmVVc85znGpxw7Ca8hsUJlrdT/JQGw8= +golang.org/x/exp/typeparams v0.0.0-20250218142911-aa4b98e5adaa/go.mod h1:LKZHyeOpPuZcMgxeHjJp4p5yvxrCX1xDvH10zYHhjjQ= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/mod v0.23.0 h1:Zb7khfcRGKk+kqfxFaP5tZqCnDZMjC5VtUBs87Hr6QM= @@ -36,8 +36,8 @@ golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc= golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/telemetry v0.0.0-20240521205824-bda55230c457/go.mod 
h1:pRgIJT+bRLFKnoM1ldnzKoxTIn14Yxz928LQRYYgIN0= -golang.org/x/telemetry v0.0.0-20241220003058-cc96b6e0d3d9 h1:L2k9GUV2TpQKVRGMjN94qfUMgUwOFimSQ6gipyJIjKw= -golang.org/x/telemetry v0.0.0-20241220003058-cc96b6e0d3d9/go.mod h1:8h4Hgq+jcTvCDv2+i7NrfWwpYHcESleo2nGHxLbFLJ4= +golang.org/x/telemetry v0.0.0-20250220152412-165e2f84edbc h1:HS+G1Mhh2dxM8ObutfYKdjfD7zpkyeP/UxeRnJpIZtQ= +golang.org/x/telemetry v0.0.0-20250220152412-165e2f84edbc/go.mod h1:bDzXkYUaHzz51CtDy5kh/jR4lgPxsdbqC37kp/dzhCc= golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= golang.org/x/term v0.29.0/go.mod h1:6bl4lRlvVuDgSf3179VpIxBF0o10JUpXWOnI7nErv7s= @@ -46,16 +46,16 @@ golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM= golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY= -golang.org/x/vuln v1.1.3 h1:NPGnvPOTgnjBc9HTaUx+nj+EaUYxl5SJOWqaDYGaFYw= -golang.org/x/vuln v1.1.3/go.mod h1:7Le6Fadm5FOqE9C926BCD0g12NWyhg7cxV4BwcPFuNY= +golang.org/x/vuln v1.1.4 h1:Ju8QsuyhX3Hk8ma3CesTbO8vfJD9EvUBgHvkxHBzj0I= +golang.org/x/vuln v1.1.4/go.mod h1:F+45wmU18ym/ca5PLTPLsSzr2KppzswxPP603ldA67s= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -honnef.co/go/tools v0.5.1 h1:4bH5o3b5ZULQ4UrBmP+63W9r7qIkqJClEA9ko5YKx+I= -honnef.co/go/tools v0.5.1/go.mod h1:e9irvo83WDG9/irijV44wr3tbhcFeRnfpVlRqVwpzMs= +honnef.co/go/tools v0.6.0 h1:TAODvD3knlq75WCp2nyGJtT4LeRV/o7NN9nYPeVJXf8= +honnef.co/go/tools v0.6.0/go.mod h1:3puzxxljPCe8RGJX7BIy1plGbxEOZni5mR2aXe3/uk4= mvdan.cc/gofumpt v0.7.0 h1:bg91ttqXmi9y2xawvkuMXyvAA/1ZGJqYAEGjXuP0JXU= mvdan.cc/gofumpt v0.7.0/go.mod h1:txVFJy/Sc/mvaycET54pV8SW8gWxTlUuGHVEcncmNUo= -mvdan.cc/xurls/v2 v2.5.0 h1:lyBNOm8Wo71UknhUs4QTFUNNMyxy2JEIaKKo0RWOh+8= -mvdan.cc/xurls/v2 v2.5.0/go.mod h1:yQgaGQ1rFtJUzkmKiHYSSfuQxqfYmd//X6PxvholpeE= +mvdan.cc/xurls/v2 v2.6.0 h1:3NTZpeTxYVWNSokW3MKeyVkz/j7uYXYiMtXRUfmjbgI= +mvdan.cc/xurls/v2 v2.6.0/go.mod h1:bCvEZ1XvdA6wDnxY7jPPjEmigDtvtvPXAD/Exa9IMSk= From 1f6c6d67720feea9eeba7e1eb23841e63f3ccc81 Mon Sep 17 00:00:00 2001 From: Tobias Klauser Date: Thu, 20 Feb 2025 23:31:50 +0100 Subject: [PATCH 049/270] gopls/doc: adjust nvim-lspconfig link target The file was renamed in the github.com/neovim/nvim-lspconfig repository. 
Change-Id: I89a8dcbbb31c24d77f0ca00934df1916b338d460 Reviewed-on: https://go-review.googlesource.com/c/tools/+/651195 Reviewed-by: Robert Findley Auto-Submit: Robert Findley Auto-Submit: Tobias Klauser Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI --- gopls/doc/vim.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gopls/doc/vim.md b/gopls/doc/vim.md index e71482115ea..444a7d6ff31 100644 --- a/gopls/doc/vim.md +++ b/gopls/doc/vim.md @@ -230,5 +230,5 @@ require('lspconfig').gopls.setup({ [govim-install]: https://github.com/myitcv/govim/blob/master/README.md#govim---go-development-plugin-for-vim8 [nvim-docs]: https://neovim.io/doc/user/lsp.html [nvim-install]: https://github.com/neovim/neovim/wiki/Installing-Neovim -[nvim-lspconfig]: https://github.com/neovim/nvim-lspconfig/blob/master/doc/server_configurations.md#gopls +[nvim-lspconfig]: https://github.com/neovim/nvim-lspconfig/blob/master/doc/configs.md#gopls [nvim-lspconfig-imports]: https://github.com/neovim/nvim-lspconfig/issues/115 From 96bfb60194183d530de41f887c48081f8c104a86 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Fri, 21 Feb 2025 09:36:24 -0500 Subject: [PATCH 050/270] gopls/internal/analysis/modernize: fix minmax bug The matcher for pattern 2 forgot to check that the IfStmt.Else subtree was nil, leading to unsound fixes. Updates golang/go#71847 Change-Id: I0919076c1af38012cedf3072ef5d1117e96a64b9 Reviewed-on: https://go-review.googlesource.com/c/tools/+/651375 Reviewed-by: Jonathan Amsterdam Auto-Submit: Alan Donovan LUCI-TryBot-Result: Go LUCI --- gopls/internal/analysis/modernize/minmax.go | 2 +- .../analysis/modernize/testdata/src/minmax/minmax.go | 12 ++++++++++++ .../modernize/testdata/src/minmax/minmax.go.golden | 12 ++++++++++++ 3 files changed, 25 insertions(+), 1 deletion(-) diff --git a/gopls/internal/analysis/modernize/minmax.go b/gopls/internal/analysis/modernize/minmax.go index 1466e767fc7..8888383afec 100644 --- a/gopls/internal/analysis/modernize/minmax.go +++ b/gopls/internal/analysis/modernize/minmax.go @@ -95,7 +95,7 @@ func minmax(pass *analysis.Pass) { }) } - } else if prev, ok := curIfStmt.PrevSibling(); ok && isSimpleAssign(prev.Node()) { + } else if prev, ok := curIfStmt.PrevSibling(); ok && isSimpleAssign(prev.Node()) && ifStmt.Else == nil { fassign := prev.Node().(*ast.AssignStmt) // Have: lhs0 = rhs0; if a < b { lhs = rhs } diff --git a/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go b/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go index 8fdc3bc2106..44ba7c9193a 100644 --- a/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go +++ b/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go @@ -103,3 +103,15 @@ func nopeNotAMinimum(x, y int) int { } return y } + +// Regression test for https://github.com/golang/go/issues/71847#issuecomment-2673491596 +func nopeHasElseBlock(x int) int { + y := x + // Before, this was erroneously reduced to y = max(x, 0) + if y < 0 { + y = 0 + } else { + y += 2 + } + return y +} diff --git a/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go.golden b/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go.golden index 48e154729e7..df1d5180f8a 100644 --- a/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go.golden +++ b/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go.golden @@ -80,3 +80,15 @@ func nopeNotAMinimum(x, y int) int { } return y } + +// Regression test for https://github.com/golang/go/issues/71847#issuecomment-2673491596 +func 
nopeHasElseBlock(x int) int { + y := x + // Before, this was erroneously reduced to y = max(x, 0) + if y < 0 { + y = 0 + } else { + y += 2 + } + return y +} From f95771e6301730bd96b5ece4dfb6df630c070e83 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Fri, 21 Feb 2025 09:43:42 -0500 Subject: [PATCH 051/270] gopls/go.mod: update to go1.24 No code changes yet. Change-Id: Ibdf2dfab2bf282aea4f1bb7d0787fb60d81ebbdb Reviewed-on: https://go-review.googlesource.com/c/tools/+/651395 Reviewed-by: Jonathan Amsterdam LUCI-TryBot-Result: Go LUCI Auto-Submit: Alan Donovan --- gopls/go.mod | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/gopls/go.mod b/gopls/go.mod index f6a2b0a1e9a..210943206b8 100644 --- a/gopls/go.mod +++ b/gopls/go.mod @@ -1,8 +1,6 @@ module golang.org/x/tools/gopls -// go 1.23.1 fixes some bugs in go/types Alias support (golang/go#68894, golang/go#68905). -// go 1.23.4 fixes a miscompilation of range-over-func (golang/go#70035). -go 1.23.4 +go 1.24.0 require ( github.com/google/go-cmp v0.6.0 From 8b85edcc2f1f820a72c251a20869722780356f0a Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Fri, 21 Feb 2025 10:05:18 -0500 Subject: [PATCH 052/270] gopls/internal: use go1.24-isms This CL intentionally does not include any new API covered by the modernizers, whose fixes will be submitted separately. Surprisingly few changes in all. Change-Id: I0c45ed674fd80234e7c76823a23b8b3af3011835 Reviewed-on: https://go-review.googlesource.com/c/tools/+/651376 Auto-Submit: Alan Donovan Reviewed-by: Robert Findley Reviewed-by: Jonathan Amsterdam LUCI-TryBot-Result: Go LUCI --- gopls/internal/analysis/gofix/directive.go | 2 +- gopls/internal/golang/pkgdoc.go | 17 +---------------- .../test/integration/misc/references_test.go | 2 +- 3 files changed, 3 insertions(+), 18 deletions(-) diff --git a/gopls/internal/analysis/gofix/directive.go b/gopls/internal/analysis/gofix/directive.go index 796feb5189e..20c45313cfb 100644 --- a/gopls/internal/analysis/gofix/directive.go +++ b/gopls/internal/analysis/gofix/directive.go @@ -12,7 +12,7 @@ import ( // -- plundered from the future (CL 605517, issue #68021) -- -// TODO(adonovan): replace with ast.Directive after go1.24 (#68021). +// TODO(adonovan): replace with ast.Directive after go1.25 (#68021). // Beware of our local mods to handle analysistest // "want" comments on the same line. diff --git a/gopls/internal/golang/pkgdoc.go b/gopls/internal/golang/pkgdoc.go index a5f9cc97fa4..2faff1a1526 100644 --- a/gopls/internal/golang/pkgdoc.go +++ b/gopls/internal/golang/pkgdoc.go @@ -39,7 +39,6 @@ import ( "go/token" "go/types" "html" - "iter" "path/filepath" "slices" "strings" @@ -666,7 +665,7 @@ window.addEventListener('load', function() { cloneTparams(sig.RecvTypeParams()), cloneTparams(sig.TypeParams()), types.NewTuple(append( - slices.Collect(tupleVariables(sig.Params()))[:3], + slices.Collect(sig.Params().Variables())[:3], types.NewParam(0, nil, "", types.Typ[types.Invalid]))...), sig.Results(), false) // any final ...T parameter is truncated @@ -851,17 +850,3 @@ window.addEventListener('load', function() { return buf.Bytes(), nil } - -// tupleVariables returns a go1.23 iterator over the variables of a tuple type. -// -// Example: for v := range tuple.Variables() { ... } -// TODO(adonovan): use t.Variables in go1.24. 
-func tupleVariables(t *types.Tuple) iter.Seq[*types.Var] { - return func(yield func(v *types.Var) bool) { - for i := range t.Len() { - if !yield(t.At(i)) { - break - } - } - } -} diff --git a/gopls/internal/test/integration/misc/references_test.go b/gopls/internal/test/integration/misc/references_test.go index e84dcd71dc3..58fdb3c5cd8 100644 --- a/gopls/internal/test/integration/misc/references_test.go +++ b/gopls/internal/test/integration/misc/references_test.go @@ -126,7 +126,7 @@ var _ = unsafe.Slice(nil, 0) Run(t, files, func(t *testing.T, env *Env) { env.OpenFile("a.go") - for _, name := range strings.Fields( + for name := range strings.FieldsSeq( "iota error int nil append iota Pointer Sizeof Alignof Add Slice") { loc := env.RegexpSearch("a.go", `\b`+name+`\b`) From 23211ff47d7fe7c3bf662f2a3bf33d9c0ba57f31 Mon Sep 17 00:00:00 2001 From: Rob Findley Date: Thu, 20 Feb 2025 22:18:10 +0000 Subject: [PATCH 053/270] gopls/internal/test/integration: better expectation failures A particularly tricky-to-diagnose marker test failure finally led me to address several long-standing TODOs: integration test expectations should identify their specific failure reason. Previously, we had been relying on a combination the State.String summary and LSP logs to debug failed expectations, but often it was not obvious from the test failure what condition actually failed. Now, expectations describe their failure, and composite expectations compose their component failures. Change-Id: I2533c8a35b4eb561f505fd3ed95fe55483340773 Reviewed-on: https://go-review.googlesource.com/c/tools/+/651417 Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI Auto-Submit: Robert Findley --- gopls/internal/test/integration/env.go | 107 ++---- .../internal/test/integration/expectation.go | 307 ++++++++++-------- 2 files changed, 194 insertions(+), 220 deletions(-) diff --git a/gopls/internal/test/integration/env.go b/gopls/internal/test/integration/env.go index c8a1b5043aa..f19a426316d 100644 --- a/gopls/internal/test/integration/env.go +++ b/gopls/internal/test/integration/env.go @@ -114,53 +114,16 @@ type workProgress struct { complete bool // seen 'end' } -// This method, provided for debugging, accesses mutable fields without a lock, -// so it must not be called concurrent with any State mutation. -func (s State) String() string { - var b strings.Builder - b.WriteString("#### log messages (see RPC logs for full text):\n") - for _, msg := range s.logs { - summary := fmt.Sprintf("%v: %q", msg.Type, msg.Message) - if len(summary) > 60 { - summary = summary[:57] + "..." - } - // Some logs are quite long, and since they should be reproduced in the RPC - // logs on any failure we include here just a short summary. 
- fmt.Fprint(&b, "\t"+summary+"\n") - } - b.WriteString("\n") - b.WriteString("#### diagnostics:\n") - for name, params := range s.diagnostics { - fmt.Fprintf(&b, "\t%s (version %d):\n", name, params.Version) - for _, d := range params.Diagnostics { - fmt.Fprintf(&b, "\t\t%d:%d [%s]: %s\n", d.Range.Start.Line, d.Range.Start.Character, d.Source, d.Message) - } - } - b.WriteString("\n") - b.WriteString("#### outstanding work:\n") - for token, state := range s.work { - if state.complete { - continue - } - name := state.title - if name == "" { - name = fmt.Sprintf("!NO NAME(token: %s)", token) - } - fmt.Fprintf(&b, "\t%s: %.2f\n", name, state.percent) - } - b.WriteString("#### completed work:\n") - for name, count := range s.completedWork { - fmt.Fprintf(&b, "\t%s: %d\n", name, count) - } - return b.String() +type awaitResult struct { + verdict Verdict + reason string } -// A condition is satisfied when all expectations are simultaneously -// met. At that point, the 'met' channel is closed. On any failure, err is set -// and the failed channel is closed. +// A condition is satisfied when its expectation is [Met] or [Unmeetable]. The +// result is sent on the verdict channel. type condition struct { - expectations []Expectation - verdict chan Verdict + expectation Expectation + verdict chan awaitResult } func (a *Awaiter) onDiagnostics(_ context.Context, d *protocol.PublishDiagnosticsParams) error { @@ -334,27 +297,13 @@ func (a *Awaiter) onUnregisterCapability(_ context.Context, m *protocol.Unregist func (a *Awaiter) checkConditionsLocked() { for id, condition := range a.waiters { - if v, _ := checkExpectations(a.state, condition.expectations); v != Unmet { + if v, why := condition.expectation.Check(a.state); v != Unmet { delete(a.waiters, id) - condition.verdict <- v + condition.verdict <- awaitResult{v, why} } } } -// checkExpectations reports whether s meets all expectations. -func checkExpectations(s State, expectations []Expectation) (Verdict, string) { - finalVerdict := Met - var summary strings.Builder - for _, e := range expectations { - v := e.Check(s) - if v > finalVerdict { - finalVerdict = v - } - fmt.Fprintf(&summary, "%v: %s\n", v, e.Description) - } - return finalVerdict, summary.String() -} - // Await blocks until the given expectations are all simultaneously met. // // Generally speaking Await should be avoided because it blocks indefinitely if @@ -363,7 +312,7 @@ func checkExpectations(s State, expectations []Expectation) (Verdict, string) { // waiting. func (e *Env) Await(expectations ...Expectation) { e.T.Helper() - if err := e.Awaiter.Await(e.Ctx, expectations...); err != nil { + if err := e.Awaiter.Await(e.Ctx, AllOf(expectations...)); err != nil { e.T.Fatal(err) } } @@ -371,30 +320,30 @@ func (e *Env) Await(expectations ...Expectation) { // OnceMet blocks until the precondition is met by the state or becomes // unmeetable. If it was met, OnceMet checks that the state meets all // expectations in mustMeets. -func (e *Env) OnceMet(precondition Expectation, mustMeets ...Expectation) { +func (e *Env) OnceMet(pre Expectation, mustMeets ...Expectation) { e.T.Helper() - e.Await(OnceMet(precondition, mustMeets...)) + e.Await(OnceMet(pre, AllOf(mustMeets...))) } // Await waits for all expectations to simultaneously be met. It should only be // called from the main test goroutine. 
-func (a *Awaiter) Await(ctx context.Context, expectations ...Expectation) error { +func (a *Awaiter) Await(ctx context.Context, expectation Expectation) error { a.mu.Lock() // Before adding the waiter, we check if the condition is currently met or // failed to avoid a race where the condition was realized before Await was // called. - switch verdict, summary := checkExpectations(a.state, expectations); verdict { + switch verdict, why := expectation.Check(a.state); verdict { case Met: a.mu.Unlock() return nil case Unmeetable: - err := fmt.Errorf("unmeetable expectations:\n%s\nstate:\n%v", summary, a.state) + err := fmt.Errorf("unmeetable expectation:\n%s\nreason:\n%s", indent(expectation.Description), indent(why)) a.mu.Unlock() return err } cond := &condition{ - expectations: expectations, - verdict: make(chan Verdict), + expectation: expectation, + verdict: make(chan awaitResult), } a.waiters[nextAwaiterRegistration.Add(1)] = cond a.mu.Unlock() @@ -403,19 +352,17 @@ func (a *Awaiter) Await(ctx context.Context, expectations ...Expectation) error select { case <-ctx.Done(): err = ctx.Err() - case v := <-cond.verdict: - if v != Met { - err = fmt.Errorf("condition has final verdict %v", v) + case res := <-cond.verdict: + if res.verdict != Met { + err = fmt.Errorf("the following condition is %s:\n%s\nreason:\n%s", + res.verdict, indent(expectation.Description), indent(res.reason)) } } - a.mu.Lock() - defer a.mu.Unlock() - _, summary := checkExpectations(a.state, expectations) + return err +} - // Debugging an unmet expectation can be tricky, so we put some effort into - // nicely formatting the failure. - if err != nil { - return fmt.Errorf("waiting on:\n%s\nerr:%v\n\nstate:\n%v", summary, err, a.state) - } - return nil +// indent indents all lines of msg, including the first. +func indent(msg string) string { + const prefix = " " + return prefix + strings.ReplaceAll(msg, "\n", "\n"+prefix) } diff --git a/gopls/internal/test/integration/expectation.go b/gopls/internal/test/integration/expectation.go index fdfca90796e..70a16fd6b3a 100644 --- a/gopls/internal/test/integration/expectation.go +++ b/gopls/internal/test/integration/expectation.go @@ -5,14 +5,17 @@ package integration import ( + "bytes" "fmt" + "maps" "regexp" - "sort" + "slices" "strings" "github.com/google/go-cmp/cmp" "golang.org/x/tools/gopls/internal/protocol" "golang.org/x/tools/gopls/internal/server" + "golang.org/x/tools/gopls/internal/util/constraints" ) var ( @@ -55,16 +58,11 @@ func (v Verdict) String() string { // // Expectations are combinators. By composing them, tests may express // complex expectations in terms of simpler ones. -// -// TODO(rfindley): as expectations are combined, it becomes harder to identify -// why they failed. A better signature for Check would be -// -// func(State) (Verdict, string) -// -// returning a reason for the verdict that can be composed similarly to -// descriptions. type Expectation struct { - Check func(State) Verdict + // Check returns the verdict of this expectation for the given state. + // If the vertict is not [Met], the second result should return a reason + // that the verdict is not (yet) met. + Check func(State) (Verdict, string) // Description holds a noun-phrase identifying what the expectation checks. // @@ -74,117 +72,117 @@ type Expectation struct { // OnceMet returns an Expectation that, once the precondition is met, asserts // that mustMeet is met. 
-func OnceMet(precondition Expectation, mustMeets ...Expectation) Expectation { - check := func(s State) Verdict { - switch pre := precondition.Check(s); pre { - case Unmeetable: - return Unmeetable +func OnceMet(pre, post Expectation) Expectation { + check := func(s State) (Verdict, string) { + switch v, why := pre.Check(s); v { + case Unmeetable, Unmet: + return v, fmt.Sprintf("precondition is %s: %s", v, why) case Met: - for _, mustMeet := range mustMeets { - verdict := mustMeet.Check(s) - if verdict != Met { - return Unmeetable - } + v, why := post.Check(s) + if v != Met { + return Unmeetable, fmt.Sprintf("postcondition is not met:\n%s", indent(why)) } - return Met + return Met, "" default: - return Unmet + panic(fmt.Sprintf("unknown precondition verdict %s", v)) } } - description := describeExpectations(mustMeets...) + desc := fmt.Sprintf("once the following is met:\n%s\nmust have:\n%s", + indent(pre.Description), indent(post.Description)) return Expectation{ Check: check, - Description: fmt.Sprintf("once %q is met, must have:\n%s", precondition.Description, description), - } -} - -func describeExpectations(expectations ...Expectation) string { - var descriptions []string - for _, e := range expectations { - descriptions = append(descriptions, e.Description) + Description: desc, } - return strings.Join(descriptions, "\n") } // Not inverts the sense of an expectation: a met expectation is unmet, and an // unmet expectation is met. func Not(e Expectation) Expectation { - check := func(s State) Verdict { - switch v := e.Check(s); v { + check := func(s State) (Verdict, string) { + switch v, _ := e.Check(s); v { case Met: - return Unmet + return Unmet, "condition unexpectedly satisfied" case Unmet, Unmeetable: - return Met + return Met, "" default: panic(fmt.Sprintf("unexpected verdict %v", v)) } } - description := describeExpectations(e) return Expectation{ Check: check, - Description: fmt.Sprintf("not: %s", description), + Description: fmt.Sprintf("not: %s", e.Description), } } // AnyOf returns an expectation that is satisfied when any of the given // expectations is met. func AnyOf(anyOf ...Expectation) Expectation { - check := func(s State) Verdict { + if len(anyOf) == 1 { + return anyOf[0] // avoid unnecessary boilerplate + } + check := func(s State) (Verdict, string) { for _, e := range anyOf { - verdict := e.Check(s) + verdict, _ := e.Check(s) if verdict == Met { - return Met + return Met, "" } } - return Unmet + return Unmet, "none of the expectations were met" } description := describeExpectations(anyOf...) return Expectation{ Check: check, - Description: fmt.Sprintf("Any of:\n%s", description), + Description: fmt.Sprintf("any of:\n%s", description), } } // AllOf expects that all given expectations are met. -// -// TODO(rfindley): the problem with these types of combinators (OnceMet, AnyOf -// and AllOf) is that we lose the information of *why* they failed: the Awaiter -// is not smart enough to look inside. -// -// Refactor the API such that the Check function is responsible for explaining -// why an expectation failed. This should allow us to significantly improve -// test output: we won't need to summarize state at all, as the verdict -// explanation itself should describe clearly why the expectation not met. 
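// A rough usage sketch, not part of the patch itself: the ForFile filter and
// the env helpers are assumed to exist elsewhere in this test framework. With
// the reworked Check signature, a failing nested expectation now reports its
// own reason instead of a dump of server state:
//
//	env.OnceMet(
//		env.DoneWithOpen(),
//		Diagnostics(ForFile("main.go")),
//		NoOutstandingWork(nil),
//	)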
func AllOf(allOf ...Expectation) Expectation { - check := func(s State) Verdict { - verdict := Met + if len(allOf) == 1 { + return allOf[0] // avoid unnecessary boilerplate + } + check := func(s State) (Verdict, string) { + var ( + verdict = Met + reason string + ) for _, e := range allOf { - if v := e.Check(s); v > verdict { + v, why := e.Check(s) + if v > verdict { verdict = v + reason = why } } - return verdict + return verdict, reason } - description := describeExpectations(allOf...) + desc := describeExpectations(allOf...) return Expectation{ Check: check, - Description: fmt.Sprintf("All of:\n%s", description), + Description: fmt.Sprintf("all of:\n%s", indent(desc)), } } +func describeExpectations(expectations ...Expectation) string { + var descriptions []string + for _, e := range expectations { + descriptions = append(descriptions, e.Description) + } + return strings.Join(descriptions, "\n") +} + // ReadDiagnostics is an Expectation that stores the current diagnostics for // fileName in into, whenever it is evaluated. // // It can be used in combination with OnceMet or AfterChange to capture the // state of diagnostics when other expectations are satisfied. func ReadDiagnostics(fileName string, into *protocol.PublishDiagnosticsParams) Expectation { - check := func(s State) Verdict { + check := func(s State) (Verdict, string) { diags, ok := s.diagnostics[fileName] if !ok { - return Unmeetable + return Unmeetable, fmt.Sprintf("no diagnostics for %q", fileName) } *into = *diags - return Met + return Met, "" } return Expectation{ Check: check, @@ -198,13 +196,10 @@ func ReadDiagnostics(fileName string, into *protocol.PublishDiagnosticsParams) E // It can be used in combination with OnceMet or AfterChange to capture the // state of diagnostics when other expectations are satisfied. func ReadAllDiagnostics(into *map[string]*protocol.PublishDiagnosticsParams) Expectation { - check := func(s State) Verdict { - allDiags := make(map[string]*protocol.PublishDiagnosticsParams) - for name, diags := range s.diagnostics { - allDiags[name] = diags - } + check := func(s State) (Verdict, string) { + allDiags := maps.Clone(s.diagnostics) *into = allDiags - return Met + return Met, "" } return Expectation{ Check: check, @@ -215,13 +210,13 @@ func ReadAllDiagnostics(into *map[string]*protocol.PublishDiagnosticsParams) Exp // ShownDocument asserts that the client has received a // ShowDocumentRequest for the given URI. func ShownDocument(uri protocol.URI) Expectation { - check := func(s State) Verdict { + check := func(s State) (Verdict, string) { for _, params := range s.showDocument { if params.URI == uri { - return Met + return Met, "" } } - return Unmet + return Unmet, fmt.Sprintf("no ShowDocumentRequest received for %s", uri) } return Expectation{ Check: check, @@ -236,9 +231,9 @@ func ShownDocument(uri protocol.URI) Expectation { // capture the set of showDocument requests when other expectations // are satisfied. func ShownDocuments(into *[]*protocol.ShowDocumentParams) Expectation { - check := func(s State) Verdict { + check := func(s State) (Verdict, string) { *into = append(*into, s.showDocument...) - return Met + return Met, "" } return Expectation{ Check: check, @@ -247,31 +242,39 @@ func ShownDocuments(into *[]*protocol.ShowDocumentParams) Expectation { } // NoShownMessage asserts that the editor has not received a ShowMessage. 
-func NoShownMessage(subString string) Expectation { - check := func(s State) Verdict { +func NoShownMessage(containing string) Expectation { + check := func(s State) (Verdict, string) { for _, m := range s.showMessage { - if strings.Contains(m.Message, subString) { - return Unmeetable + if strings.Contains(m.Message, containing) { + // Format the message (which may contain newlines) as a block quote. + msg := fmt.Sprintf("\"\"\"\n%s\n\"\"\"", strings.TrimSpace(m.Message)) + return Unmeetable, fmt.Sprintf("observed the following message:\n%s", indent(msg)) } } - return Met + return Met, "" + } + var desc string + if containing != "" { + desc = fmt.Sprintf("received no ShowMessage containing %q", containing) + } else { + desc = "received no ShowMessage requests" } return Expectation{ Check: check, - Description: fmt.Sprintf("no ShowMessage received containing %q", subString), + Description: desc, } } // ShownMessage asserts that the editor has received a ShowMessageRequest // containing the given substring. func ShownMessage(containing string) Expectation { - check := func(s State) Verdict { + check := func(s State) (Verdict, string) { for _, m := range s.showMessage { if strings.Contains(m.Message, containing) { - return Met + return Met, "" } } - return Unmet + return Unmet, fmt.Sprintf("no ShowMessage containing %q", containing) } return Expectation{ Check: check, @@ -281,22 +284,22 @@ func ShownMessage(containing string) Expectation { // ShownMessageRequest asserts that the editor has received a // ShowMessageRequest with message matching the given regular expression. -func ShownMessageRequest(messageRegexp string) Expectation { - msgRE := regexp.MustCompile(messageRegexp) - check := func(s State) Verdict { +func ShownMessageRequest(matchingRegexp string) Expectation { + msgRE := regexp.MustCompile(matchingRegexp) + check := func(s State) (Verdict, string) { if len(s.showMessageRequest) == 0 { - return Unmet + return Unmet, "no ShowMessageRequest have been received" } for _, m := range s.showMessageRequest { if msgRE.MatchString(m.Message) { - return Met + return Met, "" } } - return Unmet + return Unmet, fmt.Sprintf("no ShowMessageRequest (out of %d) match %q", len(s.showMessageRequest), matchingRegexp) } return Expectation{ Check: check, - Description: fmt.Sprintf("ShowMessageRequest matching %q", messageRegexp), + Description: fmt.Sprintf("ShowMessageRequest matching %q", matchingRegexp), } } @@ -328,9 +331,7 @@ func (e *Env) DoneDiagnosingChanges() Expectation { } // Sort for stability. - sort.Slice(expected, func(i, j int) bool { - return expected[i] < expected[j] - }) + slices.Sort(expected) var all []Expectation for _, source := range expected { @@ -411,15 +412,16 @@ func (e *Env) DoneWithClose() Expectation { // // See CompletedWork. 
func StartedWork(title string, atLeast uint64) Expectation { - check := func(s State) Verdict { - if s.startedWork[title] >= atLeast { - return Met + check := func(s State) (Verdict, string) { + started := s.startedWork[title] + if started >= atLeast { + return Met, "" } - return Unmet + return Unmet, fmt.Sprintf("started work %d %s", started, pluralize("time", started)) } return Expectation{ Check: check, - Description: fmt.Sprintf("started work %q at least %d time(s)", title, atLeast), + Description: fmt.Sprintf("started work %q at least %d %s", title, atLeast, pluralize("time", atLeast)), } } @@ -428,16 +430,16 @@ func StartedWork(title string, atLeast uint64) Expectation { // Since the Progress API doesn't include any hidden metadata, we must use the // progress notification title to identify the work we expect to be completed. func CompletedWork(title string, count uint64, atLeast bool) Expectation { - check := func(s State) Verdict { + check := func(s State) (Verdict, string) { completed := s.completedWork[title] if completed == count || atLeast && completed > count { - return Met + return Met, "" } - return Unmet + return Unmet, fmt.Sprintf("completed %d %s", completed, pluralize("time", completed)) } - desc := fmt.Sprintf("completed work %q %v times", title, count) + desc := fmt.Sprintf("completed work %q %v %s", title, count, pluralize("time", count)) if atLeast { - desc = fmt.Sprintf("completed work %q at least %d time(s)", title, count) + desc = fmt.Sprintf("completed work %q at least %d %s", title, count, pluralize("time", count)) } return Expectation{ Check: check, @@ -445,6 +447,14 @@ func CompletedWork(title string, count uint64, atLeast bool) Expectation { } } +// pluralize adds an 's' suffix to name if n > 1. +func pluralize[T constraints.Integer](name string, n T) string { + if n > 1 { + return name + "s" + } + return name +} + type WorkStatus struct { // Last seen message from either `begin` or `report` progress. Msg string @@ -459,24 +469,23 @@ type WorkStatus struct { // If the token is not a progress token that the client has seen, this // expectation is Unmeetable. func CompletedProgressToken(token protocol.ProgressToken, into *WorkStatus) Expectation { - check := func(s State) Verdict { + check := func(s State) (Verdict, string) { work, ok := s.work[token] if !ok { - return Unmeetable // TODO(rfindley): refactor to allow the verdict to explain this result + return Unmeetable, "no matching work items" } if work.complete { if into != nil { into.Msg = work.msg into.EndMsg = work.endMsg } - return Met + return Met, "" } - return Unmet + return Unmet, fmt.Sprintf("work is not complete; last message: %q", work.msg) } - desc := fmt.Sprintf("completed work for token %v", token) return Expectation{ Check: check, - Description: desc, + Description: fmt.Sprintf("completed work for token %v", token), } } @@ -488,28 +497,27 @@ func CompletedProgressToken(token protocol.ProgressToken, into *WorkStatus) Expe // This expectation is a vestige of older workarounds for asynchronous command // execution. 
func CompletedProgress(title string, into *WorkStatus) Expectation { - check := func(s State) Verdict { + check := func(s State) (Verdict, string) { var work *workProgress for _, w := range s.work { if w.title == title { if work != nil { - // TODO(rfindley): refactor to allow the verdict to explain this result - return Unmeetable // multiple matches + return Unmeetable, "multiple matching work items" } work = w } } if work == nil { - return Unmeetable // zero matches + return Unmeetable, "no matching work items" } if work.complete { if into != nil { into.Msg = work.msg into.EndMsg = work.endMsg } - return Met + return Met, "" } - return Unmet + return Unmet, fmt.Sprintf("work is not complete; last message: %q", work.msg) } desc := fmt.Sprintf("exactly 1 completed workDoneProgress with title %v", title) return Expectation{ @@ -522,16 +530,16 @@ func CompletedProgress(title string, into *WorkStatus) Expectation { // be an exact match, whereas the given msg must only be contained in the work // item's message. func OutstandingWork(title, msg string) Expectation { - check := func(s State) Verdict { + check := func(s State) (Verdict, string) { for _, work := range s.work { if work.complete { continue } if work.title == title && strings.Contains(work.msg, msg) { - return Met + return Met, "" } } - return Unmet + return Unmet, "no matching work" } return Expectation{ Check: check, @@ -548,7 +556,7 @@ func OutstandingWork(title, msg string) Expectation { // TODO(rfindley): consider refactoring to treat outstanding work the same way // we treat diagnostics: with an algebra of filters. func NoOutstandingWork(ignore func(title, msg string) bool) Expectation { - check := func(s State) Verdict { + check := func(s State) (Verdict, string) { for _, w := range s.work { if w.complete { continue @@ -563,9 +571,9 @@ func NoOutstandingWork(ignore func(title, msg string) bool) Expectation { if ignore != nil && ignore(w.title, w.msg) { continue } - return Unmet + return Unmet, fmt.Sprintf("found outstanding work %q: %q", w.title, w.msg) } - return Met + return Met, "" } return Expectation{ Check: check, @@ -600,7 +608,7 @@ func LogMatching(typ protocol.MessageType, re string, count int, atLeast bool) E if err != nil { panic(err) } - check := func(state State) Verdict { + check := func(state State) (Verdict, string) { var found int for _, msg := range state.logs { if msg.Type == typ && rec.Match([]byte(msg.Message)) { @@ -609,14 +617,15 @@ func LogMatching(typ protocol.MessageType, re string, count int, atLeast bool) E } // Check for an exact or "at least" match. if found == count || (found >= count && atLeast) { - return Met + return Met, "" } // If we require an exact count, and have received more than expected, the // expectation can never be met. 
+ verdict := Unmet if found > count && !atLeast { - return Unmeetable + verdict = Unmeetable } - return Unmet + return verdict, fmt.Sprintf("found %d matching logs", found) } desc := fmt.Sprintf("log message matching %q expected %v times", re, count) if atLeast { @@ -640,20 +649,24 @@ func NoLogMatching(typ protocol.MessageType, re string) Expectation { panic(err) } } - check := func(state State) Verdict { + check := func(state State) (Verdict, string) { for _, msg := range state.logs { if msg.Type != typ { continue } if r == nil || r.Match([]byte(msg.Message)) { - return Unmeetable + return Unmeetable, fmt.Sprintf("found matching log %q", msg.Message) } } - return Met + return Met, "" + } + desc := fmt.Sprintf("no %s log messages", typ) + if re != "" { + desc += fmt.Sprintf(" matching %q", re) } return Expectation{ Check: check, - Description: fmt.Sprintf("no log message matching %q", re), + Description: desc, } } @@ -673,18 +686,18 @@ func NoFileWatchMatching(re string) Expectation { } } -func checkFileWatch(re string, onMatch, onNoMatch Verdict) func(State) Verdict { +func checkFileWatch(re string, onMatch, onNoMatch Verdict) func(State) (Verdict, string) { rec := regexp.MustCompile(re) - return func(s State) Verdict { + return func(s State) (Verdict, string) { r := s.registeredCapabilities["workspace/didChangeWatchedFiles"] watchers := jsonProperty(r.RegisterOptions, "watchers").([]any) for _, watcher := range watchers { pattern := jsonProperty(watcher, "globPattern").(string) if rec.MatchString(pattern) { - return onMatch + return onMatch, fmt.Sprintf("matches watcher pattern %q", pattern) } } - return onNoMatch + return onNoMatch, "no matching watchers" } } @@ -707,10 +720,14 @@ func jsonProperty(obj any, path ...string) any { return jsonProperty(m[path[0]], path[1:]...) } +func formatDiagnostic(d protocol.Diagnostic) string { + return fmt.Sprintf("%d:%d [%s]: %s\n", d.Range.Start.Line, d.Range.Start.Character, d.Source, d.Message) +} + // Diagnostics asserts that there is at least one diagnostic matching the given // filters. func Diagnostics(filters ...DiagnosticFilter) Expectation { - check := func(s State) Verdict { + check := func(s State) (Verdict, string) { diags := flattenDiagnostics(s) for _, filter := range filters { var filtered []flatDiagnostic @@ -720,14 +737,22 @@ func Diagnostics(filters ...DiagnosticFilter) Expectation { } } if len(filtered) == 0 { - // TODO(rfindley): if/when expectations describe their own failure, we - // can provide more useful information here as to which filter caused - // the failure. - return Unmet + // Reprinting the description of the filters is too verbose. + // + // We can probably do better here, but for now just format the + // diagnostics. + var b bytes.Buffer + for name, params := range s.diagnostics { + fmt.Fprintf(&b, "\t%s (version %d):\n", name, params.Version) + for _, d := range params.Diagnostics { + fmt.Fprintf(&b, "\t\t%s", formatDiagnostic(d)) + } + } + return Unmet, fmt.Sprintf("diagnostics:\n%s", b.String()) } diags = filtered } - return Met + return Met, "" } var descs []string for _, filter := range filters { @@ -743,7 +768,7 @@ func Diagnostics(filters ...DiagnosticFilter) Expectation { // filters. Notably, if no filters are supplied this assertion checks that // there are no diagnostics at all, for any file. 
func NoDiagnostics(filters ...DiagnosticFilter) Expectation { - check := func(s State) Verdict { + check := func(s State) (Verdict, string) { diags := flattenDiagnostics(s) for _, filter := range filters { var filtered []flatDiagnostic @@ -755,9 +780,11 @@ func NoDiagnostics(filters ...DiagnosticFilter) Expectation { diags = filtered } if len(diags) > 0 { - return Unmet + d := diags[0] + why := fmt.Sprintf("have diagnostic: %s: %v", d.name, formatDiagnostic(d.diag)) + return Unmet, why } - return Met + return Met, "" } var descs []string for _, filter := range filters { From f2beb33b192b2c3cfca5cc80b88d1d46abc058a7 Mon Sep 17 00:00:00 2001 From: Rob Findley Date: Fri, 21 Feb 2025 17:47:43 +0000 Subject: [PATCH 054/270] gopls: temporarily reinstate the "Structured" hover kind As described in golang/go#71879, the removal of the experimental "Structured" hover kind unexpectedly broke vim-go. Reinstate support for this setting, with tests, so that we can proceed with its deprecation more cautiously. For golang/go#71879 Change-Id: I6d22852aa10126c84b66f4345fbbdcf4cefbd182 Reviewed-on: https://go-review.googlesource.com/c/tools/+/651238 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan Auto-Submit: Robert Findley --- gopls/doc/settings.md | 3 + gopls/internal/doc/api.json | 4 + gopls/internal/golang/hover.go | 136 ++++++++++-------- gopls/internal/settings/settings.go | 14 +- gopls/internal/settings/settings_test.go | 16 +-- .../test/marker/testdata/hover/json.txt | 33 +++++ 6 files changed, 135 insertions(+), 71 deletions(-) create mode 100644 gopls/internal/test/marker/testdata/hover/json.txt diff --git a/gopls/doc/settings.md b/gopls/doc/settings.md index d989b2d19b9..7aeab79a575 100644 --- a/gopls/doc/settings.md +++ b/gopls/doc/settings.md @@ -428,6 +428,9 @@ Must be one of: * `"FullDocumentation"` * `"NoDocumentation"` * `"SingleLine"` +* `"Structured"` is a misguided experimental setting that returns a JSON +hover format. This setting should not be used, as it will be removed in a +future release of gopls. * `"SynopsisDocumentation"` Default: `"FullDocumentation"`. diff --git a/gopls/internal/doc/api.json b/gopls/internal/doc/api.json index 629e45ff766..b6e53d18558 100644 --- a/gopls/internal/doc/api.json +++ b/gopls/internal/doc/api.json @@ -134,6 +134,10 @@ "Value": "\"SingleLine\"", "Doc": "" }, + { + "Value": "\"Structured\"", + "Doc": "`\"Structured\"` is a misguided experimental setting that returns a JSON\nhover format. This setting should not be used, as it will be removed in a\nfuture release of gopls.\n" + }, { "Value": "\"SynopsisDocumentation\"", "Doc": "" diff --git a/gopls/internal/golang/hover.go b/gopls/internal/golang/hover.go index 7fc584f2c1a..cda79dcadb8 100644 --- a/gopls/internal/golang/hover.go +++ b/gopls/internal/golang/hover.go @@ -7,6 +7,7 @@ package golang import ( "bytes" "context" + "encoding/json" "fmt" "go/ast" "go/constant" @@ -48,37 +49,47 @@ import ( // It is formatted in one of several formats as determined by the // HoverKind setting. type hoverResult struct { - // synopsis is a single sentence synopsis of the symbol's documentation. + // The fields below are exported to define the JSON hover format. + // TODO(golang/go#70233): (re)remove support for JSON hover. + + // Synopsis is a single sentence Synopsis of the symbol's documentation. // - // TODO(adonovan): in what syntax? It (usually) comes from doc.synopsis, + // TODO(adonovan): in what syntax? 
It (usually) comes from doc.Synopsis, // which produces "Text" form, but it may be fed to // DocCommentToMarkdown, which expects doc comment syntax. - synopsis string + Synopsis string - // fullDocumentation is the symbol's full documentation. - fullDocumentation string + // FullDocumentation is the symbol's full documentation. + FullDocumentation string - // signature is the symbol's signature. - signature string + // Signature is the symbol's Signature. + Signature string - // singleLine is a single line describing the symbol. + // SingleLine is a single line describing the symbol. // This is recommended only for use in clients that show a single line for hover. - singleLine string + SingleLine string - // symbolName is the human-readable name to use for the symbol in links. - symbolName string + // SymbolName is the human-readable name to use for the symbol in links. + SymbolName string - // linkPath is the path of the package enclosing the given symbol, + // LinkPath is the path of the package enclosing the given symbol, // with the module portion (if any) replaced by "module@version". // // For example: "github.com/google/go-github/v48@v48.1.0/github". // - // Use LinkTarget + "/" + linkPath + "#" + LinkAnchor to form a pkgsite URL. - linkPath string + // Use LinkTarget + "/" + LinkPath + "#" + LinkAnchor to form a pkgsite URL. + LinkPath string - // linkAnchor is the pkg.go.dev link anchor for the given symbol. + // LinkAnchor is the pkg.go.dev link anchor for the given symbol. // For example, the "Node" part of "pkg.go.dev/go/ast#Node". - linkAnchor string + LinkAnchor string + + // New fields go below, and are unexported. The existing + // exported fields are underspecified and have already + // constrained our movements too much. A detailed JSON + // interface might be nice, but it needs a design and a + // precise specification. + // TODO(golang/go#70233): (re)deprecate the JSON hover output. // typeDecl is the declaration syntax for a type, // or "" for a non-type. @@ -284,9 +295,9 @@ func hover(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pp pro typesinternal.SetVarKind(v, typesinternal.LocalVar) signature := types.ObjectString(v, qual) return *hoverRange, &hoverResult{ - signature: signature, - singleLine: signature, - symbolName: v.Name(), + Signature: signature, + SingleLine: signature, + SymbolName: v.Name(), }, nil } @@ -615,13 +626,13 @@ func hover(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pp pro } return *hoverRange, &hoverResult{ - synopsis: doc.Synopsis(docText), - fullDocumentation: docText, - singleLine: singleLineSignature, - symbolName: linkName, - signature: signature, - linkPath: linkPath, - linkAnchor: anchor, + Synopsis: doc.Synopsis(docText), + FullDocumentation: docText, + SingleLine: singleLineSignature, + SymbolName: linkName, + Signature: signature, + LinkPath: linkPath, + LinkAnchor: anchor, typeDecl: typeDecl, methods: methods, promotedFields: fields, @@ -638,8 +649,8 @@ func hoverBuiltin(ctx context.Context, snapshot *cache.Snapshot, obj types.Objec if obj.Name() == "Error" { signature := obj.String() return &hoverResult{ - signature: signature, - singleLine: signature, + Signature: signature, + SingleLine: signature, // TODO(rfindley): these are better than the current behavior. 
// SymbolName: "(error).Error", // LinkPath: "builtin", @@ -682,13 +693,13 @@ func hoverBuiltin(ctx context.Context, snapshot *cache.Snapshot, obj types.Objec docText := comment.Text() return &hoverResult{ - synopsis: doc.Synopsis(docText), - fullDocumentation: docText, - signature: signature, - singleLine: obj.String(), - symbolName: obj.Name(), - linkPath: "builtin", - linkAnchor: obj.Name(), + Synopsis: doc.Synopsis(docText), + FullDocumentation: docText, + Signature: signature, + SingleLine: obj.String(), + SymbolName: obj.Name(), + LinkPath: "builtin", + LinkAnchor: obj.Name(), }, nil } @@ -740,9 +751,9 @@ func hoverImport(ctx context.Context, snapshot *cache.Snapshot, pkg *cache.Packa docText := comment.Text() return rng, &hoverResult{ - signature: "package " + string(impMetadata.Name), - synopsis: doc.Synopsis(docText), - fullDocumentation: docText, + Signature: "package " + string(impMetadata.Name), + Synopsis: doc.Synopsis(docText), + FullDocumentation: docText, }, nil } @@ -798,9 +809,9 @@ func hoverPackageName(pkg *cache.Package, pgf *parsego.File) (protocol.Range, *h } return rng, &hoverResult{ - signature: "package " + string(pkg.Metadata().Name), - synopsis: doc.Synopsis(docText), - fullDocumentation: docText, + Signature: "package " + string(pkg.Metadata().Name), + Synopsis: doc.Synopsis(docText), + FullDocumentation: docText, footer: footer, }, nil } @@ -926,8 +937,8 @@ func hoverLit(pgf *parsego.File, lit *ast.BasicLit, pos token.Pos) (protocol.Ran } hover := b.String() return rng, &hoverResult{ - synopsis: hover, - fullDocumentation: hover, + Synopsis: hover, + FullDocumentation: hover, }, nil } @@ -966,7 +977,7 @@ func hoverReturnStatement(pgf *parsego.File, path []ast.Node, ret *ast.ReturnStm } buf.WriteByte(')') return rng, &hoverResult{ - signature: buf.String(), + Signature: buf.String(), }, nil } @@ -1005,9 +1016,9 @@ func hoverEmbed(fh file.Handle, rng protocol.Range, pattern string) (protocol.Ra } res := &hoverResult{ - signature: fmt.Sprintf("Embedding %q", pattern), - synopsis: s.String(), - fullDocumentation: s.String(), + Signature: fmt.Sprintf("Embedding %q", pattern), + Synopsis: s.String(), + FullDocumentation: s.String(), } return rng, res, nil } @@ -1242,10 +1253,17 @@ func formatHover(h *hoverResult, options *settings.Options, pkgURL func(path Pac switch options.HoverKind { case settings.SingleLine: - return h.singleLine, nil + return h.SingleLine, nil case settings.NoDocumentation: - return maybeFenced(h.signature), nil + return maybeFenced(h.Signature), nil + + case settings.Structured: + b, err := json.Marshal(h) + if err != nil { + return "", err + } + return string(b), nil case settings.SynopsisDocumentation, settings.FullDocumentation: var sections [][]string // assembled below @@ -1256,20 +1274,20 @@ func formatHover(h *hoverResult, options *settings.Options, pkgURL func(path Pac // but not Signature, which is redundant (= TypeDecl + "\n" + Methods). // For all other symbols, we display Signature; // TypeDecl and Methods are empty. - // (Now that JSON is no more, we could rationalize this.) + // TODO(golang/go#70233): When JSON is no more, we could rationalize this. if h.typeDecl != "" { sections = append(sections, []string{maybeFenced(h.typeDecl)}) } else { - sections = append(sections, []string{maybeFenced(h.signature)}) + sections = append(sections, []string{maybeFenced(h.Signature)}) } // Doc section. 
var doc string switch options.HoverKind { case settings.SynopsisDocumentation: - doc = h.synopsis + doc = h.Synopsis case settings.FullDocumentation: - doc = h.fullDocumentation + doc = h.FullDocumentation } if options.PreferredContentFormat == protocol.Markdown { doc = DocCommentToMarkdown(doc, options) @@ -1392,7 +1410,7 @@ func StdSymbolOf(obj types.Object) *stdlib.Symbol { // If pkgURL is non-nil, it should be used to generate doc links. func formatLink(h *hoverResult, options *settings.Options, pkgURL func(path PackagePath, fragment string) protocol.URI) string { - if options.LinksInHover == settings.LinksInHover_None || h.linkPath == "" { + if options.LinksInHover == settings.LinksInHover_None || h.LinkPath == "" { return "" } var url protocol.URI @@ -1400,26 +1418,26 @@ func formatLink(h *hoverResult, options *settings.Options, pkgURL func(path Pack if pkgURL != nil { // LinksInHover == "gopls" // Discard optional module version portion. // (Ideally the hoverResult would retain the structure...) - path := h.linkPath - if module, versionDir, ok := strings.Cut(h.linkPath, "@"); ok { + path := h.LinkPath + if module, versionDir, ok := strings.Cut(h.LinkPath, "@"); ok { // "module@version/dir" path = module if _, dir, ok := strings.Cut(versionDir, "/"); ok { path += "/" + dir } } - url = pkgURL(PackagePath(path), h.linkAnchor) + url = pkgURL(PackagePath(path), h.LinkAnchor) caption = "in gopls doc viewer" } else { if options.LinkTarget == "" { return "" } - url = cache.BuildLink(options.LinkTarget, h.linkPath, h.linkAnchor) + url = cache.BuildLink(options.LinkTarget, h.LinkPath, h.LinkAnchor) caption = "on " + options.LinkTarget } switch options.PreferredContentFormat { case protocol.Markdown: - return fmt.Sprintf("[`%s` %s](%s)", h.symbolName, caption, url) + return fmt.Sprintf("[`%s` %s](%s)", h.SymbolName, caption, url) case protocol.PlainText: return "" default: diff --git a/gopls/internal/settings/settings.go b/gopls/internal/settings/settings.go index 393bccac312..7b04e6b746b 100644 --- a/gopls/internal/settings/settings.go +++ b/gopls/internal/settings/settings.go @@ -798,6 +798,11 @@ const ( NoDocumentation HoverKind = "NoDocumentation" SynopsisDocumentation HoverKind = "SynopsisDocumentation" FullDocumentation HoverKind = "FullDocumentation" + + // Structured is a misguided experimental setting that returns a JSON + // hover format. This setting should not be used, as it will be removed in a + // future release of gopls. + Structured HoverKind = "Structured" ) type VulncheckMode string @@ -1073,14 +1078,15 @@ func (o *Options) setOne(name string, value any) (applied []CounterPath, _ error AllSymbolScope) case "hoverKind": - if s, ok := value.(string); ok && strings.EqualFold(s, "structured") { - return nil, deprecatedError("the experimental hoverKind='structured' setting was removed in gopls/v0.18.0 (https://go.dev/issue/70233)") - } + // TODO(rfindley): reinstate the deprecation of Structured hover by making + // it a warning in gopls v0.N+1, and removing it in gopls v0.N+2. 
return setEnum(&o.HoverKind, value, NoDocumentation, SingleLine, SynopsisDocumentation, - FullDocumentation) + FullDocumentation, + Structured, + ) case "linkTarget": return nil, setString(&o.LinkTarget, value) diff --git a/gopls/internal/settings/settings_test.go b/gopls/internal/settings/settings_test.go index 05afa8ecac3..bd9ec110874 100644 --- a/gopls/internal/settings/settings_test.go +++ b/gopls/internal/settings/settings_test.go @@ -91,19 +91,19 @@ func TestOptions_Set(t *testing.T) { }, }, { - name: "hoverKind", - value: "Structured", - wantError: true, + name: "hoverKind", + value: "Structured", + // wantError: true, // TODO(rfindley): reinstate this error check: func(o Options) bool { - return o.HoverKind == FullDocumentation + return o.HoverKind == Structured }, }, { - name: "ui.documentation.hoverKind", - value: "Structured", - wantError: true, + name: "ui.documentation.hoverKind", + value: "Structured", + // wantError: true, // TODO(rfindley): reinstate this error check: func(o Options) bool { - return o.HoverKind == FullDocumentation + return o.HoverKind == Structured }, }, { diff --git a/gopls/internal/test/marker/testdata/hover/json.txt b/gopls/internal/test/marker/testdata/hover/json.txt new file mode 100644 index 00000000000..6c489cb4221 --- /dev/null +++ b/gopls/internal/test/marker/testdata/hover/json.txt @@ -0,0 +1,33 @@ +This test demonstrates support for "hoverKind": "Structured". + +Its size expectations assume a 64-bit machine. + +-- flags -- +-skip_goarch=386,arm + +-- go.mod -- +module example.com/p + +go 1.18 + +-- settings.json -- +{ + "hoverKind": "Structured" +} +-- p.go -- +package p + +// MyType is a type. +type MyType struct { //@ hover("MyType", "MyType", MyType) + F int // a field + S string // a string field +} + +// MyFunc is a function. +func MyFunc(i int) string { //@ hover("MyFunc", "MyFunc", MyFunc) + return "" +} +-- @MyFunc -- +{"Synopsis":"MyFunc is a function.","FullDocumentation":"MyFunc is a function.\n","Signature":"func MyFunc(i int) string","SingleLine":"func MyFunc(i int) string","SymbolName":"p.MyFunc","LinkPath":"example.com/p","LinkAnchor":"MyFunc"} +-- @MyType -- +{"Synopsis":"MyType is a type.","FullDocumentation":"MyType is a type.\n","Signature":"type MyType struct { // size=24 (0x18)\n\tF int // a field\n\tS string // a string field\n}\n","SingleLine":"type MyType struct{F int; S string}","SymbolName":"p.MyType","LinkPath":"example.com/p","LinkAnchor":"MyType"} From 7347766eee58ceaf6dc96b921cbd775f7844f267 Mon Sep 17 00:00:00 2001 From: Rob Findley Date: Thu, 20 Feb 2025 17:40:52 +0000 Subject: [PATCH 055/270] gopls/internal/test: fix failures when running tests with GOTOOLCHAIN Gopls integration tests want to use the ambient Go toolchain, to test integration with older Go commands. But GOTOOLCHAIN injects the toolchain binary into PATH, so gopls must remove this injected path element before it runs the go command. Unfortunately, if GOTOOLCHAIN=go1.N.P explicitly, those tests will also try to *download* the explicit toolchain and fail because we have set GOPROXY to a file based proxy. Fix this by first adding a check that the initial workspace load did not fail, as well as other related error annotations such that the failure message more accurately identifies the problem. Additionally, the preceding CL improved the integration test framework to better surface such errors. Then, actually fix the problem by setting GOTOOLCHAIN=local in our integration test sandbox. 
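For illustration only, a minimal sketch of the resulting sandbox setting (the function name below is hypothetical; only the GOTOOLCHAIN entry and its comment come from this change):

    // sandboxGoEnvSketch pins the toolchain inside the test sandbox so that an
    // explicit GOTOOLCHAIN=go1.N.P in the outer environment cannot trigger a
    // toolchain download through the tests' file-based GOPROXY.
    func sandboxGoEnvSketch() map[string]string {
        return map[string]string{
            "GOTOOLCHAIN": "local", // tests should not download toolchains
        }
    }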
Change-Id: I8c7e9f10d1c17143f10be42476caf29021ab63e0 Reviewed-on: https://go-review.googlesource.com/c/tools/+/651418 Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI Auto-Submit: Robert Findley --- .../internal/golang/completion/completion.go | 19 ++++++++++++++----- .../internal/test/integration/fake/sandbox.go | 1 + gopls/internal/test/marker/marker_test.go | 5 ++++- internal/imports/fix.go | 2 +- 4 files changed, 20 insertions(+), 7 deletions(-) diff --git a/gopls/internal/golang/completion/completion.go b/gopls/internal/golang/completion/completion.go index 4c340055233..a6c0e49c311 100644 --- a/gopls/internal/golang/completion/completion.go +++ b/gopls/internal/golang/completion/completion.go @@ -668,7 +668,7 @@ func Completion(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, p err = c.collectCompletions(ctx) if err != nil { - return nil, nil, err + return nil, nil, fmt.Errorf("failed to collect completions: %v", err) } // Deep search collected candidates and their members for more candidates. @@ -688,7 +688,7 @@ func Completion(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, p for _, callback := range c.completionCallbacks { if deadline == nil || time.Now().Before(*deadline) { if err := c.snapshot.RunProcessEnvFunc(ctx, callback); err != nil { - return nil, nil, err + return nil, nil, fmt.Errorf("failed to run goimports callback: %v", err) } } } @@ -989,7 +989,10 @@ func (c *completer) populateImportCompletions(searchImport *ast.ImportSpec) erro } c.completionCallbacks = append(c.completionCallbacks, func(ctx context.Context, opts *imports.Options) error { - return imports.GetImportPaths(ctx, searchImports, prefix, c.filename, c.pkg.Types().Name(), opts.Env) + if err := imports.GetImportPaths(ctx, searchImports, prefix, c.filename, c.pkg.Types().Name(), opts.Env); err != nil { + return fmt.Errorf("getting import paths: %v", err) + } + return nil }) return nil } @@ -1529,7 +1532,10 @@ func (c *completer) selector(ctx context.Context, sel *ast.SelectorExpr) error { c.completionCallbacks = append(c.completionCallbacks, func(ctx context.Context, opts *imports.Options) error { defer cancel() - return imports.GetPackageExports(ctx, add, id.Name, c.filename, c.pkg.Types().Name(), opts.Env) + if err := imports.GetPackageExports(ctx, add, id.Name, c.filename, c.pkg.Types().Name(), opts.Env); err != nil { + return fmt.Errorf("getting package exports: %v", err) + } + return nil }) return nil } @@ -1916,7 +1922,10 @@ func (c *completer) unimportedPackages(ctx context.Context, seen map[string]stru } c.completionCallbacks = append(c.completionCallbacks, func(ctx context.Context, opts *imports.Options) error { - return imports.GetAllCandidates(ctx, add, prefix, c.filename, c.pkg.Types().Name(), opts.Env) + if err := imports.GetAllCandidates(ctx, add, prefix, c.filename, c.pkg.Types().Name(), opts.Env); err != nil { + return fmt.Errorf("getting completion candidates: %v", err) + } + return nil }) return nil diff --git a/gopls/internal/test/integration/fake/sandbox.go b/gopls/internal/test/integration/fake/sandbox.go index 7adf3e3e4a9..1d8918babd4 100644 --- a/gopls/internal/test/integration/fake/sandbox.go +++ b/gopls/internal/test/integration/fake/sandbox.go @@ -208,6 +208,7 @@ func (sb *Sandbox) GoEnv() map[string]string { "GO111MODULE": "", "GOSUMDB": "off", "GOPACKAGESDRIVER": "off", + "GOTOOLCHAIN": "local", // tests should not download toolchains } if testenv.Go1Point() >= 5 { vars["GOMODCACHE"] = "" diff --git a/gopls/internal/test/marker/marker_test.go 
b/gopls/internal/test/marker/marker_test.go index 516dfeb3881..d7f91abed46 100644 --- a/gopls/internal/test/marker/marker_test.go +++ b/gopls/internal/test/marker/marker_test.go @@ -971,7 +971,10 @@ func newEnv(t *testing.T, cache *cache.Cache, files, proxyFiles map[string][]byt sandbox.Close() // ignore error t.Fatal(err) } - if err := awaiter.Await(ctx, integration.InitialWorkspaceLoad); err != nil { + if err := awaiter.Await(ctx, integration.OnceMet( + integration.InitialWorkspaceLoad, + integration.NoShownMessage(""), + )); err != nil { sandbox.Close() // ignore error t.Fatal(err) } diff --git a/internal/imports/fix.go b/internal/imports/fix.go index bf6b0aaddde..ee0efe48a55 100644 --- a/internal/imports/fix.go +++ b/internal/imports/fix.go @@ -1030,7 +1030,7 @@ func (e *ProcessEnv) GetResolver() (Resolver, error) { // // For gopls, we can optionally explicitly choose a resolver type, since we // already know the view type. - if len(e.Env["GOMOD"]) == 0 && len(e.Env["GOWORK"]) == 0 { + if e.Env["GOMOD"] == "" && (e.Env["GOWORK"] == "" || e.Env["GOWORK"] == "off") { e.resolver = newGopathResolver(e) e.logf("created gopath resolver") } else if r, err := newModuleResolver(e, e.ModCache); err != nil { From 4e0c888d60c4363071510deedaa07ca8cc9530ae Mon Sep 17 00:00:00 2001 From: xieyuschen Date: Tue, 21 Jan 2025 17:36:24 +0800 Subject: [PATCH 056/270] gopls/internal/hover: show alias rhs type declaration on hover This CL support to find the direct Rhs declaration for an alias type in hover. Fixes golang/go#71286 Change-Id: Ie43a70ec52fe41510e303bb538cc170ff59020c0 Reviewed-on: https://go-review.googlesource.com/c/tools/+/644495 Auto-Submit: Robert Findley Reviewed-by: Robert Findley LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan --- gopls/internal/golang/hover.go | 111 ++++++++++++------ .../test/marker/testdata/definition/embed.txt | 2 + .../marker/testdata/hover/hover_alias.txt | 81 +++++++++++++ 3 files changed, 156 insertions(+), 38 deletions(-) create mode 100644 gopls/internal/test/marker/testdata/hover/hover_alias.txt diff --git a/gopls/internal/golang/hover.go b/gopls/internal/golang/hover.go index cda79dcadb8..947595715a7 100644 --- a/gopls/internal/golang/hover.go +++ b/gopls/internal/golang/hover.go @@ -138,6 +138,28 @@ func Hover(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, positi }, nil } +// findRhsTypeDecl finds an alias's rhs type and returns its declaration. +// The rhs of an alias might be an alias as well, but we feel this is a rare case. +// It returns an empty string if the given obj is not an alias. +func findRhsTypeDecl(ctx context.Context, snapshot *cache.Snapshot, pkg *cache.Package, obj types.Object) (string, error) { + if alias, ok := obj.Type().(*types.Alias); ok { + // we choose Rhs instead of types.Unalias to make the connection between original alias + // and the corresponding aliased type clearer. + // types.Unalias brings confusion because it breaks the connection from A to C given + // the alias chain like 'type ( A = B; B =C ; )' except we show all transitive alias + // from start to the end. As it's rare, we don't do so. + t := alias.Rhs() + switch o := t.(type) { + case *types.Named: + obj = o.Obj() + declPGF1, declPos1, _ := parseFull(ctx, snapshot, pkg.FileSet(), obj.Pos()) + realTypeDecl, _, err := typeDeclContent(declPGF1, declPos1, obj) + return realTypeDecl, err + } + } + return "", nil +} + // hover computes hover information at the given position. 
If we do not support // hovering at the position, it returns _, nil, nil: an error is only returned // if the position is valid but we fail to compute hover information. @@ -404,46 +426,20 @@ func hover(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pp pro _, isTypeName := obj.(*types.TypeName) _, isTypeParam := types.Unalias(obj.Type()).(*types.TypeParam) if isTypeName && !isTypeParam { - spec, ok := spec.(*ast.TypeSpec) - if !ok { - // We cannot find a TypeSpec for this type or alias declaration - // (that is not a type parameter or a built-in). - // This should be impossible even for ill-formed trees; - // we suspect that AST repair may be creating inconsistent - // positions. Don't report a bug in that case. (#64241) - errorf := fmt.Errorf - if !declPGF.Fixed() { - errorf = bug.Errorf - } - return protocol.Range{}, nil, errorf("type name %q without type spec", obj.Name()) + var spec1 *ast.TypeSpec + typeDecl, spec1, err = typeDeclContent(declPGF, declPos, obj) + if err != nil { + return protocol.Range{}, nil, err } - // Format the type's declaration syntax. - { - // Don't duplicate comments. - spec2 := *spec - spec2.Doc = nil - spec2.Comment = nil - - var b strings.Builder - b.WriteString("type ") - fset := tokeninternal.FileSetFor(declPGF.Tok) - // TODO(adonovan): use a smarter formatter that omits - // inaccessible fields (non-exported ones from other packages). - if err := format.Node(&b, fset, &spec2); err != nil { - return protocol.Range{}, nil, err - } - typeDecl = b.String() - - // Splice in size/offset at end of first line. - // "type T struct { // size=..." - if sizeOffset != "" { - nl := strings.IndexByte(typeDecl, '\n') - if nl < 0 { - nl = len(typeDecl) - } - typeDecl = typeDecl[:nl] + " // " + sizeOffset + typeDecl[nl:] + // Splice in size/offset at end of first line. + // "type T struct { // size=..." + if sizeOffset != "" { + nl := strings.IndexByte(typeDecl, '\n') + if nl < 0 { + nl = len(typeDecl) } + typeDecl = typeDecl[:nl] + " // " + sizeOffset + typeDecl[nl:] } // Promoted fields @@ -478,7 +474,7 @@ func hover(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pp pro // already been displayed when the node was formatted // above. Don't list these again. var skip map[string]bool - if iface, ok := spec.Type.(*ast.InterfaceType); ok { + if iface, ok := spec1.Type.(*ast.InterfaceType); ok { if iface.Methods.List != nil { for _, m := range iface.Methods.List { if len(m.Names) == 1 { @@ -520,6 +516,12 @@ func hover(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pp pro } } + // realTypeDecl is defined to store the underlying definition of an alias. + realTypeDecl, _ := findRhsTypeDecl(ctx, snapshot, pkg, obj) // tolerate the error + if realTypeDecl != "" { + typeDecl += fmt.Sprintf("\n\n%s", realTypeDecl) + } + // Compute link data (on pkg.go.dev or other documentation host). // // If linkPath is empty, the symbol is not linkable. @@ -640,6 +642,39 @@ func hover(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pp pro }, nil } +// typeDeclContent returns a well formatted type definition. +func typeDeclContent(declPGF *parsego.File, declPos token.Pos, obj types.Object) (string, *ast.TypeSpec, error) { + _, spec, _ := findDeclInfo([]*ast.File{declPGF.File}, declPos) // may be nil^3 + // Don't duplicate comments. + spec1, ok := spec.(*ast.TypeSpec) + if !ok { + // We cannot find a TypeSpec for this type or alias declaration + // (that is not a type parameter or a built-in). 
+ // This should be impossible even for ill-formed trees; + // we suspect that AST repair may be creating inconsistent + // positions. Don't report a bug in that case. (#64241) + errorf := fmt.Errorf + if !declPGF.Fixed() { + errorf = bug.Errorf + } + return "", nil, errorf("type name %q without type spec", obj.Name()) + } + spec2 := *spec1 + spec2.Doc = nil + spec2.Comment = nil + + var b strings.Builder + b.WriteString("type ") + fset := tokeninternal.FileSetFor(declPGF.Tok) + // TODO(adonovan): use a smarter formatter that omits + // inaccessible fields (non-exported ones from other packages). + if err := format.Node(&b, fset, &spec2); err != nil { + return "", nil, err + } + typeDecl := b.String() + return typeDecl, spec1, nil +} + // hoverBuiltin computes hover information when hovering over a builtin // identifier. func hoverBuiltin(ctx context.Context, snapshot *cache.Snapshot, obj types.Object) (*hoverResult, error) { diff --git a/gopls/internal/test/marker/testdata/definition/embed.txt b/gopls/internal/test/marker/testdata/definition/embed.txt index 8ff3e37adb3..5a29b31708f 100644 --- a/gopls/internal/test/marker/testdata/definition/embed.txt +++ b/gopls/internal/test/marker/testdata/definition/embed.txt @@ -322,6 +322,8 @@ func (a.A) Hi() -- @aAlias -- ```go type aAlias = a.A // size=16 (0x10) + +type A string ``` --- diff --git a/gopls/internal/test/marker/testdata/hover/hover_alias.txt b/gopls/internal/test/marker/testdata/hover/hover_alias.txt new file mode 100644 index 00000000000..886a175981c --- /dev/null +++ b/gopls/internal/test/marker/testdata/hover/hover_alias.txt @@ -0,0 +1,81 @@ +This test checks gopls behavior when hovering over alias type. + +-- flags -- +-skip_goarch=386,arm + +-- go.mod -- +module mod.com + +-- main.go -- +package main + +import "mod.com/a" +import "mod.com/b" + +type ToTypeDecl = b.RealType //@hover("ToTypeDecl", "ToTypeDecl", ToTypeDecl) + +type ToAlias = a.Alias //@hover("ToAlias", "ToAlias", ToAlias) + +type ToAliasWithComment = a.AliasWithComment //@hover("ToAliasWithComment", "ToAliasWithComment", ToAliasWithComment) + +-- a/a.go -- +package a +import "mod.com/b" + +type Alias = b.RealType + +// AliasWithComment is a type alias with comments. +type AliasWithComment = b.RealType + +-- b/b.go -- +package b +// RealType is a real type rather than an alias type. +type RealType struct { + Name string + Age int +} + +-- @ToTypeDecl -- +```go +type ToTypeDecl = b.RealType // size=24 (0x18) + +type RealType struct { + Name string + Age int +} +``` + +--- + +@hover("ToTypeDecl", "ToTypeDecl", ToTypeDecl) + + +--- + +[`main.ToTypeDecl` on pkg.go.dev](https://pkg.go.dev/mod.com#ToTypeDecl) +-- @ToAlias -- +```go +type ToAlias = a.Alias // size=24 (0x18) +``` + +--- + +@hover("ToAlias", "ToAlias", ToAlias) + + +--- + +[`main.ToAlias` on pkg.go.dev](https://pkg.go.dev/mod.com#ToAlias) +-- @ToAliasWithComment -- +```go +type ToAliasWithComment = a.AliasWithComment // size=24 (0x18) +``` + +--- + +@hover("ToAliasWithComment", "ToAliasWithComment", ToAliasWithComment) + + +--- + +[`main.ToAliasWithComment` on pkg.go.dev](https://pkg.go.dev/mod.com#ToAliasWithComment) From 1c52ccd39b923912d9e4b54944df219a62b60f91 Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Fri, 14 Feb 2025 08:11:27 -0500 Subject: [PATCH 057/270] gopls/internal/analysis/gofix: inline most aliases Support inlining an alias with an arbitrary right-hand side. 
The type checker gives us almost everything we need to inline an alias; the only thing missing is the bit that says that a //go:fix directive was present. So the fact is an empty struct. Skip aliases that mention arrays. The array length expression isn't represented, and it may refer to other values, so inlining it would incorrectly decouple the inlined expression from the original. For golang/go#32816. Change-Id: I2e5ff1bd69a0f88cd7cb396dee8d4b426988d1cc Reviewed-on: https://go-review.googlesource.com/c/tools/+/650755 Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI --- gopls/internal/analysis/gofix/gofix.go | 220 ++++++++++++------ gopls/internal/analysis/gofix/gofix_test.go | 162 ++++++++++++- .../analysis/gofix/testdata/src/a/a.go | 56 ++++- .../analysis/gofix/testdata/src/a/a.go.golden | 57 ++++- .../gofix/testdata/src/a/internal/d.go | 2 + .../analysis/gofix/testdata/src/b/b.go | 5 + .../analysis/gofix/testdata/src/b/b.go.golden | 9 +- .../analysis/gofix/testdata/src/c/c.go | 5 + 8 files changed, 439 insertions(+), 77 deletions(-) diff --git a/gopls/internal/analysis/gofix/gofix.go b/gopls/internal/analysis/gofix/gofix.go index ffc64be755b..bb6ce4b43ce 100644 --- a/gopls/internal/analysis/gofix/gofix.go +++ b/gopls/internal/analysis/gofix/gofix.go @@ -9,6 +9,7 @@ import ( "go/ast" "go/token" "go/types" + "strings" _ "embed" @@ -118,32 +119,31 @@ func (a *analyzer) findAlias(spec *ast.TypeSpec, declInline bool) { a.pass.Reportf(spec.Pos(), "invalid //go:fix inline directive: not a type alias") return } + + // Disallow inlines of type expressions containing array types. + // Given an array type like [N]int where N is a named constant, go/types provides + // only the value of the constant as an int64. So inlining A in this code: + // + // const N = 5 + // type A = [N]int + // + // would result in [5]int, breaking the connection with N. + // TODO(jba): accept type expressions where the array size is a literal integer + for n := range ast.Preorder(spec.Type) { + if ar, ok := n.(*ast.ArrayType); ok && ar.Len != nil { + a.pass.Reportf(spec.Pos(), "invalid //go:fix inline directive: array types not supported") + return + } + } + if spec.TypeParams != nil { // TODO(jba): handle generic aliases return } - // The alias must refer to another named type. - // TODO(jba): generalize to more type expressions. - var rhsID *ast.Ident - switch e := ast.Unparen(spec.Type).(type) { - case *ast.Ident: - rhsID = e - case *ast.SelectorExpr: - rhsID = e.Sel - default: - return - } + + // Remember that this is an inlinable alias. + typ := &goFixInlineAliasFact{} lhs := a.pass.TypesInfo.Defs[spec.Name].(*types.TypeName) - // more (jba): test one alias pointing to another alias - rhs := a.pass.TypesInfo.Uses[rhsID].(*types.TypeName) - typ := &goFixInlineAliasFact{ - RHSName: rhs.Name(), - RHSPkgName: rhs.Pkg().Name(), - RHSPkgPath: rhs.Pkg().Path(), - } - if rhs.Pkg() == a.pass.Pkg { - typ.rhsObj = rhs - } a.inlinableAliases[lhs] = typ // Create a fact only if the LHS is exported and defined at top level. // We create a fact even if the RHS is non-exported, @@ -302,49 +302,148 @@ func (a *analyzer) inlineAlias(tn *types.TypeName, cur cursor.Cursor) { if inalias == nil { return // nope } - curFile := currentFile(cur) - // We have an identifier A here (n), possibly qualified by a package identifier (sel.X, - // where sel is the parent of X), // and an inlinable "type A = B" elsewhere (inali). - // Consider replacing A with B. + // Get the alias's RHS. It has everything we need to format the replacement text. 
+ rhs := tn.Type().(*types.Alias).Rhs() - // Check that the expression we are inlining (B) means the same thing - // (refers to the same object) in n's scope as it does in A's scope. - // If the RHS is not in the current package, AddImport will handle - // shadowing, so we only need to worry about when both expressions - // are in the current package. + curPath := a.pass.Pkg.Path() + curFile := currentFile(cur) n := cur.Node().(*ast.Ident) - if a.pass.Pkg.Path() == inalias.RHSPkgPath { - // fcon.rhsObj is the object referred to by B in the definition of A. - scope := a.pass.TypesInfo.Scopes[curFile].Innermost(n.Pos()) // n's scope - _, obj := scope.LookupParent(inalias.RHSName, n.Pos()) // what "B" means in n's scope - if obj == nil { - // Should be impossible: if code at n can refer to the LHS, - // it can refer to the RHS. - panic(fmt.Sprintf("no object for inlinable alias %s RHS %s", n.Name, inalias.RHSName)) + // We have an identifier A here (n), possibly qualified by a package + // identifier (sel.n), and an inlinable "type A = rhs" elsewhere. + // + // We can replace A with rhs if no name in rhs is shadowed at n's position, + // and every package in rhs is importable by the current package. + + var ( + importPrefixes = map[string]string{curPath: ""} // from pkg path to prefix + edits []analysis.TextEdit + ) + for _, tn := range typenames(rhs) { + var pkgPath, pkgName string + if pkg := tn.Pkg(); pkg != nil { + pkgPath = pkg.Path() + pkgName = pkg.Name() } - if obj != inalias.rhsObj { - // "B" means something different here than at the inlinable const's scope. + if pkgPath == "" || pkgPath == curPath { + // The name is in the current package or the universe scope, so no import + // is required. Check that it is not shadowed (that is, that the type + // it refers to in rhs is the same one it refers to at n). + scope := a.pass.TypesInfo.Scopes[curFile].Innermost(n.Pos()) // n's scope + _, obj := scope.LookupParent(tn.Name(), n.Pos()) // what qn.name means in n's scope + if obj != tn { // shadowed + return + } + } else if !analysisinternal.CanImport(a.pass.Pkg.Path(), pkgPath) { + // If this package can't see the package of this part of rhs, we can't inline. return + } else if _, ok := importPrefixes[pkgPath]; !ok { + // Use AddImport to add pkgPath if it's not there already. Associate the prefix it assigns + // with the package path for use by the TypeString qualifier below. + _, prefix, eds := analysisinternal.AddImport( + a.pass.TypesInfo, curFile, pkgName, pkgPath, tn.Name(), n.Pos()) + importPrefixes[pkgPath] = strings.TrimSuffix(prefix, ".") + edits = append(edits, eds...) } - } else if !analysisinternal.CanImport(a.pass.Pkg.Path(), inalias.RHSPkgPath) { - // If this package can't see the RHS's package, we can't inline. - return - } - var ( - importPrefix string - edits []analysis.TextEdit - ) - if inalias.RHSPkgPath != a.pass.Pkg.Path() { - _, importPrefix, edits = analysisinternal.AddImport( - a.pass.TypesInfo, curFile, inalias.RHSPkgName, inalias.RHSPkgPath, inalias.RHSName, n.Pos()) } // If n is qualified by a package identifier, we'll need the full selector expression. var expr ast.Expr = n if e, _ := cur.Edge(); e == edge.SelectorExpr_Sel { expr = cur.Parent().Node().(ast.Expr) } - a.reportInline("type alias", "Type alias", expr, edits, importPrefix+inalias.RHSName) + // To get the replacement text, render the alias RHS using the package prefixes + // we assigned above. 
+ newText := types.TypeString(rhs, func(p *types.Package) string { + if p == a.pass.Pkg { + return "" + } + if prefix, ok := importPrefixes[p.Path()]; ok { + return prefix + } + panic(fmt.Sprintf("in %q, package path %q has no import prefix", rhs, p.Path())) + }) + a.reportInline("type alias", "Type alias", expr, edits, newText) +} + +// typenames returns the TypeNames for types within t (including t itself) that have +// them: basic types, named types and alias types. +// The same name may appear more than once. +func typenames(t types.Type) []*types.TypeName { + var tns []*types.TypeName + + var visit func(types.Type) + + // TODO(jba): when typesinternal.NamedOrAlias adds TypeArgs, replace this type literal with it. + namedOrAlias := func(t interface { + TypeArgs() *types.TypeList + Obj() *types.TypeName + }) { + tns = append(tns, t.Obj()) + args := t.TypeArgs() + // TODO(jba): replace with TypeList.Types when this file is at 1.24. + for i := range args.Len() { + visit(args.At(i)) + } + } + + visit = func(t types.Type) { + switch t := t.(type) { + case *types.Basic: + tns = append(tns, types.Universe.Lookup(t.Name()).(*types.TypeName)) + case *types.Named: + namedOrAlias(t) + case *types.Alias: + namedOrAlias(t) + case *types.TypeParam: + tns = append(tns, t.Obj()) + case *types.Pointer: + visit(t.Elem()) + case *types.Slice: + visit(t.Elem()) + case *types.Array: + visit(t.Elem()) + case *types.Chan: + visit(t.Elem()) + case *types.Map: + visit(t.Key()) + visit(t.Elem()) + case *types.Struct: + // TODO(jba): replace with Struct.Fields when this file is at 1.24. + for i := range t.NumFields() { + visit(t.Field(i).Type()) + } + case *types.Signature: + // Ignore the receiver: although it may be present, it has no meaning + // in a type expression. + // Ditto for receiver type params. + // Also, function type params cannot appear in a type expression. + if t.TypeParams() != nil { + panic("Signature.TypeParams in type expression") + } + visit(t.Params()) + visit(t.Results()) + case *types.Interface: + for i := range t.NumEmbeddeds() { + visit(t.EmbeddedType(i)) + } + for i := range t.NumExplicitMethods() { + visit(t.ExplicitMethod(i).Type()) + } + case *types.Tuple: + // TODO(jba): replace with Tuple.Variables when this file is at 1.24. + for i := range t.Len() { + visit(t.At(i).Type()) + } + case *types.Union: + panic("Union in type expression") + default: + panic(fmt.Sprintf("unknown type %T", t)) + } + } + + visit(t) + + return tns } // If con is an inlinable constant, suggest inlining its use at cur. @@ -481,20 +580,11 @@ func (c *goFixInlineConstFact) String() string { func (*goFixInlineConstFact) AFact() {} // A goFixInlineAliasFact is exported for each type alias marked "//go:fix inline". -// It holds information about an inlinable type alias. Gob-serializable. -type goFixInlineAliasFact struct { - // Information about "type LHSName = RHSName". - RHSName string - RHSPkgPath string - RHSPkgName string - rhsObj types.Object // for current package -} - -func (c *goFixInlineAliasFact) String() string { - return fmt.Sprintf("goFixInline alias %q.%s", c.RHSPkgPath, c.RHSName) -} +// It holds no information; its mere existence demonstrates that an alias is inlinable. 
+type goFixInlineAliasFact struct{} -func (*goFixInlineAliasFact) AFact() {} +func (c *goFixInlineAliasFact) String() string { return "goFixInline alias" } +func (*goFixInlineAliasFact) AFact() {} func discard(string, ...any) {} diff --git a/gopls/internal/analysis/gofix/gofix_test.go b/gopls/internal/analysis/gofix/gofix_test.go index 32bd87b6cd2..dc98ef47181 100644 --- a/gopls/internal/analysis/gofix/gofix_test.go +++ b/gopls/internal/analysis/gofix/gofix_test.go @@ -2,15 +2,171 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -package gofix_test +package gofix import ( + "go/ast" + "go/importer" + "go/parser" + "go/token" + "go/types" + "slices" "testing" + gocmp "github.com/google/go-cmp/cmp" "golang.org/x/tools/go/analysis/analysistest" - "golang.org/x/tools/gopls/internal/analysis/gofix" + "golang.org/x/tools/internal/testenv" ) func TestAnalyzer(t *testing.T) { - analysistest.RunWithSuggestedFixes(t, analysistest.TestData(), gofix.Analyzer, "a", "b") + analysistest.RunWithSuggestedFixes(t, analysistest.TestData(), Analyzer, "a", "b") +} + +func TestTypesWithNames(t *testing.T) { + // Test setup inspired by internal/analysisinternal/addimport_test.go. + testenv.NeedsDefaultImporter(t) + + for _, test := range []struct { + typeExpr string + want []string + }{ + { + "int", + []string{"int"}, + }, + { + "*int", + []string{"int"}, + }, + { + "[]*int", + []string{"int"}, + }, + { + "[2]int", + []string{"int"}, + }, + { + // go/types does not expose the length expression. + "[unsafe.Sizeof(uint(1))]int", + []string{"int"}, + }, + { + "map[string]int", + []string{"int", "string"}, + }, + { + "map[int]struct{x, y int}", + []string{"int"}, + }, + { + "T", + []string{"a.T"}, + }, + { + "iter.Seq[int]", + []string{"int", "iter.Seq"}, + }, + { + "io.Reader", + []string{"io.Reader"}, + }, + { + "map[*io.Writer]map[T]A", + []string{"a.A", "a.T", "io.Writer"}, + }, + { + "func(int, int) (bool, error)", + []string{"bool", "error", "int"}, + }, + { + "func(int, ...string) (T, *T, error)", + []string{"a.T", "error", "int", "string"}, + }, + { + "func(iter.Seq[int])", + []string{"int", "iter.Seq"}, + }, + { + "struct { a int; b bool}", + []string{"bool", "int"}, + }, + { + "struct { io.Reader; a int}", + []string{"int", "io.Reader"}, + }, + { + "map[*string]struct{x chan int; y [2]bool}", + []string{"bool", "int", "string"}, + }, + { + "interface {F(int) bool}", + []string{"bool", "int"}, + }, + { + "interface {io.Reader; F(int) bool}", + []string{"bool", "int", "io.Reader"}, + }, + { + "G", // a type parameter of the function + []string{"a.G"}, + }, + } { + src := ` + package a + import ("io"; "iter"; "unsafe") + func _(io.Reader, iter.Seq[int]) uintptr {return unsafe.Sizeof(1)} + type T int + type A = T + + func F[G any]() { + var V ` + test.typeExpr + ` + _ = V + }` + + // parse + fset := token.NewFileSet() + f, err := parser.ParseFile(fset, "a.go", src, 0) + if err != nil { + t.Errorf("%s: %v", test.typeExpr, err) + continue + } + + // type-check + info := &types.Info{ + Types: make(map[ast.Expr]types.TypeAndValue), + Scopes: make(map[ast.Node]*types.Scope), + Defs: make(map[*ast.Ident]types.Object), + Implicits: make(map[ast.Node]types.Object), + } + conf := &types.Config{ + Error: func(err error) { t.Fatalf("%s: %v", test.typeExpr, err) }, + Importer: importer.Default(), + } + pkg, err := conf.Check(f.Name.Name, fset, []*ast.File{f}, info) + if err != nil { + t.Errorf("%s: %v", test.typeExpr, err) + continue + } + + // Look at V's type. 
+ typ := pkg.Scope().Lookup("F").(*types.Func). + Scope().Lookup("V").(*types.Var).Type() + tns := typenames(typ) + // Sort names for comparison. + var got []string + for _, tn := range tns { + var prefix string + if p := tn.Pkg(); p != nil && p.Path() != "" { + prefix = p.Path() + "." + } + got = append(got, prefix+tn.Name()) + } + slices.Sort(got) + got = slices.Compact(got) + + if diff := gocmp.Diff(test.want, got); diff != "" { + t.Errorf("%s: mismatch (-want, +got):\n%s", test.typeExpr, diff) + } + } } diff --git a/gopls/internal/analysis/gofix/testdata/src/a/a.go b/gopls/internal/analysis/gofix/testdata/src/a/a.go index fb4d8b92172..49a0587c2b1 100644 --- a/gopls/internal/analysis/gofix/testdata/src/a/a.go +++ b/gopls/internal/analysis/gofix/testdata/src/a/a.go @@ -105,14 +105,62 @@ func shadow() { // Type aliases //go:fix inline -type A = T // want A: `goFixInline alias "a".T` +type A = T // want A: `goFixInline alias` var _ A // want `Type alias A should be inlined` -type B = []T // nope: only named RHSs - //go:fix inline -type AA = // want AA: `goFixInline alias "a".A` +type AA = // want AA: `goFixInline alias` A // want `Type alias A should be inlined` var _ AA // want `Type alias AA should be inlined` + +//go:fix inline +type ( + B = []T // want B: `goFixInline alias` + C = map[*string][]error // want C: `goFixInline alias` +) + +var _ B // want `Type alias B should be inlined` +var _ C // want `Type alias C should be inlined` + +//go:fix inline +type E = map[[Uno]string][]*T // want `invalid //go:fix inline directive: array types not supported` + +var _ E // nothing should happen here + +//go:fix inline +type F = map[internal.T]T // want F: `goFixInline alias` + +var _ F // want `Type alias F should be inlined` + +//go:fix inline +type G = []chan *internal.T // want G: `goFixInline alias` + +var _ G // want `Type alias G should be inlined` + +// local shadowing +func _() { + type string = int + const T = 1 + + var _ B // nope: B's RHS contains T, which is shadowed + var _ C // nope: C's RHS contains string, which is shadowed +} + +// local inlining +func _[P any]() { + const a = 1 + //go:fix inline + const b = a + + x := b // want `Constant b should be inlined` + + //go:fix inline + type u = []P + + var y u // want `Type alias u should be inlined` + + _ = x + _ = y +} diff --git a/gopls/internal/analysis/gofix/testdata/src/a/a.go.golden b/gopls/internal/analysis/gofix/testdata/src/a/a.go.golden index 9ab1bcbc652..9d4c527919e 100644 --- a/gopls/internal/analysis/gofix/testdata/src/a/a.go.golden +++ b/gopls/internal/analysis/gofix/testdata/src/a/a.go.golden @@ -105,14 +105,63 @@ func shadow() { // Type aliases //go:fix inline -type A = T // want A: `goFixInline alias "a".T` +type A = T // want A: `goFixInline alias` var _ T // want `Type alias A should be inlined` -type B = []T // nope: only named RHSs - //go:fix inline -type AA = // want AA: `goFixInline alias "a".A` +type AA = // want AA: `goFixInline alias` T // want `Type alias A should be inlined` var _ A // want `Type alias AA should be inlined` + +//go:fix inline +type ( + B = []T // want B: `goFixInline alias` + C = map[*string][]error // want C: `goFixInline alias` +) + +var _ []T // want `Type alias B should be inlined` +var _ map[*string][]error // want `Type alias C should be inlined` + +//go:fix inline +type E = map[[Uno]string][]*T // want `invalid //go:fix inline directive: array types not supported` + +var _ E // nothing should happen here + +//go:fix inline +type F = map[internal.T]T // want F: `goFixInline 
alias` + +var _ map[internal.T]T // want `Type alias F should be inlined` + +//go:fix inline +type G = []chan *internal.T // want G: `goFixInline alias` + +var _ []chan *internal.T // want `Type alias G should be inlined` + +// local shadowing +func _() { + type string = int + const T = 1 + + var _ B // nope: B's RHS contains T, which is shadowed + var _ C // nope: C's RHS contains string, which is shadowed +} + + +// local inlining +func _[P any]() { + const a = 1 + //go:fix inline + const b = a + + x := a // want `Constant b should be inlined` + + //go:fix inline + type u = []P + + var y []P // want `Type alias u should be inlined` + + _ = x + _ = y +} diff --git a/gopls/internal/analysis/gofix/testdata/src/a/internal/d.go b/gopls/internal/analysis/gofix/testdata/src/a/internal/d.go index 3211d7ae3cc..60d0c1ab7e8 100644 --- a/gopls/internal/analysis/gofix/testdata/src/a/internal/d.go +++ b/gopls/internal/analysis/gofix/testdata/src/a/internal/d.go @@ -3,3 +3,5 @@ package internal const D = 1 + +type T int diff --git a/gopls/internal/analysis/gofix/testdata/src/b/b.go b/gopls/internal/analysis/gofix/testdata/src/b/b.go index d52fd514024..b358d7b4f67 100644 --- a/gopls/internal/analysis/gofix/testdata/src/b/b.go +++ b/gopls/internal/analysis/gofix/testdata/src/b/b.go @@ -32,3 +32,8 @@ func g() { const d = a.D // nope: a.D refers to a constant in a package that is not visible here. var _ a.A // want `Type alias a\.A should be inlined` +var _ a.B // want `Type alias a\.B should be inlined` +var _ a.C // want `Type alias a\.C should be inlined` +var _ R // want `Type alias R should be inlined` + +var _ a.G // nope: a.G refers to a type in a package that is not visible here diff --git a/gopls/internal/analysis/gofix/testdata/src/b/b.go.golden b/gopls/internal/analysis/gofix/testdata/src/b/b.go.golden index 4228ffeb489..fd8d87a2ef1 100644 --- a/gopls/internal/analysis/gofix/testdata/src/b/b.go.golden +++ b/gopls/internal/analysis/gofix/testdata/src/b/b.go.golden @@ -2,6 +2,8 @@ package b import a0 "a" +import "io" + import ( "a" . "c" @@ -35,4 +37,9 @@ func g() { const d = a.D // nope: a.D refers to a constant in a package that is not visible here. -var _ a.T // want `Type alias a\.A should be inlined` +var _ a.T // want `Type alias a\.A should be inlined` +var _ []a.T // want `Type alias a\.B should be inlined` +var _ map[*string][]error // want `Type alias a\.C should be inlined` +var _ map[io.Reader]io.Reader // want `Type alias R should be inlined` + +var _ a.G // nope: a.G refers to a type in a package that is not visible here diff --git a/gopls/internal/analysis/gofix/testdata/src/c/c.go b/gopls/internal/analysis/gofix/testdata/src/c/c.go index 36504b886a7..7f6a3f26fe2 100644 --- a/gopls/internal/analysis/gofix/testdata/src/c/c.go +++ b/gopls/internal/analysis/gofix/testdata/src/c/c.go @@ -2,4 +2,9 @@ package c // This package is dot-imported by package b. +import "io" + const C = 1 + +//go:fix inline +type R = map[io.Reader]io.Reader From 6e3d8bca20c96bbb8297a834e4421b93e6c3ffa5 Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Fri, 21 Feb 2025 11:20:11 -0500 Subject: [PATCH 058/270] gopls/internal/analysis/gofix: use 1.24 iterators For golang/go#32816. 
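For illustration, the shape of the conversion (a sketch of the pattern rather than an excerpt from the diff; here t is a *types.Struct and visit is the surrounding helper):

    // Before: index-based loops over go/types collections.
    for i := range t.NumFields() {
        visit(t.Field(i).Type())
    }
    // After: range over the iterator methods added to go/types in Go 1.24.
    for f := range t.Fields() {
        visit(f.Type())
    }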
Change-Id: Icf805984f812af19c720d4f477ed12a97a5dd68d Reviewed-on: https://go-review.googlesource.com/c/tools/+/651615 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan --- gopls/internal/analysis/gofix/gofix.go | 35 ++++++++++---------------- 1 file changed, 13 insertions(+), 22 deletions(-) diff --git a/gopls/internal/analysis/gofix/gofix.go b/gopls/internal/analysis/gofix/gofix.go index bb6ce4b43ce..237e5b0b58a 100644 --- a/gopls/internal/analysis/gofix/gofix.go +++ b/gopls/internal/analysis/gofix/gofix.go @@ -372,28 +372,21 @@ func typenames(t types.Type) []*types.TypeName { var tns []*types.TypeName var visit func(types.Type) - - // TODO(jba): when typesinternal.NamedOrAlias adds TypeArgs, replace this type literal with it. - namedOrAlias := func(t interface { - TypeArgs() *types.TypeList - Obj() *types.TypeName - }) { - tns = append(tns, t.Obj()) - args := t.TypeArgs() - // TODO(jba): replace with TypeList.Types when this file is at 1.24. - for i := range args.Len() { - visit(args.At(i)) - } - } - visit = func(t types.Type) { + if hasName, ok := t.(interface{ Obj() *types.TypeName }); ok { + tns = append(tns, hasName.Obj()) + } switch t := t.(type) { case *types.Basic: tns = append(tns, types.Universe.Lookup(t.Name()).(*types.TypeName)) case *types.Named: - namedOrAlias(t) + for t := range t.TypeArgs().Types() { + visit(t) + } case *types.Alias: - namedOrAlias(t) + for t := range t.TypeArgs().Types() { + visit(t) + } case *types.TypeParam: tns = append(tns, t.Obj()) case *types.Pointer: @@ -408,9 +401,8 @@ func typenames(t types.Type) []*types.TypeName { visit(t.Key()) visit(t.Elem()) case *types.Struct: - // TODO(jba): replace with Struct.Fields when this file is at 1.24. - for i := range t.NumFields() { - visit(t.Field(i).Type()) + for f := range t.Fields() { + visit(f.Type()) } case *types.Signature: // Ignore the receiver: although it may be present, it has no meaning @@ -430,9 +422,8 @@ func typenames(t types.Type) []*types.TypeName { visit(t.ExplicitMethod(i).Type()) } case *types.Tuple: - // TODO(jba): replace with Tuple.Variables when this file is at 1.24. - for i := range t.Len() { - visit(t.At(i).Type()) + for v := range t.Variables() { + visit(v.Type()) } case *types.Union: panic("Union in type expression") From 3d7c2e28a97c1a7e134dba0e3a3a27c560ddaa75 Mon Sep 17 00:00:00 2001 From: Rob Findley Date: Fri, 21 Feb 2025 21:23:18 +0000 Subject: [PATCH 059/270] gopls/internal/golang: add missing json tags for hoverResult In my haste to partially revert CL 635226 in 651238, I failed to add json struct tags. Add them back. For golang/go#71879 Change-Id: I45190cba5154eeed7b6a49db51d2a2a51999be7a Reviewed-on: https://go-review.googlesource.com/c/tools/+/651618 Auto-Submit: Robert Findley Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI --- gopls/internal/golang/hover.go | 14 +++++++------- gopls/internal/test/marker/testdata/hover/json.txt | 4 ++-- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/gopls/internal/golang/hover.go b/gopls/internal/golang/hover.go index 947595715a7..74cf5dbb593 100644 --- a/gopls/internal/golang/hover.go +++ b/gopls/internal/golang/hover.go @@ -57,20 +57,20 @@ type hoverResult struct { // TODO(adonovan): in what syntax? It (usually) comes from doc.Synopsis, // which produces "Text" form, but it may be fed to // DocCommentToMarkdown, which expects doc comment syntax. - Synopsis string + Synopsis string `json:"synopsis"` // FullDocumentation is the symbol's full documentation. 
- FullDocumentation string + FullDocumentation string `json:"fullDocumentation"` // Signature is the symbol's Signature. - Signature string + Signature string `json:"signature"` // SingleLine is a single line describing the symbol. // This is recommended only for use in clients that show a single line for hover. - SingleLine string + SingleLine string `json:"singleLine"` // SymbolName is the human-readable name to use for the symbol in links. - SymbolName string + SymbolName string `json:"symbolName"` // LinkPath is the path of the package enclosing the given symbol, // with the module portion (if any) replaced by "module@version". @@ -78,11 +78,11 @@ type hoverResult struct { // For example: "github.com/google/go-github/v48@v48.1.0/github". // // Use LinkTarget + "/" + LinkPath + "#" + LinkAnchor to form a pkgsite URL. - LinkPath string + LinkPath string `json:"linkPath"` // LinkAnchor is the pkg.go.dev link anchor for the given symbol. // For example, the "Node" part of "pkg.go.dev/go/ast#Node". - LinkAnchor string + LinkAnchor string `json:"linkAnchor"` // New fields go below, and are unexported. The existing // exported fields are underspecified and have already diff --git a/gopls/internal/test/marker/testdata/hover/json.txt b/gopls/internal/test/marker/testdata/hover/json.txt index 6c489cb4221..f3229805cb6 100644 --- a/gopls/internal/test/marker/testdata/hover/json.txt +++ b/gopls/internal/test/marker/testdata/hover/json.txt @@ -28,6 +28,6 @@ func MyFunc(i int) string { //@ hover("MyFunc", "MyFunc", MyFunc) return "" } -- @MyFunc -- -{"Synopsis":"MyFunc is a function.","FullDocumentation":"MyFunc is a function.\n","Signature":"func MyFunc(i int) string","SingleLine":"func MyFunc(i int) string","SymbolName":"p.MyFunc","LinkPath":"example.com/p","LinkAnchor":"MyFunc"} +{"synopsis":"MyFunc is a function.","fullDocumentation":"MyFunc is a function.\n","signature":"func MyFunc(i int) string","singleLine":"func MyFunc(i int) string","symbolName":"p.MyFunc","linkPath":"example.com/p","linkAnchor":"MyFunc"} -- @MyType -- -{"Synopsis":"MyType is a type.","FullDocumentation":"MyType is a type.\n","Signature":"type MyType struct { // size=24 (0x18)\n\tF int // a field\n\tS string // a string field\n}\n","SingleLine":"type MyType struct{F int; S string}","SymbolName":"p.MyType","LinkPath":"example.com/p","LinkAnchor":"MyType"} +{"synopsis":"MyType is a type.","fullDocumentation":"MyType is a type.\n","signature":"type MyType struct { // size=24 (0x18)\n\tF int // a field\n\tS string // a string field\n}\n","singleLine":"type MyType struct{F int; S string}","symbolName":"p.MyType","linkPath":"example.com/p","linkAnchor":"MyType"} From 5299dcb7277190caeca1a827cb7d5c856b22f37f Mon Sep 17 00:00:00 2001 From: Rob Findley Date: Fri, 21 Feb 2025 21:57:05 +0000 Subject: [PATCH 060/270] gopls/internal/settings: fix misleading error messages The deprecatedError helper constructs a specifically formatted error string suggesting a replacement. Certain deprecations were misusing the API, resulting in nonsensical error messages. 
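A sketch of the distinction, with the helper's exact wording assumed rather than quoted:

    // Assumed shape of the helper (illustrative only, not the real source):
    //   func deprecatedError(replacement string) error {
    //       return &SoftError{fmt.Sprintf("this setting is deprecated, use %s instead", replacement)}
    //   }
    // Passing it a whole sentence therefore produced garbled text such as
    // "... deprecated, use the 'annotations' setting was removed ... instead";
    // a plain &SoftError{msg}, as in this change, reports the sentence verbatim.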
Change-Id: Ic72bf608b5b2e97baf75c192a49fd4181d7800b2 Reviewed-on: https://go-review.googlesource.com/c/tools/+/651695 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan --- gopls/internal/settings/settings.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/gopls/internal/settings/settings.go b/gopls/internal/settings/settings.go index 7b04e6b746b..dd353da64e9 100644 --- a/gopls/internal/settings/settings.go +++ b/gopls/internal/settings/settings.go @@ -1116,7 +1116,7 @@ func (o *Options) setOne(name string, value any) (applied []CounterPath, _ error return nil, err } if o.Analyses["fieldalignment"] { - return counts, deprecatedError("the 'fieldalignment' analyzer was removed in gopls/v0.17.0; instead, hover over struct fields to see size/offset information (https://go.dev/issue/66861)") + return counts, &SoftError{"the 'fieldalignment' analyzer was removed in gopls/v0.17.0; instead, hover over struct fields to see size/offset information (https://go.dev/issue/66861)"} } return counts, nil @@ -1124,7 +1124,7 @@ func (o *Options) setOne(name string, value any) (applied []CounterPath, _ error return setBoolMap(&o.Hints, value) case "annotations": - return nil, deprecatedError("the 'annotations' setting was removed in gopls/v0.18.0; all compiler optimization details are now shown") + return nil, &SoftError{"the 'annotations' setting was removed in gopls/v0.18.0; all compiler optimization details are now shown"} case "vulncheck": return setEnum(&o.Vulncheck, value, From 274b2375098fb4ba49aedcc8b86edcbf2079ba0a Mon Sep 17 00:00:00 2001 From: Rob Findley Date: Fri, 21 Feb 2025 20:26:19 +0000 Subject: [PATCH 061/270] gopls: add a -severity flag for gopls check In golang/go#50764, users were reporting having to filter out noisy diagnostics from the output of `gopls check` in CI. This is because there was no differentiation between diagnostics that represent real bugs, and those that are suggestions. By contrast, hint level diagnostics are very unobtrusive in the editor. Add a new -severity flag to control the minimum severity output by gopls check, and set its default to "warning". For golang/go#50764 Change-Id: I48d8bb74371fa6035fef4d2608412b986f509f7b Reviewed-on: https://go-review.googlesource.com/c/tools/+/651616 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan Auto-Submit: Robert Findley --- gopls/doc/release/v0.19.0.md | 7 +++++++ gopls/internal/cmd/check.go | 20 +++++++++++++++++++- gopls/internal/cmd/cmd.go | 2 +- gopls/internal/cmd/integration_test.go | 22 ++++++++++++++++++++++ gopls/internal/cmd/usage/check.hlp | 2 ++ 5 files changed, 51 insertions(+), 2 deletions(-) diff --git a/gopls/doc/release/v0.19.0.md b/gopls/doc/release/v0.19.0.md index 0b3ea64c305..18088732656 100644 --- a/gopls/doc/release/v0.19.0.md +++ b/gopls/doc/release/v0.19.0.md @@ -1,3 +1,10 @@ +# Configuration Changes + +- The `gopls check` subcommant now accepts a `-severity` flag to set a minimum + severity for the diagnostics it reports. By default, the minimum severity + is "warning", so `gopls check` may report fewer diagnostics than before. Set + `-severity=hint` to reproduce the previous behavior. + # New features ## "Eliminate dot import" code action diff --git a/gopls/internal/cmd/check.go b/gopls/internal/cmd/check.go index d256fa9de2a..8c0362b148a 100644 --- a/gopls/internal/cmd/check.go +++ b/gopls/internal/cmd/check.go @@ -16,7 +16,8 @@ import ( // check implements the check verb for gopls. 
type check struct { - app *Application + app *Application + Severity string `flag:"severity" help:"minimum diagnostic severity (hint, info, warning, or error)"` } func (c *check) Name() string { return "check" } @@ -35,6 +36,20 @@ Example: show the diagnostic results of this file: // Run performs the check on the files specified by args and prints the // results to stdout. func (c *check) Run(ctx context.Context, args ...string) error { + severityCutoff := protocol.SeverityWarning + switch c.Severity { + case "hint": + severityCutoff = protocol.SeverityHint + case "info": + severityCutoff = protocol.SeverityInformation + case "warning": + // default + case "error": + severityCutoff = protocol.SeverityError + default: + return fmt.Errorf("unrecognized -severity value %q", c.Severity) + } + if len(args) == 0 { return nil } @@ -95,6 +110,9 @@ func (c *check) Run(ctx context.Context, args ...string) error { file.diagnosticsMu.Unlock() for _, diag := range diags { + if diag.Severity > severityCutoff { // lower severity value => greater severity, counterintuitively + continue + } if err := print(file.uri, diag.Range, diag.Message); err != nil { return err } diff --git a/gopls/internal/cmd/cmd.go b/gopls/internal/cmd/cmd.go index 8bd7d7b899f..119577c012b 100644 --- a/gopls/internal/cmd/cmd.go +++ b/gopls/internal/cmd/cmd.go @@ -284,7 +284,7 @@ func (app *Application) internalCommands() []tool.Application { func (app *Application) featureCommands() []tool.Application { return []tool.Application{ &callHierarchy{app: app}, - &check{app: app}, + &check{app: app, Severity: "warning"}, &codeaction{app: app}, &codelens{app: app}, &definition{app: app}, diff --git a/gopls/internal/cmd/integration_test.go b/gopls/internal/cmd/integration_test.go index 986453253f8..e7ac774f5c0 100644 --- a/gopls/internal/cmd/integration_test.go +++ b/gopls/internal/cmd/integration_test.go @@ -108,6 +108,12 @@ var C int -- c/c2.go -- package c var C int +-- d/d.go -- +package d + +import "io/ioutil" + +var _ = ioutil.ReadFile `) // no files @@ -141,6 +147,22 @@ var C int res.checkStdout(`c2.go:2:5-6: C redeclared in this block`) res.checkStdout(`c.go:2:5-6: - other declaration of C`) } + + // No deprecated (hint) diagnostic without -severity. + { + res := gopls(t, tree, "check", "./d/d.go") + res.checkExit(true) + if len(res.stdout) > 0 { + t.Errorf("check ./d/d.go returned unexpected output:\n%s", res.stdout) + } + } + + // Deprecated (hint) diagnostics with -severity=hint + { + res := gopls(t, tree, "check", "-severity=hint", "./d/d.go") + res.checkExit(true) + res.checkStdout(`ioutil.ReadFile is deprecated`) + } } // TestCallHierarchy tests the 'call_hierarchy' subcommand (call_hierarchy.go). diff --git a/gopls/internal/cmd/usage/check.hlp b/gopls/internal/cmd/usage/check.hlp index eda1a25a191..c387c2cf5d9 100644 --- a/gopls/internal/cmd/usage/check.hlp +++ b/gopls/internal/cmd/usage/check.hlp @@ -6,3 +6,5 @@ Usage: Example: show the diagnostic results of this file: $ gopls check internal/cmd/check.go + -severity=string + minimum diagnostic severity (hint, info, warning, or error) (default "warning") From 739a5af40476496b626dc23e996357a7dff4e3e8 Mon Sep 17 00:00:00 2001 From: Rob Findley Date: Sun, 23 Feb 2025 13:08:20 +0000 Subject: [PATCH 062/270] gopls/internal/test/marker: skip on the freebsd race builder The marker tests frequently time out on the freebsd race builder. Skip them to reduce noise. (We don't currently have resources to investigate). 
Fixes golang/go#71731 Change-Id: I2e27c2e8063b6d5e698eb9d1b5c32d08914fcc77 Reviewed-on: https://go-review.googlesource.com/c/tools/+/651895 Auto-Submit: Robert Findley Reviewed-by: Peter Weinberger LUCI-TryBot-Result: Go LUCI --- gopls/internal/test/marker/marker_test.go | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/gopls/internal/test/marker/marker_test.go b/gopls/internal/test/marker/marker_test.go index d7f91abed46..a5e23b928ad 100644 --- a/gopls/internal/test/marker/marker_test.go +++ b/gopls/internal/test/marker/marker_test.go @@ -96,6 +96,9 @@ func Test(t *testing.T) { if strings.HasPrefix(builder, "darwin-") || strings.Contains(builder, "solaris") { t.Skip("golang/go#64473: skipping with -short: this test is too slow on darwin and solaris builders") } + if strings.HasSuffix(builder, "freebsd-amd64-race") { + t.Skip("golang/go#71731: the marker tests are too slow to run on the amd64-race builder") + } } // The marker tests must be able to run go/packages.Load. testenv.NeedsGoPackages(t) @@ -658,7 +661,7 @@ type stringListValue []string func (l *stringListValue) Set(s string) error { if s != "" { - for _, d := range strings.Split(s, ",") { + for d := range strings.SplitSeq(s, ",") { *l = append(*l, strings.TrimSpace(d)) } } @@ -1838,7 +1841,7 @@ func removeDiagnostic(mark marker, loc protocol.Location, matchEnd bool, re *reg diags := mark.run.diags[key] for i, diag := range diags { if re.MatchString(diag.Message) && (!matchEnd || diag.Range.End == loc.Range.End) { - mark.run.diags[key] = append(diags[:i], diags[i+1:]...) + mark.run.diags[key] = slices.Delete(diags, i, i+1) return diag, true } } From 2b2a44ed6f269fbfd9adfd17139a0485e1b0a144 Mon Sep 17 00:00:00 2001 From: Rob Findley Date: Sun, 23 Feb 2025 13:52:01 +0000 Subject: [PATCH 063/270] gopls/internal/test: avoid panic in TestDoubleParamReturnCompletion An invalid assumption in this test led to an out of bounds error, which likely masked a real error or timeout. Update the test to not panic, and factor. 
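The invalid assumption was that the completion result always contains at
least as many items as there are expected labels, so slicing the result past
its length panicked. A minimal standalone sketch of the failure mode and of
the guard the test now uses (the data values are illustrative only):

```
package main

import "fmt"

func main() {
	got := []string{"InterfaceA", "TypeA"} // fewer items than the test expected
	want := []string{"InterfaceA", "TypeA", "InterfaceB", "TypeB", "TypeC"}

	// Without this guard, got[:len(want)] panics:
	// "slice bounds out of range [:5] with capacity 2".
	if len(got) < len(want) {
		fmt.Printf("got %d completion items, want at least %d\n", len(got), len(want))
		return
	}
	fmt.Println(got[:len(want)])
}
```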
Fixes golang/go#71906 Change-Id: Ib01d3b75df8bdb71984457807312cfe1d27ddf73 Reviewed-on: https://go-review.googlesource.com/c/tools/+/651896 LUCI-TryBot-Result: Go LUCI Reviewed-by: Jonathan Amsterdam --- .../integration/completion/completion_test.go | 30 ++++++++----------- 1 file changed, 13 insertions(+), 17 deletions(-) diff --git a/gopls/internal/test/integration/completion/completion_test.go b/gopls/internal/test/integration/completion/completion_test.go index 1d293fe9019..0713b1f62b9 100644 --- a/gopls/internal/test/integration/completion/completion_test.go +++ b/gopls/internal/test/integration/completion/completion_test.go @@ -1212,25 +1212,21 @@ func TestDoubleParamReturnCompletion(t *testing.T) { Run(t, src, func(t *testing.T, env *Env) { env.OpenFile("a.go") - compl := env.RegexpSearch("a.go", `DoubleWrap\[()\]\(\)`) - result := env.Completion(compl) - - wantLabel := []string{"InterfaceA", "TypeA", "InterfaceB", "TypeB", "TypeC"} - - for i, item := range result.Items[:len(wantLabel)] { - if diff := cmp.Diff(wantLabel[i], item.Label); diff != "" { - t.Errorf("Completion: unexpected label mismatch (-want +got):\n%s", diff) - } + tests := map[string][]string{ + `DoubleWrap\[()\]\(\)`: {"InterfaceA", "TypeA", "InterfaceB", "TypeB", "TypeC"}, + `DoubleWrap\[InterfaceA, (_)\]\(\)`: {"InterfaceB", "TypeB", "TypeX", "InterfaceA", "TypeA"}, } - compl = env.RegexpSearch("a.go", `DoubleWrap\[InterfaceA, (_)\]\(\)`) - result = env.Completion(compl) - - wantLabel = []string{"InterfaceB", "TypeB", "TypeX", "InterfaceA", "TypeA"} - - for i, item := range result.Items[:len(wantLabel)] { - if diff := cmp.Diff(wantLabel[i], item.Label); diff != "" { - t.Errorf("Completion: unexpected label mismatch (-want +got):\n%s", diff) + for re, wantLabels := range tests { + compl := env.RegexpSearch("a.go", re) + result := env.Completion(compl) + if len(result.Items) < len(wantLabels) { + t.Fatalf("Completion(%q) returned mismatching labels: got %v, want at least labels %v", re, result.Items, wantLabels) + } + for i, item := range result.Items[:len(wantLabels)] { + if diff := cmp.Diff(wantLabels[i], item.Label); diff != "" { + t.Errorf("Completion(%q): unexpected label mismatch (-want +got):\n%s", re, diff) + } } } }) From d2fcd360ffaa3fcc4c225918750054b056033d3c Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Thu, 20 Feb 2025 16:31:14 -0500 Subject: [PATCH 064/270] go/analysis/passes/unreachable/testdata: relax test for CL 638395 This test case is about to become a parse error. To allow us to submit the change to the parser, we must relax this test. Updates golang/go#71659 Updates golang/go#70957 Change-Id: Ic4fbfedb69d152d691dec41a94bb402149463f84 Reviewed-on: https://go-review.googlesource.com/c/tools/+/651155 Auto-Submit: Alan Donovan LUCI-TryBot-Result: Go LUCI Reviewed-by: Robert Findley --- go/analysis/passes/unreachable/testdata/src/a/a.go | 5 ----- go/analysis/passes/unreachable/testdata/src/a/a.go.golden | 5 ----- 2 files changed, 10 deletions(-) diff --git a/go/analysis/passes/unreachable/testdata/src/a/a.go b/go/analysis/passes/unreachable/testdata/src/a/a.go index b283fd00b9a..136a07caa21 100644 --- a/go/analysis/passes/unreachable/testdata/src/a/a.go +++ b/go/analysis/passes/unreachable/testdata/src/a/a.go @@ -2118,11 +2118,6 @@ var _ = func() int { println() // ok } -var _ = func() { - // goto without label used to panic - goto -} - func _() int { // Empty switch tag with non-bool case value used to panic. 
 	switch {
diff --git a/go/analysis/passes/unreachable/testdata/src/a/a.go.golden b/go/analysis/passes/unreachable/testdata/src/a/a.go.golden
index 40494030423..79cb89d4181 100644
--- a/go/analysis/passes/unreachable/testdata/src/a/a.go.golden
+++ b/go/analysis/passes/unreachable/testdata/src/a/a.go.golden
@@ -2082,11 +2082,6 @@ var _ = func() int {
 	println() // ok
 }

-var _ = func() {
-	// goto without label used to panic
-	goto
-}
-
 func _() int {
 	// Empty switch tag with non-bool case value used to panic.
 	switch {

From 3e76cae71578160dca62d1cab42a715ef960c892 Mon Sep 17 00:00:00 2001
From: Alan Donovan
Date: Thu, 20 Feb 2025 16:37:02 -0500
Subject: [PATCH 065/270] internal/analysisinternal: ValidateFix: more specific
 errors

These details help us diagnose errors in gopls,
especially relating to synthesized End() positions
beyond EOF.

Change-Id: Iff36f5c4e01f2256f2cbf8cc03b27d7b3aa74b11
Reviewed-on: https://go-review.googlesource.com/c/tools/+/651097
Reviewed-by: Robert Findley
Commit-Queue: Alan Donovan
LUCI-TryBot-Result: Go LUCI
Auto-Submit: Alan Donovan
---
 go/analysis/internal/checker/fix_test.go | 4 ++--
 internal/analysisinternal/analysis.go    | 9 +++++----
 2 files changed, 7 insertions(+), 6 deletions(-)

diff --git a/go/analysis/internal/checker/fix_test.go b/go/analysis/internal/checker/fix_test.go
index 68d965d08d6..00710cc0e1b 100644
--- a/go/analysis/internal/checker/fix_test.go
+++ b/go/analysis/internal/checker/fix_test.go
@@ -93,7 +93,7 @@ func TestReportInvalidDiagnostic(t *testing.T) {
 		// TextEdit has invalid Pos.
 		{
 			"bad Pos",
-			`analyzer "a" suggests invalid fix .*: missing file info for pos`,
+			`analyzer "a" suggests invalid fix .*: no token.File for TextEdit.Pos .0.`,
 			func(pos token.Pos) analysis.Diagnostic {
 				return analysis.Diagnostic{
 					Pos: pos,
@@ -110,7 +110,7 @@ func TestReportInvalidDiagnostic(t *testing.T) {
 		// TextEdit has invalid End.
 		{
 			"End < Pos",
-			`analyzer "a" suggests invalid fix .*: pos .* > end`,
+			`analyzer "a" suggests invalid fix .*: TextEdit.Pos .* > TextEdit.End .*`,
 			func(pos token.Pos) analysis.Diagnostic {
 				return analysis.Diagnostic{
 					Pos: pos,
diff --git a/internal/analysisinternal/analysis.go b/internal/analysisinternal/analysis.go
index aba435fa404..5eb7ac5a939 100644
--- a/internal/analysisinternal/analysis.go
+++ b/internal/analysisinternal/analysis.go
@@ -419,18 +419,19 @@ func validateFix(fset *token.FileSet, fix *analysis.SuggestedFix) error {
 		start := edit.Pos
 		file := fset.File(start)
 		if file == nil {
-			return fmt.Errorf("missing file info for pos (%v)", edit.Pos)
+			return fmt.Errorf("no token.File for TextEdit.Pos (%v)", edit.Pos)
 		}
 		if end := edit.End; end.IsValid() {
 			if end < start {
-				return fmt.Errorf("pos (%v) > end (%v)", edit.Pos, edit.End)
+				return fmt.Errorf("TextEdit.Pos (%v) > TextEdit.End (%v)", edit.Pos, edit.End)
 			}
 			endFile := fset.File(end)
 			if endFile == nil {
-				return fmt.Errorf("malformed end position %v", end)
+				return fmt.Errorf("no token.File for TextEdit.End (%v; File(start).FileEnd is %d)", end, file.Base()+file.Size())
 			}
 			if endFile != file {
-				return fmt.Errorf("edit spans files %v and %v", file.Name(), endFile.Name())
+				return fmt.Errorf("edit #%d spans files (%v and %v)",
+					i, file.Position(edit.Pos), endFile.Position(edit.End))
 			}
 		} else {
 			edit.End = start // update the SuggestedFix

From 851c747e148e33b095d285e569c734385d2b074b Mon Sep 17 00:00:00 2001
From: Rob Findley
Date: Mon, 24 Feb 2025 15:05:15 +0000
Subject: [PATCH 066/270] gopls/internal/golang: fix crash when hovering over
 implicit

After months of intermittent investigation, I was finally able to
reproduce the telemetry crash of golang/go#69362: if the operand of
the type switch is undefined, the selectedType will be nil, and
therefore logic may proceed to the point where it reaches a nil
entry in types.Info.Defs.

The fix is of course straightforward, now that we understand it: we
cannot rely on types.Info.Defs not containing nil entries. A
follow-up CL will introduce an analyzer to detect such problematic
uses of the go/types API.

Fixes golang/go#69362

Change-Id: I8f75c24710dbb2e78c79d8b9d721f45d9a040cd7
Reviewed-on: https://go-review.googlesource.com/c/tools/+/652015
Reviewed-by: Alan Donovan
LUCI-TryBot-Result: Go LUCI
---
 gopls/internal/golang/hover.go                     | 13 ++++---------
 .../internal/test/marker/testdata/hover/issues.txt | 12 ++++++++++++
 2 files changed, 16 insertions(+), 9 deletions(-)

diff --git a/gopls/internal/golang/hover.go b/gopls/internal/golang/hover.go
index 74cf5dbb593..c3fecd1c9d1 100644
--- a/gopls/internal/golang/hover.go
+++ b/gopls/internal/golang/hover.go
@@ -375,15 +375,10 @@ func hover(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pp pro
 	// use the default build config for all other types, even
 	// if they embed platform-variant types.
 	//
-	var sizeOffset string // optional size/offset description
-	// debugging #69362: unexpected nil Defs[ident] value (?)
-	_ = ident.Pos() // (can't be nil due to check after referencedObject)
-	_ = pkg.TypesInfo() // (can't be nil due to check in call to inferredSignature)
-	_ = pkg.TypesInfo().Defs // (can't be nil due to nature of cache.Package)
-	if def, ok := pkg.TypesInfo().Defs[ident]; ok {
-		_ = def.Pos() // can't be nil due to reasoning in #69362.
-	}
-	if def, ok := pkg.TypesInfo().Defs[ident]; ok && ident.Pos() == def.Pos() {
+	var sizeOffset string
+
+	// As painfully learned in golang/go#69362, Defs can contain nil entries.
+	if def, _ := pkg.TypesInfo().Defs[ident]; def != nil && ident.Pos() == def.Pos() {
 		// This is the declaring identifier.
 		// (We can't simply use ident.Pos() == obj.Pos() because
 		// referencedObject prefers the TypeName for an embedded field).
diff --git a/gopls/internal/test/marker/testdata/hover/issues.txt b/gopls/internal/test/marker/testdata/hover/issues.txt
index 6212964dff2..eda0eea3efa 100644
--- a/gopls/internal/test/marker/testdata/hover/issues.txt
+++ b/gopls/internal/test/marker/testdata/hover/issues.txt
@@ -20,3 +20,15 @@ package issue64237
 import "golang.org/lsptests/nonexistant" //@diag("\"golang", re"could not import")

 var _ = nonexistant.Value //@hovererr("nonexistant", "no package data")
+
+-- issue69362/p.go --
+package issue69362
+
+// golang/go#69362: hover panics over undefined implicits.
+
+func _() {
+	switch x := y.(type) { //@diag("y", re"undefined"), hover("x", "x", "")
+	case int:
+		_ = x
+	}
+}

From bf9e2a812de33f4ff08ed99be3ecfa95d857830e Mon Sep 17 00:00:00 2001
From: Peter Weinberger
Date: Thu, 13 Feb 2025 12:49:36 -0500
Subject: [PATCH 067/270] gopls/internal: test fixes for some imports bugs

The CL has tests for two fixed bugs. For the new imports to work,
the metadata graph has to be current, which, in this CL, is
accomplished with a call to snapshot.WorkspaceMetadata, which may
wait for changes to be assimilated.

Fixes: golang/go#44510
Fixes: golang/go#67973

Change-Id: Ieb5a9361a75796a172da953cc58853d38f596ebd
Reviewed-on: https://go-review.googlesource.com/c/tools/+/649315
Reviewed-by: Robert Findley
LUCI-TryBot-Result: Go LUCI
---
 gopls/internal/cache/source.go                     |  2 +-
 gopls/internal/golang/format.go                    |  3 +
 .../test/integration/misc/imports_test.go          | 55 +++++++++++++++++++
 3 files changed, 59 insertions(+), 1 deletion(-)

diff --git a/gopls/internal/cache/source.go b/gopls/internal/cache/source.go
index fa038ec37a6..7946b9746ab 100644
--- a/gopls/internal/cache/source.go
+++ b/gopls/internal/cache/source.go
@@ -212,7 +212,7 @@ func (s *goplsSource) resolveWorkspaceReferences(filename string, missing import
 	// keep track of used syms and found results by package name
 	// TODO: avoid import cycles (is current package in forward closure)
 	founds := make(map[string][]found)
-	for i := 0; i < len(ids); i++ {
+	for i := range len(ids) {
 		nm := string(pkgs[i].Name)
 		if satisfies(syms[i], missing[nm]) {
 			got := &imports.Result{
diff --git a/gopls/internal/golang/format.go b/gopls/internal/golang/format.go
index acc619eba0c..b353538d978 100644
--- a/gopls/internal/golang/format.go
+++ b/gopls/internal/golang/format.go
@@ -152,6 +152,9 @@ func computeImportEdits(ctx context.Context, pgf *parsego.File, snapshot *cache.
case settings.ImportsSourceGoimports: source = isource } + // imports require a current metadata graph + // TODO(rfindlay) improve the API + snapshot.WorkspaceMetadata(ctx) allFixes, err := imports.FixImports(ctx, filename, pgf.Src, goroot, options.Env.Logf, source) if err != nil { return nil, nil, err diff --git a/gopls/internal/test/integration/misc/imports_test.go b/gopls/internal/test/integration/misc/imports_test.go index 98a70478ecf..bcbfacc967a 100644 --- a/gopls/internal/test/integration/misc/imports_test.go +++ b/gopls/internal/test/integration/misc/imports_test.go @@ -401,6 +401,31 @@ return nil } }) } + +// use the import from a different package in the same module +func Test44510(t *testing.T) { + const files = `-- go.mod -- +module test +go 1.19 +-- foo/foo.go -- +package main +import strs "strings" +var _ = strs.Count +-- bar/bar.go -- +package main +var _ = strs.Builder +` + WithOptions( + WriteGoSum("."), + ).Run(t, files, func(T *testing.T, env *Env) { + env.OpenFile("bar/bar.go") + env.SaveBuffer("bar/bar.go") + buf := env.BufferText("bar/bar.go") + if !strings.Contains(buf, "strs") { + t.Error(buf) + } + }) +} func TestRelativeReplace(t *testing.T) { const files = ` -- go.mod -- @@ -688,3 +713,33 @@ func Test() { } }) } + +// this test replaces 'package bar' with 'package foo' +// saves the file, and then looks for the import in the main package.s +func Test67973(t *testing.T) { + const files = `-- go.mod -- +module hello +go 1.19 +-- hello.go -- +package main +var _ = foo.Bar +-- internal/foo/foo.go -- +package bar +func Bar() {} +` + WithOptions( + Settings{"importsSource": settings.ImportsSourceGopls}, + ).Run(t, files, func(t *testing.T, env *Env) { + env.OpenFile("hello.go") + env.AfterChange(env.DoneWithOpen()) + env.SaveBuffer("hello.go") + env.OpenFile("internal/foo/foo.go") + env.RegexpReplace("internal/foo/foo.go", "bar", "foo") + env.SaveBuffer("internal/foo/foo.go") + env.SaveBuffer("hello.go") + buf := env.BufferText("hello.go") + if !strings.Contains(buf, "internal/foo") { + t.Errorf(`expected import "hello/internal/foo" but got %q`, buf) + } + }) +} From 6d4af1e1f521077aa5f196b9a4be4297d95ec1c2 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Mon, 24 Feb 2025 17:24:53 -0500 Subject: [PATCH 068/270] gopls/internal/golang: Assembly: update "Compiling" message This CL causes the "Browse assembly" feature to flush the header early, and to update the "Compiling..." message when the report is complete. Change-Id: I96e0c3e1e0949dadbbd058101959f01e38e7596b Reviewed-on: https://go-review.googlesource.com/c/tools/+/652196 Auto-Submit: Alan Donovan LUCI-TryBot-Result: Go LUCI Reviewed-by: Robert Findley --- gopls/internal/golang/assembly.go | 60 ++++++++++++++++++++----------- gopls/internal/server/server.go | 7 +--- 2 files changed, 40 insertions(+), 27 deletions(-) diff --git a/gopls/internal/golang/assembly.go b/gopls/internal/golang/assembly.go index 3b778a54697..a74c48a171d 100644 --- a/gopls/internal/golang/assembly.go +++ b/gopls/internal/golang/assembly.go @@ -16,6 +16,8 @@ import ( "context" "fmt" "html" + "io" + "net/http" "regexp" "strconv" "strings" @@ -26,39 +28,33 @@ import ( // AssemblyHTML returns an HTML document containing an assembly listing of the selected function. // -// TODO(adonovan): -// - display a "Compiling..." message as a cold build can be slow. -// - cross-link jumps and block labels, like github.com/aclements/objbrowse. 
-func AssemblyHTML(ctx context.Context, snapshot *cache.Snapshot, pkg *cache.Package, symbol string, web Web) ([]byte, error) { - // Compile the package with -S, and capture its stderr stream. +// TODO(adonovan): cross-link jumps and block labels, like github.com/aclements/objbrowse. +func AssemblyHTML(ctx context.Context, snapshot *cache.Snapshot, w http.ResponseWriter, pkg *cache.Package, symbol string, web Web) { + // Prepare to compile the package with -S, and capture its stderr stream. inv, cleanupInvocation, err := snapshot.GoCommandInvocation(cache.NoNetwork, pkg.Metadata().CompiledGoFiles[0].DirPath(), "build", []string{"-gcflags=-S", "."}) if err != nil { - return nil, err // e.g. failed to write overlays (rare) + // e.g. failed to write overlays (rare) + http.Error(w, err.Error(), http.StatusInternalServerError) + return } defer cleanupInvocation() - _, stderr, err, _ := snapshot.View().GoCommandRunner().RunRaw(ctx, *inv) - if err != nil { - return nil, err // e.g. won't compile - } - content := stderr.String() escape := html.EscapeString - // Produce the report. + // Emit the start of the report. title := fmt.Sprintf("%s assembly for %s", escape(snapshot.View().GOARCH()), escape(symbol)) - var buf bytes.Buffer - buf.WriteString(` + io.WriteString(w, ` - Codestin Search App + Codestin Search App -

` + title + `

+

`+title+`

A Quick Guide to Go's Assembler

@@ -69,11 +65,23 @@ func AssemblyHTML(ctx context.Context, snapshot *cache.Snapshot, pkg *cache.Pack Click on a source line marker L1234 to navigate your editor there. (VS Code users: please upvote #208093)

-

- Reload the page to recompile. -

+

Compiling...

 `)
+	if flusher, ok := w.(http.Flusher); ok {
+		flusher.Flush()
+	}
+
+	// Compile the package.
+	_, stderr, err, _ := snapshot.View().GoCommandRunner().RunRaw(ctx, *inv)
+	if err != nil {
+		// e.g. won't compile
+		http.Error(w, err.Error(), http.StatusInternalServerError)
+		return
+	}
+
+	// Write the rest of the report.
+	content := stderr.String()
 
 	// insnRx matches an assembly instruction line.
 	// Submatch groups are: (offset-hex-dec, file-line-column, instruction).
@@ -88,7 +96,8 @@ func AssemblyHTML(ctx context.Context, snapshot *cache.Snapshot, pkg *cache.Pack
 	//
 	// Allow matches of symbol, symbol.func1, symbol.deferwrap, etc.
 	on := false
-	for _, line := range strings.Split(content, "\n") {
+	var buf bytes.Buffer
+	for line := range strings.SplitSeq(content, "\n") {
 		// start of function symbol?
 		if strings.Contains(line, " STEXT ") {
 			on = strings.HasPrefix(line, symbol) &&
@@ -116,5 +125,14 @@ func AssemblyHTML(ctx context.Context, snapshot *cache.Snapshot, pkg *cache.Pack
 		}
 		buf.WriteByte('\n')
 	}
-	return buf.Bytes(), nil
+
+	// Update the "Compiling..." message.
+	buf.WriteString(`
+
+ +`) + + w.Write(buf.Bytes()) } diff --git a/gopls/internal/server/server.go b/gopls/internal/server/server.go index d9090250a66..033295ffb32 100644 --- a/gopls/internal/server/server.go +++ b/gopls/internal/server/server.go @@ -447,12 +447,7 @@ func (s *server) initWeb() (*web, error) { pkg := pkgs[0] // Produce report. - html, err := golang.AssemblyHTML(ctx, snapshot, pkg, symbol, web) - if err != nil { - http.Error(w, err.Error(), http.StatusInternalServerError) - return - } - w.Write(html) + golang.AssemblyHTML(ctx, snapshot, w, pkg, symbol, web) }) return web, nil From e890c1f6805a34b15289937191b324a5172f9c22 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Mon, 24 Feb 2025 18:03:04 -0500 Subject: [PATCH 069/270] gopls/internal/golang: Assembly: support package level var and init This CL offers the "Browse Assembly" code action when the selection is within a package-level var initializer, or a source-level init function. + Test Change-Id: Ic0fcf321765027df0c11fb7269a1aedf971814fc Reviewed-on: https://go-review.googlesource.com/c/tools/+/652197 Auto-Submit: Alan Donovan LUCI-TryBot-Result: Go LUCI Reviewed-by: Robert Findley --- gopls/internal/golang/assembly.go | 32 ++--- gopls/internal/golang/codeaction.go | 86 +++++++++----- .../test/integration/misc/webserver_test.go | 109 ++++++++++++------ 3 files changed, 144 insertions(+), 83 deletions(-) diff --git a/gopls/internal/golang/assembly.go b/gopls/internal/golang/assembly.go index a74c48a171d..9e673dd9719 100644 --- a/gopls/internal/golang/assembly.go +++ b/gopls/internal/golang/assembly.go @@ -29,6 +29,8 @@ import ( // AssemblyHTML returns an HTML document containing an assembly listing of the selected function. // // TODO(adonovan): cross-link jumps and block labels, like github.com/aclements/objbrowse. +// +// See gopls/internal/test/integration/misc/webserver_test.go for tests. func AssemblyHTML(ctx context.Context, snapshot *cache.Snapshot, w http.ResponseWriter, pkg *cache.Package, symbol string, web Web) { // Prepare to compile the package with -S, and capture its stderr stream. inv, cleanupInvocation, err := snapshot.GoCommandInvocation(cache.NoNetwork, pkg.Metadata().CompiledGoFiles[0].DirPath(), "build", []string{"-gcflags=-S", "."}) @@ -72,11 +74,26 @@ func AssemblyHTML(ctx context.Context, snapshot *cache.Snapshot, w http.Response flusher.Flush() } + // At this point errors must be reported by writing HTML. + // To do this, set "status" return early. + + var buf bytes.Buffer + status := "Reload the page to recompile." + defer func() { + // Update the "Compiling..." message. + fmt.Fprintf(&buf, ` + + +`, status) + w.Write(buf.Bytes()) + }() + // Compile the package. _, stderr, err, _ := snapshot.View().GoCommandRunner().RunRaw(ctx, *inv) if err != nil { - // e.g. won't compile - http.Error(w, err.Error(), http.StatusInternalServerError) + status = fmt.Sprintf("compilation failed: %v", err) return } @@ -96,7 +113,6 @@ func AssemblyHTML(ctx context.Context, snapshot *cache.Snapshot, w http.Response // // Allow matches of symbol, symbol.func1, symbol.deferwrap, etc. on := false - var buf bytes.Buffer for line := range strings.SplitSeq(content, "\n") { // start of function symbol? if strings.Contains(line, " STEXT ") { @@ -125,14 +141,4 @@ func AssemblyHTML(ctx context.Context, snapshot *cache.Snapshot, w http.Response } buf.WriteByte('\n') } - - // Update the "Compiling..." message. 
- buf.WriteString(` - - -`) - - w.Write(buf.Bytes()) } diff --git a/gopls/internal/golang/codeaction.go b/gopls/internal/golang/codeaction.go index 587ae3e2de3..74f3c2b6085 100644 --- a/gopls/internal/golang/codeaction.go +++ b/gopls/internal/golang/codeaction.go @@ -945,44 +945,66 @@ func goAssembly(ctx context.Context, req *codeActionsRequest) error { // directly to (say) a lambda of interest. // Perhaps we could scroll to STEXT for the innermost // enclosing nested function? - path, _ := astutil.PathEnclosingInterval(req.pgf.File, req.start, req.end) - if len(path) >= 2 { // [... FuncDecl File] - if decl, ok := path[len(path)-2].(*ast.FuncDecl); ok { - if fn, ok := req.pkg.TypesInfo().Defs[decl.Name].(*types.Func); ok { - sig := fn.Signature() - - // Compute the linker symbol of the enclosing function. - var sym strings.Builder - if fn.Pkg().Name() == "main" { - sym.WriteString("main") - } else { - sym.WriteString(fn.Pkg().Path()) - } - sym.WriteString(".") - if sig.Recv() != nil { - if isPtr, named := typesinternal.ReceiverNamed(sig.Recv()); named != nil { - if isPtr { - fmt.Fprintf(&sym, "(*%s)", named.Obj().Name()) - } else { - sym.WriteString(named.Obj().Name()) + + // Compute the linker symbol of the enclosing function or var initializer. + var sym strings.Builder + if pkg := req.pkg.Types(); pkg.Name() == "main" { + sym.WriteString("main") + } else { + sym.WriteString(pkg.Path()) + } + sym.WriteString(".") + + curSel, _ := req.pgf.Cursor.FindPos(req.start, req.end) + for cur := range curSel.Ancestors((*ast.FuncDecl)(nil), (*ast.ValueSpec)(nil)) { + var name string // in command title + switch node := cur.Node().(type) { + case *ast.FuncDecl: + // package-level func or method + if fn, ok := req.pkg.TypesInfo().Defs[node.Name].(*types.Func); ok && + fn.Name() != "_" { // blank functions are not compiled + + // Source-level init functions are compiled (along with + // package-level var initializers) in into a single pkg.init + // function, so this falls out of the logic below. + + if sig := fn.Signature(); sig.TypeParams() == nil && sig.RecvTypeParams() == nil { // generic => no assembly + if sig.Recv() != nil { + if isPtr, named := typesinternal.ReceiverNamed(sig.Recv()); named != nil { + if isPtr { + fmt.Fprintf(&sym, "(*%s)", named.Obj().Name()) + } else { + sym.WriteString(named.Obj().Name()) + } + sym.WriteByte('.') } - sym.WriteByte('.') } + sym.WriteString(fn.Name()) + + name = node.Name.Name // success } - sym.WriteString(fn.Name()) - - if fn.Name() != "_" && // blank functions are not compiled - (fn.Name() != "init" || sig.Recv() != nil) && // init functions aren't linker functions - sig.TypeParams() == nil && sig.RecvTypeParams() == nil { // generic => no assembly - cmd := command.NewAssemblyCommand( - fmt.Sprintf("Browse %s assembly for %s", view.GOARCH(), decl.Name), - view.ID(), - string(req.pkg.Metadata().ID), - sym.String()) - req.addCommandAction(cmd, false) + } + + case *ast.ValueSpec: + // package-level var initializer? 
+ if len(node.Names) > 0 && len(node.Values) > 0 { + v := req.pkg.TypesInfo().Defs[node.Names[0]] + if v != nil && typesinternal.IsPackageLevel(v) { + sym.WriteString("init") + name = "package initializer" // success } } } + + if name != "" { + cmd := command.NewAssemblyCommand( + fmt.Sprintf("Browse %s assembly for %s", view.GOARCH(), name), + view.ID(), + string(req.pkg.Metadata().ID), + sym.String()) + req.addCommandAction(cmd, false) + break + } } return nil } diff --git a/gopls/internal/test/integration/misc/webserver_test.go b/gopls/internal/test/integration/misc/webserver_test.go index 2bde7df8aa2..5153289941f 100644 --- a/gopls/internal/test/integration/misc/webserver_test.go +++ b/gopls/internal/test/integration/misc/webserver_test.go @@ -520,43 +520,57 @@ module example.com -- a/a.go -- package a -func f() { +func f(x int) int { println("hello") defer println("world") + return x } func g() { println("goodbye") } + +var v = [...]int{ + f(123), + f(456), +} + +func init() { + f(789) +} ` Run(t, files, func(t *testing.T, env *Env) { env.OpenFile("a/a.go") - // Invoke the "Browse assembly" code action to start the server. - loc := env.RegexpSearch("a/a.go", "println") - actions, err := env.Editor.CodeAction(env.Ctx, loc, nil, protocol.CodeActionUnknownTrigger) - if err != nil { - t.Fatalf("CodeAction: %v", err) - } - action, err := codeActionByKind(actions, settings.GoAssembly) - if err != nil { - t.Fatal(err) - } + asmFor := func(pattern string) []byte { + // Invoke the "Browse assembly" code action to start the server. + loc := env.RegexpSearch("a/a.go", pattern) + actions, err := env.Editor.CodeAction(env.Ctx, loc, nil, protocol.CodeActionUnknownTrigger) + if err != nil { + t.Fatalf("CodeAction: %v", err) + } + action, err := codeActionByKind(actions, settings.GoAssembly) + if err != nil { + t.Fatal(err) + } - // Execute the command. - // Its side effect should be a single showDocument request. - params := &protocol.ExecuteCommandParams{ - Command: action.Command.Command, - Arguments: action.Command.Arguments, - } - var result command.DebuggingResult - collectDocs := env.Awaiter.ListenToShownDocuments() - env.ExecuteCommand(params, &result) - doc := shownDocument(t, collectDocs(), "http:") - if doc == nil { - t.Fatalf("no showDocument call had 'file:' prefix") + // Execute the command. + // Its side effect should be a single showDocument request. + params := &protocol.ExecuteCommandParams{ + Command: action.Command.Command, + Arguments: action.Command.Arguments, + } + var result command.DebuggingResult + collectDocs := env.Awaiter.ListenToShownDocuments() + env.ExecuteCommand(params, &result) + doc := shownDocument(t, collectDocs(), "http:") + if doc == nil { + t.Fatalf("no showDocument call had 'file:' prefix") + } + t.Log("showDocument(package doc) URL:", doc.URI) + + return get(t, doc.URI) } - t.Log("showDocument(package doc) URL:", doc.URI) // Get the report and do some minimal checks for sensible results. // @@ -567,23 +581,42 @@ func g() { // (e.g. uses JAL for CALL, or BL for RET). // We conservatively test only on the two most popular // architectures. 
- report := get(t, doc.URI) - checkMatch(t, true, report, `TEXT.*example.com/a.f`) - switch runtime.GOARCH { - case "amd64", "arm64": - checkMatch(t, true, report, `CALL runtime.printlock`) - checkMatch(t, true, report, `CALL runtime.printstring`) - checkMatch(t, true, report, `CALL runtime.printunlock`) - checkMatch(t, true, report, `CALL example.com/a.f.deferwrap1`) - checkMatch(t, true, report, `RET`) - checkMatch(t, true, report, `CALL runtime.morestack_noctxt`) + { + report := asmFor("println") + checkMatch(t, true, report, `TEXT.*example.com/a.f`) + switch runtime.GOARCH { + case "amd64", "arm64": + checkMatch(t, true, report, `CALL runtime.printlock`) + checkMatch(t, true, report, `CALL runtime.printstring`) + checkMatch(t, true, report, `CALL runtime.printunlock`) + checkMatch(t, true, report, `CALL example.com/a.f.deferwrap1`) + checkMatch(t, true, report, `RET`) + checkMatch(t, true, report, `CALL runtime.morestack_noctxt`) + } + + // Nested functions are also shown. + checkMatch(t, true, report, `TEXT.*example.com/a.f.deferwrap1`) + + // But other functions are not. + checkMatch(t, false, report, `TEXT.*example.com/a.g`) } - // Nested functions are also shown. - checkMatch(t, true, report, `TEXT.*example.com/a.f.deferwrap1`) + // Check that code in a package-level var initializer is found too. + { + report := asmFor(`f\(123\)`) + checkMatch(t, true, report, `TEXT.*example.com/a.init`) + checkMatch(t, true, report, `MOV. \$123`) + checkMatch(t, true, report, `MOV. \$456`) + checkMatch(t, true, report, `CALL example.com/a.f`) + } - // But other functions are not. - checkMatch(t, false, report, `TEXT.*example.com/a.g`) + // And code in a source-level init function. + { + report := asmFor(`f\(789\)`) + checkMatch(t, true, report, `TEXT.*example.com/a.init`) + checkMatch(t, true, report, `MOV. \$789`) + checkMatch(t, true, report, `CALL example.com/a.f`) + } }) } From 6f7906b2b92af49e85ca2e34f08a7097a19b6b5a Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Thu, 13 Feb 2025 13:21:46 -0500 Subject: [PATCH 070/270] x/tools: use ast.IsGenerated throughout Note the behavior change: the go/ast implementation checks that the special comment appears before the packge declaration; the ad hoc implementations did not. Change-Id: Ib51c498c1c73fd32c882e25f8228a6076bba7ed7 Reviewed-on: https://go-review.googlesource.com/c/tools/+/649316 LUCI-TryBot-Result: Go LUCI Auto-Submit: Alan Donovan Reviewed-by: Jonathan Amsterdam Commit-Queue: Alan Donovan --- cmd/deadcode/deadcode.go | 41 +------------------ copyright/copyright.go | 23 +---------- gopls/internal/golang/format.go | 11 ++--- gopls/internal/golang/util.go | 26 ++---------- .../diagnostics/diagnostics_test.go | 17 +++++--- .../marker/testdata/diagnostics/generated.txt | 4 +- internal/refactor/inline/inline.go | 4 +- refactor/rename/rename.go | 2 +- refactor/rename/spec.go | 25 +---------- 9 files changed, 29 insertions(+), 124 deletions(-) diff --git a/cmd/deadcode/deadcode.go b/cmd/deadcode/deadcode.go index f129102cc4c..0c66d07f79f 100644 --- a/cmd/deadcode/deadcode.go +++ b/cmd/deadcode/deadcode.go @@ -175,7 +175,7 @@ func main() { } } - if isGenerated(file) { + if ast.IsGenerated(file) { generated[p.Fset.File(file.Pos()).Name()] = true } } @@ -414,45 +414,6 @@ func printObjects(format string, objects []any) { } } -// TODO(adonovan): use go1.21's ast.IsGenerated. - -// isGenerated reports whether the file was generated by a program, -// not handwritten, by detecting the special comment described -// at https://go.dev/s/generatedcode. 
-// -// The syntax tree must have been parsed with the ParseComments flag. -// Example: -// -// f, err := parser.ParseFile(fset, filename, src, parser.ParseComments|parser.PackageClauseOnly) -// if err != nil { ... } -// gen := ast.IsGenerated(f) -func isGenerated(file *ast.File) bool { - _, ok := generator(file) - return ok -} - -func generator(file *ast.File) (string, bool) { - for _, group := range file.Comments { - for _, comment := range group.List { - if comment.Pos() > file.Package { - break // after package declaration - } - // opt: check Contains first to avoid unnecessary array allocation in Split. - const prefix = "// Code generated " - if strings.Contains(comment.Text, prefix) { - for _, line := range strings.Split(comment.Text, "\n") { - if rest, ok := strings.CutPrefix(line, prefix); ok { - if gen, ok := strings.CutSuffix(rest, " DO NOT EDIT."); ok { - return gen, true - } - } - } - } - } - } - return "", false -} - // pathSearch returns the shortest path from one of the roots to one // of the targets (along with the root itself), or zero if no path was found. func pathSearch(roots []*ssa.Function, res *rta.Result, targets map[*ssa.Function]bool) (*callgraph.Node, []*callgraph.Edge) { diff --git a/copyright/copyright.go b/copyright/copyright.go index 54bc8f512a4..4d4ad71fd72 100644 --- a/copyright/copyright.go +++ b/copyright/copyright.go @@ -75,7 +75,7 @@ func checkFile(toolsDir, filename string) (bool, error) { return false, err } // Don't require headers on generated files. - if isGenerated(fset, parsed) { + if ast.IsGenerated(parsed) { return false, nil } shouldAddCopyright := true @@ -91,24 +91,3 @@ func checkFile(toolsDir, filename string) (bool, error) { } return shouldAddCopyright, nil } - -// Copied from golang.org/x/tools/gopls/internal/golang/util.go. -// Matches cgo generated comment as well as the proposed standard: -// -// https://golang.org/s/generatedcode -var generatedRx = regexp.MustCompile(`// .*DO NOT EDIT\.?`) - -func isGenerated(fset *token.FileSet, file *ast.File) bool { - for _, commentGroup := range file.Comments { - for _, comment := range commentGroup.List { - if matched := generatedRx.MatchString(comment.Text); !matched { - continue - } - // Check if comment is at the beginning of the line in source. - if pos := fset.Position(comment.Slash); pos.Column == 1 { - return true - } - } - } - return false -} diff --git a/gopls/internal/golang/format.go b/gopls/internal/golang/format.go index b353538d978..ded00deef38 100644 --- a/gopls/internal/golang/format.go +++ b/gopls/internal/golang/format.go @@ -35,15 +35,16 @@ func Format(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]pr ctx, done := event.Start(ctx, "golang.Format") defer done() - // Generated files shouldn't be edited. So, don't format them - if IsGenerated(ctx, snapshot, fh.URI()) { - return nil, fmt.Errorf("can't format %q: file is generated", fh.URI().Path()) - } - pgf, err := snapshot.ParseGo(ctx, fh, parsego.Full) if err != nil { return nil, err } + + // Generated files shouldn't be edited. So, don't format them. + if ast.IsGenerated(pgf.File) { + return nil, fmt.Errorf("can't format %q: file is generated", fh.URI().Path()) + } + // Even if this file has parse errors, it might still be possible to format it. // Using format.Node on an AST with errors may result in code being modified. // Attempt to format the source of this file instead. 
diff --git a/gopls/internal/golang/util.go b/gopls/internal/golang/util.go index 23fd3443fac..a81ff3fbe58 100644 --- a/gopls/internal/golang/util.go +++ b/gopls/internal/golang/util.go @@ -19,16 +19,11 @@ import ( "golang.org/x/tools/gopls/internal/cache/parsego" "golang.org/x/tools/gopls/internal/protocol" "golang.org/x/tools/gopls/internal/util/bug" - "golang.org/x/tools/gopls/internal/util/safetoken" "golang.org/x/tools/internal/tokeninternal" ) -// IsGenerated gets and reads the file denoted by uri and reports -// whether it contains a "generated file" comment as described at -// https://golang.org/s/generatedcode. -// -// TODO(adonovan): opt: this function does too much. -// Move snapshot.ReadFile into the caller (most of which have already done it). +// IsGenerated reads and parses the header of the file denoted by uri +// and reports whether it [ast.IsGenerated]. func IsGenerated(ctx context.Context, snapshot *cache.Snapshot, uri protocol.DocumentURI) bool { fh, err := snapshot.ReadFile(ctx, uri) if err != nil { @@ -38,17 +33,7 @@ func IsGenerated(ctx context.Context, snapshot *cache.Snapshot, uri protocol.Doc if err != nil { return false } - for _, commentGroup := range pgf.File.Comments { - for _, comment := range commentGroup.List { - if matched := generatedRx.MatchString(comment.Text); matched { - // Check if comment is at the beginning of the line in source. - if safetoken.Position(pgf.Tok, comment.Slash).Column == 1 { - return true - } - } - } - } - return false + return ast.IsGenerated(pgf.File) } // adjustedObjEnd returns the end position of obj, possibly modified for @@ -76,11 +61,6 @@ func adjustedObjEnd(obj types.Object) token.Pos { return obj.Pos() + token.Pos(nameLen) } -// Matches cgo generated comment as well as the proposed standard: -// -// https://golang.org/s/generatedcode -var generatedRx = regexp.MustCompile(`// .*DO NOT EDIT\.?`) - // FormatNode returns the "pretty-print" output for an ast node. func FormatNode(fset *token.FileSet, n ast.Node) string { var buf strings.Builder diff --git a/gopls/internal/test/integration/diagnostics/diagnostics_test.go b/gopls/internal/test/integration/diagnostics/diagnostics_test.go index a97d249e7b5..5ef39a5f0c5 100644 --- a/gopls/internal/test/integration/diagnostics/diagnostics_test.go +++ b/gopls/internal/test/integration/diagnostics/diagnostics_test.go @@ -542,27 +542,34 @@ var X = 0 // Tests golang/go#38467. func TestNoSuggestedFixesForGeneratedFiles_Issue38467(t *testing.T) { + // This test ensures that gopls' CodeAction handler suppresses + // diagnostics in generated code. Beware that many analyzers + // themselves suppress diagnostics in generated files, in + // particular the low-status "simplifiers" (modernize, + // simplify{range,slice,compositelit}), so we use the hostport + // analyzer here. const generated = ` -- go.mod -- module mod.com go 1.12 -- main.go -- +// Code generated by generator.go. DO NOT EDIT. + package main -// Code generated by generator.go. DO NOT EDIT. 
+import ("fmt"; "net") func _() { - for i, _ := range []string{} { - _ = i - } + addr := fmt.Sprintf("%s:%d", "localhost", 12345) + net.Dial("tcp", addr) } ` Run(t, generated, func(t *testing.T, env *Env) { env.OpenFile("main.go") var d protocol.PublishDiagnosticsParams env.AfterChange( - Diagnostics(AtPosition("main.go", 5, 6)), + Diagnostics(AtPosition("main.go", 7, 21)), ReadDiagnostics("main.go", &d), ) if fixes := env.GetQuickFixes("main.go", d.Diagnostics); len(fixes) != 0 { diff --git a/gopls/internal/test/marker/testdata/diagnostics/generated.txt b/gopls/internal/test/marker/testdata/diagnostics/generated.txt index 123602df3c3..80de61200a3 100644 --- a/gopls/internal/test/marker/testdata/diagnostics/generated.txt +++ b/gopls/internal/test/marker/testdata/diagnostics/generated.txt @@ -10,10 +10,10 @@ module example.com go 1.12 -- generated.go -- -package generated - // Code generated by generator.go. DO NOT EDIT. +package generated + func _() { var y int //@diag("y", re"declared (and|but) not used") } diff --git a/internal/refactor/inline/inline.go b/internal/refactor/inline/inline.go index 54308243e1c..6f6ed4583a9 100644 --- a/internal/refactor/inline/inline.go +++ b/internal/refactor/inline/inline.go @@ -102,9 +102,9 @@ func (st *state) inline() (*Result, error) { return nil, fmt.Errorf("internal error: caller syntax positions are inconsistent with file content (did you forget to use FileSet.PositionFor when computing the file name?)") } - // TODO(adonovan): use go1.21's ast.IsGenerated. // Break the string literal so we can use inlining in this file. :) - if bytes.Contains(caller.Content, []byte("// Code generated by "+"cmd/cgo; DO NOT EDIT.")) { + if ast.IsGenerated(caller.File) && + bytes.Contains(caller.Content, []byte("// Code generated by "+"cmd/cgo; DO NOT EDIT.")) { return nil, fmt.Errorf("cannot inline calls from files that import \"C\"") } diff --git a/refactor/rename/rename.go b/refactor/rename/rename.go index 3e944b2df38..cb218434e49 100644 --- a/refactor/rename/rename.go +++ b/refactor/rename/rename.go @@ -491,7 +491,7 @@ func (r *renamer) update() error { for _, info := range r.packages { for _, f := range info.Files { tokenFile := r.iprog.Fset.File(f.FileStart) - if filesToUpdate[tokenFile] && generated(f, tokenFile) { + if filesToUpdate[tokenFile] && ast.IsGenerated(f) { generatedFileNames = append(generatedFileNames, tokenFile.Name()) } } diff --git a/refactor/rename/spec.go b/refactor/rename/spec.go index 99068c13358..0a6d7d4346c 100644 --- a/refactor/rename/spec.go +++ b/refactor/rename/spec.go @@ -19,7 +19,6 @@ import ( "log" "os" "path/filepath" - "regexp" "strconv" "strings" @@ -321,7 +320,7 @@ func findFromObjectsInFile(iprog *loader.Program, spec *spec) ([]types.Object, e if spec.offset != 0 { // We cannot refactor generated files since position information is invalidated. - if generated(f, thisFile) { + if ast.IsGenerated(f) { return nil, fmt.Errorf("cannot rename identifiers in generated file containing DO NOT EDIT marker: %s", thisFile.Name()) } @@ -566,25 +565,3 @@ func ambiguityError(fset *token.FileSet, objects []types.Object) error { return fmt.Errorf("ambiguous specifier %s matches %s", objects[0].Name(), buf.String()) } - -// Matches cgo generated comment as well as the proposed standard: -// -// https://golang.org/s/generatedcode -var generatedRx = regexp.MustCompile(`// .*DO NOT EDIT\.?`) - -// generated reports whether ast.File is a generated file. 
-func generated(f *ast.File, tokenFile *token.File) bool { - - // Iterate over the comments in the file - for _, commentGroup := range f.Comments { - for _, comment := range commentGroup.List { - if matched := generatedRx.MatchString(comment.Text); matched { - // Check if comment is at the beginning of the line in source - if pos := tokenFile.Position(comment.Slash); pos.Column == 1 { - return true - } - } - } - } - return false -} From 7fed2a4a04b822b897c3dd789a11e027c9ad1b0c Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Tue, 25 Feb 2025 16:02:44 -0500 Subject: [PATCH 071/270] gopls/internal/analysis/modernize: fix bug in rangeint for and range loops leave their index variables with a different final value: limit, and limit-1, respectively. Thus we must not offer a fix if the loop variable is used after the loop. + test Fixes golang/go#71952 Change-Id: Iaabd20792724166ace0ed5fd9dd997edaa96a435 Reviewed-on: https://go-review.googlesource.com/c/tools/+/652496 Auto-Submit: Alan Donovan Reviewed-by: Jonathan Amsterdam LUCI-TryBot-Result: Go LUCI --- gopls/internal/analysis/modernize/rangeint.go | 12 ++++++++++++ .../modernize/testdata/src/rangeint/rangeint.go | 17 ++++++++++++++++- .../testdata/src/rangeint/rangeint.go.golden | 17 ++++++++++++++++- 3 files changed, 44 insertions(+), 2 deletions(-) diff --git a/gopls/internal/analysis/modernize/rangeint.go b/gopls/internal/analysis/modernize/rangeint.go index 273c13877bd..b94bff34431 100644 --- a/gopls/internal/analysis/modernize/rangeint.go +++ b/gopls/internal/analysis/modernize/rangeint.go @@ -100,6 +100,18 @@ func rangeint(pass *analysis.Pass) { }) } + // If i is used after the loop, + // don't offer a fix, as a range loop + // leaves i with a different final value (limit-1). + if init.Tok == token.ASSIGN { + for curId := range curLoop.Parent().Preorder((*ast.Ident)(nil)) { + id := curId.Node().(*ast.Ident) + if id.Pos() > loop.End() && info.Uses[id] == v { + continue nextLoop + } + } + } + // If limit is len(slice), // simplify "range len(slice)" to "range slice". if call, ok := limit.(*ast.CallExpr); ok && diff --git a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go index 6c30f183340..32628f5fae3 100644 --- a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go +++ b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go @@ -4,7 +4,11 @@ func _(i int, s struct{ i int }, slice []int) { for i := 0; i < 10; i++ { // want "for loop can be modernized using range over int" println(i) } - for i = 0; i < f(); i++ { // want "for loop can be modernized using range over int" + { + var i int + for i = 0; i < f(); i++ { // want "for loop can be modernized using range over int" + } + // NB: no uses of i after loop. } for i := 0; i < 10; i++ { // want "for loop can be modernized using range over int" // i unused within loop @@ -51,3 +55,14 @@ func _(s string) { } } } + +// Repro for #71952: for and range loops have different final values +// on i (n and n-1, respectively) so we can't offer the fix if i is +// used after the loop. 
+func nopePostconditionDiffers() { + i := 0 + for i = 0; i < 5; i++ { + println(i) + } + println(i) // must print 5, not 4 +} diff --git a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden index 52f16347b1e..43cf220d699 100644 --- a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden +++ b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden @@ -4,7 +4,11 @@ func _(i int, s struct{ i int }, slice []int) { for i := range 10 { // want "for loop can be modernized using range over int" println(i) } - for i = range f() { // want "for loop can be modernized using range over int" + { + var i int + for i = range f() { // want "for loop can be modernized using range over int" + } + // NB: no uses of i after loop. } for range 10 { // want "for loop can be modernized using range over int" // i unused within loop @@ -51,3 +55,14 @@ func _(s string) { } } } + +// Repro for #71952: for and range loops have different final values +// on i (n and n-1, respectively) so we can't offer the fix if i is +// used after the loop. +func nopePostconditionDiffers() { + i := 0 + for i = 0; i < 5; i++ { + println(i) + } + println(i) // must print 5, not 4 +} From 6399d21203019d71e290a17b26c0946f3152cba0 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Fri, 14 Feb 2025 09:42:49 -0500 Subject: [PATCH 072/270] go/analysis/passes/reflectvaluecompare/cmd/reflectvaluecompare: add main.go This makes it easier to play with. Updates golang/go#71732 Change-Id: If5ec810c051b0c12ec30891c9a431cc5ca06dcd9 Reviewed-on: https://go-review.googlesource.com/c/tools/+/649615 Reviewed-by: Keith Randall Reviewed-by: Keith Randall Auto-Submit: Alan Donovan LUCI-TryBot-Result: Go LUCI --- .../cmd/reflectvaluecompare/main.go | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 go/analysis/passes/reflectvaluecompare/cmd/reflectvaluecompare/main.go diff --git a/go/analysis/passes/reflectvaluecompare/cmd/reflectvaluecompare/main.go b/go/analysis/passes/reflectvaluecompare/cmd/reflectvaluecompare/main.go new file mode 100644 index 00000000000..f3f9e163913 --- /dev/null +++ b/go/analysis/passes/reflectvaluecompare/cmd/reflectvaluecompare/main.go @@ -0,0 +1,18 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// The reflectvaluecompare command applies the reflectvaluecompare +// checker to the specified packages of Go source code. +// +// Run with: +// +// $ go run ./go/analysis/passes/reflectvaluecompare/cmd/reflectvaluecompare -- packages... +package main + +import ( + "golang.org/x/tools/go/analysis/passes/reflectvaluecompare" + "golang.org/x/tools/go/analysis/singlechecker" +) + +func main() { singlechecker.Main(reflectvaluecompare.Analyzer) } From 5dc980c6debffbe1b319cf554f28eaf100b9fc94 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Tue, 25 Feb 2025 22:24:09 -0500 Subject: [PATCH 073/270] gopls/internal/test/integration/misc: fix "want" assembly MOVD, MOVL, MOV are all valid. The latter appears in riscv. 
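A small standalone check of the relaxed pattern used by the test; the operand
syntax in the sample lines is illustrative, only the mnemonic matters:

```
package main

import (
	"fmt"
	"regexp"
)

func main() {
	// `MOV.?` allows at most one extra character after MOV, so the pattern
	// accepts the sized arm64/amd64 mnemonics as well as riscv's bare MOV.
	re := regexp.MustCompile(`MOV.? \$123`)
	for _, line := range []string{
		"MOVD $123, R0", // arm64
		"MOVL $123, AX", // amd64
		"MOV $123, X10", // riscv64: no size suffix
	} {
		fmt.Printf("%-16s %v\n", line, re.MatchString(line))
	}
}
```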
Fixes golang/go#71956 Change-Id: I74aa3d9a47a20b44d398054e7184e984c6701ca0 Reviewed-on: https://go-review.googlesource.com/c/tools/+/652359 LUCI-TryBot-Result: Go LUCI Reviewed-by: Robert Findley Auto-Submit: Alan Donovan --- gopls/internal/test/integration/misc/webserver_test.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/gopls/internal/test/integration/misc/webserver_test.go b/gopls/internal/test/integration/misc/webserver_test.go index 5153289941f..4b495dfa07e 100644 --- a/gopls/internal/test/integration/misc/webserver_test.go +++ b/gopls/internal/test/integration/misc/webserver_test.go @@ -605,8 +605,8 @@ func init() { { report := asmFor(`f\(123\)`) checkMatch(t, true, report, `TEXT.*example.com/a.init`) - checkMatch(t, true, report, `MOV. \$123`) - checkMatch(t, true, report, `MOV. \$456`) + checkMatch(t, true, report, `MOV.? \$123`) + checkMatch(t, true, report, `MOV.? \$456`) checkMatch(t, true, report, `CALL example.com/a.f`) } @@ -614,7 +614,7 @@ func init() { { report := asmFor(`f\(789\)`) checkMatch(t, true, report, `TEXT.*example.com/a.init`) - checkMatch(t, true, report, `MOV. \$789`) + checkMatch(t, true, report, `MOV.? \$789`) checkMatch(t, true, report, `CALL example.com/a.f`) } }) From 779331ac58c17baf109674a5754c0f0c630f695a Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Wed, 26 Feb 2025 07:50:27 -0500 Subject: [PATCH 074/270] gopls/internal/test/integration/misc: only test asm on {arm,amd}64 Fixes golang/go#71956 for real this time Change-Id: I3db07168da163ea6c8fdeefda28f64b94fe2ed57 Reviewed-on: https://go-review.googlesource.com/c/tools/+/652695 Reviewed-by: Robert Findley Commit-Queue: Alan Donovan LUCI-TryBot-Result: Go LUCI Auto-Submit: Alan Donovan --- .../test/integration/misc/webserver_test.go | 20 ++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/gopls/internal/test/integration/misc/webserver_test.go b/gopls/internal/test/integration/misc/webserver_test.go index 4b495dfa07e..79a6548ee3e 100644 --- a/gopls/internal/test/integration/misc/webserver_test.go +++ b/gopls/internal/test/integration/misc/webserver_test.go @@ -604,18 +604,24 @@ func init() { // Check that code in a package-level var initializer is found too. { report := asmFor(`f\(123\)`) - checkMatch(t, true, report, `TEXT.*example.com/a.init`) - checkMatch(t, true, report, `MOV.? \$123`) - checkMatch(t, true, report, `MOV.? \$456`) - checkMatch(t, true, report, `CALL example.com/a.f`) + switch runtime.GOARCH { + case "amd64", "arm64": + checkMatch(t, true, report, `TEXT.*example.com/a.init`) + checkMatch(t, true, report, `MOV.? \$123`) + checkMatch(t, true, report, `MOV.? \$456`) + checkMatch(t, true, report, `CALL example.com/a.f`) + } } // And code in a source-level init function. { report := asmFor(`f\(789\)`) - checkMatch(t, true, report, `TEXT.*example.com/a.init`) - checkMatch(t, true, report, `MOV.? \$789`) - checkMatch(t, true, report, `CALL example.com/a.f`) + switch runtime.GOARCH { + case "amd64", "arm64": + checkMatch(t, true, report, `TEXT.*example.com/a.init`) + checkMatch(t, true, report, `MOV.? 
\$789`) + checkMatch(t, true, report, `CALL example.com/a.f`) + } } }) } From d740adf9c34bc9f6c7944b62fd3fd15851ed8fc0 Mon Sep 17 00:00:00 2001 From: danztran Date: Wed, 26 Feb 2025 06:18:57 +0000 Subject: [PATCH 075/270] gopls/internal/settings: correct SemanticTokenTypes source fix golang/go#71964 Change-Id: I2694023636272ea971880865a4f2cb6d9192d7d5 GitHub-Last-Rev: c81d388cfe13667504cff275c969fc81587c6fc9 GitHub-Pull-Request: golang/tools#564 Reviewed-on: https://go-review.googlesource.com/c/tools/+/652655 Reviewed-by: Hongxiang Jiang LUCI-TryBot-Result: Go LUCI Reviewed-by: Robert Findley Auto-Submit: Hongxiang Jiang --- gopls/internal/settings/settings.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gopls/internal/settings/settings.go b/gopls/internal/settings/settings.go index dd353da64e9..11b06040181 100644 --- a/gopls/internal/settings/settings.go +++ b/gopls/internal/settings/settings.go @@ -1356,7 +1356,7 @@ func (o *Options) EnabledSemanticTokenModifiers() map[semtok.Modifier]bool { // EncodeSemanticTokenTypes returns a map of types to boolean. func (o *Options) EnabledSemanticTokenTypes() map[semtok.Type]bool { copy := make(map[semtok.Type]bool, len(o.SemanticTokenTypes)) - for k, v := range o.SemanticTokenModifiers { + for k, v := range o.SemanticTokenTypes { copy[semtok.Type(k)] = v } if o.NoSemanticString { From 63229bc79404d8cf2fe4e88ad569168fe251d993 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Wed, 26 Feb 2025 14:20:40 -0500 Subject: [PATCH 076/270] gopls/internal/analysis/gofix: register "alias" fact type Fixes golang/go#71982 Change-Id: I29535d430e2fb9da0915a1d6ec99d4a3ade8e4e8 Reviewed-on: https://go-review.googlesource.com/c/tools/+/652975 Auto-Submit: Alan Donovan Reviewed-by: Jonathan Amsterdam LUCI-TryBot-Result: Go LUCI --- gopls/internal/analysis/gofix/gofix.go | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/gopls/internal/analysis/gofix/gofix.go b/gopls/internal/analysis/gofix/gofix.go index 237e5b0b58a..7323028aa31 100644 --- a/gopls/internal/analysis/gofix/gofix.go +++ b/gopls/internal/analysis/gofix/gofix.go @@ -30,12 +30,16 @@ import ( var doc string var Analyzer = &analysis.Analyzer{ - Name: "gofix", - Doc: analysisinternal.MustExtractDoc(doc, "gofix"), - URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/gofix", - Run: run, - FactTypes: []analysis.Fact{new(goFixInlineFuncFact), new(goFixInlineConstFact)}, - Requires: []*analysis.Analyzer{inspect.Analyzer}, + Name: "gofix", + Doc: analysisinternal.MustExtractDoc(doc, "gofix"), + URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/gofix", + Run: run, + FactTypes: []analysis.Fact{ + (*goFixInlineFuncFact)(nil), + (*goFixInlineConstFact)(nil), + (*goFixInlineAliasFact)(nil), + }, + Requires: []*analysis.Analyzer{inspect.Analyzer}, } // analyzer holds the state for this analysis. From 57b529ad205da65cbc7429c2cadd5d7c44055981 Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Tue, 25 Feb 2025 11:10:41 -0500 Subject: [PATCH 077/270] doc/release/v0.18.0.md: add -fix flag Added the -fix flag to the command line for applying go:fix fixes. The given command prints the fixes, but does not apply them. 
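For context, the kind of annotation these fixes act on looks roughly like the
sketch below; the identifiers are invented for illustration, while
//go:fix inline is the real directive syntax:

```
package example

// Deprecated: Use Pointer instead.
//
//go:fix inline
func Ptr(v int) *int { return Pointer(v) }

func Pointer(v int) *int { return &v }
```

With -fix, the command rewrites calls such as Ptr(x) to Pointer(x) in place;
without it, the fixes are only printed.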
Change-Id: Ia6dc100cf88e293453fbc6649f14aa0046572104 Reviewed-on: https://go-review.googlesource.com/c/tools/+/652355 Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI --- gopls/doc/release/v0.18.0.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gopls/doc/release/v0.18.0.md b/gopls/doc/release/v0.18.0.md index ba2c0184307..9aa0f9c9d07 100644 --- a/gopls/doc/release/v0.18.0.md +++ b/gopls/doc/release/v0.18.0.md @@ -106,7 +106,7 @@ gopls will suggest replacing `Ptr` in your code with `Pointer`. Use this command to apply such fixes en masse: ``` -$ go run golang.org/x/tools/gopls/internal/analysis/gofix/cmd/gofix@latest -test ./... +$ go run golang.org/x/tools/gopls/internal/analysis/gofix/cmd/gofix@latest -test -fix ./... ``` ## "Implementations" supports generics From 8f4b8cd6b69a761defc548aa8377b8306a881c20 Mon Sep 17 00:00:00 2001 From: Madeline Kalil Date: Thu, 27 Feb 2025 11:28:35 -0500 Subject: [PATCH 078/270] gopls/internal/golang: add package symbols comment Note and explain why we use only syntax and not type information to parse package symbols. Change-Id: I7498158cf633e82d4149f88fc7e8858babd66559 Reviewed-on: https://go-review.googlesource.com/c/tools/+/653355 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan --- gopls/internal/golang/symbols.go | 3 +++ 1 file changed, 3 insertions(+) diff --git a/gopls/internal/golang/symbols.go b/gopls/internal/golang/symbols.go index db31baa69f2..53fbb663800 100644 --- a/gopls/internal/golang/symbols.go +++ b/gopls/internal/golang/symbols.go @@ -82,6 +82,9 @@ func DocumentSymbols(ctx context.Context, snapshot *cache.Snapshot, fh file.Hand // The PackageSymbol data type contains the same fields as protocol.DocumentSymbol, with // an additional int field "File" that stores the index of that symbol's file in the // PackageSymbolsResult.Files. +// Symbols are gathered using syntax rather than type information because type checking is +// significantly slower. Syntax information provides enough value to the user without +// causing a lag when loading symbol information across different files. func PackageSymbols(ctx context.Context, snapshot *cache.Snapshot, uri protocol.DocumentURI) (command.PackageSymbolsResult, error) { ctx, done := event.Start(ctx, "source.PackageSymbols") defer done() From 1cc80ad525837f752d516a5827e78bce18755cd2 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Wed, 26 Feb 2025 13:35:43 -0500 Subject: [PATCH 079/270] internal/event/export/ocagent: delete We never use it, and OpenCensus is officially moribund. 
Change-Id: I0095f996c58954c238be7875694ee62dd721b3f2 Reviewed-on: https://go-review.googlesource.com/c/tools/+/653016 LUCI-TryBot-Result: Go LUCI Reviewed-by: Robert Findley Commit-Queue: Alan Donovan Auto-Submit: Alan Donovan --- gopls/internal/cmd/cmd.go | 7 +- gopls/internal/cmd/usage/usage-v.hlp | 2 - gopls/internal/cmd/usage/usage.hlp | 2 - gopls/internal/debug/serve.go | 15 +- gopls/internal/lsprpc/lsprpc_test.go | 8 +- gopls/internal/test/integration/runner.go | 4 +- gopls/internal/test/marker/marker_test.go | 2 +- internal/event/export/ocagent/README.md | 139 ------- internal/event/export/ocagent/metrics.go | 213 ----------- internal/event/export/ocagent/metrics_test.go | 144 ------- internal/event/export/ocagent/ocagent.go | 358 ------------------ internal/event/export/ocagent/ocagent_test.go | 210 ---------- internal/event/export/ocagent/trace_test.go | 158 -------- internal/event/export/ocagent/wire/common.go | 101 ----- internal/event/export/ocagent/wire/core.go | 17 - internal/event/export/ocagent/wire/metrics.go | 204 ---------- .../event/export/ocagent/wire/metrics_test.go | 80 ---- internal/event/export/ocagent/wire/trace.go | 112 ------ 18 files changed, 10 insertions(+), 1766 deletions(-) delete mode 100644 internal/event/export/ocagent/README.md delete mode 100644 internal/event/export/ocagent/metrics.go delete mode 100644 internal/event/export/ocagent/metrics_test.go delete mode 100644 internal/event/export/ocagent/ocagent.go delete mode 100644 internal/event/export/ocagent/ocagent_test.go delete mode 100644 internal/event/export/ocagent/trace_test.go delete mode 100644 internal/event/export/ocagent/wire/common.go delete mode 100644 internal/event/export/ocagent/wire/core.go delete mode 100644 internal/event/export/ocagent/wire/metrics.go delete mode 100644 internal/event/export/ocagent/wire/metrics_test.go delete mode 100644 internal/event/export/ocagent/wire/trace.go diff --git a/gopls/internal/cmd/cmd.go b/gopls/internal/cmd/cmd.go index 119577c012b..4a00afc4115 100644 --- a/gopls/internal/cmd/cmd.go +++ b/gopls/internal/cmd/cmd.go @@ -63,9 +63,6 @@ type Application struct { // VeryVerbose enables a higher level of verbosity in logging output. VeryVerbose bool `flag:"vv,veryverbose" help:"very verbose output"` - // Control ocagent export of telemetry - OCAgent string `flag:"ocagent" help:"the address of the ocagent (e.g. http://localhost:55678), or off"` - // PrepareOptions is called to update the options when a new view is built. // It is primarily to allow the behavior of gopls to be modified by hooks. PrepareOptions func(*settings.Options) @@ -98,8 +95,6 @@ func (app *Application) verbose() bool { // New returns a new Application ready to run. func New() *Application { app := &Application{ - OCAgent: "off", //TODO: Remove this line to default the exporter to on - Serve: Serve{ RemoteListenTimeout: 1 * time.Minute, }, @@ -238,7 +233,7 @@ func (app *Application) Run(ctx context.Context, args ...string) error { // executable, and immediately runs a gc. filecache.Start() - ctx = debug.WithInstance(ctx, app.OCAgent) + ctx = debug.WithInstance(ctx) if len(args) == 0 { s := flag.NewFlagSet(app.Name(), flag.ExitOnError) return tool.Run(ctx, s, &app.Serve, args) diff --git a/gopls/internal/cmd/usage/usage-v.hlp b/gopls/internal/cmd/usage/usage-v.hlp index 64f99a3387e..044d4251e89 100644 --- a/gopls/internal/cmd/usage/usage-v.hlp +++ b/gopls/internal/cmd/usage/usage-v.hlp @@ -61,8 +61,6 @@ flags: filename to log to. 
if value is "auto", then logging to a default output file is enabled -mode=string no effect - -ocagent=string - the address of the ocagent (e.g. http://localhost:55678), or off (default "off") -port=int port on which to run gopls for debugging purposes -profile.alloc=string diff --git a/gopls/internal/cmd/usage/usage.hlp b/gopls/internal/cmd/usage/usage.hlp index c801a467626..b918b24a411 100644 --- a/gopls/internal/cmd/usage/usage.hlp +++ b/gopls/internal/cmd/usage/usage.hlp @@ -58,8 +58,6 @@ flags: filename to log to. if value is "auto", then logging to a default output file is enabled -mode=string no effect - -ocagent=string - the address of the ocagent (e.g. http://localhost:55678), or off (default "off") -port=int port on which to run gopls for debugging purposes -profile.alloc=string diff --git a/gopls/internal/debug/serve.go b/gopls/internal/debug/serve.go index c471f488cd1..7cfe2b3d23e 100644 --- a/gopls/internal/debug/serve.go +++ b/gopls/internal/debug/serve.go @@ -33,7 +33,6 @@ import ( "golang.org/x/tools/internal/event/core" "golang.org/x/tools/internal/event/export" "golang.org/x/tools/internal/event/export/metric" - "golang.org/x/tools/internal/event/export/ocagent" "golang.org/x/tools/internal/event/export/prometheus" "golang.org/x/tools/internal/event/keys" "golang.org/x/tools/internal/event/label" @@ -51,13 +50,11 @@ type Instance struct { Logfile string StartTime time.Time ServerAddress string - OCAgentConfig string LogWriter io.Writer exporter event.Exporter - ocagent *ocagent.Exporter prometheus *prometheus.Exporter rpcs *Rpcs traces *traces @@ -363,16 +360,11 @@ func GetInstance(ctx context.Context) *Instance { // WithInstance creates debug instance ready for use using the supplied // configuration and stores it in the returned context. 
-func WithInstance(ctx context.Context, agent string) context.Context { +func WithInstance(ctx context.Context) context.Context { i := &Instance{ - StartTime: time.Now(), - OCAgentConfig: agent, + StartTime: time.Now(), } i.LogWriter = os.Stderr - ocConfig := ocagent.Discover() - //TODO: we should not need to adjust the discovered configuration - ocConfig.Address = i.OCAgentConfig - i.ocagent = ocagent.Connect(ocConfig) i.prometheus = prometheus.New() i.rpcs = &Rpcs{} i.traces = &traces{} @@ -541,9 +533,6 @@ func messageType(l log.Level) protocol.MessageType { func makeInstanceExporter(i *Instance) event.Exporter { exporter := func(ctx context.Context, ev core.Event, lm label.Map) context.Context { - if i.ocagent != nil { - ctx = i.ocagent.ProcessEvent(ctx, ev, lm) - } if i.prometheus != nil { ctx = i.prometheus.ProcessEvent(ctx, ev, lm) } diff --git a/gopls/internal/lsprpc/lsprpc_test.go b/gopls/internal/lsprpc/lsprpc_test.go index 1a259bbd646..eda00b28c7a 100644 --- a/gopls/internal/lsprpc/lsprpc_test.go +++ b/gopls/internal/lsprpc/lsprpc_test.go @@ -58,7 +58,7 @@ func TestClientLogging(t *testing.T) { server := PingServer{} client := FakeClient{Logs: make(chan string, 10)} - ctx = debug.WithInstance(ctx, "") + ctx = debug.WithInstance(ctx) ss := NewStreamServer(cache.New(nil), false, nil).(*StreamServer) ss.serverForTest = server ts := servertest.NewPipeServer(ss, nil) @@ -121,7 +121,7 @@ func checkClose(t *testing.T, closer func() error) { func setupForwarding(ctx context.Context, t *testing.T, s protocol.Server) (direct, forwarded servertest.Connector, cleanup func()) { t.Helper() - serveCtx := debug.WithInstance(ctx, "") + serveCtx := debug.WithInstance(ctx) ss := NewStreamServer(cache.New(nil), false, nil).(*StreamServer) ss.serverForTest = s tsDirect := servertest.NewTCPServer(serveCtx, ss, nil) @@ -214,8 +214,8 @@ func TestDebugInfoLifecycle(t *testing.T) { baseCtx, cancel := context.WithCancel(context.Background()) defer cancel() - clientCtx := debug.WithInstance(baseCtx, "") - serverCtx := debug.WithInstance(baseCtx, "") + clientCtx := debug.WithInstance(baseCtx) + serverCtx := debug.WithInstance(baseCtx) cache := cache.New(nil) ss := NewStreamServer(cache, false, nil) diff --git a/gopls/internal/test/integration/runner.go b/gopls/internal/test/integration/runner.go index 6d10b16cab3..b3e98b859d3 100644 --- a/gopls/internal/test/integration/runner.go +++ b/gopls/internal/test/integration/runner.go @@ -173,7 +173,7 @@ func (r *Runner) Run(t *testing.T, files string, test TestFunc, opts ...RunOptio } // TODO(rfindley): do we need an instance at all? Can it be removed? 
- ctx = debug.WithInstance(ctx, "off") + ctx = debug.WithInstance(ctx) rootDir := filepath.Join(r.tempDir, filepath.FromSlash(t.Name())) if err := os.MkdirAll(rootDir, 0755); err != nil { @@ -349,7 +349,7 @@ func (r *Runner) defaultServer() jsonrpc2.StreamServer { func (r *Runner) forwardedServer() jsonrpc2.StreamServer { r.tsOnce.Do(func() { ctx := context.Background() - ctx = debug.WithInstance(ctx, "off") + ctx = debug.WithInstance(ctx) ss := lsprpc.NewStreamServer(cache.New(nil), false, nil) r.ts = servertest.NewTCPServer(ctx, ss, nil) }) diff --git a/gopls/internal/test/marker/marker_test.go b/gopls/internal/test/marker/marker_test.go index a5e23b928ad..a3e62d35968 100644 --- a/gopls/internal/test/marker/marker_test.go +++ b/gopls/internal/test/marker/marker_test.go @@ -964,7 +964,7 @@ func newEnv(t *testing.T, cache *cache.Cache, files, proxyFiles map[string][]byt // Put a debug instance in the context to prevent logging to stderr. // See associated TODO in runner.go: we should revisit this pattern. ctx := context.Background() - ctx = debug.WithInstance(ctx, "off") + ctx = debug.WithInstance(ctx) awaiter := integration.NewAwaiter(sandbox.Workdir) ss := lsprpc.NewStreamServer(cache, false, nil) diff --git a/internal/event/export/ocagent/README.md b/internal/event/export/ocagent/README.md deleted file mode 100644 index 22e8469f06b..00000000000 --- a/internal/event/export/ocagent/README.md +++ /dev/null @@ -1,139 +0,0 @@ -# Exporting Metrics and Traces with OpenCensus, Zipkin, and Prometheus - -This tutorial provides a minimum example to verify that metrics and traces -can be exported to OpenCensus from Go tools. - -## Setting up oragent - -1. Ensure you have [docker](https://www.docker.com/get-started) and [docker-compose](https://docs.docker.com/compose/install/). -2. Clone [oragent](https://github.com/orijtech/oragent). -3. In the oragent directory, start the services: -```bash -docker-compose up -``` -If everything goes well, you should see output resembling the following: -``` -Starting oragent_zipkin_1 ... done -Starting oragent_oragent_1 ... done -Starting oragent_prometheus_1 ... done -... -``` -* You can check the status of the OpenCensus agent using zPages at http://localhost:55679/debug/tracez. -* You can now access the Prometheus UI at http://localhost:9445. -* You can now access the Zipkin UI at http://localhost:9444. -4. To shut down oragent, hit Ctrl+C in the terminal. -5. You can also start oragent in detached mode by running `docker-compose up -d`. To stop oragent while detached, run `docker-compose down`. - -## Exporting Metrics and Traces -1. Clone the [tools](https://golang.org/x/tools) subrepository. -1. 
Inside `internal`, create a file named `main.go` with the following contents: -```go -package main - -import ( - "context" - "fmt" - "math/rand" - "net/http" - "time" - - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/event/export" - "golang.org/x/tools/internal/event/export/metric" - "golang.org/x/tools/internal/event/export/ocagent" -) - -type testExporter struct { - metrics metric.Exporter - ocagent *ocagent.Exporter -} - -func (e *testExporter) ProcessEvent(ctx context.Context, ev event.Event) (context.Context, event.Event) { - ctx, ev = export.Tag(ctx, ev) - ctx, ev = export.ContextSpan(ctx, ev) - ctx, ev = e.metrics.ProcessEvent(ctx, ev) - ctx, ev = e.ocagent.ProcessEvent(ctx, ev) - return ctx, ev -} - -func main() { - exporter := &testExporter{} - - exporter.ocagent = ocagent.Connect(&ocagent.Config{ - Start: time.Now(), - Address: "http://127.0.0.1:55678", - Service: "go-tools-test", - Rate: 5 * time.Second, - Client: &http.Client{}, - }) - event.SetExporter(exporter) - - ctx := context.TODO() - mLatency := event.NewFloat64Key("latency", "the latency in milliseconds") - distribution := metric.HistogramFloat64Data{ - Info: &metric.HistogramFloat64{ - Name: "latencyDistribution", - Description: "the various latencies", - Buckets: []float64{0, 10, 50, 100, 200, 400, 800, 1000, 1400, 2000, 5000, 10000, 15000}, - }, - } - - distribution.Info.Record(&exporter.metrics, mLatency) - - for { - sleep := randomSleep() - _, end := event.StartSpan(ctx, "main.randomSleep()") - time.Sleep(time.Duration(sleep) * time.Millisecond) - end() - event.Record(ctx, mLatency.Of(float64(sleep))) - - fmt.Println("Latency: ", float64(sleep)) - } -} - -func randomSleep() int64 { - var max int64 - switch modulus := time.Now().Unix() % 5; modulus { - case 0: - max = 17001 - case 1: - max = 8007 - case 2: - max = 917 - case 3: - max = 87 - case 4: - max = 1173 - } - return rand.Int63n(max) -} - -``` -3. Run the new file from within the tools repository: -```bash -go run internal/main.go -``` -4. After about 5 seconds, OpenCensus should start receiving your new metrics, which you can see at http://localhost:8844/metrics. This page will look similar to the following: -``` -# HELP promdemo_latencyDistribution the various latencies -# TYPE promdemo_latencyDistribution histogram -promdemo_latencyDistribution_bucket{vendor="otc",le="0"} 0 -promdemo_latencyDistribution_bucket{vendor="otc",le="10"} 2 -promdemo_latencyDistribution_bucket{vendor="otc",le="50"} 9 -promdemo_latencyDistribution_bucket{vendor="otc",le="100"} 22 -promdemo_latencyDistribution_bucket{vendor="otc",le="200"} 35 -promdemo_latencyDistribution_bucket{vendor="otc",le="400"} 49 -promdemo_latencyDistribution_bucket{vendor="otc",le="800"} 63 -promdemo_latencyDistribution_bucket{vendor="otc",le="1000"} 78 -promdemo_latencyDistribution_bucket{vendor="otc",le="1400"} 93 -promdemo_latencyDistribution_bucket{vendor="otc",le="2000"} 108 -promdemo_latencyDistribution_bucket{vendor="otc",le="5000"} 123 -promdemo_latencyDistribution_bucket{vendor="otc",le="10000"} 138 -promdemo_latencyDistribution_bucket{vendor="otc",le="15000"} 153 -promdemo_latencyDistribution_bucket{vendor="otc",le="+Inf"} 15 -promdemo_latencyDistribution_sum{vendor="otc"} 1641 -promdemo_latencyDistribution_count{vendor="otc"} 15 -``` -5. After a few more seconds, Prometheus should start displaying your new metrics. You can view the distribution at http://localhost:9445/graph?g0.range_input=5m&g0.stacked=1&g0.expr=rate(oragent_latencyDistribution_bucket%5B5m%5D)&g0.tab=0. 
- -6. Zipkin should also start displaying traces. You can view them at http://localhost:9444/zipkin/?limit=10&lookback=300000&serviceName=go-tools-test. \ No newline at end of file diff --git a/internal/event/export/ocagent/metrics.go b/internal/event/export/ocagent/metrics.go deleted file mode 100644 index 78d65994db8..00000000000 --- a/internal/event/export/ocagent/metrics.go +++ /dev/null @@ -1,213 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ocagent - -import ( - "time" - - "golang.org/x/tools/internal/event/export/metric" - "golang.org/x/tools/internal/event/export/ocagent/wire" - "golang.org/x/tools/internal/event/label" -) - -// dataToMetricDescriptor return a *wire.MetricDescriptor based on data. -func dataToMetricDescriptor(data metric.Data) *wire.MetricDescriptor { - if data == nil { - return nil - } - descriptor := &wire.MetricDescriptor{ - Name: data.Handle(), - Description: getDescription(data), - // TODO: Unit? - Type: dataToMetricDescriptorType(data), - LabelKeys: getLabelKeys(data), - } - - return descriptor -} - -// getDescription returns the description of data. -func getDescription(data metric.Data) string { - switch d := data.(type) { - case *metric.Int64Data: - return d.Info.Description - - case *metric.Float64Data: - return d.Info.Description - - case *metric.HistogramInt64Data: - return d.Info.Description - - case *metric.HistogramFloat64Data: - return d.Info.Description - } - - return "" -} - -// getLabelKeys returns a slice of *wire.LabelKeys based on the keys -// in data. -func getLabelKeys(data metric.Data) []*wire.LabelKey { - switch d := data.(type) { - case *metric.Int64Data: - return infoKeysToLabelKeys(d.Info.Keys) - - case *metric.Float64Data: - return infoKeysToLabelKeys(d.Info.Keys) - - case *metric.HistogramInt64Data: - return infoKeysToLabelKeys(d.Info.Keys) - - case *metric.HistogramFloat64Data: - return infoKeysToLabelKeys(d.Info.Keys) - } - - return nil -} - -// dataToMetricDescriptorType returns a wire.MetricDescriptor_Type based on the -// underlying type of data. -func dataToMetricDescriptorType(data metric.Data) wire.MetricDescriptor_Type { - switch d := data.(type) { - case *metric.Int64Data: - if d.IsGauge { - return wire.MetricDescriptor_GAUGE_INT64 - } - return wire.MetricDescriptor_CUMULATIVE_INT64 - - case *metric.Float64Data: - if d.IsGauge { - return wire.MetricDescriptor_GAUGE_DOUBLE - } - return wire.MetricDescriptor_CUMULATIVE_DOUBLE - - case *metric.HistogramInt64Data: - return wire.MetricDescriptor_CUMULATIVE_DISTRIBUTION - - case *metric.HistogramFloat64Data: - return wire.MetricDescriptor_CUMULATIVE_DISTRIBUTION - } - - return wire.MetricDescriptor_UNSPECIFIED -} - -// dataToTimeseries returns a slice of *wire.TimeSeries based on the -// points in data. -func dataToTimeseries(data metric.Data, start time.Time) []*wire.TimeSeries { - if data == nil { - return nil - } - - numRows := numRows(data) - startTimestamp := convertTimestamp(start) - timeseries := make([]*wire.TimeSeries, 0, numRows) - - for i := 0; i < numRows; i++ { - timeseries = append(timeseries, &wire.TimeSeries{ - StartTimestamp: &startTimestamp, - // TODO: labels? - Points: dataToPoints(data, i), - }) - } - - return timeseries -} - -// numRows returns the number of rows in data. 
-func numRows(data metric.Data) int { - switch d := data.(type) { - case *metric.Int64Data: - return len(d.Rows) - case *metric.Float64Data: - return len(d.Rows) - case *metric.HistogramInt64Data: - return len(d.Rows) - case *metric.HistogramFloat64Data: - return len(d.Rows) - } - - return 0 -} - -// dataToPoints returns an array of *wire.Points based on the point(s) -// in data at index i. -func dataToPoints(data metric.Data, i int) []*wire.Point { - switch d := data.(type) { - case *metric.Int64Data: - timestamp := convertTimestamp(d.EndTime) - return []*wire.Point{ - { - Value: wire.PointInt64Value{ - Int64Value: d.Rows[i], - }, - Timestamp: ×tamp, - }, - } - case *metric.Float64Data: - timestamp := convertTimestamp(d.EndTime) - return []*wire.Point{ - { - Value: wire.PointDoubleValue{ - DoubleValue: d.Rows[i], - }, - Timestamp: ×tamp, - }, - } - case *metric.HistogramInt64Data: - row := d.Rows[i] - bucketBounds := make([]float64, len(d.Info.Buckets)) - for i, val := range d.Info.Buckets { - bucketBounds[i] = float64(val) - } - return distributionToPoints(row.Values, row.Count, float64(row.Sum), bucketBounds, d.EndTime) - case *metric.HistogramFloat64Data: - row := d.Rows[i] - return distributionToPoints(row.Values, row.Count, row.Sum, d.Info.Buckets, d.EndTime) - } - - return nil -} - -// distributionToPoints returns an array of *wire.Points containing a -// wire.PointDistributionValue representing a distribution with the -// supplied counts, count, and sum. -func distributionToPoints(counts []int64, count int64, sum float64, bucketBounds []float64, end time.Time) []*wire.Point { - buckets := make([]*wire.Bucket, len(counts)) - for i := 0; i < len(counts); i++ { - buckets[i] = &wire.Bucket{ - Count: counts[i], - } - } - timestamp := convertTimestamp(end) - return []*wire.Point{ - { - Value: wire.PointDistributionValue{ - DistributionValue: &wire.DistributionValue{ - Count: count, - Sum: sum, - // TODO: SumOfSquaredDeviation? - Buckets: buckets, - BucketOptions: &wire.BucketOptionsExplicit{ - Bounds: bucketBounds, - }, - }, - }, - Timestamp: ×tamp, - }, - } -} - -// infoKeysToLabelKeys returns an array of *wire.LabelKeys containing the -// string values of the elements of labelKeys. -func infoKeysToLabelKeys(infoKeys []label.Key) []*wire.LabelKey { - labelKeys := make([]*wire.LabelKey, 0, len(infoKeys)) - for _, key := range infoKeys { - labelKeys = append(labelKeys, &wire.LabelKey{ - Key: key.Name(), - }) - } - - return labelKeys -} diff --git a/internal/event/export/ocagent/metrics_test.go b/internal/event/export/ocagent/metrics_test.go deleted file mode 100644 index 001e7f02dbf..00000000000 --- a/internal/event/export/ocagent/metrics_test.go +++ /dev/null @@ -1,144 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package ocagent_test - -import ( - "context" - "errors" - "testing" - - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/event/keys" -) - -func TestEncodeMetric(t *testing.T) { - exporter := registerExporter() - const prefix = testNodeStr + ` - "metrics":[` - const suffix = `]}` - tests := []struct { - name string - run func(ctx context.Context) - want string - }{ - { - name: "HistogramFloat64, HistogramInt64", - run: func(ctx context.Context) { - ctx = event.Label(ctx, keyMethod.Of("godoc.ServeHTTP")) - event.Metric(ctx, latencyMs.Of(96.58)) - ctx = event.Label(ctx, keys.Err.Of(errors.New("panic: fatal signal"))) - event.Metric(ctx, bytesIn.Of(97e2)) - }, - want: prefix + ` - { - "metric_descriptor": { - "name": "latency_ms", - "description": "The latency of calls in milliseconds", - "type": 6, - "label_keys": [ - { - "key": "method" - }, - { - "key": "route" - } - ] - }, - "timeseries": [ - { - "start_timestamp": "1970-01-01T00:00:00Z", - "points": [ - { - "timestamp": "1970-01-01T00:00:40Z", - "distributionValue": { - "count": 1, - "sum": 96.58, - "bucket_options": { - "explicit": { - "bounds": [ - 0, - 5, - 10, - 25, - 50 - ] - } - }, - "buckets": [ - {}, - {}, - {}, - {}, - {} - ] - } - } - ] - } - ] - }, - { - "metric_descriptor": { - "name": "latency_ms", - "description": "The latency of calls in milliseconds", - "type": 6, - "label_keys": [ - { - "key": "method" - }, - { - "key": "route" - } - ] - }, - "timeseries": [ - { - "start_timestamp": "1970-01-01T00:00:00Z", - "points": [ - { - "timestamp": "1970-01-01T00:00:40Z", - "distributionValue": { - "count": 1, - "sum": 9700, - "bucket_options": { - "explicit": { - "bounds": [ - 0, - 10, - 50, - 100, - 500, - 1000, - 2000 - ] - } - }, - "buckets": [ - {}, - {}, - {}, - {}, - {}, - {}, - {} - ] - } - } - ] - } - ] - }` + suffix, - }, - } - - ctx := context.TODO() - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - tt.run(ctx) - got := exporter.Output("/v1/metrics") - checkJSON(t, got, []byte(tt.want)) - }) - } -} diff --git a/internal/event/export/ocagent/ocagent.go b/internal/event/export/ocagent/ocagent.go deleted file mode 100644 index d86c4aed0cf..00000000000 --- a/internal/event/export/ocagent/ocagent.go +++ /dev/null @@ -1,358 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package ocagent adds the ability to export all telemetry to an ocagent. -// This keeps the compile time dependencies to zero and allows the agent to -// have the exporters needed for telemetry aggregation and viewing systems. -package ocagent - -import ( - "bytes" - "context" - "encoding/json" - "fmt" - "net/http" - "os" - "path/filepath" - "sync" - "time" - - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/event/core" - "golang.org/x/tools/internal/event/export" - "golang.org/x/tools/internal/event/export/metric" - "golang.org/x/tools/internal/event/export/ocagent/wire" - "golang.org/x/tools/internal/event/keys" - "golang.org/x/tools/internal/event/label" -) - -type Config struct { - Start time.Time - Host string - Process uint32 - Client *http.Client - Service string - Address string - Rate time.Duration -} - -var ( - connectMu sync.Mutex - exporters = make(map[Config]*Exporter) -) - -// Discover finds the local agent to export to, it will return nil if there -// is not one running. 
-// TODO: Actually implement a discovery protocol rather than a hard coded address -func Discover() *Config { - return &Config{ - Address: "http://localhost:55678", - } -} - -type Exporter struct { - mu sync.Mutex - config Config - spans []*export.Span - metrics []metric.Data -} - -// Connect creates a process specific exporter with the specified -// serviceName and the address of the ocagent to which it will upload -// its telemetry. -func Connect(config *Config) *Exporter { - if config == nil || config.Address == "off" { - return nil - } - resolved := *config - if resolved.Host == "" { - hostname, _ := os.Hostname() - resolved.Host = hostname - } - if resolved.Process == 0 { - resolved.Process = uint32(os.Getpid()) - } - if resolved.Client == nil { - resolved.Client = http.DefaultClient - } - if resolved.Service == "" { - resolved.Service = filepath.Base(os.Args[0]) - } - if resolved.Rate == 0 { - resolved.Rate = 2 * time.Second - } - - connectMu.Lock() - defer connectMu.Unlock() - if exporter, found := exporters[resolved]; found { - return exporter - } - exporter := &Exporter{config: resolved} - exporters[resolved] = exporter - if exporter.config.Start.IsZero() { - exporter.config.Start = time.Now() - } - go func() { - for range time.Tick(exporter.config.Rate) { - exporter.Flush() - } - }() - return exporter -} - -func (e *Exporter) ProcessEvent(ctx context.Context, ev core.Event, lm label.Map) context.Context { - switch { - case event.IsEnd(ev): - e.mu.Lock() - defer e.mu.Unlock() - span := export.GetSpan(ctx) - if span != nil { - e.spans = append(e.spans, span) - } - case event.IsMetric(ev): - e.mu.Lock() - defer e.mu.Unlock() - data := metric.Entries.Get(lm).([]metric.Data) - e.metrics = append(e.metrics, data...) - } - return ctx -} - -func (e *Exporter) Flush() { - e.mu.Lock() - defer e.mu.Unlock() - spans := make([]*wire.Span, len(e.spans)) - for i, s := range e.spans { - spans[i] = convertSpan(s) - } - e.spans = nil - metrics := make([]*wire.Metric, len(e.metrics)) - for i, m := range e.metrics { - metrics[i] = convertMetric(m, e.config.Start) - } - e.metrics = nil - - if len(spans) > 0 { - e.send("/v1/trace", &wire.ExportTraceServiceRequest{ - Node: e.config.buildNode(), - Spans: spans, - //TODO: Resource? - }) - } - if len(metrics) > 0 { - e.send("/v1/metrics", &wire.ExportMetricsServiceRequest{ - Node: e.config.buildNode(), - Metrics: metrics, - //TODO: Resource? 
- }) - } -} - -func (cfg *Config) buildNode() *wire.Node { - return &wire.Node{ - Identifier: &wire.ProcessIdentifier{ - HostName: cfg.Host, - Pid: cfg.Process, - StartTimestamp: convertTimestamp(cfg.Start), - }, - LibraryInfo: &wire.LibraryInfo{ - Language: wire.LanguageGo, - ExporterVersion: "0.0.1", - CoreLibraryVersion: "x/tools", - }, - ServiceInfo: &wire.ServiceInfo{ - Name: cfg.Service, - }, - } -} - -func (e *Exporter) send(endpoint string, message any) { - blob, err := json.Marshal(message) - if err != nil { - errorInExport("ocagent failed to marshal message for %v: %v", endpoint, err) - return - } - uri := e.config.Address + endpoint - req, err := http.NewRequest("POST", uri, bytes.NewReader(blob)) - if err != nil { - errorInExport("ocagent failed to build request for %v: %v", uri, err) - return - } - req.Header.Set("Content-Type", "application/json") - res, err := e.config.Client.Do(req) - if err != nil { - errorInExport("ocagent failed to send message: %v \n", err) - return - } - if res.Body != nil { - res.Body.Close() - } -} - -func errorInExport(message string, args ...any) { - // This function is useful when debugging the exporter, but in general we - // want to just drop any export -} - -func convertTimestamp(t time.Time) wire.Timestamp { - return t.Format(time.RFC3339Nano) -} - -func toTruncatableString(s string) *wire.TruncatableString { - if s == "" { - return nil - } - return &wire.TruncatableString{Value: s} -} - -func convertSpan(span *export.Span) *wire.Span { - result := &wire.Span{ - TraceID: span.ID.TraceID[:], - SpanID: span.ID.SpanID[:], - TraceState: nil, //TODO? - ParentSpanID: span.ParentID[:], - Name: toTruncatableString(span.Name), - Kind: wire.UnspecifiedSpanKind, - StartTime: convertTimestamp(span.Start().At()), - EndTime: convertTimestamp(span.Finish().At()), - Attributes: convertAttributes(span.Start(), 1), - TimeEvents: convertEvents(span.Events()), - SameProcessAsParentSpan: true, - //TODO: StackTrace? - //TODO: Links? - //TODO: Status? - //TODO: Resource? - } - return result -} - -func convertMetric(data metric.Data, start time.Time) *wire.Metric { - descriptor := dataToMetricDescriptor(data) - timeseries := dataToTimeseries(data, start) - - if descriptor == nil && timeseries == nil { - return nil - } - - // TODO: handle Histogram metrics - return &wire.Metric{ - MetricDescriptor: descriptor, - Timeseries: timeseries, - // TODO: attach Resource? 
- } -} - -func skipToValidLabel(list label.List, index int) (int, label.Label) { - // skip to the first valid label - for ; list.Valid(index); index++ { - l := list.Label(index) - if !l.Valid() || l.Key() == keys.Label { - continue - } - return index, l - } - return -1, label.Label{} -} - -func convertAttributes(list label.List, index int) *wire.Attributes { - index, l := skipToValidLabel(list, index) - if !l.Valid() { - return nil - } - attributes := make(map[string]wire.Attribute) - for { - if l.Valid() { - attributes[l.Key().Name()] = convertAttribute(l) - } - index++ - if !list.Valid(index) { - return &wire.Attributes{AttributeMap: attributes} - } - l = list.Label(index) - } -} - -func convertAttribute(l label.Label) wire.Attribute { - switch key := l.Key().(type) { - case *keys.Int: - return wire.IntAttribute{IntValue: int64(key.From(l))} - case *keys.Int8: - return wire.IntAttribute{IntValue: int64(key.From(l))} - case *keys.Int16: - return wire.IntAttribute{IntValue: int64(key.From(l))} - case *keys.Int32: - return wire.IntAttribute{IntValue: int64(key.From(l))} - case *keys.Int64: - return wire.IntAttribute{IntValue: int64(key.From(l))} - case *keys.UInt: - return wire.IntAttribute{IntValue: int64(key.From(l))} - case *keys.UInt8: - return wire.IntAttribute{IntValue: int64(key.From(l))} - case *keys.UInt16: - return wire.IntAttribute{IntValue: int64(key.From(l))} - case *keys.UInt32: - return wire.IntAttribute{IntValue: int64(key.From(l))} - case *keys.UInt64: - return wire.IntAttribute{IntValue: int64(key.From(l))} - case *keys.Float32: - return wire.DoubleAttribute{DoubleValue: float64(key.From(l))} - case *keys.Float64: - return wire.DoubleAttribute{DoubleValue: key.From(l)} - case *keys.Boolean: - return wire.BoolAttribute{BoolValue: key.From(l)} - case *keys.String: - return wire.StringAttribute{StringValue: toTruncatableString(key.From(l))} - case *keys.Error: - return wire.StringAttribute{StringValue: toTruncatableString(key.From(l).Error())} - case *keys.Value: - return wire.StringAttribute{StringValue: toTruncatableString(fmt.Sprint(key.From(l)))} - default: - return wire.StringAttribute{StringValue: toTruncatableString(fmt.Sprintf("%T", key))} - } -} - -func convertEvents(events []core.Event) *wire.TimeEvents { - //TODO: MessageEvents? 
- result := make([]wire.TimeEvent, len(events)) - for i, event := range events { - result[i] = convertEvent(event) - } - return &wire.TimeEvents{TimeEvent: result} -} - -func convertEvent(ev core.Event) wire.TimeEvent { - return wire.TimeEvent{ - Time: convertTimestamp(ev.At()), - Annotation: convertAnnotation(ev), - } -} - -func getAnnotationDescription(ev core.Event) (string, int) { - l := ev.Label(0) - if l.Key() != keys.Msg { - return "", 0 - } - if msg := keys.Msg.From(l); msg != "" { - return msg, 1 - } - l = ev.Label(1) - if l.Key() != keys.Err { - return "", 1 - } - if err := keys.Err.From(l); err != nil { - return err.Error(), 2 - } - return "", 2 -} - -func convertAnnotation(ev core.Event) *wire.Annotation { - description, index := getAnnotationDescription(ev) - if _, l := skipToValidLabel(ev, index); !l.Valid() && description == "" { - return nil - } - return &wire.Annotation{ - Description: toTruncatableString(description), - Attributes: convertAttributes(ev, index), - } -} diff --git a/internal/event/export/ocagent/ocagent_test.go b/internal/event/export/ocagent/ocagent_test.go deleted file mode 100644 index 38a52faede5..00000000000 --- a/internal/event/export/ocagent/ocagent_test.go +++ /dev/null @@ -1,210 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ocagent_test - -import ( - "bytes" - "context" - "encoding/json" - "fmt" - "io" - "net/http" - "sync" - "testing" - "time" - - "golang.org/x/tools/internal/event" - "golang.org/x/tools/internal/event/core" - "golang.org/x/tools/internal/event/export" - "golang.org/x/tools/internal/event/export/metric" - "golang.org/x/tools/internal/event/export/ocagent" - "golang.org/x/tools/internal/event/keys" - "golang.org/x/tools/internal/event/label" -) - -const testNodeStr = `{ - "node":{ - "identifier":{ - "host_name":"tester", - "pid":1, - "start_timestamp":"1970-01-01T00:00:00Z" - }, - "library_info":{ - "language":4, - "exporter_version":"0.0.1", - "core_library_version":"x/tools" - }, - "service_info":{ - "name":"ocagent-tests" - } - },` - -var ( - keyDB = keys.NewString("db", "the database name") - keyMethod = keys.NewString("method", "a metric grouping key") - keyRoute = keys.NewString("route", "another metric grouping key") - - key1DB = keys.NewString("1_db", "A test string key") - - key2aAge = keys.NewFloat64("2a_age", "A test float64 key") - key2bTTL = keys.NewFloat32("2b_ttl", "A test float32 key") - key2cExpiryMS = keys.NewFloat64("2c_expiry_ms", "A test float64 key") - - key3aRetry = keys.NewBoolean("3a_retry", "A test boolean key") - key3bStale = keys.NewBoolean("3b_stale", "Another test boolean key") - - key4aMax = keys.NewInt("4a_max", "A test int key") - key4bOpcode = keys.NewInt8("4b_opcode", "A test int8 key") - key4cBase = keys.NewInt16("4c_base", "A test int16 key") - key4eChecksum = keys.NewInt32("4e_checksum", "A test int32 key") - key4fMode = keys.NewInt64("4f_mode", "A test int64 key") - - key5aMin = keys.NewUInt("5a_min", "A test uint key") - key5bMix = keys.NewUInt8("5b_mix", "A test uint8 key") - key5cPort = keys.NewUInt16("5c_port", "A test uint16 key") - key5dMinHops = keys.NewUInt32("5d_min_hops", "A test uint32 key") - key5eMaxHops = keys.NewUInt64("5e_max_hops", "A test uint64 key") - - recursiveCalls = keys.NewInt64("recursive_calls", "Number of recursive calls") - bytesIn = keys.NewInt64("bytes_in", "Number of bytes in") //, unit.Bytes) - latencyMs = 
keys.NewFloat64("latency", "The latency in milliseconds") //, unit.Milliseconds) - - metricLatency = metric.HistogramFloat64{ - Name: "latency_ms", - Description: "The latency of calls in milliseconds", - Keys: []label.Key{keyMethod, keyRoute}, - Buckets: []float64{0, 5, 10, 25, 50}, - } - - metricBytesIn = metric.HistogramInt64{ - Name: "latency_ms", - Description: "The latency of calls in milliseconds", - Keys: []label.Key{keyMethod, keyRoute}, - Buckets: []int64{0, 10, 50, 100, 500, 1000, 2000}, - } - - metricRecursiveCalls = metric.Scalar{ - Name: "latency_ms", - Description: "The latency of calls in milliseconds", - Keys: []label.Key{keyMethod, keyRoute}, - } -) - -type testExporter struct { - ocagent *ocagent.Exporter - sent fakeSender -} - -func registerExporter() *testExporter { - exporter := &testExporter{} - cfg := ocagent.Config{ - Host: "tester", - Process: 1, - Service: "ocagent-tests", - Client: &http.Client{Transport: &exporter.sent}, - } - cfg.Start, _ = time.Parse(time.RFC3339Nano, "1970-01-01T00:00:00Z") - exporter.ocagent = ocagent.Connect(&cfg) - - metrics := metric.Config{} - metricLatency.Record(&metrics, latencyMs) - metricBytesIn.Record(&metrics, bytesIn) - metricRecursiveCalls.SumInt64(&metrics, recursiveCalls) - - e := exporter.ocagent.ProcessEvent - e = metrics.Exporter(e) - e = spanFixer(e) - e = export.Spans(e) - e = export.Labels(e) - e = timeFixer(e) - event.SetExporter(e) - return exporter -} - -func timeFixer(output event.Exporter) event.Exporter { - start, _ := time.Parse(time.RFC3339Nano, "1970-01-01T00:00:30Z") - at, _ := time.Parse(time.RFC3339Nano, "1970-01-01T00:00:40Z") - end, _ := time.Parse(time.RFC3339Nano, "1970-01-01T00:00:50Z") - return func(ctx context.Context, ev core.Event, lm label.Map) context.Context { - switch { - case event.IsStart(ev): - ev = core.CloneEvent(ev, start) - case event.IsEnd(ev): - ev = core.CloneEvent(ev, end) - default: - ev = core.CloneEvent(ev, at) - } - return output(ctx, ev, lm) - } -} - -func spanFixer(output event.Exporter) event.Exporter { - return func(ctx context.Context, ev core.Event, lm label.Map) context.Context { - if event.IsStart(ev) { - span := export.GetSpan(ctx) - span.ID = export.SpanContext{} - } - return output(ctx, ev, lm) - } -} - -func (e *testExporter) Output(route string) []byte { - e.ocagent.Flush() - return e.sent.get(route) -} - -func checkJSON(t *testing.T, got, want []byte) { - // compare the compact form, to allow for formatting differences - g := &bytes.Buffer{} - if err := json.Compact(g, got); err != nil { - t.Fatal(err) - } - w := &bytes.Buffer{} - if err := json.Compact(w, want); err != nil { - t.Fatal(err) - } - if g.String() != w.String() { - t.Fatalf("Got:\n%s\nWant:\n%s", g, w) - } -} - -type fakeSender struct { - mu sync.Mutex - data map[string][]byte -} - -func (s *fakeSender) get(route string) []byte { - s.mu.Lock() - defer s.mu.Unlock() - data, found := s.data[route] - if found { - delete(s.data, route) - } - return data -} - -func (s *fakeSender) RoundTrip(req *http.Request) (*http.Response, error) { - s.mu.Lock() - defer s.mu.Unlock() - if s.data == nil { - s.data = make(map[string][]byte) - } - data, err := io.ReadAll(req.Body) - if err != nil { - return nil, err - } - path := req.URL.EscapedPath() - if _, found := s.data[path]; found { - return nil, fmt.Errorf("duplicate delivery to %v", path) - } - s.data[path] = data - return &http.Response{ - Status: "200 OK", - StatusCode: 200, - Proto: "HTTP/1.0", - ProtoMajor: 1, - ProtoMinor: 0, - }, nil -} diff --git 
a/internal/event/export/ocagent/trace_test.go b/internal/event/export/ocagent/trace_test.go deleted file mode 100644 index 99def34d149..00000000000 --- a/internal/event/export/ocagent/trace_test.go +++ /dev/null @@ -1,158 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package ocagent_test - -import ( - "context" - "errors" - "testing" - - "golang.org/x/tools/internal/event" -) - -func TestTrace(t *testing.T) { - exporter := registerExporter() - const prefix = testNodeStr + ` - "spans":[{ - "trace_id":"AAAAAAAAAAAAAAAAAAAAAA==", - "span_id":"AAAAAAAAAAA=", - "parent_span_id":"AAAAAAAAAAA=", - "name":{"value":"event span"}, - "start_time":"1970-01-01T00:00:30Z", - "end_time":"1970-01-01T00:00:50Z", - "time_events":{ -` - const suffix = ` - }, - "same_process_as_parent_span":true - }] -}` - - tests := []struct { - name string - run func(ctx context.Context) - want string - }{ - { - name: "no labels", - run: func(ctx context.Context) { - event.Label(ctx) - }, - want: prefix + ` - "timeEvent":[{"time":"1970-01-01T00:00:40Z"}] - ` + suffix, - }, - { - name: "description no error", - run: func(ctx context.Context) { - event.Log(ctx, "cache miss", keyDB.Of("godb")) - }, - want: prefix + `"timeEvent":[{"time":"1970-01-01T00:00:40Z","annotation":{ -"description": { "value": "cache miss" }, -"attributes": { - "attributeMap": { - "db": { "stringValue": { "value": "godb" } } - } -} -}}]` + suffix, - }, - - { - name: "description and error", - run: func(ctx context.Context) { - event.Error(ctx, "cache miss", - errors.New("no network connectivity"), - keyDB.Of("godb"), - ) - }, - want: prefix + `"timeEvent":[{"time":"1970-01-01T00:00:40Z","annotation":{ -"description": { "value": "cache miss" }, -"attributes": { - "attributeMap": { - "db": { "stringValue": { "value": "godb" } }, - "error": { "stringValue": { "value": "no network connectivity" } } - } -} -}}]` + suffix, - }, - { - name: "no description, but error", - run: func(ctx context.Context) { - event.Error(ctx, "", - errors.New("no network connectivity"), - keyDB.Of("godb"), - ) - }, - want: prefix + `"timeEvent":[{"time":"1970-01-01T00:00:40Z","annotation":{ -"description": { "value": "no network connectivity" }, -"attributes": { - "attributeMap": { - "db": { "stringValue": { "value": "godb" } } - } -} -}}]` + suffix, - }, - { - name: "enumerate all attribute types", - run: func(ctx context.Context) { - event.Log(ctx, "cache miss", - key1DB.Of("godb"), - - key2aAge.Of(0.456), // Constant converted into "float64" - key2bTTL.Of(float32(5000)), - key2cExpiryMS.Of(float64(1e3)), - - key3aRetry.Of(false), - key3bStale.Of(true), - - key4aMax.Of(0x7fff), // Constant converted into "int" - key4bOpcode.Of(int8(0x7e)), - key4cBase.Of(int16(1<<9)), - key4eChecksum.Of(int32(0x11f7e294)), - key4fMode.Of(int64(0644)), - - key5aMin.Of(uint(1)), - key5bMix.Of(uint8(44)), - key5cPort.Of(uint16(55678)), - key5dMinHops.Of(uint32(1<<9)), - key5eMaxHops.Of(uint64(0xffffff)), - ) - }, - want: prefix + `"timeEvent":[{"time":"1970-01-01T00:00:40Z","annotation":{ -"description": { "value": "cache miss" }, -"attributes": { - "attributeMap": { - "1_db": { "stringValue": { "value": "godb" } }, - "2a_age": { "doubleValue": 0.456 }, - "2b_ttl": { "doubleValue": 5000 }, - "2c_expiry_ms": { "doubleValue": 1000 }, - "3a_retry": {}, - "3b_stale": { "boolValue": true }, - "4a_max": { "intValue": 32767 }, - "4b_opcode": { "intValue": 126 }, - "4c_base": { 
"intValue": 512 }, - "4e_checksum": { "intValue": 301458068 }, - "4f_mode": { "intValue": 420 }, - "5a_min": { "intValue": 1 }, - "5b_mix": { "intValue": 44 }, - "5c_port": { "intValue": 55678 }, - "5d_min_hops": { "intValue": 512 }, - "5e_max_hops": { "intValue": 16777215 } - } -} -}}]` + suffix, - }, - } - ctx := context.TODO() - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - ctx, done := event.Start(ctx, "event span") - tt.run(ctx) - done() - got := exporter.Output("/v1/trace") - checkJSON(t, got, []byte(tt.want)) - }) - } -} diff --git a/internal/event/export/ocagent/wire/common.go b/internal/event/export/ocagent/wire/common.go deleted file mode 100644 index f22b535654c..00000000000 --- a/internal/event/export/ocagent/wire/common.go +++ /dev/null @@ -1,101 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package wire - -// This file holds common ocagent types - -type Node struct { - Identifier *ProcessIdentifier `json:"identifier,omitempty"` - LibraryInfo *LibraryInfo `json:"library_info,omitempty"` - ServiceInfo *ServiceInfo `json:"service_info,omitempty"` - Attributes map[string]string `json:"attributes,omitempty"` -} - -type Resource struct { - Type string `json:"type,omitempty"` - Labels map[string]string `json:"labels,omitempty"` -} - -type TruncatableString struct { - Value string `json:"value,omitempty"` - TruncatedByteCount int32 `json:"truncated_byte_count,omitempty"` -} - -type Attributes struct { - AttributeMap map[string]Attribute `json:"attributeMap,omitempty"` - DroppedAttributesCount int32 `json:"dropped_attributes_count,omitempty"` -} - -type StringAttribute struct { - StringValue *TruncatableString `json:"stringValue,omitempty"` -} - -type IntAttribute struct { - IntValue int64 `json:"intValue,omitempty"` -} - -type BoolAttribute struct { - BoolValue bool `json:"boolValue,omitempty"` -} - -type DoubleAttribute struct { - DoubleValue float64 `json:"doubleValue,omitempty"` -} - -type Attribute interface { - labelAttribute() -} - -func (StringAttribute) labelAttribute() {} -func (IntAttribute) labelAttribute() {} -func (BoolAttribute) labelAttribute() {} -func (DoubleAttribute) labelAttribute() {} - -type StackTrace struct { - StackFrames *StackFrames `json:"stack_frames,omitempty"` - StackTraceHashID uint64 `json:"stack_trace_hash_id,omitempty"` -} - -type StackFrames struct { - Frame []*StackFrame `json:"frame,omitempty"` - DroppedFramesCount int32 `json:"dropped_frames_count,omitempty"` -} - -type StackFrame struct { - FunctionName *TruncatableString `json:"function_name,omitempty"` - OriginalFunctionName *TruncatableString `json:"original_function_name,omitempty"` - FileName *TruncatableString `json:"file_name,omitempty"` - LineNumber int64 `json:"line_number,omitempty"` - ColumnNumber int64 `json:"column_number,omitempty"` - LoadModule *Module `json:"load_module,omitempty"` - SourceVersion *TruncatableString `json:"source_version,omitempty"` -} - -type Module struct { - Module *TruncatableString `json:"module,omitempty"` - BuildID *TruncatableString `json:"build_id,omitempty"` -} - -type ProcessIdentifier struct { - HostName string `json:"host_name,omitempty"` - Pid uint32 `json:"pid,omitempty"` - StartTimestamp Timestamp `json:"start_timestamp,omitempty"` -} - -type LibraryInfo struct { - Language Language `json:"language,omitempty"` - ExporterVersion string `json:"exporter_version,omitempty"` - CoreLibraryVersion string 
`json:"core_library_version,omitempty"` -} - -type Language int32 - -const ( - LanguageGo Language = 4 -) - -type ServiceInfo struct { - Name string `json:"name,omitempty"` -} diff --git a/internal/event/export/ocagent/wire/core.go b/internal/event/export/ocagent/wire/core.go deleted file mode 100644 index 95c05d66906..00000000000 --- a/internal/event/export/ocagent/wire/core.go +++ /dev/null @@ -1,17 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package wire - -// This file contains type that match core proto types - -type Timestamp = string - -type Int64Value struct { - Value int64 `json:"value,omitempty"` -} - -type DoubleValue struct { - Value float64 `json:"value,omitempty"` -} diff --git a/internal/event/export/ocagent/wire/metrics.go b/internal/event/export/ocagent/wire/metrics.go deleted file mode 100644 index 6cb58943c00..00000000000 --- a/internal/event/export/ocagent/wire/metrics.go +++ /dev/null @@ -1,204 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package wire - -import ( - "encoding/json" - "fmt" -) - -type ExportMetricsServiceRequest struct { - Node *Node `json:"node,omitempty"` - Metrics []*Metric `json:"metrics,omitempty"` - Resource *Resource `json:"resource,omitempty"` -} - -type Metric struct { - MetricDescriptor *MetricDescriptor `json:"metric_descriptor,omitempty"` - Timeseries []*TimeSeries `json:"timeseries,omitempty"` - Resource *Resource `json:"resource,omitempty"` -} - -type MetricDescriptor struct { - Name string `json:"name,omitempty"` - Description string `json:"description,omitempty"` - Unit string `json:"unit,omitempty"` - Type MetricDescriptor_Type `json:"type,omitempty"` - LabelKeys []*LabelKey `json:"label_keys,omitempty"` -} - -type MetricDescriptor_Type int32 - -const ( - MetricDescriptor_UNSPECIFIED MetricDescriptor_Type = 0 - MetricDescriptor_GAUGE_INT64 MetricDescriptor_Type = 1 - MetricDescriptor_GAUGE_DOUBLE MetricDescriptor_Type = 2 - MetricDescriptor_GAUGE_DISTRIBUTION MetricDescriptor_Type = 3 - MetricDescriptor_CUMULATIVE_INT64 MetricDescriptor_Type = 4 - MetricDescriptor_CUMULATIVE_DOUBLE MetricDescriptor_Type = 5 - MetricDescriptor_CUMULATIVE_DISTRIBUTION MetricDescriptor_Type = 6 - MetricDescriptor_SUMMARY MetricDescriptor_Type = 7 -) - -type LabelKey struct { - Key string `json:"key,omitempty"` - Description string `json:"description,omitempty"` -} - -type TimeSeries struct { - StartTimestamp *Timestamp `json:"start_timestamp,omitempty"` - LabelValues []*LabelValue `json:"label_values,omitempty"` - Points []*Point `json:"points,omitempty"` -} - -type LabelValue struct { - Value string `json:"value,omitempty"` - HasValue bool `json:"has_value,omitempty"` -} - -type Point struct { - Timestamp *Timestamp `json:"timestamp,omitempty"` - Value PointValue `json:"value,omitempty"` -} - -type PointInt64Value struct { - Int64Value int64 `json:"int64Value,omitempty"` -} - -// MarshalJSON creates JSON formatted the same way as jsonpb so that the -// OpenCensus service can correctly determine the underlying value type. 
-// This custom MarshalJSON exists because, -// by default *Point is JSON marshalled as: -// -// {"value": {"int64Value": 1}} -// -// but it should be marshalled as: -// -// {"int64Value": 1} -func (p *Point) MarshalJSON() ([]byte, error) { - if p == nil { - return []byte("null"), nil - } - - switch d := p.Value.(type) { - case PointInt64Value: - return json.Marshal(&struct { - Timestamp *Timestamp `json:"timestamp,omitempty"` - Value int64 `json:"int64Value,omitempty"` - }{ - Timestamp: p.Timestamp, - Value: d.Int64Value, - }) - case PointDoubleValue: - return json.Marshal(&struct { - Timestamp *Timestamp `json:"timestamp,omitempty"` - Value float64 `json:"doubleValue,omitempty"` - }{ - Timestamp: p.Timestamp, - Value: d.DoubleValue, - }) - case PointDistributionValue: - return json.Marshal(&struct { - Timestamp *Timestamp `json:"timestamp,omitempty"` - Value *DistributionValue `json:"distributionValue,omitempty"` - }{ - Timestamp: p.Timestamp, - Value: d.DistributionValue, - }) - default: - return nil, fmt.Errorf("unknown point type %T", p.Value) - } -} - -type PointDoubleValue struct { - DoubleValue float64 `json:"doubleValue,omitempty"` -} - -type PointDistributionValue struct { - DistributionValue *DistributionValue `json:"distributionValue,omitempty"` -} - -type PointSummaryValue struct { - SummaryValue *SummaryValue `json:"summaryValue,omitempty"` -} - -type PointValue interface { - labelPointValue() -} - -func (PointInt64Value) labelPointValue() {} -func (PointDoubleValue) labelPointValue() {} -func (PointDistributionValue) labelPointValue() {} -func (PointSummaryValue) labelPointValue() {} - -type DistributionValue struct { - Count int64 `json:"count,omitempty"` - Sum float64 `json:"sum,omitempty"` - SumOfSquaredDeviation float64 `json:"sum_of_squared_deviation,omitempty"` - BucketOptions BucketOptions `json:"bucket_options,omitempty"` - Buckets []*Bucket `json:"buckets,omitempty"` -} - -type BucketOptionsExplicit struct { - Bounds []float64 `json:"bounds,omitempty"` -} - -type BucketOptions interface { - labelBucketOptions() -} - -func (*BucketOptionsExplicit) labelBucketOptions() {} - -var _ BucketOptions = (*BucketOptionsExplicit)(nil) -var _ json.Marshaler = (*BucketOptionsExplicit)(nil) - -// Declared for the purpose of custom JSON marshaling without cycles. -type bucketOptionsExplicitAlias BucketOptionsExplicit - -// MarshalJSON creates JSON formatted the same way as jsonpb so that the -// OpenCensus service can correctly determine the underlying value type. 
-// This custom MarshalJSON exists because, -// by default BucketOptionsExplicit is JSON marshalled as: -// -// {"bounds":[1,2,3]} -// -// but it should be marshalled as: -// -// {"explicit":{"bounds":[1,2,3]}} -func (be *BucketOptionsExplicit) MarshalJSON() ([]byte, error) { - return json.Marshal(&struct { - Explicit *bucketOptionsExplicitAlias `json:"explicit,omitempty"` - }{ - Explicit: (*bucketOptionsExplicitAlias)(be), - }) -} - -type Bucket struct { - Count int64 `json:"count,omitempty"` - Exemplar *Exemplar `json:"exemplar,omitempty"` -} - -type Exemplar struct { - Value float64 `json:"value,omitempty"` - Timestamp *Timestamp `json:"timestamp,omitempty"` - Attachments map[string]string `json:"attachments,omitempty"` -} - -type SummaryValue struct { - Count *Int64Value `json:"count,omitempty"` - Sum *DoubleValue `json:"sum,omitempty"` - Snapshot *Snapshot `json:"snapshot,omitempty"` -} - -type Snapshot struct { - Count *Int64Value `json:"count,omitempty"` - Sum *DoubleValue `json:"sum,omitempty"` - PercentileValues []*SnapshotValueAtPercentile `json:"percentile_values,omitempty"` -} - -type SnapshotValueAtPercentile struct { - Percentile float64 `json:"percentile,omitempty"` - Value float64 `json:"value,omitempty"` -} diff --git a/internal/event/export/ocagent/wire/metrics_test.go b/internal/event/export/ocagent/wire/metrics_test.go deleted file mode 100644 index 34247ad6332..00000000000 --- a/internal/event/export/ocagent/wire/metrics_test.go +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package wire - -import ( - "reflect" - "testing" -) - -func TestMarshalJSON(t *testing.T) { - tests := []struct { - name string - pt *Point - want string - }{ - { - "PointInt64", - &Point{ - Value: PointInt64Value{ - Int64Value: 5, - }, - }, - `{"int64Value":5}`, - }, - { - "PointDouble", - &Point{ - Value: PointDoubleValue{ - DoubleValue: 3.14, - }, - }, - `{"doubleValue":3.14}`, - }, - { - "PointDistribution", - &Point{ - Value: PointDistributionValue{ - DistributionValue: &DistributionValue{ - Count: 3, - Sum: 10, - Buckets: []*Bucket{ - { - Count: 1, - }, - { - Count: 2, - }, - }, - BucketOptions: &BucketOptionsExplicit{ - Bounds: []float64{ - 0, 5, - }, - }, - }, - }, - }, - `{"distributionValue":{"count":3,"sum":10,"bucket_options":{"explicit":{"bounds":[0,5]}},"buckets":[{"count":1},{"count":2}]}}`, - }, - { - "nil point", - nil, - `null`, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - buf, err := tt.pt.MarshalJSON() - if err != nil { - t.Fatalf("Got:\n%v\nWant:\n%v", err, nil) - } - got := string(buf) - if !reflect.DeepEqual(got, tt.want) { - t.Fatalf("Got:\n%s\nWant:\n%s", got, tt.want) - } - }) - } -} diff --git a/internal/event/export/ocagent/wire/trace.go b/internal/event/export/ocagent/wire/trace.go deleted file mode 100644 index 88856673a18..00000000000 --- a/internal/event/export/ocagent/wire/trace.go +++ /dev/null @@ -1,112 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package wire - -type ExportTraceServiceRequest struct { - Node *Node `json:"node,omitempty"` - Spans []*Span `json:"spans,omitempty"` - Resource *Resource `json:"resource,omitempty"` -} - -type Span struct { - TraceID []byte `json:"trace_id,omitempty"` - SpanID []byte `json:"span_id,omitempty"` - TraceState *TraceState `json:"tracestate,omitempty"` - ParentSpanID []byte `json:"parent_span_id,omitempty"` - Name *TruncatableString `json:"name,omitempty"` - Kind SpanKind `json:"kind,omitempty"` - StartTime Timestamp `json:"start_time,omitempty"` - EndTime Timestamp `json:"end_time,omitempty"` - Attributes *Attributes `json:"attributes,omitempty"` - StackTrace *StackTrace `json:"stack_trace,omitempty"` - TimeEvents *TimeEvents `json:"time_events,omitempty"` - Links *Links `json:"links,omitempty"` - Status *Status `json:"status,omitempty"` - Resource *Resource `json:"resource,omitempty"` - SameProcessAsParentSpan bool `json:"same_process_as_parent_span,omitempty"` - ChildSpanCount bool `json:"child_span_count,omitempty"` -} - -type TraceState struct { - Entries []*TraceStateEntry `json:"entries,omitempty"` -} - -type TraceStateEntry struct { - Key string `json:"key,omitempty"` - Value string `json:"value,omitempty"` -} - -type SpanKind int32 - -const ( - UnspecifiedSpanKind SpanKind = 0 - ServerSpanKind SpanKind = 1 - ClientSpanKind SpanKind = 2 -) - -type TimeEvents struct { - TimeEvent []TimeEvent `json:"timeEvent,omitempty"` - DroppedAnnotationsCount int32 `json:"dropped_annotations_count,omitempty"` - DroppedMessageEventsCount int32 `json:"dropped_message_events_count,omitempty"` -} - -type TimeEvent struct { - Time Timestamp `json:"time,omitempty"` - MessageEvent *MessageEvent `json:"messageEvent,omitempty"` - Annotation *Annotation `json:"annotation,omitempty"` -} - -type Annotation struct { - Description *TruncatableString `json:"description,omitempty"` - Attributes *Attributes `json:"attributes,omitempty"` -} - -type MessageEvent struct { - Type MessageEventType `json:"type,omitempty"` - ID uint64 `json:"id,omitempty"` - UncompressedSize uint64 `json:"uncompressed_size,omitempty"` - CompressedSize uint64 `json:"compressed_size,omitempty"` -} - -type MessageEventType int32 - -const ( - UnspecifiedMessageEvent MessageEventType = iota - SentMessageEvent - ReceivedMessageEvent -) - -type TimeEventValue interface { - labelTimeEventValue() -} - -func (Annotation) labelTimeEventValue() {} -func (MessageEvent) labelTimeEventValue() {} - -type Links struct { - Link []*Link `json:"link,omitempty"` - DroppedLinksCount int32 `json:"dropped_links_count,omitempty"` -} - -type Link struct { - TraceID []byte `json:"trace_id,omitempty"` - SpanID []byte `json:"span_id,omitempty"` - Type LinkType `json:"type,omitempty"` - Attributes *Attributes `json:"attributes,omitempty"` - TraceState *TraceState `json:"tracestate,omitempty"` -} - -type LinkType int32 - -const ( - UnspecifiedLinkType LinkType = 0 - ChildLinkType LinkType = 1 - ParentLinkType LinkType = 2 -) - -type Status struct { - Code int32 `json:"code,omitempty"` - Message string `json:"message,omitempty"` -} From ff03c59f3ffcb691d1205f8f2b57bcf992652358 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Thu, 27 Feb 2025 16:36:45 -0500 Subject: [PATCH 080/270] gopls/internal/analysis/modernize: append -> bytes.Clone This CL causes appendclipped to offer bytes.Clone in place of slices.Clone where the file already imports bytes but not slices. 
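For example (adapted from the test data added in this CL), in a file that
already imports "bytes" but not "slices":

    print(append([]byte{}, b...)) // before: flagged by appendclipped
    print(bytes.Clone(b))         // after: the offered fix

bytes.Clone and slices.Clone behave identically for a []byte; preferring the
package that is already imported keeps the file consistent and avoids adding a
new import.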
+ test Updates golang/go#70815 Change-Id: I049698c3d5b8acf46abaa42ab34d72548a012a1a Reviewed-on: https://go-review.googlesource.com/c/tools/+/653455 LUCI-TryBot-Result: Go LUCI Commit-Queue: Alan Donovan Reviewed-by: Robert Findley Auto-Submit: Alan Donovan --- gopls/internal/analysis/modernize/slices.go | 33 ++++++++++++++++--- .../testdata/src/appendclipped/bytesclone.go | 11 +++++++ .../src/appendclipped/bytesclone.go.golden | 11 +++++++ 3 files changed, 50 insertions(+), 5 deletions(-) create mode 100644 gopls/internal/analysis/modernize/testdata/src/appendclipped/bytesclone.go create mode 100644 gopls/internal/analysis/modernize/testdata/src/appendclipped/bytesclone.go.golden diff --git a/gopls/internal/analysis/modernize/slices.go b/gopls/internal/analysis/modernize/slices.go index bdab9dea649..9cca3e98156 100644 --- a/gopls/internal/analysis/modernize/slices.go +++ b/gopls/internal/analysis/modernize/slices.go @@ -12,6 +12,7 @@ import ( "go/ast" "go/types" "slices" + "strconv" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" @@ -27,6 +28,10 @@ import ( // with a call to go1.21's slices.Concat(base, a, b, c), or simpler // replacements such as slices.Clone(a) in degenerate cases. // +// We offer bytes.Clone in preference to slices.Clone where +// appropriate, if the package already imports "bytes"; +// their behaviors are identical. +// // The base expression must denote a clipped slice (see [isClipped] // for definition), otherwise the replacement might eliminate intended // side effects to the base slice's array. @@ -41,7 +46,8 @@ import ( // The fix does not always preserve nilness the of base slice when the // addends (a, b, c) are all empty. func appendclipped(pass *analysis.Pass) { - if pass.Pkg.Path() == "slices" { + switch pass.Pkg.Path() { + case "slices", "bytes": return } @@ -94,15 +100,32 @@ func appendclipped(pass *analysis.Pass) { } } - // append(zerocap, s...) -> slices.Clone(s) - _, prefix, importEdits := analysisinternal.AddImport(info, file, "slices", "slices", "Clone", call.Pos()) + // If the slice type is []byte, and the file imports + // "bytes" but not "slices", prefer the (behaviorally + // identical) bytes.Clone for local consistency. + // https://go.dev/issue/70815#issuecomment-2671572984 + fileImports := func(path string) bool { + return slices.ContainsFunc(file.Imports, func(spec *ast.ImportSpec) bool { + value, _ := strconv.Unquote(spec.Path.Value) + return value == path + }) + } + clonepkg := cond( + types.Identical(info.TypeOf(call), byteSliceType) && + !fileImports("slices") && fileImports("bytes"), + "bytes", + "slices") + + // append(zerocap, s...) 
-> slices.Clone(s) or bytes.Clone(s) + _, prefix, importEdits := analysisinternal.AddImport(info, file, clonepkg, clonepkg, "Clone", call.Pos()) + message := fmt.Sprintf("Replace append with %s.Clone", clonepkg) pass.Report(analysis.Diagnostic{ Pos: call.Pos(), End: call.End(), Category: "slicesclone", - Message: "Replace append with slices.Clone", + Message: message, SuggestedFixes: []analysis.SuggestedFix{{ - Message: "Replace append with slices.Clone", + Message: message, TextEdits: append(importEdits, []analysis.TextEdit{{ Pos: call.Pos(), End: call.End(), diff --git a/gopls/internal/analysis/modernize/testdata/src/appendclipped/bytesclone.go b/gopls/internal/analysis/modernize/testdata/src/appendclipped/bytesclone.go new file mode 100644 index 00000000000..6425211b924 --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/appendclipped/bytesclone.go @@ -0,0 +1,11 @@ +package appendclipped + +import ( + "bytes" +) + +var _ bytes.Buffer + +func _(b []byte) { + print(append([]byte{}, b...)) // want "Replace append with bytes.Clone" +} diff --git a/gopls/internal/analysis/modernize/testdata/src/appendclipped/bytesclone.go.golden b/gopls/internal/analysis/modernize/testdata/src/appendclipped/bytesclone.go.golden new file mode 100644 index 00000000000..f49be6156b2 --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/appendclipped/bytesclone.go.golden @@ -0,0 +1,11 @@ +package appendclipped + +import ( + "bytes" +) + +var _ bytes.Buffer + +func _(b []byte) { + print(bytes.Clone(b)) // want "Replace append with bytes.Clone" +} From 66eb306a364a3fd7c8ebb427be1425a3fd56262d Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Tue, 25 Feb 2025 17:03:33 -0500 Subject: [PATCH 081/270] Revert "internal/settings: drop "annotations" setting" This reverts commit 5fe60fd (CL 639835), which removed the ability for users to customize the subset of "annotations" (a misnomer for categories of compiler optimization details). Apparently some users were relying on this experimental feature. Minor tweaks were made to comments but not to logic. Fixes golang/go#71888 Change-Id: I3d0227f841582a2cb29521b9b999546226b670ef Reviewed-on: https://go-review.googlesource.com/c/tools/+/652595 LUCI-TryBot-Result: Go LUCI Reviewed-by: Robert Findley Auto-Submit: Alan Donovan --- gopls/doc/settings.md | 24 ++++++++ gopls/internal/doc/api.json | 35 +++++++++++ gopls/internal/golang/compileropt.go | 72 +++++++++++++++------- gopls/internal/settings/default.go | 6 ++ gopls/internal/settings/settings.go | 77 +++++++++++++++++++++++- gopls/internal/settings/settings_test.go | 11 ++++ 6 files changed, 203 insertions(+), 22 deletions(-) diff --git a/gopls/doc/settings.md b/gopls/doc/settings.md index 7aeab79a575..1f4f5746524 100644 --- a/gopls/doc/settings.md +++ b/gopls/doc/settings.md @@ -355,6 +355,30 @@ These analyses are documented on Default: `false`. + +### `annotations map[enum]bool` + +annotations specifies the various kinds of compiler +optimization details that should be reported as diagnostics +when enabled for a package by the "Toggle compiler +optimization details" (`gopls.gc_details`) command. + +(Some users care only about one kind of annotation in their +profiling efforts. More importantly, in large packages, the +number of annotations can sometimes overwhelm the user +interface and exceed the per-file diagnostic limit.) + +TODO(adonovan): rename this field to CompilerOptDetail. + +Each enum must be one of: + +* `"bounds"` controls bounds checking diagnostics. 
+* `"escape"` controls diagnostics about escape choices. +* `"inline"` controls diagnostics about inlining choices. +* `"nil"` controls nil checks. + +Default: `{"bounds":true,"escape":true,"inline":true,"nil":true}`. + ### `vulncheck enum` diff --git a/gopls/internal/doc/api.json b/gopls/internal/doc/api.json index b6e53d18558..5775d0d4361 100644 --- a/gopls/internal/doc/api.json +++ b/gopls/internal/doc/api.json @@ -689,6 +689,41 @@ "Hierarchy": "ui.diagnostic", "DeprecationMessage": "" }, + { + "Name": "annotations", + "Type": "map[enum]bool", + "Doc": "annotations specifies the various kinds of compiler\noptimization details that should be reported as diagnostics\nwhen enabled for a package by the \"Toggle compiler\noptimization details\" (`gopls.gc_details`) command.\n\n(Some users care only about one kind of annotation in their\nprofiling efforts. More importantly, in large packages, the\nnumber of annotations can sometimes overwhelm the user\ninterface and exceed the per-file diagnostic limit.)\n\nTODO(adonovan): rename this field to CompilerOptDetail.\n", + "EnumKeys": { + "ValueType": "bool", + "Keys": [ + { + "Name": "\"bounds\"", + "Doc": "`\"bounds\"` controls bounds checking diagnostics.\n", + "Default": "true" + }, + { + "Name": "\"escape\"", + "Doc": "`\"escape\"` controls diagnostics about escape choices.\n", + "Default": "true" + }, + { + "Name": "\"inline\"", + "Doc": "`\"inline\"` controls diagnostics about inlining choices.\n", + "Default": "true" + }, + { + "Name": "\"nil\"", + "Doc": "`\"nil\"` controls nil checks.\n", + "Default": "true" + } + ] + }, + "EnumValues": null, + "Default": "{\"bounds\":true,\"escape\":true,\"inline\":true,\"nil\":true}", + "Status": "", + "Hierarchy": "ui.diagnostic", + "DeprecationMessage": "" + }, { "Name": "vulncheck", "Type": "enum", diff --git a/gopls/internal/golang/compileropt.go b/gopls/internal/golang/compileropt.go index f9f046463f6..bcce82c123f 100644 --- a/gopls/internal/golang/compileropt.go +++ b/gopls/internal/golang/compileropt.go @@ -16,6 +16,7 @@ import ( "golang.org/x/tools/gopls/internal/cache" "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/settings" "golang.org/x/tools/internal/event" ) @@ -65,7 +66,7 @@ func CompilerOptDetails(ctx context.Context, snapshot *cache.Snapshot, pkgDir pr reports := make(map[protocol.DocumentURI][]*cache.Diagnostic) var parseError error for _, fn := range files { - uri, diagnostics, err := parseDetailsFile(fn) + uri, diagnostics, err := parseDetailsFile(fn, snapshot.Options()) if err != nil { // expect errors for all the files, save 1 parseError = err @@ -87,7 +88,7 @@ func CompilerOptDetails(ctx context.Context, snapshot *cache.Snapshot, pkgDir pr } // parseDetailsFile parses the file written by the Go compiler which contains a JSON-encoded protocol.Diagnostic. 
-func parseDetailsFile(filename string) (protocol.DocumentURI, []*cache.Diagnostic, error) { +func parseDetailsFile(filename string, options *settings.Options) (protocol.DocumentURI, []*cache.Diagnostic, error) { buf, err := os.ReadFile(filename) if err != nil { return "", nil, err @@ -118,30 +119,14 @@ func parseDetailsFile(filename string) (protocol.DocumentURI, []*cache.Diagnosti if err := dec.Decode(d); err != nil { return "", nil, err } - if d.Source != "go compiler" { - continue - } d.Tags = []protocol.DiagnosticTag{} // must be an actual slice msg := d.Code.(string) if msg != "" { - // Typical message prefixes gathered by grepping the source of - // cmd/compile for literal arguments in calls to logopt.LogOpt. - // (It is not a well defined set.) - // - // - canInlineFunction - // - cannotInlineCall - // - cannotInlineFunction - // - copy - // - escape - // - escapes - // - isInBounds - // - isSliceInBounds - // - iteration-variable-to-{heap,stack} - // - leak - // - loop-modified-{range,for} - // - nilcheck msg = fmt.Sprintf("%s(%s)", msg, d.Message) } + if !showDiagnostic(msg, d.Source, options) { + continue + } // zeroIndexedRange subtracts 1 from the line and // range, because the compiler output neglects to @@ -186,6 +171,51 @@ func parseDetailsFile(filename string) (protocol.DocumentURI, []*cache.Diagnosti return uri, diagnostics, nil } +// showDiagnostic reports whether a given diagnostic should be shown to the end +// user, given the current options. +func showDiagnostic(msg, source string, o *settings.Options) bool { + if source != "go compiler" { + return false + } + if o.Annotations == nil { + return true + } + + // The strings below were gathered by grepping the source of + // cmd/compile for literal arguments in calls to logopt.LogOpt. + // (It is not a well defined set.) 
+ // + // - canInlineFunction + // - cannotInlineCall + // - cannotInlineFunction + // - escape + // - escapes + // - isInBounds + // - isSliceInBounds + // - leak + // - nilcheck + // + // Additional ones not handled by logic below: + // - copy + // - iteration-variable-to-{heap,stack} + // - loop-modified-{range,for} + + switch { + case strings.HasPrefix(msg, "canInline") || + strings.HasPrefix(msg, "cannotInline") || + strings.HasPrefix(msg, "inlineCall"): + return o.Annotations[settings.Inline] + case strings.HasPrefix(msg, "escape") || msg == "leak": + return o.Annotations[settings.Escape] + case strings.HasPrefix(msg, "nilcheck"): + return o.Annotations[settings.Nil] + case strings.HasPrefix(msg, "isInBounds") || + strings.HasPrefix(msg, "isSliceInBounds"): + return o.Annotations[settings.Bounds] + } + return false +} + func findJSONFiles(dir string) ([]string, error) { ans := []string{} f := func(path string, fi os.FileInfo, _ error) error { diff --git a/gopls/internal/settings/default.go b/gopls/internal/settings/default.go index ebb3f1ccfae..aa81640f3e8 100644 --- a/gopls/internal/settings/default.go +++ b/gopls/internal/settings/default.go @@ -91,6 +91,12 @@ func DefaultOptions(overrides ...func(*Options)) *Options { }, UIOptions: UIOptions{ DiagnosticOptions: DiagnosticOptions{ + Annotations: map[Annotation]bool{ + Bounds: true, + Escape: true, + Inline: true, + Nil: true, + }, Vulncheck: ModeVulncheckOff, DiagnosticsDelay: 1 * time.Second, DiagnosticsTrigger: DiagnosticsOnEdit, diff --git a/gopls/internal/settings/settings.go b/gopls/internal/settings/settings.go index 11b06040181..e98bc365935 100644 --- a/gopls/internal/settings/settings.go +++ b/gopls/internal/settings/settings.go @@ -18,6 +18,23 @@ import ( "golang.org/x/tools/gopls/internal/util/frob" ) +// An Annotation is a category of Go compiler optimization diagnostic. +type Annotation string + +const ( + // Nil controls nil checks. + Nil Annotation = "nil" + + // Escape controls diagnostics about escape choices. + Escape Annotation = "escape" + + // Inline controls diagnostics about inlining choices. + Inline Annotation = "inline" + + // Bounds controls bounds checking diagnostics. + Bounds Annotation = "bounds" +) + // Options holds various configuration that affects Gopls execution, organized // by the nature or origin of the settings. // @@ -436,6 +453,19 @@ type DiagnosticOptions struct { // [Staticcheck's website](https://staticcheck.io/docs/checks/). Staticcheck bool `status:"experimental"` + // Annotations specifies the various kinds of compiler + // optimization details that should be reported as diagnostics + // when enabled for a package by the "Toggle compiler + // optimization details" (`gopls.gc_details`) command. + // + // (Some users care only about one kind of annotation in their + // profiling efforts. More importantly, in large packages, the + // number of annotations can sometimes overwhelm the user + // interface and exceed the per-file diagnostic limit.) + // + // TODO(adonovan): rename this field to CompilerOptDetail. + Annotations map[Annotation]bool + // Vulncheck enables vulnerability scanning. 
Vulncheck VulncheckMode `status:"experimental"` @@ -1124,7 +1154,7 @@ func (o *Options) setOne(name string, value any) (applied []CounterPath, _ error return setBoolMap(&o.Hints, value) case "annotations": - return nil, &SoftError{"the 'annotations' setting was removed in gopls/v0.18.0; all compiler optimization details are now shown"} + return setAnnotationMap(&o.Annotations, value) case "vulncheck": return setEnum(&o.Vulncheck, value, @@ -1420,6 +1450,51 @@ func setDuration(dest *time.Duration, value any) error { return nil } +func setAnnotationMap(dest *map[Annotation]bool, value any) ([]CounterPath, error) { + all, err := asBoolMap[string](value) + if err != nil { + return nil, err + } + var counters []CounterPath + // Default to everything enabled by default. + m := make(map[Annotation]bool) + for k, enabled := range all { + var a Annotation + cnts, err := setEnum(&a, k, + Nil, + Escape, + Inline, + Bounds) + if err != nil { + // In case of an error, process any legacy values. + switch k { + case "noEscape": + m[Escape] = false + return nil, fmt.Errorf(`"noEscape" is deprecated, set "Escape: false" instead`) + + case "noNilcheck": + m[Nil] = false + return nil, fmt.Errorf(`"noNilcheck" is deprecated, set "Nil: false" instead`) + + case "noInline": + m[Inline] = false + return nil, fmt.Errorf(`"noInline" is deprecated, set "Inline: false" instead`) + + case "noBounds": + m[Bounds] = false + return nil, fmt.Errorf(`"noBounds" is deprecated, set "Bounds: false" instead`) + + default: + return nil, err + } + } + counters = append(counters, cnts...) + m[a] = enabled + } + *dest = m + return counters, nil +} + func setBoolMap[K ~string](dest *map[K]bool, value any) ([]CounterPath, error) { m, err := asBoolMap[K](value) if err != nil { diff --git a/gopls/internal/settings/settings_test.go b/gopls/internal/settings/settings_test.go index bd9ec110874..d7a032e1938 100644 --- a/gopls/internal/settings/settings_test.go +++ b/gopls/internal/settings/settings_test.go @@ -180,6 +180,17 @@ func TestOptions_Set(t *testing.T) { return len(o.DirectoryFilters) == 0 }, }, + { + name: "annotations", + value: map[string]any{ + "Nil": false, + "noBounds": true, + }, + wantError: true, + check: func(o Options) bool { + return !o.Annotations[Nil] && !o.Annotations[Bounds] + }, + }, { name: "vulncheck", value: []any{"invalid"}, From 408d2e2cc08b50104f3e92800ce7b74e7c89daa2 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Fri, 28 Feb 2025 12:14:45 -0500 Subject: [PATCH 082/270] x/tools: remove workarounds for Go <1.23 Change-Id: I740769d6ed117bf140c9894b4464b3d3f7f326f1 Reviewed-on: https://go-review.googlesource.com/c/tools/+/653655 Auto-Submit: Alan Donovan Reviewed-by: Robert Findley LUCI-TryBot-Result: Go LUCI --- cmd/deadcode/deadcode.go | 54 +++--------------------- go/analysis/analysistest/analysistest.go | 16 ++----- go/callgraph/vta/graph.go | 7 ++- go/callgraph/vta/propagation.go | 7 ++- go/callgraph/vta/propagation_test.go | 5 +-- go/callgraph/vta/utils.go | 7 ++- go/callgraph/vta/vta.go | 10 ++--- go/ssa/builder.go | 6 ++- go/ssa/lift.go | 19 +-------- go/types/internal/play/play.go | 9 ---- gopls/internal/cache/parsego/parse.go | 34 +-------------- internal/refactor/inline/inline.go | 35 +-------------- internal/refactor/inline/util.go | 10 ----- internal/typesinternal/element_test.go | 9 ++-- internal/typesinternal/zerovalue_test.go | 23 +++++----- 15 files changed, 48 insertions(+), 203 deletions(-) diff --git a/cmd/deadcode/deadcode.go b/cmd/deadcode/deadcode.go index 0c66d07f79f..e164dc22ba8 
100644 --- a/cmd/deadcode/deadcode.go +++ b/cmd/deadcode/deadcode.go @@ -15,11 +15,13 @@ import ( "go/types" "io" "log" + "maps" "os" "path/filepath" "regexp" "runtime" "runtime/pprof" + "slices" "sort" "strings" "text/template" @@ -290,9 +292,7 @@ func main() { // Build array of jsonPackage objects. var packages []any - pkgpaths := keys(byPkgPath) - sort.Strings(pkgpaths) - for _, pkgpath := range pkgpaths { + for _, pkgpath := range slices.Sorted(maps.Keys(byPkgPath)) { if !filter.MatchString(pkgpath) { continue } @@ -303,7 +303,7 @@ func main() { // declaration order. This tends to keep related // methods such as (T).Marshal and (*T).Unmarshal // together better than sorting. - fns := keys(m) + fns := slices.Collect(maps.Keys(m)) sort.Slice(fns, func(i, j int) bool { xposn := prog.Fset.Position(fns[i].Pos()) yposn := prog.Fset.Position(fns[j].Pos()) @@ -368,7 +368,7 @@ func prettyName(fn *ssa.Function, qualified bool) string { // anonymous? if fn.Parent() != nil { format(fn.Parent()) - i := index(fn.Parent().AnonFuncs, fn) + i := slices.Index(fn.Parent().AnonFuncs, fn) fmt.Fprintf(&buf, "$%d", i+1) return } @@ -427,7 +427,7 @@ func pathSearch(roots []*ssa.Function, res *rta.Result, targets map[*ssa.Functio // Sort roots into preferred order. importsTesting := func(fn *ssa.Function) bool { isTesting := func(p *types.Package) bool { return p.Path() == "testing" } - return containsFunc(fn.Pkg.Pkg.Imports(), isTesting) + return slices.ContainsFunc(fn.Pkg.Pkg.Imports(), isTesting) } sort.Slice(roots, func(i, j int) bool { x, y := roots[i], roots[j] @@ -461,7 +461,7 @@ func pathSearch(roots []*ssa.Function, res *rta.Result, targets map[*ssa.Functio for { edge := seen[node] if edge == nil { - reverse(path) + slices.Reverse(path) return path } path = append(path, edge) @@ -565,43 +565,3 @@ type jsonPosition struct { func (p jsonPosition) String() string { return fmt.Sprintf("%s:%d:%d", p.File, p.Line, p.Col) } - -// -- from the future -- - -// TODO(adonovan): use go1.22's slices and maps packages. - -func containsFunc[S ~[]E, E any](s S, f func(E) bool) bool { - return indexFunc(s, f) >= 0 -} - -func indexFunc[S ~[]E, E any](s S, f func(E) bool) int { - for i := range s { - if f(s[i]) { - return i - } - } - return -1 -} - -func index[S ~[]E, E comparable](s S, v E) int { - for i := range s { - if v == s[i] { - return i - } - } - return -1 -} - -func reverse[S ~[]E, E any](s S) { - for i, j := 0, len(s)-1; i < j; i, j = i+1, j-1 { - s[i], s[j] = s[j], s[i] - } -} - -func keys[M ~map[K]V, K comparable, V any](m M) []K { - r := make([]K, 0, len(m)) - for k := range m { - r = append(r, k) - } - return r -} diff --git a/go/analysis/analysistest/analysistest.go b/go/analysis/analysistest/analysistest.go index 0b5cfe70bfe..143b4260346 100644 --- a/go/analysis/analysistest/analysistest.go +++ b/go/analysis/analysistest/analysistest.go @@ -7,12 +7,12 @@ package analysistest import ( "bytes" - "cmp" "fmt" "go/format" "go/token" "go/types" "log" + "maps" "os" "path/filepath" "regexp" @@ -215,7 +215,7 @@ func RunWithSuggestedFixes(t Testing, dir string, a *analysis.Analyzer, patterns // Because the checking is driven by original // filenames, there is no way to express that a fix // (e.g. extract declaration) creates a new file. - for _, filename := range sortedKeys(allFilenames) { + for _, filename := range slices.Sorted(maps.Keys(allFilenames)) { // Read the original file. 
content, err := os.ReadFile(filename) if err != nil { @@ -266,7 +266,7 @@ func RunWithSuggestedFixes(t Testing, dir string, a *analysis.Analyzer, patterns // Form #2: all suggested fixes are represented by a single file. want := ar.Comment var accumulated []diff.Edit - for _, message := range sortedKeys(fixEdits) { + for _, message := range slices.Sorted(maps.Keys(fixEdits)) { for _, fix := range fixEdits[message] { accumulated = merge(filename, message, accumulated, fix[filename]) } @@ -768,13 +768,3 @@ func sanitize(gopath, filename string) string { prefix := gopath + string(os.PathSeparator) + "src" + string(os.PathSeparator) return filepath.ToSlash(strings.TrimPrefix(filename, prefix)) } - -// TODO(adonovan): use better stuff from go1.23. -func sortedKeys[K cmp.Ordered, V any](m map[K]V) []K { - keys := make([]K, 0, len(m)) - for k := range m { - keys = append(keys, k) - } - slices.Sort(keys) - return keys -} diff --git a/go/callgraph/vta/graph.go b/go/callgraph/vta/graph.go index c13b8a5e6cb..164018708ef 100644 --- a/go/callgraph/vta/graph.go +++ b/go/callgraph/vta/graph.go @@ -633,12 +633,12 @@ func (b *builder) call(c ssa.CallInstruction) { return } - siteCallees(c, b.callees)(func(f *ssa.Function) bool { + for f := range siteCallees(c, b.callees) { addArgumentFlows(b, c, f) site, ok := c.(ssa.Value) if !ok { - return true // go or defer + continue // go or defer } results := f.Signature.Results() @@ -653,8 +653,7 @@ func (b *builder) call(c ssa.CallInstruction) { b.addInFlowEdge(resultVar{f: f, index: i}, local) } } - return true - }) + } } func addArgumentFlows(b *builder, c ssa.CallInstruction, f *ssa.Function) { diff --git a/go/callgraph/vta/propagation.go b/go/callgraph/vta/propagation.go index 6ce1ca9e322..1c4dcd2888e 100644 --- a/go/callgraph/vta/propagation.go +++ b/go/callgraph/vta/propagation.go @@ -6,6 +6,7 @@ package vta import ( "go/types" + "iter" "slices" "golang.org/x/tools/go/callgraph/vta/internal/trie" @@ -113,11 +114,9 @@ type propType struct { // the role of a map from nodes to a set of propTypes. type propTypeMap map[node]*trie.MutMap -// propTypes returns a go1.23 iterator for the propTypes associated with +// propTypes returns an iterator for the propTypes associated with // node `n` in map `ptm`. -func (ptm propTypeMap) propTypes(n node) func(yield func(propType) bool) { - // TODO: when x/tools uses go1.23, change callers to use range-over-func - // (https://go.dev/issue/65237). 
+func (ptm propTypeMap) propTypes(n node) iter.Seq[propType] { return func(yield func(propType) bool) { if types := ptm[n]; types != nil { types.M.Range(func(_ uint64, elem any) bool { diff --git a/go/callgraph/vta/propagation_test.go b/go/callgraph/vta/propagation_test.go index 3885ef201cb..bc9ca1ecde6 100644 --- a/go/callgraph/vta/propagation_test.go +++ b/go/callgraph/vta/propagation_test.go @@ -98,10 +98,9 @@ func nodeToTypeString(pMap propTypeMap) map[string]string { nodeToTypeStr := make(map[string]string) for node := range pMap { var propStrings []string - pMap.propTypes(node)(func(prop propType) bool { + for prop := range pMap.propTypes(node) { propStrings = append(propStrings, propTypeString(prop)) - return true - }) + } sort.Strings(propStrings) nodeToTypeStr[node.String()] = strings.Join(propStrings, ";") } diff --git a/go/callgraph/vta/utils.go b/go/callgraph/vta/utils.go index bbd8400ec9b..3a708f220a7 100644 --- a/go/callgraph/vta/utils.go +++ b/go/callgraph/vta/utils.go @@ -6,6 +6,7 @@ package vta import ( "go/types" + "iter" "golang.org/x/tools/go/ssa" "golang.org/x/tools/internal/typeparams" @@ -147,10 +148,8 @@ func sliceArrayElem(t types.Type) types.Type { } } -// siteCallees returns a go1.23 iterator for the callees for call site `c`. -func siteCallees(c ssa.CallInstruction, callees calleesFunc) func(yield func(*ssa.Function) bool) { - // TODO: when x/tools uses go1.23, change callers to use range-over-func - // (https://go.dev/issue/65237). +// siteCallees returns an iterator for the callees for call site `c`. +func siteCallees(c ssa.CallInstruction, callees calleesFunc) iter.Seq[*ssa.Function] { return func(yield func(*ssa.Function) bool) { for _, callee := range callees(c) { if !yield(callee) { diff --git a/go/callgraph/vta/vta.go b/go/callgraph/vta/vta.go index 56fce13725f..ed12001fdb2 100644 --- a/go/callgraph/vta/vta.go +++ b/go/callgraph/vta/vta.go @@ -126,12 +126,11 @@ func (c *constructor) resolves(call ssa.CallInstruction) []*ssa.Function { // Cover the case of dynamic higher-order and interface calls. var res []*ssa.Function resolved := resolve(call, c.types, c.cache) - siteCallees(call, c.callees)(func(f *ssa.Function) bool { + for f := range siteCallees(call, c.callees) { if _, ok := resolved[f]; ok { res = append(res, f) } - return true - }) + } return res } @@ -140,12 +139,11 @@ func (c *constructor) resolves(call ssa.CallInstruction) []*ssa.Function { func resolve(c ssa.CallInstruction, types propTypeMap, cache methodCache) map[*ssa.Function]empty { fns := make(map[*ssa.Function]empty) n := local{val: c.Common().Value} - types.propTypes(n)(func(p propType) bool { + for p := range types.propTypes(n) { for _, f := range propFunc(p, c, cache) { fns[f] = empty{} } - return true - }) + } return fns } diff --git a/go/ssa/builder.go b/go/ssa/builder.go index 1761dcc3068..84ccbc0927a 100644 --- a/go/ssa/builder.go +++ b/go/ssa/builder.go @@ -82,6 +82,8 @@ import ( "runtime" "sync" + "slices" + "golang.org/x/tools/internal/typeparams" "golang.org/x/tools/internal/versions" ) @@ -2021,8 +2023,8 @@ func (b *builder) forStmtGo122(fn *Function, s *ast.ForStmt, label *lblock) { // Remove instructions for phi, load, and store. // lift() will remove the unused i_next *Alloc. 
isDead := func(i Instruction) bool { return dead[i] } - loop.Instrs = removeInstrsIf(loop.Instrs, isDead) - post.Instrs = removeInstrsIf(post.Instrs, isDead) + loop.Instrs = slices.DeleteFunc(loop.Instrs, isDead) + post.Instrs = slices.DeleteFunc(post.Instrs, isDead) } } diff --git a/go/ssa/lift.go b/go/ssa/lift.go index aada3dc3227..6138ca82e0e 100644 --- a/go/ssa/lift.go +++ b/go/ssa/lift.go @@ -43,6 +43,7 @@ import ( "go/token" "math/big" "os" + "slices" "golang.org/x/tools/internal/typeparams" ) @@ -105,23 +106,7 @@ func buildDomFrontier(fn *Function) domFrontier { } func removeInstr(refs []Instruction, instr Instruction) []Instruction { - return removeInstrsIf(refs, func(i Instruction) bool { return i == instr }) -} - -func removeInstrsIf(refs []Instruction, p func(Instruction) bool) []Instruction { - // TODO(taking): replace with go1.22 slices.DeleteFunc. - i := 0 - for _, ref := range refs { - if p(ref) { - continue - } - refs[i] = ref - i++ - } - for j := i; j != len(refs); j++ { - refs[j] = nil // aid GC - } - return refs[:i] + return slices.DeleteFunc(refs, func(i Instruction) bool { return i == instr }) } // lift replaces local and new Allocs accessed only with diff --git a/go/types/internal/play/play.go b/go/types/internal/play/play.go index 8d3b9d19346..f1318ac247a 100644 --- a/go/types/internal/play/play.go +++ b/go/types/internal/play/play.go @@ -430,12 +430,3 @@ textarea { width: 6in; } body { color: gray; } div#out { font-family: monospace; font-size: 80%; } ` - -// TODO(adonovan): use go1.21 built-in. -func min(x, y int) int { - if x < y { - return x - } else { - return y - } -} diff --git a/gopls/internal/cache/parsego/parse.go b/gopls/internal/cache/parsego/parse.go index db6089d8e6d..08a1c395a2a 100644 --- a/gopls/internal/cache/parsego/parse.go +++ b/gopls/internal/cache/parsego/parse.go @@ -27,7 +27,6 @@ import ( "golang.org/x/tools/gopls/internal/label" "golang.org/x/tools/gopls/internal/protocol" "golang.org/x/tools/gopls/internal/util/astutil" - "golang.org/x/tools/gopls/internal/util/bug" "golang.org/x/tools/gopls/internal/util/safetoken" "golang.org/x/tools/internal/astutil/cursor" "golang.org/x/tools/internal/diff" @@ -65,39 +64,8 @@ func Parse(ctx context.Context, fset *token.FileSet, uri protocol.DocumentURI, s } // Inv: file != nil. - // Workaround for #70162 (missing File{Start,End} when - // parsing empty file) with go1.23. - // - // When parsing an empty file, or one without a valid - // package declaration, the go1.23 parser bails out before - // setting FileStart and End. - // - // This leaves us no way to find the original - // token.File that ParseFile created, so as a - // workaround, we recreate the token.File, and - // populate the FileStart and FileEnd fields. - // - // See also #53202. tokenFile := func(file *ast.File) *token.File { - tok := fset.File(file.FileStart) - if tok == nil { - // Invalid File.FileStart (also File.{Package,Name.Pos}). - if file.Package.IsValid() { - bug.Report("ast.File has valid Package but no FileStart") - } - if file.Name.Pos().IsValid() { - bug.Report("ast.File has valid Name.Pos but no FileStart") - } - tok = fset.AddFile(uri.Path(), -1, len(src)) - tok.SetLinesForContent(src) - // If the File contained any valid token.Pos values, - // they would all be invalid wrt the new token.File, - // but we have established that it lacks FileStart, - // Package, and Name.Pos. 
- file.FileStart = token.Pos(tok.Base()) - file.FileEnd = token.Pos(tok.Base() + tok.Size()) - } - return tok + return fset.File(file.FileStart) } tok := tokenFile(file) diff --git a/internal/refactor/inline/inline.go b/internal/refactor/inline/inline.go index 6f6ed4583a9..2b6f06242e7 100644 --- a/internal/refactor/inline/inline.go +++ b/internal/refactor/inline/inline.go @@ -363,7 +363,7 @@ func (st *state) inline() (*Result, error) { specToDelete := oldImport.spec for _, decl := range f.Decls { if decl, ok := decl.(*ast.GenDecl); ok && decl.Tok == token.IMPORT { - decl.Specs = slicesDeleteFunc(decl.Specs, func(spec ast.Spec) bool { + decl.Specs = slices.DeleteFunc(decl.Specs, func(spec ast.Spec) bool { imp := spec.(*ast.ImportSpec) // Since we re-parsed the file, we can't match by identity; // instead look for syntactic equivalence. @@ -2042,7 +2042,7 @@ func resolveEffects(logf logger, args []*argument, effects []int, sg substGraph) argi := args[i] if sg.has(argi) && !argi.pure { // i is not bound: check whether it must be bound due to hazards. - idx := index(effects, i) + idx := slices.Index(effects, i) if idx >= 0 { for _, j := range effects[:idx] { var ( @@ -3710,34 +3710,3 @@ func soleUse(info *types.Info, obj types.Object) (sole *ast.Ident) { } type unit struct{} // for representing sets as maps - -// slicesDeleteFunc removes any elements from s for which del returns true, -// returning the modified slice. -// slicesDeleteFunc zeroes the elements between the new length and the original length. -// TODO(adonovan): use go1.21 slices.DeleteFunc -func slicesDeleteFunc[S ~[]E, E any](s S, del func(E) bool) S { - i := slicesIndexFunc(s, del) - if i == -1 { - return s - } - // Don't start copying elements until we find one to delete. - for j := i + 1; j < len(s); j++ { - if v := s[j]; !del(v) { - s[i] = v - i++ - } - } - // clear(s[i:]) // zero/nil out the obsolete elements, for GC - return s[:i] -} - -// slicesIndexFunc returns the first index i satisfying f(s[i]), -// or -1 if none do. -func slicesIndexFunc[S ~[]E, E any](s S, f func(E) bool) int { - for i := range s { - if f(s[i]) { - return i - } - } - return -1 -} diff --git a/internal/refactor/inline/util.go b/internal/refactor/inline/util.go index 591dc4265c0..c3f049c73b0 100644 --- a/internal/refactor/inline/util.go +++ b/internal/refactor/inline/util.go @@ -22,16 +22,6 @@ func is[T any](x any) bool { return ok } -// TODO(adonovan): use go1.21's slices.Index. 
-func index[T comparable](slice []T, x T) int { - for i, elem := range slice { - if elem == x { - return i - } - } - return -1 -} - func btoi(b bool) int { if b { return 1 diff --git a/internal/typesinternal/element_test.go b/internal/typesinternal/element_test.go index b4475633270..95f1ab33478 100644 --- a/internal/typesinternal/element_test.go +++ b/internal/typesinternal/element_test.go @@ -9,6 +9,8 @@ import ( "go/parser" "go/token" "go/types" + "maps" + "slices" "strings" "testing" @@ -142,12 +144,7 @@ func TestForEachElement(t *testing.T) { } } if fail { - for k := range got { - t.Logf("got element: %s", k) - } - // TODO(adonovan): use this when go1.23 is assured: - // t.Logf("got elements:\n%s", - // strings.Join(slices.Sorted(maps.Keys(got)), "\n")) + t.Logf("got elements:\n%s", strings.Join(slices.Sorted(maps.Keys(got)), "\n")) } } } diff --git a/internal/typesinternal/zerovalue_test.go b/internal/typesinternal/zerovalue_test.go index 8ec1012dfda..67295a95020 100644 --- a/internal/typesinternal/zerovalue_test.go +++ b/internal/typesinternal/zerovalue_test.go @@ -68,15 +68,15 @@ type aliasNamed = foo func _[T any]() { type aliasTypeParam = T - // type aliasWithTypeParam[u any] = struct { - // x u - // y T - // } - // type aliasWithTypeParams[u, q any] = struct { - // x u - // y q - // z T - // } + type aliasWithTypeParam[u any] = struct { + x u + y T + } + type aliasWithTypeParams[u, q any] = struct { + x u + y q + z T + } type namedWithTypeParam[u any] struct { x u @@ -135,9 +135,8 @@ func _[T any]() { _ aliasTypeParam // *new(T) _ *aliasTypeParam // nil - // TODO(hxjiang): add test for alias type param after stop supporting go1.22. - // _ aliasWithTypeParam[int] // aliasWithTypeParam[int]{} - // _ aliasWithTypeParams[int, string] // aliasWithTypeParams[int, string]{} + _ aliasWithTypeParam[int] // aliasWithTypeParam[int]{} + _ aliasWithTypeParams[int, string] // aliasWithTypeParams[int, string]{} _ namedWithTypeParam[int] // namedWithTypeParam[int]{} _ namedWithTypeParams[int, string] // namedWithTypeParams[int, string]{} From 608d370dd53cb3898f3ddb6dfa5f0d29eae80d2d Mon Sep 17 00:00:00 2001 From: cuishuang Date: Thu, 27 Feb 2025 12:49:28 +0800 Subject: [PATCH 083/270] internal/imports: use a more straightforward return value Change-Id: Ibd8249da636a854dd1a53c047e3d215ef45c911f Reviewed-on: https://go-review.googlesource.com/c/tools/+/653196 Reviewed-by: Michael Pratt LUCI-TryBot-Result: Go LUCI Reviewed-by: Ian Lance Taylor Auto-Submit: Ian Lance Taylor --- internal/imports/fix.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/imports/fix.go b/internal/imports/fix.go index ee0efe48a55..737a9bfae8f 100644 --- a/internal/imports/fix.go +++ b/internal/imports/fix.go @@ -559,7 +559,7 @@ func fixImportsDefault(fset *token.FileSet, f *ast.File, filename string, env *P return err } apply(fset, f, fixes) - return err + return nil } // getFixes gets the import fixes that need to be made to f in order to fix the imports. 
From b2aa62b57015c812848d950d884f626839a43fd7 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Thu, 27 Feb 2025 16:03:51 -0500 Subject: [PATCH 084/270] internal/stdlib: provide API for import graph of std library This CL adds two functions for accessing the direct and transitive imports of the packages of the standard library: func Imports(pkgs ...string) iter.Seq[string] func Dependencies(pkgs ...string) iter.Seq[string] These are needed by modernizers so that they can avoid offering fixes that add an import of, say, "slices" while analyzing a package that is itself a dependency of "slices". The compressed graph is generated from the current toolchain; this may not exactly match the source code being analyzed by the application, but we expect drift to be small. Updates golang/go#70815 Change-Id: I2d7180bcff1d1c72ce61b8436a346b8921c02ba9 Reviewed-on: https://go-review.googlesource.com/c/tools/+/653356 Commit-Queue: Alan Donovan Reviewed-by: Robert Findley LUCI-TryBot-Result: Go LUCI Auto-Submit: Alan Donovan Reviewed-by: Ian Lance Taylor --- internal/stdlib/deps.go | 359 +++++++++++++++++++++++ internal/stdlib/deps_test.go | 36 +++ internal/stdlib/generate.go | 125 +++++++- internal/stdlib/import.go | 89 ++++++ internal/stdlib/manifest.go | 9 +- internal/stdlib/stdlib.go | 2 +- internal/stdlib/testdata/nethttp.deps | 171 +++++++++++ internal/stdlib/testdata/nethttp.imports | 47 +++ 8 files changed, 834 insertions(+), 4 deletions(-) create mode 100644 internal/stdlib/deps.go create mode 100644 internal/stdlib/deps_test.go create mode 100644 internal/stdlib/import.go create mode 100644 internal/stdlib/testdata/nethttp.deps create mode 100644 internal/stdlib/testdata/nethttp.imports diff --git a/internal/stdlib/deps.go b/internal/stdlib/deps.go new file mode 100644 index 00000000000..7cca431cd65 --- /dev/null +++ b/internal/stdlib/deps.go @@ -0,0 +1,359 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by generate.go. DO NOT EDIT. 
+ +package stdlib + +type pkginfo struct { + name string + deps string // list of indices of dependencies, as varint-encoded deltas +} + +var deps = [...]pkginfo{ + {"archive/tar", "\x03k\x03E5\x01\v\x01#\x01\x01\x02\x05\t\x02\x01\x02\x02\v"}, + {"archive/zip", "\x02\x04a\a\x16\x0205\x01+\x05\x01\x10\x03\x02\r\x04"}, + {"bufio", "\x03k}E\x13"}, + {"bytes", "n+R\x03\fG\x02\x02"}, + {"cmp", ""}, + {"compress/bzip2", "\x02\x02\xe7\x01B"}, + {"compress/flate", "\x02l\x03z\r\x024\x01\x03"}, + {"compress/gzip", "\x02\x04a\a\x03\x15eT"}, + {"compress/lzw", "\x02l\x03z"}, + {"compress/zlib", "\x02\x04a\a\x03\x13\x01f"}, + {"container/heap", "\xae\x02"}, + {"container/list", ""}, + {"container/ring", ""}, + {"context", "n\\h\x01\f"}, + {"crypto", "\x84\x01gD"}, + {"crypto/aes", "\x10\n\a\x8e\x02"}, + {"crypto/cipher", "\x03\x1e\x01\x01\x1d\x11\x1d,Q"}, + {"crypto/des", "\x10\x13\x1d.,\x95\x01\x03"}, + {"crypto/dsa", "@\x04*}\x0e"}, + {"crypto/ecdh", "\x03\v\f\x0e\x04\x14\x04\r\x1d}"}, + {"crypto/ecdsa", "\x0e\x05\x03\x04\x01\x0e\x16\x01\x04\f\x01\x1d}\x0e\x04K\x01"}, + {"crypto/ed25519", "\x0e\x1c\x16\n\a\x1d}D"}, + {"crypto/elliptic", "0>}\x0e9"}, + {"crypto/fips140", " \x05\x91\x01"}, + {"crypto/hkdf", "-\x12\x01.\x16"}, + {"crypto/hmac", "\x1a\x14\x11\x01\x113"}, + {"crypto/internal/boring", "\x0e\x02\rg"}, + {"crypto/internal/boring/bbig", "\x1a\xdf\x01L"}, + {"crypto/internal/boring/bcache", "\xb3\x02\x12"}, + {"crypto/internal/boring/sig", ""}, + {"crypto/internal/cryptotest", "\x03\r\n)\x0e\x1a\x06\x13\x12#\a\t\x11\x11\x11\x1b\x01\f\f\x05\n"}, + {"crypto/internal/entropy", "E"}, + {"crypto/internal/fips140", ">0}9\f\x15"}, + {"crypto/internal/fips140/aes", "\x03\x1d\x03\x02\x13\x04\x01\x01\x05+\x8c\x015"}, + {"crypto/internal/fips140/aes/gcm", " \x01\x02\x02\x02\x11\x04\x01\x06+\x8a\x01"}, + {"crypto/internal/fips140/alias", "\xc5\x02"}, + {"crypto/internal/fips140/bigmod", "%\x17\x01\x06+\x8c\x01"}, + {"crypto/internal/fips140/check", " \x0e\x06\b\x02\xad\x01Z"}, + {"crypto/internal/fips140/check/checktest", "%\xff\x01!"}, + {"crypto/internal/fips140/drbg", "\x03\x1c\x01\x01\x04\x13\x04\b\x01)}\x0f8"}, + {"crypto/internal/fips140/ecdh", "\x03\x1d\x05\x02\t\f2}\x0f8"}, + {"crypto/internal/fips140/ecdsa", "\x03\x1d\x04\x01\x02\a\x02\x068}G"}, + {"crypto/internal/fips140/ed25519", "\x03\x1d\x05\x02\x04\v8\xc1\x01\x03"}, + {"crypto/internal/fips140/edwards25519", "%\a\f\x042\x8c\x018"}, + {"crypto/internal/fips140/edwards25519/field", "%\x13\x042\x8c\x01"}, + {"crypto/internal/fips140/hkdf", "\x03\x1d\x05\t\x06:"}, + {"crypto/internal/fips140/hmac", "\x03\x1d\x14\x01\x018"}, + {"crypto/internal/fips140/mlkem", "\x03\x1d\x05\x02\x0e\x03\x042"}, + {"crypto/internal/fips140/nistec", "%\f\a\x042\x8c\x01*\x0e\x13"}, + {"crypto/internal/fips140/nistec/fiat", "%\x136\x8c\x01"}, + {"crypto/internal/fips140/pbkdf2", "\x03\x1d\x05\t\x06:"}, + {"crypto/internal/fips140/rsa", "\x03\x1d\x04\x01\x02\r\x01\x01\x026}G"}, + {"crypto/internal/fips140/sha256", "\x03\x1d\x1c\x01\x06+\x8c\x01"}, + {"crypto/internal/fips140/sha3", "\x03\x1d\x18\x04\x011\x8c\x01K"}, + {"crypto/internal/fips140/sha512", "\x03\x1d\x1c\x01\x06+\x8c\x01"}, + {"crypto/internal/fips140/ssh", " \x05"}, + {"crypto/internal/fips140/subtle", "#\x19\xbe\x01"}, + {"crypto/internal/fips140/tls12", "\x03\x1d\x05\t\x06\x028"}, + {"crypto/internal/fips140/tls13", "\x03\x1d\x05\b\a\b2"}, + {"crypto/internal/fips140deps", ""}, + {"crypto/internal/fips140deps/byteorder", "\x9a\x01"}, + {"crypto/internal/fips140deps/cpu", "\xae\x01\a"}, + 
{"crypto/internal/fips140deps/godebug", "\xb6\x01"}, + {"crypto/internal/fips140hash", "5\x1a5\xc1\x01"}, + {"crypto/internal/fips140only", "'\r\x01\x01N25"}, + {"crypto/internal/fips140test", ""}, + {"crypto/internal/hpke", "\x0e\x01\x01\x03\x1a\x1d$,`M"}, + {"crypto/internal/impl", "\xb0\x02"}, + {"crypto/internal/randutil", "\xeb\x01\x12"}, + {"crypto/internal/sysrand", "\xd7\x01@\x1b\x01\f\x06"}, + {"crypto/internal/sysrand/internal/seccomp", "n"}, + {"crypto/md5", "\x0e2.\x16\x16`"}, + {"crypto/mlkem", "/"}, + {"crypto/pbkdf2", "2\r\x01.\x16"}, + {"crypto/rand", "\x1a\x06\a\x19\x04\x01)}\x0eL"}, + {"crypto/rc4", "#\x1d.\xc1\x01"}, + {"crypto/rsa", "\x0e\f\x01\t\x0f\f\x01\x04\x06\a\x1d\x03\x1325\r\x01"}, + {"crypto/sha1", "\x0e\f&.\x16\x16\x14L"}, + {"crypto/sha256", "\x0e\f\x1aP"}, + {"crypto/sha3", "\x0e'O\xc1\x01"}, + {"crypto/sha512", "\x0e\f\x1cN"}, + {"crypto/subtle", "8\x98\x01T"}, + {"crypto/tls", "\x03\b\x02\x01\x01\x01\x01\x02\x01\x01\x01\x03\x01\a\x01\v\x02\n\x01\b\x05\x03\x01\x01\x01\x01\x02\x01\x02\x01\x18\x02\x03\x13\x16\x14\b5\x16\x16\r\t\x01\x01\x01\x02\x01\f\x06\x02\x01"}, + {"crypto/tls/internal/fips140tls", " \x93\x02"}, + {"crypto/x509", "\x03\v\x01\x01\x01\x01\x01\x01\x01\x011\x03\x02\x01\x01\x02\x05\x01\x0e\x06\x02\x02\x03E5\x03\t\x01\x01\x01\a\x10\x05\t\x05\v\x01\x02\r\x02\x01\x01\x02\x03\x01"}, + {"crypto/x509/internal/macos", "\x03k'\x8f\x01\v\x10\x06"}, + {"crypto/x509/pkix", "d\x06\a\x88\x01F"}, + {"database/sql", "\x03\nK\x16\x03z\f\x06\"\x05\t\x02\x03\x01\f\x02\x02\x02"}, + {"database/sql/driver", "\ra\x03\xae\x01\x10\x10"}, + {"debug/buildinfo", "\x03X\x02\x01\x01\b\a\x03`\x18\x02\x01+\x10\x1e"}, + {"debug/dwarf", "\x03d\a\x03z1\x12\x01\x01"}, + {"debug/elf", "\x03\x06Q\r\a\x03`\x19\x01,\x18\x01\x15"}, + {"debug/gosym", "\x03d\n\xbd\x01\x01\x01\x02"}, + {"debug/macho", "\x03\x06Q\r\n`\x1a,\x18\x01"}, + {"debug/pe", "\x03\x06Q\r\a\x03`\x1a,\x18\x01\x15"}, + {"debug/plan9obj", "g\a\x03`\x1a,"}, + {"embed", "n+:\x18\x01S"}, + {"embed/internal/embedtest", ""}, + {"encoding", ""}, + {"encoding/ascii85", "\xeb\x01D"}, + {"encoding/asn1", "\x03k\x03\x87\x01\x01&\x0e\x02\x01\x0f\x03\x01"}, + {"encoding/base32", "\xeb\x01B\x02"}, + {"encoding/base64", "\x9a\x01QB\x02"}, + {"encoding/binary", "n}\r'\x0e\x05"}, + {"encoding/csv", "\x02\x01k\x03zE\x11\x02"}, + {"encoding/gob", "\x02`\x05\a\x03`\x1a\f\x01\x02\x1d\b\x13\x01\x0e\x02"}, + {"encoding/hex", "n\x03zB\x03"}, + {"encoding/json", "\x03\x01^\x04\b\x03z\r'\x0e\x02\x01\x02\x0f\x01\x01\x02"}, + {"encoding/pem", "\x03c\b}B\x03"}, + {"encoding/xml", "\x02\x01_\f\x03z4\x05\v\x01\x02\x0f\x02"}, + {"errors", "\xca\x01{"}, + {"expvar", "kK9\t\n\x15\r\t\x02\x03\x01\x10"}, + {"flag", "b\f\x03z,\b\x05\t\x02\x01\x0f"}, + {"fmt", "nE8\r\x1f\b\x0e\x02\x03\x11"}, + {"go/ast", "\x03\x01m\x0f\x01j\x03)\b\x0e\x02\x01"}, + {"go/ast/internal/tests", ""}, + {"go/build", "\x02\x01k\x03\x01\x03\x02\a\x02\x01\x17\x1e\x04\x02\t\x14\x12\x01+\x01\x04\x01\a\t\x02\x01\x11\x02\x02"}, + {"go/build/constraint", "n\xc1\x01\x01\x11\x02"}, + {"go/constant", "q\x10w\x01\x015\x01\x02\x11"}, + {"go/doc", "\x04m\x01\x06\t=-1\x11\x02\x01\x11\x02"}, + {"go/doc/comment", "\x03n\xbc\x01\x01\x01\x01\x11\x02"}, + {"go/format", "\x03n\x01\f\x01\x02jE"}, + {"go/importer", "t\a\x01\x01\x04\x01i9"}, + {"go/internal/gccgoimporter", "\x02\x01X\x13\x03\x05\v\x01g\x02,\x01\x05\x12\x01\v\b"}, + {"go/internal/gcimporter", "\x02o\x10\x01/\x05\x0e',\x16\x03\x02"}, + {"go/internal/srcimporter", "q\x01\x02\n\x03\x01i,\x01\x05\x13\x02\x13"}, + {"go/parser", 
"\x03k\x03\x01\x03\v\x01j\x01+\x06\x13"}, + {"go/printer", "q\x01\x03\x03\tj\r\x1f\x16\x02\x01\x02\n\x05\x02"}, + {"go/scanner", "\x03n\x10j2\x11\x01\x12\x02"}, + {"go/token", "\x04m\xbc\x01\x02\x03\x01\x0e\x02"}, + {"go/types", "\x03\x01\x06d\x03\x01\x04\b\x03\x02\x15\x1e\x06+\x04\x03\n%\a\t\x01\x01\x01\x02\x01\x0e\x02\x02"}, + {"go/version", "\xbb\x01u"}, + {"hash", "\xeb\x01"}, + {"hash/adler32", "n\x16\x16"}, + {"hash/crc32", "n\x16\x16\x14\x84\x01\x01"}, + {"hash/crc64", "n\x16\x16\x98\x01"}, + {"hash/fnv", "n\x16\x16`"}, + {"hash/maphash", "\x95\x01\x05\x1b\x03@M"}, + {"html", "\xb0\x02\x02\x11"}, + {"html/template", "\x03h\x06\x19,5\x01\v \x05\x01\x02\x03\r\x01\x02\v\x01\x03\x02"}, + {"image", "\x02l\x1f^\x0f5\x03\x01"}, + {"image/color", ""}, + {"image/color/palette", "\x8d\x01"}, + {"image/draw", "\x8c\x01\x01\x04"}, + {"image/gif", "\x02\x01\x05f\x03\x1b\x01\x01\x01\vQ"}, + {"image/internal/imageutil", "\x8c\x01"}, + {"image/jpeg", "\x02l\x1e\x01\x04Z"}, + {"image/png", "\x02\a^\n\x13\x02\x06\x01^D"}, + {"index/suffixarray", "\x03d\a}\r*\v\x01"}, + {"internal/abi", "\xb5\x01\x90\x01"}, + {"internal/asan", "\xc5\x02"}, + {"internal/bisect", "\xa4\x02\x0e\x01"}, + {"internal/buildcfg", "qG_\x06\x02\x05\v\x01"}, + {"internal/bytealg", "\xae\x01\x97\x01"}, + {"internal/byteorder", ""}, + {"internal/cfg", ""}, + {"internal/chacha8rand", "\x9a\x01\x1b\x90\x01"}, + {"internal/copyright", ""}, + {"internal/coverage", ""}, + {"internal/coverage/calloc", ""}, + {"internal/coverage/cfile", "k\x06\x17\x16\x01\x02\x01\x01\x01\x01\x01\x01\x01$\x01\x1e,\x06\a\v\x01\x03\f\x06"}, + {"internal/coverage/cformat", "\x04m-\x04I\f6\x01\x02\f"}, + {"internal/coverage/cmerge", "q-Z"}, + {"internal/coverage/decodecounter", "g\n-\v\x02@,\x18\x16"}, + {"internal/coverage/decodemeta", "\x02e\n\x17\x16\v\x02@,"}, + {"internal/coverage/encodecounter", "\x02e\n-\f\x01\x02>\f \x16"}, + {"internal/coverage/encodemeta", "\x02\x01d\n\x13\x04\x16\r\x02>,."}, + {"internal/coverage/pods", "\x04m-y\x06\x05\v\x02\x01"}, + {"internal/coverage/rtcov", "\xc5\x02"}, + {"internal/coverage/slicereader", "g\nzZ"}, + {"internal/coverage/slicewriter", "qz"}, + {"internal/coverage/stringtab", "q8\x04>"}, + {"internal/coverage/test", ""}, + {"internal/coverage/uleb128", ""}, + {"internal/cpu", "\xc5\x02"}, + {"internal/dag", "\x04m\xbc\x01\x03"}, + {"internal/diff", "\x03n\xbd\x01\x02"}, + {"internal/exportdata", "\x02\x01k\x03\x03]\x1a,\x01\x05\x12\x01\x02"}, + {"internal/filepathlite", "n+:\x19A"}, + {"internal/fmtsort", "\x04\x9b\x02\x0e"}, + {"internal/fuzz", "\x03\nA\x19\x04\x03\x03\x01\f\x0355\r\x02\x1d\x01\x05\x02\x05\v\x01\x02\x01\x01\v\x04\x02"}, + {"internal/goarch", ""}, + {"internal/godebug", "\x97\x01 {\x01\x12"}, + {"internal/godebugs", ""}, + {"internal/goexperiment", ""}, + {"internal/goos", ""}, + {"internal/goroot", "\x97\x02\x01\x05\x13\x02"}, + {"internal/gover", "\x04"}, + {"internal/goversion", ""}, + {"internal/itoa", ""}, + {"internal/lazyregexp", "\x97\x02\v\x0e\x02"}, + {"internal/lazytemplate", "\xeb\x01,\x19\x02\v"}, + {"internal/msan", "\xc5\x02"}, + {"internal/nettrace", ""}, + {"internal/obscuretestdata", "f\x85\x01,"}, + {"internal/oserror", "n"}, + {"internal/pkgbits", "\x03K\x19\a\x03\x05\vj\x0e\x1e\r\v\x01"}, + {"internal/platform", ""}, + {"internal/poll", "nO\x1a\x149\x0e\x01\x01\v\x06"}, + {"internal/profile", "\x03\x04g\x03z7\f\x01\x01\x0f"}, + {"internal/profilerecord", ""}, + {"internal/race", "\x95\x01\xb0\x01"}, + {"internal/reflectlite", "\x95\x01 3\x01P\x0e\x13\x12"}, + {"unsafe", ""}, 
+ {"vendor/golang.org/x/crypto/chacha20", "\x10W\a\x8c\x01*&"}, + {"vendor/golang.org/x/crypto/chacha20poly1305", "\x10W\a\xd8\x01\x04\x01"}, + {"vendor/golang.org/x/crypto/cryptobyte", "d\n\x03\x88\x01& \n"}, + {"vendor/golang.org/x/crypto/cryptobyte/asn1", ""}, + {"vendor/golang.org/x/crypto/internal/alias", "\xc5\x02"}, + {"vendor/golang.org/x/crypto/internal/poly1305", "Q\x16\x93\x01"}, + {"vendor/golang.org/x/net/dns/dnsmessage", "n"}, + {"vendor/golang.org/x/net/http/httpguts", "\x81\x02\x14\x1b\x13\r"}, + {"vendor/golang.org/x/net/http/httpproxy", "n\x03\x90\x01\x15\x01\x19\x13\r"}, + {"vendor/golang.org/x/net/http2/hpack", "\x03k\x03zG"}, + {"vendor/golang.org/x/net/idna", "q\x87\x018\x13\x10\x02\x01"}, + {"vendor/golang.org/x/net/nettest", "\x03d\a\x03z\x11\x05\x16\x01\f\v\x01\x02\x02\x01\n"}, + {"vendor/golang.org/x/sys/cpu", "\x97\x02\r\v\x01\x15"}, + {"vendor/golang.org/x/text/secure/bidirule", "n\xd5\x01\x11\x01"}, + {"vendor/golang.org/x/text/transform", "\x03k}X"}, + {"vendor/golang.org/x/text/unicode/bidi", "\x03\bf~?\x15"}, + {"vendor/golang.org/x/text/unicode/norm", "g\nzG\x11\x11"}, + {"weak", "\x95\x01\x8f\x01!"}, +} diff --git a/internal/stdlib/deps_test.go b/internal/stdlib/deps_test.go new file mode 100644 index 00000000000..41d2d126ec5 --- /dev/null +++ b/internal/stdlib/deps_test.go @@ -0,0 +1,36 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package stdlib_test + +import ( + "iter" + "os" + "slices" + "sort" + "strings" + "testing" + + "github.com/google/go-cmp/cmp" + "golang.org/x/tools/internal/stdlib" +) + +func TestImports(t *testing.T) { testDepsFunc(t, "testdata/nethttp.imports", stdlib.Imports) } +func TestDeps(t *testing.T) { testDepsFunc(t, "testdata/nethttp.deps", stdlib.Dependencies) } + +// testDepsFunc checks that the specified dependency function applied +// to net/http returns the set of dependencies in the named file. +func testDepsFunc(t *testing.T, filename string, depsFunc func(pkgs ...string) iter.Seq[string]) { + data, err := os.ReadFile(filename) + if err != nil { + t.Fatal(err) + } + want := strings.Split(strings.TrimSpace(string(data)), "\n") + got := slices.Collect(depsFunc("net/http")) + sort.Strings(want) + sort.Strings(got) + if diff := cmp.Diff(got, want); diff != "" { + t.Fatalf("Deps mismatch (-want +got):\n%s", diff) + } +} diff --git a/internal/stdlib/generate.go b/internal/stdlib/generate.go index 1192885405c..4c67d8bd797 100644 --- a/internal/stdlib/generate.go +++ b/internal/stdlib/generate.go @@ -7,11 +7,18 @@ // The generate command reads all the GOROOT/api/go1.*.txt files and // generates a single combined manifest.go file containing the Go // standard library API symbols along with versions. +// +// It also runs "go list -deps std" and records the import graph. This +// information may be used, for example, to ensure that tools don't +// suggest fixes that import package P when analyzing one of P's +// dependencies. 
package main import ( "bytes" "cmp" + "encoding/binary" + "encoding/json" "errors" "fmt" "go/format" @@ -19,6 +26,7 @@ import ( "io/fs" "log" "os" + "os/exec" "path/filepath" "regexp" "runtime" @@ -29,6 +37,13 @@ import ( ) func main() { + manifest() + deps() +} + +// -- generate std manifest -- + +func manifest() { pkgs := make(map[string]map[string]symInfo) // package -> symbol -> info symRE := regexp.MustCompile(`^pkg (\S+).*?, (var|func|type|const|method \([^)]*\)) ([\pL\p{Nd}_]+)(.*)`) @@ -131,7 +146,7 @@ func main() { // Write the combined manifest. var buf bytes.Buffer - buf.WriteString(`// Copyright 2024 The Go Authors. All rights reserved. + buf.WriteString(`// Copyright 2025 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. @@ -157,7 +172,7 @@ var PackageSymbols = map[string][]Symbol{ if err != nil { log.Fatal(err) } - if err := os.WriteFile("manifest.go", fmtbuf, 0666); err != nil { + if err := os.WriteFile("manifest.go", fmtbuf, 0o666); err != nil { log.Fatal(err) } } @@ -223,3 +238,109 @@ func removeTypeParam(s string) string { } return s } + +// -- generate dependency graph -- + +func deps() { + stdout := new(bytes.Buffer) + cmd := exec.Command("go", "list", "-deps", "-json", "std") + cmd.Stdout = stdout + cmd.Stderr = os.Stderr + if err := cmd.Run(); err != nil { + log.Fatal(err) + } + + type Package struct { + // go list JSON output + ImportPath string // import path of package in dir + Imports []string // import paths used by this package + + // encoding + index int + deps []int // indices of direct imports, sorted + } + pkgs := make(map[string]*Package) + var keys []string + for dec := json.NewDecoder(stdout); dec.More(); { + var pkg Package + if err := dec.Decode(&pkg); err != nil { + log.Fatal(err) + } + pkgs[pkg.ImportPath] = &pkg + keys = append(keys, pkg.ImportPath) + } + + // Sort and number the packages. + // There are 344 as of Mar 2025. + slices.Sort(keys) + for i, name := range keys { + pkgs[name].index = i + } + + // Encode the dependencies. + for _, pkg := range pkgs { + for _, imp := range pkg.Imports { + if imp == "C" { + continue + } + pkg.deps = append(pkg.deps, pkgs[imp].index) + } + slices.Sort(pkg.deps) + } + + // Emit the table. + var buf bytes.Buffer + buf.WriteString(`// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by generate.go. DO NOT EDIT. + +package stdlib + +type pkginfo struct { + name string + deps string // list of indices of dependencies, as varint-encoded deltas +} +var deps = [...]pkginfo{ +`) + for _, name := range keys { + prev := 0 + var deps []int + for _, v := range pkgs[name].deps { + deps = append(deps, v-prev) // delta + prev = v + } + var data []byte + for _, v := range deps { + data = binary.AppendUvarint(data, uint64(v)) + } + fmt.Fprintf(&buf, "\t{%q, %q},\n", name, data) + } + fmt.Fprintln(&buf, "}") + + fmtbuf, err := format.Source(buf.Bytes()) + if err != nil { + log.Fatal(err) + } + if err := os.WriteFile("deps.go", fmtbuf, 0o666); err != nil { + log.Fatal(err) + } + + // Also generate the data for the test. 
+ for _, t := range [...]struct{ flag, filename string }{ + {"-deps=true", "testdata/nethttp.deps"}, + {`-f={{join .Imports "\n"}}`, "testdata/nethttp.imports"}, + } { + stdout := new(bytes.Buffer) + cmd := exec.Command("go", "list", t.flag, "net/http") + cmd.Stdout = stdout + cmd.Stderr = os.Stderr + if err := cmd.Run(); err != nil { + log.Fatal(err) + } + if err := os.WriteFile(t.filename, stdout.Bytes(), 0666); err != nil { + log.Fatal(err) + } + } +} diff --git a/internal/stdlib/import.go b/internal/stdlib/import.go new file mode 100644 index 00000000000..f6909878a8a --- /dev/null +++ b/internal/stdlib/import.go @@ -0,0 +1,89 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package stdlib + +// This file provides the API for the import graph of the standard library. +// +// Be aware that the compiler-generated code for every package +// implicitly depends on package "runtime" and a handful of others +// (see runtimePkgs in GOROOT/src/cmd/internal/objabi/pkgspecial.go). + +import ( + "encoding/binary" + "iter" + "slices" + "strings" +) + +// Imports returns the sequence of packages directly imported by the +// named standard packages, in name order. +// The imports of an unknown package are the empty set. +// +// The graph is built into the application and may differ from the +// graph in the Go source tree being analyzed by the application. +func Imports(pkgs ...string) iter.Seq[string] { + return func(yield func(string) bool) { + for _, pkg := range pkgs { + if i, ok := find(pkg); ok { + var depIndex uint64 + for data := []byte(deps[i].deps); len(data) > 0; { + delta, n := binary.Uvarint(data) + depIndex += delta + if !yield(deps[depIndex].name) { + return + } + data = data[n:] + } + } + } + } +} + +// Dependencies returns the set of all dependencies of the named +// standard packages, including the initial package, +// in a deterministic topological order. +// The dependencies of an unknown package are the empty set. +// +// The graph is built into the application and may differ from the +// graph in the Go source tree being analyzed by the application. +func Dependencies(pkgs ...string) iter.Seq[string] { + return func(yield func(string) bool) { + for _, pkg := range pkgs { + if i, ok := find(pkg); ok { + var seen [1 + len(deps)/8]byte // bit set of seen packages + var visit func(i int) bool + visit = func(i int) bool { + bit := byte(1) << (i % 8) + if seen[i/8]&bit == 0 { + seen[i/8] |= bit + var depIndex uint64 + for data := []byte(deps[i].deps); len(data) > 0; { + delta, n := binary.Uvarint(data) + depIndex += delta + if !visit(int(depIndex)) { + return false + } + data = data[n:] + } + if !yield(deps[i].name) { + return false + } + } + return true + } + if !visit(i) { + return + } + } + } + } +} + +// find returns the index of pkg in the deps table. +func find(pkg string) (int, bool) { + return slices.BinarySearchFunc(deps[:], pkg, func(p pkginfo, n string) int { + return strings.Compare(p.name, n) + }) +} diff --git a/internal/stdlib/manifest.go b/internal/stdlib/manifest.go index e7d0aee2186..00776a31b60 100644 --- a/internal/stdlib/manifest.go +++ b/internal/stdlib/manifest.go @@ -1,4 +1,4 @@ -// Copyright 2024 The Go Authors. All rights reserved. +// Copyright 2025 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
@@ -7119,6 +7119,7 @@ var PackageSymbols = map[string][]Symbol{ {"FormatFileInfo", Func, 21}, {"Glob", Func, 16}, {"GlobFS", Type, 16}, + {"Lstat", Func, 25}, {"ModeAppend", Const, 16}, {"ModeCharDevice", Const, 16}, {"ModeDevice", Const, 16}, @@ -7143,6 +7144,8 @@ var PackageSymbols = map[string][]Symbol{ {"ReadDirFile", Type, 16}, {"ReadFile", Func, 16}, {"ReadFileFS", Type, 16}, + {"ReadLink", Func, 25}, + {"ReadLinkFS", Type, 25}, {"SkipAll", Var, 20}, {"SkipDir", Var, 16}, {"Stat", Func, 16}, @@ -9146,6 +9149,8 @@ var PackageSymbols = map[string][]Symbol{ {"(*ProcessState).SysUsage", Method, 0}, {"(*ProcessState).SystemTime", Method, 0}, {"(*ProcessState).UserTime", Method, 0}, + {"(*Root).Chmod", Method, 25}, + {"(*Root).Chown", Method, 25}, {"(*Root).Close", Method, 24}, {"(*Root).Create", Method, 24}, {"(*Root).FS", Method, 24}, @@ -16754,9 +16759,11 @@ var PackageSymbols = map[string][]Symbol{ }, "testing/fstest": { {"(MapFS).Glob", Method, 16}, + {"(MapFS).Lstat", Method, 25}, {"(MapFS).Open", Method, 16}, {"(MapFS).ReadDir", Method, 16}, {"(MapFS).ReadFile", Method, 16}, + {"(MapFS).ReadLink", Method, 25}, {"(MapFS).Stat", Method, 16}, {"(MapFS).Sub", Method, 16}, {"MapFS", Type, 16}, diff --git a/internal/stdlib/stdlib.go b/internal/stdlib/stdlib.go index 98904017f2c..3d96d3bf686 100644 --- a/internal/stdlib/stdlib.go +++ b/internal/stdlib/stdlib.go @@ -6,7 +6,7 @@ // Package stdlib provides a table of all exported symbols in the // standard library, along with the version at which they first -// appeared. +// appeared. It also provides the import graph of std packages. package stdlib import ( diff --git a/internal/stdlib/testdata/nethttp.deps b/internal/stdlib/testdata/nethttp.deps new file mode 100644 index 00000000000..e1235e84932 --- /dev/null +++ b/internal/stdlib/testdata/nethttp.deps @@ -0,0 +1,171 @@ +internal/goarch +unsafe +internal/abi +internal/unsafeheader +internal/cpu +internal/bytealg +internal/byteorder +internal/chacha8rand +internal/coverage/rtcov +internal/godebugs +internal/goexperiment +internal/goos +internal/profilerecord +internal/runtime/atomic +internal/runtime/exithook +internal/asan +internal/msan +internal/race +internal/runtime/math +internal/runtime/sys +internal/runtime/maps +internal/stringslite +internal/trace/tracev2 +runtime +internal/reflectlite +errors +sync/atomic +internal/sync +sync +io +iter +math/bits +unicode +unicode/utf8 +bytes +strings +bufio +cmp +internal/itoa +math +strconv +reflect +slices +internal/fmtsort +internal/oserror +path +internal/bisect +internal/godebug +syscall +time +io/fs +internal/filepathlite +internal/syscall/unix +internal/poll +internal/syscall/execenv +internal/testlog +os +fmt +sort +compress/flate +encoding/binary +hash +hash/crc32 +compress/gzip +container/list +context +crypto +crypto/internal/fips140deps/godebug +crypto/internal/fips140 +crypto/internal/fips140/alias +crypto/internal/fips140deps/byteorder +crypto/internal/fips140deps/cpu +crypto/internal/impl +crypto/internal/fips140/sha256 +crypto/internal/fips140/subtle +crypto/internal/fips140/sha3 +crypto/internal/fips140/sha512 +crypto/internal/fips140/hmac +crypto/internal/fips140/check +crypto/internal/fips140/aes +crypto/internal/sysrand +crypto/internal/entropy +math/rand/v2 +crypto/internal/randutil +crypto/internal/fips140/drbg +crypto/internal/fips140/aes/gcm +crypto/internal/fips140only +crypto/subtle +crypto/cipher +crypto/internal/boring/sig +crypto/internal/boring +math/rand +math/big +crypto/rand +crypto/aes +crypto/des 
+crypto/internal/fips140/nistec/fiat +crypto/internal/fips140/nistec +crypto/internal/fips140/ecdh +crypto/internal/fips140/edwards25519/field +crypto/ecdh +crypto/elliptic +crypto/internal/boring/bbig +crypto/internal/fips140/bigmod +crypto/internal/fips140/ecdsa +crypto/sha3 +crypto/internal/fips140hash +crypto/sha512 +unicode/utf16 +encoding/asn1 +vendor/golang.org/x/crypto/cryptobyte/asn1 +vendor/golang.org/x/crypto/cryptobyte +crypto/ecdsa +crypto/internal/fips140/edwards25519 +crypto/internal/fips140/ed25519 +crypto/ed25519 +crypto/hmac +crypto/internal/fips140/hkdf +crypto/internal/fips140/mlkem +crypto/internal/fips140/tls12 +crypto/internal/fips140/tls13 +vendor/golang.org/x/crypto/internal/alias +vendor/golang.org/x/crypto/chacha20 +vendor/golang.org/x/crypto/internal/poly1305 +vendor/golang.org/x/crypto/chacha20poly1305 +crypto/internal/hpke +crypto/md5 +crypto/rc4 +crypto/internal/fips140/rsa +crypto/rsa +crypto/sha1 +crypto/sha256 +crypto/tls/internal/fips140tls +crypto/dsa +crypto/x509/internal/macos +encoding/hex +crypto/x509/pkix +encoding/base64 +encoding/pem +maps +vendor/golang.org/x/net/dns/dnsmessage +internal/nettrace +weak +unique +net/netip +internal/routebsd +internal/singleflight +net +net/url +crypto/x509 +crypto/tls +vendor/golang.org/x/text/transform +log/internal +log +vendor/golang.org/x/text/unicode/bidi +vendor/golang.org/x/text/secure/bidirule +vendor/golang.org/x/text/unicode/norm +vendor/golang.org/x/net/idna +net/textproto +vendor/golang.org/x/net/http/httpguts +vendor/golang.org/x/net/http/httpproxy +vendor/golang.org/x/net/http2/hpack +mime +mime/quotedprintable +path/filepath +mime/multipart +net/http/httptrace +net/http/internal +net/http/internal/ascii +net/http/internal/httpcommon +net/http diff --git a/internal/stdlib/testdata/nethttp.imports b/internal/stdlib/testdata/nethttp.imports new file mode 100644 index 00000000000..77e78696bdd --- /dev/null +++ b/internal/stdlib/testdata/nethttp.imports @@ -0,0 +1,47 @@ +bufio +bytes +compress/gzip +container/list +context +crypto/rand +crypto/tls +encoding/base64 +encoding/binary +errors +fmt +vendor/golang.org/x/net/http/httpguts +vendor/golang.org/x/net/http/httpproxy +vendor/golang.org/x/net/http2/hpack +vendor/golang.org/x/net/idna +internal/godebug +io +io/fs +log +maps +math +math/bits +math/rand +mime +mime/multipart +net +net/http/httptrace +net/http/internal +net/http/internal/ascii +net/http/internal/httpcommon +net/textproto +net/url +os +path +path/filepath +reflect +runtime +slices +sort +strconv +strings +sync +sync/atomic +time +unicode +unicode/utf8 +unsafe From 5f02a3e879c4c45e42f3a630f971a0f6a13110e5 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Fri, 28 Feb 2025 10:31:17 -0500 Subject: [PATCH 085/270] gopls/internal/analysis/modernize: don't import slices within slices This CL strengthens the check that the various modernizer passes use to skip packages in which the fixes cannot be applied. Before, we would not add an import of "slices" from withing the "slices" package itself, but we cannot add this import from any package that "slices" itself transitively depends upon, as this would create an import cycle. So, we consult the std dependency graph baked into the executable. This feature was tested interactively by running modernize on std. 
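As an illustration (not part of this CL), the new check can be phrased
directly in terms of the std dependency graph added earlier in this
series. A minimal sketch, assuming it is compiled within x/tools (the
stdlib package is internal) and using a helper name of our own:

    package main

    import (
        "fmt"

        "golang.org/x/tools/internal/stdlib"
    )

    // mayImportSlices reports whether pkg could add an import of
    // "slices" without creating a cycle: it cannot if it is "slices"
    // itself or any package that "slices" transitively depends on.
    func mayImportSlices(pkg string) bool {
        for dep := range stdlib.Dependencies("slices") {
            if dep == pkg {
                return false
            }
        }
        return true
    }

    func main() {
        fmt.Println(mayImportSlices("cmp"))      // false: slices imports cmp
        fmt.Println(mayImportSlices("net/http")) // true
    }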
Updates golang/go#71847 Change-Id: Iaec6ef07b58ca07df498db63369dae8087331ab9 Reviewed-on: https://go-review.googlesource.com/c/tools/+/653595 Reviewed-by: Robert Findley Auto-Submit: Alan Donovan Reviewed-by: Jonathan Amsterdam LUCI-TryBot-Result: Go LUCI --- gopls/internal/analysis/modernize/maps.go | 4 +++- .../internal/analysis/modernize/modernize.go | 22 +++++++++++++++++++ gopls/internal/analysis/modernize/slices.go | 5 +++-- .../analysis/modernize/slicescontains.go | 5 +++-- .../analysis/modernize/slicesdelete.go | 6 +++++ .../internal/analysis/modernize/sortslice.go | 4 +++- gopls/internal/util/moreiters/iters.go | 10 +++++++++ 7 files changed, 50 insertions(+), 6 deletions(-) diff --git a/gopls/internal/analysis/modernize/maps.go b/gopls/internal/analysis/modernize/maps.go index dad329477cd..5577978278c 100644 --- a/gopls/internal/analysis/modernize/maps.go +++ b/gopls/internal/analysis/modernize/maps.go @@ -41,7 +41,9 @@ import ( // m = make(M) // m = M{} func mapsloop(pass *analysis.Pass) { - if pass.Pkg.Path() == "maps " { + // Skip the analyzer in packages where its + // fixes would create an import cycle. + if within(pass, "maps", "bytes", "runtime") { return } diff --git a/gopls/internal/analysis/modernize/modernize.go b/gopls/internal/analysis/modernize/modernize.go index 0f7b58eed37..354836d6b40 100644 --- a/gopls/internal/analysis/modernize/modernize.go +++ b/gopls/internal/analysis/modernize/modernize.go @@ -18,8 +18,10 @@ import ( "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/gopls/internal/util/astutil" + "golang.org/x/tools/gopls/internal/util/moreiters" "golang.org/x/tools/internal/analysisinternal" "golang.org/x/tools/internal/astutil/cursor" + "golang.org/x/tools/internal/stdlib" "golang.org/x/tools/internal/versions" ) @@ -125,6 +127,26 @@ func filesUsing(inspect *inspector.Inspector, info *types.Info, version string) } } +// within reports whether the current pass is analyzing one of the +// specified standard packages or their dependencies. +func within(pass *analysis.Pass, pkgs ...string) bool { + path := pass.Pkg.Path() + return standard(path) && + moreiters.Contains(stdlib.Dependencies(pkgs...), path) +} + +// standard reports whether the specified package path belongs to a +// package in the standard library (including internal dependencies). +func standard(path string) bool { + // A standard package has no dot in its first segment. + // (It may yet have a dot, e.g. "vendor/golang.org/x/foo".) + slash := strings.IndexByte(path, '/') + if slash < 0 { + slash = len(path) + } + return !strings.Contains(path[:slash], ".") && path != "testdata" +} + var ( builtinAny = types.Universe.Lookup("any") builtinAppend = types.Universe.Lookup("append") diff --git a/gopls/internal/analysis/modernize/slices.go b/gopls/internal/analysis/modernize/slices.go index 9cca3e98156..7e0d9cbd92e 100644 --- a/gopls/internal/analysis/modernize/slices.go +++ b/gopls/internal/analysis/modernize/slices.go @@ -46,8 +46,9 @@ import ( // The fix does not always preserve nilness the of base slice when the // addends (a, b, c) are all empty. func appendclipped(pass *analysis.Pass) { - switch pass.Pkg.Path() { - case "slices", "bytes": + // Skip the analyzer in packages where its + // fixes would create an import cycle. 
+ if within(pass, "slices", "bytes", "runtime") { return } diff --git a/gopls/internal/analysis/modernize/slicescontains.go b/gopls/internal/analysis/modernize/slicescontains.go index 09642448bb5..b59ea452a0f 100644 --- a/gopls/internal/analysis/modernize/slicescontains.go +++ b/gopls/internal/analysis/modernize/slicescontains.go @@ -47,8 +47,9 @@ import ( // (Mostly this appears to be a desirable optimization, avoiding // redundantly repeated evaluation.) func slicescontains(pass *analysis.Pass) { - // Don't modify the slices package itself. - if pass.Pkg.Path() == "slices" { + // Skip the analyzer in packages where its + // fixes would create an import cycle. + if within(pass, "slices", "runtime") { return } diff --git a/gopls/internal/analysis/modernize/slicesdelete.go b/gopls/internal/analysis/modernize/slicesdelete.go index 24b2182ca6a..3c3d880f62b 100644 --- a/gopls/internal/analysis/modernize/slicesdelete.go +++ b/gopls/internal/analysis/modernize/slicesdelete.go @@ -21,6 +21,12 @@ import ( // Other variations that will also have suggested replacements include: // append(s[:i-1], s[i:]...) and append(s[:i+k1], s[i+k2:]) where k2 > k1. func slicesdelete(pass *analysis.Pass) { + // Skip the analyzer in packages where its + // fixes would create an import cycle. + if within(pass, "slices", "runtime") { + return + } + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) info := pass.TypesInfo report := func(file *ast.File, call *ast.CallExpr, slice1, slice2 *ast.SliceExpr) { diff --git a/gopls/internal/analysis/modernize/sortslice.go b/gopls/internal/analysis/modernize/sortslice.go index a033be7f635..0437aaf2f67 100644 --- a/gopls/internal/analysis/modernize/sortslice.go +++ b/gopls/internal/analysis/modernize/sortslice.go @@ -36,7 +36,9 @@ import ( // - sort.Sort(x) where x has a named slice type whose Less method is the natural order. // -> sort.Slice(x) func sortslice(pass *analysis.Pass) { - if !analysisinternal.Imports(pass.Pkg, "sort") { + // Skip the analyzer in packages where its + // fixes would create an import cycle. + if within(pass, "slices", "sort", "runtime") { return } diff --git a/gopls/internal/util/moreiters/iters.go b/gopls/internal/util/moreiters/iters.go index e4d83ae8618..d41cb1d3bca 100644 --- a/gopls/internal/util/moreiters/iters.go +++ b/gopls/internal/util/moreiters/iters.go @@ -14,3 +14,13 @@ func First[T any](seq iter.Seq[T]) (z T, ok bool) { } return z, false } + +// Contains reports whether x is an element of the sequence seq. 
+func Contains[T comparable](seq iter.Seq[T], x T) bool { + for cand := range seq { + if cand == x { + return true + } + } + return false +} From d14149970b9aba669e3257bfc34df2994a2a2fbc Mon Sep 17 00:00:00 2001 From: Egon Elbre Date: Thu, 20 Feb 2025 13:43:48 +0200 Subject: [PATCH 086/270] cmd/toolstash: fix windows executable name handling Change-Id: I1ff643fae4c48b4f68b452eb6881fca99832930c Reviewed-on: https://go-review.googlesource.com/c/tools/+/650915 Reviewed-by: Junyang Shao Reviewed-by: Michael Pratt Auto-Submit: Sean Liao Commit-Queue: Sean Liao Reviewed-by: Sean Liao LUCI-TryBot-Result: Go LUCI --- cmd/toolstash/main.go | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/cmd/toolstash/main.go b/cmd/toolstash/main.go index c533ed1e572..3a92c00bfff 100644 --- a/cmd/toolstash/main.go +++ b/cmd/toolstash/main.go @@ -225,7 +225,7 @@ func main() { return } - tool = cmd[0] + tool = exeName(cmd[0]) if i := strings.LastIndexAny(tool, `/\`); i >= 0 { tool = tool[i+1:] } @@ -530,7 +530,7 @@ func runCmd(cmd []string, keepLog bool, logName string) (output []byte, err erro }() } - xcmd := exec.Command(cmd[0], cmd[1:]...) + xcmd := exec.Command(exeName(cmd[0]), cmd[1:]...) if !keepLog { return xcmd.CombinedOutput() } @@ -571,9 +571,10 @@ func save() { if !shouldSave(name) { continue } - src := filepath.Join(binDir, name) + bin := exeName(name) + src := filepath.Join(binDir, bin) if _, err := os.Stat(src); err == nil { - cp(src, filepath.Join(stashDir, name)) + cp(src, filepath.Join(stashDir, bin)) } } @@ -641,3 +642,10 @@ func cp(src, dst string) { log.Fatal(err) } } + +func exeName(name string) string { + if runtime.GOOS == "windows" { + return name + ".exe" + } + return name +} From 0efa5e51a822f9f580ed226cd8cd96089bc2d80d Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Fri, 28 Feb 2025 15:56:52 -0500 Subject: [PATCH 087/270] gopls/internal/analysis/modernize: rangeint: non-integer untyped constants This CL fixes a bug in rangeint that caused it to replace const limit = 1e3 for i := 0; i < limit; i++ {} with for range limit {} // error: limit is not an integer Now, we check that the type of limit is assignable to int, and if not insert an explicit int(limit) conversion. Updates golang/go#71847 (item d) Change-Id: Icfaa96e5506fcb5a3e6f3ed8f911bf4bda9cf32f Reviewed-on: https://go-review.googlesource.com/c/tools/+/653616 LUCI-TryBot-Result: Go LUCI Reviewed-by: Jonathan Amsterdam Auto-Submit: Alan Donovan --- gopls/internal/analysis/modernize/rangeint.go | 42 +++++++++++++++++++ .../testdata/src/rangeint/rangeint.go | 11 +++++ .../testdata/src/rangeint/rangeint.go.golden | 11 +++++ 3 files changed, 64 insertions(+) diff --git a/gopls/internal/analysis/modernize/rangeint.go b/gopls/internal/analysis/modernize/rangeint.go index b94bff34431..2921bbb3468 100644 --- a/gopls/internal/analysis/modernize/rangeint.go +++ b/gopls/internal/analysis/modernize/rangeint.go @@ -31,6 +31,8 @@ import ( // - The ':=' may be replaced by '='. // - The fix may remove "i :=" if it would become unused. // +// TODO(adonovan): permit variants such as "i := int64(0)". +// // Restrictions: // - The variable i must not be assigned or address-taken within the // loop, because a "for range int" loop does not respect assignments @@ -120,6 +122,31 @@ func rangeint(pass *analysis.Pass) { limit = call.Args[0] } + // If the limit is a untyped constant of non-integer type, + // such as "const limit = 1e3", its effective type may + // differ between the two forms. 
+ // In a for loop, it must be comparable with int i, + // for i := 0; i < limit; i++ + // but in a range loop it would become a float, + // for i := range limit {} + // which is a type error. We need to convert it to int + // in this case. + // + // Unfortunately go/types discards the untyped type + // (but see Untyped in golang/go#70638) so we must + // re-type check the expression to detect this case. + var beforeLimit, afterLimit string + if v := info.Types[limit].Value; v != nil { + beforeLimit, afterLimit = "int(", ")" + info2 := &types.Info{Types: make(map[ast.Expr]types.TypeAndValue)} + if types.CheckExpr(pass.Fset, pass.Pkg, limit.Pos(), limit, info2) == nil { + tLimit := types.Default(info2.TypeOf(limit)) + if types.AssignableTo(tLimit, types.Typ[types.Int]) { + beforeLimit, afterLimit = "", "" + } + } + } + pass.Report(analysis.Diagnostic{ Pos: init.Pos(), End: inc.End(), @@ -133,15 +160,30 @@ func rangeint(pass *analysis.Pass) { // ----- --- // ------- // for i := range limit {} + + // Delete init. { Pos: init.Rhs[0].Pos(), End: limit.Pos(), NewText: []byte("range "), }, + // Add "int(" before limit, if needed. + { + Pos: limit.Pos(), + End: limit.Pos(), + NewText: []byte(beforeLimit), + }, + // Delete inc. { Pos: limit.End(), End: inc.End(), }, + // Add ")" after limit, if needed. + { + Pos: limit.End(), + End: limit.End(), + NewText: []byte(afterLimit), + }, }...), }}, }) diff --git a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go index 32628f5fae3..da486dcd32c 100644 --- a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go +++ b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go @@ -66,3 +66,14 @@ func nopePostconditionDiffers() { } println(i) // must print 5, not 4 } + +// Non-integer untyped constants need to be explicitly converted to int. +func issue71847d() { + const limit = 1e3 // float + for i := 0; i < limit; i++ { // want "for loop can be modernized using range over int" + } + + const limit2 = 1 + 0i // complex + for i := 0; i < limit2; i++ { // want "for loop can be modernized using range over int" + } +} diff --git a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden index 43cf220d699..01d28ccb92b 100644 --- a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden +++ b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden @@ -66,3 +66,14 @@ func nopePostconditionDiffers() { } println(i) // must print 5, not 4 } + +// Non-integer untyped constants need to be explicitly converted to int. +func issue71847d() { + const limit = 1e3 // float + for range int(limit) { // want "for loop can be modernized using range over int" + } + + const limit2 = 1 + 0i // complex + for range int(limit2) { // want "for loop can be modernized using range over int" + } +} From 2839096cd63a762fc544b0a489afe080032c472d Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Fri, 21 Feb 2025 15:03:43 -0500 Subject: [PATCH 088/270] gopls/internal/analysis/gofix: generic aliases Support inlining generic aliases. For golang/go#32816. 
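For example (mirroring the test data in this CL), a use of a generic
alias marked //go:fix inline is replaced by its right-hand side,
instantiated with the type arguments at the use site:

    //go:fix inline
    type Mapset[T comparable] = map[T]bool

    var _ Mapset[int] // inlined to: var _ map[int]bool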
Change-Id: Ic65e6fb30d65ee0f7d6e0093fd882a675de71da4 Reviewed-on: https://go-review.googlesource.com/c/tools/+/651617 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan --- gopls/internal/analysis/gofix/gofix.go | 68 +++++++++++++------ .../analysis/gofix/testdata/src/a/a.go | 22 ++++++ .../analysis/gofix/testdata/src/a/a.go.golden | 25 +++++++ 3 files changed, 96 insertions(+), 19 deletions(-) diff --git a/gopls/internal/analysis/gofix/gofix.go b/gopls/internal/analysis/gofix/gofix.go index 7323028aa31..7a55d7ca93d 100644 --- a/gopls/internal/analysis/gofix/gofix.go +++ b/gopls/internal/analysis/gofix/gofix.go @@ -9,6 +9,7 @@ import ( "go/ast" "go/token" "go/types" + "slices" "strings" _ "embed" @@ -140,11 +141,6 @@ func (a *analyzer) findAlias(spec *ast.TypeSpec, declInline bool) { } } - if spec.TypeParams != nil { - // TODO(jba): handle generic aliases - return - } - // Remember that this is an inlinable alias. typ := &goFixInlineAliasFact{} lhs := a.pass.TypesInfo.Defs[spec.Name].(*types.TypeName) @@ -294,7 +290,7 @@ func (a *analyzer) inlineCall(call *ast.CallExpr, cur cursor.Cursor) { } // If tn is the TypeName of an inlinable alias, suggest inlining its use at cur. -func (a *analyzer) inlineAlias(tn *types.TypeName, cur cursor.Cursor) { +func (a *analyzer) inlineAlias(tn *types.TypeName, curId cursor.Cursor) { inalias, ok := a.inlinableAliases[tn] if !ok { var fact goFixInlineAliasFact @@ -307,12 +303,17 @@ func (a *analyzer) inlineAlias(tn *types.TypeName, cur cursor.Cursor) { return // nope } - // Get the alias's RHS. It has everything we need to format the replacement text. - rhs := tn.Type().(*types.Alias).Rhs() - + alias := tn.Type().(*types.Alias) + // Remember the names of the alias's type params. When we check for shadowing + // later, we'll ignore these because they won't appear in the replacement text. + typeParamNames := map[*types.TypeName]bool{} + for tp := range alias.TypeParams().TypeParams() { + typeParamNames[tp.Obj()] = true + } + rhs := alias.Rhs() curPath := a.pass.Pkg.Path() - curFile := currentFile(cur) - n := cur.Node().(*ast.Ident) + curFile := currentFile(curId) + id := curId.Node().(*ast.Ident) // We have an identifier A here (n), possibly qualified by a package // identifier (sel.n), and an inlinable "type A = rhs" elsewhere. // @@ -324,6 +325,10 @@ func (a *analyzer) inlineAlias(tn *types.TypeName, cur cursor.Cursor) { edits []analysis.TextEdit ) for _, tn := range typenames(rhs) { + // Ignore the type parameters of the alias: they won't appear in the result. + if typeParamNames[tn] { + continue + } var pkgPath, pkgName string if pkg := tn.Pkg(); pkg != nil { pkgPath = pkg.Path() @@ -333,9 +338,9 @@ func (a *analyzer) inlineAlias(tn *types.TypeName, cur cursor.Cursor) { // The name is in the current package or the universe scope, so no import // is required. Check that it is not shadowed (that is, that the type // it refers to in rhs is the same one it refers to at n). 
- scope := a.pass.TypesInfo.Scopes[curFile].Innermost(n.Pos()) // n's scope - _, obj := scope.LookupParent(tn.Name(), n.Pos()) // what qn.name means in n's scope - if obj != tn { // shadowed + scope := a.pass.TypesInfo.Scopes[curFile].Innermost(id.Pos()) // n's scope + _, obj := scope.LookupParent(tn.Name(), id.Pos()) // what qn.name means in n's scope + if obj != tn { return } } else if !analysisinternal.CanImport(a.pass.Pkg.Path(), pkgPath) { @@ -345,15 +350,40 @@ func (a *analyzer) inlineAlias(tn *types.TypeName, cur cursor.Cursor) { // Use AddImport to add pkgPath if it's not there already. Associate the prefix it assigns // with the package path for use by the TypeString qualifier below. _, prefix, eds := analysisinternal.AddImport( - a.pass.TypesInfo, curFile, pkgName, pkgPath, tn.Name(), n.Pos()) + a.pass.TypesInfo, curFile, pkgName, pkgPath, tn.Name(), id.Pos()) importPrefixes[pkgPath] = strings.TrimSuffix(prefix, ".") edits = append(edits, eds...) } } - // If n is qualified by a package identifier, we'll need the full selector expression. - var expr ast.Expr = n - if e, _ := cur.Edge(); e == edge.SelectorExpr_Sel { - expr = cur.Parent().Node().(ast.Expr) + // Find the complete identifier, which may take any of these forms: + // Id + // Id[T] + // Id[K, V] + // pkg.Id + // pkg.Id[T] + // pkg.Id[K, V] + var expr ast.Expr = id + if e, _ := curId.Edge(); e == edge.SelectorExpr_Sel { + curId = curId.Parent() + expr = curId.Node().(ast.Expr) + } + // If expr is part of an IndexExpr or IndexListExpr, we'll need that node. + // Given C[int], TypeOf(C) is generic but TypeOf(C[int]) is instantiated. + switch ek, _ := curId.Edge(); ek { + case edge.IndexExpr_X: + expr = curId.Parent().Node().(*ast.IndexExpr) + case edge.IndexListExpr_X: + expr = curId.Parent().Node().(*ast.IndexListExpr) + } + t := a.pass.TypesInfo.TypeOf(expr).(*types.Alias) // type of entire identifier + if targs := t.TypeArgs(); targs.Len() > 0 { + // Instantiate the alias with the type args from this use. + // For example, given type A = M[K, V], compute the type of the use + // A[int, Foo] as M[int, Foo]. + // Don't validate instantiation: it can't panic unless we have a bug, + // in which case seeing the stack trace via telemetry would be helpful. + instAlias, _ := types.Instantiate(nil, alias, slices.Collect(targs.Types()), false) + rhs = instAlias.(*types.Alias).Rhs() } // To get the replacement text, render the alias RHS using the package prefixes // we assigned above. 
diff --git a/gopls/internal/analysis/gofix/testdata/src/a/a.go b/gopls/internal/analysis/gofix/testdata/src/a/a.go index 49a0587c2b1..60a55052584 100644 --- a/gopls/internal/analysis/gofix/testdata/src/a/a.go +++ b/gopls/internal/analysis/gofix/testdata/src/a/a.go @@ -164,3 +164,25 @@ func _[P any]() { _ = x _ = y } + +// generic type aliases + +//go:fix inline +type ( + Mapset[T comparable] = map[T]bool // want Mapset: `goFixInline alias` + Pair[X, Y any] = struct { // want Pair: `goFixInline alias` + X X + Y Y + } +) + +var _ Mapset[int] // want `Type alias Mapset\[int\] should be inlined` + +var _ Pair[T, string] // want `Type alias Pair\[T, string\] should be inlined` + +func _[V any]() { + //go:fix inline + type M[K comparable] = map[K]V + + var _ M[int] // want `Type alias M\[int\] should be inlined` +} diff --git a/gopls/internal/analysis/gofix/testdata/src/a/a.go.golden b/gopls/internal/analysis/gofix/testdata/src/a/a.go.golden index 9d4c527919e..c637da103ee 100644 --- a/gopls/internal/analysis/gofix/testdata/src/a/a.go.golden +++ b/gopls/internal/analysis/gofix/testdata/src/a/a.go.golden @@ -165,3 +165,28 @@ func _[P any]() { _ = x _ = y } + +// generic type aliases + +//go:fix inline +type ( + Mapset[T comparable] = map[T]bool // want Mapset: `goFixInline alias` + Pair[X, Y any] = struct { // want Pair: `goFixInline alias` + X X + Y Y + } +) + +var _ map[int]bool // want `Type alias Mapset\[int\] should be inlined` + +var _ struct { + X T + Y string +} // want `Type alias Pair\[T, string\] should be inlined` + +func _[V any]() { + //go:fix inline + type M[K comparable] = map[K]V + + var _ map[int]V // want `Type alias M\[int\] should be inlined` +} From 0ffdb82ead2753b9fbba8bf0932ba396b13ba6ea Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Fri, 21 Feb 2025 16:55:53 -0500 Subject: [PATCH 089/270] gopls/internal/analysis/gofix: add vet analyzer Add a second analyzer that checks for valid go:fix inline directives without suggesting changes. Change-Id: I0b9ad3da79f554caef01dda66ef954c59718015d Reviewed-on: https://go-review.googlesource.com/c/tools/+/651656 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan --- gopls/internal/analysis/gofix/doc.go | 10 ++- gopls/internal/analysis/gofix/gofix.go | 26 ++++++- gopls/internal/analysis/gofix/gofix_test.go | 5 ++ .../gofix/testdata/src/directive/directive.go | 63 ++++++++++++++++ .../src/directive/directive.go.golden | 71 +++++++++++++++++++ 5 files changed, 172 insertions(+), 3 deletions(-) create mode 100644 gopls/internal/analysis/gofix/testdata/src/directive/directive.go create mode 100644 gopls/internal/analysis/gofix/testdata/src/directive/directive.go.golden diff --git a/gopls/internal/analysis/gofix/doc.go b/gopls/internal/analysis/gofix/doc.go index ad8b067daa4..15de4f28b27 100644 --- a/gopls/internal/analysis/gofix/doc.go +++ b/gopls/internal/analysis/gofix/doc.go @@ -5,7 +5,8 @@ /* Package gofix defines an Analyzer that inlines calls to functions and uses of constants -marked with a "//go:fix inline" doc comment. +marked with a "//go:fix inline" directive. +A second analyzer only checks uses of the directive. # Analyzer gofix @@ -81,5 +82,12 @@ The proposal https://go.dev/issue/32816 introduces the "//go:fix" directives. You can use this (officially unsupported) command to apply gofix fixes en masse: $ go run golang.org/x/tools/gopls/internal/analysis/gofix/cmd/gofix@latest -test ./... 
+ +# Analyzer gofixdirective + +gofixdirective: validate uses of gofix comment directives + +The gofixdirective analyzer checks "//go:fix inline" directives for correctness. +See the documentation for the gofix analyzer for more about "/go:fix inline". */ package gofix diff --git a/gopls/internal/analysis/gofix/gofix.go b/gopls/internal/analysis/gofix/gofix.go index 7a55d7ca93d..df7154ca2fc 100644 --- a/gopls/internal/analysis/gofix/gofix.go +++ b/gopls/internal/analysis/gofix/gofix.go @@ -34,7 +34,20 @@ var Analyzer = &analysis.Analyzer{ Name: "gofix", Doc: analysisinternal.MustExtractDoc(doc, "gofix"), URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/gofix", - Run: run, + Run: func(pass *analysis.Pass) (any, error) { return run(pass, true) }, + FactTypes: []analysis.Fact{ + (*goFixInlineFuncFact)(nil), + (*goFixInlineConstFact)(nil), + (*goFixInlineAliasFact)(nil), + }, + Requires: []*analysis.Analyzer{inspect.Analyzer}, +} + +var DirectiveAnalyzer = &analysis.Analyzer{ + Name: "gofixdirective", + Doc: analysisinternal.MustExtractDoc(doc, "gofixdirective"), + URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/gofix", + Run: func(pass *analysis.Pass) (any, error) { return run(pass, false) }, FactTypes: []analysis.Fact{ (*goFixInlineFuncFact)(nil), (*goFixInlineConstFact)(nil), @@ -46,6 +59,7 @@ var Analyzer = &analysis.Analyzer{ // analyzer holds the state for this analysis. type analyzer struct { pass *analysis.Pass + fix bool // only suggest fixes if true; else, just check directives root cursor.Cursor // memoization of repeated calls for same file. fileContent map[string][]byte @@ -55,9 +69,10 @@ type analyzer struct { inlinableAliases map[*types.TypeName]*goFixInlineAliasFact } -func run(pass *analysis.Pass) (any, error) { +func run(pass *analysis.Pass, fix bool) (any, error) { a := &analyzer{ pass: pass, + fix: fix, root: cursor.Root(pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)), fileContent: make(map[string][]byte), inlinableFuncs: make(map[*types.Func]*inline.Callee), @@ -256,6 +271,10 @@ func (a *analyzer) inlineCall(call *ast.CallExpr, cur cursor.Cursor) { a.pass.Reportf(call.Lparen, "%v", err) return } + if !a.fix { + return + } + if res.Literalized { // Users are not fond of inlinings that literalize // f(x) to func() { ... }(), so avoid them. @@ -533,6 +552,9 @@ func (a *analyzer) inlineConst(con *types.Const, cur cursor.Cursor) { // reportInline reports a diagnostic for fixing an inlinable name. func (a *analyzer) reportInline(kind, capKind string, ident ast.Expr, edits []analysis.TextEdit, newText string) { + if !a.fix { + return + } edits = append(edits, analysis.TextEdit{ Pos: ident.Pos(), End: ident.End(), diff --git a/gopls/internal/analysis/gofix/gofix_test.go b/gopls/internal/analysis/gofix/gofix_test.go index dc98ef47181..4acc4daf2ff 100644 --- a/gopls/internal/analysis/gofix/gofix_test.go +++ b/gopls/internal/analysis/gofix/gofix_test.go @@ -22,6 +22,11 @@ func TestAnalyzer(t *testing.T) { analysistest.RunWithSuggestedFixes(t, analysistest.TestData(), Analyzer, "a", "b") } +func TestDirectiveAnalyzer(t *testing.T) { + analysistest.RunWithSuggestedFixes(t, analysistest.TestData(), DirectiveAnalyzer, "directive") + +} + func TestTypesWithNames(t *testing.T) { // Test setup inspired by internal/analysisinternal/addimport_test.go. 
testenv.NeedsDefaultImporter(t) diff --git a/gopls/internal/analysis/gofix/testdata/src/directive/directive.go b/gopls/internal/analysis/gofix/testdata/src/directive/directive.go new file mode 100644 index 00000000000..47c2884c386 --- /dev/null +++ b/gopls/internal/analysis/gofix/testdata/src/directive/directive.go @@ -0,0 +1,63 @@ +package directive + +// Functions. + +func f() { + One() + + new(T).Two() +} + +type T struct{} + +//go:fix inline +func One() int { return one } // want One:`goFixInline directive.One` + +const one = 1 + +//go:fix inline +func (T) Two() int { return 2 } // want Two:`goFixInline \(directive.T\).Two` + +// Constants. + +const Uno = 1 + +//go:fix inline +const In1 = Uno // want In1: `goFixInline const "directive".Uno` + +const ( + no1 = one + + //go:fix inline + In2 = one // want In2: `goFixInline const "directive".one` +) + +//go:fix inline +const bad1 = 1 // want `invalid //go:fix inline directive: const value is not the name of another constant` + +//go:fix inline +const in5, + in6, + bad2 = one, one, + one + 1 // want `invalid //go:fix inline directive: const value is not the name of another constant` + +// Make sure we don't crash on iota consts, but still process the whole decl. +// +//go:fix inline +const ( + a = iota // want `invalid //go:fix inline directive: const value is iota` + b + in7 = one +) + +const ( + x = 1 + //go:fix inline + in8 = x +) + +//go:fix inline +const in9 = iota // want `invalid //go:fix inline directive: const value is iota` + +//go:fix inline +type E = map[[Uno]string][]*T // want `invalid //go:fix inline directive: array types not supported` diff --git a/gopls/internal/analysis/gofix/testdata/src/directive/directive.go.golden b/gopls/internal/analysis/gofix/testdata/src/directive/directive.go.golden new file mode 100644 index 00000000000..3e5b3409288 --- /dev/null +++ b/gopls/internal/analysis/gofix/testdata/src/directive/directive.go.golden @@ -0,0 +1,71 @@ +package golden + +import "a/internal" + +// Functions. + +func f() { + One() + + new(T).Two() +} + +type T struct{} + +//go:fix inline +func One() int { return one } + +const one = 1 + +//go:fix inline +func (T) Two() int { return 2 } + +// Constants. + +const Uno = 1 + +//go:fix inline +const In1 = Uno // want In1: `goFixInline const "a".Uno` + +const ( + no1 = one + + //go:fix inline + In2 = one // want In2: `goFixInline const "a".one` +) + +//go:fix inline +const bad1 = 1 // want `invalid //go:fix inline directive: const value is not the name of another constant` + +//go:fix inline +const in5, + in6, + bad2 = one, one, + one + 1 // want `invalid //go:fix inline directive: const value is not the name of another constant` + +// Make sure we don't crash on iota consts, but still process the whole decl. 
+// +//go:fix inline +const ( + a = iota // want `invalid //go:fix inline directive: const value is iota` + b + in7 = one +) + +const ( + x = 1 + //go:fix inline + in8 = x +) + +//go:fix inline +const a = iota // want `invalid //go:fix inline directive: const value is iota` + +//go:fix inline +type E = map[[Uno]string][]*T // want `invalid //go:fix inline directive: array types not supported` + +// literal array lengths are OK +// +//go:fix inline +type EL = map[[2]string][]*T // want EL: `goFixInline alias` + From 2b1f55036370bc9a05bed74aa13fa85fecce40e2 Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Fri, 21 Feb 2025 16:24:42 -0500 Subject: [PATCH 090/270] gopls/internal/analysis/gofix: allow literal array lengths An array type can be inlined if its length is a literal integer. For golang/go#32816. Change-Id: I80c7f18721c813a0ea7039411ddf8a804b5bf0b5 Reviewed-on: https://go-review.googlesource.com/c/tools/+/651655 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan --- gopls/internal/analysis/gofix/gofix.go | 5 ++++- gopls/internal/analysis/gofix/testdata/src/a/a.go | 7 +++++++ gopls/internal/analysis/gofix/testdata/src/a/a.go.golden | 7 +++++++ 3 files changed, 18 insertions(+), 1 deletion(-) diff --git a/gopls/internal/analysis/gofix/gofix.go b/gopls/internal/analysis/gofix/gofix.go index df7154ca2fc..41cebcb63b9 100644 --- a/gopls/internal/analysis/gofix/gofix.go +++ b/gopls/internal/analysis/gofix/gofix.go @@ -148,9 +148,12 @@ func (a *analyzer) findAlias(spec *ast.TypeSpec, declInline bool) { // type A = [N]int // // would result in [5]int, breaking the connection with N. - // TODO(jba): accept type expressions where the array size is a literal integer for n := range ast.Preorder(spec.Type) { if ar, ok := n.(*ast.ArrayType); ok && ar.Len != nil { + // Make an exception when the array length is a literal int. 
+ if lit, ok := ast.Unparen(ar.Len).(*ast.BasicLit); ok && lit.Kind == token.INT { + continue + } a.pass.Reportf(spec.Pos(), "invalid //go:fix inline directive: array types not supported") return } diff --git a/gopls/internal/analysis/gofix/testdata/src/a/a.go b/gopls/internal/analysis/gofix/testdata/src/a/a.go index 60a55052584..96f4f4d4e13 100644 --- a/gopls/internal/analysis/gofix/testdata/src/a/a.go +++ b/gopls/internal/analysis/gofix/testdata/src/a/a.go @@ -129,6 +129,13 @@ type E = map[[Uno]string][]*T // want `invalid //go:fix inline directive: array var _ E // nothing should happen here +// literal array lengths are OK +// +//go:fix inline +type EL = map[[2]string][]*T // want EL: `goFixInline alias` + +var _ EL // want `Type alias EL should be inlined` + //go:fix inline type F = map[internal.T]T // want F: `goFixInline alias` diff --git a/gopls/internal/analysis/gofix/testdata/src/a/a.go.golden b/gopls/internal/analysis/gofix/testdata/src/a/a.go.golden index c637da103ee..64d08ec1548 100644 --- a/gopls/internal/analysis/gofix/testdata/src/a/a.go.golden +++ b/gopls/internal/analysis/gofix/testdata/src/a/a.go.golden @@ -129,6 +129,13 @@ type E = map[[Uno]string][]*T // want `invalid //go:fix inline directive: array var _ E // nothing should happen here +// literal array lengths are OK +// +//go:fix inline +type EL = map[[2]string][]*T // want EL: `goFixInline alias` + +var _ map[[2]string][]*T // want `Type alias EL should be inlined` + //go:fix inline type F = map[internal.T]T // want F: `goFixInline alias` From 455db21bd963fea3efdf0473e0ddce37313b8f91 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Fri, 28 Feb 2025 11:09:28 -0500 Subject: [PATCH 091/270] gopls/internal/cache/parsego: fix OOB crash in fixInitStmt This is a priori not how to use safetoken. I haven't attempted to reproduce the crash. Fixes golang/go#72026 Change-Id: I7a95383032f9a882c8b667203bbe0cf06f85a987 Reviewed-on: https://go-review.googlesource.com/c/tools/+/653596 Reviewed-by: Robert Findley LUCI-TryBot-Result: Go LUCI Auto-Submit: Alan Donovan --- gopls/internal/cache/parsego/parse.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/gopls/internal/cache/parsego/parse.go b/gopls/internal/cache/parsego/parse.go index 08a1c395a2a..fd598e235d1 100644 --- a/gopls/internal/cache/parsego/parse.go +++ b/gopls/internal/cache/parsego/parse.go @@ -528,11 +528,11 @@ func fixInitStmt(bad *ast.BadExpr, parent ast.Node, tok *token.File, src []byte) } // Try to extract a statement from the BadExpr. - start, end, err := safetoken.Offsets(tok, bad.Pos(), bad.End()-1) + start, end, err := safetoken.Offsets(tok, bad.Pos(), bad.End()) if err != nil { return false } - stmtBytes := src[start : end+1] + stmtBytes := src[start:end] stmt, err := parseStmt(tok, bad.Pos(), stmtBytes) if err != nil { return false From d81d6fcce1a24f2b8d0a9493f4d84b75c80176e4 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Fri, 28 Feb 2025 18:40:18 -0500 Subject: [PATCH 092/270] gopls/internal/util/asm: better assembly parsing This CL adds a rudimentary parser for symbols in Go .s files. It is a placeholder for a more principled implementation, but it is sufficient to make Definition support control labels (also in this CL) and for a cross-references index (future work). 
+ test of Definition on control label + test of asm.Parse Updates golang/go#71754 Change-Id: I2ff19b4ade130c051197d6b097a1a3dbcd95555a Reviewed-on: https://go-review.googlesource.com/c/tools/+/654335 LUCI-TryBot-Result: Go LUCI Reviewed-by: Jonathan Amsterdam Auto-Submit: Alan Donovan --- gopls/internal/goasm/definition.go | 59 +++-- gopls/internal/golang/assembly.go | 3 + .../test/marker/testdata/definition/asm.txt | 3 + gopls/internal/util/asm/parse.go | 245 ++++++++++++++++++ gopls/internal/util/asm/parse_test.go | 67 +++++ 5 files changed, 353 insertions(+), 24 deletions(-) create mode 100644 gopls/internal/util/asm/parse.go create mode 100644 gopls/internal/util/asm/parse_test.go diff --git a/gopls/internal/goasm/definition.go b/gopls/internal/goasm/definition.go index 4403e7cac7f..903916d265d 100644 --- a/gopls/internal/goasm/definition.go +++ b/gopls/internal/goasm/definition.go @@ -2,20 +2,20 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. +// Package goasm provides language-server features for files in Go +// assembly language (https://go.dev/doc/asm). package goasm import ( - "bytes" "context" "fmt" "go/token" - "strings" - "unicode" "golang.org/x/tools/gopls/internal/cache" "golang.org/x/tools/gopls/internal/cache/metadata" "golang.org/x/tools/gopls/internal/file" "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/util/asm" "golang.org/x/tools/gopls/internal/util/morestrings" "golang.org/x/tools/internal/event" ) @@ -41,21 +41,27 @@ func Definition(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, p return nil, err } + // Parse the assembly. + // + // TODO(adonovan): make this just another + // attribute of the type-checked cache.Package. + file := asm.Parse(content) + // Figure out the selected symbol. // For now, just find the identifier around the cursor. - // - // TODO(adonovan): use a real asm parser; see cmd/asm/internal/asm/parse.go. - // Ideally this would just be just another attribute of the - // type-checked cache.Package. - nonIdentRune := func(r rune) bool { return !isIdentRune(r) } - i := bytes.LastIndexFunc(content[:offset], nonIdentRune) - j := bytes.IndexFunc(content[offset:], nonIdentRune) - if j < 0 || j == 0 { - return nil, nil // identifier runs to EOF, or not an identifier + var found *asm.Ident + for _, id := range file.Idents { + if id.Offset <= offset && offset <= id.End() { + found = &id + break + } } - sym := string(content[i+1 : offset+j]) - sym = strings.ReplaceAll(sym, "·", ".") // (U+00B7 MIDDLE DOT) - sym = strings.ReplaceAll(sym, "∕", "/") // (U+2215 DIVISION SLASH) + if found == nil { + return nil, fmt.Errorf("not an identifier") + } + + // Resolve a symbol with a "." prefix to the current package. + sym := found.Name if sym != "" && sym[0] == '.' { sym = string(mp.PkgPath) + sym } @@ -92,18 +98,23 @@ func Definition(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, p if err == nil { return []protocol.Location{loc}, nil } - } - // TODO(adonovan): support jump to var, block label, and other - // TEXT, DATA, and GLOBAL symbols in the same file. Needs asm parser. 
+ } else { + // local symbols (funcs, vars, labels) + for _, id := range file.Idents { + if id.Name == found.Name && + (id.Kind == asm.Text || id.Kind == asm.Global || id.Kind == asm.Label) { - return nil, nil -} + loc, err := mapper.OffsetLocation(id.Offset, id.End()) + if err != nil { + return nil, err + } + return []protocol.Location{loc}, nil + } + } + } -// The assembler allows center dot (· U+00B7) and -// division slash (∕ U+2215) to work as identifier characters. -func isIdentRune(r rune) bool { - return unicode.IsLetter(r) || unicode.IsDigit(r) || r == '_' || r == '·' || r == '∕' + return nil, nil } // TODO(rfindley): avoid the duplicate column mapping here, by associating a diff --git a/gopls/internal/golang/assembly.go b/gopls/internal/golang/assembly.go index 9e673dd9719..12244a58c59 100644 --- a/gopls/internal/golang/assembly.go +++ b/gopls/internal/golang/assembly.go @@ -10,6 +10,9 @@ package golang // - ./codeaction.go - computes the symbol and offers the CodeAction command. // - ../server/command.go - handles the command by opening a web page. // - ../server/server.go - handles the HTTP request and calls this function. +// +// For language-server behavior in Go assembly language files, +// see [golang.org/x/tools/gopls/internal/goasm]. import ( "bytes" diff --git a/gopls/internal/test/marker/testdata/definition/asm.txt b/gopls/internal/test/marker/testdata/definition/asm.txt index f0187d7e24a..250f237d299 100644 --- a/gopls/internal/test/marker/testdata/definition/asm.txt +++ b/gopls/internal/test/marker/testdata/definition/asm.txt @@ -26,6 +26,9 @@ var _ = ff // pacify unusedfunc analyzer TEXT ·ff(SB), $16 //@ loc(ffasm, "ff"), def("ff", ffgo) CALL example·com∕b·B //@ def("com", bB) JMP ·ff //@ def("ff", ffgo) + JMP label //@ def("label", label) +label: //@ loc(label,"label") + RET -- b/b.go -- package b diff --git a/gopls/internal/util/asm/parse.go b/gopls/internal/util/asm/parse.go new file mode 100644 index 00000000000..11c59a7cc3d --- /dev/null +++ b/gopls/internal/util/asm/parse.go @@ -0,0 +1,245 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package asm provides a simple parser for Go assembly files. +package asm + +import ( + "bufio" + "bytes" + "fmt" + "strings" + "unicode" +) + +// Kind describes the nature of an identifier in an assembly file. +type Kind uint8 + +const ( + Invalid Kind = iota // reserved zero value; not used by Ident + Ref // arbitrary reference to symbol or control label + Text // definition of TEXT (function) symbol + Global // definition of GLOBL (var) symbol + Data // initialization of GLOBL (var) symbol; effectively a reference + Label // definition of control label +) + +func (k Kind) String() string { + if int(k) < len(kindString) { + return kindString[k] + } + return fmt.Sprintf("Kind(%d)", k) +} + +var kindString = [...]string{ + Invalid: "invalid", + Ref: "ref", + Text: "text", + Global: "global", + Data: "data", + Label: "label", +} + +// A file represents a parsed file of Go assembly language. +type File struct { + Idents []Ident + + // TODO(adonovan): use token.File? This may be important in a + // future in which analyzers can report diagnostics in .s files. +} + +// Ident represents an identifier in an assembly file. +type Ident struct { + Name string // symbol name (after correcting [·∕]); Name[0]='.' 
=> current package + Offset int // zero-based byte offset + Kind Kind +} + +// End returns the identifier's end offset. +func (id Ident) End() int { return id.Offset + len(id.Name) } + +// Parse extracts identifiers from Go assembly files. +// Since it is a best-effort parser, it never returns an error. +func Parse(content []byte) *File { + var idents []Ident + offset := 0 // byte offset of start of current line + + // TODO(adonovan) use a proper tokenizer that respects + // comments, string literals, line continuations, etc. + scan := bufio.NewScanner(bytes.NewReader(content)) + for ; scan.Scan(); offset += len(scan.Bytes()) + len("\n") { + line := scan.Text() + + // Strip comments. + if idx := strings.Index(line, "//"); idx >= 0 { + line = line[:idx] + } + + // Skip blank lines. + if strings.TrimSpace(line) == "" { + continue + } + + // Check for label definitions (ending with colon). + if colon := strings.IndexByte(line, ':'); colon > 0 { + label := strings.TrimSpace(line[:colon]) + if isIdent(label) { + idents = append(idents, Ident{ + Name: label, + Offset: offset + strings.Index(line, label), + Kind: Label, + }) + continue + } + } + + // Split line into words. + words := strings.Fields(line) + if len(words) == 0 { + continue + } + + // A line of the form + // TEXT ·sym(SB),NOSPLIT,$12 + // declares a text symbol "·sym". + if len(words) > 1 { + kind := Invalid + switch words[0] { + case "TEXT": + kind = Text + case "GLOBL": + kind = Global + case "DATA": + kind = Data + } + if kind != Invalid { + sym := words[1] + sym = cutBefore(sym, ",") // strip ",NOSPLIT,$12" etc + sym = cutBefore(sym, "(") // "sym(SB)" -> "sym" + sym = cutBefore(sym, "<") // "sym" -> "sym" + sym = strings.TrimSpace(sym) + if isIdent(sym) { + // (The Index call assumes sym is not itself "TEXT" etc.) + idents = append(idents, Ident{ + Name: cleanup(sym), + Kind: kind, + Offset: offset + strings.Index(line, sym), + }) + } + continue + } + } + + // Find references in the rest of the line. + pos := 0 + for _, word := range words { + // Find actual position of word within line. + tokenPos := strings.Index(line[pos:], word) + if tokenPos < 0 { + panic(line) + } + tokenPos += pos + pos = tokenPos + len(word) + + // Reject probable instruction mnemonics (e.g. MOV). + if len(word) >= 2 && word[0] != '·' && + !strings.ContainsFunc(word, unicode.IsLower) { + continue + } + + if word[0] == '$' { + word = word[1:] + tokenPos++ + + // Reject probable immediate values (e.g. "$123"). + if !strings.ContainsFunc(word, isNonDigit) { + continue + } + } + + // Reject probably registers (e.g. "PC"). + if len(word) <= 3 && !strings.ContainsFunc(word, unicode.IsLower) { + continue + } + + // Probable identifier reference. + // + // TODO(adonovan): handle FP symbols correctly; + // sym+8(FP) is essentially a comment about + // stack slot 8, not a reference to a symbol + // with a declaration somewhere; so they form + // an equivalence class without a canonical + // declaration. 
+ // + // TODO(adonovan): handle pseudoregisters and field + // references such as: + // MOVD $runtime·g0(SB), g // pseudoreg + // MOVD R0, g_stackguard0(g) // field ref + + sym := cutBefore(word, "(") // "·sym(SB)" => "sym" + sym = cutBefore(sym, "+") // "sym+8(FP)" => "sym" + sym = cutBefore(sym, "<") // "sym" =>> "sym" + if isIdent(sym) { + idents = append(idents, Ident{ + Name: cleanup(sym), + Kind: Ref, + Offset: offset + tokenPos, + }) + } + } + } + + _ = scan.Err() // ignore scan errors + + return &File{Idents: idents} +} + +// isIdent reports whether s is a valid Go assembly identifier. +func isIdent(s string) bool { + for i, r := range s { + if !isIdentRune(r, i) { + return false + } + } + return len(s) > 0 +} + +// cutBefore returns the portion of s before the first occurrence of sep, if any. +func cutBefore(s, sep string) string { + if before, _, ok := strings.Cut(s, sep); ok { + return before + } + return s +} + +// cleanup converts a symbol name from assembler syntax to linker syntax. +func cleanup(sym string) string { + return repl.Replace(sym) +} + +var repl = strings.NewReplacer( + "·", ".", // (U+00B7 MIDDLE DOT) + "∕", "/", // (U+2215 DIVISION SLASH) +) + +func isNonDigit(r rune) bool { return !unicode.IsDigit(r) } + +// -- plundered from GOROOT/src/cmd/asm/internal/asm/parse.go -- + +// We want center dot (·) and division slash (∕) to work as identifier characters. +func isIdentRune(ch rune, i int) bool { + if unicode.IsLetter(ch) { + return true + } + switch ch { + case '_': // Underscore; traditional. + return true + case '\u00B7': // Represents the period in runtime.exit. U+00B7 '·' middle dot + return true + case '\u2215': // Represents the slash in runtime/debug.setGCPercent. U+2215 '∕' division slash + return true + } + // Digits are OK only after the first character. + return i > 0 && unicode.IsDigit(ch) +} diff --git a/gopls/internal/util/asm/parse_test.go b/gopls/internal/util/asm/parse_test.go new file mode 100644 index 00000000000..67a1286d28b --- /dev/null +++ b/gopls/internal/util/asm/parse_test.go @@ -0,0 +1,67 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package asm_test + +import ( + "bytes" + "fmt" + "testing" + + "github.com/google/go-cmp/cmp" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/util/asm" +) + +// TestIdents checks that (likely) identifiers are extracted in the expected places. +func TestIdents(t *testing.T) { + src := []byte(` +// This is a nonsense file containing a variety of syntax. 
+ +#include "foo.h" +#ifdef MACRO +DATA hello<>+0x00(SB)/64, $"Hello" +GLOBL hello<(SB), RODATA, $64 +#endif + +TEXT mypkg·f(SB),NOSPLIT,$0 + MOVD R1, 16(RSP) // another comment + MOVD $otherpkg·data(SB), R2 + JMP label +label: + BL ·g(SB) + +TEXT ·g(SB),NOSPLIT,$0 + MOVD $runtime·g0(SB), g + MOVD R0, g_stackguard0(g) + MOVD R0, (g_stack+stack_lo)(g) +`[1:]) + const filename = "asm.s" + m := protocol.NewMapper(protocol.URIFromPath(filename), src) + file := asm.Parse(src) + + want := ` +asm.s:5:6-11: data "hello" +asm.s:6:7-12: global "hello" +asm.s:9:6-13: text "mypkg.f" +asm.s:11:8-21: ref "otherpkg.data" +asm.s:12:6-11: ref "label" +asm.s:13:1-6: label "label" +asm.s:14:5-7: ref ".g" +asm.s:16:6-8: text ".g" +asm.s:17:8-18: ref "runtime.g0" +asm.s:17:25-26: ref "g" +asm.s:18:11-24: ref "g_stackguard0" +`[1:] + var buf bytes.Buffer + for _, id := range file.Idents { + line, col := m.OffsetLineCol8(id.Offset) + _, endCol := m.OffsetLineCol8(id.Offset + len(id.Name)) + fmt.Fprintf(&buf, "%s:%d:%d-%d:\t%s %q\n", filename, line, col, endCol, id.Kind, id.Name) + } + got := buf.String() + if got != want { + t.Errorf("got:\n%s\nwant:\n%s\ndiff:\n%s", got, want, cmp.Diff(want, got)) + } +} From 8d38122b0b1a9991f490aa06b7bfca7b4140bdad Mon Sep 17 00:00:00 2001 From: Rob Findley Date: Mon, 3 Mar 2025 22:13:31 +0000 Subject: [PATCH 093/270] gopls/internal/cache: reproduce and fix crash on if cond overflow Through reverse engineering, I was able to reproduce the overflow of golang/go#72026, and verify the fix of CL 653596. Along the way, I incidentally reproduced golang/go#66766, which I think we can safely ignore now that we understand it. Updates golang/go#72026 Fixes golang/go#66766 Change-Id: I2131d771c13688c1ad47f6bc6285e524fb4c04a1 Reviewed-on: https://go-review.googlesource.com/c/tools/+/654336 Reviewed-by: Alan Donovan Auto-Submit: Robert Findley LUCI-TryBot-Result: Go LUCI --- gopls/internal/cache/check.go | 15 +++++++-------- gopls/internal/cache/parsego/parse.go | 1 + .../integration/completion/fixedbugs_test.go | 17 +++++++++++++++++ 3 files changed, 25 insertions(+), 8 deletions(-) diff --git a/gopls/internal/cache/check.go b/gopls/internal/cache/check.go index aa1537c8705..27d5cfa240b 100644 --- a/gopls/internal/cache/check.go +++ b/gopls/internal/cache/check.go @@ -2005,15 +2005,14 @@ func typeErrorsToDiagnostics(pkg *syntaxPackage, inputs *typeCheckInputs, errs [ posn := safetoken.StartPosition(e.Fset, start) if !posn.IsValid() { // All valid positions produced by the type checker should described by - // its fileset. + // its fileset, yet since type checker errors are associated with + // positions in the AST, and AST nodes can overflow the file + // (golang/go#48300), we can't rely on this. // - // Note: in golang/go#64488, we observed an error that was positioned - // over fixed syntax, which overflowed its file. So it's definitely - // possible that we get here (it's hard to reason about fixing up the - // AST). Nevertheless, it's a bug. - if pkg.hasFixedFiles() { - bug.Reportf("internal error: type checker error %q outside its Fset (fixed files)", e) - } else { + // We should fix the parser, but in the meantime type errors are not + // significant if there are parse errors, so we can safely ignore this + // case. 
+ if len(pkg.parseErrors) == 0 { bug.Reportf("internal error: type checker error %q outside its Fset", e) } continue diff --git a/gopls/internal/cache/parsego/parse.go b/gopls/internal/cache/parsego/parse.go index fd598e235d1..4b37816caff 100644 --- a/gopls/internal/cache/parsego/parse.go +++ b/gopls/internal/cache/parsego/parse.go @@ -532,6 +532,7 @@ func fixInitStmt(bad *ast.BadExpr, parent ast.Node, tok *token.File, src []byte) if err != nil { return false } + assert(end <= len(src), "offset overflow") // golang/go#72026 stmtBytes := src[start:end] stmt, err := parseStmt(tok, bad.Pos(), stmtBytes) if err != nil { diff --git a/gopls/internal/test/integration/completion/fixedbugs_test.go b/gopls/internal/test/integration/completion/fixedbugs_test.go index faa5324e138..ccec432904e 100644 --- a/gopls/internal/test/integration/completion/fixedbugs_test.go +++ b/gopls/internal/test/integration/completion/fixedbugs_test.go @@ -38,3 +38,20 @@ package } }) } + +func TestFixInitStatementCrash_Issue72026(t *testing.T) { + // This test checks that we don't crash when the if condition overflows the + // file (as is possible with a malformed struct type). + + const files = ` +-- go.mod -- +module example.com + +go 1.18 +` + + Run(t, files, func(t *testing.T, env *Env) { + env.CreateBuffer("p.go", "package p\nfunc _() {\n\tfor i := struct") + env.AfterChange() + }) +} From 07219402b2fc707689574d91ee3cfd2c9a544a87 Mon Sep 17 00:00:00 2001 From: xieyuschen Date: Tue, 4 Mar 2025 19:05:13 +0800 Subject: [PATCH 094/270] gopls/internal/analysis/modernize: strings.Fields -> FieldsSeq This CL enhances the existing modernizer to support calls to strings.Fields and bytes.Fields, that offers a fix to instead use go1.24's FieldsSeq, which avoids allocating an array. Fixes golang/go#72033 Change-Id: I2059f66f38a639c5a264b650137ced7b4f84550e Reviewed-on: https://go-review.googlesource.com/c/tools/+/654535 Auto-Submit: Alan Donovan Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI Reviewed-by: Junyang Shao --- gopls/doc/analyzers.md | 2 +- gopls/internal/analysis/modernize/doc.go | 2 +- .../internal/analysis/modernize/modernize.go | 2 +- .../analysis/modernize/modernize_test.go | 1 + .../modernize/{splitseq.go => stringsseq.go} | 31 +++++++++----- .../testdata/src/fieldsseq/fieldsseq.go | 42 +++++++++++++++++++ .../src/fieldsseq/fieldsseq.go.golden | 42 +++++++++++++++++++ .../testdata/src/fieldsseq/fieldsseq_go123.go | 1 + gopls/internal/doc/api.json | 4 +- 9 files changed, 111 insertions(+), 16 deletions(-) rename gopls/internal/analysis/modernize/{splitseq.go => stringsseq.go} (77%) create mode 100644 gopls/internal/analysis/modernize/testdata/src/fieldsseq/fieldsseq.go create mode 100644 gopls/internal/analysis/modernize/testdata/src/fieldsseq/fieldsseq.go.golden create mode 100644 gopls/internal/analysis/modernize/testdata/src/fieldsseq/fieldsseq_go123.go diff --git a/gopls/doc/analyzers.md b/gopls/doc/analyzers.md index dde95591718..aa95e024089 100644 --- a/gopls/doc/analyzers.md +++ b/gopls/doc/analyzers.md @@ -498,7 +498,7 @@ existing code by using more modern features of Go, such as: - replacing a 3-clause for i := 0; i < n; i++ {} loop by for i := range n {}, added in go1.22; - replacing Split in "for range strings.Split(...)" by go1.24's - more efficient SplitSeq; + more efficient SplitSeq, or Fields with FieldSeq; To apply all modernization fixes en masse, you can use the following command: diff --git a/gopls/internal/analysis/modernize/doc.go b/gopls/internal/analysis/modernize/doc.go index 
3759fdb10c5..b12abab7063 100644 --- a/gopls/internal/analysis/modernize/doc.go +++ b/gopls/internal/analysis/modernize/doc.go @@ -31,7 +31,7 @@ // - replacing a 3-clause for i := 0; i < n; i++ {} loop by // for i := range n {}, added in go1.22; // - replacing Split in "for range strings.Split(...)" by go1.24's -// more efficient SplitSeq; +// more efficient SplitSeq, or Fields with FieldSeq; // // To apply all modernization fixes en masse, you can use the // following command: diff --git a/gopls/internal/analysis/modernize/modernize.go b/gopls/internal/analysis/modernize/modernize.go index 354836d6b40..96e8b325df4 100644 --- a/gopls/internal/analysis/modernize/modernize.go +++ b/gopls/internal/analysis/modernize/modernize.go @@ -72,7 +72,7 @@ func run(pass *analysis.Pass) (any, error) { rangeint(pass) slicescontains(pass) slicesdelete(pass) - splitseq(pass) + stringsseq(pass) sortslice(pass) testingContext(pass) diff --git a/gopls/internal/analysis/modernize/modernize_test.go b/gopls/internal/analysis/modernize/modernize_test.go index 6662914b28d..7bdc8014389 100644 --- a/gopls/internal/analysis/modernize/modernize_test.go +++ b/gopls/internal/analysis/modernize/modernize_test.go @@ -24,6 +24,7 @@ func Test(t *testing.T) { "slicescontains", "slicesdelete", "splitseq", + "fieldsseq", "sortslice", "testingcontext", ) diff --git a/gopls/internal/analysis/modernize/splitseq.go b/gopls/internal/analysis/modernize/stringsseq.go similarity index 77% rename from gopls/internal/analysis/modernize/splitseq.go rename to gopls/internal/analysis/modernize/stringsseq.go index 1f3da859e9b..ca9d918912e 100644 --- a/gopls/internal/analysis/modernize/splitseq.go +++ b/gopls/internal/analysis/modernize/stringsseq.go @@ -5,6 +5,7 @@ package modernize import ( + "fmt" "go/ast" "go/token" "go/types" @@ -17,8 +18,9 @@ import ( "golang.org/x/tools/internal/astutil/edge" ) -// splitseq offers a fix to replace a call to strings.Split with -// SplitSeq when it is the operand of a range loop, either directly: +// stringsseq offers a fix to replace a call to strings.Split with +// SplitSeq or strings.Fields with FieldsSeq +// when it is the operand of a range loop, either directly: // // for _, line := range strings.Split() {...} // @@ -29,7 +31,8 @@ import ( // // Variants: // - bytes.SplitSeq -func splitseq(pass *analysis.Pass) { +// - bytes.FieldsSeq +func stringsseq(pass *analysis.Pass) { if !analysisinternal.Imports(pass.Pkg, "strings") && !analysisinternal.Imports(pass.Pkg, "bytes") { return @@ -88,21 +91,27 @@ func splitseq(pass *analysis.Pass) { }) } - if sel, ok := call.Fun.(*ast.SelectorExpr); ok && - (analysisinternal.IsFunctionNamed(typeutil.Callee(info, call), "strings", "Split") || - analysisinternal.IsFunctionNamed(typeutil.Callee(info, call), "bytes", "Split")) { + sel, ok := call.Fun.(*ast.SelectorExpr) + if !ok { + continue + } + + obj := typeutil.Callee(info, call) + if analysisinternal.IsFunctionNamed(obj, "strings", "Split", "Fields") || + analysisinternal.IsFunctionNamed(obj, "bytes", "Split", "Fields") { + oldFnName := obj.Name() + seqFnName := fmt.Sprintf("%sSeq", oldFnName) pass.Report(analysis.Diagnostic{ Pos: sel.Pos(), End: sel.End(), - Category: "splitseq", - Message: "Ranging over SplitSeq is more efficient", + Category: "stringsseq", + Message: fmt.Sprintf("Ranging over %s is more efficient", seqFnName), SuggestedFixes: []analysis.SuggestedFix{{ - Message: "Replace Split with SplitSeq", + Message: fmt.Sprintf("Replace %s with %s", oldFnName, seqFnName), TextEdits: append(edits, 
analysis.TextEdit{ - // Split -> SplitSeq Pos: sel.Sel.Pos(), End: sel.Sel.End(), - NewText: []byte("SplitSeq")}), + NewText: []byte(seqFnName)}), }}, }) } diff --git a/gopls/internal/analysis/modernize/testdata/src/fieldsseq/fieldsseq.go b/gopls/internal/analysis/modernize/testdata/src/fieldsseq/fieldsseq.go new file mode 100644 index 00000000000..b86df1a8a94 --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/fieldsseq/fieldsseq.go @@ -0,0 +1,42 @@ +//go:build go1.24 + +package fieldsseq + +import ( + "bytes" + "strings" +) + +func _() { + for _, line := range strings.Fields("") { // want "Ranging over FieldsSeq is more efficient" + println(line) + } + for i, line := range strings.Fields("") { // nope: uses index var + println(i, line) + } + for i, _ := range strings.Fields("") { // nope: uses index var + println(i) + } + for i := range strings.Fields("") { // nope: uses index var + println(i) + } + for _ = range strings.Fields("") { // want "Ranging over FieldsSeq is more efficient" + } + for range strings.Fields("") { // want "Ranging over FieldsSeq is more efficient" + } + for range bytes.Fields(nil) { // want "Ranging over FieldsSeq is more efficient" + } + { + lines := strings.Fields("") // want "Ranging over FieldsSeq is more efficient" + for _, line := range lines { + println(line) + } + } + { + lines := strings.Fields("") // nope: lines is used not just by range + for _, line := range lines { + println(line) + } + println(lines) + } +} diff --git a/gopls/internal/analysis/modernize/testdata/src/fieldsseq/fieldsseq.go.golden b/gopls/internal/analysis/modernize/testdata/src/fieldsseq/fieldsseq.go.golden new file mode 100644 index 00000000000..9fa1bfd1b62 --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/fieldsseq/fieldsseq.go.golden @@ -0,0 +1,42 @@ +//go:build go1.24 + +package fieldsseq + +import ( + "bytes" + "strings" +) + +func _() { + for line := range strings.FieldsSeq("") { // want "Ranging over FieldsSeq is more efficient" + println(line) + } + for i, line := range strings.Fields( "") { // nope: uses index var + println(i, line) + } + for i, _ := range strings.Fields( "") { // nope: uses index var + println(i) + } + for i := range strings.Fields( "") { // nope: uses index var + println(i) + } + for range strings.FieldsSeq("") { // want "Ranging over FieldsSeq is more efficient" + } + for range strings.FieldsSeq("") { // want "Ranging over FieldsSeq is more efficient" + } + for range bytes.FieldsSeq(nil) { // want "Ranging over FieldsSeq is more efficient" + } + { + lines := strings.FieldsSeq("") // want "Ranging over FieldsSeq is more efficient" + for line := range lines { + println(line) + } + } + { + lines := strings.Fields( "") // nope: lines is used not just by range + for _, line := range lines { + println(line) + } + println(lines) + } +} diff --git a/gopls/internal/analysis/modernize/testdata/src/fieldsseq/fieldsseq_go123.go b/gopls/internal/analysis/modernize/testdata/src/fieldsseq/fieldsseq_go123.go new file mode 100644 index 00000000000..c2bd314db75 --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/fieldsseq/fieldsseq_go123.go @@ -0,0 +1 @@ +package fieldsseq diff --git a/gopls/internal/doc/api.json b/gopls/internal/doc/api.json index 5775d0d4361..4001e3605bb 100644 --- a/gopls/internal/doc/api.json +++ b/gopls/internal/doc/api.json @@ -514,7 +514,7 @@ }, { "Name": "\"modernize\"", - "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code 
by using more modern features of Go, such as:\n\n - replacing an if/else conditional assignment by a call to the\n built-in min or max functions added in go1.21;\n - replacing sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21;\n - replacing interface{} by the 'any' type added in go1.18;\n - replacing append([]T(nil), s...) by slices.Clone(s) or\n slices.Concat(s), added in go1.21;\n - replacing a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions\n from the maps package, added in go1.21;\n - replacing []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...),\n added in go1.19;\n - replacing uses of context.WithCancel in tests with t.Context, added in\n go1.24;\n - replacing omitempty by omitzero on structs, added in go1.24;\n - replacing append(s[:i], s[i+1]...) by slices.Delete(s, i, i+1),\n added in go1.21\n - replacing a 3-clause for i := 0; i \u003c n; i++ {} loop by\n for i := range n {}, added in go1.22;\n - replacing Split in \"for range strings.Split(...)\" by go1.24's\n more efficient SplitSeq;\n\nTo apply all modernization fixes en masse, you can use the\nfollowing command:\n\n\t$ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -test ./...\n\nIf the tool warns of conflicting fixes, you may need to run it more\nthan once until it has applied all fixes cleanly. This command is\nnot an officially supported interface and may change in the future.", + "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code by using more modern features of Go, such as:\n\n - replacing an if/else conditional assignment by a call to the\n built-in min or max functions added in go1.21;\n - replacing sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21;\n - replacing interface{} by the 'any' type added in go1.18;\n - replacing append([]T(nil), s...) by slices.Clone(s) or\n slices.Concat(s), added in go1.21;\n - replacing a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions\n from the maps package, added in go1.21;\n - replacing []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...),\n added in go1.19;\n - replacing uses of context.WithCancel in tests with t.Context, added in\n go1.24;\n - replacing omitempty by omitzero on structs, added in go1.24;\n - replacing append(s[:i], s[i+1]...) by slices.Delete(s, i, i+1),\n added in go1.21\n - replacing a 3-clause for i := 0; i \u003c n; i++ {} loop by\n for i := range n {}, added in go1.22;\n - replacing Split in \"for range strings.Split(...)\" by go1.24's\n more efficient SplitSeq, or Fields with FieldSeq;\n\nTo apply all modernization fixes en masse, you can use the\nfollowing command:\n\n\t$ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -test ./...\n\nIf the tool warns of conflicting fixes, you may need to run it more\nthan once until it has applied all fixes cleanly. 
This command is\nnot an officially supported interface and may change in the future.", "Default": "true" }, { @@ -1228,7 +1228,7 @@ }, { "Name": "modernize", - "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code by using more modern features of Go, such as:\n\n - replacing an if/else conditional assignment by a call to the\n built-in min or max functions added in go1.21;\n - replacing sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21;\n - replacing interface{} by the 'any' type added in go1.18;\n - replacing append([]T(nil), s...) by slices.Clone(s) or\n slices.Concat(s), added in go1.21;\n - replacing a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions\n from the maps package, added in go1.21;\n - replacing []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...),\n added in go1.19;\n - replacing uses of context.WithCancel in tests with t.Context, added in\n go1.24;\n - replacing omitempty by omitzero on structs, added in go1.24;\n - replacing append(s[:i], s[i+1]...) by slices.Delete(s, i, i+1),\n added in go1.21\n - replacing a 3-clause for i := 0; i \u003c n; i++ {} loop by\n for i := range n {}, added in go1.22;\n - replacing Split in \"for range strings.Split(...)\" by go1.24's\n more efficient SplitSeq;\n\nTo apply all modernization fixes en masse, you can use the\nfollowing command:\n\n\t$ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -test ./...\n\nIf the tool warns of conflicting fixes, you may need to run it more\nthan once until it has applied all fixes cleanly. This command is\nnot an officially supported interface and may change in the future.", + "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code by using more modern features of Go, such as:\n\n - replacing an if/else conditional assignment by a call to the\n built-in min or max functions added in go1.21;\n - replacing sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21;\n - replacing interface{} by the 'any' type added in go1.18;\n - replacing append([]T(nil), s...) by slices.Clone(s) or\n slices.Concat(s), added in go1.21;\n - replacing a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions\n from the maps package, added in go1.21;\n - replacing []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...),\n added in go1.19;\n - replacing uses of context.WithCancel in tests with t.Context, added in\n go1.24;\n - replacing omitempty by omitzero on structs, added in go1.24;\n - replacing append(s[:i], s[i+1]...) by slices.Delete(s, i, i+1),\n added in go1.21\n - replacing a 3-clause for i := 0; i \u003c n; i++ {} loop by\n for i := range n {}, added in go1.22;\n - replacing Split in \"for range strings.Split(...)\" by go1.24's\n more efficient SplitSeq, or Fields with FieldSeq;\n\nTo apply all modernization fixes en masse, you can use the\nfollowing command:\n\n\t$ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -test ./...\n\nIf the tool warns of conflicting fixes, you may need to run it more\nthan once until it has applied all fixes cleanly. 
This command is\nnot an officially supported interface and may change in the future.", "URL": "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/modernize", "Default": true }, From 340f21a49b9cad20d07a2b58e483a991084dc481 Mon Sep 17 00:00:00 2001 From: xieyuschen Date: Wed, 5 Mar 2025 12:14:02 +0800 Subject: [PATCH 095/270] gopls: move gopls/doc/generate package This CL tracks adonovan's TODO by moving generate package from gopls/doc/generate to gopls/internal/doc/generate. Change-Id: I08fc90859cc6afe10ab5ac658a7b8a514d36cc32 Reviewed-on: https://go-review.googlesource.com/c/tools/+/654536 Reviewed-by: Alan Donovan Reviewed-by: Junyang Shao Auto-Submit: Alan Donovan LUCI-TryBot-Result: Go LUCI --- gopls/internal/doc/api.go | 2 +- gopls/{ => internal}/doc/generate/generate.go | 4 +--- gopls/{ => internal}/doc/generate/generate_test.go | 0 3 files changed, 2 insertions(+), 4 deletions(-) rename gopls/{ => internal}/doc/generate/generate.go (99%) rename gopls/{ => internal}/doc/generate/generate_test.go (100%) diff --git a/gopls/internal/doc/api.go b/gopls/internal/doc/api.go index 258f90d49ae..5011d2172ed 100644 --- a/gopls/internal/doc/api.go +++ b/gopls/internal/doc/api.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:generate go run ../../doc/generate +//go:generate go run ./generate // The doc package provides JSON metadata that documents gopls' public // interfaces. diff --git a/gopls/doc/generate/generate.go b/gopls/internal/doc/generate/generate.go similarity index 99% rename from gopls/doc/generate/generate.go rename to gopls/internal/doc/generate/generate.go index b0d3e8c49f6..51c8b89e39b 100644 --- a/gopls/doc/generate/generate.go +++ b/gopls/internal/doc/generate/generate.go @@ -11,9 +11,7 @@ // // Run it with this command: // -// $ cd gopls/internal/doc && go generate -// -// TODO(adonovan): move this package to gopls/internal/doc/generate. +// $ cd gopls/internal/doc/generate && go generate package main import ( diff --git a/gopls/doc/generate/generate_test.go b/gopls/internal/doc/generate/generate_test.go similarity index 100% rename from gopls/doc/generate/generate_test.go rename to gopls/internal/doc/generate/generate_test.go From ece9e9ba0760eb361376c8a890b24e89db031d9e Mon Sep 17 00:00:00 2001 From: Hongxiang Jiang Date: Tue, 25 Feb 2025 13:48:15 -0500 Subject: [PATCH 096/270] gopls/doc/generate: add status in codelenses and inlayhints Features configurable through map[K]V can not be marked as experimental. To comply with deprecation guideline, this CL introduces a per key and per value status where gopls can mark a specific key or a specific value as experimental. The status can be indicated by the comment directives as part of the doc comment. The status can be delcared following pattern "//gopls:status X" very similar to struct tag. This clarifies the question: if "codelenses" is a released feature, are all enum keys configurable in "codelenses" are also released feature? 
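As an illustrative sketch only (hypothetical constant and type names, not
the exact code of this change), a doc comment carrying such a directive
might look like:

	package settings

	// CodeLensSource identifies a codelens source (hypothetical name).
	type CodeLensSource string

	// Vulncheck runs govulncheck synchronously on the go.mod module
	// directive.
	//
	// The documentation generator would read the directive below and
	// record Status: "experimental" for this key in api.json.
	//
	//gopls:status experimental
	const Vulncheck CodeLensSource = "vulncheck"
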
VSCode-Go CL 652357 Change-Id: I4ddc5155751452d5f7b92bbb3610aa61680a29a4 Reviewed-on: https://go-review.googlesource.com/c/tools/+/652356 Auto-Submit: Hongxiang Jiang Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI --- gopls/doc/codelenses.md | 4 + gopls/internal/analysis/gofix/directive.go | 95 ------ gopls/internal/analysis/gofix/gofix.go | 3 +- gopls/internal/doc/api.go | 8 +- gopls/internal/doc/api.json | 351 ++++++++++++++------- gopls/internal/doc/generate/generate.go | 55 +++- gopls/internal/settings/settings.go | 4 + internal/astutil/comment.go | 85 +++++ 8 files changed, 375 insertions(+), 230 deletions(-) delete mode 100644 gopls/internal/analysis/gofix/directive.go diff --git a/gopls/doc/codelenses.md b/gopls/doc/codelenses.md index d8aa8e1f479..fa7c6c68859 100644 --- a/gopls/doc/codelenses.md +++ b/gopls/doc/codelenses.md @@ -75,6 +75,8 @@ File type: Go ## `run_govulncheck`: Run govulncheck (legacy) +**This setting is experimental and may be deleted.** + This codelens source annotates the `module` directive in a go.mod file with a command to run Govulncheck asynchronously. @@ -134,6 +136,8 @@ File type: go.mod ## `vulncheck`: Run govulncheck +**This setting is experimental and may be deleted.** + This codelens source annotates the `module` directive in a go.mod file with a command to run govulncheck synchronously. diff --git a/gopls/internal/analysis/gofix/directive.go b/gopls/internal/analysis/gofix/directive.go deleted file mode 100644 index 20c45313cfb..00000000000 --- a/gopls/internal/analysis/gofix/directive.go +++ /dev/null @@ -1,95 +0,0 @@ -// Copyright 2024 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package gofix - -import ( - "go/ast" - "go/token" - "strings" -) - -// -- plundered from the future (CL 605517, issue #68021) -- - -// TODO(adonovan): replace with ast.Directive after go1.25 (#68021). -// Beware of our local mods to handle analysistest -// "want" comments on the same line. - -// A directive is a comment line with special meaning to the Go -// toolchain or another tool. It has the form: -// -// //tool:name args -// -// The "tool:" portion is missing for the three directives named -// line, extern, and export. -// -// See https://go.dev/doc/comment#Syntax for details of Go comment -// syntax and https://pkg.go.dev/cmd/compile#hdr-Compiler_Directives -// for details of directives used by the Go compiler. -type directive struct { - Pos token.Pos // of preceding "//" - Tool string - Name string - Args string // may contain internal spaces -} - -// directives returns the directives within the comment. -func directives(g *ast.CommentGroup) (res []*directive) { - if g != nil { - // Avoid (*ast.CommentGroup).Text() as it swallows directives. - for _, c := range g.List { - if len(c.Text) > 2 && - c.Text[1] == '/' && - c.Text[2] != ' ' && - isDirective(c.Text[2:]) { - - tool, nameargs, ok := strings.Cut(c.Text[2:], ":") - if !ok { - // Must be one of {line,extern,export}. - tool, nameargs = "", tool - } - name, args, _ := strings.Cut(nameargs, " ") // tab?? - // Permit an additional line comment after the args, chiefly to support - // [golang.org/x/tools/go/analysis/analysistest]. - args, _, _ = strings.Cut(args, "//") - res = append(res, &directive{ - Pos: c.Slash, - Tool: tool, - Name: name, - Args: strings.TrimSpace(args), - }) - } - } - } - return -} - -// isDirective reports whether c is a comment directive. -// This code is also in go/printer. 
-func isDirective(c string) bool { - // "//line " is a line directive. - // "//extern " is for gccgo. - // "//export " is for cgo. - // (The // has been removed.) - if strings.HasPrefix(c, "line ") || strings.HasPrefix(c, "extern ") || strings.HasPrefix(c, "export ") { - return true - } - - // "//[a-z0-9]+:[a-z0-9]" - // (The // has been removed.) - colon := strings.Index(c, ":") - if colon <= 0 || colon+1 >= len(c) { - return false - } - for i := 0; i <= colon+1; i++ { - if i == colon { - continue - } - b := c[i] - if !('a' <= b && b <= 'z' || '0' <= b && b <= '9') { - return false - } - } - return true -} diff --git a/gopls/internal/analysis/gofix/gofix.go b/gopls/internal/analysis/gofix/gofix.go index 41cebcb63b9..a2380f1d644 100644 --- a/gopls/internal/analysis/gofix/gofix.go +++ b/gopls/internal/analysis/gofix/gofix.go @@ -20,6 +20,7 @@ import ( "golang.org/x/tools/go/types/typeutil" "golang.org/x/tools/gopls/internal/util/moreiters" "golang.org/x/tools/internal/analysisinternal" + internalastutil "golang.org/x/tools/internal/astutil" "golang.org/x/tools/internal/astutil/cursor" "golang.org/x/tools/internal/astutil/edge" "golang.org/x/tools/internal/diff" @@ -598,7 +599,7 @@ func currentFile(c cursor.Cursor) *ast.File { // hasFixInline reports the presence of a "//go:fix inline" directive // in the comments. func hasFixInline(cg *ast.CommentGroup) bool { - for _, d := range directives(cg) { + for _, d := range internalastutil.Directives(cg) { if d.Tool == "go" && d.Name == "fix" && d.Args == "inline" { return true } diff --git a/gopls/internal/doc/api.go b/gopls/internal/doc/api.go index 5011d2172ed..52101dda8c9 100644 --- a/gopls/internal/doc/api.go +++ b/gopls/internal/doc/api.go @@ -47,11 +47,13 @@ type EnumKey struct { Name string // in JSON syntax (quoted) Doc string Default string + Status string // = "" | "advanced" | "experimental" | "deprecated" } type EnumValue struct { - Value string // in JSON syntax (quoted) - Doc string // doc comment; always starts with `Value` + Value string // in JSON syntax (quoted) + Doc string // doc comment; always starts with `Value` + Status string // = "" | "advanced" | "experimental" | "deprecated" } type Lens struct { @@ -60,6 +62,7 @@ type Lens struct { Title string Doc string Default bool + Status string // = "" | "advanced" | "experimental" | "deprecated" } type Analyzer struct { @@ -73,4 +76,5 @@ type Hint struct { Name string Doc string Default bool + Status string // = "" | "advanced" | "experimental" | "deprecated" } diff --git a/gopls/internal/doc/api.json b/gopls/internal/doc/api.json index 4001e3605bb..b9e0e78e950 100644 --- a/gopls/internal/doc/api.json +++ b/gopls/internal/doc/api.json @@ -124,23 +124,28 @@ "EnumValues": [ { "Value": "\"FullDocumentation\"", - "Doc": "" + "Doc": "", + "Status": "" }, { "Value": "\"NoDocumentation\"", - "Doc": "" + "Doc": "", + "Status": "" }, { "Value": "\"SingleLine\"", - "Doc": "" + "Doc": "", + "Status": "" }, { "Value": "\"Structured\"", - "Doc": "`\"Structured\"` is a misguided experimental setting that returns a JSON\nhover format. This setting should not be used, as it will be removed in a\nfuture release of gopls.\n" + "Doc": "`\"Structured\"` is a misguided experimental setting that returns a JSON\nhover format. 
This setting should not be used, as it will be removed in a\nfuture release of gopls.\n", + "Status": "" }, { "Value": "\"SynopsisDocumentation\"", - "Doc": "" + "Doc": "", + "Status": "" } ], "Default": "\"FullDocumentation\"", @@ -173,15 +178,18 @@ "EnumValues": [ { "Value": "false", - "Doc": "false: do not show links" + "Doc": "false: do not show links", + "Status": "" }, { "Value": "true", - "Doc": "true: show links to the `linkTarget` domain" + "Doc": "true: show links to the `linkTarget` domain", + "Status": "" }, { "Value": "\"gopls\"", - "Doc": "`\"gopls\"`: show links to gopls' internal documentation viewer" + "Doc": "`\"gopls\"`: show links to gopls' internal documentation viewer", + "Status": "" } ], "Default": "true", @@ -228,15 +236,18 @@ "EnumValues": [ { "Value": "\"CaseInsensitive\"", - "Doc": "" + "Doc": "", + "Status": "" }, { "Value": "\"CaseSensitive\"", - "Doc": "" + "Doc": "", + "Status": "" }, { "Value": "\"Fuzzy\"", - "Doc": "" + "Doc": "", + "Status": "" } ], "Default": "\"Fuzzy\"", @@ -283,15 +294,18 @@ "EnumValues": [ { "Value": "\"Both\"", - "Doc": "" + "Doc": "", + "Status": "" }, { "Value": "\"Definition\"", - "Doc": "" + "Doc": "", + "Status": "" }, { "Value": "\"Link\"", - "Doc": "" + "Doc": "", + "Status": "" } ], "Default": "\"Both\"", @@ -310,19 +324,23 @@ "EnumValues": [ { "Value": "\"CaseInsensitive\"", - "Doc": "" + "Doc": "", + "Status": "" }, { "Value": "\"CaseSensitive\"", - "Doc": "" + "Doc": "", + "Status": "" }, { "Value": "\"FastFuzzy\"", - "Doc": "" + "Doc": "", + "Status": "" }, { "Value": "\"Fuzzy\"", - "Doc": "" + "Doc": "", + "Status": "" } ], "Default": "\"FastFuzzy\"", @@ -341,15 +359,18 @@ "EnumValues": [ { "Value": "\"Dynamic\"", - "Doc": "`\"Dynamic\"` uses whichever qualifier results in the highest scoring\nmatch for the given symbol query. Here a \"qualifier\" is any \"/\" or \".\"\ndelimited suffix of the fully qualified symbol. i.e. \"to/pkg.Foo.Field\" or\njust \"Foo.Field\".\n" + "Doc": "`\"Dynamic\"` uses whichever qualifier results in the highest scoring\nmatch for the given symbol query. Here a \"qualifier\" is any \"/\" or \".\"\ndelimited suffix of the fully qualified symbol. i.e. 
\"to/pkg.Foo.Field\" or\njust \"Foo.Field\".\n", + "Status": "" }, { "Value": "\"Full\"", - "Doc": "`\"Full\"` is fully qualified symbols, i.e.\n\"path/to/pkg.Foo.Field\".\n" + "Doc": "`\"Full\"` is fully qualified symbols, i.e.\n\"path/to/pkg.Foo.Field\".\n", + "Status": "" }, { "Value": "\"Package\"", - "Doc": "`\"Package\"` is package qualified symbols i.e.\n\"pkg.Foo.Field\".\n" + "Doc": "`\"Package\"` is package qualified symbols i.e.\n\"pkg.Foo.Field\".\n", + "Status": "" } ], "Default": "\"Dynamic\"", @@ -368,11 +389,13 @@ "EnumValues": [ { "Value": "\"all\"", - "Doc": "`\"all\"` matches symbols in any loaded package, including\ndependencies.\n" + "Doc": "`\"all\"` matches symbols in any loaded package, including\ndependencies.\n", + "Status": "" }, { "Value": "\"workspace\"", - "Doc": "`\"workspace\"` matches symbols in workspace packages only.\n" + "Doc": "`\"workspace\"` matches symbols in workspace packages only.\n", + "Status": "" } ], "Default": "\"all\"", @@ -390,282 +413,338 @@ { "Name": "\"appends\"", "Doc": "check for missing values after append\n\nThis checker reports calls to append that pass\nno values to be appended to the slice.\n\n\ts := []string{\"a\", \"b\", \"c\"}\n\t_ = append(s)\n\nSuch calls are always no-ops and often indicate an\nunderlying mistake.", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"asmdecl\"", "Doc": "report mismatches between assembly files and Go declarations", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"assign\"", "Doc": "check for useless assignments\n\nThis checker reports assignments of the form x = x or a[i] = a[i].\nThese are almost always useless, and even when they aren't they are\nusually a mistake.", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"atomic\"", "Doc": "check for common mistakes using the sync/atomic package\n\nThe atomic checker looks for assignment statements of the form:\n\n\tx = atomic.AddUint64(\u0026x, 1)\n\nwhich are not atomic.", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"atomicalign\"", "Doc": "check for non-64-bits-aligned arguments to sync/atomic functions", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"bools\"", "Doc": "check for common mistakes involving boolean operators", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"buildtag\"", "Doc": "check //go:build and // +build directives", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"cgocall\"", "Doc": "detect some violations of the cgo pointer passing rules\n\nCheck for invalid cgo pointer passing.\nThis looks for code that uses cgo to call C code passing values\nwhose types are almost always invalid according to the cgo pointer\nsharing rules.\nSpecifically, it warns about attempts to pass a Go chan, map, func,\nor slice to C, either directly, or via a pointer, array, or struct.", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"composites\"", "Doc": "check for unkeyed composite literals\n\nThis analyzer reports a diagnostic for composite literals of struct\ntypes imported from another package that do not use the field-keyed\nsyntax. 
Such literals are fragile because the addition of a new field\n(even if unexported) to the struct will cause compilation to fail.\n\nAs an example,\n\n\terr = \u0026net.DNSConfigError{err}\n\nshould be replaced by:\n\n\terr = \u0026net.DNSConfigError{Err: err}\n", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"copylocks\"", "Doc": "check for locks erroneously passed by value\n\nInadvertently copying a value containing a lock, such as sync.Mutex or\nsync.WaitGroup, may cause both copies to malfunction. Generally such\nvalues should be referred to through a pointer.", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"deepequalerrors\"", "Doc": "check for calls of reflect.DeepEqual on error values\n\nThe deepequalerrors checker looks for calls of the form:\n\n reflect.DeepEqual(err1, err2)\n\nwhere err1 and err2 are errors. Using reflect.DeepEqual to compare\nerrors is discouraged.", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"defers\"", "Doc": "report common mistakes in defer statements\n\nThe defers analyzer reports a diagnostic when a defer statement would\nresult in a non-deferred call to time.Since, as experience has shown\nthat this is nearly always a mistake.\n\nFor example:\n\n\tstart := time.Now()\n\t...\n\tdefer recordLatency(time.Since(start)) // error: call to time.Since is not deferred\n\nThe correct code is:\n\n\tdefer func() { recordLatency(time.Since(start)) }()", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"deprecated\"", "Doc": "check for use of deprecated identifiers\n\nThe deprecated analyzer looks for deprecated symbols and package\nimports.\n\nSee https://go.dev/wiki/Deprecated to learn about Go's convention\nfor documenting and signaling deprecated identifiers.", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"directive\"", "Doc": "check Go toolchain directives such as //go:debug\n\nThis analyzer checks for problems with known Go toolchain directives\nin all Go source files in a package directory, even those excluded by\n//go:build constraints, and all non-Go source files too.\n\nFor //go:debug (see https://go.dev/doc/godebug), the analyzer checks\nthat the directives are placed only in Go source files, only above the\npackage comment, and only in package main or *_test.go files.\n\nSupport for other known directives may be added in the future.\n\nThis analyzer does not check //go:build, which is handled by the\nbuildtag analyzer.\n", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"embed\"", "Doc": "check //go:embed directive usage\n\nThis analyzer checks that the embed package is imported if //go:embed\ndirectives are present, providing a suggested fix to add the import if\nit is missing.\n\nThis analyzer also checks that //go:embed directives precede the\ndeclaration of a single variable.", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"errorsas\"", "Doc": "report passing non-pointer or non-error values to errors.As\n\nThe errorsas analysis reports calls to errors.As where the type\nof the second argument is not a pointer to a type implementing error.", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"fillreturns\"", "Doc": "suggest fixes for errors due to an incorrect number of return values\n\nThis checker provides suggested fixes for type errors of the\ntype \"wrong number of return values (want %d, got %d)\". 
For example:\n\n\tfunc m() (int, string, *bool, error) {\n\t\treturn\n\t}\n\nwill turn into\n\n\tfunc m() (int, string, *bool, error) {\n\t\treturn 0, \"\", nil, nil\n\t}\n\nThis functionality is similar to https://github.com/sqs/goreturns.", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"framepointer\"", "Doc": "report assembly that clobbers the frame pointer before saving it", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"gofix\"", "Doc": "apply fixes based on go:fix comment directives\n\nThe gofix analyzer inlines functions and constants that are marked for inlining.", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"hostport\"", "Doc": "check format of addresses passed to net.Dial\n\nThis analyzer flags code that produce network address strings using\nfmt.Sprintf, as in this example:\n\n addr := fmt.Sprintf(\"%s:%d\", host, 12345) // \"will not work with IPv6\"\n ...\n conn, err := net.Dial(\"tcp\", addr) // \"when passed to dial here\"\n\nThe analyzer suggests a fix to use the correct approach, a call to\nnet.JoinHostPort:\n\n addr := net.JoinHostPort(host, \"12345\")\n ...\n conn, err := net.Dial(\"tcp\", addr)\n\nA similar diagnostic and fix are produced for a format string of \"%s:%s\".\n", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"httpresponse\"", "Doc": "check for mistakes using HTTP responses\n\nA common mistake when using the net/http package is to defer a function\ncall to close the http.Response Body before checking the error that\ndetermines whether the response is valid:\n\n\tresp, err := http.Head(url)\n\tdefer resp.Body.Close()\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\t// (defer statement belongs here)\n\nThis checker helps uncover latent nil dereference bugs by reporting a\ndiagnostic for such mistakes.", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"ifaceassert\"", "Doc": "detect impossible interface-to-interface type assertions\n\nThis checker flags type assertions v.(T) and corresponding type-switch cases\nin which the static type V of v is an interface that cannot possibly implement\nthe target interface T. This occurs when V and T contain methods with the same\nname but different signatures. Example:\n\n\tvar v interface {\n\t\tRead()\n\t}\n\t_ = v.(io.Reader)\n\nThe Read method in v has a different signature than the Read method in\nio.Reader, so this assertion cannot succeed.", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"infertypeargs\"", "Doc": "check for unnecessary type arguments in call expressions\n\nExplicit type arguments may be omitted from call expressions if they can be\ninferred from function arguments, or from other type arguments:\n\n\tfunc f[T any](T) {}\n\t\n\tfunc _() {\n\t\tf[string](\"foo\") // string could be inferred\n\t}\n", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"loopclosure\"", "Doc": "check references to loop variables from within nested functions\n\nThis analyzer reports places where a function literal references the\niteration variable of an enclosing loop, and the loop calls the function\nin such a way (e.g. with go or defer) that it may outlive the loop\niteration and possibly observe the wrong value of the variable.\n\nNote: An iteration variable can only outlive a loop iteration in Go versions \u003c=1.21.\nIn Go 1.22 and later, the loop variable lifetimes changed to create a new\niteration variable per loop iteration. 
(See go.dev/issue/60078.)\n\nIn this example, all the deferred functions run after the loop has\ncompleted, so all observe the final value of v [\u003cgo1.22].\n\n\tfor _, v := range list {\n\t defer func() {\n\t use(v) // incorrect\n\t }()\n\t}\n\nOne fix is to create a new variable for each iteration of the loop:\n\n\tfor _, v := range list {\n\t v := v // new var per iteration\n\t defer func() {\n\t use(v) // ok\n\t }()\n\t}\n\nAfter Go version 1.22, the previous two for loops are equivalent\nand both are correct.\n\nThe next example uses a go statement and has a similar problem [\u003cgo1.22].\nIn addition, it has a data race because the loop updates v\nconcurrent with the goroutines accessing it.\n\n\tfor _, v := range elem {\n\t go func() {\n\t use(v) // incorrect, and a data race\n\t }()\n\t}\n\nA fix is the same as before. The checker also reports problems\nin goroutines started by golang.org/x/sync/errgroup.Group.\nA hard-to-spot variant of this form is common in parallel tests:\n\n\tfunc Test(t *testing.T) {\n\t for _, test := range tests {\n\t t.Run(test.name, func(t *testing.T) {\n\t t.Parallel()\n\t use(test) // incorrect, and a data race\n\t })\n\t }\n\t}\n\nThe t.Parallel() call causes the rest of the function to execute\nconcurrent with the loop [\u003cgo1.22].\n\nThe analyzer reports references only in the last statement,\nas it is not deep enough to understand the effects of subsequent\nstatements that might render the reference benign.\n(\"Last statement\" is defined recursively in compound\nstatements such as if, switch, and select.)\n\nSee: https://golang.org/doc/go_faq.html#closures_and_goroutines", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"lostcancel\"", "Doc": "check cancel func returned by context.WithCancel is called\n\nThe cancellation function returned by context.WithCancel, WithTimeout,\nWithDeadline and variants such as WithCancelCause must be called,\nor the new context will remain live until its parent context is cancelled.\n(The background context is never cancelled.)", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"modernize\"", "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code by using more modern features of Go, such as:\n\n - replacing an if/else conditional assignment by a call to the\n built-in min or max functions added in go1.21;\n - replacing sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21;\n - replacing interface{} by the 'any' type added in go1.18;\n - replacing append([]T(nil), s...) by slices.Clone(s) or\n slices.Concat(s), added in go1.21;\n - replacing a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions\n from the maps package, added in go1.21;\n - replacing []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...),\n added in go1.19;\n - replacing uses of context.WithCancel in tests with t.Context, added in\n go1.24;\n - replacing omitempty by omitzero on structs, added in go1.24;\n - replacing append(s[:i], s[i+1]...) 
by slices.Delete(s, i, i+1),\n added in go1.21\n - replacing a 3-clause for i := 0; i \u003c n; i++ {} loop by\n for i := range n {}, added in go1.22;\n - replacing Split in \"for range strings.Split(...)\" by go1.24's\n more efficient SplitSeq, or Fields with FieldSeq;\n\nTo apply all modernization fixes en masse, you can use the\nfollowing command:\n\n\t$ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -test ./...\n\nIf the tool warns of conflicting fixes, you may need to run it more\nthan once until it has applied all fixes cleanly. This command is\nnot an officially supported interface and may change in the future.", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"nilfunc\"", "Doc": "check for useless comparisons between functions and nil\n\nA useless comparison is one like f == nil as opposed to f() == nil.", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"nilness\"", "Doc": "check for redundant or impossible nil comparisons\n\nThe nilness checker inspects the control-flow graph of each function in\na package and reports nil pointer dereferences, degenerate nil\npointers, and panics with nil values. A degenerate comparison is of the form\nx==nil or x!=nil where x is statically known to be nil or non-nil. These are\noften a mistake, especially in control flow related to errors. Panics with nil\nvalues are checked because they are not detectable by\n\n\tif r := recover(); r != nil {\n\nThis check reports conditions such as:\n\n\tif f == nil { // impossible condition (f is a function)\n\t}\n\nand:\n\n\tp := \u0026v\n\t...\n\tif p != nil { // tautological condition\n\t}\n\nand:\n\n\tif p == nil {\n\t\tprint(*p) // nil dereference\n\t}\n\nand:\n\n\tif p == nil {\n\t\tpanic(p)\n\t}\n\nSometimes the control flow may be quite complex, making bugs hard\nto spot. In the example below, the err.Error expression is\nguaranteed to panic because, after the first return, err must be\nnil. The intervening loop is just a distraction.\n\n\t...\n\terr := g.Wait()\n\tif err != nil {\n\t\treturn err\n\t}\n\tpartialSuccess := false\n\tfor _, err := range errs {\n\t\tif err == nil {\n\t\t\tpartialSuccess = true\n\t\t\tbreak\n\t\t}\n\t}\n\tif partialSuccess {\n\t\treportStatus(StatusMessage{\n\t\t\tCode: code.ERROR,\n\t\t\tDetail: err.Error(), // \"nil dereference in dynamic method call\"\n\t\t})\n\t\treturn nil\n\t}\n\n...", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"nonewvars\"", "Doc": "suggested fixes for \"no new vars on left side of :=\"\n\nThis checker provides suggested fixes for type errors of the\ntype \"no new vars on left side of :=\". For example:\n\n\tz := 1\n\tz := 2\n\nwill turn into\n\n\tz := 1\n\tz = 2", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"noresultvalues\"", "Doc": "suggested fixes for unexpected return values\n\nThis checker provides suggested fixes for type errors of the\ntype \"no result values expected\" or \"too many return values\".\nFor example:\n\n\tfunc z() { return nil }\n\nwill turn into\n\n\tfunc z() { return }", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"printf\"", "Doc": "check consistency of Printf format strings and arguments\n\nThe check applies to calls of the formatting functions such as\n[fmt.Printf] and [fmt.Sprintf], as well as any detected wrappers of\nthose functions such as [log.Printf]. 
It reports a variety of\nmistakes such as syntax errors in the format string and mismatches\n(of number and type) between the verbs and their arguments.\n\nSee the documentation of the fmt package for the complete set of\nformat operators and their operand types.", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"shadow\"", "Doc": "check for possible unintended shadowing of variables\n\nThis analyzer check for shadowed variables.\nA shadowed variable is a variable declared in an inner scope\nwith the same name and type as a variable in an outer scope,\nand where the outer variable is mentioned after the inner one\nis declared.\n\n(This definition can be refined; the module generates too many\nfalse positives and is not yet enabled by default.)\n\nFor example:\n\n\tfunc BadRead(f *os.File, buf []byte) error {\n\t\tvar err error\n\t\tfor {\n\t\t\tn, err := f.Read(buf) // shadows the function variable 'err'\n\t\t\tif err != nil {\n\t\t\t\tbreak // causes return of wrong value\n\t\t\t}\n\t\t\tfoo(buf)\n\t\t}\n\t\treturn err\n\t}", - "Default": "false" + "Default": "false", + "Status": "" }, { "Name": "\"shift\"", "Doc": "check for shifts that equal or exceed the width of the integer", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"sigchanyzer\"", "Doc": "check for unbuffered channel of os.Signal\n\nThis checker reports call expression of the form\n\n\tsignal.Notify(c \u003c-chan os.Signal, sig ...os.Signal),\n\nwhere c is an unbuffered channel, which can be at risk of missing the signal.", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"simplifycompositelit\"", "Doc": "check for composite literal simplifications\n\nAn array, slice, or map composite literal of the form:\n\n\t[]T{T{}, T{}}\n\nwill be simplified to:\n\n\t[]T{{}, {}}\n\nThis is one of the simplifications that \"gofmt -s\" applies.\n\nThis analyzer ignores generated code.", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"simplifyrange\"", "Doc": "check for range statement simplifications\n\nA range of the form:\n\n\tfor x, _ = range v {...}\n\nwill be simplified to:\n\n\tfor x = range v {...}\n\nA range of the form:\n\n\tfor _ = range v {...}\n\nwill be simplified to:\n\n\tfor range v {...}\n\nThis is one of the simplifications that \"gofmt -s\" applies.\n\nThis analyzer ignores generated code.", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"simplifyslice\"", "Doc": "check for slice simplifications\n\nA slice expression of the form:\n\n\ts[a:len(s)]\n\nwill be simplified to:\n\n\ts[a:]\n\nThis is one of the simplifications that \"gofmt -s\" applies.\n\nThis analyzer ignores generated code.", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"slog\"", "Doc": "check for invalid structured logging calls\n\nThe slog checker looks for calls to functions from the log/slog\npackage that take alternating key-value pairs. It reports calls\nwhere an argument in a key position is neither a string nor a\nslog.Attr, and where a final key is missing its value.\nFor example,it would report\n\n\tslog.Warn(\"message\", 11, \"k\") // slog.Warn arg \"11\" should be a string or a slog.Attr\n\nand\n\n\tslog.Info(\"message\", \"k1\", v1, \"k2\") // call to slog.Info missing a final value", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"sortslice\"", "Doc": "check the argument type of sort.Slice\n\nsort.Slice requires an argument of a slice type. 
Check that\nthe interface{} value passed to sort.Slice is actually a slice.", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"stdmethods\"", "Doc": "check signature of methods of well-known interfaces\n\nSometimes a type may be intended to satisfy an interface but may fail to\ndo so because of a mistake in its method signature.\nFor example, the result of this WriteTo method should be (int64, error),\nnot error, to satisfy io.WriterTo:\n\n\ttype myWriterTo struct{...}\n\tfunc (myWriterTo) WriteTo(w io.Writer) error { ... }\n\nThis check ensures that each method whose name matches one of several\nwell-known interface methods from the standard library has the correct\nsignature for that interface.\n\nChecked method names include:\n\n\tFormat GobEncode GobDecode MarshalJSON MarshalXML\n\tPeek ReadByte ReadFrom ReadRune Scan Seek\n\tUnmarshalJSON UnreadByte UnreadRune WriteByte\n\tWriteTo", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"stdversion\"", "Doc": "report uses of too-new standard library symbols\n\nThe stdversion analyzer reports references to symbols in the standard\nlibrary that were introduced by a Go release higher than the one in\nforce in the referring file. (Recall that the file's Go version is\ndefined by the 'go' directive its module's go.mod file, or by a\n\"//go:build go1.X\" build tag at the top of the file.)\n\nThe analyzer does not report a diagnostic for a reference to a \"too\nnew\" field or method of a type that is itself \"too new\", as this may\nhave false positives, for example if fields or methods are accessed\nthrough a type alias that is guarded by a Go version constraint.\n", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"stringintconv\"", "Doc": "check for string(int) conversions\n\nThis checker flags conversions of the form string(x) where x is an integer\n(but not byte or rune) type. Such conversions are discouraged because they\nreturn the UTF-8 representation of the Unicode code point x, and not a decimal\nstring representation of x as one might expect. Furthermore, if x denotes an\ninvalid code point, the conversion cannot be statically rejected.\n\nFor conversions that intend on using the code point, consider replacing them\nwith string(rune(x)). Otherwise, strconv.Itoa and its equivalents return the\nstring representation of the value in the desired base.", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"structtag\"", "Doc": "check that struct field tags conform to reflect.StructTag.Get\n\nAlso report certain struct tags (json, xml) used with unexported fields.", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"testinggoroutine\"", "Doc": "report calls to (*testing.T).Fatal from goroutines started by a test\n\nFunctions that abruptly terminate a test, such as the Fatal, Fatalf, FailNow, and\nSkip{,f,Now} methods of *testing.T, must be called from the test goroutine itself.\nThis checker detects calls to these functions that occur within a goroutine\nstarted by the test. 
For example:\n\n\tfunc TestFoo(t *testing.T) {\n\t go func() {\n\t t.Fatal(\"oops\") // error: (*T).Fatal called from non-test goroutine\n\t }()\n\t}", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"tests\"", "Doc": "check for common mistaken usages of tests and examples\n\nThe tests checker walks Test, Benchmark, Fuzzing and Example functions checking\nmalformed names, wrong signatures and examples documenting non-existent\nidentifiers.\n\nPlease see the documentation for package testing in golang.org/pkg/testing\nfor the conventions that are enforced for Tests, Benchmarks, and Examples.", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"timeformat\"", "Doc": "check for calls of (time.Time).Format or time.Parse with 2006-02-01\n\nThe timeformat checker looks for time formats with the 2006-02-01 (yyyy-dd-mm)\nformat. Internationally, \"yyyy-dd-mm\" does not occur in common calendar date\nstandards, and so it is more likely that 2006-01-02 (yyyy-mm-dd) was intended.", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"unmarshal\"", "Doc": "report passing non-pointer or non-interface values to unmarshal\n\nThe unmarshal analysis reports calls to functions such as json.Unmarshal\nin which the argument type is not a pointer or an interface.", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"unreachable\"", "Doc": "check for unreachable code\n\nThe unreachable analyzer finds statements that execution can never reach\nbecause they are preceded by a return statement, a call to panic, an\ninfinite loop, or similar constructs.", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"unsafeptr\"", "Doc": "check for invalid conversions of uintptr to unsafe.Pointer\n\nThe unsafeptr analyzer reports likely incorrect uses of unsafe.Pointer\nto convert integers to pointers. 
A conversion from uintptr to\nunsafe.Pointer is invalid if it implies that there is a uintptr-typed\nword in memory that holds a pointer value, because that word will be\ninvisible to stack copying and to the garbage collector.", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"unusedfunc\"", "Doc": "check for unused functions and methods\n\nThe unusedfunc analyzer reports functions and methods that are\nnever referenced outside of their own declaration.\n\nA function is considered unused if it is unexported and not\nreferenced (except within its own declaration).\n\nA method is considered unused if it is unexported, not referenced\n(except within its own declaration), and its name does not match\nthat of any method of an interface type declared within the same\npackage.\n\nThe tool may report false positives in some situations, for\nexample:\n\n - For a declaration of an unexported function that is referenced\n from another package using the go:linkname mechanism, if the\n declaration's doc comment does not also have a go:linkname\n comment.\n\n (Such code is in any case strongly discouraged: linkname\n annotations, if they must be used at all, should be used on both\n the declaration and the alias.)\n\n - For compiler intrinsics in the \"runtime\" package that, though\n never referenced, are known to the compiler and are called\n indirectly by compiled object code.\n\n - For functions called only from assembly.\n\n - For functions called only from files whose build tags are not\n selected in the current build configuration.\n\nSee https://github.com/golang/go/issues/71686 for discussion of\nthese limitations.\n\nThe unusedfunc algorithm is not as precise as the\ngolang.org/x/tools/cmd/deadcode tool, but it has the advantage that\nit runs within the modular analysis framework, enabling near\nreal-time feedback within gopls.", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"unusedparams\"", "Doc": "check for unused parameters of functions\n\nThe unusedparams analyzer checks functions to see if there are\nany parameters that are not being used.\n\nTo ensure soundness, it ignores:\n - \"address-taken\" functions, that is, functions that are used as\n a value rather than being called directly; their signatures may\n be required to conform to a func type.\n - exported functions or methods, since they may be address-taken\n in another package.\n - unexported methods whose name matches an interface method\n declared in the same package, since the method's signature\n may be required to conform to the interface type.\n - functions with empty bodies, or containing just a call to panic.\n - parameters that are unnamed, or named \"_\", the blank identifier.\n\nThe analyzer suggests a fix of replacing the parameter name by \"_\",\nbut in such cases a deeper fix can be obtained by invoking the\n\"Refactor: remove unused parameter\" code action, which will\neliminate the parameter entirely, along with all corresponding\narguments at call sites, while taking care to preserve any side\neffects in the argument expressions; see\nhttps://github.com/golang/tools/releases/tag/gopls%2Fv0.14.\n\nThis analyzer ignores generated code.", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"unusedresult\"", "Doc": "check for unused results of calls to some functions\n\nSome functions like fmt.Errorf return a result and have no side\neffects, so it is always a mistake to discard the result. 
Other\nfunctions may return an error that must not be ignored, or a cleanup\noperation that must be called. This analyzer reports calls to\nfunctions like these when the result of the call is ignored.\n\nThe set of functions may be controlled using flags.", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"unusedvariable\"", "Doc": "check for unused variables and suggest fixes", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"unusedwrite\"", "Doc": "checks for unused writes\n\nThe analyzer reports instances of writes to struct fields and\narrays that are never read. Specifically, when a struct object\nor an array is copied, its elements are copied implicitly by\nthe compiler, and any element write to this copy does nothing\nwith the original object.\n\nFor example:\n\n\ttype T struct { x int }\n\n\tfunc f(input []T) {\n\t\tfor i, v := range input { // v is a copy\n\t\t\tv.x = i // unused write to field x\n\t\t}\n\t}\n\nAnother example is about non-pointer receiver:\n\n\ttype T struct { x int }\n\n\tfunc (t T) f() { // t is a copy\n\t\tt.x = i // unused write to field x\n\t}", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"waitgroup\"", "Doc": "check for misuses of sync.WaitGroup\n\nThis analyzer detects mistaken calls to the (*sync.WaitGroup).Add\nmethod from inside a new goroutine, causing Add to race with Wait:\n\n\t// WRONG\n\tvar wg sync.WaitGroup\n\tgo func() {\n\t wg.Add(1) // \"WaitGroup.Add called from inside new goroutine\"\n\t defer wg.Done()\n\t ...\n\t}()\n\twg.Wait() // (may return prematurely before new goroutine starts)\n\nThe correct code calls Add before starting the goroutine:\n\n\t// RIGHT\n\tvar wg sync.WaitGroup\n\twg.Add(1)\n\tgo func() {\n\t\tdefer wg.Done()\n\t\t...\n\t}()\n\twg.Wait()", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"yield\"", "Doc": "report calls to yield where the result is ignored\n\nAfter a yield function returns false, the caller should not call\nthe yield function again; generally the iterator should return\npromptly.\n\nThis example fails to check the result of the call to yield,\ncausing this analyzer to report a diagnostic:\n\n\tyield(1) // yield may be called again (on L2) after returning false\n\tyield(2)\n\nThe corrected code is either this:\n\n\tif yield(1) { yield(2) }\n\nor simply:\n\n\t_ = yield(1) \u0026\u0026 yield(2)\n\nIt is not always a mistake to ignore the result of yield.\nFor example, this is a valid single-element iterator:\n\n\tyield(1) // ok to ignore result\n\treturn\n\nIt is only a mistake when the yield call that returned false may be\nfollowed by another call.", - "Default": "true" + "Default": "true", + "Status": "" } ] }, @@ -699,22 +778,26 @@ { "Name": "\"bounds\"", "Doc": "`\"bounds\"` controls bounds checking diagnostics.\n", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"escape\"", "Doc": "`\"escape\"` controls diagnostics about escape choices.\n", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"inline\"", "Doc": "`\"inline\"` controls diagnostics about inlining choices.\n", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"nil\"", "Doc": "`\"nil\"` controls nil checks.\n", - "Default": "true" + "Default": "true", + "Status": "" } ] }, @@ -735,11 +818,13 @@ "EnumValues": [ { "Value": "\"Imports\"", - "Doc": "`\"Imports\"`: In Imports mode, `gopls` will report vulnerabilities that affect packages\ndirectly and indirectly used by the analyzed main 
module.\n" + "Doc": "`\"Imports\"`: In Imports mode, `gopls` will report vulnerabilities that affect packages\ndirectly and indirectly used by the analyzed main module.\n", + "Status": "" }, { "Value": "\"Off\"", - "Doc": "`\"Off\"`: Disable vulnerability analysis.\n" + "Doc": "`\"Off\"`: Disable vulnerability analysis.\n", + "Status": "" } ], "Default": "\"Off\"", @@ -772,11 +857,13 @@ "EnumValues": [ { "Value": "\"Edit\"", - "Doc": "`\"Edit\"`: Trigger diagnostics on file edit and save. (default)\n" + "Doc": "`\"Edit\"`: Trigger diagnostics on file edit and save. (default)\n", + "Status": "" }, { "Value": "\"Save\"", - "Doc": "`\"Save\"`: Trigger diagnostics only on file save. Events like initial workspace load\nor configuration change will still trigger diagnostics.\n" + "Doc": "`\"Save\"`: Trigger diagnostics only on file save. Events like initial workspace load\nor configuration change will still trigger diagnostics.\n", + "Status": "" } ], "Default": "\"Edit\"", @@ -808,37 +895,44 @@ { "Name": "\"assignVariableTypes\"", "Doc": "`\"assignVariableTypes\"` controls inlay hints for variable types in assign statements:\n```go\n\ti/* int*/, j/* int*/ := 0, len(r)-1\n```\n", - "Default": "false" + "Default": "false", + "Status": "" }, { "Name": "\"compositeLiteralFields\"", "Doc": "`\"compositeLiteralFields\"` inlay hints for composite literal field names:\n```go\n\t{/*in: */\"Hello, world\", /*want: */\"dlrow ,olleH\"}\n```\n", - "Default": "false" + "Default": "false", + "Status": "" }, { "Name": "\"compositeLiteralTypes\"", "Doc": "`\"compositeLiteralTypes\"` controls inlay hints for composite literal types:\n```go\n\tfor _, c := range []struct {\n\t\tin, want string\n\t}{\n\t\t/*struct{ in string; want string }*/{\"Hello, world\", \"dlrow ,olleH\"},\n\t}\n```\n", - "Default": "false" + "Default": "false", + "Status": "" }, { "Name": "\"constantValues\"", "Doc": "`\"constantValues\"` controls inlay hints for constant values:\n```go\n\tconst (\n\t\tKindNone Kind = iota/* = 0*/\n\t\tKindPrint/* = 1*/\n\t\tKindPrintf/* = 2*/\n\t\tKindErrorf/* = 3*/\n\t)\n```\n", - "Default": "false" + "Default": "false", + "Status": "" }, { "Name": "\"functionTypeParameters\"", "Doc": "`\"functionTypeParameters\"` inlay hints for implicit type parameters on generic functions:\n```go\n\tmyFoo/*[int, string]*/(1, \"hello\")\n```\n", - "Default": "false" + "Default": "false", + "Status": "" }, { "Name": "\"parameterNames\"", "Doc": "`\"parameterNames\"` controls inlay hints for parameter names:\n```go\n\tparseInt(/* str: */ \"123\", /* radix: */ 8)\n```\n", - "Default": "false" + "Default": "false", + "Status": "" }, { "Name": "\"rangeVariableTypes\"", "Doc": "`\"rangeVariableTypes\"` controls inlay hints for variable types in range statements:\n```go\n\tfor k/* int*/, v/* string*/ := range []string{} {\n\t\tfmt.Println(k, v)\n\t}\n```\n", - "Default": "false" + "Default": "false", + "Status": "" } ] }, @@ -858,42 +952,50 @@ { "Name": "\"generate\"", "Doc": "`\"generate\"`: Run `go generate`\n\nThis codelens source annotates any `//go:generate` comments\nwith commands to run `go generate` in this directory, on\nall directories recursively beneath this one.\n\nSee [Generating code](https://go.dev/blog/generate) for\nmore details.\n", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"regenerate_cgo\"", "Doc": "`\"regenerate_cgo\"`: Re-generate cgo declarations\n\nThis codelens source annotates an `import \"C\"` declaration\nwith a command to re-run the 
[cgo\ncommand](https://pkg.go.dev/cmd/cgo) to regenerate the\ncorresponding Go declarations.\n\nUse this after editing the C code in comments attached to\nthe import, or in C header files included by it.\n", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"run_govulncheck\"", "Doc": "`\"run_govulncheck\"`: Run govulncheck (legacy)\n\nThis codelens source annotates the `module` directive in a go.mod file\nwith a command to run Govulncheck asynchronously.\n\n[Govulncheck](https://go.dev/blog/vuln) is a static analysis tool that\ncomputes the set of functions reachable within your application, including\ndependencies; queries a database of known security vulnerabilities; and\nreports any potential problems it finds.\n", - "Default": "false" + "Default": "false", + "Status": "experimental" }, { "Name": "\"test\"", "Doc": "`\"test\"`: Run tests and benchmarks\n\nThis codelens source annotates each `Test` and `Benchmark`\nfunction in a `*_test.go` file with a command to run it.\n\nThis source is off by default because VS Code has\na client-side custom UI for testing, and because progress\nnotifications are not a great UX for streamed test output.\nSee:\n- golang/go#67400 for a discussion of this feature.\n- https://github.com/joaotavora/eglot/discussions/1402\n for an alternative approach.\n", - "Default": "false" + "Default": "false", + "Status": "" }, { "Name": "\"tidy\"", "Doc": "`\"tidy\"`: Tidy go.mod file\n\nThis codelens source annotates the `module` directive in a\ngo.mod file with a command to run [`go mod\ntidy`](https://go.dev/ref/mod#go-mod-tidy), which ensures\nthat the go.mod file matches the source code in the module.\n", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"upgrade_dependency\"", "Doc": "`\"upgrade_dependency\"`: Update dependencies\n\nThis codelens source annotates the `module` directive in a\ngo.mod file with commands to:\n\n- check for available upgrades,\n- upgrade direct dependencies, and\n- upgrade all dependencies transitively.\n", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"vendor\"", "Doc": "`\"vendor\"`: Update vendor directory\n\nThis codelens source annotates the `module` directive in a\ngo.mod file with a command to run [`go mod\nvendor`](https://go.dev/ref/mod#go-mod-vendor), which\ncreates or updates the directory named `vendor` in the\nmodule root so that it contains an up-to-date copy of all\nnecessary package dependencies.\n", - "Default": "true" + "Default": "true", + "Status": "" }, { "Name": "\"vulncheck\"", "Doc": "`\"vulncheck\"`: Run govulncheck\n\nThis codelens source annotates the `module` directive in a go.mod file\nwith a command to run govulncheck synchronously.\n\n[Govulncheck](https://go.dev/blog/vuln) is a static analysis tool that\ncomputes the set of functions reachable within your application, including\ndependencies; queries a database of known security vulnerabilities; and\nreports any potential problems it finds.\n", - "Default": "false" + "Default": "false", + "Status": "experimental" } ] }, @@ -1023,56 +1125,64 @@ "Lens": "generate", "Title": "Run `go generate`", "Doc": "\nThis codelens source annotates any `//go:generate` comments\nwith commands to run `go generate` in this directory, on\nall directories recursively beneath this one.\n\nSee [Generating code](https://go.dev/blog/generate) for\nmore details.\n", - "Default": true + "Default": true, + "Status": "" }, { "FileType": "Go", "Lens": "regenerate_cgo", "Title": "Re-generate cgo declarations", 
"Doc": "\nThis codelens source annotates an `import \"C\"` declaration\nwith a command to re-run the [cgo\ncommand](https://pkg.go.dev/cmd/cgo) to regenerate the\ncorresponding Go declarations.\n\nUse this after editing the C code in comments attached to\nthe import, or in C header files included by it.\n", - "Default": true + "Default": true, + "Status": "" }, { "FileType": "Go", "Lens": "test", "Title": "Run tests and benchmarks", "Doc": "\nThis codelens source annotates each `Test` and `Benchmark`\nfunction in a `*_test.go` file with a command to run it.\n\nThis source is off by default because VS Code has\na client-side custom UI for testing, and because progress\nnotifications are not a great UX for streamed test output.\nSee:\n- golang/go#67400 for a discussion of this feature.\n- https://github.com/joaotavora/eglot/discussions/1402\n for an alternative approach.\n", - "Default": false + "Default": false, + "Status": "" }, { "FileType": "go.mod", "Lens": "run_govulncheck", "Title": "Run govulncheck (legacy)", "Doc": "\nThis codelens source annotates the `module` directive in a go.mod file\nwith a command to run Govulncheck asynchronously.\n\n[Govulncheck](https://go.dev/blog/vuln) is a static analysis tool that\ncomputes the set of functions reachable within your application, including\ndependencies; queries a database of known security vulnerabilities; and\nreports any potential problems it finds.\n", - "Default": false + "Default": false, + "Status": "experimental" }, { "FileType": "go.mod", "Lens": "tidy", "Title": "Tidy go.mod file", "Doc": "\nThis codelens source annotates the `module` directive in a\ngo.mod file with a command to run [`go mod\ntidy`](https://go.dev/ref/mod#go-mod-tidy), which ensures\nthat the go.mod file matches the source code in the module.\n", - "Default": true + "Default": true, + "Status": "" }, { "FileType": "go.mod", "Lens": "upgrade_dependency", "Title": "Update dependencies", "Doc": "\nThis codelens source annotates the `module` directive in a\ngo.mod file with commands to:\n\n- check for available upgrades,\n- upgrade direct dependencies, and\n- upgrade all dependencies transitively.\n", - "Default": true + "Default": true, + "Status": "" }, { "FileType": "go.mod", "Lens": "vendor", "Title": "Update vendor directory", "Doc": "\nThis codelens source annotates the `module` directive in a\ngo.mod file with a command to run [`go mod\nvendor`](https://go.dev/ref/mod#go-mod-vendor), which\ncreates or updates the directory named `vendor` in the\nmodule root so that it contains an up-to-date copy of all\nnecessary package dependencies.\n", - "Default": true + "Default": true, + "Status": "" }, { "FileType": "go.mod", "Lens": "vulncheck", "Title": "Run govulncheck", "Doc": "\nThis codelens source annotates the `module` directive in a go.mod file\nwith a command to run govulncheck synchronously.\n\n[Govulncheck](https://go.dev/blog/vuln) is a static analysis tool that\ncomputes the set of functions reachable within your application, including\ndependencies; queries a database of known security vulnerabilities; and\nreports any potential problems it finds.\n", - "Default": false + "Default": false, + "Status": "experimental" } ], "Analyzers": [ @@ -1417,37 +1527,44 @@ { "Name": "assignVariableTypes", "Doc": "`\"assignVariableTypes\"` controls inlay hints for variable types in assign statements:\n```go\n\ti/* int*/, j/* int*/ := 0, len(r)-1\n```\n", - "Default": false + "Default": false, + "Status": "" }, { "Name": "compositeLiteralFields", "Doc": 
"`\"compositeLiteralFields\"` inlay hints for composite literal field names:\n```go\n\t{/*in: */\"Hello, world\", /*want: */\"dlrow ,olleH\"}\n```\n", - "Default": false + "Default": false, + "Status": "" }, { "Name": "compositeLiteralTypes", "Doc": "`\"compositeLiteralTypes\"` controls inlay hints for composite literal types:\n```go\n\tfor _, c := range []struct {\n\t\tin, want string\n\t}{\n\t\t/*struct{ in string; want string }*/{\"Hello, world\", \"dlrow ,olleH\"},\n\t}\n```\n", - "Default": false + "Default": false, + "Status": "" }, { "Name": "constantValues", "Doc": "`\"constantValues\"` controls inlay hints for constant values:\n```go\n\tconst (\n\t\tKindNone Kind = iota/* = 0*/\n\t\tKindPrint/* = 1*/\n\t\tKindPrintf/* = 2*/\n\t\tKindErrorf/* = 3*/\n\t)\n```\n", - "Default": false + "Default": false, + "Status": "" }, { "Name": "functionTypeParameters", "Doc": "`\"functionTypeParameters\"` inlay hints for implicit type parameters on generic functions:\n```go\n\tmyFoo/*[int, string]*/(1, \"hello\")\n```\n", - "Default": false + "Default": false, + "Status": "" }, { "Name": "parameterNames", "Doc": "`\"parameterNames\"` controls inlay hints for parameter names:\n```go\n\tparseInt(/* str: */ \"123\", /* radix: */ 8)\n```\n", - "Default": false + "Default": false, + "Status": "" }, { "Name": "rangeVariableTypes", "Doc": "`\"rangeVariableTypes\"` controls inlay hints for variable types in range statements:\n```go\n\tfor k/* int*/, v/* string*/ := range []string{} {\n\t\tfmt.Println(k, v)\n\t}\n```\n", - "Default": false + "Default": false, + "Status": "" } ] } \ No newline at end of file diff --git a/gopls/internal/doc/generate/generate.go b/gopls/internal/doc/generate/generate.go index 51c8b89e39b..762fceeb4b9 100644 --- a/gopls/internal/doc/generate/generate.go +++ b/gopls/internal/doc/generate/generate.go @@ -317,9 +317,17 @@ func loadEnums(pkg *packages.Package) (map[types.Type][]doc.EnumValue, error) { spec := path[1].(*ast.ValueSpec) value := cnst.Val().ExactString() docstring := valueDoc(cnst.Name(), value, spec.Doc.Text()) + var status string + for _, d := range internalastutil.Directives(spec.Doc) { + if d.Tool == "gopls" && d.Name == "status" { + status = d.Args + break + } + } v := doc.EnumValue{ - Value: value, - Doc: docstring, + Value: value, + Doc: docstring, + Status: status, } enums[obj.Type()] = append(enums[obj.Type()], v) } @@ -354,6 +362,7 @@ func collectEnumKeys(m *types.Map, reflectField reflect.Value, enumValues []doc. keys = append(keys, doc.EnumKey{ Name: v.Value, Doc: v.Doc, + Status: v.Status, Default: def, }) } @@ -436,6 +445,7 @@ func loadLenses(settingsPkg *packages.Package, defaults map[settings.CodeLensSou // Find the CodeLensSource enums among the files of the protocol package. // Map each enum value to its doc comment. 
enumDoc := make(map[string]string) + enumStatus := make(map[string]string) for _, f := range settingsPkg.Syntax { for _, decl := range f.Decls { if decl, ok := decl.(*ast.GenDecl); ok && decl.Tok == token.CONST { @@ -455,6 +465,12 @@ func loadLenses(settingsPkg *packages.Package, defaults map[settings.CodeLensSou return nil, fmt.Errorf("%s: %s lacks doc comment", posn, spec.Names[0].Name) } enumDoc[value] = spec.Doc.Text() + for _, d := range internalastutil.Directives(spec.Doc) { + if d.Tool == "gopls" && d.Name == "status" { + enumStatus[value] = d.Args + break + } + } } } } @@ -479,6 +495,7 @@ func loadLenses(settingsPkg *packages.Package, defaults map[settings.CodeLensSou Title: title, Doc: docText, Default: defaults[source], + Status: enumStatus[string(source)], }) } return nil @@ -518,8 +535,9 @@ func loadHints(settingsPkg *packages.Package) ([]*doc.Hint, error) { for _, enumVal := range enums[inlayHint] { name, _ := strconv.Unquote(enumVal.Value) hints = append(hints, &doc.Hint{ - Name: name, - Doc: enumVal.Doc, + Name: name, + Doc: enumVal.Doc, + Status: enumVal.Status, }) } return hints, nil @@ -600,17 +618,7 @@ func rewriteSettings(prevContent []byte, api *doc.API) ([]byte, error) { fmt.Fprintf(&buf, "### `%s %s`\n\n", opt.Name, opt.Type) // status - switch opt.Status { - case "": - case "advanced": - fmt.Fprint(&buf, "**This is an advanced setting and should not be configured by most `gopls` users.**\n\n") - case "debug": - fmt.Fprint(&buf, "**This setting is for debugging purposes only.**\n\n") - case "experimental": - fmt.Fprint(&buf, "**This setting is experimental and may be deleted.**\n\n") - default: - fmt.Fprintf(&buf, "**Status: %s.**\n\n", opt.Status) - } + writeStatus(&buf, opt.Status) // doc comment buf.WriteString(opt.Doc) @@ -651,6 +659,22 @@ func rewriteSettings(prevContent []byte, api *doc.API) ([]byte, error) { return content, nil } +// writeStatus emits a Markdown paragraph to buf about the status of a feature, +// if nonempty. +func writeStatus(buf *bytes.Buffer, status string) { + switch status { + case "": + case "advanced": + fmt.Fprint(buf, "**This is an advanced setting and should not be configured by most `gopls` users.**\n\n") + case "debug": + fmt.Fprint(buf, "**This setting is for debugging purposes only.**\n\n") + case "experimental": + fmt.Fprint(buf, "**This setting is experimental and may be deleted.**\n\n") + default: + fmt.Fprintf(buf, "**Status: %s.**\n\n", status) + } +} + var parBreakRE = regexp.MustCompile("\n{2,}") func shouldShowEnumKeysInSettings(name string) bool { @@ -722,6 +746,7 @@ func rewriteCodeLenses(prevContent []byte, api *doc.API) ([]byte, error) { var buf bytes.Buffer for _, lens := range api.Lenses { fmt.Fprintf(&buf, "## `%s`: %s\n\n", lens.Lens, lens.Title) + writeStatus(&buf, lens.Status) fmt.Fprintf(&buf, "%s\n\n", lens.Doc) fmt.Fprintf(&buf, "Default: %v\n\n", onOff(lens.Default)) fmt.Fprintf(&buf, "File type: %s\n\n", lens.FileType) diff --git a/gopls/internal/settings/settings.go b/gopls/internal/settings/settings.go index e98bc365935..59b2aa1b87f 100644 --- a/gopls/internal/settings/settings.go +++ b/gopls/internal/settings/settings.go @@ -269,6 +269,8 @@ const ( // computes the set of functions reachable within your application, including // dependencies; queries a database of known security vulnerabilities; and // reports any potential problems it finds. 
+ // + //gopls:status experimental CodeLensVulncheck CodeLensSource = "vulncheck" // Run govulncheck (legacy) @@ -280,6 +282,8 @@ const ( // computes the set of functions reachable within your application, including // dependencies; queries a database of known security vulnerabilities; and // reports any potential problems it finds. + // + //gopls:status experimental CodeLensRunGovulncheck CodeLensSource = "run_govulncheck" // Run tests and benchmarks diff --git a/internal/astutil/comment.go b/internal/astutil/comment.go index 192d6430de0..ee4be23f226 100644 --- a/internal/astutil/comment.go +++ b/internal/astutil/comment.go @@ -6,6 +6,7 @@ package astutil import ( "go/ast" + "go/token" "strings" ) @@ -26,3 +27,87 @@ func Deprecation(doc *ast.CommentGroup) string { } return "" } + +// -- plundered from the future (CL 605517, issue #68021) -- + +// TODO(adonovan): replace with ast.Directive after go1.25 (#68021). +// Beware of our local mods to handle analysistest +// "want" comments on the same line. + +// A directive is a comment line with special meaning to the Go +// toolchain or another tool. It has the form: +// +// //tool:name args +// +// The "tool:" portion is missing for the three directives named +// line, extern, and export. +// +// See https://go.dev/doc/comment#Syntax for details of Go comment +// syntax and https://pkg.go.dev/cmd/compile#hdr-Compiler_Directives +// for details of directives used by the Go compiler. +type Directive struct { + Pos token.Pos // of preceding "//" + Tool string + Name string + Args string // may contain internal spaces +} + +// isDirective reports whether c is a comment directive. +// This code is also in go/printer. +func isDirective(c string) bool { + // "//line " is a line directive. + // "//extern " is for gccgo. + // "//export " is for cgo. + // (The // has been removed.) + if strings.HasPrefix(c, "line ") || strings.HasPrefix(c, "extern ") || strings.HasPrefix(c, "export ") { + return true + } + + // "//[a-z0-9]+:[a-z0-9]" + // (The // has been removed.) + colon := strings.Index(c, ":") + if colon <= 0 || colon+1 >= len(c) { + return false + } + for i := 0; i <= colon+1; i++ { + if i == colon { + continue + } + b := c[i] + if !('a' <= b && b <= 'z' || '0' <= b && b <= '9') { + return false + } + } + return true +} + +// Directives returns the directives within the comment. +func Directives(g *ast.CommentGroup) (res []*Directive) { + if g != nil { + // Avoid (*ast.CommentGroup).Text() as it swallows directives. + for _, c := range g.List { + if len(c.Text) > 2 && + c.Text[1] == '/' && + c.Text[2] != ' ' && + isDirective(c.Text[2:]) { + + tool, nameargs, ok := strings.Cut(c.Text[2:], ":") + if !ok { + // Must be one of {line,extern,export}. + tool, nameargs = "", tool + } + name, args, _ := strings.Cut(nameargs, " ") // tab?? + // Permit an additional line comment after the args, chiefly to support + // [golang.org/x/tools/go/analysis/analysistest]. 
+				args, _, _ = strings.Cut(args, "//")
+				res = append(res, &Directive{
+					Pos:  c.Slash,
+					Tool: tool,
+					Name: name,
+					Args: strings.TrimSpace(args),
+				})
+			}
+		}
+	}
+	return
+}

From db6008cb90f09485deb11255e5dd6da114b4ecef Mon Sep 17 00:00:00 2001
From: Alan Donovan
Date: Wed, 5 Mar 2025 13:18:07 -0500
Subject: [PATCH 097/270] go/types/internal/play: show Cursor.Stack of selected node

Change-Id: Iaf6a6369e05ded0b10b85f468d8fbf91269373e4
Reviewed-on: https://go-review.googlesource.com/c/tools/+/655135
Reviewed-by: Jonathan Amsterdam
Auto-Submit: Alan Donovan
LUCI-TryBot-Result: Go LUCI
---
 go/types/internal/play/play.go | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/go/types/internal/play/play.go b/go/types/internal/play/play.go
index f1318ac247a..4212a6b82cf 100644
--- a/go/types/internal/play/play.go
+++ b/go/types/internal/play/play.go
@@ -30,8 +30,10 @@ import (
 	"strings"

 	"golang.org/x/tools/go/ast/astutil"
+	"golang.org/x/tools/go/ast/inspector"
 	"golang.org/x/tools/go/packages"
 	"golang.org/x/tools/go/types/typeutil"
+	"golang.org/x/tools/internal/astutil/cursor"
 	"golang.org/x/tools/internal/typeparams"
 )

@@ -161,6 +163,15 @@ func handleSelectJSON(w http.ResponseWriter, req *http.Request) {
 			innermostExpr = e
 		}
 	}
+	// Show the cursor stack too.
+	// It's usually the same, but may differ in edge
+	// cases (e.g. around FuncType.Func).
+	inspect := inspector.New([]*ast.File{file})
+	if cur, ok := cursor.Root(inspect).FindPos(startPos, endPos); ok {
+		fmt.Fprintf(out, "Cursor.FindPos().Stack() = %v\n", cur.Stack(nil))
+	} else {
+		fmt.Fprintf(out, "Cursor.FindPos() failed\n")
+	}
 	fmt.Fprintf(out, "\n")

 	// Expression type information

From 25a90befcdf96d15f13dd947b7395c8531dc67de Mon Sep 17 00:00:00 2001
From: Alan Donovan
Date: Mon, 3 Mar 2025 23:06:21 -0500
Subject: [PATCH 098/270] gopls/internal/golang: Implementations for func types

This CL adds support to the Implementations query for function types.
The query relates two sets of locations:

1. the "func" token of each function declaration (FuncDecl or FuncLit).
   These are analogous to declarations of concrete methods.

2. uses of abstract functions:

   (a) the "func" token of each FuncType that is not part of
       Func{Decl,Lit}. These are analogous to interface{...} types.

   (b) the "(" paren of each dynamic call on a value of an abstract
       function type. These are analogous to references to interface
       method names, but no names are involved, which has historically
       made them hard to search for.

An Implementations query on a location in set 1 returns set 2, and
vice versa.

Only the local algorithm is implemented for now; the global one
(using an index analogous to methodsets) will follow.

This CL supersedes CL 448035 and CL 619515, both of which attempt to
unify the treatment of functions and interfaces in the methodsets
algorithm and in the index; but the two problems are not precisely
analogous, and I think we'll end up with more but simpler code if we
implement them separately.
+ tests, docs, relnotes Updates golang/go#56572 Change-Id: I18e1a7cc2f6c320112b9f3589323d04f9a52ef3c Reviewed-on: https://go-review.googlesource.com/c/tools/+/654556 Commit-Queue: Alan Donovan Reviewed-by: Jonathan Amsterdam LUCI-TryBot-Result: Go LUCI Auto-Submit: Alan Donovan --- gopls/doc/features/navigation.md | 22 +- gopls/doc/release/v0.19.0.md | 27 ++ gopls/internal/golang/implementation.go | 291 ++++++++++++++++-- gopls/internal/test/marker/doc.go | 7 +- gopls/internal/test/marker/marker_test.go | 10 +- .../testdata/implementation/signature.txt | 79 +++++ 6 files changed, 403 insertions(+), 33 deletions(-) create mode 100644 gopls/internal/test/marker/testdata/implementation/signature.txt diff --git a/gopls/doc/features/navigation.md b/gopls/doc/features/navigation.md index f46f2935683..f3454f7188c 100644 --- a/gopls/doc/features/navigation.md +++ b/gopls/doc/features/navigation.md @@ -85,7 +85,10 @@ Client support: The LSP [`textDocument/implementation`](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_implementation) -request queries the "implements" relation between interfaces and concrete types: +request queries the relation between abstract and concrete types and +their methods. + +Interfaces and concrete types are matched using method sets: - When invoked on a reference to an **interface type**, it returns the location of the declaration of each type that implements @@ -111,6 +114,17 @@ types with methods due to embedding) may be missing from the results. but that is not consistent with the "scalable" gopls design. --> +Functions, `func` types, and dynamic function calls are matched using signatures: + +- When invoked on the `func` token of a **function definition**, + it returns the locations of the matching signature types + and dynamic call expressions. +- When invoked on the `func` token of a **signature type**, + it returns the locations of the matching concrete function definitions. +- When invoked on the `(` token of a **dynamic function call**, + it returns the locations of the matching concrete function + definitions. + If either the target type or the candidate type are generic, the results will include the candidate type if there is any instantiation of the two types that would allow one to implement the other. @@ -120,6 +134,12 @@ types, without regard to consistency of substitutions across the method set or even within a single method. This may lead to occasional spurious matches.) +Since a type may be both a function type and a named type with methods +(for example, `http.HandlerFunc`), it may participate in both kinds of +implementation queries (by method-sets and function signatures). +Queries using method-sets should be invoked on the type or method name, +and queries using signatures should be invoked on a `func` or `(` token. + Client support: - **VS Code**: Use [Go to Implementations](https://code.visualstudio.com/docs/editor/editingevolved#_go-to-implementation) (`⌘F12`). - **Emacs + eglot**: Use `M-x eglot-find-implementation`. diff --git a/gopls/doc/release/v0.19.0.md b/gopls/doc/release/v0.19.0.md index 18088732656..149a474244a 100644 --- a/gopls/doc/release/v0.19.0.md +++ b/gopls/doc/release/v0.19.0.md @@ -7,6 +7,33 @@ # New features +## "Implementations" supports signature types + +The Implementations query reports the correspondence between abstract +and concrete types and their methods based on their method sets. 
+Now, it also reports the correspondence between function types, +dynamic function calls, and function definitions, based on their signatures. + +To use it, invoke an Implementations query on the `func` token of the +definition of a named function, named method, or function literal. +Gopls reports the set of function signature types that abstract this +function, and the set of dynamic calls through values of such types. + +Conversely, an Implementations query on the `func` token of a +signature type, or on the `(` paren of a dynamic function call, +reports the set of concrete functions that the signature abstracts +or that the call dispatches to. + +Since a type may be both a function type and a named type with methods +(for example, `http.HandlerFunc`), it may participate in both kinds of +Implements queries (method-sets and function signatures). +Queries using method-sets should be invoked on the type or method name, +and queries using signatures should be invoked on a `func` or `(` token. + +Only the local (same-package) algorithm is currently supported. +TODO: implement global. + + ## "Eliminate dot import" code action This code action, available on a dotted import, will offer to replace diff --git a/gopls/internal/golang/implementation.go b/gopls/internal/golang/implementation.go index a7a7e663d44..2d9a1e93ef3 100644 --- a/gopls/internal/golang/implementation.go +++ b/gopls/internal/golang/implementation.go @@ -12,6 +12,7 @@ import ( "go/token" "go/types" "reflect" + "slices" "sort" "strings" "sync" @@ -21,10 +22,13 @@ import ( "golang.org/x/tools/gopls/internal/cache" "golang.org/x/tools/gopls/internal/cache/metadata" "golang.org/x/tools/gopls/internal/cache/methodsets" + "golang.org/x/tools/gopls/internal/cache/parsego" "golang.org/x/tools/gopls/internal/file" "golang.org/x/tools/gopls/internal/protocol" "golang.org/x/tools/gopls/internal/util/bug" "golang.org/x/tools/gopls/internal/util/safetoken" + "golang.org/x/tools/internal/astutil/cursor" + "golang.org/x/tools/internal/astutil/edge" "golang.org/x/tools/internal/event" ) @@ -74,9 +78,26 @@ func Implementation(ctx context.Context, snapshot *cache.Snapshot, f file.Handle } func implementations(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pp protocol.Position) ([]protocol.Location, error) { - // First, find the object referenced at the cursor by type checking the - // current package. - obj, pkg, err := implementsObj(ctx, snapshot, fh.URI(), pp) + // Type check the current package. + pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) + if err != nil { + return nil, err + } + pos, err := pgf.PositionPos(pp) + if err != nil { + return nil, err + } + + // Find implementations based on func signatures. + if locs, err := implFuncs(pkg, pgf, pos); err != errNotHandled { + return locs, err + } + + // Find implementations based on method sets. + + // First, find the object referenced at the cursor. + // The object may be declared in a different package. + obj, err := implementsObj(pkg, pgf, pos) if err != nil { return nil, err } @@ -272,21 +293,9 @@ func offsetToLocation(ctx context.Context, snapshot *cache.Snapshot, filename st return m.OffsetLocation(start, end) } -// implementsObj returns the object to query for implementations, which is a -// type name or method. -// -// The returned Package is the narrowest package containing ppos, which is the -// package using the resulting obj but not necessarily the declaring package. 
-func implementsObj(ctx context.Context, snapshot *cache.Snapshot, uri protocol.DocumentURI, ppos protocol.Position) (types.Object, *cache.Package, error) { - pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, uri) - if err != nil { - return nil, nil, err - } - pos, err := pgf.PositionPos(ppos) - if err != nil { - return nil, nil, err - } - +// implementsObj returns the object to query for implementations, +// which is a type name or method. +func implementsObj(pkg *cache.Package, pgf *parsego.File, pos token.Pos) (types.Object, error) { // This function inherits the limitation of its predecessor in // requiring the selection to be an identifier (of a type or // method). But there's no fundamental reason why one could @@ -299,11 +308,11 @@ func implementsObj(ctx context.Context, snapshot *cache.Snapshot, uri protocol.D // TODO(adonovan): simplify: use objectsAt? path := pathEnclosingObjNode(pgf.File, pos) if path == nil { - return nil, nil, ErrNoIdentFound + return nil, ErrNoIdentFound } id, ok := path[0].(*ast.Ident) if !ok { - return nil, nil, ErrNoIdentFound + return nil, ErrNoIdentFound } // Is the object a type or method? Reject other kinds. @@ -319,17 +328,18 @@ func implementsObj(ctx context.Context, snapshot *cache.Snapshot, uri protocol.D // ok case *types.Func: if obj.Signature().Recv() == nil { - return nil, nil, fmt.Errorf("%s is a function, not a method", id.Name) + return nil, fmt.Errorf("%s is a function, not a method (query at 'func' token to find matching signatures)", id.Name) } case nil: - return nil, nil, fmt.Errorf("%s denotes unknown object", id.Name) + return nil, fmt.Errorf("%s denotes unknown object", id.Name) default: // e.g. *types.Var -> "var". kind := strings.ToLower(strings.TrimPrefix(reflect.TypeOf(obj).String(), "*types.")) - return nil, nil, fmt.Errorf("%s is a %s, not a type", id.Name, kind) + // TODO(adonovan): improve upon "nil is a nil, not a type". + return nil, fmt.Errorf("%s is a %s, not a type", id.Name, kind) } - return obj, pkg, nil + return obj, nil } // localImplementations searches within pkg for declarations of all @@ -679,9 +689,236 @@ func pathEnclosingObjNode(f *ast.File, pos token.Pos) []ast.Node { } // Reverse path so leaf is first element. - for i := 0; i < len(path)/2; i++ { - path[i], path[len(path)-1-i] = path[len(path)-1-i], path[i] - } + slices.Reverse(path) return path } + +// --- Implementations based on signature types -- + +// implFuncs finds Implementations based on func types. +// +// Just as an interface type abstracts a set of concrete methods, a +// function type abstracts a set of concrete functions. Gopls provides +// analogous operations for navigating from abstract to concrete and +// back in the domain of function types. +// +// A single type (for example http.HandlerFunc) can have both an +// underlying type of function (types.Signature) and have methods that +// cause it to implement an interface. To avoid a confusing user +// interface we want to separate the two operations so that the user +// can unambiguously specify the query they want. +// +// So, whereas Implementations queries on interface types are usually +// keyed by an identifier of a named type, Implementations queries on +// function types are keyed by the "func" keyword, or by the "(" of a +// call expression. The query relates two sets of locations: +// +// 1. the "func" token of each function declaration (FuncDecl or +// FuncLit). These are analogous to declarations of concrete +// methods. +// +// 2. 
uses of abstract functions: +// +// (a) the "func" token of each FuncType that is not part of +// Func{Decl,Lit}. These are analogous to interface{...} types. +// +// (b) the "(" paren of each dynamic call on a value of an +// abstract function type. These are analogous to references to +// interface method names, but no names are involved, which has +// historically made them hard to search for. +// +// An Implementations query on a location in set 1 returns set 2, +// and vice versa. +// +// implFuncs returns errNotHandled to indicate that we should try the +// regular method-sets algorithm. +func implFuncs(pkg *cache.Package, pgf *parsego.File, pos token.Pos) ([]protocol.Location, error) { + curSel, ok := pgf.Cursor.FindPos(pos, pos) + if !ok { + return nil, fmt.Errorf("no code selected") + } + + info := pkg.TypesInfo() + + // Find innermost enclosing FuncType or CallExpr. + // + // We are looking for specific tokens (FuncType.Func and + // CallExpr.Lparen), but FindPos prefers an adjoining + // subexpression: given f(x) without additional spaces between + // tokens, FindPos always returns either f or x, never the + // CallExpr itself. Thus we must ascend the tree. + // + // Another subtlety: due to an edge case in go/ast, FindPos at + // FuncDecl.Type.Func does not return FuncDecl.Type, only the + // FuncDecl, because the orders of tree positions and tokens + // are inconsistent. Consequently, the ancestors for a "func" + // token of Func{Lit,Decl} do not include FuncType, hence the + // explicit cases below. + for _, cur := range curSel.Stack(nil) { + switch n := cur.Node().(type) { + case *ast.FuncDecl, *ast.FuncLit: + if inToken(n.Pos(), "func", pos) { + // Case 1: concrete function declaration. + // Report uses of corresponding function types. + switch n := n.(type) { + case *ast.FuncDecl: + return funcUses(pkg, info.Defs[n.Name].Type()) + case *ast.FuncLit: + return funcUses(pkg, info.TypeOf(n.Type)) + } + } + + case *ast.FuncType: + if n.Func.IsValid() && inToken(n.Func, "func", pos) && !beneathFuncDef(cur) { + // Case 2a: function type. + // Report declarations of corresponding concrete functions. + return funcDefs(pkg, info.TypeOf(n)) + } + + case *ast.CallExpr: + if inToken(n.Lparen, "(", pos) { + t := dynamicFuncCallType(info, n) + if t == nil { + return nil, fmt.Errorf("not a dynamic function call") + } + // Case 2b: dynamic call of function value. + // Report declarations of corresponding concrete functions. + return funcDefs(pkg, t) + } + } + } + + // It's probably a query of a named type or method. + // Fall back to the method-sets computation. + return nil, errNotHandled +} + +var errNotHandled = errors.New("not handled") + +// funcUses returns all locations in the workspace that are dynamic +// uses of the specified function type. 
+func funcUses(pkg *cache.Package, t types.Type) ([]protocol.Location, error) { + var locs []protocol.Location + + // local search + for _, pgf := range pkg.CompiledGoFiles() { + for cur := range pgf.Cursor.Preorder((*ast.CallExpr)(nil), (*ast.FuncType)(nil)) { + var pos, end token.Pos + var ftyp types.Type + switch n := cur.Node().(type) { + case *ast.CallExpr: + ftyp = dynamicFuncCallType(pkg.TypesInfo(), n) + pos, end = n.Lparen, n.Lparen+token.Pos(len("(")) + + case *ast.FuncType: + if !beneathFuncDef(cur) { + // func type (not def) + ftyp = pkg.TypesInfo().TypeOf(n) + pos, end = n.Func, n.Func+token.Pos(len("func")) + } + } + if ftyp == nil { + continue // missing type information + } + if unify(t, ftyp) { + loc, err := pgf.PosLocation(pos, end) + if err != nil { + return nil, err + } + locs = append(locs, loc) + } + } + } + + // TODO(adonovan): implement global search + + return locs, nil +} + +// funcDefs returns all locations in the workspace that define +// functions of the specified type. +func funcDefs(pkg *cache.Package, t types.Type) ([]protocol.Location, error) { + var locs []protocol.Location + + // local search + for _, pgf := range pkg.CompiledGoFiles() { + for curFn := range pgf.Cursor.Preorder((*ast.FuncDecl)(nil), (*ast.FuncLit)(nil)) { + fn := curFn.Node() + var ftyp types.Type + switch fn := fn.(type) { + case *ast.FuncDecl: + ftyp = pkg.TypesInfo().Defs[fn.Name].Type() + case *ast.FuncLit: + ftyp = pkg.TypesInfo().TypeOf(fn) + } + if ftyp == nil { + continue // missing type information + } + if unify(t, ftyp) { + pos := fn.Pos() + loc, err := pgf.PosLocation(pos, pos+token.Pos(len("func"))) + if err != nil { + return nil, err + } + locs = append(locs, loc) + } + } + } + + // TODO(adonovan): implement global search, by analogy with + // methodsets algorithm. + // + // One optimization: if any signature type has free package + // names, look for matches only in packages among the rdeps of + // those packages. + + return locs, nil +} + +// beneathFuncDef reports whether the specified FuncType cursor is a +// child of Func{Decl,Lit}. +func beneathFuncDef(cur cursor.Cursor) bool { + ek, _ := cur.Edge() + switch ek { + case edge.FuncDecl_Type, edge.FuncLit_Type: + return true + } + return false +} + +// dynamicFuncCallType reports whether call is a dynamic (non-method) function call. +// If so, it returns the function type, otherwise nil. +// +// Tested via ../test/marker/testdata/implementation/signature.txt. +func dynamicFuncCallType(info *types.Info, call *ast.CallExpr) types.Type { + fun := ast.Unparen(call.Fun) + tv := info.Types[fun] + + // Reject conversion, or call to built-in. + if !tv.IsValue() { + return nil + } + + // Reject call to named func/method. + if id, ok := fun.(*ast.Ident); ok && is[*types.Func](info.Uses[id]) { + return nil + } + + // Reject method selections (T.method() or x.method()) + if sel, ok := fun.(*ast.SelectorExpr); ok { + seln, ok := info.Selections[sel] + if !ok || seln.Kind() != types.FieldVal { + return nil + } + } + + // TODO(adonovan): consider x() where x : TypeParam. + return tv.Type.Underlying() // e.g. x() or x.field() +} + +// inToken reports whether pos is within the token of +// the specified position and string. 
+func inToken(tokPos token.Pos, tokStr string, pos token.Pos) bool { + return tokPos <= pos && pos <= tokPos+token.Pos(len(tokStr)) +} diff --git a/gopls/internal/test/marker/doc.go b/gopls/internal/test/marker/doc.go index dff8dfa109f..2fc3e042061 100644 --- a/gopls/internal/test/marker/doc.go +++ b/gopls/internal/test/marker/doc.go @@ -212,9 +212,10 @@ Here is the list of supported action markers: - hovererr(src, sm stringMatcher): performs a textDocument/hover at the src location, and checks that the error matches the given stringMatcher. - - implementations(src location, want ...location): makes a - textDocument/implementation query at the src location and - checks that the resulting set of locations matches want. + - implementation(src location, want ...location, err=stringMatcher): + makes a textDocument/implementation query at the src location and + checks that the resulting set of locations matches want. If err is + set, the implementation query must fail with the expected error. - incomingcalls(src location, want ...location): makes a callHierarchy/incomingCalls query at the src location, and checks that diff --git a/gopls/internal/test/marker/marker_test.go b/gopls/internal/test/marker/marker_test.go index a3e62d35968..3ff7da65ac5 100644 --- a/gopls/internal/test/marker/marker_test.go +++ b/gopls/internal/test/marker/marker_test.go @@ -584,7 +584,7 @@ var actionMarkerFuncs = map[string]func(marker){ "highlightall": actionMarkerFunc(highlightAllMarker), "hover": actionMarkerFunc(hoverMarker), "hovererr": actionMarkerFunc(hoverErrMarker), - "implementation": actionMarkerFunc(implementationMarker), + "implementation": actionMarkerFunc(implementationMarker, "err"), "incomingcalls": actionMarkerFunc(incomingCallsMarker), "inlayhints": actionMarkerFunc(inlayhintsMarker), "outgoingcalls": actionMarkerFunc(outgoingCallsMarker), @@ -2375,13 +2375,19 @@ func refsMarker(mark marker, src protocol.Location, want ...protocol.Location) { // implementationMarker implements the @implementation marker. func implementationMarker(mark marker, src protocol.Location, want ...protocol.Location) { + wantErr := namedArgFunc(mark, "err", convertStringMatcher, stringMatcher{}) + got, err := mark.server().Implementation(mark.ctx(), &protocol.ImplementationParams{ TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(src), }) - if err != nil { + if err != nil && wantErr.empty() { mark.errorf("implementation at %s failed: %v", src, err) return } + if !wantErr.empty() { + wantErr.checkErr(mark, err) + return + } if err := compareLocations(mark, got, want); err != nil { mark.errorf("implementation: %v", err) } diff --git a/gopls/internal/test/marker/testdata/implementation/signature.txt b/gopls/internal/test/marker/testdata/implementation/signature.txt new file mode 100644 index 00000000000..b94d048a135 --- /dev/null +++ b/gopls/internal/test/marker/testdata/implementation/signature.txt @@ -0,0 +1,79 @@ +Test of local Implementation queries using function signatures. + +Assertions: +- Query on "func" of a function type returns the corresponding concrete functions. +- Query on "func" of a concrete function returns corresponding function types. +- Query on "(" of a dynamic function call returns corresponding function types. +- Different signatures (Nullary vs Handler) don't correspond. + +The @loc markers use the suffixes Func, Type, Call for the three kinds. +Each query maps between these two sets: {Func} <=> {Type,Call}. 
+ +-- go.mod -- +module example.com +go 1.18 + +-- a/a.go -- +package a + +// R is short for Record. +type R struct{} + +// H is short for Handler. +type H func(*R) //@ loc(HType, "func"), implementation("func", aFunc, bFunc, cFunc) + +func aFunc(*R) {} //@ loc(aFunc, "func"), implementation("func", HType, hParamType, hCall) + +var bFunc = func(*R) {} //@ loc(bFunc, "func"), implementation("func", hParamType, hCall, HType) + +func nullary() { //@ loc(nullaryFunc, "func"), implementation("func", Nullary, fieldCall) + cFunc := func(*R) {} //@ loc(cFunc, "func"), implementation("func", hParamType, hCall, HType) + _ = cFunc +} + +type Nullary func() //@ loc(Nullary, "func") + +func _( + h func(*R)) { //@ loc(hParamType, "func"), implementation("func", aFunc, bFunc, cFunc) + + _ = aFunc // pacify unusedfunc + _ = nullary // pacify unusedfunc + _ = h + + h(nil) //@ loc(hCall, "("), implementation("(", aFunc, bFunc, cFunc) +} + +// generics: + +func _[T any](complex128) { + f1 := func(T) int { return 0 } //@ loc(f1Func, "func"), implementation("func", fParamType, fCall, f1Call, f2Call) + f2 := func(string) int { return 0 } //@ loc(f2Func, "func"), implementation("func", fParamType, fCall, f1Call, f2Call) + f3 := func(int) int { return 0 } //@ loc(f3Func, "func"), implementation("func", f1Call) + + f1(*new(T)) //@ loc(f1Call, "("), implementation("(", f1Func, f2Func, f3Func, f4Func) + f2("") //@ loc(f2Call, "("), implementation("(", f1Func, f2Func, f4Func) + _ = f3 // not called +} + +func f4[T any](T) int { return 0 } //@ loc(f4Func, "func"), implementation("func", fParamType, fCall, f1Call, f2Call) + +var _ = f4[string] // pacify unusedfunc + +func _( + f func(string) int, //@ loc(fParamType, "func"), implementation("func", f1Func, f2Func, f4Func) + err error) { + + f("") //@ loc(fCall, "("), implementation("(", f1Func, f2Func, f4Func) + + struct{x Nullary}{}.x() //@ loc(fieldCall, "("), implementation("(", nullaryFunc) + + // Calls that are not dynamic function calls: + _ = len("") //@ implementation("(", err="not a dynamic function call") + _ = int(0) //@ implementation("(", err="not a dynamic function call") + _ = error.Error(nil) //@ implementation("(", err="not a dynamic function call") + _ = err.Error() //@ implementation("(", err="not a dynamic function call") + _ = f4(0) //@ implementation("(", err="not a dynamic function call"), loc(f4Call, "(") +} + + + From 6a5b66bef78dc7a1cf8593b276f35102ec0cb11c Mon Sep 17 00:00:00 2001 From: Gopher Robot Date: Wed, 5 Mar 2025 11:56:25 -0800 Subject: [PATCH 099/270] go.mod: update golang.org/x dependencies Update golang.org/x dependencies to their latest tagged versions. 
Change-Id: I13ce38cd00119b55ee384af53f27a72feb72572b Reviewed-on: https://go-review.googlesource.com/c/tools/+/655020 Reviewed-by: Dmitri Shuralyov LUCI-TryBot-Result: Go LUCI Reviewed-by: David Chase Auto-Submit: Gopher Robot --- go.mod | 8 ++++---- go.sum | 16 ++++++++-------- gopls/go.mod | 8 ++++---- gopls/go.sum | 22 +++++++++++----------- 4 files changed, 27 insertions(+), 27 deletions(-) diff --git a/go.mod b/go.mod index bc7636b4cf8..3a120629b94 100644 --- a/go.mod +++ b/go.mod @@ -5,10 +5,10 @@ go 1.23.0 require ( github.com/google/go-cmp v0.6.0 github.com/yuin/goldmark v1.4.13 - golang.org/x/mod v0.23.0 - golang.org/x/net v0.35.0 - golang.org/x/sync v0.11.0 + golang.org/x/mod v0.24.0 + golang.org/x/net v0.37.0 + golang.org/x/sync v0.12.0 golang.org/x/telemetry v0.0.0-20240521205824-bda55230c457 ) -require golang.org/x/sys v0.30.0 // indirect +require golang.org/x/sys v0.31.0 // indirect diff --git a/go.sum b/go.sum index 2d11b060c08..3d0337c8351 100644 --- a/go.sum +++ b/go.sum @@ -2,13 +2,13 @@ github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/yuin/goldmark v1.4.13 h1:fVcFKWvrslecOb/tg+Cc05dkeYx540o0FuFt3nUVDoE= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -golang.org/x/mod v0.23.0 h1:Zb7khfcRGKk+kqfxFaP5tZqCnDZMjC5VtUBs87Hr6QM= -golang.org/x/mod v0.23.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= -golang.org/x/net v0.35.0 h1:T5GQRQb2y08kTAByq9L4/bz8cipCdA8FbRTXewonqY8= -golang.org/x/net v0.35.0/go.mod h1:EglIi67kWsHKlRzzVMUD93VMSWGFOMSZgxFjparz1Qk= -golang.org/x/sync v0.11.0 h1:GGz8+XQP4FvTTrjZPzNKTMFtSXH80RAzG+5ghFPgK9w= -golang.org/x/sync v0.11.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc= -golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/mod v0.24.0 h1:ZfthKaKaT4NrhGVZHO1/WDTwGES4De8KtWO0SIbNJMU= +golang.org/x/mod v0.24.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww= +golang.org/x/net v0.37.0 h1:1zLorHbz+LYj7MQlSf1+2tPIIgibq2eL5xkrGk6f+2c= +golang.org/x/net v0.37.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8= +golang.org/x/sync v0.12.0 h1:MHc5BpPuC30uJk597Ri8TV3CNZcTLu6B6z4lJy+g6Jw= +golang.org/x/sync v0.12.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik= +golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= golang.org/x/telemetry v0.0.0-20240521205824-bda55230c457 h1:zf5N6UOrA487eEFacMePxjXAJctxKmyjKUsjA11Uzuk= golang.org/x/telemetry v0.0.0-20240521205824-bda55230c457/go.mod h1:pRgIJT+bRLFKnoM1ldnzKoxTIn14Yxz928LQRYYgIN0= diff --git a/gopls/go.mod b/gopls/go.mod index 210943206b8..da7303222d2 100644 --- a/gopls/go.mod +++ b/gopls/go.mod @@ -5,11 +5,11 @@ go 1.24.0 require ( github.com/google/go-cmp v0.6.0 github.com/jba/templatecheck v0.7.1 - golang.org/x/mod v0.23.0 - golang.org/x/sync v0.11.0 - golang.org/x/sys v0.30.0 + golang.org/x/mod v0.24.0 + golang.org/x/sync v0.12.0 + golang.org/x/sys v0.31.0 golang.org/x/telemetry v0.0.0-20250220152412-165e2f84edbc - golang.org/x/text v0.22.0 + golang.org/x/text v0.23.0 golang.org/x/tools v0.30.0 golang.org/x/vuln v1.1.4 gopkg.in/yaml.v3 v3.0.1 diff --git a/gopls/go.sum b/gopls/go.sum index ef93b2c4601..20633541388 100644 --- a/gopls/go.sum +++ b/gopls/go.sum @@ -16,36 +16,36 @@ 
github.com/rogpeppe/go-internal v1.13.2-0.20241226121412-a5dc8ff20d0a h1:w3tdWGK github.com/rogpeppe/go-internal v1.13.2-0.20241226121412-a5dc8ff20d0a/go.mod h1:S8kfXMp+yh77OxPD4fdM6YUknrZpQxLhvxzS4gDHENY= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= -golang.org/x/crypto v0.33.0/go.mod h1:bVdXmD7IV/4GdElGPozy6U7lWdRXA4qyRVGJV57uQ5M= +golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc= golang.org/x/exp/typeparams v0.0.0-20250218142911-aa4b98e5adaa h1:Br3+0EZZohShrmVVc85znGpxw7Ca8hsUJlrdT/JQGw8= golang.org/x/exp/typeparams v0.0.0-20250218142911-aa4b98e5adaa/go.mod h1:LKZHyeOpPuZcMgxeHjJp4p5yvxrCX1xDvH10zYHhjjQ= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= -golang.org/x/mod v0.23.0 h1:Zb7khfcRGKk+kqfxFaP5tZqCnDZMjC5VtUBs87Hr6QM= -golang.org/x/mod v0.23.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= +golang.org/x/mod v0.24.0 h1:ZfthKaKaT4NrhGVZHO1/WDTwGES4De8KtWO0SIbNJMU= +golang.org/x/mod v0.24.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= -golang.org/x/net v0.35.0/go.mod h1:EglIi67kWsHKlRzzVMUD93VMSWGFOMSZgxFjparz1Qk= +golang.org/x/net v0.37.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8= golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -golang.org/x/sync v0.11.0 h1:GGz8+XQP4FvTTrjZPzNKTMFtSXH80RAzG+5ghFPgK9w= -golang.org/x/sync v0.11.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.12.0 h1:MHc5BpPuC30uJk597Ri8TV3CNZcTLu6B6z4lJy+g6Jw= +golang.org/x/sync v0.12.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc= -golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik= +golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= golang.org/x/telemetry v0.0.0-20240521205824-bda55230c457/go.mod h1:pRgIJT+bRLFKnoM1ldnzKoxTIn14Yxz928LQRYYgIN0= golang.org/x/telemetry v0.0.0-20250220152412-165e2f84edbc h1:HS+G1Mhh2dxM8ObutfYKdjfD7zpkyeP/UxeRnJpIZtQ= golang.org/x/telemetry v0.0.0-20250220152412-165e2f84edbc/go.mod h1:bDzXkYUaHzz51CtDy5kh/jR4lgPxsdbqC37kp/dzhCc= golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= -golang.org/x/term v0.29.0/go.mod h1:6bl4lRlvVuDgSf3179VpIxBF0o10JUpXWOnI7nErv7s= +golang.org/x/term v0.30.0/go.mod h1:NYYFdzHoI5wRh/h5tDMdMqCqPJZEuNqVR5xJLd/n67g= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= -golang.org/x/text v0.22.0 
h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM= -golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY= +golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY= +golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4= golang.org/x/vuln v1.1.4 h1:Ju8QsuyhX3Hk8ma3CesTbO8vfJD9EvUBgHvkxHBzj0I= golang.org/x/vuln v1.1.4/go.mod h1:F+45wmU18ym/ca5PLTPLsSzr2KppzswxPP603ldA67s= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= From b08c7a26ea3c519d19f4e2095d070ca8ce65161a Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Wed, 5 Mar 2025 13:45:05 -0500 Subject: [PATCH 100/270] gopls/internal/util/fingerprint: split from cache/methodsets This CL splits the fingerprint data type into its own package, as the index for Implementations by signatures will need it, but is otherwise unrelated to the logic to build the index by method sets. Updates golang/go#56572 Change-Id: I87905d3c5f3d555f100f318b97080e6802b616e4 Reviewed-on: https://go-review.googlesource.com/c/tools/+/655175 Auto-Submit: Alan Donovan LUCI-TryBot-Result: Go LUCI Reviewed-by: Jonathan Amsterdam Commit-Queue: Alan Donovan --- gopls/internal/cache/methodsets/methodsets.go | 11 +-- .../fingerprint}/fingerprint.go | 85 ++++++++++++------- .../fingerprint}/fingerprint_test.go | 41 ++++----- 3 files changed, 78 insertions(+), 59 deletions(-) rename gopls/internal/{cache/methodsets => util/fingerprint}/fingerprint.go (83%) rename gopls/internal/{cache/methodsets => util/fingerprint}/fingerprint_test.go (79%) diff --git a/gopls/internal/cache/methodsets/methodsets.go b/gopls/internal/cache/methodsets/methodsets.go index 3026819ee81..2387050f2d9 100644 --- a/gopls/internal/cache/methodsets/methodsets.go +++ b/gopls/internal/cache/methodsets/methodsets.go @@ -52,6 +52,7 @@ import ( "golang.org/x/tools/go/types/objectpath" "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/gopls/internal/util/fingerprint" "golang.org/x/tools/gopls/internal/util/frob" "golang.org/x/tools/gopls/internal/util/safetoken" "golang.org/x/tools/internal/typesinternal" @@ -195,7 +196,7 @@ func implements(x, y *gobMethodSet) bool { // so a string match is sufficient. match = mx.Sum&my.Sum == my.Sum && mx.Fingerprint == my.Fingerprint } else { - match = unify(mx.parse(), my.parse()) + match = fingerprint.Matches(mx.parse(), my.parse()) } return !match } @@ -326,7 +327,7 @@ func methodSetInfo(t types.Type, setIndexInfo func(*gobMethod, *types.Func)) *go for i := 0; i < mset.Len(); i++ { m := mset.At(i).Obj().(*types.Func) id := m.Id() - fp, isTricky := fingerprint(m.Signature()) + fp, isTricky := fingerprint.Encode(m.Signature()) if isTricky { tricky = true } @@ -389,7 +390,7 @@ type gobMethod struct { ObjectPath int // object path of method relative to PkgPath // internal fields (not serialized) - tree atomic.Pointer[sexpr] // fingerprint tree, parsed on demand + tree atomic.Pointer[fingerprint.Tree] // fingerprint tree, parsed on demand } // A gobPosition records the file, offset, and length of an identifier. @@ -400,10 +401,10 @@ type gobPosition struct { // parse returns the method's parsed fingerprint tree. // It may return a new instance or a cached one. 
-func (m *gobMethod) parse() sexpr { +func (m *gobMethod) parse() fingerprint.Tree { ptr := m.tree.Load() if ptr == nil { - tree := parseFingerprint(m.Fingerprint) + tree := fingerprint.Parse(m.Fingerprint) ptr = &tree m.tree.Store(ptr) // may race; that's ok } diff --git a/gopls/internal/cache/methodsets/fingerprint.go b/gopls/internal/util/fingerprint/fingerprint.go similarity index 83% rename from gopls/internal/cache/methodsets/fingerprint.go rename to gopls/internal/util/fingerprint/fingerprint.go index 05ccfe0911c..2b657ba7857 100644 --- a/gopls/internal/cache/methodsets/fingerprint.go +++ b/gopls/internal/util/fingerprint/fingerprint.go @@ -1,7 +1,13 @@ -// Copyright 2024 The Go Authors. All rights reserved. +// Copyright 2025 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -package methodsets + +// Package fingerprint defines a function to [Encode] types as strings +// with the property that identical types have equal string encodings, +// in most cases. In the remaining cases (mostly involving generic +// types), the encodings can be parsed using [Parse] into [Tree] form +// and matched using [Matches]. +package fingerprint import ( "fmt" @@ -12,6 +18,52 @@ import ( "text/scanner" ) +// Encode returns an encoding of a [types.Type] such that, in +// most cases, Encode(x) == Encode(y) iff [types.Identical](x, y). +// +// For a minority of types, mostly involving type parameters, identity +// cannot be reduced to string comparison; these types are called +// "tricky", and are indicated by the boolean result. +// +// In general, computing identity correctly for tricky types requires +// the type checker. However, the fingerprint encoding can be parsed +// by [Parse] into a [Tree] form that permits simple matching sufficient +// to allow a type parameter to unify with any subtree; see [Match]. +// +// In the standard library, 99.8% of package-level types have a +// non-tricky method-set. The most common exceptions are due to type +// parameters. +// +// fingerprint.Encode is defined only for the signature types of functions +// and methods. It must not be called for "untyped" basic types, nor +// the type of a generic function. +func Encode(t types.Type) (_ string, tricky bool) { return fingerprint(t) } + +// A Tree is a parsed form of a fingerprint for use with [Matches]. +type Tree struct{ tree sexpr } + +// String returns the tree in an unspecified human-readable form. +func (tree Tree) String() string { + var out strings.Builder + writeSexpr(&out, tree.tree) + return out.String() +} + +// Parse parses a fingerprint into tree form. +// +// The input must have been produced by [Encode] at the same source +// version; parsing is thus infallible. +func Parse(fp string) Tree { + return Tree{parseFingerprint(fp)} +} + +// Matches reports whether two fingerprint trees match, meaning that +// under some conditions (for example, particular instantiations of +// type parameters) the two types may be identical. +func Matches(x, y Tree) bool { + return unify(x.tree, y.tree) +} + // Fingerprint syntax // // The lexical syntax is essentially Lisp S-expressions: @@ -38,25 +90,6 @@ import ( // // field = IDENT IDENT STRING τ -- name, embedded?, tag, type -// fingerprint returns an encoding of a [types.Type] such that, in -// most cases, fingerprint(x) == fingerprint(t) iff types.Identical(x, y). 
-// -// For a minority of types, mostly involving type parameters, identity -// cannot be reduced to string comparison; these types are called -// "tricky", and are indicated by the boolean result. -// -// In general, computing identity correctly for tricky types requires -// the type checker. However, the fingerprint encoding can be parsed -// by [parseFingerprint] into a tree form that permits simple matching -// sufficient to allow a type parameter to unify with any subtree. -// -// In the standard library, 99.8% of package-level types have a -// non-tricky method-set. The most common exceptions are due to type -// parameters. -// -// fingerprint is defined only for the signature types of methods. It -// must not be called for "untyped" basic types, nor the type of a -// generic function. func fingerprint(t types.Type) (string, bool) { var buf strings.Builder tricky := false @@ -202,8 +235,6 @@ func fingerprint(t types.Type) (string, bool) { return buf.String(), tricky } -const symTypeparam = "typeparam" - // sexpr defines the representation of a fingerprint tree. type ( sexpr any // = string | int | symbol | *cons | nil @@ -272,12 +303,6 @@ func parseFingerprint(fp string) sexpr { return parse() } -func sexprString(x sexpr) string { - var out strings.Builder - writeSexpr(&out, x) - return out.String() -} - // writeSexpr formats an S-expression. // It is provided for debugging. func writeSexpr(out *strings.Builder, x sexpr) { @@ -355,3 +380,5 @@ func isTypeParam(x sexpr) int { } return -1 } + +const symTypeparam = "typeparam" diff --git a/gopls/internal/cache/methodsets/fingerprint_test.go b/gopls/internal/util/fingerprint/fingerprint_test.go similarity index 79% rename from gopls/internal/cache/methodsets/fingerprint_test.go rename to gopls/internal/util/fingerprint/fingerprint_test.go index 795ddaa965b..7a7a2fe7569 100644 --- a/gopls/internal/cache/methodsets/fingerprint_test.go +++ b/gopls/internal/util/fingerprint/fingerprint_test.go @@ -1,13 +1,8 @@ -// Copyright 2024 The Go Authors. All rights reserved. +// Copyright 2025 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -package methodsets - -// This is an internal test of [fingerprint] and [unify]. -// -// TODO(adonovan): avoid internal tests. -// Break fingerprint.go off into its own package? +package fingerprint_test import ( "go/types" @@ -15,15 +10,15 @@ import ( "golang.org/x/tools/go/packages" "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/internal/testenv" + "golang.org/x/tools/gopls/internal/util/fingerprint" "golang.org/x/tools/internal/testfiles" "golang.org/x/tools/txtar" ) -// Test_fingerprint runs the fingerprint encoder, decoder, and printer +// Test runs the fingerprint encoder, decoder, and printer // on the types of all package-level symbols in gopls, and ensures // that parse+print is lossless. -func Test_fingerprint(t *testing.T) { +func Test(t *testing.T) { if testing.Short() { t.Skip("skipping slow test") } @@ -54,7 +49,7 @@ func Test_fingerprint(t *testing.T) { continue // untyped constant } - fp, tricky := fingerprint(typ) // check Type encoder doesn't panic + fp, tricky := fingerprint.Encode(typ) // check Type encoder doesn't panic // All equivalent (non-tricky) types have the same fingerprint. 
if !tricky { @@ -66,8 +61,8 @@ func Test_fingerprint(t *testing.T) { } } - tree := parseFingerprint(fp) // check parser doesn't panic - fp2 := sexprString(tree) // check formatter doesn't pannic + tree := fingerprint.Parse(fp) // check parser doesn't panic + fp2 := tree.String() // check formatter doesn't pannic // A parse+print round-trip should be lossless. if fp != fp2 { @@ -79,12 +74,8 @@ func Test_fingerprint(t *testing.T) { } } -// Test_unify exercises the matching algorithm for generic types. -func Test_unify(t *testing.T) { - if testenv.Go1Point() < 24 { - testenv.NeedsGoExperiment(t, "aliastypeparams") // testenv.Go1Point() >= 24 implies aliastypeparams=1 - } - +// TestMatches exercises the matching algorithm for generic types. +func TestMatches(t *testing.T) { const src = ` -- go.mod -- module example.com @@ -167,17 +158,17 @@ func E3(int8) uint32 a := lookup(test.a) b := lookup(test.b) - afp, _ := fingerprint(a) - bfp, _ := fingerprint(b) + afp, _ := fingerprint.Encode(a) + bfp, _ := fingerprint.Encode(b) - atree := parseFingerprint(afp) - btree := parseFingerprint(bfp) + atree := fingerprint.Parse(afp) + btree := fingerprint.Parse(bfp) - got := unify(atree, btree) + got := fingerprint.Matches(atree, btree) if got != test.want { t.Errorf("a=%s b=%s method=%s: unify returned %t for these inputs:\n- %s\n- %s", test.a, test.b, test.method, - got, sexprString(atree), sexprString(btree)) + got, atree, btree) } } } From 7435a8148d95389cf0c726d9954341d63f69cc66 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Fri, 7 Mar 2025 11:52:17 -0500 Subject: [PATCH 101/270] gopls/internal/analysis/modernize: document workflow Also, add and document a -category flag on the modernize analyzer. (This is a stopgap until the checker driver supports this flag generally; see issue 72008 and CL 655555.) Fixes golang/go#72008 Change-Id: Iad5bc277b1251f2edb935f16077fd3add61041c5 Reviewed-on: https://go-review.googlesource.com/c/tools/+/655436 Auto-Submit: Alan Donovan Commit-Queue: Alan Donovan LUCI-TryBot-Result: Go LUCI Reviewed-by: Robert Findley --- gopls/doc/analyzers.md | 89 ++++++++++++++----- gopls/internal/analysis/modernize/doc.go | 89 ++++++++++++++----- .../internal/analysis/modernize/modernize.go | 42 +++++++-- gopls/internal/doc/api.json | 4 +- 4 files changed, 166 insertions(+), 58 deletions(-) diff --git a/gopls/doc/analyzers.md b/gopls/doc/analyzers.md index aa95e024089..bcf5590090a 100644 --- a/gopls/doc/analyzers.md +++ b/gopls/doc/analyzers.md @@ -476,39 +476,80 @@ Package documentation: [lostcancel](https://pkg.go.dev/golang.org/x/tools/go/ana This analyzer reports opportunities for simplifying and clarifying -existing code by using more modern features of Go, such as: - - - replacing an if/else conditional assignment by a call to the - built-in min or max functions added in go1.21; - - replacing sort.Slice(x, func(i, j int) bool) { return s[i] < s[j] } - by a call to slices.Sort(s), added in go1.21; - - replacing interface{} by the 'any' type added in go1.18; - - replacing append([]T(nil), s...) by slices.Clone(s) or - slices.Concat(s), added in go1.21; - - replacing a loop around an m[k]=v map update by a call - to one of the Collect, Copy, Clone, or Insert functions - from the maps package, added in go1.21; - - replacing []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...), - added in go1.19; - - replacing uses of context.WithCancel in tests with t.Context, added in - go1.24; - - replacing omitempty by omitzero on structs, added in go1.24; - - replacing append(s[:i], s[i+1]...) 
by slices.Delete(s, i, i+1), - added in go1.21 - - replacing a 3-clause for i := 0; i < n; i++ {} loop by - for i := range n {}, added in go1.22; - - replacing Split in "for range strings.Split(...)" by go1.24's - more efficient SplitSeq, or Fields with FieldSeq; +existing code by using more modern features of Go and its standard +library. + +Each diagnostic provides a fix. Our intent is that these fixes may +be safely applied en masse without changing the behavior of your +program. In some cases the suggested fixes are imperfect and may +lead to (for example) unused imports or unused local variables, +causing build breakage. However, these problems are generally +trivial to fix. We regard any modernizer whose fix changes program +behavior to have a serious bug and will endeavor to fix it. To apply all modernization fixes en masse, you can use the following command: - $ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -test ./... + $ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -fix -test ./... If the tool warns of conflicting fixes, you may need to run it more than once until it has applied all fixes cleanly. This command is not an officially supported interface and may change in the future. +Changes produced by this tool should be reviewed as usual before +being merged. In some cases, a loop may be replaced by a simple +function call, causing comments within the loop to be discarded. +Human judgment may be required to avoid losing comments of value. + +Each diagnostic reported by modernize has a specific category. (The +categories are listed below.) Diagnostics in some categories, such +as "efaceany" (which replaces "interface{}" with "any" where it is +safe to do so) are particularly numerous. It may ease the burden of +code review to apply fixes in two passes, the first change +consisting only of fixes of category "efaceany", the second +consisting of all others. This can be achieved using the -category flag: + + $ modernize -category=efaceany -fix -test ./... + $ modernize -category=-efaceany -fix -test ./... + +Categories of modernize diagnostic: + + - minmax: replace an if/else conditional assignment by a call to + the built-in min or max functions added in go1.21. + + - sortslice: replace sort.Slice(x, func(i, j int) bool) { return s[i] < s[j] } + by a call to slices.Sort(s), added in go1.21. + + - efaceany: replace interface{} by the 'any' type added in go1.18. + + - slicesclone: replace append([]T(nil), s...) by slices.Clone(s) or + slices.Concat(s), added in go1.21. + + - mapsloop: replace a loop around an m[k]=v map update by a call + to one of the Collect, Copy, Clone, or Insert functions from + the maps package, added in go1.21. + + - fmtappendf: replace []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...), + added in go1.19. + + - testingcontext: replace uses of context.WithCancel in tests + with t.Context, added in go1.24. + + - omitzero: replace omitempty by omitzero on structs, added in go1.24. + + - bloop: replace "for i := range b.N" or "for range b.N" in a + benchmark with "for b.Loop()", and remove any preceding calls + to b.StopTimer, b.StartTimer, and b.ResetTimer. + + - slicesdelete: replace append(s[:i], s[i+1]...) by + slices.Delete(s, i, i+1), added in go1.21. + + - rangeint: replace a 3-clause "for i := 0; i < n; i++" loop by + "for i := range n", added in go1.22. + + - stringseq: replace Split in "for range strings.Split(...)" by go1.24's + more efficient SplitSeq, or Fields with FieldSeq. 
+ Default: on. Package documentation: [modernize](https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/modernize) diff --git a/gopls/internal/analysis/modernize/doc.go b/gopls/internal/analysis/modernize/doc.go index b12abab7063..931a2e6bd45 100644 --- a/gopls/internal/analysis/modernize/doc.go +++ b/gopls/internal/analysis/modernize/doc.go @@ -9,36 +9,77 @@ // modernize: simplify code by using modern constructs // // This analyzer reports opportunities for simplifying and clarifying -// existing code by using more modern features of Go, such as: -// -// - replacing an if/else conditional assignment by a call to the -// built-in min or max functions added in go1.21; -// - replacing sort.Slice(x, func(i, j int) bool) { return s[i] < s[j] } -// by a call to slices.Sort(s), added in go1.21; -// - replacing interface{} by the 'any' type added in go1.18; -// - replacing append([]T(nil), s...) by slices.Clone(s) or -// slices.Concat(s), added in go1.21; -// - replacing a loop around an m[k]=v map update by a call -// to one of the Collect, Copy, Clone, or Insert functions -// from the maps package, added in go1.21; -// - replacing []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...), -// added in go1.19; -// - replacing uses of context.WithCancel in tests with t.Context, added in -// go1.24; -// - replacing omitempty by omitzero on structs, added in go1.24; -// - replacing append(s[:i], s[i+1]...) by slices.Delete(s, i, i+1), -// added in go1.21 -// - replacing a 3-clause for i := 0; i < n; i++ {} loop by -// for i := range n {}, added in go1.22; -// - replacing Split in "for range strings.Split(...)" by go1.24's -// more efficient SplitSeq, or Fields with FieldSeq; +// existing code by using more modern features of Go and its standard +// library. +// +// Each diagnostic provides a fix. Our intent is that these fixes may +// be safely applied en masse without changing the behavior of your +// program. In some cases the suggested fixes are imperfect and may +// lead to (for example) unused imports or unused local variables, +// causing build breakage. However, these problems are generally +// trivial to fix. We regard any modernizer whose fix changes program +// behavior to have a serious bug and will endeavor to fix it. // // To apply all modernization fixes en masse, you can use the // following command: // -// $ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -test ./... +// $ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -fix -test ./... // // If the tool warns of conflicting fixes, you may need to run it more // than once until it has applied all fixes cleanly. This command is // not an officially supported interface and may change in the future. +// +// Changes produced by this tool should be reviewed as usual before +// being merged. In some cases, a loop may be replaced by a simple +// function call, causing comments within the loop to be discarded. +// Human judgment may be required to avoid losing comments of value. +// +// Each diagnostic reported by modernize has a specific category. (The +// categories are listed below.) Diagnostics in some categories, such +// as "efaceany" (which replaces "interface{}" with "any" where it is +// safe to do so) are particularly numerous. It may ease the burden of +// code review to apply fixes in two passes, the first change +// consisting only of fixes of category "efaceany", the second +// consisting of all others. 
This can be achieved using the -category flag: +// +// $ modernize -category=efaceany -fix -test ./... +// $ modernize -category=-efaceany -fix -test ./... +// +// Categories of modernize diagnostic: +// +// - minmax: replace an if/else conditional assignment by a call to +// the built-in min or max functions added in go1.21. +// +// - sortslice: replace sort.Slice(x, func(i, j int) bool) { return s[i] < s[j] } +// by a call to slices.Sort(s), added in go1.21. +// +// - efaceany: replace interface{} by the 'any' type added in go1.18. +// +// - slicesclone: replace append([]T(nil), s...) by slices.Clone(s) or +// slices.Concat(s), added in go1.21. +// +// - mapsloop: replace a loop around an m[k]=v map update by a call +// to one of the Collect, Copy, Clone, or Insert functions from +// the maps package, added in go1.21. +// +// - fmtappendf: replace []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...), +// added in go1.19. +// +// - testingcontext: replace uses of context.WithCancel in tests +// with t.Context, added in go1.24. +// +// - omitzero: replace omitempty by omitzero on structs, added in go1.24. +// +// - bloop: replace "for i := range b.N" or "for range b.N" in a +// benchmark with "for b.Loop()", and remove any preceding calls +// to b.StopTimer, b.StartTimer, and b.ResetTimer. +// +// - slicesdelete: replace append(s[:i], s[i+1]...) by +// slices.Delete(s, i, i+1), added in go1.21. +// +// - rangeint: replace a 3-clause "for i := 0; i < n; i++" loop by +// "for i := range n", added in go1.22. +// +// - stringseq: replace Split in "for range strings.Split(...)" by go1.24's +// more efficient SplitSeq, or Fields with FieldSeq. package modernize diff --git a/gopls/internal/analysis/modernize/modernize.go b/gopls/internal/analysis/modernize/modernize.go index 96e8b325df4..fb7d43eb8d7 100644 --- a/gopls/internal/analysis/modernize/modernize.go +++ b/gopls/internal/analysis/modernize/modernize.go @@ -12,6 +12,7 @@ import ( "go/types" "iter" "regexp" + "slices" "strings" "golang.org/x/tools/go/analysis" @@ -36,6 +37,15 @@ var Analyzer = &analysis.Analyzer{ URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/modernize", } +// Stopgap until general solution in CL 655555 lands. A change to the +// cmd/vet CLI requires a proposal whereas a change to an analyzer's +// flag set does not. +var category string + +func init() { + Analyzer.Flags.StringVar(&category, "category", "", "comma-separated list of categories to apply; with a leading '-', a list of categories to ignore") +} + func run(pass *analysis.Pass) (any, error) { // Decorate pass.Report to suppress diagnostics in generated files. // @@ -55,6 +65,10 @@ func run(pass *analysis.Pass) (any, error) { if diag.Category == "" { panic("Diagnostic.Category is unset") } + // TODO(adonovan): stopgap until CL 655555 lands. + if !enabledCategory(category, diag.Category) { + return + } if _, ok := generated[pass.Fset.File(diag.Pos)]; ok { return // skip checking if it's generated code } @@ -76,14 +90,7 @@ func run(pass *analysis.Pass) (any, error) { sortslice(pass) testingContext(pass) - // TODO(adonovan): - // - more modernizers here; see #70815. - // - opt: interleave these micro-passes within a single inspection. - // - solve the "duplicate import" problem (#68765) when a number of - // fixes in the same file are applied in parallel and all add - // the same import. The tests exhibit the problem. - // - should all diagnostics be of the form "x can be modernized by y" - // or is that a foolish consistency? 
+ // TODO(adonovan): opt: interleave these micro-passes within a single inspection. return nil, nil } @@ -159,3 +166,22 @@ var ( byteSliceType = types.NewSlice(types.Typ[types.Byte]) omitemptyRegex = regexp.MustCompile(`(?:^json| json):"[^"]*(,omitempty)(?:"|,[^"]*")\s?`) ) + +// enabledCategory reports whether a given category is enabled by the specified +// filter. filter is a comma-separated list of categories, optionally prefixed +// with `-` to disable all provided categories. All categories are enabled with +// an empty filter. +// +// (Will be superseded by https://go.dev/cl/655555.) +func enabledCategory(filter, category string) bool { + if filter == "" { + return true + } + // negation must be specified at the start + filter, exclude := strings.CutPrefix(filter, "-") + filters := strings.Split(filter, ",") + if slices.Contains(filters, category) { + return !exclude + } + return exclude +} diff --git a/gopls/internal/doc/api.json b/gopls/internal/doc/api.json index b9e0e78e950..b47d635638c 100644 --- a/gopls/internal/doc/api.json +++ b/gopls/internal/doc/api.json @@ -562,7 +562,7 @@ }, { "Name": "\"modernize\"", - "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code by using more modern features of Go, such as:\n\n - replacing an if/else conditional assignment by a call to the\n built-in min or max functions added in go1.21;\n - replacing sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21;\n - replacing interface{} by the 'any' type added in go1.18;\n - replacing append([]T(nil), s...) by slices.Clone(s) or\n slices.Concat(s), added in go1.21;\n - replacing a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions\n from the maps package, added in go1.21;\n - replacing []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...),\n added in go1.19;\n - replacing uses of context.WithCancel in tests with t.Context, added in\n go1.24;\n - replacing omitempty by omitzero on structs, added in go1.24;\n - replacing append(s[:i], s[i+1]...) by slices.Delete(s, i, i+1),\n added in go1.21\n - replacing a 3-clause for i := 0; i \u003c n; i++ {} loop by\n for i := range n {}, added in go1.22;\n - replacing Split in \"for range strings.Split(...)\" by go1.24's\n more efficient SplitSeq, or Fields with FieldSeq;\n\nTo apply all modernization fixes en masse, you can use the\nfollowing command:\n\n\t$ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -test ./...\n\nIf the tool warns of conflicting fixes, you may need to run it more\nthan once until it has applied all fixes cleanly. This command is\nnot an officially supported interface and may change in the future.", + "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code by using more modern features of Go and its standard\nlibrary.\n\nEach diagnostic provides a fix. Our intent is that these fixes may\nbe safely applied en masse without changing the behavior of your\nprogram. In some cases the suggested fixes are imperfect and may\nlead to (for example) unused imports or unused local variables,\ncausing build breakage. However, these problems are generally\ntrivial to fix. 
We regard any modernizer whose fix changes program\nbehavior to have a serious bug and will endeavor to fix it.\n\nTo apply all modernization fixes en masse, you can use the\nfollowing command:\n\n\t$ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -fix -test ./...\n\nIf the tool warns of conflicting fixes, you may need to run it more\nthan once until it has applied all fixes cleanly. This command is\nnot an officially supported interface and may change in the future.\n\nChanges produced by this tool should be reviewed as usual before\nbeing merged. In some cases, a loop may be replaced by a simple\nfunction call, causing comments within the loop to be discarded.\nHuman judgment may be required to avoid losing comments of value.\n\nEach diagnostic reported by modernize has a specific category. (The\ncategories are listed below.) Diagnostics in some categories, such\nas \"efaceany\" (which replaces \"interface{}\" with \"any\" where it is\nsafe to do so) are particularly numerous. It may ease the burden of\ncode review to apply fixes in two passes, the first change\nconsisting only of fixes of category \"efaceany\", the second\nconsisting of all others. This can be achieved using the -category flag:\n\n\t$ modernize -category=efaceany -fix -test ./...\n\t$ modernize -category=-efaceany -fix -test ./...\n\nCategories of modernize diagnostic:\n\n - minmax: replace an if/else conditional assignment by a call to\n the built-in min or max functions added in go1.21.\n\n - sortslice: replace sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21.\n\n - efaceany: replace interface{} by the 'any' type added in go1.18.\n\n - slicesclone: replace append([]T(nil), s...) by slices.Clone(s) or\n slices.Concat(s), added in go1.21.\n\n - mapsloop: replace a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions from\n the maps package, added in go1.21.\n\n - fmtappendf: replace []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...),\n added in go1.19.\n\n - testingcontext: replace uses of context.WithCancel in tests\n with t.Context, added in go1.24.\n\n - omitzero: replace omitempty by omitzero on structs, added in go1.24.\n\n - bloop: replace \"for i := range b.N\" or \"for range b.N\" in a\n benchmark with \"for b.Loop()\", and remove any preceding calls\n to b.StopTimer, b.StartTimer, and b.ResetTimer.\n\n - slicesdelete: replace append(s[:i], s[i+1]...) by\n slices.Delete(s, i, i+1), added in go1.21.\n\n - rangeint: replace a 3-clause \"for i := 0; i \u003c n; i++\" loop by\n \"for i := range n\", added in go1.22.\n\n - stringseq: replace Split in \"for range strings.Split(...)\" by go1.24's\n more efficient SplitSeq, or Fields with FieldSeq.", "Default": "true", "Status": "" }, @@ -1338,7 +1338,7 @@ }, { "Name": "modernize", - "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code by using more modern features of Go, such as:\n\n - replacing an if/else conditional assignment by a call to the\n built-in min or max functions added in go1.21;\n - replacing sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21;\n - replacing interface{} by the 'any' type added in go1.18;\n - replacing append([]T(nil), s...) 
by slices.Clone(s) or\n slices.Concat(s), added in go1.21;\n - replacing a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions\n from the maps package, added in go1.21;\n - replacing []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...),\n added in go1.19;\n - replacing uses of context.WithCancel in tests with t.Context, added in\n go1.24;\n - replacing omitempty by omitzero on structs, added in go1.24;\n - replacing append(s[:i], s[i+1]...) by slices.Delete(s, i, i+1),\n added in go1.21\n - replacing a 3-clause for i := 0; i \u003c n; i++ {} loop by\n for i := range n {}, added in go1.22;\n - replacing Split in \"for range strings.Split(...)\" by go1.24's\n more efficient SplitSeq, or Fields with FieldSeq;\n\nTo apply all modernization fixes en masse, you can use the\nfollowing command:\n\n\t$ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -test ./...\n\nIf the tool warns of conflicting fixes, you may need to run it more\nthan once until it has applied all fixes cleanly. This command is\nnot an officially supported interface and may change in the future.", + "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code by using more modern features of Go and its standard\nlibrary.\n\nEach diagnostic provides a fix. Our intent is that these fixes may\nbe safely applied en masse without changing the behavior of your\nprogram. In some cases the suggested fixes are imperfect and may\nlead to (for example) unused imports or unused local variables,\ncausing build breakage. However, these problems are generally\ntrivial to fix. We regard any modernizer whose fix changes program\nbehavior to have a serious bug and will endeavor to fix it.\n\nTo apply all modernization fixes en masse, you can use the\nfollowing command:\n\n\t$ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -fix -test ./...\n\nIf the tool warns of conflicting fixes, you may need to run it more\nthan once until it has applied all fixes cleanly. This command is\nnot an officially supported interface and may change in the future.\n\nChanges produced by this tool should be reviewed as usual before\nbeing merged. In some cases, a loop may be replaced by a simple\nfunction call, causing comments within the loop to be discarded.\nHuman judgment may be required to avoid losing comments of value.\n\nEach diagnostic reported by modernize has a specific category. (The\ncategories are listed below.) Diagnostics in some categories, such\nas \"efaceany\" (which replaces \"interface{}\" with \"any\" where it is\nsafe to do so) are particularly numerous. It may ease the burden of\ncode review to apply fixes in two passes, the first change\nconsisting only of fixes of category \"efaceany\", the second\nconsisting of all others. This can be achieved using the -category flag:\n\n\t$ modernize -category=efaceany -fix -test ./...\n\t$ modernize -category=-efaceany -fix -test ./...\n\nCategories of modernize diagnostic:\n\n - minmax: replace an if/else conditional assignment by a call to\n the built-in min or max functions added in go1.21.\n\n - sortslice: replace sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21.\n\n - efaceany: replace interface{} by the 'any' type added in go1.18.\n\n - slicesclone: replace append([]T(nil), s...) 
by slices.Clone(s) or\n slices.Concat(s), added in go1.21.\n\n - mapsloop: replace a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions from\n the maps package, added in go1.21.\n\n - fmtappendf: replace []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...),\n added in go1.19.\n\n - testingcontext: replace uses of context.WithCancel in tests\n with t.Context, added in go1.24.\n\n - omitzero: replace omitempty by omitzero on structs, added in go1.24.\n\n - bloop: replace \"for i := range b.N\" or \"for range b.N\" in a\n benchmark with \"for b.Loop()\", and remove any preceding calls\n to b.StopTimer, b.StartTimer, and b.ResetTimer.\n\n - slicesdelete: replace append(s[:i], s[i+1]...) by\n slices.Delete(s, i, i+1), added in go1.21.\n\n - rangeint: replace a 3-clause \"for i := 0; i \u003c n; i++\" loop by\n \"for i := range n\", added in go1.22.\n\n - stringseq: replace Split in \"for range strings.Split(...)\" by go1.24's\n more efficient SplitSeq, or Fields with FieldSeq.", "URL": "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/modernize", "Default": true }, From 29f81e9da6bf86f0b8ca0004cd7ea205e80c8ab5 Mon Sep 17 00:00:00 2001 From: Hongxiang Jiang Date: Tue, 4 Mar 2025 16:20:08 -0500 Subject: [PATCH 102/270] gopls/internal/cache: filter **/foo match any depth Based on description of "build.directoryFilters": "-**/node_modules" should match node_modules at any level. Filter interpreted "**/foo" as "/foo/" but input path is "foo/" causing the mismatch. This CL make following changes: - add "^/" instead of "/" in the front when interpreting user input filters. - ensure the leading "/" when consuming file paths. - keep the performance by removing the leading "^/.*". Replace Filterer by a function accepting rules (raw filters) and returning a function can be used to determine the input path is included or not. Eliminate double-negative by returning true if included, false otherwise. For golang/vscode-go#3692 Change-Id: Ia7d2ab76154db1411dc8c96cd0211eb9c008c3ac Reviewed-on: https://go-review.googlesource.com/c/tools/+/654357 Reviewed-by: Alan Donovan Auto-Submit: Hongxiang Jiang LUCI-TryBot-Result: Go LUCI --- gopls/internal/cache/filterer.go | 74 +++++++++++-------- gopls/internal/cache/session.go | 4 +- gopls/internal/cache/view.go | 8 +- gopls/internal/cache/view_test.go | 6 +- gopls/internal/golang/workspace_symbol.go | 5 +- .../internal/golang/workspace_symbol_test.go | 14 +++- 6 files changed, 63 insertions(+), 48 deletions(-) diff --git a/gopls/internal/cache/filterer.go b/gopls/internal/cache/filterer.go index 0ec18369bdf..13dbd8a1b04 100644 --- a/gopls/internal/cache/filterer.go +++ b/gopls/internal/cache/filterer.go @@ -11,45 +11,55 @@ import ( "strings" ) -type Filterer struct { - // Whether a filter is excluded depends on the operator (first char of the raw filter). - // Slices filters and excluded then should have the same length. - filters []*regexp.Regexp - excluded []bool -} - -// NewFilterer computes regular expression form of all raw filters -func NewFilterer(rawFilters []string) *Filterer { - var f Filterer - for _, filter := range rawFilters { +// PathIncludeFunc creates a function that determines if a given file path +// should be included based on a set of inclusion/exclusion rules. +// +// The `rules` parameter is a slice of strings, where each string represents a +// filtering rule. Each rule consists of an operator (`+` for inclusion, `-` +// for exclusion) followed by a path pattern. 
See more detail of rules syntax +// at [settings.BuildOptions.DirectoryFilters]. +// +// Rules are evaluated in order, and the last matching rule determines +// whether a path is included or excluded. +// +// Examples: +// - []{"-foo"}: Exclude "foo" at the current depth. +// - []{"-**foo"}: Exclude "foo" at any depth. +// - []{"+bar"}: Include "bar" at the current depth. +// - []{"-foo", "+foo/**/bar"}: Exclude all "foo" at current depth except +// directory "bar" under "foo" at any depth. +func PathIncludeFunc(rules []string) func(string) bool { + var matchers []*regexp.Regexp + var included []bool + for _, filter := range rules { filter = path.Clean(filepath.ToSlash(filter)) // TODO(dungtuanle): fix: validate [+-] prefix. op, prefix := filter[0], filter[1:] - // convertFilterToRegexp adds "/" at the end of prefix to handle cases where a filter is a prefix of another filter. + // convertFilterToRegexp adds "/" at the end of prefix to handle cases + // where a filter is a prefix of another filter. // For example, it prevents [+foobar, -foo] from excluding "foobar". - f.filters = append(f.filters, convertFilterToRegexp(filepath.ToSlash(prefix))) - f.excluded = append(f.excluded, op == '-') + matchers = append(matchers, convertFilterToRegexp(filepath.ToSlash(prefix))) + included = append(included, op == '+') } - return &f -} - -// Disallow return true if the path is excluded from the filterer's filters. -func (f *Filterer) Disallow(path string) bool { - // Ensure trailing but not leading slash. - path = strings.TrimPrefix(path, "/") - if !strings.HasSuffix(path, "/") { - path += "/" - } + return func(path string) bool { + // Ensure leading and trailing slashes. + if !strings.HasPrefix(path, "/") { + path = "/" + path + } + if !strings.HasSuffix(path, "/") { + path += "/" + } - // TODO(adonovan): opt: iterate in reverse and break at first match. - excluded := false - for i, filter := range f.filters { - if filter.MatchString(path) { - excluded = f.excluded[i] // last match wins + // TODO(adonovan): opt: iterate in reverse and break at first match. + include := true + for i, filter := range matchers { + if filter.MatchString(path) { + include = included[i] // last match wins + } } + return include } - return excluded } // convertFilterToRegexp replaces glob-like operator substrings in a string file path to their equivalent regex forms. @@ -60,7 +70,7 @@ func convertFilterToRegexp(filter string) *regexp.Regexp { return regexp.MustCompile(".*") } var ret strings.Builder - ret.WriteString("^") + ret.WriteString("^/") segs := strings.Split(filter, "/") for _, seg := range segs { // Inv: seg != "" since path is clean. @@ -77,7 +87,7 @@ func convertFilterToRegexp(filter string) *regexp.Regexp { // BenchmarkWorkspaceSymbols time by ~20% (even though // filter CPU time increased by only by ~2.5%) when the // default filter was changed to "**/node_modules". - pattern = strings.TrimPrefix(pattern, "^.*") + pattern = strings.TrimPrefix(pattern, "^/.*") return regexp.MustCompile(pattern) } diff --git a/gopls/internal/cache/session.go b/gopls/internal/cache/session.go index c2f57e985f7..c46fc78b975 100644 --- a/gopls/internal/cache/session.go +++ b/gopls/internal/cache/session.go @@ -169,14 +169,14 @@ func (s *Session) createView(ctx context.Context, def *viewDefinition) (*View, * // Compute a prefix match, respecting segment boundaries, by ensuring // the pattern (dir) has a trailing slash. 
dirPrefix := strings.TrimSuffix(string(def.folder.Dir), "/") + "/" - filterer := NewFilterer(def.folder.Options.DirectoryFilters) + pathIncluded := PathIncludeFunc(def.folder.Options.DirectoryFilters) skipPath = func(dir string) bool { uri := strings.TrimSuffix(string(protocol.URIFromPath(dir)), "/") // Note that the logic below doesn't handle the case where uri == // v.folder.Dir, because there is no point in excluding the entire // workspace folder! if rel := strings.TrimPrefix(uri, dirPrefix); rel != uri { - return filterer.Disallow(rel) + return !pathIncluded(rel) } return false } diff --git a/gopls/internal/cache/view.go b/gopls/internal/cache/view.go index fc1ac5724ed..6bb0ae8edeb 100644 --- a/gopls/internal/cache/view.go +++ b/gopls/internal/cache/view.go @@ -477,11 +477,11 @@ func (v *View) filterFunc() func(protocol.DocumentURI) bool { modcacheFilter := "-" + strings.TrimPrefix(filepath.ToSlash(pref), "/") filters = append(filters, modcacheFilter) } - filterer := NewFilterer(filters) + pathIncluded := PathIncludeFunc(filters) v._filterFunc = func(uri protocol.DocumentURI) bool { // Only filter relative to the configured root directory. if pathutil.InDir(folderDir, uri.Path()) { - return relPathExcludedByFilter(strings.TrimPrefix(uri.Path(), folderDir), filterer) + return relPathExcludedByFilter(strings.TrimPrefix(uri.Path(), folderDir), pathIncluded) } return false } @@ -1264,7 +1264,7 @@ func allFilesExcluded(files []string, filterFunc func(protocol.DocumentURI) bool return true } -func relPathExcludedByFilter(path string, filterer *Filterer) bool { +func relPathExcludedByFilter(path string, pathIncluded func(string) bool) bool { path = strings.TrimPrefix(filepath.ToSlash(path), "/") - return filterer.Disallow(path) + return !pathIncluded(path) } diff --git a/gopls/internal/cache/view_test.go b/gopls/internal/cache/view_test.go index 992a3d61828..46000191e42 100644 --- a/gopls/internal/cache/view_test.go +++ b/gopls/internal/cache/view_test.go @@ -90,14 +90,14 @@ func TestFilters(t *testing.T) { } for _, tt := range tests { - filterer := NewFilterer(tt.filters) + pathIncluded := PathIncludeFunc(tt.filters) for _, inc := range tt.included { - if relPathExcludedByFilter(inc, filterer) { + if relPathExcludedByFilter(inc, pathIncluded) { t.Errorf("filters %q excluded %v, wanted included", tt.filters, inc) } } for _, exc := range tt.excluded { - if !relPathExcludedByFilter(exc, filterer) { + if !relPathExcludedByFilter(exc, pathIncluded) { t.Errorf("filters %q included %v, wanted excluded", tt.filters, exc) } } diff --git a/gopls/internal/golang/workspace_symbol.go b/gopls/internal/golang/workspace_symbol.go index 89c144b9230..91c5ee22925 100644 --- a/gopls/internal/golang/workspace_symbol.go +++ b/gopls/internal/golang/workspace_symbol.go @@ -300,8 +300,7 @@ func collectSymbols(ctx context.Context, snapshots []*cache.Snapshot, matcherTyp // whether a URI is in any open workspace. folderURI := snapshot.Folder() - filters := snapshot.Options().DirectoryFilters - filterer := cache.NewFilterer(filters) + pathIncluded := cache.PathIncludeFunc(snapshot.Options().DirectoryFilters) folder := filepath.ToSlash(folderURI.Path()) var ( @@ -371,7 +370,7 @@ func collectSymbols(ctx context.Context, snapshots []*cache.Snapshot, matcherTyp uri := sp.Files[i] norm := filepath.ToSlash(uri.Path()) nm := strings.TrimPrefix(norm, folder) - if filterer.Disallow(nm) { + if !pathIncluded(nm) { continue } // Only scan each file once. 
diff --git a/gopls/internal/golang/workspace_symbol_test.go b/gopls/internal/golang/workspace_symbol_test.go index 4982b767754..fbfec8e1204 100644 --- a/gopls/internal/golang/workspace_symbol_test.go +++ b/gopls/internal/golang/workspace_symbol_test.go @@ -47,7 +47,7 @@ func TestParseQuery(t *testing.T) { } } -func TestFiltererDisallow(t *testing.T) { +func TestPathIncludeFunc(t *testing.T) { tests := []struct { filters []string included []string @@ -119,18 +119,24 @@ func TestFiltererDisallow(t *testing.T) { []string{"a/b/c.go", "bb"}, []string{"b/c/d.go", "b"}, }, + // golang/vscode-go#3692 + { + []string{"-**/foo", "+**/bar"}, + []string{"bar/a.go", "a/bar/b.go"}, + []string{"foo/a.go", "a/foo/b.go"}, + }, } for _, test := range tests { - filterer := cache.NewFilterer(test.filters) + pathIncluded := cache.PathIncludeFunc(test.filters) for _, inc := range test.included { - if filterer.Disallow(inc) { + if !pathIncluded(inc) { t.Errorf("Filters %v excluded %v, wanted included", test.filters, inc) } } for _, exc := range test.excluded { - if !filterer.Disallow(exc) { + if pathIncluded(exc) { t.Errorf("Filters %v included %v, wanted excluded", test.filters, exc) } } From 8fa586e1a64f1e145c3915541170e37228e69fe1 Mon Sep 17 00:00:00 2001 From: Peter Weinberger Date: Wed, 5 Mar 2025 19:53:51 -0500 Subject: [PATCH 103/270] internal/analysis: add function to delete a statement In gopls, for unused variables and for modernizers, there is a need to provide edits to delete a specific statement. To avoid duplication these will use this new DeleteStatemnt. This CL, the first of a series, is to install the new function and its signature, and provide tests. Immediatley following CLs will use the new function to replace existing code. Change-Id: I1213cfaf14e66eaa9a111b11094a536bb869b298 Reviewed-on: https://go-review.googlesource.com/c/tools/+/655295 Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI --- internal/analysisinternal/analysis.go | 121 +++++++++++ internal/analysisinternal/analysis_test.go | 223 ++++++++++++++++++++- 2 files changed, 343 insertions(+), 1 deletion(-) diff --git a/internal/analysisinternal/analysis.go b/internal/analysisinternal/analysis.go index 5eb7ac5a939..cc3b351708e 100644 --- a/internal/analysisinternal/analysis.go +++ b/internal/analysisinternal/analysis.go @@ -20,6 +20,8 @@ import ( "strings" "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/astutil/cursor" "golang.org/x/tools/internal/typesinternal" ) @@ -487,3 +489,122 @@ func CanImport(from, to string) bool { } return true } + +// DeleteStmt returns the edits to remove stmt if it is contained +// in a BlockStmt, CaseClause, CommClause, or is the STMT in switch STMT; ... {...} +// The report function abstracts gopls' bug.Report. +func DeleteStmt(fset *token.FileSet, astFile *ast.File, stmt ast.Stmt, report func(string, token.Pos)) []analysis.TextEdit { + // TODO: pass in the cursor to a ast.Stmt. callers should provide the Cursor + insp := inspector.New([]*ast.File{astFile}) + root := cursor.Root(insp) + cstmt, ok := root.FindNode(stmt) + if !ok { + report("%s not found in file", stmt.Pos()) + return nil + } + // some paranoia + if !stmt.Pos().IsValid() || !stmt.End().IsValid() { + report("%s: stmt has invalid position", stmt.Pos()) + return nil + } + + // if the stmt is on a line by itself delete the whole line + // otherwise just delete the statement. 
+ + // this logic would be a lot simpler with the file contents, and somewhat simpler + // if the cursors included the comments. + + tokFile := fset.File(stmt.Pos()) + lineOf := tokFile.Line + stmtStartLine, stmtEndLine := lineOf(stmt.Pos()), lineOf(stmt.End()) + + var from, to token.Pos + // bounds of adjacent syntax/comments on same line, if any + limits := func(left, right token.Pos) { + if lineOf(left) == stmtStartLine { + from = left + } + if lineOf(right) == stmtEndLine { + to = right + } + } + // TODO(pjw): there are other places a statement might be removed: + // IfStmt = "if" [ SimpleStmt ";" ] Expression Block [ "else" ( IfStmt | Block ) ] . + // (removing the blocks requires more rewriting than this routine would do) + // CommCase = "case" ( SendStmt | RecvStmt ) | "default" . + // (removing the stmt requires more rewriting, and it's unclear what the user means) + switch parent := cstmt.Parent().Node().(type) { + case *ast.SwitchStmt: + limits(parent.Switch, parent.Body.Lbrace) + case *ast.TypeSwitchStmt: + limits(parent.Switch, parent.Body.Lbrace) + if parent.Assign == stmt { + return nil // don't let the user break the type switch + } + case *ast.BlockStmt: + limits(parent.Lbrace, parent.Rbrace) + case *ast.CommClause: + limits(parent.Colon, cstmt.Parent().Parent().Node().(*ast.BlockStmt).Rbrace) + if parent.Comm == stmt { + return nil // maybe the user meant to remove the entire CommClause? + } + case *ast.CaseClause: + limits(parent.Colon, cstmt.Parent().Parent().Node().(*ast.BlockStmt).Rbrace) + case *ast.ForStmt: + limits(parent.For, parent.Body.Lbrace) + + default: + return nil // not one of ours + } + + if prev, found := cstmt.PrevSibling(); found && lineOf(prev.Node().End()) == stmtStartLine { + from = prev.Node().End() // preceding statement ends on same line + } + if next, found := cstmt.NextSibling(); found && lineOf(next.Node().Pos()) == stmtEndLine { + to = next.Node().Pos() // following statement begins on same line + } + // and now for the comments +Outer: + for _, cg := range astFile.Comments { + for _, co := range cg.List { + if lineOf(co.End()) < stmtStartLine { + continue + } else if lineOf(co.Pos()) > stmtEndLine { + break Outer // no more are possible + } + if lineOf(co.End()) == stmtStartLine && co.End() < stmt.Pos() { + if !from.IsValid() || co.End() > from { + from = co.End() + continue // maybe there are more + } + } + if lineOf(co.Pos()) == stmtEndLine && co.Pos() > stmt.End() { + if !to.IsValid() || co.Pos() < to { + to = co.Pos() + continue // maybe there are more + } + } + } + } + // if either from or to is valid, just remove the statement + // otherwise remove the line + edit := analysis.TextEdit{Pos: stmt.Pos(), End: stmt.End()} + if from.IsValid() || to.IsValid() { + // remove just the statment. + // we can't tell if there is a ; or whitespace right after the statment + // ideally we'd like to remove the former and leave the latter + // (if gofmt has run, there likely won't be a ;) + // In type switches we know there's a semicolon somewhere after the statement, + // but the extra work for this special case is not worth it, as gofmt will fix it. 
+ return []analysis.TextEdit{edit} + } + // remove the whole line + for lineOf(edit.Pos) == stmtStartLine { + edit.Pos-- + } + edit.Pos++ // get back tostmtStartLine + for lineOf(edit.End) == stmtEndLine { + edit.End++ + } + return []analysis.TextEdit{edit} +} diff --git a/internal/analysisinternal/analysis_test.go b/internal/analysisinternal/analysis_test.go index 0b21876d386..530e57250c2 100644 --- a/internal/analysisinternal/analysis_test.go +++ b/internal/analysisinternal/analysis_test.go @@ -4,7 +4,15 @@ package analysisinternal -import "testing" +import ( + "go/ast" + "go/parser" + "go/token" + "testing" + + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/astutil/cursor" +) func TestCanImport(t *testing.T) { for _, tt := range []struct { @@ -32,3 +40,216 @@ func TestCanImport(t *testing.T) { } } } + +func TestDeleteStmt(t *testing.T) { + type testCase struct { + in string + which int // count of ast.Stmt in ast.Inspect traversal to remove + want string + name string // should contain exactly one of [block,switch,case,comm,for,type] + } + tests := []testCase{ + { // do nothing when asked to remove a function body + in: "package p; func f() { }", + which: 0, + want: "package p; func f() { }", + name: "block0", + }, + { + in: "package p; func f() { abcd()}", + which: 1, + want: "package p; func f() { }", + name: "block1", + }, + { + in: "package p; func f() { a() }", + which: 1, + want: "package p; func f() { }", + name: "block2", + }, + { + in: "package p; func f() { a();}", + which: 1, + want: "package p; func f() { ;}", + name: "block3", + }, + { + in: "package p; func f() {\n a() \n\n}", + which: 1, + want: "package p; func f() {\n\n}", + name: "block4", + }, + { + in: "package p; func f() { a()// comment\n}", + which: 1, + want: "package p; func f() { // comment\n}", + name: "block5", + }, + { + in: "package p; func f() { /*c*/a() \n}", + which: 1, + want: "package p; func f() { /*c*/ \n}", + name: "block6", + }, + { + in: "package p; func f() { a();b();}", + which: 2, + want: "package p; func f() { a();;}", + name: "block7", + }, + { + in: "package p; func f() {\n\ta()\n\tb()\n}", + which: 2, + want: "package p; func f() {\n\ta()\n}", + name: "block8", + }, + { + in: "package p; func f() {\n\ta()\n\tb()\n\tc()\n}", + which: 2, + want: "package p; func f() {\n\ta()\n\tc()\n}", + name: "block9", + }, + { + in: "package p\nfunc f() {a()+b()}", + which: 1, + want: "package p\nfunc f() {}", + name: "block10", + }, + { + in: "package p\nfunc f() {(a()+b())}", + which: 1, + want: "package p\nfunc f() {}", + name: "block11", + }, + { + in: "package p; func f() { switch a(); b() {}}", + which: 2, // 0 is the func body, 1 is the switch statement + want: "package p; func f() { switch ; b() {}}", + name: "switch0", + }, + { + in: "package p; func f() { switch /*c*/a(); {}}", + which: 2, // 0 is the func body, 1 is the switch statement + want: "package p; func f() { switch /*c*/; {}}", + name: "switch1", + }, + { + in: "package p; func f() { switch a()/*c*/; {}}", + which: 2, // 0 is the func body, 1 is the switch statement + want: "package p; func f() { switch /*c*/; {}}", + name: "switch2", + }, + { + in: "package p; func f() { select {default: a()}}", + which: 4, // 0 is the func body, 1 is the select statement, 2 is its body, 3 is the comm clause + want: "package p; func f() { select {default: }}", + name: "comm0", + }, + { + in: "package p; func f(x chan any) { select {case x <- a: a(x)}}", + which: 5, // 0 is the func body, 1 is the select statement, 2 is its body, 3 
is the comm clause + want: "package p; func f(x chan any) { select {case x <- a: }}", + name: "comm1", + }, + { + in: "package p; func f(x chan any) { select {case x <- a: a(x)}}", + which: 4, // 0 is the func body, 1 is the select statement, 2 is its body, 3 is the comm clause + want: "package p; func f(x chan any) { select {case x <- a: a(x)}}", + name: "comm2", + }, + { + in: "package p; func f() { switch {default: a()}}", + which: 4, // 0 is the func body, 1 is the select statement, 2 is its body + want: "package p; func f() { switch {default: }}", + name: "case0", + }, + { + in: "package p; func f() { switch {case 3: a()}}", + which: 4, // 0 is the func body, 1 is the select statement, 2 is its body + want: "package p; func f() { switch {case 3: }}", + name: "case1", + }, + { + in: "package p; func f() {for a();;b() {}}", + which: 2, + want: "package p; func f() {for ;;b() {}}", + name: "for0", + }, + { + in: "package p; func f() {for a();c();b() {}}", + which: 3, + want: "package p; func f() {for a();c(); {}}", + name: "for1", + }, + { + in: "package p; func f() {for\na();c()\nb() {}}", + which: 2, + want: "package p; func f() {for\n;c()\nb() {}}", + name: "for2", + }, + { + in: "package p; func f() {for a();\nc();b() {}}", + which: 3, + want: "package p; func f() {for a();\nc(); {}}", + name: "for3", + }, + { + in: "package p; func f() {switch a();b().(type){}}", + which: 2, + want: "package p; func f() {switch ;b().(type){}}", + name: "type0", + }, + { + in: "package p; func f() {switch a();b().(type){}}", + which: 3, + want: "package p; func f() {switch a();b().(type){}}", + name: "type1", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + fset := token.NewFileSet() + f, err := parser.ParseFile(fset, tt.name, tt.in, parser.ParseComments) + if err != nil { + t.Fatalf("%s: %v", tt.name, err) + } + insp := inspector.New([]*ast.File{f}) + root := cursor.Root(insp) + var stmt cursor.Cursor + cnt := 0 + for cn := range root.Preorder() { // Preorder(ast.Stmt(nil)) doesn't work + if _, ok := cn.Node().(ast.Stmt); !ok { + continue + } + if cnt == tt.which { + stmt = cn + break + } + cnt++ + } + if cnt != tt.which { + t.Fatalf("test %s does not contain desired statement %d", tt.name, tt.which) + } + edits := DeleteStmt(fset, f, stmt.Node().(ast.Stmt), nil) + if tt.want == tt.in { + if len(edits) != 0 { + t.Fatalf("%s: got %d edits, expected 0", tt.name, len(edits)) + } + return + } + if len(edits) != 1 { + t.Fatalf("%s: got %d edits, expected 1", tt.name, len(edits)) + } + tokFile := fset.File(f.Pos()) + + left := tokFile.Offset(edits[0].Pos) + right := tokFile.Offset(edits[0].End) + + got := tt.in[:left] + tt.in[right:] + if got != tt.want { + t.Errorf("%s: got\n%q, want\n%q", tt.name, got, tt.want) + } + }) + + } +} From 5a45ac2d4cce31a435c1c9436717ae061d981e23 Mon Sep 17 00:00:00 2001 From: xieyuschen Date: Fri, 7 Mar 2025 18:34:55 +0800 Subject: [PATCH 104/270] x/tools: use range over function for some API This CL tracks the todo to use range over function when go1.23 is assured. 
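
For example (an illustrative sketch using hypothetical names seq and
use, not code from this CL), an iterator previously invoked in
callback style,

	seq(func(x int) bool {
		use(x)
		return true
	})

can now be consumed with a range loop over a function value of type
iter.Seq[int]:

	for x := range seq {
		use(x)
	}
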
Change-Id: Iee685ce89571443443d21e6991d018c13a9c2af2 Reviewed-on: https://go-review.googlesource.com/c/tools/+/655776 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan Auto-Submit: Alan Donovan Reviewed-by: Jonathan Amsterdam --- go/analysis/internal/checker/checker.go | 13 +++++-------- go/callgraph/vta/graph.go | 3 ++- go/callgraph/vta/graph_test.go | 10 ++++++---- go/callgraph/vta/propagation.go | 10 ++++------ go/callgraph/vta/propagation_test.go | 11 ++++------- go/ssa/func.go | 4 ++-- go/ssa/sanity.go | 6 ++---- 7 files changed, 25 insertions(+), 32 deletions(-) diff --git a/go/analysis/internal/checker/checker.go b/go/analysis/internal/checker/checker.go index 2a9ff2931b3..bc57dc6e673 100644 --- a/go/analysis/internal/checker/checker.go +++ b/go/analysis/internal/checker/checker.go @@ -242,15 +242,14 @@ func printDiagnostics(graph *checker.Graph) (exitcode int) { // Compute the exit code. var numErrors, rootDiags int - // TODO(adonovan): use "for act := range graph.All() { ... }" in go1.23. - graph.All()(func(act *checker.Action) bool { + for act := range graph.All() { if act.Err != nil { numErrors++ } else if act.IsRoot { rootDiags += len(act.Diagnostics) } - return true - }) + } + if numErrors > 0 { exitcode = 1 // analysis failed, at least partially } else if rootDiags > 0 { @@ -266,12 +265,10 @@ func printDiagnostics(graph *checker.Graph) (exitcode int) { var list []*checker.Action var total time.Duration - // TODO(adonovan): use "for act := range graph.All() { ... }" in go1.23. - graph.All()(func(act *checker.Action) bool { + for act := range graph.All() { list = append(list, act) total += act.Duration - return true - }) + } // Print actions accounting for 90% of the total. sort.Slice(list, func(i, j int) bool { diff --git a/go/callgraph/vta/graph.go b/go/callgraph/vta/graph.go index 164018708ef..26225e7db37 100644 --- a/go/callgraph/vta/graph.go +++ b/go/callgraph/vta/graph.go @@ -8,6 +8,7 @@ import ( "fmt" "go/token" "go/types" + "iter" "golang.org/x/tools/go/ssa" "golang.org/x/tools/go/types/typeutil" @@ -270,7 +271,7 @@ func (g *vtaGraph) numNodes() int { return len(g.idx) } -func (g *vtaGraph) successors(x idx) func(yield func(y idx) bool) { +func (g *vtaGraph) successors(x idx) iter.Seq[idx] { return func(yield func(y idx) bool) { for y := range g.m[x] { if !yield(y) { diff --git a/go/callgraph/vta/graph_test.go b/go/callgraph/vta/graph_test.go index 9e780c7e4e2..725749ea6ab 100644 --- a/go/callgraph/vta/graph_test.go +++ b/go/callgraph/vta/graph_test.go @@ -148,7 +148,9 @@ func TestVtaGraph(t *testing.T) { {n4, 0}, } { sl := 0 - g.successors(g.idx[test.n])(func(_ idx) bool { sl++; return true }) + for range g.successors(g.idx[test.n]) { + sl++ + } if sl != test.l { t.Errorf("want %d successors; got %d", test.l, sl) } @@ -163,10 +165,10 @@ func vtaGraphStr(g *vtaGraph) []string { var vgs []string for n := 0; n < g.numNodes(); n++ { var succStr []string - g.successors(idx(n))(func(s idx) bool { + for s := range g.successors(idx(n)) { succStr = append(succStr, g.node[s].String()) - return true - }) + } + sort.Strings(succStr) entry := fmt.Sprintf("%v -> %v", g.node[n].String(), strings.Join(succStr, ", ")) vgs = append(vgs, removeModulePrefix(entry)) diff --git a/go/callgraph/vta/propagation.go b/go/callgraph/vta/propagation.go index 1c4dcd2888e..a71c5b0034a 100644 --- a/go/callgraph/vta/propagation.go +++ b/go/callgraph/vta/propagation.go @@ -42,7 +42,7 @@ func scc(g *vtaGraph) (sccs [][]idx, idxToSccID []int) { *ns = state{pre: nextPre, lowLink: nextPre, onStack: 
true} stack = append(stack, n) - g.successors(n)(func(s idx) bool { + for s := range g.successors(n) { if ss := &states[s]; ss.pre == 0 { // Analyze successor s that has not been visited yet. doSCC(s) @@ -52,8 +52,7 @@ func scc(g *vtaGraph) (sccs [][]idx, idxToSccID []int) { // in the current SCC. ns.lowLink = min(ns.lowLink, ss.pre) } - return true - }) + } // if n is a root node, pop the stack and generate a new SCC. if ns.lowLink == ns.pre { @@ -166,10 +165,9 @@ func propagate(graph *vtaGraph, canon *typeutil.Map) propTypeMap { for i := len(sccs) - 1; i >= 0; i-- { nextSccs := make(map[int]empty) for _, n := range sccs[i] { - graph.successors(n)(func(succ idx) bool { + for succ := range graph.successors(n) { nextSccs[idxToSccID[succ]] = empty{} - return true - }) + } } // Propagate types to all successor SCCs. for nextScc := range nextSccs { diff --git a/go/callgraph/vta/propagation_test.go b/go/callgraph/vta/propagation_test.go index bc9ca1ecde6..2b36cf39bb7 100644 --- a/go/callgraph/vta/propagation_test.go +++ b/go/callgraph/vta/propagation_test.go @@ -123,17 +123,14 @@ func sccEqual(sccs1 []string, sccs2 []string) bool { // // for every edge x -> y in g, nodeToScc[x] > nodeToScc[y] func isRevTopSorted(g *vtaGraph, idxToScc []int) bool { - result := true - for n := 0; n < len(idxToScc); n++ { - g.successors(idx(n))(func(s idx) bool { + for n := range idxToScc { + for s := range g.successors(idx(n)) { if idxToScc[n] < idxToScc[s] { - result = false return false } - return true - }) + } } - return result + return true } func sccMapsConsistent(sccs [][]idx, idxToSccID []int) bool { diff --git a/go/ssa/func.go b/go/ssa/func.go index 010c128a9ec..a6e6b149fd9 100644 --- a/go/ssa/func.go +++ b/go/ssa/func.go @@ -13,6 +13,7 @@ import ( "go/token" "go/types" "io" + "iter" "os" "strings" @@ -187,8 +188,7 @@ func targetedBlock(f *Function, tok token.Token) *BasicBlock { } // instrs returns an iterator that returns each reachable instruction of the SSA function. -// TODO: return an iter.Seq once x/tools is on 1.23 -func (f *Function) instrs() func(yield func(i Instruction) bool) { +func (f *Function) instrs() iter.Seq[Instruction] { return func(yield func(i Instruction) bool) { for _, block := range f.Blocks { for _, instr := range block.Instrs { diff --git a/go/ssa/sanity.go b/go/ssa/sanity.go index 97ef886e3cf..3b862992680 100644 --- a/go/ssa/sanity.go +++ b/go/ssa/sanity.go @@ -529,12 +529,10 @@ func (s *sanity) checkFunction(fn *Function) bool { // Build the set of valid referrers. s.instrs = make(map[Instruction]unit) - // TODO: switch to range-over-func when x/tools updates to 1.23. // instrs are the instructions that are present in the function. - fn.instrs()(func(instr Instruction) bool { + for instr := range fn.instrs() { s.instrs[instr] = unit{} - return true - }) + } // Check all Locals allocations appear in the function instruction. for i, l := range fn.Locals { From 03f197e9708232c1425b20c5a26d25da07e31df4 Mon Sep 17 00:00:00 2001 From: Peter Weinberger Date: Sat, 1 Mar 2025 21:44:58 -0500 Subject: [PATCH 105/270] gopls/internal/modernize: remove assignment in ranges As of go1.22, the var := var idiom as the first statement of a for range statement is no longer necessary. This modernizer will remove it. 
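
For example (an illustrative sketch using hypothetical names s and f,
not code from this CL), the offered fix deletes the redundant
per-iteration copy:

	for _, v := range s {
		v := v // redundant since go1.22: v is already per-iteration
		go f(v)
	}
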
Change-Id: Ia3102bc221540de962d5e357a3eb21eaf8feac4b Reviewed-on: https://go-review.googlesource.com/c/tools/+/654035 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan --- gopls/go.sum | 1 + gopls/internal/analysis/modernize/forvar.go | 117 ++++++++++++++++++ .../internal/analysis/modernize/modernize.go | 1 + .../analysis/modernize/modernize_test.go | 1 + .../modernize/testdata/src/forvar/forvar.go | 62 ++++++++++ .../testdata/src/forvar/forvar.go.golden | 62 ++++++++++ internal/analysisinternal/analysis.go | 2 +- 7 files changed, 245 insertions(+), 1 deletion(-) create mode 100644 gopls/internal/analysis/modernize/forvar.go create mode 100644 gopls/internal/analysis/modernize/testdata/src/forvar/forvar.go create mode 100644 gopls/internal/analysis/modernize/testdata/src/forvar/forvar.go.golden diff --git a/gopls/go.sum b/gopls/go.sum index 20633541388..5a7914737a4 100644 --- a/gopls/go.sum +++ b/gopls/go.sum @@ -15,6 +15,7 @@ github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/rogpeppe/go-internal v1.13.2-0.20241226121412-a5dc8ff20d0a h1:w3tdWGKbLGBPtR/8/oO74W6hmz0qE5q0z9aqSAewaaM= github.com/rogpeppe/go-internal v1.13.2-0.20241226121412-a5dc8ff20d0a/go.mod h1:S8kfXMp+yh77OxPD4fdM6YUknrZpQxLhvxzS4gDHENY= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +golang.org/dl v0.0.0-20250211172903-ae3823a6a0a3/go.mod h1:fwQ+hlTD8I6TIzOGkQqxQNfE2xqR+y7SzGaDkksVFkw= golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc= golang.org/x/exp/typeparams v0.0.0-20250218142911-aa4b98e5adaa h1:Br3+0EZZohShrmVVc85znGpxw7Ca8hsUJlrdT/JQGw8= diff --git a/gopls/internal/analysis/modernize/forvar.go b/gopls/internal/analysis/modernize/forvar.go new file mode 100644 index 00000000000..3a7eee4be9c --- /dev/null +++ b/gopls/internal/analysis/modernize/forvar.go @@ -0,0 +1,117 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package modernize + +import ( + "go/ast" + "go/token" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/internal/analysisinternal" +) + +// forvar offers to fix unnecessary copying of a for variable +// +// for _, x := range foo { +// x := x // offer to remove this superfluous assignment +// } +// +// Prerequisites: +// First statement in a range loop has to be := +// where the two idents are the same, +// and the ident is defined (:=) as a variable in the for statement. +// (Note that this 'fix' does not work for three clause loops +// because the Go specification says "The variable used by each subsequent iteration +// is declared implicitly before executing the post statement and initialized to the +// value of the previous iteration's variable at that moment.") +func forvar(pass *analysis.Pass) { + info := pass.TypesInfo + + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + for curFile := range filesUsing(inspect, info, "go1.22") { + for curLoop := range curFile.Preorder((*ast.RangeStmt)(nil)) { + // in a range loop. Is the first statement var := var? + // if so, is var one of the range vars, and is it defined + // in the for statement? + // If so, decide how much to delete. 
+ loop := curLoop.Node().(*ast.RangeStmt) + if loop.Tok != token.DEFINE { + continue + } + v, stmt := loopVarRedecl(loop.Body) + if v == nil { + continue // index is not redeclared + } + if (loop.Key == nil || !equalSyntax(loop.Key, v)) && + (loop.Value == nil || !equalSyntax(loop.Value, v)) { + continue + } + astFile := curFile.Node().(*ast.File) + edits := analysisinternal.DeleteStmt(pass.Fset, astFile, stmt, bug.Reportf) + if len(edits) == 0 { + bug.Reportf("forvar failed to delete statement") + continue + } + remove := edits[0] + diag := analysis.Diagnostic{ + Pos: remove.Pos, + End: remove.End, + Category: "forvar", + Message: "copying variable is unneeded", + SuggestedFixes: []analysis.SuggestedFix{{ + Message: "Remove unneeded redeclaration", + TextEdits: []analysis.TextEdit{remove}, + }}, + } + pass.Report(diag) + } + } +} + +// if the expression is an Ident, return its name +func simplevar(expr ast.Expr) string { + if expr == nil { + return "" + } + if ident, ok := expr.(*ast.Ident); ok { + return ident.Name + } + return "" +} + +func usefulRangeVars(loop *ast.RangeStmt) []string { + ans := make([]string, 0, 2) + if v := simplevar(loop.Key); v != "" { + ans = append(ans, v) + } + if v := simplevar(loop.Value); v != "" { + ans = append(ans, v) + } + return ans +} + +// if the first statement is var := var, return var and the stmt +func loopVarRedecl(body *ast.BlockStmt) (*ast.Ident, *ast.AssignStmt) { + if len(body.List) < 1 { + return nil, nil + } + stmt, ok := body.List[0].(*ast.AssignStmt) + if !ok || !isSimpleAssign(stmt) || stmt.Tok != token.DEFINE { + return nil, nil + } + if _, ok := stmt.Lhs[0].(*ast.Ident); !ok { + return nil, nil + } + if _, ok := stmt.Rhs[0].(*ast.Ident); !ok { + return nil, nil + } + if stmt.Lhs[0].(*ast.Ident).Name == stmt.Rhs[0].(*ast.Ident).Name { + return stmt.Lhs[0].(*ast.Ident), stmt + } + return nil, nil +} diff --git a/gopls/internal/analysis/modernize/modernize.go b/gopls/internal/analysis/modernize/modernize.go index fb7d43eb8d7..5dd94a82a6b 100644 --- a/gopls/internal/analysis/modernize/modernize.go +++ b/gopls/internal/analysis/modernize/modernize.go @@ -80,6 +80,7 @@ func run(pass *analysis.Pass) (any, error) { bloop(pass) efaceany(pass) fmtappendf(pass) + forvar(pass) mapsloop(pass) minmax(pass) omitzero(pass) diff --git a/gopls/internal/analysis/modernize/modernize_test.go b/gopls/internal/analysis/modernize/modernize_test.go index 7bdc8014389..f9727d1e253 100644 --- a/gopls/internal/analysis/modernize/modernize_test.go +++ b/gopls/internal/analysis/modernize/modernize_test.go @@ -17,6 +17,7 @@ func Test(t *testing.T) { "bloop", "efaceany", "fmtappendf", + "forvar", "mapsloop", "minmax", "omitzero", diff --git a/gopls/internal/analysis/modernize/testdata/src/forvar/forvar.go b/gopls/internal/analysis/modernize/testdata/src/forvar/forvar.go new file mode 100644 index 00000000000..dd5ecd75e29 --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/forvar/forvar.go @@ -0,0 +1,62 @@ +package forvar + +func _(m map[int]int, s []int) { + // changed + for i := range s { + i := i // want "copying variable is unneeded" + go f(i) + } + for _, v := range s { + v := v // want "copying variable is unneeded" + go f(v) + } + for k, v := range m { + k := k // want "copying variable is unneeded" + v := v // nope: report only the first redeclaration + go f(k) + go f(v) + } + for _, v := range m { + v := v // want "copying variable is unneeded" + go f(v) + } + for i := range s { + /* hi */ i := i // want "copying variable is unneeded" + go f(i) + 
} + // nope + var i, k, v int + + for i = range s { // nope, scope change + i := i + go f(i) + } + for _, v = range s { // nope, scope change + v := v + go f(v) + } + for k = range m { // nope, scope change + k := k + go f(k) + } + for k, v = range m { // nope, scope change + k := k + v := v + go f(k) + go f(v) + } + for _, v = range m { // nope, scope change + v := v + go f(v) + } + for _, v = range m { // nope, not x := x + v := i + go f(v) + } + for i := range s { + i := (i) + go f(i) + } +} + +func f(n int) {} diff --git a/gopls/internal/analysis/modernize/testdata/src/forvar/forvar.go.golden b/gopls/internal/analysis/modernize/testdata/src/forvar/forvar.go.golden new file mode 100644 index 00000000000..35f71404c35 --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/forvar/forvar.go.golden @@ -0,0 +1,62 @@ +package forvar + +func _(m map[int]int, s []int) { + // changed + for i := range s { + // want "copying variable is unneeded" + go f(i) + } + for _, v := range s { + // want "copying variable is unneeded" + go f(v) + } + for k, v := range m { + // want "copying variable is unneeded" + v := v // nope: report only the first redeclaration + go f(k) + go f(v) + } + for _, v := range m { + // want "copying variable is unneeded" + go f(v) + } + for i := range s { + /* hi */ // want "copying variable is unneeded" + go f(i) + } + // nope + var i, k, v int + + for i = range s { // nope, scope change + i := i + go f(i) + } + for _, v = range s { // nope, scope change + v := v + go f(v) + } + for k = range m { // nope, scope change + k := k + go f(k) + } + for k, v = range m { // nope, scope change + k := k + v := v + go f(k) + go f(v) + } + for _, v = range m { // nope, scope change + v := v + go f(v) + } + for _, v = range m { // nope, not x := x + v := i + go f(v) + } + for i := range s { + i := (i) + go f(i) + } +} + +func f(n int) {} diff --git a/internal/analysisinternal/analysis.go b/internal/analysisinternal/analysis.go index cc3b351708e..69e21a14ca9 100644 --- a/internal/analysisinternal/analysis.go +++ b/internal/analysisinternal/analysis.go @@ -493,7 +493,7 @@ func CanImport(from, to string) bool { // DeleteStmt returns the edits to remove stmt if it is contained // in a BlockStmt, CaseClause, CommClause, or is the STMT in switch STMT; ... {...} // The report function abstracts gopls' bug.Report. -func DeleteStmt(fset *token.FileSet, astFile *ast.File, stmt ast.Stmt, report func(string, token.Pos)) []analysis.TextEdit { +func DeleteStmt(fset *token.FileSet, astFile *ast.File, stmt ast.Stmt, report func(string, ...any)) []analysis.TextEdit { // TODO: pass in the cursor to a ast.Stmt. callers should provide the Cursor insp := inspector.New([]*ast.File{astFile}) root := cursor.Root(insp) From cc7d6983044e11af79ce7fd42729be1714961587 Mon Sep 17 00:00:00 2001 From: Rob Findley Date: Mon, 10 Mar 2025 20:44:31 +0000 Subject: [PATCH 106/270] gopls/internal/test/integration/misc: fix TestAssembly for CL 639515 To avoid the test failure in CL 639515, where the naming of closures functions is being revisited, loosen the checked conditions of TestAssembly. 
Change-Id: I8b44a76d7fe72f8747db1c8130c8db52542d25f5 Reviewed-on: https://go-review.googlesource.com/c/tools/+/656456 Reviewed-by: Alan Donovan Auto-Submit: Robert Findley LUCI-TryBot-Result: Go LUCI --- gopls/internal/test/integration/misc/webserver_test.go | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/gopls/internal/test/integration/misc/webserver_test.go b/gopls/internal/test/integration/misc/webserver_test.go index 79a6548ee3e..691d45baa6e 100644 --- a/gopls/internal/test/integration/misc/webserver_test.go +++ b/gopls/internal/test/integration/misc/webserver_test.go @@ -589,13 +589,15 @@ func init() { checkMatch(t, true, report, `CALL runtime.printlock`) checkMatch(t, true, report, `CALL runtime.printstring`) checkMatch(t, true, report, `CALL runtime.printunlock`) - checkMatch(t, true, report, `CALL example.com/a.f.deferwrap1`) + checkMatch(t, true, report, `CALL example.com/a.f.deferwrap`) checkMatch(t, true, report, `RET`) checkMatch(t, true, report, `CALL runtime.morestack_noctxt`) } // Nested functions are also shown. - checkMatch(t, true, report, `TEXT.*example.com/a.f.deferwrap1`) + // + // The condition here was relaxed to unblock go.dev/cl/639515. + checkMatch(t, true, report, `example.com/a.f.deferwrap`) // But other functions are not. checkMatch(t, false, report, `TEXT.*example.com/a.g`) From 381d68d88c9845fe24f00f8b5d6c6f23aa8a56df Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Fri, 7 Mar 2025 15:05:04 -0500 Subject: [PATCH 107/270] gopls/internal/util/fingerprint/fingerprint: unify type params Enhance Matches to observe the bindings of type parameters. Previously, each occurrence of a type parameter matched any type. For example, matching these two signatures: func f[T any](T, T) func g(int, bool) succeeded even though g is not an instantiation of f. This CL tracks the bindings of type parameters, so that the above match fails but matching f with this function: func h(int, int) succeeds. Change-Id: Ia1ed653b24168d8e307593ca98d7c151b9dbb458 Reviewed-on: https://go-review.googlesource.com/c/tools/+/655995 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan --- .../internal/util/fingerprint/fingerprint.go | 132 ++++++++++++++---- .../util/fingerprint/fingerprint_test.go | 58 ++++++-- 2 files changed, 151 insertions(+), 39 deletions(-) diff --git a/gopls/internal/util/fingerprint/fingerprint.go b/gopls/internal/util/fingerprint/fingerprint.go index 2b657ba7857..22817e4cb2f 100644 --- a/gopls/internal/util/fingerprint/fingerprint.go +++ b/gopls/internal/util/fingerprint/fingerprint.go @@ -338,44 +338,126 @@ func writeSexpr(out *strings.Builder, x sexpr) { } } -// unify reports whether the types of methods x and y match, in the -// presence of type parameters, each of which matches anything at all. -// (It's not true unification as we don't track substitutions.) -// -// TODO(adonovan): implement full unification. +// unify reports whether x and y match, in the presence of type parameters. +// The constraints on type parameters are ignored, but each type parameter must +// have a consistent binding. func unify(x, y sexpr) bool { - if isTypeParam(x) >= 0 || isTypeParam(y) >= 0 { - return true // a type parameter matches anything + + // maxTypeParam returns the maximum type parameter index in x. 
+ var maxTypeParam func(x sexpr) int + maxTypeParam = func(x sexpr) int { + if i := typeParamIndex(x); i >= 0 { + return i + } + if c, ok := x.(*cons); ok { + return max(maxTypeParam(c.car), maxTypeParam(c.cdr)) + } + return 0 } - if reflect.TypeOf(x) != reflect.TypeOf(y) { - return false // type mismatch + + // xBindings[i] is the binding for type parameter #i in x, and similarly for y. + // Although type parameters are nominally bound to sexprs, each bindings[i] + // is a *sexpr, so unbound variables can share a binding. + xBindings := make([]*sexpr, maxTypeParam(x)+1) + for i := range len(xBindings) { + xBindings[i] = new(sexpr) } - switch x := x.(type) { - case nil, string, int, symbol: - return x == y - case *cons: - y := y.(*cons) - if !unify(x.car, y.car) { + yBindings := make([]*sexpr, maxTypeParam(y)+1) + for i := range len(yBindings) { + yBindings[i] = new(sexpr) + } + + // bind sets binding b to s from bindings if it does not occur in s. + bind := func(b *sexpr, s sexpr, bindings []*sexpr) bool { + // occurs reports whether b is present in s. + var occurs func(s sexpr) bool + occurs = func(s sexpr) bool { + if j := typeParamIndex(s); j >= 0 { + return b == bindings[j] + } + if c, ok := s.(*cons); ok { + return occurs(c.car) || occurs(c.cdr) + } return false } - if x.cdr == nil { - return y.cdr == nil - } - if y.cdr == nil { + + if occurs(s) { return false } - return unify(x.cdr, y.cdr) - default: - panic(fmt.Sprintf("unify %T %T", x, y)) + *b = s + return true + } + + var uni func(x, y sexpr) bool + uni = func(x, y sexpr) bool { + var bx, by *sexpr + ix := typeParamIndex(x) + if ix >= 0 { + bx = xBindings[ix] + } + iy := typeParamIndex(y) + if iy >= 0 { + by = yBindings[iy] + } + + if bx != nil || by != nil { + // If both args are type params and neither is bound, have them share a binding. + if bx != nil && by != nil && *bx == nil && *by == nil { + xBindings[ix] = yBindings[iy] + return true + } + // Treat param bindings like original args in what follows. + if bx != nil && *bx != nil { + x = *bx + } + if by != nil && *by != nil { + y = *by + } + // If the x param is unbound, bind it to y. + if bx != nil && *bx == nil { + return bind(bx, y, yBindings) + } + // If the y param is unbound, bind it to x. + if by != nil && *by == nil { + return bind(by, x, xBindings) + } + // Unify the binding of a bound parameter. + return uni(x, y) + } + + // Neither arg is a type param. + if reflect.TypeOf(x) != reflect.TypeOf(y) { + return false // type mismatch + } + switch x := x.(type) { + case nil, string, int, symbol: + return x == y + case *cons: + y := y.(*cons) + if !uni(x.car, y.car) { + return false + } + if x.cdr == nil { + return y.cdr == nil + } + if y.cdr == nil { + return false + } + return uni(x.cdr, y.cdr) + default: + panic(fmt.Sprintf("unify %T %T", x, y)) + } } + // At least one param is bound. Unify its binding with the other. + return uni(x, y) } -// isTypeParam returns the index of the type parameter, +// typeParamIndex returns the index of the type parameter, // if x has the form "(typeparam INTEGER)", otherwise -1. 
-func isTypeParam(x sexpr) int { +func typeParamIndex(x sexpr) int { if x, ok := x.(*cons); ok { if sym, ok := x.car.(symbol); ok && sym == symTypeparam { - return 0 + return x.cdr.(*cons).car.(int) } } return -1 diff --git a/gopls/internal/util/fingerprint/fingerprint_test.go b/gopls/internal/util/fingerprint/fingerprint_test.go index 7a7a2fe7569..737c6896157 100644 --- a/gopls/internal/util/fingerprint/fingerprint_test.go +++ b/gopls/internal/util/fingerprint/fingerprint_test.go @@ -104,6 +104,7 @@ func C2[U any](int, int, ...U) bool { panic(0) } func C3(int, bool, ...string) rune func C4(int, bool, ...string) func C5(int, float64, bool, string) bool +func C6(int, bool, ...string) bool func DAny[T any](Named[T]) { panic(0) } func DString(Named[string]) @@ -114,6 +115,17 @@ type Named[T any] struct { x T } func E1(byte) rune func E2(uint8) int32 func E3(int8) uint32 + +// generic vs. generic +func F1[T any](T) { panic(0) } +func F2[T any](*T) { panic(0) } +func F3[T any](T, T) { panic(0) } +func F4[U any](U, *U) {panic(0) } +func F5[T, U any](T, U, U) { panic(0) } +func F6[T any](T, int, T) { panic(0) } +func F7[T any](bool, T, T) { panic(0) } +func F8[V any](*V, int, int) { panic(0) } +func F9[V any](V, *V, V) { panic(0) } ` pkg := testfiles.LoadPackages(t, txtar.Parse([]byte(src)), "./a")[0] scope := pkg.Types.Scope() @@ -128,11 +140,12 @@ func E3(int8) uint32 {"B", "String", "", true}, {"B", "Int", "", true}, {"B", "A", "", true}, - {"C1", "C2", "", true}, // matches despite inconsistent substitution - {"C1", "C3", "", true}, + {"C1", "C2", "", false}, + {"C1", "C3", "", false}, {"C1", "C4", "", false}, {"C1", "C5", "", false}, - {"C2", "C3", "", false}, // intransitive (C1≡C2 ^ C1≡C3) + {"C1", "C6", "", true}, + {"C2", "C3", "", false}, {"C2", "C4", "", false}, {"C3", "C4", "", false}, {"DAny", "DString", "", true}, @@ -140,6 +153,13 @@ func E3(int8) uint32 {"DString", "DInt", "", false}, // different instantiations of Named {"E1", "E2", "", true}, // byte and rune are just aliases {"E2", "E3", "", false}, + // The following tests cover all of the type param cases of unify. 
+ {"F1", "F2", "", true}, // F1[*int] = F2[int] + {"F3", "F4", "", false}, // would require U identical to *U, prevented by occur check + {"F5", "F6", "", true}, // one param is bound, the other is not + {"F6", "F7", "", false}, // both are bound + {"F5", "F8", "", true}, // T=*int, U=int, V=int + {"F5", "F9", "", false}, // T is unbound, V is bound, and T occurs in V } { lookup := func(name string) types.Type { obj := scope.Lookup(name) @@ -155,20 +175,30 @@ func E3(int8) uint32 return obj.Type() } - a := lookup(test.a) - b := lookup(test.b) + check := func(sa, sb string, want bool) { + t.Helper() + + a := lookup(sa) + b := lookup(sb) - afp, _ := fingerprint.Encode(a) - bfp, _ := fingerprint.Encode(b) + afp, _ := fingerprint.Encode(a) + bfp, _ := fingerprint.Encode(b) - atree := fingerprint.Parse(afp) - btree := fingerprint.Parse(bfp) + atree := fingerprint.Parse(afp) + btree := fingerprint.Parse(bfp) - got := fingerprint.Matches(atree, btree) - if got != test.want { - t.Errorf("a=%s b=%s method=%s: unify returned %t for these inputs:\n- %s\n- %s", - test.a, test.b, test.method, - got, atree, btree) + got := fingerprint.Matches(atree, btree) + if got != want { + t.Errorf("a=%s b=%s method=%s: unify returned %t for these inputs:\n- %s\n- %s", + sa, sb, test.method, got, a, b) + } } + + check(test.a, test.b, test.want) + // Matches is symmetric + check(test.b, test.a, test.want) + // Matches is reflexive + check(test.a, test.a, true) + check(test.b, test.b, true) } } From bf70295789942e4b20ca70a8cd2fe1f3ca2a70bd Mon Sep 17 00:00:00 2001 From: Sean Liao Date: Mon, 10 Mar 2025 22:23:15 +0000 Subject: [PATCH 108/270] cmd/go-contrib-init: drop unneeded GOPATH checks in module mode Fixes golang/go#72773 Change-Id: I72728446de0e7ddb01c2219523533e7f7f0cb910 Reviewed-on: https://go-review.googlesource.com/c/tools/+/656515 LUCI-TryBot-Result: Go LUCI Reviewed-by: David Chase Reviewed-by: Ian Lance Taylor --- cmd/go-contrib-init/contrib.go | 38 ---------------------------------- 1 file changed, 38 deletions(-) diff --git a/cmd/go-contrib-init/contrib.go b/cmd/go-contrib-init/contrib.go index 9254b86388f..0ab93c90f73 100644 --- a/cmd/go-contrib-init/contrib.go +++ b/cmd/go-contrib-init/contrib.go @@ -160,44 +160,6 @@ GOPATH: %s } return } - - gopath := firstGoPath() - if gopath == "" { - log.Fatal("Your GOPATH is not set, please set it") - } - - rightdir := filepath.Join(gopath, "src", "golang.org", "x", *repo) - if !strings.HasPrefix(wd, rightdir) { - dirExists, err := exists(rightdir) - if err != nil { - log.Fatal(err) - } - if !dirExists { - log.Fatalf("The repo you want to work on is currently not on your system.\n"+ - "Run %q to obtain this repo\n"+ - "then go to the directory %q\n", - "go get -d golang.org/x/"+*repo, rightdir) - } - log.Fatalf("Your current directory is:%q\n"+ - "Working on golang/x/%v requires you be in %q\n", - wd, *repo, rightdir) - } -} - -func firstGoPath() string { - list := filepath.SplitList(build.Default.GOPATH) - if len(list) < 1 { - return "" - } - return list[0] -} - -func exists(path string) (bool, error) { - _, err := os.Stat(path) - if os.IsNotExist(err) { - return false, nil - } - return true, err } func inGoPath(wd string) bool { From 4ee50fe6264385fde55bff9cda80aa103d98e64b Mon Sep 17 00:00:00 2001 From: xieyuschen Date: Wed, 12 Mar 2025 23:10:45 -0600 Subject: [PATCH 109/270] gopls/internal/analysis/modernize: rangeint: avoid offering wrong fix This change adds additional checking to ensure that rangeint won't offer a fix in cases where RHS of 'i < limit' 
depends on loop var. Given the code snippet below, this change will no longer offer a wrong fix as it did before. var n, kd int for j := 0; j < min(n-j, kd+1); j++ { } - offered fix before(build error 'undefined: j') var n, kd int for j := range min(n-j, kd+1){ } Fixes golang/go#72726 Change-Id: I78c5457406258c44dd2fa861aa43d9ddb9c707fc Reviewed-on: https://go-review.googlesource.com/c/tools/+/656975 Reviewed-by: Alan Donovan Auto-Submit: Alan Donovan LUCI-TryBot-Result: Go LUCI Reviewed-by: Robert Findley --- gopls/internal/analysis/modernize/rangeint.go | 8 +++++++ .../testdata/src/rangeint/rangeint.go | 21 +++++++++++++++++++ .../testdata/src/rangeint/rangeint.go.golden | 21 +++++++++++++++++++ 3 files changed, 50 insertions(+) diff --git a/gopls/internal/analysis/modernize/rangeint.go b/gopls/internal/analysis/modernize/rangeint.go index 2921bbb3468..d51bd79433e 100644 --- a/gopls/internal/analysis/modernize/rangeint.go +++ b/gopls/internal/analysis/modernize/rangeint.go @@ -62,7 +62,15 @@ func rangeint(pass *analysis.Pass) { compare.Op == token.LSS && equalSyntax(compare.X, init.Lhs[0]) { // Have: for i = 0; i < limit; ... {} + limit := compare.Y + curLimit, _ := curLoop.FindNode(limit) + // Don't offer a fix if the limit expression depends on the loop index. + for cur := range curLimit.Preorder((*ast.Ident)(nil)) { + if cur.Node().(*ast.Ident).Name == index.Name { + continue nextLoop + } + } // Skip loops up to b.N in benchmarks; see [bloop]. if sel, ok := limit.(*ast.SelectorExpr); ok && diff --git a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go index da486dcd32c..915f122b4fc 100644 --- a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go +++ b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go @@ -77,3 +77,24 @@ func issue71847d() { for i := 0; i < limit2; i++ { // want "for loop can be modernized using range over int" } } + +func issue72726() { + var n, kd int + for i := 0; i < n; i++ { // want "for loop can be modernized using range over int" + // nope: j will be invisible once it's refactored to 'for j := range min(n-j, kd+1)' + for j := 0; j < min(n-j, kd+1); j++ { // nope + _, _ = i, j + } + } + + for i := 0; i < i; i++ { // nope + } + + var i int + for i = 0; i < i/2; i++ { // nope + } + + var arr []int + for i = 0; i < arr[i]; i++ { // nope + } +} diff --git a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden index 01d28ccb92b..bd76ce688bb 100644 --- a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden +++ b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden @@ -77,3 +77,24 @@ func issue71847d() { for range int(limit2) { // want "for loop can be modernized using range over int" } } + +func issue72726() { + var n, kd int + for i := range n { // want "for loop can be modernized using range over int" + // nope: j will be invisible once it's refactored to 'for j := range min(n-j, kd+1)' + for j := 0; j < min(n-j, kd+1); j++ { // nope + _, _ = i, j + } + } + + for i := 0; i < i; i++ { // nope + } + + var i int + for i = 0; i < i/2; i++ { // nope + } + + var arr []int + for i = 0; i < arr[i]; i++ { // nope + } +} From e59d6c5d501f1e31cc418cb4e6dcb1cea096c368 Mon Sep 17 00:00:00 2001 From: Ethan Reesor Date: Tue, 11 Mar 2025 17:44:11 -0500 Subject: [PATCH 110/270] gopls/internal/cache/testfuncs: handle 
recursive subtests Resolves an issue that caused runaway recursive allocation. Previously, the following would cause unbounded recursion: func Test(t *testing.T) { t.Run("Test", Test) } Now, subtests that reference a top-level test will not be scanned, and a subtest that has already been scanned will not be scanned again. Additionally, there is now an arbitrary recursion depth limit of 100. Fixes golang/go#72769. Change-Id: I0322c55ac5db65bb01cc8fc92ecf015484bbccd8 Reviewed-on: https://go-review.googlesource.com/c/tools/+/656875 Reviewed-by: Robert Findley LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan --- gopls/internal/cache/testfuncs/tests.go | 48 +++++++++++---- .../integration/workspace/packages_test.go | 60 +++++++++++++++++++ 2 files changed, 96 insertions(+), 12 deletions(-) diff --git a/gopls/internal/cache/testfuncs/tests.go b/gopls/internal/cache/testfuncs/tests.go index 1182795b37b..e0e3ce1beca 100644 --- a/gopls/internal/cache/testfuncs/tests.go +++ b/gopls/internal/cache/testfuncs/tests.go @@ -57,6 +57,7 @@ func NewIndex(files []*parsego.File, info *types.Info) *Index { b := &indexBuilder{ fileIndex: make(map[protocol.DocumentURI]int), subNames: make(map[string]int), + visited: make(map[*types.Func]bool), } return b.build(files, info) } @@ -101,6 +102,7 @@ func (b *indexBuilder) build(files []*parsego.File, info *types.Info) *Index { } b.Files[i].Tests = append(b.Files[i].Tests, t) + b.visited[obj] = true // Check for subtests if isTest { @@ -168,27 +170,48 @@ func (b *indexBuilder) findSubtests(parent gobTest, typ *ast.FuncType, body *ast t.Location.Range, _ = file.NodeRange(call) tests = append(tests, t) - if typ, body := findFunc(files, info, body, call.Args[1]); typ != nil { + fn, typ, body := findFunc(files, info, body, call.Args[1]) + if typ == nil { + continue + } + + // Function literals don't have an associated object + if fn == nil { tests = append(tests, b.findSubtests(t, typ, body, file, files, info)...) + continue + } + + // Never recurse if the second argument is a top-level test function + if isTest, _ := isTestOrExample(fn); isTest { + continue + } + + // Don't recurse into functions that have already been visited + if b.visited[fn] { + continue } + + b.visited[fn] = true + tests = append(tests, b.findSubtests(t, typ, body, file, files, info)...) } return tests } // findFunc finds the type and body of the given expr, which may be a function -// literal or reference to a declared function. -// -// If no function is found, findFunc returns (nil, nil). -func findFunc(files []*parsego.File, info *types.Info, body *ast.BlockStmt, expr ast.Expr) (*ast.FuncType, *ast.BlockStmt) { +// literal or reference to a declared function. If the expression is a declared +// function, findFunc returns its [types.Func]. If the expression is a function +// literal, findFunc returns nil for the first return value. If no function is +// found, findFunc returns (nil, nil, nil). +func findFunc(files []*parsego.File, info *types.Info, body *ast.BlockStmt, expr ast.Expr) (*types.Func, *ast.FuncType, *ast.BlockStmt) { var obj types.Object switch arg := expr.(type) { case *ast.FuncLit: - return arg.Type, arg.Body + return nil, arg.Type, arg.Body case *ast.Ident: obj = info.ObjectOf(arg) if obj == nil { - return nil, nil + return nil, nil, nil } case *ast.SelectorExpr: @@ -198,12 +221,12 @@ func findFunc(files []*parsego.File, info *types.Info, body *ast.BlockStmt, expr // complex. However, those cases should be rare. 
sel, ok := info.Selections[arg] if !ok { - return nil, nil + return nil, nil, nil } obj = sel.Obj() default: - return nil, nil + return nil, nil, nil } if v, ok := obj.(*types.Var); ok { @@ -211,7 +234,7 @@ func findFunc(files []*parsego.File, info *types.Info, body *ast.BlockStmt, expr // the file), but that doesn't account for assignment. If the variable // is assigned multiple times, we could easily get the wrong one. _, _ = v, body - return nil, nil + return nil, nil, nil } for _, file := range files { @@ -228,11 +251,11 @@ func findFunc(files []*parsego.File, info *types.Info, body *ast.BlockStmt, expr } if info.ObjectOf(decl.Name) == obj { - return decl.Type, decl.Body + return obj.(*types.Func), decl.Type, decl.Body } } } - return nil, nil + return nil, nil, nil } // isTestOrExample reports whether the given func is a testing func or an @@ -308,6 +331,7 @@ type indexBuilder struct { gobPackage fileIndex map[protocol.DocumentURI]int subNames map[string]int + visited map[*types.Func]bool } // -- serial format of index -- diff --git a/gopls/internal/test/integration/workspace/packages_test.go b/gopls/internal/test/integration/workspace/packages_test.go index fdee21d822f..3420e32e084 100644 --- a/gopls/internal/test/integration/workspace/packages_test.go +++ b/gopls/internal/test/integration/workspace/packages_test.go @@ -433,6 +433,66 @@ func (X) SubtestMethod(t *testing.T) { }) } +func TestRecursiveSubtest(t *testing.T) { + const files = ` +-- go.mod -- +module foo + +-- foo_test.go -- +package foo + +import "testing" + +func TestFoo(t *testing.T) { t.Run("Foo", TestFoo) } +func TestBar(t *testing.T) { t.Run("Foo", TestFoo) } + +func TestBaz(t *testing.T) { + var sub func(t *testing.T) + sub = func(t *testing.T) { t.Run("Sub", sub) } + t.Run("Sub", sub) +} +` + + Run(t, files, func(t *testing.T, env *Env) { + checkPackages(t, env, []protocol.DocumentURI{env.Editor.DocumentURI("foo_test.go")}, false, command.NeedTests, []command.Package{ + { + Path: "foo", + ForTest: "foo", + ModulePath: "foo", + TestFiles: []command.TestFile{ + { + URI: env.Editor.DocumentURI("foo_test.go"), + Tests: []command.TestCase{ + {Name: "TestFoo"}, + {Name: "TestFoo/Foo"}, + {Name: "TestBar"}, + {Name: "TestBar/Foo"}, + {Name: "TestBaz"}, + {Name: "TestBaz/Sub"}, + }, + }, + }, + }, + }, map[string]command.Module{ + "foo": { + Path: "foo", + GoMod: env.Editor.DocumentURI("go.mod"), + }, + }, []string{ + `func TestFoo(t *testing.T) { t.Run("Foo", TestFoo) }`, + `t.Run("Foo", TestFoo)`, + `func TestBar(t *testing.T) { t.Run("Foo", TestFoo) }`, + `t.Run("Foo", TestFoo)`, + `func TestBaz(t *testing.T) { + var sub func(t *testing.T) + sub = func(t *testing.T) { t.Run("Sub", sub) } + t.Run("Sub", sub) +}`, + `t.Run("Sub", sub)`, + }) + }) +} + func checkPackages(t testing.TB, env *Env, files []protocol.DocumentURI, recursive bool, mode command.PackagesMode, wantPkg []command.Package, wantModule map[string]command.Module, wantSource []string) { t.Helper() From 40f8cca0a7780784a66e1d0bb1d41e87283ceea9 Mon Sep 17 00:00:00 2001 From: Robert Findley Date: Wed, 12 Mar 2025 16:24:42 -0400 Subject: [PATCH 111/270] internal/imports: fix extra logf argument Thanks vikblom for pointing it out. Unfortunately, the printf analyzer does not handle this type of call. 
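
Concretely, the format string has two verbs but the old call passed
three arguments (snippet reproduced from the diff below). Because logf
is a nil-able function value rather than a declared print wrapper, the
printf analyzer presumably has no way to check the call:

    logf("fixImports(filename=%q), srcDir=%q ...", filename, abs, srcDir) // extra argument
    logf("fixImports(filename=%q), srcDir=%q ...", filename, srcDir)      // fixed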
Change-Id: I314914dcbf68e9ab4310a88e3031413cc09fc975 Reviewed-on: https://go-review.googlesource.com/c/tools/+/657335 LUCI-TryBot-Result: Go LUCI Auto-Submit: Robert Findley Reviewed-by: Alan Donovan --- internal/imports/fix.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/imports/fix.go b/internal/imports/fix.go index 737a9bfae8f..c78d10f2d61 100644 --- a/internal/imports/fix.go +++ b/internal/imports/fix.go @@ -585,7 +585,7 @@ func getFixesWithSource(ctx context.Context, fset *token.FileSet, f *ast.File, f srcDir := filepath.Dir(abs) if logf != nil { - logf("fixImports(filename=%q), srcDir=%q ...", filename, abs, srcDir) + logf("fixImports(filename=%q), srcDir=%q ...", filename, srcDir) } // First pass: looking only at f, and using the naive algorithm to From dcc4b8a191617e187055ef6ce3be7798867f4daa Mon Sep 17 00:00:00 2001 From: Rob Findley Date: Mon, 24 Feb 2025 15:20:07 +0000 Subject: [PATCH 112/270] gopls/internal/golang: use slices.Reverse in pathEnclosingObjNode Change-Id: I1ffa1708564a87125cc38355baf481c3ea6b85b8 Reviewed-on: https://go-review.googlesource.com/c/tools/+/652016 Reviewed-by: Alan Donovan Auto-Submit: Robert Findley LUCI-TryBot-Result: Go LUCI --- gopls/internal/golang/implementation.go | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/gopls/internal/golang/implementation.go b/gopls/internal/golang/implementation.go index 2d9a1e93ef3..0ccab640709 100644 --- a/gopls/internal/golang/implementation.go +++ b/gopls/internal/golang/implementation.go @@ -665,6 +665,7 @@ func pathEnclosingObjNode(f *ast.File, pos token.Pos) []ast.Node { // handled this by calling astutil.PathEnclosingInterval twice, // once for "pos" and once for "pos-1". found = n.Pos() <= pos && pos <= n.End() + case *ast.ImportSpec: if n.Path.Pos() <= pos && pos < n.Path.End() { found = true @@ -674,6 +675,7 @@ func pathEnclosingObjNode(f *ast.File, pos token.Pos) []ast.Node { path = append(path, n.Name) } } + case *ast.StarExpr: // Follow star expressions to the inner identifier. if pos == n.Star { @@ -690,7 +692,6 @@ func pathEnclosingObjNode(f *ast.File, pos token.Pos) []ast.Node { // Reverse path so leaf is first element. slices.Reverse(path) - return path } From 6c3e542dfc660fed39233c18adf1467c5cdb359d Mon Sep 17 00:00:00 2001 From: xieyuschen Date: Thu, 13 Mar 2025 01:57:28 -0600 Subject: [PATCH 113/270] gopls/internal/analysis/modernize: preserves comments in minmax This CL changes the original deletion (from after rh0 to the end of the if stmt) into deletion from the start of assignment to the end of if stmt), add all comments between them and create a new assigment as-is. This change preserves all comments inside if stmt and the comments after the line of assignment and before if stmt, causing comments B,C,D to be preserved and put on the top of min/max function call after fix. 
- source: lhs0 = rhs0 // A // B if rhs0 < b { // C lhs0 = b // D } - fixed: // A // B // C // D lhs0 = max(rhs0,b) Fixes golang/go#72727 Change-Id: I7c193711aac5834ebb0d5e8ae22c26ae7990c34f Reviewed-on: https://go-review.googlesource.com/c/tools/+/656655 Auto-Submit: Alan Donovan Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI Reviewed-by: Robert Findley --- gopls/internal/analysis/modernize/minmax.go | 27 ++++++++--- .../modernize/testdata/src/minmax/minmax.go | 35 ++++++++++++--- .../testdata/src/minmax/minmax.go.golden | 35 ++++++++++++++- internal/analysisinternal/analysis.go | 22 +++++++++ internal/analysisinternal/analysis_test.go | 45 +++++++++++++++++++ 5 files changed, 148 insertions(+), 16 deletions(-) diff --git a/gopls/internal/analysis/modernize/minmax.go b/gopls/internal/analysis/modernize/minmax.go index 8888383afec..a72506c3bbb 100644 --- a/gopls/internal/analysis/modernize/minmax.go +++ b/gopls/internal/analysis/modernize/minmax.go @@ -9,6 +9,7 @@ import ( "go/ast" "go/token" "go/types" + "strings" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" @@ -32,7 +33,7 @@ func minmax(pass *analysis.Pass) { // check is called for all statements of this form: // if a < b { lhs = rhs } - check := func(curIfStmt cursor.Cursor, compare *ast.BinaryExpr) { + check := func(file *ast.File, curIfStmt cursor.Cursor, compare *ast.BinaryExpr) { var ( ifStmt = curIfStmt.Node().(*ast.IfStmt) tassign = ifStmt.Body.List[0].(*ast.AssignStmt) @@ -44,6 +45,14 @@ func minmax(pass *analysis.Pass) { sign = isInequality(compare.Op) ) + allComments := func(file *ast.File, start, end token.Pos) string { + var buf strings.Builder + for co := range analysisinternal.Comments(file, start, end) { + _, _ = fmt.Fprintf(&buf, "%s\n", co.Text) + } + return buf.String() + } + if fblock, ok := ifStmt.Else.(*ast.BlockStmt); ok && isAssignBlock(fblock) { fassign := fblock.List[0].(*ast.AssignStmt) @@ -85,7 +94,8 @@ func minmax(pass *analysis.Pass) { // Replace IfStmt with lhs = min(a, b). Pos: ifStmt.Pos(), End: ifStmt.End(), - NewText: fmt.Appendf(nil, "%s = %s(%s, %s)", + NewText: fmt.Appendf(nil, "%s%s = %s(%s, %s)", + allComments(file, ifStmt.Pos(), ifStmt.End()), analysisinternal.Format(pass.Fset, lhs), sym, analysisinternal.Format(pass.Fset, a), @@ -144,10 +154,13 @@ func minmax(pass *analysis.Pass) { SuggestedFixes: []analysis.SuggestedFix{{ Message: fmt.Sprintf("Replace if/else with %s", sym), TextEdits: []analysis.TextEdit{{ - // Replace rhs0 and IfStmt with min(a, b) - Pos: rhs0.Pos(), + Pos: fassign.Pos(), End: ifStmt.End(), - NewText: fmt.Appendf(nil, "%s(%s, %s)", + // Replace "x := a; if ... {}" with "x = min(...)", preserving comments. + NewText: fmt.Appendf(nil, "%s %s %s %s(%s, %s)", + allComments(file, fassign.Pos(), ifStmt.End()), + analysisinternal.Format(pass.Fset, lhs), + fassign.Tok.String(), sym, analysisinternal.Format(pass.Fset, a), analysisinternal.Format(pass.Fset, b)), @@ -161,16 +174,16 @@ func minmax(pass *analysis.Pass) { // Find all "if a < b { lhs = rhs }" statements. 
inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) for curFile := range filesUsing(inspect, pass.TypesInfo, "go1.21") { + astFile := curFile.Node().(*ast.File) for curIfStmt := range curFile.Preorder((*ast.IfStmt)(nil)) { ifStmt := curIfStmt.Node().(*ast.IfStmt) - if compare, ok := ifStmt.Cond.(*ast.BinaryExpr); ok && ifStmt.Init == nil && isInequality(compare.Op) != 0 && isAssignBlock(ifStmt.Body) { // Have: if a < b { lhs = rhs } - check(curIfStmt, compare) + check(astFile, curIfStmt, compare) } } } diff --git a/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go b/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go index 44ba7c9193a..e0ac6da2734 100644 --- a/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go +++ b/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go @@ -1,9 +1,12 @@ package minmax func ifmin(a, b int) { - x := a + x := a // A + // B if a < b { // want "if statement can be modernized using max" - x = b + // C + x = b // D + // E } print(x) } @@ -33,20 +36,30 @@ func ifmaxvariant(a, b int) { } func ifelsemin(a, b int) { - var x int + var x int // A + // B if a <= b { // want "if/else statement can be modernized using min" - x = a + // C + x = a // D + // E } else { - x = b + // F + x = b // G + // H } print(x) } func ifelsemax(a, b int) { - var x int + // A + var x int // B + // C if a >= b { // want "if/else statement can be modernized using max" - x = a + // D + x = a // E + // F } else { + // G x = b } print(x) @@ -115,3 +128,11 @@ func nopeHasElseBlock(x int) int { } return y } + +func fix72727(a, b int) { + o := a - 42 + // some important comment. DO NOT REMOVE. + if o < b { // want "if statement can be modernized using max" + o = b + } +} diff --git a/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go.golden b/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go.golden index df1d5180f8a..5a62435ac0c 100644 --- a/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go.golden +++ b/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go.golden @@ -1,33 +1,57 @@ package minmax func ifmin(a, b int) { + // A + // B + // want "if statement can be modernized using max" + // C + // D + // E x := max(a, b) print(x) } func ifmax(a, b int) { + // want "if statement can be modernized using min" x := min(a, b) print(x) } func ifminvariant(a, b int) { + // want "if statement can be modernized using min" x := min(a, b) print(x) } func ifmaxvariant(a, b int) { + // want "if statement can be modernized using min" x := min(a, b) print(x) } func ifelsemin(a, b int) { - var x int + var x int // A + // B + // want "if/else statement can be modernized using min" + // C + // D + // E + // F + // G + // H x = min(a, b) print(x) } func ifelsemax(a, b int) { - var x int + // A + var x int // B + // C + // want "if/else statement can be modernized using max" + // D + // E + // F + // G x = max(a, b) print(x) } @@ -55,6 +79,7 @@ func nopeIfStmtHasInitStmt() { // Regression test for a bug: fix was "y := max(x, y)". func oops() { x := 1 + // want "if statement can be modernized using max" y := max(x, 2) print(y) } @@ -92,3 +117,9 @@ func nopeHasElseBlock(x int) int { } return y } + +func fix72727(a, b int) { + // some important comment. DO NOT REMOVE. 
+ // want "if statement can be modernized using max" + o := max(a-42, b) +} diff --git a/internal/analysisinternal/analysis.go b/internal/analysisinternal/analysis.go index 69e21a14ca9..bc10f66da25 100644 --- a/internal/analysisinternal/analysis.go +++ b/internal/analysisinternal/analysis.go @@ -15,6 +15,7 @@ import ( "go/scanner" "go/token" "go/types" + "iter" pathpkg "path" "slices" "strings" @@ -608,3 +609,24 @@ Outer: } return []analysis.TextEdit{edit} } + +// Comments returns an iterator over the comments overlapping the specified interval. +func Comments(file *ast.File, start, end token.Pos) iter.Seq[*ast.Comment] { + // TODO(adonovan): optimize use binary O(log n) instead of linear O(n) search. + return func(yield func(*ast.Comment) bool) { + for _, cg := range file.Comments { + for _, co := range cg.List { + if co.Pos() > end { + return + } + if co.End() < start { + continue + } + + if !yield(co) { + return + } + } + } + } +} diff --git a/internal/analysisinternal/analysis_test.go b/internal/analysisinternal/analysis_test.go index 530e57250c2..e3c760aff5a 100644 --- a/internal/analysisinternal/analysis_test.go +++ b/internal/analysisinternal/analysis_test.go @@ -8,6 +8,7 @@ import ( "go/ast" "go/parser" "go/token" + "slices" "testing" "golang.org/x/tools/go/ast/inspector" @@ -253,3 +254,47 @@ func TestDeleteStmt(t *testing.T) { } } + +func TestComments(t *testing.T) { + src := ` +package main + +// A +func fn() { }` + var fset token.FileSet + f, err := parser.ParseFile(&fset, "", []byte(src), parser.ParseComments|parser.AllErrors) + if err != nil { + t.Fatal(err) + } + + commentA := f.Comments[0].List[0] + commentAMidPos := (commentA.Pos() + commentA.End()) / 2 + + want := []*ast.Comment{commentA} + testCases := []struct { + name string + start, end token.Pos + want []*ast.Comment + }{ + {name: "comment totally overlaps with given interval", start: f.Pos(), end: f.End(), want: want}, + {name: "interval from file start to mid of comment A", start: f.Pos(), end: commentAMidPos, want: want}, + {name: "interval from mid of comment A to file end", start: commentAMidPos, end: commentA.End(), want: want}, + {name: "interval from start of comment A to mid of comment A", start: commentA.Pos(), end: commentAMidPos, want: want}, + {name: "interval from mid of comment A to comment A end", start: commentAMidPos, end: commentA.End(), want: want}, + {name: "interval at the start of comment A", start: commentA.Pos(), end: commentA.Pos(), want: want}, + {name: "interval at the end of comment A", start: commentA.End(), end: commentA.End(), want: want}, + {name: "interval from file start to the front of comment A start", start: f.Pos(), end: commentA.Pos() - 1, want: nil}, + {name: "interval from the position after end of comment A to file end", start: commentA.End() + 1, end: f.End(), want: nil}, + } + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + var got []*ast.Comment + for co := range Comments(f, tc.start, tc.end) { + got = append(got, co) + } + if !slices.Equal(got, tc.want) { + t.Errorf("%s: got %v, want %v", tc.name, got, tc.want) + } + }) + } +} From e06efb48035968dc7c72ac5b66f17048d53f0549 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Sun, 16 Mar 2025 17:27:53 -0400 Subject: [PATCH 114/270] internal/gcimporter: bug.Report in export's panic handler This CL effectively refines the telemetry report golang/go#71067 by pushing the bug.Report call down into the panic handler where the stack is still available. 
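
In outline, the exporter's recover block now reports before doing
anything else (simplified from the iexportCommon diff below; the rest
of the handler is unchanged):

    defer func() {
        if e := recover(); e != nil {
            // Report the stack via telemetry while it is still available.
            if reportf != nil {
                reportf("panic in exporter")
            }
            // ... existing internalError handling follows ...
        }
    }()

On the gopls side, check.go now logs the returned error with
event.Error instead of calling bug.Reportf again, since the bug report
has already been filed inside the exporter.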
Fixes golang/go#71067 Change-Id: I354f01d3085f1547232bca499d0bd1f0bf2daef3 Reviewed-on: https://go-review.googlesource.com/c/tools/+/658355 LUCI-TryBot-Result: Go LUCI Reviewed-by: Robert Findley Auto-Submit: Alan Donovan --- gopls/internal/cache/check.go | 5 ++++- internal/gcimporter/iexport.go | 23 +++++++++++++++-------- internal/gcimporter/iexport_test.go | 2 +- 3 files changed, 20 insertions(+), 10 deletions(-) diff --git a/gopls/internal/cache/check.go b/gopls/internal/cache/check.go index 27d5cfa240b..909003288bc 100644 --- a/gopls/internal/cache/check.go +++ b/gopls/internal/cache/check.go @@ -637,7 +637,10 @@ func (b *typeCheckBatch) checkPackageForImport(ctx context.Context, ph *packageH go func() { exportData, err := gcimporter.IExportShallow(b.fset, pkg, bug.Reportf) if err != nil { - bug.Reportf("exporting package %v: %v", ph.mp.ID, err) + // Internal error; the stack will have been reported via + // bug.Reportf within IExportShallow, so there's not much + // to do here (issue #71067). + event.Error(ctx, "IExportShallow failed", err, label.Package.Of(string(ph.mp.ID))) return } if err := filecache.Set(exportDataKind, ph.key, exportData); err != nil { diff --git a/internal/gcimporter/iexport.go b/internal/gcimporter/iexport.go index 253d6493c21..48e90b29ded 100644 --- a/internal/gcimporter/iexport.go +++ b/internal/gcimporter/iexport.go @@ -271,10 +271,10 @@ import ( // file system, be sure to include a cryptographic digest of the executable in // the key to avoid version skew. // -// If the provided reportf func is non-nil, it will be used for reporting bugs -// encountered during export. -// TODO(rfindley): remove reportf when we are confident enough in the new -// objectpath encoding. +// If the provided reportf func is non-nil, it is used for reporting +// bugs (e.g. recovered panics) encountered during export, enabling us +// to obtain via telemetry the stack that would otherwise be lost by +// merely returning an error. func IExportShallow(fset *token.FileSet, pkg *types.Package, reportf ReportFunc) ([]byte, error) { // In principle this operation can only fail if out.Write fails, // but that's impossible for bytes.Buffer---and as a matter of @@ -283,7 +283,7 @@ func IExportShallow(fset *token.FileSet, pkg *types.Package, reportf ReportFunc) // TODO(adonovan): use byte slices throughout, avoiding copying. const bundle, shallow = false, true var out bytes.Buffer - err := iexportCommon(&out, fset, bundle, shallow, iexportVersion, []*types.Package{pkg}) + err := iexportCommon(&out, fset, bundle, shallow, iexportVersion, []*types.Package{pkg}, reportf) return out.Bytes(), err } @@ -323,20 +323,27 @@ const bundleVersion = 0 // so that calls to IImportData can override with a provided package path. func IExportData(out io.Writer, fset *token.FileSet, pkg *types.Package) error { const bundle, shallow = false, false - return iexportCommon(out, fset, bundle, shallow, iexportVersion, []*types.Package{pkg}) + return iexportCommon(out, fset, bundle, shallow, iexportVersion, []*types.Package{pkg}, nil) } // IExportBundle writes an indexed export bundle for pkgs to out. 
func IExportBundle(out io.Writer, fset *token.FileSet, pkgs []*types.Package) error { const bundle, shallow = true, false - return iexportCommon(out, fset, bundle, shallow, iexportVersion, pkgs) + return iexportCommon(out, fset, bundle, shallow, iexportVersion, pkgs, nil) } -func iexportCommon(out io.Writer, fset *token.FileSet, bundle, shallow bool, version int, pkgs []*types.Package) (err error) { +func iexportCommon(out io.Writer, fset *token.FileSet, bundle, shallow bool, version int, pkgs []*types.Package, reportf ReportFunc) (err error) { if !debug { defer func() { if e := recover(); e != nil { + // Report the stack via telemetry (see #71067). + if reportf != nil { + reportf("panic in exporter") + } if ierr, ok := e.(internalError); ok { + // internalError usually means we exported a + // bad go/types data structure: a violation + // of an implicit precondition of Export. err = ierr return } diff --git a/internal/gcimporter/iexport_test.go b/internal/gcimporter/iexport_test.go index 5707b3784a5..fa8ecd30dc1 100644 --- a/internal/gcimporter/iexport_test.go +++ b/internal/gcimporter/iexport_test.go @@ -29,7 +29,7 @@ import ( func iexport(fset *token.FileSet, version int, pkg *types.Package) ([]byte, error) { var buf bytes.Buffer const bundle, shallow = false, false - if err := gcimporter.IExportCommon(&buf, fset, bundle, shallow, version, []*types.Package{pkg}); err != nil { + if err := gcimporter.IExportCommon(&buf, fset, bundle, shallow, version, []*types.Package{pkg}, nil); err != nil { return nil, err } return buf.Bytes(), nil From 066484ed0313db3236f05d8c2b3049e4a52e8983 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Mon, 17 Mar 2025 14:41:33 -0400 Subject: [PATCH 115/270] gopls/internal/test/integration/misc: test "annotations" setting This CL adds a test that the "annotations" config setting is honored by the "Toggle compiler optimization details" setting. (The test resulted from the investigation of the comment at https://github.com/golang/go/issues/71888#issuecomment-2727492170.) Updates golang/go#71888 Change-Id: I002d945fe8883ecd2dc95a2d43c0ccf2aa93a2c2 Reviewed-on: https://go-review.googlesource.com/c/tools/+/658555 Reviewed-by: Robert Findley Auto-Submit: Alan Donovan LUCI-TryBot-Result: Go LUCI --- .../test/integration/misc/compileropt_test.go | 63 +++++++++++++++++++ 1 file changed, 63 insertions(+) diff --git a/gopls/internal/test/integration/misc/compileropt_test.go b/gopls/internal/test/integration/misc/compileropt_test.go index 175ec640042..68138fabc43 100644 --- a/gopls/internal/test/integration/misc/compileropt_test.go +++ b/gopls/internal/test/integration/misc/compileropt_test.go @@ -166,3 +166,66 @@ func H(x int) any { return &x } ) }) } + +// TestCompilerOptDetails_config exercises that the "want optimization +// details" flag honors the "annotation" configuration setting. 
+func TestCompilerOptDetails_config(t *testing.T) { + if runtime.GOOS == "android" { + t.Skipf("the compiler optimization details code action doesn't work on Android") + } + + const mod = ` +-- go.mod -- +module mod.com +go 1.18 + +-- a/a.go -- +package a + +func F(x int) any { return &x } // escape(x escapes to heap) +func G() { defer func(){} () } // cannotInlineFunction(unhandled op DEFER) +` + + for _, escape := range []bool{true, false} { + WithOptions( + Settings{"annotations": map[string]any{"inline": true, "escape": escape}}, + ).Run(t, mod, func(t *testing.T, env *Env) { + env.OpenFile("a/a.go") + actions := env.CodeActionForFile("a/a.go", nil) + + docAction, err := codeActionByKind(actions, settings.GoToggleCompilerOptDetails) + if err != nil { + t.Fatal(err) + } + params := &protocol.ExecuteCommandParams{ + Command: docAction.Command.Command, + Arguments: docAction.Command.Arguments, + } + env.ExecuteCommand(params, nil) + + env.OnceMet( + CompletedWork(server.DiagnosticWorkTitle(server.FromToggleCompilerOptDetails), 1, true), + cond(escape, Diagnostics, NoDiagnostics)( + ForFile("a/a.go"), + AtPosition("a/a.go", 2, 7), + WithMessage("x escapes to heap"), + WithSeverityTags("optimizer details", protocol.SeverityInformation, nil), + ), + Diagnostics( + ForFile("a/a.go"), + AtPosition("a/a.go", 3, 5), + WithMessage("cannotInlineFunction(unhandled op DEFER)"), + WithSeverityTags("optimizer details", protocol.SeverityInformation, nil), + ), + ) + }) + } +} + +func cond[T any](cond bool, x, y T) T { + if cond { + return x + } else { + return y + } +} From 95eb16e6031d0496dca9bac57362606d1cfd70c6 Mon Sep 17 00:00:00 2001 From: Dmitri Shuralyov Date: Fri, 14 Mar 2025 13:11:44 -0400 Subject: [PATCH 116/270] gopls/internal/test/integration: skip x_tools-gotip-openbsd-amd64 (7.6) The new openbsd/amd64 7.6 builder is generally working well everywhere but this one place. Add a skip for now to buy time to investigate this issue. Note that the previous openbsd/amd64 7.2 builder was also running into problems with these tests, as tracked in go.dev/issue/54461, though it wasn't happening as consistently as it is now. For golang/go#72145. For golang/go#54461. Change-Id: I6dd34fcdcca99c90282f0b9119936efa6bebf458 Cq-Include-Trybots: luci.golang.try:x_tools-gotip-openbsd-amd64 Reviewed-on: https://go-review.googlesource.com/c/tools/+/658015 LUCI-TryBot-Result: Go LUCI Reviewed-by: Cherry Mui Reviewed-by: Alan Donovan Auto-Submit: Dmitri Shuralyov Reviewed-by: Dmitri Shuralyov --- gopls/internal/test/integration/runner.go | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/gopls/internal/test/integration/runner.go b/gopls/internal/test/integration/runner.go index b3e98b859d3..b4b9d3a2a4d 100644 --- a/gopls/internal/test/integration/runner.go +++ b/gopls/internal/test/integration/runner.go @@ -266,10 +266,10 @@ func ConnectGoplsEnv(t testing.TB, ctx context.Context, sandbox *fake.Sandbox, c // longBuilders maps builders that are skipped when -short is set to a // (possibly empty) justification. 
var longBuilders = map[string]string{ - "openbsd-amd64-64": "go.dev/issue/42789", - "openbsd-386-64": "go.dev/issue/42789", - "openbsd-386-68": "go.dev/issue/42789", - "openbsd-amd64-68": "go.dev/issue/42789", + "x_tools-gotip-openbsd-amd64": "go.dev/issue/72145", + "x_tools-go1.24-openbsd-amd64": "go.dev/issue/72145", + "x_tools-go1.23-openbsd-amd64": "go.dev/issue/72145", + "darwin-amd64-10_12": "", "freebsd-amd64-race": "", "illumos-amd64": "", From e7b4c64c77184852af2327225261d6f3f2ff38e7 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Mon, 17 Mar 2025 18:41:02 -0400 Subject: [PATCH 117/270] gopls/internal/golang: fix crash in source.test code action We forgot to set needPkg=true. Clearly this code has never been tested since its inception in CL 231959. Also, add the missing test. Fixes golang/go#72907 Change-Id: I077b27ab4c64900ecefa19cb1329eb47d9cd6f28 Reviewed-on: https://go-review.googlesource.com/c/tools/+/658556 Reviewed-by: Robert Findley LUCI-TryBot-Result: Go LUCI Auto-Submit: Alan Donovan --- gopls/internal/golang/codeaction.go | 2 +- .../test/integration/misc/test_test.go | 82 +++++++++++++++++++ 2 files changed, 83 insertions(+), 1 deletion(-) create mode 100644 gopls/internal/test/integration/misc/test_test.go diff --git a/gopls/internal/golang/codeaction.go b/gopls/internal/golang/codeaction.go index 74f3c2b6085..a5591edf1f9 100644 --- a/gopls/internal/golang/codeaction.go +++ b/gopls/internal/golang/codeaction.go @@ -240,7 +240,7 @@ var codeActionProducers = [...]codeActionProducer{ {kind: settings.GoAssembly, fn: goAssembly, needPkg: true}, {kind: settings.GoDoc, fn: goDoc, needPkg: true}, {kind: settings.GoFreeSymbols, fn: goFreeSymbols}, - {kind: settings.GoTest, fn: goTest}, + {kind: settings.GoTest, fn: goTest, needPkg: true}, {kind: settings.GoToggleCompilerOptDetails, fn: toggleCompilerOptDetails}, {kind: settings.GoplsDocFeatures, fn: goplsDocFeatures}, {kind: settings.RefactorExtractFunction, fn: refactorExtractFunction}, diff --git a/gopls/internal/test/integration/misc/test_test.go b/gopls/internal/test/integration/misc/test_test.go new file mode 100644 index 00000000000..b282bf57a95 --- /dev/null +++ b/gopls/internal/test/integration/misc/test_test.go @@ -0,0 +1,82 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package misc + +// This file defines tests of the source.test ("Run tests and +// benchmarks") code action. + +import ( + "os" + "path/filepath" + "testing" + + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/settings" + . "golang.org/x/tools/gopls/internal/test/integration" +) + +func TestRunTestsAndBenchmarks(t *testing.T) { + file := filepath.Join(t.TempDir(), "out") + os.Setenv("TESTFILE", file) + + const src = ` +-- go.mod -- +module example.com +go 1.19 + +-- a/a.go -- +package a + +-- a/a_test.go -- +package a + +import ( + "os" + "testing" +) + +func Test(t *testing.T) { + os.WriteFile(os.Getenv("TESTFILE"), []byte("ok"), 0644) +} + +` + Run(t, src, func(t *testing.T, env *Env) { + env.OpenFile("a/a_test.go") + loc := env.RegexpSearch("a/a_test.go", "WriteFile") + + // Request code actions. (settings.GoTest is special: + // it is returned only when explicitly requested.) 
+ actions, err := env.Editor.Server.CodeAction(env.Ctx, &protocol.CodeActionParams{ + TextDocument: protocol.TextDocumentIdentifier{URI: loc.URI}, + Range: loc.Range, + Context: protocol.CodeActionContext{ + Only: []protocol.CodeActionKind{settings.GoTest}, + }, + }) + if err != nil { + t.Fatal(err) + } + if len(actions) != 1 { + t.Fatalf("CodeAction returned %#v, want one source.test action", actions) + } + if actions[0].Command == nil { + t.Fatalf("CodeActions()[0] has no Command") + } + + // Execute test. + // (ExecuteCommand fails if the test fails.) + t.Logf("Running %s...", actions[0].Title) + env.ExecuteCommand(&protocol.ExecuteCommandParams{ + Command: actions[0].Command.Command, + Arguments: actions[0].Command.Arguments, + }, nil) + + // Check test had expected side effect. + data, err := os.ReadFile(file) + if string(data) != "ok" { + t.Fatalf("Test did not write expected content of %s; ReadFile returned (%q, %v)", file, data, err) + } + }) +} From 3d22fef61cb20f382db36aafa102b442df090c87 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Fri, 14 Mar 2025 11:00:38 -0400 Subject: [PATCH 118/270] gopls/internal/analysis/modernize: disable minmax on floating point The built-in min and max functions return NaN if any operand is NaN, so the minmax transformation is not sound for certain inputs. Since it is usually infeasible to prove that the operands are not NaN, this CL disables minmax for floating-point operands. Behavior-preserving translation: celebrating 75 years of being harder than it looks. Fixes golang/go#72829 Change-Id: Idb3454fea7ec37842e622154f66d5898703a392f Reviewed-on: https://go-review.googlesource.com/c/tools/+/657955 Auto-Submit: Alan Donovan Commit-Queue: Alan Donovan LUCI-TryBot-Result: Go LUCI Reviewed-by: Robert Findley --- gopls/internal/analysis/modernize/minmax.go | 26 +++++++++++++++++-- .../modernize/testdata/src/minmax/minmax.go | 13 ++++++++++ .../testdata/src/minmax/minmax.go.golden | 13 ++++++++++ 3 files changed, 50 insertions(+), 2 deletions(-) diff --git a/gopls/internal/analysis/modernize/minmax.go b/gopls/internal/analysis/modernize/minmax.go index a72506c3bbb..a996f9bd56a 100644 --- a/gopls/internal/analysis/modernize/minmax.go +++ b/gopls/internal/analysis/modernize/minmax.go @@ -16,6 +16,7 @@ import ( "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/internal/analysisinternal" "golang.org/x/tools/internal/astutil/cursor" + "golang.org/x/tools/internal/typeparams" ) // The minmax pass replaces if/else statements with calls to min or max. @@ -25,6 +26,10 @@ import ( // 1. if a < b { x = a } else { x = b } => x = min(a, b) // 2. x = a; if a < b { x = b } => x = max(a, b) // +// Pattern 1 requires that a is not NaN, and pattern 2 requires that b +// is not Nan. Since this is hard to prove, we reject floating-point +// numbers. +// // Variants: // - all four ordered comparisons // - "x := a" or "x = a" or "var x = a" in pattern 2 @@ -172,15 +177,17 @@ func minmax(pass *analysis.Pass) { } // Find all "if a < b { lhs = rhs }" statements. 
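+	// (info is consulted below to query operand types and reject floating point; see maybeNaN.)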
+ info := pass.TypesInfo inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - for curFile := range filesUsing(inspect, pass.TypesInfo, "go1.21") { + for curFile := range filesUsing(inspect, info, "go1.21") { astFile := curFile.Node().(*ast.File) for curIfStmt := range curFile.Preorder((*ast.IfStmt)(nil)) { ifStmt := curIfStmt.Node().(*ast.IfStmt) if compare, ok := ifStmt.Cond.(*ast.BinaryExpr); ok && ifStmt.Init == nil && isInequality(compare.Op) != 0 && - isAssignBlock(ifStmt.Body) { + isAssignBlock(ifStmt.Body) && + !maybeNaN(info.TypeOf(ifStmt.Body.List[0].(*ast.AssignStmt).Lhs[0])) { // lhs // Have: if a < b { lhs = rhs } check(astFile, curIfStmt, compare) @@ -219,6 +226,21 @@ func isSimpleAssign(n ast.Node) bool { len(assign.Rhs) == 1 } +// maybeNaN reports whether t is (or may be) a floating-point type. +func maybeNaN(t types.Type) bool { + // For now, we rely on core types. + // TODO(adonovan): In the post-core-types future, + // follow the approach of types.Checker.applyTypeFunc. + t = typeparams.CoreType(t) + if t == nil { + return true // fail safe + } + if basic, ok := t.(*types.Basic); ok && basic.Info()&types.IsFloat != 0 { + return true + } + return false +} + // -- utils -- func is[T any](x any) bool { diff --git a/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go b/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go index e0ac6da2734..cd117dabf84 100644 --- a/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go +++ b/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go @@ -136,3 +136,16 @@ func fix72727(a, b int) { o = b } } + +type myfloat float64 + +// The built-in min/max differ in their treatement of NaN, +// so reject floating-point numbers (#72829). +func nopeFloat(a, b myfloat) (res myfloat) { + if a < b { + res = a + } else { + res = b + } + return +} diff --git a/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go.golden b/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go.golden index 5a62435ac0c..23bfd6f9ecd 100644 --- a/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go.golden +++ b/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go.golden @@ -123,3 +123,16 @@ func fix72727(a, b int) { // want "if statement can be modernized using max" o := max(a-42, b) } + +type myfloat float64 + +// The built-in min/max differ in their treatement of NaN, +// so reject floating-point numbers (#72829). +func nopeFloat(a, b myfloat) (res myfloat) { + if a < b { + res = a + } else { + res = b + } + return +} From 7042bab9ca1ab65dd39be8accbb1870862e2600b Mon Sep 17 00:00:00 2001 From: xieyuschen Date: Wed, 19 Mar 2025 02:10:01 -0600 Subject: [PATCH 119/270] gopls/internal/analysis/modernize: modernizer to suggest using strings.CutPrefix This CL defines a modernizer to suggest users using strings.CutPrefix rather than a combination of strings.HasPrefix and strings.TrimPrefix; or strings.TrimPrefix first with a further comparison in an if statement. 
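
For example (an illustrative sketch, not the analyzer's literal output;
s, prefix, and use are placeholders, and the real patterns and
diagnostics are in the testdata added below):

	// before
	if strings.HasPrefix(s, prefix) {
		use(strings.TrimPrefix(s, prefix))
	}

	// after
	if after, ok := strings.CutPrefix(s, prefix); ok {
		use(after)
	}
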
Updates golang/go#71369 Change-Id: Id373bbf34292231f3fbfa41d7ffcf23505682beb Reviewed-on: https://go-review.googlesource.com/c/tools/+/655777 Reviewed-by: Robert Findley Reviewed-by: Alan Donovan Auto-Submit: Alan Donovan LUCI-TryBot-Result: Go LUCI --- gopls/doc/analyzers.md | 3 + gopls/internal/analysis/modernize/doc.go | 3 + .../internal/analysis/modernize/modernize.go | 1 + .../analysis/modernize/modernize_test.go | 1 + .../analysis/modernize/stringscutprefix.go | 197 ++++++++++++++++++ .../stringscutprefix/bytescutprefix_dot.go | 13 ++ .../bytescutprefix_dot.go.golden | 13 ++ .../src/stringscutprefix/stringscutprefix.go | 134 ++++++++++++ .../stringscutprefix.go.golden | 134 ++++++++++++ .../stringscutprefix/stringscutprefix_dot.go | 23 ++ .../stringscutprefix_dot.go.golden | 23 ++ gopls/internal/doc/api.json | 4 +- internal/analysisinternal/analysis.go | 23 +- 13 files changed, 562 insertions(+), 10 deletions(-) create mode 100644 gopls/internal/analysis/modernize/stringscutprefix.go create mode 100644 gopls/internal/analysis/modernize/testdata/src/stringscutprefix/bytescutprefix_dot.go create mode 100644 gopls/internal/analysis/modernize/testdata/src/stringscutprefix/bytescutprefix_dot.go.golden create mode 100644 gopls/internal/analysis/modernize/testdata/src/stringscutprefix/stringscutprefix.go create mode 100644 gopls/internal/analysis/modernize/testdata/src/stringscutprefix/stringscutprefix.go.golden create mode 100644 gopls/internal/analysis/modernize/testdata/src/stringscutprefix/stringscutprefix_dot.go create mode 100644 gopls/internal/analysis/modernize/testdata/src/stringscutprefix/stringscutprefix_dot.go.golden diff --git a/gopls/doc/analyzers.md b/gopls/doc/analyzers.md index bcf5590090a..4ec7fcbd1d0 100644 --- a/gopls/doc/analyzers.md +++ b/gopls/doc/analyzers.md @@ -550,6 +550,9 @@ Categories of modernize diagnostic: - stringseq: replace Split in "for range strings.Split(...)" by go1.24's more efficient SplitSeq, or Fields with FieldSeq. + - stringscutprefix: replace some uses of HasPrefix followed by TrimPrefix with CutPrefix, + added to the strings package in go1.20. + Default: on. Package documentation: [modernize](https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/modernize) diff --git a/gopls/internal/analysis/modernize/doc.go b/gopls/internal/analysis/modernize/doc.go index 931a2e6bd45..354bf0955d3 100644 --- a/gopls/internal/analysis/modernize/doc.go +++ b/gopls/internal/analysis/modernize/doc.go @@ -82,4 +82,7 @@ // // - stringseq: replace Split in "for range strings.Split(...)" by go1.24's // more efficient SplitSeq, or Fields with FieldSeq. +// +// - stringscutprefix: replace some uses of HasPrefix followed by TrimPrefix with CutPrefix, +// added to the strings package in go1.20. 
package modernize diff --git a/gopls/internal/analysis/modernize/modernize.go b/gopls/internal/analysis/modernize/modernize.go index 5dd94a82a6b..75f5b4014b6 100644 --- a/gopls/internal/analysis/modernize/modernize.go +++ b/gopls/internal/analysis/modernize/modernize.go @@ -87,6 +87,7 @@ func run(pass *analysis.Pass) (any, error) { rangeint(pass) slicescontains(pass) slicesdelete(pass) + stringscutprefix(pass) stringsseq(pass) sortslice(pass) testingContext(pass) diff --git a/gopls/internal/analysis/modernize/modernize_test.go b/gopls/internal/analysis/modernize/modernize_test.go index f9727d1e253..9f17d159073 100644 --- a/gopls/internal/analysis/modernize/modernize_test.go +++ b/gopls/internal/analysis/modernize/modernize_test.go @@ -24,6 +24,7 @@ func Test(t *testing.T) { "rangeint", "slicescontains", "slicesdelete", + "stringscutprefix", "splitseq", "fieldsseq", "sortslice", diff --git a/gopls/internal/analysis/modernize/stringscutprefix.go b/gopls/internal/analysis/modernize/stringscutprefix.go new file mode 100644 index 00000000000..28c42c93b05 --- /dev/null +++ b/gopls/internal/analysis/modernize/stringscutprefix.go @@ -0,0 +1,197 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package modernize + +import ( + "fmt" + "go/ast" + "go/token" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/analysisinternal" +) + +// stringscutprefix offers a fix to replace an if statement which +// calls to the 2 patterns below with strings.CutPrefix. +// +// Patterns: +// +// 1. if strings.HasPrefix(s, pre) { use(strings.TrimPrefix(s, pre) } +// => +// if after, ok := strings.CutPrefix(s, pre); ok { use(after) } +// +// 2. if after := strings.TrimPrefix(s, pre); after != s { use(after) } +// => +// if after, ok := strings.CutPrefix(s, pre); ok { use(after) } +// +// The use must occur within the first statement of the block, and the offered fix +// only replaces the first occurrence of strings.TrimPrefix. +// +// Variants: +// - bytes.HasPrefix usage as pattern 1. +func stringscutprefix(pass *analysis.Pass) { + if !analysisinternal.Imports(pass.Pkg, "strings") && + !analysisinternal.Imports(pass.Pkg, "bytes") { + return + } + + const ( + category = "stringscutprefix" + fixedMessage = "Replace HasPrefix/TrimPrefix with CutPrefix" + ) + + info := pass.TypesInfo + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + for curFile := range filesUsing(inspect, pass.TypesInfo, "go1.20") { + for curIfStmt := range curFile.Preorder((*ast.IfStmt)(nil)) { + ifStmt := curIfStmt.Node().(*ast.IfStmt) + + // pattern1 + if call, ok := ifStmt.Cond.(*ast.CallExpr); ok && len(ifStmt.Body.List) > 0 { + obj := typeutil.Callee(info, call) + if !analysisinternal.IsFunctionNamed(obj, "strings", "HasPrefix") && + !analysisinternal.IsFunctionNamed(obj, "bytes", "HasPrefix") { + continue + } + + // Replace the first occurrence of strings.TrimPrefix(s, pre) in the first statement only, + // but not later statements in case s or pre are modified by intervening logic. 
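+				// firstStmt is the cursor for the first statement of the if body.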
+ firstStmt := curIfStmt.Child(ifStmt.Body).Child(ifStmt.Body.List[0]) + for curCall := range firstStmt.Preorder((*ast.CallExpr)(nil)) { + call1 := curCall.Node().(*ast.CallExpr) + obj1 := typeutil.Callee(info, call1) + if !analysisinternal.IsFunctionNamed(obj1, "strings", "TrimPrefix") && + !analysisinternal.IsFunctionNamed(obj1, "bytes", "TrimPrefix") { + continue + } + + // Have: if strings.HasPrefix(s0, pre0) { ...strings.TrimPrefix(s, pre)... } + var ( + s0 = call.Args[0] + pre0 = call.Args[1] + s = call1.Args[0] + pre = call1.Args[1] + ) + + // check whether the obj1 uses the exact the same argument with strings.HasPrefix + // shadow variables won't be valid because we only access the first statement. + if equalSyntax(s0, s) && equalSyntax(pre0, pre) { + after := analysisinternal.FreshName(info.Scopes[ifStmt], ifStmt.Pos(), "after") + _, prefix, importEdits := analysisinternal.AddImport( + info, + curFile.Node().(*ast.File), + obj1.Pkg().Name(), + obj1.Pkg().Path(), + "CutPrefix", + call.Pos(), + ) + okVarName := analysisinternal.FreshName(info.Scopes[ifStmt], ifStmt.Pos(), "ok") + pass.Report(analysis.Diagnostic{ + // highlight at HasPrefix call. + Pos: call.Pos(), + End: call.End(), + Category: category, + Message: "HasPrefix + TrimPrefix can be simplified to CutPrefix", + SuggestedFixes: []analysis.SuggestedFix{{ + Message: fixedMessage, + // if strings.HasPrefix(s, pre) { use(strings.TrimPrefix(s, pre)) } + // ------------ ----------------- ----- -------------------------- + // if after, ok := strings.CutPrefix(s, pre); ok { use(after) } + TextEdits: append(importEdits, []analysis.TextEdit{ + { + Pos: call.Fun.Pos(), + End: call.Fun.Pos(), + NewText: []byte(fmt.Sprintf("%s, %s :=", after, okVarName)), + }, + { + Pos: call.Fun.Pos(), + End: call.Fun.End(), + NewText: fmt.Appendf(nil, "%sCutPrefix", prefix), + }, + { + Pos: call.End(), + End: call.End(), + NewText: []byte(fmt.Sprintf("; %s ", okVarName)), + }, + { + Pos: call1.Pos(), + End: call1.End(), + NewText: []byte(after), + }, + }...), + }}}, + ) + break + } + } + } + + // pattern2 + if bin, ok := ifStmt.Cond.(*ast.BinaryExpr); ok && + bin.Op == token.NEQ && + ifStmt.Init != nil && + isSimpleAssign(ifStmt.Init) { + assign := ifStmt.Init.(*ast.AssignStmt) + if call, ok := assign.Rhs[0].(*ast.CallExpr); ok && assign.Tok == token.DEFINE { + lhs := assign.Lhs[0] + obj := typeutil.Callee(info, call) + if analysisinternal.IsFunctionNamed(obj, "strings", "TrimPrefix") && + (equalSyntax(lhs, bin.X) && equalSyntax(call.Args[0], bin.Y) || + (equalSyntax(lhs, bin.Y) && equalSyntax(call.Args[0], bin.X))) { + okVarName := analysisinternal.FreshName(info.Scopes[ifStmt], ifStmt.Pos(), "ok") + // Have one of: + // if rest := TrimPrefix(s, prefix); rest != s { + // if rest := TrimPrefix(s, prefix); s != rest { + + // We use AddImport not to add an import (since it exists already) + // but to compute the correct prefix in the dot-import case. + _, prefix, importEdits := analysisinternal.AddImport( + info, + curFile.Node().(*ast.File), + obj.Pkg().Name(), + obj.Pkg().Path(), + "CutPrefix", + call.Pos(), + ) + + pass.Report(analysis.Diagnostic{ + // highlight from the init and the condition end. + Pos: ifStmt.Init.Pos(), + End: ifStmt.Cond.End(), + Category: category, + Message: "TrimPrefix can be simplified to CutPrefix", + SuggestedFixes: []analysis.SuggestedFix{{ + Message: fixedMessage, + // if x := strings.TrimPrefix(s, pre); x != s ... + // ---- ---------- ------ + // if x, ok := strings.CutPrefix (s, pre); ok ... 
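+								// Three edits: insert ", ok" after the assignment's LHS,
+								// rename the TrimPrefix call to CutPrefix, and replace
+								// the condition with the ok variable.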
+ TextEdits: append(importEdits, []analysis.TextEdit{ + { + Pos: assign.Lhs[0].End(), + End: assign.Lhs[0].End(), + NewText: fmt.Appendf(nil, ", %s", okVarName), + }, + { + Pos: call.Fun.Pos(), + End: call.Fun.End(), + NewText: fmt.Appendf(nil, "%sCutPrefix", prefix), + }, + { + Pos: ifStmt.Cond.Pos(), + End: ifStmt.Cond.End(), + NewText: []byte(okVarName), + }, + }...), + }}, + }) + } + } + } + } + } +} diff --git a/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/bytescutprefix_dot.go b/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/bytescutprefix_dot.go new file mode 100644 index 00000000000..4da9ed52e13 --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/bytescutprefix_dot.go @@ -0,0 +1,13 @@ +package stringscutprefix + +import ( + . "bytes" +) + +// test supported cases of pattern 1 +func _() { + if HasPrefix(bss, bspre) { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + a := TrimPrefix(bss, bspre) + _ = a + } +} diff --git a/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/bytescutprefix_dot.go.golden b/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/bytescutprefix_dot.go.golden new file mode 100644 index 00000000000..054214cabf1 --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/bytescutprefix_dot.go.golden @@ -0,0 +1,13 @@ +package stringscutprefix + +import ( + . "bytes" +) + +// test supported cases of pattern 1 +func _() { + if after, ok := CutPrefix(bss, bspre); ok { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + a := after + _ = a + } +} \ No newline at end of file diff --git a/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/stringscutprefix.go b/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/stringscutprefix.go new file mode 100644 index 00000000000..f5f890f4171 --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/stringscutprefix.go @@ -0,0 +1,134 @@ +package stringscutprefix + +import ( + "bytes" + "strings" +) + +var ( + s, pre string + bss, bspre []byte +) + +// test supported cases of pattern 1 +func _() { + if strings.HasPrefix(s, pre) { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + a := strings.TrimPrefix(s, pre) + _ = a + } + if strings.HasPrefix("", "") { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + a := strings.TrimPrefix("", "") + _ = a + } + if strings.HasPrefix(s, "") { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + println([]byte(strings.TrimPrefix(s, ""))) + } + if strings.HasPrefix(s, "") { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + a, b := "", strings.TrimPrefix(s, "") + _, _ = a, b + } + if strings.HasPrefix(s, "") { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + a, b := strings.TrimPrefix(s, ""), strings.TrimPrefix(s, "") // only replace the first occurrence + s = "123" + b = strings.TrimPrefix(s, "") // only replace the first occurrence + _, _ = a, b + } + + if bytes.HasPrefix(bss, bspre) { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + a := bytes.TrimPrefix(bss, bspre) + _ = a + } + if bytes.HasPrefix([]byte(""), []byte("")) { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + a := bytes.TrimPrefix([]byte(""), []byte("")) + _ = a + } + var a, b string + if strings.HasPrefix(s, "") { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + a, b = "", 
strings.TrimPrefix(s, "") + _, _ = a, b + } +} + +// test cases that are not supported by pattern1 +func _() { + ok := strings.HasPrefix("", "") + if ok { // noop, currently it doesn't track the result usage of HasPrefix + a := strings.TrimPrefix("", "") + _ = a + } + if strings.HasPrefix(s, pre) { + a := strings.TrimPrefix("", "") // noop, as the argument isn't the same + _ = a + } + if strings.HasPrefix(s, pre) { + var result string + result = strings.TrimPrefix("", "") // noop, as we believe define is more popular. + _ = result + } + if strings.HasPrefix("", "") { + a := strings.TrimPrefix(s, pre) // noop, as the argument isn't the same + _ = a + } +} + +var value0 string + +// test supported cases of pattern2 +func _() { + if after := strings.TrimPrefix(s, pre); after != s { // want "TrimPrefix can be simplified to CutPrefix" + println(after) + } + if after := strings.TrimPrefix(s, pre); s != after { // want "TrimPrefix can be simplified to CutPrefix" + println(after) + } + if after := strings.TrimPrefix(s, pre); s != after { // want "TrimPrefix can be simplified to CutPrefix" + println(strings.TrimPrefix(s, pre)) // noop here + } + if after := strings.TrimPrefix(s, ""); s != after { // want "TrimPrefix can be simplified to CutPrefix" + println(after) + } + var ok bool // define an ok variable to test the fix won't shadow it for its if stmt body + _ = ok + if after := strings.TrimPrefix(s, pre); after != s { // want "TrimPrefix can be simplified to CutPrefix" + println(after) + } + var predefined string + if predefined = strings.TrimPrefix(s, pre); s != predefined { // noop + println(predefined) + } + if predefined = strings.TrimPrefix(s, pre); s != predefined { // noop + println(&predefined) + } + var value string + if value = strings.TrimPrefix(s, pre); s != value { // noop + println(value) + } + lhsMap := make(map[string]string) + if lhsMap[""] = strings.TrimPrefix(s, pre); s != lhsMap[""] { // noop + println(lhsMap[""]) + } + arr := make([]string, 0) + if arr[0] = strings.TrimPrefix(s, pre); s != arr[0] { // noop + println(arr[0]) + } + type example struct { + field string + } + var e example + if e.field = strings.TrimPrefix(s, pre); s != e.field { // noop + println(e.field) + } +} + +// test cases that not supported by pattern2 +func _() { + if after := strings.TrimPrefix(s, pre); s != pre { // noop + println(after) + } + if after := strings.TrimPrefix(s, pre); after != pre { // noop + println(after) + } + if strings.TrimPrefix(s, pre) != s { + println(strings.TrimPrefix(s, pre)) + } +} diff --git a/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/stringscutprefix.go.golden b/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/stringscutprefix.go.golden new file mode 100644 index 00000000000..d8b7b2ba47f --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/stringscutprefix.go.golden @@ -0,0 +1,134 @@ +package stringscutprefix + +import ( + "bytes" + "strings" +) + +var ( + s, pre string + bss, bspre []byte +) + +// test supported cases of pattern 1 +func _() { + if after, ok := strings.CutPrefix(s, pre); ok { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + a := after + _ = a + } + if after, ok := strings.CutPrefix("", ""); ok { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + a := after + _ = a + } + if after, ok := strings.CutPrefix(s, ""); ok { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + println([]byte(after)) + } + if after, ok := strings.CutPrefix(s, ""); 
ok { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + a, b := "", after + _, _ = a, b + } + if after, ok := strings.CutPrefix(s, ""); ok { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + a, b := after, strings.TrimPrefix(s, "") // only replace the first occurrence + s = "123" + b = strings.TrimPrefix(s, "") // only replace the first occurrence + _, _ = a, b + } + + if after, ok := bytes.CutPrefix(bss, bspre); ok { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + a := after + _ = a + } + if after, ok := bytes.CutPrefix([]byte(""), []byte("")); ok { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + a := after + _ = a + } + var a, b string + if after, ok := strings.CutPrefix(s, ""); ok { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + a, b = "", after + _, _ = a, b + } +} + +// test cases that are not supported by pattern1 +func _() { + ok := strings.HasPrefix("", "") + if ok { // noop, currently it doesn't track the result usage of HasPrefix + a := strings.TrimPrefix("", "") + _ = a + } + if strings.HasPrefix(s, pre) { + a := strings.TrimPrefix("", "") // noop, as the argument isn't the same + _ = a + } + if strings.HasPrefix(s, pre) { + var result string + result = strings.TrimPrefix("", "") // noop, as we believe define is more popular. + _ = result + } + if strings.HasPrefix("", "") { + a := strings.TrimPrefix(s, pre) // noop, as the argument isn't the same + _ = a + } +} + +var value0 string + +// test supported cases of pattern2 +func _() { + if after, ok := strings.CutPrefix(s, pre); ok { // want "TrimPrefix can be simplified to CutPrefix" + println(after) + } + if after, ok := strings.CutPrefix(s, pre); ok { // want "TrimPrefix can be simplified to CutPrefix" + println(after) + } + if after, ok := strings.CutPrefix(s, pre); ok { // want "TrimPrefix can be simplified to CutPrefix" + println(strings.TrimPrefix(s, pre)) // noop here + } + if after, ok := strings.CutPrefix(s, ""); ok { // want "TrimPrefix can be simplified to CutPrefix" + println(after) + } + var ok bool // define an ok variable to test the fix won't shadow it for its if stmt body + _ = ok + if after, ok0 := strings.CutPrefix(s, pre); ok0 { // want "TrimPrefix can be simplified to CutPrefix" + println(after) + } + var predefined string + if predefined = strings.TrimPrefix(s, pre); s != predefined { // noop + println(predefined) + } + if predefined = strings.TrimPrefix(s, pre); s != predefined { // noop + println(&predefined) + } + var value string + if value = strings.TrimPrefix(s, pre); s != value { // noop + println(value) + } + lhsMap := make(map[string]string) + if lhsMap[""] = strings.TrimPrefix(s, pre); s != lhsMap[""] { // noop + println(lhsMap[""]) + } + arr := make([]string, 0) + if arr[0] = strings.TrimPrefix(s, pre); s != arr[0] { // noop + println(arr[0]) + } + type example struct { + field string + } + var e example + if e.field = strings.TrimPrefix(s, pre); s != e.field { // noop + println(e.field) + } +} + +// test cases that not supported by pattern2 +func _() { + if after := strings.TrimPrefix(s, pre); s != pre { // noop + println(after) + } + if after := strings.TrimPrefix(s, pre); after != pre { // noop + println(after) + } + if strings.TrimPrefix(s, pre) != s { + println(strings.TrimPrefix(s, pre)) + } +} diff --git a/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/stringscutprefix_dot.go b/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/stringscutprefix_dot.go new file mode 
100644 index 00000000000..75ce5bbe39b --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/stringscutprefix_dot.go @@ -0,0 +1,23 @@ +package stringscutprefix + +import ( + . "strings" +) + +// test supported cases of pattern 1 +func _() { + if HasPrefix(s, pre) { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + a := TrimPrefix(s, pre) + _ = a + } +} + +// test supported cases of pattern2 +func _() { + if after := TrimPrefix(s, pre); after != s { // want "TrimPrefix can be simplified to CutPrefix" + println(after) + } + if after := TrimPrefix(s, pre); s != after { // want "TrimPrefix can be simplified to CutPrefix" + println(after) + } +} diff --git a/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/stringscutprefix_dot.go.golden b/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/stringscutprefix_dot.go.golden new file mode 100644 index 00000000000..b5f97b3695a --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/stringscutprefix_dot.go.golden @@ -0,0 +1,23 @@ +package stringscutprefix + +import ( + . "strings" +) + +// test supported cases of pattern 1 +func _() { + if after, ok := CutPrefix(s, pre); ok { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + a := after + _ = a + } +} + +// test supported cases of pattern2 +func _() { + if after, ok := CutPrefix(s, pre); ok { // want "TrimPrefix can be simplified to CutPrefix" + println(after) + } + if after, ok := CutPrefix(s, pre); ok { // want "TrimPrefix can be simplified to CutPrefix" + println(after) + } +} \ No newline at end of file diff --git a/gopls/internal/doc/api.json b/gopls/internal/doc/api.json index b47d635638c..f731e0d7984 100644 --- a/gopls/internal/doc/api.json +++ b/gopls/internal/doc/api.json @@ -562,7 +562,7 @@ }, { "Name": "\"modernize\"", - "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code by using more modern features of Go and its standard\nlibrary.\n\nEach diagnostic provides a fix. Our intent is that these fixes may\nbe safely applied en masse without changing the behavior of your\nprogram. In some cases the suggested fixes are imperfect and may\nlead to (for example) unused imports or unused local variables,\ncausing build breakage. However, these problems are generally\ntrivial to fix. We regard any modernizer whose fix changes program\nbehavior to have a serious bug and will endeavor to fix it.\n\nTo apply all modernization fixes en masse, you can use the\nfollowing command:\n\n\t$ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -fix -test ./...\n\nIf the tool warns of conflicting fixes, you may need to run it more\nthan once until it has applied all fixes cleanly. This command is\nnot an officially supported interface and may change in the future.\n\nChanges produced by this tool should be reviewed as usual before\nbeing merged. In some cases, a loop may be replaced by a simple\nfunction call, causing comments within the loop to be discarded.\nHuman judgment may be required to avoid losing comments of value.\n\nEach diagnostic reported by modernize has a specific category. (The\ncategories are listed below.) Diagnostics in some categories, such\nas \"efaceany\" (which replaces \"interface{}\" with \"any\" where it is\nsafe to do so) are particularly numerous. 
It may ease the burden of\ncode review to apply fixes in two passes, the first change\nconsisting only of fixes of category \"efaceany\", the second\nconsisting of all others. This can be achieved using the -category flag:\n\n\t$ modernize -category=efaceany -fix -test ./...\n\t$ modernize -category=-efaceany -fix -test ./...\n\nCategories of modernize diagnostic:\n\n - minmax: replace an if/else conditional assignment by a call to\n the built-in min or max functions added in go1.21.\n\n - sortslice: replace sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21.\n\n - efaceany: replace interface{} by the 'any' type added in go1.18.\n\n - slicesclone: replace append([]T(nil), s...) by slices.Clone(s) or\n slices.Concat(s), added in go1.21.\n\n - mapsloop: replace a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions from\n the maps package, added in go1.21.\n\n - fmtappendf: replace []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...),\n added in go1.19.\n\n - testingcontext: replace uses of context.WithCancel in tests\n with t.Context, added in go1.24.\n\n - omitzero: replace omitempty by omitzero on structs, added in go1.24.\n\n - bloop: replace \"for i := range b.N\" or \"for range b.N\" in a\n benchmark with \"for b.Loop()\", and remove any preceding calls\n to b.StopTimer, b.StartTimer, and b.ResetTimer.\n\n - slicesdelete: replace append(s[:i], s[i+1]...) by\n slices.Delete(s, i, i+1), added in go1.21.\n\n - rangeint: replace a 3-clause \"for i := 0; i \u003c n; i++\" loop by\n \"for i := range n\", added in go1.22.\n\n - stringseq: replace Split in \"for range strings.Split(...)\" by go1.24's\n more efficient SplitSeq, or Fields with FieldSeq.", + "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code by using more modern features of Go and its standard\nlibrary.\n\nEach diagnostic provides a fix. Our intent is that these fixes may\nbe safely applied en masse without changing the behavior of your\nprogram. In some cases the suggested fixes are imperfect and may\nlead to (for example) unused imports or unused local variables,\ncausing build breakage. However, these problems are generally\ntrivial to fix. We regard any modernizer whose fix changes program\nbehavior to have a serious bug and will endeavor to fix it.\n\nTo apply all modernization fixes en masse, you can use the\nfollowing command:\n\n\t$ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -fix -test ./...\n\nIf the tool warns of conflicting fixes, you may need to run it more\nthan once until it has applied all fixes cleanly. This command is\nnot an officially supported interface and may change in the future.\n\nChanges produced by this tool should be reviewed as usual before\nbeing merged. In some cases, a loop may be replaced by a simple\nfunction call, causing comments within the loop to be discarded.\nHuman judgment may be required to avoid losing comments of value.\n\nEach diagnostic reported by modernize has a specific category. (The\ncategories are listed below.) Diagnostics in some categories, such\nas \"efaceany\" (which replaces \"interface{}\" with \"any\" where it is\nsafe to do so) are particularly numerous. It may ease the burden of\ncode review to apply fixes in two passes, the first change\nconsisting only of fixes of category \"efaceany\", the second\nconsisting of all others. 
This can be achieved using the -category flag:\n\n\t$ modernize -category=efaceany -fix -test ./...\n\t$ modernize -category=-efaceany -fix -test ./...\n\nCategories of modernize diagnostic:\n\n - minmax: replace an if/else conditional assignment by a call to\n the built-in min or max functions added in go1.21.\n\n - sortslice: replace sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21.\n\n - efaceany: replace interface{} by the 'any' type added in go1.18.\n\n - slicesclone: replace append([]T(nil), s...) by slices.Clone(s) or\n slices.Concat(s), added in go1.21.\n\n - mapsloop: replace a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions from\n the maps package, added in go1.21.\n\n - fmtappendf: replace []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...),\n added in go1.19.\n\n - testingcontext: replace uses of context.WithCancel in tests\n with t.Context, added in go1.24.\n\n - omitzero: replace omitempty by omitzero on structs, added in go1.24.\n\n - bloop: replace \"for i := range b.N\" or \"for range b.N\" in a\n benchmark with \"for b.Loop()\", and remove any preceding calls\n to b.StopTimer, b.StartTimer, and b.ResetTimer.\n\n - slicesdelete: replace append(s[:i], s[i+1]...) by\n slices.Delete(s, i, i+1), added in go1.21.\n\n - rangeint: replace a 3-clause \"for i := 0; i \u003c n; i++\" loop by\n \"for i := range n\", added in go1.22.\n\n - stringseq: replace Split in \"for range strings.Split(...)\" by go1.24's\n more efficient SplitSeq, or Fields with FieldSeq.\n\n - stringscutprefix: replace some uses of HasPrefix followed by TrimPrefix with CutPrefix,\n added to the strings package in go1.20.", "Default": "true", "Status": "" }, @@ -1338,7 +1338,7 @@ }, { "Name": "modernize", - "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code by using more modern features of Go and its standard\nlibrary.\n\nEach diagnostic provides a fix. Our intent is that these fixes may\nbe safely applied en masse without changing the behavior of your\nprogram. In some cases the suggested fixes are imperfect and may\nlead to (for example) unused imports or unused local variables,\ncausing build breakage. However, these problems are generally\ntrivial to fix. We regard any modernizer whose fix changes program\nbehavior to have a serious bug and will endeavor to fix it.\n\nTo apply all modernization fixes en masse, you can use the\nfollowing command:\n\n\t$ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -fix -test ./...\n\nIf the tool warns of conflicting fixes, you may need to run it more\nthan once until it has applied all fixes cleanly. This command is\nnot an officially supported interface and may change in the future.\n\nChanges produced by this tool should be reviewed as usual before\nbeing merged. In some cases, a loop may be replaced by a simple\nfunction call, causing comments within the loop to be discarded.\nHuman judgment may be required to avoid losing comments of value.\n\nEach diagnostic reported by modernize has a specific category. (The\ncategories are listed below.) Diagnostics in some categories, such\nas \"efaceany\" (which replaces \"interface{}\" with \"any\" where it is\nsafe to do so) are particularly numerous. 
It may ease the burden of\ncode review to apply fixes in two passes, the first change\nconsisting only of fixes of category \"efaceany\", the second\nconsisting of all others. This can be achieved using the -category flag:\n\n\t$ modernize -category=efaceany -fix -test ./...\n\t$ modernize -category=-efaceany -fix -test ./...\n\nCategories of modernize diagnostic:\n\n - minmax: replace an if/else conditional assignment by a call to\n the built-in min or max functions added in go1.21.\n\n - sortslice: replace sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21.\n\n - efaceany: replace interface{} by the 'any' type added in go1.18.\n\n - slicesclone: replace append([]T(nil), s...) by slices.Clone(s) or\n slices.Concat(s), added in go1.21.\n\n - mapsloop: replace a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions from\n the maps package, added in go1.21.\n\n - fmtappendf: replace []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...),\n added in go1.19.\n\n - testingcontext: replace uses of context.WithCancel in tests\n with t.Context, added in go1.24.\n\n - omitzero: replace omitempty by omitzero on structs, added in go1.24.\n\n - bloop: replace \"for i := range b.N\" or \"for range b.N\" in a\n benchmark with \"for b.Loop()\", and remove any preceding calls\n to b.StopTimer, b.StartTimer, and b.ResetTimer.\n\n - slicesdelete: replace append(s[:i], s[i+1]...) by\n slices.Delete(s, i, i+1), added in go1.21.\n\n - rangeint: replace a 3-clause \"for i := 0; i \u003c n; i++\" loop by\n \"for i := range n\", added in go1.22.\n\n - stringseq: replace Split in \"for range strings.Split(...)\" by go1.24's\n more efficient SplitSeq, or Fields with FieldSeq.", + "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code by using more modern features of Go and its standard\nlibrary.\n\nEach diagnostic provides a fix. Our intent is that these fixes may\nbe safely applied en masse without changing the behavior of your\nprogram. In some cases the suggested fixes are imperfect and may\nlead to (for example) unused imports or unused local variables,\ncausing build breakage. However, these problems are generally\ntrivial to fix. We regard any modernizer whose fix changes program\nbehavior to have a serious bug and will endeavor to fix it.\n\nTo apply all modernization fixes en masse, you can use the\nfollowing command:\n\n\t$ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -fix -test ./...\n\nIf the tool warns of conflicting fixes, you may need to run it more\nthan once until it has applied all fixes cleanly. This command is\nnot an officially supported interface and may change in the future.\n\nChanges produced by this tool should be reviewed as usual before\nbeing merged. In some cases, a loop may be replaced by a simple\nfunction call, causing comments within the loop to be discarded.\nHuman judgment may be required to avoid losing comments of value.\n\nEach diagnostic reported by modernize has a specific category. (The\ncategories are listed below.) Diagnostics in some categories, such\nas \"efaceany\" (which replaces \"interface{}\" with \"any\" where it is\nsafe to do so) are particularly numerous. It may ease the burden of\ncode review to apply fixes in two passes, the first change\nconsisting only of fixes of category \"efaceany\", the second\nconsisting of all others. 
This can be achieved using the -category flag:\n\n\t$ modernize -category=efaceany -fix -test ./...\n\t$ modernize -category=-efaceany -fix -test ./...\n\nCategories of modernize diagnostic:\n\n - minmax: replace an if/else conditional assignment by a call to\n the built-in min or max functions added in go1.21.\n\n - sortslice: replace sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21.\n\n - efaceany: replace interface{} by the 'any' type added in go1.18.\n\n - slicesclone: replace append([]T(nil), s...) by slices.Clone(s) or\n slices.Concat(s), added in go1.21.\n\n - mapsloop: replace a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions from\n the maps package, added in go1.21.\n\n - fmtappendf: replace []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...),\n added in go1.19.\n\n - testingcontext: replace uses of context.WithCancel in tests\n with t.Context, added in go1.24.\n\n - omitzero: replace omitempty by omitzero on structs, added in go1.24.\n\n - bloop: replace \"for i := range b.N\" or \"for range b.N\" in a\n benchmark with \"for b.Loop()\", and remove any preceding calls\n to b.StopTimer, b.StartTimer, and b.ResetTimer.\n\n - slicesdelete: replace append(s[:i], s[i+1]...) by\n slices.Delete(s, i, i+1), added in go1.21.\n\n - rangeint: replace a 3-clause \"for i := 0; i \u003c n; i++\" loop by\n \"for i := range n\", added in go1.22.\n\n - stringseq: replace Split in \"for range strings.Split(...)\" by go1.24's\n more efficient SplitSeq, or Fields with FieldSeq.\n\n - stringscutprefix: replace some uses of HasPrefix followed by TrimPrefix with CutPrefix,\n added to the strings package in go1.20.", "URL": "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/modernize", "Default": true }, diff --git a/internal/analysisinternal/analysis.go b/internal/analysisinternal/analysis.go index bc10f66da25..b22e314cf45 100644 --- a/internal/analysisinternal/analysis.go +++ b/internal/analysisinternal/analysis.go @@ -220,7 +220,7 @@ func CheckReadable(pass *analysis.Pass, filename string) error { // to form a qualified name, and the edit for the new import. // // In the special case that pkgpath is dot-imported then member, the -// identifer for which the import is being added, is consulted. If +// identifier for which the import is being added, is consulted. If // member is not shadowed at pos, AddImport returns (".", "", nil). // (AddImport accepts the caller's implicit claim that the imported // package declares member.) @@ -252,13 +252,7 @@ func AddImport(info *types.Info, file *ast.File, preferredName, pkgpath, member // We must add a new import. // Ensure we have a fresh name. - newName := preferredName - for i := 0; ; i++ { - if _, obj := scope.LookupParent(newName, pos); obj == nil { - break // fresh - } - newName = fmt.Sprintf("%s%d", preferredName, i) - } + newName := FreshName(scope, pos, preferredName) // Create a new import declaration either before the first existing // declaration (which must exist), including its comments; or @@ -298,6 +292,19 @@ func AddImport(info *types.Info, file *ast.File, preferredName, pkgpath, member }} } +// FreshName returns the name of an identifier that is undefined +// at the specified position, based on the preferred name. 
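+// The result is preferred itself if that name is not already in scope
+// at pos; otherwise it is preferred with the smallest integer suffix
+// (0, 1, ...) that yields an unused name.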
+func FreshName(scope *types.Scope, pos token.Pos, preferred string) string { + newName := preferred + for i := 0; ; i++ { + if _, obj := scope.LookupParent(newName, pos); obj == nil { + break // fresh + } + newName = fmt.Sprintf("%s%d", preferred, i) + } + return newName +} + // Format returns a string representation of the expression e. func Format(fset *token.FileSet, e ast.Expr) string { var buf strings.Builder From a70d348b8d7b57339a4ba6c769ff28a3bade686d Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Tue, 18 Mar 2025 18:36:17 -0400 Subject: [PATCH 120/270] gopls/internal/util/persistent: add concurrency test It didn't find any problems. Updates golang/go#72931 Change-Id: Idb65548480af1fd6777dffdcc0e6c6e89b5a06f5 Reviewed-on: https://go-review.googlesource.com/c/tools/+/659015 Auto-Submit: Alan Donovan LUCI-TryBot-Result: Go LUCI Reviewed-by: Robert Findley --- gopls/internal/util/persistent/map.go | 2 + gopls/internal/util/persistent/race_test.go | 66 +++++++++++++++++++++ 2 files changed, 68 insertions(+) create mode 100644 gopls/internal/util/persistent/race_test.go diff --git a/gopls/internal/util/persistent/map.go b/gopls/internal/util/persistent/map.go index 193f98791d8..d97a9494c41 100644 --- a/gopls/internal/util/persistent/map.go +++ b/gopls/internal/util/persistent/map.go @@ -203,6 +203,8 @@ func (pm *Map[K, V]) SetAll(other *Map[K, V]) { // Set updates the value associated with the specified key. // If release is non-nil, it will be called with entry's key and value once the // key is no longer contained in the map or any clone. +// +// TODO(adonovan): fix release, which has the wrong type. func (pm *Map[K, V]) Set(key K, value V, release func(key, value any)) { first := pm.root second := newNodeWithRef(key, value, release) diff --git a/gopls/internal/util/persistent/race_test.go b/gopls/internal/util/persistent/race_test.go new file mode 100644 index 00000000000..827791a78dc --- /dev/null +++ b/gopls/internal/util/persistent/race_test.go @@ -0,0 +1,66 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build race + +package persistent + +import ( + "context" + "maps" + "testing" + "time" + + "golang.org/x/sync/errgroup" +) + +// TestConcurrency exercises concurrent map access. +// It doesn't assert anything, but it runs under the race detector. +func TestConcurrency(t *testing.T) { + ctx, cancel := context.WithTimeout(t.Context(), 5*time.Second) + defer cancel() + var orig Map[int, int] // maps subset of [0-10] to itself (values aren't interesting) + for i := range 10 { + orig.Set(i, i, func(k, v any) { /* just for good measure*/ }) + } + g, ctx := errgroup.WithContext(ctx) + const N = 10 // concurrency level + g.SetLimit(N) + for range N { + g.Go(func() error { + // Each thread has its own clone of the original, + // sharing internal structures. Each map is accessed + // only by a single thread; the shared data is immutable. + m := orig.Clone() + + // Run until the timeout. + for ctx.Err() == nil { + for i := range 1000 { + key := i % 10 + + switch { + case i%2 == 0: + _, _ = m.Get(key) + case i%11 == 0: + m.Set(key, key, func(key, value any) {}) + case i%13 == 0: + _ = maps.Collect(m.All()) + case i%17 == 0: + _ = m.Delete(key) + case i%19 == 0: + _ = m.Keys() + case i%31 == 0: + _ = m.String() + case i%23 == 0: + _ = m.Clone() + } + // Don't call m.Clear(), as it would + // disentangle the various maps from each other. 
+ } + } + return nil + }) + } + g.Wait() // no errors +} From be0d52b7f28e0282c48e226c51fd5c823904dc82 Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Sat, 15 Mar 2025 09:59:38 -0400 Subject: [PATCH 121/270] gopls/internal/cache: improve build constraint trimming Generalize trimContentForPortMatch to handle +build directives. It assumed only go:build directives, but the +build variety is still valid, and in fact there is a file in the Go build on my local Google-internal machine that has them. This fixes a test that was failing for me because of that file. Change-Id: I534a18ef6e66575d242406e7b81c32055e3c8ace Reviewed-on: https://go-review.googlesource.com/c/tools/+/658195 Reviewed-by: Robert Findley LUCI-TryBot-Result: Go LUCI --- gopls/internal/cache/port.go | 30 +++++++++++++----------------- 1 file changed, 13 insertions(+), 17 deletions(-) diff --git a/gopls/internal/cache/port.go b/gopls/internal/cache/port.go index 40005bcf6d4..8caaa801b68 100644 --- a/gopls/internal/cache/port.go +++ b/gopls/internal/cache/port.go @@ -7,6 +7,7 @@ package cache import ( "bytes" "go/build" + "go/build/constraint" "go/parser" "go/token" "io" @@ -173,12 +174,16 @@ func (p port) matches(path string, content []byte) bool { // without trimming content. func trimContentForPortMatch(content []byte) []byte { buildComment := buildComment(content) - return []byte(buildComment + "\npackage p") // package name does not matter + // The package name does not matter, but +build lines + // require a blank line before the package declaration. + return []byte(buildComment + "\n\npackage p") } // buildComment returns the first matching //go:build comment in the given // content, or "" if none exists. func buildComment(content []byte) string { + var lines []string + f, err := parser.ParseFile(token.NewFileSet(), "", content, parser.PackageClauseOnly|parser.ParseComments) if err != nil { return "" @@ -186,24 +191,15 @@ func buildComment(content []byte) string { for _, cg := range f.Comments { for _, c := range cg.List { - if isGoBuildComment(c.Text) { + if constraint.IsGoBuild(c.Text) { + // A file must have only one //go:build line. return c.Text } + if constraint.IsPlusBuild(c.Text) { + // A file may have several // +build lines. + lines = append(lines, c.Text) + } } } - return "" -} - -// Adapted from go/build/build.go. -// -// TODO(rfindley): use constraint.IsGoBuild once we are on 1.19+. -func isGoBuildComment(line string) bool { - const goBuildComment = "//go:build" - if !strings.HasPrefix(line, goBuildComment) { - return false - } - // Report whether //go:build is followed by a word boundary. - line = strings.TrimSpace(line) - rest := line[len(goBuildComment):] - return len(rest) == 0 || len(strings.TrimSpace(rest)) < len(rest) + return strings.Join(lines, "\n") } From 58e40aec2e1187bbfcfa12319d977ed2bcd1e82a Mon Sep 17 00:00:00 2001 From: Rob Findley Date: Wed, 19 Mar 2025 19:09:07 +0000 Subject: [PATCH 122/270] gopls/internal/golang/completion: avoid crash in addFieldItems For now, be defensive and avoid the crash reported in golang/go#72828. No attempt was made to reproduce. Longer term, as the TODO indicates, we should investigate the logic error that leads to addFieldItems being called with nil surrounding. 
Fixes golang/go#72828 Change-Id: I2300406b49fc3d53561b288d42f64793429e3fbd Reviewed-on: https://go-review.googlesource.com/c/tools/+/659237 Reviewed-by: Jonathan Amsterdam LUCI-TryBot-Result: Go LUCI Auto-Submit: Robert Findley --- gopls/internal/golang/completion/completion.go | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/gopls/internal/golang/completion/completion.go b/gopls/internal/golang/completion/completion.go index a6c0e49c311..19c50453017 100644 --- a/gopls/internal/golang/completion/completion.go +++ b/gopls/internal/golang/completion/completion.go @@ -1177,7 +1177,10 @@ func isValidIdentifierChar(char byte) bool { // adds struct fields, interface methods, function declaration fields to completion func (c *completer) addFieldItems(fields *ast.FieldList) { - if fields == nil { + // TODO: in golang/go#72828, we get here with a nil surrounding. + // This indicates a logic bug elsewhere: we should only be interrogating the + // surrounding if it is set. + if fields == nil || c.surrounding == nil { return } From 3a64d74429d40fce4072eccecd4061b7a650444d Mon Sep 17 00:00:00 2001 From: cuishuang Date: Thu, 20 Mar 2025 21:21:51 +0800 Subject: [PATCH 123/270] all: make function comment match function name Change-Id: I16e2fd79a65693179680dfdeed84bbe0fe4e0b54 Reviewed-on: https://go-review.googlesource.com/c/tools/+/659535 Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI Reviewed-by: Robert Findley Auto-Submit: Alan Donovan --- go/ssa/func.go | 2 +- godoc/vfs/zipfs/zipfs_test.go | 2 +- gopls/internal/cache/analysis.go | 2 +- gopls/internal/golang/completion/util.go | 2 +- gopls/internal/mod/diagnostics.go | 2 +- gopls/internal/server/link.go | 2 +- gopls/internal/settings/settings.go | 2 +- gopls/internal/telemetry/cmd/stacks/stacks.go | 2 +- 8 files changed, 8 insertions(+), 8 deletions(-) diff --git a/go/ssa/func.go b/go/ssa/func.go index a6e6b149fd9..2d52309b623 100644 --- a/go/ssa/func.go +++ b/go/ssa/func.go @@ -817,7 +817,7 @@ func blockExit(fn *Function, block *BasicBlock, pos token.Pos) *exit { return e } -// blockExit creates a new exit to a yield fn that returns the source function. +// returnExit creates a new exit to a yield fn that returns the source function. 
func returnExit(fn *Function, pos token.Pos) *exit { e := &exit{ id: unique(fn), diff --git a/godoc/vfs/zipfs/zipfs_test.go b/godoc/vfs/zipfs/zipfs_test.go index b6f2431b0b5..cb000361745 100644 --- a/godoc/vfs/zipfs/zipfs_test.go +++ b/godoc/vfs/zipfs/zipfs_test.go @@ -59,7 +59,7 @@ func TestMain(t *testing.M) { os.Exit(t.Run()) } -// setups state each of the tests uses +// setup state each of the tests uses func setup() error { // create zipfs b := new(bytes.Buffer) diff --git a/gopls/internal/cache/analysis.go b/gopls/internal/cache/analysis.go index 4083f49d2d6..cf5518cf79f 100644 --- a/gopls/internal/cache/analysis.go +++ b/gopls/internal/cache/analysis.go @@ -637,7 +637,7 @@ func (an *analysisNode) runCached(ctx context.Context, key file.Hash) (*analyzeS return summary, nil } -// analysisCacheKey returns a cache key that is a cryptographic digest +// cacheKey returns a cache key that is a cryptographic digest // of the all the values that might affect type checking and analysis: // the analyzer names, package metadata, names and contents of // compiled Go files, and vdeps (successor) information diff --git a/gopls/internal/golang/completion/util.go b/gopls/internal/golang/completion/util.go index 7a4729413ae..306078296c1 100644 --- a/gopls/internal/golang/completion/util.go +++ b/gopls/internal/golang/completion/util.go @@ -171,7 +171,7 @@ func deslice(T types.Type) types.Type { return nil } -// isSelector returns the enclosing *ast.SelectorExpr when pos is in the +// enclosingSelector returns the enclosing *ast.SelectorExpr when pos is in the // selector. func enclosingSelector(path []ast.Node, pos token.Pos) *ast.SelectorExpr { if len(path) == 0 { diff --git a/gopls/internal/mod/diagnostics.go b/gopls/internal/mod/diagnostics.go index a89c148d7a7..8ad1ece05e7 100644 --- a/gopls/internal/mod/diagnostics.go +++ b/gopls/internal/mod/diagnostics.go @@ -34,7 +34,7 @@ func ParseDiagnostics(ctx context.Context, snapshot *cache.Snapshot) (map[protoc return collectDiagnostics(ctx, snapshot, parseDiagnostics) } -// Diagnostics returns diagnostics from running go mod tidy. +// TidyDiagnostics returns diagnostics from running go mod tidy. func TidyDiagnostics(ctx context.Context, snapshot *cache.Snapshot) (map[protocol.DocumentURI][]*cache.Diagnostic, error) { ctx, done := event.Start(ctx, "mod.Diagnostics", snapshot.Labels()...) defer done() diff --git a/gopls/internal/server/link.go b/gopls/internal/server/link.go index c888904baab..851ec036d4d 100644 --- a/gopls/internal/server/link.go +++ b/gopls/internal/server/link.go @@ -211,7 +211,7 @@ var acceptedSchemes = map[string]bool{ "https": true, } -// urlRegexp is the user-supplied regular expression to match URL. +// findLinksInString is the user-supplied regular expression to match URL. // srcOffset is the start offset of 'src' within m's file. func findLinksInString(urlRegexp *regexp.Regexp, src string, srcOffset int, m *protocol.Mapper) ([]protocol.DocumentLink, error) { var links []protocol.DocumentLink diff --git a/gopls/internal/settings/settings.go b/gopls/internal/settings/settings.go index 59b2aa1b87f..a47a69b0296 100644 --- a/gopls/internal/settings/settings.go +++ b/gopls/internal/settings/settings.go @@ -1387,7 +1387,7 @@ func (o *Options) EnabledSemanticTokenModifiers() map[semtok.Modifier]bool { return copy } -// EncodeSemanticTokenTypes returns a map of types to boolean. +// EnabledSemanticTokenTypes returns a map of types to boolean. 
func (o *Options) EnabledSemanticTokenTypes() map[semtok.Type]bool { copy := make(map[semtok.Type]bool, len(o.SemanticTokenTypes)) for k, v := range o.SemanticTokenTypes { diff --git a/gopls/internal/telemetry/cmd/stacks/stacks.go b/gopls/internal/telemetry/cmd/stacks/stacks.go index 36a675d0eb0..f8caabd67e6 100644 --- a/gopls/internal/telemetry/cmd/stacks/stacks.go +++ b/gopls/internal/telemetry/cmd/stacks/stacks.go @@ -529,7 +529,7 @@ func parsePredicate(s string) (func(string) bool, error) { }, nil } -// claimStack maps each stack ID to its issue (if any). +// claimStacks maps each stack ID to its issue (if any). // // It returns a map of stack text to the issue that claimed it. // From cfd8cf5ce27e4287604691e79b24027ada5c1b7f Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Thu, 20 Mar 2025 10:39:41 -0400 Subject: [PATCH 124/270] internal/astutil/cursor: split Edge into Parent{Edge,Index} The ergonomics are better, even if it requires unpacking twice. Also, add ChildAt(e edge.Kind, index int). Invariant: c.Parent.ChildAt(c.ParentEdge, c.ParentIndex)==c. Change-Id: I7e5c03515a98ceefa44e9a234db3d470cbc93578 Reviewed-on: https://go-review.googlesource.com/c/tools/+/659575 LUCI-TryBot-Result: Go LUCI Reviewed-by: Jonathan Amsterdam Auto-Submit: Alan Donovan --- go/ast/inspector/inspector.go | 2 + gopls/internal/analysis/gofix/gofix.go | 6 +- gopls/internal/analysis/modernize/rangeint.go | 4 +- .../internal/analysis/modernize/stringsseq.go | 2 +- .../analysis/modernize/testingcontext.go | 3 +- .../analysis/unusedparams/unusedparams.go | 3 +- gopls/internal/golang/implementation.go | 3 +- internal/astutil/cursor/cursor.go | 60 ++++++++++++++++--- internal/astutil/cursor/cursor_test.go | 23 +++++-- internal/astutil/cursor/hooks.go | 3 + 10 files changed, 85 insertions(+), 24 deletions(-) diff --git a/go/ast/inspector/inspector.go b/go/ast/inspector/inspector.go index 0d5050fe405..03511d11a2b 100644 --- a/go/ast/inspector/inspector.go +++ b/go/ast/inspector/inspector.go @@ -10,6 +10,7 @@ // builds a list of push/pop events and their node type. Subsequent // method calls that request a traversal scan this list, rather than walk // the AST, and perform type filtering using efficient bit sets. +// This representation is sometimes called a "balanced parenthesis tree". // // Experiments suggest the inspector's traversals are about 2.5x faster // than ast.Inspect, but it may take around 5 traversals for this @@ -50,6 +51,7 @@ type Inspector struct { //go:linkname events func events(in *Inspector) []event { return in.events } +//go:linkname packEdgeKindAndIndex func packEdgeKindAndIndex(ek edge.Kind, index int) int32 { return int32(uint32(index+1)<<7 | uint32(ek)) } diff --git a/gopls/internal/analysis/gofix/gofix.go b/gopls/internal/analysis/gofix/gofix.go index a2380f1d644..333b9a690e7 100644 --- a/gopls/internal/analysis/gofix/gofix.go +++ b/gopls/internal/analysis/gofix/gofix.go @@ -386,13 +386,13 @@ func (a *analyzer) inlineAlias(tn *types.TypeName, curId cursor.Cursor) { // pkg.Id[T] // pkg.Id[K, V] var expr ast.Expr = id - if e, _ := curId.Edge(); e == edge.SelectorExpr_Sel { + if curId.ParentEdge() == edge.SelectorExpr_Sel { curId = curId.Parent() expr = curId.Node().(ast.Expr) } // If expr is part of an IndexExpr or IndexListExpr, we'll need that node. // Given C[int], TypeOf(C) is generic but TypeOf(C[int]) is instantiated. 
- switch ek, _ := curId.Edge(); ek { + switch curId.ParentEdge() { case edge.IndexExpr_X: expr = curId.Parent().Node().(*ast.IndexExpr) case edge.IndexListExpr_X: @@ -548,7 +548,7 @@ func (a *analyzer) inlineConst(con *types.Const, cur cursor.Cursor) { } // If n is qualified by a package identifier, we'll need the full selector expression. var expr ast.Expr = n - if e, _ := cur.Edge(); e == edge.SelectorExpr_Sel { + if cur.ParentEdge() == edge.SelectorExpr_Sel { expr = cur.Parent().Node().(ast.Expr) } a.reportInline("constant", "Constant", expr, edits, importPrefix+incon.RHSName) diff --git a/gopls/internal/analysis/modernize/rangeint.go b/gopls/internal/analysis/modernize/rangeint.go index d51bd79433e..655f5b1c6bf 100644 --- a/gopls/internal/analysis/modernize/rangeint.go +++ b/gopls/internal/analysis/modernize/rangeint.go @@ -215,10 +215,10 @@ func isScalarLvalue(curId cursor.Cursor) bool { cur := curId // Strip enclosing parens. - ek, _ := cur.Edge() + ek := cur.ParentEdge() for ek == edge.ParenExpr_X { cur = cur.Parent() - ek, _ = cur.Edge() + ek = cur.ParentEdge() } switch ek { diff --git a/gopls/internal/analysis/modernize/stringsseq.go b/gopls/internal/analysis/modernize/stringsseq.go index ca9d918912e..51d4053eeb5 100644 --- a/gopls/internal/analysis/modernize/stringsseq.go +++ b/gopls/internal/analysis/modernize/stringsseq.go @@ -55,7 +55,7 @@ func stringsseq(pass *analysis.Pass) { if !ok { if id, ok := rng.X.(*ast.Ident); ok { if v, ok := info.Uses[id].(*types.Var); ok { - if ek, idx := curRange.Edge(); ek == edge.BlockStmt_List && idx > 0 { + if curRange.ParentEdge() == edge.BlockStmt_List && curRange.ParentIndex() > 0 { curPrev, _ := curRange.PrevSibling() if assign, ok := curPrev.Node().(*ast.AssignStmt); ok && assign.Tok == token.DEFINE && diff --git a/gopls/internal/analysis/modernize/testingcontext.go b/gopls/internal/analysis/modernize/testingcontext.go index 9bdc11ccfca..ca4ba1397e3 100644 --- a/gopls/internal/analysis/modernize/testingcontext.go +++ b/gopls/internal/analysis/modernize/testingcontext.go @@ -110,7 +110,8 @@ func testingContext(pass *analysis.Pass) { if curFunc, ok := enclosingFunc(cur); ok { switch n := curFunc.Node().(type) { case *ast.FuncLit: - if e, idx := curFunc.Edge(); e == edge.CallExpr_Args && idx == 1 { + if curFunc.ParentEdge() == edge.CallExpr_Args && + curFunc.ParentIndex() == 1 { // Have: call(..., func(...) { ...context.WithCancel(...)... 
}) obj := typeutil.Callee(info, curFunc.Parent().Node().(*ast.CallExpr)) if (analysisinternal.IsMethodNamed(obj, "testing", "T", "Run") || diff --git a/gopls/internal/analysis/unusedparams/unusedparams.go b/gopls/internal/analysis/unusedparams/unusedparams.go index 2986dfd6e41..9316e6bd5af 100644 --- a/gopls/internal/analysis/unusedparams/unusedparams.go +++ b/gopls/internal/analysis/unusedparams/unusedparams.go @@ -202,7 +202,8 @@ func run(pass *analysis.Pass) (any, error) { case *ast.AssignStmt: // f = func() {...} // f := func() {...} - if e, idx := c.Edge(); e == edge.AssignStmt_Rhs { + if c.ParentEdge() == edge.AssignStmt_Rhs { + idx := c.ParentIndex() // Inv: n == AssignStmt.Rhs[idx] if id, ok := parent.Lhs[idx].(*ast.Ident); ok { fn = pass.TypesInfo.ObjectOf(id) diff --git a/gopls/internal/golang/implementation.go b/gopls/internal/golang/implementation.go index 0ccab640709..7c414dcdb8a 100644 --- a/gopls/internal/golang/implementation.go +++ b/gopls/internal/golang/implementation.go @@ -880,8 +880,7 @@ func funcDefs(pkg *cache.Package, t types.Type) ([]protocol.Location, error) { // beneathFuncDef reports whether the specified FuncType cursor is a // child of Func{Decl,Lit}. func beneathFuncDef(cur cursor.Cursor) bool { - ek, _ := cur.Edge() - switch ek { + switch cur.ParentEdge() { case edge.FuncDecl_Type, edge.FuncLit_Type: return true } diff --git a/internal/astutil/cursor/cursor.go b/internal/astutil/cursor/cursor.go index 83a47e09058..f6691ce0684 100644 --- a/internal/astutil/cursor/cursor.go +++ b/internal/astutil/cursor/cursor.go @@ -209,17 +209,36 @@ func (c Cursor) Parent() Cursor { return Cursor{c.in, c.events()[c.index].parent} } -// Edge returns the identity of the field in the parent node -// that holds this cursor's node, and if it is a list, the index within it. +// ParentEdge returns the identity of the field in the parent node +// that holds this cursor's node. // // For example, f(x, y) is a CallExpr whose three children are Idents. -// f has edge kind [edge.CallExpr_Fun] and index -1. -// x and y have kind [edge.CallExpr_Args] and indices 0 and 1, respectively. +// f has edge kind [edge.CallExpr_Fun] and x and y have kind +// [edge.CallExpr_Args]. // -// Edge must not be called on the Root node (whose [Cursor.Node] returns nil). +// If called on a child of the Root node, it returns [edge.Invalid]. // -// If called on a child of the Root node, it returns ([edge.Invalid], -1). -func (c Cursor) Edge() (edge.Kind, int) { +// ParentEdge must not be called on the Root node (whose [Cursor.Node] returns nil). +func (c Cursor) ParentEdge() edge.Kind { + k, _ := c.parentEdgeAndIndex() + return k +} + +// ParentIndex returns the slice index of this cursor's node within +// the field of the parent node that holds it. +// +// For example, f(x, y) is a CallExpr whose three children are Idents. +// x and y have indices 0 and 1, respectively; f has index -1. +// +// If called on a child of the Root node, it returns -1. +// +// ParentIndex must not be called on the Root node (whose [Cursor.Node] returns nil). +func (c Cursor) ParentIndex() int { + _, idx := c.parentEdgeAndIndex() + return idx +} + +func (c Cursor) parentEdgeAndIndex() (edge.Kind, int) { if c.index < 0 { panic("Cursor.Edge called on Root node") } @@ -228,6 +247,31 @@ func (c Cursor) Edge() (edge.Kind, int) { return unpackEdgeKindAndIndex(events[pop].parent) } +// ChildAt returns the cursor for the child of the +// current node identified by its edge and index. 
+// The index must be -1 if the edge.Kind is not a slice. +// The indicated child node must exist. +// +// ChildAt must not be called on the Root node (whose [Cursor.Node] returns nil). +func (c Cursor) ChildAt(k edge.Kind, idx int) Cursor { + target := packEdgeKindAndIndex(k, idx) + + // Unfortunately there's no shortcut to looping. + events := c.events() + i := c.index + 1 + for { + pop := events[i].index + if pop < i { + break + } + if events[pop].parent == target { + return Cursor{c.in, i} + } + i = pop + 1 + } + panic(fmt.Sprintf("ChildAt(%v, %d): no such child of %v", k, idx, c)) +} + // Child returns the cursor for n, which must be a direct child of c's Node. // // Child must not be called on the Root node (whose [Cursor.Node] returns nil). @@ -355,7 +399,7 @@ func (c Cursor) LastChild() (Cursor, bool) { // So, do not assume that the previous sibling of an ast.Stmt is also // an ast.Stmt, or if it is, that they are executed sequentially, // unless you have established that, say, its parent is a BlockStmt -// or its [Cursor.Edge] is [edge.BlockStmt_List]. +// or its [Cursor.ParentEdge] is [edge.BlockStmt_List]. // For example, given "for S1; ; S2 {}", the predecessor of S2 is S1, // even though they are not executed in sequence. func (c Cursor) Children() iter.Seq[Cursor] { diff --git a/internal/astutil/cursor/cursor_test.go b/internal/astutil/cursor/cursor_test.go index 67e91544c4d..3fd80802c15 100644 --- a/internal/astutil/cursor/cursor_test.go +++ b/internal/astutil/cursor/cursor_test.go @@ -131,9 +131,11 @@ func g() { _ = curFunc.Node().(*ast.FuncDecl) // Check edge and index. - if e, idx := curFunc.Edge(); e != edge.File_Decls || idx != nfuncs { - t.Errorf("%v.Edge() = (%v, %v), want edge.File_Decls, %d", - curFunc, e, idx, nfuncs) + if k := curFunc.ParentEdge(); k != edge.File_Decls { + t.Errorf("%v.ParentEdge() = %v, want edge.File_Decls", curFunc, k) + } + if idx := curFunc.ParentIndex(); idx != nfuncs { + t.Errorf("%v.ParentIndex() = %d, want %d", curFunc, idx, nfuncs) } nfuncs++ @@ -367,8 +369,11 @@ func TestCursor_Edge(t *testing.T) { continue // root node } - e, idx := cur.Edge() - parent := cur.Parent() + var ( + parent = cur.Parent() + e = cur.ParentEdge() + idx = cur.ParentIndex() + ) // ast.File, child of root? if parent.Node() == nil { @@ -384,12 +389,18 @@ func TestCursor_Edge(t *testing.T) { e.NodeType(), parent.Node()) } - // Check consistency of c.Edge.Get(c.Parent().Node()) == c.Node(). + // Check c.Edge.Get(c.Parent.Node) == c.Node. if got := e.Get(parent.Node(), idx); got != cur.Node() { t.Errorf("cur=%v@%s: %s.Get(cur.Parent().Node(), %d) = %T@%s, want cur.Node()", cur, netFset.Position(cur.Node().Pos()), e, idx, got, netFset.Position(got.Pos())) } + // Check c.Parent.ChildAt(c.ParentEdge, c.ParentIndex) == c. + if got := parent.ChildAt(e, idx); got != cur { + t.Errorf("cur=%v@%s: cur.Parent().ChildAt(%v, %d) = %T@%s, want cur", + cur, netFset.Position(cur.Node().Pos()), e, idx, got.Node(), netFset.Position(got.Node().Pos())) + } + // Check that reflection on the parent finds the current node. 
fv := reflect.ValueOf(parent.Node()).Elem().FieldByName(e.FieldName()) if idx >= 0 { diff --git a/internal/astutil/cursor/hooks.go b/internal/astutil/cursor/hooks.go index 8b61f5ddc11..0257d61d778 100644 --- a/internal/astutil/cursor/hooks.go +++ b/internal/astutil/cursor/hooks.go @@ -31,6 +31,9 @@ func maskOf(nodes []ast.Node) uint64 //go:linkname events golang.org/x/tools/go/ast/inspector.events func events(in *inspector.Inspector) []event +//go:linkname packEdgeKindAndIndex golang.org/x/tools/go/ast/inspector.packEdgeKindAndIndex +func packEdgeKindAndIndex(edge.Kind, int) int32 + //go:linkname unpackEdgeKindAndIndex golang.org/x/tools/go/ast/inspector.unpackEdgeKindAndIndex func unpackEdgeKindAndIndex(int32) (edge.Kind, int) From 9b2264a60f31f593728fbda4debf9e87145477fe Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Wed, 19 Mar 2025 16:29:02 -0400 Subject: [PATCH 125/270] gopls/internal/golang/completion: ensure expectedCompositeLiteralType arg is not nil It was possible for expectedCompositeLiteralType to be given a nil *compLitInfo. Check for nilness outside the offending call site and skip the call. Fixes golang/go#72136. Change-Id: Idc661145ae409a19909e0b4e1f74163acb11f5b5 Reviewed-on: https://go-review.googlesource.com/c/tools/+/659375 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan Reviewed-by: Robert Findley --- gopls/internal/golang/completion/completion.go | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/gopls/internal/golang/completion/completion.go b/gopls/internal/golang/completion/completion.go index 19c50453017..600219370b9 100644 --- a/gopls/internal/golang/completion/completion.go +++ b/gopls/internal/golang/completion/completion.go @@ -2001,6 +2001,8 @@ func (c *completer) structLiteralFieldName(ctx context.Context) error { // enclosingCompositeLiteral returns information about the composite literal enclosing the // position. +// It returns nil on failure; for example, if there is no type information for a +// node on path. func enclosingCompositeLiteral(path []ast.Node, pos token.Pos, info *types.Info) *compLitInfo { for _, n := range path { switch n := n.(type) { @@ -2565,7 +2567,7 @@ func inferExpectedResultTypes(c *completer, callNodeIdx int) []types.Type { switch node := c.path[callNodeIdx+1].(type) { case *ast.KeyValueExpr: enclosingCompositeLiteral := enclosingCompositeLiteral(c.path[callNodeIdx:], callNode.Pos(), c.pkg.TypesInfo()) - if !wantStructFieldCompletions(enclosingCompositeLiteral) { + if enclosingCompositeLiteral != nil && !wantStructFieldCompletions(enclosingCompositeLiteral) { expectedResults = append(expectedResults, expectedCompositeLiteralType(enclosingCompositeLiteral, callNode.Pos())) } case *ast.AssignStmt: From c2768b73f46671714e183a72727bf0b36e828f9b Mon Sep 17 00:00:00 2001 From: Peter Weinberger Date: Fri, 21 Mar 2025 10:03:02 -0400 Subject: [PATCH 126/270] gopls/modernize: remove unused functions These functions should have been removed in a previous CL. 
Change-Id: I3e25a49ce2660a472b25f4b1a4a91bfdc4739fde Reviewed-on: https://go-review.googlesource.com/c/tools/+/659895 Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI --- gopls/internal/analysis/modernize/forvar.go | 22 --------------------- 1 file changed, 22 deletions(-) diff --git a/gopls/internal/analysis/modernize/forvar.go b/gopls/internal/analysis/modernize/forvar.go index 3a7eee4be9c..6f88ab77ed9 100644 --- a/gopls/internal/analysis/modernize/forvar.go +++ b/gopls/internal/analysis/modernize/forvar.go @@ -73,28 +73,6 @@ func forvar(pass *analysis.Pass) { } } -// if the expression is an Ident, return its name -func simplevar(expr ast.Expr) string { - if expr == nil { - return "" - } - if ident, ok := expr.(*ast.Ident); ok { - return ident.Name - } - return "" -} - -func usefulRangeVars(loop *ast.RangeStmt) []string { - ans := make([]string, 0, 2) - if v := simplevar(loop.Key); v != "" { - ans = append(ans, v) - } - if v := simplevar(loop.Value); v != "" { - ans = append(ans, v) - } - return ans -} - // if the first statement is var := var, return var and the stmt func loopVarRedecl(body *ast.BlockStmt) (*ast.Ident, *ast.AssignStmt) { if len(body.List) < 1 { From cb292c67c19fd2ca6a9be9ad561c136043fd0472 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Thu, 20 Mar 2025 13:35:25 -0400 Subject: [PATCH 127/270] internal/astutil/cursor: unsplit Parent{Edge,Index} -> ParentEdge This CL is a partial revert of CL 659575, which was merged by mistake. During review, we agreed that the original form was more logical (if sometimes inconvenient), but that we liked the new name, and the new ChildAt function. Change-Id: Ib1d4e0dfa8cefdb944eb1394be946006e8285390 Reviewed-on: https://go-review.googlesource.com/c/tools/+/659635 Auto-Submit: Alan Donovan LUCI-TryBot-Result: Go LUCI Reviewed-by: Robert Findley --- go/ast/inspector/inspector.go | 2 +- gopls/internal/analysis/gofix/gofix.go | 6 ++-- gopls/internal/analysis/modernize/rangeint.go | 4 +-- .../internal/analysis/modernize/stringsseq.go | 2 +- .../analysis/modernize/testingcontext.go | 3 +- .../analysis/unusedparams/unusedparams.go | 3 +- gopls/internal/golang/implementation.go | 2 +- internal/astutil/cursor/cursor.go | 33 +++++-------------- internal/astutil/cursor/cursor_test.go | 12 +++---- 9 files changed, 22 insertions(+), 45 deletions(-) diff --git a/go/ast/inspector/inspector.go b/go/ast/inspector/inspector.go index 03511d11a2b..1da4a361f0b 100644 --- a/go/ast/inspector/inspector.go +++ b/go/ast/inspector/inspector.go @@ -10,7 +10,7 @@ // builds a list of push/pop events and their node type. Subsequent // method calls that request a traversal scan this list, rather than walk // the AST, and perform type filtering using efficient bit sets. -// This representation is sometimes called a "balanced parenthesis tree". +// This representation is sometimes called a "balanced parenthesis tree." 
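+// (Each node contributes a push event and a matching pop event; the events
+// of the node's subtree nest between them, like matched parentheses.)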
// // Experiments suggest the inspector's traversals are about 2.5x faster // than ast.Inspect, but it may take around 5 traversals for this diff --git a/gopls/internal/analysis/gofix/gofix.go b/gopls/internal/analysis/gofix/gofix.go index 333b9a690e7..bff4120a39a 100644 --- a/gopls/internal/analysis/gofix/gofix.go +++ b/gopls/internal/analysis/gofix/gofix.go @@ -386,13 +386,13 @@ func (a *analyzer) inlineAlias(tn *types.TypeName, curId cursor.Cursor) { // pkg.Id[T] // pkg.Id[K, V] var expr ast.Expr = id - if curId.ParentEdge() == edge.SelectorExpr_Sel { + if ek, _ := curId.ParentEdge(); ek == edge.SelectorExpr_Sel { curId = curId.Parent() expr = curId.Node().(ast.Expr) } // If expr is part of an IndexExpr or IndexListExpr, we'll need that node. // Given C[int], TypeOf(C) is generic but TypeOf(C[int]) is instantiated. - switch curId.ParentEdge() { + switch ek, _ := curId.ParentEdge(); ek { case edge.IndexExpr_X: expr = curId.Parent().Node().(*ast.IndexExpr) case edge.IndexListExpr_X: @@ -548,7 +548,7 @@ func (a *analyzer) inlineConst(con *types.Const, cur cursor.Cursor) { } // If n is qualified by a package identifier, we'll need the full selector expression. var expr ast.Expr = n - if cur.ParentEdge() == edge.SelectorExpr_Sel { + if ek, _ := cur.ParentEdge(); ek == edge.SelectorExpr_Sel { expr = cur.Parent().Node().(ast.Expr) } a.reportInline("constant", "Constant", expr, edits, importPrefix+incon.RHSName) diff --git a/gopls/internal/analysis/modernize/rangeint.go b/gopls/internal/analysis/modernize/rangeint.go index 655f5b1c6bf..3d3b33f4a97 100644 --- a/gopls/internal/analysis/modernize/rangeint.go +++ b/gopls/internal/analysis/modernize/rangeint.go @@ -215,10 +215,10 @@ func isScalarLvalue(curId cursor.Cursor) bool { cur := curId // Strip enclosing parens. - ek := cur.ParentEdge() + ek, _ := cur.ParentEdge() for ek == edge.ParenExpr_X { cur = cur.Parent() - ek = cur.ParentEdge() + ek, _ = cur.ParentEdge() } switch ek { diff --git a/gopls/internal/analysis/modernize/stringsseq.go b/gopls/internal/analysis/modernize/stringsseq.go index 51d4053eeb5..a26b8da705c 100644 --- a/gopls/internal/analysis/modernize/stringsseq.go +++ b/gopls/internal/analysis/modernize/stringsseq.go @@ -55,7 +55,7 @@ func stringsseq(pass *analysis.Pass) { if !ok { if id, ok := rng.X.(*ast.Ident); ok { if v, ok := info.Uses[id].(*types.Var); ok { - if curRange.ParentEdge() == edge.BlockStmt_List && curRange.ParentIndex() > 0 { + if ek, idx := curRange.ParentEdge(); ek == edge.BlockStmt_List && idx > 0 { curPrev, _ := curRange.PrevSibling() if assign, ok := curPrev.Node().(*ast.AssignStmt); ok && assign.Tok == token.DEFINE && diff --git a/gopls/internal/analysis/modernize/testingcontext.go b/gopls/internal/analysis/modernize/testingcontext.go index ca4ba1397e3..05c0b811ab7 100644 --- a/gopls/internal/analysis/modernize/testingcontext.go +++ b/gopls/internal/analysis/modernize/testingcontext.go @@ -110,8 +110,7 @@ func testingContext(pass *analysis.Pass) { if curFunc, ok := enclosingFunc(cur); ok { switch n := curFunc.Node().(type) { case *ast.FuncLit: - if curFunc.ParentEdge() == edge.CallExpr_Args && - curFunc.ParentIndex() == 1 { + if ek, idx := curFunc.ParentEdge(); ek == edge.CallExpr_Args && idx == 1 { // Have: call(..., func(...) { ...context.WithCancel(...)... 
}) obj := typeutil.Callee(info, curFunc.Parent().Node().(*ast.CallExpr)) if (analysisinternal.IsMethodNamed(obj, "testing", "T", "Run") || diff --git a/gopls/internal/analysis/unusedparams/unusedparams.go b/gopls/internal/analysis/unusedparams/unusedparams.go index 9316e6bd5af..559b65d2bc2 100644 --- a/gopls/internal/analysis/unusedparams/unusedparams.go +++ b/gopls/internal/analysis/unusedparams/unusedparams.go @@ -202,8 +202,7 @@ func run(pass *analysis.Pass) (any, error) { case *ast.AssignStmt: // f = func() {...} // f := func() {...} - if c.ParentEdge() == edge.AssignStmt_Rhs { - idx := c.ParentIndex() + if ek, idx := c.ParentEdge(); ek == edge.AssignStmt_Rhs { // Inv: n == AssignStmt.Rhs[idx] if id, ok := parent.Lhs[idx].(*ast.Ident); ok { fn = pass.TypesInfo.ObjectOf(id) diff --git a/gopls/internal/golang/implementation.go b/gopls/internal/golang/implementation.go index 7c414dcdb8a..93ac8879550 100644 --- a/gopls/internal/golang/implementation.go +++ b/gopls/internal/golang/implementation.go @@ -880,7 +880,7 @@ func funcDefs(pkg *cache.Package, t types.Type) ([]protocol.Location, error) { // beneathFuncDef reports whether the specified FuncType cursor is a // child of Func{Decl,Lit}. func beneathFuncDef(cur cursor.Cursor) bool { - switch cur.ParentEdge() { + switch ek, _ := cur.ParentEdge(); ek { case edge.FuncDecl_Type, edge.FuncLit_Type: return true } diff --git a/internal/astutil/cursor/cursor.go b/internal/astutil/cursor/cursor.go index f6691ce0684..889733ed92f 100644 --- a/internal/astutil/cursor/cursor.go +++ b/internal/astutil/cursor/cursor.go @@ -210,37 +210,18 @@ func (c Cursor) Parent() Cursor { } // ParentEdge returns the identity of the field in the parent node -// that holds this cursor's node. +// that holds this cursor's node, and if it is a list, the index within it. // // For example, f(x, y) is a CallExpr whose three children are Idents. -// f has edge kind [edge.CallExpr_Fun] and x and y have kind -// [edge.CallExpr_Args]. +// f has edge kind [edge.CallExpr_Fun] and index -1. +// x and y have kind [edge.CallExpr_Args] and indices 0 and 1, respectively. // -// If called on a child of the Root node, it returns [edge.Invalid]. +// If called on a child of the Root node, it returns ([edge.Invalid], -1). // // ParentEdge must not be called on the Root node (whose [Cursor.Node] returns nil). -func (c Cursor) ParentEdge() edge.Kind { - k, _ := c.parentEdgeAndIndex() - return k -} - -// ParentIndex returns the slice index of this cursor's node within -// the field of the parent node that holds it. -// -// For example, f(x, y) is a CallExpr whose three children are Idents. -// x and y have indices 0 and 1, respectively; f has index -1. -// -// If called on a child of the Root node, it returns -1. -// -// ParentIndex must not be called on the Root node (whose [Cursor.Node] returns nil). -func (c Cursor) ParentIndex() int { - _, idx := c.parentEdgeAndIndex() - return idx -} - -func (c Cursor) parentEdgeAndIndex() (edge.Kind, int) { +func (c Cursor) ParentEdge() (edge.Kind, int) { if c.index < 0 { - panic("Cursor.Edge called on Root node") + panic("Cursor.ParentEdge called on Root node") } events := c.events() pop := events[c.index].index @@ -253,6 +234,8 @@ func (c Cursor) parentEdgeAndIndex() (edge.Kind, int) { // The indicated child node must exist. // // ChildAt must not be called on the Root node (whose [Cursor.Node] returns nil). +// +// Invariant: c.Parent().ChildAt(c.ParentEdge()) == c. 
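+//
+// For example, if c is the cursor for a call f(x, y), then
+// c.ChildAt(edge.CallExpr_Args, 0) is the cursor for the argument x,
+// and c.ChildAt(edge.CallExpr_Fun, -1) is the cursor for f.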
func (c Cursor) ChildAt(k edge.Kind, idx int) Cursor { target := packEdgeKindAndIndex(k, idx) diff --git a/internal/astutil/cursor/cursor_test.go b/internal/astutil/cursor/cursor_test.go index 3fd80802c15..76e7232bc86 100644 --- a/internal/astutil/cursor/cursor_test.go +++ b/internal/astutil/cursor/cursor_test.go @@ -131,11 +131,8 @@ func g() { _ = curFunc.Node().(*ast.FuncDecl) // Check edge and index. - if k := curFunc.ParentEdge(); k != edge.File_Decls { - t.Errorf("%v.ParentEdge() = %v, want edge.File_Decls", curFunc, k) - } - if idx := curFunc.ParentIndex(); idx != nfuncs { - t.Errorf("%v.ParentIndex() = %d, want %d", curFunc, idx, nfuncs) + if k, idx := curFunc.ParentEdge(); k != edge.File_Decls || idx != nfuncs { + t.Errorf("%v.ParentEdge() = (%v, %d), want edge.File_Decls, %d", curFunc, k, idx, nfuncs) } nfuncs++ @@ -371,8 +368,7 @@ func TestCursor_Edge(t *testing.T) { var ( parent = cur.Parent() - e = cur.ParentEdge() - idx = cur.ParentIndex() + e, idx = cur.ParentEdge() ) // ast.File, child of root? @@ -395,7 +391,7 @@ func TestCursor_Edge(t *testing.T) { cur, netFset.Position(cur.Node().Pos()), e, idx, got, netFset.Position(got.Pos())) } - // Check c.Parent.ChildAt(c.ParentEdge, c.ParentIndex) == c. + // Check c.Parent.ChildAt(c.ParentEdge()) == c. if got := parent.ChildAt(e, idx); got != cur { t.Errorf("cur=%v@%s: cur.Parent().ChildAt(%v, %d) = %T@%s, want cur", cur, netFset.Position(cur.Node().Pos()), e, idx, got.Node(), netFset.Position(got.Node().Pos())) From 9abefc5f9e63648ff7076e2483dae802a82142cf Mon Sep 17 00:00:00 2001 From: xieyuschen Date: Thu, 20 Mar 2025 01:47:36 -0600 Subject: [PATCH 128/270] gopls/internal/analysis/modernize: permit int/uint type variants in rangeint This CL tracks the todo and enhances the rangeint modernizer to provide fixes for all int/uint variants, such as 'for i := int32(0); ...'. It now supports all int/uint variants when defining a new index variable with a zero value, including: - int8, int16, int32, int64 - uint, uint8, uint16, uint32, uint64 In addition to the newly supported variants, this CL also handles zero float literals explicitly cast to integer/unsigned integer types, such as: - 'for i := int32(0.); ...' - 'for i := int32(.0); ...' 
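
For example (mirroring the new test cases), the loop

    for j := int32(0); j < 10; j++ { println(j) }

is now offered the fix

    for j := range int32(10) { println(j) }

while an int-typed index such as 'for j := int(0); j < 10; j++'
becomes plain 'for j := range 10'.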
Change-Id: I1058d78eb6d3cbd8318c8c7e0d6b951ef0a5648c Reviewed-on: https://go-review.googlesource.com/c/tools/+/659155 Auto-Submit: Alan Donovan LUCI-TryBot-Result: Go LUCI Reviewed-by: Robert Findley Reviewed-by: Alan Donovan --- gopls/internal/analysis/modernize/bloop.go | 2 +- .../internal/analysis/modernize/modernize.go | 8 +-- gopls/internal/analysis/modernize/rangeint.go | 16 +++-- gopls/internal/analysis/modernize/slices.go | 2 +- .../testdata/src/rangeint/rangeint.go | 58 +++++++++++++++++++ .../testdata/src/rangeint/rangeint.go.golden | 58 +++++++++++++++++++ 6 files changed, 133 insertions(+), 11 deletions(-) diff --git a/gopls/internal/analysis/modernize/bloop.go b/gopls/internal/analysis/modernize/bloop.go index f851a6688e1..a70246b5e0e 100644 --- a/gopls/internal/analysis/modernize/bloop.go +++ b/gopls/internal/analysis/modernize/bloop.go @@ -101,7 +101,7 @@ func bloop(pass *analysis.Pass) { if assign, ok := n.Init.(*ast.AssignStmt); ok && assign.Tok == token.DEFINE && len(assign.Rhs) == 1 && - isZeroLiteral(assign.Rhs[0]) && + isZeroIntLiteral(info, assign.Rhs[0]) && is[*ast.IncDecStmt](n.Post) && n.Post.(*ast.IncDecStmt).Tok == token.INC && equalSyntax(n.Post.(*ast.IncDecStmt).X, assign.Lhs[0]) && diff --git a/gopls/internal/analysis/modernize/modernize.go b/gopls/internal/analysis/modernize/modernize.go index 75f5b4014b6..4c49f6d1ecf 100644 --- a/gopls/internal/analysis/modernize/modernize.go +++ b/gopls/internal/analysis/modernize/modernize.go @@ -7,6 +7,7 @@ package modernize import ( _ "embed" "go/ast" + "go/constant" "go/format" "go/token" "go/types" @@ -117,10 +118,9 @@ func formatExprs(fset *token.FileSet, exprs []ast.Expr) string { return buf.String() } -// isZeroLiteral reports whether e is the literal 0. -func isZeroLiteral(e ast.Expr) bool { - lit, ok := e.(*ast.BasicLit) - return ok && lit.Kind == token.INT && lit.Value == "0" +// isZeroIntLiteral reports whether e is an integer whose value is 0. +func isZeroIntLiteral(info *types.Info, e ast.Expr) bool { + return info.Types[e].Value == constant.MakeInt64(0) } // filesUsing returns a cursor for each *ast.File in the inspector diff --git a/gopls/internal/analysis/modernize/rangeint.go b/gopls/internal/analysis/modernize/rangeint.go index 3d3b33f4a97..2a500085e01 100644 --- a/gopls/internal/analysis/modernize/rangeint.go +++ b/gopls/internal/analysis/modernize/rangeint.go @@ -31,8 +31,6 @@ import ( // - The ':=' may be replaced by '='. // - The fix may remove "i :=" if it would become unused. // -// TODO(adonovan): permit variants such as "i := int64(0)". -// // Restrictions: // - The variable i must not be assigned or address-taken within the // loop, because a "for range int" loop does not respect assignments @@ -54,7 +52,7 @@ func rangeint(pass *analysis.Pass) { if init, ok := loop.Init.(*ast.AssignStmt); ok && isSimpleAssign(init) && is[*ast.Ident](init.Lhs[0]) && - isZeroLiteral(init.Rhs[0]) { + isZeroIntLiteral(info, init.Rhs[0]) { // Have: for i = 0; ... (or i := 0) index := init.Lhs[0].(*ast.Ident) @@ -145,11 +143,19 @@ func rangeint(pass *analysis.Pass) { // re-type check the expression to detect this case. var beforeLimit, afterLimit string if v := info.Types[limit].Value; v != nil { - beforeLimit, afterLimit = "int(", ")" + tVar := info.TypeOf(init.Rhs[0]) + + // TODO(adonovan): use a types.Qualifier that respects the existing + // imports of this file that are visible (not shadowed) at the current position, + // and adds new imports as needed, similar to analysisinternal.AddImport. 
+ // (Unfortunately types.Qualifier doesn't provide the name of the package + // member to be qualified, a qualifier cannot perform the necessary shadowing + // check for dot-imported names.) + beforeLimit, afterLimit = fmt.Sprintf("%s(", types.TypeString(tVar, (*types.Package).Name)), ")" info2 := &types.Info{Types: make(map[ast.Expr]types.TypeAndValue)} if types.CheckExpr(pass.Fset, pass.Pkg, limit.Pos(), limit, info2) == nil { tLimit := types.Default(info2.TypeOf(limit)) - if types.AssignableTo(tLimit, types.Typ[types.Int]) { + if types.AssignableTo(tLimit, tVar) { beforeLimit, afterLimit = "", "" } } diff --git a/gopls/internal/analysis/modernize/slices.go b/gopls/internal/analysis/modernize/slices.go index 7e0d9cbd92e..22999b60cc5 100644 --- a/gopls/internal/analysis/modernize/slices.go +++ b/gopls/internal/analysis/modernize/slices.go @@ -216,7 +216,7 @@ func clippedSlice(info *types.Info, e ast.Expr) (res ast.Expr, empty bool) { // x[:0:0], x[:len(x):len(x)], x[:k:k] if e.Slice3 && e.High != nil && e.Max != nil && equalSyntax(e.High, e.Max) { // x[:k:k] res = e - empty = isZeroLiteral(e.High) // x[:0:0] + empty = isZeroIntLiteral(info, e.High) // x[:0:0] if call, ok := e.High.(*ast.CallExpr); ok && typeutil.Callee(info, call) == builtinLen && equalSyntax(call.Args[0], e.X) { diff --git a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go index 915f122b4fc..b2a7459e5a3 100644 --- a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go +++ b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go @@ -1,9 +1,51 @@ package rangeint +import ( + "os" + os1 "os" +) + func _(i int, s struct{ i int }, slice []int) { for i := 0; i < 10; i++ { // want "for loop can be modernized using range over int" println(i) } + for j := int(0); j < 10; j++ { // want "for loop can be modernized using range over int" + println(j) + } + for j := int8(0); j < 10; j++ { // want "for loop can be modernized using range over int" + println(j) + } + for j := int16(0); j < 10; j++ { // want "for loop can be modernized using range over int" + println(j) + } + for j := int32(0); j < 10; j++ { // want "for loop can be modernized using range over int" + println(j) + } + for j := int64(0); j < 10; j++ { // want "for loop can be modernized using range over int" + println(j) + } + for j := uint8(0); j < 10; j++ { // want "for loop can be modernized using range over int" + println(j) + } + for j := uint16(0); j < 10; j++ { // want "for loop can be modernized using range over int" + println(j) + } + for j := uint32(0); j < 10; j++ { // want "for loop can be modernized using range over int" + println(j) + } + for j := uint64(0); j < 10; j++ { // want "for loop can be modernized using range over int" + println(j) + } + for j := int8(0.); j < 10; j++ { // want "for loop can be modernized using range over int" + println(j) + } + for j := int8(.0); j < 10; j++ { // want "for loop can be modernized using range over int" + println(j) + } + for j := os.FileMode(0); j < 10; j++ { // want "for loop can be modernized using range over int" + println(j) + } + { var i int for i = 0; i < f(); i++ { // want "for loop can be modernized using range over int" @@ -21,6 +63,12 @@ func _(i int, s struct{ i int }, slice []int) { } // nope + for j := .0; j < 10; j++ { // nope: j is a float type + println(j) + } + for j := float64(0); j < 10; j++ { // nope: j is a float type + println(j) + } for i := 0; i < 10; { // nope: missing 
increment } for i := 0; i < 10; i-- { // nope: negative increment @@ -72,6 +120,10 @@ func issue71847d() { const limit = 1e3 // float for i := 0; i < limit; i++ { // want "for loop can be modernized using range over int" } + for i := int(0); i < limit; i++ { // want "for loop can be modernized using range over int" + } + for i := uint(0); i < limit; i++ { // want "for loop can be modernized using range over int" + } const limit2 = 1 + 0i // complex for i := 0; i < limit2; i++ { // want "for loop can be modernized using range over int" @@ -98,3 +150,9 @@ func issue72726() { for i = 0; i < arr[i]; i++ { // nope } } + +func todo() { + for j := os1.FileMode(0); j < 10; j++ { // want "for loop can be modernized using range over int" + println(j) + } +} diff --git a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden index bd76ce688bb..f323879e13f 100644 --- a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden +++ b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden @@ -1,9 +1,51 @@ package rangeint +import ( + "os" + os1 "os" +) + func _(i int, s struct{ i int }, slice []int) { for i := range 10 { // want "for loop can be modernized using range over int" println(i) } + for j := range 10 { // want "for loop can be modernized using range over int" + println(j) + } + for j := range int8(10) { // want "for loop can be modernized using range over int" + println(j) + } + for j := range int16(10) { // want "for loop can be modernized using range over int" + println(j) + } + for j := range int32(10) { // want "for loop can be modernized using range over int" + println(j) + } + for j := range int64(10) { // want "for loop can be modernized using range over int" + println(j) + } + for j := range uint8(10) { // want "for loop can be modernized using range over int" + println(j) + } + for j := range uint16(10) { // want "for loop can be modernized using range over int" + println(j) + } + for j := range uint32(10) { // want "for loop can be modernized using range over int" + println(j) + } + for j := range uint64(10) { // want "for loop can be modernized using range over int" + println(j) + } + for j := range int8(10) { // want "for loop can be modernized using range over int" + println(j) + } + for j := range int8(10) { // want "for loop can be modernized using range over int" + println(j) + } + for j := range os.FileMode(10) { // want "for loop can be modernized using range over int" + println(j) + } + { var i int for i = range f() { // want "for loop can be modernized using range over int" @@ -21,6 +63,12 @@ func _(i int, s struct{ i int }, slice []int) { } // nope + for j := .0; j < 10; j++ { // nope: j is a float type + println(j) + } + for j := float64(0); j < 10; j++ { // nope: j is a float type + println(j) + } for i := 0; i < 10; { // nope: missing increment } for i := 0; i < 10; i-- { // nope: negative increment @@ -72,6 +120,10 @@ func issue71847d() { const limit = 1e3 // float for range int(limit) { // want "for loop can be modernized using range over int" } + for range int(limit) { // want "for loop can be modernized using range over int" + } + for range uint(limit) { // want "for loop can be modernized using range over int" + } const limit2 = 1 + 0i // complex for range int(limit2) { // want "for loop can be modernized using range over int" @@ -98,3 +150,9 @@ func issue72726() { for i = 0; i < arr[i]; i++ { // nope } } + +func todo() { + for j := 
range os.FileMode(10) { // want "for loop can be modernized using range over int" + println(j) + } +} From 084551fb2c220b27775cbeb1a4b884dd29aac742 Mon Sep 17 00:00:00 2001 From: Kyle Weingartner Date: Fri, 21 Mar 2025 10:56:03 -0700 Subject: [PATCH 129/270] go/analysis/passes/maprange: check for redundant Keys/Values calls Add an analyzer for redundant use of the functions maps.Keys and maps.Values in "for" statements with "range" clauses. Updates golang/go#72908 Change-Id: I19589dc42fa9cc2465c6d5aa1542175af6aaa6ea Reviewed-on: https://go-review.googlesource.com/c/tools/+/658695 Auto-Submit: Alan Donovan Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI --- .../analysis/maprange/cmd/maprange/main.go | 14 ++ gopls/internal/analysis/maprange/doc.go | 37 ++++ gopls/internal/analysis/maprange/maprange.go | 155 +++++++++++++ .../analysis/maprange/maprange_test.go | 23 ++ .../analysis/maprange/testdata/basic.txtar | 209 ++++++++++++++++++ .../analysis/maprange/testdata/old.txtar | 62 ++++++ 6 files changed, 500 insertions(+) create mode 100644 gopls/internal/analysis/maprange/cmd/maprange/main.go create mode 100644 gopls/internal/analysis/maprange/doc.go create mode 100644 gopls/internal/analysis/maprange/maprange.go create mode 100644 gopls/internal/analysis/maprange/maprange_test.go create mode 100644 gopls/internal/analysis/maprange/testdata/basic.txtar create mode 100644 gopls/internal/analysis/maprange/testdata/old.txtar diff --git a/gopls/internal/analysis/maprange/cmd/maprange/main.go b/gopls/internal/analysis/maprange/cmd/maprange/main.go new file mode 100644 index 00000000000..ec1fd5ca93c --- /dev/null +++ b/gopls/internal/analysis/maprange/cmd/maprange/main.go @@ -0,0 +1,14 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// The maprange command applies the golang.org/x/tools/gopls/internal/analysis/maprange +// analysis to the specified packages of Go source code. +package main + +import ( + "golang.org/x/tools/go/analysis/singlechecker" + "golang.org/x/tools/gopls/internal/analysis/maprange" +) + +func main() { singlechecker.Main(maprange.Analyzer) } diff --git a/gopls/internal/analysis/maprange/doc.go b/gopls/internal/analysis/maprange/doc.go new file mode 100644 index 00000000000..46f465059a9 --- /dev/null +++ b/gopls/internal/analysis/maprange/doc.go @@ -0,0 +1,37 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package maprange defines an Analyzer that checks for redundant use +// of the functions maps.Keys and maps.Values in "for" statements with +// "range" clauses. 
+// +// # Analyzer maprange +// +// maprange: checks for unnecessary calls to maps.Keys and maps.Values in range statements +// +// Consider a loop written like this: +// +// for val := range maps.Values(m) { +// fmt.Println(val) +// } +// +// This should instead be written without the call to maps.Values: +// +// for _, val := range m { +// fmt.Println(val) +// } +// +// golang.org/x/exp/maps returns slices for Keys/Values instead of iterators, +// but unnecessary calls should similarly be removed: +// +// for _, key := range maps.Keys(m) { +// fmt.Println(key) +// } +// +// should be rewritten as: +// +// for key := range m { +// fmt.Println(key) +// } +package maprange diff --git a/gopls/internal/analysis/maprange/maprange.go b/gopls/internal/analysis/maprange/maprange.go new file mode 100644 index 00000000000..c3990f9ea75 --- /dev/null +++ b/gopls/internal/analysis/maprange/maprange.go @@ -0,0 +1,155 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package maprange + +import ( + _ "embed" + "fmt" + "go/ast" + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/versions" +) + +//go:embed doc.go +var doc string + +var Analyzer = &analysis.Analyzer{ + Name: "maprange", + Doc: analysisinternal.MustExtractDoc(doc, "maprange"), + URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/maprange", + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Run: run, +} + +// This is a variable because the package name is different in Google's code base. +var xmaps = "golang.org/x/exp/maps" + +func run(pass *analysis.Pass) (any, error) { + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + switch pass.Pkg.Path() { + case "maps", xmaps: + // These packages know how to use their own APIs. + return nil, nil + } + + if !(analysisinternal.Imports(pass.Pkg, "maps") || analysisinternal.Imports(pass.Pkg, xmaps)) { + return nil, nil // fast path + } + + inspect.Preorder([]ast.Node{(*ast.RangeStmt)(nil)}, func(n ast.Node) { + rangeStmt, ok := n.(*ast.RangeStmt) + if !ok { + return + } + call, ok := rangeStmt.X.(*ast.CallExpr) + if !ok { + return + } + callee := typeutil.Callee(pass.TypesInfo, call) + if !analysisinternal.IsFunctionNamed(callee, "maps", "Keys", "Values") && + !analysisinternal.IsFunctionNamed(callee, xmaps, "Keys", "Values") { + return + } + version := pass.Pkg.GoVersion() + pkg, fn := callee.Pkg().Path(), callee.Name() + key, value := rangeStmt.Key, rangeStmt.Value + + edits := editRangeStmt(pass, version, pkg, fn, key, value, call) + if len(edits) > 0 { + pass.Report(analysis.Diagnostic{ + Pos: call.Pos(), + End: call.End(), + Message: fmt.Sprintf("unnecessary and inefficient call of %s.%s", pkg, fn), + SuggestedFixes: []analysis.SuggestedFix{{ + Message: fmt.Sprintf("Remove unnecessary call to %s.%s", pkg, fn), + TextEdits: edits, + }}, + }) + } + }) + + return nil, nil +} + +// editRangeStmt returns edits to transform a range statement that calls +// maps.Keys or maps.Values (either the stdlib or the x/exp/maps version). +// +// It reports a diagnostic if an edit cannot be made because the Go version is too old. +// +// It returns nil if no edits are needed. 
+func editRangeStmt(pass *analysis.Pass, version, pkg, fn string, key, value ast.Expr, call *ast.CallExpr) []analysis.TextEdit { + var edits []analysis.TextEdit + + // Check if the call to maps.Keys or maps.Values can be removed/replaced. + // Example: + // for range maps.Keys(m) + // ^^^^^^^^^ removeCall + // for i, _ := range maps.Keys(m) + // ^^^^^^^^^ replace with `len` + // + // If we have: for i, k := range maps.Keys(m) (only possible using x/exp/maps) + // or: for i, v = range maps.Values(m) + // do not remove the call. + removeCall := !isSet(key) || !isSet(value) + replace := "" + if pkg == xmaps && isSet(key) && value == nil { + // If we have: for i := range maps.Keys(m) (using x/exp/maps), + // Replace with: for i := range len(m) + replace = "len" + } + if removeCall { + edits = append(edits, analysis.TextEdit{ + Pos: call.Fun.Pos(), + End: call.Fun.End(), + NewText: []byte(replace)}) + } + // Check if the key of the range statement should be removed. + // Example: + // for _, k := range maps.Keys(m) + // ^^^ removeKey ^^^^^^^^^ removeCall + removeKey := pkg == xmaps && fn == "Keys" && !isSet(key) && isSet(value) + if removeKey { + edits = append(edits, analysis.TextEdit{ + Pos: key.Pos(), + End: value.Pos(), + }) + } + // Check if a key should be inserted to the range statement. + // Example: + // for _, v := range maps.Values(m) + // ^^^ addKey ^^^^^^^^^^^ removeCall + addKey := pkg == "maps" && fn == "Values" && isSet(key) + if addKey { + edits = append(edits, analysis.TextEdit{ + Pos: key.Pos(), + End: key.Pos(), + NewText: []byte("_, "), + }) + } + + // Range over int was added in Go 1.22. + // If the Go version is too old, report a diagnostic but do not make any edits. + if replace == "len" && versions.Before(pass.Pkg.GoVersion(), versions.Go1_22) { + pass.Report(analysis.Diagnostic{ + Pos: call.Pos(), + End: call.End(), + Message: fmt.Sprintf("likely incorrect use of %s.%s (returns a slice)", pkg, fn), + }) + return nil + } + + return edits +} + +// isSet reports whether an ast.Expr is a non-nil expression that is not the blank identifier. +func isSet(expr ast.Expr) bool { + ident, ok := expr.(*ast.Ident) + return expr != nil && (!ok || ident.Name != "_") +} diff --git a/gopls/internal/analysis/maprange/maprange_test.go b/gopls/internal/analysis/maprange/maprange_test.go new file mode 100644 index 00000000000..1759dc1db99 --- /dev/null +++ b/gopls/internal/analysis/maprange/maprange_test.go @@ -0,0 +1,23 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package maprange_test + +import ( + "golang.org/x/tools/go/analysis/analysistest" + "golang.org/x/tools/gopls/internal/analysis/maprange" + "golang.org/x/tools/internal/testfiles" + "path/filepath" + "testing" +) + +func TestBasic(t *testing.T) { + dir := testfiles.ExtractTxtarFileToTmp(t, filepath.Join(analysistest.TestData(), "basic.txtar")) + analysistest.RunWithSuggestedFixes(t, dir, maprange.Analyzer, "maprange") +} + +func TestOld(t *testing.T) { + dir := testfiles.ExtractTxtarFileToTmp(t, filepath.Join(analysistest.TestData(), "old.txtar")) + analysistest.RunWithSuggestedFixes(t, dir, maprange.Analyzer, "maprange") +} diff --git a/gopls/internal/analysis/maprange/testdata/basic.txtar b/gopls/internal/analysis/maprange/testdata/basic.txtar new file mode 100644 index 00000000000..1950e958218 --- /dev/null +++ b/gopls/internal/analysis/maprange/testdata/basic.txtar @@ -0,0 +1,209 @@ +Test of fixing redundant calls to maps.Keys and maps.Values +(both stdlib "maps" and "golang.org/x/exp/maps") for Go 1.24. + +-- go.mod -- +module maprange + +require golang.org/x/exp v0.0.0 + +replace golang.org/x/exp => ./exp + +go 1.24 + +-- basic.go -- +package basic + +import "maps" + +func _() { + m := make(map[int]int) + + for range maps.Keys(m) { // want `unnecessary and inefficient call of maps.Keys` + } + + for range maps.Values(m) { // want `unnecessary and inefficient call of maps.Values` + } + + var x struct { + Map map[int]int + } + x.Map = make(map[int]int) + for x.Map[1] = range maps.Keys(m) { // want `unnecessary and inefficient call of maps.Keys` + } + + for x.Map[2] = range maps.Values(m) { // want `unnecessary and inefficient call of maps.Values` + } + + for k := range maps.Keys(m) { // want `unnecessary and inefficient call of maps.Keys` + _ = k + } + + for v := range maps.Values(m) { // want `unnecessary and inefficient call of maps.Values` + _ = v + } + + for range maps.Keys(x.Map) { // want `unnecessary and inefficient call of maps.Keys` + } + + for /* comment */ k := range /* comment */ maps.Keys(/* comment */ m) { // want `unnecessary and inefficient call of maps.Keys` + _ = k + } +} + +-- basic.go.golden -- +package basic + +import "maps" + +func _() { + m := make(map[int]int) + + for range m { // want `unnecessary and inefficient call of maps.Keys` + } + + for range m { // want `unnecessary and inefficient call of maps.Values` + } + + var x struct { + Map map[int]int + } + x.Map = make(map[int]int) + for x.Map[1] = range m { // want `unnecessary and inefficient call of maps.Keys` + } + + for _, x.Map[2] = range m { // want `unnecessary and inefficient call of maps.Values` + } + + for k := range m { // want `unnecessary and inefficient call of maps.Keys` + _ = k + } + + for _, v := range m { // want `unnecessary and inefficient call of maps.Values` + _ = v + } + + for range x.Map { // want `unnecessary and inefficient call of maps.Keys` + } + + for /* comment */ k := range /* comment */ /* comment */ m { // want `unnecessary and inefficient call of maps.Keys` + _ = k + } +} + +-- xmaps.go -- +package basic + +import "golang.org/x/exp/maps" + +func _() { + m := make(map[int]int) + + for range maps.Keys(m) { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Keys` + } + + for range maps.Values(m) { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Values` + } + + for i := range maps.Values(m) { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Values` + _ = i + } + + var x struct { + Map map[int]int + } + x.Map = 
make(map[int]int) + for _, x.Map[1] = range maps.Keys(m) { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Keys` + } + + for _, x.Map[2] = range maps.Values(m) { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Values` + } + + for _, k := range maps.Keys(m) { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Keys` + _ = k + } + + for _, v := range maps.Values(m) { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Values` + _ = v + } + + for range maps.Keys(x.Map) { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Keys` + } + + for i, k := range maps.Keys(m) { // ok: this can't be straightforwardly rewritten + _, _ = i, k + } + + for _, _ = range maps.Values(m) { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Values` + } +} + +-- xmaps.go.golden -- +package basic + +import "golang.org/x/exp/maps" + +func _() { + m := make(map[int]int) + + for range m { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Keys` + } + + for range m { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Values` + } + + for i := range len(m) { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Values` + _ = i + } + + var x struct { + Map map[int]int + } + x.Map = make(map[int]int) + for x.Map[1] = range m { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Keys` + } + + for _, x.Map[2] = range m { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Values` + } + + for k := range m { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Keys` + _ = k + } + + for _, v := range m { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Values` + _ = v + } + + for range x.Map { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Keys` + } + + for i, k := range maps.Keys(m) { // ok: this can't be straightforwardly rewritten + _, _ = i, k + } + + for _, _ = range m { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Values` + } +} + +-- exp/go.mod -- +module golang.org/x/exp + +go 1.24 + +-- exp/maps/maps.go -- +package maps + +func Keys[M ~map[K]V, K comparable, V any](m M) []K { + r := make([]K, 0, len(m)) + for k := range m { + r = append(r, k) + } + return r +} + +func Values[M ~map[K]V, K comparable, V any](m M) []V { + r := make([]V, 0, len(m)) + for _, v := range m { + r = append(r, v) + } + return r +} \ No newline at end of file diff --git a/gopls/internal/analysis/maprange/testdata/old.txtar b/gopls/internal/analysis/maprange/testdata/old.txtar new file mode 100644 index 00000000000..d27ff8c2a22 --- /dev/null +++ b/gopls/internal/analysis/maprange/testdata/old.txtar @@ -0,0 +1,62 @@ +Test of fixing redundant calls to maps.Keys and maps.Values +(both stdlib "maps" and "golang.org/x/exp/maps") for Go 1.21, +before range over int made suggesting a fix for a rare case easier. 
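+(The rare case: with golang.org/x/exp/maps, a loop such as
+'for i := range maps.Keys(m)' can only be rewritten as
+'for i := range len(m)', which requires range over int.)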
+ +-- go.mod -- +module maprange + +require golang.org/x/exp v0.0.0 + +replace golang.org/x/exp => ./exp + +go 1.21 + +-- old.go -- +package old + +import "golang.org/x/exp/maps" + +func _() { + m := make(map[int]int) + + for i := range maps.Keys(m) { // want `likely incorrect use of golang.org/x/exp/maps.Keys \(returns a slice\)` + _ = i + } +} + +-- old.go.golden -- +package old + +import "golang.org/x/exp/maps" + +func _() { + m := make(map[int]int) + + for i := range maps.Keys(m) { // want `likely incorrect use of golang.org/x/exp/maps.Keys \(returns a slice\)` + _ = i + } +} + +-- exp/go.mod -- +module golang.org/x/exp + +go 1.21 + +-- exp/maps/maps.go -- +package maps + +func Keys[M ~map[K]V, K comparable, V any](m M) []K { + r := make([]K, 0, len(m)) + for k := range m { + r = append(r, k) + } + return r +} + +func Values[M ~map[K]V, K comparable, V any](m M) []V { + r := make([]V, 0, len(m)) + for _, v := range m { + r = append(r, v) + } + return r +} \ No newline at end of file From 20f8890687341c0dc67b20e3df0ab48651dc3618 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Fri, 21 Mar 2025 14:27:10 -0400 Subject: [PATCH 130/270] internal/astutil/cursor: add Cursor.Contains(Cursor) bool The inspector representation gives us an extremely efficient O(1) check for containment. Change-Id: If40834922c8d1a8f6a847ea674f84d6ead6cb026 Reviewed-on: https://go-review.googlesource.com/c/tools/+/660015 LUCI-TryBot-Result: Go LUCI Reviewed-by: Robert Findley --- internal/astutil/cursor/cursor.go | 12 ++++++++++ internal/astutil/cursor/cursor_test.go | 33 ++++++++++++++++++++++++++ 2 files changed, 45 insertions(+) diff --git a/internal/astutil/cursor/cursor.go b/internal/astutil/cursor/cursor.go index 889733ed92f..144182f38cd 100644 --- a/internal/astutil/cursor/cursor.go +++ b/internal/astutil/cursor/cursor.go @@ -394,6 +394,18 @@ func (c Cursor) Children() iter.Seq[Cursor] { } } +// Contains reports whether c contains or is equal to c2. +// +// Both Cursors must belong to the same [Inspector]; +// neither may be its Root node. +func (c Cursor) Contains(c2 Cursor) bool { + if c.in != c2.in { + panic("different inspectors") + } + events := c.events() + return c.index <= c2.index && events[c2.index].index <= events[c.index].index +} + // FindNode returns the cursor for node n if it belongs to the subtree // rooted at c. It returns zero if n is not found. func (c Cursor) FindNode(n ast.Node) (Cursor, bool) { diff --git a/internal/astutil/cursor/cursor_test.go b/internal/astutil/cursor/cursor_test.go index 76e7232bc86..9f540ffdc76 100644 --- a/internal/astutil/cursor/cursor_test.go +++ b/internal/astutil/cursor/cursor_test.go @@ -415,6 +415,39 @@ func TestCursor_Edge(t *testing.T) { if cur.Parent().Child(cur.Node()) != cur { t.Errorf("Cursor.Parent.Child = %v, want %v", cur.Parent().Child(cur.Node()), cur) } + + // Check invariants of Contains: + + // A cursor contains itself. + if !cur.Contains(cur) { + t.Errorf("!cur.Contains(cur): %v", cur) + } + // A parent contains its child, but not the inverse. + if !parent.Contains(cur) { + t.Errorf("!cur.Parent().Contains(cur): %v", cur) + } + if cur.Contains(parent) { + t.Errorf("cur.Contains(cur.Parent()): %v", cur) + } + // A grandparent contains its grandchild, but not the inverse. 
+ if grandparent := cur.Parent(); grandparent.Node() != nil { + if !grandparent.Contains(cur) { + t.Errorf("!cur.Parent().Parent().Contains(cur): %v", cur) + } + if cur.Contains(grandparent) { + t.Errorf("cur.Contains(cur.Parent().Parent()): %v", cur) + } + } + // A cursor and its uncle/aunt do not contain each other. + if uncle, ok := parent.NextSibling(); ok { + if uncle.Contains(cur) { + t.Errorf("cur.Parent().NextSibling().Contains(cur): %v", cur) + } + if cur.Contains(uncle) { + t.Errorf("cur.Contains(cur.Parent().NextSibling()): %v", cur) + } + } + } } From ec542a7d37be24d241637dd53fad3a4ee3617e7a Mon Sep 17 00:00:00 2001 From: Peter Weinberger Date: Fri, 21 Mar 2025 10:50:53 -0400 Subject: [PATCH 131/270] gopls/internal/fuzzy: apply modernizers to the fuzzy matcher Uses of b.N changed to b.Loop, one use of min(...), and several three-clause for statements were change to range over ints. Change-Id: If50f1f19232751635f7bb0ec2c27f29b575a062a Reviewed-on: https://go-review.googlesource.com/c/tools/+/659935 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan --- gopls/internal/fuzzy/input.go | 6 +++--- gopls/internal/fuzzy/input_test.go | 2 +- gopls/internal/fuzzy/matcher.go | 13 +++++-------- gopls/internal/fuzzy/matcher_test.go | 3 +-- gopls/internal/fuzzy/self_test.go | 4 ++-- 5 files changed, 12 insertions(+), 16 deletions(-) diff --git a/gopls/internal/fuzzy/input.go b/gopls/internal/fuzzy/input.go index c1038163f1a..fd8575f6382 100644 --- a/gopls/internal/fuzzy/input.go +++ b/gopls/internal/fuzzy/input.go @@ -36,7 +36,7 @@ func RuneRoles(candidate []byte, reuse []RuneRole) []RuneRole { } prev, prev2 := rtNone, rtNone - for i := 0; i < len(candidate); i++ { + for i := range candidate { r := rune(candidate[i]) role := RNone @@ -122,7 +122,7 @@ func LastSegment(input string, roles []RuneRole) string { func fromChunks(chunks []string, buffer []byte) []byte { ii := 0 for _, chunk := range chunks { - for i := 0; i < len(chunk); i++ { + for i := range len(chunk) { if ii >= cap(buffer) { break } @@ -143,7 +143,7 @@ func toLower(input []byte, reuse []byte) []byte { output = make([]byte, len(input)) } - for i := 0; i < len(input); i++ { + for i := range input { r := rune(input[i]) if input[i] <= unicode.MaxASCII { if 'A' <= r && r <= 'Z' { diff --git a/gopls/internal/fuzzy/input_test.go b/gopls/internal/fuzzy/input_test.go index ffe147241b6..dd751b8f0c2 100644 --- a/gopls/internal/fuzzy/input_test.go +++ b/gopls/internal/fuzzy/input_test.go @@ -127,7 +127,7 @@ func BenchmarkRoles(b *testing.B) { str := "AbstractSWTFactory" out := make([]fuzzy.RuneRole, len(str)) - for i := 0; i < b.N; i++ { + for b.Loop() { fuzzy.RuneRoles([]byte(str), out) } b.SetBytes(int64(len(str))) diff --git a/gopls/internal/fuzzy/matcher.go b/gopls/internal/fuzzy/matcher.go index 29d1b36501e..eff86efac34 100644 --- a/gopls/internal/fuzzy/matcher.go +++ b/gopls/internal/fuzzy/matcher.go @@ -134,10 +134,7 @@ func (m *Matcher) ScoreChunks(chunks []string) float32 { if sc < 0 { sc = 0 } - normalizedScore := float32(sc) * m.scoreScale - if normalizedScore > 1 { - normalizedScore = 1 - } + normalizedScore := min(float32(sc)*m.scoreScale, 1) return normalizedScore } @@ -177,7 +174,7 @@ func (m *Matcher) MatchedRanges() []int { i-- } // Reverse slice. 
- for i := 0; i < len(ret)/2; i++ { + for i := range len(ret) / 2 { ret[i], ret[len(ret)-1-i] = ret[len(ret)-1-i], ret[i] } return ret @@ -211,7 +208,7 @@ func (m *Matcher) computeScore(candidate []byte, candidateLower []byte) int { m.scores[0][0][0] = score(0, 0) // Start with 0. segmentsLeft, lastSegStart := 1, 0 - for i := 0; i < candLen; i++ { + for i := range candLen { if m.roles[i] == RSep { segmentsLeft++ lastSegStart = i + 1 @@ -304,7 +301,7 @@ func (m *Matcher) computeScore(candidate []byte, candidateLower []byte) int { // Third dimension encodes whether there is a gap between the previous match and the current // one. - for k := 0; k < 2; k++ { + for k := range 2 { sc := m.scores[i-1][j-1][k].val() + charScore isConsecutive := k == 1 || i-1 == 0 || i-1 == lastSegStart @@ -342,7 +339,7 @@ func (m *Matcher) ScoreTable(candidate string) string { var line1, line2, separator bytes.Buffer line1.WriteString("\t") line2.WriteString("\t") - for j := 0; j < len(m.pattern); j++ { + for j := range len(m.pattern) { line1.WriteString(fmt.Sprintf("%c\t\t", m.pattern[j])) separator.WriteString("----------------") } diff --git a/gopls/internal/fuzzy/matcher_test.go b/gopls/internal/fuzzy/matcher_test.go index 056da25d675..f743be0c5ef 100644 --- a/gopls/internal/fuzzy/matcher_test.go +++ b/gopls/internal/fuzzy/matcher_test.go @@ -293,8 +293,7 @@ func BenchmarkMatcher(b *testing.B) { matcher := fuzzy.NewMatcher(pattern) - b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { for _, c := range candidates { matcher.Score(c) } diff --git a/gopls/internal/fuzzy/self_test.go b/gopls/internal/fuzzy/self_test.go index 1c64f1953df..7cdb4fdef96 100644 --- a/gopls/internal/fuzzy/self_test.go +++ b/gopls/internal/fuzzy/self_test.go @@ -14,7 +14,7 @@ func BenchmarkSelf_Matcher(b *testing.B) { idents := collectIdentifiers(b) patterns := generatePatterns() - for i := 0; i < b.N; i++ { + for b.Loop() { for _, pattern := range patterns { sm := NewMatcher(pattern) for _, ident := range idents { @@ -28,7 +28,7 @@ func BenchmarkSelf_SymbolMatcher(b *testing.B) { idents := collectIdentifiers(b) patterns := generatePatterns() - for i := 0; i < b.N; i++ { + for b.Loop() { for _, pattern := range patterns { sm := NewSymbolMatcher(pattern) for _, ident := range idents { From bf12eb7e7db44e3f510e54843d4064c99b782068 Mon Sep 17 00:00:00 2001 From: Zamir Ashurbekov Date: Fri, 21 Mar 2025 19:31:29 +0000 Subject: [PATCH 132/270] gopls/internal/analysis/modernize: fix slicedelete triggers on slice identifiers with side effects Add a check that the expression defining the slice has no side effects to trigger slicedelete. This is a necessary condition to ensure that the change does not change the program behavior. 
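
For example (from the new test cases), no fix is offered for

    _ = append(h()[:i], h()[i+1:]...)

since rewriting it to slices.Delete(h(), i, i+1) would evaluate h only
once, changing the program's behavior if h has side effects.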
Fixes golang/go#72955 Change-Id: Ic326baa37e0b621fa7ba204bbfeb61c3e7daea47 GitHub-Last-Rev: 54e9082718e3d24ee82c681c494035fdc0e4e177 GitHub-Pull-Request: golang/tools#567 Reviewed-on: https://go-review.googlesource.com/c/tools/+/659295 Reviewed-by: Cherry Mui Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI Auto-Submit: Alan Donovan --- .../internal/analysis/modernize/modernize.go | 38 +++++++++++++++++ gopls/internal/analysis/modernize/slices.go | 3 ++ .../analysis/modernize/slicescontains.go | 3 ++ .../analysis/modernize/slicesdelete.go | 2 +- .../testdata/src/slicesdelete/slicesdelete.go | 8 ++++ .../src/slicesdelete/slicesdelete.go.golden | 42 +++++++++++-------- 6 files changed, 78 insertions(+), 18 deletions(-) diff --git a/gopls/internal/analysis/modernize/modernize.go b/gopls/internal/analysis/modernize/modernize.go index 4c49f6d1ecf..16fea0d8896 100644 --- a/gopls/internal/analysis/modernize/modernize.go +++ b/gopls/internal/analysis/modernize/modernize.go @@ -187,3 +187,41 @@ func enabledCategory(filter, category string) bool { } return exclude } + +// noEffects reports whether the expression has no side effects, i.e., it +// does not modify the memory state. This function is conservative: it may +// return false even when the expression has no effect. +func noEffects(info *types.Info, expr ast.Expr) bool { + noEffects := true + ast.Inspect(expr, func(n ast.Node) bool { + switch v := n.(type) { + case nil, *ast.Ident, *ast.BasicLit, *ast.BinaryExpr, *ast.ParenExpr, + *ast.SelectorExpr, *ast.IndexExpr, *ast.SliceExpr, *ast.TypeAssertExpr, + *ast.StarExpr, *ast.CompositeLit, *ast.ArrayType, *ast.StructType, + *ast.MapType, *ast.InterfaceType, *ast.KeyValueExpr: + // No effect + case *ast.UnaryExpr: + // Channel send <-ch has effects + if v.Op == token.ARROW { + noEffects = false + } + case *ast.CallExpr: + // Type conversion has no effects + if !info.Types[v].IsType() { + // TODO(adonovan): Add a case for built-in functions without side + // effects (by using callsPureBuiltin from tools/internal/refactor/inline) + + noEffects = false + } + case *ast.FuncLit: + // A FuncLit has no effects, but do not descend into it. + return false + default: + // All other expressions have effects + noEffects = false + } + + return noEffects + }) + return noEffects +} diff --git a/gopls/internal/analysis/modernize/slices.go b/gopls/internal/analysis/modernize/slices.go index 22999b60cc5..18e02d51ebf 100644 --- a/gopls/internal/analysis/modernize/slices.go +++ b/gopls/internal/analysis/modernize/slices.go @@ -210,6 +210,9 @@ func appendclipped(pass *analysis.Pass) { // x[:len(x):len(x)] (nonempty) res=x // x[:k:k] (nonempty) // slices.Clip(x) (nonempty) res=x +// +// TODO(adonovan): Add a check that the expression x has no side effects in +// case x[:len(x):len(x)] -> x. Now the program behavior may change. func clippedSlice(info *types.Info, e ast.Expr) (res ast.Expr, empty bool) { switch e := e.(type) { case *ast.SliceExpr: diff --git a/gopls/internal/analysis/modernize/slicescontains.go b/gopls/internal/analysis/modernize/slicescontains.go index b59ea452a0f..589efe7ffc8 100644 --- a/gopls/internal/analysis/modernize/slicescontains.go +++ b/gopls/internal/analysis/modernize/slicescontains.go @@ -46,6 +46,9 @@ import ( // It may change cardinality of effects of the "needle" expression. // (Mostly this appears to be a desirable optimization, avoiding // redundantly repeated evaluation.) +// +// TODO(adonovan): Add a check that needle/predicate expression from +// if-statement has no effects. 
Now the program behavior may change. func slicescontains(pass *analysis.Pass) { // Skip the analyzer in packages where its // fixes would create an import cycle. diff --git a/gopls/internal/analysis/modernize/slicesdelete.go b/gopls/internal/analysis/modernize/slicesdelete.go index 3c3d880f62b..493009c35be 100644 --- a/gopls/internal/analysis/modernize/slicesdelete.go +++ b/gopls/internal/analysis/modernize/slicesdelete.go @@ -94,7 +94,7 @@ func slicesdelete(pass *analysis.Pass) { slice2, ok2 := call.Args[1].(*ast.SliceExpr) if ok1 && slice1.Low == nil && !slice1.Slice3 && ok2 && slice2.High == nil && !slice2.Slice3 && - equalSyntax(slice1.X, slice2.X) && + equalSyntax(slice1.X, slice2.X) && noEffects(info, slice1.X) && increasingSliceIndices(info, slice1.High, slice2.Low) { // Have append(s[:a], s[b:]...) where we can verify a < b. report(file, call, slice1, slice2) diff --git a/gopls/internal/analysis/modernize/testdata/src/slicesdelete/slicesdelete.go b/gopls/internal/analysis/modernize/testdata/src/slicesdelete/slicesdelete.go index a710d06f2fe..0ee608d8f9f 100644 --- a/gopls/internal/analysis/modernize/testdata/src/slicesdelete/slicesdelete.go +++ b/gopls/internal/analysis/modernize/testdata/src/slicesdelete/slicesdelete.go @@ -2,6 +2,10 @@ package slicesdelete var g struct{ f []int } +func h() []int { return []int{} } + +var ch chan []int + func slicesdelete(test, other []byte, i int) { const k = 1 _ = append(test[:i], test[i+1:]...) // want "Replace append with slices.Delete" @@ -26,6 +30,10 @@ func slicesdelete(test, other []byte, i int) { _ = append(g.f[:i], g.f[i+k:]...) // want "Replace append with slices.Delete" + _ = append(h()[:i], h()[i+1:]...) // potentially has side effects + + _ = append((<-ch)[:i], (<-ch)[i+1:]...) // has side effects + _ = append(test[:3], test[i+1:]...) // cannot verify a < b _ = append(test[:i-4], test[i-1:]...) // want "Replace append with slices.Delete" diff --git a/gopls/internal/analysis/modernize/testdata/src/slicesdelete/slicesdelete.go.golden b/gopls/internal/analysis/modernize/testdata/src/slicesdelete/slicesdelete.go.golden index 2d9447af3a3..a15eb07dee9 100644 --- a/gopls/internal/analysis/modernize/testdata/src/slicesdelete/slicesdelete.go.golden +++ b/gopls/internal/analysis/modernize/testdata/src/slicesdelete/slicesdelete.go.golden @@ -4,35 +4,43 @@ import "slices" var g struct{ f []int } +func h() []int { return []int{} } + +var ch chan []int + func slicesdelete(test, other []byte, i int) { - const k = 1 - _ = slices.Delete(test, i, i+1) // want "Replace append with slices.Delete" + const k = 1 + _ = slices.Delete(test, i, i+1) // want "Replace append with slices.Delete" + + _ = slices.Delete(test, i+1, i+2) // want "Replace append with slices.Delete" + + _ = append(test[:i+1], test[i+1:]...) // not deleting any slice elements - _ = slices.Delete(test, i+1, i+2) // want "Replace append with slices.Delete" + _ = append(test[:i], test[i-1:]...) // not deleting any slice elements - _ = append(test[:i+1], test[i+1:]...) // not deleting any slice elements + _ = slices.Delete(test, i-1, i) // want "Replace append with slices.Delete" - _ = append(test[:i], test[i-1:]...) // not deleting any slice elements + _ = slices.Delete(test, i-2, i+1) // want "Replace append with slices.Delete" - _ = slices.Delete(test, i-1, i) // want "Replace append with slices.Delete" + _ = append(test[:i-2], other[i+1:]...) 
// different slices "test" and "other" - _ = slices.Delete(test, i-2, i+1) // want "Replace append with slices.Delete" + _ = append(test[:i-2], other[i+1+k:]...) // cannot verify a < b - _ = append(test[:i-2], other[i+1:]...) // different slices "test" and "other" + _ = append(test[:i-2], test[11:]...) // cannot verify a < b - _ = append(test[:i-2], other[i+1+k:]...) // cannot verify a < b + _ = slices.Delete(test, 1, 3) // want "Replace append with slices.Delete" - _ = append(test[:i-2], test[11:]...) // cannot verify a < b + _ = slices.Delete(g.f, i, i+k) // want "Replace append with slices.Delete" - _ = slices.Delete(test, 1, 3) // want "Replace append with slices.Delete" + _ = append(h()[:i], h()[i+1:]...) // potentially has side effects - _ = slices.Delete(g.f, i, i+k) // want "Replace append with slices.Delete" + _ = append((<-ch)[:i], (<-ch)[i+1:]...) // has side effects - _ = append(test[:3], test[i+1:]...) // cannot verify a < b + _ = append(test[:3], test[i+1:]...) // cannot verify a < b - _ = slices.Delete(test, i-4, i-1) // want "Replace append with slices.Delete" + _ = slices.Delete(test, i-4, i-1) // want "Replace append with slices.Delete" - _ = slices.Delete(test, 1+2, 3+4) // want "Replace append with slices.Delete" + _ = slices.Delete(test, 1+2, 3+4) // want "Replace append with slices.Delete" - _ = append(test[:1+2], test[i-1:]...) // cannot verify a < b -} \ No newline at end of file + _ = append(test[:1+2], test[i-1:]...) // cannot verify a < b +} From 961631ad41f23a4c926e37b7cf89b64351a01ce7 Mon Sep 17 00:00:00 2001 From: xieyuschen Date: Mon, 24 Mar 2025 19:07:31 +0800 Subject: [PATCH 133/270] internal/testfiles: replace outdated function with os.CopyFS Change-Id: I3e8ccfa7e529a8e0c7469fde580edb02035cbfb9 Reviewed-on: https://go-review.googlesource.com/c/tools/+/660335 LUCI-TryBot-Result: Go LUCI Reviewed-by: Dmitri Shuralyov Reviewed-by: Alan Donovan Auto-Submit: Alan Donovan --- internal/testfiles/testfiles.go | 30 +----------------------------- 1 file changed, 1 insertion(+), 29 deletions(-) diff --git a/internal/testfiles/testfiles.go b/internal/testfiles/testfiles.go index 78733976b3b..dee63c1c2f0 100644 --- a/internal/testfiles/testfiles.go +++ b/internal/testfiles/testfiles.go @@ -7,7 +7,6 @@ package testfiles import ( - "io" "io/fs" "os" "path/filepath" @@ -46,7 +45,7 @@ import ( func CopyToTmp(t testing.TB, src fs.FS, rename ...string) string { dstdir := t.TempDir() - if err := copyFS(dstdir, src); err != nil { + if err := os.CopyFS(dstdir, src); err != nil { t.Fatal(err) } for _, r := range rename { @@ -64,33 +63,6 @@ func CopyToTmp(t testing.TB, src fs.FS, rename ...string) string { return dstdir } -// Copy the files in src to dst. -// Use os.CopyFS when 1.23 can be used in x/tools. -func copyFS(dstdir string, src fs.FS) error { - return fs.WalkDir(src, ".", func(path string, d fs.DirEntry, err error) error { - if err != nil { - return err - } - newpath := filepath.Join(dstdir, path) - if d.IsDir() { - return os.MkdirAll(newpath, 0777) - } - r, err := src.Open(path) - if err != nil { - return err - } - defer r.Close() - - w, err := os.Create(newpath) - if err != nil { - return err - } - defer w.Close() - _, err = io.Copy(w, r) - return err - }) -} - // ExtractTxtarFileToTmp read a txtar archive on a given path, // extracts it to a temporary directory, and returns the // temporary directory. 
From baedf716f743020c064dcf41a2251d633c42c826 Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Wed, 12 Mar 2025 09:15:19 -0400 Subject: [PATCH 134/270] gopls/internal/golang: unify tracks type params Enhance unify to take type params into account. Unify(x, y) will return false if there is no assignment to the type parameters of x and y that will make x identical to y. (Except that type param constraints and interface literals are not handled; unify "fails open" for these, returning true when the right answer might be false. That is the same behavior as previously.) The API supports initial bindings for type params, in order to handle types like C in instantiations of F: func F[T any]() { type C *T } The implementation matches that in internal/util/fingerprint.go, except that it works on actual types.Type values instead of the reconstituted fingerprint types. This CL tests the return value when there are no initial bindings. Subsequent CLs will test the final values of bindings, with and without initial values. Change-Id: I2402f818b86a5ccac874491e0801bb503b449cd6 Reviewed-on: https://go-review.googlesource.com/c/tools/+/657076 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan --- gopls/internal/golang/implementation.go | 379 +++++++++++++----- gopls/internal/golang/implementation_test.go | 140 +++++++ .../internal/util/fingerprint/fingerprint.go | 2 +- .../util/fingerprint/fingerprint_test.go | 2 +- gopls/internal/util/moreiters/iters.go | 21 + 5 files changed, 440 insertions(+), 104 deletions(-) create mode 100644 gopls/internal/golang/implementation_test.go diff --git a/gopls/internal/golang/implementation.go b/gopls/internal/golang/implementation.go index 93ac8879550..53fde6c147b 100644 --- a/gopls/internal/golang/implementation.go +++ b/gopls/internal/golang/implementation.go @@ -11,6 +11,7 @@ import ( "go/ast" "go/token" "go/types" + "iter" "reflect" "slices" "sort" @@ -26,6 +27,7 @@ import ( "golang.org/x/tools/gopls/internal/file" "golang.org/x/tools/gopls/internal/protocol" "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/gopls/internal/util/moreiters" "golang.org/x/tools/gopls/internal/util/safetoken" "golang.org/x/tools/internal/astutil/cursor" "golang.org/x/tools/internal/astutil/edge" @@ -497,133 +499,306 @@ func concreteImplementsIntf(msets *typeutil.MethodSetCache, x, y types.Type) boo if !ok { return false // x lacks a method of y } - if !unify(xm.Signature(), ym.Signature()) { + if !unify(xm.Signature(), ym.Signature(), nil) { return false // signatures do not match } } return true // all methods found } -// unify reports whether the types of x and y match, allowing free -// type parameters to stand for anything at all, without regard to -// consistency of substitutions. +// unify reports whether the types of x and y match. // -// TODO(adonovan): implement proper unification (#63982), finding the -// most general unifier across all the interface methods. +// If unifier is nil, unify reports only whether it succeeded. +// If unifier is non-nil, it is populated with the values +// of type parameters determined during a successful unification. +// If unification succeeds without binding a type parameter, that parameter +// will not be present in the map. // -// See also: unify in cache/methodsets/fingerprint, which uses a -// similar ersatz unification approach on type fingerprints, for -// the global index. 
-func unify(x, y types.Type) bool { - x = types.Unalias(x) - y = types.Unalias(y) - - // For now, allow a type parameter to match anything, - // without regard to consistency of substitutions. - if is[*types.TypeParam](x) || is[*types.TypeParam](y) { - return true +// On entry, the unifier's contents are treated as the values of already-bound type +// parameters, constraining the unification. +// +// For example, if unifier is an empty (not nil) map on entry, then the types +// +// func[T any](T, int) +// +// and +// +// func[U any](bool, U) +// +// will unify, with T=bool and U=int. +// That is, the contents of unifier after unify returns will be +// +// {T: bool, U: int} +// +// where "T" is the type parameter T and "bool" is the basic type for bool. +// +// But if unifier is {T: int} is int on entry, then unification will fail, because T +// does not unify with bool. +// +// See also: unify in cache/methodsets/fingerprint, which implements +// unification for type fingerprints, for the global index. +// +// BUG: literal interfaces are not handled properly. But this function is currently +// used only for signatures, where such types are very rare. +func unify(x, y types.Type, unifier map[*types.TypeParam]types.Type) bool { + // bindings[tp] is the binding for type parameter tp. + // Although type parameters are nominally bound to types, each bindings[tp] + // is a pointer to a type, so unbound variables that unify can share a binding. + bindings := map[*types.TypeParam]*types.Type{} + + // Bindings is initialized with pointers to the provided types. + for tp, t := range unifier { + bindings[tp] = &t } - if reflect.TypeOf(x) != reflect.TypeOf(y) { - return false // mismatched types - } - - switch x := x.(type) { - case *types.Array: - y := y.(*types.Array) - return x.Len() == y.Len() && - unify(x.Elem(), y.Elem()) - - case *types.Basic: - y := y.(*types.Basic) - return x.Kind() == y.Kind() - - case *types.Chan: - y := y.(*types.Chan) - return x.Dir() == y.Dir() && - unify(x.Elem(), y.Elem()) - - case *types.Interface: - y := y.(*types.Interface) - // TODO(adonovan): fix: for correctness, we must check - // that both interfaces have the same set of methods - // modulo type parameters, while avoiding the risk of - // unbounded interface recursion. - // - // Since non-empty interface literals are vanishingly - // rare in methods signatures, we ignore this for now. - // If more precision is needed we could compare method - // names and arities, still without full recursion. - return x.NumMethods() == y.NumMethods() - - case *types.Map: - y := y.(*types.Map) - return unify(x.Key(), y.Key()) && - unify(x.Elem(), y.Elem()) - - case *types.Named: - y := y.(*types.Named) - if x.Origin() != y.Origin() { - return false // different named types + // bindingFor returns the *types.Type in bindings for tp if tp is not nil, + // creating one if needed. + bindingFor := func(tp *types.TypeParam) *types.Type { + if tp == nil { + return nil } - xtargs := x.TypeArgs() - ytargs := y.TypeArgs() - if xtargs.Len() != ytargs.Len() { - return false // arity error (ill-typed) + b := bindings[tp] + if b == nil { + b = new(types.Type) + bindings[tp] = b } - for i := range xtargs.Len() { - if !unify(xtargs.At(i), ytargs.At(i)) { - return false // mismatched type args + return b + } + + // bind sets b to t if b does not occur in t. 
+ bind := func(b *types.Type, t types.Type) bool { + for tp := range typeParams(t) { + if b == bindings[tp] { + return false // failed "occurs" check } } + *b = t return true + } - case *types.Pointer: - y := y.(*types.Pointer) - return unify(x.Elem(), y.Elem()) - - case *types.Signature: - y := y.(*types.Signature) - return x.Variadic() == y.Variadic() && - unify(x.Params(), y.Params()) && - unify(x.Results(), y.Results()) + // uni performs the actual unification. + var uni func(x, y types.Type) bool + uni = func(x, y types.Type) bool { + x = types.Unalias(x) + y = types.Unalias(y) + + tpx, _ := x.(*types.TypeParam) + tpy, _ := y.(*types.TypeParam) + if tpx != nil || tpy != nil { + bx := bindingFor(tpx) + by := bindingFor(tpy) + + // If both args are type params and neither is bound, have them share a binding. + if bx != nil && by != nil && *bx == nil && *by == nil { + // Arbitrarily give y's binding to x. + bindings[tpx] = by + return true + } + // Treat param bindings like original args in what follows. + if bx != nil && *bx != nil { + x = *bx + } + if by != nil && *by != nil { + y = *by + } + // If the x param is unbound, bind it to y. + if bx != nil && *bx == nil { + return bind(bx, y) + } + // If the y param is unbound, bind it to x. + if by != nil && *by == nil { + return bind(by, x) + } + // Unify the binding of a bound parameter. + return uni(x, y) + } - case *types.Slice: - y := y.(*types.Slice) - return unify(x.Elem(), y.Elem()) + // Neither arg is a type param. - case *types.Struct: - y := y.(*types.Struct) - if x.NumFields() != y.NumFields() { - return false + if reflect.TypeOf(x) != reflect.TypeOf(y) { + return false // mismatched types } - for i := range x.NumFields() { - xf := x.Field(i) - yf := y.Field(i) - if xf.Embedded() != yf.Embedded() || - xf.Name() != yf.Name() || - x.Tag(i) != y.Tag(i) || - !xf.Exported() && xf.Pkg() != yf.Pkg() || - !unify(xf.Type(), yf.Type()) { + + switch x := x.(type) { + case *types.Array: + y := y.(*types.Array) + return x.Len() == y.Len() && + uni(x.Elem(), y.Elem()) + + case *types.Basic: + y := y.(*types.Basic) + return x.Kind() == y.Kind() + + case *types.Chan: + y := y.(*types.Chan) + return x.Dir() == y.Dir() && + uni(x.Elem(), y.Elem()) + + case *types.Interface: + y := y.(*types.Interface) + // TODO(adonovan,jba): fix: for correctness, we must check + // that both interfaces have the same set of methods + // modulo type parameters, while avoiding the risk of + // unbounded interface recursion. + // + // Since non-empty interface literals are vanishingly + // rare in methods signatures, we ignore this for now. + // If more precision is needed we could compare method + // names and arities, still without full recursion. 
+ return x.NumMethods() == y.NumMethods() + + case *types.Map: + y := y.(*types.Map) + return uni(x.Key(), y.Key()) && + uni(x.Elem(), y.Elem()) + + case *types.Named: + y := y.(*types.Named) + if x.Origin() != y.Origin() { + return false // different named types + } + xtargs := x.TypeArgs() + ytargs := y.TypeArgs() + if xtargs.Len() != ytargs.Len() { + return false // arity error (ill-typed) + } + for i := range xtargs.Len() { + if !uni(xtargs.At(i), ytargs.At(i)) { + return false // mismatched type args + } + } + return true + + case *types.Pointer: + y := y.(*types.Pointer) + return uni(x.Elem(), y.Elem()) + + case *types.Signature: + y := y.(*types.Signature) + return x.Variadic() == y.Variadic() && + uni(x.Params(), y.Params()) && + uni(x.Results(), y.Results()) + + case *types.Slice: + y := y.(*types.Slice) + return uni(x.Elem(), y.Elem()) + + case *types.Struct: + y := y.(*types.Struct) + if x.NumFields() != y.NumFields() { return false } + for i := range x.NumFields() { + xf := x.Field(i) + yf := y.Field(i) + if xf.Embedded() != yf.Embedded() || + xf.Name() != yf.Name() || + x.Tag(i) != y.Tag(i) || + !xf.Exported() && xf.Pkg() != yf.Pkg() || + !uni(xf.Type(), yf.Type()) { + return false + } + } + return true + + case *types.Tuple: + y := y.(*types.Tuple) + if x.Len() != y.Len() { + return false + } + for i := range x.Len() { + if !uni(x.At(i).Type(), y.At(i).Type()) { + return false + } + } + return true + + default: // incl. *Union, *TypeParam + panic(fmt.Sprintf("unexpected Type %#v", x)) } - return true + } - case *types.Tuple: - y := y.(*types.Tuple) - if x.Len() != y.Len() { - return false + if !uni(x, y) { + return false + } + + // Populate the input map with the resulting types. + if unifier != nil { + for tparam, tptr := range bindings { + unifier[tparam] = *tptr } - for i := range x.Len() { - if !unify(x.At(i).Type(), y.At(i).Type()) { - return false + } + return true +} + +// typeParams yields all the free type parameters within t that are relevant for +// unification. +func typeParams(t types.Type) iter.Seq[*types.TypeParam] { + + return func(yield func(*types.TypeParam) bool) { + seen := map[*types.TypeParam]bool{} // yield each type param only once + + // tps(t) yields each TypeParam in t and returns false to stop. + var tps func(types.Type) bool + tps = func(t types.Type) bool { + t = types.Unalias(t) + + switch t := t.(type) { + case *types.TypeParam: + if seen[t] { + return true + } + seen[t] = true + return yield(t) + + case *types.Basic: + return true + + case *types.Array: + return tps(t.Elem()) + + case *types.Chan: + return tps(t.Elem()) + + case *types.Interface: + // TODO(jba): implement. + return true + + case *types.Map: + return tps(t.Key()) && tps(t.Elem()) + + case *types.Named: + if t.Origin() == t { + // generic type: look at type params + return moreiters.Every(t.TypeParams().TypeParams(), + func(tp *types.TypeParam) bool { return tps(tp) }) + } + // instantiated type: look at type args + return moreiters.Every(t.TypeArgs().Types(), tps) + + case *types.Pointer: + return tps(t.Elem()) + + case *types.Signature: + return tps(t.Params()) && tps(t.Results()) + + case *types.Slice: + return tps(t.Elem()) + + case *types.Struct: + return moreiters.Every(t.Fields(), + func(v *types.Var) bool { return tps(v.Type()) }) + + case *types.Tuple: + return moreiters.Every(t.Variables(), + func(v *types.Var) bool { return tps(v.Type()) }) + + default: // incl. *Union + panic(fmt.Sprintf("unexpected Type %#v", t)) } } - return true - default: // incl. 
*Union, *TypeParam - panic(fmt.Sprintf("unexpected Type %#v", x)) + tps(t) } } @@ -822,7 +997,7 @@ func funcUses(pkg *cache.Package, t types.Type) ([]protocol.Location, error) { if ftyp == nil { continue // missing type information } - if unify(t, ftyp) { + if unify(t, ftyp, nil) { loc, err := pgf.PosLocation(pos, end) if err != nil { return nil, err @@ -856,7 +1031,7 @@ func funcDefs(pkg *cache.Package, t types.Type) ([]protocol.Location, error) { if ftyp == nil { continue // missing type information } - if unify(t, ftyp) { + if unify(t, ftyp, nil) { pos := fn.Pos() loc, err := pgf.PosLocation(pos, pos+token.Pos(len("func"))) if err != nil { diff --git a/gopls/internal/golang/implementation_test.go b/gopls/internal/golang/implementation_test.go new file mode 100644 index 00000000000..08b1d281204 --- /dev/null +++ b/gopls/internal/golang/implementation_test.go @@ -0,0 +1,140 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package golang + +import ( + "go/types" + "testing" + + "golang.org/x/tools/internal/testfiles" + "golang.org/x/tools/txtar" +) + +// TODO(jba): test unify with some params already bound. + +func TestUnifyEmptyInfo(t *testing.T) { + // Check unify with no initial bound type params. + // This is currently the only case in use. + // Test cases from TestMatches in gopls/internal/util/fingerprint/fingerprint_test.go. + const src = ` +-- go.mod -- +module example.com +go 1.24 + +-- a/a.go -- +package a + +type Int = int +type String = string + +// Eq.Equal matches casefold.Equal. +type Eq[T any] interface { Equal(T, T) bool } +type casefold struct{} +func (casefold) Equal(x, y string) bool + +// A matches AString. +type A[T any] = struct { x T } +type AString = struct { x string } + +// B matches anything! +type B[T any] = T + +func C1[T any](int, T, ...string) T { panic(0) } +func C2[U any](int, int, ...U) bool { panic(0) } +func C3(int, bool, ...string) rune +func C4(int, bool, ...string) +func C5(int, float64, bool, string) bool +func C6(int, bool, ...string) bool + +func DAny[T any](Named[T]) { panic(0) } +func DString(Named[string]) +func DInt(Named[int]) + +type Named[T any] struct { x T } + +func E1(byte) rune +func E2(uint8) int32 +func E3(int8) uint32 + +// generic vs. 
generic +func F1[T any](T) { panic(0) } +func F2[T any](*T) { panic(0) } +func F3[T any](T, T) { panic(0) } +func F4[U any](U, *U) {panic(0) } +func F5[T, U any](T, U, U) { panic(0) } +func F6[T any](T, int, T) { panic(0) } +func F7[T any](bool, T, T) { panic(0) } +func F8[V any](*V, int, int) { panic(0) } +func F9[V any](V, *V, V) { panic(0) } +` + pkg := testfiles.LoadPackages(t, txtar.Parse([]byte(src)), "./a")[0] + scope := pkg.Types.Scope() + for _, test := range []struct { + a, b string + method string // optional field or method + want bool + }{ + {"Eq", "casefold", "Equal", true}, + {"A", "AString", "", true}, + {"A", "Eq", "", false}, // completely unrelated + {"B", "String", "", true}, + {"B", "Int", "", true}, + {"B", "A", "", true}, + {"C1", "C2", "", false}, + {"C1", "C3", "", false}, + {"C1", "C4", "", false}, + {"C1", "C5", "", false}, + {"C1", "C6", "", true}, + {"C2", "C3", "", false}, + {"C2", "C4", "", false}, + {"C3", "C4", "", false}, + {"DAny", "DString", "", true}, + {"DAny", "DInt", "", true}, + {"DString", "DInt", "", false}, // different instantiations of Named + {"E1", "E2", "", true}, // byte and rune are just aliases + {"E2", "E3", "", false}, + // // The following tests cover all of the type param cases of unify. + {"F1", "F2", "", true}, // F1[*int] = F2[int] + {"F3", "F4", "", false}, // would require U identical to *U, prevented by occur check + {"F5", "F6", "", true}, // one param is bound, the other is not + {"F6", "F7", "", false}, // both are bound + {"F5", "F8", "", true}, // T=*int, U=int, V=int + {"F5", "F9", "", false}, // T is unbound, V is bound, and T occurs in V + } { + lookup := func(name string) types.Type { + obj := scope.Lookup(name) + if obj == nil { + t.Fatalf("Lookup %s failed", name) + } + if test.method != "" { + obj, _, _ = types.LookupFieldOrMethod(obj.Type(), true, pkg.Types, test.method) + if obj == nil { + t.Fatalf("Lookup %s.%s failed", name, test.method) + } + } + return obj.Type() + } + + check := func(sa, sb string, want bool) { + t.Helper() + + a := lookup(sa) + b := lookup(sb) + + got := unify(a, b, nil) + if got != want { + t.Errorf("a=%s b=%s method=%s: unify returned %t for these inputs:\n- %s\n- %s", + sa, sb, test.method, got, a, b) + } + } + + check(test.a, test.b, test.want) + // unify is symmetric + check(test.b, test.a, test.want) + // unify is reflexive + check(test.a, test.a, true) + check(test.b, test.b, true) + } +} diff --git a/gopls/internal/util/fingerprint/fingerprint.go b/gopls/internal/util/fingerprint/fingerprint.go index 22817e4cb2f..b279003d081 100644 --- a/gopls/internal/util/fingerprint/fingerprint.go +++ b/gopls/internal/util/fingerprint/fingerprint.go @@ -352,7 +352,7 @@ func unify(x, y sexpr) bool { if c, ok := x.(*cons); ok { return max(maxTypeParam(c.car), maxTypeParam(c.cdr)) } - return 0 + return -1 } // xBindings[i] is the binding for type parameter #i in x, and similarly for y. 
diff --git a/gopls/internal/util/fingerprint/fingerprint_test.go b/gopls/internal/util/fingerprint/fingerprint_test.go index 737c6896157..40ea2ede34e 100644 --- a/gopls/internal/util/fingerprint/fingerprint_test.go +++ b/gopls/internal/util/fingerprint/fingerprint_test.go @@ -120,7 +120,7 @@ func E3(int8) uint32 func F1[T any](T) { panic(0) } func F2[T any](*T) { panic(0) } func F3[T any](T, T) { panic(0) } -func F4[U any](U, *U) {panic(0) } +func F4[U any](U, *U) { panic(0) } func F5[T, U any](T, U, U) { panic(0) } func F6[T any](T, int, T) { panic(0) } func F7[T any](bool, T, T) { panic(0) } diff --git a/gopls/internal/util/moreiters/iters.go b/gopls/internal/util/moreiters/iters.go index d41cb1d3bca..69c76ccb9b6 100644 --- a/gopls/internal/util/moreiters/iters.go +++ b/gopls/internal/util/moreiters/iters.go @@ -24,3 +24,24 @@ func Contains[T comparable](seq iter.Seq[T], x T) bool { } return false } + +// Every reports whether every pred(t) for t in seq returns true, +// stopping at the first false element. +func Every[T any](seq iter.Seq[T], pred func(T) bool) bool { + for t := range seq { + if !pred(t) { + return false + } + } + return true +} + +// Any reports whether any pred(t) for t in seq returns true. +func Any[T any](seq iter.Seq[T], pred func(T) bool) bool { + for t := range seq { + if pred(t) { + return true + } + } + return false +} From 95701555d661b1971669aeb90053be92e2f885c8 Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Thu, 13 Mar 2025 16:40:39 -0400 Subject: [PATCH 135/270] gopls/internal/golang: test unify result bindings Add checks to the test of unify that verify the type param bindings that it reports. Still to be done: tests of initial bindings. Change-Id: I8251220b5e849579cda719669ec7ca5626667ec1 Reviewed-on: https://go-review.googlesource.com/c/tools/+/657637 Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI --- gopls/internal/golang/implementation.go | 7 + gopls/internal/golang/implementation_test.go | 194 +++++++++++++++---- 2 files changed, 158 insertions(+), 43 deletions(-) diff --git a/gopls/internal/golang/implementation.go b/gopls/internal/golang/implementation.go index 53fde6c147b..8453c571ba7 100644 --- a/gopls/internal/golang/implementation.go +++ b/gopls/internal/golang/implementation.go @@ -535,6 +535,13 @@ func concreteImplementsIntf(msets *typeutil.MethodSetCache, x, y types.Type) boo // But if unifier is {T: int} is int on entry, then unification will fail, because T // does not unify with bool. // +// Unify does not preserve aliases. For example, given the following: +// +// type String = string +// type A[T] = T +// +// unification succeeds with T bound to string, not String. +// // See also: unify in cache/methodsets/fingerprint, which implements // unification for type fingerprints, for the global index. // diff --git a/gopls/internal/golang/implementation_test.go b/gopls/internal/golang/implementation_test.go index 08b1d281204..80403443cd9 100644 --- a/gopls/internal/golang/implementation_test.go +++ b/gopls/internal/golang/implementation_test.go @@ -6,17 +6,14 @@ package golang import ( "go/types" + "maps" "testing" "golang.org/x/tools/internal/testfiles" "golang.org/x/tools/txtar" ) -// TODO(jba): test unify with some params already bound. - -func TestUnifyEmptyInfo(t *testing.T) { - // Check unify with no initial bound type params. - // This is currently the only case in use. +func TestUnify(t *testing.T) { // Test cases from TestMatches in gopls/internal/util/fingerprint/fingerprint_test.go. 
const src = ` -- go.mod -- @@ -69,39 +66,135 @@ func F7[T any](bool, T, T) { panic(0) } func F8[V any](*V, int, int) { panic(0) } func F9[V any](V, *V, V) { panic(0) } ` + type tmap = map[*types.TypeParam]types.Type + + var ( + boolType = types.Typ[types.Bool] + intType = types.Typ[types.Int] + stringType = types.Typ[types.String] + ) pkg := testfiles.LoadPackages(t, txtar.Parse([]byte(src)), "./a")[0] scope := pkg.Types.Scope() + + tparam := func(name string, index int) *types.TypeParam { + obj := scope.Lookup(name) + var tps *types.TypeParamList + switch obj := obj.(type) { + case *types.Func: + tps = obj.Signature().TypeParams() + case *types.TypeName: + if n, ok := obj.Type().(*types.Named); ok { + tps = n.TypeParams() + } else { + tps = obj.Type().(*types.Alias).TypeParams() + } + default: + t.Fatalf("unsupported object of type %T", obj) + } + return tps.At(index) + } + for _, test := range []struct { - a, b string - method string // optional field or method - want bool + x, y string // the symbols in the above source code whose types to unify + method string // optional field or method + params tmap // initial values of type params + want bool // success or failure + wantParams tmap // expected output }{ - {"Eq", "casefold", "Equal", true}, - {"A", "AString", "", true}, - {"A", "Eq", "", false}, // completely unrelated - {"B", "String", "", true}, - {"B", "Int", "", true}, - {"B", "A", "", true}, - {"C1", "C2", "", false}, - {"C1", "C3", "", false}, - {"C1", "C4", "", false}, - {"C1", "C5", "", false}, - {"C1", "C6", "", true}, - {"C2", "C3", "", false}, - {"C2", "C4", "", false}, - {"C3", "C4", "", false}, - {"DAny", "DString", "", true}, - {"DAny", "DInt", "", true}, - {"DString", "DInt", "", false}, // different instantiations of Named - {"E1", "E2", "", true}, // byte and rune are just aliases - {"E2", "E3", "", false}, - // // The following tests cover all of the type param cases of unify. - {"F1", "F2", "", true}, // F1[*int] = F2[int] - {"F3", "F4", "", false}, // would require U identical to *U, prevented by occur check - {"F5", "F6", "", true}, // one param is bound, the other is not - {"F6", "F7", "", false}, // both are bound - {"F5", "F8", "", true}, // T=*int, U=int, V=int - {"F5", "F9", "", false}, // T is unbound, V is bound, and T occurs in V + { + // In Eq[T], T is bound to string. + x: "Eq", + y: "casefold", + method: "Equal", + want: true, + wantParams: tmap{tparam("Eq", 0): stringType}, + }, + { + // If we unify A[T] and A[string], T should be bound to string. + x: "A", + y: "AString", + want: true, + wantParams: tmap{tparam("A", 0): stringType}, + }, + {x: "A", y: "Eq", want: false}, // completely unrelated + { + x: "B", + y: "String", + want: true, + wantParams: tmap{tparam("B", 0): stringType}, + }, + { + x: "B", + y: "Int", + want: true, + wantParams: tmap{tparam("B", 0): intType}, + }, + { + x: "B", + y: "A", + want: true, + // B's T is bound to A's struct { x T } + wantParams: tmap{tparam("B", 0): scope.Lookup("A").Type().Underlying()}, + }, + { + // C1's U unifies with C6's bool. + x: "C1", + y: "C6", + wantParams: tmap{tparam("C1", 0): boolType}, + want: true, + }, + // C1 fails to unify with C2 because C1's T must be bound to both int and bool. + {x: "C1", y: "C2", want: false}, + // The remaining "C" cases fail for less interesting reasons, usually different numbers + // or types of parameters or results. 
+ {x: "C1", y: "C3", want: false}, + {x: "C1", y: "C4", want: false}, + {x: "C1", y: "C5", want: false}, + {x: "C2", y: "C3", want: false}, + {x: "C2", y: "C4", want: false}, + {x: "C3", y: "C4", want: false}, + { + x: "DAny", + y: "DString", + want: true, + wantParams: tmap{tparam("DAny", 0): stringType}, + }, + {x: "DString", y: "DInt", want: false}, // different instantiations of Named + {x: "E1", y: "E2", want: true}, // byte and rune are just aliases + {x: "E2", y: "E3", want: false}, + + // The following tests cover all of the type param cases of unify. + { + // F1[*int] = F2[int], for example + // F1's T is bound to a pointer to F2's T. + x: "F1", + y: "F2", + want: true, + wantParams: tmap{tparam("F1", 0): types.NewPointer(tparam("F2", 0))}, + }, + {x: "F3", y: "F4", want: false}, // would require U identical to *U, prevented by occur check + { + x: "F5", + y: "F6", + want: true, + wantParams: tmap{ + tparam("F5", 0): intType, + tparam("F5", 1): intType, + tparam("F6", 0): intType, + }, + }, + {x: "F6", y: "F7", want: false}, // both are bound + { + // T=*V, U=int, V=int + x: "F5", + y: "F8", + want: true, + wantParams: tmap{ + tparam("F5", 0): types.NewPointer(tparam("F8", 0)), + tparam("F5", 1): intType, + }, + }, + {x: "F5", y: "F9", want: false}, // T is unbound, V is bound, and T occurs in V } { lookup := func(name string) types.Type { obj := scope.Lookup(name) @@ -117,24 +210,39 @@ func F9[V any](V, *V, V) { panic(0) } return obj.Type() } - check := func(sa, sb string, want bool) { + check := func(a, b string, want, compareParams bool) { t.Helper() - a := lookup(sa) - b := lookup(sb) + ta := lookup(a) + tb := lookup(b) - got := unify(a, b, nil) + var gotParams tmap + if test.params == nil { + // Get the unifier even if there are no input params. + gotParams = tmap{} + } else { + gotParams = maps.Clone(test.params) + } + got := unify(ta, tb, gotParams) if got != want { t.Errorf("a=%s b=%s method=%s: unify returned %t for these inputs:\n- %s\n- %s", - sa, sb, test.method, got, a, b) + a, b, test.method, got, a, b) + return + } + if !compareParams { + return + } + if !maps.EqualFunc(gotParams, test.wantParams, types.Identical) { + t.Errorf("x=%s y=%s method=%s: xParams: got %v, want %v", + a, b, test.method, gotParams, test.wantParams) } } - check(test.a, test.b, test.want) + check(test.x, test.y, test.want, true) // unify is symmetric - check(test.b, test.a, test.want) + check(test.y, test.x, test.want, true) // unify is reflexive - check(test.a, test.a, true) - check(test.b, test.b, true) + check(test.x, test.x, true, false) + check(test.y, test.y, true, false) } } From 45b8eacdc2a69465d7ec78ecf6ffe7b2c2af252e Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Thu, 13 Mar 2025 17:29:34 -0400 Subject: [PATCH 136/270] gopls/internal/golang: test initial bindings to unify Add tests to unify that check that bindings provided on input behave as expected. One test case uncovered an infinite recursion. Fixed that, but in case there are more, added a depth check. For golang/go#63982. 
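To illustrate initial bindings (identifier names below are hypothetical
stand-ins; the test derives the real type parameters from the loaded test
package), seeding the unifier constrains the result:

    // tparamF6 stands for F6's type parameter T. Binding it to int is
    // consistent with unifying F5 and F6, so unify succeeds and also
    // records int for F5's T and U; seeding bool instead makes it fail.
    u := map[*types.TypeParam]types.Type{tparamF6: types.Typ[types.Int]}
    ok := unify(typeOfF5, typeOfF6, u) // ok == true
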
Change-Id: Ib3685948243391c450d5a85d30dad0eaea3c459a Reviewed-on: https://go-review.googlesource.com/c/tools/+/657638 Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI --- gopls/internal/golang/implementation.go | 14 +++++ gopls/internal/golang/implementation_test.go | 65 ++++++++++++++++++-- 2 files changed, 74 insertions(+), 5 deletions(-) diff --git a/gopls/internal/golang/implementation.go b/gopls/internal/golang/implementation.go index 8453c571ba7..b9a332ac62a 100644 --- a/gopls/internal/golang/implementation.go +++ b/gopls/internal/golang/implementation.go @@ -584,14 +584,27 @@ func unify(x, y types.Type, unifier map[*types.TypeParam]types.Type) bool { } // uni performs the actual unification. + depth := 0 var uni func(x, y types.Type) bool uni = func(x, y types.Type) bool { + // Panic if recursion gets too deep, to detect bugs before + // overflowing the stack. + depth++ + defer func() { depth-- }() + if depth > 100 { + panic("unify: max depth exceeded") + } + x = types.Unalias(x) y = types.Unalias(y) tpx, _ := x.(*types.TypeParam) tpy, _ := y.(*types.TypeParam) if tpx != nil || tpy != nil { + // Identical type params unify. + if tpx == tpy { + return true + } bx := bindingFor(tpx) by := bindingFor(tpy) @@ -726,6 +739,7 @@ func unify(x, y types.Type, unifier map[*types.TypeParam]types.Type) bool { } if !uni(x, y) { + clear(unifier) return false } diff --git a/gopls/internal/golang/implementation_test.go b/gopls/internal/golang/implementation_test.go index 80403443cd9..b7253bb8bf7 100644 --- a/gopls/internal/golang/implementation_test.go +++ b/gopls/internal/golang/implementation_test.go @@ -14,7 +14,7 @@ import ( ) func TestUnify(t *testing.T) { - // Test cases from TestMatches in gopls/internal/util/fingerprint/fingerprint_test.go. + // Most cases from TestMatches in gopls/internal/util/fingerprint/fingerprint_test.go. const src = ` -- go.mod -- module example.com @@ -60,6 +60,7 @@ func F1[T any](T) { panic(0) } func F2[T any](*T) { panic(0) } func F3[T any](T, T) { panic(0) } func F4[U any](U, *U) {panic(0) } +func F4a[U any](U, Named[U]) {panic(0) } func F5[T, U any](T, U, U) { panic(0) } func F6[T any](T, int, T) { panic(0) } func F7[T any](bool, T, T) { panic(0) } @@ -73,6 +74,7 @@ func F9[V any](V, *V, V) { panic(0) } intType = types.Typ[types.Int] stringType = types.Typ[types.String] ) + pkg := testfiles.LoadPackages(t, txtar.Parse([]byte(src)), "./a")[0] scope := pkg.Types.Scope() @@ -167,12 +169,14 @@ func F9[V any](V, *V, V) { panic(0) } { // F1[*int] = F2[int], for example // F1's T is bound to a pointer to F2's T. - x: "F1", + x: "F1", + // F2's T is unbound: any instantiation works. 
y: "F2", want: true, wantParams: tmap{tparam("F1", 0): types.NewPointer(tparam("F2", 0))}, }, - {x: "F3", y: "F4", want: false}, // would require U identical to *U, prevented by occur check + {x: "F3", y: "F4", want: false}, // would require U identical to *U, prevented by occur check + {x: "F3", y: "F4a", want: false}, // occur check through Named[T] { x: "F5", y: "F6", @@ -184,6 +188,24 @@ func F9[V any](V, *V, V) { panic(0) } }, }, {x: "F6", y: "F7", want: false}, // both are bound + { + x: "F5", + y: "F6", + params: tmap{tparam("F6", 0): intType}, // consistent with the result + want: true, + wantParams: tmap{ + tparam("F5", 0): intType, + tparam("F5", 1): intType, + tparam("F6", 0): intType, + }, + }, + { + x: "F5", + y: "F6", + params: tmap{tparam("F6", 0): boolType}, // not consistent + want: false, + }, + {x: "F6", y: "F7", want: false}, // both are bound { // T=*V, U=int, V=int x: "F5", @@ -194,8 +216,41 @@ func F9[V any](V, *V, V) { panic(0) } tparam("F5", 1): intType, }, }, + { + // T=*V, U=int, V=int + // Partial initial information is fine, as long as it's consistent. + x: "F5", + y: "F8", + want: true, + params: tmap{tparam("F5", 1): intType}, + wantParams: tmap{ + tparam("F5", 0): types.NewPointer(tparam("F8", 0)), + tparam("F5", 1): intType, + }, + }, + { + // T=*V, U=int, V=int + // Partial initial information is fine, as long as it's consistent. + x: "F5", + y: "F8", + want: true, + params: tmap{tparam("F5", 0): types.NewPointer(tparam("F8", 0))}, + wantParams: tmap{ + tparam("F5", 0): types.NewPointer(tparam("F8", 0)), + tparam("F5", 1): intType, + }, + }, {x: "F5", y: "F9", want: false}, // T is unbound, V is bound, and T occurs in V + { + // T bound to Named[T'] + x: "F1", + y: "DAny", + want: true, + wantParams: tmap{ + tparam("F1", 0): scope.Lookup("DAny").(*types.Func).Signature().Params().At(0).Type()}, + }, } { + lookup := func(name string) types.Type { obj := scope.Lookup(name) if obj == nil { @@ -226,14 +281,14 @@ func F9[V any](V, *V, V) { panic(0) } got := unify(ta, tb, gotParams) if got != want { t.Errorf("a=%s b=%s method=%s: unify returned %t for these inputs:\n- %s\n- %s", - a, b, test.method, got, a, b) + a, b, test.method, got, ta, tb) return } if !compareParams { return } if !maps.EqualFunc(gotParams, test.wantParams, types.Identical) { - t.Errorf("x=%s y=%s method=%s: xParams: got %v, want %v", + t.Errorf("x=%s y=%s method=%s: params: got %v, want %v", a, b, test.method, gotParams, test.wantParams) } } From 19f73a601401cc3b0748f6dd74dda19177a0a760 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Fri, 14 Mar 2025 10:23:22 -0400 Subject: [PATCH 137/270] internal/typesinternal/typeindex: index of types.Info This CL provides: - typeindex.Index, a reverse index of types.Info, allowing efficient query of the defining or using identifiers of a given types.Object symbol. - typeindex.Analyzer, an Analyzer that builds an Index and offers it to later analysis passes. For example, several analyzers scan over the entirety of Info.Uses looking for a particular object; now they can make a direct reverse query with typeindex.Index.Uses(Object). - a demonstration of its use in the hostport analyzer, which uses it to: (a) implement a much more specific initial "fast path" check to reject candidate packages, and (b) to optimize the navigation from a use of a variable to its declaration. - a demonstration of it with the fmtappendf modernizer, which now locates the calls of interest directly. 
- a benchmark, showing that the time to locate a single call to net.Dial in a large package such as net/http is about 10,000x (!) faster. This is admittedly an extreme case. The one-time overhead is about 6ms, roughly twice the cost of building an Inspector. - new cursor API to extract the index from a Cursor and to reconstruct a Cursor from its index. This allows for a compact encoding of Uses as varint- encoded deltas (~2 byte per Cursor instead of 16). Follow-up changes will make use of the index in other analyzers. Change-Id: If28c31d3a4d360b7c2ea2285896a3d06e6af0a0d Reviewed-on: https://go-review.googlesource.com/c/tools/+/657958 LUCI-TryBot-Result: Go LUCI Auto-Submit: Alan Donovan Reviewed-by: Jonathan Amsterdam Commit-Queue: Alan Donovan --- gopls/internal/analysis/hostport/hostport.go | 203 ++++++++-------- gopls/internal/analysis/modernize/bloop.go | 1 + .../internal/analysis/modernize/fmtappendf.go | 45 ++-- .../internal/analysis/modernize/modernize.go | 17 +- .../testdata/src/fmtappendf/fmtappendf.go | 4 +- .../src/fmtappendf/fmtappendf.go.golden | 4 +- .../analysisinternal/typeindex/typeindex.go | 33 +++ internal/astutil/cursor/cursor.go | 31 +++ internal/astutil/cursor/cursor_test.go | 1 - internal/typesinternal/typeindex/typeindex.go | 223 ++++++++++++++++++ .../typesinternal/typeindex/typeindex_test.go | 157 ++++++++++++ 11 files changed, 588 insertions(+), 131 deletions(-) create mode 100644 internal/analysisinternal/typeindex/typeindex.go create mode 100644 internal/typesinternal/typeindex/typeindex.go create mode 100644 internal/typesinternal/typeindex/typeindex_test.go diff --git a/gopls/internal/analysis/hostport/hostport.go b/gopls/internal/analysis/hostport/hostport.go index a7030ae116f..d95e475d1bf 100644 --- a/gopls/internal/analysis/hostport/hostport.go +++ b/gopls/internal/analysis/hostport/hostport.go @@ -14,11 +14,10 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" "golang.org/x/tools/gopls/internal/util/safetoken" - "golang.org/x/tools/internal/analysisinternal" - "golang.org/x/tools/internal/astutil/cursor" + typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" + "golang.org/x/tools/internal/typesinternal/typeindex" ) const Doc = `check format of addresses passed to net.Dial @@ -44,20 +43,20 @@ var Analyzer = &analysis.Analyzer{ Name: "hostport", Doc: Doc, URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/hostport", - Requires: []*analysis.Analyzer{inspect.Analyzer}, + Requires: []*analysis.Analyzer{inspect.Analyzer, typeindexanalyzer.Analyzer}, Run: run, } func run(pass *analysis.Pass) (any, error) { - // Fast path: if the package doesn't import net and fmt, skip - // the traversal. - if !analysisinternal.Imports(pass.Pkg, "net") || - !analysisinternal.Imports(pass.Pkg, "fmt") { - return nil, nil + var ( + index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index) + info = pass.TypesInfo + fmtSprintf = index.Object("fmt", "Sprintf") + ) + if !index.Used(fmtSprintf) { + return nil, nil // fast path: package doesn't use fmt.Sprintf } - info := pass.TypesInfo - // checkAddr reports a diagnostic (and returns true) if e // is a call of the form fmt.Sprintf("%d:%d", ...). // The diagnostic includes a fix. @@ -65,96 +64,94 @@ func run(pass *analysis.Pass) (any, error) { // dialCall is non-nil if the Dial call is non-local // but within the same file. 
checkAddr := func(e ast.Expr, dialCall *ast.CallExpr) { - if call, ok := e.(*ast.CallExpr); ok { - obj := typeutil.Callee(info, call) - if analysisinternal.IsFunctionNamed(obj, "fmt", "Sprintf") { - // Examine format string. - formatArg := call.Args[0] - if tv := info.Types[formatArg]; tv.Value != nil { - numericPort := false - format := constant.StringVal(tv.Value) - switch format { - case "%s:%d": - // Have: fmt.Sprintf("%s:%d", host, port) - numericPort = true - - case "%s:%s": - // Have: fmt.Sprintf("%s:%s", host, portStr) - // Keep port string as is. - - default: - return - } + if call, ok := e.(*ast.CallExpr); ok && typeutil.Callee(info, call) == fmtSprintf { + // Examine format string. + formatArg := call.Args[0] + if tv := info.Types[formatArg]; tv.Value != nil { + numericPort := false + format := constant.StringVal(tv.Value) + switch format { + case "%s:%d": + // Have: fmt.Sprintf("%s:%d", host, port) + numericPort = true + + case "%s:%s": + // Have: fmt.Sprintf("%s:%s", host, portStr) + // Keep port string as is. + + default: + return + } - // Use granular edits to preserve original formatting. - edits := []analysis.TextEdit{ - { - // Replace fmt.Sprintf with net.JoinHostPort. - Pos: call.Fun.Pos(), - End: call.Fun.End(), - NewText: []byte("net.JoinHostPort"), - }, - { - // Delete format string. - Pos: formatArg.Pos(), - End: call.Args[1].Pos(), - }, - } + // Use granular edits to preserve original formatting. + edits := []analysis.TextEdit{ + { + // Replace fmt.Sprintf with net.JoinHostPort. + Pos: call.Fun.Pos(), + End: call.Fun.End(), + NewText: []byte("net.JoinHostPort"), + }, + { + // Delete format string. + Pos: formatArg.Pos(), + End: call.Args[1].Pos(), + }, + } - // Turn numeric port into a string. - if numericPort { - // port => fmt.Sprintf("%d", port) - // 123 => "123" - port := call.Args[2] - newPort := fmt.Sprintf(`fmt.Sprintf("%%d", %s)`, port) - if port := info.Types[port].Value; port != nil { - if i, ok := constant.Int64Val(port); ok { - newPort = fmt.Sprintf(`"%d"`, i) // numeric constant - } + // Turn numeric port into a string. + if numericPort { + // port => fmt.Sprintf("%d", port) + // 123 => "123" + port := call.Args[2] + newPort := fmt.Sprintf(`fmt.Sprintf("%%d", %s)`, port) + if port := info.Types[port].Value; port != nil { + if i, ok := constant.Int64Val(port); ok { + newPort = fmt.Sprintf(`"%d"`, i) // numeric constant } - - edits = append(edits, analysis.TextEdit{ - Pos: port.Pos(), - End: port.End(), - NewText: []byte(newPort), - }) - } - - // Refer to Dial call, if not adjacent. - suffix := "" - if dialCall != nil { - suffix = fmt.Sprintf(" (passed to net.Dial at L%d)", - safetoken.StartPosition(pass.Fset, dialCall.Pos()).Line) } - pass.Report(analysis.Diagnostic{ - // Highlight the format string. - Pos: formatArg.Pos(), - End: formatArg.End(), - Message: fmt.Sprintf("address format %q does not work with IPv6%s", format, suffix), - SuggestedFixes: []analysis.SuggestedFix{{ - Message: "Replace fmt.Sprintf with net.JoinHostPort", - TextEdits: edits, - }}, + edits = append(edits, analysis.TextEdit{ + Pos: port.Pos(), + End: port.End(), + NewText: []byte(newPort), }) } + + // Refer to Dial call, if not adjacent. + suffix := "" + if dialCall != nil { + suffix = fmt.Sprintf(" (passed to net.Dial at L%d)", + safetoken.StartPosition(pass.Fset, dialCall.Pos()).Line) + } + + pass.Report(analysis.Diagnostic{ + // Highlight the format string. 
+ Pos: formatArg.Pos(), + End: formatArg.End(), + Message: fmt.Sprintf("address format %q does not work with IPv6%s", format, suffix), + SuggestedFixes: []analysis.SuggestedFix{{ + Message: "Replace fmt.Sprintf with net.JoinHostPort", + TextEdits: edits, + }}, + }) } } } // Check address argument of each call to net.Dial et al. - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - for curCall := range cursor.Root(inspect).Preorder((*ast.CallExpr)(nil)) { - call := curCall.Node().(*ast.CallExpr) - - obj := typeutil.Callee(info, call) - if analysisinternal.IsFunctionNamed(obj, "net", "Dial", "DialTimeout") || - analysisinternal.IsMethodNamed(obj, "net", "Dialer", "Dial") { - + for _, callee := range []types.Object{ + index.Object("net", "Dial"), + index.Object("net", "DialTimeout"), + index.Selection("net", "Dialer", "Dial"), + } { + for curCall := range index.Calls(callee) { + call := curCall.Node().(*ast.CallExpr) switch address := call.Args[1].(type) { case *ast.CallExpr: - // net.Dial("tcp", fmt.Sprintf("%s:%d", ...)) - checkAddr(address, nil) + if len(call.Args) == 2 { // avoid spread-call edge case + // net.Dial("tcp", fmt.Sprintf("%s:%d", ...)) + checkAddr(address, nil) + } case *ast.Ident: // addr := fmt.Sprintf("%s:%d", ...) @@ -162,25 +159,23 @@ func run(pass *analysis.Pass) (any, error) { // net.Dial("tcp", addr) // Search for decl of addrVar within common ancestor of addrVar and Dial call. + // TODO(adonovan): abstract "find RHS of statement that assigns var v". + // TODO(adonovan): reject if there are other assignments to var v. if addrVar, ok := info.Uses[address].(*types.Var); ok { - pos := addrVar.Pos() - for curAncestor := range curCall.Ancestors() { - if curIdent, ok := curAncestor.FindPos(pos, pos); ok { - // curIdent is the declaring ast.Ident of addr. - switch parent := curIdent.Parent().Node().(type) { - case *ast.AssignStmt: - if len(parent.Rhs) == 1 { - // Have: addr := fmt.Sprintf("%s:%d", ...) - checkAddr(parent.Rhs[0], call) - } - - case *ast.ValueSpec: - if len(parent.Values) == 1 { - // Have: var addr = fmt.Sprintf("%s:%d", ...) - checkAddr(parent.Values[0], call) - } + if curId, ok := index.Def(addrVar); ok { + // curIdent is the declaring ast.Ident of addr. + switch parent := curId.Parent().Node().(type) { + case *ast.AssignStmt: + if len(parent.Rhs) == 1 { + // Have: addr := fmt.Sprintf("%s:%d", ...) + checkAddr(parent.Rhs[0], call) + } + + case *ast.ValueSpec: + if len(parent.Values) == 1 { + // Have: var addr = fmt.Sprintf("%s:%d", ...) + checkAddr(parent.Values[0], call) } - break } } } diff --git a/gopls/internal/analysis/modernize/bloop.go b/gopls/internal/analysis/modernize/bloop.go index a70246b5e0e..2ebaa606508 100644 --- a/gopls/internal/analysis/modernize/bloop.go +++ b/gopls/internal/analysis/modernize/bloop.go @@ -152,6 +152,7 @@ func bloop(pass *analysis.Pass) { } // uses reports whether the subtree cur contains a use of obj. +// TODO(adonovan): opt: use typeindex. 
func uses(info *types.Info, cur cursor.Cursor, obj types.Object) bool { for curId := range cur.Preorder((*ast.Ident)(nil)) { if info.Uses[curId.Node().(*ast.Ident)] == obj { diff --git a/gopls/internal/analysis/modernize/fmtappendf.go b/gopls/internal/analysis/modernize/fmtappendf.go index 8575827aa3e..199a626a86e 100644 --- a/gopls/internal/analysis/modernize/fmtappendf.go +++ b/gopls/internal/analysis/modernize/fmtappendf.go @@ -5,33 +5,35 @@ package modernize import ( + "fmt" "go/ast" "go/types" "strings" "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/internal/analysisinternal" + typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" + "golang.org/x/tools/internal/astutil/edge" + "golang.org/x/tools/internal/typesinternal/typeindex" ) // The fmtappend function replaces []byte(fmt.Sprintf(...)) by -// fmt.Appendf(nil, ...). +// fmt.Appendf(nil, ...), and similarly for Sprint, Sprintln. func fmtappendf(pass *analysis.Pass) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - info := pass.TypesInfo - for curFile := range filesUsing(inspect, info, "go1.19") { - for curCallExpr := range curFile.Preorder((*ast.CallExpr)(nil)) { - conv := curCallExpr.Node().(*ast.CallExpr) - tv := info.Types[conv.Fun] - if tv.IsType() && types.Identical(tv.Type, byteSliceType) { - call, ok := conv.Args[0].(*ast.CallExpr) - if ok { - obj := typeutil.Callee(info, call) - if !analysisinternal.IsFunctionNamed(obj, "fmt", "Sprintf", "Sprintln", "Sprint") { - continue - } + index := pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index) + for _, fn := range []types.Object{ + index.Object("fmt", "Sprintf"), + index.Object("fmt", "Sprintln"), + index.Object("fmt", "Sprint"), + } { + for curCall := range index.Calls(fn) { + call := curCall.Node().(*ast.CallExpr) + if ek, idx := curCall.ParentEdge(); ek == edge.CallExpr_Args && idx == 0 { + // Is parent a T(fmt.SprintX(...)) conversion? + conv := curCall.Parent().Node().(*ast.CallExpr) + tv := pass.TypesInfo.Types[conv.Fun] + if tv.IsType() && types.Identical(tv.Type, byteSliceType) && + fileUses(pass.TypesInfo, curCall, "go1.19") { + // Have: []byte(fmt.SprintX(...)) // Find "Sprint" identifier. var id *ast.Ident @@ -42,13 +44,14 @@ func fmtappendf(pass *analysis.Pass) { id = e // "Sprint" after `import . "fmt"` } + old, new := fn.Name(), strings.Replace(fn.Name(), "Sprint", "Append", 1) pass.Report(analysis.Diagnostic{ Pos: conv.Pos(), End: conv.End(), Category: "fmtappendf", - Message: "Replace []byte(fmt.Sprintf...) with fmt.Appendf", + Message: fmt.Sprintf("Replace []byte(fmt.%s...) with fmt.%s", old, new), SuggestedFixes: []analysis.SuggestedFix{{ - Message: "Replace []byte(fmt.Sprintf...) with fmt.Appendf", + Message: fmt.Sprintf("Replace []byte(fmt.%s...) 
with fmt.%s", old, new), TextEdits: []analysis.TextEdit{ { // delete "[]byte(" @@ -63,7 +66,7 @@ func fmtappendf(pass *analysis.Pass) { { Pos: id.Pos(), End: id.End(), - NewText: []byte(strings.Replace(obj.Name(), "Sprint", "Append", 1)), + NewText: []byte(new), }, { Pos: call.Lparen + 1, diff --git a/gopls/internal/analysis/modernize/modernize.go b/gopls/internal/analysis/modernize/modernize.go index 16fea0d8896..831376bde38 100644 --- a/gopls/internal/analysis/modernize/modernize.go +++ b/gopls/internal/analysis/modernize/modernize.go @@ -22,6 +22,7 @@ import ( "golang.org/x/tools/gopls/internal/util/astutil" "golang.org/x/tools/gopls/internal/util/moreiters" "golang.org/x/tools/internal/analysisinternal" + typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" "golang.org/x/tools/internal/astutil/cursor" "golang.org/x/tools/internal/stdlib" "golang.org/x/tools/internal/versions" @@ -33,7 +34,7 @@ var doc string var Analyzer = &analysis.Analyzer{ Name: "modernize", Doc: analysisinternal.MustExtractDoc(doc, "modernize"), - Requires: []*analysis.Analyzer{inspect.Analyzer}, + Requires: []*analysis.Analyzer{inspect.Analyzer, typeindexanalyzer.Analyzer}, Run: run, URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/modernize", } @@ -125,6 +126,9 @@ func isZeroIntLiteral(info *types.Info, e ast.Expr) bool { // filesUsing returns a cursor for each *ast.File in the inspector // that uses at least the specified version of Go (e.g. "go1.24"). +// +// TODO(adonovan): opt: eliminate this function, instead following the +// approach of [fmtappendf], which uses typeindex and [fileUses]. func filesUsing(inspect *inspector.Inspector, info *types.Info, version string) iter.Seq[cursor.Cursor] { return func(yield func(cursor.Cursor) bool) { for curFile := range cursor.Root(inspect).Children() { @@ -136,6 +140,17 @@ func filesUsing(inspect *inspector.Inspector, info *types.Info, version string) } } +// fileUses reports whether the file containing the specified cursor +// uses at least the specified version of Go (e.g. "go1.24"). +func fileUses(info *types.Info, c cursor.Cursor, version string) bool { + // TODO(adonovan): make Ancestors reflexive so !ok becomes impossible. + if curFile, ok := moreiters.First(c.Ancestors((*ast.File)(nil))); ok { + c = curFile + } + file := c.Node().(*ast.File) + return !versions.Before(info.FileVersions[file], version) +} + // within reports whether the current pass is analyzing one of the // specified standard packages or their dependencies. 
func within(pass *analysis.Pass, pkgs ...string) bool { diff --git a/gopls/internal/analysis/modernize/testdata/src/fmtappendf/fmtappendf.go b/gopls/internal/analysis/modernize/testdata/src/fmtappendf/fmtappendf.go index a39a03ee786..a435b6a6461 100644 --- a/gopls/internal/analysis/modernize/testdata/src/fmtappendf/fmtappendf.go +++ b/gopls/internal/analysis/modernize/testdata/src/fmtappendf/fmtappendf.go @@ -29,8 +29,8 @@ func typealias() { } func otherprints() { - sprint := []byte(fmt.Sprint("bye %d", 1)) // want "Replace .*Sprintf.* with fmt.Appendf" + sprint := []byte(fmt.Sprint("bye %d", 1)) // want "Replace .*Sprint.* with fmt.Append" print(sprint) - sprintln := []byte(fmt.Sprintln("bye %d", 1)) // want "Replace .*Sprintf.* with fmt.Appendf" + sprintln := []byte(fmt.Sprintln("bye %d", 1)) // want "Replace .*Sprintln.* with fmt.Appendln" print(sprintln) } diff --git a/gopls/internal/analysis/modernize/testdata/src/fmtappendf/fmtappendf.go.golden b/gopls/internal/analysis/modernize/testdata/src/fmtappendf/fmtappendf.go.golden index 7c8aa7b9a5e..4fd2b136b82 100644 --- a/gopls/internal/analysis/modernize/testdata/src/fmtappendf/fmtappendf.go.golden +++ b/gopls/internal/analysis/modernize/testdata/src/fmtappendf/fmtappendf.go.golden @@ -29,8 +29,8 @@ func typealias() { } func otherprints() { - sprint := fmt.Append(nil, "bye %d", 1) // want "Replace .*Sprintf.* with fmt.Appendf" + sprint := fmt.Append(nil, "bye %d", 1) // want "Replace .*Sprint.* with fmt.Append" print(sprint) - sprintln := fmt.Appendln(nil, "bye %d", 1) // want "Replace .*Sprintf.* with fmt.Appendf" + sprintln := fmt.Appendln(nil, "bye %d", 1) // want "Replace .*Sprintln.* with fmt.Appendln" print(sprintln) } \ No newline at end of file diff --git a/internal/analysisinternal/typeindex/typeindex.go b/internal/analysisinternal/typeindex/typeindex.go new file mode 100644 index 00000000000..bba21c6ea01 --- /dev/null +++ b/internal/analysisinternal/typeindex/typeindex.go @@ -0,0 +1,33 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package typeindex defines an analyzer that provides a +// [golang.org/x/tools/internal/typesinternal/typeindex.Index]. +// +// Like [golang.org/x/tools/go/analysis/passes/inspect], it is +// intended to be used as a helper by other analyzers; it reports no +// diagnostics of its own. 
+package typeindex + +import ( + "reflect" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/typesinternal/typeindex" +) + +var Analyzer = &analysis.Analyzer{ + Name: "typeindex", + Doc: "indexes of type information for later passes", + URL: "https://pkg.go.dev/golang.org/x/tools/internal/analysisinternal/typeindex", + Run: func(pass *analysis.Pass) (any, error) { + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + return typeindex.New(inspect, pass.Pkg, pass.TypesInfo), nil + }, + RunDespiteErrors: true, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + ResultType: reflect.TypeOf(new(typeindex.Index)), +} diff --git a/internal/astutil/cursor/cursor.go b/internal/astutil/cursor/cursor.go index 144182f38cd..9139d4e516c 100644 --- a/internal/astutil/cursor/cursor.go +++ b/internal/astutil/cursor/cursor.go @@ -44,6 +44,37 @@ func Root(in *inspector.Inspector) Cursor { return Cursor{in, -1} } +// At returns the cursor at the specified index in the traversal, +// which must have been obtained from [Cursor.Index] on a Cursor +// belonging to the same Inspector. +func At(in *inspector.Inspector, index int32) Cursor { + if index < 0 { + panic("negative index") + } + events := events(in) + if int(index) >= len(events) { + panic("index out of range for this inspector") + } + if events[index].index < index { + panic("invalid index") // (a push, not a pop) + } + return Cursor{in, index} +} + +// Index returns the index of this cursor position within the package. +// +// Clients should not assume anything about the numeric Index value +// except that it increases monotonically throughout the traversal. +// It is provided for use with [At]. +// +// Index must not be called on the Root node. +func (c Cursor) Index() int32 { + if c.index < 0 { + panic("Index called on Root node") + } + return c.index +} + // Node returns the node at the current cursor position, // or nil for the cursor returned by [Inspector.Root]. func (c Cursor) Node() ast.Node { diff --git a/internal/astutil/cursor/cursor_test.go b/internal/astutil/cursor/cursor_test.go index 9f540ffdc76..380414df790 100644 --- a/internal/astutil/cursor/cursor_test.go +++ b/internal/astutil/cursor/cursor_test.go @@ -447,7 +447,6 @@ func TestCursor_Edge(t *testing.T) { t.Errorf("cur.Contains(cur.Parent().NextSibling()): %v", cur) } } - } } diff --git a/internal/typesinternal/typeindex/typeindex.go b/internal/typesinternal/typeindex/typeindex.go new file mode 100644 index 00000000000..a6cc6956892 --- /dev/null +++ b/internal/typesinternal/typeindex/typeindex.go @@ -0,0 +1,223 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package typeindex provides an [Index] of type information for a +// package, allowing efficient lookup of, say, whether a given symbol +// is referenced and, if so, where from; or of the [cursor.Cursor] for +// the declaration of a particular [types.Object] symbol. +package typeindex + +import ( + "encoding/binary" + "go/ast" + "go/types" + "iter" + + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/astutil/cursor" + "golang.org/x/tools/internal/astutil/edge" + "golang.org/x/tools/internal/typesinternal" +) + +// New constructs an Index for the package of type-annotated syntax +// +// TODO(adonovan): accept a FileSet too? 
+// We regret not requiring one in inspector.New. +func New(inspect *inspector.Inspector, pkg *types.Package, info *types.Info) *Index { + ix := &Index{ + inspect: inspect, + info: info, + packages: make(map[string]*types.Package), + def: make(map[types.Object]cursor.Cursor), + uses: make(map[types.Object]*uses), + } + + addPackage := func(pkg2 *types.Package) { + if pkg2 != nil && pkg2 != pkg { + ix.packages[pkg2.Path()] = pkg2 + } + } + + for cur := range cursor.Root(inspect).Preorder((*ast.ImportSpec)(nil), (*ast.Ident)(nil)) { + switch n := cur.Node().(type) { + case *ast.ImportSpec: + // Index direct imports, including blank ones. + if pkgname := info.PkgNameOf(n); pkgname != nil { + addPackage(pkgname.Imported()) + } + + case *ast.Ident: + // Index all defining and using identifiers. + if obj := info.Defs[n]; obj != nil { + ix.def[obj] = cur + } + + if obj := info.Uses[n]; obj != nil { + // Index indirect dependencies (via fields and methods). + if !typesinternal.IsPackageLevel(obj) { + addPackage(obj.Pkg()) + } + + us, ok := ix.uses[obj] + if !ok { + us = &uses{} + us.code = us.initial[:0] + ix.uses[obj] = us + } + delta := cur.Index() - us.last + if delta < 0 { + panic("non-monotonic") + } + us.code = binary.AppendUvarint(us.code, uint64(delta)) + us.last = cur.Index() + } + } + } + return ix +} + +// An Index holds an index mapping [types.Object] symbols to their syntax. +// In effect, it is the inverse of [types.Info]. +type Index struct { + inspect *inspector.Inspector + info *types.Info + packages map[string]*types.Package // packages of all symbols referenced from this package + def map[types.Object]cursor.Cursor // Cursor of *ast.Ident that defines the Object + uses map[types.Object]*uses // Cursors of *ast.Idents that use the Object +} + +// A uses holds the list of Cursors of Idents that use a given symbol. +// +// The Uses map of [types.Info] is substantial, so it pays to compress +// its inverse mapping here, both in space and in CPU due to reduced +// allocation. A Cursor is 2 words; a Cursor.Index is 4 bytes; but +// since Cursors are naturally delivered in ascending order, we can +// use varint-encoded deltas at a cost of only ~1.7-2.2 bytes per use. +// +// Many variables have only one or two uses, so their encoded uses may +// fit in the 4 bytes of initial, saving further CPU and space +// essentially for free since the struct's size class is 4 words. +type uses struct { + code []byte // varint-encoded deltas of successive Cursor.Index values + last int32 // most recent Cursor.Index value; used during encoding + initial [4]byte // use slack in size class as initial space for code +} + +// Uses returns the sequence of Cursors of [*ast.Ident]s in this package +// that refer to obj. If obj is nil, the sequence is empty. +func (ix *Index) Uses(obj types.Object) iter.Seq[cursor.Cursor] { + return func(yield func(cursor.Cursor) bool) { + if uses := ix.uses[obj]; uses != nil { + var last int32 + for code := uses.code; len(code) > 0; { + delta, n := binary.Uvarint(code) + last += int32(delta) + if !yield(cursor.At(ix.inspect, last)) { + return + } + code = code[n:] + } + } + } +} + +// Used reports whether any of the specified objects are used, in +// other words, obj != nil && Uses(obj) is non-empty for some obj in objs. +// +// (This treatment of nil allows Used to be called directly on the +// result of [Index.Object] so that analyzers can conveniently skip +// packages that don't use a symbol of interest.) 
+func (ix *Index) Used(objs ...types.Object) bool { + for _, obj := range objs { + if obj != nil && ix.uses[obj] != nil { + return true + } + } + return false +} + +// Def returns the Cursor of the [*ast.Ident] in this package +// that declares the specified object, if any. +func (ix *Index) Def(obj types.Object) (cursor.Cursor, bool) { + cur, ok := ix.def[obj] + return cur, ok +} + +// Package returns the package of the specified path, +// or nil if it is not referenced from this package. +func (ix *Index) Package(path string) *types.Package { + return ix.packages[path] +} + +// Object returns the package-level symbol name within the package of +// the specified path, or nil if the package or symbol does not exist +// or is not visible from this package. +func (ix *Index) Object(path, name string) types.Object { + if pkg := ix.Package(path); pkg != nil { + return pkg.Scope().Lookup(name) + } + return nil +} + +// Selection returns the named method or field belonging to the +// package-level type returned by Object(path, typename). +func (ix *Index) Selection(path, typename, name string) types.Object { + if obj := ix.Object(path, typename); obj != nil { + if tname, ok := obj.(*types.TypeName); ok { + obj, _, _ := types.LookupFieldOrMethod(tname.Type(), true, obj.Pkg(), name) + return obj + } + } + return nil +} + +// Calls returns the sequence of cursors for *ast.CallExpr nodes that +// call the specified callee, as defined by [typeutil.Callee]. +// If callee is nil, the sequence is empty. +func (ix *Index) Calls(callee types.Object) iter.Seq[cursor.Cursor] { + return func(yield func(cursor.Cursor) bool) { + for cur := range ix.Uses(callee) { + ek, _ := cur.ParentEdge() + + // The call may be of the form f() or x.f(), + // optionally with parens; ascend from f to call. + // + // It is tempting but wrong to use the first + // CallExpr ancestor: we have to make sure the + // ident is in the CallExpr.Fun position, otherwise + // f(f, f) would have two spurious matches. + // Avoiding Ancestors is also significantly faster. + + // inverse unparen: f -> (f) + for ek == edge.ParenExpr_X { + cur = cur.Parent() + ek, _ = cur.ParentEdge() + } + + // ascend selector: f -> x.f + if ek == edge.SelectorExpr_Sel { + cur = cur.Parent() + ek, _ = cur.ParentEdge() + } + + // inverse unparen again + for ek == edge.ParenExpr_X { + cur = cur.Parent() + ek, _ = cur.ParentEdge() + } + + // ascend from f or x.f to call + if ek == edge.CallExpr_Fun { + curCall := cur.Parent() + call := curCall.Node().(*ast.CallExpr) + if typeutil.Callee(ix.info, call) == callee { + if !yield(curCall) { + return + } + } + } + } + } +} diff --git a/internal/typesinternal/typeindex/typeindex_test.go b/internal/typesinternal/typeindex/typeindex_test.go new file mode 100644 index 00000000000..767d183ac44 --- /dev/null +++ b/internal/typesinternal/typeindex/typeindex_test.go @@ -0,0 +1,157 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +//go:build go1.24 + +package typeindex_test + +import ( + "go/ast" + "slices" + "testing" + + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/packages" + "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/astutil/cursor" + "golang.org/x/tools/internal/typesinternal/typeindex" +) + +func TestIndex(t *testing.T) { + var ( + pkg = loadNetHTTP(t) + inspect = inspector.New(pkg.Syntax) + index = typeindex.New(inspect, pkg.Types, pkg.TypesInfo) + fmtSprintf = index.Object("fmt", "Sprintf") + ) + + // Gather calls and uses of fmt.Sprintf in net/http. + var ( + wantUses []*ast.Ident + wantCalls []*ast.CallExpr + ) + for n := range inspect.PreorderSeq((*ast.CallExpr)(nil), (*ast.Ident)(nil)) { + switch n := n.(type) { + case *ast.CallExpr: + if typeutil.Callee(pkg.TypesInfo, n) == fmtSprintf { + wantCalls = append(wantCalls, n) + } + case *ast.Ident: + if pkg.TypesInfo.Uses[n] == fmtSprintf { + wantUses = append(wantUses, n) + } + } + } + // sanity check (expect about 60 of each) + if wantUses == nil || wantCalls == nil { + t.Fatalf("no calls or uses of fmt.Sprintf in net/http") + } + + var ( + gotUses []*ast.Ident + gotCalls []*ast.CallExpr + ) + for curId := range index.Uses(fmtSprintf) { + gotUses = append(gotUses, curId.Node().(*ast.Ident)) + } + for curCall := range index.Calls(fmtSprintf) { + gotCalls = append(gotCalls, curCall.Node().(*ast.CallExpr)) + } + + if !slices.Equal(gotUses, wantUses) { + t.Errorf("index.Uses(fmt.Sprintf) = %v, want %v", gotUses, wantUses) + } + if !slices.Equal(gotCalls, wantCalls) { + t.Errorf("index.Calls(fmt.Sprintf) = %v, want %v", gotCalls, wantCalls) + } +} + +func loadNetHTTP(tb testing.TB) *packages.Package { + cfg := &packages.Config{Mode: packages.LoadSyntax} + pkgs, err := packages.Load(cfg, "net/http") + if err != nil { + tb.Fatal(err) + } + return pkgs[0] +} + +func BenchmarkIndex(b *testing.B) { + // Load net/http, a large package, and find calls to net.Dial. + // + // There is currently exactly one, which provides an extreme + // demonstration of the performance advantage of the Index. + // + // Index construction costs approximately 7x the cursor + // traversal, so it breaks even when it replaces 7 passes. + // The cost of index lookup is approximately zero. + pkg := loadNetHTTP(b) + + // Build the Inspector (~2.8ms). + var inspect *inspector.Inspector + b.Run("inspector.New", func(b *testing.B) { + for b.Loop() { + inspect = inspector.New(pkg.Syntax) + } + }) + + // Build the Index (~6.6ms). 
+ var index *typeindex.Index + b.Run("typeindex.New", func(b *testing.B) { + b.ReportAllocs() // 2.48MB/op + for b.Loop() { + index = typeindex.New(inspect, pkg.Types, pkg.TypesInfo) + } + }) + + target := index.Object("net", "Dial") + + var countA, countB, countC int + + // unoptimized inspect implementation (~1.6ms, 1x) + b.Run("inspect", func(b *testing.B) { + for b.Loop() { + countA = 0 + for _, file := range pkg.Syntax { + ast.Inspect(file, func(n ast.Node) bool { + if call, ok := n.(*ast.CallExpr); ok { + if typeutil.Callee(pkg.TypesInfo, call) == target { + countA++ + } + } + return true + }) + } + } + }) + if countA == 0 { + b.Errorf("target %v not found", target) + } + + // unoptimized cursor implementation (~390us, 4x faster) + b.Run("cursor", func(b *testing.B) { + for b.Loop() { + countB = 0 + for curCall := range cursor.Root(inspect).Preorder((*ast.CallExpr)(nil)) { + call := curCall.Node().(*ast.CallExpr) + if typeutil.Callee(pkg.TypesInfo, call) == target { + countB++ + } + } + } + }) + + // indexed implementation (~120ns, >10,000x faster) + b.Run("index", func(b *testing.B) { + for b.Loop() { + countC = 0 + for range index.Calls(target) { + countC++ + } + } + }) + + if countA != countB || countA != countC { + b.Fatalf("inconsistent results (inspect=%d, cursor=%d, index=%d)", countA, countB, countC) + } +} From 11a3153db611913acc03b4b5a3c2d4a4cdf3e095 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Mon, 24 Mar 2025 17:25:37 -0400 Subject: [PATCH 138/270] gopls/internal/analysis/modernize: rangeint: respect side effects This CL fixes a serious bug in rangeint that caused it to offer a fix from "for i := 0; i < len(s); i++" to "for range s" even when the loop may modify the value of slice s. (The for loop reads the length on each iteration, whereas the range loop reads it only once.) + test Fixes golang/go#72917 Change-Id: Id0c926f0590241736c7fe7589c2796a075d05744 Reviewed-on: https://go-review.googlesource.com/c/tools/+/660435 LUCI-TryBot-Result: Go LUCI Auto-Submit: Alan Donovan Reviewed-by: Jonathan Amsterdam --- gopls/internal/analysis/modernize/rangeint.go | 74 ++++++++++++++----- .../testdata/src/rangeint/rangeint.go | 48 +++++++++++- .../testdata/src/rangeint/rangeint.go.golden | 48 +++++++++++- 3 files changed, 151 insertions(+), 19 deletions(-) diff --git a/gopls/internal/analysis/modernize/rangeint.go b/gopls/internal/analysis/modernize/rangeint.go index 2a500085e01..a8106f08d57 100644 --- a/gopls/internal/analysis/modernize/rangeint.go +++ b/gopls/internal/analysis/modernize/rangeint.go @@ -15,8 +15,11 @@ import ( "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" "golang.org/x/tools/internal/analysisinternal" + typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" "golang.org/x/tools/internal/astutil/cursor" "golang.org/x/tools/internal/astutil/edge" + "golang.org/x/tools/internal/typesinternal" + "golang.org/x/tools/internal/typesinternal/typeindex" ) // rangeint offers a fix to replace a 3-clause 'for' loop: @@ -38,13 +41,23 @@ import ( // - The limit must not be b.N, to avoid redundancy with bloop's fixes. // // Caveats: -// - The fix will cause the limit expression to be evaluated exactly -// once, instead of once per iteration. The limit may be a function call -// (e.g. seq.Len()). The fix may change the cardinality of side effects. +// +// The fix causes the limit expression to be evaluated exactly once, +// instead of once per iteration. 
So, to avoid changing the +// cardinality of side effects, the limit expression must not involve +// function calls (e.g. seq.Len()) or channel receives. Moreover, the +// value of the limit expression must be loop invariant, which in +// practice means it must take one of the following forms: +// +// - a local variable that is assigned only once and not address-taken; +// - a constant; or +// - len(s), where s has the above properties. func rangeint(pass *analysis.Pass) { info := pass.TypesInfo inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + typeindex := pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index) + for curFile := range filesUsing(inspect, info, "go1.22") { nextLoop: for curLoop := range curFile.Preorder((*ast.ForStmt)(nil)) { @@ -62,19 +75,39 @@ func rangeint(pass *analysis.Pass) { // Have: for i = 0; i < limit; ... {} limit := compare.Y - curLimit, _ := curLoop.FindNode(limit) - // Don't offer a fix if the limit expression depends on the loop index. - for cur := range curLimit.Preorder((*ast.Ident)(nil)) { - if cur.Node().(*ast.Ident).Name == index.Name { - continue nextLoop - } + + // If limit is "len(slice)", simplify it to "slice". + // + // (Don't replace "for i := 0; i < len(map); i++" + // with "for range m" because it's too hard to prove + // that len(m) is loop-invariant). + if call, ok := limit.(*ast.CallExpr); ok && + typeutil.Callee(info, call) == builtinLen && + is[*types.Slice](info.TypeOf(call.Args[0]).Underlying()) { + limit = call.Args[0] } - // Skip loops up to b.N in benchmarks; see [bloop]. - if sel, ok := limit.(*ast.SelectorExpr); ok && - sel.Sel.Name == "N" && - analysisinternal.IsPointerToNamed(info.TypeOf(sel.X), "testing", "B") { - continue // skip b.N + // Check the form of limit: must be a constant, + // or a local var that is not assigned or address-taken. + limitOK := false + if info.Types[limit].Value != nil { + limitOK = true // constant + } else if id, ok := limit.(*ast.Ident); ok { + if v, ok := info.Uses[id].(*types.Var); ok && + !(v.Exported() && typesinternal.IsPackageLevel(v)) { + // limit is a local or unexported global var. + // (An exported global may have uses we can't see.) + for cur := range typeindex.Uses(v) { + if isScalarLvalue(info, cur) { + // Limit var is assigned or address-taken. + continue nextLoop + } + } + limitOK = true + } + } + if !limitOK { + continue nextLoop } if inc, ok := loop.Post.(*ast.IncDecStmt); ok && @@ -93,7 +126,7 @@ func rangeint(pass *analysis.Pass) { // Reject if any is an l-value (assigned or address-taken): // a "for range int" loop does not respect assignments to // the loop variable. - if isScalarLvalue(curId) { + if isScalarLvalue(info, curId) { continue nextLoop } } @@ -213,7 +246,7 @@ func rangeint(pass *analysis.Pass) { // // This function is valid only for scalars (x = ...), // not for aggregates (x.a[i] = ...) -func isScalarLvalue(curId cursor.Cursor) bool { +func isScalarLvalue(info *types.Info, curId cursor.Cursor) bool { // Unfortunately we can't simply use info.Types[e].Assignable() // as it is always true for a variable even when that variable is // used only as an r-value. So we must inspect enclosing syntax. 
@@ -229,7 +262,14 @@ func isScalarLvalue(curId cursor.Cursor) bool { switch ek { case edge.AssignStmt_Lhs: - return true // i = j + assign := cur.Parent().Node().(*ast.AssignStmt) + if assign.Tok == token.ASSIGN { + return true // i = j + } + id := curId.Node().(*ast.Ident) + if v, ok := info.Defs[id]; ok && v.Pos() != id.Pos() { + return true // reassignment of i (i, j := 1, 2) + } case edge.IncDecStmt_X: return true // i++, i-- case edge.UnaryExpr_X: diff --git a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go index b2a7459e5a3..0890051f0ba 100644 --- a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go +++ b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go @@ -48,7 +48,7 @@ func _(i int, s struct{ i int }, slice []int) { { var i int - for i = 0; i < f(); i++ { // want "for loop can be modernized using range over int" + for i = 0; i < 10; i++ { // want "for loop can be modernized using range over int" } // NB: no uses of i after loop. } @@ -90,8 +90,54 @@ func _(i int, s struct{ i int }, slice []int) { for i := 0; i < 10; i++ { // nope: assigns i i = 8 } + + // The limit expression must be loop invariant; + // see https://github.com/golang/go/issues/72917 + for i := 0; i < f(); i++ { // nope + } + { + var s struct{ limit int } + for i := 0; i < s.limit; i++ { // nope: limit is not a const or local var + } + } + { + const k = 10 + for i := 0; i < k; i++ { // want "for loop can be modernized using range over int" + } + } + { + var limit = 10 + for i := 0; i < limit; i++ { // want "for loop can be modernized using range over int" + } + } + { + var limit = 10 + for i := 0; i < limit; i++ { // nope: limit is address-taken + } + print(&limit) + } + { + limit := 10 + limit++ + for i := 0; i < limit; i++ { // nope: limit is assigned other than by its declaration + } + } + for i := 0; i < Global; i++ { // nope: limit is an exported global var; may be updated elsewhere + } + for i := 0; i < len(table); i++ { // want "for loop can be modernized using range over int" + } + { + s := []string{} + for i := 0; i < len(s); i++ { // nope: limit is not loop-invariant + s = s[1:] + } + } } +var Global int + +var table = []string{"hello", "world"} + func f() int { return 0 } // Repro for part of #71847: ("for range n is invalid if the loop body contains i++"): diff --git a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden index f323879e13f..a6b3755840a 100644 --- a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden +++ b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden @@ -48,7 +48,7 @@ func _(i int, s struct{ i int }, slice []int) { { var i int - for i = range f() { // want "for loop can be modernized using range over int" + for i = range 10 { // want "for loop can be modernized using range over int" } // NB: no uses of i after loop. 
} @@ -90,8 +90,54 @@ func _(i int, s struct{ i int }, slice []int) { for i := 0; i < 10; i++ { // nope: assigns i i = 8 } + + // The limit expression must be loop invariant; + // see https://github.com/golang/go/issues/72917 + for i := 0; i < f(); i++ { // nope + } + { + var s struct{ limit int } + for i := 0; i < s.limit; i++ { // nope: limit is not a const or local var + } + } + { + const k = 10 + for range k { // want "for loop can be modernized using range over int" + } + } + { + var limit = 10 + for range limit { // want "for loop can be modernized using range over int" + } + } + { + var limit = 10 + for i := 0; i < limit; i++ { // nope: limit is address-taken + } + print(&limit) + } + { + limit := 10 + limit++ + for i := 0; i < limit; i++ { // nope: limit is assigned other than by its declaration + } + } + for i := 0; i < Global; i++ { // nope: limit is an exported global var; may be updated elsewhere + } + for range table { // want "for loop can be modernized using range over int" + } + { + s := []string{} + for i := 0; i < len(s); i++ { // nope: limit is not loop-invariant + s = s[1:] + } + } } +var Global int + +var table = []string{"hello", "world"} + func f() int { return 0 } // Repro for part of #71847: ("for range n is invalid if the loop body contains i++"): From 30641f5ab3e08af28ea60644441d2fd05f2bc054 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Tue, 25 Mar 2025 12:57:46 -0400 Subject: [PATCH 139/270] gopls/internal/analysis/modernize: use typeindex throughout Change-Id: Ibc59b715430f60a4d311adff5fe75287ae2b897a Reviewed-on: https://go-review.googlesource.com/c/tools/+/660616 Reviewed-by: Jonathan Amsterdam Auto-Submit: Alan Donovan LUCI-TryBot-Result: Go LUCI Commit-Queue: Alan Donovan --- gopls/internal/analysis/modernize/bloop.go | 18 +++-- .../internal/analysis/modernize/fmtappendf.go | 2 +- .../internal/analysis/modernize/modernize.go | 14 ++-- .../analysis/modernize/slicescontains.go | 13 ++- .../internal/analysis/modernize/sortslice.go | 34 +++----- .../analysis/modernize/stringscutprefix.go | 20 +++-- .../internal/analysis/modernize/stringsseq.go | 26 +++--- .../analysis/modernize/testingcontext.go | 80 +++++++------------ 8 files changed, 103 insertions(+), 104 deletions(-) diff --git a/gopls/internal/analysis/modernize/bloop.go b/gopls/internal/analysis/modernize/bloop.go index 2ebaa606508..1fc07169486 100644 --- a/gopls/internal/analysis/modernize/bloop.go +++ b/gopls/internal/analysis/modernize/bloop.go @@ -15,7 +15,9 @@ import ( "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" "golang.org/x/tools/internal/analysisinternal" + typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" "golang.org/x/tools/internal/astutil/cursor" + "golang.org/x/tools/internal/typesinternal/typeindex" ) // bloop updates benchmarks that use "for range b.N", replacing it @@ -31,7 +33,11 @@ func bloop(pass *analysis.Pass) { return } - info := pass.TypesInfo + var ( + inspect = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index) + info = pass.TypesInfo + ) // edits computes the text edits for a matched for/range loop // at the specified cursor. b is the *testing.B value, and @@ -76,7 +82,6 @@ func bloop(pass *analysis.Pass) { } // Find all for/range statements. 
- inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) loops := []ast.Node{ (*ast.ForStmt)(nil), (*ast.RangeStmt)(nil), @@ -105,7 +110,7 @@ func bloop(pass *analysis.Pass) { is[*ast.IncDecStmt](n.Post) && n.Post.(*ast.IncDecStmt).Tok == token.INC && equalSyntax(n.Post.(*ast.IncDecStmt).X, assign.Lhs[0]) && - !uses(info, body, info.Defs[assign.Lhs[0].(*ast.Ident)]) { + !uses(index, body, info.Defs[assign.Lhs[0].(*ast.Ident)]) { delStart, delEnd = n.Init.Pos(), n.Post.End() } @@ -152,10 +157,9 @@ func bloop(pass *analysis.Pass) { } // uses reports whether the subtree cur contains a use of obj. -// TODO(adonovan): opt: use typeindex. -func uses(info *types.Info, cur cursor.Cursor, obj types.Object) bool { - for curId := range cur.Preorder((*ast.Ident)(nil)) { - if info.Uses[curId.Node().(*ast.Ident)] == obj { +func uses(index *typeindex.Index, cur cursor.Cursor, obj types.Object) bool { + for use := range index.Uses(obj) { + if cur.Contains(use) { return true } } diff --git a/gopls/internal/analysis/modernize/fmtappendf.go b/gopls/internal/analysis/modernize/fmtappendf.go index 199a626a86e..6b01d38050e 100644 --- a/gopls/internal/analysis/modernize/fmtappendf.go +++ b/gopls/internal/analysis/modernize/fmtappendf.go @@ -32,7 +32,7 @@ func fmtappendf(pass *analysis.Pass) { conv := curCall.Parent().Node().(*ast.CallExpr) tv := pass.TypesInfo.Types[conv.Fun] if tv.IsType() && types.Identical(tv.Type, byteSliceType) && - fileUses(pass.TypesInfo, curCall, "go1.19") { + fileUses(pass.TypesInfo, enclosingFile(curCall), "go1.19") { // Have: []byte(fmt.SprintX(...)) // Find "Sprint" identifier. diff --git a/gopls/internal/analysis/modernize/modernize.go b/gopls/internal/analysis/modernize/modernize.go index 831376bde38..ac4a5c28e5f 100644 --- a/gopls/internal/analysis/modernize/modernize.go +++ b/gopls/internal/analysis/modernize/modernize.go @@ -140,15 +140,19 @@ func filesUsing(inspect *inspector.Inspector, info *types.Info, version string) } } -// fileUses reports whether the file containing the specified cursor -// uses at least the specified version of Go (e.g. "go1.24"). -func fileUses(info *types.Info, c cursor.Cursor, version string) bool { +// fileUses reports whether the specified file uses at least the +// specified version of Go (e.g. "go1.24"). +func fileUses(info *types.Info, file *ast.File, version string) bool { + return !versions.Before(info.FileVersions[file], version) +} + +// enclosingFile returns the syntax tree for the file enclosing c. +func enclosingFile(c cursor.Cursor) *ast.File { // TODO(adonovan): make Ancestors reflexive so !ok becomes impossible. 
if curFile, ok := moreiters.First(c.Ancestors((*ast.File)(nil))); ok { c = curFile } - file := c.Node().(*ast.File) - return !versions.Before(info.FileVersions[file], version) + return c.Node().(*ast.File) } // within reports whether the current pass is analyzing one of the diff --git a/gopls/internal/analysis/modernize/slicescontains.go b/gopls/internal/analysis/modernize/slicescontains.go index 589efe7ffc8..e99474df6ab 100644 --- a/gopls/internal/analysis/modernize/slicescontains.go +++ b/gopls/internal/analysis/modernize/slicescontains.go @@ -15,8 +15,10 @@ import ( "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" "golang.org/x/tools/internal/analysisinternal" + typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" "golang.org/x/tools/internal/astutil/cursor" "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/internal/typesinternal/typeindex" ) // The slicescontains pass identifies loops that can be replaced by a @@ -56,7 +58,11 @@ func slicescontains(pass *analysis.Pass) { return } - info := pass.TypesInfo + var ( + inspect = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index) + info = pass.TypesInfo + ) // check is called for each RangeStmt of this form: // for i, elem := range s { if cond { ... } } @@ -144,8 +150,8 @@ func slicescontains(pass *analysis.Pass) { if !ok { panic(fmt.Sprintf("FindNode(%T) failed", n)) } - return uses(info, cur, info.Defs[rng.Key.(*ast.Ident)]) || - rng.Value != nil && uses(info, cur, info.Defs[rng.Value.(*ast.Ident)]) + return uses(index, cur, info.Defs[rng.Key.(*ast.Ident)]) || + rng.Value != nil && uses(index, cur, info.Defs[rng.Value.(*ast.Ident)]) } if usesRangeVar(body) { // Body uses range var "i" or "elem". @@ -349,7 +355,6 @@ func slicescontains(pass *analysis.Pass) { } } - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) for curFile := range filesUsing(inspect, info, "go1.21") { file := curFile.Node().(*ast.File) diff --git a/gopls/internal/analysis/modernize/sortslice.go b/gopls/internal/analysis/modernize/sortslice.go index 0437aaf2f67..bbd04e9293d 100644 --- a/gopls/internal/analysis/modernize/sortslice.go +++ b/gopls/internal/analysis/modernize/sortslice.go @@ -10,10 +10,9 @@ import ( "go/types" "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/go/types/typeutil" "golang.org/x/tools/internal/analysisinternal" + typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" + "golang.org/x/tools/internal/typesinternal/typeindex" ) // The sortslice pass replaces sort.Slice(slice, less) with @@ -42,14 +41,13 @@ func sortslice(pass *analysis.Pass) { return } - info := pass.TypesInfo - - check := func(file *ast.File, call *ast.CallExpr) { - // call to sort.Slice? 
- obj := typeutil.Callee(info, call) - if !analysisinternal.IsFunctionNamed(obj, "sort", "Slice") { - return - } + var ( + info = pass.TypesInfo + index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index) + sortSlice = index.Object("sort", "Slice") + ) + for curCall := range index.Calls(sortSlice) { + call := curCall.Node().(*ast.CallExpr) if lit, ok := call.Args[1].(*ast.FuncLit); ok && len(lit.Body.List) == 1 { sig := info.Types[lit.Type].Type.(*types.Signature) @@ -68,7 +66,9 @@ func sortslice(pass *analysis.Pass) { is[*ast.Ident](index.Index) && info.Uses[index.Index.(*ast.Ident)] == v } - if isIndex(compare.X, i) && isIndex(compare.Y, j) { + file := enclosingFile(curCall) + if isIndex(compare.X, i) && isIndex(compare.Y, j) && + fileUses(info, file, "go1.21") { // Have: sort.Slice(s, func(i, j int) bool { return s[i] < s[j] }) _, prefix, importEdits := analysisinternal.AddImport( @@ -102,14 +102,4 @@ func sortslice(pass *analysis.Pass) { } } } - - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - for curFile := range filesUsing(inspect, info, "go1.21") { - file := curFile.Node().(*ast.File) - - for curCall := range curFile.Preorder((*ast.CallExpr)(nil)) { - call := curCall.Node().(*ast.CallExpr) - check(file, call) - } - } } diff --git a/gopls/internal/analysis/modernize/stringscutprefix.go b/gopls/internal/analysis/modernize/stringscutprefix.go index 28c42c93b05..9e9239c0f21 100644 --- a/gopls/internal/analysis/modernize/stringscutprefix.go +++ b/gopls/internal/analysis/modernize/stringscutprefix.go @@ -14,6 +14,8 @@ import ( "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" "golang.org/x/tools/internal/analysisinternal" + typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" + "golang.org/x/tools/internal/typesinternal/typeindex" ) // stringscutprefix offers a fix to replace an if statement which @@ -35,8 +37,15 @@ import ( // Variants: // - bytes.HasPrefix usage as pattern 1. 
func stringscutprefix(pass *analysis.Pass) { - if !analysisinternal.Imports(pass.Pkg, "strings") && - !analysisinternal.Imports(pass.Pkg, "bytes") { + var ( + inspect = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index) + info = pass.TypesInfo + + stringsTrimPrefix = index.Object("strings", "TrimPrefix") + bytesTrimPrefix = index.Object("bytes", "TrimPrefix") + ) + if !index.Used(stringsTrimPrefix, bytesTrimPrefix) { return } @@ -45,8 +54,6 @@ func stringscutprefix(pass *analysis.Pass) { fixedMessage = "Replace HasPrefix/TrimPrefix with CutPrefix" ) - info := pass.TypesInfo - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) for curFile := range filesUsing(inspect, pass.TypesInfo, "go1.20") { for curIfStmt := range curFile.Preorder((*ast.IfStmt)(nil)) { ifStmt := curIfStmt.Node().(*ast.IfStmt) @@ -65,8 +72,7 @@ func stringscutprefix(pass *analysis.Pass) { for curCall := range firstStmt.Preorder((*ast.CallExpr)(nil)) { call1 := curCall.Node().(*ast.CallExpr) obj1 := typeutil.Callee(info, call1) - if !analysisinternal.IsFunctionNamed(obj1, "strings", "TrimPrefix") && - !analysisinternal.IsFunctionNamed(obj1, "bytes", "TrimPrefix") { + if obj1 != stringsTrimPrefix && obj1 != bytesTrimPrefix { continue } @@ -140,7 +146,7 @@ func stringscutprefix(pass *analysis.Pass) { if call, ok := assign.Rhs[0].(*ast.CallExpr); ok && assign.Tok == token.DEFINE { lhs := assign.Lhs[0] obj := typeutil.Callee(info, call) - if analysisinternal.IsFunctionNamed(obj, "strings", "TrimPrefix") && + if obj == stringsTrimPrefix && (equalSyntax(lhs, bin.X) && equalSyntax(call.Args[0], bin.Y) || (equalSyntax(lhs, bin.Y) && equalSyntax(call.Args[0], bin.X))) { okVarName := analysisinternal.FreshName(info.Scopes[ifStmt], ifStmt.Pos(), "ok") diff --git a/gopls/internal/analysis/modernize/stringsseq.go b/gopls/internal/analysis/modernize/stringsseq.go index a26b8da705c..d32f8be754f 100644 --- a/gopls/internal/analysis/modernize/stringsseq.go +++ b/gopls/internal/analysis/modernize/stringsseq.go @@ -14,8 +14,9 @@ import ( "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/internal/analysisinternal" + typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" "golang.org/x/tools/internal/astutil/edge" + "golang.org/x/tools/internal/typesinternal/typeindex" ) // stringsseq offers a fix to replace a call to strings.Split with @@ -33,12 +34,20 @@ import ( // - bytes.SplitSeq // - bytes.FieldsSeq func stringsseq(pass *analysis.Pass) { - if !analysisinternal.Imports(pass.Pkg, "strings") && - !analysisinternal.Imports(pass.Pkg, "bytes") { + var ( + inspect = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index) + info = pass.TypesInfo + + stringsSplit = index.Object("strings", "Split") + stringsFields = index.Object("strings", "Fields") + bytesSplit = index.Object("bytes", "Split") + bytesFields = index.Object("bytes", "Fields") + ) + if !index.Used(stringsSplit, stringsFields, bytesSplit, bytesFields) { return } - info := pass.TypesInfo - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + for curFile := range filesUsing(inspect, info, "go1.24") { for curRange := range curFile.Preorder((*ast.RangeStmt)(nil)) { rng := curRange.Node().(*ast.RangeStmt) @@ -62,7 +71,7 @@ func stringsseq(pass *analysis.Pass) { len(assign.Lhs) == 1 && len(assign.Rhs) == 
1 && info.Defs[assign.Lhs[0].(*ast.Ident)] == v && - soleUse(info, v) == id { + soleUseIs(index, v, id) { // Have: // lines := ... // for _, line := range lines {...} @@ -96,9 +105,8 @@ func stringsseq(pass *analysis.Pass) { continue } - obj := typeutil.Callee(info, call) - if analysisinternal.IsFunctionNamed(obj, "strings", "Split", "Fields") || - analysisinternal.IsFunctionNamed(obj, "bytes", "Split", "Fields") { + switch obj := typeutil.Callee(info, call); obj { + case stringsSplit, stringsFields, bytesSplit, bytesFields: oldFnName := obj.Name() seqFnName := fmt.Sprintf("%sSeq", oldFnName) pass.Report(analysis.Diagnostic{ diff --git a/gopls/internal/analysis/modernize/testingcontext.go b/gopls/internal/analysis/modernize/testingcontext.go index 05c0b811ab7..de52f756ab8 100644 --- a/gopls/internal/analysis/modernize/testingcontext.go +++ b/gopls/internal/analysis/modernize/testingcontext.go @@ -14,12 +14,11 @@ import ( "unicode/utf8" "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" "golang.org/x/tools/internal/analysisinternal" - "golang.org/x/tools/internal/astutil/cursor" + typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" "golang.org/x/tools/internal/astutil/edge" + "golang.org/x/tools/internal/typesinternal/typeindex" ) // The testingContext pass replaces calls to context.WithCancel from within @@ -41,38 +40,32 @@ import ( // - the call is within a test or subtest function // - the relevant testing.{T,B,F} is named and not shadowed at the call func testingContext(pass *analysis.Pass) { - if !analysisinternal.Imports(pass.Pkg, "testing") { - return - } + var ( + index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index) + info = pass.TypesInfo - info := pass.TypesInfo + contextWithCancel = index.Object("context", "WithCancel") + ) - // checkCall finds eligible calls to context.WithCancel to replace. - checkCall := func(cur cursor.Cursor) { +calls: + for cur := range index.Calls(contextWithCancel) { call := cur.Node().(*ast.CallExpr) - obj := typeutil.Callee(info, call) - if !analysisinternal.IsFunctionNamed(obj, "context", "WithCancel") { - return - } - - // Have: context.WithCancel(arg) + // Have: context.WithCancel(...) arg, ok := call.Args[0].(*ast.CallExpr) if !ok { - return + continue } - if obj := typeutil.Callee(info, arg); !analysisinternal.IsFunctionNamed(obj, "context", "Background", "TODO") { - return + if !analysisinternal.IsFunctionNamed(typeutil.Callee(info, arg), "context", "Background", "TODO") { + continue } - // Have: context.WithCancel(context.{Background,TODO}()) parent := cur.Parent() assign, ok := parent.Node().(*ast.AssignStmt) if !ok || assign.Tok != token.DEFINE { - return + continue } - // Have: a, b := context.WithCancel(context.{Background,TODO}()) // Check that both a and b are declared, not redeclarations. 
@@ -80,27 +73,27 @@ func testingContext(pass *analysis.Pass) { for _, expr := range assign.Lhs { id, ok := expr.(*ast.Ident) if !ok { - return + continue calls } obj, ok := info.Defs[id] if !ok { - return + continue calls } lhs = append(lhs, obj) } next, ok := parent.NextSibling() if !ok { - return + continue } defr, ok := next.Node().(*ast.DeferStmt) if !ok { - return + continue } - if soleUse(info, lhs[1]) != defr.Call.Fun { - return + deferId, ok := defr.Call.Fun.(*ast.Ident) + if !ok || !soleUseIs(index, lhs[1], deferId) { + continue // b is used elsewhere } - // Have: // a, b := context.WithCancel(context.{Background,TODO}()) // defer b() @@ -126,8 +119,7 @@ func testingContext(pass *analysis.Pass) { testObj = isTestFn(info, n) } } - - if testObj != nil { + if testObj != nil && fileUses(info, enclosingFile(cur), "go1.24") { // Have a test function. Check that we can resolve the relevant // testing.{T,B,F} at the current position. if _, obj := lhs[0].Parent().LookupParent(testObj.Name(), lhs[0].Pos()); obj == testObj { @@ -148,29 +140,19 @@ func testingContext(pass *analysis.Pass) { } } } - - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - for curFile := range filesUsing(inspect, info, "go1.24") { - for cur := range curFile.Preorder((*ast.CallExpr)(nil)) { - checkCall(cur) - } - } } -// soleUse returns the ident that refers to obj, if there is exactly one. -// -// TODO(rfindley): consider factoring to share with gopls/internal/refactor/inline. -func soleUse(info *types.Info, obj types.Object) (sole *ast.Ident) { - // This is not efficient, but it is called infrequently. - for id, obj2 := range info.Uses { - if obj2 == obj { - if sole != nil { - return nil // not unique - } - sole = id +// soleUseIs reports whether id is the sole Ident that uses obj. +// (It returns false if there were no uses of obj.) +func soleUseIs(index *typeindex.Index, obj types.Object, id *ast.Ident) bool { + empty := true + for use := range index.Uses(obj) { + empty = false + if use.Node() != id { + return false } } - return sole + return !empty } // isTestFn checks whether fn is a test function (TestX, BenchmarkX, FuzzX), From 7efe9a8b020b4f51bbbd887c6ecffde35fb8a4e1 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Tue, 25 Mar 2025 14:23:33 -0400 Subject: [PATCH 140/270] gopls/internal/analysis/modernize: rangeint: fix yet another bug ASSIGN and DEFINE are not the only kinds of AssignStmt. 
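For example, the compound assignment in "for i := 0; i < len(slice); i++ { i += 1 }"
(added to the rangeint test data below) has Tok token.ADD_ASSIGN, so isScalarLvalue now
treats any AssignStmt whose Tok is not token.DEFINE as a write to the loop variable,
rather than checking for token.ASSIGN alone.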
+ test Change-Id: I81b5122b0ac0db9be5178b6685c00212f3c4c469 Reviewed-on: https://go-review.googlesource.com/c/tools/+/660695 Reviewed-by: Jonathan Amsterdam LUCI-TryBot-Result: Go LUCI Auto-Submit: Alan Donovan --- gopls/internal/analysis/modernize/rangeint.go | 4 ++-- .../analysis/modernize/testdata/src/rangeint/rangeint.go | 3 +++ .../modernize/testdata/src/rangeint/rangeint.go.golden | 3 +++ 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/gopls/internal/analysis/modernize/rangeint.go b/gopls/internal/analysis/modernize/rangeint.go index a8106f08d57..4ca87e40aec 100644 --- a/gopls/internal/analysis/modernize/rangeint.go +++ b/gopls/internal/analysis/modernize/rangeint.go @@ -263,8 +263,8 @@ func isScalarLvalue(info *types.Info, curId cursor.Cursor) bool { switch ek { case edge.AssignStmt_Lhs: assign := cur.Parent().Node().(*ast.AssignStmt) - if assign.Tok == token.ASSIGN { - return true // i = j + if assign.Tok != token.DEFINE { + return true // i = j or i += j } id := curId.Node().(*ast.Ident) if v, ok := info.Defs[id]; ok && v.Pos() != id.Pos() { diff --git a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go index 0890051f0ba..7048fea1148 100644 --- a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go +++ b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go @@ -132,6 +132,9 @@ func _(i int, s struct{ i int }, slice []int) { s = s[1:] } } + for i := 0; i < len(slice); i++ { // nope: i is incremented within loop + i += 1 + } } var Global int diff --git a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden index a6b3755840a..8c3fdc40b77 100644 --- a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden +++ b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden @@ -132,6 +132,9 @@ func _(i int, s struct{ i int }, slice []int) { s = s[1:] } } + for i := 0; i < len(slice); i++ { // nope: i is incremented within loop + i += 1 + } } var Global int From b75dab25fbcec2c54025cd5007c22b5d8b648063 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Tue, 25 Mar 2025 16:10:22 -0400 Subject: [PATCH 141/270] internal/typesinternal/typeindex: suppress test on js Fixes golang/go#73043 Change-Id: Ia0524bb45562095356984693f705a51fb0a35224 Reviewed-on: https://go-review.googlesource.com/c/tools/+/660735 LUCI-TryBot-Result: Go LUCI Auto-Submit: Alan Donovan Reviewed-by: Jonathan Amsterdam --- internal/typesinternal/typeindex/typeindex_test.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/internal/typesinternal/typeindex/typeindex_test.go b/internal/typesinternal/typeindex/typeindex_test.go index 767d183ac44..c8b08dc9d00 100644 --- a/internal/typesinternal/typeindex/typeindex_test.go +++ b/internal/typesinternal/typeindex/typeindex_test.go @@ -15,10 +15,12 @@ import ( "golang.org/x/tools/go/packages" "golang.org/x/tools/go/types/typeutil" "golang.org/x/tools/internal/astutil/cursor" + "golang.org/x/tools/internal/testenv" "golang.org/x/tools/internal/typesinternal/typeindex" ) func TestIndex(t *testing.T) { + testenv.NeedsGoPackages(t) var ( pkg = loadNetHTTP(t) inspect = inspector.New(pkg.Syntax) From 8be0d5f6f63449d5236aca9528f40e7bf6ef0215 Mon Sep 17 00:00:00 2001 From: Kyle Weingartner Date: Tue, 25 Mar 2025 13:59:55 -0700 Subject: [PATCH 142/270] gopls/internal/analysis/maprange: use typeindex Updates golang/go#72908 
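Instead of scanning every RangeStmt with inspect.Preorder, the analyzer now resolves the
four callees (maps.Keys/Values and their x/exp/maps counterparts) once via index.Object
and visits only their call sites via index.Calls, keeping calls whose ParentEdge is
edge.RangeStmt_X; the go1.22 range-over-int check moves from the package Go version to
the per-file version (fileUses).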
Change-Id: Ie6cfbfd9326b02d3b1c7421e7da8f700d4bb5de5 Reviewed-on: https://go-review.googlesource.com/c/tools/+/660755 Auto-Submit: Alan Donovan LUCI-TryBot-Result: Go LUCI Commit-Queue: Alan Donovan Reviewed-by: Alan Donovan --- gopls/internal/analysis/maprange/maprange.go | 132 ++++++++++--------- 1 file changed, 69 insertions(+), 63 deletions(-) diff --git a/gopls/internal/analysis/maprange/maprange.go b/gopls/internal/analysis/maprange/maprange.go index c3990f9ea75..4bd7066b8cb 100644 --- a/gopls/internal/analysis/maprange/maprange.go +++ b/gopls/internal/analysis/maprange/maprange.go @@ -8,11 +8,14 @@ import ( _ "embed" "fmt" "go/ast" + "go/types" "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/gopls/internal/util/moreiters" "golang.org/x/tools/internal/analysisinternal" + typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" + "golang.org/x/tools/internal/astutil/cursor" + "golang.org/x/tools/internal/astutil/edge" + "golang.org/x/tools/internal/typesinternal/typeindex" "golang.org/x/tools/internal/versions" ) @@ -23,7 +26,7 @@ var Analyzer = &analysis.Analyzer{ Name: "maprange", Doc: analysisinternal.MustExtractDoc(doc, "maprange"), URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/maprange", - Requires: []*analysis.Analyzer{inspect.Analyzer}, + Requires: []*analysis.Analyzer{typeindexanalyzer.Analyzer}, Run: run, } @@ -31,60 +34,44 @@ var Analyzer = &analysis.Analyzer{ var xmaps = "golang.org/x/exp/maps" func run(pass *analysis.Pass) (any, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - switch pass.Pkg.Path() { case "maps", xmaps: // These packages know how to use their own APIs. 
return nil, nil } - - if !(analysisinternal.Imports(pass.Pkg, "maps") || analysisinternal.Imports(pass.Pkg, xmaps)) { - return nil, nil // fast path - } - - inspect.Preorder([]ast.Node{(*ast.RangeStmt)(nil)}, func(n ast.Node) { - rangeStmt, ok := n.(*ast.RangeStmt) - if !ok { - return - } - call, ok := rangeStmt.X.(*ast.CallExpr) - if !ok { - return - } - callee := typeutil.Callee(pass.TypesInfo, call) - if !analysisinternal.IsFunctionNamed(callee, "maps", "Keys", "Values") && - !analysisinternal.IsFunctionNamed(callee, xmaps, "Keys", "Values") { - return - } - version := pass.Pkg.GoVersion() - pkg, fn := callee.Pkg().Path(), callee.Name() - key, value := rangeStmt.Key, rangeStmt.Value - - edits := editRangeStmt(pass, version, pkg, fn, key, value, call) - if len(edits) > 0 { - pass.Report(analysis.Diagnostic{ - Pos: call.Pos(), - End: call.End(), - Message: fmt.Sprintf("unnecessary and inefficient call of %s.%s", pkg, fn), - SuggestedFixes: []analysis.SuggestedFix{{ - Message: fmt.Sprintf("Remove unnecessary call to %s.%s", pkg, fn), - TextEdits: edits, - }}, - }) + var ( + index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index) + mapsKeys = index.Object("maps", "Keys") + mapsValues = index.Object("maps", "Values") + xmapsKeys = index.Object(xmaps, "Keys") + xmapsValues = index.Object(xmaps, "Values") + ) + for _, callee := range []types.Object{mapsKeys, mapsValues, xmapsKeys, xmapsValues} { + for curCall := range index.Calls(callee) { + if ek, _ := curCall.ParentEdge(); ek != edge.RangeStmt_X { + continue + } + analyzeRangeStmt(pass, callee, curCall) } - }) - + } return nil, nil } -// editRangeStmt returns edits to transform a range statement that calls -// maps.Keys or maps.Values (either the stdlib or the x/exp/maps version). -// -// It reports a diagnostic if an edit cannot be made because the Go version is too old. +// analyzeRangeStmt analyzes range statements iterating over calls to maps.Keys +// or maps.Values (from the standard library "maps" or "golang.org/x/exp/maps"). // -// It returns nil if no edits are needed. -func editRangeStmt(pass *analysis.Pass, version, pkg, fn string, key, value ast.Expr, call *ast.CallExpr) []analysis.TextEdit { +// It reports a diagnostic with a suggested fix to simplify the loop by removing +// the unnecessary function call and adjusting range variables, if possible. +// For certain patterns involving x/exp/maps.Keys before Go 1.22, it reports +// a diagnostic about potential incorrect usage without a suggested fix. +// No diagnostic is reported if the range statement doesn't require changes. +func analyzeRangeStmt(pass *analysis.Pass, callee types.Object, curCall cursor.Cursor) { + var ( + call = curCall.Node().(*ast.CallExpr) + rangeStmt = curCall.Parent().Node().(*ast.RangeStmt) + pkg = callee.Pkg().Path() + fn = callee.Name() + ) var edits []analysis.TextEdit // Check if the call to maps.Keys or maps.Values can be removed/replaced. @@ -97,12 +84,21 @@ func editRangeStmt(pass *analysis.Pass, version, pkg, fn string, key, value ast. // If we have: for i, k := range maps.Keys(m) (only possible using x/exp/maps) // or: for i, v = range maps.Values(m) // do not remove the call. 
- removeCall := !isSet(key) || !isSet(value) + removeCall := !isSet(rangeStmt.Key) || !isSet(rangeStmt.Value) replace := "" - if pkg == xmaps && isSet(key) && value == nil { + if pkg == xmaps && isSet(rangeStmt.Key) && rangeStmt.Value == nil { // If we have: for i := range maps.Keys(m) (using x/exp/maps), // Replace with: for i := range len(m) replace = "len" + canRangeOverInt := fileUses(pass.TypesInfo, curCall, "go1.22") + if !canRangeOverInt { + pass.Report(analysis.Diagnostic{ + Pos: call.Pos(), + End: call.End(), + Message: fmt.Sprintf("likely incorrect use of %s.%s (returns a slice)", pkg, fn), + }) + return + } } if removeCall { edits = append(edits, analysis.TextEdit{ @@ -114,38 +110,37 @@ func editRangeStmt(pass *analysis.Pass, version, pkg, fn string, key, value ast. // Example: // for _, k := range maps.Keys(m) // ^^^ removeKey ^^^^^^^^^ removeCall - removeKey := pkg == xmaps && fn == "Keys" && !isSet(key) && isSet(value) + removeKey := pkg == xmaps && fn == "Keys" && !isSet(rangeStmt.Key) && isSet(rangeStmt.Value) if removeKey { edits = append(edits, analysis.TextEdit{ - Pos: key.Pos(), - End: value.Pos(), + Pos: rangeStmt.Key.Pos(), + End: rangeStmt.Value.Pos(), }) } // Check if a key should be inserted to the range statement. // Example: // for _, v := range maps.Values(m) // ^^^ addKey ^^^^^^^^^^^ removeCall - addKey := pkg == "maps" && fn == "Values" && isSet(key) + addKey := pkg == "maps" && fn == "Values" && isSet(rangeStmt.Key) if addKey { edits = append(edits, analysis.TextEdit{ - Pos: key.Pos(), - End: key.Pos(), + Pos: rangeStmt.Key.Pos(), + End: rangeStmt.Key.Pos(), NewText: []byte("_, "), }) } - // Range over int was added in Go 1.22. - // If the Go version is too old, report a diagnostic but do not make any edits. - if replace == "len" && versions.Before(pass.Pkg.GoVersion(), versions.Go1_22) { + if len(edits) > 0 { pass.Report(analysis.Diagnostic{ Pos: call.Pos(), End: call.End(), - Message: fmt.Sprintf("likely incorrect use of %s.%s (returns a slice)", pkg, fn), + Message: fmt.Sprintf("unnecessary and inefficient call of %s.%s", pkg, fn), + SuggestedFixes: []analysis.SuggestedFix{{ + Message: fmt.Sprintf("Remove unnecessary call to %s.%s", pkg, fn), + TextEdits: edits, + }}, }) - return nil } - - return edits } // isSet reports whether an ast.Expr is a non-nil expression that is not the blank identifier. @@ -153,3 +148,14 @@ func isSet(expr ast.Expr) bool { ident, ok := expr.(*ast.Ident) return expr != nil && (!ok || ident.Name != "_") } + +// fileUses reports whether the file containing the specified cursor +// uses at least the specified version of Go (e.g. "go1.24"). +func fileUses(info *types.Info, c cursor.Cursor, version string) bool { + // TODO(adonovan): make Ancestors reflexive so !ok becomes impossible. + if curFile, ok := moreiters.First(c.Ancestors((*ast.File)(nil))); ok { + c = curFile + } + file := c.Node().(*ast.File) + return !versions.Before(info.FileVersions[file], version) +} From d70c04eddcecab264eae45dafa22a9811ff16c0c Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Mon, 24 Mar 2025 14:43:22 -0400 Subject: [PATCH 143/270] internal/refactor/inline: replace extractTxtar Use existing building blocks. Doing so uncovered a bug in a test file: duplicate filenames that were being overwritten. Fix by splitting into two files. 
Change-Id: I084b5aae00964be611b10c00096d196ab27b6cc9 Reviewed-on: https://go-review.googlesource.com/c/tools/+/660576 Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI --- internal/refactor/inline/inline_test.go | 20 +---- .../inline/testdata/import-shadow-2.txtar | 75 +++++++++++++++++ .../inline/testdata/import-shadow.txtar | 82 +------------------ 3 files changed, 83 insertions(+), 94 deletions(-) create mode 100644 internal/refactor/inline/testdata/import-shadow-2.txtar diff --git a/internal/refactor/inline/inline_test.go b/internal/refactor/inline/inline_test.go index 3be37d5ecde..7f6c95e15f5 100644 --- a/internal/refactor/inline/inline_test.go +++ b/internal/refactor/inline/inline_test.go @@ -29,6 +29,7 @@ import ( "golang.org/x/tools/internal/expect" "golang.org/x/tools/internal/refactor/inline" "golang.org/x/tools/internal/testenv" + "golang.org/x/tools/internal/testfiles" "golang.org/x/tools/txtar" ) @@ -56,10 +57,11 @@ func TestData(t *testing.T) { if err != nil { t.Fatal(err) } - dir := t.TempDir() - if err := extractTxtar(ar, dir); err != nil { + fs, err := txtar.FS(ar) + if err != nil { t.Fatal(err) } + dir := testfiles.CopyToTmp(t, fs) // Load packages. cfg := &packages.Config{ @@ -1941,20 +1943,6 @@ func checkTranscode(callee *inline.Callee) error { return nil } -// TODO(adonovan): publish this a helper (#61386). -func extractTxtar(ar *txtar.Archive, dir string) error { - for _, file := range ar.Files { - name := filepath.Join(dir, file.Name) - if err := os.MkdirAll(filepath.Dir(name), 0777); err != nil { - return err - } - if err := os.WriteFile(name, file.Data, 0666); err != nil { - return err - } - } - return nil -} - // deepHash computes a cryptographic hash of an ast.Node so that // if the data structure is mutated, the hash changes. // It assumes Go variables do not change address. diff --git a/internal/refactor/inline/testdata/import-shadow-2.txtar b/internal/refactor/inline/testdata/import-shadow-2.txtar new file mode 100644 index 00000000000..14cd045c6c3 --- /dev/null +++ b/internal/refactor/inline/testdata/import-shadow-2.txtar @@ -0,0 +1,75 @@ +See import-shadow.txtar for a description. + +-- go.mod -- +module testdata +go 1.12 + +-- a/a.go -- +package a + +import "testdata/b" + +var x b.T + +func A(b int) { + x.F() //@ inline(re"F", fresult) +} + +-- b/b.go -- +package b + +type T struct{} + +func (T) F() { + One() + Two() +} + +func One() {} +func Two() {} + +-- fresult -- +package a + +import ( + "testdata/b" + b0 "testdata/b" +) + +var x b.T + +func A(b int) { + b0.One() + b0.Two() //@ inline(re"F", fresult) +} + +-- d/d.go -- +package d + +import "testdata/e" + +func D() { + const log = "shadow" + e.E() //@ inline(re"E", eresult) +} + +-- e/e.go -- +package e + +import "log" + +func E() { + log.Printf("") +} + +-- eresult -- +package d + +import ( + log0 "log" +) + +func D() { + const log = "shadow" + log0.Printf("") //@ inline(re"E", eresult) +} diff --git a/internal/refactor/inline/testdata/import-shadow.txtar b/internal/refactor/inline/testdata/import-shadow.txtar index 9d1abdb9e95..a1078e2495b 100644 --- a/internal/refactor/inline/testdata/import-shadow.txtar +++ b/internal/refactor/inline/testdata/import-shadow.txtar @@ -2,10 +2,10 @@ Just because a package (e.g. log) is imported by the caller, and the name log is in scope, doesn't mean the name in scope refers to the package: it could be locally shadowed. 
-In all three scenarios below, renaming import with a fresh name is -added because the usual name is locally shadowed: in cases 1, 2 an -existing import is shadowed by (respectively) a local constant, -parameter; in case 3 there is no existing import. +In all three scenarios in this file and import-shadow-2.txtar, a renaming +import with a fresh name is added because the usual name is locally +shadowed: in cases 1, 2 an existing import is shadowed by (respectively) +a local constant, parameter; in case 3 there is no existing import. -- go.mod -- module testdata @@ -47,77 +47,3 @@ func A() { } var _ log.Logger - --- go.mod -- -module testdata -go 1.12 - --- a/a.go -- -package a - -import "testdata/b" - -var x b.T - -func A(b int) { - x.F() //@ inline(re"F", fresult) -} - --- b/b.go -- -package b - -type T struct{} - -func (T) F() { - One() - Two() -} - -func One() {} -func Two() {} - --- fresult -- -package a - -import ( - "testdata/b" - b0 "testdata/b" -) - -var x b.T - -func A(b int) { - b0.One() - b0.Two() //@ inline(re"F", fresult) -} - --- d/d.go -- -package d - -import "testdata/e" - -func D() { - const log = "shadow" - e.E() //@ inline(re"E", eresult) -} - --- e/e.go -- -package e - -import "log" - -func E() { - log.Printf("") -} - --- eresult -- -package d - -import ( - log0 "log" -) - -func D() { - const log = "shadow" - log0.Printf("") //@ inline(re"E", eresult) -} From c1b6839e804981926f118cb500a8faf52670fac8 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Tue, 25 Mar 2025 17:02:13 -0400 Subject: [PATCH 144/270] internal/astutil/cursor: Ancestors -> Enclosing (+ reflexive) This CL makes Ancestors reflexive, which is the behaviour nearly always wanted by clients. (The former irreflexive behavior is easily requested by adding a call to Cursor.Parent.) Also, rename it to Enclosing, which is at least ambiguous as to its reflexivity, unlike Ancestors. Change-Id: I83dc92c58939058e9a30f8f54727667014fcf3c2 Reviewed-on: https://go-review.googlesource.com/c/tools/+/660775 LUCI-TryBot-Result: Go LUCI Reviewed-by: Jonathan Amsterdam Auto-Submit: Alan Donovan --- .../analysis/fillreturns/fillreturns.go | 16 +++++----------- gopls/internal/analysis/gofix/gofix.go | 2 +- gopls/internal/analysis/maprange/maprange.go | 6 ++---- gopls/internal/analysis/modernize/bloop.go | 6 ++---- .../internal/analysis/modernize/modernize.go | 5 +---- .../internal/analysis/nonewvars/nonewvars.go | 9 +++------ .../analysis/noresultvalues/noresultvalues.go | 2 +- gopls/internal/golang/codeaction.go | 2 +- .../golang/stubmethods/stubmethods.go | 2 +- internal/astutil/cursor/cursor.go | 19 +++++++++---------- internal/astutil/cursor/cursor_test.go | 11 +++++------ internal/typesinternal/typeindex/typeindex.go | 2 +- 12 files changed, 32 insertions(+), 50 deletions(-) diff --git a/gopls/internal/analysis/fillreturns/fillreturns.go b/gopls/internal/analysis/fillreturns/fillreturns.go index 184aac5ea1f..a90105f6f56 100644 --- a/gopls/internal/analysis/fillreturns/fillreturns.go +++ b/gopls/internal/analysis/fillreturns/fillreturns.go @@ -55,12 +55,9 @@ outer: } // Find cursor for enclosing return statement (which may be curErr itself). 
- curRet := curErr - if _, ok := curRet.Node().(*ast.ReturnStmt); !ok { - curRet, ok = moreiters.First(curErr.Ancestors((*ast.ReturnStmt)(nil))) - if !ok { - continue // no enclosing return - } + curRet, ok := moreiters.First(curErr.Enclosing((*ast.ReturnStmt)(nil))) + if !ok { + continue // no enclosing return } ret := curRet.Node().(*ast.ReturnStmt) @@ -114,7 +111,7 @@ outer: retTyps = append(retTyps, retTyp) } - curFile, _ := moreiters.First(curRet.Ancestors((*ast.File)(nil))) + curFile, _ := moreiters.First(curRet.Enclosing((*ast.File)(nil))) file := curFile.Node().(*ast.File) matches := analysisinternal.MatchingIdents(retTyps, file, ret.Pos(), info, pass.Pkg) qual := typesinternal.FileQualifier(file, pass.Pkg) @@ -230,8 +227,5 @@ func fixesError(err types.Error) bool { // enclosingFunc returns the cursor for the innermost Func{Decl,Lit} // that encloses c, if any. func enclosingFunc(c cursor.Cursor) (cursor.Cursor, bool) { - for curAncestor := range c.Ancestors((*ast.FuncDecl)(nil), (*ast.FuncLit)(nil)) { - return curAncestor, true - } - return cursor.Cursor{}, false + return moreiters.First(c.Enclosing((*ast.FuncDecl)(nil), (*ast.FuncLit)(nil))) } diff --git a/gopls/internal/analysis/gofix/gofix.go b/gopls/internal/analysis/gofix/gofix.go index bff4120a39a..6f4c8a6e2fd 100644 --- a/gopls/internal/analysis/gofix/gofix.go +++ b/gopls/internal/analysis/gofix/gofix.go @@ -592,7 +592,7 @@ func (a *analyzer) readFile(node ast.Node) ([]byte, error) { // currentFile returns the unique ast.File for a cursor. func currentFile(c cursor.Cursor) *ast.File { - cf, _ := moreiters.First(c.Ancestors((*ast.File)(nil))) + cf, _ := moreiters.First(c.Enclosing((*ast.File)(nil))) return cf.Node().(*ast.File) } diff --git a/gopls/internal/analysis/maprange/maprange.go b/gopls/internal/analysis/maprange/maprange.go index 4bd7066b8cb..eed04b14e72 100644 --- a/gopls/internal/analysis/maprange/maprange.go +++ b/gopls/internal/analysis/maprange/maprange.go @@ -9,6 +9,7 @@ import ( "fmt" "go/ast" "go/types" + "golang.org/x/tools/go/analysis" "golang.org/x/tools/gopls/internal/util/moreiters" "golang.org/x/tools/internal/analysisinternal" @@ -152,10 +153,7 @@ func isSet(expr ast.Expr) bool { // fileUses reports whether the file containing the specified cursor // uses at least the specified version of Go (e.g. "go1.24"). func fileUses(info *types.Info, c cursor.Cursor, version string) bool { - // TODO(adonovan): make Ancestors reflexive so !ok becomes impossible. - if curFile, ok := moreiters.First(c.Ancestors((*ast.File)(nil))); ok { - c = curFile - } + c, _ = moreiters.First(c.Enclosing((*ast.File)(nil))) file := c.Node().(*ast.File) return !versions.Before(info.FileVersions[file], version) } diff --git a/gopls/internal/analysis/modernize/bloop.go b/gopls/internal/analysis/modernize/bloop.go index 1fc07169486..5bfb0b7d8e8 100644 --- a/gopls/internal/analysis/modernize/bloop.go +++ b/gopls/internal/analysis/modernize/bloop.go @@ -14,6 +14,7 @@ import ( "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/gopls/internal/util/moreiters" "golang.org/x/tools/internal/analysisinternal" typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" "golang.org/x/tools/internal/astutil/cursor" @@ -169,8 +170,5 @@ func uses(index *typeindex.Index, cur cursor.Cursor, obj types.Object) bool { // enclosingFunc returns the cursor for the innermost Func{Decl,Lit} // that encloses c, if any. 
func enclosingFunc(c cursor.Cursor) (cursor.Cursor, bool) { - for curAncestor := range c.Ancestors((*ast.FuncDecl)(nil), (*ast.FuncLit)(nil)) { - return curAncestor, true - } - return cursor.Cursor{}, false + return moreiters.First(c.Enclosing((*ast.FuncDecl)(nil), (*ast.FuncLit)(nil))) } diff --git a/gopls/internal/analysis/modernize/modernize.go b/gopls/internal/analysis/modernize/modernize.go index ac4a5c28e5f..ebf83ab1bc3 100644 --- a/gopls/internal/analysis/modernize/modernize.go +++ b/gopls/internal/analysis/modernize/modernize.go @@ -148,10 +148,7 @@ func fileUses(info *types.Info, file *ast.File, version string) bool { // enclosingFile returns the syntax tree for the file enclosing c. func enclosingFile(c cursor.Cursor) *ast.File { - // TODO(adonovan): make Ancestors reflexive so !ok becomes impossible. - if curFile, ok := moreiters.First(c.Ancestors((*ast.File)(nil))); ok { - c = curFile - } + c, _ = moreiters.First(c.Enclosing((*ast.File)(nil))) return c.Node().(*ast.File) } diff --git a/gopls/internal/analysis/nonewvars/nonewvars.go b/gopls/internal/analysis/nonewvars/nonewvars.go index b7f861ba7f1..eeae7211c97 100644 --- a/gopls/internal/analysis/nonewvars/nonewvars.go +++ b/gopls/internal/analysis/nonewvars/nonewvars.go @@ -49,14 +49,11 @@ func run(pass *analysis.Pass) (any, error) { } // Find enclosing assignment (which may be curErr itself). - assign, ok := curErr.Node().(*ast.AssignStmt) + curAssign, ok := moreiters.First(curErr.Enclosing((*ast.AssignStmt)(nil))) if !ok { - cur, ok := moreiters.First(curErr.Ancestors((*ast.AssignStmt)(nil))) - if !ok { - continue // no enclosing assignment - } - assign = cur.Node().(*ast.AssignStmt) + continue // no enclosing assignment } + assign := curAssign.Node().(*ast.AssignStmt) if assign.Tok != token.DEFINE { continue // not a := statement } diff --git a/gopls/internal/analysis/noresultvalues/noresultvalues.go b/gopls/internal/analysis/noresultvalues/noresultvalues.go index 6b8f9d895e4..848f6532ce0 100644 --- a/gopls/internal/analysis/noresultvalues/noresultvalues.go +++ b/gopls/internal/analysis/noresultvalues/noresultvalues.go @@ -48,7 +48,7 @@ func run(pass *analysis.Pass) (any, error) { continue // can't find errant node } // Find first enclosing return statement, if any. 
- if curRet, ok := moreiters.First(curErr.Ancestors((*ast.ReturnStmt)(nil))); ok { + if curRet, ok := moreiters.First(curErr.Enclosing((*ast.ReturnStmt)(nil))); ok { ret := curRet.Node() pass.Report(analysis.Diagnostic{ Pos: start, diff --git a/gopls/internal/golang/codeaction.go b/gopls/internal/golang/codeaction.go index a5591edf1f9..d9f2af47d24 100644 --- a/gopls/internal/golang/codeaction.go +++ b/gopls/internal/golang/codeaction.go @@ -956,7 +956,7 @@ func goAssembly(ctx context.Context, req *codeActionsRequest) error { sym.WriteString(".") curSel, _ := req.pgf.Cursor.FindPos(req.start, req.end) - for cur := range curSel.Ancestors((*ast.FuncDecl)(nil), (*ast.ValueSpec)(nil)) { + for cur := range curSel.Enclosing((*ast.FuncDecl)(nil), (*ast.ValueSpec)(nil)) { var name string // in command title switch node := cur.Node().(type) { case *ast.FuncDecl: diff --git a/gopls/internal/golang/stubmethods/stubmethods.go b/gopls/internal/golang/stubmethods/stubmethods.go index a060993b1ab..43842264d70 100644 --- a/gopls/internal/golang/stubmethods/stubmethods.go +++ b/gopls/internal/golang/stubmethods/stubmethods.go @@ -54,7 +54,7 @@ type IfaceStubInfo struct { func GetIfaceStubInfo(fset *token.FileSet, info *types.Info, pgf *parsego.File, pos, end token.Pos) *IfaceStubInfo { // TODO(adonovan): simplify, using Cursor: // curErr, _ := pgf.Cursor.FindPos(pos, end) - // for cur := range curErr.Ancestors() { + // for cur := range curErr.Enclosing() { // switch n := cur.Node().(type) {... path, _ := astutil.PathEnclosingInterval(pgf.File, pos, end) for _, n := range path { diff --git a/internal/astutil/cursor/cursor.go b/internal/astutil/cursor/cursor.go index 9139d4e516c..3f015998c52 100644 --- a/internal/astutil/cursor/cursor.go +++ b/internal/astutil/cursor/cursor.go @@ -198,30 +198,29 @@ func (c Cursor) Stack(stack []Cursor) []Cursor { panic("Cursor.Stack called on Root node") } - stack = append(stack, c) - stack = slices.AppendSeq(stack, c.Ancestors()) + stack = slices.AppendSeq(stack, c.Enclosing()) slices.Reverse(stack) return stack } -// Ancestors returns an iterator over the ancestors of the current -// node, starting with [Cursor.Parent]. +// Enclosing returns an iterator over the nodes enclosing the current +// current node, starting with the Cursor itself. // -// Ancestors must not be called on the Root node (whose [Cursor.Node] returns nil). +// Enclosing must not be called on the Root node (whose [Cursor.Node] returns nil). // // The types argument, if non-empty, enables type-based filtering of -// events: the sequence includes only ancestors whose type matches an -// element of the types slice. -func (c Cursor) Ancestors(types ...ast.Node) iter.Seq[Cursor] { +// events: the sequence includes only enclosing nodes whose type +// matches an element of the types slice. 
+func (c Cursor) Enclosing(types ...ast.Node) iter.Seq[Cursor] { if c.index < 0 { - panic("Cursor.Ancestors called on Root node") + panic("Cursor.Enclosing called on Root node") } mask := maskOf(types) return func(yield func(Cursor) bool) { events := c.events() - for i := events[c.index].parent; i >= 0; i = events[i].parent { + for i := c.index; i >= 0; i = events[i].parent { if events[i].typ&mask != 0 && !yield(Cursor{c.in, i}) { break } diff --git a/internal/astutil/cursor/cursor_test.go b/internal/astutil/cursor/cursor_test.go index 380414df790..9effae912a3 100644 --- a/internal/astutil/cursor/cursor_test.go +++ b/internal/astutil/cursor/cursor_test.go @@ -162,11 +162,10 @@ func g() { t.Errorf("curCall.Stack() = %q, want %q", got, want) } - // Ancestors = Reverse(Stack[:last]). - stack = stack[:len(stack)-1] + // Enclosing = Reverse(Stack()). slices.Reverse(stack) - if got, want := slices.Collect(curCall.Ancestors()), stack; !reflect.DeepEqual(got, want) { - t.Errorf("Ancestors = %v, Reverse(Stack - last element) = %v", got, want) + if got, want := slices.Collect(curCall.Enclosing()), stack; !reflect.DeepEqual(got, want) { + t.Errorf("Enclosing = %v, Reverse(Stack - last element) = %v", got, want) } } @@ -542,12 +541,12 @@ func BenchmarkInspectCalls(b *testing.B) { } }) - b.Run("CursorAncestors", func(b *testing.B) { + b.Run("CursorEnclosing", func(b *testing.B) { var ncalls int for range b.N { for cur := range cursor.Root(inspect).Preorder(callExprs...) { _ = cur.Node().(*ast.CallExpr) - for range cur.Ancestors() { + for range cur.Enclosing() { } ncalls++ } diff --git a/internal/typesinternal/typeindex/typeindex.go b/internal/typesinternal/typeindex/typeindex.go index a6cc6956892..34087a98fbf 100644 --- a/internal/typesinternal/typeindex/typeindex.go +++ b/internal/typesinternal/typeindex/typeindex.go @@ -188,7 +188,7 @@ func (ix *Index) Calls(callee types.Object) iter.Seq[cursor.Cursor] { // CallExpr ancestor: we have to make sure the // ident is in the CallExpr.Fun position, otherwise // f(f, f) would have two spurious matches. - // Avoiding Ancestors is also significantly faster. + // Avoiding Enclosing is also significantly faster. // inverse unparen: f -> (f) for ek == edge.ParenExpr_X { From b3ce3e13267b25b8adf8d9c7d39cd549b3674012 Mon Sep 17 00:00:00 2001 From: xieyuschen Date: Sat, 22 Mar 2025 00:52:46 -0600 Subject: [PATCH 145/270] gopls/completion: use high score for package name main when current package is main This CL gives main package a high score when it finds the current package name is main to ensure new files under main package will have main package as best suggestion. Besides, this CL sorts the package candidates based on their scores in descending order, which was missed before, as the other functions do. 
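
For illustration only: the snippet below is a self-contained sketch of the
ordering contract this change relies on (the candidate type, labels, and
weights are invented; the real weights are the lowScore/highScore constants
in completion.go). Candidates are sorted by score in descending order, with
ties broken by label, so a main package sees "main" first.

    package main

    import (
        "fmt"
        "sort"
    )

    type candidate struct {
        Label string
        Score float64
    }

    func main() {
        // In a directory whose sibling files declare "package main", the
        // "main" suggestion is boosted; directory-derived names keep a
        // low weight.
        items := []candidate{
            {Label: "cmd", Score: 0.01},
            {Label: "cmd_test", Score: 0.01},
            {Label: "main", Score: 10},
        }
        sort.SliceStable(items, func(i, j int) bool {
            if items[i].Score != items[j].Score {
                return items[i].Score > items[j].Score // higher score first
            }
            return items[i].Label < items[j].Label // then by label
        })
        fmt.Println(items) // [{main 10} {cmd 0.01} {cmd_test 0.01}]
    }
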
Fixes golang/go#72993 Change-Id: Ib09199b915ad8731c7ee359dddf6e37280df39b3 Reviewed-on: https://go-review.googlesource.com/c/tools/+/659995 Reviewed-by: Peter Weinberger Commit-Queue: Alan Donovan Reviewed-by: Alan Donovan Reviewed-by: Dmitri Shuralyov Auto-Submit: Alan Donovan LUCI-TryBot-Result: Go LUCI --- .../internal/golang/completion/completion.go | 18 +++++++------- gopls/internal/golang/completion/package.go | 24 +++++++++++++++---- gopls/internal/server/completion.go | 2 ++ .../integration/completion/completion_test.go | 11 +++++++++ 4 files changed, 41 insertions(+), 14 deletions(-) diff --git a/gopls/internal/golang/completion/completion.go b/gopls/internal/golang/completion/completion.go index 600219370b9..a3270f97909 100644 --- a/gopls/internal/golang/completion/completion.go +++ b/gopls/internal/golang/completion/completion.go @@ -164,14 +164,14 @@ func (i *CompletionItem) addConversion(c *completer, conv conversionEdits) error // Scoring constants are used for weighting the relevance of different candidates. const ( + // lowScore indicates an irrelevant or not useful completion item. + lowScore float64 = 0.01 + // stdScore is the base score for all completion items. stdScore float64 = 1.0 // highScore indicates a very relevant completion item. highScore float64 = 10.0 - - // lowScore indicates an irrelevant or not useful completion item. - lowScore float64 = 0.01 ) // matcher matches a candidate's label against the user input. The @@ -702,7 +702,7 @@ func Completion(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, p // depend on other candidates having already been collected. c.addStatementCandidates() - c.sortItems() + sortItems(c.items) return c.items, c.getSurrounding(), nil } @@ -830,16 +830,16 @@ func (c *completer) scanToken(contents []byte) (token.Pos, token.Token, string) } } -func (c *completer) sortItems() { - sort.SliceStable(c.items, func(i, j int) bool { +func sortItems(items []CompletionItem) { + sort.SliceStable(items, func(i, j int) bool { // Sort by score first. - if c.items[i].Score != c.items[j].Score { - return c.items[i].Score > c.items[j].Score + if items[i].Score != items[j].Score { + return items[i].Score > items[j].Score } // Then sort by label so order stays consistent. This also has the // effect of preferring shorter candidates. 
- return c.items[i].Label < c.items[j].Label + return items[i].Label < items[j].Label }) } diff --git a/gopls/internal/golang/completion/package.go b/gopls/internal/golang/completion/package.go index 5fd6c04144d..01d5622c7f7 100644 --- a/gopls/internal/golang/completion/package.go +++ b/gopls/internal/golang/completion/package.go @@ -62,7 +62,7 @@ func packageClauseCompletions(ctx context.Context, snapshot *cache.Snapshot, fh Score: pkg.score, }) } - + sortItems(items) return items, surrounding, nil } @@ -197,11 +197,20 @@ func packageSuggestions(ctx context.Context, snapshot *cache.Snapshot, fileURI p } matcher := fuzzy.NewMatcher(prefix) + var currentPackageName string + if variants, err := snapshot.MetadataForFile(ctx, fileURI); err == nil && + len(variants) != 0 { + currentPackageName = string(variants[0].Name) + } // Always try to suggest a main package defer func() { + mainScore := lowScore + if currentPackageName == "main" { + mainScore = highScore + } if score := float64(matcher.Score("main")); score > 0 { - packages = append(packages, toCandidate("main", score*lowScore)) + packages = append(packages, toCandidate("main", score*mainScore)) } }() @@ -254,15 +263,20 @@ func packageSuggestions(ctx context.Context, snapshot *cache.Snapshot, fileURI p seenPkgs[testPkgName] = struct{}{} } - // Add current directory name as a low relevance suggestion. if _, ok := seenPkgs[pkgName]; !ok { + // Add current directory name as a low relevance suggestion. + dirNameScore := lowScore + // if current package name is empty, the dir name is the best choice. + if currentPackageName == "" { + dirNameScore = highScore + } if score := float64(matcher.Score(string(pkgName))); score > 0 { - packages = append(packages, toCandidate(string(pkgName), score*lowScore)) + packages = append(packages, toCandidate(string(pkgName), score*dirNameScore)) } testPkgName := pkgName + "_test" if score := float64(matcher.Score(string(testPkgName))); score > 0 { - packages = append(packages, toCandidate(string(testPkgName), score*lowScore)) + packages = append(packages, toCandidate(string(testPkgName), score*dirNameScore)) } } diff --git a/gopls/internal/server/completion.go b/gopls/internal/server/completion.go index 6c185e93717..e72d156de05 100644 --- a/gopls/internal/server/completion.go +++ b/gopls/internal/server/completion.go @@ -102,6 +102,8 @@ func (s *server) saveLastCompletion(uri protocol.DocumentURI, version int32, ite s.efficacyItems = items } +// toProtocolCompletionItems converts the candidates to the protocol completion items, +// the candidates must be sorted based on score as it will be respected by client side. 
func toProtocolCompletionItems(candidates []completion.CompletionItem, surrounding *completion.Selection, options *settings.Options) ([]protocol.CompletionItem, error) { replaceRng, err := surrounding.Range() if err != nil { diff --git a/gopls/internal/test/integration/completion/completion_test.go b/gopls/internal/test/integration/completion/completion_test.go index 0713b1f62b9..8fa03908c01 100644 --- a/gopls/internal/test/integration/completion/completion_test.go +++ b/gopls/internal/test/integration/completion/completion_test.go @@ -53,6 +53,10 @@ func TestPackageCompletion(t *testing.T) { module mod.com go 1.12 +-- cmd/main.go -- +package main +-- cmd/testfile.go -- +package -- fruits/apple.go -- package apple @@ -95,6 +99,13 @@ package want []string editRegexp string }{ + { + name: "main package completion after package keyword", + filename: "cmd/testfile.go", + triggerRegexp: "package()", + want: []string{"package main", "package cmd", "package cmd_test"}, + editRegexp: "package", + }, { name: "package completion at valid position", filename: "fruits/testfile.go", From 6a913554a79edb44963cc65b675f01ad4ed911a7 Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Wed, 26 Mar 2025 08:21:39 -0400 Subject: [PATCH 146/270] internal/refactor/inline: factor out import map construction Pull out some code from inlineCall to simplify it. Change-Id: I4f354493648c78c26bec93935f014e78ef48694c Reviewed-on: https://go-review.googlesource.com/c/tools/+/660955 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan --- internal/refactor/inline/inline.go | 98 ++++++++++++++++-------------- 1 file changed, 53 insertions(+), 45 deletions(-) diff --git a/internal/refactor/inline/inline.go b/internal/refactor/inline/inline.go index 2b6f06242e7..749d8cb7a4f 100644 --- a/internal/refactor/inline/inline.go +++ b/internal/refactor/inline/inline.go @@ -586,52 +586,10 @@ func (st *state) inlineCall() (*inlineCallResult, error) { assign1 = func(v *types.Var) bool { return !updatedLocals[v] } } - // import map, initially populated with caller imports, and updated below + // Build a map, initially populated with caller imports, and updated below // with new imports necessary to reference free symbols in the callee. - // - // For simplicity we ignore existing dot imports, so that a qualified - // identifier (QI) in the callee is always represented by a QI in the caller, - // allowing us to treat a QI like a selection on a package name. - importMap := make(map[string][]string) // maps package path to local name(s) - var oldImports []oldImport // imports referenced only by caller.Call.Fun - - for _, imp := range caller.File.Imports { - if pkgName, ok := importedPkgName(caller.Info, imp); ok && - pkgName.Name() != "." && - pkgName.Name() != "_" { - - // If the import's sole use is in caller.Call.Fun of the form p.F(...), - // where p.F is a qualified identifier, the p import may not be - // necessary. - // - // Only the qualified identifier case matters, as other references to - // imported package names in the Call.Fun expression (e.g. - // x.after(3*time.Second).f() or time.Second.String()) will remain after - // inlining, as arguments. - // - // If that is the case, proactively check if any of the callee FreeObjs - // need this import. Doing so eagerly simplifies the resulting logic. 
- needed := true - sel, ok := ast.Unparen(caller.Call.Fun).(*ast.SelectorExpr) - if ok && soleUse(caller.Info, pkgName) == sel.X { - needed = false // no longer needed by caller - // Check to see if any of the inlined free objects need this package. - for _, obj := range callee.FreeObjs { - if obj.PkgPath == pkgName.Imported().Path() && obj.Shadow[pkgName.Name()] == 0 { - needed = true // needed by callee - break - } - } - } - - if needed { - path := pkgName.Imported().Path() - importMap[path] = append(importMap[path], pkgName.Name()) - } else { - oldImports = append(oldImports, oldImport{pkgName: pkgName, spec: imp}) - } - } - } + // oldImports are caller imports that won't be needed after inlining. + importMap, oldImports := callerImportMap(caller, callee) // importName finds an existing import name to use in a particular shadowing // context. It is used to determine the set of new imports in @@ -1390,6 +1348,56 @@ func (st *state) inlineCall() (*inlineCallResult, error) { return res, nil } +// callerImportMap returns a map from package paths in the caller's file to local names. +// The map excludes imports not needed by the caller or callee after inlining; the second +// return value holds these. +func callerImportMap(caller *Caller, callee *gobCallee) (map[string][]string, []oldImport) { + // For simplicity we ignore existing dot imports, so that a qualified + // identifier (QI) in the callee is always represented by a QI in the caller, + // allowing us to treat a QI like a selection on a package name. + importMap := make(map[string][]string) // maps package path to local name(s) + var oldImports []oldImport // imports referenced only by caller.Call.Fun + + for _, imp := range caller.File.Imports { + if pkgName, ok := importedPkgName(caller.Info, imp); ok && + pkgName.Name() != "." && + pkgName.Name() != "_" { + + // If the import's sole use is in caller.Call.Fun of the form p.F(...), + // where p.F is a qualified identifier, the p import may not be + // necessary. + // + // Only the qualified identifier case matters, as other references to + // imported package names in the Call.Fun expression (e.g. + // x.after(3*time.Second).f() or time.Second.String()) will remain after + // inlining, as arguments. + // + // If that is the case, proactively check if any of the callee FreeObjs + // need this import. Doing so eagerly simplifies the resulting logic. + needed := true + sel, ok := ast.Unparen(caller.Call.Fun).(*ast.SelectorExpr) + if ok && soleUse(caller.Info, pkgName) == sel.X { + needed = false // no longer needed by caller + // Check to see if any of the inlined free objects need this package. 
+ for _, obj := range callee.FreeObjs { + if obj.PkgPath == pkgName.Imported().Path() && obj.Shadow[pkgName.Name()] == 0 { + needed = true // needed by callee + break + } + } + } + + if needed { + path := pkgName.Imported().Path() + importMap[path] = append(importMap[path], pkgName.Name()) + } else { + oldImports = append(oldImports, oldImport{pkgName: pkgName, spec: imp}) + } + } + } + return importMap, oldImports +} + type argument struct { expr ast.Expr typ types.Type // may be tuple for sole non-receiver arg in spread call From 2d8ef138e247bcb82c39faca236398bb434f95ba Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Wed, 26 Mar 2025 07:44:39 -0400 Subject: [PATCH 147/270] internal/refactor/inline: document test file format Change-Id: Ie4b4a9548b66f83690e795755eefcce726150a1b Reviewed-on: https://go-review.googlesource.com/c/tools/+/660895 Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI --- internal/refactor/inline/inline_test.go | 25 ++++++++++++++++++++++++- 1 file changed, 24 insertions(+), 1 deletion(-) diff --git a/internal/refactor/inline/inline_test.go b/internal/refactor/inline/inline_test.go index 7f6c95e15f5..611541c9677 100644 --- a/internal/refactor/inline/inline_test.go +++ b/internal/refactor/inline/inline_test.go @@ -34,6 +34,28 @@ import ( ) // TestData executes test scenarios specified by files in testdata/*.txtar. +// Each txtar file describes two sets of files, some containing Go source +// and others expected results. +// +// The Go source files and go.mod are parsed and type-checked as a Go module. +// Some of these files contain marker comments (in a form described below) describing +// the inlinings to perform and whether they should succeed or fail. A marker +// indicating success refers to another file in the txtar, not a .go +// file, that should contain the contents of the first file after inlining. +// +// The marker format for success is +// +// @inline(re"pat", wantfile) +// +// The first call in the marker's line that matches pat is inlined, and the contents +// of the resulting file must match the contents of wantfile. +// +// The marker format for failure is +// +// @inline(re"pat", re"errpat") +// +// The first argument selects the call for inlining as before, and the second +// is a regular expression that must match the text of resulting error. func TestData(t *testing.T) { testenv.NeedsGoPackages(t) @@ -120,8 +142,9 @@ func TestData(t *testing.T) { var want any switch x := note.Args[1].(type) { case string, expect.Identifier: + name := fmt.Sprint(x) for _, file := range ar.Files { - if file.Name == fmt.Sprint(x) { + if file.Name == name { want = file.Data break } From fbb70473486945bd125dd973858578e184619dc1 Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Wed, 26 Mar 2025 12:41:46 -0400 Subject: [PATCH 148/270] internal/refactor/inline: extract import handling from inlineCall Change-Id: I4102e9a11aca35daf52fbaa343d30bde50dd9fb1 Reviewed-on: https://go-review.googlesource.com/c/tools/+/660957 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan --- internal/refactor/inline/inline.go | 308 +++++++++++++++-------------- 1 file changed, 157 insertions(+), 151 deletions(-) diff --git a/internal/refactor/inline/inline.go b/internal/refactor/inline/inline.go index 749d8cb7a4f..d89a62972c6 100644 --- a/internal/refactor/inline/inline.go +++ b/internal/refactor/inline/inline.go @@ -470,6 +470,156 @@ type newImport struct { spec *ast.ImportSpec } +// importState tracks information about imports. 
+type importState struct { + logf func(string, ...any) + caller *Caller + importMap map[string][]string // from package paths in the caller's file to local names + newImports []newImport // for references to free names in callee; to be added to the file + oldImports []oldImport // referenced only by caller.Call.Fun; to be removed from the file +} + +// newImportState returns an importState with initial information about the caller's imports. +func newImportState(logf func(string, ...any), caller *Caller, callee *gobCallee) *importState { + // For simplicity we ignore existing dot imports, so that a qualified + // identifier (QI) in the callee is always represented by a QI in the caller, + // allowing us to treat a QI like a selection on a package name. + is := &importState{ + logf: logf, + caller: caller, + importMap: make(map[string][]string), + } + + for _, imp := range caller.File.Imports { + if pkgName, ok := importedPkgName(caller.Info, imp); ok && + pkgName.Name() != "." && + pkgName.Name() != "_" { + + // If the import's sole use is in caller.Call.Fun of the form p.F(...), + // where p.F is a qualified identifier, the p import may not be + // necessary. + // + // Only the qualified identifier case matters, as other references to + // imported package names in the Call.Fun expression (e.g. + // x.after(3*time.Second).f() or time.Second.String()) will remain after + // inlining, as arguments. + // + // If that is the case, proactively check if any of the callee FreeObjs + // need this import. Doing so eagerly simplifies the resulting logic. + needed := true + sel, ok := ast.Unparen(caller.Call.Fun).(*ast.SelectorExpr) + if ok && soleUse(caller.Info, pkgName) == sel.X { + needed = false // no longer needed by caller + // Check to see if any of the inlined free objects need this package. + for _, obj := range callee.FreeObjs { + if obj.PkgPath == pkgName.Imported().Path() && obj.Shadow[pkgName.Name()] == 0 { + needed = true // needed by callee + break + } + } + } + + // Exclude imports not needed by the caller or callee after inlining; the second + // return value holds these. + if needed { + path := pkgName.Imported().Path() + is.importMap[path] = append(is.importMap[path], pkgName.Name()) + } else { + is.oldImports = append(is.oldImports, oldImport{pkgName: pkgName, spec: imp}) + } + } + } + return is +} + +// importName finds an existing import name to use in a particular shadowing +// context. It is used to determine the set of new imports in +// getOrMakeImportName, and is also used for writing out names in inlining +// strategies below. +func (i *importState) importName(pkgPath string, shadow shadowMap) string { + for _, name := range i.importMap[pkgPath] { + // Check that either the import preexisted, or that it was newly added + // (no PkgName) but is not shadowed, either in the callee (shadows) or + // caller (caller.lookup). + if shadow[name] == 0 { + found := i.caller.lookup(name) + if is[*types.PkgName](found) || found == nil { + return name + } + } + } + return "" +} + +// localName returns the local name for a given imported package path, +// adding one if it doesn't exists. +func (i *importState) localName(pkgPath, pkgName string, shadow shadowMap) string { + // Does an import already exist that works in this shadowing context? 
+ if name := i.importName(pkgPath, shadow); name != "" { + return name + } + + newlyAdded := func(name string) bool { + for _, new := range i.newImports { + if new.pkgName == name { + return true + } + } + return false + } + + // shadowedInCaller reports whether a candidate package name + // already refers to a declaration in the caller. + shadowedInCaller := func(name string) bool { + obj := i.caller.lookup(name) + if obj == nil { + return false + } + // If obj will be removed, the name is available. + for _, old := range i.oldImports { + if old.pkgName == obj { + return false + } + } + return true + } + + // import added by callee + // + // Choose local PkgName based on last segment of + // package path plus, if needed, a numeric suffix to + // ensure uniqueness. + // + // "init" is not a legal PkgName. + // + // TODO(rfindley): is it worth preserving local package names for callee + // imports? Are they likely to be better or worse than the name we choose + // here? + base := pkgName + name := base + for n := 0; shadow[name] != 0 || shadowedInCaller(name) || newlyAdded(name) || name == "init"; n++ { + name = fmt.Sprintf("%s%d", base, n) + } + i.logf("adding import %s %q", name, pkgPath) + spec := &ast.ImportSpec{ + Path: &ast.BasicLit{ + Kind: token.STRING, + Value: strconv.Quote(pkgPath), + }, + } + // Use explicit pkgname (out of necessity) when it differs from the declared name, + // or (for good style) when it differs from base(pkgpath). + if name != pkgName || name != pathpkg.Base(pkgPath) { + spec.Name = makeIdent(name) + } + i.newImports = append(i.newImports, newImport{ + pkgName: name, + spec: spec, + }) + i.importMap[pkgPath] = append(i.importMap[pkgPath], name) + return name +} + type inlineCallResult struct { newImports []newImport // to add oldImports []oldImport // to remove @@ -586,102 +736,8 @@ func (st *state) inlineCall() (*inlineCallResult, error) { assign1 = func(v *types.Var) bool { return !updatedLocals[v] } } - // Build a map, initially populated with caller imports, and updated below - // with new imports necessary to reference free symbols in the callee. - // oldImports are caller imports that won't be needed after inlining. - importMap, oldImports := callerImportMap(caller, callee) - - // importName finds an existing import name to use in a particular shadowing - // context. It is used to determine the set of new imports in - // getOrMakeImportName, and is also used for writing out names in inlining - // strategies below. - importName := func(pkgPath string, shadow shadowMap) string { - for _, name := range importMap[pkgPath] { - // Check that either the import preexisted, or that it was newly added - // (no PkgName) but is not shadowed, either in the callee (shadows) or - // caller (caller.lookup). - if shadow[name] == 0 { - found := caller.lookup(name) - if is[*types.PkgName](found) || found == nil { - return name - } - } - } - return "" - } - - // keep track of new imports that are necessary to reference any free names - // in the callee. - var newImports []newImport - - // getOrMakeImportName returns the local name for a given imported package path, - // adding one if it doesn't exists. - getOrMakeImportName := func(pkgPath, pkgName string, shadow shadowMap) string { - // Does an import already exist that works in this shadowing context? 
- if name := importName(pkgPath, shadow); name != "" { - return name - } - - newlyAdded := func(name string) bool { - for _, new := range newImports { - if new.pkgName == name { - return true - } - } - return false - } - - // shadowedInCaller reports whether a candidate package name - // already refers to a declaration in the caller. - shadowedInCaller := func(name string) bool { - obj := caller.lookup(name) - if obj == nil { - return false - } - // If obj will be removed, the name is available. - for _, old := range oldImports { - if old.pkgName == obj { - return false - } - } - return true - } - - // import added by callee - // - // Choose local PkgName based on last segment of - // package path plus, if needed, a numeric suffix to - // ensure uniqueness. - // - // "init" is not a legal PkgName. - // - // TODO(rfindley): is it worth preserving local package names for callee - // imports? Are they likely to be better or worse than the name we choose - // here? - base := pkgName - name := base - for n := 0; shadow[name] != 0 || shadowedInCaller(name) || newlyAdded(name) || name == "init"; n++ { - name = fmt.Sprintf("%s%d", base, n) - } - logf("adding import %s %q", name, pkgPath) - spec := &ast.ImportSpec{ - Path: &ast.BasicLit{ - Kind: token.STRING, - Value: strconv.Quote(pkgPath), - }, - } - // Use explicit pkgname (out of necessity) when it differs from the declared name, - // or (for good style) when it differs from base(pkgpath). - if name != pkgName || name != pathpkg.Base(pkgPath) { - spec.Name = makeIdent(name) - } - newImports = append(newImports, newImport{ - pkgName: name, - spec: spec, - }) - importMap[pkgPath] = append(importMap[pkgPath], name) - return name - } + // Extract information about the caller's imports. + istate := newImportState(logf, caller, callee) // Compute the renaming of the callee's free identifiers. objRenames := make([]ast.Expr, len(callee.FreeObjs)) // nil => no change @@ -709,7 +765,7 @@ func (st *state) inlineCall() (*inlineCallResult, error) { var newName ast.Expr if obj.Kind == "pkgname" { // Use locally appropriate import, creating as needed. - n := getOrMakeImportName(obj.PkgPath, obj.PkgName, obj.Shadow) + n := istate.localName(obj.PkgPath, obj.PkgName, obj.Shadow) newName = makeIdent(n) // imported package } else if !obj.ValidPos { // Built-in function, type, or value (e.g. nil, zero): @@ -754,7 +810,7 @@ func (st *state) inlineCall() (*inlineCallResult, error) { // Form a qualified identifier, pkg.Name. if qualify { - pkgName := getOrMakeImportName(obj.PkgPath, obj.PkgName, obj.Shadow) + pkgName := istate.localName(obj.PkgPath, obj.PkgName, obj.Shadow) newName = &ast.SelectorExpr{ X: makeIdent(pkgName), Sel: makeIdent(obj.Name), @@ -765,8 +821,8 @@ func (st *state) inlineCall() (*inlineCallResult, error) { } res := &inlineCallResult{ - newImports: newImports, - oldImports: oldImports, + newImports: istate.newImports, + oldImports: istate.oldImports, } // Parse callee function declaration. @@ -1115,7 +1171,7 @@ func (st *state) inlineCall() (*inlineCallResult, error) { (!needBindingDecl || (bindingDecl != nil && len(bindingDecl.names) == 0)) { // Reduces to: { var (bindings); lhs... := rhs... 
} - if newStmts, ok := st.assignStmts(stmt, results, importName); ok { + if newStmts, ok := st.assignStmts(stmt, results, istate.importName); ok { logf("strategy: reduce assign-context call to { return exprs }") clearPositions(calleeDecl.Body) @@ -1348,56 +1404,6 @@ func (st *state) inlineCall() (*inlineCallResult, error) { return res, nil } -// callerImportMap returns a map from package paths in the caller's file to local names. -// The map excludes imports not needed by the caller or callee after inlining; the second -// return value holds these. -func callerImportMap(caller *Caller, callee *gobCallee) (map[string][]string, []oldImport) { - // For simplicity we ignore existing dot imports, so that a qualified - // identifier (QI) in the callee is always represented by a QI in the caller, - // allowing us to treat a QI like a selection on a package name. - importMap := make(map[string][]string) // maps package path to local name(s) - var oldImports []oldImport // imports referenced only by caller.Call.Fun - - for _, imp := range caller.File.Imports { - if pkgName, ok := importedPkgName(caller.Info, imp); ok && - pkgName.Name() != "." && - pkgName.Name() != "_" { - - // If the import's sole use is in caller.Call.Fun of the form p.F(...), - // where p.F is a qualified identifier, the p import may not be - // necessary. - // - // Only the qualified identifier case matters, as other references to - // imported package names in the Call.Fun expression (e.g. - // x.after(3*time.Second).f() or time.Second.String()) will remain after - // inlining, as arguments. - // - // If that is the case, proactively check if any of the callee FreeObjs - // need this import. Doing so eagerly simplifies the resulting logic. - needed := true - sel, ok := ast.Unparen(caller.Call.Fun).(*ast.SelectorExpr) - if ok && soleUse(caller.Info, pkgName) == sel.X { - needed = false // no longer needed by caller - // Check to see if any of the inlined free objects need this package. - for _, obj := range callee.FreeObjs { - if obj.PkgPath == pkgName.Imported().Path() && obj.Shadow[pkgName.Name()] == 0 { - needed = true // needed by callee - break - } - } - } - - if needed { - path := pkgName.Imported().Path() - importMap[path] = append(importMap[path], pkgName.Name()) - } else { - oldImports = append(oldImports, oldImport{pkgName: pkgName, spec: imp}) - } - } - } - return importMap, oldImports -} - type argument struct { expr ast.Expr typ types.Type // may be tuple for sole non-receiver arg in spread call From 8c42f8aeff807484822b4be1294b82696f8fc17e Mon Sep 17 00:00:00 2001 From: xieyuschen Date: Wed, 26 Mar 2025 22:11:08 -0600 Subject: [PATCH 149/270] gopls/internal/analysis/modernize: use types.RelativeTo to respect current package This CL fixes a bug introduced in CL659155. It leverages types.RelativeTo to determine the package name of a variable, ensuring that the package name is not added for types declared within the same package. This prevents invalid type errors. Additionally, this CL introduces additional test cases. 
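
As a standalone sketch of the difference between the two qualifiers (not
part of the CL; the package and type names below are invented, mirroring
the T type added to the test data):

    package main

    import (
        "fmt"
        "go/ast"
        "go/parser"
        "go/token"
        "go/types"
    )

    func main() {
        const src = "package rangeint; type T uint; var x T"
        fset := token.NewFileSet()
        f, err := parser.ParseFile(fset, "rangeint.go", src, 0)
        if err != nil {
            panic(err)
        }
        pkg, err := (&types.Config{}).Check("rangeint", fset, []*ast.File{f}, nil)
        if err != nil {
            panic(err)
        }
        t := pkg.Scope().Lookup("x").Type()

        // Qualifying by package name always adds a prefix, even inside the
        // package being analyzed, yielding "rangeint.T" where plain "T" is
        // required; that is the source of the invalid type errors.
        fmt.Println(types.TypeString(t, (*types.Package).Name)) // rangeint.T

        // RelativeTo omits the qualifier for the current package.
        fmt.Println(types.TypeString(t, types.RelativeTo(pkg))) // T
    }
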
Fixes golang/go#73037 Change-Id: I6ad0c80c02ad1a679a956b93a53a05a8eb1ba9c2 Reviewed-on: https://go-review.googlesource.com/c/tools/+/660815 LUCI-TryBot-Result: Go LUCI Reviewed-by: Dmitri Shuralyov Auto-Submit: Alan Donovan Reviewed-by: Alan Donovan --- gopls/internal/analysis/modernize/rangeint.go | 2 +- .../testdata/src/rangeint/rangeint.go | 26 +++++++++++++++++++ .../testdata/src/rangeint/rangeint.go.golden | 26 +++++++++++++++++++ 3 files changed, 53 insertions(+), 1 deletion(-) diff --git a/gopls/internal/analysis/modernize/rangeint.go b/gopls/internal/analysis/modernize/rangeint.go index 4ca87e40aec..1d3f4b5db0c 100644 --- a/gopls/internal/analysis/modernize/rangeint.go +++ b/gopls/internal/analysis/modernize/rangeint.go @@ -184,7 +184,7 @@ func rangeint(pass *analysis.Pass) { // (Unfortunately types.Qualifier doesn't provide the name of the package // member to be qualified, a qualifier cannot perform the necessary shadowing // check for dot-imported names.) - beforeLimit, afterLimit = fmt.Sprintf("%s(", types.TypeString(tVar, (*types.Package).Name)), ")" + beforeLimit, afterLimit = fmt.Sprintf("%s(", types.TypeString(tVar, types.RelativeTo(pass.Pkg))), ")" info2 := &types.Info{Types: make(map[ast.Expr]types.TypeAndValue)} if types.CheckExpr(pass.Fset, pass.Pkg, limit.Pos(), limit, info2) == nil { tLimit := types.Default(info2.TypeOf(limit)) diff --git a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go index 7048fea1148..74f3488546c 100644 --- a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go +++ b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go @@ -205,3 +205,29 @@ func todo() { println(j) } } + +type T uint +type TAlias = uint + +func Fn(a int) T { + return T(a) +} + +func issue73037() { + var q T + for a := T(0); a < q; a++ { // want "for loop can be modernized using range over int" + println(a) + } + for a := Fn(0); a < q; a++ { + println(a) + } + var qa TAlias + for a := TAlias(0); a < qa; a++ { // want "for loop can be modernized using range over int" + println(a) + } + for a := T(0); a < 10; a++ { // want "for loop can be modernized using range over int" + for b := T(0); b < 10; b++ { // want "for loop can be modernized using range over int" + println(a, b) + } + } +} diff --git a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden index 8c3fdc40b77..a21bd7e8607 100644 --- a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden +++ b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden @@ -205,3 +205,29 @@ func todo() { println(j) } } + +type T uint +type TAlias = uint + +func Fn(a int) T { + return T(a) +} + +func issue73037() { + var q T + for a := range q { // want "for loop can be modernized using range over int" + println(a) + } + for a := Fn(0); a < q; a++ { + println(a) + } + var qa TAlias + for a := range qa { // want "for loop can be modernized using range over int" + println(a) + } + for a := range T(10) { // want "for loop can be modernized using range over int" + for b := range T(10) { // want "for loop can be modernized using range over int" + println(a, b) + } + } +} From 48421ae1421f1b5c6f0ae26a4eeef04e9e4d094d Mon Sep 17 00:00:00 2001 From: acehinnnqru Date: Mon, 24 Mar 2025 01:22:28 +0800 Subject: [PATCH 150/270] gopls/internal/analysis/modernize: preserves comments in mapsloop This CL 
changes the original deletion to new deletion, - (from the start of mrhs to the end of the loop) to (from the start of the assigment to the end of the loop) - unchanged: (from the start of the loop to the end) to (from the start of the loop to the end) and put all comments between them on top of the new expression. This change preserves all comments between the mapsloop range, causing comments B,C,D to be preserved and put on the top of new exprs. Here shows an example of `maps.Copy` - source: m1 = make(map) m2 = make(map) // A for k, v := range m1 { // B m2[k] = b // C } - fixed: m1 = make(map) m2 = make(map) // A // B // C maps.Copy(m2, m1) Fixes: golang/go#72958 Change-Id: Id751c39151880504683c533f7b38599c6ab6e19e Reviewed-on: https://go-review.googlesource.com/c/tools/+/660255 Auto-Submit: Alan Donovan LUCI-TryBot-Result: Go LUCI Reviewed-by: Dmitri Shuralyov Reviewed-by: Alan Donovan --- gopls/internal/analysis/modernize/maps.go | 27 +++++++++++++++--- gopls/internal/analysis/modernize/minmax.go | 17 +++++------ .../testdata/src/mapsloop/mapsloop.go | 14 +++++++++- .../testdata/src/mapsloop/mapsloop.go.golden | 28 +++++++++++++++++++ .../src/mapsloop/mapsloop_dot.go.golden | 2 ++ 5 files changed, 75 insertions(+), 13 deletions(-) diff --git a/gopls/internal/analysis/modernize/maps.go b/gopls/internal/analysis/modernize/maps.go index 5577978278c..1a5e2c3eeee 100644 --- a/gopls/internal/analysis/modernize/maps.go +++ b/gopls/internal/analysis/modernize/maps.go @@ -156,16 +156,35 @@ func mapsloop(pass *analysis.Pass) { start, end token.Pos ) if mrhs != nil { - // Replace RHS of preceding m=... assignment (and loop) with expression. - start, end = mrhs.Pos(), rng.End() - newText = fmt.Appendf(nil, "%s%s(%s)", + // Replace assignment and loop with expression. + // + // m = make(...) + // for k, v := range x { /* comments */ m[k] = v } + // + // -> + // + // /* comments */ + // m = maps.Copy(x) + curPrev, _ := curRange.PrevSibling() + start, end = curPrev.Node().Pos(), rng.End() + newText = fmt.Appendf(nil, "%s%s = %s%s(%s)", + allComments(file, start, end), + analysisinternal.Format(pass.Fset, m), prefix, funcName, analysisinternal.Format(pass.Fset, x)) } else { // Replace loop with call statement. + // + // for k, v := range x { /* comments */ m[k] = v } + // + // -> + // + // /* comments */ + // maps.Copy(m, x) start, end = rng.Pos(), rng.End() - newText = fmt.Appendf(nil, "%s%s(%s, %s)", + newText = fmt.Appendf(nil, "%s%s%s(%s, %s)", + allComments(file, start, end), prefix, funcName, analysisinternal.Format(pass.Fset, m), diff --git a/gopls/internal/analysis/modernize/minmax.go b/gopls/internal/analysis/modernize/minmax.go index a996f9bd56a..415e9fc5661 100644 --- a/gopls/internal/analysis/modernize/minmax.go +++ b/gopls/internal/analysis/modernize/minmax.go @@ -50,14 +50,6 @@ func minmax(pass *analysis.Pass) { sign = isInequality(compare.Op) ) - allComments := func(file *ast.File, start, end token.Pos) string { - var buf strings.Builder - for co := range analysisinternal.Comments(file, start, end) { - _, _ = fmt.Fprintf(&buf, "%s\n", co.Text) - } - return buf.String() - } - if fblock, ok := ifStmt.Else.(*ast.BlockStmt); ok && isAssignBlock(fblock) { fassign := fblock.List[0].(*ast.AssignStmt) @@ -196,6 +188,15 @@ func minmax(pass *analysis.Pass) { } } +// allComments collects all the comments from start to end. 
+func allComments(file *ast.File, start, end token.Pos) string { + var buf strings.Builder + for co := range analysisinternal.Comments(file, start, end) { + _, _ = fmt.Fprintf(&buf, "%s\n", co.Text) + } + return buf.String() +} + // isInequality reports non-zero if tok is one of < <= => >: // +1 for > and -1 for <. func isInequality(tok token.Token) int { diff --git a/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop.go b/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop.go index 68ff9154ffd..7d0f7d17e91 100644 --- a/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop.go +++ b/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop.go @@ -16,6 +16,7 @@ type M map[int]string func useCopy(dst, src map[int]string) { // Replace loop by maps.Copy. for key, value := range src { + // A dst[key] = value // want "Replace m\\[k\\]=v loop with maps.Copy" } } @@ -23,6 +24,7 @@ func useCopy(dst, src map[int]string) { func useCopyGeneric[K comparable, V any, M ~map[K]V](dst, src M) { // Replace loop by maps.Copy. for key, value := range src { + // A dst[key] = value // want "Replace m\\[k\\]=v loop with maps.Copy" } } @@ -32,12 +34,18 @@ func useCopyNotClone(src map[int]string) { // Replace make(...) by maps.Copy. dst := make(map[int]string, len(src)) + // A for key, value := range src { + // B dst[key] = value // want "Replace m\\[k\\]=v loop with maps.Copy" + // C } + // A dst = map[int]string{} + // B for key, value := range src { + // C dst[key] = value // want "Replace m\\[k\\]=v loop with maps.Copy" } println(dst) @@ -126,8 +134,10 @@ func useInsert_assignableToSeq2(dst map[int]string, src func(yield func(int, str func useCollect(src iter.Seq2[int, string]) { // Replace loop and make(...) by maps.Collect. var dst map[int]string - dst = make(map[int]string) + dst = make(map[int]string) // A + // B for key, value := range src { + // C dst[key] = value // want "Replace m\\[k\\]=v loop with maps.Collect" } } @@ -137,7 +147,9 @@ func useInsert_typesDifferAssign(src iter.Seq2[int, string]) { // that is assignable to M. var dst M dst = make(M) + // A for key, value := range src { + // B dst[key] = value // want "Replace m\\[k\\]=v loop with maps.Collect" } } diff --git a/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop.go.golden b/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop.go.golden index be189673d9a..9136105b908 100644 --- a/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop.go.golden +++ b/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop.go.golden @@ -15,11 +15,15 @@ type M map[int]string func useCopy(dst, src map[int]string) { // Replace loop by maps.Copy. + // A + // want "Replace m\\[k\\]=v loop with maps.Copy" maps.Copy(dst, src) } func useCopyGeneric[K comparable, V any, M ~map[K]V](dst, src M) { // Replace loop by maps.Copy. + // A + // want "Replace m\\[k\\]=v loop with maps.Copy" maps.Copy(dst, src) } @@ -28,9 +32,17 @@ func useCopyNotClone(src map[int]string) { // Replace make(...) by maps.Copy. dst := make(map[int]string, len(src)) + // A + // B + // want "Replace m\\[k\\]=v loop with maps.Copy" + // C maps.Copy(dst, src) + // A dst = map[int]string{} + // B + // C + // want "Replace m\\[k\\]=v loop with maps.Copy" maps.Copy(dst, src) println(dst) } @@ -40,9 +52,11 @@ func useCopyParen(src map[int]string) { // Replace (make)(...) by maps.Clone. 
dst := (make)(map[int]string, len(src)) + // want "Replace m\\[k\\]=v loop with maps.Copy" maps.Copy(dst, src) dst = (map[int]string{}) + // want "Replace m\\[k\\]=v loop with maps.Copy" maps.Copy(dst, src) println(dst) } @@ -50,6 +64,7 @@ func useCopyParen(src map[int]string) { func useCopy_typesDiffer(src M) { // Replace loop but not make(...) as maps.Copy(src) would return wrong type M. dst := make(map[int]string, len(src)) + // want "Replace m\\[k\\]=v loop with maps.Copy" maps.Copy(dst, src) println(dst) } @@ -57,6 +72,7 @@ func useCopy_typesDiffer(src M) { func useCopy_typesDiffer2(src map[int]string) { // Replace loop but not make(...) as maps.Copy(src) would return wrong type map[int]string. dst := make(M, len(src)) + // want "Replace m\\[k\\]=v loop with maps.Copy" maps.Copy(dst, src) println(dst) } @@ -68,6 +84,7 @@ func useClone_typesDiffer3(src map[int]string) { // which is assignable to M. var dst M dst = make(M, len(src)) + // want "Replace m\\[k\\]=v loop with maps.Copy" maps.Copy(dst, src) println(dst) } @@ -79,6 +96,7 @@ func useClone_typesDiffer4(src map[int]string) { // which is assignable to M. var dst M dst = make(M, len(src)) + // want "Replace m\\[k\\]=v loop with maps.Copy" maps.Copy(dst, src) println(dst) } @@ -88,6 +106,7 @@ func useClone_generic[Map ~map[K]V, K comparable, V any](src Map) { // Replace loop and make(...) by maps.Clone dst := make(Map, len(src)) + // want "Replace m\\[k\\]=v loop with maps.Copy" maps.Copy(dst, src) println(dst) } @@ -96,12 +115,17 @@ func useClone_generic[Map ~map[K]V, K comparable, V any](src Map) { func useInsert_assignableToSeq2(dst map[int]string, src func(yield func(int, string) bool)) { // Replace loop by maps.Insert because src is assignable to iter.Seq2. + // want "Replace m\\[k\\]=v loop with maps.Insert" maps.Insert(dst, src) } func useCollect(src iter.Seq2[int, string]) { // Replace loop and make(...) by maps.Collect. var dst map[int]string + // A + // B + // C + // want "Replace m\\[k\\]=v loop with maps.Collect" dst = maps.Collect(src) } @@ -109,6 +133,9 @@ func useInsert_typesDifferAssign(src iter.Seq2[int, string]) { // Replace loop and make(...): maps.Collect returns an unnamed map type // that is assignable to M. var dst M + // A + // B + // want "Replace m\\[k\\]=v loop with maps.Collect" dst = maps.Collect(src) } @@ -116,6 +143,7 @@ func useInsert_typesDifferDeclare(src iter.Seq2[int, string]) { // Replace loop but not make(...) as maps.Collect would return an // unnamed map type that would change the type of dst. dst := make(M) + // want "Replace m\\[k\\]=v loop with maps.Insert" maps.Insert(dst, src) } diff --git a/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop_dot.go.golden b/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop_dot.go.golden index e992314cf56..6347d56360a 100644 --- a/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop_dot.go.golden +++ b/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop_dot.go.golden @@ -8,6 +8,7 @@ var _ = Clone[M] // force "maps" import so that each diagnostic doesn't add one func useCopyDot(dst, src map[int]string) { // Replace loop by maps.Copy. + // want "Replace m\\[k\\]=v loop with maps.Copy" Copy(dst, src) } @@ -16,6 +17,7 @@ func useCloneDot(src map[int]string) { // Replace make(...) by maps.Copy. 
dst := make(map[int]string, len(src)) + // want "Replace m\\[k\\]=v loop with maps.Copy" Copy(dst, src) println(dst) } From 07cbcde02556290809fe12e096943d8d751dbaab Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Wed, 26 Mar 2025 17:32:00 -0400 Subject: [PATCH 151/270] gopls/internal/cmd: suppress TestImplementation on go1.23 The types.CheckExpr data race (#71817), fixed at master and backported to go1.24, is not backported to go1.23. The race itself is benign, but it causes this one test to flake. So we suppress it on go1.23. Fixes golang/go#72082 Change-Id: I64731adc50137aefbbecc0b7a47a41036d831eab Reviewed-on: https://go-review.googlesource.com/c/tools/+/661176 Auto-Submit: Alan Donovan LUCI-TryBot-Result: Go LUCI Reviewed-by: Robert Griesemer --- gopls/internal/cmd/integration_test.go | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/gopls/internal/cmd/integration_test.go b/gopls/internal/cmd/integration_test.go index e7ac774f5c0..9d135ceadb2 100644 --- a/gopls/internal/cmd/integration_test.go +++ b/gopls/internal/cmd/integration_test.go @@ -508,6 +508,14 @@ func f() { func TestImplementations(t *testing.T) { t.Parallel() + // types.CheckExpr, now used in the rangeint modernizer, had a + // data race (#71817) that was fixed in go1.25 and backported + // to go1.24 but not to go1.23. Although in principle it could + // affect a lot of tests, it (weirdly) only seems to show up + // in this one (#72082). Rather than backport again, we + // suppress this test. + testenv.NeedsGo1Point(t, 24) + tree := writeTree(t, ` -- a.go -- package a From 1b0b68818a49feca5d1a8a204e8e11d75c71e117 Mon Sep 17 00:00:00 2001 From: xieyuschen Date: Thu, 27 Mar 2025 19:51:54 +0800 Subject: [PATCH 152/270] gopls: fix indent issue and track a TODO Change-Id: I634a7f8c5bf1e32be8d9bc0a6620c992f8aa01ce Reviewed-on: https://go-review.googlesource.com/c/tools/+/661216 LUCI-TryBot-Result: Go LUCI Auto-Submit: Alan Donovan Reviewed-by: Alan Donovan Reviewed-by: Dmitri Shuralyov --- .../testdata/src/rangeint/rangeint.go.golden | 12 +- gopls/internal/telemetry/telemetry_test.go | 4 +- .../test/integration/bench/completion_test.go | 2 +- .../test/integration/bench/repo_test.go | 4 +- gopls/internal/test/integration/env.go | 8 +- .../internal/test/integration/expectation.go | 2 +- .../test/integration/misc/highlight_test.go | 2 +- .../integration/misc/workspace_symbol_test.go | 6 +- gopls/internal/test/integration/runner.go | 2 +- gopls/internal/test/integration/wrappers.go | 224 +++++++++--------- gopls/internal/test/marker/marker_test.go | 20 +- 11 files changed, 143 insertions(+), 143 deletions(-) diff --git a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden index a21bd7e8607..cdd2f118997 100644 --- a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden +++ b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden @@ -214,17 +214,17 @@ func Fn(a int) T { } func issue73037() { - var q T + var q T for a := range q { // want "for loop can be modernized using range over int" - println(a) + println(a) } for a := Fn(0); a < q; a++ { println(a) } - var qa TAlias - for a := range qa { // want "for loop can be modernized using range over int" - println(a) - } + var qa TAlias + for a := range qa { // want "for loop can be modernized using range over int" + println(a) + } for a := range T(10) { // want "for loop can be modernized using range over int" for b := range T(10) { // want 
"for loop can be modernized using range over int" println(a, b) diff --git a/gopls/internal/telemetry/telemetry_test.go b/gopls/internal/telemetry/telemetry_test.go index 4c41cc40dc9..1e56012182f 100644 --- a/gopls/internal/telemetry/telemetry_test.go +++ b/gopls/internal/telemetry/telemetry_test.go @@ -168,7 +168,7 @@ func addForwardedCounters(env *Env, names []string, values []int64) { Names: names, Values: values, }) if err != nil { - env.T.Fatal(err) + env.TB.Fatal(err) } var res error env.ExecuteCommand(&protocol.ExecuteCommandParams{ @@ -176,7 +176,7 @@ func addForwardedCounters(env *Env, names []string, values []int64) { Arguments: args, }, &res) if res != nil { - env.T.Errorf("%v failed - %v", command.AddTelemetryCounters, res) + env.TB.Errorf("%v failed - %v", command.AddTelemetryCounters, res) } } diff --git a/gopls/internal/test/integration/bench/completion_test.go b/gopls/internal/test/integration/bench/completion_test.go index d84512d1f8f..48ecf0cefd6 100644 --- a/gopls/internal/test/integration/bench/completion_test.go +++ b/gopls/internal/test/integration/bench/completion_test.go @@ -69,7 +69,7 @@ func endRangeInBuffer(env *Env, name string) protocol.Range { m := protocol.NewMapper("", []byte(buffer)) rng, err := m.OffsetRange(len(buffer), len(buffer)) if err != nil { - env.T.Fatal(err) + env.TB.Fatal(err) } return rng } diff --git a/gopls/internal/test/integration/bench/repo_test.go b/gopls/internal/test/integration/bench/repo_test.go index 50370e73491..65728c00552 100644 --- a/gopls/internal/test/integration/bench/repo_test.go +++ b/gopls/internal/test/integration/bench/repo_test.go @@ -211,7 +211,7 @@ func (r *repo) sharedEnv(tb testing.TB) *Env { }) return &Env{ - T: tb, + TB: tb, Ctx: context.Background(), Editor: r.editor, Sandbox: r.sandbox, @@ -238,7 +238,7 @@ func (r *repo) newEnv(tb testing.TB, config fake.EditorConfig, forOperation stri } return &Env{ - T: tb, + TB: tb, Ctx: context.Background(), Editor: editor, Sandbox: sandbox, diff --git a/gopls/internal/test/integration/env.go b/gopls/internal/test/integration/env.go index f19a426316d..822120e8324 100644 --- a/gopls/internal/test/integration/env.go +++ b/gopls/internal/test/integration/env.go @@ -21,7 +21,7 @@ import ( // wrapper methods that hide the boilerplate of plumbing contexts and checking // errors. type Env struct { - T testing.TB // TODO(rfindley): rename to TB + TB testing.TB Ctx context.Context // Most tests should not need to access the scratch area, editor, server, or @@ -311,9 +311,9 @@ func (a *Awaiter) checkConditionsLocked() { // Use AfterChange or OnceMet instead, so that the runner knows when to stop // waiting. func (e *Env) Await(expectations ...Expectation) { - e.T.Helper() + e.TB.Helper() if err := e.Awaiter.Await(e.Ctx, AllOf(expectations...)); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } @@ -321,7 +321,7 @@ func (e *Env) Await(expectations ...Expectation) { // unmeetable. If it was met, OnceMet checks that the state meets all // expectations in mustMeets. 
func (e *Env) OnceMet(pre Expectation, mustMeets ...Expectation) { - e.T.Helper() + e.TB.Helper() e.Await(OnceMet(pre, AllOf(mustMeets...))) } diff --git a/gopls/internal/test/integration/expectation.go b/gopls/internal/test/integration/expectation.go index 70a16fd6b3a..98554ddccc3 100644 --- a/gopls/internal/test/integration/expectation.go +++ b/gopls/internal/test/integration/expectation.go @@ -352,7 +352,7 @@ func (e *Env) DoneDiagnosingChanges() Expectation { // - workspace/didChangeWatchedFiles // - workspace/didChangeConfiguration func (e *Env) AfterChange(expectations ...Expectation) { - e.T.Helper() + e.TB.Helper() e.OnceMet( e.DoneDiagnosingChanges(), expectations..., diff --git a/gopls/internal/test/integration/misc/highlight_test.go b/gopls/internal/test/integration/misc/highlight_test.go index e4da558e5d0..36bddf25057 100644 --- a/gopls/internal/test/integration/misc/highlight_test.go +++ b/gopls/internal/test/integration/misc/highlight_test.go @@ -124,7 +124,7 @@ func main() {}` } func checkHighlights(env *Env, loc protocol.Location, highlightCount int) { - t := env.T + t := env.TB t.Helper() highlights := env.DocumentHighlight(loc) diff --git a/gopls/internal/test/integration/misc/workspace_symbol_test.go b/gopls/internal/test/integration/misc/workspace_symbol_test.go index 9420b146d85..f1148539447 100644 --- a/gopls/internal/test/integration/misc/workspace_symbol_test.go +++ b/gopls/internal/test/integration/misc/workspace_symbol_test.go @@ -8,8 +8,8 @@ import ( "testing" "github.com/google/go-cmp/cmp" - . "golang.org/x/tools/gopls/internal/test/integration" "golang.org/x/tools/gopls/internal/settings" + . "golang.org/x/tools/gopls/internal/test/integration" ) func TestWorkspaceSymbolMissingMetadata(t *testing.T) { @@ -103,12 +103,12 @@ const ( } func checkSymbols(env *Env, query string, want ...string) { - env.T.Helper() + env.TB.Helper() var got []string for _, info := range env.Symbol(query) { got = append(got, info.Name) } if diff := cmp.Diff(got, want); diff != "" { - env.T.Errorf("unexpected Symbol(%q) result (+want -got):\n%s", query, diff) + env.TB.Errorf("unexpected Symbol(%q) result (+want -got):\n%s", query, diff) } } diff --git a/gopls/internal/test/integration/runner.go b/gopls/internal/test/integration/runner.go index b4b9d3a2a4d..c4609cb8f91 100644 --- a/gopls/internal/test/integration/runner.go +++ b/gopls/internal/test/integration/runner.go @@ -253,7 +253,7 @@ func ConnectGoplsEnv(t testing.TB, ctx context.Context, sandbox *fake.Sandbox, c t.Fatal(err) } env := &Env{ - T: t, + TB: t, Ctx: ctx, Sandbox: sandbox, Server: connector, diff --git a/gopls/internal/test/integration/wrappers.go b/gopls/internal/test/integration/wrappers.go index 989ae913acf..6389cdb74e8 100644 --- a/gopls/internal/test/integration/wrappers.go +++ b/gopls/internal/test/integration/wrappers.go @@ -18,19 +18,19 @@ import ( // RemoveWorkspaceFile deletes a file on disk but does nothing in the // editor. It calls t.Fatal on any error. func (e *Env) RemoveWorkspaceFile(name string) { - e.T.Helper() + e.TB.Helper() if err := e.Sandbox.Workdir.RemoveFile(e.Ctx, name); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } // ReadWorkspaceFile reads a file from the workspace, calling t.Fatal on any // error. 
func (e *Env) ReadWorkspaceFile(name string) string { - e.T.Helper() + e.TB.Helper() content, err := e.Sandbox.Workdir.ReadFile(name) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return string(content) } @@ -38,55 +38,55 @@ func (e *Env) ReadWorkspaceFile(name string) string { // WriteWorkspaceFile writes a file to disk but does nothing in the editor. // It calls t.Fatal on any error. func (e *Env) WriteWorkspaceFile(name, content string) { - e.T.Helper() + e.TB.Helper() if err := e.Sandbox.Workdir.WriteFile(e.Ctx, name, content); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } // WriteWorkspaceFiles deletes a file on disk but does nothing in the // editor. It calls t.Fatal on any error. func (e *Env) WriteWorkspaceFiles(files map[string]string) { - e.T.Helper() + e.TB.Helper() if err := e.Sandbox.Workdir.WriteFiles(e.Ctx, files); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } // ListFiles lists relative paths to files in the given directory. // It calls t.Fatal on any error. func (e *Env) ListFiles(dir string) []string { - e.T.Helper() + e.TB.Helper() paths, err := e.Sandbox.Workdir.ListFiles(dir) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return paths } // OpenFile opens a file in the editor, calling t.Fatal on any error. func (e *Env) OpenFile(name string) { - e.T.Helper() + e.TB.Helper() if err := e.Editor.OpenFile(e.Ctx, name); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } // CreateBuffer creates a buffer in the editor, calling t.Fatal on any error. func (e *Env) CreateBuffer(name string, content string) { - e.T.Helper() + e.TB.Helper() if err := e.Editor.CreateBuffer(e.Ctx, name, content); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } // BufferText returns the current buffer contents for the file with the given // relative path, calling t.Fatal if the file is not open in a buffer. func (e *Env) BufferText(name string) string { - e.T.Helper() + e.TB.Helper() text, ok := e.Editor.BufferText(name) if !ok { - e.T.Fatalf("buffer %q is not open", name) + e.TB.Fatalf("buffer %q is not open", name) } return text } @@ -94,24 +94,24 @@ func (e *Env) BufferText(name string) string { // CloseBuffer closes an editor buffer without saving, calling t.Fatal on any // error. func (e *Env) CloseBuffer(name string) { - e.T.Helper() + e.TB.Helper() if err := e.Editor.CloseBuffer(e.Ctx, name); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } // EditBuffer applies edits to an editor buffer, calling t.Fatal on any error. func (e *Env) EditBuffer(name string, edits ...protocol.TextEdit) { - e.T.Helper() + e.TB.Helper() if err := e.Editor.EditBuffer(e.Ctx, name, edits); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } func (e *Env) SetBufferContent(name string, content string) { - e.T.Helper() + e.TB.Helper() if err := e.Editor.SetBufferContent(e.Ctx, name, content); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } @@ -119,7 +119,7 @@ func (e *Env) SetBufferContent(name string, content string) { // editing session: it returns the buffer content for an open file, the // on-disk content for an unopened file, or "" for a non-existent file. 
func (e *Env) FileContent(name string) string { - e.T.Helper() + e.TB.Helper() text, ok := e.Editor.BufferText(name) if ok { return text @@ -129,7 +129,7 @@ func (e *Env) FileContent(name string) string { if errors.Is(err, os.ErrNotExist) { return "" } else { - e.T.Fatal(err) + e.TB.Fatal(err) } } return string(content) @@ -138,14 +138,14 @@ func (e *Env) FileContent(name string) string { // FileContentAt returns the file content at the given location, using the // file's mapper. func (e *Env) FileContentAt(location protocol.Location) string { - e.T.Helper() + e.TB.Helper() mapper, err := e.Editor.Mapper(location.URI.Path()) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } start, end, err := mapper.RangeOffsets(location.Range) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return string(mapper.Content[start:end]) } @@ -154,13 +154,13 @@ func (e *Env) FileContentAt(location protocol.Location) string { // buffer specified by name, calling t.Fatal on any error. It first searches // for the position in open buffers, then in workspace files. func (e *Env) RegexpSearch(name, re string) protocol.Location { - e.T.Helper() + e.TB.Helper() loc, err := e.Editor.RegexpSearch(name, re) if err == fake.ErrUnknownBuffer { loc, err = e.Sandbox.Workdir.RegexpSearch(name, re) } if err != nil { - e.T.Fatalf("RegexpSearch: %v, %v for %q", name, err, re) + e.TB.Fatalf("RegexpSearch: %v, %v for %q", name, err, re) } return loc } @@ -168,24 +168,24 @@ func (e *Env) RegexpSearch(name, re string) protocol.Location { // RegexpReplace replaces the first group in the first match of regexpStr with // the replace text, calling t.Fatal on any error. func (e *Env) RegexpReplace(name, regexpStr, replace string) { - e.T.Helper() + e.TB.Helper() if err := e.Editor.RegexpReplace(e.Ctx, name, regexpStr, replace); err != nil { - e.T.Fatalf("RegexpReplace: %v", err) + e.TB.Fatalf("RegexpReplace: %v", err) } } // SaveBuffer saves an editor buffer, calling t.Fatal on any error. func (e *Env) SaveBuffer(name string) { - e.T.Helper() + e.TB.Helper() if err := e.Editor.SaveBuffer(e.Ctx, name); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } func (e *Env) SaveBufferWithoutActions(name string) { - e.T.Helper() + e.TB.Helper() if err := e.Editor.SaveBufferWithoutActions(e.Ctx, name); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } @@ -194,64 +194,64 @@ func (e *Env) SaveBufferWithoutActions(name string) { // // TODO(rfindley): rename this to just 'Definition'. func (e *Env) GoToDefinition(loc protocol.Location) protocol.Location { - e.T.Helper() + e.TB.Helper() loc, err := e.Editor.Definition(e.Ctx, loc) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return loc } func (e *Env) TypeDefinition(loc protocol.Location) protocol.Location { - e.T.Helper() + e.TB.Helper() loc, err := e.Editor.TypeDefinition(e.Ctx, loc) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return loc } // FormatBuffer formats the editor buffer, calling t.Fatal on any error. func (e *Env) FormatBuffer(name string) { - e.T.Helper() + e.TB.Helper() if err := e.Editor.FormatBuffer(e.Ctx, name); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } // OrganizeImports processes the source.organizeImports codeAction, calling // t.Fatal on any error. func (e *Env) OrganizeImports(name string) { - e.T.Helper() + e.TB.Helper() if err := e.Editor.OrganizeImports(e.Ctx, name); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } // ApplyQuickFixes processes the quickfix codeAction, calling t.Fatal on any error. 
func (e *Env) ApplyQuickFixes(path string, diagnostics []protocol.Diagnostic) { - e.T.Helper() + e.TB.Helper() loc := e.Sandbox.Workdir.EntireFile(path) if err := e.Editor.ApplyQuickFixes(e.Ctx, loc, diagnostics); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } // ApplyCodeAction applies the given code action, calling t.Fatal on any error. func (e *Env) ApplyCodeAction(action protocol.CodeAction) { - e.T.Helper() + e.TB.Helper() if err := e.Editor.ApplyCodeAction(e.Ctx, action); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } // Diagnostics returns diagnostics for the given file, calling t.Fatal on any // error. func (e *Env) Diagnostics(name string) []protocol.Diagnostic { - e.T.Helper() + e.TB.Helper() diags, err := e.Editor.Diagnostics(e.Ctx, name) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return diags } @@ -259,11 +259,11 @@ func (e *Env) Diagnostics(name string) []protocol.Diagnostic { // GetQuickFixes returns the available quick fix code actions, calling t.Fatal // on any error. func (e *Env) GetQuickFixes(path string, diagnostics []protocol.Diagnostic) []protocol.CodeAction { - e.T.Helper() + e.TB.Helper() loc := e.Sandbox.Workdir.EntireFile(path) actions, err := e.Editor.GetQuickFixes(e.Ctx, loc, diagnostics) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return actions } @@ -271,28 +271,28 @@ func (e *Env) GetQuickFixes(path string, diagnostics []protocol.Diagnostic) []pr // Hover in the editor, calling t.Fatal on any error. // It may return (nil, zero) even on success. func (e *Env) Hover(loc protocol.Location) (*protocol.MarkupContent, protocol.Location) { - e.T.Helper() + e.TB.Helper() c, loc, err := e.Editor.Hover(e.Ctx, loc) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return c, loc } func (e *Env) DocumentLink(name string) []protocol.DocumentLink { - e.T.Helper() + e.TB.Helper() links, err := e.Editor.DocumentLink(e.Ctx, name) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return links } func (e *Env) DocumentHighlight(loc protocol.Location) []protocol.DocumentHighlight { - e.T.Helper() + e.TB.Helper() highlights, err := e.Editor.DocumentHighlight(e.Ctx, loc) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return highlights } @@ -301,9 +301,9 @@ func (e *Env) DocumentHighlight(loc protocol.Location) []protocol.DocumentHighli // It waits for the generate command to complete and checks for file changes // before returning. func (e *Env) RunGenerate(dir string) { - e.T.Helper() + e.TB.Helper() if err := e.Editor.RunGenerate(e.Ctx, dir); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } e.Await(NoOutstandingWork(IgnoreTelemetryPromptWork)) // Ideally the editor.Workspace would handle all synthetic file watching, but @@ -315,10 +315,10 @@ func (e *Env) RunGenerate(dir string) { // RunGoCommand runs the given command in the sandbox's default working // directory. func (e *Env) RunGoCommand(verb string, args ...string) []byte { - e.T.Helper() + e.TB.Helper() out, err := e.Sandbox.RunGoCommand(e.Ctx, "", verb, args, nil, true) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return out } @@ -326,28 +326,28 @@ func (e *Env) RunGoCommand(verb string, args ...string) []byte { // RunGoCommandInDir is like RunGoCommand, but executes in the given // relative directory of the sandbox. 
func (e *Env) RunGoCommandInDir(dir, verb string, args ...string) { - e.T.Helper() + e.TB.Helper() if _, err := e.Sandbox.RunGoCommand(e.Ctx, dir, verb, args, nil, true); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } // RunGoCommandInDirWithEnv is like RunGoCommand, but executes in the given // relative directory of the sandbox with the given additional environment variables. func (e *Env) RunGoCommandInDirWithEnv(dir string, env []string, verb string, args ...string) { - e.T.Helper() + e.TB.Helper() if _, err := e.Sandbox.RunGoCommand(e.Ctx, dir, verb, args, env, true); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } // GoVersion checks the version of the go command. // It returns the X in Go 1.X. func (e *Env) GoVersion() int { - e.T.Helper() + e.TB.Helper() v, err := e.Sandbox.GoVersion(e.Ctx) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return v } @@ -355,33 +355,33 @@ func (e *Env) GoVersion() int { // DumpGoSum prints the correct go.sum contents for dir in txtar format, // for use in creating integration tests. func (e *Env) DumpGoSum(dir string) { - e.T.Helper() + e.TB.Helper() if _, err := e.Sandbox.RunGoCommand(e.Ctx, dir, "list", []string{"-mod=mod", "./..."}, nil, true); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } sumFile := path.Join(dir, "go.sum") - e.T.Log("\n\n-- " + sumFile + " --\n" + e.ReadWorkspaceFile(sumFile)) - e.T.Fatal("see contents above") + e.TB.Log("\n\n-- " + sumFile + " --\n" + e.ReadWorkspaceFile(sumFile)) + e.TB.Fatal("see contents above") } // CheckForFileChanges triggers a manual poll of the workspace for any file // changes since creation, or since last polling. It is a workaround for the // lack of true file watching support in the fake workspace. func (e *Env) CheckForFileChanges() { - e.T.Helper() + e.TB.Helper() if err := e.Sandbox.Workdir.CheckForFileChanges(e.Ctx); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } // CodeLens calls textDocument/codeLens for the given path, calling t.Fatal on // any error. func (e *Env) CodeLens(path string) []protocol.CodeLens { - e.T.Helper() + e.TB.Helper() lens, err := e.Editor.CodeLens(e.Ctx, path) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return lens } @@ -391,9 +391,9 @@ func (e *Env) CodeLens(path string) []protocol.CodeLens { // // result is a pointer to a variable to be populated by json.Unmarshal. func (e *Env) ExecuteCodeLensCommand(path string, cmd command.Command, result any) { - e.T.Helper() + e.TB.Helper() if err := e.Editor.ExecuteCodeLensCommand(e.Ctx, path, cmd, result); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } @@ -402,9 +402,9 @@ func (e *Env) ExecuteCodeLensCommand(path string, cmd command.Command, result an // // result is a pointer to a variable to be populated by json.Unmarshal. func (e *Env) ExecuteCommand(params *protocol.ExecuteCommandParams, result any) { - e.T.Helper() + e.TB.Helper() if err := e.Editor.ExecuteCommand(e.Ctx, params, result); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } @@ -430,7 +430,7 @@ func (e *Env) StartProfile() (stop func() string) { // This would be a lot simpler if we generated params constructors. 
args, err := command.MarshalArgs(command.StartProfileArgs{}) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } params := &protocol.ExecuteCommandParams{ Command: command.StartProfile.String(), @@ -442,7 +442,7 @@ func (e *Env) StartProfile() (stop func() string) { return func() string { stopArgs, err := command.MarshalArgs(command.StopProfileArgs{}) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } stopParams := &protocol.ExecuteCommandParams{ Command: command.StopProfile.String(), @@ -457,91 +457,91 @@ func (e *Env) StartProfile() (stop func() string) { // InlayHints calls textDocument/inlayHints for the given path, calling t.Fatal on // any error. func (e *Env) InlayHints(path string) []protocol.InlayHint { - e.T.Helper() + e.TB.Helper() hints, err := e.Editor.InlayHint(e.Ctx, path) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return hints } // Symbol calls workspace/symbol func (e *Env) Symbol(query string) []protocol.SymbolInformation { - e.T.Helper() + e.TB.Helper() ans, err := e.Editor.Symbols(e.Ctx, query) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return ans } // References wraps Editor.References, calling t.Fatal on any error. func (e *Env) References(loc protocol.Location) []protocol.Location { - e.T.Helper() + e.TB.Helper() locations, err := e.Editor.References(e.Ctx, loc) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return locations } // Rename wraps Editor.Rename, calling t.Fatal on any error. func (e *Env) Rename(loc protocol.Location, newName string) { - e.T.Helper() + e.TB.Helper() if err := e.Editor.Rename(e.Ctx, loc, newName); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } // Implementations wraps Editor.Implementations, calling t.Fatal on any error. func (e *Env) Implementations(loc protocol.Location) []protocol.Location { - e.T.Helper() + e.TB.Helper() locations, err := e.Editor.Implementations(e.Ctx, loc) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return locations } // RenameFile wraps Editor.RenameFile, calling t.Fatal on any error. func (e *Env) RenameFile(oldPath, newPath string) { - e.T.Helper() + e.TB.Helper() if err := e.Editor.RenameFile(e.Ctx, oldPath, newPath); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } // SignatureHelp wraps Editor.SignatureHelp, calling t.Fatal on error func (e *Env) SignatureHelp(loc protocol.Location) *protocol.SignatureHelp { - e.T.Helper() + e.TB.Helper() sighelp, err := e.Editor.SignatureHelp(e.Ctx, loc) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return sighelp } // Completion executes a completion request on the server. func (e *Env) Completion(loc protocol.Location) *protocol.CompletionList { - e.T.Helper() + e.TB.Helper() completions, err := e.Editor.Completion(e.Ctx, loc) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return completions } func (e *Env) SetSuggestionInsertReplaceMode(useReplaceMode bool) { - e.T.Helper() + e.TB.Helper() e.Editor.SetSuggestionInsertReplaceMode(e.Ctx, useReplaceMode) } // AcceptCompletion accepts a completion for the given item at the given // position. func (e *Env) AcceptCompletion(loc protocol.Location, item protocol.CompletionItem) { - e.T.Helper() + e.TB.Helper() if err := e.Editor.AcceptCompletion(e.Ctx, loc, item); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } @@ -554,38 +554,38 @@ func (e *Env) CodeActionForFile(path string, diagnostics []protocol.Diagnostic) // CodeAction calls textDocument/codeAction for a selection, // and calls t.Fatal if there were errors. 
func (e *Env) CodeAction(loc protocol.Location, diagnostics []protocol.Diagnostic, trigger protocol.CodeActionTriggerKind) []protocol.CodeAction { - e.T.Helper() + e.TB.Helper() actions, err := e.Editor.CodeAction(e.Ctx, loc, diagnostics, trigger) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return actions } // ChangeConfiguration updates the editor config, calling t.Fatal on any error. func (e *Env) ChangeConfiguration(newConfig fake.EditorConfig) { - e.T.Helper() + e.TB.Helper() if err := e.Editor.ChangeConfiguration(e.Ctx, newConfig); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } // ChangeWorkspaceFolders updates the editor workspace folders, calling t.Fatal // on any error. func (e *Env) ChangeWorkspaceFolders(newFolders ...string) { - e.T.Helper() + e.TB.Helper() if err := e.Editor.ChangeWorkspaceFolders(e.Ctx, newFolders); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } // SemanticTokensFull invokes textDocument/semanticTokens/full, calling t.Fatal // on any error. func (e *Env) SemanticTokensFull(path string) []fake.SemanticToken { - e.T.Helper() + e.TB.Helper() toks, err := e.Editor.SemanticTokensFull(e.Ctx, path) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return toks } @@ -593,10 +593,10 @@ func (e *Env) SemanticTokensFull(path string) []fake.SemanticToken { // SemanticTokensRange invokes textDocument/semanticTokens/range, calling t.Fatal // on any error. func (e *Env) SemanticTokensRange(loc protocol.Location) []fake.SemanticToken { - e.T.Helper() + e.TB.Helper() toks, err := e.Editor.SemanticTokensRange(e.Ctx, loc) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return toks } @@ -606,9 +606,9 @@ func (e *Env) SemanticTokensRange(loc protocol.Location) []fake.SemanticToken { func (e *Env) Close() { ctx := xcontext.Detach(e.Ctx) if err := e.Editor.Close(ctx); err != nil { - e.T.Errorf("closing editor: %v", err) + e.TB.Errorf("closing editor: %v", err) } if err := e.Sandbox.Close(); err != nil { - e.T.Errorf("cleaning up sandbox: %v", err) + e.TB.Errorf("cleaning up sandbox: %v", err) } } diff --git a/gopls/internal/test/marker/marker_test.go b/gopls/internal/test/marker/marker_test.go index 3ff7da65ac5..8c27adc9018 100644 --- a/gopls/internal/test/marker/marker_test.go +++ b/gopls/internal/test/marker/marker_test.go @@ -321,7 +321,7 @@ type marker struct { func (m marker) ctx() context.Context { return m.run.env.Ctx } // T returns the testing.TB for this mark. -func (m marker) T() testing.TB { return m.run.env.T } +func (m marker) T() testing.TB { return m.run.env.TB } // server returns the LSP server for the marker test run. 
func (m marker) editor() *fake.Editor { return m.run.env.Editor } @@ -982,7 +982,7 @@ func newEnv(t *testing.T, cache *cache.Cache, files, proxyFiles map[string][]byt t.Fatal(err) } return &integration.Env{ - T: t, + TB: t, Ctx: ctx, Editor: editor, Sandbox: sandbox, @@ -1035,17 +1035,17 @@ func (c *marker) sprintf(format string, args ...any) string { func (run *markerTestRun) fmtPos(pos token.Pos) string { file := run.test.fset.File(pos) if file == nil { - run.env.T.Errorf("position %d not in test fileset", pos) + run.env.TB.Errorf("position %d not in test fileset", pos) return "" } m, err := run.env.Editor.Mapper(file.Name()) if err != nil { - run.env.T.Errorf("%s", err) + run.env.TB.Errorf("%s", err) return "" } loc, err := m.PosLocation(file, pos, pos) if err != nil { - run.env.T.Errorf("Mapper(%s).PosLocation failed: %v", file.Name(), err) + run.env.TB.Errorf("Mapper(%s).PosLocation failed: %v", file.Name(), err) } return run.fmtLoc(loc) } @@ -1055,7 +1055,7 @@ func (run *markerTestRun) fmtPos(pos token.Pos) string { // archive file. func (run *markerTestRun) fmtLoc(loc protocol.Location) string { if loc == (protocol.Location{}) { - run.env.T.Errorf("unable to find %s in test archive", loc) + run.env.TB.Errorf("unable to find %s in test archive", loc) return "" } lines := bytes.Count(run.test.archive.Comment, []byte("\n")) @@ -1094,12 +1094,12 @@ func (run *markerTestRun) mapLocation(loc protocol.Location) (name string, start name = run.env.Sandbox.Workdir.URIToPath(loc.URI) m, err := run.env.Editor.Mapper(name) if err != nil { - run.env.T.Errorf("internal error: %v", err) + run.env.TB.Errorf("internal error: %v", err) return } start, end, err := m.RangeOffsets(loc.Range) if err != nil { - run.env.T.Errorf("error formatting location %s: %v", loc, err) + run.env.TB.Errorf("error formatting location %s: %v", loc, err) return } startLine, startCol = m.OffsetLineCol8(start) @@ -2306,11 +2306,11 @@ func codeActionChanges(env *integration.Env, uri protocol.DocumentURI, rng proto if action.Edit != nil { if len(action.Edit.Changes) > 0 { - env.T.Errorf("internal error: discarding unexpected CodeAction{Kind=%s, Title=%q}.Edit.Changes", action.Kind, action.Title) + env.TB.Errorf("internal error: discarding unexpected CodeAction{Kind=%s, Title=%q}.Edit.Changes", action.Kind, action.Title) } if action.Edit.DocumentChanges != nil { if action.Command != nil { - env.T.Errorf("internal error: discarding unexpected CodeAction{Kind=%s, Title=%q}.Command", action.Kind, action.Title) + env.TB.Errorf("internal error: discarding unexpected CodeAction{Kind=%s, Title=%q}.Command", action.Kind, action.Title) } return action.Edit.DocumentChanges, nil } From eb75b19426d3efd2bd643265b1094772288b35c0 Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Thu, 27 Mar 2025 08:57:51 -0400 Subject: [PATCH 153/270] internal/refactor/inline: modernize Apply simple modernizations. 
Change-Id: Ie40a5989f3b414c189ad675faf16eae93da0eff5 Reviewed-on: https://go-review.googlesource.com/c/tools/+/661295 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan --- internal/refactor/inline/callee.go | 15 +++++++-------- internal/refactor/inline/calleefx.go | 2 +- internal/refactor/inline/inline.go | 24 +++++++----------------- 3 files changed, 15 insertions(+), 26 deletions(-) diff --git a/internal/refactor/inline/callee.go b/internal/refactor/inline/callee.go index b4ec43d551c..ca9426a2656 100644 --- a/internal/refactor/inline/callee.go +++ b/internal/refactor/inline/callee.go @@ -14,6 +14,7 @@ import ( "go/parser" "go/token" "go/types" + "slices" "strings" "golang.org/x/tools/go/types/typeutil" @@ -303,7 +304,7 @@ func AnalyzeCallee(logf func(string, ...any), fset *token.FileSet, pkg *types.Pa return nil, tuple.At(i).Type() } } - for i := 0; i < sig.Results().Len(); i++ { + for i := range sig.Results().Len() { expr, typ := argInfo(i) var flags returnOperandFlags if typ == types.Typ[types.UntypedNil] { // untyped nil is preserved by go/types @@ -572,11 +573,9 @@ func analyzeAssignment(info *types.Info, stack []ast.Node) (assignable, ifaceAss // Types do not need to match for an initializer with known type. if spec, ok := parent.(*ast.ValueSpec); ok && spec.Type != nil { - for _, v := range spec.Values { - if v == expr { - typ := info.TypeOf(spec.Type) - return true, typ == nil || types.IsInterface(typ), false - } + if slices.Contains(spec.Values, expr) { + typ := info.TypeOf(spec.Type) + return true, typ == nil || types.IsInterface(typ), false } } @@ -616,7 +615,7 @@ func analyzeAssignment(info *types.Info, stack []ast.Node) (assignable, ifaceAss return true, types.IsInterface(under.Elem()), false case *types.Struct: // Struct{k: expr} if id, _ := kv.Key.(*ast.Ident); id != nil { - for fi := 0; fi < under.NumFields(); fi++ { + for fi := range under.NumFields() { field := under.Field(fi) if info.Uses[id] == field { return true, types.IsInterface(field.Type()), false @@ -715,7 +714,7 @@ func paramTypeAtIndex(sig *types.Signature, call *ast.CallExpr, index int) types // given outer-to-inner stack, after stripping parentheses, along with the // remaining stack up to the parent node. // -// If no such context exists, returns (nil, nil). +// If no such context exists, returns (nil, nil, nil). func exprContext(stack []ast.Node) (remaining []ast.Node, parent ast.Node, expr ast.Expr) { expr, _ = stack[len(stack)-1].(ast.Expr) if expr == nil { diff --git a/internal/refactor/inline/calleefx.go b/internal/refactor/inline/calleefx.go index 11246e5b969..26dc02c010b 100644 --- a/internal/refactor/inline/calleefx.go +++ b/internal/refactor/inline/calleefx.go @@ -31,7 +31,7 @@ const ( // } // // is [1 0 -2 2], indicating reads of y and x, followed by the unknown -// effects of the g() call. and finally the read of parameter z. This +// effects of the g() call, and finally the read of parameter z. This // information is used during inlining to ascertain when it is safe // for parameter references to be replaced by their corresponding // argument expressions. Such substitutions are permitted only when diff --git a/internal/refactor/inline/inline.go b/internal/refactor/inline/inline.go index d89a62972c6..127a70c680b 100644 --- a/internal/refactor/inline/inline.go +++ b/internal/refactor/inline/inline.go @@ -534,7 +534,7 @@ func newImportState(logf func(string, ...any), caller *Caller, callee *gobCallee // importName finds an existing import name to use in a particular shadowing // context. 
It is used to determine the set of new imports in -// getOrMakeImportName, and is also used for writing out names in inlining +// localName, and is also used for writing out names in inlining // strategies below. func (i *importState) importName(pkgPath string, shadow shadowMap) string { for _, name := range i.importMap[pkgPath] { @@ -560,12 +560,7 @@ func (i *importState) localName(pkgPath, pkgName string, shadow shadowMap) strin } newlyAdded := func(name string) bool { - for _, new := range i.newImports { - if new.pkgName == name { - return true - } - } - return false + return slices.ContainsFunc(i.newImports, func(n newImport) bool { return n.pkgName == name }) } // shadowedInCaller reports whether a candidate package name @@ -576,12 +571,7 @@ func (i *importState) localName(pkgPath, pkgName string, shadow shadowMap) strin return false } // If obj will be removed, the name is available. - for _, old := range i.oldImports { - if old.pkgName == obj { - return false - } - } - return true + return !slices.ContainsFunc(i.oldImports, func(o oldImport) bool { return o.pkgName == obj }) } // import added by callee @@ -3030,13 +3020,13 @@ func replaceNode(root ast.Node, from, to ast.Node) { } case reflect.Struct: - for i := 0; i < v.Type().NumField(); i++ { + for i := range v.Type().NumField() { visit(v.Field(i)) } case reflect.Slice: compact := false - for i := 0; i < v.Len(); i++ { + for i := range v.Len() { visit(v.Index(i)) if v.Index(i).IsNil() { compact = true @@ -3047,7 +3037,7 @@ func replaceNode(root ast.Node, from, to ast.Node) { // (Do this is a second pass to avoid // unnecessary writes in the common case.) j := 0 - for i := 0; i < v.Len(); i++ { + for i := range v.Len() { if !v.Index(i).IsNil() { v.Index(j).Set(v.Index(i)) j++ @@ -3107,7 +3097,7 @@ func clearPositions(root ast.Node) { if n != nil { v := reflect.ValueOf(n).Elem() // deref the pointer to struct fields := v.Type().NumField() - for i := 0; i < fields; i++ { + for i := range fields { f := v.Field(i) // Clearing Pos arbitrarily is destructive, // as its presence may be semantically significant From a857356d5cc56c01228c895b060f0594e537b4eb Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Thu, 27 Mar 2025 14:44:16 -0400 Subject: [PATCH 154/270] internal/refactor/inline: improve freeishNames doc Change-Id: Ifbc6db97671a173237c55f0f415ad2d0eff6ecff Reviewed-on: https://go-review.googlesource.com/c/tools/+/661375 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan --- internal/refactor/inline/inline.go | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/internal/refactor/inline/inline.go b/internal/refactor/inline/inline.go index 127a70c680b..8ffd8315547 100644 --- a/internal/refactor/inline/inline.go +++ b/internal/refactor/inline/inline.go @@ -2449,7 +2449,12 @@ func freeVars(info *types.Info, e ast.Expr) map[string]bool { } // freeishNames computes an over-approximation to the free names -// of the type syntax t, inserting values into the map. +// of the expression (type or term) t, inserting values into the map. +// +// If t is a type expression, the approximation is not too far off (see below). For +// terms, it simply gathers all unqualified identifiers, ignoring scopes established +// by function and composite literals, so in some cases it can over-estimate quite +// a lot. // // Because we don't have go/types annotations, we can't give an exact // result in all cases. 
In particular, an array type [n]T might have a @@ -2468,9 +2473,9 @@ func freeishNames(free map[string]bool, t ast.Expr) { return false // don't visit .Sel case *ast.Field: + // Visit Type (which may have free references) + // but not Names (which are defs, not uses). ast.Inspect(n.Type, visit) - // Don't visit .Names: - // FuncType parameters, interface methods, struct fields return false } return true From aac3cf020c8adcf240806fba32b816bc55214ea5 Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Fri, 28 Mar 2025 11:16:53 -0400 Subject: [PATCH 155/270] internal/refactor/inline: improve freeishNames Perform a more sophisticated free-name analysis. Adapt the name resolution logic from go/parser to find free names in an ast.Node. As the function doc says, the new function is stymied only by the composite-lit ambiguity and thus significantly more accurate that the old implementation, which did not take into account bindings within expressions. The test provides full coverage, except for some uninteresting edge cases. Change-Id: Id1e18b5e9b1aae78feb2066f5243fd565568ffdd Reviewed-on: https://go-review.googlesource.com/c/tools/+/661337 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan --- internal/refactor/inline/free.go | 367 ++++++++++++++++++++++++++ internal/refactor/inline/free_test.go | 206 +++++++++++++++ internal/refactor/inline/inline.go | 35 --- 3 files changed, 573 insertions(+), 35 deletions(-) create mode 100644 internal/refactor/inline/free.go create mode 100644 internal/refactor/inline/free_test.go diff --git a/internal/refactor/inline/free.go b/internal/refactor/inline/free.go new file mode 100644 index 00000000000..76c8010add6 --- /dev/null +++ b/internal/refactor/inline/free.go @@ -0,0 +1,367 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Copied, with considerable changes, from go/parser/resolver.go +// at af53bd2c03. + +package inline + +import ( + "go/ast" + "go/token" +) + +// freeishNames computes an over-approximation to the free names of the AST +// at node n based solely on syntax, inserting values into the map. +// +// In the absence of composite literals, the set of free names is exact. Composite +// literals introduce an ambiguity that can only be resolved with type information: +// whether F is a field name or a value in `T{F: ...}`. +// This function conservatively assumes T is not a struct type, so the +// resulting set may contain spurious entries that are not free lexical +// references but are references to struct fields. +// +// The code is based on go/parser.resolveFile, but heavily simplified. Crucial +// differences are: +// - Instead of resolving names to their objects, this function merely records +// whether they are free. +// - Labels are ignored: they do not refer to values. +// - This is never called on FuncDecls or ImportSpecs, so the function +// panics if it sees one. +func freeishNames(free map[string]bool, n ast.Node) { + r := &freeVisitor{free: free} + ast.Walk(r, n) + assert(r.scope == nil, "unbalanced scopes") +} + +// A freeVisitor holds state for a free-name analysis. +type freeVisitor struct { + scope *scope // the current innermost scope + free map[string]bool // free names seen so far +} + +// scope contains all the names defined in a lexical scope. +// It is like ast.Scope, but without deprecation warnings. 
+type scope struct { + names map[string]bool + outer *scope +} + +func (s *scope) defined(name string) bool { + for ; s != nil; s = s.outer { + if s.names[name] { + return true + } + } + return false +} + +func (v *freeVisitor) Visit(n ast.Node) ast.Visitor { + switch n := n.(type) { + + // Expressions. + case *ast.Ident: + v.resolve(n) + + case *ast.FuncLit: + v.openScope() + defer v.closeScope() + v.walkFuncType(n.Type) + v.walkBody(n.Body) + + case *ast.SelectorExpr: + v.walk(n.X) + // Skip n.Sel: it cannot be free. + + case *ast.StructType: + v.openScope() + defer v.closeScope() + v.walkFieldList(n.Fields) + + case *ast.FuncType: + v.openScope() + defer v.closeScope() + v.walkFuncType(n) + + case *ast.CompositeLit: + v.walk(n.Type) + for _, e := range n.Elts { + if kv, _ := e.(*ast.KeyValueExpr); kv != nil { + if ident, _ := kv.Key.(*ast.Ident); ident != nil { + // It is not possible from syntax alone to know whether + // an identifier used as a composite literal key is + // a struct field (if n.Type is a struct) or a value + // (if n.Type is a map, slice or array). + // Over-approximate by treating both cases as potentially + // free names. + v.resolve(ident) + } else { + v.walk(kv.Key) + } + v.walk(kv.Value) + } else { + v.walk(e) + } + } + + case *ast.InterfaceType: + v.openScope() + defer v.closeScope() + v.walkFieldList(n.Methods) + + // Statements + case *ast.AssignStmt: + walkSlice(v, n.Rhs) + if n.Tok == token.DEFINE { + v.shortVarDecl(n.Lhs) + } else { + walkSlice(v, n.Lhs) + } + + case *ast.LabeledStmt: + // ignore labels + // TODO(jba): consider labels? + v.walk(n.Stmt) + + case *ast.BranchStmt: + // Ignore labels. + // TODO(jba): consider labels? + + case *ast.BlockStmt: + v.openScope() + defer v.closeScope() + walkSlice(v, n.List) + + case *ast.IfStmt: + v.openScope() + defer v.closeScope() + v.walk(n.Init) + v.walk(n.Cond) + v.walk(n.Body) + v.walk(n.Else) + + case *ast.CaseClause: + walkSlice(v, n.List) + v.openScope() + defer v.closeScope() + walkSlice(v, n.Body) + + case *ast.SwitchStmt: + v.openScope() + defer v.closeScope() + v.walk(n.Init) + v.walk(n.Tag) + v.walkBody(n.Body) + + case *ast.TypeSwitchStmt: + if n.Init != nil { + v.openScope() + defer v.closeScope() + v.walk(n.Init) + } + v.openScope() + defer v.closeScope() + v.walk(n.Assign) + // We can use walkBody here because we don't track label scopes. + v.walkBody(n.Body) + + case *ast.CommClause: + v.openScope() + defer v.closeScope() + v.walk(n.Comm) + walkSlice(v, n.Body) + + case *ast.SelectStmt: + v.walkBody(n.Body) + + case *ast.ForStmt: + v.openScope() + defer v.closeScope() + v.walk(n.Init) + v.walk(n.Cond) + v.walk(n.Post) + v.walk(n.Body) + + case *ast.RangeStmt: + v.openScope() + defer v.closeScope() + v.walk(n.X) + var lhs []ast.Expr + if n.Key != nil { + lhs = append(lhs, n.Key) + } + if n.Value != nil { + lhs = append(lhs, n.Value) + } + if len(lhs) > 0 { + if n.Tok == token.DEFINE { + v.shortVarDecl(lhs) + } else { + walkSlice(v, lhs) + } + } + v.walk(n.Body) + + // Declarations + case *ast.GenDecl: + switch n.Tok { + case token.CONST, token.VAR: + for _, spec := range n.Specs { + spec := spec.(*ast.ValueSpec) + walkSlice(v, spec.Values) + if spec.Type != nil { + v.walk(spec.Type) + } + v.declare(spec.Names...) + } + case token.TYPE: + for _, spec := range n.Specs { + spec := spec.(*ast.TypeSpec) + // Go spec: The scope of a type identifier declared inside a + // function begins at the identifier in the TypeSpec and ends + // at the end of the innermost containing block. 
+ v.declare(spec.Name) + if spec.TypeParams != nil { + v.openScope() + defer v.closeScope() + v.walkTypeParams(spec.TypeParams) + } + v.walk(spec.Type) + } + + case token.IMPORT: + panic("encountered import declaration in free analysis") + } + + case *ast.FuncDecl: + panic("encountered top-level function declaration in free analysis") + + default: + return v + } + + return nil +} + +func (r *freeVisitor) openScope() { + r.scope = &scope{map[string]bool{}, r.scope} +} + +func (r *freeVisitor) closeScope() { + r.scope = r.scope.outer +} + +func (r *freeVisitor) walk(n ast.Node) { + if n != nil { + ast.Walk(r, n) + } +} + +// walkFuncType walks a function type. It is used for explicit +// function types, like this: +// +// type RunFunc func(context.Context) error +// +// and function literals, like this: +// +// func(a, b int) int { return a + b} +// +// neither of which have type parameters. +// Function declarations do involve type parameters, but we don't +// handle them. +func (r *freeVisitor) walkFuncType(typ *ast.FuncType) { + // The order here doesn't really matter, because names in + // a field list cannot appear in types. + // (The situation is different for type parameters, for which + // see [freeVisitor.walkTypeParams].) + r.resolveFieldList(typ.Params) + r.resolveFieldList(typ.Results) + r.declareFieldList(typ.Params) + r.declareFieldList(typ.Results) +} + +// walkTypeParams is like walkFieldList, but declares type parameters eagerly so +// that they may be resolved in the constraint expressions held in the field +// Type. +func (r *freeVisitor) walkTypeParams(list *ast.FieldList) { + r.declareFieldList(list) + r.resolveFieldList(list) +} + +func (r *freeVisitor) walkBody(body *ast.BlockStmt) { + if body == nil { + return + } + walkSlice(r, body.List) +} + +func (r *freeVisitor) walkFieldList(list *ast.FieldList) { + if list == nil { + return + } + r.resolveFieldList(list) // .Type may contain references + r.declareFieldList(list) // .Names declares names +} + +func (r *freeVisitor) shortVarDecl(lhs []ast.Expr) { + // Go spec: A short variable declaration may redeclare variables provided + // they were originally declared in the same block with the same type, and + // at least one of the non-blank variables is new. + // + // However, it doesn't matter to free analysis whether a variable is declared + // fresh or redeclared. + for _, x := range lhs { + // In a well-formed program each expr must be an identifier, + // but be forgiving. + if id, ok := x.(*ast.Ident); ok { + r.declare(id) + } + } +} + +func walkSlice[S ~[]E, E ast.Node](r *freeVisitor, list S) { + for _, e := range list { + r.walk(e) + } +} + +// resolveFieldList resolves the types of the fields in list. +// The companion method declareFieldList declares the names of the fields. +func (r *freeVisitor) resolveFieldList(list *ast.FieldList) { + if list == nil { + return + } + for _, f := range list.List { + r.walk(f.Type) + } +} + +// declareFieldList declares the names of the fields in list. +// (Names in a FieldList always establish new bindings.) +// The companion method resolveFieldList resolves the types of the fields. +func (r *freeVisitor) declareFieldList(list *ast.FieldList) { + if list == nil { + return + } + for _, f := range list.List { + r.declare(f.Names...) + } +} + +// resolve marks ident as free if it is not in scope. +// TODO(jba): rename: no resolution is happening. 
+func (r *freeVisitor) resolve(ident *ast.Ident) { + if s := ident.Name; s != "_" && !r.scope.defined(s) { + r.free[s] = true + } +} + +// declare adds each non-blank ident to the current scope. +func (r *freeVisitor) declare(idents ...*ast.Ident) { + for _, id := range idents { + if id.Name != "_" { + r.scope.names[id.Name] = true + } + } +} diff --git a/internal/refactor/inline/free_test.go b/internal/refactor/inline/free_test.go new file mode 100644 index 00000000000..72543e7ae81 --- /dev/null +++ b/internal/refactor/inline/free_test.go @@ -0,0 +1,206 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package inline + +import ( + "go/ast" + "go/parser" + "go/token" + "maps" + "slices" + "strings" + "testing" +) + +func TestFreeishNames(t *testing.T) { + elems := func(m map[string]bool) string { + return strings.Join(slices.Sorted(maps.Keys(m)), " ") + } + + for _, test := range []struct { + code string // one or more exprs, decls or stmts + want string // space-separated list of free names + }{ + { + `x`, + "x", + }, + { + `x.y.z`, + "x", + }, + { + `T{a: 1, b: 2, c.d: e}`, + "a b c e T", + }, + { + `f(x)`, + "f x", + }, + { + `f.m(x)`, + "f x", + }, + { + `func(x int) int { return x + y }`, + "int y", + }, + { + `x = func(x int) int { return 2*x }()`, + "int x", + }, + { + `func(x int) (y int) { return x + y }`, + "int", + }, + { + `struct{a **int; b map[int][]bool}`, + "bool int", + }, + { + `struct{f int}{f: 0}`, + "f int", + }, + { + `interface{m1(int) bool; m2(x int) (y bool)}`, + "bool int", + }, + { + `x := 1; x++`, + "", + }, + { + `x = 1`, + "x", + }, + { + `_ = 1`, + "", + }, + { + `x, y := 1, 2; x = y + z`, + "z", + }, + { + `x, y := y, x; x = y + z`, + "x y z", + }, + { + `a, b := 0, 0; b, c := 0, 0; print(a, b, c, d)`, + "d print", + }, + { + `label: x++`, + "x", + }, + { + `if x == y {x}`, + "x y", + }, + { + `if x := 1; x == y {x}`, + "y", + }, + { + `if x := 1; x == y {x} else {z}`, + "y z", + }, + { + `switch x { case 1: x; case y: z }`, + "x y z", + }, + { + `switch x := 1; x { case 1: x; case y: z }`, + "y z", + }, + { + `switch x.(type) { case int: x; case []int: y }`, + "int x y", + }, + { + `switch x := 1; x.(type) { case int: x; case []int: y }`, + "int y", + }, + { + `switch y := x.(type) { case int: x; case []int: y }`, + "int x", + }, + { + `select { case c <- 1: x; case x := <-c: 2; default: y}`, + "c x y", + }, + { + `for i := 0; i < 9; i++ { c <- j }`, + "c j", + }, + { + `for i = 0; i < 9; i++ { c <- j }`, + "c i j", + }, + { + `for i := range 9 { c <- j }`, + "c j", + }, + { + `for i = range 9 { c <- j }`, + "c i j", + }, + { + `for _, e := range []int{1, 2, x} {e}`, + "int x", + }, + { + `var x, y int; f(x, y)`, + "f int", + }, + { + `{var x, y int}; f(x, y)`, + "f int x y", + }, + { + `const x = 1; { const y = iota; return x, y }`, + "iota", + }, + { + `type t int; t(0)`, + "int", + }, + { + `type t[T ~int] struct { t T }; x = t{t: 1}.t`, // field t shadowed by type decl + "int x", + }, + { + `type t[S ~[]E, E any] S`, + "any", + }, + { + `var a [unsafe.Sizeof(func(x int) { x + y })]int`, + "int unsafe y", + }, + } { + _, f := mustParse(t, "free.go", `package p; func _() {`+test.code+`}`) + n := f.Decls[0].(*ast.FuncDecl).Body + got := map[string]bool{} + want := map[string]bool{} + for _, n := range strings.Fields(test.want) { + want[n] = true + } + + freeishNames(got, n) + + if !maps.Equal(got, want) { + t.Errorf("\ncode %s\ngot %v\nwant 
%v", test.code, elems(got), elems(want)) + } + } +} + +func mustParse(t *testing.T, filename string, content any) (*token.FileSet, *ast.File) { + fset := token.NewFileSet() + f, err := parser.ParseFile(fset, filename, content, parser.ParseComments|parser.SkipObjectResolution) + if err != nil { + t.Fatalf("ParseFile: %v", err) + } + return fset, f +} diff --git a/internal/refactor/inline/inline.go b/internal/refactor/inline/inline.go index 8ffd8315547..7d65b583524 100644 --- a/internal/refactor/inline/inline.go +++ b/internal/refactor/inline/inline.go @@ -2448,41 +2448,6 @@ func freeVars(info *types.Info, e ast.Expr) map[string]bool { return free } -// freeishNames computes an over-approximation to the free names -// of the expression (type or term) t, inserting values into the map. -// -// If t is a type expression, the approximation is not too far off (see below). For -// terms, it simply gathers all unqualified identifiers, ignoring scopes established -// by function and composite literals, so in some cases it can over-estimate quite -// a lot. -// -// Because we don't have go/types annotations, we can't give an exact -// result in all cases. In particular, an array type [n]T might have a -// size such as unsafe.Sizeof(func() int{stmts...}()) and now the -// precise answer depends upon all the statement syntax too. But that -// never happens in practice. -func freeishNames(free map[string]bool, t ast.Expr) { - var visit func(n ast.Node) bool - visit = func(n ast.Node) bool { - switch n := n.(type) { - case *ast.Ident: - free[n.Name] = true - - case *ast.SelectorExpr: - ast.Inspect(n.X, visit) - return false // don't visit .Sel - - case *ast.Field: - // Visit Type (which may have free references) - // but not Names (which are defs, not uses). - ast.Inspect(n.Type, visit) - return false - } - return true - } - ast.Inspect(t, visit) -} - // effects reports whether an expression might change the state of the // program (through function calls and channel receives) and affect // the evaluation of subsequent expressions. From 659a8cd099cf8a6cd8d13de88269c081c6e069c5 Mon Sep 17 00:00:00 2001 From: xieyuschen Date: Mon, 31 Mar 2025 15:13:55 +0800 Subject: [PATCH 156/270] go/analysis/analysistest: report input rather result when error happens Change-Id: I6ca92dd6c0aab72399341646a0eca908b08cad54 Reviewed-on: https://go-review.googlesource.com/c/tools/+/661755 Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI Reviewed-by: Robert Findley Auto-Submit: Alan Donovan --- go/analysis/analysistest/analysistest.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/go/analysis/analysistest/analysistest.go b/go/analysis/analysistest/analysistest.go index 143b4260346..a20773fe26d 100644 --- a/go/analysis/analysistest/analysistest.go +++ b/go/analysis/analysistest/analysistest.go @@ -298,7 +298,7 @@ func applyDiffsAndCompare(filename string, original, want []byte, edits []diff.E } fixed, err := format.Source(fixedBytes) if err != nil { - return fmt.Errorf("%s: error formatting resulting source: %v\n%s", filename, err, fixed) + return fmt.Errorf("%s: error formatting resulting source: %v\n%s", filename, err, fixedBytes) } want, err = format.Source(want) From 5c9a69f93423f50b7224a9bfffe4e10e5f156b8a Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Fri, 28 Mar 2025 13:53:15 -0400 Subject: [PATCH 157/270] internal/refactor/inline: get rid of imports.Process Rewrite the part of the inliner that removes unnecessary new imports to avoid imports.Process. 
Now the inliner does not depend on the go command. There are two parts to the solution: 1. Use the recently improved free-name analysis to find unused imports. This CL further improves the algorithm by removing composite-literal keys from the set of possible free names for this call: such keys can never be package names. So freeishNames returns all of the used package names and none of the unused ones. Other calls to freeishNames retain the composite-literal keys to ensure that all free names are found. 2. Replace the ad-hoc code that deleted old imports with astutil.DeleteNamedImport. The more careful approach of that function results in prettier import decls that are closer to those of imports.Process, at the cost of deleting the old imports one by one. But it's unlikely that there are more than a couple of old imports. Some tests were tweaked to match the current behavior: - The new algorithm produces a slightly nicer result on assignment.txtar. - import-shadow.txtar's input already contained imports that would not have appeared in the wild to users of goimports or similar tools; its output is now similarly atypical. The algorithm behaves better on a version of import-shadow with cleaned-up imports. Both variants appear as tests. Some tests in other packages also needed minor changes. Change-Id: I410a2808511b3ccfe1cc555f37b832a0b8b6ea5c Reviewed-on: https://go-review.googlesource.com/c/tools/+/661635 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan --- .../analysis/gofix/testdata/src/b/b.go.golden | 6 +- .../testdata/codeaction/inline_issue67336.txt | 1 - .../codeaction/removeparam_imports.txt | 12 +- internal/refactor/inline/free.go | 35 +- internal/refactor/inline/free_test.go | 367 ++++++++++-------- internal/refactor/inline/inline.go | 141 +++---- .../refactor/inline/testdata/assignment.txtar | 4 +- .../inline/testdata/import-shadow-1.txtar | 48 +++ .../inline/testdata/import-shadow.txtar | 6 +- 9 files changed, 332 insertions(+), 288 deletions(-) create mode 100644 internal/refactor/inline/testdata/import-shadow-1.txtar diff --git a/gopls/internal/analysis/gofix/testdata/src/b/b.go.golden b/gopls/internal/analysis/gofix/testdata/src/b/b.go.golden index fd8d87a2ef1..4de7f09710f 100644 --- a/gopls/internal/analysis/gofix/testdata/src/b/b.go.golden +++ b/gopls/internal/analysis/gofix/testdata/src/b/b.go.golden @@ -4,10 +4,8 @@ import a0 "a" import "io" -import ( - "a" - . "c" -) +import "a" +import . 
"c" func f() { a.One() // want `cannot inline call to a.One because body refers to non-exported one` diff --git a/gopls/internal/test/marker/testdata/codeaction/inline_issue67336.txt b/gopls/internal/test/marker/testdata/codeaction/inline_issue67336.txt index 437fb474fb2..f15ca29397b 100644 --- a/gopls/internal/test/marker/testdata/codeaction/inline_issue67336.txt +++ b/gopls/internal/test/marker/testdata/codeaction/inline_issue67336.txt @@ -54,7 +54,6 @@ package c import ( "context" - "example.com/define/my/typ" "example.com/one/more/pkg" pkg0 "example.com/some/other/pkg" diff --git a/gopls/internal/test/marker/testdata/codeaction/removeparam_imports.txt b/gopls/internal/test/marker/testdata/codeaction/removeparam_imports.txt index d9f4f22dc7e..cd5f910a70d 100644 --- a/gopls/internal/test/marker/testdata/codeaction/removeparam_imports.txt +++ b/gopls/internal/test/marker/testdata/codeaction/removeparam_imports.txt @@ -65,9 +65,7 @@ func B(x, y c.C) { //@codeaction("x", "refactor.rewrite.removeUnusedParam", resu -- @b/a/a3.go -- package a -import ( - "mod.test/b" -) +import "mod.test/b" func _() { b.B(<-b.Chan) @@ -79,9 +77,7 @@ func _() { -- @b/a/a2.go -- package a -import ( - "mod.test/b" -) +import "mod.test/b" func _() { b.B(<-b.Chan) @@ -90,9 +86,7 @@ func _() { -- @b/a/a1.go -- package a -import ( - "mod.test/b" -) +import "mod.test/b" func _() { b.B(<-b.Chan) diff --git a/internal/refactor/inline/free.go b/internal/refactor/inline/free.go index 76c8010add6..28cebeea3db 100644 --- a/internal/refactor/inline/free.go +++ b/internal/refactor/inline/free.go @@ -12,15 +12,19 @@ import ( "go/token" ) -// freeishNames computes an over-approximation to the free names of the AST +// freeishNames computes an approximation to the free names of the AST // at node n based solely on syntax, inserting values into the map. // // In the absence of composite literals, the set of free names is exact. Composite // literals introduce an ambiguity that can only be resolved with type information: // whether F is a field name or a value in `T{F: ...}`. -// This function conservatively assumes T is not a struct type, so the -// resulting set may contain spurious entries that are not free lexical -// references but are references to struct fields. +// If includeComplitIdents is true, this function conservatively assumes +// T is not a struct type, so freeishNames overapproximates: the resulting +// set may contain spurious entries that are not free lexical references +// but are references to struct fields. +// If includeComplitIdents is false, this function assumes that T *is* +// a struct type, so freeishNames underapproximates: the resulting set +// may omit names that are free lexical references. // // The code is based on go/parser.resolveFile, but heavily simplified. Crucial // differences are: @@ -29,16 +33,17 @@ import ( // - Labels are ignored: they do not refer to values. // - This is never called on FuncDecls or ImportSpecs, so the function // panics if it sees one. -func freeishNames(free map[string]bool, n ast.Node) { - r := &freeVisitor{free: free} - ast.Walk(r, n) - assert(r.scope == nil, "unbalanced scopes") +func freeishNames(free map[string]bool, n ast.Node, includeComplitIdents bool) { + v := &freeVisitor{free: free, includeComplitIdents: includeComplitIdents} + ast.Walk(v, n) + assert(v.scope == nil, "unbalanced scopes") } // A freeVisitor holds state for a free-name analysis. 
type freeVisitor struct { - scope *scope // the current innermost scope - free map[string]bool // free names seen so far + scope *scope // the current innermost scope + free map[string]bool // free names seen so far + includeComplitIdents bool // include identifier key in composite literals } // scope contains all the names defined in a lexical scope. @@ -93,9 +98,13 @@ func (v *freeVisitor) Visit(n ast.Node) ast.Visitor { // an identifier used as a composite literal key is // a struct field (if n.Type is a struct) or a value // (if n.Type is a map, slice or array). - // Over-approximate by treating both cases as potentially - // free names. - v.resolve(ident) + if v.includeComplitIdents { + // Over-approximate by treating both cases as potentially + // free names. + v.resolve(ident) + } else { + // Under-approximate by ignoring potentially free names. + } } else { v.walk(kv.Key) } diff --git a/internal/refactor/inline/free_test.go b/internal/refactor/inline/free_test.go index 72543e7ae81..28fa56db099 100644 --- a/internal/refactor/inline/free_test.go +++ b/internal/refactor/inline/free_test.go @@ -5,6 +5,7 @@ package inline import ( + "fmt" "go/ast" "go/parser" "go/token" @@ -19,180 +20,212 @@ func TestFreeishNames(t *testing.T) { return strings.Join(slices.Sorted(maps.Keys(m)), " ") } - for _, test := range []struct { + type testcase struct { code string // one or more exprs, decls or stmts want string // space-separated list of free names + } + + for _, tc := range []struct { + includeComplitIdents bool + cases []testcase }{ - { - `x`, - "x", - }, - { - `x.y.z`, - "x", - }, - { - `T{a: 1, b: 2, c.d: e}`, - "a b c e T", - }, - { - `f(x)`, - "f x", - }, - { - `f.m(x)`, - "f x", - }, - { - `func(x int) int { return x + y }`, - "int y", - }, - { - `x = func(x int) int { return 2*x }()`, - "int x", - }, - { - `func(x int) (y int) { return x + y }`, - "int", - }, - { - `struct{a **int; b map[int][]bool}`, - "bool int", - }, - { - `struct{f int}{f: 0}`, - "f int", - }, - { - `interface{m1(int) bool; m2(x int) (y bool)}`, - "bool int", - }, - { - `x := 1; x++`, - "", - }, - { - `x = 1`, - "x", - }, - { - `_ = 1`, - "", - }, - { - `x, y := 1, 2; x = y + z`, - "z", - }, - { - `x, y := y, x; x = y + z`, - "x y z", - }, - { - `a, b := 0, 0; b, c := 0, 0; print(a, b, c, d)`, - "d print", - }, - { - `label: x++`, - "x", - }, - { - `if x == y {x}`, - "x y", - }, - { - `if x := 1; x == y {x}`, - "y", - }, - { - `if x := 1; x == y {x} else {z}`, - "y z", - }, - { - `switch x { case 1: x; case y: z }`, - "x y z", - }, - { - `switch x := 1; x { case 1: x; case y: z }`, - "y z", - }, - { - `switch x.(type) { case int: x; case []int: y }`, - "int x y", - }, - { - `switch x := 1; x.(type) { case int: x; case []int: y }`, - "int y", - }, - { - `switch y := x.(type) { case int: x; case []int: y }`, - "int x", - }, - { - `select { case c <- 1: x; case x := <-c: 2; default: y}`, - "c x y", - }, - { - `for i := 0; i < 9; i++ { c <- j }`, - "c j", - }, - { - `for i = 0; i < 9; i++ { c <- j }`, - "c i j", - }, - { - `for i := range 9 { c <- j }`, - "c j", - }, - { - `for i = range 9 { c <- j }`, - "c i j", - }, - { - `for _, e := range []int{1, 2, x} {e}`, - "int x", - }, - { - `var x, y int; f(x, y)`, - "f int", - }, - { - `{var x, y int}; f(x, y)`, - "f int x y", - }, - { - `const x = 1; { const y = iota; return x, y }`, - "iota", - }, - { - `type t int; t(0)`, - "int", - }, - { - `type t[T ~int] struct { t T }; x = t{t: 1}.t`, // field t shadowed by type decl - "int x", - }, - { - `type t[S ~[]E, E any] S`, - 
"any", - }, - { - `var a [unsafe.Sizeof(func(x int) { x + y })]int`, - "int unsafe y", + {true, []testcase{ + { + `x`, + "x", + }, + { + `x.y.z`, + "x", + }, + { + `T{a: 1, b: 2, c.d: e}`, + "a b c e T", + }, + { + `f(x)`, + "f x", + }, + { + `f.m(x)`, + "f x", + }, + { + `func(x int) int { return x + y }`, + "int y", + }, + { + `x = func(x int) int { return 2*x }()`, + "int x", + }, + { + `func(x int) (y int) { return x + y }`, + "int", + }, + { + `struct{a **int; b map[int][]bool}`, + "bool int", + }, + { + `struct{f int}{f: 0}`, + "f int", + }, + { + `interface{m1(int) bool; m2(x int) (y bool)}`, + "bool int", + }, + { + `x := 1; x++`, + "", + }, + { + `x = 1`, + "x", + }, + { + `_ = 1`, + "", + }, + { + `x, y := 1, 2; x = y + z`, + "z", + }, + { + `x, y := y, x; x = y + z`, + "x y z", + }, + { + `a, b := 0, 0; b, c := 0, 0; print(a, b, c, d)`, + "d print", + }, + { + `label: x++`, + "x", + }, + { + `if x == y {x}`, + "x y", + }, + { + `if x := 1; x == y {x}`, + "y", + }, + { + `if x := 1; x == y {x} else {z}`, + "y z", + }, + { + `switch x { case 1: x; case y: z }`, + "x y z", + }, + { + `switch x := 1; x { case 1: x; case y: z }`, + "y z", + }, + { + `switch x.(type) { case int: x; case []int: y }`, + "int x y", + }, + { + `switch x := 1; x.(type) { case int: x; case []int: y }`, + "int y", + }, + { + `switch y := x.(type) { case int: x; case []int: y }`, + "int x", + }, + { + `select { case c <- 1: x; case x := <-c: 2; default: y}`, + "c x y", + }, + { + `for i := 0; i < 9; i++ { c <- j }`, + "c j", + }, + { + `for i = 0; i < 9; i++ { c <- j }`, + "c i j", + }, + { + `for i := range 9 { c <- j }`, + "c j", + }, + { + `for i = range 9 { c <- j }`, + "c i j", + }, + { + `for _, e := range []int{1, 2, x} {e}`, + "int x", + }, + { + `var x, y int; f(x, y)`, + "f int", + }, + { + `{var x, y int}; f(x, y)`, + "f int x y", + }, + { + `const x = 1; { const y = iota; return x, y }`, + "iota", + }, + { + `type t int; t(0)`, + "int", + }, + { + `type t[T ~int] struct { t T }; x = t{t: 1}.t`, // field t shadowed by type decl + "int x", + }, + { + `type t[S ~[]E, E any] S`, + "any", + }, + { + `var a [unsafe.Sizeof(func(x int) { x + y })]int`, + "int unsafe y", + }, + }}, + { + false, + []testcase{ + { + `x`, + "x", + }, + { + `x.y.z`, + "x", + }, + { + `T{a: 1, b: 2, c.d: e}`, + "c e T", // omit a and b + }, + { + `type t[T ~int] struct { t T }; x = t{t: 1}.t`, // field t shadowed by type decl + "int x", + }, + }, }, } { - _, f := mustParse(t, "free.go", `package p; func _() {`+test.code+`}`) - n := f.Decls[0].(*ast.FuncDecl).Body - got := map[string]bool{} - want := map[string]bool{} - for _, n := range strings.Fields(test.want) { - want[n] = true - } + t.Run(fmt.Sprintf("includeComplitIdents=%t", tc.includeComplitIdents), func(t *testing.T) { + for _, test := range tc.cases { + _, f := mustParse(t, "free.go", `package p; func _() {`+test.code+`}`) + n := f.Decls[0].(*ast.FuncDecl).Body + got := map[string]bool{} + want := map[string]bool{} + for _, n := range strings.Fields(test.want) { + want[n] = true + } - freeishNames(got, n) + freeishNames(got, n, tc.includeComplitIdents) - if !maps.Equal(got, want) { - t.Errorf("\ncode %s\ngot %v\nwant %v", test.code, elems(got), elems(want)) - } + if !maps.Equal(got, want) { + t.Errorf("\ncode %s\ngot %v\nwant %v", test.code, elems(got), elems(want)) + } + } + }) } } diff --git a/internal/refactor/inline/inline.go b/internal/refactor/inline/inline.go index 7d65b583524..7817444150e 100644 --- a/internal/refactor/inline/inline.go +++ 
b/internal/refactor/inline/inline.go @@ -22,7 +22,6 @@ import ( "golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/imports" "golang.org/x/tools/internal/analysisinternal" internalastutil "golang.org/x/tools/internal/astutil" "golang.org/x/tools/internal/typeparams" @@ -271,12 +270,12 @@ func (st *state) inline() (*Result, error) { } } - // Add new imports. - // + // Add new imports that are still used. + newImports := trimNewImports(res.newImports, res.new) // Insert new imports after last existing import, // to avoid migration of pre-import comments. // The imports will be organized below. - if len(res.newImports) > 0 { + if len(newImports) > 0 { // If we have imports to add, do so independent of the rest of the file. // Otherwise, the length of the new imports may consume floating comments, // causing them to be printed inside the imports block. @@ -329,7 +328,7 @@ func (st *state) inline() (*Result, error) { } } // Add new imports. - for _, imp := range res.newImports { + for _, imp := range newImports { // Check that the new imports are accessible. path, _ := strconv.Unquote(imp.spec.Path.Value) if !analysisinternal.CanImport(caller.Types.Path(), path) { @@ -355,30 +354,14 @@ func (st *state) inline() (*Result, error) { } // Delete imports referenced only by caller.Call.Fun. - // - // (We can't let imports.Process take care of it as it may - // mistake obsolete imports for missing new imports when the - // names are similar, as is common during a package migration.) for _, oldImport := range res.oldImports { specToDelete := oldImport.spec - for _, decl := range f.Decls { - if decl, ok := decl.(*ast.GenDecl); ok && decl.Tok == token.IMPORT { - decl.Specs = slices.DeleteFunc(decl.Specs, func(spec ast.Spec) bool { - imp := spec.(*ast.ImportSpec) - // Since we re-parsed the file, we can't match by identity; - // instead look for syntactic equivalence. - return imp.Path.Value == specToDelete.Path.Value && - (imp.Name != nil) == (specToDelete.Name != nil) && - (imp.Name == nil || imp.Name.Name == specToDelete.Name.Name) - }) - - // Edge case: import "foo" => import (). - if !decl.Lparen.IsValid() { - decl.Lparen = decl.TokPos + token.Pos(len("import")) - decl.Rparen = decl.Lparen + 1 - } - } + name := "" + if specToDelete.Name != nil { + name = specToDelete.Name.Name } + path, _ := strconv.Unquote(specToDelete.Path.Value) + astutil.DeleteNamedImport(caller.Fset, f, name, path) } var out bytes.Buffer @@ -387,66 +370,6 @@ func (st *state) inline() (*Result, error) { } newSrc := out.Bytes() - // Remove imports that are no longer referenced. - // - // It ought to be possible to compute the set of PkgNames used - // by the "old" code, compute the free identifiers of the - // "new" code using a syntax-only (no go/types) algorithm, and - // see if the reduction in the number of uses of any PkgName - // equals the number of times it appears in caller.Info.Uses, - // indicating that it is no longer referenced by res.new. - // - // However, the notorious ambiguity of resolving T{F: 0} makes this - // unreliable: without types, we can't tell whether F refers to - // a field of struct T, or a package-level const/var of a - // dot-imported (!) package. - // - // So, for now, we run imports.Process, which is - // unsatisfactory as it has to run the go command, and it - // looks at the user's module cache state--unnecessarily, - // since this step cannot add new imports. 
- // - // TODO(adonovan): replace with a simpler implementation since - // all the necessary imports are present but merely untidy. - // That will be faster, and also less prone to nondeterminism - // if there are bugs in our logic for import maintenance. - // - // However, golang.org/x/tools/internal/imports.ApplyFixes is - // too simple as it requires the caller to have figured out - // all the logical edits. In our case, we know all the new - // imports that are needed (see newImports), each of which can - // be specified as: - // - // &imports.ImportFix{ - // StmtInfo: imports.ImportInfo{path, name, - // IdentName: name, - // FixType: imports.AddImport, - // } - // - // but we don't know which imports are made redundant by the - // inlining itself. For example, inlining a call to - // fmt.Println may make the "fmt" import redundant. - // - // Also, both imports.Process and internal/imports.ApplyFixes - // reformat the entire file, which is not ideal for clients - // such as gopls. (That said, the point of a canonical format - // is arguably that any tool can reformat as needed without - // this being inconvenient.) - // - // We could invoke imports.Process and parse its result, - // compare against the original AST, compute a list of import - // fixes, and return that too. - - // Recompute imports only if there were existing ones. - if len(f.Imports) > 0 { - formatted, err := imports.Process("output", newSrc, nil) - if err != nil { - logf("cannot reformat: %v <<%s>>", err, &out) - return nil, err // cannot reformat (a bug?) - } - newSrc = formatted - } - literalized := false if call, ok := res.new.(*ast.CallExpr); ok && is[*ast.FuncLit](call.Fun) { literalized = true @@ -610,6 +533,43 @@ func (i *importState) localName(pkgPath, pkgName string, shadow shadowMap) strin return name } +// trimNewImports removes imports that are no longer needed. +// +// The list of new imports as constructed by calls to [importState.localName] +// includes all of the packages referenced by the callee. +// But in the process of inlining, we may have dropped some of those references. +// For example, if the callee looked like this: +// +// func F(x int) (p.T) {... /* no mention of p */ ...} +// +// and we inlined by assignment: +// +// v := ... +// +// then the reference to package p drops away. +// +// Remove the excess imports by seeing which remain in new, the expression +// to be inlined. +// We can find those by looking at the free names in new. +// The list of free names cannot include spurious package names. +// Free-name tracking is precise except for the case of an identifier +// key in a composite literal, which names either a field or a value. +// Neither fields nor values are package names. +// Since they are not relevant to removing unused imports, we instruct +// freeishNames to omit composite-literal keys that are identifiers. 
+func trimNewImports(newImports []newImport, new ast.Node) []newImport { + free := map[string]bool{} + const omitComplitIdents = false + freeishNames(free, new, omitComplitIdents) + var res []newImport + for _, ni := range newImports { + if free[ni.pkgName] { + res = append(res, ni) + } + } + return res +} + type inlineCallResult struct { newImports []newImport // to add oldImports []oldImport // to remove @@ -2317,7 +2277,8 @@ func createBindingDecl(logf logger, caller *Caller, args []*argument, calleeDecl free[name] = true } } - freeishNames(free, spec.Type) + const includeComplitIdents = true + freeishNames(free, spec.Type, includeComplitIdents) for name := range free { if names[name] { logf("binding decl would shadow free name %q", name) @@ -3390,12 +3351,14 @@ func (st *state) assignStmts(callerStmt *ast.AssignStmt, returnOperands []ast.Ex freeNames = make(map[string]bool) // free(ish) names among rhs expressions nonTrivial = make(map[int]bool) // indexes in rhs of nontrivial result conversions ) + const includeComplitIdents = true + for i, expr := range callerStmt.Rhs { if expr == caller.Call { assert(callIdx == -1, "malformed (duplicative) AST") callIdx = i for j, returnOperand := range returnOperands { - freeishNames(freeNames, returnOperand) + freeishNames(freeNames, returnOperand, includeComplitIdents) rhs = append(rhs, returnOperand) if resultInfo[j]&nonTrivialResult != 0 { nonTrivial[i+j] = true @@ -3408,7 +3371,7 @@ func (st *state) assignStmts(callerStmt *ast.AssignStmt, returnOperands []ast.Ex // We must clone before clearing positions, since e came from the caller. expr = internalastutil.CloneNode(expr) clearPositions(expr) - freeishNames(freeNames, expr) + freeishNames(freeNames, expr, includeComplitIdents) rhs = append(rhs, expr) } } diff --git a/internal/refactor/inline/testdata/assignment.txtar b/internal/refactor/inline/testdata/assignment.txtar index c79c1732934..e201d601480 100644 --- a/internal/refactor/inline/testdata/assignment.txtar +++ b/internal/refactor/inline/testdata/assignment.txtar @@ -103,9 +103,7 @@ func _() { -- b2 -- package a -import ( - "testdata/b" -) +import "testdata/b" func _() { var y int diff --git a/internal/refactor/inline/testdata/import-shadow-1.txtar b/internal/refactor/inline/testdata/import-shadow-1.txtar new file mode 100644 index 00000000000..dc960ac3213 --- /dev/null +++ b/internal/refactor/inline/testdata/import-shadow-1.txtar @@ -0,0 +1,48 @@ +This file is identical to import-shadow.txtar except +that the imports in a/a.go are not grouped. +That is unusual, since goimports and related tools +form groups. + +The result of inlining (bresult) also looks strange, +but again, goimports would fix it up. 
+ +-- go.mod -- +module testdata +go 1.12 + +-- a/a.go -- +package a + +import "testdata/b" +import "log" + +func A() { + const log = "shadow" + b.B() //@ inline(re"B", bresult) +} + +var _ log.Logger + +-- b/b.go -- +package b + +import "log" + +func B() { + log.Printf("") +} + +-- bresult -- +package a + +import ( + log0 "log" +) +import "log" + +func A() { + const log = "shadow" + log0.Printf("") //@ inline(re"B", bresult) +} + +var _ log.Logger diff --git a/internal/refactor/inline/testdata/import-shadow.txtar b/internal/refactor/inline/testdata/import-shadow.txtar index a1078e2495b..c4ea9a61624 100644 --- a/internal/refactor/inline/testdata/import-shadow.txtar +++ b/internal/refactor/inline/testdata/import-shadow.txtar @@ -14,8 +14,10 @@ go 1.12 -- a/a.go -- package a -import "testdata/b" -import "log" +import ( + "testdata/b" + "log" +) func A() { const log = "shadow" From 5fba861ea8db4da158308a9a347e5de5887a183a Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Tue, 11 Mar 2025 15:25:00 -0400 Subject: [PATCH 158/270] internal/typesinternal: add Object and ClassifyCall Add two functions that provide information about expressions, typically those that are in the function position of calls. Object returns the object related to any expression. It is intended to be called on values in the field CallExpr.Fun. ClassifyCall returns information about all forms of syntactic function calls in Go, including conversions. A subsequent CL will remimplement Callee and StaticCallee with these functions. Change-Id: I812c9c89fa7369a968eb31bd11bb16257f5936ba Reviewed-on: https://go-review.googlesource.com/c/tools/+/658196 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan Reviewed-by: Robert Findley --- internal/typesinternal/classify_call.go | 173 +++++++++++++++++++ internal/typesinternal/classify_call_test.go | 165 ++++++++++++++++++ 2 files changed, 338 insertions(+) create mode 100644 internal/typesinternal/classify_call.go create mode 100644 internal/typesinternal/classify_call_test.go diff --git a/internal/typesinternal/classify_call.go b/internal/typesinternal/classify_call.go new file mode 100644 index 00000000000..1e79eb2b7ac --- /dev/null +++ b/internal/typesinternal/classify_call.go @@ -0,0 +1,173 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package typesinternal + +import ( + "fmt" + "go/ast" + "go/types" +) + +// CallKind describes the function position of an [*ast.CallExpr]. +type CallKind int + +const ( + CallStatic CallKind = iota // static call to known function + CallInterface // dynamic call through an interface method + CallDynamic // dynamic call of a func value + CallBuiltin // call to a builtin function + CallConversion // a conversion (not a call) +) + +var callKindNames = []string{ + "CallStatic", + "CallInterface", + "CallDynamic", + "CallBuiltin", + "CallConversion", +} + +func (k CallKind) String() string { + if i := int(k); i >= 0 && i < len(callKindNames) { + return callKindNames[i] + } + return fmt.Sprintf("typeutil.CallKind(%d)", k) +} + +// ClassifyCall classifies the function position of a call expression ([*ast.CallExpr]). +// It distinguishes among true function calls, calls to builtins, and type conversions, +// and further classifies function calls as static calls (where the function is known), +// dynamic interface calls, and other dynamic calls. 
+// +// For static, interface and builtin calls, ClassifyCall returns the [types.Object] +// for the name of the caller. For calls of instantiated functions and +// methods, it returns the object for the corresponding generic function +// or method on the generic type. +// The relationships between the return values are: +// +// CallKind object +// CallStatic *types.Func +// CallInterface *types.Func +// CallBuiltin *types.Builtin +// CallDynamic nil +// CallConversion nil +// +// For the declarations: +// +// func f() {} +// func g[T any]() {} +// var v func() +// var s []func() +// type I interface { M() } +// var i I +// +// ClassifyCall returns the following: +// +// f() CallStatic the *types.Func for f +// g[int]() CallStatic the *types.Func for g[T] +// i.M() CallInterface the *types.Func for i.M +// min(1, 2) CallBuiltin the *types.Builtin for min +// v() CallDynamic nil +// s[0]() CallDynamic nil +// int(x) CallConversion nil +// []byte("") CallConversion nil +func ClassifyCall(info *types.Info, call *ast.CallExpr) (CallKind, types.Object) { + if info.Types[call.Fun].IsType() { + return CallConversion, nil + } + obj := Used(info, call.Fun) + // Classify the call by the type of the object, if any. + switch obj := obj.(type) { + case *types.Builtin: + return CallBuiltin, obj + case *types.Func: + if interfaceMethod(obj) { + return CallInterface, obj + } + return CallStatic, obj + default: + return CallDynamic, nil + } +} + +// Used returns the [types.Object] used by e, if any. +// If e is one of various forms of reference: +// +// f, c, v, T lexical reference +// pkg.X qualified identifier +// f[T] or pkg.F[K,V] instantiations of the above kinds +// expr.f field or method value selector +// T.f method expression selector +// +// Used returns the object to which it refers. +// +// For the declarations: +// +// func F[T any] {...} +// type I interface { M() } +// var ( +// x int +// s struct { f int } +// a []int +// i I +// ) +// +// Used returns the following: +// +// Expr Used +// x the *types.Var for x +// s.f the *types.Var for f +// F[int] the *types.Func for F[T] (not F[int]) +// i.M the *types.Func for i.M +// I.M the *types.Func for I.M +// min the *types.Builtin for min +// int the *types.TypeName for int +// 1 nil +// a[0] nil +// []byte nil +// +// Note: if e is an instantiated function or method, Used returns +// the corresponding generic function or method on the generic type. +func Used(info *types.Info, e ast.Expr) types.Object { + return used(info, e) +} + +// placeholder: will be moved and documented in the next CL. +func used(info *types.Info, e ast.Expr) types.Object { + e = ast.Unparen(e) + // Look through type instantiation if necessary. + isIndexed := false + switch d := e.(type) { + case *ast.IndexExpr: + if info.Types[d.Index].IsType() { + e = d.X + } + case *ast.IndexListExpr: + e = d.X + } + var obj types.Object + switch e := e.(type) { + case *ast.Ident: + obj = info.Uses[e] // type, var, builtin, or declared func + case *ast.SelectorExpr: + if sel, ok := info.Selections[e]; ok { + obj = sel.Obj() // method or field + } else { + obj = info.Uses[e.Sel] // qualified identifier? + } + } + // If a variable like a slice or map is being indexed, do not + // return an object. + if _, ok := obj.(*types.Var); ok && isIndexed { + return nil + } + return obj +} + +// placeholder: will be moved and documented in the next CL. 
+func interfaceMethod(f *types.Func) bool { + recv := f.Signature().Recv() + return recv != nil && types.IsInterface(recv.Type()) +} diff --git a/internal/typesinternal/classify_call_test.go b/internal/typesinternal/classify_call_test.go new file mode 100644 index 00000000000..8a6e75a3b0d --- /dev/null +++ b/internal/typesinternal/classify_call_test.go @@ -0,0 +1,165 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package typesinternal_test + +import ( + "bytes" + "fmt" + "go/ast" + "go/format" + "go/importer" + "go/parser" + "go/token" + "go/types" + "testing" + + ti "golang.org/x/tools/internal/typesinternal" +) + +func TestClassifyCallAndUsed(t *testing.T) { + // This function directly tests ClassifyCall, but since that + // function's second return value is always the result of Used, + // it effectively tests Used as well. + const src = ` + package p + + func g(int) + + type A[T any] *T + + func F[T any](T) {} + + type S struct{ f func(int) } + func (S) g(int) + + type I interface{ m(int) } + + var ( + z S + a struct{b struct{c S}} + f = g + m map[int]func() + n []func() + p *int + ) + + func tests[T int]() { + var zt T + + g(1) + f(1) + println() + z.g(1) // a concrete method + a.b.c.g(1) // same + S.g(z, 1) // method expression + z.f(1) // struct field + I(nil).m(1) // interface method, then type conversion (preorder traversal) + m[0]() // a map + n[0]() // a slice + F[int](1) // instantiated function + F[T](zt) // generic function + func() {}() // function literal + _=[]byte("") // type expression + _=A[int](p) // instantiated type + _=T(1) // type param + // parenthesized forms + (z.g)(1) + (z).g(1) + + + // A[T](1) // generic type: illegal + } + ` + + fset := token.NewFileSet() + cfg := &types.Config{ + Error: func(err error) { t.Fatal(err) }, + Importer: importer.Default(), + } + info := &types.Info{ + Instances: make(map[*ast.Ident]types.Instance), + Uses: make(map[*ast.Ident]types.Object), + Defs: make(map[*ast.Ident]types.Object), + Types: make(map[ast.Expr]types.TypeAndValue), + Selections: make(map[*ast.SelectorExpr]*types.Selection), + } + // parse + f, err := parser.ParseFile(fset, "classify.go", src, 0) + if err != nil { + t.Fatal(err) + } + + // type-check + pkg, err := cfg.Check(f.Name.Name, fset, []*ast.File{f}, info) + if err != nil { + t.Fatal(err) + } + + lookup := func(sym string) types.Object { + return pkg.Scope().Lookup(sym) + } + + member := func(sym, fieldOrMethod string) types.Object { + obj, _, _ := types.LookupFieldOrMethod(lookup(sym).Type(), false, pkg, fieldOrMethod) + return obj + } + + printlnObj := types.Universe.Lookup("println") + + // Expected Calls are in the order of CallExprs at the end of src, above. 
+ wants := []struct { + kind ti.CallKind + obj types.Object + }{ + {ti.CallStatic, lookup("g")}, // g + {ti.CallDynamic, nil}, // f + {ti.CallBuiltin, printlnObj}, // println + {ti.CallStatic, member("S", "g")}, // z.g + {ti.CallStatic, member("S", "g")}, // a.b.c.g + {ti.CallStatic, member("S", "g")}, // S.g(z, 1) + {ti.CallDynamic, nil}, // z.f + {ti.CallInterface, member("I", "m")}, // I(nil).m + {ti.CallConversion, nil}, // I(nil) + {ti.CallDynamic, nil}, // m[0] + {ti.CallDynamic, nil}, // n[0] + {ti.CallStatic, lookup("F")}, // F[int] + {ti.CallStatic, lookup("F")}, // F[T] + {ti.CallDynamic, nil}, // f(){} + {ti.CallConversion, nil}, // []byte + {ti.CallConversion, nil}, // A[int] + {ti.CallConversion, nil}, // T + {ti.CallStatic, member("S", "g")}, // (z.g) + {ti.CallStatic, member("S", "g")}, // (z).g + } + + i := 0 + ast.Inspect(f, func(n ast.Node) bool { + if call, ok := n.(*ast.CallExpr); ok { + if i >= len(wants) { + t.Fatal("more calls than wants") + } + var buf bytes.Buffer + if err := format.Node(&buf, fset, n); err != nil { + t.Fatal(err) + } + prefix := fmt.Sprintf("%s (#%d)", buf.String(), i) + + gotKind, gotObj := ti.ClassifyCall(info, call) + want := wants[i] + + if gotKind != want.kind { + t.Errorf("%s kind: got %s, want %s", prefix, gotKind, want.kind) + } + if gotObj != want.obj { + t.Errorf("%s obj: got %v (%[2]T), want %v", prefix, gotObj, want.obj) + } + i++ + } + return true + }) + if i != len(wants) { + t.Fatal("more wants than calls") + } +} From f3a6b96d653fc1b187ba386b7ea7a286a1cd527e Mon Sep 17 00:00:00 2001 From: xieyuschen Date: Tue, 1 Apr 2025 01:21:27 -0600 Subject: [PATCH 159/270] gopls/internal/analysis/modernize: add modernizer for WaitGroup.Go This CL supports a modernizer to replace old complex usages of WaitGroup by WaitGroup.Go from go1.25. 
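For illustration, the rewrite this modernizer offers looks roughly like the following (a minimal sketch; doWork is a hypothetical stand-in for the goroutine body):

	var wg sync.WaitGroup
	wg.Add(1)
	go func() {
		defer wg.Done()
		doWork()
	}()
	wg.Wait()

is simplified to

	var wg sync.WaitGroup
	wg.Go(func() {
		doWork()
	})
	wg.Wait()

The same fix applies when wg.Done() is the last statement of the goroutine body rather than a deferred call.
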
Fixes: golang/go#73059 Change-Id: I8e2f8df0cca0fae4996d2a46c6a8229cf1d37e2c Reviewed-on: https://go-review.googlesource.com/c/tools/+/661775 Reviewed-by: Alan Donovan Auto-Submit: Alan Donovan LUCI-TryBot-Result: Go LUCI Reviewed-by: Carlos Amedee --- gopls/doc/analyzers.md | 2 + gopls/internal/analysis/modernize/doc.go | 2 + .../internal/analysis/modernize/modernize.go | 8 +- .../analysis/modernize/modernize_test.go | 1 + .../testdata/src/waitgroup/waitgroup.go | 152 ++++++++++++++++++ .../src/waitgroup/waitgroup.go.golden | 143 ++++++++++++++++ .../testdata/src/waitgroup/waitgroup_alias.go | 21 +++ .../src/waitgroup/waitgroup_alias.go.golden | 19 +++ .../testdata/src/waitgroup/waitgroup_dot.go | 22 +++ .../src/waitgroup/waitgroup_dot.go.golden | 20 +++ .../internal/analysis/modernize/waitgroup.go | 131 +++++++++++++++ gopls/internal/doc/api.json | 4 +- 12 files changed, 522 insertions(+), 3 deletions(-) create mode 100644 gopls/internal/analysis/modernize/testdata/src/waitgroup/waitgroup.go create mode 100644 gopls/internal/analysis/modernize/testdata/src/waitgroup/waitgroup.go.golden create mode 100644 gopls/internal/analysis/modernize/testdata/src/waitgroup/waitgroup_alias.go create mode 100644 gopls/internal/analysis/modernize/testdata/src/waitgroup/waitgroup_alias.go.golden create mode 100644 gopls/internal/analysis/modernize/testdata/src/waitgroup/waitgroup_dot.go create mode 100644 gopls/internal/analysis/modernize/testdata/src/waitgroup/waitgroup_dot.go.golden create mode 100644 gopls/internal/analysis/modernize/waitgroup.go diff --git a/gopls/doc/analyzers.md b/gopls/doc/analyzers.md index 4ec7fcbd1d0..82b0e8753f9 100644 --- a/gopls/doc/analyzers.md +++ b/gopls/doc/analyzers.md @@ -553,6 +553,8 @@ Categories of modernize diagnostic: - stringscutprefix: replace some uses of HasPrefix followed by TrimPrefix with CutPrefix, added to the strings package in go1.20. + - waitgroup: replace old complex usages of sync.WaitGroup by less complex WaitGroup.Go method in go1.25. + Default: on. Package documentation: [modernize](https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/modernize) diff --git a/gopls/internal/analysis/modernize/doc.go b/gopls/internal/analysis/modernize/doc.go index 354bf0955d3..7bcde40f900 100644 --- a/gopls/internal/analysis/modernize/doc.go +++ b/gopls/internal/analysis/modernize/doc.go @@ -85,4 +85,6 @@ // // - stringscutprefix: replace some uses of HasPrefix followed by TrimPrefix with CutPrefix, // added to the strings package in go1.20. +// +// - waitgroup: replace old complex usages of sync.WaitGroup by less complex WaitGroup.Go method in go1.25. package modernize diff --git a/gopls/internal/analysis/modernize/modernize.go b/gopls/internal/analysis/modernize/modernize.go index ebf83ab1bc3..dbef72fe5cf 100644 --- a/gopls/internal/analysis/modernize/modernize.go +++ b/gopls/internal/analysis/modernize/modernize.go @@ -93,6 +93,7 @@ func run(pass *analysis.Pass) (any, error) { stringsseq(pass) sortslice(pass) testingContext(pass) + waitgroup(pass) // TODO(adonovan): opt: interleave these micro-passes within a single inspection. @@ -121,7 +122,12 @@ func formatExprs(fset *token.FileSet, exprs []ast.Expr) string { // isZeroIntLiteral reports whether e is an integer whose value is 0. func isZeroIntLiteral(info *types.Info, e ast.Expr) bool { - return info.Types[e].Value == constant.MakeInt64(0) + return isIntLiteral(info, e, 0) +} + +// isIntLiteral reports whether e is an integer with given value. 
+func isIntLiteral(info *types.Info, e ast.Expr, n int64) bool { + return info.Types[e].Value == constant.MakeInt64(n) } // filesUsing returns a cursor for each *ast.File in the inspector diff --git a/gopls/internal/analysis/modernize/modernize_test.go b/gopls/internal/analysis/modernize/modernize_test.go index 9f17d159073..e9f91f2262c 100644 --- a/gopls/internal/analysis/modernize/modernize_test.go +++ b/gopls/internal/analysis/modernize/modernize_test.go @@ -29,5 +29,6 @@ func Test(t *testing.T) { "fieldsseq", "sortslice", "testingcontext", + "waitgroup", ) } diff --git a/gopls/internal/analysis/modernize/testdata/src/waitgroup/waitgroup.go b/gopls/internal/analysis/modernize/testdata/src/waitgroup/waitgroup.go new file mode 100644 index 00000000000..8269235bda7 --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/waitgroup/waitgroup.go @@ -0,0 +1,152 @@ +package waitgroup + +import ( + "fmt" + "sync" +) + +// supported case for pattern 1. +func _() { + var wg sync.WaitGroup + wg.Add(1) // want "Goroutine creation can be simplified using WaitGroup.Go" + go func() { + defer wg.Done() + fmt.Println() + }() + + wg.Add(1) // want "Goroutine creation can be simplified using WaitGroup.Go" + go func() { + defer wg.Done() + }() + + for range 10 { + wg.Add(1) // want "Goroutine creation can be simplified using WaitGroup.Go" + go func() { + defer wg.Done() + fmt.Println() + }() + } +} + +// supported case for pattern 2. +func _() { + var wg sync.WaitGroup + wg.Add(1) // want "Goroutine creation can be simplified using WaitGroup.Go" + go func() { + fmt.Println() + wg.Done() + }() + + wg.Add(1) // want "Goroutine creation can be simplified using WaitGroup.Go" + go func() { + wg.Done() + }() + + for range 10 { + wg.Add(1) // want "Goroutine creation can be simplified using WaitGroup.Go" + go func() { + fmt.Println() + wg.Done() + }() + } +} + +// this function puts some wrong usages but waitgroup modernizer will still offer fixes. +func _() { + var wg sync.WaitGroup + wg.Add(1) // want "Goroutine creation can be simplified using WaitGroup.Go" + go func() { + defer wg.Done() + defer wg.Done() + fmt.Println() + }() + + wg.Add(1) // want "Goroutine creation can be simplified using WaitGroup.Go" + go func() { + defer wg.Done() + fmt.Println() + wg.Done() + }() + + wg.Add(1) // want "Goroutine creation can be simplified using WaitGroup.Go" + go func() { + fmt.Println() + wg.Done() + wg.Done() + }() +} + +// this function puts the unsupported cases of pattern 1. +func _() { + var wg sync.WaitGroup + wg.Add(1) + go func() {}() + + wg.Add(1) + go func(i int) { + defer wg.Done() + fmt.Println(i) + }(1) + + wg.Add(1) + go func() { + fmt.Println() + defer wg.Done() + }() + + wg.Add(1) + go func() { // noop: no wg.Done call inside function body. + fmt.Println() + }() + + go func() { // noop: no Add call before this go stmt. + defer wg.Done() + fmt.Println() + }() + + wg.Add(2) // noop: only support Add(1). + go func() { + defer wg.Done() + }() + + var wg1 sync.WaitGroup + wg1.Add(1) // noop: Add and Done should be the same object. + go func() { + defer wg.Done() + fmt.Println() + }() + + wg.Add(1) // noop: Add and Done should be the same object. + go func() { + defer wg1.Done() + fmt.Println() + }() +} + +// this function puts the unsupported cases of pattern 2. +func _() { + var wg sync.WaitGroup + wg.Add(1) + go func() { + wg.Done() + fmt.Println() + }() + + go func() { // noop: no Add call before this go stmt. 
+ fmt.Println() + wg.Done() + }() + + var wg1 sync.WaitGroup + wg1.Add(1) // noop: Add and Done should be the same object. + go func() { + fmt.Println() + wg.Done() + }() + + wg.Add(1) // noop: Add and Done should be the same object. + go func() { + fmt.Println() + wg1.Done() + }() +} diff --git a/gopls/internal/analysis/modernize/testdata/src/waitgroup/waitgroup.go.golden b/gopls/internal/analysis/modernize/testdata/src/waitgroup/waitgroup.go.golden new file mode 100644 index 00000000000..dd98429da0d --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/waitgroup/waitgroup.go.golden @@ -0,0 +1,143 @@ +package waitgroup + +import ( + "fmt" + "sync" +) + +// supported case for pattern 1. +func _() { + var wg sync.WaitGroup + // want "Goroutine creation can be simplified using WaitGroup.Go" + wg.Go(func() { + fmt.Println() + }) + + // want "Goroutine creation can be simplified using WaitGroup.Go" + wg.Go(func() { + }) + + for range 10 { + // want "Goroutine creation can be simplified using WaitGroup.Go" + wg.Go(func() { + fmt.Println() + }) + } +} + +// supported case for pattern 2. +func _() { + var wg sync.WaitGroup + // want "Goroutine creation can be simplified using WaitGroup.Go" + wg.Go(func() { + fmt.Println() + }) + + // want "Goroutine creation can be simplified using WaitGroup.Go" + wg.Go(func() { + }) + + for range 10 { + // want "Goroutine creation can be simplified using WaitGroup.Go" + wg.Go(func() { + fmt.Println() + }) + } +} + +// this function puts some wrong usages but waitgroup modernizer will still offer fixes. +func _() { + var wg sync.WaitGroup + // want "Goroutine creation can be simplified using WaitGroup.Go" + wg.Go(func() { + defer wg.Done() + fmt.Println() + }) + + // want "Goroutine creation can be simplified using WaitGroup.Go" + wg.Go(func() { + fmt.Println() + wg.Done() + }) + + // want "Goroutine creation can be simplified using WaitGroup.Go" + wg.Go(func() { + fmt.Println() + wg.Done() + }) +} + +// this function puts the unsupported cases of pattern 1. +func _() { + var wg sync.WaitGroup + wg.Add(1) + go func() {}() + + wg.Add(1) + go func(i int) { + defer wg.Done() + fmt.Println(i) + }(1) + + wg.Add(1) + go func() { + fmt.Println() + defer wg.Done() + }() + + wg.Add(1) + go func() { // noop: no wg.Done call inside function body. + fmt.Println() + }() + + go func() { // noop: no Add call before this go stmt. + defer wg.Done() + fmt.Println() + }() + + wg.Add(2) // noop: only support Add(1). + go func() { + defer wg.Done() + }() + + var wg1 sync.WaitGroup + wg1.Add(1) // noop: Add and Done should be the same object. + go func() { + defer wg.Done() + fmt.Println() + }() + + wg.Add(1) // noop: Add and Done should be the same object. + go func() { + defer wg1.Done() + fmt.Println() + }() +} + +// this function puts the unsupported cases of pattern 2. +func _() { + var wg sync.WaitGroup + wg.Add(1) + go func() { + wg.Done() + fmt.Println() + }() + + go func() { // noop: no Add call before this go stmt. + fmt.Println() + wg.Done() + }() + + var wg1 sync.WaitGroup + wg1.Add(1) // noop: Add and Done should be the same object. + go func() { + fmt.Println() + wg.Done() + }() + + wg.Add(1) // noop: Add and Done should be the same object. 
+ go func() { + fmt.Println() + wg1.Done() + }() +} diff --git a/gopls/internal/analysis/modernize/testdata/src/waitgroup/waitgroup_alias.go b/gopls/internal/analysis/modernize/testdata/src/waitgroup/waitgroup_alias.go new file mode 100644 index 00000000000..087edba27be --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/waitgroup/waitgroup_alias.go @@ -0,0 +1,21 @@ +package waitgroup + +import ( + "fmt" + sync1 "sync" +) + +func _() { + var wg sync1.WaitGroup + wg.Add(1) // want "Goroutine creation can be simplified using WaitGroup.Go" + go func() { + defer wg.Done() + fmt.Println() + }() + + wg.Add(1) // want "Goroutine creation can be simplified using WaitGroup.Go" + go func() { + fmt.Println() + wg.Done() + }() +} diff --git a/gopls/internal/analysis/modernize/testdata/src/waitgroup/waitgroup_alias.go.golden b/gopls/internal/analysis/modernize/testdata/src/waitgroup/waitgroup_alias.go.golden new file mode 100644 index 00000000000..377973bc689 --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/waitgroup/waitgroup_alias.go.golden @@ -0,0 +1,19 @@ +package waitgroup + +import ( + "fmt" + sync1 "sync" +) + +func _() { + var wg sync1.WaitGroup + // want "Goroutine creation can be simplified using WaitGroup.Go" + wg.Go(func() { + fmt.Println() + }) + + // want "Goroutine creation can be simplified using WaitGroup.Go" + wg.Go(func() { + fmt.Println() + }) +} \ No newline at end of file diff --git a/gopls/internal/analysis/modernize/testdata/src/waitgroup/waitgroup_dot.go b/gopls/internal/analysis/modernize/testdata/src/waitgroup/waitgroup_dot.go new file mode 100644 index 00000000000..b4d1e150dbc --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/waitgroup/waitgroup_dot.go @@ -0,0 +1,22 @@ +package waitgroup + +import ( + "fmt" + . "sync" +) + +// supported case for pattern 1. +func _() { + var wg WaitGroup + wg.Add(1) // want "Goroutine creation can be simplified using WaitGroup.Go" + go func() { + defer wg.Done() + fmt.Println() + }() + + wg.Add(1) // want "Goroutine creation can be simplified using WaitGroup.Go" + go func() { + fmt.Println() + wg.Done() + }() +} diff --git a/gopls/internal/analysis/modernize/testdata/src/waitgroup/waitgroup_dot.go.golden b/gopls/internal/analysis/modernize/testdata/src/waitgroup/waitgroup_dot.go.golden new file mode 100644 index 00000000000..37584be72f8 --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/waitgroup/waitgroup_dot.go.golden @@ -0,0 +1,20 @@ +package waitgroup + +import ( + "fmt" + . "sync" +) + +// supported case for pattern 1. +func _() { + var wg WaitGroup + // want "Goroutine creation can be simplified using WaitGroup.Go" + wg.Go(func() { + fmt.Println() + }) + + // want "Goroutine creation can be simplified using WaitGroup.Go" + wg.Go(func() { + fmt.Println() + }) +} \ No newline at end of file diff --git a/gopls/internal/analysis/modernize/waitgroup.go b/gopls/internal/analysis/modernize/waitgroup.go new file mode 100644 index 00000000000..37a12da5657 --- /dev/null +++ b/gopls/internal/analysis/modernize/waitgroup.go @@ -0,0 +1,131 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package modernize + +import ( + "fmt" + "go/ast" + "slices" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/analysisinternal" + typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" + "golang.org/x/tools/internal/astutil/cursor" + "golang.org/x/tools/internal/typesinternal/typeindex" +) + +// The waitgroup pass replaces old more complex code with +// go1.25 added API WaitGroup.Go. +// +// Patterns: +// +// 1. wg.Add(1); go func() { defer wg.Done(); ... }() +// => +// wg.Go(go func() { ... }) +// +// 2. wg.Add(1); go func() { ...; wg.Done() }() +// => +// wg.Go(go func() { ... }) +// +// The wg.Done must occur within the first statement of the block in a defer format or last statement of the block, +// and the offered fix only removes the first/last wg.Done call. It doesn't fix the existing wrong usage of sync.WaitGroup. +func waitgroup(pass *analysis.Pass) { + var ( + inspect = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index) + info = pass.TypesInfo + syncWaitGroup = index.Object("sync", "WaitGroup") + syncWaitGroupAdd = index.Selection("sync", "WaitGroup", "Add") + syncWaitGroupDone = index.Selection("sync", "WaitGroup", "Done") + ) + if !index.Used(syncWaitGroup, syncWaitGroupAdd, syncWaitGroupDone) { + return + } + + checkWaitGroup := func(file *ast.File, curGostmt cursor.Cursor) { + gostmt := curGostmt.Node().(*ast.GoStmt) + + lit, ok := gostmt.Call.Fun.(*ast.FuncLit) + // go statement must have a no-arg function literal. + if !ok || len(gostmt.Call.Args) != 0 { + return + } + + // previous node must call wg.Add. + prev, ok := curGostmt.PrevSibling() + if !ok { + return + } + prevNode := prev.Node() + if !is[*ast.ExprStmt](prevNode) || !is[*ast.CallExpr](prevNode.(*ast.ExprStmt).X) { + return + } + + prevCall := prevNode.(*ast.ExprStmt).X.(*ast.CallExpr) + if typeutil.Callee(info, prevCall) != syncWaitGroupAdd || !isIntLiteral(info, prevCall.Args[0], 1) { + return + } + + addCallRecv := ast.Unparen(prevCall.Fun).(*ast.SelectorExpr).X + list := lit.Body.List + if len(list) == 0 { + return + } + + var doneStmt ast.Stmt + if deferStmt, ok := list[0].(*ast.DeferStmt); ok && + typeutil.Callee(info, deferStmt.Call) == syncWaitGroupDone && + equalSyntax(ast.Unparen(deferStmt.Call.Fun).(*ast.SelectorExpr).X, addCallRecv) { + // wg.Add(1); go func() { defer wg.Done(); ... }() + // --------- ------ --------------- - + // wg.Go(func() { ... } ) + doneStmt = deferStmt + } else if lastStmt, ok := list[len(list)-1].(*ast.ExprStmt); ok { + if doneCall, ok := lastStmt.X.(*ast.CallExpr); ok && + typeutil.Callee(info, doneCall) == syncWaitGroupDone && + equalSyntax(ast.Unparen(doneCall.Fun).(*ast.SelectorExpr).X, addCallRecv) { + // wg.Add(1); go func() { ... ;wg.Done();}() + // --------- ------ ---------- - + // wg.Go(func() { ... 
} ) + doneStmt = lastStmt + } + } + if doneStmt != nil { + pass.Report(analysis.Diagnostic{ + Pos: prevNode.Pos(), + End: gostmt.End(), + Category: "waitgroup", + Message: "Goroutine creation can be simplified using WaitGroup.Go", + SuggestedFixes: []analysis.SuggestedFix{{ + Message: "Simplify by using WaitGroup.Go", + TextEdits: slices.Concat( + analysisinternal.DeleteStmt(pass.Fset, file, prevNode.(*ast.ExprStmt), nil), + analysisinternal.DeleteStmt(pass.Fset, file, doneStmt, nil), + []analysis.TextEdit{ + { + Pos: gostmt.Pos(), + End: gostmt.Call.Pos(), + NewText: fmt.Appendf(nil, "%s.Go(", addCallRecv), + }, + { + Pos: gostmt.Call.Lparen, + End: gostmt.Call.Rparen, + }, + }, + ), + }}, + }) + } + } + + for curFile := range filesUsing(inspect, info, "go1.25") { + for curGostmt := range curFile.Preorder((*ast.GoStmt)(nil)) { + checkWaitGroup(curFile.Node().(*ast.File), curGostmt) + } + } +} diff --git a/gopls/internal/doc/api.json b/gopls/internal/doc/api.json index f731e0d7984..9dc7aef266d 100644 --- a/gopls/internal/doc/api.json +++ b/gopls/internal/doc/api.json @@ -562,7 +562,7 @@ }, { "Name": "\"modernize\"", - "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code by using more modern features of Go and its standard\nlibrary.\n\nEach diagnostic provides a fix. Our intent is that these fixes may\nbe safely applied en masse without changing the behavior of your\nprogram. In some cases the suggested fixes are imperfect and may\nlead to (for example) unused imports or unused local variables,\ncausing build breakage. However, these problems are generally\ntrivial to fix. We regard any modernizer whose fix changes program\nbehavior to have a serious bug and will endeavor to fix it.\n\nTo apply all modernization fixes en masse, you can use the\nfollowing command:\n\n\t$ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -fix -test ./...\n\nIf the tool warns of conflicting fixes, you may need to run it more\nthan once until it has applied all fixes cleanly. This command is\nnot an officially supported interface and may change in the future.\n\nChanges produced by this tool should be reviewed as usual before\nbeing merged. In some cases, a loop may be replaced by a simple\nfunction call, causing comments within the loop to be discarded.\nHuman judgment may be required to avoid losing comments of value.\n\nEach diagnostic reported by modernize has a specific category. (The\ncategories are listed below.) Diagnostics in some categories, such\nas \"efaceany\" (which replaces \"interface{}\" with \"any\" where it is\nsafe to do so) are particularly numerous. It may ease the burden of\ncode review to apply fixes in two passes, the first change\nconsisting only of fixes of category \"efaceany\", the second\nconsisting of all others. This can be achieved using the -category flag:\n\n\t$ modernize -category=efaceany -fix -test ./...\n\t$ modernize -category=-efaceany -fix -test ./...\n\nCategories of modernize diagnostic:\n\n - minmax: replace an if/else conditional assignment by a call to\n the built-in min or max functions added in go1.21.\n\n - sortslice: replace sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21.\n\n - efaceany: replace interface{} by the 'any' type added in go1.18.\n\n - slicesclone: replace append([]T(nil), s...) 
by slices.Clone(s) or\n slices.Concat(s), added in go1.21.\n\n - mapsloop: replace a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions from\n the maps package, added in go1.21.\n\n - fmtappendf: replace []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...),\n added in go1.19.\n\n - testingcontext: replace uses of context.WithCancel in tests\n with t.Context, added in go1.24.\n\n - omitzero: replace omitempty by omitzero on structs, added in go1.24.\n\n - bloop: replace \"for i := range b.N\" or \"for range b.N\" in a\n benchmark with \"for b.Loop()\", and remove any preceding calls\n to b.StopTimer, b.StartTimer, and b.ResetTimer.\n\n - slicesdelete: replace append(s[:i], s[i+1]...) by\n slices.Delete(s, i, i+1), added in go1.21.\n\n - rangeint: replace a 3-clause \"for i := 0; i \u003c n; i++\" loop by\n \"for i := range n\", added in go1.22.\n\n - stringseq: replace Split in \"for range strings.Split(...)\" by go1.24's\n more efficient SplitSeq, or Fields with FieldSeq.\n\n - stringscutprefix: replace some uses of HasPrefix followed by TrimPrefix with CutPrefix,\n added to the strings package in go1.20.", + "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code by using more modern features of Go and its standard\nlibrary.\n\nEach diagnostic provides a fix. Our intent is that these fixes may\nbe safely applied en masse without changing the behavior of your\nprogram. In some cases the suggested fixes are imperfect and may\nlead to (for example) unused imports or unused local variables,\ncausing build breakage. However, these problems are generally\ntrivial to fix. We regard any modernizer whose fix changes program\nbehavior to have a serious bug and will endeavor to fix it.\n\nTo apply all modernization fixes en masse, you can use the\nfollowing command:\n\n\t$ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -fix -test ./...\n\nIf the tool warns of conflicting fixes, you may need to run it more\nthan once until it has applied all fixes cleanly. This command is\nnot an officially supported interface and may change in the future.\n\nChanges produced by this tool should be reviewed as usual before\nbeing merged. In some cases, a loop may be replaced by a simple\nfunction call, causing comments within the loop to be discarded.\nHuman judgment may be required to avoid losing comments of value.\n\nEach diagnostic reported by modernize has a specific category. (The\ncategories are listed below.) Diagnostics in some categories, such\nas \"efaceany\" (which replaces \"interface{}\" with \"any\" where it is\nsafe to do so) are particularly numerous. It may ease the burden of\ncode review to apply fixes in two passes, the first change\nconsisting only of fixes of category \"efaceany\", the second\nconsisting of all others. This can be achieved using the -category flag:\n\n\t$ modernize -category=efaceany -fix -test ./...\n\t$ modernize -category=-efaceany -fix -test ./...\n\nCategories of modernize diagnostic:\n\n - minmax: replace an if/else conditional assignment by a call to\n the built-in min or max functions added in go1.21.\n\n - sortslice: replace sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21.\n\n - efaceany: replace interface{} by the 'any' type added in go1.18.\n\n - slicesclone: replace append([]T(nil), s...) 
by slices.Clone(s) or\n slices.Concat(s), added in go1.21.\n\n - mapsloop: replace a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions from\n the maps package, added in go1.21.\n\n - fmtappendf: replace []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...),\n added in go1.19.\n\n - testingcontext: replace uses of context.WithCancel in tests\n with t.Context, added in go1.24.\n\n - omitzero: replace omitempty by omitzero on structs, added in go1.24.\n\n - bloop: replace \"for i := range b.N\" or \"for range b.N\" in a\n benchmark with \"for b.Loop()\", and remove any preceding calls\n to b.StopTimer, b.StartTimer, and b.ResetTimer.\n\n - slicesdelete: replace append(s[:i], s[i+1]...) by\n slices.Delete(s, i, i+1), added in go1.21.\n\n - rangeint: replace a 3-clause \"for i := 0; i \u003c n; i++\" loop by\n \"for i := range n\", added in go1.22.\n\n - stringseq: replace Split in \"for range strings.Split(...)\" by go1.24's\n more efficient SplitSeq, or Fields with FieldSeq.\n\n - stringscutprefix: replace some uses of HasPrefix followed by TrimPrefix with CutPrefix,\n added to the strings package in go1.20.\n\n - waitgroup: replace old complex usages of sync.WaitGroup by less complex WaitGroup.Go method in go1.25.", "Default": "true", "Status": "" }, @@ -1338,7 +1338,7 @@ }, { "Name": "modernize", - "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code by using more modern features of Go and its standard\nlibrary.\n\nEach diagnostic provides a fix. Our intent is that these fixes may\nbe safely applied en masse without changing the behavior of your\nprogram. In some cases the suggested fixes are imperfect and may\nlead to (for example) unused imports or unused local variables,\ncausing build breakage. However, these problems are generally\ntrivial to fix. We regard any modernizer whose fix changes program\nbehavior to have a serious bug and will endeavor to fix it.\n\nTo apply all modernization fixes en masse, you can use the\nfollowing command:\n\n\t$ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -fix -test ./...\n\nIf the tool warns of conflicting fixes, you may need to run it more\nthan once until it has applied all fixes cleanly. This command is\nnot an officially supported interface and may change in the future.\n\nChanges produced by this tool should be reviewed as usual before\nbeing merged. In some cases, a loop may be replaced by a simple\nfunction call, causing comments within the loop to be discarded.\nHuman judgment may be required to avoid losing comments of value.\n\nEach diagnostic reported by modernize has a specific category. (The\ncategories are listed below.) Diagnostics in some categories, such\nas \"efaceany\" (which replaces \"interface{}\" with \"any\" where it is\nsafe to do so) are particularly numerous. It may ease the burden of\ncode review to apply fixes in two passes, the first change\nconsisting only of fixes of category \"efaceany\", the second\nconsisting of all others. 
This can be achieved using the -category flag:\n\n\t$ modernize -category=efaceany -fix -test ./...\n\t$ modernize -category=-efaceany -fix -test ./...\n\nCategories of modernize diagnostic:\n\n - minmax: replace an if/else conditional assignment by a call to\n the built-in min or max functions added in go1.21.\n\n - sortslice: replace sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21.\n\n - efaceany: replace interface{} by the 'any' type added in go1.18.\n\n - slicesclone: replace append([]T(nil), s...) by slices.Clone(s) or\n slices.Concat(s), added in go1.21.\n\n - mapsloop: replace a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions from\n the maps package, added in go1.21.\n\n - fmtappendf: replace []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...),\n added in go1.19.\n\n - testingcontext: replace uses of context.WithCancel in tests\n with t.Context, added in go1.24.\n\n - omitzero: replace omitempty by omitzero on structs, added in go1.24.\n\n - bloop: replace \"for i := range b.N\" or \"for range b.N\" in a\n benchmark with \"for b.Loop()\", and remove any preceding calls\n to b.StopTimer, b.StartTimer, and b.ResetTimer.\n\n - slicesdelete: replace append(s[:i], s[i+1]...) by\n slices.Delete(s, i, i+1), added in go1.21.\n\n - rangeint: replace a 3-clause \"for i := 0; i \u003c n; i++\" loop by\n \"for i := range n\", added in go1.22.\n\n - stringseq: replace Split in \"for range strings.Split(...)\" by go1.24's\n more efficient SplitSeq, or Fields with FieldSeq.\n\n - stringscutprefix: replace some uses of HasPrefix followed by TrimPrefix with CutPrefix,\n added to the strings package in go1.20.", + "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code by using more modern features of Go and its standard\nlibrary.\n\nEach diagnostic provides a fix. Our intent is that these fixes may\nbe safely applied en masse without changing the behavior of your\nprogram. In some cases the suggested fixes are imperfect and may\nlead to (for example) unused imports or unused local variables,\ncausing build breakage. However, these problems are generally\ntrivial to fix. We regard any modernizer whose fix changes program\nbehavior to have a serious bug and will endeavor to fix it.\n\nTo apply all modernization fixes en masse, you can use the\nfollowing command:\n\n\t$ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -fix -test ./...\n\nIf the tool warns of conflicting fixes, you may need to run it more\nthan once until it has applied all fixes cleanly. This command is\nnot an officially supported interface and may change in the future.\n\nChanges produced by this tool should be reviewed as usual before\nbeing merged. In some cases, a loop may be replaced by a simple\nfunction call, causing comments within the loop to be discarded.\nHuman judgment may be required to avoid losing comments of value.\n\nEach diagnostic reported by modernize has a specific category. (The\ncategories are listed below.) Diagnostics in some categories, such\nas \"efaceany\" (which replaces \"interface{}\" with \"any\" where it is\nsafe to do so) are particularly numerous. It may ease the burden of\ncode review to apply fixes in two passes, the first change\nconsisting only of fixes of category \"efaceany\", the second\nconsisting of all others. 
This can be achieved using the -category flag:\n\n\t$ modernize -category=efaceany -fix -test ./...\n\t$ modernize -category=-efaceany -fix -test ./...\n\nCategories of modernize diagnostic:\n\n - minmax: replace an if/else conditional assignment by a call to\n the built-in min or max functions added in go1.21.\n\n - sortslice: replace sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21.\n\n - efaceany: replace interface{} by the 'any' type added in go1.18.\n\n - slicesclone: replace append([]T(nil), s...) by slices.Clone(s) or\n slices.Concat(s), added in go1.21.\n\n - mapsloop: replace a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions from\n the maps package, added in go1.21.\n\n - fmtappendf: replace []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...),\n added in go1.19.\n\n - testingcontext: replace uses of context.WithCancel in tests\n with t.Context, added in go1.24.\n\n - omitzero: replace omitempty by omitzero on structs, added in go1.24.\n\n - bloop: replace \"for i := range b.N\" or \"for range b.N\" in a\n benchmark with \"for b.Loop()\", and remove any preceding calls\n to b.StopTimer, b.StartTimer, and b.ResetTimer.\n\n - slicesdelete: replace append(s[:i], s[i+1]...) by\n slices.Delete(s, i, i+1), added in go1.21.\n\n - rangeint: replace a 3-clause \"for i := 0; i \u003c n; i++\" loop by\n \"for i := range n\", added in go1.22.\n\n - stringseq: replace Split in \"for range strings.Split(...)\" by go1.24's\n more efficient SplitSeq, or Fields with FieldSeq.\n\n - stringscutprefix: replace some uses of HasPrefix followed by TrimPrefix with CutPrefix,\n added to the strings package in go1.20.\n\n - waitgroup: replace old complex usages of sync.WaitGroup by less complex WaitGroup.Go method in go1.25.", "URL": "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/modernize", "Default": true }, From ead1fea4b0600fc954c44ea316557ef6eb0c6b72 Mon Sep 17 00:00:00 2001 From: xieyuschen Date: Tue, 1 Apr 2025 23:09:43 -0600 Subject: [PATCH 160/270] internal/analysis/modernize: add nil check before comparing with index object This CL introduces an additional nil check before comparing with the output of index.Object to prevent false-positive matches and avoid runtime errors. Running modernize on go/analysis/passes/buildtag/buildtag.go could reproduce the error. This issue occurs because one of bytesTrimPrefix and stringsTrimPrefix is nil and obj1 is also nil, leading to a false postive match and a runtime error "index out of range". This CL also separates the test cases to import either strings or bytes at a time, which helps prevent similar issues in the future. 
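To make the failure mode concrete, here is a minimal standalone sketch (illustrative only; the analyzer itself works with go/types objects via typeutil.Callee, and the lookupTrimPrefix helper below is invented for this example) showing how a nil callee can spuriously match a nil package-level lookup unless it is rejected first:

package main

import "fmt"

// object stands in for go/types.Object in this sketch.
type object struct{ name string }

// lookupTrimPrefix stands in for looking up strings.TrimPrefix in the index:
// it returns nil when the file does not import the package.
func lookupTrimPrefix(imported bool) *object {
	if imported {
		return &object{name: "strings.TrimPrefix"}
	}
	return nil
}

func main() {
	stringsTrimPrefix := lookupTrimPrefix(false) // "strings" not imported: nil
	var callee *object                           // the callee lookup also returned nil

	// Without a nil guard, nil == nil matches, and code that then assumes a
	// real TrimPrefix call (for example, indexing its arguments) panics.
	if callee == stringsTrimPrefix {
		fmt.Println("false positive: a non-TrimPrefix call treated as TrimPrefix")
	}

	// The fix: reject a nil callee before comparing.
	if callee == nil || callee != stringsTrimPrefix {
		fmt.Println("correctly skipped")
	}
}

The guard added in this CL has the same shape: reject a nil obj1 before comparing it against stringsTrimPrefix and bytesTrimPrefix, either of which may be nil when the corresponding package is not imported.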
Change-Id: Iafbd38a55a0a2e0c39a2a418cbd571c67dbe50f0 Reviewed-on: https://go-review.googlesource.com/c/tools/+/661995 Auto-Submit: Alan Donovan Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI Reviewed-by: Robert Findley --- .../analysis/modernize/modernize_test.go | 1 + .../analysis/modernize/stringscutprefix.go | 6 ++- .../bytescutprefix/bytescutprefix.go | 16 ++++++++ .../bytescutprefix/bytescutprefix.go.golden | 16 ++++++++ .../bytescutprefix_dot.go | 4 +- .../bytescutprefix_dot.go.golden | 4 +- .../src/stringscutprefix/stringscutprefix.go | 12 +----- .../stringscutprefix.go.golden | 40 +++++++------------ .../stringscutprefix_dot.go.golden | 2 +- 9 files changed, 60 insertions(+), 41 deletions(-) create mode 100644 gopls/internal/analysis/modernize/testdata/src/stringscutprefix/bytescutprefix/bytescutprefix.go create mode 100644 gopls/internal/analysis/modernize/testdata/src/stringscutprefix/bytescutprefix/bytescutprefix.go.golden rename gopls/internal/analysis/modernize/testdata/src/stringscutprefix/{ => bytescutprefix}/bytescutprefix_dot.go (81%) rename gopls/internal/analysis/modernize/testdata/src/stringscutprefix/{ => bytescutprefix}/bytescutprefix_dot.go.golden (81%) diff --git a/gopls/internal/analysis/modernize/modernize_test.go b/gopls/internal/analysis/modernize/modernize_test.go index e9f91f2262c..e823e983995 100644 --- a/gopls/internal/analysis/modernize/modernize_test.go +++ b/gopls/internal/analysis/modernize/modernize_test.go @@ -25,6 +25,7 @@ func Test(t *testing.T) { "slicescontains", "slicesdelete", "stringscutprefix", + "stringscutprefix/bytescutprefix", "splitseq", "fieldsseq", "sortslice", diff --git a/gopls/internal/analysis/modernize/stringscutprefix.go b/gopls/internal/analysis/modernize/stringscutprefix.go index 9e9239c0f21..cd053539910 100644 --- a/gopls/internal/analysis/modernize/stringscutprefix.go +++ b/gopls/internal/analysis/modernize/stringscutprefix.go @@ -72,10 +72,12 @@ func stringscutprefix(pass *analysis.Pass) { for curCall := range firstStmt.Preorder((*ast.CallExpr)(nil)) { call1 := curCall.Node().(*ast.CallExpr) obj1 := typeutil.Callee(info, call1) - if obj1 != stringsTrimPrefix && obj1 != bytesTrimPrefix { + // bytesTrimPrefix or stringsTrimPrefix might be nil if the file doesn't import it, + // so we need to ensure the obj1 is not nil otherwise the call1 is not TrimPrefix and cause a panic. + if obj1 == nil || + obj1 != stringsTrimPrefix && obj1 != bytesTrimPrefix { continue } - // Have: if strings.HasPrefix(s0, pre0) { ...strings.TrimPrefix(s, pre)... 
} var ( s0 = call.Args[0] diff --git a/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/bytescutprefix/bytescutprefix.go b/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/bytescutprefix/bytescutprefix.go new file mode 100644 index 00000000000..7c5363e6c8d --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/bytescutprefix/bytescutprefix.go @@ -0,0 +1,16 @@ +package bytescutprefix + +import ( + "bytes" +) + +func _() { + if bytes.HasPrefix(bss, bspre) { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + a := bytes.TrimPrefix(bss, bspre) + _ = a + } + if bytes.HasPrefix([]byte(""), []byte("")) { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + a := bytes.TrimPrefix([]byte(""), []byte("")) + _ = a + } +} diff --git a/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/bytescutprefix/bytescutprefix.go.golden b/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/bytescutprefix/bytescutprefix.go.golden new file mode 100644 index 00000000000..8d41a8bf343 --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/bytescutprefix/bytescutprefix.go.golden @@ -0,0 +1,16 @@ +package bytescutprefix + +import ( + "bytes" +) + +func _() { + if after, ok := bytes.CutPrefix(bss, bspre); ok { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + a := after + _ = a + } + if after, ok := bytes.CutPrefix([]byte(""), []byte("")); ok { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + a := after + _ = a + } +} diff --git a/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/bytescutprefix_dot.go b/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/bytescutprefix/bytescutprefix_dot.go similarity index 81% rename from gopls/internal/analysis/modernize/testdata/src/stringscutprefix/bytescutprefix_dot.go rename to gopls/internal/analysis/modernize/testdata/src/stringscutprefix/bytescutprefix/bytescutprefix_dot.go index 4da9ed52e13..bfde6b7a461 100644 --- a/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/bytescutprefix_dot.go +++ b/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/bytescutprefix/bytescutprefix_dot.go @@ -1,9 +1,11 @@ -package stringscutprefix +package bytescutprefix import ( . "bytes" ) +var bss, bspre []byte + // test supported cases of pattern 1 func _() { if HasPrefix(bss, bspre) { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" diff --git a/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/bytescutprefix_dot.go.golden b/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/bytescutprefix/bytescutprefix_dot.go.golden similarity index 81% rename from gopls/internal/analysis/modernize/testdata/src/stringscutprefix/bytescutprefix_dot.go.golden rename to gopls/internal/analysis/modernize/testdata/src/stringscutprefix/bytescutprefix/bytescutprefix_dot.go.golden index 054214cabf1..8eb562e7940 100644 --- a/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/bytescutprefix_dot.go.golden +++ b/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/bytescutprefix/bytescutprefix_dot.go.golden @@ -1,9 +1,11 @@ -package stringscutprefix +package bytescutprefix import ( . 
"bytes" ) +var bss, bspre []byte + // test supported cases of pattern 1 func _() { if after, ok := CutPrefix(bss, bspre); ok { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" diff --git a/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/stringscutprefix.go b/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/stringscutprefix.go index f5f890f4171..7679bdb6e67 100644 --- a/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/stringscutprefix.go +++ b/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/stringscutprefix.go @@ -1,13 +1,11 @@ package stringscutprefix import ( - "bytes" "strings" ) var ( - s, pre string - bss, bspre []byte + s, pre string ) // test supported cases of pattern 1 @@ -34,14 +32,6 @@ func _() { _, _ = a, b } - if bytes.HasPrefix(bss, bspre) { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" - a := bytes.TrimPrefix(bss, bspre) - _ = a - } - if bytes.HasPrefix([]byte(""), []byte("")) { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" - a := bytes.TrimPrefix([]byte(""), []byte("")) - _ = a - } var a, b string if strings.HasPrefix(s, "") { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" a, b = "", strings.TrimPrefix(s, "") diff --git a/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/stringscutprefix.go.golden b/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/stringscutprefix.go.golden index d8b7b2ba47f..a6c52b08802 100644 --- a/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/stringscutprefix.go.golden +++ b/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/stringscutprefix.go.golden @@ -1,27 +1,25 @@ package stringscutprefix import ( - "bytes" "strings" ) var ( - s, pre string - bss, bspre []byte + s, pre string ) // test supported cases of pattern 1 func _() { if after, ok := strings.CutPrefix(s, pre); ok { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" - a := after + a := after _ = a } if after, ok := strings.CutPrefix("", ""); ok { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" - a := after + a := after _ = a } if after, ok := strings.CutPrefix(s, ""); ok { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" - println([]byte(after)) + println([]byte(after)) } if after, ok := strings.CutPrefix(s, ""); ok { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" a, b := "", after @@ -34,19 +32,11 @@ func _() { _, _ = a, b } - if after, ok := bytes.CutPrefix(bss, bspre); ok { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" - a := after - _ = a - } - if after, ok := bytes.CutPrefix([]byte(""), []byte("")); ok { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" - a := after - _ = a - } - var a, b string - if after, ok := strings.CutPrefix(s, ""); ok { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" - a, b = "", after - _, _ = a, b - } + var a, b string + if after, ok := strings.CutPrefix(s, ""); ok { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + a, b = "", after + _, _ = a, b + } } // test cases that are not supported by pattern1 @@ -81,12 +71,12 @@ func _() { if after, ok := strings.CutPrefix(s, pre); ok { // want "TrimPrefix can be simplified to CutPrefix" println(after) } - if after, ok := strings.CutPrefix(s, pre); ok { // want "TrimPrefix can be simplified to CutPrefix" - println(strings.TrimPrefix(s, pre)) // noop here - } - if after, ok := 
strings.CutPrefix(s, ""); ok { // want "TrimPrefix can be simplified to CutPrefix" - println(after) - } + if after, ok := strings.CutPrefix(s, pre); ok { // want "TrimPrefix can be simplified to CutPrefix" + println(strings.TrimPrefix(s, pre)) // noop here + } + if after, ok := strings.CutPrefix(s, ""); ok { // want "TrimPrefix can be simplified to CutPrefix" + println(after) + } var ok bool // define an ok variable to test the fix won't shadow it for its if stmt body _ = ok if after, ok0 := strings.CutPrefix(s, pre); ok0 { // want "TrimPrefix can be simplified to CutPrefix" diff --git a/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/stringscutprefix_dot.go.golden b/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/stringscutprefix_dot.go.golden index b5f97b3695a..50e3b6ff0ca 100644 --- a/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/stringscutprefix_dot.go.golden +++ b/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/stringscutprefix_dot.go.golden @@ -15,7 +15,7 @@ func _() { // test supported cases of pattern2 func _() { if after, ok := CutPrefix(s, pre); ok { // want "TrimPrefix can be simplified to CutPrefix" - println(after) + println(after) } if after, ok := CutPrefix(s, pre); ok { // want "TrimPrefix can be simplified to CutPrefix" println(after) From 255cfd76c54799184664fbf1e87d643aec61c429 Mon Sep 17 00:00:00 2001 From: xieyuschen Date: Tue, 1 Apr 2025 00:58:23 -0600 Subject: [PATCH 161/270] gopls: automatically insert package clause for new go files This CL introduces a new feature in gopls to handle didCreateFiles requests from the client. When a new Go file is created, gopls will automatically insert the appropriate package clause at the beginning of the file, streamlining the file initialization process. Updates golang/go#72930 Change-Id: I72277294764300bc81f6c8d17ce54b7ed2cc55eb Reviewed-on: https://go-review.googlesource.com/c/tools/+/659595 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan Auto-Submit: Alan Donovan Reviewed-by: Robert Findley --- gopls/doc/release/v0.19.0.md | 7 + gopls/internal/cmd/cmd.go | 14 +- gopls/internal/golang/addtest.go | 2 +- gopls/internal/golang/completion/newfile.go | 65 ++++++++ gopls/internal/golang/completion/package.go | 19 +++ gopls/internal/golang/extracttofile.go | 2 +- gopls/internal/golang/util.go | 4 +- gopls/internal/server/general.go | 9 ++ gopls/internal/server/unimplemented.go | 4 - gopls/internal/server/workspace.go | 30 ++++ .../internal/test/integration/fake/editor.go | 13 ++ .../workspace/didcreatefiles_test.go | 146 ++++++++++++++++++ gopls/internal/test/integration/wrappers.go | 8 + 13 files changed, 310 insertions(+), 13 deletions(-) create mode 100644 gopls/internal/golang/completion/newfile.go create mode 100644 gopls/internal/test/integration/workspace/didcreatefiles_test.go diff --git a/gopls/doc/release/v0.19.0.md b/gopls/doc/release/v0.19.0.md index 149a474244a..f6208417ebc 100644 --- a/gopls/doc/release/v0.19.0.md +++ b/gopls/doc/release/v0.19.0.md @@ -39,3 +39,10 @@ TODO: implement global. This code action, available on a dotted import, will offer to replace the import with a regular one and qualify each use of the package with its name. + +### Auto-complete package clause for new Go files + +Gopls now automatically adds the appropriate `package` clause to newly created Go files, +so that you can immediately get started writing the interesting part. 
+ +It requires client support for `workspace/didCreateFiles` \ No newline at end of file diff --git a/gopls/internal/cmd/cmd.go b/gopls/internal/cmd/cmd.go index 4a00afc4115..fed96388fb4 100644 --- a/gopls/internal/cmd/cmd.go +++ b/gopls/internal/cmd/cmd.go @@ -343,7 +343,8 @@ func (c *connection) initialize(ctx context.Context, options func(*settings.Opti // Make sure to respect configured options when sending initialize request. opts := settings.DefaultOptions(options) - // If you add an additional option here, you must update the map key in connect. + // If you add an additional option here, + // you must update the map key of settings.DefaultOptions called in (*Application).connect. params.Capabilities.TextDocument.Hover = &protocol.HoverClientCapabilities{ ContentFormat: []protocol.MarkupKind{opts.PreferredContentFormat}, } @@ -351,7 +352,7 @@ func (c *connection) initialize(ctx context.Context, options func(*settings.Opti params.Capabilities.TextDocument.SemanticTokens = protocol.SemanticTokensClientCapabilities{} params.Capabilities.TextDocument.SemanticTokens.Formats = []protocol.TokenFormat{"relative"} params.Capabilities.TextDocument.SemanticTokens.Requests.Range = &protocol.Or_ClientSemanticTokensRequestOptions_range{Value: true} - //params.Capabilities.TextDocument.SemanticTokens.Requests.Range.Value = true + // params.Capabilities.TextDocument.SemanticTokens.Requests.Range.Value = true params.Capabilities.TextDocument.SemanticTokens.Requests.Full = &protocol.Or_ClientSemanticTokensRequestOptions_full{Value: true} params.Capabilities.TextDocument.SemanticTokens.TokenTypes = moreslices.ConvertStrings[string](semtok.TokenTypes) params.Capabilities.TextDocument.SemanticTokens.TokenModifiers = moreslices.ConvertStrings[string](semtok.TokenModifiers) @@ -363,6 +364,9 @@ func (c *connection) initialize(ctx context.Context, options func(*settings.Opti }, } params.Capabilities.Window.WorkDoneProgress = true + params.Capabilities.Workspace.FileOperations = &protocol.FileOperationClientCapabilities{ + DidCreate: true, + } params.InitializationOptions = map[string]any{ "symbolMatcher": string(opts.SymbolMatcher), @@ -817,10 +821,10 @@ func (c *connection) diagnoseFiles(ctx context.Context, files []protocol.Documen } func (c *connection) terminate(ctx context.Context) { - //TODO: do we need to handle errors on these calls? + // TODO: do we need to handle errors on these calls? c.Shutdown(ctx) - //TODO: right now calling exit terminates the process, we should rethink that - //server.Exit(ctx) + // TODO: right now calling exit terminates the process, we should rethink that + // server.Exit(ctx) } // Implement io.Closer. diff --git a/gopls/internal/golang/addtest.go b/gopls/internal/golang/addtest.go index 4a43a82ffee..e952874e109 100644 --- a/gopls/internal/golang/addtest.go +++ b/gopls/internal/golang/addtest.go @@ -319,7 +319,7 @@ func AddTestForFunc(ctx context.Context, snapshot *cache.Snapshot, loc protocol. // package decl based on the originating file. // Search for something that looks like a copyright header, to replicate // in the new file. - if c := copyrightComment(pgf.File); c != nil { + if c := CopyrightComment(pgf.File); c != nil { start, end, err := pgf.NodeOffsets(c) if err != nil { return nil, err diff --git a/gopls/internal/golang/completion/newfile.go b/gopls/internal/golang/completion/newfile.go new file mode 100644 index 00000000000..d9869a2f050 --- /dev/null +++ b/gopls/internal/golang/completion/newfile.go @@ -0,0 +1,65 @@ +// Copyright 2025 The Go Authors. 
All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package completion + +import ( + "bytes" + "context" + "fmt" + + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/cache/parsego" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/golang" + "golang.org/x/tools/gopls/internal/protocol" +) + +// NewFile returns a document change to complete an empty go file. +func NewFile(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) (*protocol.DocumentChange, error) { + if bs, err := fh.Content(); err != nil || len(bs) != 0 { + return nil, err + } + meta, err := golang.NarrowestMetadataForFile(ctx, snapshot, fh.URI()) + if err != nil { + return nil, err + } + var buf bytes.Buffer + // Copy the copyright header from the first existing file that has one. + for _, fileURI := range meta.GoFiles { + if fileURI == fh.URI() { + continue + } + fh, err := snapshot.ReadFile(ctx, fileURI) + if err != nil { + continue + } + pgf, err := snapshot.ParseGo(ctx, fh, parsego.Header) + if err != nil { + continue + } + if group := golang.CopyrightComment(pgf.File); group != nil { + start, end, err := pgf.NodeOffsets(group) + if err != nil { + continue + } + buf.Write(pgf.Src[start:end]) + buf.WriteString("\n\n") + break + } + } + + pkgName, err := bestPackage(ctx, snapshot, fh.URI()) + if err != nil { + return nil, err + } + + fmt.Fprintf(&buf, "package %s\n", pkgName) + change := protocol.DocumentChangeEdit(fh, []protocol.TextEdit{{ + Range: protocol.Range{}, // insert at start of file + NewText: buf.String(), + }}) + + return &change, nil +} diff --git a/gopls/internal/golang/completion/package.go b/gopls/internal/golang/completion/package.go index 01d5622c7f7..d1698ee6580 100644 --- a/gopls/internal/golang/completion/package.go +++ b/gopls/internal/golang/completion/package.go @@ -15,6 +15,7 @@ import ( "go/token" "go/types" "path/filepath" + "sort" "strings" "unicode" @@ -27,6 +28,24 @@ import ( "golang.org/x/tools/gopls/internal/util/safetoken" ) +// bestPackage offers the best package name for a package declaration when +// one is not present in the given file. +func bestPackage(ctx context.Context, snapshot *cache.Snapshot, uri protocol.DocumentURI) (string, error) { + suggestions, err := packageSuggestions(ctx, snapshot, uri, "") + if err != nil { + return "", err + } + // sort with the same way of sortItems. + sort.SliceStable(suggestions, func(i, j int) bool { + if suggestions[i].score != suggestions[j].score { + return suggestions[i].score > suggestions[j].score + } + return suggestions[i].name < suggestions[j].name + }) + + return suggestions[0].name, nil +} + // packageClauseCompletions offers completions for a package declaration when // one is not present in the given file. 
func packageClauseCompletions(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, position protocol.Position) ([]CompletionItem, *Selection, error) { diff --git a/gopls/internal/golang/extracttofile.go b/gopls/internal/golang/extracttofile.go index 39fb28e624b..d3026d4ee0f 100644 --- a/gopls/internal/golang/extracttofile.go +++ b/gopls/internal/golang/extracttofile.go @@ -138,7 +138,7 @@ func ExtractToNewFile(ctx context.Context, snapshot *cache.Snapshot, fh file.Han } var buf bytes.Buffer - if c := copyrightComment(pgf.File); c != nil { + if c := CopyrightComment(pgf.File); c != nil { start, end, err := pgf.NodeOffsets(c) if err != nil { return nil, err diff --git a/gopls/internal/golang/util.go b/gopls/internal/golang/util.go index a81ff3fbe58..b13056e02b9 100644 --- a/gopls/internal/golang/util.go +++ b/gopls/internal/golang/util.go @@ -361,9 +361,9 @@ func AbbreviateVarName(s string) string { return b.String() } -// copyrightComment returns the copyright comment group from the input file, or +// CopyrightComment returns the copyright comment group from the input file, or // nil if not found. -func copyrightComment(file *ast.File) *ast.CommentGroup { +func CopyrightComment(file *ast.File) *ast.CommentGroup { if len(file.Comments) == 0 { return nil } diff --git a/gopls/internal/server/general.go b/gopls/internal/server/general.go index b7b69931103..7368206f578 100644 --- a/gopls/internal/server/general.go +++ b/gopls/internal/server/general.go @@ -189,6 +189,15 @@ func (s *server) Initialize(ctx context.Context, params *protocol.ParamInitializ Supported: true, ChangeNotifications: "workspace/didChangeWorkspaceFolders", }, + FileOperations: &protocol.FileOperationOptions{ + DidCreate: &protocol.FileOperationRegistrationOptions{ + Filters: []protocol.FileOperationFilter{{ + Scheme: "file", + // gopls is only interested with files in .go extension. 
+ Pattern: protocol.FileOperationPattern{Glob: "**/*.go"}, + }}, + }, + }, }, }, ServerInfo: &protocol.ServerInfo{ diff --git a/gopls/internal/server/unimplemented.go b/gopls/internal/server/unimplemented.go index 7375dc4bb1b..d3bb07cb647 100644 --- a/gopls/internal/server/unimplemented.go +++ b/gopls/internal/server/unimplemented.go @@ -34,10 +34,6 @@ func (s *server) DidCloseNotebookDocument(context.Context, *protocol.DidCloseNot return notImplemented("DidCloseNotebookDocument") } -func (s *server) DidCreateFiles(context.Context, *protocol.CreateFilesParams) error { - return notImplemented("DidCreateFiles") -} - func (s *server) DidDeleteFiles(context.Context, *protocol.DeleteFilesParams) error { return notImplemented("DidDeleteFiles") } diff --git a/gopls/internal/server/workspace.go b/gopls/internal/server/workspace.go index 84e663c1049..8074ecca444 100644 --- a/gopls/internal/server/workspace.go +++ b/gopls/internal/server/workspace.go @@ -12,6 +12,8 @@ import ( "sync" "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/golang/completion" "golang.org/x/tools/gopls/internal/protocol" "golang.org/x/tools/gopls/internal/settings" "golang.org/x/tools/internal/event" @@ -139,3 +141,31 @@ func (s *server) DidChangeConfiguration(ctx context.Context, _ *protocol.DidChan return nil } + +func (s *server) DidCreateFiles(ctx context.Context, params *protocol.CreateFilesParams) error { + ctx, done := event.Start(ctx, "lsp.Server.didCreateFiles") + defer done() + + var allChanges []protocol.DocumentChange + for _, createdFile := range params.Files { + uri := protocol.DocumentURI(createdFile.URI) + fh, snapshot, release, err := s.fileOf(ctx, uri) + if err != nil { + event.Error(ctx, "fail to call fileOf", err) + continue + } + defer release() + + switch snapshot.FileKind(fh) { + case file.Go: + change, err := completion.NewFile(ctx, snapshot, fh) + if err != nil { + continue + } + allChanges = append(allChanges, *change) + default: + } + } + + return applyChanges(ctx, s.client, allChanges) +} diff --git a/gopls/internal/test/integration/fake/editor.go b/gopls/internal/test/integration/fake/editor.go index 170a9823cad..01f3de8aba9 100644 --- a/gopls/internal/test/integration/fake/editor.go +++ b/gopls/internal/test/integration/fake/editor.go @@ -1309,6 +1309,19 @@ func (e *Editor) Completion(ctx context.Context, loc protocol.Location) (*protoc return completions, nil } +func (e *Editor) DidCreateFiles(ctx context.Context, files ...protocol.DocumentURI) error { + if e.Server == nil { + return nil + } + params := &protocol.CreateFilesParams{} + for _, file := range files { + params.Files = append(params.Files, protocol.FileCreate{ + URI: string(file), + }) + } + return e.Server.DidCreateFiles(ctx, params) +} + func (e *Editor) SetSuggestionInsertReplaceMode(_ context.Context, useReplaceMode bool) { e.mu.Lock() defer e.mu.Unlock() diff --git a/gopls/internal/test/integration/workspace/didcreatefiles_test.go b/gopls/internal/test/integration/workspace/didcreatefiles_test.go new file mode 100644 index 00000000000..cba0daf472e --- /dev/null +++ b/gopls/internal/test/integration/workspace/didcreatefiles_test.go @@ -0,0 +1,146 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package workspace + +import ( + "context" + "fmt" + "testing" + + . 
"golang.org/x/tools/gopls/internal/test/integration" +) + +// TestAutoFillPackageDecl tests that creation of a new .go file causes +// gopls to choose a sensible package name and fill in the package declaration. +func TestAutoFillPackageDecl(t *testing.T) { + const existFiles = ` +-- go.mod -- +module mod.com + +go 1.12 + +-- dog/a_test.go -- +package dog +-- fruits/apple.go -- +package apple + +fun apple() int { + return 0 +} + +-- license/license.go -- +/* Copyright 2025 The Go Authors. All rights reserved. +Use of this source code is governed by a BSD-style +license that can be found in the LICENSE file. */ + +package license + +-- license1/license.go -- +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package license1 + +-- cmd/main.go -- +package main + +-- integration/a_test.go -- +package integration_test + +-- nopkg/testfile.go -- +package +` + for _, tc := range []struct { + name string + newfile string + want string + }{ + { + name: "new file in folder with a_test.go", + newfile: "dog/newfile.go", + want: "package dog\n", + }, + { + name: "new file in folder with go file", + newfile: "fruits/newfile.go", + want: "package apple\n", + }, + { + name: "new test file in folder with go file", + newfile: "fruits/newfile_test.go", + want: "package apple\n", + }, + { + name: "new file in folder with go file that contains license comment", + newfile: "license/newfile.go", + want: `/* Copyright 2025 The Go Authors. All rights reserved. +Use of this source code is governed by a BSD-style +license that can be found in the LICENSE file. */ + +package license +`, + }, + { + name: "new file in folder with go file that contains license comment", + newfile: "license1/newfile.go", + want: `// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package license1 +`, + }, + { + name: "new file in folder with main package", + newfile: "cmd/newfile.go", + want: "package main\n", + }, + { + name: "new file in empty folder", + newfile: "empty_folder/newfile.go", + want: "package emptyfolder\n", + }, + { + name: "new file in folder with integration_test package", + newfile: "integration/newfile.go", + want: "package integration\n", + }, + { + name: "new test file in folder with integration_test package", + newfile: "integration/newfile_test.go", + want: "package integration\n", + }, + { + name: "new file in folder with incomplete package clause", + newfile: "incomplete/newfile.go", + want: "package incomplete\n", + }, + { + name: "package completion for dir name with punctuation", + newfile: "123f_r.u~its-123/newfile.go", + want: "package fruits123\n", + }, + { + name: "package completion for dir name with invalid dir name", + newfile: "123f_r.u~its-123/newfile.go", + want: "package fruits123\n", + }, + } { + t.Run(tc.name, func(t *testing.T) { + createFiles := fmt.Sprintf("%s\n-- %s --", existFiles, tc.newfile) + Run(t, createFiles, func(t *testing.T, env *Env) { + env.DidCreateFiles(env.Editor.DocumentURI(tc.newfile)) + // save buffer to ensure the edits take effects in the file system. 
+ if err := env.Editor.SaveBuffer(context.Background(), tc.newfile); err != nil { + t.Fatal(err) + } + if got := env.FileContent(tc.newfile); tc.want != got { + t.Fatalf("want '%s' but got '%s'", tc.want, got) + } + }) + }) + } +} diff --git a/gopls/internal/test/integration/wrappers.go b/gopls/internal/test/integration/wrappers.go index 6389cdb74e8..17e0cf329c4 100644 --- a/gopls/internal/test/integration/wrappers.go +++ b/gopls/internal/test/integration/wrappers.go @@ -531,6 +531,14 @@ func (e *Env) Completion(loc protocol.Location) *protocol.CompletionList { return completions } +func (e *Env) DidCreateFiles(files ...protocol.DocumentURI) { + e.TB.Helper() + err := e.Editor.DidCreateFiles(e.Ctx, files...) + if err != nil { + e.TB.Fatal(err) + } +} + func (e *Env) SetSuggestionInsertReplaceMode(useReplaceMode bool) { e.TB.Helper() e.Editor.SetSuggestionInsertReplaceMode(e.Ctx, useReplaceMode) From 97789e843eb9b75f676253a35cf0b4b2ed529ec2 Mon Sep 17 00:00:00 2001 From: alingse Date: Sun, 30 Mar 2025 14:22:48 +0800 Subject: [PATCH 162/270] gopls/internal/lsprpc: fix call function with wrong err Change-Id: I60dff0375e18d45ec074498ade25e89c7b0ac6b7 Reviewed-on: https://go-review.googlesource.com/c/tools/+/661695 Reviewed-by: Carlos Amedee Auto-Submit: Carlos Amedee LUCI-TryBot-Result: Go LUCI Reviewed-by: Robert Findley --- gopls/internal/lsprpc/lsprpc.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gopls/internal/lsprpc/lsprpc.go b/gopls/internal/lsprpc/lsprpc.go index 9255f9176bc..3d26bdd6896 100644 --- a/gopls/internal/lsprpc/lsprpc.go +++ b/gopls/internal/lsprpc/lsprpc.go @@ -392,7 +392,7 @@ func (f *forwarder) replyWithDebugAddress(outerCtx context.Context, r jsonrpc2.R addr, err = di.Serve(outerCtx, addr) if err != nil { event.Error(outerCtx, "starting debug server", err) - return r(ctx, result, outerErr) + return r(ctx, result, err) } urls := []string{"http://" + addr} modified.URLs = append(urls, modified.URLs...) From aee7ae56af35e2e9123b64d80813f6224781add3 Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Tue, 1 Apr 2025 13:39:28 -0400 Subject: [PATCH 163/270] internal/typesinternal: support checking for full types.Info Add RequiresFullInfo, which panics if its types.Info argument is missing any maps. We will deploy this incrementally to enforce fully populated Infos across x/tools. Ultimately this will reduce bugs and panics from partially initialized Infos. Also add NewTypesInfo, which creates a types.Info with all its map fields populated. Perhaps this could live in go/types or go/types/typeutil. 
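As a usage sketch of the new helpers (assuming the API exactly as added below, and noting that typesinternal is an internal package importable only from within x/tools), a type-checking client can construct a complete Info and assert its completeness up front:

package main

import (
	"go/ast"
	"go/importer"
	"go/parser"
	"go/token"
	"go/types"

	"golang.org/x/tools/internal/typesinternal"
)

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", "package p; var X = 1", 0)
	if err != nil {
		panic(err)
	}

	// Every map field is non-nil, so the checker records every kind of fact.
	info := typesinternal.NewTypesInfo()
	typesinternal.RequiresFullInfo(info) // no-op here; panics if any map were nil

	conf := types.Config{Importer: importer.Default()}
	if _, err := conf.Check("p", fset, []*ast.File{f}, info); err != nil {
		panic(err)
	}

	// By contrast, a partially initialized Info would be rejected:
	//
	//	typesinternal.RequiresFullInfo(&types.Info{Uses: map[*ast.Ident]types.Object{}})
}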
Change-Id: I09124eee470286c9b73d1ba17b89b63aef1abc87 Reviewed-on: https://go-review.googlesource.com/c/tools/+/662115 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan Auto-Submit: Jonathan Amsterdam --- internal/typesinternal/types.go | 36 ++++++++++++++++++++++++ internal/typesinternal/types_test.go | 41 ++++++++++++++++++++++++++++ 2 files changed, 77 insertions(+) create mode 100644 internal/typesinternal/types_test.go diff --git a/internal/typesinternal/types.go b/internal/typesinternal/types.go index edf0347ec3b..d9ef55ebc77 100644 --- a/internal/typesinternal/types.go +++ b/internal/typesinternal/types.go @@ -7,9 +7,12 @@ package typesinternal import ( + "fmt" + "go/ast" "go/token" "go/types" "reflect" + "strings" "unsafe" "golang.org/x/tools/internal/aliases" @@ -127,3 +130,36 @@ func Origin(t NamedOrAlias) NamedOrAlias { func IsPackageLevel(obj types.Object) bool { return obj.Pkg() != nil && obj.Parent() == obj.Pkg().Scope() } + +// NewTypesInfo returns a *types.Info with all maps populated. +func NewTypesInfo() *types.Info { + return &types.Info{ + Types: map[ast.Expr]types.TypeAndValue{}, + Instances: map[*ast.Ident]types.Instance{}, + Defs: map[*ast.Ident]types.Object{}, + Uses: map[*ast.Ident]types.Object{}, + Implicits: map[ast.Node]types.Object{}, + Selections: map[*ast.SelectorExpr]*types.Selection{}, + Scopes: map[ast.Node]*types.Scope{}, + FileVersions: map[*ast.File]string{}, + } +} + +// RequiresFullInfo panics unless info has non-nil values for all maps. +func RequiresFullInfo(info *types.Info) { + v := reflect.ValueOf(info).Elem() + t := v.Type() + var missing []string + for i := range t.NumField() { + f := t.Field(i) + if f.Type.Kind() == reflect.Map && v.Field(i).IsNil() { + missing = append(missing, f.Name) + } + } + if len(missing) > 0 { + msg := fmt.Sprintf(`A fully populated types.Info value is required. +This one is missing the following fields: +%s`, strings.Join(missing, ", ")) + panic(msg) + } +} diff --git a/internal/typesinternal/types_test.go b/internal/typesinternal/types_test.go new file mode 100644 index 00000000000..2a715549408 --- /dev/null +++ b/internal/typesinternal/types_test.go @@ -0,0 +1,41 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package typesinternal + +import ( + "fmt" + "go/ast" + "go/types" + "regexp" + "testing" +) + +func TestRequiresFullInfo(t *testing.T) { + info := &types.Info{ + Uses: map[*ast.Ident]types.Object{}, + Scopes: map[ast.Node]*types.Scope{}, + } + panics(t, "Types, Instances, Defs, Implicits, Selections, FileVersions", func() { + RequiresFullInfo(info) + }) + + // Shouldn't panic. + RequiresFullInfo(NewTypesInfo()) +} + +// panics asserts that f() panics with with a value whose printed form matches the regexp want. +// Copied from go/analysis/internal/checker/fix_test.go. 
+func panics(t *testing.T, want string, f func()) { + defer func() { + if x := recover(); x == nil { + t.Errorf("function returned normally, wanted panic") + } else if m, err := regexp.MatchString(want, fmt.Sprint(x)); err != nil { + t.Errorf("panics: invalid regexp %q", want) + } else if !m { + t.Errorf("function panicked with value %q, want match for %q", x, want) + } + }() + f() +} From 300a853e6a9abb7867847abc12c1f7e0be9d8d56 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Tue, 1 Apr 2025 16:48:45 -0400 Subject: [PATCH 164/270] gopls: require go1.24.2 This is needed for the backport of the fix to the race in go/types (#71817), which, though benign, causes tests to flake. Updates golang/go#71817 Fixes golang/go#72082 Change-Id: Ie1e02095b971f93fe384e830018ec13126f403d0 Reviewed-on: https://go-review.googlesource.com/c/tools/+/662036 LUCI-TryBot-Result: Go LUCI Auto-Submit: Alan Donovan Reviewed-by: Robert Findley --- gopls/go.mod | 2 +- gopls/go.sum | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/gopls/go.mod b/gopls/go.mod index da7303222d2..5cabb7974de 100644 --- a/gopls/go.mod +++ b/gopls/go.mod @@ -1,6 +1,6 @@ module golang.org/x/tools/gopls -go 1.24.0 +go 1.24.2 require ( github.com/google/go-cmp v0.6.0 diff --git a/gopls/go.sum b/gopls/go.sum index 5a7914737a4..20633541388 100644 --- a/gopls/go.sum +++ b/gopls/go.sum @@ -15,7 +15,6 @@ github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/rogpeppe/go-internal v1.13.2-0.20241226121412-a5dc8ff20d0a h1:w3tdWGKbLGBPtR/8/oO74W6hmz0qE5q0z9aqSAewaaM= github.com/rogpeppe/go-internal v1.13.2-0.20241226121412-a5dc8ff20d0a/go.mod h1:S8kfXMp+yh77OxPD4fdM6YUknrZpQxLhvxzS4gDHENY= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -golang.org/dl v0.0.0-20250211172903-ae3823a6a0a3/go.mod h1:fwQ+hlTD8I6TIzOGkQqxQNfE2xqR+y7SzGaDkksVFkw= golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc= golang.org/x/exp/typeparams v0.0.0-20250218142911-aa4b98e5adaa h1:Br3+0EZZohShrmVVc85znGpxw7Ca8hsUJlrdT/JQGw8= From 66c560d9a060d7007d6a034a8f0c162eeab7ba27 Mon Sep 17 00:00:00 2001 From: xieyuschen Date: Wed, 2 Apr 2025 13:34:28 +0800 Subject: [PATCH 165/270] x/tools: apply modernize fixes The changes are made by running 'modernize -fix ./...' under x/tools. 
Change-Id: Iefe9fc799edf105b347dcef9a495ed8b12e8e6c6 Reviewed-on: https://go-review.googlesource.com/c/tools/+/662196 Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI Auto-Submit: Alan Donovan Reviewed-by: Robert Findley --- cmd/callgraph/main.go | 5 +--- cmd/deadcode/deadcode_test.go | 1 - cmd/file2fuzz/main.go | 2 +- cmd/godex/writetype.go | 13 +++++----- cmd/godoc/godoc_test.go | 8 ++---- cmd/godoc/main.go | 2 +- cmd/goimports/goimports.go | 4 +-- cmd/goyacc/yacc.go | 10 +++---- cmd/html2article/conv.go | 7 +++-- cmd/present/main.go | 4 +-- cmd/present2md/main.go | 8 +++--- .../internal/fuzz-generator/gen_test.go | 4 +-- .../internal/fuzz-generator/generator.go | 26 ++++++++----------- cmd/stringer/golden_test.go | 1 - container/intsets/sparse_test.go | 20 +++++++------- go/analysis/analysistest/analysistest.go | 4 +-- go/analysis/passes/composite/composite.go | 2 +- go/analysis/passes/copylock/copylock.go | 2 +- .../passes/fieldalignment/fieldalignment.go | 6 ++--- go/analysis/passes/httpmux/httpmux.go | 8 ++---- go/analysis/passes/structtag/structtag.go | 8 +++--- go/analysis/passes/testinggoroutine/util.go | 8 ++---- go/ast/astutil/imports.go | 3 ++- go/ast/astutil/rewrite_test.go | 1 - go/buildutil/allpackages.go | 2 -- go/callgraph/cha/cha_test.go | 2 +- go/callgraph/rta/rta_test.go | 4 +-- go/callgraph/vta/helpers_test.go | 2 +- go/callgraph/vta/internal/trie/op_test.go | 21 ++++++--------- go/gcexportdata/example_test.go | 9 ++----- go/gcexportdata/gcexportdata.go | 5 +--- go/packages/external.go | 2 +- go/packages/overlay_test.go | 14 +++++----- go/packages/packages_test.go | 10 +++---- go/packages/packagestest/export.go | 2 -- go/ssa/builder_test.go | 3 --- go/ssa/dom.go | 9 ++++--- go/ssa/emit.go | 2 +- go/ssa/instantiate.go | 8 ++---- go/ssa/interp/external.go | 7 +++-- go/ssa/lift.go | 8 +++--- go/ssa/sanity.go | 25 +++--------------- go/ssa/subst.go | 4 +-- go/ssa/util.go | 2 +- godoc/index.go | 7 +++-- godoc/snippet.go | 7 +++-- godoc/static/gen_test.go | 2 +- godoc/versions_test.go | 8 ++---- godoc/vfs/os.go | 7 +++-- godoc/vfs/zipfs/zipfs_test.go | 2 +- internal/bisect/bisect.go | 6 ++--- internal/diff/diff.go | 3 ++- internal/diff/diff_test.go | 4 +-- internal/diff/lcs/common_test.go | 9 +++---- internal/diff/lcs/old.go | 5 +--- internal/diff/lcs/old_test.go | 4 +-- internal/diff/ndiff.go | 2 +- internal/diffp/diff.go | 7 ++--- internal/event/label/label.go | 7 +++-- internal/gcimporter/gcimporter_test.go | 4 +-- internal/gcimporter/iexport.go | 11 ++++---- internal/gcimporter/iimport.go | 3 ++- internal/gocommand/invoke.go | 2 +- internal/gopathwalk/walk.go | 7 +++-- internal/imports/fix.go | 9 +++---- internal/imports/fix_test.go | 4 +-- internal/imports/mod.go | 5 ++-- internal/imports/mod_cache.go | 4 +-- internal/imports/mod_test.go | 13 +++------- internal/imports/sortimports.go | 5 ++-- internal/modindex/lookup.go | 4 +-- internal/packagestest/export.go | 2 -- internal/pkgbits/decoder.go | 2 +- internal/proxydir/proxydir.go | 2 +- internal/refactor/inline/calleefx_test.go | 1 - internal/refactor/inline/everything_test.go | 3 ++- internal/refactor/inline/inline.go | 5 ++-- internal/refactor/inline/inline_test.go | 4 +-- internal/testenv/testenv.go | 6 ++--- internal/typeparams/free.go | 2 +- playground/socket/socket.go | 8 ++---- playground/socket/socket_test.go | 2 +- present/link.go | 8 +++--- present/parse_test.go | 1 - refactor/eg/eg.go | 17 ++++-------- 85 files changed, 203 insertions(+), 309 deletions(-) diff --git a/cmd/callgraph/main.go 
b/cmd/callgraph/main.go index 9e440bbafb9..e489de883d0 100644 --- a/cmd/callgraph/main.go +++ b/cmd/callgraph/main.go @@ -148,10 +148,7 @@ func init() { // If $GOMAXPROCS isn't set, use the full capacity of the machine. // For small machines, use at least 4 threads. if os.Getenv("GOMAXPROCS") == "" { - n := runtime.NumCPU() - if n < 4 { - n = 4 - } + n := max(runtime.NumCPU(), 4) runtime.GOMAXPROCS(n) } } diff --git a/cmd/deadcode/deadcode_test.go b/cmd/deadcode/deadcode_test.go index 90c067331dc..a9b8327c7d7 100644 --- a/cmd/deadcode/deadcode_test.go +++ b/cmd/deadcode/deadcode_test.go @@ -34,7 +34,6 @@ func Test(t *testing.T) { t.Fatal(err) } for _, filename := range matches { - filename := filename t.Run(filename, func(t *testing.T) { t.Parallel() diff --git a/cmd/file2fuzz/main.go b/cmd/file2fuzz/main.go index 2a86c2ece88..f9d4708cd28 100644 --- a/cmd/file2fuzz/main.go +++ b/cmd/file2fuzz/main.go @@ -34,7 +34,7 @@ import ( var encVersion1 = "go test fuzz v1" func encodeByteSlice(b []byte) []byte { - return []byte(fmt.Sprintf("%s\n[]byte(%q)", encVersion1, b)) + return fmt.Appendf(nil, "%s\n[]byte(%q)", encVersion1, b) } func usage() { diff --git a/cmd/godex/writetype.go b/cmd/godex/writetype.go index 866f718f05f..f59760a81c6 100644 --- a/cmd/godex/writetype.go +++ b/cmd/godex/writetype.go @@ -14,6 +14,7 @@ package main import ( "go/types" + "slices" ) func (p *printer) writeType(this *types.Package, typ types.Type) { @@ -28,11 +29,9 @@ func (p *printer) writeTypeInternal(this *types.Package, typ types.Type, visited // practice deeply nested composite types with unnamed component // types are uncommon. This code is likely more efficient than // using a map. - for _, t := range visited { - if t == typ { - p.printf("○%T", typ) // cycle to typ - return - } + if slices.Contains(visited, typ) { + p.printf("○%T", typ) // cycle to typ + return } visited = append(visited, typ) @@ -72,7 +71,7 @@ func (p *printer) writeTypeInternal(this *types.Package, typ types.Type, visited p.print("struct {\n") p.indent++ - for i := 0; i < n; i++ { + for i := range n { f := t.Field(i) if !f.Anonymous() { p.printf("%s ", f.Name()) @@ -120,7 +119,7 @@ func (p *printer) writeTypeInternal(this *types.Package, typ types.Type, visited if GcCompatibilityMode { // print flattened interface // (useful to compare against gc-generated interfaces) - for i := 0; i < n; i++ { + for i := range n { m := t.Method(i) p.print(m.Name()) p.writeSignatureInternal(this, m.Type().(*types.Signature), visited) diff --git a/cmd/godoc/godoc_test.go b/cmd/godoc/godoc_test.go index 66b93f10630..7cd38574233 100644 --- a/cmd/godoc/godoc_test.go +++ b/cmd/godoc/godoc_test.go @@ -16,6 +16,7 @@ import ( "os/exec" "regexp" "runtime" + "slices" "strings" "sync" "testing" @@ -127,12 +128,7 @@ func waitForServer(t *testing.T, ctx context.Context, url, match string, reverse // hasTag checks whether a given release tag is contained in the current version // of the go binary. func hasTag(t string) bool { - for _, v := range build.Default.ReleaseTags { - if t == v { - return true - } - } - return false + return slices.Contains(build.Default.ReleaseTags, t) } func TestURL(t *testing.T) { diff --git a/cmd/godoc/main.go b/cmd/godoc/main.go index a665be0769d..1bce091f269 100644 --- a/cmd/godoc/main.go +++ b/cmd/godoc/main.go @@ -114,7 +114,7 @@ func loggingHandler(h http.Handler) http.Handler { func handleURLFlag() { // Try up to 10 fetches, following redirects. urlstr := *urlFlag - for i := 0; i < 10; i++ { + for range 10 { // Prepare request. 
u, err := url.Parse(urlstr) if err != nil { diff --git a/cmd/goimports/goimports.go b/cmd/goimports/goimports.go index dcb5023a2e7..11f56e0e865 100644 --- a/cmd/goimports/goimports.go +++ b/cmd/goimports/goimports.go @@ -361,8 +361,8 @@ func replaceTempFilename(diff []byte, filename string) ([]byte, error) { } // Always print filepath with slash separator. f := filepath.ToSlash(filename) - bs[0] = []byte(fmt.Sprintf("--- %s%s", f+".orig", t0)) - bs[1] = []byte(fmt.Sprintf("+++ %s%s", f, t1)) + bs[0] = fmt.Appendf(nil, "--- %s%s", f+".orig", t0) + bs[1] = fmt.Appendf(nil, "+++ %s%s", f, t1) return bytes.Join(bs, []byte{'\n'}), nil } diff --git a/cmd/goyacc/yacc.go b/cmd/goyacc/yacc.go index 965a76f14dc..be084da3690 100644 --- a/cmd/goyacc/yacc.go +++ b/cmd/goyacc/yacc.go @@ -1478,7 +1478,7 @@ func symnam(i int) string { // set elements 0 through n-1 to c func aryfil(v []int, n, c int) { - for i := 0; i < n; i++ { + for i := range n { v[i] = c } } @@ -1840,7 +1840,7 @@ func closure(i int) { nexts: // initially fill the sets - for s := 0; s < n; s++ { + for s := range n { prd := curres[s] // @@ -2609,7 +2609,7 @@ func callopt() { if adb > 2 { for p = 0; p <= maxa; p += 10 { fmt.Fprintf(ftable, "%v ", p) - for i = 0; i < 10; i++ { + for i = range 10 { fmt.Fprintf(ftable, "%v ", amem[p+i]) } ftable.WriteRune('\n') @@ -2653,7 +2653,7 @@ func gin(i int) { // now, find amem place for it nextgp: - for p := 0; p < ACTSIZE; p++ { + for p := range ACTSIZE { if amem[p] != 0 { continue } @@ -3117,7 +3117,7 @@ func aryeq(a []int, b []int) int { if len(b) != n { return 0 } - for ll := 0; ll < n; ll++ { + for ll := range n { if a[ll] != b[ll] { return 0 } diff --git a/cmd/html2article/conv.go b/cmd/html2article/conv.go index 604bb1fd7cd..e2946431ce2 100644 --- a/cmd/html2article/conv.go +++ b/cmd/html2article/conv.go @@ -16,6 +16,7 @@ import ( "net/url" "os" "regexp" + "slices" "strings" "golang.org/x/net/html" @@ -270,10 +271,8 @@ func hasClass(name string) selector { return func(n *html.Node) bool { for _, a := range n.Attr { if a.Key == "class" { - for _, c := range strings.Fields(a.Val) { - if c == name { - return true - } + if slices.Contains(strings.Fields(a.Val), name) { + return true } } } diff --git a/cmd/present/main.go b/cmd/present/main.go index 340025276f9..99ed838e926 100644 --- a/cmd/present/main.go +++ b/cmd/present/main.go @@ -73,8 +73,8 @@ func main() { origin := &url.URL{Scheme: "http"} if *originHost != "" { - if strings.HasPrefix(*originHost, "https://") { - *originHost = strings.TrimPrefix(*originHost, "https://") + if after, ok := strings.CutPrefix(*originHost, "https://"); ok { + *originHost = after origin.Scheme = "https" } *originHost = strings.TrimPrefix(*originHost, "http://") diff --git a/cmd/present2md/main.go b/cmd/present2md/main.go index a11e57ecf8b..e23bb33daed 100644 --- a/cmd/present2md/main.go +++ b/cmd/present2md/main.go @@ -447,10 +447,10 @@ func parseInlineLink(s string) (link string, length int) { // If the URL is http://foo.com, drop the http:// // In other words, render [[http://golang.org]] as: // golang.org - if strings.HasPrefix(rawURL, url.Scheme+"://") { - simpleURL = strings.TrimPrefix(rawURL, url.Scheme+"://") - } else if strings.HasPrefix(rawURL, url.Scheme+":") { - simpleURL = strings.TrimPrefix(rawURL, url.Scheme+":") + if after, ok := strings.CutPrefix(rawURL, url.Scheme+"://"); ok { + simpleURL = after + } else if after, ok := strings.CutPrefix(rawURL, url.Scheme+":"); ok { + simpleURL = after } } return renderLink(rawURL, simpleURL), end + 2 diff 
--git a/cmd/signature-fuzzer/internal/fuzz-generator/gen_test.go b/cmd/signature-fuzzer/internal/fuzz-generator/gen_test.go index 4bd5bab7c38..f10a7e9a7df 100644 --- a/cmd/signature-fuzzer/internal/fuzz-generator/gen_test.go +++ b/cmd/signature-fuzzer/internal/fuzz-generator/gen_test.go @@ -35,7 +35,7 @@ func mkGenState() *genstate { func TestBasic(t *testing.T) { checkTunables(tunables) s := mkGenState() - for i := 0; i < 1000; i++ { + for i := range 1000 { s.wr = NewWrapRand(int64(i), RandCtlChecks|RandCtlPanic) fp := s.GenFunc(i, i) var buf bytes.Buffer @@ -58,7 +58,7 @@ func TestMoreComplicated(t *testing.T) { checkTunables(tunables) s := mkGenState() - for i := 0; i < 10000; i++ { + for i := range 10000 { s.wr = NewWrapRand(int64(i), RandCtlChecks|RandCtlPanic) fp := s.GenFunc(i, i) var buf bytes.Buffer diff --git a/cmd/signature-fuzzer/internal/fuzz-generator/generator.go b/cmd/signature-fuzzer/internal/fuzz-generator/generator.go index 6c8002f9f0c..261dd6c029b 100644 --- a/cmd/signature-fuzzer/internal/fuzz-generator/generator.go +++ b/cmd/signature-fuzzer/internal/fuzz-generator/generator.go @@ -48,6 +48,7 @@ import ( "os" "os/exec" "path/filepath" + "slices" "strconv" "strings" ) @@ -561,12 +562,7 @@ func (s *genstate) popTunables() { // See precludeSelectedTypes below for more info. func (s *genstate) redistributeFraction(toIncorporate uint8, avoid []int) { inavoid := func(j int) bool { - for _, k := range avoid { - if j == k { - return true - } - } - return false + return slices.Contains(avoid, j) } doredis := func() { @@ -631,7 +627,7 @@ func (s *genstate) GenParm(f *funcdef, depth int, mkctl bool, pidx int) parm { // Convert tf into a cumulative sum tf := s.tunables.typeFractions sum := uint8(0) - for i := 0; i < len(tf); i++ { + for i := range len(tf) { sum += tf[i] tf[i] = sum } @@ -662,7 +658,7 @@ func (s *genstate) GenParm(f *funcdef, depth int, mkctl bool, pidx int) parm { f.structdefs = append(f.structdefs, sp) tnf := int64(s.tunables.nStructFields) / int64(depth+1) nf := int(s.wr.Intn(tnf)) - for fi := 0; fi < nf; fi++ { + for range nf { fp := s.GenParm(f, depth+1, false, pidx) skComp := tunables.doSkipCompare && uint8(s.wr.Intn(100)) < s.tunables.skipCompareFraction @@ -832,7 +828,7 @@ func (s *genstate) GenFunc(fidx int, pidx int) *funcdef { needControl := f.recur f.dodefc = uint8(s.wr.Intn(100)) pTaken := uint8(s.wr.Intn(100)) < s.tunables.takenFraction - for pi := 0; pi < numParams; pi++ { + for range numParams { newparm := s.GenParm(f, 0, needControl, pidx) if !pTaken { newparm.SetAddrTaken(notAddrTaken) @@ -848,7 +844,7 @@ func (s *genstate) GenFunc(fidx int, pidx int) *funcdef { } rTaken := uint8(s.wr.Intn(100)) < s.tunables.takenFraction - for ri := 0; ri < numReturns; ri++ { + for range numReturns { r := s.GenReturn(f, 0, pidx) if !rTaken { r.SetAddrTaken(notAddrTaken) @@ -903,7 +899,7 @@ func (s *genstate) emitCompareFunc(f *funcdef, b *bytes.Buffer, p parm) { b.WriteString(" return ") numel := p.NumElements() ncmp := 0 - for i := 0; i < numel; i++ { + for i := range numel { lelref, lelparm := p.GenElemRef(i, "left") relref, _ := p.GenElemRef(i, "right") if lelref == "" || lelref == "_" { @@ -1501,7 +1497,7 @@ func (s *genstate) emitParamChecks(f *funcdef, b *bytes.Buffer, pidx int, value } else { numel := p.NumElements() cel := checkableElements(p) - for i := 0; i < numel; i++ { + for i := range numel { verb(4, "emitting check-code for p%d el %d value=%d", pi, i, value) elref, elparm := p.GenElemRef(i, s.genParamRef(p, pi)) valstr, value = s.GenValue(f, 
elparm, value, false) @@ -1535,7 +1531,7 @@ func (s *genstate) emitParamChecks(f *funcdef, b *bytes.Buffer, pidx int, value // receiver value check if f.isMethod { numel := f.receiver.NumElements() - for i := 0; i < numel; i++ { + for i := range numel { verb(4, "emitting check-code for rcvr el %d value=%d", i, value) elref, elparm := f.receiver.GenElemRef(i, "rcvr") valstr, value = s.GenValue(f, elparm, value, false) @@ -1608,7 +1604,7 @@ func (s *genstate) emitDeferChecks(f *funcdef, b *bytes.Buffer, value int) int { b.WriteString(" // check parm " + which + "\n") numel := p.NumElements() cel := checkableElements(p) - for i := 0; i < numel; i++ { + for i := range numel { elref, elparm := p.GenElemRef(i, s.genParamRef(p, pi)) if elref == "" || elref == "_" || cel == 0 { verb(4, "empty skip p%d el %d", pi, i) @@ -2058,7 +2054,7 @@ func (s *genstate) emitMain(outf *os.File, numit int, fcnmask map[int]int, pkmas for k := 0; k < s.NumTestPackages; k++ { cp := fmt.Sprintf("%s%s%d", s.Tag, CallerName, k) fmt.Fprintf(outf, " go func(ch chan bool) {\n") - for i := 0; i < numit; i++ { + for i := range numit { if shouldEmitFP(i, k, fcnmask, pkmask) { fmt.Fprintf(outf, " %s.%s%d(\"normal\")\n", cp, CallerName, i) if s.tunables.doReflectCall { diff --git a/cmd/stringer/golden_test.go b/cmd/stringer/golden_test.go index 2a81c0855aa..e40b7c53c91 100644 --- a/cmd/stringer/golden_test.go +++ b/cmd/stringer/golden_test.go @@ -453,7 +453,6 @@ func TestGolden(t *testing.T) { dir := t.TempDir() for _, test := range golden { - test := test t.Run(test.name, func(t *testing.T) { input := "package test\n" + test.input file := test.name + ".go" diff --git a/container/intsets/sparse_test.go b/container/intsets/sparse_test.go index cd8ec6e0840..f218e09b6a3 100644 --- a/container/intsets/sparse_test.go +++ b/container/intsets/sparse_test.go @@ -236,7 +236,7 @@ func (set *pset) check(t *testing.T, msg string) { func randomPset(prng *rand.Rand, maxSize int) *pset { set := makePset() size := int(prng.Int()) % maxSize - for i := 0; i < size; i++ { + for range size { // TODO(adonovan): benchmark how performance varies // with this sparsity parameter. n := int(prng.Int()) % 10000 @@ -252,7 +252,7 @@ func TestRandomMutations(t *testing.T) { set := makePset() prng := rand.New(rand.NewSource(0)) - for i := 0; i < 10000; i++ { + for i := range 10000 { n := int(prng.Int())%2000 - 1000 if i%2 == 0 { if debug { @@ -278,9 +278,9 @@ func TestRandomMutations(t *testing.T) { func TestLowerBound(t *testing.T) { // Use random sets of sizes from 0 to about 4000. prng := rand.New(rand.NewSource(0)) - for i := uint(0); i < 12; i++ { + for i := range uint(12) { x := randomPset(prng, 1<= j && e < found { @@ -302,7 +302,7 @@ func TestSetOperations(t *testing.T) { // For each operator, we test variations such as // Z.op(X, Y), Z.op(X, Z) and Z.op(Z, Y) to exercise // the degenerate cases of each method implementation. 
- for i := uint(0); i < 12; i++ { + for i := range uint(12) { X := randomPset(prng, 1< max { @@ -366,7 +366,7 @@ func (s *gcSizes) ptrdata(T types.Type) int64 { } var o, p int64 - for i := 0; i < nf; i++ { + for i := range nf { ft := t.Field(i).Type() a, sz := s.Alignof(ft), s.Sizeof(ft) fp := s.ptrdata(ft) diff --git a/go/analysis/passes/httpmux/httpmux.go b/go/analysis/passes/httpmux/httpmux.go index 58d3ed5daca..655b78fd1cb 100644 --- a/go/analysis/passes/httpmux/httpmux.go +++ b/go/analysis/passes/httpmux/httpmux.go @@ -9,6 +9,7 @@ import ( "go/constant" "go/types" "regexp" + "slices" "strings" "golang.org/x/mod/semver" @@ -103,12 +104,7 @@ func isMethodNamed(f *types.Func, pkgPath string, names ...string) bool { if f.Type().(*types.Signature).Recv() == nil { return false // not a method } - for _, n := range names { - if f.Name() == n { - return true - } - } - return false // not in names + return slices.Contains(names, f.Name()) } // stringConstantExpr returns expression's string constant value. diff --git a/go/analysis/passes/structtag/structtag.go b/go/analysis/passes/structtag/structtag.go index d926503403d..da4afd1b232 100644 --- a/go/analysis/passes/structtag/structtag.go +++ b/go/analysis/passes/structtag/structtag.go @@ -13,6 +13,7 @@ import ( "go/types" "path/filepath" "reflect" + "slices" "strconv" "strings" @@ -167,11 +168,8 @@ func checkTagDuplicates(pass *analysis.Pass, tag, key string, nearest, field *ty if i := strings.Index(val, ","); i >= 0 { if key == "xml" { // Use a separate namespace for XML attributes. - for _, opt := range strings.Split(val[i:], ",") { - if opt == "attr" { - key += " attribute" // Key is part of the error message. - break - } + if slices.Contains(strings.Split(val[i:], ","), "attr") { + key += " attribute" // Key is part of the error message. } } val = val[:i] diff --git a/go/analysis/passes/testinggoroutine/util.go b/go/analysis/passes/testinggoroutine/util.go index 027c99e6b0f..88e77fb4fc4 100644 --- a/go/analysis/passes/testinggoroutine/util.go +++ b/go/analysis/passes/testinggoroutine/util.go @@ -7,6 +7,7 @@ package testinggoroutine import ( "go/ast" "go/types" + "slices" "golang.org/x/tools/internal/typeparams" ) @@ -48,12 +49,7 @@ func isMethodNamed(f *types.Func, pkgPath string, names ...string) bool { if f.Type().(*types.Signature).Recv() == nil { return false } - for _, n := range names { - if f.Name() == n { - return true - } - } - return false + return slices.Contains(names, f.Name()) } func funcIdent(fun ast.Expr) *ast.Ident { diff --git a/go/ast/astutil/imports.go b/go/ast/astutil/imports.go index a6b5ed0a893..5e5601aa467 100644 --- a/go/ast/astutil/imports.go +++ b/go/ast/astutil/imports.go @@ -9,6 +9,7 @@ import ( "fmt" "go/ast" "go/token" + "slices" "strconv" "strings" ) @@ -186,7 +187,7 @@ func AddNamedImport(fset *token.FileSet, f *ast.File, name, path string) (added spec.(*ast.ImportSpec).Path.ValuePos = first.Pos() first.Specs = append(first.Specs, spec) } - f.Decls = append(f.Decls[:i], f.Decls[i+1:]...) 
+ f.Decls = slices.Delete(f.Decls, i, i+1) i-- } diff --git a/go/ast/astutil/rewrite_test.go b/go/ast/astutil/rewrite_test.go index 57136a07cab..2e1c77034c8 100644 --- a/go/ast/astutil/rewrite_test.go +++ b/go/ast/astutil/rewrite_test.go @@ -244,7 +244,6 @@ func vardecl(name, typ string) *ast.GenDecl { func TestRewrite(t *testing.T) { t.Run("*", func(t *testing.T) { for _, test := range rewriteTests { - test := test t.Run(test.name, func(t *testing.T) { t.Parallel() fset := token.NewFileSet() diff --git a/go/buildutil/allpackages.go b/go/buildutil/allpackages.go index dfb8cd6c7b0..32886a7175f 100644 --- a/go/buildutil/allpackages.go +++ b/go/buildutil/allpackages.go @@ -52,7 +52,6 @@ func ForEachPackage(ctxt *build.Context, found func(importPath string, err error var wg sync.WaitGroup for _, root := range ctxt.SrcDirs() { - root := root wg.Add(1) go func() { allPackages(ctxt, root, ch) @@ -107,7 +106,6 @@ func allPackages(ctxt *build.Context, root string, ch chan<- item) { ch <- item{pkg, err} } for _, fi := range files { - fi := fi if fi.IsDir() { wg.Add(1) go func() { diff --git a/go/callgraph/cha/cha_test.go b/go/callgraph/cha/cha_test.go index 7795cb44de0..922541d6c56 100644 --- a/go/callgraph/cha/cha_test.go +++ b/go/callgraph/cha/cha_test.go @@ -40,7 +40,7 @@ var inputs = []string{ func expectation(f *ast.File) (string, token.Pos) { for _, c := range f.Comments { text := strings.TrimSpace(c.Text()) - if t := strings.TrimPrefix(text, "WANT:\n"); t != text { + if t, ok := strings.CutPrefix(text, "WANT:\n"); ok { return t, c.Pos() } } diff --git a/go/callgraph/rta/rta_test.go b/go/callgraph/rta/rta_test.go index 6b16484245b..8cfc73ee4db 100644 --- a/go/callgraph/rta/rta_test.go +++ b/go/callgraph/rta/rta_test.go @@ -105,7 +105,7 @@ func check(t *testing.T, f *ast.File, pkg *ssa.Package, res *rta.Result) { expectation := func(f *ast.File) (string, int) { for _, c := range f.Comments { text := strings.TrimSpace(c.Text()) - if t := strings.TrimPrefix(text, "WANT:\n"); t != text { + if t, ok := strings.CutPrefix(text, "WANT:\n"); ok { return t, tokFile.Line(c.Pos()) } } @@ -134,7 +134,7 @@ func check(t *testing.T, f *ast.File, pkg *ssa.Package, res *rta.Result) { // A leading "!" negates the assertion. 
sense := true - if rest := strings.TrimPrefix(line, "!"); rest != line { + if rest, ok := strings.CutPrefix(line, "!"); ok { sense = false line = strings.TrimSpace(rest) if line == "" { diff --git a/go/callgraph/vta/helpers_test.go b/go/callgraph/vta/helpers_test.go index 59a9277f759..be5e756dcd5 100644 --- a/go/callgraph/vta/helpers_test.go +++ b/go/callgraph/vta/helpers_test.go @@ -28,7 +28,7 @@ import ( func want(f *ast.File) []string { for _, c := range f.Comments { text := strings.TrimSpace(c.Text()) - if t := strings.TrimPrefix(text, "WANT:\n"); t != text { + if t, ok := strings.CutPrefix(text, "WANT:\n"); ok { return strings.Split(t, "\n") } } diff --git a/go/callgraph/vta/internal/trie/op_test.go b/go/callgraph/vta/internal/trie/op_test.go index b4610d55c22..535e7ac2775 100644 --- a/go/callgraph/vta/internal/trie/op_test.go +++ b/go/callgraph/vta/internal/trie/op_test.go @@ -12,6 +12,7 @@ import ( "time" "golang.org/x/tools/go/callgraph/vta/internal/trie" + "maps" ) // This file tests trie.Map by cross checking operations on a collection of @@ -189,12 +190,8 @@ func (c builtinCollection) Intersect(l int, r int) { func (c builtinCollection) Merge(l int, r int) { result := map[uint64]any{} - for k, v := range c[r] { - result[k] = v - } - for k, v := range c[l] { - result[k] = v - } + maps.Copy(result, c[r]) + maps.Copy(result, c[l]) c[l] = result } @@ -217,9 +214,7 @@ func (c builtinCollection) Average(l int, r int) { func (c builtinCollection) Assign(l, r int) { m := map[uint64]any{} - for k, v := range c[r] { - m[k] = v - } + maps.Copy(m, c[r]) c[l] = m } @@ -232,7 +227,7 @@ func newTriesCollection(size int) *trieCollection { b: trie.NewBuilder(), tries: make([]trie.MutMap, size), } - for i := 0; i < size; i++ { + for i := range size { tc.tries[i] = tc.b.MutEmpty() } return tc @@ -240,7 +235,7 @@ func newTriesCollection(size int) *trieCollection { func newMapsCollection(size int) *builtinCollection { maps := make(builtinCollection, size) - for i := 0; i < size; i++ { + for i := range size { maps[i] = map[uint64]any{} } return &maps @@ -290,7 +285,7 @@ func (op operation) Apply(maps mapCollection) any { func distribution(dist map[opCode]int) []opCode { var codes []opCode for op, n := range dist { - for i := 0; i < n; i++ { + for range n { codes = append(codes, op) } } @@ -326,7 +321,7 @@ func randOperator(r *rand.Rand, opts options) operation { func randOperators(r *rand.Rand, numops int, opts options) []operation { ops := make([]operation, numops) - for i := 0; i < numops; i++ { + for i := range numops { ops[i] = randOperator(r, opts) } return ops diff --git a/go/gcexportdata/example_test.go b/go/gcexportdata/example_test.go index 852ba5a597c..d6d69a8aa54 100644 --- a/go/gcexportdata/example_test.go +++ b/go/gcexportdata/example_test.go @@ -15,6 +15,7 @@ import ( "log" "os" "path/filepath" + "slices" "strings" "golang.org/x/tools/go/gcexportdata" @@ -51,13 +52,7 @@ func ExampleRead() { // We can see all the names in Names. 
members := pkg.Scope().Names() - foundPrintln := false - for _, member := range members { - if member == "Println" { - foundPrintln = true - break - } - } + foundPrintln := slices.Contains(members, "Println") fmt.Print("Package members: ") if foundPrintln { fmt.Println("Println found") diff --git a/go/gcexportdata/gcexportdata.go b/go/gcexportdata/gcexportdata.go index 65fe2628e90..7b90bc92353 100644 --- a/go/gcexportdata/gcexportdata.go +++ b/go/gcexportdata/gcexportdata.go @@ -193,10 +193,7 @@ func Read(in io.Reader, fset *token.FileSet, imports map[string]*types.Package, return pkg, err default: - l := len(data) - if l > 10 { - l = 10 - } + l := min(len(data), 10) return nil, fmt.Errorf("unexpected export data with prefix %q for path %s", string(data[:l]), path) } } diff --git a/go/packages/external.go b/go/packages/external.go index 91bd62e83b1..f37bc651009 100644 --- a/go/packages/external.go +++ b/go/packages/external.go @@ -90,7 +90,7 @@ func findExternalDriver(cfg *Config) driver { const toolPrefix = "GOPACKAGESDRIVER=" tool := "" for _, env := range cfg.Env { - if val := strings.TrimPrefix(env, toolPrefix); val != env { + if val, ok := strings.CutPrefix(env, toolPrefix); ok { tool = val } } diff --git a/go/packages/overlay_test.go b/go/packages/overlay_test.go index 1108461926f..4a7cc68f4c7 100644 --- a/go/packages/overlay_test.go +++ b/go/packages/overlay_test.go @@ -10,6 +10,7 @@ import ( "os" "path/filepath" "reflect" + "slices" "sort" "testing" @@ -93,7 +94,7 @@ func testOverlayChangesBothPackageNames(t *testing.T, exporter packagestest.Expo if len(initial) != 3 { t.Fatalf("expected 3 packages, got %v", len(initial)) } - for i := 0; i < 3; i++ { + for i := range 3 { if ok := checkPkg(t, initial[i], want[i].id, want[i].name, want[i].count); !ok { t.Errorf("%d: got {%s %s %d}, expected %v", i, initial[i].ID, initial[i].Name, len(initial[i].Syntax), want[i]) @@ -139,7 +140,7 @@ func testOverlayChangesTestPackageName(t *testing.T, exporter packagestest.Expor if len(initial) != 3 { t.Fatalf("expected 3 packages, got %v", len(initial)) } - for i := 0; i < 3; i++ { + for i := range 3 { if ok := checkPkg(t, initial[i], want[i].id, want[i].name, want[i].count); !ok { t.Errorf("got {%s %s %d}, expected %v", initial[i].ID, initial[i].Name, len(initial[i].Syntax), want[i]) @@ -824,11 +825,8 @@ func testInvalidFilesBeforeOverlayContains(t *testing.T, exporter packagestest.E t.Fatalf("expected package ID %q, got %q", tt.wantID, pkg.ID) } var containsFile bool - for _, goFile := range pkg.CompiledGoFiles { - if f == goFile { - containsFile = true - break - } + if slices.Contains(pkg.CompiledGoFiles, f) { + containsFile = true } if !containsFile { t.Fatalf("expected %s in CompiledGoFiles, got %v", f, pkg.CompiledGoFiles) @@ -1054,7 +1052,7 @@ func TestOverlaysInReplace(t *testing.T) { if err := os.Mkdir(dirB, 0775); err != nil { t.Fatal(err) } - if err := os.WriteFile(filepath.Join(dirB, "go.mod"), []byte(fmt.Sprintf("module %s.com", dirB)), 0775); err != nil { + if err := os.WriteFile(filepath.Join(dirB, "go.mod"), fmt.Appendf(nil, "module %s.com", dirB), 0775); err != nil { t.Fatal(err) } if err := os.MkdirAll(filepath.Join(dirB, "inner"), 0775); err != nil { diff --git a/go/packages/packages_test.go b/go/packages/packages_test.go index 5678b265561..ae3cbb6bb2b 100644 --- a/go/packages/packages_test.go +++ b/go/packages/packages_test.go @@ -20,6 +20,7 @@ import ( "path/filepath" "reflect" "runtime" + "slices" "sort" "strings" "testing" @@ -387,7 +388,7 @@ func 
TestLoadArgumentListIsNotTooLong(t *testing.T) { defer exported.Cleanup() numOfPatterns := argMax/16 + 1 // the pattern below is approx. 16 chars patterns := make([]string, numOfPatterns) - for i := 0; i < numOfPatterns; i++ { + for i := range numOfPatterns { patterns[i] = fmt.Sprintf("golang.org/mod/p%d", i) } // patterns have more than argMax number of chars combined with whitespaces b/w patterns @@ -1610,7 +1611,7 @@ EOF defer os.Setenv(pathKey, oldPath) // Clone exported.Config config := exported.Config - config.Env = append([]string{}, exported.Config.Env...) + config.Env = slices.Clone(exported.Config.Env) config.Env = append(config.Env, "GOPACKAGESDRIVER="+test.driver) pkgs, err := packages.Load(exported.Config, "golist") if err != nil { @@ -1978,7 +1979,6 @@ func testCgoNoSyntax(t *testing.T, exporter packagestest.Exporter) { packages.NeedName | packages.NeedImports, } for _, mode := range modes { - mode := mode t.Run(fmt.Sprint(mode), func(t *testing.T) { exported.Config.Mode = mode pkgs, err := packages.Load(exported.Config, "golang.org/fake/c") @@ -2787,7 +2787,7 @@ func main() { t.Fatal(err) } - exported.Config.Env = append(append([]string{}, baseEnv...), "GOPACKAGESDRIVER="+emptyDriverPath) + exported.Config.Env = append(slices.Clone(baseEnv), "GOPACKAGESDRIVER="+emptyDriverPath) initial, err := packages.Load(exported.Config, "golang.org/fake/a") if err != nil { t.Fatal(err) @@ -2807,7 +2807,7 @@ func main() { t.Fatal(err) } - exported.Config.Env = append(append([]string{}, baseEnv...), "GOPACKAGESDRIVER="+notHandledDriverPath) + exported.Config.Env = append(slices.Clone(baseEnv), "GOPACKAGESDRIVER="+notHandledDriverPath) initial, err = packages.Load(exported.Config, "golang.org/fake/a") if err != nil { t.Fatal(err) diff --git a/go/packages/packagestest/export.go b/go/packages/packagestest/export.go index 4ac4967b46b..86da99ecdf3 100644 --- a/go/packages/packagestest/export.go +++ b/go/packages/packagestest/export.go @@ -159,7 +159,6 @@ var All = []Exporter{GOPATH, Modules} func TestAll(t *testing.T, f func(*testing.T, Exporter)) { t.Helper() for _, e := range All { - e := e // in case f calls t.Parallel t.Run(e.Name(), func(t *testing.T) { t.Helper() f(t, e) @@ -173,7 +172,6 @@ func TestAll(t *testing.T, f func(*testing.T, Exporter)) { func BenchmarkAll(b *testing.B, f func(*testing.B, Exporter)) { b.Helper() for _, e := range All { - e := e // in case f calls t.Parallel b.Run(e.Name(), func(b *testing.B) { b.Helper() f(b, e) diff --git a/go/ssa/builder_test.go b/go/ssa/builder_test.go index 2589cc82bb6..a48723bd271 100644 --- a/go/ssa/builder_test.go +++ b/go/ssa/builder_test.go @@ -613,7 +613,6 @@ var indirect = R[int].M "(p.S[int]).M[int]", }, } { - entry := entry t.Run(entry.name, func(t *testing.T) { v := p.Var(entry.name) if v == nil { @@ -1011,7 +1010,6 @@ func TestGo117Builtins(t *testing.T) { } for _, tc := range tests { - tc := tc t.Run(tc.name, func(t *testing.T) { t.Parallel() fset := token.NewFileSet() @@ -1466,7 +1464,6 @@ func TestBuildPackageGo120(t *testing.T) { } for _, tc := range tests { - tc := tc t.Run(tc.name, func(t *testing.T) { t.Parallel() fset := token.NewFileSet() diff --git a/go/ssa/dom.go b/go/ssa/dom.go index f490986140c..78f651c8ee9 100644 --- a/go/ssa/dom.go +++ b/go/ssa/dom.go @@ -22,6 +22,7 @@ import ( "fmt" "math/big" "os" + "slices" "sort" ) @@ -43,7 +44,7 @@ func (b *BasicBlock) Dominates(c *BasicBlock) bool { // DomPreorder returns a new slice containing the blocks of f // in a preorder traversal of the dominator tree. 
func (f *Function) DomPreorder() []*BasicBlock { - slice := append([]*BasicBlock(nil), f.Blocks...) + slice := slices.Clone(f.Blocks) sort.Slice(slice, func(i, j int) bool { return slice[i].dom.pre < slice[j].dom.pre }) @@ -54,7 +55,7 @@ func (f *Function) DomPreorder() []*BasicBlock { // in a postorder traversal of the dominator tree. // (This is not the same as a postdominance order.) func (f *Function) DomPostorder() []*BasicBlock { - slice := append([]*BasicBlock(nil), f.Blocks...) + slice := slices.Clone(f.Blocks) sort.Slice(slice, func(i, j int) bool { return slice[i].dom.post < slice[j].dom.post }) @@ -277,8 +278,8 @@ func sanityCheckDomTree(f *Function) { // Check the entire relation. O(n^2). // The Recover block (if any) must be treated specially so we skip it. ok := true - for i := 0; i < n; i++ { - for j := 0; j < n; j++ { + for i := range n { + for j := range n { b, c := f.Blocks[i], f.Blocks[j] if c == f.Recover { continue diff --git a/go/ssa/emit.go b/go/ssa/emit.go index bca79adc4e1..e53ebf5a7fd 100644 --- a/go/ssa/emit.go +++ b/go/ssa/emit.go @@ -496,7 +496,7 @@ func emitTailCall(f *Function, call *Call) { case 1: ret.Results = []Value{tuple} default: - for i := 0; i < nr; i++ { + for i := range nr { v := emitExtract(f, tuple, i) // TODO(adonovan): in principle, this is required: // v = emitConv(f, o.Type, f.Signature.Results[i].Type) diff --git a/go/ssa/instantiate.go b/go/ssa/instantiate.go index 2512f32976c..20a0986e6d3 100644 --- a/go/ssa/instantiate.go +++ b/go/ssa/instantiate.go @@ -7,6 +7,7 @@ package ssa import ( "fmt" "go/types" + "slices" "sync" ) @@ -122,10 +123,5 @@ func (prog *Program) isParameterized(ts ...types.Type) bool { // handle the most common but shallow cases such as T, pkg.T, // *T without consulting the cache under the lock. - for _, t := range ts { - if prog.hasParams.Has(t) { - return true - } - } - return false + return slices.ContainsFunc(ts, prog.hasParams.Has) } diff --git a/go/ssa/interp/external.go b/go/ssa/interp/external.go index 2a3a7e5b79e..2fb683c07fe 100644 --- a/go/ssa/interp/external.go +++ b/go/ssa/interp/external.go @@ -9,6 +9,7 @@ package interp import ( "bytes" + "maps" "math" "os" "runtime" @@ -30,7 +31,7 @@ var externals = make(map[string]externalFn) func init() { // That little dot ۰ is an Arabic zero numeral (U+06F0), categories [Nd]. - for k, v := range map[string]externalFn{ + maps.Copy(externals, map[string]externalFn{ "(reflect.Value).Bool": ext۰reflect۰Value۰Bool, "(reflect.Value).CanAddr": ext۰reflect۰Value۰CanAddr, "(reflect.Value).CanInterface": ext۰reflect۰Value۰CanInterface, @@ -111,9 +112,7 @@ func init() { "strings.ToLower": ext۰strings۰ToLower, "time.Sleep": ext۰time۰Sleep, "unicode/utf8.DecodeRuneInString": ext۰unicode۰utf8۰DecodeRuneInString, - } { - externals[k] = v - } + }) } func ext۰bytes۰Equal(fr *frame, args []value) value { diff --git a/go/ssa/lift.go b/go/ssa/lift.go index 6138ca82e0e..d7c1bf5063e 100644 --- a/go/ssa/lift.go +++ b/go/ssa/lift.go @@ -374,7 +374,7 @@ func (s *blockSet) add(b *BasicBlock) bool { // returns its index, or returns -1 if empty. func (s *blockSet) take() int { l := s.BitLen() - for i := 0; i < l; i++ { + for i := range l { if s.Bit(i) == 1 { s.SetBit(&s.Int, i, 0) return i @@ -403,10 +403,8 @@ func liftAlloc(df domFrontier, alloc *Alloc, newPhis newPhiMap, fresh *int) bool // Don't lift result values in functions that defer // calls that may recover from panic. 
if fn := alloc.Parent(); fn.Recover != nil { - for _, nr := range fn.results { - if nr == alloc { - return false - } + if slices.Contains(fn.results, alloc) { + return false } } diff --git a/go/ssa/sanity.go b/go/ssa/sanity.go index 3b862992680..b11680a1e1d 100644 --- a/go/ssa/sanity.go +++ b/go/ssa/sanity.go @@ -14,6 +14,7 @@ import ( "go/types" "io" "os" + "slices" "strings" ) @@ -119,13 +120,7 @@ func (s *sanity) checkInstr(idx int, instr Instruction) { case *Alloc: if !instr.Heap { - found := false - for _, l := range s.fn.Locals { - if l == instr { - found = true - break - } - } + found := slices.Contains(s.fn.Locals, instr) if !found { s.errorf("local alloc %s = %s does not appear in Function.Locals", instr.Name(), instr) } @@ -282,13 +277,7 @@ func (s *sanity) checkBlock(b *BasicBlock, index int) { // Check predecessor and successor relations are dual, // and that all blocks in CFG belong to same function. for _, a := range b.Preds { - found := false - for _, bb := range a.Succs { - if bb == b { - found = true - break - } - } + found := slices.Contains(a.Succs, b) if !found { s.errorf("expected successor edge in predecessor %s; found only: %s", a, a.Succs) } @@ -297,13 +286,7 @@ func (s *sanity) checkBlock(b *BasicBlock, index int) { } } for _, c := range b.Succs { - found := false - for _, bb := range c.Preds { - if bb == b { - found = true - break - } - } + found := slices.Contains(c.Preds, b) if !found { s.errorf("expected predecessor edge in successor %s; found only: %s", c, c.Preds) } diff --git a/go/ssa/subst.go b/go/ssa/subst.go index bbe5796d703..b4ea16854ea 100644 --- a/go/ssa/subst.go +++ b/go/ssa/subst.go @@ -266,7 +266,7 @@ func (subst *subster) interface_(iface *types.Interface) *types.Interface { var methods []*types.Func initMethods := func(n int) { // copy first n explicit methods methods = make([]*types.Func, iface.NumExplicitMethods()) - for i := 0; i < n; i++ { + for i := range n { f := iface.ExplicitMethod(i) norecv := changeRecv(f.Type().(*types.Signature), nil) methods[i] = types.NewFunc(f.Pos(), f.Pkg(), f.Name(), norecv) @@ -290,7 +290,7 @@ func (subst *subster) interface_(iface *types.Interface) *types.Interface { var embeds []types.Type initEmbeds := func(n int) { // copy first n embedded types embeds = make([]types.Type, iface.NumEmbeddeds()) - for i := 0; i < n; i++ { + for i := range n { embeds[i] = iface.EmbeddedType(i) } } diff --git a/go/ssa/util.go b/go/ssa/util.go index 9a73984a6a0..e53b31ff3bb 100644 --- a/go/ssa/util.go +++ b/go/ssa/util.go @@ -385,7 +385,7 @@ func (m *typeListMap) hash(ts []types.Type) uint32 { // Some smallish prime far away from typeutil.Hash. 
n := len(ts) h := uint32(13619) + 2*uint32(n) - for i := 0; i < n; i++ { + for i := range n { h += 3 * m.hasher.Hash(ts[i]) } return h diff --git a/godoc/index.go b/godoc/index.go index 05a1a9441ee..853337715c1 100644 --- a/godoc/index.go +++ b/godoc/index.go @@ -65,6 +65,7 @@ import ( "golang.org/x/tools/godoc/util" "golang.org/x/tools/godoc/vfs" + "maps" ) // ---------------------------------------------------------------------------- @@ -862,9 +863,7 @@ func (x *Indexer) indexGoFile(dirname string, filename string, file *token.File, dest = make(map[string]SpotKind) x.exports[pkgPath] = dest } - for k, v := range x.curPkgExports { - dest[k] = v - } + maps.Copy(dest, x.curPkgExports) } } @@ -1069,7 +1068,7 @@ func (c *Corpus) NewIndex() *Index { // convert alist into a map of alternative spellings alts := make(map[string]*AltWords) - for i := 0; i < len(alist); i++ { + for i := range alist { a := alist[i].(*AltWords) alts[a.Canon] = a } diff --git a/godoc/snippet.go b/godoc/snippet.go index 1750478606e..43c1899a093 100644 --- a/godoc/snippet.go +++ b/godoc/snippet.go @@ -14,6 +14,7 @@ import ( "fmt" "go/ast" "go/token" + "slices" ) type Snippet struct { @@ -41,10 +42,8 @@ func findSpec(list []ast.Spec, id *ast.Ident) ast.Spec { return s } case *ast.ValueSpec: - for _, n := range s.Names { - if n == id { - return s - } + if slices.Contains(s.Names, id) { + return s } case *ast.TypeSpec: if s.Name == id { diff --git a/godoc/static/gen_test.go b/godoc/static/gen_test.go index 1f1c62e0e9c..7b7668a558c 100644 --- a/godoc/static/gen_test.go +++ b/godoc/static/gen_test.go @@ -39,7 +39,7 @@ to see the differences.`) // TestAppendQuote ensures that AppendQuote produces a valid literal. func TestAppendQuote(t *testing.T) { var in, out bytes.Buffer - for r := rune(0); r < unicode.MaxRune; r++ { + for r := range unicode.MaxRune { in.WriteRune(r) } appendQuote(&out, in.Bytes()) diff --git a/godoc/versions_test.go b/godoc/versions_test.go index a021616ba11..7b822f69b51 100644 --- a/godoc/versions_test.go +++ b/godoc/versions_test.go @@ -6,6 +6,7 @@ package godoc import ( "go/build" + "slices" "testing" "golang.org/x/tools/internal/testenv" @@ -102,12 +103,7 @@ func TestParseVersionRow(t *testing.T) { // hasTag checks whether a given release tag is contained in the current version // of the go binary. 
func hasTag(t string) bool { - for _, v := range build.Default.ReleaseTags { - if t == v { - return true - } - } - return false + return slices.Contains(build.Default.ReleaseTags, t) } func TestAPIVersion(t *testing.T) { diff --git a/godoc/vfs/os.go b/godoc/vfs/os.go index 35d050946e6..fe21a58662e 100644 --- a/godoc/vfs/os.go +++ b/godoc/vfs/os.go @@ -12,6 +12,7 @@ import ( pathpkg "path" "path/filepath" "runtime" + "slices" ) // We expose a new variable because otherwise we need to copy the findGOROOT logic again @@ -45,10 +46,8 @@ type osFS struct { func isGoPath(path string) bool { for _, bp := range filepath.SplitList(build.Default.GOPATH) { - for _, gp := range filepath.SplitList(path) { - if bp == gp { - return true - } + if slices.Contains(filepath.SplitList(path), bp) { + return true } } return false diff --git a/godoc/vfs/zipfs/zipfs_test.go b/godoc/vfs/zipfs/zipfs_test.go index cb000361745..3e5a8034a5b 100644 --- a/godoc/vfs/zipfs/zipfs_test.go +++ b/godoc/vfs/zipfs/zipfs_test.go @@ -172,7 +172,7 @@ func TestZipFSOpenSeek(t *testing.T) { defer f.Close() // test Seek() multiple times - for i := 0; i < 3; i++ { + for range 3 { all, err := io.ReadAll(f) if err != nil { t.Error(err) diff --git a/internal/bisect/bisect.go b/internal/bisect/bisect.go index 5a7da4871a8..7b1d112a7cd 100644 --- a/internal/bisect/bisect.go +++ b/internal/bisect/bisect.go @@ -320,7 +320,7 @@ func AppendMarker(dst []byte, id uint64) []byte { const prefix = "[bisect-match 0x" var buf [len(prefix) + 16 + 1]byte copy(buf[:], prefix) - for i := 0; i < 16; i++ { + for i := range 16 { buf[len(prefix)+i] = "0123456789abcdef"[id>>60] id <<= 4 } @@ -504,7 +504,7 @@ func fnvString(h uint64, x string) uint64 { } func fnvUint64(h uint64, x uint64) uint64 { - for i := 0; i < 8; i++ { + for range 8 { h ^= uint64(x & 0xFF) x >>= 8 h *= prime64 @@ -513,7 +513,7 @@ func fnvUint64(h uint64, x uint64) uint64 { } func fnvUint32(h uint64, x uint32) uint64 { - for i := 0; i < 4; i++ { + for range 4 { h ^= uint64(x & 0xFF) x >>= 8 h *= prime64 diff --git a/internal/diff/diff.go b/internal/diff/diff.go index a13547b7a7e..c12bdfd2acd 100644 --- a/internal/diff/diff.go +++ b/internal/diff/diff.go @@ -7,6 +7,7 @@ package diff import ( "fmt" + "slices" "sort" "strings" ) @@ -64,7 +65,7 @@ func ApplyBytes(src []byte, edits []Edit) ([]byte, error) { // It may return a different slice. func validate(src string, edits []Edit) ([]Edit, int, error) { if !sort.IsSorted(editsSort(edits)) { - edits = append([]Edit(nil), edits...) 
+ edits = slices.Clone(edits) SortEdits(edits) } diff --git a/internal/diff/diff_test.go b/internal/diff/diff_test.go index 77a20baf272..9e2a1d23997 100644 --- a/internal/diff/diff_test.go +++ b/internal/diff/diff_test.go @@ -61,7 +61,7 @@ func TestNEdits(t *testing.T) { func TestNRandom(t *testing.T) { rand.Seed(1) - for i := 0; i < 1000; i++ { + for i := range 1000 { a := randstr("abω", 16) b := randstr("abωc", 16) edits := diff.Strings(a, b) @@ -200,7 +200,7 @@ func TestRegressionOld002(t *testing.T) { func randstr(s string, n int) string { src := []rune(s) x := make([]rune, n) - for i := 0; i < n; i++ { + for i := range n { x[i] = src[rand.Intn(len(src))] } return string(x) diff --git a/internal/diff/lcs/common_test.go b/internal/diff/lcs/common_test.go index f19245e404c..68f4485fdb8 100644 --- a/internal/diff/lcs/common_test.go +++ b/internal/diff/lcs/common_test.go @@ -7,6 +7,7 @@ package lcs import ( "log" "math/rand" + "slices" "strings" "testing" ) @@ -72,10 +73,8 @@ func check(t *testing.T, str string, lcs lcs, want []string) { got.WriteString(str[dd.X : dd.X+dd.Len]) } ans := got.String() - for _, w := range want { - if ans == w { - return - } + if slices.Contains(want, ans) { + return } t.Fatalf("str=%q lcs=%v want=%q got=%q", str, lcs, want, ans) } @@ -109,7 +108,7 @@ func lcslen(l lcs) int { func randstr(s string, n int) string { src := []rune(s) x := make([]rune, n) - for i := 0; i < n; i++ { + for i := range n { x[i] = src[rand.Intn(len(src))] } return string(x) diff --git a/internal/diff/lcs/old.go b/internal/diff/lcs/old.go index 7c74b47bb1c..c0d43a6c2c7 100644 --- a/internal/diff/lcs/old.go +++ b/internal/diff/lcs/old.go @@ -377,10 +377,7 @@ func (e *editGraph) twoDone(df, db int) (int, bool) { if (df+db+e.delta)%2 != 0 { return 0, false // diagonals cannot overlap } - kmin := -db + e.delta - if -df > kmin { - kmin = -df - } + kmin := max(-df, -db+e.delta) kmax := db + e.delta if df < kmax { kmax = df diff --git a/internal/diff/lcs/old_test.go b/internal/diff/lcs/old_test.go index ddc3bde0ed2..2eac1af6d2f 100644 --- a/internal/diff/lcs/old_test.go +++ b/internal/diff/lcs/old_test.go @@ -107,7 +107,7 @@ func TestRegressionOld003(t *testing.T) { func TestRandOld(t *testing.T) { rand.Seed(1) - for i := 0; i < 1000; i++ { + for i := range 1000 { // TODO(adonovan): use ASCII and bytesSeqs here? The use of // non-ASCII isn't relevant to the property exercised by the test. a := []rune(randstr("abω", 16)) @@ -186,7 +186,7 @@ func genBench(set string, n int) []struct{ before, after string } { // before and after differing at least once, and about 5% rand.Seed(3) var ans []struct{ before, after string } - for i := 0; i < 24; i++ { + for range 24 { // maybe b should have an approximately known number of diffs a := randstr(set, n) cnt := 0 diff --git a/internal/diff/ndiff.go b/internal/diff/ndiff.go index fbef4d730c5..a2eef26ac77 100644 --- a/internal/diff/ndiff.go +++ b/internal/diff/ndiff.go @@ -72,7 +72,7 @@ func diffRunes(before, after []rune) []Edit { func runes(bytes []byte) []rune { n := utf8.RuneCount(bytes) runes := make([]rune, n) - for i := 0; i < n; i++ { + for i := range n { r, sz := utf8.DecodeRune(bytes) bytes = bytes[sz:] runes[i] = r diff --git a/internal/diffp/diff.go b/internal/diffp/diff.go index aa5ef81ac2e..54ab0888482 100644 --- a/internal/diffp/diff.go +++ b/internal/diffp/diff.go @@ -119,10 +119,7 @@ func Diff(oldName string, old []byte, newName string, new []byte) []byte { // End chunk with common lines for context. 
if len(ctext) > 0 { - n := end.x - start.x - if n > C { - n = C - } + n := min(end.x-start.x, C) for _, s := range x[start.x : start.x+n] { ctext = append(ctext, " "+s) count.x++ @@ -237,7 +234,7 @@ func tgs(x, y []string) []pair { for i := range T { T[i] = n + 1 } - for i := 0; i < n; i++ { + for i := range n { k := sort.Search(n, func(k int) bool { return T[k] >= J[i] }) diff --git a/internal/event/label/label.go b/internal/event/label/label.go index 7c00ca2a6da..92a39105731 100644 --- a/internal/event/label/label.go +++ b/internal/event/label/label.go @@ -8,6 +8,7 @@ import ( "fmt" "io" "reflect" + "slices" "unsafe" ) @@ -154,10 +155,8 @@ func (f *filter) Valid(index int) bool { func (f *filter) Label(index int) Label { l := f.underlying.Label(index) - for _, f := range f.keys { - if l.Key() == f { - return Label{} - } + if slices.Contains(f.keys, l.Key()) { + return Label{} } return l } diff --git a/internal/gcimporter/gcimporter_test.go b/internal/gcimporter/gcimporter_test.go index 9b38a0e1e28..9dc65fa19f6 100644 --- a/internal/gcimporter/gcimporter_test.go +++ b/internal/gcimporter/gcimporter_test.go @@ -672,7 +672,7 @@ func TestIssue15517(t *testing.T) { // file and package path are different, exposing the problem if present. // The same issue occurs with vendoring.) imports := make(map[string]*types.Package) - for i := 0; i < 3; i++ { + for range 3 { if _, err := gcimporter.Import(token.NewFileSet(), imports, "./././testdata/p", tmpdir, nil); err != nil { t.Fatal(err) } @@ -785,7 +785,7 @@ type K = StillBad[string] // Use the interface instances concurrently. for _, inst := range insts { var wg sync.WaitGroup - for i := 0; i < 2; i++ { + for range 2 { wg.Add(1) go func() { defer wg.Done() diff --git a/internal/gcimporter/iexport.go b/internal/gcimporter/iexport.go index 48e90b29ded..780873e3ae7 100644 --- a/internal/gcimporter/iexport.go +++ b/internal/gcimporter/iexport.go @@ -236,6 +236,7 @@ import ( "io" "math/big" "reflect" + "slices" "sort" "strconv" "strings" @@ -465,7 +466,7 @@ func (p *iexporter) encodeFile(w *intWriter, file *token.File, needed []uint64) w.uint64(size) // Sort the set of needed offsets. Duplicates are harmless. 
- sort.Slice(needed, func(i, j int) bool { return needed[i] < needed[j] }) + slices.Sort(needed) lines := file.Lines() // byte offset of each line start w.uint64(uint64(len(lines))) @@ -819,7 +820,7 @@ func (p *iexporter) doDecl(obj types.Object) { n := named.NumMethods() w.uint64(uint64(n)) - for i := 0; i < n; i++ { + for i := range n { m := named.Method(i) w.pos(m.Pos()) w.string(m.Name()) @@ -1096,7 +1097,7 @@ func (w *exportWriter) doTyp(t types.Type, pkg *types.Package) { w.pkg(fieldPkg) w.uint64(uint64(n)) - for i := 0; i < n; i++ { + for i := range n { f := t.Field(i) if w.p.shallow { w.objectPath(f) @@ -1145,7 +1146,7 @@ func (w *exportWriter) doTyp(t types.Type, pkg *types.Package) { w.startType(unionType) nt := t.Len() w.uint64(uint64(nt)) - for i := 0; i < nt; i++ { + for i := range nt { term := t.Term(i) w.bool(term.Tilde()) w.typ(term.Type(), pkg) @@ -1274,7 +1275,7 @@ func tparamName(exportName string) string { func (w *exportWriter) paramList(tup *types.Tuple) { n := tup.Len() w.uint64(uint64(n)) - for i := 0; i < n; i++ { + for i := range n { w.param(tup.At(i)) } } diff --git a/internal/gcimporter/iimport.go b/internal/gcimporter/iimport.go index bc6c9741e7d..82e6c9d2dc1 100644 --- a/internal/gcimporter/iimport.go +++ b/internal/gcimporter/iimport.go @@ -16,6 +16,7 @@ import ( "go/types" "io" "math/big" + "slices" "sort" "strings" @@ -314,7 +315,7 @@ func iimportCommon(fset *token.FileSet, getPackages GetPackagesFunc, data []byte pkgs = pkgList[:1] // record all referenced packages as imports - list := append(([]*types.Package)(nil), pkgList[1:]...) + list := slices.Clone(pkgList[1:]) sort.Sort(byPath(list)) pkgs[0].SetImports(list) } diff --git a/internal/gocommand/invoke.go b/internal/gocommand/invoke.go index 7ea9013447b..58721202de7 100644 --- a/internal/gocommand/invoke.go +++ b/internal/gocommand/invoke.go @@ -141,7 +141,7 @@ func (runner *Runner) runPiped(ctx context.Context, inv Invocation, stdout, stde // Wait for all in-progress go commands to return before proceeding, // to avoid load concurrency errors. - for i := 0; i < maxInFlight; i++ { + for range maxInFlight { select { case <-ctx.Done(): return ctx.Err(), ctx.Err() diff --git a/internal/gopathwalk/walk.go b/internal/gopathwalk/walk.go index 984b79c2a07..5252144d046 100644 --- a/internal/gopathwalk/walk.go +++ b/internal/gopathwalk/walk.go @@ -14,6 +14,7 @@ import ( "os" "path/filepath" "runtime" + "slices" "strings" "sync" "time" @@ -195,10 +196,8 @@ func (w *walker) getIgnoredDirs(path string) []string { // shouldSkipDir reports whether the file should be skipped or not. func (w *walker) shouldSkipDir(dir string) bool { - for _, ignoredDir := range w.ignoredDirs { - if dir == ignoredDir { - return true - } + if slices.Contains(w.ignoredDirs, dir) { + return true } if w.skip != nil { // Check with the user specified callback. 
diff --git a/internal/imports/fix.go b/internal/imports/fix.go index c78d10f2d61..89b96381cdc 100644 --- a/internal/imports/fix.go +++ b/internal/imports/fix.go @@ -32,6 +32,7 @@ import ( "golang.org/x/tools/internal/gocommand" "golang.org/x/tools/internal/gopathwalk" "golang.org/x/tools/internal/stdlib" + "maps" ) // importToGroup is a list of functions which map from an import path to @@ -968,9 +969,7 @@ func (e *ProcessEnv) CopyConfig() *ProcessEnv { resolver: nil, Env: map[string]string{}, } - for k, v := range e.Env { - copy.Env[k] = v - } + maps.Copy(copy.Env, e.Env) return copy } @@ -1003,9 +1002,7 @@ func (e *ProcessEnv) init() error { if err := json.Unmarshal(stdout.Bytes(), &goEnv); err != nil { return err } - for k, v := range goEnv { - e.Env[k] = v - } + maps.Copy(e.Env, goEnv) e.initialized = true return nil } diff --git a/internal/imports/fix_test.go b/internal/imports/fix_test.go index 478313aec7f..5313956dd63 100644 --- a/internal/imports/fix_test.go +++ b/internal/imports/fix_test.go @@ -2912,7 +2912,7 @@ func _() { wg sync.WaitGroup ) wg.Add(n) - for i := 0; i < n; i++ { + for range n { go func() { defer wg.Done() _, err := t.process("foo.com", "p/first.go", nil, nil) @@ -2983,7 +2983,7 @@ func TestSymbolSearchStarvation(t *testing.T) { } var candidates []pkgDistance - for i := 0; i < candCount; i++ { + for i := range candCount { name := fmt.Sprintf("bar%d", i) candidates = append(candidates, pkgDistance{ pkg: &pkg{ diff --git a/internal/imports/mod.go b/internal/imports/mod.go index 8555e3f83da..df94ec8186e 100644 --- a/internal/imports/mod.go +++ b/internal/imports/mod.go @@ -13,6 +13,7 @@ import ( "path" "path/filepath" "regexp" + "slices" "sort" "strconv" "strings" @@ -150,8 +151,8 @@ func newModuleResolver(e *ProcessEnv, moduleCacheCache *DirInfoCache) (*ModuleRe Path: "", Dir: filepath.Join(filepath.Dir(goWork), "vendor"), } - r.modsByModPath = append(append([]*gocommand.ModuleJSON{}, mainModsVendor...), r.dummyVendorMod) - r.modsByDir = append(append([]*gocommand.ModuleJSON{}, mainModsVendor...), r.dummyVendorMod) + r.modsByModPath = append(slices.Clone(mainModsVendor), r.dummyVendorMod) + r.modsByDir = append(slices.Clone(mainModsVendor), r.dummyVendorMod) } } else { // Vendor mode is off, so run go list -m ... to find everything. diff --git a/internal/imports/mod_cache.go b/internal/imports/mod_cache.go index b1192696b28..b96c9d4bf71 100644 --- a/internal/imports/mod_cache.go +++ b/internal/imports/mod_cache.go @@ -128,7 +128,7 @@ func (d *DirInfoCache) ScanAndListen(ctx context.Context, listener cacheListener // are going to be. Setting an arbitrary limit makes it much easier. const maxInFlight = 10 sema := make(chan struct{}, maxInFlight) - for i := 0; i < maxInFlight; i++ { + for range maxInFlight { sema <- struct{}{} } @@ -156,7 +156,7 @@ func (d *DirInfoCache) ScanAndListen(ctx context.Context, listener cacheListener d.mu.Lock() delete(d.listeners, cookie) d.mu.Unlock() - for i := 0; i < maxInFlight; i++ { + for range maxInFlight { <-sema } } diff --git a/internal/imports/mod_test.go b/internal/imports/mod_test.go index 890dc1b2e25..2862e84d184 100644 --- a/internal/imports/mod_test.go +++ b/internal/imports/mod_test.go @@ -25,6 +25,8 @@ import ( "golang.org/x/tools/internal/proxydir" "golang.org/x/tools/internal/testenv" "golang.org/x/tools/txtar" + "maps" + "slices" ) // Tests that we can find packages in the stdlib. 
@@ -928,12 +930,7 @@ func scanToSlice(resolver Resolver, exclude []gopathwalk.RootType) ([]*pkg, erro var result []*pkg filter := &scanCallback{ rootFound: func(root gopathwalk.Root) bool { - for _, rt := range exclude { - if root.Type == rt { - return false - } - } - return true + return !slices.Contains(exclude, root.Type) }, dirFound: func(pkg *pkg) bool { return true @@ -1023,9 +1020,7 @@ func setup(t *testing.T, extraEnv map[string]string, main, wd string) *modTest { WorkingDir: filepath.Join(mainDir, wd), GocmdRunner: &gocommand.Runner{}, } - for k, v := range extraEnv { - env.Env[k] = v - } + maps.Copy(env.Env, extraEnv) if *testDebug { env.Logf = log.Printf } diff --git a/internal/imports/sortimports.go b/internal/imports/sortimports.go index da8194fd965..67c17bc4319 100644 --- a/internal/imports/sortimports.go +++ b/internal/imports/sortimports.go @@ -11,6 +11,7 @@ import ( "go/ast" "go/token" "log" + "slices" "sort" "strconv" ) @@ -30,7 +31,7 @@ func sortImports(localPrefix string, tokFile *token.File, f *ast.File) { if len(d.Specs) == 0 { // Empty import block, remove it. - f.Decls = append(f.Decls[:i], f.Decls[i+1:]...) + f.Decls = slices.Delete(f.Decls, i, i+1) } if !d.Lparen.IsValid() { @@ -91,7 +92,7 @@ func mergeImports(f *ast.File) { spec.(*ast.ImportSpec).Path.ValuePos = first.Pos() first.Specs = append(first.Specs, spec) } - f.Decls = append(f.Decls[:i], f.Decls[i+1:]...) + f.Decls = slices.Delete(f.Decls, i, i+1) i-- } } diff --git a/internal/modindex/lookup.go b/internal/modindex/lookup.go index 5499c5c67f3..bd605e0d763 100644 --- a/internal/modindex/lookup.go +++ b/internal/modindex/lookup.go @@ -120,7 +120,7 @@ func (ix *Index) Lookup(pkg, name string, prefix bool) []Candidate { px.Results = int16(n) if len(flds) >= 4 { sig := strings.Split(flds[3], " ") - for i := 0; i < len(sig); i++ { + for i := range sig { // $ cannot otherwise occur. removing the spaces // almost works, but for chan struct{}, e.g. 
sig[i] = strings.Replace(sig[i], "$", " ", -1) @@ -136,7 +136,7 @@ func (ix *Index) Lookup(pkg, name string, prefix bool) []Candidate { func toFields(sig []string) []Field { ans := make([]Field, len(sig)/2) - for i := 0; i < len(ans); i++ { + for i := range ans { ans[i] = Field{Arg: sig[2*i], Type: sig[2*i+1]} } return ans diff --git a/internal/packagestest/export.go b/internal/packagestest/export.go index ce992e17a90..4dd2b331736 100644 --- a/internal/packagestest/export.go +++ b/internal/packagestest/export.go @@ -155,7 +155,6 @@ var All = []Exporter{GOPATH, Modules} func TestAll(t *testing.T, f func(*testing.T, Exporter)) { t.Helper() for _, e := range All { - e := e // in case f calls t.Parallel t.Run(e.Name(), func(t *testing.T) { t.Helper() f(t, e) @@ -169,7 +168,6 @@ func TestAll(t *testing.T, f func(*testing.T, Exporter)) { func BenchmarkAll(b *testing.B, f func(*testing.B, Exporter)) { b.Helper() for _, e := range All { - e := e // in case f calls t.Parallel b.Run(e.Name(), func(b *testing.B) { b.Helper() f(b, e) diff --git a/internal/pkgbits/decoder.go b/internal/pkgbits/decoder.go index f6cb37c5c3d..c0aba26c482 100644 --- a/internal/pkgbits/decoder.go +++ b/internal/pkgbits/decoder.go @@ -259,7 +259,7 @@ func (r *Decoder) rawUvarint() uint64 { func readUvarint(r *strings.Reader) (uint64, error) { var x uint64 var s uint - for i := 0; i < binary.MaxVarintLen64; i++ { + for i := range binary.MaxVarintLen64 { b, err := r.ReadByte() if err != nil { if i > 0 && err == io.EOF { diff --git a/internal/proxydir/proxydir.go b/internal/proxydir/proxydir.go index dc6b6ae94e8..bbd1ab4fd26 100644 --- a/internal/proxydir/proxydir.go +++ b/internal/proxydir/proxydir.go @@ -46,7 +46,7 @@ func WriteModuleVersion(rootDir, module, ver string, files map[string][]byte) (r } // info file, just the bare bones. - infoContents := []byte(fmt.Sprintf(`{"Version": "%v", "Time":"2017-12-14T13:08:43Z"}`, ver)) + infoContents := fmt.Appendf(nil, `{"Version": "%v", "Time":"2017-12-14T13:08:43Z"}`, ver) if err := os.WriteFile(filepath.Join(dir, ver+".info"), infoContents, 0644); err != nil { return err } diff --git a/internal/refactor/inline/calleefx_test.go b/internal/refactor/inline/calleefx_test.go index 1fc16aebaac..b643c7a06ac 100644 --- a/internal/refactor/inline/calleefx_test.go +++ b/internal/refactor/inline/calleefx_test.go @@ -107,7 +107,6 @@ func TestCalleeEffects(t *testing.T) { }, } for _, test := range tests { - test := test t.Run(test.descr, func(t *testing.T) { fset := token.NewFileSet() mustParse := func(filename string, content any) *ast.File { diff --git a/internal/refactor/inline/everything_test.go b/internal/refactor/inline/everything_test.go index 12b9ba47f21..a32e0709be1 100644 --- a/internal/refactor/inline/everything_test.go +++ b/internal/refactor/inline/everything_test.go @@ -13,6 +13,7 @@ import ( "log" "os" "path/filepath" + "slices" "strings" "testing" @@ -193,7 +194,7 @@ func TestEverything(t *testing.T) { t.Fatalf("transformed source does not parse: %v", err) } // Splice into original file list. - syntax := append([]*ast.File(nil), callerPkg.Syntax...) 
+ syntax := slices.Clone(callerPkg.Syntax) for i := range callerPkg.Syntax { if syntax[i] == callerFile { syntax[i] = f diff --git a/internal/refactor/inline/inline.go b/internal/refactor/inline/inline.go index 7817444150e..edd5d836613 100644 --- a/internal/refactor/inline/inline.go +++ b/internal/refactor/inline/inline.go @@ -26,6 +26,7 @@ import ( internalastutil "golang.org/x/tools/internal/astutil" "golang.org/x/tools/internal/typeparams" "golang.org/x/tools/internal/typesinternal" + "maps" ) // A Caller describes the function call and its enclosing context. @@ -893,9 +894,7 @@ func (st *state) inlineCall() (*inlineCallResult, error) { elts = append(elts, arg.expr) pure = pure && arg.pure effects = effects || arg.effects - for k, v := range arg.freevars { - freevars[k] = v - } + maps.Copy(freevars, arg.freevars) } args = append(ordinary, &argument{ expr: &ast.CompositeLit{ diff --git a/internal/refactor/inline/inline_test.go b/internal/refactor/inline/inline_test.go index 611541c9677..a3934b5cd68 100644 --- a/internal/refactor/inline/inline_test.go +++ b/internal/refactor/inline/inline_test.go @@ -64,7 +64,6 @@ func TestData(t *testing.T) { t.Fatal(err) } for _, file := range files { - file := file t.Run(filepath.Base(file), func(t *testing.T) { t.Parallel() @@ -1794,7 +1793,6 @@ func TestRedundantConversions(t *testing.T) { func runTests(t *testing.T, tests []testcase) { for _, test := range tests { - test := test t.Run(test.descr, func(t *testing.T) { fset := token.NewFileSet() mustParse := func(filename string, content any) *ast.File { @@ -1885,7 +1883,7 @@ func runTests(t *testing.T, tests []testcase) { res, err := doIt() // Want error? - if rest := strings.TrimPrefix(test.want, "error: "); rest != test.want { + if rest, ok := strings.CutPrefix(test.want, "error: "); ok { if err == nil { t.Fatalf("unexpected success: want error matching %q", rest) } diff --git a/internal/testenv/testenv.go b/internal/testenv/testenv.go index 5c541b7b19b..fa53f37f7aa 100644 --- a/internal/testenv/testenv.go +++ b/internal/testenv/testenv.go @@ -149,7 +149,7 @@ func HasTool(tool string) error { func cgoEnabled(bypassEnvironment bool) (bool, error) { cmd := exec.Command("go", "env", "CGO_ENABLED") if bypassEnvironment { - cmd.Env = append(append([]string(nil), os.Environ()...), "CGO_ENABLED=") + cmd.Env = append(os.Environ(), "CGO_ENABLED=") } out, err := cmd.Output() if err != nil { @@ -251,8 +251,8 @@ func NeedsGoPackagesEnv(t testing.TB, env []string) { t.Helper() for _, v := range env { - if strings.HasPrefix(v, "GOPACKAGESDRIVER=") { - tool := strings.TrimPrefix(v, "GOPACKAGESDRIVER=") + if after, ok := strings.CutPrefix(v, "GOPACKAGESDRIVER="); ok { + tool := after if tool == "off" { NeedsTool(t, "go") } else { diff --git a/internal/typeparams/free.go b/internal/typeparams/free.go index 0ade5c2949e..709d2fc1447 100644 --- a/internal/typeparams/free.go +++ b/internal/typeparams/free.go @@ -70,7 +70,7 @@ func (w *Free) Has(typ types.Type) (res bool) { case *types.Tuple: n := t.Len() - for i := 0; i < n; i++ { + for i := range n { if w.Has(t.At(i).Type()) { return true } diff --git a/playground/socket/socket.go b/playground/socket/socket.go index 378edd4c3a5..c7843e59734 100644 --- a/playground/socket/socket.go +++ b/playground/socket/socket.go @@ -28,6 +28,7 @@ import ( "os/exec" "path/filepath" "runtime" + "slices" "strings" "time" "unicode/utf8" @@ -439,12 +440,7 @@ func (p *process) cmd(dir string, args ...string) *exec.Cmd { } func isNacl() bool { - for _, v := range append(Environ(), 
os.Environ()...) { - if v == "GOOS=nacl" { - return true - } - } - return false + return slices.Contains(append(Environ(), os.Environ()...), "GOOS=nacl") } // naclCmd returns an *exec.Cmd that executes bin under native client. diff --git a/playground/socket/socket_test.go b/playground/socket/socket_test.go index d410afea875..942f27e2af5 100644 --- a/playground/socket/socket_test.go +++ b/playground/socket/socket_test.go @@ -52,7 +52,7 @@ func TestLimiter(t *testing.T) { ch := make(chan *Message) go func() { var m Message - for i := 0; i < msgLimit+10; i++ { + for range msgLimit + 10 { ch <- &m } ch <- &Message{Kind: "end"} diff --git a/present/link.go b/present/link.go index ef96bf4ef6b..f6a8be1e693 100644 --- a/present/link.go +++ b/present/link.go @@ -86,10 +86,10 @@ func parseInlineLink(s string) (link string, length int) { // If the URL is http://foo.com, drop the http:// // In other words, render [[http://golang.org]] as: // golang.org - if strings.HasPrefix(rawURL, url.Scheme+"://") { - simpleURL = strings.TrimPrefix(rawURL, url.Scheme+"://") - } else if strings.HasPrefix(rawURL, url.Scheme+":") { - simpleURL = strings.TrimPrefix(rawURL, url.Scheme+":") + if after, ok := strings.CutPrefix(rawURL, url.Scheme+"://"); ok { + simpleURL = after + } else if after, ok := strings.CutPrefix(rawURL, url.Scheme+":"); ok { + simpleURL = after } } return renderLink(rawURL, simpleURL), end + 2 diff --git a/present/parse_test.go b/present/parse_test.go index dad57ea77ca..bb0fe72fad0 100644 --- a/present/parse_test.go +++ b/present/parse_test.go @@ -27,7 +27,6 @@ func TestTestdata(t *testing.T) { } files := append(filesP, filesMD...) for _, file := range files { - file := file name := filepath.Base(file) if name == "README" { continue diff --git a/refactor/eg/eg.go b/refactor/eg/eg.go index 15dfbd6ca0f..65a7f690bfd 100644 --- a/refactor/eg/eg.go +++ b/refactor/eg/eg.go @@ -14,6 +14,7 @@ import ( "go/printer" "go/token" "go/types" + "maps" "os" ) @@ -350,18 +351,10 @@ func stmtAndExpr(fn *ast.FuncDecl) ([]ast.Stmt, ast.Expr, error) { // mergeTypeInfo adds type info from src to dst. func mergeTypeInfo(dst, src *types.Info) { - for k, v := range src.Types { - dst.Types[k] = v - } - for k, v := range src.Defs { - dst.Defs[k] = v - } - for k, v := range src.Uses { - dst.Uses[k] = v - } - for k, v := range src.Selections { - dst.Selections[k] = v - } + maps.Copy(dst.Types, src.Types) + maps.Copy(dst.Defs, src.Defs) + maps.Copy(dst.Uses, src.Uses) + maps.Copy(dst.Selections, src.Selections) } // (debugging only) From e74d252b3d8b8c5eb5ba9f0bd475e3575b82f403 Mon Sep 17 00:00:00 2001 From: xieyuschen Date: Wed, 2 Apr 2025 21:39:18 -0600 Subject: [PATCH 166/270] gopls/internal/analysis/modernize: check nil before calling maybeNaN This CL adds a nil check before calling maybeNaN, as a blank identifier has no type and leads a panic. It fixes the panic in modernize minmax. 
Fixes golang/go#72928 Change-Id: I57d6da6b48d1c6d95057ca0f064896a935187be7 Reviewed-on: https://go-review.googlesource.com/c/tools/+/662195 LUCI-TryBot-Result: Go LUCI Commit-Queue: Alan Donovan Auto-Submit: Alan Donovan Reviewed-by: Alan Donovan Reviewed-by: Robert Findley --- gopls/internal/analysis/modernize/minmax.go | 11 ++++++----- .../analysis/modernize/testdata/src/minmax/minmax.go | 7 +++++++ .../modernize/testdata/src/minmax/minmax.go.golden | 7 +++++++ 3 files changed, 20 insertions(+), 5 deletions(-) diff --git a/gopls/internal/analysis/modernize/minmax.go b/gopls/internal/analysis/modernize/minmax.go index 415e9fc5661..0e43ee11c3d 100644 --- a/gopls/internal/analysis/modernize/minmax.go +++ b/gopls/internal/analysis/modernize/minmax.go @@ -178,11 +178,12 @@ func minmax(pass *analysis.Pass) { if compare, ok := ifStmt.Cond.(*ast.BinaryExpr); ok && ifStmt.Init == nil && isInequality(compare.Op) != 0 && - isAssignBlock(ifStmt.Body) && - !maybeNaN(info.TypeOf(ifStmt.Body.List[0].(*ast.AssignStmt).Lhs[0])) { // lhs - - // Have: if a < b { lhs = rhs } - check(astFile, curIfStmt, compare) + isAssignBlock(ifStmt.Body) { + // a blank var has no type. + if tLHS := info.TypeOf(ifStmt.Body.List[0].(*ast.AssignStmt).Lhs[0]); tLHS != nil && !maybeNaN(tLHS) { + // Have: if a < b { lhs = rhs } + check(astFile, curIfStmt, compare) + } } } } diff --git a/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go b/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go index cd117dabf84..cdc767450d2 100644 --- a/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go +++ b/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go @@ -149,3 +149,10 @@ func nopeFloat(a, b myfloat) (res myfloat) { } return } + +// Regression test for golang/go#72928. +func underscoreAssign(a, b int) { + if a > b { + _ = a + } +} diff --git a/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go.golden b/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go.golden index 23bfd6f9ecd..b7be86bf416 100644 --- a/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go.golden +++ b/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go.golden @@ -136,3 +136,10 @@ func nopeFloat(a, b myfloat) (res myfloat) { } return } + +// Regression test for golang/go#72928. +func underscoreAssign(a, b int) { + if a > b { + _ = a + } +} From 3348ae8f7b1211bbff61a149928d43d709a722ca Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Wed, 2 Apr 2025 17:05:01 -0400 Subject: [PATCH 167/270] go/analysis/passes/nilfunc: use typesinternal.Used Replace some logic for finding the types.Object of an expression with typesinternal.Used. This covers a case that was previously missed: instantiation of a qualified function, such as pkg.F[int]. 
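For illustration only (example.com/pkg and its generic function F are
hypothetical names, not part of this CL), the newly covered case looks
roughly like:

    package p

    import "example.com/pkg" // assume pkg declares: func F[T any]()

    func f() bool {
        // A declared function is never nil, so this comparison is always false.
        // The instantiated, package-qualified form pkg.F[int] was previously
        // missed; typesinternal.Used now resolves it to the *types.Func.
        return pkg.F[int] == nil
    }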
Change-Id: Ib4f8630d859d14c8e1dee792b9a49c5064fc8b61 Reviewed-on: https://go-review.googlesource.com/c/tools/+/662277 Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI --- go/analysis/passes/nilfunc/nilfunc.go | 20 ++------------------ 1 file changed, 2 insertions(+), 18 deletions(-) diff --git a/go/analysis/passes/nilfunc/nilfunc.go b/go/analysis/passes/nilfunc/nilfunc.go index 3ac2dcd4907..2b28c5a6b2c 100644 --- a/go/analysis/passes/nilfunc/nilfunc.go +++ b/go/analysis/passes/nilfunc/nilfunc.go @@ -16,7 +16,7 @@ import ( "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/internal/typesinternal" ) //go:embed doc.go @@ -55,24 +55,8 @@ func run(pass *analysis.Pass) (any, error) { return } - // Only want identifiers or selector expressions. - var obj types.Object - switch v := e2.(type) { - case *ast.Ident: - obj = pass.TypesInfo.Uses[v] - case *ast.SelectorExpr: - obj = pass.TypesInfo.Uses[v.Sel] - case *ast.IndexExpr, *ast.IndexListExpr: - // Check generic functions such as "f[T1,T2]". - x, _, _, _ := typeparams.UnpackIndexExpr(v) - if id, ok := x.(*ast.Ident); ok { - obj = pass.TypesInfo.Uses[id] - } - default: - return - } - // Only want functions. + obj := typesinternal.Used(pass.TypesInfo, e2) if _, ok := obj.(*types.Func); !ok { return } From c788d1715fc963b7127431746444f123cf035756 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Wed, 2 Apr 2025 12:15:39 -0400 Subject: [PATCH 168/270] gopls/internal/analysis/modernize: waitgroup: use index.Calls This CL changes the new waitgroup modernizer to use index.Calls to enumerate the calls to WaitGroup.Add directly, instead of searching for all "go" statements. This is an optimization, though only a minor one because go statements are already sufficiently rare that it doesn't matter. The real purpose of the change is to try to establish the form that we wish other modernizers (which may search for more numerous nodes) to follow. Also, remove check for uses of WaitGroup type itself. The modernizer's pattern doesn't depend on the type. Change-Id: Ie87a33b08b71764ced13f204e5e0b6e0ed35d58f Reviewed-on: https://go-review.googlesource.com/c/tools/+/662276 Auto-Submit: Alan Donovan Reviewed-by: Jonathan Amsterdam LUCI-TryBot-Result: Go LUCI Commit-Queue: Alan Donovan --- .../internal/analysis/modernize/modernize.go | 8 + .../internal/analysis/modernize/waitgroup.go | 143 ++++++++++-------- 2 files changed, 86 insertions(+), 65 deletions(-) diff --git a/gopls/internal/analysis/modernize/modernize.go b/gopls/internal/analysis/modernize/modernize.go index dbef72fe5cf..b7e943a0c51 100644 --- a/gopls/internal/analysis/modernize/modernize.go +++ b/gopls/internal/analysis/modernize/modernize.go @@ -135,6 +135,7 @@ func isIntLiteral(info *types.Info, e ast.Expr, n int64) bool { // // TODO(adonovan): opt: eliminate this function, instead following the // approach of [fmtappendf], which uses typeindex and [fileUses]. +// See "Tip" at [fileUses] for motivation. func filesUsing(inspect *inspector.Inspector, info *types.Info, version string) iter.Seq[cursor.Cursor] { return func(yield func(cursor.Cursor) bool) { for curFile := range cursor.Root(inspect).Children() { @@ -148,6 +149,13 @@ func filesUsing(inspect *inspector.Inspector, info *types.Info, version string) // fileUses reports whether the specified file uses at least the // specified version of Go (e.g. "go1.24"). 
+// +// Tip: we recommend using this check "late", just before calling +// pass.Report, rather than "early" (when entering each ast.File, or +// each candidate node of interest, during the traversal), because the +// operation is not free, yet is not a highly selective filter: the +// fraction of files that pass most version checks is high and +// increases over time. func fileUses(info *types.Info, file *ast.File, version string) bool { return !versions.Before(info.FileVersions[file], version) } diff --git a/gopls/internal/analysis/modernize/waitgroup.go b/gopls/internal/analysis/modernize/waitgroup.go index 37a12da5657..080bd4d362a 100644 --- a/gopls/internal/analysis/modernize/waitgroup.go +++ b/gopls/internal/analysis/modernize/waitgroup.go @@ -10,12 +10,9 @@ import ( "slices" "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" "golang.org/x/tools/internal/analysisinternal" typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" - "golang.org/x/tools/internal/astutil/cursor" "golang.org/x/tools/internal/typesinternal/typeindex" ) @@ -32,100 +29,116 @@ import ( // => // wg.Go(go func() { ... }) // -// The wg.Done must occur within the first statement of the block in a defer format or last statement of the block, -// and the offered fix only removes the first/last wg.Done call. It doesn't fix the existing wrong usage of sync.WaitGroup. +// The wg.Done must occur within the first statement of the block in a +// defer format or last statement of the block, and the offered fix +// only removes the first/last wg.Done call. It doesn't fix existing +// wrong usage of sync.WaitGroup. +// +// The use of WaitGroup.Go in pattern 1 implicitly introduces a +// 'defer', which may change the behavior in the case of panic from +// the "..." logic. In this instance, the change is safe: before and +// after the transformation, an unhandled panic inevitably results in +// a fatal crash. The fact that the transformed code calls wg.Done() +// before the crash doesn't materially change anything. (If Done had +// other effects, or blocked, or if WaitGroup.Go propagated panics +// from child to parent goroutine, the argument would be different.) func waitgroup(pass *analysis.Pass) { var ( - inspect = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index) info = pass.TypesInfo - syncWaitGroup = index.Object("sync", "WaitGroup") syncWaitGroupAdd = index.Selection("sync", "WaitGroup", "Add") syncWaitGroupDone = index.Selection("sync", "WaitGroup", "Done") ) - if !index.Used(syncWaitGroup, syncWaitGroupAdd, syncWaitGroupDone) { + if !index.Used(syncWaitGroupDone) { return } - checkWaitGroup := func(file *ast.File, curGostmt cursor.Cursor) { - gostmt := curGostmt.Node().(*ast.GoStmt) - - lit, ok := gostmt.Call.Fun.(*ast.FuncLit) - // go statement must have a no-arg function literal. - if !ok || len(gostmt.Call.Args) != 0 { - return + for curAddCall := range index.Calls(syncWaitGroupAdd) { + // Extract receiver from wg.Add call. + addCall := curAddCall.Node().(*ast.CallExpr) + if !isIntLiteral(info, addCall.Args[0], 1) { + continue // not a call to wg.Add(1) } + // Inv: the Args[0] check ensures addCall is not of + // the form sync.WaitGroup.Add(&wg, 1). + addCallRecv := ast.Unparen(addCall.Fun).(*ast.SelectorExpr).X - // previous node must call wg.Add. 
- prev, ok := curGostmt.PrevSibling() + // Following statement must be go func() { ... } (). + addStmt, ok := curAddCall.Parent().Node().(*ast.ExprStmt) if !ok { - return + continue // unnecessary parens? } - prevNode := prev.Node() - if !is[*ast.ExprStmt](prevNode) || !is[*ast.CallExpr](prevNode.(*ast.ExprStmt).X) { - return + curNext, ok := curAddCall.Parent().NextSibling() + if !ok { + continue // no successor } - - prevCall := prevNode.(*ast.ExprStmt).X.(*ast.CallExpr) - if typeutil.Callee(info, prevCall) != syncWaitGroupAdd || !isIntLiteral(info, prevCall.Args[0], 1) { - return + goStmt, ok := curNext.Node().(*ast.GoStmt) + if !ok { + continue // not a go stmt + } + lit, ok := goStmt.Call.Fun.(*ast.FuncLit) + if !ok || len(goStmt.Call.Args) != 0 { + continue // go argument is not func(){...}() } - - addCallRecv := ast.Unparen(prevCall.Fun).(*ast.SelectorExpr).X list := lit.Body.List if len(list) == 0 { - return + continue } + // Body must start with "defer wg.Done()" or end with "wg.Done()". var doneStmt ast.Stmt if deferStmt, ok := list[0].(*ast.DeferStmt); ok && typeutil.Callee(info, deferStmt.Call) == syncWaitGroupDone && equalSyntax(ast.Unparen(deferStmt.Call.Fun).(*ast.SelectorExpr).X, addCallRecv) { - // wg.Add(1); go func() { defer wg.Done(); ... }() - // --------- ------ --------------- - - // wg.Go(func() { ... } ) - doneStmt = deferStmt + doneStmt = deferStmt // "defer wg.Done()" + } else if lastStmt, ok := list[len(list)-1].(*ast.ExprStmt); ok { if doneCall, ok := lastStmt.X.(*ast.CallExpr); ok && typeutil.Callee(info, doneCall) == syncWaitGroupDone && equalSyntax(ast.Unparen(doneCall.Fun).(*ast.SelectorExpr).X, addCallRecv) { - // wg.Add(1); go func() { ... ;wg.Done();}() - // --------- ------ ---------- - - // wg.Go(func() { ... } ) - doneStmt = lastStmt + doneStmt = lastStmt // "wg.Done()" } } - if doneStmt != nil { - pass.Report(analysis.Diagnostic{ - Pos: prevNode.Pos(), - End: gostmt.End(), - Category: "waitgroup", - Message: "Goroutine creation can be simplified using WaitGroup.Go", - SuggestedFixes: []analysis.SuggestedFix{{ - Message: "Simplify by using WaitGroup.Go", - TextEdits: slices.Concat( - analysisinternal.DeleteStmt(pass.Fset, file, prevNode.(*ast.ExprStmt), nil), - analysisinternal.DeleteStmt(pass.Fset, file, doneStmt, nil), - []analysis.TextEdit{ - { - Pos: gostmt.Pos(), - End: gostmt.Call.Pos(), - NewText: fmt.Appendf(nil, "%s.Go(", addCallRecv), - }, - { - Pos: gostmt.Call.Lparen, - End: gostmt.Call.Rparen, - }, - }, - ), - }}, - }) + if doneStmt == nil { + continue } - } - for curFile := range filesUsing(inspect, info, "go1.25") { - for curGostmt := range curFile.Preorder((*ast.GoStmt)(nil)) { - checkWaitGroup(curFile.Node().(*ast.File), curGostmt) + file := enclosingFile(curAddCall) + if !fileUses(info, file, "go1.25") { + continue } + + pass.Report(analysis.Diagnostic{ + Pos: addCall.Pos(), + End: goStmt.End(), + Category: "waitgroup", + Message: "Goroutine creation can be simplified using WaitGroup.Go", + SuggestedFixes: []analysis.SuggestedFix{{ + Message: "Simplify by using WaitGroup.Go", + TextEdits: slices.Concat( + // delete "wg.Add(1)" + analysisinternal.DeleteStmt(pass.Fset, file, addStmt, nil), + // delete "wg.Done()" or "defer wg.Done()" + analysisinternal.DeleteStmt(pass.Fset, file, doneStmt, nil), + []analysis.TextEdit{ + // go func() + // ------ + // wg.Go(func() + { + Pos: goStmt.Pos(), + End: goStmt.Call.Pos(), + NewText: fmt.Appendf(nil, "%s.Go(", addCallRecv), + }, + // ... }() + // - + // ... 
} ) + { + Pos: goStmt.Call.Lparen, + End: goStmt.Call.Rparen, + }, + }, + ), + }}, + }) } } From 7799973f284eec4bb6ccd238fd4207146eed1b36 Mon Sep 17 00:00:00 2001 From: xieyuschen Date: Thu, 3 Apr 2025 21:18:46 -0600 Subject: [PATCH 169/270] gopls/internal/analysis/modernize: add docs for missing modernize passes Change-Id: Ief8ff9c3fdb020208feafa21cae60a3d517b4350 Reviewed-on: https://go-review.googlesource.com/c/tools/+/662535 Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI Reviewed-by: Jonathan Amsterdam Auto-Submit: Alan Donovan --- gopls/doc/analyzers.md | 7 ++++++- gopls/internal/analysis/modernize/doc.go | 7 ++++++- gopls/internal/doc/api.json | 4 ++-- 3 files changed, 14 insertions(+), 4 deletions(-) diff --git a/gopls/doc/analyzers.md b/gopls/doc/analyzers.md index 82b0e8753f9..0d9fcb2313b 100644 --- a/gopls/doc/analyzers.md +++ b/gopls/doc/analyzers.md @@ -514,6 +514,11 @@ consisting of all others. This can be achieved using the -category flag: Categories of modernize diagnostic: + - forvar: remove x := x variable declarations made unnecessary by the new semantics of loops in go1.22. + + - slicescontains: replace 'for i, elem := range s { if elem == needle { ...; break }' + by a call to slices.Contains, added in go1.21. + - minmax: replace an if/else conditional assignment by a call to the built-in min or max functions added in go1.21. @@ -547,7 +552,7 @@ Categories of modernize diagnostic: - rangeint: replace a 3-clause "for i := 0; i < n; i++" loop by "for i := range n", added in go1.22. - - stringseq: replace Split in "for range strings.Split(...)" by go1.24's + - stringsseq: replace Split in "for range strings.Split(...)" by go1.24's more efficient SplitSeq, or Fields with FieldSeq. - stringscutprefix: replace some uses of HasPrefix followed by TrimPrefix with CutPrefix, diff --git a/gopls/internal/analysis/modernize/doc.go b/gopls/internal/analysis/modernize/doc.go index 7bcde40f900..aa052540832 100644 --- a/gopls/internal/analysis/modernize/doc.go +++ b/gopls/internal/analysis/modernize/doc.go @@ -47,6 +47,11 @@ // // Categories of modernize diagnostic: // +// - forvar: remove x := x variable declarations made unnecessary by the new semantics of loops in go1.22. +// +// - slicescontains: replace 'for i, elem := range s { if elem == needle { ...; break }' +// by a call to slices.Contains, added in go1.21. +// // - minmax: replace an if/else conditional assignment by a call to // the built-in min or max functions added in go1.21. // @@ -80,7 +85,7 @@ // - rangeint: replace a 3-clause "for i := 0; i < n; i++" loop by // "for i := range n", added in go1.22. // -// - stringseq: replace Split in "for range strings.Split(...)" by go1.24's +// - stringsseq: replace Split in "for range strings.Split(...)" by go1.24's // more efficient SplitSeq, or Fields with FieldSeq. // // - stringscutprefix: replace some uses of HasPrefix followed by TrimPrefix with CutPrefix, diff --git a/gopls/internal/doc/api.json b/gopls/internal/doc/api.json index 9dc7aef266d..f624af8632c 100644 --- a/gopls/internal/doc/api.json +++ b/gopls/internal/doc/api.json @@ -562,7 +562,7 @@ }, { "Name": "\"modernize\"", - "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code by using more modern features of Go and its standard\nlibrary.\n\nEach diagnostic provides a fix. Our intent is that these fixes may\nbe safely applied en masse without changing the behavior of your\nprogram. 
In some cases the suggested fixes are imperfect and may\nlead to (for example) unused imports or unused local variables,\ncausing build breakage. However, these problems are generally\ntrivial to fix. We regard any modernizer whose fix changes program\nbehavior to have a serious bug and will endeavor to fix it.\n\nTo apply all modernization fixes en masse, you can use the\nfollowing command:\n\n\t$ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -fix -test ./...\n\nIf the tool warns of conflicting fixes, you may need to run it more\nthan once until it has applied all fixes cleanly. This command is\nnot an officially supported interface and may change in the future.\n\nChanges produced by this tool should be reviewed as usual before\nbeing merged. In some cases, a loop may be replaced by a simple\nfunction call, causing comments within the loop to be discarded.\nHuman judgment may be required to avoid losing comments of value.\n\nEach diagnostic reported by modernize has a specific category. (The\ncategories are listed below.) Diagnostics in some categories, such\nas \"efaceany\" (which replaces \"interface{}\" with \"any\" where it is\nsafe to do so) are particularly numerous. It may ease the burden of\ncode review to apply fixes in two passes, the first change\nconsisting only of fixes of category \"efaceany\", the second\nconsisting of all others. This can be achieved using the -category flag:\n\n\t$ modernize -category=efaceany -fix -test ./...\n\t$ modernize -category=-efaceany -fix -test ./...\n\nCategories of modernize diagnostic:\n\n - minmax: replace an if/else conditional assignment by a call to\n the built-in min or max functions added in go1.21.\n\n - sortslice: replace sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21.\n\n - efaceany: replace interface{} by the 'any' type added in go1.18.\n\n - slicesclone: replace append([]T(nil), s...) by slices.Clone(s) or\n slices.Concat(s), added in go1.21.\n\n - mapsloop: replace a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions from\n the maps package, added in go1.21.\n\n - fmtappendf: replace []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...),\n added in go1.19.\n\n - testingcontext: replace uses of context.WithCancel in tests\n with t.Context, added in go1.24.\n\n - omitzero: replace omitempty by omitzero on structs, added in go1.24.\n\n - bloop: replace \"for i := range b.N\" or \"for range b.N\" in a\n benchmark with \"for b.Loop()\", and remove any preceding calls\n to b.StopTimer, b.StartTimer, and b.ResetTimer.\n\n - slicesdelete: replace append(s[:i], s[i+1]...) by\n slices.Delete(s, i, i+1), added in go1.21.\n\n - rangeint: replace a 3-clause \"for i := 0; i \u003c n; i++\" loop by\n \"for i := range n\", added in go1.22.\n\n - stringseq: replace Split in \"for range strings.Split(...)\" by go1.24's\n more efficient SplitSeq, or Fields with FieldSeq.\n\n - stringscutprefix: replace some uses of HasPrefix followed by TrimPrefix with CutPrefix,\n added to the strings package in go1.20.\n\n - waitgroup: replace old complex usages of sync.WaitGroup by less complex WaitGroup.Go method in go1.25.", + "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code by using more modern features of Go and its standard\nlibrary.\n\nEach diagnostic provides a fix. 
Our intent is that these fixes may\nbe safely applied en masse without changing the behavior of your\nprogram. In some cases the suggested fixes are imperfect and may\nlead to (for example) unused imports or unused local variables,\ncausing build breakage. However, these problems are generally\ntrivial to fix. We regard any modernizer whose fix changes program\nbehavior to have a serious bug and will endeavor to fix it.\n\nTo apply all modernization fixes en masse, you can use the\nfollowing command:\n\n\t$ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -fix -test ./...\n\nIf the tool warns of conflicting fixes, you may need to run it more\nthan once until it has applied all fixes cleanly. This command is\nnot an officially supported interface and may change in the future.\n\nChanges produced by this tool should be reviewed as usual before\nbeing merged. In some cases, a loop may be replaced by a simple\nfunction call, causing comments within the loop to be discarded.\nHuman judgment may be required to avoid losing comments of value.\n\nEach diagnostic reported by modernize has a specific category. (The\ncategories are listed below.) Diagnostics in some categories, such\nas \"efaceany\" (which replaces \"interface{}\" with \"any\" where it is\nsafe to do so) are particularly numerous. It may ease the burden of\ncode review to apply fixes in two passes, the first change\nconsisting only of fixes of category \"efaceany\", the second\nconsisting of all others. This can be achieved using the -category flag:\n\n\t$ modernize -category=efaceany -fix -test ./...\n\t$ modernize -category=-efaceany -fix -test ./...\n\nCategories of modernize diagnostic:\n\n - forvar: remove x := x variable declarations made unnecessary by the new semantics of loops in go1.22.\n\n - slicescontains: replace 'for i, elem := range s { if elem == needle { ...; break }'\n by a call to slices.Contains, added in go1.21.\n\n - minmax: replace an if/else conditional assignment by a call to\n the built-in min or max functions added in go1.21.\n\n - sortslice: replace sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21.\n\n - efaceany: replace interface{} by the 'any' type added in go1.18.\n\n - slicesclone: replace append([]T(nil), s...) by slices.Clone(s) or\n slices.Concat(s), added in go1.21.\n\n - mapsloop: replace a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions from\n the maps package, added in go1.21.\n\n - fmtappendf: replace []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...),\n added in go1.19.\n\n - testingcontext: replace uses of context.WithCancel in tests\n with t.Context, added in go1.24.\n\n - omitzero: replace omitempty by omitzero on structs, added in go1.24.\n\n - bloop: replace \"for i := range b.N\" or \"for range b.N\" in a\n benchmark with \"for b.Loop()\", and remove any preceding calls\n to b.StopTimer, b.StartTimer, and b.ResetTimer.\n\n - slicesdelete: replace append(s[:i], s[i+1]...) 
by\n slices.Delete(s, i, i+1), added in go1.21.\n\n - rangeint: replace a 3-clause \"for i := 0; i \u003c n; i++\" loop by\n \"for i := range n\", added in go1.22.\n\n - stringsseq: replace Split in \"for range strings.Split(...)\" by go1.24's\n more efficient SplitSeq, or Fields with FieldSeq.\n\n - stringscutprefix: replace some uses of HasPrefix followed by TrimPrefix with CutPrefix,\n added to the strings package in go1.20.\n\n - waitgroup: replace old complex usages of sync.WaitGroup by less complex WaitGroup.Go method in go1.25.", "Default": "true", "Status": "" }, @@ -1338,7 +1338,7 @@ }, { "Name": "modernize", - "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code by using more modern features of Go and its standard\nlibrary.\n\nEach diagnostic provides a fix. Our intent is that these fixes may\nbe safely applied en masse without changing the behavior of your\nprogram. In some cases the suggested fixes are imperfect and may\nlead to (for example) unused imports or unused local variables,\ncausing build breakage. However, these problems are generally\ntrivial to fix. We regard any modernizer whose fix changes program\nbehavior to have a serious bug and will endeavor to fix it.\n\nTo apply all modernization fixes en masse, you can use the\nfollowing command:\n\n\t$ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -fix -test ./...\n\nIf the tool warns of conflicting fixes, you may need to run it more\nthan once until it has applied all fixes cleanly. This command is\nnot an officially supported interface and may change in the future.\n\nChanges produced by this tool should be reviewed as usual before\nbeing merged. In some cases, a loop may be replaced by a simple\nfunction call, causing comments within the loop to be discarded.\nHuman judgment may be required to avoid losing comments of value.\n\nEach diagnostic reported by modernize has a specific category. (The\ncategories are listed below.) Diagnostics in some categories, such\nas \"efaceany\" (which replaces \"interface{}\" with \"any\" where it is\nsafe to do so) are particularly numerous. It may ease the burden of\ncode review to apply fixes in two passes, the first change\nconsisting only of fixes of category \"efaceany\", the second\nconsisting of all others. This can be achieved using the -category flag:\n\n\t$ modernize -category=efaceany -fix -test ./...\n\t$ modernize -category=-efaceany -fix -test ./...\n\nCategories of modernize diagnostic:\n\n - minmax: replace an if/else conditional assignment by a call to\n the built-in min or max functions added in go1.21.\n\n - sortslice: replace sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21.\n\n - efaceany: replace interface{} by the 'any' type added in go1.18.\n\n - slicesclone: replace append([]T(nil), s...) by slices.Clone(s) or\n slices.Concat(s), added in go1.21.\n\n - mapsloop: replace a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions from\n the maps package, added in go1.21.\n\n - fmtappendf: replace []byte(fmt.Sprintf...) 
by fmt.Appendf(nil, ...),\n added in go1.19.\n\n - testingcontext: replace uses of context.WithCancel in tests\n with t.Context, added in go1.24.\n\n - omitzero: replace omitempty by omitzero on structs, added in go1.24.\n\n - bloop: replace \"for i := range b.N\" or \"for range b.N\" in a\n benchmark with \"for b.Loop()\", and remove any preceding calls\n to b.StopTimer, b.StartTimer, and b.ResetTimer.\n\n - slicesdelete: replace append(s[:i], s[i+1]...) by\n slices.Delete(s, i, i+1), added in go1.21.\n\n - rangeint: replace a 3-clause \"for i := 0; i \u003c n; i++\" loop by\n \"for i := range n\", added in go1.22.\n\n - stringseq: replace Split in \"for range strings.Split(...)\" by go1.24's\n more efficient SplitSeq, or Fields with FieldSeq.\n\n - stringscutprefix: replace some uses of HasPrefix followed by TrimPrefix with CutPrefix,\n added to the strings package in go1.20.\n\n - waitgroup: replace old complex usages of sync.WaitGroup by less complex WaitGroup.Go method in go1.25.", + "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code by using more modern features of Go and its standard\nlibrary.\n\nEach diagnostic provides a fix. Our intent is that these fixes may\nbe safely applied en masse without changing the behavior of your\nprogram. In some cases the suggested fixes are imperfect and may\nlead to (for example) unused imports or unused local variables,\ncausing build breakage. However, these problems are generally\ntrivial to fix. We regard any modernizer whose fix changes program\nbehavior to have a serious bug and will endeavor to fix it.\n\nTo apply all modernization fixes en masse, you can use the\nfollowing command:\n\n\t$ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -fix -test ./...\n\nIf the tool warns of conflicting fixes, you may need to run it more\nthan once until it has applied all fixes cleanly. This command is\nnot an officially supported interface and may change in the future.\n\nChanges produced by this tool should be reviewed as usual before\nbeing merged. In some cases, a loop may be replaced by a simple\nfunction call, causing comments within the loop to be discarded.\nHuman judgment may be required to avoid losing comments of value.\n\nEach diagnostic reported by modernize has a specific category. (The\ncategories are listed below.) Diagnostics in some categories, such\nas \"efaceany\" (which replaces \"interface{}\" with \"any\" where it is\nsafe to do so) are particularly numerous. It may ease the burden of\ncode review to apply fixes in two passes, the first change\nconsisting only of fixes of category \"efaceany\", the second\nconsisting of all others. 
This can be achieved using the -category flag:\n\n\t$ modernize -category=efaceany -fix -test ./...\n\t$ modernize -category=-efaceany -fix -test ./...\n\nCategories of modernize diagnostic:\n\n - forvar: remove x := x variable declarations made unnecessary by the new semantics of loops in go1.22.\n\n - slicescontains: replace 'for i, elem := range s { if elem == needle { ...; break }'\n by a call to slices.Contains, added in go1.21.\n\n - minmax: replace an if/else conditional assignment by a call to\n the built-in min or max functions added in go1.21.\n\n - sortslice: replace sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21.\n\n - efaceany: replace interface{} by the 'any' type added in go1.18.\n\n - slicesclone: replace append([]T(nil), s...) by slices.Clone(s) or\n slices.Concat(s), added in go1.21.\n\n - mapsloop: replace a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions from\n the maps package, added in go1.21.\n\n - fmtappendf: replace []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...),\n added in go1.19.\n\n - testingcontext: replace uses of context.WithCancel in tests\n with t.Context, added in go1.24.\n\n - omitzero: replace omitempty by omitzero on structs, added in go1.24.\n\n - bloop: replace \"for i := range b.N\" or \"for range b.N\" in a\n benchmark with \"for b.Loop()\", and remove any preceding calls\n to b.StopTimer, b.StartTimer, and b.ResetTimer.\n\n - slicesdelete: replace append(s[:i], s[i+1]...) by\n slices.Delete(s, i, i+1), added in go1.21.\n\n - rangeint: replace a 3-clause \"for i := 0; i \u003c n; i++\" loop by\n \"for i := range n\", added in go1.22.\n\n - stringsseq: replace Split in \"for range strings.Split(...)\" by go1.24's\n more efficient SplitSeq, or Fields with FieldSeq.\n\n - stringscutprefix: replace some uses of HasPrefix followed by TrimPrefix with CutPrefix,\n added to the strings package in go1.20.\n\n - waitgroup: replace old complex usages of sync.WaitGroup by less complex WaitGroup.Go method in go1.25.", "URL": "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/modernize", "Default": true }, From 83a805742f78e4fdb291567ac94a7b88c57e591f Mon Sep 17 00:00:00 2001 From: xieyuschen Date: Thu, 3 Apr 2025 23:41:08 -0600 Subject: [PATCH 170/270] x/tools: regenerate code after go upgrading This CL runs 'go generate ./...' under x/tools to update outdated code caused by gopls minimal version upgrading to go1.24.2 in CL662036. This CL also adds new environments to disable the cgo and fixs the platform and os to ensure the graph is independent of the calling environment. 
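For illustration, a minimal self-contained sketch (not the generator
itself; the real code is in internal/stdlib/generate.go, shown in the
diff below) of how pinning the environment keeps the "go list" output
reproducible across hosts:

	// Run "go list" with cgo disabled and a fixed platform so the
	// generated dependency graph does not depend on the machine
	// running "go generate".
	package main

	import (
		"log"
		"os"
		"os/exec"
	)

	func main() {
		cmd := exec.Command("go", "list", "-deps", "-json", "std")
		cmd.Env = append(os.Environ(), "CGO_ENABLED=0", "GOOS=linux", "GOARCH=amd64")
		cmd.Stdout = os.Stdout
		cmd.Stderr = os.Stderr
		if err := cmd.Run(); err != nil {
			log.Fatal(err)
		}
	}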
Change-Id: Iab75df0a5625a273c8c971e19b9ac25e3806933b Reviewed-on: https://go-review.googlesource.com/c/tools/+/662497 Reviewed-by: Carlos Amedee Reviewed-by: Alan Donovan Auto-Submit: Alan Donovan LUCI-TryBot-Result: Go LUCI --- internal/stdlib/deps.go | 528 +++++++++++------------ internal/stdlib/generate.go | 2 + internal/stdlib/manifest.go | 7 - internal/stdlib/testdata/nethttp.deps | 10 +- internal/stdlib/testdata/nethttp.imports | 1 - internal/typeparams/termlist.go | 12 +- internal/typeparams/typeterm.go | 3 + 7 files changed, 282 insertions(+), 281 deletions(-) diff --git a/internal/stdlib/deps.go b/internal/stdlib/deps.go index 7cca431cd65..c50bf406b7f 100644 --- a/internal/stdlib/deps.go +++ b/internal/stdlib/deps.go @@ -12,348 +12,348 @@ type pkginfo struct { } var deps = [...]pkginfo{ - {"archive/tar", "\x03k\x03E5\x01\v\x01#\x01\x01\x02\x05\t\x02\x01\x02\x02\v"}, - {"archive/zip", "\x02\x04a\a\x16\x0205\x01+\x05\x01\x10\x03\x02\r\x04"}, - {"bufio", "\x03k}E\x13"}, - {"bytes", "n+R\x03\fG\x02\x02"}, + {"archive/tar", "\x03j\x03E6\x01\v\x01\"\x01\x01\x02\x05\n\x02\x01\x02\x02\v"}, + {"archive/zip", "\x02\x04`\a\x16\x0206\x01*\x05\x01\x11\x03\x02\r\x04"}, + {"bufio", "\x03j~E\x13"}, + {"bytes", "m+S\x03\fG\x02\x02"}, {"cmp", ""}, {"compress/bzip2", "\x02\x02\xe7\x01B"}, - {"compress/flate", "\x02l\x03z\r\x024\x01\x03"}, - {"compress/gzip", "\x02\x04a\a\x03\x15eT"}, - {"compress/lzw", "\x02l\x03z"}, - {"compress/zlib", "\x02\x04a\a\x03\x13\x01f"}, + {"compress/flate", "\x02k\x03{\r\x024\x01\x03"}, + {"compress/gzip", "\x02\x04`\a\x03\x15fT"}, + {"compress/lzw", "\x02k\x03{"}, + {"compress/zlib", "\x02\x04`\a\x03\x13\x01g"}, {"container/heap", "\xae\x02"}, {"container/list", ""}, {"container/ring", ""}, - {"context", "n\\h\x01\f"}, - {"crypto", "\x84\x01gD"}, + {"context", "m\\i\x01\f"}, + {"crypto", "\x83\x01hD"}, {"crypto/aes", "\x10\n\a\x8e\x02"}, - {"crypto/cipher", "\x03\x1e\x01\x01\x1d\x11\x1d,Q"}, - {"crypto/des", "\x10\x13\x1d.,\x95\x01\x03"}, - {"crypto/dsa", "@\x04*}\x0e"}, - {"crypto/ecdh", "\x03\v\f\x0e\x04\x14\x04\r\x1d}"}, - {"crypto/ecdsa", "\x0e\x05\x03\x04\x01\x0e\x16\x01\x04\f\x01\x1d}\x0e\x04K\x01"}, - {"crypto/ed25519", "\x0e\x1c\x16\n\a\x1d}D"}, - {"crypto/elliptic", "0>}\x0e9"}, - {"crypto/fips140", " \x05\x91\x01"}, - {"crypto/hkdf", "-\x12\x01.\x16"}, - {"crypto/hmac", "\x1a\x14\x11\x01\x113"}, - {"crypto/internal/boring", "\x0e\x02\rg"}, + {"crypto/cipher", "\x03\x1e\x01\x01\x1d\x11\x1c,R"}, + {"crypto/des", "\x10\x13\x1d-,\x96\x01\x03"}, + {"crypto/dsa", "@\x04)~\x0e"}, + {"crypto/ecdh", "\x03\v\f\x0e\x04\x14\x04\r\x1c~"}, + {"crypto/ecdsa", "\x0e\x05\x03\x04\x01\x0e\x16\x01\x04\f\x01\x1c~\x0e\x04K\x01"}, + {"crypto/ed25519", "\x0e\x1c\x16\n\a\x1c~D"}, + {"crypto/elliptic", "0=~\x0e9"}, + {"crypto/fips140", " \x05\x90\x01"}, + {"crypto/hkdf", "-\x12\x01-\x16"}, + {"crypto/hmac", "\x1a\x14\x11\x01\x112"}, + {"crypto/internal/boring", "\x0e\x02\rf"}, {"crypto/internal/boring/bbig", "\x1a\xdf\x01L"}, {"crypto/internal/boring/bcache", "\xb3\x02\x12"}, {"crypto/internal/boring/sig", ""}, - {"crypto/internal/cryptotest", "\x03\r\n)\x0e\x1a\x06\x13\x12#\a\t\x11\x11\x11\x1b\x01\f\f\x05\n"}, + {"crypto/internal/cryptotest", "\x03\r\n)\x0e\x19\x06\x13\x12#\a\t\x11\x12\x11\x1a\r\r\x05\n"}, {"crypto/internal/entropy", "E"}, - {"crypto/internal/fips140", ">0}9\f\x15"}, - {"crypto/internal/fips140/aes", "\x03\x1d\x03\x02\x13\x04\x01\x01\x05+\x8c\x015"}, - {"crypto/internal/fips140/aes/gcm", " \x01\x02\x02\x02\x11\x04\x01\x06+\x8a\x01"}, + {"crypto/internal/fips140", 
">/~8\r\x15"}, + {"crypto/internal/fips140/aes", "\x03\x1d\x03\x02\x13\x04\x01\x01\x05*\x8d\x015"}, + {"crypto/internal/fips140/aes/gcm", " \x01\x02\x02\x02\x11\x04\x01\x06*\x8b\x01"}, {"crypto/internal/fips140/alias", "\xc5\x02"}, - {"crypto/internal/fips140/bigmod", "%\x17\x01\x06+\x8c\x01"}, + {"crypto/internal/fips140/bigmod", "%\x17\x01\x06*\x8d\x01"}, {"crypto/internal/fips140/check", " \x0e\x06\b\x02\xad\x01Z"}, - {"crypto/internal/fips140/check/checktest", "%\xff\x01!"}, - {"crypto/internal/fips140/drbg", "\x03\x1c\x01\x01\x04\x13\x04\b\x01)}\x0f8"}, - {"crypto/internal/fips140/ecdh", "\x03\x1d\x05\x02\t\f2}\x0f8"}, - {"crypto/internal/fips140/ecdsa", "\x03\x1d\x04\x01\x02\a\x02\x068}G"}, - {"crypto/internal/fips140/ed25519", "\x03\x1d\x05\x02\x04\v8\xc1\x01\x03"}, - {"crypto/internal/fips140/edwards25519", "%\a\f\x042\x8c\x018"}, - {"crypto/internal/fips140/edwards25519/field", "%\x13\x042\x8c\x01"}, - {"crypto/internal/fips140/hkdf", "\x03\x1d\x05\t\x06:"}, - {"crypto/internal/fips140/hmac", "\x03\x1d\x14\x01\x018"}, - {"crypto/internal/fips140/mlkem", "\x03\x1d\x05\x02\x0e\x03\x042"}, - {"crypto/internal/fips140/nistec", "%\f\a\x042\x8c\x01*\x0e\x13"}, - {"crypto/internal/fips140/nistec/fiat", "%\x136\x8c\x01"}, - {"crypto/internal/fips140/pbkdf2", "\x03\x1d\x05\t\x06:"}, - {"crypto/internal/fips140/rsa", "\x03\x1d\x04\x01\x02\r\x01\x01\x026}G"}, - {"crypto/internal/fips140/sha256", "\x03\x1d\x1c\x01\x06+\x8c\x01"}, - {"crypto/internal/fips140/sha3", "\x03\x1d\x18\x04\x011\x8c\x01K"}, - {"crypto/internal/fips140/sha512", "\x03\x1d\x1c\x01\x06+\x8c\x01"}, + {"crypto/internal/fips140/check/checktest", "%\xfe\x01\""}, + {"crypto/internal/fips140/drbg", "\x03\x1c\x01\x01\x04\x13\x04\b\x01(~\x0f8"}, + {"crypto/internal/fips140/ecdh", "\x03\x1d\x05\x02\t\f1~\x0f8"}, + {"crypto/internal/fips140/ecdsa", "\x03\x1d\x04\x01\x02\a\x02\x067~G"}, + {"crypto/internal/fips140/ed25519", "\x03\x1d\x05\x02\x04\v7\xc2\x01\x03"}, + {"crypto/internal/fips140/edwards25519", "%\a\f\x041\x8d\x018"}, + {"crypto/internal/fips140/edwards25519/field", "%\x13\x041\x8d\x01"}, + {"crypto/internal/fips140/hkdf", "\x03\x1d\x05\t\x069"}, + {"crypto/internal/fips140/hmac", "\x03\x1d\x14\x01\x017"}, + {"crypto/internal/fips140/mlkem", "\x03\x1d\x05\x02\x0e\x03\x041"}, + {"crypto/internal/fips140/nistec", "%\f\a\x041\x8d\x01)\x0f\x13"}, + {"crypto/internal/fips140/nistec/fiat", "%\x135\x8d\x01"}, + {"crypto/internal/fips140/pbkdf2", "\x03\x1d\x05\t\x069"}, + {"crypto/internal/fips140/rsa", "\x03\x1d\x04\x01\x02\r\x01\x01\x025~G"}, + {"crypto/internal/fips140/sha256", "\x03\x1d\x1c\x01\x06*\x8d\x01"}, + {"crypto/internal/fips140/sha3", "\x03\x1d\x18\x04\x010\x8d\x01K"}, + {"crypto/internal/fips140/sha512", "\x03\x1d\x1c\x01\x06*\x8d\x01"}, {"crypto/internal/fips140/ssh", " \x05"}, - {"crypto/internal/fips140/subtle", "#\x19\xbe\x01"}, - {"crypto/internal/fips140/tls12", "\x03\x1d\x05\t\x06\x028"}, - {"crypto/internal/fips140/tls13", "\x03\x1d\x05\b\a\b2"}, + {"crypto/internal/fips140/subtle", "#"}, + {"crypto/internal/fips140/tls12", "\x03\x1d\x05\t\x06\x027"}, + {"crypto/internal/fips140/tls13", "\x03\x1d\x05\b\a\b1"}, {"crypto/internal/fips140deps", ""}, - {"crypto/internal/fips140deps/byteorder", "\x9a\x01"}, - {"crypto/internal/fips140deps/cpu", "\xae\x01\a"}, - {"crypto/internal/fips140deps/godebug", "\xb6\x01"}, - {"crypto/internal/fips140hash", "5\x1a5\xc1\x01"}, - {"crypto/internal/fips140only", "'\r\x01\x01N25"}, + {"crypto/internal/fips140deps/byteorder", "\x99\x01"}, + {"crypto/internal/fips140deps/cpu", 
"\xad\x01\a"}, + {"crypto/internal/fips140deps/godebug", "\xb5\x01"}, + {"crypto/internal/fips140hash", "5\x1a4\xc2\x01"}, + {"crypto/internal/fips140only", "'\r\x01\x01M26"}, {"crypto/internal/fips140test", ""}, - {"crypto/internal/hpke", "\x0e\x01\x01\x03\x1a\x1d$,`M"}, + {"crypto/internal/hpke", "\x0e\x01\x01\x03\x1a\x1d#,aM"}, {"crypto/internal/impl", "\xb0\x02"}, {"crypto/internal/randutil", "\xeb\x01\x12"}, - {"crypto/internal/sysrand", "\xd7\x01@\x1b\x01\f\x06"}, - {"crypto/internal/sysrand/internal/seccomp", "n"}, - {"crypto/md5", "\x0e2.\x16\x16`"}, + {"crypto/internal/sysrand", "mi\"\x1e\r\x0f\x01\x01\v\x06"}, + {"crypto/internal/sysrand/internal/seccomp", "m"}, + {"crypto/md5", "\x0e2-\x16\x16a"}, {"crypto/mlkem", "/"}, - {"crypto/pbkdf2", "2\r\x01.\x16"}, - {"crypto/rand", "\x1a\x06\a\x19\x04\x01)}\x0eL"}, - {"crypto/rc4", "#\x1d.\xc1\x01"}, - {"crypto/rsa", "\x0e\f\x01\t\x0f\f\x01\x04\x06\a\x1d\x03\x1325\r\x01"}, - {"crypto/sha1", "\x0e\f&.\x16\x16\x14L"}, - {"crypto/sha256", "\x0e\f\x1aP"}, - {"crypto/sha3", "\x0e'O\xc1\x01"}, - {"crypto/sha512", "\x0e\f\x1cN"}, - {"crypto/subtle", "8\x98\x01T"}, - {"crypto/tls", "\x03\b\x02\x01\x01\x01\x01\x02\x01\x01\x01\x03\x01\a\x01\v\x02\n\x01\b\x05\x03\x01\x01\x01\x01\x02\x01\x02\x01\x18\x02\x03\x13\x16\x14\b5\x16\x16\r\t\x01\x01\x01\x02\x01\f\x06\x02\x01"}, + {"crypto/pbkdf2", "2\r\x01-\x16"}, + {"crypto/rand", "\x1a\x06\a\x19\x04\x01(~\x0eL"}, + {"crypto/rc4", "#\x1d-\xc2\x01"}, + {"crypto/rsa", "\x0e\f\x01\t\x0f\f\x01\x04\x06\a\x1c\x03\x1326\r\x01"}, + {"crypto/sha1", "\x0e\f&-\x16\x16\x14M"}, + {"crypto/sha256", "\x0e\f\x1aO"}, + {"crypto/sha3", "\x0e'N\xc2\x01"}, + {"crypto/sha512", "\x0e\f\x1cM"}, + {"crypto/subtle", "8\x96\x01U"}, + {"crypto/tls", "\x03\b\x02\x01\x01\x01\x01\x02\x01\x01\x01\x03\x01\a\x01\v\x02\n\x01\b\x05\x03\x01\x01\x01\x01\x02\x01\x02\x01\x17\x02\x03\x13\x16\x14\b6\x16\x15\r\n\x01\x01\x01\x02\x01\f\x06\x02\x01"}, {"crypto/tls/internal/fips140tls", " \x93\x02"}, - {"crypto/x509", "\x03\v\x01\x01\x01\x01\x01\x01\x01\x011\x03\x02\x01\x01\x02\x05\x01\x0e\x06\x02\x02\x03E5\x03\t\x01\x01\x01\a\x10\x05\t\x05\v\x01\x02\r\x02\x01\x01\x02\x03\x01"}, - {"crypto/x509/internal/macos", "\x03k'\x8f\x01\v\x10\x06"}, - {"crypto/x509/pkix", "d\x06\a\x88\x01F"}, - {"database/sql", "\x03\nK\x16\x03z\f\x06\"\x05\t\x02\x03\x01\f\x02\x02\x02"}, - {"database/sql/driver", "\ra\x03\xae\x01\x10\x10"}, - {"debug/buildinfo", "\x03X\x02\x01\x01\b\a\x03`\x18\x02\x01+\x10\x1e"}, - {"debug/dwarf", "\x03d\a\x03z1\x12\x01\x01"}, - {"debug/elf", "\x03\x06Q\r\a\x03`\x19\x01,\x18\x01\x15"}, - {"debug/gosym", "\x03d\n\xbd\x01\x01\x01\x02"}, - {"debug/macho", "\x03\x06Q\r\n`\x1a,\x18\x01"}, - {"debug/pe", "\x03\x06Q\r\a\x03`\x1a,\x18\x01\x15"}, - {"debug/plan9obj", "g\a\x03`\x1a,"}, - {"embed", "n+:\x18\x01S"}, + {"crypto/x509", "\x03\v\x01\x01\x01\x01\x01\x01\x01\x011\x03\x02\x01\x01\x02\x05\x0e\x06\x02\x02\x03E\x033\x01\x02\t\x01\x01\x01\a\x0f\x05\x01\x06\x02\x05\f\x01\x02\r\x02\x01\x01\x02\x03\x01"}, + {"crypto/x509/pkix", "c\x06\a\x89\x01F"}, + {"database/sql", "\x03\nJ\x16\x03{\f\x06!\x05\n\x02\x03\x01\f\x02\x02\x02"}, + {"database/sql/driver", "\r`\x03\xae\x01\x11\x10"}, + {"debug/buildinfo", "\x03W\x02\x01\x01\b\a\x03`\x19\x02\x01*\x0f "}, + {"debug/dwarf", "\x03c\a\x03{0\x13\x01\x01"}, + {"debug/elf", "\x03\x06P\r\a\x03`\x1a\x01+\x19\x01\x15"}, + {"debug/gosym", "\x03c\n\xbe\x01\x01\x01\x02"}, + {"debug/macho", "\x03\x06P\r\n`\x1b+\x19\x01"}, + {"debug/pe", "\x03\x06P\r\a\x03`\x1b+\x19\x01\x15"}, + {"debug/plan9obj", "f\a\x03`\x1b+"}, + 
{"embed", "m+:\x19\x01S"}, {"embed/internal/embedtest", ""}, {"encoding", ""}, {"encoding/ascii85", "\xeb\x01D"}, - {"encoding/asn1", "\x03k\x03\x87\x01\x01&\x0e\x02\x01\x0f\x03\x01"}, + {"encoding/asn1", "\x03j\x03\x88\x01\x01%\x0f\x02\x01\x0f\x03\x01"}, {"encoding/base32", "\xeb\x01B\x02"}, - {"encoding/base64", "\x9a\x01QB\x02"}, - {"encoding/binary", "n}\r'\x0e\x05"}, - {"encoding/csv", "\x02\x01k\x03zE\x11\x02"}, - {"encoding/gob", "\x02`\x05\a\x03`\x1a\f\x01\x02\x1d\b\x13\x01\x0e\x02"}, - {"encoding/hex", "n\x03zB\x03"}, - {"encoding/json", "\x03\x01^\x04\b\x03z\r'\x0e\x02\x01\x02\x0f\x01\x01\x02"}, - {"encoding/pem", "\x03c\b}B\x03"}, - {"encoding/xml", "\x02\x01_\f\x03z4\x05\v\x01\x02\x0f\x02"}, - {"errors", "\xca\x01{"}, - {"expvar", "kK9\t\n\x15\r\t\x02\x03\x01\x10"}, - {"flag", "b\f\x03z,\b\x05\t\x02\x01\x0f"}, - {"fmt", "nE8\r\x1f\b\x0e\x02\x03\x11"}, - {"go/ast", "\x03\x01m\x0f\x01j\x03)\b\x0e\x02\x01"}, + {"encoding/base64", "f\x85\x01B\x02"}, + {"encoding/binary", "m~\r&\x0f\x05"}, + {"encoding/csv", "\x02\x01j\x03{E\x11\x02"}, + {"encoding/gob", "\x02_\x05\a\x03`\x1b\f\x01\x02\x1c\b\x14\x01\x0e\x02"}, + {"encoding/hex", "m\x03{B\x03"}, + {"encoding/json", "\x03\x01]\x04\b\x03{\r&\x0f\x02\x01\x02\x0f\x01\x01\x02"}, + {"encoding/pem", "\x03b\b~B\x03"}, + {"encoding/xml", "\x02\x01^\f\x03{3\x05\f\x01\x02\x0f\x02"}, + {"errors", "\xc9\x01|"}, + {"expvar", "jK:\t\n\x14\r\n\x02\x03\x01\x10"}, + {"flag", "a\f\x03{+\b\x05\n\x02\x01\x0f"}, + {"fmt", "mE9\r\x1e\b\x0f\x02\x03\x11"}, + {"go/ast", "\x03\x01l\x0f\x01k\x03(\b\x0f\x02\x01"}, {"go/ast/internal/tests", ""}, - {"go/build", "\x02\x01k\x03\x01\x03\x02\a\x02\x01\x17\x1e\x04\x02\t\x14\x12\x01+\x01\x04\x01\a\t\x02\x01\x11\x02\x02"}, - {"go/build/constraint", "n\xc1\x01\x01\x11\x02"}, - {"go/constant", "q\x10w\x01\x015\x01\x02\x11"}, - {"go/doc", "\x04m\x01\x06\t=-1\x11\x02\x01\x11\x02"}, - {"go/doc/comment", "\x03n\xbc\x01\x01\x01\x01\x11\x02"}, - {"go/format", "\x03n\x01\f\x01\x02jE"}, - {"go/importer", "t\a\x01\x01\x04\x01i9"}, - {"go/internal/gccgoimporter", "\x02\x01X\x13\x03\x05\v\x01g\x02,\x01\x05\x12\x01\v\b"}, - {"go/internal/gcimporter", "\x02o\x10\x01/\x05\x0e',\x16\x03\x02"}, - {"go/internal/srcimporter", "q\x01\x02\n\x03\x01i,\x01\x05\x13\x02\x13"}, - {"go/parser", "\x03k\x03\x01\x03\v\x01j\x01+\x06\x13"}, - {"go/printer", "q\x01\x03\x03\tj\r\x1f\x16\x02\x01\x02\n\x05\x02"}, - {"go/scanner", "\x03n\x10j2\x11\x01\x12\x02"}, - {"go/token", "\x04m\xbc\x01\x02\x03\x01\x0e\x02"}, - {"go/types", "\x03\x01\x06d\x03\x01\x04\b\x03\x02\x15\x1e\x06+\x04\x03\n%\a\t\x01\x01\x01\x02\x01\x0e\x02\x02"}, - {"go/version", "\xbb\x01u"}, + {"go/build", "\x02\x01j\x03\x01\x03\x02\a\x02\x01\x17\x1e\x04\x02\t\x14\x13\x01*\x01\x04\x01\a\n\x02\x01\x11\x02\x02"}, + {"go/build/constraint", "m\xc2\x01\x01\x11\x02"}, + {"go/constant", "p\x10x\x01\x015\x01\x02\x11"}, + {"go/doc", "\x04l\x01\x06\t=.0\x12\x02\x01\x11\x02"}, + {"go/doc/comment", "\x03m\xbd\x01\x01\x01\x01\x11\x02"}, + {"go/format", "\x03m\x01\f\x01\x02kE"}, + {"go/importer", "s\a\x01\x01\x04\x01j8"}, + {"go/internal/gccgoimporter", "\x02\x01W\x13\x03\x05\v\x01h\x02+\x01\x05\x13\x01\v\b"}, + {"go/internal/gcimporter", "\x02n\x10\x01/\x05\x0e(+\x17\x03\x02"}, + {"go/internal/srcimporter", "p\x01\x02\n\x03\x01j+\x01\x05\x14\x02\x13"}, + {"go/parser", "\x03j\x03\x01\x03\v\x01k\x01*\x06\x14"}, + {"go/printer", "p\x01\x03\x03\tk\r\x1e\x17\x02\x01\x02\n\x05\x02"}, + {"go/scanner", "\x03m\x10k1\x12\x01\x12\x02"}, + {"go/token", "\x04l\xbd\x01\x02\x03\x01\x0e\x02"}, + {"go/types", 
"\x03\x01\x06c\x03\x01\x04\b\x03\x02\x15\x1e\x06,\x04\x03\n$\a\n\x01\x01\x01\x02\x01\x0e\x02\x02"}, + {"go/version", "\xba\x01v"}, {"hash", "\xeb\x01"}, - {"hash/adler32", "n\x16\x16"}, - {"hash/crc32", "n\x16\x16\x14\x84\x01\x01"}, - {"hash/crc64", "n\x16\x16\x98\x01"}, - {"hash/fnv", "n\x16\x16`"}, - {"hash/maphash", "\x95\x01\x05\x1b\x03@M"}, + {"hash/adler32", "m\x16\x16"}, + {"hash/crc32", "m\x16\x16\x14\x85\x01\x01\x12"}, + {"hash/crc64", "m\x16\x16\x99\x01"}, + {"hash/fnv", "m\x16\x16a"}, + {"hash/maphash", "\x94\x01\x05\x1b\x03AM"}, {"html", "\xb0\x02\x02\x11"}, - {"html/template", "\x03h\x06\x19,5\x01\v \x05\x01\x02\x03\r\x01\x02\v\x01\x03\x02"}, - {"image", "\x02l\x1f^\x0f5\x03\x01"}, + {"html/template", "\x03g\x06\x19,6\x01\v\x1f\x05\x01\x02\x03\x0e\x01\x02\v\x01\x03\x02"}, + {"image", "\x02k\x1f_\x0f5\x03\x01"}, {"image/color", ""}, - {"image/color/palette", "\x8d\x01"}, - {"image/draw", "\x8c\x01\x01\x04"}, - {"image/gif", "\x02\x01\x05f\x03\x1b\x01\x01\x01\vQ"}, - {"image/internal/imageutil", "\x8c\x01"}, - {"image/jpeg", "\x02l\x1e\x01\x04Z"}, - {"image/png", "\x02\a^\n\x13\x02\x06\x01^D"}, - {"index/suffixarray", "\x03d\a}\r*\v\x01"}, - {"internal/abi", "\xb5\x01\x90\x01"}, + {"image/color/palette", "\x8c\x01"}, + {"image/draw", "\x8b\x01\x01\x04"}, + {"image/gif", "\x02\x01\x05e\x03\x1b\x01\x01\x01\vR"}, + {"image/internal/imageutil", "\x8b\x01"}, + {"image/jpeg", "\x02k\x1e\x01\x04["}, + {"image/png", "\x02\a]\n\x13\x02\x06\x01_D"}, + {"index/suffixarray", "\x03c\a~\r)\f\x01"}, + {"internal/abi", "\xb4\x01\x91\x01"}, {"internal/asan", "\xc5\x02"}, - {"internal/bisect", "\xa4\x02\x0e\x01"}, - {"internal/buildcfg", "qG_\x06\x02\x05\v\x01"}, - {"internal/bytealg", "\xae\x01\x97\x01"}, + {"internal/bisect", "\xa3\x02\x0f\x01"}, + {"internal/buildcfg", "pG_\x06\x02\x05\f\x01"}, + {"internal/bytealg", "\xad\x01\x98\x01"}, {"internal/byteorder", ""}, {"internal/cfg", ""}, - {"internal/chacha8rand", "\x9a\x01\x1b\x90\x01"}, + {"internal/chacha8rand", "\x99\x01\x1b\x91\x01"}, {"internal/copyright", ""}, {"internal/coverage", ""}, {"internal/coverage/calloc", ""}, - {"internal/coverage/cfile", "k\x06\x17\x16\x01\x02\x01\x01\x01\x01\x01\x01\x01$\x01\x1e,\x06\a\v\x01\x03\f\x06"}, - {"internal/coverage/cformat", "\x04m-\x04I\f6\x01\x02\f"}, - {"internal/coverage/cmerge", "q-Z"}, - {"internal/coverage/decodecounter", "g\n-\v\x02@,\x18\x16"}, - {"internal/coverage/decodemeta", "\x02e\n\x17\x16\v\x02@,"}, - {"internal/coverage/encodecounter", "\x02e\n-\f\x01\x02>\f \x16"}, - {"internal/coverage/encodemeta", "\x02\x01d\n\x13\x04\x16\r\x02>,."}, - {"internal/coverage/pods", "\x04m-y\x06\x05\v\x02\x01"}, + {"internal/coverage/cfile", "j\x06\x17\x16\x01\x02\x01\x01\x01\x01\x01\x01\x01#\x01 +\x06\a\f\x01\x03\f\x06"}, + {"internal/coverage/cformat", "\x04l-\x04J\f6\x01\x02\f"}, + {"internal/coverage/cmerge", "p-["}, + {"internal/coverage/decodecounter", "f\n-\v\x02A+\x19\x16"}, + {"internal/coverage/decodemeta", "\x02d\n\x17\x16\v\x02A+"}, + {"internal/coverage/encodecounter", "\x02d\n-\f\x01\x02?\f\x1f\x17"}, + {"internal/coverage/encodemeta", "\x02\x01c\n\x13\x04\x16\r\x02?+/"}, + {"internal/coverage/pods", "\x04l-y\x06\x05\f\x02\x01"}, {"internal/coverage/rtcov", "\xc5\x02"}, - {"internal/coverage/slicereader", "g\nzZ"}, - {"internal/coverage/slicewriter", "qz"}, - {"internal/coverage/stringtab", "q8\x04>"}, + {"internal/coverage/slicereader", "f\n{Z"}, + {"internal/coverage/slicewriter", "p{"}, + {"internal/coverage/stringtab", "p8\x04?"}, {"internal/coverage/test", ""}, 
{"internal/coverage/uleb128", ""}, {"internal/cpu", "\xc5\x02"}, - {"internal/dag", "\x04m\xbc\x01\x03"}, - {"internal/diff", "\x03n\xbd\x01\x02"}, - {"internal/exportdata", "\x02\x01k\x03\x03]\x1a,\x01\x05\x12\x01\x02"}, - {"internal/filepathlite", "n+:\x19A"}, - {"internal/fmtsort", "\x04\x9b\x02\x0e"}, - {"internal/fuzz", "\x03\nA\x19\x04\x03\x03\x01\f\x0355\r\x02\x1d\x01\x05\x02\x05\v\x01\x02\x01\x01\v\x04\x02"}, + {"internal/dag", "\x04l\xbd\x01\x03"}, + {"internal/diff", "\x03m\xbe\x01\x02"}, + {"internal/exportdata", "\x02\x01j\x03\x03]\x1b+\x01\x05\x13\x01\x02"}, + {"internal/filepathlite", "m+:\x1aA"}, + {"internal/fmtsort", "\x04\x9a\x02\x0f"}, + {"internal/fuzz", "\x03\nA\x18\x04\x03\x03\x01\f\x0356\r\x02\x1c\x01\x05\x02\x05\f\x01\x02\x01\x01\v\x04\x02"}, {"internal/goarch", ""}, - {"internal/godebug", "\x97\x01 {\x01\x12"}, + {"internal/godebug", "\x96\x01 |\x01\x12"}, {"internal/godebugs", ""}, {"internal/goexperiment", ""}, {"internal/goos", ""}, - {"internal/goroot", "\x97\x02\x01\x05\x13\x02"}, + {"internal/goroot", "\x96\x02\x01\x05\x14\x02"}, {"internal/gover", "\x04"}, {"internal/goversion", ""}, {"internal/itoa", ""}, - {"internal/lazyregexp", "\x97\x02\v\x0e\x02"}, - {"internal/lazytemplate", "\xeb\x01,\x19\x02\v"}, + {"internal/lazyregexp", "\x96\x02\v\x0f\x02"}, + {"internal/lazytemplate", "\xeb\x01+\x1a\x02\v"}, {"internal/msan", "\xc5\x02"}, {"internal/nettrace", ""}, - {"internal/obscuretestdata", "f\x85\x01,"}, - {"internal/oserror", "n"}, - {"internal/pkgbits", "\x03K\x19\a\x03\x05\vj\x0e\x1e\r\v\x01"}, + {"internal/obscuretestdata", "e\x86\x01+"}, + {"internal/oserror", "m"}, + {"internal/pkgbits", "\x03K\x18\a\x03\x05\vk\x0e\x1d\r\f\x01"}, {"internal/platform", ""}, - {"internal/poll", "nO\x1a\x149\x0e\x01\x01\v\x06"}, - {"internal/profile", "\x03\x04g\x03z7\f\x01\x01\x0f"}, + {"internal/poll", "mO\x1a\x158\x0f\x01\x01\v\x06"}, + {"internal/profile", "\x03\x04f\x03{6\r\x01\x01\x0f"}, {"internal/profilerecord", ""}, - {"internal/race", "\x95\x01\xb0\x01"}, - {"internal/reflectlite", "\x95\x01 3+\x1a\x02"}, {"internal/syslist", ""}, - {"internal/testenv", "\x03\na\x02\x01*\x1a\x10'+\x01\x05\a\v\x01\x02\x02\x01\n"}, + {"internal/testenv", "\x03\n`\x02\x01*\x1a\x10(*\x01\x05\a\f\x01\x02\x02\x01\n"}, {"internal/testlog", "\xb2\x02\x01\x12"}, - {"internal/testpty", "n\x03f@\x1d"}, - {"internal/trace", "\x02\x01\x01\x06]\a\x03n\x03\x03\x06\x03\n5\x01\x02\x0f\x06"}, - {"internal/trace/internal/testgen", "\x03d\nl\x03\x02\x03\x011\v\x0e"}, - {"internal/trace/internal/tracev1", "\x03\x01c\a\x03t\x06\r5\x01"}, - {"internal/trace/raw", "\x02e\nq\x03\x06D\x01\x11"}, - {"internal/trace/testtrace", "\x02\x01k\x03l\x03\x06\x057\v\x02\x01"}, - {"internal/trace/tracev2", ""}, - {"internal/trace/traceviewer", "\x02^\v\x06\x1a<\x16\a\a\x04\t\n\x15\x01\x05\a\v\x01\x02\r"}, + {"internal/testpty", "m\x03\xa6\x01"}, + {"internal/trace", "\x02\x01\x01\x06\\\a\x03m\x01\x01\x06\x06\x03\n5\x01\x02\x0f"}, + {"internal/trace/event", ""}, + {"internal/trace/event/go122", "pm"}, + {"internal/trace/internal/oldtrace", "\x03\x01b\a\x03m\b\x06\r5\x01"}, + {"internal/trace/internal/testgen/go122", "\x03c\nl\x01\x01\x03\x04\x010\v\x0f"}, + {"internal/trace/raw", "\x02d\nm\b\x06D\x01\x11"}, + {"internal/trace/testtrace", "\x02\x01j\x03l\x05\x05\x056\f\x02\x01"}, + {"internal/trace/traceviewer", "\x02]\v\x06\x1a<\x16\b\a\x04\t\n\x14\x01\x05\a\f\x01\x02\r"}, {"internal/trace/traceviewer/format", ""}, - {"internal/trace/version", "qq\t"}, - {"internal/txtar", "\x03n\xa6\x01\x19"}, + 
{"internal/trace/version", "pm\x01\r"}, + {"internal/txtar", "\x03m\xa6\x01\x1a"}, {"internal/types/errors", "\xaf\x02"}, {"internal/unsafeheader", "\xc5\x02"}, - {"internal/xcoff", "Z\r\a\x03`\x1a,\x18\x01"}, - {"internal/zstd", "g\a\x03z\x0f"}, - {"io", "n\xc4\x01"}, - {"io/fs", "n+*(1\x11\x12\x04"}, - {"io/ioutil", "\xeb\x01\x01+\x16\x03"}, - {"iter", "\xc9\x01[!"}, - {"log", "qz\x05'\r\x0e\x01\f"}, + {"internal/xcoff", "Y\r\a\x03`\x1b+\x19\x01"}, + {"internal/zstd", "f\a\x03{\x0f"}, + {"io", "m\xc5\x01"}, + {"io/fs", "m+*)0\x12\x12\x04"}, + {"io/ioutil", "\xeb\x01\x01*\x17\x03"}, + {"iter", "\xc8\x01[\""}, + {"log", "p{\x05&\r\x0f\x01\f"}, {"log/internal", ""}, - {"log/slog", "\x03\nU\t\x03\x03z\x04\x01\x02\x02\x04'\x05\t\x02\x01\x02\x01\f\x02\x02\x02"}, + {"log/slog", "\x03\nT\t\x03\x03{\x04\x01\x02\x02\x04&\x05\n\x02\x01\x02\x01\f\x02\x02\x02"}, {"log/slog/internal", ""}, - {"log/slog/internal/benchmarks", "\ra\x03z\x06\x03;\x10"}, + {"log/slog/internal/benchmarks", "\r`\x03{\x06\x03;\x10"}, {"log/slog/internal/buffer", "\xb2\x02"}, {"log/slog/internal/slogtest", "\xf1\x01"}, - {"log/syslog", "n\x03~\x12\x16\x19\x02\r"}, + {"log/syslog", "m\x03\x7f\x12\x15\x1a\x02\r"}, {"maps", "\xee\x01W"}, - {"math", "\xfa\x01K"}, - {"math/big", "\x03k\x03)Q\r\x02\x021\x02\x01\x02\x13"}, + {"math", "\xad\x01MK"}, + {"math/big", "\x03j\x03)\x14>\r\x02\x023\x01\x02\x13"}, {"math/bits", "\xc5\x02"}, {"math/cmplx", "\xf8\x01\x02"}, - {"math/rand", "\xb6\x01B:\x01\x12"}, - {"math/rand/v2", "n,\x02\\\x02K"}, - {"mime", "\x02\x01c\b\x03z\f \x16\x03\x02\x0f\x02"}, - {"mime/multipart", "\x02\x01G$\x03E5\f\x01\x06\x02\x15\x02\x06\x10\x02\x01\x15"}, - {"mime/quotedprintable", "\x02\x01nz"}, - {"net", "\x04\ta+\x1d\a\x04\x05\x05\a\x01\x04\x14\x01%\x06\r\t\x05\x01\x01\v\x06\a"}, - {"net/http", "\x02\x01\x04\x04\x02=\b\x14\x01\a\x03E5\x01\x03\b\x01\x02\x02\x02\x01\x02\x06\x02\x01\x01\n\x01\x01\x05\x01\x02\x05\t\x01\x01\x01\x02\x01\f\x02\x02\x02\b\x01\x01\x01"}, - {"net/http/cgi", "\x02P\x1c\x03z\x04\b\n\x01\x13\x01\x01\x01\x04\x01\x05\x02\t\x02\x01\x0f\x0e"}, - {"net/http/cookiejar", "\x04j\x03\x90\x01\x01\b\f\x17\x03\x02\r\x04"}, - {"net/http/fcgi", "\x02\x01\nZ\a\x03z\x16\x01\x01\x14\x19\x02\r"}, - {"net/http/httptest", "\x02\x01\nE\x02\x1c\x01z\x04\x12\x01\n\t\x02\x18\x01\x02\r\x0e"}, - {"net/http/httptrace", "\rEo@\x14\n "}, - {"net/http/httputil", "\x02\x01\na\x03z\x04\x0f\x03\x01\x05\x02\x01\v\x01\x1a\x02\r\x0e"}, - {"net/http/internal", "\x02\x01k\x03z"}, + {"math/rand", "\xb5\x01C:\x01\x12"}, + {"math/rand/v2", "m,\x02]\x02K"}, + {"mime", "\x02\x01b\b\x03{\f\x1f\x17\x03\x02\x0f\x02"}, + {"mime/multipart", "\x02\x01G#\x03E6\f\x01\x06\x02\x14\x02\x06\x11\x02\x01\x15"}, + {"mime/quotedprintable", "\x02\x01m{"}, + {"net", "\x04\t`+\x1d\a\x04\x05\f\x01\x04\x15\x01$\x06\r\n\x05\x01\x01\v\x06\a"}, + {"net/http", "\x02\x01\x04\x04\x02=\b\x13\x01\a\x03E6\x01\x03\b\x01\x02\x02\x02\x01\x02\x06\x02\x01\n\x01\x01\x05\x01\x02\x05\n\x01\x01\x01\x02\x01\f\x02\x02\x02\b\x01\x01\x01"}, + {"net/http/cgi", "\x02P\x1b\x03{\x04\b\n\x01\x12\x01\x01\x01\x04\x01\x05\x02\n\x02\x01\x0f\x0e"}, + {"net/http/cookiejar", "\x04i\x03\x91\x01\x01\b\v\x18\x03\x02\r\x04"}, + {"net/http/fcgi", "\x02\x01\nY\a\x03{\x16\x01\x01\x13\x1a\x02\r"}, + {"net/http/httptest", "\x02\x01\nE\x02\x1b\x01{\x04\x12\x01\t\t\x02\x19\x01\x02\r\x0e"}, + {"net/http/httptrace", "\rEnA\x13\n!"}, + {"net/http/httputil", "\x02\x01\n`\x03{\x04\x0f\x03\x01\x05\x02\x01\n\x01\x1b\x02\r\x0e"}, + {"net/http/internal", "\x02\x01j\x03{"}, {"net/http/internal/ascii", 
"\xb0\x02\x11"}, - {"net/http/internal/httpcommon", "\ra\x03\x96\x01\x0e\x01\x18\x01\x01\x02\x1b\x02"}, {"net/http/internal/testcert", "\xb0\x02"}, - {"net/http/pprof", "\x02\x01\nd\x19,\x11$\x04\x13\x14\x01\r\x06\x02\x01\x02\x01\x0f"}, - {"net/internal/cgotest", "\xd7\x01n"}, - {"net/internal/socktest", "q\xc1\x01\x02"}, - {"net/mail", "\x02l\x03z\x04\x0f\x03\x14\x1b\x02\r\x04"}, - {"net/netip", "\x04j+\x01#;\x025\x15"}, - {"net/rpc", "\x02g\x05\x03\x10\n`\x04\x12\x01\x1d\x0e\x03\x02"}, - {"net/rpc/jsonrpc", "k\x03\x03z\x16\x11 "}, - {"net/smtp", "\x19.\v\x14\b\x03z\x16\x14\x1b"}, - {"net/textproto", "\x02\x01k\x03z\r\t.\x01\x02\x13"}, - {"net/url", "n\x03\x86\x01%\x11\x02\x01\x15"}, - {"os", "n+\x19\v\t\r\x03\x01\x04\x10\x018\t\x05\x01\x01\v\x06"}, - {"os/exec", "\x03\naH \x01\x14\x01+\x06\a\v\x01\x04\v"}, + {"net/http/pprof", "\x02\x01\nc\x19,\x11%\x04\x13\x13\x01\r\x06\x03\x01\x02\x01\x0f"}, + {"net/internal/cgotest", ""}, + {"net/internal/socktest", "p\xc2\x01\x02"}, + {"net/mail", "\x02k\x03{\x04\x0f\x03\x13\x1c\x02\r\x04"}, + {"net/netip", "\x04i+\x01#<\x025\x15"}, + {"net/rpc", "\x02f\x05\x03\x10\na\x04\x12\x01\x1c\x0f\x03\x02"}, + {"net/rpc/jsonrpc", "j\x03\x03{\x16\x10!"}, + {"net/smtp", "\x19.\v\x13\b\x03{\x16\x13\x1c"}, + {"net/textproto", "\x02\x01j\x03{\r\t.\x01\x02\x13"}, + {"net/url", "m\x03\x87\x01$\x12\x02\x01\x15"}, + {"os", "m+\x01\x18\x03\b\t\r\x03\x01\x04\x11\x017\n\x05\x01\x01\v\x06"}, + {"os/exec", "\x03\n`H \x01\x15\x01*\x06\a\f\x01\x04\v"}, {"os/exec/internal/fdtest", "\xb4\x02"}, - {"os/signal", "\r\x8a\x02\x16\x05\x02"}, - {"os/user", "qfM\v\x01\x02\x02\x11"}, - {"path", "n+\xaa\x01"}, - {"path/filepath", "n+\x19:+\r\t\x03\x04\x0f"}, - {"plugin", "n\xc4\x01\x13"}, - {"reflect", "n'\x04\x1c\b\f\x05\x02\x18\x06\n,\v\x03\x0f\x02\x02"}, + {"os/signal", "\r\x89\x02\x17\x05\x02"}, + {"os/user", "\x02\x01j\x03{+\r\f\x01\x02"}, + {"path", "m+\xab\x01"}, + {"path/filepath", "m+\x19;*\r\n\x03\x04\x0f"}, + {"plugin", "m"}, + {"reflect", "m'\x04\x1c\b\f\x04\x02\x1a\x06\n+\f\x03\x0f\x02\x02"}, {"reflect/internal/example1", ""}, {"reflect/internal/example2", ""}, - {"regexp", "\x03\xe8\x018\n\x02\x01\x02\x0f\x02"}, + {"regexp", "\x03\xe8\x017\v\x02\x01\x02\x0f\x02"}, {"regexp/syntax", "\xad\x02\x01\x01\x01\x11\x02"}, - {"runtime", "\x95\x01\x04\x01\x02\f\x06\a\x02\x01\x01\x0f\x04\x01\x01\x01\x01\x03\x0fc"}, - {"runtime/cgo", "\xd0\x01b\x01\x12"}, - {"runtime/coverage", "\xa0\x01K"}, - {"runtime/debug", "qUQ\r\t\x02\x01\x0f\x06"}, + {"runtime", "\x94\x01\x04\x01\x02\f\x06\a\x02\x01\x01\x0f\x03\x01\x01\x01\x01\x01\x03s"}, + {"runtime/coverage", "\x9f\x01L"}, + {"runtime/debug", "pUQ\r\n\x02\x01\x0f\x06"}, + {"runtime/internal/startlinetest", ""}, {"runtime/internal/wasitest", ""}, - {"runtime/metrics", "\xb7\x01A,!"}, - {"runtime/pprof", "\x02\x01\x01\x03\x06Z\a\x03$3#\r\x1f\r\t\x01\x01\x01\x02\x02\b\x03\x06"}, - {"runtime/race", ""}, - {"runtime/trace", "\rdz9\x0e\x01\x12"}, + {"runtime/metrics", "\xb6\x01B+\""}, + {"runtime/pprof", "\x02\x01\x01\x03\x06Y\a\x03$3$\r\x1e\r\n\x01\x01\x01\x02\x02\b\x03\x06"}, + {"runtime/race", "\xab\x02"}, + {"runtime/race/internal/amd64v1", ""}, + {"runtime/trace", "\rc{8\x0f\x01\x12"}, {"slices", "\x04\xea\x01\fK"}, - {"sort", "\xca\x0103"}, - {"strconv", "n+:%\x02I"}, - {"strings", "n'\x04:\x18\x03\f8\x0f\x02\x02"}, + {"sort", "\xc9\x0113"}, + {"strconv", "m+:&\x02I"}, + {"strings", "m'\x04:\x19\x03\f8\x0f\x02\x02"}, {"structs", ""}, - {"sync", "\xc9\x01\vP\x0f\x12"}, + {"sync", "\xc8\x01\vP\x10\x12"}, {"sync/atomic", "\xc5\x02"}, - 
{"syscall", "n'\x01\x03\x01\x1b\b\x03\x03\x06[\x0e\x01\x12"}, - {"testing", "\x03\na\x02\x01X\x0f\x13\r\x04\x1b\x06\x02\x05\x03\x05\x01\x02\x01\x02\x01\f\x02\x02\x02"}, - {"testing/fstest", "n\x03z\x01\v%\x11\x03\b\a"}, - {"testing/internal/testdeps", "\x02\v\xa7\x01'\x10,\x03\x05\x03\b\x06\x02\r"}, - {"testing/iotest", "\x03k\x03z\x04"}, - {"testing/quick", "p\x01\x87\x01\x04#\x11\x0f"}, - {"testing/slogtest", "\ra\x03\x80\x01.\x05\x11\n"}, - {"text/scanner", "\x03nz,*\x02"}, - {"text/tabwriter", "qzX"}, - {"text/template", "n\x03B8\x01\v\x1f\x01\x05\x01\x02\x05\f\x02\f\x03\x02"}, - {"text/template/parse", "\x03n\xb3\x01\v\x01\x11\x02"}, - {"time", "n+\x1d\x1d'*\x0e\x02\x11"}, - {"time/tzdata", "n\xc6\x01\x11"}, + {"syscall", "m(\x03\x01\x1b\b\x03\x03\x06\aT\x0f\x01\x12"}, + {"testing", "\x03\n`\x02\x01G\x11\x0f\x14\r\x04\x1a\x06\x02\x05\x02\a\x01\x02\x01\x02\x01\f\x02\x02\x02"}, + {"testing/fstest", "m\x03{\x01\v$\x12\x03\b\a"}, + {"testing/internal/testdeps", "\x02\v\xa6\x01'\x11+\x03\x05\x03\b\a\x02\r"}, + {"testing/iotest", "\x03j\x03{\x04"}, + {"testing/quick", "o\x01\x88\x01\x04\"\x12\x0f"}, + {"testing/slogtest", "\r`\x03\x81\x01-\x05\x12\n"}, + {"text/scanner", "\x03m{++\x02"}, + {"text/tabwriter", "p{X"}, + {"text/template", "m\x03B9\x01\v\x1e\x01\x05\x01\x02\x05\r\x02\f\x03\x02"}, + {"text/template/parse", "\x03m\xb3\x01\f\x01\x11\x02"}, + {"time", "m+\x1d\x1d()\x0f\x02\x11"}, + {"time/tzdata", "m\xc7\x01\x11"}, {"unicode", ""}, {"unicode/utf16", ""}, {"unicode/utf8", ""}, - {"unique", "\x95\x01>\x01P\x0e\x13\x12"}, + {"unique", "\x94\x01>\x01P\x0f\x13\x12"}, {"unsafe", ""}, - {"vendor/golang.org/x/crypto/chacha20", "\x10W\a\x8c\x01*&"}, - {"vendor/golang.org/x/crypto/chacha20poly1305", "\x10W\a\xd8\x01\x04\x01"}, - {"vendor/golang.org/x/crypto/cryptobyte", "d\n\x03\x88\x01& \n"}, + {"vendor/golang.org/x/crypto/chacha20", "\x10V\a\x8d\x01)'"}, + {"vendor/golang.org/x/crypto/chacha20poly1305", "\x10V\a\xd9\x01\x04\x01\a"}, + {"vendor/golang.org/x/crypto/cryptobyte", "c\n\x03\x89\x01%!\n"}, {"vendor/golang.org/x/crypto/cryptobyte/asn1", ""}, {"vendor/golang.org/x/crypto/internal/alias", "\xc5\x02"}, - {"vendor/golang.org/x/crypto/internal/poly1305", "Q\x16\x93\x01"}, - {"vendor/golang.org/x/net/dns/dnsmessage", "n"}, - {"vendor/golang.org/x/net/http/httpguts", "\x81\x02\x14\x1b\x13\r"}, - {"vendor/golang.org/x/net/http/httpproxy", "n\x03\x90\x01\x15\x01\x19\x13\r"}, - {"vendor/golang.org/x/net/http2/hpack", "\x03k\x03zG"}, - {"vendor/golang.org/x/net/idna", "q\x87\x018\x13\x10\x02\x01"}, - {"vendor/golang.org/x/net/nettest", "\x03d\a\x03z\x11\x05\x16\x01\f\v\x01\x02\x02\x01\n"}, - {"vendor/golang.org/x/sys/cpu", "\x97\x02\r\v\x01\x15"}, - {"vendor/golang.org/x/text/secure/bidirule", "n\xd5\x01\x11\x01"}, - {"vendor/golang.org/x/text/transform", "\x03k}X"}, - {"vendor/golang.org/x/text/unicode/bidi", "\x03\bf~?\x15"}, - {"vendor/golang.org/x/text/unicode/norm", "g\nzG\x11\x11"}, - {"weak", "\x95\x01\x8f\x01!"}, + {"vendor/golang.org/x/crypto/internal/poly1305", "Q\x15\x94\x01"}, + {"vendor/golang.org/x/net/dns/dnsmessage", "m"}, + {"vendor/golang.org/x/net/http/httpguts", "\x81\x02\x13\x1c\x13\r"}, + {"vendor/golang.org/x/net/http/httpproxy", "m\x03\x91\x01\x0f\x05\x01\x1a\x13\r"}, + {"vendor/golang.org/x/net/http2/hpack", "\x03j\x03{G"}, + {"vendor/golang.org/x/net/idna", "p\x88\x018\x13\x10\x02\x01"}, + {"vendor/golang.org/x/net/nettest", "\x03c\a\x03{\x11\x05\x15\x01\f\f\x01\x02\x02\x01\n"}, + {"vendor/golang.org/x/sys/cpu", "\x96\x02\r\f\x01\x15"}, + 
{"vendor/golang.org/x/text/secure/bidirule", "m\xd6\x01\x11\x01"}, + {"vendor/golang.org/x/text/transform", "\x03j~X"}, + {"vendor/golang.org/x/text/unicode/bidi", "\x03\be\x7f?\x15"}, + {"vendor/golang.org/x/text/unicode/norm", "f\n{G\x11\x11"}, + {"weak", "\x94\x01\x8f\x01\""}, } diff --git a/internal/stdlib/generate.go b/internal/stdlib/generate.go index 4c67d8bd797..cfef0a2438f 100644 --- a/internal/stdlib/generate.go +++ b/internal/stdlib/generate.go @@ -246,6 +246,7 @@ func deps() { cmd := exec.Command("go", "list", "-deps", "-json", "std") cmd.Stdout = stdout cmd.Stderr = os.Stderr + cmd.Env = append(os.Environ(), "CGO_ENABLED=0", "GOOS=linux", "GOARCH=amd64") if err := cmd.Run(); err != nil { log.Fatal(err) } @@ -336,6 +337,7 @@ var deps = [...]pkginfo{ cmd := exec.Command("go", "list", t.flag, "net/http") cmd.Stdout = stdout cmd.Stderr = os.Stderr + cmd.Env = append(os.Environ(), "CGO_ENABLED=0", "GOOS=linux", "GOARCH=amd64") if err := cmd.Run(); err != nil { log.Fatal(err) } diff --git a/internal/stdlib/manifest.go b/internal/stdlib/manifest.go index 00776a31b60..2b418796abb 100644 --- a/internal/stdlib/manifest.go +++ b/internal/stdlib/manifest.go @@ -7119,7 +7119,6 @@ var PackageSymbols = map[string][]Symbol{ {"FormatFileInfo", Func, 21}, {"Glob", Func, 16}, {"GlobFS", Type, 16}, - {"Lstat", Func, 25}, {"ModeAppend", Const, 16}, {"ModeCharDevice", Const, 16}, {"ModeDevice", Const, 16}, @@ -7144,8 +7143,6 @@ var PackageSymbols = map[string][]Symbol{ {"ReadDirFile", Type, 16}, {"ReadFile", Func, 16}, {"ReadFileFS", Type, 16}, - {"ReadLink", Func, 25}, - {"ReadLinkFS", Type, 25}, {"SkipAll", Var, 20}, {"SkipDir", Var, 16}, {"Stat", Func, 16}, @@ -9149,8 +9146,6 @@ var PackageSymbols = map[string][]Symbol{ {"(*ProcessState).SysUsage", Method, 0}, {"(*ProcessState).SystemTime", Method, 0}, {"(*ProcessState).UserTime", Method, 0}, - {"(*Root).Chmod", Method, 25}, - {"(*Root).Chown", Method, 25}, {"(*Root).Close", Method, 24}, {"(*Root).Create", Method, 24}, {"(*Root).FS", Method, 24}, @@ -16759,11 +16754,9 @@ var PackageSymbols = map[string][]Symbol{ }, "testing/fstest": { {"(MapFS).Glob", Method, 16}, - {"(MapFS).Lstat", Method, 25}, {"(MapFS).Open", Method, 16}, {"(MapFS).ReadDir", Method, 16}, {"(MapFS).ReadFile", Method, 16}, - {"(MapFS).ReadLink", Method, 25}, {"(MapFS).Stat", Method, 16}, {"(MapFS).Sub", Method, 16}, {"MapFS", Type, 16}, diff --git a/internal/stdlib/testdata/nethttp.deps b/internal/stdlib/testdata/nethttp.deps index e1235e84932..71e58a0c693 100644 --- a/internal/stdlib/testdata/nethttp.deps +++ b/internal/stdlib/testdata/nethttp.deps @@ -19,8 +19,8 @@ internal/race internal/runtime/math internal/runtime/sys internal/runtime/maps +internal/runtime/syscall internal/stringslite -internal/trace/tracev2 runtime internal/reflectlite errors @@ -122,6 +122,7 @@ crypto/internal/fips140/tls13 vendor/golang.org/x/crypto/internal/alias vendor/golang.org/x/crypto/chacha20 vendor/golang.org/x/crypto/internal/poly1305 +vendor/golang.org/x/sys/cpu vendor/golang.org/x/crypto/chacha20poly1305 crypto/internal/hpke crypto/md5 @@ -132,7 +133,6 @@ crypto/sha1 crypto/sha256 crypto/tls/internal/fips140tls crypto/dsa -crypto/x509/internal/macos encoding/hex crypto/x509/pkix encoding/base64 @@ -140,13 +140,13 @@ encoding/pem maps vendor/golang.org/x/net/dns/dnsmessage internal/nettrace +internal/singleflight weak unique net/netip -internal/routebsd -internal/singleflight net net/url +path/filepath crypto/x509 crypto/tls vendor/golang.org/x/text/transform @@ -162,10 +162,8 @@ 
vendor/golang.org/x/net/http/httpproxy vendor/golang.org/x/net/http2/hpack mime mime/quotedprintable -path/filepath mime/multipart net/http/httptrace net/http/internal net/http/internal/ascii -net/http/internal/httpcommon net/http diff --git a/internal/stdlib/testdata/nethttp.imports b/internal/stdlib/testdata/nethttp.imports index 77e78696bdd..de41e46c0fe 100644 --- a/internal/stdlib/testdata/nethttp.imports +++ b/internal/stdlib/testdata/nethttp.imports @@ -27,7 +27,6 @@ net net/http/httptrace net/http/internal net/http/internal/ascii -net/http/internal/httpcommon net/textproto net/url os diff --git a/internal/typeparams/termlist.go b/internal/typeparams/termlist.go index cbd12f80131..9bc29143f6a 100644 --- a/internal/typeparams/termlist.go +++ b/internal/typeparams/termlist.go @@ -1,3 +1,6 @@ +// Code generated by "go test -run=Generate -write=all"; DO NOT EDIT. +// Source: ../../cmd/compile/internal/types2/termlist.go + // Copyright 2021 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. @@ -7,8 +10,8 @@ package typeparams import ( - "bytes" "go/types" + "strings" ) // A termlist represents the type set represented by the union @@ -22,15 +25,18 @@ type termlist []*term // It is in normal form. var allTermlist = termlist{new(term)} +// termSep is the separator used between individual terms. +const termSep = " | " + // String prints the termlist exactly (without normalization). func (xl termlist) String() string { if len(xl) == 0 { return "∅" } - var buf bytes.Buffer + var buf strings.Builder for i, x := range xl { if i > 0 { - buf.WriteString(" | ") + buf.WriteString(termSep) } buf.WriteString(x.String()) } diff --git a/internal/typeparams/typeterm.go b/internal/typeparams/typeterm.go index 7350bb702a1..fa758cdc989 100644 --- a/internal/typeparams/typeterm.go +++ b/internal/typeparams/typeterm.go @@ -1,3 +1,6 @@ +// Code generated by "go test -run=Generate -write=all"; DO NOT EDIT. +// Source: ../../cmd/compile/internal/types2/typeterm.go + // Copyright 2021 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. From 33f80b505bae8678854a2cea554dbbcf57b43f5e Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Thu, 3 Apr 2025 15:03:25 -0400 Subject: [PATCH 171/270] typesinternal: remove RequiresFullInfo We're not going to require that functions have a fully populated types.Info. Change-Id: I70e9a56fb71adc2a141bcc3937f4c9de8ca46f29 Reviewed-on: https://go-review.googlesource.com/c/tools/+/662696 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan --- internal/typesinternal/types.go | 21 -------------- internal/typesinternal/types_test.go | 41 ---------------------------- 2 files changed, 62 deletions(-) delete mode 100644 internal/typesinternal/types_test.go diff --git a/internal/typesinternal/types.go b/internal/typesinternal/types.go index d9ef55ebc77..cc244689ef8 100644 --- a/internal/typesinternal/types.go +++ b/internal/typesinternal/types.go @@ -7,12 +7,10 @@ package typesinternal import ( - "fmt" "go/ast" "go/token" "go/types" "reflect" - "strings" "unsafe" "golang.org/x/tools/internal/aliases" @@ -144,22 +142,3 @@ func NewTypesInfo() *types.Info { FileVersions: map[*ast.File]string{}, } } - -// RequiresFullInfo panics unless info has non-nil values for all maps. 
-func RequiresFullInfo(info *types.Info) { - v := reflect.ValueOf(info).Elem() - t := v.Type() - var missing []string - for i := range t.NumField() { - f := t.Field(i) - if f.Type.Kind() == reflect.Map && v.Field(i).IsNil() { - missing = append(missing, f.Name) - } - } - if len(missing) > 0 { - msg := fmt.Sprintf(`A fully populated types.Info value is required. -This one is missing the following fields: -%s`, strings.Join(missing, ", ")) - panic(msg) - } -} diff --git a/internal/typesinternal/types_test.go b/internal/typesinternal/types_test.go deleted file mode 100644 index 2a715549408..00000000000 --- a/internal/typesinternal/types_test.go +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright 2025 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package typesinternal - -import ( - "fmt" - "go/ast" - "go/types" - "regexp" - "testing" -) - -func TestRequiresFullInfo(t *testing.T) { - info := &types.Info{ - Uses: map[*ast.Ident]types.Object{}, - Scopes: map[ast.Node]*types.Scope{}, - } - panics(t, "Types, Instances, Defs, Implicits, Selections, FileVersions", func() { - RequiresFullInfo(info) - }) - - // Shouldn't panic. - RequiresFullInfo(NewTypesInfo()) -} - -// panics asserts that f() panics with with a value whose printed form matches the regexp want. -// Copied from go/analysis/internal/checker/fix_test.go. -func panics(t *testing.T, want string, f func()) { - defer func() { - if x := recover(); x == nil { - t.Errorf("function returned normally, wanted panic") - } else if m, err := regexp.MatchString(want, fmt.Sprint(x)); err != nil { - t.Errorf("panics: invalid regexp %q", want) - } else if !m { - t.Errorf("function panicked with value %q, want match for %q", x, want) - } - }() - f() -} From 17ce4c7b72f9e9fabeda5ed65e0ad9e19da506f7 Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Thu, 3 Apr 2025 15:10:26 -0400 Subject: [PATCH 172/270] refactor/eg: return error if some info maps are missing NewTransformer requires that some fields of its argument types.Info are populated. This CL checks those requirements, which were formerly implicit. Change-Id: I8ecb7b211e05cd143d04073366927fbd877cf483 Reviewed-on: https://go-review.googlesource.com/c/tools/+/662697 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan --- refactor/eg/eg.go | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/refactor/eg/eg.go b/refactor/eg/eg.go index 65a7f690bfd..8de1fd7d1de 100644 --- a/refactor/eg/eg.go +++ b/refactor/eg/eg.go @@ -8,6 +8,7 @@ package eg // import "golang.org/x/tools/refactor/eg" import ( "bytes" + "errors" "fmt" "go/ast" "go/format" @@ -159,6 +160,10 @@ type Transformer struct { // described in the package documentation. // tmplInfo is the type information for tmplFile. func NewTransformer(fset *token.FileSet, tmplPkg *types.Package, tmplFile *ast.File, tmplInfo *types.Info, verbose bool) (*Transformer, error) { + // These maps are required by types.Info.TypeOf. + if tmplInfo.Types == nil || tmplInfo.Defs == nil || tmplInfo.Uses == nil { + return nil, errors.New("eg.NewTransformer: types.Info argument missing one of Types, Defs or Uses") + } // Check the template. 
beforeSig := funcSig(tmplPkg, "before") if beforeSig == nil { From e29f9ae7c1609b93592398e576786748f91393a9 Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Thu, 3 Apr 2025 15:41:49 -0400 Subject: [PATCH 173/270] refactor/satisfy: check for presence of types.Info maps Although Finder.Find's doc says that it requires certain maps in its argument types.Info, it didn't actually check. Now it does. Change-Id: I4938f9cbca9377b935d8a027d59438b9697558b7 Reviewed-on: https://go-review.googlesource.com/c/tools/+/662698 Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI --- refactor/satisfy/find.go | 3 +++ 1 file changed, 3 insertions(+) diff --git a/refactor/satisfy/find.go b/refactor/satisfy/find.go index a897c3c2fd4..766cc575387 100644 --- a/refactor/satisfy/find.go +++ b/refactor/satisfy/find.go @@ -84,6 +84,9 @@ type Finder struct { // info.{Defs,Uses,Selections,Types} must have been populated by the // type-checker. func (f *Finder) Find(info *types.Info, files []*ast.File) { + if info.Defs == nil || info.Uses == nil || info.Selections == nil || info.Types == nil { + panic("Finder.Find: one of info.{Defs,Uses,Selections.Types} is not populated") + } if f.Result == nil { f.Result = make(map[Constraint]bool) } From b437eff8291cf46efe66e499f4c0ac5c8df770d5 Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Sat, 15 Mar 2025 08:00:39 -0400 Subject: [PATCH 174/270] go/types/typeutil: implement Callee and StaticCallee with Used Also, add a test for typesinternal.Used. It is employed by Callee and StaticCallee, and behaves differently from ClassifyCall in some cases. Change-Id: I21178a2cc8acdc20ebf669bb4741df03851be0b3 Reviewed-on: https://go-review.googlesource.com/c/tools/+/658235 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan --- go/types/typeutil/callee.go | 87 ++++++++++++-------- go/types/typeutil/callee_test.go | 1 + internal/typesinternal/classify_call.go | 44 ++-------- internal/typesinternal/classify_call_test.go | 71 ++++++++-------- 4 files changed, 100 insertions(+), 103 deletions(-) diff --git a/go/types/typeutil/callee.go b/go/types/typeutil/callee.go index 754380351e8..2e3ccaa3dc3 100644 --- a/go/types/typeutil/callee.go +++ b/go/types/typeutil/callee.go @@ -7,45 +7,23 @@ package typeutil import ( "go/ast" "go/types" - - "golang.org/x/tools/internal/typeparams" + _ "unsafe" // for linkname ) // Callee returns the named target of a function call, if any: // a function, method, builtin, or variable. // // Functions and methods may potentially have type parameters. +// +// Note: for calls of instantiated functions and methods, Callee returns +// the corresponding generic function or method on the generic type. func Callee(info *types.Info, call *ast.CallExpr) types.Object { - fun := ast.Unparen(call.Fun) - - // Look through type instantiation if necessary. - isInstance := false - switch fun.(type) { - case *ast.IndexExpr, *ast.IndexListExpr: - // When extracting the callee from an *IndexExpr, we need to check that - // it is a *types.Func and not a *types.Var. - // Example: Don't match a slice m within the expression `m[0]()`. - isInstance = true - fun, _, _, _ = typeparams.UnpackIndexExpr(fun) - } - - var obj types.Object - switch fun := fun.(type) { - case *ast.Ident: - obj = info.Uses[fun] // type, var, builtin, or declared func - case *ast.SelectorExpr: - if sel, ok := info.Selections[fun]; ok { - obj = sel.Obj() // method or field - } else { - obj = info.Uses[fun.Sel] // qualified identifier? 
- } + obj := used(info, call.Fun) + if obj == nil { + return nil } if _, ok := obj.(*types.TypeName); ok { - return nil // T(x) is a conversion, not a call - } - // A Func is required to match instantiations. - if _, ok := obj.(*types.Func); isInstance && !ok { - return nil // Was not a Func. + return nil } return obj } @@ -56,13 +34,54 @@ func Callee(info *types.Info, call *ast.CallExpr) types.Object { // Note: for calls of instantiated functions and methods, StaticCallee returns // the corresponding generic function or method on the generic type. func StaticCallee(info *types.Info, call *ast.CallExpr) *types.Func { - if f, ok := Callee(info, call).(*types.Func); ok && !interfaceMethod(f) { - return f + obj := used(info, call.Fun) + fn, _ := obj.(*types.Func) + if fn == nil || interfaceMethod(fn) { + return nil } - return nil + return fn } +// used is the implementation of [internal/typesinternal.Used]. +// It returns the object associated with e. +// See typesinternal.Used for a fuller description. +// This function should live in typesinternal, but cannot because it would +// create an import cycle. +// +//go:linkname used +func used(info *types.Info, e ast.Expr) types.Object { + if info.Types == nil || info.Uses == nil || info.Selections == nil { + panic("one of info.Types, info.Uses or info.Selections is nil; all must be populated") + } + // Look through type instantiation if necessary. + switch d := ast.Unparen(e).(type) { + case *ast.IndexExpr: + if info.Types[d.Index].IsType() { + e = d.X + } + case *ast.IndexListExpr: + e = d.X + } + + var obj types.Object + switch e := ast.Unparen(e).(type) { + case *ast.Ident: + obj = info.Uses[e] // type, var, builtin, or declared func + case *ast.SelectorExpr: + if sel, ok := info.Selections[e]; ok { + obj = sel.Obj() // method or field + } else { + obj = info.Uses[e.Sel] // qualified identifier? + } + } + return obj +} + +// interfaceMethod reports whether its argument is a method of an interface. +// This function should live in typesinternal, but cannot because it would create an import cycle. +// +//go:linkname interfaceMethod func interfaceMethod(f *types.Func) bool { - recv := f.Type().(*types.Signature).Recv() + recv := f.Signature().Recv() return recv != nil && types.IsInterface(recv.Type()) } diff --git a/go/types/typeutil/callee_test.go b/go/types/typeutil/callee_test.go index 1d48bc743a9..3f96533ffff 100644 --- a/go/types/typeutil/callee_test.go +++ b/go/types/typeutil/callee_test.go @@ -122,6 +122,7 @@ func testStaticCallee(t *testing.T, contents []string) { cfg := &types.Config{Importer: closure(packages)} info := &types.Info{ Instances: make(map[*ast.Ident]types.Instance), + Types: make(map[ast.Expr]types.TypeAndValue), Uses: make(map[*ast.Ident]types.Object), Selections: make(map[*ast.SelectorExpr]*types.Selection), FileVersions: make(map[*ast.File]string), diff --git a/internal/typesinternal/classify_call.go b/internal/typesinternal/classify_call.go index 1e79eb2b7ac..9d4da859370 100644 --- a/internal/typesinternal/classify_call.go +++ b/internal/typesinternal/classify_call.go @@ -8,6 +8,7 @@ import ( "fmt" "go/ast" "go/types" + _ "unsafe" ) // CallKind describes the function position of an [*ast.CallExpr]. 
@@ -74,6 +75,9 @@ func (k CallKind) String() string { // int(x) CallConversion nil // []byte("") CallConversion nil func ClassifyCall(info *types.Info, call *ast.CallExpr) (CallKind, types.Object) { + if info.Types == nil { + panic("ClassifyCall: info.Types is nil") + } if info.Types[call.Fun].IsType() { return CallConversion, nil } @@ -134,40 +138,8 @@ func Used(info *types.Info, e ast.Expr) types.Object { return used(info, e) } -// placeholder: will be moved and documented in the next CL. -func used(info *types.Info, e ast.Expr) types.Object { - e = ast.Unparen(e) - // Look through type instantiation if necessary. - isIndexed := false - switch d := e.(type) { - case *ast.IndexExpr: - if info.Types[d.Index].IsType() { - e = d.X - } - case *ast.IndexListExpr: - e = d.X - } - var obj types.Object - switch e := e.(type) { - case *ast.Ident: - obj = info.Uses[e] // type, var, builtin, or declared func - case *ast.SelectorExpr: - if sel, ok := info.Selections[e]; ok { - obj = sel.Obj() // method or field - } else { - obj = info.Uses[e.Sel] // qualified identifier? - } - } - // If a variable like a slice or map is being indexed, do not - // return an object. - if _, ok := obj.(*types.Var); ok && isIndexed { - return nil - } - return obj -} +//go:linkname used golang.org/x/tools/go/types/typeutil.used +func used(info *types.Info, e ast.Expr) types.Object -// placeholder: will be moved and documented in the next CL. -func interfaceMethod(f *types.Func) bool { - recv := f.Signature().Recv() - return recv != nil && types.IsInterface(recv.Type()) -} +//go:linkname interfaceMethod golang.org/x/tools/go/types/typeutil.interfaceMethod +func interfaceMethod(f *types.Func) bool diff --git a/internal/typesinternal/classify_call_test.go b/internal/typesinternal/classify_call_test.go index 8a6e75a3b0d..6a30ee280df 100644 --- a/internal/typesinternal/classify_call_test.go +++ b/internal/typesinternal/classify_call_test.go @@ -19,9 +19,6 @@ import ( ) func TestClassifyCallAndUsed(t *testing.T) { - // This function directly tests ClassifyCall, but since that - // function's second return value is always the result of Used, - // it effectively tests Used as well. const src = ` package p @@ -78,13 +75,7 @@ func TestClassifyCallAndUsed(t *testing.T) { Error: func(err error) { t.Fatal(err) }, Importer: importer.Default(), } - info := &types.Info{ - Instances: make(map[*ast.Ident]types.Instance), - Uses: make(map[*ast.Ident]types.Object), - Defs: make(map[*ast.Ident]types.Object), - Types: make(map[ast.Expr]types.TypeAndValue), - Selections: make(map[*ast.SelectorExpr]*types.Selection), - } + info := ti.NewTypesInfo() // parse f, err := parser.ParseFile(fset, "classify.go", src, 0) if err != nil { @@ -108,30 +99,36 @@ func TestClassifyCallAndUsed(t *testing.T) { printlnObj := types.Universe.Lookup("println") + typeParam := lookup("tests").Type().(*types.Signature).TypeParams().At(0).Obj() + + // A unique value for marking that Used returns the same object as ClassifyCall. + same := &types.Label{} + // Expected Calls are in the order of CallExprs at the end of src, above. 
wants := []struct { - kind ti.CallKind - obj types.Object + kind ti.CallKind + classifyObj types.Object // the object returned from ClassifyCall + usedObj types.Object // the object returned from Used, sometimes different }{ - {ti.CallStatic, lookup("g")}, // g - {ti.CallDynamic, nil}, // f - {ti.CallBuiltin, printlnObj}, // println - {ti.CallStatic, member("S", "g")}, // z.g - {ti.CallStatic, member("S", "g")}, // a.b.c.g - {ti.CallStatic, member("S", "g")}, // S.g(z, 1) - {ti.CallDynamic, nil}, // z.f - {ti.CallInterface, member("I", "m")}, // I(nil).m - {ti.CallConversion, nil}, // I(nil) - {ti.CallDynamic, nil}, // m[0] - {ti.CallDynamic, nil}, // n[0] - {ti.CallStatic, lookup("F")}, // F[int] - {ti.CallStatic, lookup("F")}, // F[T] - {ti.CallDynamic, nil}, // f(){} - {ti.CallConversion, nil}, // []byte - {ti.CallConversion, nil}, // A[int] - {ti.CallConversion, nil}, // T - {ti.CallStatic, member("S", "g")}, // (z.g) - {ti.CallStatic, member("S", "g")}, // (z).g + {ti.CallStatic, lookup("g"), same}, // g + {ti.CallDynamic, nil, lookup("f")}, // f + {ti.CallBuiltin, printlnObj, same}, // println + {ti.CallStatic, member("S", "g"), same}, // z.g + {ti.CallStatic, member("S", "g"), same}, // a.b.c.g + {ti.CallStatic, member("S", "g"), same}, // S.g(z, 1) + {ti.CallDynamic, nil, member("z", "f")}, // z.f + {ti.CallInterface, member("I", "m"), same}, // I(nil).m + {ti.CallConversion, nil, lookup("I")}, // I(nil) + {ti.CallDynamic, nil, same}, // m[0] + {ti.CallDynamic, nil, same}, // n[0] + {ti.CallStatic, lookup("F"), same}, // F[int] + {ti.CallStatic, lookup("F"), same}, // F[T] + {ti.CallDynamic, nil, same}, // f(){} + {ti.CallConversion, nil, same}, // []byte + {ti.CallConversion, nil, lookup("A")}, // A[int] + {ti.CallConversion, nil, typeParam}, // T + {ti.CallStatic, member("S", "g"), same}, // (z.g) + {ti.CallStatic, member("S", "g"), same}, // (z).g } i := 0 @@ -152,8 +149,16 @@ func TestClassifyCallAndUsed(t *testing.T) { if gotKind != want.kind { t.Errorf("%s kind: got %s, want %s", prefix, gotKind, want.kind) } - if gotObj != want.obj { - t.Errorf("%s obj: got %v (%[2]T), want %v", prefix, gotObj, want.obj) + if gotObj != want.classifyObj { + t.Errorf("%s obj: got %v (%[2]T), want %v", prefix, gotObj, want.classifyObj) + } + + w := want.usedObj + if w == same { + w = want.classifyObj + } + if g := ti.Used(info, call.Fun); g != w { + t.Errorf("%s used obj: got %v (%[2]T), want %v", prefix, g, w) } i++ } From b948add7e7e4926ce52fb3a01e15c18e4558c252 Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Thu, 3 Apr 2025 16:51:43 -0400 Subject: [PATCH 175/270] internal/gofix: move from gopls/internal/analysis/gofix Change-Id: I31a899e4705c1c4226f934e015b89e0aa9e576e6 Reviewed-on: https://go-review.googlesource.com/c/tools/+/662755 Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI Auto-Submit: Jonathan Amsterdam --- gopls/internal/settings/analysis.go | 2 +- .../gofix/cmd/gofix/main.go | 2 +- .../analysis => internal}/gofix/doc.go | 0 .../analysis => internal}/gofix/gofix.go | 38 ++++++++++++++----- .../analysis => internal}/gofix/gofix_test.go | 3 ++ .../gofix/testdata/src/a/a.go | 0 .../gofix/testdata/src/a/a.go.golden | 0 .../gofix/testdata/src/a/internal/d.go | 0 .../gofix/testdata/src/b/b.go | 0 .../gofix/testdata/src/b/b.go.golden | 0 .../gofix/testdata/src/c/c.go | 0 .../gofix/testdata/src/directive/directive.go | 0 .../src/directive/directive.go.golden | 1 - 13 files changed, 33 insertions(+), 13 deletions(-) rename {gopls/internal/analysis => 
internal}/gofix/cmd/gofix/main.go (89%) rename {gopls/internal/analysis => internal}/gofix/doc.go (100%) rename {gopls/internal/analysis => internal}/gofix/gofix.go (96%) rename {gopls/internal/analysis => internal}/gofix/gofix_test.go (97%) rename {gopls/internal/analysis => internal}/gofix/testdata/src/a/a.go (100%) rename {gopls/internal/analysis => internal}/gofix/testdata/src/a/a.go.golden (100%) rename {gopls/internal/analysis => internal}/gofix/testdata/src/a/internal/d.go (100%) rename {gopls/internal/analysis => internal}/gofix/testdata/src/b/b.go (100%) rename {gopls/internal/analysis => internal}/gofix/testdata/src/b/b.go.golden (100%) rename {gopls/internal/analysis => internal}/gofix/testdata/src/c/c.go (100%) rename {gopls/internal/analysis => internal}/gofix/testdata/src/directive/directive.go (100%) rename {gopls/internal/analysis => internal}/gofix/testdata/src/directive/directive.go.golden (99%) diff --git a/gopls/internal/settings/analysis.go b/gopls/internal/settings/analysis.go index 5ba8bdd06b0..e914407fe6b 100644 --- a/gopls/internal/settings/analysis.go +++ b/gopls/internal/settings/analysis.go @@ -49,7 +49,6 @@ import ( "golang.org/x/tools/gopls/internal/analysis/deprecated" "golang.org/x/tools/gopls/internal/analysis/embeddirective" "golang.org/x/tools/gopls/internal/analysis/fillreturns" - "golang.org/x/tools/gopls/internal/analysis/gofix" "golang.org/x/tools/gopls/internal/analysis/hostport" "golang.org/x/tools/gopls/internal/analysis/infertypeargs" "golang.org/x/tools/gopls/internal/analysis/modernize" @@ -63,6 +62,7 @@ import ( "golang.org/x/tools/gopls/internal/analysis/unusedvariable" "golang.org/x/tools/gopls/internal/analysis/yield" "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/internal/gofix" ) // Analyzer augments a [analysis.Analyzer] with additional LSP configuration. diff --git a/gopls/internal/analysis/gofix/cmd/gofix/main.go b/internal/gofix/cmd/gofix/main.go similarity index 89% rename from gopls/internal/analysis/gofix/cmd/gofix/main.go rename to internal/gofix/cmd/gofix/main.go index d75978f6e59..9ec77943774 100644 --- a/gopls/internal/analysis/gofix/cmd/gofix/main.go +++ b/internal/gofix/cmd/gofix/main.go @@ -10,7 +10,7 @@ package main import ( "golang.org/x/tools/go/analysis/singlechecker" - "golang.org/x/tools/gopls/internal/analysis/gofix" + "golang.org/x/tools/internal/gofix" ) func main() { singlechecker.Main(gofix.Analyzer) } diff --git a/gopls/internal/analysis/gofix/doc.go b/internal/gofix/doc.go similarity index 100% rename from gopls/internal/analysis/gofix/doc.go rename to internal/gofix/doc.go diff --git a/gopls/internal/analysis/gofix/gofix.go b/internal/gofix/gofix.go similarity index 96% rename from gopls/internal/analysis/gofix/gofix.go rename to internal/gofix/gofix.go index 6f4c8a6e2fd..b2fc5318e09 100644 --- a/gopls/internal/analysis/gofix/gofix.go +++ b/internal/gofix/gofix.go @@ -9,6 +9,7 @@ import ( "go/ast" "go/token" "go/types" + "iter" "slices" "strings" @@ -18,7 +19,6 @@ import ( "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/gopls/internal/util/moreiters" "golang.org/x/tools/internal/analysisinternal" internalastutil "golang.org/x/tools/internal/astutil" "golang.org/x/tools/internal/astutil/cursor" @@ -330,7 +330,7 @@ func (a *analyzer) inlineAlias(tn *types.TypeName, curId cursor.Cursor) { // Remember the names of the alias's type params. 
When we check for shadowing // later, we'll ignore these because they won't appear in the replacement text. typeParamNames := map[*types.TypeName]bool{} - for tp := range alias.TypeParams().TypeParams() { + for tp := range listIter(alias.TypeParams()) { typeParamNames[tp.Obj()] = true } rhs := alias.Rhs() @@ -405,7 +405,7 @@ func (a *analyzer) inlineAlias(tn *types.TypeName, curId cursor.Cursor) { // A[int, Foo] as M[int, Foo]. // Don't validate instantiation: it can't panic unless we have a bug, // in which case seeing the stack trace via telemetry would be helpful. - instAlias, _ := types.Instantiate(nil, alias, slices.Collect(targs.Types()), false) + instAlias, _ := types.Instantiate(nil, alias, slices.Collect(listIter(targs)), false) rhs = instAlias.(*types.Alias).Rhs() } // To get the replacement text, render the alias RHS using the package prefixes @@ -437,11 +437,11 @@ func typenames(t types.Type) []*types.TypeName { case *types.Basic: tns = append(tns, types.Universe.Lookup(t.Name()).(*types.TypeName)) case *types.Named: - for t := range t.TypeArgs().Types() { + for t := range listIter(t.TypeArgs()) { visit(t) } case *types.Alias: - for t := range t.TypeArgs().Types() { + for t := range listIter(t.TypeArgs()) { visit(t) } case *types.TypeParam: @@ -458,8 +458,8 @@ func typenames(t types.Type) []*types.TypeName { visit(t.Key()) visit(t.Elem()) case *types.Struct: - for f := range t.Fields() { - visit(f.Type()) + for i := range t.NumFields() { + visit(t.Field(i).Type()) } case *types.Signature: // Ignore the receiver: although it may be present, it has no meaning @@ -479,7 +479,7 @@ func typenames(t types.Type) []*types.TypeName { visit(t.ExplicitMethod(i).Type()) } case *types.Tuple: - for v := range t.Variables() { + for v := range listIter(t) { visit(v.Type()) } case *types.Union: @@ -592,8 +592,10 @@ func (a *analyzer) readFile(node ast.Node) ([]byte, error) { // currentFile returns the unique ast.File for a cursor. func currentFile(c cursor.Cursor) *ast.File { - cf, _ := moreiters.First(c.Enclosing((*ast.File)(nil))) - return cf.Node().(*ast.File) + for cf := range c.Enclosing((*ast.File)(nil)) { + return cf.Node().(*ast.File) + } + panic("no *ast.File enclosing a cursor: impossible") } // hasFixInline reports the presence of a "//go:fix inline" directive @@ -640,3 +642,19 @@ func (*goFixInlineAliasFact) AFact() {} func discard(string, ...any) {} var builtinIota = types.Universe.Lookup("iota") + +type list[T any] interface { + Len() int + At(int) T +} + +// TODO(adonovan): eliminate in favor of go/types@go1.24 iterators. 
+func listIter[L list[T], T any](lst L) iter.Seq[T] { + return func(yield func(T) bool) { + for i := range lst.Len() { + if !yield(lst.At(i)) { + return + } + } + } +} diff --git a/gopls/internal/analysis/gofix/gofix_test.go b/internal/gofix/gofix_test.go similarity index 97% rename from gopls/internal/analysis/gofix/gofix_test.go rename to internal/gofix/gofix_test.go index 4acc4daf2ff..ae2df3860a8 100644 --- a/gopls/internal/analysis/gofix/gofix_test.go +++ b/internal/gofix/gofix_test.go @@ -19,6 +19,9 @@ import ( ) func TestAnalyzer(t *testing.T) { + if testenv.Go1Point() < 24 { + testenv.NeedsGoExperiment(t, "aliastypeparams") + } analysistest.RunWithSuggestedFixes(t, analysistest.TestData(), Analyzer, "a", "b") } diff --git a/gopls/internal/analysis/gofix/testdata/src/a/a.go b/internal/gofix/testdata/src/a/a.go similarity index 100% rename from gopls/internal/analysis/gofix/testdata/src/a/a.go rename to internal/gofix/testdata/src/a/a.go diff --git a/gopls/internal/analysis/gofix/testdata/src/a/a.go.golden b/internal/gofix/testdata/src/a/a.go.golden similarity index 100% rename from gopls/internal/analysis/gofix/testdata/src/a/a.go.golden rename to internal/gofix/testdata/src/a/a.go.golden diff --git a/gopls/internal/analysis/gofix/testdata/src/a/internal/d.go b/internal/gofix/testdata/src/a/internal/d.go similarity index 100% rename from gopls/internal/analysis/gofix/testdata/src/a/internal/d.go rename to internal/gofix/testdata/src/a/internal/d.go diff --git a/gopls/internal/analysis/gofix/testdata/src/b/b.go b/internal/gofix/testdata/src/b/b.go similarity index 100% rename from gopls/internal/analysis/gofix/testdata/src/b/b.go rename to internal/gofix/testdata/src/b/b.go diff --git a/gopls/internal/analysis/gofix/testdata/src/b/b.go.golden b/internal/gofix/testdata/src/b/b.go.golden similarity index 100% rename from gopls/internal/analysis/gofix/testdata/src/b/b.go.golden rename to internal/gofix/testdata/src/b/b.go.golden diff --git a/gopls/internal/analysis/gofix/testdata/src/c/c.go b/internal/gofix/testdata/src/c/c.go similarity index 100% rename from gopls/internal/analysis/gofix/testdata/src/c/c.go rename to internal/gofix/testdata/src/c/c.go diff --git a/gopls/internal/analysis/gofix/testdata/src/directive/directive.go b/internal/gofix/testdata/src/directive/directive.go similarity index 100% rename from gopls/internal/analysis/gofix/testdata/src/directive/directive.go rename to internal/gofix/testdata/src/directive/directive.go diff --git a/gopls/internal/analysis/gofix/testdata/src/directive/directive.go.golden b/internal/gofix/testdata/src/directive/directive.go.golden similarity index 99% rename from gopls/internal/analysis/gofix/testdata/src/directive/directive.go.golden rename to internal/gofix/testdata/src/directive/directive.go.golden index 3e5b3409288..a6625e1731f 100644 --- a/gopls/internal/analysis/gofix/testdata/src/directive/directive.go.golden +++ b/internal/gofix/testdata/src/directive/directive.go.golden @@ -68,4 +68,3 @@ type E = map[[Uno]string][]*T // want `invalid //go:fix inline directive: array // //go:fix inline type EL = map[[2]string][]*T // want EL: `goFixInline alias` - From e850fe1872cee508a221a3efd67dd2901deddc4c Mon Sep 17 00:00:00 2001 From: xieyuschen Date: Fri, 4 Apr 2025 22:42:46 -0600 Subject: [PATCH 176/270] gopls/internal/golang: CodeAction: place gopls doc as the last action This CL adjusts the position of 'gopls.doc.features' at the end of the code action producers to offer it as the last action. 
It also checks the order of each action in unit test rather than checking existence. Fixes golang/go#72742 Change-Id: I062792a5608fc3d0b4c04334389a4d7066873c62 Reviewed-on: https://go-review.googlesource.com/c/tools/+/662915 Auto-Submit: Alan Donovan Reviewed-by: Alan Donovan Reviewed-by: Carlos Amedee LUCI-TryBot-Result: Go LUCI --- gopls/internal/cmd/integration_test.go | 4 +- gopls/internal/golang/codeaction.go | 10 ++--- gopls/internal/settings/codeactionkind.go | 1 + .../test/integration/misc/codeactions_test.go | 42 +++++++++++++------ 4 files changed, 36 insertions(+), 21 deletions(-) diff --git a/gopls/internal/cmd/integration_test.go b/gopls/internal/cmd/integration_test.go index 9d135ceadb2..6e4b450635b 100644 --- a/gopls/internal/cmd/integration_test.go +++ b/gopls/internal/cmd/integration_test.go @@ -1010,9 +1010,9 @@ type C struct{} res := gopls(t, tree, "codeaction", "-title=Browse.*doc", "a/a.go") res.checkExit(true) got := res.stdout - want := `command "Browse gopls feature documentation" [gopls.doc.features]` + + want := `command "Browse documentation for package a" [source.doc]` + "\n" + - `command "Browse documentation for package a" [source.doc]` + + `command "Browse gopls feature documentation" [gopls.doc.features]` + "\n" if got != want { t.Errorf("codeaction: got <<%s>>, want <<%s>>\nstderr:\n%s", got, want, res.stderr) diff --git a/gopls/internal/golang/codeaction.go b/gopls/internal/golang/codeaction.go index d9f2af47d24..7949493a896 100644 --- a/gopls/internal/golang/codeaction.go +++ b/gopls/internal/golang/codeaction.go @@ -14,7 +14,6 @@ import ( "path/filepath" "reflect" "slices" - "sort" "strings" "golang.org/x/tools/go/ast/astutil" @@ -112,10 +111,7 @@ func CodeActions(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, } } - sort.Slice(actions, func(i, j int) bool { - return actions[i].Kind < actions[j].Kind - }) - + // Return code actions in the order their providers are listed. return actions, nil } @@ -233,6 +229,8 @@ type codeActionProducer struct { needPkg bool // fn needs type information (req.pkg) } +// Code Actions are returned in the order their producers are listed below. +// Depending on the client, this may influence the order they appear in the UI. 
var codeActionProducers = [...]codeActionProducer{ {kind: protocol.QuickFix, fn: quickFix, needPkg: true}, {kind: protocol.SourceOrganizeImports, fn: sourceOrganizeImports}, @@ -242,7 +240,6 @@ var codeActionProducers = [...]codeActionProducer{ {kind: settings.GoFreeSymbols, fn: goFreeSymbols}, {kind: settings.GoTest, fn: goTest, needPkg: true}, {kind: settings.GoToggleCompilerOptDetails, fn: toggleCompilerOptDetails}, - {kind: settings.GoplsDocFeatures, fn: goplsDocFeatures}, {kind: settings.RefactorExtractFunction, fn: refactorExtractFunction}, {kind: settings.RefactorExtractMethod, fn: refactorExtractMethod}, {kind: settings.RefactorExtractToNewFile, fn: refactorExtractToNewFile}, @@ -261,6 +258,7 @@ var codeActionProducers = [...]codeActionProducer{ {kind: settings.RefactorRewriteMoveParamRight, fn: refactorRewriteMoveParamRight, needPkg: true}, {kind: settings.RefactorRewriteSplitLines, fn: refactorRewriteSplitLines, needPkg: true}, {kind: settings.RefactorRewriteEliminateDotImport, fn: refactorRewriteEliminateDotImport, needPkg: true}, + {kind: settings.GoplsDocFeatures, fn: goplsDocFeatures}, // offer this one last (#72742) // Note: don't forget to update the allow-list in Server.CodeAction // when adding new query operations like GoTest and GoDoc that diff --git a/gopls/internal/settings/codeactionkind.go b/gopls/internal/settings/codeactionkind.go index 09d9d419567..f6f8a4df2a4 100644 --- a/gopls/internal/settings/codeactionkind.go +++ b/gopls/internal/settings/codeactionkind.go @@ -81,6 +81,7 @@ const ( GoTest protocol.CodeActionKind = "source.test" GoToggleCompilerOptDetails protocol.CodeActionKind = "source.toggleCompilerOptDetails" AddTest protocol.CodeActionKind = "source.addTest" + OrganizeImports protocol.CodeActionKind = "source.organizeImports" // gopls GoplsDocFeatures protocol.CodeActionKind = "gopls.doc.features" diff --git a/gopls/internal/test/integration/misc/codeactions_test.go b/gopls/internal/test/integration/misc/codeactions_test.go index c62a3898e9b..d9c83186d69 100644 --- a/gopls/internal/test/integration/misc/codeactions_test.go +++ b/gopls/internal/test/integration/misc/codeactions_test.go @@ -35,25 +35,28 @@ package a func f() { g() } func g() {} + +-- issue72742/a.go -- +package main + +func main(){ + fmt.Println("helloworld") +} ` Run(t, src, func(t *testing.T, env *Env) { - check := func(filename string, wantKind ...protocol.CodeActionKind) { + check := func(filename string, re string, want []protocol.CodeActionKind) { env.OpenFile(filename) - loc := env.RegexpSearch(filename, `g\(\)`) + loc := env.RegexpSearch(filename, re) actions, err := env.Editor.CodeAction(env.Ctx, loc, nil, protocol.CodeActionUnknownTrigger) if err != nil { t.Fatal(err) } - type kinds = map[protocol.CodeActionKind]bool - got := make(kinds) + type kinds = []protocol.CodeActionKind + got := make(kinds, 0) for _, act := range actions { - got[act.Kind] = true - } - want := make(kinds) - for _, kind := range wantKind { - want[kind] = true + got = append(got, act.Kind) } if diff := cmp.Diff(want, got); diff != "" { @@ -63,20 +66,33 @@ func g() {} } } - check("src/a.go", + check("src/a.go", `g\(\)`, []protocol.CodeActionKind{ settings.AddTest, settings.GoAssembly, settings.GoDoc, settings.GoFreeSymbols, settings.GoToggleCompilerOptDetails, + settings.RefactorInlineCall, settings.GoplsDocFeatures, - settings.RefactorInlineCall) - check("gen/a.go", + }) + + check("gen/a.go", `g\(\)`, []protocol.CodeActionKind{ settings.GoAssembly, settings.GoDoc, settings.GoFreeSymbols, 
settings.GoToggleCompilerOptDetails, - settings.GoplsDocFeatures) + settings.GoplsDocFeatures, + }) + + check("issue72742/a.go", `fmt`, []protocol.CodeActionKind{ + settings.OrganizeImports, + settings.AddTest, + settings.GoAssembly, + settings.GoDoc, + settings.GoFreeSymbols, + settings.GoToggleCompilerOptDetails, + settings.GoplsDocFeatures, + }) }) } From b97074b9c8ebe7cce7db7be133120bc966f9c33f Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Fri, 4 Apr 2025 12:11:52 -0400 Subject: [PATCH 177/270] internal/gofix: fix URLs Change-Id: Id09d42501c242ca702f251e6c06ca5047f7b4ac7 Reviewed-on: https://go-review.googlesource.com/c/tools/+/662956 LUCI-TryBot-Result: Go LUCI Auto-Submit: Jonathan Amsterdam Reviewed-by: Alan Donovan --- gopls/doc/analyzers.md | 2 +- gopls/internal/doc/api.json | 2 +- internal/gofix/gofix.go | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/gopls/doc/analyzers.md b/gopls/doc/analyzers.md index 0d9fcb2313b..4b2bff1a63a 100644 --- a/gopls/doc/analyzers.md +++ b/gopls/doc/analyzers.md @@ -298,7 +298,7 @@ The gofix analyzer inlines functions and constants that are marked for inlining. Default: on. -Package documentation: [gofix](https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/gofix) +Package documentation: [gofix](https://pkg.go.dev/golang.org/x/tools/internal/gofix) ## `hostport`: check format of addresses passed to net.Dial diff --git a/gopls/internal/doc/api.json b/gopls/internal/doc/api.json index f624af8632c..0852870ba41 100644 --- a/gopls/internal/doc/api.json +++ b/gopls/internal/doc/api.json @@ -1297,7 +1297,7 @@ { "Name": "gofix", "Doc": "apply fixes based on go:fix comment directives\n\nThe gofix analyzer inlines functions and constants that are marked for inlining.", - "URL": "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/gofix", + "URL": "https://pkg.go.dev/golang.org/x/tools/internal/gofix", "Default": true }, { diff --git a/internal/gofix/gofix.go b/internal/gofix/gofix.go index b2fc5318e09..565272b5e46 100644 --- a/internal/gofix/gofix.go +++ b/internal/gofix/gofix.go @@ -34,7 +34,7 @@ var doc string var Analyzer = &analysis.Analyzer{ Name: "gofix", Doc: analysisinternal.MustExtractDoc(doc, "gofix"), - URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/gofix", + URL: "https://pkg.go.dev/golang.org/x/tools/internal/gofix", Run: func(pass *analysis.Pass) (any, error) { return run(pass, true) }, FactTypes: []analysis.Fact{ (*goFixInlineFuncFact)(nil), @@ -47,7 +47,7 @@ var Analyzer = &analysis.Analyzer{ var DirectiveAnalyzer = &analysis.Analyzer{ Name: "gofixdirective", Doc: analysisinternal.MustExtractDoc(doc, "gofixdirective"), - URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/gofix", + URL: "https://pkg.go.dev/golang.org/x/tools/internal/gofix", Run: func(pass *analysis.Pass) (any, error) { return run(pass, false) }, FactTypes: []analysis.Fact{ (*goFixInlineFuncFact)(nil), From 3e7f74d009150bf5e66483f3759d8c59f50e873d Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Fri, 4 Apr 2025 14:32:05 -0400 Subject: [PATCH 178/270] go/types/typeutil: used doesn't need Info.Selections Whenever info.Selections[e] is set, info.Uses[e.Sel] is set to the same object. So the used function doesn't need info.Selections. 
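For illustration, here is a minimal standalone sketch of the equivalence claimed above. It is not part of this change and uses only the standard go/types API; it assumes importer.Default can load export data for "bytes" (i.e. a working Go toolchain).

package main

import (
	"fmt"
	"go/ast"
	"go/importer"
	"go/parser"
	"go/token"
	"go/types"
)

func main() {
	const src = `package p
import "bytes"
func f(b *bytes.Buffer) int { return b.Len() }`

	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	info := &types.Info{
		Uses:       map[*ast.Ident]types.Object{},
		Selections: map[*ast.SelectorExpr]*types.Selection{},
	}
	conf := types.Config{Importer: importer.Default()}
	if _, err := conf.Check("p", fset, []*ast.File{file}, info); err != nil {
		panic(err)
	}
	ast.Inspect(file, func(n ast.Node) bool {
		if sel, ok := n.(*ast.SelectorExpr); ok {
			if s, ok := info.Selections[sel]; ok {
				// For the method selector b.Len, both maps yield the
				// same object, (*bytes.Buffer).Len.
				fmt.Println(info.Uses[sel.Sel] == s.Obj()) // true
			}
		}
		return true
	})
}
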
Change-Id: I43782ef728179e61084b0d7cdab4f01a8eea9f72 Reviewed-on: https://go-review.googlesource.com/c/tools/+/662957 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan Auto-Submit: Jonathan Amsterdam --- go/types/typeutil/callee.go | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/go/types/typeutil/callee.go b/go/types/typeutil/callee.go index 2e3ccaa3dc3..eeeb570a73c 100644 --- a/go/types/typeutil/callee.go +++ b/go/types/typeutil/callee.go @@ -50,8 +50,8 @@ func StaticCallee(info *types.Info, call *ast.CallExpr) *types.Func { // //go:linkname used func used(info *types.Info, e ast.Expr) types.Object { - if info.Types == nil || info.Uses == nil || info.Selections == nil { - panic("one of info.Types, info.Uses or info.Selections is nil; all must be populated") + if info.Types == nil || info.Uses == nil { + panic("one of info.Types or info.Uses is nil; both must be populated") } // Look through type instantiation if necessary. switch d := ast.Unparen(e).(type) { @@ -65,14 +65,13 @@ func used(info *types.Info, e ast.Expr) types.Object { var obj types.Object switch e := ast.Unparen(e).(type) { + // info.Uses always has the object we want, even for selector expressions. + // We don't need info.Selections. + // See go/types/recording.go:recordSelection. case *ast.Ident: obj = info.Uses[e] // type, var, builtin, or declared func case *ast.SelectorExpr: - if sel, ok := info.Selections[e]; ok { - obj = sel.Obj() // method or field - } else { - obj = info.Uses[e.Sel] // qualified identifier? - } + obj = info.Uses[e.Sel] // selector e.f or T.f or qualified identifier pkg.X } return obj } From 11a9b3f89dc9e5a2e6d738d243258495a9c53005 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Sat, 5 Apr 2025 11:31:59 -0400 Subject: [PATCH 179/270] gopls/internal/server: fix event labels after the big rename The old names predate the big directory renaming we did about a year ago. For the record: why can't these event.Start calls be emitted by the generated server handler code? Two reasons: 1) they sometimes have arguments specific to the call; and 2) they cover only the handler proper, but not the time to send the RPC reply message. Had they been automatically generated in the caller, some care would be required to exclude the RPC reply time. 
Change-Id: I290537579200aab162ac06522a482b4f4e4f7a28 Reviewed-on: https://go-review.googlesource.com/c/tools/+/663135 LUCI-TryBot-Result: Go LUCI Reviewed-by: Jonathan Amsterdam Auto-Submit: Alan Donovan --- gopls/internal/server/call_hierarchy.go | 6 +++--- gopls/internal/server/code_action.go | 4 ++-- gopls/internal/server/code_lens.go | 2 +- gopls/internal/server/command.go | 4 ++-- gopls/internal/server/completion.go | 2 +- gopls/internal/server/definition.go | 4 ++-- gopls/internal/server/diagnostics.go | 8 ++++---- gopls/internal/server/folding_range.go | 2 +- gopls/internal/server/format.go | 2 +- gopls/internal/server/general.go | 8 ++++---- gopls/internal/server/highlight.go | 2 +- gopls/internal/server/hover.go | 2 +- gopls/internal/server/implementation.go | 2 +- gopls/internal/server/inlay_hint.go | 2 +- gopls/internal/server/link.go | 2 +- gopls/internal/server/references.go | 2 +- gopls/internal/server/rename.go | 4 ++-- gopls/internal/server/selection_range.go | 2 +- gopls/internal/server/semantic.go | 2 +- gopls/internal/server/server.go | 2 +- gopls/internal/server/signature_help.go | 2 +- gopls/internal/server/symbols.go | 2 +- gopls/internal/server/text_synchronization.go | 10 +++++----- gopls/internal/server/workspace.go | 4 ++-- gopls/internal/server/workspace_symbol.go | 2 +- 25 files changed, 42 insertions(+), 42 deletions(-) diff --git a/gopls/internal/server/call_hierarchy.go b/gopls/internal/server/call_hierarchy.go index 671d4f8c81c..758a4628948 100644 --- a/gopls/internal/server/call_hierarchy.go +++ b/gopls/internal/server/call_hierarchy.go @@ -14,7 +14,7 @@ import ( ) func (s *server) PrepareCallHierarchy(ctx context.Context, params *protocol.CallHierarchyPrepareParams) ([]protocol.CallHierarchyItem, error) { - ctx, done := event.Start(ctx, "lsp.Server.prepareCallHierarchy") + ctx, done := event.Start(ctx, "server.PrepareCallHierarchy") defer done() fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) @@ -29,7 +29,7 @@ func (s *server) PrepareCallHierarchy(ctx context.Context, params *protocol.Call } func (s *server) IncomingCalls(ctx context.Context, params *protocol.CallHierarchyIncomingCallsParams) ([]protocol.CallHierarchyIncomingCall, error) { - ctx, done := event.Start(ctx, "lsp.Server.incomingCalls") + ctx, done := event.Start(ctx, "server.IncomingCalls") defer done() fh, snapshot, release, err := s.fileOf(ctx, params.Item.URI) @@ -44,7 +44,7 @@ func (s *server) IncomingCalls(ctx context.Context, params *protocol.CallHierarc } func (s *server) OutgoingCalls(ctx context.Context, params *protocol.CallHierarchyOutgoingCallsParams) ([]protocol.CallHierarchyOutgoingCall, error) { - ctx, done := event.Start(ctx, "lsp.Server.outgoingCalls") + ctx, done := event.Start(ctx, "server.OutgoingCalls") defer done() fh, snapshot, release, err := s.fileOf(ctx, params.Item.URI) diff --git a/gopls/internal/server/code_action.go b/gopls/internal/server/code_action.go index c36e7c33f94..4617fad5de7 100644 --- a/gopls/internal/server/code_action.go +++ b/gopls/internal/server/code_action.go @@ -22,7 +22,7 @@ import ( ) func (s *server) CodeAction(ctx context.Context, params *protocol.CodeActionParams) ([]protocol.CodeAction, error) { - ctx, done := event.Start(ctx, "lsp.Server.codeAction") + ctx, done := event.Start(ctx, "server.CodeAction") defer done() fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) @@ -225,7 +225,7 @@ func triggerKind(params *protocol.CodeActionParams) protocol.CodeActionTriggerKi // This feature allows capable 
clients to preview and selectively apply the diff // instead of applying the whole thing unconditionally through workspace/applyEdit. func (s *server) ResolveCodeAction(ctx context.Context, ca *protocol.CodeAction) (*protocol.CodeAction, error) { - ctx, done := event.Start(ctx, "lsp.Server.resolveCodeAction") + ctx, done := event.Start(ctx, "server.ResolveCodeAction") defer done() // Only resolve the code action if there is Data provided. diff --git a/gopls/internal/server/code_lens.go b/gopls/internal/server/code_lens.go index 67b359e866c..2509452f0b5 100644 --- a/gopls/internal/server/code_lens.go +++ b/gopls/internal/server/code_lens.go @@ -22,7 +22,7 @@ import ( // CodeLens reports the set of available CodeLenses // (range-associated commands) in the given file. func (s *server) CodeLens(ctx context.Context, params *protocol.CodeLensParams) ([]protocol.CodeLens, error) { - ctx, done := event.Start(ctx, "lsp.Server.codeLens", label.URI.Of(params.TextDocument.URI)) + ctx, done := event.Start(ctx, "server.CodeLens", label.URI.Of(params.TextDocument.URI)) defer done() fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) diff --git a/gopls/internal/server/command.go b/gopls/internal/server/command.go index 0142de532c3..ca8177530e5 100644 --- a/gopls/internal/server/command.go +++ b/gopls/internal/server/command.go @@ -47,7 +47,7 @@ import ( ) func (s *server) ExecuteCommand(ctx context.Context, params *protocol.ExecuteCommandParams) (any, error) { - ctx, done := event.Start(ctx, "lsp.Server.executeCommand") + ctx, done := event.Start(ctx, "server.ExecuteCommand") defer done() // For test synchronization, always create a progress notification. @@ -1652,7 +1652,7 @@ func (c *commandHandler) DiagnoseFiles(ctx context.Context, args command.Diagnos // Though note that implementing pull diagnostics may cause some servers to // request diagnostics in an ad-hoc manner, and break our intentional pacing. - ctx, done := event.Start(ctx, "lsp.server.DiagnoseFiles") + ctx, done := event.Start(ctx, "commandHandler.DiagnoseFiles") defer done() snapshots := make(map[*cache.Snapshot]bool) diff --git a/gopls/internal/server/completion.go b/gopls/internal/server/completion.go index e72d156de05..02604b2f710 100644 --- a/gopls/internal/server/completion.go +++ b/gopls/internal/server/completion.go @@ -27,7 +27,7 @@ func (s *server) Completion(ctx context.Context, params *protocol.CompletionPara recordLatency(ctx, rerr) }() - ctx, done := event.Start(ctx, "lsp.Server.completion", label.URI.Of(params.TextDocument.URI)) + ctx, done := event.Start(ctx, "server.Completion", label.URI.Of(params.TextDocument.URI)) defer done() fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) diff --git a/gopls/internal/server/definition.go b/gopls/internal/server/definition.go index 5a9c020cfc5..8b9d42413be 100644 --- a/gopls/internal/server/definition.go +++ b/gopls/internal/server/definition.go @@ -24,7 +24,7 @@ func (s *server) Definition(ctx context.Context, params *protocol.DefinitionPara recordLatency(ctx, rerr) }() - ctx, done := event.Start(ctx, "lsp.Server.definition", label.URI.Of(params.TextDocument.URI)) + ctx, done := event.Start(ctx, "server.Definition", label.URI.Of(params.TextDocument.URI)) defer done() // TODO(rfindley): definition requests should be multiplexed across all views. 
@@ -46,7 +46,7 @@ func (s *server) Definition(ctx context.Context, params *protocol.DefinitionPara } func (s *server) TypeDefinition(ctx context.Context, params *protocol.TypeDefinitionParams) ([]protocol.Location, error) { - ctx, done := event.Start(ctx, "lsp.Server.typeDefinition", label.URI.Of(params.TextDocument.URI)) + ctx, done := event.Start(ctx, "server.TypeDefinition", label.URI.Of(params.TextDocument.URI)) defer done() // TODO(rfindley): type definition requests should be multiplexed across all views. diff --git a/gopls/internal/server/diagnostics.go b/gopls/internal/server/diagnostics.go index b4e764b1233..92ca54e226a 100644 --- a/gopls/internal/server/diagnostics.go +++ b/gopls/internal/server/diagnostics.go @@ -200,7 +200,7 @@ func (s *server) diagnoseChangedViews(ctx context.Context, modID uint64, lastCha // snapshot (or a subsequent snapshot in the same View) is eventually // diagnosed. func (s *server) diagnoseSnapshot(ctx context.Context, snapshot *cache.Snapshot, changedURIs []protocol.DocumentURI, delay time.Duration) { - ctx, done := event.Start(ctx, "Server.diagnoseSnapshot", snapshot.Labels()...) + ctx, done := event.Start(ctx, "server.diagnoseSnapshot", snapshot.Labels()...) defer done() if delay > 0 { @@ -241,7 +241,7 @@ func (s *server) diagnoseSnapshot(ctx context.Context, snapshot *cache.Snapshot, } func (s *server) diagnoseChangedFiles(ctx context.Context, snapshot *cache.Snapshot, uris []protocol.DocumentURI) (diagMap, error) { - ctx, done := event.Start(ctx, "Server.diagnoseChangedFiles", snapshot.Labels()...) + ctx, done := event.Start(ctx, "server.diagnoseChangedFiles", snapshot.Labels()...) defer done() toDiagnose := make(map[metadata.PackageID]*metadata.Package) @@ -311,7 +311,7 @@ func (s *server) diagnoseChangedFiles(ctx context.Context, snapshot *cache.Snaps } func (s *server) diagnose(ctx context.Context, snapshot *cache.Snapshot) (diagMap, error) { - ctx, done := event.Start(ctx, "Server.diagnose", snapshot.Labels()...) + ctx, done := event.Start(ctx, "server.diagnose", snapshot.Labels()...) defer done() // Wait for a free diagnostics slot. @@ -640,7 +640,7 @@ func (s *server) updateCriticalErrorStatus(ctx context.Context, snapshot *cache. // updateDiagnostics records the result of diagnosing a snapshot, and publishes // any diagnostics that need to be updated on the client. 
func (s *server) updateDiagnostics(ctx context.Context, snapshot *cache.Snapshot, diagnostics diagMap, final bool) { - ctx, done := event.Start(ctx, "Server.publishDiagnostics") + ctx, done := event.Start(ctx, "server.publishDiagnostics") defer done() s.diagnosticsMu.Lock() diff --git a/gopls/internal/server/folding_range.go b/gopls/internal/server/folding_range.go index b05d5302f10..5dbfd697db4 100644 --- a/gopls/internal/server/folding_range.go +++ b/gopls/internal/server/folding_range.go @@ -15,7 +15,7 @@ import ( ) func (s *server) FoldingRange(ctx context.Context, params *protocol.FoldingRangeParams) ([]protocol.FoldingRange, error) { - ctx, done := event.Start(ctx, "lsp.Server.foldingRange", label.URI.Of(params.TextDocument.URI)) + ctx, done := event.Start(ctx, "server.FoldingRange", label.URI.Of(params.TextDocument.URI)) defer done() fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) diff --git a/gopls/internal/server/format.go b/gopls/internal/server/format.go index 1e6344dcff4..6abbb96d5b6 100644 --- a/gopls/internal/server/format.go +++ b/gopls/internal/server/format.go @@ -17,7 +17,7 @@ import ( ) func (s *server) Formatting(ctx context.Context, params *protocol.DocumentFormattingParams) ([]protocol.TextEdit, error) { - ctx, done := event.Start(ctx, "lsp.Server.formatting", label.URI.Of(params.TextDocument.URI)) + ctx, done := event.Start(ctx, "server.Formatting", label.URI.Of(params.TextDocument.URI)) defer done() fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) diff --git a/gopls/internal/server/general.go b/gopls/internal/server/general.go index 7368206f578..5e02b832747 100644 --- a/gopls/internal/server/general.go +++ b/gopls/internal/server/general.go @@ -38,7 +38,7 @@ import ( ) func (s *server) Initialize(ctx context.Context, params *protocol.ParamInitialize) (*protocol.InitializeResult, error) { - ctx, done := event.Start(ctx, "lsp.Server.initialize") + ctx, done := event.Start(ctx, "server.Initialize") defer done() var clientName string @@ -208,7 +208,7 @@ func (s *server) Initialize(ctx context.Context, params *protocol.ParamInitializ } func (s *server) Initialized(ctx context.Context, params *protocol.InitializedParams) error { - ctx, done := event.Start(ctx, "lsp.Server.initialized") + ctx, done := event.Start(ctx, "server.Initialized") defer done() s.stateMu.Lock() @@ -635,7 +635,7 @@ func (s *server) fileOf(ctx context.Context, uri protocol.DocumentURI) (file.Han // Shutdown implements the 'shutdown' LSP handler. It releases resources // associated with the server and waits for all ongoing work to complete. 
func (s *server) Shutdown(ctx context.Context) error { - ctx, done := event.Start(ctx, "lsp.Server.shutdown") + ctx, done := event.Start(ctx, "server.Shutdown") defer done() s.stateMu.Lock() @@ -662,7 +662,7 @@ func (s *server) Shutdown(ctx context.Context) error { } func (s *server) Exit(ctx context.Context) error { - ctx, done := event.Start(ctx, "lsp.Server.exit") + ctx, done := event.Start(ctx, "server.Exit") defer done() s.stateMu.Lock() diff --git a/gopls/internal/server/highlight.go b/gopls/internal/server/highlight.go index 35ffc2db2f5..04ebbfa25ec 100644 --- a/gopls/internal/server/highlight.go +++ b/gopls/internal/server/highlight.go @@ -16,7 +16,7 @@ import ( ) func (s *server) DocumentHighlight(ctx context.Context, params *protocol.DocumentHighlightParams) ([]protocol.DocumentHighlight, error) { - ctx, done := event.Start(ctx, "lsp.Server.documentHighlight", label.URI.Of(params.TextDocument.URI)) + ctx, done := event.Start(ctx, "server.DocumentHighlight", label.URI.Of(params.TextDocument.URI)) defer done() fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) diff --git a/gopls/internal/server/hover.go b/gopls/internal/server/hover.go index 80c35c09565..ed70ce493ba 100644 --- a/gopls/internal/server/hover.go +++ b/gopls/internal/server/hover.go @@ -25,7 +25,7 @@ func (s *server) Hover(ctx context.Context, params *protocol.HoverParams) (_ *pr recordLatency(ctx, rerr) }() - ctx, done := event.Start(ctx, "lsp.Server.hover", label.URI.Of(params.TextDocument.URI)) + ctx, done := event.Start(ctx, "server.Hover", label.URI.Of(params.TextDocument.URI)) defer done() fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) diff --git a/gopls/internal/server/implementation.go b/gopls/internal/server/implementation.go index 9e61ebc4d88..9b2c103b2c3 100644 --- a/gopls/internal/server/implementation.go +++ b/gopls/internal/server/implementation.go @@ -21,7 +21,7 @@ func (s *server) Implementation(ctx context.Context, params *protocol.Implementa recordLatency(ctx, rerr) }() - ctx, done := event.Start(ctx, "lsp.Server.implementation", label.URI.Of(params.TextDocument.URI)) + ctx, done := event.Start(ctx, "server.Implementation", label.URI.Of(params.TextDocument.URI)) defer done() fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) diff --git a/gopls/internal/server/inlay_hint.go b/gopls/internal/server/inlay_hint.go index fca8bcbc1c8..a11ab4c313a 100644 --- a/gopls/internal/server/inlay_hint.go +++ b/gopls/internal/server/inlay_hint.go @@ -16,7 +16,7 @@ import ( ) func (s *server) InlayHint(ctx context.Context, params *protocol.InlayHintParams) ([]protocol.InlayHint, error) { - ctx, done := event.Start(ctx, "lsp.Server.inlayHint", label.URI.Of(params.TextDocument.URI)) + ctx, done := event.Start(ctx, "server.InlayHint", label.URI.Of(params.TextDocument.URI)) defer done() fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) diff --git a/gopls/internal/server/link.go b/gopls/internal/server/link.go index 851ec036d4d..cf475ca90c9 100644 --- a/gopls/internal/server/link.go +++ b/gopls/internal/server/link.go @@ -29,7 +29,7 @@ import ( ) func (s *server) DocumentLink(ctx context.Context, params *protocol.DocumentLinkParams) (links []protocol.DocumentLink, err error) { - ctx, done := event.Start(ctx, "lsp.Server.documentLink") + ctx, done := event.Start(ctx, "server.DocumentLink") defer done() fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) diff --git a/gopls/internal/server/references.go 
b/gopls/internal/server/references.go index f5019693946..8a01e96498b 100644 --- a/gopls/internal/server/references.go +++ b/gopls/internal/server/references.go @@ -22,7 +22,7 @@ func (s *server) References(ctx context.Context, params *protocol.ReferenceParam recordLatency(ctx, rerr) }() - ctx, done := event.Start(ctx, "lsp.Server.references", label.URI.Of(params.TextDocument.URI)) + ctx, done := event.Start(ctx, "server.References", label.URI.Of(params.TextDocument.URI)) defer done() fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) diff --git a/gopls/internal/server/rename.go b/gopls/internal/server/rename.go index b6fac8ba219..218740bd679 100644 --- a/gopls/internal/server/rename.go +++ b/gopls/internal/server/rename.go @@ -17,7 +17,7 @@ import ( ) func (s *server) Rename(ctx context.Context, params *protocol.RenameParams) (*protocol.WorkspaceEdit, error) { - ctx, done := event.Start(ctx, "lsp.Server.rename", label.URI.Of(params.TextDocument.URI)) + ctx, done := event.Start(ctx, "server.Rename", label.URI.Of(params.TextDocument.URI)) defer done() fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) @@ -68,7 +68,7 @@ func (s *server) Rename(ctx context.Context, params *protocol.RenameParams) (*pr // TODO(rfindley): why wouldn't we want to show an error to the user, if the // user initiated a rename request at the cursor? func (s *server) PrepareRename(ctx context.Context, params *protocol.PrepareRenameParams) (*protocol.PrepareRenamePlaceholder, error) { - ctx, done := event.Start(ctx, "lsp.Server.prepareRename", label.URI.Of(params.TextDocument.URI)) + ctx, done := event.Start(ctx, "server.PrepareRename", label.URI.Of(params.TextDocument.URI)) defer done() fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) diff --git a/gopls/internal/server/selection_range.go b/gopls/internal/server/selection_range.go index 484e1cf67ab..afc878b1544 100644 --- a/gopls/internal/server/selection_range.go +++ b/gopls/internal/server/selection_range.go @@ -27,7 +27,7 @@ import ( // returned for each cursor to avoid multiple round-trips when the user is // likely to issue this command multiple times in quick succession. 
func (s *server) SelectionRange(ctx context.Context, params *protocol.SelectionRangeParams) ([]protocol.SelectionRange, error) { - ctx, done := event.Start(ctx, "lsp.Server.selectionRange") + ctx, done := event.Start(ctx, "server.SelectionRange") defer done() fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) diff --git a/gopls/internal/server/semantic.go b/gopls/internal/server/semantic.go index f746593a3dd..f0a2e11dd98 100644 --- a/gopls/internal/server/semantic.go +++ b/gopls/internal/server/semantic.go @@ -24,7 +24,7 @@ func (s *server) SemanticTokensRange(ctx context.Context, params *protocol.Seman } func (s *server) semanticTokens(ctx context.Context, td protocol.TextDocumentIdentifier, rng *protocol.Range) (*protocol.SemanticTokens, error) { - ctx, done := event.Start(ctx, "lsp.Server.semanticTokens", label.URI.Of(td.URI)) + ctx, done := event.Start(ctx, "server.semanticTokens", label.URI.Of(td.URI)) defer done() fh, snapshot, release, err := s.fileOf(ctx, td.URI) diff --git a/gopls/internal/server/server.go b/gopls/internal/server/server.go index 033295ffb32..c22e8f19750 100644 --- a/gopls/internal/server/server.go +++ b/gopls/internal/server/server.go @@ -181,7 +181,7 @@ type server struct { } func (s *server) WorkDoneProgressCancel(ctx context.Context, params *protocol.WorkDoneProgressCancelParams) error { - ctx, done := event.Start(ctx, "lsp.Server.workDoneProgressCancel") + ctx, done := event.Start(ctx, "server.WorkDoneProgressCancel") defer done() return s.progress.Cancel(params.Token) diff --git a/gopls/internal/server/signature_help.go b/gopls/internal/server/signature_help.go index addcfe1e262..eb464c48e27 100644 --- a/gopls/internal/server/signature_help.go +++ b/gopls/internal/server/signature_help.go @@ -15,7 +15,7 @@ import ( ) func (s *server) SignatureHelp(ctx context.Context, params *protocol.SignatureHelpParams) (*protocol.SignatureHelp, error) { - ctx, done := event.Start(ctx, "lsp.Server.signatureHelp", label.URI.Of(params.TextDocument.URI)) + ctx, done := event.Start(ctx, "server.SignatureHelp", label.URI.Of(params.TextDocument.URI)) defer done() fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) diff --git a/gopls/internal/server/symbols.go b/gopls/internal/server/symbols.go index e35b2c75451..40df7369f51 100644 --- a/gopls/internal/server/symbols.go +++ b/gopls/internal/server/symbols.go @@ -16,7 +16,7 @@ import ( ) func (s *server) DocumentSymbol(ctx context.Context, params *protocol.DocumentSymbolParams) ([]any, error) { - ctx, done := event.Start(ctx, "lsp.Server.documentSymbol", label.URI.Of(params.TextDocument.URI)) + ctx, done := event.Start(ctx, "server.DocumentSymbol", label.URI.Of(params.TextDocument.URI)) defer done() fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) diff --git a/gopls/internal/server/text_synchronization.go b/gopls/internal/server/text_synchronization.go index ad1266d783e..ad8554d9302 100644 --- a/gopls/internal/server/text_synchronization.go +++ b/gopls/internal/server/text_synchronization.go @@ -92,7 +92,7 @@ func (m ModificationSource) String() string { } func (s *server) DidOpen(ctx context.Context, params *protocol.DidOpenTextDocumentParams) error { - ctx, done := event.Start(ctx, "lsp.Server.didOpen", label.URI.Of(params.TextDocument.URI)) + ctx, done := event.Start(ctx, "server.DidOpen", label.URI.Of(params.TextDocument.URI)) defer done() uri := params.TextDocument.URI @@ -121,7 +121,7 @@ func (s *server) DidOpen(ctx context.Context, params *protocol.DidOpenTextDocume } 
func (s *server) DidChange(ctx context.Context, params *protocol.DidChangeTextDocumentParams) error { - ctx, done := event.Start(ctx, "lsp.Server.didChange", label.URI.Of(params.TextDocument.URI)) + ctx, done := event.Start(ctx, "server.DidChange", label.URI.Of(params.TextDocument.URI)) defer done() uri := params.TextDocument.URI @@ -174,7 +174,7 @@ func (s *server) warnAboutModifyingGeneratedFiles(ctx context.Context, uri proto } func (s *server) DidChangeWatchedFiles(ctx context.Context, params *protocol.DidChangeWatchedFilesParams) error { - ctx, done := event.Start(ctx, "lsp.Server.didChangeWatchedFiles") + ctx, done := event.Start(ctx, "server.DidChangeWatchedFiles") defer done() var modifications []file.Modification @@ -190,7 +190,7 @@ func (s *server) DidChangeWatchedFiles(ctx context.Context, params *protocol.Did } func (s *server) DidSave(ctx context.Context, params *protocol.DidSaveTextDocumentParams) error { - ctx, done := event.Start(ctx, "lsp.Server.didSave", label.URI.Of(params.TextDocument.URI)) + ctx, done := event.Start(ctx, "server.DidSave", label.URI.Of(params.TextDocument.URI)) defer done() c := file.Modification{ @@ -204,7 +204,7 @@ func (s *server) DidSave(ctx context.Context, params *protocol.DidSaveTextDocume } func (s *server) DidClose(ctx context.Context, params *protocol.DidCloseTextDocumentParams) error { - ctx, done := event.Start(ctx, "lsp.Server.didClose", label.URI.Of(params.TextDocument.URI)) + ctx, done := event.Start(ctx, "server.DidClose", label.URI.Of(params.TextDocument.URI)) defer done() return s.didModifyFiles(ctx, []file.Modification{ diff --git a/gopls/internal/server/workspace.go b/gopls/internal/server/workspace.go index 8074ecca444..ced5656c6ac 100644 --- a/gopls/internal/server/workspace.go +++ b/gopls/internal/server/workspace.go @@ -61,7 +61,7 @@ func (s *server) addView(ctx context.Context, name string, dir protocol.Document } func (s *server) DidChangeConfiguration(ctx context.Context, _ *protocol.DidChangeConfigurationParams) error { - ctx, done := event.Start(ctx, "lsp.Server.didChangeConfiguration") + ctx, done := event.Start(ctx, "server.DidChangeConfiguration") defer done() var wg sync.WaitGroup @@ -143,7 +143,7 @@ func (s *server) DidChangeConfiguration(ctx context.Context, _ *protocol.DidChan } func (s *server) DidCreateFiles(ctx context.Context, params *protocol.CreateFilesParams) error { - ctx, done := event.Start(ctx, "lsp.Server.didCreateFiles") + ctx, done := event.Start(ctx, "server.DidCreateFiles") defer done() var allChanges []protocol.DocumentChange diff --git a/gopls/internal/server/workspace_symbol.go b/gopls/internal/server/workspace_symbol.go index 9eafeb015ad..f34e76f7937 100644 --- a/gopls/internal/server/workspace_symbol.go +++ b/gopls/internal/server/workspace_symbol.go @@ -20,7 +20,7 @@ func (s *server) Symbol(ctx context.Context, params *protocol.WorkspaceSymbolPar recordLatency(ctx, rerr) }() - ctx, done := event.Start(ctx, "lsp.Server.symbol") + ctx, done := event.Start(ctx, "server.Symbol") defer done() views := s.session.Views() From e73cd5af773602fdbc6cab94bdc0dc38abf828ab Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Sat, 15 Mar 2025 09:03:36 -0400 Subject: [PATCH 180/270] gopls/internal/golang: implement dynamicFuncCallType with typeutil.ClassifyCall Also fix some typos I came across. 
Change-Id: Ib3e73852c8260bf0a537ffb7c23ec1815c9546e1 Reviewed-on: https://go-review.googlesource.com/c/tools/+/658236 Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI --- gopls/internal/golang/implementation.go | 29 +++------- .../testdata/implementation/issue67041.txt | 4 +- internal/typesinternal/classify_call.go | 41 +++++--------- internal/typesinternal/classify_call_test.go | 54 ++++++++----------- 4 files changed, 45 insertions(+), 83 deletions(-) diff --git a/gopls/internal/golang/implementation.go b/gopls/internal/golang/implementation.go index b9a332ac62a..a5ab5d19a13 100644 --- a/gopls/internal/golang/implementation.go +++ b/gopls/internal/golang/implementation.go @@ -32,6 +32,7 @@ import ( "golang.org/x/tools/internal/astutil/cursor" "golang.org/x/tools/internal/astutil/edge" "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/typesinternal" ) // This file defines the new implementation of the 'implementation' @@ -937,6 +938,9 @@ func implFuncs(pkg *cache.Package, pgf *parsego.File, pos token.Pos) ([]protocol } info := pkg.TypesInfo() + if info.Types == nil || info.Defs == nil || info.Uses == nil { + panic("one of info.Types, .Defs or .Uses is nil") + } // Find innermost enclosing FuncType or CallExpr. // @@ -1088,29 +1092,10 @@ func beneathFuncDef(cur cursor.Cursor) bool { // // Tested via ../test/marker/testdata/implementation/signature.txt. func dynamicFuncCallType(info *types.Info, call *ast.CallExpr) types.Type { - fun := ast.Unparen(call.Fun) - tv := info.Types[fun] - - // Reject conversion, or call to built-in. - if !tv.IsValue() { - return nil - } - - // Reject call to named func/method. - if id, ok := fun.(*ast.Ident); ok && is[*types.Func](info.Uses[id]) { - return nil + if typesinternal.ClassifyCall(info, call) == typesinternal.CallDynamic { + return info.Types[call.Fun].Type.Underlying() } - - // Reject method selections (T.method() or x.method()) - if sel, ok := fun.(*ast.SelectorExpr); ok { - seln, ok := info.Selections[sel] - if !ok || seln.Kind() != types.FieldVal { - return nil - } - } - - // TODO(adonovan): consider x() where x : TypeParam. - return tv.Type.Underlying() // e.g. x() or x.field() + return nil } // inToken reports whether pos is within the token of diff --git a/gopls/internal/test/marker/testdata/implementation/issue67041.txt b/gopls/internal/test/marker/testdata/implementation/issue67041.txt index 3b058534cd3..78965200b20 100644 --- a/gopls/internal/test/marker/testdata/implementation/issue67041.txt +++ b/gopls/internal/test/marker/testdata/implementation/issue67041.txt @@ -1,5 +1,5 @@ -This test verifies that implementations uses the correct object when querying -local implementations . As described in golang/go#67041), a bug led to it +This test verifies that Implementations uses the correct object when querying +local implementations. As described in golang/go#67041, a bug led to it comparing types from different realms. -- go.mod -- diff --git a/internal/typesinternal/classify_call.go b/internal/typesinternal/classify_call.go index 9d4da859370..35e0f80248f 100644 --- a/internal/typesinternal/classify_call.go +++ b/internal/typesinternal/classify_call.go @@ -42,19 +42,6 @@ func (k CallKind) String() string { // and further classifies function calls as static calls (where the function is known), // dynamic interface calls, and other dynamic calls. // -// For static, interface and builtin calls, ClassifyCall returns the [types.Object] -// for the name of the caller. 
For calls of instantiated functions and -// methods, it returns the object for the corresponding generic function -// or method on the generic type. -// The relationships between the return values are: -// -// CallKind object -// CallStatic *types.Func -// CallInterface *types.Func -// CallBuiltin *types.Builtin -// CallDynamic nil -// CallConversion nil -// // For the declarations: // // func f() {} @@ -66,33 +53,33 @@ func (k CallKind) String() string { // // ClassifyCall returns the following: // -// f() CallStatic the *types.Func for f -// g[int]() CallStatic the *types.Func for g[T] -// i.M() CallInterface the *types.Func for i.M -// min(1, 2) CallBuiltin the *types.Builtin for min -// v() CallDynamic nil -// s[0]() CallDynamic nil -// int(x) CallConversion nil -// []byte("") CallConversion nil -func ClassifyCall(info *types.Info, call *ast.CallExpr) (CallKind, types.Object) { +// f() CallStatic +// g[int]() CallStatic +// i.M() CallInterface +// min(1, 2) CallBuiltin +// v() CallDynamic +// s[0]() CallDynamic +// int(x) CallConversion +// []byte("") CallConversion +func ClassifyCall(info *types.Info, call *ast.CallExpr) CallKind { if info.Types == nil { panic("ClassifyCall: info.Types is nil") } if info.Types[call.Fun].IsType() { - return CallConversion, nil + return CallConversion } obj := Used(info, call.Fun) // Classify the call by the type of the object, if any. switch obj := obj.(type) { case *types.Builtin: - return CallBuiltin, obj + return CallBuiltin case *types.Func: if interfaceMethod(obj) { - return CallInterface, obj + return CallInterface } - return CallStatic, obj + return CallStatic default: - return CallDynamic, nil + return CallDynamic } } diff --git a/internal/typesinternal/classify_call_test.go b/internal/typesinternal/classify_call_test.go index 6a30ee280df..42bdd193725 100644 --- a/internal/typesinternal/classify_call_test.go +++ b/internal/typesinternal/classify_call_test.go @@ -101,34 +101,30 @@ func TestClassifyCallAndUsed(t *testing.T) { typeParam := lookup("tests").Type().(*types.Signature).TypeParams().At(0).Obj() - // A unique value for marking that Used returns the same object as ClassifyCall. - same := &types.Label{} - // Expected Calls are in the order of CallExprs at the end of src, above. 
wants := []struct { - kind ti.CallKind - classifyObj types.Object // the object returned from ClassifyCall - usedObj types.Object // the object returned from Used, sometimes different + kind ti.CallKind + usedObj types.Object // the object returned from Used }{ - {ti.CallStatic, lookup("g"), same}, // g - {ti.CallDynamic, nil, lookup("f")}, // f - {ti.CallBuiltin, printlnObj, same}, // println - {ti.CallStatic, member("S", "g"), same}, // z.g - {ti.CallStatic, member("S", "g"), same}, // a.b.c.g - {ti.CallStatic, member("S", "g"), same}, // S.g(z, 1) - {ti.CallDynamic, nil, member("z", "f")}, // z.f - {ti.CallInterface, member("I", "m"), same}, // I(nil).m - {ti.CallConversion, nil, lookup("I")}, // I(nil) - {ti.CallDynamic, nil, same}, // m[0] - {ti.CallDynamic, nil, same}, // n[0] - {ti.CallStatic, lookup("F"), same}, // F[int] - {ti.CallStatic, lookup("F"), same}, // F[T] - {ti.CallDynamic, nil, same}, // f(){} - {ti.CallConversion, nil, same}, // []byte - {ti.CallConversion, nil, lookup("A")}, // A[int] - {ti.CallConversion, nil, typeParam}, // T - {ti.CallStatic, member("S", "g"), same}, // (z.g) - {ti.CallStatic, member("S", "g"), same}, // (z).g + {ti.CallStatic, lookup("g")}, // g + {ti.CallDynamic, lookup("f")}, // f + {ti.CallBuiltin, printlnObj}, // println + {ti.CallStatic, member("S", "g")}, // z.g + {ti.CallStatic, member("S", "g")}, // a.b.c.g + {ti.CallStatic, member("S", "g")}, // S.g(z, 1) + {ti.CallDynamic, member("z", "f")}, // z.f + {ti.CallInterface, member("I", "m")}, // I(nil).m + {ti.CallConversion, lookup("I")}, // I(nil) + {ti.CallDynamic, nil}, // m[0] + {ti.CallDynamic, nil}, // n[0] + {ti.CallStatic, lookup("F")}, // F[int] + {ti.CallStatic, lookup("F")}, // F[T] + {ti.CallDynamic, nil}, // f(){} + {ti.CallConversion, nil}, // []byte + {ti.CallConversion, lookup("A")}, // A[int] + {ti.CallConversion, typeParam}, // T + {ti.CallStatic, member("S", "g")}, // (z.g) + {ti.CallStatic, member("S", "g")}, // (z).g } i := 0 @@ -143,20 +139,14 @@ func TestClassifyCallAndUsed(t *testing.T) { } prefix := fmt.Sprintf("%s (#%d)", buf.String(), i) - gotKind, gotObj := ti.ClassifyCall(info, call) + gotKind := ti.ClassifyCall(info, call) want := wants[i] if gotKind != want.kind { t.Errorf("%s kind: got %s, want %s", prefix, gotKind, want.kind) } - if gotObj != want.classifyObj { - t.Errorf("%s obj: got %v (%[2]T), want %v", prefix, gotObj, want.classifyObj) - } w := want.usedObj - if w == same { - w = want.classifyObj - } if g := ti.Used(info, call.Fun); g != w { t.Errorf("%s used obj: got %v (%[2]T), want %v", prefix, g, w) } From 9a1fbbdb530258f2c0db9c411aecf399d1ec256b Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Fri, 4 Apr 2025 16:15:19 -0400 Subject: [PATCH 181/270] internal/typesinternal: change Used to UsedIdent Add UsedIdent, which returns the identifier underlying a used object. To get the object associate with e, one can now write info.Uses[UsedIdent(e)] This replaces Used, whose job can now be done by the above expression. As a demonstration, we can simplify the unusedparams analysis. 
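For illustration, a minimal sketch of the new idiom (the helper below is hypothetical and not part of this CL; it assumes the UsedIdent signature added here, which also takes the *types.Info, so the lookup is written info.Uses[UsedIdent(info, e)]):

```go
package demo

import (
	"go/ast"
	"go/types"

	"golang.org/x/tools/internal/typesinternal" // internal: usable only within x/tools
)

// calleeFunc returns the *types.Func used by the call's Fun expression,
// or nil for dynamic calls, conversions, and builtins.
// (Hypothetical helper, shown only to demonstrate the idiom.)
func calleeFunc(info *types.Info, call *ast.CallExpr) *types.Func {
	// The new spelling of "the object used by call.Fun":
	obj := info.Uses[typesinternal.UsedIdent(info, call.Fun)]
	fn, _ := obj.(*types.Func)
	return fn
}
```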
Change-Id: Id6b08b548fa495d42de4a6767bba7717ad1b0d08 Reviewed-on: https://go-review.googlesource.com/c/tools/+/663035 Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI --- go/analysis/passes/nilfunc/nilfunc.go | 2 +- go/types/typeutil/callee.go | 21 +++++------ .../analysis/unusedparams/unusedparams.go | 17 +-------- internal/typesinternal/classify_call.go | 37 ++++++++++--------- internal/typesinternal/classify_call_test.go | 4 +- 5 files changed, 34 insertions(+), 47 deletions(-) diff --git a/go/analysis/passes/nilfunc/nilfunc.go b/go/analysis/passes/nilfunc/nilfunc.go index 2b28c5a6b2c..fa1883b0c34 100644 --- a/go/analysis/passes/nilfunc/nilfunc.go +++ b/go/analysis/passes/nilfunc/nilfunc.go @@ -56,7 +56,7 @@ func run(pass *analysis.Pass) (any, error) { } // Only want functions. - obj := typesinternal.Used(pass.TypesInfo, e2) + obj := pass.TypesInfo.Uses[typesinternal.UsedIdent(pass.TypesInfo, e2)] if _, ok := obj.(*types.Func); !ok { return } diff --git a/go/types/typeutil/callee.go b/go/types/typeutil/callee.go index eeeb570a73c..53b71339305 100644 --- a/go/types/typeutil/callee.go +++ b/go/types/typeutil/callee.go @@ -18,7 +18,7 @@ import ( // Note: for calls of instantiated functions and methods, Callee returns // the corresponding generic function or method on the generic type. func Callee(info *types.Info, call *ast.CallExpr) types.Object { - obj := used(info, call.Fun) + obj := info.Uses[usedIdent(info, call.Fun)] if obj == nil { return nil } @@ -34,7 +34,7 @@ func Callee(info *types.Info, call *ast.CallExpr) types.Object { // Note: for calls of instantiated functions and methods, StaticCallee returns // the corresponding generic function or method on the generic type. func StaticCallee(info *types.Info, call *ast.CallExpr) *types.Func { - obj := used(info, call.Fun) + obj := info.Uses[usedIdent(info, call.Fun)] fn, _ := obj.(*types.Func) if fn == nil || interfaceMethod(fn) { return nil @@ -42,14 +42,14 @@ func StaticCallee(info *types.Info, call *ast.CallExpr) *types.Func { return fn } -// used is the implementation of [internal/typesinternal.Used]. -// It returns the object associated with e. -// See typesinternal.Used for a fuller description. +// usedIdent is the implementation of [internal/typesinternal.UsedIdent]. +// It returns the identifier associated with e. +// See typesinternal.UsedIdent for a fuller description. // This function should live in typesinternal, but cannot because it would // create an import cycle. // -//go:linkname used -func used(info *types.Info, e ast.Expr) types.Object { +//go:linkname usedIdent +func usedIdent(info *types.Info, e ast.Expr) *ast.Ident { if info.Types == nil || info.Uses == nil { panic("one of info.Types or info.Uses is nil; both must be populated") } @@ -63,17 +63,16 @@ func used(info *types.Info, e ast.Expr) types.Object { e = d.X } - var obj types.Object switch e := ast.Unparen(e).(type) { // info.Uses always has the object we want, even for selector expressions. // We don't need info.Selections. // See go/types/recording.go:recordSelection. case *ast.Ident: - obj = info.Uses[e] // type, var, builtin, or declared func + return e case *ast.SelectorExpr: - obj = info.Uses[e.Sel] // selector e.f or T.f or qualified identifier pkg.X + return e.Sel } - return obj + return nil } // interfaceMethod reports whether its argument is a method of an interface. 
diff --git a/gopls/internal/analysis/unusedparams/unusedparams.go b/gopls/internal/analysis/unusedparams/unusedparams.go index 559b65d2bc2..12076c5f273 100644 --- a/gopls/internal/analysis/unusedparams/unusedparams.go +++ b/gopls/internal/analysis/unusedparams/unusedparams.go @@ -80,24 +80,9 @@ func run(pass *analysis.Pass) (any, error) { inspect.Preorder(filter, func(n ast.Node) { switch n := n.(type) { case *ast.CallExpr: - // Strip off any generic instantiation. - fun := n.Fun - switch fun_ := fun.(type) { - case *ast.IndexExpr: - fun = fun_.X // f[T]() (funcs[i]() is rejected below) - case *ast.IndexListExpr: - fun = fun_.X // f[K, V]() - } - + id := typesinternal.UsedIdent(pass.TypesInfo, n.Fun) // Find object: // record non-exported function, method, or func-typed var. - var id *ast.Ident - switch fun := fun.(type) { - case *ast.Ident: - id = fun - case *ast.SelectorExpr: - id = fun.Sel - } if id != nil && !id.IsExported() { switch pass.TypesInfo.Uses[id].(type) { case *types.Func, *types.Var: diff --git a/internal/typesinternal/classify_call.go b/internal/typesinternal/classify_call.go index 35e0f80248f..649c82b6bea 100644 --- a/internal/typesinternal/classify_call.go +++ b/internal/typesinternal/classify_call.go @@ -68,7 +68,7 @@ func ClassifyCall(info *types.Info, call *ast.CallExpr) CallKind { if info.Types[call.Fun].IsType() { return CallConversion } - obj := Used(info, call.Fun) + obj := info.Uses[UsedIdent(info, call.Fun)] // Classify the call by the type of the object, if any. switch obj := obj.(type) { case *types.Builtin: @@ -83,7 +83,9 @@ func ClassifyCall(info *types.Info, call *ast.CallExpr) CallKind { } } -// Used returns the [types.Object] used by e, if any. +// UsedIdent returns the identifier such that info.Uses[UsedIdent(info, e)] +// is the [types.Object] used by e, if any. +// // If e is one of various forms of reference: // // f, c, v, T lexical reference @@ -92,7 +94,8 @@ func ClassifyCall(info *types.Info, call *ast.CallExpr) CallKind { // expr.f field or method value selector // T.f method expression selector // -// Used returns the object to which it refers. +// UsedIdent returns the identifier whose is associated value in [types.Info.Uses] +// is the object to which it refers. // // For the declarations: // @@ -105,28 +108,28 @@ func ClassifyCall(info *types.Info, call *ast.CallExpr) CallKind { // i I // ) // -// Used returns the following: +// UsedIdent returns the following: // -// Expr Used -// x the *types.Var for x -// s.f the *types.Var for f -// F[int] the *types.Func for F[T] (not F[int]) -// i.M the *types.Func for i.M -// I.M the *types.Func for I.M -// min the *types.Builtin for min -// int the *types.TypeName for int +// Expr UsedIdent +// x x +// s.f f +// F[int] F +// i.M M +// I.M M +// min min +// int int // 1 nil // a[0] nil // []byte nil // -// Note: if e is an instantiated function or method, Used returns +// Note: if e is an instantiated function or method, UsedIdent returns // the corresponding generic function or method on the generic type. 
-func Used(info *types.Info, e ast.Expr) types.Object { - return used(info, e) +func UsedIdent(info *types.Info, e ast.Expr) *ast.Ident { + return usedIdent(info, e) } -//go:linkname used golang.org/x/tools/go/types/typeutil.used -func used(info *types.Info, e ast.Expr) types.Object +//go:linkname usedIdent golang.org/x/tools/go/types/typeutil.usedIdent +func usedIdent(info *types.Info, e ast.Expr) *ast.Ident //go:linkname interfaceMethod golang.org/x/tools/go/types/typeutil.interfaceMethod func interfaceMethod(f *types.Func) bool diff --git a/internal/typesinternal/classify_call_test.go b/internal/typesinternal/classify_call_test.go index 42bdd193725..e875727d1a5 100644 --- a/internal/typesinternal/classify_call_test.go +++ b/internal/typesinternal/classify_call_test.go @@ -104,7 +104,7 @@ func TestClassifyCallAndUsed(t *testing.T) { // Expected Calls are in the order of CallExprs at the end of src, above. wants := []struct { kind ti.CallKind - usedObj types.Object // the object returned from Used + usedObj types.Object // the object obtained from the result of UsedIdent }{ {ti.CallStatic, lookup("g")}, // g {ti.CallDynamic, lookup("f")}, // f @@ -147,7 +147,7 @@ func TestClassifyCallAndUsed(t *testing.T) { } w := want.usedObj - if g := ti.Used(info, call.Fun); g != w { + if g := info.Uses[ti.UsedIdent(info, call.Fun)]; g != w { t.Errorf("%s used obj: got %v (%[2]T), want %v", prefix, g, w) } i++ From 5916e3cbd8b65f73c18253607fa0b696fc5b9da6 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Mon, 7 Apr 2025 15:51:09 -0400 Subject: [PATCH 182/270] internal/tokeninternal: AddExistingFiles: tweaks for proposal This CL clarifies AddExistingFiles in preparation for its proposal as an upstream change. The two helpers FileSetFor and CloneFileSet are demonstrably mere convenience functions. Updates golang/go#73205 Change-Id: I9b83dc46575417ac074d91f3d5bed942b522da6b Reviewed-on: https://go-review.googlesource.com/c/tools/+/663596 LUCI-TryBot-Result: Go LUCI Reviewed-by: Robert Findley Auto-Submit: Alan Donovan Reviewed-by: Jonathan Amsterdam --- internal/tokeninternal/tokeninternal.go | 37 +++++++++++++++++-------- 1 file changed, 25 insertions(+), 12 deletions(-) diff --git a/internal/tokeninternal/tokeninternal.go b/internal/tokeninternal/tokeninternal.go index 0a73e2ebda3..549bb183976 100644 --- a/internal/tokeninternal/tokeninternal.go +++ b/internal/tokeninternal/tokeninternal.go @@ -9,6 +9,7 @@ package tokeninternal import ( "fmt" "go/token" + "slices" "sort" "sync" "sync/atomic" @@ -18,7 +19,29 @@ import ( // AddExistingFiles adds the specified files to the FileSet if they // are not already present. It panics if any pair of files in the // resulting FileSet would overlap. +// +// TODO(adonovan): add this a method to FileSet; see +// https://github.com/golang/go/issues/73205 func AddExistingFiles(fset *token.FileSet, files []*token.File) { + + // This function cannot be implemented as: + // + // for _, file := range files { + // if prev := fset.File(token.Pos(file.Base())); prev != nil { + // if prev != file { + // panic("FileSet contains a different file at the same base") + // } + // continue + // } + // file2 := fset.AddFile(file.Name(), file.Base(), file.Size()) + // file2.SetLines(file.Lines()) + // } + // + // because all calls to AddFile must be in increasing order. + // AddExistingFiles lets us augment an existing FileSet + // sequentially, so long as all sets of files have disjoint + // ranges. + // Punch through the FileSet encapsulation. 
type tokenFileSet struct { // This type remained essentially consistent from go1.16 to go1.21. @@ -83,10 +106,7 @@ func AddExistingFiles(fset *token.FileSet, files []*token.File) { // of their Base. func FileSetFor(files ...*token.File) *token.FileSet { fset := token.NewFileSet() - for _, f := range files { - f2 := fset.AddFile(f.Name(), f.Base(), f.Size()) - f2.SetLines(f.Lines()) - } + AddExistingFiles(fset, files) return fset } @@ -94,12 +114,5 @@ func FileSetFor(files ...*token.File) *token.FileSet { // create copies of the token.Files in fset: they are added to the resulting // FileSet unmodified. func CloneFileSet(fset *token.FileSet) *token.FileSet { - var files []*token.File - fset.Iterate(func(f *token.File) bool { - files = append(files, f) - return true - }) - newFileSet := token.NewFileSet() - AddExistingFiles(newFileSet, files) - return newFileSet + return FileSetFor(slices.Collect(fset.Iterate)...) } From 456962ef0d65798b244374a272fb19b7d9723b62 Mon Sep 17 00:00:00 2001 From: Gopher Robot Date: Mon, 7 Apr 2025 14:12:03 -0700 Subject: [PATCH 183/270] go.mod: update golang.org/x dependencies Update golang.org/x dependencies to their latest tagged versions. Change-Id: Ia69bcc0a41699a8a7b57bc09bee11baa68495e3b Reviewed-on: https://go-review.googlesource.com/c/tools/+/663616 Auto-Submit: Gopher Robot LUCI-TryBot-Result: Go LUCI Reviewed-by: Dmitri Shuralyov Reviewed-by: David Chase --- go.mod | 6 +++--- go.sum | 12 ++++++------ gopls/go.mod | 6 +++--- gopls/go.sum | 18 +++++++++--------- 4 files changed, 21 insertions(+), 21 deletions(-) diff --git a/go.mod b/go.mod index 3a120629b94..7e4e371b770 100644 --- a/go.mod +++ b/go.mod @@ -6,9 +6,9 @@ require ( github.com/google/go-cmp v0.6.0 github.com/yuin/goldmark v1.4.13 golang.org/x/mod v0.24.0 - golang.org/x/net v0.37.0 - golang.org/x/sync v0.12.0 + golang.org/x/net v0.39.0 + golang.org/x/sync v0.13.0 golang.org/x/telemetry v0.0.0-20240521205824-bda55230c457 ) -require golang.org/x/sys v0.31.0 // indirect +require golang.org/x/sys v0.32.0 // indirect diff --git a/go.sum b/go.sum index 3d0337c8351..ff5857bd93a 100644 --- a/go.sum +++ b/go.sum @@ -4,11 +4,11 @@ github.com/yuin/goldmark v1.4.13 h1:fVcFKWvrslecOb/tg+Cc05dkeYx540o0FuFt3nUVDoE= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= golang.org/x/mod v0.24.0 h1:ZfthKaKaT4NrhGVZHO1/WDTwGES4De8KtWO0SIbNJMU= golang.org/x/mod v0.24.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww= -golang.org/x/net v0.37.0 h1:1zLorHbz+LYj7MQlSf1+2tPIIgibq2eL5xkrGk6f+2c= -golang.org/x/net v0.37.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8= -golang.org/x/sync v0.12.0 h1:MHc5BpPuC30uJk597Ri8TV3CNZcTLu6B6z4lJy+g6Jw= -golang.org/x/sync v0.12.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= -golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik= -golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/net v0.39.0 h1:ZCu7HMWDxpXpaiKdhzIfaltL9Lp31x/3fCP11bc6/fY= +golang.org/x/net v0.39.0/go.mod h1:X7NRbYVEA+ewNkCNyJ513WmMdQ3BineSwVtN2zD/d+E= +golang.org/x/sync v0.13.0 h1:AauUjRAJ9OSnvULf/ARrrVywoJDy0YS2AwQ98I37610= +golang.org/x/sync v0.13.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sys v0.32.0 h1:s77OFDvIQeibCmezSnk/q6iAfkdiQaJi4VzroCFrN20= +golang.org/x/sys v0.32.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= golang.org/x/telemetry v0.0.0-20240521205824-bda55230c457 h1:zf5N6UOrA487eEFacMePxjXAJctxKmyjKUsjA11Uzuk= golang.org/x/telemetry 
v0.0.0-20240521205824-bda55230c457/go.mod h1:pRgIJT+bRLFKnoM1ldnzKoxTIn14Yxz928LQRYYgIN0= diff --git a/gopls/go.mod b/gopls/go.mod index 5cabb7974de..c09e2daf7bd 100644 --- a/gopls/go.mod +++ b/gopls/go.mod @@ -6,10 +6,10 @@ require ( github.com/google/go-cmp v0.6.0 github.com/jba/templatecheck v0.7.1 golang.org/x/mod v0.24.0 - golang.org/x/sync v0.12.0 - golang.org/x/sys v0.31.0 + golang.org/x/sync v0.13.0 + golang.org/x/sys v0.32.0 golang.org/x/telemetry v0.0.0-20250220152412-165e2f84edbc - golang.org/x/text v0.23.0 + golang.org/x/text v0.24.0 golang.org/x/tools v0.30.0 golang.org/x/vuln v1.1.4 gopkg.in/yaml.v3 v3.0.1 diff --git a/gopls/go.sum b/gopls/go.sum index 20633541388..f5a9bbde4ca 100644 --- a/gopls/go.sum +++ b/gopls/go.sum @@ -16,7 +16,7 @@ github.com/rogpeppe/go-internal v1.13.2-0.20241226121412-a5dc8ff20d0a h1:w3tdWGK github.com/rogpeppe/go-internal v1.13.2-0.20241226121412-a5dc8ff20d0a/go.mod h1:S8kfXMp+yh77OxPD4fdM6YUknrZpQxLhvxzS4gDHENY= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= -golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc= +golang.org/x/crypto v0.37.0/go.mod h1:vg+k43peMZ0pUMhYmVAWysMK35e6ioLh3wB8ZCAfbVc= golang.org/x/exp/typeparams v0.0.0-20250218142911-aa4b98e5adaa h1:Br3+0EZZohShrmVVc85znGpxw7Ca8hsUJlrdT/JQGw8= golang.org/x/exp/typeparams v0.0.0-20250218142911-aa4b98e5adaa/go.mod h1:LKZHyeOpPuZcMgxeHjJp4p5yvxrCX1xDvH10zYHhjjQ= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= @@ -25,27 +25,27 @@ golang.org/x/mod v0.24.0 h1:ZfthKaKaT4NrhGVZHO1/WDTwGES4De8KtWO0SIbNJMU= golang.org/x/mod v0.24.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= -golang.org/x/net v0.37.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8= +golang.org/x/net v0.39.0/go.mod h1:X7NRbYVEA+ewNkCNyJ513WmMdQ3BineSwVtN2zD/d+E= golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -golang.org/x/sync v0.12.0 h1:MHc5BpPuC30uJk597Ri8TV3CNZcTLu6B6z4lJy+g6Jw= -golang.org/x/sync v0.12.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sync v0.13.0 h1:AauUjRAJ9OSnvULf/ARrrVywoJDy0YS2AwQ98I37610= +golang.org/x/sync v0.13.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik= -golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/sys v0.32.0 h1:s77OFDvIQeibCmezSnk/q6iAfkdiQaJi4VzroCFrN20= +golang.org/x/sys v0.32.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= golang.org/x/telemetry v0.0.0-20240521205824-bda55230c457/go.mod h1:pRgIJT+bRLFKnoM1ldnzKoxTIn14Yxz928LQRYYgIN0= golang.org/x/telemetry v0.0.0-20250220152412-165e2f84edbc h1:HS+G1Mhh2dxM8ObutfYKdjfD7zpkyeP/UxeRnJpIZtQ= golang.org/x/telemetry v0.0.0-20250220152412-165e2f84edbc/go.mod h1:bDzXkYUaHzz51CtDy5kh/jR4lgPxsdbqC37kp/dzhCc= golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= 
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= -golang.org/x/term v0.30.0/go.mod h1:NYYFdzHoI5wRh/h5tDMdMqCqPJZEuNqVR5xJLd/n67g= +golang.org/x/term v0.31.0/go.mod h1:R4BeIy7D95HzImkxGkTW1UQTtP54tio2RyHz7PwK0aw= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= -golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY= -golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4= +golang.org/x/text v0.24.0 h1:dd5Bzh4yt5KYA8f9CJHCP4FB4D51c2c6JvN37xJJkJ0= +golang.org/x/text v0.24.0/go.mod h1:L8rBsPeo2pSS+xqN0d5u2ikmjtmoJbDBT1b7nHvFCdU= golang.org/x/vuln v1.1.4 h1:Ju8QsuyhX3Hk8ma3CesTbO8vfJD9EvUBgHvkxHBzj0I= golang.org/x/vuln v1.1.4/go.mod h1:F+45wmU18ym/ca5PLTPLsSzr2KppzswxPP603ldA67s= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= From 9fbec96952b581229b6fa24c1b7dd7527257b12e Mon Sep 17 00:00:00 2001 From: xieyuschen Date: Sun, 6 Apr 2025 16:15:42 -0600 Subject: [PATCH 184/270] gopls/internal/server: workspace: skip adding already added file This CL adds a check to prevent re-adding a folder to the workspace if it has already been added. It resolves an issue where the 'loading packages...' status bar would get stuck in the editor. The problem occurs when the editor sends an initialization request with a rootURI but no workspaceFolders, followed by a DidChangeWorkspaceFolders request for the same folder. Re-adding the same folder is unnecessary and causes the status bar to hang. Additionally, this change cleans the provided URI to handle cases where the client appends a trailing slash to the same folder. An alternative approach is to reload the added folder again and end the process of loading package as expected so the status bar could end soon, which is unneccessary because there is no change in given file. Fixes golang/go#71967 Change-Id: Ib28c327a5a85f1fdd54e4facedf97d500b909fb2 Reviewed-on: https://go-review.googlesource.com/c/tools/+/663295 LUCI-TryBot-Result: Go LUCI Reviewed-by: Robert Findley Reviewed-by: Dmitri Shuralyov --- gopls/internal/cache/session.go | 15 +++-- gopls/internal/protocol/uri.go | 5 ++ gopls/internal/server/general.go | 7 ++- .../internal/test/integration/fake/editor.go | 28 +++++++-- gopls/internal/test/integration/options.go | 16 +++++ .../integration/workspace/workspace_test.go | 60 +++++++++++++++++++ 6 files changed, 122 insertions(+), 9 deletions(-) diff --git a/gopls/internal/cache/session.go b/gopls/internal/cache/session.go index c46fc78b975..aa970b41e2c 100644 --- a/gopls/internal/cache/session.go +++ b/gopls/internal/cache/session.go @@ -139,11 +139,18 @@ func (s *Session) NewView(ctx context.Context, folder *Folder) (*View, *Snapshot } view, snapshot, release := s.createView(ctx, def) s.views = append(s.views, view) - // we always need to drop the view map - s.viewMap = make(map[protocol.DocumentURI]*View) + s.viewMap[protocol.Clean(folder.Dir)] = view return view, snapshot, release, nil } +// HasView checks whether the uri's view exists. +func (s *Session) HasView(uri protocol.DocumentURI) bool { + s.viewMu.Lock() + defer s.viewMu.Unlock() + _, ok := s.viewMap[protocol.Clean(uri)] + return ok +} + // createView creates a new view, with an initial snapshot that retains the // supplied context, detached from events and cancelation. 
// @@ -389,7 +396,7 @@ func (s *Session) SnapshotOf(ctx context.Context, uri protocol.DocumentURI) (*Sn // View is shut down. Forget this association. s.viewMu.Lock() if s.viewMap[uri] == v { - delete(s.viewMap, uri) + delete(s.viewMap, protocol.Clean(uri)) } s.viewMu.Unlock() } @@ -478,7 +485,7 @@ func (s *Session) viewOfLocked(ctx context.Context, uri protocol.DocumentURI) (* // (as in golang/go#60776). v = relevantViews[0] } - s.viewMap[uri] = v // may be nil + s.viewMap[protocol.Clean(uri)] = v // may be nil } return v, nil } diff --git a/gopls/internal/protocol/uri.go b/gopls/internal/protocol/uri.go index 4105bd041f8..491d767805f 100644 --- a/gopls/internal/protocol/uri.go +++ b/gopls/internal/protocol/uri.go @@ -67,6 +67,11 @@ func (uri *DocumentURI) UnmarshalText(data []byte) (err error) { return } +// Clean returns the cleaned uri by triggering filepath.Clean underlying. +func Clean(uri DocumentURI) DocumentURI { + return URIFromPath(filepath.Clean(uri.Path())) +} + // Path returns the file path for the given URI. // // DocumentURI("").Path() returns the empty string. diff --git a/gopls/internal/server/general.go b/gopls/internal/server/general.go index 5e02b832747..8dc0384b055 100644 --- a/gopls/internal/server/general.go +++ b/gopls/internal/server/general.go @@ -312,10 +312,15 @@ func (s *server) addFolders(ctx context.Context, folders []protocol.WorkspaceFol // but the list can grow over time. var filtered []protocol.WorkspaceFolder for _, f := range folders { - if _, err := protocol.ParseDocumentURI(f.URI); err != nil { + uri, err := protocol.ParseDocumentURI(f.URI) + if err != nil { debuglog.Warning.Logf(ctx, "skip adding virtual folder %q - invalid folder URI: %v", f.Name, err) continue } + if s.session.HasView(uri) { + debuglog.Warning.Logf(ctx, "skip adding the already added folder %q - its view has been created before", f.Name) + continue + } filtered = append(filtered, f) } folders = filtered diff --git a/gopls/internal/test/integration/fake/editor.go b/gopls/internal/test/integration/fake/editor.go index 01f3de8aba9..bd459decea4 100644 --- a/gopls/internal/test/integration/fake/editor.go +++ b/gopls/internal/test/integration/fake/editor.go @@ -108,6 +108,14 @@ type EditorConfig struct { // To explicitly send no workspace folders, use an empty (non-nil) slice. WorkspaceFolders []string + // NoDefaultWorkspaceFiles is used to specify whether the fake editor + // should give a default workspace folder when WorkspaceFolders is nil. + // When it's true, the editor will pass original WorkspaceFolders as is to the LSP server. + NoDefaultWorkspaceFiles bool + + // RelRootPath is the root path which will be converted to rootUri to configure on the LSP server. + RelRootPath string + // Whether to edit files with windows line endings. 
WindowsLineEndings bool @@ -322,8 +330,9 @@ func (e *Editor) initialize(ctx context.Context) error { Version: "v1.0.0", } params.InitializationOptions = makeSettings(e.sandbox, config, nil) - params.WorkspaceFolders = makeWorkspaceFolders(e.sandbox, config.WorkspaceFolders) + params.WorkspaceFolders = makeWorkspaceFolders(e.sandbox, config.WorkspaceFolders, config.NoDefaultWorkspaceFiles) + params.RootURI = protocol.DocumentURI(makeRootURI(e.sandbox, config.RelRootPath)) capabilities, err := clientCapabilities(config) if err != nil { return fmt.Errorf("unmarshalling EditorConfig.CapabilitiesJSON: %v", err) @@ -447,7 +456,10 @@ var uriRE = regexp.MustCompile(`^[a-z][a-z0-9+\-.]*://\S+`) // makeWorkspaceFolders creates a slice of workspace folders to use for // this editing session, based on the editor configuration. -func makeWorkspaceFolders(sandbox *Sandbox, paths []string) (folders []protocol.WorkspaceFolder) { +func makeWorkspaceFolders(sandbox *Sandbox, paths []string, useEmpty bool) (folders []protocol.WorkspaceFolder) { + if len(paths) == 0 && useEmpty { + return nil + } if len(paths) == 0 { paths = []string{string(sandbox.Workdir.RelativeTo)} } @@ -466,6 +478,14 @@ func makeWorkspaceFolders(sandbox *Sandbox, paths []string) (folders []protocol. return folders } +func makeRootURI(sandbox *Sandbox, path string) string { + uri := path + if !uriRE.MatchString(path) { // relative file path + uri = string(sandbox.Workdir.URI(path)) + } + return uri +} + // onFileChanges is registered to be called by the Workdir on any writes that // go through the Workdir API. It is called synchronously by the Workdir. func (e *Editor) onFileChanges(ctx context.Context, evts []protocol.FileEvent) { @@ -1645,8 +1665,8 @@ func (e *Editor) ChangeWorkspaceFolders(ctx context.Context, folders []string) e config := e.Config() // capture existing folders so that we can compute the change. - oldFolders := makeWorkspaceFolders(e.sandbox, config.WorkspaceFolders) - newFolders := makeWorkspaceFolders(e.sandbox, folders) + oldFolders := makeWorkspaceFolders(e.sandbox, config.WorkspaceFolders, config.NoDefaultWorkspaceFiles) + newFolders := makeWorkspaceFolders(e.sandbox, folders, config.NoDefaultWorkspaceFiles) config.WorkspaceFolders = folders e.SetConfig(config) diff --git a/gopls/internal/test/integration/options.go b/gopls/internal/test/integration/options.go index 11824aa7c16..5c5cdc19227 100644 --- a/gopls/internal/test/integration/options.go +++ b/gopls/internal/test/integration/options.go @@ -135,6 +135,22 @@ func WorkspaceFolders(relFolders ...string) RunOption { }) } +// NoDefaultWorkspaceFiles is used to specify whether the fake editor +// should give a default workspace folder to the LSP server. +// When it's true, the editor will pass original WorkspaceFolders to the LSP server. +func NoDefaultWorkspaceFiles() RunOption { + return optionSetter(func(opts *runConfig) { + opts.editor.NoDefaultWorkspaceFiles = true + }) +} + +// RootPath configures the roo path which will be converted to rootUri and sent to the LSP server. +func RootPath(relpath string) RunOption { + return optionSetter(func(opts *runConfig) { + opts.editor.RelRootPath = relpath + }) +} + // FolderSettings defines per-folder workspace settings, keyed by relative path // to the folder. 
// diff --git a/gopls/internal/test/integration/workspace/workspace_test.go b/gopls/internal/test/integration/workspace/workspace_test.go index 00d4d81e021..fc96a47dbe0 100644 --- a/gopls/internal/test/integration/workspace/workspace_test.go +++ b/gopls/internal/test/integration/workspace/workspace_test.go @@ -1412,6 +1412,66 @@ func TestInitializeWithNonFileWorkspaceFolders(t *testing.T) { } } +// TestChangeAddedWorkspaceFolders tests issue71967 which an editor sends the following requests. +// +// 1. send an initialization request with rootURI but no workspaceFolders, +// which gopls helps to find a workspaceFolders for it. +// 2. send a DidChangeWorkspaceFolders request with the exact the same folder gopls helps to find. +// +// It uses the same approach to simulate the scenario, and ensure we can skip the already added file. +func TestChangeAddedWorkspaceFolders(t *testing.T) { + for _, tt := range []struct { + name string + after []string + wantViewRoots []string + }{ + { + name: "add an already added file", + after: []string{"modb"}, + wantViewRoots: []string{"./modb"}, + }, + { + name: "add an already added file but with an ending slash", + after: []string{"modb/"}, + wantViewRoots: []string{"./modb"}, + }, + { + name: "add an already added file and a new file", + after: []string{"modb", "moda/a"}, + wantViewRoots: []string{"./modb", "moda/a"}, + }, + } { + t.Run(tt.name, func(t *testing.T) { + opts := []RunOption{ProxyFiles(workspaceProxy), RootPath("modb"), NoDefaultWorkspaceFiles()} + WithOptions(opts...).Run(t, multiModule, func(t *testing.T, env *Env) { + summary := func(typ cache.ViewType, root, folder string) command.View { + return command.View{ + Type: typ.String(), + Root: env.Sandbox.Workdir.URI(root), + Folder: env.Sandbox.Workdir.URI(folder), + } + } + checkViews := func(want ...command.View) { + got := env.Views() + if diff := cmp.Diff(want, got, cmpopts.IgnoreFields(command.View{}, "ID")); diff != "" { + t.Errorf("SummarizeViews() mismatch (-want +got):\n%s", diff) + } + } + var wantViews []command.View + for _, root := range tt.wantViewRoots { + wantViews = append(wantViews, summary(cache.GoModView, root, root)) + } + env.ChangeWorkspaceFolders(tt.after...) + env.Await( + LogMatching(protocol.Warning, "skip adding the already added folder", 1, false), + NoOutstandingWork(IgnoreTelemetryPromptWork), + ) + checkViews(wantViews...) + }) + }) + } +} + // Test that non-file scheme Document URIs in ChangeWorkspaceFolders // notification does not produce errors. 
func TestChangeNonFileWorkspaceFolders(t *testing.T) { From af71e4241f22deece57e98f466a54a46c877c799 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Tue, 8 Apr 2025 14:22:32 -0400 Subject: [PATCH 185/270] gopls/internal/golang: Rename: fix crash in ill-typed redeclaration + test Fixes golang/go#70968 Change-Id: I5d446dc3d4bd530a73565a17c12fa875bf25cfd0 Reviewed-on: https://go-review.googlesource.com/c/tools/+/663915 LUCI-TryBot-Result: Go LUCI Auto-Submit: Alan Donovan Reviewed-by: Robert Findley --- gopls/internal/golang/rename_check.go | 17 ++++++++-------- .../marker/testdata/rename/issue70968.txt | 20 +++++++++++++++++++ 2 files changed, 29 insertions(+), 8 deletions(-) create mode 100644 gopls/internal/test/marker/testdata/rename/issue70968.txt diff --git a/gopls/internal/golang/rename_check.go b/gopls/internal/golang/rename_check.go index 97423fe87a7..6521e809773 100644 --- a/gopls/internal/golang/rename_check.go +++ b/gopls/internal/golang/rename_check.go @@ -472,14 +472,15 @@ func (r *renamer) checkStructField(from *types.Var) { // This struct is also a named type. // We must check for direct (non-promoted) field/field // and method/field conflicts. - named := r.pkg.TypesInfo().Defs[spec.Name].Type() - prev, indices, _ := types.LookupFieldOrMethod(named, true, r.pkg.Types(), r.to) - if len(indices) == 1 { - r.errorf(from.Pos(), "renaming this field %q to %q", - from.Name(), r.to) - r.errorf(prev.Pos(), "\twould conflict with this %s", - objectKind(prev)) - return // skip checkSelections to avoid redundant errors + if tname := r.pkg.TypesInfo().Defs[spec.Name]; tname != nil { + prev, indices, _ := types.LookupFieldOrMethod(tname.Type(), true, r.pkg.Types(), r.to) + if len(indices) == 1 { + r.errorf(from.Pos(), "renaming this field %q to %q", + from.Name(), r.to) + r.errorf(prev.Pos(), "\twould conflict with this %s", + objectKind(prev)) + return // skip checkSelections to avoid redundant errors + } } } else { // This struct is not a named type. diff --git a/gopls/internal/test/marker/testdata/rename/issue70968.txt b/gopls/internal/test/marker/testdata/rename/issue70968.txt new file mode 100644 index 00000000000..57e318e53bb --- /dev/null +++ b/gopls/internal/test/marker/testdata/rename/issue70968.txt @@ -0,0 +1,20 @@ +Test that an (ill-typed) redeclaration of a name, which causes +types.Info.Defs to lack an entry, doesn't lead to gopls to crash in +renaming. Now, it proceeds with a partial rename. + +See golang/go#70968 + +-- go.mod -- +module example.com +go 1.18 + +-- a/a.go -- +package a + +type T int //@ diag("T", re"T redeclared") +type T struct { f int } //@ diag("T", re"T redeclared"), rename("f", "g", out) + +-- @out/a/a.go -- +@@ -4 +4 @@ +-type T struct { f int } //@ diag("T", re"T redeclared"), rename("f", "g", out) ++type T struct { g int } //@ diag("T", re"T redeclared"), rename("f", "g", out) From f64b14abe06195f48848c604d0183cfb0a4d019e Mon Sep 17 00:00:00 2001 From: xieyuschen Date: Tue, 8 Apr 2025 00:37:15 -0600 Subject: [PATCH 186/270] gopls/internal/golang: completion: better default keyword completion in switch stmt This CL improves the completion to offer a better default keyword completion in switch stmt for the following 2 cases: 1. if default keyword exists in current switch stmt already, completion doesn't offer a default keyword anymore. 2. if default keyword is missing in current switch stmt scope, and there is a variable named 'default[a-zA-Z0-9_]+', completion offers the default keyword as the first choice. 
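As a rough illustration of the two cases (a hypothetical snippet, not this CL's test data; the actual marker tests added below in keywords.txt are authoritative):

```go
package p

func example(x int) {
	var defaultValue int // a name matching default[a-zA-Z0-9_]+

	switch x {
	case 1:
		// Case 2 above: this switch has no default clause, so typing "d"
		// here now offers the "default" keyword as the first candidate,
		// ahead of defaultValue and defer.
		_ = defaultValue
	}

	switch x {
	default:
		// Case 1 above: this switch already has a default clause, so the
		// "default" keyword is no longer offered when completing here.
	}
}
```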
Change-Id: Ic935d1bdf36ce58b56acb528cb67dffcfece0352 Reviewed-on: https://go-review.googlesource.com/c/tools/+/663296 Reviewed-by: Robert Findley Reviewed-by: Dmitri Shuralyov Auto-Submit: Robert Findley LUCI-TryBot-Result: Go LUCI --- gopls/internal/golang/completion/keywords.go | 28 ++++++++++++++++++- .../marker/testdata/completion/keywords.txt | 25 +++++++++++++++++ 2 files changed, 52 insertions(+), 1 deletion(-) diff --git a/gopls/internal/golang/completion/keywords.go b/gopls/internal/golang/completion/keywords.go index 3f2f5ac78cd..6b61e101c13 100644 --- a/gopls/internal/golang/completion/keywords.go +++ b/gopls/internal/golang/completion/keywords.go @@ -121,7 +121,11 @@ func (c *completer) addKeywordCompletions() { c.addKeywordItems(seen, stdScore, BREAK) } case *ast.TypeSwitchStmt, *ast.SelectStmt, *ast.SwitchStmt: - c.addKeywordItems(seen, stdScore, CASE, DEFAULT) + // if there is no default case yet, it's highly likely to add a default in switch. + // we don't offer 'default' anymore if user has used it already in current swtich. + if !hasDefaultClause(node) { + c.addKeywordItems(seen, highScore, CASE, DEFAULT) + } case *ast.ForStmt, *ast.RangeStmt: c.addKeywordItems(seen, stdScore, BREAK, CONTINUE) // This is a bit weak, functions allow for many keywords @@ -133,6 +137,28 @@ func (c *completer) addKeywordCompletions() { } } +// hasDefaultClause reports whether the given node contains a direct default case. +// It does not traverse child nodes to look for nested default clauses, +// and returns false if the node is not a switch statement. +func hasDefaultClause(node ast.Node) bool { + var cases []ast.Stmt + switch node := node.(type) { + case *ast.TypeSwitchStmt: + cases = node.Body.List + case *ast.SelectStmt: + cases = node.Body.List + case *ast.SwitchStmt: + cases = node.Body.List + } + for _, c := range cases { + if clause, ok := c.(*ast.CaseClause); ok && + clause.List == nil { // default case + return true + } + } + return false +} + // addKeywordItems dedupes and adds completion items for the specified // keywords with the specified score. func (c *completer) addKeywordItems(seen map[string]bool, score float64, kws ...string) { diff --git a/gopls/internal/test/marker/testdata/completion/keywords.txt b/gopls/internal/test/marker/testdata/completion/keywords.txt index 3a43f190553..3c69f2cd2ef 100644 --- a/gopls/internal/test/marker/testdata/completion/keywords.txt +++ b/gopls/internal/test/marker/testdata/completion/keywords.txt @@ -164,3 +164,28 @@ func _() { d //@complete(" //", default) } } + +-- default_name_var_switch.go -- +package keywords + +func _() { + var defaultVar int //@item(defaultVar, "defaultVar", "int", "var") + switch defaultVar { + case 1: + println("helloworld") + d //@complete(" //", default, defaultVar, defer) + } + switch defaultVar { + default: + d //@complete(" //", defaultVar, defer) + } + var nested int + switch defaultVar { + case 1: + switch nested { + default: + println("") + } + d //@complete(" //", default, defaultVar, defer) + } +} From 4e973d9dd05b23fa1579f90e3fae7f606921f2ca Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Tue, 8 Apr 2025 16:52:42 -0400 Subject: [PATCH 187/270] gopls/internal/golang: yet more refinement of golang/go#70553 Add three new assertions. 
Updates golang/go#70553 Change-Id: Ia7cff183bbffd287a011804899bc4809edd1c926 Reviewed-on: https://go-review.googlesource.com/c/tools/+/663955 Reviewed-by: Robert Findley Auto-Submit: Alan Donovan LUCI-TryBot-Result: Go LUCI --- gopls/internal/golang/extracttofile.go | 20 ++++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/gopls/internal/golang/extracttofile.go b/gopls/internal/golang/extracttofile.go index d3026d4ee0f..cc833f12c42 100644 --- a/gopls/internal/golang/extracttofile.go +++ b/gopls/internal/golang/extracttofile.go @@ -93,6 +93,7 @@ func ExtractToNewFile(ctx context.Context, snapshot *cache.Snapshot, fh file.Han return nil, fmt.Errorf("%s: %w", errorPrefix, err) } + // Expand the selection, and compute the portion to extract. start, end, firstSymbol, ok := selectedToplevelDecls(pgf, start, end) if !ok { return nil, fmt.Errorf("invalid selection") @@ -109,7 +110,20 @@ func ExtractToNewFile(ctx context.Context, snapshot *cache.Snapshot, fh file.Han spaces := len(rest) - len(bytes.TrimLeft(rest, " \t\n")) end += token.Pos(spaces) pgf.CheckPos(end) // #70553 - // Inv: end is valid wrt pgf.Tok. + if !(start <= end) { + bug.Reportf("start: not before end") + } + // Inv: end is valid wrt pgf.Tok; env >= start. + fileStart := pgf.File.FileStart + pgf.CheckPos(fileStart) // #70553 + if !(0 <= start-fileStart) { + bug.Reportf("start: out of bounds") + } + if !(int(end-fileStart) <= len(pgf.Src)) { + bug.Reportf("end: out of bounds") + } + // Inv: 0 <= start-fileStart <= end-fileStart <= len(Src). + src := pgf.Src[start-fileStart : end-fileStart] replaceRange, err := pgf.PosRange(start, end) if err != nil { @@ -176,9 +190,7 @@ func ExtractToNewFile(ctx context.Context, snapshot *cache.Snapshot, fh file.Han return nil, fmt.Errorf("%s: %w", errorPrefix, err) } - fileStart := pgf.File.FileStart - pgf.CheckPos(fileStart) // #70553 - buf.Write(pgf.Src[start-fileStart : end-fileStart]) + buf.Write(src) newFileContent, err := format.Source(buf.Bytes()) if err != nil { From a99a1c3c8372669f03041068a494dc8386afcd2c Mon Sep 17 00:00:00 2001 From: karamaru-alpha Date: Wed, 9 Apr 2025 05:19:17 +0900 Subject: [PATCH 188/270] gopls/internal/analysis/modernize: prevent conversion of variadic functions to slices.ContainsFunc Exclude variadic functions from being converted to slices.ContainsFunc to avoid type errors. 
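To make the type error concrete, a small sketch (hypothetical code, not taken from the test data added below): slices.ContainsFunc requires a non-variadic func(E) bool, so a variadic predicate cannot be passed to it directly even though calling it inside the loop is fine.

```go
package p

import "slices"

// variadicPredicate is a hypothetical predicate, shown only to illustrate
// why the modernizer must skip variadic functions.
func variadicPredicate(x int, extra ...any) bool { return x > 0 }

// The loop form type-checks: variadicPredicate(elem) is an ordinary call
// that simply passes zero variadic arguments.
func containsLoop(s []int) bool {
	for _, elem := range s {
		if variadicPredicate(elem) {
			return true
		}
	}
	return false
}

// A direct rewrite would not compile, because slices.ContainsFunc needs a
// func(E) bool:
//
//	slices.ContainsFunc(s, variadicPredicate) // type error
//
// Only a wrapped, non-variadic predicate is acceptable:
func containsWrapped(s []int) bool {
	return slices.ContainsFunc(s, func(x int) bool { return variadicPredicate(x) })
}
```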
Fixes golang/go#73269 Change-Id: I2259d6639515939717305ab75553393485aca8e8 Reviewed-on: https://go-review.googlesource.com/c/tools/+/663436 Reviewed-by: Robert Findley Auto-Submit: Alan Donovan Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI --- .../analysis/modernize/slicescontains.go | 5 ++++ .../src/slicescontains/slicescontains.go | 23 +++++++++++++++++++ .../slicescontains/slicescontains.go.golden | 23 +++++++++++++++++++ 3 files changed, 51 insertions(+) diff --git a/gopls/internal/analysis/modernize/slicescontains.go b/gopls/internal/analysis/modernize/slicescontains.go index e99474df6ab..78a569eeca9 100644 --- a/gopls/internal/analysis/modernize/slicescontains.go +++ b/gopls/internal/analysis/modernize/slicescontains.go @@ -129,6 +129,11 @@ func slicescontains(pass *analysis.Pass) { isSliceElem(cond.Args[0]) && typeutil.Callee(info, cond) != nil { // not a conversion + // skip variadic functions + if sig, ok := info.TypeOf(cond.Fun).(*types.Signature); ok && sig.Variadic() { + return + } + funcName = "ContainsFunc" arg2 = cond.Fun // "if predicate(elem)" } diff --git a/gopls/internal/analysis/modernize/testdata/src/slicescontains/slicescontains.go b/gopls/internal/analysis/modernize/testdata/src/slicescontains/slicescontains.go index 6116ce14838..03bcfc69904 100644 --- a/gopls/internal/analysis/modernize/testdata/src/slicescontains/slicescontains.go +++ b/gopls/internal/analysis/modernize/testdata/src/slicescontains/slicescontains.go @@ -146,3 +146,26 @@ func nopeNeedleHaystackDifferentTypes2(x error, args []any) { } } } + +func nopeVariadicNamedContainsFunc(slice []int) bool { + for _, elem := range slice { + if variadicPredicate(elem) { + return true + } + } + return false +} + +func variadicPredicate(int, ...any) bool + +func nopeVariadicContainsFunc(slice []int) bool { + f := func(int, ...any) bool { + return true + } + for _, elem := range slice { + if f(elem) { + return true + } + } + return false +} diff --git a/gopls/internal/analysis/modernize/testdata/src/slicescontains/slicescontains.go.golden b/gopls/internal/analysis/modernize/testdata/src/slicescontains/slicescontains.go.golden index 2d67395f203..67e5b544960 100644 --- a/gopls/internal/analysis/modernize/testdata/src/slicescontains/slicescontains.go.golden +++ b/gopls/internal/analysis/modernize/testdata/src/slicescontains/slicescontains.go.golden @@ -102,3 +102,26 @@ func nopeNeedleHaystackDifferentTypes2(x error, args []any) { } } } + +func nopeVariadicNamedContainsFunc(slice []int) bool { + for _, elem := range slice { + if variadicPredicate(elem) { + return true + } + } + return false +} + +func variadicPredicate(int, ...any) bool + +func nopeVariadicContainsFunc(slice []int) bool { + f := func(int, ...any) bool { + return true + } + for _, elem := range slice { + if f(elem) { + return true + } + } + return false +} From edff653582daacb31f118586d48f137b34bef3cf Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Tue, 15 Oct 2024 22:19:01 -0400 Subject: [PATCH 189/270] gopls/internal/server: TypeHierarchy support This CL adds basic support for the LSP Type Hierarchy feature. There are three new RPCs: - PrepareTypeHierarchy asks for a description of the currently selected text, which must be a reference to a type name. - Supertypes and Subtypes ask for the items related by the subtyping relation, using the same machinery as Implementations by method sets, which has been factored to deliver a concurrent stream of results at a higher level then just protocol.Location. 
Unlike Implementations, Type Hierarchy does not report relationships between func types and FuncDecl/FuncLit/RangeStmt. The names of types are now saved in the methodsets index. The marker test framework has been extended with @{super,sub}types markers. This CL also sets us up to start reporting interface/interface relationships (golang/go#68641), which are especially desirable in the Type Hierarchy viewer; but that behavior change will be left for a follow-up. + tests, docs, relnotes Fixes golang/go#72142 Change-Id: Id60c9f447e938ac7e34262522ccd79bd54d90fc5 Reviewed-on: https://go-review.googlesource.com/c/tools/+/663055 Auto-Submit: Alan Donovan LUCI-TryBot-Result: Go LUCI Commit-Queue: Alan Donovan Reviewed-by: Jonathan Amsterdam Reviewed-by: Robert Findley --- gopls/doc/features/navigation.md | 37 ++- gopls/doc/release/v0.19.0.md | 14 ++ gopls/internal/cache/methodsets/methodsets.go | 84 +++++-- gopls/internal/cache/snapshot.go | 2 +- gopls/internal/golang/implementation.go | 212 ++++++++++-------- gopls/internal/golang/references.go | 11 +- gopls/internal/golang/type_hierarchy.go | 157 +++++++++++++ gopls/internal/server/general.go | 1 + gopls/internal/server/type_hierarchy.go | 63 ++++++ gopls/internal/server/unimplemented.go | 12 - gopls/internal/test/marker/doc.go | 8 + gopls/internal/test/marker/marker_test.go | 55 ++++- .../marker/testdata/typehierarchy/basic.txt | 54 +++++ 13 files changed, 573 insertions(+), 137 deletions(-) create mode 100644 gopls/internal/golang/type_hierarchy.go create mode 100644 gopls/internal/server/type_hierarchy.go create mode 100644 gopls/internal/test/marker/testdata/typehierarchy/basic.txt diff --git a/gopls/doc/features/navigation.md b/gopls/doc/features/navigation.md index f3454f7188c..9895fcf4d9a 100644 --- a/gopls/doc/features/navigation.md +++ b/gopls/doc/features/navigation.md @@ -94,7 +94,7 @@ Interfaces and concrete types are matched using method sets: location of the declaration of each type that implements the interface. - When invoked on a **concrete type**, - it returns the locations of the matching interface types. + it returns the locations of the matching interface types. - When invoked on an **interface method**, it returns the corresponding methods of the types that satisfy the interface. - When invoked on a **concrete method**, @@ -282,3 +282,38 @@ Client support: - **VS Code**: `Show Call Hierarchy` menu item (`⌥⇧H`) opens [Call hierarchy view](https://code.visualstudio.com/docs/cpp/cpp-ide#_call-hierarchy) (note: docs refer to C++ but the idea is the same for Go). - **Emacs + eglot**: Not standard; install with `(package-vc-install "https://github.com/dolmens/eglot-hierarchy")`. Use `M-x eglot-hierarchy-call-hierarchy` to show the direct incoming calls to the selected function; use a prefix argument (`C-u`) to show the direct outgoing calls. There is no way to expand the tree. - **CLI**: `gopls call_hierarchy file.go:#offset` shows outgoing and incoming calls. + + +## Type Hierarchy + +The LSP TypeHierarchy mechanism consists of three queries that +together enable clients to present a hierarchical view of a portion of +the subtyping relation over named types. 
+ +- [`textDocument/prepareTypeHierarchy`](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_prepareTypeHierarchy) returns an [item](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeHierarchyItem) describing the named type at the current position; +- [`typeHierarchyItem/subtypes`](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeHierarchy_subtypes) returns the set of subtypes of the selected (interface) type; and +- [`typeHierarchy/supertypes`](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#typeHierarchy_supertypes) returns the set of supertypes (interface types) of the selected type. + +Invoke the command while selecting the name of a type. + +As with an Implementation query, a type hierarchy query reports +function-local types only within the same package as the query type. +Also the result does not include alias types, only defined types. + + + +Caveats: + +- The type hierarchy supports only named types and their assignability + relation. By contrast, the Implementations request also reports the + relation between unnamed `func` types and function declarations, + function literals, and dynamic calls of values of those types. + +Client support: +- **VS Code**: `Show Type Hierarchy` menu item opens [Type hierarchy view](https://code.visualstudio.com/docs/java/java-editing#_type-hierarchy) (note: docs refer to Java but the idea is the same for Go). +- **Emacs + eglot**: Support added in March 2025. Use `M-x eglot-show-call-hierarchy`. +- **CLI**: not yet supported. diff --git a/gopls/doc/release/v0.19.0.md b/gopls/doc/release/v0.19.0.md index f6208417ebc..f09bc65307a 100644 --- a/gopls/doc/release/v0.19.0.md +++ b/gopls/doc/release/v0.19.0.md @@ -33,6 +33,20 @@ and queries using signatures should be invoked on a `func` or `(` token. Only the local (same-package) algorithm is currently supported. TODO: implement global. +## Support for Type Hierarchy + + + +Gopls now implements the three LSP methods related to the Type +Hierarchy viewer: `textDocument/prepareTypeHierarchy`, +`typeHierarchy/supertypes`, `typeHierarchy/subtypes`. + +In VS Code, select "Show Type Hierarchy" from the context menu +to see a tree widget displaying all the supertypes or subtypes +of the selected named type. + +TODO: screenshot, but wait till #68641 is fixed. + ## "Eliminate dot import" code action diff --git a/gopls/internal/cache/methodsets/methodsets.go b/gopls/internal/cache/methodsets/methodsets.go index 2387050f2d9..fd2aedc5ad8 100644 --- a/gopls/internal/cache/methodsets/methodsets.go +++ b/gopls/internal/cache/methodsets/methodsets.go @@ -51,6 +51,7 @@ import ( "sync/atomic" "golang.org/x/tools/go/types/objectpath" + "golang.org/x/tools/gopls/internal/cache/metadata" "golang.org/x/tools/gopls/internal/util/bug" "golang.org/x/tools/gopls/internal/util/fingerprint" "golang.org/x/tools/gopls/internal/util/frob" @@ -62,14 +63,15 @@ import ( // types in a package in a form that permits assignability queries // without the type checker. type Index struct { - pkg gobPackage + pkg gobPackage + PkgPath metadata.PackagePath } // Decode decodes the given gob-encoded data as an Index. 
-func Decode(data []byte) *Index { +func Decode(pkgpath metadata.PackagePath, data []byte) *Index { var pkg gobPackage packageCodec.Decode(data, &pkg) - return &Index{pkg} + return &Index{pkg: pkg, PkgPath: pkgpath} } // Encode encodes the receiver as gob-encoded data. @@ -110,36 +112,75 @@ func KeyOf(t types.Type) (Key, bool) { // A Result reports a matching type or method in a method-set search. type Result struct { - Location Location // location of the type or method + TypeName string // name of the named type + IsInterface bool // matched type (or method) is abstract + Location Location // location of the type or method // methods only: PkgPath string // path of declaring package (may differ due to embedding) ObjectPath objectpath.Path // path of method within declaring package } -// Search reports each type that implements (or is implemented by) the -// type that produced the search key. If methodID is nonempty, only -// that method of each type is reported. +// TypeRelation indicates the direction of subtyping relation, +// if any, between two types. +// +// It is a bitset, so that clients of Implementations may use +// Supertype|Subtype to request an undirected match. +type TypeRelation int8 + +const ( + Supertype TypeRelation = 0x1 + Subtype TypeRelation = 0x2 +) + +// Search reports each type that implements (Supertype ∈ want) or is +// implemented by (Subtype ∈ want) the type that produced the search key. +// +// If method is non-nil, only that method of each type is reported. // // The result does not include the error.Error method. // TODO(adonovan): give this special case a more systematic treatment. -func (index *Index) Search(key Key, method *types.Func) []Result { +func (index *Index) Search(key Key, want TypeRelation, method *types.Func) []Result { var results []Result for _, candidate := range index.pkg.MethodSets { - // Traditionally this feature doesn't report - // interface/interface elements of the relation. - // I think that's a mistake. - // TODO(adonovan): UX: change it, here and in the local implementation. + // The historical behavior enshrined by this + // function rejects cases where both are + // (nontrivial) interface types, but this is + // useful information; see #68641 and CL 619719. + // TODO(adonovan): rescind this policy choice, + // and report I/I relationships, + // by deleting this continue statement. + // (It is also necessary to remove self-matches.) + // + // The same question appears in the local algorithm (implementations). if candidate.IsInterface && key.mset.IsInterface { continue } - if !implements(candidate, key.mset) && !implements(key.mset, candidate) { + // Test the direction of the relation. + // The client may request either direction or both + // (e.g. when the client is References), + // and the Result reports each test independently; + // both tests succeed when comparing identical + // interface types. 
+ var got TypeRelation + if want&Subtype != 0 && implements(candidate, key.mset) { + got |= Subtype + } + if want&Supertype != 0 && implements(key.mset, candidate) { + got |= Supertype + } + if got == 0 { continue } + typeName := index.pkg.Strings[candidate.TypeName] if method == nil { - results = append(results, Result{Location: index.location(candidate.Posn)}) + results = append(results, Result{ + TypeName: typeName, + IsInterface: candidate.IsInterface, + Location: index.location(candidate.Posn), + }) } else { for _, m := range candidate.Methods { if m.ID == method.Id() { @@ -154,9 +195,11 @@ func (index *Index) Search(key Key, method *types.Func) []Result { } results = append(results, Result{ - Location: index.location(m.Posn), - PkgPath: index.pkg.Strings[m.PkgPath], - ObjectPath: objectpath.Path(index.pkg.Strings[m.ObjectPath]), + TypeName: typeName, + IsInterface: candidate.IsInterface, + Location: index.location(m.Posn), + PkgPath: index.pkg.Strings[m.PkgPath], + ObjectPath: objectpath.Path(index.pkg.Strings[m.ObjectPath]), }) break } @@ -285,6 +328,7 @@ func (b *indexBuilder) build(fset *token.FileSet, pkg *types.Package) *Index { for _, name := range scope.Names() { if tname, ok := scope.Lookup(name).(*types.TypeName); ok && !tname.IsAlias() { if mset := methodSetInfo(tname.Type(), setIndexInfo); mset.Mask != 0 { + mset.TypeName = b.string(name) mset.Posn = objectPos(tname) // Only record types with non-trivial method sets. b.MethodSets = append(b.MethodSets, mset) @@ -292,7 +336,10 @@ func (b *indexBuilder) build(fset *token.FileSet, pkg *types.Package) *Index { } } - return &Index{pkg: b.gobPackage} + return &Index{ + pkg: b.gobPackage, + PkgPath: metadata.PackagePath(pkg.Path()), + } } // string returns a small integer that encodes the string. @@ -370,6 +417,7 @@ type gobPackage struct { // A gobMethodSet records the method set of a single type. type gobMethodSet struct { + TypeName int // name (string index) of the package-level type Posn gobPosition IsInterface bool Tricky bool // at least one method is tricky; fingerprint must be parsed + unified diff --git a/gopls/internal/cache/snapshot.go b/gopls/internal/cache/snapshot.go index 754389c7008..81cfafc1470 100644 --- a/gopls/internal/cache/snapshot.go +++ b/gopls/internal/cache/snapshot.go @@ -608,7 +608,7 @@ func (s *Snapshot) MethodSets(ctx context.Context, ids ...PackageID) ([]*methods pre := func(i int, ph *packageHandle) bool { data, err := filecache.Get(methodSetsKind, ph.key) if err == nil { // hit - indexes[i] = methodsets.Decode(data) + indexes[i] = methodsets.Decode(ph.mp.PkgPath, data) return false } else if err != filecache.ErrNotFound { event.Error(ctx, "reading methodsets from filecache", err) diff --git a/gopls/internal/golang/implementation.go b/gopls/internal/golang/implementation.go index a5ab5d19a13..e7850e19b1a 100644 --- a/gopls/internal/golang/implementation.go +++ b/gopls/internal/golang/implementation.go @@ -14,7 +14,6 @@ import ( "iter" "reflect" "slices" - "sort" "strings" "sync" @@ -64,19 +63,8 @@ func Implementation(ctx context.Context, snapshot *cache.Snapshot, f file.Handle if err != nil { return nil, err } - - // Sort and de-duplicate locations. 
- sort.Slice(locs, func(i, j int) bool { - return protocol.CompareLocation(locs[i], locs[j]) < 0 - }) - out := locs[:0] - for _, loc := range locs { - if len(out) == 0 || out[len(out)-1] != loc { - out = append(out, loc) - } - } - locs = out - + slices.SortFunc(locs, protocol.CompareLocation) + locs = slices.Compact(locs) // de-duplicate return locs, nil } @@ -97,12 +85,42 @@ func implementations(ctx context.Context, snapshot *cache.Snapshot, fh file.Hand } // Find implementations based on method sets. + var ( + locsMu sync.Mutex + locs []protocol.Location + ) + // relation=0 here means infer direction of the relation + // (Supertypes/Subtypes) from concreteness of query type/method. + err = implementationsMsets(ctx, snapshot, pkg, pgf, pos, 0, func(_ metadata.PackagePath, _ string, _ bool, loc protocol.Location) { + locsMu.Lock() + locs = append(locs, loc) + locsMu.Unlock() + }) + return locs, err +} +// An implYieldFunc is a callback called for each match produced by the implementation machinery. +// - name describes the type or method. +// - abstract indicates that the result is an interface type or interface method. +// +// implYieldFunc implementations must be concurrency-safe. +type implYieldFunc func(pkgpath metadata.PackagePath, name string, abstract bool, loc protocol.Location) + +// implementationsMsets computes implementations of the type at the +// specified position, by method sets. +// +// rel specifies the desired direction of the relation: Subtype, +// Supertype, or both. As a special case, zero means infer the +// direction from the concreteness of the query object: Supertype for +// a concrete type, Subtype for an interface. +// +// It is shared by Implementations and TypeHierarchy. +func implementationsMsets(ctx context.Context, snapshot *cache.Snapshot, pkg *cache.Package, pgf *parsego.File, pos token.Pos, rel methodsets.TypeRelation, yield implYieldFunc) error { // First, find the object referenced at the cursor. // The object may be declared in a different package. - obj, err := implementsObj(pkg, pgf, pos) + obj, err := implementsObj(pkg.TypesInfo(), pgf.File, pos) if err != nil { - return nil, err + return err } // If the resulting object has a position, we can expand the search to types @@ -123,11 +141,11 @@ func implementations(ctx context.Context, snapshot *cache.Snapshot, fh file.Hand declURI = protocol.URIFromPath(declPosn.Filename) declMPs, err := snapshot.MetadataForFile(ctx, declURI) if err != nil { - return nil, err + return err } metadata.RemoveIntermediateTestVariants(&declMPs) if len(declMPs) == 0 { - return nil, fmt.Errorf("no packages for file %s", declURI) + return fmt.Errorf("no packages for file %s", declURI) } ids := make([]PackageID, len(declMPs)) for i, mp := range declMPs { @@ -135,7 +153,7 @@ func implementations(ctx context.Context, snapshot *cache.Snapshot, fh file.Hand } localPkgs, err = snapshot.TypeCheck(ctx, ids...) if err != nil { - return nil, err + return err } } @@ -159,7 +177,7 @@ func implementations(ctx context.Context, snapshot *cache.Snapshot, fh file.Hand } queryType, queryMethod := typeOrMethod(obj) if queryType == nil { - return nil, bug.Errorf("%s is not a type or method", obj.Name()) // should have been handled by implementsObj + return bug.Errorf("%s is not a type or method", obj.Name()) // should have been handled by implementsObj } // Compute the method-set fingerprint used as a key to the global search. 
@@ -167,7 +185,15 @@ func implementations(ctx context.Context, snapshot *cache.Snapshot, fh file.Hand if !hasMethods { // A type with no methods yields an empty result. // (No point reporting that every type satisfies 'any'.) - return nil, nil + return nil + } + + // If the client specified no relation, infer it + // from the concreteness of the query type. + if rel == 0 { + rel = cond(types.IsInterface(queryType), + methodsets.Subtype, + methodsets.Supertype) } // The global search needs to look at every package in the @@ -181,7 +207,7 @@ func implementations(ctx context.Context, snapshot *cache.Snapshot, fh file.Hand // be optimized by being applied as soon as each package is available. globalMetas, err := snapshot.AllMetadata(ctx) if err != nil { - return nil, err + return err } metadata.RemoveIntermediateTestVariants(&globalMetas) globalIDs := make([]PackageID, 0, len(globalMetas)) @@ -198,15 +224,12 @@ func implementations(ctx context.Context, snapshot *cache.Snapshot, fh file.Hand } indexes, err := snapshot.MethodSets(ctx, globalIDs...) if err != nil { - return nil, fmt.Errorf("querying method sets: %v", err) + return fmt.Errorf("querying method sets: %v", err) } // Search local and global packages in parallel. - var ( - group errgroup.Group - locsMu sync.Mutex - locs []protocol.Location - ) + var group errgroup.Group + // local search for _, localPkg := range localPkgs { // The localImplementations algorithm assumes needle and haystack @@ -242,21 +265,16 @@ func implementations(ctx context.Context, snapshot *cache.Snapshot, fh file.Hand if queryType == nil { return fmt.Errorf("querying method sets in package %q: %v", pkgID, err) } - localLocs, err := localImplementations(ctx, snapshot, declPkg, queryType, queryMethod) - if err != nil { + if err := localImplementations(ctx, snapshot, declPkg, queryType, rel, queryMethod, yield); err != nil { return fmt.Errorf("querying local implementations %q: %v", pkgID, err) } - locsMu.Lock() - locs = append(locs, localLocs...) - locsMu.Unlock() return nil }) } // global search for _, index := range indexes { - index := index group.Go(func() error { - for _, res := range index.Search(key, queryMethod) { + for _, res := range index.Search(key, rel, queryMethod) { loc := res.Location // Map offsets to protocol.Locations in parallel (may involve I/O). group.Go(func() error { @@ -264,20 +282,14 @@ func implementations(ctx context.Context, snapshot *cache.Snapshot, fh file.Hand if err != nil { return err } - locsMu.Lock() - locs = append(locs, ploc) - locsMu.Unlock() + yield(index.PkgPath, res.TypeName, res.IsInterface, ploc) return nil }) } return nil }) } - if err := group.Wait(); err != nil { - return nil, err - } - - return locs, nil + return group.Wait() } // offsetToLocation converts an offset-based position to a protocol.Location, @@ -298,7 +310,7 @@ func offsetToLocation(ctx context.Context, snapshot *cache.Snapshot, filename st // implementsObj returns the object to query for implementations, // which is a type name or method. -func implementsObj(pkg *cache.Package, pgf *parsego.File, pos token.Pos) (types.Object, error) { +func implementsObj(info *types.Info, file *ast.File, pos token.Pos) (types.Object, error) { // This function inherits the limitation of its predecessor in // requiring the selection to be an identifier (of a type or // method). But there's no fundamental reason why one could @@ -309,7 +321,7 @@ func implementsObj(pkg *cache.Package, pgf *parsego.File, pos token.Pos) (types. // subexpression such as x.f().) 
// TODO(adonovan): simplify: use objectsAt? - path := pathEnclosingObjNode(pgf.File, pos) + path := pathEnclosingObjNode(file, pos) if path == nil { return nil, ErrNoIdentFound } @@ -319,12 +331,12 @@ func implementsObj(pkg *cache.Package, pgf *parsego.File, pos token.Pos) (types. } // Is the object a type or method? Reject other kinds. - obj := pkg.TypesInfo().Uses[id] + obj := info.Uses[id] if obj == nil { // Check uses first (unlike ObjectOf) so that T in // struct{T} is treated as a reference to a type, // not a declaration of a field. - obj = pkg.TypesInfo().Defs[id] + obj = info.Defs[id] } switch obj := obj.(type) { case *types.TypeName: @@ -346,8 +358,9 @@ func implementsObj(pkg *cache.Package, pgf *parsego.File, pos token.Pos) (types. } // localImplementations searches within pkg for declarations of all -// types that are assignable to/from the query type, and returns a new -// unordered array of their locations. +// supertypes (if rel contains Supertype) or subtypes (if rel contains +// Subtype) of the query type, and returns a new unordered array of +// their locations. // // If method is non-nil, the function instead returns the location // of each type's method (if any) of that ID. @@ -356,14 +369,14 @@ func implementsObj(pkg *cache.Package, pgf *parsego.File, pos token.Pos) (types. // function's results may include type declarations that are local to // a function body. The global search index excludes such types // because reliably naming such types is hard.) -func localImplementations(ctx context.Context, snapshot *cache.Snapshot, pkg *cache.Package, queryType types.Type, method *types.Func) ([]protocol.Location, error) { +// +// Results are reported via the the yield function. +func localImplementations(ctx context.Context, snapshot *cache.Snapshot, pkg *cache.Package, queryType types.Type, rel methodsets.TypeRelation, method *types.Func, yield implYieldFunc) error { queryType = methodsets.EnsurePointer(queryType) var msets typeutil.MethodSetCache // Scan through all type declarations in the syntax. - var locs []protocol.Location - var methodLocs []methodsets.Location for _, pgf := range pkg.CompiledGoFiles() { for cur := range pgf.Cursor.Preorder((*ast.TypeSpec)(nil)) { spec := cur.Node().(*ast.TypeSpec) @@ -378,12 +391,35 @@ func localImplementations(ctx context.Context, snapshot *cache.Snapshot, pkg *ca // The historical behavior enshrined by this // function rejects cases where both are - // (nontrivial) interface types? - // That seems like useful information; see #68641. - // TODO(adonovan): UX: report I/I pairs too? + // (nontrivial) interface types, but this is + // useful information; see #68641 and CL 619719. + // TODO(adonovan): rescind this policy choice, + // and report I/I relationships, + // by deleting this continue statement. + // (It is also necessary to remove self-matches.) + // // The same question appears in the global algorithm (methodsets). - if !concreteImplementsIntf(&msets, candidateType, queryType) { - continue // not assignable + xiface := types.IsInterface(queryType) + yiface := types.IsInterface(candidateType) + if xiface == yiface { + continue + } + + // Test the direction of the relation. + // The client may request either direction or both + // (e.g. when the client is References), + // and the Result reports each test independently; + // both tests succeed when comparing identical + // interface types. 
+ var got methodsets.TypeRelation + if rel&methodsets.Supertype != 0 && implements(&msets, queryType, candidateType) { + got |= methodsets.Supertype + } + if rel&methodsets.Subtype != 0 && implements(&msets, candidateType, queryType) { + got |= methodsets.Subtype + } + if got&rel == 0 { + continue } // Ignore types with empty method sets. @@ -393,9 +429,12 @@ func localImplementations(ctx context.Context, snapshot *cache.Snapshot, pkg *ca continue } + isInterface := types.IsInterface(def.Type()) + if method == nil { // Found matching type. - locs = append(locs, mustLocation(pgf, spec.Name)) + loc := mustLocation(pgf, spec.Name) + yield(pkg.Metadata().PkgPath, spec.Name.Name, isInterface, loc) continue } @@ -409,36 +448,37 @@ func localImplementations(ctx context.Context, snapshot *cache.Snapshot, pkg *ca m := mset.At(i).Obj() if m.Id() == method.Id() { posn := safetoken.StartPosition(pkg.FileSet(), m.Pos()) - methodLocs = append(methodLocs, methodsets.Location{ - Filename: posn.Filename, - Start: posn.Offset, - End: posn.Offset + len(m.Name()), - }) + loc, err := offsetToLocation(ctx, snapshot, posn.Filename, posn.Offset, posn.Offset+len(m.Name())) + if err != nil { + return err + } + yield(pkg.Metadata().PkgPath, m.Name(), isInterface, loc) break } } } } - // Finally convert method positions to protocol form by reading the files. - for _, mloc := range methodLocs { - loc, err := offsetToLocation(ctx, snapshot, mloc.Filename, mloc.Start, mloc.End) - if err != nil { - return nil, err - } - locs = append(locs, loc) - } - - // Special case: for types that satisfy error, report builtin.go (see #59527). + // Special case: for types that satisfy error, + // report error in builtin.go (see #59527). + // + // Two inconsistencies: + // 1. we always report the type "error" + // even when the query was for the method "Error"; + // 2. we report error even when the query type was + // an interface, but according to our current policy, + // we never report I/I relations; see #68641 above. + // This will soon change, at which point we should + // check rel&methodsets.Supertype != 0 here. if types.Implements(queryType, errorInterfaceType) { loc, err := errorLocation(ctx, snapshot) if err != nil { - return nil, err + return err } - locs = append(locs, loc) + yield("", "error", true, loc) } - return locs, nil + return nil } var errorInterfaceType = types.Universe.Lookup("error").Type().Underlying().(*types.Interface) @@ -461,28 +501,14 @@ func errorLocation(ctx context.Context, snapshot *cache.Snapshot) (protocol.Loca return protocol.Location{}, fmt.Errorf("built-in error type not found") } -// concreteImplementsIntf reports whether x is an interface type -// implemented by concrete type y, or vice versa. -// +// implements reports whether x implements y. // If one or both types are generic, the result indicates whether the // interface may be implemented under some instantiation. -func concreteImplementsIntf(msets *typeutil.MethodSetCache, x, y types.Type) bool { - xiface := types.IsInterface(x) - yiface := types.IsInterface(y) - - // Make sure exactly one is an interface type. - // TODO(adonovan): rescind this policy choice and report - // I/I relationships. See CL 619719 + issue #68641. - if xiface == yiface { +func implements(msets *typeutil.MethodSetCache, x, y types.Type) bool { + if !types.IsInterface(y) { return false } - // Rearrange if needed so x is the concrete type. - if xiface { - x, y = y, x - } - // Inv: y is an interface type. - // For each interface method of y, check that x has it too. 
// It is not necessary to compute x's complete method set. // diff --git a/gopls/internal/golang/references.go b/gopls/internal/golang/references.go index 12152453dcd..cf24685ca91 100644 --- a/gopls/internal/golang/references.go +++ b/gopls/internal/golang/references.go @@ -441,7 +441,6 @@ func ordinaryReferences(ctx context.Context, snapshot *cache.Snapshot, uri proto // corresponding methods (see above), which expand the global search. // The target objects are identified by (PkgPath, objectpath). for id := range expansions { - id := id group.Go(func() error { // TODO(adonovan): opt: batch these TypeChecks. pkgs, err := snapshot.TypeCheck(ctx, id) @@ -524,8 +523,9 @@ func expandMethodSearch(ctx context.Context, snapshot *cache.Snapshot, workspace i := i index := index group.Go(func() error { - // Consult index for matching methods. - results := index.Search(key, method) + // Consult index for matching (super/sub) methods. + const want = methodsets.Supertype | methodsets.Subtype + results := index.Search(key, want, method) if len(results) == 0 { return nil } @@ -583,7 +583,7 @@ func localReferences(pkg *cache.Package, targets map[types.Object]bool, correspo var msets typeutil.MethodSetCache // matches reports whether obj either is or corresponds to a target. - // (Correspondence is defined as usual for interface methods.) + // (Correspondence is defined as usual for interface methods: super/subtype.) matches := func(obj types.Object) bool { if containsOrigin(targets, obj) { return true @@ -591,7 +591,8 @@ func localReferences(pkg *cache.Package, targets map[types.Object]bool, correspo if methodRecvs != nil && obj.Name() == methodName { if orecv := effectiveReceiver(obj); orecv != nil { for _, mrecv := range methodRecvs { - if concreteImplementsIntf(&msets, orecv, mrecv) { + if implements(&msets, orecv, mrecv) || + implements(&msets, mrecv, orecv) { return true } } diff --git a/gopls/internal/golang/type_hierarchy.go b/gopls/internal/golang/type_hierarchy.go new file mode 100644 index 00000000000..bbcd5325d7b --- /dev/null +++ b/gopls/internal/golang/type_hierarchy.go @@ -0,0 +1,157 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package golang + +import ( + "context" + "fmt" + "go/token" + "go/types" + "slices" + "strings" + "sync" + + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/cache/metadata" + "golang.org/x/tools/gopls/internal/cache/methodsets" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" +) + +// Type hierarchy support (using method sets) +// +// TODO(adonovan): +// - Support type hierarchy by signatures (using Kind=Function). +// As with Implementations by signature matching, needs more UX thought. +// +// - Allow methods too (using Kind=Method)? It's not exactly in the +// spirit of TypeHierarchy but it would be useful and it's easy +// enough to support. +// +// FIXME: fix pkg=command-line-arguments problem with query initiated at "error" in builtins.go + +// PrepareTypeHierarchy returns the TypeHierarchyItems for the types at the selected position. 
+func PrepareTypeHierarchy(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pp protocol.Position) ([]protocol.TypeHierarchyItem, error) { + pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) + if err != nil { + return nil, err + } + pos, err := pgf.PositionPos(pp) + if err != nil { + return nil, err + } + + // For now, we require that the selection be a type name. + _, obj, _ := referencedObject(pkg, pgf, pos) + if obj == nil { + return nil, fmt.Errorf("not a symbol") + } + tname, ok := obj.(*types.TypeName) + if !ok { + return nil, fmt.Errorf("not a type name") + } + + // Find declaration. + var declLoc protocol.Location + if isBuiltin(obj) { + pgf, id, err := builtinDecl(ctx, snapshot, obj) + if err != nil { + return nil, err + } + declLoc, err = pgf.NodeLocation(id) + if err != nil { + return nil, err + } + } else { + declLoc, err = mapPosition(ctx, pkg.FileSet(), snapshot, tname.Pos(), tname.Pos()+token.Pos(len(tname.Name()))) + if err != nil { + return nil, err + } + } + + pkgpath := "builtin" + if tname.Pkg() != nil { + pkgpath = tname.Pkg().Path() + } + + return []protocol.TypeHierarchyItem{{ + Name: tname.Name(), + Kind: cond(types.IsInterface(tname.Type()), protocol.Interface, protocol.Class), + Detail: pkgpath, + URI: declLoc.URI, + Range: declLoc.Range, // (in theory this should be the entire declaration) + SelectionRange: declLoc.Range, + }}, nil +} + +// Subtypes reports information about subtypes of the selected type. +func Subtypes(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, item protocol.TypeHierarchyItem) ([]protocol.TypeHierarchyItem, error) { + return relatedTypes(ctx, snapshot, fh, item, methodsets.Subtype) +} + +// Subtypes reports information about supertypes of the selected type. +func Supertypes(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, item protocol.TypeHierarchyItem) ([]protocol.TypeHierarchyItem, error) { + return relatedTypes(ctx, snapshot, fh, item, methodsets.Supertype) +} + +// relatedTypes is the common implementation of {Super,Sub}types. +func relatedTypes(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, item protocol.TypeHierarchyItem, rel methodsets.TypeRelation) ([]protocol.TypeHierarchyItem, error) { + pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) + if err != nil { + return nil, err + } + pos, err := pgf.PositionPos(item.Range.Start) + if err != nil { + return nil, err + } + + var ( + itemsMu sync.Mutex + items []protocol.TypeHierarchyItem + ) + err = implementationsMsets(ctx, snapshot, pkg, pgf, pos, rel, func(pkgpath metadata.PackagePath, name string, abstract bool, loc protocol.Location) { + if pkgpath == "" { + pkgpath = "builtin" + } + + itemsMu.Lock() + defer itemsMu.Unlock() + items = append(items, protocol.TypeHierarchyItem{ + Name: name, + Kind: cond(abstract, protocol.Interface, protocol.Class), + Detail: string(pkgpath), + URI: loc.URI, + Range: loc.Range, // (in theory this should be the entire declaration) + SelectionRange: loc.Range, + }) + }) + if err != nil { + return nil, err + } + + // Sort by (package, name, URI, range) then + // de-duplicate based on the same 4-tuple + cmp := func(x, y protocol.TypeHierarchyItem) int { + if d := strings.Compare(x.Detail, y.Detail); d != 0 { + // Rank the original item's package first. 
+ if d := boolCompare(x.Detail == item.Detail, y.Detail == item.Detail); d != 0 { + return -d + } + return d + } + if d := strings.Compare(x.Name, y.Name); d != 0 { + return d + } + if d := strings.Compare(string(x.URI), string(y.URI)); d != 0 { + return d + } + return protocol.CompareRange(x.SelectionRange, y.Range) + } + slices.SortFunc(items, cmp) + eq := func(x, y protocol.TypeHierarchyItem) bool { return cmp(x, y) == 0 } + items = slices.CompactFunc(items, eq) + + return items, nil +} diff --git a/gopls/internal/server/general.go b/gopls/internal/server/general.go index 8dc0384b055..6ce1f788dba 100644 --- a/gopls/internal/server/general.go +++ b/gopls/internal/server/general.go @@ -184,6 +184,7 @@ func (s *server) Initialize(ctx context.Context, params *protocol.ParamInitializ IncludeText: false, }, }, + TypeHierarchyProvider: &protocol.Or_ServerCapabilities_typeHierarchyProvider{Value: true}, Workspace: &protocol.WorkspaceOptions{ WorkspaceFolders: &protocol.WorkspaceFolders5Gn{ Supported: true, diff --git a/gopls/internal/server/type_hierarchy.go b/gopls/internal/server/type_hierarchy.go new file mode 100644 index 00000000000..5f40ed3c0c2 --- /dev/null +++ b/gopls/internal/server/type_hierarchy.go @@ -0,0 +1,63 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package server + +import ( + "context" + "fmt" + + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/golang" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/internal/event" +) + +func (s *server) PrepareTypeHierarchy(ctx context.Context, params *protocol.TypeHierarchyPrepareParams) ([]protocol.TypeHierarchyItem, error) { + ctx, done := event.Start(ctx, "server.PrepareTypeHierarchy") + defer done() + + fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) + if err != nil { + return nil, err + } + defer release() + switch snapshot.FileKind(fh) { + case file.Go: + return golang.PrepareTypeHierarchy(ctx, snapshot, fh, params.Position) + } + return nil, fmt.Errorf("unsupported file type: %v", fh) +} + +func (s *server) Subtypes(ctx context.Context, params *protocol.TypeHierarchySubtypesParams) ([]protocol.TypeHierarchyItem, error) { + ctx, done := event.Start(ctx, "server.Subtypes") + defer done() + + fh, snapshot, release, err := s.fileOf(ctx, params.Item.URI) + if err != nil { + return nil, err + } + defer release() + switch snapshot.FileKind(fh) { + case file.Go: + return golang.Subtypes(ctx, snapshot, fh, params.Item) + } + return nil, fmt.Errorf("unsupported file type: %v", fh) +} + +func (s *server) Supertypes(ctx context.Context, params *protocol.TypeHierarchySupertypesParams) ([]protocol.TypeHierarchyItem, error) { + ctx, done := event.Start(ctx, "server.Supertypes") + defer done() + + fh, snapshot, release, err := s.fileOf(ctx, params.Item.URI) + if err != nil { + return nil, err + } + defer release() + switch snapshot.FileKind(fh) { + case file.Go: + return golang.Supertypes(ctx, snapshot, fh, params.Item) + } + return nil, fmt.Errorf("unsupported file type: %v", fh) +} diff --git a/gopls/internal/server/unimplemented.go b/gopls/internal/server/unimplemented.go index d3bb07cb647..bd12b25f610 100644 --- a/gopls/internal/server/unimplemented.go +++ b/gopls/internal/server/unimplemented.go @@ -74,10 +74,6 @@ func (s *server) OnTypeFormatting(context.Context, *protocol.DocumentOnTypeForma return nil, notImplemented("OnTypeFormatting") } -func (s 
*server) PrepareTypeHierarchy(context.Context, *protocol.TypeHierarchyPrepareParams) ([]protocol.TypeHierarchyItem, error) { - return nil, notImplemented("PrepareTypeHierarchy") -} - func (s *server) Progress(context.Context, *protocol.ProgressParams) error { return notImplemented("Progress") } @@ -118,14 +114,6 @@ func (s *server) SetTrace(context.Context, *protocol.SetTraceParams) error { return notImplemented("SetTrace") } -func (s *server) Subtypes(context.Context, *protocol.TypeHierarchySubtypesParams) ([]protocol.TypeHierarchyItem, error) { - return nil, notImplemented("Subtypes") -} - -func (s *server) Supertypes(context.Context, *protocol.TypeHierarchySupertypesParams) ([]protocol.TypeHierarchyItem, error) { - return nil, notImplemented("Supertypes") -} - func (s *server) WillCreateFiles(context.Context, *protocol.CreateFilesParams) (*protocol.WorkspaceEdit, error) { return nil, notImplemented("WillCreateFiles") } diff --git a/gopls/internal/test/marker/doc.go b/gopls/internal/test/marker/doc.go index 2fc3e042061..604ee4c4033 100644 --- a/gopls/internal/test/marker/doc.go +++ b/gopls/internal/test/marker/doc.go @@ -276,6 +276,13 @@ Here is the list of supported action markers: case the item's label is used). It checks that the resulting snippet matches the provided snippet. + - subtypes (src location, want ...location), + supertypes(src location, want ...location): + execute a textDocument/prepareTypeHierarchy request at the src + location, followed by a typeHierarchy/{sub,super}types request on + the first response, and check that the result contains the list + of wanted locations in order. + - symbol(golden): makes a textDocument/documentSymbol request for the enclosing file, formats the response with one symbol per line, sorts it, and compares against the named golden file. @@ -398,5 +405,6 @@ Note that -update does not cause missing @diag or @loc markers to be added. - Rename the files .txtar. - Eliminate all *err markers, preferring named arguments. + - In failed assertions, display locations using symbolic @loc names where available. */ package marker diff --git a/gopls/internal/test/marker/marker_test.go b/gopls/internal/test/marker/marker_test.go index 8c27adc9018..c25eb3150a4 100644 --- a/gopls/internal/test/marker/marker_test.go +++ b/gopls/internal/test/marker/marker_test.go @@ -596,6 +596,8 @@ var actionMarkerFuncs = map[string]func(marker){ "selectionrange": actionMarkerFunc(selectionRangeMarker), "signature": actionMarkerFunc(signatureMarker), "snippet": actionMarkerFunc(snippetMarker), + "subtypes": actionMarkerFunc(subtypesMarker), + "supertypes": actionMarkerFunc(supertypesMarker), "quickfix": actionMarkerFunc(quickfixMarker), "quickfixerr": actionMarkerFunc(quickfixErrMarker), "symbol": actionMarkerFunc(symbolMarker), @@ -2457,13 +2459,8 @@ func callHierarchy(mark marker, src protocol.Location, getCalls callHierarchyFun calls = []protocol.Location{} } // TODO(rfindley): why aren't call hierarchy results stable? 
- sortLocs := func(locs []protocol.Location) { - sort.Slice(locs, func(i, j int) bool { - return protocol.CompareLocation(locs[i], locs[j]) < 0 - }) - } - sortLocs(want) - sortLocs(calls) + slices.SortFunc(want, protocol.CompareLocation) + slices.SortFunc(calls, protocol.CompareLocation) if d := cmp.Diff(want, calls); d != "" { mark.errorf("call hierarchy: unexpected results (-want +got):\n%s", d) } @@ -2526,6 +2523,50 @@ func prepareRenameMarker(mark marker, src protocol.Location, placeholder string) } } +func subtypesMarker(mark marker, src protocol.Location, want ...protocol.Location) { + typeHierarchy(mark, src, want, func(item protocol.TypeHierarchyItem) ([]protocol.TypeHierarchyItem, error) { + return mark.server().Subtypes(mark.ctx(), &protocol.TypeHierarchySubtypesParams{Item: item}) + }) +} + +func supertypesMarker(mark marker, src protocol.Location, want ...protocol.Location) { + typeHierarchy(mark, src, want, func(item protocol.TypeHierarchyItem) ([]protocol.TypeHierarchyItem, error) { + return mark.server().Supertypes(mark.ctx(), &protocol.TypeHierarchySupertypesParams{Item: item}) + }) +} + +type typeHierarchyFunc = func(item protocol.TypeHierarchyItem) ([]protocol.TypeHierarchyItem, error) + +func typeHierarchy(mark marker, src protocol.Location, want []protocol.Location, get typeHierarchyFunc) { + items, err := mark.server().PrepareTypeHierarchy(mark.ctx(), &protocol.TypeHierarchyPrepareParams{ + TextDocumentPositionParams: protocol.LocationTextDocumentPositionParams(src), + }) + if err != nil { + mark.errorf("PrepareTypeHierarchy failed: %v", err) + return + } + if nitems := len(items); nitems != 1 { + mark.errorf("PrepareTypeHierarchy returned %d items, want exactly 1", nitems) + return + } + if loc := (protocol.Location{URI: items[0].URI, Range: items[0].Range}); loc != src { + mark.errorf("PrepareTypeHierarchy found type %v, want %v", loc, src) + return + } + items, err = get(items[0]) + if err != nil { + mark.errorf("type hierarchy failed: %v", err) + return + } + got := []protocol.Location{} // non-nil; cmp.Diff cares + for _, item := range items { + got = append(got, protocol.Location{URI: item.URI, Range: item.Range}) + } + if d := cmp.Diff(want, got); d != "" { + mark.errorf("type hierarchy: unexpected results (-want +got):\n%s", d) + } +} + // symbolMarker implements the @symbol marker. func symbolMarker(mark marker, golden *Golden) { // Retrieve information about all symbols in this file. diff --git a/gopls/internal/test/marker/testdata/typehierarchy/basic.txt b/gopls/internal/test/marker/testdata/typehierarchy/basic.txt new file mode 100644 index 00000000000..5d7df964d2e --- /dev/null +++ b/gopls/internal/test/marker/testdata/typehierarchy/basic.txt @@ -0,0 +1,54 @@ +Basic test of type hierarchy. + +We pose the same queries across two identical packages to exercise +the local and global algorithms. + +TODO(adonovan): I and J are related by subtyping, but Implementations +refuses to report it and thus so does Type Hierarchy; see issue #68641 +and CL 619719. + +TODO(adonovan): test other properties of the result, such as kind. 
+ +-- go.mod -- +module example.com +go 1.18 + +-- a/a.go -- +package a + +type I interface { F() } //@ loc(I, "I") + +type J interface { F(); G() } //@ loc(J, "J") + +type S int //@ loc(S, "S") + +func (S) F() {} +func (S) G() {} + +//@subtypes(S) +//@subtypes(I, S, BS) +//@subtypes(J, S, BS) + +//@supertypes(S, I, J, BI, BJ) +//@supertypes(I) +//@supertypes(J) + +-- b/b.go -- +package b + +type BI interface { F() } //@ loc(BI, "BI") + +type BJ interface { F(); G() } //@ loc(BJ, "BJ") + +type BS int //@ loc(BS, "BS") + +func (BS) F() {} +func (BS) G() {} + +//@subtypes(BS) +//@subtypes(BI, BS, S) +//@subtypes(BJ, BS, S) + +//@supertypes(BS, BI, BJ, I, J) +//@supertypes(BI) +//@supertypes(BJ) From c24b06c449fd65d08ba670340d0351d1c8693efb Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Mon, 7 Apr 2025 13:45:12 -0400 Subject: [PATCH 190/270] gopls/internal/golang: implementation: report iface/iface relations This change causes gopls to report subinterfaces among the response to an "implementation" query on an interface type. For example, implementation(io.Reader) will now report io.ReadCloser, where previously it was discarded because it was not a concrete type. (It will also return other interface types that are identical to Reader.) From the Interfaces RPC, there is no way to request a supertypes query starting at an interface (e.g. from ReadCloser to Reader); see microsoft/language-server-protocol#2037. Use the Type Hierarchy's Supertypes RPC in this case. + test, doc, relnotes Fixes golang/go#68641 Change-Id: Ia5cc42a5ea160a4e74dc801e41f5fe3209ecd6f2 Reviewed-on: https://go-review.googlesource.com/c/tools/+/619719 Reviewed-by: Robert Findley Reviewed-by: Jonathan Amsterdam Auto-Submit: Alan Donovan Commit-Queue: Alan Donovan LUCI-TryBot-Result: Go LUCI --- gopls/doc/assets/subtypes.png | Bin 0 -> 121496 bytes gopls/doc/assets/supertypes.png | Bin 0 -> 102607 bytes gopls/doc/features/navigation.md | 30 +++- gopls/doc/release/v0.19.0.md | 22 ++- gopls/internal/cache/methodsets/methodsets.go | 14 -- gopls/internal/golang/addtest.go | 2 - gopls/internal/golang/implementation.go | 145 +++++++++--------- .../marker/testdata/implementation/basic.txt | 6 +- .../implementation/generics-basicalias.txt | 4 +- .../testdata/implementation/generics.txt | 10 +- .../testdata/implementation/issue68641.txt | 64 ++++++++ .../marker/testdata/typehierarchy/basic.txt | 20 +-- 12 files changed, 197 insertions(+), 120 deletions(-) create mode 100644 gopls/doc/assets/subtypes.png create mode 100644 gopls/doc/assets/supertypes.png create mode 100644 gopls/internal/test/marker/testdata/implementation/issue68641.txt diff --git a/gopls/doc/assets/subtypes.png b/gopls/doc/assets/subtypes.png new file mode 100644 index 0000000000000000000000000000000000000000..9868a56a77dd55d49a5a26bc54df1d8325fc95a9 GIT binary patch literal 121496 zcmeFZWk8f$_dX1W0wS#fB3%YBg!IrYEiEMt(%m3PDvgwsfYKq|pmZbB9WyXA3^l+I z?~R^wo}-@M$N#q%XPmosuf6wL*Sgjns-z%=bMxL!6ciL38ENrnC@9yUz!!vh1Gu8? 
zAA^NaiwfPE5245R)uqEw+F+11Fy{@CHh@gSnoCM<0j~C+{Z!qK_+m}0_CR|0;B)yb z$1#v)FH4uex%ZNaD`e-8&!mU4{-5ut&;}Z-x*Bb6C(;`&0w_0h^amj)r{|@gAcYbT z+k8u^Gm)xZskrZGI3k{&o2Ak1Ys-iF)MVCjtF(iW6g9fHGLS}RIC3CURQarCqs}eW zi0Ly#j|JLwKt*z(qOA(-_zEOnjM~FPT0X!p5ob*9l;di}{W4ABntpzNKZWz1$~xrO*iA&HqD;wK2JW;R zkB`Lhb%|pN>pgcPxAOv=nE?3?kviG_ySrOp+)Jvy)l55MGG*cFOuc9STVC5Yz+~KK z6sWk3WQL;tmGL}AF$6C${6Jxw{XNUzZ>aDt4Q9{_h)yu;u>w9Z9A8;Rz_70#L4HTzjlS_ny*+D|8H%Z@|?@MyGF6OHhHu47cd;-Z1rIP44lk*5E zyugDto*+EaKZ8xO;wAb*X~AMP(6+AGX?^nKB~P|@rWi5GuG25%V*lLt;5fX~axsy_ z(Ih`zF$(DayhJaYA3ZF88Y=OU9uFckT^Hj`5=n_>yGMV*k&M$DaH${Sz&5U&DWEe0 z#MPBHwvw%c#x)gw{COX9?3REbzaWfC_*bgcaU%{#vt^sL8%X~z1FJ~3`ddOkm>ap? zDT024ftG4)TJBBT>6%B5z(Ct<-&S9mzr~&QWvi-{`v%YW#yYT1GK#&IFIiKf?TSv^ zd28o&ATNwC82+QB0?)q*_T{+CxEumWH`meAwvv-HAT8jNRaxF2U7svAaWwQO>$C!X zi1r)&LCn&zM6$8bVJ`z}aX9}qJ2fe*wwVc@_IxwEv-RGv)0T`fRRDVYjgKRWmhlT^ zAEoh2{H<`a%^$P`)`s}9A=;oDRyGyh=WKbEYL=I20E&*CA?b1Ydx`SL7>$AcTh-OC zan=UY>#8^OlG)22S+jOAdu!o5p?Q&*_d#R^2$Ls32cER8{*n9ybf!vz-BUo-or}CE zcB202K>V6QEN0RoWaXNH5E)i?tHiSC-n3NV3c=;LTQ2^+l3Vvj2^y7fQsT_9YNayM zokBt}-dEe6ZB@^)rVUMJvzXnRVsS@{?3Rsw;+bdWF84Q2mecg{gEA-&*muD)G z4`0TiGy@|2_^6pgG^Kt#qzd9cRDdQEWb<+B3{jUfQyhU7%}RR@h{n zq|f0d)Ff1DVIXz5^$D;^{9GP$Qy+7c@066?Ft-0ox{q3( z+#6%fiuyOL0=|<+)BT!ihm3zbdeD_Vj|3MWU!?x>U$!R#uNXye-CxTKHh<$t|K~D4 zGk^<-H7w5j7q@cvGFuY3Zh33HssByb{11;IAqNa)9PP%+|HX%9KYT3OQHM*=Z;BJC}gtB{Tou{upnIN3k-(A5gIM|q&0Kx?rG;ij-%gF6Z zYHB0o)L0fX>HcH{$!}h_I;Ca(zHO8kLQAdEfv2h*_<8#PcAo3~mDY_QGfgJ6h@<|r z=*{$7QFs?un_62KRoW_KS}nW_xj10&arpucnr4B?haDb_)%yMY^)_c}a9|3+{nsRw z6#+SnM!rsMs+5xGew6#^PMg`-jZI--0m@NI7E?w5Q_YkkNRahDPC&hxD1*VEaa16# zVPebUPun?XDTSAwvU)%5CjGayS34Ev}AraL~M`kS5n z``@O<;A*-jFD=*{gK2iXd^2w7)NCQ!IceZ_URwtWJ?3ADZ+is5e;xy|o1`<1K!ri+ z@T^(*7E8b(rzddnu5Wo>xYCVSC&0!_3PH5Rj9Mg!413P;sIFHMr^UC-Vx;G`x*_2S zxa>Li9Q%!N-fgL3`w*Du=t^PKm(V1bkd&7mYd5-*@JhsNxgXu?Wgsc09e z=c7Rx^|E-h-%FXPl6MMy3Yzu9p}~bu7J1~sw%j6M=GV7l^9jfV)~dezP%g+8ZyI!% zA08ir0;6Pnm4tM-fkfyId(|@C1W+KpXn_~Oa~2_M6buxE?NZ}b43{DD2tZ8TAX%JL z7`UcG@u=?o_VV)bBZ}~?nZ23t#fKfut{=R2k#?4M$UR(p_##H-9pA!V69Z=iuV!ZfelYX-6$$@#_rs~zYu~Kc z@_7xe@Mzh5zgl*>+xeOWGRp@|4gh}@QPUN@p;xaAMkL5?v-ZIPSRoqP9#B zw|L&AdUQ}!sn8@}sIj&7!fhWFTHs5825?ozLQXLWaba$6W}~31|qP zD;Sc9a?gQ$?0pBI!7v6+MI*P6B^cxv^Ef?%C#x1QN-$0uC%|r#bXA-}15ZwZGo}A? z>r8zy=Zkb2L#OjpoF-OJWnn`};h`-@gUPMq|{laSseUEpV`w;t8{*FIhjCJN46W6s*rP^0As%tBwUV%isi7i(#A|I7@u~ z>|B)l&y^NZ-Mt}id1*0foWZZmD%umGWeDtKh8^TQGcupBI|#d~`aPA(qGk#KP5}7flNz^do(iz?WP1ctzwt z*GJ&>PGZ0#_d2N+TlPlvqO`H1no{z^9%F@ksfe6JCbcExhz9a;ww81K#k_;->@QkA z+dcpKl+Fcp%MB`6s5`PKta8l~-lcHR741L9pD((ni7-}!*bM`VJF&8f>DY43JQ!7E zc2gi|PYp}&9+jv|34|VEMW|}h{Tvfo%Q3*BrI`q_I0MEGQ_zE?Xuoa0S9&d_@m?(R z_uF7`!YO&!=K!PQ;9VNyAXTSb>a+XZfFgi5VjYf1w$@Q3~O%~OfofKEh4 zxTTD@DsGiFH_-(>c8xD})ObH)E5H9S8_qQaL7xnYACE^DN5Kfvwmbd>>?-(|1EBD! 
z99xmfE6SUwCUSIQ%x&NERp1+NohuSrmlK*$-r6f`ZZ!O^1nVp3vJ=qWOV!Z6ey<<7Ru673Y$}uKgZu|)d zKsa0A@ccHsGj|LR11n+SNd&sahl(5Fzce!u$h0TDo}ti0EOveNyebFH-ghj)h(U#W zd~0M*^sYFPw+UMeq&VI|I@Y0oSCwfHc|{V<2V4{LCPb=HKxPX53els2``nxhddKGL zZ=})r1$nTyr);WYdFDOlR-aQ-bN99FC=xu!FbW~j}?qJR!>sxEW zaK&p(=ba}K?+(_-29`N@e74&r=*ZuNaqYqRd;(n>=-kdL z#PW{SzTr5B-u=-etu9hN9X4B7m1hv>^vDC?FzJXopv|C|zJI>y8xNd7hQOTS@BU$} z;>GWsK_=bvx88~2>OO6dzO=jc87~E02jLTFgvZ7nVr#OIY)Y|Zvhxf9P(n8x6g;gJeRv~07X!TC2{079F=jJrK6>0-!s1-mRmw@M+ z|F(nujYRld;0J-b_^K|ffhk7#$}6g>97_;C;N1%fQ5vli0BC*dQ~r6}v;bHzeMmlA z5_}@xgWh;l_~Ym0*f;7{NwPl}t~#5ZwP}7XYd!-xvJc;W$jiO*z4OMBgePEc=M=F- z$m06h*me|oOK_!j!ZjUL?)hQ(=u-xBQ1K~=>_`uKeJ_5GwIt}M*KQ|~_zt|m1@i7U zF#x=emA{P2$wkTjM&IpC4JBcJ@5CN*x)xVSFliL*SxST7q(z}5p&wtT(LaL_ZpLS7 z74z;#BlYJbmDTsS|OuWoE#MXwiTqcjS8K-aodp8 zN9zLA-PrgeY9=~6ph=M3QY_T3k)eIO{v!)l>QS=&5J&2r{hlI_m)pA3I5gEimGI8Q zKP<49u7~GI&3jH?j9+jMDK14Z#C3#0&V@+EbUVsuJI_aokIX{xxin2VG7`4vcPPy$ z14HMF1Nl|oNpDmVrP{9*>3wNeZV{~egH2n3OXTvdZ__cvm436&zf8Hh^1oRCrZCK> zJ(7x7wm+Og1G>Y6Hw8e>>Hew_vq=c6_s#4spx2>tCqpYwj($6llqYrw6g%V3=bZ`p zMg$v28d!8Mk>B??N!ZUr*VMvRkGMRTitm8|adC!?m%DA=_pZtzzO*<&!E#EEFX^=? z%DXiiBrQlK`&_~wfW~g04#k+d`mshxlA4u$#2DTUB{F`>L)Yei+p0%N9_r?_*wMzW zjpHk~-4WK|;qSguzIOK^*T1@!i$rgFRWV)hYI63yC2~h4;H0$JTx!emvzD}l3WJXW zZ~&m>``W+vdX@OJB+?R5<2NMJBPpu9(85i{;hnB=Sl7rLFZr}FO%M%?0j65JP8}u} z!N+=VergG-fe*a(8tJIQ-c{pp#U zwXEI$pbno0v0g@L_|}L8;HQu1o$HCEuIV%D>IdFStt>izvM5E$@=7JZc2#V_e--uh z%pr*3|RvrUqapozfqv zGqswDFVfHee_Uy&zd@>0HoWv%y_HYiCUwaJHr+aRthfTjluPrJt;a5sPe`A(pZthD z1T;Vh{%p}EcJ_TF7ofg9vW&a&+x&=rcB=fip{&A0+pP72v?I2qus{dbtf}BV#)o0z zz#phtyLXI=Dbn*!N?1rvIHYM=^rstfkY>WTyb|}|zVrH}T|XTLv!%JSpMh`8j$f#_ z+<{X`b_!54r;r_rw4cpZ&pQ8>XX7>eQmHreEJ zZ+L15HVH;_}t%Yg%GoH+(uA+hC@U)X02tir)Zfty5Vltd zvRxH%>Urjha@49Cmwuc>xZ&;acNTj3E_s}A4LB<$Pp8ND!x>#T^_pB?YsK4iZ#p5t0eRX0 z0>KoxT+Hl*T(Gcr_47HwJ==oP1*vzgZ$FR(#n?;=9LEWN+Lfy~H(s0jdiQfJD^^oQ)4UJCWz%(bhZQ#l?t570-xTkz zbKXdS>yF=09ujw%>#$tR|lg4UG9PXeooaJ6=k?Z`kWjIfz-1G#+Xw{uu z?j>7r%9YL=U$8!xB+>Xaey7Y#)uj0=<9I1wN}wjkz*}3L<+9F~$|g<*K>iOcm%4$r1}DC2GgI}Z`+s;Nn> zRmd9Ruk|Ocsh85`ov<%9N_d8X5!=?@n&vp6)k}vA#Lew^Vu&eU&)XNf$4%(8h!b%nJ{^s*2+P-~#wX~~$}Y4{)r9A=nex-aw5s30L6uuf zIFIlMq;c$Cqj#R!uk9B*W?wxn|H zvaWx2JSJ0rUeF!CpKy1-#;mh^v#R0O&B>0cd99g~E$5(pEqg&%ha+Y_yHy|avBHpi z_fe9vpKmQ>PQ*QTJWdw(8#lX$Zu~HPpK?_#TRVVLbO4O`QA^v~eovX=43N0NA1YmV z>7r<#C;A9yE&Y#y0+^i~x(gl2B8!{F>surFx;i8JVTaw$x<|Ez+Dx;l>x32Lnl#sH zo@Ub<)$aAn|0N-6x%GpcnB^JH>!-}weDif85|mb>$3nLKo3)-7=R4~-B`Bcx{S=ei zTKDB@Yzm%l>k@rIQ!{5=Ni)w6^j6?6(3LpKyb<@biVKD$IAKCTKS;$qn>+3{dJ5hb zTG28@`RSO7h}epX`10I3`aOp8b15Mc#iLDJDNH?S!4H0f3vO84b&0l#0aTYOc+1HS ztLW<1Si(#c6AbB*6HR==3wUc$M0@gi)x~U8ztZ{6JCzSqHmzmYO*N5qEaTp1!$G;* z1~Rp8IPgV#IA*@cH}k5*(b_yw3kak;1KPC56X!@MV5Tt5(*IHJDV$4QOM9y5sYfcm zij~HL`5WDKbCRiiIIvNS2*)HF8YFFu4XeKzZ+!kZ(dLR7Hoqh3d>26jnWj={D9xdp zI*0a?`<>mXxXd4q%d{hk6@gJZ3PbYTbc5Rz#8)pQT zV~OK!yYS5I`Ab)(XeXZrWrK7Jp7vKX>=fL>@bc*M4ejiI4{ z7|N{ZqXDuAJzpG^I_BbKN+#kEV6 zw{vyzTARpjQVhA=!Mk+E zgLfNa4!hi}9Au9Wf{>8*xF3a@zVei7bn%sHwZ zCtAwuG^E2JECAepJ6|eIviyv$SDX909q6mVojgI%(@vG@ZdRQPi`Z*3U_nt-CZRk+zCI-854hbr?HAaf zibZ@Y=5bD02tGo{!MsGAGW!mKG|!+#M&LxGgT2RwpD}{c;mI(4GKG^G``WX>7E#|7 z(0$|bTjW6XJP^z`n4f%PzZk;fYrLk9Ow%PY46B@9dD4(Y>F8$I=l3#M!njKlxa?0_ zoTOZDwIm;Wq1yh^z~Wtob!Bfyi*r**kVHr8IU*X2l8`nybr-m@a4R#ywQb^ZctCII zC;eqsxgK>zS7(!3l6yd!5{S@4DEJK#Fw<;NDZw#)sCX;RRbs(&Ak@)Vs|Pz|9@vg- zkf#kXM-~DTv+IX@gLM8u?t z<4k1NzYxUmZX0juw(Ls+LeXc^v@}BYro3wpFH?Jk2FL}ynH-Ypq%)$$B4k7m@L=jR zHE!rr>QPk5*EqkZjc@nP^+CY=a2dRf3n***R#hteQHy&s^rJO1{6txMB$eDIq>Erq&hs_xE&y3y5Uh32wGc z2@tQG7JoSs94`L$!>}+PPJOX`gPZ$zEOPWL@+ysaxfI(PL3lvyouMi~sieO1n8iru 
z4qXEJPZ7^&X{tXu=~y$EN67gk7^4`;xF;^NUvS!jsf<@(+Chgu9Y%hOwf_V$k}Cb| z0~SO3C~{@eAfr@yMz+oh+-edwV*RiH_W+yLl#HvAO$#KQLaKu$Xu`Qf3XR34{cRzv zd(CeJl45COF{gmRM)~@?Efz;|#WcTnH#1x409kaARhP*08D_Q4}Xf*nba1(#T4Br?-qIU`BMjJsK;2c;! z4YO{H>CvDnou`>Oa;Kxtnu(O8k>+(~-Ep+uJF~~EXC)KBT4d(BUMOq6-Kf9GUvt>1 z(f0n`C7)yX9AU%rv>1Vs%)+=Vt^a#H#V7%Z!M$Dn;+KAh!PP8~#DF{|D^-IILn{(A zh-2_gD`|4o_9}+dm*_6N=vIwbC1T6ZJ*#h49{Ob>uS4E0V zr8sbqV1m!j7m5TAYZ&7^6E4}P=>@N-Z5IV;@2>y6`p)TU-a^O}{`-0o6+9%qs@`ho z79vuo>8qO*95{#tH%YOL(-&iHryHCt6Fy}q(Fc2{QBP}M;s4*M56j;r zmw#Y!%Z(;7vK1S9H%Bh}cl`aYOiKc)cKkalwuBg> zH^1IVvU2_l zlMaSNO~251arj@7X;m>hWFo-BJaMG*I~@2w2k?rK_-qMo!@FG9e;<+n12FWP|2>2C zJ`gQT`xH3*o1n)av;k+T_+JBGO(ntqQoz!T{V%8T|HZ*z3ELV~FX;)?GG%>nr|PrW zCW{*p7p5^(4kyG3ye3xkT5^y7y;cn6ByR7rn(^7H2kazVcPfTtJ$RLQ(V@ReOS2C* z%{Ly5KeSb_7yRGn;Xezo3L4qOQh2L z`U~EFE~dNDU@1L_>7R}#Mv;caSqbY1_URm3I=NiXwDn$6Zq`%s2;i%iJt7_EQeiV?YnY5}u ztOGWy6zoSmfD~dl*=eI$Yszo$`KA!!`nXIt_x=8wcKk%51yMS(ko?s(J8`YIouH>{ zXIZaW%d-H3qKkNk)tYqRvsVG3_)wv}C1@w<#Hoa*Y|D@Ywm6Pd zSsMS&uT(ubikN{NXdOyglcww&xg;P-3G+^utl`jV~rTunyB$P3Ms-P})cZh!g zz^i?dnD4KP0glUgbS+IS!DCLTSu^e2rx(gl*^72%++FFib7Bye?5Yo9x1BP75Ff z^De2g)q0W7qrI{Vb=y_7PAz&wsET!-@yhK&2m9IM2K)WUr;*vsA0W+1Us7 zI>G5Y*j0Qit?A+?m9*6HQPHmi)hAWtnVbB@zO>MrhPbYSL3QBS;CkQ!oRLG`c)!kD zEAl#icaoY4VrrE7PA2St#NWTn+dY7gdlF^PN7qY1Gu9n{e<5U+COpYNoYCaoOKRnEegP;78&)pIGK^1Bgar`yoDs& zbKR_L1A%O_+fn9g72WikWHW`JAb-5D4m!r+a%){FpAP~u>ZDpWkBGn3Yhct{8NO1~ z7#kls#ROr`Llw7xBuAkp@6S<)OfeGKR8j~`u)@U0AY+Q@vtw@@c2l73aCvy&zBPor zJx8HTl##!4`jw+>R9~4;tu0*amuUiTw!ooF4H-cT7;$=O z1`M!%p7vnEvRGk$r?=Y4+CYex6lExFNWY9dC%gXc1HG&aov1n%U5$3NqisYU&|>r6 zQE3!YQq!w;STokNEPVX(qGYFDJsLG)`6DvS2hv)pYM6Zu5Lf9?K%8nWDHd6JQ*(!1 z(?Q^SWakt;CAP-E;p8Fy>E7_tq2RMZTpqjbC(xXwWgcUSH{~W`s!a3uKn9q(gU@FK zHTE?&mcVcQ4T*iYu)qyNFba(c;3EUvzTa`?TfXa!GeX-yzg2yGa=U1k4Gg*?N&t$n z)FvRzdin724CpB-wPR=S5#qO0SY99_?w!P{=?B7Z9)q!aIAOF)LQxRID#JxgrkZAU zDJAna$?O<`X&%=+>@7`GD0d|f&EOYcS~rnoTHD=y6?9i#t%Hyp!paPpogPJQCALo2 zOE|HPYQqY3Ud2pjOeY(xvArgLtkR}XYoN$@usqm${{Pr}%do1tcKzExDG?B)8z~7{ zbc2K_C?SG?bV@fYTIm##u0@B^-5@Qw5Tu($cf%szv7YpXwwVJDmL_Ut12n4cX1{(FS^;eH^->pCvf9_Kvx9DU^14cMhRFG_w~ zxT<&Hy9BAu$-X$)0%spp%88qyC51=9pTKR+41Pi~2~jG4+0Z?oA1vr|jQBIdupbv$7b zw7Am&F{Xl8Zl0-_FEoAWpui^D*~BPWc|G?g*~-^C(P%ELctdzX z4;k_@jNr{xnikJ-ce7W7O|#|)_lqqmnHy18$?EYpGaUloZfeAxzqAXSiZCM1oT(rw zWy1wMTg8eQ)4);IF!{YE2gvqjBj*98YdfQ=eP{#IVgWf1m4y2J9 zfFqS}=XHPwEmG?Fkt{j(QrH=Ev(yNN^r_fpp{x`@Vq}-Kw^vh)c!L*WHwQle&nJ{$ z`)8N1&*L2Q!y~ZL&&?|t@Q>F;UWV9l{Vpvz=wT(7tSe$U`Zlk12Mv{F_}w5Gp@kAC zMiksw<}$GDa&)?ig;c85kBAiZEj5AZO4pi^906h}JL&+`3?EPCa*Z1Q!}q0x3J4}t zQ}5J@=H|u!Jtjq)U@w=sOUAAL1*gf?UV-L)Et+C{Y?7&m`c=kvCe}>=fivA5AF0x) z4z;<0-MkNV;?OcDe( z!5GvST+#JN*7FJmU1=l5SS0ulM7I(V8sQ&yMQ;5L;r(`k%)Cu7cW~rk9|P535|hjD z*0tNEX@mfoo#_d=Z-%(Ij&GZWyiSzF=ao#zmLH^ zpFqX``9^+coWywm-8K_Ln$qC)5Zh~3yi}DwanqP|)s<8bcURRB|S}Lp? 
zFS<+qKFomuEx8`gl=m^|@}sZ41c$xEazVX=jh+Cs=PF}hZ+%pv)v4=lzrQ@yZLGHZ zq_@91oMD3CjsPK|i8?*#Q&wBBv@JLF+?K04myQT1)2TW=>zUiFLMT+p|xY#>D6W=Y?sERFn2;=1 zHV|2iD**V5q=w_)0hwtJP(dvpvry!dGkX0PbCz}3SguC1KdWT~@6-gx-N^~L(4&%N zB7>tE4MUC26iPK|;^Oq66U4E0K&%T}s{bSFZgq3aqZbEhMrNyo_7KJaiBgt)`;bN* zsG`5nUE9(kcKvYnXn`G(87t8TYWQ{e!=&Gy?OW3D3_l_=5upzHrO2XFV%$^T+s&#PvTzR7O?v|YfKxNn;2ctA-1u?wq6(PkipeGn?e^8`##O+8s+ z{u)}+4Thod1c=~z_z9?S71XL<+B#UjTHiv?85b>9t!?0sAqh;4_-#CsD{k0G8$7RR z?Ub85$+onFd5<3d_>h5`I%OAHdQtu~cYM<|7aeJ_nU+`BXCOo+nf}tqRM}QNxNX1fk1&pF$NT&wwKMM!l*OdP zbNuL5by+zIWaF1w$Ox@hJZS zNOH--AL9x;tU)vGnt@bft{iQ8+thD=pPP_iRl8+uOF|TA3IOTkX=LWI<)fnQc6WR8 z%draeV!5ePTuQ6OiV5dl>|ypPTWeKf+NV|MbCQGzH?1(4q;fzhpvI;BnH^u7Dq0%~ znW{N=BeWH#;=tz(dw$}_>L@QCBrtuZ{qu3zYg4;jw5GO<&|bH-s-2w;#|I)n6L;@f zhpYeFwE*2zKEu9R-Mjpb)2)Zuwi}g$Z{J+oYSp60^}bh-I-&1LVEM%Ngz*;T?V7C% zM~mqGi*isTPXJ5@nLSI&-!7KH*JwtxxIY6yV9T#alz9+M_QQ$!oGU9BQ)zDxhEGml zesHRAaM>_d&Fm>}+`YM66(7NRlHpVu%+eb~1q-7YK&CrY^7USr=NGDNe?zr@E4O1y z@E_t}WES$TH2M?SPLE2tICbic@r??Z1uKj?p^i;1sOao{w^G)0CTZp$!vnZ8Q&JW} zi{l+aFmxvzwm9d)$&=p+&t*s7@Xr{bLQqhd-IAwQz?6_tMdzsX-^yv}G-=?3c@duI)GerFiVB_7ChQdO-YIKO8qB(#FK(rFrw1Yng;GL z>f+T8Eg7j>Z2Jv0X55~Mxssz+K1JEtgN?~~d&mQ2bQfeU0WINDdi^=5Y==2i40Wwk z7DohVtMA&}Dp)}A-0k8&IYM{lA4_HwVY62#g?mi?-C9#R2VFr2E5Z=F92XQM)IeYP z{LOd?S%87Zz%*zINO5mvmO5le;xFe~y@#G>pKGQ3e8<5kkXoA#qR9FL&fcptT5l6; zJ4T|UyZhDyji87@SgBouT~pPf98^kdkNou++XamMW5+AZnpz!toJ5mX9@uqF&vdst z+SHu3`P0}U>5`e?z188+t} z-NJXq150*?a}}WinyC1;{w_mq*kzjU!yl1>7J&ud9C9v1?F`b>i53n~K&-JE1uJ0y zfK?I(UWP3|<*WvI7kMw71GLWi?o6wcrrhPl8hZHmMq$G6)mqMc^eOLlhZJO>Led+Y z3-dASa|+)K>#!3d#(CkMdUZ3`2R!vR{d|0j@l>Ot{C#fok0wqh@RCCX(@uNCJ&*l9 z)`Fo0DI}Csh2g#c*8P_1&JD+KH zMDS}izsYm#_Y+mNhlC_?w=};DYwn7n{v96AAm=K;cu@`)>wR)Yu4cb0vTt|pWcRFV z(jBCnzi6d`yr#T3*(O(1?rPji&SKc<&FsYmMvWS#j5L7dM<_ePD05s7&-Xo2fIRU& zD~$TA1YU@!so{R+7B88u`>Wgl9MkQ(;9!YK#pmh>NXMf=_>S#^V7Zp4Zesj@$ zBfnEL#wUd-ssEZ*AMHfa-(cA|1R|tDf=J;TAh)A@=!((X#xOIpV<&5RZGDZ`Vy&F( z1==gNOEE3AVUKn7{U2vEm#}rtBmD8dxUV7|iR}Y~_C!|XFJU!%t0HKE-e_Z>=3G5U zA==IT;)r*q?!2OeyQ+K|7G~HCwh<4NzjS$cpSkG2HX_Sk-?9X#;(ZZBm`z7atybY{ zo248!dew>qix|O3?XA?QkY}D}#N@Eh9|}UAQ$FYg68#tyJ56d(?F4 z)PO1+@bzgs%n-~&Lvo-!Or4FIs5ZottX7t?OA>jsQQ$hk(^zc=452-x2Aw<(yPRn` z0baF9L9>h2L(j%4dl6mFSam20!D`+*)cZV(PH7+Ol|uMR(G0AbqgRR35QoFL$Xe?+ zDLLhRw!_UKYiP+{+%uG#?gUh*vO4(4gOb$Kl=TNur(3H^x;a7*ru+?q+;>HO^vg{9 zdvi(F>-rk6jZ*EHrdZr;+jy6w)S0~0B74pPz#{Y`+)gnz^Z(4ZQ3DUKW*CVD@|0hv zsr$anl#LkhlNeoINOS|E!6~3lCt(kLte7zUy3>D&r)Ilc&2;lmdX3NfZ591ZHkD-x zyX9Y`&fDJkY@SS3BONC?_1f<+3_J$K87CA~$d~u?l$8srHw((Y;D0TAU#z42lYNCv zc3PkP0ecLu!a%vNgx>3V|B=U~f!I9Oeaq9MuGW=#GiaCy+|6g!OZrbvGO(Dr2n@?- zSZ`OmvK=EI5p4r246jhwrT~8Xt1yeMLRk8J$7GsM;6ACB+3DuaM70Xl{VcxO3)`FW zVY&w=Z+?%g*=jf(C?yw+J(oBfD6Ih{+)iK0{WQ~J=V)qj(hhg^gpN@%HxeZ}$X1%sk z1Ey9S)$UnM>cg*U&WTyiG6{|%{wjYyVJv}`{ykSJMs}~$g5+0Mzor^d@Lh>ssjw+Hl&`%IdnfWZE}TkNlnen8t%qF_~ta{ka)w z4lx|n65bl^KIt(nw=&>>O$pEOY{&1f)H`U`mSS=~@FT@0Y1aOLWq8+bvodAuPgfjp zC7pe1Pf_}hLCzlSxeAD=Af%S#YVUqe0{`13?FCLzIsp-qZ^Lv24C=O_%hK-4@7$iZa>>2&Y7x6aM(^;h}xcrFqLuP(F^ALJ$_JSuluZ= zNX_Z*CIJ{QYGQv@qx@q9&h{s$6h!cmRwCL?Q4DimHtkEa2%O{UxOTGT|6mu1^}(rB zZgZ%`vXuv}oY)I;2`53}Vs&+KT0M8Q9aShLf8{h#Ul=;_modihM?5 zOt62b{k`*oUmw$gZ4QZY!GSSg0q|wII1jyIngEA$wZD$^Kc2T@*SRyibCo2<;gLbWuWayf6Q z*KhpDPi%hsY|1bzpNJ{E=~Q_$=M z-n=QcA9LV%VuPxeUX7~mW=%Xtr!?|BKMI~)1uf!o6z1*8l-$1!{U0gOG`C3(PUpPm z6vVF_lSEwYpY)*cv#6fET+zrTeP@>=3xBQM*gCwe)!GT-@&C~?SOn`#O zqJjj}Nv`-+cj)i3cFcG0E>8%2yL{ie;;RFK2?7h%GTnD0Vh!?a z7cc{;S`EU90%JBnWZgeT*{7h5f7Ff?68#?l48E56o2>JHHrEQSJB6c?3c}vttP#m9 zjtJwn1XIyUhXFw*d)*sg@|9nN=xuPVke+_;56V%vRc(aDj+j$SBoE$?=o*=M?>So( 
zQ}wF(A)o$+rTf{8?zgyZ%L`oZGcf-hd&Xe{Tv5)W8m1Gd_{3eqWw4aLRN)G5aZUlA zE5Z>D)!pkD4LPz>t>tRDZp5 zQTv0Y-sgK6Z0&Vt0EcGLTZAZ#zBQ{4^&Mo9kR_yYllx~q7bg9*S;<%H`HaiNwhS1x zAzI^S@W}tSVW3fdF$uM^1hKGH{fTooRf^@W6>Tz{7BCDa1t+adU^&o|3hN@ZXC2dW z2*)AkW1HnXm zV(&q@#A9XnYYMDq>N6EGXyM7re)29aadIbzTf{|XEsiX}$!WM?wTZ^&|*Sn-SVD7>ZlCdMx z1WvGOVawaVab5+cuM>D>S%a-R#z^}}8KP2@+lPpwo%;^VkAbRs3LF4(=g$TJ@sz$y zhqK5DfJj1nCV-B(0HB6C1Nnmuqy0BUBhQE5sx4>56nfGPj-yFNz6vxREvH}ui3|Je zM(igDpQMir6(<%V_5{zzjQ=gDNb?5*nwv^md{vGfd>!>3%oiObhbwa2t9_L%wWafI z4+|dk*5g=O44XPvVN=1(UqN){ga%Rx5H}d@XK&zr72&bP6&((X9!d()aM?E!7UBI&{D92p&;LdG<33kW;=1upHU7rXc+{xYsex-gMx$5ZmFESbz@8 zA3>Itg`dg5q@+JSThR@dDlJ3cdFN*xjhD-o$g#9+Rfs{pS8~N{r3-xNcha zVA?jxK32nobaSHIl;;AY86P>xFB0K28L05q+hu(T##dN_gdGX@+J+(}})NW$fy(tztODy@uxeDT)_+6+CoYv!TH=7twV!(!Ez7XLl26fUPq znOv&spFB{EG@C7Y%;z|06T2@@onXcvHUtczHdc4a(1F<5yzlRGltLsmbo{|RSG7}u zkUl`smKmduy?e*6{ZO)tp_h}EGw5P|y_4Lg0?3Sq_%j(&Fk#<_jD`AMHdu`5xEE5| z|DrABJaABN0xF*S5gwQi(LPBRFc27v@BO?xQWF_yMshWC;;JX)yayCOR}(qEBEdxl_1r1$yk{?r7X@}5 zQ;!r*+?I7H$1oZra%~A^;bLRgqi+su3&f2Oj%>Bpu=yptVEj9{1iWQ`J{o!zuT^y1 z4TaJ>y}4xEu0ed+Hv|8{vhv0~?*g>MD(nt_SM(Ald-?S4u@S;`^-`|Vht_r8l?3*XI zmvywe?=mYLM1kkn*BWLJ<;_cH{lCYt_SgT6V~NS5>^y|41(d41m&#`crE9Ngca0Yk zzo>WqXqs`JBDD%;=pmr~?GZ#I=aGD<-yL}VPI+g~S=tQr;QQxvn)wxYoPWRe|lDZ)d$o-RA>VX$pI@;u84w(Dtq`)YzC=pZIS zKoN{la}V#?MaPyIM5{U1M3{1?k}y$^zJoz_FEQabW8figzr&w*Eo-38@jF_gJ49t8 zFGc?JntT9>eZ3%7;QtDu;6;2~P%o2k%vr0B>CN4W5Q%CCHR~443bs3NNH&G%Hp#Kl zTj{u)7q=w4Ewe`tY%SI8M{BFH0Ayn*1PAc6xO`giapXU z;EJ4iO@N?4)M415LA6!1H#Ek|V*d3xl}>X6o$TBS93o!g<(d|JPtiG9{hsykGK)Sx zLB6w`Z$G|Kpxc`()u?U0N;jUr6Yk&WFu_Hz;oYi|OS zj4nzE_4Kv|uZO*Z85EkQZ6Xcu2d^ZS=t4-y?(p{DLOJAD^T3*+I26n5SXD3mEiQiINBNn4NC9_%9*uFjFM01Ym@O-OB$69M)b z=2;W}8;eWivdd#@aG=pWn(DBLXA+@lRcE@qY2zk*XtOwRWEyIG<{cH6`42T%q8G*d z60r(XT?JzCFfA^ml7VIsa8L6o?&M5d0UlY^;YgHa3snrCSC3YkJynjWcwg>Y^LPjSYn_H|V?OzL2UESd2m0H0{X#ZeKQ1tWRuyUn>Y&3 z#`+C~sGH-`x6RcBvM{L7@gEgK>?dk1N@1=#u^2L1Zk}Ph7kC986&C{9ow3beQBZ`u zB?@aO@Q`mO%NQY`OTIX0=Wds+cQ@B#(FIZ)1j~(xZVhS*zXb|)n;KS4_<@&Xz6mo| zEuPgO_$|1|SJFH8#++suHg)V+5^?hk`9|VsxO8!!rVPpiT@m@obiB%+qHtTWCX~M5 zigda9i~G}P=n5vqM(~B_#`jzgZq3CB{3i0C&Ao{j@JYoIUvrYab0aA+L>XH`QegYey@Ol>vM$V>qPq6tYK;a2Y7A)n z-a+d|iyD|RJ`tG;D~9Chk3;O>1YSykl!O#;pGaM%IWCVXybr(&?Qo`tD~zQjeYS7= zN(plF2J5sOwtVGB$4%(#^(MfVCiazMG|eM?Iu0(^*8pXwNFgPM5`pfHB;T$CR$n&adAnop%bKYfld|uHWfD zvs84PhW4CaQvrRE`EG(PdW2ueUA8TCthwwx{u%hPhza|`jLBs~=%B50>3^{R?7o=p zE7Z(qi!?9}!RTSiLV8zJl8P7u*aW3-@Q^UPqXnD5U0iPhq&Cjf)Zx$msT7yHw~PKu z73Yre4X`jSc>h!p0_@-&&Ko``ZgDan(tF(m_)1~V<%-*{z;y9tW(q$0R z%Z&M)2rJrXw$AbpLSDtHR^`D$_)#CGM92t-`zS*TD~|UjT7M)zTNv12Arx+c$CV7k zl{KW=UP2Ss(jKC19Y{0zR0~Vt*m@4cNtW)8wt2W7&e*KI-Tr*Vd{+?%M*c_=h(vFv zU{Zx~tRGONXf26r!PzSv75tlR2p4AWiFDDbP^&1QV{^ct^kAkc-*`$Zz2*B3*})|~ zu0U(vYsB@&ZG9-V;n|AGBt3g;mNrTawrjB_{Cw4PJR!78y2%mDwWUj?f~X(W{SHK~ z|C$6%BZM~EMccM0yZ>%{m+H%2;)f6nU;3Qok^jO8gGoQk^cL;FnWS4asZ@Zxmb{#) zIA_OX^|p!dadE{m(T|N58%xMT@%EArQCFE2=~_{0K^}{24G*bn#5$wbtqvI2)HSjS zdRVGB(qHPIeDe4{SgMrgH+##>nAUds4N0hBV#m{Mk&}hi4WBQ33z+ zklNI3Hj?ZZ@|{D|6wh>F`}n})!8&SZ$tQ=8H_&=Eyykc5qQ1klY8W>qQ}7)YZ1JVF zGzky(rUygmQpcIR$IXaf@H*^2OV@*w{oK-*5tEOQc~POOlLvuar1uwCET!hUu4-5B zzy+wNYc}h9#b(iiKHzEwS@3eqVTgM_Y*olE*o~M&@L*SurJl|TE44Z!64JOkx?Ub` zK^pwUy|DARVK4Q&g5xFzU0M+kB`WyK^z#c|Y<5re+pcSuFAoj&58J-nx)VSYMRK6l zck`+vI$PwQt%W@bY%MI?WFdBJh=sG7=q=;=f7Bi}ePz@Ru-syANUm~c7}J8E4;^Vc zb3*mFmCLK6%MJ|B8=4ggm&W7653Ob65q~6G$AsDDBTw`x?Xm8C3$5LQP6~}N)({|A zN}GN&27Q%ul4A|@?3lHIdX=sMsFxQdv8om;GTDtvH=wneL_RZhwYOj=mxE(Dtk-E5 zW8=A2kR)7=`D9_?`RywzvO2auwQ6}=N=uJ>;zU+_qOZ(~uF~K8)n^r#c9Pjapf6~a 
z8n-<)e}Ofw{$lj>{Sa!g9!;&IJ;G3{%-H-KOyX4ak|1C>OJI4F!FYeb01V%X{E4TPD$D9{M zRS^=k2_0^#lTF@h9|>~%t3K$&TT{S=_HS?8w!+g33uGI0`*i#(5Rc{#N$Og#)s1>< zh6?j1+&ip}I5;!djScp?2Bar;`C-+4t92iKrF^qXwLb2oO!%aUeETtl-6-ZEysMS4 zu7zub==RTeT{JODhOmhT+FEUgclxqH_Yo*;#VO;bE3hd}oS;%8U~#8KZ69Z~yqtz2hUEq6af!+FK}Qmd#m!%0<= zUsupONxpP{=d>Cce>5ObaERfYS_d!Oa4hp1ZZ-Dw*6%VZ+p)ahL>rA7GogNtg+#a{ z+wF&P)P3Xy8@}%svGX_^u(+w6Iq;%l@jDCaj`R3>>(SpGga#mJJ#}lJC3tn+ z6C|qj@DX`X65U|h?&F;m&`O^WvAx$;^VOe6q#E>{_DwXU4IkFA+9X5TV%SJV)F|10 zisd8>M0g;O_*}5FwPHo^X1CU51-~vP-(KB))Y?{fj_M3OG5NO3yz2iMX-;tAl~Tvn zHo)6%*>nr)#KNtsBc$9wAL5R4H_m^RGudZAT8IO0`dYzX5@!IzzB7n>bh^Xco-qWx z%?dO{2^1GQ$GCIuIh(z4ALVTny#lMjaeaJ*^0y+ouZtVcQ3?j>A2mQ1#5p|-8?7k7Gx?AZo|EVwH;J&{M%vY!~hCV-C%!%wa@AJvDVU(B!&};n{-#wC1lKo1ZTNXb6K9WhsktF#+oKxx-HVw{LY@ypCkoyov z%;T%&rU!)+7%jHRS##vWU;K&8=XQGs?~Qf%P-DjhaSZZGKQy&{ap^p)esuw}*dKc};58uh#@4e?EJ>wp$JU!bfb0p}1(1 zm94ZzjO7^bYz?`9UN;QJJ8POfnqz{rXWbR`wb!-~Ge!wUfecr`e!zWa)D=tA4jcjpe?M|d zTylWq>eTcREMG;dD10Zv7})F>LKxyPbxe+Q3ND1cE=tup|LH!cIerbc6DRxE;^Fl1lQVQO3kx4$H!Q|%s?RCH>U_E?`y z=(?>WD-HEK(7QLGc=@z}rga-cJL|wL*dx z93^4J3~cLZ7&3RlkOIcmIxCaeDh6#h{A|#C3{019X%TA2!RNdOhTYh!TyXwW_XBAy z|3*tOubD*Zm8oa^O(KqnhY`l5uzU0yx9^J0Cb$S9BdK0kbWR4*$TKz&ir5l$N#2g@ z`|wdX6sUjk#l*Wecfi4}beTHRC(UTq&PJtu2l$&*w|UUMMM!lfZnf5G<~-n&%x7rc zPLCN2?RpEhL8CyX+@eh_401x;&KAOBmQji%I?d^CdVZ*r$Mo&bYj3c03lcpz>->fN zZfy$hHgNAeNEcNQ*>7KKqnLewqnDZ*8Y&!HYCr)09D%!fmk1IO6NfPzodW05utat& zSuoQVJ%5dsI%E!}_@N>kTJiSc>XO=ds59RSBkKm~5@-9=9oTD)C2wH+(kOgyzJ{u+ z=Qzqag0zl15*&?E(r0_(QdTapINI{Z}0M*!A1wfAj7V|7BB(_WOV z!IHbVx|%o_qBcI&8@hlz8Nj4V8cvW&T1Y@piKT!2(cgPtnQBGx&?jvh%rxEMW^L0`_Za`r*%;xZ65 zK8&j*5w}Daz+7Z;<7)-oj1%bgvzrK=DFY3E4H+}g< z!%Ry(kbY~SNgzO}0buy)cl|t0LvAe?Mn}ATq()y5x}dIiV(8gwaXV7OlI|lN^-$0g zEq?smOMjLJRfIcmb5ZOU*2mcj)7X+vk`LnrYw^*=L}&^P{T%SatEcU;^N$s98dGfp ze0U;X2}7Dj35H?*r*gR~K!--BjE>zN(6xw>iy;-wtCm3@yh`;9Pv|5Mol^FafdD=R z6xET_R_c;S$_1@&)uf9Q&j)N`s)gHhLs9T!FhBUSvN~T#fOX(8Tw-kJ z?e#|b36YTi#N$_2^@Uw*n2n;_BU&y@j}A} zqV8xx*AmL{LumcTs{VyTRQdHnyddylG|T9l>c$4YTtgEPTKX!L!Q5COqnV+7_DP>Y z&&oOw%uGK+_#VeiRzy5OLjbp%L7a}8nNFNWQCu;H{#C18m>FCF^i0MIHibsJLC2Wy zQG}|(uKOY_naZ2O9Giom;yyeS>PqYJS3AV7#;1v_9fG*s?|%%6U%0G+5wRA7lW{UB1lix5U97FJ~Sg1 z;wghNlDmoT&4HXBx-E4yo0^4y*GZqo@q-v)alp#TI$VbRIN;upaON51&%(NR=L0UY z_b%QqjE2b<678KSs zVmphMb1gQqDpv!CA}TIIIQU3M=h5TYbuh=JMB5IAei(;SF+#=Yzw#+y|QkxhaQ2=c-n^8LMTP{7yN( zO5=u|$}!Roa5!TkvT?a*%J$5MJfFCzQ{AZ~C}1ZR#B?e_WUAvu*u#~jcDuUn4*y)> z(bC|l`4GjD59x|9RUR|w`KjXPryiH~G$8)q{|L>N=usLxF=lqpoByrB8@$g%#%=h~ zF0h>p_G}Fw(Lj1Sg~zH`OJ`{oMrJRb{;SXJmjv@k(p1Lupy}gk>sh9^DjzP$HC_HY zw^Ydu?MKs7xni9k%Jc;B5Q`#8=S?G&mWU5R4Bt+%Np$Xm($B#{;P^iuz4Aco9LTLz zA)riz%)dU4s|>ZT<&ls#d^dN2nOkfqpg7u{TVj}OY52hPS6!2T4|c9i8|(AG(!j|i z(?;$)Szz!0Pr~xcTlqQwBT3Kilra_OzU9~X(Kog;FMCGOu%h-ad-CTTnzS_QH_tW3 z@IF6cl$fW_z-3)#8XO|2thnf&vlpWK@3e(QEjLOhSCa3F4b^b)$=cD^*@R#usVPQ3$~r3`FHHWKhG^%5a(KSf&rrNOKSP6z+@f9wOdbEtQkniw6*H>C8c;Nhe3on?(SvsFlKxUj9>@yka#wI7`;42D%K1KAwQlH$Wq139R z4`Hj4y0mM-?L*wLezm*VH+^9%wO6i!0Njhlg0r# z%M69n(CUg;efJKPC$|k)7+**(!N0i*2$oFT-aKaf?>*pOZ>`YZNRV*09HtbGlDS%MSz*Hq1-0KM zMOPwDMU*0ziEolL+*HykZRTcCvGVvd^f~>V7@7j`;%^S7`}QaD_6`Uj!VbRIn3LfPEJv#VJ>{23Jppho;?6Fc zb=#akMLfg=^ACbYnrwy)*B%cbwg!=-M)3!8!GNO=KVxe{fV+x5qr^pK7G$U50aFsqAB>G+ksVomL5+Fd--MAF4s1dM`>l~$*0eR)59BZMG zJV&ij*sV~vR58yjAifAaHOsER+`BDz5CI=5Dl{j4B%w~z`6?V@WHezM7yj%NXbrnI zm&+F6e8>K(h78mLJT5!aFXtDOmkg3E4sL?bTn&F0KjpWy5F)|O1|msOu}dI=@7gS& z0N#+1$)d@Y`__N5zu+`PE1)*?&oAgv<%Bd6J}o0iGzA2WUADTW4{TVT{D0?}0OK^W zhHd)c2VW9YrX4UY>+E}$+mC6xOzbVR6zQWVfAY*&-OVwcV}`0gJ*{()V3u-cq>m*Aw z8=!0P^H)C+Dxe5RyTqtCH-n$iNJb<0!XJO5=5AME$r{*Ze}wq6T=>j%>}h9nlEByr 
zwNUM>fd{Z7ZdGhgPBFMUmeD5h!*>gd$JO8 zd2}zM{5Tq3MrKIT(8**4y$l@8dJ+AFW+;p0?Y-MFAul0sAHAiqJeb{OgZGz5CDc0X zrqsLd?(8BxoK1K761Z0{2NE1$&M-qs#^WExlxeBc-I+`n{DRj9kF33$W>Fdd%hnqB zCNS7xgHUAKPiFkBGosIa2j;Wrsx_g_vo0nvKTRzt+koNa5#P3H@dB=-jsBwELvTKI znK=KFw%%-0-s9q$7IFPjCFF9;R6NwcTnp@W3&q}777J^5&22&Za?#Ycr+dEM@VXqhr&scZ+Fyx>8AyQ+Pc`dF#)w0n3!Q@`qeyknrYM7OBVFWY9J+kYt=RDk) zLf@nN*}xDyY|>U@jrhJl_YMcLnONfWC>pabhL(w6C5C+IZ{s*^zNy3>kft?_!Bewy7y`Q zoXm3kb~^kSG-nC4Loq=9lDxcT(=})m3De-kcbI@3H*_O4tNo{W1yK3;gVoDyPk+2D zH6g86gW63z!1reP@~Ru;;81p`%%kMcq}PCg;V1P_@QHm2(=A-2lcS?_2x||_&p(%b`xm(C1ORAELuPy$^x5gwyDGUm~cZsE09UYFXEhTApls#IWFz`b}vT zVyGvu2|KTzZM_;q)6so`ChARD%}>1)WYvp`t}44_2|u71eQ=HO#P_R!*Cn(*<)Y^2 z9c*bbch9mva8Qv3fMxl@BG%d)ARY$m*Yfrfze_D4l{dRTy9><4!5`;B8Y7?VMcq%kqvesKtMSt<>c<&=4FHIFWi0qTM}#YZW= zzWOC7Hp|X|9k%eKOZ1%r^l3d`96Iu|9=FpWfPwuFrO6=*@meKcko|h!^keEm_I`kK3dHcmQfrvJDOq}-~TW2 z`7etBb)_5$d?&B%G`)^+&T&c7)g=YZHpTa)A^Gef#UjY;#$3$@!yA|iM~a=xsF$y5 zY~%xV)9s6guS?RX*@1-jcm2;>Y~OyxxgIQae?G&r3&BpTXx1R=@9E&KK@KsM&&t-B z+bxqSHl%W;`$>G4)l+GT%814P9e8%Xx^P8AY>!(v1XpZNzrqnZ!sB>q{NzLX{ z^Dgpy!)(Nl{31;SgIWOJ$slshAg|U(mh(2z9^)79L<-U4SMMpAFnb~UIgOASw&=gN92f&eZpHw zsn=b1!MH-kMG=uq6LCzK#u{H1)aL|-QYTUrL2E>(y`*1_vv=4|Saw`9d=@bcAnlH) zuzdS@egDo6?NvK*J5-#PK2Yy$eaqJ{Yy1R%5-_hIrW3z=px27as6D+JrcbjAP{dwU zjXkdnqiT%}rz$zk$RPNS{9`|s!Gh#0+uOs1-E<@c=q7E+T3#-8lDeqgwP7^Z6^ln_ z8%NPDO;S&=@OrTCVh^t$(tU!19#7JGg3>!NGbeam6W~Dv0IXUjc6n=^hpoQ1OFWe! zomoT{p9ZK*+CZhKu|s|BiP0+3wsWfz&u0hJNu-$Xu92X-&SS8I|6`vhm|)o?h1Jh? zAF!VS!R3m2PuoS$SJ|tlAHm!wOt26|KkE-yGu5_q7^ zdskFos&cs{8D{zdz^y+(QM?sX$AZW2Q@eq#mJ%h5oalTwlonP4NFm(*_^IzNlS`BA z;1TlBU5Sciz^I|hd6Ohcg>{PLvwY2$jWI?`m>M>PXGrQf5_E0Jv0Ctb22{|dnRMY8r^Li-R~S@*_>ug z1e7{|W)}IPGY(PwAh=H**I3dRg&CRZi^*>E7{e_S5pH9~-4$UYor_?4^^fA#qT9&b9LiBOl>!M^ z#FVj5@d2V6Mb1tU>%nxSL-UgurW+isVAxFeX~Ce6HsqBu48IV1prJTaPaxbX7N>x9 z4~Og7>psoH(-_F)~x&9c5P{Q3!o{22CZtm6WjM-y_&Tw z@2Bfz93b4M2uTJ$)=Eobg4`<-Wr zBUyqmkvzY=SlAwPe~xEtUx8CUc{gd9)-D6C?k!on(3}m`yb+##C^LWFZ{x-NC~Q$O zuaUTgNI$Aci9P)z-7RCy*)8){^(4MxMi!YL>~~vhsjIIJQ6gtTwrkDo!>l@#KsvZp zz%`OeA2XdL^x(+Ky$P(ay#3CeNXM)<;pV~hgLzzBv2OYl{%o_>IwY$}S-`bZF%o#u zw{C{W&X#%=aEHk(xz!;M(&Mf-?m#i|%HNLRd#YV3QV)=9lhtJYfX}3#gar)^wyUIY z`g2)krVXJfMLTJdqu6P`E;6_E?FbV;l z!`@rJMH#*A-jWU_Dcy~L3?((Bgp`0FEes73Lr8abNHP5JMw1biX&> zXFu9st8V7Gqomo+vQ5@FDW zbtSiCGN#`LNK&;%B30>9>3PJHl?xaDlM$f;zr7PcHm;NZGnTXYl*%PG0TWa4AP5ru zMar=Uh~@iZ^_)YwYA$^`0#ftsn@h(`wL;5isR-!EvsQv`91?Z;73FIkZa)~}W&WR%i_HUUcR+0Ql4Q07zF#_x-V>nx}zdW!tnm8uTjFp|T6V>&~(j#pOn7EoP*mS249S3jVdFRz%9M}hMl z?Mfy&uet~HI`|C;6gVR#h3cs!)^%C^ilPf$p0<=x#z1W{>(D(UXEck1G_z5>RnpV4 z)A?ZMYno^`aiunGz^rb*|J=8AsvkLv_;JiS)-@^2og(CS{HV{4$D&P^XsB-bXBgytY~e z!#75i3^A4ixO{!n-F)palgd}ZVYUUBoO)7^F_tlvdO&f2w+ZdhG2Z6xUbJ6{}slgc);>oI?=|O1G z-Yk`9c@Ia4q0d-zB^Sv}He;cfJ3>vfuXSJ7HW&q6G2t_rVREy7sC3KupHf7MZ7sk6ZCOHazfvW|H_Q*tEF8<>5g=!CcXE9F)%6&kX#T7Km*_oYg7_gg`Jb@CzA z#veJd59aaav~5sn<5ScNV8MNU=DOSIp1!`Ty2(1{=rHGKgg4<9bHe^TX6{|0pFOIO z?T{+{)2ARZKPE_CB0RpkGug7RC{BV#Sx`me+LIiYO&#l0dp+J)Fr6{;EdYN zvZCHtkUmnS3@@YQvN%Su>#nz}F;oT*A)UD%P)Q`pZ%IyK&)fC85WoMDOqMYpu0yl0Kfd}+tOl9bNVIwfF^eZKAi452&PcC?2ot<*twR+;rz5@A=jhsrlPNs2ny)BxPm$##e`KW*kM*rJ8-Q? z`vv9e-HbkTGCjJ75wE?R)c}jw`a*fG&(hm9f`zByPAqE40zY)dxlnar6eSWy2F19l1q=R?qDGtJTVJxHQtov#^vQ4` zoJ_L4;Z$hR5@z{qV5@d;4P?JjlCR&(Wbxq2gV?BR%s0dyp*PuQ7UDcs;L*BIBZ`f} zFn$dpVWX*&Ag!H#vd`g9FnK zb2J2so&)4twXl!Nq8CwxDGE>ovA@WMKg-Q zdj3Mc^ot&P1#M;7{UCWjP@pIuJYQvizYp3aNUfn=_38qMtr=)rO3{g0i8J8LDD}Mc zym{6{OBT0gGv#*!I?~%Ka&i%}z%nNpwt&PpYYW+^c6h%yS zK8kB=%1`E-H#%jon1T>~kp5yWY~ELlPdm$%29e88GrN1PR-Qk*`cR_BD1Zd6rqcjBj&wPA_;! 
zs6tTgt8&ILfmfaF9IWJmm2bMC-a=vzaZTe&#skVWdwsU`gUoHSGi^>h<n|6PD37P#ukdM(D)rkgLi#nvPTq_XblqE{Gwq2H7xow%Q=)Ofo|IN;P=WW3-4jT#b@ z!2$#eT0jt1vIXm~W`%xvIfUwBeU(y678f648Uq4nh96lf5e?K{yFr%q+uer>B98lz zNCk8iO#zKG=C0Y2F;SlU7~ng9;9`5jnWWRi-$=~)jp%cnM2^6+rSGhV*X40H+-&YR zt4P*Q$5#0Au3h!Ak!i6%n!Em;5hzRUH=-; zg()$5+X8P=Bkwv$7!+iaI$L&&xPTOx1eri$ANDcST}nU2fiyT5)oD!%=^M} zMoj~Lk2RVI&b$hVg8}XRjG03Z&m_O8v{IG_+&jtWe`3Ftl0H?6f$J_v-_!Bv9xbuO zK0SZiE*1hqJ)n40uDE4-oK~FGV`s{(?522l44V_C`Vg13tt6b`20aa|)MXo`bh;Vc zRt5oOlnwmV%m%m!5)26mUriOjGQ01FJAZxDSDEAjVP~wD;F!Ic!s3K^CWqt z=;>IlVPWoIWh3kJax2GaHOJO7!c#(#zETclzoQ>=$QrBjW5ZW|2dF4o`vKFpe|`W5 zAv<)o7jCL(aL@Mdwi;n2r(x`4?LBAP*4*9uoo01Vp*VE+@7)?b!%;$BSp>C`dXcau zYLb4e)Y~Vli|pLiq^Nu{A``lZb?!p&D!P1WsH4SMU^1w$PyZZ(&J-g!8ex4vS~SBy z47VSj?29)7d>Z%U{uS^2de#k&vdiiL(hCpA(OJ(6>-Fd4ZXTWeBO2C$&Id&bk=vdhZT&3l3^Fb5B_> zb>2mJ*tOg&8eVzw@LEoy^K$J#yY9+1P-40K3;L-lt(A~Z1~eP0|l(O4IRAaV=d%hpc3EpY_ zt#_sX7qR9a6X!pGYiFg&m(i*9R(Az!kh)*DUbw zKuaRD>m6BbVkWQ%stVY5Y$T)*Q|~Xl9#=7V3*DKDlUo@SHXnRZ$^6EvL}x&&rkG7! zt;4htbD`SA!)8hR^ZNcBdk7?hjBxe^hup2pl5gBa2nQK?Fy~p?!OJ{<F<4hrMiwuYwgTJIb1Tk6|tgDrEDbWY{dSX$nfu$Adjaqra_3O(5v&_J0M{e z&cX>{Zl(G(GouZHflg)5H}N;8lJK!X{A2L;6m;6MH~%y~hKz4571~R_J179tJ3CS4 zJQi0Of&~1;^6RYh_eE8n8=(=*nNskk>6HSzDxg>de(nuxlO0exfqZFe>wK%V1zDeM zK2ga8XR}r6PkOl9?55S#)B843%IUOH$M%hPRDQem=wdA8{_EKVQH2(_mEbI(WG0TK zO&AbHA`-%VmTOLd738?P2x33`w2$V-tqQsMe1Py}RTvvbYZVc~$V;&eeS z-|{!_?mXC0HsF_LCyS$AF_^swn5L!Ms82Hrivq_{xcO7}$Y2y)ud0F7eRpjOm%{ zN$BGqM<^{YP}E9=6}A9L1Nn*#x2Wzxzi4&5B(IX-#JHjkqw-}os?M&of*-X!ZCNKx zDcLub|EQd+njSA`e1h?8wg0111pG z4w3s4{xkhaXSfy_&+Phl2F+lgsly z#6bVTj5IK)KKBF^-Pzm}zM;NV$HgmSW1O`t4lAMZaYBEV1{w=K40+pWQ2@Q?(Idb| zpBx=%rEh4m*`aXRS?DH{ZY5DnqXE^Xq#EKGcIr;Gv}Bi({t39$htEk45iK?CK;#6K zt0KbleS5hFO$+l3J_9{^CTXC=Va-9PQ-f94(rz zb?t@T5)}2&-Ftif?2(v%ds-q{pQrIxufVQrV@+72f=k(7q<~bsD>UN+;gLr`C!sRE zzdSS`;j`sus3-a$MXntAV?r1cMwq=gT928G=rGBi#R<{Pbk;0dH~7%R2ECgU^~uJm zF9)oBx)8C@ooI72*8}oOvq#FX%Lyyu$cI*0jOix4>}aU@Oy0Mr5A(g9d94?Ofm^K? zTP;9U8&4DgA#ilEcr+&igSDvsve+siQYKfPebySs8c3Ln=|g4wda`5|8x9<5+tvPp zUVu!l3jFyiOfgE-97lG4dq2W++3j=vA7zDusq)t#SA}iq?KSUOa{TpQ=~f74UwATO zS*faK)H=s&ahuLgbk$9YwXpF@jbZ(VVu`u}?mKAiT9&1XNt{e|!1Qdp*}R~=a=F)F z=GI?esu)dZsohcP$tTYN6{}C{U!}Z{@NVc~9akd&LZVkaZgNXQofi`vf0QM$mvXNQ zR}B~q_Ae4x|8;2+^$8xHNE|9L#IPN_Vt4ywIeK|^5vjmyV?(f;QF___0wb$#%asIp z4sH={Q`nSl?a`!k;vfJS0hBq0B6yrxuIvZ-Q+rV>4~KuP%?}O__X*Y`!rhlUjqccp z)}tyIZd{1d=yKM;(g7Q*Y(xKX$aJ8+G?ZNR8w*rl9GxG(#}@r{%(xx88K`GBxDs5= zuhHG)B+wUa6(QbveZ<_2M~pJOB_|qUxwt`aUR|-!J18D<3iyq_MURiTGBsG4DoZTG zx-erPZwuLYf)=#Qj%zOJCJ8q%hVa&eHJ_F(gKgm0w3SpUf`s?6$NAy}$&c+}_VWNN z1Vo{5yLHBC5|OdeMy$R`kn9M6nxfMsuivGrzoC^QFmPWDHFZ7poRmBwLXsNz=eZbM zSkH=24c5z>S|6VFd+j1-UzRj0O-a-}M?cw{EkN$a(n+=(QkzPoyQZ9<2C+t^379_P zTN43WaCf*Daii*lDEEmoEq>Ris4{@3I%=%b7pSOj;y(HN^X}HO;x}88SvG3CgB_|{{gn{ zVRmDxdCn!LS@ct|CZ>+%q@bR|Ok~EAsr^IpUGbCrLw=ZPXmQ826CTUO*{m@vp483! 
z*+oz-#~Y2)FQ>eI0X2LFx_Z7Sx9><6XEZ<>PrYYKrke=}P)8!A^6C@Jq+c6g69?e1 z$)uBg%&curR6d)XwG5Hlg5lfW0aAnS64R4~inJ-_E%?)(MGh@wACE`8>?O z9m4I1W*l4TS}Z1nW2?-_%3l!9D#lm20+=R@84bioTXW*dyHTPSTzJ|})Gz} z>(O%P?p7f-B&fx>Ra+u_dOaS5!Y`wtYmVMM7|&+c%LI*^QW4Jk7)^z(fD>fqO#yyE zVAvA5fwFRk{CL3ekxJpxmagq`JWiY324YBiNTeIjAN&VI0q)bq8$RiR>7{5{1}VjI za`Sh(3jMC9E+tEi5*;hg$b8sqWLym?;0ghSSynq`yiB&md3=C}erBJ9ONtO!eBX!*w%RxKe9%`X@I~-A|$) zuk4Kx>_z{WDTkTJ_-*@kJcwu;TZc1Fk=TK7;~)+)uf%tmyN8SGGL~^@U+UY;J@1|U zXA6E~Bm5Abqn(9lpd@iKc9%-V>b{gnQ^lrJv3YYbprf9KUCygQrSe2ZnT26mTz{K` z@>7|FB}y=s0<_Y?l$D3v)FnDtI;=Wyl@(-|`dp~|9SC3dU}t$E`3QttOC zgtrJo=ZUU!Gx06a^K>#8)~}7~L@khCF7S0^8a0(F?Mm3d0y*`PeJP(DG*Hdev)M5N zMG_2>7^X~tiUP2-B(EppiIO&!Jilm=lWNeVCMMu7znK^kahta(sR^ZG3FC!FLVrfD zgl=TsCkQ?x3FNdNxW#5tlQbWF`LXqjPYi6h613*D`sasAf~o;+pcL%owOa>U9D9}u zZ_to0WxUj~^!Lyb;{9KX$PYg1Hy{-Cb1r+Nba3{zdL zdH6B!P-#)ByOvj&JHP+bgPn+h{<895v}cqRhrh;&)};3*RiLIl56GH>*8mNAeFD>I zCpy?kn}2Q^)88yFQzJ2>Up(;uo5KGFzW0m^YduFN{Ml~ZH@are6Kq5>3Fb({SmbPp zn17EW!%DH9d?^QxC!uWBK>1r}Ar{{Tn(BMaVr{h+M4VVEg*Q45BfKALuL%pWSo$Xw zL2L`}2+P@W_)9N^JJT0xu+Bdx?enkqc%-+BX%#KDoJcP_98fV?ak@3(`vX z^&!HKwL zi2a-8j#3eX%C!S%Sa}POh`%Cz-=`1mWkO-d?#N~InDinMl^C~Go0dL&4m`EI(67s+ zo@sfqF6=Vc-?LTt*?rXY-Z`&Y<=Z!Dbj1vL9yUU(tyh*L4SO}@aZ>P|=oK+)({s}l zj&F#W|7HP%R9zF&KNXTJo7bhTp82Z}orEsgkfH=21u~(+$JjNU?>($L4GX@=(*F{D z9l*zxB`@)#?t^WrS=-t1UM(kw<;U-}SeYPq)8r(=(;+*gLuG1qQiU5CYJ))Vg@q@X ztB&U>EwD8M1cyxHB^XF;SgZcfgWm5#zimOfaGVQjYI&cc58WQ+QoGH?uLHyju|mw0 zv2}MliO}xXWdE!?6}hhY#zwRR)O61TFov9L6|hpVqHA+a2KOMjk-BQWtx%0v3^BDU zEOhH!P;6EE7RPJOLWY+B!gfY`NT=VF@F!yVYl#6tpqgYB z#j~Hgh@;+88WU5F+y$2hhYCRiH|TrRA~UE8kHX-0l>}Qmb*Kd@I02MV7`+Tjd?8Rr zzS2D-$%PX6bOZ5A*Tsp7CCJC8Bsh$;tZ#Vl`xp?$mETwcud!~l@5s>?UyzvwDGx#9 zrKAh;Qy5b#<9;zfj}-?9$0$EPejh2&pLnS}yj^X{PlM9H{MMH03|c3!`)>WU^!aXH zNeuX}Zd(m1Y~y!{-sEY+wE#}Vi}3$?9|(m(T?`d_S6j@@>mBe;Qu!}%dJ%5Jvqic7 z$?2@J4{hZV%~bEOfU5j|J_i(-`Wg_WbI+Q;sQQ2Z2VhwK*UP7vK;`8kOk0cQzyAV! zz<m4uIY#wDoG9**%h=kB3f> znd|sh1K3KjzUDXbuE(V4|UQ*=x->ncSW(l!VfZ=8)%l)H8W)h$`2I z;#_314U0?3coxh{T>(9e|DL`7YohMTeepe7=`!0LVcxm&+{|G639*?hEL?bR%qiOO z181BWpBW3G1K}&LbAW`1)JmsDDS*3=vv4x0)*cNL}GAj`5|8FyOfB zo&`k4M*vbb6wvwpiTS6~j!dbV);0)D(J>OKYitY$DCZ^0Mh_SM4`eeyr*HcnIKR^r zt7J@W`0?L;DFe9X&j4JN?jydkXE-AYZCqu)pyvO34+VM=kPACPPcH$vd5if}N$^jA z3d(p3sGZA{+5ZJu2KyIcA-54(I5_UA9r95%cpLe^? zwmM{d@2r(j>AP=h0uc2oo7EpK9pH*QtGp zpnI6x@FSGe2AVEaqoRAVdTp0$(W^kHW`*d&qkxYZ&LC?tyP zDycxg8W@PY3k42`oSy(Oq*5**rR0vGlav60BKTuD1DyY(YW(kCyPX0$)eH0i)WY}* zw4UZ3fNZJfF<%HUH9rWtqUjLzl^-pXOIYdpO`K1TWvmw1@OPKw{mt1Fe%X4|y%B^< z4yMNi=%bHf{FlbI^~79v0L{`<>tC!8yooi?ry%y#?z??paLR`c8U0QVmnDf9f7{O2 zB`W-~Qk~(A`eu%XOb6(e-2l*(Iu%GM9WuZScMt2E$R2GWg?ioK|D^NShUIJ1Y zDs>_Y4s>9KT63cV)fE6mi#GUC7s4ThIv4~qp%SBUTuX*>lu-$%M{+fT~yUr+=gDJ$n#vJLOdY3a@aWcm)Aw z2M!*$koE_SSEwqqU=DDmeA%9rUN0d0C07l3Oh3}KNO{kQkz}Ie6#_UjB#mDF!!%5y zv$17bhP$cuZ_2Nf8sB|v4)FL+qImlBrXfoNQ&Bs|_o1OaT5rd;xgW{T6atH236wX! 
zT_Zy?(nk*S$x^zM6_=CgnScI9Fl@`Np{_K4Qa23@2IW0a$$eG)Wl&HE^$|ghcAN+y zw2dfA^id;6(e+vw{zJ7pPIGwv5?)TGwUaz2>{-&VXepVBCxXe%lGGv1h_j&!9WWc7kd3fSpT*)6%+pQSp1EcoFv1j~u_W}l=Ve$t>^ z3qks2aJZAN9Z1*!oT@uX&n5syeycjGDdhE<<|lEBUUA@ok1cUqG#0h82(D09ANe1E zhz&1hU+ol;VUE#Ew=z2X2Eg#doGdR7SmMWCSDOY~c^8UARAR%+ozzmK9vN=KB3Z{weBcHP))0jd^o4P4jrGM1{0(DndW zla^n0H!UFsGLHeR_{}QNYBRXv3tW!K&I^w^aDi&dK5)OtgOIbw5`4o51Y9L8c3tvS zQVHydt!FeSQv)ZUVKv_zA3Jgbo_k;=2nBo~%=)O#eH2y#=!~`7`RLC#6zCNvLQ3HO zQIrF7UKZQ(m(1^YE2eiuvNJdcA2pAChJvcHLB>z~vxfl4@2du!=j>0p(4Vil*E%<4 zmWf2B0FKldnllG9N|?R^NMx>!Vv#b^KlzhtFDQD9J+0(*y}mY7edI8`##(sxj17)- z67hynY^~)P?SLxZZZAf+X0!KWr8y{~J{gT5aeBP})M|D;9h+<=rOyoCDfI7vweV?D4$?_=K=a{T#rP zBs!@}+RxJT8fo{Nk zf~F$SD-J=cda2{woPXPNR?ua&R*eiGvt6rfT8kQ3@^1)Aez_v5_(UP1PPXm=e$h_m zSS$No+6Zb*S_zN@s#_ty^WhEc3#?U5?%k7$;u&ydev}OU(rHN;-3PjNPBRbX) z(Aa4}X)1nke3wue0C2a80la{OAO;s{YN-6eU*oEG2r7vZ>EY4~nU1_d%fZ5!QIlzl0uxNFlH>AqeR^(2cl?h@H zq3Q@o1leAAboTEm=X4UTPABnX8Q)w2hO-Ur`&OWvxe6>mk=MAMW_oIjs9U-LZI`|w zE~t&*`|;&b@ry-|#BXiBzUs`(&HaePrfzIbS=(Wdeq?l8iSWz%O~gsr7PUq9BYD}A zz4``YBL~u~$q)gR^)Igc571d~rMod84QEH=as|WUs!$^R1TzY}=)!@IO7B@x&#$*; zA=M!#Vk+i2KKmGgyL}@Q7&ggViEISLym((vikq|>xfZkZv@_<3EPKzI7R)yh9fuR= z5OtQ>g5-kSN>xKq%tJcyW(B&Oft4AP@+Vr9;&a~&!m_QB)Nr51t+BvmpiJN zk?aw7J6XdDv+$!Kfq>%0+DSr651pQ^-TKn??=cceAODyl`ItMO0)pk*5r#iF8{GEN zkS9TAto1lPPp+tV@pY*#VIv1OOt&;Hn7R6Y;tzRjRjTjTPgM676m9md;H5M$AMx7J ztAm(UDF;Lu#}K#b?HglO%9hw*^7`VpIh1Otmo2G>PyA46BimPEpVhI*Q{f689SwIW zCeJLV!_r|COc8i!MMV~kPuj%?k-(-y_CymdI`x#iv@?fy3)n43Uc0x#4F29V_0669 zs~wv!M}BS}x{#)!H~6XfR(&gyj_x~bhWvHgHzt>_u@ocf;i3%N=lv`RVgo?N%Jkn5 zXY|NAtR?aJswu1_G0{-uL~zgA>zmNFnY@Be|L9&BHi$;MywJ^ZGh7ps=Q?}pc>Fr4=Q)03_(D86QP?m=wj~E5{XdqM%+u$*IhcqE&;*04$}!OB z3`2Br`lLpld<=|u)8jj^YrS>qt=4fpT{#$&{6VNJqbvp*psq#mS1?-Ljv{p(tzG4# zHthj}`dZlYhne&c!nr$dPck&gSuXsFN7DnLnSy$~0P0hom|zTTH#$2IK5D_o3(_$! 
zA%ag$OlYr895;^wsnOVEL zVi#g5fLlC<)`-?)C;;P*jbl=9SVSYZ?R^(=1w`x)*bg@>Der2tnRM59i4MaTU#B;1 zX4(?9L3DQm*3j}7|2aTqls+Aw_*bTsMR@!!#JZ4!t}my3GQtmQ&n?MzRx12f>vi%n zU7aU^>^&WEw?}w$vj0iea!k5i#e}iC!{5taf)QIsvV1>*t%B781&T%4XT9YM;Nkd} z+&HyK*T~1;W6tx(Q=OrwtsX+C#@Ux}!XHOUKSBvqD0-5RmWHjsKCcJZU}t33TebR1 zy=#|iR|Mg|+!5g%QXyOJ1)UkPb$uNYuQ=k@>c=(%f$#6J~o#16i(W*t~ z(;Z4k=6)7Bc@^ZG$1c?qci4F~Z?p(FwsRicxl~zy{$u6T zXpGo6(5NS%ph`p@nQ>S&Ts2edCTPJe+gSr4GoscOBk$fS2wj}zb8%JAZd_j-^u|?< z6(w~)F9;7YG2SM7WfUiEZCPc#+c95TD9$y)3#bh`PIVTp-HH1MrQ_$FfmWAhQ={a% z{{>+xgcyh_MuP(yRT8K!Ls_}51qFLzI)Ysz&R=CkcRrTfFj70xl>(CrlL!-~(Ub|* zmx9=2_9r3&r=Advb~ucTWa8)^a1?SP!qnq_ky{Q9?v>Zz-UG8T&EoII0R2As)+tJCR@yYdPG;j3UO#b}TbFJtDn~aJZZ%1Hz8Tw5_0M%M$==wGI)xp6%ZP1_=3= z=av)Dd?sV%;003L?6&@`jj`(Nn)c7P=`pdbC8)EsUfT>U?=J{hhj0*eEJk=`e+$aD zT|ldKd8+U~PvtNu&$znZMce+wC|tjJqMW5JLX?tnZ3)Oi&D(mG%_bkgTSOF)$*(bM zek?(s>m)LgNipM`f?4={_NEE$7I3VNt!#vnX#i@lOYsV7?Lh~3-WagOD6rtS%5hEY zpw1x+ov@?me@PO`pcF2AvTOWa%eOP9mA;S6cJ@(DJ`u;koPxJS;+@X}=R}QpmjVv+AT?CwQOt5Ds ztx+Xi&erBigg1a78dacOWgxfUuIcGR3QM}KmzWoAknNGPA8q|7H(3glzTpDP@<#7_ z-S4oQx_ZAuK$O+ab-x&IQEZL8m>Ru14^p&I|2IC1IG9dtfFE`uO=!=egj+@l<5A}c z+r@#wV4GuL!T(IRtT5@WN{6PL?8uIlXbqGsKmZTNVX}Oi@)nae6Y%IXg&}~rs<{BtaP;*ob5TlX9{T0^{`~v5KuKKJ$@l^NhgOp$$hs|2-=aRj&wgfnWGEFyC;al%pk2 zitt-2_^m|K`Pz@TFC)f_gN7q5(Ds<|T>eNelLQp9+qR&OKosfuo3+mkGIQtq5d<-; z(=EJ=$sVRP#}nQr=jqmf3x}H)$4{?HKM7$Wn9 zt&a(rqMxM5fj@hg9o;bsFm`&gVt1iiPHAe8UqO;aqk9Qf!`xQHyf0V7nL?9z(*Uhy zV>B6;HS9<*=;|B+GHdcQ>r}a^JI7TRJIxwmx9@8WAWI}f5YApkZ@uTK02RhG0@_~f zF9J7!A&0p9>?~D^D*}i>D|*g3`}vd=cm}ybV`wD(e8^ZZ?j~EOeEOVchW>EOf& z*ywEVeat(iSQR}`UP%5vP%`o@JbkN#j~@hn`_O$8+$3^DoePuxebCQ16ut+X6x#1% zQn@B7DPA98GpI!%Ggk|FS0Ray*gdOU?SWbTo5)NOQlL>>t!nZ!b$0w-Dq~ zr&?YIu*K8pLIW?7yszYU&~>y}$yrT7#Z=Lj?uk~;Yp4Y&Xqd4bUJ-&OMv8&&21!UH zj2BDw7Gf+YO+T>g#kPL=Hzf(AmDe_#|Mewj%Id^3YuRFZpU&Fe&up6ih}R!b%Cg z5|kEB09W}Qn8iekCts$w{=vg59f~>?1Wl4X70E48jZp^!Yxq}Dob|d2a;TeqKI^LgGti>s66?++uporqGvpIkpK&f^nh{W6`2i6@Z>Qw3Nkzp& zaC^mHoLk4vrV8C^)F5o)!|bk;Ne*O@@yoU->r9G1MDI??lK9=27j9=q&+Q~?_Zxlg zI9w-^qxYNIqU#dpRqH$Eu@+y-5KeR_Ccmj--3$gW6t}2c9fItKWifTKe#i45J$$Yy z8h~TOKC}7fGcTPqbyu_}F0N`|?@4YAuy5!Wu_?WSM^nSrHmaA~N2b8Fb#CSdYgqx? 
zhkjQm2!Xqx&hIh7K2%49Gigzm&&sIXT7kA>zR0rQl+L4DaR4XAHf2+kq`oO?i@^gQ zQaTLkLSmC_={Af6^a^d`u|VZKW7ZDQc>2g7WII`fR(9s?gUYA2wTlIF<$b}-V`7)` z3BgBEA?h!C%hyj?5EQF8r8H=iAHpGLcEtkrBUbFLSlOEH{L1++or55|PbrDOvt2Bc?2CDQ^3e>(TCO$~X3&0p zqTU~m_DQMDrx{J1+Iu&po}&gf6x1eeCg72e?^KNdZ!Um8O(Os3P0O&5WHo&2I*L%q z?udUsGr;O|V{hAcxu2olau{zUmRb zXppD>eHc_1hGBDeX+DMaqGV~1}BYDT#IpD$c zzlHX&ib|L9de8~-`bKN78;YvU{%&(Y3BGnzS^JpF|G8I4$bbbFJgIV zp%65CR!5hVZtjxC@k_RnunjUBoR$i4`hTteE6x%VT&{;IJl2E0%DTaZojarq zzl(dbr&qE4sw%NjPZil-67i|2ptfOTFvgZMhQ%NHKw^6Oo0nE8hoeK znO;aZBr`d1@f11sA|A$s?3q8v(DGJb!R*D+fdnhaWICF7Zi>{z=CN_{>#bwFndH@D z)8kjC?}Qe9#$vX)*!R-7()II<(=Cl%JFDNLCLBykM>)6>{G^w@=4W-VQXfar%JUZF zWt4ENq6e{u47M#@$e7$-F@kF@3TXf@f}&%sJ8(wnlcA!kLB~l><7x?PWyF-mpPdju z23am%=bo$FdbKrmuX`8SW-8zv{)){ZCsa4;qwN|DQbbadzJ&)~yh(f_lPP?4hEPOk zLVPD-JfHdx`8zU%P)~9nwAXBs{LFF$*TgOB>uud+83rmC5)KOhv85j1ADpXBJzUCF z71QR2GjFUuS3Pci$HGCc~uIbKRA(aDvIO!aYit6eatg;6}ic$!Aok{{GN_uK_TwwXtjc=d1 zoYE1w85Xp0gg8~z*B54cmkM+wMM!@m-OQ9`dD;2PpRFz4HuvhSvgWBPD>~y?o?|!G zXYZ!5ye~}P%v%PZ3AK^Zo{}LK+Q#n9Um>j=3X~RD0D;m`{AIS5CT)5nb0B2TckzVO z+pi~`j3EJmZ4;usr1sHmiOxhcy|;YSNN;Vb9@)%0|A&EgXL*)$k9IgsgA&o}3(Kh3 zpbD$rqUw1uB#hBxWp8#^`N@08CsFamIDC31V3e z;E*HuXRm4pcQ+8FyXmg@%c~mDaFY_3D+%>uqE%<95oIYF+Fdw41$;iK3WSK*jg5Ff zz8I?zT>jPa3cuG~o>oP5LyCAzo?M;CSz^~W zKXmwwlB9CSUl@`1^Bv3%F*C#dD)%o#zSpVFuTOk%m~fw=;m<5WQg4(FD>o#%F&7MYw}~0j%Pk3l+SH#gb^Y`n zR5CyK$PuT^qT{!CA1f_wU?gyL|AaV)#qrpI6Mt+&W1BgJOqmz87Rw%tY)Y9{me|Jg z>>0M?c(J|EZeN=|<25v+IaZ>Fx3+OC+JxECq}O{q(gkD1q-ynP3u-U zZw9}<_BoMA3jI9Bv(q|3e(er;SBP;Rh5b%IDPBRQcsj{~S}`a%WeO+r^!p_rsj|+F z^YYPX>*3GNTvXD^&v6Y(=rgOdj)(bcFp+h#T@d%xG@^qjv!K z%P*~Z?x^fcEFz7`79LYle&Nm%xOft4bcKjj?qixZAWasq`)U~yEi5)Dkf~mmE;T9y zLv4<)I7piVybdM13Ga(7f3g8*cuFI3wrfV)Z(f|$iMw;hbZMoHm*eMEvtBNtk(y%N zDe)?t_GL3|=u;-z`iIzQ+EMx#&d3uPvX@@dWgJSJYVL;p`pN;)Aa{Ag{c6X#Yf;wI zHf4%j^R4GM8Lp#dC%3W4N2-nq+iP^}&DN4r1=U>Cq~1FSG2OChL(L5 zoJDqJr2A7n(yQiK%HRyw1xrFp>QT3_K7Gesg7OTfS~$-*?MpgAVILb>*46*<#w>9p z_uS51bCU;g1${^2x@pfBdV?olLA9eyb54Euff%IVl@U6?yT%p-g{OObHxukFG}+tk zBn=I>!Y=(?8c_%KCvE!ubd{6wuneM$V}U9nOn`AS%s)2l#r#ttZxP5>8jLAB%xmH? 
zWdO611=1_iDK&toTsG~c-y%V8~{p#;ec(zK#$MB?!ai~1wymk%+9H-my6$q_- z6}6VL{(g1VYKF;rCc3n-?5j@^+(uhxS&dU~%GdkSWxj7_x*)_nhYeLU{kXq*`FD@M zW=b;Sz4(&nfmt$ir_xnK$35k5quB%^)_7ARJ|Bzg^5wsU05$sjrsf`EtMIF-R4?El z_lfLW@sJloep0-`v%Qx`qqD~0;?_MeRc*#KMl-h(ily_8ptlUN+onKG7F~;;7fIDJ z(n790*~j7yM8*}B%VYa$ zx>-s%(stw87EeXcTb(5|#?dEO@lF>gd25@|tl;m%+=&Q{2{Hfv*Z>-9%6Ko_0hNS1 zhxVd(Cw{uMa1tn)2hYoy@h6{6yTO7+hLLsDg!CNr)<)Ne0R^@7S_keE$b&eO6#h6bQD*W-;bT ze=^m3v*&g|p&W88fV7P`c_9Sl8aYciWK<;I@@)tM?=PJh@}L$d9m1Hyj@`*L=wKOV z%UZV|e4@Dyc^N62Ju{)aZ1O5Zo|vM_HkM zu|C#1vUWjr*8&XWgTt+RdWq5S#VK*zaeL5Y?wfVE?}E1=FpldS#s!n$x9KuG_s=}I z=&1V4cA4T3PKmGN!CQnWxg9_fm&AS4fTqQxoPd*vq9c*9VeA~*MHazg0%7h$mCVzA z=WJqV8zxGEtN6MmNy;c_8VB*IqIOK8W$}tK$cxxwH}sf-|0XnV>Db2ODrfZD2xy}} z{@)^6y;xMtLUi670Bj$r0=)U3JOzn)f=t__E9O|cG09oUF=f%e3U}4ZdAK^Jx-Uxf^VmwsYz~V`5)$75{cNN2$*Mh-qo$jyi$y?p;nt!O%QNlc^ zOoJ%=aeSX65b&ie@7UgiTk+=_bcgR8v_&7DiCG!Yn1viSm_SUHooqE*VQUEbnUBGL54!Ae$|+;KY&^?Op7=; z|AMu2dmfbXkb5g+H|E+uu?d6!y2xODgQywR#hqyQBe>&jF>F$?HFo|go_4p-Kb97j zP8T}~iTnrTRqw<6#k%8;DOO)Zw&pMUr^)#|RXp6Qu>HqN`_Q+e{HSEnQ@1QTQkLtS zjiEXfn(V`pb_F;l@$F&|C}?;aOCt6-vmNjFu6kNujR5jluF;BCAQUGoy6j8oh*o%R zXZgJQxo4UyPfTqp`Cca7b}2?(%A~gm_3UX9EQhqh*XLu?#*W|WdCwN!Niyztrqq8% zIe0@wiq!omBW$)7uAt*5JxZk$=@!_JLnHVe5&dqv)a1* zD~}Euv_t8T-N&2WED}4hM2o*B=z2d3Xn+jng}j}j0lGn8q{h z?)N5qhLtVx%F&_xhGmCkiMORAHXjVI&`Ng8OLfJfK?1)`&iI(og zGxtO+!svxK!_Yvz#-5ACU!0E!ZBWXYJuZtkW|HtF-x}9_XVo%o1)RcT%iG^O0eL#H zGNDA4Hw>LP!|&a)`mc$xRC40kH~nM8!#MloKbYuyNWXZJ`zp%vzBbBx`OF`g8Q#~Th1D_Vo~hT?E%D63zZQgHTM|5M-HX=mW4Kwm^lA@atuBAc)NzVD2&Wy& z&_^xF)N9Rd}5yBfi}c zm|FbKp+_0korI{^jCI-j=#zE>R`UpG5Dd^is)o zWtS~ZF`vV{!}W>4Z2MeHQ)OC{M%m$Xzh`HKAImDl+Xl#MG7Gkmw_PrEP&;%}1iVDq zG3$z1spU@=!YriF>53JfSrI8s3}mt5IR^KEZK8Dr zCyP%lX5u%&=nI553bDEqPw&sAms6dpqP;G|szJ*(=!?!>>Ry&5bYX8bz~I6*8m#Y7 zGTs@=P(+}i=R(295oY~h9c&XM=!=`P8EURhqs;PK+8q#$;iSMLc=_Y@W?63n{3XfHm|D}(C;GPc2`N8 z=ho-K9$1!|OkO4Bnn)LtN#n;VjO`FGoP}V|odAi!2hEz@j;tCVW~`u#=3VCjBXV(8 z3z`rr$P{JEd0N(%;>i9li zD%+?CK+P#DE*ZGpWiJ=fJIKG}_k0leaLDSQ8a_NL=24Tf4O~Q`{P7;L(=!PpAfXPr9TZ_2KEB`biH4-{DQ$71)v=Kl z^`stwk5dyL{A>SAQ;SzOTE`Wb(mb1iEr}goKU|&Q*)SLj>a-%rFv(l+ss)Se_N5Yz zbZx+nljHLGz#vd07u>$P^E-wy2(yGis~8Q@nB7;%zPuOymBs0JAV~VLVKrjQ**C z?BUt8?O@ng`{jY!#Mvjzj+jlUYQ?uEicjrs5#du8kXpXE)lj4%B zwir+i`8f%!bBWd3)OZ$vGGN{;f#e?fb7WYJK}Q9aPn9Q}4U`MlX9A0_`J5?vodz-6 zd&2k7`XoRL(pR&6&ocqm=uF072E47}lE&tBs>6J^Mp25Sv9X3~wJn7w55-o;OF9fV z0Lf`AocexkJQ7YCnesDOo`+O`<@APWuT%eOifAt$7?$F-*( z4+d_RV%J;$!*Z$~X$xm<8Z?Y)gMaa0nEwE`A{I#o3MQwxEUWRss4@bKNwl??Kc=3| ziL&>!pM{lhsh4+7LyeS?n$2*q4|$+hkQiTB*@_y0EMo*~8b&ZyqJR1WGkQ*Q13XIG zln|uxdCztki?^mlW#R^DU3HzlDA+!QYRQo=K1zm~s%lyD8v0~2l}~0Xc@*1`LAP|; zzo&48o*utPF{62W#w-tXr#)S(Y%iW(zq9t?v({tM@0-^UB455-;>~P)ASuXSkq*ft#}_ zhDm9x$Lyl)BEs4K5U6`)KpcjB{UbhLD4@I{nvMwCIH`L5UX`3+y7A<$OY^%5w~(6f zF1TJ7@=yw=^=F}%d(V!Dnow6a-mc<0-EY!r($k!49G5$Sx!z(&_~C|0mUmI)ml_oM zEZ93IIii5jwJ$Wfk8dYRE{jM0{6yPPSBiZ1Ik%lcp4fFau)-k7^JN{mvWFkSFR(t} z3V%$~*v?2Pb$XyJf)N3aDdmFKN<>kHmnBHw>{2qJ&d@wtFw1u+Fu2^O6Xo0XnJ(8b zYGpILL08U9i*onfe$W!~(i)ZFT2Lff_IHHb`*Xt6N6`=m@YkR*MBUSG^kUc_paUfQFd#+=o! 
zPq=HISSSF9Id)=D9|%@4Lhf~_Yr|TMEiAna#f39Xb$r%UT*0Df*`7apkqA1jSxTVd ztD6&7(vJGydjaWzo#)0|fzHNCP(wS6A+xUtp>XF!8fFi%v*pd(ot}_f4_53`r)xi$vKy1&DuQcBEvRTXG*r(OC*YKYZ&Cg zPogxepZX!{rfItslV;AR_dfWxa}fXLga5iY!KgZ!ra_ouQKwH@HB)1zTE#KHC80V+ zno6);*XJWKUCdd`E`!?U2B{3I}($sY>IJa*z(fqxeLxS_g4kiokl z;>7De%%}pK=O3wKS?VQSo!o`r$oMh1c=S7wnWMeZKsb_uEfvLTzc+3_7Tk3YZ;>a6 z7F%Qg_|@7XY@zMDOzJ-+Gh zK}B<$`}_}v)EiN=XO%O=D{2pLV8VpGbR!Vb)mjmVjxAOgV8NS)v^j>ap=JLU$DjfO z1{T=`bn$ECcg*N~pY>O?ER;VzMQBt+_#Q6ww3XD^?;uhl`MfGDH<&2IXe zJq0AOpa4UP6YAxE8vp;psrrXx0)C6t%|MZJ`G;SU!#?l=+RgX93K;*(#`@=%LI^K~ z0k4I|*G}gT{3&0ddl145ltNtYA6Q!m-qHw?wW&`48I6B2ZY_IXg;P^ zz@%9-&ZoBnjn$)3>io*!_d&WqfB^}gK580usLp;gq#JKfa|02&Ps3*dE(0`a9fcf2wHX~zQSmPdu~1I|DK zfeBboSn}^7x>Jraba8m3SxuqyfC$7ZKZ~0`s7WPlkXt;{VKQ`rcq^;#Mby zr$!wL>;yLPi-FkhLZk>9z2NC_i6=kAGh<~gP7Y&W7w9�_ef3bQa+P$h;Br@bI(& zA%fAspK*D=KdnX-Zvlo#i09w}P+AQw%7>sHXLGkVLI2CdvFFo9vpd4+wnfT^wyJ027W>{&Lc~$4-0mR()(;qL!W@|8HaQ zJnv8Q%&|3W&d z7J~TG+34MX;5PSzh-%PW2B2`XgYJE@raeCvU+8=HV>1BN!nqwth2YBp9?#x#jC_2_ zxa(&i&>ml|s^^DG9hop>GA*SsRfIus4<7Bg`%vbyca(amu>Dnbt>e(GymAiBjU+TF zK%VrUg(0&VEcb8+_&RQEsgR=KC}P$XI5L{x)dG5o99@YRfv2Yp!ljk~cf+2;VMv!r*A@r;*clVs3)SAY9){R+?t7^vkl zq<}LDWiK<#17R9zYDEE9WexL68)1EJfbE%w&y#Wk+}etudx zc@B_IQb-<2*OC%PVrme@+yXoYLW&k4A|`F%*}pEKKcX{zV{YYfT&Tk&NN}1%H}@^* z?(|%ThP|i|LYrl2JW^7lGv?LT~&2v0Qj+=}|X-FpjW#R zj|?Pe^>DUt0a=7$t{htf*>cSK(~^yRg-ak8!W(GBk7Va8fD<;v<8si&Ky6C$)N}{Y zWA!lC0`OB@5`l|t8~Y&Ni*U+}*m%eH>n%&Zb)Q!>XLk`=39CK#AVEa7acdDNjNlD9 zpPrk?ys$IYrIS!-Q<6J+{rY=Mi|JA22D_2JVuCE%(g_I;M!X~8kl^za0oGK9wO9=N zPE)2{ADZ4XLz-S7OpqYJ#c%dm(^d8TQIos0uya?a_d=1Y%OL42I^UiU*U!@S9`9KW zz!4ONwW-p*1f?y!r|rOt24@GDV%Wr(fz~W{;dU2J%fkXU1WM9%yEi= z0?|(wsxZ$61RSm+AhdqM&T|52lf*zle#NwAHrs5kqwXA3oEWfXuk8mg9RTm>fVu5s z?u6r$p8@IrJ_D>8m79JsJ|r^@&W!l(6$r??H)hfs^YmCn$K4)(7q-638p0^* z3AQp5trsG5KT!nzH?r4v)F% zY_ZSj9vx%Vv`z2ly@GC7mTSYN+p|+k7 z(+j}(U40URQ7X$d_*}Iv$SR#W^pJQC>sb0dusY%k1fOgQ?l)-l&YMmG&iw)YDO40@yD~Ed&pVw3V_PArr9#kQ)9^r}#3K z)**7X%4&ZdR0YzE4wAWO$12j`&Uh`}rs(W6l}E*9Dt{ z8k^QEtSX=Niho}k_ip*|2X+-(%`6=t>)-88Y+4%!?`U`Oq`Ji_Lg2#0AWD3Fm8U~{ z64F#%OE=3C-eNgshy|j6+o0L@N$1)9xDq!S7Ym5Y_4IN1iA)k!3{a%=zO;r~$kNgSJk*s^`h=6a^vpW6*qDeJWWfza0xEEl`zq0IHRkQ>$a6nvq zD`iWDYDo&=Tw`ZYwUBwT8}$Xk++v=ga$W;=qySc@GlCz~*&G1$wGPN{ciq-_S4=~( zzb^oq3Ze{SnW(B!l3@=UeHC!J>PGf)rMx@IAQ3nA1#}yHyv_ShR0LKAG8ILY)rb=~ z|CWXS9K0)NA)Qa-rO}}KFx1xlC)&Gqjzaq#=c_9Xl7-9h0rhfH&rgAKyxSEozL@Q) z(kYRjntr+_UBN|GQt;v3siERK=%!kk#^|$V>ncE{U?fs>ylD^$pHHOL)ZZ|aZNPMG zh#(W3_3-Z^b!9990gB#Madr5ozj`%I8+CTIWt`y% zvyo(CUF-Fa!PqIg?=U>Lh6Q4G@hozqbI1ma$E zuNT$oI>5^Fww>(eo1rZC&}ILWw__?$lv&|XBPtWs_lCm@JkTv{1U3cqJmZ3Q$Bi5` z9icY>*9izkI&3T&dl~WJgYk*<9L~921l}}Gdd4S~u3vcs*mb#2kG{!}ECi|4f1)`# z-F6G3xd;f0-^QtH{8Mu+OMZM~c~ZDG0;hLreUF3}E09!n#T=F;r_EPq@+~!QqY!QY zt%HJqLi8H@qAb~!Vq(}@G;@F`@+T|3{O?5==L=Fp1c z6?{4h9Wo;;k#x}8NmC%R&uko&CxoY{fGiSLP>-?}C5dGR7PbOmj7SiY9I zZvB17ffjfj6#0-E$gFg_{K#@Me%l-*$p_c(!FsnzS3$&RwA|UF9eaz>KlGuHJ(t3( z%lJ=&X3_fW8@LOr&1pk#w;$gIUJ6dXRX9_oHzEF>*5;JV`a`W?;1&em{G1slcB-C_;?fGPL=R6vpAmu)3yD)>{CMADk?iHDw5ICDI+C)71 z=s{qW9274Vtom-c<@iW=6^hrTR9ls2xRBeMj3dWZgo_(sQ0r$An97m%^nz7=#%Kyhdt^gkA4gzgWZkzk`@c*&cX>YNxob%@<51LRM^!YM#4-m{Ff!Spf?^BST_crkaVHC0lLFewZ2AP=iK9^qOAc++g(~zL-zM>;&k|* zyNi}`HY`d)i)=RTUe_QcxGw)@{7%4to(q7q+?L@quF)<`hLZ@&mH{8?ltPVuo3Obn$i;B6tUzxjs-Kwq5T%(Wh)P`t~{uBK*;sbyqqq-QsbHbD$ zL`#RlcmF?9yLs%TM8~sW%VlDTrKJg|rDa6g&ovWSa-Rpi}kxtD#0&>=~lsV*O>Q!lSZpFmr*#>kTI*KG=MNF@Ej zBxs&tpTUVaArVTjz#_IbcRt|Qqmg3WDq-4nJY9mC*I*3?W2l$bAU3d;^hD;$jAb0| z*G1Y}JHktIOh)#npY^m`S&j>IKIbV6OXy(Y1k2SGC_!~cvo`e!n@1_!d^LX89@Lz& 
zGBz8kic!haghvcb*FTy>t_9J7U{|Ioe_qA(APA#MjVj#A5#(>8gA}22b|bPY?#*S< zWVU^5-o;%QUe+l_=9OYLc@fy9rL=+9{k%Lp?>Y1{_CY5U2PFMaHE7ILyg`n zE;fp^lUi`FdcKmoskA$hw*X6|tKJm|<`6(yV3dK=sm9`QP(0FB86l5a%{Rj`?Qzzi zmtHGOP-bAtrI;9*bj(fVH4DPug`)C-9_g`)Q*B_YS9;LHAhFVZ(cJgASJV@-nN$RD zbN-k$1tAPN2z=FI(YDx`$7i>biFB<&fp_&YsRZHE`MK-}fd|g4kx%jVU#Vz4y(m|~ zBup!wzf!lYPq5c~lN;Aljd{R;Sq590ll`MrR^V$GS#BqYz_>M(VZYP^Hsw9&O$C@C zbVa&7{5+7`sE_S)(Om#-yT36iZ-H>347~0})*Fyf`LhpjW#{_szvD<_SHG8DEni)E z95NO}6`J9}+jds5KGfdC#)Z{+zCSCvgD0`7q-7b7@HP$snNiu$_hBp(Bo0_TQHeX~ zt=rwgozw4~>+Q)NZUio`Yz64bJ4H+b`3x1U_lZQezAT;VGj#!bHWy7BmSCc9cjZ}K&(Dd7tR=4qI4M4(L=@70x(+C z)yJF`gIxgPQB}n1F)1g~#!*-~jehW$Cb|c)dHzUt?ZSP$d4K3}M7VY}jqOv@pmr7pFjHI8%~g!lfrhNR2n1 zaa7NRSR=ZR(}_bslL~_cTETNEJ9K33L7`B1(Iu9!5a@O6e*W9j3fY0?OiDA}oZ*YNQ1k-bmaf`txC*$z<_@WlQUub`ZX zF<*iw9OWgZ1#FJ~o-B|dJs<}#jm`A)2(HCIN!2&vA$sYZhPv|AiB`&EYe>Q+s$(d_%$#geyVvUfRG(>`q7LPJ;Jz{^32Vj433+%wUrPLXD1g$(B2#zaP@lt!0!!_WgElR<(%SO zBmKsb+PpSj`jc{d;TVy<4O)fHOQXCps)y>IyZGytgtn-FMR@U?zu&m-=24Q~t_&@1 zddcHI!0+BY2;GK_kHw`aemlfpBc5pm;9q>SkAM0l(0wC>0QYQ4NOy1je}>c##0?Er zl5ttTMiYNOM{rtDF9?lNx-+Z({)3Jf9N15!0OfaB`LDR-r8uYv7>33Q|FWC?cWV<` z00(9vRx+gf@`>}n(=VIje`)TOvz;%c0l+)v?KwPqGcj~q} z<^ZSR`aV3o%*1EIT#xIE;SwD|XHcjx98Bk*xG3}f{c?h-ppgz1&%Pr$^+P|)QYvrX zR{G@fnqH&03D!?cQfc~G&{U=3&H2eaDVcDVc|se(X`}M5VDZnjUh1rMROZKc>Iu?6 z18@(YU5?*IgzM4%>neL-&Eg)tI*=l@8ZCHr>`umI)xvPPYYw#T^Yzzr{IM3zUHIu2 z6x9a1&hds{#2B>GzAym&B?I*hb*OB!Ia1j08J8#9BO4n)EGwd;qeF6QtT^{op)wW; z$@VwPRv)+n5b|*^;H7~Vd%rtG=8jRL>bYDhmtK{1t_fC4`P-rQeoZyF49eXRf;Xy9SYlPgV5o)u z`S=NW`7~f;uARnucvC1nEC|kdm>OFyMSHmX;Y~t-EL*zWL1|ISbq(>^>$rCjNcQTz zx#&%FE3Y28ZjnYv>slq6otgNEkXf3cknnuBdnarz&$sVuHu3q3nXR@n5RtRDu5NO_ z+|-hEBztTS7)r!BP+Qn*F8V`0Lx9P6Fx|dZkR@zkVPR7KnY(kZy!NL?)(de_(ZoEl zeSrsvkx;*UT0leM4a;(7Ci%u>WuY6GMRhi(=Nf~z50JvNfUv;uqJTSn>>qLbGL)Ort(M70lfaudjErohY3$!aBIReV$=qaP4_%xmV{#sAc(@b?2U>Sd`=!6G+9utm^>>?WG>wVV(t?`WqQln^O;thCr z06izJ>~$B?TQqV?(?DxG-){fRX5hZC3}T}=P|s5ju`{N{Sn&4t_Ib2RFn9x`4ma4g zP1sRhEnr?Z0;cuxvE@wN=nq5Y#+5F+!~k5)!UFWH61JyLM;qh~kSi?bv~7Ls&zuI4 z&%F7y<(`;NzGS_Be`J`3mbSj((RZj89EWw&tDSTnduiiGUMa`PnmoExzf2-1#8`3| zJw3e(3ydG->Om}<{_AI;KvC#70gNOVK|MmaiE0G{4`T`AAqExp!{>XowfI8y99U&oM=HMx&!o}2HO*e2A+U5p>cot!ph)-#gW zrln5D*h<&(3JzS(x}R!&RLJtN2oDLt4pg=z{EX+1N)+EuB1+X!dHDS;JUVGCJgF18 z#%-=lLb7>a0a4dN0z3Dm1$Ll*j&!ni#G=rmE?p8eIAHY< zZp&VMXNSoY#iFYen-RGD$v_^+8u`~pGeP!wFRWtjY&O!2-dKYE#PSyVi(k}Z=YUu4Io1ls63Bs5 zg;Iq=sZc!*SNjESl`;c0R40OK(`lqQ9b(Szd9KZFXhxh}Pkn!2)KM;_dM-^$ zmY^YFI(a&jEn&O0v+p7LuDD6jZ-yHt1O-vl2#9aWvMWvJ%HUV~5PuxSm>LV-NlHqp z%bW%}QM$pd4f)o_vj+H8S=inNElv<3zVn>k6%o%i< zJGelC6d#Ivp4Z0-x{uoLqdr*YUS?y2in3*O0rv0xO-^6Ei+ttYnn;+L6>bTUc{4#y@FI{phO_9pWw|sCGC5Sbv`^73!Rs zmHK`>8ErMGCFRBSq-hD_hiEX1C;QX+r}#)&(u$V0%kGKrhpxfh?}ie3ATT_oSZV=^ zj;nja&iOE2qN9^{Bpga5>_oBMePVARpO6&wP)A@yEMCB)a%(W(l$wg^)uoUgr#w@E z97DP3aGCq6%QKZCv10|CK@FqOuU>*wNGUP9qlGHGKi`gjN^7sOZ+Et^+iJSKxg4CX z5MS#T?+vcLQq^>62o4nJ+~J*?ZFIH&K4ns(xO9a~)R50gxy42LVo)uEB%Isrtdu#_ zn0R;Ri~mr7%x{~*A*^&9d6zkhPLpN`av;jf$pNWYqA2T}Rmz@#o|#$VL$@2u8XXNm z&xV}5hbuqcOa)Iw<2Y&)@HmRblDtT3-L&7NZ;cun(E4mXW#%zlb9h}xf_d>SWz1kJ zX70YJn0;a3K#AZI7gcnKxv23B;VOm%Qmvl2bY+>jR$bwH&jW*<31oj3%X7A?6~hgS z5BK^8EfSBNE6gT_O~&4nh@ogDRfY?%^u%e|>k-{qc_W3;V5Pqh*sQrq)G@gEM!`Z=LpmDFEXk)Bata|~GMUAzS(0`-W=J675)QE@t6xee{4%IFR#kPyg zJ6+_2rvk&X$%5x8UQlz2O!9`^SwJLPY+-VhBJT|*=y-h0D|`T=n|E{)51uNGfIYZ~ z>K(??rviD`@S1sk`Jc1Y84>H`L#WJA2TzG)#Cr(2OE-w5_(|5H4zHU_BCm;>;@=jR z#BG{?N+9ZDipwwxiyVO`rus@pIewCEoVzgUUNJW!ank((ZqU^ZO`eycAk@x;z#qn` zUBP7VEyhJ{kd1*5!786t(vY@N{J_W6^y!5CP`(Y*qcS6U0&~siU*jk!;1-+t9m$6L 
z#oPTzwYdH&D-y04?q0`2z^wM-0TvmF7dfU=BtSK(z!BGEI-DYq6Q9Iq#}UV(F}h@< zt0pEL_f8l5U6nt*;{}qPX2xZ_%WgcEfQa$+$0yuGVAkRQrC<;^10XC!XAXB}857_Lk+f8f2%Us?_4W#m)W4lzxkvJI zz|xpK4%8-+1?Jcrz?Ll@yoZ2UJw62jKy%7!%rnn{UtmZ+?xg%*@rtz=Mq>=~1##>2GCoC+%7nE07Zyqw8ygRQ!CJdAe=K877>li|$~^^J0KOCV zWd-08x|tG;p%XzSkg_(uN5G+{T>;iz5(}Nha7y7pBw#7$6s8oC((j09y3HLgTKrp^7F|9wl`84luB< z^k9_iudD1i)(kvFWAIH?levui!E{IwBO{JGiEtCR1>e(o8xzFkxP{sgwj=Tt6SMFs z>82QA*0WWGo0e>*_GSTt^gN%v&Sxz8HK?})OJy_s>Ykt?ECDE!Ia1`RVEl`ga)#$h zVw7A7AW-z5Q`aM?6f^&YKiN}*6AZ8YyFsH%36^oN0^WV9NPRuJ(iuWK9oF{JY*0)6 zu?W`-`SAvM96N-WY&M#~xW>uSxK^Z=qxrb3iPG2xSD})~b6Y)*N|gC+J`S_rJ-Iy9 zb{Lj7KTtRDCgtDGkLqLX>v589d40AfU19Te;2J<*9KJ?p7y}*SYic4c4LSn}1{Cs@ zQjyxXpv6}hLNes`7*G60U4&6>LxPagacQOBxQf>qUg9$e!hE6q&CDiOKp&^v2eb?p zO4keKpuUyCM$-O*f833Q$xHM5zZ)lggR!v;)I9RXm`vD2+WE)>(=YiJ=#)h+ zO~uk=Kd6A6PWP26;L#|jq#S4!zJoJqjku1)5E-6lTQhbjL(pG0!YDq-esC$+>t?8=;m{T8fjMT- zJ?wM!)he)YbwY1)9(f~AE8i+C)GPB%+yh7b^pW+13yPQ$jFxgv`kAunQ>>>)Lu~gT z_zkY`4GYU=9_Te=+8&}T_nGC4POE*@vHvA@etm_%Tt2Q(j0+Qvd8XtmEW1N$C<&?f z>^e~JX(@Mo6{rmIU-?pyfmNp=uF7);5p??r*#_SE(a3~f)C*U&RNHe1VmwXE#8sNM z(=C@4X`z6Z@sb+V+;DuLpiE6Kuae?x*~-S5cW8K zvw?>1a^mSG;9JM7E$rF-$>FJUN0A9D3rw?Jo`96pDm3MSXp&XiiVD~ThC#|-8j3#? zEdCr|aY4w$qD?}uyRPzgF}oQD+@KF=gQQTs2#K_{msz4U)oO2@0FS;7XX zX6kPk1)OMn31xMNk#0_KTuq?ks1fXxId>G&OEsXKPIcgo3Urit)v}#gT`jr_ak^8 zxU?OVp^kk1GYA$N$A$QMJ${JB^U}x>!xgLPQLqBkL0mqJBd&;e-i%4N^v2O9{b+Q( z9Q)S0bo=b%eUZ@BYpKVKTC`))Ss|qYd2sUGIc+^6kXFowLn^|Cv})#oslzZe9zCSO zhJaA7&F%ch_fFQI^RBA$rjH~Q(HRB-xEp<6xl>RT7yjVd&T=F@l>E*K^9y<`4y~Ze z-nWC7E-p;P>X9vN12~E}zihhF2b1bI&(A?+@uyl=o>D5yeeb92f(&8BZ~M zJT`>(S@zfUEwURBdc0Ed*|a8&?s3OAb8oA|fI{jIAbtOe2oS}Q2dF)-zRr0#ky}+v zmp=rxRAPLjNFEYipBRsDQiQdYNfdEU_?@Lj*D{g2{n{m@^mU|oye9aN=}By|IeeEo zVr0i||2VAnypCA3GVhXLtat^o zskda3E$SqG$HjaN&o;)*-(U>5>Eq*`P~g;&CI->4g1nYtS0hE(Nll6TrO56Sz|%L{ zTEEznV z8jCG+T+!pNT}%FsnOGY>1T2jAcJ_h5x|F!KidgxSW!VWQ!U5T?E?ep~i$>A>^3OB=y9$gyAmezja%~;;28d#20f=ihWli5x!=Je@* zpy#*oe$IKfKb}SlC_H0UFU_;Qy|rrH@Kh!$Qt1_N-fJxx_ReutN)sy7KZ@tjPs*{J z+ei0Xf?@`zZ|w6O z_IUNRP?UNJx>1zMw<8fz)>S6H>#Es(%%&qN@X6VLYB>Z-An27lh9qsgL*RMqxgHxzMhtnaR^U_=Bh&P@i#4#K#Y#6sl+cOAFlx3 z>#MQ}|Dm@nleB!j6Q_*qH8ltcdMVKknUbOlulsn}(-Lj|%-Z$%?|121d2qBNFlg9C zn+LwvRgvDW-);XW{lrsgNNPE{=GCuToH7=~pBpHqie~n`N8{B}LRgUj!^{amw6y+W zRK1;xL~P}&ghQpTb7|w6Bq>Wj2MNTVui1aJlV-09sQnHz7teSCA;bBx&kF1le?AX7 zbM!~!@i)@P=mSZ|4na4mjj2~9=A32o#C*n-b+Et>T1H(_0@$vF^^=`*Jr8wf58`Gjv%d~lVJ*`Hy+jRXI&D4{Bhm-Psal(`yuq5>%3SpYbF~Ho zC(Fx$i{m*I<$;y`#ZnRbM_+slMGM_+Wb`*y7Lv^_6XXz=W2`uHYt$Ls4fDikG(kQ}GY@6JntWd!Ch;)=X4V0<`NVj}(LaD|U?;(3)*(yR2k5tm@nOD4} znWu~?X5oa`Hf^teT&I@L1>;%J39yDU_p<+$X6Mk~v)aU#G}X;V(%3;ht{h+xnBQL) zBV%@+(P+A#f;NDt*eo9Tf5H5+2NGyRe)~ROV-h*5qa}5qC z9Hg|r;Z8DpE+cFzFF!L^-n7XbC~-KOvZWhoFEZ6d%zSZ$&%|x+r>F;<@S9tWg=GHD zPNhKEG%%!7Q#~sF41Dznd+d%SGJ^P^?DS|$&Uv=qOc)R6n{b*Ue-Y>KFaE1&psZKS zaU*=PrjOWz<9i~u&xk7Zj^1j)B=X|?&fP>s#@ zFqQZXo7@}#h8mvmw5#?ZTl^bk#m{IM;_EeS5=cPtB0%`H1Zsr! 
zJ4IhooX(Xh&A-0$hZBrIdQ`h7GP7~JuMCBgdd3}(wwnR@im%)DU?4vAcIbd{;1BkP zi^{=493JY(fqn(4R?PpW;^)Gj5xvh*|Hte0B!QEIZwpf{7H`nvI)%6+!M zPTDa17;6*q_JL#A0M#6kh<$xWuObBzK&DOQ7u zJXga4+9gb|4!(YUmi!+_D7Z`_q7?3r&_d1oez9dAixs}Yl1}L0v}kni8`M$09fT)3 zta{aH?2oyNtY-zQjzg%sxNOii%CgQUm44rJ7GjKa!A)i&Aq*h)d>h>vZpO?X5~>x* z_};}XkynbM8M4z3k6hk+P%@{iOd*<+2pL6&R(HkK2)~)Z_LdrlA(s5)y`!>TP^+J*(eb#xl$Q z7IbVxMEc}n)g2fnoT6{IV20I4u(KiB;|5F&X+`l|0Qixp*zwTLKl0-#24@o$G>{Z+ ze7Hb0dmRYQaec};p-m$iIz8XCct7)4{eS8G~<=`>#j%@8Y6A@=awvE zyk|;DN6^!1z9f5*Uo1<>73j?EdDQ=1U$c~I%f8vcPwl-u?Jy;)81uYhjyKO&ywY5% z?bw>5#!m_DPb2jouLC&fm|f$MG(HF49fO8#Rrhlm892Mr&kJ9SyF(jPyYe8TR?18< zyAKyVO^js6krQ|o>qcZ$u-T|SoU~>6KvVM11vP7J_4O?Z)pDz(?g_24W)EA5fufN= z-S$BWUVj% zKZ)>EewZ@gl>dM6s~byzLowAnhSH)g;4s-(VT zH?+2-hV{qFJ#e<6b>`q;#J9L($juIPCb(ByOjGap%H@gjntLL$w+P zC&@i>q$2$)Hd-Z(Y(Gx~h{cUc<<6z@1!AtP7mo34ewi6PN#&GmVH747de5NLP-DfM z$n*ZxWPK>@B?Xxf$1?4P+Q3)Vg}Jca7?ryohO>>`nr4Lf-k?n7dDiYQqH(h%yM^}# z0|QSJrjFlcdz}Q!#l|%WRLD(2hZ^P=X?vRo^0xxq zevGpQjE29P_%%4och#Aw9~Q+YVCP$SMk2DXiWpMWV%Pq56;nzPq9+_ANO;tFEq@|| z(${*8)wrOjJ{eMb;i~VNs9vTm&%vRkcDy;vJa@hq*Uie;AF}kD?K(2;Lz1_}NW4aC zJEulfRF#*}i}6!dJ@StlG0QfM7xIa#Qt$A!#_Rdtyt=4C;q=y$G zOyb0Z9LsvwM(vIkqV$3$t!cE9X-!&7c$Ac!c1qC1^>e~>Y{%FvhovF3qIdN50`kh3 znx%H^WyRSZ7hmT^H@YKRK8>bT=>9n~%+x;}^EHp>CHL1?T)v*A{PZ+1o?_9}Yw`F} zOTZ3Q1}ted+Ll`ZdVN_}vqDhG>hm@zSGDz7bxyo7=Pb-=(c@j*r)6)zj<>nytqIyV z*nQmhC9)<#jhOJ9W?qbC&}Ir)S2xac+=@ckBpL6PorU#K#=Lb>a&~AgPvjHvq&Bi7 zE>o;Z${O{|HwUQM(>yx`5>#C`nYBtiUK^HBmg)6}`HPr0Zfd#dhwwNqoh1zR@)EnH z@}blks?L?qF3%T~dGjQwRgve-kKMh#D4WmN@o+ufGxf0pBmX(;D!clp##xb|eq%#B zJ&n318O8iPkJ#^2G2?ZRTEjJ=JaNo+=S0fS?|#hgAN8{pxi7ul?3gwA+u5&IkLgEw zBciU9%;@P93t6AoZAjO@WM(sD4$76EiRQUYT^v>O5~!u*k8TLfK0Q!2bW@pK`qs6* zkeS_Ub{6cjahj-IuYDW6`ciC|Y{m>S5)sX6@$9|X-i6YP{eIPfun=UTgmf}$PjMq6Qrl{iIew-&jHkyel7WM4U=B3i0m^=Z*-P4C|pVM{V!bW8N2Ph6R-6vWJ= zbjHKmDt=u|>Thy&&rv3n0jdq%rN;tMeUrJ_ZQtPU8Bc2A=jn0x_aDy_KP*lks`NtW zj0<~d?0}grjtW(F^+|YsX+H}>UY_v2C+7>8BB=&C_$&-#wZfA$U;9pZ3 z$?KD8WrKc_G8%w`3>a$elcG`OfjFH{IR zSr_6|a@l~X#j2s}y`@X$LCmj;xctI5$eb(oxc)kr^=s)=jr>Q6XjU1uFsWMwJccoY zpb>IC16Jw+$`GGIqz9Su-C^kH#ws+-w&B!QVGhN5O&Jh2HCF8AyN{S;C!*V;Z!1pS zc2CANd$0eb{$&Ni&F3E;Tx5E3-x4om7j0y~=tLQt>R#a9Ry1}=1)X|z_9{115fBQo zv9r4O(Vnc;K26@(ObghUGa8X~HZy{j4!M2xz>Ho9iIB=e?6302e0?Y~CRf_nl(((| zKNS_^Qxc~onnJ@%R3^7+``O&NsP9C7ZpTqU(=L{jf^wCQ1DWjcJ8zwy*3+nYcYVqC zt1Y^wsqQCf@#4mP*2owtB7|Cqg(@sCUGI}ZEdX3)@xDg`P$~sgw9Qc z<))gu#%=ps?OGx~zZ5{_m3n*nCErT4m|Tc>74FB@?8XwP%g2cTyo2?5Cc$Tw&^(A)Ddo|@m;v-ID?T$-3lx~t_B%bz02gRGl@)1mf?%ZFmy)5L^aERs(_TD5#|b2w2y zLF)3D9{*Y^2V$_*)UOhJ4U4SUjNz9+tIl)5#Ml^I8`qagJQRI@CthE6R(&OkbK(WN zHW5{jKgZbWr$)`_%J((KQ5+!t%GvjPx=|8i+hJ}3;f_J@HSF5$(;v|{)jq12(e~-A z%<(p#lQUzH78^V7;z6pZ5^7|?_My-hyg53$u1C+$cQNEu;mKH7uUPSBD1C~2S2Tqm z?D8EYs0Vk9>Mn;y$AcKfC8FnGF0xq1c)i5pAOWEsQ8|fAys<^KgLHqSC7oC;yu&Y> z(X7B-5UyP}=s}#q^)vGCYY3Nk3&$?|g3z3Ov-F2JibXblq;dQ5K~|_qx;On_T(~WM zS0*%|PhylKUv#pVOd)Ra%Cu1ZqTOKC(qv_%yx2X+#mT^o?c$Y3doub0*>b9-&R^iP zk^JmTZ%b&lc%M4U$-MPhA9@vkbY_e_(?)bR>mbens1?*bl?p0U z*ooc%0pNT_UvAL+ct(SZ6^~7s8XCOy`?K~*wNg{nt?|75#*LP(B`1VY+^F9h%V|n1 z5GJ+}=O$Hy|3ltecvaa&?Yn|BNSD$`$R-5=>23t1ySoMH?v(CUx*McJkQ79a?(XjV zE%bfg^PTUUG0q=w#@J(zJ#_QzC)b+mo^xLJjb(~Gn*M8R(-_m-cEiSy(1ocAu4uD` zCYzt*&u9JF&n^~bg)>O*J}JOEen;C}iWj|1>@Hp{tmlo*C8*o6W7qKvK{*&zGtnQ& z!;aGRr9iXQ@Rh<_CB~_*a_^$pz#YSh8k6D&%Q?KNMX&2lU& z@Tj!^syoFu)Y?YG{W7j*BPcTQh>tGn0hQ!Jg|4q5()ga?lE2QAXHY`xc5XJnt_S&m znkEtp^vJh`))!;{^q7Ca80)1*C5}jh=tKE&mKIHF$XyL+%6UT7$8-0K zPF1enM5xHe8Hcn5eUsTRtqY=Z2tT9ASQ}??UQ?fu0Dto-Cd7{<@pxnQmkt4|gmwhK z;1^Rv(E-U95lB>ta;lDdJOwLoS?}Phwf&2WZ9l!HIrOddiT{aVuzWDuDetIkGlfy| 
zqTFJD-O6b`2al_svr)wdfsp@wGE+hRDCRP+_RZUlWdoV>xAmc%5omv)~x7DRyQICN*}qUJypTV;O$HP!49sp)xO54v4K$>!H4D2vl?Qdmp& zb`9W_aaO8BS3W)L$5_P2SYYF%-B>S4n%N#8+ue525K_^GsgyYNYq}-Z|K4a-4zboA zxR;O<+_-NMs0gNl8Bek>cWEk)=dE`{E{n`Or)7hJMUuN~y6)9bSobd}fGMF%Mh0iu z25V=`sYO0DANGhcT3*r4@KZEnT&FoU&R50zx?2*W@Q0%bzCPF5C;8D9W6r#ztpOaX z{~!Y-5kI=Euw3ctcdF|<6e~sX@w&Y$9|_uTWwu*}EiPER@@sPxL6lTQq+6&FvQ~sr zQixAR(-|tXIgO!hY<)4#=0Pj=gt;ni%37|~$Ts{EvpL{`TCLh}WT$sgox`Y1IImDH zCp2a91<8Yuo~FlbkG;UOW9BjK1ef7&Ca9J57vqMNqmnNjyCUe#&L(v^wy8}C_GNhB zA`)b^-K15Na`?5ugNAIajO-^!HKb9*cTL(GnTc0ER{83cev|If

uRbp}GO&FmUZ znhd#WU&{{#A*1WyjTp@np4T775A3wEVR8}7u&vA#*+(Rg*TS!ca@)qOabJ~3-PkmU zdR|-j%?B;()4kqYSk+0~I`Q#z=h=uf#6pB=WT)Jidm*&Img#vi8caizx7UGIAMd+C zr}v?MZ?9})KHh=rB4yxsT*W>|XG0_B?DwjVgC4Hq|Qj<@p_zRf$nVF$%V*x{(-SxL^;|VPxnG!a5tP+myPIvdldFRxAlF=Qw={tNn*Qm<83ZrOzjLy9~lJ5(-MB<&h$y z^TweAiG~`2mZlSk9+JtS>NDTnP%g5vGx{5#9BCz8Z$Q|ap37@J8!Bz{?JV~?E=~NR z4`~m*n4wGjF;rT{ocK>6aj-nJbH|i0#7?DeVz`emHPdYi)Xjw%-VXH=ef20gNAai}c|F=k;g5G!#;OBfqYnPCzO z)Y!QMF7~5(Wbzx0$hsdn4U&}_4bmT4@QuQYCJbY%2CnxvhGUapbhATKC1lPEQA~Q} zaiteqYdnT|iB^2=E(%Mm5fwf8?ZIe#BA{2))rJ@d@OvjAIgzvU> zc9z(-$^4|udt$bE@f`n)O}R^HdF)Ac3ZHAti{`E@dF zC)5&VGzy$17vN{aTefj1%OzLBzDDwghJJ`s@u_Z32ivn;3Guh8xeaC(=!sGqM~l~PlR00 z-wwR!r{lph@W126yVT%d%6e4W_-il=6icE8o$Jf93!1qFTlhUcMm6(&2;65L7fQ`d zXymtxM}GfUyYU{#<2j{n&YSsWlLWO-a@3#ZZKt=a6}`a8QO^AZ!Pz%UZe2Vuq^HOm z4VK+}<5fsH`6!#L)}v)4mX&`>Xsg?OBXWuhb@z>UC(DERgZ^mU+_7d79{cf+f&j;A zlOK>sC(8h>(l;o3<1((FqzC=1LuI;D2{@B)cIw z^WDMFw;$h_mNZ|9s^n=<wv&dsTB)KXT(qdwpn#xlT}r1bm5hqW;0wG<4nr>$SxyYP*b@TSE7q_IGNnJo?*6 z=Mqzl`tMAonfV2|x5UbYseXNsoePGhP@3$&dZjRl4}=n_@U+V#{o&#U#N~Yy(<`OJ zXV(r|(`*xFpG*ob*Kr}%jw;L-fPDY zucY}B>_i2!4r5=cMPG2>CkDd{cWF6xmV2BUoEa%Tqi9i zxnm8rd$;k!rW+vL;4EwXSP#Q#_^I2t*+!saeN15MOzgTviap3<+Rnx?6S*m z-M#O}N|P4l1CGVTOkt1(udxiD78mQQg`W5TG(i^on% z9}}ht^6IsUmiin=?c#xOzzj#<-{xGOH)L|v(yxiKeb=x^ZHgurH$=UNY8ag>KjFVmkGq*0-H>-+ZNii?5ozUs^4N|loOni)TSz*b`)W@3CtY^(@i7|H`)X*`51HvDm^JC&E=wM=hh95Yi08D(8N7>EGrLZe zr9ccV*J|~ryIp17+g8rCl%UaACNKZ`sZ`l~oqE4T4ZX<#d9dR_+C@ksd(cH^pg$)w zvF|l)V4l%RFYV@jv-^&j)V?((thts!U<>OICwPU) z+(TwD*@Gd8VKx0v2XH$d)F4hCWTJez{|4%~?zm=?NPm_=+t$l-|A1p$B(+dqYf&V6 zgz_Uvbhr%p;IIiTB-Z4P>f)3n{WDHH6R^v4ef8u{iTj;MyePi!8=oJHrI6}`+m#x7 zx6Gv~gYO7p(Wc`S5owoK22)oHyi;{gRGW4_Z9r2tv`Pke+uxraw4ccu(y98 ziN|?nvCgk+rOs-Uw5x3KlaR|((A>d&T%rMR`x>VXPD+t&7R;6Csg1@6IC3=x#Mi3o z^==r{D%{g071DJYv=Z>>#LW#SFAo?eKPCo3{i#nxr-%98dRl?an3kp6d~x+g;MTGL zS{AzbuzPu6Z>-Tcsop)Ss=QNy@+C4oEe5`p#``Pt^8-7lw)cvP=tv^%q|j z;1M5l23pL#gVeHUPKEO$zH)i5moEK;H^1%+v*$MV)=7s-Uco$X_AkEjnV2LpAbb=J z>b)Zv`|0)7Hk?m*y%f>9N-Is;h-6rEF^A>N#$iyg;Ff+P?`Y&6#HB-@cK&1jy=BF52#{mm3neZC%sMi_w${%b9Q4zaMfpZC+bB z6)5dokZ|Pf{3!X}8Y)?5d+_6(Lvw6tfppS+ad>L{n-ISpBK?5BAvYPu=Ip~$AKJIS zFfZIU2BP*Wk@d8HRz3vzUahY0xvfT{U|(4B<+o&B@-=uQ+>~KBpN{7ZaY7buzshko z-}+3Q+h5rk=Hk)5K=N@}27%hs|gRy+Bb=$k%X=W@ti z#tTTgw$w*@mWjxsT8Sd3P1;c}E%NghjO*7Cb2m9>27ZQp+n?6sy5LmZ;I>dmh?vwZ z@m|?umC(c*4<#1$-!W+4Nrg<(j;^#iQ9nfO?v0L};0w%PZP2p@dDh=!Gqu6~mN3+j zlBpwkOa6mopYbHI4D~HTUc^|$QXl%Y!L;NL+^Oa8H2DPr9gfeqIhGH*T*nuUrIMe| zS|E0sG`}<5*=6Cpcv-eYGX63`#95%F*ew%9(=+gH`8->FA1q#H3dekT*5C9pJ1k$d zQs48wSjv4@FaXonOL;ym>#lo{AjbJM(s|LU#HFmq9){CtS)CEqe46Zh%X_e+V$2R>91kpM0sDB@@r=TJ_cS#PQ0go2tY7@H3LW;tNGa#)6$7 zJ{9EIiXkyM*_D)dN{u>vwMx&_+bY*l*c+awQny2Ey%|b%`%@%u@LC8AoOxH)&UvQB ziq32MaTA$EkY7<=mXs)tH$7`&zW36$wm0W;;w=24*JBemM0GRBhDu){33}fn&&p;jtm3+Pk&cAsQ1q-1l2d z%ss`Ho|5+eG^*o+ZJ;3j+mcU;0t2WXDdb0$%N!o$y!Nv+G-|4)-03v-w`3eIKUC(A zZC681(j|k+ltsVzP@)*LLx8^WixJ96-ZXb%!({lM)_&4vn7E6?G|u%9zR#GYgwyl^ z_QvP#=Udbnh?ySbM)qOUGtqm?9A)|hvc&I^eyDA-2~>X!tgkNl110+s_Jp2xgq3*r z+22zV_%iDwnw1&{K_(>?i@ZfoB1CICmXb=eB6#Bni<+m0l7aW$vV=O<;-%4BMR(Fi zjrx#`f8c-gYIQan>EQ-5c-+KZ710ACHjWoLE2X}Slm7xy59 z!$Fl>b?=klpVamudX`bQK*ztd3c)9yA7sd<>< zD4B8yi-2S!x_XR?k{JBlO=I9zf9IHz@}C~8kw=8yn@?~ze>Wd>gupKq(g*qQ z0rO#Do+u+tjr-`s_LF|e03@7cgDT?_>|TtQ)REoaGkHJA(Lj>pm1C8OP^ z;TK;{9C3sUV}BdegM)hGJOs86J!Z09OHxN$;r3w+$Q>d^~v=A zvH*s=dcJ>mtuh*bOJrKR&#m(5&Nw&h|0w9dE*enw3OP*Ojgr?M#X4fx-=KvzEY!+J zi^@XgOh`i)bKJs5Q20+m4@Ddt4jiE(FXrD1g^l6;A{y~w4l*jaPG?6okJV<-BU7q{ zt!%BEFPj;`2QWZEPWw@W9QMU)D$F}W21E%8YR3>{DJt0v{51qzJX-jReOx1@Bk{hJ 
zqZrB{F^?>SLf3d-KM|_J8sU-lYFcoV)}Rw(8BS#U-|F#uL{(h0Ncg3bN|WLKk!-Ow z;5L2)PT;S7-4Ff0pp9~L=QTkegR(3z?on)TzH)l_Pzx7E-D3a^j5WA~Df`1ijJ`~+e3@g|;Zi!a zD)Bl?wH46KKn;tGeHcflCa9~`=YC#uOdJ;bB>>q~GT~LAr9QDCb{L08SL{_66peCo z#WB-~^EnX@x|gUH7nYm6nknd4WHK_7xOjE-Ps}1 zLijv#N&vxMlokT~H;)Lp9HkQl@+v^_?v@*ik)$f>-j^_jn3(McE|^uJ1_7$d&H$wE zXf9_)XxQn1DmDkO95Nhl*R)mjqi~<6Ah87cyaJ^$q*_=AYM?z{wr}Vkt_8m#f+?lI z76ih;8;eb=EKQ3C;T55!mlMwlC**aXvI@IokIpi)cBZhf$D!obH3e<+*5 z5aruz?AYC@>K)h5Ff^3#Mk^n*v{>@6=itx2uD@lL9a8>tp-?~tlRp%s_-_v3>mfpW zif_I$0U8~oI}9%o+|e(C=c_?o-c+SQ5Xy@;;^)PxaR7POL&EL+)Bbo>LKgJTEB5?kf!a!Ptz;E(C)K~j6u_PQXgr`!OEvudXIfvaU!fflQI&INO2DwP6 z>Do6>^f*7GZiMabp0prlXh0}ZjU&fGLk^1!gkB37$9WnBoHcwtN`Q?w60$k|#ZZzW zA(~#$zQSs--#C9ILVgeWkw%gEl*=^L`V$#R7Rq(_h5^fH6>x0E%5?i7n2+OgN#D@bAK%ZjA-Qsj+6zt7y`IzIeI-Z5;iH&qNA?4Q>MW;#REpgihn>6~Zp-pF!>kqVO{)BDXL4@6ofDU4UKFO1wU1IUP*~ zU|mRdhvMXdMptI**|&2`NH~zqb{AENIEy5AfCRg~1!clz@i<*2{L}R|jG&J&hJC?Z zfus#IfcajN%IPb%x8A>k71K~&49sIZhJXCE9D6W5|GIiob@TVZ7!i9S`SceKs2SF1 zGN*B3dPRA(@e9pOGLfN_>l3N#D+oHwFs)R0b$QVtXP|tQ%>u6n(al%X4qkoz!}t1h zw3GRN1g=KF@}itWBO?L1k;nAN3k1?dEgE#d@^i&-KiFV85$`!-}dYaf1f4 zP!$qYr5e_CO)W)5_s>TF=uSHK$V#;iiXi$+ZVhIGmQ?$h0k^TA#)27X|39r(V|A z?HjSm)^Z5j3T@5g`*YD+9)C<*EaHyJ2OWX*oq4wCr;mN{v?~3-a;1d6hKYbLX_9B* zz`bIStT_`hIuU&0$&DN6f2{WlP3XCgzQR{qy}38%Wb3L*IguD?<=B~*fa14#6HT-W z%67rE6oH358bJzc4f*?Xk6;!|pN^*6;{hXp%aK9qM|k7X!R}L^02<_x`=LBnFC#ry zzJEFfgDWh3sx~9C2a5G!wSuG;5gmS{U1D=;RISF-Y0W!ArWD=HUY$ zND?3J>8rIZ+Nv3=P0l?p*iPJdGxu6bol0xl4l>l+E?hEdab+g3hS? zR;{sMqb{Kup zMZFHzk~U}od*FSh_KXw_Hss{6opb|s?T{SqqC?2<7LP@RvgO&1=O08JzYHmVCFXK| zF#82Q!itwL=)Xw3zq=`}*C(@PBQF|GY>QH~9)B;XSVY-PJ5PFrj29bAb3`z6Dfo0NXZ$qqH%ip+X)mx_VNY*hVtvd23!t>o83I!I0;W3tB zYGOPsD|I|tz4@0q1L!~d?!U2`j?<@v&OmO8qJdW4N5F;^?2OX! zEgcx}&2bBUR`M+dzVoR9kx(yb4KjZw-0;>d%JoCDVxh=g`RIz1u)6v{MmT&ZuWZDO}^B~q z;Xdp)fRY%zT|;`-({R#6eu?5FGr5GHRM9$OCeWbnn!O9>oOfUZ|5)})RBa7x!IXf$zCn* zanTxO6Pk~_-H)U_lKHY)2&YmDiY!lcbGIvBpN`so&+#C-%_q`TXomMm#E;x zP|igD80Hg*YJ+sDtwn5OP^}SEhO99bP~SXND;R*JR{h+lo&jYY*q?GGbGuYoPF9dL z{XX)I!the4s5X9)rKE@>+f z%Tv8N#$cSJ{T{KhT0*9Z;Szx)ZovJpo3W~=IS9Dd-o)yZL8m^Ti9?jW5c8-4#Iixp*ds=`V^C!tMIYXyP^CPB zQc+I%;?Z?izK!1IB6TBrmdoS%V@PBqUNGy4+h)LWz0zp-;<@pOlVRWB??anhH$GcB77{30l zt6927;_jNux0ub;=u`{}f?Z0tnD6? z6e7&O>0Sd5>Dc^CPxZ{II>6b8hkN!y6+m!_#1jJKexB+6F=KV%(%fPG8ejfCJ1~p* z#+#_}0KeUo5m`8LKT`wtJCq;>m^3VEiy5~Z2;?&BfZJuk^U!&Z3oQ@GH(dShHz+me z%f9tpQ?>gHWCcQVKR4py;!uoJ&Mt!EQ0KS(G220;V1TSHW12+<;yfa!jC>sG1B5sI5?}EvMPP%UHnsNVut_~8qdVj3phq< zzPW%`q%4jPSGrUY`oYSBO(RPjv7u$*4Tr;JR*&5k61HN<$yevMX0biY88Ugdo|G3^ zifgWVShV=7DZ~sV9=g+@dwrQou>yT2)uxrE?ZHzmPU!>Xs*ty~*WKAR8v^cX%U}l2 zOc7j05ct9rr|2X;H)$Szhp#Pu+&3nEcRg-vwn73}q zK2hqDYTE8PX1|AbdW>fmi>TU}DwueAQ5asgJN$sj?o?!wO3FXXmUF^DJPQcZAvU}* zr@apR4(Wk$p*olC7IjjsCzd)tP#Ot{+!Ok8x0VW0Z2mzq!F#{qss$dva^*Y6Bzs)F}@hP(naM%v#s+Z?eJb! 
zf#q-ULT#)d>@vEqJb~~7aD_JlB95;+h3!P3iP^=IEsNP0Zv~i|uioF=sd*lRZ7^Qs zb$V&*a`WcYCI9+|<72$sZU%i?^7h^AIPv`A$~z9+@UAE3y-{S^_D?PA z;r0&or=N4UEeB)2+IBl0L@!;ihWkRDZ)c?r?RC z+)fEMK4Fc}1kq}&nKUu__GB(i`9;G^V{1ehD#6g*z(R*;d^A}Y6xw&$ zo;;U2)yAI|SB4j91Q?J|{A@?1q5OYB=;Ai}(BpkSyGYkLOEx zn7o_yLb~f!TIs=2o{qN{lLnE6P=wo^PGSh@xT>xxD_{Z{Nl8YDC0MliBnrJm@#Jjj zI(-|R@dj6GbRN^=os$a5x05deo8VMEu973jLD&ctiB-*~mykV_&J{$DartwcM6>T~ zuld(kXR>#=&$buUeLTy1m=A|OrdBDHHPm?vcaO*NShO9h4tX*cnG5ZV_3>S(-gL|h z+>09Tf_PtPXEDi0qsCN^NS`S}0<(DvEE4A!K5MN?7>=Wd8oN&HuAuv)nEPcN$8Y!x za9&%ir*FDd0yFOjhXUAmht9eaV5_dCPw8<^t`Ra2q`Ej{=J#ssN)-QsjAFn4@<7rC zeIAYskr)9n?9(TQ6LBq?J|0Zl2!5(=IM@*TNPl`D00`rQApyaeG`M=A^8B&0Qq}eJ zN21w-1lQb87XGfPt0<#h_X6aXz54csEyojZSrF|D?q5KfaXzEjx7 z=H|M=N=Qv4niCV`5X?g7G-PMlMS401Ld&TzXP!Fh)ofCjJ}{C|tAr5R+6gHOr15pN9vd+d|alz8NNT$;;K6-=!K8=9tSr6x!q#Pq#*W z1J2R!;l6noU#4;Pc|G2ue_rIFPr4*m?UtYZPC%Y zy^GuF|E{j}brF*c9YVlIVw`?bS8a4JOsR~78x~_M+d?_e^RZ`^Z63G<$h5@c!NEF~ zUCT42e<-m09{&7!ktlv#1_v@Rf~+7c`_#9t-KQ5QI0V$1w6X+Z#o&R5f=o~U;OIOLFZLjNA;Iy`dx{Dx7~|Ld%m%~ z!>j!gNTq*X2x#Df9NMasrU=3k<4AuZkdc~;C$)EFkNw4Joe&dm(HFpyPsyQIsDl== zf6|(X+u!<03a0Noe)*n!56+9;g6b?`rElgVMy*J%>I!wB_>UP9rJf$Ny#)W{=k}7b z*8S^Hpb3&d&P3bj#x^}^p{i~OyGDZs_M|o$J*{eZoq2nAIdIP9P2|)fcc{QCYg*U=tJ!Q{G zYFZ=qgG;#bNAT7^lb74hpr_T3!e3{BmOk|Ocbs~twW726&2H1tT z27X1)ttu96h)@YF3=Gig`Uu%q%+#1E)OVA-N6KJy76e{W$GCn*{wME}m;^kSR^r$c z-X`sMZthHyZ_Q~w?b=s+O|8rC0R)jZHcm4ntmbhum3O)T5I9aVc=)^8M0RJg)z2*K z%;^P)6YuO)Be5_sOU^rnQ+p7HzhkqgevJ|Rw>1)d3MdAI?!4e8QS3J8i8y=PUF4^Q zWTsFkL11(jbZ0mu^}|p!a+~=s1j1Y|Yp=#KQj4$u<>5?JaGE3Ajzt9_BRo;ZbLwj- z2Q&j>-kb5jEx*dZkhz`r4R%Irlwl#D6(Jr%Gvap$v0ATb?Tr^03O55>$k;-K{6-g{ zT`S!H7Z&MKiC8KCu~(^n!D{vdCVorAmNv?>Lhl<~ZF_EOU^nKEZpJeg>A6|pvv8Jv z*fL1rrA3vd;cuVZ@Cseav2O7acynl2R4x*aR$~v{S6lt_jFUngz$8^Kr@xacPZBf& zYf#_%(ewIs0jsEZqm(Bw{C0L#XQIRzpFofqeD&QEtdp5R+$|-pYvkf=!~{D15kRLu zHXfi*wasO`R!)eFj8wVnM9~6v2D$w0x~y)&PtsTpF3w>|%dSti479%c1_K9>3d!xB zvHsqHpJ$8gc^9Pw;pvY;DiDy&(>wSdLZ~z3p1K5+`v+|`<6;YX9JD>OD)GuYk{(xV zzHEMn4pCjijD12uNr@;-0h5Ev1jMJ)x6y(*?s(^Z|BS)ni%3we%4HP@QyDahK%Axl zOM6FxuD}vK+)XlGyI~?bk~pX9(uT2 zcxd?XCEytSDY__vZ0GAP?}p$UY60YOLjM>ubQ;w?;L7Dc6+W3h!cp}M5aw5tO)@Y_?7Z%8U2crlK0k^nvw7&}2B4?<>ObTI6KD z#qIa_>UWR-Zf+AqQ!Sqdc9Zc}%9c64^vZHPcF&p6pBa)`?BJsPlj{CEZ-Zbv3a7FY z#tojB1GB>HzZv512OkXGAxYh4)&FJ1)lLSDO^fPd%<}*EdX_3;IG)sh#v+d=>Ivdo zVIVq@{j?1H_b&bW)1*vbD?Ug1|I3LBD(G?Wgh~1F_mBU3hY=FCE>f=Uv2DlcY3CQY@W;Mh*&NVw&cGEBhtv{1nYlXAAEiVEL~Q? 
z+BzC+b^y4`G@qZ9HSNcMhqh=3-58rz`~4C0zjuQ7C$OW>#4FIif`jbgvWIUKD_R zBl|(Vv1Da%m!H(^v+pquM6uVF0Ona2)GnY)ZO49=Ly+a??s#eUgLT5tG#6yT{2y1OhH{uO^tT|(*9hnMIulzHC0|d8Vl5xJCTO+KHS=$CG)t| z@e&==3;zP_lpLTQ0HLkrDY_zX_KB#mni=;D=^-PR1GacB!0&FY1D+(#=P7(6;sQEr zpZPzJIw>8@rW)ew^R6yo)}%*1s>IKn?QYQDQY@Oq#H$mbY<6f3;+qP|%))TrmZ$*9?9?ZAX=$PmQ} z&3MHQl)Ie&g~nYHV+ei$ZG#_|YlZ++CjmW~~eWmF?w^Rrh7`c?F(a z`x9raID`MW{@AYJ?+EM(2Fm4tcdF&=&t?~Uf#o)~VBTBDOt*cGWG<(&WAakfO2O|D zM6~D{$hdSrMSj--lkbLpx6Si4B5tN{*q{X~Dr!-8$1?Pv!$<>T=Q8>Z0D!9b!uE|J zAPNn7QK-*bAWK$ODN%Yw$3Qui%LKJa)h49>_W1u2?)%@kR+PWlW+Pp6yq!B7mE}_t z0ro~Fdoxbx=0@!P0%S1#v1B8J6SY(DJJ-z~VHHFQtCQtH%lLQxcwM{YZI0T`OR&WV za20<61dG}A z)YQjTxOW~{%C)feo%}Ph+bUt{;wSk<+JH0qh}C}1`LTe%f|LS9;=*{}*Ke-xr=rjD z$gi4>RQXmm~O?UeZ;RI?E~MWZ*Ug7W86|KlJ zK8Y|^vmE-nq~+(;QCG}Nmp2O!W%u9PiFW1S;Vuuq>P2}iwnOPCi95dS+{vMq#s_Pc zMWoGb?xZFN3;X3i#h+-l-;J{XX#N(u&Y#lwoYa*g57yon zY~CmQW#@BK!TY0G*cUIz+U<*SX~B?DC8jHUH?f7Jj+2H{;v zyE|~yG>?HlaoP5qnn7K~_eJ6m%#|vhP`(n zl3W_uW^F0`bt!NQ9Vfd+7dOSALO5iX@K$OFB0@lL>%~*;$o9Rrcp3|$XXHzLmgf5cw<(lu|DzZ%7%CBRj1PImh+={~_#4NYCRThV< zpBbEgkOp%8`gQ#?j_OhW zNdQ1MkNE3$6wUW)wbSXXdyf>Bk$rBpJ=(cr!}Aa_h&S-%#=6PZ&*mz*iKM)_DBA4o zIVT&rKi=K|@X6o#epRe1R~T@)A_$Nk0y7qDxTg|HS9QdFr+H-Iq8cDdoZ(l?WUp79 zA{u5Nd+N0shE1pX)BGA3GQO`4@1TVmt&aGS^!gjbZy}lI%@7LFNg|%U61G24HHm60 zg2cVCJfG^zNR!`1v@Qylt;Ult8udbD>Bh{Ea5Q^L+!0iy-n~;IX`*IPB^*g$L%>0x z@ZY!^h#k3rF+fJc=qh959tP`s?I<1ph3tDPes>|dlm^Lb;{WQIF~Rc1|B8kbB^P9j z%@6&5eg@Q7AIEP-KAk9 zGror$*%9eUqx((|3cU6Y^86Noh|1(00L%(x@l-wwjr&;ipu~0-Ew!SMNgtxV)yoU+ zbbVz^-a5sLd4G1%<#H2lGHu~+acKQkuebcQ(A=Z(Q#l8t>O=QnPy>*(<{C|1)tE%S zQs02_Czo6Lkohz3sBia32^P5?ka5-vqYi{VrjSJ>g`l^INOOMX@vvz*OGxAM(msRD zs23x4p;*1sfBMg;5`_cSf@+^}SKXMnu#PEpT^$QZ*_Y}Yg*4_)@OIfXo1>`0;2fgL zbz0|6@^v&Cq8ydyaB+6r-iuW47Zj=3Nyywc^fY6~d5jkO7&=+Gjx!LZvS=^S^Xgye z;Wwu$5WGAPAb|619h4aMpJH@l2y2O4m(~^k$@Wb9iB;=(@`#adr3h~TTaArt-KmSc z#kxw;0)f$>g_34Y~Kob*|3k(!>QUmx@Uf8 z|E%};nGl#vDDknY)p{NsDKFb^pZQi##ZAn3W$-HYPgu~XmW;$wGNeJM70z87bA~#& z{I){N%U!A!ZAO*q1~W-bBJ-W;BawmWLA?52ro7SO)Kq4L<{tb>CZ$vYf}pH633tkU%NEg36ej8yd2Sm~Sc)9>ZjP1V2 z^UWVw{1tn5VJn`72g!N$H;fAld-eSu0>^UQ$dO&^Ic)n1hNK8E& zi=k|0wT;v6q+WW}1&JVB$VC#=ptY(^?)z*}bE{NfsWPjMCYFdyC8lNaweGt9)A_~z z*}X5+#E)hog}2Ku{!&W3VaR>j<5jAnf2L}V!z&c%dWTai+p2Gu!6c71CC8P0sE#Yq zlwwHZpzlMJx^or(n0awmBX@44NiJSjj(ve=#(rs`#!^^MC_A4QRzQz=O~ChB*nIxJ zfRoqLw4P#+a9^1!3Eo+w#2)|fJE2W36aIH~B2{?!Fgo)SyrP@|j>=kD z#_%+3|0)+2l>X>6R6Sj0;yar)yQ9H5jX`H2LZ3aJ8Q;)9rV?=!$uTuSu0!e^r&< z$HYONi2)MM;EOZhvfpx{U0cM{-+nUI;^rh(vZz_{_Dh-V z3tXY!&@i91T90j{eN1Jv(f@}`%!H`v(ruZs<|&StBnZoaBlF`0&M(Gz?W_NpbEp;2 zJUYBOmf9+Mmg8DnhB;B%rf4-Xg$e0PRq6fWb7W^ZoDk*s0~@chvNFL|ARCmXUrg4~ z=vOQ^4Mn!6n%aBs9T^A7rhMk+w4|riYHy^=OGc(k+2GkTNXK)|JVeYv3keBPb~rS= zLgr&BH=(jbi5D7MWcQ|VtvrulbB>nw!KRC7ytlRbaiF(u=CU1mp0kv~#~7Y}%LLS?Z<4uTFvH&Ht87Y$^S`b;EW3AAYRBfEhfz`2_o!>af zMC}aS{KMR+Oal6|_m+2y8d&=p0i)h+0|ttaW9MqrNPo7$h_m{(yLTk6bL6{{ZGB>7 zHi)|!koJWq+%Kjp#?f1gslN?suZL^pW%=V-4ko=m2A*@pG$DJj@SE3z8=>};l5sy1 zXU84;P{oQnnf8*P(KSyG=H`iR@22w&8dox2U2=~G`5GI}({6Mq`tRKyopXF*W z{tg74*~Wa8Iw!6z&UXDs^-0~V2jFlUmY;?tz4{Z7TG!MBQPGTht7gEOX0Ge0;36Cv z@$S0M&kKHZ-Sfwob4y`cyCgV6O{k6CX5JEqQLQQmg@l*;+Yo^82bxy}! z)Aag>@e<@eth&d34<;<#p->^Tc5Fy5ZM%jpPy5FLK$O0kXHwdZ$ zIq4Y^ndKpBM{&-xi3-$^IrQZ~2ad+3V~&~*1wS9|ZNVAwUt$a}%{1%bnu~t!&+V5a zw1Ud%VKBER8iYZ>8VkG*7a+5!AqR(fn-+~3U;#O3MMZl`;3CQ?jiSE`iY^pA0(PHiWYw?l*3+E6D2#2y^awg9gcO3V~?Ul zMl?`H*?S-B;Ac}=9dhiSICe(%^*PP!cjNls`ro@Qw>;c951jM;e&6rc_&iJOqCMZ? 
za6acaQw!v+X@i`CJEZS|)EdfxfpDnv!JWKgLF52F6$_A1m2u7Wrzv_3^phfGly54V zfOrVj_c&T`j6cdXiKq7Uu`Yo?}H%mIf2;t?4jp7nap-w*FVDj9%sq*Vb z%s6)U&=85B8M3{4$f2*5vsLf)gKKL50!!R!Od)?%p!zgs8x_x zr4}`n<$Ij+)4x?nV{V;6Uzk$_ z01~x=XlLJLZEA7o7AmLdHn}qOO${1F23V5MUVOpwxs0vmQ4yTaa)x|%aRSfhY^4Y` z{wDhMhUy?6PTH&3nx01|=qOpR{L2sExkkFBRMCO!I%@g1Y~~t+DQPVzAfK0dV*bwT zMxfTf?gB-v3U;knjt22L1FOIk7DF6CqPjUsHuwcsTM82rEAhR z+#bhL4BAF|l1G#NVUL`N2RmZZ)syaz{_$g!6fi12@9l0a{KMGbwxrYg-;4h@|63Gl zA1tt6xj`=TQUToLrwdy+W6s`;Y>mCB!z(E%8Rlo@EC0tK`)M&1!MFdoCMxlZGwK0H z?MA9f0fXuKh%0y^BuXKZdgv{!tfsrKEWz&xn_({irE%Txs%DbhN8p|`uua=`Ch&kt z(5mN?y)%m#!0DI*Tt3}r08vLu5SAaG6#M0WkkL)qgDsE8cNY-ZqWpFzxlo{T;5i+q zKeasG$O8`c1sOuPQQ5sr1(OP=hJ1qn4~jtTVN!Sw^ftCd8_NJI@?)XZP2w)FM(=dQ z3k^8ixN}F9XhF9?Wy!kYNMHsao6qe6E0{9*B?7PsV`SatT1hFsCNqt18oLZXw@n4| z*F0d3J(;JM%XHqhCrLNx1F&FJ5BB$5)GIV=cSF)p#gW=?w1qC3tx&g_S0ubt!;4(J zBGndorLdC~z`YHzc)frjK3H^BCWmi##NhkhV`5{LS$bby_)E)^w(G^+&n(>ow+KpuMO z&zrSOvQLl2aVM#&sgc}{_7rGPECTwaCp*9P`6QLG8+2JQVO!jGX>Z0z&U$`8Jc_!QPTzx^}wCoFfX9 z`~vZciv4cH;-y{y`tAaOIerG`B`HYOa6W+}qhS zULVLe9~~-R0{CKrugdbh)tR2;aqQLGRjzky1W2RF2E_=}065~syYz7i#K%*sHs}$c zbMf-<@I0;VsQvlOA)^aUBzdxo2oOGqM?l3TVYqqZ8pmAtsLh|MK641+u0{dqh@DqxM&JljmI(VZXw4#SQ8^W-d9d~w7g#Td*3;1lsOA4YdS;pGjWea6HMDsnSFoz&^6E1G-K&efZ%)!>()Ymw6?h8&?WE(Jb~w% zjkK%dv7W_o;27o~UP2ZUaNq(>PJIF`=eXAQC#hi=i2wq|u zZ=1}Dj_IWIePt{GloxzCGFf=B>`2MMeJL6zr?oG)Nf2s^iXf%m&p(s^$h@5PUHzWu z_oP2@p|LR#`_RdF|4sT-k_F^k#)@gY7Bu;fxW6A$on5*(aT0ERcBbc5eE+1kCky|! zw^>yJACYe}Iwq5*FZ17|eZb8uoU;ejF5pt$uNbFxPp5 zb_%f&`u6b(Xb?4dAqw~}c%*L2#{M^AU5kpiGjm`T7>79Pf;_JnSgZztJP9psU9`j4 z)wRSh_-CsU+|FcPlF18LpzCklx)mR$L=xkf&KwhV=v&ae$gE4S`s$hAwzOLDD3*e* zgLk3ixJZ-(!tnLD++hi&`HUY&PNhup&|-^8zZmlnieD)5|(=d6hB(If1(&EiXR z)?7FyohjT&P{JJ_0DI%GER8b-vQ<^2CXJFR81_SqSBv)7;zLpwTAo~$DBy!AMqdjG zqoHu)O?eU6Y|_yz97{seiqw94W880d%tDVz%81uuI9p!?+};W7$oNqfGp2=%?6CALMJW9H07b(L zQYXFFoBK*-d2@b5>R}N$3)V?-FV0S1mJ8aj`%6+JIo&R)Y~w5Ig$p5h)w^W}P9OP* z*t%D-qxm_u+F7WYl<+u^{WG>~?v>?RzGT<<2!7)8^%8}&`sK?9L5x@5E5YsJIOS63 z!Qh4gPtJb0eJD>$1Ya~m`uTfBpVw{gw+o5T0ljy%?lq+3;+PY!>)Sg?34ettCj4TNhr zOmRnbuE43SXxmk*%*Koomr_CtokkZULs>AuJkL_^9rm~O>Sc;d6FPUUc_}ay)Y0^M zq~24VbvuY+QnSNcDG*36w?Tc? 
zS+z;(6ckstpCrBH_?Lu@Vw(;~&E!erzlG-O{Ugb5 z8RJ&hjp*4N)~p{w-nO^J)+7mMx-54iswRjI1i}7c*EpfC=UefU7tJ~*VM%hH>3TbF z;p}Mljq`%~I}N)uy@$@0(AqHEbLe|#AzAD+4s+nT!X#b(85JQL#$E>KN7**%AGi3y z2PvGp{6#Ng$=Hgk(5IOw5UJjilvm!y66}%8k(iLvvUtA{DYccHF8x(GJ`MjFnjVUA zp9I5B6H54C!|$+ceZQuE68KzPf55^Ev0{2iMO{d^AwA#Jz-xFZt@!k{is9TXi15)> z;^)mfjjlb#mlH$EJColrKz?M#sU$7Ux(m#EO1)|}aPubX=?K|L#4xzj?7Heh5=*1r zg2>86fx$yKD{yh$?|L41RA&a)sU^^RW!&BB1Vd+Ht?SosLY_Nc#6Q`u+)0AW)F%2B zdE|RoJi7W^VkS|dB&7J@-mjA=l)1lA=3$>6@g2$ezpyK}O z(k?;?RXY7$!h2qDoyhY6L)h$v;me&La^q{YKbxnGjOE|yN+Y1;JYNXEDO3J}+-+jY z^xR3E`}Xbi$Wu|53gbis=UA>cfEmN%mgN#6gwEKu-ZvUUh>{3er3?&ghJ7}C<3yM7 zp&@<$n;P^mJg`BJJY9+IIChsn>+;;j&%%zDrm;9c2`8&WK!5#pBZ*Sj0FILN6Z80` z%OUhOrd~@mn?)x21xU63H}N**goHXtyw!Kk^LQyewFt@;iOsnn-zqnk{t3o}P6IE-mR0d(3qFZAS<^dcvZr3Ln_*$;7DIaiMm!DE1TAYe~PBcV{;&v zI&-0`VQlmUA#)V1nn%eAHh*`BhXrdKzcM^B;F?o1gH*JPAp%PW*BT-T; z$bnbaNcS?c7Eh>Aj06FyXSV#;X=62%Z??u(@?-HC7cDyti`{_Ujs2?=(RkoDZcxuh7XE}R^!%qn`+irN>M^_j9NJ!?+LCa7oQi2UtnJ=7#6dWW-0G4v za;*f0$=pa3(qmz0dLYycPgqDJl10edTTtBo3WQI{zhvHM7 z<6kD4BeGO!`@I3BPQo=GaJIj%YuH!OZUH~f049wrfn+Xvr@bC!XhF>y62z=>4rN?7 zZSC&@v-}PF!MugP6!MLh+&@Zx0`p@DNui{Gk|7g*DwF|a0Tw8-P8D~sIvN79UEB*4 z$cC_XvvG0dw_8~H1D2<86h)w6DHD!*DXOwvJ`V#Zs#!j1yA-t`J@*tSv6;d=NRq1> zc=yq`d;nRa%z|z}N)zna@0BayrRsMjG50?yiB72n?DU~l!H;MAd!6TlbB#*dbJ>G$ z=)Ti}cDh+=Atst07J+k5%8QuJpBQVYT6I->9T-&s4h)dlH~lO-Gff6@eJ*pORUNn) zk%*(3Q-yY~QH!5t0Q1d6(U(TV{PEl0!)=iQ5dR?W$t%-;$bTRl9S`<|+wUmJMV0S} zH7|ixoft(IIaw-FL zn>$O6J$T@^0BAMOW_Pf#Nkcs0cRrzg_^MEG9v7zN(rCiG|1W8kH>wzB9s4&b#8|z7D$Mu&RazgNp6igkQ%q5k?gZtuecpWiU7kLEX{qK2(d}_FQGS z0@wo6tlOUQ=Tqni>}(5JjCVYjbfDk&Jt|D~L@BMyGCnaZZ0S**^Hc$M7_HyTa@MbG zxIbmyRdOSJ%2iyn4A2g`4bb_3_st*NIyffYG=f-{@h8IyB(4;UW#g)gz~+^`Z%niL zDt}v~L*$n-#~T#@v@~kohHK7}wI3L+QVsl5x%*$r`M)RSyn9VE;p>T|4X7X)_`0Q} Kp_m6l1pW)y^<~xo literal 0 HcmV?d00001 diff --git a/gopls/doc/features/navigation.md b/gopls/doc/features/navigation.md index 9895fcf4d9a..4f8a0f6fad7 100644 --- a/gopls/doc/features/navigation.md +++ b/gopls/doc/features/navigation.md @@ -100,6 +100,28 @@ Interfaces and concrete types are matched using method sets: - When invoked on a **concrete method**, it returns the locations of the matching interface methods. +For example: +- `implementation(io.Reader)` includes subinterfaces such as `io.ReadCloser`, + and concrete implementations such as `*os.File`. It also includes + other declarations equivalent to `io.Reader`. +- `implementation(os.File)` includes only interfaces, such as + `io.Reader` and `io.ReadCloser`. + +The LSP's Implementation feature has a built-in bias towards subtypes, +possibly because in languages such as Java and C++ the relationship +between a type and its supertypes is explicit in the syntax, so the +corresponding "Go to interfaces" operation can be achieved as sequence +of two or more "Go to definition" steps: the first to visit the type +declaration, and the rest to sequentially visit ancestors. +(See https://github.com/microsoft/language-server-protocol/issues/2037.) + +In Go, where there is no syntactic relationship between two types, a +search is required when navigating in either direction between +subtypes and supertypes. The heuristic above works well in many cases, +but it is not possible to ask for the superinterfaces of +`io.ReadCloser`. For more explicit navigation between subtypes and +supertypes, use the [Type Hierarchy](#Type Hierarchy) feature. + Only non-trivial interfaces are considered; no implementations are reported for type `any`. 
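As a rough illustration of the two query directions described above (the declarations below are invented for this example; they are not taken from the gopls sources or from this CL):

```go
package example

// Logger is an interface: an Implementation query on it is a "downwards"
// query, reporting concrete implementations such as *FileLogger.
type Logger interface {
	Log(msg string)
}

// FileLogger is a concrete type: an Implementation query on it is an
// "upwards" query, reporting only the interfaces it satisfies, such as Logger.
type FileLogger struct{}

func (*FileLogger) Log(msg string) {}
```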
@@ -300,11 +322,9 @@ As with an Implementation query, a type hierarchy query reports function-local types only within the same package as the query type. Also the result does not include alias types, only defined types. - + + + Caveats: diff --git a/gopls/doc/release/v0.19.0.md b/gopls/doc/release/v0.19.0.md index f09bc65307a..d2d570692d4 100644 --- a/gopls/doc/release/v0.19.0.md +++ b/gopls/doc/release/v0.19.0.md @@ -33,6 +33,25 @@ and queries using signatures should be invoked on a `func` or `(` token. Only the local (same-package) algorithm is currently supported. TODO: implement global. +## Go to Implementation + +The "Go to Implementation" operation now reports relationships between +interfaces. Gopls now uses the concreteness of the query type to +determine whether a query is "downwards" (from an interface to the +types that implement it) or "upwards" (from a concrete type to the +interfaces to which it may be assigned). So, for example: + +- `implementation(io.Reader)` subinterfaces such as `io.ReadCloser`, + and concrete implementations such as `*os.File`. + +- `implementation(os.File)` includes only interfaces, such as + `io.Reader` and `io.ReadCloser`. + +To request an "upwards" query starting from an interface, for example +to find the superinterfaces of `io.ReadCloser`, use the Type Hierarchy +feature described below. +(See https://github.com/microsoft/language-server-protocol/issues/2037.) + ## Support for Type Hierarchy @@ -45,8 +64,9 @@ In VS Code, select "Show Type Hierarchy" from the context menu to see a tree widget displaying all the supertypes or subtypes of the selected named type. -TODO: screenshot, but wait till #68641 is fixed. + + ## "Eliminate dot import" code action diff --git a/gopls/internal/cache/methodsets/methodsets.go b/gopls/internal/cache/methodsets/methodsets.go index fd2aedc5ad8..873d2d01289 100644 --- a/gopls/internal/cache/methodsets/methodsets.go +++ b/gopls/internal/cache/methodsets/methodsets.go @@ -143,20 +143,6 @@ const ( func (index *Index) Search(key Key, want TypeRelation, method *types.Func) []Result { var results []Result for _, candidate := range index.pkg.MethodSets { - // The historical behavior enshrined by this - // function rejects cases where both are - // (nontrivial) interface types, but this is - // useful information; see #68641 and CL 619719. - // TODO(adonovan): rescind this policy choice, - // and report I/I relationships, - // by deleting this continue statement. - // (It is also necessary to remove self-matches.) - // - // The same question appears in the local algorithm (implementations). - if candidate.IsInterface && key.mset.IsInterface { - continue - } - // Test the direction of the relation. // The client may request either direction or both // (e.g. when the client is References), diff --git a/gopls/internal/golang/addtest.go b/gopls/internal/golang/addtest.go index e952874e109..3a5b1e03308 100644 --- a/gopls/internal/golang/addtest.go +++ b/gopls/internal/golang/addtest.go @@ -480,8 +480,6 @@ func AddTestForFunc(ctx context.Context, snapshot *cache.Snapshot, loc protocol. 
}, } - errorType := types.Universe.Lookup("error").Type() - var isContextType = func(t types.Type) bool { named, ok := t.(*types.Named) if !ok { diff --git a/gopls/internal/golang/implementation.go b/gopls/internal/golang/implementation.go index e7850e19b1a..19f1257c76d 100644 --- a/gopls/internal/golang/implementation.go +++ b/gopls/internal/golang/implementation.go @@ -91,7 +91,11 @@ func implementations(ctx context.Context, snapshot *cache.Snapshot, fh file.Hand ) // relation=0 here means infer direction of the relation // (Supertypes/Subtypes) from concreteness of query type/method. - err = implementationsMsets(ctx, snapshot, pkg, pgf, pos, 0, func(_ metadata.PackagePath, _ string, _ bool, loc protocol.Location) { + // (Ideally the implementations request would provide directionality + // so that one could ask for, say, the superinterfaces of io.ReadCloser; + // see https://github.com/golang/go/issues/68641#issuecomment-2269293762.) + const relation = methodsets.TypeRelation(0) + err = implementationsMsets(ctx, snapshot, pkg, pgf, pos, relation, func(_ metadata.PackagePath, _ string, _ bool, loc protocol.Location) { locsMu.Lock() locs = append(locs, loc) locsMu.Unlock() @@ -161,20 +165,6 @@ func implementationsMsets(ctx context.Context, snapshot *cache.Snapshot, pkg *ca // Is the selected identifier a type name or method? // (For methods, report the corresponding method names.) - // - // This logic is reused for local queries. - typeOrMethod := func(obj types.Object) (types.Type, *types.Func) { - switch obj := obj.(type) { - case *types.TypeName: - return obj.Type(), nil - case *types.Func: - // For methods, use the receiver type, which may be anonymous. - if recv := obj.Signature().Recv(); recv != nil { - return recv.Type(), obj - } - } - return nil, nil - } queryType, queryMethod := typeOrMethod(obj) if queryType == nil { return bug.Errorf("%s is not a type or method", obj.Name()) // should have been handled by implementsObj @@ -231,26 +221,25 @@ func implementationsMsets(ctx context.Context, snapshot *cache.Snapshot, pkg *ca var group errgroup.Group // local search - for _, localPkg := range localPkgs { + for _, pkg := range localPkgs { // The localImplementations algorithm assumes needle and haystack // belong to a single package (="realm" of types symbol identities), // so we need to recompute obj for each local package. // (By contrast the global algorithm is name-based.) - declPkg := localPkg group.Go(func() error { - pkgID := declPkg.Metadata().ID - declFile, err := declPkg.File(declURI) + pkgID := pkg.Metadata().ID + + // Find declaring identifier based on (URI, offset) + // so that localImplementations can locate the + // corresponding obj/queryType/queryMethod in pkg. + declFile, err := pkg.File(declURI) if err != nil { return err // "can't happen" } - - // Find declaration of corresponding object - // in this package based on (URI, offset). pos, err := safetoken.Pos(declFile.Tok, declOffset) if err != nil { return err // also "can't happen" } - // TODO(adonovan): simplify: use objectsAt? path := pathEnclosingObjNode(declFile.File, pos) if path == nil { return ErrNoIdentFound // checked earlier @@ -259,13 +248,7 @@ func implementationsMsets(ctx context.Context, snapshot *cache.Snapshot, pkg *ca if !ok { return ErrNoIdentFound // checked earlier } - // Shadow obj, queryType, and queryMethod in this package. 
- obj := declPkg.TypesInfo().ObjectOf(id) // may be nil - queryType, queryMethod := typeOrMethod(obj) - if queryType == nil { - return fmt.Errorf("querying method sets in package %q: %v", pkgID, err) - } - if err := localImplementations(ctx, snapshot, declPkg, queryType, rel, queryMethod, yield); err != nil { + if err := localImplementations(ctx, snapshot, pkg, id, rel, yield); err != nil { return fmt.Errorf("querying local implementations %q: %v", pkgID, err) } return nil @@ -292,6 +275,25 @@ func implementationsMsets(ctx context.Context, snapshot *cache.Snapshot, pkg *ca return group.Wait() } +// typeOrMethod returns the type and optional method to use in an +// Implementations operation on the specified symbol. +// It returns a nil type to indicate that the query should not proceed. +// +// (It is factored out to allow it to be used both in the query package +// then (in [localImplementations]) again in the declarating package.) +func typeOrMethod(obj types.Object) (types.Type, *types.Func) { + switch obj := obj.(type) { + case *types.TypeName: + return obj.Type(), nil + case *types.Func: + // For methods, use the receiver type, which may be anonymous. + if recv := obj.Signature().Recv(); recv != nil { + return recv.Type(), obj + } + } + return nil, nil +} + // offsetToLocation converts an offset-based position to a protocol.Location, // which requires reading the file. func offsetToLocation(ctx context.Context, snapshot *cache.Snapshot, filename string, start, end int) (protocol.Location, error) { @@ -359,8 +361,8 @@ func implementsObj(info *types.Info, file *ast.File, pos token.Pos) (types.Objec // localImplementations searches within pkg for declarations of all // supertypes (if rel contains Supertype) or subtypes (if rel contains -// Subtype) of the query type, and returns a new unordered array of -// their locations. +// Subtype) of the type or method declared by id within the same +// package, and returns a new unordered array of their locations. // // If method is non-nil, the function instead returns the location // of each type's method (if any) of that ID. @@ -371,15 +373,39 @@ func implementsObj(info *types.Info, file *ast.File, pos token.Pos) (types.Objec // because reliably naming such types is hard.) // // Results are reported via the the yield function. -func localImplementations(ctx context.Context, snapshot *cache.Snapshot, pkg *cache.Package, queryType types.Type, rel methodsets.TypeRelation, method *types.Func, yield implYieldFunc) error { +func localImplementations(ctx context.Context, snapshot *cache.Snapshot, pkg *cache.Package, id *ast.Ident, rel methodsets.TypeRelation, yield implYieldFunc) error { + queryType, queryMethod := typeOrMethod(pkg.TypesInfo().Defs[id]) + if queryType == nil { + return bug.Errorf("can't find corresponding symbol for %q in package %q", id.Name, pkg) + } queryType = methodsets.EnsurePointer(queryType) var msets typeutil.MethodSetCache + matches := func(candidateType types.Type) bool { + // Test the direction of the relation. + // The client may request either direction or both + // (e.g. when the client is References), + // and the Result reports each test independently; + // both tests succeed when comparing identical + // interface types. 
+ var got methodsets.TypeRelation + if rel&methodsets.Supertype != 0 && implements(&msets, queryType, candidateType) { + got |= methodsets.Supertype + } + if rel&methodsets.Subtype != 0 && implements(&msets, candidateType, queryType) { + got |= methodsets.Subtype + } + return got != 0 + } + // Scan through all type declarations in the syntax. for _, pgf := range pkg.CompiledGoFiles() { for cur := range pgf.Cursor.Preorder((*ast.TypeSpec)(nil)) { spec := cur.Node().(*ast.TypeSpec) + if spec.Name == id { + continue // avoid self-comparison of query type + } def := pkg.TypesInfo().Defs[spec.Name] if def == nil { continue // "can't happen" for types @@ -388,37 +414,7 @@ func localImplementations(ctx context.Context, snapshot *cache.Snapshot, pkg *ca continue // skip type aliases to avoid duplicate reporting } candidateType := methodsets.EnsurePointer(def.Type()) - - // The historical behavior enshrined by this - // function rejects cases where both are - // (nontrivial) interface types, but this is - // useful information; see #68641 and CL 619719. - // TODO(adonovan): rescind this policy choice, - // and report I/I relationships, - // by deleting this continue statement. - // (It is also necessary to remove self-matches.) - // - // The same question appears in the global algorithm (methodsets). - xiface := types.IsInterface(queryType) - yiface := types.IsInterface(candidateType) - if xiface == yiface { - continue - } - - // Test the direction of the relation. - // The client may request either direction or both - // (e.g. when the client is References), - // and the Result reports each test independently; - // both tests succeed when comparing identical - // interface types. - var got methodsets.TypeRelation - if rel&methodsets.Supertype != 0 && implements(&msets, queryType, candidateType) { - got |= methodsets.Supertype - } - if rel&methodsets.Subtype != 0 && implements(&msets, candidateType, queryType) { - got |= methodsets.Subtype - } - if got&rel == 0 { + if !matches(candidateType) { continue } @@ -431,7 +427,7 @@ func localImplementations(ctx context.Context, snapshot *cache.Snapshot, pkg *ca isInterface := types.IsInterface(def.Type()) - if method == nil { + if queryMethod == nil { // Found matching type. loc := mustLocation(pgf, spec.Name) yield(pkg.Metadata().PkgPath, spec.Name.Name, isInterface, loc) @@ -446,7 +442,10 @@ func localImplementations(ctx context.Context, snapshot *cache.Snapshot, pkg *ca // but it's easier to walk the method set. for i := 0; i < mset.Len(); i++ { m := mset.At(i).Obj() - if m.Id() == method.Id() { + if m.Pos() == id.Pos() { + continue // avoid self-comparison of query method + } + if m.Id() == queryMethod.Id() { posn := safetoken.StartPosition(pkg.FileSet(), m.Pos()) loc, err := offsetToLocation(ctx, snapshot, posn.Filename, posn.Offset, posn.Offset+len(m.Name())) if err != nil { @@ -462,15 +461,9 @@ func localImplementations(ctx context.Context, snapshot *cache.Snapshot, pkg *ca // Special case: for types that satisfy error, // report error in builtin.go (see #59527). // - // Two inconsistencies: - // 1. we always report the type "error" - // even when the query was for the method "Error"; - // 2. we report error even when the query type was - // an interface, but according to our current policy, - // we never report I/I relations; see #68641 above. - // This will soon change, at which point we should - // check rel&methodsets.Supertype != 0 here. 
- if types.Implements(queryType, errorInterfaceType) { + // (An inconsistency: we always report the type error + // even when the query was for the method error.Error.) + if matches(errorType) { loc, err := errorLocation(ctx, snapshot) if err != nil { return err @@ -481,7 +474,7 @@ func localImplementations(ctx context.Context, snapshot *cache.Snapshot, pkg *ca return nil } -var errorInterfaceType = types.Universe.Lookup("error").Type().Underlying().(*types.Interface) +var errorType = types.Universe.Lookup("error").Type() // errorLocation returns the location of the 'error' type in builtin.go. func errorLocation(ctx context.Context, snapshot *cache.Snapshot) (protocol.Location, error) { diff --git a/gopls/internal/test/marker/testdata/implementation/basic.txt b/gopls/internal/test/marker/testdata/implementation/basic.txt index 7882437ccb6..dd440c5c7ed 100644 --- a/gopls/internal/test/marker/testdata/implementation/basic.txt +++ b/gopls/internal/test/marker/testdata/implementation/basic.txt @@ -19,8 +19,8 @@ type ImpS struct{} //@loc(ImpS, "ImpS"),implementation("ImpS", Laugher, OtherLau func (ImpS) Laugh() { //@loc(LaughS, "Laugh"),implementation("Laugh", Laugh, OtherLaugh) } -type Laugher interface { //@loc(Laugher, "Laugher"),implementation("Laugher", ImpP, OtherImpP, ImpS, OtherImpS, embedsImpP) - Laugh() //@loc(Laugh, "Laugh"),implementation("Laugh", LaughP, OtherLaughP, LaughS, OtherLaughS) +type Laugher interface { //@loc(Laugher, "Laugher"),implementation("Laugher", ImpP, OtherImpP, ImpS, OtherLaugher, OtherImpS, embedsImpP) + Laugh() //@loc(Laugh, "Laugh"),implementation("Laugh", LaughP, OtherLaughP, LaughS, OtherLaugh, OtherLaughS) } type Foo struct { //@implementation("Foo", Joker) @@ -37,7 +37,7 @@ func (cryer) Cry(other.CryType) {} //@loc(CryImpl, "Cry"),implementation("Cry", type Empty any //@implementation("Empty") -var _ interface{ Joke() } //@implementation("Joke", ImpJoker) +var _ interface{ Joke() } //@implementation("Joke", Joke, ImpJoker) type embedsImpP struct { //@loc(embedsImpP, "embedsImpP") ImpP //@implementation("ImpP", Laugher, OtherLaugher) diff --git a/gopls/internal/test/marker/testdata/implementation/generics-basicalias.txt b/gopls/internal/test/marker/testdata/implementation/generics-basicalias.txt index bd17a8a72ab..385f775db90 100644 --- a/gopls/internal/test/marker/testdata/implementation/generics-basicalias.txt +++ b/gopls/internal/test/marker/testdata/implementation/generics-basicalias.txt @@ -15,7 +15,7 @@ package a type C[T any] struct{} func (C[T]) F(rune, T) {} //@ loc(aCF, "F"), implementation("F", aIF, bIF) -type I[T any] interface{ F(int32, T) } //@ loc(aIF, "F"), implementation("F", aCF, bCF) +type I[T any] interface{ F(int32, T) } //@ loc(aIF, "F"), implementation("F", aCF, bCF, bIF) -- b/b.go -- package b @@ -23,4 +23,4 @@ package b type C[T any] struct{} func (C[T]) F(rune, T) {} //@ loc(bCF, "F"), implementation("F", aIF, bIF) -type I[T any] interface{ F(int32, T) } //@ loc(bIF, "F"), implementation("F", aCF, bCF) +type I[T any] interface{ F(int32, T) } //@ loc(bIF, "F"), implementation("F", aCF, aIF, bCF) diff --git a/gopls/internal/test/marker/testdata/implementation/generics.txt b/gopls/internal/test/marker/testdata/implementation/generics.txt index a526102890a..63908b53583 100644 --- a/gopls/internal/test/marker/testdata/implementation/generics.txt +++ b/gopls/internal/test/marker/testdata/implementation/generics.txt @@ -7,8 +7,8 @@ go 1.18 -- implementation/implementation.go -- package implementation -type GenIface[T any] interface { 
//@loc(GenIface, "GenIface"),implementation("GenIface", GC, GenConc, GenConcString) - F(int, string, T) //@loc(GenIfaceF, "F"),implementation("F", GCF, GenConcF) +type GenIface[T any] interface { //@loc(GenIface, "GenIface"),implementation("GenIface", GC, GenConc, GI, GIString, GenConcString) + F(int, string, T) //@loc(GenIfaceF, "F"),implementation("F", GCF, GenConcF, GIF) } type GenConc[U any] int //@loc(GenConc, "GenConc"),implementation("GenConc", GI, GIString, GenIface) @@ -20,11 +20,11 @@ type GenConcString struct{ GenConc[string] } //@loc(GenConcString, "GenConcStrin -- other/other.go -- package other -type GI[T any] interface { //@loc(GI, "GI"),implementation("GI", GenConc, GenConcString, GC) - F(int, string, T) //@loc(GIF, "F"),implementation("F", GenConcF, GCF) +type GI[T any] interface { //@loc(GI, "GI"),implementation("GI", GenConc, GenIface, GenConcString, GIString, GC) + F(int, string, T) //@loc(GIF, "F"),implementation("F", GenIfaceF, GenConcF, GCF) } -type GIString GI[string] //@loc(GIString, "GIString"),implementation("GIString", GenConcString, GenConc, GC) +type GIString GI[string] //@loc(GIString, "GIString"),implementation("GIString", GenConcString, GenIface, GenConc, GI, GC) type GC[U any] int //@loc(GC, "GC"),implementation("GC", GenIface, GI, GIString) diff --git a/gopls/internal/test/marker/testdata/implementation/issue68641.txt b/gopls/internal/test/marker/testdata/implementation/issue68641.txt new file mode 100644 index 00000000000..23f4de9d61c --- /dev/null +++ b/gopls/internal/test/marker/testdata/implementation/issue68641.txt @@ -0,0 +1,64 @@ +Regression test that Implementation(I) returns J even when I and J are +both interfaces; see issue #68641. Previously, interface/interface +matches were never reported. + +However, the direction of the query is determined by the concreteness +of the query type: Implements on a.B, an interface, reports types that +are assignable to it, a.C; but Implements on concrete a.impl reports +only interface types to which it may be assigned, and there is no way +to query from interface B to find the (wider) interface A. (This would +be a useful feature of LSP though; see +https://github.com/microsoft/language-server-protocol/issues/2037.) + +The test exercises both the local (intra-) and global (cross-package) +algorithms and checks that they are consistent. 
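In plain Go terms, the relationships exercised below have roughly this shape (a sketch only; the marker annotations in the test files remain the authoritative expectations, and the `example` package name is used here just to keep the sketch self-contained):

```go
package example

type A interface{ A() } // the widest interface

type B interface { // B embeds A
	A
	B()
}

type C interface { // C embeds B
	B
	C()
}

type impl int // concrete type satisfying C, and therefore B and A

func (impl) A() {}
func (impl) B() {}
func (impl) C() {}

// Implementation(B) is expected to report C and impl, the declarations
// assignable to B, but not the wider interface A; Implementation(impl)
// is expected to report the interfaces A, B, and C.
```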
+ +-- go.mod -- +module example.com +go 1.12 + +-- a/a.go -- +package a + +type A interface { //@ loc(aA, "A"), implementation("A", aB, aC, aimpl, bA, bB, bC, bimpl) + A() //@ loc(aAA, "A"), implementation("A", aimplA, bimplA, bAA) +} + +type B interface { //@ loc(aB, "B"), implementation("B", aC, aimpl, bB, bC, bimpl) + A + B() +} + +type C interface { //@ loc(aC, "C"), implementation("C", aimpl, bC, bimpl) + B + C() +} + +type impl int //@ loc(aimpl, "impl"), implementation("impl", aA, aB, aC, bA, bB, bC) + +func (impl) A() //@ loc(aimplA, "A") +func (impl) B() +func (impl) C() + +-- b/b.go -- +package b + +type A interface { //@ loc(bA, "A"), implementation("A", aA, aB, aC, aimpl, bB, bC, bimpl) + A() //@ loc(bAA, "A") +} + +type B interface { //@ loc(bB, "B"), implementation("B", aB, aC, aimpl, bC, bimpl) + A + B() +} + +type C interface { //@ loc(bC, "C"), implementation("C", aC, aimpl, bimpl) + B + C() +} + +type impl int //@ loc(bimpl, "impl"), implementation("impl", aA, aB, aC, bA, bB, bC) + +func (impl) A() //@ loc(bimplA, "A") +func (impl) B() +func (impl) C() diff --git a/gopls/internal/test/marker/testdata/typehierarchy/basic.txt b/gopls/internal/test/marker/testdata/typehierarchy/basic.txt index 5d7df964d2e..9b0c08ae52d 100644 --- a/gopls/internal/test/marker/testdata/typehierarchy/basic.txt +++ b/gopls/internal/test/marker/testdata/typehierarchy/basic.txt @@ -3,10 +3,6 @@ Basic test of type hierarchy. We pose the same queries across two identical packages to exercise the local and global algorithms. -TODO(adonovan): I and J are related by subtyping, but Implementations -refuses to report it and thus so does Type Hierarchy; see issue #68641 -and CL 619719. - TODO(adonovan): test other properties of the result, such as kind. -- go.mod -- @@ -26,12 +22,12 @@ func (S) F() {} func (S) G() {} //@subtypes(S) -//@subtypes(I, S, BS) -//@subtypes(J, S, BS) +//@subtypes(I, J, S, BI, BJ, BS) +//@subtypes(J, S, BJ, BS) //@supertypes(S, I, J, BI, BJ) -//@supertypes(I) -//@supertypes(J) +//@supertypes(I, BI) +//@supertypes(J, I, BI, BJ) -- b/b.go -- package b @@ -46,9 +42,9 @@ func (BS) F() {} func (BS) G() {} //@subtypes(BS) -//@subtypes(BI, BS, S) -//@subtypes(BJ, BS, S) +//@subtypes(BI, BJ, BS, I, J, S) +//@subtypes(BJ, BS, J, S) //@supertypes(BS, BI, BJ, I, J) -//@supertypes(BI) -//@supertypes(BJ) +//@supertypes(BI, I) +//@supertypes(BJ, BI, I, J) From fd3eb08ded2919ed8b732f67cd04fb74f773bc08 Mon Sep 17 00:00:00 2001 From: xieyuschen Date: Wed, 9 Apr 2025 19:28:36 +0800 Subject: [PATCH 191/270] gopls/internal/cache/parsego: new test case for fixed syntax This CL adds a new test case to verify the fixed syntax in parsego.Parse, which helps us better understand its behavior. Updates: golang/go#64335 Change-Id: I8bf93e43a1bb67853ca02e32232aef48159295d7 Reviewed-on: https://go-review.googlesource.com/c/tools/+/664095 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan Reviewed-by: Robert Findley Auto-Submit: Alan Donovan --- gopls/internal/cache/parsego/parse.go | 38 +++++----- gopls/internal/cache/parsego/parse_test.go | 87 ++++++++++++++++++++++ 2 files changed, 106 insertions(+), 19 deletions(-) diff --git a/gopls/internal/cache/parsego/parse.go b/gopls/internal/cache/parsego/parse.go index 4b37816caff..3ffa531735f 100644 --- a/gopls/internal/cache/parsego/parse.go +++ b/gopls/internal/cache/parsego/parse.go @@ -49,7 +49,7 @@ const ( // Parse parses a buffer of Go source, repairing the tree if necessary. // // The provided ctx is used only for logging. 
-func Parse(ctx context.Context, fset *token.FileSet, uri protocol.DocumentURI, src []byte, mode parser.Mode, purgeFuncBodies bool) (res *File, fixes []fixType) { +func Parse(ctx context.Context, fset *token.FileSet, uri protocol.DocumentURI, src []byte, mode parser.Mode, purgeFuncBodies bool) (res *File, fixes []FixType) { if purgeFuncBodies { src = astutil.PurgeFuncBodies(src) } @@ -147,13 +147,13 @@ func Parse(ctx context.Context, fset *token.FileSet, uri protocol.DocumentURI, s // // If fixAST returns true, the resulting AST is considered "fixed", meaning // positions have been mangled, and type checker errors may not make sense. -func fixAST(n ast.Node, tok *token.File, src []byte) (fixes []fixType) { +func fixAST(n ast.Node, tok *token.File, src []byte) (fixes []FixType) { var err error walkASTWithParent(n, func(n, parent ast.Node) bool { switch n := n.(type) { case *ast.BadStmt: if fixDeferOrGoStmt(n, parent, tok, src) { - fixes = append(fixes, fixedDeferOrGo) + fixes = append(fixes, FixedDeferOrGo) // Recursively fix in our fixed node. moreFixes := fixAST(parent, tok, src) fixes = append(fixes, moreFixes...) @@ -163,7 +163,7 @@ func fixAST(n ast.Node, tok *token.File, src []byte) (fixes []fixType) { return false case *ast.BadExpr: if fixArrayType(n, parent, tok, src) { - fixes = append(fixes, fixedArrayType) + fixes = append(fixes, FixedArrayType) // Recursively fix in our fixed node. moreFixes := fixAST(parent, tok, src) fixes = append(fixes, moreFixes...) @@ -177,7 +177,7 @@ func fixAST(n ast.Node, tok *token.File, src []byte) (fixes []fixType) { // for i := foo // if fixInitStmt(n, parent, tok, src) { - fixes = append(fixes, fixedInit) + fixes = append(fixes, FixedInit) } return false case *ast.SelectorExpr: @@ -186,7 +186,7 @@ func fixAST(n ast.Node, tok *token.File, src []byte) (fixes []fixType) { // foo.var<> // want to complete to "foo.variance" // if fixPhantomSelector(n, tok, src) { - fixes = append(fixes, fixedPhantomSelector) + fixes = append(fixes, FixedPhantomSelector) } return true @@ -196,7 +196,7 @@ func fixAST(n ast.Node, tok *token.File, src []byte) (fixes []fixType) { // Adjust closing curly brace of empty switch/select // statements so we can complete inside them. if fixEmptySwitch(n, tok, src) { - fixes = append(fixes, fixedEmptySwitch) + fixes = append(fixes, FixedEmptySwitch) } } @@ -235,24 +235,24 @@ func walkASTWithParent(n ast.Node, f func(n ast.Node, parent ast.Node) bool) { // TODO(rfindley): revert this intrumentation once we're certain the crash in // #59097 is fixed. -type fixType int +type FixType int const ( - noFix fixType = iota - fixedCurlies - fixedDanglingSelector - fixedDeferOrGo - fixedArrayType - fixedInit - fixedPhantomSelector - fixedEmptySwitch + noFix FixType = iota + FixedCurlies + FixedDanglingSelector + FixedDeferOrGo + FixedArrayType + FixedInit + FixedPhantomSelector + FixedEmptySwitch ) // fixSrc attempts to modify the file's source code to fix certain // syntax errors that leave the rest of the file unparsed. // // fixSrc returns a non-nil result if and only if a fix was applied. 
-func fixSrc(f *ast.File, tf *token.File, src []byte) (newSrc []byte, fix fixType) { +func fixSrc(f *ast.File, tf *token.File, src []byte) (newSrc []byte, fix FixType) { walkASTWithParent(f, func(n, parent ast.Node) bool { if newSrc != nil { return false @@ -262,12 +262,12 @@ func fixSrc(f *ast.File, tf *token.File, src []byte) (newSrc []byte, fix fixType case *ast.BlockStmt: newSrc = fixMissingCurlies(f, n, parent, tf, src) if newSrc != nil { - fix = fixedCurlies + fix = FixedCurlies } case *ast.SelectorExpr: newSrc = fixDanglingSelector(n, tf, src) if newSrc != nil { - fix = fixedDanglingSelector + fix = FixedDanglingSelector } } diff --git a/gopls/internal/cache/parsego/parse_test.go b/gopls/internal/cache/parsego/parse_test.go index c64125427b1..84a344cab52 100644 --- a/gopls/internal/cache/parsego/parse_test.go +++ b/gopls/internal/cache/parsego/parse_test.go @@ -6,12 +6,15 @@ package parsego_test import ( "context" + "fmt" "go/ast" "go/token" + "slices" "testing" "golang.org/x/tools/gopls/internal/cache/parsego" "golang.org/x/tools/gopls/internal/util/safetoken" + "golang.org/x/tools/internal/analysisinternal" "golang.org/x/tools/internal/tokeninternal" ) @@ -44,3 +47,87 @@ func _() { return true }) } + +func TestFixGoAndDefer_GoStmt(t *testing.T) { + var testCases = []struct { + source string + fixes []parsego.FixType + wantFix string + }{ + {source: "g", fixes: nil}, + {source: "go", fixes: nil}, + {source: "go a.b(", fixes: nil}, + {source: "go a.b()", fixes: nil}, + {source: "go func {", fixes: nil}, + { + source: "go f", + fixes: []parsego.FixType{parsego.FixedDeferOrGo}, + wantFix: "go f()", + }, + { + source: "go func", + fixes: []parsego.FixType{parsego.FixedDeferOrGo}, + wantFix: "go (func())()", + }, + { + source: "go func {}", + fixes: []parsego.FixType{parsego.FixedDeferOrGo}, + wantFix: "go (func())()", + }, + { + source: "go func {}(", + fixes: []parsego.FixType{parsego.FixedDeferOrGo}, + wantFix: "go (func())()", + }, + { + source: "go func {}()", + fixes: []parsego.FixType{parsego.FixedDeferOrGo}, + wantFix: "go (func())()", + }, + { + source: "go a.", + fixes: []parsego.FixType{parsego.FixedDeferOrGo, parsego.FixedDanglingSelector, parsego.FixedDeferOrGo}, + wantFix: "go a._()", + }, + { + source: "go a.b", + fixes: []parsego.FixType{parsego.FixedDeferOrGo}, + wantFix: "go a.b()", + }, + } + + for _, tc := range testCases { + t.Run(tc.source, func(t *testing.T) { + src := filesrc(tc.source) + pgf, fixes := parsego.Parse(context.Background(), token.NewFileSet(), "file://foo.go", src, parsego.Full, false) + if !slices.Equal(fixes, tc.fixes) { + t.Fatalf("TestFixGoAndDefer_GoStmt(): got %v want %v", fixes, tc.fixes) + } + fset := tokeninternal.FileSetFor(pgf.Tok) + check := func(n ast.Node) bool { + if n != nil { + posn := safetoken.StartPosition(fset, n.Pos()) + if !posn.IsValid() { + t.Fatalf("invalid position for %T (%v): %v not in [%d, %d]", n, n, n.Pos(), pgf.Tok.Base(), pgf.Tok.Base()+pgf.Tok.Size()) + } + if deferStmt, ok := n.(*ast.GoStmt); ok && tc.fixes != nil { + if got, want := fmt.Sprintf("go %s", analysisinternal.Format(fset, deferStmt.Call)), tc.wantFix; got != want { + t.Fatalf("TestFixGoAndDefer_GoStmt(): got %v want %v", got, want) + } + } + } + return true + } + ast.Inspect(pgf.File, check) + }) + } +} + +func filesrc(expressions string) []byte { + const srcTmpl = `package foo + +func _() { + %s +}` + return fmt.Appendf(nil, srcTmpl, expressions) +} From 9df6bbd7f9017b6a84c90a240562ad1bdc2064d3 Mon Sep 17 00:00:00 2001 From: xieyuschen Date: Mon, 7 
Apr 2025 19:15:16 +0800 Subject: [PATCH 192/270] gopls: hide todo inside readme.md This CL hides todo inside readme.md as they're left for developers. Change-Id: Ied183d38cfc3ae53706020da730ac963eb5930ea Reviewed-on: https://go-review.googlesource.com/c/tools/+/663415 Reviewed-by: Alan Donovan Reviewed-by: Robert Findley LUCI-TryBot-Result: Go LUCI Commit-Queue: Alan Donovan Auto-Submit: Alan Donovan --- gopls/README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/gopls/README.md b/gopls/README.md index 6602e0c27a7..e17184e0d51 100644 --- a/gopls/README.md +++ b/gopls/README.md @@ -20,8 +20,10 @@ supported in each client editor. To get started with `gopls`, install an LSP plugin in your editor of choice. + * [VS Code](https://github.com/golang/vscode-go/blob/master/README.md) * [Vim / Neovim](doc/vim.md) From 7829e07e5422c868b98bf2660355de1bdf93dd28 Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Mon, 7 Apr 2025 15:39:38 -0400 Subject: [PATCH 193/270] go/analysis/passes/testinggoroutine: used UsedIdent Replace and generalize some logic with typesinternal.UsedIdent. Change-Id: I971fed8a13f44add54da20685ef13d03c7218c13 Reviewed-on: https://go-review.googlesource.com/c/tools/+/663595 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan --- .../passes/testinggoroutine/testinggoroutine.go | 5 +++-- go/analysis/passes/testinggoroutine/util.go | 15 --------------- 2 files changed, 3 insertions(+), 17 deletions(-) diff --git a/go/analysis/passes/testinggoroutine/testinggoroutine.go b/go/analysis/passes/testinggoroutine/testinggoroutine.go index f49ac4eb1a0..360ba0e74d8 100644 --- a/go/analysis/passes/testinggoroutine/testinggoroutine.go +++ b/go/analysis/passes/testinggoroutine/testinggoroutine.go @@ -17,6 +17,7 @@ import ( "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/typesinternal" ) //go:embed doc.go @@ -186,7 +187,7 @@ func goAsyncCall(info *types.Info, goStmt *ast.GoStmt, toDecl func(*types.Func) call := goStmt.Call fun := ast.Unparen(call.Fun) - if id := funcIdent(fun); id != nil { + if id := typesinternal.UsedIdent(info, fun); id != nil { if lit := funcLitInScope(id); lit != nil { return &asyncCall{region: lit, async: goStmt, scope: nil, fun: fun} } @@ -217,7 +218,7 @@ func tRunAsyncCall(info *types.Info, call *ast.CallExpr) *asyncCall { return &asyncCall{region: lit, async: call, scope: lit, fun: fun} } - if id := funcIdent(fun); id != nil { + if id := typesinternal.UsedIdent(info, fun); id != nil { if lit := funcLitInScope(id); lit != nil { // function lit in variable? return &asyncCall{region: lit, async: call, scope: lit, fun: fun} } diff --git a/go/analysis/passes/testinggoroutine/util.go b/go/analysis/passes/testinggoroutine/util.go index 88e77fb4fc4..db2e5f76d14 100644 --- a/go/analysis/passes/testinggoroutine/util.go +++ b/go/analysis/passes/testinggoroutine/util.go @@ -8,8 +8,6 @@ import ( "go/ast" "go/types" "slices" - - "golang.org/x/tools/internal/typeparams" ) // AST and types utilities that not specific to testinggoroutines. @@ -52,19 +50,6 @@ func isMethodNamed(f *types.Func, pkgPath string, names ...string) bool { return slices.Contains(names, f.Name()) } -func funcIdent(fun ast.Expr) *ast.Ident { - switch fun := ast.Unparen(fun).(type) { - case *ast.IndexExpr, *ast.IndexListExpr: - x, _, _, _ := typeparams.UnpackIndexExpr(fun) // necessary? 
- id, _ := x.(*ast.Ident) - return id - case *ast.Ident: - return fun - default: - return nil - } -} - // funcLitInScope returns a FuncLit that id is at least initially assigned to. // // TODO: This is closely tied to id.Obj which is deprecated. From d363f109f07bee18d117ef79c884ff3ec5f0ad1d Mon Sep 17 00:00:00 2001 From: Alberto Fanjul Date: Fri, 11 Apr 2025 11:47:57 +0000 Subject: [PATCH 194/270] gopls/doc: fix config for vim-lsp Avoid duplicate results if name and and server info do not match Change-Id: I9994c8136ec58fd84eccd5a236dc4160dde3395a GitHub-Last-Rev: 451da6019d5a56b1cbff8aaf271f2f203919e5d0 GitHub-Pull-Request: golang/tools#569 Reviewed-on: https://go-review.googlesource.com/c/tools/+/662555 Reviewed-by: Sean Liao Auto-Submit: Sean Liao Reviewed-by: Carlos Amedee Reviewed-by: Alberto Fanjul Alonso LUCI-TryBot-Result: Go LUCI Reviewed-by: Dmitri Shuralyov --- gopls/doc/vim.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gopls/doc/vim.md b/gopls/doc/vim.md index 444a7d6ff31..eedac5925f4 100644 --- a/gopls/doc/vim.md +++ b/gopls/doc/vim.md @@ -56,7 +56,7 @@ Use [prabirshrestha/vim-lsp], with the following configuration: augroup LspGo au! autocmd User lsp_setup call lsp#register_server({ - \ 'name': 'go-lang', + \ 'name': 'gopls', \ 'cmd': {server_info->['gopls']}, \ 'whitelist': ['go'], \ }) From 7ceff1355d9d7ec7f8404fb670ac112a46681ff6 Mon Sep 17 00:00:00 2001 From: Damien Neil Date: Fri, 11 Apr 2025 14:31:32 -0700 Subject: [PATCH 195/270] go/analysis/passes/structtag: ignore findings for "encoding/..." Don't warn on structtag findings for any of the new packages added as part of the json v2 project. Just skip all of "encoding/..." on the theory that if anything in std is using invalid tags, it has a good reason. Change-Id: Ie9fbbe9ef1be7ea574c74824a26951501f6c5e2d Reviewed-on: https://go-review.googlesource.com/c/tools/+/665055 LUCI-TryBot-Result: Go LUCI Reviewed-by: Dmitri Shuralyov Reviewed-by: Dmitri Shuralyov --- go/analysis/passes/structtag/structtag.go | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/go/analysis/passes/structtag/structtag.go b/go/analysis/passes/structtag/structtag.go index da4afd1b232..13a9997316e 100644 --- a/go/analysis/passes/structtag/structtag.go +++ b/go/analysis/passes/structtag/structtag.go @@ -89,8 +89,7 @@ var checkTagSpaces = map[string]bool{"json": true, "xml": true, "asn1": true} // checkCanonicalFieldTag checks a single struct field tag. func checkCanonicalFieldTag(pass *analysis.Pass, field *types.Var, tag string, seen *namesSeen) { - switch pass.Pkg.Path() { - case "encoding/json", "encoding/json/v2", "encoding/xml": + if strings.HasPrefix(pass.Pkg.Path(), "encoding/") { // These packages know how to use their own APIs. // Sometimes they are testing what happens to incorrect programs. return From fd6857200264c194748febb654947cd468d4daec Mon Sep 17 00:00:00 2001 From: Peter Weinberger Date: Fri, 11 Apr 2025 15:43:16 -0400 Subject: [PATCH 196/270] internal/stdlib: find api directory more robustly stdlib contains precomputed information about exported symbols in the standard library. The old code looked in runtime.GOROOT()/api for this information. This works on many systems, but not always. The new code runs go env, and looks first in GOROOT/api, and failing that, in GOPATH/api. (The situation seems to depend on mysterious details of the history of the user's Go environment.) This CL is on the path to adding function snippets for the standard library, for the new unimported completions code. 
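For illustration only, the lookup described above amounts to roughly the
following standalone sketch (this is not the CL's generate.go code; the
name findAPIDir is invented here and error handling is simplified):

	package main

	import (
		"fmt"
		"os"
		"os/exec"
		"path/filepath"
		"strings"
	)

	// findAPIDir returns the first of GOROOT/api or GOPATH/api that
	// exists as a directory, preferring GOROOT. "go env GOROOT GOPATH"
	// prints one value per line, in the order requested.
	func findAPIDir() (string, error) {
		out, err := exec.Command("go", "env", "GOROOT", "GOPATH").Output()
		if err != nil {
			return "", err
		}
		for _, root := range strings.Split(strings.TrimSpace(string(out)), "\n") {
			dir := filepath.Join(strings.TrimSpace(root), "api")
			if info, err := os.Stat(dir); err == nil && info.IsDir() {
				return dir, nil
			}
		}
		return "", fmt.Errorf("no api directory found under GOROOT or GOPATH")
	}

	func main() {
		dir, err := findAPIDir()
		if err != nil {
			fmt.Fprintln(os.Stderr, err)
			os.Exit(1)
		}
		fmt.Println(dir)
	}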
Change-Id: If2853a3f26edf9d0f1603563104592b95eb466a1 Reviewed-on: https://go-review.googlesource.com/c/tools/+/664995 LUCI-TryBot-Result: Go LUCI Auto-Submit: Alan Donovan Reviewed-by: Alan Donovan Commit-Queue: Alan Donovan --- internal/stdlib/generate.go | 33 ++++++++++++++++++++++++++++----- internal/stdlib/manifest.go | 29 +++++++++++++++++++++++++++++ 2 files changed, 57 insertions(+), 5 deletions(-) diff --git a/internal/stdlib/generate.go b/internal/stdlib/generate.go index cfef0a2438f..b70ed475eb9 100644 --- a/internal/stdlib/generate.go +++ b/internal/stdlib/generate.go @@ -29,7 +29,6 @@ import ( "os/exec" "path/filepath" "regexp" - "runtime" "slices" "strings" @@ -37,13 +36,15 @@ import ( ) func main() { - manifest() + log.SetFlags(log.Lshortfile) // to identify the source of the log messages + dir := apidir() + manifest(dir) deps() } // -- generate std manifest -- -func manifest() { +func manifest(apidir string) { pkgs := make(map[string]map[string]symInfo) // package -> symbol -> info symRE := regexp.MustCompile(`^pkg (\S+).*?, (var|func|type|const|method \([^)]*\)) ([\pL\p{Nd}_]+)(.*)`) @@ -111,7 +112,7 @@ func manifest() { if minor > 0 { base = fmt.Sprintf("go1.%d.txt", minor) } - filename := filepath.Join(runtime.GOROOT(), "api", base) + filename := filepath.Join(apidir, base) data, err := os.ReadFile(filename) if err != nil { if errors.Is(err, fs.ErrNotExist) { @@ -119,7 +120,7 @@ func manifest() { // Synthesize one final file from any api/next/*.txt fragments. // (They are consolidated into a go1.%d file some time between // the freeze and the first release candidate.) - filenames, err := filepath.Glob(filepath.Join(runtime.GOROOT(), "api/next/*.txt")) + filenames, err := filepath.Glob(filepath.Join(apidir, "next", "*.txt")) if err != nil { log.Fatal(err) } @@ -177,6 +178,28 @@ var PackageSymbols = map[string][]Symbol{ } } +// find the api directory, In most situations it is in GOROOT/api, but not always. +// TODO(pjw): understand where it might be, and if there could be newer and older versions +func apidir() string { + stdout := new(bytes.Buffer) + cmd := exec.Command("go", "env", "GOROOT", "GOPATH") + cmd.Stdout = stdout + cmd.Stderr = os.Stderr + if err := cmd.Run(); err != nil { + log.Fatal(err) + } + // Prefer GOROOT/api over GOPATH/api. + for line := range strings.SplitSeq(stdout.String(), "\n") { + apidir := filepath.Join(line, "api") + info, err := os.Stat(apidir) + if err == nil && info.IsDir() { + return apidir + } + } + log.Fatal("could not find api dir") + return "" +} + type symInfo struct { kind string // e.g. 
"func" minor int // go1.%d diff --git a/internal/stdlib/manifest.go b/internal/stdlib/manifest.go index 2b418796abb..08838a0eb04 100644 --- a/internal/stdlib/manifest.go +++ b/internal/stdlib/manifest.go @@ -988,6 +988,7 @@ var PackageSymbols = map[string][]Symbol{ {"Conn", Type, 0}, {"ConnectionState", Type, 0}, {"ConnectionState.CipherSuite", Field, 0}, + {"ConnectionState.CurveID", Field, 25}, {"ConnectionState.DidResume", Field, 1}, {"ConnectionState.ECHAccepted", Field, 23}, {"ConnectionState.HandshakeComplete", Field, 0}, @@ -2682,6 +2683,7 @@ var PackageSymbols = map[string][]Symbol{ {"PT_OPENBSD_WXNEEDED", Const, 16}, {"PT_PAX_FLAGS", Const, 16}, {"PT_PHDR", Const, 0}, + {"PT_RISCV_ATTRIBUTES", Const, 25}, {"PT_S390_PGSTE", Const, 16}, {"PT_SHLIB", Const, 0}, {"PT_SUNWSTACK", Const, 16}, @@ -3743,6 +3745,7 @@ var PackageSymbols = map[string][]Symbol{ {"SHT_PROGBITS", Const, 0}, {"SHT_REL", Const, 0}, {"SHT_RELA", Const, 0}, + {"SHT_RISCV_ATTRIBUTES", Const, 25}, {"SHT_SHLIB", Const, 0}, {"SHT_STRTAB", Const, 0}, {"SHT_SYMTAB", Const, 0}, @@ -6265,11 +6268,13 @@ var PackageSymbols = map[string][]Symbol{ {"(*Var).Exported", Method, 5}, {"(*Var).Id", Method, 5}, {"(*Var).IsField", Method, 5}, + {"(*Var).Kind", Method, 25}, {"(*Var).Name", Method, 5}, {"(*Var).Origin", Method, 19}, {"(*Var).Parent", Method, 5}, {"(*Var).Pkg", Method, 5}, {"(*Var).Pos", Method, 5}, + {"(*Var).SetKind", Method, 25}, {"(*Var).String", Method, 5}, {"(*Var).Type", Method, 5}, {"(Checker).ObjectOf", Method, 5}, @@ -6284,6 +6289,7 @@ var PackageSymbols = map[string][]Symbol{ {"(TypeAndValue).IsType", Method, 5}, {"(TypeAndValue).IsValue", Method, 5}, {"(TypeAndValue).IsVoid", Method, 5}, + {"(VarKind).String", Method, 25}, {"Alias", Type, 22}, {"ArgumentError", Type, 18}, {"ArgumentError.Err", Field, 18}, @@ -6327,6 +6333,7 @@ var PackageSymbols = map[string][]Symbol{ {"Eval", Func, 5}, {"ExprString", Func, 5}, {"FieldVal", Const, 5}, + {"FieldVar", Const, 25}, {"Float32", Const, 5}, {"Float64", Const, 5}, {"Func", Type, 5}, @@ -6373,7 +6380,9 @@ var PackageSymbols = map[string][]Symbol{ {"IsUnsigned", Const, 5}, {"IsUntyped", Const, 5}, {"Label", Type, 5}, + {"LocalVar", Const, 25}, {"LookupFieldOrMethod", Func, 5}, + {"LookupSelection", Func, 25}, {"Map", Type, 5}, {"MethodExpr", Const, 5}, {"MethodSet", Type, 5}, @@ -6413,11 +6422,15 @@ var PackageSymbols = map[string][]Symbol{ {"Object", Type, 5}, {"ObjectString", Func, 5}, {"Package", Type, 5}, + {"PackageVar", Const, 25}, + {"ParamVar", Const, 25}, {"PkgName", Type, 5}, {"Pointer", Type, 5}, {"Qualifier", Type, 5}, {"RecvOnly", Const, 5}, + {"RecvVar", Const, 25}, {"RelativeTo", Func, 5}, + {"ResultVar", Const, 25}, {"Rune", Const, 5}, {"Satisfies", Func, 20}, {"Scope", Type, 5}, @@ -6466,6 +6479,7 @@ var PackageSymbols = map[string][]Symbol{ {"UntypedRune", Const, 5}, {"UntypedString", Const, 5}, {"Var", Type, 5}, + {"VarKind", Type, 25}, {"WriteExpr", Func, 5}, {"WriteSignature", Func, 5}, {"WriteType", Func, 5}, @@ -7119,6 +7133,7 @@ var PackageSymbols = map[string][]Symbol{ {"FormatFileInfo", Func, 21}, {"Glob", Func, 16}, {"GlobFS", Type, 16}, + {"Lstat", Func, 25}, {"ModeAppend", Const, 16}, {"ModeCharDevice", Const, 16}, {"ModeDevice", Const, 16}, @@ -7143,6 +7158,8 @@ var PackageSymbols = map[string][]Symbol{ {"ReadDirFile", Type, 16}, {"ReadFile", Func, 16}, {"ReadFileFS", Type, 16}, + {"ReadLink", Func, 25}, + {"ReadLinkFS", Type, 25}, {"SkipAll", Var, 20}, {"SkipDir", Var, 16}, {"Stat", Func, 16}, @@ -7899,6 +7916,7 @@ var PackageSymbols = 
map[string][]Symbol{ {"(*Writer).WriteField", Method, 0}, {"ErrMessageTooLarge", Var, 9}, {"File", Type, 0}, + {"FileContentDisposition", Func, 25}, {"FileHeader", Type, 0}, {"FileHeader.Filename", Field, 0}, {"FileHeader.Header", Field, 0}, @@ -9146,17 +9164,25 @@ var PackageSymbols = map[string][]Symbol{ {"(*ProcessState).SysUsage", Method, 0}, {"(*ProcessState).SystemTime", Method, 0}, {"(*ProcessState).UserTime", Method, 0}, + {"(*Root).Chmod", Method, 25}, + {"(*Root).Chown", Method, 25}, + {"(*Root).Chtimes", Method, 25}, {"(*Root).Close", Method, 24}, {"(*Root).Create", Method, 24}, {"(*Root).FS", Method, 24}, + {"(*Root).Lchown", Method, 25}, + {"(*Root).Link", Method, 25}, {"(*Root).Lstat", Method, 24}, {"(*Root).Mkdir", Method, 24}, {"(*Root).Name", Method, 24}, {"(*Root).Open", Method, 24}, {"(*Root).OpenFile", Method, 24}, {"(*Root).OpenRoot", Method, 24}, + {"(*Root).Readlink", Method, 25}, {"(*Root).Remove", Method, 24}, + {"(*Root).Rename", Method, 25}, {"(*Root).Stat", Method, 24}, + {"(*Root).Symlink", Method, 25}, {"(*SyscallError).Error", Method, 0}, {"(*SyscallError).Timeout", Method, 10}, {"(*SyscallError).Unwrap", Method, 13}, @@ -10250,6 +10276,7 @@ var PackageSymbols = map[string][]Symbol{ {"(*RWMutex).Unlock", Method, 0}, {"(*WaitGroup).Add", Method, 0}, {"(*WaitGroup).Done", Method, 0}, + {"(*WaitGroup).Go", Method, 25}, {"(*WaitGroup).Wait", Method, 0}, {"Cond", Type, 0}, {"Cond.L", Field, 0}, @@ -16754,9 +16781,11 @@ var PackageSymbols = map[string][]Symbol{ }, "testing/fstest": { {"(MapFS).Glob", Method, 16}, + {"(MapFS).Lstat", Method, 25}, {"(MapFS).Open", Method, 16}, {"(MapFS).ReadDir", Method, 16}, {"(MapFS).ReadFile", Method, 16}, + {"(MapFS).ReadLink", Method, 25}, {"(MapFS).Stat", Method, 16}, {"(MapFS).Sub", Method, 16}, {"MapFS", Type, 16}, From 7e7983f54530cfd9a17c8f35205f724d2c651432 Mon Sep 17 00:00:00 2001 From: Madeline Kalil Date: Tue, 25 Mar 2025 12:22:27 -0400 Subject: [PATCH 197/270] gopls/internal/golang: fix extract with free control statements Extract generates invalid code when the extracted block contains free control statements (a goto, continue, or break statement whose continuation lies outside the extracted block). This CL implements a check for free control statements inside the extracted block, and replaces them with a return statement that returns an integer value. The new function call accepts this return value and uses it in a switch statement where each case contains a control statement from the extracted block. See extract.go for a commented example. 
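In condensed form (taken from the commented example this CL adds to
extract.go, where the === lines mark the selected range and 1 is the
control value chosen for the single free "continue"), the
transformation looks like this:

	// Before:
	func f(cond bool) {
		for range "abc" {
			// ==============
			if cond {
				continue
			}
			// ==============
			println(0)
		}
	}

	// After:
	func f(cond bool) {
		for range "abc" {
			ctrl := newFunction(cond)
			switch ctrl {
			case 1:
				continue
			}
			println(0)
		}
	}

	func newFunction(cond bool) int {
		if cond {
			return 1
		}
		return 0
	}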
Fixes golang/go#63394 Change-Id: I1279c1fdf657bd53542a47440175a14c8328b3af Reviewed-on: https://go-review.googlesource.com/c/tools/+/660615 Reviewed-by: Alan Donovan Reviewed-by: Robert Findley LUCI-TryBot-Result: Go LUCI --- gopls/internal/golang/extract.go | 267 +++++++++++++++++- .../testdata/codeaction/extract_control.txt | 259 +++++++++++++++++ 2 files changed, 520 insertions(+), 6 deletions(-) create mode 100644 gopls/internal/test/marker/testdata/codeaction/extract_control.txt diff --git a/gopls/internal/golang/extract.go b/gopls/internal/golang/extract.go index f73e772e676..18a64dac44b 100644 --- a/gopls/internal/golang/extract.go +++ b/gopls/internal/golang/extract.go @@ -15,6 +15,7 @@ import ( "go/types" "slices" "sort" + "strconv" "strings" "text/scanner" @@ -914,6 +915,123 @@ func extractFunctionMethod(cpkg *cache.Package, pgf *parsego.File, start, end to } } + // Determine if the extracted block contains any free branch statements, for + // example: "continue label" where "label" is declared outside of the + // extracted block, or continue inside a "for" statement where the for + // statement is declared outside of the extracted block. + + // If the extracted block contains free branch statements, we add another + // return value "ctrl" to the extracted function that will be used to + // determine the control flow. See the following example, where === denotes + // the range to be extracted. + // + // Before: + // func f(cond bool) { + // for range "abc" { + // ============== + // if cond { + // continue + // } + // ============== + // println(0) + // } + // } + + // After: + // func f(cond bool) { + // for range "abc" { + // ctrl := newFunction(cond) + // switch ctrl { + // case 1: + // continue + // } + // println(0) + // } + // } + // + // func newFunction(cond bool) int { + // if cond { + // return 1 + // } + // return 0 + // } + // + + curSel, _ := pgf.Cursor.FindPos(start, end) // since canExtractFunction succeeded, this will always return a valid cursor + freeBranches := freeBranches(info, curSel, start, end) + + // Generate an unused identifier for the control value. + ctrlVar, _ := freshName(info, file, start, "ctrl", 0) + if len(freeBranches) > 0 { + + zeroValExpr := &ast.BasicLit{ + Kind: token.INT, + Value: "0", + } + var branchStmts []*ast.BranchStmt + var stack []ast.Node + // Add the zero "ctrl" value to each return statement in the extracted block. + ast.Inspect(extractedBlock, func(n ast.Node) bool { + if n != nil { + stack = append(stack, n) + } else { + stack = stack[:len(stack)-1] + } + switch n := n.(type) { + case *ast.ReturnStmt: + n.Results = append(n.Results, zeroValExpr) + case *ast.BranchStmt: + // Collect a list of branch statements in the extracted block to examine later. + if isFreeBranchStmt(stack) { + branchStmts = append(branchStmts, n) + } + case *ast.FuncLit: + // Don't descend into nested functions. When we return false + // here, ast.Inspect does not give us a "pop" event when leaving + // the subtree, so we need to pop here. (golang/go#73319) + stack = stack[:len(stack)-1] + return false + } + return true + }) + + // Construct a return statement to replace each free branch statement in the extracted block. It should have + // zero values for all return parameters except one, "ctrl", which dictates which continuation to follow. + var freeCtrlStmtReturns []ast.Expr + // Create "zero values" for each type. 
+ for _, returnType := range returnTypes { + var val ast.Expr + var isValid bool + for obj, typ := range seenVars { + if typ == returnType.Type { + val, isValid = typesinternal.ZeroExpr(obj.Type(), qual) + break + } + } + if !isValid { + return nil, nil, fmt.Errorf("could not find matching AST expression for %T", returnType.Type) + } + freeCtrlStmtReturns = append(freeCtrlStmtReturns, val) + } + freeCtrlStmtReturns = append(freeCtrlStmtReturns, getZeroVals(retVars)...) + + for i, branchStmt := range branchStmts { + replaceBranchStmtWithReturnStmt(extractedBlock, branchStmt, &ast.ReturnStmt{ + Return: branchStmt.Pos(), + Results: append(slices.Clip(freeCtrlStmtReturns), &ast.BasicLit{ + Kind: token.INT, + Value: strconv.Itoa(i + 1), // start with 1 because 0 is reserved for base case + }), + }) + + } + retVars = append(retVars, &returnVariable{ + name: ast.NewIdent(ctrlVar), + decl: &ast.Field{Type: ast.NewIdent("int")}, + zeroVal: zeroValExpr, + }) + } + // Add a return statement to the end of the new function. This return statement must include // the values for the types of the original extracted function signature and (if a return // statement is present in the selection) enclosing function signature. @@ -1042,6 +1160,22 @@ func extractFunctionMethod(cpkg *cache.Package, pgf *parsego.File, start, end to strings.ReplaceAll(ifBuf.String(), "\n", newLineIndent) fullReplacement.WriteString(ifstatement) } + + // Add the switch statement for free branch statements after the new function call. + if len(freeBranches) > 0 { + fmt.Fprintf(&fullReplacement, "%[1]sswitch %[2]s {%[1]s", newLineIndent, ctrlVar) + for i, br := range freeBranches { + // Preserve spacing at the beginning of the line containing the branch statement. + startPos := tok.LineStart(safetoken.Line(tok, br.Pos())) + start, end, err := safetoken.Offsets(tok, startPos, br.End()) + if err != nil { + return nil, nil, err + } + fmt.Fprintf(&fullReplacement, "case %d:\n%s%s", i+1, pgf.Src[start:end], newLineIndent) + } + fullReplacement.WriteString("}") + } + fullReplacement.Write(after) fullReplacement.WriteString("\n\n") // add newlines after the enclosing function fullReplacement.Write(newFuncBuf.Bytes()) // insert the extracted function @@ -1271,6 +1405,9 @@ func collectFreeVars(info *types.Info, file *ast.File, start, end token.Pos, nod var obj types.Object var isFree, prune bool switch n := n.(type) { + case *ast.BranchStmt: + // Avoid including labels attached to branch statements. + return false case *ast.Ident: obj, isFree = id(n) case *ast.SelectorExpr: @@ -1706,8 +1843,8 @@ func varNameForType(t types.Type) (string, bool) { return AbbreviateVarName(typeName), true } -// adjustReturnStatements adds "zero values" of the given types to each return statement -// in the given AST node. +// adjustReturnStatements adds "zero values" of the given types to each return +// statement in the given AST node. func adjustReturnStatements(returnTypes []*ast.Field, seenVars map[types.Object]ast.Expr, extractedBlock *ast.BlockStmt, qual types.Qualifier) error { var zeroVals []ast.Expr // Create "zero values" for each type. 
@@ -1715,11 +1852,10 @@ func adjustReturnStatements(returnTypes []*ast.Field, seenVars map[types.Object] var val ast.Expr var isValid bool for obj, typ := range seenVars { - if typ != returnType.Type { - continue + if typ == returnType.Type { + val, isValid = typesinternal.ZeroExpr(obj.Type(), qual) + break } - val, isValid = typesinternal.ZeroExpr(obj.Type(), qual) - break } if !isValid { return fmt.Errorf("could not find matching AST expression for %T", returnType.Type) @@ -1860,3 +1996,122 @@ func cond[T any](cond bool, t, f T) T { return f } } + +// replaceBranchStmtWithReturnStmt modifies the ast node to replace the given +// branch statement with the given return statement. +func replaceBranchStmtWithReturnStmt(block ast.Node, br *ast.BranchStmt, ret *ast.ReturnStmt) { + ast.Inspect(block, func(n ast.Node) bool { + // Look for the branch statement within a BlockStmt or CaseClause. + switch n := n.(type) { + case *ast.BlockStmt: + for i, stmt := range n.List { + if stmt == br { + n.List[i] = ret + return false + } + } + case *ast.CaseClause: + for i, stmt := range n.Body { + if stmt.Pos() == br.Pos() { + n.Body[i] = ret + return false + } + } + } + return true + }) +} + +// freeBranches returns all branch statements beneath cur whose continuation +// lies outside the (start, end) range. +func freeBranches(info *types.Info, cur cursor.Cursor, start, end token.Pos) (free []*ast.BranchStmt) { +nextBranch: + for curBr := range cur.Preorder((*ast.BranchStmt)(nil)) { + br := curBr.Node().(*ast.BranchStmt) + if br.End() < start || br.Pos() > end { + continue + } + label, _ := info.Uses[br.Label].(*types.Label) + if label != nil && !(start <= label.Pos() && label.Pos() <= end) { + free = append(free, br) + continue + } + if br.Tok == token.BREAK || br.Tok == token.CONTINUE { + filter := []ast.Node{ + (*ast.ForStmt)(nil), + (*ast.RangeStmt)(nil), + (*ast.SwitchStmt)(nil), + (*ast.TypeSwitchStmt)(nil), + (*ast.SelectStmt)(nil), + } + // Find innermost relevant ancestor for break/continue. + for curAncestor := range curBr.Ancestors(filter...) { + if l, ok := curAncestor.Parent().Node().(*ast.LabeledStmt); ok && + label != nil && + l.Label.Name == label.Name() { + continue + } + switch n := curAncestor.Node().(type) { + case *ast.ForStmt, *ast.RangeStmt: + if n.Pos() < start { + free = append(free, br) + } + continue nextBranch + case *ast.SwitchStmt, *ast.TypeSwitchStmt, *ast.SelectStmt: + if br.Tok == token.BREAK { + if n.Pos() < start { + free = append(free, br) + } + continue nextBranch + } + } + } + } + } + return +} + +// isFreeBranchStmt returns true if the relevant ancestor for the branch +// statement at stack[len(stack)-1] cannot be found in the stack. This is used +// when we are examining the extracted block, since type information isn't +// available. We need to find the location of the label without using +// types.Info. +func isFreeBranchStmt(stack []ast.Node) bool { + switch node := stack[len(stack)-1].(type) { + case *ast.BranchStmt: + isLabeled := node.Label != nil + switch node.Tok { + case token.GOTO: + if isLabeled { + return !enclosingLabel(stack, node.Label.Name) + } + case token.BREAK, token.CONTINUE: + // Find innermost relevant ancestor for break/continue. 
+ for i := len(stack) - 2; i >= 0; i-- { + n := stack[i] + if isLabeled { + l, ok := n.(*ast.LabeledStmt) + if !(ok && l.Label.Name == node.Label.Name) { + continue + } + } + switch n.(type) { + case *ast.ForStmt, *ast.RangeStmt, *ast.SwitchStmt, *ast.TypeSwitchStmt, *ast.SelectStmt: + return false + } + } + } + } + // We didn't find the relevant ancestor on the path, so this must be a free branch statement. + return true +} + +// enclosingLabel returns true if the given label is found on the stack. +func enclosingLabel(stack []ast.Node, label string) bool { + for _, n := range stack { + if labelStmt, ok := n.(*ast.LabeledStmt); ok && labelStmt.Label.Name == label { + return true + } + } + return false +} diff --git a/gopls/internal/test/marker/testdata/codeaction/extract_control.txt b/gopls/internal/test/marker/testdata/codeaction/extract_control.txt new file mode 100644 index 00000000000..844bc87b31d --- /dev/null +++ b/gopls/internal/test/marker/testdata/codeaction/extract_control.txt @@ -0,0 +1,259 @@ +This test verifies various behaviors of function extraction involving free control statements. + +-- go.mod -- +module mod.test/extract + +go 1.18 + +-- freecontrol.go -- +package extract + +//@codeaction(ifCondContinue, "refactor.extract.function", edit=freeControl1) +//@codeaction(ifCondGotoLabel, "refactor.extract.function", edit=freeControl2) +//@codeaction(ifCondGotoLabelWithLabel, "refactor.extract.function", edit=freeControl3) +//@codeaction(multipleCtrl, "refactor.extract.function", edit=freeControl4) +//@codeaction(multipleCtrlNotAllSelected, "refactor.extract.function", edit=freeControl5) +//@codeaction(ctrlVarExists, "refactor.extract.function", edit=freeControl6) +//@codeaction(twoReturns, "refactor.extract.function", edit=freeControl7) +//@codeaction(forWithLabel, "refactor.extract.function", edit=freeControl8) + +func FuncContinue(cond bool) { + for range "abc" { + if cond { //@ loc(ifCondContinue, re`(?s)if.*println.0.`) + continue + } + println(0) + } +} + +func FuncGoTo(cond bool) { + for range "abc" { + if cond { //@ loc(ifCondGotoLabel, re`(?s)if.*println.1.`), loc(ifCondGotoLabelWithLabel, re`(?s)if.*goto.label1....`) + goto label1 + } + label1: + println(1) + } +} + +func FuncMultipleCtrl(x int) { + for range "abc" { + if x < 10 { //@ loc(multipleCtrl, re`(?s)if.x.*return...next1....`), loc(multipleCtrlNotAllSelected, re`(?s)if.x.*break....`) + continue + } + if x > 2 { + break + } + if x == 1 { + return //next1 + } + } +} + +func FuncCtrlVarExists(x int) { + ctrl := "abc" + for range ctrl { + if x < 10 { //@ loc(ctrlVarExists, re`(?s)if.x.*continue...next2....`) + continue //next2 + } + } +} + +func FuncTwoReturns(x int) int { + outer: + for range "abc" { + if x < 10 { //@ loc(twoReturns, re`(?s)if.x.*return.1....`) + return 0 + } + test := x - 4 + if test > 2 { + continue + } + if test == 10 { + return 1 + } + + for range "def" { //@ loc(forWithLabel, re`(?s)for.*outer.........`) + if x < 2 { + continue + } + if x > 10 { + continue outer + } + } + } + return 0 +} +-- @freeControl1/freecontrol.go -- +@@ -14 +14,3 @@ +- if cond { //@ loc(ifCondContinue, re`(?s)if.*println.0.`) ++ ctrl := newFunction(cond) ++ switch ctrl { ++ case 1: +@@ -17 +19 @@ +- println(0) +@@ -21 +22,8 @@ ++func newFunction(cond bool) int { ++ if cond { //@ loc(ifCondContinue, re`(?s)if.*println.0.`) ++ return 1 ++ } ++ println(0) ++ return 0 ++} ++ +-- @freeControl2/freecontrol.go -- +@@ -23,5 +23 @@ +- if cond { //@ loc(ifCondGotoLabel, re`(?s)if.*println.1.`), loc(ifCondGotoLabelWithLabel, 
re`(?s)if.*goto.label1....`) +- goto label1 +- } +- label1: +- println(1) ++ newFunction(cond) +@@ -31 +27,8 @@ ++func newFunction(cond bool) { ++ if cond { //@ loc(ifCondGotoLabel, re`(?s)if.*println.1.`), loc(ifCondGotoLabelWithLabel, re`(?s)if.*goto.label1....`) ++ goto label1 ++ } ++label1: ++ println(1) ++} ++ +-- @freeControl3/freecontrol.go -- +@@ -23 +23,3 @@ +- if cond { //@ loc(ifCondGotoLabel, re`(?s)if.*println.1.`), loc(ifCondGotoLabelWithLabel, re`(?s)if.*goto.label1....`) ++ ctrl := newFunction(cond) ++ switch ctrl { ++ case 1: +@@ -31 +33,7 @@ ++func newFunction(cond bool) int { ++ if cond { //@ loc(ifCondGotoLabel, re`(?s)if.*println.1.`), loc(ifCondGotoLabelWithLabel, re`(?s)if.*goto.label1....`) ++ return 1 ++ } ++ return 0 ++} ++ +-- @freeControl4/freecontrol.go -- +@@ -33,2 +33,3 @@ +- if x < 10 { //@ loc(multipleCtrl, re`(?s)if.x.*return...next1....`), loc(multipleCtrlNotAllSelected, re`(?s)if.x.*break....`) +- continue ++ shouldReturn, ctrl := newFunction(x) ++ if shouldReturn { ++ return +@@ -36 +37,4 @@ +- if x > 2 { ++ switch ctrl { ++ case 1: ++ continue ++ case 2: +@@ -39,3 +43 @@ +- if x == 1 { +- return //next1 +- } +@@ -45 +46,14 @@ ++func newFunction(x int) (bool, int) { ++ if x < 10 { //@ loc(multipleCtrl, re`(?s)if.x.*return...next1....`), loc(multipleCtrlNotAllSelected, re`(?s)if.x.*break....`) ++ return false, 1 ++ } ++ if x > 2 { ++ return false, 2 ++ } ++ if x == 1 { ++ return true, //next1 ++ 0 ++ } ++ return false, 0 ++} ++ +-- @freeControl5/freecontrol.go -- +@@ -33 +33,3 @@ +- if x < 10 { //@ loc(multipleCtrl, re`(?s)if.x.*return...next1....`), loc(multipleCtrlNotAllSelected, re`(?s)if.x.*break....`) ++ ctrl := newFunction(x) ++ switch ctrl { ++ case 1: +@@ -35,2 +37 @@ +- } +- if x > 2 { ++ case 2: +@@ -45 +46,10 @@ ++func newFunction(x int) int { ++ if x < 10 { //@ loc(multipleCtrl, re`(?s)if.x.*return...next1....`), loc(multipleCtrlNotAllSelected, re`(?s)if.x.*break....`) ++ return 1 ++ } ++ if x > 2 { ++ return 2 ++ } ++ return 0 ++} ++ +-- @freeControl6/freecontrol.go -- +@@ -48,2 +48,4 @@ +- if x < 10 { //@ loc(ctrlVarExists, re`(?s)if.x.*continue...next2....`) +- continue //next2 ++ ctrl1 := newFunction(x) ++ switch ctrl1 { ++ case 1: ++ continue +@@ -54 +56,7 @@ ++func newFunction(x int) int { ++ if x < 10 { //@ loc(ctrlVarExists, re`(?s)if.x.*continue...next2....`) ++ return 1 //next2 ++ } ++ return 0 ++} ++ +-- @freeControl7/freecontrol.go -- +@@ -57,2 +57,3 @@ +- if x < 10 { //@ loc(twoReturns, re`(?s)if.x.*return.1....`) +- return 0 ++ shouldReturn, i, ctrl := newFunction(x) ++ if shouldReturn { ++ return i +@@ -60,2 +61,2 @@ +- test := x - 4 +- if test > 2 { ++ switch ctrl { ++ case 1: +@@ -64,3 +65 @@ +- if test == 10 { +- return 1 +- } +@@ -79 +77,14 @@ ++ ++func newFunction(x int) (bool, int, int) { ++ if x < 10 { //@ loc(twoReturns, re`(?s)if.x.*return.1....`) ++ return true, 0, 0 ++ } ++ test := x - 4 ++ if test > 2 { ++ return false, 0, 1 ++ } ++ if test == 10 { ++ return true, 1, 0 ++ } ++ return false, 0, 0 ++} +-- @freeControl8/freecontrol.go -- +@@ -68,5 +68,3 @@ +- for range "def" { //@ loc(forWithLabel, re`(?s)for.*outer.........`) +- if x < 2 { +- continue +- } +- if x > 10 { ++ ctrl := newFunction(x) ++ switch ctrl { ++ case 1: +@@ -74 +72 @@ +- } +@@ -79 +76,12 @@ ++ ++func newFunction(x int) int { ++ for range "def" { //@ loc(forWithLabel, re`(?s)for.*outer.........`) ++ if x < 2 { ++ continue ++ } ++ if x > 10 { ++ return 1 ++ } ++ } ++ return 0 ++} From ce1c5d5963f44e413084a90b6e9011d97ba36888 Mon Sep 17 00:00:00 2001 
From: Alan Donovan Date: Wed, 9 Apr 2025 18:22:39 -0400 Subject: [PATCH 198/270] gopls/internal/golang: CallHierarchy: show only *types.Func callees This CL changes the OutgoingCalls part of the call hierarchy to report only calls whose callee is a *types.Func: that is, a function or method, possibly abstract. It no longer shows dynamic calls on arbitrary values of func type. Using the new UsedIdent function also fixed a bug whereby explicitly instantiated generic functions/methods would be omitted; added a test. Also, sort the items by location, and make the marker test assertions ordered. + tests, doc update Fixes golang/go#68153 Change-Id: I0d80f5c9f99f5fab0a4b6653bc0922279ee4d09c Reviewed-on: https://go-review.googlesource.com/c/tools/+/663916 Auto-Submit: Alan Donovan LUCI-TryBot-Result: Go LUCI Reviewed-by: Robert Findley Commit-Queue: Alan Donovan --- gopls/doc/features/navigation.md | 4 -- gopls/internal/golang/call_hierarchy.go | 55 +++++++++---------- gopls/internal/server/call_hierarchy.go | 21 ++++--- gopls/internal/test/marker/marker_test.go | 5 +- .../testdata/callhierarchy/callhierarchy.txt | 8 ++- 5 files changed, 44 insertions(+), 49 deletions(-) diff --git a/gopls/doc/features/navigation.md b/gopls/doc/features/navigation.md index 4f8a0f6fad7..11b40797cd4 100644 --- a/gopls/doc/features/navigation.md +++ b/gopls/doc/features/navigation.md @@ -296,10 +296,6 @@ of `fmt.Stringer` through the guts of `fmt.Sprint:` -Caveats: -- In some cases dynamic function calls are (erroneously) included in - the output; see golang/go#68153. - Client support: - **VS Code**: `Show Call Hierarchy` menu item (`⌥⇧H`) opens [Call hierarchy view](https://code.visualstudio.com/docs/cpp/cpp-ide#_call-hierarchy) (note: docs refer to C++ but the idea is the same for Go). - **Emacs + eglot**: Not standard; install with `(package-vc-install "https://github.com/dolmens/eglot-hierarchy")`. Use `M-x eglot-hierarchy-call-hierarchy` to show the direct incoming calls to the selected function; use a prefix argument (`C-u`) to show the direct outgoing calls. There is no way to expand the tree. diff --git a/gopls/internal/golang/call_hierarchy.go b/gopls/internal/golang/call_hierarchy.go index 04dc9deeb5d..b9f21cd18d7 100644 --- a/gopls/internal/golang/call_hierarchy.go +++ b/gopls/internal/golang/call_hierarchy.go @@ -14,13 +14,16 @@ import ( "path/filepath" "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/go/types/typeutil" "golang.org/x/tools/gopls/internal/cache" "golang.org/x/tools/gopls/internal/cache/parsego" "golang.org/x/tools/gopls/internal/file" "golang.org/x/tools/gopls/internal/protocol" "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/gopls/internal/util/moremaps" "golang.org/x/tools/gopls/internal/util/safetoken" "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/typesinternal" ) // PrepareCallHierarchy returns an array of CallHierarchyItem for a file and the position within the file. @@ -99,7 +102,7 @@ func IncomingCalls(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle // Flatten the map of pointers into a slice of values. 
incomingCallItems := make([]protocol.CallHierarchyIncomingCall, 0, len(incomingCalls)) - for _, callItem := range incomingCalls { + for _, callItem := range moremaps.SortedFunc(incomingCalls, protocol.CompareLocation) { incomingCallItems = append(incomingCallItems, *callItem) } return incomingCallItems, nil @@ -247,30 +250,21 @@ func OutgoingCalls(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle type callRange struct { start, end token.Pos } - callRanges := []callRange{} - ast.Inspect(declNode, func(n ast.Node) bool { - if call, ok := n.(*ast.CallExpr); ok { - var start, end token.Pos - switch n := call.Fun.(type) { - case *ast.SelectorExpr: - start, end = n.Sel.NamePos, call.Lparen - case *ast.Ident: - start, end = n.NamePos, call.Lparen - case *ast.FuncLit: - // while we don't add the function literal as an 'outgoing' call - // we still want to traverse into it - return true - default: - // ignore any other kind of call expressions - // for ex: direct function literal calls since that's not an 'outgoing' call - return false - } - callRanges = append(callRanges, callRange{start: start, end: end}) + + // Find calls to known functions/methods, including interface methods. + var callRanges []callRange + for n := range ast.Preorder(declNode) { + if call, ok := n.(*ast.CallExpr); ok && + is[*types.Func](typeutil.Callee(pkg.TypesInfo(), call)) { + id := typesinternal.UsedIdent(pkg.TypesInfo(), call.Fun) + callRanges = append(callRanges, callRange{ + start: id.NamePos, + end: call.Lparen, + }) } - return true - }) + } - outgoingCalls := map[token.Pos]*protocol.CallHierarchyOutgoingCall{} + outgoingCalls := make(map[protocol.Location]*protocol.CallHierarchyOutgoingCall) for _, callRange := range callRanges { _, obj, _ := referencedObject(declPkg, declPGF, callRange.start) if obj == nil { @@ -280,12 +274,13 @@ func OutgoingCalls(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle continue // built-ins have no position } - outgoingCall, ok := outgoingCalls[obj.Pos()] + loc, err := mapPosition(ctx, declPkg.FileSet(), snapshot, obj.Pos(), obj.Pos()+token.Pos(len(obj.Name()))) + if err != nil { + return nil, err + } + + outgoingCall, ok := outgoingCalls[loc] if !ok { - loc, err := mapPosition(ctx, declPkg.FileSet(), snapshot, obj.Pos(), obj.Pos()+token.Pos(len(obj.Name()))) - if err != nil { - return nil, err - } outgoingCall = &protocol.CallHierarchyOutgoingCall{ To: protocol.CallHierarchyItem{ Name: obj.Name(), @@ -297,7 +292,7 @@ func OutgoingCalls(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle SelectionRange: loc.Range, }, } - outgoingCalls[obj.Pos()] = outgoingCall + outgoingCalls[loc] = outgoingCall } rng, err := declPGF.PosRange(callRange.start, callRange.end) @@ -308,7 +303,7 @@ func OutgoingCalls(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle } outgoingCallItems := make([]protocol.CallHierarchyOutgoingCall, 0, len(outgoingCalls)) - for _, callItem := range outgoingCalls { + for _, callItem := range moremaps.SortedFunc(outgoingCalls, protocol.CompareLocation) { outgoingCallItems = append(outgoingCallItems, *callItem) } return outgoingCallItems, nil diff --git a/gopls/internal/server/call_hierarchy.go b/gopls/internal/server/call_hierarchy.go index 758a4628948..1887767250c 100644 --- a/gopls/internal/server/call_hierarchy.go +++ b/gopls/internal/server/call_hierarchy.go @@ -22,10 +22,11 @@ func (s *server) PrepareCallHierarchy(ctx context.Context, params *protocol.Call return nil, err } defer release() - if snapshot.FileKind(fh) != file.Go { 
- return nil, nil // empty result + switch snapshot.FileKind(fh) { + case file.Go: + return golang.PrepareCallHierarchy(ctx, snapshot, fh, params.Position) } - return golang.PrepareCallHierarchy(ctx, snapshot, fh, params.Position) + return nil, nil // empty result } func (s *server) IncomingCalls(ctx context.Context, params *protocol.CallHierarchyIncomingCallsParams) ([]protocol.CallHierarchyIncomingCall, error) { @@ -37,10 +38,11 @@ func (s *server) IncomingCalls(ctx context.Context, params *protocol.CallHierarc return nil, err } defer release() - if snapshot.FileKind(fh) != file.Go { - return nil, nil // empty result + switch snapshot.FileKind(fh) { + case file.Go: + return golang.IncomingCalls(ctx, snapshot, fh, params.Item.Range.Start) } - return golang.IncomingCalls(ctx, snapshot, fh, params.Item.Range.Start) + return nil, nil // empty result } func (s *server) OutgoingCalls(ctx context.Context, params *protocol.CallHierarchyOutgoingCallsParams) ([]protocol.CallHierarchyOutgoingCall, error) { @@ -52,8 +54,9 @@ func (s *server) OutgoingCalls(ctx context.Context, params *protocol.CallHierarc return nil, err } defer release() - if snapshot.FileKind(fh) != file.Go { - return nil, nil // empty result + switch snapshot.FileKind(fh) { + case file.Go: + return golang.OutgoingCalls(ctx, snapshot, fh, params.Item.Range.Start) } - return golang.OutgoingCalls(ctx, snapshot, fh, params.Item.Range.Start) + return nil, nil // empty result } diff --git a/gopls/internal/test/marker/marker_test.go b/gopls/internal/test/marker/marker_test.go index c25eb3150a4..d5194e39952 100644 --- a/gopls/internal/test/marker/marker_test.go +++ b/gopls/internal/test/marker/marker_test.go @@ -2456,11 +2456,8 @@ func callHierarchy(mark marker, src protocol.Location, getCalls callHierarchyFun return } if calls == nil { - calls = []protocol.Location{} + calls = []protocol.Location{} // non-nil; cmp.Diff cares } - // TODO(rfindley): why aren't call hierarchy results stable? 
- slices.SortFunc(want, protocol.CompareLocation) - slices.SortFunc(calls, protocol.CompareLocation) if d := cmp.Diff(want, calls); d != "" { mark.errorf("call hierarchy: unexpected results (-want +got):\n%s", d) } diff --git a/gopls/internal/test/marker/testdata/callhierarchy/callhierarchy.txt b/gopls/internal/test/marker/testdata/callhierarchy/callhierarchy.txt index 43fbdd68281..b5f4f1d23ad 100644 --- a/gopls/internal/test/marker/testdata/callhierarchy/callhierarchy.txt +++ b/gopls/internal/test/marker/testdata/callhierarchy/callhierarchy.txt @@ -48,12 +48,12 @@ func C() { //@loc(hC, "C") var x = func() { D() } //@loc(hX, "x"),loc(hXGlobal, "x") // D is exported to test incoming/outgoing calls across packages -func D() { //@loc(hD, "D"),incomingcalls(hD, hA, hB, hC, hXGlobal, incomingA),outgoingcalls(hD, hE, hF, hG, hX, outgoingB, hFoo, hH, hI, hJ, hK) +func D() { //@ loc(hD, "D"), incomingcalls(hD, hA, hB, hC, hXGlobal, incomingA), outgoingcalls(hD, hE, hF, hG, hH, hI, Generic, outgoingB) e() x() F() outgoing.B() - foo := func() {} //@loc(hFoo, "foo"),incomingcalls(hFoo, hD),outgoingcalls(hFoo) + foo := func() {} //@ loc(hFoo, "foo"), incomingcalls(hFoo, hD), outgoingcalls(hFoo) foo() func() { @@ -67,6 +67,8 @@ func D() { //@loc(hD, "D"),incomingcalls(hD, hA, hB, hC, hXGlobal, incomingA),ou s := Struct{} s.J() s.K() + + Generic[string]() } func e() {} //@loc(hE, "e") @@ -90,3 +92,5 @@ type Struct struct { J func() //@loc(hJ, "J") K func() //@loc(hK, "K") } + +func Generic[T any]() //@loc(Generic, "Generic") From b68d703f1b954038efbdd360b894fc1405f4f9bf Mon Sep 17 00:00:00 2001 From: Jake Bailey Date: Wed, 9 Apr 2025 12:43:07 -0700 Subject: [PATCH 199/270] gopls/internal/cache: handle VS Code Insiders in watchSubdirs I noticed that VS Code was hardcoded here based on its client name. Insiders has its own client name, so wasn't receiving the same treatment. I don't belive vscode-go sets this option yet, so I am thinking gopls is currently doing the wrong thing for Insiders. Change-Id: Ia235a869d29316d8cab0dd5ed9bf35d87a71043b Reviewed-on: https://go-review.googlesource.com/c/tools/+/664255 Auto-Submit: Robert Findley Reviewed-by: Robert Findley LUCI-TryBot-Result: Go LUCI Reviewed-by: Michael Pratt --- gopls/internal/cache/snapshot.go | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/gopls/internal/cache/snapshot.go b/gopls/internal/cache/snapshot.go index 81cfafc1470..f936bbfc458 100644 --- a/gopls/internal/cache/snapshot.go +++ b/gopls/internal/cache/snapshot.go @@ -916,10 +916,12 @@ func (s *Snapshot) watchSubdirs() bool { // requirements that client names do not change. We should update the VS // Code extension to set a default value of "subdirWatchPatterns" to "on", // so that this workaround is only temporary. - if s.Options().ClientInfo.Name == "Visual Studio Code" { + switch s.Options().ClientInfo.Name { + case "Visual Studio Code", "Visual Studio Code - Insiders": return true + default: + return false } - return false default: bug.Reportf("invalid subdirWatchPatterns: %q", p) return false From 808c8703b46d8600073098605cd801da2aea79f2 Mon Sep 17 00:00:00 2001 From: xieyuschen Date: Fri, 11 Apr 2025 21:03:11 -0600 Subject: [PATCH 200/270] gopls/internal/cache: more test cases for syntax repairing This CL adds unit tests for: - fix defer stmt. - fix init stmt. 
- fix phantom selector Updates: golang/go#64335 Change-Id: I8ac7c4fc30627b0e541e9f2769de7e9f057d2995 Reviewed-on: https://go-review.googlesource.com/c/tools/+/664876 LUCI-TryBot-Result: Go LUCI Reviewed-by: Robert Findley Reviewed-by: Michael Pratt Auto-Submit: Robert Findley --- gopls/internal/cache/parsego/parse_test.go | 304 ++++++++++++++++++--- internal/analysisinternal/analysis.go | 6 +- 2 files changed, 272 insertions(+), 38 deletions(-) diff --git a/gopls/internal/cache/parsego/parse_test.go b/gopls/internal/cache/parsego/parse_test.go index 84a344cab52..db78b596042 100644 --- a/gopls/internal/cache/parsego/parse_test.go +++ b/gopls/internal/cache/parsego/parse_test.go @@ -8,7 +8,9 @@ import ( "context" "fmt" "go/ast" + "go/parser" "go/token" + "reflect" "slices" "testing" @@ -48,81 +50,313 @@ func _() { }) } -func TestFixGoAndDefer_GoStmt(t *testing.T) { +func TestFixGoAndDefer(t *testing.T) { var testCases = []struct { source string fixes []parsego.FixType wantFix string }{ - {source: "g", fixes: nil}, - {source: "go", fixes: nil}, - {source: "go a.b(", fixes: nil}, - {source: "go a.b()", fixes: nil}, - {source: "go func {", fixes: nil}, + {source: "", fixes: nil}, // keyword alone + {source: "a.b(", fixes: nil}, + {source: "a.b()", fixes: nil}, + {source: "func {", fixes: nil}, { - source: "go f", + source: "f", fixes: []parsego.FixType{parsego.FixedDeferOrGo}, - wantFix: "go f()", + wantFix: "f()", }, { - source: "go func", + source: "func", fixes: []parsego.FixType{parsego.FixedDeferOrGo}, - wantFix: "go (func())()", + wantFix: "(func())()", }, { - source: "go func {}", + source: "func {}", fixes: []parsego.FixType{parsego.FixedDeferOrGo}, - wantFix: "go (func())()", + wantFix: "(func())()", }, { - source: "go func {}(", + source: "func {}(", fixes: []parsego.FixType{parsego.FixedDeferOrGo}, - wantFix: "go (func())()", + wantFix: "(func())()", }, { - source: "go func {}()", + source: "func {}()", fixes: []parsego.FixType{parsego.FixedDeferOrGo}, - wantFix: "go (func())()", + wantFix: "(func())()", }, { - source: "go a.", + source: "a.", fixes: []parsego.FixType{parsego.FixedDeferOrGo, parsego.FixedDanglingSelector, parsego.FixedDeferOrGo}, - wantFix: "go a._()", + wantFix: "a._()", }, { - source: "go a.b", + source: "a.b", fixes: []parsego.FixType{parsego.FixedDeferOrGo}, - wantFix: "go a.b()", + wantFix: "a.b()", }, } + for _, keyword := range []string{"go", "defer"} { + for _, tc := range testCases { + source := fmt.Sprintf("%s %s", keyword, tc.source) + t.Run(source, func(t *testing.T) { + src := filesrc(source) + pgf, fixes := parsego.Parse(context.Background(), token.NewFileSet(), "file://foo.go", src, parsego.Full, false) + if !slices.Equal(fixes, tc.fixes) { + t.Fatalf("got %v want %v", fixes, tc.fixes) + } + if tc.fixes == nil { + return + } + + fset := tokeninternal.FileSetFor(pgf.Tok) + inspect(t, pgf, func(stmt ast.Stmt) { + var call *ast.CallExpr + switch stmt := stmt.(type) { + case *ast.DeferStmt: + call = stmt.Call + case *ast.GoStmt: + call = stmt.Call + default: + return + } + + if got := analysisinternal.Format(fset, call); got != tc.wantFix { + t.Fatalf("got %v want %v", got, tc.wantFix) + } + }) + }) + } + } +} + +// TestFixInit tests the init stmt after if/for/switch which is put under cond after parsing +// will be fixed and moved to Init. 
+func TestFixInit(t *testing.T) { + var testCases = []struct { + name string + source string + fixes []parsego.FixType + wantInitFix string + }{ + { + name: "simple define", + source: "i := 0", + fixes: []parsego.FixType{parsego.FixedInit}, + wantInitFix: "i := 0", + }, + { + name: "simple assign", + source: "i = 0", + fixes: []parsego.FixType{parsego.FixedInit}, + wantInitFix: "i = 0", + }, + { + name: "define with function call", + source: "i := f()", + fixes: []parsego.FixType{parsego.FixedInit}, + wantInitFix: "i := f()", + }, + { + name: "assign with function call", + source: "i = f()", + fixes: []parsego.FixType{parsego.FixedInit}, + wantInitFix: "i = f()", + }, + { + name: "assign with receiving chan", + source: "i = <-ch", + fixes: []parsego.FixType{parsego.FixedInit}, + wantInitFix: "i = <-ch", + }, + + // fixInitStmt won't fix the following cases. + { + name: "call in if", + source: `fmt.Println("helloworld")`, + fixes: nil, + }, + { + name: "receive chan", + source: `<- ch`, + fixes: nil, + }, + } + + // currently, switch will leave its Tag empty after fix because it allows empty, + // and if and for will leave an underscore in Cond. + getWantCond := func(keyword string) string { + if keyword == "switch" { + return "" + } + return "_" + } + + for _, keyword := range []string{"if", "for", "switch"} { + for _, tc := range testCases { + caseName := fmt.Sprintf("%s %s", keyword, tc.name) + t.Run(caseName, func(t *testing.T) { + // the init stmt is treated as a cond. + src := filesrc(fmt.Sprintf("%s %s {}", keyword, tc.source)) + pgf, fixes := parsego.Parse(context.Background(), token.NewFileSet(), "file://foo.go", src, parsego.Full, false) + if !slices.Equal(fixes, tc.fixes) { + t.Fatalf("TestFixArrayType(): got %v want %v", fixes, tc.fixes) + } + if tc.fixes == nil { + return + } + + // ensure the init stmt is parsed to a BadExpr. 
+ ensureSource(t, src, func(bad *ast.BadExpr) {}) + + info := func(n ast.Node, wantStmt string) (init ast.Stmt, cond ast.Expr, has bool) { + switch wantStmt { + case "if": + if e, ok := n.(*ast.IfStmt); ok { + return e.Init, e.Cond, true + } + case "switch": + if e, ok := n.(*ast.SwitchStmt); ok { + return e.Init, e.Tag, true + } + case "for": + if e, ok := n.(*ast.ForStmt); ok { + return e.Init, e.Cond, true + } + } + return nil, nil, false + } + fset := tokeninternal.FileSetFor(pgf.Tok) + inspect(t, pgf, func(n ast.Stmt) { + if init, cond, ok := info(n, keyword); ok { + if got := analysisinternal.Format(fset, init); got != tc.wantInitFix { + t.Fatalf("%s: Init got %v want %v", tc.source, got, tc.wantInitFix) + } + + wantCond := getWantCond(keyword) + if got := analysisinternal.Format(fset, cond); got != wantCond { + t.Fatalf("%s: Cond got %v want %v", tc.source, got, wantCond) + } + } + }) + }) + } + } +} + +func TestFixPhantomSelector(t *testing.T) { + wantFixes := []parsego.FixType{parsego.FixedPhantomSelector} + var testCases = []struct { + source string + fixes []parsego.FixType + }{ + {source: "a.break", fixes: wantFixes}, + {source: "_.break", fixes: wantFixes}, + {source: "a.case", fixes: wantFixes}, + {source: "a.chan", fixes: wantFixes}, + {source: "a.const", fixes: wantFixes}, + {source: "a.continue", fixes: wantFixes}, + {source: "a.default", fixes: wantFixes}, + {source: "a.defer", fixes: wantFixes}, + {source: "a.else", fixes: wantFixes}, + {source: "a.fallthrough", fixes: wantFixes}, + {source: "a.for", fixes: wantFixes}, + {source: "a.func", fixes: wantFixes}, + {source: "a.go", fixes: wantFixes}, + {source: "a.goto", fixes: wantFixes}, + {source: "a.if", fixes: wantFixes}, + {source: "a.import", fixes: wantFixes}, + {source: "a.interface", fixes: wantFixes}, + {source: "a.map", fixes: wantFixes}, + {source: "a.package", fixes: wantFixes}, + {source: "a.range", fixes: wantFixes}, + {source: "a.return", fixes: wantFixes}, + {source: "a.select", fixes: wantFixes}, + {source: "a.struct", fixes: wantFixes}, + {source: "a.switch", fixes: wantFixes}, + {source: "a.type", fixes: wantFixes}, + {source: "a.var", fixes: wantFixes}, + + {source: "break.break"}, + {source: "a.BREAK"}, + {source: "a.break_"}, + {source: "a.breaka"}, + } + for _, tc := range testCases { t.Run(tc.source, func(t *testing.T) { src := filesrc(tc.source) pgf, fixes := parsego.Parse(context.Background(), token.NewFileSet(), "file://foo.go", src, parsego.Full, false) if !slices.Equal(fixes, tc.fixes) { - t.Fatalf("TestFixGoAndDefer_GoStmt(): got %v want %v", fixes, tc.fixes) + t.Fatalf("got %v want %v", fixes, tc.fixes) + } + + // some fixes don't fit the fix scenario, but we want to confirm it. + if fixes == nil { + return } + + // ensure the selector has been converted to underscore by parser. 
+ ensureSource(t, src, func(sel *ast.SelectorExpr) { + if sel.Sel.Name != "_" { + t.Errorf("%s: the input doesn't cause a blank selector after parser", tc.source) + } + }) + fset := tokeninternal.FileSetFor(pgf.Tok) - check := func(n ast.Node) bool { - if n != nil { - posn := safetoken.StartPosition(fset, n.Pos()) - if !posn.IsValid() { - t.Fatalf("invalid position for %T (%v): %v not in [%d, %d]", n, n, n.Pos(), pgf.Tok.Base(), pgf.Tok.Base()+pgf.Tok.Size()) - } - if deferStmt, ok := n.(*ast.GoStmt); ok && tc.fixes != nil { - if got, want := fmt.Sprintf("go %s", analysisinternal.Format(fset, deferStmt.Call)), tc.wantFix; got != want { - t.Fatalf("TestFixGoAndDefer_GoStmt(): got %v want %v", got, want) - } - } + inspect(t, pgf, func(sel *ast.SelectorExpr) { + // the fix should restore the selector as is. + if got, want := fmt.Sprintf("%s", analysisinternal.Format(fset, sel)), tc.source; got != want { + t.Fatalf("got %v want %v", got, want) } - return true - } - ast.Inspect(pgf.File, check) + }) }) } } +// inspect helps to go through each node of pgf and trigger checkFn if the type matches T. +func inspect[T ast.Node](t *testing.T, pgf *parsego.File, checkFn func(n T)) { + fset := tokeninternal.FileSetFor(pgf.Tok) + var visited bool + ast.Inspect(pgf.File, func(node ast.Node) bool { + if node != nil { + posn := safetoken.StartPosition(fset, node.Pos()) + if !posn.IsValid() { + t.Fatalf("invalid position for %T (%v): %v not in [%d, %d]", node, node, node.Pos(), pgf.Tok.Base(), pgf.Tok.Base()+pgf.Tok.Size()) + } + if n, ok := node.(T); ok { + visited = true + checkFn(n) + } + } + return true + }) + if !visited { + var n T + t.Fatalf("got no %s node but want at least one", reflect.TypeOf(n)) + } +} + +// ensureSource helps to parse src into an ast.File by go/parser and trigger checkFn if the type matches T. +func ensureSource[T ast.Node](t *testing.T, src []byte, checkFn func(n T)) { + // tolerate error as usually the src is problematic. + originFile, _ := parser.ParseFile(token.NewFileSet(), "file://foo.go", src, parsego.Full) + var visited bool + ast.Inspect(originFile, func(node ast.Node) bool { + if n, ok := node.(T); ok { + visited = true + checkFn(n) + } + return true + }) + + if !visited { + var n T + t.Fatalf("got no %s node but want at least one", reflect.TypeOf(n)) + } +} + func filesrc(expressions string) []byte { const srcTmpl = `package foo diff --git a/internal/analysisinternal/analysis.go b/internal/analysisinternal/analysis.go index b22e314cf45..cd2595a3dd1 100644 --- a/internal/analysisinternal/analysis.go +++ b/internal/analysisinternal/analysis.go @@ -305,10 +305,10 @@ func FreshName(scope *types.Scope, pos token.Pos, preferred string) string { return newName } -// Format returns a string representation of the expression e. -func Format(fset *token.FileSet, e ast.Expr) string { +// Format returns a string representation of the node n. +func Format(fset *token.FileSet, n ast.Node) string { var buf strings.Builder - printer.Fprint(&buf, fset, e) // ignore errors + printer.Fprint(&buf, fset, n) // ignore errors return buf.String() } From 26ef4d1e7e58703248c9a528006b80acced411a6 Mon Sep 17 00:00:00 2001 From: Rob Findley Date: Tue, 15 Apr 2025 13:32:50 +0000 Subject: [PATCH 201/270] gopls/internal/golang: fix broken build due to semantic merge conflict Update Cursor.Ancestors()->Cursor.Parent().Enclosing(). CL 660775 renamed Ancestors to Enclosing and made it reflexive. The additional call to Parent leaves the logic equivalent. 
Change-Id: I792ac981ae3191f6bc26696c50f5990434e39d15 Reviewed-on: https://go-review.googlesource.com/c/tools/+/665635 Auto-Submit: Robert Findley LUCI-TryBot-Result: Go LUCI Reviewed-by: Michael Pratt --- gopls/internal/golang/extract.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gopls/internal/golang/extract.go b/gopls/internal/golang/extract.go index 18a64dac44b..322b1169fd6 100644 --- a/gopls/internal/golang/extract.go +++ b/gopls/internal/golang/extract.go @@ -2045,7 +2045,7 @@ nextBranch: (*ast.SelectStmt)(nil), } // Find innermost relevant ancestor for break/continue. - for curAncestor := range curBr.Ancestors(filter...) { + for curAncestor := range curBr.Parent().Enclosing(filter...) { if l, ok := curAncestor.Parent().Node().(*ast.LabeledStmt); ok && label != nil && l.Label.Name == label.Name() { From 20a69d61b6910e3c19bd73c5a13feab085c9ffdf Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Tue, 15 Apr 2025 13:48:09 -0400 Subject: [PATCH 202/270] gopls: warn against using go get -tool with gopls The problem is that gopls needs exactly the right version of x/tools, as found on its release branch. The solution to an MVS constraint system which requires x/tools at higher versions will inevitably lead to build failures. (We should move modernize to x/tools.) Updates golang/go#73279 Change-Id: I6f43bb2e74b29384cedcb92b5d958cf695aa529d Reviewed-on: https://go-review.googlesource.com/c/tools/+/665636 Reviewed-by: Robert Findley Auto-Submit: Alan Donovan LUCI-TryBot-Result: Go LUCI --- gopls/doc/analyzers.md | 2 ++ gopls/internal/analysis/modernize/doc.go | 2 ++ gopls/internal/doc/api.json | 4 ++-- internal/gofix/doc.go | 2 ++ 4 files changed, 8 insertions(+), 2 deletions(-) diff --git a/gopls/doc/analyzers.md b/gopls/doc/analyzers.md index 4b2bff1a63a..abb1227a59b 100644 --- a/gopls/doc/analyzers.md +++ b/gopls/doc/analyzers.md @@ -492,6 +492,8 @@ following command: $ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -fix -test ./... +(Do not use "go get -tool" to add gopls as a dependency of your module; it will not work.) + If the tool warns of conflicting fixes, you may need to run it more than once until it has applied all fixes cleanly. This command is not an officially supported interface and may change in the future. diff --git a/gopls/internal/analysis/modernize/doc.go b/gopls/internal/analysis/modernize/doc.go index aa052540832..1d64cc7dc7c 100644 --- a/gopls/internal/analysis/modernize/doc.go +++ b/gopls/internal/analysis/modernize/doc.go @@ -25,6 +25,8 @@ // // $ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -fix -test ./... // +// (Do not use "go get -tool" to add gopls as a dependency of your module; it will not work.) +// // If the tool warns of conflicting fixes, you may need to run it more // than once until it has applied all fixes cleanly. This command is // not an officially supported interface and may change in the future. diff --git a/gopls/internal/doc/api.json b/gopls/internal/doc/api.json index 0852870ba41..c4e99473edb 100644 --- a/gopls/internal/doc/api.json +++ b/gopls/internal/doc/api.json @@ -562,7 +562,7 @@ }, { "Name": "\"modernize\"", - "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code by using more modern features of Go and its standard\nlibrary.\n\nEach diagnostic provides a fix. 
Our intent is that these fixes may\nbe safely applied en masse without changing the behavior of your\nprogram. In some cases the suggested fixes are imperfect and may\nlead to (for example) unused imports or unused local variables,\ncausing build breakage. However, these problems are generally\ntrivial to fix. We regard any modernizer whose fix changes program\nbehavior to have a serious bug and will endeavor to fix it.\n\nTo apply all modernization fixes en masse, you can use the\nfollowing command:\n\n\t$ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -fix -test ./...\n\nIf the tool warns of conflicting fixes, you may need to run it more\nthan once until it has applied all fixes cleanly. This command is\nnot an officially supported interface and may change in the future.\n\nChanges produced by this tool should be reviewed as usual before\nbeing merged. In some cases, a loop may be replaced by a simple\nfunction call, causing comments within the loop to be discarded.\nHuman judgment may be required to avoid losing comments of value.\n\nEach diagnostic reported by modernize has a specific category. (The\ncategories are listed below.) Diagnostics in some categories, such\nas \"efaceany\" (which replaces \"interface{}\" with \"any\" where it is\nsafe to do so) are particularly numerous. It may ease the burden of\ncode review to apply fixes in two passes, the first change\nconsisting only of fixes of category \"efaceany\", the second\nconsisting of all others. This can be achieved using the -category flag:\n\n\t$ modernize -category=efaceany -fix -test ./...\n\t$ modernize -category=-efaceany -fix -test ./...\n\nCategories of modernize diagnostic:\n\n - forvar: remove x := x variable declarations made unnecessary by the new semantics of loops in go1.22.\n\n - slicescontains: replace 'for i, elem := range s { if elem == needle { ...; break }'\n by a call to slices.Contains, added in go1.21.\n\n - minmax: replace an if/else conditional assignment by a call to\n the built-in min or max functions added in go1.21.\n\n - sortslice: replace sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21.\n\n - efaceany: replace interface{} by the 'any' type added in go1.18.\n\n - slicesclone: replace append([]T(nil), s...) by slices.Clone(s) or\n slices.Concat(s), added in go1.21.\n\n - mapsloop: replace a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions from\n the maps package, added in go1.21.\n\n - fmtappendf: replace []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...),\n added in go1.19.\n\n - testingcontext: replace uses of context.WithCancel in tests\n with t.Context, added in go1.24.\n\n - omitzero: replace omitempty by omitzero on structs, added in go1.24.\n\n - bloop: replace \"for i := range b.N\" or \"for range b.N\" in a\n benchmark with \"for b.Loop()\", and remove any preceding calls\n to b.StopTimer, b.StartTimer, and b.ResetTimer.\n\n - slicesdelete: replace append(s[:i], s[i+1]...) 
by\n slices.Delete(s, i, i+1), added in go1.21.\n\n - rangeint: replace a 3-clause \"for i := 0; i \u003c n; i++\" loop by\n \"for i := range n\", added in go1.22.\n\n - stringsseq: replace Split in \"for range strings.Split(...)\" by go1.24's\n more efficient SplitSeq, or Fields with FieldSeq.\n\n - stringscutprefix: replace some uses of HasPrefix followed by TrimPrefix with CutPrefix,\n added to the strings package in go1.20.\n\n - waitgroup: replace old complex usages of sync.WaitGroup by less complex WaitGroup.Go method in go1.25.", + "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code by using more modern features of Go and its standard\nlibrary.\n\nEach diagnostic provides a fix. Our intent is that these fixes may\nbe safely applied en masse without changing the behavior of your\nprogram. In some cases the suggested fixes are imperfect and may\nlead to (for example) unused imports or unused local variables,\ncausing build breakage. However, these problems are generally\ntrivial to fix. We regard any modernizer whose fix changes program\nbehavior to have a serious bug and will endeavor to fix it.\n\nTo apply all modernization fixes en masse, you can use the\nfollowing command:\n\n\t$ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -fix -test ./...\n\n(Do not use \"go get -tool\" to add gopls as a dependency of your module; it will not work.)\n\nIf the tool warns of conflicting fixes, you may need to run it more\nthan once until it has applied all fixes cleanly. This command is\nnot an officially supported interface and may change in the future.\n\nChanges produced by this tool should be reviewed as usual before\nbeing merged. In some cases, a loop may be replaced by a simple\nfunction call, causing comments within the loop to be discarded.\nHuman judgment may be required to avoid losing comments of value.\n\nEach diagnostic reported by modernize has a specific category. (The\ncategories are listed below.) Diagnostics in some categories, such\nas \"efaceany\" (which replaces \"interface{}\" with \"any\" where it is\nsafe to do so) are particularly numerous. It may ease the burden of\ncode review to apply fixes in two passes, the first change\nconsisting only of fixes of category \"efaceany\", the second\nconsisting of all others. This can be achieved using the -category flag:\n\n\t$ modernize -category=efaceany -fix -test ./...\n\t$ modernize -category=-efaceany -fix -test ./...\n\nCategories of modernize diagnostic:\n\n - forvar: remove x := x variable declarations made unnecessary by the new semantics of loops in go1.22.\n\n - slicescontains: replace 'for i, elem := range s { if elem == needle { ...; break }'\n by a call to slices.Contains, added in go1.21.\n\n - minmax: replace an if/else conditional assignment by a call to\n the built-in min or max functions added in go1.21.\n\n - sortslice: replace sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21.\n\n - efaceany: replace interface{} by the 'any' type added in go1.18.\n\n - slicesclone: replace append([]T(nil), s...) by slices.Clone(s) or\n slices.Concat(s), added in go1.21.\n\n - mapsloop: replace a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions from\n the maps package, added in go1.21.\n\n - fmtappendf: replace []byte(fmt.Sprintf...) 
by fmt.Appendf(nil, ...),\n added in go1.19.\n\n - testingcontext: replace uses of context.WithCancel in tests\n with t.Context, added in go1.24.\n\n - omitzero: replace omitempty by omitzero on structs, added in go1.24.\n\n - bloop: replace \"for i := range b.N\" or \"for range b.N\" in a\n benchmark with \"for b.Loop()\", and remove any preceding calls\n to b.StopTimer, b.StartTimer, and b.ResetTimer.\n\n - slicesdelete: replace append(s[:i], s[i+1]...) by\n slices.Delete(s, i, i+1), added in go1.21.\n\n - rangeint: replace a 3-clause \"for i := 0; i \u003c n; i++\" loop by\n \"for i := range n\", added in go1.22.\n\n - stringsseq: replace Split in \"for range strings.Split(...)\" by go1.24's\n more efficient SplitSeq, or Fields with FieldSeq.\n\n - stringscutprefix: replace some uses of HasPrefix followed by TrimPrefix with CutPrefix,\n added to the strings package in go1.20.\n\n - waitgroup: replace old complex usages of sync.WaitGroup by less complex WaitGroup.Go method in go1.25.", "Default": "true", "Status": "" }, @@ -1338,7 +1338,7 @@ }, { "Name": "modernize", - "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code by using more modern features of Go and its standard\nlibrary.\n\nEach diagnostic provides a fix. Our intent is that these fixes may\nbe safely applied en masse without changing the behavior of your\nprogram. In some cases the suggested fixes are imperfect and may\nlead to (for example) unused imports or unused local variables,\ncausing build breakage. However, these problems are generally\ntrivial to fix. We regard any modernizer whose fix changes program\nbehavior to have a serious bug and will endeavor to fix it.\n\nTo apply all modernization fixes en masse, you can use the\nfollowing command:\n\n\t$ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -fix -test ./...\n\nIf the tool warns of conflicting fixes, you may need to run it more\nthan once until it has applied all fixes cleanly. This command is\nnot an officially supported interface and may change in the future.\n\nChanges produced by this tool should be reviewed as usual before\nbeing merged. In some cases, a loop may be replaced by a simple\nfunction call, causing comments within the loop to be discarded.\nHuman judgment may be required to avoid losing comments of value.\n\nEach diagnostic reported by modernize has a specific category. (The\ncategories are listed below.) Diagnostics in some categories, such\nas \"efaceany\" (which replaces \"interface{}\" with \"any\" where it is\nsafe to do so) are particularly numerous. It may ease the burden of\ncode review to apply fixes in two passes, the first change\nconsisting only of fixes of category \"efaceany\", the second\nconsisting of all others. 
This can be achieved using the -category flag:\n\n\t$ modernize -category=efaceany -fix -test ./...\n\t$ modernize -category=-efaceany -fix -test ./...\n\nCategories of modernize diagnostic:\n\n - forvar: remove x := x variable declarations made unnecessary by the new semantics of loops in go1.22.\n\n - slicescontains: replace 'for i, elem := range s { if elem == needle { ...; break }'\n by a call to slices.Contains, added in go1.21.\n\n - minmax: replace an if/else conditional assignment by a call to\n the built-in min or max functions added in go1.21.\n\n - sortslice: replace sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21.\n\n - efaceany: replace interface{} by the 'any' type added in go1.18.\n\n - slicesclone: replace append([]T(nil), s...) by slices.Clone(s) or\n slices.Concat(s), added in go1.21.\n\n - mapsloop: replace a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions from\n the maps package, added in go1.21.\n\n - fmtappendf: replace []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...),\n added in go1.19.\n\n - testingcontext: replace uses of context.WithCancel in tests\n with t.Context, added in go1.24.\n\n - omitzero: replace omitempty by omitzero on structs, added in go1.24.\n\n - bloop: replace \"for i := range b.N\" or \"for range b.N\" in a\n benchmark with \"for b.Loop()\", and remove any preceding calls\n to b.StopTimer, b.StartTimer, and b.ResetTimer.\n\n - slicesdelete: replace append(s[:i], s[i+1]...) by\n slices.Delete(s, i, i+1), added in go1.21.\n\n - rangeint: replace a 3-clause \"for i := 0; i \u003c n; i++\" loop by\n \"for i := range n\", added in go1.22.\n\n - stringsseq: replace Split in \"for range strings.Split(...)\" by go1.24's\n more efficient SplitSeq, or Fields with FieldSeq.\n\n - stringscutprefix: replace some uses of HasPrefix followed by TrimPrefix with CutPrefix,\n added to the strings package in go1.20.\n\n - waitgroup: replace old complex usages of sync.WaitGroup by less complex WaitGroup.Go method in go1.25.", + "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code by using more modern features of Go and its standard\nlibrary.\n\nEach diagnostic provides a fix. Our intent is that these fixes may\nbe safely applied en masse without changing the behavior of your\nprogram. In some cases the suggested fixes are imperfect and may\nlead to (for example) unused imports or unused local variables,\ncausing build breakage. However, these problems are generally\ntrivial to fix. We regard any modernizer whose fix changes program\nbehavior to have a serious bug and will endeavor to fix it.\n\nTo apply all modernization fixes en masse, you can use the\nfollowing command:\n\n\t$ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -fix -test ./...\n\n(Do not use \"go get -tool\" to add gopls as a dependency of your module; it will not work.)\n\nIf the tool warns of conflicting fixes, you may need to run it more\nthan once until it has applied all fixes cleanly. This command is\nnot an officially supported interface and may change in the future.\n\nChanges produced by this tool should be reviewed as usual before\nbeing merged. 
In some cases, a loop may be replaced by a simple\nfunction call, causing comments within the loop to be discarded.\nHuman judgment may be required to avoid losing comments of value.\n\nEach diagnostic reported by modernize has a specific category. (The\ncategories are listed below.) Diagnostics in some categories, such\nas \"efaceany\" (which replaces \"interface{}\" with \"any\" where it is\nsafe to do so) are particularly numerous. It may ease the burden of\ncode review to apply fixes in two passes, the first change\nconsisting only of fixes of category \"efaceany\", the second\nconsisting of all others. This can be achieved using the -category flag:\n\n\t$ modernize -category=efaceany -fix -test ./...\n\t$ modernize -category=-efaceany -fix -test ./...\n\nCategories of modernize diagnostic:\n\n - forvar: remove x := x variable declarations made unnecessary by the new semantics of loops in go1.22.\n\n - slicescontains: replace 'for i, elem := range s { if elem == needle { ...; break }'\n by a call to slices.Contains, added in go1.21.\n\n - minmax: replace an if/else conditional assignment by a call to\n the built-in min or max functions added in go1.21.\n\n - sortslice: replace sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21.\n\n - efaceany: replace interface{} by the 'any' type added in go1.18.\n\n - slicesclone: replace append([]T(nil), s...) by slices.Clone(s) or\n slices.Concat(s), added in go1.21.\n\n - mapsloop: replace a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions from\n the maps package, added in go1.21.\n\n - fmtappendf: replace []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...),\n added in go1.19.\n\n - testingcontext: replace uses of context.WithCancel in tests\n with t.Context, added in go1.24.\n\n - omitzero: replace omitempty by omitzero on structs, added in go1.24.\n\n - bloop: replace \"for i := range b.N\" or \"for range b.N\" in a\n benchmark with \"for b.Loop()\", and remove any preceding calls\n to b.StopTimer, b.StartTimer, and b.ResetTimer.\n\n - slicesdelete: replace append(s[:i], s[i+1]...) by\n slices.Delete(s, i, i+1), added in go1.21.\n\n - rangeint: replace a 3-clause \"for i := 0; i \u003c n; i++\" loop by\n \"for i := range n\", added in go1.22.\n\n - stringsseq: replace Split in \"for range strings.Split(...)\" by go1.24's\n more efficient SplitSeq, or Fields with FieldSeq.\n\n - stringscutprefix: replace some uses of HasPrefix followed by TrimPrefix with CutPrefix,\n added to the strings package in go1.20.\n\n - waitgroup: replace old complex usages of sync.WaitGroup by less complex WaitGroup.Go method in go1.25.", "URL": "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/modernize", "Default": true }, diff --git a/internal/gofix/doc.go b/internal/gofix/doc.go index 15de4f28b27..8f5f4b9dc46 100644 --- a/internal/gofix/doc.go +++ b/internal/gofix/doc.go @@ -83,6 +83,8 @@ You can use this (officially unsupported) command to apply gofix fixes en masse: $ go run golang.org/x/tools/gopls/internal/analysis/gofix/cmd/gofix@latest -test ./... +(Do not use "go get -tool" to add gopls as a dependency of your module; it will not work.) 
+ # Analyzer gofixdirective gofixdirective: validate uses of gofix comment directives From 1494dfe09eb07afa583b7c039b1e96f63b92d76d Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Tue, 15 Apr 2025 14:30:18 -0400 Subject: [PATCH 203/270] gopls: improve warning against using go get -tool with gopls (Belatedly addresses review comments on CL 665636.) Updates golang/go#73279 Change-Id: I03c2e5e2306f7c6de349e7d11ba87df6332622f5 Reviewed-on: https://go-review.googlesource.com/c/tools/+/665637 LUCI-TryBot-Result: Go LUCI Reviewed-by: Robert Findley Auto-Submit: Alan Donovan --- gopls/doc/analyzers.md | 3 ++- gopls/internal/analysis/modernize/doc.go | 3 ++- gopls/internal/doc/api.json | 4 ++-- internal/gofix/doc.go | 3 ++- 4 files changed, 8 insertions(+), 5 deletions(-) diff --git a/gopls/doc/analyzers.md b/gopls/doc/analyzers.md index abb1227a59b..6e9ee81058c 100644 --- a/gopls/doc/analyzers.md +++ b/gopls/doc/analyzers.md @@ -492,7 +492,8 @@ following command: $ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -fix -test ./... -(Do not use "go get -tool" to add gopls as a dependency of your module; it will not work.) +(Do not use "go get -tool" to add gopls as a dependency of your +module; gopls commands must be built from their release branch.) If the tool warns of conflicting fixes, you may need to run it more than once until it has applied all fixes cleanly. This command is diff --git a/gopls/internal/analysis/modernize/doc.go b/gopls/internal/analysis/modernize/doc.go index 1d64cc7dc7c..2c4b893f6d2 100644 --- a/gopls/internal/analysis/modernize/doc.go +++ b/gopls/internal/analysis/modernize/doc.go @@ -25,7 +25,8 @@ // // $ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -fix -test ./... // -// (Do not use "go get -tool" to add gopls as a dependency of your module; it will not work.) +// (Do not use "go get -tool" to add gopls as a dependency of your +// module; gopls commands must be built from their release branch.) // // If the tool warns of conflicting fixes, you may need to run it more // than once until it has applied all fixes cleanly. This command is diff --git a/gopls/internal/doc/api.json b/gopls/internal/doc/api.json index c4e99473edb..d622bb48251 100644 --- a/gopls/internal/doc/api.json +++ b/gopls/internal/doc/api.json @@ -562,7 +562,7 @@ }, { "Name": "\"modernize\"", - "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code by using more modern features of Go and its standard\nlibrary.\n\nEach diagnostic provides a fix. Our intent is that these fixes may\nbe safely applied en masse without changing the behavior of your\nprogram. In some cases the suggested fixes are imperfect and may\nlead to (for example) unused imports or unused local variables,\ncausing build breakage. However, these problems are generally\ntrivial to fix. We regard any modernizer whose fix changes program\nbehavior to have a serious bug and will endeavor to fix it.\n\nTo apply all modernization fixes en masse, you can use the\nfollowing command:\n\n\t$ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -fix -test ./...\n\n(Do not use \"go get -tool\" to add gopls as a dependency of your module; it will not work.)\n\nIf the tool warns of conflicting fixes, you may need to run it more\nthan once until it has applied all fixes cleanly. 
This command is\nnot an officially supported interface and may change in the future.\n\nChanges produced by this tool should be reviewed as usual before\nbeing merged. In some cases, a loop may be replaced by a simple\nfunction call, causing comments within the loop to be discarded.\nHuman judgment may be required to avoid losing comments of value.\n\nEach diagnostic reported by modernize has a specific category. (The\ncategories are listed below.) Diagnostics in some categories, such\nas \"efaceany\" (which replaces \"interface{}\" with \"any\" where it is\nsafe to do so) are particularly numerous. It may ease the burden of\ncode review to apply fixes in two passes, the first change\nconsisting only of fixes of category \"efaceany\", the second\nconsisting of all others. This can be achieved using the -category flag:\n\n\t$ modernize -category=efaceany -fix -test ./...\n\t$ modernize -category=-efaceany -fix -test ./...\n\nCategories of modernize diagnostic:\n\n - forvar: remove x := x variable declarations made unnecessary by the new semantics of loops in go1.22.\n\n - slicescontains: replace 'for i, elem := range s { if elem == needle { ...; break }'\n by a call to slices.Contains, added in go1.21.\n\n - minmax: replace an if/else conditional assignment by a call to\n the built-in min or max functions added in go1.21.\n\n - sortslice: replace sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21.\n\n - efaceany: replace interface{} by the 'any' type added in go1.18.\n\n - slicesclone: replace append([]T(nil), s...) by slices.Clone(s) or\n slices.Concat(s), added in go1.21.\n\n - mapsloop: replace a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions from\n the maps package, added in go1.21.\n\n - fmtappendf: replace []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...),\n added in go1.19.\n\n - testingcontext: replace uses of context.WithCancel in tests\n with t.Context, added in go1.24.\n\n - omitzero: replace omitempty by omitzero on structs, added in go1.24.\n\n - bloop: replace \"for i := range b.N\" or \"for range b.N\" in a\n benchmark with \"for b.Loop()\", and remove any preceding calls\n to b.StopTimer, b.StartTimer, and b.ResetTimer.\n\n - slicesdelete: replace append(s[:i], s[i+1]...) by\n slices.Delete(s, i, i+1), added in go1.21.\n\n - rangeint: replace a 3-clause \"for i := 0; i \u003c n; i++\" loop by\n \"for i := range n\", added in go1.22.\n\n - stringsseq: replace Split in \"for range strings.Split(...)\" by go1.24's\n more efficient SplitSeq, or Fields with FieldSeq.\n\n - stringscutprefix: replace some uses of HasPrefix followed by TrimPrefix with CutPrefix,\n added to the strings package in go1.20.\n\n - waitgroup: replace old complex usages of sync.WaitGroup by less complex WaitGroup.Go method in go1.25.", + "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code by using more modern features of Go and its standard\nlibrary.\n\nEach diagnostic provides a fix. Our intent is that these fixes may\nbe safely applied en masse without changing the behavior of your\nprogram. In some cases the suggested fixes are imperfect and may\nlead to (for example) unused imports or unused local variables,\ncausing build breakage. However, these problems are generally\ntrivial to fix. 
We regard any modernizer whose fix changes program\nbehavior to have a serious bug and will endeavor to fix it.\n\nTo apply all modernization fixes en masse, you can use the\nfollowing command:\n\n\t$ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -fix -test ./...\n\n(Do not use \"go get -tool\" to add gopls as a dependency of your\nmodule; gopls commands must be built from their release branch.)\n\nIf the tool warns of conflicting fixes, you may need to run it more\nthan once until it has applied all fixes cleanly. This command is\nnot an officially supported interface and may change in the future.\n\nChanges produced by this tool should be reviewed as usual before\nbeing merged. In some cases, a loop may be replaced by a simple\nfunction call, causing comments within the loop to be discarded.\nHuman judgment may be required to avoid losing comments of value.\n\nEach diagnostic reported by modernize has a specific category. (The\ncategories are listed below.) Diagnostics in some categories, such\nas \"efaceany\" (which replaces \"interface{}\" with \"any\" where it is\nsafe to do so) are particularly numerous. It may ease the burden of\ncode review to apply fixes in two passes, the first change\nconsisting only of fixes of category \"efaceany\", the second\nconsisting of all others. This can be achieved using the -category flag:\n\n\t$ modernize -category=efaceany -fix -test ./...\n\t$ modernize -category=-efaceany -fix -test ./...\n\nCategories of modernize diagnostic:\n\n - forvar: remove x := x variable declarations made unnecessary by the new semantics of loops in go1.22.\n\n - slicescontains: replace 'for i, elem := range s { if elem == needle { ...; break }'\n by a call to slices.Contains, added in go1.21.\n\n - minmax: replace an if/else conditional assignment by a call to\n the built-in min or max functions added in go1.21.\n\n - sortslice: replace sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21.\n\n - efaceany: replace interface{} by the 'any' type added in go1.18.\n\n - slicesclone: replace append([]T(nil), s...) by slices.Clone(s) or\n slices.Concat(s), added in go1.21.\n\n - mapsloop: replace a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions from\n the maps package, added in go1.21.\n\n - fmtappendf: replace []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...),\n added in go1.19.\n\n - testingcontext: replace uses of context.WithCancel in tests\n with t.Context, added in go1.24.\n\n - omitzero: replace omitempty by omitzero on structs, added in go1.24.\n\n - bloop: replace \"for i := range b.N\" or \"for range b.N\" in a\n benchmark with \"for b.Loop()\", and remove any preceding calls\n to b.StopTimer, b.StartTimer, and b.ResetTimer.\n\n - slicesdelete: replace append(s[:i], s[i+1]...) 
by\n slices.Delete(s, i, i+1), added in go1.21.\n\n - rangeint: replace a 3-clause \"for i := 0; i \u003c n; i++\" loop by\n \"for i := range n\", added in go1.22.\n\n - stringsseq: replace Split in \"for range strings.Split(...)\" by go1.24's\n more efficient SplitSeq, or Fields with FieldSeq.\n\n - stringscutprefix: replace some uses of HasPrefix followed by TrimPrefix with CutPrefix,\n added to the strings package in go1.20.\n\n - waitgroup: replace old complex usages of sync.WaitGroup by less complex WaitGroup.Go method in go1.25.", "Default": "true", "Status": "" }, @@ -1338,7 +1338,7 @@ }, { "Name": "modernize", - "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code by using more modern features of Go and its standard\nlibrary.\n\nEach diagnostic provides a fix. Our intent is that these fixes may\nbe safely applied en masse without changing the behavior of your\nprogram. In some cases the suggested fixes are imperfect and may\nlead to (for example) unused imports or unused local variables,\ncausing build breakage. However, these problems are generally\ntrivial to fix. We regard any modernizer whose fix changes program\nbehavior to have a serious bug and will endeavor to fix it.\n\nTo apply all modernization fixes en masse, you can use the\nfollowing command:\n\n\t$ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -fix -test ./...\n\n(Do not use \"go get -tool\" to add gopls as a dependency of your module; it will not work.)\n\nIf the tool warns of conflicting fixes, you may need to run it more\nthan once until it has applied all fixes cleanly. This command is\nnot an officially supported interface and may change in the future.\n\nChanges produced by this tool should be reviewed as usual before\nbeing merged. In some cases, a loop may be replaced by a simple\nfunction call, causing comments within the loop to be discarded.\nHuman judgment may be required to avoid losing comments of value.\n\nEach diagnostic reported by modernize has a specific category. (The\ncategories are listed below.) Diagnostics in some categories, such\nas \"efaceany\" (which replaces \"interface{}\" with \"any\" where it is\nsafe to do so) are particularly numerous. It may ease the burden of\ncode review to apply fixes in two passes, the first change\nconsisting only of fixes of category \"efaceany\", the second\nconsisting of all others. This can be achieved using the -category flag:\n\n\t$ modernize -category=efaceany -fix -test ./...\n\t$ modernize -category=-efaceany -fix -test ./...\n\nCategories of modernize diagnostic:\n\n - forvar: remove x := x variable declarations made unnecessary by the new semantics of loops in go1.22.\n\n - slicescontains: replace 'for i, elem := range s { if elem == needle { ...; break }'\n by a call to slices.Contains, added in go1.21.\n\n - minmax: replace an if/else conditional assignment by a call to\n the built-in min or max functions added in go1.21.\n\n - sortslice: replace sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21.\n\n - efaceany: replace interface{} by the 'any' type added in go1.18.\n\n - slicesclone: replace append([]T(nil), s...) 
by slices.Clone(s) or\n slices.Concat(s), added in go1.21.\n\n - mapsloop: replace a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions from\n the maps package, added in go1.21.\n\n - fmtappendf: replace []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...),\n added in go1.19.\n\n - testingcontext: replace uses of context.WithCancel in tests\n with t.Context, added in go1.24.\n\n - omitzero: replace omitempty by omitzero on structs, added in go1.24.\n\n - bloop: replace \"for i := range b.N\" or \"for range b.N\" in a\n benchmark with \"for b.Loop()\", and remove any preceding calls\n to b.StopTimer, b.StartTimer, and b.ResetTimer.\n\n - slicesdelete: replace append(s[:i], s[i+1]...) by\n slices.Delete(s, i, i+1), added in go1.21.\n\n - rangeint: replace a 3-clause \"for i := 0; i \u003c n; i++\" loop by\n \"for i := range n\", added in go1.22.\n\n - stringsseq: replace Split in \"for range strings.Split(...)\" by go1.24's\n more efficient SplitSeq, or Fields with FieldSeq.\n\n - stringscutprefix: replace some uses of HasPrefix followed by TrimPrefix with CutPrefix,\n added to the strings package in go1.20.\n\n - waitgroup: replace old complex usages of sync.WaitGroup by less complex WaitGroup.Go method in go1.25.", + "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code by using more modern features of Go and its standard\nlibrary.\n\nEach diagnostic provides a fix. Our intent is that these fixes may\nbe safely applied en masse without changing the behavior of your\nprogram. In some cases the suggested fixes are imperfect and may\nlead to (for example) unused imports or unused local variables,\ncausing build breakage. However, these problems are generally\ntrivial to fix. We regard any modernizer whose fix changes program\nbehavior to have a serious bug and will endeavor to fix it.\n\nTo apply all modernization fixes en masse, you can use the\nfollowing command:\n\n\t$ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -fix -test ./...\n\n(Do not use \"go get -tool\" to add gopls as a dependency of your\nmodule; gopls commands must be built from their release branch.)\n\nIf the tool warns of conflicting fixes, you may need to run it more\nthan once until it has applied all fixes cleanly. This command is\nnot an officially supported interface and may change in the future.\n\nChanges produced by this tool should be reviewed as usual before\nbeing merged. In some cases, a loop may be replaced by a simple\nfunction call, causing comments within the loop to be discarded.\nHuman judgment may be required to avoid losing comments of value.\n\nEach diagnostic reported by modernize has a specific category. (The\ncategories are listed below.) Diagnostics in some categories, such\nas \"efaceany\" (which replaces \"interface{}\" with \"any\" where it is\nsafe to do so) are particularly numerous. It may ease the burden of\ncode review to apply fixes in two passes, the first change\nconsisting only of fixes of category \"efaceany\", the second\nconsisting of all others. 
This can be achieved using the -category flag:\n\n\t$ modernize -category=efaceany -fix -test ./...\n\t$ modernize -category=-efaceany -fix -test ./...\n\nCategories of modernize diagnostic:\n\n - forvar: remove x := x variable declarations made unnecessary by the new semantics of loops in go1.22.\n\n - slicescontains: replace 'for i, elem := range s { if elem == needle { ...; break }'\n by a call to slices.Contains, added in go1.21.\n\n - minmax: replace an if/else conditional assignment by a call to\n the built-in min or max functions added in go1.21.\n\n - sortslice: replace sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21.\n\n - efaceany: replace interface{} by the 'any' type added in go1.18.\n\n - slicesclone: replace append([]T(nil), s...) by slices.Clone(s) or\n slices.Concat(s), added in go1.21.\n\n - mapsloop: replace a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions from\n the maps package, added in go1.21.\n\n - fmtappendf: replace []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...),\n added in go1.19.\n\n - testingcontext: replace uses of context.WithCancel in tests\n with t.Context, added in go1.24.\n\n - omitzero: replace omitempty by omitzero on structs, added in go1.24.\n\n - bloop: replace \"for i := range b.N\" or \"for range b.N\" in a\n benchmark with \"for b.Loop()\", and remove any preceding calls\n to b.StopTimer, b.StartTimer, and b.ResetTimer.\n\n - slicesdelete: replace append(s[:i], s[i+1]...) by\n slices.Delete(s, i, i+1), added in go1.21.\n\n - rangeint: replace a 3-clause \"for i := 0; i \u003c n; i++\" loop by\n \"for i := range n\", added in go1.22.\n\n - stringsseq: replace Split in \"for range strings.Split(...)\" by go1.24's\n more efficient SplitSeq, or Fields with FieldSeq.\n\n - stringscutprefix: replace some uses of HasPrefix followed by TrimPrefix with CutPrefix,\n added to the strings package in go1.20.\n\n - waitgroup: replace old complex usages of sync.WaitGroup by less complex WaitGroup.Go method in go1.25.", "URL": "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/modernize", "Default": true }, diff --git a/internal/gofix/doc.go b/internal/gofix/doc.go index 8f5f4b9dc46..0859faa31d3 100644 --- a/internal/gofix/doc.go +++ b/internal/gofix/doc.go @@ -83,7 +83,8 @@ You can use this (officially unsupported) command to apply gofix fixes en masse: $ go run golang.org/x/tools/gopls/internal/analysis/gofix/cmd/gofix@latest -test ./... -(Do not use "go get -tool" to add gopls as a dependency of your module; it will not work.) +(Do not use "go get -tool" to add gopls as a dependency of your +module; gopls commands must be built from their release branch.) # Analyzer gofixdirective From ee8f138447fd9a49b174cf4d7e3a7283c88e8560 Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Fri, 4 Apr 2025 12:23:03 -0400 Subject: [PATCH 204/270] go/analysis/passes/gofix: go:fix directive checker Add an analyzer that checks the validity of go:fix directives. To avoid pulling the inlining framework into go vet, we split the gofix analyzers into three packages. The internal/gofix/findgofix package finds each `go:fix inline` directive, checks it for errors, and invokes a callback. The internal/gofix package uses findgofix with callbacks that register facts, and then does the inlining. The go/analysis/passes/gofix package uses findgofix to find errors, but takes no other action. 
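A sketch of how the pieces fit together (the Handler interface and the
two calls below mirror the code added in this CL):

	// findgofix: walk the syntax tree, report invalid directives, and
	// invoke a callback for each valid one.
	type Handler interface {
		HandleFunc(*ast.FuncDecl)
		HandleAlias(*ast.TypeSpec)
		HandleConst(name, rhs *ast.Ident)
	}

	// go/analysis/passes/gofix: checking only, so the handler is nil.
	findgofix.Find(pass, root, nil)

	// internal/gofix: *analyzer implements Handler to export facts,
	// then performs the inlining.
	findgofix.Find(pass, a.root, a)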
vet_std_test passes, indicating that there were no findings in the standard library. A followup CL on the go repo will vendor x/tools at head and add this analyzer to go vet. Change-Id: Ib9ebe38cc719d7b3d85beccd76a9bedf8a2cd077 Reviewed-on: https://go-review.googlesource.com/c/tools/+/664176 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan --- go/analysis/passes/gofix/doc.go | 49 ++++++ go/analysis/passes/gofix/gofix.go | 34 ++++ go/analysis/passes/gofix/gofix_test.go | 17 ++ go/analysis/passes/gofix/testdata/src/a/a.go | 47 ++++++ go/analysis/unitchecker/vet_std_test.go | 2 + internal/gofix/doc.go | 1 - internal/gofix/findgofix/findgofix.go | 143 ++++++++++++++++ internal/gofix/gofix.go | 167 +++---------------- internal/gofix/gofix_test.go | 5 - 9 files changed, 318 insertions(+), 147 deletions(-) create mode 100644 go/analysis/passes/gofix/doc.go create mode 100644 go/analysis/passes/gofix/gofix.go create mode 100644 go/analysis/passes/gofix/gofix_test.go create mode 100644 go/analysis/passes/gofix/testdata/src/a/a.go create mode 100644 internal/gofix/findgofix/findgofix.go diff --git a/go/analysis/passes/gofix/doc.go b/go/analysis/passes/gofix/doc.go new file mode 100644 index 00000000000..683bac9cb48 --- /dev/null +++ b/go/analysis/passes/gofix/doc.go @@ -0,0 +1,49 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +/* +Package gofix defines an Analyzer that checks "//go:fix inline" directives. +See golang.org/x/tools/internal/gofix/doc.go for details. + +# Analyzer gofixdirective + +gofixdirective: validate uses of gofix comment directives + +The gofixdirective analyzer checks "//go:fix inline" directives for correctness. + +The proposal https://go.dev/issue/32816 introduces the "//go:fix" directives. + +The analyzer checks for the following issues: + +- A constant definition can be marked for inlining only if it refers to another +named constant. + + //go:fix inline + const ( + a = 1 // error + b = iota // error + c = math.Pi // OK + ) + +- A type definition can be marked for inlining only if it is an alias. + + //go:fix inline + type ( + T int // error + A = int // OK + ) + +- An alias whose right-hand side contains a non-literal array size +cannot be marked for inlining. + + const two = 2 + + //go:fix inline + type ( + A = []int // OK + B = [1]int // OK + C = [two]int // error + ) +*/ +package gofix diff --git a/go/analysis/passes/gofix/gofix.go b/go/analysis/passes/gofix/gofix.go new file mode 100644 index 00000000000..706e0759c3a --- /dev/null +++ b/go/analysis/passes/gofix/gofix.go @@ -0,0 +1,34 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package gofix defines an analyzer that checks go:fix directives. 
+package gofix + +import ( + _ "embed" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/astutil/cursor" + "golang.org/x/tools/internal/gofix/findgofix" +) + +//go:embed doc.go +var doc string + +var Analyzer = &analysis.Analyzer{ + Name: "gofixdirective", + Doc: analysisinternal.MustExtractDoc(doc, "gofixdirective"), + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/gofix", + Run: run, + Requires: []*analysis.Analyzer{inspect.Analyzer}, +} + +func run(pass *analysis.Pass) (any, error) { + root := cursor.Root(pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)) + findgofix.Find(pass, root, nil) + return nil, nil +} diff --git a/go/analysis/passes/gofix/gofix_test.go b/go/analysis/passes/gofix/gofix_test.go new file mode 100644 index 00000000000..b2e6d4387d4 --- /dev/null +++ b/go/analysis/passes/gofix/gofix_test.go @@ -0,0 +1,17 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gofix_test + +import ( + "testing" + + "golang.org/x/tools/go/analysis/analysistest" + "golang.org/x/tools/go/analysis/passes/gofix" +) + +func Test(t *testing.T) { + testdata := analysistest.TestData() + analysistest.Run(t, testdata, gofix.Analyzer, "a") +} diff --git a/go/analysis/passes/gofix/testdata/src/a/a.go b/go/analysis/passes/gofix/testdata/src/a/a.go new file mode 100644 index 00000000000..3588290cfb3 --- /dev/null +++ b/go/analysis/passes/gofix/testdata/src/a/a.go @@ -0,0 +1,47 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file contains tests for the gofix checker. 
+ +package a + +const one = 1 + +//go:fix inline +const ( + in3 = one + in4 = one + bad1 = 1 // want `invalid //go:fix inline directive: const value is not the name of another constant` +) + +//go:fix inline +const in5, + in6, + bad2 = one, one, + one + 1 // want `invalid //go:fix inline directive: const value is not the name of another constant` + +//go:fix inline +const ( + a = iota // want `invalid //go:fix inline directive: const value is iota` + b + in7 = one +) + +func shadow() { + //go:fix inline + const a = iota // want `invalid //go:fix inline directive: const value is iota` + + const iota = 2 + + //go:fix inline + const b = iota // not an error: iota is not the builtin +} + +// Type aliases + +//go:fix inline +type A int // want `invalid //go:fix inline directive: not a type alias` + +//go:fix inline +type E = map[[one]string][]int // want `invalid //go:fix inline directive: array types not supported` diff --git a/go/analysis/unitchecker/vet_std_test.go b/go/analysis/unitchecker/vet_std_test.go index a79224c7188..ac61950d739 100644 --- a/go/analysis/unitchecker/vet_std_test.go +++ b/go/analysis/unitchecker/vet_std_test.go @@ -24,6 +24,7 @@ import ( "golang.org/x/tools/go/analysis/passes/directive" "golang.org/x/tools/go/analysis/passes/errorsas" "golang.org/x/tools/go/analysis/passes/framepointer" + "golang.org/x/tools/go/analysis/passes/gofix" "golang.org/x/tools/go/analysis/passes/httpresponse" "golang.org/x/tools/go/analysis/passes/ifaceassert" "golang.org/x/tools/go/analysis/passes/loopclosure" @@ -62,6 +63,7 @@ func vet() { directive.Analyzer, errorsas.Analyzer, framepointer.Analyzer, + gofix.Analyzer, httpresponse.Analyzer, ifaceassert.Analyzer, loopclosure.Analyzer, diff --git a/internal/gofix/doc.go b/internal/gofix/doc.go index 0859faa31d3..7b7576cb828 100644 --- a/internal/gofix/doc.go +++ b/internal/gofix/doc.go @@ -6,7 +6,6 @@ Package gofix defines an Analyzer that inlines calls to functions and uses of constants marked with a "//go:fix inline" directive. -A second analyzer only checks uses of the directive. # Analyzer gofix diff --git a/internal/gofix/findgofix/findgofix.go b/internal/gofix/findgofix/findgofix.go new file mode 100644 index 00000000000..38ce079b923 --- /dev/null +++ b/internal/gofix/findgofix/findgofix.go @@ -0,0 +1,143 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package findgofix searches for and validates go:fix directives. The +// internal/gofix package uses findgofix to perform inlining. +// The go/analysis/passes/gofix package uses findgofix to check for problems +// with go:fix directives. +// +// findgofix is separate from gofix to avoid depending on refactor/inline, +// which is large. +package findgofix + +// This package is tested by internal/gofix. + +import ( + "go/ast" + "go/token" + "go/types" + + "golang.org/x/tools/go/analysis" + internalastutil "golang.org/x/tools/internal/astutil" + "golang.org/x/tools/internal/astutil/cursor" +) + +// A Handler handles language entities with go:fix directives. +type Handler interface { + HandleFunc(*ast.FuncDecl) + HandleAlias(*ast.TypeSpec) + HandleConst(name, rhs *ast.Ident) +} + +// Find finds functions and constants annotated with an appropriate "//go:fix" +// comment (the syntax proposed by #32816), and calls handler methods for each one. +// h may be nil. 
+func Find(pass *analysis.Pass, root cursor.Cursor, h Handler) { + for cur := range root.Preorder((*ast.FuncDecl)(nil), (*ast.GenDecl)(nil)) { + switch decl := cur.Node().(type) { + case *ast.FuncDecl: + findFunc(decl, h) + + case *ast.GenDecl: + if decl.Tok != token.CONST && decl.Tok != token.TYPE { + continue + } + declInline := hasFixInline(decl.Doc) + // Accept inline directives on the entire decl as well as individual specs. + for _, spec := range decl.Specs { + switch spec := spec.(type) { + case *ast.TypeSpec: // Tok == TYPE + findAlias(pass, spec, declInline, h) + + case *ast.ValueSpec: // Tok == CONST + findConst(pass, spec, declInline, h) + } + } + } + } +} + +func findFunc(decl *ast.FuncDecl, h Handler) { + if !hasFixInline(decl.Doc) { + return + } + if h != nil { + h.HandleFunc(decl) + } +} + +func findAlias(pass *analysis.Pass, spec *ast.TypeSpec, declInline bool, h Handler) { + if !declInline && !hasFixInline(spec.Doc) { + return + } + if !spec.Assign.IsValid() { + pass.Reportf(spec.Pos(), "invalid //go:fix inline directive: not a type alias") + return + } + + // Disallow inlines of type expressions containing array types. + // Given an array type like [N]int where N is a named constant, go/types provides + // only the value of the constant as an int64. So inlining A in this code: + // + // const N = 5 + // type A = [N]int + // + // would result in [5]int, breaking the connection with N. + for n := range ast.Preorder(spec.Type) { + if ar, ok := n.(*ast.ArrayType); ok && ar.Len != nil { + // Make an exception when the array length is a literal int. + if lit, ok := ast.Unparen(ar.Len).(*ast.BasicLit); ok && lit.Kind == token.INT { + continue + } + pass.Reportf(spec.Pos(), "invalid //go:fix inline directive: array types not supported") + return + } + } + if h != nil { + h.HandleAlias(spec) + } +} + +func findConst(pass *analysis.Pass, spec *ast.ValueSpec, declInline bool, h Handler) { + specInline := hasFixInline(spec.Doc) + if declInline || specInline { + for i, nameIdent := range spec.Names { + if i >= len(spec.Values) { + // Possible following an iota. + break + } + var rhsIdent *ast.Ident + switch val := spec.Values[i].(type) { + case *ast.Ident: + // Constants defined with the predeclared iota cannot be inlined. + if pass.TypesInfo.Uses[val] == builtinIota { + pass.Reportf(val.Pos(), "invalid //go:fix inline directive: const value is iota") + return + } + rhsIdent = val + case *ast.SelectorExpr: + rhsIdent = val.Sel + default: + pass.Reportf(val.Pos(), "invalid //go:fix inline directive: const value is not the name of another constant") + return + } + if h != nil { + h.HandleConst(nameIdent, rhsIdent) + } + } + } +} + +// hasFixInline reports the presence of a "//go:fix inline" directive +// in the comments. 
+func hasFixInline(cg *ast.CommentGroup) bool { + for _, d := range internalastutil.Directives(cg) { + if d.Tool == "go" && d.Name == "fix" && d.Args == "inline" { + return true + } + } + return false +} + +var builtinIota = types.Universe.Lookup("iota") diff --git a/internal/gofix/gofix.go b/internal/gofix/gofix.go index 565272b5e46..904f17cf3d5 100644 --- a/internal/gofix/gofix.go +++ b/internal/gofix/gofix.go @@ -20,10 +20,10 @@ import ( "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" "golang.org/x/tools/internal/analysisinternal" - internalastutil "golang.org/x/tools/internal/astutil" "golang.org/x/tools/internal/astutil/cursor" "golang.org/x/tools/internal/astutil/edge" "golang.org/x/tools/internal/diff" + "golang.org/x/tools/internal/gofix/findgofix" "golang.org/x/tools/internal/refactor/inline" "golang.org/x/tools/internal/typesinternal" ) @@ -35,20 +35,7 @@ var Analyzer = &analysis.Analyzer{ Name: "gofix", Doc: analysisinternal.MustExtractDoc(doc, "gofix"), URL: "https://pkg.go.dev/golang.org/x/tools/internal/gofix", - Run: func(pass *analysis.Pass) (any, error) { return run(pass, true) }, - FactTypes: []analysis.Fact{ - (*goFixInlineFuncFact)(nil), - (*goFixInlineConstFact)(nil), - (*goFixInlineAliasFact)(nil), - }, - Requires: []*analysis.Analyzer{inspect.Analyzer}, -} - -var DirectiveAnalyzer = &analysis.Analyzer{ - Name: "gofixdirective", - Doc: analysisinternal.MustExtractDoc(doc, "gofixdirective"), - URL: "https://pkg.go.dev/golang.org/x/tools/internal/gofix", - Run: func(pass *analysis.Pass) (any, error) { return run(pass, false) }, + Run: run, FactTypes: []analysis.Fact{ (*goFixInlineFuncFact)(nil), (*goFixInlineConstFact)(nil), @@ -60,7 +47,6 @@ var DirectiveAnalyzer = &analysis.Analyzer{ // analyzer holds the state for this analysis. type analyzer struct { pass *analysis.Pass - fix bool // only suggest fixes if true; else, just check directives root cursor.Cursor // memoization of repeated calls for same file. fileContent map[string][]byte @@ -70,53 +56,22 @@ type analyzer struct { inlinableAliases map[*types.TypeName]*goFixInlineAliasFact } -func run(pass *analysis.Pass, fix bool) (any, error) { +func run(pass *analysis.Pass) (any, error) { a := &analyzer{ pass: pass, - fix: fix, root: cursor.Root(pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)), fileContent: make(map[string][]byte), inlinableFuncs: make(map[*types.Func]*inline.Callee), inlinableConsts: make(map[*types.Const]*goFixInlineConstFact), inlinableAliases: make(map[*types.TypeName]*goFixInlineAliasFact), } - a.find() + findgofix.Find(pass, a.root, a) a.inline() return nil, nil } -// find finds functions and constants annotated with an appropriate "//go:fix" -// comment (the syntax proposed by #32816), -// and exports a fact for each one. -func (a *analyzer) find() { - for cur := range a.root.Preorder((*ast.FuncDecl)(nil), (*ast.GenDecl)(nil)) { - switch decl := cur.Node().(type) { - case *ast.FuncDecl: - a.findFunc(decl) - - case *ast.GenDecl: - if decl.Tok != token.CONST && decl.Tok != token.TYPE { - continue - } - declInline := hasFixInline(decl.Doc) - // Accept inline directives on the entire decl as well as individual specs. 
- for _, spec := range decl.Specs { - switch spec := spec.(type) { - case *ast.TypeSpec: // Tok == TYPE - a.findAlias(spec, declInline) - - case *ast.ValueSpec: // Tok == CONST - a.findConst(spec, declInline) - } - } - } - } -} - -func (a *analyzer) findFunc(decl *ast.FuncDecl) { - if !hasFixInline(decl.Doc) { - return - } +// HandleFunc exports a fact for functions marked with go:fix. +func (a *analyzer) HandleFunc(decl *ast.FuncDecl) { content, err := a.readFile(decl) if err != nil { a.pass.Reportf(decl.Doc.Pos(), "invalid inlining candidate: cannot read source file: %v", err) @@ -132,34 +87,8 @@ func (a *analyzer) findFunc(decl *ast.FuncDecl) { a.inlinableFuncs[fn] = callee } -func (a *analyzer) findAlias(spec *ast.TypeSpec, declInline bool) { - if !declInline && !hasFixInline(spec.Doc) { - return - } - if !spec.Assign.IsValid() { - a.pass.Reportf(spec.Pos(), "invalid //go:fix inline directive: not a type alias") - return - } - - // Disallow inlines of type expressions containing array types. - // Given an array type like [N]int where N is a named constant, go/types provides - // only the value of the constant as an int64. So inlining A in this code: - // - // const N = 5 - // type A = [N]int - // - // would result in [5]int, breaking the connection with N. - for n := range ast.Preorder(spec.Type) { - if ar, ok := n.(*ast.ArrayType); ok && ar.Len != nil { - // Make an exception when the array length is a literal int. - if lit, ok := ast.Unparen(ar.Len).(*ast.BasicLit); ok && lit.Kind == token.INT { - continue - } - a.pass.Reportf(spec.Pos(), "invalid //go:fix inline directive: array types not supported") - return - } - } - +// HandleAlias exports a fact for aliases marked with go:fix. +func (a *analyzer) HandleAlias(spec *ast.TypeSpec) { // Remember that this is an inlinable alias. typ := &goFixInlineAliasFact{} lhs := a.pass.TypesInfo.Defs[spec.Name].(*types.TypeName) @@ -172,49 +101,24 @@ func (a *analyzer) findAlias(spec *ast.TypeSpec, declInline bool) { } } -func (a *analyzer) findConst(spec *ast.ValueSpec, declInline bool) { - info := a.pass.TypesInfo - specInline := hasFixInline(spec.Doc) - if declInline || specInline { - for i, name := range spec.Names { - if i >= len(spec.Values) { - // Possible following an iota. - break - } - val := spec.Values[i] - var rhsID *ast.Ident - switch e := val.(type) { - case *ast.Ident: - // Constants defined with the predeclared iota cannot be inlined. - if info.Uses[e] == builtinIota { - a.pass.Reportf(val.Pos(), "invalid //go:fix inline directive: const value is iota") - return - } - rhsID = e - case *ast.SelectorExpr: - rhsID = e.Sel - default: - a.pass.Reportf(val.Pos(), "invalid //go:fix inline directive: const value is not the name of another constant") - return - } - lhs := info.Defs[name].(*types.Const) - rhs := info.Uses[rhsID].(*types.Const) // must be so in a well-typed program - con := &goFixInlineConstFact{ - RHSName: rhs.Name(), - RHSPkgName: rhs.Pkg().Name(), - RHSPkgPath: rhs.Pkg().Path(), - } - if rhs.Pkg() == a.pass.Pkg { - con.rhsObj = rhs - } - a.inlinableConsts[lhs] = con - // Create a fact only if the LHS is exported and defined at top level. - // We create a fact even if the RHS is non-exported, - // so we can warn about uses in other packages. - if lhs.Exported() && typesinternal.IsPackageLevel(lhs) { - a.pass.ExportObjectFact(lhs, con) - } - } +// HandleConst exports a fact for constants marked with go:fix. 
+func (a *analyzer) HandleConst(nameIdent, rhsIdent *ast.Ident) { + lhs := a.pass.TypesInfo.Defs[nameIdent].(*types.Const) + rhs := a.pass.TypesInfo.Uses[rhsIdent].(*types.Const) // must be so in a well-typed program + con := &goFixInlineConstFact{ + RHSName: rhs.Name(), + RHSPkgName: rhs.Pkg().Name(), + RHSPkgPath: rhs.Pkg().Path(), + } + if rhs.Pkg() == a.pass.Pkg { + con.rhsObj = rhs + } + a.inlinableConsts[lhs] = con + // Create a fact only if the LHS is exported and defined at top level. + // We create a fact even if the RHS is non-exported, + // so we can warn about uses in other packages. + if lhs.Exported() && typesinternal.IsPackageLevel(lhs) { + a.pass.ExportObjectFact(lhs, con) } } @@ -275,9 +179,6 @@ func (a *analyzer) inlineCall(call *ast.CallExpr, cur cursor.Cursor) { a.pass.Reportf(call.Lparen, "%v", err) return } - if !a.fix { - return - } if res.Literalized { // Users are not fond of inlinings that literalize @@ -556,9 +457,6 @@ func (a *analyzer) inlineConst(con *types.Const, cur cursor.Cursor) { // reportInline reports a diagnostic for fixing an inlinable name. func (a *analyzer) reportInline(kind, capKind string, ident ast.Expr, edits []analysis.TextEdit, newText string) { - if !a.fix { - return - } edits = append(edits, analysis.TextEdit{ Pos: ident.Pos(), End: ident.End(), @@ -598,17 +496,6 @@ func currentFile(c cursor.Cursor) *ast.File { panic("no *ast.File enclosing a cursor: impossible") } -// hasFixInline reports the presence of a "//go:fix inline" directive -// in the comments. -func hasFixInline(cg *ast.CommentGroup) bool { - for _, d := range internalastutil.Directives(cg) { - if d.Tool == "go" && d.Name == "fix" && d.Args == "inline" { - return true - } - } - return false -} - // A goFixInlineFuncFact is exported for each function marked "//go:fix inline". // It holds information about the callee to support inlining. type goFixInlineFuncFact struct{ Callee *inline.Callee } @@ -641,8 +528,6 @@ func (*goFixInlineAliasFact) AFact() {} func discard(string, ...any) {} -var builtinIota = types.Universe.Lookup("iota") - type list[T any] interface { Len() int At(int) T diff --git a/internal/gofix/gofix_test.go b/internal/gofix/gofix_test.go index ae2df3860a8..9194d893577 100644 --- a/internal/gofix/gofix_test.go +++ b/internal/gofix/gofix_test.go @@ -25,11 +25,6 @@ func TestAnalyzer(t *testing.T) { analysistest.RunWithSuggestedFixes(t, analysistest.TestData(), Analyzer, "a", "b") } -func TestDirectiveAnalyzer(t *testing.T) { - analysistest.RunWithSuggestedFixes(t, analysistest.TestData(), DirectiveAnalyzer, "directive") - -} - func TestTypesWithNames(t *testing.T) { // Test setup inspired by internal/analysisinternal/addimport_test.go. testenv.NeedsDefaultImporter(t) From f76b112f11ae44045f70b22a3d0dc0627f72ae10 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Tue, 15 Apr 2025 23:31:30 -0400 Subject: [PATCH 205/270] gopls/internal/golang: Rename all receiver variables This CL causes the Rename operation, when applied to a method receiver variable, to rename all receiver variables of methods of the same named type, where possible. (Errors in secondary receivers, for example due to shadowing conflicts, are silently ignored.) 
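For example (an illustrative snippet, not taken from this CL's testdata):

	// Hypothetical type; any named type with several methods applies.
	type counter struct{ n int }

	func (c *counter) inc() { c.n++ }
	func (v *counter) reset() { v.n = 0 }

Renaming the receiver c of inc to x now also renames the receiver v of
reset to x, where possible; receivers that cannot be renamed cleanly are
quietly skipped.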
+ test, doc, relnote Fixes golang/go#41892 Change-Id: Ie5755508afc9ebdcfe0578cbd9cac71ee122fc38 Reviewed-on: https://go-review.googlesource.com/c/tools/+/665935 LUCI-TryBot-Result: Go LUCI Reviewed-by: Jonathan Amsterdam Auto-Submit: Alan Donovan --- gopls/doc/features/transformation.md | 13 +++- gopls/doc/release/v0.19.0.md | 7 ++ gopls/internal/golang/rename.go | 69 ++++++++++++++++++- .../test/marker/testdata/rename/recv.txt | 68 ++++++++++++++++++ 4 files changed, 153 insertions(+), 4 deletions(-) create mode 100644 gopls/internal/test/marker/testdata/rename/recv.txt diff --git a/gopls/doc/features/transformation.md b/gopls/doc/features/transformation.md index a72ff676832..b080a842996 100644 --- a/gopls/doc/features/transformation.md +++ b/gopls/doc/features/transformation.md @@ -315,11 +315,18 @@ Similar problems may arise with packages that use reflection, such as `encoding/json` or `text/template`. There is no substitute for good judgment and testing. +Special cases: + +- When renaming the receiver of a method, the tool also attempts to + rename the receivers of all other methods associated with the same + named type. Each other receiver that cannot be fully renamed is + quietly skipped. + +- Renaming a package declaration additionally causes the package's + directory to be renamed. + Some tips for best results: -- There is currently no special support for renaming all receivers of - a family of methods at once, so you will need to rename one receiver - one at a time (golang/go#41892). - The safety checks performed by the Rename algorithm require type information. If the program is grossly malformed, there may be insufficient information for it to run (golang/go#41870), diff --git a/gopls/doc/release/v0.19.0.md b/gopls/doc/release/v0.19.0.md index d2d570692d4..cf3b47067b0 100644 --- a/gopls/doc/release/v0.19.0.md +++ b/gopls/doc/release/v0.19.0.md @@ -7,6 +7,13 @@ # New features +## "Rename" of method receivers + +The Rename operation, when applied to the receiver of a method, now +also attempts to rename the receivers of all other methods associated +with the same named type. Each other receiver that cannot be fully +renamed is quietly skipped. + ## "Implementations" supports signature types The Implementations query reports the correspondence between abstract diff --git a/gopls/internal/golang/rename.go b/gopls/internal/golang/rename.go index 26e9d0a5a52..f1406cd773c 100644 --- a/gopls/internal/golang/rename.go +++ b/gopls/internal/golang/rename.go @@ -69,8 +69,10 @@ import ( "golang.org/x/tools/gopls/internal/protocol" goplsastutil "golang.org/x/tools/gopls/internal/util/astutil" "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/gopls/internal/util/moreiters" "golang.org/x/tools/gopls/internal/util/safetoken" internalastutil "golang.org/x/tools/internal/astutil" + "golang.org/x/tools/internal/astutil/cursor" "golang.org/x/tools/internal/diff" "golang.org/x/tools/internal/event" "golang.org/x/tools/internal/typesinternal" @@ -482,6 +484,7 @@ func renameOrdinary(ctx context.Context, snapshot *cache.Snapshot, f file.Handle // computes the union across all variants.) 
var targets map[types.Object]ast.Node var pkg *cache.Package + var cur cursor.Cursor // of selected Ident or ImportSpec { mps, err := snapshot.MetadataForFile(ctx, f.URI()) if err != nil { @@ -505,6 +508,11 @@ func renameOrdinary(ctx context.Context, snapshot *cache.Snapshot, f file.Handle if err != nil { return nil, err } + var ok bool + cur, ok = pgf.Cursor.FindPos(pos, pos) + if !ok { + return nil, fmt.Errorf("can't find cursor for selection") + } objects, _, err := objectsAt(pkg.TypesInfo(), pgf.File, pos) if err != nil { return nil, err @@ -571,8 +579,34 @@ func renameOrdinary(ctx context.Context, snapshot *cache.Snapshot, f file.Handle for obj := range targets { objects = append(objects, obj) } + editMap, _, err := renameObjects(newName, pkg, objects...) - return editMap, err + if err != nil { + return nil, err + } + + // If target is a receiver, also rename receivers of + // other methods of the same type that don't already + // have the target name. Quietly discard edits from + // any that can't be renamed. + // + // TODO(adonovan): UX question: require that the + // selection be the declaration of the receiver before + // we broaden the renaming? + if curDecl, ok := moreiters.First(cur.Enclosing((*ast.FuncDecl)(nil))); ok { + decl := curDecl.Node().(*ast.FuncDecl) // enclosing func + if decl.Recv != nil && + len(decl.Recv.List) > 0 && + len(decl.Recv.List[0].Names) > 0 { + recv := pkg.TypesInfo().Defs[decl.Recv.List[0].Names[0]] + if recv == obj { + // TODO(adonovan): simplify the above 7 lines to + // to "if obj.(*Var).Kind==Recv" in go1.25. + renameReceivers(pkg, recv.(*types.Var), newName, editMap) + } + } + } + return editMap, nil } // Exported: search globally. @@ -632,6 +666,39 @@ func renameOrdinary(ctx context.Context, snapshot *cache.Snapshot, f file.Handle return renameExported(pkgs, declPkgPath, declObjPath, newName) } +// renameReceivers renames all receivers of methods of the same named +// type as recv. The edits of each successful renaming are added to +// editMap; the failed ones are quietly discarded. +func renameReceivers(pkg *cache.Package, recv *types.Var, newName string, editMap map[protocol.DocumentURI][]diff.Edit) { + _, named := typesinternal.ReceiverNamed(recv) + if named == nil { + return + } + + // Find receivers of other methods of the same named type. + for m := range named.Origin().Methods() { + recv2 := m.Signature().Recv() + if recv2 == recv { + continue // don't re-rename original receiver + } + if recv2.Name() == newName { + continue // no renaming needed + } + editMap2, _, err := renameObjects(newName, pkg, recv2) + if err != nil { + continue // ignore secondary failures + } + + // Since all methods (and their comments) + // are disjoint, and don't affect imports, + // we can safely assume that all edits are + // nonconflicting and disjoint. + for uri, edits := range editMap2 { + editMap[uri] = append(editMap[uri], edits...) + } + } +} + // typeCheckReverseDependencies returns the type-checked packages for // the reverse dependencies of all packages variants containing // file declURI. The packages are in some topological order. diff --git a/gopls/internal/test/marker/testdata/rename/recv.txt b/gopls/internal/test/marker/testdata/rename/recv.txt new file mode 100644 index 00000000000..73c9c34d381 --- /dev/null +++ b/gopls/internal/test/marker/testdata/rename/recv.txt @@ -0,0 +1,68 @@ +This test exercises renaming of method receivers (golang/go#41892). 
+ +Notes: +- x to print fails for A.J because it would shadow the built-in print; + that renaming is quietly skipped. +- various combinations of named, aliases, and pointers are tested. +- package b exercises generics. + +-- a/a.go -- +package a + +type T int +type A = T + +func (T) F() {} +func (t T) G() {} //@rename("t", "x", tx) +func (U T) H() {} //@rename("U", "v", Uv) +func (_ T) I() {} +func (v A) J() { print(v) } +func (w *T) K() {} +func (x *A) L() {} //@rename("x", "print", xprint) + +-- @tx/a/a.go -- +@@ -7,2 +7,2 @@ +-func (t T) G() {} //@rename("t", "x", tx) +-func (U T) H() {} //@rename("U", "v", Uv) ++func (x T) G() {} //@rename("t", "x", tx) ++func (x T) H() {} //@rename("U", "v", Uv) +@@ -10,2 +10,2 @@ +-func (v A) J() { print(v) } +-func (w *T) K() {} ++func (x A) J() { print(x) } ++func (x *T) K() {} +-- @Uv/a/a.go -- +@@ -7,2 +7,2 @@ +-func (t T) G() {} //@rename("t", "x", tx) +-func (U T) H() {} //@rename("U", "v", Uv) ++func (v T) G() {} //@rename("t", "x", tx) ++func (v T) H() {} //@rename("U", "v", Uv) +@@ -11,2 +11,2 @@ +-func (w *T) K() {} +-func (x *A) L() {} //@rename("x", "print", xprint) ++func (v *T) K() {} ++func (v *A) L() {} //@rename("x", "print", xprint) +-- @xprint/a/a.go -- +@@ -7,2 +7,2 @@ +-func (t T) G() {} //@rename("t", "x", tx) +-func (U T) H() {} //@rename("U", "v", Uv) ++func (print T) G() {} //@rename("t", "x", tx) ++func (print T) H() {} //@rename("U", "v", Uv) +@@ -11,2 +11,2 @@ +-func (w *T) K() {} +-func (x *A) L() {} //@rename("x", "print", xprint) ++func (print *T) K() {} ++func (print *A) L() {} //@rename("x", "print", xprint) +-- b/b.go -- +package b + +type C[T any] int +func (r C[T]) F() {} //@rename("r", "c", rc) +func (r C[T]) G() {} + +-- @rc/b/b.go -- +@@ -4,2 +4,2 @@ +-func (r C[T]) F() {} //@rename("r", "c", rc) +-func (r C[T]) G() {} ++func (c C[T]) F() {} //@rename("r", "c", rc) ++func (c C[T]) G() {} From cf5cb0064b78cb3874f8e71cbecda39c288406d4 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Thu, 10 Apr 2025 15:29:07 -0400 Subject: [PATCH 206/270] internal/astutil: PreorderStack: a safer ast.Inspect for stacks This CL defines PreorderStack, a safer function than ast.Inspect for when you need to maintain a stack. Beware, the stack that it produces does not include n itself--a half-open interval--so that nested traversals compose correctly. The CL also uses the new function in various places in x/tools where appropriate; in some cases it was clearer to rewrite using cursor.Cursor. 
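
A minimal usage sketch (file is an assumed *ast.File; the fmt
calls are only for illustration):

    // Report the name of the function enclosing each return statement.
    astutil.PreorderStack(file, nil, func(n ast.Node, stack []ast.Node) bool {
        if _, ok := n.(*ast.ReturnStmt); ok {
            // stack holds the ancestors of n, outermost first,
            // excluding n itself.
            for i := len(stack) - 1; i >= 0; i-- {
                if decl, ok := stack[i].(*ast.FuncDecl); ok {
                    fmt.Println("return inside", decl.Name.Name)
                    break
                }
            }
        }
        return true
    })

Because the stack excludes the current node, f can start a nested
PreorderStack traversal with the same stack without the node being
pushed twice.
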
+ test Updates golang/go#73319 Change-Id: I843122cdd49cc4af8a7318badd8c34389479a92a Reviewed-on: https://go-review.googlesource.com/c/tools/+/664635 Auto-Submit: Alan Donovan Commit-Queue: Alan Donovan Reviewed-by: Robert Findley LUCI-TryBot-Result: Go LUCI --- go/analysis/passes/lostcancel/lostcancel.go | 21 +++---- gopls/internal/golang/codeaction.go | 31 +++------- gopls/internal/golang/hover.go | 11 +--- gopls/internal/golang/rename_check.go | 64 ++++++++++---------- internal/astutil/util.go | 32 ++++++++++ internal/astutil/util_test.go | 67 +++++++++++++++++++++ internal/refactor/inline/callee.go | 29 +++------ refactor/rename/check.go | 31 ++++------ 8 files changed, 171 insertions(+), 115 deletions(-) create mode 100644 internal/astutil/util_test.go diff --git a/go/analysis/passes/lostcancel/lostcancel.go b/go/analysis/passes/lostcancel/lostcancel.go index a7fee180925..c0746789e9c 100644 --- a/go/analysis/passes/lostcancel/lostcancel.go +++ b/go/analysis/passes/lostcancel/lostcancel.go @@ -17,6 +17,7 @@ import ( "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/cfg" "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/astutil" ) //go:embed doc.go @@ -83,30 +84,22 @@ func runFunc(pass *analysis.Pass, node ast.Node) { // {FuncDecl,FuncLit,CallExpr,SelectorExpr}. // Find the set of cancel vars to analyze. - stack := make([]ast.Node, 0, 32) - ast.Inspect(node, func(n ast.Node) bool { - switch n.(type) { - case *ast.FuncLit: - if len(stack) > 0 { - return false // don't stray into nested functions - } - case nil: - stack = stack[:len(stack)-1] // pop - return true + astutil.PreorderStack(node, nil, func(n ast.Node, stack []ast.Node) bool { + if _, ok := n.(*ast.FuncLit); ok && len(stack) > 0 { + return false // don't stray into nested functions } - stack = append(stack, n) // push - // Look for [{AssignStmt,ValueSpec} CallExpr SelectorExpr]: + // Look for n=SelectorExpr beneath stack=[{AssignStmt,ValueSpec} CallExpr]: // // ctx, cancel := context.WithCancel(...) // ctx, cancel = context.WithCancel(...) // var ctx, cancel = context.WithCancel(...) // - if !isContextWithCancel(pass.TypesInfo, n) || !isCall(stack[len(stack)-2]) { + if !isContextWithCancel(pass.TypesInfo, n) || !isCall(stack[len(stack)-1]) { return true } var id *ast.Ident // id of cancel var - stmt := stack[len(stack)-3] + stmt := stack[len(stack)-2] switch stmt := stmt.(type) { case *ast.ValueSpec: if len(stmt.Names) > 1 { diff --git a/gopls/internal/golang/codeaction.go b/gopls/internal/golang/codeaction.go index 7949493a896..c6811e8315a 100644 --- a/gopls/internal/golang/codeaction.go +++ b/gopls/internal/golang/codeaction.go @@ -713,33 +713,24 @@ func refactorRewriteEliminateDotImport(ctx context.Context, req *codeActionsRequ // Go through each use of the dot imported package, checking its scope for // shadowing and calculating an edit to qualify the identifier. - var stack []ast.Node - ast.Inspect(req.pgf.File, func(n ast.Node) bool { - if n == nil { - stack = stack[:len(stack)-1] // pop - return false - } - stack = append(stack, n) // push + for curId := range req.pgf.Cursor.Preorder((*ast.Ident)(nil)) { + ident := curId.Node().(*ast.Ident) - ident, ok := n.(*ast.Ident) - if !ok { - return true - } // Only keep identifiers that use a symbol from the // dot imported package. 
use := req.pkg.TypesInfo().Uses[ident] if use == nil || use.Pkg() == nil { - return true + continue } if use.Pkg() != imported { - return true + continue } // Only qualify unqualified identifiers (due to dot imports). // All other references to a symbol imported from another package // are nested within a select expression (pkg.Foo, v.Method, v.Field). - if is[*ast.SelectorExpr](stack[len(stack)-2]) { - return true + if is[*ast.SelectorExpr](curId.Parent().Node()) { + continue } // Make sure that the package name will not be shadowed by something else in scope. @@ -750,24 +741,22 @@ func refactorRewriteEliminateDotImport(ctx context.Context, req *codeActionsRequ // allowed to go through. sc := fileScope.Innermost(ident.Pos()) if sc == nil { - return true + continue } _, obj := sc.LookupParent(newName, ident.Pos()) if obj != nil { - return true + continue } rng, err := req.pgf.PosRange(ident.Pos(), ident.Pos()) // sic, zero-width range before ident if err != nil { - return true + continue } edits = append(edits, protocol.TextEdit{ Range: rng, NewText: newName + ".", }) - - return true - }) + } req.addEditAction("Eliminate dot import", nil, protocol.DocumentChangeEdit( req.fh, diff --git a/gopls/internal/golang/hover.go b/gopls/internal/golang/hover.go index c3fecd1c9d1..d707c202a1c 100644 --- a/gopls/internal/golang/hover.go +++ b/gopls/internal/golang/hover.go @@ -38,6 +38,7 @@ import ( gastutil "golang.org/x/tools/gopls/internal/util/astutil" "golang.org/x/tools/gopls/internal/util/bug" "golang.org/x/tools/gopls/internal/util/safetoken" + internalastutil "golang.org/x/tools/internal/astutil" "golang.org/x/tools/internal/event" "golang.org/x/tools/internal/stdlib" "golang.org/x/tools/internal/tokeninternal" @@ -1502,16 +1503,10 @@ func findDeclInfo(files []*ast.File, pos token.Pos) (decl ast.Decl, spec ast.Spe stack := make([]ast.Node, 0, 20) // Allocate the closure once, outside the loop. - f := func(n ast.Node) bool { + f := func(n ast.Node, stack []ast.Node) bool { if found { return false } - if n != nil { - stack = append(stack, n) // push - } else { - stack = stack[:len(stack)-1] // pop - return false - } // Skip subtrees (incl. files) that don't contain the search point. if !(n.Pos() <= pos && pos < n.End()) { @@ -1596,7 +1591,7 @@ func findDeclInfo(files []*ast.File, pos token.Pos) (decl ast.Decl, spec ast.Spe return true } for _, file := range files { - ast.Inspect(file, f) + internalastutil.PreorderStack(file, stack, f) if found { return decl, spec, field } diff --git a/gopls/internal/golang/rename_check.go b/gopls/internal/golang/rename_check.go index 6521e809773..6b1629e5ab8 100644 --- a/gopls/internal/golang/rename_check.go +++ b/gopls/internal/golang/rename_check.go @@ -45,6 +45,8 @@ import ( "golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/gopls/internal/cache" "golang.org/x/tools/gopls/internal/util/safetoken" + "golang.org/x/tools/internal/astutil/cursor" + "golang.org/x/tools/internal/astutil/edge" "golang.org/x/tools/internal/typeparams" "golang.org/x/tools/internal/typesinternal" "golang.org/x/tools/refactor/satisfy" @@ -338,64 +340,61 @@ func deeper(x, y *types.Scope) bool { // lexical block enclosing the reference. If fn returns false the // iteration is terminated and findLexicalRefs returns false. 
func forEachLexicalRef(pkg *cache.Package, obj types.Object, fn func(id *ast.Ident, block *types.Scope) bool) bool { + filter := []ast.Node{ + (*ast.Ident)(nil), + (*ast.SelectorExpr)(nil), + (*ast.CompositeLit)(nil), + } ok := true - var stack []ast.Node - - var visit func(n ast.Node) bool - visit = func(n ast.Node) bool { - if n == nil { - stack = stack[:len(stack)-1] // pop - return false - } + var visit func(cur cursor.Cursor, push bool) (descend bool) + visit = func(cur cursor.Cursor, push bool) (descend bool) { if !ok { return false // bail out } - - stack = append(stack, n) // push - switch n := n.(type) { + if !push { + return false + } + switch n := cur.Node().(type) { case *ast.Ident: if pkg.TypesInfo().Uses[n] == obj { - block := enclosingBlock(pkg.TypesInfo(), stack) + block := enclosingBlock(pkg.TypesInfo(), cur) if !fn(n, block) { ok = false } } - return visit(nil) // pop stack case *ast.SelectorExpr: // don't visit n.Sel - ast.Inspect(n.X, visit) - return visit(nil) // pop stack, don't descend + cur.ChildAt(edge.SelectorExpr_X, -1).Inspect(filter, visit) + return false // don't descend case *ast.CompositeLit: // Handle recursion ourselves for struct literals // so we don't visit field identifiers. tv, ok := pkg.TypesInfo().Types[n] if !ok { - return visit(nil) // pop stack, don't descend + return false // don't descend } if is[*types.Struct](typeparams.CoreType(typeparams.Deref(tv.Type))) { if n.Type != nil { - ast.Inspect(n.Type, visit) + cur.ChildAt(edge.CompositeLit_Type, -1).Inspect(filter, visit) } - for _, elt := range n.Elts { - if kv, ok := elt.(*ast.KeyValueExpr); ok { - ast.Inspect(kv.Value, visit) - } else { - ast.Inspect(elt, visit) + for i, elt := range n.Elts { + curElt := cur.ChildAt(edge.CompositeLit_Elts, i) + if _, ok := elt.(*ast.KeyValueExpr); ok { + // skip kv.Key + curElt = curElt.ChildAt(edge.KeyValueExpr_Value, -1) } + curElt.Inspect(filter, visit) } - return visit(nil) // pop stack, don't descend + return false // don't descend } } return true } - for _, f := range pkg.Syntax() { - ast.Inspect(f, visit) - if len(stack) != 0 { - panic(stack) - } + for _, pgf := range pkg.CompiledGoFiles() { + pgf.Cursor.Inspect(filter, visit) if !ok { break } @@ -404,11 +403,10 @@ func forEachLexicalRef(pkg *cache.Package, obj types.Object, fn func(id *ast.Ide } // enclosingBlock returns the innermost block logically enclosing the -// specified AST node (an ast.Ident), specified in the form of a path -// from the root of the file, [file...n]. -func enclosingBlock(info *types.Info, stack []ast.Node) *types.Scope { - for i := range stack { - n := stack[len(stack)-1-i] +// AST node (an ast.Ident), specified as a Cursor. +func enclosingBlock(info *types.Info, curId cursor.Cursor) *types.Scope { + for cur := range curId.Enclosing() { + n := cur.Node() // For some reason, go/types always associates a // function's scope with its FuncType. // See comments about scope above. diff --git a/internal/astutil/util.go b/internal/astutil/util.go index 849d45d8539..1862668a7c6 100644 --- a/internal/astutil/util.go +++ b/internal/astutil/util.go @@ -57,3 +57,35 @@ func PosInStringLiteral(lit *ast.BasicLit, offset int) (token.Pos, error) { } return pos, nil } + +// PreorderStack traverses the tree rooted at root, +// calling f before visiting each node. +// +// Each call to f provides the current node and traversal stack, +// consisting of the original value of stack appended with all nodes +// from root to n, excluding n itself. 
(This design allows calls +// to PreorderStack to be nested without double counting.) +// +// If f returns false, the traversal skips over that subtree. Unlike +// [ast.Inspect], no second call to f is made after visiting node n. +// In practice, the second call is nearly always used only to pop the +// stack, and it is surprisingly tricky to do this correctly; see +// https://go.dev/issue/73319. +func PreorderStack(root ast.Node, stack []ast.Node, f func(n ast.Node, stack []ast.Node) bool) { + before := len(stack) + ast.Inspect(root, func(n ast.Node) bool { + if n != nil { + if !f(n, stack) { + // Do not push, as there will be no corresponding pop. + return false + } + stack = append(stack, n) // push + } else { + stack = stack[:len(stack)-1] // pop + } + return true + }) + if len(stack) != before { + panic("push/pop mismatch") + } +} diff --git a/internal/astutil/util_test.go b/internal/astutil/util_test.go new file mode 100644 index 00000000000..da07ea88594 --- /dev/null +++ b/internal/astutil/util_test.go @@ -0,0 +1,67 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package astutil_test + +import ( + "fmt" + "go/ast" + "go/parser" + "go/token" + "reflect" + "strings" + "testing" + + "golang.org/x/tools/internal/astutil" +) + +func TestPreorderStack(t *testing.T) { + const src = `package a +func f() { + print("hello") +} +func g() { + print("goodbye") + panic("oops") +} +` + fset := token.NewFileSet() + f, _ := parser.ParseFile(fset, "a.go", src, 0) + + str := func(n ast.Node) string { + return strings.TrimPrefix(reflect.TypeOf(n).String(), "*ast.") + } + + var events []string + var gotStack []string + astutil.PreorderStack(f, nil, func(n ast.Node, stack []ast.Node) bool { + events = append(events, str(n)) + if decl, ok := n.(*ast.FuncDecl); ok && decl.Name.Name == "f" { + return false // skip subtree of f() + } + if lit, ok := n.(*ast.BasicLit); ok && lit.Value == `"oops"` { + for _, n := range stack { + gotStack = append(gotStack, str(n)) + } + } + return true + }) + + // Check sequence of events. + const wantEvents = `[File Ident ` + // package a + `FuncDecl ` + // func f() [pruned] + `FuncDecl Ident FuncType FieldList BlockStmt ` + // func g() + `ExprStmt CallExpr Ident BasicLit ` + // print... + `ExprStmt CallExpr Ident BasicLit]` // panic... + if got := fmt.Sprint(events); got != wantEvents { + t.Errorf("PreorderStack events:\ngot: %s\nwant: %s", got, wantEvents) + } + + // Check captured stack. 
+ const wantStack = `[File FuncDecl BlockStmt ExprStmt CallExpr]` + if got := fmt.Sprint(gotStack); got != wantStack { + t.Errorf("PreorderStack stack:\ngot: %s\nwant: %s", got, wantStack) + } + +} diff --git a/internal/refactor/inline/callee.go b/internal/refactor/inline/callee.go index ca9426a2656..f3f6b653c73 100644 --- a/internal/refactor/inline/callee.go +++ b/internal/refactor/inline/callee.go @@ -18,6 +18,7 @@ import ( "strings" "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/astutil" "golang.org/x/tools/internal/typeparams" "golang.org/x/tools/internal/typesinternal" ) @@ -132,16 +133,11 @@ func AnalyzeCallee(logf func(string, ...any), fset *token.FileSet, pkg *types.Pa freeRefs []freeRef // free refs that may need renaming unexported []string // free refs to unexported objects, for later error checks ) - var f func(n ast.Node) bool - visit := func(n ast.Node) { ast.Inspect(n, f) } + var f func(n ast.Node, stack []ast.Node) bool var stack []ast.Node stack = append(stack, decl.Type) // for scope of function itself - f = func(n ast.Node) bool { - if n != nil { - stack = append(stack, n) // push - } else { - stack = stack[:len(stack)-1] // pop - } + visit := func(n ast.Node, stack []ast.Node) { astutil.PreorderStack(n, stack, f) } + f = func(n ast.Node, stack []ast.Node) bool { switch n := n.(type) { case *ast.SelectorExpr: // Check selections of free fields/methods. @@ -153,7 +149,7 @@ func AnalyzeCallee(logf func(string, ...any), fset *token.FileSet, pkg *types.Pa } // Don't recur into SelectorExpr.Sel. - visit(n.X) + visit(n.X, stack) return false case *ast.CompositeLit: @@ -162,7 +158,7 @@ func AnalyzeCallee(logf func(string, ...any), fset *token.FileSet, pkg *types.Pa litType := typeparams.Deref(info.TypeOf(n)) if s, ok := typeparams.CoreType(litType).(*types.Struct); ok { if n.Type != nil { - visit(n.Type) + visit(n.Type, stack) } for i, elt := range n.Elts { var field *types.Var @@ -180,7 +176,7 @@ func AnalyzeCallee(logf func(string, ...any), fset *token.FileSet, pkg *types.Pa } // Don't recur into KeyValueExpr.Key. - visit(value) + visit(value, stack) } return false } @@ -234,7 +230,7 @@ func AnalyzeCallee(logf func(string, ...any), fset *token.FileSet, pkg *types.Pa } return true } - visit(decl) + visit(decl, stack) // Analyze callee body for "return expr" form, // where expr is f() or <-ch. These forms are @@ -466,13 +462,7 @@ func analyzeParams(logf func(string, ...any), fset *token.FileSet, info *types.I fieldObjs := fieldObjs(sig) var stack []ast.Node stack = append(stack, decl.Type) // for scope of function itself - ast.Inspect(decl.Body, func(n ast.Node) bool { - if n != nil { - stack = append(stack, n) // push - } else { - stack = stack[:len(stack)-1] // pop - } - + astutil.PreorderStack(decl.Body, stack, func(n ast.Node, stack []ast.Node) bool { if id, ok := n.(*ast.Ident); ok { if v, ok := info.Uses[id].(*types.Var); ok { if pinfo, ok := paramInfos[v]; ok { @@ -487,6 +477,7 @@ func analyzeParams(logf func(string, ...any), fset *token.FileSet, info *types.I // Contrapositively, if param is not an interface type, then the // assignment may lose type information, for example in the case that // the substituted expression is an untyped constant or unnamed type. 
+ stack = append(stack, n) // (the two calls below want n) assignable, ifaceAssign, affectsInference := analyzeAssignment(info, stack) ref := refInfo{ Offset: int(n.Pos() - decl.Pos()), diff --git a/refactor/rename/check.go b/refactor/rename/check.go index 7b29dbf6a72..58cbff9b594 100644 --- a/refactor/rename/check.go +++ b/refactor/rename/check.go @@ -13,6 +13,7 @@ import ( "go/types" "golang.org/x/tools/go/loader" + "golang.org/x/tools/internal/astutil" "golang.org/x/tools/internal/typeparams" "golang.org/x/tools/internal/typesinternal" "golang.org/x/tools/refactor/satisfy" @@ -313,19 +314,12 @@ func deeper(x, y *types.Scope) bool { // iteration is terminated and findLexicalRefs returns false. func forEachLexicalRef(info *loader.PackageInfo, obj types.Object, fn func(id *ast.Ident, block *types.Scope) bool) bool { ok := true - var stack []ast.Node - var visit func(n ast.Node) bool - visit = func(n ast.Node) bool { - if n == nil { - stack = stack[:len(stack)-1] // pop - return false - } + var visit func(n ast.Node, stack []ast.Node) bool + visit = func(n ast.Node, stack []ast.Node) bool { if !ok { return false // bail out } - - stack = append(stack, n) // push switch n := n.(type) { case *ast.Ident: if info.Uses[n] == obj { @@ -334,12 +328,12 @@ func forEachLexicalRef(info *loader.PackageInfo, obj types.Object, fn func(id *a ok = false } } - return visit(nil) // pop stack + return false case *ast.SelectorExpr: // don't visit n.Sel - ast.Inspect(n.X, visit) - return visit(nil) // pop stack, don't descend + astutil.PreorderStack(n.X, stack, visit) + return false // don't descend case *ast.CompositeLit: // Handle recursion ourselves for struct literals @@ -347,26 +341,23 @@ func forEachLexicalRef(info *loader.PackageInfo, obj types.Object, fn func(id *a tv := info.Types[n] if is[*types.Struct](typeparams.CoreType(typeparams.Deref(tv.Type))) { if n.Type != nil { - ast.Inspect(n.Type, visit) + astutil.PreorderStack(n.Type, stack, visit) } for _, elt := range n.Elts { if kv, ok := elt.(*ast.KeyValueExpr); ok { - ast.Inspect(kv.Value, visit) + astutil.PreorderStack(kv.Value, stack, visit) } else { - ast.Inspect(elt, visit) + astutil.PreorderStack(elt, stack, visit) } } - return visit(nil) // pop stack, don't descend + return false // don't descend } } return true } for _, f := range info.Files { - ast.Inspect(f, visit) - if len(stack) != 0 { - panic(stack) - } + astutil.PreorderStack(f, nil, visit) if !ok { break } From e78fd89f2803b27ec8ef490d0a6d5a90bbfd1050 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Wed, 16 Apr 2025 18:56:08 -0400 Subject: [PATCH 207/270] internal/astutil/cursor: four API refinements 1. Eliminate Cursor.Stack: it is redundant wrt Cursor.Enclosing, and essentially never needed since the stack can easily be computed cheaply on demand. 2. Rename FindPos to FindByPos. 3. Remove "push bool" parameter from Inspect callback. It has never once been needed; the only times we have ever needed to have some effect after visiting a node, we have wanted to express the whole recursion ourselves (see e.g. freeVisitor in gopls). 4. Add Cursor.Inspector accessor for completeness. Also, simplify the traversal in unusedparams. 
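
A sketch of the resulting API shape (inspect is an assumed
*inspector.Inspector, and start/end are assumed token.Pos values;
the filter and callback body are only illustrative):

    root := cursor.Root(inspect)
    if cur, ok := root.FindByPos(start, end); ok { // (2) formerly FindPos
        _ = cur.Inspector() // (4) new accessor

        // (1) Stack is gone; collect Enclosing on demand (innermost first).
        stack := slices.Collect(cur.Enclosing())
        fmt.Println(len(stack))

        // (3) the Inspect callback no longer takes a push bool.
        cur.Inspect([]ast.Node{(*ast.CallExpr)(nil)}, func(c cursor.Cursor) bool {
            fmt.Println(c.Node().(*ast.CallExpr).Fun)
            return true // descend
        })
    }
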
Updates golang/go#70859 Change-Id: I1c5218d1597105d6ffb423cbbb2306d62cabc47d Reviewed-on: https://go-review.googlesource.com/c/tools/+/666056 Commit-Queue: Alan Donovan LUCI-TryBot-Result: Go LUCI Reviewed-by: Robert Findley Auto-Submit: Alan Donovan --- go/types/internal/play/play.go | 6 +- .../analysis/fillreturns/fillreturns.go | 2 +- gopls/internal/analysis/modernize/bloop.go | 40 ++- .../internal/analysis/nonewvars/nonewvars.go | 2 +- .../analysis/noresultvalues/noresultvalues.go | 2 +- .../analysis/unusedparams/unusedparams.go | 303 ++++++++---------- gopls/internal/golang/codeaction.go | 2 +- gopls/internal/golang/extract.go | 2 +- gopls/internal/golang/implementation.go | 9 +- gopls/internal/golang/inlay_hint.go | 2 +- internal/astutil/cursor/cursor.go | 58 +--- internal/astutil/cursor/cursor_test.go | 96 ++---- 12 files changed, 225 insertions(+), 299 deletions(-) diff --git a/go/types/internal/play/play.go b/go/types/internal/play/play.go index 4212a6b82cf..77a90502135 100644 --- a/go/types/internal/play/play.go +++ b/go/types/internal/play/play.go @@ -26,6 +26,7 @@ import ( "os" "path/filepath" "reflect" + "slices" "strconv" "strings" @@ -167,8 +168,9 @@ func handleSelectJSON(w http.ResponseWriter, req *http.Request) { // It's usually the same, but may differ in edge // cases (e.g. around FuncType.Func). inspect := inspector.New([]*ast.File{file}) - if cur, ok := cursor.Root(inspect).FindPos(startPos, endPos); ok { - fmt.Fprintf(out, "Cursor.FindPos().Stack() = %v\n", cur.Stack(nil)) + if cur, ok := cursor.Root(inspect).FindByPos(startPos, endPos); ok { + fmt.Fprintf(out, "Cursor.FindPos().Enclosing() = %v\n", + slices.Collect(cur.Enclosing())) } else { fmt.Fprintf(out, "Cursor.FindPos() failed\n") } diff --git a/gopls/internal/analysis/fillreturns/fillreturns.go b/gopls/internal/analysis/fillreturns/fillreturns.go index a90105f6f56..e23e620acc2 100644 --- a/gopls/internal/analysis/fillreturns/fillreturns.go +++ b/gopls/internal/analysis/fillreturns/fillreturns.go @@ -49,7 +49,7 @@ outer: if !ok { continue // no position information } - curErr, ok := cursor.Root(inspect).FindPos(start, end) + curErr, ok := cursor.Root(inspect).FindByPos(start, end) if !ok { continue // can't find node } diff --git a/gopls/internal/analysis/modernize/bloop.go b/gopls/internal/analysis/modernize/bloop.go index 5bfb0b7d8e8..ea2359c7fb6 100644 --- a/gopls/internal/analysis/modernize/bloop.go +++ b/gopls/internal/analysis/modernize/bloop.go @@ -48,27 +48,25 @@ func bloop(pass *analysis.Pass) { // Within the same function, delete all calls to // b.{Start,Stop,Timer} that precede the loop. filter := []ast.Node{(*ast.ExprStmt)(nil), (*ast.FuncLit)(nil)} - curFn.Inspect(filter, func(cur cursor.Cursor, push bool) (descend bool) { - if push { - node := cur.Node() - if is[*ast.FuncLit](node) { - return false // don't descend into FuncLits (e.g. sub-benchmarks) - } - stmt := node.(*ast.ExprStmt) - if stmt.Pos() > start { - return false // not preceding: stop - } - if call, ok := stmt.X.(*ast.CallExpr); ok { - obj := typeutil.Callee(info, call) - if analysisinternal.IsMethodNamed(obj, "testing", "B", "StopTimer", "StartTimer", "ResetTimer") { - // Delete call statement. - // TODO(adonovan): delete following newline, or - // up to start of next stmt? (May delete a comment.) 
- edits = append(edits, analysis.TextEdit{ - Pos: stmt.Pos(), - End: stmt.End(), - }) - } + curFn.Inspect(filter, func(cur cursor.Cursor) (descend bool) { + node := cur.Node() + if is[*ast.FuncLit](node) { + return false // don't descend into FuncLits (e.g. sub-benchmarks) + } + stmt := node.(*ast.ExprStmt) + if stmt.Pos() > start { + return false // not preceding: stop + } + if call, ok := stmt.X.(*ast.CallExpr); ok { + obj := typeutil.Callee(info, call) + if analysisinternal.IsMethodNamed(obj, "testing", "B", "StopTimer", "StartTimer", "ResetTimer") { + // Delete call statement. + // TODO(adonovan): delete following newline, or + // up to start of next stmt? (May delete a comment.) + edits = append(edits, analysis.TextEdit{ + Pos: stmt.Pos(), + End: stmt.End(), + }) } } return true diff --git a/gopls/internal/analysis/nonewvars/nonewvars.go b/gopls/internal/analysis/nonewvars/nonewvars.go index eeae7211c97..62383dc2309 100644 --- a/gopls/internal/analysis/nonewvars/nonewvars.go +++ b/gopls/internal/analysis/nonewvars/nonewvars.go @@ -43,7 +43,7 @@ func run(pass *analysis.Pass) (any, error) { if !ok { continue // can't get position info } - curErr, ok := cursor.Root(inspect).FindPos(start, end) + curErr, ok := cursor.Root(inspect).FindByPos(start, end) if !ok { continue // can't find errant node } diff --git a/gopls/internal/analysis/noresultvalues/noresultvalues.go b/gopls/internal/analysis/noresultvalues/noresultvalues.go index 848f6532ce0..4f095c941c4 100644 --- a/gopls/internal/analysis/noresultvalues/noresultvalues.go +++ b/gopls/internal/analysis/noresultvalues/noresultvalues.go @@ -43,7 +43,7 @@ func run(pass *analysis.Pass) (any, error) { if !ok { continue // can't get position info } - curErr, ok := cursor.Root(inspect).FindPos(start, end) + curErr, ok := cursor.Root(inspect).FindByPos(start, end) if !ok { continue // can't find errant node } diff --git a/gopls/internal/analysis/unusedparams/unusedparams.go b/gopls/internal/analysis/unusedparams/unusedparams.go index 12076c5f273..824711242da 100644 --- a/gopls/internal/analysis/unusedparams/unusedparams.go +++ b/gopls/internal/analysis/unusedparams/unusedparams.go @@ -124,194 +124,171 @@ func run(pass *analysis.Pass) (any, error) { } } - // Inspect each file to see if it is generated. - // - // We do not want to report unused parameters in generated code itself, - // however we need to include generated code in the overall analysis as - // it may be calling functions in non-generated code. - files := []ast.Node{(*ast.File)(nil)} - cursor.Root(inspect).Inspect(files, func(c cursor.Cursor, push bool) bool { - if !push { - return true - } - - isGenerated := ast.IsGenerated(c.Node().(*ast.File)) - - // Descend into the file, check each non-address-taken function's parameters - // are all used. - funcs := []ast.Node{ - (*ast.FuncDecl)(nil), - (*ast.FuncLit)(nil), - } - c.Inspect(funcs, func(c cursor.Cursor, push bool) bool { - // (We always return true so that we visit nested FuncLits.) - if !push { - return true + // Check each non-address-taken function's parameters are all used. +funcloop: + for c := range cursor.Root(inspect).Preorder((*ast.FuncDecl)(nil), (*ast.FuncLit)(nil)) { + var ( + fn types.Object // function symbol (*Func, possibly *Var for a FuncLit) + ftype *ast.FuncType + body *ast.BlockStmt + ) + switch n := c.Node().(type) { + case *ast.FuncDecl: + // We can't analyze non-Go functions. 
+ if n.Body == nil { + continue } - var ( - fn types.Object // function symbol (*Func, possibly *Var for a FuncLit) - ftype *ast.FuncType - body *ast.BlockStmt - ) - switch n := c.Node().(type) { - case *ast.FuncDecl: - // We can't analyze non-Go functions. - if n.Body == nil { - return true - } - - // Ignore exported functions and methods: we - // must assume they may be address-taken in - // another package. - if n.Name.IsExported() { - return true - } - - // Ignore methods that match the name of any - // interface method declared in this package, - // as the method's signature may need to conform - // to the interface. - if n.Recv != nil && unexportedIMethodNames[n.Name.Name] { - return true - } - - fn = pass.TypesInfo.Defs[n.Name].(*types.Func) - ftype, body = n.Type, n.Body + // Ignore exported functions and methods: we + // must assume they may be address-taken in + // another package. + if n.Name.IsExported() { + continue + } - case *ast.FuncLit: - // Find the symbol for the variable (if any) - // to which the FuncLit is bound. - // (We don't bother to allow ParenExprs.) - switch parent := c.Parent().Node().(type) { - case *ast.AssignStmt: - // f = func() {...} - // f := func() {...} - if ek, idx := c.ParentEdge(); ek == edge.AssignStmt_Rhs { - // Inv: n == AssignStmt.Rhs[idx] - if id, ok := parent.Lhs[idx].(*ast.Ident); ok { - fn = pass.TypesInfo.ObjectOf(id) + // Ignore methods that match the name of any + // interface method declared in this package, + // as the method's signature may need to conform + // to the interface. + if n.Recv != nil && unexportedIMethodNames[n.Name.Name] { + continue + } - // Edge case: f = func() {...} - // should not count as a use. - if pass.TypesInfo.Uses[id] != nil { - usesOutsideCall[fn] = moreslices.Remove(usesOutsideCall[fn], id) - } + fn = pass.TypesInfo.Defs[n.Name].(*types.Func) + ftype, body = n.Type, n.Body + + case *ast.FuncLit: + // Find the symbol for the variable (if any) + // to which the FuncLit is bound. + // (We don't bother to allow ParenExprs.) + switch parent := c.Parent().Node().(type) { + case *ast.AssignStmt: + // f = func() {...} + // f := func() {...} + if ek, idx := c.ParentEdge(); ek == edge.AssignStmt_Rhs { + // Inv: n == AssignStmt.Rhs[idx] + if id, ok := parent.Lhs[idx].(*ast.Ident); ok { + fn = pass.TypesInfo.ObjectOf(id) + + // Edge case: f = func() {...} + // should not count as a use. + if pass.TypesInfo.Uses[id] != nil { + usesOutsideCall[fn] = moreslices.Remove(usesOutsideCall[fn], id) + } - if fn == nil && id.Name == "_" { - // Edge case: _ = func() {...} - // has no local var. Fake one. - v := types.NewVar(id.Pos(), pass.Pkg, id.Name, pass.TypesInfo.TypeOf(n)) - typesinternal.SetVarKind(v, typesinternal.LocalVar) - fn = v - } + if fn == nil && id.Name == "_" { + // Edge case: _ = func() {...} + // has no local var. Fake one. + v := types.NewVar(id.Pos(), pass.Pkg, id.Name, pass.TypesInfo.TypeOf(n)) + typesinternal.SetVarKind(v, typesinternal.LocalVar) + fn = v } } + } - case *ast.ValueSpec: - // var f = func() { ... } - // (unless f is an exported package-level var) - for i, val := range parent.Values { - if val == n { - v := pass.TypesInfo.Defs[parent.Names[i]] - if !(v.Parent() == pass.Pkg.Scope() && v.Exported()) { - fn = v - } - break + case *ast.ValueSpec: + // var f = func() { ... 
} + // (unless f is an exported package-level var) + for i, val := range parent.Values { + if val == n { + v := pass.TypesInfo.Defs[parent.Names[i]] + if !(v.Parent() == pass.Pkg.Scope() && v.Exported()) { + fn = v } + break } } - - ftype, body = n.Type, n.Body } - // Ignore address-taken functions and methods: unused - // parameters may be needed to conform to a func type. - if fn == nil || len(usesOutsideCall[fn]) > 0 { - return true - } + ftype, body = n.Type, n.Body + } - // If there are no parameters, there are no unused parameters. - if ftype.Params.NumFields() == 0 { - return true - } + // Ignore address-taken functions and methods: unused + // parameters may be needed to conform to a func type. + if fn == nil || len(usesOutsideCall[fn]) > 0 { + continue + } - // To reduce false positives, ignore functions with an - // empty or panic body. - // - // We choose not to ignore functions whose body is a - // single return statement (as earlier versions did) - // func f() { return } - // func f() { return g(...) } - // as we suspect that was just heuristic to reduce - // false positives in the earlier unsound algorithm. - switch len(body.List) { - case 0: - // Empty body. Although the parameter is - // unnecessary, it's pretty obvious to the - // reader that that's the case, so we allow it. - return true // func f() {} - case 1: - if stmt, ok := body.List[0].(*ast.ExprStmt); ok { - // We allow a panic body, as it is often a - // placeholder for a future implementation: - // func f() { panic(...) } - if call, ok := stmt.X.(*ast.CallExpr); ok { - if fun, ok := call.Fun.(*ast.Ident); ok && fun.Name == "panic" { - return true - } + // If there are no parameters, there are no unused parameters. + if ftype.Params.NumFields() == 0 { + continue + } + + // To reduce false positives, ignore functions with an + // empty or panic body. + // + // We choose not to ignore functions whose body is a + // single return statement (as earlier versions did) + // func f() { return } + // func f() { return g(...) } + // as we suspect that was just heuristic to reduce + // false positives in the earlier unsound algorithm. + switch len(body.List) { + case 0: + // Empty body. Although the parameter is + // unnecessary, it's pretty obvious to the + // reader that that's the case, so we allow it. + continue // func f() {} + case 1: + if stmt, ok := body.List[0].(*ast.ExprStmt); ok { + // We allow a panic body, as it is often a + // placeholder for a future implementation: + // func f() { panic(...) } + if call, ok := stmt.X.(*ast.CallExpr); ok { + if fun, ok := call.Fun.(*ast.Ident); ok && fun.Name == "panic" { + continue } } } + } - // Don't report diagnostics on generated files. - if isGenerated { - return true + // Don't report diagnostics on generated files. + // (We can't skip analysis of generated files, though.) + for curFile := range c.Enclosing((*ast.File)(nil)) { + if ast.IsGenerated(curFile.Node().(*ast.File)) { + continue funcloop } + } - // Report each unused parameter. - for _, field := range ftype.Params.List { - for _, id := range field.Names { - if id.Name == "_" { - continue + // Report each unused parameter. 
+ for _, field := range ftype.Params.List { + for _, id := range field.Names { + if id.Name == "_" { + continue + } + param := pass.TypesInfo.Defs[id].(*types.Var) + if !usedVars[param] { + start, end := field.Pos(), field.End() + if len(field.Names) > 1 { + start, end = id.Pos(), id.End() } - param := pass.TypesInfo.Defs[id].(*types.Var) - if !usedVars[param] { - start, end := field.Pos(), field.End() - if len(field.Names) > 1 { - start, end = id.Pos(), id.End() - } - // This diagnostic carries both an edit-based fix to - // rename the unused parameter, and a command-based fix - // to remove it (see golang.RemoveUnusedParameter). - pass.Report(analysis.Diagnostic{ - Pos: start, - End: end, - Message: fmt.Sprintf("unused parameter: %s", id.Name), - Category: FixCategory, - SuggestedFixes: []analysis.SuggestedFix{ - { - Message: `Rename parameter to "_"`, - TextEdits: []analysis.TextEdit{{ - Pos: id.Pos(), - End: id.End(), - NewText: []byte("_"), - }}, - }, - { - Message: fmt.Sprintf("Remove unused parameter %q", id.Name), - // No TextEdits => computed by gopls command - }, + // This diagnostic carries both an edit-based fix to + // rename the unused parameter, and a command-based fix + // to remove it (see golang.RemoveUnusedParameter). + pass.Report(analysis.Diagnostic{ + Pos: start, + End: end, + Message: fmt.Sprintf("unused parameter: %s", id.Name), + Category: FixCategory, + SuggestedFixes: []analysis.SuggestedFix{ + { + Message: `Rename parameter to "_"`, + TextEdits: []analysis.TextEdit{{ + Pos: id.Pos(), + End: id.End(), + NewText: []byte("_"), + }}, }, - }) - } + { + Message: fmt.Sprintf("Remove unused parameter %q", id.Name), + // No TextEdits => computed by gopls command + }, + }, + }) } } - - return true - }) - return true - }) + } + } return nil, nil } diff --git a/gopls/internal/golang/codeaction.go b/gopls/internal/golang/codeaction.go index c6811e8315a..4efddaa8a18 100644 --- a/gopls/internal/golang/codeaction.go +++ b/gopls/internal/golang/codeaction.go @@ -942,7 +942,7 @@ func goAssembly(ctx context.Context, req *codeActionsRequest) error { } sym.WriteString(".") - curSel, _ := req.pgf.Cursor.FindPos(req.start, req.end) + curSel, _ := req.pgf.Cursor.FindByPos(req.start, req.end) for cur := range curSel.Enclosing((*ast.FuncDecl)(nil), (*ast.ValueSpec)(nil)) { var name string // in command title switch node := cur.Node().(type) { diff --git a/gopls/internal/golang/extract.go b/gopls/internal/golang/extract.go index 322b1169fd6..a832ec305e8 100644 --- a/gopls/internal/golang/extract.go +++ b/gopls/internal/golang/extract.go @@ -957,7 +957,7 @@ func extractFunctionMethod(cpkg *cache.Package, pgf *parsego.File, start, end to // } // - curSel, _ := pgf.Cursor.FindPos(start, end) // since canExtractFunction succeeded, this will always return a valid cursor + curSel, _ := pgf.Cursor.FindByPos(start, end) // since canExtractFunction succeeded, this will always return a valid cursor freeBranches := freeBranches(info, curSel, start, end) // Generate an unused identifier for the control value. diff --git a/gopls/internal/golang/implementation.go b/gopls/internal/golang/implementation.go index 19f1257c76d..675b232d0eb 100644 --- a/gopls/internal/golang/implementation.go +++ b/gopls/internal/golang/implementation.go @@ -951,7 +951,7 @@ func pathEnclosingObjNode(f *ast.File, pos token.Pos) []ast.Node { // implFuncs returns errNotHandled to indicate that we should try the // regular method-sets algorithm. 
func implFuncs(pkg *cache.Package, pgf *parsego.File, pos token.Pos) ([]protocol.Location, error) { - curSel, ok := pgf.Cursor.FindPos(pos, pos) + curSel, ok := pgf.Cursor.FindByPos(pos, pos) if !ok { return nil, fmt.Errorf("no code selected") } @@ -975,7 +975,12 @@ func implFuncs(pkg *cache.Package, pgf *parsego.File, pos token.Pos) ([]protocol // are inconsistent. Consequently, the ancestors for a "func" // token of Func{Lit,Decl} do not include FuncType, hence the // explicit cases below. - for _, cur := range curSel.Stack(nil) { + for cur := range curSel.Enclosing( + (*ast.FuncDecl)(nil), + (*ast.FuncLit)(nil), + (*ast.FuncType)(nil), + (*ast.CallExpr)(nil), + ) { switch n := cur.Node().(type) { case *ast.FuncDecl, *ast.FuncLit: if inToken(n.Pos(), "func", pos) { diff --git a/gopls/internal/golang/inlay_hint.go b/gopls/internal/golang/inlay_hint.go index b49ebd85e21..617231a4f8c 100644 --- a/gopls/internal/golang/inlay_hint.go +++ b/gopls/internal/golang/inlay_hint.go @@ -65,7 +65,7 @@ func InlayHint(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pR } var hints []protocol.InlayHint - if curSubrange, ok := pgf.Cursor.FindPos(start, end); ok { + if curSubrange, ok := pgf.Cursor.FindByPos(start, end); ok { add := func(hint protocol.InlayHint) { hints = append(hints, hint) } for _, fn := range enabledHints { fn(info, pgf, qual, curSubrange, add) diff --git a/internal/astutil/cursor/cursor.go b/internal/astutil/cursor/cursor.go index 3f015998c52..78d874a86fa 100644 --- a/internal/astutil/cursor/cursor.go +++ b/internal/astutil/cursor/cursor.go @@ -20,7 +20,6 @@ import ( "go/token" "iter" "reflect" - "slices" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/internal/astutil/edge" @@ -46,7 +45,7 @@ func Root(in *inspector.Inspector) Cursor { // At returns the cursor at the specified index in the traversal, // which must have been obtained from [Cursor.Index] on a Cursor -// belonging to the same Inspector. +// belonging to the same Inspector (see [Cursor.Inspector]). func At(in *inspector.Inspector, index int32) Cursor { if index < 0 { panic("negative index") @@ -61,6 +60,9 @@ func At(in *inspector.Inspector, index int32) Cursor { return Cursor{in, index} } +// Inspector returns the cursor's Inspector. +func (c Cursor) Inspector() *inspector.Inspector { return c.in } + // Index returns the index of this cursor position within the package. // // Clients should not assume anything about the numeric Index value @@ -142,10 +144,9 @@ func (c Cursor) Preorder(types ...ast.Node) iter.Seq[Cursor] { } // Inspect visits the nodes of the subtree represented by c in -// depth-first order. It calls f(n, true) for each node n before it +// depth-first order. It calls f(n) for each node n before it // visits n's children. If f returns true, Inspect invokes f -// recursively for each of the non-nil children of the node, followed -// by a call of f(n, false). +// recursively for each of the non-nil children of the node. // // Each node is represented by a Cursor that allows access to the // Node, but may also be used to start a new traversal, or to obtain @@ -155,7 +156,7 @@ func (c Cursor) Preorder(types ...ast.Node) iter.Seq[Cursor] { // The types argument, if non-empty, enables type-based filtering of // events. The function f if is called only for nodes whose type // matches an element of the types slice. 
-func (c Cursor) Inspect(types []ast.Node, f func(c Cursor, push bool) (descend bool)) { +func (c Cursor) Inspect(types []ast.Node, f func(c Cursor) (descend bool)) { mask := maskOf(types) events := c.events() for i, limit := c.indices(); i < limit; { @@ -163,46 +164,19 @@ func (c Cursor) Inspect(types []ast.Node, f func(c Cursor, push bool) (descend b if ev.index > i { // push pop := ev.index - if ev.typ&mask != 0 && !f(Cursor{c.in, i}, true) { - i = pop + 1 // past the pop + if ev.typ&mask != 0 && !f(Cursor{c.in, i}) || + events[pop].typ&mask == 0 { + // The user opted not to descend, or the + // subtree does not contain types: + // skip past the pop. + i = pop + 1 continue } - if events[pop].typ&mask == 0 { - // Subtree does not contain types: skip to pop. - i = pop - continue - } - } else { - // pop - push := ev.index - if events[push].typ&mask != 0 { - f(Cursor{c.in, push}, false) - } } i++ } } -// Stack returns the stack of enclosing nodes, outermost first: -// from the [ast.File] down to the current cursor's node. -// -// To amortize allocation, it appends to the provided slice, which -// must be empty. -// -// Stack must not be called on the Root node. -func (c Cursor) Stack(stack []Cursor) []Cursor { - if len(stack) > 0 { - panic("stack is non-empty") - } - if c.index < 0 { - panic("Cursor.Stack called on Root node") - } - - stack = slices.AppendSeq(stack, c.Enclosing()) - slices.Reverse(stack) - return stack -} - // Enclosing returns an iterator over the nodes enclosing the current // current node, starting with the Cursor itself. // @@ -453,7 +427,7 @@ func (c Cursor) FindNode(n ast.Node) (Cursor, bool) { // TODO(adonovan): opt: should we assume Node.Pos is accurate // and combine type-based filtering with position filtering - // like FindPos? + // like FindByPos? mask := maskOf([]ast.Node{n}) events := c.events() @@ -474,7 +448,7 @@ func (c Cursor) FindNode(n ast.Node) (Cursor, bool) { return Cursor{}, false } -// FindPos returns the cursor for the innermost node n in the tree +// FindByPos returns the cursor for the innermost node n in the tree // rooted at c such that n.Pos() <= start && end <= n.End(). // (For an *ast.File, it uses the bounds n.FileStart-n.FileEnd.) // @@ -483,7 +457,7 @@ func (c Cursor) FindNode(n ast.Node) (Cursor, bool) { // // See also [astutil.PathEnclosingInterval], which // tolerates adjoining whitespace. -func (c Cursor) FindPos(start, end token.Pos) (Cursor, bool) { +func (c Cursor) FindByPos(start, end token.Pos) (Cursor, bool) { if end < start { panic("end < start") } diff --git a/internal/astutil/cursor/cursor_test.go b/internal/astutil/cursor/cursor_test.go index 9effae912a3..0573512fc3b 100644 --- a/internal/astutil/cursor/cursor_test.go +++ b/internal/astutil/cursor/cursor_test.go @@ -136,53 +136,49 @@ func g() { } nfuncs++ - stack := curFunc.Stack(nil) + stack := slices.Collect(curFunc.Enclosing()) // Stacks are convenient to print! - if got, want := fmt.Sprint(stack), "[*ast.File *ast.FuncDecl]"; got != want { - t.Errorf("curFunc.Stack() = %q, want %q", got, want) + if got, want := fmt.Sprint(stack), "[*ast.FuncDecl *ast.File]"; got != want { + t.Errorf("curFunc.Enclosing() = %q, want %q", got, want) } - // Parent, iterated, is Stack. + // Parent, iterated, is Enclosing stack. 
i := 0 for c := curFunc; c.Node() != nil; c = c.Parent() { - if got, want := stack[len(stack)-1-i], c; got != want { - t.Errorf("Stack[%d] = %v; Parent()^%d = %v", i, got, i, want) + if got, want := stack[i], c; got != want { + t.Errorf("Enclosing[%d] = %v; Parent()^%d = %v", i, got, i, want) } i++ } + wantStack := "[*ast.CallExpr *ast.ExprStmt *ast.BlockStmt *ast.FuncDecl *ast.File]" + // nested Preorder traversal preorderCount := 0 for curCall := range curFunc.Preorder(callExprs...) { _ = curCall.Node().(*ast.CallExpr) preorderCount++ - stack := curCall.Stack(nil) - if got, want := fmt.Sprint(stack), "[*ast.File *ast.FuncDecl *ast.BlockStmt *ast.ExprStmt *ast.CallExpr]"; got != want { - t.Errorf("curCall.Stack() = %q, want %q", got, want) - } - - // Enclosing = Reverse(Stack()). - slices.Reverse(stack) - if got, want := slices.Collect(curCall.Enclosing()), stack; !reflect.DeepEqual(got, want) { - t.Errorf("Enclosing = %v, Reverse(Stack - last element) = %v", got, want) + stack := slices.Collect(curCall.Enclosing()) + if got := fmt.Sprint(stack); got != wantStack { + t.Errorf("curCall.Enclosing() = %q, want %q", got, wantStack) } } // nested Inspect traversal - inspectCount := 0 // pushes and pops - curFunc.Inspect(callExprs, func(curCall cursor.Cursor, push bool) (proceed bool) { + inspectCount := 0 + curFunc.Inspect(callExprs, func(curCall cursor.Cursor) (proceed bool) { _ = curCall.Node().(*ast.CallExpr) inspectCount++ - stack := curCall.Stack(nil) - if got, want := fmt.Sprint(stack), "[*ast.File *ast.FuncDecl *ast.BlockStmt *ast.ExprStmt *ast.CallExpr]"; got != want { - t.Errorf("curCall.Stack() = %q, want %q", got, want) + stack := slices.Collect(curCall.Enclosing()) + if got := fmt.Sprint(stack); got != wantStack { + t.Errorf("curCall.Enclosing() = %q, want %q", got, wantStack) } return true }) - if inspectCount != preorderCount*2 { - t.Errorf("Inspect (%d push/pop events) and Preorder (%d push events) are not consistent", inspectCount, preorderCount) + if inspectCount != preorderCount { + t.Errorf("Inspect (%d) and Preorder (%d) events are not consistent", inspectCount, preorderCount) } ncalls += preorderCount @@ -269,12 +265,10 @@ func TestCursor_Inspect(t *testing.T) { // Test Cursor.Inspect implementation. var nodesB []ast.Node - cursor.Root(inspect).Inspect(switches, func(c cursor.Cursor, push bool) (proceed bool) { - if push { - n := c.Node() - nodesB = append(nodesB, n) - return !is[*ast.SwitchStmt](n) // descend only into TypeSwitchStmt - } + cursor.Root(inspect).Inspect(switches, func(c cursor.Cursor) (proceed bool) { + n := c.Node() + nodesB = append(nodesB, n) + return !is[*ast.SwitchStmt](n) // descend only into TypeSwitchStmt return false }) compare(t, nodesA, nodesB) @@ -339,7 +333,7 @@ func TestCursor_FindPos_order(t *testing.T) { target := netFiles[7].Decls[0] // Find the target decl by its position. - cur, ok := cursor.Root(netInspect).FindPos(target.Pos(), target.End()) + cur, ok := cursor.Root(netInspect).FindByPos(target.Pos(), target.End()) if !ok || cur.Node() != target { t.Fatalf("unshuffled: FindPos(%T) = (%v, %t)", target, cur, ok) } @@ -352,7 +346,7 @@ func TestCursor_FindPos_order(t *testing.T) { // Find it again. 
inspect := inspector.New(files) - cur, ok = cursor.Root(inspect).FindPos(target.Pos(), target.End()) + cur, ok = cursor.Root(inspect).FindByPos(target.Pos(), target.End()) if !ok || cur.Node() != target { t.Fatalf("shuffled: FindPos(%T) = (%v, %t)", target, cur, ok) } @@ -500,37 +494,6 @@ func BenchmarkInspectCalls(b *testing.B) { } }) - // Cursor.Stack(nil) is ~6x slower than WithStack. - // Even using Cursor.Stack(stack[:0]) to amortize the - // allocation, it's ~4x slower. - // - // But it depends on the selectivity of the nodeTypes - // filter: searching for *ast.InterfaceType, results in - // fewer calls to Stack, making it only 2x slower. - // And if the calls to Stack are very selective, - // or are replaced by 2 calls to Parent, it runs - // 27% faster than WithStack. - // - // But the purpose of inspect.WithStack is not to obtain the - // stack on every node, but to perform a traversal in which it - // one as the _option_ to access the stack if it should be - // needed, but the need is rare and usually only for a small - // portion. Arguably, because Cursor traversals always - // provide, at no extra cost, the option to access the - // complete stack, the right comparison is the plain Cursor - // benchmark below. - b.Run("CursorStack", func(b *testing.B) { - var ncalls int - for range b.N { - var stack []cursor.Cursor // recycle across calls - for cur := range cursor.Root(inspect).Preorder(callExprs...) { - _ = cur.Node().(*ast.CallExpr) - stack = cur.Stack(stack[:0]) - ncalls++ - } - } - }) - b.Run("Cursor", func(b *testing.B) { var ncalls int for range b.N { @@ -568,7 +531,7 @@ func BenchmarkCursor_FindNode(b *testing.B) { found := false for c := range root.Preorder(callExprs...) { count++ - if count >= 1000 && len(c.Stack(nil)) >= 6 { + if count >= 1000 && iterlen(c.Enclosing()) >= 6 { needle = c found = true break @@ -611,10 +574,17 @@ func BenchmarkCursor_FindNode(b *testing.B) { b.Run("Cursor.FindPos", func(b *testing.B) { needleNode := needle.Node() for range b.N { - found, ok := root.FindPos(needleNode.Pos(), needleNode.End()) + found, ok := root.FindByPos(needleNode.Pos(), needleNode.End()) if !ok || found != needle { b.Errorf("FindPos search failed: got %v, want %v", found, needle) } } }) } + +func iterlen[T any](seq iter.Seq[T]) (len int) { + for range seq { + len++ + } + return +} From 33c6419a072d7b5e3d09ebfdb90ef9bcde103174 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Thu, 17 Apr 2025 11:09:44 -0400 Subject: [PATCH 208/270] gopls/internal/golang: fix build CLs 666056 and 664635 contained semantic conflicts and were merged independently close in time (my bad). 
Change-Id: I1530e059ce9ff30b8fef8cd3473ea9398a26246f Reviewed-on: https://go-review.googlesource.com/c/tools/+/666415 LUCI-TryBot-Result: Go LUCI Auto-Submit: Alan Donovan Reviewed-by: Jonathan Amsterdam Reviewed-by: Robert Findley --- gopls/internal/golang/rename.go | 2 +- gopls/internal/golang/rename_check.go | 7 ++----- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/gopls/internal/golang/rename.go b/gopls/internal/golang/rename.go index f1406cd773c..6e705350266 100644 --- a/gopls/internal/golang/rename.go +++ b/gopls/internal/golang/rename.go @@ -509,7 +509,7 @@ func renameOrdinary(ctx context.Context, snapshot *cache.Snapshot, f file.Handle return nil, err } var ok bool - cur, ok = pgf.Cursor.FindPos(pos, pos) + cur, ok = pgf.Cursor.FindByPos(pos, pos) if !ok { return nil, fmt.Errorf("can't find cursor for selection") } diff --git a/gopls/internal/golang/rename_check.go b/gopls/internal/golang/rename_check.go index 6b1629e5ab8..6b89cabbe81 100644 --- a/gopls/internal/golang/rename_check.go +++ b/gopls/internal/golang/rename_check.go @@ -346,14 +346,11 @@ func forEachLexicalRef(pkg *cache.Package, obj types.Object, fn func(id *ast.Ide (*ast.CompositeLit)(nil), } ok := true - var visit func(cur cursor.Cursor, push bool) (descend bool) - visit = func(cur cursor.Cursor, push bool) (descend bool) { + var visit func(cur cursor.Cursor) (descend bool) + visit = func(cur cursor.Cursor) (descend bool) { if !ok { return false // bail out } - if !push { - return false - } switch n := cur.Node().(type) { case *ast.Ident: if pkg.TypesInfo().Uses[n] == obj { From 93bb7f0875b53bc3af9c6749591bba09b07ee67b Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Thu, 17 Apr 2025 11:24:31 -0400 Subject: [PATCH 209/270] gopls: update x/telemetry Updates golang/go#73268 Change-Id: I13168d1edae1358919e69c7ee965bafd4c6833d0 Reviewed-on: https://go-review.googlesource.com/c/tools/+/666416 Auto-Submit: Alan Donovan Reviewed-by: Robert Findley LUCI-TryBot-Result: Go LUCI --- gopls/go.mod | 2 +- gopls/go.sum | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/gopls/go.mod b/gopls/go.mod index c09e2daf7bd..c2a8f6e019c 100644 --- a/gopls/go.mod +++ b/gopls/go.mod @@ -8,7 +8,7 @@ require ( golang.org/x/mod v0.24.0 golang.org/x/sync v0.13.0 golang.org/x/sys v0.32.0 - golang.org/x/telemetry v0.0.0-20250220152412-165e2f84edbc + golang.org/x/telemetry v0.0.0-20250417124945-06ef541f3fa3 golang.org/x/text v0.24.0 golang.org/x/tools v0.30.0 golang.org/x/vuln v1.1.4 diff --git a/gopls/go.sum b/gopls/go.sum index f5a9bbde4ca..cfe49a42d4e 100644 --- a/gopls/go.sum +++ b/gopls/go.sum @@ -38,6 +38,8 @@ golang.org/x/sys v0.32.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= golang.org/x/telemetry v0.0.0-20240521205824-bda55230c457/go.mod h1:pRgIJT+bRLFKnoM1ldnzKoxTIn14Yxz928LQRYYgIN0= golang.org/x/telemetry v0.0.0-20250220152412-165e2f84edbc h1:HS+G1Mhh2dxM8ObutfYKdjfD7zpkyeP/UxeRnJpIZtQ= golang.org/x/telemetry v0.0.0-20250220152412-165e2f84edbc/go.mod h1:bDzXkYUaHzz51CtDy5kh/jR4lgPxsdbqC37kp/dzhCc= +golang.org/x/telemetry v0.0.0-20250417124945-06ef541f3fa3 h1:RXY2+rSHXvxO2Y+gKrPjYVaEoGOqh3VEXFhnWAt1Irg= +golang.org/x/telemetry v0.0.0-20250417124945-06ef541f3fa3/go.mod h1:RoaXAWDwS90j6FxVKwJdBV+0HCU+llrKUGgJaxiKl6M= golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= golang.org/x/term v0.31.0/go.mod h1:R4BeIy7D95HzImkxGkTW1UQTtP54tio2RyHz7PwK0aw= From 
2337e7c3e52683a4cf72625544decfb74e6c80b9 Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Thu, 17 Apr 2025 07:34:30 -0400 Subject: [PATCH 210/270] internal/refactor/inline: factor out free obj renaming Extract the part of inlineCall that renames the callee's free objects into a separate method. It factors out cleanly and takes a page-sized bite out of inlineCall. Change-Id: I75cc24e5822ba662a5f0f456f8637abca12eb354 Reviewed-on: https://go-review.googlesource.com/c/tools/+/666296 Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI --- internal/refactor/inline/inline.go | 171 +++++++++++++++-------------- 1 file changed, 91 insertions(+), 80 deletions(-) diff --git a/internal/refactor/inline/inline.go b/internal/refactor/inline/inline.go index edd5d836613..0aaee5c7cb5 100644 --- a/internal/refactor/inline/inline.go +++ b/internal/refactor/inline/inline.go @@ -14,6 +14,7 @@ import ( "go/printer" "go/token" "go/types" + "maps" pathpkg "path" "reflect" "slices" @@ -26,7 +27,6 @@ import ( internalastutil "golang.org/x/tools/internal/astutil" "golang.org/x/tools/internal/typeparams" "golang.org/x/tools/internal/typesinternal" - "maps" ) // A Caller describes the function call and its enclosing context. @@ -691,84 +691,9 @@ func (st *state) inlineCall() (*inlineCallResult, error) { istate := newImportState(logf, caller, callee) // Compute the renaming of the callee's free identifiers. - objRenames := make([]ast.Expr, len(callee.FreeObjs)) // nil => no change - for i, obj := range callee.FreeObjs { - // obj is a free object of the callee. - // - // Possible cases are: - // - builtin function, type, or value (e.g. nil, zero) - // => check not shadowed in caller. - // - package-level var/func/const/types - // => same package: check not shadowed in caller. - // => otherwise: import other package, form a qualified identifier. - // (Unexported cross-package references were rejected already.) - // - type parameter - // => not yet supported - // - pkgname - // => import other package and use its local name. - // - // There can be no free references to labels, fields, or methods. - - // Note that we must consider potential shadowing both - // at the caller side (caller.lookup) and, when - // choosing new PkgNames, within the callee (obj.shadow). - - var newName ast.Expr - if obj.Kind == "pkgname" { - // Use locally appropriate import, creating as needed. - n := istate.localName(obj.PkgPath, obj.PkgName, obj.Shadow) - newName = makeIdent(n) // imported package - } else if !obj.ValidPos { - // Built-in function, type, or value (e.g. nil, zero): - // check not shadowed at caller. - found := caller.lookup(obj.Name) // always finds something - if found.Pos().IsValid() { - return nil, fmt.Errorf("cannot inline, because the callee refers to built-in %q, which in the caller is shadowed by a %s (declared at line %d)", - obj.Name, objectKind(found), - caller.Fset.PositionFor(found.Pos(), false).Line) - } - - } else { - // Must be reference to package-level var/func/const/type, - // since type parameters are not yet supported. - qualify := false - if obj.PkgPath == callee.PkgPath { - // reference within callee package - if samePkg { - // Caller and callee are in same package. - // Check caller has not shadowed the decl. - // - // This may fail if the callee is "fake", such as for signature - // refactoring where the callee is modified to be a trivial wrapper - // around the refactored signature. 
- found := caller.lookup(obj.Name) - if found != nil && !isPkgLevel(found) { - return nil, fmt.Errorf("cannot inline, because the callee refers to %s %q, which in the caller is shadowed by a %s (declared at line %d)", - obj.Kind, obj.Name, - objectKind(found), - caller.Fset.PositionFor(found.Pos(), false).Line) - } - } else { - // Cross-package reference. - qualify = true - } - } else { - // Reference to a package-level declaration - // in another package, without a qualified identifier: - // it must be a dot import. - qualify = true - } - - // Form a qualified identifier, pkg.Name. - if qualify { - pkgName := istate.localName(obj.PkgPath, obj.PkgName, obj.Shadow) - newName = &ast.SelectorExpr{ - X: makeIdent(pkgName), - Sel: makeIdent(obj.Name), - } - } - } - objRenames[i] = newName + objRenames, err := st.renameFreeObjs(istate) + if err != nil { + return nil, err } res := &inlineCallResult{ @@ -1353,6 +1278,93 @@ func (st *state) inlineCall() (*inlineCallResult, error) { return res, nil } +// renameFreeObjs computes the renaming of the callee's free identifiers. +// It returns a slice of names (identifiers or selector expressions) corresponding +// to the callee's free objects (gobCallee.FreeObjs). +func (st *state) renameFreeObjs(istate *importState) ([]ast.Expr, error) { + caller, callee := st.caller, &st.callee.impl + objRenames := make([]ast.Expr, len(callee.FreeObjs)) // nil => no change + for i, obj := range callee.FreeObjs { + // obj is a free object of the callee. + // + // Possible cases are: + // - builtin function, type, or value (e.g. nil, zero) + // => check not shadowed in caller. + // - package-level var/func/const/types + // => same package: check not shadowed in caller. + // => otherwise: import other package, form a qualified identifier. + // (Unexported cross-package references were rejected already.) + // - type parameter + // => not yet supported + // - pkgname + // => import other package and use its local name. + // + // There can be no free references to labels, fields, or methods. + + // Note that we must consider potential shadowing both + // at the caller side (caller.lookup) and, when + // choosing new PkgNames, within the callee (obj.shadow). + + var newName ast.Expr + if obj.Kind == "pkgname" { + // Use locally appropriate import, creating as needed. + n := istate.localName(obj.PkgPath, obj.PkgName, obj.Shadow) + newName = makeIdent(n) // imported package + } else if !obj.ValidPos { + // Built-in function, type, or value (e.g. nil, zero): + // check not shadowed at caller. + found := caller.lookup(obj.Name) // always finds something + if found.Pos().IsValid() { + return nil, fmt.Errorf("cannot inline, because the callee refers to built-in %q, which in the caller is shadowed by a %s (declared at line %d)", + obj.Name, objectKind(found), + caller.Fset.PositionFor(found.Pos(), false).Line) + } + + } else { + // Must be reference to package-level var/func/const/type, + // since type parameters are not yet supported. + qualify := false + if obj.PkgPath == callee.PkgPath { + // reference within callee package + if caller.Types.Path() == callee.PkgPath { + // Caller and callee are in same package. + // Check caller has not shadowed the decl. + // + // This may fail if the callee is "fake", such as for signature + // refactoring where the callee is modified to be a trivial wrapper + // around the refactored signature. 
+ found := caller.lookup(obj.Name) + if found != nil && !isPkgLevel(found) { + return nil, fmt.Errorf("cannot inline, because the callee refers to %s %q, which in the caller is shadowed by a %s (declared at line %d)", + obj.Kind, obj.Name, + objectKind(found), + caller.Fset.PositionFor(found.Pos(), false).Line) + } + } else { + // Cross-package reference. + qualify = true + } + } else { + // Reference to a package-level declaration + // in another package, without a qualified identifier: + // it must be a dot import. + qualify = true + } + + // Form a qualified identifier, pkg.Name. + if qualify { + pkgName := istate.localName(obj.PkgPath, obj.PkgName, obj.Shadow) + newName = &ast.SelectorExpr{ + X: makeIdent(pkgName), + Sel: makeIdent(obj.Name), + } + } + } + objRenames[i] = newName + } + return objRenames, nil +} + type argument struct { expr ast.Expr typ types.Type // may be tuple for sole non-receiver arg in spread call @@ -2562,7 +2574,6 @@ func pure(info *types.Info, assign1 func(*types.Var) bool, e ast.Expr) bool { case *ast.SelectorExpr: if seln, ok := info.Selections[e]; ok { - // See types.SelectionKind for background. switch seln.Kind() { case types.MethodExpr: From 6f344f96c53eb66b91fac105c020e643c9d165ed Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Thu, 17 Apr 2025 08:01:21 -0400 Subject: [PATCH 211/270] internal/refactor/inline: add test for type param shadowing Add a test case verifying that the inliner errors if a type parameter shadows an inlined name. Change-Id: I34e9f61e7d6590c846f93b34e79b2c8bfbd44c0d Reviewed-on: https://go-review.googlesource.com/c/tools/+/666297 Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI --- .../refactor/inline/testdata/err-shadow-builtin.txtar | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/internal/refactor/inline/testdata/err-shadow-builtin.txtar b/internal/refactor/inline/testdata/err-shadow-builtin.txtar index 34ea586ab3e..520cda5d4e7 100644 --- a/internal/refactor/inline/testdata/err-shadow-builtin.txtar +++ b/internal/refactor/inline/testdata/err-shadow-builtin.txtar @@ -3,7 +3,7 @@ is shadowed by caller. -- go.mod -- module testdata -go 1.12 +go 1.18 -- a/nil.go -- package a @@ -15,6 +15,13 @@ func _() { func f() *int { return nil } +-- a/nil-typename.go -- +package a + +func _[nil any]() { + _ = f() //@ inline(re"f", re"nil.*shadowed.*by.*typename.*line 3") +} + -- a/append.go -- package a From 035d8c68bd2486af116aa52b2b1a49d25b6a31ec Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Thu, 17 Apr 2025 13:32:24 -0400 Subject: [PATCH 212/270] gopls/internal/golang: Rename all receivers only at declaration This change modifies the behavior of the new "rename receiver" operation, which affects the receivers of all methods of the same type: now, the broader renaming occurs only when the renaming is requested at the declaration of the receiver. Renaming a use of a receiver affects only that variable. 
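To illustrate the distinction (a sketch in the same spirit as the doc
example added below; the names here are only illustrative):

```
type Counter struct{ n int }

// Renaming the receiver in this declaration also renames the
// receivers of Counter's other methods, where possible.
func (c *Counter) Inc() { c.n++ }

// Renaming a use of c inside this body renames only this variable.
func (c *Counter) Dec() { c.n-- }
```
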
+ test, doc, relnote Fixes golang/go#41892 Change-Id: I54c771f51da90b8b2a25801435e81875639ab459 Reviewed-on: https://go-review.googlesource.com/c/tools/+/666455 LUCI-TryBot-Result: Go LUCI Reviewed-by: Robert Findley Auto-Submit: Alan Donovan --- gopls/doc/features/transformation.md | 20 ++++++++--- gopls/doc/release/v0.19.0.md | 22 +++++++++--- gopls/internal/golang/rename.go | 36 ++++++++++--------- .../test/marker/testdata/rename/recv.txt | 12 +++++-- 4 files changed, 62 insertions(+), 28 deletions(-) diff --git a/gopls/doc/features/transformation.md b/gopls/doc/features/transformation.md index b080a842996..1d7c0fa14be 100644 --- a/gopls/doc/features/transformation.md +++ b/gopls/doc/features/transformation.md @@ -317,10 +317,22 @@ judgment and testing. Special cases: -- When renaming the receiver of a method, the tool also attempts to - rename the receivers of all other methods associated with the same - named type. Each other receiver that cannot be fully renamed is - quietly skipped. +- When renaming the declaration of a method receiver, the tool also + attempts to rename the receivers of all other methods associated + with the same named type. Each other receiver that cannot be fully + renamed is quietly skipped. Renaming any _use_ of a receiver affects + only that variable. + + ```go + type Counter struct { x int } + + Rename here to affect only this method + ↓ + func (c *Counter) Inc() { c.x++ } + func (c *Counter) Dec() { c.x++ } + ↑ + Rename here to affect all methods + ``` - Renaming a package declaration additionally causes the package's directory to be renamed. diff --git a/gopls/doc/release/v0.19.0.md b/gopls/doc/release/v0.19.0.md index cf3b47067b0..1536b036f3e 100644 --- a/gopls/doc/release/v0.19.0.md +++ b/gopls/doc/release/v0.19.0.md @@ -9,10 +9,24 @@ ## "Rename" of method receivers -The Rename operation, when applied to the receiver of a method, now -also attempts to rename the receivers of all other methods associated -with the same named type. Each other receiver that cannot be fully -renamed is quietly skipped. +The Rename operation, when applied to the declaration of a method +receiver, now also attempts to rename the receivers of all other +methods associated with the same named type. Each other receiver that +cannot be fully renamed is quietly skipped. + +Renaming a _use_ of a method receiver continues to affect only that +variable. + +```go +type Counter struct { x int } + + Rename here to affect only this method + ↓ +func (c *Counter) Inc() { c.x++ } +func (c *Counter) Dec() { c.x++ } + ↑ + Rename here to affect all methods +``` ## "Implementations" supports signature types diff --git a/gopls/internal/golang/rename.go b/gopls/internal/golang/rename.go index 6e705350266..c5910f7872c 100644 --- a/gopls/internal/golang/rename.go +++ b/gopls/internal/golang/rename.go @@ -585,24 +585,26 @@ func renameOrdinary(ctx context.Context, snapshot *cache.Snapshot, f file.Handle return nil, err } - // If target is a receiver, also rename receivers of - // other methods of the same type that don't already - // have the target name. Quietly discard edits from - // any that can't be renamed. + // If the selected identifier is a receiver declaration, + // also rename receivers of other methods of the same type + // that don't already have the desired name. + // Quietly discard edits from any that can't be renamed. // - // TODO(adonovan): UX question: require that the - // selection be the declaration of the receiver before - // we broaden the renaming? 
- if curDecl, ok := moreiters.First(cur.Enclosing((*ast.FuncDecl)(nil))); ok { - decl := curDecl.Node().(*ast.FuncDecl) // enclosing func - if decl.Recv != nil && - len(decl.Recv.List) > 0 && - len(decl.Recv.List[0].Names) > 0 { - recv := pkg.TypesInfo().Defs[decl.Recv.List[0].Names[0]] - if recv == obj { - // TODO(adonovan): simplify the above 7 lines to - // to "if obj.(*Var).Kind==Recv" in go1.25. - renameReceivers(pkg, recv.(*types.Var), newName, editMap) + // We interpret renaming the receiver declaration as + // intent for the broader renaming; renaming a use of + // the receiver effects only the local renaming. + if id, ok := cur.Node().(*ast.Ident); ok && id.Pos() == obj.Pos() { + if curDecl, ok := moreiters.First(cur.Enclosing((*ast.FuncDecl)(nil))); ok { + decl := curDecl.Node().(*ast.FuncDecl) // enclosing func + if decl.Recv != nil && + len(decl.Recv.List) > 0 && + len(decl.Recv.List[0].Names) > 0 { + recv := pkg.TypesInfo().Defs[decl.Recv.List[0].Names[0]] + if recv == obj { + // TODO(adonovan): simplify the above 7 lines to + // to "if obj.(*Var).Kind==Recv" in go1.25. + renameReceivers(pkg, recv.(*types.Var), newName, editMap) + } } } } diff --git a/gopls/internal/test/marker/testdata/rename/recv.txt b/gopls/internal/test/marker/testdata/rename/recv.txt index 73c9c34d381..f82572a81c3 100644 --- a/gopls/internal/test/marker/testdata/rename/recv.txt +++ b/gopls/internal/test/marker/testdata/rename/recv.txt @@ -5,6 +5,8 @@ Notes: that renaming is quietly skipped. - various combinations of named, aliases, and pointers are tested. - package b exercises generics. +- renaming a receiver declaration causes the broader renaming; + renaming a receiver use (see vrefz) effects only a local renaming. -- a/a.go -- package a @@ -16,7 +18,7 @@ func (T) F() {} func (t T) G() {} //@rename("t", "x", tx) func (U T) H() {} //@rename("U", "v", Uv) func (_ T) I() {} -func (v A) J() { print(v) } +func (v A) J() { print(-v) } //@rename(re"-(v)", "z", vrefz) func (w *T) K() {} func (x *A) L() {} //@rename("x", "print", xprint) @@ -27,9 +29,9 @@ func (x *A) L() {} //@rename("x", "print", xprint) +func (x T) G() {} //@rename("t", "x", tx) +func (x T) H() {} //@rename("U", "v", Uv) @@ -10,2 +10,2 @@ --func (v A) J() { print(v) } +-func (v A) J() { print(-v) } //@rename(re"-(v)", "z", vrefz) -func (w *T) K() {} -+func (x A) J() { print(x) } ++func (x A) J() { print(-x) } //@rename(re"-(v)", "z", vrefz) +func (x *T) K() {} -- @Uv/a/a.go -- @@ -7,2 +7,2 @@ @@ -53,6 +55,10 @@ func (x *A) L() {} //@rename("x", "print", xprint) -func (x *A) L() {} //@rename("x", "print", xprint) +func (print *T) K() {} +func (print *A) L() {} //@rename("x", "print", xprint) +-- @vrefz/a/a.go -- +@@ -10 +10 @@ +-func (v A) J() { print(-v) } //@rename(re"-(v)", "z", vrefz) ++func (z A) J() { print(-z) } //@rename(re"-(v)", "z", vrefz) -- b/b.go -- package b From a318c19ff2fd8d6aae74e36fe7e1a8b8afef3bf7 Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Thu, 17 Apr 2025 14:50:40 -0400 Subject: [PATCH 213/270] internal/refactor/inline: test type params shadowing pkg symbols Add a test showing that the inliner errors when a type parameter shadows a package-level name. 
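The scenario being tested, roughly (a minimal sketch mirroring the
txtar data below; the generic function is named g only for illustration):

```
package a

var v int

func f() int { return v }

func g[v any]() int {
	// Inlining f here would make its reference to the package-level v
	// resolve to the type parameter v, so the inliner must report the
	// shadowing instead of producing that result.
	return f()
}
```
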
Change-Id: I91133aadca4ab866eb77dee901ce175a8d2a1bd5 Reviewed-on: https://go-review.googlesource.com/c/tools/+/666495 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan --- internal/refactor/inline/testdata/err-shadow-pkg.txtar | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/internal/refactor/inline/testdata/err-shadow-pkg.txtar b/internal/refactor/inline/testdata/err-shadow-pkg.txtar index 792418dd453..a55b026abdc 100644 --- a/internal/refactor/inline/testdata/err-shadow-pkg.txtar +++ b/internal/refactor/inline/testdata/err-shadow-pkg.txtar @@ -7,7 +7,7 @@ to f is within the scope of the local constant v. -- go.mod -- module testdata -go 1.12 +go 1.18 -- a/a.go -- package a @@ -18,6 +18,10 @@ func _() { f() //@ inline(re"f", re"v.*shadowed.*by.*const.*line 5") } +func _[v any]() { + f() //@ inline(re"f", re"v.*shadowed.*by.*typename.*line 9") +} + func f() int { return v } var v int @@ -31,6 +35,10 @@ func _() { f() //@ inline(re"f", re"v.*shadowed.*by.*const.*line 5") } +func _[v any]() { + f() //@ inline(re"f", re"v.*shadowed.*by.*typename.*line 9") +} + func f() int { return v } var v int From 48422addd9c4ef2cfdcfa553cc8b2c88166fa2c6 Mon Sep 17 00:00:00 2001 From: Madeline Kalil Date: Wed, 2 Apr 2025 11:24:23 -0400 Subject: [PATCH 214/270] gopls/internal/golang: add embedded struct info to hover Modifies signature information when hovering over an embedded struct field. For example, if we have: ``` type A struct { *B } type B struct { *C } type C struct { D int } var a A var _ = a.D // hover over "D" ``` This would produce a hover signature of `field D int // through *B, *C` Fixes golang/go#73016 Change-Id: I4b08815d93852ea29a24dffc88f102f472682d85 Reviewed-on: https://go-review.googlesource.com/c/tools/+/662275 Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI --- gopls/internal/golang/hover.go | 44 ++++++++++-- .../test/marker/testdata/definition/embed.txt | 8 +-- .../test/marker/testdata/hover/embed.txt | 72 +++++++++++++++++++ .../test/marker/testdata/hover/linkable.txt | 2 +- 4 files changed, 117 insertions(+), 9 deletions(-) diff --git a/gopls/internal/golang/hover.go b/gopls/internal/golang/hover.go index d707c202a1c..43cc68ff8b2 100644 --- a/gopls/internal/golang/hover.go +++ b/gopls/internal/golang/hover.go @@ -287,6 +287,10 @@ func hover(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pp pro } } + // By convention, we qualify hover information relative to the package + // from which the request originated. + qual := typesinternal.FileQualifier(pgf.File, pkg.Types()) + // Handle hover over identifier. // The general case: compute hover information for the object referenced by @@ -305,10 +309,6 @@ func hover(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pp pro hoverRange = &rng } - // By convention, we qualify hover information relative to the package - // from which the request originated. - qual := typesinternal.FileQualifier(pgf.File, pkg.Types()) - // Handle type switch identifiers as a special case, since they don't have an // object. // @@ -344,6 +344,42 @@ func hover(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pp pro // By default, types.ObjectString provides a reasonable signature. signature := objectString(obj, qual, declPos, declPGF.Tok, spec) + + // When hovering over a reference to a promoted struct field, + // show the implicitly selected intervening fields. 
+ cur, ok := pgf.Cursor.FindByPos(pos, pos) + if !ok { + return protocol.Range{}, nil, fmt.Errorf("Invalid hover position, failed to get cursor") + } + if obj, ok := obj.(*types.Var); ok && obj.IsField() { + if selExpr, ok := cur.Parent().Node().(*ast.SelectorExpr); ok { + sel := pkg.TypesInfo().Selections[selExpr] + if len(sel.Index()) > 1 { + var buf bytes.Buffer + buf.WriteString(" // through ") + t := typesinternal.Unpointer(sel.Recv()) + for i, index := range sel.Index()[:len(sel.Index())-1] { + if i > 0 { + buf.WriteString(", ") + } + field := typesinternal.Unpointer(t.Underlying()).(*types.Struct).Field(index) + t = field.Type() + // Inv: fieldType is N or *N for some NamedOrAlias type N. + if ptr, ok := t.(*types.Pointer); ok { + buf.WriteString("*") + t = ptr.Elem() + } + // Be defensive in case of ill-typed code: + if named, ok := t.(typesinternal.NamedOrAlias); ok { + buf.WriteString(named.Obj().Name()) + } + } + // Update signature to include embedded struct info. + signature += buf.String() + } + } + } + singleLineSignature := signature // Display struct tag for struct fields at the end of the signature. diff --git a/gopls/internal/test/marker/testdata/definition/embed.txt b/gopls/internal/test/marker/testdata/definition/embed.txt index 5a29b31708f..da55dbc3c39 100644 --- a/gopls/internal/test/marker/testdata/definition/embed.txt +++ b/gopls/internal/test/marker/testdata/definition/embed.txt @@ -120,7 +120,7 @@ func (a.A) Hi() [`(a.A).Hi` on pkg.go.dev](https://pkg.go.dev/mod.com/a#A.Hi) -- @F -- ```go -field F int +field F int // through embed ``` --- @@ -180,7 +180,7 @@ func (embed) M() [`(b.Embed).M` on pkg.go.dev](https://pkg.go.dev/mod.com/b#Embed.M) -- @RField2 -- ```go -field Field2 int +field Field2 int // through S, R ``` --- @@ -278,7 +278,7 @@ field F1 string [`(b.S2).F1` on pkg.go.dev](https://pkg.go.dev/mod.com/b#S2.F1) -- @S2F2 -- ```go -field F2 int +field F2 int // through S2 ``` --- @@ -291,7 +291,7 @@ field F2 int [`(b.S2).F2` on pkg.go.dev](https://pkg.go.dev/mod.com/b#S2.F2) -- @SField -- ```go -field Field int +field Field int // through S ``` --- diff --git a/gopls/internal/test/marker/testdata/hover/embed.txt b/gopls/internal/test/marker/testdata/hover/embed.txt index 2abc25bfcad..1e359882c0c 100644 --- a/gopls/internal/test/marker/testdata/hover/embed.txt +++ b/gopls/internal/test/marker/testdata/hover/embed.txt @@ -36,6 +36,54 @@ var p P //@hover("P", "P", P) var _ = P.m + +type A struct { + *B +} + +type B struct { + *C +} + +type C struct { + *D +} + +type D struct { + E int +} + +type X struct{ + *Y +} + +type Y struct { + *Z +} + +type Z struct{ + z int +} + +var a A +var _ = a.E //@hover("E", "E", E) + +var x struct { + *X +} +var _ = x.z //@hover("z", "z", Z) + +type Al2 = int +type N struct{ + x Al2 + y struct{ ZA } +} +type Al = *N +type S struct{ Al } +type ZA = *Z +var _ = new(S).x //@hover("x", "x", X) +var _ = new(S).y.z //@hover("z", "z", Zz), hover("y", "y", y) + -- @P -- ```go type P struct { @@ -61,3 +109,27 @@ func (P) m() --- [`p.P` on pkg.go.dev](https://pkg.go.dev/example.com#P) +-- @E -- +```go +field E int // through *B, *C, *D +``` + +--- + +[`(p.D).E` on pkg.go.dev](https://pkg.go.dev/example.com#D.E) +-- @Z -- +```go +field z int // through *X, *Y, *Z +``` +-- @X -- +```go +field x Al2 // through Al +``` +-- @Zz -- +```go +field z int // through ZA +``` +-- @y -- +```go +field y struct{ZA} // through Al +``` diff --git a/gopls/internal/test/marker/testdata/hover/linkable.txt 
b/gopls/internal/test/marker/testdata/hover/linkable.txt index e5d2efe8480..888848a8d89 100644 --- a/gopls/internal/test/marker/testdata/hover/linkable.txt +++ b/gopls/internal/test/marker/testdata/hover/linkable.txt @@ -59,7 +59,7 @@ func _() { } -- @Embed -- ```go -field Embed int64 +field Embed int64 // through E ``` --- From bacd4ba3666bbac3f6d08bede00fdcb2f5cbaacf Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Wed, 16 Apr 2025 16:15:07 -0400 Subject: [PATCH 215/270] go/analysis/passes/gofix: add an example to doc Change-Id: I76febc19b0870d706675d2bc25bab1ad33ce197e Reviewed-on: https://go-review.googlesource.com/c/tools/+/666055 Auto-Submit: Alan Donovan Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI Commit-Queue: Alan Donovan --- go/analysis/passes/gofix/doc.go | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/go/analysis/passes/gofix/doc.go b/go/analysis/passes/gofix/doc.go index 683bac9cb48..cb66e83fae1 100644 --- a/go/analysis/passes/gofix/doc.go +++ b/go/analysis/passes/gofix/doc.go @@ -23,7 +23,8 @@ named constant. const ( a = 1 // error b = iota // error - c = math.Pi // OK + c = a // OK + d = math.Pi // OK ) - A type definition can be marked for inlining only if it is an alias. From d0ead435cfe7f51330edf44f57aac1f5d82a223b Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Wed, 16 Apr 2025 15:39:32 -0400 Subject: [PATCH 216/270] gopls/internal/settings: enable most staticcheck analyzers This CL changes the interpretation of the "staticcheck" option: - true means (as before) run all staticcheck analyzers (except those strictly redundant with existing ones; - false means (as before) run no staticcheck analyzers; - unset means run a hand-picked subset (~50%) of analyzers that are expected to be fast, precise, and useful. We expect the subset to evolve based on experience. Notes: - the "staticcheck" option is now effectively a tri-state; this is represented in Options as pair of booleans. - The policy of whether an option is enabled (based on gopls defaults, honnef.co defaults, name-based options, or "staticcheck" options) is now centralized and encapsulated within Analyzer.Enabled. - AllAnalyzers is the joint list of all possible analyzers. The global vars are now initialized at construction. 
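As a sketch of the resulting enablement policy (illustrative only; the
identifiers below are hypothetical, not the ones used in this CL):

```
package settings

// Options is a hypothetical stand-in for the real options struct.
type Options struct {
	Analyses         map[string]bool // per-analyzer overrides from the "analyses" setting
	StaticcheckSet   bool            // whether the user set "staticcheck" at all
	StaticcheckValue bool            // its value; meaningful only if StaticcheckSet
}

// enabled reports whether the named staticcheck analyzer should run.
// defaultOn reports whether it belongs to the hand-picked default subset.
func enabled(o *Options, name string, defaultOn bool) bool {
	if v, ok := o.Analyses[name]; ok {
		return v // an explicit "analyses" entry always wins
	}
	if o.StaticcheckSet {
		return o.StaticcheckValue // true: all analyzers; false: none
	}
	return defaultOn // unset: only the hand-picked subset
}
```
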
+ test, relnote, doc Fixes golang/go#71038 Change-Id: I6e4aac49edaf4467da00cea87c6f8cd9639222c6 Reviewed-on: https://go-review.googlesource.com/c/tools/+/666095 LUCI-TryBot-Result: Go LUCI Commit-Queue: Alan Donovan Reviewed-by: Robert Findley Auto-Submit: Alan Donovan --- gopls/doc/analyzers.md | 3202 ++++++++++++++++- gopls/doc/features/diagnostics.md | 8 +- gopls/doc/release/v0.19.0.md | 20 + gopls/doc/settings.md | 20 +- gopls/internal/cache/analysis.go | 21 +- gopls/internal/cache/cache.go | 2 +- gopls/internal/doc/api.json | 2740 +++++++++++--- gopls/internal/doc/generate/generate.go | 17 +- gopls/internal/settings/analysis.go | 236 +- gopls/internal/settings/settings.go | 19 +- gopls/internal/settings/staticcheck.go | 429 ++- .../functionextraction_issue66289.txt | 4 +- .../marker/testdata/definition/branch.txt | 8 + .../marker/testdata/diagnostics/analyzers.txt | 17 + .../marker/testdata/highlight/switchbreak.txt | 8 + 15 files changed, 6147 insertions(+), 604 deletions(-) diff --git a/gopls/doc/analyzers.md b/gopls/doc/analyzers.md index 6e9ee81058c..dfca652d426 100644 --- a/gopls/doc/analyzers.md +++ b/gopls/doc/analyzers.md @@ -22,15 +22,3207 @@ which aggregates analyzers from a variety of sources: To enable or disable analyzers, use the [analyses](settings.md#analyses) setting. -In addition, gopls includes the [`staticcheck` suite](https://staticcheck.dev/docs/checks), -though these analyzers are off by default. -Use the [`staticcheck`](settings.md#staticcheck`) setting to enable them, -and consult staticcheck's documentation for analyzer details. +In addition, gopls includes the [`staticcheck` suite](https://staticcheck.dev/docs/checks). +When the [`staticcheck`](settings.md#staticcheck`) boolean option is +unset, slightly more than half of these analyzers are enabled by +default; this subset has been chosen for precision and efficiency. Set +`staticcheck` to `true` to enable the complete set, or to `false` to +disable the complete set. - +Staticcheck analyzers, like all other analyzers, can be explicitly +enabled or disabled using the `analyzers` configuration setting; this +setting takes precedence over the `staticcheck` setting, so, +regardless of what value of `staticcheck` you use (true/false/unset), +you can make adjustments to your preferred set of analyzers. + +## `QF1001`: Apply De Morgan's law + + +Available since + 2021.1 + + +Default: off. Enable by setting `"analyses": {"QF1001": true}`. + +Package documentation: [QF1001](https://staticcheck.dev/docs/checks/#QF1001) + + +## `QF1002`: Convert untagged switch to tagged switch + + +An untagged switch that compares a single variable against a series of +values can be replaced with a tagged switch. + +Before: + + switch { + case x == 1 || x == 2, x == 3: + ... + case x == 4: + ... + default: + ... + } + +After: + + switch x { + case 1, 2, 3: + ... + case 4: + ... + default: + ... + } + +Available since + 2021.1 + + +Default: on. + +Package documentation: [QF1002](https://staticcheck.dev/docs/checks/#QF1002) + + +## `QF1003`: Convert if/else-if chain to tagged switch + + +A series of if/else-if checks comparing the same variable against +values can be replaced with a tagged switch. + +Before: + + if x == 1 || x == 2 { + ... + } else if x == 3 { + ... + } else { + ... + } + +After: + + switch x { + case 1, 2: + ... + case 3: + ... + default: + ... + } + +Available since + 2021.1 + + +Default: on. 
+ +Package documentation: [QF1003](https://staticcheck.dev/docs/checks/#QF1003) + + +## `QF1004`: Use strings.ReplaceAll instead of strings.Replace with n == -1 + + +Available since + 2021.1 + + +Default: on. + +Package documentation: [QF1004](https://staticcheck.dev/docs/checks/#QF1004) + + +## `QF1005`: Expand call to math.Pow + + +Some uses of math.Pow can be simplified to basic multiplication. + +Before: + + math.Pow(x, 2) + +After: + + x * x + +Available since + 2021.1 + + +Default: off. Enable by setting `"analyses": {"QF1005": true}`. + +Package documentation: [QF1005](https://staticcheck.dev/docs/checks/#QF1005) + + +## `QF1006`: Lift if+break into loop condition + + +Before: + + for { + if done { + break + } + ... + } + +After: + + for !done { + ... + } + +Available since + 2021.1 + + +Default: off. Enable by setting `"analyses": {"QF1006": true}`. + +Package documentation: [QF1006](https://staticcheck.dev/docs/checks/#QF1006) + + +## `QF1007`: Merge conditional assignment into variable declaration + + +Before: + + x := false + if someCondition { + x = true + } + +After: + + x := someCondition + +Available since + 2021.1 + + +Default: off. Enable by setting `"analyses": {"QF1007": true}`. + +Package documentation: [QF1007](https://staticcheck.dev/docs/checks/#QF1007) + + +## `QF1008`: Omit embedded fields from selector expression + + +Available since + 2021.1 + + +Default: off. Enable by setting `"analyses": {"QF1008": true}`. + +Package documentation: [QF1008](https://staticcheck.dev/docs/checks/#QF1008) + + +## `QF1009`: Use time.Time.Equal instead of == operator + + +Available since + 2021.1 + + +Default: on. + +Package documentation: [QF1009](https://staticcheck.dev/docs/checks/#QF1009) + + +## `QF1010`: Convert slice of bytes to string when printing it + + +Available since + 2021.1 + + +Default: on. + +Package documentation: [QF1010](https://staticcheck.dev/docs/checks/#QF1010) + + +## `QF1011`: Omit redundant type from variable declaration + + +Available since + 2021.1 + + +Default: off. Enable by setting `"analyses": {"QF1011": true}`. + +Package documentation: [QF1011](https://staticcheck.dev/docs/checks/#) + + +## `QF1012`: Use fmt.Fprintf(x, ...) instead of x.Write(fmt.Sprintf(...)) + + +Available since + 2022.1 + + +Default: on. + +Package documentation: [QF1012](https://staticcheck.dev/docs/checks/#QF1012) + + +## `S1000`: Use plain channel send or receive instead of single-case select + + +Select statements with a single case can be replaced with a simple +send or receive. + +Before: + + select { + case x := <-ch: + fmt.Println(x) + } + +After: + + x := <-ch + fmt.Println(x) + +Available since + 2017.1 + + +Default: on. + +Package documentation: [S1000](https://staticcheck.dev/docs/checks/#S1000) + + +## `S1001`: Replace for loop with call to copy + + +Use copy() for copying elements from one slice to another. For +arrays of identical size, you can use simple assignment. + +Before: + + for i, x := range src { + dst[i] = x + } + +After: + + copy(dst, src) + +Available since + 2017.1 + + +Default: on. + +Package documentation: [S1001](https://staticcheck.dev/docs/checks/#S1001) + + +## `S1002`: Omit comparison with boolean constant + + +Before: + + if x == true {} + +After: + + if x {} + +Available since + 2017.1 + + +Default: off. Enable by setting `"analyses": {"S1002": true}`. 
+ +Package documentation: [S1002](https://staticcheck.dev/docs/checks/#S1002) + + +## `S1003`: Replace call to strings.Index with strings.Contains + + +Before: + + if strings.Index(x, y) != -1 {} + +After: + + if strings.Contains(x, y) {} + +Available since + 2017.1 + + +Default: on. + +Package documentation: [S1003](https://staticcheck.dev/docs/checks/#S1003) + + +## `S1004`: Replace call to bytes.Compare with bytes.Equal + + +Before: + + if bytes.Compare(x, y) == 0 {} + +After: + + if bytes.Equal(x, y) {} + +Available since + 2017.1 + + +Default: on. + +Package documentation: [S1004](https://staticcheck.dev/docs/checks/#S1004) + + +## `S1005`: Drop unnecessary use of the blank identifier + + +In many cases, assigning to the blank identifier is unnecessary. + +Before: + + for _ = range s {} + x, _ = someMap[key] + _ = <-ch + +After: + + for range s{} + x = someMap[key] + <-ch + +Available since + 2017.1 + + +Default: off. Enable by setting `"analyses": {"S1005": true}`. + +Package documentation: [S1005](https://staticcheck.dev/docs/checks/#S1005) + + +## `S1006`: Use 'for { ... }' for infinite loops + + +For infinite loops, using for { ... } is the most idiomatic choice. + +Available since + 2017.1 + + +Default: off. Enable by setting `"analyses": {"S1006": true}`. + +Package documentation: [S1006](https://staticcheck.dev/docs/checks/#S1006) + + +## `S1007`: Simplify regular expression by using raw string literal + + +Raw string literals use backticks instead of quotation marks and do not support +any escape sequences. This means that the backslash can be used +freely, without the need of escaping. + +Since regular expressions have their own escape sequences, raw strings +can improve their readability. + +Before: + + regexp.Compile("\\A(\\w+) profile: total \\d+\\n\\z") + +After: + + regexp.Compile(`\A(\w+) profile: total \d+\n\z`) + +Available since + 2017.1 + + +Default: on. + +Package documentation: [S1007](https://staticcheck.dev/docs/checks/#S1007) + + +## `S1008`: Simplify returning boolean expression + + +Before: + + if { + return true + } + return false + +After: + + return + +Available since + 2017.1 + + +Default: off. Enable by setting `"analyses": {"S1008": true}`. + +Package documentation: [S1008](https://staticcheck.dev/docs/checks/#S1008) + + +## `S1009`: Omit redundant nil check on slices, maps, and channels + + +The len function is defined for all slices, maps, and +channels, even nil ones, which have a length of zero. It is not necessary to +check for nil before checking that their length is not zero. + +Before: + + if x != nil && len(x) != 0 {} + +After: + + if len(x) != 0 {} + +Available since + 2017.1 + + +Default: on. + +Package documentation: [S1009](https://staticcheck.dev/docs/checks/#S1009) + + +## `S1010`: Omit default slice index + + +When slicing, the second index defaults to the length of the value, +making s[n:len(s)] and s[n:] equivalent. + +Available since + 2017.1 + + +Default: on. + +Package documentation: [S1010](https://staticcheck.dev/docs/checks/#S1010) + + +## `S1011`: Use a single append to concatenate two slices + + +Before: + + for _, e := range y { + x = append(x, e) + } + + for i := range y { + x = append(x, y[i]) + } + + for i := range y { + v := y[i] + x = append(x, v) + } + +After: + + x = append(x, y...) + x = append(x, y...) + x = append(x, y...) + +Available since + 2017.1 + + +Default: off. Enable by setting `"analyses": {"S1011": true}`. 
+ +Package documentation: [S1011](https://staticcheck.dev/docs/checks/#S1011) + + +## `S1012`: Replace time.Now().Sub(x) with time.Since(x) + + +The time.Since helper has the same effect as using time.Now().Sub(x) +but is easier to read. + +Before: + + time.Now().Sub(x) + +After: + + time.Since(x) + +Available since + 2017.1 + + +Default: on. + +Package documentation: [S1012](https://staticcheck.dev/docs/checks/#S1012) + + +## `S1016`: Use a type conversion instead of manually copying struct fields + + +Two struct types with identical fields can be converted between each +other. In older versions of Go, the fields had to have identical +struct tags. Since Go 1.8, however, struct tags are ignored during +conversions. It is thus not necessary to manually copy every field +individually. + +Before: + + var x T1 + y := T2{ + Field1: x.Field1, + Field2: x.Field2, + } + +After: + + var x T1 + y := T2(x) + +Available since + 2017.1 + + +Default: off. Enable by setting `"analyses": {"S1016": true}`. + +Package documentation: [S1016](https://staticcheck.dev/docs/checks/#S1016) + + +## `S1017`: Replace manual trimming with strings.TrimPrefix + + +Instead of using strings.HasPrefix and manual slicing, use the +strings.TrimPrefix function. If the string doesn't start with the +prefix, the original string will be returned. Using strings.TrimPrefix +reduces complexity, and avoids common bugs, such as off-by-one +mistakes. + +Before: + + if strings.HasPrefix(str, prefix) { + str = str[len(prefix):] + } + +After: + + str = strings.TrimPrefix(str, prefix) + +Available since + 2017.1 + + +Default: on. + +Package documentation: [S1017](https://staticcheck.dev/docs/checks/#S1017) + + +## `S1018`: Use 'copy' for sliding elements + + +copy() permits using the same source and destination slice, even with +overlapping ranges. This makes it ideal for sliding elements in a +slice. + +Before: + + for i := 0; i < n; i++ { + bs[i] = bs[offset+i] + } + +After: + + copy(bs[:n], bs[offset:]) + +Available since + 2017.1 + + +Default: on. + +Package documentation: [S1018](https://staticcheck.dev/docs/checks/#S1018) + + +## `S1019`: Simplify 'make' call by omitting redundant arguments + + +The 'make' function has default values for the length and capacity +arguments. For channels, the length defaults to zero, and for slices, +the capacity defaults to the length. + +Available since + 2017.1 + + +Default: on. + +Package documentation: [S1019](https://staticcheck.dev/docs/checks/#S1019) + + +## `S1020`: Omit redundant nil check in type assertion + + +Before: + + if _, ok := i.(T); ok && i != nil {} + +After: + + if _, ok := i.(T); ok {} + +Available since + 2017.1 + + +Default: on. + +Package documentation: [S1020](https://staticcheck.dev/docs/checks/#S1020) + + +## `S1021`: Merge variable declaration and assignment + + +Before: + + var x uint + x = 1 + +After: + + var x uint = 1 + +Available since + 2017.1 + + +Default: off. Enable by setting `"analyses": {"S1021": true}`. + +Package documentation: [S1021](https://staticcheck.dev/docs/checks/#S1021) + + +## `S1023`: Omit redundant control flow + + +Functions that have no return value do not need a return statement as +the final statement of the function. + +Switches in Go do not have automatic fallthrough, unlike languages +like C. It is not necessary to have a break statement as the final +statement in a case block. + +Available since + 2017.1 + + +Default: on. 
+ +Package documentation: [S1023](https://staticcheck.dev/docs/checks/#S1023) + + +## `S1024`: Replace x.Sub(time.Now()) with time.Until(x) + + +The time.Until helper has the same effect as using x.Sub(time.Now()) +but is easier to read. + +Before: + + x.Sub(time.Now()) + +After: + + time.Until(x) + +Available since + 2017.1 + + +Default: on. + +Package documentation: [S1024](https://staticcheck.dev/docs/checks/#S1024) + + +## `S1025`: Don't use fmt.Sprintf("%s", x) unnecessarily + + +In many instances, there are easier and more efficient ways of getting +a value's string representation. Whenever a value's underlying type is +a string already, or the type has a String method, they should be used +directly. + +Given the following shared definitions + + type T1 string + type T2 int + + func (T2) String() string { return "Hello, world" } + + var x string + var y T1 + var z T2 + +we can simplify + + fmt.Sprintf("%s", x) + fmt.Sprintf("%s", y) + fmt.Sprintf("%s", z) + +to + + x + string(y) + z.String() + +Available since + 2017.1 + + +Default: off. Enable by setting `"analyses": {"S1025": true}`. + +Package documentation: [S1025](https://staticcheck.dev/docs/checks/#S1025) + + +## `S1028`: Simplify error construction with fmt.Errorf + + +Before: + + errors.New(fmt.Sprintf(...)) + +After: + + fmt.Errorf(...) + +Available since + 2017.1 + + +Default: on. + +Package documentation: [S1028](https://staticcheck.dev/docs/checks/#S1028) + + +## `S1029`: Range over the string directly + + +Ranging over a string will yield byte offsets and runes. If the offset +isn't used, this is functionally equivalent to converting the string +to a slice of runes and ranging over that. Ranging directly over the +string will be more performant, however, as it avoids allocating a new +slice, the size of which depends on the length of the string. + +Before: + + for _, r := range []rune(s) {} + +After: + + for _, r := range s {} + +Available since + 2017.1 + + +Default: off. Enable by setting `"analyses": {"S1029": true}`. + +Package documentation: [S1029](https://staticcheck.dev/docs/checks/#S1029) + + +## `S1030`: Use bytes.Buffer.String or bytes.Buffer.Bytes + + +bytes.Buffer has both a String and a Bytes method. It is almost never +necessary to use string(buf.Bytes()) or []byte(buf.String()) – simply +use the other method. + +The only exception to this are map lookups. Due to a compiler optimization, +m[string(buf.Bytes())] is more efficient than m[buf.String()]. + +Available since + 2017.1 + + +Default: on. + +Package documentation: [S1030](https://staticcheck.dev/docs/checks/#S1030) + + +## `S1031`: Omit redundant nil check around loop + + +You can use range on nil slices and maps, the loop will simply never +execute. This makes an additional nil check around the loop +unnecessary. + +Before: + + if s != nil { + for _, x := range s { + ... + } + } + +After: + + for _, x := range s { + ... + } + +Available since + 2017.1 + + +Default: on. + +Package documentation: [S1031](https://staticcheck.dev/docs/checks/#S1031) + + +## `S1032`: Use sort.Ints(x), sort.Float64s(x), and sort.Strings(x) + + +The sort.Ints, sort.Float64s and sort.Strings functions are easier to +read than sort.Sort(sort.IntSlice(x)), sort.Sort(sort.Float64Slice(x)) +and sort.Sort(sort.StringSlice(x)). + +Before: + + sort.Sort(sort.StringSlice(x)) + +After: + + sort.Strings(x) + +Available since + 2019.1 + + +Default: on. 
+ +Package documentation: [S1032](https://staticcheck.dev/docs/checks/#S1032) + + +## `S1033`: Unnecessary guard around call to 'delete' + + +Calling delete on a nil map is a no-op. + +Available since + 2019.2 + + +Default: on. + +Package documentation: [S1033](https://staticcheck.dev/docs/checks/#S1033) + + +## `S1034`: Use result of type assertion to simplify cases + + +Available since + 2019.2 + + +Default: on. + +Package documentation: [S1034](https://staticcheck.dev/docs/checks/#S1034) + + +## `S1035`: Redundant call to net/http.CanonicalHeaderKey in method call on net/http.Header + + +The methods on net/http.Header, namely Add, Del, Get +and Set, already canonicalize the given header name. + +Available since + 2020.1 + + +Default: on. + +Package documentation: [S1035](https://staticcheck.dev/docs/checks/#S1035) + + +## `S1036`: Unnecessary guard around map access + + +When accessing a map key that doesn't exist yet, one receives a zero +value. Often, the zero value is a suitable value, for example when +using append or doing integer math. + +The following + + if _, ok := m["foo"]; ok { + m["foo"] = append(m["foo"], "bar") + } else { + m["foo"] = []string{"bar"} + } + +can be simplified to + + m["foo"] = append(m["foo"], "bar") + +and + + if _, ok := m2["k"]; ok { + m2["k"] += 4 + } else { + m2["k"] = 4 + } + +can be simplified to + + m["k"] += 4 + +Available since + 2020.1 + + +Default: on. + +Package documentation: [S1036](https://staticcheck.dev/docs/checks/#S1036) + + +## `S1037`: Elaborate way of sleeping + + +Using a select statement with a single case receiving +from the result of time.After is a very elaborate way of sleeping that +can much simpler be expressed with a simple call to time.Sleep. + +Available since + 2020.1 + + +Default: on. + +Package documentation: [S1037](https://staticcheck.dev/docs/checks/#S1037) + + +## `S1038`: Unnecessarily complex way of printing formatted string + + +Instead of using fmt.Print(fmt.Sprintf(...)), one can use fmt.Printf(...). + +Available since + 2020.1 + + +Default: on. + +Package documentation: [S1038](https://staticcheck.dev/docs/checks/#S1038) + + +## `S1039`: Unnecessary use of fmt.Sprint + + +Calling fmt.Sprint with a single string argument is unnecessary +and identical to using the string directly. + +Available since + 2020.1 + + +Default: on. + +Package documentation: [S1039](https://staticcheck.dev/docs/checks/#S1039) + + +## `S1040`: Type assertion to current type + + +The type assertion x.(SomeInterface), when x already has type +SomeInterface, can only fail if x is nil. Usually, this is +left-over code from when x had a different type and you can safely +delete the type assertion. If you want to check that x is not nil, +consider being explicit and using an actual if x == nil comparison +instead of relying on the type assertion panicking. + +Available since + 2021.1 + + +Default: on. + +Package documentation: [S1040](https://staticcheck.dev/docs/checks/#S1040) + + +## `SA1000`: Invalid regular expression + + +Available since + 2017.1 + + +Default: off. Enable by setting `"analyses": {"SA1000": true}`. + +Package documentation: [SA1000](https://staticcheck.dev/docs/checks/#SA1000) + + +## `SA1001`: Invalid template + + +Available since + 2017.1 + + +Default: on. + +Package documentation: [SA1001](https://staticcheck.dev/docs/checks/#SA1001) + + +## `SA1002`: Invalid format in time.Parse + + +Available since + 2017.1 + + +Default: off. Enable by setting `"analyses": {"SA1002": true}`. 
+ +Package documentation: [SA1002](https://staticcheck.dev/docs/checks/#SA1002) + + +## `SA1003`: Unsupported argument to functions in encoding/binary + + +The encoding/binary package can only serialize types with known sizes. +This precludes the use of the int and uint types, as their sizes +differ on different architectures. Furthermore, it doesn't support +serializing maps, channels, strings, or functions. + +Before Go 1.8, bool wasn't supported, either. + +Available since + 2017.1 + + +Default: off. Enable by setting `"analyses": {"SA1003": true}`. + +Package documentation: [SA1003](https://staticcheck.dev/docs/checks/#SA1003) + + +## `SA1004`: Suspiciously small untyped constant in time.Sleep + + +The time.Sleep function takes a time.Duration as its only argument. +Durations are expressed in nanoseconds. Thus, calling time.Sleep(1) +will sleep for 1 nanosecond. This is a common source of bugs, as sleep +functions in other languages often accept seconds or milliseconds. + +The time package provides constants such as time.Second to express +large durations. These can be combined with arithmetic to express +arbitrary durations, for example 5 * time.Second for 5 seconds. + +If you truly meant to sleep for a tiny amount of time, use +n * time.Nanosecond to signal to Staticcheck that you did mean to sleep +for some amount of nanoseconds. + +Available since + 2017.1 + + +Default: on. + +Package documentation: [SA1004](https://staticcheck.dev/docs/checks/#SA1004) + + +## `SA1005`: Invalid first argument to exec.Command + + +os/exec runs programs directly (using variants of the fork and exec +system calls on Unix systems). This shouldn't be confused with running +a command in a shell. The shell will allow for features such as input +redirection, pipes, and general scripting. The shell is also +responsible for splitting the user's input into a program name and its +arguments. For example, the equivalent to + + ls / /tmp + +would be + + exec.Command("ls", "/", "/tmp") + +If you want to run a command in a shell, consider using something like +the following – but be aware that not all systems, particularly +Windows, will have a /bin/sh program: + + exec.Command("/bin/sh", "-c", "ls | grep Awesome") + +Available since + 2017.1 + + +Default: on. + +Package documentation: [SA1005](https://staticcheck.dev/docs/checks/#SA1005) + + +## `SA1007`: Invalid URL in net/url.Parse + + +Available since + 2017.1 + + +Default: off. Enable by setting `"analyses": {"SA1007": true}`. + +Package documentation: [SA1007](https://staticcheck.dev/docs/checks/#SA1007) + + +## `SA1008`: Non-canonical key in http.Header map + + +Keys in http.Header maps are canonical, meaning they follow a specific +combination of uppercase and lowercase letters. Methods such as +http.Header.Add and http.Header.Del convert inputs into this canonical +form before manipulating the map. + +When manipulating http.Header maps directly, as opposed to using the +provided methods, care should be taken to stick to canonical form in +order to avoid inconsistencies. The following piece of code +demonstrates one such inconsistency: + + h := http.Header{} + h["etag"] = []string{"1234"} + h.Add("etag", "5678") + fmt.Println(h) + + // Output: + // map[Etag:[5678] etag:[1234]] + +The easiest way of obtaining the canonical form of a key is to use +http.CanonicalHeaderKey. + +Available since + 2017.1 + + +Default: on. 
+ +Package documentation: [SA1008](https://staticcheck.dev/docs/checks/#SA1008) + + +## `SA1010`: (*regexp.Regexp).FindAll called with n == 0, which will always return zero results + + +If n >= 0, the function returns at most n matches/submatches. To +return all results, specify a negative number. + +Available since + 2017.1 + + +Default: off. Enable by setting `"analyses": {"SA1010": true}`. + +Package documentation: [SA1010](https://staticcheck.dev/docs/checks/#SA1010) + + +## `SA1011`: Various methods in the 'strings' package expect valid UTF-8, but invalid input is provided + + +Available since + 2017.1 + + +Default: off. Enable by setting `"analyses": {"SA1011": true}`. + +Package documentation: [SA1011](https://staticcheck.dev/docs/checks/#SA1011) + + +## `SA1012`: A nil context.Context is being passed to a function, consider using context.TODO instead + + +Available since + 2017.1 + + +Default: on. + +Package documentation: [SA1012](https://staticcheck.dev/docs/checks/#SA1012) + + +## `SA1013`: io.Seeker.Seek is being called with the whence constant as the first argument, but it should be the second + + +Available since + 2017.1 + + +Default: on. + +Package documentation: [SA1013](https://staticcheck.dev/docs/checks/#SA1013) + + +## `SA1014`: Non-pointer value passed to Unmarshal or Decode + + +Available since + 2017.1 + + +Default: off. Enable by setting `"analyses": {"SA1014": true}`. + +Package documentation: [SA1014](https://staticcheck.dev/docs/checks/#SA1014) + + +## `SA1015`: Using time.Tick in a way that will leak. Consider using time.NewTicker, and only use time.Tick in tests, commands and endless functions + + +Before Go 1.23, time.Tickers had to be closed to be able to be garbage +collected. Since time.Tick doesn't make it possible to close the underlying +ticker, using it repeatedly would leak memory. + +Go 1.23 fixes this by allowing tickers to be collected even if they weren't closed. + +Available since + 2017.1 + + +Default: off. Enable by setting `"analyses": {"SA1015": true}`. + +Package documentation: [SA1015](https://staticcheck.dev/docs/checks/#SA1015) + + +## `SA1016`: Trapping a signal that cannot be trapped + + +Not all signals can be intercepted by a process. Specifically, on +UNIX-like systems, the syscall.SIGKILL and syscall.SIGSTOP signals are +never passed to the process, but instead handled directly by the +kernel. It is therefore pointless to try and handle these signals. + +Available since + 2017.1 + + +Default: on. + +Package documentation: [SA1016](https://staticcheck.dev/docs/checks/#SA1016) + + +## `SA1017`: Channels used with os/signal.Notify should be buffered + + +The os/signal package uses non-blocking channel sends when delivering +signals. If the receiving end of the channel isn't ready and the +channel is either unbuffered or full, the signal will be dropped. To +avoid missing signals, the channel should be buffered and of the +appropriate size. For a channel used for notification of just one +signal value, a buffer of size 1 is sufficient. + +Available since + 2017.1 + + +Default: off. Enable by setting `"analyses": {"SA1017": true}`. + +Package documentation: [SA1017](https://staticcheck.dev/docs/checks/#SA1017) + + +## `SA1018`: strings.Replace called with n == 0, which does nothing + + +With n == 0, zero instances will be replaced. To replace all +instances, use a negative number, or use strings.ReplaceAll. + +Available since + 2017.1 + + +Default: off. Enable by setting `"analyses": {"SA1018": true}`. 
+ +Package documentation: [SA1018](https://staticcheck.dev/docs/checks/#SA1018) + + +## `SA1020`: Using an invalid host:port pair with a net.Listen-related function + + +Available since + 2017.1 + + +Default: off. Enable by setting `"analyses": {"SA1020": true}`. + +Package documentation: [SA1020](https://staticcheck.dev/docs/checks/#SA1020) + + +## `SA1021`: Using bytes.Equal to compare two net.IP + + +A net.IP stores an IPv4 or IPv6 address as a slice of bytes. The +length of the slice for an IPv4 address, however, can be either 4 or +16 bytes long, using different ways of representing IPv4 addresses. In +order to correctly compare two net.IPs, the net.IP.Equal method should +be used, as it takes both representations into account. + +Available since + 2017.1 + + +Default: off. Enable by setting `"analyses": {"SA1021": true}`. + +Package documentation: [SA1021](https://staticcheck.dev/docs/checks/#SA1021) + + +## `SA1023`: Modifying the buffer in an io.Writer implementation + + +Write must not modify the slice data, even temporarily. + +Available since + 2017.1 + + +Default: off. Enable by setting `"analyses": {"SA1023": true}`. + +Package documentation: [SA1023](https://staticcheck.dev/docs/checks/#SA1023) + + +## `SA1024`: A string cutset contains duplicate characters + + +The strings.TrimLeft and strings.TrimRight functions take cutsets, not +prefixes. A cutset is treated as a set of characters to remove from a +string. For example, + + strings.TrimLeft("42133word", "1234") + +will result in the string "word" – any characters that are 1, 2, 3 or +4 are cut from the left of the string. + +In order to remove one string from another, use strings.TrimPrefix instead. + +Available since + 2017.1 + + +Default: off. Enable by setting `"analyses": {"SA1024": true}`. + +Package documentation: [SA1024](https://staticcheck.dev/docs/checks/#SA1024) + + +## `SA1025`: It is not possible to use (*time.Timer).Reset's return value correctly + + +Available since + 2019.1 + + +Default: off. Enable by setting `"analyses": {"SA1025": true}`. + +Package documentation: [SA1025](https://staticcheck.dev/docs/checks/#SA1025) + + +## `SA1026`: Cannot marshal channels or functions + + +Available since + 2019.2 + + +Default: off. Enable by setting `"analyses": {"SA1026": true}`. + +Package documentation: [SA1026](https://staticcheck.dev/docs/checks/#SA1026) + + +## `SA1027`: Atomic access to 64-bit variable must be 64-bit aligned + + +On ARM, x86-32, and 32-bit MIPS, it is the caller's responsibility to +arrange for 64-bit alignment of 64-bit words accessed atomically. The +first word in a variable or in an allocated struct, array, or slice +can be relied upon to be 64-bit aligned. + +You can use the structlayout tool to inspect the alignment of fields +in a struct. + +Available since + 2019.2 + + +Default: off. Enable by setting `"analyses": {"SA1027": true}`. + +Package documentation: [SA1027](https://staticcheck.dev/docs/checks/#SA1027) + + +## `SA1028`: sort.Slice can only be used on slices + + +The first argument of sort.Slice must be a slice. + +Available since + 2020.1 + + +Default: off. Enable by setting `"analyses": {"SA1028": true}`. + +Package documentation: [SA1028](https://staticcheck.dev/docs/checks/#SA1028) + + +## `SA1029`: Inappropriate key in call to context.WithValue + + +The provided key must be comparable and should not be +of type string or any other built-in type to avoid collisions between +packages using context. Users of WithValue should define their own +types for keys. 
+ +To avoid allocating when assigning to an interface{}, +context keys often have concrete type struct{}. Alternatively, +exported context key variables' static type should be a pointer or +interface. + +Available since + 2020.1 + + +Default: off. Enable by setting `"analyses": {"SA1029": true}`. + +Package documentation: [SA1029](https://staticcheck.dev/docs/checks/#SA1029) + + +## `SA1030`: Invalid argument in call to a strconv function + + +This check validates the format, number base and bit size arguments of +the various parsing and formatting functions in strconv. + +Available since + 2021.1 + + +Default: off. Enable by setting `"analyses": {"SA1030": true}`. + +Package documentation: [SA1030](https://staticcheck.dev/docs/checks/#SA1030) + + +## `SA1031`: Overlapping byte slices passed to an encoder + + +In an encoding function of the form Encode(dst, src), dst and +src were found to reference the same memory. This can result in +src bytes being overwritten before they are read, when the encoder +writes more than one byte per src byte. + +Available since + 2024.1 + + +Default: off. Enable by setting `"analyses": {"SA1031": true}`. + +Package documentation: [SA1031](https://staticcheck.dev/docs/checks/#SA1031) + + +## `SA1032`: Wrong order of arguments to errors.Is + + +The first argument of the function errors.Is is the error +that we have and the second argument is the error we're trying to match against. +For example: + + if errors.Is(err, io.EOF) { ... } + +This check detects some cases where the two arguments have been swapped. It +flags any calls where the first argument is referring to a package-level error +variable, such as + + if errors.Is(io.EOF, err) { /* this is wrong */ } + +Available since + 2024.1 + + +Default: off. Enable by setting `"analyses": {"SA1032": true}`. + +Package documentation: [SA1032](https://staticcheck.dev/docs/checks/#SA1032) + + +## `SA2001`: Empty critical section, did you mean to defer the unlock? + + +Empty critical sections of the kind + + mu.Lock() + mu.Unlock() + +are very often a typo, and the following was intended instead: + + mu.Lock() + defer mu.Unlock() + +Do note that sometimes empty critical sections can be useful, as a +form of signaling to wait on another goroutine. Many times, there are +simpler ways of achieving the same effect. When that isn't the case, +the code should be amply commented to avoid confusion. Combining such +comments with a //lint:ignore directive can be used to suppress this +rare false positive. + +Available since + 2017.1 + + +Default: on. + +Package documentation: [SA2001](https://staticcheck.dev/docs/checks/#SA2001) + + +## `SA2002`: Called testing.T.FailNow or SkipNow in a goroutine, which isn't allowed + + +Available since + 2017.1 + + +Default: off. Enable by setting `"analyses": {"SA2002": true}`. + +Package documentation: [SA2002](https://staticcheck.dev/docs/checks/#SA2002) + + +## `SA2003`: Deferred Lock right after locking, likely meant to defer Unlock instead + + +Available since + 2017.1 + + +Default: off. Enable by setting `"analyses": {"SA2003": true}`. + +Package documentation: [SA2003](https://staticcheck.dev/docs/checks/#SA2003) + + +## `SA3000`: TestMain doesn't call os.Exit, hiding test failures + + +Test executables (and in turn 'go test') exit with a non-zero status +code if any tests failed. When specifying your own TestMain function, +it is your responsibility to arrange for this, by calling os.Exit with +the correct code. 
The correct code is returned by (*testing.M).Run, so +the usual way of implementing TestMain is to end it with +os.Exit(m.Run()). + +Available since + 2017.1 + + +Default: on. + +Package documentation: [SA3000](https://staticcheck.dev/docs/checks/#SA3000) + + +## `SA3001`: Assigning to b.N in benchmarks distorts the results + + +The testing package dynamically sets b.N to improve the reliability of +benchmarks and uses it in computations to determine the duration of a +single operation. Benchmark code must not alter b.N as this would +falsify results. + +Available since + 2017.1 + + +Default: on. + +Package documentation: [SA3001](https://staticcheck.dev/docs/checks/#SA3001) + + +## `SA4000`: Binary operator has identical expressions on both sides + + +Available since + 2017.1 + + +Default: on. + +Package documentation: [SA4000](https://staticcheck.dev/docs/checks/#SA4000) + + +## `SA4001`: &*x gets simplified to x, it does not copy x + + +Available since + 2017.1 + + +Default: on. + +Package documentation: [SA4001](https://staticcheck.dev/docs/checks/#SA4001) + + +## `SA4003`: Comparing unsigned values against negative values is pointless + + +Available since + 2017.1 + + +Default: on. + +Package documentation: [SA4003](https://staticcheck.dev/docs/checks/#SA4003) + + +## `SA4004`: The loop exits unconditionally after one iteration + + +Available since + 2017.1 + + +Default: on. + +Package documentation: [SA4004](https://staticcheck.dev/docs/checks/#SA4004) + + +## `SA4005`: Field assignment that will never be observed. Did you mean to use a pointer receiver? + + +Available since + 2021.1 + + +Default: off. Enable by setting `"analyses": {"SA4005": true}`. + +Package documentation: [SA4005](https://staticcheck.dev/docs/checks/#SA4005) + + +## `SA4006`: A value assigned to a variable is never read before being overwritten. Forgotten error check or dead code? + + +Available since + 2017.1 + + +Default: off. Enable by setting `"analyses": {"SA4006": true}`. + +Package documentation: [SA4006](https://staticcheck.dev/docs/checks/#SA4006) + + +## `SA4008`: The variable in the loop condition never changes, are you incrementing the wrong variable? + + +For example: + + for i := 0; i < 10; j++ { ... } + +This may also occur when a loop can only execute once because of unconditional +control flow that terminates the loop. For example, when a loop body contains an +unconditional break, return, or panic: + + func f() { + panic("oops") + } + func g() { + for i := 0; i < 10; i++ { + // f unconditionally calls panic, which means "i" is + // never incremented. + f() + } + } + +Available since + 2017.1 + + +Default: off. Enable by setting `"analyses": {"SA4008": true}`. + +Package documentation: [SA4008](https://staticcheck.dev/docs/checks/#SA4008) + + +## `SA4009`: A function argument is overwritten before its first use + + +Available since + 2017.1 + + +Default: off. Enable by setting `"analyses": {"SA4009": true}`. + +Package documentation: [SA4009](https://staticcheck.dev/docs/checks/#SA4009) + + +## `SA4010`: The result of append will never be observed anywhere + + +Available since + 2017.1 + + +Default: off. Enable by setting `"analyses": {"SA4010": true}`. + +Package documentation: [SA4010](https://staticcheck.dev/docs/checks/#SA4010) + + +## `SA4011`: Break statement with no effect. Did you mean to break out of an outer loop? + + +Available since + 2017.1 + + +Default: on. 
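+
+A sketch of the kind of code this check is aimed at: the plain break
+below terminates only the switch, not the surrounding loop, which
+requires a labeled break:
+
+    loop:
+        for _, x := range xs {
+            switch x {
+            case 0:
+                break // terminates only the switch
+            default:
+                break loop // terminates the for loop
+            }
+        }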
+ +Package documentation: [SA4011](https://staticcheck.dev/docs/checks/#SA4011) + + +## `SA4012`: Comparing a value against NaN even though no value is equal to NaN + + +Available since + 2017.1 + + +Default: off. Enable by setting `"analyses": {"SA4012": true}`. + +Package documentation: [SA4012](https://staticcheck.dev/docs/checks/#SA4012) + + +## `SA4013`: Negating a boolean twice (!!b) is the same as writing b. This is either redundant, or a typo. + + +Available since + 2017.1 + + +Default: on. + +Package documentation: [SA4013](https://staticcheck.dev/docs/checks/#SA4013) + + +## `SA4014`: An if/else if chain has repeated conditions and no side-effects; if the condition didn't match the first time, it won't match the second time, either + + +Available since + 2017.1 + + +Default: on. + +Package documentation: [SA4014](https://staticcheck.dev/docs/checks/#SA4014) + + +## `SA4015`: Calling functions like math.Ceil on floats converted from integers doesn't do anything useful + + +Available since + 2017.1 + + +Default: off. Enable by setting `"analyses": {"SA4015": true}`. + +Package documentation: [SA4015](https://staticcheck.dev/docs/checks/#SA4015) + + +## `SA4016`: Certain bitwise operations, such as x ^ 0, do not do anything useful + + +Available since + 2017.1 + + +Default: on. + +Package documentation: [SA4016](https://staticcheck.dev/docs/checks/#SA4016) + + +## `SA4017`: Discarding the return values of a function without side effects, making the call pointless + + +Available since + 2017.1 + + +Default: off. Enable by setting `"analyses": {"SA4017": true}`. + +Package documentation: [SA4017](https://staticcheck.dev/docs/checks/#SA4017) + + +## `SA4018`: Self-assignment of variables + + +Available since + 2017.1 + + +Default: off. Enable by setting `"analyses": {"SA4018": true}`. + +Package documentation: [SA4018](https://staticcheck.dev/docs/checks/#SA4018) + + +## `SA4019`: Multiple, identical build constraints in the same file + + +Available since + 2017.1 + + +Default: on. + +Package documentation: [SA4019](https://staticcheck.dev/docs/checks/#SA4019) + + +## `SA4020`: Unreachable case clause in a type switch + + +In a type switch like the following + + type T struct{} + func (T) Read(b []byte) (int, error) { return 0, nil } + + var v interface{} = T{} + + switch v.(type) { + case io.Reader: + // ... + case T: + // unreachable + } + +the second case clause can never be reached because T implements +io.Reader and case clauses are evaluated in source order. + +Another example: + + type T struct{} + func (T) Read(b []byte) (int, error) { return 0, nil } + func (T) Close() error { return nil } + + var v interface{} = T{} + + switch v.(type) { + case io.Reader: + // ... + case io.ReadCloser: + // unreachable + } + +Even though T has a Close method and thus implements io.ReadCloser, +io.Reader will always match first. The method set of io.Reader is a +subset of io.ReadCloser. Thus it is impossible to match the second +case without matching the first case. + + +Structurally equivalent interfaces + +A special case of the previous example are structurally identical +interfaces. Given these declarations + + type T error + type V error + + func doSomething() error { + err, ok := doAnotherThing() + if ok { + return T(err) + } + + return U(err) + } + +the following type switch will have an unreachable case clause: + + switch doSomething().(type) { + case T: + // ... 
+ case V: + // unreachable + } + +T will always match before V because they are structurally equivalent +and therefore doSomething()'s return value implements both. + +Available since + 2019.2 + + +Default: on. + +Package documentation: [SA4020](https://staticcheck.dev/docs/checks/#SA4020) + + +## `SA4022`: Comparing the address of a variable against nil + + +Code such as 'if &x == nil' is meaningless, because taking the address of a variable always yields a non-nil pointer. + +Available since + 2020.1 + + +Default: on. + +Package documentation: [SA4022](https://staticcheck.dev/docs/checks/#SA4022) + + +## `SA4023`: Impossible comparison of interface value with untyped nil + + +Under the covers, interfaces are implemented as two elements, a +type T and a value V. V is a concrete value such as an int, +struct or pointer, never an interface itself, and has type T. For +instance, if we store the int value 3 in an interface, the +resulting interface value has, schematically, (T=int, V=3). The +value V is also known as the interface's dynamic value, since a +given interface variable might hold different values V (and +corresponding types T) during the execution of the program. + +An interface value is nil only if the V and T are both +unset, (T=nil, V is not set), In particular, a nil interface will +always hold a nil type. If we store a nil pointer of type *int +inside an interface value, the inner type will be *int regardless +of the value of the pointer: (T=*int, V=nil). Such an interface +value will therefore be non-nil even when the pointer value V +inside is nil. + +This situation can be confusing, and arises when a nil value is +stored inside an interface value such as an error return: + + func returnsError() error { + var p *MyError = nil + if bad() { + p = ErrBad + } + return p // Will always return a non-nil error. + } + +If all goes well, the function returns a nil p, so the return +value is an error interface value holding (T=*MyError, V=nil). +This means that if the caller compares the returned error to nil, +it will always look as if there was an error even if nothing bad +happened. To return a proper nil error to the caller, the +function must return an explicit nil: + + func returnsError() error { + if bad() { + return ErrBad + } + return nil + } + +It's a good idea for functions that return errors always to use +the error type in their signature (as we did above) rather than a +concrete type such as *MyError, to help guarantee the error is +created correctly. As an example, os.Open returns an error even +though, if not nil, it's always of concrete type *os.PathError. + +Similar situations to those described here can arise whenever +interfaces are used. Just keep in mind that if any concrete value +has been stored in the interface, the interface will not be nil. +For more information, see The Laws of +Reflection at https://golang.org/doc/articles/laws_of_reflection.html. + +This text has been copied from +https://golang.org/doc/faq#nil_error, licensed under the Creative +Commons Attribution 3.0 License. + +Available since + 2020.2 + + +Default: off. Enable by setting `"analyses": {"SA4023": true}`. + +Package documentation: [SA4023](https://staticcheck.dev/docs/checks/#SA4023) + + +## `SA4024`: Checking for impossible return value from a builtin function + + +Return values of the len and cap builtins cannot be negative. + +See https://golang.org/pkg/builtin/#len and https://golang.org/pkg/builtin/#cap. 
+ +Example: + + if len(slice) < 0 { + fmt.Println("unreachable code") + } + +Available since + 2021.1 + + +Default: on. + +Package documentation: [SA4024](https://staticcheck.dev/docs/checks/#SA4024) + + +## `SA4025`: Integer division of literals that results in zero + + +When dividing two integer constants, the result will +also be an integer. Thus, a division such as 2 / 3 results in 0. +This is true for all of the following examples: + + _ = 2 / 3 + const _ = 2 / 3 + const _ float64 = 2 / 3 + _ = float64(2 / 3) + +Staticcheck will flag such divisions if both sides of the division are +integer literals, as it is highly unlikely that the division was +intended to truncate to zero. Staticcheck will not flag integer +division involving named constants, to avoid noisy positives. + +Available since + 2021.1 + + +Default: on. + +Package documentation: [SA4025](https://staticcheck.dev/docs/checks/#SA4025) + + +## `SA4026`: Go constants cannot express negative zero + + +In IEEE 754 floating point math, zero has a sign and can be positive +or negative. This can be useful in certain numerical code. + +Go constants, however, cannot express negative zero. This means that +the literals -0.0 and 0.0 have the same ideal value (zero) and +will both represent positive zero at runtime. + +To explicitly and reliably create a negative zero, you can use the +math.Copysign function: math.Copysign(0, -1). + +Available since + 2021.1 + + +Default: on. + +Package documentation: [SA4026](https://staticcheck.dev/docs/checks/#SA4026) + + +## `SA4027`: (*net/url.URL).Query returns a copy, modifying it doesn't change the URL + + +(*net/url.URL).Query parses the current value of net/url.URL.RawQuery +and returns it as a map of type net/url.Values. Subsequent changes to +this map will not affect the URL unless the map gets encoded and +assigned to the URL's RawQuery. + +As a consequence, the following code pattern is an expensive no-op: +u.Query().Add(key, value). + +Available since + 2021.1 + + +Default: on. + +Package documentation: [SA4027](https://staticcheck.dev/docs/checks/#SA4027) + + +## `SA4028`: x % 1 is always zero + + +Available since + 2022.1 + + +Default: on. + +Package documentation: [SA4028](https://staticcheck.dev/docs/checks/#SA4028) + + +## `SA4029`: Ineffective attempt at sorting slice + + +sort.Float64Slice, sort.IntSlice, and sort.StringSlice are +types, not functions. Doing x = sort.StringSlice(x) does nothing, +especially not sort any values. The correct usage is +sort.Sort(sort.StringSlice(x)) or sort.StringSlice(x).Sort(), +but there are more convenient helpers, namely sort.Float64s, +sort.Ints, and sort.Strings. + +Available since + 2022.1 + + +Default: on. + +Package documentation: [SA4029](https://staticcheck.dev/docs/checks/#SA4029) + + +## `SA4030`: Ineffective attempt at generating random number + + +Functions in the math/rand package that accept upper limits, such +as Intn, generate random numbers in the half-open interval [0,n). In +other words, the generated numbers will be >= 0 and < n – they +don't include n. rand.Intn(1) therefore doesn't generate 0 +or 1, it always generates 0. + +Available since + 2022.1 + + +Default: on. + +Package documentation: [SA4030](https://staticcheck.dev/docs/checks/#SA4030) + + +## `SA4031`: Checking never-nil value against nil + + +Available since + 2022.1 + + +Default: off. Enable by setting `"analyses": {"SA4031": true}`. 
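+
+For instance, comparing a value that can never be nil, such as the
+result of the new built-in, against nil is a comparison that can never
+be true:
+
+    p := new(strings.Builder)
+    if p == nil { // never true: new always returns a non-nil pointer
+        panic("unreachable")
+    }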
+ +Package documentation: [SA4031](https://staticcheck.dev/docs/checks/#SA4031) + + +## `SA4032`: Comparing runtime.GOOS or runtime.GOARCH against impossible value + + +Available since + 2024.1 + + +Default: on. + +Package documentation: [SA4032](https://staticcheck.dev/docs/checks/#SA4032) + + +## `SA5000`: Assignment to nil map + + +Available since + 2017.1 + + +Default: off. Enable by setting `"analyses": {"SA5000": true}`. + +Package documentation: [SA5000](https://staticcheck.dev/docs/checks/#SA5000) + + +## `SA5001`: Deferring Close before checking for a possible error + + +Available since + 2017.1 + + +Default: on. + +Package documentation: [SA5001](https://staticcheck.dev/docs/checks/#SA5001) + + +## `SA5002`: The empty for loop ('for {}') spins and can block the scheduler + + +Available since + 2017.1 + + +Default: off. Enable by setting `"analyses": {"SA5002": true}`. + +Package documentation: [SA5002](https://staticcheck.dev/docs/checks/#SA5002) + + +## `SA5003`: Defers in infinite loops will never execute + + +Defers are scoped to the surrounding function, not the surrounding +block. In a function that never returns, i.e. one containing an +infinite loop, defers will never execute. + +Available since + 2017.1 + + +Default: on. + +Package documentation: [SA5003](https://staticcheck.dev/docs/checks/#SA5003) + + +## `SA5004`: 'for { select { ...' with an empty default branch spins + + +Available since + 2017.1 + + +Default: on. + +Package documentation: [SA5004](https://staticcheck.dev/docs/checks/#SA5004) + + +## `SA5005`: The finalizer references the finalized object, preventing garbage collection + + +A finalizer is a function associated with an object that runs when the +garbage collector is ready to collect said object, that is when the +object is no longer referenced by anything. + +If the finalizer references the object, however, it will always remain +as the final reference to that object, preventing the garbage +collector from collecting the object. The finalizer will never run, +and the object will never be collected, leading to a memory leak. That +is why the finalizer should instead use its first argument to operate +on the object. That way, the number of references can temporarily go +to zero before the object is being passed to the finalizer. + +Available since + 2017.1 + + +Default: off. Enable by setting `"analyses": {"SA5005": true}`. + +Package documentation: [SA5005](https://staticcheck.dev/docs/checks/#SA5005) + + +## `SA5007`: Infinite recursive call + + +A function that calls itself recursively needs to have an exit +condition. Otherwise it will recurse forever, until the system runs +out of memory. + +This issue can be caused by simple bugs such as forgetting to add an +exit condition. It can also happen "on purpose". Some languages have +tail call optimization which makes certain infinite recursive calls +safe to use. Go, however, does not implement TCO, and as such a loop +should be used instead. + +Available since + 2017.1 + + +Default: off. Enable by setting `"analyses": {"SA5007": true}`. + +Package documentation: [SA5007](https://staticcheck.dev/docs/checks/#SA5007) + + +## `SA5008`: Invalid struct tag + + +Available since + 2019.2 + + +Default: on. + +Package documentation: [SA5008](https://staticcheck.dev/docs/checks/#SA5008) + + +## `SA5010`: Impossible type assertion + + +Some type assertions can be statically proven to be +impossible. 
This is the case when the method sets of both +arguments of the type assertion conflict with each other, for +example by containing the same method with different +signatures. + +The Go compiler already applies this check when asserting from an +interface value to a concrete type. If the concrete type misses +methods from the interface, or if function signatures don't match, +then the type assertion can never succeed. + +This check applies the same logic when asserting from one interface to +another. If both interface types contain the same method but with +different signatures, then the type assertion can never succeed, +either. + +Available since + 2020.1 + + +Default: off. Enable by setting `"analyses": {"SA5010": true}`. + +Package documentation: [SA5010](https://staticcheck.dev/docs/checks/#SA5010) + + +## `SA5011`: Possible nil pointer dereference + + +A pointer is being dereferenced unconditionally, while +also being checked against nil in another place. This suggests that +the pointer may be nil and dereferencing it may panic. This is +commonly a result of improperly ordered code or missing return +statements. Consider the following examples: + + func fn(x *int) { + fmt.Println(*x) + + // This nil check is equally important for the previous dereference + if x != nil { + foo(*x) + } + } + + func TestFoo(t *testing.T) { + x := compute() + if x == nil { + t.Errorf("nil pointer received") + } + + // t.Errorf does not abort the test, so if x is nil, the next line will panic. + foo(*x) + } + +Staticcheck tries to deduce which functions abort control flow. +For example, it is aware that a function will not continue +execution after a call to panic or log.Fatal. However, sometimes +this detection fails, in particular in the presence of +conditionals. Consider the following example: + + func Log(msg string, level int) { + fmt.Println(msg) + if level == levelFatal { + os.Exit(1) + } + } + + func Fatal(msg string) { + Log(msg, levelFatal) + } + + func fn(x *int) { + if x == nil { + Fatal("unexpected nil pointer") + } + fmt.Println(*x) + } + +Staticcheck will flag the dereference of x, even though it is perfectly +safe. Staticcheck is not able to deduce that a call to +Fatal will exit the program. For the time being, the easiest +workaround is to modify the definition of Fatal like so: + + func Fatal(msg string) { + Log(msg, levelFatal) + panic("unreachable") + } + +We also hard-code functions from common logging packages such as +logrus. Please file an issue if we're missing support for a +popular package. + +Available since + 2020.1 + + +Default: off. Enable by setting `"analyses": {"SA5011": true}`. + +Package documentation: [SA5011](https://staticcheck.dev/docs/checks/#SA5011) + + +## `SA5012`: Passing odd-sized slice to function expecting even size + + +Some functions that take slices as parameters expect the slices to have an even number of elements. +Often, these functions treat elements in a slice as pairs. +For example, strings.NewReplacer takes pairs of old and new strings, +and calling it with an odd number of elements would be an error. + +Available since + 2020.2 + + +Default: off. Enable by setting `"analyses": {"SA5012": true}`. + +Package documentation: [SA5012](https://staticcheck.dev/docs/checks/#SA5012) + + +## `SA6000`: Using regexp.Match or related in a loop, should use regexp.Compile + + +Available since + 2017.1 + + +Default: off. Enable by setting `"analyses": {"SA6000": true}`. 
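+
+For example, the first loop below recompiles the regular expression on
+every iteration; compiling it once outside the loop avoids the repeated
+work:
+
+    for _, line := range lines {
+        if ok, _ := regexp.MatchString(`^\d+$`, line); ok { // recompiles every iteration
+            count++
+        }
+    }
+
+    re := regexp.MustCompile(`^\d+$`)
+    for _, line := range lines {
+        if re.MatchString(line) { // reuses the compiled pattern
+            count++
+        }
+    }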
+ +Package documentation: [SA6000](https://staticcheck.dev/docs/checks/#SA6000) + + +## `SA6001`: Missing an optimization opportunity when indexing maps by byte slices + + +Map keys must be comparable, which precludes the use of byte slices. +This usually leads to using string keys and converting byte slices to +strings. + +Normally, a conversion of a byte slice to a string needs to copy the data and +causes allocations. The compiler, however, recognizes m[string(b)] and +uses the data of b directly, without copying it, because it knows that +the data can't change during the map lookup. This leads to the +counter-intuitive situation that + + k := string(b) + println(m[k]) + println(m[k]) + +will be less efficient than + + println(m[string(b)]) + println(m[string(b)]) + +because the first version needs to copy and allocate, while the second +one does not. + +For some history on this optimization, check out commit +f5f5a8b6209f84961687d993b93ea0d397f5d5bf in the Go repository. + +Available since + 2017.1 + + +Default: off. Enable by setting `"analyses": {"SA6001": true}`. + +Package documentation: [SA6001](https://staticcheck.dev/docs/checks/#SA6001) + + +## `SA6002`: Storing non-pointer values in sync.Pool allocates memory + + +A sync.Pool is used to avoid unnecessary allocations and reduce the +amount of work the garbage collector has to do. + +When passing a value that is not a pointer to a function that accepts +an interface, the value needs to be placed on the heap, which means an +additional allocation. Slices are a common thing to put in sync.Pools, +and they're structs with 3 fields (length, capacity, and a pointer to +an array). In order to avoid the extra allocation, one should store a +pointer to the slice instead. + +See the comments on https://go-review.googlesource.com/c/go/+/24371 +that discuss this problem. + +Available since + 2017.1 + + +Default: off. Enable by setting `"analyses": {"SA6002": true}`. + +Package documentation: [SA6002](https://staticcheck.dev/docs/checks/#SA6002) + + +## `SA6003`: Converting a string to a slice of runes before ranging over it + + +You may want to loop over the runes in a string. Instead of converting +the string to a slice of runes and looping over that, you can loop +over the string itself. That is, + + for _, r := range s {} + +and + + for _, r := range []rune(s) {} + +will yield the same values. The first version, however, will be faster +and avoid unnecessary memory allocations. + +Do note that if you are interested in the indices, ranging over a +string and over a slice of runes will yield different indices. The +first one yields byte offsets, while the second one yields indices in +the slice of runes. + +Available since + 2017.1 + + +Default: off. Enable by setting `"analyses": {"SA6003": true}`. + +Package documentation: [SA6003](https://staticcheck.dev/docs/checks/#SA6003) + + +## `SA6005`: Inefficient string comparison with strings.ToLower or strings.ToUpper + + +Converting two strings to the same case and comparing them like so + + if strings.ToLower(s1) == strings.ToLower(s2) { + ... + } + +is significantly more expensive than comparing them with +strings.EqualFold(s1, s2). This is due to memory usage as well as +computational complexity. + +strings.ToLower will have to allocate memory for the new strings, as +well as convert both strings fully, even if they differ on the very +first byte. strings.EqualFold, on the other hand, compares the strings +one character at a time. 
It doesn't need to create two intermediate +strings and can return as soon as the first non-matching character has +been found. + +For a more in-depth explanation of this issue, see +https://blog.digitalocean.com/how-to-efficiently-compare-strings-in-go/ + +Available since + 2019.2 + + +Default: on. + +Package documentation: [SA6005](https://staticcheck.dev/docs/checks/#SA6005) + + +## `SA6006`: Using io.WriteString to write []byte + + +Using io.WriteString to write a slice of bytes, as in + + io.WriteString(w, string(b)) + +is both unnecessary and inefficient. Converting from []byte to string +has to allocate and copy the data, and we could simply use w.Write(b) +instead. + +Available since + 2024.1 + + +Default: on. + +Package documentation: [SA6006](https://staticcheck.dev/docs/checks/#SA6006) + + +## `SA9001`: Defers in range loops may not run when you expect them to + + +Available since + 2017.1 + + +Default: off. Enable by setting `"analyses": {"SA9001": true}`. + +Package documentation: [SA9001](https://staticcheck.dev/docs/checks/#SA9001) + + +## `SA9002`: Using a non-octal os.FileMode that looks like it was meant to be in octal. + + +Available since + 2017.1 + + +Default: on. + +Package documentation: [SA9002](https://staticcheck.dev/docs/checks/#SA9002) + + +## `SA9003`: Empty body in an if or else branch + + +Available since + 2017.1, non-default + + +Default: off. Enable by setting `"analyses": {"SA9003": true}`. + +Package documentation: [SA9003](https://staticcheck.dev/docs/checks/#SA9003) + + +## `SA9004`: Only the first constant has an explicit type + + +In a constant declaration such as the following: + + const ( + First byte = 1 + Second = 2 + ) + +the constant Second does not have the same type as the constant First. +This construct shouldn't be confused with + + const ( + First byte = iota + Second + ) + +where First and Second do indeed have the same type. The type is only +passed on when no explicit value is assigned to the constant. + +When declaring enumerations with explicit values it is therefore +important not to write + + const ( + EnumFirst EnumType = 1 + EnumSecond = 2 + EnumThird = 3 + ) + +This discrepancy in types can cause various confusing behaviors and +bugs. + + +Wrong type in variable declarations + +The most obvious issue with such incorrect enumerations expresses +itself as a compile error: + + package pkg + + const ( + EnumFirst uint8 = 1 + EnumSecond = 2 + ) + + func fn(useFirst bool) { + x := EnumSecond + if useFirst { + x = EnumFirst + } + } + +fails to compile with + + ./const.go:11:5: cannot use EnumFirst (type uint8) as type int in assignment + + +Losing method sets + +A more subtle issue occurs with types that have methods and optional +interfaces. Consider the following: + + package main + + import "fmt" + + type Enum int + + func (e Enum) String() string { + return "an enum" + } + + const ( + EnumFirst Enum = 1 + EnumSecond = 2 + ) + + func main() { + fmt.Println(EnumFirst) + fmt.Println(EnumSecond) + } + +This code will output + + an enum + 2 + +as EnumSecond has no explicit type, and thus defaults to int. + +Available since + 2019.1 + + +Default: on. + +Package documentation: [SA9004](https://staticcheck.dev/docs/checks/#SA9004) + + +## `SA9005`: Trying to marshal a struct with no public fields nor custom marshaling + + +The encoding/json and encoding/xml packages only operate on exported +fields in structs, not unexported ones. It is usually an error to try +to (un)marshal structs that only consist of unexported fields. 
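+
+For example, marshaling a struct whose fields are all unexported
+silently produces an empty object:
+
+    type config struct {
+        host string // unexported: ignored by encoding/json
+        port int
+    }
+
+    b, _ := json.Marshal(config{host: "localhost", port: 8080})
+    fmt.Println(string(b)) // prints {}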
+ +This check will not flag calls involving types that define custom +marshaling behavior, e.g. via MarshalJSON methods. It will also not +flag empty structs. + +Available since + 2019.2 + + +Default: off. Enable by setting `"analyses": {"SA9005": true}`. + +Package documentation: [SA9005](https://staticcheck.dev/docs/checks/#SA9005) + + +## `SA9006`: Dubious bit shifting of a fixed size integer value + + +Bit shifting a value past its size will always clear the value. + +For instance: + + v := int8(42) + v >>= 8 + +will always result in 0. + +This check flags bit shifting operations on fixed size integer values only. +That is, int, uint and uintptr are never flagged to avoid potential false +positives in somewhat exotic but valid bit twiddling tricks: + + // Clear any value above 32 bits if integers are more than 32 bits. + func f(i int) int { + v := i >> 32 + v = v << 32 + return i-v + } + +Available since + 2020.2 + + +Default: on. + +Package documentation: [SA9006](https://staticcheck.dev/docs/checks/#SA9006) + + +## `SA9007`: Deleting a directory that shouldn't be deleted + + +It is virtually never correct to delete system directories such as +/tmp or the user's home directory. However, it can be fairly easy to +do by mistake, for example by mistakenly using os.TempDir instead +of ioutil.TempDir, or by forgetting to add a suffix to the result +of os.UserHomeDir. + +Writing + + d := os.TempDir() + defer os.RemoveAll(d) + +in your unit tests will have a devastating effect on the stability of your system. + +This check flags attempts at deleting the following directories: + +- os.TempDir +- os.UserCacheDir +- os.UserConfigDir +- os.UserHomeDir + +Available since + 2022.1 + + +Default: off. Enable by setting `"analyses": {"SA9007": true}`. + +Package documentation: [SA9007](https://staticcheck.dev/docs/checks/#SA9007) + + +## `SA9008`: else branch of a type assertion is probably not reading the right value + + +When declaring variables as part of an if statement (like in 'if +foo := ...; foo {'), the same variables will also be in the scope of +the else branch. This means that in the following example + + if x, ok := x.(int); ok { + // ... + } else { + fmt.Printf("unexpected type %T", x) + } + +x in the else branch will refer to the x from x, ok +:=; it will not refer to the x that is being type-asserted. The +result of a failed type assertion is the zero value of the type that +is being asserted to, so x in the else branch will always have the +value 0 and the type int. + +Available since + 2022.1 + + +Default: off. Enable by setting `"analyses": {"SA9008": true}`. + +Package documentation: [SA9008](https://staticcheck.dev/docs/checks/#SA9008) + + +## `SA9009`: Ineffectual Go compiler directive + + +A potential Go compiler directive was found, but is ineffectual as it begins +with whitespace. + +Available since + 2024.1 + + +Default: on. + +Package documentation: [SA9009](https://staticcheck.dev/docs/checks/#SA9009) + + +## `ST1000`: Incorrect or missing package comment + + +Packages must have a package comment that is formatted according to +the guidelines laid out in +https://go.dev/wiki/CodeReviewComments#package-comments. + +Available since + 2019.1, non-default + + +Default: off. Enable by setting `"analyses": {"ST1000": true}`. + +Package documentation: [ST1000](https://staticcheck.dev/docs/checks/#ST1000) + + +## `ST1001`: Dot imports are discouraged + + +Dot imports that aren't in external test packages are discouraged. 
+ +The dot_import_whitelist option can be used to whitelist certain +imports. + +Quoting Go Code Review Comments: + +> The import . form can be useful in tests that, due to circular +> dependencies, cannot be made part of the package being tested: +> +> package foo_test +> +> import ( +> "bar/testutil" // also imports "foo" +> . "foo" +> ) +> +> In this case, the test file cannot be in package foo because it +> uses bar/testutil, which imports foo. So we use the import . +> form to let the file pretend to be part of package foo even though +> it is not. Except for this one case, do not use import . in your +> programs. It makes the programs much harder to read because it is +> unclear whether a name like Quux is a top-level identifier in the +> current package or in an imported package. + +Available since + 2019.1 + +Options + dot_import_whitelist + + +Default: off. Enable by setting `"analyses": {"ST1001": true}`. + +Package documentation: [ST1001](https://staticcheck.dev/docs/checks/#ST1001) + + +## `ST1003`: Poorly chosen identifier + + +Identifiers, such as variable and package names, follow certain rules. + +See the following links for details: + +- https://go.dev/doc/effective_go#package-names +- https://go.dev/doc/effective_go#mixed-caps +- https://go.dev/wiki/CodeReviewComments#initialisms +- https://go.dev/wiki/CodeReviewComments#variable-names + +Available since + 2019.1, non-default + +Options + initialisms + + +Default: off. Enable by setting `"analyses": {"ST1003": true}`. + +Package documentation: [ST1003](https://staticcheck.dev/docs/checks/#ST1003) + + +## `ST1005`: Incorrectly formatted error string + + +Error strings follow a set of guidelines to ensure uniformity and good +composability. + +Quoting Go Code Review Comments: + +> Error strings should not be capitalized (unless beginning with +> proper nouns or acronyms) or end with punctuation, since they are +> usually printed following other context. That is, use +> fmt.Errorf("something bad") not fmt.Errorf("Something bad"), so +> that log.Printf("Reading %s: %v", filename, err) formats without a +> spurious capital letter mid-message. + +Available since + 2019.1 + + +Default: off. Enable by setting `"analyses": {"ST1005": true}`. + +Package documentation: [ST1005](https://staticcheck.dev/docs/checks/#ST1005) + + +## `ST1006`: Poorly chosen receiver name + + +Quoting Go Code Review Comments: + +> The name of a method's receiver should be a reflection of its +> identity; often a one or two letter abbreviation of its type +> suffices (such as "c" or "cl" for "Client"). Don't use generic +> names such as "me", "this" or "self", identifiers typical of +> object-oriented languages that place more emphasis on methods as +> opposed to functions. The name need not be as descriptive as that +> of a method argument, as its role is obvious and serves no +> documentary purpose. It can be very short as it will appear on +> almost every line of every method of the type; familiarity admits +> brevity. Be consistent, too: if you call the receiver "c" in one +> method, don't call it "cl" in another. + +Available since + 2019.1 + + +Default: off. Enable by setting `"analyses": {"ST1006": true}`. + +Package documentation: [ST1006](https://staticcheck.dev/docs/checks/#ST1006) + + +## `ST1008`: A function's error value should be its last return value + + +A function's error value should be its last return value. + +Available since + 2019.1 + + +Default: off. Enable by setting `"analyses": {"ST1008": true}`. 
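+
+For example, a signature of the form
+
+    func parse(s string) (error, int)
+
+would be flagged; the conventional form puts the error last:
+
+    func parse(s string) (int, error)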
+ +Package documentation: [ST1008](https://staticcheck.dev/docs/checks/#ST1008) + + +## `ST1011`: Poorly chosen name for variable of type time.Duration + + +time.Duration values represent an amount of time, which is represented +as a count of nanoseconds. An expression like 5 * time.Microsecond +yields the value 5000. It is therefore not appropriate to suffix a +variable of type time.Duration with any time unit, such as Msec or +Milli. + +Available since + 2019.1 + + +Default: off. Enable by setting `"analyses": {"ST1011": true}`. + +Package documentation: [ST1011](https://staticcheck.dev/docs/checks/#ST1011) + + +## `ST1012`: Poorly chosen name for error variable + + +Error variables that are part of an API should be called errFoo or +ErrFoo. + +Available since + 2019.1 + + +Default: off. Enable by setting `"analyses": {"ST1012": true}`. + +Package documentation: [ST1012](https://staticcheck.dev/docs/checks/#ST1012) + + +## `ST1013`: Should use constants for HTTP error codes, not magic numbers + + +HTTP has a tremendous number of status codes. While some of those are +well known (200, 400, 404, 500), most of them are not. The net/http +package provides constants for all status codes that are part of the +various specifications. It is recommended to use these constants +instead of hard-coding magic numbers, to vastly improve the +readability of your code. + +Available since + 2019.1 + +Options + http_status_code_whitelist + + +Default: off. Enable by setting `"analyses": {"ST1013": true}`. + +Package documentation: [ST1013](https://staticcheck.dev/docs/checks/#ST1013) + + +## `ST1015`: A switch's default case should be the first or last case + + +Available since + 2019.1 + + +Default: off. Enable by setting `"analyses": {"ST1015": true}`. + +Package documentation: [ST1015](https://staticcheck.dev/docs/checks/#ST1015) + + +## `ST1016`: Use consistent method receiver names + + +Available since + 2019.1, non-default + + +Default: off. Enable by setting `"analyses": {"ST1016": true}`. + +Package documentation: [ST1016](https://staticcheck.dev/docs/checks/#ST1016) + + +## `ST1017`: Don't use Yoda conditions + + +Yoda conditions are conditions of the kind 'if 42 == x', where the +literal is on the left side of the comparison. These are a common +idiom in languages in which assignment is an expression, to avoid bugs +of the kind 'if (x = 42)'. In Go, which doesn't allow for this kind of +bug, we prefer the more idiomatic 'if x == 42'. + +Available since + 2019.2 + + +Default: off. Enable by setting `"analyses": {"ST1017": true}`. + +Package documentation: [ST1017](https://staticcheck.dev/docs/checks/#ST1017) + + +## `ST1018`: Avoid zero-width and control characters in string literals + + +Available since + 2019.2 + + +Default: off. Enable by setting `"analyses": {"ST1018": true}`. + +Package documentation: [ST1018](https://staticcheck.dev/docs/checks/#ST1018) + + +## `ST1019`: Importing the same package multiple times + + +Go allows importing the same package multiple times, as long as +different import aliases are being used. That is, the following +bit of code is valid: + + import ( + "fmt" + fumpt "fmt" + format "fmt" + _ "fmt" + ) + +However, this is very rarely done on purpose. Usually, it is a +sign of code that got refactored, accidentally adding duplicate +import statements. It is also a rarely known feature, which may +contribute to confusion. 
+ +Do note that sometimes, this feature may be used +intentionally (see for example +https://github.com/golang/go/commit/3409ce39bfd7584523b7a8c150a310cea92d879d) +– if you want to allow this pattern in your code base, you're +advised to disable this check. + +Available since + 2020.1 + + +Default: off. Enable by setting `"analyses": {"ST1019": true}`. + +Package documentation: [ST1019](https://staticcheck.dev/docs/checks/#ST1019) + + +## `ST1020`: The documentation of an exported function should start with the function's name + + +Doc comments work best as complete sentences, which +allow a wide variety of automated presentations. The first sentence +should be a one-sentence summary that starts with the name being +declared. + +If every doc comment begins with the name of the item it describes, +you can use the doc subcommand of the go tool and run the output +through grep. + +See https://go.dev/doc/effective_go#commentary for more +information on how to write good documentation. + +Available since + 2020.1, non-default + + +Default: off. Enable by setting `"analyses": {"ST1020": true}`. + +Package documentation: [ST1020](https://staticcheck.dev/docs/checks/#ST1020) + + +## `ST1021`: The documentation of an exported type should start with type's name + + +Doc comments work best as complete sentences, which +allow a wide variety of automated presentations. The first sentence +should be a one-sentence summary that starts with the name being +declared. + +If every doc comment begins with the name of the item it describes, +you can use the doc subcommand of the go tool and run the output +through grep. + +See https://go.dev/doc/effective_go#commentary for more +information on how to write good documentation. + +Available since + 2020.1, non-default + + +Default: off. Enable by setting `"analyses": {"ST1021": true}`. + +Package documentation: [ST1021](https://staticcheck.dev/docs/checks/#ST1021) + + +## `ST1022`: The documentation of an exported variable or constant should start with variable's name + + +Doc comments work best as complete sentences, which +allow a wide variety of automated presentations. The first sentence +should be a one-sentence summary that starts with the name being +declared. + +If every doc comment begins with the name of the item it describes, +you can use the doc subcommand of the go tool and run the output +through grep. + +See https://go.dev/doc/effective_go#commentary for more +information on how to write good documentation. + +Available since + 2020.1, non-default + + +Default: off. Enable by setting `"analyses": {"ST1022": true}`. + +Package documentation: [ST1022](https://staticcheck.dev/docs/checks/#ST1022) + + +## `ST1023`: Redundant type in variable declaration + + +Available since + 2021.1, non-default + + +Default: off. Enable by setting `"analyses": {"ST1023": true}`. 
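+
+For example, the explicit type in
+
+    var timeout time.Duration = 5 * time.Second
+
+is redundant, because it is already implied by the initializer, and can
+be dropped:
+
+    var timeout = 5 * time.Second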
+
+Package documentation: [ST1023](https://staticcheck.dev/docs/checks/#ST1023)
+
 ## `appends`: check for missing values after append
 
diff --git a/gopls/doc/features/diagnostics.md b/gopls/doc/features/diagnostics.md
index 6be7a43493a..75c29d5f795 100644
--- a/gopls/doc/features/diagnostics.md
+++ b/gopls/doc/features/diagnostics.md
@@ -314,12 +314,8 @@ dorky details and deletia:
   currently: atomicalign deepequalerrors nilness sortslice unusedwrite embeddirective
 
-- **staticcheck**: four suites:
-
-     add(simple.Analyzers, nil)
-     add(staticcheck.Analyzers - SA5009, SA5011
-     add(stylecheck.Analyzers, nil)
-     add(quickfix.Analyzers, nil)
+- **staticcheck**: four suites (S=simple, SA=static analysis, QF=quickfix, ST=stylecheck).
+  Only a hand-picked subset of them is enabled by default.
 
 - **Experimental analyzers**. Gopls has some analyzers that are not
   enabled by default, because they produce too high a rate of false
diff --git a/gopls/doc/release/v0.19.0.md b/gopls/doc/release/v0.19.0.md
index 1536b036f3e..d99f9af1dfe 100644
--- a/gopls/doc/release/v0.19.0.md
+++ b/gopls/doc/release/v0.19.0.md
@@ -28,6 +28,26 @@ func (c *Counter) Dec() { c.x++ }
 Rename here to affect all methods
 ```
 
+## Many `staticcheck` analyzers are enabled by default
+
+Slightly more than half of the analyzers in the
+[Staticcheck](https://staticcheck.dev/docs/checks) suite are now
+enabled by default. This subset has been chosen for precision and
+efficiency.
+
+Previously, Staticcheck analyzers (all of them) would be run only if
+the experimental `staticcheck` boolean option was set to `true`. This
+value continues to enable the complete set, and a value of `false`
+continues to disable the complete set. Leaving the option unspecified
+enables the preferred subset of analyzers.
+
+Staticcheck analyzers, like all other analyzers, can be explicitly
+enabled or disabled using the `analyses` configuration setting; this
+setting now takes precedence over the `staticcheck` setting, so,
+regardless of which value of `staticcheck` you use (true/false/unset),
+you can make adjustments to your preferred set of analyzers.
+
+
 ## "Implementations" supports signature types
 
 The Implementations query reports the correspondence between abstract
diff --git a/gopls/doc/settings.md b/gopls/doc/settings.md
index 1f4f5746524..00415bb36f4 100644
--- a/gopls/doc/settings.md
+++ b/gopls/doc/settings.md
@@ -349,10 +349,28 @@ Default: `{}`.
 
 **This setting is experimental and may be deleted.**
 
-staticcheck enables additional analyses from staticcheck.io.
+staticcheck configures the default set of analyses from staticcheck.io.
 These analyses are documented on
 [Staticcheck's website](https://staticcheck.io/docs/checks/).
 
+The "staticcheck" option has three values:
+- false: disable all staticcheck analyzers
+- true: enable all staticcheck analyzers
+- unset: enable a subset of staticcheck analyzers
+  selected by gopls maintainers for runtime efficiency
+  and analytic precision.
+
+Regardless of this setting, individual analyzers can be
+selectively enabled or disabled using the `analyses` setting.
+
+Default: `false`.
+
+
+### `staticcheckProvided bool`
+
+**This setting is experimental and may be deleted.**
+
+
 Default: `false`.
diff --git a/gopls/internal/cache/analysis.go b/gopls/internal/cache/analysis.go index cf5518cf79f..7bf9464adb1 100644 --- a/gopls/internal/cache/analysis.go +++ b/gopls/internal/cache/analysis.go @@ -18,7 +18,6 @@ import ( "go/token" "go/types" "log" - "maps" urlpkg "net/url" "path/filepath" "reflect" @@ -127,11 +126,12 @@ func (s *Snapshot) Analyze(ctx context.Context, pkgs map[PackageID]*metadata.Pac // Filter and sort enabled root analyzers. // A disabled analyzer may still be run if required by another. - analyzers := analyzers(s.Options().Staticcheck) - toSrc := make(map[*analysis.Analyzer]*settings.Analyzer) - var enabledAnalyzers []*analysis.Analyzer // enabled subset + transitive requirements - for _, a := range analyzers { - if enabled, ok := s.Options().Analyses[a.Analyzer().Name]; enabled || !ok && a.EnabledByDefault() { + var ( + toSrc = make(map[*analysis.Analyzer]*settings.Analyzer) + enabledAnalyzers []*analysis.Analyzer // enabled subset + transitive requirements + ) + for _, a := range settings.AllAnalyzers { + if a.Enabled(s.Options()) { toSrc[a.Analyzer()] = a enabledAnalyzers = append(enabledAnalyzers, a.Analyzer()) } @@ -139,7 +139,6 @@ func (s *Snapshot) Analyze(ctx context.Context, pkgs map[PackageID]*metadata.Pac sort.Slice(enabledAnalyzers, func(i, j int) bool { return enabledAnalyzers[i].Name < enabledAnalyzers[j].Name }) - analyzers = nil // prevent accidental use enabledAnalyzers = requiredAnalyzers(enabledAnalyzers) @@ -431,14 +430,6 @@ func (s *Snapshot) Analyze(ctx context.Context, pkgs map[PackageID]*metadata.Pac return results, nil } -func analyzers(staticcheck bool) []*settings.Analyzer { - analyzers := slices.Collect(maps.Values(settings.DefaultAnalyzers)) - if staticcheck { - analyzers = slices.AppendSeq(analyzers, maps.Values(settings.StaticcheckAnalyzers)) - } - return analyzers -} - func (an *analysisNode) decrefPreds() { if an.unfinishedPreds.Add(-1) == 0 { an.summary.Actions = nil diff --git a/gopls/internal/cache/cache.go b/gopls/internal/cache/cache.go index 9f85846165f..9d6d64c9e71 100644 --- a/gopls/internal/cache/cache.go +++ b/gopls/internal/cache/cache.go @@ -105,7 +105,7 @@ type Cache struct { // our best knowledge of the current file system state. 
*memoizedFS - // modCache holds the + // modCache holds the shared goimports state for GOMODCACHE directories modCache *sharedModCache } diff --git a/gopls/internal/doc/api.json b/gopls/internal/doc/api.json index d622bb48251..8ef813d82bb 100644 --- a/gopls/internal/doc/api.json +++ b/gopls/internal/doc/api.json @@ -411,662 +411,1606 @@ "ValueType": "bool", "Keys": [ { - "Name": "\"appends\"", - "Doc": "check for missing values after append\n\nThis checker reports calls to append that pass\nno values to be appended to the slice.\n\n\ts := []string{\"a\", \"b\", \"c\"}\n\t_ = append(s)\n\nSuch calls are always no-ops and often indicate an\nunderlying mistake.", - "Default": "true", + "Name": "\"QF1001\"", + "Doc": "Apply De Morgan's law\n\nAvailable since\n 2021.1\n", + "Default": "false", "Status": "" }, { - "Name": "\"asmdecl\"", - "Doc": "report mismatches between assembly files and Go declarations", + "Name": "\"QF1002\"", + "Doc": "Convert untagged switch to tagged switch\n\nAn untagged switch that compares a single variable against a series of\nvalues can be replaced with a tagged switch.\n\nBefore:\n\n switch {\n case x == 1 || x == 2, x == 3:\n ...\n case x == 4:\n ...\n default:\n ...\n }\n\nAfter:\n\n switch x {\n case 1, 2, 3:\n ...\n case 4:\n ...\n default:\n ...\n }\n\nAvailable since\n 2021.1\n", "Default": "true", "Status": "" }, { - "Name": "\"assign\"", - "Doc": "check for useless assignments\n\nThis checker reports assignments of the form x = x or a[i] = a[i].\nThese are almost always useless, and even when they aren't they are\nusually a mistake.", + "Name": "\"QF1003\"", + "Doc": "Convert if/else-if chain to tagged switch\n\nA series of if/else-if checks comparing the same variable against\nvalues can be replaced with a tagged switch.\n\nBefore:\n\n if x == 1 || x == 2 {\n ...\n } else if x == 3 {\n ...\n } else {\n ...\n }\n\nAfter:\n\n switch x {\n case 1, 2:\n ...\n case 3:\n ...\n default:\n ...\n }\n\nAvailable since\n 2021.1\n", "Default": "true", "Status": "" }, { - "Name": "\"atomic\"", - "Doc": "check for common mistakes using the sync/atomic package\n\nThe atomic checker looks for assignment statements of the form:\n\n\tx = atomic.AddUint64(\u0026x, 1)\n\nwhich are not atomic.", + "Name": "\"QF1004\"", + "Doc": "Use strings.ReplaceAll instead of strings.Replace with n == -1\n\nAvailable since\n 2021.1\n", "Default": "true", "Status": "" }, { - "Name": "\"atomicalign\"", - "Doc": "check for non-64-bits-aligned arguments to sync/atomic functions", - "Default": "true", + "Name": "\"QF1005\"", + "Doc": "Expand call to math.Pow\n\nSome uses of math.Pow can be simplified to basic multiplication.\n\nBefore:\n\n math.Pow(x, 2)\n\nAfter:\n\n x * x\n\nAvailable since\n 2021.1\n", + "Default": "false", "Status": "" }, { - "Name": "\"bools\"", - "Doc": "check for common mistakes involving boolean operators", - "Default": "true", + "Name": "\"QF1006\"", + "Doc": "Lift if+break into loop condition\n\nBefore:\n\n for {\n if done {\n break\n }\n ...\n }\n\nAfter:\n\n for !done {\n ...\n }\n\nAvailable since\n 2021.1\n", + "Default": "false", "Status": "" }, { - "Name": "\"buildtag\"", - "Doc": "check //go:build and // +build directives", - "Default": "true", + "Name": "\"QF1007\"", + "Doc": "Merge conditional assignment into variable declaration\n\nBefore:\n\n x := false\n if someCondition {\n x = true\n }\n\nAfter:\n\n x := someCondition\n\nAvailable since\n 2021.1\n", + "Default": "false", "Status": "" }, { - "Name": "\"cgocall\"", - "Doc": "detect some violations of the 
cgo pointer passing rules\n\nCheck for invalid cgo pointer passing.\nThis looks for code that uses cgo to call C code passing values\nwhose types are almost always invalid according to the cgo pointer\nsharing rules.\nSpecifically, it warns about attempts to pass a Go chan, map, func,\nor slice to C, either directly, or via a pointer, array, or struct.", - "Default": "true", + "Name": "\"QF1008\"", + "Doc": "Omit embedded fields from selector expression\n\nAvailable since\n 2021.1\n", + "Default": "false", "Status": "" }, { - "Name": "\"composites\"", - "Doc": "check for unkeyed composite literals\n\nThis analyzer reports a diagnostic for composite literals of struct\ntypes imported from another package that do not use the field-keyed\nsyntax. Such literals are fragile because the addition of a new field\n(even if unexported) to the struct will cause compilation to fail.\n\nAs an example,\n\n\terr = \u0026net.DNSConfigError{err}\n\nshould be replaced by:\n\n\terr = \u0026net.DNSConfigError{Err: err}\n", + "Name": "\"QF1009\"", + "Doc": "Use time.Time.Equal instead of == operator\n\nAvailable since\n 2021.1\n", "Default": "true", "Status": "" }, { - "Name": "\"copylocks\"", - "Doc": "check for locks erroneously passed by value\n\nInadvertently copying a value containing a lock, such as sync.Mutex or\nsync.WaitGroup, may cause both copies to malfunction. Generally such\nvalues should be referred to through a pointer.", + "Name": "\"QF1010\"", + "Doc": "Convert slice of bytes to string when printing it\n\nAvailable since\n 2021.1\n", "Default": "true", "Status": "" }, { - "Name": "\"deepequalerrors\"", - "Doc": "check for calls of reflect.DeepEqual on error values\n\nThe deepequalerrors checker looks for calls of the form:\n\n reflect.DeepEqual(err1, err2)\n\nwhere err1 and err2 are errors. Using reflect.DeepEqual to compare\nerrors is discouraged.", - "Default": "true", + "Name": "\"QF1011\"", + "Doc": "Omit redundant type from variable declaration\n\nAvailable since\n 2021.1\n", + "Default": "false", "Status": "" }, { - "Name": "\"defers\"", - "Doc": "report common mistakes in defer statements\n\nThe defers analyzer reports a diagnostic when a defer statement would\nresult in a non-deferred call to time.Since, as experience has shown\nthat this is nearly always a mistake.\n\nFor example:\n\n\tstart := time.Now()\n\t...\n\tdefer recordLatency(time.Since(start)) // error: call to time.Since is not deferred\n\nThe correct code is:\n\n\tdefer func() { recordLatency(time.Since(start)) }()", + "Name": "\"QF1012\"", + "Doc": "Use fmt.Fprintf(x, ...) 
instead of x.Write(fmt.Sprintf(...))\n\nAvailable since\n 2022.1\n", "Default": "true", "Status": "" }, { - "Name": "\"deprecated\"", - "Doc": "check for use of deprecated identifiers\n\nThe deprecated analyzer looks for deprecated symbols and package\nimports.\n\nSee https://go.dev/wiki/Deprecated to learn about Go's convention\nfor documenting and signaling deprecated identifiers.", + "Name": "\"S1000\"", + "Doc": "Use plain channel send or receive instead of single-case select\n\nSelect statements with a single case can be replaced with a simple\nsend or receive.\n\nBefore:\n\n select {\n case x := \u003c-ch:\n fmt.Println(x)\n }\n\nAfter:\n\n x := \u003c-ch\n fmt.Println(x)\n\nAvailable since\n 2017.1\n", "Default": "true", "Status": "" }, { - "Name": "\"directive\"", - "Doc": "check Go toolchain directives such as //go:debug\n\nThis analyzer checks for problems with known Go toolchain directives\nin all Go source files in a package directory, even those excluded by\n//go:build constraints, and all non-Go source files too.\n\nFor //go:debug (see https://go.dev/doc/godebug), the analyzer checks\nthat the directives are placed only in Go source files, only above the\npackage comment, and only in package main or *_test.go files.\n\nSupport for other known directives may be added in the future.\n\nThis analyzer does not check //go:build, which is handled by the\nbuildtag analyzer.\n", + "Name": "\"S1001\"", + "Doc": "Replace for loop with call to copy\n\nUse copy() for copying elements from one slice to another. For\narrays of identical size, you can use simple assignment.\n\nBefore:\n\n for i, x := range src {\n dst[i] = x\n }\n\nAfter:\n\n copy(dst, src)\n\nAvailable since\n 2017.1\n", "Default": "true", "Status": "" }, { - "Name": "\"embed\"", - "Doc": "check //go:embed directive usage\n\nThis analyzer checks that the embed package is imported if //go:embed\ndirectives are present, providing a suggested fix to add the import if\nit is missing.\n\nThis analyzer also checks that //go:embed directives precede the\ndeclaration of a single variable.", - "Default": "true", + "Name": "\"S1002\"", + "Doc": "Omit comparison with boolean constant\n\nBefore:\n\n if x == true {}\n\nAfter:\n\n if x {}\n\nAvailable since\n 2017.1\n", + "Default": "false", "Status": "" }, { - "Name": "\"errorsas\"", - "Doc": "report passing non-pointer or non-error values to errors.As\n\nThe errorsas analysis reports calls to errors.As where the type\nof the second argument is not a pointer to a type implementing error.", + "Name": "\"S1003\"", + "Doc": "Replace call to strings.Index with strings.Contains\n\nBefore:\n\n if strings.Index(x, y) != -1 {}\n\nAfter:\n\n if strings.Contains(x, y) {}\n\nAvailable since\n 2017.1\n", "Default": "true", "Status": "" }, { - "Name": "\"fillreturns\"", - "Doc": "suggest fixes for errors due to an incorrect number of return values\n\nThis checker provides suggested fixes for type errors of the\ntype \"wrong number of return values (want %d, got %d)\". 
For example:\n\n\tfunc m() (int, string, *bool, error) {\n\t\treturn\n\t}\n\nwill turn into\n\n\tfunc m() (int, string, *bool, error) {\n\t\treturn 0, \"\", nil, nil\n\t}\n\nThis functionality is similar to https://github.com/sqs/goreturns.", + "Name": "\"S1004\"", + "Doc": "Replace call to bytes.Compare with bytes.Equal\n\nBefore:\n\n if bytes.Compare(x, y) == 0 {}\n\nAfter:\n\n if bytes.Equal(x, y) {}\n\nAvailable since\n 2017.1\n", "Default": "true", "Status": "" }, { - "Name": "\"framepointer\"", - "Doc": "report assembly that clobbers the frame pointer before saving it", - "Default": "true", + "Name": "\"S1005\"", + "Doc": "Drop unnecessary use of the blank identifier\n\nIn many cases, assigning to the blank identifier is unnecessary.\n\nBefore:\n\n for _ = range s {}\n x, _ = someMap[key]\n _ = \u003c-ch\n\nAfter:\n\n for range s{}\n x = someMap[key]\n \u003c-ch\n\nAvailable since\n 2017.1\n", + "Default": "false", "Status": "" }, { - "Name": "\"gofix\"", - "Doc": "apply fixes based on go:fix comment directives\n\nThe gofix analyzer inlines functions and constants that are marked for inlining.", - "Default": "true", + "Name": "\"S1006\"", + "Doc": "Use 'for { ... }' for infinite loops\n\nFor infinite loops, using for { ... } is the most idiomatic choice.\n\nAvailable since\n 2017.1\n", + "Default": "false", "Status": "" }, { - "Name": "\"hostport\"", - "Doc": "check format of addresses passed to net.Dial\n\nThis analyzer flags code that produce network address strings using\nfmt.Sprintf, as in this example:\n\n addr := fmt.Sprintf(\"%s:%d\", host, 12345) // \"will not work with IPv6\"\n ...\n conn, err := net.Dial(\"tcp\", addr) // \"when passed to dial here\"\n\nThe analyzer suggests a fix to use the correct approach, a call to\nnet.JoinHostPort:\n\n addr := net.JoinHostPort(host, \"12345\")\n ...\n conn, err := net.Dial(\"tcp\", addr)\n\nA similar diagnostic and fix are produced for a format string of \"%s:%s\".\n", + "Name": "\"S1007\"", + "Doc": "Simplify regular expression by using raw string literal\n\nRaw string literals use backticks instead of quotation marks and do not support\nany escape sequences. 
This means that the backslash can be used\nfreely, without the need of escaping.\n\nSince regular expressions have their own escape sequences, raw strings\ncan improve their readability.\n\nBefore:\n\n regexp.Compile(\"\\\\A(\\\\w+) profile: total \\\\d+\\\\n\\\\z\")\n\nAfter:\n\n regexp.Compile(`\\A(\\w+) profile: total \\d+\\n\\z`)\n\nAvailable since\n 2017.1\n", "Default": "true", "Status": "" }, { - "Name": "\"httpresponse\"", - "Doc": "check for mistakes using HTTP responses\n\nA common mistake when using the net/http package is to defer a function\ncall to close the http.Response Body before checking the error that\ndetermines whether the response is valid:\n\n\tresp, err := http.Head(url)\n\tdefer resp.Body.Close()\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\t// (defer statement belongs here)\n\nThis checker helps uncover latent nil dereference bugs by reporting a\ndiagnostic for such mistakes.", - "Default": "true", + "Name": "\"S1008\"", + "Doc": "Simplify returning boolean expression\n\nBefore:\n\n if \u003cexpr\u003e {\n return true\n }\n return false\n\nAfter:\n\n return \u003cexpr\u003e\n\nAvailable since\n 2017.1\n", + "Default": "false", "Status": "" }, { - "Name": "\"ifaceassert\"", - "Doc": "detect impossible interface-to-interface type assertions\n\nThis checker flags type assertions v.(T) and corresponding type-switch cases\nin which the static type V of v is an interface that cannot possibly implement\nthe target interface T. This occurs when V and T contain methods with the same\nname but different signatures. Example:\n\n\tvar v interface {\n\t\tRead()\n\t}\n\t_ = v.(io.Reader)\n\nThe Read method in v has a different signature than the Read method in\nio.Reader, so this assertion cannot succeed.", + "Name": "\"S1009\"", + "Doc": "Omit redundant nil check on slices, maps, and channels\n\nThe len function is defined for all slices, maps, and\nchannels, even nil ones, which have a length of zero. It is not necessary to\ncheck for nil before checking that their length is not zero.\n\nBefore:\n\n if x != nil \u0026\u0026 len(x) != 0 {}\n\nAfter:\n\n if len(x) != 0 {}\n\nAvailable since\n 2017.1\n", "Default": "true", "Status": "" }, { - "Name": "\"infertypeargs\"", - "Doc": "check for unnecessary type arguments in call expressions\n\nExplicit type arguments may be omitted from call expressions if they can be\ninferred from function arguments, or from other type arguments:\n\n\tfunc f[T any](T) {}\n\t\n\tfunc _() {\n\t\tf[string](\"foo\") // string could be inferred\n\t}\n", + "Name": "\"S1010\"", + "Doc": "Omit default slice index\n\nWhen slicing, the second index defaults to the length of the value,\nmaking s[n:len(s)] and s[n:] equivalent.\n\nAvailable since\n 2017.1\n", "Default": "true", "Status": "" }, { - "Name": "\"loopclosure\"", - "Doc": "check references to loop variables from within nested functions\n\nThis analyzer reports places where a function literal references the\niteration variable of an enclosing loop, and the loop calls the function\nin such a way (e.g. with go or defer) that it may outlive the loop\niteration and possibly observe the wrong value of the variable.\n\nNote: An iteration variable can only outlive a loop iteration in Go versions \u003c=1.21.\nIn Go 1.22 and later, the loop variable lifetimes changed to create a new\niteration variable per loop iteration. 
(See go.dev/issue/60078.)\n\nIn this example, all the deferred functions run after the loop has\ncompleted, so all observe the final value of v [\u003cgo1.22].\n\n\tfor _, v := range list {\n\t defer func() {\n\t use(v) // incorrect\n\t }()\n\t}\n\nOne fix is to create a new variable for each iteration of the loop:\n\n\tfor _, v := range list {\n\t v := v // new var per iteration\n\t defer func() {\n\t use(v) // ok\n\t }()\n\t}\n\nAfter Go version 1.22, the previous two for loops are equivalent\nand both are correct.\n\nThe next example uses a go statement and has a similar problem [\u003cgo1.22].\nIn addition, it has a data race because the loop updates v\nconcurrent with the goroutines accessing it.\n\n\tfor _, v := range elem {\n\t go func() {\n\t use(v) // incorrect, and a data race\n\t }()\n\t}\n\nA fix is the same as before. The checker also reports problems\nin goroutines started by golang.org/x/sync/errgroup.Group.\nA hard-to-spot variant of this form is common in parallel tests:\n\n\tfunc Test(t *testing.T) {\n\t for _, test := range tests {\n\t t.Run(test.name, func(t *testing.T) {\n\t t.Parallel()\n\t use(test) // incorrect, and a data race\n\t })\n\t }\n\t}\n\nThe t.Parallel() call causes the rest of the function to execute\nconcurrent with the loop [\u003cgo1.22].\n\nThe analyzer reports references only in the last statement,\nas it is not deep enough to understand the effects of subsequent\nstatements that might render the reference benign.\n(\"Last statement\" is defined recursively in compound\nstatements such as if, switch, and select.)\n\nSee: https://golang.org/doc/go_faq.html#closures_and_goroutines", - "Default": "true", + "Name": "\"S1011\"", + "Doc": "Use a single append to concatenate two slices\n\nBefore:\n\n for _, e := range y {\n x = append(x, e)\n }\n \n for i := range y {\n x = append(x, y[i])\n }\n \n for i := range y {\n v := y[i]\n x = append(x, v)\n }\n\nAfter:\n\n x = append(x, y...)\n x = append(x, y...)\n x = append(x, y...)\n\nAvailable since\n 2017.1\n", + "Default": "false", "Status": "" }, { - "Name": "\"lostcancel\"", - "Doc": "check cancel func returned by context.WithCancel is called\n\nThe cancellation function returned by context.WithCancel, WithTimeout,\nWithDeadline and variants such as WithCancelCause must be called,\nor the new context will remain live until its parent context is cancelled.\n(The background context is never cancelled.)", + "Name": "\"S1012\"", + "Doc": "Replace time.Now().Sub(x) with time.Since(x)\n\nThe time.Since helper has the same effect as using time.Now().Sub(x)\nbut is easier to read.\n\nBefore:\n\n time.Now().Sub(x)\n\nAfter:\n\n time.Since(x)\n\nAvailable since\n 2017.1\n", "Default": "true", "Status": "" }, { - "Name": "\"modernize\"", - "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code by using more modern features of Go and its standard\nlibrary.\n\nEach diagnostic provides a fix. Our intent is that these fixes may\nbe safely applied en masse without changing the behavior of your\nprogram. In some cases the suggested fixes are imperfect and may\nlead to (for example) unused imports or unused local variables,\ncausing build breakage. However, these problems are generally\ntrivial to fix. 
We regard any modernizer whose fix changes program\nbehavior to have a serious bug and will endeavor to fix it.\n\nTo apply all modernization fixes en masse, you can use the\nfollowing command:\n\n\t$ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -fix -test ./...\n\n(Do not use \"go get -tool\" to add gopls as a dependency of your\nmodule; gopls commands must be built from their release branch.)\n\nIf the tool warns of conflicting fixes, you may need to run it more\nthan once until it has applied all fixes cleanly. This command is\nnot an officially supported interface and may change in the future.\n\nChanges produced by this tool should be reviewed as usual before\nbeing merged. In some cases, a loop may be replaced by a simple\nfunction call, causing comments within the loop to be discarded.\nHuman judgment may be required to avoid losing comments of value.\n\nEach diagnostic reported by modernize has a specific category. (The\ncategories are listed below.) Diagnostics in some categories, such\nas \"efaceany\" (which replaces \"interface{}\" with \"any\" where it is\nsafe to do so) are particularly numerous. It may ease the burden of\ncode review to apply fixes in two passes, the first change\nconsisting only of fixes of category \"efaceany\", the second\nconsisting of all others. This can be achieved using the -category flag:\n\n\t$ modernize -category=efaceany -fix -test ./...\n\t$ modernize -category=-efaceany -fix -test ./...\n\nCategories of modernize diagnostic:\n\n - forvar: remove x := x variable declarations made unnecessary by the new semantics of loops in go1.22.\n\n - slicescontains: replace 'for i, elem := range s { if elem == needle { ...; break }'\n by a call to slices.Contains, added in go1.21.\n\n - minmax: replace an if/else conditional assignment by a call to\n the built-in min or max functions added in go1.21.\n\n - sortslice: replace sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21.\n\n - efaceany: replace interface{} by the 'any' type added in go1.18.\n\n - slicesclone: replace append([]T(nil), s...) by slices.Clone(s) or\n slices.Concat(s), added in go1.21.\n\n - mapsloop: replace a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions from\n the maps package, added in go1.21.\n\n - fmtappendf: replace []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...),\n added in go1.19.\n\n - testingcontext: replace uses of context.WithCancel in tests\n with t.Context, added in go1.24.\n\n - omitzero: replace omitempty by omitzero on structs, added in go1.24.\n\n - bloop: replace \"for i := range b.N\" or \"for range b.N\" in a\n benchmark with \"for b.Loop()\", and remove any preceding calls\n to b.StopTimer, b.StartTimer, and b.ResetTimer.\n\n - slicesdelete: replace append(s[:i], s[i+1]...) 
by\n slices.Delete(s, i, i+1), added in go1.21.\n\n - rangeint: replace a 3-clause \"for i := 0; i \u003c n; i++\" loop by\n \"for i := range n\", added in go1.22.\n\n - stringsseq: replace Split in \"for range strings.Split(...)\" by go1.24's\n more efficient SplitSeq, or Fields with FieldSeq.\n\n - stringscutprefix: replace some uses of HasPrefix followed by TrimPrefix with CutPrefix,\n added to the strings package in go1.20.\n\n - waitgroup: replace old complex usages of sync.WaitGroup by less complex WaitGroup.Go method in go1.25.", + "Name": "\"S1016\"", + "Doc": "Use a type conversion instead of manually copying struct fields\n\nTwo struct types with identical fields can be converted between each\nother. In older versions of Go, the fields had to have identical\nstruct tags. Since Go 1.8, however, struct tags are ignored during\nconversions. It is thus not necessary to manually copy every field\nindividually.\n\nBefore:\n\n var x T1\n y := T2{\n Field1: x.Field1,\n Field2: x.Field2,\n }\n\nAfter:\n\n var x T1\n y := T2(x)\n\nAvailable since\n 2017.1\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"S1017\"", + "Doc": "Replace manual trimming with strings.TrimPrefix\n\nInstead of using strings.HasPrefix and manual slicing, use the\nstrings.TrimPrefix function. If the string doesn't start with the\nprefix, the original string will be returned. Using strings.TrimPrefix\nreduces complexity, and avoids common bugs, such as off-by-one\nmistakes.\n\nBefore:\n\n if strings.HasPrefix(str, prefix) {\n str = str[len(prefix):]\n }\n\nAfter:\n\n str = strings.TrimPrefix(str, prefix)\n\nAvailable since\n 2017.1\n", "Default": "true", "Status": "" }, { - "Name": "\"nilfunc\"", - "Doc": "check for useless comparisons between functions and nil\n\nA useless comparison is one like f == nil as opposed to f() == nil.", + "Name": "\"S1018\"", + "Doc": "Use 'copy' for sliding elements\n\ncopy() permits using the same source and destination slice, even with\noverlapping ranges. This makes it ideal for sliding elements in a\nslice.\n\nBefore:\n\n for i := 0; i \u003c n; i++ {\n bs[i] = bs[offset+i]\n }\n\nAfter:\n\n copy(bs[:n], bs[offset:])\n\nAvailable since\n 2017.1\n", "Default": "true", "Status": "" }, { - "Name": "\"nilness\"", - "Doc": "check for redundant or impossible nil comparisons\n\nThe nilness checker inspects the control-flow graph of each function in\na package and reports nil pointer dereferences, degenerate nil\npointers, and panics with nil values. A degenerate comparison is of the form\nx==nil or x!=nil where x is statically known to be nil or non-nil. These are\noften a mistake, especially in control flow related to errors. Panics with nil\nvalues are checked because they are not detectable by\n\n\tif r := recover(); r != nil {\n\nThis check reports conditions such as:\n\n\tif f == nil { // impossible condition (f is a function)\n\t}\n\nand:\n\n\tp := \u0026v\n\t...\n\tif p != nil { // tautological condition\n\t}\n\nand:\n\n\tif p == nil {\n\t\tprint(*p) // nil dereference\n\t}\n\nand:\n\n\tif p == nil {\n\t\tpanic(p)\n\t}\n\nSometimes the control flow may be quite complex, making bugs hard\nto spot. In the example below, the err.Error expression is\nguaranteed to panic because, after the first return, err must be\nnil. 
The intervening loop is just a distraction.\n\n\t...\n\terr := g.Wait()\n\tif err != nil {\n\t\treturn err\n\t}\n\tpartialSuccess := false\n\tfor _, err := range errs {\n\t\tif err == nil {\n\t\t\tpartialSuccess = true\n\t\t\tbreak\n\t\t}\n\t}\n\tif partialSuccess {\n\t\treportStatus(StatusMessage{\n\t\t\tCode: code.ERROR,\n\t\t\tDetail: err.Error(), // \"nil dereference in dynamic method call\"\n\t\t})\n\t\treturn nil\n\t}\n\n...", + "Name": "\"S1019\"", + "Doc": "Simplify 'make' call by omitting redundant arguments\n\nThe 'make' function has default values for the length and capacity\narguments. For channels, the length defaults to zero, and for slices,\nthe capacity defaults to the length.\n\nAvailable since\n 2017.1\n", "Default": "true", "Status": "" }, { - "Name": "\"nonewvars\"", - "Doc": "suggested fixes for \"no new vars on left side of :=\"\n\nThis checker provides suggested fixes for type errors of the\ntype \"no new vars on left side of :=\". For example:\n\n\tz := 1\n\tz := 2\n\nwill turn into\n\n\tz := 1\n\tz = 2", + "Name": "\"S1020\"", + "Doc": "Omit redundant nil check in type assertion\n\nBefore:\n\n if _, ok := i.(T); ok \u0026\u0026 i != nil {}\n\nAfter:\n\n if _, ok := i.(T); ok {}\n\nAvailable since\n 2017.1\n", "Default": "true", "Status": "" }, { - "Name": "\"noresultvalues\"", - "Doc": "suggested fixes for unexpected return values\n\nThis checker provides suggested fixes for type errors of the\ntype \"no result values expected\" or \"too many return values\".\nFor example:\n\n\tfunc z() { return nil }\n\nwill turn into\n\n\tfunc z() { return }", + "Name": "\"S1021\"", + "Doc": "Merge variable declaration and assignment\n\nBefore:\n\n var x uint\n x = 1\n\nAfter:\n\n var x uint = 1\n\nAvailable since\n 2017.1\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"S1023\"", + "Doc": "Omit redundant control flow\n\nFunctions that have no return value do not need a return statement as\nthe final statement of the function.\n\nSwitches in Go do not have automatic fallthrough, unlike languages\nlike C. It is not necessary to have a break statement as the final\nstatement in a case block.\n\nAvailable since\n 2017.1\n", "Default": "true", "Status": "" }, { - "Name": "\"printf\"", - "Doc": "check consistency of Printf format strings and arguments\n\nThe check applies to calls of the formatting functions such as\n[fmt.Printf] and [fmt.Sprintf], as well as any detected wrappers of\nthose functions such as [log.Printf]. 
It reports a variety of\nmistakes such as syntax errors in the format string and mismatches\n(of number and type) between the verbs and their arguments.\n\nSee the documentation of the fmt package for the complete set of\nformat operators and their operand types.", + "Name": "\"S1024\"", + "Doc": "Replace x.Sub(time.Now()) with time.Until(x)\n\nThe time.Until helper has the same effect as using x.Sub(time.Now())\nbut is easier to read.\n\nBefore:\n\n x.Sub(time.Now())\n\nAfter:\n\n time.Until(x)\n\nAvailable since\n 2017.1\n", "Default": "true", "Status": "" }, { - "Name": "\"shadow\"", - "Doc": "check for possible unintended shadowing of variables\n\nThis analyzer check for shadowed variables.\nA shadowed variable is a variable declared in an inner scope\nwith the same name and type as a variable in an outer scope,\nand where the outer variable is mentioned after the inner one\nis declared.\n\n(This definition can be refined; the module generates too many\nfalse positives and is not yet enabled by default.)\n\nFor example:\n\n\tfunc BadRead(f *os.File, buf []byte) error {\n\t\tvar err error\n\t\tfor {\n\t\t\tn, err := f.Read(buf) // shadows the function variable 'err'\n\t\t\tif err != nil {\n\t\t\t\tbreak // causes return of wrong value\n\t\t\t}\n\t\t\tfoo(buf)\n\t\t}\n\t\treturn err\n\t}", + "Name": "\"S1025\"", + "Doc": "Don't use fmt.Sprintf(\"%s\", x) unnecessarily\n\nIn many instances, there are easier and more efficient ways of getting\na value's string representation. Whenever a value's underlying type is\na string already, or the type has a String method, they should be used\ndirectly.\n\nGiven the following shared definitions\n\n type T1 string\n type T2 int\n\n func (T2) String() string { return \"Hello, world\" }\n\n var x string\n var y T1\n var z T2\n\nwe can simplify\n\n fmt.Sprintf(\"%s\", x)\n fmt.Sprintf(\"%s\", y)\n fmt.Sprintf(\"%s\", z)\n\nto\n\n x\n string(y)\n z.String()\n\nAvailable since\n 2017.1\n", "Default": "false", "Status": "" }, { - "Name": "\"shift\"", - "Doc": "check for shifts that equal or exceed the width of the integer", + "Name": "\"S1028\"", + "Doc": "Simplify error construction with fmt.Errorf\n\nBefore:\n\n errors.New(fmt.Sprintf(...))\n\nAfter:\n\n fmt.Errorf(...)\n\nAvailable since\n 2017.1\n", "Default": "true", "Status": "" }, { - "Name": "\"sigchanyzer\"", - "Doc": "check for unbuffered channel of os.Signal\n\nThis checker reports call expression of the form\n\n\tsignal.Notify(c \u003c-chan os.Signal, sig ...os.Signal),\n\nwhere c is an unbuffered channel, which can be at risk of missing the signal.", - "Default": "true", + "Name": "\"S1029\"", + "Doc": "Range over the string directly\n\nRanging over a string will yield byte offsets and runes. If the offset\nisn't used, this is functionally equivalent to converting the string\nto a slice of runes and ranging over that. 
Ranging directly over the\nstring will be more performant, however, as it avoids allocating a new\nslice, the size of which depends on the length of the string.\n\nBefore:\n\n for _, r := range []rune(s) {}\n\nAfter:\n\n for _, r := range s {}\n\nAvailable since\n 2017.1\n", + "Default": "false", "Status": "" }, { - "Name": "\"simplifycompositelit\"", - "Doc": "check for composite literal simplifications\n\nAn array, slice, or map composite literal of the form:\n\n\t[]T{T{}, T{}}\n\nwill be simplified to:\n\n\t[]T{{}, {}}\n\nThis is one of the simplifications that \"gofmt -s\" applies.\n\nThis analyzer ignores generated code.", + "Name": "\"S1030\"", + "Doc": "Use bytes.Buffer.String or bytes.Buffer.Bytes\n\nbytes.Buffer has both a String and a Bytes method. It is almost never\nnecessary to use string(buf.Bytes()) or []byte(buf.String()) – simply\nuse the other method.\n\nThe only exception to this are map lookups. Due to a compiler optimization,\nm[string(buf.Bytes())] is more efficient than m[buf.String()].\n\nAvailable since\n 2017.1\n", "Default": "true", "Status": "" }, { - "Name": "\"simplifyrange\"", - "Doc": "check for range statement simplifications\n\nA range of the form:\n\n\tfor x, _ = range v {...}\n\nwill be simplified to:\n\n\tfor x = range v {...}\n\nA range of the form:\n\n\tfor _ = range v {...}\n\nwill be simplified to:\n\n\tfor range v {...}\n\nThis is one of the simplifications that \"gofmt -s\" applies.\n\nThis analyzer ignores generated code.", + "Name": "\"S1031\"", + "Doc": "Omit redundant nil check around loop\n\nYou can use range on nil slices and maps, the loop will simply never\nexecute. This makes an additional nil check around the loop\nunnecessary.\n\nBefore:\n\n if s != nil {\n for _, x := range s {\n ...\n }\n }\n\nAfter:\n\n for _, x := range s {\n ...\n }\n\nAvailable since\n 2017.1\n", "Default": "true", "Status": "" }, { - "Name": "\"simplifyslice\"", - "Doc": "check for slice simplifications\n\nA slice expression of the form:\n\n\ts[a:len(s)]\n\nwill be simplified to:\n\n\ts[a:]\n\nThis is one of the simplifications that \"gofmt -s\" applies.\n\nThis analyzer ignores generated code.", + "Name": "\"S1032\"", + "Doc": "Use sort.Ints(x), sort.Float64s(x), and sort.Strings(x)\n\nThe sort.Ints, sort.Float64s and sort.Strings functions are easier to\nread than sort.Sort(sort.IntSlice(x)), sort.Sort(sort.Float64Slice(x))\nand sort.Sort(sort.StringSlice(x)).\n\nBefore:\n\n sort.Sort(sort.StringSlice(x))\n\nAfter:\n\n sort.Strings(x)\n\nAvailable since\n 2019.1\n", "Default": "true", "Status": "" }, { - "Name": "\"slog\"", - "Doc": "check for invalid structured logging calls\n\nThe slog checker looks for calls to functions from the log/slog\npackage that take alternating key-value pairs. It reports calls\nwhere an argument in a key position is neither a string nor a\nslog.Attr, and where a final key is missing its value.\nFor example,it would report\n\n\tslog.Warn(\"message\", 11, \"k\") // slog.Warn arg \"11\" should be a string or a slog.Attr\n\nand\n\n\tslog.Info(\"message\", \"k1\", v1, \"k2\") // call to slog.Info missing a final value", + "Name": "\"S1033\"", + "Doc": "Unnecessary guard around call to 'delete'\n\nCalling delete on a nil map is a no-op.\n\nAvailable since\n 2019.2\n", "Default": "true", "Status": "" }, { - "Name": "\"sortslice\"", - "Doc": "check the argument type of sort.Slice\n\nsort.Slice requires an argument of a slice type. 
Check that\nthe interface{} value passed to sort.Slice is actually a slice.", + "Name": "\"S1034\"", + "Doc": "Use result of type assertion to simplify cases\n\nAvailable since\n 2019.2\n", "Default": "true", "Status": "" }, { - "Name": "\"stdmethods\"", - "Doc": "check signature of methods of well-known interfaces\n\nSometimes a type may be intended to satisfy an interface but may fail to\ndo so because of a mistake in its method signature.\nFor example, the result of this WriteTo method should be (int64, error),\nnot error, to satisfy io.WriterTo:\n\n\ttype myWriterTo struct{...}\n\tfunc (myWriterTo) WriteTo(w io.Writer) error { ... }\n\nThis check ensures that each method whose name matches one of several\nwell-known interface methods from the standard library has the correct\nsignature for that interface.\n\nChecked method names include:\n\n\tFormat GobEncode GobDecode MarshalJSON MarshalXML\n\tPeek ReadByte ReadFrom ReadRune Scan Seek\n\tUnmarshalJSON UnreadByte UnreadRune WriteByte\n\tWriteTo", + "Name": "\"S1035\"", + "Doc": "Redundant call to net/http.CanonicalHeaderKey in method call on net/http.Header\n\nThe methods on net/http.Header, namely Add, Del, Get\nand Set, already canonicalize the given header name.\n\nAvailable since\n 2020.1\n", "Default": "true", "Status": "" }, { - "Name": "\"stdversion\"", - "Doc": "report uses of too-new standard library symbols\n\nThe stdversion analyzer reports references to symbols in the standard\nlibrary that were introduced by a Go release higher than the one in\nforce in the referring file. (Recall that the file's Go version is\ndefined by the 'go' directive its module's go.mod file, or by a\n\"//go:build go1.X\" build tag at the top of the file.)\n\nThe analyzer does not report a diagnostic for a reference to a \"too\nnew\" field or method of a type that is itself \"too new\", as this may\nhave false positives, for example if fields or methods are accessed\nthrough a type alias that is guarded by a Go version constraint.\n", + "Name": "\"S1036\"", + "Doc": "Unnecessary guard around map access\n\nWhen accessing a map key that doesn't exist yet, one receives a zero\nvalue. Often, the zero value is a suitable value, for example when\nusing append or doing integer math.\n\nThe following\n\n if _, ok := m[\"foo\"]; ok {\n m[\"foo\"] = append(m[\"foo\"], \"bar\")\n } else {\n m[\"foo\"] = []string{\"bar\"}\n }\n\ncan be simplified to\n\n m[\"foo\"] = append(m[\"foo\"], \"bar\")\n\nand\n\n if _, ok := m2[\"k\"]; ok {\n m2[\"k\"] += 4\n } else {\n m2[\"k\"] = 4\n }\n\ncan be simplified to\n\n m[\"k\"] += 4\n\nAvailable since\n 2020.1\n", "Default": "true", "Status": "" }, { - "Name": "\"stringintconv\"", - "Doc": "check for string(int) conversions\n\nThis checker flags conversions of the form string(x) where x is an integer\n(but not byte or rune) type. Such conversions are discouraged because they\nreturn the UTF-8 representation of the Unicode code point x, and not a decimal\nstring representation of x as one might expect. Furthermore, if x denotes an\ninvalid code point, the conversion cannot be statically rejected.\n\nFor conversions that intend on using the code point, consider replacing them\nwith string(rune(x)). 
Otherwise, strconv.Itoa and its equivalents return the\nstring representation of the value in the desired base.", + "Name": "\"S1037\"", + "Doc": "Elaborate way of sleeping\n\nUsing a select statement with a single case receiving\nfrom the result of time.After is a very elaborate way of sleeping that\ncan much simpler be expressed with a simple call to time.Sleep.\n\nAvailable since\n 2020.1\n", "Default": "true", "Status": "" }, { - "Name": "\"structtag\"", - "Doc": "check that struct field tags conform to reflect.StructTag.Get\n\nAlso report certain struct tags (json, xml) used with unexported fields.", + "Name": "\"S1038\"", + "Doc": "Unnecessarily complex way of printing formatted string\n\nInstead of using fmt.Print(fmt.Sprintf(...)), one can use fmt.Printf(...).\n\nAvailable since\n 2020.1\n", "Default": "true", "Status": "" }, { - "Name": "\"testinggoroutine\"", - "Doc": "report calls to (*testing.T).Fatal from goroutines started by a test\n\nFunctions that abruptly terminate a test, such as the Fatal, Fatalf, FailNow, and\nSkip{,f,Now} methods of *testing.T, must be called from the test goroutine itself.\nThis checker detects calls to these functions that occur within a goroutine\nstarted by the test. For example:\n\n\tfunc TestFoo(t *testing.T) {\n\t go func() {\n\t t.Fatal(\"oops\") // error: (*T).Fatal called from non-test goroutine\n\t }()\n\t}", + "Name": "\"S1039\"", + "Doc": "Unnecessary use of fmt.Sprint\n\nCalling fmt.Sprint with a single string argument is unnecessary\nand identical to using the string directly.\n\nAvailable since\n 2020.1\n", "Default": "true", "Status": "" }, { - "Name": "\"tests\"", - "Doc": "check for common mistaken usages of tests and examples\n\nThe tests checker walks Test, Benchmark, Fuzzing and Example functions checking\nmalformed names, wrong signatures and examples documenting non-existent\nidentifiers.\n\nPlease see the documentation for package testing in golang.org/pkg/testing\nfor the conventions that are enforced for Tests, Benchmarks, and Examples.", + "Name": "\"S1040\"", + "Doc": "Type assertion to current type\n\nThe type assertion x.(SomeInterface), when x already has type\nSomeInterface, can only fail if x is nil. Usually, this is\nleft-over code from when x had a different type and you can safely\ndelete the type assertion. If you want to check that x is not nil,\nconsider being explicit and using an actual if x == nil comparison\ninstead of relying on the type assertion panicking.\n\nAvailable since\n 2021.1\n", "Default": "true", "Status": "" }, { - "Name": "\"timeformat\"", - "Doc": "check for calls of (time.Time).Format or time.Parse with 2006-02-01\n\nThe timeformat checker looks for time formats with the 2006-02-01 (yyyy-dd-mm)\nformat. 
Internationally, \"yyyy-dd-mm\" does not occur in common calendar date\nstandards, and so it is more likely that 2006-01-02 (yyyy-mm-dd) was intended.", - "Default": "true", + "Name": "\"SA1000\"", + "Doc": "Invalid regular expression\n\nAvailable since\n 2017.1\n", + "Default": "false", "Status": "" }, { - "Name": "\"unmarshal\"", - "Doc": "report passing non-pointer or non-interface values to unmarshal\n\nThe unmarshal analysis reports calls to functions such as json.Unmarshal\nin which the argument type is not a pointer or an interface.", + "Name": "\"SA1001\"", + "Doc": "Invalid template\n\nAvailable since\n 2017.1\n", "Default": "true", "Status": "" }, { - "Name": "\"unreachable\"", - "Doc": "check for unreachable code\n\nThe unreachable analyzer finds statements that execution can never reach\nbecause they are preceded by a return statement, a call to panic, an\ninfinite loop, or similar constructs.", - "Default": "true", + "Name": "\"SA1002\"", + "Doc": "Invalid format in time.Parse\n\nAvailable since\n 2017.1\n", + "Default": "false", "Status": "" }, { - "Name": "\"unsafeptr\"", - "Doc": "check for invalid conversions of uintptr to unsafe.Pointer\n\nThe unsafeptr analyzer reports likely incorrect uses of unsafe.Pointer\nto convert integers to pointers. A conversion from uintptr to\nunsafe.Pointer is invalid if it implies that there is a uintptr-typed\nword in memory that holds a pointer value, because that word will be\ninvisible to stack copying and to the garbage collector.", - "Default": "true", + "Name": "\"SA1003\"", + "Doc": "Unsupported argument to functions in encoding/binary\n\nThe encoding/binary package can only serialize types with known sizes.\nThis precludes the use of the int and uint types, as their sizes\ndiffer on different architectures. 
Furthermore, it doesn't support\nserializing maps, channels, strings, or functions.\n\nBefore Go 1.8, bool wasn't supported, either.\n\nAvailable since\n 2017.1\n", + "Default": "false", "Status": "" }, { - "Name": "\"unusedfunc\"", - "Doc": "check for unused functions and methods\n\nThe unusedfunc analyzer reports functions and methods that are\nnever referenced outside of their own declaration.\n\nA function is considered unused if it is unexported and not\nreferenced (except within its own declaration).\n\nA method is considered unused if it is unexported, not referenced\n(except within its own declaration), and its name does not match\nthat of any method of an interface type declared within the same\npackage.\n\nThe tool may report false positives in some situations, for\nexample:\n\n - For a declaration of an unexported function that is referenced\n from another package using the go:linkname mechanism, if the\n declaration's doc comment does not also have a go:linkname\n comment.\n\n (Such code is in any case strongly discouraged: linkname\n annotations, if they must be used at all, should be used on both\n the declaration and the alias.)\n\n - For compiler intrinsics in the \"runtime\" package that, though\n never referenced, are known to the compiler and are called\n indirectly by compiled object code.\n\n - For functions called only from assembly.\n\n - For functions called only from files whose build tags are not\n selected in the current build configuration.\n\nSee https://github.com/golang/go/issues/71686 for discussion of\nthese limitations.\n\nThe unusedfunc algorithm is not as precise as the\ngolang.org/x/tools/cmd/deadcode tool, but it has the advantage that\nit runs within the modular analysis framework, enabling near\nreal-time feedback within gopls.", + "Name": "\"SA1004\"", + "Doc": "Suspiciously small untyped constant in time.Sleep\n\nThe time.Sleep function takes a time.Duration as its only argument.\nDurations are expressed in nanoseconds. Thus, calling time.Sleep(1)\nwill sleep for 1 nanosecond. This is a common source of bugs, as sleep\nfunctions in other languages often accept seconds or milliseconds.\n\nThe time package provides constants such as time.Second to express\nlarge durations. 
These can be combined with arithmetic to express\narbitrary durations, for example 5 * time.Second for 5 seconds.\n\nIf you truly meant to sleep for a tiny amount of time, use\nn * time.Nanosecond to signal to Staticcheck that you did mean to sleep\nfor some amount of nanoseconds.\n\nAvailable since\n 2017.1\n", "Default": "true", "Status": "" }, { - "Name": "\"unusedparams\"", - "Doc": "check for unused parameters of functions\n\nThe unusedparams analyzer checks functions to see if there are\nany parameters that are not being used.\n\nTo ensure soundness, it ignores:\n - \"address-taken\" functions, that is, functions that are used as\n a value rather than being called directly; their signatures may\n be required to conform to a func type.\n - exported functions or methods, since they may be address-taken\n in another package.\n - unexported methods whose name matches an interface method\n declared in the same package, since the method's signature\n may be required to conform to the interface type.\n - functions with empty bodies, or containing just a call to panic.\n - parameters that are unnamed, or named \"_\", the blank identifier.\n\nThe analyzer suggests a fix of replacing the parameter name by \"_\",\nbut in such cases a deeper fix can be obtained by invoking the\n\"Refactor: remove unused parameter\" code action, which will\neliminate the parameter entirely, along with all corresponding\narguments at call sites, while taking care to preserve any side\neffects in the argument expressions; see\nhttps://github.com/golang/tools/releases/tag/gopls%2Fv0.14.\n\nThis analyzer ignores generated code.", + "Name": "\"SA1005\"", + "Doc": "Invalid first argument to exec.Command\n\nos/exec runs programs directly (using variants of the fork and exec\nsystem calls on Unix systems). This shouldn't be confused with running\na command in a shell. The shell will allow for features such as input\nredirection, pipes, and general scripting. The shell is also\nresponsible for splitting the user's input into a program name and its\narguments. For example, the equivalent to\n\n ls / /tmp\n\nwould be\n\n exec.Command(\"ls\", \"/\", \"/tmp\")\n\nIf you want to run a command in a shell, consider using something like\nthe following – but be aware that not all systems, particularly\nWindows, will have a /bin/sh program:\n\n exec.Command(\"/bin/sh\", \"-c\", \"ls | grep Awesome\")\n\nAvailable since\n 2017.1\n", "Default": "true", "Status": "" }, { - "Name": "\"unusedresult\"", - "Doc": "check for unused results of calls to some functions\n\nSome functions like fmt.Errorf return a result and have no side\neffects, so it is always a mistake to discard the result. Other\nfunctions may return an error that must not be ignored, or a cleanup\noperation that must be called. This analyzer reports calls to\nfunctions like these when the result of the call is ignored.\n\nThe set of functions may be controlled using flags.", - "Default": "true", + "Name": "\"SA1007\"", + "Doc": "Invalid URL in net/url.Parse\n\nAvailable since\n 2017.1\n", + "Default": "false", "Status": "" }, { - "Name": "\"unusedvariable\"", - "Doc": "check for unused variables and suggest fixes", + "Name": "\"SA1008\"", + "Doc": "Non-canonical key in http.Header map\n\nKeys in http.Header maps are canonical, meaning they follow a specific\ncombination of uppercase and lowercase letters. 
Methods such as\nhttp.Header.Add and http.Header.Del convert inputs into this canonical\nform before manipulating the map.\n\nWhen manipulating http.Header maps directly, as opposed to using the\nprovided methods, care should be taken to stick to canonical form in\norder to avoid inconsistencies. The following piece of code\ndemonstrates one such inconsistency:\n\n h := http.Header{}\n h[\"etag\"] = []string{\"1234\"}\n h.Add(\"etag\", \"5678\")\n fmt.Println(h)\n\n // Output:\n // map[Etag:[5678] etag:[1234]]\n\nThe easiest way of obtaining the canonical form of a key is to use\nhttp.CanonicalHeaderKey.\n\nAvailable since\n 2017.1\n", "Default": "true", "Status": "" }, { - "Name": "\"unusedwrite\"", - "Doc": "checks for unused writes\n\nThe analyzer reports instances of writes to struct fields and\narrays that are never read. Specifically, when a struct object\nor an array is copied, its elements are copied implicitly by\nthe compiler, and any element write to this copy does nothing\nwith the original object.\n\nFor example:\n\n\ttype T struct { x int }\n\n\tfunc f(input []T) {\n\t\tfor i, v := range input { // v is a copy\n\t\t\tv.x = i // unused write to field x\n\t\t}\n\t}\n\nAnother example is about non-pointer receiver:\n\n\ttype T struct { x int }\n\n\tfunc (t T) f() { // t is a copy\n\t\tt.x = i // unused write to field x\n\t}", - "Default": "true", + "Name": "\"SA1010\"", + "Doc": "(*regexp.Regexp).FindAll called with n == 0, which will always return zero results\n\nIf n \u003e= 0, the function returns at most n matches/submatches. To\nreturn all results, specify a negative number.\n\nAvailable since\n 2017.1\n", + "Default": "false", "Status": "" }, { - "Name": "\"waitgroup\"", - "Doc": "check for misuses of sync.WaitGroup\n\nThis analyzer detects mistaken calls to the (*sync.WaitGroup).Add\nmethod from inside a new goroutine, causing Add to race with Wait:\n\n\t// WRONG\n\tvar wg sync.WaitGroup\n\tgo func() {\n\t wg.Add(1) // \"WaitGroup.Add called from inside new goroutine\"\n\t defer wg.Done()\n\t ...\n\t}()\n\twg.Wait() // (may return prematurely before new goroutine starts)\n\nThe correct code calls Add before starting the goroutine:\n\n\t// RIGHT\n\tvar wg sync.WaitGroup\n\twg.Add(1)\n\tgo func() {\n\t\tdefer wg.Done()\n\t\t...\n\t}()\n\twg.Wait()", - "Default": "true", + "Name": "\"SA1011\"", + "Doc": "Various methods in the 'strings' package expect valid UTF-8, but invalid input is provided\n\nAvailable since\n 2017.1\n", + "Default": "false", "Status": "" }, { - "Name": "\"yield\"", - "Doc": "report calls to yield where the result is ignored\n\nAfter a yield function returns false, the caller should not call\nthe yield function again; generally the iterator should return\npromptly.\n\nThis example fails to check the result of the call to yield,\ncausing this analyzer to report a diagnostic:\n\n\tyield(1) // yield may be called again (on L2) after returning false\n\tyield(2)\n\nThe corrected code is either this:\n\n\tif yield(1) { yield(2) }\n\nor simply:\n\n\t_ = yield(1) \u0026\u0026 yield(2)\n\nIt is not always a mistake to ignore the result of yield.\nFor example, this is a valid single-element iterator:\n\n\tyield(1) // ok to ignore result\n\treturn\n\nIt is only a mistake when the yield call that returned false may be\nfollowed by another call.", + "Name": "\"SA1012\"", + "Doc": "A nil context.Context is being passed to a function, consider using context.TODO instead\n\nAvailable since\n 2017.1\n", "Default": "true", "Status": "" - } - ] - }, - "EnumValues": null, - 
"Default": "{}", - "Status": "", - "Hierarchy": "ui.diagnostic", - "DeprecationMessage": "" - }, - { - "Name": "staticcheck", - "Type": "bool", - "Doc": "staticcheck enables additional analyses from staticcheck.io.\nThese analyses are documented on\n[Staticcheck's website](https://staticcheck.io/docs/checks/).\n", - "EnumKeys": { - "ValueType": "", - "Keys": null - }, - "EnumValues": null, - "Default": "false", - "Status": "experimental", - "Hierarchy": "ui.diagnostic", - "DeprecationMessage": "" - }, - { - "Name": "annotations", - "Type": "map[enum]bool", - "Doc": "annotations specifies the various kinds of compiler\noptimization details that should be reported as diagnostics\nwhen enabled for a package by the \"Toggle compiler\noptimization details\" (`gopls.gc_details`) command.\n\n(Some users care only about one kind of annotation in their\nprofiling efforts. More importantly, in large packages, the\nnumber of annotations can sometimes overwhelm the user\ninterface and exceed the per-file diagnostic limit.)\n\nTODO(adonovan): rename this field to CompilerOptDetail.\n", - "EnumKeys": { - "ValueType": "bool", - "Keys": [ + }, { - "Name": "\"bounds\"", - "Doc": "`\"bounds\"` controls bounds checking diagnostics.\n", + "Name": "\"SA1013\"", + "Doc": "io.Seeker.Seek is being called with the whence constant as the first argument, but it should be the second\n\nAvailable since\n 2017.1\n", "Default": "true", "Status": "" }, { - "Name": "\"escape\"", - "Doc": "`\"escape\"` controls diagnostics about escape choices.\n", - "Default": "true", + "Name": "\"SA1014\"", + "Doc": "Non-pointer value passed to Unmarshal or Decode\n\nAvailable since\n 2017.1\n", + "Default": "false", "Status": "" }, { - "Name": "\"inline\"", - "Doc": "`\"inline\"` controls diagnostics about inlining choices.\n", + "Name": "\"SA1015\"", + "Doc": "Using time.Tick in a way that will leak. Consider using time.NewTicker, and only use time.Tick in tests, commands and endless functions\n\nBefore Go 1.23, time.Tickers had to be closed to be able to be garbage\ncollected. Since time.Tick doesn't make it possible to close the underlying\nticker, using it repeatedly would leak memory.\n\nGo 1.23 fixes this by allowing tickers to be collected even if they weren't closed.\n\nAvailable since\n 2017.1\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"SA1016\"", + "Doc": "Trapping a signal that cannot be trapped\n\nNot all signals can be intercepted by a process. Specifically, on\nUNIX-like systems, the syscall.SIGKILL and syscall.SIGSTOP signals are\nnever passed to the process, but instead handled directly by the\nkernel. It is therefore pointless to try and handle these signals.\n\nAvailable since\n 2017.1\n", "Default": "true", "Status": "" }, { - "Name": "\"nil\"", - "Doc": "`\"nil\"` controls nil checks.\n", + "Name": "\"SA1017\"", + "Doc": "Channels used with os/signal.Notify should be buffered\n\nThe os/signal package uses non-blocking channel sends when delivering\nsignals. If the receiving end of the channel isn't ready and the\nchannel is either unbuffered or full, the signal will be dropped. To\navoid missing signals, the channel should be buffered and of the\nappropriate size. For a channel used for notification of just one\nsignal value, a buffer of size 1 is sufficient.\n\nAvailable since\n 2017.1\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"SA1018\"", + "Doc": "strings.Replace called with n == 0, which does nothing\n\nWith n == 0, zero instances will be replaced. 
To replace all\ninstances, use a negative number, or use strings.ReplaceAll.\n\nAvailable since\n 2017.1\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"SA1020\"", + "Doc": "Using an invalid host:port pair with a net.Listen-related function\n\nAvailable since\n 2017.1\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"SA1021\"", + "Doc": "Using bytes.Equal to compare two net.IP\n\nA net.IP stores an IPv4 or IPv6 address as a slice of bytes. The\nlength of the slice for an IPv4 address, however, can be either 4 or\n16 bytes long, using different ways of representing IPv4 addresses. In\norder to correctly compare two net.IPs, the net.IP.Equal method should\nbe used, as it takes both representations into account.\n\nAvailable since\n 2017.1\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"SA1023\"", + "Doc": "Modifying the buffer in an io.Writer implementation\n\nWrite must not modify the slice data, even temporarily.\n\nAvailable since\n 2017.1\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"SA1024\"", + "Doc": "A string cutset contains duplicate characters\n\nThe strings.TrimLeft and strings.TrimRight functions take cutsets, not\nprefixes. A cutset is treated as a set of characters to remove from a\nstring. For example,\n\n strings.TrimLeft(\"42133word\", \"1234\")\n\nwill result in the string \"word\" – any characters that are 1, 2, 3 or\n4 are cut from the left of the string.\n\nIn order to remove one string from another, use strings.TrimPrefix instead.\n\nAvailable since\n 2017.1\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"SA1025\"", + "Doc": "It is not possible to use (*time.Timer).Reset's return value correctly\n\nAvailable since\n 2019.1\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"SA1026\"", + "Doc": "Cannot marshal channels or functions\n\nAvailable since\n 2019.2\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"SA1027\"", + "Doc": "Atomic access to 64-bit variable must be 64-bit aligned\n\nOn ARM, x86-32, and 32-bit MIPS, it is the caller's responsibility to\narrange for 64-bit alignment of 64-bit words accessed atomically. The\nfirst word in a variable or in an allocated struct, array, or slice\ncan be relied upon to be 64-bit aligned.\n\nYou can use the structlayout tool to inspect the alignment of fields\nin a struct.\n\nAvailable since\n 2019.2\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"SA1028\"", + "Doc": "sort.Slice can only be used on slices\n\nThe first argument of sort.Slice must be a slice.\n\nAvailable since\n 2020.1\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"SA1029\"", + "Doc": "Inappropriate key in call to context.WithValue\n\nThe provided key must be comparable and should not be\nof type string or any other built-in type to avoid collisions between\npackages using context. Users of WithValue should define their own\ntypes for keys.\n\nTo avoid allocating when assigning to an interface{},\ncontext keys often have concrete type struct{}. 
Alternatively,\nexported context key variables' static type should be a pointer or\ninterface.\n\nAvailable since\n 2020.1\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"SA1030\"", + "Doc": "Invalid argument in call to a strconv function\n\nThis check validates the format, number base and bit size arguments of\nthe various parsing and formatting functions in strconv.\n\nAvailable since\n 2021.1\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"SA1031\"", + "Doc": "Overlapping byte slices passed to an encoder\n\nIn an encoding function of the form Encode(dst, src), dst and\nsrc were found to reference the same memory. This can result in\nsrc bytes being overwritten before they are read, when the encoder\nwrites more than one byte per src byte.\n\nAvailable since\n 2024.1\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"SA1032\"", + "Doc": "Wrong order of arguments to errors.Is\n\nThe first argument of the function errors.Is is the error\nthat we have and the second argument is the error we're trying to match against.\nFor example:\n\n\tif errors.Is(err, io.EOF) { ... }\n\nThis check detects some cases where the two arguments have been swapped. It\nflags any calls where the first argument is referring to a package-level error\nvariable, such as\n\n\tif errors.Is(io.EOF, err) { /* this is wrong */ }\n\nAvailable since\n 2024.1\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"SA2001\"", + "Doc": "Empty critical section, did you mean to defer the unlock?\n\nEmpty critical sections of the kind\n\n mu.Lock()\n mu.Unlock()\n\nare very often a typo, and the following was intended instead:\n\n mu.Lock()\n defer mu.Unlock()\n\nDo note that sometimes empty critical sections can be useful, as a\nform of signaling to wait on another goroutine. Many times, there are\nsimpler ways of achieving the same effect. When that isn't the case,\nthe code should be amply commented to avoid confusion. 
Combining such\ncomments with a //lint:ignore directive can be used to suppress this\nrare false positive.\n\nAvailable since\n 2017.1\n", "Default": "true", "Status": "" - } - ] - }, - "EnumValues": null, - "Default": "{\"bounds\":true,\"escape\":true,\"inline\":true,\"nil\":true}", - "Status": "", - "Hierarchy": "ui.diagnostic", - "DeprecationMessage": "" - }, - { - "Name": "vulncheck", - "Type": "enum", - "Doc": "vulncheck enables vulnerability scanning.\n", - "EnumKeys": { - "ValueType": "", - "Keys": null - }, - "EnumValues": [ - { - "Value": "\"Imports\"", - "Doc": "`\"Imports\"`: In Imports mode, `gopls` will report vulnerabilities that affect packages\ndirectly and indirectly used by the analyzed main module.\n", - "Status": "" - }, - { - "Value": "\"Off\"", - "Doc": "`\"Off\"`: Disable vulnerability analysis.\n", - "Status": "" - } - ], - "Default": "\"Off\"", - "Status": "experimental", - "Hierarchy": "ui.diagnostic", - "DeprecationMessage": "" - }, - { - "Name": "diagnosticsDelay", - "Type": "time.Duration", - "Doc": "diagnosticsDelay controls the amount of time that gopls waits\nafter the most recent file modification before computing deep diagnostics.\nSimple diagnostics (parsing and type-checking) are always run immediately\non recently modified packages.\n\nThis option must be set to a valid duration string, for example `\"250ms\"`.\n", - "EnumKeys": { - "ValueType": "", - "Keys": null - }, - "EnumValues": null, - "Default": "\"1s\"", - "Status": "advanced", - "Hierarchy": "ui.diagnostic", - "DeprecationMessage": "" - }, - { - "Name": "diagnosticsTrigger", - "Type": "enum", - "Doc": "diagnosticsTrigger controls when to run diagnostics.\n", - "EnumKeys": { - "ValueType": "", - "Keys": null - }, - "EnumValues": [ - { - "Value": "\"Edit\"", - "Doc": "`\"Edit\"`: Trigger diagnostics on file edit and save. (default)\n", - "Status": "" - }, - { - "Value": "\"Save\"", - "Doc": "`\"Save\"`: Trigger diagnostics only on file save. Events like initial workspace load\nor configuration change will still trigger diagnostics.\n", - "Status": "" - } - ], - "Default": "\"Edit\"", - "Status": "experimental", - "Hierarchy": "ui.diagnostic", - "DeprecationMessage": "" - }, - { - "Name": "analysisProgressReporting", - "Type": "bool", - "Doc": "analysisProgressReporting controls whether gopls sends progress\nnotifications when construction of its index of analysis facts is taking a\nlong time. Cancelling these notifications will cancel the indexing task,\nthough it will restart after the next change in the workspace.\n\nWhen a package is opened for the first time and heavyweight analyses such as\nstaticcheck are enabled, it can take a while to construct the index of\nanalysis facts for all its dependencies. The index is cached in the\nfilesystem, so subsequent analysis should be faster.\n", - "EnumKeys": { - "ValueType": "", - "Keys": null - }, - "EnumValues": null, - "Default": "true", - "Status": "", - "Hierarchy": "ui.diagnostic", - "DeprecationMessage": "" - }, - { - "Name": "hints", - "Type": "map[enum]bool", - "Doc": "hints specify inlay hints that users want to see. 
A full list of hints\nthat gopls uses can be found in\n[inlayHints.md](https://github.com/golang/tools/blob/master/gopls/doc/inlayHints.md).\n", - "EnumKeys": { - "ValueType": "bool", - "Keys": [ + }, { - "Name": "\"assignVariableTypes\"", - "Doc": "`\"assignVariableTypes\"` controls inlay hints for variable types in assign statements:\n```go\n\ti/* int*/, j/* int*/ := 0, len(r)-1\n```\n", + "Name": "\"SA2002\"", + "Doc": "Called testing.T.FailNow or SkipNow in a goroutine, which isn't allowed\n\nAvailable since\n 2017.1\n", "Default": "false", "Status": "" }, { - "Name": "\"compositeLiteralFields\"", - "Doc": "`\"compositeLiteralFields\"` inlay hints for composite literal field names:\n```go\n\t{/*in: */\"Hello, world\", /*want: */\"dlrow ,olleH\"}\n```\n", + "Name": "\"SA2003\"", + "Doc": "Deferred Lock right after locking, likely meant to defer Unlock instead\n\nAvailable since\n 2017.1\n", "Default": "false", "Status": "" }, { - "Name": "\"compositeLiteralTypes\"", - "Doc": "`\"compositeLiteralTypes\"` controls inlay hints for composite literal types:\n```go\n\tfor _, c := range []struct {\n\t\tin, want string\n\t}{\n\t\t/*struct{ in string; want string }*/{\"Hello, world\", \"dlrow ,olleH\"},\n\t}\n```\n", + "Name": "\"SA3000\"", + "Doc": "TestMain doesn't call os.Exit, hiding test failures\n\nTest executables (and in turn 'go test') exit with a non-zero status\ncode if any tests failed. When specifying your own TestMain function,\nit is your responsibility to arrange for this, by calling os.Exit with\nthe correct code. The correct code is returned by (*testing.M).Run, so\nthe usual way of implementing TestMain is to end it with\nos.Exit(m.Run()).\n\nAvailable since\n 2017.1\n", + "Default": "true", + "Status": "" + }, + { + "Name": "\"SA3001\"", + "Doc": "Assigning to b.N in benchmarks distorts the results\n\nThe testing package dynamically sets b.N to improve the reliability of\nbenchmarks and uses it in computations to determine the duration of a\nsingle operation. Benchmark code must not alter b.N as this would\nfalsify results.\n\nAvailable since\n 2017.1\n", + "Default": "true", + "Status": "" + }, + { + "Name": "\"SA4000\"", + "Doc": "Binary operator has identical expressions on both sides\n\nAvailable since\n 2017.1\n", + "Default": "true", + "Status": "" + }, + { + "Name": "\"SA4001\"", + "Doc": "\u0026*x gets simplified to x, it does not copy x\n\nAvailable since\n 2017.1\n", + "Default": "true", + "Status": "" + }, + { + "Name": "\"SA4003\"", + "Doc": "Comparing unsigned values against negative values is pointless\n\nAvailable since\n 2017.1\n", + "Default": "true", + "Status": "" + }, + { + "Name": "\"SA4004\"", + "Doc": "The loop exits unconditionally after one iteration\n\nAvailable since\n 2017.1\n", + "Default": "true", + "Status": "" + }, + { + "Name": "\"SA4005\"", + "Doc": "Field assignment that will never be observed. Did you mean to use a pointer receiver?\n\nAvailable since\n 2021.1\n", "Default": "false", "Status": "" }, { - "Name": "\"constantValues\"", - "Doc": "`\"constantValues\"` controls inlay hints for constant values:\n```go\n\tconst (\n\t\tKindNone Kind = iota/* = 0*/\n\t\tKindPrint/* = 1*/\n\t\tKindPrintf/* = 2*/\n\t\tKindErrorf/* = 3*/\n\t)\n```\n", + "Name": "\"SA4006\"", + "Doc": "A value assigned to a variable is never read before being overwritten. 
Forgotten error check or dead code?\n\nAvailable since\n 2017.1\n", "Default": "false", "Status": "" }, { - "Name": "\"functionTypeParameters\"", - "Doc": "`\"functionTypeParameters\"` inlay hints for implicit type parameters on generic functions:\n```go\n\tmyFoo/*[int, string]*/(1, \"hello\")\n```\n", + "Name": "\"SA4008\"", + "Doc": "The variable in the loop condition never changes, are you incrementing the wrong variable?\n\nFor example:\n\n\tfor i := 0; i \u003c 10; j++ { ... }\n\nThis may also occur when a loop can only execute once because of unconditional\ncontrol flow that terminates the loop. For example, when a loop body contains an\nunconditional break, return, or panic:\n\n\tfunc f() {\n\t\tpanic(\"oops\")\n\t}\n\tfunc g() {\n\t\tfor i := 0; i \u003c 10; i++ {\n\t\t\t// f unconditionally calls panic, which means \"i\" is\n\t\t\t// never incremented.\n\t\t\tf()\n\t\t}\n\t}\n\nAvailable since\n 2017.1\n", "Default": "false", "Status": "" }, { - "Name": "\"parameterNames\"", - "Doc": "`\"parameterNames\"` controls inlay hints for parameter names:\n```go\n\tparseInt(/* str: */ \"123\", /* radix: */ 8)\n```\n", + "Name": "\"SA4009\"", + "Doc": "A function argument is overwritten before its first use\n\nAvailable since\n 2017.1\n", "Default": "false", "Status": "" }, { - "Name": "\"rangeVariableTypes\"", - "Doc": "`\"rangeVariableTypes\"` controls inlay hints for variable types in range statements:\n```go\n\tfor k/* int*/, v/* string*/ := range []string{} {\n\t\tfmt.Println(k, v)\n\t}\n```\n", + "Name": "\"SA4010\"", + "Doc": "The result of append will never be observed anywhere\n\nAvailable since\n 2017.1\n", "Default": "false", "Status": "" - } - ] - }, - "EnumValues": null, - "Default": "{}", - "Status": "experimental", - "Hierarchy": "ui.inlayhint", - "DeprecationMessage": "" - }, - { - "Name": "codelenses", - "Type": "map[enum]bool", - "Doc": "codelenses overrides the enabled/disabled state of each of gopls'\nsources of [Code Lenses](codelenses.md).\n\nExample Usage:\n\n```json5\n\"gopls\": {\n...\n \"codelenses\": {\n \"generate\": false, // Don't show the `go generate` lens.\n }\n...\n}\n```\n", - "EnumKeys": { - "ValueType": "bool", - "Keys": [ + }, { - "Name": "\"generate\"", - "Doc": "`\"generate\"`: Run `go generate`\n\nThis codelens source annotates any `//go:generate` comments\nwith commands to run `go generate` in this directory, on\nall directories recursively beneath this one.\n\nSee [Generating code](https://go.dev/blog/generate) for\nmore details.\n", + "Name": "\"SA4011\"", + "Doc": "Break statement with no effect. Did you mean to break out of an outer loop?\n\nAvailable since\n 2017.1\n", "Default": "true", "Status": "" }, { - "Name": "\"regenerate_cgo\"", - "Doc": "`\"regenerate_cgo\"`: Re-generate cgo declarations\n\nThis codelens source annotates an `import \"C\"` declaration\nwith a command to re-run the [cgo\ncommand](https://pkg.go.dev/cmd/cgo) to regenerate the\ncorresponding Go declarations.\n\nUse this after editing the C code in comments attached to\nthe import, or in C header files included by it.\n", + "Name": "\"SA4012\"", + "Doc": "Comparing a value against NaN even though no value is equal to NaN\n\nAvailable since\n 2017.1\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"SA4013\"", + "Doc": "Negating a boolean twice (!!b) is the same as writing b. 
This is either redundant, or a typo.\n\nAvailable since\n 2017.1\n", "Default": "true", "Status": "" }, { - "Name": "\"run_govulncheck\"", - "Doc": "`\"run_govulncheck\"`: Run govulncheck (legacy)\n\nThis codelens source annotates the `module` directive in a go.mod file\nwith a command to run Govulncheck asynchronously.\n\n[Govulncheck](https://go.dev/blog/vuln) is a static analysis tool that\ncomputes the set of functions reachable within your application, including\ndependencies; queries a database of known security vulnerabilities; and\nreports any potential problems it finds.\n", + "Name": "\"SA4014\"", + "Doc": "An if/else if chain has repeated conditions and no side-effects; if the condition didn't match the first time, it won't match the second time, either\n\nAvailable since\n 2017.1\n", + "Default": "true", + "Status": "" + }, + { + "Name": "\"SA4015\"", + "Doc": "Calling functions like math.Ceil on floats converted from integers doesn't do anything useful\n\nAvailable since\n 2017.1\n", "Default": "false", - "Status": "experimental" + "Status": "" }, { - "Name": "\"test\"", - "Doc": "`\"test\"`: Run tests and benchmarks\n\nThis codelens source annotates each `Test` and `Benchmark`\nfunction in a `*_test.go` file with a command to run it.\n\nThis source is off by default because VS Code has\na client-side custom UI for testing, and because progress\nnotifications are not a great UX for streamed test output.\nSee:\n- golang/go#67400 for a discussion of this feature.\n- https://github.com/joaotavora/eglot/discussions/1402\n for an alternative approach.\n", + "Name": "\"SA4016\"", + "Doc": "Certain bitwise operations, such as x ^ 0, do not do anything useful\n\nAvailable since\n 2017.1\n", + "Default": "true", + "Status": "" + }, + { + "Name": "\"SA4017\"", + "Doc": "Discarding the return values of a function without side effects, making the call pointless\n\nAvailable since\n 2017.1\n", "Default": "false", "Status": "" }, { - "Name": "\"tidy\"", - "Doc": "`\"tidy\"`: Tidy go.mod file\n\nThis codelens source annotates the `module` directive in a\ngo.mod file with a command to run [`go mod\ntidy`](https://go.dev/ref/mod#go-mod-tidy), which ensures\nthat the go.mod file matches the source code in the module.\n", + "Name": "\"SA4018\"", + "Doc": "Self-assignment of variables\n\nAvailable since\n 2017.1\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"SA4019\"", + "Doc": "Multiple, identical build constraints in the same file\n\nAvailable since\n 2017.1\n", "Default": "true", "Status": "" }, { - "Name": "\"upgrade_dependency\"", - "Doc": "`\"upgrade_dependency\"`: Update dependencies\n\nThis codelens source annotates the `module` directive in a\ngo.mod file with commands to:\n\n- check for available upgrades,\n- upgrade direct dependencies, and\n- upgrade all dependencies transitively.\n", + "Name": "\"SA4020\"", + "Doc": "Unreachable case clause in a type switch\n\nIn a type switch like the following\n\n type T struct{}\n func (T) Read(b []byte) (int, error) { return 0, nil }\n\n var v interface{} = T{}\n\n switch v.(type) {\n case io.Reader:\n // ...\n case T:\n // unreachable\n }\n\nthe second case clause can never be reached because T implements\nio.Reader and case clauses are evaluated in source order.\n\nAnother example:\n\n type T struct{}\n func (T) Read(b []byte) (int, error) { return 0, nil }\n func (T) Close() error { return nil }\n\n var v interface{} = T{}\n\n switch v.(type) {\n case io.Reader:\n // ...\n case io.ReadCloser:\n // unreachable\n }\n\nEven though 
T has a Close method and thus implements io.ReadCloser,\nio.Reader will always match first. The method set of io.Reader is a\nsubset of io.ReadCloser. Thus it is impossible to match the second\ncase without matching the first case.\n\n\nStructurally equivalent interfaces\n\nA special case of the previous example are structurally identical\ninterfaces. Given these declarations\n\n type T error\n type V error\n\n func doSomething() error {\n err, ok := doAnotherThing()\n if ok {\n return T(err)\n }\n\n return U(err)\n }\n\nthe following type switch will have an unreachable case clause:\n\n switch doSomething().(type) {\n case T:\n // ...\n case V:\n // unreachable\n }\n\nT will always match before V because they are structurally equivalent\nand therefore doSomething()'s return value implements both.\n\nAvailable since\n 2019.2\n", "Default": "true", "Status": "" }, { - "Name": "\"vendor\"", - "Doc": "`\"vendor\"`: Update vendor directory\n\nThis codelens source annotates the `module` directive in a\ngo.mod file with a command to run [`go mod\nvendor`](https://go.dev/ref/mod#go-mod-vendor), which\ncreates or updates the directory named `vendor` in the\nmodule root so that it contains an up-to-date copy of all\nnecessary package dependencies.\n", + "Name": "\"SA4022\"", + "Doc": "Comparing the address of a variable against nil\n\nCode such as 'if \u0026x == nil' is meaningless, because taking the address of a variable always yields a non-nil pointer.\n\nAvailable since\n 2020.1\n", "Default": "true", "Status": "" }, { - "Name": "\"vulncheck\"", - "Doc": "`\"vulncheck\"`: Run govulncheck\n\nThis codelens source annotates the `module` directive in a go.mod file\nwith a command to run govulncheck synchronously.\n\n[Govulncheck](https://go.dev/blog/vuln) is a static analysis tool that\ncomputes the set of functions reachable within your application, including\ndependencies; queries a database of known security vulnerabilities; and\nreports any potential problems it finds.\n", + "Name": "\"SA4023\"", + "Doc": "Impossible comparison of interface value with untyped nil\n\nUnder the covers, interfaces are implemented as two elements, a\ntype T and a value V. V is a concrete value such as an int,\nstruct or pointer, never an interface itself, and has type T. For\ninstance, if we store the int value 3 in an interface, the\nresulting interface value has, schematically, (T=int, V=3). The\nvalue V is also known as the interface's dynamic value, since a\ngiven interface variable might hold different values V (and\ncorresponding types T) during the execution of the program.\n\nAn interface value is nil only if the V and T are both\nunset, (T=nil, V is not set), In particular, a nil interface will\nalways hold a nil type. If we store a nil pointer of type *int\ninside an interface value, the inner type will be *int regardless\nof the value of the pointer: (T=*int, V=nil). Such an interface\nvalue will therefore be non-nil even when the pointer value V\ninside is nil.\n\nThis situation can be confusing, and arises when a nil value is\nstored inside an interface value such as an error return:\n\n func returnsError() error {\n var p *MyError = nil\n if bad() {\n p = ErrBad\n }\n return p // Will always return a non-nil error.\n }\n\nIf all goes well, the function returns a nil p, so the return\nvalue is an error interface value holding (T=*MyError, V=nil).\nThis means that if the caller compares the returned error to nil,\nit will always look as if there was an error even if nothing bad\nhappened. 
To return a proper nil error to the caller, the\nfunction must return an explicit nil:\n\n func returnsError() error {\n if bad() {\n return ErrBad\n }\n return nil\n }\n\nIt's a good idea for functions that return errors always to use\nthe error type in their signature (as we did above) rather than a\nconcrete type such as *MyError, to help guarantee the error is\ncreated correctly. As an example, os.Open returns an error even\nthough, if not nil, it's always of concrete type *os.PathError.\n\nSimilar situations to those described here can arise whenever\ninterfaces are used. Just keep in mind that if any concrete value\nhas been stored in the interface, the interface will not be nil.\nFor more information, see The Laws of\nReflection at https://golang.org/doc/articles/laws_of_reflection.html.\n\nThis text has been copied from\nhttps://golang.org/doc/faq#nil_error, licensed under the Creative\nCommons Attribution 3.0 License.\n\nAvailable since\n 2020.2\n", "Default": "false", - "Status": "experimental" - } - ] - }, - "EnumValues": null, - "Default": "{\"generate\":true,\"regenerate_cgo\":true,\"run_govulncheck\":false,\"tidy\":true,\"upgrade_dependency\":true,\"vendor\":true}", - "Status": "", - "Hierarchy": "ui", - "DeprecationMessage": "" - }, - { - "Name": "semanticTokens", - "Type": "bool", - "Doc": "semanticTokens controls whether the LSP server will send\nsemantic tokens to the client.\n", - "EnumKeys": { - "ValueType": "", - "Keys": null - }, - "EnumValues": null, - "Default": "false", - "Status": "experimental", - "Hierarchy": "ui", - "DeprecationMessage": "" - }, - { - "Name": "noSemanticString", - "Type": "bool", - "Doc": "noSemanticString turns off the sending of the semantic token 'string'\n\nDeprecated: Use SemanticTokenTypes[\"string\"] = false instead. See\ngolang/vscode-go#3632\n", - "EnumKeys": { - "ValueType": "", - "Keys": null - }, - "EnumValues": null, - "Default": "false", - "Status": "experimental", - "Hierarchy": "ui", - "DeprecationMessage": "use SemanticTokenTypes[\"string\"] = false instead. See\ngolang/vscode-go#3632\n" - }, - { - "Name": "noSemanticNumber", - "Type": "bool", - "Doc": "noSemanticNumber turns off the sending of the semantic token 'number'\n\nDeprecated: Use SemanticTokenTypes[\"number\"] = false instead. See\ngolang/vscode-go#3632.\n", - "EnumKeys": { - "ValueType": "", - "Keys": null - }, - "EnumValues": null, - "Default": "false", - "Status": "experimental", - "Hierarchy": "ui", - "DeprecationMessage": "use SemanticTokenTypes[\"number\"] = false instead. See\ngolang/vscode-go#3632.\n" - }, - { - "Name": "semanticTokenTypes", - "Type": "map[string]bool", - "Doc": "semanticTokenTypes configures the semantic token types. It allows\ndisabling types by setting each value to false.\nBy default, all types are enabled.\n", - "EnumKeys": { - "ValueType": "", - "Keys": null - }, - "EnumValues": null, - "Default": "{}", - "Status": "experimental", - "Hierarchy": "ui", - "DeprecationMessage": "" - }, - { - "Name": "semanticTokenModifiers", - "Type": "map[string]bool", - "Doc": "semanticTokenModifiers configures the semantic token modifiers. 
It allows\ndisabling modifiers by setting each value to false.\nBy default, all modifiers are enabled.\n", - "EnumKeys": { - "ValueType": "", + "Status": "" + }, + { + "Name": "\"SA4024\"", + "Doc": "Checking for impossible return value from a builtin function\n\nReturn values of the len and cap builtins cannot be negative.\n\nSee https://golang.org/pkg/builtin/#len and https://golang.org/pkg/builtin/#cap.\n\nExample:\n\n if len(slice) \u003c 0 {\n fmt.Println(\"unreachable code\")\n }\n\nAvailable since\n 2021.1\n", + "Default": "true", + "Status": "" + }, + { + "Name": "\"SA4025\"", + "Doc": "Integer division of literals that results in zero\n\nWhen dividing two integer constants, the result will\nalso be an integer. Thus, a division such as 2 / 3 results in 0.\nThis is true for all of the following examples:\n\n\t_ = 2 / 3\n\tconst _ = 2 / 3\n\tconst _ float64 = 2 / 3\n\t_ = float64(2 / 3)\n\nStaticcheck will flag such divisions if both sides of the division are\ninteger literals, as it is highly unlikely that the division was\nintended to truncate to zero. Staticcheck will not flag integer\ndivision involving named constants, to avoid noisy positives.\n\nAvailable since\n 2021.1\n", + "Default": "true", + "Status": "" + }, + { + "Name": "\"SA4026\"", + "Doc": "Go constants cannot express negative zero\n\nIn IEEE 754 floating point math, zero has a sign and can be positive\nor negative. This can be useful in certain numerical code.\n\nGo constants, however, cannot express negative zero. This means that\nthe literals -0.0 and 0.0 have the same ideal value (zero) and\nwill both represent positive zero at runtime.\n\nTo explicitly and reliably create a negative zero, you can use the\nmath.Copysign function: math.Copysign(0, -1).\n\nAvailable since\n 2021.1\n", + "Default": "true", + "Status": "" + }, + { + "Name": "\"SA4027\"", + "Doc": "(*net/url.URL).Query returns a copy, modifying it doesn't change the URL\n\n(*net/url.URL).Query parses the current value of net/url.URL.RawQuery\nand returns it as a map of type net/url.Values. Subsequent changes to\nthis map will not affect the URL unless the map gets encoded and\nassigned to the URL's RawQuery.\n\nAs a consequence, the following code pattern is an expensive no-op:\nu.Query().Add(key, value).\n\nAvailable since\n 2021.1\n", + "Default": "true", + "Status": "" + }, + { + "Name": "\"SA4028\"", + "Doc": "x % 1 is always zero\n\nAvailable since\n 2022.1\n", + "Default": "true", + "Status": "" + }, + { + "Name": "\"SA4029\"", + "Doc": "Ineffective attempt at sorting slice\n\nsort.Float64Slice, sort.IntSlice, and sort.StringSlice are\ntypes, not functions. Doing x = sort.StringSlice(x) does nothing,\nespecially not sort any values. The correct usage is\nsort.Sort(sort.StringSlice(x)) or sort.StringSlice(x).Sort(),\nbut there are more convenient helpers, namely sort.Float64s,\nsort.Ints, and sort.Strings.\n\nAvailable since\n 2022.1\n", + "Default": "true", + "Status": "" + }, + { + "Name": "\"SA4030\"", + "Doc": "Ineffective attempt at generating random number\n\nFunctions in the math/rand package that accept upper limits, such\nas Intn, generate random numbers in the half-open interval [0,n). In\nother words, the generated numbers will be \u003e= 0 and \u003c n – they\ndon't include n. 
rand.Intn(1) therefore doesn't generate 0\nor 1, it always generates 0.\n\nAvailable since\n 2022.1\n", + "Default": "true", + "Status": "" + }, + { + "Name": "\"SA4031\"", + "Doc": "Checking never-nil value against nil\n\nAvailable since\n 2022.1\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"SA4032\"", + "Doc": "Comparing runtime.GOOS or runtime.GOARCH against impossible value\n\nAvailable since\n 2024.1\n", + "Default": "true", + "Status": "" + }, + { + "Name": "\"SA5000\"", + "Doc": "Assignment to nil map\n\nAvailable since\n 2017.1\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"SA5001\"", + "Doc": "Deferring Close before checking for a possible error\n\nAvailable since\n 2017.1\n", + "Default": "true", + "Status": "" + }, + { + "Name": "\"SA5002\"", + "Doc": "The empty for loop ('for {}') spins and can block the scheduler\n\nAvailable since\n 2017.1\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"SA5003\"", + "Doc": "Defers in infinite loops will never execute\n\nDefers are scoped to the surrounding function, not the surrounding\nblock. In a function that never returns, i.e. one containing an\ninfinite loop, defers will never execute.\n\nAvailable since\n 2017.1\n", + "Default": "true", + "Status": "" + }, + { + "Name": "\"SA5004\"", + "Doc": "'for { select { ...' with an empty default branch spins\n\nAvailable since\n 2017.1\n", + "Default": "true", + "Status": "" + }, + { + "Name": "\"SA5005\"", + "Doc": "The finalizer references the finalized object, preventing garbage collection\n\nA finalizer is a function associated with an object that runs when the\ngarbage collector is ready to collect said object, that is when the\nobject is no longer referenced by anything.\n\nIf the finalizer references the object, however, it will always remain\nas the final reference to that object, preventing the garbage\ncollector from collecting the object. The finalizer will never run,\nand the object will never be collected, leading to a memory leak. That\nis why the finalizer should instead use its first argument to operate\non the object. That way, the number of references can temporarily go\nto zero before the object is being passed to the finalizer.\n\nAvailable since\n 2017.1\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"SA5007\"", + "Doc": "Infinite recursive call\n\nA function that calls itself recursively needs to have an exit\ncondition. Otherwise it will recurse forever, until the system runs\nout of memory.\n\nThis issue can be caused by simple bugs such as forgetting to add an\nexit condition. It can also happen \"on purpose\". Some languages have\ntail call optimization which makes certain infinite recursive calls\nsafe to use. Go, however, does not implement TCO, and as such a loop\nshould be used instead.\n\nAvailable since\n 2017.1\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"SA5008\"", + "Doc": "Invalid struct tag\n\nAvailable since\n 2019.2\n", + "Default": "true", + "Status": "" + }, + { + "Name": "\"SA5010\"", + "Doc": "Impossible type assertion\n\nSome type assertions can be statically proven to be\nimpossible. This is the case when the method sets of both\narguments of the type assertion conflict with each other, for\nexample by containing the same method with different\nsignatures.\n\nThe Go compiler already applies this check when asserting from an\ninterface value to a concrete type. 
If the concrete type misses\nmethods from the interface, or if function signatures don't match,\nthen the type assertion can never succeed.\n\nThis check applies the same logic when asserting from one interface to\nanother. If both interface types contain the same method but with\ndifferent signatures, then the type assertion can never succeed,\neither.\n\nAvailable since\n 2020.1\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"SA5011\"", + "Doc": "Possible nil pointer dereference\n\nA pointer is being dereferenced unconditionally, while\nalso being checked against nil in another place. This suggests that\nthe pointer may be nil and dereferencing it may panic. This is\ncommonly a result of improperly ordered code or missing return\nstatements. Consider the following examples:\n\n func fn(x *int) {\n fmt.Println(*x)\n\n // This nil check is equally important for the previous dereference\n if x != nil {\n foo(*x)\n }\n }\n\n func TestFoo(t *testing.T) {\n x := compute()\n if x == nil {\n t.Errorf(\"nil pointer received\")\n }\n\n // t.Errorf does not abort the test, so if x is nil, the next line will panic.\n foo(*x)\n }\n\nStaticcheck tries to deduce which functions abort control flow.\nFor example, it is aware that a function will not continue\nexecution after a call to panic or log.Fatal. However, sometimes\nthis detection fails, in particular in the presence of\nconditionals. Consider the following example:\n\n func Log(msg string, level int) {\n fmt.Println(msg)\n if level == levelFatal {\n os.Exit(1)\n }\n }\n\n func Fatal(msg string) {\n Log(msg, levelFatal)\n }\n\n func fn(x *int) {\n if x == nil {\n Fatal(\"unexpected nil pointer\")\n }\n fmt.Println(*x)\n }\n\nStaticcheck will flag the dereference of x, even though it is perfectly\nsafe. Staticcheck is not able to deduce that a call to\nFatal will exit the program. For the time being, the easiest\nworkaround is to modify the definition of Fatal like so:\n\n func Fatal(msg string) {\n Log(msg, levelFatal)\n panic(\"unreachable\")\n }\n\nWe also hard-code functions from common logging packages such as\nlogrus. Please file an issue if we're missing support for a\npopular package.\n\nAvailable since\n 2020.1\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"SA5012\"", + "Doc": "Passing odd-sized slice to function expecting even size\n\nSome functions that take slices as parameters expect the slices to have an even number of elements. \nOften, these functions treat elements in a slice as pairs. \nFor example, strings.NewReplacer takes pairs of old and new strings, \nand calling it with an odd number of elements would be an error.\n\nAvailable since\n 2020.2\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"SA6000\"", + "Doc": "Using regexp.Match or related in a loop, should use regexp.Compile\n\nAvailable since\n 2017.1\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"SA6001\"", + "Doc": "Missing an optimization opportunity when indexing maps by byte slices\n\nMap keys must be comparable, which precludes the use of byte slices.\nThis usually leads to using string keys and converting byte slices to\nstrings.\n\nNormally, a conversion of a byte slice to a string needs to copy the data and\ncauses allocations. The compiler, however, recognizes m[string(b)] and\nuses the data of b directly, without copying it, because it knows that\nthe data can't change during the map lookup. 
This leads to the\ncounter-intuitive situation that\n\n k := string(b)\n println(m[k])\n println(m[k])\n\nwill be less efficient than\n\n println(m[string(b)])\n println(m[string(b)])\n\nbecause the first version needs to copy and allocate, while the second\none does not.\n\nFor some history on this optimization, check out commit\nf5f5a8b6209f84961687d993b93ea0d397f5d5bf in the Go repository.\n\nAvailable since\n 2017.1\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"SA6002\"", + "Doc": "Storing non-pointer values in sync.Pool allocates memory\n\nA sync.Pool is used to avoid unnecessary allocations and reduce the\namount of work the garbage collector has to do.\n\nWhen passing a value that is not a pointer to a function that accepts\nan interface, the value needs to be placed on the heap, which means an\nadditional allocation. Slices are a common thing to put in sync.Pools,\nand they're structs with 3 fields (length, capacity, and a pointer to\nan array). In order to avoid the extra allocation, one should store a\npointer to the slice instead.\n\nSee the comments on https://go-review.googlesource.com/c/go/+/24371\nthat discuss this problem.\n\nAvailable since\n 2017.1\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"SA6003\"", + "Doc": "Converting a string to a slice of runes before ranging over it\n\nYou may want to loop over the runes in a string. Instead of converting\nthe string to a slice of runes and looping over that, you can loop\nover the string itself. That is,\n\n for _, r := range s {}\n\nand\n\n for _, r := range []rune(s) {}\n\nwill yield the same values. The first version, however, will be faster\nand avoid unnecessary memory allocations.\n\nDo note that if you are interested in the indices, ranging over a\nstring and over a slice of runes will yield different indices. The\nfirst one yields byte offsets, while the second one yields indices in\nthe slice of runes.\n\nAvailable since\n 2017.1\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"SA6005\"", + "Doc": "Inefficient string comparison with strings.ToLower or strings.ToUpper\n\nConverting two strings to the same case and comparing them like so\n\n if strings.ToLower(s1) == strings.ToLower(s2) {\n ...\n }\n\nis significantly more expensive than comparing them with\nstrings.EqualFold(s1, s2). This is due to memory usage as well as\ncomputational complexity.\n\nstrings.ToLower will have to allocate memory for the new strings, as\nwell as convert both strings fully, even if they differ on the very\nfirst byte. strings.EqualFold, on the other hand, compares the strings\none character at a time. It doesn't need to create two intermediate\nstrings and can return as soon as the first non-matching character has\nbeen found.\n\nFor a more in-depth explanation of this issue, see\nhttps://blog.digitalocean.com/how-to-efficiently-compare-strings-in-go/\n\nAvailable since\n 2019.2\n", + "Default": "true", + "Status": "" + }, + { + "Name": "\"SA6006\"", + "Doc": "Using io.WriteString to write []byte\n\nUsing io.WriteString to write a slice of bytes, as in\n\n io.WriteString(w, string(b))\n\nis both unnecessary and inefficient. 
Converting from []byte to string\nhas to allocate and copy the data, and we could simply use w.Write(b)\ninstead.\n\nAvailable since\n 2024.1\n", + "Default": "true", + "Status": "" + }, + { + "Name": "\"SA9001\"", + "Doc": "Defers in range loops may not run when you expect them to\n\nAvailable since\n 2017.1\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"SA9002\"", + "Doc": "Using a non-octal os.FileMode that looks like it was meant to be in octal.\n\nAvailable since\n 2017.1\n", + "Default": "true", + "Status": "" + }, + { + "Name": "\"SA9003\"", + "Doc": "Empty body in an if or else branch\n\nAvailable since\n 2017.1, non-default\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"SA9004\"", + "Doc": "Only the first constant has an explicit type\n\nIn a constant declaration such as the following:\n\n const (\n First byte = 1\n Second = 2\n )\n\nthe constant Second does not have the same type as the constant First.\nThis construct shouldn't be confused with\n\n const (\n First byte = iota\n Second\n )\n\nwhere First and Second do indeed have the same type. The type is only\npassed on when no explicit value is assigned to the constant.\n\nWhen declaring enumerations with explicit values it is therefore\nimportant not to write\n\n const (\n EnumFirst EnumType = 1\n EnumSecond = 2\n EnumThird = 3\n )\n\nThis discrepancy in types can cause various confusing behaviors and\nbugs.\n\n\nWrong type in variable declarations\n\nThe most obvious issue with such incorrect enumerations expresses\nitself as a compile error:\n\n package pkg\n\n const (\n EnumFirst uint8 = 1\n EnumSecond = 2\n )\n\n func fn(useFirst bool) {\n x := EnumSecond\n if useFirst {\n x = EnumFirst\n }\n }\n\nfails to compile with\n\n ./const.go:11:5: cannot use EnumFirst (type uint8) as type int in assignment\n\n\nLosing method sets\n\nA more subtle issue occurs with types that have methods and optional\ninterfaces. Consider the following:\n\n package main\n\n import \"fmt\"\n\n type Enum int\n\n func (e Enum) String() string {\n return \"an enum\"\n }\n\n const (\n EnumFirst Enum = 1\n EnumSecond = 2\n )\n\n func main() {\n fmt.Println(EnumFirst)\n fmt.Println(EnumSecond)\n }\n\nThis code will output\n\n an enum\n 2\n\nas EnumSecond has no explicit type, and thus defaults to int.\n\nAvailable since\n 2019.1\n", + "Default": "true", + "Status": "" + }, + { + "Name": "\"SA9005\"", + "Doc": "Trying to marshal a struct with no public fields nor custom marshaling\n\nThe encoding/json and encoding/xml packages only operate on exported\nfields in structs, not unexported ones. It is usually an error to try\nto (un)marshal structs that only consist of unexported fields.\n\nThis check will not flag calls involving types that define custom\nmarshaling behavior, e.g. via MarshalJSON methods. 
It will also not\nflag empty structs.\n\nAvailable since\n 2019.2\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"SA9006\"", + "Doc": "Dubious bit shifting of a fixed size integer value\n\nBit shifting a value past its size will always clear the value.\n\nFor instance:\n\n v := int8(42)\n v \u003e\u003e= 8\n\nwill always result in 0.\n\nThis check flags bit shifting operations on fixed size integer values only.\nThat is, int, uint and uintptr are never flagged to avoid potential false\npositives in somewhat exotic but valid bit twiddling tricks:\n\n // Clear any value above 32 bits if integers are more than 32 bits.\n func f(i int) int {\n v := i \u003e\u003e 32\n v = v \u003c\u003c 32\n return i-v\n }\n\nAvailable since\n 2020.2\n", + "Default": "true", + "Status": "" + }, + { + "Name": "\"SA9007\"", + "Doc": "Deleting a directory that shouldn't be deleted\n\nIt is virtually never correct to delete system directories such as\n/tmp or the user's home directory. However, it can be fairly easy to\ndo by mistake, for example by mistakenly using os.TempDir instead\nof ioutil.TempDir, or by forgetting to add a suffix to the result\nof os.UserHomeDir.\n\nWriting\n\n d := os.TempDir()\n defer os.RemoveAll(d)\n\nin your unit tests will have a devastating effect on the stability of your system.\n\nThis check flags attempts at deleting the following directories:\n\n- os.TempDir\n- os.UserCacheDir\n- os.UserConfigDir\n- os.UserHomeDir\n\nAvailable since\n 2022.1\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"SA9008\"", + "Doc": "else branch of a type assertion is probably not reading the right value\n\nWhen declaring variables as part of an if statement (like in 'if\nfoo := ...; foo {'), the same variables will also be in the scope of\nthe else branch. This means that in the following example\n\n if x, ok := x.(int); ok {\n // ...\n } else {\n fmt.Printf(\"unexpected type %T\", x)\n }\n\nx in the else branch will refer to the x from x, ok\n:=; it will not refer to the x that is being type-asserted. The\nresult of a failed type assertion is the zero value of the type that\nis being asserted to, so x in the else branch will always have the\nvalue 0 and the type int.\n\nAvailable since\n 2022.1\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"SA9009\"", + "Doc": "Ineffectual Go compiler directive\n\nA potential Go compiler directive was found, but is ineffectual as it begins\nwith whitespace.\n\nAvailable since\n 2024.1\n", + "Default": "true", + "Status": "" + }, + { + "Name": "\"ST1000\"", + "Doc": "Incorrect or missing package comment\n\nPackages must have a package comment that is formatted according to\nthe guidelines laid out in\nhttps://go.dev/wiki/CodeReviewComments#package-comments.\n\nAvailable since\n 2019.1, non-default\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"ST1001\"", + "Doc": "Dot imports are discouraged\n\nDot imports that aren't in external test packages are discouraged.\n\nThe dot_import_whitelist option can be used to whitelist certain\nimports.\n\nQuoting Go Code Review Comments:\n\n\u003e The import . form can be useful in tests that, due to circular\n\u003e dependencies, cannot be made part of the package being tested:\n\u003e \n\u003e package foo_test\n\u003e \n\u003e import (\n\u003e \"bar/testutil\" // also imports \"foo\"\n\u003e . \"foo\"\n\u003e )\n\u003e \n\u003e In this case, the test file cannot be in package foo because it\n\u003e uses bar/testutil, which imports foo. 
So we use the import .\n\u003e form to let the file pretend to be part of package foo even though\n\u003e it is not. Except for this one case, do not use import . in your\n\u003e programs. It makes the programs much harder to read because it is\n\u003e unclear whether a name like Quux is a top-level identifier in the\n\u003e current package or in an imported package.\n\nAvailable since\n 2019.1\n\nOptions\n dot_import_whitelist\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"ST1003\"", + "Doc": "Poorly chosen identifier\n\nIdentifiers, such as variable and package names, follow certain rules.\n\nSee the following links for details:\n\n- https://go.dev/doc/effective_go#package-names\n- https://go.dev/doc/effective_go#mixed-caps\n- https://go.dev/wiki/CodeReviewComments#initialisms\n- https://go.dev/wiki/CodeReviewComments#variable-names\n\nAvailable since\n 2019.1, non-default\n\nOptions\n initialisms\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"ST1005\"", + "Doc": "Incorrectly formatted error string\n\nError strings follow a set of guidelines to ensure uniformity and good\ncomposability.\n\nQuoting Go Code Review Comments:\n\n\u003e Error strings should not be capitalized (unless beginning with\n\u003e proper nouns or acronyms) or end with punctuation, since they are\n\u003e usually printed following other context. That is, use\n\u003e fmt.Errorf(\"something bad\") not fmt.Errorf(\"Something bad\"), so\n\u003e that log.Printf(\"Reading %s: %v\", filename, err) formats without a\n\u003e spurious capital letter mid-message.\n\nAvailable since\n 2019.1\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"ST1006\"", + "Doc": "Poorly chosen receiver name\n\nQuoting Go Code Review Comments:\n\n\u003e The name of a method's receiver should be a reflection of its\n\u003e identity; often a one or two letter abbreviation of its type\n\u003e suffices (such as \"c\" or \"cl\" for \"Client\"). Don't use generic\n\u003e names such as \"me\", \"this\" or \"self\", identifiers typical of\n\u003e object-oriented languages that place more emphasis on methods as\n\u003e opposed to functions. The name need not be as descriptive as that\n\u003e of a method argument, as its role is obvious and serves no\n\u003e documentary purpose. It can be very short as it will appear on\n\u003e almost every line of every method of the type; familiarity admits\n\u003e brevity. Be consistent, too: if you call the receiver \"c\" in one\n\u003e method, don't call it \"cl\" in another.\n\nAvailable since\n 2019.1\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"ST1008\"", + "Doc": "A function's error value should be its last return value\n\nA function's error value should be its last return value.\n\nAvailable since\n 2019.1\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"ST1011\"", + "Doc": "Poorly chosen name for variable of type time.Duration\n\ntime.Duration values represent an amount of time, which is represented\nas a count of nanoseconds. An expression like 5 * time.Microsecond\nyields the value 5000. 
It is therefore not appropriate to suffix a\nvariable of type time.Duration with any time unit, such as Msec or\nMilli.\n\nAvailable since\n 2019.1\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"ST1012\"", + "Doc": "Poorly chosen name for error variable\n\nError variables that are part of an API should be called errFoo or\nErrFoo.\n\nAvailable since\n 2019.1\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"ST1013\"", + "Doc": "Should use constants for HTTP error codes, not magic numbers\n\nHTTP has a tremendous number of status codes. While some of those are\nwell known (200, 400, 404, 500), most of them are not. The net/http\npackage provides constants for all status codes that are part of the\nvarious specifications. It is recommended to use these constants\ninstead of hard-coding magic numbers, to vastly improve the\nreadability of your code.\n\nAvailable since\n 2019.1\n\nOptions\n http_status_code_whitelist\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"ST1015\"", + "Doc": "A switch's default case should be the first or last case\n\nAvailable since\n 2019.1\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"ST1016\"", + "Doc": "Use consistent method receiver names\n\nAvailable since\n 2019.1, non-default\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"ST1017\"", + "Doc": "Don't use Yoda conditions\n\nYoda conditions are conditions of the kind 'if 42 == x', where the\nliteral is on the left side of the comparison. These are a common\nidiom in languages in which assignment is an expression, to avoid bugs\nof the kind 'if (x = 42)'. In Go, which doesn't allow for this kind of\nbug, we prefer the more idiomatic 'if x == 42'.\n\nAvailable since\n 2019.2\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"ST1018\"", + "Doc": "Avoid zero-width and control characters in string literals\n\nAvailable since\n 2019.2\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"ST1019\"", + "Doc": "Importing the same package multiple times\n\nGo allows importing the same package multiple times, as long as\ndifferent import aliases are being used. That is, the following\nbit of code is valid:\n\n import (\n \"fmt\"\n fumpt \"fmt\"\n format \"fmt\"\n _ \"fmt\"\n )\n\nHowever, this is very rarely done on purpose. Usually, it is a\nsign of code that got refactored, accidentally adding duplicate\nimport statements. It is also a rarely known feature, which may\ncontribute to confusion.\n\nDo note that sometimes, this feature may be used\nintentionally (see for example\nhttps://github.com/golang/go/commit/3409ce39bfd7584523b7a8c150a310cea92d879d)\n– if you want to allow this pattern in your code base, you're\nadvised to disable this check.\n\nAvailable since\n 2020.1\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"ST1020\"", + "Doc": "The documentation of an exported function should start with the function's name\n\nDoc comments work best as complete sentences, which\nallow a wide variety of automated presentations. 
The first sentence\nshould be a one-sentence summary that starts with the name being\ndeclared.\n\nIf every doc comment begins with the name of the item it describes,\nyou can use the doc subcommand of the go tool and run the output\nthrough grep.\n\nSee https://go.dev/doc/effective_go#commentary for more\ninformation on how to write good documentation.\n\nAvailable since\n 2020.1, non-default\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"ST1021\"", + "Doc": "The documentation of an exported type should start with type's name\n\nDoc comments work best as complete sentences, which\nallow a wide variety of automated presentations. The first sentence\nshould be a one-sentence summary that starts with the name being\ndeclared.\n\nIf every doc comment begins with the name of the item it describes,\nyou can use the doc subcommand of the go tool and run the output\nthrough grep.\n\nSee https://go.dev/doc/effective_go#commentary for more\ninformation on how to write good documentation.\n\nAvailable since\n 2020.1, non-default\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"ST1022\"", + "Doc": "The documentation of an exported variable or constant should start with variable's name\n\nDoc comments work best as complete sentences, which\nallow a wide variety of automated presentations. The first sentence\nshould be a one-sentence summary that starts with the name being\ndeclared.\n\nIf every doc comment begins with the name of the item it describes,\nyou can use the doc subcommand of the go tool and run the output\nthrough grep.\n\nSee https://go.dev/doc/effective_go#commentary for more\ninformation on how to write good documentation.\n\nAvailable since\n 2020.1, non-default\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"ST1023\"", + "Doc": "Redundant type in variable declaration\n\nAvailable since\n 2021.1, non-default\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"appends\"", + "Doc": "check for missing values after append\n\nThis checker reports calls to append that pass\nno values to be appended to the slice.\n\n\ts := []string{\"a\", \"b\", \"c\"}\n\t_ = append(s)\n\nSuch calls are always no-ops and often indicate an\nunderlying mistake.", + "Default": "true", + "Status": "" + }, + { + "Name": "\"asmdecl\"", + "Doc": "report mismatches between assembly files and Go declarations", + "Default": "true", + "Status": "" + }, + { + "Name": "\"assign\"", + "Doc": "check for useless assignments\n\nThis checker reports assignments of the form x = x or a[i] = a[i].\nThese are almost always useless, and even when they aren't they are\nusually a mistake.", + "Default": "true", + "Status": "" + }, + { + "Name": "\"atomic\"", + "Doc": "check for common mistakes using the sync/atomic package\n\nThe atomic checker looks for assignment statements of the form:\n\n\tx = atomic.AddUint64(\u0026x, 1)\n\nwhich are not atomic.", + "Default": "true", + "Status": "" + }, + { + "Name": "\"atomicalign\"", + "Doc": "check for non-64-bits-aligned arguments to sync/atomic functions", + "Default": "true", + "Status": "" + }, + { + "Name": "\"bools\"", + "Doc": "check for common mistakes involving boolean operators", + "Default": "true", + "Status": "" + }, + { + "Name": "\"buildtag\"", + "Doc": "check //go:build and // +build directives", + "Default": "true", + "Status": "" + }, + { + "Name": "\"cgocall\"", + "Doc": "detect some violations of the cgo pointer passing rules\n\nCheck for invalid cgo pointer passing.\nThis looks for code that uses cgo to call C code 
passing values\nwhose types are almost always invalid according to the cgo pointer\nsharing rules.\nSpecifically, it warns about attempts to pass a Go chan, map, func,\nor slice to C, either directly, or via a pointer, array, or struct.", + "Default": "true", + "Status": "" + }, + { + "Name": "\"composites\"", + "Doc": "check for unkeyed composite literals\n\nThis analyzer reports a diagnostic for composite literals of struct\ntypes imported from another package that do not use the field-keyed\nsyntax. Such literals are fragile because the addition of a new field\n(even if unexported) to the struct will cause compilation to fail.\n\nAs an example,\n\n\terr = \u0026net.DNSConfigError{err}\n\nshould be replaced by:\n\n\terr = \u0026net.DNSConfigError{Err: err}\n", + "Default": "true", + "Status": "" + }, + { + "Name": "\"copylocks\"", + "Doc": "check for locks erroneously passed by value\n\nInadvertently copying a value containing a lock, such as sync.Mutex or\nsync.WaitGroup, may cause both copies to malfunction. Generally such\nvalues should be referred to through a pointer.", + "Default": "true", + "Status": "" + }, + { + "Name": "\"deepequalerrors\"", + "Doc": "check for calls of reflect.DeepEqual on error values\n\nThe deepequalerrors checker looks for calls of the form:\n\n reflect.DeepEqual(err1, err2)\n\nwhere err1 and err2 are errors. Using reflect.DeepEqual to compare\nerrors is discouraged.", + "Default": "true", + "Status": "" + }, + { + "Name": "\"defers\"", + "Doc": "report common mistakes in defer statements\n\nThe defers analyzer reports a diagnostic when a defer statement would\nresult in a non-deferred call to time.Since, as experience has shown\nthat this is nearly always a mistake.\n\nFor example:\n\n\tstart := time.Now()\n\t...\n\tdefer recordLatency(time.Since(start)) // error: call to time.Since is not deferred\n\nThe correct code is:\n\n\tdefer func() { recordLatency(time.Since(start)) }()", + "Default": "true", + "Status": "" + }, + { + "Name": "\"deprecated\"", + "Doc": "check for use of deprecated identifiers\n\nThe deprecated analyzer looks for deprecated symbols and package\nimports.\n\nSee https://go.dev/wiki/Deprecated to learn about Go's convention\nfor documenting and signaling deprecated identifiers.", + "Default": "true", + "Status": "" + }, + { + "Name": "\"directive\"", + "Doc": "check Go toolchain directives such as //go:debug\n\nThis analyzer checks for problems with known Go toolchain directives\nin all Go source files in a package directory, even those excluded by\n//go:build constraints, and all non-Go source files too.\n\nFor //go:debug (see https://go.dev/doc/godebug), the analyzer checks\nthat the directives are placed only in Go source files, only above the\npackage comment, and only in package main or *_test.go files.\n\nSupport for other known directives may be added in the future.\n\nThis analyzer does not check //go:build, which is handled by the\nbuildtag analyzer.\n", + "Default": "true", + "Status": "" + }, + { + "Name": "\"embed\"", + "Doc": "check //go:embed directive usage\n\nThis analyzer checks that the embed package is imported if //go:embed\ndirectives are present, providing a suggested fix to add the import if\nit is missing.\n\nThis analyzer also checks that //go:embed directives precede the\ndeclaration of a single variable.", + "Default": "true", + "Status": "" + }, + { + "Name": "\"errorsas\"", + "Doc": "report passing non-pointer or non-error values to errors.As\n\nThe errorsas analysis reports calls to errors.As where the 
type\nof the second argument is not a pointer to a type implementing error.", + "Default": "true", + "Status": "" + }, + { + "Name": "\"fillreturns\"", + "Doc": "suggest fixes for errors due to an incorrect number of return values\n\nThis checker provides suggested fixes for type errors of the\ntype \"wrong number of return values (want %d, got %d)\". For example:\n\n\tfunc m() (int, string, *bool, error) {\n\t\treturn\n\t}\n\nwill turn into\n\n\tfunc m() (int, string, *bool, error) {\n\t\treturn 0, \"\", nil, nil\n\t}\n\nThis functionality is similar to https://github.com/sqs/goreturns.", + "Default": "true", + "Status": "" + }, + { + "Name": "\"framepointer\"", + "Doc": "report assembly that clobbers the frame pointer before saving it", + "Default": "true", + "Status": "" + }, + { + "Name": "\"gofix\"", + "Doc": "apply fixes based on go:fix comment directives\n\nThe gofix analyzer inlines functions and constants that are marked for inlining.", + "Default": "true", + "Status": "" + }, + { + "Name": "\"hostport\"", + "Doc": "check format of addresses passed to net.Dial\n\nThis analyzer flags code that produce network address strings using\nfmt.Sprintf, as in this example:\n\n addr := fmt.Sprintf(\"%s:%d\", host, 12345) // \"will not work with IPv6\"\n ...\n conn, err := net.Dial(\"tcp\", addr) // \"when passed to dial here\"\n\nThe analyzer suggests a fix to use the correct approach, a call to\nnet.JoinHostPort:\n\n addr := net.JoinHostPort(host, \"12345\")\n ...\n conn, err := net.Dial(\"tcp\", addr)\n\nA similar diagnostic and fix are produced for a format string of \"%s:%s\".\n", + "Default": "true", + "Status": "" + }, + { + "Name": "\"httpresponse\"", + "Doc": "check for mistakes using HTTP responses\n\nA common mistake when using the net/http package is to defer a function\ncall to close the http.Response Body before checking the error that\ndetermines whether the response is valid:\n\n\tresp, err := http.Head(url)\n\tdefer resp.Body.Close()\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\t// (defer statement belongs here)\n\nThis checker helps uncover latent nil dereference bugs by reporting a\ndiagnostic for such mistakes.", + "Default": "true", + "Status": "" + }, + { + "Name": "\"ifaceassert\"", + "Doc": "detect impossible interface-to-interface type assertions\n\nThis checker flags type assertions v.(T) and corresponding type-switch cases\nin which the static type V of v is an interface that cannot possibly implement\nthe target interface T. This occurs when V and T contain methods with the same\nname but different signatures. Example:\n\n\tvar v interface {\n\t\tRead()\n\t}\n\t_ = v.(io.Reader)\n\nThe Read method in v has a different signature than the Read method in\nio.Reader, so this assertion cannot succeed.", + "Default": "true", + "Status": "" + }, + { + "Name": "\"infertypeargs\"", + "Doc": "check for unnecessary type arguments in call expressions\n\nExplicit type arguments may be omitted from call expressions if they can be\ninferred from function arguments, or from other type arguments:\n\n\tfunc f[T any](T) {}\n\t\n\tfunc _() {\n\t\tf[string](\"foo\") // string could be inferred\n\t}\n", + "Default": "true", + "Status": "" + }, + { + "Name": "\"loopclosure\"", + "Doc": "check references to loop variables from within nested functions\n\nThis analyzer reports places where a function literal references the\niteration variable of an enclosing loop, and the loop calls the function\nin such a way (e.g. 
with go or defer) that it may outlive the loop\niteration and possibly observe the wrong value of the variable.\n\nNote: An iteration variable can only outlive a loop iteration in Go versions \u003c=1.21.\nIn Go 1.22 and later, the loop variable lifetimes changed to create a new\niteration variable per loop iteration. (See go.dev/issue/60078.)\n\nIn this example, all the deferred functions run after the loop has\ncompleted, so all observe the final value of v [\u003cgo1.22].\n\n\tfor _, v := range list {\n\t defer func() {\n\t use(v) // incorrect\n\t }()\n\t}\n\nOne fix is to create a new variable for each iteration of the loop:\n\n\tfor _, v := range list {\n\t v := v // new var per iteration\n\t defer func() {\n\t use(v) // ok\n\t }()\n\t}\n\nAfter Go version 1.22, the previous two for loops are equivalent\nand both are correct.\n\nThe next example uses a go statement and has a similar problem [\u003cgo1.22].\nIn addition, it has a data race because the loop updates v\nconcurrent with the goroutines accessing it.\n\n\tfor _, v := range elem {\n\t go func() {\n\t use(v) // incorrect, and a data race\n\t }()\n\t}\n\nA fix is the same as before. The checker also reports problems\nin goroutines started by golang.org/x/sync/errgroup.Group.\nA hard-to-spot variant of this form is common in parallel tests:\n\n\tfunc Test(t *testing.T) {\n\t for _, test := range tests {\n\t t.Run(test.name, func(t *testing.T) {\n\t t.Parallel()\n\t use(test) // incorrect, and a data race\n\t })\n\t }\n\t}\n\nThe t.Parallel() call causes the rest of the function to execute\nconcurrent with the loop [\u003cgo1.22].\n\nThe analyzer reports references only in the last statement,\nas it is not deep enough to understand the effects of subsequent\nstatements that might render the reference benign.\n(\"Last statement\" is defined recursively in compound\nstatements such as if, switch, and select.)\n\nSee: https://golang.org/doc/go_faq.html#closures_and_goroutines", + "Default": "true", + "Status": "" + }, + { + "Name": "\"lostcancel\"", + "Doc": "check cancel func returned by context.WithCancel is called\n\nThe cancellation function returned by context.WithCancel, WithTimeout,\nWithDeadline and variants such as WithCancelCause must be called,\nor the new context will remain live until its parent context is cancelled.\n(The background context is never cancelled.)", + "Default": "true", + "Status": "" + }, + { + "Name": "\"modernize\"", + "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code by using more modern features of Go and its standard\nlibrary.\n\nEach diagnostic provides a fix. Our intent is that these fixes may\nbe safely applied en masse without changing the behavior of your\nprogram. In some cases the suggested fixes are imperfect and may\nlead to (for example) unused imports or unused local variables,\ncausing build breakage. However, these problems are generally\ntrivial to fix. We regard any modernizer whose fix changes program\nbehavior to have a serious bug and will endeavor to fix it.\n\nTo apply all modernization fixes en masse, you can use the\nfollowing command:\n\n\t$ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -fix -test ./...\n\n(Do not use \"go get -tool\" to add gopls as a dependency of your\nmodule; gopls commands must be built from their release branch.)\n\nIf the tool warns of conflicting fixes, you may need to run it more\nthan once until it has applied all fixes cleanly. 
This command is\nnot an officially supported interface and may change in the future.\n\nChanges produced by this tool should be reviewed as usual before\nbeing merged. In some cases, a loop may be replaced by a simple\nfunction call, causing comments within the loop to be discarded.\nHuman judgment may be required to avoid losing comments of value.\n\nEach diagnostic reported by modernize has a specific category. (The\ncategories are listed below.) Diagnostics in some categories, such\nas \"efaceany\" (which replaces \"interface{}\" with \"any\" where it is\nsafe to do so) are particularly numerous. It may ease the burden of\ncode review to apply fixes in two passes, the first change\nconsisting only of fixes of category \"efaceany\", the second\nconsisting of all others. This can be achieved using the -category flag:\n\n\t$ modernize -category=efaceany -fix -test ./...\n\t$ modernize -category=-efaceany -fix -test ./...\n\nCategories of modernize diagnostic:\n\n - forvar: remove x := x variable declarations made unnecessary by the new semantics of loops in go1.22.\n\n - slicescontains: replace 'for i, elem := range s { if elem == needle { ...; break }'\n by a call to slices.Contains, added in go1.21.\n\n - minmax: replace an if/else conditional assignment by a call to\n the built-in min or max functions added in go1.21.\n\n - sortslice: replace sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21.\n\n - efaceany: replace interface{} by the 'any' type added in go1.18.\n\n - slicesclone: replace append([]T(nil), s...) by slices.Clone(s) or\n slices.Concat(s), added in go1.21.\n\n - mapsloop: replace a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions from\n the maps package, added in go1.21.\n\n - fmtappendf: replace []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...),\n added in go1.19.\n\n - testingcontext: replace uses of context.WithCancel in tests\n with t.Context, added in go1.24.\n\n - omitzero: replace omitempty by omitzero on structs, added in go1.24.\n\n - bloop: replace \"for i := range b.N\" or \"for range b.N\" in a\n benchmark with \"for b.Loop()\", and remove any preceding calls\n to b.StopTimer, b.StartTimer, and b.ResetTimer.\n\n - slicesdelete: replace append(s[:i], s[i+1]...) by\n slices.Delete(s, i, i+1), added in go1.21.\n\n - rangeint: replace a 3-clause \"for i := 0; i \u003c n; i++\" loop by\n \"for i := range n\", added in go1.22.\n\n - stringsseq: replace Split in \"for range strings.Split(...)\" by go1.24's\n more efficient SplitSeq, or Fields with FieldSeq.\n\n - stringscutprefix: replace some uses of HasPrefix followed by TrimPrefix with CutPrefix,\n added to the strings package in go1.20.\n\n - waitgroup: replace old complex usages of sync.WaitGroup by less complex WaitGroup.Go method in go1.25.", + "Default": "true", + "Status": "" + }, + { + "Name": "\"nilfunc\"", + "Doc": "check for useless comparisons between functions and nil\n\nA useless comparison is one like f == nil as opposed to f() == nil.", + "Default": "true", + "Status": "" + }, + { + "Name": "\"nilness\"", + "Doc": "check for redundant or impossible nil comparisons\n\nThe nilness checker inspects the control-flow graph of each function in\na package and reports nil pointer dereferences, degenerate nil\npointers, and panics with nil values. A degenerate comparison is of the form\nx==nil or x!=nil where x is statically known to be nil or non-nil. 
These are\noften a mistake, especially in control flow related to errors. Panics with nil\nvalues are checked because they are not detectable by\n\n\tif r := recover(); r != nil {\n\nThis check reports conditions such as:\n\n\tif f == nil { // impossible condition (f is a function)\n\t}\n\nand:\n\n\tp := \u0026v\n\t...\n\tif p != nil { // tautological condition\n\t}\n\nand:\n\n\tif p == nil {\n\t\tprint(*p) // nil dereference\n\t}\n\nand:\n\n\tif p == nil {\n\t\tpanic(p)\n\t}\n\nSometimes the control flow may be quite complex, making bugs hard\nto spot. In the example below, the err.Error expression is\nguaranteed to panic because, after the first return, err must be\nnil. The intervening loop is just a distraction.\n\n\t...\n\terr := g.Wait()\n\tif err != nil {\n\t\treturn err\n\t}\n\tpartialSuccess := false\n\tfor _, err := range errs {\n\t\tif err == nil {\n\t\t\tpartialSuccess = true\n\t\t\tbreak\n\t\t}\n\t}\n\tif partialSuccess {\n\t\treportStatus(StatusMessage{\n\t\t\tCode: code.ERROR,\n\t\t\tDetail: err.Error(), // \"nil dereference in dynamic method call\"\n\t\t})\n\t\treturn nil\n\t}\n\n...", + "Default": "true", + "Status": "" + }, + { + "Name": "\"nonewvars\"", + "Doc": "suggested fixes for \"no new vars on left side of :=\"\n\nThis checker provides suggested fixes for type errors of the\ntype \"no new vars on left side of :=\". For example:\n\n\tz := 1\n\tz := 2\n\nwill turn into\n\n\tz := 1\n\tz = 2", + "Default": "true", + "Status": "" + }, + { + "Name": "\"noresultvalues\"", + "Doc": "suggested fixes for unexpected return values\n\nThis checker provides suggested fixes for type errors of the\ntype \"no result values expected\" or \"too many return values\".\nFor example:\n\n\tfunc z() { return nil }\n\nwill turn into\n\n\tfunc z() { return }", + "Default": "true", + "Status": "" + }, + { + "Name": "\"printf\"", + "Doc": "check consistency of Printf format strings and arguments\n\nThe check applies to calls of the formatting functions such as\n[fmt.Printf] and [fmt.Sprintf], as well as any detected wrappers of\nthose functions such as [log.Printf]. 
It reports a variety of\nmistakes such as syntax errors in the format string and mismatches\n(of number and type) between the verbs and their arguments.\n\nSee the documentation of the fmt package for the complete set of\nformat operators and their operand types.", + "Default": "true", + "Status": "" + }, + { + "Name": "\"shadow\"", + "Doc": "check for possible unintended shadowing of variables\n\nThis analyzer check for shadowed variables.\nA shadowed variable is a variable declared in an inner scope\nwith the same name and type as a variable in an outer scope,\nand where the outer variable is mentioned after the inner one\nis declared.\n\n(This definition can be refined; the module generates too many\nfalse positives and is not yet enabled by default.)\n\nFor example:\n\n\tfunc BadRead(f *os.File, buf []byte) error {\n\t\tvar err error\n\t\tfor {\n\t\t\tn, err := f.Read(buf) // shadows the function variable 'err'\n\t\t\tif err != nil {\n\t\t\t\tbreak // causes return of wrong value\n\t\t\t}\n\t\t\tfoo(buf)\n\t\t}\n\t\treturn err\n\t}", + "Default": "false", + "Status": "" + }, + { + "Name": "\"shift\"", + "Doc": "check for shifts that equal or exceed the width of the integer", + "Default": "true", + "Status": "" + }, + { + "Name": "\"sigchanyzer\"", + "Doc": "check for unbuffered channel of os.Signal\n\nThis checker reports call expression of the form\n\n\tsignal.Notify(c \u003c-chan os.Signal, sig ...os.Signal),\n\nwhere c is an unbuffered channel, which can be at risk of missing the signal.", + "Default": "true", + "Status": "" + }, + { + "Name": "\"simplifycompositelit\"", + "Doc": "check for composite literal simplifications\n\nAn array, slice, or map composite literal of the form:\n\n\t[]T{T{}, T{}}\n\nwill be simplified to:\n\n\t[]T{{}, {}}\n\nThis is one of the simplifications that \"gofmt -s\" applies.\n\nThis analyzer ignores generated code.", + "Default": "true", + "Status": "" + }, + { + "Name": "\"simplifyrange\"", + "Doc": "check for range statement simplifications\n\nA range of the form:\n\n\tfor x, _ = range v {...}\n\nwill be simplified to:\n\n\tfor x = range v {...}\n\nA range of the form:\n\n\tfor _ = range v {...}\n\nwill be simplified to:\n\n\tfor range v {...}\n\nThis is one of the simplifications that \"gofmt -s\" applies.\n\nThis analyzer ignores generated code.", + "Default": "true", + "Status": "" + }, + { + "Name": "\"simplifyslice\"", + "Doc": "check for slice simplifications\n\nA slice expression of the form:\n\n\ts[a:len(s)]\n\nwill be simplified to:\n\n\ts[a:]\n\nThis is one of the simplifications that \"gofmt -s\" applies.\n\nThis analyzer ignores generated code.", + "Default": "true", + "Status": "" + }, + { + "Name": "\"slog\"", + "Doc": "check for invalid structured logging calls\n\nThe slog checker looks for calls to functions from the log/slog\npackage that take alternating key-value pairs. It reports calls\nwhere an argument in a key position is neither a string nor a\nslog.Attr, and where a final key is missing its value.\nFor example,it would report\n\n\tslog.Warn(\"message\", 11, \"k\") // slog.Warn arg \"11\" should be a string or a slog.Attr\n\nand\n\n\tslog.Info(\"message\", \"k1\", v1, \"k2\") // call to slog.Info missing a final value", + "Default": "true", + "Status": "" + }, + { + "Name": "\"sortslice\"", + "Doc": "check the argument type of sort.Slice\n\nsort.Slice requires an argument of a slice type. 
Check that\nthe interface{} value passed to sort.Slice is actually a slice.", + "Default": "true", + "Status": "" + }, + { + "Name": "\"stdmethods\"", + "Doc": "check signature of methods of well-known interfaces\n\nSometimes a type may be intended to satisfy an interface but may fail to\ndo so because of a mistake in its method signature.\nFor example, the result of this WriteTo method should be (int64, error),\nnot error, to satisfy io.WriterTo:\n\n\ttype myWriterTo struct{...}\n\tfunc (myWriterTo) WriteTo(w io.Writer) error { ... }\n\nThis check ensures that each method whose name matches one of several\nwell-known interface methods from the standard library has the correct\nsignature for that interface.\n\nChecked method names include:\n\n\tFormat GobEncode GobDecode MarshalJSON MarshalXML\n\tPeek ReadByte ReadFrom ReadRune Scan Seek\n\tUnmarshalJSON UnreadByte UnreadRune WriteByte\n\tWriteTo", + "Default": "true", + "Status": "" + }, + { + "Name": "\"stdversion\"", + "Doc": "report uses of too-new standard library symbols\n\nThe stdversion analyzer reports references to symbols in the standard\nlibrary that were introduced by a Go release higher than the one in\nforce in the referring file. (Recall that the file's Go version is\ndefined by the 'go' directive its module's go.mod file, or by a\n\"//go:build go1.X\" build tag at the top of the file.)\n\nThe analyzer does not report a diagnostic for a reference to a \"too\nnew\" field or method of a type that is itself \"too new\", as this may\nhave false positives, for example if fields or methods are accessed\nthrough a type alias that is guarded by a Go version constraint.\n", + "Default": "true", + "Status": "" + }, + { + "Name": "\"stringintconv\"", + "Doc": "check for string(int) conversions\n\nThis checker flags conversions of the form string(x) where x is an integer\n(but not byte or rune) type. Such conversions are discouraged because they\nreturn the UTF-8 representation of the Unicode code point x, and not a decimal\nstring representation of x as one might expect. Furthermore, if x denotes an\ninvalid code point, the conversion cannot be statically rejected.\n\nFor conversions that intend on using the code point, consider replacing them\nwith string(rune(x)). Otherwise, strconv.Itoa and its equivalents return the\nstring representation of the value in the desired base.", + "Default": "true", + "Status": "" + }, + { + "Name": "\"structtag\"", + "Doc": "check that struct field tags conform to reflect.StructTag.Get\n\nAlso report certain struct tags (json, xml) used with unexported fields.", + "Default": "true", + "Status": "" + }, + { + "Name": "\"testinggoroutine\"", + "Doc": "report calls to (*testing.T).Fatal from goroutines started by a test\n\nFunctions that abruptly terminate a test, such as the Fatal, Fatalf, FailNow, and\nSkip{,f,Now} methods of *testing.T, must be called from the test goroutine itself.\nThis checker detects calls to these functions that occur within a goroutine\nstarted by the test. 
For example:\n\n\tfunc TestFoo(t *testing.T) {\n\t go func() {\n\t t.Fatal(\"oops\") // error: (*T).Fatal called from non-test goroutine\n\t }()\n\t}", + "Default": "true", + "Status": "" + }, + { + "Name": "\"tests\"", + "Doc": "check for common mistaken usages of tests and examples\n\nThe tests checker walks Test, Benchmark, Fuzzing and Example functions checking\nmalformed names, wrong signatures and examples documenting non-existent\nidentifiers.\n\nPlease see the documentation for package testing in golang.org/pkg/testing\nfor the conventions that are enforced for Tests, Benchmarks, and Examples.", + "Default": "true", + "Status": "" + }, + { + "Name": "\"timeformat\"", + "Doc": "check for calls of (time.Time).Format or time.Parse with 2006-02-01\n\nThe timeformat checker looks for time formats with the 2006-02-01 (yyyy-dd-mm)\nformat. Internationally, \"yyyy-dd-mm\" does not occur in common calendar date\nstandards, and so it is more likely that 2006-01-02 (yyyy-mm-dd) was intended.", + "Default": "true", + "Status": "" + }, + { + "Name": "\"unmarshal\"", + "Doc": "report passing non-pointer or non-interface values to unmarshal\n\nThe unmarshal analysis reports calls to functions such as json.Unmarshal\nin which the argument type is not a pointer or an interface.", + "Default": "true", + "Status": "" + }, + { + "Name": "\"unreachable\"", + "Doc": "check for unreachable code\n\nThe unreachable analyzer finds statements that execution can never reach\nbecause they are preceded by a return statement, a call to panic, an\ninfinite loop, or similar constructs.", + "Default": "true", + "Status": "" + }, + { + "Name": "\"unsafeptr\"", + "Doc": "check for invalid conversions of uintptr to unsafe.Pointer\n\nThe unsafeptr analyzer reports likely incorrect uses of unsafe.Pointer\nto convert integers to pointers. 
A conversion from uintptr to\nunsafe.Pointer is invalid if it implies that there is a uintptr-typed\nword in memory that holds a pointer value, because that word will be\ninvisible to stack copying and to the garbage collector.", + "Default": "true", + "Status": "" + }, + { + "Name": "\"unusedfunc\"", + "Doc": "check for unused functions and methods\n\nThe unusedfunc analyzer reports functions and methods that are\nnever referenced outside of their own declaration.\n\nA function is considered unused if it is unexported and not\nreferenced (except within its own declaration).\n\nA method is considered unused if it is unexported, not referenced\n(except within its own declaration), and its name does not match\nthat of any method of an interface type declared within the same\npackage.\n\nThe tool may report false positives in some situations, for\nexample:\n\n - For a declaration of an unexported function that is referenced\n from another package using the go:linkname mechanism, if the\n declaration's doc comment does not also have a go:linkname\n comment.\n\n (Such code is in any case strongly discouraged: linkname\n annotations, if they must be used at all, should be used on both\n the declaration and the alias.)\n\n - For compiler intrinsics in the \"runtime\" package that, though\n never referenced, are known to the compiler and are called\n indirectly by compiled object code.\n\n - For functions called only from assembly.\n\n - For functions called only from files whose build tags are not\n selected in the current build configuration.\n\nSee https://github.com/golang/go/issues/71686 for discussion of\nthese limitations.\n\nThe unusedfunc algorithm is not as precise as the\ngolang.org/x/tools/cmd/deadcode tool, but it has the advantage that\nit runs within the modular analysis framework, enabling near\nreal-time feedback within gopls.", + "Default": "true", + "Status": "" + }, + { + "Name": "\"unusedparams\"", + "Doc": "check for unused parameters of functions\n\nThe unusedparams analyzer checks functions to see if there are\nany parameters that are not being used.\n\nTo ensure soundness, it ignores:\n - \"address-taken\" functions, that is, functions that are used as\n a value rather than being called directly; their signatures may\n be required to conform to a func type.\n - exported functions or methods, since they may be address-taken\n in another package.\n - unexported methods whose name matches an interface method\n declared in the same package, since the method's signature\n may be required to conform to the interface type.\n - functions with empty bodies, or containing just a call to panic.\n - parameters that are unnamed, or named \"_\", the blank identifier.\n\nThe analyzer suggests a fix of replacing the parameter name by \"_\",\nbut in such cases a deeper fix can be obtained by invoking the\n\"Refactor: remove unused parameter\" code action, which will\neliminate the parameter entirely, along with all corresponding\narguments at call sites, while taking care to preserve any side\neffects in the argument expressions; see\nhttps://github.com/golang/tools/releases/tag/gopls%2Fv0.14.\n\nThis analyzer ignores generated code.", + "Default": "true", + "Status": "" + }, + { + "Name": "\"unusedresult\"", + "Doc": "check for unused results of calls to some functions\n\nSome functions like fmt.Errorf return a result and have no side\neffects, so it is always a mistake to discard the result. 
Other\nfunctions may return an error that must not be ignored, or a cleanup\noperation that must be called. This analyzer reports calls to\nfunctions like these when the result of the call is ignored.\n\nThe set of functions may be controlled using flags.", + "Default": "true", + "Status": "" + }, + { + "Name": "\"unusedvariable\"", + "Doc": "check for unused variables and suggest fixes", + "Default": "true", + "Status": "" + }, + { + "Name": "\"unusedwrite\"", + "Doc": "checks for unused writes\n\nThe analyzer reports instances of writes to struct fields and\narrays that are never read. Specifically, when a struct object\nor an array is copied, its elements are copied implicitly by\nthe compiler, and any element write to this copy does nothing\nwith the original object.\n\nFor example:\n\n\ttype T struct { x int }\n\n\tfunc f(input []T) {\n\t\tfor i, v := range input { // v is a copy\n\t\t\tv.x = i // unused write to field x\n\t\t}\n\t}\n\nAnother example is about non-pointer receiver:\n\n\ttype T struct { x int }\n\n\tfunc (t T) f() { // t is a copy\n\t\tt.x = i // unused write to field x\n\t}", + "Default": "true", + "Status": "" + }, + { + "Name": "\"waitgroup\"", + "Doc": "check for misuses of sync.WaitGroup\n\nThis analyzer detects mistaken calls to the (*sync.WaitGroup).Add\nmethod from inside a new goroutine, causing Add to race with Wait:\n\n\t// WRONG\n\tvar wg sync.WaitGroup\n\tgo func() {\n\t wg.Add(1) // \"WaitGroup.Add called from inside new goroutine\"\n\t defer wg.Done()\n\t ...\n\t}()\n\twg.Wait() // (may return prematurely before new goroutine starts)\n\nThe correct code calls Add before starting the goroutine:\n\n\t// RIGHT\n\tvar wg sync.WaitGroup\n\twg.Add(1)\n\tgo func() {\n\t\tdefer wg.Done()\n\t\t...\n\t}()\n\twg.Wait()", + "Default": "true", + "Status": "" + }, + { + "Name": "\"yield\"", + "Doc": "report calls to yield where the result is ignored\n\nAfter a yield function returns false, the caller should not call\nthe yield function again; generally the iterator should return\npromptly.\n\nThis example fails to check the result of the call to yield,\ncausing this analyzer to report a diagnostic:\n\n\tyield(1) // yield may be called again (on L2) after returning false\n\tyield(2)\n\nThe corrected code is either this:\n\n\tif yield(1) { yield(2) }\n\nor simply:\n\n\t_ = yield(1) \u0026\u0026 yield(2)\n\nIt is not always a mistake to ignore the result of yield.\nFor example, this is a valid single-element iterator:\n\n\tyield(1) // ok to ignore result\n\treturn\n\nIt is only a mistake when the yield call that returned false may be\nfollowed by another call.", + "Default": "true", + "Status": "" + } + ] + }, + "EnumValues": null, + "Default": "{}", + "Status": "", + "Hierarchy": "ui.diagnostic", + "DeprecationMessage": "" + }, + { + "Name": "staticcheck", + "Type": "bool", + "Doc": "staticcheck configures the default set of analyses staticcheck.io.\nThese analyses are documented on\n[Staticcheck's website](https://staticcheck.io/docs/checks/).\n\nThe \"staticcheck\" option has three values:\n- false: disable all staticcheck analyzers\n- true: enable all staticcheck analyzers\n- unset: enable a subset of staticcheck analyzers\n selected by gopls maintainers for runtime efficiency\n and analytic precision.\n\nRegardless of this setting, individual analyzers can be\nselectively enabled or disabled using the `analyses` setting.\n", + "EnumKeys": { + "ValueType": "", + "Keys": null + }, + "EnumValues": null, + "Default": "false", + "Status": "experimental", + "Hierarchy": 
"ui.diagnostic", + "DeprecationMessage": "" + }, + { + "Name": "staticcheckProvided", + "Type": "bool", + "Doc": "", + "EnumKeys": { + "ValueType": "", + "Keys": null + }, + "EnumValues": null, + "Default": "false", + "Status": "experimental", + "Hierarchy": "ui.diagnostic", + "DeprecationMessage": "" + }, + { + "Name": "annotations", + "Type": "map[enum]bool", + "Doc": "annotations specifies the various kinds of compiler\noptimization details that should be reported as diagnostics\nwhen enabled for a package by the \"Toggle compiler\noptimization details\" (`gopls.gc_details`) command.\n\n(Some users care only about one kind of annotation in their\nprofiling efforts. More importantly, in large packages, the\nnumber of annotations can sometimes overwhelm the user\ninterface and exceed the per-file diagnostic limit.)\n\nTODO(adonovan): rename this field to CompilerOptDetail.\n", + "EnumKeys": { + "ValueType": "bool", + "Keys": [ + { + "Name": "\"bounds\"", + "Doc": "`\"bounds\"` controls bounds checking diagnostics.\n", + "Default": "true", + "Status": "" + }, + { + "Name": "\"escape\"", + "Doc": "`\"escape\"` controls diagnostics about escape choices.\n", + "Default": "true", + "Status": "" + }, + { + "Name": "\"inline\"", + "Doc": "`\"inline\"` controls diagnostics about inlining choices.\n", + "Default": "true", + "Status": "" + }, + { + "Name": "\"nil\"", + "Doc": "`\"nil\"` controls nil checks.\n", + "Default": "true", + "Status": "" + } + ] + }, + "EnumValues": null, + "Default": "{\"bounds\":true,\"escape\":true,\"inline\":true,\"nil\":true}", + "Status": "", + "Hierarchy": "ui.diagnostic", + "DeprecationMessage": "" + }, + { + "Name": "vulncheck", + "Type": "enum", + "Doc": "vulncheck enables vulnerability scanning.\n", + "EnumKeys": { + "ValueType": "", + "Keys": null + }, + "EnumValues": [ + { + "Value": "\"Imports\"", + "Doc": "`\"Imports\"`: In Imports mode, `gopls` will report vulnerabilities that affect packages\ndirectly and indirectly used by the analyzed main module.\n", + "Status": "" + }, + { + "Value": "\"Off\"", + "Doc": "`\"Off\"`: Disable vulnerability analysis.\n", + "Status": "" + } + ], + "Default": "\"Off\"", + "Status": "experimental", + "Hierarchy": "ui.diagnostic", + "DeprecationMessage": "" + }, + { + "Name": "diagnosticsDelay", + "Type": "time.Duration", + "Doc": "diagnosticsDelay controls the amount of time that gopls waits\nafter the most recent file modification before computing deep diagnostics.\nSimple diagnostics (parsing and type-checking) are always run immediately\non recently modified packages.\n\nThis option must be set to a valid duration string, for example `\"250ms\"`.\n", + "EnumKeys": { + "ValueType": "", + "Keys": null + }, + "EnumValues": null, + "Default": "\"1s\"", + "Status": "advanced", + "Hierarchy": "ui.diagnostic", + "DeprecationMessage": "" + }, + { + "Name": "diagnosticsTrigger", + "Type": "enum", + "Doc": "diagnosticsTrigger controls when to run diagnostics.\n", + "EnumKeys": { + "ValueType": "", + "Keys": null + }, + "EnumValues": [ + { + "Value": "\"Edit\"", + "Doc": "`\"Edit\"`: Trigger diagnostics on file edit and save. (default)\n", + "Status": "" + }, + { + "Value": "\"Save\"", + "Doc": "`\"Save\"`: Trigger diagnostics only on file save. 
Events like initial workspace load\nor configuration change will still trigger diagnostics.\n", + "Status": "" + } + ], + "Default": "\"Edit\"", + "Status": "experimental", + "Hierarchy": "ui.diagnostic", + "DeprecationMessage": "" + }, + { + "Name": "analysisProgressReporting", + "Type": "bool", + "Doc": "analysisProgressReporting controls whether gopls sends progress\nnotifications when construction of its index of analysis facts is taking a\nlong time. Cancelling these notifications will cancel the indexing task,\nthough it will restart after the next change in the workspace.\n\nWhen a package is opened for the first time and heavyweight analyses such as\nstaticcheck are enabled, it can take a while to construct the index of\nanalysis facts for all its dependencies. The index is cached in the\nfilesystem, so subsequent analysis should be faster.\n", + "EnumKeys": { + "ValueType": "", + "Keys": null + }, + "EnumValues": null, + "Default": "true", + "Status": "", + "Hierarchy": "ui.diagnostic", + "DeprecationMessage": "" + }, + { + "Name": "hints", + "Type": "map[enum]bool", + "Doc": "hints specify inlay hints that users want to see. A full list of hints\nthat gopls uses can be found in\n[inlayHints.md](https://github.com/golang/tools/blob/master/gopls/doc/inlayHints.md).\n", + "EnumKeys": { + "ValueType": "bool", + "Keys": [ + { + "Name": "\"assignVariableTypes\"", + "Doc": "`\"assignVariableTypes\"` controls inlay hints for variable types in assign statements:\n```go\n\ti/* int*/, j/* int*/ := 0, len(r)-1\n```\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"compositeLiteralFields\"", + "Doc": "`\"compositeLiteralFields\"` inlay hints for composite literal field names:\n```go\n\t{/*in: */\"Hello, world\", /*want: */\"dlrow ,olleH\"}\n```\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"compositeLiteralTypes\"", + "Doc": "`\"compositeLiteralTypes\"` controls inlay hints for composite literal types:\n```go\n\tfor _, c := range []struct {\n\t\tin, want string\n\t}{\n\t\t/*struct{ in string; want string }*/{\"Hello, world\", \"dlrow ,olleH\"},\n\t}\n```\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"constantValues\"", + "Doc": "`\"constantValues\"` controls inlay hints for constant values:\n```go\n\tconst (\n\t\tKindNone Kind = iota/* = 0*/\n\t\tKindPrint/* = 1*/\n\t\tKindPrintf/* = 2*/\n\t\tKindErrorf/* = 3*/\n\t)\n```\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"functionTypeParameters\"", + "Doc": "`\"functionTypeParameters\"` inlay hints for implicit type parameters on generic functions:\n```go\n\tmyFoo/*[int, string]*/(1, \"hello\")\n```\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"parameterNames\"", + "Doc": "`\"parameterNames\"` controls inlay hints for parameter names:\n```go\n\tparseInt(/* str: */ \"123\", /* radix: */ 8)\n```\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"rangeVariableTypes\"", + "Doc": "`\"rangeVariableTypes\"` controls inlay hints for variable types in range statements:\n```go\n\tfor k/* int*/, v/* string*/ := range []string{} {\n\t\tfmt.Println(k, v)\n\t}\n```\n", + "Default": "false", + "Status": "" + } + ] + }, + "EnumValues": null, + "Default": "{}", + "Status": "experimental", + "Hierarchy": "ui.inlayhint", + "DeprecationMessage": "" + }, + { + "Name": "codelenses", + "Type": "map[enum]bool", + "Doc": "codelenses overrides the enabled/disabled state of each of gopls'\nsources of [Code Lenses](codelenses.md).\n\nExample Usage:\n\n```json5\n\"gopls\": {\n...\n 
\"codelenses\": {\n \"generate\": false, // Don't show the `go generate` lens.\n }\n...\n}\n```\n", + "EnumKeys": { + "ValueType": "bool", + "Keys": [ + { + "Name": "\"generate\"", + "Doc": "`\"generate\"`: Run `go generate`\n\nThis codelens source annotates any `//go:generate` comments\nwith commands to run `go generate` in this directory, on\nall directories recursively beneath this one.\n\nSee [Generating code](https://go.dev/blog/generate) for\nmore details.\n", + "Default": "true", + "Status": "" + }, + { + "Name": "\"regenerate_cgo\"", + "Doc": "`\"regenerate_cgo\"`: Re-generate cgo declarations\n\nThis codelens source annotates an `import \"C\"` declaration\nwith a command to re-run the [cgo\ncommand](https://pkg.go.dev/cmd/cgo) to regenerate the\ncorresponding Go declarations.\n\nUse this after editing the C code in comments attached to\nthe import, or in C header files included by it.\n", + "Default": "true", + "Status": "" + }, + { + "Name": "\"run_govulncheck\"", + "Doc": "`\"run_govulncheck\"`: Run govulncheck (legacy)\n\nThis codelens source annotates the `module` directive in a go.mod file\nwith a command to run Govulncheck asynchronously.\n\n[Govulncheck](https://go.dev/blog/vuln) is a static analysis tool that\ncomputes the set of functions reachable within your application, including\ndependencies; queries a database of known security vulnerabilities; and\nreports any potential problems it finds.\n", + "Default": "false", + "Status": "experimental" + }, + { + "Name": "\"test\"", + "Doc": "`\"test\"`: Run tests and benchmarks\n\nThis codelens source annotates each `Test` and `Benchmark`\nfunction in a `*_test.go` file with a command to run it.\n\nThis source is off by default because VS Code has\na client-side custom UI for testing, and because progress\nnotifications are not a great UX for streamed test output.\nSee:\n- golang/go#67400 for a discussion of this feature.\n- https://github.com/joaotavora/eglot/discussions/1402\n for an alternative approach.\n", + "Default": "false", + "Status": "" + }, + { + "Name": "\"tidy\"", + "Doc": "`\"tidy\"`: Tidy go.mod file\n\nThis codelens source annotates the `module` directive in a\ngo.mod file with a command to run [`go mod\ntidy`](https://go.dev/ref/mod#go-mod-tidy), which ensures\nthat the go.mod file matches the source code in the module.\n", + "Default": "true", + "Status": "" + }, + { + "Name": "\"upgrade_dependency\"", + "Doc": "`\"upgrade_dependency\"`: Update dependencies\n\nThis codelens source annotates the `module` directive in a\ngo.mod file with commands to:\n\n- check for available upgrades,\n- upgrade direct dependencies, and\n- upgrade all dependencies transitively.\n", + "Default": "true", + "Status": "" + }, + { + "Name": "\"vendor\"", + "Doc": "`\"vendor\"`: Update vendor directory\n\nThis codelens source annotates the `module` directive in a\ngo.mod file with a command to run [`go mod\nvendor`](https://go.dev/ref/mod#go-mod-vendor), which\ncreates or updates the directory named `vendor` in the\nmodule root so that it contains an up-to-date copy of all\nnecessary package dependencies.\n", + "Default": "true", + "Status": "" + }, + { + "Name": "\"vulncheck\"", + "Doc": "`\"vulncheck\"`: Run govulncheck\n\nThis codelens source annotates the `module` directive in a go.mod file\nwith a command to run govulncheck synchronously.\n\n[Govulncheck](https://go.dev/blog/vuln) is a static analysis tool that\ncomputes the set of functions reachable within your application, including\ndependencies; queries a database of 
known security vulnerabilities; and\nreports any potential problems it finds.\n", + "Default": "false", + "Status": "experimental" + } + ] + }, + "EnumValues": null, + "Default": "{\"generate\":true,\"regenerate_cgo\":true,\"run_govulncheck\":false,\"tidy\":true,\"upgrade_dependency\":true,\"vendor\":true}", + "Status": "", + "Hierarchy": "ui", + "DeprecationMessage": "" + }, + { + "Name": "semanticTokens", + "Type": "bool", + "Doc": "semanticTokens controls whether the LSP server will send\nsemantic tokens to the client.\n", + "EnumKeys": { + "ValueType": "", + "Keys": null + }, + "EnumValues": null, + "Default": "false", + "Status": "experimental", + "Hierarchy": "ui", + "DeprecationMessage": "" + }, + { + "Name": "noSemanticString", + "Type": "bool", + "Doc": "noSemanticString turns off the sending of the semantic token 'string'\n\nDeprecated: Use SemanticTokenTypes[\"string\"] = false instead. See\ngolang/vscode-go#3632\n", + "EnumKeys": { + "ValueType": "", + "Keys": null + }, + "EnumValues": null, + "Default": "false", + "Status": "experimental", + "Hierarchy": "ui", + "DeprecationMessage": "use SemanticTokenTypes[\"string\"] = false instead. See\ngolang/vscode-go#3632\n" + }, + { + "Name": "noSemanticNumber", + "Type": "bool", + "Doc": "noSemanticNumber turns off the sending of the semantic token 'number'\n\nDeprecated: Use SemanticTokenTypes[\"number\"] = false instead. See\ngolang/vscode-go#3632.\n", + "EnumKeys": { + "ValueType": "", + "Keys": null + }, + "EnumValues": null, + "Default": "false", + "Status": "experimental", + "Hierarchy": "ui", + "DeprecationMessage": "use SemanticTokenTypes[\"number\"] = false instead. See\ngolang/vscode-go#3632.\n" + }, + { + "Name": "semanticTokenTypes", + "Type": "map[string]bool", + "Doc": "semanticTokenTypes configures the semantic token types. It allows\ndisabling types by setting each value to false.\nBy default, all types are enabled.\n", + "EnumKeys": { + "ValueType": "", + "Keys": null + }, + "EnumValues": null, + "Default": "{}", + "Status": "experimental", + "Hierarchy": "ui", + "DeprecationMessage": "" + }, + { + "Name": "semanticTokenModifiers", + "Type": "map[string]bool", + "Doc": "semanticTokenModifiers configures the semantic token modifiers. 
It allows\ndisabling modifiers by setting each value to false.\nBy default, all modifiers are enabled.\n", + "EnumKeys": { + "ValueType": "", "Keys": null }, "EnumValues": null, @@ -1121,71 +2065,1001 @@ }, "Lenses": [ { - "FileType": "Go", - "Lens": "generate", - "Title": "Run `go generate`", - "Doc": "\nThis codelens source annotates any `//go:generate` comments\nwith commands to run `go generate` in this directory, on\nall directories recursively beneath this one.\n\nSee [Generating code](https://go.dev/blog/generate) for\nmore details.\n", - "Default": true, - "Status": "" + "FileType": "Go", + "Lens": "generate", + "Title": "Run `go generate`", + "Doc": "\nThis codelens source annotates any `//go:generate` comments\nwith commands to run `go generate` in this directory, on\nall directories recursively beneath this one.\n\nSee [Generating code](https://go.dev/blog/generate) for\nmore details.\n", + "Default": true, + "Status": "" + }, + { + "FileType": "Go", + "Lens": "regenerate_cgo", + "Title": "Re-generate cgo declarations", + "Doc": "\nThis codelens source annotates an `import \"C\"` declaration\nwith a command to re-run the [cgo\ncommand](https://pkg.go.dev/cmd/cgo) to regenerate the\ncorresponding Go declarations.\n\nUse this after editing the C code in comments attached to\nthe import, or in C header files included by it.\n", + "Default": true, + "Status": "" + }, + { + "FileType": "Go", + "Lens": "test", + "Title": "Run tests and benchmarks", + "Doc": "\nThis codelens source annotates each `Test` and `Benchmark`\nfunction in a `*_test.go` file with a command to run it.\n\nThis source is off by default because VS Code has\na client-side custom UI for testing, and because progress\nnotifications are not a great UX for streamed test output.\nSee:\n- golang/go#67400 for a discussion of this feature.\n- https://github.com/joaotavora/eglot/discussions/1402\n for an alternative approach.\n", + "Default": false, + "Status": "" + }, + { + "FileType": "go.mod", + "Lens": "run_govulncheck", + "Title": "Run govulncheck (legacy)", + "Doc": "\nThis codelens source annotates the `module` directive in a go.mod file\nwith a command to run Govulncheck asynchronously.\n\n[Govulncheck](https://go.dev/blog/vuln) is a static analysis tool that\ncomputes the set of functions reachable within your application, including\ndependencies; queries a database of known security vulnerabilities; and\nreports any potential problems it finds.\n", + "Default": false, + "Status": "experimental" + }, + { + "FileType": "go.mod", + "Lens": "tidy", + "Title": "Tidy go.mod file", + "Doc": "\nThis codelens source annotates the `module` directive in a\ngo.mod file with a command to run [`go mod\ntidy`](https://go.dev/ref/mod#go-mod-tidy), which ensures\nthat the go.mod file matches the source code in the module.\n", + "Default": true, + "Status": "" + }, + { + "FileType": "go.mod", + "Lens": "upgrade_dependency", + "Title": "Update dependencies", + "Doc": "\nThis codelens source annotates the `module` directive in a\ngo.mod file with commands to:\n\n- check for available upgrades,\n- upgrade direct dependencies, and\n- upgrade all dependencies transitively.\n", + "Default": true, + "Status": "" + }, + { + "FileType": "go.mod", + "Lens": "vendor", + "Title": "Update vendor directory", + "Doc": "\nThis codelens source annotates the `module` directive in a\ngo.mod file with a command to run [`go mod\nvendor`](https://go.dev/ref/mod#go-mod-vendor), which\ncreates or updates the directory named `vendor` in the\nmodule root so 
that it contains an up-to-date copy of all\nnecessary package dependencies.\n", + "Default": true, + "Status": "" + }, + { + "FileType": "go.mod", + "Lens": "vulncheck", + "Title": "Run govulncheck", + "Doc": "\nThis codelens source annotates the `module` directive in a go.mod file\nwith a command to run govulncheck synchronously.\n\n[Govulncheck](https://go.dev/blog/vuln) is a static analysis tool that\ncomputes the set of functions reachable within your application, including\ndependencies; queries a database of known security vulnerabilities; and\nreports any potential problems it finds.\n", + "Default": false, + "Status": "experimental" + } + ], + "Analyzers": [ + { + "Name": "QF1001", + "Doc": "Apply De Morgan's law\n\nAvailable since\n 2021.1\n", + "URL": "https://staticcheck.dev/docs/checks/#QF1001", + "Default": false + }, + { + "Name": "QF1002", + "Doc": "Convert untagged switch to tagged switch\n\nAn untagged switch that compares a single variable against a series of\nvalues can be replaced with a tagged switch.\n\nBefore:\n\n switch {\n case x == 1 || x == 2, x == 3:\n ...\n case x == 4:\n ...\n default:\n ...\n }\n\nAfter:\n\n switch x {\n case 1, 2, 3:\n ...\n case 4:\n ...\n default:\n ...\n }\n\nAvailable since\n 2021.1\n", + "URL": "https://staticcheck.dev/docs/checks/#QF1002", + "Default": true + }, + { + "Name": "QF1003", + "Doc": "Convert if/else-if chain to tagged switch\n\nA series of if/else-if checks comparing the same variable against\nvalues can be replaced with a tagged switch.\n\nBefore:\n\n if x == 1 || x == 2 {\n ...\n } else if x == 3 {\n ...\n } else {\n ...\n }\n\nAfter:\n\n switch x {\n case 1, 2:\n ...\n case 3:\n ...\n default:\n ...\n }\n\nAvailable since\n 2021.1\n", + "URL": "https://staticcheck.dev/docs/checks/#QF1003", + "Default": true + }, + { + "Name": "QF1004", + "Doc": "Use strings.ReplaceAll instead of strings.Replace with n == -1\n\nAvailable since\n 2021.1\n", + "URL": "https://staticcheck.dev/docs/checks/#QF1004", + "Default": true + }, + { + "Name": "QF1005", + "Doc": "Expand call to math.Pow\n\nSome uses of math.Pow can be simplified to basic multiplication.\n\nBefore:\n\n math.Pow(x, 2)\n\nAfter:\n\n x * x\n\nAvailable since\n 2021.1\n", + "URL": "https://staticcheck.dev/docs/checks/#QF1005", + "Default": false + }, + { + "Name": "QF1006", + "Doc": "Lift if+break into loop condition\n\nBefore:\n\n for {\n if done {\n break\n }\n ...\n }\n\nAfter:\n\n for !done {\n ...\n }\n\nAvailable since\n 2021.1\n", + "URL": "https://staticcheck.dev/docs/checks/#QF1006", + "Default": false + }, + { + "Name": "QF1007", + "Doc": "Merge conditional assignment into variable declaration\n\nBefore:\n\n x := false\n if someCondition {\n x = true\n }\n\nAfter:\n\n x := someCondition\n\nAvailable since\n 2021.1\n", + "URL": "https://staticcheck.dev/docs/checks/#QF1007", + "Default": false + }, + { + "Name": "QF1008", + "Doc": "Omit embedded fields from selector expression\n\nAvailable since\n 2021.1\n", + "URL": "https://staticcheck.dev/docs/checks/#QF1008", + "Default": false + }, + { + "Name": "QF1009", + "Doc": "Use time.Time.Equal instead of == operator\n\nAvailable since\n 2021.1\n", + "URL": "https://staticcheck.dev/docs/checks/#QF1009", + "Default": true + }, + { + "Name": "QF1010", + "Doc": "Convert slice of bytes to string when printing it\n\nAvailable since\n 2021.1\n", + "URL": "https://staticcheck.dev/docs/checks/#QF1010", + "Default": true + }, + { + "Name": "QF1011", + "Doc": "Omit redundant type from variable declaration\n\nAvailable since\n 
2021.1\n", + "URL": "https://staticcheck.dev/docs/checks/#", + "Default": false + }, + { + "Name": "QF1012", + "Doc": "Use fmt.Fprintf(x, ...) instead of x.Write(fmt.Sprintf(...))\n\nAvailable since\n 2022.1\n", + "URL": "https://staticcheck.dev/docs/checks/#QF1012", + "Default": true + }, + { + "Name": "S1000", + "Doc": "Use plain channel send or receive instead of single-case select\n\nSelect statements with a single case can be replaced with a simple\nsend or receive.\n\nBefore:\n\n select {\n case x := \u003c-ch:\n fmt.Println(x)\n }\n\nAfter:\n\n x := \u003c-ch\n fmt.Println(x)\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#S1000", + "Default": true + }, + { + "Name": "S1001", + "Doc": "Replace for loop with call to copy\n\nUse copy() for copying elements from one slice to another. For\narrays of identical size, you can use simple assignment.\n\nBefore:\n\n for i, x := range src {\n dst[i] = x\n }\n\nAfter:\n\n copy(dst, src)\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#S1001", + "Default": true + }, + { + "Name": "S1002", + "Doc": "Omit comparison with boolean constant\n\nBefore:\n\n if x == true {}\n\nAfter:\n\n if x {}\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#S1002", + "Default": false + }, + { + "Name": "S1003", + "Doc": "Replace call to strings.Index with strings.Contains\n\nBefore:\n\n if strings.Index(x, y) != -1 {}\n\nAfter:\n\n if strings.Contains(x, y) {}\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#S1003", + "Default": true + }, + { + "Name": "S1004", + "Doc": "Replace call to bytes.Compare with bytes.Equal\n\nBefore:\n\n if bytes.Compare(x, y) == 0 {}\n\nAfter:\n\n if bytes.Equal(x, y) {}\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#S1004", + "Default": true + }, + { + "Name": "S1005", + "Doc": "Drop unnecessary use of the blank identifier\n\nIn many cases, assigning to the blank identifier is unnecessary.\n\nBefore:\n\n for _ = range s {}\n x, _ = someMap[key]\n _ = \u003c-ch\n\nAfter:\n\n for range s{}\n x = someMap[key]\n \u003c-ch\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#S1005", + "Default": false + }, + { + "Name": "S1006", + "Doc": "Use 'for { ... }' for infinite loops\n\nFor infinite loops, using for { ... } is the most idiomatic choice.\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#S1006", + "Default": false + }, + { + "Name": "S1007", + "Doc": "Simplify regular expression by using raw string literal\n\nRaw string literals use backticks instead of quotation marks and do not support\nany escape sequences. 
This means that the backslash can be used\nfreely, without the need of escaping.\n\nSince regular expressions have their own escape sequences, raw strings\ncan improve their readability.\n\nBefore:\n\n regexp.Compile(\"\\\\A(\\\\w+) profile: total \\\\d+\\\\n\\\\z\")\n\nAfter:\n\n regexp.Compile(`\\A(\\w+) profile: total \\d+\\n\\z`)\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#S1007", + "Default": true + }, + { + "Name": "S1008", + "Doc": "Simplify returning boolean expression\n\nBefore:\n\n if \u003cexpr\u003e {\n return true\n }\n return false\n\nAfter:\n\n return \u003cexpr\u003e\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#S1008", + "Default": false + }, + { + "Name": "S1009", + "Doc": "Omit redundant nil check on slices, maps, and channels\n\nThe len function is defined for all slices, maps, and\nchannels, even nil ones, which have a length of zero. It is not necessary to\ncheck for nil before checking that their length is not zero.\n\nBefore:\n\n if x != nil \u0026\u0026 len(x) != 0 {}\n\nAfter:\n\n if len(x) != 0 {}\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#S1009", + "Default": true + }, + { + "Name": "S1010", + "Doc": "Omit default slice index\n\nWhen slicing, the second index defaults to the length of the value,\nmaking s[n:len(s)] and s[n:] equivalent.\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#S1010", + "Default": true + }, + { + "Name": "S1011", + "Doc": "Use a single append to concatenate two slices\n\nBefore:\n\n for _, e := range y {\n x = append(x, e)\n }\n \n for i := range y {\n x = append(x, y[i])\n }\n \n for i := range y {\n v := y[i]\n x = append(x, v)\n }\n\nAfter:\n\n x = append(x, y...)\n x = append(x, y...)\n x = append(x, y...)\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#S1011", + "Default": false + }, + { + "Name": "S1012", + "Doc": "Replace time.Now().Sub(x) with time.Since(x)\n\nThe time.Since helper has the same effect as using time.Now().Sub(x)\nbut is easier to read.\n\nBefore:\n\n time.Now().Sub(x)\n\nAfter:\n\n time.Since(x)\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#S1012", + "Default": true + }, + { + "Name": "S1016", + "Doc": "Use a type conversion instead of manually copying struct fields\n\nTwo struct types with identical fields can be converted between each\nother. In older versions of Go, the fields had to have identical\nstruct tags. Since Go 1.8, however, struct tags are ignored during\nconversions. It is thus not necessary to manually copy every field\nindividually.\n\nBefore:\n\n var x T1\n y := T2{\n Field1: x.Field1,\n Field2: x.Field2,\n }\n\nAfter:\n\n var x T1\n y := T2(x)\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#S1016", + "Default": false + }, + { + "Name": "S1017", + "Doc": "Replace manual trimming with strings.TrimPrefix\n\nInstead of using strings.HasPrefix and manual slicing, use the\nstrings.TrimPrefix function. If the string doesn't start with the\nprefix, the original string will be returned. 
Using strings.TrimPrefix\nreduces complexity, and avoids common bugs, such as off-by-one\nmistakes.\n\nBefore:\n\n if strings.HasPrefix(str, prefix) {\n str = str[len(prefix):]\n }\n\nAfter:\n\n str = strings.TrimPrefix(str, prefix)\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#S1017", + "Default": true + }, + { + "Name": "S1018", + "Doc": "Use 'copy' for sliding elements\n\ncopy() permits using the same source and destination slice, even with\noverlapping ranges. This makes it ideal for sliding elements in a\nslice.\n\nBefore:\n\n for i := 0; i \u003c n; i++ {\n bs[i] = bs[offset+i]\n }\n\nAfter:\n\n copy(bs[:n], bs[offset:])\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#S1018", + "Default": true + }, + { + "Name": "S1019", + "Doc": "Simplify 'make' call by omitting redundant arguments\n\nThe 'make' function has default values for the length and capacity\narguments. For channels, the length defaults to zero, and for slices,\nthe capacity defaults to the length.\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#S1019", + "Default": true + }, + { + "Name": "S1020", + "Doc": "Omit redundant nil check in type assertion\n\nBefore:\n\n if _, ok := i.(T); ok \u0026\u0026 i != nil {}\n\nAfter:\n\n if _, ok := i.(T); ok {}\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#S1020", + "Default": true + }, + { + "Name": "S1021", + "Doc": "Merge variable declaration and assignment\n\nBefore:\n\n var x uint\n x = 1\n\nAfter:\n\n var x uint = 1\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#S1021", + "Default": false + }, + { + "Name": "S1023", + "Doc": "Omit redundant control flow\n\nFunctions that have no return value do not need a return statement as\nthe final statement of the function.\n\nSwitches in Go do not have automatic fallthrough, unlike languages\nlike C. It is not necessary to have a break statement as the final\nstatement in a case block.\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#S1023", + "Default": true + }, + { + "Name": "S1024", + "Doc": "Replace x.Sub(time.Now()) with time.Until(x)\n\nThe time.Until helper has the same effect as using x.Sub(time.Now())\nbut is easier to read.\n\nBefore:\n\n x.Sub(time.Now())\n\nAfter:\n\n time.Until(x)\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#S1024", + "Default": true + }, + { + "Name": "S1025", + "Doc": "Don't use fmt.Sprintf(\"%s\", x) unnecessarily\n\nIn many instances, there are easier and more efficient ways of getting\na value's string representation. 
Whenever a value's underlying type is\na string already, or the type has a String method, they should be used\ndirectly.\n\nGiven the following shared definitions\n\n type T1 string\n type T2 int\n\n func (T2) String() string { return \"Hello, world\" }\n\n var x string\n var y T1\n var z T2\n\nwe can simplify\n\n fmt.Sprintf(\"%s\", x)\n fmt.Sprintf(\"%s\", y)\n fmt.Sprintf(\"%s\", z)\n\nto\n\n x\n string(y)\n z.String()\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#S1025", + "Default": false + }, + { + "Name": "S1028", + "Doc": "Simplify error construction with fmt.Errorf\n\nBefore:\n\n errors.New(fmt.Sprintf(...))\n\nAfter:\n\n fmt.Errorf(...)\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#S1028", + "Default": true + }, + { + "Name": "S1029", + "Doc": "Range over the string directly\n\nRanging over a string will yield byte offsets and runes. If the offset\nisn't used, this is functionally equivalent to converting the string\nto a slice of runes and ranging over that. Ranging directly over the\nstring will be more performant, however, as it avoids allocating a new\nslice, the size of which depends on the length of the string.\n\nBefore:\n\n for _, r := range []rune(s) {}\n\nAfter:\n\n for _, r := range s {}\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#S1029", + "Default": false + }, + { + "Name": "S1030", + "Doc": "Use bytes.Buffer.String or bytes.Buffer.Bytes\n\nbytes.Buffer has both a String and a Bytes method. It is almost never\nnecessary to use string(buf.Bytes()) or []byte(buf.String()) – simply\nuse the other method.\n\nThe only exception to this are map lookups. Due to a compiler optimization,\nm[string(buf.Bytes())] is more efficient than m[buf.String()].\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#S1030", + "Default": true + }, + { + "Name": "S1031", + "Doc": "Omit redundant nil check around loop\n\nYou can use range on nil slices and maps, the loop will simply never\nexecute. 
This makes an additional nil check around the loop\nunnecessary.\n\nBefore:\n\n if s != nil {\n for _, x := range s {\n ...\n }\n }\n\nAfter:\n\n for _, x := range s {\n ...\n }\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#S1031", + "Default": true + }, + { + "Name": "S1032", + "Doc": "Use sort.Ints(x), sort.Float64s(x), and sort.Strings(x)\n\nThe sort.Ints, sort.Float64s and sort.Strings functions are easier to\nread than sort.Sort(sort.IntSlice(x)), sort.Sort(sort.Float64Slice(x))\nand sort.Sort(sort.StringSlice(x)).\n\nBefore:\n\n sort.Sort(sort.StringSlice(x))\n\nAfter:\n\n sort.Strings(x)\n\nAvailable since\n 2019.1\n", + "URL": "https://staticcheck.dev/docs/checks/#S1032", + "Default": true + }, + { + "Name": "S1033", + "Doc": "Unnecessary guard around call to 'delete'\n\nCalling delete on a nil map is a no-op.\n\nAvailable since\n 2019.2\n", + "URL": "https://staticcheck.dev/docs/checks/#S1033", + "Default": true + }, + { + "Name": "S1034", + "Doc": "Use result of type assertion to simplify cases\n\nAvailable since\n 2019.2\n", + "URL": "https://staticcheck.dev/docs/checks/#S1034", + "Default": true + }, + { + "Name": "S1035", + "Doc": "Redundant call to net/http.CanonicalHeaderKey in method call on net/http.Header\n\nThe methods on net/http.Header, namely Add, Del, Get\nand Set, already canonicalize the given header name.\n\nAvailable since\n 2020.1\n", + "URL": "https://staticcheck.dev/docs/checks/#S1035", + "Default": true + }, + { + "Name": "S1036", + "Doc": "Unnecessary guard around map access\n\nWhen accessing a map key that doesn't exist yet, one receives a zero\nvalue. Often, the zero value is a suitable value, for example when\nusing append or doing integer math.\n\nThe following\n\n if _, ok := m[\"foo\"]; ok {\n m[\"foo\"] = append(m[\"foo\"], \"bar\")\n } else {\n m[\"foo\"] = []string{\"bar\"}\n }\n\ncan be simplified to\n\n m[\"foo\"] = append(m[\"foo\"], \"bar\")\n\nand\n\n if _, ok := m2[\"k\"]; ok {\n m2[\"k\"] += 4\n } else {\n m2[\"k\"] = 4\n }\n\ncan be simplified to\n\n m[\"k\"] += 4\n\nAvailable since\n 2020.1\n", + "URL": "https://staticcheck.dev/docs/checks/#S1036", + "Default": true + }, + { + "Name": "S1037", + "Doc": "Elaborate way of sleeping\n\nUsing a select statement with a single case receiving\nfrom the result of time.After is a very elaborate way of sleeping that\ncan much simpler be expressed with a simple call to time.Sleep.\n\nAvailable since\n 2020.1\n", + "URL": "https://staticcheck.dev/docs/checks/#S1037", + "Default": true + }, + { + "Name": "S1038", + "Doc": "Unnecessarily complex way of printing formatted string\n\nInstead of using fmt.Print(fmt.Sprintf(...)), one can use fmt.Printf(...).\n\nAvailable since\n 2020.1\n", + "URL": "https://staticcheck.dev/docs/checks/#S1038", + "Default": true + }, + { + "Name": "S1039", + "Doc": "Unnecessary use of fmt.Sprint\n\nCalling fmt.Sprint with a single string argument is unnecessary\nand identical to using the string directly.\n\nAvailable since\n 2020.1\n", + "URL": "https://staticcheck.dev/docs/checks/#S1039", + "Default": true + }, + { + "Name": "S1040", + "Doc": "Type assertion to current type\n\nThe type assertion x.(SomeInterface), when x already has type\nSomeInterface, can only fail if x is nil. Usually, this is\nleft-over code from when x had a different type and you can safely\ndelete the type assertion. 
If you want to check that x is not nil,\nconsider being explicit and using an actual if x == nil comparison\ninstead of relying on the type assertion panicking.\n\nAvailable since\n 2021.1\n", + "URL": "https://staticcheck.dev/docs/checks/#S1040", + "Default": true + }, + { + "Name": "SA1000", + "Doc": "Invalid regular expression\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA1000", + "Default": false + }, + { + "Name": "SA1001", + "Doc": "Invalid template\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA1001", + "Default": true + }, + { + "Name": "SA1002", + "Doc": "Invalid format in time.Parse\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA1002", + "Default": false + }, + { + "Name": "SA1003", + "Doc": "Unsupported argument to functions in encoding/binary\n\nThe encoding/binary package can only serialize types with known sizes.\nThis precludes the use of the int and uint types, as their sizes\ndiffer on different architectures. Furthermore, it doesn't support\nserializing maps, channels, strings, or functions.\n\nBefore Go 1.8, bool wasn't supported, either.\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA1003", + "Default": false + }, + { + "Name": "SA1004", + "Doc": "Suspiciously small untyped constant in time.Sleep\n\nThe time.Sleep function takes a time.Duration as its only argument.\nDurations are expressed in nanoseconds. Thus, calling time.Sleep(1)\nwill sleep for 1 nanosecond. This is a common source of bugs, as sleep\nfunctions in other languages often accept seconds or milliseconds.\n\nThe time package provides constants such as time.Second to express\nlarge durations. These can be combined with arithmetic to express\narbitrary durations, for example 5 * time.Second for 5 seconds.\n\nIf you truly meant to sleep for a tiny amount of time, use\nn * time.Nanosecond to signal to Staticcheck that you did mean to sleep\nfor some amount of nanoseconds.\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA1004", + "Default": true + }, + { + "Name": "SA1005", + "Doc": "Invalid first argument to exec.Command\n\nos/exec runs programs directly (using variants of the fork and exec\nsystem calls on Unix systems). This shouldn't be confused with running\na command in a shell. The shell will allow for features such as input\nredirection, pipes, and general scripting. The shell is also\nresponsible for splitting the user's input into a program name and its\narguments. For example, the equivalent to\n\n ls / /tmp\n\nwould be\n\n exec.Command(\"ls\", \"/\", \"/tmp\")\n\nIf you want to run a command in a shell, consider using something like\nthe following – but be aware that not all systems, particularly\nWindows, will have a /bin/sh program:\n\n exec.Command(\"/bin/sh\", \"-c\", \"ls | grep Awesome\")\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA1005", + "Default": true + }, + { + "Name": "SA1007", + "Doc": "Invalid URL in net/url.Parse\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA1007", + "Default": false + }, + { + "Name": "SA1008", + "Doc": "Non-canonical key in http.Header map\n\nKeys in http.Header maps are canonical, meaning they follow a specific\ncombination of uppercase and lowercase letters. 
Methods such as\nhttp.Header.Add and http.Header.Del convert inputs into this canonical\nform before manipulating the map.\n\nWhen manipulating http.Header maps directly, as opposed to using the\nprovided methods, care should be taken to stick to canonical form in\norder to avoid inconsistencies. The following piece of code\ndemonstrates one such inconsistency:\n\n h := http.Header{}\n h[\"etag\"] = []string{\"1234\"}\n h.Add(\"etag\", \"5678\")\n fmt.Println(h)\n\n // Output:\n // map[Etag:[5678] etag:[1234]]\n\nThe easiest way of obtaining the canonical form of a key is to use\nhttp.CanonicalHeaderKey.\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA1008", + "Default": true + }, + { + "Name": "SA1010", + "Doc": "(*regexp.Regexp).FindAll called with n == 0, which will always return zero results\n\nIf n \u003e= 0, the function returns at most n matches/submatches. To\nreturn all results, specify a negative number.\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA1010", + "Default": false + }, + { + "Name": "SA1011", + "Doc": "Various methods in the 'strings' package expect valid UTF-8, but invalid input is provided\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA1011", + "Default": false + }, + { + "Name": "SA1012", + "Doc": "A nil context.Context is being passed to a function, consider using context.TODO instead\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA1012", + "Default": true + }, + { + "Name": "SA1013", + "Doc": "io.Seeker.Seek is being called with the whence constant as the first argument, but it should be the second\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA1013", + "Default": true + }, + { + "Name": "SA1014", + "Doc": "Non-pointer value passed to Unmarshal or Decode\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA1014", + "Default": false + }, + { + "Name": "SA1015", + "Doc": "Using time.Tick in a way that will leak. Consider using time.NewTicker, and only use time.Tick in tests, commands and endless functions\n\nBefore Go 1.23, time.Tickers had to be closed to be able to be garbage\ncollected. Since time.Tick doesn't make it possible to close the underlying\nticker, using it repeatedly would leak memory.\n\nGo 1.23 fixes this by allowing tickers to be collected even if they weren't closed.\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA1015", + "Default": false + }, + { + "Name": "SA1016", + "Doc": "Trapping a signal that cannot be trapped\n\nNot all signals can be intercepted by a process. Specifically, on\nUNIX-like systems, the syscall.SIGKILL and syscall.SIGSTOP signals are\nnever passed to the process, but instead handled directly by the\nkernel. It is therefore pointless to try and handle these signals.\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA1016", + "Default": true + }, + { + "Name": "SA1017", + "Doc": "Channels used with os/signal.Notify should be buffered\n\nThe os/signal package uses non-blocking channel sends when delivering\nsignals. If the receiving end of the channel isn't ready and the\nchannel is either unbuffered or full, the signal will be dropped. To\navoid missing signals, the channel should be buffered and of the\nappropriate size. 
For a channel used for notification of just one\nsignal value, a buffer of size 1 is sufficient.\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA1017", + "Default": false + }, + { + "Name": "SA1018", + "Doc": "strings.Replace called with n == 0, which does nothing\n\nWith n == 0, zero instances will be replaced. To replace all\ninstances, use a negative number, or use strings.ReplaceAll.\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA1018", + "Default": false + }, + { + "Name": "SA1020", + "Doc": "Using an invalid host:port pair with a net.Listen-related function\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA1020", + "Default": false + }, + { + "Name": "SA1021", + "Doc": "Using bytes.Equal to compare two net.IP\n\nA net.IP stores an IPv4 or IPv6 address as a slice of bytes. The\nlength of the slice for an IPv4 address, however, can be either 4 or\n16 bytes long, using different ways of representing IPv4 addresses. In\norder to correctly compare two net.IPs, the net.IP.Equal method should\nbe used, as it takes both representations into account.\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA1021", + "Default": false + }, + { + "Name": "SA1023", + "Doc": "Modifying the buffer in an io.Writer implementation\n\nWrite must not modify the slice data, even temporarily.\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA1023", + "Default": false + }, + { + "Name": "SA1024", + "Doc": "A string cutset contains duplicate characters\n\nThe strings.TrimLeft and strings.TrimRight functions take cutsets, not\nprefixes. A cutset is treated as a set of characters to remove from a\nstring. For example,\n\n strings.TrimLeft(\"42133word\", \"1234\")\n\nwill result in the string \"word\" – any characters that are 1, 2, 3 or\n4 are cut from the left of the string.\n\nIn order to remove one string from another, use strings.TrimPrefix instead.\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA1024", + "Default": false + }, + { + "Name": "SA1025", + "Doc": "It is not possible to use (*time.Timer).Reset's return value correctly\n\nAvailable since\n 2019.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA1025", + "Default": false + }, + { + "Name": "SA1026", + "Doc": "Cannot marshal channels or functions\n\nAvailable since\n 2019.2\n", + "URL": "https://staticcheck.dev/docs/checks/#SA1026", + "Default": false + }, + { + "Name": "SA1027", + "Doc": "Atomic access to 64-bit variable must be 64-bit aligned\n\nOn ARM, x86-32, and 32-bit MIPS, it is the caller's responsibility to\narrange for 64-bit alignment of 64-bit words accessed atomically. The\nfirst word in a variable or in an allocated struct, array, or slice\ncan be relied upon to be 64-bit aligned.\n\nYou can use the structlayout tool to inspect the alignment of fields\nin a struct.\n\nAvailable since\n 2019.2\n", + "URL": "https://staticcheck.dev/docs/checks/#SA1027", + "Default": false + }, + { + "Name": "SA1028", + "Doc": "sort.Slice can only be used on slices\n\nThe first argument of sort.Slice must be a slice.\n\nAvailable since\n 2020.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA1028", + "Default": false + }, + { + "Name": "SA1029", + "Doc": "Inappropriate key in call to context.WithValue\n\nThe provided key must be comparable and should not be\nof type string or any other built-in type to avoid collisions between\npackages using context. 
Users of WithValue should define their own\ntypes for keys.\n\nTo avoid allocating when assigning to an interface{},\ncontext keys often have concrete type struct{}. Alternatively,\nexported context key variables' static type should be a pointer or\ninterface.\n\nAvailable since\n 2020.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA1029", + "Default": false + }, + { + "Name": "SA1030", + "Doc": "Invalid argument in call to a strconv function\n\nThis check validates the format, number base and bit size arguments of\nthe various parsing and formatting functions in strconv.\n\nAvailable since\n 2021.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA1030", + "Default": false + }, + { + "Name": "SA1031", + "Doc": "Overlapping byte slices passed to an encoder\n\nIn an encoding function of the form Encode(dst, src), dst and\nsrc were found to reference the same memory. This can result in\nsrc bytes being overwritten before they are read, when the encoder\nwrites more than one byte per src byte.\n\nAvailable since\n 2024.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA1031", + "Default": false + }, + { + "Name": "SA1032", + "Doc": "Wrong order of arguments to errors.Is\n\nThe first argument of the function errors.Is is the error\nthat we have and the second argument is the error we're trying to match against.\nFor example:\n\n\tif errors.Is(err, io.EOF) { ... }\n\nThis check detects some cases where the two arguments have been swapped. It\nflags any calls where the first argument is referring to a package-level error\nvariable, such as\n\n\tif errors.Is(io.EOF, err) { /* this is wrong */ }\n\nAvailable since\n 2024.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA1032", + "Default": false + }, + { + "Name": "SA2001", + "Doc": "Empty critical section, did you mean to defer the unlock?\n\nEmpty critical sections of the kind\n\n mu.Lock()\n mu.Unlock()\n\nare very often a typo, and the following was intended instead:\n\n mu.Lock()\n defer mu.Unlock()\n\nDo note that sometimes empty critical sections can be useful, as a\nform of signaling to wait on another goroutine. Many times, there are\nsimpler ways of achieving the same effect. When that isn't the case,\nthe code should be amply commented to avoid confusion. Combining such\ncomments with a //lint:ignore directive can be used to suppress this\nrare false positive.\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA2001", + "Default": true + }, + { + "Name": "SA2002", + "Doc": "Called testing.T.FailNow or SkipNow in a goroutine, which isn't allowed\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA2002", + "Default": false + }, + { + "Name": "SA2003", + "Doc": "Deferred Lock right after locking, likely meant to defer Unlock instead\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA2003", + "Default": false + }, + { + "Name": "SA3000", + "Doc": "TestMain doesn't call os.Exit, hiding test failures\n\nTest executables (and in turn 'go test') exit with a non-zero status\ncode if any tests failed. When specifying your own TestMain function,\nit is your responsibility to arrange for this, by calling os.Exit with\nthe correct code. 
The correct code is returned by (*testing.M).Run, so\nthe usual way of implementing TestMain is to end it with\nos.Exit(m.Run()).\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA3000", + "Default": true + }, + { + "Name": "SA3001", + "Doc": "Assigning to b.N in benchmarks distorts the results\n\nThe testing package dynamically sets b.N to improve the reliability of\nbenchmarks and uses it in computations to determine the duration of a\nsingle operation. Benchmark code must not alter b.N as this would\nfalsify results.\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA3001", + "Default": true + }, + { + "Name": "SA4000", + "Doc": "Binary operator has identical expressions on both sides\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA4000", + "Default": true + }, + { + "Name": "SA4001", + "Doc": "\u0026*x gets simplified to x, it does not copy x\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA4001", + "Default": true }, { - "FileType": "Go", - "Lens": "regenerate_cgo", - "Title": "Re-generate cgo declarations", - "Doc": "\nThis codelens source annotates an `import \"C\"` declaration\nwith a command to re-run the [cgo\ncommand](https://pkg.go.dev/cmd/cgo) to regenerate the\ncorresponding Go declarations.\n\nUse this after editing the C code in comments attached to\nthe import, or in C header files included by it.\n", - "Default": true, - "Status": "" + "Name": "SA4003", + "Doc": "Comparing unsigned values against negative values is pointless\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA4003", + "Default": true }, { - "FileType": "Go", - "Lens": "test", - "Title": "Run tests and benchmarks", - "Doc": "\nThis codelens source annotates each `Test` and `Benchmark`\nfunction in a `*_test.go` file with a command to run it.\n\nThis source is off by default because VS Code has\na client-side custom UI for testing, and because progress\nnotifications are not a great UX for streamed test output.\nSee:\n- golang/go#67400 for a discussion of this feature.\n- https://github.com/joaotavora/eglot/discussions/1402\n for an alternative approach.\n", - "Default": false, - "Status": "" + "Name": "SA4004", + "Doc": "The loop exits unconditionally after one iteration\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA4004", + "Default": true }, { - "FileType": "go.mod", - "Lens": "run_govulncheck", - "Title": "Run govulncheck (legacy)", - "Doc": "\nThis codelens source annotates the `module` directive in a go.mod file\nwith a command to run Govulncheck asynchronously.\n\n[Govulncheck](https://go.dev/blog/vuln) is a static analysis tool that\ncomputes the set of functions reachable within your application, including\ndependencies; queries a database of known security vulnerabilities; and\nreports any potential problems it finds.\n", - "Default": false, - "Status": "experimental" + "Name": "SA4005", + "Doc": "Field assignment that will never be observed. 
Did you mean to use a pointer receiver?\n\nAvailable since\n 2021.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA4005", + "Default": false }, { - "FileType": "go.mod", - "Lens": "tidy", - "Title": "Tidy go.mod file", - "Doc": "\nThis codelens source annotates the `module` directive in a\ngo.mod file with a command to run [`go mod\ntidy`](https://go.dev/ref/mod#go-mod-tidy), which ensures\nthat the go.mod file matches the source code in the module.\n", - "Default": true, - "Status": "" + "Name": "SA4006", + "Doc": "A value assigned to a variable is never read before being overwritten. Forgotten error check or dead code?\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA4006", + "Default": false }, { - "FileType": "go.mod", - "Lens": "upgrade_dependency", - "Title": "Update dependencies", - "Doc": "\nThis codelens source annotates the `module` directive in a\ngo.mod file with commands to:\n\n- check for available upgrades,\n- upgrade direct dependencies, and\n- upgrade all dependencies transitively.\n", - "Default": true, - "Status": "" + "Name": "SA4008", + "Doc": "The variable in the loop condition never changes, are you incrementing the wrong variable?\n\nFor example:\n\n\tfor i := 0; i \u003c 10; j++ { ... }\n\nThis may also occur when a loop can only execute once because of unconditional\ncontrol flow that terminates the loop. For example, when a loop body contains an\nunconditional break, return, or panic:\n\n\tfunc f() {\n\t\tpanic(\"oops\")\n\t}\n\tfunc g() {\n\t\tfor i := 0; i \u003c 10; i++ {\n\t\t\t// f unconditionally calls panic, which means \"i\" is\n\t\t\t// never incremented.\n\t\t\tf()\n\t\t}\n\t}\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA4008", + "Default": false }, { - "FileType": "go.mod", - "Lens": "vendor", - "Title": "Update vendor directory", - "Doc": "\nThis codelens source annotates the `module` directive in a\ngo.mod file with a command to run [`go mod\nvendor`](https://go.dev/ref/mod#go-mod-vendor), which\ncreates or updates the directory named `vendor` in the\nmodule root so that it contains an up-to-date copy of all\nnecessary package dependencies.\n", - "Default": true, - "Status": "" + "Name": "SA4009", + "Doc": "A function argument is overwritten before its first use\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA4009", + "Default": false }, { - "FileType": "go.mod", - "Lens": "vulncheck", - "Title": "Run govulncheck", - "Doc": "\nThis codelens source annotates the `module` directive in a go.mod file\nwith a command to run govulncheck synchronously.\n\n[Govulncheck](https://go.dev/blog/vuln) is a static analysis tool that\ncomputes the set of functions reachable within your application, including\ndependencies; queries a database of known security vulnerabilities; and\nreports any potential problems it finds.\n", - "Default": false, - "Status": "experimental" - } - ], - "Analyzers": [ + "Name": "SA4010", + "Doc": "The result of append will never be observed anywhere\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA4010", + "Default": false + }, + { + "Name": "SA4011", + "Doc": "Break statement with no effect. 
Did you mean to break out of an outer loop?\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA4011", + "Default": true + }, + { + "Name": "SA4012", + "Doc": "Comparing a value against NaN even though no value is equal to NaN\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA4012", + "Default": false + }, + { + "Name": "SA4013", + "Doc": "Negating a boolean twice (!!b) is the same as writing b. This is either redundant, or a typo.\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA4013", + "Default": true + }, + { + "Name": "SA4014", + "Doc": "An if/else if chain has repeated conditions and no side-effects; if the condition didn't match the first time, it won't match the second time, either\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA4014", + "Default": true + }, + { + "Name": "SA4015", + "Doc": "Calling functions like math.Ceil on floats converted from integers doesn't do anything useful\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA4015", + "Default": false + }, + { + "Name": "SA4016", + "Doc": "Certain bitwise operations, such as x ^ 0, do not do anything useful\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA4016", + "Default": true + }, + { + "Name": "SA4017", + "Doc": "Discarding the return values of a function without side effects, making the call pointless\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA4017", + "Default": false + }, + { + "Name": "SA4018", + "Doc": "Self-assignment of variables\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA4018", + "Default": false + }, + { + "Name": "SA4019", + "Doc": "Multiple, identical build constraints in the same file\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA4019", + "Default": true + }, + { + "Name": "SA4020", + "Doc": "Unreachable case clause in a type switch\n\nIn a type switch like the following\n\n type T struct{}\n func (T) Read(b []byte) (int, error) { return 0, nil }\n\n var v interface{} = T{}\n\n switch v.(type) {\n case io.Reader:\n // ...\n case T:\n // unreachable\n }\n\nthe second case clause can never be reached because T implements\nio.Reader and case clauses are evaluated in source order.\n\nAnother example:\n\n type T struct{}\n func (T) Read(b []byte) (int, error) { return 0, nil }\n func (T) Close() error { return nil }\n\n var v interface{} = T{}\n\n switch v.(type) {\n case io.Reader:\n // ...\n case io.ReadCloser:\n // unreachable\n }\n\nEven though T has a Close method and thus implements io.ReadCloser,\nio.Reader will always match first. The method set of io.Reader is a\nsubset of io.ReadCloser. Thus it is impossible to match the second\ncase without matching the first case.\n\n\nStructurally equivalent interfaces\n\nA special case of the previous example are structurally identical\ninterfaces. 
Given these declarations\n\n type T error\n type V error\n\n func doSomething() error {\n err, ok := doAnotherThing()\n if ok {\n return T(err)\n }\n\n return U(err)\n }\n\nthe following type switch will have an unreachable case clause:\n\n switch doSomething().(type) {\n case T:\n // ...\n case V:\n // unreachable\n }\n\nT will always match before V because they are structurally equivalent\nand therefore doSomething()'s return value implements both.\n\nAvailable since\n 2019.2\n", + "URL": "https://staticcheck.dev/docs/checks/#SA4020", + "Default": true + }, + { + "Name": "SA4022", + "Doc": "Comparing the address of a variable against nil\n\nCode such as 'if \u0026x == nil' is meaningless, because taking the address of a variable always yields a non-nil pointer.\n\nAvailable since\n 2020.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA4022", + "Default": true + }, + { + "Name": "SA4023", + "Doc": "Impossible comparison of interface value with untyped nil\n\nUnder the covers, interfaces are implemented as two elements, a\ntype T and a value V. V is a concrete value such as an int,\nstruct or pointer, never an interface itself, and has type T. For\ninstance, if we store the int value 3 in an interface, the\nresulting interface value has, schematically, (T=int, V=3). The\nvalue V is also known as the interface's dynamic value, since a\ngiven interface variable might hold different values V (and\ncorresponding types T) during the execution of the program.\n\nAn interface value is nil only if the V and T are both\nunset, (T=nil, V is not set), In particular, a nil interface will\nalways hold a nil type. If we store a nil pointer of type *int\ninside an interface value, the inner type will be *int regardless\nof the value of the pointer: (T=*int, V=nil). Such an interface\nvalue will therefore be non-nil even when the pointer value V\ninside is nil.\n\nThis situation can be confusing, and arises when a nil value is\nstored inside an interface value such as an error return:\n\n func returnsError() error {\n var p *MyError = nil\n if bad() {\n p = ErrBad\n }\n return p // Will always return a non-nil error.\n }\n\nIf all goes well, the function returns a nil p, so the return\nvalue is an error interface value holding (T=*MyError, V=nil).\nThis means that if the caller compares the returned error to nil,\nit will always look as if there was an error even if nothing bad\nhappened. To return a proper nil error to the caller, the\nfunction must return an explicit nil:\n\n func returnsError() error {\n if bad() {\n return ErrBad\n }\n return nil\n }\n\nIt's a good idea for functions that return errors always to use\nthe error type in their signature (as we did above) rather than a\nconcrete type such as *MyError, to help guarantee the error is\ncreated correctly. As an example, os.Open returns an error even\nthough, if not nil, it's always of concrete type *os.PathError.\n\nSimilar situations to those described here can arise whenever\ninterfaces are used. 
Just keep in mind that if any concrete value\nhas been stored in the interface, the interface will not be nil.\nFor more information, see The Laws of\nReflection at https://golang.org/doc/articles/laws_of_reflection.html.\n\nThis text has been copied from\nhttps://golang.org/doc/faq#nil_error, licensed under the Creative\nCommons Attribution 3.0 License.\n\nAvailable since\n 2020.2\n", + "URL": "https://staticcheck.dev/docs/checks/#SA4023", + "Default": false + }, + { + "Name": "SA4024", + "Doc": "Checking for impossible return value from a builtin function\n\nReturn values of the len and cap builtins cannot be negative.\n\nSee https://golang.org/pkg/builtin/#len and https://golang.org/pkg/builtin/#cap.\n\nExample:\n\n if len(slice) \u003c 0 {\n fmt.Println(\"unreachable code\")\n }\n\nAvailable since\n 2021.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA4024", + "Default": true + }, + { + "Name": "SA4025", + "Doc": "Integer division of literals that results in zero\n\nWhen dividing two integer constants, the result will\nalso be an integer. Thus, a division such as 2 / 3 results in 0.\nThis is true for all of the following examples:\n\n\t_ = 2 / 3\n\tconst _ = 2 / 3\n\tconst _ float64 = 2 / 3\n\t_ = float64(2 / 3)\n\nStaticcheck will flag such divisions if both sides of the division are\ninteger literals, as it is highly unlikely that the division was\nintended to truncate to zero. Staticcheck will not flag integer\ndivision involving named constants, to avoid noisy positives.\n\nAvailable since\n 2021.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA4025", + "Default": true + }, + { + "Name": "SA4026", + "Doc": "Go constants cannot express negative zero\n\nIn IEEE 754 floating point math, zero has a sign and can be positive\nor negative. This can be useful in certain numerical code.\n\nGo constants, however, cannot express negative zero. This means that\nthe literals -0.0 and 0.0 have the same ideal value (zero) and\nwill both represent positive zero at runtime.\n\nTo explicitly and reliably create a negative zero, you can use the\nmath.Copysign function: math.Copysign(0, -1).\n\nAvailable since\n 2021.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA4026", + "Default": true + }, + { + "Name": "SA4027", + "Doc": "(*net/url.URL).Query returns a copy, modifying it doesn't change the URL\n\n(*net/url.URL).Query parses the current value of net/url.URL.RawQuery\nand returns it as a map of type net/url.Values. Subsequent changes to\nthis map will not affect the URL unless the map gets encoded and\nassigned to the URL's RawQuery.\n\nAs a consequence, the following code pattern is an expensive no-op:\nu.Query().Add(key, value).\n\nAvailable since\n 2021.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA4027", + "Default": true + }, + { + "Name": "SA4028", + "Doc": "x % 1 is always zero\n\nAvailable since\n 2022.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA4028", + "Default": true + }, + { + "Name": "SA4029", + "Doc": "Ineffective attempt at sorting slice\n\nsort.Float64Slice, sort.IntSlice, and sort.StringSlice are\ntypes, not functions. Doing x = sort.StringSlice(x) does nothing,\nespecially not sort any values. 
The correct usage is\nsort.Sort(sort.StringSlice(x)) or sort.StringSlice(x).Sort(),\nbut there are more convenient helpers, namely sort.Float64s,\nsort.Ints, and sort.Strings.\n\nAvailable since\n 2022.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA4029", + "Default": true + }, + { + "Name": "SA4030", + "Doc": "Ineffective attempt at generating random number\n\nFunctions in the math/rand package that accept upper limits, such\nas Intn, generate random numbers in the half-open interval [0,n). In\nother words, the generated numbers will be \u003e= 0 and \u003c n – they\ndon't include n. rand.Intn(1) therefore doesn't generate 0\nor 1, it always generates 0.\n\nAvailable since\n 2022.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA4030", + "Default": true + }, + { + "Name": "SA4031", + "Doc": "Checking never-nil value against nil\n\nAvailable since\n 2022.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA4031", + "Default": false + }, + { + "Name": "SA4032", + "Doc": "Comparing runtime.GOOS or runtime.GOARCH against impossible value\n\nAvailable since\n 2024.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA4032", + "Default": true + }, + { + "Name": "SA5000", + "Doc": "Assignment to nil map\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA5000", + "Default": false + }, + { + "Name": "SA5001", + "Doc": "Deferring Close before checking for a possible error\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA5001", + "Default": true + }, + { + "Name": "SA5002", + "Doc": "The empty for loop ('for {}') spins and can block the scheduler\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA5002", + "Default": false + }, + { + "Name": "SA5003", + "Doc": "Defers in infinite loops will never execute\n\nDefers are scoped to the surrounding function, not the surrounding\nblock. In a function that never returns, i.e. one containing an\ninfinite loop, defers will never execute.\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA5003", + "Default": true + }, + { + "Name": "SA5004", + "Doc": "'for { select { ...' with an empty default branch spins\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA5004", + "Default": true + }, + { + "Name": "SA5005", + "Doc": "The finalizer references the finalized object, preventing garbage collection\n\nA finalizer is a function associated with an object that runs when the\ngarbage collector is ready to collect said object, that is when the\nobject is no longer referenced by anything.\n\nIf the finalizer references the object, however, it will always remain\nas the final reference to that object, preventing the garbage\ncollector from collecting the object. The finalizer will never run,\nand the object will never be collected, leading to a memory leak. That\nis why the finalizer should instead use its first argument to operate\non the object. That way, the number of references can temporarily go\nto zero before the object is being passed to the finalizer.\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA5005", + "Default": false + }, + { + "Name": "SA5007", + "Doc": "Infinite recursive call\n\nA function that calls itself recursively needs to have an exit\ncondition. Otherwise it will recurse forever, until the system runs\nout of memory.\n\nThis issue can be caused by simple bugs such as forgetting to add an\nexit condition. It can also happen \"on purpose\". 
Some languages have\ntail call optimization which makes certain infinite recursive calls\nsafe to use. Go, however, does not implement TCO, and as such a loop\nshould be used instead.\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA5007", + "Default": false + }, + { + "Name": "SA5008", + "Doc": "Invalid struct tag\n\nAvailable since\n 2019.2\n", + "URL": "https://staticcheck.dev/docs/checks/#SA5008", + "Default": true + }, + { + "Name": "SA5010", + "Doc": "Impossible type assertion\n\nSome type assertions can be statically proven to be\nimpossible. This is the case when the method sets of both\narguments of the type assertion conflict with each other, for\nexample by containing the same method with different\nsignatures.\n\nThe Go compiler already applies this check when asserting from an\ninterface value to a concrete type. If the concrete type misses\nmethods from the interface, or if function signatures don't match,\nthen the type assertion can never succeed.\n\nThis check applies the same logic when asserting from one interface to\nanother. If both interface types contain the same method but with\ndifferent signatures, then the type assertion can never succeed,\neither.\n\nAvailable since\n 2020.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA5010", + "Default": false + }, + { + "Name": "SA5011", + "Doc": "Possible nil pointer dereference\n\nA pointer is being dereferenced unconditionally, while\nalso being checked against nil in another place. This suggests that\nthe pointer may be nil and dereferencing it may panic. This is\ncommonly a result of improperly ordered code or missing return\nstatements. Consider the following examples:\n\n func fn(x *int) {\n fmt.Println(*x)\n\n // This nil check is equally important for the previous dereference\n if x != nil {\n foo(*x)\n }\n }\n\n func TestFoo(t *testing.T) {\n x := compute()\n if x == nil {\n t.Errorf(\"nil pointer received\")\n }\n\n // t.Errorf does not abort the test, so if x is nil, the next line will panic.\n foo(*x)\n }\n\nStaticcheck tries to deduce which functions abort control flow.\nFor example, it is aware that a function will not continue\nexecution after a call to panic or log.Fatal. However, sometimes\nthis detection fails, in particular in the presence of\nconditionals. Consider the following example:\n\n func Log(msg string, level int) {\n fmt.Println(msg)\n if level == levelFatal {\n os.Exit(1)\n }\n }\n\n func Fatal(msg string) {\n Log(msg, levelFatal)\n }\n\n func fn(x *int) {\n if x == nil {\n Fatal(\"unexpected nil pointer\")\n }\n fmt.Println(*x)\n }\n\nStaticcheck will flag the dereference of x, even though it is perfectly\nsafe. Staticcheck is not able to deduce that a call to\nFatal will exit the program. For the time being, the easiest\nworkaround is to modify the definition of Fatal like so:\n\n func Fatal(msg string) {\n Log(msg, levelFatal)\n panic(\"unreachable\")\n }\n\nWe also hard-code functions from common logging packages such as\nlogrus. Please file an issue if we're missing support for a\npopular package.\n\nAvailable since\n 2020.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA5011", + "Default": false + }, + { + "Name": "SA5012", + "Doc": "Passing odd-sized slice to function expecting even size\n\nSome functions that take slices as parameters expect the slices to have an even number of elements. \nOften, these functions treat elements in a slice as pairs. 
\nFor example, strings.NewReplacer takes pairs of old and new strings, \nand calling it with an odd number of elements would be an error.\n\nAvailable since\n 2020.2\n", + "URL": "https://staticcheck.dev/docs/checks/#SA5012", + "Default": false + }, + { + "Name": "SA6000", + "Doc": "Using regexp.Match or related in a loop, should use regexp.Compile\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA6000", + "Default": false + }, + { + "Name": "SA6001", + "Doc": "Missing an optimization opportunity when indexing maps by byte slices\n\nMap keys must be comparable, which precludes the use of byte slices.\nThis usually leads to using string keys and converting byte slices to\nstrings.\n\nNormally, a conversion of a byte slice to a string needs to copy the data and\ncauses allocations. The compiler, however, recognizes m[string(b)] and\nuses the data of b directly, without copying it, because it knows that\nthe data can't change during the map lookup. This leads to the\ncounter-intuitive situation that\n\n k := string(b)\n println(m[k])\n println(m[k])\n\nwill be less efficient than\n\n println(m[string(b)])\n println(m[string(b)])\n\nbecause the first version needs to copy and allocate, while the second\none does not.\n\nFor some history on this optimization, check out commit\nf5f5a8b6209f84961687d993b93ea0d397f5d5bf in the Go repository.\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA6001", + "Default": false + }, + { + "Name": "SA6002", + "Doc": "Storing non-pointer values in sync.Pool allocates memory\n\nA sync.Pool is used to avoid unnecessary allocations and reduce the\namount of work the garbage collector has to do.\n\nWhen passing a value that is not a pointer to a function that accepts\nan interface, the value needs to be placed on the heap, which means an\nadditional allocation. Slices are a common thing to put in sync.Pools,\nand they're structs with 3 fields (length, capacity, and a pointer to\nan array). In order to avoid the extra allocation, one should store a\npointer to the slice instead.\n\nSee the comments on https://go-review.googlesource.com/c/go/+/24371\nthat discuss this problem.\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA6002", + "Default": false + }, + { + "Name": "SA6003", + "Doc": "Converting a string to a slice of runes before ranging over it\n\nYou may want to loop over the runes in a string. Instead of converting\nthe string to a slice of runes and looping over that, you can loop\nover the string itself. That is,\n\n for _, r := range s {}\n\nand\n\n for _, r := range []rune(s) {}\n\nwill yield the same values. The first version, however, will be faster\nand avoid unnecessary memory allocations.\n\nDo note that if you are interested in the indices, ranging over a\nstring and over a slice of runes will yield different indices. The\nfirst one yields byte offsets, while the second one yields indices in\nthe slice of runes.\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA6003", + "Default": false + }, + { + "Name": "SA6005", + "Doc": "Inefficient string comparison with strings.ToLower or strings.ToUpper\n\nConverting two strings to the same case and comparing them like so\n\n if strings.ToLower(s1) == strings.ToLower(s2) {\n ...\n }\n\nis significantly more expensive than comparing them with\nstrings.EqualFold(s1, s2). 
This is due to memory usage as well as\ncomputational complexity.\n\nstrings.ToLower will have to allocate memory for the new strings, as\nwell as convert both strings fully, even if they differ on the very\nfirst byte. strings.EqualFold, on the other hand, compares the strings\none character at a time. It doesn't need to create two intermediate\nstrings and can return as soon as the first non-matching character has\nbeen found.\n\nFor a more in-depth explanation of this issue, see\nhttps://blog.digitalocean.com/how-to-efficiently-compare-strings-in-go/\n\nAvailable since\n 2019.2\n", + "URL": "https://staticcheck.dev/docs/checks/#SA6005", + "Default": true + }, + { + "Name": "SA6006", + "Doc": "Using io.WriteString to write []byte\n\nUsing io.WriteString to write a slice of bytes, as in\n\n io.WriteString(w, string(b))\n\nis both unnecessary and inefficient. Converting from []byte to string\nhas to allocate and copy the data, and we could simply use w.Write(b)\ninstead.\n\nAvailable since\n 2024.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA6006", + "Default": true + }, + { + "Name": "SA9001", + "Doc": "Defers in range loops may not run when you expect them to\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA9001", + "Default": false + }, + { + "Name": "SA9002", + "Doc": "Using a non-octal os.FileMode that looks like it was meant to be in octal.\n\nAvailable since\n 2017.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA9002", + "Default": true + }, + { + "Name": "SA9003", + "Doc": "Empty body in an if or else branch\n\nAvailable since\n 2017.1, non-default\n", + "URL": "https://staticcheck.dev/docs/checks/#SA9003", + "Default": false + }, + { + "Name": "SA9004", + "Doc": "Only the first constant has an explicit type\n\nIn a constant declaration such as the following:\n\n const (\n First byte = 1\n Second = 2\n )\n\nthe constant Second does not have the same type as the constant First.\nThis construct shouldn't be confused with\n\n const (\n First byte = iota\n Second\n )\n\nwhere First and Second do indeed have the same type. The type is only\npassed on when no explicit value is assigned to the constant.\n\nWhen declaring enumerations with explicit values it is therefore\nimportant not to write\n\n const (\n EnumFirst EnumType = 1\n EnumSecond = 2\n EnumThird = 3\n )\n\nThis discrepancy in types can cause various confusing behaviors and\nbugs.\n\n\nWrong type in variable declarations\n\nThe most obvious issue with such incorrect enumerations expresses\nitself as a compile error:\n\n package pkg\n\n const (\n EnumFirst uint8 = 1\n EnumSecond = 2\n )\n\n func fn(useFirst bool) {\n x := EnumSecond\n if useFirst {\n x = EnumFirst\n }\n }\n\nfails to compile with\n\n ./const.go:11:5: cannot use EnumFirst (type uint8) as type int in assignment\n\n\nLosing method sets\n\nA more subtle issue occurs with types that have methods and optional\ninterfaces. 
Consider the following:\n\n package main\n\n import \"fmt\"\n\n type Enum int\n\n func (e Enum) String() string {\n return \"an enum\"\n }\n\n const (\n EnumFirst Enum = 1\n EnumSecond = 2\n )\n\n func main() {\n fmt.Println(EnumFirst)\n fmt.Println(EnumSecond)\n }\n\nThis code will output\n\n an enum\n 2\n\nas EnumSecond has no explicit type, and thus defaults to int.\n\nAvailable since\n 2019.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA9004", + "Default": true + }, + { + "Name": "SA9005", + "Doc": "Trying to marshal a struct with no public fields nor custom marshaling\n\nThe encoding/json and encoding/xml packages only operate on exported\nfields in structs, not unexported ones. It is usually an error to try\nto (un)marshal structs that only consist of unexported fields.\n\nThis check will not flag calls involving types that define custom\nmarshaling behavior, e.g. via MarshalJSON methods. It will also not\nflag empty structs.\n\nAvailable since\n 2019.2\n", + "URL": "https://staticcheck.dev/docs/checks/#SA9005", + "Default": false + }, + { + "Name": "SA9006", + "Doc": "Dubious bit shifting of a fixed size integer value\n\nBit shifting a value past its size will always clear the value.\n\nFor instance:\n\n v := int8(42)\n v \u003e\u003e= 8\n\nwill always result in 0.\n\nThis check flags bit shifting operations on fixed size integer values only.\nThat is, int, uint and uintptr are never flagged to avoid potential false\npositives in somewhat exotic but valid bit twiddling tricks:\n\n // Clear any value above 32 bits if integers are more than 32 bits.\n func f(i int) int {\n v := i \u003e\u003e 32\n v = v \u003c\u003c 32\n return i-v\n }\n\nAvailable since\n 2020.2\n", + "URL": "https://staticcheck.dev/docs/checks/#SA9006", + "Default": true + }, + { + "Name": "SA9007", + "Doc": "Deleting a directory that shouldn't be deleted\n\nIt is virtually never correct to delete system directories such as\n/tmp or the user's home directory. However, it can be fairly easy to\ndo by mistake, for example by mistakenly using os.TempDir instead\nof ioutil.TempDir, or by forgetting to add a suffix to the result\nof os.UserHomeDir.\n\nWriting\n\n d := os.TempDir()\n defer os.RemoveAll(d)\n\nin your unit tests will have a devastating effect on the stability of your system.\n\nThis check flags attempts at deleting the following directories:\n\n- os.TempDir\n- os.UserCacheDir\n- os.UserConfigDir\n- os.UserHomeDir\n\nAvailable since\n 2022.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA9007", + "Default": false + }, + { + "Name": "SA9008", + "Doc": "else branch of a type assertion is probably not reading the right value\n\nWhen declaring variables as part of an if statement (like in 'if\nfoo := ...; foo {'), the same variables will also be in the scope of\nthe else branch. This means that in the following example\n\n if x, ok := x.(int); ok {\n // ...\n } else {\n fmt.Printf(\"unexpected type %T\", x)\n }\n\nx in the else branch will refer to the x from x, ok\n:=; it will not refer to the x that is being type-asserted. 
The\nresult of a failed type assertion is the zero value of the type that\nis being asserted to, so x in the else branch will always have the\nvalue 0 and the type int.\n\nAvailable since\n 2022.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA9008", + "Default": false + }, + { + "Name": "SA9009", + "Doc": "Ineffectual Go compiler directive\n\nA potential Go compiler directive was found, but is ineffectual as it begins\nwith whitespace.\n\nAvailable since\n 2024.1\n", + "URL": "https://staticcheck.dev/docs/checks/#SA9009", + "Default": true + }, + { + "Name": "ST1000", + "Doc": "Incorrect or missing package comment\n\nPackages must have a package comment that is formatted according to\nthe guidelines laid out in\nhttps://go.dev/wiki/CodeReviewComments#package-comments.\n\nAvailable since\n 2019.1, non-default\n", + "URL": "https://staticcheck.dev/docs/checks/#ST1000", + "Default": false + }, + { + "Name": "ST1001", + "Doc": "Dot imports are discouraged\n\nDot imports that aren't in external test packages are discouraged.\n\nThe dot_import_whitelist option can be used to whitelist certain\nimports.\n\nQuoting Go Code Review Comments:\n\n\u003e The import . form can be useful in tests that, due to circular\n\u003e dependencies, cannot be made part of the package being tested:\n\u003e \n\u003e package foo_test\n\u003e \n\u003e import (\n\u003e \"bar/testutil\" // also imports \"foo\"\n\u003e . \"foo\"\n\u003e )\n\u003e \n\u003e In this case, the test file cannot be in package foo because it\n\u003e uses bar/testutil, which imports foo. So we use the import .\n\u003e form to let the file pretend to be part of package foo even though\n\u003e it is not. Except for this one case, do not use import . in your\n\u003e programs. It makes the programs much harder to read because it is\n\u003e unclear whether a name like Quux is a top-level identifier in the\n\u003e current package or in an imported package.\n\nAvailable since\n 2019.1\n\nOptions\n dot_import_whitelist\n", + "URL": "https://staticcheck.dev/docs/checks/#ST1001", + "Default": false + }, + { + "Name": "ST1003", + "Doc": "Poorly chosen identifier\n\nIdentifiers, such as variable and package names, follow certain rules.\n\nSee the following links for details:\n\n- https://go.dev/doc/effective_go#package-names\n- https://go.dev/doc/effective_go#mixed-caps\n- https://go.dev/wiki/CodeReviewComments#initialisms\n- https://go.dev/wiki/CodeReviewComments#variable-names\n\nAvailable since\n 2019.1, non-default\n\nOptions\n initialisms\n", + "URL": "https://staticcheck.dev/docs/checks/#ST1003", + "Default": false + }, + { + "Name": "ST1005", + "Doc": "Incorrectly formatted error string\n\nError strings follow a set of guidelines to ensure uniformity and good\ncomposability.\n\nQuoting Go Code Review Comments:\n\n\u003e Error strings should not be capitalized (unless beginning with\n\u003e proper nouns or acronyms) or end with punctuation, since they are\n\u003e usually printed following other context. 
That is, use\n\u003e fmt.Errorf(\"something bad\") not fmt.Errorf(\"Something bad\"), so\n\u003e that log.Printf(\"Reading %s: %v\", filename, err) formats without a\n\u003e spurious capital letter mid-message.\n\nAvailable since\n 2019.1\n", + "URL": "https://staticcheck.dev/docs/checks/#ST1005", + "Default": false + }, + { + "Name": "ST1006", + "Doc": "Poorly chosen receiver name\n\nQuoting Go Code Review Comments:\n\n\u003e The name of a method's receiver should be a reflection of its\n\u003e identity; often a one or two letter abbreviation of its type\n\u003e suffices (such as \"c\" or \"cl\" for \"Client\"). Don't use generic\n\u003e names such as \"me\", \"this\" or \"self\", identifiers typical of\n\u003e object-oriented languages that place more emphasis on methods as\n\u003e opposed to functions. The name need not be as descriptive as that\n\u003e of a method argument, as its role is obvious and serves no\n\u003e documentary purpose. It can be very short as it will appear on\n\u003e almost every line of every method of the type; familiarity admits\n\u003e brevity. Be consistent, too: if you call the receiver \"c\" in one\n\u003e method, don't call it \"cl\" in another.\n\nAvailable since\n 2019.1\n", + "URL": "https://staticcheck.dev/docs/checks/#ST1006", + "Default": false + }, + { + "Name": "ST1008", + "Doc": "A function's error value should be its last return value\n\nA function's error value should be its last return value.\n\nAvailable since\n 2019.1\n", + "URL": "https://staticcheck.dev/docs/checks/#ST1008", + "Default": false + }, + { + "Name": "ST1011", + "Doc": "Poorly chosen name for variable of type time.Duration\n\ntime.Duration values represent an amount of time, which is represented\nas a count of nanoseconds. An expression like 5 * time.Microsecond\nyields the value 5000. It is therefore not appropriate to suffix a\nvariable of type time.Duration with any time unit, such as Msec or\nMilli.\n\nAvailable since\n 2019.1\n", + "URL": "https://staticcheck.dev/docs/checks/#ST1011", + "Default": false + }, + { + "Name": "ST1012", + "Doc": "Poorly chosen name for error variable\n\nError variables that are part of an API should be called errFoo or\nErrFoo.\n\nAvailable since\n 2019.1\n", + "URL": "https://staticcheck.dev/docs/checks/#ST1012", + "Default": false + }, + { + "Name": "ST1013", + "Doc": "Should use constants for HTTP error codes, not magic numbers\n\nHTTP has a tremendous number of status codes. While some of those are\nwell known (200, 400, 404, 500), most of them are not. The net/http\npackage provides constants for all status codes that are part of the\nvarious specifications. It is recommended to use these constants\ninstead of hard-coding magic numbers, to vastly improve the\nreadability of your code.\n\nAvailable since\n 2019.1\n\nOptions\n http_status_code_whitelist\n", + "URL": "https://staticcheck.dev/docs/checks/#ST1013", + "Default": false + }, + { + "Name": "ST1015", + "Doc": "A switch's default case should be the first or last case\n\nAvailable since\n 2019.1\n", + "URL": "https://staticcheck.dev/docs/checks/#ST1015", + "Default": false + }, + { + "Name": "ST1016", + "Doc": "Use consistent method receiver names\n\nAvailable since\n 2019.1, non-default\n", + "URL": "https://staticcheck.dev/docs/checks/#ST1016", + "Default": false + }, + { + "Name": "ST1017", + "Doc": "Don't use Yoda conditions\n\nYoda conditions are conditions of the kind 'if 42 == x', where the\nliteral is on the left side of the comparison. 
These are a common\nidiom in languages in which assignment is an expression, to avoid bugs\nof the kind 'if (x = 42)'. In Go, which doesn't allow for this kind of\nbug, we prefer the more idiomatic 'if x == 42'.\n\nAvailable since\n 2019.2\n", + "URL": "https://staticcheck.dev/docs/checks/#ST1017", + "Default": false + }, + { + "Name": "ST1018", + "Doc": "Avoid zero-width and control characters in string literals\n\nAvailable since\n 2019.2\n", + "URL": "https://staticcheck.dev/docs/checks/#ST1018", + "Default": false + }, + { + "Name": "ST1019", + "Doc": "Importing the same package multiple times\n\nGo allows importing the same package multiple times, as long as\ndifferent import aliases are being used. That is, the following\nbit of code is valid:\n\n import (\n \"fmt\"\n fumpt \"fmt\"\n format \"fmt\"\n _ \"fmt\"\n )\n\nHowever, this is very rarely done on purpose. Usually, it is a\nsign of code that got refactored, accidentally adding duplicate\nimport statements. It is also a rarely known feature, which may\ncontribute to confusion.\n\nDo note that sometimes, this feature may be used\nintentionally (see for example\nhttps://github.com/golang/go/commit/3409ce39bfd7584523b7a8c150a310cea92d879d)\n– if you want to allow this pattern in your code base, you're\nadvised to disable this check.\n\nAvailable since\n 2020.1\n", + "URL": "https://staticcheck.dev/docs/checks/#ST1019", + "Default": false + }, + { + "Name": "ST1020", + "Doc": "The documentation of an exported function should start with the function's name\n\nDoc comments work best as complete sentences, which\nallow a wide variety of automated presentations. The first sentence\nshould be a one-sentence summary that starts with the name being\ndeclared.\n\nIf every doc comment begins with the name of the item it describes,\nyou can use the doc subcommand of the go tool and run the output\nthrough grep.\n\nSee https://go.dev/doc/effective_go#commentary for more\ninformation on how to write good documentation.\n\nAvailable since\n 2020.1, non-default\n", + "URL": "https://staticcheck.dev/docs/checks/#ST1020", + "Default": false + }, + { + "Name": "ST1021", + "Doc": "The documentation of an exported type should start with type's name\n\nDoc comments work best as complete sentences, which\nallow a wide variety of automated presentations. The first sentence\nshould be a one-sentence summary that starts with the name being\ndeclared.\n\nIf every doc comment begins with the name of the item it describes,\nyou can use the doc subcommand of the go tool and run the output\nthrough grep.\n\nSee https://go.dev/doc/effective_go#commentary for more\ninformation on how to write good documentation.\n\nAvailable since\n 2020.1, non-default\n", + "URL": "https://staticcheck.dev/docs/checks/#ST1021", + "Default": false + }, + { + "Name": "ST1022", + "Doc": "The documentation of an exported variable or constant should start with variable's name\n\nDoc comments work best as complete sentences, which\nallow a wide variety of automated presentations. 
The first sentence\nshould be a one-sentence summary that starts with the name being\ndeclared.\n\nIf every doc comment begins with the name of the item it describes,\nyou can use the doc subcommand of the go tool and run the output\nthrough grep.\n\nSee https://go.dev/doc/effective_go#commentary for more\ninformation on how to write good documentation.\n\nAvailable since\n 2020.1, non-default\n", + "URL": "https://staticcheck.dev/docs/checks/#ST1022", + "Default": false + }, + { + "Name": "ST1023", + "Doc": "Redundant type in variable declaration\n\nAvailable since\n 2021.1, non-default\n", + "URL": "https://staticcheck.dev/docs/checks/#", + "Default": false + }, { "Name": "appends", "Doc": "check for missing values after append\n\nThis checker reports calls to append that pass\nno values to be appended to the slice.\n\n\ts := []string{\"a\", \"b\", \"c\"}\n\t_ = append(s)\n\nSuch calls are always no-ops and often indicate an\nunderlying mistake.", diff --git a/gopls/internal/doc/generate/generate.go b/gopls/internal/doc/generate/generate.go index 762fceeb4b9..9256d2ec835 100644 --- a/gopls/internal/doc/generate/generate.go +++ b/gopls/internal/doc/generate/generate.go @@ -136,7 +136,7 @@ func loadAPI() (*doc.API, error) { defaults := settings.DefaultOptions() api := &doc.API{ Options: map[string][]*doc.Option{}, - Analyzers: loadAnalyzers(settings.DefaultAnalyzers), // no staticcheck analyzers + Analyzers: loadAnalyzers(settings.AllAnalyzers, defaults), } api.Lenses, err = loadLenses(settingsPkg, defaults.Codelenses) @@ -505,20 +505,17 @@ func loadLenses(settingsPkg *packages.Package, defaults map[settings.CodeLensSou return lenses, nil } -func loadAnalyzers(m map[string]*settings.Analyzer) []*doc.Analyzer { - var sorted []string - for _, a := range m { - sorted = append(sorted, a.Analyzer().Name) - } - sort.Strings(sorted) +func loadAnalyzers(analyzers []*settings.Analyzer, defaults *settings.Options) []*doc.Analyzer { + slices.SortFunc(analyzers, func(x, y *settings.Analyzer) int { + return strings.Compare(x.Analyzer().Name, y.Analyzer().Name) + }) var json []*doc.Analyzer - for _, name := range sorted { - a := m[name] + for _, a := range analyzers { json = append(json, &doc.Analyzer{ Name: a.Analyzer().Name, Doc: a.Analyzer().Doc, URL: a.Analyzer().URL, - Default: a.EnabledByDefault(), + Default: a.Enabled(defaults), }) } return json diff --git a/gopls/internal/settings/analysis.go b/gopls/internal/settings/analysis.go index e914407fe6b..584bbd5f7bd 100644 --- a/gopls/internal/settings/analysis.go +++ b/gopls/internal/settings/analysis.go @@ -5,6 +5,8 @@ package settings import ( + "slices" + "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/appends" "golang.org/x/tools/go/analysis/passes/asmdecl" @@ -63,14 +65,18 @@ import ( "golang.org/x/tools/gopls/internal/analysis/yield" "golang.org/x/tools/gopls/internal/protocol" "golang.org/x/tools/internal/gofix" + "honnef.co/go/tools/analysis/lint" ) -// Analyzer augments a [analysis.Analyzer] with additional LSP configuration. +var AllAnalyzers = slices.Concat(DefaultAnalyzers, StaticcheckAnalyzers) + +// Analyzer augments an [analysis.Analyzer] with additional LSP configuration. // // Analyzers are immutable, since they are shared across multiple LSP sessions. 
type Analyzer struct { analyzer *analysis.Analyzer - nonDefault bool + staticcheck *lint.RawDocumentation // only for staticcheck analyzers + nonDefault bool // (sense is negated so we can mostly omit it) actionKinds []protocol.CodeActionKind severity protocol.DiagnosticSeverity tags []protocol.DiagnosticTag @@ -79,9 +85,28 @@ type Analyzer struct { // Analyzer returns the [analysis.Analyzer] that this Analyzer wraps. func (a *Analyzer) Analyzer() *analysis.Analyzer { return a.analyzer } -// EnabledByDefault reports whether the analyzer is enabled by default for all sessions. +// Enabled reports whether the analyzer is enabled by the options. // This value can be configured per-analysis in user settings. -func (a *Analyzer) EnabledByDefault() bool { return !a.nonDefault } +func (a *Analyzer) Enabled(o *Options) bool { + // An explicit setting by name takes precedence. + if v, found := o.Analyses[a.Analyzer().Name]; found { + return v + } + if a.staticcheck != nil { + // An explicit staticcheck={true,false} setting + // enables/disables all staticcheck analyzers. + if o.StaticcheckProvided { + return o.Staticcheck + } + // Respect staticcheck's off-by-default options too. + // (This applies to only a handful of analyzers.) + if a.staticcheck.NonDefault { + return false + } + } + // Respect gopls' default setting. + return !a.nonDefault +} // ActionKinds is the set of kinds of code action this analyzer produces. // @@ -126,108 +151,105 @@ func (a *Analyzer) Tags() []protocol.DiagnosticTag { return a.tags } // String returns the name of this analyzer. func (a *Analyzer) String() string { return a.analyzer.String() } -// DefaultAnalyzers holds the set of Analyzers available to all gopls sessions, -// independent of build version, keyed by analyzer name. -// -// It is the source from which gopls/doc/analyzers.md is generated. -var DefaultAnalyzers = make(map[string]*Analyzer) // initialized below - -func init() { +// DefaultAnalyzers holds the list of Analyzers available to all gopls +// sessions, independent of build version. It is the source from which +// gopls/doc/analyzers.md is generated. +var DefaultAnalyzers = []*Analyzer{ // See [Analyzer.Severity] for guidance on setting analyzer severity below. 
- analyzers := []*Analyzer{ - // The traditional vet suite: - {analyzer: appends.Analyzer}, - {analyzer: asmdecl.Analyzer}, - {analyzer: assign.Analyzer}, - {analyzer: atomic.Analyzer}, - {analyzer: bools.Analyzer}, - {analyzer: buildtag.Analyzer}, - {analyzer: cgocall.Analyzer}, - {analyzer: composite.Analyzer}, - {analyzer: copylock.Analyzer}, - {analyzer: defers.Analyzer}, - {analyzer: deprecated.Analyzer, severity: protocol.SeverityHint, tags: []protocol.DiagnosticTag{protocol.Deprecated}}, - {analyzer: directive.Analyzer}, - {analyzer: errorsas.Analyzer}, - {analyzer: framepointer.Analyzer}, - {analyzer: httpresponse.Analyzer}, - {analyzer: ifaceassert.Analyzer}, - {analyzer: loopclosure.Analyzer}, - {analyzer: lostcancel.Analyzer}, - {analyzer: nilfunc.Analyzer}, - {analyzer: printf.Analyzer}, - {analyzer: shift.Analyzer}, - {analyzer: sigchanyzer.Analyzer}, - {analyzer: slog.Analyzer}, - {analyzer: stdmethods.Analyzer}, - {analyzer: stdversion.Analyzer}, - {analyzer: stringintconv.Analyzer}, - {analyzer: structtag.Analyzer}, - {analyzer: testinggoroutine.Analyzer}, - {analyzer: tests.Analyzer}, - {analyzer: timeformat.Analyzer}, - {analyzer: unmarshal.Analyzer}, - {analyzer: unreachable.Analyzer}, - {analyzer: unsafeptr.Analyzer}, - {analyzer: unusedresult.Analyzer}, - - // not suitable for vet: - // - some (nilness, yield) use go/ssa; see #59714. - // - others don't meet the "frequency" criterion; - // see GOROOT/src/cmd/vet/README. - {analyzer: atomicalign.Analyzer}, - {analyzer: deepequalerrors.Analyzer}, - {analyzer: nilness.Analyzer}, // uses go/ssa - {analyzer: yield.Analyzer}, // uses go/ssa - {analyzer: sortslice.Analyzer}, - {analyzer: embeddirective.Analyzer}, - {analyzer: waitgroup.Analyzer}, // to appear in cmd/vet@go1.25 - {analyzer: hostport.Analyzer}, // to appear in cmd/vet@go1.25 - - // disabled due to high false positives - {analyzer: shadow.Analyzer, nonDefault: true}, // very noisy - // fieldalignment is not even off-by-default; see #67762. - - // simplifiers and modernizers - // - // These analyzers offer mere style fixes on correct code, - // thus they will never appear in cmd/vet and - // their severity level is "information". - // - // gofmt -s suite - { - analyzer: simplifycompositelit.Analyzer, - actionKinds: []protocol.CodeActionKind{protocol.SourceFixAll, protocol.QuickFix}, - severity: protocol.SeverityInformation, - }, - { - analyzer: simplifyrange.Analyzer, - actionKinds: []protocol.CodeActionKind{protocol.SourceFixAll, protocol.QuickFix}, - severity: protocol.SeverityInformation, - }, - { - analyzer: simplifyslice.Analyzer, - actionKinds: []protocol.CodeActionKind{protocol.SourceFixAll, protocol.QuickFix}, - severity: protocol.SeverityInformation, - }, - // other simplifiers - {analyzer: gofix.Analyzer, severity: protocol.SeverityHint}, - {analyzer: infertypeargs.Analyzer, severity: protocol.SeverityInformation}, - {analyzer: unusedparams.Analyzer, severity: protocol.SeverityInformation}, - {analyzer: unusedfunc.Analyzer, severity: protocol.SeverityInformation}, - {analyzer: unusedwrite.Analyzer, severity: protocol.SeverityInformation}, // uses go/ssa - {analyzer: modernize.Analyzer, severity: protocol.SeverityHint}, - - // type-error analyzers - // These analyzers enrich go/types errors with suggested fixes. - // Since they exist only to attach their fixes to type errors, their - // severity is irrelevant. 
- {analyzer: fillreturns.Analyzer}, - {analyzer: nonewvars.Analyzer}, - {analyzer: noresultvalues.Analyzer}, - {analyzer: unusedvariable.Analyzer}, - } - for _, analyzer := range analyzers { - DefaultAnalyzers[analyzer.analyzer.Name] = analyzer - } + + // The traditional vet suite: + {analyzer: appends.Analyzer}, + {analyzer: asmdecl.Analyzer}, + {analyzer: assign.Analyzer}, + {analyzer: atomic.Analyzer}, + {analyzer: bools.Analyzer}, + {analyzer: buildtag.Analyzer}, + {analyzer: cgocall.Analyzer}, + {analyzer: composite.Analyzer}, + {analyzer: copylock.Analyzer}, + {analyzer: defers.Analyzer}, + { + analyzer: deprecated.Analyzer, + severity: protocol.SeverityHint, + tags: []protocol.DiagnosticTag{protocol.Deprecated}, + }, + {analyzer: directive.Analyzer}, + {analyzer: errorsas.Analyzer}, + {analyzer: framepointer.Analyzer}, + {analyzer: httpresponse.Analyzer}, + {analyzer: ifaceassert.Analyzer}, + {analyzer: loopclosure.Analyzer}, + {analyzer: lostcancel.Analyzer}, + {analyzer: nilfunc.Analyzer}, + {analyzer: printf.Analyzer}, + {analyzer: shift.Analyzer}, + {analyzer: sigchanyzer.Analyzer}, + {analyzer: slog.Analyzer}, + {analyzer: stdmethods.Analyzer}, + {analyzer: stdversion.Analyzer}, + {analyzer: stringintconv.Analyzer}, + {analyzer: structtag.Analyzer}, + {analyzer: testinggoroutine.Analyzer}, + {analyzer: tests.Analyzer}, + {analyzer: timeformat.Analyzer}, + {analyzer: unmarshal.Analyzer}, + {analyzer: unreachable.Analyzer}, + {analyzer: unsafeptr.Analyzer}, + {analyzer: unusedresult.Analyzer}, + + // not suitable for vet: + // - some (nilness, yield) use go/ssa; see #59714. + // - others don't meet the "frequency" criterion; + // see GOROOT/src/cmd/vet/README. + {analyzer: atomicalign.Analyzer}, + {analyzer: deepequalerrors.Analyzer}, + {analyzer: nilness.Analyzer}, // uses go/ssa + {analyzer: yield.Analyzer}, // uses go/ssa + {analyzer: sortslice.Analyzer}, + {analyzer: embeddirective.Analyzer}, + {analyzer: waitgroup.Analyzer}, // to appear in cmd/vet@go1.25 + {analyzer: hostport.Analyzer}, // to appear in cmd/vet@go1.25 + + // disabled due to high false positives + {analyzer: shadow.Analyzer, nonDefault: true}, // very noisy + // fieldalignment is not even off-by-default; see #67762. + + // simplifiers and modernizers + // + // These analyzers offer mere style fixes on correct code, + // thus they will never appear in cmd/vet and + // their severity level is "information". + // + // gofmt -s suite + { + analyzer: simplifycompositelit.Analyzer, + actionKinds: []protocol.CodeActionKind{protocol.SourceFixAll, protocol.QuickFix}, + severity: protocol.SeverityInformation, + }, + { + analyzer: simplifyrange.Analyzer, + actionKinds: []protocol.CodeActionKind{protocol.SourceFixAll, protocol.QuickFix}, + severity: protocol.SeverityInformation, + }, + { + analyzer: simplifyslice.Analyzer, + actionKinds: []protocol.CodeActionKind{protocol.SourceFixAll, protocol.QuickFix}, + severity: protocol.SeverityInformation, + }, + // other simplifiers + {analyzer: gofix.Analyzer, severity: protocol.SeverityHint}, + {analyzer: infertypeargs.Analyzer, severity: protocol.SeverityInformation}, + {analyzer: unusedparams.Analyzer, severity: protocol.SeverityInformation}, + {analyzer: unusedfunc.Analyzer, severity: protocol.SeverityInformation}, + {analyzer: unusedwrite.Analyzer, severity: protocol.SeverityInformation}, // uses go/ssa + {analyzer: modernize.Analyzer, severity: protocol.SeverityHint}, + + // type-error analyzers + // These analyzers enrich go/types errors with suggested fixes. 
+	// Since they exist only to attach their fixes to type errors, their
+	// severity is irrelevant.
+	{analyzer: fillreturns.Analyzer},
+	{analyzer: nonewvars.Analyzer},
+	{analyzer: noresultvalues.Analyzer},
+	{analyzer: unusedvariable.Analyzer},
 }
diff --git a/gopls/internal/settings/settings.go b/gopls/internal/settings/settings.go
index a47a69b0296..8a694854edd 100644
--- a/gopls/internal/settings/settings.go
+++ b/gopls/internal/settings/settings.go
@@ -433,7 +433,8 @@ type FormattingOptions struct {
 	Gofumpt bool
 }
 
-// Note: DiagnosticOptions must be comparable with reflect.DeepEqual.
+// Note: DiagnosticOptions must be comparable with reflect.DeepEqual,
+// and frob-encodable (no interfaces).
 type DiagnosticOptions struct {
 	// Analyses specify analyses that the user would like to enable or disable.
 	// A map of the names of analysis passes that should be enabled/disabled.
@@ -452,10 +453,21 @@ type DiagnosticOptions struct {
 	// ```
 	Analyses map[string]bool
 
-	// Staticcheck enables additional analyses from staticcheck.io.
+	// Staticcheck configures the default set of analyses from staticcheck.io.
 	// These analyses are documented on
 	// [Staticcheck's website](https://staticcheck.io/docs/checks/).
-	Staticcheck bool `status:"experimental"`
+	//
+	// The "staticcheck" option has three values:
+	// - false: disable all staticcheck analyzers
+	// - true: enable all staticcheck analyzers
+	// - unset: enable a subset of staticcheck analyzers
+	//   selected by gopls maintainers for runtime efficiency
+	//   and analytic precision.
+	//
+	// Regardless of this setting, individual analyzers can be
+	// selectively enabled or disabled using the `analyses` setting.
+	Staticcheck bool `status:"experimental"`
+	StaticcheckProvided bool `status:"experimental"` // = "staticcheck" was explicitly provided
 
 	// Annotations specifies the various kinds of compiler
 	// optimization details that should be reported as diagnostics
@@ -1187,6 +1199,7 @@ func (o *Options) setOne(name string, value any) (applied []CounterPath, _ error
 		return counts, nil
 
 	case "staticcheck":
+		o.StaticcheckProvided = true
 		return setBool(&o.Staticcheck, value)
 
 	case "local":
diff --git a/gopls/internal/settings/staticcheck.go b/gopls/internal/settings/staticcheck.go
index 6e06e0b44ea..68e48819cfc 100644
--- a/gopls/internal/settings/staticcheck.go
+++ b/gopls/internal/settings/staticcheck.go
@@ -5,19 +5,183 @@ package settings
 
 import (
+	"fmt"
+	"log"
+
+	"golang.org/x/tools/go/analysis"
 	"golang.org/x/tools/gopls/internal/protocol"
 	"honnef.co/go/tools/analysis/lint"
 	"honnef.co/go/tools/quickfix"
+	"honnef.co/go/tools/quickfix/qf1001"
+	"honnef.co/go/tools/quickfix/qf1002"
+	"honnef.co/go/tools/quickfix/qf1003"
+	"honnef.co/go/tools/quickfix/qf1004"
+	"honnef.co/go/tools/quickfix/qf1005"
+	"honnef.co/go/tools/quickfix/qf1006"
+	"honnef.co/go/tools/quickfix/qf1007"
+	"honnef.co/go/tools/quickfix/qf1008"
+	"honnef.co/go/tools/quickfix/qf1009"
+	"honnef.co/go/tools/quickfix/qf1010"
+	"honnef.co/go/tools/quickfix/qf1011"
+	"honnef.co/go/tools/quickfix/qf1012"
 	"honnef.co/go/tools/simple"
+	"honnef.co/go/tools/simple/s1000"
+	"honnef.co/go/tools/simple/s1001"
+	"honnef.co/go/tools/simple/s1002"
+	"honnef.co/go/tools/simple/s1003"
+	"honnef.co/go/tools/simple/s1004"
+	"honnef.co/go/tools/simple/s1005"
+	"honnef.co/go/tools/simple/s1006"
+	"honnef.co/go/tools/simple/s1007"
+	"honnef.co/go/tools/simple/s1008"
+	"honnef.co/go/tools/simple/s1009"
+	"honnef.co/go/tools/simple/s1010"
+	"honnef.co/go/tools/simple/s1011"
+	"honnef.co/go/tools/simple/s1012"
+	
"honnef.co/go/tools/simple/s1016" + "honnef.co/go/tools/simple/s1017" + "honnef.co/go/tools/simple/s1018" + "honnef.co/go/tools/simple/s1019" + "honnef.co/go/tools/simple/s1020" + "honnef.co/go/tools/simple/s1021" + "honnef.co/go/tools/simple/s1023" + "honnef.co/go/tools/simple/s1024" + "honnef.co/go/tools/simple/s1025" + "honnef.co/go/tools/simple/s1028" + "honnef.co/go/tools/simple/s1029" + "honnef.co/go/tools/simple/s1030" + "honnef.co/go/tools/simple/s1031" + "honnef.co/go/tools/simple/s1032" + "honnef.co/go/tools/simple/s1033" + "honnef.co/go/tools/simple/s1034" + "honnef.co/go/tools/simple/s1035" + "honnef.co/go/tools/simple/s1036" + "honnef.co/go/tools/simple/s1037" + "honnef.co/go/tools/simple/s1038" + "honnef.co/go/tools/simple/s1039" + "honnef.co/go/tools/simple/s1040" "honnef.co/go/tools/staticcheck" + "honnef.co/go/tools/staticcheck/sa1000" + "honnef.co/go/tools/staticcheck/sa1001" + "honnef.co/go/tools/staticcheck/sa1002" + "honnef.co/go/tools/staticcheck/sa1003" + "honnef.co/go/tools/staticcheck/sa1004" + "honnef.co/go/tools/staticcheck/sa1005" + "honnef.co/go/tools/staticcheck/sa1006" + "honnef.co/go/tools/staticcheck/sa1007" + "honnef.co/go/tools/staticcheck/sa1008" + "honnef.co/go/tools/staticcheck/sa1010" + "honnef.co/go/tools/staticcheck/sa1011" + "honnef.co/go/tools/staticcheck/sa1012" + "honnef.co/go/tools/staticcheck/sa1013" + "honnef.co/go/tools/staticcheck/sa1014" + "honnef.co/go/tools/staticcheck/sa1015" + "honnef.co/go/tools/staticcheck/sa1016" + "honnef.co/go/tools/staticcheck/sa1017" + "honnef.co/go/tools/staticcheck/sa1018" + "honnef.co/go/tools/staticcheck/sa1019" + "honnef.co/go/tools/staticcheck/sa1020" + "honnef.co/go/tools/staticcheck/sa1021" + "honnef.co/go/tools/staticcheck/sa1023" + "honnef.co/go/tools/staticcheck/sa1024" + "honnef.co/go/tools/staticcheck/sa1025" + "honnef.co/go/tools/staticcheck/sa1026" + "honnef.co/go/tools/staticcheck/sa1027" + "honnef.co/go/tools/staticcheck/sa1028" + "honnef.co/go/tools/staticcheck/sa1029" + "honnef.co/go/tools/staticcheck/sa1030" + "honnef.co/go/tools/staticcheck/sa1031" + "honnef.co/go/tools/staticcheck/sa1032" + "honnef.co/go/tools/staticcheck/sa2000" + "honnef.co/go/tools/staticcheck/sa2001" + "honnef.co/go/tools/staticcheck/sa2002" + "honnef.co/go/tools/staticcheck/sa2003" + "honnef.co/go/tools/staticcheck/sa3000" + "honnef.co/go/tools/staticcheck/sa3001" + "honnef.co/go/tools/staticcheck/sa4000" + "honnef.co/go/tools/staticcheck/sa4001" + "honnef.co/go/tools/staticcheck/sa4003" + "honnef.co/go/tools/staticcheck/sa4004" + "honnef.co/go/tools/staticcheck/sa4005" + "honnef.co/go/tools/staticcheck/sa4006" + "honnef.co/go/tools/staticcheck/sa4008" + "honnef.co/go/tools/staticcheck/sa4009" + "honnef.co/go/tools/staticcheck/sa4010" + "honnef.co/go/tools/staticcheck/sa4011" + "honnef.co/go/tools/staticcheck/sa4012" + "honnef.co/go/tools/staticcheck/sa4013" + "honnef.co/go/tools/staticcheck/sa4014" + "honnef.co/go/tools/staticcheck/sa4015" + "honnef.co/go/tools/staticcheck/sa4016" + "honnef.co/go/tools/staticcheck/sa4017" + "honnef.co/go/tools/staticcheck/sa4018" + "honnef.co/go/tools/staticcheck/sa4019" + "honnef.co/go/tools/staticcheck/sa4020" + "honnef.co/go/tools/staticcheck/sa4021" + "honnef.co/go/tools/staticcheck/sa4022" + "honnef.co/go/tools/staticcheck/sa4023" + "honnef.co/go/tools/staticcheck/sa4024" + "honnef.co/go/tools/staticcheck/sa4025" + "honnef.co/go/tools/staticcheck/sa4026" + "honnef.co/go/tools/staticcheck/sa4027" + "honnef.co/go/tools/staticcheck/sa4028" + "honnef.co/go/tools/staticcheck/sa4029" + 
"honnef.co/go/tools/staticcheck/sa4030" + "honnef.co/go/tools/staticcheck/sa4031" + "honnef.co/go/tools/staticcheck/sa4032" + "honnef.co/go/tools/staticcheck/sa5000" + "honnef.co/go/tools/staticcheck/sa5001" + "honnef.co/go/tools/staticcheck/sa5002" + "honnef.co/go/tools/staticcheck/sa5003" + "honnef.co/go/tools/staticcheck/sa5004" + "honnef.co/go/tools/staticcheck/sa5005" + "honnef.co/go/tools/staticcheck/sa5007" + "honnef.co/go/tools/staticcheck/sa5008" + "honnef.co/go/tools/staticcheck/sa5009" + "honnef.co/go/tools/staticcheck/sa5010" + "honnef.co/go/tools/staticcheck/sa5011" + "honnef.co/go/tools/staticcheck/sa5012" + "honnef.co/go/tools/staticcheck/sa6000" + "honnef.co/go/tools/staticcheck/sa6001" + "honnef.co/go/tools/staticcheck/sa6002" + "honnef.co/go/tools/staticcheck/sa6003" + "honnef.co/go/tools/staticcheck/sa6005" + "honnef.co/go/tools/staticcheck/sa6006" + "honnef.co/go/tools/staticcheck/sa9001" + "honnef.co/go/tools/staticcheck/sa9002" + "honnef.co/go/tools/staticcheck/sa9003" + "honnef.co/go/tools/staticcheck/sa9004" + "honnef.co/go/tools/staticcheck/sa9005" + "honnef.co/go/tools/staticcheck/sa9006" + "honnef.co/go/tools/staticcheck/sa9007" + "honnef.co/go/tools/staticcheck/sa9008" + "honnef.co/go/tools/staticcheck/sa9009" "honnef.co/go/tools/stylecheck" + "honnef.co/go/tools/stylecheck/st1000" + "honnef.co/go/tools/stylecheck/st1001" + "honnef.co/go/tools/stylecheck/st1003" + "honnef.co/go/tools/stylecheck/st1005" + "honnef.co/go/tools/stylecheck/st1006" + "honnef.co/go/tools/stylecheck/st1008" + "honnef.co/go/tools/stylecheck/st1011" + "honnef.co/go/tools/stylecheck/st1012" + "honnef.co/go/tools/stylecheck/st1013" + "honnef.co/go/tools/stylecheck/st1015" + "honnef.co/go/tools/stylecheck/st1016" + "honnef.co/go/tools/stylecheck/st1017" + "honnef.co/go/tools/stylecheck/st1018" + "honnef.co/go/tools/stylecheck/st1019" + "honnef.co/go/tools/stylecheck/st1020" + "honnef.co/go/tools/stylecheck/st1021" + "honnef.co/go/tools/stylecheck/st1022" + "honnef.co/go/tools/stylecheck/st1023" ) -// StaticcheckAnalzyers describes available Staticcheck analyzers, keyed by -// analyzer name. -var StaticcheckAnalyzers = make(map[string]*Analyzer) // written by analysis_.go +// StaticcheckAnalyzers lists available Staticcheck analyzers. +var StaticcheckAnalyzers = initStaticcheckAnalyzers() + +func initStaticcheckAnalyzers() (res []*Analyzer) { -func init() { mapSeverity := func(severity lint.Severity) protocol.DiagnosticSeverity { switch severity { case lint.SeverityError: @@ -36,28 +200,251 @@ func init() { return protocol.SeverityWarning } } - add := func(analyzers []*lint.Analyzer, skip map[string]struct{}) { - for _, a := range analyzers { - if _, ok := skip[a.Analyzer.Name]; ok { - continue + + // We can't import buildir.Analyzer directly, so grab it from another analyzer. + buildir := sa1000.SCAnalyzer.Analyzer.Requires[0] + if buildir.Name != "buildir" { + panic("sa1000.Requires[0] is not buildir") + } + + add := func(a *lint.Analyzer, dflt bool) { + // Assert that no analyzer that requires "buildir", + // even indirectly, is enabled by default. 
+ if dflt { + var visit func(aa *analysis.Analyzer) + visit = func(aa *analysis.Analyzer) { + if aa == buildir { + log.Fatalf("%s requires buildir (perhaps indirectly) yet is enabled by default", a.Analyzer.Name) + } + for _, req := range aa.Requires { + visit(req) + } } + visit(a.Analyzer) + } + res = append(res, &Analyzer{ + analyzer: a.Analyzer, + staticcheck: a.Doc, + nonDefault: !dflt, + severity: mapSeverity(a.Doc.Severity), + }) + } - StaticcheckAnalyzers[a.Analyzer.Name] = &Analyzer{ - analyzer: a.Analyzer, - nonDefault: a.Doc.NonDefault, - severity: mapSeverity(a.Doc.Severity), + type M = map[*lint.Analyzer]any // value = true|false|nil + + addAll := func(suite string, upstream []*lint.Analyzer, config M) { + for _, a := range upstream { + v, ok := config[a] + if !ok { + panic(fmt.Sprintf("%s.Analyzers includes %s but config mapping does not; settings audit required", suite, a.Analyzer.Name)) + } + if v != nil { + add(a, v.(bool)) } } } - add(simple.Analyzers, nil) - add(staticcheck.Analyzers, map[string]struct{}{ - // This check conflicts with the vet printf check (golang/go#34494). - "SA5009": {}, - // This check relies on facts from dependencies, which - // we don't currently compute. - "SA5011": {}, + // For each analyzer in the four suites provided by + // staticcheck, we provide a complete configuration, mapping + // it to a boolean, indicating whether it should be on by + // default in gopls, or nil to indicate explicitly that it has + // been excluded (e.g. because it is redundant with an + // existing vet analyzer such as printf, waitgroup, appends). + // + // This approach ensures that as suites grow, we make an + // affirmative decision, positive or negative, about adding + // new items. + // + // An analyzer may be off by default if: + // - it requires, even indirectly, "buildir", which is like + // buildssa but uses facts, making it expensive; + // - it has significant false positives; + // - it reports on non-problematic style issues; + // - its fixes are lossy (e.g. of comments) or not always sound; + // - it reports "maybes", not "definites" (e.g. sa9001). + // - it reports on harmless stylistic choices that may have + // been chosen deliberately for clarity or emphasis (e.g. s1005). + // - it makes deductions from build tags that are not true + // for all configurations. 
+ + addAll("simple", simple.Analyzers, M{ + s1000.SCAnalyzer: true, + s1001.SCAnalyzer: true, + s1002.SCAnalyzer: false, // makes unsound deductions from build tags + s1003.SCAnalyzer: true, + s1004.SCAnalyzer: true, + s1005.SCAnalyzer: false, // not a correctness/style issue + s1006.SCAnalyzer: false, // makes unsound deductions from build tags + s1007.SCAnalyzer: true, + s1008.SCAnalyzer: false, // may lose important comments + s1009.SCAnalyzer: true, + s1010.SCAnalyzer: true, + s1011.SCAnalyzer: false, // requires buildir + s1012.SCAnalyzer: true, + s1016.SCAnalyzer: false, // may rely on coincidental structural subtyping + s1017.SCAnalyzer: true, + s1018.SCAnalyzer: true, + s1019.SCAnalyzer: true, + s1020.SCAnalyzer: true, + s1021.SCAnalyzer: false, // may lose important comments + s1023.SCAnalyzer: true, + s1024.SCAnalyzer: true, + s1025.SCAnalyzer: false, // requires buildir + s1028.SCAnalyzer: true, + s1029.SCAnalyzer: false, // requires buildir + s1030.SCAnalyzer: true, // (tentative: see docs, + s1031.SCAnalyzer: true, + s1032.SCAnalyzer: true, + s1033.SCAnalyzer: true, + s1034.SCAnalyzer: true, + s1035.SCAnalyzer: true, + s1036.SCAnalyzer: true, + s1037.SCAnalyzer: true, + s1038.SCAnalyzer: true, + s1039.SCAnalyzer: true, + s1040.SCAnalyzer: true, + }) + + addAll("stylecheck", stylecheck.Analyzers, M{ + // These are all slightly too opinionated to be on by default. + st1000.SCAnalyzer: false, + st1001.SCAnalyzer: false, + st1003.SCAnalyzer: false, + st1005.SCAnalyzer: false, + st1006.SCAnalyzer: false, + st1008.SCAnalyzer: false, + st1011.SCAnalyzer: false, + st1012.SCAnalyzer: false, + st1013.SCAnalyzer: false, + st1015.SCAnalyzer: false, + st1016.SCAnalyzer: false, + st1017.SCAnalyzer: false, + st1018.SCAnalyzer: false, + st1019.SCAnalyzer: false, + st1020.SCAnalyzer: false, + st1021.SCAnalyzer: false, + st1022.SCAnalyzer: false, + st1023.SCAnalyzer: false, + }) + + // These are not bug fixes but code transformations: some + // reversible and value-neutral, of the kind typically listed + // on the VS Code's Refactor/Source Action/Quick Fix menus. + // + // TODO(adonovan): plumb these to the appropriate menu, + // as we do for code actions such as split/join lines. 
+ addAll("quickfix", quickfix.Analyzers, M{ + qf1001.SCAnalyzer: false, // not always a style improvement + qf1002.SCAnalyzer: true, + qf1003.SCAnalyzer: true, + qf1004.SCAnalyzer: true, + qf1005.SCAnalyzer: false, // not always a style improvement + qf1006.SCAnalyzer: false, // may lose important comments + qf1007.SCAnalyzer: false, // may lose important comments + qf1008.SCAnalyzer: false, // not always a style improvement + qf1009.SCAnalyzer: true, + qf1010.SCAnalyzer: true, + qf1011.SCAnalyzer: false, // not always a style improvement + qf1012.SCAnalyzer: true, + }) + + addAll("staticcheck", staticcheck.Analyzers, M{ + sa1000.SCAnalyzer: false, // requires buildir + sa1001.SCAnalyzer: true, + sa1002.SCAnalyzer: false, // requires buildir + sa1003.SCAnalyzer: false, // requires buildir + sa1004.SCAnalyzer: true, + sa1005.SCAnalyzer: true, + sa1006.SCAnalyzer: nil, // redundant wrt 'printf' + sa1007.SCAnalyzer: false, // requires buildir + sa1008.SCAnalyzer: true, + sa1010.SCAnalyzer: false, // requires buildir + sa1011.SCAnalyzer: false, // requires buildir + sa1012.SCAnalyzer: true, + sa1013.SCAnalyzer: true, + sa1014.SCAnalyzer: false, // requires buildir + sa1015.SCAnalyzer: false, // requires buildir + sa1016.SCAnalyzer: true, + sa1017.SCAnalyzer: false, // requires buildir + sa1018.SCAnalyzer: false, // requires buildir + sa1019.SCAnalyzer: nil, // redundant wrt 'deprecated' + sa1020.SCAnalyzer: false, // requires buildir + sa1021.SCAnalyzer: false, // requires buildir + sa1023.SCAnalyzer: false, // requires buildir + sa1024.SCAnalyzer: false, // requires buildir + sa1025.SCAnalyzer: false, // requires buildir + sa1026.SCAnalyzer: false, // requires buildir + sa1027.SCAnalyzer: false, // requires buildir + sa1028.SCAnalyzer: false, // requires buildir + sa1029.SCAnalyzer: false, // requires buildir + sa1030.SCAnalyzer: false, // requires buildir + sa1031.SCAnalyzer: false, // requires buildir + sa1032.SCAnalyzer: false, // requires buildir + sa2000.SCAnalyzer: nil, // redundant wrt 'waitgroup' + sa2001.SCAnalyzer: true, + sa2002.SCAnalyzer: false, // requires buildir + sa2003.SCAnalyzer: false, // requires buildir + sa3000.SCAnalyzer: true, + sa3001.SCAnalyzer: true, + sa4000.SCAnalyzer: true, + sa4001.SCAnalyzer: true, + sa4003.SCAnalyzer: true, + sa4004.SCAnalyzer: true, + sa4005.SCAnalyzer: false, // requires buildir + sa4006.SCAnalyzer: false, // requires buildir + sa4008.SCAnalyzer: false, // requires buildir + sa4009.SCAnalyzer: false, // requires buildir + sa4010.SCAnalyzer: false, // requires buildir + sa4011.SCAnalyzer: true, + sa4012.SCAnalyzer: false, // requires buildir + sa4013.SCAnalyzer: true, + sa4014.SCAnalyzer: true, + sa4015.SCAnalyzer: false, // requires buildir + sa4016.SCAnalyzer: true, + sa4017.SCAnalyzer: false, // requires buildir + sa4018.SCAnalyzer: false, // requires buildir + sa4019.SCAnalyzer: true, + sa4020.SCAnalyzer: true, + sa4021.SCAnalyzer: nil, // redundant wrt 'appends' + sa4022.SCAnalyzer: true, + sa4023.SCAnalyzer: false, // requires buildir + sa4024.SCAnalyzer: true, + sa4025.SCAnalyzer: true, + sa4026.SCAnalyzer: true, + sa4027.SCAnalyzer: true, + sa4028.SCAnalyzer: true, + sa4029.SCAnalyzer: true, + sa4030.SCAnalyzer: true, + sa4031.SCAnalyzer: false, // requires buildir + sa4032.SCAnalyzer: true, + sa5000.SCAnalyzer: false, // requires buildir + sa5001.SCAnalyzer: true, + sa5002.SCAnalyzer: false, // makes unsound deductions from build tags + sa5003.SCAnalyzer: true, + sa5004.SCAnalyzer: true, + sa5005.SCAnalyzer: false, // requires 
buildir + sa5007.SCAnalyzer: false, // requires buildir + sa5008.SCAnalyzer: true, + sa5009.SCAnalyzer: nil, // requires buildir; redundant wrt 'printf' (#34494, + sa5010.SCAnalyzer: false, // requires buildir + sa5011.SCAnalyzer: false, // requires buildir + sa5012.SCAnalyzer: false, // requires buildir + sa6000.SCAnalyzer: false, // requires buildir + sa6001.SCAnalyzer: false, // requires buildir + sa6002.SCAnalyzer: false, // requires buildir + sa6003.SCAnalyzer: false, // requires buildir + sa6005.SCAnalyzer: true, + sa6006.SCAnalyzer: true, + sa9001.SCAnalyzer: false, // reports a "maybe" bug (low signal/noise, + sa9002.SCAnalyzer: true, + sa9003.SCAnalyzer: false, // requires buildir; NonDefault + sa9004.SCAnalyzer: true, + sa9005.SCAnalyzer: false, // requires buildir + sa9006.SCAnalyzer: true, + sa9007.SCAnalyzer: false, // requires buildir + sa9008.SCAnalyzer: false, // requires buildir + sa9009.SCAnalyzer: true, }) - add(stylecheck.Analyzers, nil) - add(quickfix.Analyzers, nil) + + return res } diff --git a/gopls/internal/test/marker/testdata/codeaction/functionextraction_issue66289.txt b/gopls/internal/test/marker/testdata/codeaction/functionextraction_issue66289.txt index 30db2fb3ed0..0b2622f1d58 100644 --- a/gopls/internal/test/marker/testdata/codeaction/functionextraction_issue66289.txt +++ b/gopls/internal/test/marker/testdata/codeaction/functionextraction_issue66289.txt @@ -16,7 +16,7 @@ func F() error { if err != nil { return fmt.Errorf("2: %w", err) } //@loc(endF, "}") - fmt.Println(a, b) + fmt.Printf("%s %s", a, b) return nil } @@ -33,7 +33,7 @@ func F() error { if shouldReturn { return err } //@loc(endF, "}") - fmt.Println(a, b) + fmt.Printf("%s %s", a, b) return nil } diff --git a/gopls/internal/test/marker/testdata/definition/branch.txt b/gopls/internal/test/marker/testdata/definition/branch.txt index e80c83a92ae..39b51429bd1 100644 --- a/gopls/internal/test/marker/testdata/definition/branch.txt +++ b/gopls/internal/test/marker/testdata/definition/branch.txt @@ -1,5 +1,13 @@ This test checks definition operations in branch statements break, goto and continue. +We suppress staticheck since it also gives a diagnostic +about the break being ineffective. + +-- settings.json -- +{ + "staticcheck": false +} + -- go.mod -- module mod.com diff --git a/gopls/internal/test/marker/testdata/diagnostics/analyzers.txt b/gopls/internal/test/marker/testdata/diagnostics/analyzers.txt index fb7876a0492..ba9f125ebd6 100644 --- a/gopls/internal/test/marker/testdata/diagnostics/analyzers.txt +++ b/gopls/internal/test/marker/testdata/diagnostics/analyzers.txt @@ -96,3 +96,20 @@ import "C" func _(c chan bool) { C.f(unsafe.Pointer(&c)) //@ diag("unsafe", re"passing Go type with embedded pointer to C") } + +-- staticcheck/staticcheck.go -- +package staticcheck + +// staticcheck includes hundreds of other analyzers. +// Here we test only two: one enabled by default, one disabled. 
+ +func S1000(ch chan int) { + select { case <-ch: } //@ diag("select", re"use .*receive instead of select") +} + +func S1011(x, y []int) { + for _, e := range y { + x = append(x, e) // no "replace loop with append" diagnostic + } +} + diff --git a/gopls/internal/test/marker/testdata/highlight/switchbreak.txt b/gopls/internal/test/marker/testdata/highlight/switchbreak.txt index 3893b4c502d..8efccfcdb66 100644 --- a/gopls/internal/test/marker/testdata/highlight/switchbreak.txt +++ b/gopls/internal/test/marker/testdata/highlight/switchbreak.txt @@ -1,6 +1,14 @@ This is a regression test for issue 65752: a break in a switch should highlight the switch, not the enclosing loop. +We suppress staticheck since it also gives a diagnostic +about the break being ineffective. + +-- settings.json -- +{ + "staticcheck": false +} + -- a.go -- package a From d97a910f5bcab2a6a85443664cbb5ad7928763c6 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Mon, 21 Apr 2025 13:53:51 -0400 Subject: [PATCH 217/270] gopls/internal/golang: CodeAction: don't fail because of 1 producer If a CodeAction producer function returns an error, it shouldn't stop other producers from running. Fixes golang/go#71275 Change-Id: I23a477305d81a75f5fc1323c6bbd9fecc0227804 Reviewed-on: https://go-review.googlesource.com/c/tools/+/667115 Reviewed-by: Robert Findley LUCI-TryBot-Result: Go LUCI Auto-Submit: Alan Donovan --- gopls/internal/golang/codeaction.go | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/gopls/internal/golang/codeaction.go b/gopls/internal/golang/codeaction.go index 4efddaa8a18..5ba8b0c4ae5 100644 --- a/gopls/internal/golang/codeaction.go +++ b/gopls/internal/golang/codeaction.go @@ -105,9 +105,13 @@ func CodeActions(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, req.pkg = nil } if err := p.fn(ctx, req); err != nil { - // TODO(adonovan): most errors in code action providers should - // not block other providers; see https://go.dev/issue/71275. - return nil, err + // An error in one code action producer + // should not affect the others. + if ctx.Err() != nil { + return nil, err + } + event.Error(ctx, fmt.Sprintf("CodeAction producer %s failed", p.kind), err) + continue } } From 015c0fc714afb97ce4e98feb8c2863d0e42f8e43 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Mon, 21 Apr 2025 14:36:43 -0400 Subject: [PATCH 218/270] gopls/internal/test/integration/web: move misc/webserver_test.go This CL moves and splits the tests of web-based features into their own package. No substantive changes. 
Change-Id: Iea39256e0549edbaf698d8fd5b4c4cc18f3cc17e Reviewed-on: https://go-review.googlesource.com/c/tools/+/667116 Auto-Submit: Alan Donovan Reviewed-by: Robert Findley LUCI-TryBot-Result: Go LUCI --- .../test/integration/misc/compileropt_test.go | 12 + .../test/integration/web/assembly_test.go | 135 ++++++++++ .../test/integration/web/freesymbols_test.go | 76 ++++++ .../webserver_test.go => web/pkdoc_test.go} | 251 +----------------- .../test/integration/web/util_test.go | 81 ++++++ 5 files changed, 309 insertions(+), 246 deletions(-) create mode 100644 gopls/internal/test/integration/web/assembly_test.go create mode 100644 gopls/internal/test/integration/web/freesymbols_test.go rename gopls/internal/test/integration/{misc/webserver_test.go => web/pkdoc_test.go} (64%) create mode 100644 gopls/internal/test/integration/web/util_test.go diff --git a/gopls/internal/test/integration/misc/compileropt_test.go b/gopls/internal/test/integration/misc/compileropt_test.go index 68138fabc43..a02a5dddebd 100644 --- a/gopls/internal/test/integration/misc/compileropt_test.go +++ b/gopls/internal/test/integration/misc/compileropt_test.go @@ -5,6 +5,7 @@ package misc import ( + "fmt" "runtime" "testing" @@ -44,6 +45,7 @@ func main() { if err != nil { t.Fatal(err) } + params := &protocol.ExecuteCommandParams{ Command: docAction.Command.Command, Arguments: docAction.Command.Arguments, @@ -229,3 +231,13 @@ func cond[T any](cond bool, x, y T) T { return y } } + +// codeActionByKind returns the first action of (exactly) the specified kind, or an error. +func codeActionByKind(actions []protocol.CodeAction, kind protocol.CodeActionKind) (*protocol.CodeAction, error) { + for _, act := range actions { + if act.Kind == kind { + return &act, nil + } + } + return nil, fmt.Errorf("can't find action with kind %s, only %#v", kind, actions) +} diff --git a/gopls/internal/test/integration/web/assembly_test.go b/gopls/internal/test/integration/web/assembly_test.go new file mode 100644 index 00000000000..6820cbb7864 --- /dev/null +++ b/gopls/internal/test/integration/web/assembly_test.go @@ -0,0 +1,135 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package web_test + +import ( + "runtime" + "testing" + + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/protocol/command" + "golang.org/x/tools/gopls/internal/settings" + . "golang.org/x/tools/gopls/internal/test/integration" + "golang.org/x/tools/internal/testenv" +) + +// TestAssembly is a basic test of the web-based assembly listing. +func TestAssembly(t *testing.T) { + testenv.NeedsGoCommand1Point(t, 22) // for up-to-date assembly listing + + const files = ` +-- go.mod -- +module example.com + +-- a/a.go -- +package a + +func f(x int) int { + println("hello") + defer println("world") + return x +} + +func g() { + println("goodbye") +} + +var v = [...]int{ + f(123), + f(456), +} + +func init() { + f(789) +} +` + Run(t, files, func(t *testing.T, env *Env) { + env.OpenFile("a/a.go") + + asmFor := func(pattern string) []byte { + // Invoke the "Browse assembly" code action to start the server. + loc := env.RegexpSearch("a/a.go", pattern) + actions, err := env.Editor.CodeAction(env.Ctx, loc, nil, protocol.CodeActionUnknownTrigger) + if err != nil { + t.Fatalf("CodeAction: %v", err) + } + action, err := codeActionByKind(actions, settings.GoAssembly) + if err != nil { + t.Fatal(err) + } + + // Execute the command. 
+ // Its side effect should be a single showDocument request. + params := &protocol.ExecuteCommandParams{ + Command: action.Command.Command, + Arguments: action.Command.Arguments, + } + var result command.DebuggingResult + collectDocs := env.Awaiter.ListenToShownDocuments() + env.ExecuteCommand(params, &result) + doc := shownDocument(t, collectDocs(), "http:") + if doc == nil { + t.Fatalf("no showDocument call had 'file:' prefix") + } + t.Log("showDocument(package doc) URL:", doc.URI) + + return get(t, doc.URI) + } + + // Get the report and do some minimal checks for sensible results. + // + // Use only portable instructions below! Remember that + // this is a test of plumbing, not compilation, so + // it's better to skip the tests, rather than refine + // them, on any architecture that gives us trouble + // (e.g. uses JAL for CALL, or BL for RET). + // We conservatively test only on the two most popular + // architectures. + { + report := asmFor("println") + checkMatch(t, true, report, `TEXT.*example.com/a.f`) + switch runtime.GOARCH { + case "amd64", "arm64": + checkMatch(t, true, report, `CALL runtime.printlock`) + checkMatch(t, true, report, `CALL runtime.printstring`) + checkMatch(t, true, report, `CALL runtime.printunlock`) + checkMatch(t, true, report, `CALL example.com/a.f.deferwrap`) + checkMatch(t, true, report, `RET`) + checkMatch(t, true, report, `CALL runtime.morestack_noctxt`) + } + + // Nested functions are also shown. + // + // The condition here was relaxed to unblock go.dev/cl/639515. + checkMatch(t, true, report, `example.com/a.f.deferwrap`) + + // But other functions are not. + checkMatch(t, false, report, `TEXT.*example.com/a.g`) + } + + // Check that code in a package-level var initializer is found too. + { + report := asmFor(`f\(123\)`) + switch runtime.GOARCH { + case "amd64", "arm64": + checkMatch(t, true, report, `TEXT.*example.com/a.init`) + checkMatch(t, true, report, `MOV.? \$123`) + checkMatch(t, true, report, `MOV.? \$456`) + checkMatch(t, true, report, `CALL example.com/a.f`) + } + } + + // And code in a source-level init function. + { + report := asmFor(`f\(789\)`) + switch runtime.GOARCH { + case "amd64", "arm64": + checkMatch(t, true, report, `TEXT.*example.com/a.init`) + checkMatch(t, true, report, `MOV.? \$789`) + checkMatch(t, true, report, `CALL example.com/a.f`) + } + } + }) +} diff --git a/gopls/internal/test/integration/web/freesymbols_test.go b/gopls/internal/test/integration/web/freesymbols_test.go new file mode 100644 index 00000000000..7f44c29ec1f --- /dev/null +++ b/gopls/internal/test/integration/web/freesymbols_test.go @@ -0,0 +1,76 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package web_test + +import ( + "testing" + + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/protocol/command" + "golang.org/x/tools/gopls/internal/settings" + . "golang.org/x/tools/gopls/internal/test/integration" +) + +// TestFreeSymbols is a basic test of interaction with the "free symbols" web report. 
+func TestFreeSymbols(t *testing.T) { + const files = ` +-- go.mod -- +module example.com + +-- a/a.go -- +package a + +import "fmt" +import "bytes" + +func f(buf bytes.Buffer, greeting string) { +/* « */ + fmt.Fprintf(&buf, "%s", greeting) + buf.WriteString(fmt.Sprint("foo")) + buf.WriteByte(0) +/* » */ + buf.Write(nil) +} +` + Run(t, files, func(t *testing.T, env *Env) { + env.OpenFile("a/a.go") + + // Invoke the "Browse free symbols" code + // action to start the server. + loc := env.RegexpSearch("a/a.go", "«((?:.|\n)*)»") + actions, err := env.Editor.CodeAction(env.Ctx, loc, nil, protocol.CodeActionUnknownTrigger) + if err != nil { + t.Fatalf("CodeAction: %v", err) + } + action, err := codeActionByKind(actions, settings.GoFreeSymbols) + if err != nil { + t.Fatal(err) + } + + // Execute the command. + // Its side effect should be a single showDocument request. + params := &protocol.ExecuteCommandParams{ + Command: action.Command.Command, + Arguments: action.Command.Arguments, + } + var result command.DebuggingResult + collectDocs := env.Awaiter.ListenToShownDocuments() + env.ExecuteCommand(params, &result) + doc := shownDocument(t, collectDocs(), "http:") + if doc == nil { + t.Fatalf("no showDocument call had 'file:' prefix") + } + t.Log("showDocument(package doc) URL:", doc.URI) + + // Get the report and do some minimal checks for sensible results. + report := get(t, doc.URI) + checkMatch(t, true, report, `

  • import "fmt" // for Fprintf, Sprint
  • `) + checkMatch(t, true, report, `
  • var buf bytes.Buffer
  • `) + checkMatch(t, true, report, `
  • func WriteByte func\(c byte\) error
  • `) + checkMatch(t, true, report, `
  • func WriteString func\(s string\) \(n int, err error\)
  • `) + checkMatch(t, false, report, `
  • func Write`) // not in selection + checkMatch(t, true, report, `
  • var greeting string
  • `) + }) +} diff --git a/gopls/internal/test/integration/misc/webserver_test.go b/gopls/internal/test/integration/web/pkdoc_test.go similarity index 64% rename from gopls/internal/test/integration/misc/webserver_test.go rename to gopls/internal/test/integration/web/pkdoc_test.go index 691d45baa6e..8d1573320d0 100644 --- a/gopls/internal/test/integration/misc/webserver_test.go +++ b/gopls/internal/test/integration/web/pkdoc_test.go @@ -1,31 +1,26 @@ -// Copyright 2024 The Go Authors. All rights reserved. +// Copyright 2025 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -package misc +package web_test import ( "fmt" "html" - "io" - "net/http" "regexp" - "runtime" "strings" "testing" "golang.org/x/tools/gopls/internal/protocol" - "golang.org/x/tools/gopls/internal/protocol/command" "golang.org/x/tools/gopls/internal/settings" . "golang.org/x/tools/gopls/internal/test/integration" - "golang.org/x/tools/internal/testenv" ) // TODO(adonovan): define marker test verbs for checking package docs. -// TestWebServer exercises the web server created on demand -// for code actions such as "Browse package documentation". -func TestWebServer(t *testing.T) { +// TestBrowsePkgDoc provides basic coverage of the "Browse package +// documentation", which creates a web server on demand. +func TestBrowsePkgDoc(t *testing.T) { const files = ` -- go.mod -- module example.com @@ -446,239 +441,3 @@ func viewPkgDoc(t *testing.T, env *Env, loc protocol.Location) protocol.URI { } return doc.URI } - -// TestFreeSymbols is a basic test of interaction with the "free symbols" web report. -func TestFreeSymbols(t *testing.T) { - const files = ` --- go.mod -- -module example.com - --- a/a.go -- -package a - -import "fmt" -import "bytes" - -func f(buf bytes.Buffer, greeting string) { -/* « */ - fmt.Fprintf(&buf, "%s", greeting) - buf.WriteString(fmt.Sprint("foo")) - buf.WriteByte(0) -/* » */ - buf.Write(nil) -} -` - Run(t, files, func(t *testing.T, env *Env) { - env.OpenFile("a/a.go") - - // Invoke the "Browse free symbols" code - // action to start the server. - loc := env.RegexpSearch("a/a.go", "«((?:.|\n)*)»") - actions, err := env.Editor.CodeAction(env.Ctx, loc, nil, protocol.CodeActionUnknownTrigger) - if err != nil { - t.Fatalf("CodeAction: %v", err) - } - action, err := codeActionByKind(actions, settings.GoFreeSymbols) - if err != nil { - t.Fatal(err) - } - - // Execute the command. - // Its side effect should be a single showDocument request. - params := &protocol.ExecuteCommandParams{ - Command: action.Command.Command, - Arguments: action.Command.Arguments, - } - var result command.DebuggingResult - collectDocs := env.Awaiter.ListenToShownDocuments() - env.ExecuteCommand(params, &result) - doc := shownDocument(t, collectDocs(), "http:") - if doc == nil { - t.Fatalf("no showDocument call had 'file:' prefix") - } - t.Log("showDocument(package doc) URL:", doc.URI) - - // Get the report and do some minimal checks for sensible results. - report := get(t, doc.URI) - checkMatch(t, true, report, `
  • import "fmt" // for Fprintf, Sprint
  • `) - checkMatch(t, true, report, `
  • var buf bytes.Buffer
  • `) - checkMatch(t, true, report, `
  • func WriteByte func\(c byte\) error
  • `) - checkMatch(t, true, report, `
  • func WriteString func\(s string\) \(n int, err error\)
  • `) - checkMatch(t, false, report, `
  • func Write`) // not in selection - checkMatch(t, true, report, `
  • var greeting string
  • `) - }) -} - -// TestAssembly is a basic test of the web-based assembly listing. -func TestAssembly(t *testing.T) { - testenv.NeedsGoCommand1Point(t, 22) // for up-to-date assembly listing - - const files = ` --- go.mod -- -module example.com - --- a/a.go -- -package a - -func f(x int) int { - println("hello") - defer println("world") - return x -} - -func g() { - println("goodbye") -} - -var v = [...]int{ - f(123), - f(456), -} - -func init() { - f(789) -} -` - Run(t, files, func(t *testing.T, env *Env) { - env.OpenFile("a/a.go") - - asmFor := func(pattern string) []byte { - // Invoke the "Browse assembly" code action to start the server. - loc := env.RegexpSearch("a/a.go", pattern) - actions, err := env.Editor.CodeAction(env.Ctx, loc, nil, protocol.CodeActionUnknownTrigger) - if err != nil { - t.Fatalf("CodeAction: %v", err) - } - action, err := codeActionByKind(actions, settings.GoAssembly) - if err != nil { - t.Fatal(err) - } - - // Execute the command. - // Its side effect should be a single showDocument request. - params := &protocol.ExecuteCommandParams{ - Command: action.Command.Command, - Arguments: action.Command.Arguments, - } - var result command.DebuggingResult - collectDocs := env.Awaiter.ListenToShownDocuments() - env.ExecuteCommand(params, &result) - doc := shownDocument(t, collectDocs(), "http:") - if doc == nil { - t.Fatalf("no showDocument call had 'file:' prefix") - } - t.Log("showDocument(package doc) URL:", doc.URI) - - return get(t, doc.URI) - } - - // Get the report and do some minimal checks for sensible results. - // - // Use only portable instructions below! Remember that - // this is a test of plumbing, not compilation, so - // it's better to skip the tests, rather than refine - // them, on any architecture that gives us trouble - // (e.g. uses JAL for CALL, or BL for RET). - // We conservatively test only on the two most popular - // architectures. - { - report := asmFor("println") - checkMatch(t, true, report, `TEXT.*example.com/a.f`) - switch runtime.GOARCH { - case "amd64", "arm64": - checkMatch(t, true, report, `CALL runtime.printlock`) - checkMatch(t, true, report, `CALL runtime.printstring`) - checkMatch(t, true, report, `CALL runtime.printunlock`) - checkMatch(t, true, report, `CALL example.com/a.f.deferwrap`) - checkMatch(t, true, report, `RET`) - checkMatch(t, true, report, `CALL runtime.morestack_noctxt`) - } - - // Nested functions are also shown. - // - // The condition here was relaxed to unblock go.dev/cl/639515. - checkMatch(t, true, report, `example.com/a.f.deferwrap`) - - // But other functions are not. - checkMatch(t, false, report, `TEXT.*example.com/a.g`) - } - - // Check that code in a package-level var initializer is found too. - { - report := asmFor(`f\(123\)`) - switch runtime.GOARCH { - case "amd64", "arm64": - checkMatch(t, true, report, `TEXT.*example.com/a.init`) - checkMatch(t, true, report, `MOV.? \$123`) - checkMatch(t, true, report, `MOV.? \$456`) - checkMatch(t, true, report, `CALL example.com/a.f`) - } - } - - // And code in a source-level init function. - { - report := asmFor(`f\(789\)`) - switch runtime.GOARCH { - case "amd64", "arm64": - checkMatch(t, true, report, `TEXT.*example.com/a.init`) - checkMatch(t, true, report, `MOV.? \$789`) - checkMatch(t, true, report, `CALL example.com/a.f`) - } - } - }) -} - -// shownDocument returns the first shown document matching the URI prefix. -// It may be nil. -// As a side effect, it clears the list of accumulated shown documents. 
-func shownDocument(t *testing.T, shown []*protocol.ShowDocumentParams, prefix string) *protocol.ShowDocumentParams { - t.Helper() - var first *protocol.ShowDocumentParams - for _, sd := range shown { - if strings.HasPrefix(sd.URI, prefix) { - if first != nil { - t.Errorf("got multiple showDocument requests: %#v", shown) - break - } - first = sd - } - } - return first -} - -// get fetches the content of a document over HTTP. -func get(t *testing.T, url string) []byte { - t.Helper() - resp, err := http.Get(url) - if err != nil { - t.Fatal(err) - } - defer resp.Body.Close() - got, err := io.ReadAll(resp.Body) - if err != nil { - t.Fatal(err) - } - return got -} - -// checkMatch asserts that got matches (or doesn't match, if !want) the pattern. -func checkMatch(t *testing.T, want bool, got []byte, pattern string) { - t.Helper() - if regexp.MustCompile(pattern).Match(got) != want { - if want { - t.Errorf("input did not match wanted pattern %q; got:\n%s", pattern, got) - } else { - t.Errorf("input matched unwanted pattern %q; got:\n%s", pattern, got) - } - } -} - -// codeActionByKind returns the first action of (exactly) the specified kind, or an error. -func codeActionByKind(actions []protocol.CodeAction, kind protocol.CodeActionKind) (*protocol.CodeAction, error) { - for _, act := range actions { - if act.Kind == kind { - return &act, nil - } - } - return nil, fmt.Errorf("can't find action with kind %s, only %#v", kind, actions) -} diff --git a/gopls/internal/test/integration/web/util_test.go b/gopls/internal/test/integration/web/util_test.go new file mode 100644 index 00000000000..c16f154e286 --- /dev/null +++ b/gopls/internal/test/integration/web/util_test.go @@ -0,0 +1,81 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package web_test + +// This file defines web server testing utilities. + +import ( + "fmt" + "io" + "net/http" + "os" + "regexp" + "strings" + "testing" + + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/test/integration" + "golang.org/x/tools/gopls/internal/util/bug" +) + +func TestMain(m *testing.M) { + bug.PanicOnBugs = true + os.Exit(integration.Main(m)) +} + +// shownDocument returns the first shown document matching the URI prefix. +// It may be nil. +// As a side effect, it clears the list of accumulated shown documents. +func shownDocument(t *testing.T, shown []*protocol.ShowDocumentParams, prefix string) *protocol.ShowDocumentParams { + t.Helper() + var first *protocol.ShowDocumentParams + for _, sd := range shown { + if strings.HasPrefix(sd.URI, prefix) { + if first != nil { + t.Errorf("got multiple showDocument requests: %#v", shown) + break + } + first = sd + } + } + return first +} + +// get fetches the content of a document over HTTP. +func get(t *testing.T, url string) []byte { + t.Helper() + resp, err := http.Get(url) + if err != nil { + t.Fatal(err) + } + defer resp.Body.Close() + got, err := io.ReadAll(resp.Body) + if err != nil { + t.Fatal(err) + } + return got +} + +// checkMatch asserts that got matches (or doesn't match, if !want) the pattern. 
+func checkMatch(t *testing.T, want bool, got []byte, pattern string) { + t.Helper() + if regexp.MustCompile(pattern).Match(got) != want { + if want { + t.Errorf("input did not match wanted pattern %q; got:\n%s", pattern, got) + } else { + t.Errorf("input matched unwanted pattern %q; got:\n%s", pattern, got) + } + } +} + +// codeActionByKind returns the first action of (exactly) the specified kind, or an error. +func codeActionByKind(actions []protocol.CodeAction, kind protocol.CodeActionKind) (*protocol.CodeAction, error) { + for _, act := range actions { + if act.Kind == kind { + return &act, nil + } + } + return nil, fmt.Errorf("can't find action with kind %s, only %#v", kind, actions) +} From 6da8d2eaf083fafeed6ea3808ba0f57c427c77e1 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Mon, 21 Apr 2025 16:22:31 -0400 Subject: [PATCH 219/270] gopls/internal/golang: Rename imports: be defensive This CL makes the map lookup/type assertion defensive. I have no hypothesis for why it fails (it should be sound given the postcondition of the "pass 1" loop), but this will at least stop the tool from crashing. Fixes golang/go#71656 Change-Id: I32576689e4a2e2fcf7f2640f877a8580107cc387 Reviewed-on: https://go-review.googlesource.com/c/tools/+/667195 LUCI-TryBot-Result: Go LUCI Auto-Submit: Alan Donovan Reviewed-by: Robert Findley --- gopls/internal/golang/rename.go | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/gopls/internal/golang/rename.go b/gopls/internal/golang/rename.go index c5910f7872c..fe36cb78a7f 100644 --- a/gopls/internal/golang/rename.go +++ b/gopls/internal/golang/rename.go @@ -1152,7 +1152,12 @@ func renameImports(ctx context.Context, snapshot *cache.Snapshot, mp *metadata.P continue // not the import we're looking for } - pkgname := pkg.TypesInfo().Implicits[imp].(*types.PkgName) + pkgname, ok := pkg.TypesInfo().Implicits[imp].(*types.PkgName) + if !ok { + // "can't happen", but be defensive (#71656) + return fmt.Errorf("internal error: missing type information for %s import at %s", + imp.Path.Value, safetoken.StartPosition(pkg.FileSet(), imp.Pos())) + } pkgScope := pkg.Types().Scope() fileScope := pkg.TypesInfo().Scopes[f.File] From 7c6d4c633718fe7548d009ebf9885930b29e9073 Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Wed, 2 Apr 2025 14:55:07 -0400 Subject: [PATCH 220/270] internal/refactor/inline: handle generic functions This CL is a first step towards support for inlining all forms of generic functions. Its limitations include: - No support for methods on generic types. - Conservative shadowing. - Unnecessary type conversions (see generic.txtar, a1). - Conservative parenthesizing (see generic.txtar, file a1a). For golang/go#68236. 
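To make the new behavior concrete, the a1 case in the generic.txtar testdata
included below amounts to the following before/after sketch (the function f,
its type parameter T, and the int type argument are taken from that test, not
from any new API):

    package a

    // Before: a call to a generic function with an explicit type argument.
    func f[T any](x T) { print(x) }

    func _() {
        f[int](1)
    }

    // After inlining, the callee body is substituted with T replaced by the
    // type argument int; for now this leaves a redundant conversion.
    func _() {
        print(int(1))
    }

As noted in the limitations above, calls that rely on type inference, such as
f(1), are still rejected rather than inlined.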
Change-Id: Ib00b89cb61c611e8d1efd0e2f8b5d93032638b83 Reviewed-on: https://go-review.googlesource.com/c/tools/+/666716 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan --- internal/refactor/inline/callee.go | 71 ++++++++++---- internal/refactor/inline/inline.go | 86 ++++++++++++++++- internal/refactor/inline/inline_test.go | 38 ++++++-- .../refactor/inline/testdata/err-basic.txtar | 9 -- .../inline/testdata/err-shadow-builtin.txtar | 6 ++ .../refactor/inline/testdata/generic.txtar | 95 +++++++++++++++++++ 6 files changed, 267 insertions(+), 38 deletions(-) create mode 100644 internal/refactor/inline/testdata/generic.txtar diff --git a/internal/refactor/inline/callee.go b/internal/refactor/inline/callee.go index f3f6b653c73..d4f53310a2a 100644 --- a/internal/refactor/inline/callee.go +++ b/internal/refactor/inline/callee.go @@ -42,6 +42,7 @@ type gobCallee struct { ValidForCallStmt bool // function body is "return expr" where expr is f() or <-ch NumResults int // number of results (according to type, not ast.FieldList) Params []*paramInfo // information about parameters (incl. receiver) + TypeParams []*paramInfo // information about type parameters Results []*paramInfo // information about result variables Effects []int // order in which parameters are evaluated (see calleefx) HasDefer bool // uses defer @@ -113,17 +114,6 @@ func AnalyzeCallee(logf func(string, ...any), fset *token.FileSet, pkg *types.Pa return nil, fmt.Errorf("cannot inline function %s as it has no body", name) } - // TODO(adonovan): support inlining of instantiated generic - // functions by replacing each occurrence of a type parameter - // T by its instantiating type argument (e.g. int). We'll need - // to wrap the instantiating type in parens when it's not an - // ident or qualified ident to prevent "if x == struct{}" - // parsing ambiguity, or "T(x)" where T = "*int" or "func()" - // from misparsing. - if funcHasTypeParams(decl) { - return nil, fmt.Errorf("cannot inline generic function %s: type parameters are not yet supported", name) - } - // Record the location of all free references in the FuncDecl. // (Parameters are not free by this definition.) var ( @@ -347,6 +337,7 @@ func AnalyzeCallee(logf func(string, ...any), fset *token.FileSet, pkg *types.Pa } params, results, effects, falcon := analyzeParams(logf, fset, info, decl) + tparams := analyzeTypeParams(logf, fset, info, decl) return &Callee{gobCallee{ Content: content, PkgPath: pkg.Path(), @@ -357,6 +348,7 @@ func AnalyzeCallee(logf func(string, ...any), fset *token.FileSet, pkg *types.Pa ValidForCallStmt: validForCallStmt, NumResults: sig.Results().Len(), Params: params, + TypeParams: tparams, Results: results, Effects: effects, HasDefer: hasDefer, @@ -404,20 +396,15 @@ type refInfo struct { IsSelectionOperand bool } -// analyzeParams computes information about parameters of function fn, +// analyzeParams computes information about parameters of the function declared by decl, // including a simple "address taken" escape analysis. // // It returns two new arrays, one of the receiver and parameters, and -// the other of the result variables of function fn. +// the other of the result variables of the function. // // The input must be well-typed. 
func analyzeParams(logf func(string, ...any), fset *token.FileSet, info *types.Info, decl *ast.FuncDecl) (params, results []*paramInfo, effects []int, _ falconResult) { - fnobj, ok := info.Defs[decl.Name] - if !ok { - panic(fmt.Sprintf("%s: no func object for %q", - fset.PositionFor(decl.Name.Pos(), false), decl.Name)) // ill-typed? - } - sig := fnobj.Type().(*types.Signature) + sig := signature(fset, info, decl) paramInfos := make(map[*types.Var]*paramInfo) { @@ -504,6 +491,52 @@ func analyzeParams(logf func(string, ...any), fset *token.FileSet, info *types.I return params, results, effects, falcon } +// analyzeTypeParams computes information about the type parameters of the function declared by decl. +func analyzeTypeParams(_ logger, fset *token.FileSet, info *types.Info, decl *ast.FuncDecl) []*paramInfo { + sig := signature(fset, info, decl) + paramInfos := make(map[*types.TypeName]*paramInfo) + var params []*paramInfo + collect := func(tpl *types.TypeParamList) { + for i := range tpl.Len() { + typeName := tpl.At(i).Obj() + info := ¶mInfo{Name: typeName.Name()} + params = append(params, info) + paramInfos[typeName] = info + } + } + collect(sig.RecvTypeParams()) + collect(sig.TypeParams()) + + // Find references. + // We don't care about most of the properties that matter for parameter references: + // a type is immutable, cannot have its address taken, and does not undergo conversions. + // TODO(jba): can we nevertheless combine this with the traversal in analyzeParams? + var stack []ast.Node + stack = append(stack, decl.Type) // for scope of function itself + astutil.PreorderStack(decl.Body, stack, func(n ast.Node, stack []ast.Node) bool { + if id, ok := n.(*ast.Ident); ok { + if v, ok := info.Uses[id].(*types.TypeName); ok { + if pinfo, ok := paramInfos[v]; ok { + ref := refInfo{Offset: int(n.Pos() - decl.Pos())} + pinfo.Refs = append(pinfo.Refs, ref) + pinfo.Shadow = pinfo.Shadow.add(info, nil, pinfo.Name, stack) + } + } + } + return true + }) + return params +} + +func signature(fset *token.FileSet, info *types.Info, decl *ast.FuncDecl) *types.Signature { + fnobj, ok := info.Defs[decl.Name] + if !ok { + panic(fmt.Sprintf("%s: no func object for %q", + fset.PositionFor(decl.Name.Pos(), false), decl.Name)) // ill-typed? + } + return fnobj.Type().(*types.Signature) +} + // -- callee helpers -- // analyzeAssignment looks at the the given stack, and analyzes certain diff --git a/internal/refactor/inline/inline.go b/internal/refactor/inline/inline.go index 0aaee5c7cb5..652ce8b28f2 100644 --- a/internal/refactor/inline/inline.go +++ b/internal/refactor/inline/inline.go @@ -839,6 +839,14 @@ func (st *state) inlineCall() (*inlineCallResult, error) { } } + typeArgs := st.typeArguments(caller.Call) + if len(typeArgs) != len(callee.TypeParams) { + return nil, fmt.Errorf("cannot inline: type parameter inference is not yet supported") + } + if err := substituteTypeParams(logf, callee.TypeParams, typeArgs, params, replaceCalleeID); err != nil { + return nil, err + } + // Log effective arguments. for i, arg := range args { logf("arg #%d: %s pure=%t effects=%t duplicable=%t free=%v type=%v", @@ -1378,6 +1386,35 @@ type argument struct { desugaredRecv bool // is *recv or &recv, where operator was elided } +// typeArguments returns the type arguments of the call. +// It only collects the arguments that are explicitly provided; it does +// not attempt type inference. 
+func (st *state) typeArguments(call *ast.CallExpr) []*argument { + var exprs []ast.Expr + switch d := ast.Unparen(call.Fun).(type) { + case *ast.IndexExpr: + exprs = []ast.Expr{d.Index} + case *ast.IndexListExpr: + exprs = d.Indices + default: + // No type arguments + return nil + } + var args []*argument + for _, e := range exprs { + arg := &argument{expr: e, freevars: freeVars(st.caller.Info, e)} + // Wrap the instantiating type in parens when it's not an + // ident or qualified ident to prevent "if x == struct{}" + // parsing ambiguity, or "T(x)" where T = "*int" or "func()" + // from misparsing. + if _, ok := arg.expr.(*ast.Ident); !ok { + arg.expr = &ast.ParenExpr{X: arg.expr} + } + args = append(args, arg) + } + return args +} + // arguments returns the effective arguments of the call. // // If the receiver argument and parameter have @@ -1413,6 +1450,9 @@ func (st *state) arguments(caller *Caller, calleeDecl *ast.FuncDecl, assign1 fun callArgs := caller.Call.Args if calleeDecl.Recv != nil { + if len(st.callee.impl.TypeParams) > 0 { + return nil, fmt.Errorf("cannot inline: generic methods not yet supported") + } sel := ast.Unparen(caller.Call.Fun).(*ast.SelectorExpr) seln := caller.Info.Selections[sel] var recvArg ast.Expr @@ -1536,9 +1576,52 @@ type parameter struct { // A replacer replaces an identifier at the given offset in the callee. // The replacement tree must not belong to the caller; use cloneNode as needed. // If unpackVariadic is set, the replacement is a composite resulting from -// variadic elimination, and may be unpackeded into variadic calls. +// variadic elimination, and may be unpacked into variadic calls. type replacer = func(offset int, repl ast.Expr, unpackVariadic bool) +// substituteTypeParams replaces type parameters in the callee with the corresponding type arguments +// from the call. +func substituteTypeParams(logf logger, typeParams []*paramInfo, typeArgs []*argument, params []*parameter, replace replacer) error { + assert(len(typeParams) == len(typeArgs), "mismatched number of type params/args") + for i, paramInfo := range typeParams { + arg := typeArgs[i] + // Perform a simplified, conservative shadow analysis: fail if there is any shadowing. + for free := range arg.freevars { + if paramInfo.Shadow[free] != 0 { + return fmt.Errorf("cannot inline: type argument #%d (type parameter %s) is shadowed", i, paramInfo.Name) + } + } + logf("replacing type param %s with %s", paramInfo.Name, debugFormatNode(token.NewFileSet(), arg.expr)) + for _, ref := range paramInfo.Refs { + replace(ref.Offset, internalastutil.CloneNode(arg.expr), false) + } + // Also replace parameter field types. + // TODO(jba): find a way to do this that is not so slow and clumsy. + // Ideally, we'd walk each p.fieldType once, replacing all type params together. + for _, p := range params { + if id, ok := p.fieldType.(*ast.Ident); ok && id.Name == paramInfo.Name { + p.fieldType = arg.expr + } else { + for _, id := range identsNamed(p.fieldType, paramInfo.Name) { + replaceNode(p.fieldType, id, arg.expr) + } + } + } + } + return nil +} + +func identsNamed(n ast.Node, name string) []*ast.Ident { + var ids []*ast.Ident + ast.Inspect(n, func(n ast.Node) bool { + if id, ok := n.(*ast.Ident); ok && id.Name == name { + ids = append(ids, id) + } + return true + }) + return ids +} + // substitute implements parameter elimination by substitution. // // It considers each parameter and its corresponding argument in turn @@ -1664,7 +1747,6 @@ next: // parameter is also removed by substitution. 
sg[arg] = nil // Absent shadowing, the arg is substitutable. - for free := range arg.freevars { switch s := param.info.Shadow[free]; { case s < 0: diff --git a/internal/refactor/inline/inline_test.go b/internal/refactor/inline/inline_test.go index a3934b5cd68..6a2a8b1d6b3 100644 --- a/internal/refactor/inline/inline_test.go +++ b/internal/refactor/inline/inline_test.go @@ -308,14 +308,22 @@ func doInlineNote(logf func(string, ...any), pkg *packages.Package, file *ast.Fi if want, ok := want.([]byte); ok { got = append(bytes.TrimSpace(got), '\n') want = append(bytes.TrimSpace(want), '\n') - if diff := diff.Unified("want", "got", string(want), string(got)); diff != "" { - return fmt.Errorf("Inline returned wrong output:\n%s\nWant:\n%s\nDiff:\n%s", - got, want, diff) + // If the "want" file begins "...", it need only be a substring of the "got" result, + // rather than an exact match. + if rest, ok := bytes.CutPrefix(want, []byte("...\n")); ok { + want = rest + if !bytes.Contains(got, want) { + return fmt.Errorf("Inline returned wrong output:\n%s\nWant substring:\n%s", got, want) + } + } else { + if diff := diff.Unified("want", "got", string(want), string(got)); diff != "" { + return fmt.Errorf("Inline returned wrong output:\n%s\nWant:\n%s\nDiff:\n%s", + got, want, diff) + } } return nil } return fmt.Errorf("Inline succeeded unexpectedly: want error matching %q, got <<%s>>", want, got) - } // findFuncByPosition returns the FuncDecl at the specified (package-agnostic) position. @@ -364,16 +372,16 @@ type testcase struct { func TestErrors(t *testing.T) { runTests(t, []testcase{ { - "Generic functions are not yet supported.", + "Inference of type parameters is not yet supported.", `func f[T any](x T) T { return x }`, `var _ = f(0)`, - `error: type parameters are not yet supported`, + `error: type parameter inference is not yet supported`, }, { "Methods on generic types are not yet supported.", `type G[T any] struct{}; func (G[T]) f(x T) T { return x }`, `var _ = G[int]{}.f(0)`, - `error: type parameters are not yet supported`, + `error: generic methods not yet supported`, }, }) } @@ -434,6 +442,13 @@ func TestBasics(t *testing.T) { } }`, }, + { + "Explicit type parameters.", + `func f[T any](x T) T { return x }`, + `var _ = f[int](0)`, + // TODO(jba): remove the unnecessary conversion. + `var _ = int(0)`, + }, }) } @@ -602,7 +617,6 @@ func f1(i int) int { return i + 1 }`, `func _() { print(F(f1), F(f1)) }`, }, }) - }) } @@ -1832,6 +1846,14 @@ func runTests(t *testing.T, tests []testcase) { if fun.Name == funcName { call = n } + case *ast.IndexExpr: + if id, ok := fun.X.(*ast.Ident); ok && id.Name == funcName { + call = n + } + case *ast.IndexListExpr: + if id, ok := fun.X.(*ast.Ident); ok && id.Name == funcName { + call = n + } } } return call == nil diff --git a/internal/refactor/inline/testdata/err-basic.txtar b/internal/refactor/inline/testdata/err-basic.txtar index 54377c70c4b..c57232ed60e 100644 --- a/internal/refactor/inline/testdata/err-basic.txtar +++ b/internal/refactor/inline/testdata/err-basic.txtar @@ -11,15 +11,6 @@ it doesn't even reach the Indent function. 
module testdata go 1.12 --- a/generic.go -- -package a - -func _() { - f[int]() //@ inline(re"f", re"type parameters are not yet supported") -} - -func f[T any]() {} - -- a/nobody.go -- package a diff --git a/internal/refactor/inline/testdata/err-shadow-builtin.txtar b/internal/refactor/inline/testdata/err-shadow-builtin.txtar index 520cda5d4e7..944fc336e4d 100644 --- a/internal/refactor/inline/testdata/err-shadow-builtin.txtar +++ b/internal/refactor/inline/testdata/err-shadow-builtin.txtar @@ -14,6 +14,12 @@ func _() { } func f() *int { return nil } +-- a/nil-type-param.go -- +package a + +func _[nil any]() { + _ = f() //@ inline(re"f", re"nil.*shadowed.*by.*typename.*line 3") +} -- a/nil-typename.go -- package a diff --git a/internal/refactor/inline/testdata/generic.txtar b/internal/refactor/inline/testdata/generic.txtar new file mode 100644 index 00000000000..ea0f5bf2677 --- /dev/null +++ b/internal/refactor/inline/testdata/generic.txtar @@ -0,0 +1,95 @@ +Inlining a call to a generic function. + +a1: explicit type args, no shadowing +a2: the call uses type inference +a3: the type argument is shadowed in the callee +a4: ditto, with a more complicated arg +a5: a free identifier in the callee is captured by a global + in the caller's scope (covered elsewhere; verifying for generics) +-- go.mod -- +module testdata +go 1.18 + +-- a/a1.go -- +package a + +func _() { + f[int](1) //@ inline(re"f", a1) +} + +func f[T any](x T) { print(x) } +-- a1 -- +... +func _() { + print(int(1)) //@ inline(re"f", a1) +} + +-- a/a1a.go -- +package a + +func _() { + f[([]int)]([]int{1}) //@ inline(re"f", a1a) +} + +func f[T any](x T) { print(x) } +-- a1a -- +... +func _() { + print(([]int)([]int{1})) //@ inline(re"f", a1a) +} + +-- a/a2.go -- +package a + +func _() { + f(1) //@ inline(re"f", re"cannot inline.*type.*inference") +} + +-- a/a3.go -- +package a + +func _() { + g[int]() //@ inline(re"g", re"cannot inline:.*shadow") +} + +func g[T any]() { + type int bool + var x T + print(x) +} + +-- a/a4.go -- +package a + +func _() { + g[map[int]string]() //@ inline(re"g", re"cannot inline:.*shadow") +} + +-- a/a5.go -- +package a + +import "testdata/b" + +type bool int + +func _() { + b.H[int]() //@ inline(re"H", re"cannot inline.*shadowed") +} +-- b/b.go -- +package b + +func H[T comparable]() { + var x map[T]bool + print(x) +} + +-- a/a6.go -- +package a + +type G[T any] struct{} + +func (G[T]) f(x T) { print(x) } + +func _() { + G[int]{}.f[bool]() //@ inline(re"f", re"generic methods not yet supported") +} From 264b0a5a37e0a3c97ba9989ca1e7459160387aa4 Mon Sep 17 00:00:00 2001 From: Peter Weinberger Date: Mon, 21 Apr 2025 18:17:32 -0400 Subject: [PATCH 221/270] gopls/internal/golang/completion: modernize This CL applies the modernizers to the completion code, and removes some ununsed functions. 
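The two most common rewrites applied by this CL are range-over-int loops and
slices.Contains. A standalone sketch (the package and function names here are
illustrative only; the real edits appear in the diff below):

    package p

    import "slices"

    // hasMod reports whether mod appears in mods, replacing the explicit
    // for/range loop shown in the candidate.hasMod hunk below.
    func hasMod(mods []int, mod int) bool {
        return slices.Contains(mods, mod)
    }

    // repeat runs its body n times, replacing the counted loop
    // "for i := 0; i < n; i++" (see the deep_completion_test.go hunk below).
    func repeat(n int) {
        for range n {
            // loop body elided
        }
    }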
Change-Id: I67a309f752deee000b7f556707e76f08aff14b9f Reviewed-on: https://go-review.googlesource.com/c/tools/+/667196 Reviewed-by: Alan Donovan Reviewed-by: Madeline Kalil LUCI-TryBot-Result: Go LUCI --- .../internal/golang/completion/completion.go | 8 +--- .../golang/completion/deep_completion_test.go | 2 +- gopls/internal/golang/completion/labels.go | 8 +--- gopls/internal/golang/completion/unify.go | 47 ------------------- 4 files changed, 4 insertions(+), 61 deletions(-) diff --git a/gopls/internal/golang/completion/completion.go b/gopls/internal/golang/completion/completion.go index a3270f97909..35d8dfff6a2 100644 --- a/gopls/internal/golang/completion/completion.go +++ b/gopls/internal/golang/completion/completion.go @@ -489,12 +489,7 @@ type candidate struct { } func (c candidate) hasMod(mod typeModKind) bool { - for _, m := range c.mods { - if m == mod { - return true - } - } - return false + return slices.Contains(c.mods, mod) } // Completion returns a list of possible candidates for completion, given a @@ -1487,7 +1482,6 @@ func (c *completer) selector(ctx context.Context, sel *ast.SelectorExpr) error { } for _, uri := range mp.CompiledGoFiles { - uri := uri g.Go(func() error { return quickParse(uri, mp, tooNew) }) diff --git a/gopls/internal/golang/completion/deep_completion_test.go b/gopls/internal/golang/completion/deep_completion_test.go index 27009af1b4f..d522b9be9a9 100644 --- a/gopls/internal/golang/completion/deep_completion_test.go +++ b/gopls/internal/golang/completion/deep_completion_test.go @@ -20,7 +20,7 @@ func TestDeepCompletionIsHighScore(t *testing.T) { } // Fill up with higher scores. - for i := 0; i < MaxDeepCompletions; i++ { + for range MaxDeepCompletions { if !s.isHighScore(10) { t.Error("10 should be high score") } diff --git a/gopls/internal/golang/completion/labels.go b/gopls/internal/golang/completion/labels.go index f0e5f42a67a..52afafebf25 100644 --- a/gopls/internal/golang/completion/labels.go +++ b/gopls/internal/golang/completion/labels.go @@ -8,6 +8,7 @@ import ( "go/ast" "go/token" "math" + "slices" ) type labelType int @@ -96,12 +97,7 @@ func (c *completer) labels(lt labelType) { // Only search into block-like nodes enclosing our "goto". // This prevents us from finding labels in nested blocks. case *ast.BlockStmt, *ast.CommClause, *ast.CaseClause: - for _, p := range c.path { - if n == p { - return true - } - } - return false + return slices.Contains(c.path, n) case *ast.LabeledStmt: addLabel(highScore, n) } diff --git a/gopls/internal/golang/completion/unify.go b/gopls/internal/golang/completion/unify.go index 8f4a1d3cbe0..f28ad49cd52 100644 --- a/gopls/internal/golang/completion/unify.go +++ b/gopls/internal/golang/completion/unify.go @@ -189,29 +189,6 @@ func (u *unifier) set(x *types.TypeParam, t types.Type) { *u.handles[x] = t } -// unknowns returns the number of type parameters for which no type has been set yet. -func (u *unifier) unknowns() int { - n := 0 - for _, h := range u.handles { - if *h == nil { - n++ - } - } - return n -} - -// inferred returns the list of inferred types for the given type parameter list. -// The result is never nil and has the same length as tparams; result types that -// could not be inferred are nil. Corresponding type parameters and result types -// have identical indices. 
-func (u *unifier) inferred(tparams []*types.TypeParam) []types.Type { - list := make([]types.Type, len(tparams)) - for i, x := range tparams { - list[i] = u.at(x) - } - return list -} - // asInterface returns the underlying type of x as an interface if // it is a non-type parameter interface. Otherwise it returns nil. func asInterface(x types.Type) (i *types.Interface) { @@ -245,30 +222,6 @@ func identicalOrigin(x, y *types.Named) bool { return x.Origin().Obj() == y.Origin().Obj() } -func match(x, y types.Type) types.Type { - // Common case: we don't have channels. - if types.Identical(x, y) { - return x - } - - // We may have channels that differ in direction only. - if x, _ := x.(*types.Chan); x != nil { - if y, _ := y.(*types.Chan); y != nil && types.Identical(x.Elem(), y.Elem()) { - // We have channels that differ in direction only. - // If there's an unrestricted channel, select the restricted one. - switch { - case x.Dir() == types.SendRecv: - return y - case y.Dir() == types.SendRecv: - return x - } - } - } - - // types are different - return nil -} - func coreType(t types.Type) types.Type { t = types.Unalias(t) tpar, _ := t.(*types.TypeParam) From e27768f877a2d5049ea7d1058aaca2fecd2c19b5 Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Tue, 15 Apr 2025 16:06:05 -0400 Subject: [PATCH 222/270] internal/refactor/inline: freeishNames initial scope Start the freeishNames walk with a non-nil scope. We may need it if the argument node establishes a binding without first opening a scope. (Example in the code.) For golang/go#73321. Change-Id: I72d45e26d27a726d869c9b8eb7d47bc187059832 Reviewed-on: https://go-review.googlesource.com/c/tools/+/665776 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan --- internal/refactor/inline/free.go | 6 +++++ internal/refactor/inline/free_test.go | 8 ++++++ internal/refactor/inline/testdata/n-ary.txtar | 25 +++++++++++++++++++ 3 files changed, 39 insertions(+) diff --git a/internal/refactor/inline/free.go b/internal/refactor/inline/free.go index 28cebeea3db..e3cf313a8a8 100644 --- a/internal/refactor/inline/free.go +++ b/internal/refactor/inline/free.go @@ -35,7 +35,13 @@ import ( // panics if it sees one. func freeishNames(free map[string]bool, n ast.Node, includeComplitIdents bool) { v := &freeVisitor{free: free, includeComplitIdents: includeComplitIdents} + // Begin with a scope, even though n might not be a form that establishes a scope. + // For example, n might be: + // x := ... + // Then we need to add the first x to some scope. + v.openScope() ast.Walk(v, n) + v.closeScope() assert(v.scope == nil, "unbalanced scopes") } diff --git a/internal/refactor/inline/free_test.go b/internal/refactor/inline/free_test.go index 28fa56db099..1922bfb6d2a 100644 --- a/internal/refactor/inline/free_test.go +++ b/internal/refactor/inline/free_test.go @@ -229,6 +229,14 @@ func TestFreeishNames(t *testing.T) { } } +func TestFreeishNamesScope(t *testing.T) { + // Verify that inputs that don't start a scope don't crash. + _, f := mustParse(t, "free.go", `package p; func _() { x := 1; _ = x }`) + // Select the short var decl, not the entire function body. 
+	n := f.Decls[0].(*ast.FuncDecl).Body.List[0]
+	freeishNames(map[string]bool{}, n, false)
+}
+
 func mustParse(t *testing.T, filename string, content any) (*token.FileSet, *ast.File) {
 	fset := token.NewFileSet()
 	f, err := parser.ParseFile(fset, filename, content, parser.ParseComments|parser.SkipObjectResolution)
diff --git a/internal/refactor/inline/testdata/n-ary.txtar b/internal/refactor/inline/testdata/n-ary.txtar
index 2de97358aed..9a96645fc92 100644
--- a/internal/refactor/inline/testdata/n-ary.txtar
+++ b/internal/refactor/inline/testdata/n-ary.txtar
@@ -77,3 +77,28 @@ func _() {
 }
 
 func f4() int { return 2 + 2 }
+-- e/e.go --
+package e
+
+func _() {
+	switch {
+	case true:
+		a, b := f5() //@ inline(re"f5", f5)
+		_, _ = a, b
+	}
+}
+
+func f5() (int, int) { return 2, 2}
+
+-- f5 --
+package e
+
+func _() {
+	switch {
+	case true:
+		a, b := 2, 2 //@ inline(re"f5", f5)
+		_, _ = a, b
+	}
+}
+
+func f5() (int, int) { return 2, 2 }

From d905d0b869ef8e25d111c12f4dd11af29cc6639d Mon Sep 17 00:00:00 2001
From: Alan Donovan
Date: Mon, 21 Apr 2025 14:21:38 -0400
Subject: [PATCH 223/270] gopls/internal/golang: add test for (unfixed) issue 65098

The test confirms the reported problematic behavior, and investigation
shows it to be (as suspected) a dup of golang/go#58461.

Also, a couple of minor cleanups from static analyzers.

Updates golang/go#58461
Updates golang/go#65098

Change-Id: I316c2f3bd23005526ccd9da461a18f1198373fe4
Reviewed-on: https://go-review.googlesource.com/c/tools/+/667135
LUCI-TryBot-Result: Go LUCI
Auto-Submit: Alan Donovan
Reviewed-by: Robert Findley
---
 gopls/internal/golang/rename.go               | 23 +++------
 .../marker/testdata/rename/issue65098.txt     | 49 +++++++++++++++++++
 .../test/marker/testdata/rename/methods.txt   |  8 +--
 3 files changed, 60 insertions(+), 20 deletions(-)
 create mode 100644 gopls/internal/test/marker/testdata/rename/issue65098.txt

diff --git a/gopls/internal/golang/rename.go b/gopls/internal/golang/rename.go
index fe36cb78a7f..24dbcbadc05 100644
--- a/gopls/internal/golang/rename.go
+++ b/gopls/internal/golang/rename.go
@@ -54,6 +54,7 @@ import (
 	"path"
 	"path/filepath"
 	"regexp"
+	"slices"
 	"sort"
 	"strconv"
 	"strings"
@@ -169,13 +170,7 @@ func PrepareRename(ctx context.Context, snapshot *cache.Snapshot, f file.Handle,
 
 func prepareRenamePackageName(ctx context.Context, snapshot *cache.Snapshot, pgf *parsego.File) (*PrepareItem, error) {
 	// Does the client support file renaming?
-	fileRenameSupported := false
-	for _, op := range snapshot.Options().SupportedResourceOperations {
-		if op == protocol.Rename {
-			fileRenameSupported = true
-			break
-		}
-	}
+	fileRenameSupported := slices.Contains(snapshot.Options().SupportedResourceOperations, protocol.Rename)
 	if !fileRenameSupported {
 		return nil, errors.New("can't rename package: LSP client does not support file renaming")
 	}
@@ -438,13 +433,7 @@ func Rename(ctx context.Context, snapshot *cache.Snapshot, f file.Handle, pp pro
 	// become reordered) and that are either identical or
 	// non-overlapping.
 	diff.SortEdits(edits)
-	filtered := edits[:0]
-	for i, edit := range edits {
-		if i == 0 || edit != filtered[len(filtered)-1] {
-			filtered = append(filtered, edit)
-		}
-	}
-	edits = filtered
+	edits = slices.Compact(edits)
 
 	// TODO(adonovan): the logic above handles repeat edits to the
 	// same file URI (e.g.
as a member of package p and p_test) but @@ -541,7 +530,7 @@ func renameOrdinary(ctx context.Context, snapshot *cache.Snapshot, f file.Handle // // Note that unlike Funcs, TypeNames are always canonical (they are "left" // of the type parameters, unlike methods). - switch obj.(type) { // avoid "obj :=" since cases reassign the var + switch obj0 := obj.(type) { // avoid "obj :=" since cases reassign the var case *types.TypeName: if _, ok := types.Unalias(obj.Type()).(*types.TypeParam); ok { // As with capitalized function parameters below, type parameters are @@ -549,7 +538,7 @@ func renameOrdinary(ctx context.Context, snapshot *cache.Snapshot, f file.Handle goto skipObjectPath } case *types.Func: - obj = obj.(*types.Func).Origin() + obj = obj0.Origin() case *types.Var: // TODO(adonovan): do vars need the origin treatment too? (issue #58462) @@ -563,7 +552,7 @@ func renameOrdinary(ctx context.Context, snapshot *cache.Snapshot, f file.Handle // objectpath, the classifies them as local vars, but as // they came from export data they lack syntax and the // correct scope tree (issue #61294). - if !obj.(*types.Var).IsField() && !typesinternal.IsPackageLevel(obj) { + if !obj0.IsField() && !typesinternal.IsPackageLevel(obj) { goto skipObjectPath } } diff --git a/gopls/internal/test/marker/testdata/rename/issue65098.txt b/gopls/internal/test/marker/testdata/rename/issue65098.txt new file mode 100644 index 00000000000..0285c32f294 --- /dev/null +++ b/gopls/internal/test/marker/testdata/rename/issue65098.txt @@ -0,0 +1,49 @@ +This is a test for issue 65098: a renaming in package a does not +propagate to package b, even though the two packages are coupled via +an assignment in c, which is renamed. + + c + / \ + a b + +The bug (a dup of #58461) is not yet fixed, so the golden file records +the wrong behavior (i.e. no changes to package b). +TODO(adonovan): fix. + +-- go.mod -- +module example.com +go 1.12 + +-- a/a.go -- +package a + +type I interface { + F() //@ rename("F", "FF", fToFF) +} + +-- b/b.go -- +package b + +type S struct{} + +func (s S) F() {} + +-- c/c.go -- +package c + +import ( + "example.com/a" + "example.com/b" +) + +var _ a.I = b.S{} +var _ = a.I.F + +-- @fToFF/a/a.go -- +@@ -4 +4 @@ +- F() //@ rename("F", "FF", fToFF) ++ FF() //@ rename("F", "FF", fToFF) +-- @fToFF/c/c.go -- +@@ -9 +9 @@ +-var _ = a.I.F ++var _ = a.I.FF diff --git a/gopls/internal/test/marker/testdata/rename/methods.txt b/gopls/internal/test/marker/testdata/rename/methods.txt index 5f5c5688479..0f38f85e3bf 100644 --- a/gopls/internal/test/marker/testdata/rename/methods.txt +++ b/gopls/internal/test/marker/testdata/rename/methods.txt @@ -1,8 +1,5 @@ This test exercises renaming of interface methods. -The golden is currently wrong due to https://github.com/golang/go/issues/58506: -the reference to B.F in package b should be renamed too. 
- -- go.mod -- module example.com go 1.12 @@ -25,6 +22,8 @@ type B interface { F() } //@rename("F", "G", BfToG) var _ B = a.A(0) var _ B = c.C(0) +var _ = B.F + -- c/c.go -- package c @@ -47,6 +46,9 @@ b/b.go:6:20: (rename example.com/b.B.F if you intend to change both types) @@ -6 +6 @@ -type B interface { F() } //@rename("F", "G", BfToG) +type B interface { G() } //@rename("F", "G", BfToG) +@@ -11 +11 @@ +-var _ = B.F ++var _ = B.G -- @BfToG/d/d.go -- @@ -5 +5 @@ -var _ = b.B.F From e9d2a36e6ce01df31bcf1342e7fbe6746b22268c Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Mon, 21 Apr 2025 14:59:45 -0400 Subject: [PATCH 224/270] gopls/internal/golang: pkgdoc: don't discard NewT for unexported t This CL causes the "Browse package doc" web page not to discard constructor functions NewT for unexported types t. Instead they are treated as ordinary top-level functions. Fixes golang/go#69553 Change-Id: If9b5395e214dfba34c97b98ccf8fb5e372c0c264 Reviewed-on: https://go-review.googlesource.com/c/tools/+/667117 Reviewed-by: Robert Findley Commit-Queue: Alan Donovan LUCI-TryBot-Result: Go LUCI Auto-Submit: Alan Donovan --- gopls/internal/golang/pkgdoc.go | 14 ++++++- .../test/integration/web/pkdoc_test.go | 42 +++++++++++++++++++ 2 files changed, 54 insertions(+), 2 deletions(-) diff --git a/gopls/internal/golang/pkgdoc.go b/gopls/internal/golang/pkgdoc.go index 2faff1a1526..9f2b2bf51a4 100644 --- a/gopls/internal/golang/pkgdoc.go +++ b/gopls/internal/golang/pkgdoc.go @@ -14,7 +14,7 @@ package golang // - rewrite using html/template. // Or factor with golang.org/x/pkgsite/internal/godoc/dochtml. // - emit breadcrumbs for parent + sibling packages. -// - list promoted methods---we have type information! +// - list promoted methods---we have type information! (golang/go#67158) // - gather Example tests, following go/doc and pkgsite. // - add option for doc.AllDecls: show non-exported symbols too. // - style the
  • bullets in the index as invisible. @@ -328,7 +328,17 @@ func PackageDocHTML(viewID string, pkg *cache.Package, web Web) ([]byte, error) filterValues(&t.Vars) filterFuncs(&t.Funcs) filterFuncs(&t.Methods) - return unexported(t.Name) + if unexported(t.Name) { + // If an unexported type has an exported constructor function, + // treat the constructor as an ordinary standalone function. + // We will sort Funcs again below. + docpkg.Funcs = append(docpkg.Funcs, t.Funcs...) + return true // delete this type + } + return false // keep this type + }) + slices.SortFunc(docpkg.Funcs, func(x, y *doc.Func) int { + return strings.Compare(x.Name, y.Name) }) } diff --git a/gopls/internal/test/integration/web/pkdoc_test.go b/gopls/internal/test/integration/web/pkdoc_test.go index 8d1573320d0..7f940e9ddd1 100644 --- a/gopls/internal/test/integration/web/pkdoc_test.go +++ b/gopls/internal/test/integration/web/pkdoc_test.go @@ -410,6 +410,48 @@ type D int }) } +// TestPkgDocConstructorOfUnexported tests that exported constructor +// functions (NewT) whose result type (t) is unexported are not +// discarded but are presented as ordinary top-level functions (#69553). +func TestPkgDocConstructorOfUnexported(t *testing.T) { + const files = ` +-- go.mod -- +module mod.com +go 1.20 + +-- a/a.go -- +package a + +func A() {} +func Z() {} + +type unexported int +func NewUnexported() unexported // exported constructor of unexported type + +type Exported int +func NewExported() Exported // exported constructor of exported type +` + Run(t, files, func(t *testing.T, env *Env) { + env.OpenFile("a/a.go") + uri1 := viewPkgDoc(t, env, env.Sandbox.Workdir.EntireFile("a/a.go")) + doc := get(t, uri1) + + want := regexp.QuoteMeta(` + + + + + + `) + checkMatch(t, true, doc, want) + }) +} + // viewPkgDoc invokes the "Browse package documentation" code action // at the specified location. It returns the URI of the document, or // fails the test. From cc6bc88ec9fb9219287c8a7e1c64725744ae95dc Mon Sep 17 00:00:00 2001 From: Rob Findley Date: Mon, 7 Apr 2025 20:46:00 +0000 Subject: [PATCH 225/270] internal/mcp: an MCP SDK prototype This CL contains an minimal prototype of an MCP SDK, using the "new" jsonrpc2_v2 package (similar to x/exp/jsonrpc2). Much is still yet to do, but this initial version addressed the following aspects: - Support for newline delimited JSONRPC2 framing, and message logging. - A generated protocol package, containing type definitions to be used in transport. - A minimal jsonschema package to be used for both reading the MCP spec, and for serving Tool input schemas. - A transport abstraction, and two transport implementations: stdio, and local (for testing). - Client and Server types, to be configured with features and then connected using their Connect methods, which return (respectively) a ServerConnection and ClientConnection object. - A minimal binding API for tools. A catalog of things not yet done is in doc.go, but we should review this CL eagerly, as it contains the fundamental building blocks of an eventual SDK. It is the intention that this package is more-or-less standalone, as it may eventually be carved out into a separate repository. As such, its only dependency within x/tools is jsonrpc2_v2, and the only change required to that package is to expose a callback when the connection is closed. 
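For a rough sense of the binding API, here is a sketch of a minimal server
(it mirrors the examples/hello program added in this CL; the names and
signatures reflect the current prototype, not a stable interface):

    package main

    import (
    	"context"
    	"fmt"
    	"os"

    	"golang.org/x/tools/internal/mcp"
    )

    // SayHiParams is the tool input; its schema is derived from the struct.
    type SayHiParams struct {
    	Name string `json:"name" mcp:"the name to say hi to"`
    }

    func SayHi(ctx context.Context, params *SayHiParams) ([]mcp.Content, error) {
    	return []mcp.Content{mcp.TextContent{Text: "Hi " + params.Name}}, nil
    }

    func main() {
    	// Register a single tool and serve it over stdin/stdout.
    	server := mcp.NewServer("greeter", "v0.0.1", nil)
    	server.AddTools(mcp.MakeTool("greet", "say hi", SayHi))

    	opts := &mcp.ConnectionOptions{Logger: os.Stderr}
    	if err := server.Run(context.Background(), mcp.NewStdIOTransport(), opts); err != nil {
    		fmt.Fprintf(os.Stderr, "Server failed: %v", err)
    	}
    }

Here the tool's input schema is generated from the SayHiParams struct by the
minimal jsonschema package described above.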
Change-Id: Ib66018961014f85a8884d56ab721d5642c98443c Reviewed-on: https://go-review.googlesource.com/c/tools/+/667036 Reviewed-by: Jonathan Amsterdam LUCI-TryBot-Result: Go LUCI --- gopls/internal/lsprpc/binder_test.go | 2 +- gopls/internal/lsprpc/export_test.go | 2 +- internal/jsonrpc2_v2/frame.go | 4 +- internal/jsonrpc2_v2/jsonrpc2_test.go | 2 +- internal/jsonrpc2_v2/serve.go | 6 +- internal/jsonrpc2_v2/serve_test.go | 12 +- internal/mcp/client.go | 191 +++++++++ internal/mcp/content.go | 64 +++ internal/mcp/examples/hello/main.go | 38 ++ internal/mcp/internal/jsonschema/schema.go | 130 ++++++ .../mcp/internal/jsonschema/schema_test.go | 65 +++ internal/mcp/internal/protocol/doc.go | 13 + internal/mcp/internal/protocol/generate.go | 400 ++++++++++++++++++ internal/mcp/internal/protocol/protocol.go | 225 ++++++++++ internal/mcp/mcp.go | 23 + internal/mcp/mcp_test.go | 161 +++++++ internal/mcp/server.go | 212 ++++++++++ internal/mcp/tool.go | 70 +++ internal/mcp/transport.go | 259 ++++++++++++ internal/mcp/util.go | 16 + 20 files changed, 1882 insertions(+), 13 deletions(-) create mode 100644 internal/mcp/client.go create mode 100644 internal/mcp/content.go create mode 100644 internal/mcp/examples/hello/main.go create mode 100644 internal/mcp/internal/jsonschema/schema.go create mode 100644 internal/mcp/internal/jsonschema/schema_test.go create mode 100644 internal/mcp/internal/protocol/doc.go create mode 100644 internal/mcp/internal/protocol/generate.go create mode 100644 internal/mcp/internal/protocol/protocol.go create mode 100644 internal/mcp/mcp.go create mode 100644 internal/mcp/mcp_test.go create mode 100644 internal/mcp/server.go create mode 100644 internal/mcp/tool.go create mode 100644 internal/mcp/transport.go create mode 100644 internal/mcp/util.go diff --git a/gopls/internal/lsprpc/binder_test.go b/gopls/internal/lsprpc/binder_test.go index 07a8b2cdf99..7072529d1c6 100644 --- a/gopls/internal/lsprpc/binder_test.go +++ b/gopls/internal/lsprpc/binder_test.go @@ -105,7 +105,7 @@ func (e *TestEnv) dial(ctx context.Context, t *testing.T, dialer jsonrpc2_v2.Dia l, _ := e.serve(ctx, t, NewForwardBinder(dialer)) dialer = l.Dialer() } - conn, err := jsonrpc2_v2.Dial(ctx, dialer, client) + conn, err := jsonrpc2_v2.Dial(ctx, dialer, client, nil) if err != nil { t.Fatal(err) } diff --git a/gopls/internal/lsprpc/export_test.go b/gopls/internal/lsprpc/export_test.go index 8cbdecc98a2..5050d3eda44 100644 --- a/gopls/internal/lsprpc/export_test.go +++ b/gopls/internal/lsprpc/export_test.go @@ -62,7 +62,7 @@ func (b *ForwardBinder) Bind(ctx context.Context, conn *jsonrpc2_v2.Connection) client := protocol.ClientDispatcherV2(conn) clientBinder := NewClientBinder(func(context.Context, protocol.Server) protocol.Client { return client }) - serverConn, err := jsonrpc2_v2.Dial(context.Background(), b.dialer, clientBinder) + serverConn, err := jsonrpc2_v2.Dial(context.Background(), b.dialer, clientBinder, nil) if err != nil { return jsonrpc2_v2.ConnectionOptions{ Handler: jsonrpc2_v2.HandlerFunc(func(context.Context, *jsonrpc2_v2.Request) (any, error) { diff --git a/internal/jsonrpc2_v2/frame.go b/internal/jsonrpc2_v2/frame.go index e4248328132..f993b0741e1 100644 --- a/internal/jsonrpc2_v2/frame.go +++ b/internal/jsonrpc2_v2/frame.go @@ -41,9 +41,9 @@ type Writer interface { // It is responsible for the framing and encoding of messages into wire form. type Framer interface { // Reader wraps a byte reader into a message reader. 
- Reader(rw io.Reader) Reader + Reader(io.Reader) Reader // Writer wraps a byte writer into a message writer. - Writer(rw io.Writer) Writer + Writer(io.Writer) Writer } // RawFramer returns a new Framer. diff --git a/internal/jsonrpc2_v2/jsonrpc2_test.go b/internal/jsonrpc2_v2/jsonrpc2_test.go index e42f63736c0..25e54fc80be 100644 --- a/internal/jsonrpc2_v2/jsonrpc2_test.go +++ b/internal/jsonrpc2_v2/jsonrpc2_test.go @@ -153,7 +153,7 @@ func testConnection(t *testing.T, framer jsonrpc2.Framer) { // also run all simple call tests in echo mode (*echo)(call).Invoke(t, ctx, h) } - }}) + }}, nil) if err != nil { t.Fatal(err) } diff --git a/internal/jsonrpc2_v2/serve.go b/internal/jsonrpc2_v2/serve.go index 7bac0103e8f..5c732907002 100644 --- a/internal/jsonrpc2_v2/serve.go +++ b/internal/jsonrpc2_v2/serve.go @@ -54,13 +54,15 @@ type Server struct { // Handler provided by the Binder, and will release its own resources when the // connection is broken, but the caller may Close it earlier to stop accepting // (or sending) new requests. -func Dial(ctx context.Context, dialer Dialer, binder Binder) (*Connection, error) { +// +// If non-nil, the onDone function is called when the connection is closed. +func Dial(ctx context.Context, dialer Dialer, binder Binder, onDone func()) (*Connection, error) { // dial a server rwc, err := dialer.Dial(ctx) if err != nil { return nil, err } - return newConnection(ctx, rwc, binder, nil), nil + return newConnection(ctx, rwc, binder, onDone), nil } // NewServer starts a new server listening for incoming connections and returns diff --git a/internal/jsonrpc2_v2/serve_test.go b/internal/jsonrpc2_v2/serve_test.go index 8eb572c9d01..7115cfbbd61 100644 --- a/internal/jsonrpc2_v2/serve_test.go +++ b/internal/jsonrpc2_v2/serve_test.go @@ -47,7 +47,7 @@ func TestIdleTimeout(t *testing.T) { // Exercise some connection/disconnection patterns, and then assert that when // our timer fires, the server exits. - conn1, err := jsonrpc2.Dial(ctx, listener.Dialer(), jsonrpc2.ConnectionOptions{}) + conn1, err := jsonrpc2.Dial(ctx, listener.Dialer(), jsonrpc2.ConnectionOptions{}, nil) if err != nil { if since := time.Since(idleStart); since < d { t.Fatalf("conn1 failed to connect after %v: %v", since, err) @@ -71,7 +71,7 @@ func TestIdleTimeout(t *testing.T) { // Since conn1 was successfully accepted and remains open, the server is // definitely non-idle. Dialing another simultaneous connection should // succeed. - conn2, err := jsonrpc2.Dial(ctx, listener.Dialer(), jsonrpc2.ConnectionOptions{}) + conn2, err := jsonrpc2.Dial(ctx, listener.Dialer(), jsonrpc2.ConnectionOptions{}, nil) if err != nil { conn1.Close() t.Fatalf("conn2 failed to connect while non-idle after %v: %v", time.Since(idleStart), err) @@ -96,7 +96,7 @@ func TestIdleTimeout(t *testing.T) { t.Fatalf("conn2.Close failed with error: %v", err) } - conn3, err := jsonrpc2.Dial(ctx, listener.Dialer(), jsonrpc2.ConnectionOptions{}) + conn3, err := jsonrpc2.Dial(ctx, listener.Dialer(), jsonrpc2.ConnectionOptions{}, nil) if err != nil { if since := time.Since(idleStart); since < d { t.Fatalf("conn3 failed to connect after %v: %v", since, err) @@ -205,7 +205,7 @@ func newFake(t *testing.T, ctx context.Context, l jsonrpc2.Listener) (*jsonrpc2. 
l.Dialer(), jsonrpc2.ConnectionOptions{ Handler: fakeHandler{}, - }) + }, nil) if err != nil { return nil, nil, err } @@ -250,7 +250,7 @@ func TestIdleListenerAcceptCloseRace(t *testing.T) { done := make(chan struct{}) go func() { - conn, err := jsonrpc2.Dial(ctx, listener.Dialer(), jsonrpc2.ConnectionOptions{}) + conn, err := jsonrpc2.Dial(ctx, listener.Dialer(), jsonrpc2.ConnectionOptions{}, nil) listener.Close() if err == nil { conn.Close() @@ -313,7 +313,7 @@ func TestCloseCallRace(t *testing.T) { return jsonrpc2.ConnectionOptions{Handler: h} })) - dialConn, err := jsonrpc2.Dial(ctx, listener.Dialer(), jsonrpc2.ConnectionOptions{}) + dialConn, err := jsonrpc2.Dial(ctx, listener.Dialer(), jsonrpc2.ConnectionOptions{}, nil) if err != nil { listener.Close() s.Wait() diff --git a/internal/mcp/client.go b/internal/mcp/client.go new file mode 100644 index 00000000000..2369a73d7df --- /dev/null +++ b/internal/mcp/client.go @@ -0,0 +1,191 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mcp + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "iter" + "slices" + "sync" + + jsonrpc2 "golang.org/x/tools/internal/jsonrpc2_v2" + "golang.org/x/tools/internal/mcp/internal/protocol" +) + +// A Client is an MCP client, which may be connected to one or more MCP servers +// using the [Client.Connect] method. +// +// TODO(rfindley): revisit the many-to-one relationship of clients and servers. +// It is a bit odd. +type Client struct { + name string + version string + + mu sync.Mutex + servers []*ServerConnection +} + +// NewClient creates a new Client. +// +// Use [Client.Connect] to connect it to an MCP server. +// +// If non-nil, the provided options configure the Client. +func NewClient(name, version string, opts *ClientOptions) *Client { + return &Client{ + name: name, + version: version, + } +} + +// Servers returns an iterator that yields the current set of server +// connections. +func (c *Client) Servers() iter.Seq[*ServerConnection] { + c.mu.Lock() + clients := slices.Clone(c.servers) + c.mu.Unlock() + return slices.Values(clients) +} + +// ClientOptions configures the behavior of the client, and apply to every +// client-server connection created using [Client.Connect]. +type ClientOptions struct{} + +// bind implements the binder[*ServerConnection] interface, so that Clients can +// be connected using [connect]. +func (c *Client) bind(conn *jsonrpc2.Connection) *ServerConnection { + sc := &ServerConnection{ + conn: conn, + client: c, + } + c.mu.Lock() + c.servers = append(c.servers, sc) + c.mu.Unlock() + return sc +} + +// disconnect implements the binder[*ServerConnection] interface, so that +// Clients can be connected using [connect]. +func (c *Client) disconnect(sc *ServerConnection) { + c.mu.Lock() + defer c.mu.Unlock() + c.servers = slices.DeleteFunc(c.servers, func(sc2 *ServerConnection) bool { + return sc2 == sc + }) +} + +// Connect connects the MCP client over the given transport and initializes an +// MCP session. +// +// It returns a connection object that may be used to query the MCP server, +// terminate the connection (with [Connection.Close]), or await server +// termination (with [Connection.Wait]). +// +// Typically, it is the responsibility of the client to close the connection +// when it is no longer needed. 
However, if the connection is closed by the +// server, calls or notifications will return an error wrapping +// [ErrConnectionClosed]. +func (c *Client) Connect(ctx context.Context, t *Transport, opts *ConnectionOptions) (sc *ServerConnection, err error) { + defer func() { + if sc != nil && err != nil { + _ = sc.Close() + } + }() + sc, err = connect(ctx, t, opts, c) + if err != nil { + return nil, err + } + params := &protocol.InitializeParams{ + ClientInfo: protocol.Implementation{Name: c.name, Version: c.version}, + } + if err := call(ctx, sc.conn, "initialize", params, &sc.initializeResult); err != nil { + return nil, err + } + if err := sc.conn.Notify(ctx, "initialized", &protocol.InitializedParams{}); err != nil { + return nil, err + } + return sc, nil +} + +// A ServerConnection is a connection with an MCP server. +// +// It handles messages from the client, and can be used to send messages to the +// client. Create a connection by calling [Server.Connect]. +type ServerConnection struct { + conn *jsonrpc2.Connection + client *Client + initializeResult *protocol.InitializeResult +} + +// Close performs a graceful close of the connection, preventing new requests +// from being handled, and waiting for ongoing requests to return. Close then +// terminates the connection. +func (cc *ServerConnection) Close() error { + return cc.conn.Close() +} + +// Wait waits for the connection to be closed by the server. +// Generally, clients should be responsible for closing the connection. +func (cc *ServerConnection) Wait() error { + return cc.conn.Wait() +} + +func (sc *ServerConnection) handle(ctx context.Context, req *jsonrpc2.Request) (any, error) { + switch req.Method { + } + return nil, jsonrpc2.ErrNotHandled +} + +// ListTools lists tools that are currently available on the server. +func (sc *ServerConnection) ListTools(ctx context.Context) ([]protocol.Tool, error) { + var ( + params = &protocol.ListToolsParams{} + result protocol.ListToolsResult + ) + if err := call(ctx, sc.conn, "tools/list", params, &result); err != nil { + return nil, err + } + return result.Tools, nil +} + +// CallTool calls the tool with the given name and arguments. +// +// TODO: make the following true: +// If the provided arguments do not conform to the schema for the given tool, +// the call fails. +func (sc *ServerConnection) CallTool(ctx context.Context, name string, args any) (_ []Content, err error) { + defer func() { + if err != nil { + err = fmt.Errorf("calling tool %q: %w", name, err) + } + }() + argJSON, err := json.Marshal(args) + if err != nil { + return nil, fmt.Errorf("marshaling args: %v", err) + } + var ( + params = &protocol.CallToolParams{ + Name: name, + Arguments: argJSON, + } + result protocol.CallToolResult + ) + if err := call(ctx, sc.conn, "tools/call", params, &result); err != nil { + return nil, err + } + content, err := unmarshalContent(result.Content) + if err != nil { + return nil, fmt.Errorf("unmarshaling tool content: %v", err) + } + if result.IsError { + if len(content) != 1 || !is[TextContent](content[0]) { + return nil, errors.New("malformed error content") + } + return nil, errors.New(content[0].(TextContent).Text) + } + return content, nil +} diff --git a/internal/mcp/content.go b/internal/mcp/content.go new file mode 100644 index 00000000000..5f13e4834c3 --- /dev/null +++ b/internal/mcp/content.go @@ -0,0 +1,64 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package mcp + +import ( + "encoding/json" + "fmt" + + "golang.org/x/tools/internal/mcp/internal/protocol" +) + +// Content is the abstract result of a Tool call. +// +// TODO: support all content types. +type Content interface { + toProtocol() any +} + +func marshalContent(content []Content) []json.RawMessage { + var msgs []json.RawMessage + for _, c := range content { + msg, err := json.Marshal(c.toProtocol()) + if err != nil { + panic(fmt.Sprintf("marshaling content: %v", err)) + } + msgs = append(msgs, msg) + } + return msgs +} + +func unmarshalContent(msgs []json.RawMessage) ([]Content, error) { + var content []Content + for _, msg := range msgs { + var allContent struct { + Type string `json:"type"` + Text json.RawMessage + } + if err := json.Unmarshal(msg, &allContent); err != nil { + return nil, fmt.Errorf("content missing \"type\"") + } + switch allContent.Type { + case "text": + var text string + if err := json.Unmarshal(allContent.Text, &text); err != nil { + return nil, fmt.Errorf("unmarshalling text content: %v", err) + } + content = append(content, TextContent{Text: text}) + default: + return nil, fmt.Errorf("unsupported content type %q", allContent.Type) + } + } + return content, nil +} + +// TextContent is a textual content. +type TextContent struct { + Text string +} + +func (c TextContent) toProtocol() any { + return protocol.TextContent{Type: "text", Text: c.Text} +} diff --git a/internal/mcp/examples/hello/main.go b/internal/mcp/examples/hello/main.go new file mode 100644 index 00000000000..36bb40a2bac --- /dev/null +++ b/internal/mcp/examples/hello/main.go @@ -0,0 +1,38 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package main + +import ( + "context" + "fmt" + "os" + + "golang.org/x/tools/internal/mcp" +) + +type Optional[T any] struct { + present bool + value T +} + +type SayHiParams struct { + Name string `json:"name" mcp:"the name to say hi to"` +} + +func SayHi(ctx context.Context, params *SayHiParams) ([]mcp.Content, error) { + return []mcp.Content{ + mcp.TextContent{Text: "Hi " + params.Name}, + }, nil +} + +func main() { + server := mcp.NewServer("greeter", "v0.0.1", nil) + server.AddTools(mcp.MakeTool("greet", "say hi", SayHi)) + + opts := &mcp.ConnectionOptions{Logger: os.Stderr} + if err := server.Run(context.Background(), mcp.NewStdIOTransport(), opts); err != nil { + fmt.Fprintf(os.Stderr, "Server failed: %v", err) + } +} diff --git a/internal/mcp/internal/jsonschema/schema.go b/internal/mcp/internal/jsonschema/schema.go new file mode 100644 index 00000000000..5ed9cbcdcf6 --- /dev/null +++ b/internal/mcp/internal/jsonschema/schema.go @@ -0,0 +1,130 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package jsonschema + +import ( + "fmt" + "reflect" + "strings" +) + +// A Schema is a JSON schema object. +// +// Right now, Schemas are only used for JSON serialization. In the future, they +// should support validation. 
+type Schema struct { + Definitions map[string]*Schema `json:"definitions"` + Type any `json:"type,omitempty"` + Ref string `json:"$ref,omitempty"` + Description string `json:"description,omitempty"` + Properties map[string]*Schema `json:"properties,omitempty"` + Required []string `json:"required,omitempty"` + Items *Schema `json:"items,omitempty"` + AdditionalProperties any `json:"additionalProperties,omitempty"` +} + +// ForType constructs a JSON schema object for the given type argument. +// +// The type T must not contain (possibly recursively) any of the following Go +// types, as they are incompatible with the JSON schema spec. +// - maps with key other than 'string' +// - function types +// - complex numbers +// - unsafe pointers +// +// TODO(rfindley): we could perhaps just skip these incompatible fields. +func ForType[T any]() (*Schema, error) { + return typeSchema(reflect.TypeFor[T](), make(map[reflect.Type]*Schema)) +} + +func typeSchema(t reflect.Type, seen map[reflect.Type]*Schema) (*Schema, error) { + if s := seen[t]; s != nil { + return s, nil + } + var ( + s = new(Schema) + err error + ) + seen[t] = s + + switch t.Kind() { + case reflect.Bool: + s.Type = "boolean" + + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, + reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + s.Type = "integer" + + case reflect.Float32, reflect.Float64: + s.Type = "number" + + case reflect.Interface: + // Unrestricted + + case reflect.Map: + if t.Key().Kind() != reflect.String { + return nil, fmt.Errorf("unsupported map key type %v", t.Key().Kind()) + } + s.Type = "object" + valueSchema, err := typeSchema(t.Elem(), seen) + if err != nil { + return nil, fmt.Errorf("computing map value schema: %v", err) + } + s.AdditionalProperties = valueSchema + + case reflect.Pointer: + s2, err := typeSchema(t.Elem(), seen) + if err != nil { + return nil, err + } + *s = *s2 + + case reflect.Slice, reflect.Array: + s.Type = "array" + itemSchema, err := typeSchema(t.Elem(), seen) + if err != nil { + return nil, fmt.Errorf("computing element schema: %v", err) + } + s.Items = itemSchema + + case reflect.String: + s.Type = "string" + + case reflect.Struct: + s.Type = "object" + s.AdditionalProperties = false + + for i := range t.NumField() { + if s.Properties == nil { + s.Properties = make(map[string]*Schema) + } + rfld := t.Field(i) + name, ok := jsonName(rfld) + if !ok { + continue + } + s.Properties[name], err = typeSchema(rfld.Type, seen) + if err != nil { + return nil, err + } + } + + default: + return nil, fmt.Errorf("type %v is unsupported by jsonschema", t.Kind()) + } + return s, nil +} + +func jsonName(f reflect.StructField) (string, bool) { + j, ok := f.Tag.Lookup("json") + if !ok { + return f.Name, f.IsExported() + } + name, _, _ := strings.Cut(j, ",") + if name == "" { + return f.Name, f.IsExported() + } + return name, name != "" && name != "-" +} diff --git a/internal/mcp/internal/jsonschema/schema_test.go b/internal/mcp/internal/jsonschema/schema_test.go new file mode 100644 index 00000000000..443ef54efd1 --- /dev/null +++ b/internal/mcp/internal/jsonschema/schema_test.go @@ -0,0 +1,65 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package jsonschema_test + +import ( + "testing" + + "github.com/google/go-cmp/cmp" + "golang.org/x/tools/internal/mcp/internal/jsonschema" +) + +func forType[T any]() *jsonschema.Schema { + s, err := jsonschema.ForType[T]() + if err != nil { + panic(err) + } + return s +} + +func TestForType(t *testing.T) { + type schema = jsonschema.Schema + tests := []struct { + name string + got *jsonschema.Schema + want *jsonschema.Schema + }{ + {"string", forType[string](), &schema{Type: "string"}}, + {"int", forType[int](), &schema{Type: "integer"}}, + {"int16", forType[int16](), &schema{Type: "integer"}}, + {"uint32", forType[int16](), &schema{Type: "integer"}}, + {"float64", forType[float64](), &schema{Type: "number"}}, + {"bool", forType[bool](), &schema{Type: "boolean"}}, + {"intmap", forType[map[string]int](), &schema{ + Type: "object", + AdditionalProperties: &schema{Type: "integer"}, + }}, + {"anymap", forType[map[string]any](), &schema{ + Type: "object", + AdditionalProperties: &schema{}, + }}, + {"struct", forType[struct { + F int `json:"f"` + G []float64 + Skip string `json:"-"` + unexported float64 + }](), &schema{ + Type: "object", + Properties: map[string]*schema{ + "f": {Type: "integer"}, + "G": {Type: "array", Items: &schema{Type: "number"}}, + }, + AdditionalProperties: false, + }}, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + if diff := cmp.Diff(test.want, test.got); diff != "" { + t.Errorf("ForType mismatch (-want +got):\n%s", diff) + } + }) + } +} diff --git a/internal/mcp/internal/protocol/doc.go b/internal/mcp/internal/protocol/doc.go new file mode 100644 index 00000000000..ec86936a35d --- /dev/null +++ b/internal/mcp/internal/protocol/doc.go @@ -0,0 +1,13 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:generate go run generate.go + +// The protocol package contains types that define the MCP protocol. +// +// It is auto-generated from the MCP spec. Run go generate to update it. +// The generated set of types is intended to be minimal, in the sense that we +// only generate types that are actually used by the SDK. See generate.go for +// instructions on how to generate more (or different) types. +package protocol diff --git a/internal/mcp/internal/protocol/generate.go b/internal/mcp/internal/protocol/generate.go new file mode 100644 index 00000000000..e4f430b750b --- /dev/null +++ b/internal/mcp/internal/protocol/generate.go @@ -0,0 +1,400 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build ignore + +// This script generates protocol definitions in protocol.go from the MCP spec. +// +// Only the set of declarations configured by the [declarations] value are +// generated. + +package main + +import ( + "bytes" + "cmp" + "encoding/json" + "flag" + "fmt" + "go/format" + "io" + "iter" + "log" + "net/http" + "os" + "slices" + "strings" + + "golang.org/x/tools/internal/mcp/internal/jsonschema" +) + +var schemaFile = flag.String("schema_file", "", "if set, use this file as the persistent schema file") + +// A typeConfig defines a rewrite to perform to a (possibly nested) struct +// field. In some cases, we may want to use an external type for the nested +// struct field. In others, we may want to extract the type definition to a +// name. 
+type typeConfig struct { + Name string // declaration name for the type + Substitute string // type definition to substitute + Fields config // individual field configuration, or nil +} + +type config map[string]*typeConfig + +// declarations configures the set of declarations to write. +// +// Top level declarations are only created if they are configured with a +// non-empty Name. Otherwise, they are discarded, though their fields may be +// extracted to types if they have a nested field configuration. +var declarations = config{ + "Annotations": {Name: "Annotations"}, + "CallToolRequest": { + Fields: config{"Params": {Name: "CallToolParams"}}, + }, + "CallToolResult": { + Name: "CallToolResult", + }, + "ClientCapabilities": {Name: "ClientCapabilities"}, + "Implementation": {Name: "Implementation"}, + "InitializeRequest": { + Fields: config{"Params": {Name: "InitializeParams"}}, + }, + "InitializeResult": { + Name: "InitializeResult", + }, + "InitializedNotification": { + Fields: config{"Params": {Name: "InitializedParams"}}, + }, + "ListToolsRequest": { + Fields: config{"Params": {Name: "ListToolsParams"}}, + }, + "ListToolsResult": { + Name: "ListToolsResult", + }, + "Role": {Name: "Role"}, + "ServerCapabilities": { + Name: "ServerCapabilities", + Fields: config{ + "Prompts": {Name: "PromptCapabilities"}, + "Resources": {Name: "ResourceCapabilities"}, + "Tools": {Name: "ToolCapabilities"}, + }, + }, + "TextContent": {Name: "TextContent"}, + "Tool": { + Name: "Tool", + Fields: config{"InputSchema": {Substitute: "*jsonschema.Schema"}}, + }, + "ToolAnnotations": { + Name: "ToolAnnotations", + }, +} + +func main() { + flag.Parse() + + // Load and unmarshal the schema. + data, err := loadSchema(*schemaFile) + if err != nil { + log.Fatal(err) + } + schema := new(jsonschema.Schema) + if err := json.Unmarshal(data, &schema); err != nil { + log.Fatal(err) + } + + // Collect named types. Since we may create new type definitions while + // writing types, we collect definitions and concatenate them later. This + // also allows us to sort. + named := make(map[string]*bytes.Buffer) + for name, def := range sorted(schema.Definitions) { + config := declarations[name] + if config == nil { + continue + } + if err := writeDecl(*config, def, named); err != nil { + log.Fatal(err) + } + } + + buf := new(bytes.Buffer) + fmt.Fprintf(buf, ` +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by generate.go. DO NOT EDIT. + +package protocol + +import ( + "encoding/json" + + "golang.org/x/tools/internal/mcp/internal/jsonschema" +) +`) + + // Write out types. 
+ for _, b := range sorted(named) { + fmt.Fprintln(buf) + fmt.Fprint(buf, b.String()) + } + + formatted, err := format.Source(buf.Bytes()) + if err != nil { + log.Println(buf.String()) + log.Fatalf("failed to format: %v", err) + } + if err := os.WriteFile("protocol.go", formatted, 0666); err != nil { + log.Fatalf("failed to write protocol.go: %v", err) + } +} + +func loadSchema(schemaFile string) (data []byte, err error) { + const schemaURL = "https://raw.githubusercontent.com/modelcontextprotocol/modelcontextprotocol/refs/heads/main/schema/2025-03-26/schema.json" + + if schemaFile != "" { + data, err = os.ReadFile(schemaFile) + if os.IsNotExist(err) { + data = nil + } else if err != nil { + return nil, fmt.Errorf("reading schema file %q: %v", schemaFile, err) + } + } + if data == nil { + resp, err := http.Get(schemaURL) + if err != nil { + return nil, fmt.Errorf("downloading schema: %v", err) + } + defer resp.Body.Close() + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("downloading schema: %v", resp.Status) + } + data, err = io.ReadAll(resp.Body) + if err != nil { + return nil, fmt.Errorf("reading schema body: %v", err) + } + if schemaFile != "" { + if err := os.WriteFile(schemaFile, data, 0666); err != nil { + return nil, fmt.Errorf("persisting schema: %v", err) + } + } + } + return data, nil +} + +func writeDecl(config typeConfig, def *jsonschema.Schema, named map[string]*bytes.Buffer) error { + var w io.Writer = io.Discard + if name := config.Name; name != "" { + if _, ok := named[name]; ok { + return nil + } + buf := new(bytes.Buffer) + w = buf + named[name] = buf + if def.Description != "" { + fmt.Fprintf(buf, "%s\n", toComment(def.Description)) + } + fmt.Fprintf(buf, "type %s ", name) + } + if err := writeType(w, &config, def, named); err != nil { + return err // Better error here? + } + fmt.Fprintf(w, "\n") + return nil +} + +// writeType writes the type definition to the given writer. +// +// If path is non-empty, it is the path to the field using this type, for the +// purpose of detecting field rewrites (see [fieldRewrite]). +// +// named is the in-progress collection of type definitions. New named types may +// be added during writeType, if they are extracted from inner fields. +func writeType(w io.Writer, config *typeConfig, def *jsonschema.Schema, named map[string]*bytes.Buffer) error { + // Use type names for Named types. + if name := strings.TrimPrefix(def.Ref, "#/definitions/"); name != "" { + // TODO: this check is not quite right: we should really panic if the + // definition is missing, *but only if w is not io.Discard*. That's not a + // great API: see if we can do something more explicit than io.Discard. + if cfg, ok := declarations[name]; ok { + if cfg.Name == "" && cfg.Substitute == "" { + panic(fmt.Sprintf("referenced type %q cannot be referred to (no name or substitution)", name)) + } + if cfg.Substitute != "" { + name = cfg.Substitute + } else { + name = cfg.Name + } + } + w.Write([]byte(name)) + return nil + } + + // For types that explicitly allow additional properties, we can either + // unmarshal them into a map[string]any, or delay unmarshalling with + // json.RawMessage. For now, use json.RawMessage as it defers the choice. 
+ if def.Type == "object" && def.AdditionalProperties != nil && def.AdditionalProperties != false { + w.Write([]byte("json.RawMessage")) + return nil + } + + switch typ := def.Type.(type) { + case string: + switch typ { + case "array": + fmt.Fprintf(w, "[]") + return writeType(w, nil, def.Items, named) + + case "boolean": + fmt.Fprintf(w, "bool") + + case "integer": + fmt.Fprintf(w, "int64") + + // not handled: "null" + + case "number": + // We could use json.Number here; use float64 for simplicity. + fmt.Fprintf(w, "float64") + + case "object": + fmt.Fprintf(w, "struct {\n") + for name, fieldDef := range sorted(def.Properties) { + if fieldDef.Description != "" { + fmt.Fprintf(w, "%s\n", toComment(fieldDef.Description)) + } + export := exportName(name) + fmt.Fprintf(w, "\t%s ", export) + + required := slices.Contains(def.Required, name) + + // If the field is not required, and is a struct type, indirect with a + // pointer so that it can be empty as defined by encoding/json. + // + // TODO: use omitzero when available. + needPointer := !required && + (strings.HasPrefix(fieldDef.Ref, "#/definitions/") || + fieldDef.Type == "object" && + (fieldDef.AdditionalProperties == nil || fieldDef.AdditionalProperties == false)) + + if config != nil && config.Fields[export] != nil { + r := config.Fields[export] + if r.Substitute != "" { + fmt.Fprintf(w, r.Substitute) + } else { + assert(r.Name != "", "missing ExtractTo") + if err := writeDecl(*r, fieldDef, named); err != nil { + return err + } + if needPointer { + fmt.Fprintf(w, "*") + } + fmt.Fprintf(w, r.Name) + } + } else { + if needPointer { + fmt.Fprintf(w, "*") + } + if err := writeType(w, nil, fieldDef, named); err != nil { + return fmt.Errorf("failed to write type for field %s: %v", export, err) + } + } + fmt.Fprintf(w, " `json:\"%s", name) + if !required { + fmt.Fprint(w, ",omitempty") + } + fmt.Fprint(w, "\"`\n") + } + fmt.Fprintf(w, "}") + + case "string": + fmt.Fprintf(w, "string") + + default: + fmt.Fprintf(w, "any") + } + + default: + // E.g. union types. + fmt.Fprintf(w, "json.RawMessage") + } + return nil +} + +// toComment converts a JSON schema description to a Go comment. +func toComment(description string) string { + var ( + buf strings.Builder + lineBuf strings.Builder + ) + const wrapAt = 80 + for line := range strings.SplitSeq(description, "\n") { + // Start a new paragraph, if the current is nonempty. + if len(line) == 0 && lineBuf.Len() > 0 { + buf.WriteString(lineBuf.String()) + lineBuf.Reset() + buf.WriteString("\n//\n") + continue + } + // Otherwise, fill in the current paragraph. + for field := range strings.FieldsSeq(line) { + if lineBuf.Len() > 0 && lineBuf.Len()+len(" ")+len(field) > wrapAt { + buf.WriteString(lineBuf.String()) + buf.WriteRune('\n') + lineBuf.Reset() + } + if lineBuf.Len() == 0 { + lineBuf.WriteString("//") + } + lineBuf.WriteString(" ") + lineBuf.WriteString(field) + } + } + if lineBuf.Len() > 0 { + buf.WriteString(lineBuf.String()) + } + return strings.TrimRight(buf.String(), "\n") +} + +// exportName returns an exported name for a Go symbol, based on the given name +// in the JSON schema, removing leading underscores and capitalizing. +func exportName(s string) string { + if strings.HasPrefix(s, "_") { + s = s[1:] + } + return strings.ToUpper(s[:1]) + s[1:] +} + +func assert(cond bool, msg string) { + if !cond { + panic(msg) + } +} + +// Helpers below are copied from gopls' moremaps package. + +// sorted returns an iterator over the entries of m in key order. 
+func sorted[M ~map[K]V, K cmp.Ordered, V any](m M) iter.Seq2[K, V] { + // TODO(adonovan): use maps.Sorted if proposal #68598 is accepted. + return func(yield func(K, V) bool) { + keys := keySlice(m) + slices.Sort(keys) + for _, k := range keys { + if !yield(k, m[k]) { + break + } + } + } +} + +// keySlice returns the keys of the map M, like slices.Collect(maps.Keys(m)). +func keySlice[M ~map[K]V, K comparable, V any](m M) []K { + r := make([]K, 0, len(m)) + for k := range m { + r = append(r, k) + } + return r +} diff --git a/internal/mcp/internal/protocol/protocol.go b/internal/mcp/internal/protocol/protocol.go new file mode 100644 index 00000000000..080c6d194da --- /dev/null +++ b/internal/mcp/internal/protocol/protocol.go @@ -0,0 +1,225 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by generate.go. DO NOT EDIT. + +package protocol + +import ( + "encoding/json" + + "golang.org/x/tools/internal/mcp/internal/jsonschema" +) + +// Optional annotations for the client. The client can use annotations to inform +// how objects are used or displayed +type Annotations struct { + // Describes who the intended customer of this object or data is. + // + // It can include multiple entries to indicate content useful for multiple + // audiences (e.g., `["user", "assistant"]`). + Audience []Role `json:"audience,omitempty"` + // Describes how important this data is for operating the server. + // + // A value of 1 means "most important," and indicates that the data is + // effectively required, while 0 means "least important," and indicates that the + // data is entirely optional. + Priority float64 `json:"priority,omitempty"` +} + +type CallToolParams struct { + Arguments json.RawMessage `json:"arguments,omitempty"` + Name string `json:"name"` +} + +// The server's response to a tool call. +// +// Any errors that originate from the tool SHOULD be reported inside the result +// object, with `isError` set to true, _not_ as an MCP protocol-level error +// response. Otherwise, the LLM would not be able to see that an error occurred +// and self-correct. +// +// However, any errors in _finding_ the tool, an error indicating that the +// server does not support tool calls, or any other exceptional conditions, +// should be reported as an MCP error response. +type CallToolResult struct { + // This result property is reserved by the protocol to allow clients and servers + // to attach additional metadata to their responses. + Meta json.RawMessage `json:"_meta,omitempty"` + Content []json.RawMessage `json:"content"` + // Whether the tool call ended in an error. + // + // If not set, this is assumed to be false (the call was successful). + IsError bool `json:"isError,omitempty"` +} + +// Capabilities a client may support. Known capabilities are defined here, in +// this schema, but this is not a closed set: any client can define its own, +// additional capabilities. +type ClientCapabilities struct { + // Experimental, non-standard capabilities that the client supports. + Experimental json.RawMessage `json:"experimental,omitempty"` + // Present if the client supports listing roots. + Roots *struct { + // Whether the client supports notifications for changes to the roots list. + ListChanged bool `json:"listChanged,omitempty"` + } `json:"roots,omitempty"` + // Present if the client supports sampling from an LLM. 
+ Sampling json.RawMessage `json:"sampling,omitempty"` +} + +// Describes the name and version of an MCP implementation. +type Implementation struct { + Name string `json:"name"` + Version string `json:"version"` +} + +type InitializeParams struct { + Capabilities ClientCapabilities `json:"capabilities"` + ClientInfo Implementation `json:"clientInfo"` + // The latest version of the Model Context Protocol that the client supports. + // The client MAY decide to support older versions as well. + ProtocolVersion string `json:"protocolVersion"` +} + +// After receiving an initialize request from the client, the server sends this +// response. +type InitializeResult struct { + // This result property is reserved by the protocol to allow clients and servers + // to attach additional metadata to their responses. + Meta json.RawMessage `json:"_meta,omitempty"` + Capabilities ServerCapabilities `json:"capabilities"` + // Instructions describing how to use the server and its features. + // + // This can be used by clients to improve the LLM's understanding of available + // tools, resources, etc. It can be thought of like a "hint" to the model. For + // example, this information MAY be added to the system prompt. + Instructions string `json:"instructions,omitempty"` + // The version of the Model Context Protocol that the server wants to use. This + // may not match the version that the client requested. If the client cannot + // support this version, it MUST disconnect. + ProtocolVersion string `json:"protocolVersion"` + ServerInfo Implementation `json:"serverInfo"` +} + +type InitializedParams json.RawMessage + +type ListToolsParams struct { + // An opaque token representing the current pagination position. If provided, + // the server should return results starting after this cursor. + Cursor string `json:"cursor,omitempty"` +} + +// The server's response to a tools/list request from the client. +type ListToolsResult struct { + // This result property is reserved by the protocol to allow clients and servers + // to attach additional metadata to their responses. + Meta json.RawMessage `json:"_meta,omitempty"` + // An opaque token representing the pagination position after the last returned + // result. If present, there may be more results available. + NextCursor string `json:"nextCursor,omitempty"` + Tools []Tool `json:"tools"` +} + +// Present if the server offers any prompt templates. +type PromptCapabilities struct { + // Whether this server supports notifications for changes to the prompt list. + ListChanged bool `json:"listChanged,omitempty"` +} + +// Present if the server offers any resources to read. +type ResourceCapabilities struct { + // Whether this server supports notifications for changes to the resource list. + ListChanged bool `json:"listChanged,omitempty"` + // Whether this server supports subscribing to resource updates. + Subscribe bool `json:"subscribe,omitempty"` +} + +// The sender or recipient of messages and data in a conversation. +type Role string + +// Capabilities that a server may support. Known capabilities are defined here, +// in this schema, but this is not a closed set: any server can define its own, +// additional capabilities. +type ServerCapabilities struct { + // Present if the server supports argument autocompletion suggestions. + Completions json.RawMessage `json:"completions,omitempty"` + // Experimental, non-standard capabilities that the server supports. 
+ Experimental json.RawMessage `json:"experimental,omitempty"` + // Present if the server supports sending log messages to the client. + Logging json.RawMessage `json:"logging,omitempty"` + // Present if the server offers any prompt templates. + Prompts *PromptCapabilities `json:"prompts,omitempty"` + // Present if the server offers any resources to read. + Resources *ResourceCapabilities `json:"resources,omitempty"` + // Present if the server offers any tools to call. + Tools *ToolCapabilities `json:"tools,omitempty"` +} + +// Text provided to or from an LLM. +type TextContent struct { + // Optional annotations for the client. + Annotations *Annotations `json:"annotations,omitempty"` + // The text content of the message. + Text string `json:"text"` + Type string `json:"type"` +} + +// Definition for a tool the client can call. +type Tool struct { + // Optional additional tool information. + Annotations *ToolAnnotations `json:"annotations,omitempty"` + // A human-readable description of the tool. + // + // This can be used by clients to improve the LLM's understanding of available + // tools. It can be thought of like a "hint" to the model. + Description string `json:"description,omitempty"` + // A JSON Schema object defining the expected parameters for the tool. + InputSchema *jsonschema.Schema `json:"inputSchema"` + // The name of the tool. + Name string `json:"name"` +} + +// Additional properties describing a Tool to clients. +// +// NOTE: all properties in ToolAnnotations are **hints**. They are not +// guaranteed to provide a faithful description of tool behavior (including +// descriptive properties like `title`). +// +// Clients should never make tool use decisions based on ToolAnnotations +// received from untrusted servers. +type ToolAnnotations struct { + // If true, the tool may perform destructive updates to its environment. If + // false, the tool performs only additive updates. + // + // (This property is meaningful only when `readOnlyHint == false`) + // + // Default: true + DestructiveHint bool `json:"destructiveHint,omitempty"` + // If true, calling the tool repeatedly with the same arguments will have no + // additional effect on the its environment. + // + // (This property is meaningful only when `readOnlyHint == false`) + // + // Default: false + IdempotentHint bool `json:"idempotentHint,omitempty"` + // If true, this tool may interact with an "open world" of external entities. If + // false, the tool's domain of interaction is closed. For example, the world of + // a web search tool is open, whereas that of a memory tool is not. + // + // Default: true + OpenWorldHint bool `json:"openWorldHint,omitempty"` + // If true, the tool does not modify its environment. + // + // Default: false + ReadOnlyHint bool `json:"readOnlyHint,omitempty"` + // A human-readable title for the tool. + Title string `json:"title,omitempty"` +} + +// Present if the server offers any tools to call. +type ToolCapabilities struct { + // Whether this server supports notifications for changes to the tool list. + ListChanged bool `json:"listChanged,omitempty"` +} diff --git a/internal/mcp/mcp.go b/internal/mcp/mcp.go new file mode 100644 index 00000000000..76f27bdceb3 --- /dev/null +++ b/internal/mcp/mcp.go @@ -0,0 +1,23 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// The mcp package provides an SDK for writing model context protocol clients +// and servers. 
It is a work-in-progress. As of writing, it is a prototype to +// explore the design space of client/server lifecycle and binding. +// +// To get started, create an MCP client or server with [NewClient] or +// [NewServer], then add features to your client or server using Add +// methods, then connect to a peer using a [Transport] instance and a call to +// [Client.Connect] or [Server.Connect]. +// +// TODO: +// - Support cancellation. +// - Support pagination. +// - Support all client/server operations. +// - Support Streamable HTTP transport. +// - Support multiple versions of the spec. +// - Implement proper JSON schema support, with both client-side and +// server-side validation.. +// - Support batched JSON messages. +package mcp diff --git a/internal/mcp/mcp_test.go b/internal/mcp/mcp_test.go new file mode 100644 index 00000000000..a66b25de0a6 --- /dev/null +++ b/internal/mcp/mcp_test.go @@ -0,0 +1,161 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mcp_test + +import ( + "context" + "errors" + "slices" + "strings" + "sync" + "testing" + + "github.com/google/go-cmp/cmp" + "golang.org/x/tools/internal/mcp" + "golang.org/x/tools/internal/mcp/internal/jsonschema" + "golang.org/x/tools/internal/mcp/internal/protocol" +) + +type hiParams struct { + Name string +} + +func sayHi(_ context.Context, v hiParams) ([]mcp.Content, error) { + return []mcp.Content{mcp.TextContent{Text: "hi " + v.Name}}, nil +} + +func TestEndToEnd(t *testing.T) { + ctx := context.Background() + ct, st := mcp.NewLocalTransport() + + s := mcp.NewServer("testServer", "v1.0.0", nil) + + // The 'greet' tool says hi. + s.AddTools(mcp.MakeTool("greet", "say hi", sayHi)) + + // The 'fail' tool returns this error. + failure := errors.New("mcp failure") + s.AddTools(mcp.MakeTool("fail", "just fail", func(context.Context, struct{}) ([]mcp.Content, error) { + return nil, failure + })) + + // Connect the server. + cc, err := s.Connect(ctx, st, nil) + if err != nil { + t.Fatal(err) + } + if got := slices.Collect(s.Clients()); len(got) != 1 { + t.Errorf("after connection, Clients() has length %d, want 1", len(got)) + } + + // Wait for the server to exit after the client closes its connection. + var clientWG sync.WaitGroup + clientWG.Add(1) + go func() { + if err := cc.Wait(); err != nil { + t.Errorf("server failed: %v", err) + } + clientWG.Done() + }() + + c := mcp.NewClient("testClient", "v1.0.0", nil) + + // Connect the client. 
+ sc, err := c.Connect(ctx, ct, nil) + if err != nil { + t.Fatal(err) + } + if got := slices.Collect(c.Servers()); len(got) != 1 { + t.Errorf("after connection, Servers() has length %d, want 1", len(got)) + } + + gotTools, err := sc.ListTools(ctx) + if err != nil { + t.Errorf("tools/list failed: %v", err) + } + wantTools := []protocol.Tool{{ + Name: "greet", + Description: "say hi", + InputSchema: &jsonschema.Schema{ + Type: "object", + Properties: map[string]*jsonschema.Schema{ + "Name": {Type: "string"}, + }, + AdditionalProperties: false, + }, + }, { + Name: "fail", + Description: "just fail", + InputSchema: &jsonschema.Schema{ + Type: "object", + AdditionalProperties: false, + }, + }} + if diff := cmp.Diff(wantTools, gotTools); diff != "" { + t.Fatalf("tools/list mismatch (-want +got):\n%s", diff) + } + + gotHi, err := sc.CallTool(ctx, "greet", hiParams{"user"}) + if err != nil { + t.Fatal(err) + } + wantHi := []mcp.Content{mcp.TextContent{Text: "hi user"}} + if diff := cmp.Diff(wantHi, gotHi); diff != "" { + t.Errorf("tools/call 'greet' mismatch (-want +got):\n%s", diff) + } + + if _, err := sc.CallTool(ctx, "fail", struct{}{}); err == nil || !strings.Contains(err.Error(), failure.Error()) { + t.Errorf("fail returned unexpected error: got %v, want containing %v", err, failure) + } + + // Disconnect. + sc.Close() + clientWG.Wait() + + // After disconnecting, neither client nor server should have any + // connections. + for range s.Clients() { + t.Errorf("unexpected client after disconnection") + } + for range c.Servers() { + t.Errorf("unexpected server after disconnection") + } +} + +func TestServerClosing(t *testing.T) { + ctx := context.Background() + ct, st := mcp.NewLocalTransport() + + s := mcp.NewServer("testServer", "v1.0.0", nil) + + // The 'greet' tool says hi. + s.AddTools(mcp.MakeTool("greet", "say hi", sayHi)) + cc, err := s.Connect(ctx, st, nil) + if err != nil { + t.Fatal(err) + } + + c := mcp.NewClient("testClient", "v1.0.0", nil) + sc, err := c.Connect(ctx, ct, nil) + if err != nil { + t.Fatal(err) + } + var wg sync.WaitGroup + wg.Add(1) + go func() { + if err := sc.Wait(); err != nil { + t.Errorf("server connection failed: %v", err) + } + wg.Done() + }() + if _, err := sc.CallTool(ctx, "greet", hiParams{"user"}); err != nil { + t.Fatalf("after connecting: %v", err) + } + cc.Close() + wg.Wait() + if _, err = sc.CallTool(ctx, "greet", hiParams{"user"}); !errors.Is(err, mcp.ErrConnectionClosed) { + t.Errorf("after disconnection, got error %v, want EOF", err) + } +} diff --git a/internal/mcp/server.go b/internal/mcp/server.go new file mode 100644 index 00000000000..c5553729a11 --- /dev/null +++ b/internal/mcp/server.go @@ -0,0 +1,212 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mcp + +import ( + "context" + "encoding/json" + "fmt" + "iter" + "slices" + "sync" + + jsonrpc2 "golang.org/x/tools/internal/jsonrpc2_v2" + "golang.org/x/tools/internal/mcp/internal/protocol" +) + +// A Server is an instance of an MCP server. +// +// Servers expose server-side MCP features, which can serve one or more MCP +// sessions by using [Server.Start] or [Server.Run]. +type Server struct { + name string + version string + opts ServerOptions + + mu sync.Mutex + tools []*Tool + clients []*ClientConnection +} + +// ServerOptions is used to configure behavior of the server. +type ServerOptions struct { + Instructions string +} + +// NewServer creates a new MCP server. 
The resulting server has no features: +// add features using [Server.AddTools]. (TODO: support more features). +// +// The server can be connected to one or more MCP clients using [Server.Start] +// or [Server.Run]. +// +// If non-nil, the provided options is used to configure the server. +func NewServer(name, version string, opts *ServerOptions) *Server { + if opts == nil { + opts = new(ServerOptions) + } + return &Server{ + name: name, + version: version, + opts: *opts, + } +} + +// AddTools adds the given tools to the server. +// +// TODO(rfindley): notify connected clients of any changes. +func (c *Server) AddTools(tools ...*Tool) { + c.mu.Lock() + defer c.mu.Unlock() + c.tools = append(c.tools, tools...) +} + +// Clients returns an iterator that yields the current set of client +// connections. +func (s *Server) Clients() iter.Seq[*ClientConnection] { + s.mu.Lock() + clients := slices.Clone(s.clients) + s.mu.Unlock() + return slices.Values(clients) +} + +func (c *Server) listTools(_ context.Context, params *protocol.ListToolsParams) (*protocol.ListToolsResult, error) { + c.mu.Lock() + defer c.mu.Unlock() + + res := new(protocol.ListToolsResult) + for _, t := range c.tools { + res.Tools = append(res.Tools, t.Definition) + } + return res, nil +} + +func (c *Server) callTool(ctx context.Context, params *protocol.CallToolParams) (*protocol.CallToolResult, error) { + c.mu.Lock() + var tool *Tool + if i := slices.IndexFunc(c.tools, func(t *Tool) bool { + return t.Definition.Name == params.Name + }); i >= 0 { + tool = c.tools[i] + } + c.mu.Unlock() + + if tool == nil { + return nil, fmt.Errorf("%s: unknown tool %q", jsonrpc2.ErrInvalidParams, params.Name) + } + return tool.Handler(ctx, params.Arguments) +} + +// Run runs the server over the given transport. +// +// Run blocks until the client terminates the connection. +func (c *Server) Run(ctx context.Context, t *Transport, opts *ConnectionOptions) error { + conn, err := c.Connect(ctx, t, opts) + if err != nil { + return err + } + return conn.Wait() +} + +// bind implements the binder[*ClientConnection] interface, so that Servers can +// be connected using [connect]. +func (c *Server) bind(conn *jsonrpc2.Connection) *ClientConnection { + cc := &ClientConnection{conn: conn, server: c} + c.mu.Lock() + c.clients = append(c.clients, cc) + c.mu.Unlock() + return cc +} + +// disconnect implements the binder[*ClientConnection] interface, so that +// Servers can be connected using [connect]. +func (c *Server) disconnect(cc *ClientConnection) { + c.mu.Lock() + defer c.mu.Unlock() + c.clients = slices.DeleteFunc(c.clients, func(cc2 *ClientConnection) bool { + return cc2 == cc + }) +} + +// Connect connects the MCP server over the given transport and starts handling +// messages. +// +// It returns a connection object that may be used to terminate the connection +// (with [Connection.Close]), or await client termination (with +// [Connection.Wait]). +func (c *Server) Connect(ctx context.Context, t *Transport, opts *ConnectionOptions) (*ClientConnection, error) { + return connect(ctx, t, opts, c) +} + +// A ClientConnection is a connection with an MCP client. +// +// It handles messages from the client, and can be used to send messages to the +// client. Create a connection by calling [Server.Connect]. 
+type ClientConnection struct { + conn *jsonrpc2.Connection + server *Server + + mu sync.Mutex + initializeParams *protocol.InitializeParams // set once initialize has been received +} + +func (cc *ClientConnection) handle(ctx context.Context, req *jsonrpc2.Request) (any, error) { + switch req.Method { + case "initialize": + return dispatch(ctx, req, cc.initialize) + + // TODO: handle initialized + + case "tools/list": + return dispatch(ctx, req, cc.server.listTools) + + case "tools/call": + return dispatch(ctx, req, cc.server.callTool) + + case "notifications/initialized": + } + return nil, jsonrpc2.ErrNotHandled +} + +func (cc *ClientConnection) initialize(ctx context.Context, params *protocol.InitializeParams) (*protocol.InitializeResult, error) { + cc.mu.Lock() + cc.initializeParams = params + cc.mu.Unlock() + + return &protocol.InitializeResult{ + // TODO(rfindley): support multiple protocol versions. + ProtocolVersion: "2024-11-05", + Capabilities: protocol.ServerCapabilities{ + Tools: &protocol.ToolCapabilities{ + ListChanged: true, + }, + }, + Instructions: cc.server.opts.Instructions, + ServerInfo: protocol.Implementation{ + Name: cc.server.name, + Version: cc.server.version, + }, + }, nil +} + +// Close performs a graceful close of the connection, preventing new requests +// from being handled, and waiting for ongoing requests to return. Close then +// terminates the connection. +func (cc *ClientConnection) Close() error { + return cc.conn.Close() +} + +// Wait waits for the connection to be closed by the client. +func (cc *ClientConnection) Wait() error { + return cc.conn.Wait() +} + +func dispatch[TParams, TResult any](ctx context.Context, req *jsonrpc2.Request, f func(context.Context, TParams) (TResult, error)) (TResult, error) { + var params TParams + if err := json.Unmarshal(req.Params, ¶ms); err != nil { + var zero TResult + return zero, err + } + return f(ctx, params) +} diff --git a/internal/mcp/tool.go b/internal/mcp/tool.go new file mode 100644 index 00000000000..7f1de944526 --- /dev/null +++ b/internal/mcp/tool.go @@ -0,0 +1,70 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mcp + +import ( + "context" + "encoding/json" + + "golang.org/x/tools/internal/mcp/internal/jsonschema" + "golang.org/x/tools/internal/mcp/internal/protocol" +) + +// A ToolHandler handles a call to tools/call. +type ToolHandler func(context.Context, json.RawMessage) (*protocol.CallToolResult, error) + +// A Tool is a tool definition that is bound to a tool handler. +type Tool struct { + Definition protocol.Tool + Handler ToolHandler +} + +// MakeTool is a helper to make a tool using reflection on the given handler. +// +// The input schema for the tool is extracted from the request type for the +// handler, and used to unmmarshal and validate requests to the handler. +// +// It is the caller's responsibility that the handler request type can produce +// a valid schema, as documented by [jsonschema.ForType]; otherwise, MakeTool +// panics. 
+func MakeTool[TReq any](name, description string, handler func(context.Context, TReq) ([]Content, error)) *Tool { + schema, err := jsonschema.ForType[TReq]() + if err != nil { + panic(err) + } + wrapped := func(ctx context.Context, args json.RawMessage) (*protocol.CallToolResult, error) { + var v TReq + if err := unmarshalSchema(args, schema, &v); err != nil { + return nil, err + } + content, err := handler(ctx, v) + if err != nil { + return &protocol.CallToolResult{ + Content: marshalContent([]Content{TextContent{Text: err.Error()}}), + IsError: true, + }, nil + } + res := &protocol.CallToolResult{ + Content: marshalContent(content), + } + return res, nil + } + return &Tool{ + Definition: protocol.Tool{ + Name: name, + Description: description, + InputSchema: schema, + }, + Handler: wrapped, + } +} + +// unmarshalSchema unmarshals data into v and validates the result according to +// the given schema. +func unmarshalSchema(data json.RawMessage, _ *jsonschema.Schema, v any) error { + // TODO: use reflection to create the struct type to unmarshal into. + // Separate validation from assignment. + return json.Unmarshal(data, v) +} diff --git a/internal/mcp/transport.go b/internal/mcp/transport.go new file mode 100644 index 00000000000..9b61c94063d --- /dev/null +++ b/internal/mcp/transport.go @@ -0,0 +1,259 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mcp + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "io" + "log" + "net" + "os" + + jsonrpc2 "golang.org/x/tools/internal/jsonrpc2_v2" +) + +// A JSONRPC2 error is an error defined by the JSONRPC2 spec. +type JSONRPC2Error = jsonrpc2.WireError + +// ErrConnectionClosed is returned when sending a message to a connection that +// is closed or in the process of closing. +var ErrConnectionClosed = errors.New("connection closed") + +// A Transport is used to create a bidirectional connection between MCP client +// and server. +type Transport struct { + dialer jsonrpc2.Dialer +} + +// ConnectionOptions configures the behavior of an individual client<->server +// connection. +type ConnectionOptions struct { + Logger io.Writer +} + +// NewStdIOTransport constructs a transport that communicates over +// stdin/stdout. +func NewStdIOTransport() *Transport { + dialer := dialerFunc(func(ctx context.Context) (io.ReadWriteCloser, error) { + return rwc{os.Stdin, os.Stdout}, nil + }) + return &Transport{ + dialer: dialer, + } +} + +// NewLocalTransport returns two in-memory transports that connect to +// each other, for testing purposes. +func NewLocalTransport() (*Transport, *Transport) { + c1, c2 := net.Pipe() + t1 := &Transport{ + dialer: dialerFunc(func(ctx context.Context) (io.ReadWriteCloser, error) { + return c1, nil + }), + } + t2 := &Transport{ + dialer: dialerFunc(func(ctx context.Context) (io.ReadWriteCloser, error) { + return c2, nil + }), + } + return t1, t2 +} + +// handler is an unexported version of jsonrpc2.Handler, to be implemented by +// [ServerConnection] and [ClientConnection]. +type handler interface { + handle(ctx context.Context, req *jsonrpc2.Request) (result any, err error) + comparable +} + +type binder[T handler] interface { + bind(*jsonrpc2.Connection) T + disconnect(T) +} + +func connect[H handler](ctx context.Context, t *Transport, opts *ConnectionOptions, b binder[H]) (H, error) { + if opts == nil { + opts = new(ConnectionOptions) + } + + // Frame messages using newline delimited JSON. 
+ // + // If logging is configured, write message logs. + var framer jsonrpc2.Framer = &ndjsonFramer{} + if opts.Logger != nil { + framer = &loggingFramer{opts.Logger, framer} + } + + var h H + + // Bind the server connection. + binder := jsonrpc2.BinderFunc(func(_ context.Context, conn *jsonrpc2.Connection) jsonrpc2.ConnectionOptions { + h = b.bind(conn) + return jsonrpc2.ConnectionOptions{ + Framer: framer, + Handler: jsonrpc2.HandlerFunc(h.handle), + OnInternalError: func(err error) { + log.Printf("Internal error: %v", err) + }, + } + }) + + // Clean up the connection when done. + onDone := func() { + b.disconnect(h) + } + + var zero H + _, err := jsonrpc2.Dial(ctx, t.dialer, binder, onDone) + if err != nil { + return zero, err + } + assert(h != zero, "unbound connection") + return h, nil +} + +// call executes and awaits a jsonrpc2 call on the given connection, +// translating errors into the mcp domain. +func call(ctx context.Context, conn *jsonrpc2.Connection, method string, params, result any) error { + err := conn.Call(ctx, method, params).Await(ctx, result) + switch { + case errors.Is(err, jsonrpc2.ErrClientClosing), errors.Is(err, jsonrpc2.ErrServerClosing): + return fmt.Errorf("calling %q: %w", method, ErrConnectionClosed) + case err != nil: + return fmt.Errorf("calling %q: %v", method, err) + } + return nil +} + +// The helpers below are used to bind transports to jsonrpc2. + +// A dialerFunc implements jsonrpc2.Dialer.Dial. +type dialerFunc func(context.Context) (io.ReadWriteCloser, error) + +func (f dialerFunc) Dial(ctx context.Context) (io.ReadWriteCloser, error) { + return f(ctx) +} + +// A readerFunc implements jsonrpc2.Reader.Read. +type readerFunc func(context.Context) (jsonrpc2.Message, int64, error) + +func (f readerFunc) Read(ctx context.Context) (jsonrpc2.Message, int64, error) { + return f(ctx) +} + +// A writerFunc implements jsonrpc2.Writer.Write. +type writerFunc func(context.Context, jsonrpc2.Message) (int64, error) + +func (f writerFunc) Write(ctx context.Context, msg jsonrpc2.Message) (int64, error) { + return f(ctx, msg) +} + +// A loggingFramer logs jsonrpc2 messages to its enclosed writer. +type loggingFramer struct { + w io.Writer + delegate jsonrpc2.Framer +} + +func (f *loggingFramer) Reader(rw io.Reader) jsonrpc2.Reader { + delegate := f.delegate.Reader(rw) + return readerFunc(func(ctx context.Context) (jsonrpc2.Message, int64, error) { + msg, n, err := delegate.Read(ctx) + if err != nil { + fmt.Fprintf(f.w, "read error: %v", err) + } else { + data, err := jsonrpc2.EncodeMessage(msg) + if err != nil { + fmt.Fprintf(f.w, "LoggingFramer: failed to marshal: %v", err) + } + fmt.Fprintf(f.w, "read: %s", string(data)) + } + return msg, n, err + }) +} + +func (f *loggingFramer) Writer(w io.Writer) jsonrpc2.Writer { + delegate := f.delegate.Writer(w) + return writerFunc(func(ctx context.Context, msg jsonrpc2.Message) (int64, error) { + n, err := delegate.Write(ctx, msg) + if err != nil { + fmt.Fprintf(f.w, "write error: %v", err) + } else { + data, err := jsonrpc2.EncodeMessage(msg) + if err != nil { + fmt.Fprintf(f.w, "LoggingFramer: failed to marshal: %v", err) + } + fmt.Fprintf(f.w, "write: %s", string(data)) + } + return n, err + }) +} + +// A rwc binds an io.ReadCloser and io.WriteCloser together to create an +// io.ReadWriteCloser. 
+type rwc struct { + rc io.ReadCloser + wc io.WriteCloser +} + +func (r rwc) Read(p []byte) (n int, err error) { + n, err = r.rc.Read(p) + return n, err +} + +func (r rwc) Write(p []byte) (n int, err error) { + n, err = r.wc.Write(p) + return n, err +} + +func (r rwc) Close() error { + return errors.Join(r.rc.Close(), r.wc.Close()) +} + +// A ndjsonFramer is a jsonrpc2.Framer that delimits messages with newlines. +// +// See also https://github.com/ndjson/ndjson-spec. +type ndjsonFramer struct{} +type rawReader struct{ in *json.Decoder } // relies on json.Decoder message boundaries +type ndjsonWriter struct{ out io.Writer } // writes newline message boundaries + +func (ndjsonFramer) Reader(rw io.Reader) jsonrpc2.Reader { + return &rawReader{in: json.NewDecoder(rw)} +} + +func (ndjsonFramer) Writer(rw io.Writer) jsonrpc2.Writer { + return &ndjsonWriter{out: rw} +} + +func (r *rawReader) Read(ctx context.Context) (jsonrpc2.Message, int64, error) { + select { + case <-ctx.Done(): + return nil, 0, ctx.Err() + default: + } + var raw json.RawMessage + if err := r.in.Decode(&raw); err != nil { + return nil, 0, err + } + msg, err := jsonrpc2.DecodeMessage(raw) + return msg, int64(len(raw)), err +} + +func (w *ndjsonWriter) Write(ctx context.Context, msg jsonrpc2.Message) (int64, error) { + select { + case <-ctx.Done(): + return 0, ctx.Err() + default: + } + data, err := jsonrpc2.EncodeMessage(msg) + if err != nil { + return 0, fmt.Errorf("marshaling message: %v", err) + } + data = append(data, '\n') // newline delimited + n, err := w.out.Write(data) + return int64(n), err +} diff --git a/internal/mcp/util.go b/internal/mcp/util.go new file mode 100644 index 00000000000..13f2056fb6c --- /dev/null +++ b/internal/mcp/util.go @@ -0,0 +1,16 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mcp + +func assert(cond bool, msg string) { + if !cond { + panic(msg) + } +} + +func is[T any](v any) bool { + _, ok := v.(T) + return ok +} From 357c3da4dc8194ec66440635675199255a5bce28 Mon Sep 17 00:00:00 2001 From: Rob Findley Date: Tue, 22 Apr 2025 13:28:16 +0000 Subject: [PATCH 226/270] internal/mcp: add batching support Update the jsonrpc2 MCP framer to support both incoming and outgoing batches. In order to achieve this, we must correlate the framed Reader and Writer, which is not explicitly supported by the jsonrpc2 API, but does work. A note is left to revisit the Framer interface. Change-Id: If060cb5822da067833db20d58f5f112a0528da91 Reviewed-on: https://go-review.googlesource.com/c/tools/+/667295 Reviewed-by: Jonathan Amsterdam LUCI-TryBot-Result: Go LUCI --- internal/jsonrpc2_v2/frame.go | 9 ++ internal/mcp/mcp.go | 1 - internal/mcp/mcp_test.go | 32 ++++- internal/mcp/transport.go | 233 +++++++++++++++++++++++++++++++-- internal/mcp/transport_test.go | 61 +++++++++ 5 files changed, 324 insertions(+), 12 deletions(-) create mode 100644 internal/mcp/transport_test.go diff --git a/internal/jsonrpc2_v2/frame.go b/internal/jsonrpc2_v2/frame.go index f993b0741e1..62c2152b566 100644 --- a/internal/jsonrpc2_v2/frame.go +++ b/internal/jsonrpc2_v2/frame.go @@ -39,6 +39,15 @@ type Writer interface { // Framer wraps low level byte readers and writers into jsonrpc2 message // readers and writers. // It is responsible for the framing and encoding of messages into wire form. 
+// +// TODO(rfindley): rethink the framer interface, as with JSONRPC2 batching +// there is a need for Reader and Writer to be correlated, and while the +// implementation of framing here allows that, it is not made explicit by the +// interface. +// +// Perhaps a better interface would be +// +// Frame(io.ReadWriteCloser) (Reader, Writer). type Framer interface { // Reader wraps a byte reader into a message reader. Reader(io.Reader) Reader diff --git a/internal/mcp/mcp.go b/internal/mcp/mcp.go index 76f27bdceb3..1dc6413fbc4 100644 --- a/internal/mcp/mcp.go +++ b/internal/mcp/mcp.go @@ -19,5 +19,4 @@ // - Support multiple versions of the spec. // - Implement proper JSON schema support, with both client-side and // server-side validation.. -// - Support batched JSON messages. package mcp diff --git a/internal/mcp/mcp_test.go b/internal/mcp/mcp_test.go index a66b25de0a6..375818cae3c 100644 --- a/internal/mcp/mcp_test.go +++ b/internal/mcp/mcp_test.go @@ -142,6 +142,7 @@ func TestServerClosing(t *testing.T) { if err != nil { t.Fatal(err) } + var wg sync.WaitGroup wg.Add(1) go func() { @@ -155,7 +156,36 @@ func TestServerClosing(t *testing.T) { } cc.Close() wg.Wait() - if _, err = sc.CallTool(ctx, "greet", hiParams{"user"}); !errors.Is(err, mcp.ErrConnectionClosed) { + if _, err := sc.CallTool(ctx, "greet", hiParams{"user"}); !errors.Is(err, mcp.ErrConnectionClosed) { t.Errorf("after disconnection, got error %v, want EOF", err) } } + +func TestBatching(t *testing.T) { + ctx := context.Background() + ct, st := mcp.NewLocalTransport() + + s := mcp.NewServer("testServer", "v1.0.0", nil) + _, err := s.Connect(ctx, st, nil) + if err != nil { + t.Fatal(err) + } + + c := mcp.NewClient("testClient", "v1.0.0", nil) + opts := new(mcp.ConnectionOptions) + mcp.BatchSize(opts, 2) + sc, err := c.Connect(ctx, ct, opts) + if err != nil { + t.Fatal(err) + } + defer sc.Close() + + errs := make(chan error, 2) + for range 2 { + go func() { + _, err := sc.ListTools(ctx) + errs <- err + }() + } + +} diff --git a/internal/mcp/transport.go b/internal/mcp/transport.go index 9b61c94063d..28283313629 100644 --- a/internal/mcp/transport.go +++ b/internal/mcp/transport.go @@ -13,6 +13,7 @@ import ( "log" "net" "os" + "sync" jsonrpc2 "golang.org/x/tools/internal/jsonrpc2_v2" ) @@ -33,7 +34,9 @@ type Transport struct { // ConnectionOptions configures the behavior of an individual client<->server // connection. type ConnectionOptions struct { - Logger io.Writer + Logger io.Writer // if set, write RPC logs + + batchSize int // outgoing batch size for requests/notifications, for testing } // NewStdIOTransport constructs a transport that communicates over @@ -215,40 +218,238 @@ func (r rwc) Close() error { } // A ndjsonFramer is a jsonrpc2.Framer that delimits messages with newlines. +// It also supports jsonrpc2 batching. +// +// See https://github.com/ndjson/ndjson-spec for discussion of newline +// delimited JSON. // -// See also https://github.com/ndjson/ndjson-spec. -type ndjsonFramer struct{} -type rawReader struct{ in *json.Decoder } // relies on json.Decoder message boundaries -type ndjsonWriter struct{ out io.Writer } // writes newline message boundaries +// See [msgBatch] for more discussion of message batching. +type ndjsonFramer struct { + // batchSize allows customizing batching behavior for testing. + // + // If set to a positive number, requests and notifications will be buffered + // into groups of this size before being sent as a batch. 
+ batchSize int + + // batches correlate incoming requests to the batch in which they arrived. + batchMu sync.Mutex + batches map[jsonrpc2.ID]*msgBatch // lazily allocated +} -func (ndjsonFramer) Reader(rw io.Reader) jsonrpc2.Reader { - return &rawReader{in: json.NewDecoder(rw)} +// addBatch records a msgBatch for an incoming batch payload. +// It returns an error if batch is malformed, containing previously seen IDs. +// +// See [msgBatch] for more. +func (f *ndjsonFramer) addBatch(batch *msgBatch) error { + f.batchMu.Lock() + defer f.batchMu.Unlock() + for id := range batch.unresolved { + if _, ok := f.batches[id]; ok { + return fmt.Errorf("%w: batch contains previously seen request %v", jsonrpc2.ErrInvalidRequest, id.Raw()) + } + } + for id := range batch.unresolved { + if f.batches == nil { + f.batches = make(map[jsonrpc2.ID]*msgBatch) + } + f.batches[id] = batch + } + return nil } -func (ndjsonFramer) Writer(rw io.Writer) jsonrpc2.Writer { - return &ndjsonWriter{out: rw} +// updateBatch records a response in the message batch tracking the +// corresponding incoming call, if any. +// +// The second result reports whether resp was part of a batch. If this is true, +// the first result is nil if the batch is still incomplete, or the full set of +// batch responses if resp completed the batch. +func (f *ndjsonFramer) updateBatch(resp *jsonrpc2.Response) ([]*jsonrpc2.Response, bool) { + f.batchMu.Lock() + defer f.batchMu.Unlock() + + if batch, ok := f.batches[resp.ID]; ok { + idx, ok := batch.unresolved[resp.ID] + if !ok { + panic("internal error: inconsistent batches") + } + batch.responses[idx] = resp + delete(batch.unresolved, resp.ID) + delete(f.batches, resp.ID) + if len(batch.unresolved) == 0 { + return batch.responses, true + } + return nil, true + } + return nil, false } -func (r *rawReader) Read(ctx context.Context) (jsonrpc2.Message, int64, error) { +// A msgBatch records information about an incoming batch of JSONRPC2 calls. +// +// The JSONRPC2 spec (https://www.jsonrpc.org/specification#batch) says: +// +// "The Server should respond with an Array containing the corresponding +// Response objects, after all of the batch Request objects have been +// processed. A Response object SHOULD exist for each Request object, except +// that there SHOULD NOT be any Response objects for notifications. The Server +// MAY process a batch rpc call as a set of concurrent tasks, processing them +// in any order and with any width of parallelism." +// +// Therefore, a msgBatch keeps track of outstanding calls and their responses. +// When there are no unresolved calls, the response payload is sent. +type msgBatch struct { + unresolved map[jsonrpc2.ID]int + responses []*jsonrpc2.Response +} + +// An ndjsonReader reads newline-delimited messages or message batches. +type ndjsonReader struct { + queue []jsonrpc2.Message + framer *ndjsonFramer + in *json.Decoder +} + +// A ndjsonWriter writes newline-delimited messages to the wrapped io.Writer. +// +// If batch is set, messages are wrapped in a JSONRPC2 batch. +type ndjsonWriter struct { + // Testing support: if outgoingBatch has capacity, it is used to buffer + // outgoing messages before sending a JSONRPC2 message batch. 
+ outgoingBatch []jsonrpc2.Message + + framer *ndjsonFramer // to track batch responses + out io.Writer // to write to the wire +} + +func (f *ndjsonFramer) Reader(r io.Reader) jsonrpc2.Reader { + return &ndjsonReader{framer: f, in: json.NewDecoder(r)} +} + +func (f *ndjsonFramer) Writer(w io.Writer) jsonrpc2.Writer { + writer := &ndjsonWriter{framer: f, out: w} + if f.batchSize > 0 { + writer.outgoingBatch = make([]jsonrpc2.Message, 0, f.batchSize) + } + return writer +} + +func (r *ndjsonReader) Read(ctx context.Context) (jsonrpc2.Message, int64, error) { select { case <-ctx.Done(): return nil, 0, ctx.Err() default: } + if len(r.queue) > 0 { + next := r.queue[0] + r.queue = r.queue[1:] + return next, 0, nil + } var raw json.RawMessage if err := r.in.Decode(&raw); err != nil { return nil, 0, err } + var rawBatch []json.RawMessage + if err := json.Unmarshal(raw, &rawBatch); err == nil { + msg, err := r.readBatch(rawBatch) + if err != nil { + return nil, 0, err + } + return msg, int64(len(raw)), nil + } msg, err := jsonrpc2.DecodeMessage(raw) return msg, int64(len(raw)), err } +// readBatch reads a batch of jsonrpc2 messages, and records the batch +// in the framer so that responses can be collected and send back together. +func (r *ndjsonReader) readBatch(rawBatch []json.RawMessage) (jsonrpc2.Message, error) { + if len(rawBatch) == 0 { + return nil, fmt.Errorf("empty batch") + } + + // From the spec: + // "If the batch rpc call itself fails to be recognized as an valid JSON or + // as an Array with at least one value, the response from the Server MUST be + // a single Response object. If there are no Response objects contained + // within the Response array as it is to be sent to the client, the server + // MUST NOT return an empty Array and should return nothing at all." + // + // In our case, an error actually breaks the jsonrpc2 connection entirely, + // but defensively we collect batch information before recording it, so that + // we don't leave the framer in an inconsistent state. + var ( + first jsonrpc2.Message // first message, to return + queue []jsonrpc2.Message // remaining messages + respBatch *msgBatch // tracks incoming requests in the batch + ) + for i, raw := range rawBatch { + msg, err := jsonrpc2.DecodeMessage(raw) + if err != nil { + return nil, err + } + if i == 0 { + first = msg + } else { + queue = append(queue, msg) + } + if req, ok := msg.(*jsonrpc2.Request); ok { + if respBatch == nil { + respBatch = &msgBatch{ + unresolved: make(map[jsonrpc2.ID]int), + } + } + respBatch.unresolved[req.ID] = len(respBatch.responses) + respBatch.responses = append(respBatch.responses, nil) + } + } + if respBatch != nil { + // The batch contains one or more incoming requests to track. + if err := r.framer.addBatch(respBatch); err != nil { + return nil, err + } + } + + r.queue = append(r.queue, queue...) + return first, nil +} + func (w *ndjsonWriter) Write(ctx context.Context, msg jsonrpc2.Message) (int64, error) { select { case <-ctx.Done(): return 0, ctx.Err() default: } + + // Batching support: if msg is a Response, it may have completed a batch, so + // check that first. Otherwise, it is a request or notification, and we may + // want to collect it into a batch before sending, if we're configured to use + // outgoing batches. 
+ if resp, ok := msg.(*jsonrpc2.Response); ok { + if batch, ok := w.framer.updateBatch(resp); ok { + if len(batch) > 0 { + data, err := marshalMessages(batch) + if err != nil { + return 0, err + } + data = append(data, '\n') + n, err := w.out.Write(data) + return int64(n), err + } + return 0, nil + } + } else if len(w.outgoingBatch) < cap(w.outgoingBatch) { + w.outgoingBatch = append(w.outgoingBatch, msg) + if len(w.outgoingBatch) == cap(w.outgoingBatch) { + data, err := marshalMessages(w.outgoingBatch) + w.outgoingBatch = w.outgoingBatch[:0] + if err != nil { + return 0, err + } + data = append(data, '\n') + n, err := w.out.Write(data) + return int64(n), err + } + return 0, nil + } data, err := jsonrpc2.EncodeMessage(msg) if err != nil { return 0, fmt.Errorf("marshaling message: %v", err) @@ -257,3 +458,15 @@ func (w *ndjsonWriter) Write(ctx context.Context, msg jsonrpc2.Message) (int64, n, err := w.out.Write(data) return int64(n), err } + +func marshalMessages[T jsonrpc2.Message](msgs []T) ([]byte, error) { + var rawMsgs []json.RawMessage + for _, msg := range msgs { + raw, err := jsonrpc2.EncodeMessage(msg) + if err != nil { + return nil, fmt.Errorf("encoding batch message: %w", err) + } + rawMsgs = append(rawMsgs, raw) + } + return json.Marshal(rawMsgs) +} diff --git a/internal/mcp/transport_test.go b/internal/mcp/transport_test.go new file mode 100644 index 00000000000..a96a3855952 --- /dev/null +++ b/internal/mcp/transport_test.go @@ -0,0 +1,61 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mcp + +import ( + "context" + "io" + "testing" + + jsonrpc2 "golang.org/x/tools/internal/jsonrpc2_v2" +) + +// BatchSize causes a connection to collect n requests or notifications before +// sending a batch on the wire (responses are always sent in isolation). +// +// Exported for testing in the mcp_test package. +func BatchSize(opts *ConnectionOptions, n int) { + opts.batchSize = n +} + +func TestBatchFraming(t *testing.T) { + // This test checks that the ndjsonFramer can read and write JSON batches. + // + // The framer is configured to write a batch size of 2, and we confirm that + // nothing is sent over the wire until the second write, at which point both + // messages become available. + ctx := context.Background() + + r, w := io.Pipe() + framer := ndjsonFramer{batchSize: 2} + reader := framer.Reader(r) + writer := framer.Writer(w) + + // Read the two messages into a channel, for easy testing later. + read := make(chan jsonrpc2.Message) + go func() { + for range 2 { + msg, _, _ := reader.Read(ctx) + read <- msg + } + }() + + // The first write should not yet be observed by the reader. + writer.Write(ctx, &jsonrpc2.Request{ID: jsonrpc2.Int64ID(1), Method: "test"}) + select { + case got := <-read: + t.Fatalf("after one write, got message %v", got) + default: + } + + // ...but the second write causes both messages to be observed. + writer.Write(ctx, &jsonrpc2.Request{ID: jsonrpc2.Int64ID(2), Method: "test"}) + for _, want := range []int64{1, 2} { + got := <-read + if got := got.(*jsonrpc2.Request).ID.Raw(); got != want { + t.Errorf("got message #%d, want #%d", got, want) + } + } +} From 04af9bfdf69474e344a6c4a8ada2adb60802351d Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Tue, 22 Apr 2025 17:41:06 -0400 Subject: [PATCH 227/270] go/analysis/passes/hostport: publish This CL moves gopls' hostport analyzer into the public passes directory. 
It will be added to cmd/vet soon. Updates golang/go#28308 Change-Id: I97449d7bfda63f0dad321f6b8e1b8acb6711b3bf Reviewed-on: https://go-review.googlesource.com/c/tools/+/667375 Reviewed-by: Robert Findley LUCI-TryBot-Result: Go LUCI Auto-Submit: Alan Donovan Commit-Queue: Alan Donovan --- .../analysis => go/analysis/passes}/hostport/hostport.go | 5 ++--- .../analysis/passes}/hostport/hostport_test.go | 2 +- .../analysis => go/analysis/passes}/hostport/main.go | 0 .../analysis/passes}/hostport/testdata/src/a/a.go | 0 .../analysis/passes}/hostport/testdata/src/a/a.go.golden | 0 go/analysis/unitchecker/vet_std_test.go | 2 ++ gopls/doc/analyzers.md | 2 +- gopls/internal/doc/api.json | 2 +- gopls/internal/settings/analysis.go | 2 +- 9 files changed, 8 insertions(+), 7 deletions(-) rename {gopls/internal/analysis => go/analysis/passes}/hostport/hostport.go (96%) rename {gopls/internal/analysis => go/analysis/passes}/hostport/hostport_test.go (87%) rename {gopls/internal/analysis => go/analysis/passes}/hostport/main.go (100%) rename {gopls/internal/analysis => go/analysis/passes}/hostport/testdata/src/a/a.go (100%) rename {gopls/internal/analysis => go/analysis/passes}/hostport/testdata/src/a/a.go.golden (100%) diff --git a/gopls/internal/analysis/hostport/hostport.go b/go/analysis/passes/hostport/hostport.go similarity index 96% rename from gopls/internal/analysis/hostport/hostport.go rename to go/analysis/passes/hostport/hostport.go index d95e475d1bf..e808b1aa1ba 100644 --- a/gopls/internal/analysis/hostport/hostport.go +++ b/go/analysis/passes/hostport/hostport.go @@ -15,7 +15,6 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/gopls/internal/util/safetoken" typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" "golang.org/x/tools/internal/typesinternal/typeindex" ) @@ -42,7 +41,7 @@ A similar diagnostic and fix are produced for a format string of "%s:%s". 
var Analyzer = &analysis.Analyzer{ Name: "hostport", Doc: Doc, - URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/hostport", + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/hostport", Requires: []*analysis.Analyzer{inspect.Analyzer, typeindexanalyzer.Analyzer}, Run: run, } @@ -121,7 +120,7 @@ func run(pass *analysis.Pass) (any, error) { suffix := "" if dialCall != nil { suffix = fmt.Sprintf(" (passed to net.Dial at L%d)", - safetoken.StartPosition(pass.Fset, dialCall.Pos()).Line) + pass.Fset.Position(dialCall.Pos()).Line) } pass.Report(analysis.Diagnostic{ diff --git a/gopls/internal/analysis/hostport/hostport_test.go b/go/analysis/passes/hostport/hostport_test.go similarity index 87% rename from gopls/internal/analysis/hostport/hostport_test.go rename to go/analysis/passes/hostport/hostport_test.go index 4e57a43e8d4..f3c18840fa0 100644 --- a/gopls/internal/analysis/hostport/hostport_test.go +++ b/go/analysis/passes/hostport/hostport_test.go @@ -8,7 +8,7 @@ import ( "testing" "golang.org/x/tools/go/analysis/analysistest" - "golang.org/x/tools/gopls/internal/analysis/hostport" + "golang.org/x/tools/go/analysis/passes/hostport" ) func Test(t *testing.T) { diff --git a/gopls/internal/analysis/hostport/main.go b/go/analysis/passes/hostport/main.go similarity index 100% rename from gopls/internal/analysis/hostport/main.go rename to go/analysis/passes/hostport/main.go diff --git a/gopls/internal/analysis/hostport/testdata/src/a/a.go b/go/analysis/passes/hostport/testdata/src/a/a.go similarity index 100% rename from gopls/internal/analysis/hostport/testdata/src/a/a.go rename to go/analysis/passes/hostport/testdata/src/a/a.go diff --git a/gopls/internal/analysis/hostport/testdata/src/a/a.go.golden b/go/analysis/passes/hostport/testdata/src/a/a.go.golden similarity index 100% rename from gopls/internal/analysis/hostport/testdata/src/a/a.go.golden rename to go/analysis/passes/hostport/testdata/src/a/a.go.golden diff --git a/go/analysis/unitchecker/vet_std_test.go b/go/analysis/unitchecker/vet_std_test.go index ac61950d739..b489ad486d8 100644 --- a/go/analysis/unitchecker/vet_std_test.go +++ b/go/analysis/unitchecker/vet_std_test.go @@ -25,6 +25,7 @@ import ( "golang.org/x/tools/go/analysis/passes/errorsas" "golang.org/x/tools/go/analysis/passes/framepointer" "golang.org/x/tools/go/analysis/passes/gofix" + "golang.org/x/tools/go/analysis/passes/hostport" "golang.org/x/tools/go/analysis/passes/httpresponse" "golang.org/x/tools/go/analysis/passes/ifaceassert" "golang.org/x/tools/go/analysis/passes/loopclosure" @@ -65,6 +66,7 @@ func vet() { framepointer.Analyzer, gofix.Analyzer, httpresponse.Analyzer, + hostport.Analyzer, ifaceassert.Analyzer, loopclosure.Analyzer, lostcancel.Analyzer, diff --git a/gopls/doc/analyzers.md b/gopls/doc/analyzers.md index dfca652d426..e18a7c7efda 100644 --- a/gopls/doc/analyzers.md +++ b/gopls/doc/analyzers.md @@ -3515,7 +3515,7 @@ A similar diagnostic and fix are produced for a format string of "%s:%s". Default: on. 
-Package documentation: [hostport](https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/hostport) +Package documentation: [hostport](https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/hostport) ## `httpresponse`: check for mistakes using HTTP responses diff --git a/gopls/internal/doc/api.json b/gopls/internal/doc/api.json index 8ef813d82bb..37a996950be 100644 --- a/gopls/internal/doc/api.json +++ b/gopls/internal/doc/api.json @@ -3177,7 +3177,7 @@ { "Name": "hostport", "Doc": "check format of addresses passed to net.Dial\n\nThis analyzer flags code that produce network address strings using\nfmt.Sprintf, as in this example:\n\n addr := fmt.Sprintf(\"%s:%d\", host, 12345) // \"will not work with IPv6\"\n ...\n conn, err := net.Dial(\"tcp\", addr) // \"when passed to dial here\"\n\nThe analyzer suggests a fix to use the correct approach, a call to\nnet.JoinHostPort:\n\n addr := net.JoinHostPort(host, \"12345\")\n ...\n conn, err := net.Dial(\"tcp\", addr)\n\nA similar diagnostic and fix are produced for a format string of \"%s:%s\".\n", - "URL": "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/hostport", + "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/hostport", "Default": true }, { diff --git a/gopls/internal/settings/analysis.go b/gopls/internal/settings/analysis.go index 584bbd5f7bd..99b55cc6b24 100644 --- a/gopls/internal/settings/analysis.go +++ b/gopls/internal/settings/analysis.go @@ -23,6 +23,7 @@ import ( "golang.org/x/tools/go/analysis/passes/directive" "golang.org/x/tools/go/analysis/passes/errorsas" "golang.org/x/tools/go/analysis/passes/framepointer" + "golang.org/x/tools/go/analysis/passes/hostport" "golang.org/x/tools/go/analysis/passes/httpresponse" "golang.org/x/tools/go/analysis/passes/ifaceassert" "golang.org/x/tools/go/analysis/passes/loopclosure" @@ -51,7 +52,6 @@ import ( "golang.org/x/tools/gopls/internal/analysis/deprecated" "golang.org/x/tools/gopls/internal/analysis/embeddirective" "golang.org/x/tools/gopls/internal/analysis/fillreturns" - "golang.org/x/tools/gopls/internal/analysis/hostport" "golang.org/x/tools/gopls/internal/analysis/infertypeargs" "golang.org/x/tools/gopls/internal/analysis/modernize" "golang.org/x/tools/gopls/internal/analysis/nonewvars" From cd183620b17f4a5ef1289bb0995c15cec62bc39f Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Thu, 6 Feb 2025 22:14:46 -0500 Subject: [PATCH 228/270] go/packages: add test that go list closes file handles Updates golang/go#71544 Change-Id: I349f97a0c5eb679115ea251d37dbd56042c7679b Reviewed-on: https://go-review.googlesource.com/c/tools/+/647516 Reviewed-by: Robert Findley Auto-Submit: Alan Donovan LUCI-TryBot-Result: Go LUCI Commit-Queue: Alan Donovan --- go/packages/packages_test.go | 56 ++++++++++++++++++++++++++++++++++++ 1 file changed, 56 insertions(+) diff --git a/go/packages/packages_test.go b/go/packages/packages_test.go index ae3cbb6bb2b..2911d595c34 100644 --- a/go/packages/packages_test.go +++ b/go/packages/packages_test.go @@ -28,7 +28,9 @@ import ( "time" "github.com/google/go-cmp/cmp" + "golang.org/x/sync/errgroup" "golang.org/x/tools/go/packages" + "golang.org/x/tools/internal/gocommand" "golang.org/x/tools/internal/packagesinternal" "golang.org/x/tools/internal/packagestest" "golang.org/x/tools/internal/testenv" @@ -3400,3 +3402,57 @@ func writeTree(t *testing.T, archive string) string { } return root } + +// This is not a test of go/packages at all: it's a test of whether it +// is possible to delete the directory used by go list once it has +// 
finished. It is intended to evaluate the hypothesis (to explain +// issue #71544) that the go command, on Windows, occasionally fails +// to release all its handles to the temporary directory even when it +// should have finished. If this test ever fails, the go command has a bug. +func TestRmdirAfterGoList(t *testing.T) { + testenv.NeedsExec(t) + + dir := t.TempDir() + if err := os.Mkdir(filepath.Join(dir, "p"), 0777); err != nil { + t.Fatalf("mkdir p: %v", err) + } + + // Create a go.mod file and 100 trivial Go files for the go command to read. + if err := os.WriteFile(filepath.Join(dir, "go.mod"), []byte("module example.com"), 0666); err != nil { + t.Fatal(err) + } + for i := range 100 { + filename := filepath.Join(dir, fmt.Sprintf("p/%d.go", i)) + if err := os.WriteFile(filename, []byte("package p"), 0666); err != nil { + t.Fatal(err) + } + } + + runner := gocommand.Runner{} + + g, ctx := errgroup.WithContext(context.Background()) + for range 10 { + g.Go(func() error { + stdout, stderr, friendlyErr, err := runner.RunRaw(ctx, gocommand.Invocation{ + Verb: "list", + Args: []string{"-json", "example.com/p"}, + WorkingDir: dir, + }) + if ctx.Err() != nil { + return nil // don't report error if canceled + } + if err != nil || friendlyErr != nil { + t.Fatalf("go list failed: %v, %v (stdout=%s stderr=%s)", + err, friendlyErr, stdout, stderr) + } + // Return an error so that concurrent invocations are canceled. + return fmt.Errorf("oops") + }) + } + g.Wait() // ignore expected error + + // This is the critical operation. + if err := os.RemoveAll(dir); err != nil { + t.Fatalf("failed to remove temp dir: %v", err) + } +} From f01b9f65c7b7e98bf38cf704313f95e3c5b323a1 Mon Sep 17 00:00:00 2001 From: xieyuschen Date: Thu, 17 Apr 2025 18:16:04 +0800 Subject: [PATCH 229/270] gopls/internal/server: support links and hovers for replace directive This CL supports links and hovers for replace directive. It supports links for local replacement such as 'replace A => ../'. It also respects replacement for a module version(replace A => A v1.2.3), and replacement from a module to another(replace A => B v1.2.3), so the hover messages above import decl in .go files and modules in go.mod are correct. 
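For illustration, the forms of replace directive that are now handled look
roughly like the following (the module paths are hypothetical and not part
of this change):

  replace example.com/local => ../local                      // local directory: link to its go.mod
  replace example.com/pinned => example.com/pinned v1.2.4    // same path, replaced version
  replace example.com/old v1.2.3 => example.com/new v1.0.0   // different module path and version
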
Fixes golang/go#73423 Change-Id: I777ff3b77e399406b066780501c084d59af9e442 Reviewed-on: https://go-review.googlesource.com/c/tools/+/667015 Reviewed-by: Alan Donovan Auto-Submit: Alan Donovan Reviewed-by: Robert Findley Commit-Queue: Alan Donovan LUCI-TryBot-Result: Go LUCI --- gopls/internal/cache/mod.go | 39 +++++ gopls/internal/golang/hover.go | 2 +- gopls/internal/mod/hover.go | 17 +- gopls/internal/server/link.go | 44 ++++- .../test/integration/misc/failures_test.go | 2 +- .../test/integration/misc/link_test.go | 155 ++++++++++++++++-- .../integration/workspace/misspelling_test.go | 2 +- 7 files changed, 234 insertions(+), 27 deletions(-) diff --git a/gopls/internal/cache/mod.go b/gopls/internal/cache/mod.go index f6dd22754cc..ddbe516f165 100644 --- a/gopls/internal/cache/mod.go +++ b/gopls/internal/cache/mod.go @@ -13,6 +13,7 @@ import ( "golang.org/x/mod/modfile" "golang.org/x/mod/module" + "golang.org/x/tools/go/packages" "golang.org/x/tools/gopls/internal/file" "golang.org/x/tools/gopls/internal/label" "golang.org/x/tools/gopls/internal/protocol" @@ -25,6 +26,7 @@ import ( type ParsedModule struct { URI protocol.DocumentURI File *modfile.File + ReplaceMap map[module.Version]module.Version Mapper *protocol.Mapper ParseErrors []*Diagnostic } @@ -98,10 +100,19 @@ func parseModImpl(ctx context.Context, fh file.Handle) (*ParsedModule, error) { }) } } + + replaceMap := make(map[module.Version]module.Version) + if parseErr == nil { + for _, rep := range file.Replace { + replaceMap[rep.Old] = rep.New + } + } + return &ParsedModule{ URI: fh.URI(), Mapper: m, File: file, + ReplaceMap: replaceMap, ParseErrors: parseErrors, }, parseErr } @@ -487,3 +498,31 @@ func findModuleReference(mf *modfile.File, ver module.Version) *modfile.Line { } return nil } + +// ResolvedVersion returns the version used for a module, which considers replace directive. +func ResolvedVersion(module *packages.Module) string { + // don't visit replace recursively as src/cmd/go/internal/modinfo/info.go + // visits replace field only once. + if module.Replace != nil { + return module.Replace.Version + } + return module.Version +} + +// ResolvedPath returns the the module path, which considers replace directive. +func ResolvedPath(module *packages.Module) string { + if module.Replace != nil { + return module.Replace.Path + } + return module.Path +} + +// ResolvedString returns a representation of the Version suitable for logging +// (Path@Version, or just Path if Version is empty), +// which considers replace directive. 
+func ResolvedString(module *packages.Module) string {
+	if ResolvedVersion(module) == "" {
+		return ResolvedPath(module)
+	}
+	return ResolvedPath(module) + "@" + ResolvedVersion(module)
+}
diff --git a/gopls/internal/golang/hover.go b/gopls/internal/golang/hover.go
index 43cc68ff8b2..93c89f3af97 100644
--- a/gopls/internal/golang/hover.go
+++ b/gopls/internal/golang/hover.go
@@ -651,7 +651,7 @@ func hover(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pp pro
 		linkPath = ""
 	} else if linkMeta.Module != nil && linkMeta.Module.Version != "" {
 		mod := linkMeta.Module
-		linkPath = strings.Replace(linkPath, mod.Path, mod.Path+"@"+mod.Version, 1)
+		linkPath = strings.Replace(linkPath, mod.Path, cache.ResolvedString(mod), 1)
 	}
 
 	var footer string
diff --git a/gopls/internal/mod/hover.go b/gopls/internal/mod/hover.go
index 458c5ce67d5..04834f3cd7e 100644
--- a/gopls/internal/mod/hover.go
+++ b/gopls/internal/mod/hover.go
@@ -12,6 +12,7 @@ import (
 	"strings"
 
 	"golang.org/x/mod/modfile"
+	"golang.org/x/mod/module"
 	"golang.org/x/mod/semver"
 	"golang.org/x/tools/gopls/internal/cache"
 	"golang.org/x/tools/gopls/internal/file"
@@ -116,7 +117,7 @@ func hoverOnRequireStatement(ctx context.Context, pm *cache.ParsedModule, offset
 	options := snapshot.Options()
 	isPrivate := snapshot.IsGoPrivatePath(req.Mod.Path)
 	header := formatHeader(req.Mod.Path, options)
-	explanation = formatExplanation(explanation, req, options, isPrivate)
+	explanation = formatExplanation(explanation, pm.ReplaceMap, req, options, isPrivate)
 	vulns := formatVulnerabilities(affecting, nonaffecting, osvs, options, fromGovulncheck)
 
 	return &protocol.Hover{
@@ -327,7 +328,7 @@ func vulnerablePkgsInfo(findings []*govulncheck.Finding, useMarkdown bool) strin
 	return b.String()
 }
 
-func formatExplanation(text string, req *modfile.Require, options *settings.Options, isPrivate bool) string {
+func formatExplanation(text string, replaceMap map[module.Version]module.Version, req *modfile.Require, options *settings.Options, isPrivate bool) string {
 	text = strings.TrimSuffix(text, "\n")
 	splt := strings.Split(text, "\n")
 	length := len(splt)
@@ -348,7 +349,17 @@ func formatExplanation(text string, req *modfile.Require, options *settings.Opti
 	if !isPrivate && options.PreferredContentFormat == protocol.Markdown {
 		target := imp
 		if strings.ToLower(options.LinkTarget) == "pkg.go.dev" {
-			target = strings.Replace(target, req.Mod.Path, req.Mod.String(), 1)
+			mod := req.Mod
+			// respect the replacement when constructing a module link.
+			if m, ok := replaceMap[req.Mod]; ok {
+				// Have: 'replace A v1.2.3 => A vx.x.x' or 'replace A v1.2.3 => B vx.x.x'.
+				mod = m
+			} else if m, ok := replaceMap[module.Version{Path: req.Mod.Path}]; ok &&
+				!modfile.IsDirectoryPath(m.Path) { // exclude local replacement.
+				// Have: 'replace A => A vx.x.x' or 'replace A => B vx.x.x'.
+				mod = m
+			}
+			target = strings.Replace(target, req.Mod.Path, mod.String(), 1)
 		}
 		reference = fmt.Sprintf("[%s](%s)", imp, cache.BuildLink(options.LinkTarget, target, ""))
 	}
diff --git a/gopls/internal/server/link.go b/gopls/internal/server/link.go
index cf475ca90c9..75c717dbe8e 100644
--- a/gopls/internal/server/link.go
+++ b/gopls/internal/server/link.go
@@ -11,11 +11,13 @@ import (
 	"go/ast"
 	"go/token"
 	"net/url"
+	"path/filepath"
 	"regexp"
 	"strings"
 	"sync"
 
 	"golang.org/x/mod/modfile"
+	"golang.org/x/mod/module"
 	"golang.org/x/tools/gopls/internal/cache"
 	"golang.org/x/tools/gopls/internal/cache/metadata"
 	"golang.org/x/tools/gopls/internal/cache/parsego"
@@ -59,6 +61,30 @@ func modLinks(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]
 	}
 
 	var links []protocol.DocumentLink
+	for _, rep := range pm.File.Replace {
+		if modfile.IsDirectoryPath(rep.New.Path) {
+			// Have local replacement, such as 'replace A => ../'.
+			dep := []byte(rep.New.Path)
+			start, end := rep.Syntax.Start.Byte, rep.Syntax.End.Byte
+			i := bytes.Index(pm.Mapper.Content[start:end], dep)
+			if i < 0 {
+				continue
+			}
+			path := rep.New.Path
+			if !filepath.IsAbs(path) {
+				path = filepath.Join(fh.URI().DirPath(), path)
+			}
+			// jump to the go.mod file of the replaced module.
+			path = filepath.Join(filepath.Clean(path), "go.mod")
+			l, err := toProtocolLink(pm.Mapper, protocol.URIFromPath(path).Path(), start+i, start+i+len(dep))
+			if err != nil {
+				return nil, err
+			}
+			links = append(links, l)
+			continue
+		}
+	}
+
 	for _, req := range pm.File.Require {
 		if req.Syntax == nil {
 			continue
 		}
@@ -73,9 +99,21 @@ func modLinks(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]
 		if i == -1 {
 			continue
 		}
+
+		mod := req.Mod
+		// respect the replacement when constructing a module link.
+		if m, ok := pm.ReplaceMap[req.Mod]; ok {
+			// Have: 'replace A v1.2.3 => A vx.x.x' or 'replace A v1.2.3 => B vx.x.x'.
+			mod = m
+		} else if m, ok := pm.ReplaceMap[module.Version{Path: req.Mod.Path}]; ok &&
+			!modfile.IsDirectoryPath(m.Path) { // exclude local replacement.
+			// Have: 'replace A => A vx.x.x' or 'replace A => B vx.x.x'.
+			mod = m
+		}
+
 		// Shift the start position to the location of the
 		// dependency within the require statement.
-		target := cache.BuildLink(snapshot.Options().LinkTarget, "mod/"+req.Mod.String(), "")
+		target := cache.BuildLink(snapshot.Options().LinkTarget, "mod/"+mod.String(), "")
 		l, err := toProtocolLink(pm.Mapper, target, start+i, start+i+len(dep))
 		if err != nil {
 			return nil, err
 		}
@@ -142,8 +180,8 @@ func goLinks(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]p
 		urlPath := string(importPath)
 
 		// For pkg.go.dev, append module version suffix to package import path.
- if mp := snapshot.Metadata(depsByImpPath[importPath]); mp != nil && mp.Module != nil && mp.Module.Path != "" && mp.Module.Version != "" { - urlPath = strings.Replace(urlPath, mp.Module.Path, mp.Module.Path+"@"+mp.Module.Version, 1) + if mp := snapshot.Metadata(depsByImpPath[importPath]); mp != nil && mp.Module != nil && cache.ResolvedPath(mp.Module) != "" && cache.ResolvedVersion(mp.Module) != "" { + urlPath = strings.Replace(urlPath, mp.Module.Path, cache.ResolvedString(mp.Module), 1) } start, end, err := safetoken.Offsets(pgf.Tok, imp.Path.Pos(), imp.Path.End()) diff --git a/gopls/internal/test/integration/misc/failures_test.go b/gopls/internal/test/integration/misc/failures_test.go index 81fa17deb9b..543e36a9e44 100644 --- a/gopls/internal/test/integration/misc/failures_test.go +++ b/gopls/internal/test/integration/misc/failures_test.go @@ -7,8 +7,8 @@ package misc import ( "testing" - . "golang.org/x/tools/gopls/internal/test/integration" "golang.org/x/tools/gopls/internal/test/compare" + . "golang.org/x/tools/gopls/internal/test/integration" ) // This is a slight variant of TestHoverOnError in definition_test.go diff --git a/gopls/internal/test/integration/misc/link_test.go b/gopls/internal/test/integration/misc/link_test.go index 53b0f0818f3..079d84cb6ee 100644 --- a/gopls/internal/test/integration/misc/link_test.go +++ b/gopls/internal/test/integration/misc/link_test.go @@ -5,9 +5,12 @@ package misc import ( + "path/filepath" + "slices" "strings" "testing" + "golang.org/x/tools/gopls/internal/protocol" . "golang.org/x/tools/gopls/internal/test/integration" ) @@ -19,15 +22,35 @@ module mod.test go 1.12 require import.test v1.2.3 + +require replace.test v1.2.3 +replace replace.test => replace.test v1.2.4 + +require replace.fixed.test v1.2.3 +replace replace.fixed.test v1.2.3 => replace.fixed.test v1.2.4 + +require replace.another.test v1.2.3 +replace replace.another.test => another.test v1.2.3 + + +replace example.com/non-exist => ./ +replace example.com/non-exist1 => ../work/ + -- main.go -- package main import "import.test/pkg" +import "replace.test/replace" +import "replace.fixed.test/fixed" +import "replace.another.test/another" func main() { // Issue 43990: this is not a link that most users can open from an LSP // client: mongodb://not.a.link.com println(pkg.Hello) + println(replace.Hello) + println(fixed.Hello) + println(another.Hello) }` const proxy = ` @@ -38,6 +61,32 @@ go 1.12 -- import.test@v1.2.3/pkg/const.go -- package pkg + +-- replace.test@v1.2.4/go.mod -- +module replace.test + +go 1.12 +-- replace.test@v1.2.4/replace/const.go -- +package replace + +const Hello = "Hello" + +-- replace.fixed.test@v1.2.4/go.mod -- +module replace.fixed.test + +go 1.12 +-- replace.fixed.test@v1.2.4/fixed/const.go -- +package fixed + +const Hello = "Hello" + +-- another.test@v1.2.3/go.mod -- +module another.test + +go 1.12 +-- another.test@v1.2.3/another/const.go -- +package another + const Hello = "Hello" ` WithOptions( @@ -47,25 +96,82 @@ const Hello = "Hello" env.OpenFile("main.go") env.OpenFile("go.mod") - modLink := "https://pkg.go.dev/mod/import.test@v1.2.3" - pkgLink := "https://pkg.go.dev/import.test@v1.2.3/pkg" + const ( + modImportLink = "https://pkg.go.dev/mod/import.test@v1.2.3" + modReplaceLink = "https://pkg.go.dev/mod/replace.test@v1.2.4" + modReplaceFixedeLink = "https://pkg.go.dev/mod/replace.fixed.test@v1.2.4" + modAnotherLink = "https://pkg.go.dev/mod/another.test@v1.2.3" + + pkgImportLink = "https://pkg.go.dev/import.test@v1.2.3/pkg" + pkgReplaceLink = 
"https://pkg.go.dev/replace.test@v1.2.4/replace" + pkgReplaceFixedLink = "https://pkg.go.dev/replace.fixed.test@v1.2.4/fixed" + pkgAnotherLink = "https://pkg.go.dev/another.test@v1.2.3/another" + ) // First, check that we get the expected links via hover and documentLink. content, _ := env.Hover(env.RegexpSearch("main.go", "pkg.Hello")) - if content == nil || !strings.Contains(content.Value, pkgLink) { - t.Errorf("hover: got %v in main.go, want contains %q", content, pkgLink) + if content == nil || !strings.Contains(content.Value, pkgImportLink) { + t.Errorf("hover: got %v in main.go, want contains %q", content, pkgImportLink) + } + content, _ = env.Hover(env.RegexpSearch("main.go", "replace.Hello")) + if content == nil || !strings.Contains(content.Value, pkgReplaceLink) { + t.Errorf("hover: got %v in main.go, want contains %q", content, pkgReplaceLink) + } + content, _ = env.Hover(env.RegexpSearch("main.go", "fixed.Hello")) + if content == nil || !strings.Contains(content.Value, pkgReplaceFixedLink) { + t.Errorf("hover: got %v in main.go, want contains %q", content, pkgReplaceFixedLink) + } + content, _ = env.Hover(env.RegexpSearch("main.go", "another.Hello")) + if content == nil || !strings.Contains(content.Value, pkgAnotherLink) { + t.Errorf("hover: got %v in main.go, want contains %q", content, pkgAnotherLink) } + content, _ = env.Hover(env.RegexpSearch("go.mod", "import.test")) - if content == nil || !strings.Contains(content.Value, pkgLink) { - t.Errorf("hover: got %v in go.mod, want contains %q", content, pkgLink) + if content == nil || !strings.Contains(content.Value, pkgImportLink) { + t.Errorf("hover: got %v in main.go, want contains %q", content, pkgImportLink) + } + content, _ = env.Hover(env.RegexpSearch("go.mod", "replace.test")) + if content == nil || !strings.Contains(content.Value, pkgReplaceLink) { + t.Errorf("hover: got %v in main.go, want contains %q", content, pkgReplaceLink) + } + content, _ = env.Hover(env.RegexpSearch("go.mod", "replace.fixed.test")) + if content == nil || !strings.Contains(content.Value, pkgReplaceFixedLink) { + t.Errorf("hover: got %v in main.go, want contains %q", content, pkgReplaceFixedLink) + } + content, _ = env.Hover(env.RegexpSearch("go.mod", "replace.another.test")) + if content == nil || !strings.Contains(content.Value, pkgAnotherLink) { + t.Errorf("hover: got %v in main.go, want contains %q", content, pkgAnotherLink) + } + + getLinks := func(links []protocol.DocumentLink) []string { + var got []string + for i := range links { + got = append(got, *links[i].Target) + } + return got } links := env.DocumentLink("main.go") - if len(links) != 1 || *links[0].Target != pkgLink { - t.Errorf("documentLink: got links %+v for main.go, want one link with target %q", links, pkgLink) + got, want := getLinks(links), []string{ + pkgImportLink, + pkgReplaceLink, + pkgReplaceFixedLink, + pkgAnotherLink, + } + if !slices.Equal(got, want) { + t.Errorf("documentLink: got links %v for main.go, want links %v", got, want) } + links = env.DocumentLink("go.mod") - if len(links) != 1 || *links[0].Target != modLink { - t.Errorf("documentLink: got links %+v for go.mod, want one link with target %q", links, modLink) + localReplacePath := filepath.Join(env.Sandbox.Workdir.RootURI().Path(), "go.mod") + got, want = getLinks(links), []string{ + localReplacePath, localReplacePath, + modImportLink, + modReplaceLink, + modReplaceFixedeLink, + modAnotherLink, + } + if !slices.Equal(got, want) { + t.Errorf("documentLink: got links %v for go.mod, want links %v", got, want) } // 
Then change the environment to make these links private. @@ -75,20 +181,33 @@ const Hello = "Hello" // Finally, verify that the links are gone. content, _ = env.Hover(env.RegexpSearch("main.go", "pkg.Hello")) - if content == nil || strings.Contains(content.Value, pkgLink) { - t.Errorf("hover: got %v in main.go, want non-empty hover without %q", content, pkgLink) + if content == nil || strings.Contains(content.Value, pkgImportLink) { + t.Errorf("hover: got %v in main.go, want non-empty hover without %q", content, pkgImportLink) } content, _ = env.Hover(env.RegexpSearch("go.mod", "import.test")) - if content == nil || strings.Contains(content.Value, modLink) { - t.Errorf("hover: got %v in go.mod, want contains %q", content, modLink) + if content == nil || strings.Contains(content.Value, modImportLink) { + t.Errorf("hover: got %v in go.mod, want contains %q", content, modImportLink) } + links = env.DocumentLink("main.go") - if len(links) != 0 { - t.Errorf("documentLink: got %d document links for main.go, want 0\nlinks: %v", len(links), links) + got, want = getLinks(links), []string{ + pkgReplaceLink, + pkgReplaceFixedLink, + pkgAnotherLink, + } + if !slices.Equal(got, want) { + t.Errorf("documentLink: got links %v for main.go, want links %v", got, want) } + links = env.DocumentLink("go.mod") - if len(links) != 0 { - t.Errorf("documentLink: got %d document links for go.mod, want 0\nlinks: %v", len(links), links) + got, want = getLinks(links), []string{ + localReplacePath, localReplacePath, + modReplaceLink, + modReplaceFixedeLink, + modAnotherLink, + } + if !slices.Equal(got, want) { + t.Errorf("documentLink: got links %v for go.mod, want links %v", got, want) } }) } diff --git a/gopls/internal/test/integration/workspace/misspelling_test.go b/gopls/internal/test/integration/workspace/misspelling_test.go index ddca05c860e..3ea379a18f1 100644 --- a/gopls/internal/test/integration/workspace/misspelling_test.go +++ b/gopls/internal/test/integration/workspace/misspelling_test.go @@ -8,8 +8,8 @@ import ( "runtime" "testing" - . "golang.org/x/tools/gopls/internal/test/integration" "golang.org/x/tools/gopls/internal/test/compare" + . "golang.org/x/tools/gopls/internal/test/integration" ) // Test for golang/go#57081. From 0c2f68a582533c6fcde9cd079f4ec56376e4f11c Mon Sep 17 00:00:00 2001 From: xieyuschen Date: Wed, 23 Apr 2025 17:14:17 +0800 Subject: [PATCH 230/270] gopls/internal/golang/completion: show typeparam for preceding funcs in chain Current, gopls offers a chained function call completion 'reflect.TypeFor().Align' after word 'Align' when reflect is imported. As function reflect.TypeFor requires to explicitly specify a type parameter, gopls completion should offer 'reflect.TypeFor[T]().Align' instead. 
- Before: reflect.TypeFor().Align()
- After: reflect.TypeFor[T]().Align()

Change-Id: I262b001a9bed767a2a8120f8a01cd0b9f79af942
Reviewed-on: https://go-review.googlesource.com/c/tools/+/667535
Reviewed-by: Robert Findley
LUCI-TryBot-Result: Go LUCI
Reviewed-by: Alan Donovan
Auto-Submit: Alan Donovan
---
 .../golang/completion/deep_completion.go      |  5 ++++
 .../marker/testdata/completion/func_value.txt | 28 +++++++++++++++++++
 2 files changed, 33 insertions(+)

diff --git a/gopls/internal/golang/completion/deep_completion.go b/gopls/internal/golang/completion/deep_completion.go
index 053ece8219e..523c5b8652b 100644
--- a/gopls/internal/golang/completion/deep_completion.go
+++ b/gopls/internal/golang/completion/deep_completion.go
@@ -9,6 +9,8 @@ import (
 	"go/types"
 	"strings"
 	"time"
+
+	"golang.org/x/tools/gopls/internal/util/typesutil"
 )
 
 // MaxDeepCompletions limits deep completion results because in most cases
@@ -312,6 +314,9 @@ func deepCandName(cand *candidate) string {
 
 	for i, obj := range cand.path {
 		buf.WriteString(obj.Name())
+		if fn, ok := obj.(*types.Func); ok {
+			buf.WriteString(typesutil.FormatTypeParams(fn.Signature().TypeParams()))
+		}
 		if cand.pathInvokeMask&(1<<uint16(i)) > 0 {
 			buf.WriteByte('(')
 			buf.WriteByte(')')
diff --git a/gopls/internal/test/marker/testdata/completion/func_value.txt b/gopls/internal/test/marker/testdata/completion/func_value.txt
index 9b1370f129d..0e3cb50f28b 100644
--- a/gopls/internal/test/marker/testdata/completion/func_value.txt
+++ b/gopls/internal/test/marker/testdata/completion/func_value.txt
@@ -31,3 +31,31 @@ func _() {
 	var i int
 	i = foo //@complete(" //", fvFooFuncCall, fvFooTypeCall, fvFooVarCall)
 }
+
+-- generic/func_value.go --
+package funcvalue
+
+type bar struct{}
+
+func (b bar) Num() int {
+	return 0
+}
+
+func Bar[T any]() bar {
+	return bar{}
+}
+
+func BarWithArg[T any](a int) bar {
+	return bar{}
+}
+
+func (b bar) Bar2() bar {
+	return b
+}
+
+func _() {
+	Bar[T].Num //@item(bar, "Bar[T]().Num", "func() int", "method")
+	Bar[T].Bar2().Num //@item(bar2, "Bar[T]().Bar2().Num", "func() int", "method")
+	var i int
+	i = Num //@complete(" //", bar, bar2)
+}

From d346382e272cad23ea94eefab7d63c1ff6587983 Mon Sep 17 00:00:00 2001
From: Madeline Kalilh
Date: Thu, 6 Feb 2025 16:11:01 -0500
Subject: [PATCH 231/270] gopls/internal/server: add modify_tags command handler

Previously, the VSCode extension installed and executed gomodifytags
directly. To reduce third-party dependencies in gopls, we are submitting
a change to gomodifytags to extract the functionality into a library
called by gopls (https://github.com/fatih/gomodifytags/pull/117). Now,
the extension will call the gopls.modify_tags command and the command
handler will invoke the modifytags package's Apply() method.

Also adds basic code actions for adding and removing struct tags. This
will be extended once we implement a dialogue feature in gopls.
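
For illustration, here is a minimal sketch of how a client of the
command API might construct the new command. The wrapper function and
its arguments are hypothetical; only NewModifyTagsCommand and
ModifyTagsArgs come from this CL, and the refactor.rewrite.addTags code
action below builds essentially the same command:

	package example // hypothetical package, for illustration only

	import (
		"golang.org/x/tools/gopls/internal/protocol"
		"golang.org/x/tools/gopls/internal/protocol/command"
	)

	// addJSONTags returns a command asking gopls to add json tags (with
	// omitempty) to the struct fields covered by rng in the file at uri.
	func addJSONTags(uri protocol.DocumentURI, rng protocol.Range) *protocol.Command {
		return command.NewModifyTagsCommand("Add struct tags", command.ModifyTagsArgs{
			URI:        uri,
			Range:      rng,
			Add:        "json",
			AddOptions: "json=omitempty",
		})
	}

The resulting command is then executed via workspace/executeCommand,
which is how the extension is expected to reach the new handler.
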
Change-Id: Idf130c95eec5d469a454cb6f21897629b3364b06 Reviewed-on: https://go-review.googlesource.com/c/tools/+/652495 Reviewed-by: Hongxiang Jiang LUCI-TryBot-Result: Go LUCI Reviewed-by: Robert Findley --- gopls/go.mod | 3 + gopls/go.sum | 10 ++ gopls/internal/golang/codeaction.go | 79 +++++++++ gopls/internal/licenses/gen-licenses.sh | 4 +- gopls/internal/licenses/licenses.go | 116 +++++++++++++ .../internal/protocol/command/command_gen.go | 16 ++ gopls/internal/protocol/command/interface.go | 19 +++ gopls/internal/server/command.go | 139 +++++++++++++++ gopls/internal/settings/codeactionkind.go | 2 + .../test/integration/misc/modify_tags_test.go | 159 ++++++++++++++++++ .../testdata/codeaction/add_struct_tags.txt | 31 ++++ .../codeaction/remove_struct_tags.txt | 31 ++++ 12 files changed, 607 insertions(+), 2 deletions(-) create mode 100644 gopls/internal/test/integration/misc/modify_tags_test.go create mode 100644 gopls/internal/test/marker/testdata/codeaction/add_struct_tags.txt create mode 100644 gopls/internal/test/marker/testdata/codeaction/remove_struct_tags.txt diff --git a/gopls/go.mod b/gopls/go.mod index c2a8f6e019c..5f24d3ad79b 100644 --- a/gopls/go.mod +++ b/gopls/go.mod @@ -20,6 +20,9 @@ require ( require ( github.com/BurntSushi/toml v1.4.1-0.20240526193622-a339e1f7089c // indirect + github.com/fatih/camelcase v1.0.0 // indirect + github.com/fatih/gomodifytags v1.17.1-0.20250423142747-f3939df9aa3c // indirect + github.com/fatih/structtag v1.2.0 // indirect github.com/google/safehtml v0.1.0 // indirect golang.org/x/exp/typeparams v0.0.0-20250218142911-aa4b98e5adaa // indirect gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 // indirect diff --git a/gopls/go.sum b/gopls/go.sum index cfe49a42d4e..01231ae5d8a 100644 --- a/gopls/go.sum +++ b/gopls/go.sum @@ -1,5 +1,15 @@ github.com/BurntSushi/toml v1.4.1-0.20240526193622-a339e1f7089c h1:pxW6RcqyfI9/kWtOwnv/G+AzdKuy2ZrqINhenH4HyNs= github.com/BurntSushi/toml v1.4.1-0.20240526193622-a339e1f7089c/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho= +github.com/fatih/camelcase v1.0.0 h1:hxNvNX/xYBp0ovncs8WyWZrOrpBNub/JfaMvbURyft8= +github.com/fatih/camelcase v1.0.0/go.mod h1:yN2Sb0lFhZJUdVvtELVWefmrXpuZESvPmqwoZc+/fpc= +github.com/fatih/gomodifytags v1.16.0 h1:B65npXIXSk44F6c1hZGE1NazSnt+eXvtdEOG2Uy+QdU= +github.com/fatih/gomodifytags v1.16.0/go.mod h1:TbUyEjH1Zo0GkJd2Q52oVYqYcJ0eGNqG8bsiOb75P9c= +github.com/fatih/gomodifytags v1.17.1-0.20250325171527-8c663b1c0765 h1:T+oCz1SRpqkn4meT0PiAX5vM8HcESrWvsAzyvy0Vdh0= +github.com/fatih/gomodifytags v1.17.1-0.20250325171527-8c663b1c0765/go.mod h1:YVLagR57bBxMai8IAEc7V4E/MWUYi0oUutLrZcTcnI8= +github.com/fatih/gomodifytags v1.17.1-0.20250423142747-f3939df9aa3c h1:dDSgAjoOMp8da3egfz0t2S+t8RGOpEmEXZubcGuc0Bg= +github.com/fatih/gomodifytags v1.17.1-0.20250423142747-f3939df9aa3c/go.mod h1:YVLagR57bBxMai8IAEc7V4E/MWUYi0oUutLrZcTcnI8= +github.com/fatih/structtag v1.2.0 h1:/OdNE99OxoI/PqaW/SuSK9uxxT3f/tcSZgon/ssNSx4= +github.com/fatih/structtag v1.2.0/go.mod h1:mBJUNpUnHmRKrKlQQlmCrh5PuhftFbNv8Ys4/aAZl94= github.com/go-quicktest/qt v1.101.0 h1:O1K29Txy5P2OK0dGo59b7b0LR6wKfIhttaAhHUyn7eI= github.com/go-quicktest/qt v1.101.0/go.mod h1:14Bz/f7NwaXPtdYEgzsx46kqSxVwTbzVZsDC26tQJow= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= diff --git a/gopls/internal/golang/codeaction.go b/gopls/internal/golang/codeaction.go index 5ba8b0c4ae5..a7917fbbda4 100644 --- a/gopls/internal/golang/codeaction.go +++ b/gopls/internal/golang/codeaction.go @@ -27,6 +27,7 @@ import ( 
"golang.org/x/tools/gopls/internal/protocol" "golang.org/x/tools/gopls/internal/protocol/command" "golang.org/x/tools/gopls/internal/settings" + "golang.org/x/tools/internal/astutil/cursor" "golang.org/x/tools/internal/event" "golang.org/x/tools/internal/imports" "golang.org/x/tools/internal/typesinternal" @@ -262,6 +263,8 @@ var codeActionProducers = [...]codeActionProducer{ {kind: settings.RefactorRewriteMoveParamRight, fn: refactorRewriteMoveParamRight, needPkg: true}, {kind: settings.RefactorRewriteSplitLines, fn: refactorRewriteSplitLines, needPkg: true}, {kind: settings.RefactorRewriteEliminateDotImport, fn: refactorRewriteEliminateDotImport, needPkg: true}, + {kind: settings.RefactorRewriteAddTags, fn: refactorRewriteAddStructTags, needPkg: true}, + {kind: settings.RefactorRewriteRemoveTags, fn: refactorRewriteRemoveStructTags, needPkg: true}, {kind: settings.GoplsDocFeatures, fn: goplsDocFeatures}, // offer this one last (#72742) // Note: don't forget to update the allow-list in Server.CodeAction @@ -810,6 +813,82 @@ func refactorRewriteFillSwitch(ctx context.Context, req *codeActionsRequest) err return nil } +// selectionContainsStructField returns true if the given struct contains a +// field between start and end pos. If needsTag is true, it only returns true if +// the struct field found contains a struct tag. +func selectionContainsStructField(node *ast.StructType, start, end token.Pos, needsTag bool) bool { + for _, field := range node.Fields.List { + if start <= field.End() && end >= field.Pos() { + if !needsTag || field.Tag != nil { + return true + } + } + } + return false +} + +// selectionContainsStruct returns true if there exists a struct containing +// fields within start and end positions. If removeTags is true, it means the +// current command is for remove tags rather than add tags, so we only return +// true if the struct field found contains a struct tag to remove. +func selectionContainsStruct(cursor cursor.Cursor, start, end token.Pos, removeTags bool) bool { + cur, ok := cursor.FindByPos(start, end) + if !ok { + return false + } + if _, ok := cur.Node().(*ast.StructType); ok { + return true + } + + // Handles case where selection is within struct. + for c := range cur.Enclosing((*ast.StructType)(nil)) { + if selectionContainsStructField(c.Node().(*ast.StructType), start, end, removeTags) { + return true + } + } + + // Handles case where selection contains struct but may contain other nodes, including other structs. + for c := range cur.Preorder((*ast.StructType)(nil)) { + node := c.Node().(*ast.StructType) + // Check that at least one field is located within the selection. If we are removing tags, that field + // must also have a struct tag, otherwise we do not provide the code action. + if selectionContainsStructField(node, start, end, removeTags) { + return true + } + } + return false +} + +// refactorRewriteAddStructTags produces "Add struct tags" code actions. +// See [server.commandHandler.ModifyTags] for command implementation. +func refactorRewriteAddStructTags(ctx context.Context, req *codeActionsRequest) error { + if selectionContainsStruct(req.pgf.Cursor, req.start, req.end, false) { + // TODO(mkalil): Prompt user for modification args once we have dialogue capabilities. 
+ cmdAdd := command.NewModifyTagsCommand("Add struct tags", command.ModifyTagsArgs{ + URI: req.loc.URI, + Range: req.loc.Range, + Add: "json", + }) + req.addCommandAction(cmdAdd, false) + } + return nil +} + +// refactorRewriteRemoveStructTags produces "Remove struct tags" code actions. +// See [server.commandHandler.ModifyTags] for command implementation. +func refactorRewriteRemoveStructTags(ctx context.Context, req *codeActionsRequest) error { + // TODO(mkalil): Prompt user for modification args once we have dialogue capabilities. + if selectionContainsStruct(req.pgf.Cursor, req.start, req.end, true) { + cmdRemove := command.NewModifyTagsCommand("Remove struct tags", command.ModifyTagsArgs{ + URI: req.loc.URI, + Range: req.loc.Range, + Clear: true, + }) + req.addCommandAction(cmdRemove, false) + } + return nil +} + // removableParameter returns paramInfo about a removable parameter indicated // by the given [start, end) range, or nil if no such removal is available. // diff --git a/gopls/internal/licenses/gen-licenses.sh b/gopls/internal/licenses/gen-licenses.sh index a39f87ce845..b615e566324 100755 --- a/gopls/internal/licenses/gen-licenses.sh +++ b/gopls/internal/licenses/gen-licenses.sh @@ -25,9 +25,9 @@ END # are known to have the same license. mods=$(go list -deps -f '{{with .Module}}{{.Path}}{{end}}' golang.org/x/tools/gopls | sort -u | grep -v golang.org) for mod in $mods; do - # Find the license file, either LICENSE or COPYING, and add it to the result. + # Find the license file, either LICENSE, COPYING, or LICENSE.md and add it to the result. dir=$(go list -m -f {{.Dir}} $mod) - license=$(ls -1 $dir | grep -E -i '^(LICENSE|COPYING)$') + license=$(ls -1 $dir | grep -E -i '^(LICENSE|LICENSE.md|COPYING)?$') echo "-- $mod $license --" >> $tempfile echo >> $tempfile sed 's/^-- / &/' $dir/$license >> $tempfile diff --git a/gopls/internal/licenses/licenses.go b/gopls/internal/licenses/licenses.go index e8c5ba9c691..ee73aba2e41 100644 --- a/gopls/internal/licenses/licenses.go +++ b/gopls/internal/licenses/licenses.go @@ -30,6 +30,122 @@ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +-- github.com/fatih/camelcase LICENSE.md -- + +The MIT License (MIT) + +Copyright (c) 2015 Fatih Arslan + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +-- github.com/fatih/gomodifytags LICENSE -- + +Copyright (c) 2017, Fatih Arslan +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of gomodifytags nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +-- github.com/fatih/structtag LICENSE -- + +Copyright (c) 2017, Fatih Arslan +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of structtag nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +This software includes some portions from Go. Go is used under the terms of the +BSD like license. + +Copyright (c) 2012 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. 
+ * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +The Go gopher was designed by Renee French. http://reneefrench.blogspot.com/ The design is licensed under the Creative Commons 3.0 Attributions license. Read this article for more details: https://blog.golang.org/gopher + -- github.com/google/go-cmp LICENSE -- Copyright (c) 2017 The Go Authors. All rights reserved. diff --git a/gopls/internal/protocol/command/command_gen.go b/gopls/internal/protocol/command/command_gen.go index c9b18a40cb8..b6c12e4b50c 100644 --- a/gopls/internal/protocol/command/command_gen.go +++ b/gopls/internal/protocol/command/command_gen.go @@ -46,6 +46,7 @@ const ( ListKnownPackages Command = "gopls.list_known_packages" MaybePromptForTelemetry Command = "gopls.maybe_prompt_for_telemetry" MemStats Command = "gopls.mem_stats" + ModifyTags Command = "gopls.modify_tags" Modules Command = "gopls.modules" PackageSymbols Command = "gopls.package_symbols" Packages Command = "gopls.packages" @@ -91,6 +92,7 @@ var Commands = []Command{ ListKnownPackages, MaybePromptForTelemetry, MemStats, + ModifyTags, Modules, PackageSymbols, Packages, @@ -242,6 +244,12 @@ func Dispatch(ctx context.Context, params *protocol.ExecuteCommandParams, s Inte return nil, s.MaybePromptForTelemetry(ctx) case MemStats: return s.MemStats(ctx) + case ModifyTags: + var a0 ModifyTagsArgs + if err := UnmarshalArgs(params.Arguments, &a0); err != nil { + return nil, err + } + return nil, s.ModifyTags(ctx, a0) case Modules: var a0 ModulesArgs if err := UnmarshalArgs(params.Arguments, &a0); err != nil { @@ -530,6 +538,14 @@ func NewMemStatsCommand(title string) *protocol.Command { } } +func NewModifyTagsCommand(title string, a0 ModifyTagsArgs) *protocol.Command { + return &protocol.Command{ + Title: title, + Command: ModifyTags.String(), + Arguments: MustMarshalArgs(a0), + } +} + func NewModulesCommand(title string, a0 ModulesArgs) *protocol.Command { return &protocol.Command{ Title: title, diff --git a/gopls/internal/protocol/command/interface.go b/gopls/internal/protocol/command/interface.go index 34adf59b38e..01d41dec473 100644 --- a/gopls/internal/protocol/command/interface.go +++ b/gopls/internal/protocol/command/interface.go @@ -297,6 +297,9 @@ type Interface interface { // PackageSymbols: Return information about symbols in the given file's package. 
PackageSymbols(context.Context, PackageSymbolsArgs) (PackageSymbolsResult, error) + + // ModifyTags: Add or remove struct tags on a given node. + ModifyTags(context.Context, ModifyTagsArgs) error } type RunTestsArgs struct { @@ -830,3 +833,19 @@ type PackageSymbol struct { // Index of this symbol's file in PackageSymbolsResult.Files File int `json:"file,omitempty"` } + +// ModifyTagsArgs holds variables that determine how struct tags are modified. +type ModifyTagsArgs struct { + URI protocol.DocumentURI // uri of the file to be modified + Range protocol.Range // range in the file for where to modify struct tags + Add string // comma-separated list of tags to add; i.e. "json,xml" + AddOptions string // comma-separated list of options to add, per tag; i.e. "json=omitempty" + Remove string // comma-separated list of tags to remove + RemoveOptions string // comma-separated list of options to remove + Clear bool // if set, clear all tags. tags are cleared before any new tags are added + ClearOptions bool // if set, clear all tag options; options are cleared before any new options are added + Overwrite bool // if set, replace existing tags when adding + SkipUnexportedFields bool // if set, do not modify tags on unexported struct fields + Transform string // transform rule for adding tags; i.e. "snakecase" + ValueFormat string // format for the tag's value, after transformation; for example "column:{field}" +} diff --git a/gopls/internal/server/command.go b/gopls/internal/server/command.go index ca8177530e5..3711e42549b 100644 --- a/gopls/internal/server/command.go +++ b/gopls/internal/server/command.go @@ -10,6 +10,9 @@ import ( "encoding/json" "errors" "fmt" + "go/ast" + "go/format" + "go/token" "io" "log" "os" @@ -22,6 +25,7 @@ import ( "strings" "sync" + "github.com/fatih/gomodifytags/modifytags" "golang.org/x/mod/modfile" "golang.org/x/telemetry/counter" "golang.org/x/tools/go/ast/astutil" @@ -38,6 +42,8 @@ import ( "golang.org/x/tools/gopls/internal/util/bug" "golang.org/x/tools/gopls/internal/vulncheck" "golang.org/x/tools/gopls/internal/vulncheck/scan" + internalastutil "golang.org/x/tools/internal/astutil" + "golang.org/x/tools/internal/astutil/cursor" "golang.org/x/tools/internal/diff" "golang.org/x/tools/internal/event" "golang.org/x/tools/internal/gocommand" @@ -1764,3 +1770,136 @@ func (c *commandHandler) PackageSymbols(ctx context.Context, args command.Packag return result, err } + +// optionsStringToMap transforms comma-separated options of the form +// "foo=bar,baz=quux" to a go map. Returns nil if any options are malformed. 
+func optionsStringToMap(options string) (map[string][]string, error) { + optionsMap := make(map[string][]string) + for item := range strings.SplitSeq(options, ",") { + key, option, found := strings.Cut(item, "=") + if !found { + return nil, fmt.Errorf("invalid option %q", item) + } + optionsMap[key] = append(optionsMap[key], option) + } + return optionsMap, nil +} + +func (c *commandHandler) ModifyTags(ctx context.Context, args command.ModifyTagsArgs) error { + return c.run(ctx, commandConfig{ + progress: "Modifying tags", + forURI: args.URI, + }, func(ctx context.Context, deps commandDeps) error { + m := &modifytags.Modification{ + Clear: args.Clear, + ClearOptions: args.ClearOptions, + ValueFormat: args.ValueFormat, + Overwrite: args.Overwrite, + } + + transform, err := parseTransform(args.Transform) + if err != nil { + return err + } + m.Transform = transform + + if args.Add != "" { + m.Add = strings.Split(args.Add, ",") + } + if args.AddOptions != "" { + if options, err := optionsStringToMap(args.AddOptions); err != nil { + return err + } else { + m.AddOptions = options + } + } + if args.Remove != "" { + m.Remove = strings.Split(args.Remove, ",") + } + if args.RemoveOptions != "" { + if options, err := optionsStringToMap(args.RemoveOptions); err != nil { + return err + } else { + m.RemoveOptions = options + } + } + fh, err := deps.snapshot.ReadFile(ctx, args.URI) + if err != nil { + return err + } + pgf, err := deps.snapshot.ParseGo(ctx, fh, parsego.Full) + if err != nil { + return fmt.Errorf("error fetching package file: %v", err) + } + start, end, err := pgf.RangePos(args.Range) + if err != nil { + return fmt.Errorf("error getting position information: %v", err) + } + // If the cursor is at a point and not a selection, we should use the entire enclosing struct. + if start == end { + cur, ok := pgf.Cursor.FindByPos(start, end) + if !ok { + return fmt.Errorf("error finding start and end positions: %v", err) + } + start, end, err = findEnclosingStruct(cur) + if err != nil { + return fmt.Errorf("error finding enclosing struct: %v", err) + } + } + + // Create a copy of the file node in order to avoid race conditions when we modify the node in Apply. + cloned := internalastutil.CloneNode(pgf.File) + fset := tokeninternal.FileSetFor(pgf.Tok) + + if err = m.Apply(fset, cloned, start, end); err != nil { + return fmt.Errorf("could not modify tags: %v", err) + } + + // Construct a list of DocumentChanges based on the diff between the formatted node and the + // original file content. + var after bytes.Buffer + if err := format.Node(&after, fset, cloned); err != nil { + return err + } + edits := diff.Bytes(pgf.Src, after.Bytes()) + if len(edits) == 0 { + return nil + } + textedits, err := protocol.EditsFromDiffEdits(pgf.Mapper, edits) + if err != nil { + return fmt.Errorf("error computing edits for %s: %v", args.URI, err) + } + return applyChanges(ctx, c.s.client, []protocol.DocumentChange{ + protocol.DocumentChangeEdit(fh, textedits), + }) + }) +} + +// Finds the start and end positions of the enclosing struct or returns an error if none is found. 
+func findEnclosingStruct(c cursor.Cursor) (token.Pos, token.Pos, error) { + for cur := range c.Enclosing((*ast.StructType)(nil)) { + return cur.Node().Pos(), cur.Node().End(), nil + } + return token.NoPos, token.NoPos, fmt.Errorf("no struct enclosing the given positions") +} + +func parseTransform(input string) (modifytags.Transform, error) { + switch input { + case "camelcase": + return modifytags.CamelCase, nil + case "lispcase": + return modifytags.LispCase, nil + case "pascalcase": + return modifytags.PascalCase, nil + case "titlecase": + return modifytags.TitleCase, nil + case "keep": + return modifytags.Keep, nil + case "": + fallthrough + case "snakecase": + return modifytags.SnakeCase, nil + default: + return modifytags.SnakeCase, fmt.Errorf("invalid Transform value") + } +} diff --git a/gopls/internal/settings/codeactionkind.go b/gopls/internal/settings/codeactionkind.go index f6f8a4df2a4..ebe9606adab 100644 --- a/gopls/internal/settings/codeactionkind.go +++ b/gopls/internal/settings/codeactionkind.go @@ -97,6 +97,8 @@ const ( RefactorRewriteMoveParamRight protocol.CodeActionKind = "refactor.rewrite.moveParamRight" RefactorRewriteSplitLines protocol.CodeActionKind = "refactor.rewrite.splitLines" RefactorRewriteEliminateDotImport protocol.CodeActionKind = "refactor.rewrite.eliminateDotImport" + RefactorRewriteAddTags protocol.CodeActionKind = "refactor.rewrite.addTags" + RefactorRewriteRemoveTags protocol.CodeActionKind = "refactor.rewrite.removeTags" // refactor.inline RefactorInlineCall protocol.CodeActionKind = "refactor.inline.call" diff --git a/gopls/internal/test/integration/misc/modify_tags_test.go b/gopls/internal/test/integration/misc/modify_tags_test.go new file mode 100644 index 00000000000..48b5f772ffb --- /dev/null +++ b/gopls/internal/test/integration/misc/modify_tags_test.go @@ -0,0 +1,159 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package misc + +import ( + "testing" + + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/protocol/command" + "golang.org/x/tools/gopls/internal/test/compare" + "golang.org/x/tools/gopls/internal/test/integration" +) + +func TestModifyTags(t *testing.T) { + const files = ` +-- go.mod -- +module example.com + +go 1.20 + +-- a.go -- +package a + +type A struct { + B string + C int + D bool + E string +} + +-- b.go -- +package b + +type B struct { + B string ` + "`json:\"b,omitempty\"`" + ` + C int ` + "`json:\"c,omitempty\"`" + ` + D bool ` + "`json:\"d,omitempty\"`" + ` + E string ` + "`json:\"e,omitempty\"`" + ` +} + +-- c.go -- +package c + +type C struct { + B string + C int + D bool ` + "`json:\"d,omitempty\"`" + ` + E string +} +` + + const wantAddTagsEntireStruct = `package a + +type A struct { + B string ` + "`json:\"b,omitempty\"`" + ` + C int ` + "`json:\"c,omitempty\"`" + ` + D bool ` + "`json:\"d,omitempty\"`" + ` + E string ` + "`json:\"e,omitempty\"`" + ` +} +` + + const wantRemoveTags = `package b + +type B struct { + B string + C int + D bool ` + "`json:\"d,omitempty\"`" + ` + E string ` + "`json:\"e,omitempty\"`" + ` +} +` + + const wantAddTagsSingleLine = `package a + +type A struct { + B string + C int + D bool ` + "`json:\"d,omitempty\"`" + ` + E string +} +` + + const wantRemoveOptions = `package c + +type C struct { + B string + C int + D bool ` + "`json:\"d\"`" + ` + E string +} +` + + tests := []struct { + file string + args command.ModifyTagsArgs + want string + }{ + {file: "a.go", args: command.ModifyTagsArgs{ + Range: protocol.Range{ + Start: protocol.Position{Line: 2, Character: 0}, + End: protocol.Position{Line: 8, Character: 0}, + }, + Add: "json", + AddOptions: "json=omitempty", + }, want: wantAddTagsEntireStruct}, + {file: "b.go", args: command.ModifyTagsArgs{ + Range: protocol.Range{ + Start: protocol.Position{Line: 3, Character: 2}, + End: protocol.Position{Line: 4, Character: 6}, + }, + Remove: "json", + }, want: wantRemoveTags}, + {file: "a.go", args: command.ModifyTagsArgs{ + Range: protocol.Range{ + Start: protocol.Position{Line: 5, Character: 0}, + End: protocol.Position{Line: 5, Character: 7}, + }, + Add: "json", + AddOptions: "json=omitempty", + }, want: wantAddTagsSingleLine}, + {file: "c.go", args: command.ModifyTagsArgs{ + Range: protocol.Range{ + Start: protocol.Position{Line: 3, Character: 0}, + End: protocol.Position{Line: 7, Character: 0}, + }, + RemoveOptions: "json=omitempty", + }, want: wantRemoveOptions}, + } + + for _, test := range tests { + integration.Run(t, files, func(t *testing.T, env *integration.Env) { + uri := env.Sandbox.Workdir.URI(test.file) + args, err := command.MarshalArgs( + command.ModifyTagsArgs{ + URI: uri, + Range: test.args.Range, + Add: test.args.Add, + AddOptions: test.args.AddOptions, + Remove: test.args.Remove, + RemoveOptions: test.args.RemoveOptions, + }, + ) + if err != nil { + t.Fatal(err) + } + var res any + env.ExecuteCommand(&protocol.ExecuteCommandParams{ + Command: command.ModifyTags.String(), + Arguments: args, + }, &res) + // Wait until we finish writing to the file. 
+ env.AfterChange() + if got := env.BufferText(test.file); got != test.want { + t.Errorf("modify_tags returned unexpected diff (-want +got):\n%s", compare.Text(test.want, got)) + } + }) + } +} diff --git a/gopls/internal/test/marker/testdata/codeaction/add_struct_tags.txt b/gopls/internal/test/marker/testdata/codeaction/add_struct_tags.txt new file mode 100644 index 00000000000..f517fd92f54 --- /dev/null +++ b/gopls/internal/test/marker/testdata/codeaction/add_struct_tags.txt @@ -0,0 +1,31 @@ +This test checks the behavior of the 'Add struct tags' code action. + +-- flags -- +-ignore_extra_diags + +-- addtags.go -- +package addtags + +type A struct { + x int //@codeaction("x", "refactor.rewrite.addTags", edit=singleline) + y int //@codeaction(re`(?s)y.*.z int`, "refactor.rewrite.addTags", edit=twolines) + z int //@codeaction(re`()n`, "refactor.rewrite.addTags", edit=entirestruct) +} +-- @entirestruct/addtags.go -- +@@ -4,3 +4,3 @@ +- x int //@codeaction("x", "refactor.rewrite.addTags", edit=singleline) +- y int //@codeaction(re`(?s)y.*.z int`, "refactor.rewrite.addTags", edit=twolines) +- z int //@codeaction(re`()n`, "refactor.rewrite.addTags", edit=entirestruct) ++ x int `json:"x"` //@codeaction("x", "refactor.rewrite.addTags", edit=singleline) ++ y int `json:"y"` //@codeaction(re`(?s)y.*.z int`, "refactor.rewrite.addTags", edit=twolines) ++ z int `json:"z"` //@codeaction(re`()n`, "refactor.rewrite.addTags", edit=entirestruct) +-- @singleline/addtags.go -- +@@ -4 +4 @@ +- x int //@codeaction("x", "refactor.rewrite.addTags", edit=singleline) ++ x int `json:"x"` //@codeaction("x", "refactor.rewrite.addTags", edit=singleline) +-- @twolines/addtags.go -- +@@ -5,2 +5,2 @@ +- y int //@codeaction(re`(?s)y.*.z int`, "refactor.rewrite.addTags", edit=twolines) +- z int //@codeaction(re`()n`, "refactor.rewrite.addTags", edit=entirestruct) ++ y int `json:"y"` //@codeaction(re`(?s)y.*.z int`, "refactor.rewrite.addTags", edit=twolines) ++ z int `json:"z"` //@codeaction(re`()n`, "refactor.rewrite.addTags", edit=entirestruct) diff --git a/gopls/internal/test/marker/testdata/codeaction/remove_struct_tags.txt b/gopls/internal/test/marker/testdata/codeaction/remove_struct_tags.txt new file mode 100644 index 00000000000..cfd802370cf --- /dev/null +++ b/gopls/internal/test/marker/testdata/codeaction/remove_struct_tags.txt @@ -0,0 +1,31 @@ +This test checks the behavior of the 'Remove struct tags' code action. 
+ +-- flags -- +-ignore_extra_diags + +-- removetags.go -- +package removetags + +type A struct { + x int `json:"x"` //@codeaction("x", "refactor.rewrite.removeTags", edit=singleline) + y int `json:"y"` //@codeaction(re`(?s)y.*.z int`, "refactor.rewrite.removeTags", edit=twolines) + z int `json:"z"` //@codeaction(re`()n`, "refactor.rewrite.removeTags", edit=entirestruct) +} +-- @entirestruct/removetags.go -- +@@ -4,3 +4,3 @@ +- x int `json:"x"` //@codeaction("x", "refactor.rewrite.removeTags", edit=singleline) +- y int `json:"y"` //@codeaction(re`(?s)y.*.z int`, "refactor.rewrite.removeTags", edit=twolines) +- z int `json:"z"` //@codeaction(re`()n`, "refactor.rewrite.removeTags", edit=entirestruct) ++ x int //@codeaction("x", "refactor.rewrite.removeTags", edit=singleline) ++ y int //@codeaction(re`(?s)y.*.z int`, "refactor.rewrite.removeTags", edit=twolines) ++ z int //@codeaction(re`()n`, "refactor.rewrite.removeTags", edit=entirestruct) +-- @singleline/removetags.go -- +@@ -4 +4 @@ +- x int `json:"x"` //@codeaction("x", "refactor.rewrite.removeTags", edit=singleline) ++ x int //@codeaction("x", "refactor.rewrite.removeTags", edit=singleline) +-- @twolines/removetags.go -- +@@ -5,2 +5,2 @@ +- y int `json:"y"` //@codeaction(re`(?s)y.*.z int`, "refactor.rewrite.removeTags", edit=twolines) +- z int `json:"z"` //@codeaction(re`()n`, "refactor.rewrite.removeTags", edit=entirestruct) ++ y int //@codeaction(re`(?s)y.*.z int`, "refactor.rewrite.removeTags", edit=twolines) ++ z int //@codeaction(re`()n`, "refactor.rewrite.removeTags", edit=entirestruct) From 46e932f4088f37fbb96e945d56833ae685ea5f05 Mon Sep 17 00:00:00 2001 From: xieyuschen Date: Sun, 13 Apr 2025 17:30:59 +0800 Subject: [PATCH 232/270] internal/analysisinternal: add std import at the start of the import group This CL ensures that standard library imports are added at the start of the import group rather than at the end. In projects with third-party dependencies (such as gopls), imports are usually organized in the following order: standard library imports, imports from the module itself, and third-party imports. Previously, when analysisinternal.AddImport was called to add a new standard library package (e.g., "slices") to a file that already imports both standard library and third-party modules, the newly added "slices" import would inappropriately be placed among the third-party imports. This violates widely accepted conventions, and gofmt does not automatically fix such cases. This CL solves the problem when running 'modernize -fix ./...' under gopls, we need to manually move the new added std package to put std imports together. Change-Id: I714c92c205542358dcdd1a6ed6db171be9c5fb56 Reviewed-on: https://go-review.googlesource.com/c/tools/+/664436 LUCI-TryBot-Result: Go LUCI Commit-Queue: Alan Donovan Auto-Submit: Alan Donovan Reviewed-by: Alan Donovan Reviewed-by: Junyang Shao --- .../internal/analysis/modernize/modernize.go | 14 +--- internal/analysisinternal/addimport_test.go | 71 +++++++++++++++++++ internal/analysisinternal/analysis.go | 22 +++++- 3 files changed, 93 insertions(+), 14 deletions(-) diff --git a/gopls/internal/analysis/modernize/modernize.go b/gopls/internal/analysis/modernize/modernize.go index b7e943a0c51..44992c3aa14 100644 --- a/gopls/internal/analysis/modernize/modernize.go +++ b/gopls/internal/analysis/modernize/modernize.go @@ -170,22 +170,10 @@ func enclosingFile(c cursor.Cursor) *ast.File { // specified standard packages or their dependencies. 
func within(pass *analysis.Pass, pkgs ...string) bool { path := pass.Pkg.Path() - return standard(path) && + return analysisinternal.IsStdPackage(path) && moreiters.Contains(stdlib.Dependencies(pkgs...), path) } -// standard reports whether the specified package path belongs to a -// package in the standard library (including internal dependencies). -func standard(path string) bool { - // A standard package has no dot in its first segment. - // (It may yet have a dot, e.g. "vendor/golang.org/x/foo".) - slash := strings.IndexByte(path, '/') - if slash < 0 { - slash = len(path) - } - return !strings.Contains(path[:slash], ".") && path != "testdata" -} - var ( builtinAny = types.Universe.Lookup("any") builtinAppend = types.Universe.Lookup("append") diff --git a/internal/analysisinternal/addimport_test.go b/internal/analysisinternal/addimport_test.go index da7c7f90114..50bb5333525 100644 --- a/internal/analysisinternal/addimport_test.go +++ b/internal/analysisinternal/addimport_test.go @@ -238,10 +238,59 @@ func _(io.Reader) { want: `package a import ( + "fmt" "io" +) + +func _(io.Reader) { + fmt +}`, + }, + { + descr: descr("add import to group which imports std and a 3rd module"), + src: `package a + +import ( + "io" + + "vendor/golang.org/x/net/dns/dnsmessage" +) + +func _(io.Reader) { + «fmt fmt» +}`, + want: `package a + +import ( "fmt" + "io" + + "vendor/golang.org/x/net/dns/dnsmessage" ) +func _(io.Reader) { + fmt +}`, + }, + { + descr: descr("add import to group which imports std and a 3rd module without parens"), + src: `package a + +import "io" + +import "vendor/golang.org/x/net/dns/dnsmessage" + +func _(io.Reader) { + «fmt fmt» +}`, + want: `package a + +import "fmt" + +import "io" + +import "vendor/golang.org/x/net/dns/dnsmessage" + func _(io.Reader) { fmt }`, @@ -315,3 +364,25 @@ func _(io.Reader) { }) } } + +func TestIsStdPackage(t *testing.T) { + testCases := []struct { + pkgpath string + isStd bool + }{ + {pkgpath: "os", isStd: true}, + {pkgpath: "net/http", isStd: true}, + {pkgpath: "vendor/golang.org/x/net/dns/dnsmessage", isStd: true}, + {pkgpath: "golang.org/x/net/dns/dnsmessage", isStd: false}, + {pkgpath: "testdata", isStd: false}, + } + + for _, tc := range testCases { + t.Run(tc.pkgpath, func(t *testing.T) { + got := analysisinternal.IsStdPackage(tc.pkgpath) + if got != tc.isStd { + t.Fatalf("got %t want %t", got, tc.isStd) + } + }) + } +} diff --git a/internal/analysisinternal/analysis.go b/internal/analysisinternal/analysis.go index cd2595a3dd1..20115350973 100644 --- a/internal/analysisinternal/analysis.go +++ b/internal/analysisinternal/analysis.go @@ -280,7 +280,15 @@ func AddImport(info *types.Info, file *ast.File, preferredName, pkgpath, member // If the first decl is an import group, add this new import at the end. if gd, ok := before.(*ast.GenDecl); ok && gd.Tok == token.IMPORT && gd.Rparen.IsValid() { pos = gd.Rparen - newText = "\t" + newText + "\n" + // if it's a std lib, we should append it at the beginning of import group. + // otherwise we may see the std package is put at the last behind a 3rd module which doesn't follow our convention. + // besides, gofmt doesn't help in this case. 
+ if IsStdPackage(pkgpath) && len(gd.Specs) != 0 { + pos = gd.Specs[0].Pos() + newText += "\n\t" + } else { + newText = "\t" + newText + "\n" + } } else { pos = before.Pos() newText = "import " + newText + "\n\n" @@ -637,3 +645,15 @@ func Comments(file *ast.File, start, end token.Pos) iter.Seq[*ast.Comment] { } } } + +// IsStdPackage reports whether the specified package path belongs to a +// package in the standard library (including internal dependencies). +func IsStdPackage(path string) bool { + // A standard package has no dot in its first segment. + // (It may yet have a dot, e.g. "vendor/golang.org/x/foo".) + slash := strings.IndexByte(path, '/') + if slash < 0 { + slash = len(path) + } + return !strings.Contains(path[:slash], ".") && path != "testdata" +} From f11abcb12b4715f4b4ee15d2192a29c646ef2857 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Fri, 18 Apr 2025 14:02:50 -0400 Subject: [PATCH 233/270] internal/analysisinternal: permit Fix.End slightly beyond EOF This CL fixes by far the most prolific source of gopls telemetry field reports: the unreachable analyzer often reports diagnostics at the end of a truncated file; because of problems with AST the fix's end position may be beyond EOF, causing ValidateFixes to fail, and gopls to report a bug. The mitigation is to make validation tolerant of fixes whose end is just slightly beyond EOF, and to clamp them to EOF. + regression test Fixes golang/go#71659 Fixes golang/go#71812 Change-Id: Ib68a38d0797c9274eaf3d59fabd45cc120f1dd36 Reviewed-on: https://go-review.googlesource.com/c/tools/+/666675 Auto-Submit: Alan Donovan Commit-Queue: Alan Donovan LUCI-TryBot-Result: Go LUCI Reviewed-by: Robert Findley --- go/analysis/passes/unreachable/unreachable.go | 3 +++ gopls/internal/cache/analysis.go | 4 ++++ .../marker/testdata/diagnostics/issue71812.txt | 17 +++++++++++++++++ internal/analysisinternal/analysis.go | 16 +++++++++++++++- 4 files changed, 39 insertions(+), 1 deletion(-) create mode 100644 gopls/internal/test/marker/testdata/diagnostics/issue71812.txt diff --git a/go/analysis/passes/unreachable/unreachable.go b/go/analysis/passes/unreachable/unreachable.go index fcf5fbd9060..317f034992b 100644 --- a/go/analysis/passes/unreachable/unreachable.go +++ b/go/analysis/passes/unreachable/unreachable.go @@ -188,6 +188,9 @@ func (d *deadState) findDead(stmt ast.Stmt) { case *ast.EmptyStmt: // do not warn about unreachable empty statements default: + // (This call to pass.Report is a frequent source + // of diagnostics beyond EOF in a truncated file; + // see #71659.) d.pass.Report(analysis.Diagnostic{ Pos: stmt.Pos(), End: stmt.End(), diff --git a/gopls/internal/cache/analysis.go b/gopls/internal/cache/analysis.go index 7bf9464adb1..747fac913ce 100644 --- a/gopls/internal/cache/analysis.go +++ b/gopls/internal/cache/analysis.go @@ -1122,6 +1122,10 @@ func (act *action) exec(ctx context.Context) (any, *actionSummary, error) { ResultOf: inputs, Report: func(d analysis.Diagnostic) { // Assert that SuggestedFixes are well formed. + // + // ValidateFixes allows a fix.End to be slightly beyond + // EOF to avoid spurious assertions when reporting + // fixes as the end of truncated files; see #71659. 
if err := analysisinternal.ValidateFixes(apkg.pkg.FileSet(), analyzer, d.SuggestedFixes); err != nil { bug.Reportf("invalid SuggestedFixes: %v", err) d.SuggestedFixes = nil diff --git a/gopls/internal/test/marker/testdata/diagnostics/issue71812.txt b/gopls/internal/test/marker/testdata/diagnostics/issue71812.txt new file mode 100644 index 00000000000..79487d3b148 --- /dev/null +++ b/gopls/internal/test/marker/testdata/diagnostics/issue71812.txt @@ -0,0 +1,17 @@ +This input causes the unreachable analyzer to report a diagnostic +about the var decl statement. Since the computed End pos of +ast.StructType is beyond EOF, validation of SuggestedFixes fails. +This used to trigger an assertion in gopls' analysis driver. + +See golang/go#71659 (and also #71812). + +-- flags -- +-ignore_extra_diags + +-- go.mod -- +module example.com +go 1.18 + +-- a/a.go -- +package a +func _() { return; var x struct{ diff --git a/internal/analysisinternal/analysis.go b/internal/analysisinternal/analysis.go index 20115350973..c4d10de3e91 100644 --- a/internal/analysisinternal/analysis.go +++ b/internal/analysisinternal/analysis.go @@ -439,11 +439,25 @@ func validateFix(fset *token.FileSet, fix *analysis.SuggestedFix) error { if file == nil { return fmt.Errorf("no token.File for TextEdit.Pos (%v)", edit.Pos) } + fileEnd := token.Pos(file.Base() + file.Size()) if end := edit.End; end.IsValid() { if end < start { return fmt.Errorf("TextEdit.Pos (%v) > TextEdit.End (%v)", edit.Pos, edit.End) } endFile := fset.File(end) + if endFile != file && end < fileEnd+10 { + // Relax the checks below in the special case when the end position + // is only slightly beyond EOF, as happens when End is computed + // (as in ast.{Struct,Interface}Type) rather than based on + // actual token positions. In such cases, truncate end to EOF. + // + // This is a workaround for #71659; see: + // https://github.com/golang/go/issues/71659#issuecomment-2651606031 + // A better fix would be more faithful recording of token + // positions (or their absence) in the AST. + edit.End = fileEnd + continue + } if endFile == nil { return fmt.Errorf("no token.File for TextEdit.End (%v; File(start).FileEnd is %d)", end, file.Base()+file.Size()) } @@ -454,7 +468,7 @@ func validateFix(fset *token.FileSet, fix *analysis.SuggestedFix) error { } else { edit.End = start // update the SuggestedFix } - if eof := token.Pos(file.Base() + file.Size()); edit.End > eof { + if eof := fileEnd; edit.End > eof { return fmt.Errorf("end is (%v) beyond end of file (%v)", edit.End, eof) } From 68e94bd1775e560aee2292caf27576ecd36828b3 Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Wed, 23 Apr 2025 14:38:16 -0400 Subject: [PATCH 234/270] x/tools: use 2-arg form of //go:linkname Using just "//go:linkname x" seems to offend the GOROOT build when x/tools is vendored. Use "//go:linkname x pkg.x" in all cases. 
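
For instance (a sketch mirroring the go/ast/inspector change below), the
two-argument form names the target symbol explicitly by its full package
path:

	//go:linkname events golang.org/x/tools/go/ast/inspector.events
	func events(in *Inspector) []event { return in.events }

whereas the bare one-argument form ("//go:linkname events") declares the
symbol without naming a link target.
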
Change-Id: Ida1d358d7ea730c817c0ca29af203b7a285a2d15 Reviewed-on: https://go-review.googlesource.com/c/tools/+/667577 Auto-Submit: Alan Donovan Reviewed-by: Jonathan Amsterdam LUCI-TryBot-Result: Go LUCI --- go/ast/inspector/inspector.go | 6 +++--- go/ast/inspector/typeof.go | 2 +- go/types/typeutil/callee.go | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/go/ast/inspector/inspector.go b/go/ast/inspector/inspector.go index 1da4a361f0b..674490a65b4 100644 --- a/go/ast/inspector/inspector.go +++ b/go/ast/inspector/inspector.go @@ -48,10 +48,10 @@ type Inspector struct { events []event } -//go:linkname events +//go:linkname events golang.org/x/tools/go/ast/inspector.events func events(in *Inspector) []event { return in.events } -//go:linkname packEdgeKindAndIndex +//go:linkname packEdgeKindAndIndex golang.org/x/tools/go/ast/inspector.packEdgeKindAndIndex func packEdgeKindAndIndex(ek edge.Kind, index int) int32 { return int32(uint32(index+1)<<7 | uint32(ek)) } @@ -59,7 +59,7 @@ func packEdgeKindAndIndex(ek edge.Kind, index int) int32 { // unpackEdgeKindAndIndex unpacks the edge kind and edge index (within // an []ast.Node slice) from the parent field of a pop event. // -//go:linkname unpackEdgeKindAndIndex +//go:linkname unpackEdgeKindAndIndex golang.org/x/tools/go/ast/inspector.unpackEdgeKindAndIndex func unpackEdgeKindAndIndex(x int32) (edge.Kind, int) { // The "parent" field of a pop node holds the // edge Kind in the lower 7 bits and the index+1 diff --git a/go/ast/inspector/typeof.go b/go/ast/inspector/typeof.go index 97784484578..e936c67c985 100644 --- a/go/ast/inspector/typeof.go +++ b/go/ast/inspector/typeof.go @@ -217,7 +217,7 @@ func typeOf(n ast.Node) uint64 { return 0 } -//go:linkname maskOf +//go:linkname maskOf golang.org/x/tools/go/ast/inspector.maskOf func maskOf(nodes []ast.Node) uint64 { if len(nodes) == 0 { return math.MaxUint64 // match all node types diff --git a/go/types/typeutil/callee.go b/go/types/typeutil/callee.go index 53b71339305..5f10f56cbaf 100644 --- a/go/types/typeutil/callee.go +++ b/go/types/typeutil/callee.go @@ -48,7 +48,7 @@ func StaticCallee(info *types.Info, call *ast.CallExpr) *types.Func { // This function should live in typesinternal, but cannot because it would // create an import cycle. // -//go:linkname usedIdent +//go:linkname usedIdent golang.org/x/tools/go/types/typeutil.usedIdent func usedIdent(info *types.Info, e ast.Expr) *ast.Ident { if info.Types == nil || info.Uses == nil { panic("one of info.Types or info.Uses is nil; both must be populated") @@ -78,7 +78,7 @@ func usedIdent(info *types.Info, e ast.Expr) *ast.Ident { // interfaceMethod reports whether its argument is a method of an interface. // This function should live in typesinternal, but cannot because it would create an import cycle. // -//go:linkname interfaceMethod +//go:linkname interfaceMethod golang.org/x/tools/go/types/typeutil.interfaceMethod func interfaceMethod(f *types.Func) bool { recv := f.Signature().Recv() return recv != nil && types.IsInterface(recv.Type()) From caf7cdc866b9560a8e3eb62f0e81c8903b9f7e68 Mon Sep 17 00:00:00 2001 From: Rob Findley Date: Tue, 22 Apr 2025 13:56:51 +0000 Subject: [PATCH 235/270] internal/mcp: implement HTTP SSE transport Add support for SSE transport, as defined by version 2024-11-05 and 2025-03-26 of the MCP spec. In order to achieve this, the jsonrpc2 stream must be abstracted so that we can implement it across multiple HTTP requests. 
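
As a sketch of the new construction path (the wrapper function and its
parameters are hypothetical; ConnectionConfig and NewConnection are the
pieces added below):

	package example // illustrative only

	import (
		"context"
		"io"

		jsonrpc2 "golang.org/x/tools/internal/jsonrpc2_v2"
	)

	// newSessionConnection wires a per-session reader/writer pair (for
	// example, a reader over incoming HTTP POST bodies and a writer that
	// emits SSE events) directly into a jsonrpc2 Connection, with no
	// Dialer or Binder involved.
	func newSessionConnection(ctx context.Context, r jsonrpc2.Reader, w jsonrpc2.Writer, c io.Closer, h jsonrpc2.Handler) *jsonrpc2.Connection {
		return jsonrpc2.NewConnection(ctx, jsonrpc2.ConnectionConfig{
			Reader: r,
			Writer: w,
			Closer: c,
			Bind:   func(*jsonrpc2.Connection) jsonrpc2.Handler { return h },
			OnDone: func() { /* release per-session state */ },
		})
	}
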
This required some refactoring of the jsonrpc2_v2 package, so that we can avoid the Dialer/Binder interfaces and construct the Connection directly from its constituent primitives. Change-Id: Id627b5b5a474d92f2c613cfad904d14611e7c768 Reviewed-on: https://go-review.googlesource.com/c/tools/+/667475 LUCI-TryBot-Result: Go LUCI Reviewed-by: Jonathan Amsterdam --- internal/jsonrpc2_v2/conn.go | 40 ++- internal/jsonrpc2_v2/serve.go | 4 +- internal/mcp/client.go | 2 +- internal/mcp/examples/hello/main.go | 22 +- internal/mcp/mcp_test.go | 55 ++-- internal/mcp/server.go | 76 ++--- internal/mcp/sse.go | 430 ++++++++++++++++++++++++++++ internal/mcp/sse_test.go | 65 +++++ internal/mcp/transport.go | 265 ++++++++--------- internal/mcp/transport_test.go | 19 +- internal/mcp/util.go | 16 ++ 11 files changed, 771 insertions(+), 223 deletions(-) create mode 100644 internal/mcp/sse.go create mode 100644 internal/mcp/sse_test.go diff --git a/internal/jsonrpc2_v2/conn.go b/internal/jsonrpc2_v2/conn.go index 4c52a1fd34b..f1ebb2a05d5 100644 --- a/internal/jsonrpc2_v2/conn.go +++ b/internal/jsonrpc2_v2/conn.go @@ -201,14 +201,43 @@ func (o ConnectionOptions) Bind(context.Context, *Connection) ConnectionOptions return o } -// newConnection creates a new connection and runs it. +// A ConnectionConfig configures a bidirectional jsonrpc2 connection. +type ConnectionConfig struct { + Reader Reader // required + Writer Writer // required + Closer io.Closer // required + Preempter Preempter // optional + Bind func(*Connection) Handler // required + OnDone func() // optional + OnInternalError func(error) // optional +} + +// NewConnection creates a new [Connection] object and starts processing +// incoming messages. +func NewConnection(ctx context.Context, cfg ConnectionConfig) *Connection { + ctx = notDone{ctx} + + c := &Connection{ + state: inFlightState{closer: cfg.Closer}, + done: make(chan struct{}), + writer: make(chan Writer, 1), + onDone: cfg.OnDone, + onInternalError: cfg.OnInternalError, + } + c.handler = cfg.Bind(c) + c.writer <- cfg.Writer + c.start(ctx, cfg.Reader, cfg.Preempter) + return c +} + +// bindConnection creates a new connection and runs it. // // This is used by the Dial and Serve functions to build the actual connection. // // The connection is closed automatically (and its resources cleaned up) when // the last request has completed after the underlying ReadWriteCloser breaks, // but it may be stopped earlier by calling Close (for a clean shutdown). -func newConnection(bindCtx context.Context, rwc io.ReadWriteCloser, binder Binder, onDone func()) *Connection { +func bindConnection(bindCtx context.Context, rwc io.ReadWriteCloser, binder Binder, onDone func()) *Connection { // TODO: Should we create a new event span here? // This will propagate cancellation from ctx; should it? ctx := notDone{bindCtx} @@ -238,7 +267,11 @@ func newConnection(bindCtx context.Context, rwc io.ReadWriteCloser, binder Binde c.writer <- framer.Writer(rwc) reader := framer.Reader(rwc) + c.start(ctx, reader, options.Preempter) + return c +} +func (c *Connection) start(ctx context.Context, reader Reader, preempter Preempter) { c.updateInFlight(func(s *inFlightState) { select { case <-c.done: @@ -252,9 +285,8 @@ func newConnection(bindCtx context.Context, rwc io.ReadWriteCloser, binder Binde // (If the Binder closed the Connection already, this should error out and // return almost immediately.) 
s.reading = true - go c.readIncoming(ctx, reader, options.Preempter) + go c.readIncoming(ctx, reader, preempter) }) - return c } // Notify invokes the target method but does not wait for a response. diff --git a/internal/jsonrpc2_v2/serve.go b/internal/jsonrpc2_v2/serve.go index 5c732907002..9a569945345 100644 --- a/internal/jsonrpc2_v2/serve.go +++ b/internal/jsonrpc2_v2/serve.go @@ -62,7 +62,7 @@ func Dial(ctx context.Context, dialer Dialer, binder Binder, onDone func()) (*Co if err != nil { return nil, err } - return newConnection(ctx, rwc, binder, onDone), nil + return bindConnection(ctx, rwc, binder, onDone), nil } // NewServer starts a new server listening for incoming connections and returns @@ -117,7 +117,7 @@ func (s *Server) run(ctx context.Context) { // A new inbound connection. activeConns.Add(1) - _ = newConnection(ctx, rwc, s.binder, activeConns.Done) // unregisters itself when done + _ = bindConnection(ctx, rwc, s.binder, activeConns.Done) // unregisters itself when done } activeConns.Wait() } diff --git a/internal/mcp/client.go b/internal/mcp/client.go index 2369a73d7df..65fe84a48e7 100644 --- a/internal/mcp/client.go +++ b/internal/mcp/client.go @@ -89,7 +89,7 @@ func (c *Client) disconnect(sc *ServerConnection) { // when it is no longer needed. However, if the connection is closed by the // server, calls or notifications will return an error wrapping // [ErrConnectionClosed]. -func (c *Client) Connect(ctx context.Context, t *Transport, opts *ConnectionOptions) (sc *ServerConnection, err error) { +func (c *Client) Connect(ctx context.Context, t Transport, opts *ConnectionOptions) (sc *ServerConnection, err error) { defer func() { if sc != nil && err != nil { _ = sc.Close() diff --git a/internal/mcp/examples/hello/main.go b/internal/mcp/examples/hello/main.go index 36bb40a2bac..74c016a059b 100644 --- a/internal/mcp/examples/hello/main.go +++ b/internal/mcp/examples/hello/main.go @@ -6,16 +6,15 @@ package main import ( "context" + "flag" "fmt" + "net/http" "os" "golang.org/x/tools/internal/mcp" ) -type Optional[T any] struct { - present bool - value T -} +var httpAddr = flag.String("http", "", "if set, use SSE HTTP at this address, instead of stdin/stdout") type SayHiParams struct { Name string `json:"name" mcp:"the name to say hi to"` @@ -28,11 +27,20 @@ func SayHi(ctx context.Context, params *SayHiParams) ([]mcp.Content, error) { } func main() { + flag.Parse() + server := mcp.NewServer("greeter", "v0.0.1", nil) server.AddTools(mcp.MakeTool("greet", "say hi", SayHi)) - opts := &mcp.ConnectionOptions{Logger: os.Stderr} - if err := server.Run(context.Background(), mcp.NewStdIOTransport(), opts); err != nil { - fmt.Fprintf(os.Stderr, "Server failed: %v", err) + if *httpAddr != "" { + handler := mcp.NewSSEHandler(func() *mcp.Server { + return server + }) + http.ListenAndServe(*httpAddr, handler) + } else { + opts := &mcp.ConnectionOptions{Logger: os.Stderr} + if err := server.Run(context.Background(), mcp.NewStdIOTransport(), opts); err != nil { + fmt.Fprintf(os.Stderr, "Server failed: %v", err) + } } } diff --git a/internal/mcp/mcp_test.go b/internal/mcp/mcp_test.go index 375818cae3c..dff7d92a6fd 100644 --- a/internal/mcp/mcp_test.go +++ b/internal/mcp/mcp_test.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
-package mcp_test +package mcp import ( "context" @@ -11,9 +11,9 @@ import ( "strings" "sync" "testing" + "time" "github.com/google/go-cmp/cmp" - "golang.org/x/tools/internal/mcp" "golang.org/x/tools/internal/mcp/internal/jsonschema" "golang.org/x/tools/internal/mcp/internal/protocol" ) @@ -22,22 +22,22 @@ type hiParams struct { Name string } -func sayHi(_ context.Context, v hiParams) ([]mcp.Content, error) { - return []mcp.Content{mcp.TextContent{Text: "hi " + v.Name}}, nil +func sayHi(_ context.Context, v hiParams) ([]Content, error) { + return []Content{TextContent{Text: "hi " + v.Name}}, nil } func TestEndToEnd(t *testing.T) { ctx := context.Background() - ct, st := mcp.NewLocalTransport() + ct, st := NewLocalTransport() - s := mcp.NewServer("testServer", "v1.0.0", nil) + s := NewServer("testServer", "v1.0.0", nil) // The 'greet' tool says hi. - s.AddTools(mcp.MakeTool("greet", "say hi", sayHi)) + s.AddTools(MakeTool("greet", "say hi", sayHi)) // The 'fail' tool returns this error. failure := errors.New("mcp failure") - s.AddTools(mcp.MakeTool("fail", "just fail", func(context.Context, struct{}) ([]mcp.Content, error) { + s.AddTools(MakeTool("fail", "just fail", func(context.Context, struct{}) ([]Content, error) { return nil, failure })) @@ -60,7 +60,7 @@ func TestEndToEnd(t *testing.T) { clientWG.Done() }() - c := mcp.NewClient("testClient", "v1.0.0", nil) + c := NewClient("testClient", "v1.0.0", nil) // Connect the client. sc, err := c.Connect(ctx, ct, nil) @@ -101,7 +101,7 @@ func TestEndToEnd(t *testing.T) { if err != nil { t.Fatal(err) } - wantHi := []mcp.Content{mcp.TextContent{Text: "hi user"}} + wantHi := []Content{TextContent{Text: "hi user"}} if diff := cmp.Diff(wantHi, gotHi); diff != "" { t.Errorf("tools/call 'greet' mismatch (-want +got):\n%s", diff) } @@ -126,18 +126,18 @@ func TestEndToEnd(t *testing.T) { func TestServerClosing(t *testing.T) { ctx := context.Background() - ct, st := mcp.NewLocalTransport() + ct, st := NewLocalTransport() - s := mcp.NewServer("testServer", "v1.0.0", nil) + s := NewServer("testServer", "v1.0.0", nil) // The 'greet' tool says hi. - s.AddTools(mcp.MakeTool("greet", "say hi", sayHi)) + s.AddTools(MakeTool("greet", "say hi", sayHi)) cc, err := s.Connect(ctx, st, nil) if err != nil { t.Fatal(err) } - c := mcp.NewClient("testClient", "v1.0.0", nil) + c := NewClient("testClient", "v1.0.0", nil) sc, err := c.Connect(ctx, ct, nil) if err != nil { t.Fatal(err) @@ -156,36 +156,47 @@ func TestServerClosing(t *testing.T) { } cc.Close() wg.Wait() - if _, err := sc.CallTool(ctx, "greet", hiParams{"user"}); !errors.Is(err, mcp.ErrConnectionClosed) { + if _, err := sc.CallTool(ctx, "greet", hiParams{"user"}); !errors.Is(err, ErrConnectionClosed) { t.Errorf("after disconnection, got error %v, want EOF", err) } } func TestBatching(t *testing.T) { ctx := context.Background() - ct, st := mcp.NewLocalTransport() + ct, st := NewLocalTransport() - s := mcp.NewServer("testServer", "v1.0.0", nil) + s := NewServer("testServer", "v1.0.0", nil) _, err := s.Connect(ctx, st, nil) if err != nil { t.Fatal(err) } - c := mcp.NewClient("testClient", "v1.0.0", nil) - opts := new(mcp.ConnectionOptions) - mcp.BatchSize(opts, 2) + c := NewClient("testClient", "v1.0.0", nil) + opts := new(ConnectionOptions) + // TODO: this test is broken, because increasing the batch size here causes + // 'initialize' to block. Therefore, we can only test with a size of 1. 
+ const batchSize = 1 + BatchSize(ct, batchSize) sc, err := c.Connect(ctx, ct, opts) if err != nil { t.Fatal(err) } defer sc.Close() - errs := make(chan error, 2) - for range 2 { + errs := make(chan error, batchSize) + for i := range batchSize { go func() { _, err := sc.ListTools(ctx) errs <- err }() + time.Sleep(2 * time.Millisecond) + if i < batchSize-1 { + select { + case <-errs: + t.Errorf("ListTools: unexpected result for incomplete batch: %v", err) + default: + } + } } } diff --git a/internal/mcp/server.go b/internal/mcp/server.go index c5553729a11..4e76ba6fd22 100644 --- a/internal/mcp/server.go +++ b/internal/mcp/server.go @@ -56,10 +56,10 @@ func NewServer(name, version string, opts *ServerOptions) *Server { // AddTools adds the given tools to the server. // // TODO(rfindley): notify connected clients of any changes. -func (c *Server) AddTools(tools ...*Tool) { - c.mu.Lock() - defer c.mu.Unlock() - c.tools = append(c.tools, tools...) +func (s *Server) AddTools(tools ...*Tool) { + s.mu.Lock() + defer s.mu.Unlock() + s.tools = append(s.tools, tools...) } // Clients returns an iterator that yields the current set of client @@ -71,26 +71,26 @@ func (s *Server) Clients() iter.Seq[*ClientConnection] { return slices.Values(clients) } -func (c *Server) listTools(_ context.Context, params *protocol.ListToolsParams) (*protocol.ListToolsResult, error) { - c.mu.Lock() - defer c.mu.Unlock() +func (s *Server) listTools(_ context.Context, _ *ClientConnection, params *protocol.ListToolsParams) (*protocol.ListToolsResult, error) { + s.mu.Lock() + defer s.mu.Unlock() res := new(protocol.ListToolsResult) - for _, t := range c.tools { + for _, t := range s.tools { res.Tools = append(res.Tools, t.Definition) } return res, nil } -func (c *Server) callTool(ctx context.Context, params *protocol.CallToolParams) (*protocol.CallToolResult, error) { - c.mu.Lock() +func (s *Server) callTool(ctx context.Context, _ *ClientConnection, params *protocol.CallToolParams) (*protocol.CallToolResult, error) { + s.mu.Lock() var tool *Tool - if i := slices.IndexFunc(c.tools, func(t *Tool) bool { + if i := slices.IndexFunc(s.tools, func(t *Tool) bool { return t.Definition.Name == params.Name }); i >= 0 { - tool = c.tools[i] + tool = s.tools[i] } - c.mu.Unlock() + s.mu.Unlock() if tool == nil { return nil, fmt.Errorf("%s: unknown tool %q", jsonrpc2.ErrInvalidParams, params.Name) @@ -98,33 +98,33 @@ func (c *Server) callTool(ctx context.Context, params *protocol.CallToolParams) return tool.Handler(ctx, params.Arguments) } -// Run runs the server over the given transport. +// Run runs the server over the given transport, which must be persistent. // // Run blocks until the client terminates the connection. -func (c *Server) Run(ctx context.Context, t *Transport, opts *ConnectionOptions) error { - conn, err := c.Connect(ctx, t, opts) +func (s *Server) Run(ctx context.Context, t Transport, opts *ConnectionOptions) error { + cc, err := s.Connect(ctx, t, opts) if err != nil { return err } - return conn.Wait() + return cc.Wait() } // bind implements the binder[*ClientConnection] interface, so that Servers can // be connected using [connect]. 
-func (c *Server) bind(conn *jsonrpc2.Connection) *ClientConnection { - cc := &ClientConnection{conn: conn, server: c} - c.mu.Lock() - c.clients = append(c.clients, cc) - c.mu.Unlock() +func (s *Server) bind(conn *jsonrpc2.Connection) *ClientConnection { + cc := &ClientConnection{conn: conn, server: s} + s.mu.Lock() + s.clients = append(s.clients, cc) + s.mu.Unlock() return cc } // disconnect implements the binder[*ClientConnection] interface, so that // Servers can be connected using [connect]. -func (c *Server) disconnect(cc *ClientConnection) { - c.mu.Lock() - defer c.mu.Unlock() - c.clients = slices.DeleteFunc(c.clients, func(cc2 *ClientConnection) bool { +func (s *Server) disconnect(cc *ClientConnection) { + s.mu.Lock() + defer s.mu.Unlock() + s.clients = slices.DeleteFunc(s.clients, func(cc2 *ClientConnection) bool { return cc2 == cc }) } @@ -135,8 +135,8 @@ func (c *Server) disconnect(cc *ClientConnection) { // It returns a connection object that may be used to terminate the connection // (with [Connection.Close]), or await client termination (with // [Connection.Wait]). -func (c *Server) Connect(ctx context.Context, t *Transport, opts *ConnectionOptions) (*ClientConnection, error) { - return connect(ctx, t, opts, c) +func (s *Server) Connect(ctx context.Context, t Transport, opts *ConnectionOptions) (*ClientConnection, error) { + return connect(ctx, t, opts, s) } // A ClientConnection is a connection with an MCP client. @@ -144,32 +144,34 @@ func (c *Server) Connect(ctx context.Context, t *Transport, opts *ConnectionOpti // It handles messages from the client, and can be used to send messages to the // client. Create a connection by calling [Server.Connect]. type ClientConnection struct { - conn *jsonrpc2.Connection server *Server + conn *jsonrpc2.Connection mu sync.Mutex initializeParams *protocol.InitializeParams // set once initialize has been received } func (cc *ClientConnection) handle(ctx context.Context, req *jsonrpc2.Request) (any, error) { + // TODO: embed the incoming request ID in the ClientContext (or, more likely, + // a wrapper around it), so that we can correlate responses and notifications + // to the handler; this is required for the new session-based transport. 
+ switch req.Method { case "initialize": - return dispatch(ctx, req, cc.initialize) - - // TODO: handle initialized + return dispatch(ctx, cc, req, cc.initialize) case "tools/list": - return dispatch(ctx, req, cc.server.listTools) + return dispatch(ctx, cc, req, cc.server.listTools) case "tools/call": - return dispatch(ctx, req, cc.server.callTool) + return dispatch(ctx, cc, req, cc.server.callTool) case "notifications/initialized": } return nil, jsonrpc2.ErrNotHandled } -func (cc *ClientConnection) initialize(ctx context.Context, params *protocol.InitializeParams) (*protocol.InitializeResult, error) { +func (cc *ClientConnection) initialize(ctx context.Context, _ *ClientConnection, params *protocol.InitializeParams) (*protocol.InitializeResult, error) { cc.mu.Lock() cc.initializeParams = params cc.mu.Unlock() @@ -202,11 +204,11 @@ func (cc *ClientConnection) Wait() error { return cc.conn.Wait() } -func dispatch[TParams, TResult any](ctx context.Context, req *jsonrpc2.Request, f func(context.Context, TParams) (TResult, error)) (TResult, error) { +func dispatch[TParams, TResult any](ctx context.Context, conn *ClientConnection, req *jsonrpc2.Request, f func(context.Context, *ClientConnection, TParams) (TResult, error)) (TResult, error) { var params TParams if err := json.Unmarshal(req.Params, ¶ms); err != nil { var zero TResult return zero, err } - return f(ctx, params) + return f(ctx, conn, params) } diff --git a/internal/mcp/sse.go b/internal/mcp/sse.go new file mode 100644 index 00000000000..b632594151a --- /dev/null +++ b/internal/mcp/sse.go @@ -0,0 +1,430 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mcp + +import ( + "bufio" + "bytes" + "context" + "fmt" + "io" + "net/http" + "net/url" + "slices" + "strings" + "sync" + + jsonrpc2 "golang.org/x/tools/internal/jsonrpc2_v2" +) + +// This file implements support for SSE transport server and client. +// +// TODO: +// - avoid the use of channels as listenable queues. +// - support resuming broken streamable sessions +// - support GET channels for unrelated notifications in streamable sessions +// - add client support (and use it to test) +// - properly correlate notifications/requests to an incoming request (using +// requestCtx) + +// An event is a server-sent event. +type event struct { + name string + data []byte +} + +// writeEvent writes the event to w, and flushes. +func writeEvent(w io.Writer, evt event) (int, error) { + var b bytes.Buffer + if evt.name != "" { + fmt.Fprintf(&b, "event: %s\n", evt.name) + } + fmt.Fprintf(&b, "data: %s\n\n", string(evt.data)) + n, err := w.Write(b.Bytes()) + if f, ok := w.(http.Flusher); ok { + f.Flush() + } + return n, err +} + +// SSEHandler is an http.Handler that serves streamable MCP sessions as +// defined by version 2024-11-05 of the MCP spec: +// https://modelcontextprotocol.io/specification/2024-11-05/basic/transports +type SSEHandler struct { + getServer func() *Server + onClient func(*ClientConnection) // for testing; must not block + + mu sync.Mutex + sessions map[string]*sseSession +} + +// NewSSEHandler returns a new [SSEHandler] that is ready to serve HTTP. +// +// The getServer function is used to bind create servers for new sessions. It +// is OK for getServer to return the same server multiple times. 
+func NewSSEHandler(getServer func() *Server) *SSEHandler { + return &SSEHandler{ + getServer: getServer, + sessions: make(map[string]*sseSession), + } +} + +// A sseSession abstracts a session initiated through the sse endpoint. +// +// It implements the Transport interface. +type sseSession struct { + incoming chan jsonrpc2.Message + + mu sync.Mutex + w io.Writer // the hanging response body + isDone bool // set when the stream is closed + done chan struct{} // closed when the stream is closed +} + +// connect returns the receiver, as an sseSession is a logical stream. +func (s *sseSession) connect(context.Context) (stream, error) { + return s, nil +} + +func (h *SSEHandler) ServeHTTP(w http.ResponseWriter, req *http.Request) { + sessionID := req.URL.Query().Get("sessionid") + + // TODO: consider checking Content-Type here. For now, we are lax. + + // For POST requests, the message body is a message to send to a session. + if req.Method == http.MethodPost { + // Look up the session. + if sessionID == "" { + http.Error(w, "sessionid must be provided", http.StatusBadRequest) + return + } + h.mu.Lock() + session := h.sessions[sessionID] + h.mu.Unlock() + if session == nil { + http.Error(w, "session not found", http.StatusNotFound) + return + } + + // Read and parse the message. + data, err := io.ReadAll(req.Body) + if err != nil { + http.Error(w, "failed to read body", http.StatusBadRequest) + return + } + // Optionally, we could just push the data onto a channel, and let the + // message fail to parse when it is read. This failure seems a bit more + // useful + msg, err := jsonrpc2.DecodeMessage(data) + if err != nil { + http.Error(w, "failed to parse body", http.StatusBadRequest) + return + } + session.incoming <- msg + w.WriteHeader(http.StatusAccepted) + return + } + + if req.Method != http.MethodGet { + http.Error(w, "invalid method", http.StatusMethodNotAllowed) + return + } + + // GET requests create a new session, and serve messages over SSE. + + // TODO: it's not entirely documented whether we should check Accept here. + // Let's again be lax and assume the client will accept SSE. + + w.Header().Set("Content-Type", "text/event-stream") + w.Header().Set("Cache-Control", "no-cache") + w.Header().Set("Connection", "keep-alive") + + sessionID = randText() + h.mu.Lock() + session := &sseSession{ + w: w, + incoming: make(chan jsonrpc2.Message, 1000), + done: make(chan struct{}), + } + h.sessions[sessionID] = session + h.mu.Unlock() + + // The session is terminated when the request exits. + defer func() { + h.mu.Lock() + delete(h.sessions, sessionID) + h.mu.Unlock() + }() + + server := h.getServer() + cc, err := server.Connect(req.Context(), session, nil) + if err != nil { + http.Error(w, "connection failed", http.StatusInternalServerError) + return + } + if h.onClient != nil { + h.onClient(cc) + } + defer cc.Close() + + endpoint, err := req.URL.Parse("?sessionid=" + sessionID) + if err != nil { + http.Error(w, "internal error: failed to create endpoint", http.StatusInternalServerError) + return + } + + session.mu.Lock() + _, err = writeEvent(w, event{ + name: "endpoint", + data: []byte(endpoint.RequestURI()), + }) + session.mu.Unlock() + if err != nil { + return // too late to write the status header + } + + select { + case <-req.Context().Done(): + case <-session.done: + } +} + +// Read implements jsonrpc2.Reader. 
+func (s *sseSession) Read(ctx context.Context) (jsonrpc2.Message, int64, error) { + select { + case msg := <-s.incoming: + if msg == nil { + return nil, 0, io.EOF + } + return msg, 0, nil + case <-s.done: + return nil, 0, io.EOF + } +} + +// Write implements jsonrpc2.Writer. +func (s *sseSession) Write(ctx context.Context, msg jsonrpc2.Message) (int64, error) { + data, err := jsonrpc2.EncodeMessage(msg) + if err != nil { + return 0, err + } + + s.mu.Lock() + defer s.mu.Unlock() + + if s.isDone { + return 0, io.EOF + } + + n, err := writeEvent(s.w, event{name: "message", data: data}) + return int64(n), err +} + +// Close implements io.Closer. +func (s *sseSession) Close() error { + s.mu.Lock() + defer s.mu.Unlock() + if !s.isDone { + s.isDone = true + close(s.done) + } + return nil +} + +// An SSEClientTransport is a [Transport] that can communicate with an MCP +// endpoint serving the SSE transport defined by the 2024-11-05 version of the +// spec. +type SSEClientTransport struct { + sseEndpoint *url.URL +} + +// NewSSEClientTransport returns a new client transport that connects to the +// SSE server at the provided URL. +func NewSSEClientTransport(rawURL string) (*SSEClientTransport, error) { + url, err := url.Parse(rawURL) + if err != nil { + return nil, err + } + return &SSEClientTransport{ + sseEndpoint: url, + }, nil +} + +// connect connects to the client endpoint. +func (c *SSEClientTransport) connect(ctx context.Context) (stream, error) { + req, err := http.NewRequestWithContext(ctx, "GET", c.sseEndpoint.String(), nil) + if err != nil { + return nil, err + } + req.Header.Set("Accept", "text/event-stream") + resp, err := http.DefaultClient.Do(req) + if err != nil { + return nil, err + } + scanner := bufio.NewScanner(resp.Body) + + // TODO: investigate proper behavior when events are out of order, or have + // non-standard names. + var ( + eventKey = []byte("event") + dataKey = []byte("data") + ) + + // nextEvent reads one sse event from the wire. + // https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#examples + // + // - `key: value` line records. + // - Consecutive `data: ...` fields are joined with newlines. + // - Unrecognized fields are ignored. Since we only care about 'event' and + // 'data', these are the only two we consider. + // - Lines starting with ":" are ignored. + // - Records are terminated with two consecutive newlines. 
+ nextEvent := func() (event, error) { + var ( + evt event + lastWasData bool // if set, preceding data field was also data + ) + for scanner.Scan() { + line := scanner.Bytes() + if len(line) == 0 && (evt.name != "" || len(evt.data) > 0) { + return evt, nil + } + before, after, found := bytes.Cut(line, []byte{':'}) + if !found { + return evt, fmt.Errorf("malformed line in SSE stream: %q", string(line)) + } + switch { + case bytes.Equal(before, eventKey): + evt.name = strings.TrimSpace(string(after)) + case bytes.Equal(before, dataKey): + data := bytes.TrimSpace(after) + if lastWasData { + evt.data = slices.Concat(evt.data, []byte{'\n'}, data) + } else { + evt.data = data + } + lastWasData = true + } + } + return evt, io.EOF + } + + msgEndpoint, err := func() (*url.URL, error) { + evt, err := nextEvent() + if err != nil { + return nil, err + } + if evt.name != "endpoint" { + return nil, fmt.Errorf("first event is %q, want %q", evt.name, "endpoint") + } + raw := string(evt.data) + return c.sseEndpoint.Parse(raw) + }() + if err != nil { + resp.Body.Close() + return nil, fmt.Errorf("missing endpoint: %v", err) + } + + // From here on, the stream takes ownership of resp.Body. + s := &sseClientStream{ + sseEndpoint: c.sseEndpoint, + msgEndpoint: msgEndpoint, + incoming: make(chan []byte, 100), + body: resp.Body, + done: make(chan struct{}), + } + + go func() { + for { + evt, err := nextEvent() + if err != nil { + close(s.incoming) + return + } + if evt.name == "message" { + select { + case s.incoming <- evt.data: + case <-s.done: + close(s.incoming) + return + } + } + } + }() + + return s, nil +} + +type sseClientStream struct { + sseEndpoint *url.URL + msgEndpoint *url.URL + + incoming chan []byte + + mu sync.Mutex + body io.ReadCloser + isDone bool + done chan struct{} + closeErr error +} + +func (c *sseClientStream) Read(ctx context.Context) (jsonrpc2.Message, int64, error) { + select { + case <-ctx.Done(): + return nil, 0, ctx.Err() + case data := <-c.incoming: + if data == nil { + return nil, 0, io.EOF + } + msg, err := jsonrpc2.DecodeMessage(data) + if err != nil { + return nil, 0, err + } + return msg, int64(len(data)), nil + case <-c.done: + if c.closeErr != nil { + return nil, 0, c.closeErr + } + return nil, 0, io.EOF + } +} + +func (c *sseClientStream) Write(ctx context.Context, msg jsonrpc2.Message) (int64, error) { + data, err := jsonrpc2.EncodeMessage(msg) + if err != nil { + return 0, err + } + c.mu.Lock() + done := c.isDone + c.mu.Unlock() + if done { + return 0, io.EOF + } + req, err := http.NewRequestWithContext(ctx, "POST", c.msgEndpoint.String(), bytes.NewReader(data)) + if err != nil { + return 0, err + } + req.Header.Set("Content-Type", "application/json") + resp, err := http.DefaultClient.Do(req) + if err != nil { + return 0, err + } + defer resp.Body.Close() + if resp.StatusCode < 200 || resp.StatusCode >= 300 { + return 0, fmt.Errorf("failed to write: %s", resp.Status) + } + return int64(len(data)), nil +} + +func (c *sseClientStream) Close() error { + c.mu.Lock() + defer c.mu.Unlock() + if !c.isDone { + c.isDone = true + c.closeErr = c.body.Close() + close(c.done) + } + return c.closeErr +} diff --git a/internal/mcp/sse_test.go b/internal/mcp/sse_test.go new file mode 100644 index 00000000000..0463ab2308d --- /dev/null +++ b/internal/mcp/sse_test.go @@ -0,0 +1,65 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package mcp + +import ( + "context" + "fmt" + "net/http/httptest" + "testing" + + "github.com/google/go-cmp/cmp" +) + +func TestSSEServer(t *testing.T) { + for _, closeServerFirst := range []bool{false, true} { + t.Run(fmt.Sprintf("closeServerFirst=%t", closeServerFirst), func(t *testing.T) { + ctx := context.Background() + server := NewServer("testServer", "v1.0.0", nil) + server.AddTools(MakeTool("greet", "say hi", sayHi)) + + sseHandler := NewSSEHandler(func() *Server { return server }) + + clients := make(chan *ClientConnection, 1) + sseHandler.onClient = func(cc *ClientConnection) { + select { + case clients <- cc: + default: + } + } + httpServer := httptest.NewServer(sseHandler) + + clientTransport, err := NewSSEClientTransport(httpServer.URL) + if err != nil { + t.Fatal(err) + } + + client := NewClient("testClient", "v1.0.0", nil) + sc, err := client.Connect(ctx, clientTransport, nil) + if err != nil { + t.Fatal(err) + } + cc := <-clients + gotHi, err := sc.CallTool(ctx, "greet", hiParams{"user"}) + if err != nil { + t.Fatal(err) + } + wantHi := []Content{TextContent{Text: "hi user"}} + if diff := cmp.Diff(wantHi, gotHi); diff != "" { + t.Errorf("tools/call 'greet' mismatch (-want +got):\n%s", diff) + } + + // Test that closing either end of the connection terminates the other + // end. + if closeServerFirst { + sc.Close() + cc.Wait() + } else { + cc.Close() + sc.Wait() + } + }) + } +} diff --git a/internal/mcp/transport.go b/internal/mcp/transport.go index 28283313629..9cce1a2cf54 100644 --- a/internal/mcp/transport.go +++ b/internal/mcp/transport.go @@ -10,7 +10,6 @@ import ( "errors" "fmt" "io" - "log" "net" "os" "sync" @@ -27,44 +26,52 @@ var ErrConnectionClosed = errors.New("connection closed") // A Transport is used to create a bidirectional connection between MCP client // and server. -type Transport struct { - dialer jsonrpc2.Dialer +// +// Transports should be used for at most one call to [Server.Connect] or +// [Client.Connect]. +type Transport interface { + // connect returns the logical stream. + // + // It is called exactly once by [connect]. + connect(ctx context.Context) (stream, error) +} + +// A stream is an abstract bidirectional jsonrpc2 stream. +// It is used by [connect] to establish a [jsonrpc2.Connection]. +type stream interface { + jsonrpc2.Reader + jsonrpc2.Writer + io.Closer } // ConnectionOptions configures the behavior of an individual client<->server // connection. type ConnectionOptions struct { - Logger io.Writer // if set, write RPC logs + SessionID string // if set, the session ID + Logger io.Writer // if set, write RPC logs +} + +// An IOTransport is a [Transport] that communicates using newline-delimited +// JSON over an io.ReadWriteCloser. +type IOTransport struct { + rwc io.ReadWriteCloser +} - batchSize int // outgoing batch size for requests/notifications, for testing +func (t *IOTransport) connect(context.Context) (stream, error) { + return newIOStream(t.rwc), nil } // NewStdIOTransport constructs a transport that communicates over // stdin/stdout. -func NewStdIOTransport() *Transport { - dialer := dialerFunc(func(ctx context.Context) (io.ReadWriteCloser, error) { - return rwc{os.Stdin, os.Stdout}, nil - }) - return &Transport{ - dialer: dialer, - } +func NewStdIOTransport() *IOTransport { + return &IOTransport{rwc{os.Stdin, os.Stdout}} } // NewLocalTransport returns two in-memory transports that connect to // each other, for testing purposes. 
-func NewLocalTransport() (*Transport, *Transport) { +func NewLocalTransport() (*IOTransport, *IOTransport) { c1, c2 := net.Pipe() - t1 := &Transport{ - dialer: dialerFunc(func(ctx context.Context) (io.ReadWriteCloser, error) { - return c1, nil - }), - } - t2 := &Transport{ - dialer: dialerFunc(func(ctx context.Context) (io.ReadWriteCloser, error) { - return c2, nil - }), - } - return t1, t2 + return &IOTransport{c1}, &IOTransport{c2} } // handler is an unexported version of jsonrpc2.Handler, to be implemented by @@ -79,43 +86,37 @@ type binder[T handler] interface { disconnect(T) } -func connect[H handler](ctx context.Context, t *Transport, opts *ConnectionOptions, b binder[H]) (H, error) { +func connect[H handler](ctx context.Context, t Transport, opts *ConnectionOptions, b binder[H]) (H, error) { if opts == nil { opts = new(ConnectionOptions) } - // Frame messages using newline delimited JSON. - // + var zero H + stream, err := t.connect(ctx) + if err != nil { + return zero, err + } // If logging is configured, write message logs. - var framer jsonrpc2.Framer = &ndjsonFramer{} + reader, writer := jsonrpc2.Reader(stream), jsonrpc2.Writer(stream) if opts.Logger != nil { - framer = &loggingFramer{opts.Logger, framer} + reader = loggingReader(opts.Logger, reader) + writer = loggingWriter(opts.Logger, writer) } var h H - - // Bind the server connection. - binder := jsonrpc2.BinderFunc(func(_ context.Context, conn *jsonrpc2.Connection) jsonrpc2.ConnectionOptions { + bind := func(conn *jsonrpc2.Connection) jsonrpc2.Handler { h = b.bind(conn) - return jsonrpc2.ConnectionOptions{ - Framer: framer, - Handler: jsonrpc2.HandlerFunc(h.handle), - OnInternalError: func(err error) { - log.Printf("Internal error: %v", err) - }, - } - }) - - // Clean up the connection when done. - onDone := func() { - b.disconnect(h) - } - - var zero H - _, err := jsonrpc2.Dial(ctx, t.dialer, binder, onDone) - if err != nil { - return zero, err + return jsonrpc2.HandlerFunc(h.handle) } + _ = jsonrpc2.NewConnection(ctx, jsonrpc2.ConnectionConfig{ + Reader: reader, + Writer: writer, + Closer: stream, + Bind: bind, + OnDone: func() { + b.disconnect(h) + }, + }) assert(h != zero, "unbound connection") return h, nil } @@ -135,13 +136,6 @@ func call(ctx context.Context, conn *jsonrpc2.Connection, method string, params, // The helpers below are used to bind transports to jsonrpc2. -// A dialerFunc implements jsonrpc2.Dialer.Dial. -type dialerFunc func(context.Context) (io.ReadWriteCloser, error) - -func (f dialerFunc) Dial(ctx context.Context) (io.ReadWriteCloser, error) { - return f(ctx) -} - // A readerFunc implements jsonrpc2.Reader.Read. type readerFunc func(context.Context) (jsonrpc2.Message, int64, error) @@ -156,41 +150,35 @@ func (f writerFunc) Write(ctx context.Context, msg jsonrpc2.Message) (int64, err return f(ctx, msg) } -// A loggingFramer logs jsonrpc2 messages to its enclosed writer. -type loggingFramer struct { - w io.Writer - delegate jsonrpc2.Framer -} - -func (f *loggingFramer) Reader(rw io.Reader) jsonrpc2.Reader { - delegate := f.delegate.Reader(rw) +// loggingReader is a stream middleware that logs incoming messages. 
+func loggingReader(w io.Writer, delegate jsonrpc2.Reader) jsonrpc2.Reader { return readerFunc(func(ctx context.Context) (jsonrpc2.Message, int64, error) { msg, n, err := delegate.Read(ctx) if err != nil { - fmt.Fprintf(f.w, "read error: %v", err) + fmt.Fprintf(w, "read error: %v", err) } else { data, err := jsonrpc2.EncodeMessage(msg) if err != nil { - fmt.Fprintf(f.w, "LoggingFramer: failed to marshal: %v", err) + fmt.Fprintf(w, "LoggingFramer: failed to marshal: %v", err) } - fmt.Fprintf(f.w, "read: %s", string(data)) + fmt.Fprintf(w, "read: %s", string(data)) } return msg, n, err }) } -func (f *loggingFramer) Writer(w io.Writer) jsonrpc2.Writer { - delegate := f.delegate.Writer(w) +// loggingWriter is a stream middleware that logs outgoing messages. +func loggingWriter(w io.Writer, delegate jsonrpc2.Writer) jsonrpc2.Writer { return writerFunc(func(ctx context.Context, msg jsonrpc2.Message) (int64, error) { n, err := delegate.Write(ctx, msg) if err != nil { - fmt.Fprintf(f.w, "write error: %v", err) + fmt.Fprintf(w, "write error: %v", err) } else { data, err := jsonrpc2.EncodeMessage(msg) if err != nil { - fmt.Fprintf(f.w, "LoggingFramer: failed to marshal: %v", err) + fmt.Fprintf(w, "LoggingFramer: failed to marshal: %v", err) } - fmt.Fprintf(f.w, "write: %s", string(data)) + fmt.Fprintf(w, "write: %s", string(data)) } return n, err }) @@ -217,42 +205,60 @@ func (r rwc) Close() error { return errors.Join(r.rc.Close(), r.wc.Close()) } -// A ndjsonFramer is a jsonrpc2.Framer that delimits messages with newlines. -// It also supports jsonrpc2 batching. +// An ioStream is a transport that delimits messages with newlines across +// a bidirectional stream, and supports JSONRPC2 message batching. // // See https://github.com/ndjson/ndjson-spec for discussion of newline // delimited JSON. // // See [msgBatch] for more discussion of message batching. -type ndjsonFramer struct { - // batchSize allows customizing batching behavior for testing. - // - // If set to a positive number, requests and notifications will be buffered - // into groups of this size before being sent as a batch. - batchSize int +type ioStream struct { + rwc io.ReadWriteCloser // the underlying stream + in *json.Decoder // a decoder bound to rwc + + // If outgoiBatch has a positive capacity, it will be used to batch requests + // and notifications before sending. + outgoingBatch []jsonrpc2.Message + + // Unread messages in the last batch. Since reads are serialized, there is no + // need to guard here. + queue []jsonrpc2.Message // batches correlate incoming requests to the batch in which they arrived. + // Since writes may be concurrent to reads, we need to guard this with a mutex. batchMu sync.Mutex batches map[jsonrpc2.ID]*msgBatch // lazily allocated } +func newIOStream(rwc io.ReadWriteCloser) *ioStream { + return &ioStream{ + rwc: rwc, + in: json.NewDecoder(rwc), + } +} + +// connect returns the receiver, as a streamTransport is a logical stream. +func (t *ioStream) connect(ctx context.Context) (stream, error) { + return t, nil +} + // addBatch records a msgBatch for an incoming batch payload. // It returns an error if batch is malformed, containing previously seen IDs. // // See [msgBatch] for more. 
-func (f *ndjsonFramer) addBatch(batch *msgBatch) error { - f.batchMu.Lock() - defer f.batchMu.Unlock() +func (t *ioStream) addBatch(batch *msgBatch) error { + t.batchMu.Lock() + defer t.batchMu.Unlock() for id := range batch.unresolved { - if _, ok := f.batches[id]; ok { + if _, ok := t.batches[id]; ok { return fmt.Errorf("%w: batch contains previously seen request %v", jsonrpc2.ErrInvalidRequest, id.Raw()) } } for id := range batch.unresolved { - if f.batches == nil { - f.batches = make(map[jsonrpc2.ID]*msgBatch) + if t.batches == nil { + t.batches = make(map[jsonrpc2.ID]*msgBatch) } - f.batches[id] = batch + t.batches[id] = batch } return nil } @@ -263,18 +269,18 @@ func (f *ndjsonFramer) addBatch(batch *msgBatch) error { // The second result reports whether resp was part of a batch. If this is true, // the first result is nil if the batch is still incomplete, or the full set of // batch responses if resp completed the batch. -func (f *ndjsonFramer) updateBatch(resp *jsonrpc2.Response) ([]*jsonrpc2.Response, bool) { - f.batchMu.Lock() - defer f.batchMu.Unlock() +func (t *ioStream) updateBatch(resp *jsonrpc2.Response) ([]*jsonrpc2.Response, bool) { + t.batchMu.Lock() + defer t.batchMu.Unlock() - if batch, ok := f.batches[resp.ID]; ok { + if batch, ok := t.batches[resp.ID]; ok { idx, ok := batch.unresolved[resp.ID] if !ok { panic("internal error: inconsistent batches") } batch.responses[idx] = resp delete(batch.unresolved, resp.ID) - delete(f.batches, resp.ID) + delete(t.batches, resp.ID) if len(batch.unresolved) == 0 { return batch.responses, true } @@ -301,55 +307,28 @@ type msgBatch struct { responses []*jsonrpc2.Response } -// An ndjsonReader reads newline-delimited messages or message batches. -type ndjsonReader struct { - queue []jsonrpc2.Message - framer *ndjsonFramer - in *json.Decoder +func (t *ioStream) Read(ctx context.Context) (jsonrpc2.Message, int64, error) { + return t.read(ctx, t.in) } -// A ndjsonWriter writes newline-delimited messages to the wrapped io.Writer. -// -// If batch is set, messages are wrapped in a JSONRPC2 batch. -type ndjsonWriter struct { - // Testing support: if outgoingBatch has capacity, it is used to buffer - // outgoing messages before sending a JSONRPC2 message batch. 
- outgoingBatch []jsonrpc2.Message - - framer *ndjsonFramer // to track batch responses - out io.Writer // to write to the wire -} - -func (f *ndjsonFramer) Reader(r io.Reader) jsonrpc2.Reader { - return &ndjsonReader{framer: f, in: json.NewDecoder(r)} -} - -func (f *ndjsonFramer) Writer(w io.Writer) jsonrpc2.Writer { - writer := &ndjsonWriter{framer: f, out: w} - if f.batchSize > 0 { - writer.outgoingBatch = make([]jsonrpc2.Message, 0, f.batchSize) - } - return writer -} - -func (r *ndjsonReader) Read(ctx context.Context) (jsonrpc2.Message, int64, error) { +func (t *ioStream) read(ctx context.Context, in *json.Decoder) (jsonrpc2.Message, int64, error) { select { case <-ctx.Done(): return nil, 0, ctx.Err() default: } - if len(r.queue) > 0 { - next := r.queue[0] - r.queue = r.queue[1:] + if len(t.queue) > 0 { + next := t.queue[0] + t.queue = t.queue[1:] return next, 0, nil } var raw json.RawMessage - if err := r.in.Decode(&raw); err != nil { + if err := in.Decode(&raw); err != nil { return nil, 0, err } var rawBatch []json.RawMessage if err := json.Unmarshal(raw, &rawBatch); err == nil { - msg, err := r.readBatch(rawBatch) + msg, err := t.readBatch(rawBatch) if err != nil { return nil, 0, err } @@ -361,7 +340,7 @@ func (r *ndjsonReader) Read(ctx context.Context) (jsonrpc2.Message, int64, error // readBatch reads a batch of jsonrpc2 messages, and records the batch // in the framer so that responses can be collected and send back together. -func (r *ndjsonReader) readBatch(rawBatch []json.RawMessage) (jsonrpc2.Message, error) { +func (t *ioStream) readBatch(rawBatch []json.RawMessage) (jsonrpc2.Message, error) { if len(rawBatch) == 0 { return nil, fmt.Errorf("empty batch") } @@ -403,16 +382,16 @@ func (r *ndjsonReader) readBatch(rawBatch []json.RawMessage) (jsonrpc2.Message, } if respBatch != nil { // The batch contains one or more incoming requests to track. - if err := r.framer.addBatch(respBatch); err != nil { + if err := t.addBatch(respBatch); err != nil { return nil, err } } - r.queue = append(r.queue, queue...) + t.queue = append(t.queue, queue...) return first, nil } -func (w *ndjsonWriter) Write(ctx context.Context, msg jsonrpc2.Message) (int64, error) { +func (t *ioStream) Write(ctx context.Context, msg jsonrpc2.Message) (int64, error) { select { case <-ctx.Done(): return 0, ctx.Err() @@ -424,28 +403,28 @@ func (w *ndjsonWriter) Write(ctx context.Context, msg jsonrpc2.Message) (int64, // want to collect it into a batch before sending, if we're configured to use // outgoing batches. 
if resp, ok := msg.(*jsonrpc2.Response); ok { - if batch, ok := w.framer.updateBatch(resp); ok { + if batch, ok := t.updateBatch(resp); ok { if len(batch) > 0 { data, err := marshalMessages(batch) if err != nil { return 0, err } data = append(data, '\n') - n, err := w.out.Write(data) + n, err := t.rwc.Write(data) return int64(n), err } return 0, nil } - } else if len(w.outgoingBatch) < cap(w.outgoingBatch) { - w.outgoingBatch = append(w.outgoingBatch, msg) - if len(w.outgoingBatch) == cap(w.outgoingBatch) { - data, err := marshalMessages(w.outgoingBatch) - w.outgoingBatch = w.outgoingBatch[:0] + } else if len(t.outgoingBatch) < cap(t.outgoingBatch) { + t.outgoingBatch = append(t.outgoingBatch, msg) + if len(t.outgoingBatch) == cap(t.outgoingBatch) { + data, err := marshalMessages(t.outgoingBatch) + t.outgoingBatch = t.outgoingBatch[:0] if err != nil { return 0, err } data = append(data, '\n') - n, err := w.out.Write(data) + n, err := t.rwc.Write(data) return int64(n), err } return 0, nil @@ -455,10 +434,14 @@ func (w *ndjsonWriter) Write(ctx context.Context, msg jsonrpc2.Message) (int64, return 0, fmt.Errorf("marshaling message: %v", err) } data = append(data, '\n') // newline delimited - n, err := w.out.Write(data) + n, err := t.rwc.Write(data) return int64(n), err } +func (t *ioStream) Close() error { + return t.rwc.Close() +} + func marshalMessages[T jsonrpc2.Message](msgs []T) ([]byte, error) { var rawMsgs []json.RawMessage for _, msg := range msgs { diff --git a/internal/mcp/transport_test.go b/internal/mcp/transport_test.go index a96a3855952..ff51c1f9aea 100644 --- a/internal/mcp/transport_test.go +++ b/internal/mcp/transport_test.go @@ -12,12 +12,14 @@ import ( jsonrpc2 "golang.org/x/tools/internal/jsonrpc2_v2" ) -// BatchSize causes a connection to collect n requests or notifications before +// BatchSize causes a transport to collect n requests or notifications before // sending a batch on the wire (responses are always sent in isolation). // // Exported for testing in the mcp_test package. -func BatchSize(opts *ConnectionOptions, n int) { - opts.batchSize = n +func BatchSize(t Transport, n int) { + if st, ok := t.(*ioStream); ok { + st.outgoingBatch = make([]jsonrpc2.Message, 0, n) + } } func TestBatchFraming(t *testing.T) { @@ -29,21 +31,20 @@ func TestBatchFraming(t *testing.T) { ctx := context.Background() r, w := io.Pipe() - framer := ndjsonFramer{batchSize: 2} - reader := framer.Reader(r) - writer := framer.Writer(w) + tport := newIOStream(rwc{r, w}) + tport.outgoingBatch = make([]jsonrpc2.Message, 0, 2) // Read the two messages into a channel, for easy testing later. read := make(chan jsonrpc2.Message) go func() { for range 2 { - msg, _, _ := reader.Read(ctx) + msg, _, _ := tport.Read(ctx) read <- msg } }() // The first write should not yet be observed by the reader. - writer.Write(ctx, &jsonrpc2.Request{ID: jsonrpc2.Int64ID(1), Method: "test"}) + tport.Write(ctx, &jsonrpc2.Request{ID: jsonrpc2.Int64ID(1), Method: "test"}) select { case got := <-read: t.Fatalf("after one write, got message %v", got) @@ -51,7 +52,7 @@ func TestBatchFraming(t *testing.T) { } // ...but the second write causes both messages to be observed. 
- writer.Write(ctx, &jsonrpc2.Request{ID: jsonrpc2.Int64ID(2), Method: "test"}) + tport.Write(ctx, &jsonrpc2.Request{ID: jsonrpc2.Int64ID(2), Method: "test"}) for _, want := range []int64{1, 2} { got := <-read if got := got.(*jsonrpc2.Request).ID.Raw(); got != want { diff --git a/internal/mcp/util.go b/internal/mcp/util.go index 13f2056fb6c..5b7afbbfe1c 100644 --- a/internal/mcp/util.go +++ b/internal/mcp/util.go @@ -4,6 +4,8 @@ package mcp +import "crypto/rand" + func assert(cond bool, msg string) { if !cond { panic(msg) @@ -14,3 +16,17 @@ func is[T any](v any) bool { _, ok := v.(T) return ok } + +// Copied from crypto/rand. +// TODO: once 1.24 is assured, just use crypto/rand. +const base32alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567" + +func randText() string { + // ⌈log₃₂ 2¹²⁸⌉ = 26 chars + src := make([]byte, 26) + rand.Read(src) + for i := range src { + src[i] = base32alphabet[src[i]%32] + } + return string(src) +} From 2863098bb42066e9694a60d0674606f4ca179f47 Mon Sep 17 00:00:00 2001 From: Rob Findley Date: Wed, 23 Apr 2025 16:03:54 +0000 Subject: [PATCH 236/270] internal/mcp: implement ping, and test request interleaving Also validate that requests observe the lifecycle rules defined in the spec. Change-Id: I302282f81e053e76926e0bb295f7dbf8d526f02d Reviewed-on: https://go-review.googlesource.com/c/tools/+/667575 Reviewed-by: Jonathan Amsterdam Commit-Queue: Robert Findley Auto-Submit: Robert Findley TryBot-Bypass: Robert Findley --- internal/mcp/client.go | 18 +++++++--- internal/mcp/examples/hello/main.go | 2 +- internal/mcp/mcp_test.go | 13 ++++++-- internal/mcp/server.go | 51 +++++++++++++++++++++++++---- internal/mcp/sse_test.go | 3 ++ internal/mcp/tool.go | 8 ++--- internal/mcp/util.go | 4 ++- 7 files changed, 81 insertions(+), 18 deletions(-) diff --git a/internal/mcp/client.go b/internal/mcp/client.go index 65fe84a48e7..6883843311e 100644 --- a/internal/mcp/client.go +++ b/internal/mcp/client.go @@ -81,9 +81,9 @@ func (c *Client) disconnect(sc *ServerConnection) { // Connect connects the MCP client over the given transport and initializes an // MCP session. // -// It returns a connection object that may be used to query the MCP server, -// terminate the connection (with [Connection.Close]), or await server -// termination (with [Connection.Wait]). +// It returns an initialized [ServerConnection] object that may be used to +// query the MCP server, terminate the connection (with [Connection.Close]), or +// await server termination (with [Connection.Wait]). // // Typically, it is the responsibility of the client to close the connection // when it is no longer needed. However, if the connection is closed by the @@ -105,7 +105,7 @@ func (c *Client) Connect(ctx context.Context, t Transport, opts *ConnectionOptio if err := call(ctx, sc.conn, "initialize", params, &sc.initializeResult); err != nil { return nil, err } - if err := sc.conn.Notify(ctx, "initialized", &protocol.InitializedParams{}); err != nil { + if err := sc.conn.Notify(ctx, "notifications/initialized", &protocol.InitializedParams{}); err != nil { return nil, err } return sc, nil @@ -135,11 +135,21 @@ func (cc *ServerConnection) Wait() error { } func (sc *ServerConnection) handle(ctx context.Context, req *jsonrpc2.Request) (any, error) { + // No need to check that the connection is initialized, since we initialize + // it in Connect. switch req.Method { + case "ping": + // The spec says that 'ping' expects an empty object result. 
+ return struct{}{}, nil } return nil, jsonrpc2.ErrNotHandled } +// Ping makes an MCP "ping" request to the server. +func (sc *ServerConnection) Ping(ctx context.Context) error { + return call(ctx, sc.conn, "ping", nil, nil) +} + // ListTools lists tools that are currently available on the server. func (sc *ServerConnection) ListTools(ctx context.Context) ([]protocol.Tool, error) { var ( diff --git a/internal/mcp/examples/hello/main.go b/internal/mcp/examples/hello/main.go index 74c016a059b..5254d658710 100644 --- a/internal/mcp/examples/hello/main.go +++ b/internal/mcp/examples/hello/main.go @@ -20,7 +20,7 @@ type SayHiParams struct { Name string `json:"name" mcp:"the name to say hi to"` } -func SayHi(ctx context.Context, params *SayHiParams) ([]mcp.Content, error) { +func SayHi(ctx context.Context, cc *mcp.ClientConnection, params *SayHiParams) ([]mcp.Content, error) { return []mcp.Content{ mcp.TextContent{Text: "Hi " + params.Name}, }, nil diff --git a/internal/mcp/mcp_test.go b/internal/mcp/mcp_test.go index dff7d92a6fd..598c2202c58 100644 --- a/internal/mcp/mcp_test.go +++ b/internal/mcp/mcp_test.go @@ -7,6 +7,7 @@ package mcp import ( "context" "errors" + "fmt" "slices" "strings" "sync" @@ -22,7 +23,10 @@ type hiParams struct { Name string } -func sayHi(_ context.Context, v hiParams) ([]Content, error) { +func sayHi(ctx context.Context, cc *ClientConnection, v hiParams) ([]Content, error) { + if err := cc.Ping(ctx); err != nil { + return nil, fmt.Errorf("ping failed: %v", err) + } return []Content{TextContent{Text: "hi " + v.Name}}, nil } @@ -37,7 +41,7 @@ func TestEndToEnd(t *testing.T) { // The 'fail' tool returns this error. failure := errors.New("mcp failure") - s.AddTools(MakeTool("fail", "just fail", func(context.Context, struct{}) ([]Content, error) { + s.AddTools(MakeTool("fail", "just fail", func(context.Context, *ClientConnection, struct{}) ([]Content, error) { return nil, failure })) @@ -67,10 +71,15 @@ func TestEndToEnd(t *testing.T) { if err != nil { t.Fatal(err) } + if got := slices.Collect(c.Servers()); len(got) != 1 { t.Errorf("after connection, Servers() has length %d, want 1", len(got)) } + if err := sc.Ping(ctx); err != nil { + t.Fatalf("ping failed: %v", err) + } + gotTools, err := sc.ListTools(ctx) if err != nil { t.Errorf("tools/list failed: %v", err) diff --git a/internal/mcp/server.go b/internal/mcp/server.go index 4e76ba6fd22..0be51944678 100644 --- a/internal/mcp/server.go +++ b/internal/mcp/server.go @@ -82,7 +82,7 @@ func (s *Server) listTools(_ context.Context, _ *ClientConnection, params *proto return res, nil } -func (s *Server) callTool(ctx context.Context, _ *ClientConnection, params *protocol.CallToolParams) (*protocol.CallToolResult, error) { +func (s *Server) callTool(ctx context.Context, cc *ClientConnection, params *protocol.CallToolParams) (*protocol.CallToolResult, error) { s.mu.Lock() var tool *Tool if i := slices.IndexFunc(s.tools, func(t *Tool) bool { @@ -95,7 +95,7 @@ func (s *Server) callTool(ctx context.Context, _ *ClientConnection, params *prot if tool == nil { return nil, fmt.Errorf("%s: unknown tool %q", jsonrpc2.ErrInvalidParams, params.Name) } - return tool.Handler(ctx, params.Arguments) + return tool.Handler(ctx, cc, params.Arguments) } // Run runs the server over the given transport, which must be persistent. 
@@ -148,10 +148,31 @@ type ClientConnection struct { conn *jsonrpc2.Connection mu sync.Mutex - initializeParams *protocol.InitializeParams // set once initialize has been received + initializeParams *protocol.InitializeParams + initialized bool +} + +// Ping makes an MCP "ping" request to the client. +func (cc *ClientConnection) Ping(ctx context.Context) error { + return call(ctx, cc.conn, "ping", nil, nil) } func (cc *ClientConnection) handle(ctx context.Context, req *jsonrpc2.Request) (any, error) { + cc.mu.Lock() + initialized := cc.initialized + cc.mu.Unlock() + + // From the spec: + // "The client SHOULD NOT send requests other than pings before the server + // has responded to the initialize request." + switch req.Method { + case "initialize", "ping": + default: + if !initialized { + return nil, fmt.Errorf("method %q is invalid during session ininitialization", req.Method) + } + } + // TODO: embed the incoming request ID in the ClientContext (or, more likely, // a wrapper around it), so that we can correlate responses and notifications // to the handler; this is required for the new session-based transport. @@ -160,6 +181,10 @@ func (cc *ClientConnection) handle(ctx context.Context, req *jsonrpc2.Request) ( case "initialize": return dispatch(ctx, cc, req, cc.initialize) + case "ping": + // The spec says that 'ping' expects an empty object result. + return struct{}{}, nil + case "tools/list": return dispatch(ctx, cc, req, cc.server.listTools) @@ -176,6 +201,17 @@ func (cc *ClientConnection) initialize(ctx context.Context, _ *ClientConnection, cc.initializeParams = params cc.mu.Unlock() + // Mark the connection as initialized when this method exits. TODO: + // Technically, the server should not be considered initialized until it has + // *responded*, but we don't have adequate visibility into the jsonrpc2 + // connection to implement that easily. In any case, once we've initialized + // here, we can handle requests. + defer func() { + cc.mu.Lock() + cc.initialized = true + cc.mu.Unlock() + }() + return &protocol.InitializeResult{ // TODO(rfindley): support multiple protocol versions. ProtocolVersion: "2024-11-05", @@ -204,11 +240,14 @@ func (cc *ClientConnection) Wait() error { return cc.conn.Wait() } -func dispatch[TParams, TResult any](ctx context.Context, conn *ClientConnection, req *jsonrpc2.Request, f func(context.Context, *ClientConnection, TParams) (TResult, error)) (TResult, error) { +// dispatch turns a strongly type handler into a jsonrpc2 handler. +// +// Importantly, it returns nil if the handler returned an error, which is a +// requirement of the jsonrpc2 package. 
+func dispatch[TConn, TParams, TResult any](ctx context.Context, conn TConn, req *jsonrpc2.Request, f func(context.Context, TConn, TParams) (TResult, error)) (any, error) { var params TParams if err := json.Unmarshal(req.Params, ¶ms); err != nil { - var zero TResult - return zero, err + return nil, err } return f(ctx, conn, params) } diff --git a/internal/mcp/sse_test.go b/internal/mcp/sse_test.go index 0463ab2308d..a12825a1301 100644 --- a/internal/mcp/sse_test.go +++ b/internal/mcp/sse_test.go @@ -41,6 +41,9 @@ func TestSSEServer(t *testing.T) { if err != nil { t.Fatal(err) } + if err := sc.Ping(ctx); err != nil { + t.Fatal(err) + } cc := <-clients gotHi, err := sc.CallTool(ctx, "greet", hiParams{"user"}) if err != nil { diff --git a/internal/mcp/tool.go b/internal/mcp/tool.go index 7f1de944526..9859c04aa1d 100644 --- a/internal/mcp/tool.go +++ b/internal/mcp/tool.go @@ -13,7 +13,7 @@ import ( ) // A ToolHandler handles a call to tools/call. -type ToolHandler func(context.Context, json.RawMessage) (*protocol.CallToolResult, error) +type ToolHandler func(context.Context, *ClientConnection, json.RawMessage) (*protocol.CallToolResult, error) // A Tool is a tool definition that is bound to a tool handler. type Tool struct { @@ -29,17 +29,17 @@ type Tool struct { // It is the caller's responsibility that the handler request type can produce // a valid schema, as documented by [jsonschema.ForType]; otherwise, MakeTool // panics. -func MakeTool[TReq any](name, description string, handler func(context.Context, TReq) ([]Content, error)) *Tool { +func MakeTool[TReq any](name, description string, handler func(context.Context, *ClientConnection, TReq) ([]Content, error)) *Tool { schema, err := jsonschema.ForType[TReq]() if err != nil { panic(err) } - wrapped := func(ctx context.Context, args json.RawMessage) (*protocol.CallToolResult, error) { + wrapped := func(ctx context.Context, cc *ClientConnection, args json.RawMessage) (*protocol.CallToolResult, error) { var v TReq if err := unmarshalSchema(args, schema, &v); err != nil { return nil, err } - content, err := handler(ctx, v) + content, err := handler(ctx, cc, v) if err != nil { return &protocol.CallToolResult{ Content: marshalContent([]Content{TextContent{Text: err.Error()}}), diff --git a/internal/mcp/util.go b/internal/mcp/util.go index 5b7afbbfe1c..64d4d4851d0 100644 --- a/internal/mcp/util.go +++ b/internal/mcp/util.go @@ -4,7 +4,9 @@ package mcp -import "crypto/rand" +import ( + "crypto/rand" +) func assert(cond bool, msg string) { if !cond { From d3a3775b0aff296cc4f01394543cac8484e04c0e Mon Sep 17 00:00:00 2001 From: Rob Findley Date: Wed, 23 Apr 2025 19:29:46 +0000 Subject: [PATCH 237/270] internal/mcp: implement cancellation Use the existing jsonrpc2 preemption mechanism to implement MCP cancellation. This was mostly straightforward, except where I got confused about ID unmarshaling. Leave a note about using omitempty when it is available. 
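
For illustration, the preemption pattern mirrors the gopls Canceler updated
below: a sketch only, since the real hook is the change to
internal/mcp/transport.go; the struct and method names here are illustrative,
and "notifications/cancelled" is the cancellation notification method from the
MCP spec rather than an identifier in this CL.

    package mcp

    import (
        "context"
        "encoding/json"
        "fmt"

        jsonrpc2 "golang.org/x/tools/internal/jsonrpc2_v2"
        "golang.org/x/tools/internal/mcp/internal/protocol"
    )

    type canceler struct{ conn *jsonrpc2.Connection }

    func (c *canceler) Preempt(ctx context.Context, req *jsonrpc2.Request) (any, error) {
        if req.Method != "notifications/cancelled" {
            return nil, jsonrpc2.ErrNotHandled
        }
        var params protocol.CancelledParams
        if err := json.Unmarshal(req.Params, &params); err != nil {
            return nil, fmt.Errorf("%w: %v", jsonrpc2.ErrParse, err)
        }
        // The wire ID arrives as nil, float64, or string; MakeID coerces it.
        id, err := jsonrpc2.MakeID(params.RequestId)
        if err != nil {
            return nil, err
        }
        c.conn.Cancel(id) // cancels the context of the matching in-flight call
        return nil, jsonrpc2.ErrNotHandled
    }
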
Change-Id: I13e073f08c5d5c2cc78d882da4e6ff47f09fb340
Reviewed-on: https://go-review.googlesource.com/c/tools/+/667578
LUCI-TryBot-Result: Go LUCI
Reviewed-by: Jonathan Amsterdam
---
 gopls/internal/lsprpc/export_test.go       |  9 +---
 internal/jsonrpc2_v2/messages.go           | 38 ++++++++++------
 internal/mcp/internal/protocol/generate.go |  6 ++-
 internal/mcp/internal/protocol/protocol.go | 11 +++++
 internal/mcp/mcp.go                        |  1 -
 internal/mcp/mcp_test.go                   | 48 ++++++++++++++++++++-
 internal/mcp/server.go                     |  2 +-
 internal/mcp/transport.go                  | 50 +++++++++++++++++++---
 8 files changed, 134 insertions(+), 31 deletions(-)

diff --git a/gopls/internal/lsprpc/export_test.go b/gopls/internal/lsprpc/export_test.go
index 5050d3eda44..1caf22415cb 100644
--- a/gopls/internal/lsprpc/export_test.go
+++ b/gopls/internal/lsprpc/export_test.go
@@ -34,13 +34,8 @@ func (c *Canceler) Preempt(ctx context.Context, req *jsonrpc2_v2.Request) (any,
 	if err := json.Unmarshal(req.Params, &params); err != nil {
 		return nil, fmt.Errorf("%w: %v", jsonrpc2_v2.ErrParse, err)
 	}
-	var id jsonrpc2_v2.ID
-	switch raw := params.ID.(type) {
-	case float64:
-		id = jsonrpc2_v2.Int64ID(int64(raw))
-	case string:
-		id = jsonrpc2_v2.StringID(raw)
-	default:
+	id, err := jsonrpc2_v2.MakeID(params.ID)
+	if err != nil {
 		return nil, fmt.Errorf("%w: invalid ID type %T", jsonrpc2_v2.ErrParse, params.ID)
 	}
 	c.Conn.Cancel(id)
diff --git a/internal/jsonrpc2_v2/messages.go b/internal/jsonrpc2_v2/messages.go
index 9cfe6e70fe5..3b2ebc7afeb 100644
--- a/internal/jsonrpc2_v2/messages.go
+++ b/internal/jsonrpc2_v2/messages.go
@@ -10,11 +10,32 @@ import (
 	"fmt"
 )
 
-// ID is a Request identifier.
+// ID is a Request identifier, which is defined by the spec to be a string, integer, or null.
+// https://www.jsonrpc.org/specification#request_object
 type ID struct {
 	value any
 }
 
+// MakeID coerces the given Go value to an ID. The value is assumed to be the
+// default JSON marshaling of a Request identifier -- nil, float64, or string.
+//
+// Returns an error if the value type was not a valid Request ID type.
+//
+// TODO: ID can't be a json.Marshaler/Unmarshaler, because we want to omitzero.
+// Simplify this package by making ID json serializable once we can rely on
+// omitzero.
+func MakeID(v any) (ID, error) {
+	switch v := v.(type) {
+	case nil:
+		return ID{}, nil
+	case float64:
+		return Int64ID(int64(v)), nil
+	case string:
+		return StringID(v), nil
+	}
+	return ID{}, fmt.Errorf("%w: invalid ID type %T", ErrParse, v)
+}
+
 // Message is the interface to all jsonrpc2 message types.
 // They share no common functionality, but are a closed set of concrete types
 // that are allowed to implement this interface. 
The message types are *Request @@ -133,18 +154,9 @@ func DecodeMessage(data []byte) (Message, error) { if msg.VersionTag != wireVersion { return nil, fmt.Errorf("invalid message version tag %s expected %s", msg.VersionTag, wireVersion) } - id := ID{} - switch v := msg.ID.(type) { - case nil: - case float64: - // coerce the id type to int64 if it is float64, the spec does not allow fractional parts - id = Int64ID(int64(v)) - case int64: - id = Int64ID(v) - case string: - id = StringID(v) - default: - return nil, fmt.Errorf("invalid message id type <%T>%v", v, v) + id, err := MakeID(msg.ID) + if err != nil { + return nil, err } if msg.Method != "" { // has a method, must be a call diff --git a/internal/mcp/internal/protocol/generate.go b/internal/mcp/internal/protocol/generate.go index e4f430b750b..d993a2b014f 100644 --- a/internal/mcp/internal/protocol/generate.go +++ b/internal/mcp/internal/protocol/generate.go @@ -56,6 +56,9 @@ var declarations = config{ "CallToolResult": { Name: "CallToolResult", }, + "CancelledNotification": { + Fields: config{"Params": {Name: "CancelledParams"}}, + }, "ClientCapabilities": {Name: "ClientCapabilities"}, "Implementation": {Name: "Implementation"}, "InitializeRequest": { @@ -73,7 +76,8 @@ var declarations = config{ "ListToolsResult": { Name: "ListToolsResult", }, - "Role": {Name: "Role"}, + "RequestId": {Substitute: "any"}, // null|number|string + "Role": {Name: "Role"}, "ServerCapabilities": { Name: "ServerCapabilities", Fields: config{ diff --git a/internal/mcp/internal/protocol/protocol.go b/internal/mcp/internal/protocol/protocol.go index 080c6d194da..b460e950801 100644 --- a/internal/mcp/internal/protocol/protocol.go +++ b/internal/mcp/internal/protocol/protocol.go @@ -54,6 +54,17 @@ type CallToolResult struct { IsError bool `json:"isError,omitempty"` } +type CancelledParams struct { + // An optional string describing the reason for the cancellation. This MAY be + // logged or presented to the user. + Reason string `json:"reason,omitempty"` + // The ID of the request to cancel. + // + // This MUST correspond to the ID of a request previously issued in the same + // direction. + RequestId any `json:"requestId"` +} + // Capabilities a client may support. Known capabilities are defined here, in // this schema, but this is not a closed set: any client can define its own, // additional capabilities. diff --git a/internal/mcp/mcp.go b/internal/mcp/mcp.go index 1dc6413fbc4..b911e959b91 100644 --- a/internal/mcp/mcp.go +++ b/internal/mcp/mcp.go @@ -12,7 +12,6 @@ // [Client.Connect] or [Server.Connect]. // // TODO: -// - Support cancellation. // - Support pagination. // - Support all client/server operations. // - Support Streamable HTTP transport. diff --git a/internal/mcp/mcp_test.go b/internal/mcp/mcp_test.go index 598c2202c58..8b6a47fe77d 100644 --- a/internal/mcp/mcp_test.go +++ b/internal/mcp/mcp_test.go @@ -133,14 +133,21 @@ func TestEndToEnd(t *testing.T) { } } -func TestServerClosing(t *testing.T) { +// basicConnection returns a new basic client-server connection configured with +// the provided tools. +// +// The caller should cancel either the client connection or server connection +// when the connections are no longer needed. +func basicConnection(t *testing.T, tools ...*Tool) (*ClientConnection, *ServerConnection) { + t.Helper() + ctx := context.Background() ct, st := NewLocalTransport() s := NewServer("testServer", "v1.0.0", nil) // The 'greet' tool says hi. - s.AddTools(MakeTool("greet", "say hi", sayHi)) + s.AddTools(tools...) 
cc, err := s.Connect(ctx, st, nil) if err != nil { t.Fatal(err) @@ -151,7 +158,14 @@ func TestServerClosing(t *testing.T) { if err != nil { t.Fatal(err) } + return cc, sc +} +func TestServerClosing(t *testing.T) { + cc, sc := basicConnection(t, MakeTool("greet", "say hi", sayHi)) + defer sc.Close() + + ctx := context.Background() var wg sync.WaitGroup wg.Add(1) go func() { @@ -209,3 +223,33 @@ func TestBatching(t *testing.T) { } } + +func TestCancellation(t *testing.T) { + var ( + start = make(chan struct{}) + cancelled = make(chan struct{}, 1) // don't block the request + ) + + slowRequest := func(ctx context.Context, cc *ClientConnection, v struct{}) ([]Content, error) { + start <- struct{}{} + select { + case <-ctx.Done(): + cancelled <- struct{}{} + case <-time.After(5 * time.Second): + return nil, nil + } + return nil, nil + } + _, sc := basicConnection(t, MakeTool("slow", "a slow request", slowRequest)) + defer sc.Close() + + ctx, cancel := context.WithCancel(context.Background()) + go sc.CallTool(ctx, "slow", struct{}{}) + <-start + cancel() + select { + case <-cancelled: + case <-time.After(5 * time.Second): + t.Fatal("timeout waiting for cancellation") + } +} diff --git a/internal/mcp/server.go b/internal/mcp/server.go index 0be51944678..527cddad66b 100644 --- a/internal/mcp/server.go +++ b/internal/mcp/server.go @@ -240,7 +240,7 @@ func (cc *ClientConnection) Wait() error { return cc.conn.Wait() } -// dispatch turns a strongly type handler into a jsonrpc2 handler. +// dispatch turns a strongly type request handler into a jsonrpc2 handler. // // Importantly, it returns nil if the handler returned an error, which is a // requirement of the jsonrpc2 package. diff --git a/internal/mcp/transport.go b/internal/mcp/transport.go index 9cce1a2cf54..416fc5300c3 100644 --- a/internal/mcp/transport.go +++ b/internal/mcp/transport.go @@ -15,6 +15,8 @@ import ( "sync" jsonrpc2 "golang.org/x/tools/internal/jsonrpc2_v2" + "golang.org/x/tools/internal/mcp/internal/protocol" + "golang.org/x/tools/internal/xcontext" ) // A JSONRPC2 error is an error defined by the JSONRPC2 spec. @@ -103,31 +105,67 @@ func connect[H handler](ctx context.Context, t Transport, opts *ConnectionOption writer = loggingWriter(opts.Logger, writer) } - var h H + var ( + h H + preempter canceller + ) bind := func(conn *jsonrpc2.Connection) jsonrpc2.Handler { h = b.bind(conn) + preempter.conn = conn return jsonrpc2.HandlerFunc(h.handle) } _ = jsonrpc2.NewConnection(ctx, jsonrpc2.ConnectionConfig{ - Reader: reader, - Writer: writer, - Closer: stream, - Bind: bind, + Reader: reader, + Writer: writer, + Closer: stream, + Bind: bind, + Preempter: &preempter, OnDone: func() { b.disconnect(h) }, }) + assert(preempter.conn != nil, "unbound preempter") assert(h != zero, "unbound connection") return h, nil } +// A canceller is a jsonrpc2.Preempter that cancels in-flight requests on MCP +// cancelled notifications. +type canceller struct { + conn *jsonrpc2.Connection +} + +// Preempt implements jsonrpc2.Preempter. +func (c *canceller) Preempt(ctx context.Context, req *jsonrpc2.Request) (result any, err error) { + if req.Method == "notifications/cancelled" { + var params protocol.CancelledParams + if err := json.Unmarshal(req.Params, ¶ms); err != nil { + return nil, err + } + id, err := jsonrpc2.MakeID(params.RequestId) + if err != nil { + return nil, err + } + go c.conn.Cancel(id) + } + return nil, jsonrpc2.ErrNotHandled +} + // call executes and awaits a jsonrpc2 call on the given connection, // translating errors into the mcp domain. 
func call(ctx context.Context, conn *jsonrpc2.Connection, method string, params, result any) error { - err := conn.Call(ctx, method, params).Await(ctx, result) + call := conn.Call(ctx, method, params) + err := call.Await(ctx, result) switch { case errors.Is(err, jsonrpc2.ErrClientClosing), errors.Is(err, jsonrpc2.ErrServerClosing): return fmt.Errorf("calling %q: %w", method, ErrConnectionClosed) + case ctx.Err() != nil: + // Notify the peer of cancellation. + err := conn.Notify(xcontext.Detach(ctx), "notifications/cancelled", &protocol.CancelledParams{ + Reason: ctx.Err().Error(), + RequestId: call.ID().Raw(), + }) + return errors.Join(ctx.Err(), err) case err != nil: return fmt.Errorf("calling %q: %v", method, err) } From cd9151d4acc6a1a2baaa9d6ed462d401b1e1146c Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Wed, 23 Apr 2025 12:16:10 -0400 Subject: [PATCH 238/270] gopls/internal/cache: fix bug in toGobDiagnostic(Related) Analyzers report Diagnostics, SuggestedFixes, and RelatedInformation containing token.Pos positions that need to be mapped to protocol.Location. Unlike the first two kinds, RelatedInformation positions needn't be in the current package, so the existing assertion was overstrict. This CL relaxes the assertion and attempts to map positions in dependencies to protocol.Location form, though it can do so at best heuristically since Mappers are not generally available. Also, extract posToLocation to a top-level function. + test Fixes golang/go#70791 Change-Id: Ibec2804b1b4b0574edd53b8ae2e0484630f01d6e Reviewed-on: https://go-review.googlesource.com/c/tools/+/667517 Auto-Submit: Alan Donovan LUCI-TryBot-Result: Go LUCI Reviewed-by: Jonathan Amsterdam --- gopls/internal/cache/analysis.go | 221 ++++++++++-------- .../testdata/diagnostics/issue70791.txt | 29 +++ 2 files changed, 158 insertions(+), 92 deletions(-) create mode 100644 gopls/internal/test/marker/testdata/diagnostics/issue70791.txt diff --git a/gopls/internal/cache/analysis.go b/gopls/internal/cache/analysis.go index 747fac913ce..cbe30c6f794 100644 --- a/gopls/internal/cache/analysis.go +++ b/gopls/internal/cache/analysis.go @@ -44,6 +44,7 @@ import ( "golang.org/x/tools/gopls/internal/util/frob" "golang.org/x/tools/gopls/internal/util/moremaps" "golang.org/x/tools/gopls/internal/util/persistent" + "golang.org/x/tools/gopls/internal/util/safetoken" "golang.org/x/tools/internal/analysisinternal" "golang.org/x/tools/internal/event" "golang.org/x/tools/internal/facts" @@ -1019,93 +1020,6 @@ func (act *action) exec(ctx context.Context) (any, *actionSummary, error) { factFilter[reflect.TypeOf(f)] = true } - // posToLocation converts from token.Pos to protocol form. - posToLocation := func(start, end token.Pos) (protocol.Location, error) { - tokFile := apkg.pkg.FileSet().File(start) - - // Find existing mapper by file name. - // (Don't require an exact token.File match - // as the analyzer may have re-parsed the file.) - var ( - mapper *protocol.Mapper - fixed bool - ) - for _, p := range apkg.pkg.CompiledGoFiles() { - if p.Tok.Name() == tokFile.Name() { - mapper = p.Mapper - fixed = p.Fixed() // suppress some assertions after parser recovery - break - } - } - if mapper == nil { - // The start position was not among the package's parsed - // Go files, indicating that the analyzer added new files - // to the FileSet. 
- // - // For example, the cgocall analyzer re-parses and - // type-checks some of the files in a special environment; - // and asmdecl and other low-level runtime analyzers call - // ReadFile to parse non-Go files. - // (This is a supported feature, documented at go/analysis.) - // - // In principle these files could be: - // - // - OtherFiles (non-Go files such as asm). - // However, we set Pass.OtherFiles=[] because - // gopls won't service "diagnose" requests - // for non-Go files, so there's no point - // reporting diagnostics in them. - // - // - IgnoredFiles (files tagged for other configs). - // However, we set Pass.IgnoredFiles=[] because, - // in most cases, zero-config gopls should create - // another view that covers these files. - // - // - Referents of //line directives, as in cgo packages. - // The file names in this case are not known a priori. - // gopls generally tries to avoid honoring line directives, - // but analyzers such as cgocall may honor them. - // - // In short, it's unclear how this can be reached - // other than due to an analyzer bug. - return protocol.Location{}, bug.Errorf("diagnostic location is not among files of package: %s", tokFile.Name()) - } - // Inv: mapper != nil - - if end == token.NoPos { - end = start - } - - // debugging #64547 - fileStart := token.Pos(tokFile.Base()) - fileEnd := fileStart + token.Pos(tokFile.Size()) - if start < fileStart { - if !fixed { - bug.Reportf("start < start of file") - } - start = fileStart - } - if end < start { - // This can happen if End is zero (#66683) - // or a small positive displacement from zero - // due to recursive Node.End() computation. - // This usually arises from poor parser recovery - // of an incomplete term at EOF. - if !fixed { - bug.Reportf("end < start of file") - } - end = fileEnd - } - if end > fileEnd+1 { - if !fixed { - bug.Reportf("end > end of file + 1") - } - end = fileEnd - } - - return mapper.PosLocation(tokFile, start, end) - } - // Now run the (pkg, analyzer) action. var diagnostics []gobDiagnostic @@ -1130,7 +1044,7 @@ func (act *action) exec(ctx context.Context) (any, *actionSummary, error) { bug.Reportf("invalid SuggestedFixes: %v", err) d.SuggestedFixes = nil } - diagnostic, err := toGobDiagnostic(posToLocation, analyzer, d) + diagnostic, err := toGobDiagnostic(apkg.pkg, analyzer, d) if err != nil { // Don't bug.Report here: these errors all originate in // posToLocation, and we can more accurately discriminate @@ -1322,12 +1236,12 @@ type gobTextEdit struct { // toGobDiagnostic converts an analysis.Diagnosic to a serializable gobDiagnostic, // which requires expanding token.Pos positions into protocol.Location form. 
-func toGobDiagnostic(posToLocation func(start, end token.Pos) (protocol.Location, error), a *analysis.Analyzer, diag analysis.Diagnostic) (gobDiagnostic, error) { +func toGobDiagnostic(pkg *Package, a *analysis.Analyzer, diag analysis.Diagnostic) (gobDiagnostic, error) { var fixes []gobSuggestedFix for _, fix := range diag.SuggestedFixes { var gobEdits []gobTextEdit for _, textEdit := range fix.TextEdits { - loc, err := posToLocation(textEdit.Pos, textEdit.End) + loc, err := diagnosticPosToLocation(pkg, false, textEdit.Pos, textEdit.End) if err != nil { return gobDiagnostic{}, fmt.Errorf("in SuggestedFixes: %w", err) } @@ -1344,7 +1258,10 @@ func toGobDiagnostic(posToLocation func(start, end token.Pos) (protocol.Location var related []gobRelatedInformation for _, r := range diag.Related { - loc, err := posToLocation(r.Pos, r.End) + // The position of RelatedInformation may be + // within another (dependency) package. + const allowDeps = true + loc, err := diagnosticPosToLocation(pkg, allowDeps, r.Pos, r.End) if err != nil { return gobDiagnostic{}, fmt.Errorf("in Related: %w", err) } @@ -1354,7 +1271,7 @@ func toGobDiagnostic(posToLocation func(start, end token.Pos) (protocol.Location }) } - loc, err := posToLocation(diag.Pos, diag.End) + loc, err := diagnosticPosToLocation(pkg, false, diag.Pos, diag.End) if err != nil { return gobDiagnostic{}, err } @@ -1382,6 +1299,126 @@ func toGobDiagnostic(posToLocation func(start, end token.Pos) (protocol.Location }, nil } +// diagnosticPosToLocation converts from token.Pos to protocol form, in the +// context of the specified package and, optionally, its dependencies. +func diagnosticPosToLocation(pkg *Package, allowDeps bool, start, end token.Pos) (protocol.Location, error) { + if end == token.NoPos { + end = start + } + + fset := pkg.FileSet() + tokFile := fset.File(start) + + // Find existing mapper by file name. + // (Don't require an exact token.File match + // as the analyzer may have re-parsed the file.) + var ( + mapper *protocol.Mapper + fixed bool + ) + for _, p := range pkg.CompiledGoFiles() { + if p.Tok.Name() == tokFile.Name() { + mapper = p.Mapper + fixed = p.Fixed() // suppress some assertions after parser recovery + break + } + } + // TODO(adonovan): search pkg.AsmFiles too; see #71754. + if mapper != nil { + // debugging #64547 + fileStart := token.Pos(tokFile.Base()) + fileEnd := fileStart + token.Pos(tokFile.Size()) + if start < fileStart { + if !fixed { + bug.Reportf("start < start of file") + } + start = fileStart + } + if end < start { + // This can happen if End is zero (#66683) + // or a small positive displacement from zero + // due to recursive Node.End() computation. + // This usually arises from poor parser recovery + // of an incomplete term at EOF. + if !fixed { + bug.Reportf("end < start of file") + } + end = fileEnd + } + if end > fileEnd+1 { + if !fixed { + bug.Reportf("end > end of file + 1") + } + end = fileEnd + } + + return mapper.PosLocation(tokFile, start, end) + } + + // Inv: the positions are not within this package. + + if allowDeps { + // Positions in Diagnostic.RelatedInformation may belong to a + // dependency package. We cannot accurately map them to + // protocol.Location coordinates without a Mapper for the + // relevant file, but none exists if the file was loaded from + // export data, and we have no means (Snapshot) of loading it. + // + // So, fall back to approximate conversion to UTF-16: + // for non-ASCII text, the column numbers may be wrong. 
+ var ( + startPosn = safetoken.StartPosition(fset, start) + endPosn = safetoken.EndPosition(fset, end) + ) + return protocol.Location{ + URI: protocol.URIFromPath(startPosn.Filename), + Range: protocol.Range{ + Start: protocol.Position{ + Line: uint32(startPosn.Line - 1), + Character: uint32(startPosn.Column - 1), + }, + End: protocol.Position{ + Line: uint32(endPosn.Line - 1), + Character: uint32(endPosn.Column - 1), + }, + }, + }, nil + } + + // The start position was not among the package's parsed + // Go files, indicating that the analyzer added new files + // to the FileSet. + // + // For example, the cgocall analyzer re-parses and + // type-checks some of the files in a special environment; + // and asmdecl and other low-level runtime analyzers call + // ReadFile to parse non-Go files. + // (This is a supported feature, documented at go/analysis.) + // + // In principle these files could be: + // + // - OtherFiles (non-Go files such as asm). + // However, we set Pass.OtherFiles=[] because + // gopls won't service "diagnose" requests + // for non-Go files, so there's no point + // reporting diagnostics in them. + // + // - IgnoredFiles (files tagged for other configs). + // However, we set Pass.IgnoredFiles=[] because, + // in most cases, zero-config gopls should create + // another view that covers these files. + // + // - Referents of //line directives, as in cgo packages. + // The file names in this case are not known a priori. + // gopls generally tries to avoid honoring line directives, + // but analyzers such as cgocall may honor them. + // + // In short, it's unclear how this can be reached + // other than due to an analyzer bug. + + return protocol.Location{}, bug.Errorf("diagnostic location is not among files of package: %s", tokFile.Name()) +} + // effectiveURL computes the effective URL of diag, // using the algorithm specified at Diagnostic.URL. func effectiveURL(a *analysis.Analyzer, diag analysis.Diagnostic) string { diff --git a/gopls/internal/test/marker/testdata/diagnostics/issue70791.txt b/gopls/internal/test/marker/testdata/diagnostics/issue70791.txt new file mode 100644 index 00000000000..b531354416c --- /dev/null +++ b/gopls/internal/test/marker/testdata/diagnostics/issue70791.txt @@ -0,0 +1,29 @@ +In addition to the Diagnostic, the SA4023 analyzer reports a +RelatedInformation at the position of b.B, in an another package. +Since this is in a dependency package, we cannot resolve to +protocol.Location coordinates. This used to trigger an assertion, but +now we resolve the location approximately. + +This is a regression test for #70791. + +-- settings.json -- +{"analyses": {"SA4023": true}} + +-- go.mod -- +module example.com +go 1.18 + +-- a/a.go -- +package a + +import "example.com/b" + +var _ = b.B() == nil //@ diag("b.B", re"comparison is never true") + +-- b/b.go -- +package b + +func B() any { return (*int)(nil) } + + + From 2f3d4ad5ff006ae9ad212bee2f6f56ab07b0366d Mon Sep 17 00:00:00 2001 From: Alan Donovan Date: Thu, 24 Apr 2025 14:14:50 -0400 Subject: [PATCH 239/270] go/packages: add variant of TestRmdirAfterGoList without gocommand This CL adds the test in the first patchset of CL 647516, which executes go list directly, thus providing a control for the substantial gocommand wrapper package. 
Updates golang/go#71544 Updates golang/go#73481 Change-Id: I3dbc91cb1144bd5cafbd438817a17abda1c811ae Reviewed-on: https://go-review.googlesource.com/c/tools/+/667857 Auto-Submit: Alan Donovan LUCI-TryBot-Result: Go LUCI Reviewed-by: Sam Thanawalla --- go/packages/packages_test.go | 62 ++++++++++++++++++++++++++---------- 1 file changed, 46 insertions(+), 16 deletions(-) diff --git a/go/packages/packages_test.go b/go/packages/packages_test.go index 2911d595c34..aa86ae6842c 100644 --- a/go/packages/packages_test.go +++ b/go/packages/packages_test.go @@ -3408,8 +3408,51 @@ func writeTree(t *testing.T, archive string) string { // finished. It is intended to evaluate the hypothesis (to explain // issue #71544) that the go command, on Windows, occasionally fails // to release all its handles to the temporary directory even when it -// should have finished. If this test ever fails, the go command has a bug. -func TestRmdirAfterGoList(t *testing.T) { +// should have finished. +// +// If this test ever fails, the combination of the gocommand package +// and the go command itself has a bug. +func TestRmdirAfterGoList_Runner(t *testing.T) { + testRmdirAfterGoList(t, func(ctx context.Context, dir string) { + var runner gocommand.Runner + stdout, stderr, friendlyErr, err := runner.RunRaw(ctx, gocommand.Invocation{ + Verb: "list", + Args: []string{"-json", "example.com/p"}, + WorkingDir: dir, + }) + if ctx.Err() != nil { + return // don't report error if canceled + } + if err != nil || friendlyErr != nil { + t.Fatalf("go list failed: %v, %v (stdout=%s stderr=%s)", + err, friendlyErr, stdout, stderr) + } + }) +} + +// TestRmdirAfterGoList_Direct is a variant of +// TestRmdirAfterGoList_Runner that executes go list directly, to +// control for the substantial logic of the gocommand package. +// +// If this test ever fails, the go command itself has a bug. +func TestRmdirAfterGoList_Direct(t *testing.T) { + testRmdirAfterGoList(t, func(ctx context.Context, dir string) { + cmd := exec.Command("go", "list", "-json", "example.com/p") + cmd.Dir = dir + cmd.Stdout = new(strings.Builder) + cmd.Stderr = new(strings.Builder) + err := cmd.Run() + if ctx.Err() != nil { + return // don't report error if canceled + } + if err != nil { + t.Fatalf("go list failed: %v (stdout=%s stderr=%s)", + err, cmd.Stdout, cmd.Stderr) + } + }) +} + +func testRmdirAfterGoList(t *testing.T, f func(ctx context.Context, dir string)) { testenv.NeedsExec(t) dir := t.TempDir() @@ -3428,23 +3471,10 @@ func TestRmdirAfterGoList(t *testing.T) { } } - runner := gocommand.Runner{} - g, ctx := errgroup.WithContext(context.Background()) for range 10 { g.Go(func() error { - stdout, stderr, friendlyErr, err := runner.RunRaw(ctx, gocommand.Invocation{ - Verb: "list", - Args: []string{"-json", "example.com/p"}, - WorkingDir: dir, - }) - if ctx.Err() != nil { - return nil // don't report error if canceled - } - if err != nil || friendlyErr != nil { - t.Fatalf("go list failed: %v, %v (stdout=%s stderr=%s)", - err, friendlyErr, stdout, stderr) - } + f(ctx, dir) // Return an error so that concurrent invocations are canceled. return fmt.Errorf("oops") }) From de18b0bf1345e2dda43ba4fa57605b4ccbbe67ab Mon Sep 17 00:00:00 2001 From: xieyuschen Date: Sun, 13 Apr 2025 16:48:56 +0800 Subject: [PATCH 240/270] gopls: run modernize -fix in gopls Most of changes of this CL are produced by modernize@fd6857200 after running 'modernize -fix ./...' under gopls in wsl. 
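
As a concrete illustration of the kinds of rewrites involved (a sketch for the
reader, not taken from the diff below), the most common patterns applied here
are range-over-int loops, slices.Clone, maps.Copy, and strings.SplitSeq:

	package main

	import (
		"fmt"
		"maps"
		"slices"
		"strings"
	)

	func main() {
		// for i := 0; i < n; i++  ->  for i := range n
		for i := range 3 {
			fmt.Println(i)
		}

		// append([]T(nil), s...)  ->  slices.Clone(s)
		s := slices.Clone([]int{1, 2, 3})

		// hand-written map copy loop  ->  maps.Copy(dst, src)
		dst := map[string]int{}
		maps.Copy(dst, map[string]int{"a": 1})

		// range over strings.Split(...)  ->  range strings.SplitSeq(...)
		for part := range strings.SplitSeq("a/b/c", "/") {
			fmt.Println(part)
		}

		fmt.Println(s, dst)
	}
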
Additionally, some changes were made: - move the 'slices' and 'maps' import declaration into the std packages block. - inline some slices.Contains calls. - run 'go fmt ./...' under gopls. Change-Id: Ifbe2886cc0226c995fc87dd3d19d1cd9506339d9 Reviewed-on: https://go-review.googlesource.com/c/tools/+/665215 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan Reviewed-by: Junyang Shao Auto-Submit: Alan Donovan --- .../analysis/fillreturns/fillreturns.go | 3 ++- .../analysis/fillstruct/fillstruct.go | 4 +-- .../analysis/modernize/stringscutprefix.go | 4 +-- gopls/internal/cache/analysis.go | 1 - gopls/internal/cache/constraints.go | 9 +++---- gopls/internal/cache/filterer.go | 4 +-- gopls/internal/cache/load.go | 2 +- gopls/internal/cache/metadata/cycle_test.go | 9 +++---- gopls/internal/cache/metadata/graph.go | 5 ++-- gopls/internal/cache/mod_vuln.go | 1 - gopls/internal/cache/parse_cache_test.go | 4 +-- gopls/internal/cache/parsego/parse.go | 7 ++--- gopls/internal/cache/port_test.go | 3 +-- gopls/internal/cache/session.go | 2 +- gopls/internal/cache/source.go | 2 +- gopls/internal/cache/typerefs/packageset.go | 2 +- .../internal/cache/typerefs/pkggraph_test.go | 1 - gopls/internal/cache/typerefs/pkgrefs_test.go | 9 +++---- gopls/internal/cache/view.go | 6 ++--- gopls/internal/clonetest/clonetest.go | 9 +++---- gopls/internal/cmd/codeaction.go | 2 +- gopls/internal/debug/trace.go | 3 ++- gopls/internal/doc/generate/generate.go | 2 +- gopls/internal/filecache/filecache_test.go | 7 +++-- gopls/internal/golang/comment.go | 6 +---- gopls/internal/golang/extract.go | 6 ++--- gopls/internal/golang/identifier_test.go | 1 - gopls/internal/golang/invertifcondition.go | 5 +--- gopls/internal/golang/references.go | 1 - gopls/internal/golang/rename.go | 8 +++--- gopls/internal/golang/semtok.go | 9 +++---- gopls/internal/golang/undeclared.go | 2 +- gopls/internal/golang/util.go | 7 +++-- gopls/internal/golang/workspace_symbol.go | 4 +-- gopls/internal/lsprpc/dialer.go | 2 +- gopls/internal/lsprpc/lsprpc_test.go | 6 ++--- gopls/internal/mod/diagnostics.go | 1 - gopls/internal/mod/hover.go | 11 ++------ gopls/internal/progress/progress_test.go | 1 - .../protocol/command/commandmeta/meta.go | 5 +--- gopls/internal/protocol/edits.go | 3 ++- gopls/internal/protocol/generate/generate.go | 2 +- gopls/internal/protocol/json_test.go | 12 +++------ gopls/internal/protocol/semtok/semtok.go | 2 +- gopls/internal/server/code_action.go | 5 +--- gopls/internal/server/command.go | 15 +++-------- gopls/internal/server/diagnostics.go | 1 - gopls/internal/server/prompt.go | 4 +-- gopls/internal/server/prompt_test.go | 1 - gopls/internal/settings/vet_test.go | 2 +- gopls/internal/telemetry/cmd/stacks/stacks.go | 4 +-- gopls/internal/template/highlight.go | 2 +- .../test/integration/bench/codeaction_test.go | 4 +-- .../test/integration/bench/completion_test.go | 6 ++--- .../test/integration/bench/definition_test.go | 2 +- .../test/integration/bench/diagnostic_test.go | 4 +-- .../test/integration/bench/didchange_test.go | 4 +-- .../test/integration/bench/hover_test.go | 2 +- .../integration/bench/implementations_test.go | 2 +- .../test/integration/bench/imports_test.go | 4 +-- .../test/integration/bench/iwl_test.go | 4 +-- .../test/integration/bench/references_test.go | 2 +- .../test/integration/bench/reload_test.go | 2 +- .../test/integration/bench/rename_test.go | 2 +- .../test/integration/bench/tests_test.go | 2 +- .../test/integration/bench/typing_test.go | 2 +- .../bench/workspace_symbols_test.go | 2 +- 
.../diagnostics/invalidation_test.go | 2 +- .../diagnostics/undeclared_test.go | 8 ++---- .../test/integration/fake/edit_test.go | 1 - .../internal/test/integration/fake/editor.go | 27 +++++-------------- .../test/integration/misc/prompt_test.go | 2 +- .../test/integration/misc/vuln_test.go | 1 - gopls/internal/test/integration/options.go | 9 +++---- gopls/internal/test/integration/runner.go | 1 - .../test/integration/workspace/adhoc_test.go | 2 +- gopls/internal/test/marker/marker_test.go | 1 - gopls/internal/util/bug/bug_test.go | 2 +- gopls/internal/util/frob/frob.go | 10 +++---- gopls/internal/util/lru/lru_test.go | 1 - gopls/internal/util/persistent/map_test.go | 11 +++----- gopls/internal/vulncheck/types.go | 2 +- gopls/internal/vulncheck/vulntest/db.go | 3 ++- 83 files changed, 131 insertions(+), 225 deletions(-) diff --git a/gopls/internal/analysis/fillreturns/fillreturns.go b/gopls/internal/analysis/fillreturns/fillreturns.go index e23e620acc2..b2cc1caf872 100644 --- a/gopls/internal/analysis/fillreturns/fillreturns.go +++ b/gopls/internal/analysis/fillreturns/fillreturns.go @@ -12,6 +12,7 @@ import ( "go/format" "go/types" "regexp" + "slices" "strings" "golang.org/x/tools/go/analysis" @@ -134,7 +135,7 @@ outer: if match != nil { fixed[i] = match - remaining = append(remaining[:idx], remaining[idx+1:]...) + remaining = slices.Delete(remaining, idx, idx+1) } else { names, ok := matches[retTyp] if !ok { diff --git a/gopls/internal/analysis/fillstruct/fillstruct.go b/gopls/internal/analysis/fillstruct/fillstruct.go index 62f7d77f58e..5a18da9a221 100644 --- a/gopls/internal/analysis/fillstruct/fillstruct.go +++ b/gopls/internal/analysis/fillstruct/fillstruct.go @@ -76,7 +76,7 @@ func Diagnose(f *ast.File, start, end token.Pos, pkg *types.Package, info *types // Are any fields in need of filling? var fillableFields []string - for i := 0; i < fieldCount; i++ { + for i := range fieldCount { field := tStruct.Field(i) // Ignore fields that are not accessible in the current package. if field.Pkg() != nil && field.Pkg() != pkg && !field.Exported() { @@ -182,7 +182,7 @@ func SuggestedFix(cpkg *cache.Package, pgf *parsego.File, start, end token.Pos) } var fieldTyps []types.Type - for i := 0; i < fieldCount; i++ { + for i := range fieldCount { field := tStruct.Field(i) // Ignore fields that are not accessible in the current package. 
if field.Pkg() != nil && field.Pkg() != pkg && !field.Exported() { diff --git a/gopls/internal/analysis/modernize/stringscutprefix.go b/gopls/internal/analysis/modernize/stringscutprefix.go index cd053539910..f8e9be63e3c 100644 --- a/gopls/internal/analysis/modernize/stringscutprefix.go +++ b/gopls/internal/analysis/modernize/stringscutprefix.go @@ -114,7 +114,7 @@ func stringscutprefix(pass *analysis.Pass) { { Pos: call.Fun.Pos(), End: call.Fun.Pos(), - NewText: []byte(fmt.Sprintf("%s, %s :=", after, okVarName)), + NewText: fmt.Appendf(nil, "%s, %s :=", after, okVarName), }, { Pos: call.Fun.Pos(), @@ -124,7 +124,7 @@ func stringscutprefix(pass *analysis.Pass) { { Pos: call.End(), End: call.End(), - NewText: []byte(fmt.Sprintf("; %s ", okVarName)), + NewText: fmt.Appendf(nil, "; %s ", okVarName), }, { Pos: call1.Pos(), diff --git a/gopls/internal/cache/analysis.go b/gopls/internal/cache/analysis.go index cbe30c6f794..f63bcab2374 100644 --- a/gopls/internal/cache/analysis.go +++ b/gopls/internal/cache/analysis.go @@ -891,7 +891,6 @@ func (act *action) String() string { func execActions(ctx context.Context, actions []*action) { var wg sync.WaitGroup for _, act := range actions { - act := act wg.Add(1) go func() { defer wg.Done() diff --git a/gopls/internal/cache/constraints.go b/gopls/internal/cache/constraints.go index 9503abc1ebd..a9a87ae6d4b 100644 --- a/gopls/internal/cache/constraints.go +++ b/gopls/internal/cache/constraints.go @@ -9,6 +9,7 @@ import ( "go/build/constraint" "go/parser" "go/token" + "slices" ) // isStandaloneFile reports whether a file with the given contents should be @@ -27,11 +28,9 @@ func isStandaloneFile(src []byte, standaloneTags []string) bool { found := false walkConstraints(f, func(c constraint.Expr) bool { if tag, ok := c.(*constraint.TagExpr); ok { - for _, t := range standaloneTags { - if t == tag.Tag { - found = true - return false - } + if slices.Contains(standaloneTags, tag.Tag) { + found = true + return false } } return true diff --git a/gopls/internal/cache/filterer.go b/gopls/internal/cache/filterer.go index 13dbd8a1b04..9f911ec9de8 100644 --- a/gopls/internal/cache/filterer.go +++ b/gopls/internal/cache/filterer.go @@ -71,8 +71,8 @@ func convertFilterToRegexp(filter string) *regexp.Regexp { } var ret strings.Builder ret.WriteString("^/") - segs := strings.Split(filter, "/") - for _, seg := range segs { + segs := strings.SplitSeq(filter, "/") + for seg := range segs { // Inv: seg != "" since path is clean. if seg == "**" { ret.WriteString(".*") diff --git a/gopls/internal/cache/load.go b/gopls/internal/cache/load.go index e15e0cef0b6..b45669b3b79 100644 --- a/gopls/internal/cache/load.go +++ b/gopls/internal/cache/load.go @@ -791,7 +791,7 @@ func computeWorkspacePackagesLocked(ctx context.Context, s *Snapshot, meta *meta func allFilesHaveRealPackages(g *metadata.Graph, mp *metadata.Package) bool { n := len(mp.CompiledGoFiles) checkURIs: - for _, uri := range append(mp.CompiledGoFiles[0:n:n], mp.GoFiles...) 
{ + for _, uri := range slices.Concat(mp.CompiledGoFiles[0:n:n], mp.GoFiles) { for _, id := range g.IDs[uri] { if !metadata.IsCommandLineArguments(id) { continue checkURIs diff --git a/gopls/internal/cache/metadata/cycle_test.go b/gopls/internal/cache/metadata/cycle_test.go index 09628d881e9..5f935f603c8 100644 --- a/gopls/internal/cache/metadata/cycle_test.go +++ b/gopls/internal/cache/metadata/cycle_test.go @@ -5,6 +5,7 @@ package metadata import ( + "maps" "sort" "strings" "testing" @@ -40,11 +41,11 @@ func TestBreakImportCycles(t *testing.T) { return n } if s != "" { - for _, item := range strings.Split(s, ";") { + for item := range strings.SplitSeq(s, ";") { nodeID, succIDs, ok := strings.Cut(item, "->") node := makeNode(nodeID) if ok { - for _, succID := range strings.Split(succIDs, ",") { + for succID := range strings.SplitSeq(succIDs, ",") { node.DepsByPkgPath[PackagePath(succID)] = PackageID(succID) } } @@ -119,9 +120,7 @@ func TestBreakImportCycles(t *testing.T) { // Apply updates. // (parse doesn't have a way to express node deletions, // but they aren't very interesting.) - for id, mp := range updates { - metadata[id] = mp - } + maps.Copy(metadata, updates) t.Log("updated", format(metadata)) diff --git a/gopls/internal/cache/metadata/graph.go b/gopls/internal/cache/metadata/graph.go index 716b767e37b..b029b51aa7e 100644 --- a/gopls/internal/cache/metadata/graph.go +++ b/gopls/internal/cache/metadata/graph.go @@ -6,6 +6,7 @@ package metadata import ( "iter" + "maps" "sort" "strings" @@ -63,9 +64,7 @@ func (g *Graph) Update(updates map[PackageID]*Package) *Graph { // Copy pkgs map then apply updates. pkgs := make(map[PackageID]*Package, len(g.Packages)) - for id, mp := range g.Packages { - pkgs[id] = mp - } + maps.Copy(pkgs, g.Packages) for id, mp := range updates { if mp == nil { delete(pkgs, id) diff --git a/gopls/internal/cache/mod_vuln.go b/gopls/internal/cache/mod_vuln.go index a48b18e4ba4..5b7d679fa48 100644 --- a/gopls/internal/cache/mod_vuln.go +++ b/gopls/internal/cache/mod_vuln.go @@ -126,7 +126,6 @@ func modVulnImpl(ctx context.Context, snapshot *Snapshot) (*vulncheck.Result, er var group errgroup.Group group.SetLimit(10) // limit govulncheck api runs for _, mps := range packagesByModule { - mps := mps group.Go(func() error { effectiveModule := stdlibModule if m := mps[0].Module; m != nil { diff --git a/gopls/internal/cache/parse_cache_test.go b/gopls/internal/cache/parse_cache_test.go index fe0548aa20d..4e3a7cf32b7 100644 --- a/gopls/internal/cache/parse_cache_test.go +++ b/gopls/internal/cache/parse_cache_test.go @@ -195,9 +195,9 @@ func TestParseCache_Duplicates(t *testing.T) { func dummyFileHandles(n int) []file.Handle { var fhs []file.Handle - for i := 0; i < n; i++ { + for i := range n { uri := protocol.DocumentURI(fmt.Sprintf("file:///_%d", i)) - src := []byte(fmt.Sprintf("package p\nvar _ = %d", i)) + src := fmt.Appendf(nil, "package p\nvar _ = %d", i) fhs = append(fhs, makeFakeFileHandle(uri, src)) } return fhs diff --git a/gopls/internal/cache/parsego/parse.go b/gopls/internal/cache/parsego/parse.go index 3ffa531735f..bc5483fc166 100644 --- a/gopls/internal/cache/parsego/parse.go +++ b/gopls/internal/cache/parsego/parse.go @@ -81,7 +81,7 @@ func Parse(ctx context.Context, fset *token.FileSet, uri protocol.DocumentURI, s fixes = append(fixes, astFixes...) } - for i := 0; i < 10; i++ { + for i := range 10 { // Fix certain syntax errors that render the file unparseable. 
newSrc, srcFix := fixSrc(file, tok, src) if newSrc == nil { @@ -903,10 +903,7 @@ func offsetPositions(tok *token.File, n ast.Node, offset token.Pos) { // // TODO(golang/go#64335): this is a hack, because our fixes should not // produce positions that overflow (but they do: golang/go#64488). - pos := f.Int() + int64(offset) - if pos < fileBase { - pos = fileBase - } + pos := max(f.Int()+int64(offset), fileBase) if pos > fileEnd { pos = fileEnd } diff --git a/gopls/internal/cache/port_test.go b/gopls/internal/cache/port_test.go index a92056a9c22..5d0c5d4a50f 100644 --- a/gopls/internal/cache/port_test.go +++ b/gopls/internal/cache/port_test.go @@ -46,7 +46,6 @@ func TestMatchingPortsStdlib(t *testing.T) { var g errgroup.Group packages.Visit(pkgs, nil, func(pkg *packages.Package) { for _, f := range pkg.CompiledGoFiles { - f := f g.Go(func() error { content, err := os.ReadFile(f) // We report errors via t.Error, not by returning, @@ -118,7 +117,7 @@ func getFileID(filename string) (FileID, time.Time, error) { } ` fh := makeFakeFileHandle("file:///path/to/test/file.go", []byte(src)) - for i := 0; i < b.N; i++ { + for b.Loop() { _ = matchingPreferredPorts(b, fh, true) } } diff --git a/gopls/internal/cache/session.go b/gopls/internal/cache/session.go index aa970b41e2c..f0d8f062138 100644 --- a/gopls/internal/cache/session.go +++ b/gopls/internal/cache/session.go @@ -182,7 +182,7 @@ func (s *Session) createView(ctx context.Context, def *viewDefinition) (*View, * // Note that the logic below doesn't handle the case where uri == // v.folder.Dir, because there is no point in excluding the entire // workspace folder! - if rel := strings.TrimPrefix(uri, dirPrefix); rel != uri { + if rel, ok := strings.CutPrefix(uri, dirPrefix); ok { return !pathIncluded(rel) } return false diff --git a/gopls/internal/cache/source.go b/gopls/internal/cache/source.go index 7946b9746ab..047cc3971d8 100644 --- a/gopls/internal/cache/source.go +++ b/gopls/internal/cache/source.go @@ -103,7 +103,7 @@ func (s *goplsSource) ResolveReferences(ctx context.Context, filename string, mi } dbgpr := func(hdr string, v []*imports.Result) { - for i := 0; i < len(v); i++ { + for i := range v { log.Printf("%s%d %+v %+v", hdr, i, v[i].Import, v[i].Package) } } diff --git a/gopls/internal/cache/typerefs/packageset.go b/gopls/internal/cache/typerefs/packageset.go index f4f7c94f712..af495d1573c 100644 --- a/gopls/internal/cache/typerefs/packageset.go +++ b/gopls/internal/cache/typerefs/packageset.go @@ -124,7 +124,7 @@ func (s *PackageSet) Contains(id metadata.PackageID) bool { // Elems calls f for each element of the set in ascending order. 
func (s *PackageSet) Elems(f func(IndexID)) { for i, v := range moremaps.Sorted(s.sparse) { - for b := 0; b < blockSize; b++ { + for b := range blockSize { if (v & (1 << b)) != 0 { f(IndexID(i*blockSize + b)) } diff --git a/gopls/internal/cache/typerefs/pkggraph_test.go b/gopls/internal/cache/typerefs/pkggraph_test.go index 20e34ce1aa9..f205da85b35 100644 --- a/gopls/internal/cache/typerefs/pkggraph_test.go +++ b/gopls/internal/cache/typerefs/pkggraph_test.go @@ -84,7 +84,6 @@ func BuildPackageGraph(ctx context.Context, meta metadata.Source, ids []metadata var eg errgroup.Group eg.SetLimit(workers) for _, id := range ids { - id := id eg.Go(func() error { _, err := g.Package(ctx, id) return err diff --git a/gopls/internal/cache/typerefs/pkgrefs_test.go b/gopls/internal/cache/typerefs/pkgrefs_test.go index 3f9a976ccf7..ce297e4380b 100644 --- a/gopls/internal/cache/typerefs/pkgrefs_test.go +++ b/gopls/internal/cache/typerefs/pkgrefs_test.go @@ -12,7 +12,7 @@ import ( "go/token" "go/types" "os" - "sort" + "slices" "strings" "sync" "testing" @@ -81,9 +81,7 @@ func TestBuildPackageGraph(t *testing.T) { for id := range exports { ids = append(ids, id) } - sort.Slice(ids, func(i, j int) bool { - return ids[i] < ids[j] - }) + slices.Sort(ids) t0 = time.Now() g, err := BuildPackageGraph(ctx, meta, ids, newParser().parse) @@ -259,9 +257,8 @@ func BenchmarkBuildPackageGraph(b *testing.B) { for id := range exports { ids = append(ids, id) } - b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { _, err := BuildPackageGraph(ctx, meta, ids, newParser().parse) if err != nil { b.Fatal(err) diff --git a/gopls/internal/cache/view.go b/gopls/internal/cache/view.go index 6bb0ae8edeb..4e8375a77db 100644 --- a/gopls/internal/cache/view.go +++ b/gopls/internal/cache/view.go @@ -473,7 +473,7 @@ func (v *View) filterFunc() func(protocol.DocumentURI) bool { gomodcache := v.folder.Env.GOMODCACHE var filters []string filters = append(filters, v.folder.Options.DirectoryFilters...) - if pref := strings.TrimPrefix(gomodcache, folderDir); pref != gomodcache { + if pref, ok := strings.CutPrefix(gomodcache, folderDir); ok { modcacheFilter := "-" + strings.TrimPrefix(filepath.ToSlash(pref), "/") filters = append(filters, modcacheFilter) } @@ -550,7 +550,7 @@ func newIgnoreFilter(dirs []string) *ignoreFilter { func (f *ignoreFilter) ignored(filename string) bool { for _, prefix := range f.prefixes { - if suffix := strings.TrimPrefix(filename, prefix); suffix != filename { + if suffix, ok := strings.CutPrefix(filename, prefix); ok { if checkIgnored(suffix) { return true } @@ -567,7 +567,7 @@ func (f *ignoreFilter) ignored(filename string) bool { func checkIgnored(suffix string) bool { // Note: this could be further optimized by writing a HasSegment helper, a // segment-boundary respecting variant of strings.Contains. - for _, component := range strings.Split(suffix, string(filepath.Separator)) { + for component := range strings.SplitSeq(suffix, string(filepath.Separator)) { if len(component) == 0 { continue } diff --git a/gopls/internal/clonetest/clonetest.go b/gopls/internal/clonetest/clonetest.go index 3542476ae09..773bc170fe7 100644 --- a/gopls/internal/clonetest/clonetest.go +++ b/gopls/internal/clonetest/clonetest.go @@ -13,6 +13,7 @@ package clonetest import ( "fmt" "reflect" + "slices" ) // NonZero returns a T set to some appropriate nonzero value: @@ -36,11 +37,9 @@ func NonZero[T any]() T { // nonZeroValue returns a non-zero, addressable value of the given type. 
func nonZeroValue(t reflect.Type, seen []reflect.Type) reflect.Value { - for _, t2 := range seen { - if t == t2 { - // Cycle: return the zero value. - return reflect.Zero(t) - } + if slices.Contains(seen, t) { + // Cycle: return the zero value. + return reflect.Zero(t) } seen = append(seen, t) v := reflect.New(t).Elem() diff --git a/gopls/internal/cmd/codeaction.go b/gopls/internal/cmd/codeaction.go index 2096a153681..6931af37d40 100644 --- a/gopls/internal/cmd/codeaction.go +++ b/gopls/internal/cmd/codeaction.go @@ -142,7 +142,7 @@ func (cmd *codeaction) Run(ctx context.Context, args ...string) error { // Request code actions of the desired kinds. var kinds []protocol.CodeActionKind if cmd.Kind != "" { - for _, kind := range strings.Split(cmd.Kind, ",") { + for kind := range strings.SplitSeq(cmd.Kind, ",") { kinds = append(kinds, protocol.CodeActionKind(kind)) } } else { diff --git a/gopls/internal/debug/trace.go b/gopls/internal/debug/trace.go index e6ff9697b67..d80a32eecbe 100644 --- a/gopls/internal/debug/trace.go +++ b/gopls/internal/debug/trace.go @@ -11,6 +11,7 @@ import ( "html/template" "net/http" "runtime/trace" + "slices" "sort" "strings" "sync" @@ -271,7 +272,7 @@ func (t *traces) addRecentLocked(span *traceSpan, start bool) { // as Go's GC cannot collect the ever-growing unused prefix. // So, compact it periodically. if t.recentEvictions%maxRecent == 0 { - t.recent = append([]spanStartEnd(nil), t.recent...) + t.recent = slices.Clone(t.recent) } } } diff --git a/gopls/internal/doc/generate/generate.go b/gopls/internal/doc/generate/generate.go index 9256d2ec835..d470fb71333 100644 --- a/gopls/internal/doc/generate/generate.go +++ b/gopls/internal/doc/generate/generate.go @@ -802,7 +802,7 @@ func replaceSection(content []byte, sectionName string, replacement []byte) ([]b if idx == nil { return nil, fmt.Errorf("could not find section %q", sectionName) } - result := append([]byte(nil), content[:idx[2]]...) + result := slices.Clone(content[:idx[2]]) result = append(result, replacement...) result = append(result, content[idx[3]:]...) return result, nil diff --git a/gopls/internal/filecache/filecache_test.go b/gopls/internal/filecache/filecache_test.go index 3419db4b513..4dbc04490f5 100644 --- a/gopls/internal/filecache/filecache_test.go +++ b/gopls/internal/filecache/filecache_test.go @@ -100,7 +100,6 @@ func TestConcurrency(t *testing.T) { // there is no third possibility. var group errgroup.Group for i := range values { - i := i group.Go(func() error { return filecache.Set(kind, key, values[i][:]) }) group.Go(func() error { return get(false) }) } @@ -217,12 +216,12 @@ func BenchmarkUncontendedGet(b *testing.B) { if err := filecache.Set(kind, key, value[:]); err != nil { b.Fatal(err) } - b.ResetTimer() + b.SetBytes(int64(len(value))) var group errgroup.Group group.SetLimit(50) - for i := 0; i < b.N; i++ { + for b.Loop() { group.Go(func() error { _, err := filecache.Get(kind, key) return err @@ -246,7 +245,7 @@ func BenchmarkUncontendedSet(b *testing.B) { const P = 1000 // parallelism b.SetBytes(P * int64(len(value))) - for i := 0; i < b.N; i++ { + for b.Loop() { // Perform P concurrent calls to Set. All must succeed. 
var group errgroup.Group for range [P]bool{} { diff --git a/gopls/internal/golang/comment.go b/gopls/internal/golang/comment.go index 9a360ce2e2b..a58045b1819 100644 --- a/gopls/internal/golang/comment.go +++ b/gopls/internal/golang/comment.go @@ -96,11 +96,7 @@ func parseDocLink(pkg *cache.Package, pgf *parsego.File, pos token.Pos) (types.O // position of each doc link from the parsed result. line := safetoken.Line(pgf.Tok, pos) var start, end token.Pos - if pgf.Tok.LineStart(line) > comment.Pos() { - start = pgf.Tok.LineStart(line) - } else { - start = comment.Pos() - } + start = max(pgf.Tok.LineStart(line), comment.Pos()) if line < pgf.Tok.LineCount() && pgf.Tok.LineStart(line+1) < comment.End() { end = pgf.Tok.LineStart(line + 1) } else { diff --git a/gopls/internal/golang/extract.go b/gopls/internal/golang/extract.go index a832ec305e8..5e82e430225 100644 --- a/gopls/internal/golang/extract.go +++ b/gopls/internal/golang/extract.go @@ -488,10 +488,8 @@ func canExtractVariable(info *types.Info, curFile cursor.Cursor, start, end toke path, _ := astutil.PathEnclosingInterval(file, e.Pos(), e.End()) for _, n := range path { if assignment, ok := n.(*ast.AssignStmt); ok { - for _, lhs := range assignment.Lhs { - if lhs == e { - return nil, fmt.Errorf("node %T is in LHS of an AssignStmt", expr) - } + if slices.Contains(assignment.Lhs, e) { + return nil, fmt.Errorf("node %T is in LHS of an AssignStmt", expr) } break } diff --git a/gopls/internal/golang/identifier_test.go b/gopls/internal/golang/identifier_test.go index 8206d8731ae..0823793466f 100644 --- a/gopls/internal/golang/identifier_test.go +++ b/gopls/internal/golang/identifier_test.go @@ -41,7 +41,6 @@ func TestSearchForEnclosing(t *testing.T) { } for _, test := range tests { - test := test t.Run(test.desc, func(t *testing.T) { fset := token.NewFileSet() file, err := parser.ParseFile(fset, "a.go", test.src, parser.AllErrors|parser.SkipObjectResolution) diff --git a/gopls/internal/golang/invertifcondition.go b/gopls/internal/golang/invertifcondition.go index 012278df79e..c8cd7deef5e 100644 --- a/gopls/internal/golang/invertifcondition.go +++ b/gopls/internal/golang/invertifcondition.go @@ -42,10 +42,7 @@ func invertIfCondition(pkg *cache.Package, pgf *parsego.File, start, end token.P // version of the original if body sourcePos := safetoken.StartPosition(fset, ifStatement.Pos()) - indent := sourcePos.Column - 1 - if indent < 0 { - indent = 0 - } + indent := max(sourcePos.Column-1, 0) standaloneBodyText := ifBodyToStandaloneCode(fset, ifStatement.Body, src) replaceElse = analysis.TextEdit{ diff --git a/gopls/internal/golang/references.go b/gopls/internal/golang/references.go index cf24685ca91..7fe054a5a7d 100644 --- a/gopls/internal/golang/references.go +++ b/gopls/internal/golang/references.go @@ -520,7 +520,6 @@ func expandMethodSearch(ctx context.Context, snapshot *cache.Snapshot, workspace var mu sync.Mutex // guards addRdeps, targets, expansions var group errgroup.Group for i, index := range indexes { - i := i index := index group.Go(func() error { // Consult index for matching (super/sub) methods. 
diff --git a/gopls/internal/golang/rename.go b/gopls/internal/golang/rename.go index 24dbcbadc05..f23f179c6ff 100644 --- a/gopls/internal/golang/rename.go +++ b/gopls/internal/golang/rename.go @@ -51,6 +51,7 @@ import ( "go/printer" "go/token" "go/types" + "maps" "path" "path/filepath" "regexp" @@ -170,8 +171,7 @@ func PrepareRename(ctx context.Context, snapshot *cache.Snapshot, f file.Handle, func prepareRenamePackageName(ctx context.Context, snapshot *cache.Snapshot, pgf *parsego.File) (*PrepareItem, error) { // Does the client support file renaming? - fileRenameSupported := slices.Contains(snapshot.Options().SupportedResourceOperations, protocol.Rename) - if !fileRenameSupported { + if !slices.Contains(snapshot.Options().SupportedResourceOperations, protocol.Rename) { return nil, errors.New("can't rename package: LSP client does not support file renaming") } @@ -715,9 +715,7 @@ func typeCheckReverseDependencies(ctx context.Context, snapshot *cache.Snapshot, return nil, err } allRdeps[variant.ID] = variant // include self - for id, meta := range rdeps { - allRdeps[id] = meta - } + maps.Copy(allRdeps, rdeps) } var ids []PackageID for id, meta := range allRdeps { diff --git a/gopls/internal/golang/semtok.go b/gopls/internal/golang/semtok.go index 121531d8280..f0286ff1fb3 100644 --- a/gopls/internal/golang/semtok.go +++ b/gopls/internal/golang/semtok.go @@ -17,6 +17,7 @@ import ( "log" "path/filepath" "regexp" + "slices" "strconv" "strings" "time" @@ -210,7 +211,7 @@ func (tv *tokenVisitor) comment(c *ast.Comment, importByName map[string]*types.P } pos := c.Pos() - for _, line := range strings.Split(c.Text, "\n") { + for line := range strings.SplitSeq(c.Text, "\n") { last := 0 for _, idx := range docLinkRegex.FindAllStringSubmatchIndex(line, -1) { @@ -721,10 +722,8 @@ func (tv *tokenVisitor) unkIdent(id *ast.Ident) (semtok.Type, []semtok.Modifier) return semtok.TokType, nil } case *ast.ValueSpec: - for _, p := range parent.Names { - if p == id { - return semtok.TokVariable, def - } + if slices.Contains(parent.Names, id) { + return semtok.TokVariable, def } for _, p := range parent.Values { if p == id { diff --git a/gopls/internal/golang/undeclared.go b/gopls/internal/golang/undeclared.go index 9df8e2bfd2e..515da9bd891 100644 --- a/gopls/internal/golang/undeclared.go +++ b/gopls/internal/golang/undeclared.go @@ -251,7 +251,7 @@ func newFunctionDeclaration(path []ast.Node, file *ast.File, pkg *types.Package, // results is used as an argument case *types.Tuple: n := t.Len() - for i := 0; i < n; i++ { + for i := range n { name := typeToArgName(t.At(i).Type()) nameCounts[name]++ diff --git a/gopls/internal/golang/util.go b/gopls/internal/golang/util.go index b13056e02b9..5c54bfcf751 100644 --- a/gopls/internal/golang/util.go +++ b/gopls/internal/golang/util.go @@ -11,6 +11,7 @@ import ( "go/token" "go/types" "regexp" + "slices" "strings" "unicode" @@ -89,10 +90,8 @@ func findFileInDeps(s metadata.Source, mp *metadata.Package, uri protocol.Docume return nil } seen[mp.ID] = true - for _, cgf := range mp.CompiledGoFiles { - if cgf == uri { - return mp - } + if slices.Contains(mp.CompiledGoFiles, uri) { + return mp } for _, dep := range mp.DepsByPkgPath { mp := s.Metadata(dep) diff --git a/gopls/internal/golang/workspace_symbol.go b/gopls/internal/golang/workspace_symbol.go index 91c5ee22925..1a0819b4d52 100644 --- a/gopls/internal/golang/workspace_symbol.go +++ b/gopls/internal/golang/workspace_symbol.go @@ -389,7 +389,7 @@ func collectSymbols(ctx context.Context, snapshots []*cache.Snapshot, 
matcherTyp // which we merge at the end. nmatchers := runtime.GOMAXPROCS(-1) // matching is CPU bound results := make(chan *symbolStore) - for i := 0; i < nmatchers; i++ { + for i := range nmatchers { go func(i int) { matcher := buildMatcher(matcherType, query) store := new(symbolStore) @@ -403,7 +403,7 @@ func collectSymbols(ctx context.Context, snapshots []*cache.Snapshot, matcherTyp // Gather and merge results as they arrive. var unified symbolStore - for i := 0; i < nmatchers; i++ { + for range nmatchers { store := <-results for _, syms := range store.res { if syms != nil { diff --git a/gopls/internal/lsprpc/dialer.go b/gopls/internal/lsprpc/dialer.go index a5f038df9f1..b9aabe4947b 100644 --- a/gopls/internal/lsprpc/dialer.go +++ b/gopls/internal/lsprpc/dialer.go @@ -97,7 +97,7 @@ func (d *autoDialer) dialNet(ctx context.Context) (net.Conn, error) { const retries = 5 // It can take some time for the newly started server to bind to our address, // so we retry for a bit. - for retry := 0; retry < retries; retry++ { + for retry := range retries { startDial := time.Now() netConn, err = net.DialTimeout(d.network, d.addr, dialTimeout) if err == nil { diff --git a/gopls/internal/lsprpc/lsprpc_test.go b/gopls/internal/lsprpc/lsprpc_test.go index eda00b28c7a..c8f0267cc3c 100644 --- a/gopls/internal/lsprpc/lsprpc_test.go +++ b/gopls/internal/lsprpc/lsprpc_test.go @@ -52,8 +52,7 @@ func (s PingServer) DidOpen(ctx context.Context, params *protocol.DidOpenTextDoc } func TestClientLogging(t *testing.T) { - ctx, cancel := context.WithCancel(context.Background()) - defer cancel() + ctx := t.Context() server := PingServer{} client := FakeClient{Logs: make(chan string, 10)} @@ -212,8 +211,7 @@ func TestDebugInfoLifecycle(t *testing.T) { } }() - baseCtx, cancel := context.WithCancel(context.Background()) - defer cancel() + baseCtx := t.Context() clientCtx := debug.WithInstance(baseCtx) serverCtx := debug.WithInstance(baseCtx) diff --git a/gopls/internal/mod/diagnostics.go b/gopls/internal/mod/diagnostics.go index 8ad1ece05e7..52f3704ed0f 100644 --- a/gopls/internal/mod/diagnostics.go +++ b/gopls/internal/mod/diagnostics.go @@ -69,7 +69,6 @@ func collectDiagnostics(ctx context.Context, snapshot *cache.Snapshot, diagFn fu reports := make(map[protocol.DocumentURI][]*cache.Diagnostic) for _, uri := range snapshot.View().ModFiles() { - uri := uri g.Go(func() error { fh, err := snapshot.ReadFile(ctx, uri) if err != nil { diff --git a/gopls/internal/mod/hover.go b/gopls/internal/mod/hover.go index 04834f3cd7e..b9b026674fa 100644 --- a/gopls/internal/mod/hover.go +++ b/gopls/internal/mod/hover.go @@ -8,6 +8,7 @@ import ( "bytes" "context" "fmt" + "slices" "sort" "strings" @@ -25,16 +26,8 @@ import ( ) func Hover(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, position protocol.Position) (*protocol.Hover, error) { - var found bool - for _, uri := range snapshot.View().ModFiles() { - if fh.URI() == uri { - found = true - break - } - } - // We only provide hover information for the view's go.mod files. 
- if !found { + if !slices.Contains(snapshot.View().ModFiles(), fh.URI()) { return nil, nil } diff --git a/gopls/internal/progress/progress_test.go b/gopls/internal/progress/progress_test.go index 642103ae025..687f99ba4a1 100644 --- a/gopls/internal/progress/progress_test.go +++ b/gopls/internal/progress/progress_test.go @@ -107,7 +107,6 @@ func TestProgressTracker_Reporting(t *testing.T) { wantEnded: 1, }, } { - test := test t.Run(test.name, func(t *testing.T) { ctx, tracker, client := setup() ctx, cancel := context.WithCancel(ctx) diff --git a/gopls/internal/protocol/command/commandmeta/meta.go b/gopls/internal/protocol/command/commandmeta/meta.go index f147898e192..7c3a3acc12f 100644 --- a/gopls/internal/protocol/command/commandmeta/meta.go +++ b/gopls/internal/protocol/command/commandmeta/meta.go @@ -224,10 +224,7 @@ func lspName(methodName string) string { func splitCamel(s string) []string { var words []string for len(s) > 0 { - last := strings.LastIndexFunc(s, unicode.IsUpper) - if last < 0 { - last = 0 - } + last := max(strings.LastIndexFunc(s, unicode.IsUpper), 0) if last == len(s)-1 { // Group initialisms as a single word. last = 1 + strings.LastIndexFunc(s[:last], func(r rune) bool { return !unicode.IsUpper(r) }) diff --git a/gopls/internal/protocol/edits.go b/gopls/internal/protocol/edits.go index 5f70c4efdb5..c5d3592a8ee 100644 --- a/gopls/internal/protocol/edits.go +++ b/gopls/internal/protocol/edits.go @@ -6,6 +6,7 @@ package protocol import ( "fmt" + "slices" "golang.org/x/tools/internal/diff" ) @@ -16,7 +17,7 @@ func EditsFromDiffEdits(m *Mapper, edits []diff.Edit) ([]TextEdit, error) { // LSP doesn't require TextEditArray to be sorted: // this is the receiver's concern. But govim, and perhaps // other clients have historically relied on the order. - edits = append([]diff.Edit(nil), edits...) + edits = slices.Clone(edits) diff.SortEdits(edits) result := make([]TextEdit, len(edits)) diff --git a/gopls/internal/protocol/generate/generate.go b/gopls/internal/protocol/generate/generate.go index 9c7009113ab..fef8ef417eb 100644 --- a/gopls/internal/protocol/generate/generate.go +++ b/gopls/internal/protocol/generate/generate.go @@ -32,7 +32,7 @@ func generateDoc(out *bytes.Buffer, doc string) { return } var list bool - for _, line := range strings.Split(doc, "\n") { + for line := range strings.SplitSeq(doc, "\n") { // Lists in metaModel.json start with a dash. // To make a go doc list they have to be preceded // by a blank line, and indented. 
diff --git a/gopls/internal/protocol/json_test.go b/gopls/internal/protocol/json_test.go index 9aac110fa3b..2c03095a84c 100644 --- a/gopls/internal/protocol/json_test.go +++ b/gopls/internal/protocol/json_test.go @@ -103,15 +103,9 @@ func tryChange(start, end int, repl string) error { var p, q protocol.ParamInitialize mod := input[:start] + repl + input[end:] excerpt := func() (string, string) { - a := start - 5 - if a < 0 { - a = 0 - } - b := end + 5 - if b > len(input) { - // trusting repl to be no longer than what it replaces - b = len(input) - } + a := max(start-5, 0) + // trusting repl to be no longer than what it replaces + b := min(end+5, len(input)) ma := input[a:b] mb := mod[a:b] return ma, mb diff --git a/gopls/internal/protocol/semtok/semtok.go b/gopls/internal/protocol/semtok/semtok.go index 6b05b8bb5e2..86332d37e1a 100644 --- a/gopls/internal/protocol/semtok/semtok.go +++ b/gopls/internal/protocol/semtok/semtok.go @@ -173,7 +173,7 @@ func Encode( x := make([]uint32, 5*len(tokens)) var j int var last Token - for i := 0; i < len(tokens); i++ { + for i := range tokens { item := tokens[i] typ, ok := typeMap[item.Type] if !ok { diff --git a/gopls/internal/server/code_action.go b/gopls/internal/server/code_action.go index 4617fad5de7..9fa2bf54459 100644 --- a/gopls/internal/server/code_action.go +++ b/gopls/internal/server/code_action.go @@ -8,7 +8,6 @@ import ( "context" "fmt" "slices" - "sort" "strings" "golang.org/x/tools/gopls/internal/cache" @@ -354,9 +353,7 @@ func (s *server) getSupportedCodeActions() []protocol.CodeActionKind { for kind := range allCodeActionKinds { result = append(result, kind) } - sort.Slice(result, func(i, j int) bool { - return result[i] < result[j] - }) + slices.Sort(result) return result } diff --git a/gopls/internal/server/command.go b/gopls/internal/server/command.go index 3711e42549b..60c7389d095 100644 --- a/gopls/internal/server/command.go +++ b/gopls/internal/server/command.go @@ -15,6 +15,7 @@ import ( "go/token" "io" "log" + "maps" "os" "path/filepath" "regexp" @@ -65,14 +66,7 @@ func (s *server) ExecuteCommand(ctx context.Context, params *protocol.ExecuteCom defer work.End(ctx, "Done.") } - var found bool - for _, name := range s.Options().SupportedCommands { - if name == params.Command { - found = true - break - } - } - if !found { + if !slices.Contains(s.Options().SupportedCommands, params.Command) { return nil, fmt.Errorf("%s is not a supported command", params.Command) } @@ -1208,9 +1202,7 @@ func (c *commandHandler) FetchVulncheckResult(ctx context.Context, arg command.U } } // Overwrite if there is any govulncheck-based result. - for modfile, result := range deps.snapshot.Vulnerabilities() { - ret[modfile] = result - } + maps.Copy(ret, deps.snapshot.Vulnerabilities()) return nil }) return ret, err @@ -1677,7 +1669,6 @@ func (c *commandHandler) DiagnoseFiles(ctx context.Context, args command.Diagnos var wg sync.WaitGroup for snapshot := range snapshots { - snapshot := snapshot wg.Add(1) go func() { defer wg.Done() diff --git a/gopls/internal/server/diagnostics.go b/gopls/internal/server/diagnostics.go index 92ca54e226a..dbffc58fd99 100644 --- a/gopls/internal/server/diagnostics.go +++ b/gopls/internal/server/diagnostics.go @@ -128,7 +128,6 @@ func (s *server) diagnoseChangedViews(ctx context.Context, modID uint64, lastCha // Diagnose views concurrently. 
var wg sync.WaitGroup for _, v := range needsDiagnosis { - v := v snapshot, release, err := v.Snapshot() if err != nil { s.modificationMu.Lock() diff --git a/gopls/internal/server/prompt.go b/gopls/internal/server/prompt.go index 37f591487a6..f8895358942 100644 --- a/gopls/internal/server/prompt.go +++ b/gopls/internal/server/prompt.go @@ -283,7 +283,7 @@ func (s *server) maybePromptForTelemetry(ctx context.Context, enabled bool) { attempts++ } - pendingContent := []byte(fmt.Sprintf("%s %d %d %d", state, attempts, creationTime, token)) + pendingContent := fmt.Appendf(nil, "%s %d %d %d", state, attempts, creationTime, token) if err := os.WriteFile(promptFile, pendingContent, 0666); err != nil { errorf("writing pending state: %v", err) return @@ -351,7 +351,7 @@ Would you like to enable Go telemetry? message(protocol.Error, fmt.Sprintf("Unrecognized response %q", item.Title)) } } - resultContent := []byte(fmt.Sprintf("%s %d %d %d", result, attempts, creationTime, token)) + resultContent := fmt.Appendf(nil, "%s %d %d %d", result, attempts, creationTime, token) if err := os.WriteFile(promptFile, resultContent, 0666); err != nil { errorf("error writing result state to prompt file: %v", err) } diff --git a/gopls/internal/server/prompt_test.go b/gopls/internal/server/prompt_test.go index f4484cb6437..6af5b98eab7 100644 --- a/gopls/internal/server/prompt_test.go +++ b/gopls/internal/server/prompt_test.go @@ -27,7 +27,6 @@ func TestAcquireFileLock(t *testing.T) { var wg sync.WaitGroup for i := range releasers { - i := i wg.Add(1) go func() { defer wg.Done() diff --git a/gopls/internal/settings/vet_test.go b/gopls/internal/settings/vet_test.go index 56daf678c43..f70b72e2151 100644 --- a/gopls/internal/settings/vet_test.go +++ b/gopls/internal/settings/vet_test.go @@ -41,7 +41,7 @@ func TestVetSuite(t *testing.T) { out := fmt.Sprint(cmd.Stdout) _, out, _ = strings.Cut(out, "Registered analyzers:\n\n") out, _, _ = strings.Cut(out, "\n\n") - for _, line := range strings.Split(out, "\n") { + for line := range strings.SplitSeq(out, "\n") { name := strings.Fields(line)[0] if !goplsAnalyzers[name] { t.Errorf("gopls lacks vet analyzer %q", name) diff --git a/gopls/internal/telemetry/cmd/stacks/stacks.go b/gopls/internal/telemetry/cmd/stacks/stacks.go index f8caabd67e6..cb0a21b4ec2 100644 --- a/gopls/internal/telemetry/cmd/stacks/stacks.go +++ b/gopls/internal/telemetry/cmd/stacks/stacks.go @@ -741,7 +741,7 @@ func newIssue(pcfg ProgramConfig, stack, id, jsonURL string, counts map[Info]int // lines around the PC in this symbol. var symbol string outer: - for _, line := range strings.Split(stack, "\n") { + for line := range strings.SplitSeq(stack, "\n") { for _, s := range pcfg.IgnoreSymbolContains { if strings.Contains(line, s) { continue outer // not interesting @@ -814,7 +814,7 @@ func writeStackComment(body *bytes.Buffer, stack, id string, jsonURL string, cou } // Parse the stack and get the symbol names out. 
- for _, frame := range strings.Split(stack, "\n") { + for frame := range strings.SplitSeq(stack, "\n") { if url := frameURL(pclntab, info, frame); url != "" { fmt.Fprintf(body, "- [`%s`](%s)\n", frame, url) } else { diff --git a/gopls/internal/template/highlight.go b/gopls/internal/template/highlight.go index 39812cfd0ba..c6b0c0f778e 100644 --- a/gopls/internal/template/highlight.go +++ b/gopls/internal/template/highlight.go @@ -70,7 +70,7 @@ func markWordInToken(p *Parsed, wordAt string) ([]protocol.DocumentHighlight, er } for _, tok := range p.tokens { got := pat.FindAllIndex(p.buf[tok.Start:tok.End], -1) - for i := 0; i < len(got); i++ { + for i := range got { ans = append(ans, protocol.DocumentHighlight{ Range: p.Range(got[i][0], got[i][1]-got[i][0]), Kind: protocol.Text, diff --git a/gopls/internal/test/integration/bench/codeaction_test.go b/gopls/internal/test/integration/bench/codeaction_test.go index 679f2d4cf3d..4bba9e6f317 100644 --- a/gopls/internal/test/integration/bench/codeaction_test.go +++ b/gopls/internal/test/integration/bench/codeaction_test.go @@ -28,7 +28,7 @@ func BenchmarkCodeAction(b *testing.B) { defer stopAndRecord() } - for i := 0; i < b.N; i++ { + for b.Loop() { env.CodeActionForFile(test.file, nil) } }) @@ -52,7 +52,7 @@ func BenchmarkCodeActionFollowingEdit(b *testing.B) { defer stopAndRecord() } - for i := 0; i < b.N; i++ { + for b.Loop() { edits := atomic.AddInt64(&editID, 1) env.EditBuffer(test.file, protocol.TextEdit{ Range: protocol.Range{ diff --git a/gopls/internal/test/integration/bench/completion_test.go b/gopls/internal/test/integration/bench/completion_test.go index 48ecf0cefd6..2140e30d123 100644 --- a/gopls/internal/test/integration/bench/completion_test.go +++ b/gopls/internal/test/integration/bench/completion_test.go @@ -53,7 +53,7 @@ func benchmarkCompletion(options completionBenchOptions, b *testing.B) { defer stopAndRecord() } - for i := 0; i < b.N; i++ { + for b.Loop() { if options.beforeCompletion != nil { options.beforeCompletion(env) } @@ -314,13 +314,11 @@ func runCompletion(b *testing.B, test completionTest, followingEdit, completeUni } } - b.ResetTimer() - if stopAndRecord := startProfileIfSupported(b, env, qualifiedName(test.repo, "completion")); stopAndRecord != nil { defer stopAndRecord() } - for i := 0; i < b.N; i++ { + for b.Loop() { if followingEdit { editPlaceholder() } diff --git a/gopls/internal/test/integration/bench/definition_test.go b/gopls/internal/test/integration/bench/definition_test.go index b703378a27b..e456d5a7c87 100644 --- a/gopls/internal/test/integration/bench/definition_test.go +++ b/gopls/internal/test/integration/bench/definition_test.go @@ -38,7 +38,7 @@ func BenchmarkDefinition(b *testing.B) { defer stopAndRecord() } - for i := 0; i < b.N; i++ { + for b.Loop() { env.GoToDefinition(loc) // pre-warm the query } }) diff --git a/gopls/internal/test/integration/bench/diagnostic_test.go b/gopls/internal/test/integration/bench/diagnostic_test.go index ce8a84d9eb2..6dd00afd5d8 100644 --- a/gopls/internal/test/integration/bench/diagnostic_test.go +++ b/gopls/internal/test/integration/bench/diagnostic_test.go @@ -58,9 +58,7 @@ func BenchmarkDiagnosePackageFiles(b *testing.B) { defer stopAndRecord() } - b.ResetTimer() - - for i := 0; i < b.N; i++ { + for b.Loop() { edit() var wg sync.WaitGroup for _, file := range files { diff --git a/gopls/internal/test/integration/bench/didchange_test.go b/gopls/internal/test/integration/bench/didchange_test.go index b1613bb1b03..aa87a4f9b0e 100644 --- 
a/gopls/internal/test/integration/bench/didchange_test.go +++ b/gopls/internal/test/integration/bench/didchange_test.go @@ -56,7 +56,7 @@ func BenchmarkDidChange(b *testing.B) { } b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { edit() env.Await(env.StartedChange()) } @@ -142,7 +142,7 @@ func runChangeDiagnosticsBenchmark(b *testing.B, test changeTest, save bool, ope if stopAndRecord := startProfileIfSupported(b, env, qualifiedName(test.repo, operation)); stopAndRecord != nil { defer stopAndRecord() } - for i := 0; i < b.N; i++ { + for b.Loop() { edits := atomic.AddInt64(&editID, 1) env.EditBuffer(test.file, protocol.TextEdit{ Range: protocol.Range{ diff --git a/gopls/internal/test/integration/bench/hover_test.go b/gopls/internal/test/integration/bench/hover_test.go index c3b0c6bc0cb..07a60c354f7 100644 --- a/gopls/internal/test/integration/bench/hover_test.go +++ b/gopls/internal/test/integration/bench/hover_test.go @@ -39,7 +39,7 @@ func BenchmarkHover(b *testing.B) { defer stopAndRecord() } - for i := 0; i < b.N; i++ { + for b.Loop() { env.Hover(loc) // pre-warm the query } }) diff --git a/gopls/internal/test/integration/bench/implementations_test.go b/gopls/internal/test/integration/bench/implementations_test.go index b7e08aa3141..0c3acca89b1 100644 --- a/gopls/internal/test/integration/bench/implementations_test.go +++ b/gopls/internal/test/integration/bench/implementations_test.go @@ -36,7 +36,7 @@ func BenchmarkImplementations(b *testing.B) { defer stopAndRecord() } - for i := 0; i < b.N; i++ { + for b.Loop() { env.Implementations(loc) } }) diff --git a/gopls/internal/test/integration/bench/imports_test.go b/gopls/internal/test/integration/bench/imports_test.go index 97419cb10c5..3f47a561681 100644 --- a/gopls/internal/test/integration/bench/imports_test.go +++ b/gopls/internal/test/integration/bench/imports_test.go @@ -29,9 +29,7 @@ func BenchmarkInitialGoimportsScan(b *testing.B) { repo := getRepo(b, "tools") // since this a test of module cache scanning, any repo will do - b.ResetTimer() - - for i := 0; i < b.N; i++ { + for b.Loop() { func() { // Unfortunately we (intentionally) don't support resetting the module // cache scan state, so in order to have an accurate benchmark we must diff --git a/gopls/internal/test/integration/bench/iwl_test.go b/gopls/internal/test/integration/bench/iwl_test.go index 09ccb301a58..0f94b6a3857 100644 --- a/gopls/internal/test/integration/bench/iwl_test.go +++ b/gopls/internal/test/integration/bench/iwl_test.go @@ -41,7 +41,7 @@ func BenchmarkInitialWorkspaceLoad(b *testing.B) { sharedEnv := repo.sharedEnv(b) b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { doIWL(b, sharedEnv.Sandbox.GOPATH(), repo, nil) } }) @@ -61,7 +61,7 @@ func BenchmarkInitialWorkspaceLoadOpenFiles(b *testing.B) { sharedEnv := repo.sharedEnv(b) b.ResetTimer() - for range b.N { + for b.Loop() { doIWL(b, sharedEnv.Sandbox.GOPATH(), repo, []string{t.file}) } }) diff --git a/gopls/internal/test/integration/bench/references_test.go b/gopls/internal/test/integration/bench/references_test.go index aeaba6f5683..7a4152a8b70 100644 --- a/gopls/internal/test/integration/bench/references_test.go +++ b/gopls/internal/test/integration/bench/references_test.go @@ -36,7 +36,7 @@ func BenchmarkReferences(b *testing.B) { defer stopAndRecord() } - for i := 0; i < b.N; i++ { + for b.Loop() { env.References(loc) } }) diff --git a/gopls/internal/test/integration/bench/reload_test.go b/gopls/internal/test/integration/bench/reload_test.go index b93b76f945d..1a40cc5eba1 100644 
--- a/gopls/internal/test/integration/bench/reload_test.go +++ b/gopls/internal/test/integration/bench/reload_test.go @@ -48,7 +48,7 @@ func BenchmarkReload(b *testing.B) { } b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { // Mutate the file. This may result in cache hits, but that's OK: the // goal is to ensure that we don't reload more than just the current // package. diff --git a/gopls/internal/test/integration/bench/rename_test.go b/gopls/internal/test/integration/bench/rename_test.go index ca5ed5f4397..32cbace5faa 100644 --- a/gopls/internal/test/integration/bench/rename_test.go +++ b/gopls/internal/test/integration/bench/rename_test.go @@ -39,7 +39,7 @@ func BenchmarkRename(b *testing.B) { defer stopAndRecord() } - for i := 0; i < b.N; i++ { + for b.Loop() { names++ newName := fmt.Sprintf("%s%d", test.baseName, names) env.Rename(loc, newName) diff --git a/gopls/internal/test/integration/bench/tests_test.go b/gopls/internal/test/integration/bench/tests_test.go index 3bc69ef95e1..77ba88c7156 100644 --- a/gopls/internal/test/integration/bench/tests_test.go +++ b/gopls/internal/test/integration/bench/tests_test.go @@ -75,7 +75,7 @@ func BenchmarkPackagesCommand(b *testing.B) { defer stopAndRecord() } - for i := 0; i < b.N; i++ { + for b.Loop() { executePackagesCmd(b, env, args) } }) diff --git a/gopls/internal/test/integration/bench/typing_test.go b/gopls/internal/test/integration/bench/typing_test.go index 78bd16cef5b..b32e707858f 100644 --- a/gopls/internal/test/integration/bench/typing_test.go +++ b/gopls/internal/test/integration/bench/typing_test.go @@ -41,7 +41,7 @@ func BenchmarkTyping(b *testing.B) { defer stopAndRecord() } ticker := time.NewTicker(delay) - for i := 0; i < b.N; i++ { + for b.Loop() { edits := atomic.AddInt64(&editID, 1) env.EditBuffer(test.file, protocol.TextEdit{ Range: protocol.Range{ diff --git a/gopls/internal/test/integration/bench/workspace_symbols_test.go b/gopls/internal/test/integration/bench/workspace_symbols_test.go index d3e1d207b2d..fb914563191 100644 --- a/gopls/internal/test/integration/bench/workspace_symbols_test.go +++ b/gopls/internal/test/integration/bench/workspace_symbols_test.go @@ -35,7 +35,7 @@ func BenchmarkWorkspaceSymbols(b *testing.B) { defer stopAndRecord() } - for i := 0; i < b.N; i++ { + for b.Loop() { env.Symbol(*symbolQuery) } }) diff --git a/gopls/internal/test/integration/diagnostics/invalidation_test.go b/gopls/internal/test/integration/diagnostics/invalidation_test.go index e8d39c3c38a..0ee23eda003 100644 --- a/gopls/internal/test/integration/diagnostics/invalidation_test.go +++ b/gopls/internal/test/integration/diagnostics/invalidation_test.go @@ -82,7 +82,7 @@ func _() { } msg := d.Diagnostics[0].Message - for i := 0; i < 5; i++ { + for i := range 5 { before := d.Version env.RegexpReplace("main.go", "Irrelevant comment #.", fmt.Sprintf("Irrelevant comment #%d", i)) env.AfterChange( diff --git a/gopls/internal/test/integration/diagnostics/undeclared_test.go b/gopls/internal/test/integration/diagnostics/undeclared_test.go index 5579c0752d7..2b399f52f3c 100644 --- a/gopls/internal/test/integration/diagnostics/undeclared_test.go +++ b/gopls/internal/test/integration/diagnostics/undeclared_test.go @@ -5,6 +5,7 @@ package diagnostics import ( + "slices" "testing" "golang.org/x/tools/gopls/internal/protocol" @@ -34,12 +35,7 @@ func _() int { ` Run(t, src, func(t *testing.T, env *Env) { isUnnecessary := func(diag protocol.Diagnostic) bool { - for _, tag := range diag.Tags { - if tag == protocol.Unnecessary { - return 
true - } - } - return false + return slices.Contains(diag.Tags, protocol.Unnecessary) } // 'x' is undeclared, but still necessary. diff --git a/gopls/internal/test/integration/fake/edit_test.go b/gopls/internal/test/integration/fake/edit_test.go index 0d7ac18c414..f0a44846d31 100644 --- a/gopls/internal/test/integration/fake/edit_test.go +++ b/gopls/internal/test/integration/fake/edit_test.go @@ -79,7 +79,6 @@ func TestApplyEdits(t *testing.T) { } for _, test := range tests { - test := test t.Run(test.label, func(t *testing.T) { got, err := applyEdits(protocol.NewMapper("", []byte(test.content)), test.edits, false) if (err != nil) != test.wantErr { diff --git a/gopls/internal/test/integration/fake/editor.go b/gopls/internal/test/integration/fake/editor.go index bd459decea4..a2dabf61c46 100644 --- a/gopls/internal/test/integration/fake/editor.go +++ b/gopls/internal/test/integration/fake/editor.go @@ -10,6 +10,7 @@ import ( "encoding/json" "errors" "fmt" + "maps" "math/rand/v2" "os" "path" @@ -261,12 +262,8 @@ func (e *Editor) Client() *Client { // makeSettings builds the settings map for use in LSP settings RPCs. func makeSettings(sandbox *Sandbox, config EditorConfig, scopeURI *protocol.URI) map[string]any { env := make(map[string]string) - for k, v := range sandbox.GoEnv() { - env[k] = v - } - for k, v := range config.Env { - env[k] = v - } + maps.Copy(env, sandbox.GoEnv()) + maps.Copy(env, config.Env) for k, v := range env { v = strings.ReplaceAll(v, "$SANDBOX_WORKDIR", sandbox.Workdir.RootURI().Path()) env[k] = v @@ -307,9 +304,7 @@ func makeSettings(sandbox *Sandbox, config EditorConfig, scopeURI *protocol.URI) } } if closestSettings != nil { - for k, v := range closestSettings { - settings[k] = v - } + maps.Copy(settings, closestSettings) } } @@ -443,12 +438,7 @@ func marshalUnmarshal[T any](v any) (T, error) { // HasCommand reports whether the connected server supports the command with the given ID. func (e *Editor) HasCommand(cmd command.Command) bool { - for _, command := range e.serverCapabilities.ExecuteCommandProvider.Commands { - if command == cmd.String() { - return true - } - } - return false + return slices.Contains(e.serverCapabilities.ExecuteCommandProvider.Commands, cmd.String()) } // Examples: https://www.iana.org/assignments/uri-schemes/uri-schemes.xhtml @@ -1179,11 +1169,8 @@ func (e *Editor) ExecuteCommand(ctx context.Context, params *protocol.ExecuteCom var match bool if e.serverCapabilities.ExecuteCommandProvider != nil { // Ensure that this command was actually listed as a supported command. 
- for _, command := range e.serverCapabilities.ExecuteCommandProvider.Commands { - if command == params.Command { - match = true - break - } + if slices.Contains(e.serverCapabilities.ExecuteCommandProvider.Commands, params.Command) { + match = true } } if !match { diff --git a/gopls/internal/test/integration/misc/prompt_test.go b/gopls/internal/test/integration/misc/prompt_test.go index 37cd654b08d..21da1b5853f 100644 --- a/gopls/internal/test/integration/misc/prompt_test.go +++ b/gopls/internal/test/integration/misc/prompt_test.go @@ -429,7 +429,7 @@ func main() { const maxPrompts = 5 // internal prompt limit defined by gopls - for i := 0; i < maxPrompts+1; i++ { + for i := range maxPrompts + 1 { WithOptions( Modes(Default), // no need to run this in all modes EnvVars{ diff --git a/gopls/internal/test/integration/misc/vuln_test.go b/gopls/internal/test/integration/misc/vuln_test.go index 9dad13179af..47f4c6a77b7 100644 --- a/gopls/internal/test/integration/misc/vuln_test.go +++ b/gopls/internal/test/integration/misc/vuln_test.go @@ -912,7 +912,6 @@ func testVulnDiagnostics(t *testing.T, env *Env, pattern string, want vulnDiagEx // Find the diagnostics at loc.start. var diag *protocol.Diagnostic for _, g := range got.Diagnostics { - g := g if g.Range.Start == loc.Range.Start && w.msg == g.Message { modPathDiagnostics = append(modPathDiagnostics, g) diag = &g diff --git a/gopls/internal/test/integration/options.go b/gopls/internal/test/integration/options.go index 5c5cdc19227..176a8a64f24 100644 --- a/gopls/internal/test/integration/options.go +++ b/gopls/internal/test/integration/options.go @@ -5,6 +5,7 @@ package integration import ( + "maps" "strings" "testing" "time" @@ -115,9 +116,7 @@ func (s Settings) set(opts *runConfig) { if opts.editor.Settings == nil { opts.editor.Settings = make(map[string]any) } - for k, v := range s { - opts.editor.Settings[k] = v - } + maps.Copy(opts.editor.Settings, s) } // WorkspaceFolders configures the workdir-relative workspace folders or uri @@ -177,9 +176,7 @@ func (e EnvVars) set(opts *runConfig) { if opts.editor.Env == nil { opts.editor.Env = make(map[string]string) } - for k, v := range e { - opts.editor.Env[k] = v - } + maps.Copy(opts.editor.Env, e) } // FakeGoPackagesDriver configures gopls to run with a fake GOPACKAGESDRIVER diff --git a/gopls/internal/test/integration/runner.go b/gopls/internal/test/integration/runner.go index c4609cb8f91..8fdcc26af59 100644 --- a/gopls/internal/test/integration/runner.go +++ b/gopls/internal/test/integration/runner.go @@ -142,7 +142,6 @@ func (r *Runner) Run(t *testing.T, files string, test TestFunc, opts ...RunOptio } for _, tc := range tests { - tc := tc config := defaultConfig() for _, opt := range opts { opt.set(&config) diff --git a/gopls/internal/test/integration/workspace/adhoc_test.go b/gopls/internal/test/integration/workspace/adhoc_test.go index 3d451dd5f08..717e881f815 100644 --- a/gopls/internal/test/integration/workspace/adhoc_test.go +++ b/gopls/internal/test/integration/workspace/adhoc_test.go @@ -30,7 +30,7 @@ const Y = X Run(t, files, func(t *testing.T, env *Env) { env.OpenFile("b.go") - for i := 0; i < 10; i++ { + for range 10 { env.RegexpReplace("b.go", `// import "errors"`, `import "errors"`) env.RegexpReplace("b.go", `import "errors"`, `// import "errors"`) env.AfterChange(NoDiagnostics()) diff --git a/gopls/internal/test/marker/marker_test.go b/gopls/internal/test/marker/marker_test.go index d5194e39952..51aba838b1b 100644 --- a/gopls/internal/test/marker/marker_test.go +++ 
b/gopls/internal/test/marker/marker_test.go @@ -113,7 +113,6 @@ func Test(t *testing.T) { cache := cache.New(nil) for _, test := range tests { - test := test t.Run(test.name, func(t *testing.T) { t.Parallel() diff --git a/gopls/internal/util/bug/bug_test.go b/gopls/internal/util/bug/bug_test.go index 8ca2aa5fd64..fa549e7501d 100644 --- a/gopls/internal/util/bug/bug_test.go +++ b/gopls/internal/util/bug/bug_test.go @@ -25,7 +25,7 @@ func TestListBugs(t *testing.T) { wantBugs(t, "bad") - for i := 0; i < 3; i++ { + for i := range 3 { Report(fmt.Sprintf("index:%d", i)) } diff --git a/gopls/internal/util/frob/frob.go b/gopls/internal/util/frob/frob.go index 00ef7c7f95e..e5670a28a95 100644 --- a/gopls/internal/util/frob/frob.go +++ b/gopls/internal/util/frob/frob.go @@ -183,7 +183,7 @@ func (fr *frob) encode(out *writer, v reflect.Value) { case reflect.Array: len := v.Type().Len() elem := fr.elems[0] - for i := 0; i < len; i++ { + for i := range len { elem.encode(out, v.Index(i)) } @@ -196,7 +196,7 @@ func (fr *frob) encode(out *writer, v reflect.Value) { // []byte fast path out.bytes(v.Bytes()) } else { - for i := 0; i < len; i++ { + for i := range len { elem.encode(out, v.Index(i)) } } @@ -298,7 +298,7 @@ func (fr *frob) decode(in *reader, addr reflect.Value) { case reflect.Array: len := fr.t.Len() - for i := 0; i < len; i++ { + for i := range len { fr.elems[0].decode(in, addr.Index(i)) } @@ -312,7 +312,7 @@ func (fr *frob) decode(in *reader, addr reflect.Value) { addr.Set(reflect.AppendSlice(addr, reflect.ValueOf(in.bytes(len)))) } else { addr.Set(reflect.MakeSlice(fr.t, len, len)) - for i := 0; i < len; i++ { + for i := range len { elem.decode(in, addr.Index(i)) } } @@ -326,7 +326,7 @@ func (fr *frob) decode(in *reader, addr reflect.Value) { kfrob, vfrob := fr.elems[0], fr.elems[1] k := reflect.New(kfrob.t).Elem() v := reflect.New(vfrob.t).Elem() - for i := 0; i < len; i++ { + for range len { k.SetZero() v.SetZero() kfrob.decode(in, k) diff --git a/gopls/internal/util/lru/lru_test.go b/gopls/internal/util/lru/lru_test.go index bf96e8d31b7..2146ef00458 100644 --- a/gopls/internal/util/lru/lru_test.go +++ b/gopls/internal/util/lru/lru_test.go @@ -122,7 +122,6 @@ func TestConcurrency(t *testing.T) { // there is no third possibility. var group errgroup.Group for i := range values { - i := i v := values[i][:] group.Go(func() error { cache.Set(key, v, len(v)) diff --git a/gopls/internal/util/persistent/map_test.go b/gopls/internal/util/persistent/map_test.go index 88dced2a85f..09482a11f81 100644 --- a/gopls/internal/util/persistent/map_test.go +++ b/gopls/internal/util/persistent/map_test.go @@ -6,6 +6,7 @@ package persistent import ( "fmt" + "maps" "math/rand" "reflect" "sync/atomic" @@ -128,7 +129,7 @@ func TestRandomMap(t *testing.T) { } keys := make([]int, 0, 1000) - for i := 0; i < 1000; i++ { + for i := range 1000 { key := rand.Intn(10000) m.set(t, key, key) keys = append(keys, key) @@ -282,9 +283,7 @@ func (vm *validatedMap) setAll(t *testing.T, other *validatedMap) { // Note: this is buggy because we are not updating vm.clock, vm.deleted, or // vm.seen. 
- for key, value := range other.expected { - vm.expected[key] = value - } + maps.Copy(vm.expected, other.expected) vm.validate(t) } @@ -327,9 +326,7 @@ func (vm *validatedMap) remove(t *testing.T, key int) { func (vm *validatedMap) clone() *validatedMap { expected := make(map[int]int, len(vm.expected)) - for key, value := range vm.expected { - expected[key] = value - } + maps.Copy(expected, vm.expected) return &validatedMap{ impl: vm.impl.Clone(), diff --git a/gopls/internal/vulncheck/types.go b/gopls/internal/vulncheck/types.go index 450cd961797..e2be9fc5f72 100644 --- a/gopls/internal/vulncheck/types.go +++ b/gopls/internal/vulncheck/types.go @@ -35,7 +35,7 @@ type Result struct { // AsOf describes when this Result was computed using govulncheck. // It is valid only with the govulncheck analysis mode. - AsOf time.Time `json:",omitempty"` + AsOf time.Time } type AnalysisMode string diff --git a/gopls/internal/vulncheck/vulntest/db.go b/gopls/internal/vulncheck/vulntest/db.go index ee2a6923264..9a5c054520d 100644 --- a/gopls/internal/vulncheck/vulntest/db.go +++ b/gopls/internal/vulncheck/vulntest/db.go @@ -12,6 +12,7 @@ import ( "fmt" "os" "path/filepath" + "slices" "sort" "strings" "time" @@ -199,7 +200,7 @@ func AffectedRanges(versions []VersionRange) []osv.Range { func toOSVPackages(pkgs []*Package) (imps []osv.Package) { for _, p := range pkgs { - syms := append([]string{}, p.Symbols...) + syms := slices.Clone(p.Symbols) syms = append(syms, p.DerivedSymbols...) sort.Strings(syms) imps = append(imps, osv.Package{ From d3f8716f1a40f0c58c1c83fa91ccc53362d97ccf Mon Sep 17 00:00:00 2001 From: xieyuschen Date: Thu, 24 Apr 2025 23:44:07 -0600 Subject: [PATCH 241/270] gopls/internal/golang/completion: offer return with whitespace Currently, gopls offers a return completion under a func decl as 'return|' and the '|' is the cursor place. Gopls could do more to predict whether a whitespace after 'return' is helpful in current function declaration scope so users could avoid typing a whitespace by themselves after accepting return completion. For example, gopls could offer 'return ' for functions without any parameter lists such as 'func () int' and 'func () (int, string)'. 
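As a sketch of the intended behavior (the functions f and g below are illustrative, not part of this CL; the cases mirror the new keywords.txt test data added further down):

    func f() (int, string) {
        r // completion offers "return " (with a trailing space):
          // both results are unnamed, so a value must follow the keyword.
    }

    func g() (err error) {
        r // completion keeps offering plain "return":
          // the result is named, so a bare return is valid here.
    }
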
This CL tackles this problem, and for function 'func () int {': - before: offer 'return|' - after: offer 'return |' Change-Id: Iaa2d594b80333b48a70632e5d0c837c7efd2e09d Reviewed-on: https://go-review.googlesource.com/c/tools/+/667736 LUCI-TryBot-Result: Go LUCI Auto-Submit: Alan Donovan Reviewed-by: Alan Donovan Reviewed-by: Junyang Shao --- gopls/go.mod | 2 +- gopls/go.sum | 6 -- gopls/internal/golang/completion/keywords.go | 27 +++++++- .../marker/testdata/completion/keywords.txt | 69 +++++++++++++++++++ 4 files changed, 96 insertions(+), 8 deletions(-) diff --git a/gopls/go.mod b/gopls/go.mod index 5f24d3ad79b..7b3772bfab4 100644 --- a/gopls/go.mod +++ b/gopls/go.mod @@ -3,6 +3,7 @@ module golang.org/x/tools/gopls go 1.24.2 require ( + github.com/fatih/gomodifytags v1.17.1-0.20250423142747-f3939df9aa3c github.com/google/go-cmp v0.6.0 github.com/jba/templatecheck v0.7.1 golang.org/x/mod v0.24.0 @@ -21,7 +22,6 @@ require ( require ( github.com/BurntSushi/toml v1.4.1-0.20240526193622-a339e1f7089c // indirect github.com/fatih/camelcase v1.0.0 // indirect - github.com/fatih/gomodifytags v1.17.1-0.20250423142747-f3939df9aa3c // indirect github.com/fatih/structtag v1.2.0 // indirect github.com/google/safehtml v0.1.0 // indirect golang.org/x/exp/typeparams v0.0.0-20250218142911-aa4b98e5adaa // indirect diff --git a/gopls/go.sum b/gopls/go.sum index 01231ae5d8a..519e4e79f81 100644 --- a/gopls/go.sum +++ b/gopls/go.sum @@ -2,10 +2,6 @@ github.com/BurntSushi/toml v1.4.1-0.20240526193622-a339e1f7089c h1:pxW6RcqyfI9/k github.com/BurntSushi/toml v1.4.1-0.20240526193622-a339e1f7089c/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho= github.com/fatih/camelcase v1.0.0 h1:hxNvNX/xYBp0ovncs8WyWZrOrpBNub/JfaMvbURyft8= github.com/fatih/camelcase v1.0.0/go.mod h1:yN2Sb0lFhZJUdVvtELVWefmrXpuZESvPmqwoZc+/fpc= -github.com/fatih/gomodifytags v1.16.0 h1:B65npXIXSk44F6c1hZGE1NazSnt+eXvtdEOG2Uy+QdU= -github.com/fatih/gomodifytags v1.16.0/go.mod h1:TbUyEjH1Zo0GkJd2Q52oVYqYcJ0eGNqG8bsiOb75P9c= -github.com/fatih/gomodifytags v1.17.1-0.20250325171527-8c663b1c0765 h1:T+oCz1SRpqkn4meT0PiAX5vM8HcESrWvsAzyvy0Vdh0= -github.com/fatih/gomodifytags v1.17.1-0.20250325171527-8c663b1c0765/go.mod h1:YVLagR57bBxMai8IAEc7V4E/MWUYi0oUutLrZcTcnI8= github.com/fatih/gomodifytags v1.17.1-0.20250423142747-f3939df9aa3c h1:dDSgAjoOMp8da3egfz0t2S+t8RGOpEmEXZubcGuc0Bg= github.com/fatih/gomodifytags v1.17.1-0.20250423142747-f3939df9aa3c/go.mod h1:YVLagR57bBxMai8IAEc7V4E/MWUYi0oUutLrZcTcnI8= github.com/fatih/structtag v1.2.0 h1:/OdNE99OxoI/PqaW/SuSK9uxxT3f/tcSZgon/ssNSx4= @@ -46,8 +42,6 @@ golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.32.0 h1:s77OFDvIQeibCmezSnk/q6iAfkdiQaJi4VzroCFrN20= golang.org/x/sys v0.32.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= golang.org/x/telemetry v0.0.0-20240521205824-bda55230c457/go.mod h1:pRgIJT+bRLFKnoM1ldnzKoxTIn14Yxz928LQRYYgIN0= -golang.org/x/telemetry v0.0.0-20250220152412-165e2f84edbc h1:HS+G1Mhh2dxM8ObutfYKdjfD7zpkyeP/UxeRnJpIZtQ= -golang.org/x/telemetry v0.0.0-20250220152412-165e2f84edbc/go.mod h1:bDzXkYUaHzz51CtDy5kh/jR4lgPxsdbqC37kp/dzhCc= golang.org/x/telemetry v0.0.0-20250417124945-06ef541f3fa3 h1:RXY2+rSHXvxO2Y+gKrPjYVaEoGOqh3VEXFhnWAt1Irg= golang.org/x/telemetry v0.0.0-20250417124945-06ef541f3fa3/go.mod h1:RoaXAWDwS90j6FxVKwJdBV+0HCU+llrKUGgJaxiKl6M= golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= diff --git a/gopls/internal/golang/completion/keywords.go 
b/gopls/internal/golang/completion/keywords.go index 6b61e101c13..fb1fa1694ce 100644 --- a/gopls/internal/golang/completion/keywords.go +++ b/gopls/internal/golang/completion/keywords.go @@ -131,7 +131,32 @@ func (c *completer) addKeywordCompletions() { // This is a bit weak, functions allow for many keywords case *ast.FuncDecl: if node.Body != nil && c.pos > node.Body.Lbrace { - c.addKeywordItems(seen, stdScore, DEFER, RETURN, FOR, GO, SWITCH, SELECT, IF, ELSE, VAR, CONST, GOTO, TYPE) + // requireReturnObj checks whether user must provide some objects after return. + requireReturnObj := func(sig *ast.FuncType) bool { + results := sig.Results + if results == nil || results.List == nil { + return false // nothing to return + } + // If any result is named, allow a bare return. + for _, r := range results.List { + for _, name := range r.Names { + if name.Name != "_" { + return false + } + } + } + return true + } + ret := RETURN + if requireReturnObj(node.Type) { + // as user must return something, we offer a space after return. + // function literal inside a function will be affected by outer function, + // but 'go fmt' will help to remove the ending space. + // the benefit is greater than introducing an unncessary space. + ret += " " + } + + c.addKeywordItems(seen, stdScore, DEFER, ret, FOR, GO, SWITCH, SELECT, IF, ELSE, VAR, CONST, GOTO, TYPE) } } } diff --git a/gopls/internal/test/marker/testdata/completion/keywords.txt b/gopls/internal/test/marker/testdata/completion/keywords.txt index 3c69f2cd2ef..86bc1a31e76 100644 --- a/gopls/internal/test/marker/testdata/completion/keywords.txt +++ b/gopls/internal/test/marker/testdata/completion/keywords.txt @@ -189,3 +189,72 @@ func _() { d //@complete(" //", default, defaultVar, defer) } } + +-- return_different_func.go -- +package keywords + +/* return */ //@item(returnWithSpace, "return ", "", "keyword") + + +func _ () int { + r //@complete(" //", returnWithSpace) +} + +func _ () (int, int) { + r //@complete(" //", returnWithSpace) +} + +func _ () (_ int) { + r //@complete(" //", returnWithSpace) +} + +func _ () (_ int) { + r //@complete(" //", returnWithSpace) +} + +func _ () (_, _ int) { + r //@complete(" //", returnWithSpace) +} + +func _ () (_, a int) { + r //@complete(" //", return) +} + +func _ () { + r //@complete(" //", return) +} + +func _ () (a int) { + r //@complete(" //", return) +} + +func _ () (a, b int) { + r //@complete(" //", return) +} + +func _ () (a, b int, c string) { + r //@complete(" //", return) +} + +func _ () (a int) { + _ = func (){ + r //@complete(" //", return) + } + return +} + +func _ () int { + _ = func () (a int) { + // func lit will be affected by outer function. + r //@complete(" //", returnWithSpace) + } + return +} + +func _ () { + _ = func () int { + // func lit will be affected by outer function. + r //@complete(" //", return) + } + return +} From 4ec26d68b3c042c274fa5dcc633cb014846e2dd9 Mon Sep 17 00:00:00 2001 From: Madeline Kalil Date: Thu, 24 Apr 2025 13:30:10 -0400 Subject: [PATCH 242/270] gopls/internal/server: add modify tags code action docs Add documentation about the new add and remove tags code actions to the release notes and doc/features. Also, extract part of the ModifyTags command handler to a new function in the golang package. 
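As a rough sketch of the documented behavior (the Info struct is the illustrative example used in the new release notes below), the "Add struct tags" action rewrites

    type Info struct {
        LinkTarget string
    }

to

    type Info struct {
        LinkTarget string `json:"link_target"`
    }

and the "Remove struct tags" action clears those tags again for the selected fields.
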
Change-Id: I5e67d283f22d217d90edf23336b7cfe8d4cdf3c7 Reviewed-on: https://go-review.googlesource.com/c/tools/+/667856 TryBot-Bypass: Madeline Kalil Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI --- gopls/doc/features/transformation.md | 21 ++++++- gopls/doc/release/v0.19.0.md | 17 +++++- gopls/internal/golang/modify_tags.go | 84 ++++++++++++++++++++++++++++ gopls/internal/server/command.go | 57 +------------------ 4 files changed, 121 insertions(+), 58 deletions(-) create mode 100644 gopls/internal/golang/modify_tags.go diff --git a/gopls/doc/features/transformation.md b/gopls/doc/features/transformation.md index 1d7c0fa14be..91b6c46b74d 100644 --- a/gopls/doc/features/transformation.md +++ b/gopls/doc/features/transformation.md @@ -79,15 +79,17 @@ Gopls supports the following code actions: - [`refactor.extract.variable`](#extract) - [`refactor.extract.variable-all`](#extract) - [`refactor.inline.call`](#refactor.inline.call) +- [`refactor.rewrite.addTags`](#refactor.rewrite.addTags) - [`refactor.rewrite.changeQuote`](#refactor.rewrite.changeQuote) - [`refactor.rewrite.fillStruct`](#refactor.rewrite.fillStruct) - [`refactor.rewrite.fillSwitch`](#refactor.rewrite.fillSwitch) - [`refactor.rewrite.invertIf`](#refactor.rewrite.invertIf) - [`refactor.rewrite.joinLines`](#refactor.rewrite.joinLines) -- [`refactor.rewrite.removeUnusedParam`](#refactor.rewrite.removeUnusedParam) -- [`refactor.rewrite.splitLines`](#refactor.rewrite.splitLines) - [`refactor.rewrite.moveParamLeft`](#refactor.rewrite.moveParamLeft) - [`refactor.rewrite.moveParamRight`](#refactor.rewrite.moveParamRight) +- [`refactor.rewrite.removeTags`](#refactor.rewrite.removeTags) +- [`refactor.rewrite.removeUnusedParam`](#refactor.rewrite.removeUnusedParam) +- [`refactor.rewrite.splitLines`](#refactor.rewrite.splitLines) Gopls reports some code actions twice, with two different kinds, so that they appear in multiple UI elements: simplifications, @@ -842,3 +844,18 @@ When the cursor is on a dot import gopls can offer the "Eliminate dot import" code action, which removes the dot from the import and qualifies uses of the package throughout the file. This code action is offered only if each use of the package can be qualified without collisions with existing names. + + +### `refactor.rewrite.addTags`: Add struct tags + +When the cursor is within a struct, this code action adds to each field a `json` +struct tag that specifies its JSON name, using lower case with underscores +(e.g. LinkTarget becomes link_target). For a highlighted selection, it only +adds tags on selected fields. + + +### `refactor.rewrite.removeTags`: Remove struct tags + +When the cursor is within a struct, this code action clears struct tags on +all struct fields. For a highlighted selection, it removes tags from only +the selected fields. diff --git a/gopls/doc/release/v0.19.0.md b/gopls/doc/release/v0.19.0.md index d99f9af1dfe..b8f53a72304 100644 --- a/gopls/doc/release/v0.19.0.md +++ b/gopls/doc/release/v0.19.0.md @@ -120,4 +120,19 @@ with its name. Gopls now automatically adds the appropriate `package` clause to newly created Go files, so that you can immediately get started writing the interesting part. -It requires client support for `workspace/didCreateFiles` \ No newline at end of file +It requires client support for `workspace/didCreateFiles` + +## Add/remove tags from struct fields + +Gopls now provides two new code actions, available on an entire struct +or some of its fields, that allow you to add and remove struct tags. 
+It adds only 'json' tags with a snakecase naming format, or clears all +tags within the selection. + +Add tags example: +```go +type Info struct { + LinkTarget string -> LinkTarget string `json:"link_target"` + ... +} +``` \ No newline at end of file diff --git a/gopls/internal/golang/modify_tags.go b/gopls/internal/golang/modify_tags.go new file mode 100644 index 00000000000..46748c841d1 --- /dev/null +++ b/gopls/internal/golang/modify_tags.go @@ -0,0 +1,84 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package golang + +import ( + "bytes" + "context" + "fmt" + "go/ast" + "go/format" + "go/token" + + "github.com/fatih/gomodifytags/modifytags" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/cache/parsego" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/protocol/command" + "golang.org/x/tools/gopls/internal/util/moreiters" + internalastutil "golang.org/x/tools/internal/astutil" + "golang.org/x/tools/internal/astutil/cursor" + "golang.org/x/tools/internal/diff" + "golang.org/x/tools/internal/tokeninternal" +) + +// Finds the start and end positions of the enclosing struct or returns an error if none is found. +func findEnclosingStruct(c cursor.Cursor) (token.Pos, token.Pos, error) { + for cur := range c.Enclosing((*ast.StructType)(nil)) { + return cur.Node().Pos(), cur.Node().End(), nil + } + return token.NoPos, token.NoPos, fmt.Errorf("no struct enclosing the given positions") +} + +// ModifyTags applies the given struct tag modifications to the specified struct. +func ModifyTags(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, args command.ModifyTagsArgs, m *modifytags.Modification) ([]protocol.DocumentChange, error) { + pgf, err := snapshot.ParseGo(ctx, fh, parsego.Full) + if err != nil { + return nil, fmt.Errorf("error fetching package file: %v", err) + } + start, end, err := pgf.RangePos(args.Range) + if err != nil { + return nil, fmt.Errorf("error getting position information: %v", err) + } + // If the cursor is at a point and not a selection, we should use the entire enclosing struct. + if start == end { + cur, ok := pgf.Cursor.FindByPos(start, end) + if !ok { + return nil, fmt.Errorf("error finding start and end positions: %v", err) + } + curStruct, ok := moreiters.First(cur.Enclosing((*ast.StructType)(nil))) + if !ok { + return nil, fmt.Errorf("no enclosing struct type") + } + start, end = curStruct.Node().Pos(), curStruct.Node().End() + } + + // Create a copy of the file node in order to avoid race conditions when we modify the node in Apply. + cloned := internalastutil.CloneNode(pgf.File) + fset := tokeninternal.FileSetFor(pgf.Tok) + + if err = m.Apply(fset, cloned, start, end); err != nil { + return nil, fmt.Errorf("could not modify tags: %v", err) + } + + // Construct a list of DocumentChanges based on the diff between the formatted node and the + // original file content. 
+ var after bytes.Buffer + if err := format.Node(&after, fset, cloned); err != nil { + return nil, err + } + edits := diff.Bytes(pgf.Src, after.Bytes()) + if len(edits) == 0 { + return nil, nil + } + textedits, err := protocol.EditsFromDiffEdits(pgf.Mapper, edits) + if err != nil { + return nil, fmt.Errorf("error computing edits for %s: %v", args.URI, err) + } + return []protocol.DocumentChange{ + protocol.DocumentChangeEdit(fh, textedits), + }, nil +} diff --git a/gopls/internal/server/command.go b/gopls/internal/server/command.go index 60c7389d095..a3345d33a1d 100644 --- a/gopls/internal/server/command.go +++ b/gopls/internal/server/command.go @@ -10,9 +10,6 @@ import ( "encoding/json" "errors" "fmt" - "go/ast" - "go/format" - "go/token" "io" "log" "maps" @@ -43,8 +40,6 @@ import ( "golang.org/x/tools/gopls/internal/util/bug" "golang.org/x/tools/gopls/internal/vulncheck" "golang.org/x/tools/gopls/internal/vulncheck/scan" - internalastutil "golang.org/x/tools/internal/astutil" - "golang.org/x/tools/internal/astutil/cursor" "golang.org/x/tools/internal/diff" "golang.org/x/tools/internal/event" "golang.org/x/tools/internal/gocommand" @@ -1818,62 +1813,14 @@ func (c *commandHandler) ModifyTags(ctx context.Context, args command.ModifyTags if err != nil { return err } - pgf, err := deps.snapshot.ParseGo(ctx, fh, parsego.Full) + changes, err := golang.ModifyTags(ctx, deps.snapshot, fh, args, m) if err != nil { - return fmt.Errorf("error fetching package file: %v", err) - } - start, end, err := pgf.RangePos(args.Range) - if err != nil { - return fmt.Errorf("error getting position information: %v", err) - } - // If the cursor is at a point and not a selection, we should use the entire enclosing struct. - if start == end { - cur, ok := pgf.Cursor.FindByPos(start, end) - if !ok { - return fmt.Errorf("error finding start and end positions: %v", err) - } - start, end, err = findEnclosingStruct(cur) - if err != nil { - return fmt.Errorf("error finding enclosing struct: %v", err) - } - } - - // Create a copy of the file node in order to avoid race conditions when we modify the node in Apply. - cloned := internalastutil.CloneNode(pgf.File) - fset := tokeninternal.FileSetFor(pgf.Tok) - - if err = m.Apply(fset, cloned, start, end); err != nil { - return fmt.Errorf("could not modify tags: %v", err) - } - - // Construct a list of DocumentChanges based on the diff between the formatted node and the - // original file content. - var after bytes.Buffer - if err := format.Node(&after, fset, cloned); err != nil { return err } - edits := diff.Bytes(pgf.Src, after.Bytes()) - if len(edits) == 0 { - return nil - } - textedits, err := protocol.EditsFromDiffEdits(pgf.Mapper, edits) - if err != nil { - return fmt.Errorf("error computing edits for %s: %v", args.URI, err) - } - return applyChanges(ctx, c.s.client, []protocol.DocumentChange{ - protocol.DocumentChangeEdit(fh, textedits), - }) + return applyChanges(ctx, c.s.client, changes) }) } -// Finds the start and end positions of the enclosing struct or returns an error if none is found. 
-func findEnclosingStruct(c cursor.Cursor) (token.Pos, token.Pos, error) { - for cur := range c.Enclosing((*ast.StructType)(nil)) { - return cur.Node().Pos(), cur.Node().End(), nil - } - return token.NoPos, token.NoPos, fmt.Errorf("no struct enclosing the given positions") -} - func parseTransform(input string) (modifytags.Transform, error) { switch input { case "camelcase": From bf81a8d6013efa6320a078f16f4651287f726639 Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Thu, 24 Apr 2025 11:40:50 -0400 Subject: [PATCH 243/270] internal/mcp/internal/jsonschema: schema definition The first of several CLs providing JSON Schema validation. Replace the existing Schema type with one that has all the keywords of the most current version of the JSON Schema spec, called 2020-12. The 2020-12 spec can be represented type-safely in Go with one exception: the "type" keyword can be either a string or a []string. To handle this safely, both Type and Types fields are defined. Custom JSONMarshal/Unmarshal methods handle the conversion. We need the unmarshal anyway because "true" and "false" are valid schemas. Unfortunately, the MCP spec uses the older "draft7" version of JSON Schema, but the two are close enough for code generation to work. I confirmed that the protocol generator produces the identical output. Change-Id: I44f6ea2f3a3f8498979946f69dfb7f50afa1acad Reviewed-on: https://go-review.googlesource.com/c/tools/+/667855 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan --- internal/mcp/internal/jsonschema/infer.go | 118 ++++++++ .../mcp/internal/jsonschema/infer_test.go | 70 +++++ internal/mcp/internal/jsonschema/schema.go | 268 +++++++++++------- .../mcp/internal/jsonschema/schema_test.go | 81 ++---- internal/mcp/internal/jsonschema/util.go | 162 +++++++++++ internal/mcp/internal/jsonschema/util_test.go | 39 +++ internal/mcp/internal/protocol/generate.go | 28 +- internal/mcp/mcp_test.go | 7 +- internal/mcp/tool.go | 2 +- 9 files changed, 606 insertions(+), 169 deletions(-) create mode 100644 internal/mcp/internal/jsonschema/infer.go create mode 100644 internal/mcp/internal/jsonschema/infer_test.go create mode 100644 internal/mcp/internal/jsonschema/util.go create mode 100644 internal/mcp/internal/jsonschema/util_test.go diff --git a/internal/mcp/internal/jsonschema/infer.go b/internal/mcp/internal/jsonschema/infer.go new file mode 100644 index 00000000000..2c4e73411b2 --- /dev/null +++ b/internal/mcp/internal/jsonschema/infer.go @@ -0,0 +1,118 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file contains functions that infer a schema from a Go type. + +package jsonschema + +import ( + "fmt" + "reflect" + "strings" +) + +// For constructs a JSON schema object for the given type argument. +// +// It is a convenience for ForType. +func For[T any]() (*Schema, error) { + return ForType(reflect.TypeFor[T]()) +} + +// It returns an error if t contains (possibly recursively) any of the following Go +// types, as they are incompatible with the JSON schema spec. +// - maps with key other than 'string' +// - function types +// - complex numbers +// - unsafe pointers +// +// TODO(rfindley): we could perhaps just skip these incompatible fields. 
+func ForType(t reflect.Type) (*Schema, error) { + return typeSchema(t, make(map[reflect.Type]*Schema)) +} + +func typeSchema(t reflect.Type, seen map[reflect.Type]*Schema) (*Schema, error) { + if t.Kind() == reflect.Pointer { + t = t.Elem() + } + if s := seen[t]; s != nil { + return s, nil + } + var ( + s = new(Schema) + err error + ) + seen[t] = s + + switch t.Kind() { + case reflect.Bool: + s.Type = "boolean" + + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, + reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, + reflect.Uintptr: + s.Type = "integer" + + case reflect.Float32, reflect.Float64: + s.Type = "number" + + case reflect.Interface: + // Unrestricted + + case reflect.Map: + if t.Key().Kind() != reflect.String { + return nil, fmt.Errorf("unsupported map key type %v", t.Key().Kind()) + } + s.Type = "object" + s.AdditionalProperties, err = typeSchema(t.Elem(), seen) + if err != nil { + return nil, fmt.Errorf("computing map value schema: %v", err) + } + + case reflect.Slice, reflect.Array: + s.Type = "array" + s.Items, err = typeSchema(t.Elem(), seen) + if err != nil { + return nil, fmt.Errorf("computing element schema: %v", err) + } + + case reflect.String: + s.Type = "string" + + case reflect.Struct: + s.Type = "object" + // no additional properties are allowed + s.AdditionalProperties = &Schema{Not: &Schema{}} + + for i := range t.NumField() { + field := t.Field(i) + name, ok := jsonName(field) + if !ok { + continue + } + if s.Properties == nil { + s.Properties = make(map[string]*Schema) + } + s.Properties[name], err = typeSchema(field.Type, seen) + if err != nil { + return nil, err + } + } + + default: + return nil, fmt.Errorf("type %v is unsupported by jsonschema", t) + } + return s, nil +} + +func jsonName(f reflect.StructField) (string, bool) { + if !f.IsExported() { + return "", false + } + if tag, ok := f.Tag.Lookup("json"); ok { + if name, _, _ := strings.Cut(tag, ","); name != "" { + return name, name != "-" + } + } + return f.Name, true +} diff --git a/internal/mcp/internal/jsonschema/infer_test.go b/internal/mcp/internal/jsonschema/infer_test.go new file mode 100644 index 00000000000..7f3b5d0abe3 --- /dev/null +++ b/internal/mcp/internal/jsonschema/infer_test.go @@ -0,0 +1,70 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package jsonschema_test + +import ( + "testing" + + "github.com/google/go-cmp/cmp" + "golang.org/x/tools/internal/mcp/internal/jsonschema" +) + +func forType[T any]() *jsonschema.Schema { + s, err := jsonschema.For[T]() + if err != nil { + panic(err) + } + return s +} + +func TestForType(t *testing.T) { + type schema = jsonschema.Schema + tests := []struct { + name string + got *jsonschema.Schema + want *jsonschema.Schema + }{ + {"string", forType[string](), &schema{Type: "string"}}, + {"int", forType[int](), &schema{Type: "integer"}}, + {"int16", forType[int16](), &schema{Type: "integer"}}, + {"uint32", forType[int16](), &schema{Type: "integer"}}, + {"float64", forType[float64](), &schema{Type: "number"}}, + {"bool", forType[bool](), &schema{Type: "boolean"}}, + {"intmap", forType[map[string]int](), &schema{ + Type: "object", + AdditionalProperties: &schema{Type: "integer"}, + }}, + {"anymap", forType[map[string]any](), &schema{ + Type: "object", + AdditionalProperties: &schema{}, + }}, + {"struct", forType[struct { + F int `json:"f"` + G []float64 + P *bool + Skip string `json:"-"` + NoSkip string `json:",omitempty"` + unexported float64 + unexported2 int `json:"No"` + }](), &schema{ + Type: "object", + Properties: map[string]*schema{ + "f": {Type: "integer"}, + "G": {Type: "array", Items: &schema{Type: "number"}}, + "P": {Type: "boolean"}, + "NoSkip": {Type: "string"}, + }, + AdditionalProperties: &jsonschema.Schema{Not: &jsonschema.Schema{}}, + }}, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + if diff := cmp.Diff(test.want, test.got); diff != "" { + t.Errorf("ForType mismatch (-want +got):\n%s", diff) + } + }) + } +} diff --git a/internal/mcp/internal/jsonschema/schema.go b/internal/mcp/internal/jsonschema/schema.go index 5ed9cbcdcf6..2ee77b77904 100644 --- a/internal/mcp/internal/jsonschema/schema.go +++ b/internal/mcp/internal/jsonschema/schema.go @@ -2,129 +2,193 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. +// Package jsonschema is an implementation of the JSON Schema +// specification: https://json-schema.org. package jsonschema import ( + "bytes" + "encoding/json" + "errors" "fmt" - "reflect" - "strings" ) // A Schema is a JSON schema object. -// -// Right now, Schemas are only used for JSON serialization. In the future, they -// should support validation. +// It corresponds to the 2020-12 draft, as described in +// https://json-schema.org/draft/2020-12. type Schema struct { - Definitions map[string]*Schema `json:"definitions"` - Type any `json:"type,omitempty"` - Ref string `json:"$ref,omitempty"` - Description string `json:"description,omitempty"` - Properties map[string]*Schema `json:"properties,omitempty"` - Required []string `json:"required,omitempty"` - Items *Schema `json:"items,omitempty"` - AdditionalProperties any `json:"additionalProperties,omitempty"` + // core + ID string `json:"$id,omitempty"` + Schema string `json:"$schema,omitempty"` + Ref string `json:"$ref,omitempty"` + Comment string `json:"$comment,omitempty"` + Defs map[string]*Schema `json:"$defs,omitempty"` + // definitions is deprecated but still allowed. It is a synonym for defs. 
+ Definitions map[string]*Schema `json:"definitions,omitempty"` + + Anchor string `json:"$anchor,omitempty"` + DynamicAnchor string `json:"$dynamicAnchor,omitempty"` + DynamicRef string `json:"$dynamicRef,omitempty"` + Vocabulary map[string]bool `json:"$vocabulary,omitempty"` + + // metadata + Title string `json:"title,omitempty"` + Description string `json:"description,omitempty"` + + // validation + // Use Type for a single type, or Types for multiple types; never both. + Type string `json:"-"` + Types []string `json:"-"` + Enum []any `json:"enum,omitempty"` + // Const is *any because a JSON null (Go nil) is a valid value. + Const *any `json:"const,omitempty"` + MultipleOf *float64 `json:"multipleOf,omitempty"` + Minimum *float64 `json:"minimum,omitempty"` + Maximum *float64 `json:"maximum,omitempty"` + ExclusiveMinimum *float64 `json:"exclusiveMinimum,omitempty"` + ExclusiveMaximum *float64 `json:"exclusiveMaximum,omitempty"` + MinLength *float64 `json:"minLength,omitempty"` + MaxLength *float64 `json:"maxLength,omitempty"` + Pattern string `json:"pattern,omitempty"` + + // arrays + PrefixItems []*Schema `json:"prefixItems,omitempty"` + Items *Schema `json:"items,omitempty"` + MinItems *float64 `json:"minItems,omitempty"` + MaxItems *float64 `json:"maxItems,omitempty"` + AdditionalItems *Schema `json:"additionalItems,omitempty"` + UniqueItems bool `json:"uniqueItems,omitempty"` + Contains *Schema `json:"contains,omitempty"` + MinContains *float64 `json:"minContains,omitempty"` + MaxContains *float64 `json:"maxContains,omitempty"` + UnevaluatedItems *Schema `json:"unevaluatedItems,omitempty"` + + // objects + MinProperties *float64 `json:"minProperties,omitempty"` + MaxProperties *float64 `json:"maxProperties,omitempty"` + Required []string `json:"required,omitempty"` + DependentRequired map[string][]string `json:"dependentRequired,omitempty"` + Properties map[string]*Schema `json:"properties,omitempty"` + PatternProperties map[string]*Schema `json:"patternProperties,omitempty"` + AdditionalProperties *Schema `json:"additionalProperties,omitempty"` + PropertyNames *Schema `json:"propertyNames,omitempty"` + UnevaluatedProperties *Schema `json:"unevaluatedProperties,omitempty"` + + // logic + AllOf []*Schema `json:"allOf,omitempty"` + AnyOf []*Schema `json:"anyOf,omitempty"` + OneOf []*Schema `json:"oneOf,omitempty"` + Not *Schema `json:"not,omitempty"` + + // conditional + If *Schema `json:"if,omitempty"` + Then *Schema `json:"then,omitempty"` + Else *Schema `json:"else,omitempty"` + DependentSchemas map[string]*Schema `json:"dependentSchemas,omitempty"` } -// ForType constructs a JSON schema object for the given type argument. -// -// The type T must not contain (possibly recursively) any of the following Go -// types, as they are incompatible with the JSON schema spec. -// - maps with key other than 'string' -// - function types -// - complex numbers -// - unsafe pointers -// -// TODO(rfindley): we could perhaps just skip these incompatible fields. -func ForType[T any]() (*Schema, error) { - return typeSchema(reflect.TypeFor[T](), make(map[reflect.Type]*Schema)) +// String returns a short description of the schema. +func (s *Schema) String() string { + if s.ID != "" { + return s.ID + } + // TODO: return something better, like a JSON Pointer from the base. + return "" } -func typeSchema(t reflect.Type, seen map[reflect.Type]*Schema) (*Schema, error) { - if s := seen[t]; s != nil { - return s, nil +// json returns the schema in json format. 
+func (s *Schema) json() string { + data, err := json.MarshalIndent(s, "", " ") + if err != nil { + return fmt.Sprintf("", err) } - var ( - s = new(Schema) - err error - ) - seen[t] = s - - switch t.Kind() { - case reflect.Bool: - s.Type = "boolean" - - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, - reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: - s.Type = "integer" + return string(data) +} - case reflect.Float32, reflect.Float64: - s.Type = "number" +func (s *Schema) basicChecks() error { + if s.Type != "" && s.Types != nil { + return errors.New("both Type and Types are set; at most one should be") + } + if s.Defs != nil && s.Definitions != nil { + return errors.New("both Defs and Definitions are set; at most one should be") + } + return nil +} - case reflect.Interface: - // Unrestricted +type schemaWithoutMethods Schema // doesn't implement json.{Unm,M}arshaler - case reflect.Map: - if t.Key().Kind() != reflect.String { - return nil, fmt.Errorf("unsupported map key type %v", t.Key().Kind()) - } - s.Type = "object" - valueSchema, err := typeSchema(t.Elem(), seen) - if err != nil { - return nil, fmt.Errorf("computing map value schema: %v", err) - } - s.AdditionalProperties = valueSchema +func (s *Schema) MarshalJSON() ([]byte, error) { + if err := s.basicChecks(); err != nil { + return nil, err + } + // Marshal either Type or Types as "type". + var typ any + switch { + case s.Type != "": + typ = s.Type + case s.Types != nil: + typ = s.Types + } + ms := struct { + Type any `json:"type,omitempty"` + *schemaWithoutMethods + }{ + Type: typ, + schemaWithoutMethods: (*schemaWithoutMethods)(s), + } + return json.Marshal(ms) +} - case reflect.Pointer: - s2, err := typeSchema(t.Elem(), seen) - if err != nil { - return nil, err +func (s *Schema) UnmarshalJSON(data []byte) error { + // A JSON boolean is a valid schema. + var b bool + if err := json.Unmarshal(data, &b); err == nil { + if b { + // true is the empty schema, which validates everything. + *s = Schema{} + } else { + // false is the schema that validates nothing. + *s = Schema{Not: &Schema{}} } - *s = *s2 + return nil + } - case reflect.Slice, reflect.Array: - s.Type = "array" - itemSchema, err := typeSchema(t.Elem(), seen) - if err != nil { - return nil, fmt.Errorf("computing element schema: %v", err) - } - s.Items = itemSchema - - case reflect.String: - s.Type = "string" - - case reflect.Struct: - s.Type = "object" - s.AdditionalProperties = false - - for i := range t.NumField() { - if s.Properties == nil { - s.Properties = make(map[string]*Schema) - } - rfld := t.Field(i) - name, ok := jsonName(rfld) - if !ok { - continue - } - s.Properties[name], err = typeSchema(rfld.Type, seen) - if err != nil { - return nil, err - } + ms := struct { + Type json.RawMessage `json:"type,omitempty"` + Const json.RawMessage `json:"const,omitempty"` + *schemaWithoutMethods + }{ + schemaWithoutMethods: (*schemaWithoutMethods)(s), + } + if err := json.Unmarshal(data, &ms); err != nil { + return err + } + // Unmarshal "type" as either Type or Types. 
+ var err error + if len(ms.Type) > 0 { + switch ms.Type[0] { + case '"': + err = json.Unmarshal(ms.Type, &s.Type) + case '[': + err = json.Unmarshal(ms.Type, &s.Types) + default: + err = fmt.Errorf("invalid type: %q", ms.Type) } - - default: - return nil, fmt.Errorf("type %v is unsupported by jsonschema", t.Kind()) } - return s, nil -} - -func jsonName(f reflect.StructField) (string, bool) { - j, ok := f.Tag.Lookup("json") - if !ok { - return f.Name, f.IsExported() + if err != nil { + return err } - name, _, _ := strings.Cut(j, ",") - if name == "" { - return f.Name, f.IsExported() + // Setting Const to a pointer to null will marshal properly, but won't unmarshal: + // the *any is set to nil, not a pointer to nil. + if len(ms.Const) > 0 { + if bytes.Equal(ms.Const, []byte("null")) { + s.Const = new(any) + } else if err := json.Unmarshal(ms.Const, &s.Const); err != nil { + return err + } } - return name, name != "" && name != "-" + return nil } + +// Ptr returns a pointer to a new variable whose value is x. +func Ptr[T any](x T) *T { return &x } diff --git a/internal/mcp/internal/jsonschema/schema_test.go b/internal/mcp/internal/jsonschema/schema_test.go index 443ef54efd1..62b92d2968d 100644 --- a/internal/mcp/internal/jsonschema/schema_test.go +++ b/internal/mcp/internal/jsonschema/schema_test.go @@ -2,64 +2,39 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -package jsonschema_test +package jsonschema import ( + "encoding/json" "testing" - - "github.com/google/go-cmp/cmp" - "golang.org/x/tools/internal/mcp/internal/jsonschema" ) -func forType[T any]() *jsonschema.Schema { - s, err := jsonschema.ForType[T]() - if err != nil { - panic(err) - } - return s -} - -func TestForType(t *testing.T) { - type schema = jsonschema.Schema - tests := []struct { - name string - got *jsonschema.Schema - want *jsonschema.Schema - }{ - {"string", forType[string](), &schema{Type: "string"}}, - {"int", forType[int](), &schema{Type: "integer"}}, - {"int16", forType[int16](), &schema{Type: "integer"}}, - {"uint32", forType[int16](), &schema{Type: "integer"}}, - {"float64", forType[float64](), &schema{Type: "number"}}, - {"bool", forType[bool](), &schema{Type: "boolean"}}, - {"intmap", forType[map[string]int](), &schema{ - Type: "object", - AdditionalProperties: &schema{Type: "integer"}, - }}, - {"anymap", forType[map[string]any](), &schema{ - Type: "object", - AdditionalProperties: &schema{}, - }}, - {"struct", forType[struct { - F int `json:"f"` - G []float64 - Skip string `json:"-"` - unexported float64 - }](), &schema{ - Type: "object", - Properties: map[string]*schema{ - "f": {Type: "integer"}, - "G": {Type: "array", Items: &schema{Type: "number"}}, - }, - AdditionalProperties: false, - }}, - } - - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - if diff := cmp.Diff(test.want, test.got); diff != "" { - t.Errorf("ForType mismatch (-want +got):\n%s", diff) +func TestMarshal(t *testing.T) { + for _, s := range []*Schema{ + {Type: "null"}, + {Types: []string{"null", "number"}}, + {Type: "string", MinLength: Ptr(20.0)}, + {Minimum: Ptr(20.0)}, + {Items: &Schema{Type: "integer"}}, + {Const: Ptr(any(0))}, + {Const: Ptr(any(nil))}, + {Const: Ptr(any([]int{}))}, + {Const: Ptr(any(map[string]any{}))}, + } { + data, err := json.Marshal(s) + if err != nil { + t.Fatal(err) + } + t.Logf("marshal: %s", data) + var got *Schema + if err := json.Unmarshal(data, &got); err != nil { + t.Fatal(err) + } + if !Equal(got, s) { + t.Errorf("got %+v, 
want %+v", got, s) + if got.Const != nil && s.Const != nil { + t.Logf("Consts: got %#v (%[1]T), want %#v (%[2]T)", *got.Const, *s.Const) } - }) + } } } diff --git a/internal/mcp/internal/jsonschema/util.go b/internal/mcp/internal/jsonschema/util.go new file mode 100644 index 00000000000..08ba79fd69d --- /dev/null +++ b/internal/mcp/internal/jsonschema/util.go @@ -0,0 +1,162 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package jsonschema + +import ( + "bytes" + "encoding/json" + "fmt" + "math/big" + "reflect" +) + +// Equal reports whether two Go values representing JSON values are equal according +// to the JSON Schema spec. +// The values must not contain cycles. +// See https://json-schema.org/draft/2020-12/json-schema-core#section-4.2.2. +// It behaves like reflect.DeepEqual, except that numbers are compared according +// to mathematical equality. +func Equal(x, y any) bool { + return equalValue(reflect.ValueOf(x), reflect.ValueOf(y)) +} + +func equalValue(x, y reflect.Value) bool { + // Copied from src/reflect/deepequal.go, omitting the visited check (because JSON + // values are trees). + if !x.IsValid() || !y.IsValid() { + return x.IsValid() == y.IsValid() + } + + // Treat numbers specially. + rx, ok1 := jsonNumber(x) + ry, ok2 := jsonNumber(y) + if ok1 && ok2 { + return rx.Cmp(ry) == 0 + } + if x.Kind() != y.Kind() { + return false + } + switch x.Kind() { + case reflect.Array: + if x.Len() != y.Len() { + return false + } + for i := range x.Len() { + if !equalValue(x.Index(i), y.Index(i)) { + return false + } + } + return true + case reflect.Slice: + if x.IsNil() != y.IsNil() { + return false + } + if x.Len() != y.Len() { + return false + } + if x.UnsafePointer() == y.UnsafePointer() { + return true + } + // Special case for []byte, which is common. + if x.Type().Elem().Kind() == reflect.Uint8 && x.Type() == y.Type() { + return bytes.Equal(x.Bytes(), y.Bytes()) + } + for i := range x.Len() { + if !equalValue(x.Index(i), y.Index(i)) { + return false + } + } + return true + case reflect.Interface: + if x.IsNil() || y.IsNil() { + return x.IsNil() == y.IsNil() + } + return equalValue(x.Elem(), y.Elem()) + case reflect.Pointer: + if x.UnsafePointer() == y.UnsafePointer() { + return true + } + return equalValue(x.Elem(), y.Elem()) + case reflect.Struct: + t := x.Type() + if t != y.Type() { + return false + } + for i := range t.NumField() { + sf := t.Field(i) + if !sf.IsExported() { + continue + } + if !equalValue(x.FieldByIndex(sf.Index), y.FieldByIndex(sf.Index)) { + return false + } + } + return true + case reflect.Map: + if x.IsNil() != y.IsNil() { + return false + } + if x.Len() != y.Len() { + return false + } + if x.UnsafePointer() == y.UnsafePointer() { + return true + } + iter := x.MapRange() + for iter.Next() { + vx := iter.Value() + vy := y.MapIndex(iter.Key()) + if !vy.IsValid() || !equalValue(vx, vy) { + return false + } + } + return true + case reflect.Func: + if x.Type() != y.Type() { + return false + } + if x.IsNil() && y.IsNil() { + return true + } + panic("cannot compare functions") + case reflect.String: + return x.String() == y.String() + case reflect.Bool: + return x.Bool() == y.Bool() + case reflect.Complex64, reflect.Complex128: + return x.Complex() == y.Complex() + // Ints, uints and floats handled in jsonNumber, at top of function. 
+ default: + panic(fmt.Sprintf("unsupported kind: %s", x.Kind())) + } +} + +// jsonNumber converts a numeric value or a json.Number to a [big.Rat]. +// If v is not a number, it returns nil, false. +func jsonNumber(v reflect.Value) (*big.Rat, bool) { + r := new(big.Rat) + switch { + case !v.IsValid(): + return nil, false + case v.CanInt(): + r.SetInt64(v.Int()) + case v.CanUint(): + r.SetUint64(v.Uint()) + case v.CanFloat(): + r.SetFloat64(v.Float()) + default: + jn, ok := v.Interface().(json.Number) + if !ok { + return nil, false + } + if _, ok := r.SetString(jn.String()); !ok { + // This can fail in rare cases; for example, "1e9999999". + // That is a valid JSON number, since the spec puts no limit on the size + // of the exponent. + return nil, false + } + } + return r, true +} diff --git a/internal/mcp/internal/jsonschema/util_test.go b/internal/mcp/internal/jsonschema/util_test.go new file mode 100644 index 00000000000..965d6834b58 --- /dev/null +++ b/internal/mcp/internal/jsonschema/util_test.go @@ -0,0 +1,39 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package jsonschema + +import "testing" + +func TestEqual(t *testing.T) { + for _, tt := range []struct { + x1, x2 any + want bool + }{ + {0, 1, false}, + {1, 1.0, true}, + {nil, 0, false}, + {"0", 0, false}, + {2.5, 2.5, true}, + {[]int{1, 2}, []float64{1.0, 2.0}, true}, + {[]int(nil), []int{}, false}, + {[]map[string]any(nil), []map[string]any{}, false}, + { + map[string]any{"a": 1, "b": 2.0}, + map[string]any{"a": 1.0, "b": 2}, + true, + }, + } { + check := func(x1, x2 any, want bool) { + t.Helper() + if got := Equal(x1, x2); got != want { + t.Errorf("jsonEqual(%#v, %#v) = %t, want %t", x1, x2, got, want) + } + } + check(tt.x1, tt.x1, true) + check(tt.x2, tt.x2, true) + check(tt.x1, tt.x2, tt.want) + check(tt.x2, tt.x1, tt.want) + } +} diff --git a/internal/mcp/internal/protocol/generate.go b/internal/mcp/internal/protocol/generate.go index d993a2b014f..73f3cc8d332 100644 --- a/internal/mcp/internal/protocol/generate.go +++ b/internal/mcp/internal/protocol/generate.go @@ -23,6 +23,7 @@ import ( "log" "net/http" "os" + "reflect" "slices" "strings" @@ -240,14 +241,16 @@ func writeType(w io.Writer, config *typeConfig, def *jsonschema.Schema, named ma // For types that explicitly allow additional properties, we can either // unmarshal them into a map[string]any, or delay unmarshalling with // json.RawMessage. For now, use json.RawMessage as it defers the choice. - if def.Type == "object" && def.AdditionalProperties != nil && def.AdditionalProperties != false { + if def.Type == "object" && canHaveAdditionalProperties(def) { w.Write([]byte("json.RawMessage")) return nil } - switch typ := def.Type.(type) { - case string: - switch typ { + if def.Type == "" { + // E.g. union types. + fmt.Fprintf(w, "json.RawMessage") + } else { + switch def.Type { case "array": fmt.Fprintf(w, "[]") return writeType(w, nil, def.Items, named) @@ -281,8 +284,7 @@ func writeType(w io.Writer, config *typeConfig, def *jsonschema.Schema, named ma // TODO: use omitzero when available. 
needPointer := !required && (strings.HasPrefix(fieldDef.Ref, "#/definitions/") || - fieldDef.Type == "object" && - (fieldDef.AdditionalProperties == nil || fieldDef.AdditionalProperties == false)) + fieldDef.Type == "object" && !canHaveAdditionalProperties(fieldDef)) if config != nil && config.Fields[export] != nil { r := config.Fields[export] @@ -320,10 +322,6 @@ func writeType(w io.Writer, config *typeConfig, def *jsonschema.Schema, named ma default: fmt.Fprintf(w, "any") } - - default: - // E.g. union types. - fmt.Fprintf(w, "json.RawMessage") } return nil } @@ -363,6 +361,16 @@ func toComment(description string) string { return strings.TrimRight(buf.String(), "\n") } +// The MCP spec improperly uses the absence of the additionalProperties keyword to +// mean that additional properties are not allowed. In fact, it means just the opposite +// (https://json-schema.org/draft-07/draft-handrews-json-schema-validation-01#rfc.section.6.5.6). +// If the MCP spec wants to allow additional properties, it will write "true" or +// an object explicitly. +func canHaveAdditionalProperties(s *jsonschema.Schema) bool { + ap := s.AdditionalProperties + return ap != nil && !reflect.DeepEqual(ap, &jsonschema.Schema{Not: &jsonschema.Schema{}}) +} + // exportName returns an exported name for a Go symbol, based on the given name // in the JSON schema, removing leading underscores and capitalizing. func exportName(s string) string { diff --git a/internal/mcp/mcp_test.go b/internal/mcp/mcp_test.go index 8b6a47fe77d..b5878e5b7fd 100644 --- a/internal/mcp/mcp_test.go +++ b/internal/mcp/mcp_test.go @@ -92,14 +92,14 @@ func TestEndToEnd(t *testing.T) { Properties: map[string]*jsonschema.Schema{ "Name": {Type: "string"}, }, - AdditionalProperties: false, + AdditionalProperties: falseSchema, }, }, { Name: "fail", Description: "just fail", InputSchema: &jsonschema.Schema{ Type: "object", - AdditionalProperties: false, + AdditionalProperties: falseSchema, }, }} if diff := cmp.Diff(wantTools, gotTools); diff != "" { @@ -221,7 +221,6 @@ func TestBatching(t *testing.T) { } } } - } func TestCancellation(t *testing.T) { @@ -253,3 +252,5 @@ func TestCancellation(t *testing.T) { t.Fatal("timeout waiting for cancellation") } } + +var falseSchema = &jsonschema.Schema{Not: &jsonschema.Schema{}} diff --git a/internal/mcp/tool.go b/internal/mcp/tool.go index 9859c04aa1d..7d1fbbb0e9d 100644 --- a/internal/mcp/tool.go +++ b/internal/mcp/tool.go @@ -30,7 +30,7 @@ type Tool struct { // a valid schema, as documented by [jsonschema.ForType]; otherwise, MakeTool // panics. func MakeTool[TReq any](name, description string, handler func(context.Context, *ClientConnection, TReq) ([]Content, error)) *Tool { - schema, err := jsonschema.ForType[TReq]() + schema, err := jsonschema.For[TReq]() if err != nil { panic(err) } From 37f9bd1e6aa395b2b57dfbd0891f3ca1757b1d31 Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Thu, 24 Apr 2025 12:42:10 -0400 Subject: [PATCH 244/270] jsonschema: validate some keywords This CL implements validation for the "type", "enum" and "const" keywords. Validation in its full generality requires that references in the schema be resolved. We put that off for now; it isn't needed for most simple keywords. The JSON Schema folks provide an extensive test suite for validation at https://github.com/json-schema-org/JSON-Schema-Test-Suite. We use it for our tests, copying the necessary files into testdata. 
The files aren't perfectly independent; for example, one test in enum.json requires the "properties" keyword, which we haven't done yet. We will excise those tests via calls to t.Skip as they arise. Change-Id: I568b5c87802a39aecbc58f342931308fc8fb211c Reviewed-on: https://go-review.googlesource.com/c/tools/+/667875 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan --- .../testdata/draft2020-12/README.md | 4 + .../testdata/draft2020-12/const.json | 387 ++++++++++++++ .../testdata/draft2020-12/enum.json | 358 +++++++++++++ .../testdata/draft2020-12/type.json | 501 ++++++++++++++++++ internal/mcp/internal/jsonschema/util.go | 39 ++ internal/mcp/internal/jsonschema/util_test.go | 36 +- internal/mcp/internal/jsonschema/validate.go | 122 +++++ .../mcp/internal/jsonschema/validate_test.go | 77 +++ 8 files changed, 1523 insertions(+), 1 deletion(-) create mode 100644 internal/mcp/internal/jsonschema/testdata/draft2020-12/README.md create mode 100644 internal/mcp/internal/jsonschema/testdata/draft2020-12/const.json create mode 100644 internal/mcp/internal/jsonschema/testdata/draft2020-12/enum.json create mode 100644 internal/mcp/internal/jsonschema/testdata/draft2020-12/type.json create mode 100644 internal/mcp/internal/jsonschema/validate.go create mode 100644 internal/mcp/internal/jsonschema/validate_test.go diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/README.md b/internal/mcp/internal/jsonschema/testdata/draft2020-12/README.md new file mode 100644 index 00000000000..09ae570447c --- /dev/null +++ b/internal/mcp/internal/jsonschema/testdata/draft2020-12/README.md @@ -0,0 +1,4 @@ +# JSON Schema test suite for 2020-12 + +These files were copied from +https://github.com/json-schema-org/JSON-Schema-Test-Suite/tree/83e866b46c9f9e7082fd51e83a61c5f2145a1ab7/tests/draft2020-12. 
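To make the new keyword checks concrete, the following is a minimal in-package sketch of how the validator added in this CL can be driven. It is illustrative only: ResolvedSchema is constructed directly, as the tests do, because reference resolution is deferred, and the example schema and instance values are assumptions rather than code from the patch.

// Sketch (not part of the patch): a _test.go example in package jsonschema
// exercising the "type" and "enum" checks introduced by this CL.
package jsonschema

import "fmt"

func ExampleResolvedSchema_Validate() {
	s := &Schema{Type: "string", Enum: []any{"red", "green", "blue"}}
	rs := &ResolvedSchema{root: s} // direct construction, as in the tests; no $refs involved
	fmt.Println(rs.Validate("green")) // <nil>: a string, and a member of the enum
	fmt.Println(rs.Validate(42))      // error: instance has type "integer", want "string"
}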
diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/const.json b/internal/mcp/internal/jsonschema/testdata/draft2020-12/const.json new file mode 100644 index 00000000000..50be86a0d0a --- /dev/null +++ b/internal/mcp/internal/jsonschema/testdata/draft2020-12/const.json @@ -0,0 +1,387 @@ +[ + { + "description": "const validation", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "const": 2 + }, + "tests": [ + { + "description": "same value is valid", + "data": 2, + "valid": true + }, + { + "description": "another value is invalid", + "data": 5, + "valid": false + }, + { + "description": "another type is invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "const with object", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "const": {"foo": "bar", "baz": "bax"} + }, + "tests": [ + { + "description": "same object is valid", + "data": {"foo": "bar", "baz": "bax"}, + "valid": true + }, + { + "description": "same object with different property order is valid", + "data": {"baz": "bax", "foo": "bar"}, + "valid": true + }, + { + "description": "another object is invalid", + "data": {"foo": "bar"}, + "valid": false + }, + { + "description": "another type is invalid", + "data": [1, 2], + "valid": false + } + ] + }, + { + "description": "const with array", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "const": [{ "foo": "bar" }] + }, + "tests": [ + { + "description": "same array is valid", + "data": [{"foo": "bar"}], + "valid": true + }, + { + "description": "another array item is invalid", + "data": [2], + "valid": false + }, + { + "description": "array with additional items is invalid", + "data": [1, 2, 3], + "valid": false + } + ] + }, + { + "description": "const with null", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "const": null + }, + "tests": [ + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "not null is invalid", + "data": 0, + "valid": false + } + ] + }, + { + "description": "const with false does not match 0", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "const": false + }, + "tests": [ + { + "description": "false is valid", + "data": false, + "valid": true + }, + { + "description": "integer zero is invalid", + "data": 0, + "valid": false + }, + { + "description": "float zero is invalid", + "data": 0.0, + "valid": false + } + ] + }, + { + "description": "const with true does not match 1", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "const": true + }, + "tests": [ + { + "description": "true is valid", + "data": true, + "valid": true + }, + { + "description": "integer one is invalid", + "data": 1, + "valid": false + }, + { + "description": "float one is invalid", + "data": 1.0, + "valid": false + } + ] + }, + { + "description": "const with [false] does not match [0]", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "const": [false] + }, + "tests": [ + { + "description": "[false] is valid", + "data": [false], + "valid": true + }, + { + "description": "[0] is invalid", + "data": [0], + "valid": false + }, + { + "description": "[0.0] is invalid", + "data": [0.0], + "valid": false + } + ] + }, + { + "description": "const with [true] does not match [1]", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "const": [true] + }, + "tests": [ + { + "description": "[true] is valid", 
+ "data": [true], + "valid": true + }, + { + "description": "[1] is invalid", + "data": [1], + "valid": false + }, + { + "description": "[1.0] is invalid", + "data": [1.0], + "valid": false + } + ] + }, + { + "description": "const with {\"a\": false} does not match {\"a\": 0}", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "const": {"a": false} + }, + "tests": [ + { + "description": "{\"a\": false} is valid", + "data": {"a": false}, + "valid": true + }, + { + "description": "{\"a\": 0} is invalid", + "data": {"a": 0}, + "valid": false + }, + { + "description": "{\"a\": 0.0} is invalid", + "data": {"a": 0.0}, + "valid": false + } + ] + }, + { + "description": "const with {\"a\": true} does not match {\"a\": 1}", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "const": {"a": true} + }, + "tests": [ + { + "description": "{\"a\": true} is valid", + "data": {"a": true}, + "valid": true + }, + { + "description": "{\"a\": 1} is invalid", + "data": {"a": 1}, + "valid": false + }, + { + "description": "{\"a\": 1.0} is invalid", + "data": {"a": 1.0}, + "valid": false + } + ] + }, + { + "description": "const with 0 does not match other zero-like types", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "const": 0 + }, + "tests": [ + { + "description": "false is invalid", + "data": false, + "valid": false + }, + { + "description": "integer zero is valid", + "data": 0, + "valid": true + }, + { + "description": "float zero is valid", + "data": 0.0, + "valid": true + }, + { + "description": "empty object is invalid", + "data": {}, + "valid": false + }, + { + "description": "empty array is invalid", + "data": [], + "valid": false + }, + { + "description": "empty string is invalid", + "data": "", + "valid": false + } + ] + }, + { + "description": "const with 1 does not match true", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "const": 1 + }, + "tests": [ + { + "description": "true is invalid", + "data": true, + "valid": false + }, + { + "description": "integer one is valid", + "data": 1, + "valid": true + }, + { + "description": "float one is valid", + "data": 1.0, + "valid": true + } + ] + }, + { + "description": "const with -2.0 matches integer and float types", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "const": -2.0 + }, + "tests": [ + { + "description": "integer -2 is valid", + "data": -2, + "valid": true + }, + { + "description": "integer 2 is invalid", + "data": 2, + "valid": false + }, + { + "description": "float -2.0 is valid", + "data": -2.0, + "valid": true + }, + { + "description": "float 2.0 is invalid", + "data": 2.0, + "valid": false + }, + { + "description": "float -2.00001 is invalid", + "data": -2.00001, + "valid": false + } + ] + }, + { + "description": "float and integers are equal up to 64-bit representation limits", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "const": 9007199254740992 + }, + "tests": [ + { + "description": "integer is valid", + "data": 9007199254740992, + "valid": true + }, + { + "description": "integer minus one is invalid", + "data": 9007199254740991, + "valid": false + }, + { + "description": "float is valid", + "data": 9007199254740992.0, + "valid": true + }, + { + "description": "float minus one is invalid", + "data": 9007199254740991.0, + "valid": false + } + ] + }, + { + "description": "nul characters in strings", + "schema": { + "$schema": 
"https://json-schema.org/draft/2020-12/schema", + "const": "hello\u0000there" + }, + "tests": [ + { + "description": "match string with nul", + "data": "hello\u0000there", + "valid": true + }, + { + "description": "do not match string lacking nul", + "data": "hellothere", + "valid": false + } + ] + } +] diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/enum.json b/internal/mcp/internal/jsonschema/testdata/draft2020-12/enum.json new file mode 100644 index 00000000000..c8f35eacfcd --- /dev/null +++ b/internal/mcp/internal/jsonschema/testdata/draft2020-12/enum.json @@ -0,0 +1,358 @@ +[ + { + "description": "simple enum validation", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "enum": [1, 2, 3] + }, + "tests": [ + { + "description": "one of the enum is valid", + "data": 1, + "valid": true + }, + { + "description": "something else is invalid", + "data": 4, + "valid": false + } + ] + }, + { + "description": "heterogeneous enum validation", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "enum": [6, "foo", [], true, {"foo": 12}] + }, + "tests": [ + { + "description": "one of the enum is valid", + "data": [], + "valid": true + }, + { + "description": "something else is invalid", + "data": null, + "valid": false + }, + { + "description": "objects are deep compared", + "data": {"foo": false}, + "valid": false + }, + { + "description": "valid object matches", + "data": {"foo": 12}, + "valid": true + }, + { + "description": "extra properties in object is invalid", + "data": {"foo": 12, "boo": 42}, + "valid": false + } + ] + }, + { + "description": "heterogeneous enum-with-null validation", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "enum": [6, null] + }, + "tests": [ + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "number is valid", + "data": 6, + "valid": true + }, + { + "description": "something else is invalid", + "data": "test", + "valid": false + } + ] + }, + { + "description": "enums in properties", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type":"object", + "properties": { + "foo": {"enum":["foo"]}, + "bar": {"enum":["bar"]} + }, + "required": ["bar"] + }, + "tests": [ + { + "description": "both properties are valid", + "data": {"foo":"foo", "bar":"bar"}, + "valid": true + }, + { + "description": "wrong foo value", + "data": {"foo":"foot", "bar":"bar"}, + "valid": false + }, + { + "description": "wrong bar value", + "data": {"foo":"foo", "bar":"bart"}, + "valid": false + }, + { + "description": "missing optional property is valid", + "data": {"bar":"bar"}, + "valid": true + }, + { + "description": "missing required property is invalid", + "data": {"foo":"foo"}, + "valid": false + }, + { + "description": "missing all properties is invalid", + "data": {}, + "valid": false + } + ] + }, + { + "description": "enum with escaped characters", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "enum": ["foo\nbar", "foo\rbar"] + }, + "tests": [ + { + "description": "member 1 is valid", + "data": "foo\nbar", + "valid": true + }, + { + "description": "member 2 is valid", + "data": "foo\rbar", + "valid": true + }, + { + "description": "another string is invalid", + "data": "abc", + "valid": false + } + ] + }, + { + "description": "enum with false does not match 0", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "enum": [false] + }, + "tests": [ + { + 
"description": "false is valid", + "data": false, + "valid": true + }, + { + "description": "integer zero is invalid", + "data": 0, + "valid": false + }, + { + "description": "float zero is invalid", + "data": 0.0, + "valid": false + } + ] + }, + { + "description": "enum with [false] does not match [0]", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "enum": [[false]] + }, + "tests": [ + { + "description": "[false] is valid", + "data": [false], + "valid": true + }, + { + "description": "[0] is invalid", + "data": [0], + "valid": false + }, + { + "description": "[0.0] is invalid", + "data": [0.0], + "valid": false + } + ] + }, + { + "description": "enum with true does not match 1", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "enum": [true] + }, + "tests": [ + { + "description": "true is valid", + "data": true, + "valid": true + }, + { + "description": "integer one is invalid", + "data": 1, + "valid": false + }, + { + "description": "float one is invalid", + "data": 1.0, + "valid": false + } + ] + }, + { + "description": "enum with [true] does not match [1]", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "enum": [[true]] + }, + "tests": [ + { + "description": "[true] is valid", + "data": [true], + "valid": true + }, + { + "description": "[1] is invalid", + "data": [1], + "valid": false + }, + { + "description": "[1.0] is invalid", + "data": [1.0], + "valid": false + } + ] + }, + { + "description": "enum with 0 does not match false", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "enum": [0] + }, + "tests": [ + { + "description": "false is invalid", + "data": false, + "valid": false + }, + { + "description": "integer zero is valid", + "data": 0, + "valid": true + }, + { + "description": "float zero is valid", + "data": 0.0, + "valid": true + } + ] + }, + { + "description": "enum with [0] does not match [false]", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "enum": [[0]] + }, + "tests": [ + { + "description": "[false] is invalid", + "data": [false], + "valid": false + }, + { + "description": "[0] is valid", + "data": [0], + "valid": true + }, + { + "description": "[0.0] is valid", + "data": [0.0], + "valid": true + } + ] + }, + { + "description": "enum with 1 does not match true", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "enum": [1] + }, + "tests": [ + { + "description": "true is invalid", + "data": true, + "valid": false + }, + { + "description": "integer one is valid", + "data": 1, + "valid": true + }, + { + "description": "float one is valid", + "data": 1.0, + "valid": true + } + ] + }, + { + "description": "enum with [1] does not match [true]", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "enum": [[1]] + }, + "tests": [ + { + "description": "[true] is invalid", + "data": [true], + "valid": false + }, + { + "description": "[1] is valid", + "data": [1], + "valid": true + }, + { + "description": "[1.0] is valid", + "data": [1.0], + "valid": true + } + ] + }, + { + "description": "nul characters in strings", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "enum": [ "hello\u0000there" ] + }, + "tests": [ + { + "description": "match string with nul", + "data": "hello\u0000there", + "valid": true + }, + { + "description": "do not match string lacking nul", + "data": "hellothere", + "valid": false + } + ] + } +] diff --git 
a/internal/mcp/internal/jsonschema/testdata/draft2020-12/type.json b/internal/mcp/internal/jsonschema/testdata/draft2020-12/type.json new file mode 100644 index 00000000000..2123c408d9a --- /dev/null +++ b/internal/mcp/internal/jsonschema/testdata/draft2020-12/type.json @@ -0,0 +1,501 @@ +[ + { + "description": "integer type matches integers", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "integer" + }, + "tests": [ + { + "description": "an integer is an integer", + "data": 1, + "valid": true + }, + { + "description": "a float with zero fractional part is an integer", + "data": 1.0, + "valid": true + }, + { + "description": "a float is not an integer", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not an integer", + "data": "foo", + "valid": false + }, + { + "description": "a string is still not an integer, even if it looks like one", + "data": "1", + "valid": false + }, + { + "description": "an object is not an integer", + "data": {}, + "valid": false + }, + { + "description": "an array is not an integer", + "data": [], + "valid": false + }, + { + "description": "a boolean is not an integer", + "data": true, + "valid": false + }, + { + "description": "null is not an integer", + "data": null, + "valid": false + } + ] + }, + { + "description": "number type matches numbers", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "number" + }, + "tests": [ + { + "description": "an integer is a number", + "data": 1, + "valid": true + }, + { + "description": "a float with zero fractional part is a number (and an integer)", + "data": 1.0, + "valid": true + }, + { + "description": "a float is a number", + "data": 1.1, + "valid": true + }, + { + "description": "a string is not a number", + "data": "foo", + "valid": false + }, + { + "description": "a string is still not a number, even if it looks like one", + "data": "1", + "valid": false + }, + { + "description": "an object is not a number", + "data": {}, + "valid": false + }, + { + "description": "an array is not a number", + "data": [], + "valid": false + }, + { + "description": "a boolean is not a number", + "data": true, + "valid": false + }, + { + "description": "null is not a number", + "data": null, + "valid": false + } + ] + }, + { + "description": "string type matches strings", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "string" + }, + "tests": [ + { + "description": "1 is not a string", + "data": 1, + "valid": false + }, + { + "description": "a float is not a string", + "data": 1.1, + "valid": false + }, + { + "description": "a string is a string", + "data": "foo", + "valid": true + }, + { + "description": "a string is still a string, even if it looks like a number", + "data": "1", + "valid": true + }, + { + "description": "an empty string is still a string", + "data": "", + "valid": true + }, + { + "description": "an object is not a string", + "data": {}, + "valid": false + }, + { + "description": "an array is not a string", + "data": [], + "valid": false + }, + { + "description": "a boolean is not a string", + "data": true, + "valid": false + }, + { + "description": "null is not a string", + "data": null, + "valid": false + } + ] + }, + { + "description": "object type matches objects", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object" + }, + "tests": [ + { + "description": "an integer is not an object", + "data": 1, + "valid": false + }, + { + "description": 
"a float is not an object", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not an object", + "data": "foo", + "valid": false + }, + { + "description": "an object is an object", + "data": {}, + "valid": true + }, + { + "description": "an array is not an object", + "data": [], + "valid": false + }, + { + "description": "a boolean is not an object", + "data": true, + "valid": false + }, + { + "description": "null is not an object", + "data": null, + "valid": false + } + ] + }, + { + "description": "array type matches arrays", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "array" + }, + "tests": [ + { + "description": "an integer is not an array", + "data": 1, + "valid": false + }, + { + "description": "a float is not an array", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not an array", + "data": "foo", + "valid": false + }, + { + "description": "an object is not an array", + "data": {}, + "valid": false + }, + { + "description": "an array is an array", + "data": [], + "valid": true + }, + { + "description": "a boolean is not an array", + "data": true, + "valid": false + }, + { + "description": "null is not an array", + "data": null, + "valid": false + } + ] + }, + { + "description": "boolean type matches booleans", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "boolean" + }, + "tests": [ + { + "description": "an integer is not a boolean", + "data": 1, + "valid": false + }, + { + "description": "zero is not a boolean", + "data": 0, + "valid": false + }, + { + "description": "a float is not a boolean", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not a boolean", + "data": "foo", + "valid": false + }, + { + "description": "an empty string is not a boolean", + "data": "", + "valid": false + }, + { + "description": "an object is not a boolean", + "data": {}, + "valid": false + }, + { + "description": "an array is not a boolean", + "data": [], + "valid": false + }, + { + "description": "true is a boolean", + "data": true, + "valid": true + }, + { + "description": "false is a boolean", + "data": false, + "valid": true + }, + { + "description": "null is not a boolean", + "data": null, + "valid": false + } + ] + }, + { + "description": "null type matches only the null object", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "null" + }, + "tests": [ + { + "description": "an integer is not null", + "data": 1, + "valid": false + }, + { + "description": "a float is not null", + "data": 1.1, + "valid": false + }, + { + "description": "zero is not null", + "data": 0, + "valid": false + }, + { + "description": "a string is not null", + "data": "foo", + "valid": false + }, + { + "description": "an empty string is not null", + "data": "", + "valid": false + }, + { + "description": "an object is not null", + "data": {}, + "valid": false + }, + { + "description": "an array is not null", + "data": [], + "valid": false + }, + { + "description": "true is not null", + "data": true, + "valid": false + }, + { + "description": "false is not null", + "data": false, + "valid": false + }, + { + "description": "null is null", + "data": null, + "valid": true + } + ] + }, + { + "description": "multiple types can be specified in an array", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": ["integer", "string"] + }, + "tests": [ + { + "description": "an integer is valid", + "data": 1, + "valid": true + 
}, + { + "description": "a string is valid", + "data": "foo", + "valid": true + }, + { + "description": "a float is invalid", + "data": 1.1, + "valid": false + }, + { + "description": "an object is invalid", + "data": {}, + "valid": false + }, + { + "description": "an array is invalid", + "data": [], + "valid": false + }, + { + "description": "a boolean is invalid", + "data": true, + "valid": false + }, + { + "description": "null is invalid", + "data": null, + "valid": false + } + ] + }, + { + "description": "type as array with one item", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": ["string"] + }, + "tests": [ + { + "description": "string is valid", + "data": "foo", + "valid": true + }, + { + "description": "number is invalid", + "data": 123, + "valid": false + } + ] + }, + { + "description": "type: array or object", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": ["array", "object"] + }, + "tests": [ + { + "description": "array is valid", + "data": [1,2,3], + "valid": true + }, + { + "description": "object is valid", + "data": {"foo": 123}, + "valid": true + }, + { + "description": "number is invalid", + "data": 123, + "valid": false + }, + { + "description": "string is invalid", + "data": "foo", + "valid": false + }, + { + "description": "null is invalid", + "data": null, + "valid": false + } + ] + }, + { + "description": "type: array, object or null", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": ["array", "object", "null"] + }, + "tests": [ + { + "description": "array is valid", + "data": [1,2,3], + "valid": true + }, + { + "description": "object is valid", + "data": {"foo": 123}, + "valid": true + }, + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "number is invalid", + "data": 123, + "valid": false + }, + { + "description": "string is invalid", + "data": "foo", + "valid": false + } + ] + } +] diff --git a/internal/mcp/internal/jsonschema/util.go b/internal/mcp/internal/jsonschema/util.go index 08ba79fd69d..0e7bd083ea0 100644 --- a/internal/mcp/internal/jsonschema/util.go +++ b/internal/mcp/internal/jsonschema/util.go @@ -8,6 +8,7 @@ import ( "bytes" "encoding/json" "fmt" + "math" "math/big" "reflect" ) @@ -160,3 +161,41 @@ func jsonNumber(v reflect.Value) (*big.Rat, bool) { } return r, true } + +// jsonType returns a string describing the type of the JSON value, +// as described in the JSON Schema specification: +// https://json-schema.org/draft/2020-12/draft-bhutton-json-schema-validation-01#section-6.1.1. +// It returns "", false if the value is not valid JSON. +func jsonType(v reflect.Value) (string, bool) { + if !v.IsValid() { + // Not v.IsNil(): a nil []any is still a JSON array. 
+ return "null", true + } + if v.CanInt() || v.CanUint() { + return "integer", true + } + if v.CanFloat() { + if _, f := math.Modf(v.Float()); f == 0 { + return "integer", true + } + return "number", true + } + switch v.Kind() { + case reflect.Bool: + return "boolean", true + case reflect.String: + return "string", true + case reflect.Slice, reflect.Array: + return "array", true + case reflect.Map: + return "object", true + default: + return "", false + } +} + +func assert(cond bool, msg string) { + if !cond { + panic(msg) + } +} diff --git a/internal/mcp/internal/jsonschema/util_test.go b/internal/mcp/internal/jsonschema/util_test.go index 965d6834b58..ee79b3396c9 100644 --- a/internal/mcp/internal/jsonschema/util_test.go +++ b/internal/mcp/internal/jsonschema/util_test.go @@ -4,7 +4,11 @@ package jsonschema -import "testing" +import ( + "encoding/json" + "reflect" + "testing" +) func TestEqual(t *testing.T) { for _, tt := range []struct { @@ -37,3 +41,33 @@ func TestEqual(t *testing.T) { check(tt.x2, tt.x1, tt.want) } } + +func TestJSONType(t *testing.T) { + for _, tt := range []struct { + val string + want string + }{ + {`null`, "null"}, + {`0`, "integer"}, + {`0.0`, "integer"}, + {`1e2`, "integer"}, + {`0.1`, "number"}, + {`""`, "string"}, + {`true`, "boolean"}, + {`[]`, "array"}, + {`{}`, "object"}, + } { + var val any + if err := json.Unmarshal([]byte(tt.val), &val); err != nil { + t.Fatal(err) + } + got, ok := jsonType(reflect.ValueOf(val)) + if !ok { + t.Fatalf("jsonType failed on %q", tt.val) + } + if got != tt.want { + t.Errorf("%s: got %q, want %q", tt.val, got, tt.want) + } + + } +} diff --git a/internal/mcp/internal/jsonschema/validate.go b/internal/mcp/internal/jsonschema/validate.go new file mode 100644 index 00000000000..39f633b922f --- /dev/null +++ b/internal/mcp/internal/jsonschema/validate.go @@ -0,0 +1,122 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package jsonschema + +import ( + "fmt" + "reflect" + "slices" + "strings" +) + +// The value of the "$schema" keyword for the version that we can validate. +const draft202012 = "https://json-schema.org/draft/2020-12/schema" + +// Temporary definition of ResolvedSchema. +// The full definition deals with references between schemas, specifically the $id, $anchor and $ref keywords. +// We'll ignore that for now. +type ResolvedSchema struct { + root *Schema +} + +// Validate validates the instance, which must be a JSON value, against the schema. +// It returns nil if validation is successful or an error if it is not. +func (rs *ResolvedSchema) Validate(instance any) error { + if s := rs.root.Schema; s != "" && s != draft202012 { + return fmt.Errorf("cannot validate version %s, only %s", s, draft202012) + } + st := &state{rs: rs} + return st.validate(reflect.ValueOf(instance), st.rs.root, nil) +} + +// state is the state of single call to ResolvedSchema.Validate. +type state struct { + rs *ResolvedSchema + depth int +} + +// validate validates the reflected value of the instance. +// It keeps track of the path within the instance for better error messages. 
+func (st *state) validate(instance reflect.Value, schema *Schema, path []any) (err error) { + defer func() { + if err != nil { + if p := formatPath(path); p != "" { + err = fmt.Errorf("%s: %w", p, err) + } + } + }() + + st.depth++ + defer func() { st.depth-- }() + if st.depth >= 100 { + return fmt.Errorf("max recursion depth of %d reached", st.depth) + } + + // Treat the nil schema like the empty schema, as accepting everything. + if schema == nil { + return nil + } + + // Step through interfaces. + if instance.IsValid() && instance.Kind() == reflect.Interface { + instance = instance.Elem() + } + + // type: https://json-schema.org/draft/2020-12/draft-bhutton-json-schema-validation-01#section-6.1.1 + if schema.Type != "" || schema.Types != nil { + gotType, ok := jsonType(instance) + if !ok { + return fmt.Errorf("%v of type %[1]T is not a valid JSON value", instance) + } + if schema.Type != "" { + // "number" subsumes integers + if !(gotType == schema.Type || + gotType == "integer" && schema.Type == "number") { + return fmt.Errorf("type: %s has type %q, want %q", instance, gotType, schema.Type) + } + } else { + if !(slices.Contains(schema.Types, gotType) || (gotType == "integer" && slices.Contains(schema.Types, "number"))) { + return fmt.Errorf("type: %s has type %q, want one of %q", + instance, gotType, strings.Join(schema.Types, ", ")) + } + } + } + // enum: https://json-schema.org/draft/2020-12/draft-bhutton-json-schema-validation-01#section-6.1.2 + if schema.Enum != nil { + ok := false + for _, e := range schema.Enum { + if equalValue(reflect.ValueOf(e), instance) { + ok = true + break + } + } + if !ok { + return fmt.Errorf("enum: %v does not equal any of: %v", instance, schema.Enum) + } + } + + // const: https://json-schema.org/draft/2020-12/draft-bhutton-json-schema-validation-01#section-6.1.3 + if schema.Const != nil { + if !equalValue(reflect.ValueOf(*schema.Const), instance) { + return fmt.Errorf("const: %v does not equal %v", instance, *schema.Const) + } + } + return nil +} + +func formatPath(path []any) string { + var b strings.Builder + for i, p := range path { + if n, ok := p.(int); ok { + fmt.Fprintf(&b, "[%d]", n) + } else { + if i > 0 { + b.WriteByte('.') + } + fmt.Fprintf(&b, "%q", p) + } + } + return b.String() +} diff --git a/internal/mcp/internal/jsonschema/validate_test.go b/internal/mcp/internal/jsonschema/validate_test.go new file mode 100644 index 00000000000..88baec95283 --- /dev/null +++ b/internal/mcp/internal/jsonschema/validate_test.go @@ -0,0 +1,77 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package jsonschema + +import ( + "encoding/json" + "os" + "path/filepath" + "testing" +) + +// The test for validation uses the official test suite, expressed as a set of JSON files. +// Each file is an array of group objects. + +// A testGroup consists of a schema and some tests on it. +type testGroup struct { + Description string + Schema *Schema + Tests []test +} + +// A test consists of a JSON instance to be validated and the expected result. 
+type test struct { + Description string + Data any + Valid bool +} + +func TestValidate(t *testing.T) { + files, err := filepath.Glob(filepath.FromSlash("testdata/draft2020-12/*.json")) + if err != nil { + t.Fatal(err) + } + if len(files) == 0 { + t.Fatal("no files") + } + for _, file := range files { + base := filepath.Base(file) + t.Run(base, func(t *testing.T) { + f, err := os.Open(file) + if err != nil { + t.Fatal(err) + } + defer f.Close() + dec := json.NewDecoder(f) + var groups []testGroup + if err := dec.Decode(&groups); err != nil { + t.Fatal(err) + } + for _, g := range groups { + if g.Schema.Properties != nil { + t.Skip("schema has properties") + } + rs := &ResolvedSchema{root: g.Schema} + t.Run(g.Description, func(t *testing.T) { + for _, test := range g.Tests { + t.Run(test.Description, func(t *testing.T) { + err = rs.Validate(test.Data) + if err != nil && test.Valid { + t.Errorf("wanted success, but failed with: %v", err) + } + if err == nil && !test.Valid { + t.Error("succeeded but wanted failure") + } + if t.Failed() { + t.Errorf("schema: %s", g.Schema.json()) + t.Fatalf("instance: %v", test.Data) + } + }) + } + }) + } + }) + } +} From ffe579ab576905eda2d49341935157110974d150 Mon Sep 17 00:00:00 2001 From: Rob Findley Date: Fri, 25 Apr 2025 14:30:41 +0000 Subject: [PATCH 245/270] go/packages: remove unexported fields from packages.Config These fields are no longer used in x/tools, and their documentation indicates that they are only for use in x/tools, so we should remove them. Change-Id: Iad42363ff38276da06440e13da5c3d66ebc29dee Reviewed-on: https://go-review.googlesource.com/c/tools/+/668255 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan --- go/packages/golist.go | 2 -- go/packages/packages.go | 14 -------------- internal/packagesinternal/packages.go | 3 --- 3 files changed, 19 deletions(-) diff --git a/go/packages/golist.go b/go/packages/golist.go index 0458b4f9c43..96e43cd8093 100644 --- a/go/packages/golist.go +++ b/go/packages/golist.go @@ -851,8 +851,6 @@ func (state *golistState) cfgInvocation() gocommand.Invocation { cfg := state.cfg return gocommand.Invocation{ BuildFlags: cfg.BuildFlags, - ModFile: cfg.modFile, - ModFlag: cfg.modFlag, CleanEnv: cfg.Env != nil, Env: cfg.Env, Logf: cfg.Logf, diff --git a/go/packages/packages.go b/go/packages/packages.go index 6665a04c173..060ab08efbc 100644 --- a/go/packages/packages.go +++ b/go/packages/packages.go @@ -229,14 +229,6 @@ type Config struct { // consistent package metadata about unsaved files. However, // drivers may vary in their level of support for overlays. Overlay map[string][]byte - - // -- Hidden configuration fields only for use in x/tools -- - - // modFile will be used for -modfile in go command invocations. - modFile string - - // modFlag will be used for -modfile in go command invocations. - modFlag string } // Load loads and returns the Go packages named by the given patterns. 
@@ -569,12 +561,6 @@ func init() { packagesinternal.GetDepsErrors = func(p any) []*packagesinternal.PackageError { return p.(*Package).depsErrors } - packagesinternal.SetModFile = func(config any, value string) { - config.(*Config).modFile = value - } - packagesinternal.SetModFlag = func(config any, value string) { - config.(*Config).modFlag = value - } packagesinternal.TypecheckCgo = int(typecheckCgo) packagesinternal.DepsErrors = int(needInternalDepsErrors) } diff --git a/internal/packagesinternal/packages.go b/internal/packagesinternal/packages.go index 25ebab663ba..73eefa2a7d0 100644 --- a/internal/packagesinternal/packages.go +++ b/internal/packagesinternal/packages.go @@ -15,6 +15,3 @@ type PackageError struct { var TypecheckCgo int var DepsErrors int // must be set as a LoadMode to call GetDepsErrors - -var SetModFlag = func(config any, value string) {} -var SetModFile = func(config any, value string) {} From 26f40ac5bcfd7ab5c4f016a6c4f912e48fa5d48c Mon Sep 17 00:00:00 2001 From: Rob Findley Date: Fri, 25 Apr 2025 20:44:39 +0000 Subject: [PATCH 246/270] internal/mcp: clean up SSE server shutdown. Add a bit more theory around SSE transport shutdown, and some documentation of the SSE transport protocol itself. Additionally, remove a TODO to avoid the use of channel buffering for message queues in http transports; instead, always write to message buffers in a select statement containing a read from the done channel. The bounded size of message buffers then serves to create backpressure for an overloaded server. Change-Id: I5701c5b95a5ae73d796a634e434f41dfcdbddf51 Reviewed-on: https://go-review.googlesource.com/c/tools/+/668237 Reviewed-by: Jonathan Amsterdam LUCI-TryBot-Result: Go LUCI Auto-Submit: Robert Findley --- internal/mcp/sse.go | 170 ++++++++++++++++++++++++++++---------------- 1 file changed, 108 insertions(+), 62 deletions(-) diff --git a/internal/mcp/sse.go b/internal/mcp/sse.go index b632594151a..1988d34dc33 100644 --- a/internal/mcp/sse.go +++ b/internal/mcp/sse.go @@ -20,15 +20,33 @@ import ( ) // This file implements support for SSE transport server and client. +// https://modelcontextprotocol.io/specification/2024-11-05/basic/transports +// +// The transport is simple, at least relative to the new streamable transport +// introduced in the 2025-03-26 version of the spec. In short: +// +// 1. Sessions are initiated via a hanging GET request, which streams +// server->client messages as SSE 'message' events. +// 2. The first event in the SSE stream must be an 'endpoint' event that +// informs the client of the session endpoint. +// 3. The client POSTs client->server messages to the session endpoint. +// +// Therefore, the each new GET request hands off its responsewriter to an +// [sseSession] type that abstracts the transport as follows: +// - Write writes a new event to the responseWriter, or fails if the GET has +// exited. +// - Read reads off a message queue that is pushed to via POST requests. +// - Close causes the hanging GEt to exit. // // TODO: -// - avoid the use of channels as listenable queues. // - support resuming broken streamable sessions // - support GET channels for unrelated notifications in streamable sessions // - add client support (and use it to test) // - properly correlate notifications/requests to an incoming request (using // requestCtx) +// TODO(rfindley): reorganize this file, and split it into sse_server.go and sse_client.go. + // An event is a server-sent event. 
type event struct { name string @@ -49,8 +67,9 @@ func writeEvent(w io.Writer, evt event) (int, error) { return n, err } -// SSEHandler is an http.Handler that serves streamable MCP sessions as -// defined by version 2024-11-05 of the MCP spec: +// SSEHandler is an http.Handler that serves SSE-based MCP sessions as defined by +// the 2024-11-05 version of the MCP protocol: +// // https://modelcontextprotocol.io/specification/2024-11-05/basic/transports type SSEHandler struct { getServer func() *Server @@ -71,16 +90,23 @@ func NewSSEHandler(getServer func() *Server) *SSEHandler { } } -// A sseSession abstracts a session initiated through the sse endpoint. -// -// It implements the Transport interface. +// A sseSession is a logical jsonrpc2 stream implementing the server side of +// MCP SSE transport, initiated through the hanging GET +// - Writes are SSE 'message' events to the GET response body. +// - Reads are received from POSTs to the session endpoing, mediated through a +// buffered channel. +// - Close terminates the hanging GET. type sseSession struct { - incoming chan jsonrpc2.Message + incoming chan jsonrpc2.Message // queue of incoming messages; never closed + // We must guard both pushes to the incoming queue and writes to the response + // writer, because incoming POST requests are abitrarily concurrent and we + // need to ensure we don't write push to the queue, or write to the + // ResponseWriter, after the session GET request exits. mu sync.Mutex - w io.Writer // the hanging response body - isDone bool // set when the stream is closed - done chan struct{} // closed when the stream is closed + w http.ResponseWriter // the hanging response body + closed bool // set when the stream is closed + done chan struct{} // closed when the stream is closed } // connect returns the receiver, as an sseSession is a logical stream. @@ -122,8 +148,12 @@ func (h *SSEHandler) ServeHTTP(w http.ResponseWriter, req *http.Request) { http.Error(w, "failed to parse body", http.StatusBadRequest) return } - session.incoming <- msg - w.WriteHeader(http.StatusAccepted) + select { + case session.incoming <- msg: + w.WriteHeader(http.StatusAccepted) + case <-session.done: + http.Error(w, "session closed", http.StatusBadRequest) + } return } @@ -142,21 +172,12 @@ func (h *SSEHandler) ServeHTTP(w http.ResponseWriter, req *http.Request) { w.Header().Set("Connection", "keep-alive") sessionID = randText() - h.mu.Lock() session := &sseSession{ w: w, - incoming: make(chan jsonrpc2.Message, 1000), + incoming: make(chan jsonrpc2.Message, 100), done: make(chan struct{}), } - h.sessions[sessionID] = session - h.mu.Unlock() - - // The session is terminated when the request exits. - defer func() { - h.mu.Lock() - delete(h.sessions, sessionID) - h.mu.Unlock() - }() + defer session.Close() server := h.getServer() cc, err := server.Connect(req.Context(), session, nil) @@ -167,7 +188,17 @@ func (h *SSEHandler) ServeHTTP(w http.ResponseWriter, req *http.Request) { if h.onClient != nil { h.onClient(cc) } - defer cc.Close() + defer cc.Close() // close the transport when the GET exits + + // The session is terminated when the request exits. + h.mu.Lock() + h.sessions[sessionID] = session + h.mu.Unlock() + defer func() { + h.mu.Lock() + delete(h.sessions, sessionID) + h.mu.Unlock() + }() endpoint, err := req.URL.Parse("?sessionid=" + sessionID) if err != nil { @@ -194,10 +225,9 @@ func (h *SSEHandler) ServeHTTP(w http.ResponseWriter, req *http.Request) { // Read implements jsonrpc2.Reader. 
func (s *sseSession) Read(ctx context.Context) (jsonrpc2.Message, int64, error) { select { + case <-ctx.Done(): + return nil, 0, ctx.Err() case msg := <-s.incoming: - if msg == nil { - return nil, 0, io.EOF - } return msg, 0, nil case <-s.done: return nil, 0, io.EOF @@ -206,6 +236,10 @@ func (s *sseSession) Read(ctx context.Context) (jsonrpc2.Message, int64, error) // Write implements jsonrpc2.Writer. func (s *sseSession) Write(ctx context.Context, msg jsonrpc2.Message) (int64, error) { + if ctx.Err() != nil { + return 0, ctx.Err() + } + data, err := jsonrpc2.EncodeMessage(msg) if err != nil { return 0, err @@ -214,7 +248,10 @@ func (s *sseSession) Write(ctx context.Context, msg jsonrpc2.Message) (int64, er s.mu.Lock() defer s.mu.Unlock() - if s.isDone { + // Note that it is invalid to write to a ResponseWriter after ServeHTTP has + // exited, and so we must lock around this write and check isDone, which is + // set before the hanging GET exits. + if s.closed { return 0, io.EOF } @@ -222,12 +259,16 @@ func (s *sseSession) Write(ctx context.Context, msg jsonrpc2.Message) (int64, er return int64(n), err } -// Close implements io.Closer. +// Close implements io.Closer, and closes the session. +// +// It must be safe to call Close more than once, as the close may +// asynchronously be initiated by either the server closing its connection, or +// by the hanging GET exiting. func (s *sseSession) Close() error { s.mu.Lock() defer s.mu.Unlock() - if !s.isDone { - s.isDone = true + if !s.closed { + s.closed = true close(s.done) } return nil @@ -236,6 +277,8 @@ func (s *sseSession) Close() error { // An SSEClientTransport is a [Transport] that can communicate with an MCP // endpoint serving the SSE transport defined by the 2024-11-05 version of the // spec. +// +// https://modelcontextprotocol.io/specification/2024-11-05/basic/transports type SSEClientTransport struct { sseEndpoint *url.URL } @@ -252,7 +295,7 @@ func NewSSEClientTransport(rawURL string) (*SSEClientTransport, error) { }, nil } -// connect connects to the client endpoint. +// connect connects through the client endpoint. func (c *SSEClientTransport) connect(ctx context.Context) (stream, error) { req, err := http.NewRequestWithContext(ctx, "GET", c.sseEndpoint.String(), nil) if err != nil { @@ -337,19 +380,17 @@ func (c *SSEClientTransport) connect(ctx context.Context) (stream, error) { } go func() { + defer s.Close() // close the transport when the GET exits + for { evt, err := nextEvent() if err != nil { - close(s.incoming) return } - if evt.name == "message" { - select { - case s.incoming <- evt.data: - case <-s.done: - close(s.incoming) - return - } + select { + case s.incoming <- evt.data: + case <-s.done: + return } } }() @@ -357,25 +398,38 @@ func (c *SSEClientTransport) connect(ctx context.Context) (stream, error) { return s, nil } +// An sseClientStream is a logical jsonrpc2 stream that implements the client +// half of the SSE protocol: +// - Writes are POSTS to the sesion endpoint. +// - Reads are SSE 'message' events, and pushes them onto a buffered channel. +// - Close terminates the GET request. 
type sseClientStream struct { - sseEndpoint *url.URL - msgEndpoint *url.URL + sseEndpoint *url.URL // SSE endpoint for the GET + msgEndpoint *url.URL // session endpoint for POSTs + incoming chan []byte // queue of incoming messages - incoming chan []byte + mu sync.Mutex + body io.ReadCloser // body of the hanging GET + closed bool // set when the stream is closed + done chan struct{} // closed when the stream is closed +} - mu sync.Mutex - body io.ReadCloser - isDone bool - done chan struct{} - closeErr error +func (c *sseClientStream) isDone() bool { + c.mu.Lock() + defer c.mu.Unlock() + return c.closed } func (c *sseClientStream) Read(ctx context.Context) (jsonrpc2.Message, int64, error) { select { case <-ctx.Done(): return nil, 0, ctx.Err() + + case <-c.done: + return nil, 0, io.EOF + case data := <-c.incoming: - if data == nil { + if c.isDone() { return nil, 0, io.EOF } msg, err := jsonrpc2.DecodeMessage(data) @@ -383,11 +437,6 @@ func (c *sseClientStream) Read(ctx context.Context) (jsonrpc2.Message, int64, er return nil, 0, err } return msg, int64(len(data)), nil - case <-c.done: - if c.closeErr != nil { - return nil, 0, c.closeErr - } - return nil, 0, io.EOF } } @@ -396,10 +445,7 @@ func (c *sseClientStream) Write(ctx context.Context, msg jsonrpc2.Message) (int6 if err != nil { return 0, err } - c.mu.Lock() - done := c.isDone - c.mu.Unlock() - if done { + if c.isDone() { return 0, io.EOF } req, err := http.NewRequestWithContext(ctx, "POST", c.msgEndpoint.String(), bytes.NewReader(data)) @@ -421,10 +467,10 @@ func (c *sseClientStream) Write(ctx context.Context, msg jsonrpc2.Message) (int6 func (c *sseClientStream) Close() error { c.mu.Lock() defer c.mu.Unlock() - if !c.isDone { - c.isDone = true - c.closeErr = c.body.Close() + if !c.closed { + c.closed = true + _ = c.body.Close() close(c.done) } - return c.closeErr + return nil } From e4c83cc9c20873834e19b1f9a104436bf3adc103 Mon Sep 17 00:00:00 2001 From: Hongxiang Jiang Date: Mon, 28 Apr 2025 10:49:40 -0400 Subject: [PATCH 247/270] internal/mcp: dynamically determine the mcp server based on URL Based on my understanding, the getServer function is used to determine which MCP server should be serving the session. User might want to dynamically determine which MCP server to serve when a new request comes in. In mcp/examples, two MCP servers are created serving at the same port but under different URL "/greeter1", "/greeter2". The MCP client will reach out to different URLs for different MCP servers. As a TODO, if the getServer() returns nil, the program will crash when handing MCP request. 
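A condensed sketch of the per-request selection this enables (the names and
address are illustrative; the complete, runnable version is the new
examples/sse program added in this change):

    func main() {
        server1 := mcp.NewServer("greeter1", "v0.0.1", nil)
        server2 := mcp.NewServer("greeter2", "v0.0.1", nil)
        handler := mcp.NewSSEHandler(func(req *http.Request) *mcp.Server {
            // Choose the MCP server based on the request URL.
            switch req.URL.Path {
            case "/greeter1":
                return server1
            case "/greeter2":
                return server2
            default:
                return nil // see the TODO above: a nil server currently crashes
            }
        })
        log.Fatal(http.ListenAndServe("localhost:8080", handler))
    }

(This assumes the usual imports: log, net/http, and
golang.org/x/tools/internal/mcp.)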
Change-Id: I4431266666e9ea28118e827a94eb1359b1fb3a81 Reviewed-on: https://go-review.googlesource.com/c/tools/+/668535 Reviewed-by: Robert Findley Auto-Submit: Hongxiang Jiang LUCI-TryBot-Result: Go LUCI --- internal/mcp/examples/hello/main.go | 2 +- internal/mcp/examples/sse/main.go | 55 +++++++++++++++++++++++++++++ internal/mcp/sse.go | 9 ++--- internal/mcp/sse_test.go | 3 +- 4 files changed, 63 insertions(+), 6 deletions(-) create mode 100644 internal/mcp/examples/sse/main.go diff --git a/internal/mcp/examples/hello/main.go b/internal/mcp/examples/hello/main.go index 5254d658710..d9b9967ff2c 100644 --- a/internal/mcp/examples/hello/main.go +++ b/internal/mcp/examples/hello/main.go @@ -33,7 +33,7 @@ func main() { server.AddTools(mcp.MakeTool("greet", "say hi", SayHi)) if *httpAddr != "" { - handler := mcp.NewSSEHandler(func() *mcp.Server { + handler := mcp.NewSSEHandler(func(*http.Request) *mcp.Server { return server }) http.ListenAndServe(*httpAddr, handler) diff --git a/internal/mcp/examples/sse/main.go b/internal/mcp/examples/sse/main.go new file mode 100644 index 00000000000..c95fd34c746 --- /dev/null +++ b/internal/mcp/examples/sse/main.go @@ -0,0 +1,55 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package main + +import ( + "context" + "flag" + "log" + "net/http" + + "golang.org/x/tools/internal/mcp" +) + +var httpAddr = flag.String("http", "", "use SSE HTTP at this address") + +type SayHiParams struct { + Name string `json:"name" mcp:"the name to say hi to"` +} + +func SayHi(ctx context.Context, cc *mcp.ClientConnection, params *SayHiParams) ([]mcp.Content, error) { + return []mcp.Content{ + mcp.TextContent{Text: "Hi " + params.Name}, + }, nil +} + +func main() { + flag.Parse() + + if httpAddr == nil || *httpAddr == "" { + log.Fatal("http address not set") + } + + server1 := mcp.NewServer("greeter1", "v0.0.1", nil) + server1.AddTools(mcp.MakeTool("greet1", "say hi", SayHi)) + + server2 := mcp.NewServer("greeter2", "v0.0.1", nil) + server2.AddTools(mcp.MakeTool("greet2", "say hello", SayHi)) + + log.Printf("MCP servers serving at %s\n", *httpAddr) + handler := mcp.NewSSEHandler(func(request *http.Request) *mcp.Server { + url := request.URL.Path + log.Printf("Handling request for URL %s\n", url) + switch url { + case "/greeter1": + return server1 + case "/greeter2": + return server2 + default: + return nil + } + }) + http.ListenAndServe(*httpAddr, handler) +} diff --git a/internal/mcp/sse.go b/internal/mcp/sse.go index 1988d34dc33..ca83d22a517 100644 --- a/internal/mcp/sse.go +++ b/internal/mcp/sse.go @@ -72,7 +72,7 @@ func writeEvent(w io.Writer, evt event) (int, error) { // // https://modelcontextprotocol.io/specification/2024-11-05/basic/transports type SSEHandler struct { - getServer func() *Server + getServer func(request *http.Request) *Server onClient func(*ClientConnection) // for testing; must not block mu sync.Mutex @@ -81,9 +81,9 @@ type SSEHandler struct { // NewSSEHandler returns a new [SSEHandler] that is ready to serve HTTP. // -// The getServer function is used to bind create servers for new sessions. It +// The getServer function is used to bind created servers for new sessions. It // is OK for getServer to return the same server multiple times. 
-func NewSSEHandler(getServer func() *Server) *SSEHandler { +func NewSSEHandler(getServer func(request *http.Request) *Server) *SSEHandler { return &SSEHandler{ getServer: getServer, sessions: make(map[string]*sseSession), @@ -179,7 +179,8 @@ func (h *SSEHandler) ServeHTTP(w http.ResponseWriter, req *http.Request) { } defer session.Close() - server := h.getServer() + // TODO(hxjiang): getServer returns nil will panic. + server := h.getServer(req) cc, err := server.Connect(req.Context(), session, nil) if err != nil { http.Error(w, "connection failed", http.StatusInternalServerError) diff --git a/internal/mcp/sse_test.go b/internal/mcp/sse_test.go index a12825a1301..f1ae5e40725 100644 --- a/internal/mcp/sse_test.go +++ b/internal/mcp/sse_test.go @@ -7,6 +7,7 @@ package mcp import ( "context" "fmt" + "net/http" "net/http/httptest" "testing" @@ -20,7 +21,7 @@ func TestSSEServer(t *testing.T) { server := NewServer("testServer", "v1.0.0", nil) server.AddTools(MakeTool("greet", "say hi", sayHi)) - sseHandler := NewSSEHandler(func() *Server { return server }) + sseHandler := NewSSEHandler(func(*http.Request) *Server { return server }) clients := make(chan *ClientConnection, 1) sseHandler.onClient = func(cc *ClientConnection) { From 7261efd2918b3625fa81e68b97fff6e9ade2907a Mon Sep 17 00:00:00 2001 From: Rob Findley Date: Mon, 28 Apr 2025 20:10:20 +0000 Subject: [PATCH 248/270] internal/mcp: export the Transport interface; add a Server example Export the Transport interface, as there's no reason users shouldn't be able to define their own transports: they are just bidirectional streams. Also, add a godoc example for basic Server usage. Finally, remove some TODOs that existed due to a bad merge: they are related to the (as yet unsubmitted) streamable HTTP transport. Change-Id: Id669793fc80630e667d50611fe2876d09125a025 Reviewed-on: https://go-review.googlesource.com/c/tools/+/668675 TryBot-Bypass: Robert Findley Reviewed-by: Sam Thanawalla Auto-Submit: Robert Findley --- internal/mcp/example_test.go | 53 ++++++++++++++++++++++++++++++++++++ internal/mcp/sse.go | 17 +++--------- internal/mcp/transport.go | 21 ++++++-------- 3 files changed, 66 insertions(+), 25 deletions(-) create mode 100644 internal/mcp/example_test.go diff --git a/internal/mcp/example_test.go b/internal/mcp/example_test.go new file mode 100644 index 00000000000..679cbe0c144 --- /dev/null +++ b/internal/mcp/example_test.go @@ -0,0 +1,53 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package mcp_test + +import ( + "context" + "fmt" + "log" + + "golang.org/x/tools/internal/mcp" +) + +type SayHiParams struct { + Name string `json:"name" mcp:"the name to say hi to"` +} + +func SayHi(ctx context.Context, cc *mcp.ClientConnection, params *SayHiParams) ([]mcp.Content, error) { + return []mcp.Content{ + mcp.TextContent{Text: "Hi " + params.Name}, + }, nil +} + +func ExampleServer() { + ctx := context.Background() + clientTransport, serverTransport := mcp.NewLocalTransport() + + server := mcp.NewServer("greeter", "v0.0.1", nil) + server.AddTools(mcp.MakeTool("greet", "say hi", SayHi)) + + clientConnection, err := server.Connect(ctx, serverTransport, nil) + if err != nil { + log.Fatal(err) + } + + client := mcp.NewClient("client", "v0.0.1", nil) + serverConnection, err := client.Connect(ctx, clientTransport, nil) + if err != nil { + log.Fatal(err) + } + + content, err := serverConnection.CallTool(ctx, "greet", SayHiParams{Name: "user"}) + if err != nil { + log.Fatal(err) + } + fmt.Println(content[0].(mcp.TextContent).Text) + + serverConnection.Close() + clientConnection.Wait() + + // Output: Hi user +} diff --git a/internal/mcp/sse.go b/internal/mcp/sse.go index ca83d22a517..da364e161b3 100644 --- a/internal/mcp/sse.go +++ b/internal/mcp/sse.go @@ -37,15 +37,6 @@ import ( // exited. // - Read reads off a message queue that is pushed to via POST requests. // - Close causes the hanging GEt to exit. -// -// TODO: -// - support resuming broken streamable sessions -// - support GET channels for unrelated notifications in streamable sessions -// - add client support (and use it to test) -// - properly correlate notifications/requests to an incoming request (using -// requestCtx) - -// TODO(rfindley): reorganize this file, and split it into sse_server.go and sse_client.go. // An event is a server-sent event. type event struct { @@ -109,8 +100,8 @@ type sseSession struct { done chan struct{} // closed when the stream is closed } -// connect returns the receiver, as an sseSession is a logical stream. -func (s *sseSession) connect(context.Context) (stream, error) { +// Connect returns the receiver, as an sseSession is a logical stream. +func (s *sseSession) Connect(context.Context) (Stream, error) { return s, nil } @@ -296,8 +287,8 @@ func NewSSEClientTransport(rawURL string) (*SSEClientTransport, error) { }, nil } -// connect connects through the client endpoint. -func (c *SSEClientTransport) connect(ctx context.Context) (stream, error) { +// Connect connects through the client endpoint. +func (c *SSEClientTransport) Connect(ctx context.Context) (Stream, error) { req, err := http.NewRequestWithContext(ctx, "GET", c.sseEndpoint.String(), nil) if err != nil { return nil, err diff --git a/internal/mcp/transport.go b/internal/mcp/transport.go index 416fc5300c3..841cc099c10 100644 --- a/internal/mcp/transport.go +++ b/internal/mcp/transport.go @@ -19,9 +19,6 @@ import ( "golang.org/x/tools/internal/xcontext" ) -// A JSONRPC2 error is an error defined by the JSONRPC2 spec. -type JSONRPC2Error = jsonrpc2.WireError - // ErrConnectionClosed is returned when sending a message to a connection that // is closed or in the process of closing. var ErrConnectionClosed = errors.New("connection closed") @@ -32,15 +29,15 @@ var ErrConnectionClosed = errors.New("connection closed") // Transports should be used for at most one call to [Server.Connect] or // [Client.Connect]. type Transport interface { - // connect returns the logical stream. + // Connect returns the logical stream. 
// - // It is called exactly once by [connect]. - connect(ctx context.Context) (stream, error) + // It is called exactly once by [Connect]. + Connect(ctx context.Context) (Stream, error) } -// A stream is an abstract bidirectional jsonrpc2 stream. +// A Stream is an abstract bidirectional jsonrpc2 Stream. // It is used by [connect] to establish a [jsonrpc2.Connection]. -type stream interface { +type Stream interface { jsonrpc2.Reader jsonrpc2.Writer io.Closer @@ -59,7 +56,7 @@ type IOTransport struct { rwc io.ReadWriteCloser } -func (t *IOTransport) connect(context.Context) (stream, error) { +func (t *IOTransport) Connect(context.Context) (Stream, error) { return newIOStream(t.rwc), nil } @@ -94,7 +91,7 @@ func connect[H handler](ctx context.Context, t Transport, opts *ConnectionOption } var zero H - stream, err := t.connect(ctx) + stream, err := t.Connect(ctx) if err != nil { return zero, err } @@ -275,8 +272,8 @@ func newIOStream(rwc io.ReadWriteCloser) *ioStream { } } -// connect returns the receiver, as a streamTransport is a logical stream. -func (t *ioStream) connect(ctx context.Context) (stream, error) { +// Connect returns the receiver, as a streamTransport is a logical stream. +func (t *ioStream) Connect(ctx context.Context) (Stream, error) { return t, nil } From 6a96d1d1cba8645be588e49ebb6e2aa3d54f2542 Mon Sep 17 00:00:00 2001 From: Shashank Priyadarshi Date: Mon, 21 Apr 2025 12:37:22 +0530 Subject: [PATCH 249/270] cmd/auth: carve out module for deprecated command `auth` For #70872 Change-Id: I20e7b1e871edc9efbc4e5de227d1d688b989500d Reviewed-on: https://go-review.googlesource.com/c/tools/+/666975 Reviewed-by: Sam Thanawalla Reviewed-by: Dmitri Shuralyov LUCI-TryBot-Result: Go LUCI --- cmd/auth/go.mod | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 cmd/auth/go.mod diff --git a/cmd/auth/go.mod b/cmd/auth/go.mod new file mode 100644 index 00000000000..ea912ce7743 --- /dev/null +++ b/cmd/auth/go.mod @@ -0,0 +1,3 @@ +module golang.org/x/tools/cmd/auth + +go 1.23.0 From 68cf9bee450fdb3ebdfc75ed4233567bc8748b44 Mon Sep 17 00:00:00 2001 From: xieyuschen Date: Thu, 17 Apr 2025 13:10:25 +0800 Subject: [PATCH 250/270] gopls/internal/golang/completion: tostring, tobytes postfix snippets Updates: golang/go#73367 Change-Id: I0fad8ddd9c245eb4ab8d7809b16a2e982126aecd Reviewed-on: https://go-review.googlesource.com/c/tools/+/666195 Reviewed-by: Carlos Amedee Reviewed-by: Peter Weinberger LUCI-TryBot-Result: Go LUCI --- .../golang/completion/postfix_snippets.go | 24 ++++++++++++++++++- .../marker/testdata/completion/postfix.txt | 18 ++++++++++++++ 2 files changed, 41 insertions(+), 1 deletion(-) diff --git a/gopls/internal/golang/completion/postfix_snippets.go b/gopls/internal/golang/completion/postfix_snippets.go index 1bafe848490..1d306e3518d 100644 --- a/gopls/internal/golang/completion/postfix_snippets.go +++ b/gopls/internal/golang/completion/postfix_snippets.go @@ -334,7 +334,29 @@ if {{$errName | $a.SpecifiedPlaceholder 1}} != nil { {{end}} } {{end}}`, -}} +}, + { + label: "tostring", + details: "[]byte to string", + body: `{{if (eq (.TypeName .Type) "[]byte") -}} + string({{.X}}) + {{- end}}`, + }, + { + label: "tostring", + details: "int to string", + body: `{{if (eq (.TypeName .Type) "int") -}} + {{.Import "strconv"}}.Itoa({{.X}}) + {{- end}}`, + }, + { + label: "tobytes", + details: "string to []byte", + body: `{{if (eq (.TypeName .Type) "string") -}} + []byte({{.X}}) + {{- end}}`, + }, +} // Cursor indicates where the client's cursor should end up after the // snippet is 
done. diff --git a/gopls/internal/test/marker/testdata/completion/postfix.txt b/gopls/internal/test/marker/testdata/completion/postfix.txt index 9b54b578f4c..fc8c98a9dd3 100644 --- a/gopls/internal/test/marker/testdata/completion/postfix.txt +++ b/gopls/internal/test/marker/testdata/completion/postfix.txt @@ -129,3 +129,21 @@ func _() (string, T, map[string]string, error) { bar().iferr //@snippet(" //", postfixIfErr, "if err := bar(); err != nil {\n\treturn \"\", T{}, nil, ${1:}\n}\n") bar().variferr //@snippet(" //", postfixVarIfErr, "${1:} := bar()\nif ${1:} != nil {\n\treturn \"\", T{}, nil, ${2:}\n}\n") } + +func _(){ + /* tostring! */ //@item(postfixToString, "tostring!", "[]byte to string", "snippet") + var bs []byte + bs. //@complete(" //", postfixAppend, postfixCopy, postfixFor, postfixForr, postfixIfNotNil, postfixLast, postfixLen, postfixPrint, postfixRange, postfixReverse, postfixSort, postfixToString, postfixVar) + bs = nil + + /* tobytes! */ //@item(postfixToBytes, "tobytes!", "string to []byte", "snippet") + /* split! */ //@item(postfixSplit, "split!", "split string", "snippet") + var s string + s. //@complete(" //", postfixPrint, postfixSplit, postfixToBytes, postfixVar) + s = "" + + /* tostring! */ //@item(postfixIntToString, "tostring!", "int to string", "snippet") + var i int + i. //@complete(" //", postfixPrint, postfixIntToString, postfixVar) + i = 0 +} From 9e366f589d3c4143dc3cc554e7dae6e57bb2f800 Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Thu, 24 Apr 2025 19:18:21 -0400 Subject: [PATCH 251/270] jsonschema: validate numbers and strings Add validation for keywords related to numbers and strings. It may be surprising that keywords like "minimum" or "minLength" don't imply that the instance is a number or string. But that is how JSON Schema is designed. 
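For example (mirroring the "ignores non-numbers" and "ignores non-strings"
cases in the new test data):

    schema {"minimum": 3}                     instance "x"  -> valid
    schema {"minLength": 2}                   instance 100  -> valid
    schema {"type": "string", "minLength": 2} instance 100  -> invalid

Only the "type" keyword constrains the instance's type; the numeric and
string keywords simply do not apply to instances of other types.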
Change-Id: I8805a5fe91ee345931e1c989f174ea2d0be6665a Reviewed-on: https://go-review.googlesource.com/c/tools/+/667975 Reviewed-by: Robert Findley LUCI-TryBot-Result: Go LUCI Auto-Submit: Jonathan Amsterdam --- .../draft2020-12/exclusiveMaximum.json | 31 ++++++ .../draft2020-12/exclusiveMinimum.json | 31 ++++++ .../testdata/draft2020-12/maxLength.json | 55 +++++++++++ .../testdata/draft2020-12/maximum.json | 60 ++++++++++++ .../testdata/draft2020-12/minLength.json | 55 +++++++++++ .../testdata/draft2020-12/minimum.json | 75 ++++++++++++++ .../testdata/draft2020-12/multipleOf.json | 97 +++++++++++++++++++ .../testdata/draft2020-12/pattern.json | 65 +++++++++++++ internal/mcp/internal/jsonschema/validate.go | 63 ++++++++++++ 9 files changed, 532 insertions(+) create mode 100644 internal/mcp/internal/jsonschema/testdata/draft2020-12/exclusiveMaximum.json create mode 100644 internal/mcp/internal/jsonschema/testdata/draft2020-12/exclusiveMinimum.json create mode 100644 internal/mcp/internal/jsonschema/testdata/draft2020-12/maxLength.json create mode 100644 internal/mcp/internal/jsonschema/testdata/draft2020-12/maximum.json create mode 100644 internal/mcp/internal/jsonschema/testdata/draft2020-12/minLength.json create mode 100644 internal/mcp/internal/jsonschema/testdata/draft2020-12/minimum.json create mode 100644 internal/mcp/internal/jsonschema/testdata/draft2020-12/multipleOf.json create mode 100644 internal/mcp/internal/jsonschema/testdata/draft2020-12/pattern.json diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/exclusiveMaximum.json b/internal/mcp/internal/jsonschema/testdata/draft2020-12/exclusiveMaximum.json new file mode 100644 index 00000000000..05db23351be --- /dev/null +++ b/internal/mcp/internal/jsonschema/testdata/draft2020-12/exclusiveMaximum.json @@ -0,0 +1,31 @@ +[ + { + "description": "exclusiveMaximum validation", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "exclusiveMaximum": 3.0 + }, + "tests": [ + { + "description": "below the exclusiveMaximum is valid", + "data": 2.2, + "valid": true + }, + { + "description": "boundary point is invalid", + "data": 3.0, + "valid": false + }, + { + "description": "above the exclusiveMaximum is invalid", + "data": 3.5, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + } +] diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/exclusiveMinimum.json b/internal/mcp/internal/jsonschema/testdata/draft2020-12/exclusiveMinimum.json new file mode 100644 index 00000000000..00af9d7ff5d --- /dev/null +++ b/internal/mcp/internal/jsonschema/testdata/draft2020-12/exclusiveMinimum.json @@ -0,0 +1,31 @@ +[ + { + "description": "exclusiveMinimum validation", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "exclusiveMinimum": 1.1 + }, + "tests": [ + { + "description": "above the exclusiveMinimum is valid", + "data": 1.2, + "valid": true + }, + { + "description": "boundary point is invalid", + "data": 1.1, + "valid": false + }, + { + "description": "below the exclusiveMinimum is invalid", + "data": 0.6, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + } +] diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/maxLength.json b/internal/mcp/internal/jsonschema/testdata/draft2020-12/maxLength.json new file mode 100644 index 00000000000..7462726d760 --- /dev/null +++ b/internal/mcp/internal/jsonschema/testdata/draft2020-12/maxLength.json @@ -0,0 
+1,55 @@ +[ + { + "description": "maxLength validation", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "maxLength": 2 + }, + "tests": [ + { + "description": "shorter is valid", + "data": "f", + "valid": true + }, + { + "description": "exact length is valid", + "data": "fo", + "valid": true + }, + { + "description": "too long is invalid", + "data": "foo", + "valid": false + }, + { + "description": "ignores non-strings", + "data": 100, + "valid": true + }, + { + "description": "two graphemes is long enough", + "data": "\uD83D\uDCA9\uD83D\uDCA9", + "valid": true + } + ] + }, + { + "description": "maxLength validation with a decimal", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "maxLength": 2.0 + }, + "tests": [ + { + "description": "shorter is valid", + "data": "f", + "valid": true + }, + { + "description": "too long is invalid", + "data": "foo", + "valid": false + } + ] + } +] diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/maximum.json b/internal/mcp/internal/jsonschema/testdata/draft2020-12/maximum.json new file mode 100644 index 00000000000..b99a541ea2e --- /dev/null +++ b/internal/mcp/internal/jsonschema/testdata/draft2020-12/maximum.json @@ -0,0 +1,60 @@ +[ + { + "description": "maximum validation", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "maximum": 3.0 + }, + "tests": [ + { + "description": "below the maximum is valid", + "data": 2.6, + "valid": true + }, + { + "description": "boundary point is valid", + "data": 3.0, + "valid": true + }, + { + "description": "above the maximum is invalid", + "data": 3.5, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + }, + { + "description": "maximum validation with unsigned integer", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "maximum": 300 + }, + "tests": [ + { + "description": "below the maximum is invalid", + "data": 299.97, + "valid": true + }, + { + "description": "boundary point integer is valid", + "data": 300, + "valid": true + }, + { + "description": "boundary point float is valid", + "data": 300.00, + "valid": true + }, + { + "description": "above the maximum is invalid", + "data": 300.5, + "valid": false + } + ] + } +] diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/minLength.json b/internal/mcp/internal/jsonschema/testdata/draft2020-12/minLength.json new file mode 100644 index 00000000000..5076c5a928f --- /dev/null +++ b/internal/mcp/internal/jsonschema/testdata/draft2020-12/minLength.json @@ -0,0 +1,55 @@ +[ + { + "description": "minLength validation", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "minLength": 2 + }, + "tests": [ + { + "description": "longer is valid", + "data": "foo", + "valid": true + }, + { + "description": "exact length is valid", + "data": "fo", + "valid": true + }, + { + "description": "too short is invalid", + "data": "f", + "valid": false + }, + { + "description": "ignores non-strings", + "data": 1, + "valid": true + }, + { + "description": "one grapheme is not long enough", + "data": "\uD83D\uDCA9", + "valid": false + } + ] + }, + { + "description": "minLength validation with a decimal", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "minLength": 2.0 + }, + "tests": [ + { + "description": "longer is valid", + "data": "foo", + "valid": true + }, + { + "description": "too short is invalid", + "data": "f", + "valid": false + } 
+ ] + } +] diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/minimum.json b/internal/mcp/internal/jsonschema/testdata/draft2020-12/minimum.json new file mode 100644 index 00000000000..dc44052784c --- /dev/null +++ b/internal/mcp/internal/jsonschema/testdata/draft2020-12/minimum.json @@ -0,0 +1,75 @@ +[ + { + "description": "minimum validation", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "minimum": 1.1 + }, + "tests": [ + { + "description": "above the minimum is valid", + "data": 2.6, + "valid": true + }, + { + "description": "boundary point is valid", + "data": 1.1, + "valid": true + }, + { + "description": "below the minimum is invalid", + "data": 0.6, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + }, + { + "description": "minimum validation with signed integer", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "minimum": -2 + }, + "tests": [ + { + "description": "negative above the minimum is valid", + "data": -1, + "valid": true + }, + { + "description": "positive above the minimum is valid", + "data": 0, + "valid": true + }, + { + "description": "boundary point is valid", + "data": -2, + "valid": true + }, + { + "description": "boundary point with float is valid", + "data": -2.0, + "valid": true + }, + { + "description": "float below the minimum is invalid", + "data": -2.0001, + "valid": false + }, + { + "description": "int below the minimum is invalid", + "data": -3, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + } +] diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/multipleOf.json b/internal/mcp/internal/jsonschema/testdata/draft2020-12/multipleOf.json new file mode 100644 index 00000000000..92d6979b09c --- /dev/null +++ b/internal/mcp/internal/jsonschema/testdata/draft2020-12/multipleOf.json @@ -0,0 +1,97 @@ +[ + { + "description": "by int", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "multipleOf": 2 + }, + "tests": [ + { + "description": "int by int", + "data": 10, + "valid": true + }, + { + "description": "int by int fail", + "data": 7, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "by number", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "multipleOf": 1.5 + }, + "tests": [ + { + "description": "zero is multiple of anything", + "data": 0, + "valid": true + }, + { + "description": "4.5 is multiple of 1.5", + "data": 4.5, + "valid": true + }, + { + "description": "35 is not multiple of 1.5", + "data": 35, + "valid": false + } + ] + }, + { + "description": "by small number", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "multipleOf": 0.0001 + }, + "tests": [ + { + "description": "0.0075 is multiple of 0.0001", + "data": 0.0075, + "valid": true + }, + { + "description": "0.00751 is not multiple of 0.0001", + "data": 0.00751, + "valid": false + } + ] + }, + { + "description": "float division = inf", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "integer", "multipleOf": 0.123456789 + }, + "tests": [ + { + "description": "always invalid, but naive implementations may raise an overflow error", + "data": 1e308, + "valid": false + } + ] + }, + { + "description": "small multiple of large integer", + "schema": { + "$schema": 
"https://json-schema.org/draft/2020-12/schema", + "type": "integer", "multipleOf": 1e-8 + }, + "tests": [ + { + "description": "any integer is a multiple of 1e-8", + "data": 12391239123, + "valid": true + } + ] + } +] diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/pattern.json b/internal/mcp/internal/jsonschema/testdata/draft2020-12/pattern.json new file mode 100644 index 00000000000..af0b8d89bd2 --- /dev/null +++ b/internal/mcp/internal/jsonschema/testdata/draft2020-12/pattern.json @@ -0,0 +1,65 @@ +[ + { + "description": "pattern validation", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "pattern": "^a*$" + }, + "tests": [ + { + "description": "a matching pattern is valid", + "data": "aaa", + "valid": true + }, + { + "description": "a non-matching pattern is invalid", + "data": "abc", + "valid": false + }, + { + "description": "ignores booleans", + "data": true, + "valid": true + }, + { + "description": "ignores integers", + "data": 123, + "valid": true + }, + { + "description": "ignores floats", + "data": 1.0, + "valid": true + }, + { + "description": "ignores objects", + "data": {}, + "valid": true + }, + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores null", + "data": null, + "valid": true + } + ] + }, + { + "description": "pattern is not anchored", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "pattern": "a+" + }, + "tests": [ + { + "description": "matches a substring", + "data": "xxaayy", + "valid": true + } + ] + } +] diff --git a/internal/mcp/internal/jsonschema/validate.go b/internal/mcp/internal/jsonschema/validate.go index 39f633b922f..f155de2720c 100644 --- a/internal/mcp/internal/jsonschema/validate.go +++ b/internal/mcp/internal/jsonschema/validate.go @@ -6,9 +6,13 @@ package jsonschema import ( "fmt" + "math" + "math/big" "reflect" + "regexp" "slices" "strings" + "unicode/utf8" ) // The value of the "$schema" keyword for the version that we can validate. @@ -103,6 +107,65 @@ func (st *state) validate(instance reflect.Value, schema *Schema, path []any) (e return fmt.Errorf("const: %v does not equal %v", instance, *schema.Const) } } + + // numbers: https://json-schema.org/draft/2020-12/draft-bhutton-json-schema-validation-01#section-6.2 + if schema.MultipleOf != nil || schema.Minimum != nil || schema.Maximum != nil || schema.ExclusiveMinimum != nil || schema.ExclusiveMaximum != nil { + n, ok := jsonNumber(instance) + if ok { // these keywords don't apply to non-numbers + if schema.MultipleOf != nil { + // TODO: validate MultipleOf as non-zero. + // The test suite assumes floats. 
+ nf, _ := n.Float64() // don't care if it's exact or not + if _, f := math.Modf(nf / *schema.MultipleOf); f != 0 { + return fmt.Errorf("multipleOf: %s is not a multiple of %f", n, *schema.MultipleOf) + } + } + + m := new(big.Rat) // reuse for all of the following + cmp := func(f float64) int { return n.Cmp(m.SetFloat64(f)) } + + if schema.Minimum != nil && cmp(*schema.Minimum) < 0 { + return fmt.Errorf("minimum: %s is less than %f", n, *schema.Minimum) + } + if schema.Maximum != nil && cmp(*schema.Maximum) > 0 { + return fmt.Errorf("maximum: %s is greater than %f", n, *schema.Maximum) + } + if schema.ExclusiveMinimum != nil && cmp(*schema.ExclusiveMinimum) <= 0 { + return fmt.Errorf("exclusiveMinimum: %s is less than or equal to %f", n, *schema.ExclusiveMinimum) + } + if schema.ExclusiveMaximum != nil && cmp(*schema.ExclusiveMaximum) >= 0 { + return fmt.Errorf("exclusiveMaximum: %s is greater than or equal to %f", n, *schema.ExclusiveMaximum) + } + } + } + + // strings: https://json-schema.org/draft/2020-12/draft-bhutton-json-schema-validation-01#section-6.3 + if instance.Kind() == reflect.String && (schema.MinLength != nil || schema.MaxLength != nil || schema.Pattern != "") { + str := instance.String() + n := utf8.RuneCountInString(str) + if schema.MinLength != nil { + if m := int(*schema.MinLength); n < m { + return fmt.Errorf("minLength: %q contains %d Unicode code points, fewer than %d", str, n, m) + } + } + if schema.MaxLength != nil { + if m := int(*schema.MaxLength); n > m { + return fmt.Errorf("maxLength: %q contains %d Unicode code points, more than %d", str, n, m) + } + } + + if schema.Pattern != "" { + // TODO(jba): compile regexps during schema validation. + m, err := regexp.MatchString(schema.Pattern, str) + if err != nil { + return err + } + if !m { + return fmt.Errorf("pattern: %q does not match pattern %q", str, schema.Pattern) + } + } + } + return nil } From 953b452e0ef967245f8efb5d6dd45c662bb8e110 Mon Sep 17 00:00:00 2001 From: Rob Findley Date: Wed, 30 Apr 2025 16:29:42 +0000 Subject: [PATCH 252/270] go/packages: skip TestRmdirAfterGoList_Runner This test is frequently flaking. It has successfully narrowed down the problem, but now we should skip it until the problem is fixed. Change-Id: Idd6f97cd9ba2f4bbd63d845af48b054dae899008 Reviewed-on: https://go-review.googlesource.com/c/tools/+/669136 Auto-Submit: Robert Findley Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI --- go/packages/packages_test.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/go/packages/packages_test.go b/go/packages/packages_test.go index aa86ae6842c..2623aa5a03b 100644 --- a/go/packages/packages_test.go +++ b/go/packages/packages_test.go @@ -3413,6 +3413,8 @@ func writeTree(t *testing.T, archive string) string { // If this test ever fails, the combination of the gocommand package // and the go command itself has a bug. func TestRmdirAfterGoList_Runner(t *testing.T) { + t.Skip("golang/go#73503: this test is frequently flaky") + testRmdirAfterGoList(t, func(ctx context.Context, dir string) { var runner gocommand.Runner stdout, stderr, friendlyErr, err := runner.RunRaw(ctx, gocommand.Invocation{ From 984d58c946f82914be5739ef9a293143392bccde Mon Sep 17 00:00:00 2001 From: Dmitri Shuralyov Date: Tue, 29 Apr 2025 23:44:56 -0400 Subject: [PATCH 253/270] go/analysis/unitchecker: update TestVetStdlib's skip for "devel" move The intention of TestVetStdlib is to run and potentially fail only at Go tip, where standard library code can be relatively easily modified. 
The current skip logic relies on the property that the "devel" string, something that we aim to include in all non-release versions, happens to be a prefix only at Go tip. That won't be the case after the issue go.dev/issue/73372 is resolved, so update the skip to keep working as originally intended. For golang/go#73372. Change-Id: I122439a590782f86fc3b3a8ba4b3adbfee1e591f Reviewed-on: https://go-review.googlesource.com/c/tools/+/669016 LUCI-TryBot-Result: Go LUCI Reviewed-by: Dmitri Shuralyov Auto-Submit: Dmitri Shuralyov Reviewed-by: Alan Donovan --- go/analysis/unitchecker/vet_std_test.go | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/go/analysis/unitchecker/vet_std_test.go b/go/analysis/unitchecker/vet_std_test.go index b489ad486d8..a761bc02f31 100644 --- a/go/analysis/unitchecker/vet_std_test.go +++ b/go/analysis/unitchecker/vet_std_test.go @@ -5,6 +5,7 @@ package unitchecker_test import ( + "go/version" "os" "os/exec" "runtime" @@ -95,8 +96,14 @@ func TestVetStdlib(t *testing.T) { if testing.Short() { t.Skip("skipping in -short mode") } - if version := runtime.Version(); !strings.HasPrefix(version, "devel") { - t.Skipf("This test is only wanted on development branches where code can be easily fixed. Skipping because runtime.Version=%q.", version) + if builder := os.Getenv("GO_BUILDER_NAME"); builder != "" && !strings.HasPrefix(builder, "x_tools-gotip-") { + // Run on builders like x_tools-gotip-linux-amd64-longtest, + // skip on others like x_tools-go1.24-linux-amd64-longtest. + t.Skipf("This test is only wanted on development branches where code can be easily fixed. Skipping on non-gotip builder %q.", builder) + } else if v := runtime.Version(); !strings.Contains(v, "devel") || version.Compare(v, version.Lang(v)) != 0 { + // Run on versions like "go1.25-devel_9ce47e66e8 Wed Mar 26 03:48:50 2025 -0700", + // skip on others like "go1.24.2" or "go1.24.2-devel_[…]". + t.Skipf("This test is only wanted on development versions where code can be easily fixed. Skipping on non-gotip version %q.", v) } cmd := exec.Command("go", "vet", "-vettool="+os.Args[0], "std") From fd1a208c2abc6367e05bc72cc7868f64753a4e48 Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Fri, 25 Apr 2025 06:07:13 -0400 Subject: [PATCH 254/270] jsonschema: validate logic keywords Validate keywords that combine schemas with various logical operators. Also add iterators over schemas. Now we only need them to skip some tests, but later they will come in handy for non-test code. 
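A minimal sketch of how the iterators might later be used from inside the
package (the hasRef helper is hypothetical, for illustration only):

    // hasRef reports whether s, or any schema nested within it, uses $ref.
    // Note that all may yield nil schemas found in slices or maps, so
    // callers must check for nil.
    func hasRef(s *Schema) bool {
        for sub := range s.all() {
            if sub != nil && sub.Ref != "" {
                return true
            }
        }
        return false
    }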
Change-Id: Idaf41efea4d693b6be0ee5c6c3a527664b8c472f Reviewed-on: https://go-review.googlesource.com/c/tools/+/668115 Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI --- internal/mcp/internal/jsonschema/schema.go | 75 ++++- .../testdata/draft2020-12/allOf.json | 312 ++++++++++++++++++ .../testdata/draft2020-12/anyOf.json | 203 ++++++++++++ .../testdata/draft2020-12/if-then-else.json | 268 +++++++++++++++ .../jsonschema/testdata/draft2020-12/not.json | 301 +++++++++++++++++ .../testdata/draft2020-12/oneOf.json | 293 ++++++++++++++++ internal/mcp/internal/jsonschema/validate.go | 64 ++++ .../mcp/internal/jsonschema/validate_test.go | 6 +- 8 files changed, 1517 insertions(+), 5 deletions(-) create mode 100644 internal/mcp/internal/jsonschema/testdata/draft2020-12/allOf.json create mode 100644 internal/mcp/internal/jsonschema/testdata/draft2020-12/anyOf.json create mode 100644 internal/mcp/internal/jsonschema/testdata/draft2020-12/if-then-else.json create mode 100644 internal/mcp/internal/jsonschema/testdata/draft2020-12/not.json create mode 100644 internal/mcp/internal/jsonschema/testdata/draft2020-12/oneOf.json diff --git a/internal/mcp/internal/jsonschema/schema.go b/internal/mcp/internal/jsonschema/schema.go index 2ee77b77904..52358f13337 100644 --- a/internal/mcp/internal/jsonschema/schema.go +++ b/internal/mcp/internal/jsonschema/schema.go @@ -11,11 +11,31 @@ import ( "encoding/json" "errors" "fmt" + "iter" ) // A Schema is a JSON schema object. -// It corresponds to the 2020-12 draft, as described in -// https://json-schema.org/draft/2020-12. +// It corresponds to the 2020-12 draft, as described in https://json-schema.org/draft/2020-12, +// specifically: +// - https://json-schema.org/draft/2020-12/draft-bhutton-json-schema-01 +// - https://json-schema.org/draft/2020-12/draft-bhutton-json-schema-validation-01 +// +// A Schema value may have non-zero values for more than one field: +// all relevant non-zero fields are used for validation. +// There is one exception to provide more Go type-safety: the Type and Types fields +// are mutually exclusive. +// +// Since this struct is a Go representation of a JSON value, it inherits JSON's +// distinction between nil and empty. Nil slices and maps are considered absent, +// but empty ones are present and affect validation. For example, +// +// Schema{Enum: nil} +// +// is equivalent to an empty schema, so it validates every instance. But +// +// Schema{Enum: []any{}} +// +// requires equality to some slice element, so it vacuously rejects every instance. type Schema struct { // core ID string `json:"$id,omitempty"` @@ -23,7 +43,7 @@ type Schema struct { Ref string `json:"$ref,omitempty"` Comment string `json:"$comment,omitempty"` Defs map[string]*Schema `json:"$defs,omitempty"` - // definitions is deprecated but still allowed. It is a synonym for defs. + // definitions is deprecated but still allowed. It is a synonym for $defs. Definitions map[string]*Schema `json:"definitions,omitempty"` Anchor string `json:"$anchor,omitempty"` @@ -192,3 +212,52 @@ func (s *Schema) UnmarshalJSON(data []byte) error { // Ptr returns a pointer to a new variable whose value is x. func Ptr[T any](x T) *T { return &x } + +// every applies f preorder to every schema under s including s. +// It stops when f returns false. +func (s *Schema) every(f func(*Schema) bool) bool { + return s == nil || + f(s) && s.everyChild(f) +} + +// everyChild returns an iterator over the immediate child schemas of s. 
+// +// It does not yield nils from fields holding individual schemas, like Contains, +// because a nil value indicates that the field is absent. +// It does yield nils when they occur in slices and maps, so those invalid values +// can be detected when the schema is validated. +func (s *Schema) everyChild(f func(*Schema) bool) bool { + // Fields that contain individual schemas. A nil is valid: it just means the field isn't present. + for _, c := range []*Schema{ + s.Items, s.AdditionalItems, s.Contains, s.PropertyNames, s.AdditionalProperties, + s.If, s.Then, s.Else, s.Not, s.UnevaluatedItems, s.UnevaluatedProperties, + } { + if c != nil && !f(c) { + return false + } + } + // Fields that contain slices of schemas. Yield nils so we can check for their presence. + for _, sl := range [][]*Schema{s.PrefixItems, s.AllOf, s.AnyOf, s.OneOf} { + for _, c := range sl { + if !f(c) { + return false + } + } + } + // Fields that are maps of schemas. Ditto about nils. + for _, m := range []map[string]*Schema{ + s.Defs, s.Definitions, s.Properties, s.PatternProperties, s.DependentSchemas, + } { + for _, c := range m { + if !f(c) { + return false + } + } + } + return true +} + +// all wraps every in an iterator. +func (s *Schema) all() iter.Seq[*Schema] { + return func(yield func(*Schema) bool) { s.every(yield) } +} diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/allOf.json b/internal/mcp/internal/jsonschema/testdata/draft2020-12/allOf.json new file mode 100644 index 00000000000..9e87903fe21 --- /dev/null +++ b/internal/mcp/internal/jsonschema/testdata/draft2020-12/allOf.json @@ -0,0 +1,312 @@ +[ + { + "description": "allOf", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "allOf": [ + { + "properties": { + "bar": {"type": "integer"} + }, + "required": ["bar"] + }, + { + "properties": { + "foo": {"type": "string"} + }, + "required": ["foo"] + } + ] + }, + "tests": [ + { + "description": "allOf", + "data": {"foo": "baz", "bar": 2}, + "valid": true + }, + { + "description": "mismatch second", + "data": {"foo": "baz"}, + "valid": false + }, + { + "description": "mismatch first", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "wrong type", + "data": {"foo": "baz", "bar": "quux"}, + "valid": false + } + ] + }, + { + "description": "allOf with base schema", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "properties": {"bar": {"type": "integer"}}, + "required": ["bar"], + "allOf" : [ + { + "properties": { + "foo": {"type": "string"} + }, + "required": ["foo"] + }, + { + "properties": { + "baz": {"type": "null"} + }, + "required": ["baz"] + } + ] + }, + "tests": [ + { + "description": "valid", + "data": {"foo": "quux", "bar": 2, "baz": null}, + "valid": true + }, + { + "description": "mismatch base schema", + "data": {"foo": "quux", "baz": null}, + "valid": false + }, + { + "description": "mismatch first allOf", + "data": {"bar": 2, "baz": null}, + "valid": false + }, + { + "description": "mismatch second allOf", + "data": {"foo": "quux", "bar": 2}, + "valid": false + }, + { + "description": "mismatch both", + "data": {"bar": 2}, + "valid": false + } + ] + }, + { + "description": "allOf simple types", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "allOf": [ + {"maximum": 30}, + {"minimum": 20} + ] + }, + "tests": [ + { + "description": "valid", + "data": 25, + "valid": true + }, + { + "description": "mismatch one", + "data": 35, + "valid": false + } + ] + }, + { + "description": "allOf 
with boolean schemas, all true", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "allOf": [true, true] + }, + "tests": [ + { + "description": "any value is valid", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "allOf with boolean schemas, some false", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "allOf": [true, false] + }, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "allOf with boolean schemas, all false", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "allOf": [false, false] + }, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "allOf with one empty schema", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "allOf": [ + {} + ] + }, + "tests": [ + { + "description": "any data is valid", + "data": 1, + "valid": true + } + ] + }, + { + "description": "allOf with two empty schemas", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "allOf": [ + {}, + {} + ] + }, + "tests": [ + { + "description": "any data is valid", + "data": 1, + "valid": true + } + ] + }, + { + "description": "allOf with the first empty schema", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "allOf": [ + {}, + { "type": "number" } + ] + }, + "tests": [ + { + "description": "number is valid", + "data": 1, + "valid": true + }, + { + "description": "string is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "allOf with the last empty schema", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "allOf": [ + { "type": "number" }, + {} + ] + }, + "tests": [ + { + "description": "number is valid", + "data": 1, + "valid": true + }, + { + "description": "string is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "nested allOf, to check validation semantics", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "allOf": [ + { + "allOf": [ + { + "type": "null" + } + ] + } + ] + }, + "tests": [ + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "anything non-null is invalid", + "data": 123, + "valid": false + } + ] + }, + { + "description": "allOf combined with anyOf, oneOf", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "allOf": [ { "multipleOf": 2 } ], + "anyOf": [ { "multipleOf": 3 } ], + "oneOf": [ { "multipleOf": 5 } ] + }, + "tests": [ + { + "description": "allOf: false, anyOf: false, oneOf: false", + "data": 1, + "valid": false + }, + { + "description": "allOf: false, anyOf: false, oneOf: true", + "data": 5, + "valid": false + }, + { + "description": "allOf: false, anyOf: true, oneOf: false", + "data": 3, + "valid": false + }, + { + "description": "allOf: false, anyOf: true, oneOf: true", + "data": 15, + "valid": false + }, + { + "description": "allOf: true, anyOf: false, oneOf: false", + "data": 2, + "valid": false + }, + { + "description": "allOf: true, anyOf: false, oneOf: true", + "data": 10, + "valid": false + }, + { + "description": "allOf: true, anyOf: true, oneOf: false", + "data": 6, + "valid": false + }, + { + "description": "allOf: true, anyOf: true, oneOf: true", + "data": 30, + "valid": true + } + ] + } +] diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/anyOf.json 
b/internal/mcp/internal/jsonschema/testdata/draft2020-12/anyOf.json new file mode 100644 index 00000000000..89b192dbd0a --- /dev/null +++ b/internal/mcp/internal/jsonschema/testdata/draft2020-12/anyOf.json @@ -0,0 +1,203 @@ +[ + { + "description": "anyOf", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "anyOf": [ + { + "type": "integer" + }, + { + "minimum": 2 + } + ] + }, + "tests": [ + { + "description": "first anyOf valid", + "data": 1, + "valid": true + }, + { + "description": "second anyOf valid", + "data": 2.5, + "valid": true + }, + { + "description": "both anyOf valid", + "data": 3, + "valid": true + }, + { + "description": "neither anyOf valid", + "data": 1.5, + "valid": false + } + ] + }, + { + "description": "anyOf with base schema", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "string", + "anyOf" : [ + { + "maxLength": 2 + }, + { + "minLength": 4 + } + ] + }, + "tests": [ + { + "description": "mismatch base schema", + "data": 3, + "valid": false + }, + { + "description": "one anyOf valid", + "data": "foobar", + "valid": true + }, + { + "description": "both anyOf invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "anyOf with boolean schemas, all true", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "anyOf": [true, true] + }, + "tests": [ + { + "description": "any value is valid", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "anyOf with boolean schemas, some true", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "anyOf": [true, false] + }, + "tests": [ + { + "description": "any value is valid", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "anyOf with boolean schemas, all false", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "anyOf": [false, false] + }, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "anyOf complex types", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "anyOf": [ + { + "properties": { + "bar": {"type": "integer"} + }, + "required": ["bar"] + }, + { + "properties": { + "foo": {"type": "string"} + }, + "required": ["foo"] + } + ] + }, + "tests": [ + { + "description": "first anyOf valid (complex)", + "data": {"bar": 2}, + "valid": true + }, + { + "description": "second anyOf valid (complex)", + "data": {"foo": "baz"}, + "valid": true + }, + { + "description": "both anyOf valid (complex)", + "data": {"foo": "baz", "bar": 2}, + "valid": true + }, + { + "description": "neither anyOf valid (complex)", + "data": {"foo": 2, "bar": "quux"}, + "valid": false + } + ] + }, + { + "description": "anyOf with one empty schema", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "anyOf": [ + { "type": "number" }, + {} + ] + }, + "tests": [ + { + "description": "string is valid", + "data": "foo", + "valid": true + }, + { + "description": "number is valid", + "data": 123, + "valid": true + } + ] + }, + { + "description": "nested anyOf, to check validation semantics", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "anyOf": [ + { + "anyOf": [ + { + "type": "null" + } + ] + } + ] + }, + "tests": [ + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "anything non-null is invalid", + "data": 123, + "valid": false + } + ] + } +] diff --git 
a/internal/mcp/internal/jsonschema/testdata/draft2020-12/if-then-else.json b/internal/mcp/internal/jsonschema/testdata/draft2020-12/if-then-else.json new file mode 100644 index 00000000000..1c35d7e610a --- /dev/null +++ b/internal/mcp/internal/jsonschema/testdata/draft2020-12/if-then-else.json @@ -0,0 +1,268 @@ +[ + { + "description": "ignore if without then or else", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "if": { + "const": 0 + } + }, + "tests": [ + { + "description": "valid when valid against lone if", + "data": 0, + "valid": true + }, + { + "description": "valid when invalid against lone if", + "data": "hello", + "valid": true + } + ] + }, + { + "description": "ignore then without if", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "then": { + "const": 0 + } + }, + "tests": [ + { + "description": "valid when valid against lone then", + "data": 0, + "valid": true + }, + { + "description": "valid when invalid against lone then", + "data": "hello", + "valid": true + } + ] + }, + { + "description": "ignore else without if", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "else": { + "const": 0 + } + }, + "tests": [ + { + "description": "valid when valid against lone else", + "data": 0, + "valid": true + }, + { + "description": "valid when invalid against lone else", + "data": "hello", + "valid": true + } + ] + }, + { + "description": "if and then without else", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "if": { + "exclusiveMaximum": 0 + }, + "then": { + "minimum": -10 + } + }, + "tests": [ + { + "description": "valid through then", + "data": -1, + "valid": true + }, + { + "description": "invalid through then", + "data": -100, + "valid": false + }, + { + "description": "valid when if test fails", + "data": 3, + "valid": true + } + ] + }, + { + "description": "if and else without then", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "if": { + "exclusiveMaximum": 0 + }, + "else": { + "multipleOf": 2 + } + }, + "tests": [ + { + "description": "valid when if test passes", + "data": -1, + "valid": true + }, + { + "description": "valid through else", + "data": 4, + "valid": true + }, + { + "description": "invalid through else", + "data": 3, + "valid": false + } + ] + }, + { + "description": "validate against correct branch, then vs else", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "if": { + "exclusiveMaximum": 0 + }, + "then": { + "minimum": -10 + }, + "else": { + "multipleOf": 2 + } + }, + "tests": [ + { + "description": "valid through then", + "data": -1, + "valid": true + }, + { + "description": "invalid through then", + "data": -100, + "valid": false + }, + { + "description": "valid through else", + "data": 4, + "valid": true + }, + { + "description": "invalid through else", + "data": 3, + "valid": false + } + ] + }, + { + "description": "non-interference across combined schemas", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "allOf": [ + { + "if": { + "exclusiveMaximum": 0 + } + }, + { + "then": { + "minimum": -10 + } + }, + { + "else": { + "multipleOf": 2 + } + } + ] + }, + "tests": [ + { + "description": "valid, but would have been invalid through then", + "data": -100, + "valid": true + }, + { + "description": "valid, but would have been invalid through else", + "data": 3, + "valid": true + } + ] + }, + { + "description": "if with boolean schema true", + "schema": { + 
"$schema": "https://json-schema.org/draft/2020-12/schema", + "if": true, + "then": { "const": "then" }, + "else": { "const": "else" } + }, + "tests": [ + { + "description": "boolean schema true in if always chooses the then path (valid)", + "data": "then", + "valid": true + }, + { + "description": "boolean schema true in if always chooses the then path (invalid)", + "data": "else", + "valid": false + } + ] + }, + { + "description": "if with boolean schema false", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "if": false, + "then": { "const": "then" }, + "else": { "const": "else" } + }, + "tests": [ + { + "description": "boolean schema false in if always chooses the else path (invalid)", + "data": "then", + "valid": false + }, + { + "description": "boolean schema false in if always chooses the else path (valid)", + "data": "else", + "valid": true + } + ] + }, + { + "description": "if appears at the end when serialized (keyword processing sequence)", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "then": { "const": "yes" }, + "else": { "const": "other" }, + "if": { "maxLength": 4 } + }, + "tests": [ + { + "description": "yes redirects to then and passes", + "data": "yes", + "valid": true + }, + { + "description": "other redirects to else and passes", + "data": "other", + "valid": true + }, + { + "description": "no redirects to then and fails", + "data": "no", + "valid": false + }, + { + "description": "invalid redirects to else and fails", + "data": "invalid", + "valid": false + } + ] + } +] diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/not.json b/internal/mcp/internal/jsonschema/testdata/draft2020-12/not.json new file mode 100644 index 00000000000..346d4a7e529 --- /dev/null +++ b/internal/mcp/internal/jsonschema/testdata/draft2020-12/not.json @@ -0,0 +1,301 @@ +[ + { + "description": "not", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "not": {"type": "integer"} + }, + "tests": [ + { + "description": "allowed", + "data": "foo", + "valid": true + }, + { + "description": "disallowed", + "data": 1, + "valid": false + } + ] + }, + { + "description": "not multiple types", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "not": {"type": ["integer", "boolean"]} + }, + "tests": [ + { + "description": "valid", + "data": "foo", + "valid": true + }, + { + "description": "mismatch", + "data": 1, + "valid": false + }, + { + "description": "other mismatch", + "data": true, + "valid": false + } + ] + }, + { + "description": "not more complex schema", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "not": { + "type": "object", + "properties": { + "foo": { + "type": "string" + } + } + } + }, + "tests": [ + { + "description": "match", + "data": 1, + "valid": true + }, + { + "description": "other match", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "mismatch", + "data": {"foo": "bar"}, + "valid": false + } + ] + }, + { + "description": "forbidden property", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "properties": { + "foo": { + "not": {} + } + } + }, + "tests": [ + { + "description": "property present", + "data": {"foo": 1, "bar": 2}, + "valid": false + }, + { + "description": "property absent", + "data": {"bar": 1, "baz": 2}, + "valid": true + } + ] + }, + { + "description": "forbid everything with empty schema", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + 
"not": {} + }, + "tests": [ + { + "description": "number is invalid", + "data": 1, + "valid": false + }, + { + "description": "string is invalid", + "data": "foo", + "valid": false + }, + { + "description": "boolean true is invalid", + "data": true, + "valid": false + }, + { + "description": "boolean false is invalid", + "data": false, + "valid": false + }, + { + "description": "null is invalid", + "data": null, + "valid": false + }, + { + "description": "object is invalid", + "data": {"foo": "bar"}, + "valid": false + }, + { + "description": "empty object is invalid", + "data": {}, + "valid": false + }, + { + "description": "array is invalid", + "data": ["foo"], + "valid": false + }, + { + "description": "empty array is invalid", + "data": [], + "valid": false + } + ] + }, + { + "description": "forbid everything with boolean schema true", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "not": true + }, + "tests": [ + { + "description": "number is invalid", + "data": 1, + "valid": false + }, + { + "description": "string is invalid", + "data": "foo", + "valid": false + }, + { + "description": "boolean true is invalid", + "data": true, + "valid": false + }, + { + "description": "boolean false is invalid", + "data": false, + "valid": false + }, + { + "description": "null is invalid", + "data": null, + "valid": false + }, + { + "description": "object is invalid", + "data": {"foo": "bar"}, + "valid": false + }, + { + "description": "empty object is invalid", + "data": {}, + "valid": false + }, + { + "description": "array is invalid", + "data": ["foo"], + "valid": false + }, + { + "description": "empty array is invalid", + "data": [], + "valid": false + } + ] + }, + { + "description": "allow everything with boolean schema false", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "not": false + }, + "tests": [ + { + "description": "number is valid", + "data": 1, + "valid": true + }, + { + "description": "string is valid", + "data": "foo", + "valid": true + }, + { + "description": "boolean true is valid", + "data": true, + "valid": true + }, + { + "description": "boolean false is valid", + "data": false, + "valid": true + }, + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "object is valid", + "data": {"foo": "bar"}, + "valid": true + }, + { + "description": "empty object is valid", + "data": {}, + "valid": true + }, + { + "description": "array is valid", + "data": ["foo"], + "valid": true + }, + { + "description": "empty array is valid", + "data": [], + "valid": true + } + ] + }, + { + "description": "double negation", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "not": { "not": {} } + }, + "tests": [ + { + "description": "any value is valid", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "collect annotations inside a 'not', even if collection is disabled", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "not": { + "$comment": "this subschema must still produce annotations internally, even though the 'not' will ultimately discard them", + "anyOf": [ + true, + { "properties": { "foo": true } } + ], + "unevaluatedProperties": false + } + }, + "tests": [ + { + "description": "unevaluated property", + "data": { "bar": 1 }, + "valid": true + }, + { + "description": "annotations are still collected inside a 'not'", + "data": { "foo": 1 }, + "valid": false + } + ] + } +] diff --git 
a/internal/mcp/internal/jsonschema/testdata/draft2020-12/oneOf.json b/internal/mcp/internal/jsonschema/testdata/draft2020-12/oneOf.json new file mode 100644 index 00000000000..7a7c7ffe34b --- /dev/null +++ b/internal/mcp/internal/jsonschema/testdata/draft2020-12/oneOf.json @@ -0,0 +1,293 @@ +[ + { + "description": "oneOf", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "oneOf": [ + { + "type": "integer" + }, + { + "minimum": 2 + } + ] + }, + "tests": [ + { + "description": "first oneOf valid", + "data": 1, + "valid": true + }, + { + "description": "second oneOf valid", + "data": 2.5, + "valid": true + }, + { + "description": "both oneOf valid", + "data": 3, + "valid": false + }, + { + "description": "neither oneOf valid", + "data": 1.5, + "valid": false + } + ] + }, + { + "description": "oneOf with base schema", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "string", + "oneOf" : [ + { + "minLength": 2 + }, + { + "maxLength": 4 + } + ] + }, + "tests": [ + { + "description": "mismatch base schema", + "data": 3, + "valid": false + }, + { + "description": "one oneOf valid", + "data": "foobar", + "valid": true + }, + { + "description": "both oneOf valid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "oneOf with boolean schemas, all true", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "oneOf": [true, true, true] + }, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "oneOf with boolean schemas, one true", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "oneOf": [true, false, false] + }, + "tests": [ + { + "description": "any value is valid", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "oneOf with boolean schemas, more than one true", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "oneOf": [true, true, false] + }, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "oneOf with boolean schemas, all false", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "oneOf": [false, false, false] + }, + "tests": [ + { + "description": "any value is invalid", + "data": "foo", + "valid": false + } + ] + }, + { + "description": "oneOf complex types", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "oneOf": [ + { + "properties": { + "bar": {"type": "integer"} + }, + "required": ["bar"] + }, + { + "properties": { + "foo": {"type": "string"} + }, + "required": ["foo"] + } + ] + }, + "tests": [ + { + "description": "first oneOf valid (complex)", + "data": {"bar": 2}, + "valid": true + }, + { + "description": "second oneOf valid (complex)", + "data": {"foo": "baz"}, + "valid": true + }, + { + "description": "both oneOf valid (complex)", + "data": {"foo": "baz", "bar": 2}, + "valid": false + }, + { + "description": "neither oneOf valid (complex)", + "data": {"foo": 2, "bar": "quux"}, + "valid": false + } + ] + }, + { + "description": "oneOf with empty schema", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "oneOf": [ + { "type": "number" }, + {} + ] + }, + "tests": [ + { + "description": "one valid - valid", + "data": "foo", + "valid": true + }, + { + "description": "both valid - invalid", + "data": 123, + "valid": false + } + ] + }, + { + "description": "oneOf with required", + 
"schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "oneOf": [ + { "required": ["foo", "bar"] }, + { "required": ["foo", "baz"] } + ] + }, + "tests": [ + { + "description": "both invalid - invalid", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "first valid - valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "second valid - valid", + "data": {"foo": 1, "baz": 3}, + "valid": true + }, + { + "description": "both valid - invalid", + "data": {"foo": 1, "bar": 2, "baz" : 3}, + "valid": false + } + ] + }, + { + "description": "oneOf with missing optional property", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "oneOf": [ + { + "properties": { + "bar": true, + "baz": true + }, + "required": ["bar"] + }, + { + "properties": { + "foo": true + }, + "required": ["foo"] + } + ] + }, + "tests": [ + { + "description": "first oneOf valid", + "data": {"bar": 8}, + "valid": true + }, + { + "description": "second oneOf valid", + "data": {"foo": "foo"}, + "valid": true + }, + { + "description": "both oneOf valid", + "data": {"foo": "foo", "bar": 8}, + "valid": false + }, + { + "description": "neither oneOf valid", + "data": {"baz": "quux"}, + "valid": false + } + ] + }, + { + "description": "nested oneOf, to check validation semantics", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "oneOf": [ + { + "oneOf": [ + { + "type": "null" + } + ] + } + ] + }, + "tests": [ + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "anything non-null is invalid", + "data": 123, + "valid": false + } + ] + } +] diff --git a/internal/mcp/internal/jsonschema/validate.go b/internal/mcp/internal/jsonschema/validate.go index f155de2720c..ce4a98b2c9b 100644 --- a/internal/mcp/internal/jsonschema/validate.go +++ b/internal/mcp/internal/jsonschema/validate.go @@ -166,6 +166,70 @@ func (st *state) validate(instance reflect.Value, schema *Schema, path []any) (e } } + // logic + // https://json-schema.org/draft/2020-12/json-schema-core#section-10.2 + // These must happen before arrays and objects because if they evaluate an item or property, + // then the unevaluatedItems/Properties schemas don't apply to it. + // See https://json-schema.org/draft/2020-12/json-schema-core#section-11.2, paragraph 4. + // + // If any of these fail, then validation fails, even if there is an unevaluatedXXX + // keyword in the schema. The spec is unclear about this, but that is the intention. + + valid := func(s *Schema) bool { return st.validate(instance, s, path) == nil } + + if schema.AllOf != nil { + for _, ss := range schema.AllOf { + if err := st.validate(instance, ss, path); err != nil { + return err + } + } + } + if schema.AnyOf != nil { + ok := false + for _, ss := range schema.AnyOf { + if valid(ss) { + ok = true + break + } + } + if !ok { + return fmt.Errorf("anyOf: did not validate against any of %v", schema.AnyOf) + } + } + if schema.OneOf != nil { + // Exactly one. 
+ var okSchema *Schema + for _, ss := range schema.OneOf { + if valid(ss) { + if okSchema != nil { + return fmt.Errorf("oneOf: validated against both %v and %v", okSchema, ss) + } + okSchema = ss + } + } + if okSchema == nil { + return fmt.Errorf("oneOf: did not validate against any of %v", schema.OneOf) + } + } + if schema.Not != nil { + if valid(schema.Not) { + return fmt.Errorf("not: validated against %v", schema.Not) + } + } + if schema.If != nil { + var ss *Schema + if valid(schema.If) { + ss = schema.Then + } else { + ss = schema.Else + } + if ss != nil { + if err := st.validate(instance, ss, path); err != nil { + return err + } + } + } + return nil } diff --git a/internal/mcp/internal/jsonschema/validate_test.go b/internal/mcp/internal/jsonschema/validate_test.go index 88baec95283..c7dac5a77c8 100644 --- a/internal/mcp/internal/jsonschema/validate_test.go +++ b/internal/mcp/internal/jsonschema/validate_test.go @@ -50,8 +50,10 @@ func TestValidate(t *testing.T) { t.Fatal(err) } for _, g := range groups { - if g.Schema.Properties != nil { - t.Skip("schema has properties") + for s := range g.Schema.all() { + if s.Properties != nil { + t.Skip("schema or subschema has properties") + } } rs := &ResolvedSchema{root: g.Schema} t.Run(g.Description, func(t *testing.T) { From 0d2acf1e3850cb48239e2579e7b9327f7f9315d4 Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Fri, 25 Apr 2025 07:35:00 -0400 Subject: [PATCH 255/270] jsonschema: validate array keywords Add validation for the keywords dealing with JSON arrays. We call the elements of an array its "items." The unevaluatedItems keyword introduces some complexity. Its schema applies to all items in the array that have not been successfully validated ("evaluated" is the spec's misleading term) by this schema or subschemas acting on the same array. The logic keywords like allOf and anyOf thus contribute to the set of evaluated items. Following the spec, we say that a schema produces "annotations" that describe the items it has evaluated. The annotations type collects this information. For the "contains" keyword we need to represent the set of evaluated items, but for others we can optimize with an integer or boolean. For example, the "items" keyword applies its schema to every item in the array. If even one fails to validate, the array fails to validate, and the validate call exits before reaching unevaluatedItems. Therefore, if "items" succeeds, it has successfully validated every item, so unevaluatedItems does not apply. A boolean annotation suffices to record this fact. The logic of "anyOf" must change so it can track all evaluated items. It must still visit the remaining subschemas even after a subschema succeeds. Anticipating the next CL, which deals with JSON objects (maps), the annotations struct supports properties as well as items. 
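For illustration only, here is a self-contained sketch of that idea (the names
evaluated and containsMultipleOf are invented for this example and do not
appear in the CL): each subschema reports the set of array indexes it
evaluated, the combining keyword merges those sets even after one branch has
succeeded, and unevaluatedItems applies only to the leftover indexes.

    // Sketch only; not the jsonschema package's implementation.
    package main

    import "fmt"

    // evaluated records which indexes a subschema validated successfully,
    // playing the role of this CL's annotations for array items.
    type evaluated map[int]bool

    // containsMultipleOf acts like a schema {"contains": {"multipleOf": n}}:
    // it returns the indexes whose values it evaluated (multiples of n).
    func containsMultipleOf(items []int, n int) evaluated {
        e := evaluated{}
        for i, v := range items {
            if v%n == 0 {
                e[i] = true
            }
        }
        return e
    }

    func main() {
        items := []int{2, 3, 4, 5, 6}

        // Like allOf: [{contains: {multipleOf: 2}}, {contains: {multipleOf: 3}}].
        // Both branches run, and their annotations are merged.
        merged := evaluated{}
        for _, n := range []int{2, 3} {
            for i := range containsMultipleOf(items, n) {
                merged[i] = true
            }
        }

        // unevaluatedItems: {"multipleOf": 5} applies only to unevaluated indexes.
        for i, v := range items {
            if !merged[i] && v%5 != 0 {
                fmt.Printf("index %d (value %d) fails unevaluatedItems\n", i, v)
            }
        }
    }

Here the only unevaluated index holds 5, which satisfies multipleOf 5, so
nothing is reported; this mirrors the "unevaluatedItems depends on multiple
nested contains" case in the new unevaluatedItems.json testdata.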
Change-Id: I98283d36c9a55f4cae97636cbec20c4886daea11 Reviewed-on: https://go-review.googlesource.com/c/tools/+/668235 Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI --- .../mcp/internal/jsonschema/annotations.go | 72 ++ .../testdata/draft2020-12/contains.json | 176 ++++ .../testdata/draft2020-12/items.json | 304 +++++++ .../testdata/draft2020-12/maxContains.json | 102 +++ .../testdata/draft2020-12/maxItems.json | 50 ++ .../testdata/draft2020-12/minContains.json | 224 +++++ .../testdata/draft2020-12/minItems.json | 50 ++ .../testdata/draft2020-12/prefixItems.json | 104 +++ .../draft2020-12/unevaluatedItems.json | 798 ++++++++++++++++++ .../testdata/draft2020-12/uniqueItems.json | 419 +++++++++ internal/mcp/internal/jsonschema/validate.go | 116 ++- .../mcp/internal/jsonschema/validate_test.go | 12 +- 12 files changed, 2411 insertions(+), 16 deletions(-) create mode 100644 internal/mcp/internal/jsonschema/annotations.go create mode 100644 internal/mcp/internal/jsonschema/testdata/draft2020-12/contains.json create mode 100644 internal/mcp/internal/jsonschema/testdata/draft2020-12/items.json create mode 100644 internal/mcp/internal/jsonschema/testdata/draft2020-12/maxContains.json create mode 100644 internal/mcp/internal/jsonschema/testdata/draft2020-12/maxItems.json create mode 100644 internal/mcp/internal/jsonschema/testdata/draft2020-12/minContains.json create mode 100644 internal/mcp/internal/jsonschema/testdata/draft2020-12/minItems.json create mode 100644 internal/mcp/internal/jsonschema/testdata/draft2020-12/prefixItems.json create mode 100644 internal/mcp/internal/jsonschema/testdata/draft2020-12/unevaluatedItems.json create mode 100644 internal/mcp/internal/jsonschema/testdata/draft2020-12/uniqueItems.json diff --git a/internal/mcp/internal/jsonschema/annotations.go b/internal/mcp/internal/jsonschema/annotations.go new file mode 100644 index 00000000000..7b0932de8d4 --- /dev/null +++ b/internal/mcp/internal/jsonschema/annotations.go @@ -0,0 +1,72 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package jsonschema + +import "maps" + +// An annotations tracks certain properties computed by keywords that are used by validation. +// ("Annotation" is the spec's term.) +// In particular, the unevaluatedItems and unevaluatedProperties keywords need to know which +// items and properties were evaluated (validated successfully). +type annotations struct { + allItems bool // all items were evaluated + endIndex int // 1+largest index evaluated by prefixItems + evaluatedIndexes map[int]bool // set of indexes evaluated by contains + evaluatedProperties map[string]bool // set of properties evaluated by various keywords +} + +// noteIndex marks i as evaluated. +func (a *annotations) noteIndex(i int) { + if a.evaluatedIndexes == nil { + a.evaluatedIndexes = map[int]bool{} + } + a.evaluatedIndexes[i] = true +} + +// noteEndIndex marks items with index less than end as evaluated. +func (a *annotations) noteEndIndex(end int) { + if end > a.endIndex { + a.endIndex = end + } +} + +// noteProperty marks prop as evaluated. +func (a *annotations) noteProperty(prop string) { + if a.evaluatedProperties == nil { + a.evaluatedProperties = map[string]bool{} + } + a.evaluatedProperties[prop] = true +} + +// noteProperties marks all the properties in props as evaluated. 
+func (a *annotations) noteProperties(props map[string]bool) { + a.evaluatedProperties = merge(a.evaluatedProperties, props) +} + +// merge adds b's annotations to a. +// a must not be nil. +func (a *annotations) merge(b *annotations) { + if b == nil { + return + } + if b.allItems { + a.allItems = true + } + if b.endIndex > a.endIndex { + a.endIndex = b.endIndex + } + a.evaluatedIndexes = merge(a.evaluatedIndexes, b.evaluatedIndexes) + a.evaluatedProperties = merge(a.evaluatedProperties, b.evaluatedProperties) +} + +// merge adds t's keys to s and returns s. +// If s is nil, it returns a copy of t. +func merge[K comparable](s, t map[K]bool) map[K]bool { + if s == nil { + return maps.Clone(t) + } + maps.Copy(s, t) + return s +} diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/contains.json b/internal/mcp/internal/jsonschema/testdata/draft2020-12/contains.json new file mode 100644 index 00000000000..08a00a753f7 --- /dev/null +++ b/internal/mcp/internal/jsonschema/testdata/draft2020-12/contains.json @@ -0,0 +1,176 @@ +[ + { + "description": "contains keyword validation", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "contains": {"minimum": 5} + }, + "tests": [ + { + "description": "array with item matching schema (5) is valid", + "data": [3, 4, 5], + "valid": true + }, + { + "description": "array with item matching schema (6) is valid", + "data": [3, 4, 6], + "valid": true + }, + { + "description": "array with two items matching schema (5, 6) is valid", + "data": [3, 4, 5, 6], + "valid": true + }, + { + "description": "array without items matching schema is invalid", + "data": [2, 3, 4], + "valid": false + }, + { + "description": "empty array is invalid", + "data": [], + "valid": false + }, + { + "description": "not array is valid", + "data": {}, + "valid": true + } + ] + }, + { + "description": "contains keyword with const keyword", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "contains": { "const": 5 } + }, + "tests": [ + { + "description": "array with item 5 is valid", + "data": [3, 4, 5], + "valid": true + }, + { + "description": "array with two items 5 is valid", + "data": [3, 4, 5, 5], + "valid": true + }, + { + "description": "array without item 5 is invalid", + "data": [1, 2, 3, 4], + "valid": false + } + ] + }, + { + "description": "contains keyword with boolean schema true", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "contains": true + }, + "tests": [ + { + "description": "any non-empty array is valid", + "data": ["foo"], + "valid": true + }, + { + "description": "empty array is invalid", + "data": [], + "valid": false + } + ] + }, + { + "description": "contains keyword with boolean schema false", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "contains": false + }, + "tests": [ + { + "description": "any non-empty array is invalid", + "data": ["foo"], + "valid": false + }, + { + "description": "empty array is invalid", + "data": [], + "valid": false + }, + { + "description": "non-arrays are valid", + "data": "contains does not apply to strings", + "valid": true + } + ] + }, + { + "description": "items + contains", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "items": { "multipleOf": 2 }, + "contains": { "multipleOf": 3 } + }, + "tests": [ + { + "description": "matches items, does not match contains", + "data": [ 2, 4, 8 ], + "valid": false + }, + { + "description": "does not match items, matches contains", + 
"data": [ 3, 6, 9 ], + "valid": false + }, + { + "description": "matches both items and contains", + "data": [ 6, 12 ], + "valid": true + }, + { + "description": "matches neither items nor contains", + "data": [ 1, 5 ], + "valid": false + } + ] + }, + { + "description": "contains with false if subschema", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "contains": { + "if": false, + "else": true + } + }, + "tests": [ + { + "description": "any non-empty array is valid", + "data": ["foo"], + "valid": true + }, + { + "description": "empty array is invalid", + "data": [], + "valid": false + } + ] + }, + { + "description": "contains with null instance elements", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "contains": { + "type": "null" + } + }, + "tests": [ + { + "description": "allows null items", + "data": [ null ], + "valid": true + } + ] + } +] diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/items.json b/internal/mcp/internal/jsonschema/testdata/draft2020-12/items.json new file mode 100644 index 00000000000..6a3e1cf26e0 --- /dev/null +++ b/internal/mcp/internal/jsonschema/testdata/draft2020-12/items.json @@ -0,0 +1,304 @@ +[ + { + "description": "a schema given for items", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "items": {"type": "integer"} + }, + "tests": [ + { + "description": "valid items", + "data": [ 1, 2, 3 ], + "valid": true + }, + { + "description": "wrong type of items", + "data": [1, "x"], + "valid": false + }, + { + "description": "ignores non-arrays", + "data": {"foo" : "bar"}, + "valid": true + }, + { + "description": "JavaScript pseudo-array is valid", + "data": { + "0": "invalid", + "length": 1 + }, + "valid": true + } + ] + }, + { + "description": "items with boolean schema (true)", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "items": true + }, + "tests": [ + { + "description": "any array is valid", + "data": [ 1, "foo", true ], + "valid": true + }, + { + "description": "empty array is valid", + "data": [], + "valid": true + } + ] + }, + { + "description": "items with boolean schema (false)", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "items": false + }, + "tests": [ + { + "description": "any non-empty array is invalid", + "data": [ 1, "foo", true ], + "valid": false + }, + { + "description": "empty array is valid", + "data": [], + "valid": true + } + ] + }, + { + "description": "items and subitems", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$defs": { + "item": { + "type": "array", + "items": false, + "prefixItems": [ + { "$ref": "#/$defs/sub-item" }, + { "$ref": "#/$defs/sub-item" } + ] + }, + "sub-item": { + "type": "object", + "required": ["foo"] + } + }, + "type": "array", + "items": false, + "prefixItems": [ + { "$ref": "#/$defs/item" }, + { "$ref": "#/$defs/item" }, + { "$ref": "#/$defs/item" } + ] + }, + "tests": [ + { + "description": "valid items", + "data": [ + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ] + ], + "valid": true + }, + { + "description": "too many items", + "data": [ + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ] + ], + "valid": false + }, + { + "description": "too many sub-items", + "data": [ + [ {"foo": null}, {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ], + 
[ {"foo": null}, {"foo": null} ] + ], + "valid": false + }, + { + "description": "wrong item", + "data": [ + {"foo": null}, + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ] + ], + "valid": false + }, + { + "description": "wrong sub-item", + "data": [ + [ {}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ], + [ {"foo": null}, {"foo": null} ] + ], + "valid": false + }, + { + "description": "fewer items is valid", + "data": [ + [ {"foo": null} ], + [ {"foo": null} ] + ], + "valid": true + } + ] + }, + { + "description": "nested items", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "array", + "items": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "number" + } + } + } + } + }, + "tests": [ + { + "description": "valid nested array", + "data": [[[[1]], [[2],[3]]], [[[4], [5], [6]]]], + "valid": true + }, + { + "description": "nested array with invalid type", + "data": [[[["1"]], [[2],[3]]], [[[4], [5], [6]]]], + "valid": false + }, + { + "description": "not deep enough", + "data": [[[1], [2],[3]], [[4], [5], [6]]], + "valid": false + } + ] + }, + { + "description": "prefixItems with no additional items allowed", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "prefixItems": [{}, {}, {}], + "items": false + }, + "tests": [ + { + "description": "empty array", + "data": [ ], + "valid": true + }, + { + "description": "fewer number of items present (1)", + "data": [ 1 ], + "valid": true + }, + { + "description": "fewer number of items present (2)", + "data": [ 1, 2 ], + "valid": true + }, + { + "description": "equal number of items present", + "data": [ 1, 2, 3 ], + "valid": true + }, + { + "description": "additional items are not permitted", + "data": [ 1, 2, 3, 4 ], + "valid": false + } + ] + }, + { + "description": "items does not look in applicators, valid case", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "allOf": [ + { "prefixItems": [ { "minimum": 3 } ] } + ], + "items": { "minimum": 5 } + }, + "tests": [ + { + "description": "prefixItems in allOf does not constrain items, invalid case", + "data": [ 3, 5 ], + "valid": false + }, + { + "description": "prefixItems in allOf does not constrain items, valid case", + "data": [ 5, 5 ], + "valid": true + } + ] + }, + { + "description": "prefixItems validation adjusts the starting index for items", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "prefixItems": [ { "type": "string" } ], + "items": { "type": "integer" } + }, + "tests": [ + { + "description": "valid items", + "data": [ "x", 2, 3 ], + "valid": true + }, + { + "description": "wrong type of second item", + "data": [ "x", "y" ], + "valid": false + } + ] + }, + { + "description": "items with heterogeneous array", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "prefixItems": [{}], + "items": false + }, + "tests": [ + { + "description": "heterogeneous invalid instance", + "data": [ "foo", "bar", 37 ], + "valid": false + }, + { + "description": "valid instance", + "data": [ null ], + "valid": true + } + ] + }, + { + "description": "items with null instance elements", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "items": { + "type": "null" + } + }, + "tests": [ + { + "description": "allows null elements", + "data": [ null ], + "valid": true + } + ] + } +] diff --git 
a/internal/mcp/internal/jsonschema/testdata/draft2020-12/maxContains.json b/internal/mcp/internal/jsonschema/testdata/draft2020-12/maxContains.json new file mode 100644 index 00000000000..8cd3ca741dd --- /dev/null +++ b/internal/mcp/internal/jsonschema/testdata/draft2020-12/maxContains.json @@ -0,0 +1,102 @@ +[ + { + "description": "maxContains without contains is ignored", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "maxContains": 1 + }, + "tests": [ + { + "description": "one item valid against lone maxContains", + "data": [ 1 ], + "valid": true + }, + { + "description": "two items still valid against lone maxContains", + "data": [ 1, 2 ], + "valid": true + } + ] + }, + { + "description": "maxContains with contains", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "contains": {"const": 1}, + "maxContains": 1 + }, + "tests": [ + { + "description": "empty data", + "data": [ ], + "valid": false + }, + { + "description": "all elements match, valid maxContains", + "data": [ 1 ], + "valid": true + }, + { + "description": "all elements match, invalid maxContains", + "data": [ 1, 1 ], + "valid": false + }, + { + "description": "some elements match, valid maxContains", + "data": [ 1, 2 ], + "valid": true + }, + { + "description": "some elements match, invalid maxContains", + "data": [ 1, 2, 1 ], + "valid": false + } + ] + }, + { + "description": "maxContains with contains, value with a decimal", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "contains": {"const": 1}, + "maxContains": 1.0 + }, + "tests": [ + { + "description": "one element matches, valid maxContains", + "data": [ 1 ], + "valid": true + }, + { + "description": "too many elements match, invalid maxContains", + "data": [ 1, 1 ], + "valid": false + } + ] + }, + { + "description": "minContains < maxContains", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "contains": {"const": 1}, + "minContains": 1, + "maxContains": 3 + }, + "tests": [ + { + "description": "actual < minContains < maxContains", + "data": [ ], + "valid": false + }, + { + "description": "minContains < actual < maxContains", + "data": [ 1, 1 ], + "valid": true + }, + { + "description": "minContains < maxContains < actual", + "data": [ 1, 1, 1, 1 ], + "valid": false + } + ] + } +] diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/maxItems.json b/internal/mcp/internal/jsonschema/testdata/draft2020-12/maxItems.json new file mode 100644 index 00000000000..f6a6b7c9af4 --- /dev/null +++ b/internal/mcp/internal/jsonschema/testdata/draft2020-12/maxItems.json @@ -0,0 +1,50 @@ +[ + { + "description": "maxItems validation", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "maxItems": 2 + }, + "tests": [ + { + "description": "shorter is valid", + "data": [1], + "valid": true + }, + { + "description": "exact length is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "too long is invalid", + "data": [1, 2, 3], + "valid": false + }, + { + "description": "ignores non-arrays", + "data": "foobar", + "valid": true + } + ] + }, + { + "description": "maxItems validation with a decimal", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "maxItems": 2.0 + }, + "tests": [ + { + "description": "shorter is valid", + "data": [1], + "valid": true + }, + { + "description": "too long is invalid", + "data": [1, 2, 3], + "valid": false + } + ] + } +] diff --git 
a/internal/mcp/internal/jsonschema/testdata/draft2020-12/minContains.json b/internal/mcp/internal/jsonschema/testdata/draft2020-12/minContains.json new file mode 100644 index 00000000000..ee72d7d6209 --- /dev/null +++ b/internal/mcp/internal/jsonschema/testdata/draft2020-12/minContains.json @@ -0,0 +1,224 @@ +[ + { + "description": "minContains without contains is ignored", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "minContains": 1 + }, + "tests": [ + { + "description": "one item valid against lone minContains", + "data": [ 1 ], + "valid": true + }, + { + "description": "zero items still valid against lone minContains", + "data": [], + "valid": true + } + ] + }, + { + "description": "minContains=1 with contains", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "contains": {"const": 1}, + "minContains": 1 + }, + "tests": [ + { + "description": "empty data", + "data": [ ], + "valid": false + }, + { + "description": "no elements match", + "data": [ 2 ], + "valid": false + }, + { + "description": "single element matches, valid minContains", + "data": [ 1 ], + "valid": true + }, + { + "description": "some elements match, valid minContains", + "data": [ 1, 2 ], + "valid": true + }, + { + "description": "all elements match, valid minContains", + "data": [ 1, 1 ], + "valid": true + } + ] + }, + { + "description": "minContains=2 with contains", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "contains": {"const": 1}, + "minContains": 2 + }, + "tests": [ + { + "description": "empty data", + "data": [ ], + "valid": false + }, + { + "description": "all elements match, invalid minContains", + "data": [ 1 ], + "valid": false + }, + { + "description": "some elements match, invalid minContains", + "data": [ 1, 2 ], + "valid": false + }, + { + "description": "all elements match, valid minContains (exactly as needed)", + "data": [ 1, 1 ], + "valid": true + }, + { + "description": "all elements match, valid minContains (more than needed)", + "data": [ 1, 1, 1 ], + "valid": true + }, + { + "description": "some elements match, valid minContains", + "data": [ 1, 2, 1 ], + "valid": true + } + ] + }, + { + "description": "minContains=2 with contains with a decimal value", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "contains": {"const": 1}, + "minContains": 2.0 + }, + "tests": [ + { + "description": "one element matches, invalid minContains", + "data": [ 1 ], + "valid": false + }, + { + "description": "both elements match, valid minContains", + "data": [ 1, 1 ], + "valid": true + } + ] + }, + { + "description": "maxContains = minContains", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "contains": {"const": 1}, + "maxContains": 2, + "minContains": 2 + }, + "tests": [ + { + "description": "empty data", + "data": [ ], + "valid": false + }, + { + "description": "all elements match, invalid minContains", + "data": [ 1 ], + "valid": false + }, + { + "description": "all elements match, invalid maxContains", + "data": [ 1, 1, 1 ], + "valid": false + }, + { + "description": "all elements match, valid maxContains and minContains", + "data": [ 1, 1 ], + "valid": true + } + ] + }, + { + "description": "maxContains < minContains", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "contains": {"const": 1}, + "maxContains": 1, + "minContains": 3 + }, + "tests": [ + { + "description": "empty data", + "data": [ ], + "valid": false + }, + { + 
"description": "invalid minContains", + "data": [ 1 ], + "valid": false + }, + { + "description": "invalid maxContains", + "data": [ 1, 1, 1 ], + "valid": false + }, + { + "description": "invalid maxContains and minContains", + "data": [ 1, 1 ], + "valid": false + } + ] + }, + { + "description": "minContains = 0", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "contains": {"const": 1}, + "minContains": 0 + }, + "tests": [ + { + "description": "empty data", + "data": [ ], + "valid": true + }, + { + "description": "minContains = 0 makes contains always pass", + "data": [ 2 ], + "valid": true + } + ] + }, + { + "description": "minContains = 0 with maxContains", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "contains": {"const": 1}, + "minContains": 0, + "maxContains": 1 + }, + "tests": [ + { + "description": "empty data", + "data": [ ], + "valid": true + }, + { + "description": "not more than maxContains", + "data": [ 1 ], + "valid": true + }, + { + "description": "too many", + "data": [ 1, 1 ], + "valid": false + } + ] + } +] diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/minItems.json b/internal/mcp/internal/jsonschema/testdata/draft2020-12/minItems.json new file mode 100644 index 00000000000..9d6a8b6d2fc --- /dev/null +++ b/internal/mcp/internal/jsonschema/testdata/draft2020-12/minItems.json @@ -0,0 +1,50 @@ +[ + { + "description": "minItems validation", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "minItems": 1 + }, + "tests": [ + { + "description": "longer is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "exact length is valid", + "data": [1], + "valid": true + }, + { + "description": "too short is invalid", + "data": [], + "valid": false + }, + { + "description": "ignores non-arrays", + "data": "", + "valid": true + } + ] + }, + { + "description": "minItems validation with a decimal", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "minItems": 1.0 + }, + "tests": [ + { + "description": "longer is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "too short is invalid", + "data": [], + "valid": false + } + ] + } +] diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/prefixItems.json b/internal/mcp/internal/jsonschema/testdata/draft2020-12/prefixItems.json new file mode 100644 index 00000000000..0adfc069e3f --- /dev/null +++ b/internal/mcp/internal/jsonschema/testdata/draft2020-12/prefixItems.json @@ -0,0 +1,104 @@ +[ + { + "description": "a schema given for prefixItems", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "prefixItems": [ + {"type": "integer"}, + {"type": "string"} + ] + }, + "tests": [ + { + "description": "correct types", + "data": [ 1, "foo" ], + "valid": true + }, + { + "description": "wrong types", + "data": [ "foo", 1 ], + "valid": false + }, + { + "description": "incomplete array of items", + "data": [ 1 ], + "valid": true + }, + { + "description": "array with additional items", + "data": [ 1, "foo", true ], + "valid": true + }, + { + "description": "empty array", + "data": [ ], + "valid": true + }, + { + "description": "JavaScript pseudo-array is valid", + "data": { + "0": "invalid", + "1": "valid", + "length": 2 + }, + "valid": true + } + ] + }, + { + "description": "prefixItems with boolean schemas", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "prefixItems": [true, false] + }, + "tests": [ + { + 
"description": "array with one item is valid", + "data": [ 1 ], + "valid": true + }, + { + "description": "array with two items is invalid", + "data": [ 1, "foo" ], + "valid": false + }, + { + "description": "empty array is valid", + "data": [], + "valid": true + } + ] + }, + { + "description": "additional items are allowed by default", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "prefixItems": [{"type": "integer"}] + }, + "tests": [ + { + "description": "only the first item is validated", + "data": [1, "foo", false], + "valid": true + } + ] + }, + { + "description": "prefixItems with null instance elements", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "prefixItems": [ + { + "type": "null" + } + ] + }, + "tests": [ + { + "description": "allows null elements", + "data": [ null ], + "valid": true + } + ] + } +] diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/unevaluatedItems.json b/internal/mcp/internal/jsonschema/testdata/draft2020-12/unevaluatedItems.json new file mode 100644 index 00000000000..f861cefaded --- /dev/null +++ b/internal/mcp/internal/jsonschema/testdata/draft2020-12/unevaluatedItems.json @@ -0,0 +1,798 @@ +[ + { + "description": "unevaluatedItems true", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "unevaluatedItems": true + }, + "tests": [ + { + "description": "with no unevaluated items", + "data": [], + "valid": true + }, + { + "description": "with unevaluated items", + "data": ["foo"], + "valid": true + } + ] + }, + { + "description": "unevaluatedItems false", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "unevaluatedItems": false + }, + "tests": [ + { + "description": "with no unevaluated items", + "data": [], + "valid": true + }, + { + "description": "with unevaluated items", + "data": ["foo"], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems as schema", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "unevaluatedItems": { "type": "string" } + }, + "tests": [ + { + "description": "with no unevaluated items", + "data": [], + "valid": true + }, + { + "description": "with valid unevaluated items", + "data": ["foo"], + "valid": true + }, + { + "description": "with invalid unevaluated items", + "data": [42], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems with uniform items", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "items": { "type": "string" }, + "unevaluatedItems": false + }, + "tests": [ + { + "description": "unevaluatedItems doesn't apply", + "data": ["foo", "bar"], + "valid": true + } + ] + }, + { + "description": "unevaluatedItems with tuple", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "prefixItems": [ + { "type": "string" } + ], + "unevaluatedItems": false + }, + "tests": [ + { + "description": "with no unevaluated items", + "data": ["foo"], + "valid": true + }, + { + "description": "with unevaluated items", + "data": ["foo", "bar"], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems with items and prefixItems", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "prefixItems": [ + { "type": "string" } + ], + "items": true, + "unevaluatedItems": false + }, + "tests": [ + { + "description": "unevaluatedItems doesn't apply", + "data": ["foo", 42], + "valid": true + } + ] + }, + { + "description": "unevaluatedItems with items", + "schema": { + 
"$schema": "https://json-schema.org/draft/2020-12/schema", + "items": {"type": "number"}, + "unevaluatedItems": {"type": "string"} + }, + "tests": [ + { + "description": "valid under items", + "comment": "no elements are considered by unevaluatedItems", + "data": [5, 6, 7, 8], + "valid": true + }, + { + "description": "invalid under items", + "data": ["foo", "bar", "baz"], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems with nested tuple", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "prefixItems": [ + { "type": "string" } + ], + "allOf": [ + { + "prefixItems": [ + true, + { "type": "number" } + ] + } + ], + "unevaluatedItems": false + }, + "tests": [ + { + "description": "with no unevaluated items", + "data": ["foo", 42], + "valid": true + }, + { + "description": "with unevaluated items", + "data": ["foo", 42, true], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems with nested items", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "unevaluatedItems": {"type": "boolean"}, + "anyOf": [ + { "items": {"type": "string"} }, + true + ] + }, + "tests": [ + { + "description": "with only (valid) additional items", + "data": [true, false], + "valid": true + }, + { + "description": "with no additional items", + "data": ["yes", "no"], + "valid": true + }, + { + "description": "with invalid additional item", + "data": ["yes", false], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems with nested prefixItems and items", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "allOf": [ + { + "prefixItems": [ + { "type": "string" } + ], + "items": true + } + ], + "unevaluatedItems": false + }, + "tests": [ + { + "description": "with no additional items", + "data": ["foo"], + "valid": true + }, + { + "description": "with additional items", + "data": ["foo", 42, true], + "valid": true + } + ] + }, + { + "description": "unevaluatedItems with nested unevaluatedItems", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "allOf": [ + { + "prefixItems": [ + { "type": "string" } + ] + }, + { "unevaluatedItems": true } + ], + "unevaluatedItems": false + }, + "tests": [ + { + "description": "with no additional items", + "data": ["foo"], + "valid": true + }, + { + "description": "with additional items", + "data": ["foo", 42, true], + "valid": true + } + ] + }, + { + "description": "unevaluatedItems with anyOf", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "prefixItems": [ + { "const": "foo" } + ], + "anyOf": [ + { + "prefixItems": [ + true, + { "const": "bar" } + ] + }, + { + "prefixItems": [ + true, + true, + { "const": "baz" } + ] + } + ], + "unevaluatedItems": false + }, + "tests": [ + { + "description": "when one schema matches and has no unevaluated items", + "data": ["foo", "bar"], + "valid": true + }, + { + "description": "when one schema matches and has unevaluated items", + "data": ["foo", "bar", 42], + "valid": false + }, + { + "description": "when two schemas match and has no unevaluated items", + "data": ["foo", "bar", "baz"], + "valid": true + }, + { + "description": "when two schemas match and has unevaluated items", + "data": ["foo", "bar", "baz", 42], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems with oneOf", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "prefixItems": [ + { "const": "foo" } + ], + "oneOf": [ + { + "prefixItems": [ + true, + { 
"const": "bar" } + ] + }, + { + "prefixItems": [ + true, + { "const": "baz" } + ] + } + ], + "unevaluatedItems": false + }, + "tests": [ + { + "description": "with no unevaluated items", + "data": ["foo", "bar"], + "valid": true + }, + { + "description": "with unevaluated items", + "data": ["foo", "bar", 42], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems with not", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "prefixItems": [ + { "const": "foo" } + ], + "not": { + "not": { + "prefixItems": [ + true, + { "const": "bar" } + ] + } + }, + "unevaluatedItems": false + }, + "tests": [ + { + "description": "with unevaluated items", + "data": ["foo", "bar"], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems with if/then/else", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "prefixItems": [ + { "const": "foo" } + ], + "if": { + "prefixItems": [ + true, + { "const": "bar" } + ] + }, + "then": { + "prefixItems": [ + true, + true, + { "const": "then" } + ] + }, + "else": { + "prefixItems": [ + true, + true, + true, + { "const": "else" } + ] + }, + "unevaluatedItems": false + }, + "tests": [ + { + "description": "when if matches and it has no unevaluated items", + "data": ["foo", "bar", "then"], + "valid": true + }, + { + "description": "when if matches and it has unevaluated items", + "data": ["foo", "bar", "then", "else"], + "valid": false + }, + { + "description": "when if doesn't match and it has no unevaluated items", + "data": ["foo", 42, 42, "else"], + "valid": true + }, + { + "description": "when if doesn't match and it has unevaluated items", + "data": ["foo", 42, 42, "else", 42], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems with boolean schemas", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "allOf": [true], + "unevaluatedItems": false + }, + "tests": [ + { + "description": "with no unevaluated items", + "data": [], + "valid": true + }, + { + "description": "with unevaluated items", + "data": ["foo"], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems with $ref", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$ref": "#/$defs/bar", + "prefixItems": [ + { "type": "string" } + ], + "unevaluatedItems": false, + "$defs": { + "bar": { + "prefixItems": [ + true, + { "type": "string" } + ] + } + } + }, + "tests": [ + { + "description": "with no unevaluated items", + "data": ["foo", "bar"], + "valid": true + }, + { + "description": "with unevaluated items", + "data": ["foo", "bar", "baz"], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems before $ref", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "unevaluatedItems": false, + "prefixItems": [ + { "type": "string" } + ], + "$ref": "#/$defs/bar", + "$defs": { + "bar": { + "prefixItems": [ + true, + { "type": "string" } + ] + } + } + }, + "tests": [ + { + "description": "with no unevaluated items", + "data": ["foo", "bar"], + "valid": true + }, + { + "description": "with unevaluated items", + "data": ["foo", "bar", "baz"], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems with $dynamicRef", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "https://example.com/unevaluated-items-with-dynamic-ref/derived", + + "$ref": "./baseSchema", + + "$defs": { + "derived": { + "$dynamicAnchor": "addons", + "prefixItems": [ + true, + { "type": "string" } + ] + }, + 
"baseSchema": { + "$id": "./baseSchema", + + "$comment": "unevaluatedItems comes first so it's more likely to catch bugs with implementations that are sensitive to keyword ordering", + "unevaluatedItems": false, + "type": "array", + "prefixItems": [ + { "type": "string" } + ], + "$dynamicRef": "#addons", + + "$defs": { + "defaultAddons": { + "$comment": "Needed to satisfy the bookending requirement", + "$dynamicAnchor": "addons" + } + } + } + } + }, + "tests": [ + { + "description": "with no unevaluated items", + "data": ["foo", "bar"], + "valid": true + }, + { + "description": "with unevaluated items", + "data": ["foo", "bar", "baz"], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems can't see inside cousins", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "allOf": [ + { + "prefixItems": [ true ] + }, + { "unevaluatedItems": false } + ] + }, + "tests": [ + { + "description": "always fails", + "data": [ 1 ], + "valid": false + } + ] + }, + { + "description": "item is evaluated in an uncle schema to unevaluatedItems", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "properties": { + "foo": { + "prefixItems": [ + { "type": "string" } + ], + "unevaluatedItems": false + } + }, + "anyOf": [ + { + "properties": { + "foo": { + "prefixItems": [ + true, + { "type": "string" } + ] + } + } + } + ] + }, + "tests": [ + { + "description": "no extra items", + "data": { + "foo": [ + "test" + ] + }, + "valid": true + }, + { + "description": "uncle keyword evaluation is not significant", + "data": { + "foo": [ + "test", + "test" + ] + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedItems depends on adjacent contains", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "prefixItems": [true], + "contains": {"type": "string"}, + "unevaluatedItems": false + }, + "tests": [ + { + "description": "second item is evaluated by contains", + "data": [ 1, "foo" ], + "valid": true + }, + { + "description": "contains fails, second item is not evaluated", + "data": [ 1, 2 ], + "valid": false + }, + { + "description": "contains passes, second item is not evaluated", + "data": [ 1, 2, "foo" ], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems depends on multiple nested contains", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "allOf": [ + { "contains": { "multipleOf": 2 } }, + { "contains": { "multipleOf": 3 } } + ], + "unevaluatedItems": { "multipleOf": 5 } + }, + "tests": [ + { + "description": "5 not evaluated, passes unevaluatedItems", + "data": [ 2, 3, 4, 5, 6 ], + "valid": true + }, + { + "description": "7 not evaluated, fails unevaluatedItems", + "data": [ 2, 3, 4, 7, 8 ], + "valid": false + } + ] + }, + { + "description": "unevaluatedItems and contains interact to control item dependency relationship", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "if": { + "contains": {"const": "a"} + }, + "then": { + "if": { + "contains": {"const": "b"} + }, + "then": { + "if": { + "contains": {"const": "c"} + } + } + }, + "unevaluatedItems": false + }, + "tests": [ + { + "description": "empty array is valid", + "data": [], + "valid": true + }, + { + "description": "only a's are valid", + "data": [ "a", "a" ], + "valid": true + }, + { + "description": "a's and b's are valid", + "data": [ "a", "b", "a", "b", "a" ], + "valid": true + }, + { + "description": "a's, b's and c's are valid", + "data": [ "c", "a", "c", "c", "b", "a" ], + 
"valid": true + }, + { + "description": "only b's are invalid", + "data": [ "b", "b" ], + "valid": false + }, + { + "description": "only c's are invalid", + "data": [ "c", "c" ], + "valid": false + }, + { + "description": "only b's and c's are invalid", + "data": [ "c", "b", "c", "b", "c" ], + "valid": false + }, + { + "description": "only a's and c's are invalid", + "data": [ "c", "a", "c", "a", "c" ], + "valid": false + } + ] + }, + { + "description": "non-array instances are valid", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "unevaluatedItems": false + }, + "tests": [ + { + "description": "ignores booleans", + "data": true, + "valid": true + }, + { + "description": "ignores integers", + "data": 123, + "valid": true + }, + { + "description": "ignores floats", + "data": 1.0, + "valid": true + }, + { + "description": "ignores objects", + "data": {}, + "valid": true + }, + { + "description": "ignores strings", + "data": "foo", + "valid": true + }, + { + "description": "ignores null", + "data": null, + "valid": true + } + ] + }, + { + "description": "unevaluatedItems with null instance elements", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "unevaluatedItems": { + "type": "null" + } + }, + "tests": [ + { + "description": "allows null elements", + "data": [ null ], + "valid": true + } + ] + }, + { + "description": "unevaluatedItems can see annotations from if without then and else", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "if": { + "prefixItems": [{"const": "a"}] + }, + "unevaluatedItems": false + }, + "tests": [ + { + "description": "valid in case if is evaluated", + "data": [ "a" ], + "valid": true + }, + { + "description": "invalid in case if is evaluated", + "data": [ "b" ], + "valid": false + } + ] + } +] diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/uniqueItems.json b/internal/mcp/internal/jsonschema/testdata/draft2020-12/uniqueItems.json new file mode 100644 index 00000000000..4ea3bf98515 --- /dev/null +++ b/internal/mcp/internal/jsonschema/testdata/draft2020-12/uniqueItems.json @@ -0,0 +1,419 @@ +[ + { + "description": "uniqueItems validation", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "uniqueItems": true + }, + "tests": [ + { + "description": "unique array of integers is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "non-unique array of integers is invalid", + "data": [1, 1], + "valid": false + }, + { + "description": "non-unique array of more than two integers is invalid", + "data": [1, 2, 1], + "valid": false + }, + { + "description": "numbers are unique if mathematically unequal", + "data": [1.0, 1.00, 1], + "valid": false + }, + { + "description": "false is not equal to zero", + "data": [0, false], + "valid": true + }, + { + "description": "true is not equal to one", + "data": [1, true], + "valid": true + }, + { + "description": "unique array of strings is valid", + "data": ["foo", "bar", "baz"], + "valid": true + }, + { + "description": "non-unique array of strings is invalid", + "data": ["foo", "bar", "foo"], + "valid": false + }, + { + "description": "unique array of objects is valid", + "data": [{"foo": "bar"}, {"foo": "baz"}], + "valid": true + }, + { + "description": "non-unique array of objects is invalid", + "data": [{"foo": "bar"}, {"foo": "bar"}], + "valid": false + }, + { + "description": "property order of array of objects is ignored", + "data": [{"foo": "bar", "bar": "foo"}, {"bar": 
"foo", "foo": "bar"}], + "valid": false + }, + { + "description": "unique array of nested objects is valid", + "data": [ + {"foo": {"bar" : {"baz" : true}}}, + {"foo": {"bar" : {"baz" : false}}} + ], + "valid": true + }, + { + "description": "non-unique array of nested objects is invalid", + "data": [ + {"foo": {"bar" : {"baz" : true}}}, + {"foo": {"bar" : {"baz" : true}}} + ], + "valid": false + }, + { + "description": "unique array of arrays is valid", + "data": [["foo"], ["bar"]], + "valid": true + }, + { + "description": "non-unique array of arrays is invalid", + "data": [["foo"], ["foo"]], + "valid": false + }, + { + "description": "non-unique array of more than two arrays is invalid", + "data": [["foo"], ["bar"], ["foo"]], + "valid": false + }, + { + "description": "1 and true are unique", + "data": [1, true], + "valid": true + }, + { + "description": "0 and false are unique", + "data": [0, false], + "valid": true + }, + { + "description": "[1] and [true] are unique", + "data": [[1], [true]], + "valid": true + }, + { + "description": "[0] and [false] are unique", + "data": [[0], [false]], + "valid": true + }, + { + "description": "nested [1] and [true] are unique", + "data": [[[1], "foo"], [[true], "foo"]], + "valid": true + }, + { + "description": "nested [0] and [false] are unique", + "data": [[[0], "foo"], [[false], "foo"]], + "valid": true + }, + { + "description": "unique heterogeneous types are valid", + "data": [{}, [1], true, null, 1, "{}"], + "valid": true + }, + { + "description": "non-unique heterogeneous types are invalid", + "data": [{}, [1], true, null, {}, 1], + "valid": false + }, + { + "description": "different objects are unique", + "data": [{"a": 1, "b": 2}, {"a": 2, "b": 1}], + "valid": true + }, + { + "description": "objects are non-unique despite key order", + "data": [{"a": 1, "b": 2}, {"b": 2, "a": 1}], + "valid": false + }, + { + "description": "{\"a\": false} and {\"a\": 0} are unique", + "data": [{"a": false}, {"a": 0}], + "valid": true + }, + { + "description": "{\"a\": true} and {\"a\": 1} are unique", + "data": [{"a": true}, {"a": 1}], + "valid": true + } + ] + }, + { + "description": "uniqueItems with an array of items", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "prefixItems": [{"type": "boolean"}, {"type": "boolean"}], + "uniqueItems": true + }, + "tests": [ + { + "description": "[false, true] from items array is valid", + "data": [false, true], + "valid": true + }, + { + "description": "[true, false] from items array is valid", + "data": [true, false], + "valid": true + }, + { + "description": "[false, false] from items array is not valid", + "data": [false, false], + "valid": false + }, + { + "description": "[true, true] from items array is not valid", + "data": [true, true], + "valid": false + }, + { + "description": "unique array extended from [false, true] is valid", + "data": [false, true, "foo", "bar"], + "valid": true + }, + { + "description": "unique array extended from [true, false] is valid", + "data": [true, false, "foo", "bar"], + "valid": true + }, + { + "description": "non-unique array extended from [false, true] is not valid", + "data": [false, true, "foo", "foo"], + "valid": false + }, + { + "description": "non-unique array extended from [true, false] is not valid", + "data": [true, false, "foo", "foo"], + "valid": false + } + ] + }, + { + "description": "uniqueItems with an array of items and additionalItems=false", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + 
"prefixItems": [{"type": "boolean"}, {"type": "boolean"}], + "uniqueItems": true, + "items": false + }, + "tests": [ + { + "description": "[false, true] from items array is valid", + "data": [false, true], + "valid": true + }, + { + "description": "[true, false] from items array is valid", + "data": [true, false], + "valid": true + }, + { + "description": "[false, false] from items array is not valid", + "data": [false, false], + "valid": false + }, + { + "description": "[true, true] from items array is not valid", + "data": [true, true], + "valid": false + }, + { + "description": "extra items are invalid even if unique", + "data": [false, true, null], + "valid": false + } + ] + }, + { + "description": "uniqueItems=false validation", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "uniqueItems": false + }, + "tests": [ + { + "description": "unique array of integers is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "non-unique array of integers is valid", + "data": [1, 1], + "valid": true + }, + { + "description": "numbers are unique if mathematically unequal", + "data": [1.0, 1.00, 1], + "valid": true + }, + { + "description": "false is not equal to zero", + "data": [0, false], + "valid": true + }, + { + "description": "true is not equal to one", + "data": [1, true], + "valid": true + }, + { + "description": "unique array of objects is valid", + "data": [{"foo": "bar"}, {"foo": "baz"}], + "valid": true + }, + { + "description": "non-unique array of objects is valid", + "data": [{"foo": "bar"}, {"foo": "bar"}], + "valid": true + }, + { + "description": "unique array of nested objects is valid", + "data": [ + {"foo": {"bar" : {"baz" : true}}}, + {"foo": {"bar" : {"baz" : false}}} + ], + "valid": true + }, + { + "description": "non-unique array of nested objects is valid", + "data": [ + {"foo": {"bar" : {"baz" : true}}}, + {"foo": {"bar" : {"baz" : true}}} + ], + "valid": true + }, + { + "description": "unique array of arrays is valid", + "data": [["foo"], ["bar"]], + "valid": true + }, + { + "description": "non-unique array of arrays is valid", + "data": [["foo"], ["foo"]], + "valid": true + }, + { + "description": "1 and true are unique", + "data": [1, true], + "valid": true + }, + { + "description": "0 and false are unique", + "data": [0, false], + "valid": true + }, + { + "description": "unique heterogeneous types are valid", + "data": [{}, [1], true, null, 1], + "valid": true + }, + { + "description": "non-unique heterogeneous types are valid", + "data": [{}, [1], true, null, {}, 1], + "valid": true + } + ] + }, + { + "description": "uniqueItems=false with an array of items", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "prefixItems": [{"type": "boolean"}, {"type": "boolean"}], + "uniqueItems": false + }, + "tests": [ + { + "description": "[false, true] from items array is valid", + "data": [false, true], + "valid": true + }, + { + "description": "[true, false] from items array is valid", + "data": [true, false], + "valid": true + }, + { + "description": "[false, false] from items array is valid", + "data": [false, false], + "valid": true + }, + { + "description": "[true, true] from items array is valid", + "data": [true, true], + "valid": true + }, + { + "description": "unique array extended from [false, true] is valid", + "data": [false, true, "foo", "bar"], + "valid": true + }, + { + "description": "unique array extended from [true, false] is valid", + "data": [true, false, "foo", "bar"], + "valid": true 
+ }, + { + "description": "non-unique array extended from [false, true] is valid", + "data": [false, true, "foo", "foo"], + "valid": true + }, + { + "description": "non-unique array extended from [true, false] is valid", + "data": [true, false, "foo", "foo"], + "valid": true + } + ] + }, + { + "description": "uniqueItems=false with an array of items and additionalItems=false", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "prefixItems": [{"type": "boolean"}, {"type": "boolean"}], + "uniqueItems": false, + "items": false + }, + "tests": [ + { + "description": "[false, true] from items array is valid", + "data": [false, true], + "valid": true + }, + { + "description": "[true, false] from items array is valid", + "data": [true, false], + "valid": true + }, + { + "description": "[false, false] from items array is valid", + "data": [false, false], + "valid": true + }, + { + "description": "[true, true] from items array is valid", + "data": [true, true], + "valid": true + }, + { + "description": "extra items are invalid even if unique", + "data": [false, true, null], + "valid": false + } + ] + } +] diff --git a/internal/mcp/internal/jsonschema/validate.go b/internal/mcp/internal/jsonschema/validate.go index ce4a98b2c9b..2231914b9bc 100644 --- a/internal/mcp/internal/jsonschema/validate.go +++ b/internal/mcp/internal/jsonschema/validate.go @@ -32,7 +32,8 @@ func (rs *ResolvedSchema) Validate(instance any) error { return fmt.Errorf("cannot validate version %s, only %s", s, draft202012) } st := &state{rs: rs} - return st.validate(reflect.ValueOf(instance), st.rs.root, nil) + var pathBuffer [4]any + return st.validate(reflect.ValueOf(instance), st.rs.root, nil, pathBuffer[:0]) } // state is the state of single call to ResolvedSchema.Validate. @@ -43,7 +44,7 @@ type state struct { // validate validates the reflected value of the instance. // It keeps track of the path within the instance for better error messages. -func (st *state) validate(instance reflect.Value, schema *Schema, path []any) (err error) { +func (st *state) validate(instance reflect.Value, schema *Schema, callerAnns *annotations, path []any) (err error) { defer func() { if err != nil { if p := formatPath(path); p != "" { @@ -175,21 +176,23 @@ func (st *state) validate(instance reflect.Value, schema *Schema, path []any) (e // If any of these fail, then validation fails, even if there is an unevaluatedXXX // keyword in the schema. The spec is unclear about this, but that is the intention. - valid := func(s *Schema) bool { return st.validate(instance, s, path) == nil } + var anns annotations // all the annotations for this call and child calls + + valid := func(s *Schema, anns *annotations) bool { return st.validate(instance, s, anns, path) == nil } if schema.AllOf != nil { for _, ss := range schema.AllOf { - if err := st.validate(instance, ss, path); err != nil { + if err := st.validate(instance, ss, &anns, path); err != nil { return err } } } if schema.AnyOf != nil { + // We must visit them all, to collect annotations. ok := false for _, ss := range schema.AnyOf { - if valid(ss) { + if valid(ss, &anns) { ok = true - break } } if !ok { @@ -200,7 +203,7 @@ func (st *state) validate(instance reflect.Value, schema *Schema, path []any) (e // Exactly one. 
var okSchema *Schema for _, ss := range schema.OneOf { - if valid(ss) { + if valid(ss, &anns) { if okSchema != nil { return fmt.Errorf("oneOf: validated against both %v and %v", okSchema, ss) } @@ -212,24 +215,117 @@ func (st *state) validate(instance reflect.Value, schema *Schema, path []any) (e } } if schema.Not != nil { - if valid(schema.Not) { + // Ignore annotations from "not". + if valid(schema.Not, nil) { return fmt.Errorf("not: validated against %v", schema.Not) } } if schema.If != nil { var ss *Schema - if valid(schema.If) { + if valid(schema.If, &anns) { ss = schema.Then } else { ss = schema.Else } if ss != nil { - if err := st.validate(instance, ss, path); err != nil { + if err := st.validate(instance, ss, &anns, path); err != nil { return err } } } + // arrays + if instance.Kind() == reflect.Array || instance.Kind() == reflect.Slice { + // https://json-schema.org/draft/2020-12/json-schema-core#section-10.3.1 + // This validate call doesn't collect annotations for the items of the instance; they are separate + // instances in their own right. + // TODO(jba): if the test suite doesn't cover this case, add a test. For example, nested arrays. + for i, ischema := range schema.PrefixItems { + if i >= instance.Len() { + break // shorter is OK + } + if err := st.validate(instance.Index(i), ischema, nil, append(path, i)); err != nil { + return err + } + } + anns.noteEndIndex(min(len(schema.PrefixItems), instance.Len())) + + if schema.Items != nil { + for i := len(schema.PrefixItems); i < instance.Len(); i++ { + if err := st.validate(instance.Index(i), schema.Items, nil, append(path, i)); err != nil { + return err + } + } + // Note that all the items in this array have been validated. + anns.allItems = true + } + + nContains := 0 + if schema.Contains != nil { + for i := range instance.Len() { + if err := st.validate(instance.Index(i), schema.Contains, nil, append(path, i)); err == nil { + nContains++ + anns.noteIndex(i) + } + } + if nContains == 0 && (schema.MinContains == nil || int(*schema.MinContains) > 0) { + return fmt.Errorf("contains: %s does not have an item matching %s", + instance, schema.Contains) + } + } + + // https://json-schema.org/draft/2020-12/draft-bhutton-json-schema-validation-01#section-6.4 + // TODO(jba): check that these next four keywords' values are integers. + if schema.MinContains != nil && schema.Contains != nil { + if m := int(*schema.MinContains); nContains < m { + return fmt.Errorf("minContains: contains validated %d items, less than %d", nContains, m) + } + } + if schema.MaxContains != nil && schema.Contains != nil { + if m := int(*schema.MaxContains); nContains > m { + return fmt.Errorf("maxContains: contains validated %d items, greater than %d", nContains, m) + } + } + if schema.MinItems != nil { + if m := int(*schema.MinItems); instance.Len() < m { + return fmt.Errorf("minItems: array length %d is less than %d", instance.Len(), m) + } + } + if schema.MaxItems != nil { + if m := int(*schema.MaxItems); instance.Len() > m { + return fmt.Errorf("minItems: array length %d is greater than %d", instance.Len(), m) + } + } + if schema.UniqueItems { + // Determine uniqueness with O(n²) comparisons. + // TODO: optimize via hashing. 
+ for i := range instance.Len() { + for j := i + 1; j < instance.Len(); j++ { + if equalValue(instance.Index(i), instance.Index(j)) { + return fmt.Errorf("uniqueItems: array items %d and %d are equal", i, j) + } + } + } + } + // https://json-schema.org/draft/2020-12/json-schema-core#section-11.2 + if schema.UnevaluatedItems != nil && !anns.allItems { + // Apply this subschema to all items in the array that haven't been successfully validated. + // That includes validations by subschemas on the same instance, like allOf. + for i := anns.endIndex; i < instance.Len(); i++ { + if !anns.evaluatedIndexes[i] { + if err := st.validate(instance.Index(i), schema.UnevaluatedItems, nil, append(path, i)); err != nil { + return err + } + } + } + anns.allItems = true + } + } + + if callerAnns != nil { + // Our caller wants to know what we've validated. + callerAnns.merge(&anns) + } return nil } diff --git a/internal/mcp/internal/jsonschema/validate_test.go b/internal/mcp/internal/jsonschema/validate_test.go index c7dac5a77c8..e86df8e2621 100644 --- a/internal/mcp/internal/jsonschema/validate_test.go +++ b/internal/mcp/internal/jsonschema/validate_test.go @@ -50,13 +50,13 @@ func TestValidate(t *testing.T) { t.Fatal(err) } for _, g := range groups { - for s := range g.Schema.all() { - if s.Properties != nil { - t.Skip("schema or subschema has properties") - } - } - rs := &ResolvedSchema{root: g.Schema} t.Run(g.Description, func(t *testing.T) { + rs := &ResolvedSchema{root: g.Schema} + for s := range g.Schema.all() { + if s.Properties != nil || s.Required != nil || s.Defs != nil || s.UnevaluatedProperties != nil { + t.Skip("schema or subschema has unimplemented keywords") + } + } for _, test := range g.Tests { t.Run(test.Description, func(t *testing.T) { err = rs.Validate(test.Data) From f11e7de88e54bdcef1a170b582fd0cfecb9b958c Mon Sep 17 00:00:00 2001 From: Peter Weinberger Date: Mon, 14 Apr 2025 08:40:44 -0400 Subject: [PATCH 256/270] internal/stdlib: add arg types for functions The stdlib package provides pre-compiled information about exported symbols in the standard library. This change adds a string that contains the function signature as text. (E.g., "func[T comparable](value T) Handle[T]") The intended use is for unimported completions, so there is no need yet for the more complicated case of method signatures. 
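
A minimal sketch of how a consumer might use the new field (not part of
this change; it assumes the field added to stdlib.Symbol is exported as
Signature, and it relies only on the existing Name and Kind fields and
the PackageSymbols table; it is runnable only from within x/tools, since
the package is internal):

	package main

	import (
		"fmt"
		"strings"

		"golang.org/x/tools/internal/stdlib"
	)

	func main() {
		// Print completion-style signatures for exported functions in
		// archive/tar, e.g.
		//   FileInfoHeader(fi fs.FileInfo, link string) (*Header, error)
		// The stored string begins with "func", so trim that prefix
		// before appending the rest to the symbol name.
		for _, sym := range stdlib.PackageSymbols["archive/tar"] {
			if sym.Kind == stdlib.Func {
				fmt.Println(sym.Name + strings.TrimPrefix(sym.Signature, "func"))
			}
		}
	}

Methods are skipped by the Kind check above, which matches the scope of
this change: only top-level functions carry a signature string for now.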
Change-Id: Id7e60fb9f45d86f6fb945e2a070b3466b9eb4fb4 Reviewed-on: https://go-review.googlesource.com/c/tools/+/665335 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan --- internal/stdlib/deps.go | 384 +- internal/stdlib/generate.go | 68 +- internal/stdlib/manifest.go | 34648 ++++++++++----------- internal/stdlib/stdlib.go | 8 + internal/stdlib/testdata/nethttp.deps | 2 + internal/stdlib/testdata/nethttp.imports | 2 + 6 files changed, 17593 insertions(+), 17519 deletions(-) diff --git a/internal/stdlib/deps.go b/internal/stdlib/deps.go index c50bf406b7f..77cf8d2181a 100644 --- a/internal/stdlib/deps.go +++ b/internal/stdlib/deps.go @@ -12,61 +12,61 @@ type pkginfo struct { } var deps = [...]pkginfo{ - {"archive/tar", "\x03j\x03E6\x01\v\x01\"\x01\x01\x02\x05\n\x02\x01\x02\x02\v"}, - {"archive/zip", "\x02\x04`\a\x16\x0206\x01*\x05\x01\x11\x03\x02\r\x04"}, - {"bufio", "\x03j~E\x13"}, - {"bytes", "m+S\x03\fG\x02\x02"}, + {"archive/tar", "\x03j\x03E5\x01\v\x01#\x01\x01\x02\x05\n\x02\x01\x02\x02\v"}, + {"archive/zip", "\x02\x04`\a\x16\x0205\x01+\x05\x01\x11\x03\x02\r\x04"}, + {"bufio", "\x03j}F\x13"}, + {"bytes", "m+R\x03\fH\x02\x02"}, {"cmp", ""}, - {"compress/bzip2", "\x02\x02\xe7\x01B"}, - {"compress/flate", "\x02k\x03{\r\x024\x01\x03"}, - {"compress/gzip", "\x02\x04`\a\x03\x15fT"}, - {"compress/lzw", "\x02k\x03{"}, - {"compress/zlib", "\x02\x04`\a\x03\x13\x01g"}, + {"compress/bzip2", "\x02\x02\xe6\x01C"}, + {"compress/flate", "\x02k\x03z\r\x025\x01\x03"}, + {"compress/gzip", "\x02\x04`\a\x03\x15eU"}, + {"compress/lzw", "\x02k\x03z"}, + {"compress/zlib", "\x02\x04`\a\x03\x13\x01f"}, {"container/heap", "\xae\x02"}, {"container/list", ""}, {"container/ring", ""}, {"context", "m\\i\x01\f"}, - {"crypto", "\x83\x01hD"}, + {"crypto", "\x83\x01gE"}, {"crypto/aes", "\x10\n\a\x8e\x02"}, - {"crypto/cipher", "\x03\x1e\x01\x01\x1d\x11\x1c,R"}, + {"crypto/cipher", "\x03\x1e\x01\x01\x1d\x11\x1c,Q"}, {"crypto/des", "\x10\x13\x1d-,\x96\x01\x03"}, - {"crypto/dsa", "@\x04)~\x0e"}, - {"crypto/ecdh", "\x03\v\f\x0e\x04\x14\x04\r\x1c~"}, - {"crypto/ecdsa", "\x0e\x05\x03\x04\x01\x0e\x16\x01\x04\f\x01\x1c~\x0e\x04K\x01"}, - {"crypto/ed25519", "\x0e\x1c\x16\n\a\x1c~D"}, - {"crypto/elliptic", "0=~\x0e9"}, + {"crypto/dsa", "@\x04)}\x0e"}, + {"crypto/ecdh", "\x03\v\f\x0e\x04\x14\x04\r\x1c}"}, + {"crypto/ecdsa", "\x0e\x05\x03\x04\x01\x0e\x16\x01\x04\f\x01\x1c}\x0e\x04L\x01"}, + {"crypto/ed25519", "\x0e\x1c\x16\n\a\x1c}E"}, + {"crypto/elliptic", "0=}\x0e:"}, {"crypto/fips140", " \x05\x90\x01"}, {"crypto/hkdf", "-\x12\x01-\x16"}, {"crypto/hmac", "\x1a\x14\x11\x01\x112"}, {"crypto/internal/boring", "\x0e\x02\rf"}, - {"crypto/internal/boring/bbig", "\x1a\xdf\x01L"}, + {"crypto/internal/boring/bbig", "\x1a\xde\x01M"}, {"crypto/internal/boring/bcache", "\xb3\x02\x12"}, {"crypto/internal/boring/sig", ""}, - {"crypto/internal/cryptotest", "\x03\r\n)\x0e\x19\x06\x13\x12#\a\t\x11\x12\x11\x1a\r\r\x05\n"}, + {"crypto/internal/cryptotest", "\x03\r\n)\x0e\x19\x06\x13\x12#\a\t\x11\x11\x11\x1b\x01\f\r\x05\n"}, {"crypto/internal/entropy", "E"}, - {"crypto/internal/fips140", ">/~8\r\x15"}, - {"crypto/internal/fips140/aes", "\x03\x1d\x03\x02\x13\x04\x01\x01\x05*\x8d\x015"}, - {"crypto/internal/fips140/aes/gcm", " \x01\x02\x02\x02\x11\x04\x01\x06*\x8b\x01"}, + {"crypto/internal/fips140", ">/}9\r\x15"}, + {"crypto/internal/fips140/aes", "\x03\x1d\x03\x02\x13\x04\x01\x01\x05*\x8c\x016"}, + {"crypto/internal/fips140/aes/gcm", " \x01\x02\x02\x02\x11\x04\x01\x06*\x8a\x01"}, {"crypto/internal/fips140/alias", "\xc5\x02"}, - 
{"crypto/internal/fips140/bigmod", "%\x17\x01\x06*\x8d\x01"}, - {"crypto/internal/fips140/check", " \x0e\x06\b\x02\xad\x01Z"}, + {"crypto/internal/fips140/bigmod", "%\x17\x01\x06*\x8c\x01"}, + {"crypto/internal/fips140/check", " \x0e\x06\b\x02\xac\x01["}, {"crypto/internal/fips140/check/checktest", "%\xfe\x01\""}, - {"crypto/internal/fips140/drbg", "\x03\x1c\x01\x01\x04\x13\x04\b\x01(~\x0f8"}, - {"crypto/internal/fips140/ecdh", "\x03\x1d\x05\x02\t\f1~\x0f8"}, - {"crypto/internal/fips140/ecdsa", "\x03\x1d\x04\x01\x02\a\x02\x067~G"}, + {"crypto/internal/fips140/drbg", "\x03\x1c\x01\x01\x04\x13\x04\b\x01(}\x0f9"}, + {"crypto/internal/fips140/ecdh", "\x03\x1d\x05\x02\t\f1}\x0f9"}, + {"crypto/internal/fips140/ecdsa", "\x03\x1d\x04\x01\x02\a\x02\x067}H"}, {"crypto/internal/fips140/ed25519", "\x03\x1d\x05\x02\x04\v7\xc2\x01\x03"}, - {"crypto/internal/fips140/edwards25519", "%\a\f\x041\x8d\x018"}, - {"crypto/internal/fips140/edwards25519/field", "%\x13\x041\x8d\x01"}, + {"crypto/internal/fips140/edwards25519", "%\a\f\x041\x8c\x019"}, + {"crypto/internal/fips140/edwards25519/field", "%\x13\x041\x8c\x01"}, {"crypto/internal/fips140/hkdf", "\x03\x1d\x05\t\x069"}, {"crypto/internal/fips140/hmac", "\x03\x1d\x14\x01\x017"}, {"crypto/internal/fips140/mlkem", "\x03\x1d\x05\x02\x0e\x03\x041"}, - {"crypto/internal/fips140/nistec", "%\f\a\x041\x8d\x01)\x0f\x13"}, - {"crypto/internal/fips140/nistec/fiat", "%\x135\x8d\x01"}, + {"crypto/internal/fips140/nistec", "%\f\a\x041\x8c\x01*\x0f\x13"}, + {"crypto/internal/fips140/nistec/fiat", "%\x135\x8c\x01"}, {"crypto/internal/fips140/pbkdf2", "\x03\x1d\x05\t\x069"}, - {"crypto/internal/fips140/rsa", "\x03\x1d\x04\x01\x02\r\x01\x01\x025~G"}, - {"crypto/internal/fips140/sha256", "\x03\x1d\x1c\x01\x06*\x8d\x01"}, - {"crypto/internal/fips140/sha3", "\x03\x1d\x18\x04\x010\x8d\x01K"}, - {"crypto/internal/fips140/sha512", "\x03\x1d\x1c\x01\x06*\x8d\x01"}, + {"crypto/internal/fips140/rsa", "\x03\x1d\x04\x01\x02\r\x01\x01\x025}H"}, + {"crypto/internal/fips140/sha256", "\x03\x1d\x1c\x01\x06*\x8c\x01"}, + {"crypto/internal/fips140/sha3", "\x03\x1d\x18\x04\x010\x8c\x01L"}, + {"crypto/internal/fips140/sha512", "\x03\x1d\x1c\x01\x06*\x8c\x01"}, {"crypto/internal/fips140/ssh", " \x05"}, {"crypto/internal/fips140/subtle", "#"}, {"crypto/internal/fips140/tls12", "\x03\x1d\x05\t\x06\x027"}, @@ -76,90 +76,90 @@ var deps = [...]pkginfo{ {"crypto/internal/fips140deps/cpu", "\xad\x01\a"}, {"crypto/internal/fips140deps/godebug", "\xb5\x01"}, {"crypto/internal/fips140hash", "5\x1a4\xc2\x01"}, - {"crypto/internal/fips140only", "'\r\x01\x01M26"}, + {"crypto/internal/fips140only", "'\r\x01\x01M25"}, {"crypto/internal/fips140test", ""}, - {"crypto/internal/hpke", "\x0e\x01\x01\x03\x1a\x1d#,aM"}, + {"crypto/internal/hpke", "\x0e\x01\x01\x03\x1a\x1d#,`N"}, {"crypto/internal/impl", "\xb0\x02"}, - {"crypto/internal/randutil", "\xeb\x01\x12"}, - {"crypto/internal/sysrand", "mi\"\x1e\r\x0f\x01\x01\v\x06"}, + {"crypto/internal/randutil", "\xea\x01\x12"}, + {"crypto/internal/sysrand", "mi!\x1f\r\x0f\x01\x01\v\x06"}, {"crypto/internal/sysrand/internal/seccomp", "m"}, - {"crypto/md5", "\x0e2-\x16\x16a"}, + {"crypto/md5", "\x0e2-\x16\x16`"}, {"crypto/mlkem", "/"}, {"crypto/pbkdf2", "2\r\x01-\x16"}, - {"crypto/rand", "\x1a\x06\a\x19\x04\x01(~\x0eL"}, + {"crypto/rand", "\x1a\x06\a\x19\x04\x01(}\x0eM"}, {"crypto/rc4", "#\x1d-\xc2\x01"}, - {"crypto/rsa", "\x0e\f\x01\t\x0f\f\x01\x04\x06\a\x1c\x03\x1326\r\x01"}, - {"crypto/sha1", "\x0e\f&-\x16\x16\x14M"}, + {"crypto/rsa", 
"\x0e\f\x01\t\x0f\f\x01\x04\x06\a\x1c\x03\x1325\r\x01"}, + {"crypto/sha1", "\x0e\f&-\x16\x16\x14L"}, {"crypto/sha256", "\x0e\f\x1aO"}, {"crypto/sha3", "\x0e'N\xc2\x01"}, {"crypto/sha512", "\x0e\f\x1cM"}, {"crypto/subtle", "8\x96\x01U"}, - {"crypto/tls", "\x03\b\x02\x01\x01\x01\x01\x02\x01\x01\x01\x03\x01\a\x01\v\x02\n\x01\b\x05\x03\x01\x01\x01\x01\x02\x01\x02\x01\x17\x02\x03\x13\x16\x14\b6\x16\x15\r\n\x01\x01\x01\x02\x01\f\x06\x02\x01"}, + {"crypto/tls", "\x03\b\x02\x01\x01\x01\x01\x02\x01\x01\x01\x03\x01\a\x01\v\x02\n\x01\b\x05\x03\x01\x01\x01\x01\x02\x01\x02\x01\x17\x02\x03\x13\x16\x14\b5\x16\x16\r\n\x01\x01\x01\x02\x01\f\x06\x02\x01"}, {"crypto/tls/internal/fips140tls", " \x93\x02"}, - {"crypto/x509", "\x03\v\x01\x01\x01\x01\x01\x01\x01\x011\x03\x02\x01\x01\x02\x05\x0e\x06\x02\x02\x03E\x033\x01\x02\t\x01\x01\x01\a\x0f\x05\x01\x06\x02\x05\f\x01\x02\r\x02\x01\x01\x02\x03\x01"}, - {"crypto/x509/pkix", "c\x06\a\x89\x01F"}, - {"database/sql", "\x03\nJ\x16\x03{\f\x06!\x05\n\x02\x03\x01\f\x02\x02\x02"}, + {"crypto/x509", "\x03\v\x01\x01\x01\x01\x01\x01\x01\x011\x03\x02\x01\x01\x02\x05\x0e\x06\x02\x02\x03E\x032\x01\x02\t\x01\x01\x01\a\x10\x05\x01\x06\x02\x05\f\x01\x02\r\x02\x01\x01\x02\x03\x01"}, + {"crypto/x509/pkix", "c\x06\a\x88\x01G"}, + {"database/sql", "\x03\nJ\x16\x03z\f\x06\"\x05\n\x02\x03\x01\f\x02\x02\x02"}, {"database/sql/driver", "\r`\x03\xae\x01\x11\x10"}, - {"debug/buildinfo", "\x03W\x02\x01\x01\b\a\x03`\x19\x02\x01*\x0f "}, - {"debug/dwarf", "\x03c\a\x03{0\x13\x01\x01"}, - {"debug/elf", "\x03\x06P\r\a\x03`\x1a\x01+\x19\x01\x15"}, + {"debug/buildinfo", "\x03W\x02\x01\x01\b\a\x03`\x18\x02\x01+\x0f "}, + {"debug/dwarf", "\x03c\a\x03z1\x13\x01\x01"}, + {"debug/elf", "\x03\x06P\r\a\x03`\x19\x01,\x19\x01\x15"}, {"debug/gosym", "\x03c\n\xbe\x01\x01\x01\x02"}, - {"debug/macho", "\x03\x06P\r\n`\x1b+\x19\x01"}, - {"debug/pe", "\x03\x06P\r\a\x03`\x1b+\x19\x01\x15"}, - {"debug/plan9obj", "f\a\x03`\x1b+"}, - {"embed", "m+:\x19\x01S"}, + {"debug/macho", "\x03\x06P\r\n`\x1a,\x19\x01"}, + {"debug/pe", "\x03\x06P\r\a\x03`\x1a,\x19\x01\x15"}, + {"debug/plan9obj", "f\a\x03`\x1a,"}, + {"embed", "m+:\x18\x01T"}, {"embed/internal/embedtest", ""}, {"encoding", ""}, - {"encoding/ascii85", "\xeb\x01D"}, - {"encoding/asn1", "\x03j\x03\x88\x01\x01%\x0f\x02\x01\x0f\x03\x01"}, - {"encoding/base32", "\xeb\x01B\x02"}, - {"encoding/base64", "f\x85\x01B\x02"}, - {"encoding/binary", "m~\r&\x0f\x05"}, - {"encoding/csv", "\x02\x01j\x03{E\x11\x02"}, - {"encoding/gob", "\x02_\x05\a\x03`\x1b\f\x01\x02\x1c\b\x14\x01\x0e\x02"}, - {"encoding/hex", "m\x03{B\x03"}, - {"encoding/json", "\x03\x01]\x04\b\x03{\r&\x0f\x02\x01\x02\x0f\x01\x01\x02"}, - {"encoding/pem", "\x03b\b~B\x03"}, - {"encoding/xml", "\x02\x01^\f\x03{3\x05\f\x01\x02\x0f\x02"}, + {"encoding/ascii85", "\xea\x01E"}, + {"encoding/asn1", "\x03j\x03\x87\x01\x01&\x0f\x02\x01\x0f\x03\x01"}, + {"encoding/base32", "\xea\x01C\x02"}, + {"encoding/base64", "\x99\x01QC\x02"}, + {"encoding/binary", "m}\r'\x0f\x05"}, + {"encoding/csv", "\x02\x01j\x03zF\x11\x02"}, + {"encoding/gob", "\x02_\x05\a\x03`\x1a\f\x01\x02\x1d\b\x14\x01\x0e\x02"}, + {"encoding/hex", "m\x03zC\x03"}, + {"encoding/json", "\x03\x01]\x04\b\x03z\r'\x0f\x02\x01\x02\x0f\x01\x01\x02"}, + {"encoding/pem", "\x03b\b}C\x03"}, + {"encoding/xml", "\x02\x01^\f\x03z4\x05\f\x01\x02\x0f\x02"}, {"errors", "\xc9\x01|"}, - {"expvar", "jK:\t\n\x14\r\n\x02\x03\x01\x10"}, - {"flag", "a\f\x03{+\b\x05\n\x02\x01\x0f"}, - {"fmt", "mE9\r\x1e\b\x0f\x02\x03\x11"}, - {"go/ast", "\x03\x01l\x0f\x01k\x03(\b\x0f\x02\x01"}, + {"expvar", 
"jK9\t\n\x15\r\n\x02\x03\x01\x10"}, + {"flag", "a\f\x03z,\b\x05\n\x02\x01\x0f"}, + {"fmt", "mE8\r\x1f\b\x0f\x02\x03\x11"}, + {"go/ast", "\x03\x01l\x0f\x01j\x03)\b\x0f\x02\x01"}, {"go/ast/internal/tests", ""}, - {"go/build", "\x02\x01j\x03\x01\x03\x02\a\x02\x01\x17\x1e\x04\x02\t\x14\x13\x01*\x01\x04\x01\a\n\x02\x01\x11\x02\x02"}, + {"go/build", "\x02\x01j\x03\x01\x03\x02\a\x02\x01\x17\x1e\x04\x02\t\x14\x12\x01+\x01\x04\x01\a\n\x02\x01\x11\x02\x02"}, {"go/build/constraint", "m\xc2\x01\x01\x11\x02"}, - {"go/constant", "p\x10x\x01\x015\x01\x02\x11"}, - {"go/doc", "\x04l\x01\x06\t=.0\x12\x02\x01\x11\x02"}, + {"go/constant", "p\x10w\x01\x016\x01\x02\x11"}, + {"go/doc", "\x04l\x01\x06\t=-1\x12\x02\x01\x11\x02"}, {"go/doc/comment", "\x03m\xbd\x01\x01\x01\x01\x11\x02"}, - {"go/format", "\x03m\x01\f\x01\x02kE"}, - {"go/importer", "s\a\x01\x01\x04\x01j8"}, - {"go/internal/gccgoimporter", "\x02\x01W\x13\x03\x05\v\x01h\x02+\x01\x05\x13\x01\v\b"}, - {"go/internal/gcimporter", "\x02n\x10\x01/\x05\x0e(+\x17\x03\x02"}, - {"go/internal/srcimporter", "p\x01\x02\n\x03\x01j+\x01\x05\x14\x02\x13"}, - {"go/parser", "\x03j\x03\x01\x03\v\x01k\x01*\x06\x14"}, - {"go/printer", "p\x01\x03\x03\tk\r\x1e\x17\x02\x01\x02\n\x05\x02"}, - {"go/scanner", "\x03m\x10k1\x12\x01\x12\x02"}, + {"go/format", "\x03m\x01\f\x01\x02jF"}, + {"go/importer", "s\a\x01\x01\x04\x01i9"}, + {"go/internal/gccgoimporter", "\x02\x01W\x13\x03\x05\v\x01g\x02,\x01\x05\x13\x01\v\b"}, + {"go/internal/gcimporter", "\x02n\x10\x01/\x05\x0e',\x17\x03\x02"}, + {"go/internal/srcimporter", "p\x01\x02\n\x03\x01i,\x01\x05\x14\x02\x13"}, + {"go/parser", "\x03j\x03\x01\x03\v\x01j\x01+\x06\x14"}, + {"go/printer", "p\x01\x03\x03\tj\r\x1f\x17\x02\x01\x02\n\x05\x02"}, + {"go/scanner", "\x03m\x10j2\x12\x01\x12\x02"}, {"go/token", "\x04l\xbd\x01\x02\x03\x01\x0e\x02"}, - {"go/types", "\x03\x01\x06c\x03\x01\x04\b\x03\x02\x15\x1e\x06,\x04\x03\n$\a\n\x01\x01\x01\x02\x01\x0e\x02\x02"}, + {"go/types", "\x03\x01\x06c\x03\x01\x04\b\x03\x02\x15\x1e\x06+\x04\x03\n%\a\n\x01\x01\x01\x02\x01\x0e\x02\x02"}, {"go/version", "\xba\x01v"}, - {"hash", "\xeb\x01"}, + {"hash", "\xea\x01"}, {"hash/adler32", "m\x16\x16"}, {"hash/crc32", "m\x16\x16\x14\x85\x01\x01\x12"}, {"hash/crc64", "m\x16\x16\x99\x01"}, - {"hash/fnv", "m\x16\x16a"}, - {"hash/maphash", "\x94\x01\x05\x1b\x03AM"}, + {"hash/fnv", "m\x16\x16`"}, + {"hash/maphash", "\x94\x01\x05\x1b\x03@N"}, {"html", "\xb0\x02\x02\x11"}, - {"html/template", "\x03g\x06\x19,6\x01\v\x1f\x05\x01\x02\x03\x0e\x01\x02\v\x01\x03\x02"}, - {"image", "\x02k\x1f_\x0f5\x03\x01"}, + {"html/template", "\x03g\x06\x19,5\x01\v \x05\x01\x02\x03\x0e\x01\x02\v\x01\x03\x02"}, + {"image", "\x02k\x1f^\x0f6\x03\x01"}, {"image/color", ""}, {"image/color/palette", "\x8c\x01"}, {"image/draw", "\x8b\x01\x01\x04"}, - {"image/gif", "\x02\x01\x05e\x03\x1b\x01\x01\x01\vR"}, + {"image/gif", "\x02\x01\x05e\x03\x1b\x01\x01\x01\vQ"}, {"image/internal/imageutil", "\x8b\x01"}, - {"image/jpeg", "\x02k\x1e\x01\x04["}, - {"image/png", "\x02\a]\n\x13\x02\x06\x01_D"}, - {"index/suffixarray", "\x03c\a~\r)\f\x01"}, + {"image/jpeg", "\x02k\x1e\x01\x04Z"}, + {"image/png", "\x02\a]\n\x13\x02\x06\x01^E"}, + {"index/suffixarray", "\x03c\a}\r*\f\x01"}, {"internal/abi", "\xb4\x01\x91\x01"}, {"internal/asan", "\xc5\x02"}, {"internal/bisect", "\xa3\x02\x0f\x01"}, @@ -171,27 +171,27 @@ var deps = [...]pkginfo{ {"internal/copyright", ""}, {"internal/coverage", ""}, {"internal/coverage/calloc", ""}, - {"internal/coverage/cfile", "j\x06\x17\x16\x01\x02\x01\x01\x01\x01\x01\x01\x01#\x01 
+\x06\a\f\x01\x03\f\x06"}, - {"internal/coverage/cformat", "\x04l-\x04J\f6\x01\x02\f"}, - {"internal/coverage/cmerge", "p-["}, - {"internal/coverage/decodecounter", "f\n-\v\x02A+\x19\x16"}, - {"internal/coverage/decodemeta", "\x02d\n\x17\x16\v\x02A+"}, - {"internal/coverage/encodecounter", "\x02d\n-\f\x01\x02?\f\x1f\x17"}, - {"internal/coverage/encodemeta", "\x02\x01c\n\x13\x04\x16\r\x02?+/"}, + {"internal/coverage/cfile", "j\x06\x17\x16\x01\x02\x01\x01\x01\x01\x01\x01\x01#\x01\x1f,\x06\a\f\x01\x03\f\x06"}, + {"internal/coverage/cformat", "\x04l-\x04I\f7\x01\x02\f"}, + {"internal/coverage/cmerge", "p-Z"}, + {"internal/coverage/decodecounter", "f\n-\v\x02@,\x19\x16"}, + {"internal/coverage/decodemeta", "\x02d\n\x17\x16\v\x02@,"}, + {"internal/coverage/encodecounter", "\x02d\n-\f\x01\x02>\f \x17"}, + {"internal/coverage/encodemeta", "\x02\x01c\n\x13\x04\x16\r\x02>,/"}, {"internal/coverage/pods", "\x04l-y\x06\x05\f\x02\x01"}, {"internal/coverage/rtcov", "\xc5\x02"}, - {"internal/coverage/slicereader", "f\n{Z"}, - {"internal/coverage/slicewriter", "p{"}, - {"internal/coverage/stringtab", "p8\x04?"}, + {"internal/coverage/slicereader", "f\nz["}, + {"internal/coverage/slicewriter", "pz"}, + {"internal/coverage/stringtab", "p8\x04>"}, {"internal/coverage/test", ""}, {"internal/coverage/uleb128", ""}, {"internal/cpu", "\xc5\x02"}, {"internal/dag", "\x04l\xbd\x01\x03"}, {"internal/diff", "\x03m\xbe\x01\x02"}, - {"internal/exportdata", "\x02\x01j\x03\x03]\x1b+\x01\x05\x13\x01\x02"}, - {"internal/filepathlite", "m+:\x1aA"}, + {"internal/exportdata", "\x02\x01j\x03\x03]\x1a,\x01\x05\x13\x01\x02"}, + {"internal/filepathlite", "m+:\x19B"}, {"internal/fmtsort", "\x04\x9a\x02\x0f"}, - {"internal/fuzz", "\x03\nA\x18\x04\x03\x03\x01\f\x0356\r\x02\x1c\x01\x05\x02\x05\f\x01\x02\x01\x01\v\x04\x02"}, + {"internal/fuzz", "\x03\nA\x18\x04\x03\x03\x01\f\x0355\r\x02\x1d\x01\x05\x02\x05\f\x01\x02\x01\x01\v\x04\x02"}, {"internal/goarch", ""}, {"internal/godebug", "\x96\x01 |\x01\x12"}, {"internal/godebugs", ""}, @@ -202,158 +202,158 @@ var deps = [...]pkginfo{ {"internal/goversion", ""}, {"internal/itoa", ""}, {"internal/lazyregexp", "\x96\x02\v\x0f\x02"}, - {"internal/lazytemplate", "\xeb\x01+\x1a\x02\v"}, + {"internal/lazytemplate", "\xea\x01,\x1a\x02\v"}, {"internal/msan", "\xc5\x02"}, {"internal/nettrace", ""}, - {"internal/obscuretestdata", "e\x86\x01+"}, + {"internal/obscuretestdata", "e\x85\x01,"}, {"internal/oserror", "m"}, - {"internal/pkgbits", "\x03K\x18\a\x03\x05\vk\x0e\x1d\r\f\x01"}, + {"internal/pkgbits", "\x03K\x18\a\x03\x05\vj\x0e\x1e\r\f\x01"}, {"internal/platform", ""}, - {"internal/poll", "mO\x1a\x158\x0f\x01\x01\v\x06"}, - {"internal/profile", "\x03\x04f\x03{6\r\x01\x01\x0f"}, + {"internal/poll", "mO\x1a\x149\x0f\x01\x01\v\x06"}, + {"internal/profile", "\x03\x04f\x03z7\r\x01\x01\x0f"}, {"internal/profilerecord", ""}, {"internal/race", "\x94\x01\xb1\x01"}, - {"internal/reflectlite", "\x94\x01 4;\""}, + {"internal/reflectlite", "\x94\x01 3<\""}, {"internal/runtime/atomic", "\xc5\x02"}, {"internal/runtime/exithook", "\xca\x01{"}, {"internal/runtime/maps", "\x94\x01\x01\x1f\v\t\x05\x01w"}, {"internal/runtime/math", "\xb4\x01"}, {"internal/runtime/sys", "\xb4\x01\x04"}, {"internal/runtime/syscall", "\xc5\x02"}, - {"internal/saferio", "\xeb\x01Z"}, + {"internal/saferio", "\xea\x01["}, {"internal/singleflight", "\xb2\x02"}, {"internal/stringslite", "\x98\x01\xad\x01"}, {"internal/sync", "\x94\x01 \x14k\x12"}, {"internal/synctest", "\xc5\x02"}, {"internal/syscall/execenv", "\xb4\x02"}, 
{"internal/syscall/unix", "\xa3\x02\x10\x01\x11"}, - {"internal/sysinfo", "\x02\x01\xaa\x01>+\x1a\x02"}, + {"internal/sysinfo", "\x02\x01\xaa\x01=,\x1a\x02"}, {"internal/syslist", ""}, - {"internal/testenv", "\x03\n`\x02\x01*\x1a\x10(*\x01\x05\a\f\x01\x02\x02\x01\n"}, + {"internal/testenv", "\x03\n`\x02\x01*\x1a\x10'+\x01\x05\a\f\x01\x02\x02\x01\n"}, {"internal/testlog", "\xb2\x02\x01\x12"}, {"internal/testpty", "m\x03\xa6\x01"}, - {"internal/trace", "\x02\x01\x01\x06\\\a\x03m\x01\x01\x06\x06\x03\n5\x01\x02\x0f"}, - {"internal/trace/event", ""}, - {"internal/trace/event/go122", "pm"}, - {"internal/trace/internal/oldtrace", "\x03\x01b\a\x03m\b\x06\r5\x01"}, - {"internal/trace/internal/testgen/go122", "\x03c\nl\x01\x01\x03\x04\x010\v\x0f"}, - {"internal/trace/raw", "\x02d\nm\b\x06D\x01\x11"}, - {"internal/trace/testtrace", "\x02\x01j\x03l\x05\x05\x056\f\x02\x01"}, - {"internal/trace/traceviewer", "\x02]\v\x06\x1a<\x16\b\a\x04\t\n\x14\x01\x05\a\f\x01\x02\r"}, + {"internal/trace", "\x02\x01\x01\x06\\\a\x03n\x03\x03\x06\x03\n6\x01\x02\x0f\x06"}, + {"internal/trace/internal/testgen", "\x03c\nl\x03\x02\x03\x011\v\x0f"}, + {"internal/trace/internal/tracev1", "\x03\x01b\a\x03t\x06\r6\x01"}, + {"internal/trace/raw", "\x02d\nq\x03\x06E\x01\x11"}, + {"internal/trace/testtrace", "\x02\x01j\x03l\x03\x06\x057\f\x02\x01"}, + {"internal/trace/tracev2", ""}, + {"internal/trace/traceviewer", "\x02]\v\x06\x1a<\x16\a\a\x04\t\n\x15\x01\x05\a\f\x01\x02\r"}, {"internal/trace/traceviewer/format", ""}, - {"internal/trace/version", "pm\x01\r"}, + {"internal/trace/version", "pq\t"}, {"internal/txtar", "\x03m\xa6\x01\x1a"}, {"internal/types/errors", "\xaf\x02"}, {"internal/unsafeheader", "\xc5\x02"}, - {"internal/xcoff", "Y\r\a\x03`\x1b+\x19\x01"}, - {"internal/zstd", "f\a\x03{\x0f"}, + {"internal/xcoff", "Y\r\a\x03`\x1a,\x19\x01"}, + {"internal/zstd", "f\a\x03z\x0f"}, {"io", "m\xc5\x01"}, - {"io/fs", "m+*)0\x12\x12\x04"}, - {"io/ioutil", "\xeb\x01\x01*\x17\x03"}, + {"io/fs", "m+*(1\x12\x12\x04"}, + {"io/ioutil", "\xea\x01\x01+\x17\x03"}, {"iter", "\xc8\x01[\""}, - {"log", "p{\x05&\r\x0f\x01\f"}, + {"log", "pz\x05'\r\x0f\x01\f"}, {"log/internal", ""}, - {"log/slog", "\x03\nT\t\x03\x03{\x04\x01\x02\x02\x04&\x05\n\x02\x01\x02\x01\f\x02\x02\x02"}, + {"log/slog", "\x03\nT\t\x03\x03z\x04\x01\x02\x02\x04'\x05\n\x02\x01\x02\x01\f\x02\x02\x02"}, {"log/slog/internal", ""}, - {"log/slog/internal/benchmarks", "\r`\x03{\x06\x03;\x10"}, + {"log/slog/internal/benchmarks", "\r`\x03z\x06\x03<\x10"}, {"log/slog/internal/buffer", "\xb2\x02"}, - {"log/slog/internal/slogtest", "\xf1\x01"}, - {"log/syslog", "m\x03\x7f\x12\x15\x1a\x02\r"}, - {"maps", "\xee\x01W"}, - {"math", "\xad\x01MK"}, - {"math/big", "\x03j\x03)\x14>\r\x02\x023\x01\x02\x13"}, + {"log/slog/internal/slogtest", "\xf0\x01"}, + {"log/syslog", "m\x03~\x12\x16\x1a\x02\r"}, + {"maps", "\xed\x01X"}, + {"math", "\xad\x01LL"}, + {"math/big", "\x03j\x03)\x14=\r\x02\x024\x01\x02\x13"}, {"math/bits", "\xc5\x02"}, - {"math/cmplx", "\xf8\x01\x02"}, - {"math/rand", "\xb5\x01C:\x01\x12"}, - {"math/rand/v2", "m,\x02]\x02K"}, - {"mime", "\x02\x01b\b\x03{\f\x1f\x17\x03\x02\x0f\x02"}, - {"mime/multipart", "\x02\x01G#\x03E6\f\x01\x06\x02\x14\x02\x06\x11\x02\x01\x15"}, - {"mime/quotedprintable", "\x02\x01m{"}, - {"net", "\x04\t`+\x1d\a\x04\x05\f\x01\x04\x15\x01$\x06\r\n\x05\x01\x01\v\x06\a"}, - {"net/http", "\x02\x01\x04\x04\x02=\b\x13\x01\a\x03E6\x01\x03\b\x01\x02\x02\x02\x01\x02\x06\x02\x01\n\x01\x01\x05\x01\x02\x05\n\x01\x01\x01\x02\x01\f\x02\x02\x02\b\x01\x01\x01"}, - {"net/http/cgi", 
"\x02P\x1b\x03{\x04\b\n\x01\x12\x01\x01\x01\x04\x01\x05\x02\n\x02\x01\x0f\x0e"}, - {"net/http/cookiejar", "\x04i\x03\x91\x01\x01\b\v\x18\x03\x02\r\x04"}, - {"net/http/fcgi", "\x02\x01\nY\a\x03{\x16\x01\x01\x13\x1a\x02\r"}, - {"net/http/httptest", "\x02\x01\nE\x02\x1b\x01{\x04\x12\x01\t\t\x02\x19\x01\x02\r\x0e"}, - {"net/http/httptrace", "\rEnA\x13\n!"}, - {"net/http/httputil", "\x02\x01\n`\x03{\x04\x0f\x03\x01\x05\x02\x01\n\x01\x1b\x02\r\x0e"}, - {"net/http/internal", "\x02\x01j\x03{"}, + {"math/cmplx", "\xf7\x01\x02"}, + {"math/rand", "\xb5\x01B;\x01\x12"}, + {"math/rand/v2", "m,\x02\\\x02L"}, + {"mime", "\x02\x01b\b\x03z\f \x17\x03\x02\x0f\x02"}, + {"mime/multipart", "\x02\x01G#\x03E5\f\x01\x06\x02\x15\x02\x06\x11\x02\x01\x15"}, + {"mime/quotedprintable", "\x02\x01mz"}, + {"net", "\x04\t`+\x1d\a\x04\x05\f\x01\x04\x14\x01%\x06\r\n\x05\x01\x01\v\x06\a"}, + {"net/http", "\x02\x01\x04\x04\x02=\b\x13\x01\a\x03E5\x01\x03\b\x01\x02\x02\x02\x01\x02\x06\x02\x01\x01\n\x01\x01\x05\x01\x02\x05\n\x01\x01\x01\x02\x01\x01\v\x02\x02\x02\b\x01\x01\x01"}, + {"net/http/cgi", "\x02P\x1b\x03z\x04\b\n\x01\x13\x01\x01\x01\x04\x01\x05\x02\n\x02\x01\x0f\x0e"}, + {"net/http/cookiejar", "\x04i\x03\x90\x01\x01\b\f\x18\x03\x02\r\x04"}, + {"net/http/fcgi", "\x02\x01\nY\a\x03z\x16\x01\x01\x14\x1a\x02\r"}, + {"net/http/httptest", "\x02\x01\nE\x02\x1b\x01z\x04\x12\x01\n\t\x02\x19\x01\x02\r\x0e"}, + {"net/http/httptrace", "\rEn@\x14\n!"}, + {"net/http/httputil", "\x02\x01\n`\x03z\x04\x0f\x03\x01\x05\x02\x01\v\x01\x1b\x02\r\x0e"}, + {"net/http/internal", "\x02\x01j\x03z"}, {"net/http/internal/ascii", "\xb0\x02\x11"}, + {"net/http/internal/httpcommon", "\r`\x03\x96\x01\x0e\x01\x19\x01\x01\x02\x1b\x02"}, {"net/http/internal/testcert", "\xb0\x02"}, - {"net/http/pprof", "\x02\x01\nc\x19,\x11%\x04\x13\x13\x01\r\x06\x03\x01\x02\x01\x0f"}, + {"net/http/pprof", "\x02\x01\nc\x19,\x11$\x04\x13\x14\x01\r\x06\x03\x01\x02\x01\x0f"}, {"net/internal/cgotest", ""}, {"net/internal/socktest", "p\xc2\x01\x02"}, - {"net/mail", "\x02k\x03{\x04\x0f\x03\x13\x1c\x02\r\x04"}, - {"net/netip", "\x04i+\x01#<\x025\x15"}, - {"net/rpc", "\x02f\x05\x03\x10\na\x04\x12\x01\x1c\x0f\x03\x02"}, - {"net/rpc/jsonrpc", "j\x03\x03{\x16\x10!"}, - {"net/smtp", "\x19.\v\x13\b\x03{\x16\x13\x1c"}, - {"net/textproto", "\x02\x01j\x03{\r\t.\x01\x02\x13"}, - {"net/url", "m\x03\x87\x01$\x12\x02\x01\x15"}, - {"os", "m+\x01\x18\x03\b\t\r\x03\x01\x04\x11\x017\n\x05\x01\x01\v\x06"}, - {"os/exec", "\x03\n`H \x01\x15\x01*\x06\a\f\x01\x04\v"}, + {"net/mail", "\x02k\x03z\x04\x0f\x03\x14\x1c\x02\r\x04"}, + {"net/netip", "\x04i+\x01#;\x026\x15"}, + {"net/rpc", "\x02f\x05\x03\x10\n`\x04\x12\x01\x1d\x0f\x03\x02"}, + {"net/rpc/jsonrpc", "j\x03\x03z\x16\x11!"}, + {"net/smtp", "\x19.\v\x13\b\x03z\x16\x14\x1c"}, + {"net/textproto", "\x02\x01j\x03z\r\t/\x01\x02\x13"}, + {"net/url", "m\x03\x86\x01%\x12\x02\x01\x15"}, + {"os", "m+\x01\x18\x03\b\t\r\x03\x01\x04\x10\x018\n\x05\x01\x01\v\x06"}, + {"os/exec", "\x03\n`H \x01\x14\x01+\x06\a\f\x01\x04\v"}, {"os/exec/internal/fdtest", "\xb4\x02"}, {"os/signal", "\r\x89\x02\x17\x05\x02"}, - {"os/user", "\x02\x01j\x03{+\r\f\x01\x02"}, + {"os/user", "\x02\x01j\x03z,\r\f\x01\x02"}, {"path", "m+\xab\x01"}, - {"path/filepath", "m+\x19;*\r\n\x03\x04\x0f"}, + {"path/filepath", "m+\x19:+\r\n\x03\x04\x0f"}, {"plugin", "m"}, - {"reflect", "m'\x04\x1c\b\f\x04\x02\x1a\x06\n+\f\x03\x0f\x02\x02"}, + {"reflect", "m'\x04\x1c\b\f\x04\x02\x19\x10,\f\x03\x0f\x02\x02"}, {"reflect/internal/example1", ""}, {"reflect/internal/example2", ""}, - {"regexp", 
"\x03\xe8\x017\v\x02\x01\x02\x0f\x02"}, + {"regexp", "\x03\xe7\x018\v\x02\x01\x02\x0f\x02"}, {"regexp/syntax", "\xad\x02\x01\x01\x01\x11\x02"}, - {"runtime", "\x94\x01\x04\x01\x02\f\x06\a\x02\x01\x01\x0f\x03\x01\x01\x01\x01\x01\x03s"}, - {"runtime/coverage", "\x9f\x01L"}, + {"runtime", "\x94\x01\x04\x01\x02\f\x06\a\x02\x01\x01\x0f\x03\x01\x01\x01\x01\x01\x03\x0fd"}, + {"runtime/coverage", "\x9f\x01K"}, {"runtime/debug", "pUQ\r\n\x02\x01\x0f\x06"}, {"runtime/internal/startlinetest", ""}, {"runtime/internal/wasitest", ""}, - {"runtime/metrics", "\xb6\x01B+\""}, - {"runtime/pprof", "\x02\x01\x01\x03\x06Y\a\x03$3$\r\x1e\r\n\x01\x01\x01\x02\x02\b\x03\x06"}, + {"runtime/metrics", "\xb6\x01A,\""}, + {"runtime/pprof", "\x02\x01\x01\x03\x06Y\a\x03$3#\r\x1f\r\n\x01\x01\x01\x02\x02\b\x03\x06"}, {"runtime/race", "\xab\x02"}, {"runtime/race/internal/amd64v1", ""}, - {"runtime/trace", "\rc{8\x0f\x01\x12"}, - {"slices", "\x04\xea\x01\fK"}, - {"sort", "\xc9\x0113"}, - {"strconv", "m+:&\x02I"}, - {"strings", "m'\x04:\x19\x03\f8\x0f\x02\x02"}, + {"runtime/trace", "\rcz9\x0f\x01\x12"}, + {"slices", "\x04\xe9\x01\fL"}, + {"sort", "\xc9\x0104"}, + {"strconv", "m+:%\x02J"}, + {"strings", "m'\x04:\x18\x03\f9\x0f\x02\x02"}, {"structs", ""}, {"sync", "\xc8\x01\vP\x10\x12"}, {"sync/atomic", "\xc5\x02"}, - {"syscall", "m(\x03\x01\x1b\b\x03\x03\x06\aT\x0f\x01\x12"}, - {"testing", "\x03\n`\x02\x01G\x11\x0f\x14\r\x04\x1a\x06\x02\x05\x02\a\x01\x02\x01\x02\x01\f\x02\x02\x02"}, - {"testing/fstest", "m\x03{\x01\v$\x12\x03\b\a"}, - {"testing/internal/testdeps", "\x02\v\xa6\x01'\x11+\x03\x05\x03\b\a\x02\r"}, - {"testing/iotest", "\x03j\x03{\x04"}, - {"testing/quick", "o\x01\x88\x01\x04\"\x12\x0f"}, - {"testing/slogtest", "\r`\x03\x81\x01-\x05\x12\n"}, - {"text/scanner", "\x03m{++\x02"}, - {"text/tabwriter", "p{X"}, - {"text/template", "m\x03B9\x01\v\x1e\x01\x05\x01\x02\x05\r\x02\f\x03\x02"}, + {"syscall", "m(\x03\x01\x1b\b\x03\x03\x06\aT\n\x05\x01\x12"}, + {"testing", "\x03\n`\x02\x01X\x0f\x13\r\x04\x1b\x06\x02\x05\x02\a\x01\x02\x01\x02\x01\f\x02\x02\x02"}, + {"testing/fstest", "m\x03z\x01\v%\x12\x03\b\a"}, + {"testing/internal/testdeps", "\x02\v\xa6\x01'\x10,\x03\x05\x03\b\a\x02\r"}, + {"testing/iotest", "\x03j\x03z\x04"}, + {"testing/quick", "o\x01\x87\x01\x04#\x12\x0f"}, + {"testing/slogtest", "\r`\x03\x80\x01.\x05\x12\n"}, + {"text/scanner", "\x03mz,+\x02"}, + {"text/tabwriter", "pzY"}, + {"text/template", "m\x03B8\x01\v\x1f\x01\x05\x01\x02\x05\r\x02\f\x03\x02"}, {"text/template/parse", "\x03m\xb3\x01\f\x01\x11\x02"}, - {"time", "m+\x1d\x1d()\x0f\x02\x11"}, + {"time", "m+\x1d\x1d'*\x0f\x02\x11"}, {"time/tzdata", "m\xc7\x01\x11"}, {"unicode", ""}, {"unicode/utf16", ""}, {"unicode/utf8", ""}, {"unique", "\x94\x01>\x01P\x0f\x13\x12"}, {"unsafe", ""}, - {"vendor/golang.org/x/crypto/chacha20", "\x10V\a\x8d\x01)'"}, + {"vendor/golang.org/x/crypto/chacha20", "\x10V\a\x8c\x01*'"}, {"vendor/golang.org/x/crypto/chacha20poly1305", "\x10V\a\xd9\x01\x04\x01\a"}, - {"vendor/golang.org/x/crypto/cryptobyte", "c\n\x03\x89\x01%!\n"}, + {"vendor/golang.org/x/crypto/cryptobyte", "c\n\x03\x88\x01&!\n"}, {"vendor/golang.org/x/crypto/cryptobyte/asn1", ""}, {"vendor/golang.org/x/crypto/internal/alias", "\xc5\x02"}, - {"vendor/golang.org/x/crypto/internal/poly1305", "Q\x15\x94\x01"}, + {"vendor/golang.org/x/crypto/internal/poly1305", "Q\x15\x93\x01"}, {"vendor/golang.org/x/net/dns/dnsmessage", "m"}, - {"vendor/golang.org/x/net/http/httpguts", "\x81\x02\x13\x1c\x13\r"}, - {"vendor/golang.org/x/net/http/httpproxy", 
"m\x03\x91\x01\x0f\x05\x01\x1a\x13\r"}, - {"vendor/golang.org/x/net/http2/hpack", "\x03j\x03{G"}, - {"vendor/golang.org/x/net/idna", "p\x88\x018\x13\x10\x02\x01"}, - {"vendor/golang.org/x/net/nettest", "\x03c\a\x03{\x11\x05\x15\x01\f\f\x01\x02\x02\x01\n"}, + {"vendor/golang.org/x/net/http/httpguts", "\x80\x02\x14\x1c\x13\r"}, + {"vendor/golang.org/x/net/http/httpproxy", "m\x03\x90\x01\x15\x01\x1a\x13\r"}, + {"vendor/golang.org/x/net/http2/hpack", "\x03j\x03zH"}, + {"vendor/golang.org/x/net/idna", "p\x87\x019\x13\x10\x02\x01"}, + {"vendor/golang.org/x/net/nettest", "\x03c\a\x03z\x11\x05\x16\x01\f\f\x01\x02\x02\x01\n"}, {"vendor/golang.org/x/sys/cpu", "\x96\x02\r\f\x01\x15"}, {"vendor/golang.org/x/text/secure/bidirule", "m\xd6\x01\x11\x01"}, - {"vendor/golang.org/x/text/transform", "\x03j~X"}, - {"vendor/golang.org/x/text/unicode/bidi", "\x03\be\x7f?\x15"}, - {"vendor/golang.org/x/text/unicode/norm", "f\n{G\x11\x11"}, + {"vendor/golang.org/x/text/transform", "\x03j}Y"}, + {"vendor/golang.org/x/text/unicode/bidi", "\x03\be~@\x15"}, + {"vendor/golang.org/x/text/unicode/norm", "f\nzH\x11\x11"}, {"weak", "\x94\x01\x8f\x01\""}, } diff --git a/internal/stdlib/generate.go b/internal/stdlib/generate.go index b70ed475eb9..3a6d8559dcb 100644 --- a/internal/stdlib/generate.go +++ b/internal/stdlib/generate.go @@ -37,6 +37,7 @@ import ( func main() { log.SetFlags(log.Lshortfile) // to identify the source of the log messages + dir := apidir() manifest(dir) deps() @@ -45,6 +46,57 @@ func main() { // -- generate std manifest -- func manifest(apidir string) { + // find the signatures + cfg := packages.Config{ + Mode: packages.LoadTypes, + Env: append(os.Environ(), "CGO_ENABLED=0", "GOOS=linux", "GOARCH=amd64"), + } + // find the source. This is not totally reliable: different + // systems may get different versions of unreleased APIs. + // The result depends on the toolchain. + // The x/tools release process regenerates the table + // with the canonical toolchain. 
+ stdpkgs, err := packages.Load(&cfg, "std") + if err != nil { + log.Fatal(err) + } + signatures := make(map[string]map[string]string) // PkgPath->FuncName->signature + // signatures start with func and may contain type parameters + // "func[T comparable](value T) unique.Handle[T]" + for _, pkg := range stdpkgs { + if strings.HasPrefix(pkg.PkgPath, "vendor/") || + strings.HasPrefix(pkg.PkgPath, "internal/") || + strings.Contains(pkg.PkgPath, "/internal/") { + continue + } + for _, name := range pkg.Types.Scope().Names() { + fixer := func(p *types.Package) string { + // fn.Signature() would have produced + // "func(fi io/fs.FileInfo, link string) (*archive/tar.Header, error)"}, + // This produces + // "func FileInfoHeader(fi fs.FileInfo, link string) (*Header, error)"" + // Note that the function name is superfluous, so it is removed below + if p != pkg.Types { + return p.Name() + } + return "" + } + obj := pkg.Types.Scope().Lookup(name) + if fn, ok := obj.(*types.Func); ok { + mp, ok := signatures[pkg.PkgPath] + if !ok { + mp = make(map[string]string) + signatures[pkg.PkgPath] = mp + } + sig := types.ObjectString(fn, fixer) + // remove the space and function name introduced by fixer + sig = strings.Replace(sig, " "+name, "", 1) + mp[name] = sig + } + } + } + + // read the api data pkgs := make(map[string]map[string]symInfo) // package -> symbol -> info symRE := regexp.MustCompile(`^pkg (\S+).*?, (var|func|type|const|method \([^)]*\)) ([\pL\p{Nd}_]+)(.*)`) @@ -101,7 +153,15 @@ func manifest(apidir string) { // as their encoding changes; // deprecations count as updates too. if _, ok := symbols[sym]; !ok { - symbols[sym] = symInfo{kind, minor} + var sig string + if kind == "func" { + sig = signatures[path][sym] + } + symbols[sym] = symInfo{ + kind: kind, + minor: minor, + signature: sig, + } } } } @@ -163,8 +223,8 @@ var PackageSymbols = map[string][]Symbol{ fmt.Fprintf(&buf, "\t%q: {\n", path) for _, name := range sortedKeys(pkg) { info := pkg[name] - fmt.Fprintf(&buf, "\t\t{%q, %s, %d},\n", - name, strings.Title(info.kind), info.minor) + fmt.Fprintf(&buf, "\t\t{%q, %s, %d, %q},\n", + name, strings.Title(info.kind), info.minor, info.signature) } fmt.Fprintln(&buf, "},") } @@ -203,6 +263,8 @@ func apidir() string { type symInfo struct { kind string // e.g. 
"func" minor int // go1.%d + // for completion snippets + signature string // for Kind == stdlib.Func } // loadSymbols computes the exported symbols in the specified package diff --git a/internal/stdlib/manifest.go b/internal/stdlib/manifest.go index 08838a0eb04..64f0326b644 100644 --- a/internal/stdlib/manifest.go +++ b/internal/stdlib/manifest.go @@ -8,17669 +8,17669 @@ package stdlib var PackageSymbols = map[string][]Symbol{ "archive/tar": { - {"(*Header).FileInfo", Method, 1}, - {"(*Reader).Next", Method, 0}, - {"(*Reader).Read", Method, 0}, - {"(*Writer).AddFS", Method, 22}, - {"(*Writer).Close", Method, 0}, - {"(*Writer).Flush", Method, 0}, - {"(*Writer).Write", Method, 0}, - {"(*Writer).WriteHeader", Method, 0}, - {"(Format).String", Method, 10}, - {"ErrFieldTooLong", Var, 0}, - {"ErrHeader", Var, 0}, - {"ErrInsecurePath", Var, 20}, - {"ErrWriteAfterClose", Var, 0}, - {"ErrWriteTooLong", Var, 0}, - {"FileInfoHeader", Func, 1}, - {"FileInfoNames", Type, 23}, - {"Format", Type, 10}, - {"FormatGNU", Const, 10}, - {"FormatPAX", Const, 10}, - {"FormatUSTAR", Const, 10}, - {"FormatUnknown", Const, 10}, - {"Header", Type, 0}, - {"Header.AccessTime", Field, 0}, - {"Header.ChangeTime", Field, 0}, - {"Header.Devmajor", Field, 0}, - {"Header.Devminor", Field, 0}, - {"Header.Format", Field, 10}, - {"Header.Gid", Field, 0}, - {"Header.Gname", Field, 0}, - {"Header.Linkname", Field, 0}, - {"Header.ModTime", Field, 0}, - {"Header.Mode", Field, 0}, - {"Header.Name", Field, 0}, - {"Header.PAXRecords", Field, 10}, - {"Header.Size", Field, 0}, - {"Header.Typeflag", Field, 0}, - {"Header.Uid", Field, 0}, - {"Header.Uname", Field, 0}, - {"Header.Xattrs", Field, 3}, - {"NewReader", Func, 0}, - {"NewWriter", Func, 0}, - {"Reader", Type, 0}, - {"TypeBlock", Const, 0}, - {"TypeChar", Const, 0}, - {"TypeCont", Const, 0}, - {"TypeDir", Const, 0}, - {"TypeFifo", Const, 0}, - {"TypeGNULongLink", Const, 1}, - {"TypeGNULongName", Const, 1}, - {"TypeGNUSparse", Const, 3}, - {"TypeLink", Const, 0}, - {"TypeReg", Const, 0}, - {"TypeRegA", Const, 0}, - {"TypeSymlink", Const, 0}, - {"TypeXGlobalHeader", Const, 0}, - {"TypeXHeader", Const, 0}, - {"Writer", Type, 0}, + {"(*Header).FileInfo", Method, 1, ""}, + {"(*Reader).Next", Method, 0, ""}, + {"(*Reader).Read", Method, 0, ""}, + {"(*Writer).AddFS", Method, 22, ""}, + {"(*Writer).Close", Method, 0, ""}, + {"(*Writer).Flush", Method, 0, ""}, + {"(*Writer).Write", Method, 0, ""}, + {"(*Writer).WriteHeader", Method, 0, ""}, + {"(Format).String", Method, 10, ""}, + {"ErrFieldTooLong", Var, 0, ""}, + {"ErrHeader", Var, 0, ""}, + {"ErrInsecurePath", Var, 20, ""}, + {"ErrWriteAfterClose", Var, 0, ""}, + {"ErrWriteTooLong", Var, 0, ""}, + {"FileInfoHeader", Func, 1, "func(fi fs.FileInfo, link string) (*Header, error)"}, + {"FileInfoNames", Type, 23, ""}, + {"Format", Type, 10, ""}, + {"FormatGNU", Const, 10, ""}, + {"FormatPAX", Const, 10, ""}, + {"FormatUSTAR", Const, 10, ""}, + {"FormatUnknown", Const, 10, ""}, + {"Header", Type, 0, ""}, + {"Header.AccessTime", Field, 0, ""}, + {"Header.ChangeTime", Field, 0, ""}, + {"Header.Devmajor", Field, 0, ""}, + {"Header.Devminor", Field, 0, ""}, + {"Header.Format", Field, 10, ""}, + {"Header.Gid", Field, 0, ""}, + {"Header.Gname", Field, 0, ""}, + {"Header.Linkname", Field, 0, ""}, + {"Header.ModTime", Field, 0, ""}, + {"Header.Mode", Field, 0, ""}, + {"Header.Name", Field, 0, ""}, + {"Header.PAXRecords", Field, 10, ""}, + {"Header.Size", Field, 0, ""}, + {"Header.Typeflag", Field, 0, ""}, + {"Header.Uid", Field, 0, ""}, + 
{"Header.Uname", Field, 0, ""}, + {"Header.Xattrs", Field, 3, ""}, + {"NewReader", Func, 0, "func(r io.Reader) *Reader"}, + {"NewWriter", Func, 0, "func(w io.Writer) *Writer"}, + {"Reader", Type, 0, ""}, + {"TypeBlock", Const, 0, ""}, + {"TypeChar", Const, 0, ""}, + {"TypeCont", Const, 0, ""}, + {"TypeDir", Const, 0, ""}, + {"TypeFifo", Const, 0, ""}, + {"TypeGNULongLink", Const, 1, ""}, + {"TypeGNULongName", Const, 1, ""}, + {"TypeGNUSparse", Const, 3, ""}, + {"TypeLink", Const, 0, ""}, + {"TypeReg", Const, 0, ""}, + {"TypeRegA", Const, 0, ""}, + {"TypeSymlink", Const, 0, ""}, + {"TypeXGlobalHeader", Const, 0, ""}, + {"TypeXHeader", Const, 0, ""}, + {"Writer", Type, 0, ""}, }, "archive/zip": { - {"(*File).DataOffset", Method, 2}, - {"(*File).FileInfo", Method, 0}, - {"(*File).ModTime", Method, 0}, - {"(*File).Mode", Method, 0}, - {"(*File).Open", Method, 0}, - {"(*File).OpenRaw", Method, 17}, - {"(*File).SetModTime", Method, 0}, - {"(*File).SetMode", Method, 0}, - {"(*FileHeader).FileInfo", Method, 0}, - {"(*FileHeader).ModTime", Method, 0}, - {"(*FileHeader).Mode", Method, 0}, - {"(*FileHeader).SetModTime", Method, 0}, - {"(*FileHeader).SetMode", Method, 0}, - {"(*ReadCloser).Close", Method, 0}, - {"(*ReadCloser).Open", Method, 16}, - {"(*ReadCloser).RegisterDecompressor", Method, 6}, - {"(*Reader).Open", Method, 16}, - {"(*Reader).RegisterDecompressor", Method, 6}, - {"(*Writer).AddFS", Method, 22}, - {"(*Writer).Close", Method, 0}, - {"(*Writer).Copy", Method, 17}, - {"(*Writer).Create", Method, 0}, - {"(*Writer).CreateHeader", Method, 0}, - {"(*Writer).CreateRaw", Method, 17}, - {"(*Writer).Flush", Method, 4}, - {"(*Writer).RegisterCompressor", Method, 6}, - {"(*Writer).SetComment", Method, 10}, - {"(*Writer).SetOffset", Method, 5}, - {"Compressor", Type, 2}, - {"Decompressor", Type, 2}, - {"Deflate", Const, 0}, - {"ErrAlgorithm", Var, 0}, - {"ErrChecksum", Var, 0}, - {"ErrFormat", Var, 0}, - {"ErrInsecurePath", Var, 20}, - {"File", Type, 0}, - {"File.FileHeader", Field, 0}, - {"FileHeader", Type, 0}, - {"FileHeader.CRC32", Field, 0}, - {"FileHeader.Comment", Field, 0}, - {"FileHeader.CompressedSize", Field, 0}, - {"FileHeader.CompressedSize64", Field, 1}, - {"FileHeader.CreatorVersion", Field, 0}, - {"FileHeader.ExternalAttrs", Field, 0}, - {"FileHeader.Extra", Field, 0}, - {"FileHeader.Flags", Field, 0}, - {"FileHeader.Method", Field, 0}, - {"FileHeader.Modified", Field, 10}, - {"FileHeader.ModifiedDate", Field, 0}, - {"FileHeader.ModifiedTime", Field, 0}, - {"FileHeader.Name", Field, 0}, - {"FileHeader.NonUTF8", Field, 10}, - {"FileHeader.ReaderVersion", Field, 0}, - {"FileHeader.UncompressedSize", Field, 0}, - {"FileHeader.UncompressedSize64", Field, 1}, - {"FileInfoHeader", Func, 0}, - {"NewReader", Func, 0}, - {"NewWriter", Func, 0}, - {"OpenReader", Func, 0}, - {"ReadCloser", Type, 0}, - {"ReadCloser.Reader", Field, 0}, - {"Reader", Type, 0}, - {"Reader.Comment", Field, 0}, - {"Reader.File", Field, 0}, - {"RegisterCompressor", Func, 2}, - {"RegisterDecompressor", Func, 2}, - {"Store", Const, 0}, - {"Writer", Type, 0}, + {"(*File).DataOffset", Method, 2, ""}, + {"(*File).FileInfo", Method, 0, ""}, + {"(*File).ModTime", Method, 0, ""}, + {"(*File).Mode", Method, 0, ""}, + {"(*File).Open", Method, 0, ""}, + {"(*File).OpenRaw", Method, 17, ""}, + {"(*File).SetModTime", Method, 0, ""}, + {"(*File).SetMode", Method, 0, ""}, + {"(*FileHeader).FileInfo", Method, 0, ""}, + {"(*FileHeader).ModTime", Method, 0, ""}, + {"(*FileHeader).Mode", Method, 0, ""}, + {"(*FileHeader).SetModTime", 
Method, 0, ""}, + {"(*FileHeader).SetMode", Method, 0, ""}, + {"(*ReadCloser).Close", Method, 0, ""}, + {"(*ReadCloser).Open", Method, 16, ""}, + {"(*ReadCloser).RegisterDecompressor", Method, 6, ""}, + {"(*Reader).Open", Method, 16, ""}, + {"(*Reader).RegisterDecompressor", Method, 6, ""}, + {"(*Writer).AddFS", Method, 22, ""}, + {"(*Writer).Close", Method, 0, ""}, + {"(*Writer).Copy", Method, 17, ""}, + {"(*Writer).Create", Method, 0, ""}, + {"(*Writer).CreateHeader", Method, 0, ""}, + {"(*Writer).CreateRaw", Method, 17, ""}, + {"(*Writer).Flush", Method, 4, ""}, + {"(*Writer).RegisterCompressor", Method, 6, ""}, + {"(*Writer).SetComment", Method, 10, ""}, + {"(*Writer).SetOffset", Method, 5, ""}, + {"Compressor", Type, 2, ""}, + {"Decompressor", Type, 2, ""}, + {"Deflate", Const, 0, ""}, + {"ErrAlgorithm", Var, 0, ""}, + {"ErrChecksum", Var, 0, ""}, + {"ErrFormat", Var, 0, ""}, + {"ErrInsecurePath", Var, 20, ""}, + {"File", Type, 0, ""}, + {"File.FileHeader", Field, 0, ""}, + {"FileHeader", Type, 0, ""}, + {"FileHeader.CRC32", Field, 0, ""}, + {"FileHeader.Comment", Field, 0, ""}, + {"FileHeader.CompressedSize", Field, 0, ""}, + {"FileHeader.CompressedSize64", Field, 1, ""}, + {"FileHeader.CreatorVersion", Field, 0, ""}, + {"FileHeader.ExternalAttrs", Field, 0, ""}, + {"FileHeader.Extra", Field, 0, ""}, + {"FileHeader.Flags", Field, 0, ""}, + {"FileHeader.Method", Field, 0, ""}, + {"FileHeader.Modified", Field, 10, ""}, + {"FileHeader.ModifiedDate", Field, 0, ""}, + {"FileHeader.ModifiedTime", Field, 0, ""}, + {"FileHeader.Name", Field, 0, ""}, + {"FileHeader.NonUTF8", Field, 10, ""}, + {"FileHeader.ReaderVersion", Field, 0, ""}, + {"FileHeader.UncompressedSize", Field, 0, ""}, + {"FileHeader.UncompressedSize64", Field, 1, ""}, + {"FileInfoHeader", Func, 0, "func(fi fs.FileInfo) (*FileHeader, error)"}, + {"NewReader", Func, 0, "func(r io.ReaderAt, size int64) (*Reader, error)"}, + {"NewWriter", Func, 0, "func(w io.Writer) *Writer"}, + {"OpenReader", Func, 0, "func(name string) (*ReadCloser, error)"}, + {"ReadCloser", Type, 0, ""}, + {"ReadCloser.Reader", Field, 0, ""}, + {"Reader", Type, 0, ""}, + {"Reader.Comment", Field, 0, ""}, + {"Reader.File", Field, 0, ""}, + {"RegisterCompressor", Func, 2, "func(method uint16, comp Compressor)"}, + {"RegisterDecompressor", Func, 2, "func(method uint16, dcomp Decompressor)"}, + {"Store", Const, 0, ""}, + {"Writer", Type, 0, ""}, }, "bufio": { - {"(*Reader).Buffered", Method, 0}, - {"(*Reader).Discard", Method, 5}, - {"(*Reader).Peek", Method, 0}, - {"(*Reader).Read", Method, 0}, - {"(*Reader).ReadByte", Method, 0}, - {"(*Reader).ReadBytes", Method, 0}, - {"(*Reader).ReadLine", Method, 0}, - {"(*Reader).ReadRune", Method, 0}, - {"(*Reader).ReadSlice", Method, 0}, - {"(*Reader).ReadString", Method, 0}, - {"(*Reader).Reset", Method, 2}, - {"(*Reader).Size", Method, 10}, - {"(*Reader).UnreadByte", Method, 0}, - {"(*Reader).UnreadRune", Method, 0}, - {"(*Reader).WriteTo", Method, 1}, - {"(*Scanner).Buffer", Method, 6}, - {"(*Scanner).Bytes", Method, 1}, - {"(*Scanner).Err", Method, 1}, - {"(*Scanner).Scan", Method, 1}, - {"(*Scanner).Split", Method, 1}, - {"(*Scanner).Text", Method, 1}, - {"(*Writer).Available", Method, 0}, - {"(*Writer).AvailableBuffer", Method, 18}, - {"(*Writer).Buffered", Method, 0}, - {"(*Writer).Flush", Method, 0}, - {"(*Writer).ReadFrom", Method, 1}, - {"(*Writer).Reset", Method, 2}, - {"(*Writer).Size", Method, 10}, - {"(*Writer).Write", Method, 0}, - {"(*Writer).WriteByte", Method, 0}, - {"(*Writer).WriteRune", Method, 0}, - 
{"(*Writer).WriteString", Method, 0}, - {"(ReadWriter).Available", Method, 0}, - {"(ReadWriter).AvailableBuffer", Method, 18}, - {"(ReadWriter).Discard", Method, 5}, - {"(ReadWriter).Flush", Method, 0}, - {"(ReadWriter).Peek", Method, 0}, - {"(ReadWriter).Read", Method, 0}, - {"(ReadWriter).ReadByte", Method, 0}, - {"(ReadWriter).ReadBytes", Method, 0}, - {"(ReadWriter).ReadFrom", Method, 1}, - {"(ReadWriter).ReadLine", Method, 0}, - {"(ReadWriter).ReadRune", Method, 0}, - {"(ReadWriter).ReadSlice", Method, 0}, - {"(ReadWriter).ReadString", Method, 0}, - {"(ReadWriter).UnreadByte", Method, 0}, - {"(ReadWriter).UnreadRune", Method, 0}, - {"(ReadWriter).Write", Method, 0}, - {"(ReadWriter).WriteByte", Method, 0}, - {"(ReadWriter).WriteRune", Method, 0}, - {"(ReadWriter).WriteString", Method, 0}, - {"(ReadWriter).WriteTo", Method, 1}, - {"ErrAdvanceTooFar", Var, 1}, - {"ErrBadReadCount", Var, 15}, - {"ErrBufferFull", Var, 0}, - {"ErrFinalToken", Var, 6}, - {"ErrInvalidUnreadByte", Var, 0}, - {"ErrInvalidUnreadRune", Var, 0}, - {"ErrNegativeAdvance", Var, 1}, - {"ErrNegativeCount", Var, 0}, - {"ErrTooLong", Var, 1}, - {"MaxScanTokenSize", Const, 1}, - {"NewReadWriter", Func, 0}, - {"NewReader", Func, 0}, - {"NewReaderSize", Func, 0}, - {"NewScanner", Func, 1}, - {"NewWriter", Func, 0}, - {"NewWriterSize", Func, 0}, - {"ReadWriter", Type, 0}, - {"ReadWriter.Reader", Field, 0}, - {"ReadWriter.Writer", Field, 0}, - {"Reader", Type, 0}, - {"ScanBytes", Func, 1}, - {"ScanLines", Func, 1}, - {"ScanRunes", Func, 1}, - {"ScanWords", Func, 1}, - {"Scanner", Type, 1}, - {"SplitFunc", Type, 1}, - {"Writer", Type, 0}, + {"(*Reader).Buffered", Method, 0, ""}, + {"(*Reader).Discard", Method, 5, ""}, + {"(*Reader).Peek", Method, 0, ""}, + {"(*Reader).Read", Method, 0, ""}, + {"(*Reader).ReadByte", Method, 0, ""}, + {"(*Reader).ReadBytes", Method, 0, ""}, + {"(*Reader).ReadLine", Method, 0, ""}, + {"(*Reader).ReadRune", Method, 0, ""}, + {"(*Reader).ReadSlice", Method, 0, ""}, + {"(*Reader).ReadString", Method, 0, ""}, + {"(*Reader).Reset", Method, 2, ""}, + {"(*Reader).Size", Method, 10, ""}, + {"(*Reader).UnreadByte", Method, 0, ""}, + {"(*Reader).UnreadRune", Method, 0, ""}, + {"(*Reader).WriteTo", Method, 1, ""}, + {"(*Scanner).Buffer", Method, 6, ""}, + {"(*Scanner).Bytes", Method, 1, ""}, + {"(*Scanner).Err", Method, 1, ""}, + {"(*Scanner).Scan", Method, 1, ""}, + {"(*Scanner).Split", Method, 1, ""}, + {"(*Scanner).Text", Method, 1, ""}, + {"(*Writer).Available", Method, 0, ""}, + {"(*Writer).AvailableBuffer", Method, 18, ""}, + {"(*Writer).Buffered", Method, 0, ""}, + {"(*Writer).Flush", Method, 0, ""}, + {"(*Writer).ReadFrom", Method, 1, ""}, + {"(*Writer).Reset", Method, 2, ""}, + {"(*Writer).Size", Method, 10, ""}, + {"(*Writer).Write", Method, 0, ""}, + {"(*Writer).WriteByte", Method, 0, ""}, + {"(*Writer).WriteRune", Method, 0, ""}, + {"(*Writer).WriteString", Method, 0, ""}, + {"(ReadWriter).Available", Method, 0, ""}, + {"(ReadWriter).AvailableBuffer", Method, 18, ""}, + {"(ReadWriter).Discard", Method, 5, ""}, + {"(ReadWriter).Flush", Method, 0, ""}, + {"(ReadWriter).Peek", Method, 0, ""}, + {"(ReadWriter).Read", Method, 0, ""}, + {"(ReadWriter).ReadByte", Method, 0, ""}, + {"(ReadWriter).ReadBytes", Method, 0, ""}, + {"(ReadWriter).ReadFrom", Method, 1, ""}, + {"(ReadWriter).ReadLine", Method, 0, ""}, + {"(ReadWriter).ReadRune", Method, 0, ""}, + {"(ReadWriter).ReadSlice", Method, 0, ""}, + {"(ReadWriter).ReadString", Method, 0, ""}, + {"(ReadWriter).UnreadByte", Method, 0, ""}, + 
{"(ReadWriter).UnreadRune", Method, 0, ""}, + {"(ReadWriter).Write", Method, 0, ""}, + {"(ReadWriter).WriteByte", Method, 0, ""}, + {"(ReadWriter).WriteRune", Method, 0, ""}, + {"(ReadWriter).WriteString", Method, 0, ""}, + {"(ReadWriter).WriteTo", Method, 1, ""}, + {"ErrAdvanceTooFar", Var, 1, ""}, + {"ErrBadReadCount", Var, 15, ""}, + {"ErrBufferFull", Var, 0, ""}, + {"ErrFinalToken", Var, 6, ""}, + {"ErrInvalidUnreadByte", Var, 0, ""}, + {"ErrInvalidUnreadRune", Var, 0, ""}, + {"ErrNegativeAdvance", Var, 1, ""}, + {"ErrNegativeCount", Var, 0, ""}, + {"ErrTooLong", Var, 1, ""}, + {"MaxScanTokenSize", Const, 1, ""}, + {"NewReadWriter", Func, 0, "func(r *Reader, w *Writer) *ReadWriter"}, + {"NewReader", Func, 0, "func(rd io.Reader) *Reader"}, + {"NewReaderSize", Func, 0, "func(rd io.Reader, size int) *Reader"}, + {"NewScanner", Func, 1, "func(r io.Reader) *Scanner"}, + {"NewWriter", Func, 0, "func(w io.Writer) *Writer"}, + {"NewWriterSize", Func, 0, "func(w io.Writer, size int) *Writer"}, + {"ReadWriter", Type, 0, ""}, + {"ReadWriter.Reader", Field, 0, ""}, + {"ReadWriter.Writer", Field, 0, ""}, + {"Reader", Type, 0, ""}, + {"ScanBytes", Func, 1, "func(data []byte, atEOF bool) (advance int, token []byte, err error)"}, + {"ScanLines", Func, 1, "func(data []byte, atEOF bool) (advance int, token []byte, err error)"}, + {"ScanRunes", Func, 1, "func(data []byte, atEOF bool) (advance int, token []byte, err error)"}, + {"ScanWords", Func, 1, "func(data []byte, atEOF bool) (advance int, token []byte, err error)"}, + {"Scanner", Type, 1, ""}, + {"SplitFunc", Type, 1, ""}, + {"Writer", Type, 0, ""}, }, "bytes": { - {"(*Buffer).Available", Method, 21}, - {"(*Buffer).AvailableBuffer", Method, 21}, - {"(*Buffer).Bytes", Method, 0}, - {"(*Buffer).Cap", Method, 5}, - {"(*Buffer).Grow", Method, 1}, - {"(*Buffer).Len", Method, 0}, - {"(*Buffer).Next", Method, 0}, - {"(*Buffer).Read", Method, 0}, - {"(*Buffer).ReadByte", Method, 0}, - {"(*Buffer).ReadBytes", Method, 0}, - {"(*Buffer).ReadFrom", Method, 0}, - {"(*Buffer).ReadRune", Method, 0}, - {"(*Buffer).ReadString", Method, 0}, - {"(*Buffer).Reset", Method, 0}, - {"(*Buffer).String", Method, 0}, - {"(*Buffer).Truncate", Method, 0}, - {"(*Buffer).UnreadByte", Method, 0}, - {"(*Buffer).UnreadRune", Method, 0}, - {"(*Buffer).Write", Method, 0}, - {"(*Buffer).WriteByte", Method, 0}, - {"(*Buffer).WriteRune", Method, 0}, - {"(*Buffer).WriteString", Method, 0}, - {"(*Buffer).WriteTo", Method, 0}, - {"(*Reader).Len", Method, 0}, - {"(*Reader).Read", Method, 0}, - {"(*Reader).ReadAt", Method, 0}, - {"(*Reader).ReadByte", Method, 0}, - {"(*Reader).ReadRune", Method, 0}, - {"(*Reader).Reset", Method, 7}, - {"(*Reader).Seek", Method, 0}, - {"(*Reader).Size", Method, 5}, - {"(*Reader).UnreadByte", Method, 0}, - {"(*Reader).UnreadRune", Method, 0}, - {"(*Reader).WriteTo", Method, 1}, - {"Buffer", Type, 0}, - {"Clone", Func, 20}, - {"Compare", Func, 0}, - {"Contains", Func, 0}, - {"ContainsAny", Func, 7}, - {"ContainsFunc", Func, 21}, - {"ContainsRune", Func, 7}, - {"Count", Func, 0}, - {"Cut", Func, 18}, - {"CutPrefix", Func, 20}, - {"CutSuffix", Func, 20}, - {"Equal", Func, 0}, - {"EqualFold", Func, 0}, - {"ErrTooLarge", Var, 0}, - {"Fields", Func, 0}, - {"FieldsFunc", Func, 0}, - {"FieldsFuncSeq", Func, 24}, - {"FieldsSeq", Func, 24}, - {"HasPrefix", Func, 0}, - {"HasSuffix", Func, 0}, - {"Index", Func, 0}, - {"IndexAny", Func, 0}, - {"IndexByte", Func, 0}, - {"IndexFunc", Func, 0}, - {"IndexRune", Func, 0}, - {"Join", Func, 0}, - {"LastIndex", Func, 0}, - 
{"LastIndexAny", Func, 0}, - {"LastIndexByte", Func, 5}, - {"LastIndexFunc", Func, 0}, - {"Lines", Func, 24}, - {"Map", Func, 0}, - {"MinRead", Const, 0}, - {"NewBuffer", Func, 0}, - {"NewBufferString", Func, 0}, - {"NewReader", Func, 0}, - {"Reader", Type, 0}, - {"Repeat", Func, 0}, - {"Replace", Func, 0}, - {"ReplaceAll", Func, 12}, - {"Runes", Func, 0}, - {"Split", Func, 0}, - {"SplitAfter", Func, 0}, - {"SplitAfterN", Func, 0}, - {"SplitAfterSeq", Func, 24}, - {"SplitN", Func, 0}, - {"SplitSeq", Func, 24}, - {"Title", Func, 0}, - {"ToLower", Func, 0}, - {"ToLowerSpecial", Func, 0}, - {"ToTitle", Func, 0}, - {"ToTitleSpecial", Func, 0}, - {"ToUpper", Func, 0}, - {"ToUpperSpecial", Func, 0}, - {"ToValidUTF8", Func, 13}, - {"Trim", Func, 0}, - {"TrimFunc", Func, 0}, - {"TrimLeft", Func, 0}, - {"TrimLeftFunc", Func, 0}, - {"TrimPrefix", Func, 1}, - {"TrimRight", Func, 0}, - {"TrimRightFunc", Func, 0}, - {"TrimSpace", Func, 0}, - {"TrimSuffix", Func, 1}, + {"(*Buffer).Available", Method, 21, ""}, + {"(*Buffer).AvailableBuffer", Method, 21, ""}, + {"(*Buffer).Bytes", Method, 0, ""}, + {"(*Buffer).Cap", Method, 5, ""}, + {"(*Buffer).Grow", Method, 1, ""}, + {"(*Buffer).Len", Method, 0, ""}, + {"(*Buffer).Next", Method, 0, ""}, + {"(*Buffer).Read", Method, 0, ""}, + {"(*Buffer).ReadByte", Method, 0, ""}, + {"(*Buffer).ReadBytes", Method, 0, ""}, + {"(*Buffer).ReadFrom", Method, 0, ""}, + {"(*Buffer).ReadRune", Method, 0, ""}, + {"(*Buffer).ReadString", Method, 0, ""}, + {"(*Buffer).Reset", Method, 0, ""}, + {"(*Buffer).String", Method, 0, ""}, + {"(*Buffer).Truncate", Method, 0, ""}, + {"(*Buffer).UnreadByte", Method, 0, ""}, + {"(*Buffer).UnreadRune", Method, 0, ""}, + {"(*Buffer).Write", Method, 0, ""}, + {"(*Buffer).WriteByte", Method, 0, ""}, + {"(*Buffer).WriteRune", Method, 0, ""}, + {"(*Buffer).WriteString", Method, 0, ""}, + {"(*Buffer).WriteTo", Method, 0, ""}, + {"(*Reader).Len", Method, 0, ""}, + {"(*Reader).Read", Method, 0, ""}, + {"(*Reader).ReadAt", Method, 0, ""}, + {"(*Reader).ReadByte", Method, 0, ""}, + {"(*Reader).ReadRune", Method, 0, ""}, + {"(*Reader).Reset", Method, 7, ""}, + {"(*Reader).Seek", Method, 0, ""}, + {"(*Reader).Size", Method, 5, ""}, + {"(*Reader).UnreadByte", Method, 0, ""}, + {"(*Reader).UnreadRune", Method, 0, ""}, + {"(*Reader).WriteTo", Method, 1, ""}, + {"Buffer", Type, 0, ""}, + {"Clone", Func, 20, "func(b []byte) []byte"}, + {"Compare", Func, 0, "func(a []byte, b []byte) int"}, + {"Contains", Func, 0, "func(b []byte, subslice []byte) bool"}, + {"ContainsAny", Func, 7, "func(b []byte, chars string) bool"}, + {"ContainsFunc", Func, 21, "func(b []byte, f func(rune) bool) bool"}, + {"ContainsRune", Func, 7, "func(b []byte, r rune) bool"}, + {"Count", Func, 0, "func(s []byte, sep []byte) int"}, + {"Cut", Func, 18, "func(s []byte, sep []byte) (before []byte, after []byte, found bool)"}, + {"CutPrefix", Func, 20, "func(s []byte, prefix []byte) (after []byte, found bool)"}, + {"CutSuffix", Func, 20, "func(s []byte, suffix []byte) (before []byte, found bool)"}, + {"Equal", Func, 0, "func(a []byte, b []byte) bool"}, + {"EqualFold", Func, 0, "func(s []byte, t []byte) bool"}, + {"ErrTooLarge", Var, 0, ""}, + {"Fields", Func, 0, "func(s []byte) [][]byte"}, + {"FieldsFunc", Func, 0, "func(s []byte, f func(rune) bool) [][]byte"}, + {"FieldsFuncSeq", Func, 24, "func(s []byte, f func(rune) bool) iter.Seq[[]byte]"}, + {"FieldsSeq", Func, 24, "func(s []byte) iter.Seq[[]byte]"}, + {"HasPrefix", Func, 0, "func(s []byte, prefix []byte) bool"}, + {"HasSuffix", Func, 0, 
"func(s []byte, suffix []byte) bool"}, + {"Index", Func, 0, "func(s []byte, sep []byte) int"}, + {"IndexAny", Func, 0, "func(s []byte, chars string) int"}, + {"IndexByte", Func, 0, "func(b []byte, c byte) int"}, + {"IndexFunc", Func, 0, "func(s []byte, f func(r rune) bool) int"}, + {"IndexRune", Func, 0, "func(s []byte, r rune) int"}, + {"Join", Func, 0, "func(s [][]byte, sep []byte) []byte"}, + {"LastIndex", Func, 0, "func(s []byte, sep []byte) int"}, + {"LastIndexAny", Func, 0, "func(s []byte, chars string) int"}, + {"LastIndexByte", Func, 5, "func(s []byte, c byte) int"}, + {"LastIndexFunc", Func, 0, "func(s []byte, f func(r rune) bool) int"}, + {"Lines", Func, 24, "func(s []byte) iter.Seq[[]byte]"}, + {"Map", Func, 0, "func(mapping func(r rune) rune, s []byte) []byte"}, + {"MinRead", Const, 0, ""}, + {"NewBuffer", Func, 0, "func(buf []byte) *Buffer"}, + {"NewBufferString", Func, 0, "func(s string) *Buffer"}, + {"NewReader", Func, 0, "func(b []byte) *Reader"}, + {"Reader", Type, 0, ""}, + {"Repeat", Func, 0, "func(b []byte, count int) []byte"}, + {"Replace", Func, 0, "func(s []byte, old []byte, new []byte, n int) []byte"}, + {"ReplaceAll", Func, 12, "func(s []byte, old []byte, new []byte) []byte"}, + {"Runes", Func, 0, "func(s []byte) []rune"}, + {"Split", Func, 0, "func(s []byte, sep []byte) [][]byte"}, + {"SplitAfter", Func, 0, "func(s []byte, sep []byte) [][]byte"}, + {"SplitAfterN", Func, 0, "func(s []byte, sep []byte, n int) [][]byte"}, + {"SplitAfterSeq", Func, 24, "func(s []byte, sep []byte) iter.Seq[[]byte]"}, + {"SplitN", Func, 0, "func(s []byte, sep []byte, n int) [][]byte"}, + {"SplitSeq", Func, 24, "func(s []byte, sep []byte) iter.Seq[[]byte]"}, + {"Title", Func, 0, "func(s []byte) []byte"}, + {"ToLower", Func, 0, "func(s []byte) []byte"}, + {"ToLowerSpecial", Func, 0, "func(c unicode.SpecialCase, s []byte) []byte"}, + {"ToTitle", Func, 0, "func(s []byte) []byte"}, + {"ToTitleSpecial", Func, 0, "func(c unicode.SpecialCase, s []byte) []byte"}, + {"ToUpper", Func, 0, "func(s []byte) []byte"}, + {"ToUpperSpecial", Func, 0, "func(c unicode.SpecialCase, s []byte) []byte"}, + {"ToValidUTF8", Func, 13, "func(s []byte, replacement []byte) []byte"}, + {"Trim", Func, 0, "func(s []byte, cutset string) []byte"}, + {"TrimFunc", Func, 0, "func(s []byte, f func(r rune) bool) []byte"}, + {"TrimLeft", Func, 0, "func(s []byte, cutset string) []byte"}, + {"TrimLeftFunc", Func, 0, "func(s []byte, f func(r rune) bool) []byte"}, + {"TrimPrefix", Func, 1, "func(s []byte, prefix []byte) []byte"}, + {"TrimRight", Func, 0, "func(s []byte, cutset string) []byte"}, + {"TrimRightFunc", Func, 0, "func(s []byte, f func(r rune) bool) []byte"}, + {"TrimSpace", Func, 0, "func(s []byte) []byte"}, + {"TrimSuffix", Func, 1, "func(s []byte, suffix []byte) []byte"}, }, "cmp": { - {"Compare", Func, 21}, - {"Less", Func, 21}, - {"Or", Func, 22}, - {"Ordered", Type, 21}, + {"Compare", Func, 21, "func[T Ordered](x T, y T) int"}, + {"Less", Func, 21, "func[T Ordered](x T, y T) bool"}, + {"Or", Func, 22, "func[T comparable](vals ...T) T"}, + {"Ordered", Type, 21, ""}, }, "compress/bzip2": { - {"(StructuralError).Error", Method, 0}, - {"NewReader", Func, 0}, - {"StructuralError", Type, 0}, + {"(StructuralError).Error", Method, 0, ""}, + {"NewReader", Func, 0, "func(r io.Reader) io.Reader"}, + {"StructuralError", Type, 0, ""}, }, "compress/flate": { - {"(*ReadError).Error", Method, 0}, - {"(*WriteError).Error", Method, 0}, - {"(*Writer).Close", Method, 0}, - {"(*Writer).Flush", Method, 0}, - {"(*Writer).Reset", Method, 
2}, - {"(*Writer).Write", Method, 0}, - {"(CorruptInputError).Error", Method, 0}, - {"(InternalError).Error", Method, 0}, - {"BestCompression", Const, 0}, - {"BestSpeed", Const, 0}, - {"CorruptInputError", Type, 0}, - {"DefaultCompression", Const, 0}, - {"HuffmanOnly", Const, 7}, - {"InternalError", Type, 0}, - {"NewReader", Func, 0}, - {"NewReaderDict", Func, 0}, - {"NewWriter", Func, 0}, - {"NewWriterDict", Func, 0}, - {"NoCompression", Const, 0}, - {"ReadError", Type, 0}, - {"ReadError.Err", Field, 0}, - {"ReadError.Offset", Field, 0}, - {"Reader", Type, 0}, - {"Resetter", Type, 4}, - {"WriteError", Type, 0}, - {"WriteError.Err", Field, 0}, - {"WriteError.Offset", Field, 0}, - {"Writer", Type, 0}, + {"(*ReadError).Error", Method, 0, ""}, + {"(*WriteError).Error", Method, 0, ""}, + {"(*Writer).Close", Method, 0, ""}, + {"(*Writer).Flush", Method, 0, ""}, + {"(*Writer).Reset", Method, 2, ""}, + {"(*Writer).Write", Method, 0, ""}, + {"(CorruptInputError).Error", Method, 0, ""}, + {"(InternalError).Error", Method, 0, ""}, + {"BestCompression", Const, 0, ""}, + {"BestSpeed", Const, 0, ""}, + {"CorruptInputError", Type, 0, ""}, + {"DefaultCompression", Const, 0, ""}, + {"HuffmanOnly", Const, 7, ""}, + {"InternalError", Type, 0, ""}, + {"NewReader", Func, 0, "func(r io.Reader) io.ReadCloser"}, + {"NewReaderDict", Func, 0, "func(r io.Reader, dict []byte) io.ReadCloser"}, + {"NewWriter", Func, 0, "func(w io.Writer, level int) (*Writer, error)"}, + {"NewWriterDict", Func, 0, "func(w io.Writer, level int, dict []byte) (*Writer, error)"}, + {"NoCompression", Const, 0, ""}, + {"ReadError", Type, 0, ""}, + {"ReadError.Err", Field, 0, ""}, + {"ReadError.Offset", Field, 0, ""}, + {"Reader", Type, 0, ""}, + {"Resetter", Type, 4, ""}, + {"WriteError", Type, 0, ""}, + {"WriteError.Err", Field, 0, ""}, + {"WriteError.Offset", Field, 0, ""}, + {"Writer", Type, 0, ""}, }, "compress/gzip": { - {"(*Reader).Close", Method, 0}, - {"(*Reader).Multistream", Method, 4}, - {"(*Reader).Read", Method, 0}, - {"(*Reader).Reset", Method, 3}, - {"(*Writer).Close", Method, 0}, - {"(*Writer).Flush", Method, 1}, - {"(*Writer).Reset", Method, 2}, - {"(*Writer).Write", Method, 0}, - {"BestCompression", Const, 0}, - {"BestSpeed", Const, 0}, - {"DefaultCompression", Const, 0}, - {"ErrChecksum", Var, 0}, - {"ErrHeader", Var, 0}, - {"Header", Type, 0}, - {"Header.Comment", Field, 0}, - {"Header.Extra", Field, 0}, - {"Header.ModTime", Field, 0}, - {"Header.Name", Field, 0}, - {"Header.OS", Field, 0}, - {"HuffmanOnly", Const, 8}, - {"NewReader", Func, 0}, - {"NewWriter", Func, 0}, - {"NewWriterLevel", Func, 0}, - {"NoCompression", Const, 0}, - {"Reader", Type, 0}, - {"Reader.Header", Field, 0}, - {"Writer", Type, 0}, - {"Writer.Header", Field, 0}, + {"(*Reader).Close", Method, 0, ""}, + {"(*Reader).Multistream", Method, 4, ""}, + {"(*Reader).Read", Method, 0, ""}, + {"(*Reader).Reset", Method, 3, ""}, + {"(*Writer).Close", Method, 0, ""}, + {"(*Writer).Flush", Method, 1, ""}, + {"(*Writer).Reset", Method, 2, ""}, + {"(*Writer).Write", Method, 0, ""}, + {"BestCompression", Const, 0, ""}, + {"BestSpeed", Const, 0, ""}, + {"DefaultCompression", Const, 0, ""}, + {"ErrChecksum", Var, 0, ""}, + {"ErrHeader", Var, 0, ""}, + {"Header", Type, 0, ""}, + {"Header.Comment", Field, 0, ""}, + {"Header.Extra", Field, 0, ""}, + {"Header.ModTime", Field, 0, ""}, + {"Header.Name", Field, 0, ""}, + {"Header.OS", Field, 0, ""}, + {"HuffmanOnly", Const, 8, ""}, + {"NewReader", Func, 0, "func(r io.Reader) (*Reader, error)"}, + {"NewWriter", Func, 0, 
"func(w io.Writer) *Writer"}, + {"NewWriterLevel", Func, 0, "func(w io.Writer, level int) (*Writer, error)"}, + {"NoCompression", Const, 0, ""}, + {"Reader", Type, 0, ""}, + {"Reader.Header", Field, 0, ""}, + {"Writer", Type, 0, ""}, + {"Writer.Header", Field, 0, ""}, }, "compress/lzw": { - {"(*Reader).Close", Method, 17}, - {"(*Reader).Read", Method, 17}, - {"(*Reader).Reset", Method, 17}, - {"(*Writer).Close", Method, 17}, - {"(*Writer).Reset", Method, 17}, - {"(*Writer).Write", Method, 17}, - {"LSB", Const, 0}, - {"MSB", Const, 0}, - {"NewReader", Func, 0}, - {"NewWriter", Func, 0}, - {"Order", Type, 0}, - {"Reader", Type, 17}, - {"Writer", Type, 17}, + {"(*Reader).Close", Method, 17, ""}, + {"(*Reader).Read", Method, 17, ""}, + {"(*Reader).Reset", Method, 17, ""}, + {"(*Writer).Close", Method, 17, ""}, + {"(*Writer).Reset", Method, 17, ""}, + {"(*Writer).Write", Method, 17, ""}, + {"LSB", Const, 0, ""}, + {"MSB", Const, 0, ""}, + {"NewReader", Func, 0, "func(r io.Reader, order Order, litWidth int) io.ReadCloser"}, + {"NewWriter", Func, 0, "func(w io.Writer, order Order, litWidth int) io.WriteCloser"}, + {"Order", Type, 0, ""}, + {"Reader", Type, 17, ""}, + {"Writer", Type, 17, ""}, }, "compress/zlib": { - {"(*Writer).Close", Method, 0}, - {"(*Writer).Flush", Method, 0}, - {"(*Writer).Reset", Method, 2}, - {"(*Writer).Write", Method, 0}, - {"BestCompression", Const, 0}, - {"BestSpeed", Const, 0}, - {"DefaultCompression", Const, 0}, - {"ErrChecksum", Var, 0}, - {"ErrDictionary", Var, 0}, - {"ErrHeader", Var, 0}, - {"HuffmanOnly", Const, 8}, - {"NewReader", Func, 0}, - {"NewReaderDict", Func, 0}, - {"NewWriter", Func, 0}, - {"NewWriterLevel", Func, 0}, - {"NewWriterLevelDict", Func, 0}, - {"NoCompression", Const, 0}, - {"Resetter", Type, 4}, - {"Writer", Type, 0}, + {"(*Writer).Close", Method, 0, ""}, + {"(*Writer).Flush", Method, 0, ""}, + {"(*Writer).Reset", Method, 2, ""}, + {"(*Writer).Write", Method, 0, ""}, + {"BestCompression", Const, 0, ""}, + {"BestSpeed", Const, 0, ""}, + {"DefaultCompression", Const, 0, ""}, + {"ErrChecksum", Var, 0, ""}, + {"ErrDictionary", Var, 0, ""}, + {"ErrHeader", Var, 0, ""}, + {"HuffmanOnly", Const, 8, ""}, + {"NewReader", Func, 0, "func(r io.Reader) (io.ReadCloser, error)"}, + {"NewReaderDict", Func, 0, "func(r io.Reader, dict []byte) (io.ReadCloser, error)"}, + {"NewWriter", Func, 0, "func(w io.Writer) *Writer"}, + {"NewWriterLevel", Func, 0, "func(w io.Writer, level int) (*Writer, error)"}, + {"NewWriterLevelDict", Func, 0, "func(w io.Writer, level int, dict []byte) (*Writer, error)"}, + {"NoCompression", Const, 0, ""}, + {"Resetter", Type, 4, ""}, + {"Writer", Type, 0, ""}, }, "container/heap": { - {"Fix", Func, 2}, - {"Init", Func, 0}, - {"Interface", Type, 0}, - {"Pop", Func, 0}, - {"Push", Func, 0}, - {"Remove", Func, 0}, + {"Fix", Func, 2, "func(h Interface, i int)"}, + {"Init", Func, 0, "func(h Interface)"}, + {"Interface", Type, 0, ""}, + {"Pop", Func, 0, "func(h Interface) any"}, + {"Push", Func, 0, "func(h Interface, x any)"}, + {"Remove", Func, 0, "func(h Interface, i int) any"}, }, "container/list": { - {"(*Element).Next", Method, 0}, - {"(*Element).Prev", Method, 0}, - {"(*List).Back", Method, 0}, - {"(*List).Front", Method, 0}, - {"(*List).Init", Method, 0}, - {"(*List).InsertAfter", Method, 0}, - {"(*List).InsertBefore", Method, 0}, - {"(*List).Len", Method, 0}, - {"(*List).MoveAfter", Method, 2}, - {"(*List).MoveBefore", Method, 2}, - {"(*List).MoveToBack", Method, 0}, - {"(*List).MoveToFront", Method, 0}, - {"(*List).PushBack", 
Method, 0}, - {"(*List).PushBackList", Method, 0}, - {"(*List).PushFront", Method, 0}, - {"(*List).PushFrontList", Method, 0}, - {"(*List).Remove", Method, 0}, - {"Element", Type, 0}, - {"Element.Value", Field, 0}, - {"List", Type, 0}, - {"New", Func, 0}, + {"(*Element).Next", Method, 0, ""}, + {"(*Element).Prev", Method, 0, ""}, + {"(*List).Back", Method, 0, ""}, + {"(*List).Front", Method, 0, ""}, + {"(*List).Init", Method, 0, ""}, + {"(*List).InsertAfter", Method, 0, ""}, + {"(*List).InsertBefore", Method, 0, ""}, + {"(*List).Len", Method, 0, ""}, + {"(*List).MoveAfter", Method, 2, ""}, + {"(*List).MoveBefore", Method, 2, ""}, + {"(*List).MoveToBack", Method, 0, ""}, + {"(*List).MoveToFront", Method, 0, ""}, + {"(*List).PushBack", Method, 0, ""}, + {"(*List).PushBackList", Method, 0, ""}, + {"(*List).PushFront", Method, 0, ""}, + {"(*List).PushFrontList", Method, 0, ""}, + {"(*List).Remove", Method, 0, ""}, + {"Element", Type, 0, ""}, + {"Element.Value", Field, 0, ""}, + {"List", Type, 0, ""}, + {"New", Func, 0, "func() *List"}, }, "container/ring": { - {"(*Ring).Do", Method, 0}, - {"(*Ring).Len", Method, 0}, - {"(*Ring).Link", Method, 0}, - {"(*Ring).Move", Method, 0}, - {"(*Ring).Next", Method, 0}, - {"(*Ring).Prev", Method, 0}, - {"(*Ring).Unlink", Method, 0}, - {"New", Func, 0}, - {"Ring", Type, 0}, - {"Ring.Value", Field, 0}, + {"(*Ring).Do", Method, 0, ""}, + {"(*Ring).Len", Method, 0, ""}, + {"(*Ring).Link", Method, 0, ""}, + {"(*Ring).Move", Method, 0, ""}, + {"(*Ring).Next", Method, 0, ""}, + {"(*Ring).Prev", Method, 0, ""}, + {"(*Ring).Unlink", Method, 0, ""}, + {"New", Func, 0, "func(n int) *Ring"}, + {"Ring", Type, 0, ""}, + {"Ring.Value", Field, 0, ""}, }, "context": { - {"AfterFunc", Func, 21}, - {"Background", Func, 7}, - {"CancelCauseFunc", Type, 20}, - {"CancelFunc", Type, 7}, - {"Canceled", Var, 7}, - {"Cause", Func, 20}, - {"Context", Type, 7}, - {"DeadlineExceeded", Var, 7}, - {"TODO", Func, 7}, - {"WithCancel", Func, 7}, - {"WithCancelCause", Func, 20}, - {"WithDeadline", Func, 7}, - {"WithDeadlineCause", Func, 21}, - {"WithTimeout", Func, 7}, - {"WithTimeoutCause", Func, 21}, - {"WithValue", Func, 7}, - {"WithoutCancel", Func, 21}, + {"AfterFunc", Func, 21, "func(ctx Context, f func()) (stop func() bool)"}, + {"Background", Func, 7, "func() Context"}, + {"CancelCauseFunc", Type, 20, ""}, + {"CancelFunc", Type, 7, ""}, + {"Canceled", Var, 7, ""}, + {"Cause", Func, 20, "func(c Context) error"}, + {"Context", Type, 7, ""}, + {"DeadlineExceeded", Var, 7, ""}, + {"TODO", Func, 7, "func() Context"}, + {"WithCancel", Func, 7, "func(parent Context) (ctx Context, cancel CancelFunc)"}, + {"WithCancelCause", Func, 20, "func(parent Context) (ctx Context, cancel CancelCauseFunc)"}, + {"WithDeadline", Func, 7, "func(parent Context, d time.Time) (Context, CancelFunc)"}, + {"WithDeadlineCause", Func, 21, "func(parent Context, d time.Time, cause error) (Context, CancelFunc)"}, + {"WithTimeout", Func, 7, "func(parent Context, timeout time.Duration) (Context, CancelFunc)"}, + {"WithTimeoutCause", Func, 21, "func(parent Context, timeout time.Duration, cause error) (Context, CancelFunc)"}, + {"WithValue", Func, 7, "func(parent Context, key any, val any) Context"}, + {"WithoutCancel", Func, 21, "func(parent Context) Context"}, }, "crypto": { - {"(Hash).Available", Method, 0}, - {"(Hash).HashFunc", Method, 4}, - {"(Hash).New", Method, 0}, - {"(Hash).Size", Method, 0}, - {"(Hash).String", Method, 15}, - {"BLAKE2b_256", Const, 9}, - {"BLAKE2b_384", Const, 9}, - {"BLAKE2b_512", Const, 9}, - 
{"BLAKE2s_256", Const, 9}, - {"Decrypter", Type, 5}, - {"DecrypterOpts", Type, 5}, - {"Hash", Type, 0}, - {"MD4", Const, 0}, - {"MD5", Const, 0}, - {"MD5SHA1", Const, 0}, - {"PrivateKey", Type, 0}, - {"PublicKey", Type, 2}, - {"RIPEMD160", Const, 0}, - {"RegisterHash", Func, 0}, - {"SHA1", Const, 0}, - {"SHA224", Const, 0}, - {"SHA256", Const, 0}, - {"SHA384", Const, 0}, - {"SHA3_224", Const, 4}, - {"SHA3_256", Const, 4}, - {"SHA3_384", Const, 4}, - {"SHA3_512", Const, 4}, - {"SHA512", Const, 0}, - {"SHA512_224", Const, 5}, - {"SHA512_256", Const, 5}, - {"Signer", Type, 4}, - {"SignerOpts", Type, 4}, + {"(Hash).Available", Method, 0, ""}, + {"(Hash).HashFunc", Method, 4, ""}, + {"(Hash).New", Method, 0, ""}, + {"(Hash).Size", Method, 0, ""}, + {"(Hash).String", Method, 15, ""}, + {"BLAKE2b_256", Const, 9, ""}, + {"BLAKE2b_384", Const, 9, ""}, + {"BLAKE2b_512", Const, 9, ""}, + {"BLAKE2s_256", Const, 9, ""}, + {"Decrypter", Type, 5, ""}, + {"DecrypterOpts", Type, 5, ""}, + {"Hash", Type, 0, ""}, + {"MD4", Const, 0, ""}, + {"MD5", Const, 0, ""}, + {"MD5SHA1", Const, 0, ""}, + {"PrivateKey", Type, 0, ""}, + {"PublicKey", Type, 2, ""}, + {"RIPEMD160", Const, 0, ""}, + {"RegisterHash", Func, 0, "func(h Hash, f func() hash.Hash)"}, + {"SHA1", Const, 0, ""}, + {"SHA224", Const, 0, ""}, + {"SHA256", Const, 0, ""}, + {"SHA384", Const, 0, ""}, + {"SHA3_224", Const, 4, ""}, + {"SHA3_256", Const, 4, ""}, + {"SHA3_384", Const, 4, ""}, + {"SHA3_512", Const, 4, ""}, + {"SHA512", Const, 0, ""}, + {"SHA512_224", Const, 5, ""}, + {"SHA512_256", Const, 5, ""}, + {"Signer", Type, 4, ""}, + {"SignerOpts", Type, 4, ""}, }, "crypto/aes": { - {"(KeySizeError).Error", Method, 0}, - {"BlockSize", Const, 0}, - {"KeySizeError", Type, 0}, - {"NewCipher", Func, 0}, + {"(KeySizeError).Error", Method, 0, ""}, + {"BlockSize", Const, 0, ""}, + {"KeySizeError", Type, 0, ""}, + {"NewCipher", Func, 0, "func(key []byte) (cipher.Block, error)"}, }, "crypto/cipher": { - {"(StreamReader).Read", Method, 0}, - {"(StreamWriter).Close", Method, 0}, - {"(StreamWriter).Write", Method, 0}, - {"AEAD", Type, 2}, - {"Block", Type, 0}, - {"BlockMode", Type, 0}, - {"NewCBCDecrypter", Func, 0}, - {"NewCBCEncrypter", Func, 0}, - {"NewCFBDecrypter", Func, 0}, - {"NewCFBEncrypter", Func, 0}, - {"NewCTR", Func, 0}, - {"NewGCM", Func, 2}, - {"NewGCMWithNonceSize", Func, 5}, - {"NewGCMWithRandomNonce", Func, 24}, - {"NewGCMWithTagSize", Func, 11}, - {"NewOFB", Func, 0}, - {"Stream", Type, 0}, - {"StreamReader", Type, 0}, - {"StreamReader.R", Field, 0}, - {"StreamReader.S", Field, 0}, - {"StreamWriter", Type, 0}, - {"StreamWriter.Err", Field, 0}, - {"StreamWriter.S", Field, 0}, - {"StreamWriter.W", Field, 0}, + {"(StreamReader).Read", Method, 0, ""}, + {"(StreamWriter).Close", Method, 0, ""}, + {"(StreamWriter).Write", Method, 0, ""}, + {"AEAD", Type, 2, ""}, + {"Block", Type, 0, ""}, + {"BlockMode", Type, 0, ""}, + {"NewCBCDecrypter", Func, 0, "func(b Block, iv []byte) BlockMode"}, + {"NewCBCEncrypter", Func, 0, "func(b Block, iv []byte) BlockMode"}, + {"NewCFBDecrypter", Func, 0, "func(block Block, iv []byte) Stream"}, + {"NewCFBEncrypter", Func, 0, "func(block Block, iv []byte) Stream"}, + {"NewCTR", Func, 0, "func(block Block, iv []byte) Stream"}, + {"NewGCM", Func, 2, "func(cipher Block) (AEAD, error)"}, + {"NewGCMWithNonceSize", Func, 5, "func(cipher Block, size int) (AEAD, error)"}, + {"NewGCMWithRandomNonce", Func, 24, "func(cipher Block) (AEAD, error)"}, + {"NewGCMWithTagSize", Func, 11, "func(cipher Block, tagSize int) (AEAD, error)"}, + 
{"NewOFB", Func, 0, "func(b Block, iv []byte) Stream"}, + {"Stream", Type, 0, ""}, + {"StreamReader", Type, 0, ""}, + {"StreamReader.R", Field, 0, ""}, + {"StreamReader.S", Field, 0, ""}, + {"StreamWriter", Type, 0, ""}, + {"StreamWriter.Err", Field, 0, ""}, + {"StreamWriter.S", Field, 0, ""}, + {"StreamWriter.W", Field, 0, ""}, }, "crypto/des": { - {"(KeySizeError).Error", Method, 0}, - {"BlockSize", Const, 0}, - {"KeySizeError", Type, 0}, - {"NewCipher", Func, 0}, - {"NewTripleDESCipher", Func, 0}, + {"(KeySizeError).Error", Method, 0, ""}, + {"BlockSize", Const, 0, ""}, + {"KeySizeError", Type, 0, ""}, + {"NewCipher", Func, 0, "func(key []byte) (cipher.Block, error)"}, + {"NewTripleDESCipher", Func, 0, "func(key []byte) (cipher.Block, error)"}, }, "crypto/dsa": { - {"ErrInvalidPublicKey", Var, 0}, - {"GenerateKey", Func, 0}, - {"GenerateParameters", Func, 0}, - {"L1024N160", Const, 0}, - {"L2048N224", Const, 0}, - {"L2048N256", Const, 0}, - {"L3072N256", Const, 0}, - {"ParameterSizes", Type, 0}, - {"Parameters", Type, 0}, - {"Parameters.G", Field, 0}, - {"Parameters.P", Field, 0}, - {"Parameters.Q", Field, 0}, - {"PrivateKey", Type, 0}, - {"PrivateKey.PublicKey", Field, 0}, - {"PrivateKey.X", Field, 0}, - {"PublicKey", Type, 0}, - {"PublicKey.Parameters", Field, 0}, - {"PublicKey.Y", Field, 0}, - {"Sign", Func, 0}, - {"Verify", Func, 0}, + {"ErrInvalidPublicKey", Var, 0, ""}, + {"GenerateKey", Func, 0, "func(priv *PrivateKey, rand io.Reader) error"}, + {"GenerateParameters", Func, 0, "func(params *Parameters, rand io.Reader, sizes ParameterSizes) error"}, + {"L1024N160", Const, 0, ""}, + {"L2048N224", Const, 0, ""}, + {"L2048N256", Const, 0, ""}, + {"L3072N256", Const, 0, ""}, + {"ParameterSizes", Type, 0, ""}, + {"Parameters", Type, 0, ""}, + {"Parameters.G", Field, 0, ""}, + {"Parameters.P", Field, 0, ""}, + {"Parameters.Q", Field, 0, ""}, + {"PrivateKey", Type, 0, ""}, + {"PrivateKey.PublicKey", Field, 0, ""}, + {"PrivateKey.X", Field, 0, ""}, + {"PublicKey", Type, 0, ""}, + {"PublicKey.Parameters", Field, 0, ""}, + {"PublicKey.Y", Field, 0, ""}, + {"Sign", Func, 0, "func(rand io.Reader, priv *PrivateKey, hash []byte) (r *big.Int, s *big.Int, err error)"}, + {"Verify", Func, 0, "func(pub *PublicKey, hash []byte, r *big.Int, s *big.Int) bool"}, }, "crypto/ecdh": { - {"(*PrivateKey).Bytes", Method, 20}, - {"(*PrivateKey).Curve", Method, 20}, - {"(*PrivateKey).ECDH", Method, 20}, - {"(*PrivateKey).Equal", Method, 20}, - {"(*PrivateKey).Public", Method, 20}, - {"(*PrivateKey).PublicKey", Method, 20}, - {"(*PublicKey).Bytes", Method, 20}, - {"(*PublicKey).Curve", Method, 20}, - {"(*PublicKey).Equal", Method, 20}, - {"Curve", Type, 20}, - {"P256", Func, 20}, - {"P384", Func, 20}, - {"P521", Func, 20}, - {"PrivateKey", Type, 20}, - {"PublicKey", Type, 20}, - {"X25519", Func, 20}, + {"(*PrivateKey).Bytes", Method, 20, ""}, + {"(*PrivateKey).Curve", Method, 20, ""}, + {"(*PrivateKey).ECDH", Method, 20, ""}, + {"(*PrivateKey).Equal", Method, 20, ""}, + {"(*PrivateKey).Public", Method, 20, ""}, + {"(*PrivateKey).PublicKey", Method, 20, ""}, + {"(*PublicKey).Bytes", Method, 20, ""}, + {"(*PublicKey).Curve", Method, 20, ""}, + {"(*PublicKey).Equal", Method, 20, ""}, + {"Curve", Type, 20, ""}, + {"P256", Func, 20, "func() Curve"}, + {"P384", Func, 20, "func() Curve"}, + {"P521", Func, 20, "func() Curve"}, + {"PrivateKey", Type, 20, ""}, + {"PublicKey", Type, 20, ""}, + {"X25519", Func, 20, "func() Curve"}, }, "crypto/ecdsa": { - {"(*PrivateKey).ECDH", Method, 20}, - {"(*PrivateKey).Equal", Method, 
15}, - {"(*PrivateKey).Public", Method, 4}, - {"(*PrivateKey).Sign", Method, 4}, - {"(*PublicKey).ECDH", Method, 20}, - {"(*PublicKey).Equal", Method, 15}, - {"(PrivateKey).Add", Method, 0}, - {"(PrivateKey).Double", Method, 0}, - {"(PrivateKey).IsOnCurve", Method, 0}, - {"(PrivateKey).Params", Method, 0}, - {"(PrivateKey).ScalarBaseMult", Method, 0}, - {"(PrivateKey).ScalarMult", Method, 0}, - {"(PublicKey).Add", Method, 0}, - {"(PublicKey).Double", Method, 0}, - {"(PublicKey).IsOnCurve", Method, 0}, - {"(PublicKey).Params", Method, 0}, - {"(PublicKey).ScalarBaseMult", Method, 0}, - {"(PublicKey).ScalarMult", Method, 0}, - {"GenerateKey", Func, 0}, - {"PrivateKey", Type, 0}, - {"PrivateKey.D", Field, 0}, - {"PrivateKey.PublicKey", Field, 0}, - {"PublicKey", Type, 0}, - {"PublicKey.Curve", Field, 0}, - {"PublicKey.X", Field, 0}, - {"PublicKey.Y", Field, 0}, - {"Sign", Func, 0}, - {"SignASN1", Func, 15}, - {"Verify", Func, 0}, - {"VerifyASN1", Func, 15}, + {"(*PrivateKey).ECDH", Method, 20, ""}, + {"(*PrivateKey).Equal", Method, 15, ""}, + {"(*PrivateKey).Public", Method, 4, ""}, + {"(*PrivateKey).Sign", Method, 4, ""}, + {"(*PublicKey).ECDH", Method, 20, ""}, + {"(*PublicKey).Equal", Method, 15, ""}, + {"(PrivateKey).Add", Method, 0, ""}, + {"(PrivateKey).Double", Method, 0, ""}, + {"(PrivateKey).IsOnCurve", Method, 0, ""}, + {"(PrivateKey).Params", Method, 0, ""}, + {"(PrivateKey).ScalarBaseMult", Method, 0, ""}, + {"(PrivateKey).ScalarMult", Method, 0, ""}, + {"(PublicKey).Add", Method, 0, ""}, + {"(PublicKey).Double", Method, 0, ""}, + {"(PublicKey).IsOnCurve", Method, 0, ""}, + {"(PublicKey).Params", Method, 0, ""}, + {"(PublicKey).ScalarBaseMult", Method, 0, ""}, + {"(PublicKey).ScalarMult", Method, 0, ""}, + {"GenerateKey", Func, 0, "func(c elliptic.Curve, rand io.Reader) (*PrivateKey, error)"}, + {"PrivateKey", Type, 0, ""}, + {"PrivateKey.D", Field, 0, ""}, + {"PrivateKey.PublicKey", Field, 0, ""}, + {"PublicKey", Type, 0, ""}, + {"PublicKey.Curve", Field, 0, ""}, + {"PublicKey.X", Field, 0, ""}, + {"PublicKey.Y", Field, 0, ""}, + {"Sign", Func, 0, "func(rand io.Reader, priv *PrivateKey, hash []byte) (r *big.Int, s *big.Int, err error)"}, + {"SignASN1", Func, 15, "func(rand io.Reader, priv *PrivateKey, hash []byte) ([]byte, error)"}, + {"Verify", Func, 0, "func(pub *PublicKey, hash []byte, r *big.Int, s *big.Int) bool"}, + {"VerifyASN1", Func, 15, "func(pub *PublicKey, hash []byte, sig []byte) bool"}, }, "crypto/ed25519": { - {"(*Options).HashFunc", Method, 20}, - {"(PrivateKey).Equal", Method, 15}, - {"(PrivateKey).Public", Method, 13}, - {"(PrivateKey).Seed", Method, 13}, - {"(PrivateKey).Sign", Method, 13}, - {"(PublicKey).Equal", Method, 15}, - {"GenerateKey", Func, 13}, - {"NewKeyFromSeed", Func, 13}, - {"Options", Type, 20}, - {"Options.Context", Field, 20}, - {"Options.Hash", Field, 20}, - {"PrivateKey", Type, 13}, - {"PrivateKeySize", Const, 13}, - {"PublicKey", Type, 13}, - {"PublicKeySize", Const, 13}, - {"SeedSize", Const, 13}, - {"Sign", Func, 13}, - {"SignatureSize", Const, 13}, - {"Verify", Func, 13}, - {"VerifyWithOptions", Func, 20}, + {"(*Options).HashFunc", Method, 20, ""}, + {"(PrivateKey).Equal", Method, 15, ""}, + {"(PrivateKey).Public", Method, 13, ""}, + {"(PrivateKey).Seed", Method, 13, ""}, + {"(PrivateKey).Sign", Method, 13, ""}, + {"(PublicKey).Equal", Method, 15, ""}, + {"GenerateKey", Func, 13, "func(rand io.Reader) (PublicKey, PrivateKey, error)"}, + {"NewKeyFromSeed", Func, 13, "func(seed []byte) PrivateKey"}, + {"Options", Type, 20, ""}, + 
{"Options.Context", Field, 20, ""}, + {"Options.Hash", Field, 20, ""}, + {"PrivateKey", Type, 13, ""}, + {"PrivateKeySize", Const, 13, ""}, + {"PublicKey", Type, 13, ""}, + {"PublicKeySize", Const, 13, ""}, + {"SeedSize", Const, 13, ""}, + {"Sign", Func, 13, "func(privateKey PrivateKey, message []byte) []byte"}, + {"SignatureSize", Const, 13, ""}, + {"Verify", Func, 13, "func(publicKey PublicKey, message []byte, sig []byte) bool"}, + {"VerifyWithOptions", Func, 20, "func(publicKey PublicKey, message []byte, sig []byte, opts *Options) error"}, }, "crypto/elliptic": { - {"(*CurveParams).Add", Method, 0}, - {"(*CurveParams).Double", Method, 0}, - {"(*CurveParams).IsOnCurve", Method, 0}, - {"(*CurveParams).Params", Method, 0}, - {"(*CurveParams).ScalarBaseMult", Method, 0}, - {"(*CurveParams).ScalarMult", Method, 0}, - {"Curve", Type, 0}, - {"CurveParams", Type, 0}, - {"CurveParams.B", Field, 0}, - {"CurveParams.BitSize", Field, 0}, - {"CurveParams.Gx", Field, 0}, - {"CurveParams.Gy", Field, 0}, - {"CurveParams.N", Field, 0}, - {"CurveParams.Name", Field, 5}, - {"CurveParams.P", Field, 0}, - {"GenerateKey", Func, 0}, - {"Marshal", Func, 0}, - {"MarshalCompressed", Func, 15}, - {"P224", Func, 0}, - {"P256", Func, 0}, - {"P384", Func, 0}, - {"P521", Func, 0}, - {"Unmarshal", Func, 0}, - {"UnmarshalCompressed", Func, 15}, + {"(*CurveParams).Add", Method, 0, ""}, + {"(*CurveParams).Double", Method, 0, ""}, + {"(*CurveParams).IsOnCurve", Method, 0, ""}, + {"(*CurveParams).Params", Method, 0, ""}, + {"(*CurveParams).ScalarBaseMult", Method, 0, ""}, + {"(*CurveParams).ScalarMult", Method, 0, ""}, + {"Curve", Type, 0, ""}, + {"CurveParams", Type, 0, ""}, + {"CurveParams.B", Field, 0, ""}, + {"CurveParams.BitSize", Field, 0, ""}, + {"CurveParams.Gx", Field, 0, ""}, + {"CurveParams.Gy", Field, 0, ""}, + {"CurveParams.N", Field, 0, ""}, + {"CurveParams.Name", Field, 5, ""}, + {"CurveParams.P", Field, 0, ""}, + {"GenerateKey", Func, 0, "func(curve Curve, rand io.Reader) (priv []byte, x *big.Int, y *big.Int, err error)"}, + {"Marshal", Func, 0, "func(curve Curve, x *big.Int, y *big.Int) []byte"}, + {"MarshalCompressed", Func, 15, "func(curve Curve, x *big.Int, y *big.Int) []byte"}, + {"P224", Func, 0, "func() Curve"}, + {"P256", Func, 0, "func() Curve"}, + {"P384", Func, 0, "func() Curve"}, + {"P521", Func, 0, "func() Curve"}, + {"Unmarshal", Func, 0, "func(curve Curve, data []byte) (x *big.Int, y *big.Int)"}, + {"UnmarshalCompressed", Func, 15, "func(curve Curve, data []byte) (x *big.Int, y *big.Int)"}, }, "crypto/fips140": { - {"Enabled", Func, 24}, + {"Enabled", Func, 24, "func() bool"}, }, "crypto/hkdf": { - {"Expand", Func, 24}, - {"Extract", Func, 24}, - {"Key", Func, 24}, + {"Expand", Func, 24, "func[H hash.Hash](h func() H, pseudorandomKey []byte, info string, keyLength int) ([]byte, error)"}, + {"Extract", Func, 24, "func[H hash.Hash](h func() H, secret []byte, salt []byte) ([]byte, error)"}, + {"Key", Func, 24, "func[Hash hash.Hash](h func() Hash, secret []byte, salt []byte, info string, keyLength int) ([]byte, error)"}, }, "crypto/hmac": { - {"Equal", Func, 1}, - {"New", Func, 0}, + {"Equal", Func, 1, "func(mac1 []byte, mac2 []byte) bool"}, + {"New", Func, 0, "func(h func() hash.Hash, key []byte) hash.Hash"}, }, "crypto/md5": { - {"BlockSize", Const, 0}, - {"New", Func, 0}, - {"Size", Const, 0}, - {"Sum", Func, 2}, + {"BlockSize", Const, 0, ""}, + {"New", Func, 0, "func() hash.Hash"}, + {"Size", Const, 0, ""}, + {"Sum", Func, 2, "func(data []byte) [16]byte"}, }, "crypto/mlkem": { - 
{"(*DecapsulationKey1024).Bytes", Method, 24}, - {"(*DecapsulationKey1024).Decapsulate", Method, 24}, - {"(*DecapsulationKey1024).EncapsulationKey", Method, 24}, - {"(*DecapsulationKey768).Bytes", Method, 24}, - {"(*DecapsulationKey768).Decapsulate", Method, 24}, - {"(*DecapsulationKey768).EncapsulationKey", Method, 24}, - {"(*EncapsulationKey1024).Bytes", Method, 24}, - {"(*EncapsulationKey1024).Encapsulate", Method, 24}, - {"(*EncapsulationKey768).Bytes", Method, 24}, - {"(*EncapsulationKey768).Encapsulate", Method, 24}, - {"CiphertextSize1024", Const, 24}, - {"CiphertextSize768", Const, 24}, - {"DecapsulationKey1024", Type, 24}, - {"DecapsulationKey768", Type, 24}, - {"EncapsulationKey1024", Type, 24}, - {"EncapsulationKey768", Type, 24}, - {"EncapsulationKeySize1024", Const, 24}, - {"EncapsulationKeySize768", Const, 24}, - {"GenerateKey1024", Func, 24}, - {"GenerateKey768", Func, 24}, - {"NewDecapsulationKey1024", Func, 24}, - {"NewDecapsulationKey768", Func, 24}, - {"NewEncapsulationKey1024", Func, 24}, - {"NewEncapsulationKey768", Func, 24}, - {"SeedSize", Const, 24}, - {"SharedKeySize", Const, 24}, + {"(*DecapsulationKey1024).Bytes", Method, 24, ""}, + {"(*DecapsulationKey1024).Decapsulate", Method, 24, ""}, + {"(*DecapsulationKey1024).EncapsulationKey", Method, 24, ""}, + {"(*DecapsulationKey768).Bytes", Method, 24, ""}, + {"(*DecapsulationKey768).Decapsulate", Method, 24, ""}, + {"(*DecapsulationKey768).EncapsulationKey", Method, 24, ""}, + {"(*EncapsulationKey1024).Bytes", Method, 24, ""}, + {"(*EncapsulationKey1024).Encapsulate", Method, 24, ""}, + {"(*EncapsulationKey768).Bytes", Method, 24, ""}, + {"(*EncapsulationKey768).Encapsulate", Method, 24, ""}, + {"CiphertextSize1024", Const, 24, ""}, + {"CiphertextSize768", Const, 24, ""}, + {"DecapsulationKey1024", Type, 24, ""}, + {"DecapsulationKey768", Type, 24, ""}, + {"EncapsulationKey1024", Type, 24, ""}, + {"EncapsulationKey768", Type, 24, ""}, + {"EncapsulationKeySize1024", Const, 24, ""}, + {"EncapsulationKeySize768", Const, 24, ""}, + {"GenerateKey1024", Func, 24, "func() (*DecapsulationKey1024, error)"}, + {"GenerateKey768", Func, 24, "func() (*DecapsulationKey768, error)"}, + {"NewDecapsulationKey1024", Func, 24, "func(seed []byte) (*DecapsulationKey1024, error)"}, + {"NewDecapsulationKey768", Func, 24, "func(seed []byte) (*DecapsulationKey768, error)"}, + {"NewEncapsulationKey1024", Func, 24, "func(encapsulationKey []byte) (*EncapsulationKey1024, error)"}, + {"NewEncapsulationKey768", Func, 24, "func(encapsulationKey []byte) (*EncapsulationKey768, error)"}, + {"SeedSize", Const, 24, ""}, + {"SharedKeySize", Const, 24, ""}, }, "crypto/pbkdf2": { - {"Key", Func, 24}, + {"Key", Func, 24, "func[Hash hash.Hash](h func() Hash, password string, salt []byte, iter int, keyLength int) ([]byte, error)"}, }, "crypto/rand": { - {"Int", Func, 0}, - {"Prime", Func, 0}, - {"Read", Func, 0}, - {"Reader", Var, 0}, - {"Text", Func, 24}, + {"Int", Func, 0, "func(rand io.Reader, max *big.Int) (n *big.Int, err error)"}, + {"Prime", Func, 0, "func(rand io.Reader, bits int) (*big.Int, error)"}, + {"Read", Func, 0, "func(b []byte) (n int, err error)"}, + {"Reader", Var, 0, ""}, + {"Text", Func, 24, "func() string"}, }, "crypto/rc4": { - {"(*Cipher).Reset", Method, 0}, - {"(*Cipher).XORKeyStream", Method, 0}, - {"(KeySizeError).Error", Method, 0}, - {"Cipher", Type, 0}, - {"KeySizeError", Type, 0}, - {"NewCipher", Func, 0}, + {"(*Cipher).Reset", Method, 0, ""}, + {"(*Cipher).XORKeyStream", Method, 0, ""}, + {"(KeySizeError).Error", Method, 0, 
""}, + {"Cipher", Type, 0, ""}, + {"KeySizeError", Type, 0, ""}, + {"NewCipher", Func, 0, "func(key []byte) (*Cipher, error)"}, }, "crypto/rsa": { - {"(*PSSOptions).HashFunc", Method, 4}, - {"(*PrivateKey).Decrypt", Method, 5}, - {"(*PrivateKey).Equal", Method, 15}, - {"(*PrivateKey).Precompute", Method, 0}, - {"(*PrivateKey).Public", Method, 4}, - {"(*PrivateKey).Sign", Method, 4}, - {"(*PrivateKey).Size", Method, 11}, - {"(*PrivateKey).Validate", Method, 0}, - {"(*PublicKey).Equal", Method, 15}, - {"(*PublicKey).Size", Method, 11}, - {"CRTValue", Type, 0}, - {"CRTValue.Coeff", Field, 0}, - {"CRTValue.Exp", Field, 0}, - {"CRTValue.R", Field, 0}, - {"DecryptOAEP", Func, 0}, - {"DecryptPKCS1v15", Func, 0}, - {"DecryptPKCS1v15SessionKey", Func, 0}, - {"EncryptOAEP", Func, 0}, - {"EncryptPKCS1v15", Func, 0}, - {"ErrDecryption", Var, 0}, - {"ErrMessageTooLong", Var, 0}, - {"ErrVerification", Var, 0}, - {"GenerateKey", Func, 0}, - {"GenerateMultiPrimeKey", Func, 0}, - {"OAEPOptions", Type, 5}, - {"OAEPOptions.Hash", Field, 5}, - {"OAEPOptions.Label", Field, 5}, - {"OAEPOptions.MGFHash", Field, 20}, - {"PKCS1v15DecryptOptions", Type, 5}, - {"PKCS1v15DecryptOptions.SessionKeyLen", Field, 5}, - {"PSSOptions", Type, 2}, - {"PSSOptions.Hash", Field, 4}, - {"PSSOptions.SaltLength", Field, 2}, - {"PSSSaltLengthAuto", Const, 2}, - {"PSSSaltLengthEqualsHash", Const, 2}, - {"PrecomputedValues", Type, 0}, - {"PrecomputedValues.CRTValues", Field, 0}, - {"PrecomputedValues.Dp", Field, 0}, - {"PrecomputedValues.Dq", Field, 0}, - {"PrecomputedValues.Qinv", Field, 0}, - {"PrivateKey", Type, 0}, - {"PrivateKey.D", Field, 0}, - {"PrivateKey.Precomputed", Field, 0}, - {"PrivateKey.Primes", Field, 0}, - {"PrivateKey.PublicKey", Field, 0}, - {"PublicKey", Type, 0}, - {"PublicKey.E", Field, 0}, - {"PublicKey.N", Field, 0}, - {"SignPKCS1v15", Func, 0}, - {"SignPSS", Func, 2}, - {"VerifyPKCS1v15", Func, 0}, - {"VerifyPSS", Func, 2}, + {"(*PSSOptions).HashFunc", Method, 4, ""}, + {"(*PrivateKey).Decrypt", Method, 5, ""}, + {"(*PrivateKey).Equal", Method, 15, ""}, + {"(*PrivateKey).Precompute", Method, 0, ""}, + {"(*PrivateKey).Public", Method, 4, ""}, + {"(*PrivateKey).Sign", Method, 4, ""}, + {"(*PrivateKey).Size", Method, 11, ""}, + {"(*PrivateKey).Validate", Method, 0, ""}, + {"(*PublicKey).Equal", Method, 15, ""}, + {"(*PublicKey).Size", Method, 11, ""}, + {"CRTValue", Type, 0, ""}, + {"CRTValue.Coeff", Field, 0, ""}, + {"CRTValue.Exp", Field, 0, ""}, + {"CRTValue.R", Field, 0, ""}, + {"DecryptOAEP", Func, 0, "func(hash hash.Hash, random io.Reader, priv *PrivateKey, ciphertext []byte, label []byte) ([]byte, error)"}, + {"DecryptPKCS1v15", Func, 0, "func(random io.Reader, priv *PrivateKey, ciphertext []byte) ([]byte, error)"}, + {"DecryptPKCS1v15SessionKey", Func, 0, "func(random io.Reader, priv *PrivateKey, ciphertext []byte, key []byte) error"}, + {"EncryptOAEP", Func, 0, "func(hash hash.Hash, random io.Reader, pub *PublicKey, msg []byte, label []byte) ([]byte, error)"}, + {"EncryptPKCS1v15", Func, 0, "func(random io.Reader, pub *PublicKey, msg []byte) ([]byte, error)"}, + {"ErrDecryption", Var, 0, ""}, + {"ErrMessageTooLong", Var, 0, ""}, + {"ErrVerification", Var, 0, ""}, + {"GenerateKey", Func, 0, "func(random io.Reader, bits int) (*PrivateKey, error)"}, + {"GenerateMultiPrimeKey", Func, 0, "func(random io.Reader, nprimes int, bits int) (*PrivateKey, error)"}, + {"OAEPOptions", Type, 5, ""}, + {"OAEPOptions.Hash", Field, 5, ""}, + {"OAEPOptions.Label", Field, 5, ""}, + {"OAEPOptions.MGFHash", Field, 20, ""}, + 
{"PKCS1v15DecryptOptions", Type, 5, ""}, + {"PKCS1v15DecryptOptions.SessionKeyLen", Field, 5, ""}, + {"PSSOptions", Type, 2, ""}, + {"PSSOptions.Hash", Field, 4, ""}, + {"PSSOptions.SaltLength", Field, 2, ""}, + {"PSSSaltLengthAuto", Const, 2, ""}, + {"PSSSaltLengthEqualsHash", Const, 2, ""}, + {"PrecomputedValues", Type, 0, ""}, + {"PrecomputedValues.CRTValues", Field, 0, ""}, + {"PrecomputedValues.Dp", Field, 0, ""}, + {"PrecomputedValues.Dq", Field, 0, ""}, + {"PrecomputedValues.Qinv", Field, 0, ""}, + {"PrivateKey", Type, 0, ""}, + {"PrivateKey.D", Field, 0, ""}, + {"PrivateKey.Precomputed", Field, 0, ""}, + {"PrivateKey.Primes", Field, 0, ""}, + {"PrivateKey.PublicKey", Field, 0, ""}, + {"PublicKey", Type, 0, ""}, + {"PublicKey.E", Field, 0, ""}, + {"PublicKey.N", Field, 0, ""}, + {"SignPKCS1v15", Func, 0, "func(random io.Reader, priv *PrivateKey, hash crypto.Hash, hashed []byte) ([]byte, error)"}, + {"SignPSS", Func, 2, "func(rand io.Reader, priv *PrivateKey, hash crypto.Hash, digest []byte, opts *PSSOptions) ([]byte, error)"}, + {"VerifyPKCS1v15", Func, 0, "func(pub *PublicKey, hash crypto.Hash, hashed []byte, sig []byte) error"}, + {"VerifyPSS", Func, 2, "func(pub *PublicKey, hash crypto.Hash, digest []byte, sig []byte, opts *PSSOptions) error"}, }, "crypto/sha1": { - {"BlockSize", Const, 0}, - {"New", Func, 0}, - {"Size", Const, 0}, - {"Sum", Func, 2}, + {"BlockSize", Const, 0, ""}, + {"New", Func, 0, "func() hash.Hash"}, + {"Size", Const, 0, ""}, + {"Sum", Func, 2, "func(data []byte) [20]byte"}, }, "crypto/sha256": { - {"BlockSize", Const, 0}, - {"New", Func, 0}, - {"New224", Func, 0}, - {"Size", Const, 0}, - {"Size224", Const, 0}, - {"Sum224", Func, 2}, - {"Sum256", Func, 2}, + {"BlockSize", Const, 0, ""}, + {"New", Func, 0, "func() hash.Hash"}, + {"New224", Func, 0, "func() hash.Hash"}, + {"Size", Const, 0, ""}, + {"Size224", Const, 0, ""}, + {"Sum224", Func, 2, "func(data []byte) [28]byte"}, + {"Sum256", Func, 2, "func(data []byte) [32]byte"}, }, "crypto/sha3": { - {"(*SHA3).AppendBinary", Method, 24}, - {"(*SHA3).BlockSize", Method, 24}, - {"(*SHA3).MarshalBinary", Method, 24}, - {"(*SHA3).Reset", Method, 24}, - {"(*SHA3).Size", Method, 24}, - {"(*SHA3).Sum", Method, 24}, - {"(*SHA3).UnmarshalBinary", Method, 24}, - {"(*SHA3).Write", Method, 24}, - {"(*SHAKE).AppendBinary", Method, 24}, - {"(*SHAKE).BlockSize", Method, 24}, - {"(*SHAKE).MarshalBinary", Method, 24}, - {"(*SHAKE).Read", Method, 24}, - {"(*SHAKE).Reset", Method, 24}, - {"(*SHAKE).UnmarshalBinary", Method, 24}, - {"(*SHAKE).Write", Method, 24}, - {"New224", Func, 24}, - {"New256", Func, 24}, - {"New384", Func, 24}, - {"New512", Func, 24}, - {"NewCSHAKE128", Func, 24}, - {"NewCSHAKE256", Func, 24}, - {"NewSHAKE128", Func, 24}, - {"NewSHAKE256", Func, 24}, - {"SHA3", Type, 24}, - {"SHAKE", Type, 24}, - {"Sum224", Func, 24}, - {"Sum256", Func, 24}, - {"Sum384", Func, 24}, - {"Sum512", Func, 24}, - {"SumSHAKE128", Func, 24}, - {"SumSHAKE256", Func, 24}, + {"(*SHA3).AppendBinary", Method, 24, ""}, + {"(*SHA3).BlockSize", Method, 24, ""}, + {"(*SHA3).MarshalBinary", Method, 24, ""}, + {"(*SHA3).Reset", Method, 24, ""}, + {"(*SHA3).Size", Method, 24, ""}, + {"(*SHA3).Sum", Method, 24, ""}, + {"(*SHA3).UnmarshalBinary", Method, 24, ""}, + {"(*SHA3).Write", Method, 24, ""}, + {"(*SHAKE).AppendBinary", Method, 24, ""}, + {"(*SHAKE).BlockSize", Method, 24, ""}, + {"(*SHAKE).MarshalBinary", Method, 24, ""}, + {"(*SHAKE).Read", Method, 24, ""}, + {"(*SHAKE).Reset", Method, 24, ""}, + {"(*SHAKE).UnmarshalBinary", Method, 24, 
""}, + {"(*SHAKE).Write", Method, 24, ""}, + {"New224", Func, 24, "func() *SHA3"}, + {"New256", Func, 24, "func() *SHA3"}, + {"New384", Func, 24, "func() *SHA3"}, + {"New512", Func, 24, "func() *SHA3"}, + {"NewCSHAKE128", Func, 24, "func(N []byte, S []byte) *SHAKE"}, + {"NewCSHAKE256", Func, 24, "func(N []byte, S []byte) *SHAKE"}, + {"NewSHAKE128", Func, 24, "func() *SHAKE"}, + {"NewSHAKE256", Func, 24, "func() *SHAKE"}, + {"SHA3", Type, 24, ""}, + {"SHAKE", Type, 24, ""}, + {"Sum224", Func, 24, "func(data []byte) [28]byte"}, + {"Sum256", Func, 24, "func(data []byte) [32]byte"}, + {"Sum384", Func, 24, "func(data []byte) [48]byte"}, + {"Sum512", Func, 24, "func(data []byte) [64]byte"}, + {"SumSHAKE128", Func, 24, "func(data []byte, length int) []byte"}, + {"SumSHAKE256", Func, 24, "func(data []byte, length int) []byte"}, }, "crypto/sha512": { - {"BlockSize", Const, 0}, - {"New", Func, 0}, - {"New384", Func, 0}, - {"New512_224", Func, 5}, - {"New512_256", Func, 5}, - {"Size", Const, 0}, - {"Size224", Const, 5}, - {"Size256", Const, 5}, - {"Size384", Const, 0}, - {"Sum384", Func, 2}, - {"Sum512", Func, 2}, - {"Sum512_224", Func, 5}, - {"Sum512_256", Func, 5}, + {"BlockSize", Const, 0, ""}, + {"New", Func, 0, "func() hash.Hash"}, + {"New384", Func, 0, "func() hash.Hash"}, + {"New512_224", Func, 5, "func() hash.Hash"}, + {"New512_256", Func, 5, "func() hash.Hash"}, + {"Size", Const, 0, ""}, + {"Size224", Const, 5, ""}, + {"Size256", Const, 5, ""}, + {"Size384", Const, 0, ""}, + {"Sum384", Func, 2, "func(data []byte) [48]byte"}, + {"Sum512", Func, 2, "func(data []byte) [64]byte"}, + {"Sum512_224", Func, 5, "func(data []byte) [28]byte"}, + {"Sum512_256", Func, 5, "func(data []byte) [32]byte"}, }, "crypto/subtle": { - {"ConstantTimeByteEq", Func, 0}, - {"ConstantTimeCompare", Func, 0}, - {"ConstantTimeCopy", Func, 0}, - {"ConstantTimeEq", Func, 0}, - {"ConstantTimeLessOrEq", Func, 2}, - {"ConstantTimeSelect", Func, 0}, - {"WithDataIndependentTiming", Func, 24}, - {"XORBytes", Func, 20}, + {"ConstantTimeByteEq", Func, 0, "func(x uint8, y uint8) int"}, + {"ConstantTimeCompare", Func, 0, "func(x []byte, y []byte) int"}, + {"ConstantTimeCopy", Func, 0, "func(v int, x []byte, y []byte)"}, + {"ConstantTimeEq", Func, 0, "func(x int32, y int32) int"}, + {"ConstantTimeLessOrEq", Func, 2, "func(x int, y int) int"}, + {"ConstantTimeSelect", Func, 0, "func(v int, x int, y int) int"}, + {"WithDataIndependentTiming", Func, 24, "func(f func())"}, + {"XORBytes", Func, 20, "func(dst []byte, x []byte, y []byte) int"}, }, "crypto/tls": { - {"(*CertificateRequestInfo).Context", Method, 17}, - {"(*CertificateRequestInfo).SupportsCertificate", Method, 14}, - {"(*CertificateVerificationError).Error", Method, 20}, - {"(*CertificateVerificationError).Unwrap", Method, 20}, - {"(*ClientHelloInfo).Context", Method, 17}, - {"(*ClientHelloInfo).SupportsCertificate", Method, 14}, - {"(*ClientSessionState).ResumptionState", Method, 21}, - {"(*Config).BuildNameToCertificate", Method, 0}, - {"(*Config).Clone", Method, 8}, - {"(*Config).DecryptTicket", Method, 21}, - {"(*Config).EncryptTicket", Method, 21}, - {"(*Config).SetSessionTicketKeys", Method, 5}, - {"(*Conn).Close", Method, 0}, - {"(*Conn).CloseWrite", Method, 8}, - {"(*Conn).ConnectionState", Method, 0}, - {"(*Conn).Handshake", Method, 0}, - {"(*Conn).HandshakeContext", Method, 17}, - {"(*Conn).LocalAddr", Method, 0}, - {"(*Conn).NetConn", Method, 18}, - {"(*Conn).OCSPResponse", Method, 0}, - {"(*Conn).Read", Method, 0}, - {"(*Conn).RemoteAddr", Method, 0}, - 
{"(*Conn).SetDeadline", Method, 0}, - {"(*Conn).SetReadDeadline", Method, 0}, - {"(*Conn).SetWriteDeadline", Method, 0}, - {"(*Conn).VerifyHostname", Method, 0}, - {"(*Conn).Write", Method, 0}, - {"(*ConnectionState).ExportKeyingMaterial", Method, 11}, - {"(*Dialer).Dial", Method, 15}, - {"(*Dialer).DialContext", Method, 15}, - {"(*ECHRejectionError).Error", Method, 23}, - {"(*QUICConn).Close", Method, 21}, - {"(*QUICConn).ConnectionState", Method, 21}, - {"(*QUICConn).HandleData", Method, 21}, - {"(*QUICConn).NextEvent", Method, 21}, - {"(*QUICConn).SendSessionTicket", Method, 21}, - {"(*QUICConn).SetTransportParameters", Method, 21}, - {"(*QUICConn).Start", Method, 21}, - {"(*QUICConn).StoreSession", Method, 23}, - {"(*SessionState).Bytes", Method, 21}, - {"(AlertError).Error", Method, 21}, - {"(ClientAuthType).String", Method, 15}, - {"(CurveID).String", Method, 15}, - {"(QUICEncryptionLevel).String", Method, 21}, - {"(RecordHeaderError).Error", Method, 6}, - {"(SignatureScheme).String", Method, 15}, - {"AlertError", Type, 21}, - {"Certificate", Type, 0}, - {"Certificate.Certificate", Field, 0}, - {"Certificate.Leaf", Field, 0}, - {"Certificate.OCSPStaple", Field, 0}, - {"Certificate.PrivateKey", Field, 0}, - {"Certificate.SignedCertificateTimestamps", Field, 5}, - {"Certificate.SupportedSignatureAlgorithms", Field, 14}, - {"CertificateRequestInfo", Type, 8}, - {"CertificateRequestInfo.AcceptableCAs", Field, 8}, - {"CertificateRequestInfo.SignatureSchemes", Field, 8}, - {"CertificateRequestInfo.Version", Field, 14}, - {"CertificateVerificationError", Type, 20}, - {"CertificateVerificationError.Err", Field, 20}, - {"CertificateVerificationError.UnverifiedCertificates", Field, 20}, - {"CipherSuite", Type, 14}, - {"CipherSuite.ID", Field, 14}, - {"CipherSuite.Insecure", Field, 14}, - {"CipherSuite.Name", Field, 14}, - {"CipherSuite.SupportedVersions", Field, 14}, - {"CipherSuiteName", Func, 14}, - {"CipherSuites", Func, 14}, - {"Client", Func, 0}, - {"ClientAuthType", Type, 0}, - {"ClientHelloInfo", Type, 4}, - {"ClientHelloInfo.CipherSuites", Field, 4}, - {"ClientHelloInfo.Conn", Field, 8}, - {"ClientHelloInfo.Extensions", Field, 24}, - {"ClientHelloInfo.ServerName", Field, 4}, - {"ClientHelloInfo.SignatureSchemes", Field, 8}, - {"ClientHelloInfo.SupportedCurves", Field, 4}, - {"ClientHelloInfo.SupportedPoints", Field, 4}, - {"ClientHelloInfo.SupportedProtos", Field, 8}, - {"ClientHelloInfo.SupportedVersions", Field, 8}, - {"ClientSessionCache", Type, 3}, - {"ClientSessionState", Type, 3}, - {"Config", Type, 0}, - {"Config.Certificates", Field, 0}, - {"Config.CipherSuites", Field, 0}, - {"Config.ClientAuth", Field, 0}, - {"Config.ClientCAs", Field, 0}, - {"Config.ClientSessionCache", Field, 3}, - {"Config.CurvePreferences", Field, 3}, - {"Config.DynamicRecordSizingDisabled", Field, 7}, - {"Config.EncryptedClientHelloConfigList", Field, 23}, - {"Config.EncryptedClientHelloKeys", Field, 24}, - {"Config.EncryptedClientHelloRejectionVerify", Field, 23}, - {"Config.GetCertificate", Field, 4}, - {"Config.GetClientCertificate", Field, 8}, - {"Config.GetConfigForClient", Field, 8}, - {"Config.InsecureSkipVerify", Field, 0}, - {"Config.KeyLogWriter", Field, 8}, - {"Config.MaxVersion", Field, 2}, - {"Config.MinVersion", Field, 2}, - {"Config.NameToCertificate", Field, 0}, - {"Config.NextProtos", Field, 0}, - {"Config.PreferServerCipherSuites", Field, 1}, - {"Config.Rand", Field, 0}, - {"Config.Renegotiation", Field, 7}, - {"Config.RootCAs", Field, 0}, - {"Config.ServerName", Field, 0}, - 
{"Config.SessionTicketKey", Field, 1}, - {"Config.SessionTicketsDisabled", Field, 1}, - {"Config.Time", Field, 0}, - {"Config.UnwrapSession", Field, 21}, - {"Config.VerifyConnection", Field, 15}, - {"Config.VerifyPeerCertificate", Field, 8}, - {"Config.WrapSession", Field, 21}, - {"Conn", Type, 0}, - {"ConnectionState", Type, 0}, - {"ConnectionState.CipherSuite", Field, 0}, - {"ConnectionState.CurveID", Field, 25}, - {"ConnectionState.DidResume", Field, 1}, - {"ConnectionState.ECHAccepted", Field, 23}, - {"ConnectionState.HandshakeComplete", Field, 0}, - {"ConnectionState.NegotiatedProtocol", Field, 0}, - {"ConnectionState.NegotiatedProtocolIsMutual", Field, 0}, - {"ConnectionState.OCSPResponse", Field, 5}, - {"ConnectionState.PeerCertificates", Field, 0}, - {"ConnectionState.ServerName", Field, 0}, - {"ConnectionState.SignedCertificateTimestamps", Field, 5}, - {"ConnectionState.TLSUnique", Field, 4}, - {"ConnectionState.VerifiedChains", Field, 0}, - {"ConnectionState.Version", Field, 3}, - {"CurveID", Type, 3}, - {"CurveP256", Const, 3}, - {"CurveP384", Const, 3}, - {"CurveP521", Const, 3}, - {"Dial", Func, 0}, - {"DialWithDialer", Func, 3}, - {"Dialer", Type, 15}, - {"Dialer.Config", Field, 15}, - {"Dialer.NetDialer", Field, 15}, - {"ECDSAWithP256AndSHA256", Const, 8}, - {"ECDSAWithP384AndSHA384", Const, 8}, - {"ECDSAWithP521AndSHA512", Const, 8}, - {"ECDSAWithSHA1", Const, 10}, - {"ECHRejectionError", Type, 23}, - {"ECHRejectionError.RetryConfigList", Field, 23}, - {"Ed25519", Const, 13}, - {"EncryptedClientHelloKey", Type, 24}, - {"EncryptedClientHelloKey.Config", Field, 24}, - {"EncryptedClientHelloKey.PrivateKey", Field, 24}, - {"EncryptedClientHelloKey.SendAsRetry", Field, 24}, - {"InsecureCipherSuites", Func, 14}, - {"Listen", Func, 0}, - {"LoadX509KeyPair", Func, 0}, - {"NewLRUClientSessionCache", Func, 3}, - {"NewListener", Func, 0}, - {"NewResumptionState", Func, 21}, - {"NoClientCert", Const, 0}, - {"PKCS1WithSHA1", Const, 8}, - {"PKCS1WithSHA256", Const, 8}, - {"PKCS1WithSHA384", Const, 8}, - {"PKCS1WithSHA512", Const, 8}, - {"PSSWithSHA256", Const, 8}, - {"PSSWithSHA384", Const, 8}, - {"PSSWithSHA512", Const, 8}, - {"ParseSessionState", Func, 21}, - {"QUICClient", Func, 21}, - {"QUICConfig", Type, 21}, - {"QUICConfig.EnableSessionEvents", Field, 23}, - {"QUICConfig.TLSConfig", Field, 21}, - {"QUICConn", Type, 21}, - {"QUICEncryptionLevel", Type, 21}, - {"QUICEncryptionLevelApplication", Const, 21}, - {"QUICEncryptionLevelEarly", Const, 21}, - {"QUICEncryptionLevelHandshake", Const, 21}, - {"QUICEncryptionLevelInitial", Const, 21}, - {"QUICEvent", Type, 21}, - {"QUICEvent.Data", Field, 21}, - {"QUICEvent.Kind", Field, 21}, - {"QUICEvent.Level", Field, 21}, - {"QUICEvent.SessionState", Field, 23}, - {"QUICEvent.Suite", Field, 21}, - {"QUICEventKind", Type, 21}, - {"QUICHandshakeDone", Const, 21}, - {"QUICNoEvent", Const, 21}, - {"QUICRejectedEarlyData", Const, 21}, - {"QUICResumeSession", Const, 23}, - {"QUICServer", Func, 21}, - {"QUICSessionTicketOptions", Type, 21}, - {"QUICSessionTicketOptions.EarlyData", Field, 21}, - {"QUICSessionTicketOptions.Extra", Field, 23}, - {"QUICSetReadSecret", Const, 21}, - {"QUICSetWriteSecret", Const, 21}, - {"QUICStoreSession", Const, 23}, - {"QUICTransportParameters", Const, 21}, - {"QUICTransportParametersRequired", Const, 21}, - {"QUICWriteData", Const, 21}, - {"RecordHeaderError", Type, 6}, - {"RecordHeaderError.Conn", Field, 12}, - {"RecordHeaderError.Msg", Field, 6}, - {"RecordHeaderError.RecordHeader", Field, 6}, - 
{"RenegotiateFreelyAsClient", Const, 7}, - {"RenegotiateNever", Const, 7}, - {"RenegotiateOnceAsClient", Const, 7}, - {"RenegotiationSupport", Type, 7}, - {"RequestClientCert", Const, 0}, - {"RequireAndVerifyClientCert", Const, 0}, - {"RequireAnyClientCert", Const, 0}, - {"Server", Func, 0}, - {"SessionState", Type, 21}, - {"SessionState.EarlyData", Field, 21}, - {"SessionState.Extra", Field, 21}, - {"SignatureScheme", Type, 8}, - {"TLS_AES_128_GCM_SHA256", Const, 12}, - {"TLS_AES_256_GCM_SHA384", Const, 12}, - {"TLS_CHACHA20_POLY1305_SHA256", Const, 12}, - {"TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA", Const, 2}, - {"TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256", Const, 8}, - {"TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256", Const, 2}, - {"TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA", Const, 2}, - {"TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384", Const, 5}, - {"TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305", Const, 8}, - {"TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256", Const, 14}, - {"TLS_ECDHE_ECDSA_WITH_RC4_128_SHA", Const, 2}, - {"TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA", Const, 0}, - {"TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA", Const, 0}, - {"TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256", Const, 8}, - {"TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256", Const, 2}, - {"TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA", Const, 1}, - {"TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384", Const, 5}, - {"TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305", Const, 8}, - {"TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256", Const, 14}, - {"TLS_ECDHE_RSA_WITH_RC4_128_SHA", Const, 0}, - {"TLS_FALLBACK_SCSV", Const, 4}, - {"TLS_RSA_WITH_3DES_EDE_CBC_SHA", Const, 0}, - {"TLS_RSA_WITH_AES_128_CBC_SHA", Const, 0}, - {"TLS_RSA_WITH_AES_128_CBC_SHA256", Const, 8}, - {"TLS_RSA_WITH_AES_128_GCM_SHA256", Const, 6}, - {"TLS_RSA_WITH_AES_256_CBC_SHA", Const, 1}, - {"TLS_RSA_WITH_AES_256_GCM_SHA384", Const, 6}, - {"TLS_RSA_WITH_RC4_128_SHA", Const, 0}, - {"VerifyClientCertIfGiven", Const, 0}, - {"VersionName", Func, 21}, - {"VersionSSL30", Const, 2}, - {"VersionTLS10", Const, 2}, - {"VersionTLS11", Const, 2}, - {"VersionTLS12", Const, 2}, - {"VersionTLS13", Const, 12}, - {"X25519", Const, 8}, - {"X25519MLKEM768", Const, 24}, - {"X509KeyPair", Func, 0}, + {"(*CertificateRequestInfo).Context", Method, 17, ""}, + {"(*CertificateRequestInfo).SupportsCertificate", Method, 14, ""}, + {"(*CertificateVerificationError).Error", Method, 20, ""}, + {"(*CertificateVerificationError).Unwrap", Method, 20, ""}, + {"(*ClientHelloInfo).Context", Method, 17, ""}, + {"(*ClientHelloInfo).SupportsCertificate", Method, 14, ""}, + {"(*ClientSessionState).ResumptionState", Method, 21, ""}, + {"(*Config).BuildNameToCertificate", Method, 0, ""}, + {"(*Config).Clone", Method, 8, ""}, + {"(*Config).DecryptTicket", Method, 21, ""}, + {"(*Config).EncryptTicket", Method, 21, ""}, + {"(*Config).SetSessionTicketKeys", Method, 5, ""}, + {"(*Conn).Close", Method, 0, ""}, + {"(*Conn).CloseWrite", Method, 8, ""}, + {"(*Conn).ConnectionState", Method, 0, ""}, + {"(*Conn).Handshake", Method, 0, ""}, + {"(*Conn).HandshakeContext", Method, 17, ""}, + {"(*Conn).LocalAddr", Method, 0, ""}, + {"(*Conn).NetConn", Method, 18, ""}, + {"(*Conn).OCSPResponse", Method, 0, ""}, + {"(*Conn).Read", Method, 0, ""}, + {"(*Conn).RemoteAddr", Method, 0, ""}, + {"(*Conn).SetDeadline", Method, 0, ""}, + {"(*Conn).SetReadDeadline", Method, 0, ""}, + {"(*Conn).SetWriteDeadline", Method, 0, ""}, + {"(*Conn).VerifyHostname", Method, 0, ""}, + {"(*Conn).Write", Method, 0, ""}, + {"(*ConnectionState).ExportKeyingMaterial", Method, 11, ""}, + {"(*Dialer).Dial", Method, 15, ""}, 
+ {"(*Dialer).DialContext", Method, 15, ""}, + {"(*ECHRejectionError).Error", Method, 23, ""}, + {"(*QUICConn).Close", Method, 21, ""}, + {"(*QUICConn).ConnectionState", Method, 21, ""}, + {"(*QUICConn).HandleData", Method, 21, ""}, + {"(*QUICConn).NextEvent", Method, 21, ""}, + {"(*QUICConn).SendSessionTicket", Method, 21, ""}, + {"(*QUICConn).SetTransportParameters", Method, 21, ""}, + {"(*QUICConn).Start", Method, 21, ""}, + {"(*QUICConn).StoreSession", Method, 23, ""}, + {"(*SessionState).Bytes", Method, 21, ""}, + {"(AlertError).Error", Method, 21, ""}, + {"(ClientAuthType).String", Method, 15, ""}, + {"(CurveID).String", Method, 15, ""}, + {"(QUICEncryptionLevel).String", Method, 21, ""}, + {"(RecordHeaderError).Error", Method, 6, ""}, + {"(SignatureScheme).String", Method, 15, ""}, + {"AlertError", Type, 21, ""}, + {"Certificate", Type, 0, ""}, + {"Certificate.Certificate", Field, 0, ""}, + {"Certificate.Leaf", Field, 0, ""}, + {"Certificate.OCSPStaple", Field, 0, ""}, + {"Certificate.PrivateKey", Field, 0, ""}, + {"Certificate.SignedCertificateTimestamps", Field, 5, ""}, + {"Certificate.SupportedSignatureAlgorithms", Field, 14, ""}, + {"CertificateRequestInfo", Type, 8, ""}, + {"CertificateRequestInfo.AcceptableCAs", Field, 8, ""}, + {"CertificateRequestInfo.SignatureSchemes", Field, 8, ""}, + {"CertificateRequestInfo.Version", Field, 14, ""}, + {"CertificateVerificationError", Type, 20, ""}, + {"CertificateVerificationError.Err", Field, 20, ""}, + {"CertificateVerificationError.UnverifiedCertificates", Field, 20, ""}, + {"CipherSuite", Type, 14, ""}, + {"CipherSuite.ID", Field, 14, ""}, + {"CipherSuite.Insecure", Field, 14, ""}, + {"CipherSuite.Name", Field, 14, ""}, + {"CipherSuite.SupportedVersions", Field, 14, ""}, + {"CipherSuiteName", Func, 14, "func(id uint16) string"}, + {"CipherSuites", Func, 14, "func() []*CipherSuite"}, + {"Client", Func, 0, "func(conn net.Conn, config *Config) *Conn"}, + {"ClientAuthType", Type, 0, ""}, + {"ClientHelloInfo", Type, 4, ""}, + {"ClientHelloInfo.CipherSuites", Field, 4, ""}, + {"ClientHelloInfo.Conn", Field, 8, ""}, + {"ClientHelloInfo.Extensions", Field, 24, ""}, + {"ClientHelloInfo.ServerName", Field, 4, ""}, + {"ClientHelloInfo.SignatureSchemes", Field, 8, ""}, + {"ClientHelloInfo.SupportedCurves", Field, 4, ""}, + {"ClientHelloInfo.SupportedPoints", Field, 4, ""}, + {"ClientHelloInfo.SupportedProtos", Field, 8, ""}, + {"ClientHelloInfo.SupportedVersions", Field, 8, ""}, + {"ClientSessionCache", Type, 3, ""}, + {"ClientSessionState", Type, 3, ""}, + {"Config", Type, 0, ""}, + {"Config.Certificates", Field, 0, ""}, + {"Config.CipherSuites", Field, 0, ""}, + {"Config.ClientAuth", Field, 0, ""}, + {"Config.ClientCAs", Field, 0, ""}, + {"Config.ClientSessionCache", Field, 3, ""}, + {"Config.CurvePreferences", Field, 3, ""}, + {"Config.DynamicRecordSizingDisabled", Field, 7, ""}, + {"Config.EncryptedClientHelloConfigList", Field, 23, ""}, + {"Config.EncryptedClientHelloKeys", Field, 24, ""}, + {"Config.EncryptedClientHelloRejectionVerify", Field, 23, ""}, + {"Config.GetCertificate", Field, 4, ""}, + {"Config.GetClientCertificate", Field, 8, ""}, + {"Config.GetConfigForClient", Field, 8, ""}, + {"Config.InsecureSkipVerify", Field, 0, ""}, + {"Config.KeyLogWriter", Field, 8, ""}, + {"Config.MaxVersion", Field, 2, ""}, + {"Config.MinVersion", Field, 2, ""}, + {"Config.NameToCertificate", Field, 0, ""}, + {"Config.NextProtos", Field, 0, ""}, + {"Config.PreferServerCipherSuites", Field, 1, ""}, + {"Config.Rand", Field, 0, ""}, + 
{"Config.Renegotiation", Field, 7, ""}, + {"Config.RootCAs", Field, 0, ""}, + {"Config.ServerName", Field, 0, ""}, + {"Config.SessionTicketKey", Field, 1, ""}, + {"Config.SessionTicketsDisabled", Field, 1, ""}, + {"Config.Time", Field, 0, ""}, + {"Config.UnwrapSession", Field, 21, ""}, + {"Config.VerifyConnection", Field, 15, ""}, + {"Config.VerifyPeerCertificate", Field, 8, ""}, + {"Config.WrapSession", Field, 21, ""}, + {"Conn", Type, 0, ""}, + {"ConnectionState", Type, 0, ""}, + {"ConnectionState.CipherSuite", Field, 0, ""}, + {"ConnectionState.CurveID", Field, 25, ""}, + {"ConnectionState.DidResume", Field, 1, ""}, + {"ConnectionState.ECHAccepted", Field, 23, ""}, + {"ConnectionState.HandshakeComplete", Field, 0, ""}, + {"ConnectionState.NegotiatedProtocol", Field, 0, ""}, + {"ConnectionState.NegotiatedProtocolIsMutual", Field, 0, ""}, + {"ConnectionState.OCSPResponse", Field, 5, ""}, + {"ConnectionState.PeerCertificates", Field, 0, ""}, + {"ConnectionState.ServerName", Field, 0, ""}, + {"ConnectionState.SignedCertificateTimestamps", Field, 5, ""}, + {"ConnectionState.TLSUnique", Field, 4, ""}, + {"ConnectionState.VerifiedChains", Field, 0, ""}, + {"ConnectionState.Version", Field, 3, ""}, + {"CurveID", Type, 3, ""}, + {"CurveP256", Const, 3, ""}, + {"CurveP384", Const, 3, ""}, + {"CurveP521", Const, 3, ""}, + {"Dial", Func, 0, "func(network string, addr string, config *Config) (*Conn, error)"}, + {"DialWithDialer", Func, 3, "func(dialer *net.Dialer, network string, addr string, config *Config) (*Conn, error)"}, + {"Dialer", Type, 15, ""}, + {"Dialer.Config", Field, 15, ""}, + {"Dialer.NetDialer", Field, 15, ""}, + {"ECDSAWithP256AndSHA256", Const, 8, ""}, + {"ECDSAWithP384AndSHA384", Const, 8, ""}, + {"ECDSAWithP521AndSHA512", Const, 8, ""}, + {"ECDSAWithSHA1", Const, 10, ""}, + {"ECHRejectionError", Type, 23, ""}, + {"ECHRejectionError.RetryConfigList", Field, 23, ""}, + {"Ed25519", Const, 13, ""}, + {"EncryptedClientHelloKey", Type, 24, ""}, + {"EncryptedClientHelloKey.Config", Field, 24, ""}, + {"EncryptedClientHelloKey.PrivateKey", Field, 24, ""}, + {"EncryptedClientHelloKey.SendAsRetry", Field, 24, ""}, + {"InsecureCipherSuites", Func, 14, "func() []*CipherSuite"}, + {"Listen", Func, 0, "func(network string, laddr string, config *Config) (net.Listener, error)"}, + {"LoadX509KeyPair", Func, 0, "func(certFile string, keyFile string) (Certificate, error)"}, + {"NewLRUClientSessionCache", Func, 3, "func(capacity int) ClientSessionCache"}, + {"NewListener", Func, 0, "func(inner net.Listener, config *Config) net.Listener"}, + {"NewResumptionState", Func, 21, "func(ticket []byte, state *SessionState) (*ClientSessionState, error)"}, + {"NoClientCert", Const, 0, ""}, + {"PKCS1WithSHA1", Const, 8, ""}, + {"PKCS1WithSHA256", Const, 8, ""}, + {"PKCS1WithSHA384", Const, 8, ""}, + {"PKCS1WithSHA512", Const, 8, ""}, + {"PSSWithSHA256", Const, 8, ""}, + {"PSSWithSHA384", Const, 8, ""}, + {"PSSWithSHA512", Const, 8, ""}, + {"ParseSessionState", Func, 21, "func(data []byte) (*SessionState, error)"}, + {"QUICClient", Func, 21, "func(config *QUICConfig) *QUICConn"}, + {"QUICConfig", Type, 21, ""}, + {"QUICConfig.EnableSessionEvents", Field, 23, ""}, + {"QUICConfig.TLSConfig", Field, 21, ""}, + {"QUICConn", Type, 21, ""}, + {"QUICEncryptionLevel", Type, 21, ""}, + {"QUICEncryptionLevelApplication", Const, 21, ""}, + {"QUICEncryptionLevelEarly", Const, 21, ""}, + {"QUICEncryptionLevelHandshake", Const, 21, ""}, + {"QUICEncryptionLevelInitial", Const, 21, ""}, + {"QUICEvent", Type, 21, ""}, + 
{"QUICEvent.Data", Field, 21, ""}, + {"QUICEvent.Kind", Field, 21, ""}, + {"QUICEvent.Level", Field, 21, ""}, + {"QUICEvent.SessionState", Field, 23, ""}, + {"QUICEvent.Suite", Field, 21, ""}, + {"QUICEventKind", Type, 21, ""}, + {"QUICHandshakeDone", Const, 21, ""}, + {"QUICNoEvent", Const, 21, ""}, + {"QUICRejectedEarlyData", Const, 21, ""}, + {"QUICResumeSession", Const, 23, ""}, + {"QUICServer", Func, 21, "func(config *QUICConfig) *QUICConn"}, + {"QUICSessionTicketOptions", Type, 21, ""}, + {"QUICSessionTicketOptions.EarlyData", Field, 21, ""}, + {"QUICSessionTicketOptions.Extra", Field, 23, ""}, + {"QUICSetReadSecret", Const, 21, ""}, + {"QUICSetWriteSecret", Const, 21, ""}, + {"QUICStoreSession", Const, 23, ""}, + {"QUICTransportParameters", Const, 21, ""}, + {"QUICTransportParametersRequired", Const, 21, ""}, + {"QUICWriteData", Const, 21, ""}, + {"RecordHeaderError", Type, 6, ""}, + {"RecordHeaderError.Conn", Field, 12, ""}, + {"RecordHeaderError.Msg", Field, 6, ""}, + {"RecordHeaderError.RecordHeader", Field, 6, ""}, + {"RenegotiateFreelyAsClient", Const, 7, ""}, + {"RenegotiateNever", Const, 7, ""}, + {"RenegotiateOnceAsClient", Const, 7, ""}, + {"RenegotiationSupport", Type, 7, ""}, + {"RequestClientCert", Const, 0, ""}, + {"RequireAndVerifyClientCert", Const, 0, ""}, + {"RequireAnyClientCert", Const, 0, ""}, + {"Server", Func, 0, "func(conn net.Conn, config *Config) *Conn"}, + {"SessionState", Type, 21, ""}, + {"SessionState.EarlyData", Field, 21, ""}, + {"SessionState.Extra", Field, 21, ""}, + {"SignatureScheme", Type, 8, ""}, + {"TLS_AES_128_GCM_SHA256", Const, 12, ""}, + {"TLS_AES_256_GCM_SHA384", Const, 12, ""}, + {"TLS_CHACHA20_POLY1305_SHA256", Const, 12, ""}, + {"TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA", Const, 2, ""}, + {"TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256", Const, 8, ""}, + {"TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256", Const, 2, ""}, + {"TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA", Const, 2, ""}, + {"TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384", Const, 5, ""}, + {"TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305", Const, 8, ""}, + {"TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256", Const, 14, ""}, + {"TLS_ECDHE_ECDSA_WITH_RC4_128_SHA", Const, 2, ""}, + {"TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA", Const, 0, ""}, + {"TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA", Const, 0, ""}, + {"TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256", Const, 8, ""}, + {"TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256", Const, 2, ""}, + {"TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA", Const, 1, ""}, + {"TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384", Const, 5, ""}, + {"TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305", Const, 8, ""}, + {"TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256", Const, 14, ""}, + {"TLS_ECDHE_RSA_WITH_RC4_128_SHA", Const, 0, ""}, + {"TLS_FALLBACK_SCSV", Const, 4, ""}, + {"TLS_RSA_WITH_3DES_EDE_CBC_SHA", Const, 0, ""}, + {"TLS_RSA_WITH_AES_128_CBC_SHA", Const, 0, ""}, + {"TLS_RSA_WITH_AES_128_CBC_SHA256", Const, 8, ""}, + {"TLS_RSA_WITH_AES_128_GCM_SHA256", Const, 6, ""}, + {"TLS_RSA_WITH_AES_256_CBC_SHA", Const, 1, ""}, + {"TLS_RSA_WITH_AES_256_GCM_SHA384", Const, 6, ""}, + {"TLS_RSA_WITH_RC4_128_SHA", Const, 0, ""}, + {"VerifyClientCertIfGiven", Const, 0, ""}, + {"VersionName", Func, 21, "func(version uint16) string"}, + {"VersionSSL30", Const, 2, ""}, + {"VersionTLS10", Const, 2, ""}, + {"VersionTLS11", Const, 2, ""}, + {"VersionTLS12", Const, 2, ""}, + {"VersionTLS13", Const, 12, ""}, + {"X25519", Const, 8, ""}, + {"X25519MLKEM768", Const, 24, ""}, + {"X509KeyPair", Func, 0, "func(certPEMBlock []byte, keyPEMBlock []byte) (Certificate, error)"}, }, 
"crypto/x509": { - {"(*CertPool).AddCert", Method, 0}, - {"(*CertPool).AddCertWithConstraint", Method, 22}, - {"(*CertPool).AppendCertsFromPEM", Method, 0}, - {"(*CertPool).Clone", Method, 19}, - {"(*CertPool).Equal", Method, 19}, - {"(*CertPool).Subjects", Method, 0}, - {"(*Certificate).CheckCRLSignature", Method, 0}, - {"(*Certificate).CheckSignature", Method, 0}, - {"(*Certificate).CheckSignatureFrom", Method, 0}, - {"(*Certificate).CreateCRL", Method, 0}, - {"(*Certificate).Equal", Method, 0}, - {"(*Certificate).Verify", Method, 0}, - {"(*Certificate).VerifyHostname", Method, 0}, - {"(*CertificateRequest).CheckSignature", Method, 5}, - {"(*OID).UnmarshalBinary", Method, 23}, - {"(*OID).UnmarshalText", Method, 23}, - {"(*RevocationList).CheckSignatureFrom", Method, 19}, - {"(CertificateInvalidError).Error", Method, 0}, - {"(ConstraintViolationError).Error", Method, 0}, - {"(HostnameError).Error", Method, 0}, - {"(InsecureAlgorithmError).Error", Method, 6}, - {"(OID).AppendBinary", Method, 24}, - {"(OID).AppendText", Method, 24}, - {"(OID).Equal", Method, 22}, - {"(OID).EqualASN1OID", Method, 22}, - {"(OID).MarshalBinary", Method, 23}, - {"(OID).MarshalText", Method, 23}, - {"(OID).String", Method, 22}, - {"(PublicKeyAlgorithm).String", Method, 10}, - {"(SignatureAlgorithm).String", Method, 6}, - {"(SystemRootsError).Error", Method, 1}, - {"(SystemRootsError).Unwrap", Method, 16}, - {"(UnhandledCriticalExtension).Error", Method, 0}, - {"(UnknownAuthorityError).Error", Method, 0}, - {"CANotAuthorizedForExtKeyUsage", Const, 10}, - {"CANotAuthorizedForThisName", Const, 0}, - {"CertPool", Type, 0}, - {"Certificate", Type, 0}, - {"Certificate.AuthorityKeyId", Field, 0}, - {"Certificate.BasicConstraintsValid", Field, 0}, - {"Certificate.CRLDistributionPoints", Field, 2}, - {"Certificate.DNSNames", Field, 0}, - {"Certificate.EmailAddresses", Field, 0}, - {"Certificate.ExcludedDNSDomains", Field, 9}, - {"Certificate.ExcludedEmailAddresses", Field, 10}, - {"Certificate.ExcludedIPRanges", Field, 10}, - {"Certificate.ExcludedURIDomains", Field, 10}, - {"Certificate.ExtKeyUsage", Field, 0}, - {"Certificate.Extensions", Field, 2}, - {"Certificate.ExtraExtensions", Field, 2}, - {"Certificate.IPAddresses", Field, 1}, - {"Certificate.InhibitAnyPolicy", Field, 24}, - {"Certificate.InhibitAnyPolicyZero", Field, 24}, - {"Certificate.InhibitPolicyMapping", Field, 24}, - {"Certificate.InhibitPolicyMappingZero", Field, 24}, - {"Certificate.IsCA", Field, 0}, - {"Certificate.Issuer", Field, 0}, - {"Certificate.IssuingCertificateURL", Field, 2}, - {"Certificate.KeyUsage", Field, 0}, - {"Certificate.MaxPathLen", Field, 0}, - {"Certificate.MaxPathLenZero", Field, 4}, - {"Certificate.NotAfter", Field, 0}, - {"Certificate.NotBefore", Field, 0}, - {"Certificate.OCSPServer", Field, 2}, - {"Certificate.PermittedDNSDomains", Field, 0}, - {"Certificate.PermittedDNSDomainsCritical", Field, 0}, - {"Certificate.PermittedEmailAddresses", Field, 10}, - {"Certificate.PermittedIPRanges", Field, 10}, - {"Certificate.PermittedURIDomains", Field, 10}, - {"Certificate.Policies", Field, 22}, - {"Certificate.PolicyIdentifiers", Field, 0}, - {"Certificate.PolicyMappings", Field, 24}, - {"Certificate.PublicKey", Field, 0}, - {"Certificate.PublicKeyAlgorithm", Field, 0}, - {"Certificate.Raw", Field, 0}, - {"Certificate.RawIssuer", Field, 0}, - {"Certificate.RawSubject", Field, 0}, - {"Certificate.RawSubjectPublicKeyInfo", Field, 0}, - {"Certificate.RawTBSCertificate", Field, 0}, - {"Certificate.RequireExplicitPolicy", Field, 24}, - 
{"Certificate.RequireExplicitPolicyZero", Field, 24}, - {"Certificate.SerialNumber", Field, 0}, - {"Certificate.Signature", Field, 0}, - {"Certificate.SignatureAlgorithm", Field, 0}, - {"Certificate.Subject", Field, 0}, - {"Certificate.SubjectKeyId", Field, 0}, - {"Certificate.URIs", Field, 10}, - {"Certificate.UnhandledCriticalExtensions", Field, 5}, - {"Certificate.UnknownExtKeyUsage", Field, 0}, - {"Certificate.Version", Field, 0}, - {"CertificateInvalidError", Type, 0}, - {"CertificateInvalidError.Cert", Field, 0}, - {"CertificateInvalidError.Detail", Field, 10}, - {"CertificateInvalidError.Reason", Field, 0}, - {"CertificateRequest", Type, 3}, - {"CertificateRequest.Attributes", Field, 3}, - {"CertificateRequest.DNSNames", Field, 3}, - {"CertificateRequest.EmailAddresses", Field, 3}, - {"CertificateRequest.Extensions", Field, 3}, - {"CertificateRequest.ExtraExtensions", Field, 3}, - {"CertificateRequest.IPAddresses", Field, 3}, - {"CertificateRequest.PublicKey", Field, 3}, - {"CertificateRequest.PublicKeyAlgorithm", Field, 3}, - {"CertificateRequest.Raw", Field, 3}, - {"CertificateRequest.RawSubject", Field, 3}, - {"CertificateRequest.RawSubjectPublicKeyInfo", Field, 3}, - {"CertificateRequest.RawTBSCertificateRequest", Field, 3}, - {"CertificateRequest.Signature", Field, 3}, - {"CertificateRequest.SignatureAlgorithm", Field, 3}, - {"CertificateRequest.Subject", Field, 3}, - {"CertificateRequest.URIs", Field, 10}, - {"CertificateRequest.Version", Field, 3}, - {"ConstraintViolationError", Type, 0}, - {"CreateCertificate", Func, 0}, - {"CreateCertificateRequest", Func, 3}, - {"CreateRevocationList", Func, 15}, - {"DSA", Const, 0}, - {"DSAWithSHA1", Const, 0}, - {"DSAWithSHA256", Const, 0}, - {"DecryptPEMBlock", Func, 1}, - {"ECDSA", Const, 1}, - {"ECDSAWithSHA1", Const, 1}, - {"ECDSAWithSHA256", Const, 1}, - {"ECDSAWithSHA384", Const, 1}, - {"ECDSAWithSHA512", Const, 1}, - {"Ed25519", Const, 13}, - {"EncryptPEMBlock", Func, 1}, - {"ErrUnsupportedAlgorithm", Var, 0}, - {"Expired", Const, 0}, - {"ExtKeyUsage", Type, 0}, - {"ExtKeyUsageAny", Const, 0}, - {"ExtKeyUsageClientAuth", Const, 0}, - {"ExtKeyUsageCodeSigning", Const, 0}, - {"ExtKeyUsageEmailProtection", Const, 0}, - {"ExtKeyUsageIPSECEndSystem", Const, 1}, - {"ExtKeyUsageIPSECTunnel", Const, 1}, - {"ExtKeyUsageIPSECUser", Const, 1}, - {"ExtKeyUsageMicrosoftCommercialCodeSigning", Const, 10}, - {"ExtKeyUsageMicrosoftKernelCodeSigning", Const, 10}, - {"ExtKeyUsageMicrosoftServerGatedCrypto", Const, 1}, - {"ExtKeyUsageNetscapeServerGatedCrypto", Const, 1}, - {"ExtKeyUsageOCSPSigning", Const, 0}, - {"ExtKeyUsageServerAuth", Const, 0}, - {"ExtKeyUsageTimeStamping", Const, 0}, - {"HostnameError", Type, 0}, - {"HostnameError.Certificate", Field, 0}, - {"HostnameError.Host", Field, 0}, - {"IncompatibleUsage", Const, 1}, - {"IncorrectPasswordError", Var, 1}, - {"InsecureAlgorithmError", Type, 6}, - {"InvalidReason", Type, 0}, - {"IsEncryptedPEMBlock", Func, 1}, - {"KeyUsage", Type, 0}, - {"KeyUsageCRLSign", Const, 0}, - {"KeyUsageCertSign", Const, 0}, - {"KeyUsageContentCommitment", Const, 0}, - {"KeyUsageDataEncipherment", Const, 0}, - {"KeyUsageDecipherOnly", Const, 0}, - {"KeyUsageDigitalSignature", Const, 0}, - {"KeyUsageEncipherOnly", Const, 0}, - {"KeyUsageKeyAgreement", Const, 0}, - {"KeyUsageKeyEncipherment", Const, 0}, - {"MD2WithRSA", Const, 0}, - {"MD5WithRSA", Const, 0}, - {"MarshalECPrivateKey", Func, 2}, - {"MarshalPKCS1PrivateKey", Func, 0}, - {"MarshalPKCS1PublicKey", Func, 10}, - {"MarshalPKCS8PrivateKey", Func, 10}, - 
{"MarshalPKIXPublicKey", Func, 0}, - {"NameConstraintsWithoutSANs", Const, 10}, - {"NameMismatch", Const, 8}, - {"NewCertPool", Func, 0}, - {"NoValidChains", Const, 24}, - {"NotAuthorizedToSign", Const, 0}, - {"OID", Type, 22}, - {"OIDFromInts", Func, 22}, - {"PEMCipher", Type, 1}, - {"PEMCipher3DES", Const, 1}, - {"PEMCipherAES128", Const, 1}, - {"PEMCipherAES192", Const, 1}, - {"PEMCipherAES256", Const, 1}, - {"PEMCipherDES", Const, 1}, - {"ParseCRL", Func, 0}, - {"ParseCertificate", Func, 0}, - {"ParseCertificateRequest", Func, 3}, - {"ParseCertificates", Func, 0}, - {"ParseDERCRL", Func, 0}, - {"ParseECPrivateKey", Func, 1}, - {"ParseOID", Func, 23}, - {"ParsePKCS1PrivateKey", Func, 0}, - {"ParsePKCS1PublicKey", Func, 10}, - {"ParsePKCS8PrivateKey", Func, 0}, - {"ParsePKIXPublicKey", Func, 0}, - {"ParseRevocationList", Func, 19}, - {"PolicyMapping", Type, 24}, - {"PolicyMapping.IssuerDomainPolicy", Field, 24}, - {"PolicyMapping.SubjectDomainPolicy", Field, 24}, - {"PublicKeyAlgorithm", Type, 0}, - {"PureEd25519", Const, 13}, - {"RSA", Const, 0}, - {"RevocationList", Type, 15}, - {"RevocationList.AuthorityKeyId", Field, 19}, - {"RevocationList.Extensions", Field, 19}, - {"RevocationList.ExtraExtensions", Field, 15}, - {"RevocationList.Issuer", Field, 19}, - {"RevocationList.NextUpdate", Field, 15}, - {"RevocationList.Number", Field, 15}, - {"RevocationList.Raw", Field, 19}, - {"RevocationList.RawIssuer", Field, 19}, - {"RevocationList.RawTBSRevocationList", Field, 19}, - {"RevocationList.RevokedCertificateEntries", Field, 21}, - {"RevocationList.RevokedCertificates", Field, 15}, - {"RevocationList.Signature", Field, 19}, - {"RevocationList.SignatureAlgorithm", Field, 15}, - {"RevocationList.ThisUpdate", Field, 15}, - {"RevocationListEntry", Type, 21}, - {"RevocationListEntry.Extensions", Field, 21}, - {"RevocationListEntry.ExtraExtensions", Field, 21}, - {"RevocationListEntry.Raw", Field, 21}, - {"RevocationListEntry.ReasonCode", Field, 21}, - {"RevocationListEntry.RevocationTime", Field, 21}, - {"RevocationListEntry.SerialNumber", Field, 21}, - {"SHA1WithRSA", Const, 0}, - {"SHA256WithRSA", Const, 0}, - {"SHA256WithRSAPSS", Const, 8}, - {"SHA384WithRSA", Const, 0}, - {"SHA384WithRSAPSS", Const, 8}, - {"SHA512WithRSA", Const, 0}, - {"SHA512WithRSAPSS", Const, 8}, - {"SetFallbackRoots", Func, 20}, - {"SignatureAlgorithm", Type, 0}, - {"SystemCertPool", Func, 7}, - {"SystemRootsError", Type, 1}, - {"SystemRootsError.Err", Field, 7}, - {"TooManyConstraints", Const, 10}, - {"TooManyIntermediates", Const, 0}, - {"UnconstrainedName", Const, 10}, - {"UnhandledCriticalExtension", Type, 0}, - {"UnknownAuthorityError", Type, 0}, - {"UnknownAuthorityError.Cert", Field, 8}, - {"UnknownPublicKeyAlgorithm", Const, 0}, - {"UnknownSignatureAlgorithm", Const, 0}, - {"VerifyOptions", Type, 0}, - {"VerifyOptions.CertificatePolicies", Field, 24}, - {"VerifyOptions.CurrentTime", Field, 0}, - {"VerifyOptions.DNSName", Field, 0}, - {"VerifyOptions.Intermediates", Field, 0}, - {"VerifyOptions.KeyUsages", Field, 1}, - {"VerifyOptions.MaxConstraintComparisions", Field, 10}, - {"VerifyOptions.Roots", Field, 0}, + {"(*CertPool).AddCert", Method, 0, ""}, + {"(*CertPool).AddCertWithConstraint", Method, 22, ""}, + {"(*CertPool).AppendCertsFromPEM", Method, 0, ""}, + {"(*CertPool).Clone", Method, 19, ""}, + {"(*CertPool).Equal", Method, 19, ""}, + {"(*CertPool).Subjects", Method, 0, ""}, + {"(*Certificate).CheckCRLSignature", Method, 0, ""}, + {"(*Certificate).CheckSignature", Method, 0, ""}, + 
{"(*Certificate).CheckSignatureFrom", Method, 0, ""}, + {"(*Certificate).CreateCRL", Method, 0, ""}, + {"(*Certificate).Equal", Method, 0, ""}, + {"(*Certificate).Verify", Method, 0, ""}, + {"(*Certificate).VerifyHostname", Method, 0, ""}, + {"(*CertificateRequest).CheckSignature", Method, 5, ""}, + {"(*OID).UnmarshalBinary", Method, 23, ""}, + {"(*OID).UnmarshalText", Method, 23, ""}, + {"(*RevocationList).CheckSignatureFrom", Method, 19, ""}, + {"(CertificateInvalidError).Error", Method, 0, ""}, + {"(ConstraintViolationError).Error", Method, 0, ""}, + {"(HostnameError).Error", Method, 0, ""}, + {"(InsecureAlgorithmError).Error", Method, 6, ""}, + {"(OID).AppendBinary", Method, 24, ""}, + {"(OID).AppendText", Method, 24, ""}, + {"(OID).Equal", Method, 22, ""}, + {"(OID).EqualASN1OID", Method, 22, ""}, + {"(OID).MarshalBinary", Method, 23, ""}, + {"(OID).MarshalText", Method, 23, ""}, + {"(OID).String", Method, 22, ""}, + {"(PublicKeyAlgorithm).String", Method, 10, ""}, + {"(SignatureAlgorithm).String", Method, 6, ""}, + {"(SystemRootsError).Error", Method, 1, ""}, + {"(SystemRootsError).Unwrap", Method, 16, ""}, + {"(UnhandledCriticalExtension).Error", Method, 0, ""}, + {"(UnknownAuthorityError).Error", Method, 0, ""}, + {"CANotAuthorizedForExtKeyUsage", Const, 10, ""}, + {"CANotAuthorizedForThisName", Const, 0, ""}, + {"CertPool", Type, 0, ""}, + {"Certificate", Type, 0, ""}, + {"Certificate.AuthorityKeyId", Field, 0, ""}, + {"Certificate.BasicConstraintsValid", Field, 0, ""}, + {"Certificate.CRLDistributionPoints", Field, 2, ""}, + {"Certificate.DNSNames", Field, 0, ""}, + {"Certificate.EmailAddresses", Field, 0, ""}, + {"Certificate.ExcludedDNSDomains", Field, 9, ""}, + {"Certificate.ExcludedEmailAddresses", Field, 10, ""}, + {"Certificate.ExcludedIPRanges", Field, 10, ""}, + {"Certificate.ExcludedURIDomains", Field, 10, ""}, + {"Certificate.ExtKeyUsage", Field, 0, ""}, + {"Certificate.Extensions", Field, 2, ""}, + {"Certificate.ExtraExtensions", Field, 2, ""}, + {"Certificate.IPAddresses", Field, 1, ""}, + {"Certificate.InhibitAnyPolicy", Field, 24, ""}, + {"Certificate.InhibitAnyPolicyZero", Field, 24, ""}, + {"Certificate.InhibitPolicyMapping", Field, 24, ""}, + {"Certificate.InhibitPolicyMappingZero", Field, 24, ""}, + {"Certificate.IsCA", Field, 0, ""}, + {"Certificate.Issuer", Field, 0, ""}, + {"Certificate.IssuingCertificateURL", Field, 2, ""}, + {"Certificate.KeyUsage", Field, 0, ""}, + {"Certificate.MaxPathLen", Field, 0, ""}, + {"Certificate.MaxPathLenZero", Field, 4, ""}, + {"Certificate.NotAfter", Field, 0, ""}, + {"Certificate.NotBefore", Field, 0, ""}, + {"Certificate.OCSPServer", Field, 2, ""}, + {"Certificate.PermittedDNSDomains", Field, 0, ""}, + {"Certificate.PermittedDNSDomainsCritical", Field, 0, ""}, + {"Certificate.PermittedEmailAddresses", Field, 10, ""}, + {"Certificate.PermittedIPRanges", Field, 10, ""}, + {"Certificate.PermittedURIDomains", Field, 10, ""}, + {"Certificate.Policies", Field, 22, ""}, + {"Certificate.PolicyIdentifiers", Field, 0, ""}, + {"Certificate.PolicyMappings", Field, 24, ""}, + {"Certificate.PublicKey", Field, 0, ""}, + {"Certificate.PublicKeyAlgorithm", Field, 0, ""}, + {"Certificate.Raw", Field, 0, ""}, + {"Certificate.RawIssuer", Field, 0, ""}, + {"Certificate.RawSubject", Field, 0, ""}, + {"Certificate.RawSubjectPublicKeyInfo", Field, 0, ""}, + {"Certificate.RawTBSCertificate", Field, 0, ""}, + {"Certificate.RequireExplicitPolicy", Field, 24, ""}, + {"Certificate.RequireExplicitPolicyZero", Field, 24, ""}, + 
{"Certificate.SerialNumber", Field, 0, ""}, + {"Certificate.Signature", Field, 0, ""}, + {"Certificate.SignatureAlgorithm", Field, 0, ""}, + {"Certificate.Subject", Field, 0, ""}, + {"Certificate.SubjectKeyId", Field, 0, ""}, + {"Certificate.URIs", Field, 10, ""}, + {"Certificate.UnhandledCriticalExtensions", Field, 5, ""}, + {"Certificate.UnknownExtKeyUsage", Field, 0, ""}, + {"Certificate.Version", Field, 0, ""}, + {"CertificateInvalidError", Type, 0, ""}, + {"CertificateInvalidError.Cert", Field, 0, ""}, + {"CertificateInvalidError.Detail", Field, 10, ""}, + {"CertificateInvalidError.Reason", Field, 0, ""}, + {"CertificateRequest", Type, 3, ""}, + {"CertificateRequest.Attributes", Field, 3, ""}, + {"CertificateRequest.DNSNames", Field, 3, ""}, + {"CertificateRequest.EmailAddresses", Field, 3, ""}, + {"CertificateRequest.Extensions", Field, 3, ""}, + {"CertificateRequest.ExtraExtensions", Field, 3, ""}, + {"CertificateRequest.IPAddresses", Field, 3, ""}, + {"CertificateRequest.PublicKey", Field, 3, ""}, + {"CertificateRequest.PublicKeyAlgorithm", Field, 3, ""}, + {"CertificateRequest.Raw", Field, 3, ""}, + {"CertificateRequest.RawSubject", Field, 3, ""}, + {"CertificateRequest.RawSubjectPublicKeyInfo", Field, 3, ""}, + {"CertificateRequest.RawTBSCertificateRequest", Field, 3, ""}, + {"CertificateRequest.Signature", Field, 3, ""}, + {"CertificateRequest.SignatureAlgorithm", Field, 3, ""}, + {"CertificateRequest.Subject", Field, 3, ""}, + {"CertificateRequest.URIs", Field, 10, ""}, + {"CertificateRequest.Version", Field, 3, ""}, + {"ConstraintViolationError", Type, 0, ""}, + {"CreateCertificate", Func, 0, "func(rand io.Reader, template *Certificate, parent *Certificate, pub any, priv any) ([]byte, error)"}, + {"CreateCertificateRequest", Func, 3, "func(rand io.Reader, template *CertificateRequest, priv any) (csr []byte, err error)"}, + {"CreateRevocationList", Func, 15, "func(rand io.Reader, template *RevocationList, issuer *Certificate, priv crypto.Signer) ([]byte, error)"}, + {"DSA", Const, 0, ""}, + {"DSAWithSHA1", Const, 0, ""}, + {"DSAWithSHA256", Const, 0, ""}, + {"DecryptPEMBlock", Func, 1, "func(b *pem.Block, password []byte) ([]byte, error)"}, + {"ECDSA", Const, 1, ""}, + {"ECDSAWithSHA1", Const, 1, ""}, + {"ECDSAWithSHA256", Const, 1, ""}, + {"ECDSAWithSHA384", Const, 1, ""}, + {"ECDSAWithSHA512", Const, 1, ""}, + {"Ed25519", Const, 13, ""}, + {"EncryptPEMBlock", Func, 1, "func(rand io.Reader, blockType string, data []byte, password []byte, alg PEMCipher) (*pem.Block, error)"}, + {"ErrUnsupportedAlgorithm", Var, 0, ""}, + {"Expired", Const, 0, ""}, + {"ExtKeyUsage", Type, 0, ""}, + {"ExtKeyUsageAny", Const, 0, ""}, + {"ExtKeyUsageClientAuth", Const, 0, ""}, + {"ExtKeyUsageCodeSigning", Const, 0, ""}, + {"ExtKeyUsageEmailProtection", Const, 0, ""}, + {"ExtKeyUsageIPSECEndSystem", Const, 1, ""}, + {"ExtKeyUsageIPSECTunnel", Const, 1, ""}, + {"ExtKeyUsageIPSECUser", Const, 1, ""}, + {"ExtKeyUsageMicrosoftCommercialCodeSigning", Const, 10, ""}, + {"ExtKeyUsageMicrosoftKernelCodeSigning", Const, 10, ""}, + {"ExtKeyUsageMicrosoftServerGatedCrypto", Const, 1, ""}, + {"ExtKeyUsageNetscapeServerGatedCrypto", Const, 1, ""}, + {"ExtKeyUsageOCSPSigning", Const, 0, ""}, + {"ExtKeyUsageServerAuth", Const, 0, ""}, + {"ExtKeyUsageTimeStamping", Const, 0, ""}, + {"HostnameError", Type, 0, ""}, + {"HostnameError.Certificate", Field, 0, ""}, + {"HostnameError.Host", Field, 0, ""}, + {"IncompatibleUsage", Const, 1, ""}, + {"IncorrectPasswordError", Var, 1, ""}, + {"InsecureAlgorithmError", Type, 6, 
""}, + {"InvalidReason", Type, 0, ""}, + {"IsEncryptedPEMBlock", Func, 1, "func(b *pem.Block) bool"}, + {"KeyUsage", Type, 0, ""}, + {"KeyUsageCRLSign", Const, 0, ""}, + {"KeyUsageCertSign", Const, 0, ""}, + {"KeyUsageContentCommitment", Const, 0, ""}, + {"KeyUsageDataEncipherment", Const, 0, ""}, + {"KeyUsageDecipherOnly", Const, 0, ""}, + {"KeyUsageDigitalSignature", Const, 0, ""}, + {"KeyUsageEncipherOnly", Const, 0, ""}, + {"KeyUsageKeyAgreement", Const, 0, ""}, + {"KeyUsageKeyEncipherment", Const, 0, ""}, + {"MD2WithRSA", Const, 0, ""}, + {"MD5WithRSA", Const, 0, ""}, + {"MarshalECPrivateKey", Func, 2, "func(key *ecdsa.PrivateKey) ([]byte, error)"}, + {"MarshalPKCS1PrivateKey", Func, 0, "func(key *rsa.PrivateKey) []byte"}, + {"MarshalPKCS1PublicKey", Func, 10, "func(key *rsa.PublicKey) []byte"}, + {"MarshalPKCS8PrivateKey", Func, 10, "func(key any) ([]byte, error)"}, + {"MarshalPKIXPublicKey", Func, 0, "func(pub any) ([]byte, error)"}, + {"NameConstraintsWithoutSANs", Const, 10, ""}, + {"NameMismatch", Const, 8, ""}, + {"NewCertPool", Func, 0, "func() *CertPool"}, + {"NoValidChains", Const, 24, ""}, + {"NotAuthorizedToSign", Const, 0, ""}, + {"OID", Type, 22, ""}, + {"OIDFromInts", Func, 22, "func(oid []uint64) (OID, error)"}, + {"PEMCipher", Type, 1, ""}, + {"PEMCipher3DES", Const, 1, ""}, + {"PEMCipherAES128", Const, 1, ""}, + {"PEMCipherAES192", Const, 1, ""}, + {"PEMCipherAES256", Const, 1, ""}, + {"PEMCipherDES", Const, 1, ""}, + {"ParseCRL", Func, 0, "func(crlBytes []byte) (*pkix.CertificateList, error)"}, + {"ParseCertificate", Func, 0, "func(der []byte) (*Certificate, error)"}, + {"ParseCertificateRequest", Func, 3, "func(asn1Data []byte) (*CertificateRequest, error)"}, + {"ParseCertificates", Func, 0, "func(der []byte) ([]*Certificate, error)"}, + {"ParseDERCRL", Func, 0, "func(derBytes []byte) (*pkix.CertificateList, error)"}, + {"ParseECPrivateKey", Func, 1, "func(der []byte) (*ecdsa.PrivateKey, error)"}, + {"ParseOID", Func, 23, "func(oid string) (OID, error)"}, + {"ParsePKCS1PrivateKey", Func, 0, "func(der []byte) (*rsa.PrivateKey, error)"}, + {"ParsePKCS1PublicKey", Func, 10, "func(der []byte) (*rsa.PublicKey, error)"}, + {"ParsePKCS8PrivateKey", Func, 0, "func(der []byte) (key any, err error)"}, + {"ParsePKIXPublicKey", Func, 0, "func(derBytes []byte) (pub any, err error)"}, + {"ParseRevocationList", Func, 19, "func(der []byte) (*RevocationList, error)"}, + {"PolicyMapping", Type, 24, ""}, + {"PolicyMapping.IssuerDomainPolicy", Field, 24, ""}, + {"PolicyMapping.SubjectDomainPolicy", Field, 24, ""}, + {"PublicKeyAlgorithm", Type, 0, ""}, + {"PureEd25519", Const, 13, ""}, + {"RSA", Const, 0, ""}, + {"RevocationList", Type, 15, ""}, + {"RevocationList.AuthorityKeyId", Field, 19, ""}, + {"RevocationList.Extensions", Field, 19, ""}, + {"RevocationList.ExtraExtensions", Field, 15, ""}, + {"RevocationList.Issuer", Field, 19, ""}, + {"RevocationList.NextUpdate", Field, 15, ""}, + {"RevocationList.Number", Field, 15, ""}, + {"RevocationList.Raw", Field, 19, ""}, + {"RevocationList.RawIssuer", Field, 19, ""}, + {"RevocationList.RawTBSRevocationList", Field, 19, ""}, + {"RevocationList.RevokedCertificateEntries", Field, 21, ""}, + {"RevocationList.RevokedCertificates", Field, 15, ""}, + {"RevocationList.Signature", Field, 19, ""}, + {"RevocationList.SignatureAlgorithm", Field, 15, ""}, + {"RevocationList.ThisUpdate", Field, 15, ""}, + {"RevocationListEntry", Type, 21, ""}, + {"RevocationListEntry.Extensions", Field, 21, ""}, + {"RevocationListEntry.ExtraExtensions", Field, 21, 
""}, + {"RevocationListEntry.Raw", Field, 21, ""}, + {"RevocationListEntry.ReasonCode", Field, 21, ""}, + {"RevocationListEntry.RevocationTime", Field, 21, ""}, + {"RevocationListEntry.SerialNumber", Field, 21, ""}, + {"SHA1WithRSA", Const, 0, ""}, + {"SHA256WithRSA", Const, 0, ""}, + {"SHA256WithRSAPSS", Const, 8, ""}, + {"SHA384WithRSA", Const, 0, ""}, + {"SHA384WithRSAPSS", Const, 8, ""}, + {"SHA512WithRSA", Const, 0, ""}, + {"SHA512WithRSAPSS", Const, 8, ""}, + {"SetFallbackRoots", Func, 20, "func(roots *CertPool)"}, + {"SignatureAlgorithm", Type, 0, ""}, + {"SystemCertPool", Func, 7, "func() (*CertPool, error)"}, + {"SystemRootsError", Type, 1, ""}, + {"SystemRootsError.Err", Field, 7, ""}, + {"TooManyConstraints", Const, 10, ""}, + {"TooManyIntermediates", Const, 0, ""}, + {"UnconstrainedName", Const, 10, ""}, + {"UnhandledCriticalExtension", Type, 0, ""}, + {"UnknownAuthorityError", Type, 0, ""}, + {"UnknownAuthorityError.Cert", Field, 8, ""}, + {"UnknownPublicKeyAlgorithm", Const, 0, ""}, + {"UnknownSignatureAlgorithm", Const, 0, ""}, + {"VerifyOptions", Type, 0, ""}, + {"VerifyOptions.CertificatePolicies", Field, 24, ""}, + {"VerifyOptions.CurrentTime", Field, 0, ""}, + {"VerifyOptions.DNSName", Field, 0, ""}, + {"VerifyOptions.Intermediates", Field, 0, ""}, + {"VerifyOptions.KeyUsages", Field, 1, ""}, + {"VerifyOptions.MaxConstraintComparisions", Field, 10, ""}, + {"VerifyOptions.Roots", Field, 0, ""}, }, "crypto/x509/pkix": { - {"(*CertificateList).HasExpired", Method, 0}, - {"(*Name).FillFromRDNSequence", Method, 0}, - {"(Name).String", Method, 10}, - {"(Name).ToRDNSequence", Method, 0}, - {"(RDNSequence).String", Method, 10}, - {"AlgorithmIdentifier", Type, 0}, - {"AlgorithmIdentifier.Algorithm", Field, 0}, - {"AlgorithmIdentifier.Parameters", Field, 0}, - {"AttributeTypeAndValue", Type, 0}, - {"AttributeTypeAndValue.Type", Field, 0}, - {"AttributeTypeAndValue.Value", Field, 0}, - {"AttributeTypeAndValueSET", Type, 3}, - {"AttributeTypeAndValueSET.Type", Field, 3}, - {"AttributeTypeAndValueSET.Value", Field, 3}, - {"CertificateList", Type, 0}, - {"CertificateList.SignatureAlgorithm", Field, 0}, - {"CertificateList.SignatureValue", Field, 0}, - {"CertificateList.TBSCertList", Field, 0}, - {"Extension", Type, 0}, - {"Extension.Critical", Field, 0}, - {"Extension.Id", Field, 0}, - {"Extension.Value", Field, 0}, - {"Name", Type, 0}, - {"Name.CommonName", Field, 0}, - {"Name.Country", Field, 0}, - {"Name.ExtraNames", Field, 5}, - {"Name.Locality", Field, 0}, - {"Name.Names", Field, 0}, - {"Name.Organization", Field, 0}, - {"Name.OrganizationalUnit", Field, 0}, - {"Name.PostalCode", Field, 0}, - {"Name.Province", Field, 0}, - {"Name.SerialNumber", Field, 0}, - {"Name.StreetAddress", Field, 0}, - {"RDNSequence", Type, 0}, - {"RelativeDistinguishedNameSET", Type, 0}, - {"RevokedCertificate", Type, 0}, - {"RevokedCertificate.Extensions", Field, 0}, - {"RevokedCertificate.RevocationTime", Field, 0}, - {"RevokedCertificate.SerialNumber", Field, 0}, - {"TBSCertificateList", Type, 0}, - {"TBSCertificateList.Extensions", Field, 0}, - {"TBSCertificateList.Issuer", Field, 0}, - {"TBSCertificateList.NextUpdate", Field, 0}, - {"TBSCertificateList.Raw", Field, 0}, - {"TBSCertificateList.RevokedCertificates", Field, 0}, - {"TBSCertificateList.Signature", Field, 0}, - {"TBSCertificateList.ThisUpdate", Field, 0}, - {"TBSCertificateList.Version", Field, 0}, + {"(*CertificateList).HasExpired", Method, 0, ""}, + {"(*Name).FillFromRDNSequence", Method, 0, ""}, + {"(Name).String", Method, 10, ""}, + 
{"(Name).ToRDNSequence", Method, 0, ""}, + {"(RDNSequence).String", Method, 10, ""}, + {"AlgorithmIdentifier", Type, 0, ""}, + {"AlgorithmIdentifier.Algorithm", Field, 0, ""}, + {"AlgorithmIdentifier.Parameters", Field, 0, ""}, + {"AttributeTypeAndValue", Type, 0, ""}, + {"AttributeTypeAndValue.Type", Field, 0, ""}, + {"AttributeTypeAndValue.Value", Field, 0, ""}, + {"AttributeTypeAndValueSET", Type, 3, ""}, + {"AttributeTypeAndValueSET.Type", Field, 3, ""}, + {"AttributeTypeAndValueSET.Value", Field, 3, ""}, + {"CertificateList", Type, 0, ""}, + {"CertificateList.SignatureAlgorithm", Field, 0, ""}, + {"CertificateList.SignatureValue", Field, 0, ""}, + {"CertificateList.TBSCertList", Field, 0, ""}, + {"Extension", Type, 0, ""}, + {"Extension.Critical", Field, 0, ""}, + {"Extension.Id", Field, 0, ""}, + {"Extension.Value", Field, 0, ""}, + {"Name", Type, 0, ""}, + {"Name.CommonName", Field, 0, ""}, + {"Name.Country", Field, 0, ""}, + {"Name.ExtraNames", Field, 5, ""}, + {"Name.Locality", Field, 0, ""}, + {"Name.Names", Field, 0, ""}, + {"Name.Organization", Field, 0, ""}, + {"Name.OrganizationalUnit", Field, 0, ""}, + {"Name.PostalCode", Field, 0, ""}, + {"Name.Province", Field, 0, ""}, + {"Name.SerialNumber", Field, 0, ""}, + {"Name.StreetAddress", Field, 0, ""}, + {"RDNSequence", Type, 0, ""}, + {"RelativeDistinguishedNameSET", Type, 0, ""}, + {"RevokedCertificate", Type, 0, ""}, + {"RevokedCertificate.Extensions", Field, 0, ""}, + {"RevokedCertificate.RevocationTime", Field, 0, ""}, + {"RevokedCertificate.SerialNumber", Field, 0, ""}, + {"TBSCertificateList", Type, 0, ""}, + {"TBSCertificateList.Extensions", Field, 0, ""}, + {"TBSCertificateList.Issuer", Field, 0, ""}, + {"TBSCertificateList.NextUpdate", Field, 0, ""}, + {"TBSCertificateList.Raw", Field, 0, ""}, + {"TBSCertificateList.RevokedCertificates", Field, 0, ""}, + {"TBSCertificateList.Signature", Field, 0, ""}, + {"TBSCertificateList.ThisUpdate", Field, 0, ""}, + {"TBSCertificateList.Version", Field, 0, ""}, }, "database/sql": { - {"(*ColumnType).DatabaseTypeName", Method, 8}, - {"(*ColumnType).DecimalSize", Method, 8}, - {"(*ColumnType).Length", Method, 8}, - {"(*ColumnType).Name", Method, 8}, - {"(*ColumnType).Nullable", Method, 8}, - {"(*ColumnType).ScanType", Method, 8}, - {"(*Conn).BeginTx", Method, 9}, - {"(*Conn).Close", Method, 9}, - {"(*Conn).ExecContext", Method, 9}, - {"(*Conn).PingContext", Method, 9}, - {"(*Conn).PrepareContext", Method, 9}, - {"(*Conn).QueryContext", Method, 9}, - {"(*Conn).QueryRowContext", Method, 9}, - {"(*Conn).Raw", Method, 13}, - {"(*DB).Begin", Method, 0}, - {"(*DB).BeginTx", Method, 8}, - {"(*DB).Close", Method, 0}, - {"(*DB).Conn", Method, 9}, - {"(*DB).Driver", Method, 0}, - {"(*DB).Exec", Method, 0}, - {"(*DB).ExecContext", Method, 8}, - {"(*DB).Ping", Method, 1}, - {"(*DB).PingContext", Method, 8}, - {"(*DB).Prepare", Method, 0}, - {"(*DB).PrepareContext", Method, 8}, - {"(*DB).Query", Method, 0}, - {"(*DB).QueryContext", Method, 8}, - {"(*DB).QueryRow", Method, 0}, - {"(*DB).QueryRowContext", Method, 8}, - {"(*DB).SetConnMaxIdleTime", Method, 15}, - {"(*DB).SetConnMaxLifetime", Method, 6}, - {"(*DB).SetMaxIdleConns", Method, 1}, - {"(*DB).SetMaxOpenConns", Method, 2}, - {"(*DB).Stats", Method, 5}, - {"(*Null).Scan", Method, 22}, - {"(*NullBool).Scan", Method, 0}, - {"(*NullByte).Scan", Method, 17}, - {"(*NullFloat64).Scan", Method, 0}, - {"(*NullInt16).Scan", Method, 17}, - {"(*NullInt32).Scan", Method, 13}, - {"(*NullInt64).Scan", Method, 0}, - {"(*NullString).Scan", Method, 0}, - 
{"(*NullTime).Scan", Method, 13}, - {"(*Row).Err", Method, 15}, - {"(*Row).Scan", Method, 0}, - {"(*Rows).Close", Method, 0}, - {"(*Rows).ColumnTypes", Method, 8}, - {"(*Rows).Columns", Method, 0}, - {"(*Rows).Err", Method, 0}, - {"(*Rows).Next", Method, 0}, - {"(*Rows).NextResultSet", Method, 8}, - {"(*Rows).Scan", Method, 0}, - {"(*Stmt).Close", Method, 0}, - {"(*Stmt).Exec", Method, 0}, - {"(*Stmt).ExecContext", Method, 8}, - {"(*Stmt).Query", Method, 0}, - {"(*Stmt).QueryContext", Method, 8}, - {"(*Stmt).QueryRow", Method, 0}, - {"(*Stmt).QueryRowContext", Method, 8}, - {"(*Tx).Commit", Method, 0}, - {"(*Tx).Exec", Method, 0}, - {"(*Tx).ExecContext", Method, 8}, - {"(*Tx).Prepare", Method, 0}, - {"(*Tx).PrepareContext", Method, 8}, - {"(*Tx).Query", Method, 0}, - {"(*Tx).QueryContext", Method, 8}, - {"(*Tx).QueryRow", Method, 0}, - {"(*Tx).QueryRowContext", Method, 8}, - {"(*Tx).Rollback", Method, 0}, - {"(*Tx).Stmt", Method, 0}, - {"(*Tx).StmtContext", Method, 8}, - {"(IsolationLevel).String", Method, 11}, - {"(Null).Value", Method, 22}, - {"(NullBool).Value", Method, 0}, - {"(NullByte).Value", Method, 17}, - {"(NullFloat64).Value", Method, 0}, - {"(NullInt16).Value", Method, 17}, - {"(NullInt32).Value", Method, 13}, - {"(NullInt64).Value", Method, 0}, - {"(NullString).Value", Method, 0}, - {"(NullTime).Value", Method, 13}, - {"ColumnType", Type, 8}, - {"Conn", Type, 9}, - {"DB", Type, 0}, - {"DBStats", Type, 5}, - {"DBStats.Idle", Field, 11}, - {"DBStats.InUse", Field, 11}, - {"DBStats.MaxIdleClosed", Field, 11}, - {"DBStats.MaxIdleTimeClosed", Field, 15}, - {"DBStats.MaxLifetimeClosed", Field, 11}, - {"DBStats.MaxOpenConnections", Field, 11}, - {"DBStats.OpenConnections", Field, 5}, - {"DBStats.WaitCount", Field, 11}, - {"DBStats.WaitDuration", Field, 11}, - {"Drivers", Func, 4}, - {"ErrConnDone", Var, 9}, - {"ErrNoRows", Var, 0}, - {"ErrTxDone", Var, 0}, - {"IsolationLevel", Type, 8}, - {"LevelDefault", Const, 8}, - {"LevelLinearizable", Const, 8}, - {"LevelReadCommitted", Const, 8}, - {"LevelReadUncommitted", Const, 8}, - {"LevelRepeatableRead", Const, 8}, - {"LevelSerializable", Const, 8}, - {"LevelSnapshot", Const, 8}, - {"LevelWriteCommitted", Const, 8}, - {"Named", Func, 8}, - {"NamedArg", Type, 8}, - {"NamedArg.Name", Field, 8}, - {"NamedArg.Value", Field, 8}, - {"Null", Type, 22}, - {"Null.V", Field, 22}, - {"Null.Valid", Field, 22}, - {"NullBool", Type, 0}, - {"NullBool.Bool", Field, 0}, - {"NullBool.Valid", Field, 0}, - {"NullByte", Type, 17}, - {"NullByte.Byte", Field, 17}, - {"NullByte.Valid", Field, 17}, - {"NullFloat64", Type, 0}, - {"NullFloat64.Float64", Field, 0}, - {"NullFloat64.Valid", Field, 0}, - {"NullInt16", Type, 17}, - {"NullInt16.Int16", Field, 17}, - {"NullInt16.Valid", Field, 17}, - {"NullInt32", Type, 13}, - {"NullInt32.Int32", Field, 13}, - {"NullInt32.Valid", Field, 13}, - {"NullInt64", Type, 0}, - {"NullInt64.Int64", Field, 0}, - {"NullInt64.Valid", Field, 0}, - {"NullString", Type, 0}, - {"NullString.String", Field, 0}, - {"NullString.Valid", Field, 0}, - {"NullTime", Type, 13}, - {"NullTime.Time", Field, 13}, - {"NullTime.Valid", Field, 13}, - {"Open", Func, 0}, - {"OpenDB", Func, 10}, - {"Out", Type, 9}, - {"Out.Dest", Field, 9}, - {"Out.In", Field, 9}, - {"RawBytes", Type, 0}, - {"Register", Func, 0}, - {"Result", Type, 0}, - {"Row", Type, 0}, - {"Rows", Type, 0}, - {"Scanner", Type, 0}, - {"Stmt", Type, 0}, - {"Tx", Type, 0}, - {"TxOptions", Type, 8}, - {"TxOptions.Isolation", Field, 8}, - {"TxOptions.ReadOnly", Field, 8}, + 
{"(*ColumnType).DatabaseTypeName", Method, 8, ""}, + {"(*ColumnType).DecimalSize", Method, 8, ""}, + {"(*ColumnType).Length", Method, 8, ""}, + {"(*ColumnType).Name", Method, 8, ""}, + {"(*ColumnType).Nullable", Method, 8, ""}, + {"(*ColumnType).ScanType", Method, 8, ""}, + {"(*Conn).BeginTx", Method, 9, ""}, + {"(*Conn).Close", Method, 9, ""}, + {"(*Conn).ExecContext", Method, 9, ""}, + {"(*Conn).PingContext", Method, 9, ""}, + {"(*Conn).PrepareContext", Method, 9, ""}, + {"(*Conn).QueryContext", Method, 9, ""}, + {"(*Conn).QueryRowContext", Method, 9, ""}, + {"(*Conn).Raw", Method, 13, ""}, + {"(*DB).Begin", Method, 0, ""}, + {"(*DB).BeginTx", Method, 8, ""}, + {"(*DB).Close", Method, 0, ""}, + {"(*DB).Conn", Method, 9, ""}, + {"(*DB).Driver", Method, 0, ""}, + {"(*DB).Exec", Method, 0, ""}, + {"(*DB).ExecContext", Method, 8, ""}, + {"(*DB).Ping", Method, 1, ""}, + {"(*DB).PingContext", Method, 8, ""}, + {"(*DB).Prepare", Method, 0, ""}, + {"(*DB).PrepareContext", Method, 8, ""}, + {"(*DB).Query", Method, 0, ""}, + {"(*DB).QueryContext", Method, 8, ""}, + {"(*DB).QueryRow", Method, 0, ""}, + {"(*DB).QueryRowContext", Method, 8, ""}, + {"(*DB).SetConnMaxIdleTime", Method, 15, ""}, + {"(*DB).SetConnMaxLifetime", Method, 6, ""}, + {"(*DB).SetMaxIdleConns", Method, 1, ""}, + {"(*DB).SetMaxOpenConns", Method, 2, ""}, + {"(*DB).Stats", Method, 5, ""}, + {"(*Null).Scan", Method, 22, ""}, + {"(*NullBool).Scan", Method, 0, ""}, + {"(*NullByte).Scan", Method, 17, ""}, + {"(*NullFloat64).Scan", Method, 0, ""}, + {"(*NullInt16).Scan", Method, 17, ""}, + {"(*NullInt32).Scan", Method, 13, ""}, + {"(*NullInt64).Scan", Method, 0, ""}, + {"(*NullString).Scan", Method, 0, ""}, + {"(*NullTime).Scan", Method, 13, ""}, + {"(*Row).Err", Method, 15, ""}, + {"(*Row).Scan", Method, 0, ""}, + {"(*Rows).Close", Method, 0, ""}, + {"(*Rows).ColumnTypes", Method, 8, ""}, + {"(*Rows).Columns", Method, 0, ""}, + {"(*Rows).Err", Method, 0, ""}, + {"(*Rows).Next", Method, 0, ""}, + {"(*Rows).NextResultSet", Method, 8, ""}, + {"(*Rows).Scan", Method, 0, ""}, + {"(*Stmt).Close", Method, 0, ""}, + {"(*Stmt).Exec", Method, 0, ""}, + {"(*Stmt).ExecContext", Method, 8, ""}, + {"(*Stmt).Query", Method, 0, ""}, + {"(*Stmt).QueryContext", Method, 8, ""}, + {"(*Stmt).QueryRow", Method, 0, ""}, + {"(*Stmt).QueryRowContext", Method, 8, ""}, + {"(*Tx).Commit", Method, 0, ""}, + {"(*Tx).Exec", Method, 0, ""}, + {"(*Tx).ExecContext", Method, 8, ""}, + {"(*Tx).Prepare", Method, 0, ""}, + {"(*Tx).PrepareContext", Method, 8, ""}, + {"(*Tx).Query", Method, 0, ""}, + {"(*Tx).QueryContext", Method, 8, ""}, + {"(*Tx).QueryRow", Method, 0, ""}, + {"(*Tx).QueryRowContext", Method, 8, ""}, + {"(*Tx).Rollback", Method, 0, ""}, + {"(*Tx).Stmt", Method, 0, ""}, + {"(*Tx).StmtContext", Method, 8, ""}, + {"(IsolationLevel).String", Method, 11, ""}, + {"(Null).Value", Method, 22, ""}, + {"(NullBool).Value", Method, 0, ""}, + {"(NullByte).Value", Method, 17, ""}, + {"(NullFloat64).Value", Method, 0, ""}, + {"(NullInt16).Value", Method, 17, ""}, + {"(NullInt32).Value", Method, 13, ""}, + {"(NullInt64).Value", Method, 0, ""}, + {"(NullString).Value", Method, 0, ""}, + {"(NullTime).Value", Method, 13, ""}, + {"ColumnType", Type, 8, ""}, + {"Conn", Type, 9, ""}, + {"DB", Type, 0, ""}, + {"DBStats", Type, 5, ""}, + {"DBStats.Idle", Field, 11, ""}, + {"DBStats.InUse", Field, 11, ""}, + {"DBStats.MaxIdleClosed", Field, 11, ""}, + {"DBStats.MaxIdleTimeClosed", Field, 15, ""}, + {"DBStats.MaxLifetimeClosed", Field, 11, ""}, + {"DBStats.MaxOpenConnections", 
Field, 11, ""}, + {"DBStats.OpenConnections", Field, 5, ""}, + {"DBStats.WaitCount", Field, 11, ""}, + {"DBStats.WaitDuration", Field, 11, ""}, + {"Drivers", Func, 4, "func() []string"}, + {"ErrConnDone", Var, 9, ""}, + {"ErrNoRows", Var, 0, ""}, + {"ErrTxDone", Var, 0, ""}, + {"IsolationLevel", Type, 8, ""}, + {"LevelDefault", Const, 8, ""}, + {"LevelLinearizable", Const, 8, ""}, + {"LevelReadCommitted", Const, 8, ""}, + {"LevelReadUncommitted", Const, 8, ""}, + {"LevelRepeatableRead", Const, 8, ""}, + {"LevelSerializable", Const, 8, ""}, + {"LevelSnapshot", Const, 8, ""}, + {"LevelWriteCommitted", Const, 8, ""}, + {"Named", Func, 8, "func(name string, value any) NamedArg"}, + {"NamedArg", Type, 8, ""}, + {"NamedArg.Name", Field, 8, ""}, + {"NamedArg.Value", Field, 8, ""}, + {"Null", Type, 22, ""}, + {"Null.V", Field, 22, ""}, + {"Null.Valid", Field, 22, ""}, + {"NullBool", Type, 0, ""}, + {"NullBool.Bool", Field, 0, ""}, + {"NullBool.Valid", Field, 0, ""}, + {"NullByte", Type, 17, ""}, + {"NullByte.Byte", Field, 17, ""}, + {"NullByte.Valid", Field, 17, ""}, + {"NullFloat64", Type, 0, ""}, + {"NullFloat64.Float64", Field, 0, ""}, + {"NullFloat64.Valid", Field, 0, ""}, + {"NullInt16", Type, 17, ""}, + {"NullInt16.Int16", Field, 17, ""}, + {"NullInt16.Valid", Field, 17, ""}, + {"NullInt32", Type, 13, ""}, + {"NullInt32.Int32", Field, 13, ""}, + {"NullInt32.Valid", Field, 13, ""}, + {"NullInt64", Type, 0, ""}, + {"NullInt64.Int64", Field, 0, ""}, + {"NullInt64.Valid", Field, 0, ""}, + {"NullString", Type, 0, ""}, + {"NullString.String", Field, 0, ""}, + {"NullString.Valid", Field, 0, ""}, + {"NullTime", Type, 13, ""}, + {"NullTime.Time", Field, 13, ""}, + {"NullTime.Valid", Field, 13, ""}, + {"Open", Func, 0, "func(driverName string, dataSourceName string) (*DB, error)"}, + {"OpenDB", Func, 10, "func(c driver.Connector) *DB"}, + {"Out", Type, 9, ""}, + {"Out.Dest", Field, 9, ""}, + {"Out.In", Field, 9, ""}, + {"RawBytes", Type, 0, ""}, + {"Register", Func, 0, "func(name string, driver driver.Driver)"}, + {"Result", Type, 0, ""}, + {"Row", Type, 0, ""}, + {"Rows", Type, 0, ""}, + {"Scanner", Type, 0, ""}, + {"Stmt", Type, 0, ""}, + {"Tx", Type, 0, ""}, + {"TxOptions", Type, 8, ""}, + {"TxOptions.Isolation", Field, 8, ""}, + {"TxOptions.ReadOnly", Field, 8, ""}, }, "database/sql/driver": { - {"(NotNull).ConvertValue", Method, 0}, - {"(Null).ConvertValue", Method, 0}, - {"(RowsAffected).LastInsertId", Method, 0}, - {"(RowsAffected).RowsAffected", Method, 0}, - {"Bool", Var, 0}, - {"ColumnConverter", Type, 0}, - {"Conn", Type, 0}, - {"ConnBeginTx", Type, 8}, - {"ConnPrepareContext", Type, 8}, - {"Connector", Type, 10}, - {"DefaultParameterConverter", Var, 0}, - {"Driver", Type, 0}, - {"DriverContext", Type, 10}, - {"ErrBadConn", Var, 0}, - {"ErrRemoveArgument", Var, 9}, - {"ErrSkip", Var, 0}, - {"Execer", Type, 0}, - {"ExecerContext", Type, 8}, - {"Int32", Var, 0}, - {"IsScanValue", Func, 0}, - {"IsValue", Func, 0}, - {"IsolationLevel", Type, 8}, - {"NamedValue", Type, 8}, - {"NamedValue.Name", Field, 8}, - {"NamedValue.Ordinal", Field, 8}, - {"NamedValue.Value", Field, 8}, - {"NamedValueChecker", Type, 9}, - {"NotNull", Type, 0}, - {"NotNull.Converter", Field, 0}, - {"Null", Type, 0}, - {"Null.Converter", Field, 0}, - {"Pinger", Type, 8}, - {"Queryer", Type, 1}, - {"QueryerContext", Type, 8}, - {"Result", Type, 0}, - {"ResultNoRows", Var, 0}, - {"Rows", Type, 0}, - {"RowsAffected", Type, 0}, - {"RowsColumnTypeDatabaseTypeName", Type, 8}, - {"RowsColumnTypeLength", Type, 8}, - 
{"RowsColumnTypeNullable", Type, 8}, - {"RowsColumnTypePrecisionScale", Type, 8}, - {"RowsColumnTypeScanType", Type, 8}, - {"RowsNextResultSet", Type, 8}, - {"SessionResetter", Type, 10}, - {"Stmt", Type, 0}, - {"StmtExecContext", Type, 8}, - {"StmtQueryContext", Type, 8}, - {"String", Var, 0}, - {"Tx", Type, 0}, - {"TxOptions", Type, 8}, - {"TxOptions.Isolation", Field, 8}, - {"TxOptions.ReadOnly", Field, 8}, - {"Validator", Type, 15}, - {"Value", Type, 0}, - {"ValueConverter", Type, 0}, - {"Valuer", Type, 0}, + {"(NotNull).ConvertValue", Method, 0, ""}, + {"(Null).ConvertValue", Method, 0, ""}, + {"(RowsAffected).LastInsertId", Method, 0, ""}, + {"(RowsAffected).RowsAffected", Method, 0, ""}, + {"Bool", Var, 0, ""}, + {"ColumnConverter", Type, 0, ""}, + {"Conn", Type, 0, ""}, + {"ConnBeginTx", Type, 8, ""}, + {"ConnPrepareContext", Type, 8, ""}, + {"Connector", Type, 10, ""}, + {"DefaultParameterConverter", Var, 0, ""}, + {"Driver", Type, 0, ""}, + {"DriverContext", Type, 10, ""}, + {"ErrBadConn", Var, 0, ""}, + {"ErrRemoveArgument", Var, 9, ""}, + {"ErrSkip", Var, 0, ""}, + {"Execer", Type, 0, ""}, + {"ExecerContext", Type, 8, ""}, + {"Int32", Var, 0, ""}, + {"IsScanValue", Func, 0, "func(v any) bool"}, + {"IsValue", Func, 0, "func(v any) bool"}, + {"IsolationLevel", Type, 8, ""}, + {"NamedValue", Type, 8, ""}, + {"NamedValue.Name", Field, 8, ""}, + {"NamedValue.Ordinal", Field, 8, ""}, + {"NamedValue.Value", Field, 8, ""}, + {"NamedValueChecker", Type, 9, ""}, + {"NotNull", Type, 0, ""}, + {"NotNull.Converter", Field, 0, ""}, + {"Null", Type, 0, ""}, + {"Null.Converter", Field, 0, ""}, + {"Pinger", Type, 8, ""}, + {"Queryer", Type, 1, ""}, + {"QueryerContext", Type, 8, ""}, + {"Result", Type, 0, ""}, + {"ResultNoRows", Var, 0, ""}, + {"Rows", Type, 0, ""}, + {"RowsAffected", Type, 0, ""}, + {"RowsColumnTypeDatabaseTypeName", Type, 8, ""}, + {"RowsColumnTypeLength", Type, 8, ""}, + {"RowsColumnTypeNullable", Type, 8, ""}, + {"RowsColumnTypePrecisionScale", Type, 8, ""}, + {"RowsColumnTypeScanType", Type, 8, ""}, + {"RowsNextResultSet", Type, 8, ""}, + {"SessionResetter", Type, 10, ""}, + {"Stmt", Type, 0, ""}, + {"StmtExecContext", Type, 8, ""}, + {"StmtQueryContext", Type, 8, ""}, + {"String", Var, 0, ""}, + {"Tx", Type, 0, ""}, + {"TxOptions", Type, 8, ""}, + {"TxOptions.Isolation", Field, 8, ""}, + {"TxOptions.ReadOnly", Field, 8, ""}, + {"Validator", Type, 15, ""}, + {"Value", Type, 0, ""}, + {"ValueConverter", Type, 0, ""}, + {"Valuer", Type, 0, ""}, }, "debug/buildinfo": { - {"BuildInfo", Type, 18}, - {"Read", Func, 18}, - {"ReadFile", Func, 18}, + {"BuildInfo", Type, 18, ""}, + {"Read", Func, 18, "func(r io.ReaderAt) (*BuildInfo, error)"}, + {"ReadFile", Func, 18, "func(name string) (info *BuildInfo, err error)"}, }, "debug/dwarf": { - {"(*AddrType).Basic", Method, 0}, - {"(*AddrType).Common", Method, 0}, - {"(*AddrType).Size", Method, 0}, - {"(*AddrType).String", Method, 0}, - {"(*ArrayType).Common", Method, 0}, - {"(*ArrayType).Size", Method, 0}, - {"(*ArrayType).String", Method, 0}, - {"(*BasicType).Basic", Method, 0}, - {"(*BasicType).Common", Method, 0}, - {"(*BasicType).Size", Method, 0}, - {"(*BasicType).String", Method, 0}, - {"(*BoolType).Basic", Method, 0}, - {"(*BoolType).Common", Method, 0}, - {"(*BoolType).Size", Method, 0}, - {"(*BoolType).String", Method, 0}, - {"(*CharType).Basic", Method, 0}, - {"(*CharType).Common", Method, 0}, - {"(*CharType).Size", Method, 0}, - {"(*CharType).String", Method, 0}, - {"(*CommonType).Common", Method, 0}, - {"(*CommonType).Size", 
Method, 0}, - {"(*ComplexType).Basic", Method, 0}, - {"(*ComplexType).Common", Method, 0}, - {"(*ComplexType).Size", Method, 0}, - {"(*ComplexType).String", Method, 0}, - {"(*Data).AddSection", Method, 14}, - {"(*Data).AddTypes", Method, 3}, - {"(*Data).LineReader", Method, 5}, - {"(*Data).Ranges", Method, 7}, - {"(*Data).Reader", Method, 0}, - {"(*Data).Type", Method, 0}, - {"(*DotDotDotType).Common", Method, 0}, - {"(*DotDotDotType).Size", Method, 0}, - {"(*DotDotDotType).String", Method, 0}, - {"(*Entry).AttrField", Method, 5}, - {"(*Entry).Val", Method, 0}, - {"(*EnumType).Common", Method, 0}, - {"(*EnumType).Size", Method, 0}, - {"(*EnumType).String", Method, 0}, - {"(*FloatType).Basic", Method, 0}, - {"(*FloatType).Common", Method, 0}, - {"(*FloatType).Size", Method, 0}, - {"(*FloatType).String", Method, 0}, - {"(*FuncType).Common", Method, 0}, - {"(*FuncType).Size", Method, 0}, - {"(*FuncType).String", Method, 0}, - {"(*IntType).Basic", Method, 0}, - {"(*IntType).Common", Method, 0}, - {"(*IntType).Size", Method, 0}, - {"(*IntType).String", Method, 0}, - {"(*LineReader).Files", Method, 14}, - {"(*LineReader).Next", Method, 5}, - {"(*LineReader).Reset", Method, 5}, - {"(*LineReader).Seek", Method, 5}, - {"(*LineReader).SeekPC", Method, 5}, - {"(*LineReader).Tell", Method, 5}, - {"(*PtrType).Common", Method, 0}, - {"(*PtrType).Size", Method, 0}, - {"(*PtrType).String", Method, 0}, - {"(*QualType).Common", Method, 0}, - {"(*QualType).Size", Method, 0}, - {"(*QualType).String", Method, 0}, - {"(*Reader).AddressSize", Method, 5}, - {"(*Reader).ByteOrder", Method, 14}, - {"(*Reader).Next", Method, 0}, - {"(*Reader).Seek", Method, 0}, - {"(*Reader).SeekPC", Method, 7}, - {"(*Reader).SkipChildren", Method, 0}, - {"(*StructType).Common", Method, 0}, - {"(*StructType).Defn", Method, 0}, - {"(*StructType).Size", Method, 0}, - {"(*StructType).String", Method, 0}, - {"(*TypedefType).Common", Method, 0}, - {"(*TypedefType).Size", Method, 0}, - {"(*TypedefType).String", Method, 0}, - {"(*UcharType).Basic", Method, 0}, - {"(*UcharType).Common", Method, 0}, - {"(*UcharType).Size", Method, 0}, - {"(*UcharType).String", Method, 0}, - {"(*UintType).Basic", Method, 0}, - {"(*UintType).Common", Method, 0}, - {"(*UintType).Size", Method, 0}, - {"(*UintType).String", Method, 0}, - {"(*UnspecifiedType).Basic", Method, 4}, - {"(*UnspecifiedType).Common", Method, 4}, - {"(*UnspecifiedType).Size", Method, 4}, - {"(*UnspecifiedType).String", Method, 4}, - {"(*UnsupportedType).Common", Method, 13}, - {"(*UnsupportedType).Size", Method, 13}, - {"(*UnsupportedType).String", Method, 13}, - {"(*VoidType).Common", Method, 0}, - {"(*VoidType).Size", Method, 0}, - {"(*VoidType).String", Method, 0}, - {"(Attr).GoString", Method, 0}, - {"(Attr).String", Method, 0}, - {"(Class).GoString", Method, 5}, - {"(Class).String", Method, 5}, - {"(DecodeError).Error", Method, 0}, - {"(Tag).GoString", Method, 0}, - {"(Tag).String", Method, 0}, - {"AddrType", Type, 0}, - {"AddrType.BasicType", Field, 0}, - {"ArrayType", Type, 0}, - {"ArrayType.CommonType", Field, 0}, - {"ArrayType.Count", Field, 0}, - {"ArrayType.StrideBitSize", Field, 0}, - {"ArrayType.Type", Field, 0}, - {"Attr", Type, 0}, - {"AttrAbstractOrigin", Const, 0}, - {"AttrAccessibility", Const, 0}, - {"AttrAddrBase", Const, 14}, - {"AttrAddrClass", Const, 0}, - {"AttrAlignment", Const, 14}, - {"AttrAllocated", Const, 0}, - {"AttrArtificial", Const, 0}, - {"AttrAssociated", Const, 0}, - {"AttrBaseTypes", Const, 0}, - {"AttrBinaryScale", Const, 14}, - {"AttrBitOffset", 
Const, 0}, - {"AttrBitSize", Const, 0}, - {"AttrByteSize", Const, 0}, - {"AttrCallAllCalls", Const, 14}, - {"AttrCallAllSourceCalls", Const, 14}, - {"AttrCallAllTailCalls", Const, 14}, - {"AttrCallColumn", Const, 0}, - {"AttrCallDataLocation", Const, 14}, - {"AttrCallDataValue", Const, 14}, - {"AttrCallFile", Const, 0}, - {"AttrCallLine", Const, 0}, - {"AttrCallOrigin", Const, 14}, - {"AttrCallPC", Const, 14}, - {"AttrCallParameter", Const, 14}, - {"AttrCallReturnPC", Const, 14}, - {"AttrCallTailCall", Const, 14}, - {"AttrCallTarget", Const, 14}, - {"AttrCallTargetClobbered", Const, 14}, - {"AttrCallValue", Const, 14}, - {"AttrCalling", Const, 0}, - {"AttrCommonRef", Const, 0}, - {"AttrCompDir", Const, 0}, - {"AttrConstExpr", Const, 14}, - {"AttrConstValue", Const, 0}, - {"AttrContainingType", Const, 0}, - {"AttrCount", Const, 0}, - {"AttrDataBitOffset", Const, 14}, - {"AttrDataLocation", Const, 0}, - {"AttrDataMemberLoc", Const, 0}, - {"AttrDecimalScale", Const, 14}, - {"AttrDecimalSign", Const, 14}, - {"AttrDeclColumn", Const, 0}, - {"AttrDeclFile", Const, 0}, - {"AttrDeclLine", Const, 0}, - {"AttrDeclaration", Const, 0}, - {"AttrDefaultValue", Const, 0}, - {"AttrDefaulted", Const, 14}, - {"AttrDeleted", Const, 14}, - {"AttrDescription", Const, 0}, - {"AttrDigitCount", Const, 14}, - {"AttrDiscr", Const, 0}, - {"AttrDiscrList", Const, 0}, - {"AttrDiscrValue", Const, 0}, - {"AttrDwoName", Const, 14}, - {"AttrElemental", Const, 14}, - {"AttrEncoding", Const, 0}, - {"AttrEndianity", Const, 14}, - {"AttrEntrypc", Const, 0}, - {"AttrEnumClass", Const, 14}, - {"AttrExplicit", Const, 14}, - {"AttrExportSymbols", Const, 14}, - {"AttrExtension", Const, 0}, - {"AttrExternal", Const, 0}, - {"AttrFrameBase", Const, 0}, - {"AttrFriend", Const, 0}, - {"AttrHighpc", Const, 0}, - {"AttrIdentifierCase", Const, 0}, - {"AttrImport", Const, 0}, - {"AttrInline", Const, 0}, - {"AttrIsOptional", Const, 0}, - {"AttrLanguage", Const, 0}, - {"AttrLinkageName", Const, 14}, - {"AttrLocation", Const, 0}, - {"AttrLoclistsBase", Const, 14}, - {"AttrLowerBound", Const, 0}, - {"AttrLowpc", Const, 0}, - {"AttrMacroInfo", Const, 0}, - {"AttrMacros", Const, 14}, - {"AttrMainSubprogram", Const, 14}, - {"AttrMutable", Const, 14}, - {"AttrName", Const, 0}, - {"AttrNamelistItem", Const, 0}, - {"AttrNoreturn", Const, 14}, - {"AttrObjectPointer", Const, 14}, - {"AttrOrdering", Const, 0}, - {"AttrPictureString", Const, 14}, - {"AttrPriority", Const, 0}, - {"AttrProducer", Const, 0}, - {"AttrPrototyped", Const, 0}, - {"AttrPure", Const, 14}, - {"AttrRanges", Const, 0}, - {"AttrRank", Const, 14}, - {"AttrRecursive", Const, 14}, - {"AttrReference", Const, 14}, - {"AttrReturnAddr", Const, 0}, - {"AttrRnglistsBase", Const, 14}, - {"AttrRvalueReference", Const, 14}, - {"AttrSegment", Const, 0}, - {"AttrSibling", Const, 0}, - {"AttrSignature", Const, 14}, - {"AttrSmall", Const, 14}, - {"AttrSpecification", Const, 0}, - {"AttrStartScope", Const, 0}, - {"AttrStaticLink", Const, 0}, - {"AttrStmtList", Const, 0}, - {"AttrStrOffsetsBase", Const, 14}, - {"AttrStride", Const, 0}, - {"AttrStrideSize", Const, 0}, - {"AttrStringLength", Const, 0}, - {"AttrStringLengthBitSize", Const, 14}, - {"AttrStringLengthByteSize", Const, 14}, - {"AttrThreadsScaled", Const, 14}, - {"AttrTrampoline", Const, 0}, - {"AttrType", Const, 0}, - {"AttrUpperBound", Const, 0}, - {"AttrUseLocation", Const, 0}, - {"AttrUseUTF8", Const, 0}, - {"AttrVarParam", Const, 0}, - {"AttrVirtuality", Const, 0}, - {"AttrVisibility", Const, 0}, - {"AttrVtableElemLoc", Const, 0}, - 
{"BasicType", Type, 0}, - {"BasicType.BitOffset", Field, 0}, - {"BasicType.BitSize", Field, 0}, - {"BasicType.CommonType", Field, 0}, - {"BasicType.DataBitOffset", Field, 18}, - {"BoolType", Type, 0}, - {"BoolType.BasicType", Field, 0}, - {"CharType", Type, 0}, - {"CharType.BasicType", Field, 0}, - {"Class", Type, 5}, - {"ClassAddrPtr", Const, 14}, - {"ClassAddress", Const, 5}, - {"ClassBlock", Const, 5}, - {"ClassConstant", Const, 5}, - {"ClassExprLoc", Const, 5}, - {"ClassFlag", Const, 5}, - {"ClassLinePtr", Const, 5}, - {"ClassLocList", Const, 14}, - {"ClassLocListPtr", Const, 5}, - {"ClassMacPtr", Const, 5}, - {"ClassRangeListPtr", Const, 5}, - {"ClassReference", Const, 5}, - {"ClassReferenceAlt", Const, 5}, - {"ClassReferenceSig", Const, 5}, - {"ClassRngList", Const, 14}, - {"ClassRngListsPtr", Const, 14}, - {"ClassStrOffsetsPtr", Const, 14}, - {"ClassString", Const, 5}, - {"ClassStringAlt", Const, 5}, - {"ClassUnknown", Const, 6}, - {"CommonType", Type, 0}, - {"CommonType.ByteSize", Field, 0}, - {"CommonType.Name", Field, 0}, - {"ComplexType", Type, 0}, - {"ComplexType.BasicType", Field, 0}, - {"Data", Type, 0}, - {"DecodeError", Type, 0}, - {"DecodeError.Err", Field, 0}, - {"DecodeError.Name", Field, 0}, - {"DecodeError.Offset", Field, 0}, - {"DotDotDotType", Type, 0}, - {"DotDotDotType.CommonType", Field, 0}, - {"Entry", Type, 0}, - {"Entry.Children", Field, 0}, - {"Entry.Field", Field, 0}, - {"Entry.Offset", Field, 0}, - {"Entry.Tag", Field, 0}, - {"EnumType", Type, 0}, - {"EnumType.CommonType", Field, 0}, - {"EnumType.EnumName", Field, 0}, - {"EnumType.Val", Field, 0}, - {"EnumValue", Type, 0}, - {"EnumValue.Name", Field, 0}, - {"EnumValue.Val", Field, 0}, - {"ErrUnknownPC", Var, 5}, - {"Field", Type, 0}, - {"Field.Attr", Field, 0}, - {"Field.Class", Field, 5}, - {"Field.Val", Field, 0}, - {"FloatType", Type, 0}, - {"FloatType.BasicType", Field, 0}, - {"FuncType", Type, 0}, - {"FuncType.CommonType", Field, 0}, - {"FuncType.ParamType", Field, 0}, - {"FuncType.ReturnType", Field, 0}, - {"IntType", Type, 0}, - {"IntType.BasicType", Field, 0}, - {"LineEntry", Type, 5}, - {"LineEntry.Address", Field, 5}, - {"LineEntry.BasicBlock", Field, 5}, - {"LineEntry.Column", Field, 5}, - {"LineEntry.Discriminator", Field, 5}, - {"LineEntry.EndSequence", Field, 5}, - {"LineEntry.EpilogueBegin", Field, 5}, - {"LineEntry.File", Field, 5}, - {"LineEntry.ISA", Field, 5}, - {"LineEntry.IsStmt", Field, 5}, - {"LineEntry.Line", Field, 5}, - {"LineEntry.OpIndex", Field, 5}, - {"LineEntry.PrologueEnd", Field, 5}, - {"LineFile", Type, 5}, - {"LineFile.Length", Field, 5}, - {"LineFile.Mtime", Field, 5}, - {"LineFile.Name", Field, 5}, - {"LineReader", Type, 5}, - {"LineReaderPos", Type, 5}, - {"New", Func, 0}, - {"Offset", Type, 0}, - {"PtrType", Type, 0}, - {"PtrType.CommonType", Field, 0}, - {"PtrType.Type", Field, 0}, - {"QualType", Type, 0}, - {"QualType.CommonType", Field, 0}, - {"QualType.Qual", Field, 0}, - {"QualType.Type", Field, 0}, - {"Reader", Type, 0}, - {"StructField", Type, 0}, - {"StructField.BitOffset", Field, 0}, - {"StructField.BitSize", Field, 0}, - {"StructField.ByteOffset", Field, 0}, - {"StructField.ByteSize", Field, 0}, - {"StructField.DataBitOffset", Field, 18}, - {"StructField.Name", Field, 0}, - {"StructField.Type", Field, 0}, - {"StructType", Type, 0}, - {"StructType.CommonType", Field, 0}, - {"StructType.Field", Field, 0}, - {"StructType.Incomplete", Field, 0}, - {"StructType.Kind", Field, 0}, - {"StructType.StructName", Field, 0}, - {"Tag", Type, 0}, - {"TagAccessDeclaration", 
Const, 0}, - {"TagArrayType", Const, 0}, - {"TagAtomicType", Const, 14}, - {"TagBaseType", Const, 0}, - {"TagCallSite", Const, 14}, - {"TagCallSiteParameter", Const, 14}, - {"TagCatchDwarfBlock", Const, 0}, - {"TagClassType", Const, 0}, - {"TagCoarrayType", Const, 14}, - {"TagCommonDwarfBlock", Const, 0}, - {"TagCommonInclusion", Const, 0}, - {"TagCompileUnit", Const, 0}, - {"TagCondition", Const, 3}, - {"TagConstType", Const, 0}, - {"TagConstant", Const, 0}, - {"TagDwarfProcedure", Const, 0}, - {"TagDynamicType", Const, 14}, - {"TagEntryPoint", Const, 0}, - {"TagEnumerationType", Const, 0}, - {"TagEnumerator", Const, 0}, - {"TagFileType", Const, 0}, - {"TagFormalParameter", Const, 0}, - {"TagFriend", Const, 0}, - {"TagGenericSubrange", Const, 14}, - {"TagImmutableType", Const, 14}, - {"TagImportedDeclaration", Const, 0}, - {"TagImportedModule", Const, 0}, - {"TagImportedUnit", Const, 0}, - {"TagInheritance", Const, 0}, - {"TagInlinedSubroutine", Const, 0}, - {"TagInterfaceType", Const, 0}, - {"TagLabel", Const, 0}, - {"TagLexDwarfBlock", Const, 0}, - {"TagMember", Const, 0}, - {"TagModule", Const, 0}, - {"TagMutableType", Const, 0}, - {"TagNamelist", Const, 0}, - {"TagNamelistItem", Const, 0}, - {"TagNamespace", Const, 0}, - {"TagPackedType", Const, 0}, - {"TagPartialUnit", Const, 0}, - {"TagPointerType", Const, 0}, - {"TagPtrToMemberType", Const, 0}, - {"TagReferenceType", Const, 0}, - {"TagRestrictType", Const, 0}, - {"TagRvalueReferenceType", Const, 3}, - {"TagSetType", Const, 0}, - {"TagSharedType", Const, 3}, - {"TagSkeletonUnit", Const, 14}, - {"TagStringType", Const, 0}, - {"TagStructType", Const, 0}, - {"TagSubprogram", Const, 0}, - {"TagSubrangeType", Const, 0}, - {"TagSubroutineType", Const, 0}, - {"TagTemplateAlias", Const, 3}, - {"TagTemplateTypeParameter", Const, 0}, - {"TagTemplateValueParameter", Const, 0}, - {"TagThrownType", Const, 0}, - {"TagTryDwarfBlock", Const, 0}, - {"TagTypeUnit", Const, 3}, - {"TagTypedef", Const, 0}, - {"TagUnionType", Const, 0}, - {"TagUnspecifiedParameters", Const, 0}, - {"TagUnspecifiedType", Const, 0}, - {"TagVariable", Const, 0}, - {"TagVariant", Const, 0}, - {"TagVariantPart", Const, 0}, - {"TagVolatileType", Const, 0}, - {"TagWithStmt", Const, 0}, - {"Type", Type, 0}, - {"TypedefType", Type, 0}, - {"TypedefType.CommonType", Field, 0}, - {"TypedefType.Type", Field, 0}, - {"UcharType", Type, 0}, - {"UcharType.BasicType", Field, 0}, - {"UintType", Type, 0}, - {"UintType.BasicType", Field, 0}, - {"UnspecifiedType", Type, 4}, - {"UnspecifiedType.BasicType", Field, 4}, - {"UnsupportedType", Type, 13}, - {"UnsupportedType.CommonType", Field, 13}, - {"UnsupportedType.Tag", Field, 13}, - {"VoidType", Type, 0}, - {"VoidType.CommonType", Field, 0}, + {"(*AddrType).Basic", Method, 0, ""}, + {"(*AddrType).Common", Method, 0, ""}, + {"(*AddrType).Size", Method, 0, ""}, + {"(*AddrType).String", Method, 0, ""}, + {"(*ArrayType).Common", Method, 0, ""}, + {"(*ArrayType).Size", Method, 0, ""}, + {"(*ArrayType).String", Method, 0, ""}, + {"(*BasicType).Basic", Method, 0, ""}, + {"(*BasicType).Common", Method, 0, ""}, + {"(*BasicType).Size", Method, 0, ""}, + {"(*BasicType).String", Method, 0, ""}, + {"(*BoolType).Basic", Method, 0, ""}, + {"(*BoolType).Common", Method, 0, ""}, + {"(*BoolType).Size", Method, 0, ""}, + {"(*BoolType).String", Method, 0, ""}, + {"(*CharType).Basic", Method, 0, ""}, + {"(*CharType).Common", Method, 0, ""}, + {"(*CharType).Size", Method, 0, ""}, + {"(*CharType).String", Method, 0, ""}, + {"(*CommonType).Common", Method, 0, ""}, + 
{"(*CommonType).Size", Method, 0, ""}, + {"(*ComplexType).Basic", Method, 0, ""}, + {"(*ComplexType).Common", Method, 0, ""}, + {"(*ComplexType).Size", Method, 0, ""}, + {"(*ComplexType).String", Method, 0, ""}, + {"(*Data).AddSection", Method, 14, ""}, + {"(*Data).AddTypes", Method, 3, ""}, + {"(*Data).LineReader", Method, 5, ""}, + {"(*Data).Ranges", Method, 7, ""}, + {"(*Data).Reader", Method, 0, ""}, + {"(*Data).Type", Method, 0, ""}, + {"(*DotDotDotType).Common", Method, 0, ""}, + {"(*DotDotDotType).Size", Method, 0, ""}, + {"(*DotDotDotType).String", Method, 0, ""}, + {"(*Entry).AttrField", Method, 5, ""}, + {"(*Entry).Val", Method, 0, ""}, + {"(*EnumType).Common", Method, 0, ""}, + {"(*EnumType).Size", Method, 0, ""}, + {"(*EnumType).String", Method, 0, ""}, + {"(*FloatType).Basic", Method, 0, ""}, + {"(*FloatType).Common", Method, 0, ""}, + {"(*FloatType).Size", Method, 0, ""}, + {"(*FloatType).String", Method, 0, ""}, + {"(*FuncType).Common", Method, 0, ""}, + {"(*FuncType).Size", Method, 0, ""}, + {"(*FuncType).String", Method, 0, ""}, + {"(*IntType).Basic", Method, 0, ""}, + {"(*IntType).Common", Method, 0, ""}, + {"(*IntType).Size", Method, 0, ""}, + {"(*IntType).String", Method, 0, ""}, + {"(*LineReader).Files", Method, 14, ""}, + {"(*LineReader).Next", Method, 5, ""}, + {"(*LineReader).Reset", Method, 5, ""}, + {"(*LineReader).Seek", Method, 5, ""}, + {"(*LineReader).SeekPC", Method, 5, ""}, + {"(*LineReader).Tell", Method, 5, ""}, + {"(*PtrType).Common", Method, 0, ""}, + {"(*PtrType).Size", Method, 0, ""}, + {"(*PtrType).String", Method, 0, ""}, + {"(*QualType).Common", Method, 0, ""}, + {"(*QualType).Size", Method, 0, ""}, + {"(*QualType).String", Method, 0, ""}, + {"(*Reader).AddressSize", Method, 5, ""}, + {"(*Reader).ByteOrder", Method, 14, ""}, + {"(*Reader).Next", Method, 0, ""}, + {"(*Reader).Seek", Method, 0, ""}, + {"(*Reader).SeekPC", Method, 7, ""}, + {"(*Reader).SkipChildren", Method, 0, ""}, + {"(*StructType).Common", Method, 0, ""}, + {"(*StructType).Defn", Method, 0, ""}, + {"(*StructType).Size", Method, 0, ""}, + {"(*StructType).String", Method, 0, ""}, + {"(*TypedefType).Common", Method, 0, ""}, + {"(*TypedefType).Size", Method, 0, ""}, + {"(*TypedefType).String", Method, 0, ""}, + {"(*UcharType).Basic", Method, 0, ""}, + {"(*UcharType).Common", Method, 0, ""}, + {"(*UcharType).Size", Method, 0, ""}, + {"(*UcharType).String", Method, 0, ""}, + {"(*UintType).Basic", Method, 0, ""}, + {"(*UintType).Common", Method, 0, ""}, + {"(*UintType).Size", Method, 0, ""}, + {"(*UintType).String", Method, 0, ""}, + {"(*UnspecifiedType).Basic", Method, 4, ""}, + {"(*UnspecifiedType).Common", Method, 4, ""}, + {"(*UnspecifiedType).Size", Method, 4, ""}, + {"(*UnspecifiedType).String", Method, 4, ""}, + {"(*UnsupportedType).Common", Method, 13, ""}, + {"(*UnsupportedType).Size", Method, 13, ""}, + {"(*UnsupportedType).String", Method, 13, ""}, + {"(*VoidType).Common", Method, 0, ""}, + {"(*VoidType).Size", Method, 0, ""}, + {"(*VoidType).String", Method, 0, ""}, + {"(Attr).GoString", Method, 0, ""}, + {"(Attr).String", Method, 0, ""}, + {"(Class).GoString", Method, 5, ""}, + {"(Class).String", Method, 5, ""}, + {"(DecodeError).Error", Method, 0, ""}, + {"(Tag).GoString", Method, 0, ""}, + {"(Tag).String", Method, 0, ""}, + {"AddrType", Type, 0, ""}, + {"AddrType.BasicType", Field, 0, ""}, + {"ArrayType", Type, 0, ""}, + {"ArrayType.CommonType", Field, 0, ""}, + {"ArrayType.Count", Field, 0, ""}, + {"ArrayType.StrideBitSize", Field, 0, ""}, + {"ArrayType.Type", Field, 0, 
""}, + {"Attr", Type, 0, ""}, + {"AttrAbstractOrigin", Const, 0, ""}, + {"AttrAccessibility", Const, 0, ""}, + {"AttrAddrBase", Const, 14, ""}, + {"AttrAddrClass", Const, 0, ""}, + {"AttrAlignment", Const, 14, ""}, + {"AttrAllocated", Const, 0, ""}, + {"AttrArtificial", Const, 0, ""}, + {"AttrAssociated", Const, 0, ""}, + {"AttrBaseTypes", Const, 0, ""}, + {"AttrBinaryScale", Const, 14, ""}, + {"AttrBitOffset", Const, 0, ""}, + {"AttrBitSize", Const, 0, ""}, + {"AttrByteSize", Const, 0, ""}, + {"AttrCallAllCalls", Const, 14, ""}, + {"AttrCallAllSourceCalls", Const, 14, ""}, + {"AttrCallAllTailCalls", Const, 14, ""}, + {"AttrCallColumn", Const, 0, ""}, + {"AttrCallDataLocation", Const, 14, ""}, + {"AttrCallDataValue", Const, 14, ""}, + {"AttrCallFile", Const, 0, ""}, + {"AttrCallLine", Const, 0, ""}, + {"AttrCallOrigin", Const, 14, ""}, + {"AttrCallPC", Const, 14, ""}, + {"AttrCallParameter", Const, 14, ""}, + {"AttrCallReturnPC", Const, 14, ""}, + {"AttrCallTailCall", Const, 14, ""}, + {"AttrCallTarget", Const, 14, ""}, + {"AttrCallTargetClobbered", Const, 14, ""}, + {"AttrCallValue", Const, 14, ""}, + {"AttrCalling", Const, 0, ""}, + {"AttrCommonRef", Const, 0, ""}, + {"AttrCompDir", Const, 0, ""}, + {"AttrConstExpr", Const, 14, ""}, + {"AttrConstValue", Const, 0, ""}, + {"AttrContainingType", Const, 0, ""}, + {"AttrCount", Const, 0, ""}, + {"AttrDataBitOffset", Const, 14, ""}, + {"AttrDataLocation", Const, 0, ""}, + {"AttrDataMemberLoc", Const, 0, ""}, + {"AttrDecimalScale", Const, 14, ""}, + {"AttrDecimalSign", Const, 14, ""}, + {"AttrDeclColumn", Const, 0, ""}, + {"AttrDeclFile", Const, 0, ""}, + {"AttrDeclLine", Const, 0, ""}, + {"AttrDeclaration", Const, 0, ""}, + {"AttrDefaultValue", Const, 0, ""}, + {"AttrDefaulted", Const, 14, ""}, + {"AttrDeleted", Const, 14, ""}, + {"AttrDescription", Const, 0, ""}, + {"AttrDigitCount", Const, 14, ""}, + {"AttrDiscr", Const, 0, ""}, + {"AttrDiscrList", Const, 0, ""}, + {"AttrDiscrValue", Const, 0, ""}, + {"AttrDwoName", Const, 14, ""}, + {"AttrElemental", Const, 14, ""}, + {"AttrEncoding", Const, 0, ""}, + {"AttrEndianity", Const, 14, ""}, + {"AttrEntrypc", Const, 0, ""}, + {"AttrEnumClass", Const, 14, ""}, + {"AttrExplicit", Const, 14, ""}, + {"AttrExportSymbols", Const, 14, ""}, + {"AttrExtension", Const, 0, ""}, + {"AttrExternal", Const, 0, ""}, + {"AttrFrameBase", Const, 0, ""}, + {"AttrFriend", Const, 0, ""}, + {"AttrHighpc", Const, 0, ""}, + {"AttrIdentifierCase", Const, 0, ""}, + {"AttrImport", Const, 0, ""}, + {"AttrInline", Const, 0, ""}, + {"AttrIsOptional", Const, 0, ""}, + {"AttrLanguage", Const, 0, ""}, + {"AttrLinkageName", Const, 14, ""}, + {"AttrLocation", Const, 0, ""}, + {"AttrLoclistsBase", Const, 14, ""}, + {"AttrLowerBound", Const, 0, ""}, + {"AttrLowpc", Const, 0, ""}, + {"AttrMacroInfo", Const, 0, ""}, + {"AttrMacros", Const, 14, ""}, + {"AttrMainSubprogram", Const, 14, ""}, + {"AttrMutable", Const, 14, ""}, + {"AttrName", Const, 0, ""}, + {"AttrNamelistItem", Const, 0, ""}, + {"AttrNoreturn", Const, 14, ""}, + {"AttrObjectPointer", Const, 14, ""}, + {"AttrOrdering", Const, 0, ""}, + {"AttrPictureString", Const, 14, ""}, + {"AttrPriority", Const, 0, ""}, + {"AttrProducer", Const, 0, ""}, + {"AttrPrototyped", Const, 0, ""}, + {"AttrPure", Const, 14, ""}, + {"AttrRanges", Const, 0, ""}, + {"AttrRank", Const, 14, ""}, + {"AttrRecursive", Const, 14, ""}, + {"AttrReference", Const, 14, ""}, + {"AttrReturnAddr", Const, 0, ""}, + {"AttrRnglistsBase", Const, 14, ""}, + {"AttrRvalueReference", Const, 14, ""}, + {"AttrSegment", 
Const, 0, ""}, + {"AttrSibling", Const, 0, ""}, + {"AttrSignature", Const, 14, ""}, + {"AttrSmall", Const, 14, ""}, + {"AttrSpecification", Const, 0, ""}, + {"AttrStartScope", Const, 0, ""}, + {"AttrStaticLink", Const, 0, ""}, + {"AttrStmtList", Const, 0, ""}, + {"AttrStrOffsetsBase", Const, 14, ""}, + {"AttrStride", Const, 0, ""}, + {"AttrStrideSize", Const, 0, ""}, + {"AttrStringLength", Const, 0, ""}, + {"AttrStringLengthBitSize", Const, 14, ""}, + {"AttrStringLengthByteSize", Const, 14, ""}, + {"AttrThreadsScaled", Const, 14, ""}, + {"AttrTrampoline", Const, 0, ""}, + {"AttrType", Const, 0, ""}, + {"AttrUpperBound", Const, 0, ""}, + {"AttrUseLocation", Const, 0, ""}, + {"AttrUseUTF8", Const, 0, ""}, + {"AttrVarParam", Const, 0, ""}, + {"AttrVirtuality", Const, 0, ""}, + {"AttrVisibility", Const, 0, ""}, + {"AttrVtableElemLoc", Const, 0, ""}, + {"BasicType", Type, 0, ""}, + {"BasicType.BitOffset", Field, 0, ""}, + {"BasicType.BitSize", Field, 0, ""}, + {"BasicType.CommonType", Field, 0, ""}, + {"BasicType.DataBitOffset", Field, 18, ""}, + {"BoolType", Type, 0, ""}, + {"BoolType.BasicType", Field, 0, ""}, + {"CharType", Type, 0, ""}, + {"CharType.BasicType", Field, 0, ""}, + {"Class", Type, 5, ""}, + {"ClassAddrPtr", Const, 14, ""}, + {"ClassAddress", Const, 5, ""}, + {"ClassBlock", Const, 5, ""}, + {"ClassConstant", Const, 5, ""}, + {"ClassExprLoc", Const, 5, ""}, + {"ClassFlag", Const, 5, ""}, + {"ClassLinePtr", Const, 5, ""}, + {"ClassLocList", Const, 14, ""}, + {"ClassLocListPtr", Const, 5, ""}, + {"ClassMacPtr", Const, 5, ""}, + {"ClassRangeListPtr", Const, 5, ""}, + {"ClassReference", Const, 5, ""}, + {"ClassReferenceAlt", Const, 5, ""}, + {"ClassReferenceSig", Const, 5, ""}, + {"ClassRngList", Const, 14, ""}, + {"ClassRngListsPtr", Const, 14, ""}, + {"ClassStrOffsetsPtr", Const, 14, ""}, + {"ClassString", Const, 5, ""}, + {"ClassStringAlt", Const, 5, ""}, + {"ClassUnknown", Const, 6, ""}, + {"CommonType", Type, 0, ""}, + {"CommonType.ByteSize", Field, 0, ""}, + {"CommonType.Name", Field, 0, ""}, + {"ComplexType", Type, 0, ""}, + {"ComplexType.BasicType", Field, 0, ""}, + {"Data", Type, 0, ""}, + {"DecodeError", Type, 0, ""}, + {"DecodeError.Err", Field, 0, ""}, + {"DecodeError.Name", Field, 0, ""}, + {"DecodeError.Offset", Field, 0, ""}, + {"DotDotDotType", Type, 0, ""}, + {"DotDotDotType.CommonType", Field, 0, ""}, + {"Entry", Type, 0, ""}, + {"Entry.Children", Field, 0, ""}, + {"Entry.Field", Field, 0, ""}, + {"Entry.Offset", Field, 0, ""}, + {"Entry.Tag", Field, 0, ""}, + {"EnumType", Type, 0, ""}, + {"EnumType.CommonType", Field, 0, ""}, + {"EnumType.EnumName", Field, 0, ""}, + {"EnumType.Val", Field, 0, ""}, + {"EnumValue", Type, 0, ""}, + {"EnumValue.Name", Field, 0, ""}, + {"EnumValue.Val", Field, 0, ""}, + {"ErrUnknownPC", Var, 5, ""}, + {"Field", Type, 0, ""}, + {"Field.Attr", Field, 0, ""}, + {"Field.Class", Field, 5, ""}, + {"Field.Val", Field, 0, ""}, + {"FloatType", Type, 0, ""}, + {"FloatType.BasicType", Field, 0, ""}, + {"FuncType", Type, 0, ""}, + {"FuncType.CommonType", Field, 0, ""}, + {"FuncType.ParamType", Field, 0, ""}, + {"FuncType.ReturnType", Field, 0, ""}, + {"IntType", Type, 0, ""}, + {"IntType.BasicType", Field, 0, ""}, + {"LineEntry", Type, 5, ""}, + {"LineEntry.Address", Field, 5, ""}, + {"LineEntry.BasicBlock", Field, 5, ""}, + {"LineEntry.Column", Field, 5, ""}, + {"LineEntry.Discriminator", Field, 5, ""}, + {"LineEntry.EndSequence", Field, 5, ""}, + {"LineEntry.EpilogueBegin", Field, 5, ""}, + {"LineEntry.File", Field, 5, ""}, + {"LineEntry.ISA", 
Field, 5, ""}, + {"LineEntry.IsStmt", Field, 5, ""}, + {"LineEntry.Line", Field, 5, ""}, + {"LineEntry.OpIndex", Field, 5, ""}, + {"LineEntry.PrologueEnd", Field, 5, ""}, + {"LineFile", Type, 5, ""}, + {"LineFile.Length", Field, 5, ""}, + {"LineFile.Mtime", Field, 5, ""}, + {"LineFile.Name", Field, 5, ""}, + {"LineReader", Type, 5, ""}, + {"LineReaderPos", Type, 5, ""}, + {"New", Func, 0, "func(abbrev []byte, aranges []byte, frame []byte, info []byte, line []byte, pubnames []byte, ranges []byte, str []byte) (*Data, error)"}, + {"Offset", Type, 0, ""}, + {"PtrType", Type, 0, ""}, + {"PtrType.CommonType", Field, 0, ""}, + {"PtrType.Type", Field, 0, ""}, + {"QualType", Type, 0, ""}, + {"QualType.CommonType", Field, 0, ""}, + {"QualType.Qual", Field, 0, ""}, + {"QualType.Type", Field, 0, ""}, + {"Reader", Type, 0, ""}, + {"StructField", Type, 0, ""}, + {"StructField.BitOffset", Field, 0, ""}, + {"StructField.BitSize", Field, 0, ""}, + {"StructField.ByteOffset", Field, 0, ""}, + {"StructField.ByteSize", Field, 0, ""}, + {"StructField.DataBitOffset", Field, 18, ""}, + {"StructField.Name", Field, 0, ""}, + {"StructField.Type", Field, 0, ""}, + {"StructType", Type, 0, ""}, + {"StructType.CommonType", Field, 0, ""}, + {"StructType.Field", Field, 0, ""}, + {"StructType.Incomplete", Field, 0, ""}, + {"StructType.Kind", Field, 0, ""}, + {"StructType.StructName", Field, 0, ""}, + {"Tag", Type, 0, ""}, + {"TagAccessDeclaration", Const, 0, ""}, + {"TagArrayType", Const, 0, ""}, + {"TagAtomicType", Const, 14, ""}, + {"TagBaseType", Const, 0, ""}, + {"TagCallSite", Const, 14, ""}, + {"TagCallSiteParameter", Const, 14, ""}, + {"TagCatchDwarfBlock", Const, 0, ""}, + {"TagClassType", Const, 0, ""}, + {"TagCoarrayType", Const, 14, ""}, + {"TagCommonDwarfBlock", Const, 0, ""}, + {"TagCommonInclusion", Const, 0, ""}, + {"TagCompileUnit", Const, 0, ""}, + {"TagCondition", Const, 3, ""}, + {"TagConstType", Const, 0, ""}, + {"TagConstant", Const, 0, ""}, + {"TagDwarfProcedure", Const, 0, ""}, + {"TagDynamicType", Const, 14, ""}, + {"TagEntryPoint", Const, 0, ""}, + {"TagEnumerationType", Const, 0, ""}, + {"TagEnumerator", Const, 0, ""}, + {"TagFileType", Const, 0, ""}, + {"TagFormalParameter", Const, 0, ""}, + {"TagFriend", Const, 0, ""}, + {"TagGenericSubrange", Const, 14, ""}, + {"TagImmutableType", Const, 14, ""}, + {"TagImportedDeclaration", Const, 0, ""}, + {"TagImportedModule", Const, 0, ""}, + {"TagImportedUnit", Const, 0, ""}, + {"TagInheritance", Const, 0, ""}, + {"TagInlinedSubroutine", Const, 0, ""}, + {"TagInterfaceType", Const, 0, ""}, + {"TagLabel", Const, 0, ""}, + {"TagLexDwarfBlock", Const, 0, ""}, + {"TagMember", Const, 0, ""}, + {"TagModule", Const, 0, ""}, + {"TagMutableType", Const, 0, ""}, + {"TagNamelist", Const, 0, ""}, + {"TagNamelistItem", Const, 0, ""}, + {"TagNamespace", Const, 0, ""}, + {"TagPackedType", Const, 0, ""}, + {"TagPartialUnit", Const, 0, ""}, + {"TagPointerType", Const, 0, ""}, + {"TagPtrToMemberType", Const, 0, ""}, + {"TagReferenceType", Const, 0, ""}, + {"TagRestrictType", Const, 0, ""}, + {"TagRvalueReferenceType", Const, 3, ""}, + {"TagSetType", Const, 0, ""}, + {"TagSharedType", Const, 3, ""}, + {"TagSkeletonUnit", Const, 14, ""}, + {"TagStringType", Const, 0, ""}, + {"TagStructType", Const, 0, ""}, + {"TagSubprogram", Const, 0, ""}, + {"TagSubrangeType", Const, 0, ""}, + {"TagSubroutineType", Const, 0, ""}, + {"TagTemplateAlias", Const, 3, ""}, + {"TagTemplateTypeParameter", Const, 0, ""}, + {"TagTemplateValueParameter", Const, 0, ""}, + {"TagThrownType", Const, 0, 
""}, + {"TagTryDwarfBlock", Const, 0, ""}, + {"TagTypeUnit", Const, 3, ""}, + {"TagTypedef", Const, 0, ""}, + {"TagUnionType", Const, 0, ""}, + {"TagUnspecifiedParameters", Const, 0, ""}, + {"TagUnspecifiedType", Const, 0, ""}, + {"TagVariable", Const, 0, ""}, + {"TagVariant", Const, 0, ""}, + {"TagVariantPart", Const, 0, ""}, + {"TagVolatileType", Const, 0, ""}, + {"TagWithStmt", Const, 0, ""}, + {"Type", Type, 0, ""}, + {"TypedefType", Type, 0, ""}, + {"TypedefType.CommonType", Field, 0, ""}, + {"TypedefType.Type", Field, 0, ""}, + {"UcharType", Type, 0, ""}, + {"UcharType.BasicType", Field, 0, ""}, + {"UintType", Type, 0, ""}, + {"UintType.BasicType", Field, 0, ""}, + {"UnspecifiedType", Type, 4, ""}, + {"UnspecifiedType.BasicType", Field, 4, ""}, + {"UnsupportedType", Type, 13, ""}, + {"UnsupportedType.CommonType", Field, 13, ""}, + {"UnsupportedType.Tag", Field, 13, ""}, + {"VoidType", Type, 0, ""}, + {"VoidType.CommonType", Field, 0, ""}, }, "debug/elf": { - {"(*File).Close", Method, 0}, - {"(*File).DWARF", Method, 0}, - {"(*File).DynString", Method, 1}, - {"(*File).DynValue", Method, 21}, - {"(*File).DynamicSymbols", Method, 4}, - {"(*File).DynamicVersionNeeds", Method, 24}, - {"(*File).DynamicVersions", Method, 24}, - {"(*File).ImportedLibraries", Method, 0}, - {"(*File).ImportedSymbols", Method, 0}, - {"(*File).Section", Method, 0}, - {"(*File).SectionByType", Method, 0}, - {"(*File).Symbols", Method, 0}, - {"(*FormatError).Error", Method, 0}, - {"(*Prog).Open", Method, 0}, - {"(*Section).Data", Method, 0}, - {"(*Section).Open", Method, 0}, - {"(Class).GoString", Method, 0}, - {"(Class).String", Method, 0}, - {"(CompressionType).GoString", Method, 6}, - {"(CompressionType).String", Method, 6}, - {"(Data).GoString", Method, 0}, - {"(Data).String", Method, 0}, - {"(DynFlag).GoString", Method, 0}, - {"(DynFlag).String", Method, 0}, - {"(DynFlag1).GoString", Method, 21}, - {"(DynFlag1).String", Method, 21}, - {"(DynTag).GoString", Method, 0}, - {"(DynTag).String", Method, 0}, - {"(Machine).GoString", Method, 0}, - {"(Machine).String", Method, 0}, - {"(NType).GoString", Method, 0}, - {"(NType).String", Method, 0}, - {"(OSABI).GoString", Method, 0}, - {"(OSABI).String", Method, 0}, - {"(Prog).ReadAt", Method, 0}, - {"(ProgFlag).GoString", Method, 0}, - {"(ProgFlag).String", Method, 0}, - {"(ProgType).GoString", Method, 0}, - {"(ProgType).String", Method, 0}, - {"(R_386).GoString", Method, 0}, - {"(R_386).String", Method, 0}, - {"(R_390).GoString", Method, 7}, - {"(R_390).String", Method, 7}, - {"(R_AARCH64).GoString", Method, 4}, - {"(R_AARCH64).String", Method, 4}, - {"(R_ALPHA).GoString", Method, 0}, - {"(R_ALPHA).String", Method, 0}, - {"(R_ARM).GoString", Method, 0}, - {"(R_ARM).String", Method, 0}, - {"(R_LARCH).GoString", Method, 19}, - {"(R_LARCH).String", Method, 19}, - {"(R_MIPS).GoString", Method, 6}, - {"(R_MIPS).String", Method, 6}, - {"(R_PPC).GoString", Method, 0}, - {"(R_PPC).String", Method, 0}, - {"(R_PPC64).GoString", Method, 5}, - {"(R_PPC64).String", Method, 5}, - {"(R_RISCV).GoString", Method, 11}, - {"(R_RISCV).String", Method, 11}, - {"(R_SPARC).GoString", Method, 0}, - {"(R_SPARC).String", Method, 0}, - {"(R_X86_64).GoString", Method, 0}, - {"(R_X86_64).String", Method, 0}, - {"(Section).ReadAt", Method, 0}, - {"(SectionFlag).GoString", Method, 0}, - {"(SectionFlag).String", Method, 0}, - {"(SectionIndex).GoString", Method, 0}, - {"(SectionIndex).String", Method, 0}, - {"(SectionType).GoString", Method, 0}, - {"(SectionType).String", Method, 0}, - 
{"(SymBind).GoString", Method, 0}, - {"(SymBind).String", Method, 0}, - {"(SymType).GoString", Method, 0}, - {"(SymType).String", Method, 0}, - {"(SymVis).GoString", Method, 0}, - {"(SymVis).String", Method, 0}, - {"(Type).GoString", Method, 0}, - {"(Type).String", Method, 0}, - {"(Version).GoString", Method, 0}, - {"(Version).String", Method, 0}, - {"(VersionIndex).Index", Method, 24}, - {"(VersionIndex).IsHidden", Method, 24}, - {"ARM_MAGIC_TRAMP_NUMBER", Const, 0}, - {"COMPRESS_HIOS", Const, 6}, - {"COMPRESS_HIPROC", Const, 6}, - {"COMPRESS_LOOS", Const, 6}, - {"COMPRESS_LOPROC", Const, 6}, - {"COMPRESS_ZLIB", Const, 6}, - {"COMPRESS_ZSTD", Const, 21}, - {"Chdr32", Type, 6}, - {"Chdr32.Addralign", Field, 6}, - {"Chdr32.Size", Field, 6}, - {"Chdr32.Type", Field, 6}, - {"Chdr64", Type, 6}, - {"Chdr64.Addralign", Field, 6}, - {"Chdr64.Size", Field, 6}, - {"Chdr64.Type", Field, 6}, - {"Class", Type, 0}, - {"CompressionType", Type, 6}, - {"DF_1_CONFALT", Const, 21}, - {"DF_1_DIRECT", Const, 21}, - {"DF_1_DISPRELDNE", Const, 21}, - {"DF_1_DISPRELPND", Const, 21}, - {"DF_1_EDITED", Const, 21}, - {"DF_1_ENDFILTEE", Const, 21}, - {"DF_1_GLOBAL", Const, 21}, - {"DF_1_GLOBAUDIT", Const, 21}, - {"DF_1_GROUP", Const, 21}, - {"DF_1_IGNMULDEF", Const, 21}, - {"DF_1_INITFIRST", Const, 21}, - {"DF_1_INTERPOSE", Const, 21}, - {"DF_1_KMOD", Const, 21}, - {"DF_1_LOADFLTR", Const, 21}, - {"DF_1_NOCOMMON", Const, 21}, - {"DF_1_NODEFLIB", Const, 21}, - {"DF_1_NODELETE", Const, 21}, - {"DF_1_NODIRECT", Const, 21}, - {"DF_1_NODUMP", Const, 21}, - {"DF_1_NOHDR", Const, 21}, - {"DF_1_NOKSYMS", Const, 21}, - {"DF_1_NOOPEN", Const, 21}, - {"DF_1_NORELOC", Const, 21}, - {"DF_1_NOW", Const, 21}, - {"DF_1_ORIGIN", Const, 21}, - {"DF_1_PIE", Const, 21}, - {"DF_1_SINGLETON", Const, 21}, - {"DF_1_STUB", Const, 21}, - {"DF_1_SYMINTPOSE", Const, 21}, - {"DF_1_TRANS", Const, 21}, - {"DF_1_WEAKFILTER", Const, 21}, - {"DF_BIND_NOW", Const, 0}, - {"DF_ORIGIN", Const, 0}, - {"DF_STATIC_TLS", Const, 0}, - {"DF_SYMBOLIC", Const, 0}, - {"DF_TEXTREL", Const, 0}, - {"DT_ADDRRNGHI", Const, 16}, - {"DT_ADDRRNGLO", Const, 16}, - {"DT_AUDIT", Const, 16}, - {"DT_AUXILIARY", Const, 16}, - {"DT_BIND_NOW", Const, 0}, - {"DT_CHECKSUM", Const, 16}, - {"DT_CONFIG", Const, 16}, - {"DT_DEBUG", Const, 0}, - {"DT_DEPAUDIT", Const, 16}, - {"DT_ENCODING", Const, 0}, - {"DT_FEATURE", Const, 16}, - {"DT_FILTER", Const, 16}, - {"DT_FINI", Const, 0}, - {"DT_FINI_ARRAY", Const, 0}, - {"DT_FINI_ARRAYSZ", Const, 0}, - {"DT_FLAGS", Const, 0}, - {"DT_FLAGS_1", Const, 16}, - {"DT_GNU_CONFLICT", Const, 16}, - {"DT_GNU_CONFLICTSZ", Const, 16}, - {"DT_GNU_HASH", Const, 16}, - {"DT_GNU_LIBLIST", Const, 16}, - {"DT_GNU_LIBLISTSZ", Const, 16}, - {"DT_GNU_PRELINKED", Const, 16}, - {"DT_HASH", Const, 0}, - {"DT_HIOS", Const, 0}, - {"DT_HIPROC", Const, 0}, - {"DT_INIT", Const, 0}, - {"DT_INIT_ARRAY", Const, 0}, - {"DT_INIT_ARRAYSZ", Const, 0}, - {"DT_JMPREL", Const, 0}, - {"DT_LOOS", Const, 0}, - {"DT_LOPROC", Const, 0}, - {"DT_MIPS_AUX_DYNAMIC", Const, 16}, - {"DT_MIPS_BASE_ADDRESS", Const, 16}, - {"DT_MIPS_COMPACT_SIZE", Const, 16}, - {"DT_MIPS_CONFLICT", Const, 16}, - {"DT_MIPS_CONFLICTNO", Const, 16}, - {"DT_MIPS_CXX_FLAGS", Const, 16}, - {"DT_MIPS_DELTA_CLASS", Const, 16}, - {"DT_MIPS_DELTA_CLASSSYM", Const, 16}, - {"DT_MIPS_DELTA_CLASSSYM_NO", Const, 16}, - {"DT_MIPS_DELTA_CLASS_NO", Const, 16}, - {"DT_MIPS_DELTA_INSTANCE", Const, 16}, - {"DT_MIPS_DELTA_INSTANCE_NO", Const, 16}, - {"DT_MIPS_DELTA_RELOC", Const, 16}, - {"DT_MIPS_DELTA_RELOC_NO", Const, 16}, - 
{"DT_MIPS_DELTA_SYM", Const, 16}, - {"DT_MIPS_DELTA_SYM_NO", Const, 16}, - {"DT_MIPS_DYNSTR_ALIGN", Const, 16}, - {"DT_MIPS_FLAGS", Const, 16}, - {"DT_MIPS_GOTSYM", Const, 16}, - {"DT_MIPS_GP_VALUE", Const, 16}, - {"DT_MIPS_HIDDEN_GOTIDX", Const, 16}, - {"DT_MIPS_HIPAGENO", Const, 16}, - {"DT_MIPS_ICHECKSUM", Const, 16}, - {"DT_MIPS_INTERFACE", Const, 16}, - {"DT_MIPS_INTERFACE_SIZE", Const, 16}, - {"DT_MIPS_IVERSION", Const, 16}, - {"DT_MIPS_LIBLIST", Const, 16}, - {"DT_MIPS_LIBLISTNO", Const, 16}, - {"DT_MIPS_LOCALPAGE_GOTIDX", Const, 16}, - {"DT_MIPS_LOCAL_GOTIDX", Const, 16}, - {"DT_MIPS_LOCAL_GOTNO", Const, 16}, - {"DT_MIPS_MSYM", Const, 16}, - {"DT_MIPS_OPTIONS", Const, 16}, - {"DT_MIPS_PERF_SUFFIX", Const, 16}, - {"DT_MIPS_PIXIE_INIT", Const, 16}, - {"DT_MIPS_PLTGOT", Const, 16}, - {"DT_MIPS_PROTECTED_GOTIDX", Const, 16}, - {"DT_MIPS_RLD_MAP", Const, 16}, - {"DT_MIPS_RLD_MAP_REL", Const, 16}, - {"DT_MIPS_RLD_TEXT_RESOLVE_ADDR", Const, 16}, - {"DT_MIPS_RLD_VERSION", Const, 16}, - {"DT_MIPS_RWPLT", Const, 16}, - {"DT_MIPS_SYMBOL_LIB", Const, 16}, - {"DT_MIPS_SYMTABNO", Const, 16}, - {"DT_MIPS_TIME_STAMP", Const, 16}, - {"DT_MIPS_UNREFEXTNO", Const, 16}, - {"DT_MOVEENT", Const, 16}, - {"DT_MOVESZ", Const, 16}, - {"DT_MOVETAB", Const, 16}, - {"DT_NEEDED", Const, 0}, - {"DT_NULL", Const, 0}, - {"DT_PLTGOT", Const, 0}, - {"DT_PLTPAD", Const, 16}, - {"DT_PLTPADSZ", Const, 16}, - {"DT_PLTREL", Const, 0}, - {"DT_PLTRELSZ", Const, 0}, - {"DT_POSFLAG_1", Const, 16}, - {"DT_PPC64_GLINK", Const, 16}, - {"DT_PPC64_OPD", Const, 16}, - {"DT_PPC64_OPDSZ", Const, 16}, - {"DT_PPC64_OPT", Const, 16}, - {"DT_PPC_GOT", Const, 16}, - {"DT_PPC_OPT", Const, 16}, - {"DT_PREINIT_ARRAY", Const, 0}, - {"DT_PREINIT_ARRAYSZ", Const, 0}, - {"DT_REL", Const, 0}, - {"DT_RELA", Const, 0}, - {"DT_RELACOUNT", Const, 16}, - {"DT_RELAENT", Const, 0}, - {"DT_RELASZ", Const, 0}, - {"DT_RELCOUNT", Const, 16}, - {"DT_RELENT", Const, 0}, - {"DT_RELSZ", Const, 0}, - {"DT_RPATH", Const, 0}, - {"DT_RUNPATH", Const, 0}, - {"DT_SONAME", Const, 0}, - {"DT_SPARC_REGISTER", Const, 16}, - {"DT_STRSZ", Const, 0}, - {"DT_STRTAB", Const, 0}, - {"DT_SYMBOLIC", Const, 0}, - {"DT_SYMENT", Const, 0}, - {"DT_SYMINENT", Const, 16}, - {"DT_SYMINFO", Const, 16}, - {"DT_SYMINSZ", Const, 16}, - {"DT_SYMTAB", Const, 0}, - {"DT_SYMTAB_SHNDX", Const, 16}, - {"DT_TEXTREL", Const, 0}, - {"DT_TLSDESC_GOT", Const, 16}, - {"DT_TLSDESC_PLT", Const, 16}, - {"DT_USED", Const, 16}, - {"DT_VALRNGHI", Const, 16}, - {"DT_VALRNGLO", Const, 16}, - {"DT_VERDEF", Const, 16}, - {"DT_VERDEFNUM", Const, 16}, - {"DT_VERNEED", Const, 0}, - {"DT_VERNEEDNUM", Const, 0}, - {"DT_VERSYM", Const, 0}, - {"Data", Type, 0}, - {"Dyn32", Type, 0}, - {"Dyn32.Tag", Field, 0}, - {"Dyn32.Val", Field, 0}, - {"Dyn64", Type, 0}, - {"Dyn64.Tag", Field, 0}, - {"Dyn64.Val", Field, 0}, - {"DynFlag", Type, 0}, - {"DynFlag1", Type, 21}, - {"DynTag", Type, 0}, - {"DynamicVersion", Type, 24}, - {"DynamicVersion.Deps", Field, 24}, - {"DynamicVersion.Flags", Field, 24}, - {"DynamicVersion.Index", Field, 24}, - {"DynamicVersion.Name", Field, 24}, - {"DynamicVersionDep", Type, 24}, - {"DynamicVersionDep.Dep", Field, 24}, - {"DynamicVersionDep.Flags", Field, 24}, - {"DynamicVersionDep.Index", Field, 24}, - {"DynamicVersionFlag", Type, 24}, - {"DynamicVersionNeed", Type, 24}, - {"DynamicVersionNeed.Name", Field, 24}, - {"DynamicVersionNeed.Needs", Field, 24}, - {"EI_ABIVERSION", Const, 0}, - {"EI_CLASS", Const, 0}, - {"EI_DATA", Const, 0}, - {"EI_NIDENT", Const, 0}, - {"EI_OSABI", Const, 0}, - 
{"EI_PAD", Const, 0}, - {"EI_VERSION", Const, 0}, - {"ELFCLASS32", Const, 0}, - {"ELFCLASS64", Const, 0}, - {"ELFCLASSNONE", Const, 0}, - {"ELFDATA2LSB", Const, 0}, - {"ELFDATA2MSB", Const, 0}, - {"ELFDATANONE", Const, 0}, - {"ELFMAG", Const, 0}, - {"ELFOSABI_86OPEN", Const, 0}, - {"ELFOSABI_AIX", Const, 0}, - {"ELFOSABI_ARM", Const, 0}, - {"ELFOSABI_AROS", Const, 11}, - {"ELFOSABI_CLOUDABI", Const, 11}, - {"ELFOSABI_FENIXOS", Const, 11}, - {"ELFOSABI_FREEBSD", Const, 0}, - {"ELFOSABI_HPUX", Const, 0}, - {"ELFOSABI_HURD", Const, 0}, - {"ELFOSABI_IRIX", Const, 0}, - {"ELFOSABI_LINUX", Const, 0}, - {"ELFOSABI_MODESTO", Const, 0}, - {"ELFOSABI_NETBSD", Const, 0}, - {"ELFOSABI_NONE", Const, 0}, - {"ELFOSABI_NSK", Const, 0}, - {"ELFOSABI_OPENBSD", Const, 0}, - {"ELFOSABI_OPENVMS", Const, 0}, - {"ELFOSABI_SOLARIS", Const, 0}, - {"ELFOSABI_STANDALONE", Const, 0}, - {"ELFOSABI_TRU64", Const, 0}, - {"EM_386", Const, 0}, - {"EM_486", Const, 0}, - {"EM_56800EX", Const, 11}, - {"EM_68HC05", Const, 11}, - {"EM_68HC08", Const, 11}, - {"EM_68HC11", Const, 11}, - {"EM_68HC12", Const, 0}, - {"EM_68HC16", Const, 11}, - {"EM_68K", Const, 0}, - {"EM_78KOR", Const, 11}, - {"EM_8051", Const, 11}, - {"EM_860", Const, 0}, - {"EM_88K", Const, 0}, - {"EM_960", Const, 0}, - {"EM_AARCH64", Const, 4}, - {"EM_ALPHA", Const, 0}, - {"EM_ALPHA_STD", Const, 0}, - {"EM_ALTERA_NIOS2", Const, 11}, - {"EM_AMDGPU", Const, 11}, - {"EM_ARC", Const, 0}, - {"EM_ARCA", Const, 11}, - {"EM_ARC_COMPACT", Const, 11}, - {"EM_ARC_COMPACT2", Const, 11}, - {"EM_ARM", Const, 0}, - {"EM_AVR", Const, 11}, - {"EM_AVR32", Const, 11}, - {"EM_BA1", Const, 11}, - {"EM_BA2", Const, 11}, - {"EM_BLACKFIN", Const, 11}, - {"EM_BPF", Const, 11}, - {"EM_C166", Const, 11}, - {"EM_CDP", Const, 11}, - {"EM_CE", Const, 11}, - {"EM_CLOUDSHIELD", Const, 11}, - {"EM_COGE", Const, 11}, - {"EM_COLDFIRE", Const, 0}, - {"EM_COOL", Const, 11}, - {"EM_COREA_1ST", Const, 11}, - {"EM_COREA_2ND", Const, 11}, - {"EM_CR", Const, 11}, - {"EM_CR16", Const, 11}, - {"EM_CRAYNV2", Const, 11}, - {"EM_CRIS", Const, 11}, - {"EM_CRX", Const, 11}, - {"EM_CSR_KALIMBA", Const, 11}, - {"EM_CUDA", Const, 11}, - {"EM_CYPRESS_M8C", Const, 11}, - {"EM_D10V", Const, 11}, - {"EM_D30V", Const, 11}, - {"EM_DSP24", Const, 11}, - {"EM_DSPIC30F", Const, 11}, - {"EM_DXP", Const, 11}, - {"EM_ECOG1", Const, 11}, - {"EM_ECOG16", Const, 11}, - {"EM_ECOG1X", Const, 11}, - {"EM_ECOG2", Const, 11}, - {"EM_ETPU", Const, 11}, - {"EM_EXCESS", Const, 11}, - {"EM_F2MC16", Const, 11}, - {"EM_FIREPATH", Const, 11}, - {"EM_FR20", Const, 0}, - {"EM_FR30", Const, 11}, - {"EM_FT32", Const, 11}, - {"EM_FX66", Const, 11}, - {"EM_H8S", Const, 0}, - {"EM_H8_300", Const, 0}, - {"EM_H8_300H", Const, 0}, - {"EM_H8_500", Const, 0}, - {"EM_HUANY", Const, 11}, - {"EM_IA_64", Const, 0}, - {"EM_INTEL205", Const, 11}, - {"EM_INTEL206", Const, 11}, - {"EM_INTEL207", Const, 11}, - {"EM_INTEL208", Const, 11}, - {"EM_INTEL209", Const, 11}, - {"EM_IP2K", Const, 11}, - {"EM_JAVELIN", Const, 11}, - {"EM_K10M", Const, 11}, - {"EM_KM32", Const, 11}, - {"EM_KMX16", Const, 11}, - {"EM_KMX32", Const, 11}, - {"EM_KMX8", Const, 11}, - {"EM_KVARC", Const, 11}, - {"EM_L10M", Const, 11}, - {"EM_LANAI", Const, 11}, - {"EM_LATTICEMICO32", Const, 11}, - {"EM_LOONGARCH", Const, 19}, - {"EM_M16C", Const, 11}, - {"EM_M32", Const, 0}, - {"EM_M32C", Const, 11}, - {"EM_M32R", Const, 11}, - {"EM_MANIK", Const, 11}, - {"EM_MAX", Const, 11}, - {"EM_MAXQ30", Const, 11}, - {"EM_MCHP_PIC", Const, 11}, - {"EM_MCST_ELBRUS", Const, 11}, - {"EM_ME16", Const, 0}, 
- {"EM_METAG", Const, 11}, - {"EM_MICROBLAZE", Const, 11}, - {"EM_MIPS", Const, 0}, - {"EM_MIPS_RS3_LE", Const, 0}, - {"EM_MIPS_RS4_BE", Const, 0}, - {"EM_MIPS_X", Const, 0}, - {"EM_MMA", Const, 0}, - {"EM_MMDSP_PLUS", Const, 11}, - {"EM_MMIX", Const, 11}, - {"EM_MN10200", Const, 11}, - {"EM_MN10300", Const, 11}, - {"EM_MOXIE", Const, 11}, - {"EM_MSP430", Const, 11}, - {"EM_NCPU", Const, 0}, - {"EM_NDR1", Const, 0}, - {"EM_NDS32", Const, 11}, - {"EM_NONE", Const, 0}, - {"EM_NORC", Const, 11}, - {"EM_NS32K", Const, 11}, - {"EM_OPEN8", Const, 11}, - {"EM_OPENRISC", Const, 11}, - {"EM_PARISC", Const, 0}, - {"EM_PCP", Const, 0}, - {"EM_PDP10", Const, 11}, - {"EM_PDP11", Const, 11}, - {"EM_PDSP", Const, 11}, - {"EM_PJ", Const, 11}, - {"EM_PPC", Const, 0}, - {"EM_PPC64", Const, 0}, - {"EM_PRISM", Const, 11}, - {"EM_QDSP6", Const, 11}, - {"EM_R32C", Const, 11}, - {"EM_RCE", Const, 0}, - {"EM_RH32", Const, 0}, - {"EM_RISCV", Const, 11}, - {"EM_RL78", Const, 11}, - {"EM_RS08", Const, 11}, - {"EM_RX", Const, 11}, - {"EM_S370", Const, 0}, - {"EM_S390", Const, 0}, - {"EM_SCORE7", Const, 11}, - {"EM_SEP", Const, 11}, - {"EM_SE_C17", Const, 11}, - {"EM_SE_C33", Const, 11}, - {"EM_SH", Const, 0}, - {"EM_SHARC", Const, 11}, - {"EM_SLE9X", Const, 11}, - {"EM_SNP1K", Const, 11}, - {"EM_SPARC", Const, 0}, - {"EM_SPARC32PLUS", Const, 0}, - {"EM_SPARCV9", Const, 0}, - {"EM_ST100", Const, 0}, - {"EM_ST19", Const, 11}, - {"EM_ST200", Const, 11}, - {"EM_ST7", Const, 11}, - {"EM_ST9PLUS", Const, 11}, - {"EM_STARCORE", Const, 0}, - {"EM_STM8", Const, 11}, - {"EM_STXP7X", Const, 11}, - {"EM_SVX", Const, 11}, - {"EM_TILE64", Const, 11}, - {"EM_TILEGX", Const, 11}, - {"EM_TILEPRO", Const, 11}, - {"EM_TINYJ", Const, 0}, - {"EM_TI_ARP32", Const, 11}, - {"EM_TI_C2000", Const, 11}, - {"EM_TI_C5500", Const, 11}, - {"EM_TI_C6000", Const, 11}, - {"EM_TI_PRU", Const, 11}, - {"EM_TMM_GPP", Const, 11}, - {"EM_TPC", Const, 11}, - {"EM_TRICORE", Const, 0}, - {"EM_TRIMEDIA", Const, 11}, - {"EM_TSK3000", Const, 11}, - {"EM_UNICORE", Const, 11}, - {"EM_V800", Const, 0}, - {"EM_V850", Const, 11}, - {"EM_VAX", Const, 11}, - {"EM_VIDEOCORE", Const, 11}, - {"EM_VIDEOCORE3", Const, 11}, - {"EM_VIDEOCORE5", Const, 11}, - {"EM_VISIUM", Const, 11}, - {"EM_VPP500", Const, 0}, - {"EM_X86_64", Const, 0}, - {"EM_XCORE", Const, 11}, - {"EM_XGATE", Const, 11}, - {"EM_XIMO16", Const, 11}, - {"EM_XTENSA", Const, 11}, - {"EM_Z80", Const, 11}, - {"EM_ZSP", Const, 11}, - {"ET_CORE", Const, 0}, - {"ET_DYN", Const, 0}, - {"ET_EXEC", Const, 0}, - {"ET_HIOS", Const, 0}, - {"ET_HIPROC", Const, 0}, - {"ET_LOOS", Const, 0}, - {"ET_LOPROC", Const, 0}, - {"ET_NONE", Const, 0}, - {"ET_REL", Const, 0}, - {"EV_CURRENT", Const, 0}, - {"EV_NONE", Const, 0}, - {"ErrNoSymbols", Var, 4}, - {"File", Type, 0}, - {"File.FileHeader", Field, 0}, - {"File.Progs", Field, 0}, - {"File.Sections", Field, 0}, - {"FileHeader", Type, 0}, - {"FileHeader.ABIVersion", Field, 0}, - {"FileHeader.ByteOrder", Field, 0}, - {"FileHeader.Class", Field, 0}, - {"FileHeader.Data", Field, 0}, - {"FileHeader.Entry", Field, 1}, - {"FileHeader.Machine", Field, 0}, - {"FileHeader.OSABI", Field, 0}, - {"FileHeader.Type", Field, 0}, - {"FileHeader.Version", Field, 0}, - {"FormatError", Type, 0}, - {"Header32", Type, 0}, - {"Header32.Ehsize", Field, 0}, - {"Header32.Entry", Field, 0}, - {"Header32.Flags", Field, 0}, - {"Header32.Ident", Field, 0}, - {"Header32.Machine", Field, 0}, - {"Header32.Phentsize", Field, 0}, - {"Header32.Phnum", Field, 0}, - {"Header32.Phoff", Field, 0}, - 
{"Header32.Shentsize", Field, 0}, - {"Header32.Shnum", Field, 0}, - {"Header32.Shoff", Field, 0}, - {"Header32.Shstrndx", Field, 0}, - {"Header32.Type", Field, 0}, - {"Header32.Version", Field, 0}, - {"Header64", Type, 0}, - {"Header64.Ehsize", Field, 0}, - {"Header64.Entry", Field, 0}, - {"Header64.Flags", Field, 0}, - {"Header64.Ident", Field, 0}, - {"Header64.Machine", Field, 0}, - {"Header64.Phentsize", Field, 0}, - {"Header64.Phnum", Field, 0}, - {"Header64.Phoff", Field, 0}, - {"Header64.Shentsize", Field, 0}, - {"Header64.Shnum", Field, 0}, - {"Header64.Shoff", Field, 0}, - {"Header64.Shstrndx", Field, 0}, - {"Header64.Type", Field, 0}, - {"Header64.Version", Field, 0}, - {"ImportedSymbol", Type, 0}, - {"ImportedSymbol.Library", Field, 0}, - {"ImportedSymbol.Name", Field, 0}, - {"ImportedSymbol.Version", Field, 0}, - {"Machine", Type, 0}, - {"NT_FPREGSET", Const, 0}, - {"NT_PRPSINFO", Const, 0}, - {"NT_PRSTATUS", Const, 0}, - {"NType", Type, 0}, - {"NewFile", Func, 0}, - {"OSABI", Type, 0}, - {"Open", Func, 0}, - {"PF_MASKOS", Const, 0}, - {"PF_MASKPROC", Const, 0}, - {"PF_R", Const, 0}, - {"PF_W", Const, 0}, - {"PF_X", Const, 0}, - {"PT_AARCH64_ARCHEXT", Const, 16}, - {"PT_AARCH64_UNWIND", Const, 16}, - {"PT_ARM_ARCHEXT", Const, 16}, - {"PT_ARM_EXIDX", Const, 16}, - {"PT_DYNAMIC", Const, 0}, - {"PT_GNU_EH_FRAME", Const, 16}, - {"PT_GNU_MBIND_HI", Const, 16}, - {"PT_GNU_MBIND_LO", Const, 16}, - {"PT_GNU_PROPERTY", Const, 16}, - {"PT_GNU_RELRO", Const, 16}, - {"PT_GNU_STACK", Const, 16}, - {"PT_HIOS", Const, 0}, - {"PT_HIPROC", Const, 0}, - {"PT_INTERP", Const, 0}, - {"PT_LOAD", Const, 0}, - {"PT_LOOS", Const, 0}, - {"PT_LOPROC", Const, 0}, - {"PT_MIPS_ABIFLAGS", Const, 16}, - {"PT_MIPS_OPTIONS", Const, 16}, - {"PT_MIPS_REGINFO", Const, 16}, - {"PT_MIPS_RTPROC", Const, 16}, - {"PT_NOTE", Const, 0}, - {"PT_NULL", Const, 0}, - {"PT_OPENBSD_BOOTDATA", Const, 16}, - {"PT_OPENBSD_NOBTCFI", Const, 23}, - {"PT_OPENBSD_RANDOMIZE", Const, 16}, - {"PT_OPENBSD_WXNEEDED", Const, 16}, - {"PT_PAX_FLAGS", Const, 16}, - {"PT_PHDR", Const, 0}, - {"PT_RISCV_ATTRIBUTES", Const, 25}, - {"PT_S390_PGSTE", Const, 16}, - {"PT_SHLIB", Const, 0}, - {"PT_SUNWSTACK", Const, 16}, - {"PT_SUNW_EH_FRAME", Const, 16}, - {"PT_TLS", Const, 0}, - {"Prog", Type, 0}, - {"Prog.ProgHeader", Field, 0}, - {"Prog.ReaderAt", Field, 0}, - {"Prog32", Type, 0}, - {"Prog32.Align", Field, 0}, - {"Prog32.Filesz", Field, 0}, - {"Prog32.Flags", Field, 0}, - {"Prog32.Memsz", Field, 0}, - {"Prog32.Off", Field, 0}, - {"Prog32.Paddr", Field, 0}, - {"Prog32.Type", Field, 0}, - {"Prog32.Vaddr", Field, 0}, - {"Prog64", Type, 0}, - {"Prog64.Align", Field, 0}, - {"Prog64.Filesz", Field, 0}, - {"Prog64.Flags", Field, 0}, - {"Prog64.Memsz", Field, 0}, - {"Prog64.Off", Field, 0}, - {"Prog64.Paddr", Field, 0}, - {"Prog64.Type", Field, 0}, - {"Prog64.Vaddr", Field, 0}, - {"ProgFlag", Type, 0}, - {"ProgHeader", Type, 0}, - {"ProgHeader.Align", Field, 0}, - {"ProgHeader.Filesz", Field, 0}, - {"ProgHeader.Flags", Field, 0}, - {"ProgHeader.Memsz", Field, 0}, - {"ProgHeader.Off", Field, 0}, - {"ProgHeader.Paddr", Field, 0}, - {"ProgHeader.Type", Field, 0}, - {"ProgHeader.Vaddr", Field, 0}, - {"ProgType", Type, 0}, - {"R_386", Type, 0}, - {"R_386_16", Const, 10}, - {"R_386_32", Const, 0}, - {"R_386_32PLT", Const, 10}, - {"R_386_8", Const, 10}, - {"R_386_COPY", Const, 0}, - {"R_386_GLOB_DAT", Const, 0}, - {"R_386_GOT32", Const, 0}, - {"R_386_GOT32X", Const, 10}, - {"R_386_GOTOFF", Const, 0}, - {"R_386_GOTPC", Const, 0}, - {"R_386_IRELATIVE", Const, 10}, - 
{"R_386_JMP_SLOT", Const, 0}, - {"R_386_NONE", Const, 0}, - {"R_386_PC16", Const, 10}, - {"R_386_PC32", Const, 0}, - {"R_386_PC8", Const, 10}, - {"R_386_PLT32", Const, 0}, - {"R_386_RELATIVE", Const, 0}, - {"R_386_SIZE32", Const, 10}, - {"R_386_TLS_DESC", Const, 10}, - {"R_386_TLS_DESC_CALL", Const, 10}, - {"R_386_TLS_DTPMOD32", Const, 0}, - {"R_386_TLS_DTPOFF32", Const, 0}, - {"R_386_TLS_GD", Const, 0}, - {"R_386_TLS_GD_32", Const, 0}, - {"R_386_TLS_GD_CALL", Const, 0}, - {"R_386_TLS_GD_POP", Const, 0}, - {"R_386_TLS_GD_PUSH", Const, 0}, - {"R_386_TLS_GOTDESC", Const, 10}, - {"R_386_TLS_GOTIE", Const, 0}, - {"R_386_TLS_IE", Const, 0}, - {"R_386_TLS_IE_32", Const, 0}, - {"R_386_TLS_LDM", Const, 0}, - {"R_386_TLS_LDM_32", Const, 0}, - {"R_386_TLS_LDM_CALL", Const, 0}, - {"R_386_TLS_LDM_POP", Const, 0}, - {"R_386_TLS_LDM_PUSH", Const, 0}, - {"R_386_TLS_LDO_32", Const, 0}, - {"R_386_TLS_LE", Const, 0}, - {"R_386_TLS_LE_32", Const, 0}, - {"R_386_TLS_TPOFF", Const, 0}, - {"R_386_TLS_TPOFF32", Const, 0}, - {"R_390", Type, 7}, - {"R_390_12", Const, 7}, - {"R_390_16", Const, 7}, - {"R_390_20", Const, 7}, - {"R_390_32", Const, 7}, - {"R_390_64", Const, 7}, - {"R_390_8", Const, 7}, - {"R_390_COPY", Const, 7}, - {"R_390_GLOB_DAT", Const, 7}, - {"R_390_GOT12", Const, 7}, - {"R_390_GOT16", Const, 7}, - {"R_390_GOT20", Const, 7}, - {"R_390_GOT32", Const, 7}, - {"R_390_GOT64", Const, 7}, - {"R_390_GOTENT", Const, 7}, - {"R_390_GOTOFF", Const, 7}, - {"R_390_GOTOFF16", Const, 7}, - {"R_390_GOTOFF64", Const, 7}, - {"R_390_GOTPC", Const, 7}, - {"R_390_GOTPCDBL", Const, 7}, - {"R_390_GOTPLT12", Const, 7}, - {"R_390_GOTPLT16", Const, 7}, - {"R_390_GOTPLT20", Const, 7}, - {"R_390_GOTPLT32", Const, 7}, - {"R_390_GOTPLT64", Const, 7}, - {"R_390_GOTPLTENT", Const, 7}, - {"R_390_GOTPLTOFF16", Const, 7}, - {"R_390_GOTPLTOFF32", Const, 7}, - {"R_390_GOTPLTOFF64", Const, 7}, - {"R_390_JMP_SLOT", Const, 7}, - {"R_390_NONE", Const, 7}, - {"R_390_PC16", Const, 7}, - {"R_390_PC16DBL", Const, 7}, - {"R_390_PC32", Const, 7}, - {"R_390_PC32DBL", Const, 7}, - {"R_390_PC64", Const, 7}, - {"R_390_PLT16DBL", Const, 7}, - {"R_390_PLT32", Const, 7}, - {"R_390_PLT32DBL", Const, 7}, - {"R_390_PLT64", Const, 7}, - {"R_390_RELATIVE", Const, 7}, - {"R_390_TLS_DTPMOD", Const, 7}, - {"R_390_TLS_DTPOFF", Const, 7}, - {"R_390_TLS_GD32", Const, 7}, - {"R_390_TLS_GD64", Const, 7}, - {"R_390_TLS_GDCALL", Const, 7}, - {"R_390_TLS_GOTIE12", Const, 7}, - {"R_390_TLS_GOTIE20", Const, 7}, - {"R_390_TLS_GOTIE32", Const, 7}, - {"R_390_TLS_GOTIE64", Const, 7}, - {"R_390_TLS_IE32", Const, 7}, - {"R_390_TLS_IE64", Const, 7}, - {"R_390_TLS_IEENT", Const, 7}, - {"R_390_TLS_LDCALL", Const, 7}, - {"R_390_TLS_LDM32", Const, 7}, - {"R_390_TLS_LDM64", Const, 7}, - {"R_390_TLS_LDO32", Const, 7}, - {"R_390_TLS_LDO64", Const, 7}, - {"R_390_TLS_LE32", Const, 7}, - {"R_390_TLS_LE64", Const, 7}, - {"R_390_TLS_LOAD", Const, 7}, - {"R_390_TLS_TPOFF", Const, 7}, - {"R_AARCH64", Type, 4}, - {"R_AARCH64_ABS16", Const, 4}, - {"R_AARCH64_ABS32", Const, 4}, - {"R_AARCH64_ABS64", Const, 4}, - {"R_AARCH64_ADD_ABS_LO12_NC", Const, 4}, - {"R_AARCH64_ADR_GOT_PAGE", Const, 4}, - {"R_AARCH64_ADR_PREL_LO21", Const, 4}, - {"R_AARCH64_ADR_PREL_PG_HI21", Const, 4}, - {"R_AARCH64_ADR_PREL_PG_HI21_NC", Const, 4}, - {"R_AARCH64_CALL26", Const, 4}, - {"R_AARCH64_CONDBR19", Const, 4}, - {"R_AARCH64_COPY", Const, 4}, - {"R_AARCH64_GLOB_DAT", Const, 4}, - {"R_AARCH64_GOT_LD_PREL19", Const, 4}, - {"R_AARCH64_IRELATIVE", Const, 4}, - {"R_AARCH64_JUMP26", Const, 4}, - {"R_AARCH64_JUMP_SLOT", 
Const, 4}, - {"R_AARCH64_LD64_GOTOFF_LO15", Const, 10}, - {"R_AARCH64_LD64_GOTPAGE_LO15", Const, 10}, - {"R_AARCH64_LD64_GOT_LO12_NC", Const, 4}, - {"R_AARCH64_LDST128_ABS_LO12_NC", Const, 4}, - {"R_AARCH64_LDST16_ABS_LO12_NC", Const, 4}, - {"R_AARCH64_LDST32_ABS_LO12_NC", Const, 4}, - {"R_AARCH64_LDST64_ABS_LO12_NC", Const, 4}, - {"R_AARCH64_LDST8_ABS_LO12_NC", Const, 4}, - {"R_AARCH64_LD_PREL_LO19", Const, 4}, - {"R_AARCH64_MOVW_SABS_G0", Const, 4}, - {"R_AARCH64_MOVW_SABS_G1", Const, 4}, - {"R_AARCH64_MOVW_SABS_G2", Const, 4}, - {"R_AARCH64_MOVW_UABS_G0", Const, 4}, - {"R_AARCH64_MOVW_UABS_G0_NC", Const, 4}, - {"R_AARCH64_MOVW_UABS_G1", Const, 4}, - {"R_AARCH64_MOVW_UABS_G1_NC", Const, 4}, - {"R_AARCH64_MOVW_UABS_G2", Const, 4}, - {"R_AARCH64_MOVW_UABS_G2_NC", Const, 4}, - {"R_AARCH64_MOVW_UABS_G3", Const, 4}, - {"R_AARCH64_NONE", Const, 4}, - {"R_AARCH64_NULL", Const, 4}, - {"R_AARCH64_P32_ABS16", Const, 4}, - {"R_AARCH64_P32_ABS32", Const, 4}, - {"R_AARCH64_P32_ADD_ABS_LO12_NC", Const, 4}, - {"R_AARCH64_P32_ADR_GOT_PAGE", Const, 4}, - {"R_AARCH64_P32_ADR_PREL_LO21", Const, 4}, - {"R_AARCH64_P32_ADR_PREL_PG_HI21", Const, 4}, - {"R_AARCH64_P32_CALL26", Const, 4}, - {"R_AARCH64_P32_CONDBR19", Const, 4}, - {"R_AARCH64_P32_COPY", Const, 4}, - {"R_AARCH64_P32_GLOB_DAT", Const, 4}, - {"R_AARCH64_P32_GOT_LD_PREL19", Const, 4}, - {"R_AARCH64_P32_IRELATIVE", Const, 4}, - {"R_AARCH64_P32_JUMP26", Const, 4}, - {"R_AARCH64_P32_JUMP_SLOT", Const, 4}, - {"R_AARCH64_P32_LD32_GOT_LO12_NC", Const, 4}, - {"R_AARCH64_P32_LDST128_ABS_LO12_NC", Const, 4}, - {"R_AARCH64_P32_LDST16_ABS_LO12_NC", Const, 4}, - {"R_AARCH64_P32_LDST32_ABS_LO12_NC", Const, 4}, - {"R_AARCH64_P32_LDST64_ABS_LO12_NC", Const, 4}, - {"R_AARCH64_P32_LDST8_ABS_LO12_NC", Const, 4}, - {"R_AARCH64_P32_LD_PREL_LO19", Const, 4}, - {"R_AARCH64_P32_MOVW_SABS_G0", Const, 4}, - {"R_AARCH64_P32_MOVW_UABS_G0", Const, 4}, - {"R_AARCH64_P32_MOVW_UABS_G0_NC", Const, 4}, - {"R_AARCH64_P32_MOVW_UABS_G1", Const, 4}, - {"R_AARCH64_P32_PREL16", Const, 4}, - {"R_AARCH64_P32_PREL32", Const, 4}, - {"R_AARCH64_P32_RELATIVE", Const, 4}, - {"R_AARCH64_P32_TLSDESC", Const, 4}, - {"R_AARCH64_P32_TLSDESC_ADD_LO12_NC", Const, 4}, - {"R_AARCH64_P32_TLSDESC_ADR_PAGE21", Const, 4}, - {"R_AARCH64_P32_TLSDESC_ADR_PREL21", Const, 4}, - {"R_AARCH64_P32_TLSDESC_CALL", Const, 4}, - {"R_AARCH64_P32_TLSDESC_LD32_LO12_NC", Const, 4}, - {"R_AARCH64_P32_TLSDESC_LD_PREL19", Const, 4}, - {"R_AARCH64_P32_TLSGD_ADD_LO12_NC", Const, 4}, - {"R_AARCH64_P32_TLSGD_ADR_PAGE21", Const, 4}, - {"R_AARCH64_P32_TLSIE_ADR_GOTTPREL_PAGE21", Const, 4}, - {"R_AARCH64_P32_TLSIE_LD32_GOTTPREL_LO12_NC", Const, 4}, - {"R_AARCH64_P32_TLSIE_LD_GOTTPREL_PREL19", Const, 4}, - {"R_AARCH64_P32_TLSLE_ADD_TPREL_HI12", Const, 4}, - {"R_AARCH64_P32_TLSLE_ADD_TPREL_LO12", Const, 4}, - {"R_AARCH64_P32_TLSLE_ADD_TPREL_LO12_NC", Const, 4}, - {"R_AARCH64_P32_TLSLE_MOVW_TPREL_G0", Const, 4}, - {"R_AARCH64_P32_TLSLE_MOVW_TPREL_G0_NC", Const, 4}, - {"R_AARCH64_P32_TLSLE_MOVW_TPREL_G1", Const, 4}, - {"R_AARCH64_P32_TLS_DTPMOD", Const, 4}, - {"R_AARCH64_P32_TLS_DTPREL", Const, 4}, - {"R_AARCH64_P32_TLS_TPREL", Const, 4}, - {"R_AARCH64_P32_TSTBR14", Const, 4}, - {"R_AARCH64_PREL16", Const, 4}, - {"R_AARCH64_PREL32", Const, 4}, - {"R_AARCH64_PREL64", Const, 4}, - {"R_AARCH64_RELATIVE", Const, 4}, - {"R_AARCH64_TLSDESC", Const, 4}, - {"R_AARCH64_TLSDESC_ADD", Const, 4}, - {"R_AARCH64_TLSDESC_ADD_LO12_NC", Const, 4}, - {"R_AARCH64_TLSDESC_ADR_PAGE21", Const, 4}, - {"R_AARCH64_TLSDESC_ADR_PREL21", Const, 4}, - 
{"R_AARCH64_TLSDESC_CALL", Const, 4}, - {"R_AARCH64_TLSDESC_LD64_LO12_NC", Const, 4}, - {"R_AARCH64_TLSDESC_LDR", Const, 4}, - {"R_AARCH64_TLSDESC_LD_PREL19", Const, 4}, - {"R_AARCH64_TLSDESC_OFF_G0_NC", Const, 4}, - {"R_AARCH64_TLSDESC_OFF_G1", Const, 4}, - {"R_AARCH64_TLSGD_ADD_LO12_NC", Const, 4}, - {"R_AARCH64_TLSGD_ADR_PAGE21", Const, 4}, - {"R_AARCH64_TLSGD_ADR_PREL21", Const, 10}, - {"R_AARCH64_TLSGD_MOVW_G0_NC", Const, 10}, - {"R_AARCH64_TLSGD_MOVW_G1", Const, 10}, - {"R_AARCH64_TLSIE_ADR_GOTTPREL_PAGE21", Const, 4}, - {"R_AARCH64_TLSIE_LD64_GOTTPREL_LO12_NC", Const, 4}, - {"R_AARCH64_TLSIE_LD_GOTTPREL_PREL19", Const, 4}, - {"R_AARCH64_TLSIE_MOVW_GOTTPREL_G0_NC", Const, 4}, - {"R_AARCH64_TLSIE_MOVW_GOTTPREL_G1", Const, 4}, - {"R_AARCH64_TLSLD_ADR_PAGE21", Const, 10}, - {"R_AARCH64_TLSLD_ADR_PREL21", Const, 10}, - {"R_AARCH64_TLSLD_LDST128_DTPREL_LO12", Const, 10}, - {"R_AARCH64_TLSLD_LDST128_DTPREL_LO12_NC", Const, 10}, - {"R_AARCH64_TLSLE_ADD_TPREL_HI12", Const, 4}, - {"R_AARCH64_TLSLE_ADD_TPREL_LO12", Const, 4}, - {"R_AARCH64_TLSLE_ADD_TPREL_LO12_NC", Const, 4}, - {"R_AARCH64_TLSLE_LDST128_TPREL_LO12", Const, 10}, - {"R_AARCH64_TLSLE_LDST128_TPREL_LO12_NC", Const, 10}, - {"R_AARCH64_TLSLE_MOVW_TPREL_G0", Const, 4}, - {"R_AARCH64_TLSLE_MOVW_TPREL_G0_NC", Const, 4}, - {"R_AARCH64_TLSLE_MOVW_TPREL_G1", Const, 4}, - {"R_AARCH64_TLSLE_MOVW_TPREL_G1_NC", Const, 4}, - {"R_AARCH64_TLSLE_MOVW_TPREL_G2", Const, 4}, - {"R_AARCH64_TLS_DTPMOD64", Const, 4}, - {"R_AARCH64_TLS_DTPREL64", Const, 4}, - {"R_AARCH64_TLS_TPREL64", Const, 4}, - {"R_AARCH64_TSTBR14", Const, 4}, - {"R_ALPHA", Type, 0}, - {"R_ALPHA_BRADDR", Const, 0}, - {"R_ALPHA_COPY", Const, 0}, - {"R_ALPHA_GLOB_DAT", Const, 0}, - {"R_ALPHA_GPDISP", Const, 0}, - {"R_ALPHA_GPREL32", Const, 0}, - {"R_ALPHA_GPRELHIGH", Const, 0}, - {"R_ALPHA_GPRELLOW", Const, 0}, - {"R_ALPHA_GPVALUE", Const, 0}, - {"R_ALPHA_HINT", Const, 0}, - {"R_ALPHA_IMMED_BR_HI32", Const, 0}, - {"R_ALPHA_IMMED_GP_16", Const, 0}, - {"R_ALPHA_IMMED_GP_HI32", Const, 0}, - {"R_ALPHA_IMMED_LO32", Const, 0}, - {"R_ALPHA_IMMED_SCN_HI32", Const, 0}, - {"R_ALPHA_JMP_SLOT", Const, 0}, - {"R_ALPHA_LITERAL", Const, 0}, - {"R_ALPHA_LITUSE", Const, 0}, - {"R_ALPHA_NONE", Const, 0}, - {"R_ALPHA_OP_PRSHIFT", Const, 0}, - {"R_ALPHA_OP_PSUB", Const, 0}, - {"R_ALPHA_OP_PUSH", Const, 0}, - {"R_ALPHA_OP_STORE", Const, 0}, - {"R_ALPHA_REFLONG", Const, 0}, - {"R_ALPHA_REFQUAD", Const, 0}, - {"R_ALPHA_RELATIVE", Const, 0}, - {"R_ALPHA_SREL16", Const, 0}, - {"R_ALPHA_SREL32", Const, 0}, - {"R_ALPHA_SREL64", Const, 0}, - {"R_ARM", Type, 0}, - {"R_ARM_ABS12", Const, 0}, - {"R_ARM_ABS16", Const, 0}, - {"R_ARM_ABS32", Const, 0}, - {"R_ARM_ABS32_NOI", Const, 10}, - {"R_ARM_ABS8", Const, 0}, - {"R_ARM_ALU_PCREL_15_8", Const, 10}, - {"R_ARM_ALU_PCREL_23_15", Const, 10}, - {"R_ARM_ALU_PCREL_7_0", Const, 10}, - {"R_ARM_ALU_PC_G0", Const, 10}, - {"R_ARM_ALU_PC_G0_NC", Const, 10}, - {"R_ARM_ALU_PC_G1", Const, 10}, - {"R_ARM_ALU_PC_G1_NC", Const, 10}, - {"R_ARM_ALU_PC_G2", Const, 10}, - {"R_ARM_ALU_SBREL_19_12_NC", Const, 10}, - {"R_ARM_ALU_SBREL_27_20_CK", Const, 10}, - {"R_ARM_ALU_SB_G0", Const, 10}, - {"R_ARM_ALU_SB_G0_NC", Const, 10}, - {"R_ARM_ALU_SB_G1", Const, 10}, - {"R_ARM_ALU_SB_G1_NC", Const, 10}, - {"R_ARM_ALU_SB_G2", Const, 10}, - {"R_ARM_AMP_VCALL9", Const, 0}, - {"R_ARM_BASE_ABS", Const, 10}, - {"R_ARM_CALL", Const, 10}, - {"R_ARM_COPY", Const, 0}, - {"R_ARM_GLOB_DAT", Const, 0}, - {"R_ARM_GNU_VTENTRY", Const, 0}, - {"R_ARM_GNU_VTINHERIT", Const, 0}, - {"R_ARM_GOT32", Const, 0}, - 
{"R_ARM_GOTOFF", Const, 0}, - {"R_ARM_GOTOFF12", Const, 10}, - {"R_ARM_GOTPC", Const, 0}, - {"R_ARM_GOTRELAX", Const, 10}, - {"R_ARM_GOT_ABS", Const, 10}, - {"R_ARM_GOT_BREL12", Const, 10}, - {"R_ARM_GOT_PREL", Const, 10}, - {"R_ARM_IRELATIVE", Const, 10}, - {"R_ARM_JUMP24", Const, 10}, - {"R_ARM_JUMP_SLOT", Const, 0}, - {"R_ARM_LDC_PC_G0", Const, 10}, - {"R_ARM_LDC_PC_G1", Const, 10}, - {"R_ARM_LDC_PC_G2", Const, 10}, - {"R_ARM_LDC_SB_G0", Const, 10}, - {"R_ARM_LDC_SB_G1", Const, 10}, - {"R_ARM_LDC_SB_G2", Const, 10}, - {"R_ARM_LDRS_PC_G0", Const, 10}, - {"R_ARM_LDRS_PC_G1", Const, 10}, - {"R_ARM_LDRS_PC_G2", Const, 10}, - {"R_ARM_LDRS_SB_G0", Const, 10}, - {"R_ARM_LDRS_SB_G1", Const, 10}, - {"R_ARM_LDRS_SB_G2", Const, 10}, - {"R_ARM_LDR_PC_G1", Const, 10}, - {"R_ARM_LDR_PC_G2", Const, 10}, - {"R_ARM_LDR_SBREL_11_10_NC", Const, 10}, - {"R_ARM_LDR_SB_G0", Const, 10}, - {"R_ARM_LDR_SB_G1", Const, 10}, - {"R_ARM_LDR_SB_G2", Const, 10}, - {"R_ARM_ME_TOO", Const, 10}, - {"R_ARM_MOVT_ABS", Const, 10}, - {"R_ARM_MOVT_BREL", Const, 10}, - {"R_ARM_MOVT_PREL", Const, 10}, - {"R_ARM_MOVW_ABS_NC", Const, 10}, - {"R_ARM_MOVW_BREL", Const, 10}, - {"R_ARM_MOVW_BREL_NC", Const, 10}, - {"R_ARM_MOVW_PREL_NC", Const, 10}, - {"R_ARM_NONE", Const, 0}, - {"R_ARM_PC13", Const, 0}, - {"R_ARM_PC24", Const, 0}, - {"R_ARM_PLT32", Const, 0}, - {"R_ARM_PLT32_ABS", Const, 10}, - {"R_ARM_PREL31", Const, 10}, - {"R_ARM_PRIVATE_0", Const, 10}, - {"R_ARM_PRIVATE_1", Const, 10}, - {"R_ARM_PRIVATE_10", Const, 10}, - {"R_ARM_PRIVATE_11", Const, 10}, - {"R_ARM_PRIVATE_12", Const, 10}, - {"R_ARM_PRIVATE_13", Const, 10}, - {"R_ARM_PRIVATE_14", Const, 10}, - {"R_ARM_PRIVATE_15", Const, 10}, - {"R_ARM_PRIVATE_2", Const, 10}, - {"R_ARM_PRIVATE_3", Const, 10}, - {"R_ARM_PRIVATE_4", Const, 10}, - {"R_ARM_PRIVATE_5", Const, 10}, - {"R_ARM_PRIVATE_6", Const, 10}, - {"R_ARM_PRIVATE_7", Const, 10}, - {"R_ARM_PRIVATE_8", Const, 10}, - {"R_ARM_PRIVATE_9", Const, 10}, - {"R_ARM_RABS32", Const, 0}, - {"R_ARM_RBASE", Const, 0}, - {"R_ARM_REL32", Const, 0}, - {"R_ARM_REL32_NOI", Const, 10}, - {"R_ARM_RELATIVE", Const, 0}, - {"R_ARM_RPC24", Const, 0}, - {"R_ARM_RREL32", Const, 0}, - {"R_ARM_RSBREL32", Const, 0}, - {"R_ARM_RXPC25", Const, 10}, - {"R_ARM_SBREL31", Const, 10}, - {"R_ARM_SBREL32", Const, 0}, - {"R_ARM_SWI24", Const, 0}, - {"R_ARM_TARGET1", Const, 10}, - {"R_ARM_TARGET2", Const, 10}, - {"R_ARM_THM_ABS5", Const, 0}, - {"R_ARM_THM_ALU_ABS_G0_NC", Const, 10}, - {"R_ARM_THM_ALU_ABS_G1_NC", Const, 10}, - {"R_ARM_THM_ALU_ABS_G2_NC", Const, 10}, - {"R_ARM_THM_ALU_ABS_G3", Const, 10}, - {"R_ARM_THM_ALU_PREL_11_0", Const, 10}, - {"R_ARM_THM_GOT_BREL12", Const, 10}, - {"R_ARM_THM_JUMP11", Const, 10}, - {"R_ARM_THM_JUMP19", Const, 10}, - {"R_ARM_THM_JUMP24", Const, 10}, - {"R_ARM_THM_JUMP6", Const, 10}, - {"R_ARM_THM_JUMP8", Const, 10}, - {"R_ARM_THM_MOVT_ABS", Const, 10}, - {"R_ARM_THM_MOVT_BREL", Const, 10}, - {"R_ARM_THM_MOVT_PREL", Const, 10}, - {"R_ARM_THM_MOVW_ABS_NC", Const, 10}, - {"R_ARM_THM_MOVW_BREL", Const, 10}, - {"R_ARM_THM_MOVW_BREL_NC", Const, 10}, - {"R_ARM_THM_MOVW_PREL_NC", Const, 10}, - {"R_ARM_THM_PC12", Const, 10}, - {"R_ARM_THM_PC22", Const, 0}, - {"R_ARM_THM_PC8", Const, 0}, - {"R_ARM_THM_RPC22", Const, 0}, - {"R_ARM_THM_SWI8", Const, 0}, - {"R_ARM_THM_TLS_CALL", Const, 10}, - {"R_ARM_THM_TLS_DESCSEQ16", Const, 10}, - {"R_ARM_THM_TLS_DESCSEQ32", Const, 10}, - {"R_ARM_THM_XPC22", Const, 0}, - {"R_ARM_TLS_CALL", Const, 10}, - {"R_ARM_TLS_DESCSEQ", Const, 10}, - {"R_ARM_TLS_DTPMOD32", Const, 10}, - {"R_ARM_TLS_DTPOFF32", 
Const, 10}, - {"R_ARM_TLS_GD32", Const, 10}, - {"R_ARM_TLS_GOTDESC", Const, 10}, - {"R_ARM_TLS_IE12GP", Const, 10}, - {"R_ARM_TLS_IE32", Const, 10}, - {"R_ARM_TLS_LDM32", Const, 10}, - {"R_ARM_TLS_LDO12", Const, 10}, - {"R_ARM_TLS_LDO32", Const, 10}, - {"R_ARM_TLS_LE12", Const, 10}, - {"R_ARM_TLS_LE32", Const, 10}, - {"R_ARM_TLS_TPOFF32", Const, 10}, - {"R_ARM_V4BX", Const, 10}, - {"R_ARM_XPC25", Const, 0}, - {"R_INFO", Func, 0}, - {"R_INFO32", Func, 0}, - {"R_LARCH", Type, 19}, - {"R_LARCH_32", Const, 19}, - {"R_LARCH_32_PCREL", Const, 20}, - {"R_LARCH_64", Const, 19}, - {"R_LARCH_64_PCREL", Const, 22}, - {"R_LARCH_ABS64_HI12", Const, 20}, - {"R_LARCH_ABS64_LO20", Const, 20}, - {"R_LARCH_ABS_HI20", Const, 20}, - {"R_LARCH_ABS_LO12", Const, 20}, - {"R_LARCH_ADD16", Const, 19}, - {"R_LARCH_ADD24", Const, 19}, - {"R_LARCH_ADD32", Const, 19}, - {"R_LARCH_ADD6", Const, 22}, - {"R_LARCH_ADD64", Const, 19}, - {"R_LARCH_ADD8", Const, 19}, - {"R_LARCH_ADD_ULEB128", Const, 22}, - {"R_LARCH_ALIGN", Const, 22}, - {"R_LARCH_B16", Const, 20}, - {"R_LARCH_B21", Const, 20}, - {"R_LARCH_B26", Const, 20}, - {"R_LARCH_CFA", Const, 22}, - {"R_LARCH_COPY", Const, 19}, - {"R_LARCH_DELETE", Const, 22}, - {"R_LARCH_GNU_VTENTRY", Const, 20}, - {"R_LARCH_GNU_VTINHERIT", Const, 20}, - {"R_LARCH_GOT64_HI12", Const, 20}, - {"R_LARCH_GOT64_LO20", Const, 20}, - {"R_LARCH_GOT64_PC_HI12", Const, 20}, - {"R_LARCH_GOT64_PC_LO20", Const, 20}, - {"R_LARCH_GOT_HI20", Const, 20}, - {"R_LARCH_GOT_LO12", Const, 20}, - {"R_LARCH_GOT_PC_HI20", Const, 20}, - {"R_LARCH_GOT_PC_LO12", Const, 20}, - {"R_LARCH_IRELATIVE", Const, 19}, - {"R_LARCH_JUMP_SLOT", Const, 19}, - {"R_LARCH_MARK_LA", Const, 19}, - {"R_LARCH_MARK_PCREL", Const, 19}, - {"R_LARCH_NONE", Const, 19}, - {"R_LARCH_PCALA64_HI12", Const, 20}, - {"R_LARCH_PCALA64_LO20", Const, 20}, - {"R_LARCH_PCALA_HI20", Const, 20}, - {"R_LARCH_PCALA_LO12", Const, 20}, - {"R_LARCH_PCREL20_S2", Const, 22}, - {"R_LARCH_RELATIVE", Const, 19}, - {"R_LARCH_RELAX", Const, 20}, - {"R_LARCH_SOP_ADD", Const, 19}, - {"R_LARCH_SOP_AND", Const, 19}, - {"R_LARCH_SOP_ASSERT", Const, 19}, - {"R_LARCH_SOP_IF_ELSE", Const, 19}, - {"R_LARCH_SOP_NOT", Const, 19}, - {"R_LARCH_SOP_POP_32_S_0_10_10_16_S2", Const, 19}, - {"R_LARCH_SOP_POP_32_S_0_5_10_16_S2", Const, 19}, - {"R_LARCH_SOP_POP_32_S_10_12", Const, 19}, - {"R_LARCH_SOP_POP_32_S_10_16", Const, 19}, - {"R_LARCH_SOP_POP_32_S_10_16_S2", Const, 19}, - {"R_LARCH_SOP_POP_32_S_10_5", Const, 19}, - {"R_LARCH_SOP_POP_32_S_5_20", Const, 19}, - {"R_LARCH_SOP_POP_32_U", Const, 19}, - {"R_LARCH_SOP_POP_32_U_10_12", Const, 19}, - {"R_LARCH_SOP_PUSH_ABSOLUTE", Const, 19}, - {"R_LARCH_SOP_PUSH_DUP", Const, 19}, - {"R_LARCH_SOP_PUSH_GPREL", Const, 19}, - {"R_LARCH_SOP_PUSH_PCREL", Const, 19}, - {"R_LARCH_SOP_PUSH_PLT_PCREL", Const, 19}, - {"R_LARCH_SOP_PUSH_TLS_GD", Const, 19}, - {"R_LARCH_SOP_PUSH_TLS_GOT", Const, 19}, - {"R_LARCH_SOP_PUSH_TLS_TPREL", Const, 19}, - {"R_LARCH_SOP_SL", Const, 19}, - {"R_LARCH_SOP_SR", Const, 19}, - {"R_LARCH_SOP_SUB", Const, 19}, - {"R_LARCH_SUB16", Const, 19}, - {"R_LARCH_SUB24", Const, 19}, - {"R_LARCH_SUB32", Const, 19}, - {"R_LARCH_SUB6", Const, 22}, - {"R_LARCH_SUB64", Const, 19}, - {"R_LARCH_SUB8", Const, 19}, - {"R_LARCH_SUB_ULEB128", Const, 22}, - {"R_LARCH_TLS_DTPMOD32", Const, 19}, - {"R_LARCH_TLS_DTPMOD64", Const, 19}, - {"R_LARCH_TLS_DTPREL32", Const, 19}, - {"R_LARCH_TLS_DTPREL64", Const, 19}, - {"R_LARCH_TLS_GD_HI20", Const, 20}, - {"R_LARCH_TLS_GD_PC_HI20", Const, 20}, - {"R_LARCH_TLS_IE64_HI12", Const, 20}, - 
{"R_LARCH_TLS_IE64_LO20", Const, 20}, - {"R_LARCH_TLS_IE64_PC_HI12", Const, 20}, - {"R_LARCH_TLS_IE64_PC_LO20", Const, 20}, - {"R_LARCH_TLS_IE_HI20", Const, 20}, - {"R_LARCH_TLS_IE_LO12", Const, 20}, - {"R_LARCH_TLS_IE_PC_HI20", Const, 20}, - {"R_LARCH_TLS_IE_PC_LO12", Const, 20}, - {"R_LARCH_TLS_LD_HI20", Const, 20}, - {"R_LARCH_TLS_LD_PC_HI20", Const, 20}, - {"R_LARCH_TLS_LE64_HI12", Const, 20}, - {"R_LARCH_TLS_LE64_LO20", Const, 20}, - {"R_LARCH_TLS_LE_HI20", Const, 20}, - {"R_LARCH_TLS_LE_LO12", Const, 20}, - {"R_LARCH_TLS_TPREL32", Const, 19}, - {"R_LARCH_TLS_TPREL64", Const, 19}, - {"R_MIPS", Type, 6}, - {"R_MIPS_16", Const, 6}, - {"R_MIPS_26", Const, 6}, - {"R_MIPS_32", Const, 6}, - {"R_MIPS_64", Const, 6}, - {"R_MIPS_ADD_IMMEDIATE", Const, 6}, - {"R_MIPS_CALL16", Const, 6}, - {"R_MIPS_CALL_HI16", Const, 6}, - {"R_MIPS_CALL_LO16", Const, 6}, - {"R_MIPS_DELETE", Const, 6}, - {"R_MIPS_GOT16", Const, 6}, - {"R_MIPS_GOT_DISP", Const, 6}, - {"R_MIPS_GOT_HI16", Const, 6}, - {"R_MIPS_GOT_LO16", Const, 6}, - {"R_MIPS_GOT_OFST", Const, 6}, - {"R_MIPS_GOT_PAGE", Const, 6}, - {"R_MIPS_GPREL16", Const, 6}, - {"R_MIPS_GPREL32", Const, 6}, - {"R_MIPS_HI16", Const, 6}, - {"R_MIPS_HIGHER", Const, 6}, - {"R_MIPS_HIGHEST", Const, 6}, - {"R_MIPS_INSERT_A", Const, 6}, - {"R_MIPS_INSERT_B", Const, 6}, - {"R_MIPS_JALR", Const, 6}, - {"R_MIPS_LITERAL", Const, 6}, - {"R_MIPS_LO16", Const, 6}, - {"R_MIPS_NONE", Const, 6}, - {"R_MIPS_PC16", Const, 6}, - {"R_MIPS_PC32", Const, 22}, - {"R_MIPS_PJUMP", Const, 6}, - {"R_MIPS_REL16", Const, 6}, - {"R_MIPS_REL32", Const, 6}, - {"R_MIPS_RELGOT", Const, 6}, - {"R_MIPS_SCN_DISP", Const, 6}, - {"R_MIPS_SHIFT5", Const, 6}, - {"R_MIPS_SHIFT6", Const, 6}, - {"R_MIPS_SUB", Const, 6}, - {"R_MIPS_TLS_DTPMOD32", Const, 6}, - {"R_MIPS_TLS_DTPMOD64", Const, 6}, - {"R_MIPS_TLS_DTPREL32", Const, 6}, - {"R_MIPS_TLS_DTPREL64", Const, 6}, - {"R_MIPS_TLS_DTPREL_HI16", Const, 6}, - {"R_MIPS_TLS_DTPREL_LO16", Const, 6}, - {"R_MIPS_TLS_GD", Const, 6}, - {"R_MIPS_TLS_GOTTPREL", Const, 6}, - {"R_MIPS_TLS_LDM", Const, 6}, - {"R_MIPS_TLS_TPREL32", Const, 6}, - {"R_MIPS_TLS_TPREL64", Const, 6}, - {"R_MIPS_TLS_TPREL_HI16", Const, 6}, - {"R_MIPS_TLS_TPREL_LO16", Const, 6}, - {"R_PPC", Type, 0}, - {"R_PPC64", Type, 5}, - {"R_PPC64_ADDR14", Const, 5}, - {"R_PPC64_ADDR14_BRNTAKEN", Const, 5}, - {"R_PPC64_ADDR14_BRTAKEN", Const, 5}, - {"R_PPC64_ADDR16", Const, 5}, - {"R_PPC64_ADDR16_DS", Const, 5}, - {"R_PPC64_ADDR16_HA", Const, 5}, - {"R_PPC64_ADDR16_HI", Const, 5}, - {"R_PPC64_ADDR16_HIGH", Const, 10}, - {"R_PPC64_ADDR16_HIGHA", Const, 10}, - {"R_PPC64_ADDR16_HIGHER", Const, 5}, - {"R_PPC64_ADDR16_HIGHER34", Const, 20}, - {"R_PPC64_ADDR16_HIGHERA", Const, 5}, - {"R_PPC64_ADDR16_HIGHERA34", Const, 20}, - {"R_PPC64_ADDR16_HIGHEST", Const, 5}, - {"R_PPC64_ADDR16_HIGHEST34", Const, 20}, - {"R_PPC64_ADDR16_HIGHESTA", Const, 5}, - {"R_PPC64_ADDR16_HIGHESTA34", Const, 20}, - {"R_PPC64_ADDR16_LO", Const, 5}, - {"R_PPC64_ADDR16_LO_DS", Const, 5}, - {"R_PPC64_ADDR24", Const, 5}, - {"R_PPC64_ADDR32", Const, 5}, - {"R_PPC64_ADDR64", Const, 5}, - {"R_PPC64_ADDR64_LOCAL", Const, 10}, - {"R_PPC64_COPY", Const, 20}, - {"R_PPC64_D28", Const, 20}, - {"R_PPC64_D34", Const, 20}, - {"R_PPC64_D34_HA30", Const, 20}, - {"R_PPC64_D34_HI30", Const, 20}, - {"R_PPC64_D34_LO", Const, 20}, - {"R_PPC64_DTPMOD64", Const, 5}, - {"R_PPC64_DTPREL16", Const, 5}, - {"R_PPC64_DTPREL16_DS", Const, 5}, - {"R_PPC64_DTPREL16_HA", Const, 5}, - {"R_PPC64_DTPREL16_HI", Const, 5}, - {"R_PPC64_DTPREL16_HIGH", Const, 10}, - 
{"R_PPC64_DTPREL16_HIGHA", Const, 10}, - {"R_PPC64_DTPREL16_HIGHER", Const, 5}, - {"R_PPC64_DTPREL16_HIGHERA", Const, 5}, - {"R_PPC64_DTPREL16_HIGHEST", Const, 5}, - {"R_PPC64_DTPREL16_HIGHESTA", Const, 5}, - {"R_PPC64_DTPREL16_LO", Const, 5}, - {"R_PPC64_DTPREL16_LO_DS", Const, 5}, - {"R_PPC64_DTPREL34", Const, 20}, - {"R_PPC64_DTPREL64", Const, 5}, - {"R_PPC64_ENTRY", Const, 10}, - {"R_PPC64_GLOB_DAT", Const, 20}, - {"R_PPC64_GNU_VTENTRY", Const, 20}, - {"R_PPC64_GNU_VTINHERIT", Const, 20}, - {"R_PPC64_GOT16", Const, 5}, - {"R_PPC64_GOT16_DS", Const, 5}, - {"R_PPC64_GOT16_HA", Const, 5}, - {"R_PPC64_GOT16_HI", Const, 5}, - {"R_PPC64_GOT16_LO", Const, 5}, - {"R_PPC64_GOT16_LO_DS", Const, 5}, - {"R_PPC64_GOT_DTPREL16_DS", Const, 5}, - {"R_PPC64_GOT_DTPREL16_HA", Const, 5}, - {"R_PPC64_GOT_DTPREL16_HI", Const, 5}, - {"R_PPC64_GOT_DTPREL16_LO_DS", Const, 5}, - {"R_PPC64_GOT_DTPREL_PCREL34", Const, 20}, - {"R_PPC64_GOT_PCREL34", Const, 20}, - {"R_PPC64_GOT_TLSGD16", Const, 5}, - {"R_PPC64_GOT_TLSGD16_HA", Const, 5}, - {"R_PPC64_GOT_TLSGD16_HI", Const, 5}, - {"R_PPC64_GOT_TLSGD16_LO", Const, 5}, - {"R_PPC64_GOT_TLSGD_PCREL34", Const, 20}, - {"R_PPC64_GOT_TLSLD16", Const, 5}, - {"R_PPC64_GOT_TLSLD16_HA", Const, 5}, - {"R_PPC64_GOT_TLSLD16_HI", Const, 5}, - {"R_PPC64_GOT_TLSLD16_LO", Const, 5}, - {"R_PPC64_GOT_TLSLD_PCREL34", Const, 20}, - {"R_PPC64_GOT_TPREL16_DS", Const, 5}, - {"R_PPC64_GOT_TPREL16_HA", Const, 5}, - {"R_PPC64_GOT_TPREL16_HI", Const, 5}, - {"R_PPC64_GOT_TPREL16_LO_DS", Const, 5}, - {"R_PPC64_GOT_TPREL_PCREL34", Const, 20}, - {"R_PPC64_IRELATIVE", Const, 10}, - {"R_PPC64_JMP_IREL", Const, 10}, - {"R_PPC64_JMP_SLOT", Const, 5}, - {"R_PPC64_NONE", Const, 5}, - {"R_PPC64_PCREL28", Const, 20}, - {"R_PPC64_PCREL34", Const, 20}, - {"R_PPC64_PCREL_OPT", Const, 20}, - {"R_PPC64_PLT16_HA", Const, 20}, - {"R_PPC64_PLT16_HI", Const, 20}, - {"R_PPC64_PLT16_LO", Const, 20}, - {"R_PPC64_PLT16_LO_DS", Const, 10}, - {"R_PPC64_PLT32", Const, 20}, - {"R_PPC64_PLT64", Const, 20}, - {"R_PPC64_PLTCALL", Const, 20}, - {"R_PPC64_PLTCALL_NOTOC", Const, 20}, - {"R_PPC64_PLTGOT16", Const, 10}, - {"R_PPC64_PLTGOT16_DS", Const, 10}, - {"R_PPC64_PLTGOT16_HA", Const, 10}, - {"R_PPC64_PLTGOT16_HI", Const, 10}, - {"R_PPC64_PLTGOT16_LO", Const, 10}, - {"R_PPC64_PLTGOT_LO_DS", Const, 10}, - {"R_PPC64_PLTREL32", Const, 20}, - {"R_PPC64_PLTREL64", Const, 20}, - {"R_PPC64_PLTSEQ", Const, 20}, - {"R_PPC64_PLTSEQ_NOTOC", Const, 20}, - {"R_PPC64_PLT_PCREL34", Const, 20}, - {"R_PPC64_PLT_PCREL34_NOTOC", Const, 20}, - {"R_PPC64_REL14", Const, 5}, - {"R_PPC64_REL14_BRNTAKEN", Const, 5}, - {"R_PPC64_REL14_BRTAKEN", Const, 5}, - {"R_PPC64_REL16", Const, 5}, - {"R_PPC64_REL16DX_HA", Const, 10}, - {"R_PPC64_REL16_HA", Const, 5}, - {"R_PPC64_REL16_HI", Const, 5}, - {"R_PPC64_REL16_HIGH", Const, 20}, - {"R_PPC64_REL16_HIGHA", Const, 20}, - {"R_PPC64_REL16_HIGHER", Const, 20}, - {"R_PPC64_REL16_HIGHER34", Const, 20}, - {"R_PPC64_REL16_HIGHERA", Const, 20}, - {"R_PPC64_REL16_HIGHERA34", Const, 20}, - {"R_PPC64_REL16_HIGHEST", Const, 20}, - {"R_PPC64_REL16_HIGHEST34", Const, 20}, - {"R_PPC64_REL16_HIGHESTA", Const, 20}, - {"R_PPC64_REL16_HIGHESTA34", Const, 20}, - {"R_PPC64_REL16_LO", Const, 5}, - {"R_PPC64_REL24", Const, 5}, - {"R_PPC64_REL24_NOTOC", Const, 10}, - {"R_PPC64_REL24_P9NOTOC", Const, 21}, - {"R_PPC64_REL30", Const, 20}, - {"R_PPC64_REL32", Const, 5}, - {"R_PPC64_REL64", Const, 5}, - {"R_PPC64_RELATIVE", Const, 18}, - {"R_PPC64_SECTOFF", Const, 20}, - {"R_PPC64_SECTOFF_DS", Const, 10}, - {"R_PPC64_SECTOFF_HA", Const, 
20}, - {"R_PPC64_SECTOFF_HI", Const, 20}, - {"R_PPC64_SECTOFF_LO", Const, 20}, - {"R_PPC64_SECTOFF_LO_DS", Const, 10}, - {"R_PPC64_TLS", Const, 5}, - {"R_PPC64_TLSGD", Const, 5}, - {"R_PPC64_TLSLD", Const, 5}, - {"R_PPC64_TOC", Const, 5}, - {"R_PPC64_TOC16", Const, 5}, - {"R_PPC64_TOC16_DS", Const, 5}, - {"R_PPC64_TOC16_HA", Const, 5}, - {"R_PPC64_TOC16_HI", Const, 5}, - {"R_PPC64_TOC16_LO", Const, 5}, - {"R_PPC64_TOC16_LO_DS", Const, 5}, - {"R_PPC64_TOCSAVE", Const, 10}, - {"R_PPC64_TPREL16", Const, 5}, - {"R_PPC64_TPREL16_DS", Const, 5}, - {"R_PPC64_TPREL16_HA", Const, 5}, - {"R_PPC64_TPREL16_HI", Const, 5}, - {"R_PPC64_TPREL16_HIGH", Const, 10}, - {"R_PPC64_TPREL16_HIGHA", Const, 10}, - {"R_PPC64_TPREL16_HIGHER", Const, 5}, - {"R_PPC64_TPREL16_HIGHERA", Const, 5}, - {"R_PPC64_TPREL16_HIGHEST", Const, 5}, - {"R_PPC64_TPREL16_HIGHESTA", Const, 5}, - {"R_PPC64_TPREL16_LO", Const, 5}, - {"R_PPC64_TPREL16_LO_DS", Const, 5}, - {"R_PPC64_TPREL34", Const, 20}, - {"R_PPC64_TPREL64", Const, 5}, - {"R_PPC64_UADDR16", Const, 20}, - {"R_PPC64_UADDR32", Const, 20}, - {"R_PPC64_UADDR64", Const, 20}, - {"R_PPC_ADDR14", Const, 0}, - {"R_PPC_ADDR14_BRNTAKEN", Const, 0}, - {"R_PPC_ADDR14_BRTAKEN", Const, 0}, - {"R_PPC_ADDR16", Const, 0}, - {"R_PPC_ADDR16_HA", Const, 0}, - {"R_PPC_ADDR16_HI", Const, 0}, - {"R_PPC_ADDR16_LO", Const, 0}, - {"R_PPC_ADDR24", Const, 0}, - {"R_PPC_ADDR32", Const, 0}, - {"R_PPC_COPY", Const, 0}, - {"R_PPC_DTPMOD32", Const, 0}, - {"R_PPC_DTPREL16", Const, 0}, - {"R_PPC_DTPREL16_HA", Const, 0}, - {"R_PPC_DTPREL16_HI", Const, 0}, - {"R_PPC_DTPREL16_LO", Const, 0}, - {"R_PPC_DTPREL32", Const, 0}, - {"R_PPC_EMB_BIT_FLD", Const, 0}, - {"R_PPC_EMB_MRKREF", Const, 0}, - {"R_PPC_EMB_NADDR16", Const, 0}, - {"R_PPC_EMB_NADDR16_HA", Const, 0}, - {"R_PPC_EMB_NADDR16_HI", Const, 0}, - {"R_PPC_EMB_NADDR16_LO", Const, 0}, - {"R_PPC_EMB_NADDR32", Const, 0}, - {"R_PPC_EMB_RELSDA", Const, 0}, - {"R_PPC_EMB_RELSEC16", Const, 0}, - {"R_PPC_EMB_RELST_HA", Const, 0}, - {"R_PPC_EMB_RELST_HI", Const, 0}, - {"R_PPC_EMB_RELST_LO", Const, 0}, - {"R_PPC_EMB_SDA21", Const, 0}, - {"R_PPC_EMB_SDA2I16", Const, 0}, - {"R_PPC_EMB_SDA2REL", Const, 0}, - {"R_PPC_EMB_SDAI16", Const, 0}, - {"R_PPC_GLOB_DAT", Const, 0}, - {"R_PPC_GOT16", Const, 0}, - {"R_PPC_GOT16_HA", Const, 0}, - {"R_PPC_GOT16_HI", Const, 0}, - {"R_PPC_GOT16_LO", Const, 0}, - {"R_PPC_GOT_TLSGD16", Const, 0}, - {"R_PPC_GOT_TLSGD16_HA", Const, 0}, - {"R_PPC_GOT_TLSGD16_HI", Const, 0}, - {"R_PPC_GOT_TLSGD16_LO", Const, 0}, - {"R_PPC_GOT_TLSLD16", Const, 0}, - {"R_PPC_GOT_TLSLD16_HA", Const, 0}, - {"R_PPC_GOT_TLSLD16_HI", Const, 0}, - {"R_PPC_GOT_TLSLD16_LO", Const, 0}, - {"R_PPC_GOT_TPREL16", Const, 0}, - {"R_PPC_GOT_TPREL16_HA", Const, 0}, - {"R_PPC_GOT_TPREL16_HI", Const, 0}, - {"R_PPC_GOT_TPREL16_LO", Const, 0}, - {"R_PPC_JMP_SLOT", Const, 0}, - {"R_PPC_LOCAL24PC", Const, 0}, - {"R_PPC_NONE", Const, 0}, - {"R_PPC_PLT16_HA", Const, 0}, - {"R_PPC_PLT16_HI", Const, 0}, - {"R_PPC_PLT16_LO", Const, 0}, - {"R_PPC_PLT32", Const, 0}, - {"R_PPC_PLTREL24", Const, 0}, - {"R_PPC_PLTREL32", Const, 0}, - {"R_PPC_REL14", Const, 0}, - {"R_PPC_REL14_BRNTAKEN", Const, 0}, - {"R_PPC_REL14_BRTAKEN", Const, 0}, - {"R_PPC_REL24", Const, 0}, - {"R_PPC_REL32", Const, 0}, - {"R_PPC_RELATIVE", Const, 0}, - {"R_PPC_SDAREL16", Const, 0}, - {"R_PPC_SECTOFF", Const, 0}, - {"R_PPC_SECTOFF_HA", Const, 0}, - {"R_PPC_SECTOFF_HI", Const, 0}, - {"R_PPC_SECTOFF_LO", Const, 0}, - {"R_PPC_TLS", Const, 0}, - {"R_PPC_TPREL16", Const, 0}, - {"R_PPC_TPREL16_HA", Const, 0}, - 
{"R_PPC_TPREL16_HI", Const, 0}, - {"R_PPC_TPREL16_LO", Const, 0}, - {"R_PPC_TPREL32", Const, 0}, - {"R_PPC_UADDR16", Const, 0}, - {"R_PPC_UADDR32", Const, 0}, - {"R_RISCV", Type, 11}, - {"R_RISCV_32", Const, 11}, - {"R_RISCV_32_PCREL", Const, 12}, - {"R_RISCV_64", Const, 11}, - {"R_RISCV_ADD16", Const, 11}, - {"R_RISCV_ADD32", Const, 11}, - {"R_RISCV_ADD64", Const, 11}, - {"R_RISCV_ADD8", Const, 11}, - {"R_RISCV_ALIGN", Const, 11}, - {"R_RISCV_BRANCH", Const, 11}, - {"R_RISCV_CALL", Const, 11}, - {"R_RISCV_CALL_PLT", Const, 11}, - {"R_RISCV_COPY", Const, 11}, - {"R_RISCV_GNU_VTENTRY", Const, 11}, - {"R_RISCV_GNU_VTINHERIT", Const, 11}, - {"R_RISCV_GOT_HI20", Const, 11}, - {"R_RISCV_GPREL_I", Const, 11}, - {"R_RISCV_GPREL_S", Const, 11}, - {"R_RISCV_HI20", Const, 11}, - {"R_RISCV_JAL", Const, 11}, - {"R_RISCV_JUMP_SLOT", Const, 11}, - {"R_RISCV_LO12_I", Const, 11}, - {"R_RISCV_LO12_S", Const, 11}, - {"R_RISCV_NONE", Const, 11}, - {"R_RISCV_PCREL_HI20", Const, 11}, - {"R_RISCV_PCREL_LO12_I", Const, 11}, - {"R_RISCV_PCREL_LO12_S", Const, 11}, - {"R_RISCV_RELATIVE", Const, 11}, - {"R_RISCV_RELAX", Const, 11}, - {"R_RISCV_RVC_BRANCH", Const, 11}, - {"R_RISCV_RVC_JUMP", Const, 11}, - {"R_RISCV_RVC_LUI", Const, 11}, - {"R_RISCV_SET16", Const, 11}, - {"R_RISCV_SET32", Const, 11}, - {"R_RISCV_SET6", Const, 11}, - {"R_RISCV_SET8", Const, 11}, - {"R_RISCV_SUB16", Const, 11}, - {"R_RISCV_SUB32", Const, 11}, - {"R_RISCV_SUB6", Const, 11}, - {"R_RISCV_SUB64", Const, 11}, - {"R_RISCV_SUB8", Const, 11}, - {"R_RISCV_TLS_DTPMOD32", Const, 11}, - {"R_RISCV_TLS_DTPMOD64", Const, 11}, - {"R_RISCV_TLS_DTPREL32", Const, 11}, - {"R_RISCV_TLS_DTPREL64", Const, 11}, - {"R_RISCV_TLS_GD_HI20", Const, 11}, - {"R_RISCV_TLS_GOT_HI20", Const, 11}, - {"R_RISCV_TLS_TPREL32", Const, 11}, - {"R_RISCV_TLS_TPREL64", Const, 11}, - {"R_RISCV_TPREL_ADD", Const, 11}, - {"R_RISCV_TPREL_HI20", Const, 11}, - {"R_RISCV_TPREL_I", Const, 11}, - {"R_RISCV_TPREL_LO12_I", Const, 11}, - {"R_RISCV_TPREL_LO12_S", Const, 11}, - {"R_RISCV_TPREL_S", Const, 11}, - {"R_SPARC", Type, 0}, - {"R_SPARC_10", Const, 0}, - {"R_SPARC_11", Const, 0}, - {"R_SPARC_13", Const, 0}, - {"R_SPARC_16", Const, 0}, - {"R_SPARC_22", Const, 0}, - {"R_SPARC_32", Const, 0}, - {"R_SPARC_5", Const, 0}, - {"R_SPARC_6", Const, 0}, - {"R_SPARC_64", Const, 0}, - {"R_SPARC_7", Const, 0}, - {"R_SPARC_8", Const, 0}, - {"R_SPARC_COPY", Const, 0}, - {"R_SPARC_DISP16", Const, 0}, - {"R_SPARC_DISP32", Const, 0}, - {"R_SPARC_DISP64", Const, 0}, - {"R_SPARC_DISP8", Const, 0}, - {"R_SPARC_GLOB_DAT", Const, 0}, - {"R_SPARC_GLOB_JMP", Const, 0}, - {"R_SPARC_GOT10", Const, 0}, - {"R_SPARC_GOT13", Const, 0}, - {"R_SPARC_GOT22", Const, 0}, - {"R_SPARC_H44", Const, 0}, - {"R_SPARC_HH22", Const, 0}, - {"R_SPARC_HI22", Const, 0}, - {"R_SPARC_HIPLT22", Const, 0}, - {"R_SPARC_HIX22", Const, 0}, - {"R_SPARC_HM10", Const, 0}, - {"R_SPARC_JMP_SLOT", Const, 0}, - {"R_SPARC_L44", Const, 0}, - {"R_SPARC_LM22", Const, 0}, - {"R_SPARC_LO10", Const, 0}, - {"R_SPARC_LOPLT10", Const, 0}, - {"R_SPARC_LOX10", Const, 0}, - {"R_SPARC_M44", Const, 0}, - {"R_SPARC_NONE", Const, 0}, - {"R_SPARC_OLO10", Const, 0}, - {"R_SPARC_PC10", Const, 0}, - {"R_SPARC_PC22", Const, 0}, - {"R_SPARC_PCPLT10", Const, 0}, - {"R_SPARC_PCPLT22", Const, 0}, - {"R_SPARC_PCPLT32", Const, 0}, - {"R_SPARC_PC_HH22", Const, 0}, - {"R_SPARC_PC_HM10", Const, 0}, - {"R_SPARC_PC_LM22", Const, 0}, - {"R_SPARC_PLT32", Const, 0}, - {"R_SPARC_PLT64", Const, 0}, - {"R_SPARC_REGISTER", Const, 0}, - {"R_SPARC_RELATIVE", Const, 0}, - {"R_SPARC_UA16", 
Const, 0}, - {"R_SPARC_UA32", Const, 0}, - {"R_SPARC_UA64", Const, 0}, - {"R_SPARC_WDISP16", Const, 0}, - {"R_SPARC_WDISP19", Const, 0}, - {"R_SPARC_WDISP22", Const, 0}, - {"R_SPARC_WDISP30", Const, 0}, - {"R_SPARC_WPLT30", Const, 0}, - {"R_SYM32", Func, 0}, - {"R_SYM64", Func, 0}, - {"R_TYPE32", Func, 0}, - {"R_TYPE64", Func, 0}, - {"R_X86_64", Type, 0}, - {"R_X86_64_16", Const, 0}, - {"R_X86_64_32", Const, 0}, - {"R_X86_64_32S", Const, 0}, - {"R_X86_64_64", Const, 0}, - {"R_X86_64_8", Const, 0}, - {"R_X86_64_COPY", Const, 0}, - {"R_X86_64_DTPMOD64", Const, 0}, - {"R_X86_64_DTPOFF32", Const, 0}, - {"R_X86_64_DTPOFF64", Const, 0}, - {"R_X86_64_GLOB_DAT", Const, 0}, - {"R_X86_64_GOT32", Const, 0}, - {"R_X86_64_GOT64", Const, 10}, - {"R_X86_64_GOTOFF64", Const, 10}, - {"R_X86_64_GOTPC32", Const, 10}, - {"R_X86_64_GOTPC32_TLSDESC", Const, 10}, - {"R_X86_64_GOTPC64", Const, 10}, - {"R_X86_64_GOTPCREL", Const, 0}, - {"R_X86_64_GOTPCREL64", Const, 10}, - {"R_X86_64_GOTPCRELX", Const, 10}, - {"R_X86_64_GOTPLT64", Const, 10}, - {"R_X86_64_GOTTPOFF", Const, 0}, - {"R_X86_64_IRELATIVE", Const, 10}, - {"R_X86_64_JMP_SLOT", Const, 0}, - {"R_X86_64_NONE", Const, 0}, - {"R_X86_64_PC16", Const, 0}, - {"R_X86_64_PC32", Const, 0}, - {"R_X86_64_PC32_BND", Const, 10}, - {"R_X86_64_PC64", Const, 10}, - {"R_X86_64_PC8", Const, 0}, - {"R_X86_64_PLT32", Const, 0}, - {"R_X86_64_PLT32_BND", Const, 10}, - {"R_X86_64_PLTOFF64", Const, 10}, - {"R_X86_64_RELATIVE", Const, 0}, - {"R_X86_64_RELATIVE64", Const, 10}, - {"R_X86_64_REX_GOTPCRELX", Const, 10}, - {"R_X86_64_SIZE32", Const, 10}, - {"R_X86_64_SIZE64", Const, 10}, - {"R_X86_64_TLSDESC", Const, 10}, - {"R_X86_64_TLSDESC_CALL", Const, 10}, - {"R_X86_64_TLSGD", Const, 0}, - {"R_X86_64_TLSLD", Const, 0}, - {"R_X86_64_TPOFF32", Const, 0}, - {"R_X86_64_TPOFF64", Const, 0}, - {"Rel32", Type, 0}, - {"Rel32.Info", Field, 0}, - {"Rel32.Off", Field, 0}, - {"Rel64", Type, 0}, - {"Rel64.Info", Field, 0}, - {"Rel64.Off", Field, 0}, - {"Rela32", Type, 0}, - {"Rela32.Addend", Field, 0}, - {"Rela32.Info", Field, 0}, - {"Rela32.Off", Field, 0}, - {"Rela64", Type, 0}, - {"Rela64.Addend", Field, 0}, - {"Rela64.Info", Field, 0}, - {"Rela64.Off", Field, 0}, - {"SHF_ALLOC", Const, 0}, - {"SHF_COMPRESSED", Const, 6}, - {"SHF_EXECINSTR", Const, 0}, - {"SHF_GROUP", Const, 0}, - {"SHF_INFO_LINK", Const, 0}, - {"SHF_LINK_ORDER", Const, 0}, - {"SHF_MASKOS", Const, 0}, - {"SHF_MASKPROC", Const, 0}, - {"SHF_MERGE", Const, 0}, - {"SHF_OS_NONCONFORMING", Const, 0}, - {"SHF_STRINGS", Const, 0}, - {"SHF_TLS", Const, 0}, - {"SHF_WRITE", Const, 0}, - {"SHN_ABS", Const, 0}, - {"SHN_COMMON", Const, 0}, - {"SHN_HIOS", Const, 0}, - {"SHN_HIPROC", Const, 0}, - {"SHN_HIRESERVE", Const, 0}, - {"SHN_LOOS", Const, 0}, - {"SHN_LOPROC", Const, 0}, - {"SHN_LORESERVE", Const, 0}, - {"SHN_UNDEF", Const, 0}, - {"SHN_XINDEX", Const, 0}, - {"SHT_DYNAMIC", Const, 0}, - {"SHT_DYNSYM", Const, 0}, - {"SHT_FINI_ARRAY", Const, 0}, - {"SHT_GNU_ATTRIBUTES", Const, 0}, - {"SHT_GNU_HASH", Const, 0}, - {"SHT_GNU_LIBLIST", Const, 0}, - {"SHT_GNU_VERDEF", Const, 0}, - {"SHT_GNU_VERNEED", Const, 0}, - {"SHT_GNU_VERSYM", Const, 0}, - {"SHT_GROUP", Const, 0}, - {"SHT_HASH", Const, 0}, - {"SHT_HIOS", Const, 0}, - {"SHT_HIPROC", Const, 0}, - {"SHT_HIUSER", Const, 0}, - {"SHT_INIT_ARRAY", Const, 0}, - {"SHT_LOOS", Const, 0}, - {"SHT_LOPROC", Const, 0}, - {"SHT_LOUSER", Const, 0}, - {"SHT_MIPS_ABIFLAGS", Const, 17}, - {"SHT_NOBITS", Const, 0}, - {"SHT_NOTE", Const, 0}, - {"SHT_NULL", Const, 0}, - {"SHT_PREINIT_ARRAY", Const, 0}, - 
{"SHT_PROGBITS", Const, 0}, - {"SHT_REL", Const, 0}, - {"SHT_RELA", Const, 0}, - {"SHT_RISCV_ATTRIBUTES", Const, 25}, - {"SHT_SHLIB", Const, 0}, - {"SHT_STRTAB", Const, 0}, - {"SHT_SYMTAB", Const, 0}, - {"SHT_SYMTAB_SHNDX", Const, 0}, - {"STB_GLOBAL", Const, 0}, - {"STB_HIOS", Const, 0}, - {"STB_HIPROC", Const, 0}, - {"STB_LOCAL", Const, 0}, - {"STB_LOOS", Const, 0}, - {"STB_LOPROC", Const, 0}, - {"STB_WEAK", Const, 0}, - {"STT_COMMON", Const, 0}, - {"STT_FILE", Const, 0}, - {"STT_FUNC", Const, 0}, - {"STT_GNU_IFUNC", Const, 23}, - {"STT_HIOS", Const, 0}, - {"STT_HIPROC", Const, 0}, - {"STT_LOOS", Const, 0}, - {"STT_LOPROC", Const, 0}, - {"STT_NOTYPE", Const, 0}, - {"STT_OBJECT", Const, 0}, - {"STT_RELC", Const, 23}, - {"STT_SECTION", Const, 0}, - {"STT_SRELC", Const, 23}, - {"STT_TLS", Const, 0}, - {"STV_DEFAULT", Const, 0}, - {"STV_HIDDEN", Const, 0}, - {"STV_INTERNAL", Const, 0}, - {"STV_PROTECTED", Const, 0}, - {"ST_BIND", Func, 0}, - {"ST_INFO", Func, 0}, - {"ST_TYPE", Func, 0}, - {"ST_VISIBILITY", Func, 0}, - {"Section", Type, 0}, - {"Section.ReaderAt", Field, 0}, - {"Section.SectionHeader", Field, 0}, - {"Section32", Type, 0}, - {"Section32.Addr", Field, 0}, - {"Section32.Addralign", Field, 0}, - {"Section32.Entsize", Field, 0}, - {"Section32.Flags", Field, 0}, - {"Section32.Info", Field, 0}, - {"Section32.Link", Field, 0}, - {"Section32.Name", Field, 0}, - {"Section32.Off", Field, 0}, - {"Section32.Size", Field, 0}, - {"Section32.Type", Field, 0}, - {"Section64", Type, 0}, - {"Section64.Addr", Field, 0}, - {"Section64.Addralign", Field, 0}, - {"Section64.Entsize", Field, 0}, - {"Section64.Flags", Field, 0}, - {"Section64.Info", Field, 0}, - {"Section64.Link", Field, 0}, - {"Section64.Name", Field, 0}, - {"Section64.Off", Field, 0}, - {"Section64.Size", Field, 0}, - {"Section64.Type", Field, 0}, - {"SectionFlag", Type, 0}, - {"SectionHeader", Type, 0}, - {"SectionHeader.Addr", Field, 0}, - {"SectionHeader.Addralign", Field, 0}, - {"SectionHeader.Entsize", Field, 0}, - {"SectionHeader.FileSize", Field, 6}, - {"SectionHeader.Flags", Field, 0}, - {"SectionHeader.Info", Field, 0}, - {"SectionHeader.Link", Field, 0}, - {"SectionHeader.Name", Field, 0}, - {"SectionHeader.Offset", Field, 0}, - {"SectionHeader.Size", Field, 0}, - {"SectionHeader.Type", Field, 0}, - {"SectionIndex", Type, 0}, - {"SectionType", Type, 0}, - {"Sym32", Type, 0}, - {"Sym32.Info", Field, 0}, - {"Sym32.Name", Field, 0}, - {"Sym32.Other", Field, 0}, - {"Sym32.Shndx", Field, 0}, - {"Sym32.Size", Field, 0}, - {"Sym32.Value", Field, 0}, - {"Sym32Size", Const, 0}, - {"Sym64", Type, 0}, - {"Sym64.Info", Field, 0}, - {"Sym64.Name", Field, 0}, - {"Sym64.Other", Field, 0}, - {"Sym64.Shndx", Field, 0}, - {"Sym64.Size", Field, 0}, - {"Sym64.Value", Field, 0}, - {"Sym64Size", Const, 0}, - {"SymBind", Type, 0}, - {"SymType", Type, 0}, - {"SymVis", Type, 0}, - {"Symbol", Type, 0}, - {"Symbol.HasVersion", Field, 24}, - {"Symbol.Info", Field, 0}, - {"Symbol.Library", Field, 13}, - {"Symbol.Name", Field, 0}, - {"Symbol.Other", Field, 0}, - {"Symbol.Section", Field, 0}, - {"Symbol.Size", Field, 0}, - {"Symbol.Value", Field, 0}, - {"Symbol.Version", Field, 13}, - {"Symbol.VersionIndex", Field, 24}, - {"Type", Type, 0}, - {"VER_FLG_BASE", Const, 24}, - {"VER_FLG_INFO", Const, 24}, - {"VER_FLG_WEAK", Const, 24}, - {"Version", Type, 0}, - {"VersionIndex", Type, 24}, + {"(*File).Close", Method, 0, ""}, + {"(*File).DWARF", Method, 0, ""}, + {"(*File).DynString", Method, 1, ""}, + {"(*File).DynValue", Method, 21, ""}, + 
{"(*File).DynamicSymbols", Method, 4, ""}, + {"(*File).DynamicVersionNeeds", Method, 24, ""}, + {"(*File).DynamicVersions", Method, 24, ""}, + {"(*File).ImportedLibraries", Method, 0, ""}, + {"(*File).ImportedSymbols", Method, 0, ""}, + {"(*File).Section", Method, 0, ""}, + {"(*File).SectionByType", Method, 0, ""}, + {"(*File).Symbols", Method, 0, ""}, + {"(*FormatError).Error", Method, 0, ""}, + {"(*Prog).Open", Method, 0, ""}, + {"(*Section).Data", Method, 0, ""}, + {"(*Section).Open", Method, 0, ""}, + {"(Class).GoString", Method, 0, ""}, + {"(Class).String", Method, 0, ""}, + {"(CompressionType).GoString", Method, 6, ""}, + {"(CompressionType).String", Method, 6, ""}, + {"(Data).GoString", Method, 0, ""}, + {"(Data).String", Method, 0, ""}, + {"(DynFlag).GoString", Method, 0, ""}, + {"(DynFlag).String", Method, 0, ""}, + {"(DynFlag1).GoString", Method, 21, ""}, + {"(DynFlag1).String", Method, 21, ""}, + {"(DynTag).GoString", Method, 0, ""}, + {"(DynTag).String", Method, 0, ""}, + {"(Machine).GoString", Method, 0, ""}, + {"(Machine).String", Method, 0, ""}, + {"(NType).GoString", Method, 0, ""}, + {"(NType).String", Method, 0, ""}, + {"(OSABI).GoString", Method, 0, ""}, + {"(OSABI).String", Method, 0, ""}, + {"(Prog).ReadAt", Method, 0, ""}, + {"(ProgFlag).GoString", Method, 0, ""}, + {"(ProgFlag).String", Method, 0, ""}, + {"(ProgType).GoString", Method, 0, ""}, + {"(ProgType).String", Method, 0, ""}, + {"(R_386).GoString", Method, 0, ""}, + {"(R_386).String", Method, 0, ""}, + {"(R_390).GoString", Method, 7, ""}, + {"(R_390).String", Method, 7, ""}, + {"(R_AARCH64).GoString", Method, 4, ""}, + {"(R_AARCH64).String", Method, 4, ""}, + {"(R_ALPHA).GoString", Method, 0, ""}, + {"(R_ALPHA).String", Method, 0, ""}, + {"(R_ARM).GoString", Method, 0, ""}, + {"(R_ARM).String", Method, 0, ""}, + {"(R_LARCH).GoString", Method, 19, ""}, + {"(R_LARCH).String", Method, 19, ""}, + {"(R_MIPS).GoString", Method, 6, ""}, + {"(R_MIPS).String", Method, 6, ""}, + {"(R_PPC).GoString", Method, 0, ""}, + {"(R_PPC).String", Method, 0, ""}, + {"(R_PPC64).GoString", Method, 5, ""}, + {"(R_PPC64).String", Method, 5, ""}, + {"(R_RISCV).GoString", Method, 11, ""}, + {"(R_RISCV).String", Method, 11, ""}, + {"(R_SPARC).GoString", Method, 0, ""}, + {"(R_SPARC).String", Method, 0, ""}, + {"(R_X86_64).GoString", Method, 0, ""}, + {"(R_X86_64).String", Method, 0, ""}, + {"(Section).ReadAt", Method, 0, ""}, + {"(SectionFlag).GoString", Method, 0, ""}, + {"(SectionFlag).String", Method, 0, ""}, + {"(SectionIndex).GoString", Method, 0, ""}, + {"(SectionIndex).String", Method, 0, ""}, + {"(SectionType).GoString", Method, 0, ""}, + {"(SectionType).String", Method, 0, ""}, + {"(SymBind).GoString", Method, 0, ""}, + {"(SymBind).String", Method, 0, ""}, + {"(SymType).GoString", Method, 0, ""}, + {"(SymType).String", Method, 0, ""}, + {"(SymVis).GoString", Method, 0, ""}, + {"(SymVis).String", Method, 0, ""}, + {"(Type).GoString", Method, 0, ""}, + {"(Type).String", Method, 0, ""}, + {"(Version).GoString", Method, 0, ""}, + {"(Version).String", Method, 0, ""}, + {"(VersionIndex).Index", Method, 24, ""}, + {"(VersionIndex).IsHidden", Method, 24, ""}, + {"ARM_MAGIC_TRAMP_NUMBER", Const, 0, ""}, + {"COMPRESS_HIOS", Const, 6, ""}, + {"COMPRESS_HIPROC", Const, 6, ""}, + {"COMPRESS_LOOS", Const, 6, ""}, + {"COMPRESS_LOPROC", Const, 6, ""}, + {"COMPRESS_ZLIB", Const, 6, ""}, + {"COMPRESS_ZSTD", Const, 21, ""}, + {"Chdr32", Type, 6, ""}, + {"Chdr32.Addralign", Field, 6, ""}, + {"Chdr32.Size", Field, 6, ""}, + {"Chdr32.Type", Field, 6, 
""}, + {"Chdr64", Type, 6, ""}, + {"Chdr64.Addralign", Field, 6, ""}, + {"Chdr64.Size", Field, 6, ""}, + {"Chdr64.Type", Field, 6, ""}, + {"Class", Type, 0, ""}, + {"CompressionType", Type, 6, ""}, + {"DF_1_CONFALT", Const, 21, ""}, + {"DF_1_DIRECT", Const, 21, ""}, + {"DF_1_DISPRELDNE", Const, 21, ""}, + {"DF_1_DISPRELPND", Const, 21, ""}, + {"DF_1_EDITED", Const, 21, ""}, + {"DF_1_ENDFILTEE", Const, 21, ""}, + {"DF_1_GLOBAL", Const, 21, ""}, + {"DF_1_GLOBAUDIT", Const, 21, ""}, + {"DF_1_GROUP", Const, 21, ""}, + {"DF_1_IGNMULDEF", Const, 21, ""}, + {"DF_1_INITFIRST", Const, 21, ""}, + {"DF_1_INTERPOSE", Const, 21, ""}, + {"DF_1_KMOD", Const, 21, ""}, + {"DF_1_LOADFLTR", Const, 21, ""}, + {"DF_1_NOCOMMON", Const, 21, ""}, + {"DF_1_NODEFLIB", Const, 21, ""}, + {"DF_1_NODELETE", Const, 21, ""}, + {"DF_1_NODIRECT", Const, 21, ""}, + {"DF_1_NODUMP", Const, 21, ""}, + {"DF_1_NOHDR", Const, 21, ""}, + {"DF_1_NOKSYMS", Const, 21, ""}, + {"DF_1_NOOPEN", Const, 21, ""}, + {"DF_1_NORELOC", Const, 21, ""}, + {"DF_1_NOW", Const, 21, ""}, + {"DF_1_ORIGIN", Const, 21, ""}, + {"DF_1_PIE", Const, 21, ""}, + {"DF_1_SINGLETON", Const, 21, ""}, + {"DF_1_STUB", Const, 21, ""}, + {"DF_1_SYMINTPOSE", Const, 21, ""}, + {"DF_1_TRANS", Const, 21, ""}, + {"DF_1_WEAKFILTER", Const, 21, ""}, + {"DF_BIND_NOW", Const, 0, ""}, + {"DF_ORIGIN", Const, 0, ""}, + {"DF_STATIC_TLS", Const, 0, ""}, + {"DF_SYMBOLIC", Const, 0, ""}, + {"DF_TEXTREL", Const, 0, ""}, + {"DT_ADDRRNGHI", Const, 16, ""}, + {"DT_ADDRRNGLO", Const, 16, ""}, + {"DT_AUDIT", Const, 16, ""}, + {"DT_AUXILIARY", Const, 16, ""}, + {"DT_BIND_NOW", Const, 0, ""}, + {"DT_CHECKSUM", Const, 16, ""}, + {"DT_CONFIG", Const, 16, ""}, + {"DT_DEBUG", Const, 0, ""}, + {"DT_DEPAUDIT", Const, 16, ""}, + {"DT_ENCODING", Const, 0, ""}, + {"DT_FEATURE", Const, 16, ""}, + {"DT_FILTER", Const, 16, ""}, + {"DT_FINI", Const, 0, ""}, + {"DT_FINI_ARRAY", Const, 0, ""}, + {"DT_FINI_ARRAYSZ", Const, 0, ""}, + {"DT_FLAGS", Const, 0, ""}, + {"DT_FLAGS_1", Const, 16, ""}, + {"DT_GNU_CONFLICT", Const, 16, ""}, + {"DT_GNU_CONFLICTSZ", Const, 16, ""}, + {"DT_GNU_HASH", Const, 16, ""}, + {"DT_GNU_LIBLIST", Const, 16, ""}, + {"DT_GNU_LIBLISTSZ", Const, 16, ""}, + {"DT_GNU_PRELINKED", Const, 16, ""}, + {"DT_HASH", Const, 0, ""}, + {"DT_HIOS", Const, 0, ""}, + {"DT_HIPROC", Const, 0, ""}, + {"DT_INIT", Const, 0, ""}, + {"DT_INIT_ARRAY", Const, 0, ""}, + {"DT_INIT_ARRAYSZ", Const, 0, ""}, + {"DT_JMPREL", Const, 0, ""}, + {"DT_LOOS", Const, 0, ""}, + {"DT_LOPROC", Const, 0, ""}, + {"DT_MIPS_AUX_DYNAMIC", Const, 16, ""}, + {"DT_MIPS_BASE_ADDRESS", Const, 16, ""}, + {"DT_MIPS_COMPACT_SIZE", Const, 16, ""}, + {"DT_MIPS_CONFLICT", Const, 16, ""}, + {"DT_MIPS_CONFLICTNO", Const, 16, ""}, + {"DT_MIPS_CXX_FLAGS", Const, 16, ""}, + {"DT_MIPS_DELTA_CLASS", Const, 16, ""}, + {"DT_MIPS_DELTA_CLASSSYM", Const, 16, ""}, + {"DT_MIPS_DELTA_CLASSSYM_NO", Const, 16, ""}, + {"DT_MIPS_DELTA_CLASS_NO", Const, 16, ""}, + {"DT_MIPS_DELTA_INSTANCE", Const, 16, ""}, + {"DT_MIPS_DELTA_INSTANCE_NO", Const, 16, ""}, + {"DT_MIPS_DELTA_RELOC", Const, 16, ""}, + {"DT_MIPS_DELTA_RELOC_NO", Const, 16, ""}, + {"DT_MIPS_DELTA_SYM", Const, 16, ""}, + {"DT_MIPS_DELTA_SYM_NO", Const, 16, ""}, + {"DT_MIPS_DYNSTR_ALIGN", Const, 16, ""}, + {"DT_MIPS_FLAGS", Const, 16, ""}, + {"DT_MIPS_GOTSYM", Const, 16, ""}, + {"DT_MIPS_GP_VALUE", Const, 16, ""}, + {"DT_MIPS_HIDDEN_GOTIDX", Const, 16, ""}, + {"DT_MIPS_HIPAGENO", Const, 16, ""}, + {"DT_MIPS_ICHECKSUM", Const, 16, ""}, + {"DT_MIPS_INTERFACE", Const, 16, ""}, + 
{"DT_MIPS_INTERFACE_SIZE", Const, 16, ""}, + {"DT_MIPS_IVERSION", Const, 16, ""}, + {"DT_MIPS_LIBLIST", Const, 16, ""}, + {"DT_MIPS_LIBLISTNO", Const, 16, ""}, + {"DT_MIPS_LOCALPAGE_GOTIDX", Const, 16, ""}, + {"DT_MIPS_LOCAL_GOTIDX", Const, 16, ""}, + {"DT_MIPS_LOCAL_GOTNO", Const, 16, ""}, + {"DT_MIPS_MSYM", Const, 16, ""}, + {"DT_MIPS_OPTIONS", Const, 16, ""}, + {"DT_MIPS_PERF_SUFFIX", Const, 16, ""}, + {"DT_MIPS_PIXIE_INIT", Const, 16, ""}, + {"DT_MIPS_PLTGOT", Const, 16, ""}, + {"DT_MIPS_PROTECTED_GOTIDX", Const, 16, ""}, + {"DT_MIPS_RLD_MAP", Const, 16, ""}, + {"DT_MIPS_RLD_MAP_REL", Const, 16, ""}, + {"DT_MIPS_RLD_TEXT_RESOLVE_ADDR", Const, 16, ""}, + {"DT_MIPS_RLD_VERSION", Const, 16, ""}, + {"DT_MIPS_RWPLT", Const, 16, ""}, + {"DT_MIPS_SYMBOL_LIB", Const, 16, ""}, + {"DT_MIPS_SYMTABNO", Const, 16, ""}, + {"DT_MIPS_TIME_STAMP", Const, 16, ""}, + {"DT_MIPS_UNREFEXTNO", Const, 16, ""}, + {"DT_MOVEENT", Const, 16, ""}, + {"DT_MOVESZ", Const, 16, ""}, + {"DT_MOVETAB", Const, 16, ""}, + {"DT_NEEDED", Const, 0, ""}, + {"DT_NULL", Const, 0, ""}, + {"DT_PLTGOT", Const, 0, ""}, + {"DT_PLTPAD", Const, 16, ""}, + {"DT_PLTPADSZ", Const, 16, ""}, + {"DT_PLTREL", Const, 0, ""}, + {"DT_PLTRELSZ", Const, 0, ""}, + {"DT_POSFLAG_1", Const, 16, ""}, + {"DT_PPC64_GLINK", Const, 16, ""}, + {"DT_PPC64_OPD", Const, 16, ""}, + {"DT_PPC64_OPDSZ", Const, 16, ""}, + {"DT_PPC64_OPT", Const, 16, ""}, + {"DT_PPC_GOT", Const, 16, ""}, + {"DT_PPC_OPT", Const, 16, ""}, + {"DT_PREINIT_ARRAY", Const, 0, ""}, + {"DT_PREINIT_ARRAYSZ", Const, 0, ""}, + {"DT_REL", Const, 0, ""}, + {"DT_RELA", Const, 0, ""}, + {"DT_RELACOUNT", Const, 16, ""}, + {"DT_RELAENT", Const, 0, ""}, + {"DT_RELASZ", Const, 0, ""}, + {"DT_RELCOUNT", Const, 16, ""}, + {"DT_RELENT", Const, 0, ""}, + {"DT_RELSZ", Const, 0, ""}, + {"DT_RPATH", Const, 0, ""}, + {"DT_RUNPATH", Const, 0, ""}, + {"DT_SONAME", Const, 0, ""}, + {"DT_SPARC_REGISTER", Const, 16, ""}, + {"DT_STRSZ", Const, 0, ""}, + {"DT_STRTAB", Const, 0, ""}, + {"DT_SYMBOLIC", Const, 0, ""}, + {"DT_SYMENT", Const, 0, ""}, + {"DT_SYMINENT", Const, 16, ""}, + {"DT_SYMINFO", Const, 16, ""}, + {"DT_SYMINSZ", Const, 16, ""}, + {"DT_SYMTAB", Const, 0, ""}, + {"DT_SYMTAB_SHNDX", Const, 16, ""}, + {"DT_TEXTREL", Const, 0, ""}, + {"DT_TLSDESC_GOT", Const, 16, ""}, + {"DT_TLSDESC_PLT", Const, 16, ""}, + {"DT_USED", Const, 16, ""}, + {"DT_VALRNGHI", Const, 16, ""}, + {"DT_VALRNGLO", Const, 16, ""}, + {"DT_VERDEF", Const, 16, ""}, + {"DT_VERDEFNUM", Const, 16, ""}, + {"DT_VERNEED", Const, 0, ""}, + {"DT_VERNEEDNUM", Const, 0, ""}, + {"DT_VERSYM", Const, 0, ""}, + {"Data", Type, 0, ""}, + {"Dyn32", Type, 0, ""}, + {"Dyn32.Tag", Field, 0, ""}, + {"Dyn32.Val", Field, 0, ""}, + {"Dyn64", Type, 0, ""}, + {"Dyn64.Tag", Field, 0, ""}, + {"Dyn64.Val", Field, 0, ""}, + {"DynFlag", Type, 0, ""}, + {"DynFlag1", Type, 21, ""}, + {"DynTag", Type, 0, ""}, + {"DynamicVersion", Type, 24, ""}, + {"DynamicVersion.Deps", Field, 24, ""}, + {"DynamicVersion.Flags", Field, 24, ""}, + {"DynamicVersion.Index", Field, 24, ""}, + {"DynamicVersion.Name", Field, 24, ""}, + {"DynamicVersionDep", Type, 24, ""}, + {"DynamicVersionDep.Dep", Field, 24, ""}, + {"DynamicVersionDep.Flags", Field, 24, ""}, + {"DynamicVersionDep.Index", Field, 24, ""}, + {"DynamicVersionFlag", Type, 24, ""}, + {"DynamicVersionNeed", Type, 24, ""}, + {"DynamicVersionNeed.Name", Field, 24, ""}, + {"DynamicVersionNeed.Needs", Field, 24, ""}, + {"EI_ABIVERSION", Const, 0, ""}, + {"EI_CLASS", Const, 0, ""}, + {"EI_DATA", Const, 0, ""}, + {"EI_NIDENT", Const, 0, 
""}, + {"EI_OSABI", Const, 0, ""}, + {"EI_PAD", Const, 0, ""}, + {"EI_VERSION", Const, 0, ""}, + {"ELFCLASS32", Const, 0, ""}, + {"ELFCLASS64", Const, 0, ""}, + {"ELFCLASSNONE", Const, 0, ""}, + {"ELFDATA2LSB", Const, 0, ""}, + {"ELFDATA2MSB", Const, 0, ""}, + {"ELFDATANONE", Const, 0, ""}, + {"ELFMAG", Const, 0, ""}, + {"ELFOSABI_86OPEN", Const, 0, ""}, + {"ELFOSABI_AIX", Const, 0, ""}, + {"ELFOSABI_ARM", Const, 0, ""}, + {"ELFOSABI_AROS", Const, 11, ""}, + {"ELFOSABI_CLOUDABI", Const, 11, ""}, + {"ELFOSABI_FENIXOS", Const, 11, ""}, + {"ELFOSABI_FREEBSD", Const, 0, ""}, + {"ELFOSABI_HPUX", Const, 0, ""}, + {"ELFOSABI_HURD", Const, 0, ""}, + {"ELFOSABI_IRIX", Const, 0, ""}, + {"ELFOSABI_LINUX", Const, 0, ""}, + {"ELFOSABI_MODESTO", Const, 0, ""}, + {"ELFOSABI_NETBSD", Const, 0, ""}, + {"ELFOSABI_NONE", Const, 0, ""}, + {"ELFOSABI_NSK", Const, 0, ""}, + {"ELFOSABI_OPENBSD", Const, 0, ""}, + {"ELFOSABI_OPENVMS", Const, 0, ""}, + {"ELFOSABI_SOLARIS", Const, 0, ""}, + {"ELFOSABI_STANDALONE", Const, 0, ""}, + {"ELFOSABI_TRU64", Const, 0, ""}, + {"EM_386", Const, 0, ""}, + {"EM_486", Const, 0, ""}, + {"EM_56800EX", Const, 11, ""}, + {"EM_68HC05", Const, 11, ""}, + {"EM_68HC08", Const, 11, ""}, + {"EM_68HC11", Const, 11, ""}, + {"EM_68HC12", Const, 0, ""}, + {"EM_68HC16", Const, 11, ""}, + {"EM_68K", Const, 0, ""}, + {"EM_78KOR", Const, 11, ""}, + {"EM_8051", Const, 11, ""}, + {"EM_860", Const, 0, ""}, + {"EM_88K", Const, 0, ""}, + {"EM_960", Const, 0, ""}, + {"EM_AARCH64", Const, 4, ""}, + {"EM_ALPHA", Const, 0, ""}, + {"EM_ALPHA_STD", Const, 0, ""}, + {"EM_ALTERA_NIOS2", Const, 11, ""}, + {"EM_AMDGPU", Const, 11, ""}, + {"EM_ARC", Const, 0, ""}, + {"EM_ARCA", Const, 11, ""}, + {"EM_ARC_COMPACT", Const, 11, ""}, + {"EM_ARC_COMPACT2", Const, 11, ""}, + {"EM_ARM", Const, 0, ""}, + {"EM_AVR", Const, 11, ""}, + {"EM_AVR32", Const, 11, ""}, + {"EM_BA1", Const, 11, ""}, + {"EM_BA2", Const, 11, ""}, + {"EM_BLACKFIN", Const, 11, ""}, + {"EM_BPF", Const, 11, ""}, + {"EM_C166", Const, 11, ""}, + {"EM_CDP", Const, 11, ""}, + {"EM_CE", Const, 11, ""}, + {"EM_CLOUDSHIELD", Const, 11, ""}, + {"EM_COGE", Const, 11, ""}, + {"EM_COLDFIRE", Const, 0, ""}, + {"EM_COOL", Const, 11, ""}, + {"EM_COREA_1ST", Const, 11, ""}, + {"EM_COREA_2ND", Const, 11, ""}, + {"EM_CR", Const, 11, ""}, + {"EM_CR16", Const, 11, ""}, + {"EM_CRAYNV2", Const, 11, ""}, + {"EM_CRIS", Const, 11, ""}, + {"EM_CRX", Const, 11, ""}, + {"EM_CSR_KALIMBA", Const, 11, ""}, + {"EM_CUDA", Const, 11, ""}, + {"EM_CYPRESS_M8C", Const, 11, ""}, + {"EM_D10V", Const, 11, ""}, + {"EM_D30V", Const, 11, ""}, + {"EM_DSP24", Const, 11, ""}, + {"EM_DSPIC30F", Const, 11, ""}, + {"EM_DXP", Const, 11, ""}, + {"EM_ECOG1", Const, 11, ""}, + {"EM_ECOG16", Const, 11, ""}, + {"EM_ECOG1X", Const, 11, ""}, + {"EM_ECOG2", Const, 11, ""}, + {"EM_ETPU", Const, 11, ""}, + {"EM_EXCESS", Const, 11, ""}, + {"EM_F2MC16", Const, 11, ""}, + {"EM_FIREPATH", Const, 11, ""}, + {"EM_FR20", Const, 0, ""}, + {"EM_FR30", Const, 11, ""}, + {"EM_FT32", Const, 11, ""}, + {"EM_FX66", Const, 11, ""}, + {"EM_H8S", Const, 0, ""}, + {"EM_H8_300", Const, 0, ""}, + {"EM_H8_300H", Const, 0, ""}, + {"EM_H8_500", Const, 0, ""}, + {"EM_HUANY", Const, 11, ""}, + {"EM_IA_64", Const, 0, ""}, + {"EM_INTEL205", Const, 11, ""}, + {"EM_INTEL206", Const, 11, ""}, + {"EM_INTEL207", Const, 11, ""}, + {"EM_INTEL208", Const, 11, ""}, + {"EM_INTEL209", Const, 11, ""}, + {"EM_IP2K", Const, 11, ""}, + {"EM_JAVELIN", Const, 11, ""}, + {"EM_K10M", Const, 11, ""}, + {"EM_KM32", Const, 11, ""}, + {"EM_KMX16", Const, 11, 
""}, + {"EM_KMX32", Const, 11, ""}, + {"EM_KMX8", Const, 11, ""}, + {"EM_KVARC", Const, 11, ""}, + {"EM_L10M", Const, 11, ""}, + {"EM_LANAI", Const, 11, ""}, + {"EM_LATTICEMICO32", Const, 11, ""}, + {"EM_LOONGARCH", Const, 19, ""}, + {"EM_M16C", Const, 11, ""}, + {"EM_M32", Const, 0, ""}, + {"EM_M32C", Const, 11, ""}, + {"EM_M32R", Const, 11, ""}, + {"EM_MANIK", Const, 11, ""}, + {"EM_MAX", Const, 11, ""}, + {"EM_MAXQ30", Const, 11, ""}, + {"EM_MCHP_PIC", Const, 11, ""}, + {"EM_MCST_ELBRUS", Const, 11, ""}, + {"EM_ME16", Const, 0, ""}, + {"EM_METAG", Const, 11, ""}, + {"EM_MICROBLAZE", Const, 11, ""}, + {"EM_MIPS", Const, 0, ""}, + {"EM_MIPS_RS3_LE", Const, 0, ""}, + {"EM_MIPS_RS4_BE", Const, 0, ""}, + {"EM_MIPS_X", Const, 0, ""}, + {"EM_MMA", Const, 0, ""}, + {"EM_MMDSP_PLUS", Const, 11, ""}, + {"EM_MMIX", Const, 11, ""}, + {"EM_MN10200", Const, 11, ""}, + {"EM_MN10300", Const, 11, ""}, + {"EM_MOXIE", Const, 11, ""}, + {"EM_MSP430", Const, 11, ""}, + {"EM_NCPU", Const, 0, ""}, + {"EM_NDR1", Const, 0, ""}, + {"EM_NDS32", Const, 11, ""}, + {"EM_NONE", Const, 0, ""}, + {"EM_NORC", Const, 11, ""}, + {"EM_NS32K", Const, 11, ""}, + {"EM_OPEN8", Const, 11, ""}, + {"EM_OPENRISC", Const, 11, ""}, + {"EM_PARISC", Const, 0, ""}, + {"EM_PCP", Const, 0, ""}, + {"EM_PDP10", Const, 11, ""}, + {"EM_PDP11", Const, 11, ""}, + {"EM_PDSP", Const, 11, ""}, + {"EM_PJ", Const, 11, ""}, + {"EM_PPC", Const, 0, ""}, + {"EM_PPC64", Const, 0, ""}, + {"EM_PRISM", Const, 11, ""}, + {"EM_QDSP6", Const, 11, ""}, + {"EM_R32C", Const, 11, ""}, + {"EM_RCE", Const, 0, ""}, + {"EM_RH32", Const, 0, ""}, + {"EM_RISCV", Const, 11, ""}, + {"EM_RL78", Const, 11, ""}, + {"EM_RS08", Const, 11, ""}, + {"EM_RX", Const, 11, ""}, + {"EM_S370", Const, 0, ""}, + {"EM_S390", Const, 0, ""}, + {"EM_SCORE7", Const, 11, ""}, + {"EM_SEP", Const, 11, ""}, + {"EM_SE_C17", Const, 11, ""}, + {"EM_SE_C33", Const, 11, ""}, + {"EM_SH", Const, 0, ""}, + {"EM_SHARC", Const, 11, ""}, + {"EM_SLE9X", Const, 11, ""}, + {"EM_SNP1K", Const, 11, ""}, + {"EM_SPARC", Const, 0, ""}, + {"EM_SPARC32PLUS", Const, 0, ""}, + {"EM_SPARCV9", Const, 0, ""}, + {"EM_ST100", Const, 0, ""}, + {"EM_ST19", Const, 11, ""}, + {"EM_ST200", Const, 11, ""}, + {"EM_ST7", Const, 11, ""}, + {"EM_ST9PLUS", Const, 11, ""}, + {"EM_STARCORE", Const, 0, ""}, + {"EM_STM8", Const, 11, ""}, + {"EM_STXP7X", Const, 11, ""}, + {"EM_SVX", Const, 11, ""}, + {"EM_TILE64", Const, 11, ""}, + {"EM_TILEGX", Const, 11, ""}, + {"EM_TILEPRO", Const, 11, ""}, + {"EM_TINYJ", Const, 0, ""}, + {"EM_TI_ARP32", Const, 11, ""}, + {"EM_TI_C2000", Const, 11, ""}, + {"EM_TI_C5500", Const, 11, ""}, + {"EM_TI_C6000", Const, 11, ""}, + {"EM_TI_PRU", Const, 11, ""}, + {"EM_TMM_GPP", Const, 11, ""}, + {"EM_TPC", Const, 11, ""}, + {"EM_TRICORE", Const, 0, ""}, + {"EM_TRIMEDIA", Const, 11, ""}, + {"EM_TSK3000", Const, 11, ""}, + {"EM_UNICORE", Const, 11, ""}, + {"EM_V800", Const, 0, ""}, + {"EM_V850", Const, 11, ""}, + {"EM_VAX", Const, 11, ""}, + {"EM_VIDEOCORE", Const, 11, ""}, + {"EM_VIDEOCORE3", Const, 11, ""}, + {"EM_VIDEOCORE5", Const, 11, ""}, + {"EM_VISIUM", Const, 11, ""}, + {"EM_VPP500", Const, 0, ""}, + {"EM_X86_64", Const, 0, ""}, + {"EM_XCORE", Const, 11, ""}, + {"EM_XGATE", Const, 11, ""}, + {"EM_XIMO16", Const, 11, ""}, + {"EM_XTENSA", Const, 11, ""}, + {"EM_Z80", Const, 11, ""}, + {"EM_ZSP", Const, 11, ""}, + {"ET_CORE", Const, 0, ""}, + {"ET_DYN", Const, 0, ""}, + {"ET_EXEC", Const, 0, ""}, + {"ET_HIOS", Const, 0, ""}, + {"ET_HIPROC", Const, 0, ""}, + {"ET_LOOS", Const, 0, ""}, + {"ET_LOPROC", Const, 0, 
""}, + {"ET_NONE", Const, 0, ""}, + {"ET_REL", Const, 0, ""}, + {"EV_CURRENT", Const, 0, ""}, + {"EV_NONE", Const, 0, ""}, + {"ErrNoSymbols", Var, 4, ""}, + {"File", Type, 0, ""}, + {"File.FileHeader", Field, 0, ""}, + {"File.Progs", Field, 0, ""}, + {"File.Sections", Field, 0, ""}, + {"FileHeader", Type, 0, ""}, + {"FileHeader.ABIVersion", Field, 0, ""}, + {"FileHeader.ByteOrder", Field, 0, ""}, + {"FileHeader.Class", Field, 0, ""}, + {"FileHeader.Data", Field, 0, ""}, + {"FileHeader.Entry", Field, 1, ""}, + {"FileHeader.Machine", Field, 0, ""}, + {"FileHeader.OSABI", Field, 0, ""}, + {"FileHeader.Type", Field, 0, ""}, + {"FileHeader.Version", Field, 0, ""}, + {"FormatError", Type, 0, ""}, + {"Header32", Type, 0, ""}, + {"Header32.Ehsize", Field, 0, ""}, + {"Header32.Entry", Field, 0, ""}, + {"Header32.Flags", Field, 0, ""}, + {"Header32.Ident", Field, 0, ""}, + {"Header32.Machine", Field, 0, ""}, + {"Header32.Phentsize", Field, 0, ""}, + {"Header32.Phnum", Field, 0, ""}, + {"Header32.Phoff", Field, 0, ""}, + {"Header32.Shentsize", Field, 0, ""}, + {"Header32.Shnum", Field, 0, ""}, + {"Header32.Shoff", Field, 0, ""}, + {"Header32.Shstrndx", Field, 0, ""}, + {"Header32.Type", Field, 0, ""}, + {"Header32.Version", Field, 0, ""}, + {"Header64", Type, 0, ""}, + {"Header64.Ehsize", Field, 0, ""}, + {"Header64.Entry", Field, 0, ""}, + {"Header64.Flags", Field, 0, ""}, + {"Header64.Ident", Field, 0, ""}, + {"Header64.Machine", Field, 0, ""}, + {"Header64.Phentsize", Field, 0, ""}, + {"Header64.Phnum", Field, 0, ""}, + {"Header64.Phoff", Field, 0, ""}, + {"Header64.Shentsize", Field, 0, ""}, + {"Header64.Shnum", Field, 0, ""}, + {"Header64.Shoff", Field, 0, ""}, + {"Header64.Shstrndx", Field, 0, ""}, + {"Header64.Type", Field, 0, ""}, + {"Header64.Version", Field, 0, ""}, + {"ImportedSymbol", Type, 0, ""}, + {"ImportedSymbol.Library", Field, 0, ""}, + {"ImportedSymbol.Name", Field, 0, ""}, + {"ImportedSymbol.Version", Field, 0, ""}, + {"Machine", Type, 0, ""}, + {"NT_FPREGSET", Const, 0, ""}, + {"NT_PRPSINFO", Const, 0, ""}, + {"NT_PRSTATUS", Const, 0, ""}, + {"NType", Type, 0, ""}, + {"NewFile", Func, 0, "func(r io.ReaderAt) (*File, error)"}, + {"OSABI", Type, 0, ""}, + {"Open", Func, 0, "func(name string) (*File, error)"}, + {"PF_MASKOS", Const, 0, ""}, + {"PF_MASKPROC", Const, 0, ""}, + {"PF_R", Const, 0, ""}, + {"PF_W", Const, 0, ""}, + {"PF_X", Const, 0, ""}, + {"PT_AARCH64_ARCHEXT", Const, 16, ""}, + {"PT_AARCH64_UNWIND", Const, 16, ""}, + {"PT_ARM_ARCHEXT", Const, 16, ""}, + {"PT_ARM_EXIDX", Const, 16, ""}, + {"PT_DYNAMIC", Const, 0, ""}, + {"PT_GNU_EH_FRAME", Const, 16, ""}, + {"PT_GNU_MBIND_HI", Const, 16, ""}, + {"PT_GNU_MBIND_LO", Const, 16, ""}, + {"PT_GNU_PROPERTY", Const, 16, ""}, + {"PT_GNU_RELRO", Const, 16, ""}, + {"PT_GNU_STACK", Const, 16, ""}, + {"PT_HIOS", Const, 0, ""}, + {"PT_HIPROC", Const, 0, ""}, + {"PT_INTERP", Const, 0, ""}, + {"PT_LOAD", Const, 0, ""}, + {"PT_LOOS", Const, 0, ""}, + {"PT_LOPROC", Const, 0, ""}, + {"PT_MIPS_ABIFLAGS", Const, 16, ""}, + {"PT_MIPS_OPTIONS", Const, 16, ""}, + {"PT_MIPS_REGINFO", Const, 16, ""}, + {"PT_MIPS_RTPROC", Const, 16, ""}, + {"PT_NOTE", Const, 0, ""}, + {"PT_NULL", Const, 0, ""}, + {"PT_OPENBSD_BOOTDATA", Const, 16, ""}, + {"PT_OPENBSD_NOBTCFI", Const, 23, ""}, + {"PT_OPENBSD_RANDOMIZE", Const, 16, ""}, + {"PT_OPENBSD_WXNEEDED", Const, 16, ""}, + {"PT_PAX_FLAGS", Const, 16, ""}, + {"PT_PHDR", Const, 0, ""}, + {"PT_RISCV_ATTRIBUTES", Const, 25, ""}, + {"PT_S390_PGSTE", Const, 16, ""}, + {"PT_SHLIB", Const, 0, ""}, + 
{"PT_SUNWSTACK", Const, 16, ""}, + {"PT_SUNW_EH_FRAME", Const, 16, ""}, + {"PT_TLS", Const, 0, ""}, + {"Prog", Type, 0, ""}, + {"Prog.ProgHeader", Field, 0, ""}, + {"Prog.ReaderAt", Field, 0, ""}, + {"Prog32", Type, 0, ""}, + {"Prog32.Align", Field, 0, ""}, + {"Prog32.Filesz", Field, 0, ""}, + {"Prog32.Flags", Field, 0, ""}, + {"Prog32.Memsz", Field, 0, ""}, + {"Prog32.Off", Field, 0, ""}, + {"Prog32.Paddr", Field, 0, ""}, + {"Prog32.Type", Field, 0, ""}, + {"Prog32.Vaddr", Field, 0, ""}, + {"Prog64", Type, 0, ""}, + {"Prog64.Align", Field, 0, ""}, + {"Prog64.Filesz", Field, 0, ""}, + {"Prog64.Flags", Field, 0, ""}, + {"Prog64.Memsz", Field, 0, ""}, + {"Prog64.Off", Field, 0, ""}, + {"Prog64.Paddr", Field, 0, ""}, + {"Prog64.Type", Field, 0, ""}, + {"Prog64.Vaddr", Field, 0, ""}, + {"ProgFlag", Type, 0, ""}, + {"ProgHeader", Type, 0, ""}, + {"ProgHeader.Align", Field, 0, ""}, + {"ProgHeader.Filesz", Field, 0, ""}, + {"ProgHeader.Flags", Field, 0, ""}, + {"ProgHeader.Memsz", Field, 0, ""}, + {"ProgHeader.Off", Field, 0, ""}, + {"ProgHeader.Paddr", Field, 0, ""}, + {"ProgHeader.Type", Field, 0, ""}, + {"ProgHeader.Vaddr", Field, 0, ""}, + {"ProgType", Type, 0, ""}, + {"R_386", Type, 0, ""}, + {"R_386_16", Const, 10, ""}, + {"R_386_32", Const, 0, ""}, + {"R_386_32PLT", Const, 10, ""}, + {"R_386_8", Const, 10, ""}, + {"R_386_COPY", Const, 0, ""}, + {"R_386_GLOB_DAT", Const, 0, ""}, + {"R_386_GOT32", Const, 0, ""}, + {"R_386_GOT32X", Const, 10, ""}, + {"R_386_GOTOFF", Const, 0, ""}, + {"R_386_GOTPC", Const, 0, ""}, + {"R_386_IRELATIVE", Const, 10, ""}, + {"R_386_JMP_SLOT", Const, 0, ""}, + {"R_386_NONE", Const, 0, ""}, + {"R_386_PC16", Const, 10, ""}, + {"R_386_PC32", Const, 0, ""}, + {"R_386_PC8", Const, 10, ""}, + {"R_386_PLT32", Const, 0, ""}, + {"R_386_RELATIVE", Const, 0, ""}, + {"R_386_SIZE32", Const, 10, ""}, + {"R_386_TLS_DESC", Const, 10, ""}, + {"R_386_TLS_DESC_CALL", Const, 10, ""}, + {"R_386_TLS_DTPMOD32", Const, 0, ""}, + {"R_386_TLS_DTPOFF32", Const, 0, ""}, + {"R_386_TLS_GD", Const, 0, ""}, + {"R_386_TLS_GD_32", Const, 0, ""}, + {"R_386_TLS_GD_CALL", Const, 0, ""}, + {"R_386_TLS_GD_POP", Const, 0, ""}, + {"R_386_TLS_GD_PUSH", Const, 0, ""}, + {"R_386_TLS_GOTDESC", Const, 10, ""}, + {"R_386_TLS_GOTIE", Const, 0, ""}, + {"R_386_TLS_IE", Const, 0, ""}, + {"R_386_TLS_IE_32", Const, 0, ""}, + {"R_386_TLS_LDM", Const, 0, ""}, + {"R_386_TLS_LDM_32", Const, 0, ""}, + {"R_386_TLS_LDM_CALL", Const, 0, ""}, + {"R_386_TLS_LDM_POP", Const, 0, ""}, + {"R_386_TLS_LDM_PUSH", Const, 0, ""}, + {"R_386_TLS_LDO_32", Const, 0, ""}, + {"R_386_TLS_LE", Const, 0, ""}, + {"R_386_TLS_LE_32", Const, 0, ""}, + {"R_386_TLS_TPOFF", Const, 0, ""}, + {"R_386_TLS_TPOFF32", Const, 0, ""}, + {"R_390", Type, 7, ""}, + {"R_390_12", Const, 7, ""}, + {"R_390_16", Const, 7, ""}, + {"R_390_20", Const, 7, ""}, + {"R_390_32", Const, 7, ""}, + {"R_390_64", Const, 7, ""}, + {"R_390_8", Const, 7, ""}, + {"R_390_COPY", Const, 7, ""}, + {"R_390_GLOB_DAT", Const, 7, ""}, + {"R_390_GOT12", Const, 7, ""}, + {"R_390_GOT16", Const, 7, ""}, + {"R_390_GOT20", Const, 7, ""}, + {"R_390_GOT32", Const, 7, ""}, + {"R_390_GOT64", Const, 7, ""}, + {"R_390_GOTENT", Const, 7, ""}, + {"R_390_GOTOFF", Const, 7, ""}, + {"R_390_GOTOFF16", Const, 7, ""}, + {"R_390_GOTOFF64", Const, 7, ""}, + {"R_390_GOTPC", Const, 7, ""}, + {"R_390_GOTPCDBL", Const, 7, ""}, + {"R_390_GOTPLT12", Const, 7, ""}, + {"R_390_GOTPLT16", Const, 7, ""}, + {"R_390_GOTPLT20", Const, 7, ""}, + {"R_390_GOTPLT32", Const, 7, ""}, + {"R_390_GOTPLT64", Const, 7, ""}, + 
{"R_390_GOTPLTENT", Const, 7, ""}, + {"R_390_GOTPLTOFF16", Const, 7, ""}, + {"R_390_GOTPLTOFF32", Const, 7, ""}, + {"R_390_GOTPLTOFF64", Const, 7, ""}, + {"R_390_JMP_SLOT", Const, 7, ""}, + {"R_390_NONE", Const, 7, ""}, + {"R_390_PC16", Const, 7, ""}, + {"R_390_PC16DBL", Const, 7, ""}, + {"R_390_PC32", Const, 7, ""}, + {"R_390_PC32DBL", Const, 7, ""}, + {"R_390_PC64", Const, 7, ""}, + {"R_390_PLT16DBL", Const, 7, ""}, + {"R_390_PLT32", Const, 7, ""}, + {"R_390_PLT32DBL", Const, 7, ""}, + {"R_390_PLT64", Const, 7, ""}, + {"R_390_RELATIVE", Const, 7, ""}, + {"R_390_TLS_DTPMOD", Const, 7, ""}, + {"R_390_TLS_DTPOFF", Const, 7, ""}, + {"R_390_TLS_GD32", Const, 7, ""}, + {"R_390_TLS_GD64", Const, 7, ""}, + {"R_390_TLS_GDCALL", Const, 7, ""}, + {"R_390_TLS_GOTIE12", Const, 7, ""}, + {"R_390_TLS_GOTIE20", Const, 7, ""}, + {"R_390_TLS_GOTIE32", Const, 7, ""}, + {"R_390_TLS_GOTIE64", Const, 7, ""}, + {"R_390_TLS_IE32", Const, 7, ""}, + {"R_390_TLS_IE64", Const, 7, ""}, + {"R_390_TLS_IEENT", Const, 7, ""}, + {"R_390_TLS_LDCALL", Const, 7, ""}, + {"R_390_TLS_LDM32", Const, 7, ""}, + {"R_390_TLS_LDM64", Const, 7, ""}, + {"R_390_TLS_LDO32", Const, 7, ""}, + {"R_390_TLS_LDO64", Const, 7, ""}, + {"R_390_TLS_LE32", Const, 7, ""}, + {"R_390_TLS_LE64", Const, 7, ""}, + {"R_390_TLS_LOAD", Const, 7, ""}, + {"R_390_TLS_TPOFF", Const, 7, ""}, + {"R_AARCH64", Type, 4, ""}, + {"R_AARCH64_ABS16", Const, 4, ""}, + {"R_AARCH64_ABS32", Const, 4, ""}, + {"R_AARCH64_ABS64", Const, 4, ""}, + {"R_AARCH64_ADD_ABS_LO12_NC", Const, 4, ""}, + {"R_AARCH64_ADR_GOT_PAGE", Const, 4, ""}, + {"R_AARCH64_ADR_PREL_LO21", Const, 4, ""}, + {"R_AARCH64_ADR_PREL_PG_HI21", Const, 4, ""}, + {"R_AARCH64_ADR_PREL_PG_HI21_NC", Const, 4, ""}, + {"R_AARCH64_CALL26", Const, 4, ""}, + {"R_AARCH64_CONDBR19", Const, 4, ""}, + {"R_AARCH64_COPY", Const, 4, ""}, + {"R_AARCH64_GLOB_DAT", Const, 4, ""}, + {"R_AARCH64_GOT_LD_PREL19", Const, 4, ""}, + {"R_AARCH64_IRELATIVE", Const, 4, ""}, + {"R_AARCH64_JUMP26", Const, 4, ""}, + {"R_AARCH64_JUMP_SLOT", Const, 4, ""}, + {"R_AARCH64_LD64_GOTOFF_LO15", Const, 10, ""}, + {"R_AARCH64_LD64_GOTPAGE_LO15", Const, 10, ""}, + {"R_AARCH64_LD64_GOT_LO12_NC", Const, 4, ""}, + {"R_AARCH64_LDST128_ABS_LO12_NC", Const, 4, ""}, + {"R_AARCH64_LDST16_ABS_LO12_NC", Const, 4, ""}, + {"R_AARCH64_LDST32_ABS_LO12_NC", Const, 4, ""}, + {"R_AARCH64_LDST64_ABS_LO12_NC", Const, 4, ""}, + {"R_AARCH64_LDST8_ABS_LO12_NC", Const, 4, ""}, + {"R_AARCH64_LD_PREL_LO19", Const, 4, ""}, + {"R_AARCH64_MOVW_SABS_G0", Const, 4, ""}, + {"R_AARCH64_MOVW_SABS_G1", Const, 4, ""}, + {"R_AARCH64_MOVW_SABS_G2", Const, 4, ""}, + {"R_AARCH64_MOVW_UABS_G0", Const, 4, ""}, + {"R_AARCH64_MOVW_UABS_G0_NC", Const, 4, ""}, + {"R_AARCH64_MOVW_UABS_G1", Const, 4, ""}, + {"R_AARCH64_MOVW_UABS_G1_NC", Const, 4, ""}, + {"R_AARCH64_MOVW_UABS_G2", Const, 4, ""}, + {"R_AARCH64_MOVW_UABS_G2_NC", Const, 4, ""}, + {"R_AARCH64_MOVW_UABS_G3", Const, 4, ""}, + {"R_AARCH64_NONE", Const, 4, ""}, + {"R_AARCH64_NULL", Const, 4, ""}, + {"R_AARCH64_P32_ABS16", Const, 4, ""}, + {"R_AARCH64_P32_ABS32", Const, 4, ""}, + {"R_AARCH64_P32_ADD_ABS_LO12_NC", Const, 4, ""}, + {"R_AARCH64_P32_ADR_GOT_PAGE", Const, 4, ""}, + {"R_AARCH64_P32_ADR_PREL_LO21", Const, 4, ""}, + {"R_AARCH64_P32_ADR_PREL_PG_HI21", Const, 4, ""}, + {"R_AARCH64_P32_CALL26", Const, 4, ""}, + {"R_AARCH64_P32_CONDBR19", Const, 4, ""}, + {"R_AARCH64_P32_COPY", Const, 4, ""}, + {"R_AARCH64_P32_GLOB_DAT", Const, 4, ""}, + {"R_AARCH64_P32_GOT_LD_PREL19", Const, 4, ""}, + {"R_AARCH64_P32_IRELATIVE", Const, 4, ""}, + 
{"R_AARCH64_P32_JUMP26", Const, 4, ""}, + {"R_AARCH64_P32_JUMP_SLOT", Const, 4, ""}, + {"R_AARCH64_P32_LD32_GOT_LO12_NC", Const, 4, ""}, + {"R_AARCH64_P32_LDST128_ABS_LO12_NC", Const, 4, ""}, + {"R_AARCH64_P32_LDST16_ABS_LO12_NC", Const, 4, ""}, + {"R_AARCH64_P32_LDST32_ABS_LO12_NC", Const, 4, ""}, + {"R_AARCH64_P32_LDST64_ABS_LO12_NC", Const, 4, ""}, + {"R_AARCH64_P32_LDST8_ABS_LO12_NC", Const, 4, ""}, + {"R_AARCH64_P32_LD_PREL_LO19", Const, 4, ""}, + {"R_AARCH64_P32_MOVW_SABS_G0", Const, 4, ""}, + {"R_AARCH64_P32_MOVW_UABS_G0", Const, 4, ""}, + {"R_AARCH64_P32_MOVW_UABS_G0_NC", Const, 4, ""}, + {"R_AARCH64_P32_MOVW_UABS_G1", Const, 4, ""}, + {"R_AARCH64_P32_PREL16", Const, 4, ""}, + {"R_AARCH64_P32_PREL32", Const, 4, ""}, + {"R_AARCH64_P32_RELATIVE", Const, 4, ""}, + {"R_AARCH64_P32_TLSDESC", Const, 4, ""}, + {"R_AARCH64_P32_TLSDESC_ADD_LO12_NC", Const, 4, ""}, + {"R_AARCH64_P32_TLSDESC_ADR_PAGE21", Const, 4, ""}, + {"R_AARCH64_P32_TLSDESC_ADR_PREL21", Const, 4, ""}, + {"R_AARCH64_P32_TLSDESC_CALL", Const, 4, ""}, + {"R_AARCH64_P32_TLSDESC_LD32_LO12_NC", Const, 4, ""}, + {"R_AARCH64_P32_TLSDESC_LD_PREL19", Const, 4, ""}, + {"R_AARCH64_P32_TLSGD_ADD_LO12_NC", Const, 4, ""}, + {"R_AARCH64_P32_TLSGD_ADR_PAGE21", Const, 4, ""}, + {"R_AARCH64_P32_TLSIE_ADR_GOTTPREL_PAGE21", Const, 4, ""}, + {"R_AARCH64_P32_TLSIE_LD32_GOTTPREL_LO12_NC", Const, 4, ""}, + {"R_AARCH64_P32_TLSIE_LD_GOTTPREL_PREL19", Const, 4, ""}, + {"R_AARCH64_P32_TLSLE_ADD_TPREL_HI12", Const, 4, ""}, + {"R_AARCH64_P32_TLSLE_ADD_TPREL_LO12", Const, 4, ""}, + {"R_AARCH64_P32_TLSLE_ADD_TPREL_LO12_NC", Const, 4, ""}, + {"R_AARCH64_P32_TLSLE_MOVW_TPREL_G0", Const, 4, ""}, + {"R_AARCH64_P32_TLSLE_MOVW_TPREL_G0_NC", Const, 4, ""}, + {"R_AARCH64_P32_TLSLE_MOVW_TPREL_G1", Const, 4, ""}, + {"R_AARCH64_P32_TLS_DTPMOD", Const, 4, ""}, + {"R_AARCH64_P32_TLS_DTPREL", Const, 4, ""}, + {"R_AARCH64_P32_TLS_TPREL", Const, 4, ""}, + {"R_AARCH64_P32_TSTBR14", Const, 4, ""}, + {"R_AARCH64_PREL16", Const, 4, ""}, + {"R_AARCH64_PREL32", Const, 4, ""}, + {"R_AARCH64_PREL64", Const, 4, ""}, + {"R_AARCH64_RELATIVE", Const, 4, ""}, + {"R_AARCH64_TLSDESC", Const, 4, ""}, + {"R_AARCH64_TLSDESC_ADD", Const, 4, ""}, + {"R_AARCH64_TLSDESC_ADD_LO12_NC", Const, 4, ""}, + {"R_AARCH64_TLSDESC_ADR_PAGE21", Const, 4, ""}, + {"R_AARCH64_TLSDESC_ADR_PREL21", Const, 4, ""}, + {"R_AARCH64_TLSDESC_CALL", Const, 4, ""}, + {"R_AARCH64_TLSDESC_LD64_LO12_NC", Const, 4, ""}, + {"R_AARCH64_TLSDESC_LDR", Const, 4, ""}, + {"R_AARCH64_TLSDESC_LD_PREL19", Const, 4, ""}, + {"R_AARCH64_TLSDESC_OFF_G0_NC", Const, 4, ""}, + {"R_AARCH64_TLSDESC_OFF_G1", Const, 4, ""}, + {"R_AARCH64_TLSGD_ADD_LO12_NC", Const, 4, ""}, + {"R_AARCH64_TLSGD_ADR_PAGE21", Const, 4, ""}, + {"R_AARCH64_TLSGD_ADR_PREL21", Const, 10, ""}, + {"R_AARCH64_TLSGD_MOVW_G0_NC", Const, 10, ""}, + {"R_AARCH64_TLSGD_MOVW_G1", Const, 10, ""}, + {"R_AARCH64_TLSIE_ADR_GOTTPREL_PAGE21", Const, 4, ""}, + {"R_AARCH64_TLSIE_LD64_GOTTPREL_LO12_NC", Const, 4, ""}, + {"R_AARCH64_TLSIE_LD_GOTTPREL_PREL19", Const, 4, ""}, + {"R_AARCH64_TLSIE_MOVW_GOTTPREL_G0_NC", Const, 4, ""}, + {"R_AARCH64_TLSIE_MOVW_GOTTPREL_G1", Const, 4, ""}, + {"R_AARCH64_TLSLD_ADR_PAGE21", Const, 10, ""}, + {"R_AARCH64_TLSLD_ADR_PREL21", Const, 10, ""}, + {"R_AARCH64_TLSLD_LDST128_DTPREL_LO12", Const, 10, ""}, + {"R_AARCH64_TLSLD_LDST128_DTPREL_LO12_NC", Const, 10, ""}, + {"R_AARCH64_TLSLE_ADD_TPREL_HI12", Const, 4, ""}, + {"R_AARCH64_TLSLE_ADD_TPREL_LO12", Const, 4, ""}, + {"R_AARCH64_TLSLE_ADD_TPREL_LO12_NC", Const, 4, ""}, + 
{"R_AARCH64_TLSLE_LDST128_TPREL_LO12", Const, 10, ""}, + {"R_AARCH64_TLSLE_LDST128_TPREL_LO12_NC", Const, 10, ""}, + {"R_AARCH64_TLSLE_MOVW_TPREL_G0", Const, 4, ""}, + {"R_AARCH64_TLSLE_MOVW_TPREL_G0_NC", Const, 4, ""}, + {"R_AARCH64_TLSLE_MOVW_TPREL_G1", Const, 4, ""}, + {"R_AARCH64_TLSLE_MOVW_TPREL_G1_NC", Const, 4, ""}, + {"R_AARCH64_TLSLE_MOVW_TPREL_G2", Const, 4, ""}, + {"R_AARCH64_TLS_DTPMOD64", Const, 4, ""}, + {"R_AARCH64_TLS_DTPREL64", Const, 4, ""}, + {"R_AARCH64_TLS_TPREL64", Const, 4, ""}, + {"R_AARCH64_TSTBR14", Const, 4, ""}, + {"R_ALPHA", Type, 0, ""}, + {"R_ALPHA_BRADDR", Const, 0, ""}, + {"R_ALPHA_COPY", Const, 0, ""}, + {"R_ALPHA_GLOB_DAT", Const, 0, ""}, + {"R_ALPHA_GPDISP", Const, 0, ""}, + {"R_ALPHA_GPREL32", Const, 0, ""}, + {"R_ALPHA_GPRELHIGH", Const, 0, ""}, + {"R_ALPHA_GPRELLOW", Const, 0, ""}, + {"R_ALPHA_GPVALUE", Const, 0, ""}, + {"R_ALPHA_HINT", Const, 0, ""}, + {"R_ALPHA_IMMED_BR_HI32", Const, 0, ""}, + {"R_ALPHA_IMMED_GP_16", Const, 0, ""}, + {"R_ALPHA_IMMED_GP_HI32", Const, 0, ""}, + {"R_ALPHA_IMMED_LO32", Const, 0, ""}, + {"R_ALPHA_IMMED_SCN_HI32", Const, 0, ""}, + {"R_ALPHA_JMP_SLOT", Const, 0, ""}, + {"R_ALPHA_LITERAL", Const, 0, ""}, + {"R_ALPHA_LITUSE", Const, 0, ""}, + {"R_ALPHA_NONE", Const, 0, ""}, + {"R_ALPHA_OP_PRSHIFT", Const, 0, ""}, + {"R_ALPHA_OP_PSUB", Const, 0, ""}, + {"R_ALPHA_OP_PUSH", Const, 0, ""}, + {"R_ALPHA_OP_STORE", Const, 0, ""}, + {"R_ALPHA_REFLONG", Const, 0, ""}, + {"R_ALPHA_REFQUAD", Const, 0, ""}, + {"R_ALPHA_RELATIVE", Const, 0, ""}, + {"R_ALPHA_SREL16", Const, 0, ""}, + {"R_ALPHA_SREL32", Const, 0, ""}, + {"R_ALPHA_SREL64", Const, 0, ""}, + {"R_ARM", Type, 0, ""}, + {"R_ARM_ABS12", Const, 0, ""}, + {"R_ARM_ABS16", Const, 0, ""}, + {"R_ARM_ABS32", Const, 0, ""}, + {"R_ARM_ABS32_NOI", Const, 10, ""}, + {"R_ARM_ABS8", Const, 0, ""}, + {"R_ARM_ALU_PCREL_15_8", Const, 10, ""}, + {"R_ARM_ALU_PCREL_23_15", Const, 10, ""}, + {"R_ARM_ALU_PCREL_7_0", Const, 10, ""}, + {"R_ARM_ALU_PC_G0", Const, 10, ""}, + {"R_ARM_ALU_PC_G0_NC", Const, 10, ""}, + {"R_ARM_ALU_PC_G1", Const, 10, ""}, + {"R_ARM_ALU_PC_G1_NC", Const, 10, ""}, + {"R_ARM_ALU_PC_G2", Const, 10, ""}, + {"R_ARM_ALU_SBREL_19_12_NC", Const, 10, ""}, + {"R_ARM_ALU_SBREL_27_20_CK", Const, 10, ""}, + {"R_ARM_ALU_SB_G0", Const, 10, ""}, + {"R_ARM_ALU_SB_G0_NC", Const, 10, ""}, + {"R_ARM_ALU_SB_G1", Const, 10, ""}, + {"R_ARM_ALU_SB_G1_NC", Const, 10, ""}, + {"R_ARM_ALU_SB_G2", Const, 10, ""}, + {"R_ARM_AMP_VCALL9", Const, 0, ""}, + {"R_ARM_BASE_ABS", Const, 10, ""}, + {"R_ARM_CALL", Const, 10, ""}, + {"R_ARM_COPY", Const, 0, ""}, + {"R_ARM_GLOB_DAT", Const, 0, ""}, + {"R_ARM_GNU_VTENTRY", Const, 0, ""}, + {"R_ARM_GNU_VTINHERIT", Const, 0, ""}, + {"R_ARM_GOT32", Const, 0, ""}, + {"R_ARM_GOTOFF", Const, 0, ""}, + {"R_ARM_GOTOFF12", Const, 10, ""}, + {"R_ARM_GOTPC", Const, 0, ""}, + {"R_ARM_GOTRELAX", Const, 10, ""}, + {"R_ARM_GOT_ABS", Const, 10, ""}, + {"R_ARM_GOT_BREL12", Const, 10, ""}, + {"R_ARM_GOT_PREL", Const, 10, ""}, + {"R_ARM_IRELATIVE", Const, 10, ""}, + {"R_ARM_JUMP24", Const, 10, ""}, + {"R_ARM_JUMP_SLOT", Const, 0, ""}, + {"R_ARM_LDC_PC_G0", Const, 10, ""}, + {"R_ARM_LDC_PC_G1", Const, 10, ""}, + {"R_ARM_LDC_PC_G2", Const, 10, ""}, + {"R_ARM_LDC_SB_G0", Const, 10, ""}, + {"R_ARM_LDC_SB_G1", Const, 10, ""}, + {"R_ARM_LDC_SB_G2", Const, 10, ""}, + {"R_ARM_LDRS_PC_G0", Const, 10, ""}, + {"R_ARM_LDRS_PC_G1", Const, 10, ""}, + {"R_ARM_LDRS_PC_G2", Const, 10, ""}, + {"R_ARM_LDRS_SB_G0", Const, 10, ""}, + {"R_ARM_LDRS_SB_G1", Const, 10, ""}, + {"R_ARM_LDRS_SB_G2", Const, 10, ""}, 
+ {"R_ARM_LDR_PC_G1", Const, 10, ""}, + {"R_ARM_LDR_PC_G2", Const, 10, ""}, + {"R_ARM_LDR_SBREL_11_10_NC", Const, 10, ""}, + {"R_ARM_LDR_SB_G0", Const, 10, ""}, + {"R_ARM_LDR_SB_G1", Const, 10, ""}, + {"R_ARM_LDR_SB_G2", Const, 10, ""}, + {"R_ARM_ME_TOO", Const, 10, ""}, + {"R_ARM_MOVT_ABS", Const, 10, ""}, + {"R_ARM_MOVT_BREL", Const, 10, ""}, + {"R_ARM_MOVT_PREL", Const, 10, ""}, + {"R_ARM_MOVW_ABS_NC", Const, 10, ""}, + {"R_ARM_MOVW_BREL", Const, 10, ""}, + {"R_ARM_MOVW_BREL_NC", Const, 10, ""}, + {"R_ARM_MOVW_PREL_NC", Const, 10, ""}, + {"R_ARM_NONE", Const, 0, ""}, + {"R_ARM_PC13", Const, 0, ""}, + {"R_ARM_PC24", Const, 0, ""}, + {"R_ARM_PLT32", Const, 0, ""}, + {"R_ARM_PLT32_ABS", Const, 10, ""}, + {"R_ARM_PREL31", Const, 10, ""}, + {"R_ARM_PRIVATE_0", Const, 10, ""}, + {"R_ARM_PRIVATE_1", Const, 10, ""}, + {"R_ARM_PRIVATE_10", Const, 10, ""}, + {"R_ARM_PRIVATE_11", Const, 10, ""}, + {"R_ARM_PRIVATE_12", Const, 10, ""}, + {"R_ARM_PRIVATE_13", Const, 10, ""}, + {"R_ARM_PRIVATE_14", Const, 10, ""}, + {"R_ARM_PRIVATE_15", Const, 10, ""}, + {"R_ARM_PRIVATE_2", Const, 10, ""}, + {"R_ARM_PRIVATE_3", Const, 10, ""}, + {"R_ARM_PRIVATE_4", Const, 10, ""}, + {"R_ARM_PRIVATE_5", Const, 10, ""}, + {"R_ARM_PRIVATE_6", Const, 10, ""}, + {"R_ARM_PRIVATE_7", Const, 10, ""}, + {"R_ARM_PRIVATE_8", Const, 10, ""}, + {"R_ARM_PRIVATE_9", Const, 10, ""}, + {"R_ARM_RABS32", Const, 0, ""}, + {"R_ARM_RBASE", Const, 0, ""}, + {"R_ARM_REL32", Const, 0, ""}, + {"R_ARM_REL32_NOI", Const, 10, ""}, + {"R_ARM_RELATIVE", Const, 0, ""}, + {"R_ARM_RPC24", Const, 0, ""}, + {"R_ARM_RREL32", Const, 0, ""}, + {"R_ARM_RSBREL32", Const, 0, ""}, + {"R_ARM_RXPC25", Const, 10, ""}, + {"R_ARM_SBREL31", Const, 10, ""}, + {"R_ARM_SBREL32", Const, 0, ""}, + {"R_ARM_SWI24", Const, 0, ""}, + {"R_ARM_TARGET1", Const, 10, ""}, + {"R_ARM_TARGET2", Const, 10, ""}, + {"R_ARM_THM_ABS5", Const, 0, ""}, + {"R_ARM_THM_ALU_ABS_G0_NC", Const, 10, ""}, + {"R_ARM_THM_ALU_ABS_G1_NC", Const, 10, ""}, + {"R_ARM_THM_ALU_ABS_G2_NC", Const, 10, ""}, + {"R_ARM_THM_ALU_ABS_G3", Const, 10, ""}, + {"R_ARM_THM_ALU_PREL_11_0", Const, 10, ""}, + {"R_ARM_THM_GOT_BREL12", Const, 10, ""}, + {"R_ARM_THM_JUMP11", Const, 10, ""}, + {"R_ARM_THM_JUMP19", Const, 10, ""}, + {"R_ARM_THM_JUMP24", Const, 10, ""}, + {"R_ARM_THM_JUMP6", Const, 10, ""}, + {"R_ARM_THM_JUMP8", Const, 10, ""}, + {"R_ARM_THM_MOVT_ABS", Const, 10, ""}, + {"R_ARM_THM_MOVT_BREL", Const, 10, ""}, + {"R_ARM_THM_MOVT_PREL", Const, 10, ""}, + {"R_ARM_THM_MOVW_ABS_NC", Const, 10, ""}, + {"R_ARM_THM_MOVW_BREL", Const, 10, ""}, + {"R_ARM_THM_MOVW_BREL_NC", Const, 10, ""}, + {"R_ARM_THM_MOVW_PREL_NC", Const, 10, ""}, + {"R_ARM_THM_PC12", Const, 10, ""}, + {"R_ARM_THM_PC22", Const, 0, ""}, + {"R_ARM_THM_PC8", Const, 0, ""}, + {"R_ARM_THM_RPC22", Const, 0, ""}, + {"R_ARM_THM_SWI8", Const, 0, ""}, + {"R_ARM_THM_TLS_CALL", Const, 10, ""}, + {"R_ARM_THM_TLS_DESCSEQ16", Const, 10, ""}, + {"R_ARM_THM_TLS_DESCSEQ32", Const, 10, ""}, + {"R_ARM_THM_XPC22", Const, 0, ""}, + {"R_ARM_TLS_CALL", Const, 10, ""}, + {"R_ARM_TLS_DESCSEQ", Const, 10, ""}, + {"R_ARM_TLS_DTPMOD32", Const, 10, ""}, + {"R_ARM_TLS_DTPOFF32", Const, 10, ""}, + {"R_ARM_TLS_GD32", Const, 10, ""}, + {"R_ARM_TLS_GOTDESC", Const, 10, ""}, + {"R_ARM_TLS_IE12GP", Const, 10, ""}, + {"R_ARM_TLS_IE32", Const, 10, ""}, + {"R_ARM_TLS_LDM32", Const, 10, ""}, + {"R_ARM_TLS_LDO12", Const, 10, ""}, + {"R_ARM_TLS_LDO32", Const, 10, ""}, + {"R_ARM_TLS_LE12", Const, 10, ""}, + {"R_ARM_TLS_LE32", Const, 10, ""}, + {"R_ARM_TLS_TPOFF32", Const, 10, ""}, + 
{"R_ARM_V4BX", Const, 10, ""}, + {"R_ARM_XPC25", Const, 0, ""}, + {"R_INFO", Func, 0, "func(sym uint32, typ uint32) uint64"}, + {"R_INFO32", Func, 0, "func(sym uint32, typ uint32) uint32"}, + {"R_LARCH", Type, 19, ""}, + {"R_LARCH_32", Const, 19, ""}, + {"R_LARCH_32_PCREL", Const, 20, ""}, + {"R_LARCH_64", Const, 19, ""}, + {"R_LARCH_64_PCREL", Const, 22, ""}, + {"R_LARCH_ABS64_HI12", Const, 20, ""}, + {"R_LARCH_ABS64_LO20", Const, 20, ""}, + {"R_LARCH_ABS_HI20", Const, 20, ""}, + {"R_LARCH_ABS_LO12", Const, 20, ""}, + {"R_LARCH_ADD16", Const, 19, ""}, + {"R_LARCH_ADD24", Const, 19, ""}, + {"R_LARCH_ADD32", Const, 19, ""}, + {"R_LARCH_ADD6", Const, 22, ""}, + {"R_LARCH_ADD64", Const, 19, ""}, + {"R_LARCH_ADD8", Const, 19, ""}, + {"R_LARCH_ADD_ULEB128", Const, 22, ""}, + {"R_LARCH_ALIGN", Const, 22, ""}, + {"R_LARCH_B16", Const, 20, ""}, + {"R_LARCH_B21", Const, 20, ""}, + {"R_LARCH_B26", Const, 20, ""}, + {"R_LARCH_CFA", Const, 22, ""}, + {"R_LARCH_COPY", Const, 19, ""}, + {"R_LARCH_DELETE", Const, 22, ""}, + {"R_LARCH_GNU_VTENTRY", Const, 20, ""}, + {"R_LARCH_GNU_VTINHERIT", Const, 20, ""}, + {"R_LARCH_GOT64_HI12", Const, 20, ""}, + {"R_LARCH_GOT64_LO20", Const, 20, ""}, + {"R_LARCH_GOT64_PC_HI12", Const, 20, ""}, + {"R_LARCH_GOT64_PC_LO20", Const, 20, ""}, + {"R_LARCH_GOT_HI20", Const, 20, ""}, + {"R_LARCH_GOT_LO12", Const, 20, ""}, + {"R_LARCH_GOT_PC_HI20", Const, 20, ""}, + {"R_LARCH_GOT_PC_LO12", Const, 20, ""}, + {"R_LARCH_IRELATIVE", Const, 19, ""}, + {"R_LARCH_JUMP_SLOT", Const, 19, ""}, + {"R_LARCH_MARK_LA", Const, 19, ""}, + {"R_LARCH_MARK_PCREL", Const, 19, ""}, + {"R_LARCH_NONE", Const, 19, ""}, + {"R_LARCH_PCALA64_HI12", Const, 20, ""}, + {"R_LARCH_PCALA64_LO20", Const, 20, ""}, + {"R_LARCH_PCALA_HI20", Const, 20, ""}, + {"R_LARCH_PCALA_LO12", Const, 20, ""}, + {"R_LARCH_PCREL20_S2", Const, 22, ""}, + {"R_LARCH_RELATIVE", Const, 19, ""}, + {"R_LARCH_RELAX", Const, 20, ""}, + {"R_LARCH_SOP_ADD", Const, 19, ""}, + {"R_LARCH_SOP_AND", Const, 19, ""}, + {"R_LARCH_SOP_ASSERT", Const, 19, ""}, + {"R_LARCH_SOP_IF_ELSE", Const, 19, ""}, + {"R_LARCH_SOP_NOT", Const, 19, ""}, + {"R_LARCH_SOP_POP_32_S_0_10_10_16_S2", Const, 19, ""}, + {"R_LARCH_SOP_POP_32_S_0_5_10_16_S2", Const, 19, ""}, + {"R_LARCH_SOP_POP_32_S_10_12", Const, 19, ""}, + {"R_LARCH_SOP_POP_32_S_10_16", Const, 19, ""}, + {"R_LARCH_SOP_POP_32_S_10_16_S2", Const, 19, ""}, + {"R_LARCH_SOP_POP_32_S_10_5", Const, 19, ""}, + {"R_LARCH_SOP_POP_32_S_5_20", Const, 19, ""}, + {"R_LARCH_SOP_POP_32_U", Const, 19, ""}, + {"R_LARCH_SOP_POP_32_U_10_12", Const, 19, ""}, + {"R_LARCH_SOP_PUSH_ABSOLUTE", Const, 19, ""}, + {"R_LARCH_SOP_PUSH_DUP", Const, 19, ""}, + {"R_LARCH_SOP_PUSH_GPREL", Const, 19, ""}, + {"R_LARCH_SOP_PUSH_PCREL", Const, 19, ""}, + {"R_LARCH_SOP_PUSH_PLT_PCREL", Const, 19, ""}, + {"R_LARCH_SOP_PUSH_TLS_GD", Const, 19, ""}, + {"R_LARCH_SOP_PUSH_TLS_GOT", Const, 19, ""}, + {"R_LARCH_SOP_PUSH_TLS_TPREL", Const, 19, ""}, + {"R_LARCH_SOP_SL", Const, 19, ""}, + {"R_LARCH_SOP_SR", Const, 19, ""}, + {"R_LARCH_SOP_SUB", Const, 19, ""}, + {"R_LARCH_SUB16", Const, 19, ""}, + {"R_LARCH_SUB24", Const, 19, ""}, + {"R_LARCH_SUB32", Const, 19, ""}, + {"R_LARCH_SUB6", Const, 22, ""}, + {"R_LARCH_SUB64", Const, 19, ""}, + {"R_LARCH_SUB8", Const, 19, ""}, + {"R_LARCH_SUB_ULEB128", Const, 22, ""}, + {"R_LARCH_TLS_DTPMOD32", Const, 19, ""}, + {"R_LARCH_TLS_DTPMOD64", Const, 19, ""}, + {"R_LARCH_TLS_DTPREL32", Const, 19, ""}, + {"R_LARCH_TLS_DTPREL64", Const, 19, ""}, + {"R_LARCH_TLS_GD_HI20", Const, 20, ""}, + {"R_LARCH_TLS_GD_PC_HI20", 
Const, 20, ""}, + {"R_LARCH_TLS_IE64_HI12", Const, 20, ""}, + {"R_LARCH_TLS_IE64_LO20", Const, 20, ""}, + {"R_LARCH_TLS_IE64_PC_HI12", Const, 20, ""}, + {"R_LARCH_TLS_IE64_PC_LO20", Const, 20, ""}, + {"R_LARCH_TLS_IE_HI20", Const, 20, ""}, + {"R_LARCH_TLS_IE_LO12", Const, 20, ""}, + {"R_LARCH_TLS_IE_PC_HI20", Const, 20, ""}, + {"R_LARCH_TLS_IE_PC_LO12", Const, 20, ""}, + {"R_LARCH_TLS_LD_HI20", Const, 20, ""}, + {"R_LARCH_TLS_LD_PC_HI20", Const, 20, ""}, + {"R_LARCH_TLS_LE64_HI12", Const, 20, ""}, + {"R_LARCH_TLS_LE64_LO20", Const, 20, ""}, + {"R_LARCH_TLS_LE_HI20", Const, 20, ""}, + {"R_LARCH_TLS_LE_LO12", Const, 20, ""}, + {"R_LARCH_TLS_TPREL32", Const, 19, ""}, + {"R_LARCH_TLS_TPREL64", Const, 19, ""}, + {"R_MIPS", Type, 6, ""}, + {"R_MIPS_16", Const, 6, ""}, + {"R_MIPS_26", Const, 6, ""}, + {"R_MIPS_32", Const, 6, ""}, + {"R_MIPS_64", Const, 6, ""}, + {"R_MIPS_ADD_IMMEDIATE", Const, 6, ""}, + {"R_MIPS_CALL16", Const, 6, ""}, + {"R_MIPS_CALL_HI16", Const, 6, ""}, + {"R_MIPS_CALL_LO16", Const, 6, ""}, + {"R_MIPS_DELETE", Const, 6, ""}, + {"R_MIPS_GOT16", Const, 6, ""}, + {"R_MIPS_GOT_DISP", Const, 6, ""}, + {"R_MIPS_GOT_HI16", Const, 6, ""}, + {"R_MIPS_GOT_LO16", Const, 6, ""}, + {"R_MIPS_GOT_OFST", Const, 6, ""}, + {"R_MIPS_GOT_PAGE", Const, 6, ""}, + {"R_MIPS_GPREL16", Const, 6, ""}, + {"R_MIPS_GPREL32", Const, 6, ""}, + {"R_MIPS_HI16", Const, 6, ""}, + {"R_MIPS_HIGHER", Const, 6, ""}, + {"R_MIPS_HIGHEST", Const, 6, ""}, + {"R_MIPS_INSERT_A", Const, 6, ""}, + {"R_MIPS_INSERT_B", Const, 6, ""}, + {"R_MIPS_JALR", Const, 6, ""}, + {"R_MIPS_LITERAL", Const, 6, ""}, + {"R_MIPS_LO16", Const, 6, ""}, + {"R_MIPS_NONE", Const, 6, ""}, + {"R_MIPS_PC16", Const, 6, ""}, + {"R_MIPS_PC32", Const, 22, ""}, + {"R_MIPS_PJUMP", Const, 6, ""}, + {"R_MIPS_REL16", Const, 6, ""}, + {"R_MIPS_REL32", Const, 6, ""}, + {"R_MIPS_RELGOT", Const, 6, ""}, + {"R_MIPS_SCN_DISP", Const, 6, ""}, + {"R_MIPS_SHIFT5", Const, 6, ""}, + {"R_MIPS_SHIFT6", Const, 6, ""}, + {"R_MIPS_SUB", Const, 6, ""}, + {"R_MIPS_TLS_DTPMOD32", Const, 6, ""}, + {"R_MIPS_TLS_DTPMOD64", Const, 6, ""}, + {"R_MIPS_TLS_DTPREL32", Const, 6, ""}, + {"R_MIPS_TLS_DTPREL64", Const, 6, ""}, + {"R_MIPS_TLS_DTPREL_HI16", Const, 6, ""}, + {"R_MIPS_TLS_DTPREL_LO16", Const, 6, ""}, + {"R_MIPS_TLS_GD", Const, 6, ""}, + {"R_MIPS_TLS_GOTTPREL", Const, 6, ""}, + {"R_MIPS_TLS_LDM", Const, 6, ""}, + {"R_MIPS_TLS_TPREL32", Const, 6, ""}, + {"R_MIPS_TLS_TPREL64", Const, 6, ""}, + {"R_MIPS_TLS_TPREL_HI16", Const, 6, ""}, + {"R_MIPS_TLS_TPREL_LO16", Const, 6, ""}, + {"R_PPC", Type, 0, ""}, + {"R_PPC64", Type, 5, ""}, + {"R_PPC64_ADDR14", Const, 5, ""}, + {"R_PPC64_ADDR14_BRNTAKEN", Const, 5, ""}, + {"R_PPC64_ADDR14_BRTAKEN", Const, 5, ""}, + {"R_PPC64_ADDR16", Const, 5, ""}, + {"R_PPC64_ADDR16_DS", Const, 5, ""}, + {"R_PPC64_ADDR16_HA", Const, 5, ""}, + {"R_PPC64_ADDR16_HI", Const, 5, ""}, + {"R_PPC64_ADDR16_HIGH", Const, 10, ""}, + {"R_PPC64_ADDR16_HIGHA", Const, 10, ""}, + {"R_PPC64_ADDR16_HIGHER", Const, 5, ""}, + {"R_PPC64_ADDR16_HIGHER34", Const, 20, ""}, + {"R_PPC64_ADDR16_HIGHERA", Const, 5, ""}, + {"R_PPC64_ADDR16_HIGHERA34", Const, 20, ""}, + {"R_PPC64_ADDR16_HIGHEST", Const, 5, ""}, + {"R_PPC64_ADDR16_HIGHEST34", Const, 20, ""}, + {"R_PPC64_ADDR16_HIGHESTA", Const, 5, ""}, + {"R_PPC64_ADDR16_HIGHESTA34", Const, 20, ""}, + {"R_PPC64_ADDR16_LO", Const, 5, ""}, + {"R_PPC64_ADDR16_LO_DS", Const, 5, ""}, + {"R_PPC64_ADDR24", Const, 5, ""}, + {"R_PPC64_ADDR32", Const, 5, ""}, + {"R_PPC64_ADDR64", Const, 5, ""}, + {"R_PPC64_ADDR64_LOCAL", Const, 10, ""}, + 
{"R_PPC64_COPY", Const, 20, ""}, + {"R_PPC64_D28", Const, 20, ""}, + {"R_PPC64_D34", Const, 20, ""}, + {"R_PPC64_D34_HA30", Const, 20, ""}, + {"R_PPC64_D34_HI30", Const, 20, ""}, + {"R_PPC64_D34_LO", Const, 20, ""}, + {"R_PPC64_DTPMOD64", Const, 5, ""}, + {"R_PPC64_DTPREL16", Const, 5, ""}, + {"R_PPC64_DTPREL16_DS", Const, 5, ""}, + {"R_PPC64_DTPREL16_HA", Const, 5, ""}, + {"R_PPC64_DTPREL16_HI", Const, 5, ""}, + {"R_PPC64_DTPREL16_HIGH", Const, 10, ""}, + {"R_PPC64_DTPREL16_HIGHA", Const, 10, ""}, + {"R_PPC64_DTPREL16_HIGHER", Const, 5, ""}, + {"R_PPC64_DTPREL16_HIGHERA", Const, 5, ""}, + {"R_PPC64_DTPREL16_HIGHEST", Const, 5, ""}, + {"R_PPC64_DTPREL16_HIGHESTA", Const, 5, ""}, + {"R_PPC64_DTPREL16_LO", Const, 5, ""}, + {"R_PPC64_DTPREL16_LO_DS", Const, 5, ""}, + {"R_PPC64_DTPREL34", Const, 20, ""}, + {"R_PPC64_DTPREL64", Const, 5, ""}, + {"R_PPC64_ENTRY", Const, 10, ""}, + {"R_PPC64_GLOB_DAT", Const, 20, ""}, + {"R_PPC64_GNU_VTENTRY", Const, 20, ""}, + {"R_PPC64_GNU_VTINHERIT", Const, 20, ""}, + {"R_PPC64_GOT16", Const, 5, ""}, + {"R_PPC64_GOT16_DS", Const, 5, ""}, + {"R_PPC64_GOT16_HA", Const, 5, ""}, + {"R_PPC64_GOT16_HI", Const, 5, ""}, + {"R_PPC64_GOT16_LO", Const, 5, ""}, + {"R_PPC64_GOT16_LO_DS", Const, 5, ""}, + {"R_PPC64_GOT_DTPREL16_DS", Const, 5, ""}, + {"R_PPC64_GOT_DTPREL16_HA", Const, 5, ""}, + {"R_PPC64_GOT_DTPREL16_HI", Const, 5, ""}, + {"R_PPC64_GOT_DTPREL16_LO_DS", Const, 5, ""}, + {"R_PPC64_GOT_DTPREL_PCREL34", Const, 20, ""}, + {"R_PPC64_GOT_PCREL34", Const, 20, ""}, + {"R_PPC64_GOT_TLSGD16", Const, 5, ""}, + {"R_PPC64_GOT_TLSGD16_HA", Const, 5, ""}, + {"R_PPC64_GOT_TLSGD16_HI", Const, 5, ""}, + {"R_PPC64_GOT_TLSGD16_LO", Const, 5, ""}, + {"R_PPC64_GOT_TLSGD_PCREL34", Const, 20, ""}, + {"R_PPC64_GOT_TLSLD16", Const, 5, ""}, + {"R_PPC64_GOT_TLSLD16_HA", Const, 5, ""}, + {"R_PPC64_GOT_TLSLD16_HI", Const, 5, ""}, + {"R_PPC64_GOT_TLSLD16_LO", Const, 5, ""}, + {"R_PPC64_GOT_TLSLD_PCREL34", Const, 20, ""}, + {"R_PPC64_GOT_TPREL16_DS", Const, 5, ""}, + {"R_PPC64_GOT_TPREL16_HA", Const, 5, ""}, + {"R_PPC64_GOT_TPREL16_HI", Const, 5, ""}, + {"R_PPC64_GOT_TPREL16_LO_DS", Const, 5, ""}, + {"R_PPC64_GOT_TPREL_PCREL34", Const, 20, ""}, + {"R_PPC64_IRELATIVE", Const, 10, ""}, + {"R_PPC64_JMP_IREL", Const, 10, ""}, + {"R_PPC64_JMP_SLOT", Const, 5, ""}, + {"R_PPC64_NONE", Const, 5, ""}, + {"R_PPC64_PCREL28", Const, 20, ""}, + {"R_PPC64_PCREL34", Const, 20, ""}, + {"R_PPC64_PCREL_OPT", Const, 20, ""}, + {"R_PPC64_PLT16_HA", Const, 20, ""}, + {"R_PPC64_PLT16_HI", Const, 20, ""}, + {"R_PPC64_PLT16_LO", Const, 20, ""}, + {"R_PPC64_PLT16_LO_DS", Const, 10, ""}, + {"R_PPC64_PLT32", Const, 20, ""}, + {"R_PPC64_PLT64", Const, 20, ""}, + {"R_PPC64_PLTCALL", Const, 20, ""}, + {"R_PPC64_PLTCALL_NOTOC", Const, 20, ""}, + {"R_PPC64_PLTGOT16", Const, 10, ""}, + {"R_PPC64_PLTGOT16_DS", Const, 10, ""}, + {"R_PPC64_PLTGOT16_HA", Const, 10, ""}, + {"R_PPC64_PLTGOT16_HI", Const, 10, ""}, + {"R_PPC64_PLTGOT16_LO", Const, 10, ""}, + {"R_PPC64_PLTGOT_LO_DS", Const, 10, ""}, + {"R_PPC64_PLTREL32", Const, 20, ""}, + {"R_PPC64_PLTREL64", Const, 20, ""}, + {"R_PPC64_PLTSEQ", Const, 20, ""}, + {"R_PPC64_PLTSEQ_NOTOC", Const, 20, ""}, + {"R_PPC64_PLT_PCREL34", Const, 20, ""}, + {"R_PPC64_PLT_PCREL34_NOTOC", Const, 20, ""}, + {"R_PPC64_REL14", Const, 5, ""}, + {"R_PPC64_REL14_BRNTAKEN", Const, 5, ""}, + {"R_PPC64_REL14_BRTAKEN", Const, 5, ""}, + {"R_PPC64_REL16", Const, 5, ""}, + {"R_PPC64_REL16DX_HA", Const, 10, ""}, + {"R_PPC64_REL16_HA", Const, 5, ""}, + {"R_PPC64_REL16_HI", Const, 5, ""}, + 
{"R_PPC64_REL16_HIGH", Const, 20, ""}, + {"R_PPC64_REL16_HIGHA", Const, 20, ""}, + {"R_PPC64_REL16_HIGHER", Const, 20, ""}, + {"R_PPC64_REL16_HIGHER34", Const, 20, ""}, + {"R_PPC64_REL16_HIGHERA", Const, 20, ""}, + {"R_PPC64_REL16_HIGHERA34", Const, 20, ""}, + {"R_PPC64_REL16_HIGHEST", Const, 20, ""}, + {"R_PPC64_REL16_HIGHEST34", Const, 20, ""}, + {"R_PPC64_REL16_HIGHESTA", Const, 20, ""}, + {"R_PPC64_REL16_HIGHESTA34", Const, 20, ""}, + {"R_PPC64_REL16_LO", Const, 5, ""}, + {"R_PPC64_REL24", Const, 5, ""}, + {"R_PPC64_REL24_NOTOC", Const, 10, ""}, + {"R_PPC64_REL24_P9NOTOC", Const, 21, ""}, + {"R_PPC64_REL30", Const, 20, ""}, + {"R_PPC64_REL32", Const, 5, ""}, + {"R_PPC64_REL64", Const, 5, ""}, + {"R_PPC64_RELATIVE", Const, 18, ""}, + {"R_PPC64_SECTOFF", Const, 20, ""}, + {"R_PPC64_SECTOFF_DS", Const, 10, ""}, + {"R_PPC64_SECTOFF_HA", Const, 20, ""}, + {"R_PPC64_SECTOFF_HI", Const, 20, ""}, + {"R_PPC64_SECTOFF_LO", Const, 20, ""}, + {"R_PPC64_SECTOFF_LO_DS", Const, 10, ""}, + {"R_PPC64_TLS", Const, 5, ""}, + {"R_PPC64_TLSGD", Const, 5, ""}, + {"R_PPC64_TLSLD", Const, 5, ""}, + {"R_PPC64_TOC", Const, 5, ""}, + {"R_PPC64_TOC16", Const, 5, ""}, + {"R_PPC64_TOC16_DS", Const, 5, ""}, + {"R_PPC64_TOC16_HA", Const, 5, ""}, + {"R_PPC64_TOC16_HI", Const, 5, ""}, + {"R_PPC64_TOC16_LO", Const, 5, ""}, + {"R_PPC64_TOC16_LO_DS", Const, 5, ""}, + {"R_PPC64_TOCSAVE", Const, 10, ""}, + {"R_PPC64_TPREL16", Const, 5, ""}, + {"R_PPC64_TPREL16_DS", Const, 5, ""}, + {"R_PPC64_TPREL16_HA", Const, 5, ""}, + {"R_PPC64_TPREL16_HI", Const, 5, ""}, + {"R_PPC64_TPREL16_HIGH", Const, 10, ""}, + {"R_PPC64_TPREL16_HIGHA", Const, 10, ""}, + {"R_PPC64_TPREL16_HIGHER", Const, 5, ""}, + {"R_PPC64_TPREL16_HIGHERA", Const, 5, ""}, + {"R_PPC64_TPREL16_HIGHEST", Const, 5, ""}, + {"R_PPC64_TPREL16_HIGHESTA", Const, 5, ""}, + {"R_PPC64_TPREL16_LO", Const, 5, ""}, + {"R_PPC64_TPREL16_LO_DS", Const, 5, ""}, + {"R_PPC64_TPREL34", Const, 20, ""}, + {"R_PPC64_TPREL64", Const, 5, ""}, + {"R_PPC64_UADDR16", Const, 20, ""}, + {"R_PPC64_UADDR32", Const, 20, ""}, + {"R_PPC64_UADDR64", Const, 20, ""}, + {"R_PPC_ADDR14", Const, 0, ""}, + {"R_PPC_ADDR14_BRNTAKEN", Const, 0, ""}, + {"R_PPC_ADDR14_BRTAKEN", Const, 0, ""}, + {"R_PPC_ADDR16", Const, 0, ""}, + {"R_PPC_ADDR16_HA", Const, 0, ""}, + {"R_PPC_ADDR16_HI", Const, 0, ""}, + {"R_PPC_ADDR16_LO", Const, 0, ""}, + {"R_PPC_ADDR24", Const, 0, ""}, + {"R_PPC_ADDR32", Const, 0, ""}, + {"R_PPC_COPY", Const, 0, ""}, + {"R_PPC_DTPMOD32", Const, 0, ""}, + {"R_PPC_DTPREL16", Const, 0, ""}, + {"R_PPC_DTPREL16_HA", Const, 0, ""}, + {"R_PPC_DTPREL16_HI", Const, 0, ""}, + {"R_PPC_DTPREL16_LO", Const, 0, ""}, + {"R_PPC_DTPREL32", Const, 0, ""}, + {"R_PPC_EMB_BIT_FLD", Const, 0, ""}, + {"R_PPC_EMB_MRKREF", Const, 0, ""}, + {"R_PPC_EMB_NADDR16", Const, 0, ""}, + {"R_PPC_EMB_NADDR16_HA", Const, 0, ""}, + {"R_PPC_EMB_NADDR16_HI", Const, 0, ""}, + {"R_PPC_EMB_NADDR16_LO", Const, 0, ""}, + {"R_PPC_EMB_NADDR32", Const, 0, ""}, + {"R_PPC_EMB_RELSDA", Const, 0, ""}, + {"R_PPC_EMB_RELSEC16", Const, 0, ""}, + {"R_PPC_EMB_RELST_HA", Const, 0, ""}, + {"R_PPC_EMB_RELST_HI", Const, 0, ""}, + {"R_PPC_EMB_RELST_LO", Const, 0, ""}, + {"R_PPC_EMB_SDA21", Const, 0, ""}, + {"R_PPC_EMB_SDA2I16", Const, 0, ""}, + {"R_PPC_EMB_SDA2REL", Const, 0, ""}, + {"R_PPC_EMB_SDAI16", Const, 0, ""}, + {"R_PPC_GLOB_DAT", Const, 0, ""}, + {"R_PPC_GOT16", Const, 0, ""}, + {"R_PPC_GOT16_HA", Const, 0, ""}, + {"R_PPC_GOT16_HI", Const, 0, ""}, + {"R_PPC_GOT16_LO", Const, 0, ""}, + {"R_PPC_GOT_TLSGD16", Const, 0, ""}, + {"R_PPC_GOT_TLSGD16_HA", 
Const, 0, ""}, + {"R_PPC_GOT_TLSGD16_HI", Const, 0, ""}, + {"R_PPC_GOT_TLSGD16_LO", Const, 0, ""}, + {"R_PPC_GOT_TLSLD16", Const, 0, ""}, + {"R_PPC_GOT_TLSLD16_HA", Const, 0, ""}, + {"R_PPC_GOT_TLSLD16_HI", Const, 0, ""}, + {"R_PPC_GOT_TLSLD16_LO", Const, 0, ""}, + {"R_PPC_GOT_TPREL16", Const, 0, ""}, + {"R_PPC_GOT_TPREL16_HA", Const, 0, ""}, + {"R_PPC_GOT_TPREL16_HI", Const, 0, ""}, + {"R_PPC_GOT_TPREL16_LO", Const, 0, ""}, + {"R_PPC_JMP_SLOT", Const, 0, ""}, + {"R_PPC_LOCAL24PC", Const, 0, ""}, + {"R_PPC_NONE", Const, 0, ""}, + {"R_PPC_PLT16_HA", Const, 0, ""}, + {"R_PPC_PLT16_HI", Const, 0, ""}, + {"R_PPC_PLT16_LO", Const, 0, ""}, + {"R_PPC_PLT32", Const, 0, ""}, + {"R_PPC_PLTREL24", Const, 0, ""}, + {"R_PPC_PLTREL32", Const, 0, ""}, + {"R_PPC_REL14", Const, 0, ""}, + {"R_PPC_REL14_BRNTAKEN", Const, 0, ""}, + {"R_PPC_REL14_BRTAKEN", Const, 0, ""}, + {"R_PPC_REL24", Const, 0, ""}, + {"R_PPC_REL32", Const, 0, ""}, + {"R_PPC_RELATIVE", Const, 0, ""}, + {"R_PPC_SDAREL16", Const, 0, ""}, + {"R_PPC_SECTOFF", Const, 0, ""}, + {"R_PPC_SECTOFF_HA", Const, 0, ""}, + {"R_PPC_SECTOFF_HI", Const, 0, ""}, + {"R_PPC_SECTOFF_LO", Const, 0, ""}, + {"R_PPC_TLS", Const, 0, ""}, + {"R_PPC_TPREL16", Const, 0, ""}, + {"R_PPC_TPREL16_HA", Const, 0, ""}, + {"R_PPC_TPREL16_HI", Const, 0, ""}, + {"R_PPC_TPREL16_LO", Const, 0, ""}, + {"R_PPC_TPREL32", Const, 0, ""}, + {"R_PPC_UADDR16", Const, 0, ""}, + {"R_PPC_UADDR32", Const, 0, ""}, + {"R_RISCV", Type, 11, ""}, + {"R_RISCV_32", Const, 11, ""}, + {"R_RISCV_32_PCREL", Const, 12, ""}, + {"R_RISCV_64", Const, 11, ""}, + {"R_RISCV_ADD16", Const, 11, ""}, + {"R_RISCV_ADD32", Const, 11, ""}, + {"R_RISCV_ADD64", Const, 11, ""}, + {"R_RISCV_ADD8", Const, 11, ""}, + {"R_RISCV_ALIGN", Const, 11, ""}, + {"R_RISCV_BRANCH", Const, 11, ""}, + {"R_RISCV_CALL", Const, 11, ""}, + {"R_RISCV_CALL_PLT", Const, 11, ""}, + {"R_RISCV_COPY", Const, 11, ""}, + {"R_RISCV_GNU_VTENTRY", Const, 11, ""}, + {"R_RISCV_GNU_VTINHERIT", Const, 11, ""}, + {"R_RISCV_GOT_HI20", Const, 11, ""}, + {"R_RISCV_GPREL_I", Const, 11, ""}, + {"R_RISCV_GPREL_S", Const, 11, ""}, + {"R_RISCV_HI20", Const, 11, ""}, + {"R_RISCV_JAL", Const, 11, ""}, + {"R_RISCV_JUMP_SLOT", Const, 11, ""}, + {"R_RISCV_LO12_I", Const, 11, ""}, + {"R_RISCV_LO12_S", Const, 11, ""}, + {"R_RISCV_NONE", Const, 11, ""}, + {"R_RISCV_PCREL_HI20", Const, 11, ""}, + {"R_RISCV_PCREL_LO12_I", Const, 11, ""}, + {"R_RISCV_PCREL_LO12_S", Const, 11, ""}, + {"R_RISCV_RELATIVE", Const, 11, ""}, + {"R_RISCV_RELAX", Const, 11, ""}, + {"R_RISCV_RVC_BRANCH", Const, 11, ""}, + {"R_RISCV_RVC_JUMP", Const, 11, ""}, + {"R_RISCV_RVC_LUI", Const, 11, ""}, + {"R_RISCV_SET16", Const, 11, ""}, + {"R_RISCV_SET32", Const, 11, ""}, + {"R_RISCV_SET6", Const, 11, ""}, + {"R_RISCV_SET8", Const, 11, ""}, + {"R_RISCV_SUB16", Const, 11, ""}, + {"R_RISCV_SUB32", Const, 11, ""}, + {"R_RISCV_SUB6", Const, 11, ""}, + {"R_RISCV_SUB64", Const, 11, ""}, + {"R_RISCV_SUB8", Const, 11, ""}, + {"R_RISCV_TLS_DTPMOD32", Const, 11, ""}, + {"R_RISCV_TLS_DTPMOD64", Const, 11, ""}, + {"R_RISCV_TLS_DTPREL32", Const, 11, ""}, + {"R_RISCV_TLS_DTPREL64", Const, 11, ""}, + {"R_RISCV_TLS_GD_HI20", Const, 11, ""}, + {"R_RISCV_TLS_GOT_HI20", Const, 11, ""}, + {"R_RISCV_TLS_TPREL32", Const, 11, ""}, + {"R_RISCV_TLS_TPREL64", Const, 11, ""}, + {"R_RISCV_TPREL_ADD", Const, 11, ""}, + {"R_RISCV_TPREL_HI20", Const, 11, ""}, + {"R_RISCV_TPREL_I", Const, 11, ""}, + {"R_RISCV_TPREL_LO12_I", Const, 11, ""}, + {"R_RISCV_TPREL_LO12_S", Const, 11, ""}, + {"R_RISCV_TPREL_S", Const, 11, ""}, + {"R_SPARC", 
Type, 0, ""}, + {"R_SPARC_10", Const, 0, ""}, + {"R_SPARC_11", Const, 0, ""}, + {"R_SPARC_13", Const, 0, ""}, + {"R_SPARC_16", Const, 0, ""}, + {"R_SPARC_22", Const, 0, ""}, + {"R_SPARC_32", Const, 0, ""}, + {"R_SPARC_5", Const, 0, ""}, + {"R_SPARC_6", Const, 0, ""}, + {"R_SPARC_64", Const, 0, ""}, + {"R_SPARC_7", Const, 0, ""}, + {"R_SPARC_8", Const, 0, ""}, + {"R_SPARC_COPY", Const, 0, ""}, + {"R_SPARC_DISP16", Const, 0, ""}, + {"R_SPARC_DISP32", Const, 0, ""}, + {"R_SPARC_DISP64", Const, 0, ""}, + {"R_SPARC_DISP8", Const, 0, ""}, + {"R_SPARC_GLOB_DAT", Const, 0, ""}, + {"R_SPARC_GLOB_JMP", Const, 0, ""}, + {"R_SPARC_GOT10", Const, 0, ""}, + {"R_SPARC_GOT13", Const, 0, ""}, + {"R_SPARC_GOT22", Const, 0, ""}, + {"R_SPARC_H44", Const, 0, ""}, + {"R_SPARC_HH22", Const, 0, ""}, + {"R_SPARC_HI22", Const, 0, ""}, + {"R_SPARC_HIPLT22", Const, 0, ""}, + {"R_SPARC_HIX22", Const, 0, ""}, + {"R_SPARC_HM10", Const, 0, ""}, + {"R_SPARC_JMP_SLOT", Const, 0, ""}, + {"R_SPARC_L44", Const, 0, ""}, + {"R_SPARC_LM22", Const, 0, ""}, + {"R_SPARC_LO10", Const, 0, ""}, + {"R_SPARC_LOPLT10", Const, 0, ""}, + {"R_SPARC_LOX10", Const, 0, ""}, + {"R_SPARC_M44", Const, 0, ""}, + {"R_SPARC_NONE", Const, 0, ""}, + {"R_SPARC_OLO10", Const, 0, ""}, + {"R_SPARC_PC10", Const, 0, ""}, + {"R_SPARC_PC22", Const, 0, ""}, + {"R_SPARC_PCPLT10", Const, 0, ""}, + {"R_SPARC_PCPLT22", Const, 0, ""}, + {"R_SPARC_PCPLT32", Const, 0, ""}, + {"R_SPARC_PC_HH22", Const, 0, ""}, + {"R_SPARC_PC_HM10", Const, 0, ""}, + {"R_SPARC_PC_LM22", Const, 0, ""}, + {"R_SPARC_PLT32", Const, 0, ""}, + {"R_SPARC_PLT64", Const, 0, ""}, + {"R_SPARC_REGISTER", Const, 0, ""}, + {"R_SPARC_RELATIVE", Const, 0, ""}, + {"R_SPARC_UA16", Const, 0, ""}, + {"R_SPARC_UA32", Const, 0, ""}, + {"R_SPARC_UA64", Const, 0, ""}, + {"R_SPARC_WDISP16", Const, 0, ""}, + {"R_SPARC_WDISP19", Const, 0, ""}, + {"R_SPARC_WDISP22", Const, 0, ""}, + {"R_SPARC_WDISP30", Const, 0, ""}, + {"R_SPARC_WPLT30", Const, 0, ""}, + {"R_SYM32", Func, 0, "func(info uint32) uint32"}, + {"R_SYM64", Func, 0, "func(info uint64) uint32"}, + {"R_TYPE32", Func, 0, "func(info uint32) uint32"}, + {"R_TYPE64", Func, 0, "func(info uint64) uint32"}, + {"R_X86_64", Type, 0, ""}, + {"R_X86_64_16", Const, 0, ""}, + {"R_X86_64_32", Const, 0, ""}, + {"R_X86_64_32S", Const, 0, ""}, + {"R_X86_64_64", Const, 0, ""}, + {"R_X86_64_8", Const, 0, ""}, + {"R_X86_64_COPY", Const, 0, ""}, + {"R_X86_64_DTPMOD64", Const, 0, ""}, + {"R_X86_64_DTPOFF32", Const, 0, ""}, + {"R_X86_64_DTPOFF64", Const, 0, ""}, + {"R_X86_64_GLOB_DAT", Const, 0, ""}, + {"R_X86_64_GOT32", Const, 0, ""}, + {"R_X86_64_GOT64", Const, 10, ""}, + {"R_X86_64_GOTOFF64", Const, 10, ""}, + {"R_X86_64_GOTPC32", Const, 10, ""}, + {"R_X86_64_GOTPC32_TLSDESC", Const, 10, ""}, + {"R_X86_64_GOTPC64", Const, 10, ""}, + {"R_X86_64_GOTPCREL", Const, 0, ""}, + {"R_X86_64_GOTPCREL64", Const, 10, ""}, + {"R_X86_64_GOTPCRELX", Const, 10, ""}, + {"R_X86_64_GOTPLT64", Const, 10, ""}, + {"R_X86_64_GOTTPOFF", Const, 0, ""}, + {"R_X86_64_IRELATIVE", Const, 10, ""}, + {"R_X86_64_JMP_SLOT", Const, 0, ""}, + {"R_X86_64_NONE", Const, 0, ""}, + {"R_X86_64_PC16", Const, 0, ""}, + {"R_X86_64_PC32", Const, 0, ""}, + {"R_X86_64_PC32_BND", Const, 10, ""}, + {"R_X86_64_PC64", Const, 10, ""}, + {"R_X86_64_PC8", Const, 0, ""}, + {"R_X86_64_PLT32", Const, 0, ""}, + {"R_X86_64_PLT32_BND", Const, 10, ""}, + {"R_X86_64_PLTOFF64", Const, 10, ""}, + {"R_X86_64_RELATIVE", Const, 0, ""}, + {"R_X86_64_RELATIVE64", Const, 10, ""}, + {"R_X86_64_REX_GOTPCRELX", Const, 10, ""}, + 
{"R_X86_64_SIZE32", Const, 10, ""}, + {"R_X86_64_SIZE64", Const, 10, ""}, + {"R_X86_64_TLSDESC", Const, 10, ""}, + {"R_X86_64_TLSDESC_CALL", Const, 10, ""}, + {"R_X86_64_TLSGD", Const, 0, ""}, + {"R_X86_64_TLSLD", Const, 0, ""}, + {"R_X86_64_TPOFF32", Const, 0, ""}, + {"R_X86_64_TPOFF64", Const, 0, ""}, + {"Rel32", Type, 0, ""}, + {"Rel32.Info", Field, 0, ""}, + {"Rel32.Off", Field, 0, ""}, + {"Rel64", Type, 0, ""}, + {"Rel64.Info", Field, 0, ""}, + {"Rel64.Off", Field, 0, ""}, + {"Rela32", Type, 0, ""}, + {"Rela32.Addend", Field, 0, ""}, + {"Rela32.Info", Field, 0, ""}, + {"Rela32.Off", Field, 0, ""}, + {"Rela64", Type, 0, ""}, + {"Rela64.Addend", Field, 0, ""}, + {"Rela64.Info", Field, 0, ""}, + {"Rela64.Off", Field, 0, ""}, + {"SHF_ALLOC", Const, 0, ""}, + {"SHF_COMPRESSED", Const, 6, ""}, + {"SHF_EXECINSTR", Const, 0, ""}, + {"SHF_GROUP", Const, 0, ""}, + {"SHF_INFO_LINK", Const, 0, ""}, + {"SHF_LINK_ORDER", Const, 0, ""}, + {"SHF_MASKOS", Const, 0, ""}, + {"SHF_MASKPROC", Const, 0, ""}, + {"SHF_MERGE", Const, 0, ""}, + {"SHF_OS_NONCONFORMING", Const, 0, ""}, + {"SHF_STRINGS", Const, 0, ""}, + {"SHF_TLS", Const, 0, ""}, + {"SHF_WRITE", Const, 0, ""}, + {"SHN_ABS", Const, 0, ""}, + {"SHN_COMMON", Const, 0, ""}, + {"SHN_HIOS", Const, 0, ""}, + {"SHN_HIPROC", Const, 0, ""}, + {"SHN_HIRESERVE", Const, 0, ""}, + {"SHN_LOOS", Const, 0, ""}, + {"SHN_LOPROC", Const, 0, ""}, + {"SHN_LORESERVE", Const, 0, ""}, + {"SHN_UNDEF", Const, 0, ""}, + {"SHN_XINDEX", Const, 0, ""}, + {"SHT_DYNAMIC", Const, 0, ""}, + {"SHT_DYNSYM", Const, 0, ""}, + {"SHT_FINI_ARRAY", Const, 0, ""}, + {"SHT_GNU_ATTRIBUTES", Const, 0, ""}, + {"SHT_GNU_HASH", Const, 0, ""}, + {"SHT_GNU_LIBLIST", Const, 0, ""}, + {"SHT_GNU_VERDEF", Const, 0, ""}, + {"SHT_GNU_VERNEED", Const, 0, ""}, + {"SHT_GNU_VERSYM", Const, 0, ""}, + {"SHT_GROUP", Const, 0, ""}, + {"SHT_HASH", Const, 0, ""}, + {"SHT_HIOS", Const, 0, ""}, + {"SHT_HIPROC", Const, 0, ""}, + {"SHT_HIUSER", Const, 0, ""}, + {"SHT_INIT_ARRAY", Const, 0, ""}, + {"SHT_LOOS", Const, 0, ""}, + {"SHT_LOPROC", Const, 0, ""}, + {"SHT_LOUSER", Const, 0, ""}, + {"SHT_MIPS_ABIFLAGS", Const, 17, ""}, + {"SHT_NOBITS", Const, 0, ""}, + {"SHT_NOTE", Const, 0, ""}, + {"SHT_NULL", Const, 0, ""}, + {"SHT_PREINIT_ARRAY", Const, 0, ""}, + {"SHT_PROGBITS", Const, 0, ""}, + {"SHT_REL", Const, 0, ""}, + {"SHT_RELA", Const, 0, ""}, + {"SHT_RISCV_ATTRIBUTES", Const, 25, ""}, + {"SHT_SHLIB", Const, 0, ""}, + {"SHT_STRTAB", Const, 0, ""}, + {"SHT_SYMTAB", Const, 0, ""}, + {"SHT_SYMTAB_SHNDX", Const, 0, ""}, + {"STB_GLOBAL", Const, 0, ""}, + {"STB_HIOS", Const, 0, ""}, + {"STB_HIPROC", Const, 0, ""}, + {"STB_LOCAL", Const, 0, ""}, + {"STB_LOOS", Const, 0, ""}, + {"STB_LOPROC", Const, 0, ""}, + {"STB_WEAK", Const, 0, ""}, + {"STT_COMMON", Const, 0, ""}, + {"STT_FILE", Const, 0, ""}, + {"STT_FUNC", Const, 0, ""}, + {"STT_GNU_IFUNC", Const, 23, ""}, + {"STT_HIOS", Const, 0, ""}, + {"STT_HIPROC", Const, 0, ""}, + {"STT_LOOS", Const, 0, ""}, + {"STT_LOPROC", Const, 0, ""}, + {"STT_NOTYPE", Const, 0, ""}, + {"STT_OBJECT", Const, 0, ""}, + {"STT_RELC", Const, 23, ""}, + {"STT_SECTION", Const, 0, ""}, + {"STT_SRELC", Const, 23, ""}, + {"STT_TLS", Const, 0, ""}, + {"STV_DEFAULT", Const, 0, ""}, + {"STV_HIDDEN", Const, 0, ""}, + {"STV_INTERNAL", Const, 0, ""}, + {"STV_PROTECTED", Const, 0, ""}, + {"ST_BIND", Func, 0, "func(info uint8) SymBind"}, + {"ST_INFO", Func, 0, "func(bind SymBind, typ SymType) uint8"}, + {"ST_TYPE", Func, 0, "func(info uint8) SymType"}, + {"ST_VISIBILITY", Func, 0, "func(other uint8) 
SymVis"}, + {"Section", Type, 0, ""}, + {"Section.ReaderAt", Field, 0, ""}, + {"Section.SectionHeader", Field, 0, ""}, + {"Section32", Type, 0, ""}, + {"Section32.Addr", Field, 0, ""}, + {"Section32.Addralign", Field, 0, ""}, + {"Section32.Entsize", Field, 0, ""}, + {"Section32.Flags", Field, 0, ""}, + {"Section32.Info", Field, 0, ""}, + {"Section32.Link", Field, 0, ""}, + {"Section32.Name", Field, 0, ""}, + {"Section32.Off", Field, 0, ""}, + {"Section32.Size", Field, 0, ""}, + {"Section32.Type", Field, 0, ""}, + {"Section64", Type, 0, ""}, + {"Section64.Addr", Field, 0, ""}, + {"Section64.Addralign", Field, 0, ""}, + {"Section64.Entsize", Field, 0, ""}, + {"Section64.Flags", Field, 0, ""}, + {"Section64.Info", Field, 0, ""}, + {"Section64.Link", Field, 0, ""}, + {"Section64.Name", Field, 0, ""}, + {"Section64.Off", Field, 0, ""}, + {"Section64.Size", Field, 0, ""}, + {"Section64.Type", Field, 0, ""}, + {"SectionFlag", Type, 0, ""}, + {"SectionHeader", Type, 0, ""}, + {"SectionHeader.Addr", Field, 0, ""}, + {"SectionHeader.Addralign", Field, 0, ""}, + {"SectionHeader.Entsize", Field, 0, ""}, + {"SectionHeader.FileSize", Field, 6, ""}, + {"SectionHeader.Flags", Field, 0, ""}, + {"SectionHeader.Info", Field, 0, ""}, + {"SectionHeader.Link", Field, 0, ""}, + {"SectionHeader.Name", Field, 0, ""}, + {"SectionHeader.Offset", Field, 0, ""}, + {"SectionHeader.Size", Field, 0, ""}, + {"SectionHeader.Type", Field, 0, ""}, + {"SectionIndex", Type, 0, ""}, + {"SectionType", Type, 0, ""}, + {"Sym32", Type, 0, ""}, + {"Sym32.Info", Field, 0, ""}, + {"Sym32.Name", Field, 0, ""}, + {"Sym32.Other", Field, 0, ""}, + {"Sym32.Shndx", Field, 0, ""}, + {"Sym32.Size", Field, 0, ""}, + {"Sym32.Value", Field, 0, ""}, + {"Sym32Size", Const, 0, ""}, + {"Sym64", Type, 0, ""}, + {"Sym64.Info", Field, 0, ""}, + {"Sym64.Name", Field, 0, ""}, + {"Sym64.Other", Field, 0, ""}, + {"Sym64.Shndx", Field, 0, ""}, + {"Sym64.Size", Field, 0, ""}, + {"Sym64.Value", Field, 0, ""}, + {"Sym64Size", Const, 0, ""}, + {"SymBind", Type, 0, ""}, + {"SymType", Type, 0, ""}, + {"SymVis", Type, 0, ""}, + {"Symbol", Type, 0, ""}, + {"Symbol.HasVersion", Field, 24, ""}, + {"Symbol.Info", Field, 0, ""}, + {"Symbol.Library", Field, 13, ""}, + {"Symbol.Name", Field, 0, ""}, + {"Symbol.Other", Field, 0, ""}, + {"Symbol.Section", Field, 0, ""}, + {"Symbol.Size", Field, 0, ""}, + {"Symbol.Value", Field, 0, ""}, + {"Symbol.Version", Field, 13, ""}, + {"Symbol.VersionIndex", Field, 24, ""}, + {"Type", Type, 0, ""}, + {"VER_FLG_BASE", Const, 24, ""}, + {"VER_FLG_INFO", Const, 24, ""}, + {"VER_FLG_WEAK", Const, 24, ""}, + {"Version", Type, 0, ""}, + {"VersionIndex", Type, 24, ""}, }, "debug/gosym": { - {"(*DecodingError).Error", Method, 0}, - {"(*LineTable).LineToPC", Method, 0}, - {"(*LineTable).PCToLine", Method, 0}, - {"(*Sym).BaseName", Method, 0}, - {"(*Sym).PackageName", Method, 0}, - {"(*Sym).ReceiverName", Method, 0}, - {"(*Sym).Static", Method, 0}, - {"(*Table).LineToPC", Method, 0}, - {"(*Table).LookupFunc", Method, 0}, - {"(*Table).LookupSym", Method, 0}, - {"(*Table).PCToFunc", Method, 0}, - {"(*Table).PCToLine", Method, 0}, - {"(*Table).SymByAddr", Method, 0}, - {"(*UnknownLineError).Error", Method, 0}, - {"(Func).BaseName", Method, 0}, - {"(Func).PackageName", Method, 0}, - {"(Func).ReceiverName", Method, 0}, - {"(Func).Static", Method, 0}, - {"(UnknownFileError).Error", Method, 0}, - {"DecodingError", Type, 0}, - {"Func", Type, 0}, - {"Func.End", Field, 0}, - {"Func.Entry", Field, 0}, - {"Func.FrameSize", Field, 0}, - {"Func.LineTable", 
Field, 0}, - {"Func.Locals", Field, 0}, - {"Func.Obj", Field, 0}, - {"Func.Params", Field, 0}, - {"Func.Sym", Field, 0}, - {"LineTable", Type, 0}, - {"LineTable.Data", Field, 0}, - {"LineTable.Line", Field, 0}, - {"LineTable.PC", Field, 0}, - {"NewLineTable", Func, 0}, - {"NewTable", Func, 0}, - {"Obj", Type, 0}, - {"Obj.Funcs", Field, 0}, - {"Obj.Paths", Field, 0}, - {"Sym", Type, 0}, - {"Sym.Func", Field, 0}, - {"Sym.GoType", Field, 0}, - {"Sym.Name", Field, 0}, - {"Sym.Type", Field, 0}, - {"Sym.Value", Field, 0}, - {"Table", Type, 0}, - {"Table.Files", Field, 0}, - {"Table.Funcs", Field, 0}, - {"Table.Objs", Field, 0}, - {"Table.Syms", Field, 0}, - {"UnknownFileError", Type, 0}, - {"UnknownLineError", Type, 0}, - {"UnknownLineError.File", Field, 0}, - {"UnknownLineError.Line", Field, 0}, + {"(*DecodingError).Error", Method, 0, ""}, + {"(*LineTable).LineToPC", Method, 0, ""}, + {"(*LineTable).PCToLine", Method, 0, ""}, + {"(*Sym).BaseName", Method, 0, ""}, + {"(*Sym).PackageName", Method, 0, ""}, + {"(*Sym).ReceiverName", Method, 0, ""}, + {"(*Sym).Static", Method, 0, ""}, + {"(*Table).LineToPC", Method, 0, ""}, + {"(*Table).LookupFunc", Method, 0, ""}, + {"(*Table).LookupSym", Method, 0, ""}, + {"(*Table).PCToFunc", Method, 0, ""}, + {"(*Table).PCToLine", Method, 0, ""}, + {"(*Table).SymByAddr", Method, 0, ""}, + {"(*UnknownLineError).Error", Method, 0, ""}, + {"(Func).BaseName", Method, 0, ""}, + {"(Func).PackageName", Method, 0, ""}, + {"(Func).ReceiverName", Method, 0, ""}, + {"(Func).Static", Method, 0, ""}, + {"(UnknownFileError).Error", Method, 0, ""}, + {"DecodingError", Type, 0, ""}, + {"Func", Type, 0, ""}, + {"Func.End", Field, 0, ""}, + {"Func.Entry", Field, 0, ""}, + {"Func.FrameSize", Field, 0, ""}, + {"Func.LineTable", Field, 0, ""}, + {"Func.Locals", Field, 0, ""}, + {"Func.Obj", Field, 0, ""}, + {"Func.Params", Field, 0, ""}, + {"Func.Sym", Field, 0, ""}, + {"LineTable", Type, 0, ""}, + {"LineTable.Data", Field, 0, ""}, + {"LineTable.Line", Field, 0, ""}, + {"LineTable.PC", Field, 0, ""}, + {"NewLineTable", Func, 0, "func(data []byte, text uint64) *LineTable"}, + {"NewTable", Func, 0, "func(symtab []byte, pcln *LineTable) (*Table, error)"}, + {"Obj", Type, 0, ""}, + {"Obj.Funcs", Field, 0, ""}, + {"Obj.Paths", Field, 0, ""}, + {"Sym", Type, 0, ""}, + {"Sym.Func", Field, 0, ""}, + {"Sym.GoType", Field, 0, ""}, + {"Sym.Name", Field, 0, ""}, + {"Sym.Type", Field, 0, ""}, + {"Sym.Value", Field, 0, ""}, + {"Table", Type, 0, ""}, + {"Table.Files", Field, 0, ""}, + {"Table.Funcs", Field, 0, ""}, + {"Table.Objs", Field, 0, ""}, + {"Table.Syms", Field, 0, ""}, + {"UnknownFileError", Type, 0, ""}, + {"UnknownLineError", Type, 0, ""}, + {"UnknownLineError.File", Field, 0, ""}, + {"UnknownLineError.Line", Field, 0, ""}, }, "debug/macho": { - {"(*FatFile).Close", Method, 3}, - {"(*File).Close", Method, 0}, - {"(*File).DWARF", Method, 0}, - {"(*File).ImportedLibraries", Method, 0}, - {"(*File).ImportedSymbols", Method, 0}, - {"(*File).Section", Method, 0}, - {"(*File).Segment", Method, 0}, - {"(*FormatError).Error", Method, 0}, - {"(*Section).Data", Method, 0}, - {"(*Section).Open", Method, 0}, - {"(*Segment).Data", Method, 0}, - {"(*Segment).Open", Method, 0}, - {"(Cpu).GoString", Method, 0}, - {"(Cpu).String", Method, 0}, - {"(Dylib).Raw", Method, 0}, - {"(Dysymtab).Raw", Method, 0}, - {"(FatArch).Close", Method, 3}, - {"(FatArch).DWARF", Method, 3}, - {"(FatArch).ImportedLibraries", Method, 3}, - {"(FatArch).ImportedSymbols", Method, 3}, - {"(FatArch).Section", Method, 3}, - 
{"(FatArch).Segment", Method, 3}, - {"(LoadBytes).Raw", Method, 0}, - {"(LoadCmd).GoString", Method, 0}, - {"(LoadCmd).String", Method, 0}, - {"(RelocTypeARM).GoString", Method, 10}, - {"(RelocTypeARM).String", Method, 10}, - {"(RelocTypeARM64).GoString", Method, 10}, - {"(RelocTypeARM64).String", Method, 10}, - {"(RelocTypeGeneric).GoString", Method, 10}, - {"(RelocTypeGeneric).String", Method, 10}, - {"(RelocTypeX86_64).GoString", Method, 10}, - {"(RelocTypeX86_64).String", Method, 10}, - {"(Rpath).Raw", Method, 10}, - {"(Section).ReadAt", Method, 0}, - {"(Segment).Raw", Method, 0}, - {"(Segment).ReadAt", Method, 0}, - {"(Symtab).Raw", Method, 0}, - {"(Type).GoString", Method, 10}, - {"(Type).String", Method, 10}, - {"ARM64_RELOC_ADDEND", Const, 10}, - {"ARM64_RELOC_BRANCH26", Const, 10}, - {"ARM64_RELOC_GOT_LOAD_PAGE21", Const, 10}, - {"ARM64_RELOC_GOT_LOAD_PAGEOFF12", Const, 10}, - {"ARM64_RELOC_PAGE21", Const, 10}, - {"ARM64_RELOC_PAGEOFF12", Const, 10}, - {"ARM64_RELOC_POINTER_TO_GOT", Const, 10}, - {"ARM64_RELOC_SUBTRACTOR", Const, 10}, - {"ARM64_RELOC_TLVP_LOAD_PAGE21", Const, 10}, - {"ARM64_RELOC_TLVP_LOAD_PAGEOFF12", Const, 10}, - {"ARM64_RELOC_UNSIGNED", Const, 10}, - {"ARM_RELOC_BR24", Const, 10}, - {"ARM_RELOC_HALF", Const, 10}, - {"ARM_RELOC_HALF_SECTDIFF", Const, 10}, - {"ARM_RELOC_LOCAL_SECTDIFF", Const, 10}, - {"ARM_RELOC_PAIR", Const, 10}, - {"ARM_RELOC_PB_LA_PTR", Const, 10}, - {"ARM_RELOC_SECTDIFF", Const, 10}, - {"ARM_RELOC_VANILLA", Const, 10}, - {"ARM_THUMB_32BIT_BRANCH", Const, 10}, - {"ARM_THUMB_RELOC_BR22", Const, 10}, - {"Cpu", Type, 0}, - {"Cpu386", Const, 0}, - {"CpuAmd64", Const, 0}, - {"CpuArm", Const, 3}, - {"CpuArm64", Const, 11}, - {"CpuPpc", Const, 3}, - {"CpuPpc64", Const, 3}, - {"Dylib", Type, 0}, - {"Dylib.CompatVersion", Field, 0}, - {"Dylib.CurrentVersion", Field, 0}, - {"Dylib.LoadBytes", Field, 0}, - {"Dylib.Name", Field, 0}, - {"Dylib.Time", Field, 0}, - {"DylibCmd", Type, 0}, - {"DylibCmd.Cmd", Field, 0}, - {"DylibCmd.CompatVersion", Field, 0}, - {"DylibCmd.CurrentVersion", Field, 0}, - {"DylibCmd.Len", Field, 0}, - {"DylibCmd.Name", Field, 0}, - {"DylibCmd.Time", Field, 0}, - {"Dysymtab", Type, 0}, - {"Dysymtab.DysymtabCmd", Field, 0}, - {"Dysymtab.IndirectSyms", Field, 0}, - {"Dysymtab.LoadBytes", Field, 0}, - {"DysymtabCmd", Type, 0}, - {"DysymtabCmd.Cmd", Field, 0}, - {"DysymtabCmd.Extrefsymoff", Field, 0}, - {"DysymtabCmd.Extreloff", Field, 0}, - {"DysymtabCmd.Iextdefsym", Field, 0}, - {"DysymtabCmd.Ilocalsym", Field, 0}, - {"DysymtabCmd.Indirectsymoff", Field, 0}, - {"DysymtabCmd.Iundefsym", Field, 0}, - {"DysymtabCmd.Len", Field, 0}, - {"DysymtabCmd.Locreloff", Field, 0}, - {"DysymtabCmd.Modtaboff", Field, 0}, - {"DysymtabCmd.Nextdefsym", Field, 0}, - {"DysymtabCmd.Nextrefsyms", Field, 0}, - {"DysymtabCmd.Nextrel", Field, 0}, - {"DysymtabCmd.Nindirectsyms", Field, 0}, - {"DysymtabCmd.Nlocalsym", Field, 0}, - {"DysymtabCmd.Nlocrel", Field, 0}, - {"DysymtabCmd.Nmodtab", Field, 0}, - {"DysymtabCmd.Ntoc", Field, 0}, - {"DysymtabCmd.Nundefsym", Field, 0}, - {"DysymtabCmd.Tocoffset", Field, 0}, - {"ErrNotFat", Var, 3}, - {"FatArch", Type, 3}, - {"FatArch.FatArchHeader", Field, 3}, - {"FatArch.File", Field, 3}, - {"FatArchHeader", Type, 3}, - {"FatArchHeader.Align", Field, 3}, - {"FatArchHeader.Cpu", Field, 3}, - {"FatArchHeader.Offset", Field, 3}, - {"FatArchHeader.Size", Field, 3}, - {"FatArchHeader.SubCpu", Field, 3}, - {"FatFile", Type, 3}, - {"FatFile.Arches", Field, 3}, - {"FatFile.Magic", Field, 3}, - {"File", Type, 0}, - 
{"File.ByteOrder", Field, 0}, - {"File.Dysymtab", Field, 0}, - {"File.FileHeader", Field, 0}, - {"File.Loads", Field, 0}, - {"File.Sections", Field, 0}, - {"File.Symtab", Field, 0}, - {"FileHeader", Type, 0}, - {"FileHeader.Cmdsz", Field, 0}, - {"FileHeader.Cpu", Field, 0}, - {"FileHeader.Flags", Field, 0}, - {"FileHeader.Magic", Field, 0}, - {"FileHeader.Ncmd", Field, 0}, - {"FileHeader.SubCpu", Field, 0}, - {"FileHeader.Type", Field, 0}, - {"FlagAllModsBound", Const, 10}, - {"FlagAllowStackExecution", Const, 10}, - {"FlagAppExtensionSafe", Const, 10}, - {"FlagBindAtLoad", Const, 10}, - {"FlagBindsToWeak", Const, 10}, - {"FlagCanonical", Const, 10}, - {"FlagDeadStrippableDylib", Const, 10}, - {"FlagDyldLink", Const, 10}, - {"FlagForceFlat", Const, 10}, - {"FlagHasTLVDescriptors", Const, 10}, - {"FlagIncrLink", Const, 10}, - {"FlagLazyInit", Const, 10}, - {"FlagNoFixPrebinding", Const, 10}, - {"FlagNoHeapExecution", Const, 10}, - {"FlagNoMultiDefs", Const, 10}, - {"FlagNoReexportedDylibs", Const, 10}, - {"FlagNoUndefs", Const, 10}, - {"FlagPIE", Const, 10}, - {"FlagPrebindable", Const, 10}, - {"FlagPrebound", Const, 10}, - {"FlagRootSafe", Const, 10}, - {"FlagSetuidSafe", Const, 10}, - {"FlagSplitSegs", Const, 10}, - {"FlagSubsectionsViaSymbols", Const, 10}, - {"FlagTwoLevel", Const, 10}, - {"FlagWeakDefines", Const, 10}, - {"FormatError", Type, 0}, - {"GENERIC_RELOC_LOCAL_SECTDIFF", Const, 10}, - {"GENERIC_RELOC_PAIR", Const, 10}, - {"GENERIC_RELOC_PB_LA_PTR", Const, 10}, - {"GENERIC_RELOC_SECTDIFF", Const, 10}, - {"GENERIC_RELOC_TLV", Const, 10}, - {"GENERIC_RELOC_VANILLA", Const, 10}, - {"Load", Type, 0}, - {"LoadBytes", Type, 0}, - {"LoadCmd", Type, 0}, - {"LoadCmdDylib", Const, 0}, - {"LoadCmdDylinker", Const, 0}, - {"LoadCmdDysymtab", Const, 0}, - {"LoadCmdRpath", Const, 10}, - {"LoadCmdSegment", Const, 0}, - {"LoadCmdSegment64", Const, 0}, - {"LoadCmdSymtab", Const, 0}, - {"LoadCmdThread", Const, 0}, - {"LoadCmdUnixThread", Const, 0}, - {"Magic32", Const, 0}, - {"Magic64", Const, 0}, - {"MagicFat", Const, 3}, - {"NewFatFile", Func, 3}, - {"NewFile", Func, 0}, - {"Nlist32", Type, 0}, - {"Nlist32.Desc", Field, 0}, - {"Nlist32.Name", Field, 0}, - {"Nlist32.Sect", Field, 0}, - {"Nlist32.Type", Field, 0}, - {"Nlist32.Value", Field, 0}, - {"Nlist64", Type, 0}, - {"Nlist64.Desc", Field, 0}, - {"Nlist64.Name", Field, 0}, - {"Nlist64.Sect", Field, 0}, - {"Nlist64.Type", Field, 0}, - {"Nlist64.Value", Field, 0}, - {"Open", Func, 0}, - {"OpenFat", Func, 3}, - {"Regs386", Type, 0}, - {"Regs386.AX", Field, 0}, - {"Regs386.BP", Field, 0}, - {"Regs386.BX", Field, 0}, - {"Regs386.CS", Field, 0}, - {"Regs386.CX", Field, 0}, - {"Regs386.DI", Field, 0}, - {"Regs386.DS", Field, 0}, - {"Regs386.DX", Field, 0}, - {"Regs386.ES", Field, 0}, - {"Regs386.FLAGS", Field, 0}, - {"Regs386.FS", Field, 0}, - {"Regs386.GS", Field, 0}, - {"Regs386.IP", Field, 0}, - {"Regs386.SI", Field, 0}, - {"Regs386.SP", Field, 0}, - {"Regs386.SS", Field, 0}, - {"RegsAMD64", Type, 0}, - {"RegsAMD64.AX", Field, 0}, - {"RegsAMD64.BP", Field, 0}, - {"RegsAMD64.BX", Field, 0}, - {"RegsAMD64.CS", Field, 0}, - {"RegsAMD64.CX", Field, 0}, - {"RegsAMD64.DI", Field, 0}, - {"RegsAMD64.DX", Field, 0}, - {"RegsAMD64.FLAGS", Field, 0}, - {"RegsAMD64.FS", Field, 0}, - {"RegsAMD64.GS", Field, 0}, - {"RegsAMD64.IP", Field, 0}, - {"RegsAMD64.R10", Field, 0}, - {"RegsAMD64.R11", Field, 0}, - {"RegsAMD64.R12", Field, 0}, - {"RegsAMD64.R13", Field, 0}, - {"RegsAMD64.R14", Field, 0}, - {"RegsAMD64.R15", Field, 0}, - {"RegsAMD64.R8", Field, 0}, - 
{"RegsAMD64.R9", Field, 0}, - {"RegsAMD64.SI", Field, 0}, - {"RegsAMD64.SP", Field, 0}, - {"Reloc", Type, 10}, - {"Reloc.Addr", Field, 10}, - {"Reloc.Extern", Field, 10}, - {"Reloc.Len", Field, 10}, - {"Reloc.Pcrel", Field, 10}, - {"Reloc.Scattered", Field, 10}, - {"Reloc.Type", Field, 10}, - {"Reloc.Value", Field, 10}, - {"RelocTypeARM", Type, 10}, - {"RelocTypeARM64", Type, 10}, - {"RelocTypeGeneric", Type, 10}, - {"RelocTypeX86_64", Type, 10}, - {"Rpath", Type, 10}, - {"Rpath.LoadBytes", Field, 10}, - {"Rpath.Path", Field, 10}, - {"RpathCmd", Type, 10}, - {"RpathCmd.Cmd", Field, 10}, - {"RpathCmd.Len", Field, 10}, - {"RpathCmd.Path", Field, 10}, - {"Section", Type, 0}, - {"Section.ReaderAt", Field, 0}, - {"Section.Relocs", Field, 10}, - {"Section.SectionHeader", Field, 0}, - {"Section32", Type, 0}, - {"Section32.Addr", Field, 0}, - {"Section32.Align", Field, 0}, - {"Section32.Flags", Field, 0}, - {"Section32.Name", Field, 0}, - {"Section32.Nreloc", Field, 0}, - {"Section32.Offset", Field, 0}, - {"Section32.Reloff", Field, 0}, - {"Section32.Reserve1", Field, 0}, - {"Section32.Reserve2", Field, 0}, - {"Section32.Seg", Field, 0}, - {"Section32.Size", Field, 0}, - {"Section64", Type, 0}, - {"Section64.Addr", Field, 0}, - {"Section64.Align", Field, 0}, - {"Section64.Flags", Field, 0}, - {"Section64.Name", Field, 0}, - {"Section64.Nreloc", Field, 0}, - {"Section64.Offset", Field, 0}, - {"Section64.Reloff", Field, 0}, - {"Section64.Reserve1", Field, 0}, - {"Section64.Reserve2", Field, 0}, - {"Section64.Reserve3", Field, 0}, - {"Section64.Seg", Field, 0}, - {"Section64.Size", Field, 0}, - {"SectionHeader", Type, 0}, - {"SectionHeader.Addr", Field, 0}, - {"SectionHeader.Align", Field, 0}, - {"SectionHeader.Flags", Field, 0}, - {"SectionHeader.Name", Field, 0}, - {"SectionHeader.Nreloc", Field, 0}, - {"SectionHeader.Offset", Field, 0}, - {"SectionHeader.Reloff", Field, 0}, - {"SectionHeader.Seg", Field, 0}, - {"SectionHeader.Size", Field, 0}, - {"Segment", Type, 0}, - {"Segment.LoadBytes", Field, 0}, - {"Segment.ReaderAt", Field, 0}, - {"Segment.SegmentHeader", Field, 0}, - {"Segment32", Type, 0}, - {"Segment32.Addr", Field, 0}, - {"Segment32.Cmd", Field, 0}, - {"Segment32.Filesz", Field, 0}, - {"Segment32.Flag", Field, 0}, - {"Segment32.Len", Field, 0}, - {"Segment32.Maxprot", Field, 0}, - {"Segment32.Memsz", Field, 0}, - {"Segment32.Name", Field, 0}, - {"Segment32.Nsect", Field, 0}, - {"Segment32.Offset", Field, 0}, - {"Segment32.Prot", Field, 0}, - {"Segment64", Type, 0}, - {"Segment64.Addr", Field, 0}, - {"Segment64.Cmd", Field, 0}, - {"Segment64.Filesz", Field, 0}, - {"Segment64.Flag", Field, 0}, - {"Segment64.Len", Field, 0}, - {"Segment64.Maxprot", Field, 0}, - {"Segment64.Memsz", Field, 0}, - {"Segment64.Name", Field, 0}, - {"Segment64.Nsect", Field, 0}, - {"Segment64.Offset", Field, 0}, - {"Segment64.Prot", Field, 0}, - {"SegmentHeader", Type, 0}, - {"SegmentHeader.Addr", Field, 0}, - {"SegmentHeader.Cmd", Field, 0}, - {"SegmentHeader.Filesz", Field, 0}, - {"SegmentHeader.Flag", Field, 0}, - {"SegmentHeader.Len", Field, 0}, - {"SegmentHeader.Maxprot", Field, 0}, - {"SegmentHeader.Memsz", Field, 0}, - {"SegmentHeader.Name", Field, 0}, - {"SegmentHeader.Nsect", Field, 0}, - {"SegmentHeader.Offset", Field, 0}, - {"SegmentHeader.Prot", Field, 0}, - {"Symbol", Type, 0}, - {"Symbol.Desc", Field, 0}, - {"Symbol.Name", Field, 0}, - {"Symbol.Sect", Field, 0}, - {"Symbol.Type", Field, 0}, - {"Symbol.Value", Field, 0}, - {"Symtab", Type, 0}, - {"Symtab.LoadBytes", Field, 0}, - {"Symtab.Syms", 
Field, 0}, - {"Symtab.SymtabCmd", Field, 0}, - {"SymtabCmd", Type, 0}, - {"SymtabCmd.Cmd", Field, 0}, - {"SymtabCmd.Len", Field, 0}, - {"SymtabCmd.Nsyms", Field, 0}, - {"SymtabCmd.Stroff", Field, 0}, - {"SymtabCmd.Strsize", Field, 0}, - {"SymtabCmd.Symoff", Field, 0}, - {"Thread", Type, 0}, - {"Thread.Cmd", Field, 0}, - {"Thread.Data", Field, 0}, - {"Thread.Len", Field, 0}, - {"Thread.Type", Field, 0}, - {"Type", Type, 0}, - {"TypeBundle", Const, 3}, - {"TypeDylib", Const, 3}, - {"TypeExec", Const, 0}, - {"TypeObj", Const, 0}, - {"X86_64_RELOC_BRANCH", Const, 10}, - {"X86_64_RELOC_GOT", Const, 10}, - {"X86_64_RELOC_GOT_LOAD", Const, 10}, - {"X86_64_RELOC_SIGNED", Const, 10}, - {"X86_64_RELOC_SIGNED_1", Const, 10}, - {"X86_64_RELOC_SIGNED_2", Const, 10}, - {"X86_64_RELOC_SIGNED_4", Const, 10}, - {"X86_64_RELOC_SUBTRACTOR", Const, 10}, - {"X86_64_RELOC_TLV", Const, 10}, - {"X86_64_RELOC_UNSIGNED", Const, 10}, + {"(*FatFile).Close", Method, 3, ""}, + {"(*File).Close", Method, 0, ""}, + {"(*File).DWARF", Method, 0, ""}, + {"(*File).ImportedLibraries", Method, 0, ""}, + {"(*File).ImportedSymbols", Method, 0, ""}, + {"(*File).Section", Method, 0, ""}, + {"(*File).Segment", Method, 0, ""}, + {"(*FormatError).Error", Method, 0, ""}, + {"(*Section).Data", Method, 0, ""}, + {"(*Section).Open", Method, 0, ""}, + {"(*Segment).Data", Method, 0, ""}, + {"(*Segment).Open", Method, 0, ""}, + {"(Cpu).GoString", Method, 0, ""}, + {"(Cpu).String", Method, 0, ""}, + {"(Dylib).Raw", Method, 0, ""}, + {"(Dysymtab).Raw", Method, 0, ""}, + {"(FatArch).Close", Method, 3, ""}, + {"(FatArch).DWARF", Method, 3, ""}, + {"(FatArch).ImportedLibraries", Method, 3, ""}, + {"(FatArch).ImportedSymbols", Method, 3, ""}, + {"(FatArch).Section", Method, 3, ""}, + {"(FatArch).Segment", Method, 3, ""}, + {"(LoadBytes).Raw", Method, 0, ""}, + {"(LoadCmd).GoString", Method, 0, ""}, + {"(LoadCmd).String", Method, 0, ""}, + {"(RelocTypeARM).GoString", Method, 10, ""}, + {"(RelocTypeARM).String", Method, 10, ""}, + {"(RelocTypeARM64).GoString", Method, 10, ""}, + {"(RelocTypeARM64).String", Method, 10, ""}, + {"(RelocTypeGeneric).GoString", Method, 10, ""}, + {"(RelocTypeGeneric).String", Method, 10, ""}, + {"(RelocTypeX86_64).GoString", Method, 10, ""}, + {"(RelocTypeX86_64).String", Method, 10, ""}, + {"(Rpath).Raw", Method, 10, ""}, + {"(Section).ReadAt", Method, 0, ""}, + {"(Segment).Raw", Method, 0, ""}, + {"(Segment).ReadAt", Method, 0, ""}, + {"(Symtab).Raw", Method, 0, ""}, + {"(Type).GoString", Method, 10, ""}, + {"(Type).String", Method, 10, ""}, + {"ARM64_RELOC_ADDEND", Const, 10, ""}, + {"ARM64_RELOC_BRANCH26", Const, 10, ""}, + {"ARM64_RELOC_GOT_LOAD_PAGE21", Const, 10, ""}, + {"ARM64_RELOC_GOT_LOAD_PAGEOFF12", Const, 10, ""}, + {"ARM64_RELOC_PAGE21", Const, 10, ""}, + {"ARM64_RELOC_PAGEOFF12", Const, 10, ""}, + {"ARM64_RELOC_POINTER_TO_GOT", Const, 10, ""}, + {"ARM64_RELOC_SUBTRACTOR", Const, 10, ""}, + {"ARM64_RELOC_TLVP_LOAD_PAGE21", Const, 10, ""}, + {"ARM64_RELOC_TLVP_LOAD_PAGEOFF12", Const, 10, ""}, + {"ARM64_RELOC_UNSIGNED", Const, 10, ""}, + {"ARM_RELOC_BR24", Const, 10, ""}, + {"ARM_RELOC_HALF", Const, 10, ""}, + {"ARM_RELOC_HALF_SECTDIFF", Const, 10, ""}, + {"ARM_RELOC_LOCAL_SECTDIFF", Const, 10, ""}, + {"ARM_RELOC_PAIR", Const, 10, ""}, + {"ARM_RELOC_PB_LA_PTR", Const, 10, ""}, + {"ARM_RELOC_SECTDIFF", Const, 10, ""}, + {"ARM_RELOC_VANILLA", Const, 10, ""}, + {"ARM_THUMB_32BIT_BRANCH", Const, 10, ""}, + {"ARM_THUMB_RELOC_BR22", Const, 10, ""}, + {"Cpu", Type, 0, ""}, + {"Cpu386", Const, 0, ""}, + {"CpuAmd64", 
Const, 0, ""}, + {"CpuArm", Const, 3, ""}, + {"CpuArm64", Const, 11, ""}, + {"CpuPpc", Const, 3, ""}, + {"CpuPpc64", Const, 3, ""}, + {"Dylib", Type, 0, ""}, + {"Dylib.CompatVersion", Field, 0, ""}, + {"Dylib.CurrentVersion", Field, 0, ""}, + {"Dylib.LoadBytes", Field, 0, ""}, + {"Dylib.Name", Field, 0, ""}, + {"Dylib.Time", Field, 0, ""}, + {"DylibCmd", Type, 0, ""}, + {"DylibCmd.Cmd", Field, 0, ""}, + {"DylibCmd.CompatVersion", Field, 0, ""}, + {"DylibCmd.CurrentVersion", Field, 0, ""}, + {"DylibCmd.Len", Field, 0, ""}, + {"DylibCmd.Name", Field, 0, ""}, + {"DylibCmd.Time", Field, 0, ""}, + {"Dysymtab", Type, 0, ""}, + {"Dysymtab.DysymtabCmd", Field, 0, ""}, + {"Dysymtab.IndirectSyms", Field, 0, ""}, + {"Dysymtab.LoadBytes", Field, 0, ""}, + {"DysymtabCmd", Type, 0, ""}, + {"DysymtabCmd.Cmd", Field, 0, ""}, + {"DysymtabCmd.Extrefsymoff", Field, 0, ""}, + {"DysymtabCmd.Extreloff", Field, 0, ""}, + {"DysymtabCmd.Iextdefsym", Field, 0, ""}, + {"DysymtabCmd.Ilocalsym", Field, 0, ""}, + {"DysymtabCmd.Indirectsymoff", Field, 0, ""}, + {"DysymtabCmd.Iundefsym", Field, 0, ""}, + {"DysymtabCmd.Len", Field, 0, ""}, + {"DysymtabCmd.Locreloff", Field, 0, ""}, + {"DysymtabCmd.Modtaboff", Field, 0, ""}, + {"DysymtabCmd.Nextdefsym", Field, 0, ""}, + {"DysymtabCmd.Nextrefsyms", Field, 0, ""}, + {"DysymtabCmd.Nextrel", Field, 0, ""}, + {"DysymtabCmd.Nindirectsyms", Field, 0, ""}, + {"DysymtabCmd.Nlocalsym", Field, 0, ""}, + {"DysymtabCmd.Nlocrel", Field, 0, ""}, + {"DysymtabCmd.Nmodtab", Field, 0, ""}, + {"DysymtabCmd.Ntoc", Field, 0, ""}, + {"DysymtabCmd.Nundefsym", Field, 0, ""}, + {"DysymtabCmd.Tocoffset", Field, 0, ""}, + {"ErrNotFat", Var, 3, ""}, + {"FatArch", Type, 3, ""}, + {"FatArch.FatArchHeader", Field, 3, ""}, + {"FatArch.File", Field, 3, ""}, + {"FatArchHeader", Type, 3, ""}, + {"FatArchHeader.Align", Field, 3, ""}, + {"FatArchHeader.Cpu", Field, 3, ""}, + {"FatArchHeader.Offset", Field, 3, ""}, + {"FatArchHeader.Size", Field, 3, ""}, + {"FatArchHeader.SubCpu", Field, 3, ""}, + {"FatFile", Type, 3, ""}, + {"FatFile.Arches", Field, 3, ""}, + {"FatFile.Magic", Field, 3, ""}, + {"File", Type, 0, ""}, + {"File.ByteOrder", Field, 0, ""}, + {"File.Dysymtab", Field, 0, ""}, + {"File.FileHeader", Field, 0, ""}, + {"File.Loads", Field, 0, ""}, + {"File.Sections", Field, 0, ""}, + {"File.Symtab", Field, 0, ""}, + {"FileHeader", Type, 0, ""}, + {"FileHeader.Cmdsz", Field, 0, ""}, + {"FileHeader.Cpu", Field, 0, ""}, + {"FileHeader.Flags", Field, 0, ""}, + {"FileHeader.Magic", Field, 0, ""}, + {"FileHeader.Ncmd", Field, 0, ""}, + {"FileHeader.SubCpu", Field, 0, ""}, + {"FileHeader.Type", Field, 0, ""}, + {"FlagAllModsBound", Const, 10, ""}, + {"FlagAllowStackExecution", Const, 10, ""}, + {"FlagAppExtensionSafe", Const, 10, ""}, + {"FlagBindAtLoad", Const, 10, ""}, + {"FlagBindsToWeak", Const, 10, ""}, + {"FlagCanonical", Const, 10, ""}, + {"FlagDeadStrippableDylib", Const, 10, ""}, + {"FlagDyldLink", Const, 10, ""}, + {"FlagForceFlat", Const, 10, ""}, + {"FlagHasTLVDescriptors", Const, 10, ""}, + {"FlagIncrLink", Const, 10, ""}, + {"FlagLazyInit", Const, 10, ""}, + {"FlagNoFixPrebinding", Const, 10, ""}, + {"FlagNoHeapExecution", Const, 10, ""}, + {"FlagNoMultiDefs", Const, 10, ""}, + {"FlagNoReexportedDylibs", Const, 10, ""}, + {"FlagNoUndefs", Const, 10, ""}, + {"FlagPIE", Const, 10, ""}, + {"FlagPrebindable", Const, 10, ""}, + {"FlagPrebound", Const, 10, ""}, + {"FlagRootSafe", Const, 10, ""}, + {"FlagSetuidSafe", Const, 10, ""}, + {"FlagSplitSegs", Const, 10, ""}, + {"FlagSubsectionsViaSymbols", 
Const, 10, ""}, + {"FlagTwoLevel", Const, 10, ""}, + {"FlagWeakDefines", Const, 10, ""}, + {"FormatError", Type, 0, ""}, + {"GENERIC_RELOC_LOCAL_SECTDIFF", Const, 10, ""}, + {"GENERIC_RELOC_PAIR", Const, 10, ""}, + {"GENERIC_RELOC_PB_LA_PTR", Const, 10, ""}, + {"GENERIC_RELOC_SECTDIFF", Const, 10, ""}, + {"GENERIC_RELOC_TLV", Const, 10, ""}, + {"GENERIC_RELOC_VANILLA", Const, 10, ""}, + {"Load", Type, 0, ""}, + {"LoadBytes", Type, 0, ""}, + {"LoadCmd", Type, 0, ""}, + {"LoadCmdDylib", Const, 0, ""}, + {"LoadCmdDylinker", Const, 0, ""}, + {"LoadCmdDysymtab", Const, 0, ""}, + {"LoadCmdRpath", Const, 10, ""}, + {"LoadCmdSegment", Const, 0, ""}, + {"LoadCmdSegment64", Const, 0, ""}, + {"LoadCmdSymtab", Const, 0, ""}, + {"LoadCmdThread", Const, 0, ""}, + {"LoadCmdUnixThread", Const, 0, ""}, + {"Magic32", Const, 0, ""}, + {"Magic64", Const, 0, ""}, + {"MagicFat", Const, 3, ""}, + {"NewFatFile", Func, 3, "func(r io.ReaderAt) (*FatFile, error)"}, + {"NewFile", Func, 0, "func(r io.ReaderAt) (*File, error)"}, + {"Nlist32", Type, 0, ""}, + {"Nlist32.Desc", Field, 0, ""}, + {"Nlist32.Name", Field, 0, ""}, + {"Nlist32.Sect", Field, 0, ""}, + {"Nlist32.Type", Field, 0, ""}, + {"Nlist32.Value", Field, 0, ""}, + {"Nlist64", Type, 0, ""}, + {"Nlist64.Desc", Field, 0, ""}, + {"Nlist64.Name", Field, 0, ""}, + {"Nlist64.Sect", Field, 0, ""}, + {"Nlist64.Type", Field, 0, ""}, + {"Nlist64.Value", Field, 0, ""}, + {"Open", Func, 0, "func(name string) (*File, error)"}, + {"OpenFat", Func, 3, "func(name string) (*FatFile, error)"}, + {"Regs386", Type, 0, ""}, + {"Regs386.AX", Field, 0, ""}, + {"Regs386.BP", Field, 0, ""}, + {"Regs386.BX", Field, 0, ""}, + {"Regs386.CS", Field, 0, ""}, + {"Regs386.CX", Field, 0, ""}, + {"Regs386.DI", Field, 0, ""}, + {"Regs386.DS", Field, 0, ""}, + {"Regs386.DX", Field, 0, ""}, + {"Regs386.ES", Field, 0, ""}, + {"Regs386.FLAGS", Field, 0, ""}, + {"Regs386.FS", Field, 0, ""}, + {"Regs386.GS", Field, 0, ""}, + {"Regs386.IP", Field, 0, ""}, + {"Regs386.SI", Field, 0, ""}, + {"Regs386.SP", Field, 0, ""}, + {"Regs386.SS", Field, 0, ""}, + {"RegsAMD64", Type, 0, ""}, + {"RegsAMD64.AX", Field, 0, ""}, + {"RegsAMD64.BP", Field, 0, ""}, + {"RegsAMD64.BX", Field, 0, ""}, + {"RegsAMD64.CS", Field, 0, ""}, + {"RegsAMD64.CX", Field, 0, ""}, + {"RegsAMD64.DI", Field, 0, ""}, + {"RegsAMD64.DX", Field, 0, ""}, + {"RegsAMD64.FLAGS", Field, 0, ""}, + {"RegsAMD64.FS", Field, 0, ""}, + {"RegsAMD64.GS", Field, 0, ""}, + {"RegsAMD64.IP", Field, 0, ""}, + {"RegsAMD64.R10", Field, 0, ""}, + {"RegsAMD64.R11", Field, 0, ""}, + {"RegsAMD64.R12", Field, 0, ""}, + {"RegsAMD64.R13", Field, 0, ""}, + {"RegsAMD64.R14", Field, 0, ""}, + {"RegsAMD64.R15", Field, 0, ""}, + {"RegsAMD64.R8", Field, 0, ""}, + {"RegsAMD64.R9", Field, 0, ""}, + {"RegsAMD64.SI", Field, 0, ""}, + {"RegsAMD64.SP", Field, 0, ""}, + {"Reloc", Type, 10, ""}, + {"Reloc.Addr", Field, 10, ""}, + {"Reloc.Extern", Field, 10, ""}, + {"Reloc.Len", Field, 10, ""}, + {"Reloc.Pcrel", Field, 10, ""}, + {"Reloc.Scattered", Field, 10, ""}, + {"Reloc.Type", Field, 10, ""}, + {"Reloc.Value", Field, 10, ""}, + {"RelocTypeARM", Type, 10, ""}, + {"RelocTypeARM64", Type, 10, ""}, + {"RelocTypeGeneric", Type, 10, ""}, + {"RelocTypeX86_64", Type, 10, ""}, + {"Rpath", Type, 10, ""}, + {"Rpath.LoadBytes", Field, 10, ""}, + {"Rpath.Path", Field, 10, ""}, + {"RpathCmd", Type, 10, ""}, + {"RpathCmd.Cmd", Field, 10, ""}, + {"RpathCmd.Len", Field, 10, ""}, + {"RpathCmd.Path", Field, 10, ""}, + {"Section", Type, 0, ""}, + {"Section.ReaderAt", Field, 0, ""}, + 
{"Section.Relocs", Field, 10, ""}, + {"Section.SectionHeader", Field, 0, ""}, + {"Section32", Type, 0, ""}, + {"Section32.Addr", Field, 0, ""}, + {"Section32.Align", Field, 0, ""}, + {"Section32.Flags", Field, 0, ""}, + {"Section32.Name", Field, 0, ""}, + {"Section32.Nreloc", Field, 0, ""}, + {"Section32.Offset", Field, 0, ""}, + {"Section32.Reloff", Field, 0, ""}, + {"Section32.Reserve1", Field, 0, ""}, + {"Section32.Reserve2", Field, 0, ""}, + {"Section32.Seg", Field, 0, ""}, + {"Section32.Size", Field, 0, ""}, + {"Section64", Type, 0, ""}, + {"Section64.Addr", Field, 0, ""}, + {"Section64.Align", Field, 0, ""}, + {"Section64.Flags", Field, 0, ""}, + {"Section64.Name", Field, 0, ""}, + {"Section64.Nreloc", Field, 0, ""}, + {"Section64.Offset", Field, 0, ""}, + {"Section64.Reloff", Field, 0, ""}, + {"Section64.Reserve1", Field, 0, ""}, + {"Section64.Reserve2", Field, 0, ""}, + {"Section64.Reserve3", Field, 0, ""}, + {"Section64.Seg", Field, 0, ""}, + {"Section64.Size", Field, 0, ""}, + {"SectionHeader", Type, 0, ""}, + {"SectionHeader.Addr", Field, 0, ""}, + {"SectionHeader.Align", Field, 0, ""}, + {"SectionHeader.Flags", Field, 0, ""}, + {"SectionHeader.Name", Field, 0, ""}, + {"SectionHeader.Nreloc", Field, 0, ""}, + {"SectionHeader.Offset", Field, 0, ""}, + {"SectionHeader.Reloff", Field, 0, ""}, + {"SectionHeader.Seg", Field, 0, ""}, + {"SectionHeader.Size", Field, 0, ""}, + {"Segment", Type, 0, ""}, + {"Segment.LoadBytes", Field, 0, ""}, + {"Segment.ReaderAt", Field, 0, ""}, + {"Segment.SegmentHeader", Field, 0, ""}, + {"Segment32", Type, 0, ""}, + {"Segment32.Addr", Field, 0, ""}, + {"Segment32.Cmd", Field, 0, ""}, + {"Segment32.Filesz", Field, 0, ""}, + {"Segment32.Flag", Field, 0, ""}, + {"Segment32.Len", Field, 0, ""}, + {"Segment32.Maxprot", Field, 0, ""}, + {"Segment32.Memsz", Field, 0, ""}, + {"Segment32.Name", Field, 0, ""}, + {"Segment32.Nsect", Field, 0, ""}, + {"Segment32.Offset", Field, 0, ""}, + {"Segment32.Prot", Field, 0, ""}, + {"Segment64", Type, 0, ""}, + {"Segment64.Addr", Field, 0, ""}, + {"Segment64.Cmd", Field, 0, ""}, + {"Segment64.Filesz", Field, 0, ""}, + {"Segment64.Flag", Field, 0, ""}, + {"Segment64.Len", Field, 0, ""}, + {"Segment64.Maxprot", Field, 0, ""}, + {"Segment64.Memsz", Field, 0, ""}, + {"Segment64.Name", Field, 0, ""}, + {"Segment64.Nsect", Field, 0, ""}, + {"Segment64.Offset", Field, 0, ""}, + {"Segment64.Prot", Field, 0, ""}, + {"SegmentHeader", Type, 0, ""}, + {"SegmentHeader.Addr", Field, 0, ""}, + {"SegmentHeader.Cmd", Field, 0, ""}, + {"SegmentHeader.Filesz", Field, 0, ""}, + {"SegmentHeader.Flag", Field, 0, ""}, + {"SegmentHeader.Len", Field, 0, ""}, + {"SegmentHeader.Maxprot", Field, 0, ""}, + {"SegmentHeader.Memsz", Field, 0, ""}, + {"SegmentHeader.Name", Field, 0, ""}, + {"SegmentHeader.Nsect", Field, 0, ""}, + {"SegmentHeader.Offset", Field, 0, ""}, + {"SegmentHeader.Prot", Field, 0, ""}, + {"Symbol", Type, 0, ""}, + {"Symbol.Desc", Field, 0, ""}, + {"Symbol.Name", Field, 0, ""}, + {"Symbol.Sect", Field, 0, ""}, + {"Symbol.Type", Field, 0, ""}, + {"Symbol.Value", Field, 0, ""}, + {"Symtab", Type, 0, ""}, + {"Symtab.LoadBytes", Field, 0, ""}, + {"Symtab.Syms", Field, 0, ""}, + {"Symtab.SymtabCmd", Field, 0, ""}, + {"SymtabCmd", Type, 0, ""}, + {"SymtabCmd.Cmd", Field, 0, ""}, + {"SymtabCmd.Len", Field, 0, ""}, + {"SymtabCmd.Nsyms", Field, 0, ""}, + {"SymtabCmd.Stroff", Field, 0, ""}, + {"SymtabCmd.Strsize", Field, 0, ""}, + {"SymtabCmd.Symoff", Field, 0, ""}, + {"Thread", Type, 0, ""}, + {"Thread.Cmd", Field, 0, ""}, + {"Thread.Data", 
Field, 0, ""}, + {"Thread.Len", Field, 0, ""}, + {"Thread.Type", Field, 0, ""}, + {"Type", Type, 0, ""}, + {"TypeBundle", Const, 3, ""}, + {"TypeDylib", Const, 3, ""}, + {"TypeExec", Const, 0, ""}, + {"TypeObj", Const, 0, ""}, + {"X86_64_RELOC_BRANCH", Const, 10, ""}, + {"X86_64_RELOC_GOT", Const, 10, ""}, + {"X86_64_RELOC_GOT_LOAD", Const, 10, ""}, + {"X86_64_RELOC_SIGNED", Const, 10, ""}, + {"X86_64_RELOC_SIGNED_1", Const, 10, ""}, + {"X86_64_RELOC_SIGNED_2", Const, 10, ""}, + {"X86_64_RELOC_SIGNED_4", Const, 10, ""}, + {"X86_64_RELOC_SUBTRACTOR", Const, 10, ""}, + {"X86_64_RELOC_TLV", Const, 10, ""}, + {"X86_64_RELOC_UNSIGNED", Const, 10, ""}, }, "debug/pe": { - {"(*COFFSymbol).FullName", Method, 8}, - {"(*File).COFFSymbolReadSectionDefAux", Method, 19}, - {"(*File).Close", Method, 0}, - {"(*File).DWARF", Method, 0}, - {"(*File).ImportedLibraries", Method, 0}, - {"(*File).ImportedSymbols", Method, 0}, - {"(*File).Section", Method, 0}, - {"(*FormatError).Error", Method, 0}, - {"(*Section).Data", Method, 0}, - {"(*Section).Open", Method, 0}, - {"(Section).ReadAt", Method, 0}, - {"(StringTable).String", Method, 8}, - {"COFFSymbol", Type, 1}, - {"COFFSymbol.Name", Field, 1}, - {"COFFSymbol.NumberOfAuxSymbols", Field, 1}, - {"COFFSymbol.SectionNumber", Field, 1}, - {"COFFSymbol.StorageClass", Field, 1}, - {"COFFSymbol.Type", Field, 1}, - {"COFFSymbol.Value", Field, 1}, - {"COFFSymbolAuxFormat5", Type, 19}, - {"COFFSymbolAuxFormat5.Checksum", Field, 19}, - {"COFFSymbolAuxFormat5.NumLineNumbers", Field, 19}, - {"COFFSymbolAuxFormat5.NumRelocs", Field, 19}, - {"COFFSymbolAuxFormat5.SecNum", Field, 19}, - {"COFFSymbolAuxFormat5.Selection", Field, 19}, - {"COFFSymbolAuxFormat5.Size", Field, 19}, - {"COFFSymbolSize", Const, 1}, - {"DataDirectory", Type, 3}, - {"DataDirectory.Size", Field, 3}, - {"DataDirectory.VirtualAddress", Field, 3}, - {"File", Type, 0}, - {"File.COFFSymbols", Field, 8}, - {"File.FileHeader", Field, 0}, - {"File.OptionalHeader", Field, 3}, - {"File.Sections", Field, 0}, - {"File.StringTable", Field, 8}, - {"File.Symbols", Field, 1}, - {"FileHeader", Type, 0}, - {"FileHeader.Characteristics", Field, 0}, - {"FileHeader.Machine", Field, 0}, - {"FileHeader.NumberOfSections", Field, 0}, - {"FileHeader.NumberOfSymbols", Field, 0}, - {"FileHeader.PointerToSymbolTable", Field, 0}, - {"FileHeader.SizeOfOptionalHeader", Field, 0}, - {"FileHeader.TimeDateStamp", Field, 0}, - {"FormatError", Type, 0}, - {"IMAGE_COMDAT_SELECT_ANY", Const, 19}, - {"IMAGE_COMDAT_SELECT_ASSOCIATIVE", Const, 19}, - {"IMAGE_COMDAT_SELECT_EXACT_MATCH", Const, 19}, - {"IMAGE_COMDAT_SELECT_LARGEST", Const, 19}, - {"IMAGE_COMDAT_SELECT_NODUPLICATES", Const, 19}, - {"IMAGE_COMDAT_SELECT_SAME_SIZE", Const, 19}, - {"IMAGE_DIRECTORY_ENTRY_ARCHITECTURE", Const, 11}, - {"IMAGE_DIRECTORY_ENTRY_BASERELOC", Const, 11}, - {"IMAGE_DIRECTORY_ENTRY_BOUND_IMPORT", Const, 11}, - {"IMAGE_DIRECTORY_ENTRY_COM_DESCRIPTOR", Const, 11}, - {"IMAGE_DIRECTORY_ENTRY_DEBUG", Const, 11}, - {"IMAGE_DIRECTORY_ENTRY_DELAY_IMPORT", Const, 11}, - {"IMAGE_DIRECTORY_ENTRY_EXCEPTION", Const, 11}, - {"IMAGE_DIRECTORY_ENTRY_EXPORT", Const, 11}, - {"IMAGE_DIRECTORY_ENTRY_GLOBALPTR", Const, 11}, - {"IMAGE_DIRECTORY_ENTRY_IAT", Const, 11}, - {"IMAGE_DIRECTORY_ENTRY_IMPORT", Const, 11}, - {"IMAGE_DIRECTORY_ENTRY_LOAD_CONFIG", Const, 11}, - {"IMAGE_DIRECTORY_ENTRY_RESOURCE", Const, 11}, - {"IMAGE_DIRECTORY_ENTRY_SECURITY", Const, 11}, - {"IMAGE_DIRECTORY_ENTRY_TLS", Const, 11}, - {"IMAGE_DLLCHARACTERISTICS_APPCONTAINER", Const, 15}, - 
{"IMAGE_DLLCHARACTERISTICS_DYNAMIC_BASE", Const, 15}, - {"IMAGE_DLLCHARACTERISTICS_FORCE_INTEGRITY", Const, 15}, - {"IMAGE_DLLCHARACTERISTICS_GUARD_CF", Const, 15}, - {"IMAGE_DLLCHARACTERISTICS_HIGH_ENTROPY_VA", Const, 15}, - {"IMAGE_DLLCHARACTERISTICS_NO_BIND", Const, 15}, - {"IMAGE_DLLCHARACTERISTICS_NO_ISOLATION", Const, 15}, - {"IMAGE_DLLCHARACTERISTICS_NO_SEH", Const, 15}, - {"IMAGE_DLLCHARACTERISTICS_NX_COMPAT", Const, 15}, - {"IMAGE_DLLCHARACTERISTICS_TERMINAL_SERVER_AWARE", Const, 15}, - {"IMAGE_DLLCHARACTERISTICS_WDM_DRIVER", Const, 15}, - {"IMAGE_FILE_32BIT_MACHINE", Const, 15}, - {"IMAGE_FILE_AGGRESIVE_WS_TRIM", Const, 15}, - {"IMAGE_FILE_BYTES_REVERSED_HI", Const, 15}, - {"IMAGE_FILE_BYTES_REVERSED_LO", Const, 15}, - {"IMAGE_FILE_DEBUG_STRIPPED", Const, 15}, - {"IMAGE_FILE_DLL", Const, 15}, - {"IMAGE_FILE_EXECUTABLE_IMAGE", Const, 15}, - {"IMAGE_FILE_LARGE_ADDRESS_AWARE", Const, 15}, - {"IMAGE_FILE_LINE_NUMS_STRIPPED", Const, 15}, - {"IMAGE_FILE_LOCAL_SYMS_STRIPPED", Const, 15}, - {"IMAGE_FILE_MACHINE_AM33", Const, 0}, - {"IMAGE_FILE_MACHINE_AMD64", Const, 0}, - {"IMAGE_FILE_MACHINE_ARM", Const, 0}, - {"IMAGE_FILE_MACHINE_ARM64", Const, 11}, - {"IMAGE_FILE_MACHINE_ARMNT", Const, 12}, - {"IMAGE_FILE_MACHINE_EBC", Const, 0}, - {"IMAGE_FILE_MACHINE_I386", Const, 0}, - {"IMAGE_FILE_MACHINE_IA64", Const, 0}, - {"IMAGE_FILE_MACHINE_LOONGARCH32", Const, 19}, - {"IMAGE_FILE_MACHINE_LOONGARCH64", Const, 19}, - {"IMAGE_FILE_MACHINE_M32R", Const, 0}, - {"IMAGE_FILE_MACHINE_MIPS16", Const, 0}, - {"IMAGE_FILE_MACHINE_MIPSFPU", Const, 0}, - {"IMAGE_FILE_MACHINE_MIPSFPU16", Const, 0}, - {"IMAGE_FILE_MACHINE_POWERPC", Const, 0}, - {"IMAGE_FILE_MACHINE_POWERPCFP", Const, 0}, - {"IMAGE_FILE_MACHINE_R4000", Const, 0}, - {"IMAGE_FILE_MACHINE_RISCV128", Const, 20}, - {"IMAGE_FILE_MACHINE_RISCV32", Const, 20}, - {"IMAGE_FILE_MACHINE_RISCV64", Const, 20}, - {"IMAGE_FILE_MACHINE_SH3", Const, 0}, - {"IMAGE_FILE_MACHINE_SH3DSP", Const, 0}, - {"IMAGE_FILE_MACHINE_SH4", Const, 0}, - {"IMAGE_FILE_MACHINE_SH5", Const, 0}, - {"IMAGE_FILE_MACHINE_THUMB", Const, 0}, - {"IMAGE_FILE_MACHINE_UNKNOWN", Const, 0}, - {"IMAGE_FILE_MACHINE_WCEMIPSV2", Const, 0}, - {"IMAGE_FILE_NET_RUN_FROM_SWAP", Const, 15}, - {"IMAGE_FILE_RELOCS_STRIPPED", Const, 15}, - {"IMAGE_FILE_REMOVABLE_RUN_FROM_SWAP", Const, 15}, - {"IMAGE_FILE_SYSTEM", Const, 15}, - {"IMAGE_FILE_UP_SYSTEM_ONLY", Const, 15}, - {"IMAGE_SCN_CNT_CODE", Const, 19}, - {"IMAGE_SCN_CNT_INITIALIZED_DATA", Const, 19}, - {"IMAGE_SCN_CNT_UNINITIALIZED_DATA", Const, 19}, - {"IMAGE_SCN_LNK_COMDAT", Const, 19}, - {"IMAGE_SCN_MEM_DISCARDABLE", Const, 19}, - {"IMAGE_SCN_MEM_EXECUTE", Const, 19}, - {"IMAGE_SCN_MEM_READ", Const, 19}, - {"IMAGE_SCN_MEM_WRITE", Const, 19}, - {"IMAGE_SUBSYSTEM_EFI_APPLICATION", Const, 15}, - {"IMAGE_SUBSYSTEM_EFI_BOOT_SERVICE_DRIVER", Const, 15}, - {"IMAGE_SUBSYSTEM_EFI_ROM", Const, 15}, - {"IMAGE_SUBSYSTEM_EFI_RUNTIME_DRIVER", Const, 15}, - {"IMAGE_SUBSYSTEM_NATIVE", Const, 15}, - {"IMAGE_SUBSYSTEM_NATIVE_WINDOWS", Const, 15}, - {"IMAGE_SUBSYSTEM_OS2_CUI", Const, 15}, - {"IMAGE_SUBSYSTEM_POSIX_CUI", Const, 15}, - {"IMAGE_SUBSYSTEM_UNKNOWN", Const, 15}, - {"IMAGE_SUBSYSTEM_WINDOWS_BOOT_APPLICATION", Const, 15}, - {"IMAGE_SUBSYSTEM_WINDOWS_CE_GUI", Const, 15}, - {"IMAGE_SUBSYSTEM_WINDOWS_CUI", Const, 15}, - {"IMAGE_SUBSYSTEM_WINDOWS_GUI", Const, 15}, - {"IMAGE_SUBSYSTEM_XBOX", Const, 15}, - {"ImportDirectory", Type, 0}, - {"ImportDirectory.FirstThunk", Field, 0}, - {"ImportDirectory.ForwarderChain", Field, 0}, - {"ImportDirectory.Name", Field, 0}, 
- {"ImportDirectory.OriginalFirstThunk", Field, 0}, - {"ImportDirectory.TimeDateStamp", Field, 0}, - {"NewFile", Func, 0}, - {"Open", Func, 0}, - {"OptionalHeader32", Type, 3}, - {"OptionalHeader32.AddressOfEntryPoint", Field, 3}, - {"OptionalHeader32.BaseOfCode", Field, 3}, - {"OptionalHeader32.BaseOfData", Field, 3}, - {"OptionalHeader32.CheckSum", Field, 3}, - {"OptionalHeader32.DataDirectory", Field, 3}, - {"OptionalHeader32.DllCharacteristics", Field, 3}, - {"OptionalHeader32.FileAlignment", Field, 3}, - {"OptionalHeader32.ImageBase", Field, 3}, - {"OptionalHeader32.LoaderFlags", Field, 3}, - {"OptionalHeader32.Magic", Field, 3}, - {"OptionalHeader32.MajorImageVersion", Field, 3}, - {"OptionalHeader32.MajorLinkerVersion", Field, 3}, - {"OptionalHeader32.MajorOperatingSystemVersion", Field, 3}, - {"OptionalHeader32.MajorSubsystemVersion", Field, 3}, - {"OptionalHeader32.MinorImageVersion", Field, 3}, - {"OptionalHeader32.MinorLinkerVersion", Field, 3}, - {"OptionalHeader32.MinorOperatingSystemVersion", Field, 3}, - {"OptionalHeader32.MinorSubsystemVersion", Field, 3}, - {"OptionalHeader32.NumberOfRvaAndSizes", Field, 3}, - {"OptionalHeader32.SectionAlignment", Field, 3}, - {"OptionalHeader32.SizeOfCode", Field, 3}, - {"OptionalHeader32.SizeOfHeaders", Field, 3}, - {"OptionalHeader32.SizeOfHeapCommit", Field, 3}, - {"OptionalHeader32.SizeOfHeapReserve", Field, 3}, - {"OptionalHeader32.SizeOfImage", Field, 3}, - {"OptionalHeader32.SizeOfInitializedData", Field, 3}, - {"OptionalHeader32.SizeOfStackCommit", Field, 3}, - {"OptionalHeader32.SizeOfStackReserve", Field, 3}, - {"OptionalHeader32.SizeOfUninitializedData", Field, 3}, - {"OptionalHeader32.Subsystem", Field, 3}, - {"OptionalHeader32.Win32VersionValue", Field, 3}, - {"OptionalHeader64", Type, 3}, - {"OptionalHeader64.AddressOfEntryPoint", Field, 3}, - {"OptionalHeader64.BaseOfCode", Field, 3}, - {"OptionalHeader64.CheckSum", Field, 3}, - {"OptionalHeader64.DataDirectory", Field, 3}, - {"OptionalHeader64.DllCharacteristics", Field, 3}, - {"OptionalHeader64.FileAlignment", Field, 3}, - {"OptionalHeader64.ImageBase", Field, 3}, - {"OptionalHeader64.LoaderFlags", Field, 3}, - {"OptionalHeader64.Magic", Field, 3}, - {"OptionalHeader64.MajorImageVersion", Field, 3}, - {"OptionalHeader64.MajorLinkerVersion", Field, 3}, - {"OptionalHeader64.MajorOperatingSystemVersion", Field, 3}, - {"OptionalHeader64.MajorSubsystemVersion", Field, 3}, - {"OptionalHeader64.MinorImageVersion", Field, 3}, - {"OptionalHeader64.MinorLinkerVersion", Field, 3}, - {"OptionalHeader64.MinorOperatingSystemVersion", Field, 3}, - {"OptionalHeader64.MinorSubsystemVersion", Field, 3}, - {"OptionalHeader64.NumberOfRvaAndSizes", Field, 3}, - {"OptionalHeader64.SectionAlignment", Field, 3}, - {"OptionalHeader64.SizeOfCode", Field, 3}, - {"OptionalHeader64.SizeOfHeaders", Field, 3}, - {"OptionalHeader64.SizeOfHeapCommit", Field, 3}, - {"OptionalHeader64.SizeOfHeapReserve", Field, 3}, - {"OptionalHeader64.SizeOfImage", Field, 3}, - {"OptionalHeader64.SizeOfInitializedData", Field, 3}, - {"OptionalHeader64.SizeOfStackCommit", Field, 3}, - {"OptionalHeader64.SizeOfStackReserve", Field, 3}, - {"OptionalHeader64.SizeOfUninitializedData", Field, 3}, - {"OptionalHeader64.Subsystem", Field, 3}, - {"OptionalHeader64.Win32VersionValue", Field, 3}, - {"Reloc", Type, 8}, - {"Reloc.SymbolTableIndex", Field, 8}, - {"Reloc.Type", Field, 8}, - {"Reloc.VirtualAddress", Field, 8}, - {"Section", Type, 0}, - {"Section.ReaderAt", Field, 0}, - {"Section.Relocs", Field, 8}, - 
{"Section.SectionHeader", Field, 0}, - {"SectionHeader", Type, 0}, - {"SectionHeader.Characteristics", Field, 0}, - {"SectionHeader.Name", Field, 0}, - {"SectionHeader.NumberOfLineNumbers", Field, 0}, - {"SectionHeader.NumberOfRelocations", Field, 0}, - {"SectionHeader.Offset", Field, 0}, - {"SectionHeader.PointerToLineNumbers", Field, 0}, - {"SectionHeader.PointerToRelocations", Field, 0}, - {"SectionHeader.Size", Field, 0}, - {"SectionHeader.VirtualAddress", Field, 0}, - {"SectionHeader.VirtualSize", Field, 0}, - {"SectionHeader32", Type, 0}, - {"SectionHeader32.Characteristics", Field, 0}, - {"SectionHeader32.Name", Field, 0}, - {"SectionHeader32.NumberOfLineNumbers", Field, 0}, - {"SectionHeader32.NumberOfRelocations", Field, 0}, - {"SectionHeader32.PointerToLineNumbers", Field, 0}, - {"SectionHeader32.PointerToRawData", Field, 0}, - {"SectionHeader32.PointerToRelocations", Field, 0}, - {"SectionHeader32.SizeOfRawData", Field, 0}, - {"SectionHeader32.VirtualAddress", Field, 0}, - {"SectionHeader32.VirtualSize", Field, 0}, - {"StringTable", Type, 8}, - {"Symbol", Type, 1}, - {"Symbol.Name", Field, 1}, - {"Symbol.SectionNumber", Field, 1}, - {"Symbol.StorageClass", Field, 1}, - {"Symbol.Type", Field, 1}, - {"Symbol.Value", Field, 1}, + {"(*COFFSymbol).FullName", Method, 8, ""}, + {"(*File).COFFSymbolReadSectionDefAux", Method, 19, ""}, + {"(*File).Close", Method, 0, ""}, + {"(*File).DWARF", Method, 0, ""}, + {"(*File).ImportedLibraries", Method, 0, ""}, + {"(*File).ImportedSymbols", Method, 0, ""}, + {"(*File).Section", Method, 0, ""}, + {"(*FormatError).Error", Method, 0, ""}, + {"(*Section).Data", Method, 0, ""}, + {"(*Section).Open", Method, 0, ""}, + {"(Section).ReadAt", Method, 0, ""}, + {"(StringTable).String", Method, 8, ""}, + {"COFFSymbol", Type, 1, ""}, + {"COFFSymbol.Name", Field, 1, ""}, + {"COFFSymbol.NumberOfAuxSymbols", Field, 1, ""}, + {"COFFSymbol.SectionNumber", Field, 1, ""}, + {"COFFSymbol.StorageClass", Field, 1, ""}, + {"COFFSymbol.Type", Field, 1, ""}, + {"COFFSymbol.Value", Field, 1, ""}, + {"COFFSymbolAuxFormat5", Type, 19, ""}, + {"COFFSymbolAuxFormat5.Checksum", Field, 19, ""}, + {"COFFSymbolAuxFormat5.NumLineNumbers", Field, 19, ""}, + {"COFFSymbolAuxFormat5.NumRelocs", Field, 19, ""}, + {"COFFSymbolAuxFormat5.SecNum", Field, 19, ""}, + {"COFFSymbolAuxFormat5.Selection", Field, 19, ""}, + {"COFFSymbolAuxFormat5.Size", Field, 19, ""}, + {"COFFSymbolSize", Const, 1, ""}, + {"DataDirectory", Type, 3, ""}, + {"DataDirectory.Size", Field, 3, ""}, + {"DataDirectory.VirtualAddress", Field, 3, ""}, + {"File", Type, 0, ""}, + {"File.COFFSymbols", Field, 8, ""}, + {"File.FileHeader", Field, 0, ""}, + {"File.OptionalHeader", Field, 3, ""}, + {"File.Sections", Field, 0, ""}, + {"File.StringTable", Field, 8, ""}, + {"File.Symbols", Field, 1, ""}, + {"FileHeader", Type, 0, ""}, + {"FileHeader.Characteristics", Field, 0, ""}, + {"FileHeader.Machine", Field, 0, ""}, + {"FileHeader.NumberOfSections", Field, 0, ""}, + {"FileHeader.NumberOfSymbols", Field, 0, ""}, + {"FileHeader.PointerToSymbolTable", Field, 0, ""}, + {"FileHeader.SizeOfOptionalHeader", Field, 0, ""}, + {"FileHeader.TimeDateStamp", Field, 0, ""}, + {"FormatError", Type, 0, ""}, + {"IMAGE_COMDAT_SELECT_ANY", Const, 19, ""}, + {"IMAGE_COMDAT_SELECT_ASSOCIATIVE", Const, 19, ""}, + {"IMAGE_COMDAT_SELECT_EXACT_MATCH", Const, 19, ""}, + {"IMAGE_COMDAT_SELECT_LARGEST", Const, 19, ""}, + {"IMAGE_COMDAT_SELECT_NODUPLICATES", Const, 19, ""}, + {"IMAGE_COMDAT_SELECT_SAME_SIZE", Const, 19, ""}, + 
{"IMAGE_DIRECTORY_ENTRY_ARCHITECTURE", Const, 11, ""}, + {"IMAGE_DIRECTORY_ENTRY_BASERELOC", Const, 11, ""}, + {"IMAGE_DIRECTORY_ENTRY_BOUND_IMPORT", Const, 11, ""}, + {"IMAGE_DIRECTORY_ENTRY_COM_DESCRIPTOR", Const, 11, ""}, + {"IMAGE_DIRECTORY_ENTRY_DEBUG", Const, 11, ""}, + {"IMAGE_DIRECTORY_ENTRY_DELAY_IMPORT", Const, 11, ""}, + {"IMAGE_DIRECTORY_ENTRY_EXCEPTION", Const, 11, ""}, + {"IMAGE_DIRECTORY_ENTRY_EXPORT", Const, 11, ""}, + {"IMAGE_DIRECTORY_ENTRY_GLOBALPTR", Const, 11, ""}, + {"IMAGE_DIRECTORY_ENTRY_IAT", Const, 11, ""}, + {"IMAGE_DIRECTORY_ENTRY_IMPORT", Const, 11, ""}, + {"IMAGE_DIRECTORY_ENTRY_LOAD_CONFIG", Const, 11, ""}, + {"IMAGE_DIRECTORY_ENTRY_RESOURCE", Const, 11, ""}, + {"IMAGE_DIRECTORY_ENTRY_SECURITY", Const, 11, ""}, + {"IMAGE_DIRECTORY_ENTRY_TLS", Const, 11, ""}, + {"IMAGE_DLLCHARACTERISTICS_APPCONTAINER", Const, 15, ""}, + {"IMAGE_DLLCHARACTERISTICS_DYNAMIC_BASE", Const, 15, ""}, + {"IMAGE_DLLCHARACTERISTICS_FORCE_INTEGRITY", Const, 15, ""}, + {"IMAGE_DLLCHARACTERISTICS_GUARD_CF", Const, 15, ""}, + {"IMAGE_DLLCHARACTERISTICS_HIGH_ENTROPY_VA", Const, 15, ""}, + {"IMAGE_DLLCHARACTERISTICS_NO_BIND", Const, 15, ""}, + {"IMAGE_DLLCHARACTERISTICS_NO_ISOLATION", Const, 15, ""}, + {"IMAGE_DLLCHARACTERISTICS_NO_SEH", Const, 15, ""}, + {"IMAGE_DLLCHARACTERISTICS_NX_COMPAT", Const, 15, ""}, + {"IMAGE_DLLCHARACTERISTICS_TERMINAL_SERVER_AWARE", Const, 15, ""}, + {"IMAGE_DLLCHARACTERISTICS_WDM_DRIVER", Const, 15, ""}, + {"IMAGE_FILE_32BIT_MACHINE", Const, 15, ""}, + {"IMAGE_FILE_AGGRESIVE_WS_TRIM", Const, 15, ""}, + {"IMAGE_FILE_BYTES_REVERSED_HI", Const, 15, ""}, + {"IMAGE_FILE_BYTES_REVERSED_LO", Const, 15, ""}, + {"IMAGE_FILE_DEBUG_STRIPPED", Const, 15, ""}, + {"IMAGE_FILE_DLL", Const, 15, ""}, + {"IMAGE_FILE_EXECUTABLE_IMAGE", Const, 15, ""}, + {"IMAGE_FILE_LARGE_ADDRESS_AWARE", Const, 15, ""}, + {"IMAGE_FILE_LINE_NUMS_STRIPPED", Const, 15, ""}, + {"IMAGE_FILE_LOCAL_SYMS_STRIPPED", Const, 15, ""}, + {"IMAGE_FILE_MACHINE_AM33", Const, 0, ""}, + {"IMAGE_FILE_MACHINE_AMD64", Const, 0, ""}, + {"IMAGE_FILE_MACHINE_ARM", Const, 0, ""}, + {"IMAGE_FILE_MACHINE_ARM64", Const, 11, ""}, + {"IMAGE_FILE_MACHINE_ARMNT", Const, 12, ""}, + {"IMAGE_FILE_MACHINE_EBC", Const, 0, ""}, + {"IMAGE_FILE_MACHINE_I386", Const, 0, ""}, + {"IMAGE_FILE_MACHINE_IA64", Const, 0, ""}, + {"IMAGE_FILE_MACHINE_LOONGARCH32", Const, 19, ""}, + {"IMAGE_FILE_MACHINE_LOONGARCH64", Const, 19, ""}, + {"IMAGE_FILE_MACHINE_M32R", Const, 0, ""}, + {"IMAGE_FILE_MACHINE_MIPS16", Const, 0, ""}, + {"IMAGE_FILE_MACHINE_MIPSFPU", Const, 0, ""}, + {"IMAGE_FILE_MACHINE_MIPSFPU16", Const, 0, ""}, + {"IMAGE_FILE_MACHINE_POWERPC", Const, 0, ""}, + {"IMAGE_FILE_MACHINE_POWERPCFP", Const, 0, ""}, + {"IMAGE_FILE_MACHINE_R4000", Const, 0, ""}, + {"IMAGE_FILE_MACHINE_RISCV128", Const, 20, ""}, + {"IMAGE_FILE_MACHINE_RISCV32", Const, 20, ""}, + {"IMAGE_FILE_MACHINE_RISCV64", Const, 20, ""}, + {"IMAGE_FILE_MACHINE_SH3", Const, 0, ""}, + {"IMAGE_FILE_MACHINE_SH3DSP", Const, 0, ""}, + {"IMAGE_FILE_MACHINE_SH4", Const, 0, ""}, + {"IMAGE_FILE_MACHINE_SH5", Const, 0, ""}, + {"IMAGE_FILE_MACHINE_THUMB", Const, 0, ""}, + {"IMAGE_FILE_MACHINE_UNKNOWN", Const, 0, ""}, + {"IMAGE_FILE_MACHINE_WCEMIPSV2", Const, 0, ""}, + {"IMAGE_FILE_NET_RUN_FROM_SWAP", Const, 15, ""}, + {"IMAGE_FILE_RELOCS_STRIPPED", Const, 15, ""}, + {"IMAGE_FILE_REMOVABLE_RUN_FROM_SWAP", Const, 15, ""}, + {"IMAGE_FILE_SYSTEM", Const, 15, ""}, + {"IMAGE_FILE_UP_SYSTEM_ONLY", Const, 15, ""}, + {"IMAGE_SCN_CNT_CODE", Const, 19, ""}, + {"IMAGE_SCN_CNT_INITIALIZED_DATA", Const, 
19, ""}, + {"IMAGE_SCN_CNT_UNINITIALIZED_DATA", Const, 19, ""}, + {"IMAGE_SCN_LNK_COMDAT", Const, 19, ""}, + {"IMAGE_SCN_MEM_DISCARDABLE", Const, 19, ""}, + {"IMAGE_SCN_MEM_EXECUTE", Const, 19, ""}, + {"IMAGE_SCN_MEM_READ", Const, 19, ""}, + {"IMAGE_SCN_MEM_WRITE", Const, 19, ""}, + {"IMAGE_SUBSYSTEM_EFI_APPLICATION", Const, 15, ""}, + {"IMAGE_SUBSYSTEM_EFI_BOOT_SERVICE_DRIVER", Const, 15, ""}, + {"IMAGE_SUBSYSTEM_EFI_ROM", Const, 15, ""}, + {"IMAGE_SUBSYSTEM_EFI_RUNTIME_DRIVER", Const, 15, ""}, + {"IMAGE_SUBSYSTEM_NATIVE", Const, 15, ""}, + {"IMAGE_SUBSYSTEM_NATIVE_WINDOWS", Const, 15, ""}, + {"IMAGE_SUBSYSTEM_OS2_CUI", Const, 15, ""}, + {"IMAGE_SUBSYSTEM_POSIX_CUI", Const, 15, ""}, + {"IMAGE_SUBSYSTEM_UNKNOWN", Const, 15, ""}, + {"IMAGE_SUBSYSTEM_WINDOWS_BOOT_APPLICATION", Const, 15, ""}, + {"IMAGE_SUBSYSTEM_WINDOWS_CE_GUI", Const, 15, ""}, + {"IMAGE_SUBSYSTEM_WINDOWS_CUI", Const, 15, ""}, + {"IMAGE_SUBSYSTEM_WINDOWS_GUI", Const, 15, ""}, + {"IMAGE_SUBSYSTEM_XBOX", Const, 15, ""}, + {"ImportDirectory", Type, 0, ""}, + {"ImportDirectory.FirstThunk", Field, 0, ""}, + {"ImportDirectory.ForwarderChain", Field, 0, ""}, + {"ImportDirectory.Name", Field, 0, ""}, + {"ImportDirectory.OriginalFirstThunk", Field, 0, ""}, + {"ImportDirectory.TimeDateStamp", Field, 0, ""}, + {"NewFile", Func, 0, "func(r io.ReaderAt) (*File, error)"}, + {"Open", Func, 0, "func(name string) (*File, error)"}, + {"OptionalHeader32", Type, 3, ""}, + {"OptionalHeader32.AddressOfEntryPoint", Field, 3, ""}, + {"OptionalHeader32.BaseOfCode", Field, 3, ""}, + {"OptionalHeader32.BaseOfData", Field, 3, ""}, + {"OptionalHeader32.CheckSum", Field, 3, ""}, + {"OptionalHeader32.DataDirectory", Field, 3, ""}, + {"OptionalHeader32.DllCharacteristics", Field, 3, ""}, + {"OptionalHeader32.FileAlignment", Field, 3, ""}, + {"OptionalHeader32.ImageBase", Field, 3, ""}, + {"OptionalHeader32.LoaderFlags", Field, 3, ""}, + {"OptionalHeader32.Magic", Field, 3, ""}, + {"OptionalHeader32.MajorImageVersion", Field, 3, ""}, + {"OptionalHeader32.MajorLinkerVersion", Field, 3, ""}, + {"OptionalHeader32.MajorOperatingSystemVersion", Field, 3, ""}, + {"OptionalHeader32.MajorSubsystemVersion", Field, 3, ""}, + {"OptionalHeader32.MinorImageVersion", Field, 3, ""}, + {"OptionalHeader32.MinorLinkerVersion", Field, 3, ""}, + {"OptionalHeader32.MinorOperatingSystemVersion", Field, 3, ""}, + {"OptionalHeader32.MinorSubsystemVersion", Field, 3, ""}, + {"OptionalHeader32.NumberOfRvaAndSizes", Field, 3, ""}, + {"OptionalHeader32.SectionAlignment", Field, 3, ""}, + {"OptionalHeader32.SizeOfCode", Field, 3, ""}, + {"OptionalHeader32.SizeOfHeaders", Field, 3, ""}, + {"OptionalHeader32.SizeOfHeapCommit", Field, 3, ""}, + {"OptionalHeader32.SizeOfHeapReserve", Field, 3, ""}, + {"OptionalHeader32.SizeOfImage", Field, 3, ""}, + {"OptionalHeader32.SizeOfInitializedData", Field, 3, ""}, + {"OptionalHeader32.SizeOfStackCommit", Field, 3, ""}, + {"OptionalHeader32.SizeOfStackReserve", Field, 3, ""}, + {"OptionalHeader32.SizeOfUninitializedData", Field, 3, ""}, + {"OptionalHeader32.Subsystem", Field, 3, ""}, + {"OptionalHeader32.Win32VersionValue", Field, 3, ""}, + {"OptionalHeader64", Type, 3, ""}, + {"OptionalHeader64.AddressOfEntryPoint", Field, 3, ""}, + {"OptionalHeader64.BaseOfCode", Field, 3, ""}, + {"OptionalHeader64.CheckSum", Field, 3, ""}, + {"OptionalHeader64.DataDirectory", Field, 3, ""}, + {"OptionalHeader64.DllCharacteristics", Field, 3, ""}, + {"OptionalHeader64.FileAlignment", Field, 3, ""}, + {"OptionalHeader64.ImageBase", Field, 3, ""}, + 
{"OptionalHeader64.LoaderFlags", Field, 3, ""}, + {"OptionalHeader64.Magic", Field, 3, ""}, + {"OptionalHeader64.MajorImageVersion", Field, 3, ""}, + {"OptionalHeader64.MajorLinkerVersion", Field, 3, ""}, + {"OptionalHeader64.MajorOperatingSystemVersion", Field, 3, ""}, + {"OptionalHeader64.MajorSubsystemVersion", Field, 3, ""}, + {"OptionalHeader64.MinorImageVersion", Field, 3, ""}, + {"OptionalHeader64.MinorLinkerVersion", Field, 3, ""}, + {"OptionalHeader64.MinorOperatingSystemVersion", Field, 3, ""}, + {"OptionalHeader64.MinorSubsystemVersion", Field, 3, ""}, + {"OptionalHeader64.NumberOfRvaAndSizes", Field, 3, ""}, + {"OptionalHeader64.SectionAlignment", Field, 3, ""}, + {"OptionalHeader64.SizeOfCode", Field, 3, ""}, + {"OptionalHeader64.SizeOfHeaders", Field, 3, ""}, + {"OptionalHeader64.SizeOfHeapCommit", Field, 3, ""}, + {"OptionalHeader64.SizeOfHeapReserve", Field, 3, ""}, + {"OptionalHeader64.SizeOfImage", Field, 3, ""}, + {"OptionalHeader64.SizeOfInitializedData", Field, 3, ""}, + {"OptionalHeader64.SizeOfStackCommit", Field, 3, ""}, + {"OptionalHeader64.SizeOfStackReserve", Field, 3, ""}, + {"OptionalHeader64.SizeOfUninitializedData", Field, 3, ""}, + {"OptionalHeader64.Subsystem", Field, 3, ""}, + {"OptionalHeader64.Win32VersionValue", Field, 3, ""}, + {"Reloc", Type, 8, ""}, + {"Reloc.SymbolTableIndex", Field, 8, ""}, + {"Reloc.Type", Field, 8, ""}, + {"Reloc.VirtualAddress", Field, 8, ""}, + {"Section", Type, 0, ""}, + {"Section.ReaderAt", Field, 0, ""}, + {"Section.Relocs", Field, 8, ""}, + {"Section.SectionHeader", Field, 0, ""}, + {"SectionHeader", Type, 0, ""}, + {"SectionHeader.Characteristics", Field, 0, ""}, + {"SectionHeader.Name", Field, 0, ""}, + {"SectionHeader.NumberOfLineNumbers", Field, 0, ""}, + {"SectionHeader.NumberOfRelocations", Field, 0, ""}, + {"SectionHeader.Offset", Field, 0, ""}, + {"SectionHeader.PointerToLineNumbers", Field, 0, ""}, + {"SectionHeader.PointerToRelocations", Field, 0, ""}, + {"SectionHeader.Size", Field, 0, ""}, + {"SectionHeader.VirtualAddress", Field, 0, ""}, + {"SectionHeader.VirtualSize", Field, 0, ""}, + {"SectionHeader32", Type, 0, ""}, + {"SectionHeader32.Characteristics", Field, 0, ""}, + {"SectionHeader32.Name", Field, 0, ""}, + {"SectionHeader32.NumberOfLineNumbers", Field, 0, ""}, + {"SectionHeader32.NumberOfRelocations", Field, 0, ""}, + {"SectionHeader32.PointerToLineNumbers", Field, 0, ""}, + {"SectionHeader32.PointerToRawData", Field, 0, ""}, + {"SectionHeader32.PointerToRelocations", Field, 0, ""}, + {"SectionHeader32.SizeOfRawData", Field, 0, ""}, + {"SectionHeader32.VirtualAddress", Field, 0, ""}, + {"SectionHeader32.VirtualSize", Field, 0, ""}, + {"StringTable", Type, 8, ""}, + {"Symbol", Type, 1, ""}, + {"Symbol.Name", Field, 1, ""}, + {"Symbol.SectionNumber", Field, 1, ""}, + {"Symbol.StorageClass", Field, 1, ""}, + {"Symbol.Type", Field, 1, ""}, + {"Symbol.Value", Field, 1, ""}, }, "debug/plan9obj": { - {"(*File).Close", Method, 3}, - {"(*File).Section", Method, 3}, - {"(*File).Symbols", Method, 3}, - {"(*Section).Data", Method, 3}, - {"(*Section).Open", Method, 3}, - {"(Section).ReadAt", Method, 3}, - {"ErrNoSymbols", Var, 18}, - {"File", Type, 3}, - {"File.FileHeader", Field, 3}, - {"File.Sections", Field, 3}, - {"FileHeader", Type, 3}, - {"FileHeader.Bss", Field, 3}, - {"FileHeader.Entry", Field, 3}, - {"FileHeader.HdrSize", Field, 4}, - {"FileHeader.LoadAddress", Field, 4}, - {"FileHeader.Magic", Field, 3}, - {"FileHeader.PtrSize", Field, 3}, - {"Magic386", Const, 3}, - {"Magic64", Const, 3}, - {"MagicAMD64", 
Const, 3}, - {"MagicARM", Const, 3}, - {"NewFile", Func, 3}, - {"Open", Func, 3}, - {"Section", Type, 3}, - {"Section.ReaderAt", Field, 3}, - {"Section.SectionHeader", Field, 3}, - {"SectionHeader", Type, 3}, - {"SectionHeader.Name", Field, 3}, - {"SectionHeader.Offset", Field, 3}, - {"SectionHeader.Size", Field, 3}, - {"Sym", Type, 3}, - {"Sym.Name", Field, 3}, - {"Sym.Type", Field, 3}, - {"Sym.Value", Field, 3}, + {"(*File).Close", Method, 3, ""}, + {"(*File).Section", Method, 3, ""}, + {"(*File).Symbols", Method, 3, ""}, + {"(*Section).Data", Method, 3, ""}, + {"(*Section).Open", Method, 3, ""}, + {"(Section).ReadAt", Method, 3, ""}, + {"ErrNoSymbols", Var, 18, ""}, + {"File", Type, 3, ""}, + {"File.FileHeader", Field, 3, ""}, + {"File.Sections", Field, 3, ""}, + {"FileHeader", Type, 3, ""}, + {"FileHeader.Bss", Field, 3, ""}, + {"FileHeader.Entry", Field, 3, ""}, + {"FileHeader.HdrSize", Field, 4, ""}, + {"FileHeader.LoadAddress", Field, 4, ""}, + {"FileHeader.Magic", Field, 3, ""}, + {"FileHeader.PtrSize", Field, 3, ""}, + {"Magic386", Const, 3, ""}, + {"Magic64", Const, 3, ""}, + {"MagicAMD64", Const, 3, ""}, + {"MagicARM", Const, 3, ""}, + {"NewFile", Func, 3, "func(r io.ReaderAt) (*File, error)"}, + {"Open", Func, 3, "func(name string) (*File, error)"}, + {"Section", Type, 3, ""}, + {"Section.ReaderAt", Field, 3, ""}, + {"Section.SectionHeader", Field, 3, ""}, + {"SectionHeader", Type, 3, ""}, + {"SectionHeader.Name", Field, 3, ""}, + {"SectionHeader.Offset", Field, 3, ""}, + {"SectionHeader.Size", Field, 3, ""}, + {"Sym", Type, 3, ""}, + {"Sym.Name", Field, 3, ""}, + {"Sym.Type", Field, 3, ""}, + {"Sym.Value", Field, 3, ""}, }, "embed": { - {"(FS).Open", Method, 16}, - {"(FS).ReadDir", Method, 16}, - {"(FS).ReadFile", Method, 16}, - {"FS", Type, 16}, + {"(FS).Open", Method, 16, ""}, + {"(FS).ReadDir", Method, 16, ""}, + {"(FS).ReadFile", Method, 16, ""}, + {"FS", Type, 16, ""}, }, "encoding": { - {"BinaryAppender", Type, 24}, - {"BinaryMarshaler", Type, 2}, - {"BinaryUnmarshaler", Type, 2}, - {"TextAppender", Type, 24}, - {"TextMarshaler", Type, 2}, - {"TextUnmarshaler", Type, 2}, + {"BinaryAppender", Type, 24, ""}, + {"BinaryMarshaler", Type, 2, ""}, + {"BinaryUnmarshaler", Type, 2, ""}, + {"TextAppender", Type, 24, ""}, + {"TextMarshaler", Type, 2, ""}, + {"TextUnmarshaler", Type, 2, ""}, }, "encoding/ascii85": { - {"(CorruptInputError).Error", Method, 0}, - {"CorruptInputError", Type, 0}, - {"Decode", Func, 0}, - {"Encode", Func, 0}, - {"MaxEncodedLen", Func, 0}, - {"NewDecoder", Func, 0}, - {"NewEncoder", Func, 0}, + {"(CorruptInputError).Error", Method, 0, ""}, + {"CorruptInputError", Type, 0, ""}, + {"Decode", Func, 0, "func(dst []byte, src []byte, flush bool) (ndst int, nsrc int, err error)"}, + {"Encode", Func, 0, "func(dst []byte, src []byte) int"}, + {"MaxEncodedLen", Func, 0, "func(n int) int"}, + {"NewDecoder", Func, 0, "func(r io.Reader) io.Reader"}, + {"NewEncoder", Func, 0, "func(w io.Writer) io.WriteCloser"}, }, "encoding/asn1": { - {"(BitString).At", Method, 0}, - {"(BitString).RightAlign", Method, 0}, - {"(ObjectIdentifier).Equal", Method, 0}, - {"(ObjectIdentifier).String", Method, 3}, - {"(StructuralError).Error", Method, 0}, - {"(SyntaxError).Error", Method, 0}, - {"BitString", Type, 0}, - {"BitString.BitLength", Field, 0}, - {"BitString.Bytes", Field, 0}, - {"ClassApplication", Const, 6}, - {"ClassContextSpecific", Const, 6}, - {"ClassPrivate", Const, 6}, - {"ClassUniversal", Const, 6}, - {"Enumerated", Type, 0}, - {"Flag", Type, 0}, - {"Marshal", Func, 0}, - 
{"MarshalWithParams", Func, 10}, - {"NullBytes", Var, 9}, - {"NullRawValue", Var, 9}, - {"ObjectIdentifier", Type, 0}, - {"RawContent", Type, 0}, - {"RawValue", Type, 0}, - {"RawValue.Bytes", Field, 0}, - {"RawValue.Class", Field, 0}, - {"RawValue.FullBytes", Field, 0}, - {"RawValue.IsCompound", Field, 0}, - {"RawValue.Tag", Field, 0}, - {"StructuralError", Type, 0}, - {"StructuralError.Msg", Field, 0}, - {"SyntaxError", Type, 0}, - {"SyntaxError.Msg", Field, 0}, - {"TagBMPString", Const, 14}, - {"TagBitString", Const, 6}, - {"TagBoolean", Const, 6}, - {"TagEnum", Const, 6}, - {"TagGeneralString", Const, 6}, - {"TagGeneralizedTime", Const, 6}, - {"TagIA5String", Const, 6}, - {"TagInteger", Const, 6}, - {"TagNull", Const, 9}, - {"TagNumericString", Const, 10}, - {"TagOID", Const, 6}, - {"TagOctetString", Const, 6}, - {"TagPrintableString", Const, 6}, - {"TagSequence", Const, 6}, - {"TagSet", Const, 6}, - {"TagT61String", Const, 6}, - {"TagUTCTime", Const, 6}, - {"TagUTF8String", Const, 6}, - {"Unmarshal", Func, 0}, - {"UnmarshalWithParams", Func, 0}, + {"(BitString).At", Method, 0, ""}, + {"(BitString).RightAlign", Method, 0, ""}, + {"(ObjectIdentifier).Equal", Method, 0, ""}, + {"(ObjectIdentifier).String", Method, 3, ""}, + {"(StructuralError).Error", Method, 0, ""}, + {"(SyntaxError).Error", Method, 0, ""}, + {"BitString", Type, 0, ""}, + {"BitString.BitLength", Field, 0, ""}, + {"BitString.Bytes", Field, 0, ""}, + {"ClassApplication", Const, 6, ""}, + {"ClassContextSpecific", Const, 6, ""}, + {"ClassPrivate", Const, 6, ""}, + {"ClassUniversal", Const, 6, ""}, + {"Enumerated", Type, 0, ""}, + {"Flag", Type, 0, ""}, + {"Marshal", Func, 0, "func(val any) ([]byte, error)"}, + {"MarshalWithParams", Func, 10, "func(val any, params string) ([]byte, error)"}, + {"NullBytes", Var, 9, ""}, + {"NullRawValue", Var, 9, ""}, + {"ObjectIdentifier", Type, 0, ""}, + {"RawContent", Type, 0, ""}, + {"RawValue", Type, 0, ""}, + {"RawValue.Bytes", Field, 0, ""}, + {"RawValue.Class", Field, 0, ""}, + {"RawValue.FullBytes", Field, 0, ""}, + {"RawValue.IsCompound", Field, 0, ""}, + {"RawValue.Tag", Field, 0, ""}, + {"StructuralError", Type, 0, ""}, + {"StructuralError.Msg", Field, 0, ""}, + {"SyntaxError", Type, 0, ""}, + {"SyntaxError.Msg", Field, 0, ""}, + {"TagBMPString", Const, 14, ""}, + {"TagBitString", Const, 6, ""}, + {"TagBoolean", Const, 6, ""}, + {"TagEnum", Const, 6, ""}, + {"TagGeneralString", Const, 6, ""}, + {"TagGeneralizedTime", Const, 6, ""}, + {"TagIA5String", Const, 6, ""}, + {"TagInteger", Const, 6, ""}, + {"TagNull", Const, 9, ""}, + {"TagNumericString", Const, 10, ""}, + {"TagOID", Const, 6, ""}, + {"TagOctetString", Const, 6, ""}, + {"TagPrintableString", Const, 6, ""}, + {"TagSequence", Const, 6, ""}, + {"TagSet", Const, 6, ""}, + {"TagT61String", Const, 6, ""}, + {"TagUTCTime", Const, 6, ""}, + {"TagUTF8String", Const, 6, ""}, + {"Unmarshal", Func, 0, "func(b []byte, val any) (rest []byte, err error)"}, + {"UnmarshalWithParams", Func, 0, "func(b []byte, val any, params string) (rest []byte, err error)"}, }, "encoding/base32": { - {"(*Encoding).AppendDecode", Method, 22}, - {"(*Encoding).AppendEncode", Method, 22}, - {"(*Encoding).Decode", Method, 0}, - {"(*Encoding).DecodeString", Method, 0}, - {"(*Encoding).DecodedLen", Method, 0}, - {"(*Encoding).Encode", Method, 0}, - {"(*Encoding).EncodeToString", Method, 0}, - {"(*Encoding).EncodedLen", Method, 0}, - {"(CorruptInputError).Error", Method, 0}, - {"(Encoding).WithPadding", Method, 9}, - {"CorruptInputError", Type, 0}, - {"Encoding", 
Type, 0}, - {"HexEncoding", Var, 0}, - {"NewDecoder", Func, 0}, - {"NewEncoder", Func, 0}, - {"NewEncoding", Func, 0}, - {"NoPadding", Const, 9}, - {"StdEncoding", Var, 0}, - {"StdPadding", Const, 9}, + {"(*Encoding).AppendDecode", Method, 22, ""}, + {"(*Encoding).AppendEncode", Method, 22, ""}, + {"(*Encoding).Decode", Method, 0, ""}, + {"(*Encoding).DecodeString", Method, 0, ""}, + {"(*Encoding).DecodedLen", Method, 0, ""}, + {"(*Encoding).Encode", Method, 0, ""}, + {"(*Encoding).EncodeToString", Method, 0, ""}, + {"(*Encoding).EncodedLen", Method, 0, ""}, + {"(CorruptInputError).Error", Method, 0, ""}, + {"(Encoding).WithPadding", Method, 9, ""}, + {"CorruptInputError", Type, 0, ""}, + {"Encoding", Type, 0, ""}, + {"HexEncoding", Var, 0, ""}, + {"NewDecoder", Func, 0, "func(enc *Encoding, r io.Reader) io.Reader"}, + {"NewEncoder", Func, 0, "func(enc *Encoding, w io.Writer) io.WriteCloser"}, + {"NewEncoding", Func, 0, "func(encoder string) *Encoding"}, + {"NoPadding", Const, 9, ""}, + {"StdEncoding", Var, 0, ""}, + {"StdPadding", Const, 9, ""}, }, "encoding/base64": { - {"(*Encoding).AppendDecode", Method, 22}, - {"(*Encoding).AppendEncode", Method, 22}, - {"(*Encoding).Decode", Method, 0}, - {"(*Encoding).DecodeString", Method, 0}, - {"(*Encoding).DecodedLen", Method, 0}, - {"(*Encoding).Encode", Method, 0}, - {"(*Encoding).EncodeToString", Method, 0}, - {"(*Encoding).EncodedLen", Method, 0}, - {"(CorruptInputError).Error", Method, 0}, - {"(Encoding).Strict", Method, 8}, - {"(Encoding).WithPadding", Method, 5}, - {"CorruptInputError", Type, 0}, - {"Encoding", Type, 0}, - {"NewDecoder", Func, 0}, - {"NewEncoder", Func, 0}, - {"NewEncoding", Func, 0}, - {"NoPadding", Const, 5}, - {"RawStdEncoding", Var, 5}, - {"RawURLEncoding", Var, 5}, - {"StdEncoding", Var, 0}, - {"StdPadding", Const, 5}, - {"URLEncoding", Var, 0}, + {"(*Encoding).AppendDecode", Method, 22, ""}, + {"(*Encoding).AppendEncode", Method, 22, ""}, + {"(*Encoding).Decode", Method, 0, ""}, + {"(*Encoding).DecodeString", Method, 0, ""}, + {"(*Encoding).DecodedLen", Method, 0, ""}, + {"(*Encoding).Encode", Method, 0, ""}, + {"(*Encoding).EncodeToString", Method, 0, ""}, + {"(*Encoding).EncodedLen", Method, 0, ""}, + {"(CorruptInputError).Error", Method, 0, ""}, + {"(Encoding).Strict", Method, 8, ""}, + {"(Encoding).WithPadding", Method, 5, ""}, + {"CorruptInputError", Type, 0, ""}, + {"Encoding", Type, 0, ""}, + {"NewDecoder", Func, 0, "func(enc *Encoding, r io.Reader) io.Reader"}, + {"NewEncoder", Func, 0, "func(enc *Encoding, w io.Writer) io.WriteCloser"}, + {"NewEncoding", Func, 0, "func(encoder string) *Encoding"}, + {"NoPadding", Const, 5, ""}, + {"RawStdEncoding", Var, 5, ""}, + {"RawURLEncoding", Var, 5, ""}, + {"StdEncoding", Var, 0, ""}, + {"StdPadding", Const, 5, ""}, + {"URLEncoding", Var, 0, ""}, }, "encoding/binary": { - {"Append", Func, 23}, - {"AppendByteOrder", Type, 19}, - {"AppendUvarint", Func, 19}, - {"AppendVarint", Func, 19}, - {"BigEndian", Var, 0}, - {"ByteOrder", Type, 0}, - {"Decode", Func, 23}, - {"Encode", Func, 23}, - {"LittleEndian", Var, 0}, - {"MaxVarintLen16", Const, 0}, - {"MaxVarintLen32", Const, 0}, - {"MaxVarintLen64", Const, 0}, - {"NativeEndian", Var, 21}, - {"PutUvarint", Func, 0}, - {"PutVarint", Func, 0}, - {"Read", Func, 0}, - {"ReadUvarint", Func, 0}, - {"ReadVarint", Func, 0}, - {"Size", Func, 0}, - {"Uvarint", Func, 0}, - {"Varint", Func, 0}, - {"Write", Func, 0}, + {"Append", Func, 23, "func(buf []byte, order ByteOrder, data any) ([]byte, error)"}, + {"AppendByteOrder", Type, 19, 
""}, + {"AppendUvarint", Func, 19, "func(buf []byte, x uint64) []byte"}, + {"AppendVarint", Func, 19, "func(buf []byte, x int64) []byte"}, + {"BigEndian", Var, 0, ""}, + {"ByteOrder", Type, 0, ""}, + {"Decode", Func, 23, "func(buf []byte, order ByteOrder, data any) (int, error)"}, + {"Encode", Func, 23, "func(buf []byte, order ByteOrder, data any) (int, error)"}, + {"LittleEndian", Var, 0, ""}, + {"MaxVarintLen16", Const, 0, ""}, + {"MaxVarintLen32", Const, 0, ""}, + {"MaxVarintLen64", Const, 0, ""}, + {"NativeEndian", Var, 21, ""}, + {"PutUvarint", Func, 0, "func(buf []byte, x uint64) int"}, + {"PutVarint", Func, 0, "func(buf []byte, x int64) int"}, + {"Read", Func, 0, "func(r io.Reader, order ByteOrder, data any) error"}, + {"ReadUvarint", Func, 0, "func(r io.ByteReader) (uint64, error)"}, + {"ReadVarint", Func, 0, "func(r io.ByteReader) (int64, error)"}, + {"Size", Func, 0, "func(v any) int"}, + {"Uvarint", Func, 0, "func(buf []byte) (uint64, int)"}, + {"Varint", Func, 0, "func(buf []byte) (int64, int)"}, + {"Write", Func, 0, "func(w io.Writer, order ByteOrder, data any) error"}, }, "encoding/csv": { - {"(*ParseError).Error", Method, 0}, - {"(*ParseError).Unwrap", Method, 13}, - {"(*Reader).FieldPos", Method, 17}, - {"(*Reader).InputOffset", Method, 19}, - {"(*Reader).Read", Method, 0}, - {"(*Reader).ReadAll", Method, 0}, - {"(*Writer).Error", Method, 1}, - {"(*Writer).Flush", Method, 0}, - {"(*Writer).Write", Method, 0}, - {"(*Writer).WriteAll", Method, 0}, - {"ErrBareQuote", Var, 0}, - {"ErrFieldCount", Var, 0}, - {"ErrQuote", Var, 0}, - {"ErrTrailingComma", Var, 0}, - {"NewReader", Func, 0}, - {"NewWriter", Func, 0}, - {"ParseError", Type, 0}, - {"ParseError.Column", Field, 0}, - {"ParseError.Err", Field, 0}, - {"ParseError.Line", Field, 0}, - {"ParseError.StartLine", Field, 10}, - {"Reader", Type, 0}, - {"Reader.Comma", Field, 0}, - {"Reader.Comment", Field, 0}, - {"Reader.FieldsPerRecord", Field, 0}, - {"Reader.LazyQuotes", Field, 0}, - {"Reader.ReuseRecord", Field, 9}, - {"Reader.TrailingComma", Field, 0}, - {"Reader.TrimLeadingSpace", Field, 0}, - {"Writer", Type, 0}, - {"Writer.Comma", Field, 0}, - {"Writer.UseCRLF", Field, 0}, + {"(*ParseError).Error", Method, 0, ""}, + {"(*ParseError).Unwrap", Method, 13, ""}, + {"(*Reader).FieldPos", Method, 17, ""}, + {"(*Reader).InputOffset", Method, 19, ""}, + {"(*Reader).Read", Method, 0, ""}, + {"(*Reader).ReadAll", Method, 0, ""}, + {"(*Writer).Error", Method, 1, ""}, + {"(*Writer).Flush", Method, 0, ""}, + {"(*Writer).Write", Method, 0, ""}, + {"(*Writer).WriteAll", Method, 0, ""}, + {"ErrBareQuote", Var, 0, ""}, + {"ErrFieldCount", Var, 0, ""}, + {"ErrQuote", Var, 0, ""}, + {"ErrTrailingComma", Var, 0, ""}, + {"NewReader", Func, 0, "func(r io.Reader) *Reader"}, + {"NewWriter", Func, 0, "func(w io.Writer) *Writer"}, + {"ParseError", Type, 0, ""}, + {"ParseError.Column", Field, 0, ""}, + {"ParseError.Err", Field, 0, ""}, + {"ParseError.Line", Field, 0, ""}, + {"ParseError.StartLine", Field, 10, ""}, + {"Reader", Type, 0, ""}, + {"Reader.Comma", Field, 0, ""}, + {"Reader.Comment", Field, 0, ""}, + {"Reader.FieldsPerRecord", Field, 0, ""}, + {"Reader.LazyQuotes", Field, 0, ""}, + {"Reader.ReuseRecord", Field, 9, ""}, + {"Reader.TrailingComma", Field, 0, ""}, + {"Reader.TrimLeadingSpace", Field, 0, ""}, + {"Writer", Type, 0, ""}, + {"Writer.Comma", Field, 0, ""}, + {"Writer.UseCRLF", Field, 0, ""}, }, "encoding/gob": { - {"(*Decoder).Decode", Method, 0}, - {"(*Decoder).DecodeValue", Method, 0}, - {"(*Encoder).Encode", Method, 0}, - 
{"(*Encoder).EncodeValue", Method, 0}, - {"CommonType", Type, 0}, - {"CommonType.Id", Field, 0}, - {"CommonType.Name", Field, 0}, - {"Decoder", Type, 0}, - {"Encoder", Type, 0}, - {"GobDecoder", Type, 0}, - {"GobEncoder", Type, 0}, - {"NewDecoder", Func, 0}, - {"NewEncoder", Func, 0}, - {"Register", Func, 0}, - {"RegisterName", Func, 0}, + {"(*Decoder).Decode", Method, 0, ""}, + {"(*Decoder).DecodeValue", Method, 0, ""}, + {"(*Encoder).Encode", Method, 0, ""}, + {"(*Encoder).EncodeValue", Method, 0, ""}, + {"CommonType", Type, 0, ""}, + {"CommonType.Id", Field, 0, ""}, + {"CommonType.Name", Field, 0, ""}, + {"Decoder", Type, 0, ""}, + {"Encoder", Type, 0, ""}, + {"GobDecoder", Type, 0, ""}, + {"GobEncoder", Type, 0, ""}, + {"NewDecoder", Func, 0, "func(r io.Reader) *Decoder"}, + {"NewEncoder", Func, 0, "func(w io.Writer) *Encoder"}, + {"Register", Func, 0, "func(value any)"}, + {"RegisterName", Func, 0, "func(name string, value any)"}, }, "encoding/hex": { - {"(InvalidByteError).Error", Method, 0}, - {"AppendDecode", Func, 22}, - {"AppendEncode", Func, 22}, - {"Decode", Func, 0}, - {"DecodeString", Func, 0}, - {"DecodedLen", Func, 0}, - {"Dump", Func, 0}, - {"Dumper", Func, 0}, - {"Encode", Func, 0}, - {"EncodeToString", Func, 0}, - {"EncodedLen", Func, 0}, - {"ErrLength", Var, 0}, - {"InvalidByteError", Type, 0}, - {"NewDecoder", Func, 10}, - {"NewEncoder", Func, 10}, + {"(InvalidByteError).Error", Method, 0, ""}, + {"AppendDecode", Func, 22, "func(dst []byte, src []byte) ([]byte, error)"}, + {"AppendEncode", Func, 22, "func(dst []byte, src []byte) []byte"}, + {"Decode", Func, 0, "func(dst []byte, src []byte) (int, error)"}, + {"DecodeString", Func, 0, "func(s string) ([]byte, error)"}, + {"DecodedLen", Func, 0, "func(x int) int"}, + {"Dump", Func, 0, "func(data []byte) string"}, + {"Dumper", Func, 0, "func(w io.Writer) io.WriteCloser"}, + {"Encode", Func, 0, "func(dst []byte, src []byte) int"}, + {"EncodeToString", Func, 0, "func(src []byte) string"}, + {"EncodedLen", Func, 0, "func(n int) int"}, + {"ErrLength", Var, 0, ""}, + {"InvalidByteError", Type, 0, ""}, + {"NewDecoder", Func, 10, "func(r io.Reader) io.Reader"}, + {"NewEncoder", Func, 10, "func(w io.Writer) io.Writer"}, }, "encoding/json": { - {"(*Decoder).Buffered", Method, 1}, - {"(*Decoder).Decode", Method, 0}, - {"(*Decoder).DisallowUnknownFields", Method, 10}, - {"(*Decoder).InputOffset", Method, 14}, - {"(*Decoder).More", Method, 5}, - {"(*Decoder).Token", Method, 5}, - {"(*Decoder).UseNumber", Method, 1}, - {"(*Encoder).Encode", Method, 0}, - {"(*Encoder).SetEscapeHTML", Method, 7}, - {"(*Encoder).SetIndent", Method, 7}, - {"(*InvalidUTF8Error).Error", Method, 0}, - {"(*InvalidUnmarshalError).Error", Method, 0}, - {"(*MarshalerError).Error", Method, 0}, - {"(*MarshalerError).Unwrap", Method, 13}, - {"(*RawMessage).MarshalJSON", Method, 0}, - {"(*RawMessage).UnmarshalJSON", Method, 0}, - {"(*SyntaxError).Error", Method, 0}, - {"(*UnmarshalFieldError).Error", Method, 0}, - {"(*UnmarshalTypeError).Error", Method, 0}, - {"(*UnsupportedTypeError).Error", Method, 0}, - {"(*UnsupportedValueError).Error", Method, 0}, - {"(Delim).String", Method, 5}, - {"(Number).Float64", Method, 1}, - {"(Number).Int64", Method, 1}, - {"(Number).String", Method, 1}, - {"(RawMessage).MarshalJSON", Method, 8}, - {"Compact", Func, 0}, - {"Decoder", Type, 0}, - {"Delim", Type, 5}, - {"Encoder", Type, 0}, - {"HTMLEscape", Func, 0}, - {"Indent", Func, 0}, - {"InvalidUTF8Error", Type, 0}, - {"InvalidUTF8Error.S", Field, 0}, - {"InvalidUnmarshalError", 
Type, 0}, - {"InvalidUnmarshalError.Type", Field, 0}, - {"Marshal", Func, 0}, - {"MarshalIndent", Func, 0}, - {"Marshaler", Type, 0}, - {"MarshalerError", Type, 0}, - {"MarshalerError.Err", Field, 0}, - {"MarshalerError.Type", Field, 0}, - {"NewDecoder", Func, 0}, - {"NewEncoder", Func, 0}, - {"Number", Type, 1}, - {"RawMessage", Type, 0}, - {"SyntaxError", Type, 0}, - {"SyntaxError.Offset", Field, 0}, - {"Token", Type, 5}, - {"Unmarshal", Func, 0}, - {"UnmarshalFieldError", Type, 0}, - {"UnmarshalFieldError.Field", Field, 0}, - {"UnmarshalFieldError.Key", Field, 0}, - {"UnmarshalFieldError.Type", Field, 0}, - {"UnmarshalTypeError", Type, 0}, - {"UnmarshalTypeError.Field", Field, 8}, - {"UnmarshalTypeError.Offset", Field, 5}, - {"UnmarshalTypeError.Struct", Field, 8}, - {"UnmarshalTypeError.Type", Field, 0}, - {"UnmarshalTypeError.Value", Field, 0}, - {"Unmarshaler", Type, 0}, - {"UnsupportedTypeError", Type, 0}, - {"UnsupportedTypeError.Type", Field, 0}, - {"UnsupportedValueError", Type, 0}, - {"UnsupportedValueError.Str", Field, 0}, - {"UnsupportedValueError.Value", Field, 0}, - {"Valid", Func, 9}, + {"(*Decoder).Buffered", Method, 1, ""}, + {"(*Decoder).Decode", Method, 0, ""}, + {"(*Decoder).DisallowUnknownFields", Method, 10, ""}, + {"(*Decoder).InputOffset", Method, 14, ""}, + {"(*Decoder).More", Method, 5, ""}, + {"(*Decoder).Token", Method, 5, ""}, + {"(*Decoder).UseNumber", Method, 1, ""}, + {"(*Encoder).Encode", Method, 0, ""}, + {"(*Encoder).SetEscapeHTML", Method, 7, ""}, + {"(*Encoder).SetIndent", Method, 7, ""}, + {"(*InvalidUTF8Error).Error", Method, 0, ""}, + {"(*InvalidUnmarshalError).Error", Method, 0, ""}, + {"(*MarshalerError).Error", Method, 0, ""}, + {"(*MarshalerError).Unwrap", Method, 13, ""}, + {"(*RawMessage).MarshalJSON", Method, 0, ""}, + {"(*RawMessage).UnmarshalJSON", Method, 0, ""}, + {"(*SyntaxError).Error", Method, 0, ""}, + {"(*UnmarshalFieldError).Error", Method, 0, ""}, + {"(*UnmarshalTypeError).Error", Method, 0, ""}, + {"(*UnsupportedTypeError).Error", Method, 0, ""}, + {"(*UnsupportedValueError).Error", Method, 0, ""}, + {"(Delim).String", Method, 5, ""}, + {"(Number).Float64", Method, 1, ""}, + {"(Number).Int64", Method, 1, ""}, + {"(Number).String", Method, 1, ""}, + {"(RawMessage).MarshalJSON", Method, 8, ""}, + {"Compact", Func, 0, "func(dst *bytes.Buffer, src []byte) error"}, + {"Decoder", Type, 0, ""}, + {"Delim", Type, 5, ""}, + {"Encoder", Type, 0, ""}, + {"HTMLEscape", Func, 0, "func(dst *bytes.Buffer, src []byte)"}, + {"Indent", Func, 0, "func(dst *bytes.Buffer, src []byte, prefix string, indent string) error"}, + {"InvalidUTF8Error", Type, 0, ""}, + {"InvalidUTF8Error.S", Field, 0, ""}, + {"InvalidUnmarshalError", Type, 0, ""}, + {"InvalidUnmarshalError.Type", Field, 0, ""}, + {"Marshal", Func, 0, "func(v any) ([]byte, error)"}, + {"MarshalIndent", Func, 0, "func(v any, prefix string, indent string) ([]byte, error)"}, + {"Marshaler", Type, 0, ""}, + {"MarshalerError", Type, 0, ""}, + {"MarshalerError.Err", Field, 0, ""}, + {"MarshalerError.Type", Field, 0, ""}, + {"NewDecoder", Func, 0, "func(r io.Reader) *Decoder"}, + {"NewEncoder", Func, 0, "func(w io.Writer) *Encoder"}, + {"Number", Type, 1, ""}, + {"RawMessage", Type, 0, ""}, + {"SyntaxError", Type, 0, ""}, + {"SyntaxError.Offset", Field, 0, ""}, + {"Token", Type, 5, ""}, + {"Unmarshal", Func, 0, "func(data []byte, v any) error"}, + {"UnmarshalFieldError", Type, 0, ""}, + {"UnmarshalFieldError.Field", Field, 0, ""}, + {"UnmarshalFieldError.Key", Field, 0, ""}, + 
{"UnmarshalFieldError.Type", Field, 0, ""}, + {"UnmarshalTypeError", Type, 0, ""}, + {"UnmarshalTypeError.Field", Field, 8, ""}, + {"UnmarshalTypeError.Offset", Field, 5, ""}, + {"UnmarshalTypeError.Struct", Field, 8, ""}, + {"UnmarshalTypeError.Type", Field, 0, ""}, + {"UnmarshalTypeError.Value", Field, 0, ""}, + {"Unmarshaler", Type, 0, ""}, + {"UnsupportedTypeError", Type, 0, ""}, + {"UnsupportedTypeError.Type", Field, 0, ""}, + {"UnsupportedValueError", Type, 0, ""}, + {"UnsupportedValueError.Str", Field, 0, ""}, + {"UnsupportedValueError.Value", Field, 0, ""}, + {"Valid", Func, 9, "func(data []byte) bool"}, }, "encoding/pem": { - {"Block", Type, 0}, - {"Block.Bytes", Field, 0}, - {"Block.Headers", Field, 0}, - {"Block.Type", Field, 0}, - {"Decode", Func, 0}, - {"Encode", Func, 0}, - {"EncodeToMemory", Func, 0}, + {"Block", Type, 0, ""}, + {"Block.Bytes", Field, 0, ""}, + {"Block.Headers", Field, 0, ""}, + {"Block.Type", Field, 0, ""}, + {"Decode", Func, 0, "func(data []byte) (p *Block, rest []byte)"}, + {"Encode", Func, 0, "func(out io.Writer, b *Block) error"}, + {"EncodeToMemory", Func, 0, "func(b *Block) []byte"}, }, "encoding/xml": { - {"(*Decoder).Decode", Method, 0}, - {"(*Decoder).DecodeElement", Method, 0}, - {"(*Decoder).InputOffset", Method, 4}, - {"(*Decoder).InputPos", Method, 19}, - {"(*Decoder).RawToken", Method, 0}, - {"(*Decoder).Skip", Method, 0}, - {"(*Decoder).Token", Method, 0}, - {"(*Encoder).Close", Method, 20}, - {"(*Encoder).Encode", Method, 0}, - {"(*Encoder).EncodeElement", Method, 2}, - {"(*Encoder).EncodeToken", Method, 2}, - {"(*Encoder).Flush", Method, 2}, - {"(*Encoder).Indent", Method, 1}, - {"(*SyntaxError).Error", Method, 0}, - {"(*TagPathError).Error", Method, 0}, - {"(*UnsupportedTypeError).Error", Method, 0}, - {"(CharData).Copy", Method, 0}, - {"(Comment).Copy", Method, 0}, - {"(Directive).Copy", Method, 0}, - {"(ProcInst).Copy", Method, 0}, - {"(StartElement).Copy", Method, 0}, - {"(StartElement).End", Method, 2}, - {"(UnmarshalError).Error", Method, 0}, - {"Attr", Type, 0}, - {"Attr.Name", Field, 0}, - {"Attr.Value", Field, 0}, - {"CharData", Type, 0}, - {"Comment", Type, 0}, - {"CopyToken", Func, 0}, - {"Decoder", Type, 0}, - {"Decoder.AutoClose", Field, 0}, - {"Decoder.CharsetReader", Field, 0}, - {"Decoder.DefaultSpace", Field, 1}, - {"Decoder.Entity", Field, 0}, - {"Decoder.Strict", Field, 0}, - {"Directive", Type, 0}, - {"Encoder", Type, 0}, - {"EndElement", Type, 0}, - {"EndElement.Name", Field, 0}, - {"Escape", Func, 0}, - {"EscapeText", Func, 1}, - {"HTMLAutoClose", Var, 0}, - {"HTMLEntity", Var, 0}, - {"Header", Const, 0}, - {"Marshal", Func, 0}, - {"MarshalIndent", Func, 0}, - {"Marshaler", Type, 2}, - {"MarshalerAttr", Type, 2}, - {"Name", Type, 0}, - {"Name.Local", Field, 0}, - {"Name.Space", Field, 0}, - {"NewDecoder", Func, 0}, - {"NewEncoder", Func, 0}, - {"NewTokenDecoder", Func, 10}, - {"ProcInst", Type, 0}, - {"ProcInst.Inst", Field, 0}, - {"ProcInst.Target", Field, 0}, - {"StartElement", Type, 0}, - {"StartElement.Attr", Field, 0}, - {"StartElement.Name", Field, 0}, - {"SyntaxError", Type, 0}, - {"SyntaxError.Line", Field, 0}, - {"SyntaxError.Msg", Field, 0}, - {"TagPathError", Type, 0}, - {"TagPathError.Field1", Field, 0}, - {"TagPathError.Field2", Field, 0}, - {"TagPathError.Struct", Field, 0}, - {"TagPathError.Tag1", Field, 0}, - {"TagPathError.Tag2", Field, 0}, - {"Token", Type, 0}, - {"TokenReader", Type, 10}, - {"Unmarshal", Func, 0}, - {"UnmarshalError", Type, 0}, - {"Unmarshaler", Type, 2}, - {"UnmarshalerAttr", Type, 
2}, - {"UnsupportedTypeError", Type, 0}, - {"UnsupportedTypeError.Type", Field, 0}, + {"(*Decoder).Decode", Method, 0, ""}, + {"(*Decoder).DecodeElement", Method, 0, ""}, + {"(*Decoder).InputOffset", Method, 4, ""}, + {"(*Decoder).InputPos", Method, 19, ""}, + {"(*Decoder).RawToken", Method, 0, ""}, + {"(*Decoder).Skip", Method, 0, ""}, + {"(*Decoder).Token", Method, 0, ""}, + {"(*Encoder).Close", Method, 20, ""}, + {"(*Encoder).Encode", Method, 0, ""}, + {"(*Encoder).EncodeElement", Method, 2, ""}, + {"(*Encoder).EncodeToken", Method, 2, ""}, + {"(*Encoder).Flush", Method, 2, ""}, + {"(*Encoder).Indent", Method, 1, ""}, + {"(*SyntaxError).Error", Method, 0, ""}, + {"(*TagPathError).Error", Method, 0, ""}, + {"(*UnsupportedTypeError).Error", Method, 0, ""}, + {"(CharData).Copy", Method, 0, ""}, + {"(Comment).Copy", Method, 0, ""}, + {"(Directive).Copy", Method, 0, ""}, + {"(ProcInst).Copy", Method, 0, ""}, + {"(StartElement).Copy", Method, 0, ""}, + {"(StartElement).End", Method, 2, ""}, + {"(UnmarshalError).Error", Method, 0, ""}, + {"Attr", Type, 0, ""}, + {"Attr.Name", Field, 0, ""}, + {"Attr.Value", Field, 0, ""}, + {"CharData", Type, 0, ""}, + {"Comment", Type, 0, ""}, + {"CopyToken", Func, 0, "func(t Token) Token"}, + {"Decoder", Type, 0, ""}, + {"Decoder.AutoClose", Field, 0, ""}, + {"Decoder.CharsetReader", Field, 0, ""}, + {"Decoder.DefaultSpace", Field, 1, ""}, + {"Decoder.Entity", Field, 0, ""}, + {"Decoder.Strict", Field, 0, ""}, + {"Directive", Type, 0, ""}, + {"Encoder", Type, 0, ""}, + {"EndElement", Type, 0, ""}, + {"EndElement.Name", Field, 0, ""}, + {"Escape", Func, 0, "func(w io.Writer, s []byte)"}, + {"EscapeText", Func, 1, "func(w io.Writer, s []byte) error"}, + {"HTMLAutoClose", Var, 0, ""}, + {"HTMLEntity", Var, 0, ""}, + {"Header", Const, 0, ""}, + {"Marshal", Func, 0, "func(v any) ([]byte, error)"}, + {"MarshalIndent", Func, 0, "func(v any, prefix string, indent string) ([]byte, error)"}, + {"Marshaler", Type, 2, ""}, + {"MarshalerAttr", Type, 2, ""}, + {"Name", Type, 0, ""}, + {"Name.Local", Field, 0, ""}, + {"Name.Space", Field, 0, ""}, + {"NewDecoder", Func, 0, "func(r io.Reader) *Decoder"}, + {"NewEncoder", Func, 0, "func(w io.Writer) *Encoder"}, + {"NewTokenDecoder", Func, 10, "func(t TokenReader) *Decoder"}, + {"ProcInst", Type, 0, ""}, + {"ProcInst.Inst", Field, 0, ""}, + {"ProcInst.Target", Field, 0, ""}, + {"StartElement", Type, 0, ""}, + {"StartElement.Attr", Field, 0, ""}, + {"StartElement.Name", Field, 0, ""}, + {"SyntaxError", Type, 0, ""}, + {"SyntaxError.Line", Field, 0, ""}, + {"SyntaxError.Msg", Field, 0, ""}, + {"TagPathError", Type, 0, ""}, + {"TagPathError.Field1", Field, 0, ""}, + {"TagPathError.Field2", Field, 0, ""}, + {"TagPathError.Struct", Field, 0, ""}, + {"TagPathError.Tag1", Field, 0, ""}, + {"TagPathError.Tag2", Field, 0, ""}, + {"Token", Type, 0, ""}, + {"TokenReader", Type, 10, ""}, + {"Unmarshal", Func, 0, "func(data []byte, v any) error"}, + {"UnmarshalError", Type, 0, ""}, + {"Unmarshaler", Type, 2, ""}, + {"UnmarshalerAttr", Type, 2, ""}, + {"UnsupportedTypeError", Type, 0, ""}, + {"UnsupportedTypeError.Type", Field, 0, ""}, }, "errors": { - {"As", Func, 13}, - {"ErrUnsupported", Var, 21}, - {"Is", Func, 13}, - {"Join", Func, 20}, - {"New", Func, 0}, - {"Unwrap", Func, 13}, + {"As", Func, 13, "func(err error, target any) bool"}, + {"ErrUnsupported", Var, 21, ""}, + {"Is", Func, 13, "func(err error, target error) bool"}, + {"Join", Func, 20, "func(errs ...error) error"}, + {"New", Func, 0, "func(text string) error"}, + {"Unwrap", 
Func, 13, "func(err error) error"}, }, "expvar": { - {"(*Float).Add", Method, 0}, - {"(*Float).Set", Method, 0}, - {"(*Float).String", Method, 0}, - {"(*Float).Value", Method, 8}, - {"(*Int).Add", Method, 0}, - {"(*Int).Set", Method, 0}, - {"(*Int).String", Method, 0}, - {"(*Int).Value", Method, 8}, - {"(*Map).Add", Method, 0}, - {"(*Map).AddFloat", Method, 0}, - {"(*Map).Delete", Method, 12}, - {"(*Map).Do", Method, 0}, - {"(*Map).Get", Method, 0}, - {"(*Map).Init", Method, 0}, - {"(*Map).Set", Method, 0}, - {"(*Map).String", Method, 0}, - {"(*String).Set", Method, 0}, - {"(*String).String", Method, 0}, - {"(*String).Value", Method, 8}, - {"(Func).String", Method, 0}, - {"(Func).Value", Method, 8}, - {"Do", Func, 0}, - {"Float", Type, 0}, - {"Func", Type, 0}, - {"Get", Func, 0}, - {"Handler", Func, 8}, - {"Int", Type, 0}, - {"KeyValue", Type, 0}, - {"KeyValue.Key", Field, 0}, - {"KeyValue.Value", Field, 0}, - {"Map", Type, 0}, - {"NewFloat", Func, 0}, - {"NewInt", Func, 0}, - {"NewMap", Func, 0}, - {"NewString", Func, 0}, - {"Publish", Func, 0}, - {"String", Type, 0}, - {"Var", Type, 0}, + {"(*Float).Add", Method, 0, ""}, + {"(*Float).Set", Method, 0, ""}, + {"(*Float).String", Method, 0, ""}, + {"(*Float).Value", Method, 8, ""}, + {"(*Int).Add", Method, 0, ""}, + {"(*Int).Set", Method, 0, ""}, + {"(*Int).String", Method, 0, ""}, + {"(*Int).Value", Method, 8, ""}, + {"(*Map).Add", Method, 0, ""}, + {"(*Map).AddFloat", Method, 0, ""}, + {"(*Map).Delete", Method, 12, ""}, + {"(*Map).Do", Method, 0, ""}, + {"(*Map).Get", Method, 0, ""}, + {"(*Map).Init", Method, 0, ""}, + {"(*Map).Set", Method, 0, ""}, + {"(*Map).String", Method, 0, ""}, + {"(*String).Set", Method, 0, ""}, + {"(*String).String", Method, 0, ""}, + {"(*String).Value", Method, 8, ""}, + {"(Func).String", Method, 0, ""}, + {"(Func).Value", Method, 8, ""}, + {"Do", Func, 0, "func(f func(KeyValue))"}, + {"Float", Type, 0, ""}, + {"Func", Type, 0, ""}, + {"Get", Func, 0, "func(name string) Var"}, + {"Handler", Func, 8, "func() http.Handler"}, + {"Int", Type, 0, ""}, + {"KeyValue", Type, 0, ""}, + {"KeyValue.Key", Field, 0, ""}, + {"KeyValue.Value", Field, 0, ""}, + {"Map", Type, 0, ""}, + {"NewFloat", Func, 0, "func(name string) *Float"}, + {"NewInt", Func, 0, "func(name string) *Int"}, + {"NewMap", Func, 0, "func(name string) *Map"}, + {"NewString", Func, 0, "func(name string) *String"}, + {"Publish", Func, 0, "func(name string, v Var)"}, + {"String", Type, 0, ""}, + {"Var", Type, 0, ""}, }, "flag": { - {"(*FlagSet).Arg", Method, 0}, - {"(*FlagSet).Args", Method, 0}, - {"(*FlagSet).Bool", Method, 0}, - {"(*FlagSet).BoolFunc", Method, 21}, - {"(*FlagSet).BoolVar", Method, 0}, - {"(*FlagSet).Duration", Method, 0}, - {"(*FlagSet).DurationVar", Method, 0}, - {"(*FlagSet).ErrorHandling", Method, 10}, - {"(*FlagSet).Float64", Method, 0}, - {"(*FlagSet).Float64Var", Method, 0}, - {"(*FlagSet).Func", Method, 16}, - {"(*FlagSet).Init", Method, 0}, - {"(*FlagSet).Int", Method, 0}, - {"(*FlagSet).Int64", Method, 0}, - {"(*FlagSet).Int64Var", Method, 0}, - {"(*FlagSet).IntVar", Method, 0}, - {"(*FlagSet).Lookup", Method, 0}, - {"(*FlagSet).NArg", Method, 0}, - {"(*FlagSet).NFlag", Method, 0}, - {"(*FlagSet).Name", Method, 10}, - {"(*FlagSet).Output", Method, 10}, - {"(*FlagSet).Parse", Method, 0}, - {"(*FlagSet).Parsed", Method, 0}, - {"(*FlagSet).PrintDefaults", Method, 0}, - {"(*FlagSet).Set", Method, 0}, - {"(*FlagSet).SetOutput", Method, 0}, - {"(*FlagSet).String", Method, 0}, - {"(*FlagSet).StringVar", Method, 0}, - 
{"(*FlagSet).TextVar", Method, 19}, - {"(*FlagSet).Uint", Method, 0}, - {"(*FlagSet).Uint64", Method, 0}, - {"(*FlagSet).Uint64Var", Method, 0}, - {"(*FlagSet).UintVar", Method, 0}, - {"(*FlagSet).Var", Method, 0}, - {"(*FlagSet).Visit", Method, 0}, - {"(*FlagSet).VisitAll", Method, 0}, - {"Arg", Func, 0}, - {"Args", Func, 0}, - {"Bool", Func, 0}, - {"BoolFunc", Func, 21}, - {"BoolVar", Func, 0}, - {"CommandLine", Var, 2}, - {"ContinueOnError", Const, 0}, - {"Duration", Func, 0}, - {"DurationVar", Func, 0}, - {"ErrHelp", Var, 0}, - {"ErrorHandling", Type, 0}, - {"ExitOnError", Const, 0}, - {"Flag", Type, 0}, - {"Flag.DefValue", Field, 0}, - {"Flag.Name", Field, 0}, - {"Flag.Usage", Field, 0}, - {"Flag.Value", Field, 0}, - {"FlagSet", Type, 0}, - {"FlagSet.Usage", Field, 0}, - {"Float64", Func, 0}, - {"Float64Var", Func, 0}, - {"Func", Func, 16}, - {"Getter", Type, 2}, - {"Int", Func, 0}, - {"Int64", Func, 0}, - {"Int64Var", Func, 0}, - {"IntVar", Func, 0}, - {"Lookup", Func, 0}, - {"NArg", Func, 0}, - {"NFlag", Func, 0}, - {"NewFlagSet", Func, 0}, - {"PanicOnError", Const, 0}, - {"Parse", Func, 0}, - {"Parsed", Func, 0}, - {"PrintDefaults", Func, 0}, - {"Set", Func, 0}, - {"String", Func, 0}, - {"StringVar", Func, 0}, - {"TextVar", Func, 19}, - {"Uint", Func, 0}, - {"Uint64", Func, 0}, - {"Uint64Var", Func, 0}, - {"UintVar", Func, 0}, - {"UnquoteUsage", Func, 5}, - {"Usage", Var, 0}, - {"Value", Type, 0}, - {"Var", Func, 0}, - {"Visit", Func, 0}, - {"VisitAll", Func, 0}, + {"(*FlagSet).Arg", Method, 0, ""}, + {"(*FlagSet).Args", Method, 0, ""}, + {"(*FlagSet).Bool", Method, 0, ""}, + {"(*FlagSet).BoolFunc", Method, 21, ""}, + {"(*FlagSet).BoolVar", Method, 0, ""}, + {"(*FlagSet).Duration", Method, 0, ""}, + {"(*FlagSet).DurationVar", Method, 0, ""}, + {"(*FlagSet).ErrorHandling", Method, 10, ""}, + {"(*FlagSet).Float64", Method, 0, ""}, + {"(*FlagSet).Float64Var", Method, 0, ""}, + {"(*FlagSet).Func", Method, 16, ""}, + {"(*FlagSet).Init", Method, 0, ""}, + {"(*FlagSet).Int", Method, 0, ""}, + {"(*FlagSet).Int64", Method, 0, ""}, + {"(*FlagSet).Int64Var", Method, 0, ""}, + {"(*FlagSet).IntVar", Method, 0, ""}, + {"(*FlagSet).Lookup", Method, 0, ""}, + {"(*FlagSet).NArg", Method, 0, ""}, + {"(*FlagSet).NFlag", Method, 0, ""}, + {"(*FlagSet).Name", Method, 10, ""}, + {"(*FlagSet).Output", Method, 10, ""}, + {"(*FlagSet).Parse", Method, 0, ""}, + {"(*FlagSet).Parsed", Method, 0, ""}, + {"(*FlagSet).PrintDefaults", Method, 0, ""}, + {"(*FlagSet).Set", Method, 0, ""}, + {"(*FlagSet).SetOutput", Method, 0, ""}, + {"(*FlagSet).String", Method, 0, ""}, + {"(*FlagSet).StringVar", Method, 0, ""}, + {"(*FlagSet).TextVar", Method, 19, ""}, + {"(*FlagSet).Uint", Method, 0, ""}, + {"(*FlagSet).Uint64", Method, 0, ""}, + {"(*FlagSet).Uint64Var", Method, 0, ""}, + {"(*FlagSet).UintVar", Method, 0, ""}, + {"(*FlagSet).Var", Method, 0, ""}, + {"(*FlagSet).Visit", Method, 0, ""}, + {"(*FlagSet).VisitAll", Method, 0, ""}, + {"Arg", Func, 0, "func(i int) string"}, + {"Args", Func, 0, "func() []string"}, + {"Bool", Func, 0, "func(name string, value bool, usage string) *bool"}, + {"BoolFunc", Func, 21, "func(name string, usage string, fn func(string) error)"}, + {"BoolVar", Func, 0, "func(p *bool, name string, value bool, usage string)"}, + {"CommandLine", Var, 2, ""}, + {"ContinueOnError", Const, 0, ""}, + {"Duration", Func, 0, "func(name string, value time.Duration, usage string) *time.Duration"}, + {"DurationVar", Func, 0, "func(p *time.Duration, name string, value time.Duration, usage string)"}, + 
{"ErrHelp", Var, 0, ""}, + {"ErrorHandling", Type, 0, ""}, + {"ExitOnError", Const, 0, ""}, + {"Flag", Type, 0, ""}, + {"Flag.DefValue", Field, 0, ""}, + {"Flag.Name", Field, 0, ""}, + {"Flag.Usage", Field, 0, ""}, + {"Flag.Value", Field, 0, ""}, + {"FlagSet", Type, 0, ""}, + {"FlagSet.Usage", Field, 0, ""}, + {"Float64", Func, 0, "func(name string, value float64, usage string) *float64"}, + {"Float64Var", Func, 0, "func(p *float64, name string, value float64, usage string)"}, + {"Func", Func, 16, "func(name string, usage string, fn func(string) error)"}, + {"Getter", Type, 2, ""}, + {"Int", Func, 0, "func(name string, value int, usage string) *int"}, + {"Int64", Func, 0, "func(name string, value int64, usage string) *int64"}, + {"Int64Var", Func, 0, "func(p *int64, name string, value int64, usage string)"}, + {"IntVar", Func, 0, "func(p *int, name string, value int, usage string)"}, + {"Lookup", Func, 0, "func(name string) *Flag"}, + {"NArg", Func, 0, "func() int"}, + {"NFlag", Func, 0, "func() int"}, + {"NewFlagSet", Func, 0, "func(name string, errorHandling ErrorHandling) *FlagSet"}, + {"PanicOnError", Const, 0, ""}, + {"Parse", Func, 0, "func()"}, + {"Parsed", Func, 0, "func() bool"}, + {"PrintDefaults", Func, 0, "func()"}, + {"Set", Func, 0, "func(name string, value string) error"}, + {"String", Func, 0, "func(name string, value string, usage string) *string"}, + {"StringVar", Func, 0, "func(p *string, name string, value string, usage string)"}, + {"TextVar", Func, 19, "func(p encoding.TextUnmarshaler, name string, value encoding.TextMarshaler, usage string)"}, + {"Uint", Func, 0, "func(name string, value uint, usage string) *uint"}, + {"Uint64", Func, 0, "func(name string, value uint64, usage string) *uint64"}, + {"Uint64Var", Func, 0, "func(p *uint64, name string, value uint64, usage string)"}, + {"UintVar", Func, 0, "func(p *uint, name string, value uint, usage string)"}, + {"UnquoteUsage", Func, 5, "func(flag *Flag) (name string, usage string)"}, + {"Usage", Var, 0, ""}, + {"Value", Type, 0, ""}, + {"Var", Func, 0, "func(value Value, name string, usage string)"}, + {"Visit", Func, 0, "func(fn func(*Flag))"}, + {"VisitAll", Func, 0, "func(fn func(*Flag))"}, }, "fmt": { - {"Append", Func, 19}, - {"Appendf", Func, 19}, - {"Appendln", Func, 19}, - {"Errorf", Func, 0}, - {"FormatString", Func, 20}, - {"Formatter", Type, 0}, - {"Fprint", Func, 0}, - {"Fprintf", Func, 0}, - {"Fprintln", Func, 0}, - {"Fscan", Func, 0}, - {"Fscanf", Func, 0}, - {"Fscanln", Func, 0}, - {"GoStringer", Type, 0}, - {"Print", Func, 0}, - {"Printf", Func, 0}, - {"Println", Func, 0}, - {"Scan", Func, 0}, - {"ScanState", Type, 0}, - {"Scanf", Func, 0}, - {"Scanln", Func, 0}, - {"Scanner", Type, 0}, - {"Sprint", Func, 0}, - {"Sprintf", Func, 0}, - {"Sprintln", Func, 0}, - {"Sscan", Func, 0}, - {"Sscanf", Func, 0}, - {"Sscanln", Func, 0}, - {"State", Type, 0}, - {"Stringer", Type, 0}, + {"Append", Func, 19, "func(b []byte, a ...any) []byte"}, + {"Appendf", Func, 19, "func(b []byte, format string, a ...any) []byte"}, + {"Appendln", Func, 19, "func(b []byte, a ...any) []byte"}, + {"Errorf", Func, 0, "func(format string, a ...any) error"}, + {"FormatString", Func, 20, "func(state State, verb rune) string"}, + {"Formatter", Type, 0, ""}, + {"Fprint", Func, 0, "func(w io.Writer, a ...any) (n int, err error)"}, + {"Fprintf", Func, 0, "func(w io.Writer, format string, a ...any) (n int, err error)"}, + {"Fprintln", Func, 0, "func(w io.Writer, a ...any) (n int, err error)"}, + {"Fscan", Func, 0, "func(r io.Reader, a ...any) 
(n int, err error)"}, + {"Fscanf", Func, 0, "func(r io.Reader, format string, a ...any) (n int, err error)"}, + {"Fscanln", Func, 0, "func(r io.Reader, a ...any) (n int, err error)"}, + {"GoStringer", Type, 0, ""}, + {"Print", Func, 0, "func(a ...any) (n int, err error)"}, + {"Printf", Func, 0, "func(format string, a ...any) (n int, err error)"}, + {"Println", Func, 0, "func(a ...any) (n int, err error)"}, + {"Scan", Func, 0, "func(a ...any) (n int, err error)"}, + {"ScanState", Type, 0, ""}, + {"Scanf", Func, 0, "func(format string, a ...any) (n int, err error)"}, + {"Scanln", Func, 0, "func(a ...any) (n int, err error)"}, + {"Scanner", Type, 0, ""}, + {"Sprint", Func, 0, "func(a ...any) string"}, + {"Sprintf", Func, 0, "func(format string, a ...any) string"}, + {"Sprintln", Func, 0, "func(a ...any) string"}, + {"Sscan", Func, 0, "func(str string, a ...any) (n int, err error)"}, + {"Sscanf", Func, 0, "func(str string, format string, a ...any) (n int, err error)"}, + {"Sscanln", Func, 0, "func(str string, a ...any) (n int, err error)"}, + {"State", Type, 0, ""}, + {"Stringer", Type, 0, ""}, }, "go/ast": { - {"(*ArrayType).End", Method, 0}, - {"(*ArrayType).Pos", Method, 0}, - {"(*AssignStmt).End", Method, 0}, - {"(*AssignStmt).Pos", Method, 0}, - {"(*BadDecl).End", Method, 0}, - {"(*BadDecl).Pos", Method, 0}, - {"(*BadExpr).End", Method, 0}, - {"(*BadExpr).Pos", Method, 0}, - {"(*BadStmt).End", Method, 0}, - {"(*BadStmt).Pos", Method, 0}, - {"(*BasicLit).End", Method, 0}, - {"(*BasicLit).Pos", Method, 0}, - {"(*BinaryExpr).End", Method, 0}, - {"(*BinaryExpr).Pos", Method, 0}, - {"(*BlockStmt).End", Method, 0}, - {"(*BlockStmt).Pos", Method, 0}, - {"(*BranchStmt).End", Method, 0}, - {"(*BranchStmt).Pos", Method, 0}, - {"(*CallExpr).End", Method, 0}, - {"(*CallExpr).Pos", Method, 0}, - {"(*CaseClause).End", Method, 0}, - {"(*CaseClause).Pos", Method, 0}, - {"(*ChanType).End", Method, 0}, - {"(*ChanType).Pos", Method, 0}, - {"(*CommClause).End", Method, 0}, - {"(*CommClause).Pos", Method, 0}, - {"(*Comment).End", Method, 0}, - {"(*Comment).Pos", Method, 0}, - {"(*CommentGroup).End", Method, 0}, - {"(*CommentGroup).Pos", Method, 0}, - {"(*CommentGroup).Text", Method, 0}, - {"(*CompositeLit).End", Method, 0}, - {"(*CompositeLit).Pos", Method, 0}, - {"(*DeclStmt).End", Method, 0}, - {"(*DeclStmt).Pos", Method, 0}, - {"(*DeferStmt).End", Method, 0}, - {"(*DeferStmt).Pos", Method, 0}, - {"(*Ellipsis).End", Method, 0}, - {"(*Ellipsis).Pos", Method, 0}, - {"(*EmptyStmt).End", Method, 0}, - {"(*EmptyStmt).Pos", Method, 0}, - {"(*ExprStmt).End", Method, 0}, - {"(*ExprStmt).Pos", Method, 0}, - {"(*Field).End", Method, 0}, - {"(*Field).Pos", Method, 0}, - {"(*FieldList).End", Method, 0}, - {"(*FieldList).NumFields", Method, 0}, - {"(*FieldList).Pos", Method, 0}, - {"(*File).End", Method, 0}, - {"(*File).Pos", Method, 0}, - {"(*ForStmt).End", Method, 0}, - {"(*ForStmt).Pos", Method, 0}, - {"(*FuncDecl).End", Method, 0}, - {"(*FuncDecl).Pos", Method, 0}, - {"(*FuncLit).End", Method, 0}, - {"(*FuncLit).Pos", Method, 0}, - {"(*FuncType).End", Method, 0}, - {"(*FuncType).Pos", Method, 0}, - {"(*GenDecl).End", Method, 0}, - {"(*GenDecl).Pos", Method, 0}, - {"(*GoStmt).End", Method, 0}, - {"(*GoStmt).Pos", Method, 0}, - {"(*Ident).End", Method, 0}, - {"(*Ident).IsExported", Method, 0}, - {"(*Ident).Pos", Method, 0}, - {"(*Ident).String", Method, 0}, - {"(*IfStmt).End", Method, 0}, - {"(*IfStmt).Pos", Method, 0}, - {"(*ImportSpec).End", Method, 0}, - {"(*ImportSpec).Pos", Method, 0}, - {"(*IncDecStmt).End", 
Method, 0}, - {"(*IncDecStmt).Pos", Method, 0}, - {"(*IndexExpr).End", Method, 0}, - {"(*IndexExpr).Pos", Method, 0}, - {"(*IndexListExpr).End", Method, 18}, - {"(*IndexListExpr).Pos", Method, 18}, - {"(*InterfaceType).End", Method, 0}, - {"(*InterfaceType).Pos", Method, 0}, - {"(*KeyValueExpr).End", Method, 0}, - {"(*KeyValueExpr).Pos", Method, 0}, - {"(*LabeledStmt).End", Method, 0}, - {"(*LabeledStmt).Pos", Method, 0}, - {"(*MapType).End", Method, 0}, - {"(*MapType).Pos", Method, 0}, - {"(*Object).Pos", Method, 0}, - {"(*Package).End", Method, 0}, - {"(*Package).Pos", Method, 0}, - {"(*ParenExpr).End", Method, 0}, - {"(*ParenExpr).Pos", Method, 0}, - {"(*RangeStmt).End", Method, 0}, - {"(*RangeStmt).Pos", Method, 0}, - {"(*ReturnStmt).End", Method, 0}, - {"(*ReturnStmt).Pos", Method, 0}, - {"(*Scope).Insert", Method, 0}, - {"(*Scope).Lookup", Method, 0}, - {"(*Scope).String", Method, 0}, - {"(*SelectStmt).End", Method, 0}, - {"(*SelectStmt).Pos", Method, 0}, - {"(*SelectorExpr).End", Method, 0}, - {"(*SelectorExpr).Pos", Method, 0}, - {"(*SendStmt).End", Method, 0}, - {"(*SendStmt).Pos", Method, 0}, - {"(*SliceExpr).End", Method, 0}, - {"(*SliceExpr).Pos", Method, 0}, - {"(*StarExpr).End", Method, 0}, - {"(*StarExpr).Pos", Method, 0}, - {"(*StructType).End", Method, 0}, - {"(*StructType).Pos", Method, 0}, - {"(*SwitchStmt).End", Method, 0}, - {"(*SwitchStmt).Pos", Method, 0}, - {"(*TypeAssertExpr).End", Method, 0}, - {"(*TypeAssertExpr).Pos", Method, 0}, - {"(*TypeSpec).End", Method, 0}, - {"(*TypeSpec).Pos", Method, 0}, - {"(*TypeSwitchStmt).End", Method, 0}, - {"(*TypeSwitchStmt).Pos", Method, 0}, - {"(*UnaryExpr).End", Method, 0}, - {"(*UnaryExpr).Pos", Method, 0}, - {"(*ValueSpec).End", Method, 0}, - {"(*ValueSpec).Pos", Method, 0}, - {"(CommentMap).Comments", Method, 1}, - {"(CommentMap).Filter", Method, 1}, - {"(CommentMap).String", Method, 1}, - {"(CommentMap).Update", Method, 1}, - {"(ObjKind).String", Method, 0}, - {"ArrayType", Type, 0}, - {"ArrayType.Elt", Field, 0}, - {"ArrayType.Lbrack", Field, 0}, - {"ArrayType.Len", Field, 0}, - {"AssignStmt", Type, 0}, - {"AssignStmt.Lhs", Field, 0}, - {"AssignStmt.Rhs", Field, 0}, - {"AssignStmt.Tok", Field, 0}, - {"AssignStmt.TokPos", Field, 0}, - {"Bad", Const, 0}, - {"BadDecl", Type, 0}, - {"BadDecl.From", Field, 0}, - {"BadDecl.To", Field, 0}, - {"BadExpr", Type, 0}, - {"BadExpr.From", Field, 0}, - {"BadExpr.To", Field, 0}, - {"BadStmt", Type, 0}, - {"BadStmt.From", Field, 0}, - {"BadStmt.To", Field, 0}, - {"BasicLit", Type, 0}, - {"BasicLit.Kind", Field, 0}, - {"BasicLit.Value", Field, 0}, - {"BasicLit.ValuePos", Field, 0}, - {"BinaryExpr", Type, 0}, - {"BinaryExpr.Op", Field, 0}, - {"BinaryExpr.OpPos", Field, 0}, - {"BinaryExpr.X", Field, 0}, - {"BinaryExpr.Y", Field, 0}, - {"BlockStmt", Type, 0}, - {"BlockStmt.Lbrace", Field, 0}, - {"BlockStmt.List", Field, 0}, - {"BlockStmt.Rbrace", Field, 0}, - {"BranchStmt", Type, 0}, - {"BranchStmt.Label", Field, 0}, - {"BranchStmt.Tok", Field, 0}, - {"BranchStmt.TokPos", Field, 0}, - {"CallExpr", Type, 0}, - {"CallExpr.Args", Field, 0}, - {"CallExpr.Ellipsis", Field, 0}, - {"CallExpr.Fun", Field, 0}, - {"CallExpr.Lparen", Field, 0}, - {"CallExpr.Rparen", Field, 0}, - {"CaseClause", Type, 0}, - {"CaseClause.Body", Field, 0}, - {"CaseClause.Case", Field, 0}, - {"CaseClause.Colon", Field, 0}, - {"CaseClause.List", Field, 0}, - {"ChanDir", Type, 0}, - {"ChanType", Type, 0}, - {"ChanType.Arrow", Field, 1}, - {"ChanType.Begin", Field, 0}, - {"ChanType.Dir", Field, 0}, - {"ChanType.Value", Field, 
0}, - {"CommClause", Type, 0}, - {"CommClause.Body", Field, 0}, - {"CommClause.Case", Field, 0}, - {"CommClause.Colon", Field, 0}, - {"CommClause.Comm", Field, 0}, - {"Comment", Type, 0}, - {"Comment.Slash", Field, 0}, - {"Comment.Text", Field, 0}, - {"CommentGroup", Type, 0}, - {"CommentGroup.List", Field, 0}, - {"CommentMap", Type, 1}, - {"CompositeLit", Type, 0}, - {"CompositeLit.Elts", Field, 0}, - {"CompositeLit.Incomplete", Field, 11}, - {"CompositeLit.Lbrace", Field, 0}, - {"CompositeLit.Rbrace", Field, 0}, - {"CompositeLit.Type", Field, 0}, - {"Con", Const, 0}, - {"Decl", Type, 0}, - {"DeclStmt", Type, 0}, - {"DeclStmt.Decl", Field, 0}, - {"DeferStmt", Type, 0}, - {"DeferStmt.Call", Field, 0}, - {"DeferStmt.Defer", Field, 0}, - {"Ellipsis", Type, 0}, - {"Ellipsis.Ellipsis", Field, 0}, - {"Ellipsis.Elt", Field, 0}, - {"EmptyStmt", Type, 0}, - {"EmptyStmt.Implicit", Field, 5}, - {"EmptyStmt.Semicolon", Field, 0}, - {"Expr", Type, 0}, - {"ExprStmt", Type, 0}, - {"ExprStmt.X", Field, 0}, - {"Field", Type, 0}, - {"Field.Comment", Field, 0}, - {"Field.Doc", Field, 0}, - {"Field.Names", Field, 0}, - {"Field.Tag", Field, 0}, - {"Field.Type", Field, 0}, - {"FieldFilter", Type, 0}, - {"FieldList", Type, 0}, - {"FieldList.Closing", Field, 0}, - {"FieldList.List", Field, 0}, - {"FieldList.Opening", Field, 0}, - {"File", Type, 0}, - {"File.Comments", Field, 0}, - {"File.Decls", Field, 0}, - {"File.Doc", Field, 0}, - {"File.FileEnd", Field, 20}, - {"File.FileStart", Field, 20}, - {"File.GoVersion", Field, 21}, - {"File.Imports", Field, 0}, - {"File.Name", Field, 0}, - {"File.Package", Field, 0}, - {"File.Scope", Field, 0}, - {"File.Unresolved", Field, 0}, - {"FileExports", Func, 0}, - {"Filter", Type, 0}, - {"FilterDecl", Func, 0}, - {"FilterFile", Func, 0}, - {"FilterFuncDuplicates", Const, 0}, - {"FilterImportDuplicates", Const, 0}, - {"FilterPackage", Func, 0}, - {"FilterUnassociatedComments", Const, 0}, - {"ForStmt", Type, 0}, - {"ForStmt.Body", Field, 0}, - {"ForStmt.Cond", Field, 0}, - {"ForStmt.For", Field, 0}, - {"ForStmt.Init", Field, 0}, - {"ForStmt.Post", Field, 0}, - {"Fprint", Func, 0}, - {"Fun", Const, 0}, - {"FuncDecl", Type, 0}, - {"FuncDecl.Body", Field, 0}, - {"FuncDecl.Doc", Field, 0}, - {"FuncDecl.Name", Field, 0}, - {"FuncDecl.Recv", Field, 0}, - {"FuncDecl.Type", Field, 0}, - {"FuncLit", Type, 0}, - {"FuncLit.Body", Field, 0}, - {"FuncLit.Type", Field, 0}, - {"FuncType", Type, 0}, - {"FuncType.Func", Field, 0}, - {"FuncType.Params", Field, 0}, - {"FuncType.Results", Field, 0}, - {"FuncType.TypeParams", Field, 18}, - {"GenDecl", Type, 0}, - {"GenDecl.Doc", Field, 0}, - {"GenDecl.Lparen", Field, 0}, - {"GenDecl.Rparen", Field, 0}, - {"GenDecl.Specs", Field, 0}, - {"GenDecl.Tok", Field, 0}, - {"GenDecl.TokPos", Field, 0}, - {"GoStmt", Type, 0}, - {"GoStmt.Call", Field, 0}, - {"GoStmt.Go", Field, 0}, - {"Ident", Type, 0}, - {"Ident.Name", Field, 0}, - {"Ident.NamePos", Field, 0}, - {"Ident.Obj", Field, 0}, - {"IfStmt", Type, 0}, - {"IfStmt.Body", Field, 0}, - {"IfStmt.Cond", Field, 0}, - {"IfStmt.Else", Field, 0}, - {"IfStmt.If", Field, 0}, - {"IfStmt.Init", Field, 0}, - {"ImportSpec", Type, 0}, - {"ImportSpec.Comment", Field, 0}, - {"ImportSpec.Doc", Field, 0}, - {"ImportSpec.EndPos", Field, 0}, - {"ImportSpec.Name", Field, 0}, - {"ImportSpec.Path", Field, 0}, - {"Importer", Type, 0}, - {"IncDecStmt", Type, 0}, - {"IncDecStmt.Tok", Field, 0}, - {"IncDecStmt.TokPos", Field, 0}, - {"IncDecStmt.X", Field, 0}, - {"IndexExpr", Type, 0}, - {"IndexExpr.Index", Field, 0}, - 
{"IndexExpr.Lbrack", Field, 0}, - {"IndexExpr.Rbrack", Field, 0}, - {"IndexExpr.X", Field, 0}, - {"IndexListExpr", Type, 18}, - {"IndexListExpr.Indices", Field, 18}, - {"IndexListExpr.Lbrack", Field, 18}, - {"IndexListExpr.Rbrack", Field, 18}, - {"IndexListExpr.X", Field, 18}, - {"Inspect", Func, 0}, - {"InterfaceType", Type, 0}, - {"InterfaceType.Incomplete", Field, 0}, - {"InterfaceType.Interface", Field, 0}, - {"InterfaceType.Methods", Field, 0}, - {"IsExported", Func, 0}, - {"IsGenerated", Func, 21}, - {"KeyValueExpr", Type, 0}, - {"KeyValueExpr.Colon", Field, 0}, - {"KeyValueExpr.Key", Field, 0}, - {"KeyValueExpr.Value", Field, 0}, - {"LabeledStmt", Type, 0}, - {"LabeledStmt.Colon", Field, 0}, - {"LabeledStmt.Label", Field, 0}, - {"LabeledStmt.Stmt", Field, 0}, - {"Lbl", Const, 0}, - {"MapType", Type, 0}, - {"MapType.Key", Field, 0}, - {"MapType.Map", Field, 0}, - {"MapType.Value", Field, 0}, - {"MergeMode", Type, 0}, - {"MergePackageFiles", Func, 0}, - {"NewCommentMap", Func, 1}, - {"NewIdent", Func, 0}, - {"NewObj", Func, 0}, - {"NewPackage", Func, 0}, - {"NewScope", Func, 0}, - {"Node", Type, 0}, - {"NotNilFilter", Func, 0}, - {"ObjKind", Type, 0}, - {"Object", Type, 0}, - {"Object.Data", Field, 0}, - {"Object.Decl", Field, 0}, - {"Object.Kind", Field, 0}, - {"Object.Name", Field, 0}, - {"Object.Type", Field, 0}, - {"Package", Type, 0}, - {"Package.Files", Field, 0}, - {"Package.Imports", Field, 0}, - {"Package.Name", Field, 0}, - {"Package.Scope", Field, 0}, - {"PackageExports", Func, 0}, - {"ParenExpr", Type, 0}, - {"ParenExpr.Lparen", Field, 0}, - {"ParenExpr.Rparen", Field, 0}, - {"ParenExpr.X", Field, 0}, - {"Pkg", Const, 0}, - {"Preorder", Func, 23}, - {"Print", Func, 0}, - {"RECV", Const, 0}, - {"RangeStmt", Type, 0}, - {"RangeStmt.Body", Field, 0}, - {"RangeStmt.For", Field, 0}, - {"RangeStmt.Key", Field, 0}, - {"RangeStmt.Range", Field, 20}, - {"RangeStmt.Tok", Field, 0}, - {"RangeStmt.TokPos", Field, 0}, - {"RangeStmt.Value", Field, 0}, - {"RangeStmt.X", Field, 0}, - {"ReturnStmt", Type, 0}, - {"ReturnStmt.Results", Field, 0}, - {"ReturnStmt.Return", Field, 0}, - {"SEND", Const, 0}, - {"Scope", Type, 0}, - {"Scope.Objects", Field, 0}, - {"Scope.Outer", Field, 0}, - {"SelectStmt", Type, 0}, - {"SelectStmt.Body", Field, 0}, - {"SelectStmt.Select", Field, 0}, - {"SelectorExpr", Type, 0}, - {"SelectorExpr.Sel", Field, 0}, - {"SelectorExpr.X", Field, 0}, - {"SendStmt", Type, 0}, - {"SendStmt.Arrow", Field, 0}, - {"SendStmt.Chan", Field, 0}, - {"SendStmt.Value", Field, 0}, - {"SliceExpr", Type, 0}, - {"SliceExpr.High", Field, 0}, - {"SliceExpr.Lbrack", Field, 0}, - {"SliceExpr.Low", Field, 0}, - {"SliceExpr.Max", Field, 2}, - {"SliceExpr.Rbrack", Field, 0}, - {"SliceExpr.Slice3", Field, 2}, - {"SliceExpr.X", Field, 0}, - {"SortImports", Func, 0}, - {"Spec", Type, 0}, - {"StarExpr", Type, 0}, - {"StarExpr.Star", Field, 0}, - {"StarExpr.X", Field, 0}, - {"Stmt", Type, 0}, - {"StructType", Type, 0}, - {"StructType.Fields", Field, 0}, - {"StructType.Incomplete", Field, 0}, - {"StructType.Struct", Field, 0}, - {"SwitchStmt", Type, 0}, - {"SwitchStmt.Body", Field, 0}, - {"SwitchStmt.Init", Field, 0}, - {"SwitchStmt.Switch", Field, 0}, - {"SwitchStmt.Tag", Field, 0}, - {"Typ", Const, 0}, - {"TypeAssertExpr", Type, 0}, - {"TypeAssertExpr.Lparen", Field, 2}, - {"TypeAssertExpr.Rparen", Field, 2}, - {"TypeAssertExpr.Type", Field, 0}, - {"TypeAssertExpr.X", Field, 0}, - {"TypeSpec", Type, 0}, - {"TypeSpec.Assign", Field, 9}, - {"TypeSpec.Comment", Field, 0}, - {"TypeSpec.Doc", Field, 0}, 
- {"TypeSpec.Name", Field, 0}, - {"TypeSpec.Type", Field, 0}, - {"TypeSpec.TypeParams", Field, 18}, - {"TypeSwitchStmt", Type, 0}, - {"TypeSwitchStmt.Assign", Field, 0}, - {"TypeSwitchStmt.Body", Field, 0}, - {"TypeSwitchStmt.Init", Field, 0}, - {"TypeSwitchStmt.Switch", Field, 0}, - {"UnaryExpr", Type, 0}, - {"UnaryExpr.Op", Field, 0}, - {"UnaryExpr.OpPos", Field, 0}, - {"UnaryExpr.X", Field, 0}, - {"Unparen", Func, 22}, - {"ValueSpec", Type, 0}, - {"ValueSpec.Comment", Field, 0}, - {"ValueSpec.Doc", Field, 0}, - {"ValueSpec.Names", Field, 0}, - {"ValueSpec.Type", Field, 0}, - {"ValueSpec.Values", Field, 0}, - {"Var", Const, 0}, - {"Visitor", Type, 0}, - {"Walk", Func, 0}, + {"(*ArrayType).End", Method, 0, ""}, + {"(*ArrayType).Pos", Method, 0, ""}, + {"(*AssignStmt).End", Method, 0, ""}, + {"(*AssignStmt).Pos", Method, 0, ""}, + {"(*BadDecl).End", Method, 0, ""}, + {"(*BadDecl).Pos", Method, 0, ""}, + {"(*BadExpr).End", Method, 0, ""}, + {"(*BadExpr).Pos", Method, 0, ""}, + {"(*BadStmt).End", Method, 0, ""}, + {"(*BadStmt).Pos", Method, 0, ""}, + {"(*BasicLit).End", Method, 0, ""}, + {"(*BasicLit).Pos", Method, 0, ""}, + {"(*BinaryExpr).End", Method, 0, ""}, + {"(*BinaryExpr).Pos", Method, 0, ""}, + {"(*BlockStmt).End", Method, 0, ""}, + {"(*BlockStmt).Pos", Method, 0, ""}, + {"(*BranchStmt).End", Method, 0, ""}, + {"(*BranchStmt).Pos", Method, 0, ""}, + {"(*CallExpr).End", Method, 0, ""}, + {"(*CallExpr).Pos", Method, 0, ""}, + {"(*CaseClause).End", Method, 0, ""}, + {"(*CaseClause).Pos", Method, 0, ""}, + {"(*ChanType).End", Method, 0, ""}, + {"(*ChanType).Pos", Method, 0, ""}, + {"(*CommClause).End", Method, 0, ""}, + {"(*CommClause).Pos", Method, 0, ""}, + {"(*Comment).End", Method, 0, ""}, + {"(*Comment).Pos", Method, 0, ""}, + {"(*CommentGroup).End", Method, 0, ""}, + {"(*CommentGroup).Pos", Method, 0, ""}, + {"(*CommentGroup).Text", Method, 0, ""}, + {"(*CompositeLit).End", Method, 0, ""}, + {"(*CompositeLit).Pos", Method, 0, ""}, + {"(*DeclStmt).End", Method, 0, ""}, + {"(*DeclStmt).Pos", Method, 0, ""}, + {"(*DeferStmt).End", Method, 0, ""}, + {"(*DeferStmt).Pos", Method, 0, ""}, + {"(*Ellipsis).End", Method, 0, ""}, + {"(*Ellipsis).Pos", Method, 0, ""}, + {"(*EmptyStmt).End", Method, 0, ""}, + {"(*EmptyStmt).Pos", Method, 0, ""}, + {"(*ExprStmt).End", Method, 0, ""}, + {"(*ExprStmt).Pos", Method, 0, ""}, + {"(*Field).End", Method, 0, ""}, + {"(*Field).Pos", Method, 0, ""}, + {"(*FieldList).End", Method, 0, ""}, + {"(*FieldList).NumFields", Method, 0, ""}, + {"(*FieldList).Pos", Method, 0, ""}, + {"(*File).End", Method, 0, ""}, + {"(*File).Pos", Method, 0, ""}, + {"(*ForStmt).End", Method, 0, ""}, + {"(*ForStmt).Pos", Method, 0, ""}, + {"(*FuncDecl).End", Method, 0, ""}, + {"(*FuncDecl).Pos", Method, 0, ""}, + {"(*FuncLit).End", Method, 0, ""}, + {"(*FuncLit).Pos", Method, 0, ""}, + {"(*FuncType).End", Method, 0, ""}, + {"(*FuncType).Pos", Method, 0, ""}, + {"(*GenDecl).End", Method, 0, ""}, + {"(*GenDecl).Pos", Method, 0, ""}, + {"(*GoStmt).End", Method, 0, ""}, + {"(*GoStmt).Pos", Method, 0, ""}, + {"(*Ident).End", Method, 0, ""}, + {"(*Ident).IsExported", Method, 0, ""}, + {"(*Ident).Pos", Method, 0, ""}, + {"(*Ident).String", Method, 0, ""}, + {"(*IfStmt).End", Method, 0, ""}, + {"(*IfStmt).Pos", Method, 0, ""}, + {"(*ImportSpec).End", Method, 0, ""}, + {"(*ImportSpec).Pos", Method, 0, ""}, + {"(*IncDecStmt).End", Method, 0, ""}, + {"(*IncDecStmt).Pos", Method, 0, ""}, + {"(*IndexExpr).End", Method, 0, ""}, + {"(*IndexExpr).Pos", Method, 0, ""}, + {"(*IndexListExpr).End", 
Method, 18, ""}, + {"(*IndexListExpr).Pos", Method, 18, ""}, + {"(*InterfaceType).End", Method, 0, ""}, + {"(*InterfaceType).Pos", Method, 0, ""}, + {"(*KeyValueExpr).End", Method, 0, ""}, + {"(*KeyValueExpr).Pos", Method, 0, ""}, + {"(*LabeledStmt).End", Method, 0, ""}, + {"(*LabeledStmt).Pos", Method, 0, ""}, + {"(*MapType).End", Method, 0, ""}, + {"(*MapType).Pos", Method, 0, ""}, + {"(*Object).Pos", Method, 0, ""}, + {"(*Package).End", Method, 0, ""}, + {"(*Package).Pos", Method, 0, ""}, + {"(*ParenExpr).End", Method, 0, ""}, + {"(*ParenExpr).Pos", Method, 0, ""}, + {"(*RangeStmt).End", Method, 0, ""}, + {"(*RangeStmt).Pos", Method, 0, ""}, + {"(*ReturnStmt).End", Method, 0, ""}, + {"(*ReturnStmt).Pos", Method, 0, ""}, + {"(*Scope).Insert", Method, 0, ""}, + {"(*Scope).Lookup", Method, 0, ""}, + {"(*Scope).String", Method, 0, ""}, + {"(*SelectStmt).End", Method, 0, ""}, + {"(*SelectStmt).Pos", Method, 0, ""}, + {"(*SelectorExpr).End", Method, 0, ""}, + {"(*SelectorExpr).Pos", Method, 0, ""}, + {"(*SendStmt).End", Method, 0, ""}, + {"(*SendStmt).Pos", Method, 0, ""}, + {"(*SliceExpr).End", Method, 0, ""}, + {"(*SliceExpr).Pos", Method, 0, ""}, + {"(*StarExpr).End", Method, 0, ""}, + {"(*StarExpr).Pos", Method, 0, ""}, + {"(*StructType).End", Method, 0, ""}, + {"(*StructType).Pos", Method, 0, ""}, + {"(*SwitchStmt).End", Method, 0, ""}, + {"(*SwitchStmt).Pos", Method, 0, ""}, + {"(*TypeAssertExpr).End", Method, 0, ""}, + {"(*TypeAssertExpr).Pos", Method, 0, ""}, + {"(*TypeSpec).End", Method, 0, ""}, + {"(*TypeSpec).Pos", Method, 0, ""}, + {"(*TypeSwitchStmt).End", Method, 0, ""}, + {"(*TypeSwitchStmt).Pos", Method, 0, ""}, + {"(*UnaryExpr).End", Method, 0, ""}, + {"(*UnaryExpr).Pos", Method, 0, ""}, + {"(*ValueSpec).End", Method, 0, ""}, + {"(*ValueSpec).Pos", Method, 0, ""}, + {"(CommentMap).Comments", Method, 1, ""}, + {"(CommentMap).Filter", Method, 1, ""}, + {"(CommentMap).String", Method, 1, ""}, + {"(CommentMap).Update", Method, 1, ""}, + {"(ObjKind).String", Method, 0, ""}, + {"ArrayType", Type, 0, ""}, + {"ArrayType.Elt", Field, 0, ""}, + {"ArrayType.Lbrack", Field, 0, ""}, + {"ArrayType.Len", Field, 0, ""}, + {"AssignStmt", Type, 0, ""}, + {"AssignStmt.Lhs", Field, 0, ""}, + {"AssignStmt.Rhs", Field, 0, ""}, + {"AssignStmt.Tok", Field, 0, ""}, + {"AssignStmt.TokPos", Field, 0, ""}, + {"Bad", Const, 0, ""}, + {"BadDecl", Type, 0, ""}, + {"BadDecl.From", Field, 0, ""}, + {"BadDecl.To", Field, 0, ""}, + {"BadExpr", Type, 0, ""}, + {"BadExpr.From", Field, 0, ""}, + {"BadExpr.To", Field, 0, ""}, + {"BadStmt", Type, 0, ""}, + {"BadStmt.From", Field, 0, ""}, + {"BadStmt.To", Field, 0, ""}, + {"BasicLit", Type, 0, ""}, + {"BasicLit.Kind", Field, 0, ""}, + {"BasicLit.Value", Field, 0, ""}, + {"BasicLit.ValuePos", Field, 0, ""}, + {"BinaryExpr", Type, 0, ""}, + {"BinaryExpr.Op", Field, 0, ""}, + {"BinaryExpr.OpPos", Field, 0, ""}, + {"BinaryExpr.X", Field, 0, ""}, + {"BinaryExpr.Y", Field, 0, ""}, + {"BlockStmt", Type, 0, ""}, + {"BlockStmt.Lbrace", Field, 0, ""}, + {"BlockStmt.List", Field, 0, ""}, + {"BlockStmt.Rbrace", Field, 0, ""}, + {"BranchStmt", Type, 0, ""}, + {"BranchStmt.Label", Field, 0, ""}, + {"BranchStmt.Tok", Field, 0, ""}, + {"BranchStmt.TokPos", Field, 0, ""}, + {"CallExpr", Type, 0, ""}, + {"CallExpr.Args", Field, 0, ""}, + {"CallExpr.Ellipsis", Field, 0, ""}, + {"CallExpr.Fun", Field, 0, ""}, + {"CallExpr.Lparen", Field, 0, ""}, + {"CallExpr.Rparen", Field, 0, ""}, + {"CaseClause", Type, 0, ""}, + {"CaseClause.Body", Field, 0, ""}, + {"CaseClause.Case", Field, 0, ""}, 
+ {"CaseClause.Colon", Field, 0, ""}, + {"CaseClause.List", Field, 0, ""}, + {"ChanDir", Type, 0, ""}, + {"ChanType", Type, 0, ""}, + {"ChanType.Arrow", Field, 1, ""}, + {"ChanType.Begin", Field, 0, ""}, + {"ChanType.Dir", Field, 0, ""}, + {"ChanType.Value", Field, 0, ""}, + {"CommClause", Type, 0, ""}, + {"CommClause.Body", Field, 0, ""}, + {"CommClause.Case", Field, 0, ""}, + {"CommClause.Colon", Field, 0, ""}, + {"CommClause.Comm", Field, 0, ""}, + {"Comment", Type, 0, ""}, + {"Comment.Slash", Field, 0, ""}, + {"Comment.Text", Field, 0, ""}, + {"CommentGroup", Type, 0, ""}, + {"CommentGroup.List", Field, 0, ""}, + {"CommentMap", Type, 1, ""}, + {"CompositeLit", Type, 0, ""}, + {"CompositeLit.Elts", Field, 0, ""}, + {"CompositeLit.Incomplete", Field, 11, ""}, + {"CompositeLit.Lbrace", Field, 0, ""}, + {"CompositeLit.Rbrace", Field, 0, ""}, + {"CompositeLit.Type", Field, 0, ""}, + {"Con", Const, 0, ""}, + {"Decl", Type, 0, ""}, + {"DeclStmt", Type, 0, ""}, + {"DeclStmt.Decl", Field, 0, ""}, + {"DeferStmt", Type, 0, ""}, + {"DeferStmt.Call", Field, 0, ""}, + {"DeferStmt.Defer", Field, 0, ""}, + {"Ellipsis", Type, 0, ""}, + {"Ellipsis.Ellipsis", Field, 0, ""}, + {"Ellipsis.Elt", Field, 0, ""}, + {"EmptyStmt", Type, 0, ""}, + {"EmptyStmt.Implicit", Field, 5, ""}, + {"EmptyStmt.Semicolon", Field, 0, ""}, + {"Expr", Type, 0, ""}, + {"ExprStmt", Type, 0, ""}, + {"ExprStmt.X", Field, 0, ""}, + {"Field", Type, 0, ""}, + {"Field.Comment", Field, 0, ""}, + {"Field.Doc", Field, 0, ""}, + {"Field.Names", Field, 0, ""}, + {"Field.Tag", Field, 0, ""}, + {"Field.Type", Field, 0, ""}, + {"FieldFilter", Type, 0, ""}, + {"FieldList", Type, 0, ""}, + {"FieldList.Closing", Field, 0, ""}, + {"FieldList.List", Field, 0, ""}, + {"FieldList.Opening", Field, 0, ""}, + {"File", Type, 0, ""}, + {"File.Comments", Field, 0, ""}, + {"File.Decls", Field, 0, ""}, + {"File.Doc", Field, 0, ""}, + {"File.FileEnd", Field, 20, ""}, + {"File.FileStart", Field, 20, ""}, + {"File.GoVersion", Field, 21, ""}, + {"File.Imports", Field, 0, ""}, + {"File.Name", Field, 0, ""}, + {"File.Package", Field, 0, ""}, + {"File.Scope", Field, 0, ""}, + {"File.Unresolved", Field, 0, ""}, + {"FileExports", Func, 0, "func(src *File) bool"}, + {"Filter", Type, 0, ""}, + {"FilterDecl", Func, 0, "func(decl Decl, f Filter) bool"}, + {"FilterFile", Func, 0, "func(src *File, f Filter) bool"}, + {"FilterFuncDuplicates", Const, 0, ""}, + {"FilterImportDuplicates", Const, 0, ""}, + {"FilterPackage", Func, 0, "func(pkg *Package, f Filter) bool"}, + {"FilterUnassociatedComments", Const, 0, ""}, + {"ForStmt", Type, 0, ""}, + {"ForStmt.Body", Field, 0, ""}, + {"ForStmt.Cond", Field, 0, ""}, + {"ForStmt.For", Field, 0, ""}, + {"ForStmt.Init", Field, 0, ""}, + {"ForStmt.Post", Field, 0, ""}, + {"Fprint", Func, 0, "func(w io.Writer, fset *token.FileSet, x any, f FieldFilter) error"}, + {"Fun", Const, 0, ""}, + {"FuncDecl", Type, 0, ""}, + {"FuncDecl.Body", Field, 0, ""}, + {"FuncDecl.Doc", Field, 0, ""}, + {"FuncDecl.Name", Field, 0, ""}, + {"FuncDecl.Recv", Field, 0, ""}, + {"FuncDecl.Type", Field, 0, ""}, + {"FuncLit", Type, 0, ""}, + {"FuncLit.Body", Field, 0, ""}, + {"FuncLit.Type", Field, 0, ""}, + {"FuncType", Type, 0, ""}, + {"FuncType.Func", Field, 0, ""}, + {"FuncType.Params", Field, 0, ""}, + {"FuncType.Results", Field, 0, ""}, + {"FuncType.TypeParams", Field, 18, ""}, + {"GenDecl", Type, 0, ""}, + {"GenDecl.Doc", Field, 0, ""}, + {"GenDecl.Lparen", Field, 0, ""}, + {"GenDecl.Rparen", Field, 0, ""}, + {"GenDecl.Specs", Field, 0, ""}, + {"GenDecl.Tok", 
Field, 0, ""}, + {"GenDecl.TokPos", Field, 0, ""}, + {"GoStmt", Type, 0, ""}, + {"GoStmt.Call", Field, 0, ""}, + {"GoStmt.Go", Field, 0, ""}, + {"Ident", Type, 0, ""}, + {"Ident.Name", Field, 0, ""}, + {"Ident.NamePos", Field, 0, ""}, + {"Ident.Obj", Field, 0, ""}, + {"IfStmt", Type, 0, ""}, + {"IfStmt.Body", Field, 0, ""}, + {"IfStmt.Cond", Field, 0, ""}, + {"IfStmt.Else", Field, 0, ""}, + {"IfStmt.If", Field, 0, ""}, + {"IfStmt.Init", Field, 0, ""}, + {"ImportSpec", Type, 0, ""}, + {"ImportSpec.Comment", Field, 0, ""}, + {"ImportSpec.Doc", Field, 0, ""}, + {"ImportSpec.EndPos", Field, 0, ""}, + {"ImportSpec.Name", Field, 0, ""}, + {"ImportSpec.Path", Field, 0, ""}, + {"Importer", Type, 0, ""}, + {"IncDecStmt", Type, 0, ""}, + {"IncDecStmt.Tok", Field, 0, ""}, + {"IncDecStmt.TokPos", Field, 0, ""}, + {"IncDecStmt.X", Field, 0, ""}, + {"IndexExpr", Type, 0, ""}, + {"IndexExpr.Index", Field, 0, ""}, + {"IndexExpr.Lbrack", Field, 0, ""}, + {"IndexExpr.Rbrack", Field, 0, ""}, + {"IndexExpr.X", Field, 0, ""}, + {"IndexListExpr", Type, 18, ""}, + {"IndexListExpr.Indices", Field, 18, ""}, + {"IndexListExpr.Lbrack", Field, 18, ""}, + {"IndexListExpr.Rbrack", Field, 18, ""}, + {"IndexListExpr.X", Field, 18, ""}, + {"Inspect", Func, 0, "func(node Node, f func(Node) bool)"}, + {"InterfaceType", Type, 0, ""}, + {"InterfaceType.Incomplete", Field, 0, ""}, + {"InterfaceType.Interface", Field, 0, ""}, + {"InterfaceType.Methods", Field, 0, ""}, + {"IsExported", Func, 0, "func(name string) bool"}, + {"IsGenerated", Func, 21, "func(file *File) bool"}, + {"KeyValueExpr", Type, 0, ""}, + {"KeyValueExpr.Colon", Field, 0, ""}, + {"KeyValueExpr.Key", Field, 0, ""}, + {"KeyValueExpr.Value", Field, 0, ""}, + {"LabeledStmt", Type, 0, ""}, + {"LabeledStmt.Colon", Field, 0, ""}, + {"LabeledStmt.Label", Field, 0, ""}, + {"LabeledStmt.Stmt", Field, 0, ""}, + {"Lbl", Const, 0, ""}, + {"MapType", Type, 0, ""}, + {"MapType.Key", Field, 0, ""}, + {"MapType.Map", Field, 0, ""}, + {"MapType.Value", Field, 0, ""}, + {"MergeMode", Type, 0, ""}, + {"MergePackageFiles", Func, 0, "func(pkg *Package, mode MergeMode) *File"}, + {"NewCommentMap", Func, 1, "func(fset *token.FileSet, node Node, comments []*CommentGroup) CommentMap"}, + {"NewIdent", Func, 0, "func(name string) *Ident"}, + {"NewObj", Func, 0, "func(kind ObjKind, name string) *Object"}, + {"NewPackage", Func, 0, "func(fset *token.FileSet, files map[string]*File, importer Importer, universe *Scope) (*Package, error)"}, + {"NewScope", Func, 0, "func(outer *Scope) *Scope"}, + {"Node", Type, 0, ""}, + {"NotNilFilter", Func, 0, "func(_ string, v reflect.Value) bool"}, + {"ObjKind", Type, 0, ""}, + {"Object", Type, 0, ""}, + {"Object.Data", Field, 0, ""}, + {"Object.Decl", Field, 0, ""}, + {"Object.Kind", Field, 0, ""}, + {"Object.Name", Field, 0, ""}, + {"Object.Type", Field, 0, ""}, + {"Package", Type, 0, ""}, + {"Package.Files", Field, 0, ""}, + {"Package.Imports", Field, 0, ""}, + {"Package.Name", Field, 0, ""}, + {"Package.Scope", Field, 0, ""}, + {"PackageExports", Func, 0, "func(pkg *Package) bool"}, + {"ParenExpr", Type, 0, ""}, + {"ParenExpr.Lparen", Field, 0, ""}, + {"ParenExpr.Rparen", Field, 0, ""}, + {"ParenExpr.X", Field, 0, ""}, + {"Pkg", Const, 0, ""}, + {"Preorder", Func, 23, "func(root Node) iter.Seq[Node]"}, + {"Print", Func, 0, "func(fset *token.FileSet, x any) error"}, + {"RECV", Const, 0, ""}, + {"RangeStmt", Type, 0, ""}, + {"RangeStmt.Body", Field, 0, ""}, + {"RangeStmt.For", Field, 0, ""}, + {"RangeStmt.Key", Field, 0, ""}, + {"RangeStmt.Range", 
Field, 20, ""}, + {"RangeStmt.Tok", Field, 0, ""}, + {"RangeStmt.TokPos", Field, 0, ""}, + {"RangeStmt.Value", Field, 0, ""}, + {"RangeStmt.X", Field, 0, ""}, + {"ReturnStmt", Type, 0, ""}, + {"ReturnStmt.Results", Field, 0, ""}, + {"ReturnStmt.Return", Field, 0, ""}, + {"SEND", Const, 0, ""}, + {"Scope", Type, 0, ""}, + {"Scope.Objects", Field, 0, ""}, + {"Scope.Outer", Field, 0, ""}, + {"SelectStmt", Type, 0, ""}, + {"SelectStmt.Body", Field, 0, ""}, + {"SelectStmt.Select", Field, 0, ""}, + {"SelectorExpr", Type, 0, ""}, + {"SelectorExpr.Sel", Field, 0, ""}, + {"SelectorExpr.X", Field, 0, ""}, + {"SendStmt", Type, 0, ""}, + {"SendStmt.Arrow", Field, 0, ""}, + {"SendStmt.Chan", Field, 0, ""}, + {"SendStmt.Value", Field, 0, ""}, + {"SliceExpr", Type, 0, ""}, + {"SliceExpr.High", Field, 0, ""}, + {"SliceExpr.Lbrack", Field, 0, ""}, + {"SliceExpr.Low", Field, 0, ""}, + {"SliceExpr.Max", Field, 2, ""}, + {"SliceExpr.Rbrack", Field, 0, ""}, + {"SliceExpr.Slice3", Field, 2, ""}, + {"SliceExpr.X", Field, 0, ""}, + {"SortImports", Func, 0, "func(fset *token.FileSet, f *File)"}, + {"Spec", Type, 0, ""}, + {"StarExpr", Type, 0, ""}, + {"StarExpr.Star", Field, 0, ""}, + {"StarExpr.X", Field, 0, ""}, + {"Stmt", Type, 0, ""}, + {"StructType", Type, 0, ""}, + {"StructType.Fields", Field, 0, ""}, + {"StructType.Incomplete", Field, 0, ""}, + {"StructType.Struct", Field, 0, ""}, + {"SwitchStmt", Type, 0, ""}, + {"SwitchStmt.Body", Field, 0, ""}, + {"SwitchStmt.Init", Field, 0, ""}, + {"SwitchStmt.Switch", Field, 0, ""}, + {"SwitchStmt.Tag", Field, 0, ""}, + {"Typ", Const, 0, ""}, + {"TypeAssertExpr", Type, 0, ""}, + {"TypeAssertExpr.Lparen", Field, 2, ""}, + {"TypeAssertExpr.Rparen", Field, 2, ""}, + {"TypeAssertExpr.Type", Field, 0, ""}, + {"TypeAssertExpr.X", Field, 0, ""}, + {"TypeSpec", Type, 0, ""}, + {"TypeSpec.Assign", Field, 9, ""}, + {"TypeSpec.Comment", Field, 0, ""}, + {"TypeSpec.Doc", Field, 0, ""}, + {"TypeSpec.Name", Field, 0, ""}, + {"TypeSpec.Type", Field, 0, ""}, + {"TypeSpec.TypeParams", Field, 18, ""}, + {"TypeSwitchStmt", Type, 0, ""}, + {"TypeSwitchStmt.Assign", Field, 0, ""}, + {"TypeSwitchStmt.Body", Field, 0, ""}, + {"TypeSwitchStmt.Init", Field, 0, ""}, + {"TypeSwitchStmt.Switch", Field, 0, ""}, + {"UnaryExpr", Type, 0, ""}, + {"UnaryExpr.Op", Field, 0, ""}, + {"UnaryExpr.OpPos", Field, 0, ""}, + {"UnaryExpr.X", Field, 0, ""}, + {"Unparen", Func, 22, "func(e Expr) Expr"}, + {"ValueSpec", Type, 0, ""}, + {"ValueSpec.Comment", Field, 0, ""}, + {"ValueSpec.Doc", Field, 0, ""}, + {"ValueSpec.Names", Field, 0, ""}, + {"ValueSpec.Type", Field, 0, ""}, + {"ValueSpec.Values", Field, 0, ""}, + {"Var", Const, 0, ""}, + {"Visitor", Type, 0, ""}, + {"Walk", Func, 0, "func(v Visitor, node Node)"}, }, "go/build": { - {"(*Context).Import", Method, 0}, - {"(*Context).ImportDir", Method, 0}, - {"(*Context).MatchFile", Method, 2}, - {"(*Context).SrcDirs", Method, 0}, - {"(*MultiplePackageError).Error", Method, 4}, - {"(*NoGoError).Error", Method, 0}, - {"(*Package).IsCommand", Method, 0}, - {"AllowBinary", Const, 0}, - {"ArchChar", Func, 0}, - {"Context", Type, 0}, - {"Context.BuildTags", Field, 0}, - {"Context.CgoEnabled", Field, 0}, - {"Context.Compiler", Field, 0}, - {"Context.Dir", Field, 14}, - {"Context.GOARCH", Field, 0}, - {"Context.GOOS", Field, 0}, - {"Context.GOPATH", Field, 0}, - {"Context.GOROOT", Field, 0}, - {"Context.HasSubdir", Field, 0}, - {"Context.InstallSuffix", Field, 1}, - {"Context.IsAbsPath", Field, 0}, - {"Context.IsDir", Field, 0}, - {"Context.JoinPath", Field, 0}, - 
{"Context.OpenFile", Field, 0}, - {"Context.ReadDir", Field, 0}, - {"Context.ReleaseTags", Field, 1}, - {"Context.SplitPathList", Field, 0}, - {"Context.ToolTags", Field, 17}, - {"Context.UseAllFiles", Field, 0}, - {"Default", Var, 0}, - {"Directive", Type, 21}, - {"Directive.Pos", Field, 21}, - {"Directive.Text", Field, 21}, - {"FindOnly", Const, 0}, - {"IgnoreVendor", Const, 6}, - {"Import", Func, 0}, - {"ImportComment", Const, 4}, - {"ImportDir", Func, 0}, - {"ImportMode", Type, 0}, - {"IsLocalImport", Func, 0}, - {"MultiplePackageError", Type, 4}, - {"MultiplePackageError.Dir", Field, 4}, - {"MultiplePackageError.Files", Field, 4}, - {"MultiplePackageError.Packages", Field, 4}, - {"NoGoError", Type, 0}, - {"NoGoError.Dir", Field, 0}, - {"Package", Type, 0}, - {"Package.AllTags", Field, 2}, - {"Package.BinDir", Field, 0}, - {"Package.BinaryOnly", Field, 7}, - {"Package.CFiles", Field, 0}, - {"Package.CXXFiles", Field, 2}, - {"Package.CgoCFLAGS", Field, 0}, - {"Package.CgoCPPFLAGS", Field, 2}, - {"Package.CgoCXXFLAGS", Field, 2}, - {"Package.CgoFFLAGS", Field, 7}, - {"Package.CgoFiles", Field, 0}, - {"Package.CgoLDFLAGS", Field, 0}, - {"Package.CgoPkgConfig", Field, 0}, - {"Package.ConflictDir", Field, 2}, - {"Package.Dir", Field, 0}, - {"Package.Directives", Field, 21}, - {"Package.Doc", Field, 0}, - {"Package.EmbedPatternPos", Field, 16}, - {"Package.EmbedPatterns", Field, 16}, - {"Package.FFiles", Field, 7}, - {"Package.GoFiles", Field, 0}, - {"Package.Goroot", Field, 0}, - {"Package.HFiles", Field, 0}, - {"Package.IgnoredGoFiles", Field, 1}, - {"Package.IgnoredOtherFiles", Field, 16}, - {"Package.ImportComment", Field, 4}, - {"Package.ImportPath", Field, 0}, - {"Package.ImportPos", Field, 0}, - {"Package.Imports", Field, 0}, - {"Package.InvalidGoFiles", Field, 6}, - {"Package.MFiles", Field, 3}, - {"Package.Name", Field, 0}, - {"Package.PkgObj", Field, 0}, - {"Package.PkgRoot", Field, 0}, - {"Package.PkgTargetRoot", Field, 5}, - {"Package.Root", Field, 0}, - {"Package.SFiles", Field, 0}, - {"Package.SrcRoot", Field, 0}, - {"Package.SwigCXXFiles", Field, 1}, - {"Package.SwigFiles", Field, 1}, - {"Package.SysoFiles", Field, 0}, - {"Package.TestDirectives", Field, 21}, - {"Package.TestEmbedPatternPos", Field, 16}, - {"Package.TestEmbedPatterns", Field, 16}, - {"Package.TestGoFiles", Field, 0}, - {"Package.TestImportPos", Field, 0}, - {"Package.TestImports", Field, 0}, - {"Package.XTestDirectives", Field, 21}, - {"Package.XTestEmbedPatternPos", Field, 16}, - {"Package.XTestEmbedPatterns", Field, 16}, - {"Package.XTestGoFiles", Field, 0}, - {"Package.XTestImportPos", Field, 0}, - {"Package.XTestImports", Field, 0}, - {"ToolDir", Var, 0}, + {"(*Context).Import", Method, 0, ""}, + {"(*Context).ImportDir", Method, 0, ""}, + {"(*Context).MatchFile", Method, 2, ""}, + {"(*Context).SrcDirs", Method, 0, ""}, + {"(*MultiplePackageError).Error", Method, 4, ""}, + {"(*NoGoError).Error", Method, 0, ""}, + {"(*Package).IsCommand", Method, 0, ""}, + {"AllowBinary", Const, 0, ""}, + {"ArchChar", Func, 0, "func(goarch string) (string, error)"}, + {"Context", Type, 0, ""}, + {"Context.BuildTags", Field, 0, ""}, + {"Context.CgoEnabled", Field, 0, ""}, + {"Context.Compiler", Field, 0, ""}, + {"Context.Dir", Field, 14, ""}, + {"Context.GOARCH", Field, 0, ""}, + {"Context.GOOS", Field, 0, ""}, + {"Context.GOPATH", Field, 0, ""}, + {"Context.GOROOT", Field, 0, ""}, + {"Context.HasSubdir", Field, 0, ""}, + {"Context.InstallSuffix", Field, 1, ""}, + {"Context.IsAbsPath", Field, 0, ""}, + {"Context.IsDir", 
Field, 0, ""}, + {"Context.JoinPath", Field, 0, ""}, + {"Context.OpenFile", Field, 0, ""}, + {"Context.ReadDir", Field, 0, ""}, + {"Context.ReleaseTags", Field, 1, ""}, + {"Context.SplitPathList", Field, 0, ""}, + {"Context.ToolTags", Field, 17, ""}, + {"Context.UseAllFiles", Field, 0, ""}, + {"Default", Var, 0, ""}, + {"Directive", Type, 21, ""}, + {"Directive.Pos", Field, 21, ""}, + {"Directive.Text", Field, 21, ""}, + {"FindOnly", Const, 0, ""}, + {"IgnoreVendor", Const, 6, ""}, + {"Import", Func, 0, "func(path string, srcDir string, mode ImportMode) (*Package, error)"}, + {"ImportComment", Const, 4, ""}, + {"ImportDir", Func, 0, "func(dir string, mode ImportMode) (*Package, error)"}, + {"ImportMode", Type, 0, ""}, + {"IsLocalImport", Func, 0, "func(path string) bool"}, + {"MultiplePackageError", Type, 4, ""}, + {"MultiplePackageError.Dir", Field, 4, ""}, + {"MultiplePackageError.Files", Field, 4, ""}, + {"MultiplePackageError.Packages", Field, 4, ""}, + {"NoGoError", Type, 0, ""}, + {"NoGoError.Dir", Field, 0, ""}, + {"Package", Type, 0, ""}, + {"Package.AllTags", Field, 2, ""}, + {"Package.BinDir", Field, 0, ""}, + {"Package.BinaryOnly", Field, 7, ""}, + {"Package.CFiles", Field, 0, ""}, + {"Package.CXXFiles", Field, 2, ""}, + {"Package.CgoCFLAGS", Field, 0, ""}, + {"Package.CgoCPPFLAGS", Field, 2, ""}, + {"Package.CgoCXXFLAGS", Field, 2, ""}, + {"Package.CgoFFLAGS", Field, 7, ""}, + {"Package.CgoFiles", Field, 0, ""}, + {"Package.CgoLDFLAGS", Field, 0, ""}, + {"Package.CgoPkgConfig", Field, 0, ""}, + {"Package.ConflictDir", Field, 2, ""}, + {"Package.Dir", Field, 0, ""}, + {"Package.Directives", Field, 21, ""}, + {"Package.Doc", Field, 0, ""}, + {"Package.EmbedPatternPos", Field, 16, ""}, + {"Package.EmbedPatterns", Field, 16, ""}, + {"Package.FFiles", Field, 7, ""}, + {"Package.GoFiles", Field, 0, ""}, + {"Package.Goroot", Field, 0, ""}, + {"Package.HFiles", Field, 0, ""}, + {"Package.IgnoredGoFiles", Field, 1, ""}, + {"Package.IgnoredOtherFiles", Field, 16, ""}, + {"Package.ImportComment", Field, 4, ""}, + {"Package.ImportPath", Field, 0, ""}, + {"Package.ImportPos", Field, 0, ""}, + {"Package.Imports", Field, 0, ""}, + {"Package.InvalidGoFiles", Field, 6, ""}, + {"Package.MFiles", Field, 3, ""}, + {"Package.Name", Field, 0, ""}, + {"Package.PkgObj", Field, 0, ""}, + {"Package.PkgRoot", Field, 0, ""}, + {"Package.PkgTargetRoot", Field, 5, ""}, + {"Package.Root", Field, 0, ""}, + {"Package.SFiles", Field, 0, ""}, + {"Package.SrcRoot", Field, 0, ""}, + {"Package.SwigCXXFiles", Field, 1, ""}, + {"Package.SwigFiles", Field, 1, ""}, + {"Package.SysoFiles", Field, 0, ""}, + {"Package.TestDirectives", Field, 21, ""}, + {"Package.TestEmbedPatternPos", Field, 16, ""}, + {"Package.TestEmbedPatterns", Field, 16, ""}, + {"Package.TestGoFiles", Field, 0, ""}, + {"Package.TestImportPos", Field, 0, ""}, + {"Package.TestImports", Field, 0, ""}, + {"Package.XTestDirectives", Field, 21, ""}, + {"Package.XTestEmbedPatternPos", Field, 16, ""}, + {"Package.XTestEmbedPatterns", Field, 16, ""}, + {"Package.XTestGoFiles", Field, 0, ""}, + {"Package.XTestImportPos", Field, 0, ""}, + {"Package.XTestImports", Field, 0, ""}, + {"ToolDir", Var, 0, ""}, }, "go/build/constraint": { - {"(*AndExpr).Eval", Method, 16}, - {"(*AndExpr).String", Method, 16}, - {"(*NotExpr).Eval", Method, 16}, - {"(*NotExpr).String", Method, 16}, - {"(*OrExpr).Eval", Method, 16}, - {"(*OrExpr).String", Method, 16}, - {"(*SyntaxError).Error", Method, 16}, - {"(*TagExpr).Eval", Method, 16}, - {"(*TagExpr).String", Method, 16}, - 
{"AndExpr", Type, 16}, - {"AndExpr.X", Field, 16}, - {"AndExpr.Y", Field, 16}, - {"Expr", Type, 16}, - {"GoVersion", Func, 21}, - {"IsGoBuild", Func, 16}, - {"IsPlusBuild", Func, 16}, - {"NotExpr", Type, 16}, - {"NotExpr.X", Field, 16}, - {"OrExpr", Type, 16}, - {"OrExpr.X", Field, 16}, - {"OrExpr.Y", Field, 16}, - {"Parse", Func, 16}, - {"PlusBuildLines", Func, 16}, - {"SyntaxError", Type, 16}, - {"SyntaxError.Err", Field, 16}, - {"SyntaxError.Offset", Field, 16}, - {"TagExpr", Type, 16}, - {"TagExpr.Tag", Field, 16}, + {"(*AndExpr).Eval", Method, 16, ""}, + {"(*AndExpr).String", Method, 16, ""}, + {"(*NotExpr).Eval", Method, 16, ""}, + {"(*NotExpr).String", Method, 16, ""}, + {"(*OrExpr).Eval", Method, 16, ""}, + {"(*OrExpr).String", Method, 16, ""}, + {"(*SyntaxError).Error", Method, 16, ""}, + {"(*TagExpr).Eval", Method, 16, ""}, + {"(*TagExpr).String", Method, 16, ""}, + {"AndExpr", Type, 16, ""}, + {"AndExpr.X", Field, 16, ""}, + {"AndExpr.Y", Field, 16, ""}, + {"Expr", Type, 16, ""}, + {"GoVersion", Func, 21, "func(x Expr) string"}, + {"IsGoBuild", Func, 16, "func(line string) bool"}, + {"IsPlusBuild", Func, 16, "func(line string) bool"}, + {"NotExpr", Type, 16, ""}, + {"NotExpr.X", Field, 16, ""}, + {"OrExpr", Type, 16, ""}, + {"OrExpr.X", Field, 16, ""}, + {"OrExpr.Y", Field, 16, ""}, + {"Parse", Func, 16, "func(line string) (Expr, error)"}, + {"PlusBuildLines", Func, 16, "func(x Expr) ([]string, error)"}, + {"SyntaxError", Type, 16, ""}, + {"SyntaxError.Err", Field, 16, ""}, + {"SyntaxError.Offset", Field, 16, ""}, + {"TagExpr", Type, 16, ""}, + {"TagExpr.Tag", Field, 16, ""}, }, "go/constant": { - {"(Kind).String", Method, 18}, - {"BinaryOp", Func, 5}, - {"BitLen", Func, 5}, - {"Bool", Const, 5}, - {"BoolVal", Func, 5}, - {"Bytes", Func, 5}, - {"Compare", Func, 5}, - {"Complex", Const, 5}, - {"Denom", Func, 5}, - {"Float", Const, 5}, - {"Float32Val", Func, 5}, - {"Float64Val", Func, 5}, - {"Imag", Func, 5}, - {"Int", Const, 5}, - {"Int64Val", Func, 5}, - {"Kind", Type, 5}, - {"Make", Func, 13}, - {"MakeBool", Func, 5}, - {"MakeFloat64", Func, 5}, - {"MakeFromBytes", Func, 5}, - {"MakeFromLiteral", Func, 5}, - {"MakeImag", Func, 5}, - {"MakeInt64", Func, 5}, - {"MakeString", Func, 5}, - {"MakeUint64", Func, 5}, - {"MakeUnknown", Func, 5}, - {"Num", Func, 5}, - {"Real", Func, 5}, - {"Shift", Func, 5}, - {"Sign", Func, 5}, - {"String", Const, 5}, - {"StringVal", Func, 5}, - {"ToComplex", Func, 6}, - {"ToFloat", Func, 6}, - {"ToInt", Func, 6}, - {"Uint64Val", Func, 5}, - {"UnaryOp", Func, 5}, - {"Unknown", Const, 5}, - {"Val", Func, 13}, - {"Value", Type, 5}, + {"(Kind).String", Method, 18, ""}, + {"BinaryOp", Func, 5, "func(x_ Value, op token.Token, y_ Value) Value"}, + {"BitLen", Func, 5, "func(x Value) int"}, + {"Bool", Const, 5, ""}, + {"BoolVal", Func, 5, "func(x Value) bool"}, + {"Bytes", Func, 5, "func(x Value) []byte"}, + {"Compare", Func, 5, "func(x_ Value, op token.Token, y_ Value) bool"}, + {"Complex", Const, 5, ""}, + {"Denom", Func, 5, "func(x Value) Value"}, + {"Float", Const, 5, ""}, + {"Float32Val", Func, 5, "func(x Value) (float32, bool)"}, + {"Float64Val", Func, 5, "func(x Value) (float64, bool)"}, + {"Imag", Func, 5, "func(x Value) Value"}, + {"Int", Const, 5, ""}, + {"Int64Val", Func, 5, "func(x Value) (int64, bool)"}, + {"Kind", Type, 5, ""}, + {"Make", Func, 13, "func(x any) Value"}, + {"MakeBool", Func, 5, "func(b bool) Value"}, + {"MakeFloat64", Func, 5, "func(x float64) Value"}, + {"MakeFromBytes", Func, 5, "func(bytes []byte) Value"}, + {"MakeFromLiteral", 
Func, 5, "func(lit string, tok token.Token, zero uint) Value"}, + {"MakeImag", Func, 5, "func(x Value) Value"}, + {"MakeInt64", Func, 5, "func(x int64) Value"}, + {"MakeString", Func, 5, "func(s string) Value"}, + {"MakeUint64", Func, 5, "func(x uint64) Value"}, + {"MakeUnknown", Func, 5, "func() Value"}, + {"Num", Func, 5, "func(x Value) Value"}, + {"Real", Func, 5, "func(x Value) Value"}, + {"Shift", Func, 5, "func(x Value, op token.Token, s uint) Value"}, + {"Sign", Func, 5, "func(x Value) int"}, + {"String", Const, 5, ""}, + {"StringVal", Func, 5, "func(x Value) string"}, + {"ToComplex", Func, 6, "func(x Value) Value"}, + {"ToFloat", Func, 6, "func(x Value) Value"}, + {"ToInt", Func, 6, "func(x Value) Value"}, + {"Uint64Val", Func, 5, "func(x Value) (uint64, bool)"}, + {"UnaryOp", Func, 5, "func(op token.Token, y Value, prec uint) Value"}, + {"Unknown", Const, 5, ""}, + {"Val", Func, 13, "func(x Value) any"}, + {"Value", Type, 5, ""}, }, "go/doc": { - {"(*Package).Filter", Method, 0}, - {"(*Package).HTML", Method, 19}, - {"(*Package).Markdown", Method, 19}, - {"(*Package).Parser", Method, 19}, - {"(*Package).Printer", Method, 19}, - {"(*Package).Synopsis", Method, 19}, - {"(*Package).Text", Method, 19}, - {"AllDecls", Const, 0}, - {"AllMethods", Const, 0}, - {"Example", Type, 0}, - {"Example.Code", Field, 0}, - {"Example.Comments", Field, 0}, - {"Example.Doc", Field, 0}, - {"Example.EmptyOutput", Field, 1}, - {"Example.Name", Field, 0}, - {"Example.Order", Field, 1}, - {"Example.Output", Field, 0}, - {"Example.Play", Field, 1}, - {"Example.Suffix", Field, 14}, - {"Example.Unordered", Field, 7}, - {"Examples", Func, 0}, - {"Filter", Type, 0}, - {"Func", Type, 0}, - {"Func.Decl", Field, 0}, - {"Func.Doc", Field, 0}, - {"Func.Examples", Field, 14}, - {"Func.Level", Field, 0}, - {"Func.Name", Field, 0}, - {"Func.Orig", Field, 0}, - {"Func.Recv", Field, 0}, - {"IllegalPrefixes", Var, 1}, - {"IsPredeclared", Func, 8}, - {"Mode", Type, 0}, - {"New", Func, 0}, - {"NewFromFiles", Func, 14}, - {"Note", Type, 1}, - {"Note.Body", Field, 1}, - {"Note.End", Field, 1}, - {"Note.Pos", Field, 1}, - {"Note.UID", Field, 1}, - {"Package", Type, 0}, - {"Package.Bugs", Field, 0}, - {"Package.Consts", Field, 0}, - {"Package.Doc", Field, 0}, - {"Package.Examples", Field, 14}, - {"Package.Filenames", Field, 0}, - {"Package.Funcs", Field, 0}, - {"Package.ImportPath", Field, 0}, - {"Package.Imports", Field, 0}, - {"Package.Name", Field, 0}, - {"Package.Notes", Field, 1}, - {"Package.Types", Field, 0}, - {"Package.Vars", Field, 0}, - {"PreserveAST", Const, 12}, - {"Synopsis", Func, 0}, - {"ToHTML", Func, 0}, - {"ToText", Func, 0}, - {"Type", Type, 0}, - {"Type.Consts", Field, 0}, - {"Type.Decl", Field, 0}, - {"Type.Doc", Field, 0}, - {"Type.Examples", Field, 14}, - {"Type.Funcs", Field, 0}, - {"Type.Methods", Field, 0}, - {"Type.Name", Field, 0}, - {"Type.Vars", Field, 0}, - {"Value", Type, 0}, - {"Value.Decl", Field, 0}, - {"Value.Doc", Field, 0}, - {"Value.Names", Field, 0}, + {"(*Package).Filter", Method, 0, ""}, + {"(*Package).HTML", Method, 19, ""}, + {"(*Package).Markdown", Method, 19, ""}, + {"(*Package).Parser", Method, 19, ""}, + {"(*Package).Printer", Method, 19, ""}, + {"(*Package).Synopsis", Method, 19, ""}, + {"(*Package).Text", Method, 19, ""}, + {"AllDecls", Const, 0, ""}, + {"AllMethods", Const, 0, ""}, + {"Example", Type, 0, ""}, + {"Example.Code", Field, 0, ""}, + {"Example.Comments", Field, 0, ""}, + {"Example.Doc", Field, 0, ""}, + {"Example.EmptyOutput", Field, 1, ""}, + {"Example.Name", 
Field, 0, ""}, + {"Example.Order", Field, 1, ""}, + {"Example.Output", Field, 0, ""}, + {"Example.Play", Field, 1, ""}, + {"Example.Suffix", Field, 14, ""}, + {"Example.Unordered", Field, 7, ""}, + {"Examples", Func, 0, "func(testFiles ...*ast.File) []*Example"}, + {"Filter", Type, 0, ""}, + {"Func", Type, 0, ""}, + {"Func.Decl", Field, 0, ""}, + {"Func.Doc", Field, 0, ""}, + {"Func.Examples", Field, 14, ""}, + {"Func.Level", Field, 0, ""}, + {"Func.Name", Field, 0, ""}, + {"Func.Orig", Field, 0, ""}, + {"Func.Recv", Field, 0, ""}, + {"IllegalPrefixes", Var, 1, ""}, + {"IsPredeclared", Func, 8, "func(s string) bool"}, + {"Mode", Type, 0, ""}, + {"New", Func, 0, "func(pkg *ast.Package, importPath string, mode Mode) *Package"}, + {"NewFromFiles", Func, 14, "func(fset *token.FileSet, files []*ast.File, importPath string, opts ...any) (*Package, error)"}, + {"Note", Type, 1, ""}, + {"Note.Body", Field, 1, ""}, + {"Note.End", Field, 1, ""}, + {"Note.Pos", Field, 1, ""}, + {"Note.UID", Field, 1, ""}, + {"Package", Type, 0, ""}, + {"Package.Bugs", Field, 0, ""}, + {"Package.Consts", Field, 0, ""}, + {"Package.Doc", Field, 0, ""}, + {"Package.Examples", Field, 14, ""}, + {"Package.Filenames", Field, 0, ""}, + {"Package.Funcs", Field, 0, ""}, + {"Package.ImportPath", Field, 0, ""}, + {"Package.Imports", Field, 0, ""}, + {"Package.Name", Field, 0, ""}, + {"Package.Notes", Field, 1, ""}, + {"Package.Types", Field, 0, ""}, + {"Package.Vars", Field, 0, ""}, + {"PreserveAST", Const, 12, ""}, + {"Synopsis", Func, 0, "func(text string) string"}, + {"ToHTML", Func, 0, "func(w io.Writer, text string, words map[string]string)"}, + {"ToText", Func, 0, "func(w io.Writer, text string, prefix string, codePrefix string, width int)"}, + {"Type", Type, 0, ""}, + {"Type.Consts", Field, 0, ""}, + {"Type.Decl", Field, 0, ""}, + {"Type.Doc", Field, 0, ""}, + {"Type.Examples", Field, 14, ""}, + {"Type.Funcs", Field, 0, ""}, + {"Type.Methods", Field, 0, ""}, + {"Type.Name", Field, 0, ""}, + {"Type.Vars", Field, 0, ""}, + {"Value", Type, 0, ""}, + {"Value.Decl", Field, 0, ""}, + {"Value.Doc", Field, 0, ""}, + {"Value.Names", Field, 0, ""}, }, "go/doc/comment": { - {"(*DocLink).DefaultURL", Method, 19}, - {"(*Heading).DefaultID", Method, 19}, - {"(*List).BlankBefore", Method, 19}, - {"(*List).BlankBetween", Method, 19}, - {"(*Parser).Parse", Method, 19}, - {"(*Printer).Comment", Method, 19}, - {"(*Printer).HTML", Method, 19}, - {"(*Printer).Markdown", Method, 19}, - {"(*Printer).Text", Method, 19}, - {"Block", Type, 19}, - {"Code", Type, 19}, - {"Code.Text", Field, 19}, - {"DefaultLookupPackage", Func, 19}, - {"Doc", Type, 19}, - {"Doc.Content", Field, 19}, - {"Doc.Links", Field, 19}, - {"DocLink", Type, 19}, - {"DocLink.ImportPath", Field, 19}, - {"DocLink.Name", Field, 19}, - {"DocLink.Recv", Field, 19}, - {"DocLink.Text", Field, 19}, - {"Heading", Type, 19}, - {"Heading.Text", Field, 19}, - {"Italic", Type, 19}, - {"Link", Type, 19}, - {"Link.Auto", Field, 19}, - {"Link.Text", Field, 19}, - {"Link.URL", Field, 19}, - {"LinkDef", Type, 19}, - {"LinkDef.Text", Field, 19}, - {"LinkDef.URL", Field, 19}, - {"LinkDef.Used", Field, 19}, - {"List", Type, 19}, - {"List.ForceBlankBefore", Field, 19}, - {"List.ForceBlankBetween", Field, 19}, - {"List.Items", Field, 19}, - {"ListItem", Type, 19}, - {"ListItem.Content", Field, 19}, - {"ListItem.Number", Field, 19}, - {"Paragraph", Type, 19}, - {"Paragraph.Text", Field, 19}, - {"Parser", Type, 19}, - {"Parser.LookupPackage", Field, 19}, - {"Parser.LookupSym", Field, 19}, - 
{"Parser.Words", Field, 19}, - {"Plain", Type, 19}, - {"Printer", Type, 19}, - {"Printer.DocLinkBaseURL", Field, 19}, - {"Printer.DocLinkURL", Field, 19}, - {"Printer.HeadingID", Field, 19}, - {"Printer.HeadingLevel", Field, 19}, - {"Printer.TextCodePrefix", Field, 19}, - {"Printer.TextPrefix", Field, 19}, - {"Printer.TextWidth", Field, 19}, - {"Text", Type, 19}, + {"(*DocLink).DefaultURL", Method, 19, ""}, + {"(*Heading).DefaultID", Method, 19, ""}, + {"(*List).BlankBefore", Method, 19, ""}, + {"(*List).BlankBetween", Method, 19, ""}, + {"(*Parser).Parse", Method, 19, ""}, + {"(*Printer).Comment", Method, 19, ""}, + {"(*Printer).HTML", Method, 19, ""}, + {"(*Printer).Markdown", Method, 19, ""}, + {"(*Printer).Text", Method, 19, ""}, + {"Block", Type, 19, ""}, + {"Code", Type, 19, ""}, + {"Code.Text", Field, 19, ""}, + {"DefaultLookupPackage", Func, 19, "func(name string) (importPath string, ok bool)"}, + {"Doc", Type, 19, ""}, + {"Doc.Content", Field, 19, ""}, + {"Doc.Links", Field, 19, ""}, + {"DocLink", Type, 19, ""}, + {"DocLink.ImportPath", Field, 19, ""}, + {"DocLink.Name", Field, 19, ""}, + {"DocLink.Recv", Field, 19, ""}, + {"DocLink.Text", Field, 19, ""}, + {"Heading", Type, 19, ""}, + {"Heading.Text", Field, 19, ""}, + {"Italic", Type, 19, ""}, + {"Link", Type, 19, ""}, + {"Link.Auto", Field, 19, ""}, + {"Link.Text", Field, 19, ""}, + {"Link.URL", Field, 19, ""}, + {"LinkDef", Type, 19, ""}, + {"LinkDef.Text", Field, 19, ""}, + {"LinkDef.URL", Field, 19, ""}, + {"LinkDef.Used", Field, 19, ""}, + {"List", Type, 19, ""}, + {"List.ForceBlankBefore", Field, 19, ""}, + {"List.ForceBlankBetween", Field, 19, ""}, + {"List.Items", Field, 19, ""}, + {"ListItem", Type, 19, ""}, + {"ListItem.Content", Field, 19, ""}, + {"ListItem.Number", Field, 19, ""}, + {"Paragraph", Type, 19, ""}, + {"Paragraph.Text", Field, 19, ""}, + {"Parser", Type, 19, ""}, + {"Parser.LookupPackage", Field, 19, ""}, + {"Parser.LookupSym", Field, 19, ""}, + {"Parser.Words", Field, 19, ""}, + {"Plain", Type, 19, ""}, + {"Printer", Type, 19, ""}, + {"Printer.DocLinkBaseURL", Field, 19, ""}, + {"Printer.DocLinkURL", Field, 19, ""}, + {"Printer.HeadingID", Field, 19, ""}, + {"Printer.HeadingLevel", Field, 19, ""}, + {"Printer.TextCodePrefix", Field, 19, ""}, + {"Printer.TextPrefix", Field, 19, ""}, + {"Printer.TextWidth", Field, 19, ""}, + {"Text", Type, 19, ""}, }, "go/format": { - {"Node", Func, 1}, - {"Source", Func, 1}, + {"Node", Func, 1, "func(dst io.Writer, fset *token.FileSet, node any) error"}, + {"Source", Func, 1, "func(src []byte) ([]byte, error)"}, }, "go/importer": { - {"Default", Func, 5}, - {"For", Func, 5}, - {"ForCompiler", Func, 12}, - {"Lookup", Type, 5}, + {"Default", Func, 5, "func() types.Importer"}, + {"For", Func, 5, "func(compiler string, lookup Lookup) types.Importer"}, + {"ForCompiler", Func, 12, "func(fset *token.FileSet, compiler string, lookup Lookup) types.Importer"}, + {"Lookup", Type, 5, ""}, }, "go/parser": { - {"AllErrors", Const, 1}, - {"DeclarationErrors", Const, 0}, - {"ImportsOnly", Const, 0}, - {"Mode", Type, 0}, - {"PackageClauseOnly", Const, 0}, - {"ParseComments", Const, 0}, - {"ParseDir", Func, 0}, - {"ParseExpr", Func, 0}, - {"ParseExprFrom", Func, 5}, - {"ParseFile", Func, 0}, - {"SkipObjectResolution", Const, 17}, - {"SpuriousErrors", Const, 0}, - {"Trace", Const, 0}, + {"AllErrors", Const, 1, ""}, + {"DeclarationErrors", Const, 0, ""}, + {"ImportsOnly", Const, 0, ""}, + {"Mode", Type, 0, ""}, + {"PackageClauseOnly", Const, 0, ""}, + {"ParseComments", Const, 0, ""}, + 
{"ParseDir", Func, 0, "func(fset *token.FileSet, path string, filter func(fs.FileInfo) bool, mode Mode) (pkgs map[string]*ast.Package, first error)"}, + {"ParseExpr", Func, 0, "func(x string) (ast.Expr, error)"}, + {"ParseExprFrom", Func, 5, "func(fset *token.FileSet, filename string, src any, mode Mode) (expr ast.Expr, err error)"}, + {"ParseFile", Func, 0, "func(fset *token.FileSet, filename string, src any, mode Mode) (f *ast.File, err error)"}, + {"SkipObjectResolution", Const, 17, ""}, + {"SpuriousErrors", Const, 0, ""}, + {"Trace", Const, 0, ""}, }, "go/printer": { - {"(*Config).Fprint", Method, 0}, - {"CommentedNode", Type, 0}, - {"CommentedNode.Comments", Field, 0}, - {"CommentedNode.Node", Field, 0}, - {"Config", Type, 0}, - {"Config.Indent", Field, 1}, - {"Config.Mode", Field, 0}, - {"Config.Tabwidth", Field, 0}, - {"Fprint", Func, 0}, - {"Mode", Type, 0}, - {"RawFormat", Const, 0}, - {"SourcePos", Const, 0}, - {"TabIndent", Const, 0}, - {"UseSpaces", Const, 0}, + {"(*Config).Fprint", Method, 0, ""}, + {"CommentedNode", Type, 0, ""}, + {"CommentedNode.Comments", Field, 0, ""}, + {"CommentedNode.Node", Field, 0, ""}, + {"Config", Type, 0, ""}, + {"Config.Indent", Field, 1, ""}, + {"Config.Mode", Field, 0, ""}, + {"Config.Tabwidth", Field, 0, ""}, + {"Fprint", Func, 0, "func(output io.Writer, fset *token.FileSet, node any) error"}, + {"Mode", Type, 0, ""}, + {"RawFormat", Const, 0, ""}, + {"SourcePos", Const, 0, ""}, + {"TabIndent", Const, 0, ""}, + {"UseSpaces", Const, 0, ""}, }, "go/scanner": { - {"(*ErrorList).Add", Method, 0}, - {"(*ErrorList).RemoveMultiples", Method, 0}, - {"(*ErrorList).Reset", Method, 0}, - {"(*Scanner).Init", Method, 0}, - {"(*Scanner).Scan", Method, 0}, - {"(Error).Error", Method, 0}, - {"(ErrorList).Err", Method, 0}, - {"(ErrorList).Error", Method, 0}, - {"(ErrorList).Len", Method, 0}, - {"(ErrorList).Less", Method, 0}, - {"(ErrorList).Sort", Method, 0}, - {"(ErrorList).Swap", Method, 0}, - {"Error", Type, 0}, - {"Error.Msg", Field, 0}, - {"Error.Pos", Field, 0}, - {"ErrorHandler", Type, 0}, - {"ErrorList", Type, 0}, - {"Mode", Type, 0}, - {"PrintError", Func, 0}, - {"ScanComments", Const, 0}, - {"Scanner", Type, 0}, - {"Scanner.ErrorCount", Field, 0}, + {"(*ErrorList).Add", Method, 0, ""}, + {"(*ErrorList).RemoveMultiples", Method, 0, ""}, + {"(*ErrorList).Reset", Method, 0, ""}, + {"(*Scanner).Init", Method, 0, ""}, + {"(*Scanner).Scan", Method, 0, ""}, + {"(Error).Error", Method, 0, ""}, + {"(ErrorList).Err", Method, 0, ""}, + {"(ErrorList).Error", Method, 0, ""}, + {"(ErrorList).Len", Method, 0, ""}, + {"(ErrorList).Less", Method, 0, ""}, + {"(ErrorList).Sort", Method, 0, ""}, + {"(ErrorList).Swap", Method, 0, ""}, + {"Error", Type, 0, ""}, + {"Error.Msg", Field, 0, ""}, + {"Error.Pos", Field, 0, ""}, + {"ErrorHandler", Type, 0, ""}, + {"ErrorList", Type, 0, ""}, + {"Mode", Type, 0, ""}, + {"PrintError", Func, 0, "func(w io.Writer, err error)"}, + {"ScanComments", Const, 0, ""}, + {"Scanner", Type, 0, ""}, + {"Scanner.ErrorCount", Field, 0, ""}, }, "go/token": { - {"(*File).AddLine", Method, 0}, - {"(*File).AddLineColumnInfo", Method, 11}, - {"(*File).AddLineInfo", Method, 0}, - {"(*File).Base", Method, 0}, - {"(*File).Line", Method, 0}, - {"(*File).LineCount", Method, 0}, - {"(*File).LineStart", Method, 12}, - {"(*File).Lines", Method, 21}, - {"(*File).MergeLine", Method, 2}, - {"(*File).Name", Method, 0}, - {"(*File).Offset", Method, 0}, - {"(*File).Pos", Method, 0}, - {"(*File).Position", Method, 0}, - {"(*File).PositionFor", Method, 4}, - 
{"(*File).SetLines", Method, 0}, - {"(*File).SetLinesForContent", Method, 0}, - {"(*File).Size", Method, 0}, - {"(*FileSet).AddFile", Method, 0}, - {"(*FileSet).Base", Method, 0}, - {"(*FileSet).File", Method, 0}, - {"(*FileSet).Iterate", Method, 0}, - {"(*FileSet).Position", Method, 0}, - {"(*FileSet).PositionFor", Method, 4}, - {"(*FileSet).Read", Method, 0}, - {"(*FileSet).RemoveFile", Method, 20}, - {"(*FileSet).Write", Method, 0}, - {"(*Position).IsValid", Method, 0}, - {"(Pos).IsValid", Method, 0}, - {"(Position).String", Method, 0}, - {"(Token).IsKeyword", Method, 0}, - {"(Token).IsLiteral", Method, 0}, - {"(Token).IsOperator", Method, 0}, - {"(Token).Precedence", Method, 0}, - {"(Token).String", Method, 0}, - {"ADD", Const, 0}, - {"ADD_ASSIGN", Const, 0}, - {"AND", Const, 0}, - {"AND_ASSIGN", Const, 0}, - {"AND_NOT", Const, 0}, - {"AND_NOT_ASSIGN", Const, 0}, - {"ARROW", Const, 0}, - {"ASSIGN", Const, 0}, - {"BREAK", Const, 0}, - {"CASE", Const, 0}, - {"CHAN", Const, 0}, - {"CHAR", Const, 0}, - {"COLON", Const, 0}, - {"COMMA", Const, 0}, - {"COMMENT", Const, 0}, - {"CONST", Const, 0}, - {"CONTINUE", Const, 0}, - {"DEC", Const, 0}, - {"DEFAULT", Const, 0}, - {"DEFER", Const, 0}, - {"DEFINE", Const, 0}, - {"ELLIPSIS", Const, 0}, - {"ELSE", Const, 0}, - {"EOF", Const, 0}, - {"EQL", Const, 0}, - {"FALLTHROUGH", Const, 0}, - {"FLOAT", Const, 0}, - {"FOR", Const, 0}, - {"FUNC", Const, 0}, - {"File", Type, 0}, - {"FileSet", Type, 0}, - {"GEQ", Const, 0}, - {"GO", Const, 0}, - {"GOTO", Const, 0}, - {"GTR", Const, 0}, - {"HighestPrec", Const, 0}, - {"IDENT", Const, 0}, - {"IF", Const, 0}, - {"ILLEGAL", Const, 0}, - {"IMAG", Const, 0}, - {"IMPORT", Const, 0}, - {"INC", Const, 0}, - {"INT", Const, 0}, - {"INTERFACE", Const, 0}, - {"IsExported", Func, 13}, - {"IsIdentifier", Func, 13}, - {"IsKeyword", Func, 13}, - {"LAND", Const, 0}, - {"LBRACE", Const, 0}, - {"LBRACK", Const, 0}, - {"LEQ", Const, 0}, - {"LOR", Const, 0}, - {"LPAREN", Const, 0}, - {"LSS", Const, 0}, - {"Lookup", Func, 0}, - {"LowestPrec", Const, 0}, - {"MAP", Const, 0}, - {"MUL", Const, 0}, - {"MUL_ASSIGN", Const, 0}, - {"NEQ", Const, 0}, - {"NOT", Const, 0}, - {"NewFileSet", Func, 0}, - {"NoPos", Const, 0}, - {"OR", Const, 0}, - {"OR_ASSIGN", Const, 0}, - {"PACKAGE", Const, 0}, - {"PERIOD", Const, 0}, - {"Pos", Type, 0}, - {"Position", Type, 0}, - {"Position.Column", Field, 0}, - {"Position.Filename", Field, 0}, - {"Position.Line", Field, 0}, - {"Position.Offset", Field, 0}, - {"QUO", Const, 0}, - {"QUO_ASSIGN", Const, 0}, - {"RANGE", Const, 0}, - {"RBRACE", Const, 0}, - {"RBRACK", Const, 0}, - {"REM", Const, 0}, - {"REM_ASSIGN", Const, 0}, - {"RETURN", Const, 0}, - {"RPAREN", Const, 0}, - {"SELECT", Const, 0}, - {"SEMICOLON", Const, 0}, - {"SHL", Const, 0}, - {"SHL_ASSIGN", Const, 0}, - {"SHR", Const, 0}, - {"SHR_ASSIGN", Const, 0}, - {"STRING", Const, 0}, - {"STRUCT", Const, 0}, - {"SUB", Const, 0}, - {"SUB_ASSIGN", Const, 0}, - {"SWITCH", Const, 0}, - {"TILDE", Const, 18}, - {"TYPE", Const, 0}, - {"Token", Type, 0}, - {"UnaryPrec", Const, 0}, - {"VAR", Const, 0}, - {"XOR", Const, 0}, - {"XOR_ASSIGN", Const, 0}, + {"(*File).AddLine", Method, 0, ""}, + {"(*File).AddLineColumnInfo", Method, 11, ""}, + {"(*File).AddLineInfo", Method, 0, ""}, + {"(*File).Base", Method, 0, ""}, + {"(*File).Line", Method, 0, ""}, + {"(*File).LineCount", Method, 0, ""}, + {"(*File).LineStart", Method, 12, ""}, + {"(*File).Lines", Method, 21, ""}, + {"(*File).MergeLine", Method, 2, ""}, + {"(*File).Name", Method, 0, ""}, + {"(*File).Offset", 
Method, 0, ""}, + {"(*File).Pos", Method, 0, ""}, + {"(*File).Position", Method, 0, ""}, + {"(*File).PositionFor", Method, 4, ""}, + {"(*File).SetLines", Method, 0, ""}, + {"(*File).SetLinesForContent", Method, 0, ""}, + {"(*File).Size", Method, 0, ""}, + {"(*FileSet).AddFile", Method, 0, ""}, + {"(*FileSet).Base", Method, 0, ""}, + {"(*FileSet).File", Method, 0, ""}, + {"(*FileSet).Iterate", Method, 0, ""}, + {"(*FileSet).Position", Method, 0, ""}, + {"(*FileSet).PositionFor", Method, 4, ""}, + {"(*FileSet).Read", Method, 0, ""}, + {"(*FileSet).RemoveFile", Method, 20, ""}, + {"(*FileSet).Write", Method, 0, ""}, + {"(*Position).IsValid", Method, 0, ""}, + {"(Pos).IsValid", Method, 0, ""}, + {"(Position).String", Method, 0, ""}, + {"(Token).IsKeyword", Method, 0, ""}, + {"(Token).IsLiteral", Method, 0, ""}, + {"(Token).IsOperator", Method, 0, ""}, + {"(Token).Precedence", Method, 0, ""}, + {"(Token).String", Method, 0, ""}, + {"ADD", Const, 0, ""}, + {"ADD_ASSIGN", Const, 0, ""}, + {"AND", Const, 0, ""}, + {"AND_ASSIGN", Const, 0, ""}, + {"AND_NOT", Const, 0, ""}, + {"AND_NOT_ASSIGN", Const, 0, ""}, + {"ARROW", Const, 0, ""}, + {"ASSIGN", Const, 0, ""}, + {"BREAK", Const, 0, ""}, + {"CASE", Const, 0, ""}, + {"CHAN", Const, 0, ""}, + {"CHAR", Const, 0, ""}, + {"COLON", Const, 0, ""}, + {"COMMA", Const, 0, ""}, + {"COMMENT", Const, 0, ""}, + {"CONST", Const, 0, ""}, + {"CONTINUE", Const, 0, ""}, + {"DEC", Const, 0, ""}, + {"DEFAULT", Const, 0, ""}, + {"DEFER", Const, 0, ""}, + {"DEFINE", Const, 0, ""}, + {"ELLIPSIS", Const, 0, ""}, + {"ELSE", Const, 0, ""}, + {"EOF", Const, 0, ""}, + {"EQL", Const, 0, ""}, + {"FALLTHROUGH", Const, 0, ""}, + {"FLOAT", Const, 0, ""}, + {"FOR", Const, 0, ""}, + {"FUNC", Const, 0, ""}, + {"File", Type, 0, ""}, + {"FileSet", Type, 0, ""}, + {"GEQ", Const, 0, ""}, + {"GO", Const, 0, ""}, + {"GOTO", Const, 0, ""}, + {"GTR", Const, 0, ""}, + {"HighestPrec", Const, 0, ""}, + {"IDENT", Const, 0, ""}, + {"IF", Const, 0, ""}, + {"ILLEGAL", Const, 0, ""}, + {"IMAG", Const, 0, ""}, + {"IMPORT", Const, 0, ""}, + {"INC", Const, 0, ""}, + {"INT", Const, 0, ""}, + {"INTERFACE", Const, 0, ""}, + {"IsExported", Func, 13, "func(name string) bool"}, + {"IsIdentifier", Func, 13, "func(name string) bool"}, + {"IsKeyword", Func, 13, "func(name string) bool"}, + {"LAND", Const, 0, ""}, + {"LBRACE", Const, 0, ""}, + {"LBRACK", Const, 0, ""}, + {"LEQ", Const, 0, ""}, + {"LOR", Const, 0, ""}, + {"LPAREN", Const, 0, ""}, + {"LSS", Const, 0, ""}, + {"Lookup", Func, 0, "func(ident string) Token"}, + {"LowestPrec", Const, 0, ""}, + {"MAP", Const, 0, ""}, + {"MUL", Const, 0, ""}, + {"MUL_ASSIGN", Const, 0, ""}, + {"NEQ", Const, 0, ""}, + {"NOT", Const, 0, ""}, + {"NewFileSet", Func, 0, "func() *FileSet"}, + {"NoPos", Const, 0, ""}, + {"OR", Const, 0, ""}, + {"OR_ASSIGN", Const, 0, ""}, + {"PACKAGE", Const, 0, ""}, + {"PERIOD", Const, 0, ""}, + {"Pos", Type, 0, ""}, + {"Position", Type, 0, ""}, + {"Position.Column", Field, 0, ""}, + {"Position.Filename", Field, 0, ""}, + {"Position.Line", Field, 0, ""}, + {"Position.Offset", Field, 0, ""}, + {"QUO", Const, 0, ""}, + {"QUO_ASSIGN", Const, 0, ""}, + {"RANGE", Const, 0, ""}, + {"RBRACE", Const, 0, ""}, + {"RBRACK", Const, 0, ""}, + {"REM", Const, 0, ""}, + {"REM_ASSIGN", Const, 0, ""}, + {"RETURN", Const, 0, ""}, + {"RPAREN", Const, 0, ""}, + {"SELECT", Const, 0, ""}, + {"SEMICOLON", Const, 0, ""}, + {"SHL", Const, 0, ""}, + {"SHL_ASSIGN", Const, 0, ""}, + {"SHR", Const, 0, ""}, + {"SHR_ASSIGN", Const, 0, ""}, + {"STRING", Const, 0, ""}, + 
{"STRUCT", Const, 0, ""}, + {"SUB", Const, 0, ""}, + {"SUB_ASSIGN", Const, 0, ""}, + {"SWITCH", Const, 0, ""}, + {"TILDE", Const, 18, ""}, + {"TYPE", Const, 0, ""}, + {"Token", Type, 0, ""}, + {"UnaryPrec", Const, 0, ""}, + {"VAR", Const, 0, ""}, + {"XOR", Const, 0, ""}, + {"XOR_ASSIGN", Const, 0, ""}, }, "go/types": { - {"(*Alias).Obj", Method, 22}, - {"(*Alias).Origin", Method, 23}, - {"(*Alias).Rhs", Method, 23}, - {"(*Alias).SetTypeParams", Method, 23}, - {"(*Alias).String", Method, 22}, - {"(*Alias).TypeArgs", Method, 23}, - {"(*Alias).TypeParams", Method, 23}, - {"(*Alias).Underlying", Method, 22}, - {"(*ArgumentError).Error", Method, 18}, - {"(*ArgumentError).Unwrap", Method, 18}, - {"(*Array).Elem", Method, 5}, - {"(*Array).Len", Method, 5}, - {"(*Array).String", Method, 5}, - {"(*Array).Underlying", Method, 5}, - {"(*Basic).Info", Method, 5}, - {"(*Basic).Kind", Method, 5}, - {"(*Basic).Name", Method, 5}, - {"(*Basic).String", Method, 5}, - {"(*Basic).Underlying", Method, 5}, - {"(*Builtin).Exported", Method, 5}, - {"(*Builtin).Id", Method, 5}, - {"(*Builtin).Name", Method, 5}, - {"(*Builtin).Parent", Method, 5}, - {"(*Builtin).Pkg", Method, 5}, - {"(*Builtin).Pos", Method, 5}, - {"(*Builtin).String", Method, 5}, - {"(*Builtin).Type", Method, 5}, - {"(*Chan).Dir", Method, 5}, - {"(*Chan).Elem", Method, 5}, - {"(*Chan).String", Method, 5}, - {"(*Chan).Underlying", Method, 5}, - {"(*Checker).Files", Method, 5}, - {"(*Config).Check", Method, 5}, - {"(*Const).Exported", Method, 5}, - {"(*Const).Id", Method, 5}, - {"(*Const).Name", Method, 5}, - {"(*Const).Parent", Method, 5}, - {"(*Const).Pkg", Method, 5}, - {"(*Const).Pos", Method, 5}, - {"(*Const).String", Method, 5}, - {"(*Const).Type", Method, 5}, - {"(*Const).Val", Method, 5}, - {"(*Func).Exported", Method, 5}, - {"(*Func).FullName", Method, 5}, - {"(*Func).Id", Method, 5}, - {"(*Func).Name", Method, 5}, - {"(*Func).Origin", Method, 19}, - {"(*Func).Parent", Method, 5}, - {"(*Func).Pkg", Method, 5}, - {"(*Func).Pos", Method, 5}, - {"(*Func).Scope", Method, 5}, - {"(*Func).Signature", Method, 23}, - {"(*Func).String", Method, 5}, - {"(*Func).Type", Method, 5}, - {"(*Info).ObjectOf", Method, 5}, - {"(*Info).PkgNameOf", Method, 22}, - {"(*Info).TypeOf", Method, 5}, - {"(*Initializer).String", Method, 5}, - {"(*Interface).Complete", Method, 5}, - {"(*Interface).Embedded", Method, 5}, - {"(*Interface).EmbeddedType", Method, 11}, - {"(*Interface).EmbeddedTypes", Method, 24}, - {"(*Interface).Empty", Method, 5}, - {"(*Interface).ExplicitMethod", Method, 5}, - {"(*Interface).ExplicitMethods", Method, 24}, - {"(*Interface).IsComparable", Method, 18}, - {"(*Interface).IsImplicit", Method, 18}, - {"(*Interface).IsMethodSet", Method, 18}, - {"(*Interface).MarkImplicit", Method, 18}, - {"(*Interface).Method", Method, 5}, - {"(*Interface).Methods", Method, 24}, - {"(*Interface).NumEmbeddeds", Method, 5}, - {"(*Interface).NumExplicitMethods", Method, 5}, - {"(*Interface).NumMethods", Method, 5}, - {"(*Interface).String", Method, 5}, - {"(*Interface).Underlying", Method, 5}, - {"(*Label).Exported", Method, 5}, - {"(*Label).Id", Method, 5}, - {"(*Label).Name", Method, 5}, - {"(*Label).Parent", Method, 5}, - {"(*Label).Pkg", Method, 5}, - {"(*Label).Pos", Method, 5}, - {"(*Label).String", Method, 5}, - {"(*Label).Type", Method, 5}, - {"(*Map).Elem", Method, 5}, - {"(*Map).Key", Method, 5}, - {"(*Map).String", Method, 5}, - {"(*Map).Underlying", Method, 5}, - {"(*MethodSet).At", Method, 5}, - {"(*MethodSet).Len", Method, 5}, - 
{"(*MethodSet).Lookup", Method, 5}, - {"(*MethodSet).Methods", Method, 24}, - {"(*MethodSet).String", Method, 5}, - {"(*Named).AddMethod", Method, 5}, - {"(*Named).Method", Method, 5}, - {"(*Named).Methods", Method, 24}, - {"(*Named).NumMethods", Method, 5}, - {"(*Named).Obj", Method, 5}, - {"(*Named).Origin", Method, 18}, - {"(*Named).SetTypeParams", Method, 18}, - {"(*Named).SetUnderlying", Method, 5}, - {"(*Named).String", Method, 5}, - {"(*Named).TypeArgs", Method, 18}, - {"(*Named).TypeParams", Method, 18}, - {"(*Named).Underlying", Method, 5}, - {"(*Nil).Exported", Method, 5}, - {"(*Nil).Id", Method, 5}, - {"(*Nil).Name", Method, 5}, - {"(*Nil).Parent", Method, 5}, - {"(*Nil).Pkg", Method, 5}, - {"(*Nil).Pos", Method, 5}, - {"(*Nil).String", Method, 5}, - {"(*Nil).Type", Method, 5}, - {"(*Package).Complete", Method, 5}, - {"(*Package).GoVersion", Method, 21}, - {"(*Package).Imports", Method, 5}, - {"(*Package).MarkComplete", Method, 5}, - {"(*Package).Name", Method, 5}, - {"(*Package).Path", Method, 5}, - {"(*Package).Scope", Method, 5}, - {"(*Package).SetImports", Method, 5}, - {"(*Package).SetName", Method, 6}, - {"(*Package).String", Method, 5}, - {"(*PkgName).Exported", Method, 5}, - {"(*PkgName).Id", Method, 5}, - {"(*PkgName).Imported", Method, 5}, - {"(*PkgName).Name", Method, 5}, - {"(*PkgName).Parent", Method, 5}, - {"(*PkgName).Pkg", Method, 5}, - {"(*PkgName).Pos", Method, 5}, - {"(*PkgName).String", Method, 5}, - {"(*PkgName).Type", Method, 5}, - {"(*Pointer).Elem", Method, 5}, - {"(*Pointer).String", Method, 5}, - {"(*Pointer).Underlying", Method, 5}, - {"(*Scope).Child", Method, 5}, - {"(*Scope).Children", Method, 24}, - {"(*Scope).Contains", Method, 5}, - {"(*Scope).End", Method, 5}, - {"(*Scope).Innermost", Method, 5}, - {"(*Scope).Insert", Method, 5}, - {"(*Scope).Len", Method, 5}, - {"(*Scope).Lookup", Method, 5}, - {"(*Scope).LookupParent", Method, 5}, - {"(*Scope).Names", Method, 5}, - {"(*Scope).NumChildren", Method, 5}, - {"(*Scope).Parent", Method, 5}, - {"(*Scope).Pos", Method, 5}, - {"(*Scope).String", Method, 5}, - {"(*Scope).WriteTo", Method, 5}, - {"(*Selection).Index", Method, 5}, - {"(*Selection).Indirect", Method, 5}, - {"(*Selection).Kind", Method, 5}, - {"(*Selection).Obj", Method, 5}, - {"(*Selection).Recv", Method, 5}, - {"(*Selection).String", Method, 5}, - {"(*Selection).Type", Method, 5}, - {"(*Signature).Params", Method, 5}, - {"(*Signature).Recv", Method, 5}, - {"(*Signature).RecvTypeParams", Method, 18}, - {"(*Signature).Results", Method, 5}, - {"(*Signature).String", Method, 5}, - {"(*Signature).TypeParams", Method, 18}, - {"(*Signature).Underlying", Method, 5}, - {"(*Signature).Variadic", Method, 5}, - {"(*Slice).Elem", Method, 5}, - {"(*Slice).String", Method, 5}, - {"(*Slice).Underlying", Method, 5}, - {"(*StdSizes).Alignof", Method, 5}, - {"(*StdSizes).Offsetsof", Method, 5}, - {"(*StdSizes).Sizeof", Method, 5}, - {"(*Struct).Field", Method, 5}, - {"(*Struct).Fields", Method, 24}, - {"(*Struct).NumFields", Method, 5}, - {"(*Struct).String", Method, 5}, - {"(*Struct).Tag", Method, 5}, - {"(*Struct).Underlying", Method, 5}, - {"(*Term).String", Method, 18}, - {"(*Term).Tilde", Method, 18}, - {"(*Term).Type", Method, 18}, - {"(*Tuple).At", Method, 5}, - {"(*Tuple).Len", Method, 5}, - {"(*Tuple).String", Method, 5}, - {"(*Tuple).Underlying", Method, 5}, - {"(*Tuple).Variables", Method, 24}, - {"(*TypeList).At", Method, 18}, - {"(*TypeList).Len", Method, 18}, - {"(*TypeList).Types", Method, 24}, - {"(*TypeName).Exported", Method, 5}, - 
{"(*TypeName).Id", Method, 5}, - {"(*TypeName).IsAlias", Method, 9}, - {"(*TypeName).Name", Method, 5}, - {"(*TypeName).Parent", Method, 5}, - {"(*TypeName).Pkg", Method, 5}, - {"(*TypeName).Pos", Method, 5}, - {"(*TypeName).String", Method, 5}, - {"(*TypeName).Type", Method, 5}, - {"(*TypeParam).Constraint", Method, 18}, - {"(*TypeParam).Index", Method, 18}, - {"(*TypeParam).Obj", Method, 18}, - {"(*TypeParam).SetConstraint", Method, 18}, - {"(*TypeParam).String", Method, 18}, - {"(*TypeParam).Underlying", Method, 18}, - {"(*TypeParamList).At", Method, 18}, - {"(*TypeParamList).Len", Method, 18}, - {"(*TypeParamList).TypeParams", Method, 24}, - {"(*Union).Len", Method, 18}, - {"(*Union).String", Method, 18}, - {"(*Union).Term", Method, 18}, - {"(*Union).Terms", Method, 24}, - {"(*Union).Underlying", Method, 18}, - {"(*Var).Anonymous", Method, 5}, - {"(*Var).Embedded", Method, 11}, - {"(*Var).Exported", Method, 5}, - {"(*Var).Id", Method, 5}, - {"(*Var).IsField", Method, 5}, - {"(*Var).Kind", Method, 25}, - {"(*Var).Name", Method, 5}, - {"(*Var).Origin", Method, 19}, - {"(*Var).Parent", Method, 5}, - {"(*Var).Pkg", Method, 5}, - {"(*Var).Pos", Method, 5}, - {"(*Var).SetKind", Method, 25}, - {"(*Var).String", Method, 5}, - {"(*Var).Type", Method, 5}, - {"(Checker).ObjectOf", Method, 5}, - {"(Checker).PkgNameOf", Method, 22}, - {"(Checker).TypeOf", Method, 5}, - {"(Error).Error", Method, 5}, - {"(TypeAndValue).Addressable", Method, 5}, - {"(TypeAndValue).Assignable", Method, 5}, - {"(TypeAndValue).HasOk", Method, 5}, - {"(TypeAndValue).IsBuiltin", Method, 5}, - {"(TypeAndValue).IsNil", Method, 5}, - {"(TypeAndValue).IsType", Method, 5}, - {"(TypeAndValue).IsValue", Method, 5}, - {"(TypeAndValue).IsVoid", Method, 5}, - {"(VarKind).String", Method, 25}, - {"Alias", Type, 22}, - {"ArgumentError", Type, 18}, - {"ArgumentError.Err", Field, 18}, - {"ArgumentError.Index", Field, 18}, - {"Array", Type, 5}, - {"AssertableTo", Func, 5}, - {"AssignableTo", Func, 5}, - {"Basic", Type, 5}, - {"BasicInfo", Type, 5}, - {"BasicKind", Type, 5}, - {"Bool", Const, 5}, - {"Builtin", Type, 5}, - {"Byte", Const, 5}, - {"Chan", Type, 5}, - {"ChanDir", Type, 5}, - {"CheckExpr", Func, 13}, - {"Checker", Type, 5}, - {"Checker.Info", Field, 5}, - {"Comparable", Func, 5}, - {"Complex128", Const, 5}, - {"Complex64", Const, 5}, - {"Config", Type, 5}, - {"Config.Context", Field, 18}, - {"Config.DisableUnusedImportCheck", Field, 5}, - {"Config.Error", Field, 5}, - {"Config.FakeImportC", Field, 5}, - {"Config.GoVersion", Field, 18}, - {"Config.IgnoreFuncBodies", Field, 5}, - {"Config.Importer", Field, 5}, - {"Config.Sizes", Field, 5}, - {"Const", Type, 5}, - {"Context", Type, 18}, - {"ConvertibleTo", Func, 5}, - {"DefPredeclaredTestFuncs", Func, 5}, - {"Default", Func, 8}, - {"Error", Type, 5}, - {"Error.Fset", Field, 5}, - {"Error.Msg", Field, 5}, - {"Error.Pos", Field, 5}, - {"Error.Soft", Field, 5}, - {"Eval", Func, 5}, - {"ExprString", Func, 5}, - {"FieldVal", Const, 5}, - {"FieldVar", Const, 25}, - {"Float32", Const, 5}, - {"Float64", Const, 5}, - {"Func", Type, 5}, - {"Id", Func, 5}, - {"Identical", Func, 5}, - {"IdenticalIgnoreTags", Func, 8}, - {"Implements", Func, 5}, - {"ImportMode", Type, 6}, - {"Importer", Type, 5}, - {"ImporterFrom", Type, 6}, - {"Info", Type, 5}, - {"Info.Defs", Field, 5}, - {"Info.FileVersions", Field, 22}, - {"Info.Implicits", Field, 5}, - {"Info.InitOrder", Field, 5}, - {"Info.Instances", Field, 18}, - {"Info.Scopes", Field, 5}, - {"Info.Selections", Field, 5}, - {"Info.Types", Field, 5}, - 
{"Info.Uses", Field, 5}, - {"Initializer", Type, 5}, - {"Initializer.Lhs", Field, 5}, - {"Initializer.Rhs", Field, 5}, - {"Instance", Type, 18}, - {"Instance.Type", Field, 18}, - {"Instance.TypeArgs", Field, 18}, - {"Instantiate", Func, 18}, - {"Int", Const, 5}, - {"Int16", Const, 5}, - {"Int32", Const, 5}, - {"Int64", Const, 5}, - {"Int8", Const, 5}, - {"Interface", Type, 5}, - {"Invalid", Const, 5}, - {"IsBoolean", Const, 5}, - {"IsComplex", Const, 5}, - {"IsConstType", Const, 5}, - {"IsFloat", Const, 5}, - {"IsInteger", Const, 5}, - {"IsInterface", Func, 5}, - {"IsNumeric", Const, 5}, - {"IsOrdered", Const, 5}, - {"IsString", Const, 5}, - {"IsUnsigned", Const, 5}, - {"IsUntyped", Const, 5}, - {"Label", Type, 5}, - {"LocalVar", Const, 25}, - {"LookupFieldOrMethod", Func, 5}, - {"LookupSelection", Func, 25}, - {"Map", Type, 5}, - {"MethodExpr", Const, 5}, - {"MethodSet", Type, 5}, - {"MethodVal", Const, 5}, - {"MissingMethod", Func, 5}, - {"Named", Type, 5}, - {"NewAlias", Func, 22}, - {"NewArray", Func, 5}, - {"NewChan", Func, 5}, - {"NewChecker", Func, 5}, - {"NewConst", Func, 5}, - {"NewContext", Func, 18}, - {"NewField", Func, 5}, - {"NewFunc", Func, 5}, - {"NewInterface", Func, 5}, - {"NewInterfaceType", Func, 11}, - {"NewLabel", Func, 5}, - {"NewMap", Func, 5}, - {"NewMethodSet", Func, 5}, - {"NewNamed", Func, 5}, - {"NewPackage", Func, 5}, - {"NewParam", Func, 5}, - {"NewPkgName", Func, 5}, - {"NewPointer", Func, 5}, - {"NewScope", Func, 5}, - {"NewSignature", Func, 5}, - {"NewSignatureType", Func, 18}, - {"NewSlice", Func, 5}, - {"NewStruct", Func, 5}, - {"NewTerm", Func, 18}, - {"NewTuple", Func, 5}, - {"NewTypeName", Func, 5}, - {"NewTypeParam", Func, 18}, - {"NewUnion", Func, 18}, - {"NewVar", Func, 5}, - {"Nil", Type, 5}, - {"Object", Type, 5}, - {"ObjectString", Func, 5}, - {"Package", Type, 5}, - {"PackageVar", Const, 25}, - {"ParamVar", Const, 25}, - {"PkgName", Type, 5}, - {"Pointer", Type, 5}, - {"Qualifier", Type, 5}, - {"RecvOnly", Const, 5}, - {"RecvVar", Const, 25}, - {"RelativeTo", Func, 5}, - {"ResultVar", Const, 25}, - {"Rune", Const, 5}, - {"Satisfies", Func, 20}, - {"Scope", Type, 5}, - {"Selection", Type, 5}, - {"SelectionKind", Type, 5}, - {"SelectionString", Func, 5}, - {"SendOnly", Const, 5}, - {"SendRecv", Const, 5}, - {"Signature", Type, 5}, - {"Sizes", Type, 5}, - {"SizesFor", Func, 9}, - {"Slice", Type, 5}, - {"StdSizes", Type, 5}, - {"StdSizes.MaxAlign", Field, 5}, - {"StdSizes.WordSize", Field, 5}, - {"String", Const, 5}, - {"Struct", Type, 5}, - {"Term", Type, 18}, - {"Tuple", Type, 5}, - {"Typ", Var, 5}, - {"Type", Type, 5}, - {"TypeAndValue", Type, 5}, - {"TypeAndValue.Type", Field, 5}, - {"TypeAndValue.Value", Field, 5}, - {"TypeList", Type, 18}, - {"TypeName", Type, 5}, - {"TypeParam", Type, 18}, - {"TypeParamList", Type, 18}, - {"TypeString", Func, 5}, - {"Uint", Const, 5}, - {"Uint16", Const, 5}, - {"Uint32", Const, 5}, - {"Uint64", Const, 5}, - {"Uint8", Const, 5}, - {"Uintptr", Const, 5}, - {"Unalias", Func, 22}, - {"Union", Type, 18}, - {"Universe", Var, 5}, - {"Unsafe", Var, 5}, - {"UnsafePointer", Const, 5}, - {"UntypedBool", Const, 5}, - {"UntypedComplex", Const, 5}, - {"UntypedFloat", Const, 5}, - {"UntypedInt", Const, 5}, - {"UntypedNil", Const, 5}, - {"UntypedRune", Const, 5}, - {"UntypedString", Const, 5}, - {"Var", Type, 5}, - {"VarKind", Type, 25}, - {"WriteExpr", Func, 5}, - {"WriteSignature", Func, 5}, - {"WriteType", Func, 5}, + {"(*Alias).Obj", Method, 22, ""}, + {"(*Alias).Origin", Method, 23, ""}, + {"(*Alias).Rhs", Method, 23, 
""}, + {"(*Alias).SetTypeParams", Method, 23, ""}, + {"(*Alias).String", Method, 22, ""}, + {"(*Alias).TypeArgs", Method, 23, ""}, + {"(*Alias).TypeParams", Method, 23, ""}, + {"(*Alias).Underlying", Method, 22, ""}, + {"(*ArgumentError).Error", Method, 18, ""}, + {"(*ArgumentError).Unwrap", Method, 18, ""}, + {"(*Array).Elem", Method, 5, ""}, + {"(*Array).Len", Method, 5, ""}, + {"(*Array).String", Method, 5, ""}, + {"(*Array).Underlying", Method, 5, ""}, + {"(*Basic).Info", Method, 5, ""}, + {"(*Basic).Kind", Method, 5, ""}, + {"(*Basic).Name", Method, 5, ""}, + {"(*Basic).String", Method, 5, ""}, + {"(*Basic).Underlying", Method, 5, ""}, + {"(*Builtin).Exported", Method, 5, ""}, + {"(*Builtin).Id", Method, 5, ""}, + {"(*Builtin).Name", Method, 5, ""}, + {"(*Builtin).Parent", Method, 5, ""}, + {"(*Builtin).Pkg", Method, 5, ""}, + {"(*Builtin).Pos", Method, 5, ""}, + {"(*Builtin).String", Method, 5, ""}, + {"(*Builtin).Type", Method, 5, ""}, + {"(*Chan).Dir", Method, 5, ""}, + {"(*Chan).Elem", Method, 5, ""}, + {"(*Chan).String", Method, 5, ""}, + {"(*Chan).Underlying", Method, 5, ""}, + {"(*Checker).Files", Method, 5, ""}, + {"(*Config).Check", Method, 5, ""}, + {"(*Const).Exported", Method, 5, ""}, + {"(*Const).Id", Method, 5, ""}, + {"(*Const).Name", Method, 5, ""}, + {"(*Const).Parent", Method, 5, ""}, + {"(*Const).Pkg", Method, 5, ""}, + {"(*Const).Pos", Method, 5, ""}, + {"(*Const).String", Method, 5, ""}, + {"(*Const).Type", Method, 5, ""}, + {"(*Const).Val", Method, 5, ""}, + {"(*Func).Exported", Method, 5, ""}, + {"(*Func).FullName", Method, 5, ""}, + {"(*Func).Id", Method, 5, ""}, + {"(*Func).Name", Method, 5, ""}, + {"(*Func).Origin", Method, 19, ""}, + {"(*Func).Parent", Method, 5, ""}, + {"(*Func).Pkg", Method, 5, ""}, + {"(*Func).Pos", Method, 5, ""}, + {"(*Func).Scope", Method, 5, ""}, + {"(*Func).Signature", Method, 23, ""}, + {"(*Func).String", Method, 5, ""}, + {"(*Func).Type", Method, 5, ""}, + {"(*Info).ObjectOf", Method, 5, ""}, + {"(*Info).PkgNameOf", Method, 22, ""}, + {"(*Info).TypeOf", Method, 5, ""}, + {"(*Initializer).String", Method, 5, ""}, + {"(*Interface).Complete", Method, 5, ""}, + {"(*Interface).Embedded", Method, 5, ""}, + {"(*Interface).EmbeddedType", Method, 11, ""}, + {"(*Interface).EmbeddedTypes", Method, 24, ""}, + {"(*Interface).Empty", Method, 5, ""}, + {"(*Interface).ExplicitMethod", Method, 5, ""}, + {"(*Interface).ExplicitMethods", Method, 24, ""}, + {"(*Interface).IsComparable", Method, 18, ""}, + {"(*Interface).IsImplicit", Method, 18, ""}, + {"(*Interface).IsMethodSet", Method, 18, ""}, + {"(*Interface).MarkImplicit", Method, 18, ""}, + {"(*Interface).Method", Method, 5, ""}, + {"(*Interface).Methods", Method, 24, ""}, + {"(*Interface).NumEmbeddeds", Method, 5, ""}, + {"(*Interface).NumExplicitMethods", Method, 5, ""}, + {"(*Interface).NumMethods", Method, 5, ""}, + {"(*Interface).String", Method, 5, ""}, + {"(*Interface).Underlying", Method, 5, ""}, + {"(*Label).Exported", Method, 5, ""}, + {"(*Label).Id", Method, 5, ""}, + {"(*Label).Name", Method, 5, ""}, + {"(*Label).Parent", Method, 5, ""}, + {"(*Label).Pkg", Method, 5, ""}, + {"(*Label).Pos", Method, 5, ""}, + {"(*Label).String", Method, 5, ""}, + {"(*Label).Type", Method, 5, ""}, + {"(*Map).Elem", Method, 5, ""}, + {"(*Map).Key", Method, 5, ""}, + {"(*Map).String", Method, 5, ""}, + {"(*Map).Underlying", Method, 5, ""}, + {"(*MethodSet).At", Method, 5, ""}, + {"(*MethodSet).Len", Method, 5, ""}, + {"(*MethodSet).Lookup", Method, 5, ""}, + {"(*MethodSet).Methods", Method, 24, ""}, + 
{"(*MethodSet).String", Method, 5, ""}, + {"(*Named).AddMethod", Method, 5, ""}, + {"(*Named).Method", Method, 5, ""}, + {"(*Named).Methods", Method, 24, ""}, + {"(*Named).NumMethods", Method, 5, ""}, + {"(*Named).Obj", Method, 5, ""}, + {"(*Named).Origin", Method, 18, ""}, + {"(*Named).SetTypeParams", Method, 18, ""}, + {"(*Named).SetUnderlying", Method, 5, ""}, + {"(*Named).String", Method, 5, ""}, + {"(*Named).TypeArgs", Method, 18, ""}, + {"(*Named).TypeParams", Method, 18, ""}, + {"(*Named).Underlying", Method, 5, ""}, + {"(*Nil).Exported", Method, 5, ""}, + {"(*Nil).Id", Method, 5, ""}, + {"(*Nil).Name", Method, 5, ""}, + {"(*Nil).Parent", Method, 5, ""}, + {"(*Nil).Pkg", Method, 5, ""}, + {"(*Nil).Pos", Method, 5, ""}, + {"(*Nil).String", Method, 5, ""}, + {"(*Nil).Type", Method, 5, ""}, + {"(*Package).Complete", Method, 5, ""}, + {"(*Package).GoVersion", Method, 21, ""}, + {"(*Package).Imports", Method, 5, ""}, + {"(*Package).MarkComplete", Method, 5, ""}, + {"(*Package).Name", Method, 5, ""}, + {"(*Package).Path", Method, 5, ""}, + {"(*Package).Scope", Method, 5, ""}, + {"(*Package).SetImports", Method, 5, ""}, + {"(*Package).SetName", Method, 6, ""}, + {"(*Package).String", Method, 5, ""}, + {"(*PkgName).Exported", Method, 5, ""}, + {"(*PkgName).Id", Method, 5, ""}, + {"(*PkgName).Imported", Method, 5, ""}, + {"(*PkgName).Name", Method, 5, ""}, + {"(*PkgName).Parent", Method, 5, ""}, + {"(*PkgName).Pkg", Method, 5, ""}, + {"(*PkgName).Pos", Method, 5, ""}, + {"(*PkgName).String", Method, 5, ""}, + {"(*PkgName).Type", Method, 5, ""}, + {"(*Pointer).Elem", Method, 5, ""}, + {"(*Pointer).String", Method, 5, ""}, + {"(*Pointer).Underlying", Method, 5, ""}, + {"(*Scope).Child", Method, 5, ""}, + {"(*Scope).Children", Method, 24, ""}, + {"(*Scope).Contains", Method, 5, ""}, + {"(*Scope).End", Method, 5, ""}, + {"(*Scope).Innermost", Method, 5, ""}, + {"(*Scope).Insert", Method, 5, ""}, + {"(*Scope).Len", Method, 5, ""}, + {"(*Scope).Lookup", Method, 5, ""}, + {"(*Scope).LookupParent", Method, 5, ""}, + {"(*Scope).Names", Method, 5, ""}, + {"(*Scope).NumChildren", Method, 5, ""}, + {"(*Scope).Parent", Method, 5, ""}, + {"(*Scope).Pos", Method, 5, ""}, + {"(*Scope).String", Method, 5, ""}, + {"(*Scope).WriteTo", Method, 5, ""}, + {"(*Selection).Index", Method, 5, ""}, + {"(*Selection).Indirect", Method, 5, ""}, + {"(*Selection).Kind", Method, 5, ""}, + {"(*Selection).Obj", Method, 5, ""}, + {"(*Selection).Recv", Method, 5, ""}, + {"(*Selection).String", Method, 5, ""}, + {"(*Selection).Type", Method, 5, ""}, + {"(*Signature).Params", Method, 5, ""}, + {"(*Signature).Recv", Method, 5, ""}, + {"(*Signature).RecvTypeParams", Method, 18, ""}, + {"(*Signature).Results", Method, 5, ""}, + {"(*Signature).String", Method, 5, ""}, + {"(*Signature).TypeParams", Method, 18, ""}, + {"(*Signature).Underlying", Method, 5, ""}, + {"(*Signature).Variadic", Method, 5, ""}, + {"(*Slice).Elem", Method, 5, ""}, + {"(*Slice).String", Method, 5, ""}, + {"(*Slice).Underlying", Method, 5, ""}, + {"(*StdSizes).Alignof", Method, 5, ""}, + {"(*StdSizes).Offsetsof", Method, 5, ""}, + {"(*StdSizes).Sizeof", Method, 5, ""}, + {"(*Struct).Field", Method, 5, ""}, + {"(*Struct).Fields", Method, 24, ""}, + {"(*Struct).NumFields", Method, 5, ""}, + {"(*Struct).String", Method, 5, ""}, + {"(*Struct).Tag", Method, 5, ""}, + {"(*Struct).Underlying", Method, 5, ""}, + {"(*Term).String", Method, 18, ""}, + {"(*Term).Tilde", Method, 18, ""}, + {"(*Term).Type", Method, 18, ""}, + {"(*Tuple).At", Method, 5, ""}, + 
{"(*Tuple).Len", Method, 5, ""}, + {"(*Tuple).String", Method, 5, ""}, + {"(*Tuple).Underlying", Method, 5, ""}, + {"(*Tuple).Variables", Method, 24, ""}, + {"(*TypeList).At", Method, 18, ""}, + {"(*TypeList).Len", Method, 18, ""}, + {"(*TypeList).Types", Method, 24, ""}, + {"(*TypeName).Exported", Method, 5, ""}, + {"(*TypeName).Id", Method, 5, ""}, + {"(*TypeName).IsAlias", Method, 9, ""}, + {"(*TypeName).Name", Method, 5, ""}, + {"(*TypeName).Parent", Method, 5, ""}, + {"(*TypeName).Pkg", Method, 5, ""}, + {"(*TypeName).Pos", Method, 5, ""}, + {"(*TypeName).String", Method, 5, ""}, + {"(*TypeName).Type", Method, 5, ""}, + {"(*TypeParam).Constraint", Method, 18, ""}, + {"(*TypeParam).Index", Method, 18, ""}, + {"(*TypeParam).Obj", Method, 18, ""}, + {"(*TypeParam).SetConstraint", Method, 18, ""}, + {"(*TypeParam).String", Method, 18, ""}, + {"(*TypeParam).Underlying", Method, 18, ""}, + {"(*TypeParamList).At", Method, 18, ""}, + {"(*TypeParamList).Len", Method, 18, ""}, + {"(*TypeParamList).TypeParams", Method, 24, ""}, + {"(*Union).Len", Method, 18, ""}, + {"(*Union).String", Method, 18, ""}, + {"(*Union).Term", Method, 18, ""}, + {"(*Union).Terms", Method, 24, ""}, + {"(*Union).Underlying", Method, 18, ""}, + {"(*Var).Anonymous", Method, 5, ""}, + {"(*Var).Embedded", Method, 11, ""}, + {"(*Var).Exported", Method, 5, ""}, + {"(*Var).Id", Method, 5, ""}, + {"(*Var).IsField", Method, 5, ""}, + {"(*Var).Kind", Method, 25, ""}, + {"(*Var).Name", Method, 5, ""}, + {"(*Var).Origin", Method, 19, ""}, + {"(*Var).Parent", Method, 5, ""}, + {"(*Var).Pkg", Method, 5, ""}, + {"(*Var).Pos", Method, 5, ""}, + {"(*Var).SetKind", Method, 25, ""}, + {"(*Var).String", Method, 5, ""}, + {"(*Var).Type", Method, 5, ""}, + {"(Checker).ObjectOf", Method, 5, ""}, + {"(Checker).PkgNameOf", Method, 22, ""}, + {"(Checker).TypeOf", Method, 5, ""}, + {"(Error).Error", Method, 5, ""}, + {"(TypeAndValue).Addressable", Method, 5, ""}, + {"(TypeAndValue).Assignable", Method, 5, ""}, + {"(TypeAndValue).HasOk", Method, 5, ""}, + {"(TypeAndValue).IsBuiltin", Method, 5, ""}, + {"(TypeAndValue).IsNil", Method, 5, ""}, + {"(TypeAndValue).IsType", Method, 5, ""}, + {"(TypeAndValue).IsValue", Method, 5, ""}, + {"(TypeAndValue).IsVoid", Method, 5, ""}, + {"(VarKind).String", Method, 25, ""}, + {"Alias", Type, 22, ""}, + {"ArgumentError", Type, 18, ""}, + {"ArgumentError.Err", Field, 18, ""}, + {"ArgumentError.Index", Field, 18, ""}, + {"Array", Type, 5, ""}, + {"AssertableTo", Func, 5, "func(V *Interface, T Type) bool"}, + {"AssignableTo", Func, 5, "func(V Type, T Type) bool"}, + {"Basic", Type, 5, ""}, + {"BasicInfo", Type, 5, ""}, + {"BasicKind", Type, 5, ""}, + {"Bool", Const, 5, ""}, + {"Builtin", Type, 5, ""}, + {"Byte", Const, 5, ""}, + {"Chan", Type, 5, ""}, + {"ChanDir", Type, 5, ""}, + {"CheckExpr", Func, 13, "func(fset *token.FileSet, pkg *Package, pos token.Pos, expr ast.Expr, info *Info) (err error)"}, + {"Checker", Type, 5, ""}, + {"Checker.Info", Field, 5, ""}, + {"Comparable", Func, 5, "func(T Type) bool"}, + {"Complex128", Const, 5, ""}, + {"Complex64", Const, 5, ""}, + {"Config", Type, 5, ""}, + {"Config.Context", Field, 18, ""}, + {"Config.DisableUnusedImportCheck", Field, 5, ""}, + {"Config.Error", Field, 5, ""}, + {"Config.FakeImportC", Field, 5, ""}, + {"Config.GoVersion", Field, 18, ""}, + {"Config.IgnoreFuncBodies", Field, 5, ""}, + {"Config.Importer", Field, 5, ""}, + {"Config.Sizes", Field, 5, ""}, + {"Const", Type, 5, ""}, + {"Context", Type, 18, ""}, + {"ConvertibleTo", Func, 5, "func(V Type, T Type) 
bool"}, + {"DefPredeclaredTestFuncs", Func, 5, "func()"}, + {"Default", Func, 8, "func(t Type) Type"}, + {"Error", Type, 5, ""}, + {"Error.Fset", Field, 5, ""}, + {"Error.Msg", Field, 5, ""}, + {"Error.Pos", Field, 5, ""}, + {"Error.Soft", Field, 5, ""}, + {"Eval", Func, 5, "func(fset *token.FileSet, pkg *Package, pos token.Pos, expr string) (_ TypeAndValue, err error)"}, + {"ExprString", Func, 5, "func(x ast.Expr) string"}, + {"FieldVal", Const, 5, ""}, + {"FieldVar", Const, 25, ""}, + {"Float32", Const, 5, ""}, + {"Float64", Const, 5, ""}, + {"Func", Type, 5, ""}, + {"Id", Func, 5, "func(pkg *Package, name string) string"}, + {"Identical", Func, 5, "func(x Type, y Type) bool"}, + {"IdenticalIgnoreTags", Func, 8, "func(x Type, y Type) bool"}, + {"Implements", Func, 5, "func(V Type, T *Interface) bool"}, + {"ImportMode", Type, 6, ""}, + {"Importer", Type, 5, ""}, + {"ImporterFrom", Type, 6, ""}, + {"Info", Type, 5, ""}, + {"Info.Defs", Field, 5, ""}, + {"Info.FileVersions", Field, 22, ""}, + {"Info.Implicits", Field, 5, ""}, + {"Info.InitOrder", Field, 5, ""}, + {"Info.Instances", Field, 18, ""}, + {"Info.Scopes", Field, 5, ""}, + {"Info.Selections", Field, 5, ""}, + {"Info.Types", Field, 5, ""}, + {"Info.Uses", Field, 5, ""}, + {"Initializer", Type, 5, ""}, + {"Initializer.Lhs", Field, 5, ""}, + {"Initializer.Rhs", Field, 5, ""}, + {"Instance", Type, 18, ""}, + {"Instance.Type", Field, 18, ""}, + {"Instance.TypeArgs", Field, 18, ""}, + {"Instantiate", Func, 18, "func(ctxt *Context, orig Type, targs []Type, validate bool) (Type, error)"}, + {"Int", Const, 5, ""}, + {"Int16", Const, 5, ""}, + {"Int32", Const, 5, ""}, + {"Int64", Const, 5, ""}, + {"Int8", Const, 5, ""}, + {"Interface", Type, 5, ""}, + {"Invalid", Const, 5, ""}, + {"IsBoolean", Const, 5, ""}, + {"IsComplex", Const, 5, ""}, + {"IsConstType", Const, 5, ""}, + {"IsFloat", Const, 5, ""}, + {"IsInteger", Const, 5, ""}, + {"IsInterface", Func, 5, "func(t Type) bool"}, + {"IsNumeric", Const, 5, ""}, + {"IsOrdered", Const, 5, ""}, + {"IsString", Const, 5, ""}, + {"IsUnsigned", Const, 5, ""}, + {"IsUntyped", Const, 5, ""}, + {"Label", Type, 5, ""}, + {"LocalVar", Const, 25, ""}, + {"LookupFieldOrMethod", Func, 5, "func(T Type, addressable bool, pkg *Package, name string) (obj Object, index []int, indirect bool)"}, + {"LookupSelection", Func, 25, ""}, + {"Map", Type, 5, ""}, + {"MethodExpr", Const, 5, ""}, + {"MethodSet", Type, 5, ""}, + {"MethodVal", Const, 5, ""}, + {"MissingMethod", Func, 5, "func(V Type, T *Interface, static bool) (method *Func, wrongType bool)"}, + {"Named", Type, 5, ""}, + {"NewAlias", Func, 22, "func(obj *TypeName, rhs Type) *Alias"}, + {"NewArray", Func, 5, "func(elem Type, len int64) *Array"}, + {"NewChan", Func, 5, "func(dir ChanDir, elem Type) *Chan"}, + {"NewChecker", Func, 5, "func(conf *Config, fset *token.FileSet, pkg *Package, info *Info) *Checker"}, + {"NewConst", Func, 5, "func(pos token.Pos, pkg *Package, name string, typ Type, val constant.Value) *Const"}, + {"NewContext", Func, 18, "func() *Context"}, + {"NewField", Func, 5, "func(pos token.Pos, pkg *Package, name string, typ Type, embedded bool) *Var"}, + {"NewFunc", Func, 5, "func(pos token.Pos, pkg *Package, name string, sig *Signature) *Func"}, + {"NewInterface", Func, 5, "func(methods []*Func, embeddeds []*Named) *Interface"}, + {"NewInterfaceType", Func, 11, "func(methods []*Func, embeddeds []Type) *Interface"}, + {"NewLabel", Func, 5, "func(pos token.Pos, pkg *Package, name string) *Label"}, + {"NewMap", Func, 5, "func(key Type, elem Type) 
*Map"}, + {"NewMethodSet", Func, 5, "func(T Type) *MethodSet"}, + {"NewNamed", Func, 5, "func(obj *TypeName, underlying Type, methods []*Func) *Named"}, + {"NewPackage", Func, 5, "func(path string, name string) *Package"}, + {"NewParam", Func, 5, "func(pos token.Pos, pkg *Package, name string, typ Type) *Var"}, + {"NewPkgName", Func, 5, "func(pos token.Pos, pkg *Package, name string, imported *Package) *PkgName"}, + {"NewPointer", Func, 5, "func(elem Type) *Pointer"}, + {"NewScope", Func, 5, "func(parent *Scope, pos token.Pos, end token.Pos, comment string) *Scope"}, + {"NewSignature", Func, 5, "func(recv *Var, params *Tuple, results *Tuple, variadic bool) *Signature"}, + {"NewSignatureType", Func, 18, "func(recv *Var, recvTypeParams []*TypeParam, typeParams []*TypeParam, params *Tuple, results *Tuple, variadic bool) *Signature"}, + {"NewSlice", Func, 5, "func(elem Type) *Slice"}, + {"NewStruct", Func, 5, "func(fields []*Var, tags []string) *Struct"}, + {"NewTerm", Func, 18, "func(tilde bool, typ Type) *Term"}, + {"NewTuple", Func, 5, "func(x ...*Var) *Tuple"}, + {"NewTypeName", Func, 5, "func(pos token.Pos, pkg *Package, name string, typ Type) *TypeName"}, + {"NewTypeParam", Func, 18, "func(obj *TypeName, constraint Type) *TypeParam"}, + {"NewUnion", Func, 18, "func(terms []*Term) *Union"}, + {"NewVar", Func, 5, "func(pos token.Pos, pkg *Package, name string, typ Type) *Var"}, + {"Nil", Type, 5, ""}, + {"Object", Type, 5, ""}, + {"ObjectString", Func, 5, "func(obj Object, qf Qualifier) string"}, + {"Package", Type, 5, ""}, + {"PackageVar", Const, 25, ""}, + {"ParamVar", Const, 25, ""}, + {"PkgName", Type, 5, ""}, + {"Pointer", Type, 5, ""}, + {"Qualifier", Type, 5, ""}, + {"RecvOnly", Const, 5, ""}, + {"RecvVar", Const, 25, ""}, + {"RelativeTo", Func, 5, "func(pkg *Package) Qualifier"}, + {"ResultVar", Const, 25, ""}, + {"Rune", Const, 5, ""}, + {"Satisfies", Func, 20, "func(V Type, T *Interface) bool"}, + {"Scope", Type, 5, ""}, + {"Selection", Type, 5, ""}, + {"SelectionKind", Type, 5, ""}, + {"SelectionString", Func, 5, "func(s *Selection, qf Qualifier) string"}, + {"SendOnly", Const, 5, ""}, + {"SendRecv", Const, 5, ""}, + {"Signature", Type, 5, ""}, + {"Sizes", Type, 5, ""}, + {"SizesFor", Func, 9, "func(compiler string, arch string) Sizes"}, + {"Slice", Type, 5, ""}, + {"StdSizes", Type, 5, ""}, + {"StdSizes.MaxAlign", Field, 5, ""}, + {"StdSizes.WordSize", Field, 5, ""}, + {"String", Const, 5, ""}, + {"Struct", Type, 5, ""}, + {"Term", Type, 18, ""}, + {"Tuple", Type, 5, ""}, + {"Typ", Var, 5, ""}, + {"Type", Type, 5, ""}, + {"TypeAndValue", Type, 5, ""}, + {"TypeAndValue.Type", Field, 5, ""}, + {"TypeAndValue.Value", Field, 5, ""}, + {"TypeList", Type, 18, ""}, + {"TypeName", Type, 5, ""}, + {"TypeParam", Type, 18, ""}, + {"TypeParamList", Type, 18, ""}, + {"TypeString", Func, 5, "func(typ Type, qf Qualifier) string"}, + {"Uint", Const, 5, ""}, + {"Uint16", Const, 5, ""}, + {"Uint32", Const, 5, ""}, + {"Uint64", Const, 5, ""}, + {"Uint8", Const, 5, ""}, + {"Uintptr", Const, 5, ""}, + {"Unalias", Func, 22, "func(t Type) Type"}, + {"Union", Type, 18, ""}, + {"Universe", Var, 5, ""}, + {"Unsafe", Var, 5, ""}, + {"UnsafePointer", Const, 5, ""}, + {"UntypedBool", Const, 5, ""}, + {"UntypedComplex", Const, 5, ""}, + {"UntypedFloat", Const, 5, ""}, + {"UntypedInt", Const, 5, ""}, + {"UntypedNil", Const, 5, ""}, + {"UntypedRune", Const, 5, ""}, + {"UntypedString", Const, 5, ""}, + {"Var", Type, 5, ""}, + {"VarKind", Type, 25, ""}, + {"WriteExpr", Func, 5, "func(buf *bytes.Buffer, x 
ast.Expr)"}, + {"WriteSignature", Func, 5, "func(buf *bytes.Buffer, sig *Signature, qf Qualifier)"}, + {"WriteType", Func, 5, "func(buf *bytes.Buffer, typ Type, qf Qualifier)"}, }, "go/version": { - {"Compare", Func, 22}, - {"IsValid", Func, 22}, - {"Lang", Func, 22}, + {"Compare", Func, 22, "func(x string, y string) int"}, + {"IsValid", Func, 22, "func(x string) bool"}, + {"Lang", Func, 22, "func(x string) string"}, }, "hash": { - {"Hash", Type, 0}, - {"Hash32", Type, 0}, - {"Hash64", Type, 0}, + {"Hash", Type, 0, ""}, + {"Hash32", Type, 0, ""}, + {"Hash64", Type, 0, ""}, }, "hash/adler32": { - {"Checksum", Func, 0}, - {"New", Func, 0}, - {"Size", Const, 0}, + {"Checksum", Func, 0, "func(data []byte) uint32"}, + {"New", Func, 0, "func() hash.Hash32"}, + {"Size", Const, 0, ""}, }, "hash/crc32": { - {"Castagnoli", Const, 0}, - {"Checksum", Func, 0}, - {"ChecksumIEEE", Func, 0}, - {"IEEE", Const, 0}, - {"IEEETable", Var, 0}, - {"Koopman", Const, 0}, - {"MakeTable", Func, 0}, - {"New", Func, 0}, - {"NewIEEE", Func, 0}, - {"Size", Const, 0}, - {"Table", Type, 0}, - {"Update", Func, 0}, + {"Castagnoli", Const, 0, ""}, + {"Checksum", Func, 0, "func(data []byte, tab *Table) uint32"}, + {"ChecksumIEEE", Func, 0, "func(data []byte) uint32"}, + {"IEEE", Const, 0, ""}, + {"IEEETable", Var, 0, ""}, + {"Koopman", Const, 0, ""}, + {"MakeTable", Func, 0, "func(poly uint32) *Table"}, + {"New", Func, 0, "func(tab *Table) hash.Hash32"}, + {"NewIEEE", Func, 0, "func() hash.Hash32"}, + {"Size", Const, 0, ""}, + {"Table", Type, 0, ""}, + {"Update", Func, 0, "func(crc uint32, tab *Table, p []byte) uint32"}, }, "hash/crc64": { - {"Checksum", Func, 0}, - {"ECMA", Const, 0}, - {"ISO", Const, 0}, - {"MakeTable", Func, 0}, - {"New", Func, 0}, - {"Size", Const, 0}, - {"Table", Type, 0}, - {"Update", Func, 0}, + {"Checksum", Func, 0, "func(data []byte, tab *Table) uint64"}, + {"ECMA", Const, 0, ""}, + {"ISO", Const, 0, ""}, + {"MakeTable", Func, 0, "func(poly uint64) *Table"}, + {"New", Func, 0, "func(tab *Table) hash.Hash64"}, + {"Size", Const, 0, ""}, + {"Table", Type, 0, ""}, + {"Update", Func, 0, "func(crc uint64, tab *Table, p []byte) uint64"}, }, "hash/fnv": { - {"New128", Func, 9}, - {"New128a", Func, 9}, - {"New32", Func, 0}, - {"New32a", Func, 0}, - {"New64", Func, 0}, - {"New64a", Func, 0}, + {"New128", Func, 9, "func() hash.Hash"}, + {"New128a", Func, 9, "func() hash.Hash"}, + {"New32", Func, 0, "func() hash.Hash32"}, + {"New32a", Func, 0, "func() hash.Hash32"}, + {"New64", Func, 0, "func() hash.Hash64"}, + {"New64a", Func, 0, "func() hash.Hash64"}, }, "hash/maphash": { - {"(*Hash).BlockSize", Method, 14}, - {"(*Hash).Reset", Method, 14}, - {"(*Hash).Seed", Method, 14}, - {"(*Hash).SetSeed", Method, 14}, - {"(*Hash).Size", Method, 14}, - {"(*Hash).Sum", Method, 14}, - {"(*Hash).Sum64", Method, 14}, - {"(*Hash).Write", Method, 14}, - {"(*Hash).WriteByte", Method, 14}, - {"(*Hash).WriteString", Method, 14}, - {"Bytes", Func, 19}, - {"Comparable", Func, 24}, - {"Hash", Type, 14}, - {"MakeSeed", Func, 14}, - {"Seed", Type, 14}, - {"String", Func, 19}, - {"WriteComparable", Func, 24}, + {"(*Hash).BlockSize", Method, 14, ""}, + {"(*Hash).Reset", Method, 14, ""}, + {"(*Hash).Seed", Method, 14, ""}, + {"(*Hash).SetSeed", Method, 14, ""}, + {"(*Hash).Size", Method, 14, ""}, + {"(*Hash).Sum", Method, 14, ""}, + {"(*Hash).Sum64", Method, 14, ""}, + {"(*Hash).Write", Method, 14, ""}, + {"(*Hash).WriteByte", Method, 14, ""}, + {"(*Hash).WriteString", Method, 14, ""}, + {"Bytes", Func, 19, "func(seed Seed, b []byte) 
uint64"}, + {"Comparable", Func, 24, "func[T comparable](seed Seed, v T) uint64"}, + {"Hash", Type, 14, ""}, + {"MakeSeed", Func, 14, "func() Seed"}, + {"Seed", Type, 14, ""}, + {"String", Func, 19, "func(seed Seed, s string) uint64"}, + {"WriteComparable", Func, 24, "func[T comparable](h *Hash, x T)"}, }, "html": { - {"EscapeString", Func, 0}, - {"UnescapeString", Func, 0}, + {"EscapeString", Func, 0, "func(s string) string"}, + {"UnescapeString", Func, 0, "func(s string) string"}, }, "html/template": { - {"(*Error).Error", Method, 0}, - {"(*Template).AddParseTree", Method, 0}, - {"(*Template).Clone", Method, 0}, - {"(*Template).DefinedTemplates", Method, 6}, - {"(*Template).Delims", Method, 0}, - {"(*Template).Execute", Method, 0}, - {"(*Template).ExecuteTemplate", Method, 0}, - {"(*Template).Funcs", Method, 0}, - {"(*Template).Lookup", Method, 0}, - {"(*Template).Name", Method, 0}, - {"(*Template).New", Method, 0}, - {"(*Template).Option", Method, 5}, - {"(*Template).Parse", Method, 0}, - {"(*Template).ParseFS", Method, 16}, - {"(*Template).ParseFiles", Method, 0}, - {"(*Template).ParseGlob", Method, 0}, - {"(*Template).Templates", Method, 0}, - {"CSS", Type, 0}, - {"ErrAmbigContext", Const, 0}, - {"ErrBadHTML", Const, 0}, - {"ErrBranchEnd", Const, 0}, - {"ErrEndContext", Const, 0}, - {"ErrJSTemplate", Const, 21}, - {"ErrNoSuchTemplate", Const, 0}, - {"ErrOutputContext", Const, 0}, - {"ErrPartialCharset", Const, 0}, - {"ErrPartialEscape", Const, 0}, - {"ErrPredefinedEscaper", Const, 9}, - {"ErrRangeLoopReentry", Const, 0}, - {"ErrSlashAmbig", Const, 0}, - {"Error", Type, 0}, - {"Error.Description", Field, 0}, - {"Error.ErrorCode", Field, 0}, - {"Error.Line", Field, 0}, - {"Error.Name", Field, 0}, - {"Error.Node", Field, 4}, - {"ErrorCode", Type, 0}, - {"FuncMap", Type, 0}, - {"HTML", Type, 0}, - {"HTMLAttr", Type, 0}, - {"HTMLEscape", Func, 0}, - {"HTMLEscapeString", Func, 0}, - {"HTMLEscaper", Func, 0}, - {"IsTrue", Func, 6}, - {"JS", Type, 0}, - {"JSEscape", Func, 0}, - {"JSEscapeString", Func, 0}, - {"JSEscaper", Func, 0}, - {"JSStr", Type, 0}, - {"Must", Func, 0}, - {"New", Func, 0}, - {"OK", Const, 0}, - {"ParseFS", Func, 16}, - {"ParseFiles", Func, 0}, - {"ParseGlob", Func, 0}, - {"Srcset", Type, 10}, - {"Template", Type, 0}, - {"Template.Tree", Field, 2}, - {"URL", Type, 0}, - {"URLQueryEscaper", Func, 0}, + {"(*Error).Error", Method, 0, ""}, + {"(*Template).AddParseTree", Method, 0, ""}, + {"(*Template).Clone", Method, 0, ""}, + {"(*Template).DefinedTemplates", Method, 6, ""}, + {"(*Template).Delims", Method, 0, ""}, + {"(*Template).Execute", Method, 0, ""}, + {"(*Template).ExecuteTemplate", Method, 0, ""}, + {"(*Template).Funcs", Method, 0, ""}, + {"(*Template).Lookup", Method, 0, ""}, + {"(*Template).Name", Method, 0, ""}, + {"(*Template).New", Method, 0, ""}, + {"(*Template).Option", Method, 5, ""}, + {"(*Template).Parse", Method, 0, ""}, + {"(*Template).ParseFS", Method, 16, ""}, + {"(*Template).ParseFiles", Method, 0, ""}, + {"(*Template).ParseGlob", Method, 0, ""}, + {"(*Template).Templates", Method, 0, ""}, + {"CSS", Type, 0, ""}, + {"ErrAmbigContext", Const, 0, ""}, + {"ErrBadHTML", Const, 0, ""}, + {"ErrBranchEnd", Const, 0, ""}, + {"ErrEndContext", Const, 0, ""}, + {"ErrJSTemplate", Const, 21, ""}, + {"ErrNoSuchTemplate", Const, 0, ""}, + {"ErrOutputContext", Const, 0, ""}, + {"ErrPartialCharset", Const, 0, ""}, + {"ErrPartialEscape", Const, 0, ""}, + {"ErrPredefinedEscaper", Const, 9, ""}, + {"ErrRangeLoopReentry", Const, 0, ""}, + {"ErrSlashAmbig", Const, 0, ""}, + 
{"Error", Type, 0, ""}, + {"Error.Description", Field, 0, ""}, + {"Error.ErrorCode", Field, 0, ""}, + {"Error.Line", Field, 0, ""}, + {"Error.Name", Field, 0, ""}, + {"Error.Node", Field, 4, ""}, + {"ErrorCode", Type, 0, ""}, + {"FuncMap", Type, 0, ""}, + {"HTML", Type, 0, ""}, + {"HTMLAttr", Type, 0, ""}, + {"HTMLEscape", Func, 0, "func(w io.Writer, b []byte)"}, + {"HTMLEscapeString", Func, 0, "func(s string) string"}, + {"HTMLEscaper", Func, 0, "func(args ...any) string"}, + {"IsTrue", Func, 6, "func(val any) (truth bool, ok bool)"}, + {"JS", Type, 0, ""}, + {"JSEscape", Func, 0, "func(w io.Writer, b []byte)"}, + {"JSEscapeString", Func, 0, "func(s string) string"}, + {"JSEscaper", Func, 0, "func(args ...any) string"}, + {"JSStr", Type, 0, ""}, + {"Must", Func, 0, "func(t *Template, err error) *Template"}, + {"New", Func, 0, "func(name string) *Template"}, + {"OK", Const, 0, ""}, + {"ParseFS", Func, 16, "func(fs fs.FS, patterns ...string) (*Template, error)"}, + {"ParseFiles", Func, 0, "func(filenames ...string) (*Template, error)"}, + {"ParseGlob", Func, 0, "func(pattern string) (*Template, error)"}, + {"Srcset", Type, 10, ""}, + {"Template", Type, 0, ""}, + {"Template.Tree", Field, 2, ""}, + {"URL", Type, 0, ""}, + {"URLQueryEscaper", Func, 0, "func(args ...any) string"}, }, "image": { - {"(*Alpha).AlphaAt", Method, 4}, - {"(*Alpha).At", Method, 0}, - {"(*Alpha).Bounds", Method, 0}, - {"(*Alpha).ColorModel", Method, 0}, - {"(*Alpha).Opaque", Method, 0}, - {"(*Alpha).PixOffset", Method, 0}, - {"(*Alpha).RGBA64At", Method, 17}, - {"(*Alpha).Set", Method, 0}, - {"(*Alpha).SetAlpha", Method, 0}, - {"(*Alpha).SetRGBA64", Method, 17}, - {"(*Alpha).SubImage", Method, 0}, - {"(*Alpha16).Alpha16At", Method, 4}, - {"(*Alpha16).At", Method, 0}, - {"(*Alpha16).Bounds", Method, 0}, - {"(*Alpha16).ColorModel", Method, 0}, - {"(*Alpha16).Opaque", Method, 0}, - {"(*Alpha16).PixOffset", Method, 0}, - {"(*Alpha16).RGBA64At", Method, 17}, - {"(*Alpha16).Set", Method, 0}, - {"(*Alpha16).SetAlpha16", Method, 0}, - {"(*Alpha16).SetRGBA64", Method, 17}, - {"(*Alpha16).SubImage", Method, 0}, - {"(*CMYK).At", Method, 5}, - {"(*CMYK).Bounds", Method, 5}, - {"(*CMYK).CMYKAt", Method, 5}, - {"(*CMYK).ColorModel", Method, 5}, - {"(*CMYK).Opaque", Method, 5}, - {"(*CMYK).PixOffset", Method, 5}, - {"(*CMYK).RGBA64At", Method, 17}, - {"(*CMYK).Set", Method, 5}, - {"(*CMYK).SetCMYK", Method, 5}, - {"(*CMYK).SetRGBA64", Method, 17}, - {"(*CMYK).SubImage", Method, 5}, - {"(*Gray).At", Method, 0}, - {"(*Gray).Bounds", Method, 0}, - {"(*Gray).ColorModel", Method, 0}, - {"(*Gray).GrayAt", Method, 4}, - {"(*Gray).Opaque", Method, 0}, - {"(*Gray).PixOffset", Method, 0}, - {"(*Gray).RGBA64At", Method, 17}, - {"(*Gray).Set", Method, 0}, - {"(*Gray).SetGray", Method, 0}, - {"(*Gray).SetRGBA64", Method, 17}, - {"(*Gray).SubImage", Method, 0}, - {"(*Gray16).At", Method, 0}, - {"(*Gray16).Bounds", Method, 0}, - {"(*Gray16).ColorModel", Method, 0}, - {"(*Gray16).Gray16At", Method, 4}, - {"(*Gray16).Opaque", Method, 0}, - {"(*Gray16).PixOffset", Method, 0}, - {"(*Gray16).RGBA64At", Method, 17}, - {"(*Gray16).Set", Method, 0}, - {"(*Gray16).SetGray16", Method, 0}, - {"(*Gray16).SetRGBA64", Method, 17}, - {"(*Gray16).SubImage", Method, 0}, - {"(*NRGBA).At", Method, 0}, - {"(*NRGBA).Bounds", Method, 0}, - {"(*NRGBA).ColorModel", Method, 0}, - {"(*NRGBA).NRGBAAt", Method, 4}, - {"(*NRGBA).Opaque", Method, 0}, - {"(*NRGBA).PixOffset", Method, 0}, - {"(*NRGBA).RGBA64At", Method, 17}, - {"(*NRGBA).Set", Method, 0}, - {"(*NRGBA).SetNRGBA", 
Method, 0}, - {"(*NRGBA).SetRGBA64", Method, 17}, - {"(*NRGBA).SubImage", Method, 0}, - {"(*NRGBA64).At", Method, 0}, - {"(*NRGBA64).Bounds", Method, 0}, - {"(*NRGBA64).ColorModel", Method, 0}, - {"(*NRGBA64).NRGBA64At", Method, 4}, - {"(*NRGBA64).Opaque", Method, 0}, - {"(*NRGBA64).PixOffset", Method, 0}, - {"(*NRGBA64).RGBA64At", Method, 17}, - {"(*NRGBA64).Set", Method, 0}, - {"(*NRGBA64).SetNRGBA64", Method, 0}, - {"(*NRGBA64).SetRGBA64", Method, 17}, - {"(*NRGBA64).SubImage", Method, 0}, - {"(*NYCbCrA).AOffset", Method, 6}, - {"(*NYCbCrA).At", Method, 6}, - {"(*NYCbCrA).Bounds", Method, 6}, - {"(*NYCbCrA).COffset", Method, 6}, - {"(*NYCbCrA).ColorModel", Method, 6}, - {"(*NYCbCrA).NYCbCrAAt", Method, 6}, - {"(*NYCbCrA).Opaque", Method, 6}, - {"(*NYCbCrA).RGBA64At", Method, 17}, - {"(*NYCbCrA).SubImage", Method, 6}, - {"(*NYCbCrA).YCbCrAt", Method, 6}, - {"(*NYCbCrA).YOffset", Method, 6}, - {"(*Paletted).At", Method, 0}, - {"(*Paletted).Bounds", Method, 0}, - {"(*Paletted).ColorIndexAt", Method, 0}, - {"(*Paletted).ColorModel", Method, 0}, - {"(*Paletted).Opaque", Method, 0}, - {"(*Paletted).PixOffset", Method, 0}, - {"(*Paletted).RGBA64At", Method, 17}, - {"(*Paletted).Set", Method, 0}, - {"(*Paletted).SetColorIndex", Method, 0}, - {"(*Paletted).SetRGBA64", Method, 17}, - {"(*Paletted).SubImage", Method, 0}, - {"(*RGBA).At", Method, 0}, - {"(*RGBA).Bounds", Method, 0}, - {"(*RGBA).ColorModel", Method, 0}, - {"(*RGBA).Opaque", Method, 0}, - {"(*RGBA).PixOffset", Method, 0}, - {"(*RGBA).RGBA64At", Method, 17}, - {"(*RGBA).RGBAAt", Method, 4}, - {"(*RGBA).Set", Method, 0}, - {"(*RGBA).SetRGBA", Method, 0}, - {"(*RGBA).SetRGBA64", Method, 17}, - {"(*RGBA).SubImage", Method, 0}, - {"(*RGBA64).At", Method, 0}, - {"(*RGBA64).Bounds", Method, 0}, - {"(*RGBA64).ColorModel", Method, 0}, - {"(*RGBA64).Opaque", Method, 0}, - {"(*RGBA64).PixOffset", Method, 0}, - {"(*RGBA64).RGBA64At", Method, 4}, - {"(*RGBA64).Set", Method, 0}, - {"(*RGBA64).SetRGBA64", Method, 0}, - {"(*RGBA64).SubImage", Method, 0}, - {"(*Uniform).At", Method, 0}, - {"(*Uniform).Bounds", Method, 0}, - {"(*Uniform).ColorModel", Method, 0}, - {"(*Uniform).Convert", Method, 0}, - {"(*Uniform).Opaque", Method, 0}, - {"(*Uniform).RGBA", Method, 0}, - {"(*Uniform).RGBA64At", Method, 17}, - {"(*YCbCr).At", Method, 0}, - {"(*YCbCr).Bounds", Method, 0}, - {"(*YCbCr).COffset", Method, 0}, - {"(*YCbCr).ColorModel", Method, 0}, - {"(*YCbCr).Opaque", Method, 0}, - {"(*YCbCr).RGBA64At", Method, 17}, - {"(*YCbCr).SubImage", Method, 0}, - {"(*YCbCr).YCbCrAt", Method, 4}, - {"(*YCbCr).YOffset", Method, 0}, - {"(Point).Add", Method, 0}, - {"(Point).Div", Method, 0}, - {"(Point).Eq", Method, 0}, - {"(Point).In", Method, 0}, - {"(Point).Mod", Method, 0}, - {"(Point).Mul", Method, 0}, - {"(Point).String", Method, 0}, - {"(Point).Sub", Method, 0}, - {"(Rectangle).Add", Method, 0}, - {"(Rectangle).At", Method, 5}, - {"(Rectangle).Bounds", Method, 5}, - {"(Rectangle).Canon", Method, 0}, - {"(Rectangle).ColorModel", Method, 5}, - {"(Rectangle).Dx", Method, 0}, - {"(Rectangle).Dy", Method, 0}, - {"(Rectangle).Empty", Method, 0}, - {"(Rectangle).Eq", Method, 0}, - {"(Rectangle).In", Method, 0}, - {"(Rectangle).Inset", Method, 0}, - {"(Rectangle).Intersect", Method, 0}, - {"(Rectangle).Overlaps", Method, 0}, - {"(Rectangle).RGBA64At", Method, 17}, - {"(Rectangle).Size", Method, 0}, - {"(Rectangle).String", Method, 0}, - {"(Rectangle).Sub", Method, 0}, - {"(Rectangle).Union", Method, 0}, - {"(YCbCrSubsampleRatio).String", Method, 0}, - {"Alpha", Type, 0}, 
- {"Alpha.Pix", Field, 0}, - {"Alpha.Rect", Field, 0}, - {"Alpha.Stride", Field, 0}, - {"Alpha16", Type, 0}, - {"Alpha16.Pix", Field, 0}, - {"Alpha16.Rect", Field, 0}, - {"Alpha16.Stride", Field, 0}, - {"Black", Var, 0}, - {"CMYK", Type, 5}, - {"CMYK.Pix", Field, 5}, - {"CMYK.Rect", Field, 5}, - {"CMYK.Stride", Field, 5}, - {"Config", Type, 0}, - {"Config.ColorModel", Field, 0}, - {"Config.Height", Field, 0}, - {"Config.Width", Field, 0}, - {"Decode", Func, 0}, - {"DecodeConfig", Func, 0}, - {"ErrFormat", Var, 0}, - {"Gray", Type, 0}, - {"Gray.Pix", Field, 0}, - {"Gray.Rect", Field, 0}, - {"Gray.Stride", Field, 0}, - {"Gray16", Type, 0}, - {"Gray16.Pix", Field, 0}, - {"Gray16.Rect", Field, 0}, - {"Gray16.Stride", Field, 0}, - {"Image", Type, 0}, - {"NRGBA", Type, 0}, - {"NRGBA.Pix", Field, 0}, - {"NRGBA.Rect", Field, 0}, - {"NRGBA.Stride", Field, 0}, - {"NRGBA64", Type, 0}, - {"NRGBA64.Pix", Field, 0}, - {"NRGBA64.Rect", Field, 0}, - {"NRGBA64.Stride", Field, 0}, - {"NYCbCrA", Type, 6}, - {"NYCbCrA.A", Field, 6}, - {"NYCbCrA.AStride", Field, 6}, - {"NYCbCrA.YCbCr", Field, 6}, - {"NewAlpha", Func, 0}, - {"NewAlpha16", Func, 0}, - {"NewCMYK", Func, 5}, - {"NewGray", Func, 0}, - {"NewGray16", Func, 0}, - {"NewNRGBA", Func, 0}, - {"NewNRGBA64", Func, 0}, - {"NewNYCbCrA", Func, 6}, - {"NewPaletted", Func, 0}, - {"NewRGBA", Func, 0}, - {"NewRGBA64", Func, 0}, - {"NewUniform", Func, 0}, - {"NewYCbCr", Func, 0}, - {"Opaque", Var, 0}, - {"Paletted", Type, 0}, - {"Paletted.Palette", Field, 0}, - {"Paletted.Pix", Field, 0}, - {"Paletted.Rect", Field, 0}, - {"Paletted.Stride", Field, 0}, - {"PalettedImage", Type, 0}, - {"Point", Type, 0}, - {"Point.X", Field, 0}, - {"Point.Y", Field, 0}, - {"Pt", Func, 0}, - {"RGBA", Type, 0}, - {"RGBA.Pix", Field, 0}, - {"RGBA.Rect", Field, 0}, - {"RGBA.Stride", Field, 0}, - {"RGBA64", Type, 0}, - {"RGBA64.Pix", Field, 0}, - {"RGBA64.Rect", Field, 0}, - {"RGBA64.Stride", Field, 0}, - {"RGBA64Image", Type, 17}, - {"Rect", Func, 0}, - {"Rectangle", Type, 0}, - {"Rectangle.Max", Field, 0}, - {"Rectangle.Min", Field, 0}, - {"RegisterFormat", Func, 0}, - {"Transparent", Var, 0}, - {"Uniform", Type, 0}, - {"Uniform.C", Field, 0}, - {"White", Var, 0}, - {"YCbCr", Type, 0}, - {"YCbCr.CStride", Field, 0}, - {"YCbCr.Cb", Field, 0}, - {"YCbCr.Cr", Field, 0}, - {"YCbCr.Rect", Field, 0}, - {"YCbCr.SubsampleRatio", Field, 0}, - {"YCbCr.Y", Field, 0}, - {"YCbCr.YStride", Field, 0}, - {"YCbCrSubsampleRatio", Type, 0}, - {"YCbCrSubsampleRatio410", Const, 5}, - {"YCbCrSubsampleRatio411", Const, 5}, - {"YCbCrSubsampleRatio420", Const, 0}, - {"YCbCrSubsampleRatio422", Const, 0}, - {"YCbCrSubsampleRatio440", Const, 1}, - {"YCbCrSubsampleRatio444", Const, 0}, - {"ZP", Var, 0}, - {"ZR", Var, 0}, + {"(*Alpha).AlphaAt", Method, 4, ""}, + {"(*Alpha).At", Method, 0, ""}, + {"(*Alpha).Bounds", Method, 0, ""}, + {"(*Alpha).ColorModel", Method, 0, ""}, + {"(*Alpha).Opaque", Method, 0, ""}, + {"(*Alpha).PixOffset", Method, 0, ""}, + {"(*Alpha).RGBA64At", Method, 17, ""}, + {"(*Alpha).Set", Method, 0, ""}, + {"(*Alpha).SetAlpha", Method, 0, ""}, + {"(*Alpha).SetRGBA64", Method, 17, ""}, + {"(*Alpha).SubImage", Method, 0, ""}, + {"(*Alpha16).Alpha16At", Method, 4, ""}, + {"(*Alpha16).At", Method, 0, ""}, + {"(*Alpha16).Bounds", Method, 0, ""}, + {"(*Alpha16).ColorModel", Method, 0, ""}, + {"(*Alpha16).Opaque", Method, 0, ""}, + {"(*Alpha16).PixOffset", Method, 0, ""}, + {"(*Alpha16).RGBA64At", Method, 17, ""}, + {"(*Alpha16).Set", Method, 0, ""}, + {"(*Alpha16).SetAlpha16", Method, 0, ""}, + 
{"(*Alpha16).SetRGBA64", Method, 17, ""}, + {"(*Alpha16).SubImage", Method, 0, ""}, + {"(*CMYK).At", Method, 5, ""}, + {"(*CMYK).Bounds", Method, 5, ""}, + {"(*CMYK).CMYKAt", Method, 5, ""}, + {"(*CMYK).ColorModel", Method, 5, ""}, + {"(*CMYK).Opaque", Method, 5, ""}, + {"(*CMYK).PixOffset", Method, 5, ""}, + {"(*CMYK).RGBA64At", Method, 17, ""}, + {"(*CMYK).Set", Method, 5, ""}, + {"(*CMYK).SetCMYK", Method, 5, ""}, + {"(*CMYK).SetRGBA64", Method, 17, ""}, + {"(*CMYK).SubImage", Method, 5, ""}, + {"(*Gray).At", Method, 0, ""}, + {"(*Gray).Bounds", Method, 0, ""}, + {"(*Gray).ColorModel", Method, 0, ""}, + {"(*Gray).GrayAt", Method, 4, ""}, + {"(*Gray).Opaque", Method, 0, ""}, + {"(*Gray).PixOffset", Method, 0, ""}, + {"(*Gray).RGBA64At", Method, 17, ""}, + {"(*Gray).Set", Method, 0, ""}, + {"(*Gray).SetGray", Method, 0, ""}, + {"(*Gray).SetRGBA64", Method, 17, ""}, + {"(*Gray).SubImage", Method, 0, ""}, + {"(*Gray16).At", Method, 0, ""}, + {"(*Gray16).Bounds", Method, 0, ""}, + {"(*Gray16).ColorModel", Method, 0, ""}, + {"(*Gray16).Gray16At", Method, 4, ""}, + {"(*Gray16).Opaque", Method, 0, ""}, + {"(*Gray16).PixOffset", Method, 0, ""}, + {"(*Gray16).RGBA64At", Method, 17, ""}, + {"(*Gray16).Set", Method, 0, ""}, + {"(*Gray16).SetGray16", Method, 0, ""}, + {"(*Gray16).SetRGBA64", Method, 17, ""}, + {"(*Gray16).SubImage", Method, 0, ""}, + {"(*NRGBA).At", Method, 0, ""}, + {"(*NRGBA).Bounds", Method, 0, ""}, + {"(*NRGBA).ColorModel", Method, 0, ""}, + {"(*NRGBA).NRGBAAt", Method, 4, ""}, + {"(*NRGBA).Opaque", Method, 0, ""}, + {"(*NRGBA).PixOffset", Method, 0, ""}, + {"(*NRGBA).RGBA64At", Method, 17, ""}, + {"(*NRGBA).Set", Method, 0, ""}, + {"(*NRGBA).SetNRGBA", Method, 0, ""}, + {"(*NRGBA).SetRGBA64", Method, 17, ""}, + {"(*NRGBA).SubImage", Method, 0, ""}, + {"(*NRGBA64).At", Method, 0, ""}, + {"(*NRGBA64).Bounds", Method, 0, ""}, + {"(*NRGBA64).ColorModel", Method, 0, ""}, + {"(*NRGBA64).NRGBA64At", Method, 4, ""}, + {"(*NRGBA64).Opaque", Method, 0, ""}, + {"(*NRGBA64).PixOffset", Method, 0, ""}, + {"(*NRGBA64).RGBA64At", Method, 17, ""}, + {"(*NRGBA64).Set", Method, 0, ""}, + {"(*NRGBA64).SetNRGBA64", Method, 0, ""}, + {"(*NRGBA64).SetRGBA64", Method, 17, ""}, + {"(*NRGBA64).SubImage", Method, 0, ""}, + {"(*NYCbCrA).AOffset", Method, 6, ""}, + {"(*NYCbCrA).At", Method, 6, ""}, + {"(*NYCbCrA).Bounds", Method, 6, ""}, + {"(*NYCbCrA).COffset", Method, 6, ""}, + {"(*NYCbCrA).ColorModel", Method, 6, ""}, + {"(*NYCbCrA).NYCbCrAAt", Method, 6, ""}, + {"(*NYCbCrA).Opaque", Method, 6, ""}, + {"(*NYCbCrA).RGBA64At", Method, 17, ""}, + {"(*NYCbCrA).SubImage", Method, 6, ""}, + {"(*NYCbCrA).YCbCrAt", Method, 6, ""}, + {"(*NYCbCrA).YOffset", Method, 6, ""}, + {"(*Paletted).At", Method, 0, ""}, + {"(*Paletted).Bounds", Method, 0, ""}, + {"(*Paletted).ColorIndexAt", Method, 0, ""}, + {"(*Paletted).ColorModel", Method, 0, ""}, + {"(*Paletted).Opaque", Method, 0, ""}, + {"(*Paletted).PixOffset", Method, 0, ""}, + {"(*Paletted).RGBA64At", Method, 17, ""}, + {"(*Paletted).Set", Method, 0, ""}, + {"(*Paletted).SetColorIndex", Method, 0, ""}, + {"(*Paletted).SetRGBA64", Method, 17, ""}, + {"(*Paletted).SubImage", Method, 0, ""}, + {"(*RGBA).At", Method, 0, ""}, + {"(*RGBA).Bounds", Method, 0, ""}, + {"(*RGBA).ColorModel", Method, 0, ""}, + {"(*RGBA).Opaque", Method, 0, ""}, + {"(*RGBA).PixOffset", Method, 0, ""}, + {"(*RGBA).RGBA64At", Method, 17, ""}, + {"(*RGBA).RGBAAt", Method, 4, ""}, + {"(*RGBA).Set", Method, 0, ""}, + {"(*RGBA).SetRGBA", Method, 0, ""}, + {"(*RGBA).SetRGBA64", Method, 17, ""}, + 
{"(*RGBA).SubImage", Method, 0, ""}, + {"(*RGBA64).At", Method, 0, ""}, + {"(*RGBA64).Bounds", Method, 0, ""}, + {"(*RGBA64).ColorModel", Method, 0, ""}, + {"(*RGBA64).Opaque", Method, 0, ""}, + {"(*RGBA64).PixOffset", Method, 0, ""}, + {"(*RGBA64).RGBA64At", Method, 4, ""}, + {"(*RGBA64).Set", Method, 0, ""}, + {"(*RGBA64).SetRGBA64", Method, 0, ""}, + {"(*RGBA64).SubImage", Method, 0, ""}, + {"(*Uniform).At", Method, 0, ""}, + {"(*Uniform).Bounds", Method, 0, ""}, + {"(*Uniform).ColorModel", Method, 0, ""}, + {"(*Uniform).Convert", Method, 0, ""}, + {"(*Uniform).Opaque", Method, 0, ""}, + {"(*Uniform).RGBA", Method, 0, ""}, + {"(*Uniform).RGBA64At", Method, 17, ""}, + {"(*YCbCr).At", Method, 0, ""}, + {"(*YCbCr).Bounds", Method, 0, ""}, + {"(*YCbCr).COffset", Method, 0, ""}, + {"(*YCbCr).ColorModel", Method, 0, ""}, + {"(*YCbCr).Opaque", Method, 0, ""}, + {"(*YCbCr).RGBA64At", Method, 17, ""}, + {"(*YCbCr).SubImage", Method, 0, ""}, + {"(*YCbCr).YCbCrAt", Method, 4, ""}, + {"(*YCbCr).YOffset", Method, 0, ""}, + {"(Point).Add", Method, 0, ""}, + {"(Point).Div", Method, 0, ""}, + {"(Point).Eq", Method, 0, ""}, + {"(Point).In", Method, 0, ""}, + {"(Point).Mod", Method, 0, ""}, + {"(Point).Mul", Method, 0, ""}, + {"(Point).String", Method, 0, ""}, + {"(Point).Sub", Method, 0, ""}, + {"(Rectangle).Add", Method, 0, ""}, + {"(Rectangle).At", Method, 5, ""}, + {"(Rectangle).Bounds", Method, 5, ""}, + {"(Rectangle).Canon", Method, 0, ""}, + {"(Rectangle).ColorModel", Method, 5, ""}, + {"(Rectangle).Dx", Method, 0, ""}, + {"(Rectangle).Dy", Method, 0, ""}, + {"(Rectangle).Empty", Method, 0, ""}, + {"(Rectangle).Eq", Method, 0, ""}, + {"(Rectangle).In", Method, 0, ""}, + {"(Rectangle).Inset", Method, 0, ""}, + {"(Rectangle).Intersect", Method, 0, ""}, + {"(Rectangle).Overlaps", Method, 0, ""}, + {"(Rectangle).RGBA64At", Method, 17, ""}, + {"(Rectangle).Size", Method, 0, ""}, + {"(Rectangle).String", Method, 0, ""}, + {"(Rectangle).Sub", Method, 0, ""}, + {"(Rectangle).Union", Method, 0, ""}, + {"(YCbCrSubsampleRatio).String", Method, 0, ""}, + {"Alpha", Type, 0, ""}, + {"Alpha.Pix", Field, 0, ""}, + {"Alpha.Rect", Field, 0, ""}, + {"Alpha.Stride", Field, 0, ""}, + {"Alpha16", Type, 0, ""}, + {"Alpha16.Pix", Field, 0, ""}, + {"Alpha16.Rect", Field, 0, ""}, + {"Alpha16.Stride", Field, 0, ""}, + {"Black", Var, 0, ""}, + {"CMYK", Type, 5, ""}, + {"CMYK.Pix", Field, 5, ""}, + {"CMYK.Rect", Field, 5, ""}, + {"CMYK.Stride", Field, 5, ""}, + {"Config", Type, 0, ""}, + {"Config.ColorModel", Field, 0, ""}, + {"Config.Height", Field, 0, ""}, + {"Config.Width", Field, 0, ""}, + {"Decode", Func, 0, "func(r io.Reader) (Image, string, error)"}, + {"DecodeConfig", Func, 0, "func(r io.Reader) (Config, string, error)"}, + {"ErrFormat", Var, 0, ""}, + {"Gray", Type, 0, ""}, + {"Gray.Pix", Field, 0, ""}, + {"Gray.Rect", Field, 0, ""}, + {"Gray.Stride", Field, 0, ""}, + {"Gray16", Type, 0, ""}, + {"Gray16.Pix", Field, 0, ""}, + {"Gray16.Rect", Field, 0, ""}, + {"Gray16.Stride", Field, 0, ""}, + {"Image", Type, 0, ""}, + {"NRGBA", Type, 0, ""}, + {"NRGBA.Pix", Field, 0, ""}, + {"NRGBA.Rect", Field, 0, ""}, + {"NRGBA.Stride", Field, 0, ""}, + {"NRGBA64", Type, 0, ""}, + {"NRGBA64.Pix", Field, 0, ""}, + {"NRGBA64.Rect", Field, 0, ""}, + {"NRGBA64.Stride", Field, 0, ""}, + {"NYCbCrA", Type, 6, ""}, + {"NYCbCrA.A", Field, 6, ""}, + {"NYCbCrA.AStride", Field, 6, ""}, + {"NYCbCrA.YCbCr", Field, 6, ""}, + {"NewAlpha", Func, 0, "func(r Rectangle) *Alpha"}, + {"NewAlpha16", Func, 0, "func(r Rectangle) *Alpha16"}, + {"NewCMYK", 
Func, 5, "func(r Rectangle) *CMYK"}, + {"NewGray", Func, 0, "func(r Rectangle) *Gray"}, + {"NewGray16", Func, 0, "func(r Rectangle) *Gray16"}, + {"NewNRGBA", Func, 0, "func(r Rectangle) *NRGBA"}, + {"NewNRGBA64", Func, 0, "func(r Rectangle) *NRGBA64"}, + {"NewNYCbCrA", Func, 6, "func(r Rectangle, subsampleRatio YCbCrSubsampleRatio) *NYCbCrA"}, + {"NewPaletted", Func, 0, "func(r Rectangle, p color.Palette) *Paletted"}, + {"NewRGBA", Func, 0, "func(r Rectangle) *RGBA"}, + {"NewRGBA64", Func, 0, "func(r Rectangle) *RGBA64"}, + {"NewUniform", Func, 0, "func(c color.Color) *Uniform"}, + {"NewYCbCr", Func, 0, "func(r Rectangle, subsampleRatio YCbCrSubsampleRatio) *YCbCr"}, + {"Opaque", Var, 0, ""}, + {"Paletted", Type, 0, ""}, + {"Paletted.Palette", Field, 0, ""}, + {"Paletted.Pix", Field, 0, ""}, + {"Paletted.Rect", Field, 0, ""}, + {"Paletted.Stride", Field, 0, ""}, + {"PalettedImage", Type, 0, ""}, + {"Point", Type, 0, ""}, + {"Point.X", Field, 0, ""}, + {"Point.Y", Field, 0, ""}, + {"Pt", Func, 0, "func(X int, Y int) Point"}, + {"RGBA", Type, 0, ""}, + {"RGBA.Pix", Field, 0, ""}, + {"RGBA.Rect", Field, 0, ""}, + {"RGBA.Stride", Field, 0, ""}, + {"RGBA64", Type, 0, ""}, + {"RGBA64.Pix", Field, 0, ""}, + {"RGBA64.Rect", Field, 0, ""}, + {"RGBA64.Stride", Field, 0, ""}, + {"RGBA64Image", Type, 17, ""}, + {"Rect", Func, 0, "func(x0 int, y0 int, x1 int, y1 int) Rectangle"}, + {"Rectangle", Type, 0, ""}, + {"Rectangle.Max", Field, 0, ""}, + {"Rectangle.Min", Field, 0, ""}, + {"RegisterFormat", Func, 0, "func(name string, magic string, decode func(io.Reader) (Image, error), decodeConfig func(io.Reader) (Config, error))"}, + {"Transparent", Var, 0, ""}, + {"Uniform", Type, 0, ""}, + {"Uniform.C", Field, 0, ""}, + {"White", Var, 0, ""}, + {"YCbCr", Type, 0, ""}, + {"YCbCr.CStride", Field, 0, ""}, + {"YCbCr.Cb", Field, 0, ""}, + {"YCbCr.Cr", Field, 0, ""}, + {"YCbCr.Rect", Field, 0, ""}, + {"YCbCr.SubsampleRatio", Field, 0, ""}, + {"YCbCr.Y", Field, 0, ""}, + {"YCbCr.YStride", Field, 0, ""}, + {"YCbCrSubsampleRatio", Type, 0, ""}, + {"YCbCrSubsampleRatio410", Const, 5, ""}, + {"YCbCrSubsampleRatio411", Const, 5, ""}, + {"YCbCrSubsampleRatio420", Const, 0, ""}, + {"YCbCrSubsampleRatio422", Const, 0, ""}, + {"YCbCrSubsampleRatio440", Const, 1, ""}, + {"YCbCrSubsampleRatio444", Const, 0, ""}, + {"ZP", Var, 0, ""}, + {"ZR", Var, 0, ""}, }, "image/color": { - {"(Alpha).RGBA", Method, 0}, - {"(Alpha16).RGBA", Method, 0}, - {"(CMYK).RGBA", Method, 5}, - {"(Gray).RGBA", Method, 0}, - {"(Gray16).RGBA", Method, 0}, - {"(NRGBA).RGBA", Method, 0}, - {"(NRGBA64).RGBA", Method, 0}, - {"(NYCbCrA).RGBA", Method, 6}, - {"(Palette).Convert", Method, 0}, - {"(Palette).Index", Method, 0}, - {"(RGBA).RGBA", Method, 0}, - {"(RGBA64).RGBA", Method, 0}, - {"(YCbCr).RGBA", Method, 0}, - {"Alpha", Type, 0}, - {"Alpha.A", Field, 0}, - {"Alpha16", Type, 0}, - {"Alpha16.A", Field, 0}, - {"Alpha16Model", Var, 0}, - {"AlphaModel", Var, 0}, - {"Black", Var, 0}, - {"CMYK", Type, 5}, - {"CMYK.C", Field, 5}, - {"CMYK.K", Field, 5}, - {"CMYK.M", Field, 5}, - {"CMYK.Y", Field, 5}, - {"CMYKModel", Var, 5}, - {"CMYKToRGB", Func, 5}, - {"Color", Type, 0}, - {"Gray", Type, 0}, - {"Gray.Y", Field, 0}, - {"Gray16", Type, 0}, - {"Gray16.Y", Field, 0}, - {"Gray16Model", Var, 0}, - {"GrayModel", Var, 0}, - {"Model", Type, 0}, - {"ModelFunc", Func, 0}, - {"NRGBA", Type, 0}, - {"NRGBA.A", Field, 0}, - {"NRGBA.B", Field, 0}, - {"NRGBA.G", Field, 0}, - {"NRGBA.R", Field, 0}, - {"NRGBA64", Type, 0}, - {"NRGBA64.A", Field, 0}, - {"NRGBA64.B", Field, 
0}, - {"NRGBA64.G", Field, 0}, - {"NRGBA64.R", Field, 0}, - {"NRGBA64Model", Var, 0}, - {"NRGBAModel", Var, 0}, - {"NYCbCrA", Type, 6}, - {"NYCbCrA.A", Field, 6}, - {"NYCbCrA.YCbCr", Field, 6}, - {"NYCbCrAModel", Var, 6}, - {"Opaque", Var, 0}, - {"Palette", Type, 0}, - {"RGBA", Type, 0}, - {"RGBA.A", Field, 0}, - {"RGBA.B", Field, 0}, - {"RGBA.G", Field, 0}, - {"RGBA.R", Field, 0}, - {"RGBA64", Type, 0}, - {"RGBA64.A", Field, 0}, - {"RGBA64.B", Field, 0}, - {"RGBA64.G", Field, 0}, - {"RGBA64.R", Field, 0}, - {"RGBA64Model", Var, 0}, - {"RGBAModel", Var, 0}, - {"RGBToCMYK", Func, 5}, - {"RGBToYCbCr", Func, 0}, - {"Transparent", Var, 0}, - {"White", Var, 0}, - {"YCbCr", Type, 0}, - {"YCbCr.Cb", Field, 0}, - {"YCbCr.Cr", Field, 0}, - {"YCbCr.Y", Field, 0}, - {"YCbCrModel", Var, 0}, - {"YCbCrToRGB", Func, 0}, + {"(Alpha).RGBA", Method, 0, ""}, + {"(Alpha16).RGBA", Method, 0, ""}, + {"(CMYK).RGBA", Method, 5, ""}, + {"(Gray).RGBA", Method, 0, ""}, + {"(Gray16).RGBA", Method, 0, ""}, + {"(NRGBA).RGBA", Method, 0, ""}, + {"(NRGBA64).RGBA", Method, 0, ""}, + {"(NYCbCrA).RGBA", Method, 6, ""}, + {"(Palette).Convert", Method, 0, ""}, + {"(Palette).Index", Method, 0, ""}, + {"(RGBA).RGBA", Method, 0, ""}, + {"(RGBA64).RGBA", Method, 0, ""}, + {"(YCbCr).RGBA", Method, 0, ""}, + {"Alpha", Type, 0, ""}, + {"Alpha.A", Field, 0, ""}, + {"Alpha16", Type, 0, ""}, + {"Alpha16.A", Field, 0, ""}, + {"Alpha16Model", Var, 0, ""}, + {"AlphaModel", Var, 0, ""}, + {"Black", Var, 0, ""}, + {"CMYK", Type, 5, ""}, + {"CMYK.C", Field, 5, ""}, + {"CMYK.K", Field, 5, ""}, + {"CMYK.M", Field, 5, ""}, + {"CMYK.Y", Field, 5, ""}, + {"CMYKModel", Var, 5, ""}, + {"CMYKToRGB", Func, 5, "func(c uint8, m uint8, y uint8, k uint8) (uint8, uint8, uint8)"}, + {"Color", Type, 0, ""}, + {"Gray", Type, 0, ""}, + {"Gray.Y", Field, 0, ""}, + {"Gray16", Type, 0, ""}, + {"Gray16.Y", Field, 0, ""}, + {"Gray16Model", Var, 0, ""}, + {"GrayModel", Var, 0, ""}, + {"Model", Type, 0, ""}, + {"ModelFunc", Func, 0, "func(f func(Color) Color) Model"}, + {"NRGBA", Type, 0, ""}, + {"NRGBA.A", Field, 0, ""}, + {"NRGBA.B", Field, 0, ""}, + {"NRGBA.G", Field, 0, ""}, + {"NRGBA.R", Field, 0, ""}, + {"NRGBA64", Type, 0, ""}, + {"NRGBA64.A", Field, 0, ""}, + {"NRGBA64.B", Field, 0, ""}, + {"NRGBA64.G", Field, 0, ""}, + {"NRGBA64.R", Field, 0, ""}, + {"NRGBA64Model", Var, 0, ""}, + {"NRGBAModel", Var, 0, ""}, + {"NYCbCrA", Type, 6, ""}, + {"NYCbCrA.A", Field, 6, ""}, + {"NYCbCrA.YCbCr", Field, 6, ""}, + {"NYCbCrAModel", Var, 6, ""}, + {"Opaque", Var, 0, ""}, + {"Palette", Type, 0, ""}, + {"RGBA", Type, 0, ""}, + {"RGBA.A", Field, 0, ""}, + {"RGBA.B", Field, 0, ""}, + {"RGBA.G", Field, 0, ""}, + {"RGBA.R", Field, 0, ""}, + {"RGBA64", Type, 0, ""}, + {"RGBA64.A", Field, 0, ""}, + {"RGBA64.B", Field, 0, ""}, + {"RGBA64.G", Field, 0, ""}, + {"RGBA64.R", Field, 0, ""}, + {"RGBA64Model", Var, 0, ""}, + {"RGBAModel", Var, 0, ""}, + {"RGBToCMYK", Func, 5, "func(r uint8, g uint8, b uint8) (uint8, uint8, uint8, uint8)"}, + {"RGBToYCbCr", Func, 0, "func(r uint8, g uint8, b uint8) (uint8, uint8, uint8)"}, + {"Transparent", Var, 0, ""}, + {"White", Var, 0, ""}, + {"YCbCr", Type, 0, ""}, + {"YCbCr.Cb", Field, 0, ""}, + {"YCbCr.Cr", Field, 0, ""}, + {"YCbCr.Y", Field, 0, ""}, + {"YCbCrModel", Var, 0, ""}, + {"YCbCrToRGB", Func, 0, "func(y uint8, cb uint8, cr uint8) (uint8, uint8, uint8)"}, }, "image/color/palette": { - {"Plan9", Var, 2}, - {"WebSafe", Var, 2}, + {"Plan9", Var, 2, ""}, + {"WebSafe", Var, 2, ""}, }, "image/draw": { - {"(Op).Draw", Method, 2}, - {"Draw", 
Func, 0}, - {"DrawMask", Func, 0}, - {"Drawer", Type, 2}, - {"FloydSteinberg", Var, 2}, - {"Image", Type, 0}, - {"Op", Type, 0}, - {"Over", Const, 0}, - {"Quantizer", Type, 2}, - {"RGBA64Image", Type, 17}, - {"Src", Const, 0}, + {"(Op).Draw", Method, 2, ""}, + {"Draw", Func, 0, "func(dst Image, r image.Rectangle, src image.Image, sp image.Point, op Op)"}, + {"DrawMask", Func, 0, "func(dst Image, r image.Rectangle, src image.Image, sp image.Point, mask image.Image, mp image.Point, op Op)"}, + {"Drawer", Type, 2, ""}, + {"FloydSteinberg", Var, 2, ""}, + {"Image", Type, 0, ""}, + {"Op", Type, 0, ""}, + {"Over", Const, 0, ""}, + {"Quantizer", Type, 2, ""}, + {"RGBA64Image", Type, 17, ""}, + {"Src", Const, 0, ""}, }, "image/gif": { - {"Decode", Func, 0}, - {"DecodeAll", Func, 0}, - {"DecodeConfig", Func, 0}, - {"DisposalBackground", Const, 5}, - {"DisposalNone", Const, 5}, - {"DisposalPrevious", Const, 5}, - {"Encode", Func, 2}, - {"EncodeAll", Func, 2}, - {"GIF", Type, 0}, - {"GIF.BackgroundIndex", Field, 5}, - {"GIF.Config", Field, 5}, - {"GIF.Delay", Field, 0}, - {"GIF.Disposal", Field, 5}, - {"GIF.Image", Field, 0}, - {"GIF.LoopCount", Field, 0}, - {"Options", Type, 2}, - {"Options.Drawer", Field, 2}, - {"Options.NumColors", Field, 2}, - {"Options.Quantizer", Field, 2}, + {"Decode", Func, 0, "func(r io.Reader) (image.Image, error)"}, + {"DecodeAll", Func, 0, "func(r io.Reader) (*GIF, error)"}, + {"DecodeConfig", Func, 0, "func(r io.Reader) (image.Config, error)"}, + {"DisposalBackground", Const, 5, ""}, + {"DisposalNone", Const, 5, ""}, + {"DisposalPrevious", Const, 5, ""}, + {"Encode", Func, 2, "func(w io.Writer, m image.Image, o *Options) error"}, + {"EncodeAll", Func, 2, "func(w io.Writer, g *GIF) error"}, + {"GIF", Type, 0, ""}, + {"GIF.BackgroundIndex", Field, 5, ""}, + {"GIF.Config", Field, 5, ""}, + {"GIF.Delay", Field, 0, ""}, + {"GIF.Disposal", Field, 5, ""}, + {"GIF.Image", Field, 0, ""}, + {"GIF.LoopCount", Field, 0, ""}, + {"Options", Type, 2, ""}, + {"Options.Drawer", Field, 2, ""}, + {"Options.NumColors", Field, 2, ""}, + {"Options.Quantizer", Field, 2, ""}, }, "image/jpeg": { - {"(FormatError).Error", Method, 0}, - {"(UnsupportedError).Error", Method, 0}, - {"Decode", Func, 0}, - {"DecodeConfig", Func, 0}, - {"DefaultQuality", Const, 0}, - {"Encode", Func, 0}, - {"FormatError", Type, 0}, - {"Options", Type, 0}, - {"Options.Quality", Field, 0}, - {"Reader", Type, 0}, - {"UnsupportedError", Type, 0}, + {"(FormatError).Error", Method, 0, ""}, + {"(UnsupportedError).Error", Method, 0, ""}, + {"Decode", Func, 0, "func(r io.Reader) (image.Image, error)"}, + {"DecodeConfig", Func, 0, "func(r io.Reader) (image.Config, error)"}, + {"DefaultQuality", Const, 0, ""}, + {"Encode", Func, 0, "func(w io.Writer, m image.Image, o *Options) error"}, + {"FormatError", Type, 0, ""}, + {"Options", Type, 0, ""}, + {"Options.Quality", Field, 0, ""}, + {"Reader", Type, 0, ""}, + {"UnsupportedError", Type, 0, ""}, }, "image/png": { - {"(*Encoder).Encode", Method, 4}, - {"(FormatError).Error", Method, 0}, - {"(UnsupportedError).Error", Method, 0}, - {"BestCompression", Const, 4}, - {"BestSpeed", Const, 4}, - {"CompressionLevel", Type, 4}, - {"Decode", Func, 0}, - {"DecodeConfig", Func, 0}, - {"DefaultCompression", Const, 4}, - {"Encode", Func, 0}, - {"Encoder", Type, 4}, - {"Encoder.BufferPool", Field, 9}, - {"Encoder.CompressionLevel", Field, 4}, - {"EncoderBuffer", Type, 9}, - {"EncoderBufferPool", Type, 9}, - {"FormatError", Type, 0}, - {"NoCompression", Const, 4}, - {"UnsupportedError", Type, 0}, + 
{"(*Encoder).Encode", Method, 4, ""}, + {"(FormatError).Error", Method, 0, ""}, + {"(UnsupportedError).Error", Method, 0, ""}, + {"BestCompression", Const, 4, ""}, + {"BestSpeed", Const, 4, ""}, + {"CompressionLevel", Type, 4, ""}, + {"Decode", Func, 0, "func(r io.Reader) (image.Image, error)"}, + {"DecodeConfig", Func, 0, "func(r io.Reader) (image.Config, error)"}, + {"DefaultCompression", Const, 4, ""}, + {"Encode", Func, 0, "func(w io.Writer, m image.Image) error"}, + {"Encoder", Type, 4, ""}, + {"Encoder.BufferPool", Field, 9, ""}, + {"Encoder.CompressionLevel", Field, 4, ""}, + {"EncoderBuffer", Type, 9, ""}, + {"EncoderBufferPool", Type, 9, ""}, + {"FormatError", Type, 0, ""}, + {"NoCompression", Const, 4, ""}, + {"UnsupportedError", Type, 0, ""}, }, "index/suffixarray": { - {"(*Index).Bytes", Method, 0}, - {"(*Index).FindAllIndex", Method, 0}, - {"(*Index).Lookup", Method, 0}, - {"(*Index).Read", Method, 0}, - {"(*Index).Write", Method, 0}, - {"Index", Type, 0}, - {"New", Func, 0}, + {"(*Index).Bytes", Method, 0, ""}, + {"(*Index).FindAllIndex", Method, 0, ""}, + {"(*Index).Lookup", Method, 0, ""}, + {"(*Index).Read", Method, 0, ""}, + {"(*Index).Write", Method, 0, ""}, + {"Index", Type, 0, ""}, + {"New", Func, 0, "func(data []byte) *Index"}, }, "io": { - {"(*LimitedReader).Read", Method, 0}, - {"(*OffsetWriter).Seek", Method, 20}, - {"(*OffsetWriter).Write", Method, 20}, - {"(*OffsetWriter).WriteAt", Method, 20}, - {"(*PipeReader).Close", Method, 0}, - {"(*PipeReader).CloseWithError", Method, 0}, - {"(*PipeReader).Read", Method, 0}, - {"(*PipeWriter).Close", Method, 0}, - {"(*PipeWriter).CloseWithError", Method, 0}, - {"(*PipeWriter).Write", Method, 0}, - {"(*SectionReader).Outer", Method, 22}, - {"(*SectionReader).Read", Method, 0}, - {"(*SectionReader).ReadAt", Method, 0}, - {"(*SectionReader).Seek", Method, 0}, - {"(*SectionReader).Size", Method, 0}, - {"ByteReader", Type, 0}, - {"ByteScanner", Type, 0}, - {"ByteWriter", Type, 1}, - {"Closer", Type, 0}, - {"Copy", Func, 0}, - {"CopyBuffer", Func, 5}, - {"CopyN", Func, 0}, - {"Discard", Var, 16}, - {"EOF", Var, 0}, - {"ErrClosedPipe", Var, 0}, - {"ErrNoProgress", Var, 1}, - {"ErrShortBuffer", Var, 0}, - {"ErrShortWrite", Var, 0}, - {"ErrUnexpectedEOF", Var, 0}, - {"LimitReader", Func, 0}, - {"LimitedReader", Type, 0}, - {"LimitedReader.N", Field, 0}, - {"LimitedReader.R", Field, 0}, - {"MultiReader", Func, 0}, - {"MultiWriter", Func, 0}, - {"NewOffsetWriter", Func, 20}, - {"NewSectionReader", Func, 0}, - {"NopCloser", Func, 16}, - {"OffsetWriter", Type, 20}, - {"Pipe", Func, 0}, - {"PipeReader", Type, 0}, - {"PipeWriter", Type, 0}, - {"ReadAll", Func, 16}, - {"ReadAtLeast", Func, 0}, - {"ReadCloser", Type, 0}, - {"ReadFull", Func, 0}, - {"ReadSeekCloser", Type, 16}, - {"ReadSeeker", Type, 0}, - {"ReadWriteCloser", Type, 0}, - {"ReadWriteSeeker", Type, 0}, - {"ReadWriter", Type, 0}, - {"Reader", Type, 0}, - {"ReaderAt", Type, 0}, - {"ReaderFrom", Type, 0}, - {"RuneReader", Type, 0}, - {"RuneScanner", Type, 0}, - {"SectionReader", Type, 0}, - {"SeekCurrent", Const, 7}, - {"SeekEnd", Const, 7}, - {"SeekStart", Const, 7}, - {"Seeker", Type, 0}, - {"StringWriter", Type, 12}, - {"TeeReader", Func, 0}, - {"WriteCloser", Type, 0}, - {"WriteSeeker", Type, 0}, - {"WriteString", Func, 0}, - {"Writer", Type, 0}, - {"WriterAt", Type, 0}, - {"WriterTo", Type, 0}, + {"(*LimitedReader).Read", Method, 0, ""}, + {"(*OffsetWriter).Seek", Method, 20, ""}, + {"(*OffsetWriter).Write", Method, 20, ""}, + {"(*OffsetWriter).WriteAt", Method, 20, ""}, + 
{"(*PipeReader).Close", Method, 0, ""}, + {"(*PipeReader).CloseWithError", Method, 0, ""}, + {"(*PipeReader).Read", Method, 0, ""}, + {"(*PipeWriter).Close", Method, 0, ""}, + {"(*PipeWriter).CloseWithError", Method, 0, ""}, + {"(*PipeWriter).Write", Method, 0, ""}, + {"(*SectionReader).Outer", Method, 22, ""}, + {"(*SectionReader).Read", Method, 0, ""}, + {"(*SectionReader).ReadAt", Method, 0, ""}, + {"(*SectionReader).Seek", Method, 0, ""}, + {"(*SectionReader).Size", Method, 0, ""}, + {"ByteReader", Type, 0, ""}, + {"ByteScanner", Type, 0, ""}, + {"ByteWriter", Type, 1, ""}, + {"Closer", Type, 0, ""}, + {"Copy", Func, 0, "func(dst Writer, src Reader) (written int64, err error)"}, + {"CopyBuffer", Func, 5, "func(dst Writer, src Reader, buf []byte) (written int64, err error)"}, + {"CopyN", Func, 0, "func(dst Writer, src Reader, n int64) (written int64, err error)"}, + {"Discard", Var, 16, ""}, + {"EOF", Var, 0, ""}, + {"ErrClosedPipe", Var, 0, ""}, + {"ErrNoProgress", Var, 1, ""}, + {"ErrShortBuffer", Var, 0, ""}, + {"ErrShortWrite", Var, 0, ""}, + {"ErrUnexpectedEOF", Var, 0, ""}, + {"LimitReader", Func, 0, "func(r Reader, n int64) Reader"}, + {"LimitedReader", Type, 0, ""}, + {"LimitedReader.N", Field, 0, ""}, + {"LimitedReader.R", Field, 0, ""}, + {"MultiReader", Func, 0, "func(readers ...Reader) Reader"}, + {"MultiWriter", Func, 0, "func(writers ...Writer) Writer"}, + {"NewOffsetWriter", Func, 20, "func(w WriterAt, off int64) *OffsetWriter"}, + {"NewSectionReader", Func, 0, "func(r ReaderAt, off int64, n int64) *SectionReader"}, + {"NopCloser", Func, 16, "func(r Reader) ReadCloser"}, + {"OffsetWriter", Type, 20, ""}, + {"Pipe", Func, 0, "func() (*PipeReader, *PipeWriter)"}, + {"PipeReader", Type, 0, ""}, + {"PipeWriter", Type, 0, ""}, + {"ReadAll", Func, 16, "func(r Reader) ([]byte, error)"}, + {"ReadAtLeast", Func, 0, "func(r Reader, buf []byte, min int) (n int, err error)"}, + {"ReadCloser", Type, 0, ""}, + {"ReadFull", Func, 0, "func(r Reader, buf []byte) (n int, err error)"}, + {"ReadSeekCloser", Type, 16, ""}, + {"ReadSeeker", Type, 0, ""}, + {"ReadWriteCloser", Type, 0, ""}, + {"ReadWriteSeeker", Type, 0, ""}, + {"ReadWriter", Type, 0, ""}, + {"Reader", Type, 0, ""}, + {"ReaderAt", Type, 0, ""}, + {"ReaderFrom", Type, 0, ""}, + {"RuneReader", Type, 0, ""}, + {"RuneScanner", Type, 0, ""}, + {"SectionReader", Type, 0, ""}, + {"SeekCurrent", Const, 7, ""}, + {"SeekEnd", Const, 7, ""}, + {"SeekStart", Const, 7, ""}, + {"Seeker", Type, 0, ""}, + {"StringWriter", Type, 12, ""}, + {"TeeReader", Func, 0, "func(r Reader, w Writer) Reader"}, + {"WriteCloser", Type, 0, ""}, + {"WriteSeeker", Type, 0, ""}, + {"WriteString", Func, 0, "func(w Writer, s string) (n int, err error)"}, + {"Writer", Type, 0, ""}, + {"WriterAt", Type, 0, ""}, + {"WriterTo", Type, 0, ""}, }, "io/fs": { - {"(*PathError).Error", Method, 16}, - {"(*PathError).Timeout", Method, 16}, - {"(*PathError).Unwrap", Method, 16}, - {"(FileMode).IsDir", Method, 16}, - {"(FileMode).IsRegular", Method, 16}, - {"(FileMode).Perm", Method, 16}, - {"(FileMode).String", Method, 16}, - {"(FileMode).Type", Method, 16}, - {"DirEntry", Type, 16}, - {"ErrClosed", Var, 16}, - {"ErrExist", Var, 16}, - {"ErrInvalid", Var, 16}, - {"ErrNotExist", Var, 16}, - {"ErrPermission", Var, 16}, - {"FS", Type, 16}, - {"File", Type, 16}, - {"FileInfo", Type, 16}, - {"FileInfoToDirEntry", Func, 17}, - {"FileMode", Type, 16}, - {"FormatDirEntry", Func, 21}, - {"FormatFileInfo", Func, 21}, - {"Glob", Func, 16}, - {"GlobFS", Type, 16}, - {"Lstat", Func, 25}, - 
{"ModeAppend", Const, 16}, - {"ModeCharDevice", Const, 16}, - {"ModeDevice", Const, 16}, - {"ModeDir", Const, 16}, - {"ModeExclusive", Const, 16}, - {"ModeIrregular", Const, 16}, - {"ModeNamedPipe", Const, 16}, - {"ModePerm", Const, 16}, - {"ModeSetgid", Const, 16}, - {"ModeSetuid", Const, 16}, - {"ModeSocket", Const, 16}, - {"ModeSticky", Const, 16}, - {"ModeSymlink", Const, 16}, - {"ModeTemporary", Const, 16}, - {"ModeType", Const, 16}, - {"PathError", Type, 16}, - {"PathError.Err", Field, 16}, - {"PathError.Op", Field, 16}, - {"PathError.Path", Field, 16}, - {"ReadDir", Func, 16}, - {"ReadDirFS", Type, 16}, - {"ReadDirFile", Type, 16}, - {"ReadFile", Func, 16}, - {"ReadFileFS", Type, 16}, - {"ReadLink", Func, 25}, - {"ReadLinkFS", Type, 25}, - {"SkipAll", Var, 20}, - {"SkipDir", Var, 16}, - {"Stat", Func, 16}, - {"StatFS", Type, 16}, - {"Sub", Func, 16}, - {"SubFS", Type, 16}, - {"ValidPath", Func, 16}, - {"WalkDir", Func, 16}, - {"WalkDirFunc", Type, 16}, + {"(*PathError).Error", Method, 16, ""}, + {"(*PathError).Timeout", Method, 16, ""}, + {"(*PathError).Unwrap", Method, 16, ""}, + {"(FileMode).IsDir", Method, 16, ""}, + {"(FileMode).IsRegular", Method, 16, ""}, + {"(FileMode).Perm", Method, 16, ""}, + {"(FileMode).String", Method, 16, ""}, + {"(FileMode).Type", Method, 16, ""}, + {"DirEntry", Type, 16, ""}, + {"ErrClosed", Var, 16, ""}, + {"ErrExist", Var, 16, ""}, + {"ErrInvalid", Var, 16, ""}, + {"ErrNotExist", Var, 16, ""}, + {"ErrPermission", Var, 16, ""}, + {"FS", Type, 16, ""}, + {"File", Type, 16, ""}, + {"FileInfo", Type, 16, ""}, + {"FileInfoToDirEntry", Func, 17, "func(info FileInfo) DirEntry"}, + {"FileMode", Type, 16, ""}, + {"FormatDirEntry", Func, 21, "func(dir DirEntry) string"}, + {"FormatFileInfo", Func, 21, "func(info FileInfo) string"}, + {"Glob", Func, 16, "func(fsys FS, pattern string) (matches []string, err error)"}, + {"GlobFS", Type, 16, ""}, + {"Lstat", Func, 25, ""}, + {"ModeAppend", Const, 16, ""}, + {"ModeCharDevice", Const, 16, ""}, + {"ModeDevice", Const, 16, ""}, + {"ModeDir", Const, 16, ""}, + {"ModeExclusive", Const, 16, ""}, + {"ModeIrregular", Const, 16, ""}, + {"ModeNamedPipe", Const, 16, ""}, + {"ModePerm", Const, 16, ""}, + {"ModeSetgid", Const, 16, ""}, + {"ModeSetuid", Const, 16, ""}, + {"ModeSocket", Const, 16, ""}, + {"ModeSticky", Const, 16, ""}, + {"ModeSymlink", Const, 16, ""}, + {"ModeTemporary", Const, 16, ""}, + {"ModeType", Const, 16, ""}, + {"PathError", Type, 16, ""}, + {"PathError.Err", Field, 16, ""}, + {"PathError.Op", Field, 16, ""}, + {"PathError.Path", Field, 16, ""}, + {"ReadDir", Func, 16, "func(fsys FS, name string) ([]DirEntry, error)"}, + {"ReadDirFS", Type, 16, ""}, + {"ReadDirFile", Type, 16, ""}, + {"ReadFile", Func, 16, "func(fsys FS, name string) ([]byte, error)"}, + {"ReadFileFS", Type, 16, ""}, + {"ReadLink", Func, 25, ""}, + {"ReadLinkFS", Type, 25, ""}, + {"SkipAll", Var, 20, ""}, + {"SkipDir", Var, 16, ""}, + {"Stat", Func, 16, "func(fsys FS, name string) (FileInfo, error)"}, + {"StatFS", Type, 16, ""}, + {"Sub", Func, 16, "func(fsys FS, dir string) (FS, error)"}, + {"SubFS", Type, 16, ""}, + {"ValidPath", Func, 16, "func(name string) bool"}, + {"WalkDir", Func, 16, "func(fsys FS, root string, fn WalkDirFunc) error"}, + {"WalkDirFunc", Type, 16, ""}, }, "io/ioutil": { - {"Discard", Var, 0}, - {"NopCloser", Func, 0}, - {"ReadAll", Func, 0}, - {"ReadDir", Func, 0}, - {"ReadFile", Func, 0}, - {"TempDir", Func, 0}, - {"TempFile", Func, 0}, - {"WriteFile", Func, 0}, + {"Discard", Var, 0, ""}, + {"NopCloser", Func, 0, 
"func(r io.Reader) io.ReadCloser"}, + {"ReadAll", Func, 0, "func(r io.Reader) ([]byte, error)"}, + {"ReadDir", Func, 0, "func(dirname string) ([]fs.FileInfo, error)"}, + {"ReadFile", Func, 0, "func(filename string) ([]byte, error)"}, + {"TempDir", Func, 0, "func(dir string, pattern string) (name string, err error)"}, + {"TempFile", Func, 0, "func(dir string, pattern string) (f *os.File, err error)"}, + {"WriteFile", Func, 0, "func(filename string, data []byte, perm fs.FileMode) error"}, }, "iter": { - {"Pull", Func, 23}, - {"Pull2", Func, 23}, - {"Seq", Type, 23}, - {"Seq2", Type, 23}, + {"Pull", Func, 23, "func[V any](seq Seq[V]) (next func() (V, bool), stop func())"}, + {"Pull2", Func, 23, "func[K, V any](seq Seq2[K, V]) (next func() (K, V, bool), stop func())"}, + {"Seq", Type, 23, ""}, + {"Seq2", Type, 23, ""}, }, "log": { - {"(*Logger).Fatal", Method, 0}, - {"(*Logger).Fatalf", Method, 0}, - {"(*Logger).Fatalln", Method, 0}, - {"(*Logger).Flags", Method, 0}, - {"(*Logger).Output", Method, 0}, - {"(*Logger).Panic", Method, 0}, - {"(*Logger).Panicf", Method, 0}, - {"(*Logger).Panicln", Method, 0}, - {"(*Logger).Prefix", Method, 0}, - {"(*Logger).Print", Method, 0}, - {"(*Logger).Printf", Method, 0}, - {"(*Logger).Println", Method, 0}, - {"(*Logger).SetFlags", Method, 0}, - {"(*Logger).SetOutput", Method, 5}, - {"(*Logger).SetPrefix", Method, 0}, - {"(*Logger).Writer", Method, 12}, - {"Default", Func, 16}, - {"Fatal", Func, 0}, - {"Fatalf", Func, 0}, - {"Fatalln", Func, 0}, - {"Flags", Func, 0}, - {"LUTC", Const, 5}, - {"Ldate", Const, 0}, - {"Llongfile", Const, 0}, - {"Lmicroseconds", Const, 0}, - {"Lmsgprefix", Const, 14}, - {"Logger", Type, 0}, - {"Lshortfile", Const, 0}, - {"LstdFlags", Const, 0}, - {"Ltime", Const, 0}, - {"New", Func, 0}, - {"Output", Func, 5}, - {"Panic", Func, 0}, - {"Panicf", Func, 0}, - {"Panicln", Func, 0}, - {"Prefix", Func, 0}, - {"Print", Func, 0}, - {"Printf", Func, 0}, - {"Println", Func, 0}, - {"SetFlags", Func, 0}, - {"SetOutput", Func, 0}, - {"SetPrefix", Func, 0}, - {"Writer", Func, 13}, + {"(*Logger).Fatal", Method, 0, ""}, + {"(*Logger).Fatalf", Method, 0, ""}, + {"(*Logger).Fatalln", Method, 0, ""}, + {"(*Logger).Flags", Method, 0, ""}, + {"(*Logger).Output", Method, 0, ""}, + {"(*Logger).Panic", Method, 0, ""}, + {"(*Logger).Panicf", Method, 0, ""}, + {"(*Logger).Panicln", Method, 0, ""}, + {"(*Logger).Prefix", Method, 0, ""}, + {"(*Logger).Print", Method, 0, ""}, + {"(*Logger).Printf", Method, 0, ""}, + {"(*Logger).Println", Method, 0, ""}, + {"(*Logger).SetFlags", Method, 0, ""}, + {"(*Logger).SetOutput", Method, 5, ""}, + {"(*Logger).SetPrefix", Method, 0, ""}, + {"(*Logger).Writer", Method, 12, ""}, + {"Default", Func, 16, "func() *Logger"}, + {"Fatal", Func, 0, "func(v ...any)"}, + {"Fatalf", Func, 0, "func(format string, v ...any)"}, + {"Fatalln", Func, 0, "func(v ...any)"}, + {"Flags", Func, 0, "func() int"}, + {"LUTC", Const, 5, ""}, + {"Ldate", Const, 0, ""}, + {"Llongfile", Const, 0, ""}, + {"Lmicroseconds", Const, 0, ""}, + {"Lmsgprefix", Const, 14, ""}, + {"Logger", Type, 0, ""}, + {"Lshortfile", Const, 0, ""}, + {"LstdFlags", Const, 0, ""}, + {"Ltime", Const, 0, ""}, + {"New", Func, 0, "func(out io.Writer, prefix string, flag int) *Logger"}, + {"Output", Func, 5, "func(calldepth int, s string) error"}, + {"Panic", Func, 0, "func(v ...any)"}, + {"Panicf", Func, 0, "func(format string, v ...any)"}, + {"Panicln", Func, 0, "func(v ...any)"}, + {"Prefix", Func, 0, "func() string"}, + {"Print", Func, 0, "func(v ...any)"}, + {"Printf", Func, 
0, "func(format string, v ...any)"}, + {"Println", Func, 0, "func(v ...any)"}, + {"SetFlags", Func, 0, "func(flag int)"}, + {"SetOutput", Func, 0, "func(w io.Writer)"}, + {"SetPrefix", Func, 0, "func(prefix string)"}, + {"Writer", Func, 13, "func() io.Writer"}, }, "log/slog": { - {"(*JSONHandler).Enabled", Method, 21}, - {"(*JSONHandler).Handle", Method, 21}, - {"(*JSONHandler).WithAttrs", Method, 21}, - {"(*JSONHandler).WithGroup", Method, 21}, - {"(*Level).UnmarshalJSON", Method, 21}, - {"(*Level).UnmarshalText", Method, 21}, - {"(*LevelVar).AppendText", Method, 24}, - {"(*LevelVar).Level", Method, 21}, - {"(*LevelVar).MarshalText", Method, 21}, - {"(*LevelVar).Set", Method, 21}, - {"(*LevelVar).String", Method, 21}, - {"(*LevelVar).UnmarshalText", Method, 21}, - {"(*Logger).Debug", Method, 21}, - {"(*Logger).DebugContext", Method, 21}, - {"(*Logger).Enabled", Method, 21}, - {"(*Logger).Error", Method, 21}, - {"(*Logger).ErrorContext", Method, 21}, - {"(*Logger).Handler", Method, 21}, - {"(*Logger).Info", Method, 21}, - {"(*Logger).InfoContext", Method, 21}, - {"(*Logger).Log", Method, 21}, - {"(*Logger).LogAttrs", Method, 21}, - {"(*Logger).Warn", Method, 21}, - {"(*Logger).WarnContext", Method, 21}, - {"(*Logger).With", Method, 21}, - {"(*Logger).WithGroup", Method, 21}, - {"(*Record).Add", Method, 21}, - {"(*Record).AddAttrs", Method, 21}, - {"(*TextHandler).Enabled", Method, 21}, - {"(*TextHandler).Handle", Method, 21}, - {"(*TextHandler).WithAttrs", Method, 21}, - {"(*TextHandler).WithGroup", Method, 21}, - {"(Attr).Equal", Method, 21}, - {"(Attr).String", Method, 21}, - {"(Kind).String", Method, 21}, - {"(Level).AppendText", Method, 24}, - {"(Level).Level", Method, 21}, - {"(Level).MarshalJSON", Method, 21}, - {"(Level).MarshalText", Method, 21}, - {"(Level).String", Method, 21}, - {"(Record).Attrs", Method, 21}, - {"(Record).Clone", Method, 21}, - {"(Record).NumAttrs", Method, 21}, - {"(Value).Any", Method, 21}, - {"(Value).Bool", Method, 21}, - {"(Value).Duration", Method, 21}, - {"(Value).Equal", Method, 21}, - {"(Value).Float64", Method, 21}, - {"(Value).Group", Method, 21}, - {"(Value).Int64", Method, 21}, - {"(Value).Kind", Method, 21}, - {"(Value).LogValuer", Method, 21}, - {"(Value).Resolve", Method, 21}, - {"(Value).String", Method, 21}, - {"(Value).Time", Method, 21}, - {"(Value).Uint64", Method, 21}, - {"Any", Func, 21}, - {"AnyValue", Func, 21}, - {"Attr", Type, 21}, - {"Attr.Key", Field, 21}, - {"Attr.Value", Field, 21}, - {"Bool", Func, 21}, - {"BoolValue", Func, 21}, - {"Debug", Func, 21}, - {"DebugContext", Func, 21}, - {"Default", Func, 21}, - {"DiscardHandler", Var, 24}, - {"Duration", Func, 21}, - {"DurationValue", Func, 21}, - {"Error", Func, 21}, - {"ErrorContext", Func, 21}, - {"Float64", Func, 21}, - {"Float64Value", Func, 21}, - {"Group", Func, 21}, - {"GroupValue", Func, 21}, - {"Handler", Type, 21}, - {"HandlerOptions", Type, 21}, - {"HandlerOptions.AddSource", Field, 21}, - {"HandlerOptions.Level", Field, 21}, - {"HandlerOptions.ReplaceAttr", Field, 21}, - {"Info", Func, 21}, - {"InfoContext", Func, 21}, - {"Int", Func, 21}, - {"Int64", Func, 21}, - {"Int64Value", Func, 21}, - {"IntValue", Func, 21}, - {"JSONHandler", Type, 21}, - {"Kind", Type, 21}, - {"KindAny", Const, 21}, - {"KindBool", Const, 21}, - {"KindDuration", Const, 21}, - {"KindFloat64", Const, 21}, - {"KindGroup", Const, 21}, - {"KindInt64", Const, 21}, - {"KindLogValuer", Const, 21}, - {"KindString", Const, 21}, - {"KindTime", Const, 21}, - {"KindUint64", Const, 21}, - {"Level", Type, 21}, 
- {"LevelDebug", Const, 21}, - {"LevelError", Const, 21}, - {"LevelInfo", Const, 21}, - {"LevelKey", Const, 21}, - {"LevelVar", Type, 21}, - {"LevelWarn", Const, 21}, - {"Leveler", Type, 21}, - {"Log", Func, 21}, - {"LogAttrs", Func, 21}, - {"LogValuer", Type, 21}, - {"Logger", Type, 21}, - {"MessageKey", Const, 21}, - {"New", Func, 21}, - {"NewJSONHandler", Func, 21}, - {"NewLogLogger", Func, 21}, - {"NewRecord", Func, 21}, - {"NewTextHandler", Func, 21}, - {"Record", Type, 21}, - {"Record.Level", Field, 21}, - {"Record.Message", Field, 21}, - {"Record.PC", Field, 21}, - {"Record.Time", Field, 21}, - {"SetDefault", Func, 21}, - {"SetLogLoggerLevel", Func, 22}, - {"Source", Type, 21}, - {"Source.File", Field, 21}, - {"Source.Function", Field, 21}, - {"Source.Line", Field, 21}, - {"SourceKey", Const, 21}, - {"String", Func, 21}, - {"StringValue", Func, 21}, - {"TextHandler", Type, 21}, - {"Time", Func, 21}, - {"TimeKey", Const, 21}, - {"TimeValue", Func, 21}, - {"Uint64", Func, 21}, - {"Uint64Value", Func, 21}, - {"Value", Type, 21}, - {"Warn", Func, 21}, - {"WarnContext", Func, 21}, - {"With", Func, 21}, + {"(*JSONHandler).Enabled", Method, 21, ""}, + {"(*JSONHandler).Handle", Method, 21, ""}, + {"(*JSONHandler).WithAttrs", Method, 21, ""}, + {"(*JSONHandler).WithGroup", Method, 21, ""}, + {"(*Level).UnmarshalJSON", Method, 21, ""}, + {"(*Level).UnmarshalText", Method, 21, ""}, + {"(*LevelVar).AppendText", Method, 24, ""}, + {"(*LevelVar).Level", Method, 21, ""}, + {"(*LevelVar).MarshalText", Method, 21, ""}, + {"(*LevelVar).Set", Method, 21, ""}, + {"(*LevelVar).String", Method, 21, ""}, + {"(*LevelVar).UnmarshalText", Method, 21, ""}, + {"(*Logger).Debug", Method, 21, ""}, + {"(*Logger).DebugContext", Method, 21, ""}, + {"(*Logger).Enabled", Method, 21, ""}, + {"(*Logger).Error", Method, 21, ""}, + {"(*Logger).ErrorContext", Method, 21, ""}, + {"(*Logger).Handler", Method, 21, ""}, + {"(*Logger).Info", Method, 21, ""}, + {"(*Logger).InfoContext", Method, 21, ""}, + {"(*Logger).Log", Method, 21, ""}, + {"(*Logger).LogAttrs", Method, 21, ""}, + {"(*Logger).Warn", Method, 21, ""}, + {"(*Logger).WarnContext", Method, 21, ""}, + {"(*Logger).With", Method, 21, ""}, + {"(*Logger).WithGroup", Method, 21, ""}, + {"(*Record).Add", Method, 21, ""}, + {"(*Record).AddAttrs", Method, 21, ""}, + {"(*TextHandler).Enabled", Method, 21, ""}, + {"(*TextHandler).Handle", Method, 21, ""}, + {"(*TextHandler).WithAttrs", Method, 21, ""}, + {"(*TextHandler).WithGroup", Method, 21, ""}, + {"(Attr).Equal", Method, 21, ""}, + {"(Attr).String", Method, 21, ""}, + {"(Kind).String", Method, 21, ""}, + {"(Level).AppendText", Method, 24, ""}, + {"(Level).Level", Method, 21, ""}, + {"(Level).MarshalJSON", Method, 21, ""}, + {"(Level).MarshalText", Method, 21, ""}, + {"(Level).String", Method, 21, ""}, + {"(Record).Attrs", Method, 21, ""}, + {"(Record).Clone", Method, 21, ""}, + {"(Record).NumAttrs", Method, 21, ""}, + {"(Value).Any", Method, 21, ""}, + {"(Value).Bool", Method, 21, ""}, + {"(Value).Duration", Method, 21, ""}, + {"(Value).Equal", Method, 21, ""}, + {"(Value).Float64", Method, 21, ""}, + {"(Value).Group", Method, 21, ""}, + {"(Value).Int64", Method, 21, ""}, + {"(Value).Kind", Method, 21, ""}, + {"(Value).LogValuer", Method, 21, ""}, + {"(Value).Resolve", Method, 21, ""}, + {"(Value).String", Method, 21, ""}, + {"(Value).Time", Method, 21, ""}, + {"(Value).Uint64", Method, 21, ""}, + {"Any", Func, 21, "func(key string, value any) Attr"}, + {"AnyValue", Func, 21, "func(v any) Value"}, + {"Attr", Type, 21, 
""}, + {"Attr.Key", Field, 21, ""}, + {"Attr.Value", Field, 21, ""}, + {"Bool", Func, 21, "func(key string, v bool) Attr"}, + {"BoolValue", Func, 21, "func(v bool) Value"}, + {"Debug", Func, 21, "func(msg string, args ...any)"}, + {"DebugContext", Func, 21, "func(ctx context.Context, msg string, args ...any)"}, + {"Default", Func, 21, "func() *Logger"}, + {"DiscardHandler", Var, 24, ""}, + {"Duration", Func, 21, "func(key string, v time.Duration) Attr"}, + {"DurationValue", Func, 21, "func(v time.Duration) Value"}, + {"Error", Func, 21, "func(msg string, args ...any)"}, + {"ErrorContext", Func, 21, "func(ctx context.Context, msg string, args ...any)"}, + {"Float64", Func, 21, "func(key string, v float64) Attr"}, + {"Float64Value", Func, 21, "func(v float64) Value"}, + {"Group", Func, 21, "func(key string, args ...any) Attr"}, + {"GroupValue", Func, 21, "func(as ...Attr) Value"}, + {"Handler", Type, 21, ""}, + {"HandlerOptions", Type, 21, ""}, + {"HandlerOptions.AddSource", Field, 21, ""}, + {"HandlerOptions.Level", Field, 21, ""}, + {"HandlerOptions.ReplaceAttr", Field, 21, ""}, + {"Info", Func, 21, "func(msg string, args ...any)"}, + {"InfoContext", Func, 21, "func(ctx context.Context, msg string, args ...any)"}, + {"Int", Func, 21, "func(key string, value int) Attr"}, + {"Int64", Func, 21, "func(key string, value int64) Attr"}, + {"Int64Value", Func, 21, "func(v int64) Value"}, + {"IntValue", Func, 21, "func(v int) Value"}, + {"JSONHandler", Type, 21, ""}, + {"Kind", Type, 21, ""}, + {"KindAny", Const, 21, ""}, + {"KindBool", Const, 21, ""}, + {"KindDuration", Const, 21, ""}, + {"KindFloat64", Const, 21, ""}, + {"KindGroup", Const, 21, ""}, + {"KindInt64", Const, 21, ""}, + {"KindLogValuer", Const, 21, ""}, + {"KindString", Const, 21, ""}, + {"KindTime", Const, 21, ""}, + {"KindUint64", Const, 21, ""}, + {"Level", Type, 21, ""}, + {"LevelDebug", Const, 21, ""}, + {"LevelError", Const, 21, ""}, + {"LevelInfo", Const, 21, ""}, + {"LevelKey", Const, 21, ""}, + {"LevelVar", Type, 21, ""}, + {"LevelWarn", Const, 21, ""}, + {"Leveler", Type, 21, ""}, + {"Log", Func, 21, "func(ctx context.Context, level Level, msg string, args ...any)"}, + {"LogAttrs", Func, 21, "func(ctx context.Context, level Level, msg string, attrs ...Attr)"}, + {"LogValuer", Type, 21, ""}, + {"Logger", Type, 21, ""}, + {"MessageKey", Const, 21, ""}, + {"New", Func, 21, "func(h Handler) *Logger"}, + {"NewJSONHandler", Func, 21, "func(w io.Writer, opts *HandlerOptions) *JSONHandler"}, + {"NewLogLogger", Func, 21, "func(h Handler, level Level) *log.Logger"}, + {"NewRecord", Func, 21, "func(t time.Time, level Level, msg string, pc uintptr) Record"}, + {"NewTextHandler", Func, 21, "func(w io.Writer, opts *HandlerOptions) *TextHandler"}, + {"Record", Type, 21, ""}, + {"Record.Level", Field, 21, ""}, + {"Record.Message", Field, 21, ""}, + {"Record.PC", Field, 21, ""}, + {"Record.Time", Field, 21, ""}, + {"SetDefault", Func, 21, "func(l *Logger)"}, + {"SetLogLoggerLevel", Func, 22, "func(level Level) (oldLevel Level)"}, + {"Source", Type, 21, ""}, + {"Source.File", Field, 21, ""}, + {"Source.Function", Field, 21, ""}, + {"Source.Line", Field, 21, ""}, + {"SourceKey", Const, 21, ""}, + {"String", Func, 21, "func(key string, value string) Attr"}, + {"StringValue", Func, 21, "func(value string) Value"}, + {"TextHandler", Type, 21, ""}, + {"Time", Func, 21, "func(key string, v time.Time) Attr"}, + {"TimeKey", Const, 21, ""}, + {"TimeValue", Func, 21, "func(v time.Time) Value"}, + {"Uint64", Func, 21, "func(key string, v uint64) 
Attr"}, + {"Uint64Value", Func, 21, "func(v uint64) Value"}, + {"Value", Type, 21, ""}, + {"Warn", Func, 21, "func(msg string, args ...any)"}, + {"WarnContext", Func, 21, "func(ctx context.Context, msg string, args ...any)"}, + {"With", Func, 21, "func(args ...any) *Logger"}, }, "log/syslog": { - {"(*Writer).Alert", Method, 0}, - {"(*Writer).Close", Method, 0}, - {"(*Writer).Crit", Method, 0}, - {"(*Writer).Debug", Method, 0}, - {"(*Writer).Emerg", Method, 0}, - {"(*Writer).Err", Method, 0}, - {"(*Writer).Info", Method, 0}, - {"(*Writer).Notice", Method, 0}, - {"(*Writer).Warning", Method, 0}, - {"(*Writer).Write", Method, 0}, - {"Dial", Func, 0}, - {"LOG_ALERT", Const, 0}, - {"LOG_AUTH", Const, 1}, - {"LOG_AUTHPRIV", Const, 1}, - {"LOG_CRIT", Const, 0}, - {"LOG_CRON", Const, 1}, - {"LOG_DAEMON", Const, 1}, - {"LOG_DEBUG", Const, 0}, - {"LOG_EMERG", Const, 0}, - {"LOG_ERR", Const, 0}, - {"LOG_FTP", Const, 1}, - {"LOG_INFO", Const, 0}, - {"LOG_KERN", Const, 1}, - {"LOG_LOCAL0", Const, 1}, - {"LOG_LOCAL1", Const, 1}, - {"LOG_LOCAL2", Const, 1}, - {"LOG_LOCAL3", Const, 1}, - {"LOG_LOCAL4", Const, 1}, - {"LOG_LOCAL5", Const, 1}, - {"LOG_LOCAL6", Const, 1}, - {"LOG_LOCAL7", Const, 1}, - {"LOG_LPR", Const, 1}, - {"LOG_MAIL", Const, 1}, - {"LOG_NEWS", Const, 1}, - {"LOG_NOTICE", Const, 0}, - {"LOG_SYSLOG", Const, 1}, - {"LOG_USER", Const, 1}, - {"LOG_UUCP", Const, 1}, - {"LOG_WARNING", Const, 0}, - {"New", Func, 0}, - {"NewLogger", Func, 0}, - {"Priority", Type, 0}, - {"Writer", Type, 0}, + {"(*Writer).Alert", Method, 0, ""}, + {"(*Writer).Close", Method, 0, ""}, + {"(*Writer).Crit", Method, 0, ""}, + {"(*Writer).Debug", Method, 0, ""}, + {"(*Writer).Emerg", Method, 0, ""}, + {"(*Writer).Err", Method, 0, ""}, + {"(*Writer).Info", Method, 0, ""}, + {"(*Writer).Notice", Method, 0, ""}, + {"(*Writer).Warning", Method, 0, ""}, + {"(*Writer).Write", Method, 0, ""}, + {"Dial", Func, 0, "func(network string, raddr string, priority Priority, tag string) (*Writer, error)"}, + {"LOG_ALERT", Const, 0, ""}, + {"LOG_AUTH", Const, 1, ""}, + {"LOG_AUTHPRIV", Const, 1, ""}, + {"LOG_CRIT", Const, 0, ""}, + {"LOG_CRON", Const, 1, ""}, + {"LOG_DAEMON", Const, 1, ""}, + {"LOG_DEBUG", Const, 0, ""}, + {"LOG_EMERG", Const, 0, ""}, + {"LOG_ERR", Const, 0, ""}, + {"LOG_FTP", Const, 1, ""}, + {"LOG_INFO", Const, 0, ""}, + {"LOG_KERN", Const, 1, ""}, + {"LOG_LOCAL0", Const, 1, ""}, + {"LOG_LOCAL1", Const, 1, ""}, + {"LOG_LOCAL2", Const, 1, ""}, + {"LOG_LOCAL3", Const, 1, ""}, + {"LOG_LOCAL4", Const, 1, ""}, + {"LOG_LOCAL5", Const, 1, ""}, + {"LOG_LOCAL6", Const, 1, ""}, + {"LOG_LOCAL7", Const, 1, ""}, + {"LOG_LPR", Const, 1, ""}, + {"LOG_MAIL", Const, 1, ""}, + {"LOG_NEWS", Const, 1, ""}, + {"LOG_NOTICE", Const, 0, ""}, + {"LOG_SYSLOG", Const, 1, ""}, + {"LOG_USER", Const, 1, ""}, + {"LOG_UUCP", Const, 1, ""}, + {"LOG_WARNING", Const, 0, ""}, + {"New", Func, 0, "func(priority Priority, tag string) (*Writer, error)"}, + {"NewLogger", Func, 0, "func(p Priority, logFlag int) (*log.Logger, error)"}, + {"Priority", Type, 0, ""}, + {"Writer", Type, 0, ""}, }, "maps": { - {"All", Func, 23}, - {"Clone", Func, 21}, - {"Collect", Func, 23}, - {"Copy", Func, 21}, - {"DeleteFunc", Func, 21}, - {"Equal", Func, 21}, - {"EqualFunc", Func, 21}, - {"Insert", Func, 23}, - {"Keys", Func, 23}, - {"Values", Func, 23}, + {"All", Func, 23, "func[Map ~map[K]V, K comparable, V any](m Map) iter.Seq2[K, V]"}, + {"Clone", Func, 21, "func[M ~map[K]V, K comparable, V any](m M) M"}, + {"Collect", Func, 23, "func[K comparable, V any](seq iter.Seq2[K, 
V]) map[K]V"}, + {"Copy", Func, 21, "func[M1 ~map[K]V, M2 ~map[K]V, K comparable, V any](dst M1, src M2)"}, + {"DeleteFunc", Func, 21, "func[M ~map[K]V, K comparable, V any](m M, del func(K, V) bool)"}, + {"Equal", Func, 21, "func[M1, M2 ~map[K]V, K, V comparable](m1 M1, m2 M2) bool"}, + {"EqualFunc", Func, 21, "func[M1 ~map[K]V1, M2 ~map[K]V2, K comparable, V1, V2 any](m1 M1, m2 M2, eq func(V1, V2) bool) bool"}, + {"Insert", Func, 23, "func[Map ~map[K]V, K comparable, V any](m Map, seq iter.Seq2[K, V])"}, + {"Keys", Func, 23, "func[Map ~map[K]V, K comparable, V any](m Map) iter.Seq[K]"}, + {"Values", Func, 23, "func[Map ~map[K]V, K comparable, V any](m Map) iter.Seq[V]"}, }, "math": { - {"Abs", Func, 0}, - {"Acos", Func, 0}, - {"Acosh", Func, 0}, - {"Asin", Func, 0}, - {"Asinh", Func, 0}, - {"Atan", Func, 0}, - {"Atan2", Func, 0}, - {"Atanh", Func, 0}, - {"Cbrt", Func, 0}, - {"Ceil", Func, 0}, - {"Copysign", Func, 0}, - {"Cos", Func, 0}, - {"Cosh", Func, 0}, - {"Dim", Func, 0}, - {"E", Const, 0}, - {"Erf", Func, 0}, - {"Erfc", Func, 0}, - {"Erfcinv", Func, 10}, - {"Erfinv", Func, 10}, - {"Exp", Func, 0}, - {"Exp2", Func, 0}, - {"Expm1", Func, 0}, - {"FMA", Func, 14}, - {"Float32bits", Func, 0}, - {"Float32frombits", Func, 0}, - {"Float64bits", Func, 0}, - {"Float64frombits", Func, 0}, - {"Floor", Func, 0}, - {"Frexp", Func, 0}, - {"Gamma", Func, 0}, - {"Hypot", Func, 0}, - {"Ilogb", Func, 0}, - {"Inf", Func, 0}, - {"IsInf", Func, 0}, - {"IsNaN", Func, 0}, - {"J0", Func, 0}, - {"J1", Func, 0}, - {"Jn", Func, 0}, - {"Ldexp", Func, 0}, - {"Lgamma", Func, 0}, - {"Ln10", Const, 0}, - {"Ln2", Const, 0}, - {"Log", Func, 0}, - {"Log10", Func, 0}, - {"Log10E", Const, 0}, - {"Log1p", Func, 0}, - {"Log2", Func, 0}, - {"Log2E", Const, 0}, - {"Logb", Func, 0}, - {"Max", Func, 0}, - {"MaxFloat32", Const, 0}, - {"MaxFloat64", Const, 0}, - {"MaxInt", Const, 17}, - {"MaxInt16", Const, 0}, - {"MaxInt32", Const, 0}, - {"MaxInt64", Const, 0}, - {"MaxInt8", Const, 0}, - {"MaxUint", Const, 17}, - {"MaxUint16", Const, 0}, - {"MaxUint32", Const, 0}, - {"MaxUint64", Const, 0}, - {"MaxUint8", Const, 0}, - {"Min", Func, 0}, - {"MinInt", Const, 17}, - {"MinInt16", Const, 0}, - {"MinInt32", Const, 0}, - {"MinInt64", Const, 0}, - {"MinInt8", Const, 0}, - {"Mod", Func, 0}, - {"Modf", Func, 0}, - {"NaN", Func, 0}, - {"Nextafter", Func, 0}, - {"Nextafter32", Func, 4}, - {"Phi", Const, 0}, - {"Pi", Const, 0}, - {"Pow", Func, 0}, - {"Pow10", Func, 0}, - {"Remainder", Func, 0}, - {"Round", Func, 10}, - {"RoundToEven", Func, 10}, - {"Signbit", Func, 0}, - {"Sin", Func, 0}, - {"Sincos", Func, 0}, - {"Sinh", Func, 0}, - {"SmallestNonzeroFloat32", Const, 0}, - {"SmallestNonzeroFloat64", Const, 0}, - {"Sqrt", Func, 0}, - {"Sqrt2", Const, 0}, - {"SqrtE", Const, 0}, - {"SqrtPhi", Const, 0}, - {"SqrtPi", Const, 0}, - {"Tan", Func, 0}, - {"Tanh", Func, 0}, - {"Trunc", Func, 0}, - {"Y0", Func, 0}, - {"Y1", Func, 0}, - {"Yn", Func, 0}, + {"Abs", Func, 0, "func(x float64) float64"}, + {"Acos", Func, 0, "func(x float64) float64"}, + {"Acosh", Func, 0, "func(x float64) float64"}, + {"Asin", Func, 0, "func(x float64) float64"}, + {"Asinh", Func, 0, "func(x float64) float64"}, + {"Atan", Func, 0, "func(x float64) float64"}, + {"Atan2", Func, 0, "func(y float64, x float64) float64"}, + {"Atanh", Func, 0, "func(x float64) float64"}, + {"Cbrt", Func, 0, "func(x float64) float64"}, + {"Ceil", Func, 0, "func(x float64) float64"}, + {"Copysign", Func, 0, "func(f float64, sign float64) float64"}, + {"Cos", Func, 0, "func(x float64) float64"}, + 
{"Cosh", Func, 0, "func(x float64) float64"}, + {"Dim", Func, 0, "func(x float64, y float64) float64"}, + {"E", Const, 0, ""}, + {"Erf", Func, 0, "func(x float64) float64"}, + {"Erfc", Func, 0, "func(x float64) float64"}, + {"Erfcinv", Func, 10, "func(x float64) float64"}, + {"Erfinv", Func, 10, "func(x float64) float64"}, + {"Exp", Func, 0, "func(x float64) float64"}, + {"Exp2", Func, 0, "func(x float64) float64"}, + {"Expm1", Func, 0, "func(x float64) float64"}, + {"FMA", Func, 14, "func(x float64, y float64, z float64) float64"}, + {"Float32bits", Func, 0, "func(f float32) uint32"}, + {"Float32frombits", Func, 0, "func(b uint32) float32"}, + {"Float64bits", Func, 0, "func(f float64) uint64"}, + {"Float64frombits", Func, 0, "func(b uint64) float64"}, + {"Floor", Func, 0, "func(x float64) float64"}, + {"Frexp", Func, 0, "func(f float64) (frac float64, exp int)"}, + {"Gamma", Func, 0, "func(x float64) float64"}, + {"Hypot", Func, 0, "func(p float64, q float64) float64"}, + {"Ilogb", Func, 0, "func(x float64) int"}, + {"Inf", Func, 0, "func(sign int) float64"}, + {"IsInf", Func, 0, "func(f float64, sign int) bool"}, + {"IsNaN", Func, 0, "func(f float64) (is bool)"}, + {"J0", Func, 0, "func(x float64) float64"}, + {"J1", Func, 0, "func(x float64) float64"}, + {"Jn", Func, 0, "func(n int, x float64) float64"}, + {"Ldexp", Func, 0, "func(frac float64, exp int) float64"}, + {"Lgamma", Func, 0, "func(x float64) (lgamma float64, sign int)"}, + {"Ln10", Const, 0, ""}, + {"Ln2", Const, 0, ""}, + {"Log", Func, 0, "func(x float64) float64"}, + {"Log10", Func, 0, "func(x float64) float64"}, + {"Log10E", Const, 0, ""}, + {"Log1p", Func, 0, "func(x float64) float64"}, + {"Log2", Func, 0, "func(x float64) float64"}, + {"Log2E", Const, 0, ""}, + {"Logb", Func, 0, "func(x float64) float64"}, + {"Max", Func, 0, "func(x float64, y float64) float64"}, + {"MaxFloat32", Const, 0, ""}, + {"MaxFloat64", Const, 0, ""}, + {"MaxInt", Const, 17, ""}, + {"MaxInt16", Const, 0, ""}, + {"MaxInt32", Const, 0, ""}, + {"MaxInt64", Const, 0, ""}, + {"MaxInt8", Const, 0, ""}, + {"MaxUint", Const, 17, ""}, + {"MaxUint16", Const, 0, ""}, + {"MaxUint32", Const, 0, ""}, + {"MaxUint64", Const, 0, ""}, + {"MaxUint8", Const, 0, ""}, + {"Min", Func, 0, "func(x float64, y float64) float64"}, + {"MinInt", Const, 17, ""}, + {"MinInt16", Const, 0, ""}, + {"MinInt32", Const, 0, ""}, + {"MinInt64", Const, 0, ""}, + {"MinInt8", Const, 0, ""}, + {"Mod", Func, 0, "func(x float64, y float64) float64"}, + {"Modf", Func, 0, "func(f float64) (int float64, frac float64)"}, + {"NaN", Func, 0, "func() float64"}, + {"Nextafter", Func, 0, "func(x float64, y float64) (r float64)"}, + {"Nextafter32", Func, 4, "func(x float32, y float32) (r float32)"}, + {"Phi", Const, 0, ""}, + {"Pi", Const, 0, ""}, + {"Pow", Func, 0, "func(x float64, y float64) float64"}, + {"Pow10", Func, 0, "func(n int) float64"}, + {"Remainder", Func, 0, "func(x float64, y float64) float64"}, + {"Round", Func, 10, "func(x float64) float64"}, + {"RoundToEven", Func, 10, "func(x float64) float64"}, + {"Signbit", Func, 0, "func(x float64) bool"}, + {"Sin", Func, 0, "func(x float64) float64"}, + {"Sincos", Func, 0, "func(x float64) (sin float64, cos float64)"}, + {"Sinh", Func, 0, "func(x float64) float64"}, + {"SmallestNonzeroFloat32", Const, 0, ""}, + {"SmallestNonzeroFloat64", Const, 0, ""}, + {"Sqrt", Func, 0, "func(x float64) float64"}, + {"Sqrt2", Const, 0, ""}, + {"SqrtE", Const, 0, ""}, + {"SqrtPhi", Const, 0, ""}, + {"SqrtPi", Const, 0, ""}, + {"Tan", Func, 0, "func(x float64) 
float64"}, + {"Tanh", Func, 0, "func(x float64) float64"}, + {"Trunc", Func, 0, "func(x float64) float64"}, + {"Y0", Func, 0, "func(x float64) float64"}, + {"Y1", Func, 0, "func(x float64) float64"}, + {"Yn", Func, 0, "func(n int, x float64) float64"}, }, "math/big": { - {"(*Float).Abs", Method, 5}, - {"(*Float).Acc", Method, 5}, - {"(*Float).Add", Method, 5}, - {"(*Float).Append", Method, 5}, - {"(*Float).AppendText", Method, 24}, - {"(*Float).Cmp", Method, 5}, - {"(*Float).Copy", Method, 5}, - {"(*Float).Float32", Method, 5}, - {"(*Float).Float64", Method, 5}, - {"(*Float).Format", Method, 5}, - {"(*Float).GobDecode", Method, 7}, - {"(*Float).GobEncode", Method, 7}, - {"(*Float).Int", Method, 5}, - {"(*Float).Int64", Method, 5}, - {"(*Float).IsInf", Method, 5}, - {"(*Float).IsInt", Method, 5}, - {"(*Float).MantExp", Method, 5}, - {"(*Float).MarshalText", Method, 6}, - {"(*Float).MinPrec", Method, 5}, - {"(*Float).Mode", Method, 5}, - {"(*Float).Mul", Method, 5}, - {"(*Float).Neg", Method, 5}, - {"(*Float).Parse", Method, 5}, - {"(*Float).Prec", Method, 5}, - {"(*Float).Quo", Method, 5}, - {"(*Float).Rat", Method, 5}, - {"(*Float).Scan", Method, 8}, - {"(*Float).Set", Method, 5}, - {"(*Float).SetFloat64", Method, 5}, - {"(*Float).SetInf", Method, 5}, - {"(*Float).SetInt", Method, 5}, - {"(*Float).SetInt64", Method, 5}, - {"(*Float).SetMantExp", Method, 5}, - {"(*Float).SetMode", Method, 5}, - {"(*Float).SetPrec", Method, 5}, - {"(*Float).SetRat", Method, 5}, - {"(*Float).SetString", Method, 5}, - {"(*Float).SetUint64", Method, 5}, - {"(*Float).Sign", Method, 5}, - {"(*Float).Signbit", Method, 5}, - {"(*Float).Sqrt", Method, 10}, - {"(*Float).String", Method, 5}, - {"(*Float).Sub", Method, 5}, - {"(*Float).Text", Method, 5}, - {"(*Float).Uint64", Method, 5}, - {"(*Float).UnmarshalText", Method, 6}, - {"(*Int).Abs", Method, 0}, - {"(*Int).Add", Method, 0}, - {"(*Int).And", Method, 0}, - {"(*Int).AndNot", Method, 0}, - {"(*Int).Append", Method, 6}, - {"(*Int).AppendText", Method, 24}, - {"(*Int).Binomial", Method, 0}, - {"(*Int).Bit", Method, 0}, - {"(*Int).BitLen", Method, 0}, - {"(*Int).Bits", Method, 0}, - {"(*Int).Bytes", Method, 0}, - {"(*Int).Cmp", Method, 0}, - {"(*Int).CmpAbs", Method, 10}, - {"(*Int).Div", Method, 0}, - {"(*Int).DivMod", Method, 0}, - {"(*Int).Exp", Method, 0}, - {"(*Int).FillBytes", Method, 15}, - {"(*Int).Float64", Method, 21}, - {"(*Int).Format", Method, 0}, - {"(*Int).GCD", Method, 0}, - {"(*Int).GobDecode", Method, 0}, - {"(*Int).GobEncode", Method, 0}, - {"(*Int).Int64", Method, 0}, - {"(*Int).IsInt64", Method, 9}, - {"(*Int).IsUint64", Method, 9}, - {"(*Int).Lsh", Method, 0}, - {"(*Int).MarshalJSON", Method, 1}, - {"(*Int).MarshalText", Method, 3}, - {"(*Int).Mod", Method, 0}, - {"(*Int).ModInverse", Method, 0}, - {"(*Int).ModSqrt", Method, 5}, - {"(*Int).Mul", Method, 0}, - {"(*Int).MulRange", Method, 0}, - {"(*Int).Neg", Method, 0}, - {"(*Int).Not", Method, 0}, - {"(*Int).Or", Method, 0}, - {"(*Int).ProbablyPrime", Method, 0}, - {"(*Int).Quo", Method, 0}, - {"(*Int).QuoRem", Method, 0}, - {"(*Int).Rand", Method, 0}, - {"(*Int).Rem", Method, 0}, - {"(*Int).Rsh", Method, 0}, - {"(*Int).Scan", Method, 0}, - {"(*Int).Set", Method, 0}, - {"(*Int).SetBit", Method, 0}, - {"(*Int).SetBits", Method, 0}, - {"(*Int).SetBytes", Method, 0}, - {"(*Int).SetInt64", Method, 0}, - {"(*Int).SetString", Method, 0}, - {"(*Int).SetUint64", Method, 1}, - {"(*Int).Sign", Method, 0}, - {"(*Int).Sqrt", Method, 8}, - {"(*Int).String", Method, 0}, - {"(*Int).Sub", Method, 0}, - 
{"(*Int).Text", Method, 6}, - {"(*Int).TrailingZeroBits", Method, 13}, - {"(*Int).Uint64", Method, 1}, - {"(*Int).UnmarshalJSON", Method, 1}, - {"(*Int).UnmarshalText", Method, 3}, - {"(*Int).Xor", Method, 0}, - {"(*Rat).Abs", Method, 0}, - {"(*Rat).Add", Method, 0}, - {"(*Rat).AppendText", Method, 24}, - {"(*Rat).Cmp", Method, 0}, - {"(*Rat).Denom", Method, 0}, - {"(*Rat).Float32", Method, 4}, - {"(*Rat).Float64", Method, 1}, - {"(*Rat).FloatPrec", Method, 22}, - {"(*Rat).FloatString", Method, 0}, - {"(*Rat).GobDecode", Method, 0}, - {"(*Rat).GobEncode", Method, 0}, - {"(*Rat).Inv", Method, 0}, - {"(*Rat).IsInt", Method, 0}, - {"(*Rat).MarshalText", Method, 3}, - {"(*Rat).Mul", Method, 0}, - {"(*Rat).Neg", Method, 0}, - {"(*Rat).Num", Method, 0}, - {"(*Rat).Quo", Method, 0}, - {"(*Rat).RatString", Method, 0}, - {"(*Rat).Scan", Method, 0}, - {"(*Rat).Set", Method, 0}, - {"(*Rat).SetFloat64", Method, 1}, - {"(*Rat).SetFrac", Method, 0}, - {"(*Rat).SetFrac64", Method, 0}, - {"(*Rat).SetInt", Method, 0}, - {"(*Rat).SetInt64", Method, 0}, - {"(*Rat).SetString", Method, 0}, - {"(*Rat).SetUint64", Method, 13}, - {"(*Rat).Sign", Method, 0}, - {"(*Rat).String", Method, 0}, - {"(*Rat).Sub", Method, 0}, - {"(*Rat).UnmarshalText", Method, 3}, - {"(Accuracy).String", Method, 5}, - {"(ErrNaN).Error", Method, 5}, - {"(RoundingMode).String", Method, 5}, - {"Above", Const, 5}, - {"Accuracy", Type, 5}, - {"AwayFromZero", Const, 5}, - {"Below", Const, 5}, - {"ErrNaN", Type, 5}, - {"Exact", Const, 5}, - {"Float", Type, 5}, - {"Int", Type, 0}, - {"Jacobi", Func, 5}, - {"MaxBase", Const, 0}, - {"MaxExp", Const, 5}, - {"MaxPrec", Const, 5}, - {"MinExp", Const, 5}, - {"NewFloat", Func, 5}, - {"NewInt", Func, 0}, - {"NewRat", Func, 0}, - {"ParseFloat", Func, 5}, - {"Rat", Type, 0}, - {"RoundingMode", Type, 5}, - {"ToNearestAway", Const, 5}, - {"ToNearestEven", Const, 5}, - {"ToNegativeInf", Const, 5}, - {"ToPositiveInf", Const, 5}, - {"ToZero", Const, 5}, - {"Word", Type, 0}, + {"(*Float).Abs", Method, 5, ""}, + {"(*Float).Acc", Method, 5, ""}, + {"(*Float).Add", Method, 5, ""}, + {"(*Float).Append", Method, 5, ""}, + {"(*Float).AppendText", Method, 24, ""}, + {"(*Float).Cmp", Method, 5, ""}, + {"(*Float).Copy", Method, 5, ""}, + {"(*Float).Float32", Method, 5, ""}, + {"(*Float).Float64", Method, 5, ""}, + {"(*Float).Format", Method, 5, ""}, + {"(*Float).GobDecode", Method, 7, ""}, + {"(*Float).GobEncode", Method, 7, ""}, + {"(*Float).Int", Method, 5, ""}, + {"(*Float).Int64", Method, 5, ""}, + {"(*Float).IsInf", Method, 5, ""}, + {"(*Float).IsInt", Method, 5, ""}, + {"(*Float).MantExp", Method, 5, ""}, + {"(*Float).MarshalText", Method, 6, ""}, + {"(*Float).MinPrec", Method, 5, ""}, + {"(*Float).Mode", Method, 5, ""}, + {"(*Float).Mul", Method, 5, ""}, + {"(*Float).Neg", Method, 5, ""}, + {"(*Float).Parse", Method, 5, ""}, + {"(*Float).Prec", Method, 5, ""}, + {"(*Float).Quo", Method, 5, ""}, + {"(*Float).Rat", Method, 5, ""}, + {"(*Float).Scan", Method, 8, ""}, + {"(*Float).Set", Method, 5, ""}, + {"(*Float).SetFloat64", Method, 5, ""}, + {"(*Float).SetInf", Method, 5, ""}, + {"(*Float).SetInt", Method, 5, ""}, + {"(*Float).SetInt64", Method, 5, ""}, + {"(*Float).SetMantExp", Method, 5, ""}, + {"(*Float).SetMode", Method, 5, ""}, + {"(*Float).SetPrec", Method, 5, ""}, + {"(*Float).SetRat", Method, 5, ""}, + {"(*Float).SetString", Method, 5, ""}, + {"(*Float).SetUint64", Method, 5, ""}, + {"(*Float).Sign", Method, 5, ""}, + {"(*Float).Signbit", Method, 5, ""}, + {"(*Float).Sqrt", Method, 10, ""}, + 
{"(*Float).String", Method, 5, ""}, + {"(*Float).Sub", Method, 5, ""}, + {"(*Float).Text", Method, 5, ""}, + {"(*Float).Uint64", Method, 5, ""}, + {"(*Float).UnmarshalText", Method, 6, ""}, + {"(*Int).Abs", Method, 0, ""}, + {"(*Int).Add", Method, 0, ""}, + {"(*Int).And", Method, 0, ""}, + {"(*Int).AndNot", Method, 0, ""}, + {"(*Int).Append", Method, 6, ""}, + {"(*Int).AppendText", Method, 24, ""}, + {"(*Int).Binomial", Method, 0, ""}, + {"(*Int).Bit", Method, 0, ""}, + {"(*Int).BitLen", Method, 0, ""}, + {"(*Int).Bits", Method, 0, ""}, + {"(*Int).Bytes", Method, 0, ""}, + {"(*Int).Cmp", Method, 0, ""}, + {"(*Int).CmpAbs", Method, 10, ""}, + {"(*Int).Div", Method, 0, ""}, + {"(*Int).DivMod", Method, 0, ""}, + {"(*Int).Exp", Method, 0, ""}, + {"(*Int).FillBytes", Method, 15, ""}, + {"(*Int).Float64", Method, 21, ""}, + {"(*Int).Format", Method, 0, ""}, + {"(*Int).GCD", Method, 0, ""}, + {"(*Int).GobDecode", Method, 0, ""}, + {"(*Int).GobEncode", Method, 0, ""}, + {"(*Int).Int64", Method, 0, ""}, + {"(*Int).IsInt64", Method, 9, ""}, + {"(*Int).IsUint64", Method, 9, ""}, + {"(*Int).Lsh", Method, 0, ""}, + {"(*Int).MarshalJSON", Method, 1, ""}, + {"(*Int).MarshalText", Method, 3, ""}, + {"(*Int).Mod", Method, 0, ""}, + {"(*Int).ModInverse", Method, 0, ""}, + {"(*Int).ModSqrt", Method, 5, ""}, + {"(*Int).Mul", Method, 0, ""}, + {"(*Int).MulRange", Method, 0, ""}, + {"(*Int).Neg", Method, 0, ""}, + {"(*Int).Not", Method, 0, ""}, + {"(*Int).Or", Method, 0, ""}, + {"(*Int).ProbablyPrime", Method, 0, ""}, + {"(*Int).Quo", Method, 0, ""}, + {"(*Int).QuoRem", Method, 0, ""}, + {"(*Int).Rand", Method, 0, ""}, + {"(*Int).Rem", Method, 0, ""}, + {"(*Int).Rsh", Method, 0, ""}, + {"(*Int).Scan", Method, 0, ""}, + {"(*Int).Set", Method, 0, ""}, + {"(*Int).SetBit", Method, 0, ""}, + {"(*Int).SetBits", Method, 0, ""}, + {"(*Int).SetBytes", Method, 0, ""}, + {"(*Int).SetInt64", Method, 0, ""}, + {"(*Int).SetString", Method, 0, ""}, + {"(*Int).SetUint64", Method, 1, ""}, + {"(*Int).Sign", Method, 0, ""}, + {"(*Int).Sqrt", Method, 8, ""}, + {"(*Int).String", Method, 0, ""}, + {"(*Int).Sub", Method, 0, ""}, + {"(*Int).Text", Method, 6, ""}, + {"(*Int).TrailingZeroBits", Method, 13, ""}, + {"(*Int).Uint64", Method, 1, ""}, + {"(*Int).UnmarshalJSON", Method, 1, ""}, + {"(*Int).UnmarshalText", Method, 3, ""}, + {"(*Int).Xor", Method, 0, ""}, + {"(*Rat).Abs", Method, 0, ""}, + {"(*Rat).Add", Method, 0, ""}, + {"(*Rat).AppendText", Method, 24, ""}, + {"(*Rat).Cmp", Method, 0, ""}, + {"(*Rat).Denom", Method, 0, ""}, + {"(*Rat).Float32", Method, 4, ""}, + {"(*Rat).Float64", Method, 1, ""}, + {"(*Rat).FloatPrec", Method, 22, ""}, + {"(*Rat).FloatString", Method, 0, ""}, + {"(*Rat).GobDecode", Method, 0, ""}, + {"(*Rat).GobEncode", Method, 0, ""}, + {"(*Rat).Inv", Method, 0, ""}, + {"(*Rat).IsInt", Method, 0, ""}, + {"(*Rat).MarshalText", Method, 3, ""}, + {"(*Rat).Mul", Method, 0, ""}, + {"(*Rat).Neg", Method, 0, ""}, + {"(*Rat).Num", Method, 0, ""}, + {"(*Rat).Quo", Method, 0, ""}, + {"(*Rat).RatString", Method, 0, ""}, + {"(*Rat).Scan", Method, 0, ""}, + {"(*Rat).Set", Method, 0, ""}, + {"(*Rat).SetFloat64", Method, 1, ""}, + {"(*Rat).SetFrac", Method, 0, ""}, + {"(*Rat).SetFrac64", Method, 0, ""}, + {"(*Rat).SetInt", Method, 0, ""}, + {"(*Rat).SetInt64", Method, 0, ""}, + {"(*Rat).SetString", Method, 0, ""}, + {"(*Rat).SetUint64", Method, 13, ""}, + {"(*Rat).Sign", Method, 0, ""}, + {"(*Rat).String", Method, 0, ""}, + {"(*Rat).Sub", Method, 0, ""}, + {"(*Rat).UnmarshalText", Method, 3, ""}, + {"(Accuracy).String", 
Method, 5, ""}, + {"(ErrNaN).Error", Method, 5, ""}, + {"(RoundingMode).String", Method, 5, ""}, + {"Above", Const, 5, ""}, + {"Accuracy", Type, 5, ""}, + {"AwayFromZero", Const, 5, ""}, + {"Below", Const, 5, ""}, + {"ErrNaN", Type, 5, ""}, + {"Exact", Const, 5, ""}, + {"Float", Type, 5, ""}, + {"Int", Type, 0, ""}, + {"Jacobi", Func, 5, "func(x *Int, y *Int) int"}, + {"MaxBase", Const, 0, ""}, + {"MaxExp", Const, 5, ""}, + {"MaxPrec", Const, 5, ""}, + {"MinExp", Const, 5, ""}, + {"NewFloat", Func, 5, "func(x float64) *Float"}, + {"NewInt", Func, 0, "func(x int64) *Int"}, + {"NewRat", Func, 0, "func(a int64, b int64) *Rat"}, + {"ParseFloat", Func, 5, "func(s string, base int, prec uint, mode RoundingMode) (f *Float, b int, err error)"}, + {"Rat", Type, 0, ""}, + {"RoundingMode", Type, 5, ""}, + {"ToNearestAway", Const, 5, ""}, + {"ToNearestEven", Const, 5, ""}, + {"ToNegativeInf", Const, 5, ""}, + {"ToPositiveInf", Const, 5, ""}, + {"ToZero", Const, 5, ""}, + {"Word", Type, 0, ""}, }, "math/bits": { - {"Add", Func, 12}, - {"Add32", Func, 12}, - {"Add64", Func, 12}, - {"Div", Func, 12}, - {"Div32", Func, 12}, - {"Div64", Func, 12}, - {"LeadingZeros", Func, 9}, - {"LeadingZeros16", Func, 9}, - {"LeadingZeros32", Func, 9}, - {"LeadingZeros64", Func, 9}, - {"LeadingZeros8", Func, 9}, - {"Len", Func, 9}, - {"Len16", Func, 9}, - {"Len32", Func, 9}, - {"Len64", Func, 9}, - {"Len8", Func, 9}, - {"Mul", Func, 12}, - {"Mul32", Func, 12}, - {"Mul64", Func, 12}, - {"OnesCount", Func, 9}, - {"OnesCount16", Func, 9}, - {"OnesCount32", Func, 9}, - {"OnesCount64", Func, 9}, - {"OnesCount8", Func, 9}, - {"Rem", Func, 14}, - {"Rem32", Func, 14}, - {"Rem64", Func, 14}, - {"Reverse", Func, 9}, - {"Reverse16", Func, 9}, - {"Reverse32", Func, 9}, - {"Reverse64", Func, 9}, - {"Reverse8", Func, 9}, - {"ReverseBytes", Func, 9}, - {"ReverseBytes16", Func, 9}, - {"ReverseBytes32", Func, 9}, - {"ReverseBytes64", Func, 9}, - {"RotateLeft", Func, 9}, - {"RotateLeft16", Func, 9}, - {"RotateLeft32", Func, 9}, - {"RotateLeft64", Func, 9}, - {"RotateLeft8", Func, 9}, - {"Sub", Func, 12}, - {"Sub32", Func, 12}, - {"Sub64", Func, 12}, - {"TrailingZeros", Func, 9}, - {"TrailingZeros16", Func, 9}, - {"TrailingZeros32", Func, 9}, - {"TrailingZeros64", Func, 9}, - {"TrailingZeros8", Func, 9}, - {"UintSize", Const, 9}, + {"Add", Func, 12, "func(x uint, y uint, carry uint) (sum uint, carryOut uint)"}, + {"Add32", Func, 12, "func(x uint32, y uint32, carry uint32) (sum uint32, carryOut uint32)"}, + {"Add64", Func, 12, "func(x uint64, y uint64, carry uint64) (sum uint64, carryOut uint64)"}, + {"Div", Func, 12, "func(hi uint, lo uint, y uint) (quo uint, rem uint)"}, + {"Div32", Func, 12, "func(hi uint32, lo uint32, y uint32) (quo uint32, rem uint32)"}, + {"Div64", Func, 12, "func(hi uint64, lo uint64, y uint64) (quo uint64, rem uint64)"}, + {"LeadingZeros", Func, 9, "func(x uint) int"}, + {"LeadingZeros16", Func, 9, "func(x uint16) int"}, + {"LeadingZeros32", Func, 9, "func(x uint32) int"}, + {"LeadingZeros64", Func, 9, "func(x uint64) int"}, + {"LeadingZeros8", Func, 9, "func(x uint8) int"}, + {"Len", Func, 9, "func(x uint) int"}, + {"Len16", Func, 9, "func(x uint16) (n int)"}, + {"Len32", Func, 9, "func(x uint32) (n int)"}, + {"Len64", Func, 9, "func(x uint64) (n int)"}, + {"Len8", Func, 9, "func(x uint8) int"}, + {"Mul", Func, 12, "func(x uint, y uint) (hi uint, lo uint)"}, + {"Mul32", Func, 12, "func(x uint32, y uint32) (hi uint32, lo uint32)"}, + {"Mul64", Func, 12, "func(x uint64, y uint64) (hi uint64, lo uint64)"}, + 
{"OnesCount", Func, 9, "func(x uint) int"}, + {"OnesCount16", Func, 9, "func(x uint16) int"}, + {"OnesCount32", Func, 9, "func(x uint32) int"}, + {"OnesCount64", Func, 9, "func(x uint64) int"}, + {"OnesCount8", Func, 9, "func(x uint8) int"}, + {"Rem", Func, 14, "func(hi uint, lo uint, y uint) uint"}, + {"Rem32", Func, 14, "func(hi uint32, lo uint32, y uint32) uint32"}, + {"Rem64", Func, 14, "func(hi uint64, lo uint64, y uint64) uint64"}, + {"Reverse", Func, 9, "func(x uint) uint"}, + {"Reverse16", Func, 9, "func(x uint16) uint16"}, + {"Reverse32", Func, 9, "func(x uint32) uint32"}, + {"Reverse64", Func, 9, "func(x uint64) uint64"}, + {"Reverse8", Func, 9, "func(x uint8) uint8"}, + {"ReverseBytes", Func, 9, "func(x uint) uint"}, + {"ReverseBytes16", Func, 9, "func(x uint16) uint16"}, + {"ReverseBytes32", Func, 9, "func(x uint32) uint32"}, + {"ReverseBytes64", Func, 9, "func(x uint64) uint64"}, + {"RotateLeft", Func, 9, "func(x uint, k int) uint"}, + {"RotateLeft16", Func, 9, "func(x uint16, k int) uint16"}, + {"RotateLeft32", Func, 9, "func(x uint32, k int) uint32"}, + {"RotateLeft64", Func, 9, "func(x uint64, k int) uint64"}, + {"RotateLeft8", Func, 9, "func(x uint8, k int) uint8"}, + {"Sub", Func, 12, "func(x uint, y uint, borrow uint) (diff uint, borrowOut uint)"}, + {"Sub32", Func, 12, "func(x uint32, y uint32, borrow uint32) (diff uint32, borrowOut uint32)"}, + {"Sub64", Func, 12, "func(x uint64, y uint64, borrow uint64) (diff uint64, borrowOut uint64)"}, + {"TrailingZeros", Func, 9, "func(x uint) int"}, + {"TrailingZeros16", Func, 9, "func(x uint16) int"}, + {"TrailingZeros32", Func, 9, "func(x uint32) int"}, + {"TrailingZeros64", Func, 9, "func(x uint64) int"}, + {"TrailingZeros8", Func, 9, "func(x uint8) int"}, + {"UintSize", Const, 9, ""}, }, "math/cmplx": { - {"Abs", Func, 0}, - {"Acos", Func, 0}, - {"Acosh", Func, 0}, - {"Asin", Func, 0}, - {"Asinh", Func, 0}, - {"Atan", Func, 0}, - {"Atanh", Func, 0}, - {"Conj", Func, 0}, - {"Cos", Func, 0}, - {"Cosh", Func, 0}, - {"Cot", Func, 0}, - {"Exp", Func, 0}, - {"Inf", Func, 0}, - {"IsInf", Func, 0}, - {"IsNaN", Func, 0}, - {"Log", Func, 0}, - {"Log10", Func, 0}, - {"NaN", Func, 0}, - {"Phase", Func, 0}, - {"Polar", Func, 0}, - {"Pow", Func, 0}, - {"Rect", Func, 0}, - {"Sin", Func, 0}, - {"Sinh", Func, 0}, - {"Sqrt", Func, 0}, - {"Tan", Func, 0}, - {"Tanh", Func, 0}, + {"Abs", Func, 0, "func(x complex128) float64"}, + {"Acos", Func, 0, "func(x complex128) complex128"}, + {"Acosh", Func, 0, "func(x complex128) complex128"}, + {"Asin", Func, 0, "func(x complex128) complex128"}, + {"Asinh", Func, 0, "func(x complex128) complex128"}, + {"Atan", Func, 0, "func(x complex128) complex128"}, + {"Atanh", Func, 0, "func(x complex128) complex128"}, + {"Conj", Func, 0, "func(x complex128) complex128"}, + {"Cos", Func, 0, "func(x complex128) complex128"}, + {"Cosh", Func, 0, "func(x complex128) complex128"}, + {"Cot", Func, 0, "func(x complex128) complex128"}, + {"Exp", Func, 0, "func(x complex128) complex128"}, + {"Inf", Func, 0, "func() complex128"}, + {"IsInf", Func, 0, "func(x complex128) bool"}, + {"IsNaN", Func, 0, "func(x complex128) bool"}, + {"Log", Func, 0, "func(x complex128) complex128"}, + {"Log10", Func, 0, "func(x complex128) complex128"}, + {"NaN", Func, 0, "func() complex128"}, + {"Phase", Func, 0, "func(x complex128) float64"}, + {"Polar", Func, 0, "func(x complex128) (r float64, θ float64)"}, + {"Pow", Func, 0, "func(x complex128, y complex128) complex128"}, + {"Rect", Func, 0, "func(r float64, θ float64) complex128"}, + {"Sin", 
Func, 0, "func(x complex128) complex128"}, + {"Sinh", Func, 0, "func(x complex128) complex128"}, + {"Sqrt", Func, 0, "func(x complex128) complex128"}, + {"Tan", Func, 0, "func(x complex128) complex128"}, + {"Tanh", Func, 0, "func(x complex128) complex128"}, }, "math/rand": { - {"(*Rand).ExpFloat64", Method, 0}, - {"(*Rand).Float32", Method, 0}, - {"(*Rand).Float64", Method, 0}, - {"(*Rand).Int", Method, 0}, - {"(*Rand).Int31", Method, 0}, - {"(*Rand).Int31n", Method, 0}, - {"(*Rand).Int63", Method, 0}, - {"(*Rand).Int63n", Method, 0}, - {"(*Rand).Intn", Method, 0}, - {"(*Rand).NormFloat64", Method, 0}, - {"(*Rand).Perm", Method, 0}, - {"(*Rand).Read", Method, 6}, - {"(*Rand).Seed", Method, 0}, - {"(*Rand).Shuffle", Method, 10}, - {"(*Rand).Uint32", Method, 0}, - {"(*Rand).Uint64", Method, 8}, - {"(*Zipf).Uint64", Method, 0}, - {"ExpFloat64", Func, 0}, - {"Float32", Func, 0}, - {"Float64", Func, 0}, - {"Int", Func, 0}, - {"Int31", Func, 0}, - {"Int31n", Func, 0}, - {"Int63", Func, 0}, - {"Int63n", Func, 0}, - {"Intn", Func, 0}, - {"New", Func, 0}, - {"NewSource", Func, 0}, - {"NewZipf", Func, 0}, - {"NormFloat64", Func, 0}, - {"Perm", Func, 0}, - {"Rand", Type, 0}, - {"Read", Func, 6}, - {"Seed", Func, 0}, - {"Shuffle", Func, 10}, - {"Source", Type, 0}, - {"Source64", Type, 8}, - {"Uint32", Func, 0}, - {"Uint64", Func, 8}, - {"Zipf", Type, 0}, + {"(*Rand).ExpFloat64", Method, 0, ""}, + {"(*Rand).Float32", Method, 0, ""}, + {"(*Rand).Float64", Method, 0, ""}, + {"(*Rand).Int", Method, 0, ""}, + {"(*Rand).Int31", Method, 0, ""}, + {"(*Rand).Int31n", Method, 0, ""}, + {"(*Rand).Int63", Method, 0, ""}, + {"(*Rand).Int63n", Method, 0, ""}, + {"(*Rand).Intn", Method, 0, ""}, + {"(*Rand).NormFloat64", Method, 0, ""}, + {"(*Rand).Perm", Method, 0, ""}, + {"(*Rand).Read", Method, 6, ""}, + {"(*Rand).Seed", Method, 0, ""}, + {"(*Rand).Shuffle", Method, 10, ""}, + {"(*Rand).Uint32", Method, 0, ""}, + {"(*Rand).Uint64", Method, 8, ""}, + {"(*Zipf).Uint64", Method, 0, ""}, + {"ExpFloat64", Func, 0, "func() float64"}, + {"Float32", Func, 0, "func() float32"}, + {"Float64", Func, 0, "func() float64"}, + {"Int", Func, 0, "func() int"}, + {"Int31", Func, 0, "func() int32"}, + {"Int31n", Func, 0, "func(n int32) int32"}, + {"Int63", Func, 0, "func() int64"}, + {"Int63n", Func, 0, "func(n int64) int64"}, + {"Intn", Func, 0, "func(n int) int"}, + {"New", Func, 0, "func(src Source) *Rand"}, + {"NewSource", Func, 0, "func(seed int64) Source"}, + {"NewZipf", Func, 0, "func(r *Rand, s float64, v float64, imax uint64) *Zipf"}, + {"NormFloat64", Func, 0, "func() float64"}, + {"Perm", Func, 0, "func(n int) []int"}, + {"Rand", Type, 0, ""}, + {"Read", Func, 6, "func(p []byte) (n int, err error)"}, + {"Seed", Func, 0, "func(seed int64)"}, + {"Shuffle", Func, 10, "func(n int, swap func(i int, j int))"}, + {"Source", Type, 0, ""}, + {"Source64", Type, 8, ""}, + {"Uint32", Func, 0, "func() uint32"}, + {"Uint64", Func, 8, "func() uint64"}, + {"Zipf", Type, 0, ""}, }, "math/rand/v2": { - {"(*ChaCha8).AppendBinary", Method, 24}, - {"(*ChaCha8).MarshalBinary", Method, 22}, - {"(*ChaCha8).Read", Method, 23}, - {"(*ChaCha8).Seed", Method, 22}, - {"(*ChaCha8).Uint64", Method, 22}, - {"(*ChaCha8).UnmarshalBinary", Method, 22}, - {"(*PCG).AppendBinary", Method, 24}, - {"(*PCG).MarshalBinary", Method, 22}, - {"(*PCG).Seed", Method, 22}, - {"(*PCG).Uint64", Method, 22}, - {"(*PCG).UnmarshalBinary", Method, 22}, - {"(*Rand).ExpFloat64", Method, 22}, - {"(*Rand).Float32", Method, 22}, - {"(*Rand).Float64", Method, 22}, - 
{"(*Rand).Int", Method, 22}, - {"(*Rand).Int32", Method, 22}, - {"(*Rand).Int32N", Method, 22}, - {"(*Rand).Int64", Method, 22}, - {"(*Rand).Int64N", Method, 22}, - {"(*Rand).IntN", Method, 22}, - {"(*Rand).NormFloat64", Method, 22}, - {"(*Rand).Perm", Method, 22}, - {"(*Rand).Shuffle", Method, 22}, - {"(*Rand).Uint", Method, 23}, - {"(*Rand).Uint32", Method, 22}, - {"(*Rand).Uint32N", Method, 22}, - {"(*Rand).Uint64", Method, 22}, - {"(*Rand).Uint64N", Method, 22}, - {"(*Rand).UintN", Method, 22}, - {"(*Zipf).Uint64", Method, 22}, - {"ChaCha8", Type, 22}, - {"ExpFloat64", Func, 22}, - {"Float32", Func, 22}, - {"Float64", Func, 22}, - {"Int", Func, 22}, - {"Int32", Func, 22}, - {"Int32N", Func, 22}, - {"Int64", Func, 22}, - {"Int64N", Func, 22}, - {"IntN", Func, 22}, - {"N", Func, 22}, - {"New", Func, 22}, - {"NewChaCha8", Func, 22}, - {"NewPCG", Func, 22}, - {"NewZipf", Func, 22}, - {"NormFloat64", Func, 22}, - {"PCG", Type, 22}, - {"Perm", Func, 22}, - {"Rand", Type, 22}, - {"Shuffle", Func, 22}, - {"Source", Type, 22}, - {"Uint", Func, 23}, - {"Uint32", Func, 22}, - {"Uint32N", Func, 22}, - {"Uint64", Func, 22}, - {"Uint64N", Func, 22}, - {"UintN", Func, 22}, - {"Zipf", Type, 22}, + {"(*ChaCha8).AppendBinary", Method, 24, ""}, + {"(*ChaCha8).MarshalBinary", Method, 22, ""}, + {"(*ChaCha8).Read", Method, 23, ""}, + {"(*ChaCha8).Seed", Method, 22, ""}, + {"(*ChaCha8).Uint64", Method, 22, ""}, + {"(*ChaCha8).UnmarshalBinary", Method, 22, ""}, + {"(*PCG).AppendBinary", Method, 24, ""}, + {"(*PCG).MarshalBinary", Method, 22, ""}, + {"(*PCG).Seed", Method, 22, ""}, + {"(*PCG).Uint64", Method, 22, ""}, + {"(*PCG).UnmarshalBinary", Method, 22, ""}, + {"(*Rand).ExpFloat64", Method, 22, ""}, + {"(*Rand).Float32", Method, 22, ""}, + {"(*Rand).Float64", Method, 22, ""}, + {"(*Rand).Int", Method, 22, ""}, + {"(*Rand).Int32", Method, 22, ""}, + {"(*Rand).Int32N", Method, 22, ""}, + {"(*Rand).Int64", Method, 22, ""}, + {"(*Rand).Int64N", Method, 22, ""}, + {"(*Rand).IntN", Method, 22, ""}, + {"(*Rand).NormFloat64", Method, 22, ""}, + {"(*Rand).Perm", Method, 22, ""}, + {"(*Rand).Shuffle", Method, 22, ""}, + {"(*Rand).Uint", Method, 23, ""}, + {"(*Rand).Uint32", Method, 22, ""}, + {"(*Rand).Uint32N", Method, 22, ""}, + {"(*Rand).Uint64", Method, 22, ""}, + {"(*Rand).Uint64N", Method, 22, ""}, + {"(*Rand).UintN", Method, 22, ""}, + {"(*Zipf).Uint64", Method, 22, ""}, + {"ChaCha8", Type, 22, ""}, + {"ExpFloat64", Func, 22, "func() float64"}, + {"Float32", Func, 22, "func() float32"}, + {"Float64", Func, 22, "func() float64"}, + {"Int", Func, 22, "func() int"}, + {"Int32", Func, 22, "func() int32"}, + {"Int32N", Func, 22, "func(n int32) int32"}, + {"Int64", Func, 22, "func() int64"}, + {"Int64N", Func, 22, "func(n int64) int64"}, + {"IntN", Func, 22, "func(n int) int"}, + {"N", Func, 22, "func[Int intType](n Int) Int"}, + {"New", Func, 22, "func(src Source) *Rand"}, + {"NewChaCha8", Func, 22, "func(seed [32]byte) *ChaCha8"}, + {"NewPCG", Func, 22, "func(seed1 uint64, seed2 uint64) *PCG"}, + {"NewZipf", Func, 22, "func(r *Rand, s float64, v float64, imax uint64) *Zipf"}, + {"NormFloat64", Func, 22, "func() float64"}, + {"PCG", Type, 22, ""}, + {"Perm", Func, 22, "func(n int) []int"}, + {"Rand", Type, 22, ""}, + {"Shuffle", Func, 22, "func(n int, swap func(i int, j int))"}, + {"Source", Type, 22, ""}, + {"Uint", Func, 23, "func() uint"}, + {"Uint32", Func, 22, "func() uint32"}, + {"Uint32N", Func, 22, "func(n uint32) uint32"}, + {"Uint64", Func, 22, "func() uint64"}, + {"Uint64N", Func, 22, "func(n uint64) 
uint64"}, + {"UintN", Func, 22, "func(n uint) uint"}, + {"Zipf", Type, 22, ""}, }, "mime": { - {"(*WordDecoder).Decode", Method, 5}, - {"(*WordDecoder).DecodeHeader", Method, 5}, - {"(WordEncoder).Encode", Method, 5}, - {"AddExtensionType", Func, 0}, - {"BEncoding", Const, 5}, - {"ErrInvalidMediaParameter", Var, 9}, - {"ExtensionsByType", Func, 5}, - {"FormatMediaType", Func, 0}, - {"ParseMediaType", Func, 0}, - {"QEncoding", Const, 5}, - {"TypeByExtension", Func, 0}, - {"WordDecoder", Type, 5}, - {"WordDecoder.CharsetReader", Field, 5}, - {"WordEncoder", Type, 5}, + {"(*WordDecoder).Decode", Method, 5, ""}, + {"(*WordDecoder).DecodeHeader", Method, 5, ""}, + {"(WordEncoder).Encode", Method, 5, ""}, + {"AddExtensionType", Func, 0, "func(ext string, typ string) error"}, + {"BEncoding", Const, 5, ""}, + {"ErrInvalidMediaParameter", Var, 9, ""}, + {"ExtensionsByType", Func, 5, "func(typ string) ([]string, error)"}, + {"FormatMediaType", Func, 0, "func(t string, param map[string]string) string"}, + {"ParseMediaType", Func, 0, "func(v string) (mediatype string, params map[string]string, err error)"}, + {"QEncoding", Const, 5, ""}, + {"TypeByExtension", Func, 0, "func(ext string) string"}, + {"WordDecoder", Type, 5, ""}, + {"WordDecoder.CharsetReader", Field, 5, ""}, + {"WordEncoder", Type, 5, ""}, }, "mime/multipart": { - {"(*FileHeader).Open", Method, 0}, - {"(*Form).RemoveAll", Method, 0}, - {"(*Part).Close", Method, 0}, - {"(*Part).FileName", Method, 0}, - {"(*Part).FormName", Method, 0}, - {"(*Part).Read", Method, 0}, - {"(*Reader).NextPart", Method, 0}, - {"(*Reader).NextRawPart", Method, 14}, - {"(*Reader).ReadForm", Method, 0}, - {"(*Writer).Boundary", Method, 0}, - {"(*Writer).Close", Method, 0}, - {"(*Writer).CreateFormField", Method, 0}, - {"(*Writer).CreateFormFile", Method, 0}, - {"(*Writer).CreatePart", Method, 0}, - {"(*Writer).FormDataContentType", Method, 0}, - {"(*Writer).SetBoundary", Method, 1}, - {"(*Writer).WriteField", Method, 0}, - {"ErrMessageTooLarge", Var, 9}, - {"File", Type, 0}, - {"FileContentDisposition", Func, 25}, - {"FileHeader", Type, 0}, - {"FileHeader.Filename", Field, 0}, - {"FileHeader.Header", Field, 0}, - {"FileHeader.Size", Field, 9}, - {"Form", Type, 0}, - {"Form.File", Field, 0}, - {"Form.Value", Field, 0}, - {"NewReader", Func, 0}, - {"NewWriter", Func, 0}, - {"Part", Type, 0}, - {"Part.Header", Field, 0}, - {"Reader", Type, 0}, - {"Writer", Type, 0}, + {"(*FileHeader).Open", Method, 0, ""}, + {"(*Form).RemoveAll", Method, 0, ""}, + {"(*Part).Close", Method, 0, ""}, + {"(*Part).FileName", Method, 0, ""}, + {"(*Part).FormName", Method, 0, ""}, + {"(*Part).Read", Method, 0, ""}, + {"(*Reader).NextPart", Method, 0, ""}, + {"(*Reader).NextRawPart", Method, 14, ""}, + {"(*Reader).ReadForm", Method, 0, ""}, + {"(*Writer).Boundary", Method, 0, ""}, + {"(*Writer).Close", Method, 0, ""}, + {"(*Writer).CreateFormField", Method, 0, ""}, + {"(*Writer).CreateFormFile", Method, 0, ""}, + {"(*Writer).CreatePart", Method, 0, ""}, + {"(*Writer).FormDataContentType", Method, 0, ""}, + {"(*Writer).SetBoundary", Method, 1, ""}, + {"(*Writer).WriteField", Method, 0, ""}, + {"ErrMessageTooLarge", Var, 9, ""}, + {"File", Type, 0, ""}, + {"FileContentDisposition", Func, 25, ""}, + {"FileHeader", Type, 0, ""}, + {"FileHeader.Filename", Field, 0, ""}, + {"FileHeader.Header", Field, 0, ""}, + {"FileHeader.Size", Field, 9, ""}, + {"Form", Type, 0, ""}, + {"Form.File", Field, 0, ""}, + {"Form.Value", Field, 0, ""}, + {"NewReader", Func, 0, "func(r io.Reader, boundary string) 
*Reader"}, + {"NewWriter", Func, 0, "func(w io.Writer) *Writer"}, + {"Part", Type, 0, ""}, + {"Part.Header", Field, 0, ""}, + {"Reader", Type, 0, ""}, + {"Writer", Type, 0, ""}, }, "mime/quotedprintable": { - {"(*Reader).Read", Method, 5}, - {"(*Writer).Close", Method, 5}, - {"(*Writer).Write", Method, 5}, - {"NewReader", Func, 5}, - {"NewWriter", Func, 5}, - {"Reader", Type, 5}, - {"Writer", Type, 5}, - {"Writer.Binary", Field, 5}, + {"(*Reader).Read", Method, 5, ""}, + {"(*Writer).Close", Method, 5, ""}, + {"(*Writer).Write", Method, 5, ""}, + {"NewReader", Func, 5, "func(r io.Reader) *Reader"}, + {"NewWriter", Func, 5, "func(w io.Writer) *Writer"}, + {"Reader", Type, 5, ""}, + {"Writer", Type, 5, ""}, + {"Writer.Binary", Field, 5, ""}, }, "net": { - {"(*AddrError).Error", Method, 0}, - {"(*AddrError).Temporary", Method, 0}, - {"(*AddrError).Timeout", Method, 0}, - {"(*Buffers).Read", Method, 8}, - {"(*Buffers).WriteTo", Method, 8}, - {"(*DNSConfigError).Error", Method, 0}, - {"(*DNSConfigError).Temporary", Method, 0}, - {"(*DNSConfigError).Timeout", Method, 0}, - {"(*DNSConfigError).Unwrap", Method, 13}, - {"(*DNSError).Error", Method, 0}, - {"(*DNSError).Temporary", Method, 0}, - {"(*DNSError).Timeout", Method, 0}, - {"(*DNSError).Unwrap", Method, 23}, - {"(*Dialer).Dial", Method, 1}, - {"(*Dialer).DialContext", Method, 7}, - {"(*Dialer).MultipathTCP", Method, 21}, - {"(*Dialer).SetMultipathTCP", Method, 21}, - {"(*IP).UnmarshalText", Method, 2}, - {"(*IPAddr).Network", Method, 0}, - {"(*IPAddr).String", Method, 0}, - {"(*IPConn).Close", Method, 0}, - {"(*IPConn).File", Method, 0}, - {"(*IPConn).LocalAddr", Method, 0}, - {"(*IPConn).Read", Method, 0}, - {"(*IPConn).ReadFrom", Method, 0}, - {"(*IPConn).ReadFromIP", Method, 0}, - {"(*IPConn).ReadMsgIP", Method, 1}, - {"(*IPConn).RemoteAddr", Method, 0}, - {"(*IPConn).SetDeadline", Method, 0}, - {"(*IPConn).SetReadBuffer", Method, 0}, - {"(*IPConn).SetReadDeadline", Method, 0}, - {"(*IPConn).SetWriteBuffer", Method, 0}, - {"(*IPConn).SetWriteDeadline", Method, 0}, - {"(*IPConn).SyscallConn", Method, 9}, - {"(*IPConn).Write", Method, 0}, - {"(*IPConn).WriteMsgIP", Method, 1}, - {"(*IPConn).WriteTo", Method, 0}, - {"(*IPConn).WriteToIP", Method, 0}, - {"(*IPNet).Contains", Method, 0}, - {"(*IPNet).Network", Method, 0}, - {"(*IPNet).String", Method, 0}, - {"(*Interface).Addrs", Method, 0}, - {"(*Interface).MulticastAddrs", Method, 0}, - {"(*ListenConfig).Listen", Method, 11}, - {"(*ListenConfig).ListenPacket", Method, 11}, - {"(*ListenConfig).MultipathTCP", Method, 21}, - {"(*ListenConfig).SetMultipathTCP", Method, 21}, - {"(*OpError).Error", Method, 0}, - {"(*OpError).Temporary", Method, 0}, - {"(*OpError).Timeout", Method, 0}, - {"(*OpError).Unwrap", Method, 13}, - {"(*ParseError).Error", Method, 0}, - {"(*ParseError).Temporary", Method, 17}, - {"(*ParseError).Timeout", Method, 17}, - {"(*Resolver).LookupAddr", Method, 8}, - {"(*Resolver).LookupCNAME", Method, 8}, - {"(*Resolver).LookupHost", Method, 8}, - {"(*Resolver).LookupIP", Method, 15}, - {"(*Resolver).LookupIPAddr", Method, 8}, - {"(*Resolver).LookupMX", Method, 8}, - {"(*Resolver).LookupNS", Method, 8}, - {"(*Resolver).LookupNetIP", Method, 18}, - {"(*Resolver).LookupPort", Method, 8}, - {"(*Resolver).LookupSRV", Method, 8}, - {"(*Resolver).LookupTXT", Method, 8}, - {"(*TCPAddr).AddrPort", Method, 18}, - {"(*TCPAddr).Network", Method, 0}, - {"(*TCPAddr).String", Method, 0}, - {"(*TCPConn).Close", Method, 0}, - {"(*TCPConn).CloseRead", Method, 0}, - {"(*TCPConn).CloseWrite", 
Method, 0}, - {"(*TCPConn).File", Method, 0}, - {"(*TCPConn).LocalAddr", Method, 0}, - {"(*TCPConn).MultipathTCP", Method, 21}, - {"(*TCPConn).Read", Method, 0}, - {"(*TCPConn).ReadFrom", Method, 0}, - {"(*TCPConn).RemoteAddr", Method, 0}, - {"(*TCPConn).SetDeadline", Method, 0}, - {"(*TCPConn).SetKeepAlive", Method, 0}, - {"(*TCPConn).SetKeepAliveConfig", Method, 23}, - {"(*TCPConn).SetKeepAlivePeriod", Method, 2}, - {"(*TCPConn).SetLinger", Method, 0}, - {"(*TCPConn).SetNoDelay", Method, 0}, - {"(*TCPConn).SetReadBuffer", Method, 0}, - {"(*TCPConn).SetReadDeadline", Method, 0}, - {"(*TCPConn).SetWriteBuffer", Method, 0}, - {"(*TCPConn).SetWriteDeadline", Method, 0}, - {"(*TCPConn).SyscallConn", Method, 9}, - {"(*TCPConn).Write", Method, 0}, - {"(*TCPConn).WriteTo", Method, 22}, - {"(*TCPListener).Accept", Method, 0}, - {"(*TCPListener).AcceptTCP", Method, 0}, - {"(*TCPListener).Addr", Method, 0}, - {"(*TCPListener).Close", Method, 0}, - {"(*TCPListener).File", Method, 0}, - {"(*TCPListener).SetDeadline", Method, 0}, - {"(*TCPListener).SyscallConn", Method, 10}, - {"(*UDPAddr).AddrPort", Method, 18}, - {"(*UDPAddr).Network", Method, 0}, - {"(*UDPAddr).String", Method, 0}, - {"(*UDPConn).Close", Method, 0}, - {"(*UDPConn).File", Method, 0}, - {"(*UDPConn).LocalAddr", Method, 0}, - {"(*UDPConn).Read", Method, 0}, - {"(*UDPConn).ReadFrom", Method, 0}, - {"(*UDPConn).ReadFromUDP", Method, 0}, - {"(*UDPConn).ReadFromUDPAddrPort", Method, 18}, - {"(*UDPConn).ReadMsgUDP", Method, 1}, - {"(*UDPConn).ReadMsgUDPAddrPort", Method, 18}, - {"(*UDPConn).RemoteAddr", Method, 0}, - {"(*UDPConn).SetDeadline", Method, 0}, - {"(*UDPConn).SetReadBuffer", Method, 0}, - {"(*UDPConn).SetReadDeadline", Method, 0}, - {"(*UDPConn).SetWriteBuffer", Method, 0}, - {"(*UDPConn).SetWriteDeadline", Method, 0}, - {"(*UDPConn).SyscallConn", Method, 9}, - {"(*UDPConn).Write", Method, 0}, - {"(*UDPConn).WriteMsgUDP", Method, 1}, - {"(*UDPConn).WriteMsgUDPAddrPort", Method, 18}, - {"(*UDPConn).WriteTo", Method, 0}, - {"(*UDPConn).WriteToUDP", Method, 0}, - {"(*UDPConn).WriteToUDPAddrPort", Method, 18}, - {"(*UnixAddr).Network", Method, 0}, - {"(*UnixAddr).String", Method, 0}, - {"(*UnixConn).Close", Method, 0}, - {"(*UnixConn).CloseRead", Method, 1}, - {"(*UnixConn).CloseWrite", Method, 1}, - {"(*UnixConn).File", Method, 0}, - {"(*UnixConn).LocalAddr", Method, 0}, - {"(*UnixConn).Read", Method, 0}, - {"(*UnixConn).ReadFrom", Method, 0}, - {"(*UnixConn).ReadFromUnix", Method, 0}, - {"(*UnixConn).ReadMsgUnix", Method, 0}, - {"(*UnixConn).RemoteAddr", Method, 0}, - {"(*UnixConn).SetDeadline", Method, 0}, - {"(*UnixConn).SetReadBuffer", Method, 0}, - {"(*UnixConn).SetReadDeadline", Method, 0}, - {"(*UnixConn).SetWriteBuffer", Method, 0}, - {"(*UnixConn).SetWriteDeadline", Method, 0}, - {"(*UnixConn).SyscallConn", Method, 9}, - {"(*UnixConn).Write", Method, 0}, - {"(*UnixConn).WriteMsgUnix", Method, 0}, - {"(*UnixConn).WriteTo", Method, 0}, - {"(*UnixConn).WriteToUnix", Method, 0}, - {"(*UnixListener).Accept", Method, 0}, - {"(*UnixListener).AcceptUnix", Method, 0}, - {"(*UnixListener).Addr", Method, 0}, - {"(*UnixListener).Close", Method, 0}, - {"(*UnixListener).File", Method, 0}, - {"(*UnixListener).SetDeadline", Method, 0}, - {"(*UnixListener).SetUnlinkOnClose", Method, 8}, - {"(*UnixListener).SyscallConn", Method, 10}, - {"(Flags).String", Method, 0}, - {"(HardwareAddr).String", Method, 0}, - {"(IP).AppendText", Method, 24}, - {"(IP).DefaultMask", Method, 0}, - {"(IP).Equal", Method, 0}, - {"(IP).IsGlobalUnicast", Method, 0}, 
- {"(IP).IsInterfaceLocalMulticast", Method, 0}, - {"(IP).IsLinkLocalMulticast", Method, 0}, - {"(IP).IsLinkLocalUnicast", Method, 0}, - {"(IP).IsLoopback", Method, 0}, - {"(IP).IsMulticast", Method, 0}, - {"(IP).IsPrivate", Method, 17}, - {"(IP).IsUnspecified", Method, 0}, - {"(IP).MarshalText", Method, 2}, - {"(IP).Mask", Method, 0}, - {"(IP).String", Method, 0}, - {"(IP).To16", Method, 0}, - {"(IP).To4", Method, 0}, - {"(IPMask).Size", Method, 0}, - {"(IPMask).String", Method, 0}, - {"(InvalidAddrError).Error", Method, 0}, - {"(InvalidAddrError).Temporary", Method, 0}, - {"(InvalidAddrError).Timeout", Method, 0}, - {"(UnknownNetworkError).Error", Method, 0}, - {"(UnknownNetworkError).Temporary", Method, 0}, - {"(UnknownNetworkError).Timeout", Method, 0}, - {"Addr", Type, 0}, - {"AddrError", Type, 0}, - {"AddrError.Addr", Field, 0}, - {"AddrError.Err", Field, 0}, - {"Buffers", Type, 8}, - {"CIDRMask", Func, 0}, - {"Conn", Type, 0}, - {"DNSConfigError", Type, 0}, - {"DNSConfigError.Err", Field, 0}, - {"DNSError", Type, 0}, - {"DNSError.Err", Field, 0}, - {"DNSError.IsNotFound", Field, 13}, - {"DNSError.IsTemporary", Field, 6}, - {"DNSError.IsTimeout", Field, 0}, - {"DNSError.Name", Field, 0}, - {"DNSError.Server", Field, 0}, - {"DNSError.UnwrapErr", Field, 23}, - {"DefaultResolver", Var, 8}, - {"Dial", Func, 0}, - {"DialIP", Func, 0}, - {"DialTCP", Func, 0}, - {"DialTimeout", Func, 0}, - {"DialUDP", Func, 0}, - {"DialUnix", Func, 0}, - {"Dialer", Type, 1}, - {"Dialer.Cancel", Field, 6}, - {"Dialer.Control", Field, 11}, - {"Dialer.ControlContext", Field, 20}, - {"Dialer.Deadline", Field, 1}, - {"Dialer.DualStack", Field, 2}, - {"Dialer.FallbackDelay", Field, 5}, - {"Dialer.KeepAlive", Field, 3}, - {"Dialer.KeepAliveConfig", Field, 23}, - {"Dialer.LocalAddr", Field, 1}, - {"Dialer.Resolver", Field, 8}, - {"Dialer.Timeout", Field, 1}, - {"ErrClosed", Var, 16}, - {"ErrWriteToConnected", Var, 0}, - {"Error", Type, 0}, - {"FileConn", Func, 0}, - {"FileListener", Func, 0}, - {"FilePacketConn", Func, 0}, - {"FlagBroadcast", Const, 0}, - {"FlagLoopback", Const, 0}, - {"FlagMulticast", Const, 0}, - {"FlagPointToPoint", Const, 0}, - {"FlagRunning", Const, 20}, - {"FlagUp", Const, 0}, - {"Flags", Type, 0}, - {"HardwareAddr", Type, 0}, - {"IP", Type, 0}, - {"IPAddr", Type, 0}, - {"IPAddr.IP", Field, 0}, - {"IPAddr.Zone", Field, 1}, - {"IPConn", Type, 0}, - {"IPMask", Type, 0}, - {"IPNet", Type, 0}, - {"IPNet.IP", Field, 0}, - {"IPNet.Mask", Field, 0}, - {"IPv4", Func, 0}, - {"IPv4Mask", Func, 0}, - {"IPv4allrouter", Var, 0}, - {"IPv4allsys", Var, 0}, - {"IPv4bcast", Var, 0}, - {"IPv4len", Const, 0}, - {"IPv4zero", Var, 0}, - {"IPv6interfacelocalallnodes", Var, 0}, - {"IPv6len", Const, 0}, - {"IPv6linklocalallnodes", Var, 0}, - {"IPv6linklocalallrouters", Var, 0}, - {"IPv6loopback", Var, 0}, - {"IPv6unspecified", Var, 0}, - {"IPv6zero", Var, 0}, - {"Interface", Type, 0}, - {"Interface.Flags", Field, 0}, - {"Interface.HardwareAddr", Field, 0}, - {"Interface.Index", Field, 0}, - {"Interface.MTU", Field, 0}, - {"Interface.Name", Field, 0}, - {"InterfaceAddrs", Func, 0}, - {"InterfaceByIndex", Func, 0}, - {"InterfaceByName", Func, 0}, - {"Interfaces", Func, 0}, - {"InvalidAddrError", Type, 0}, - {"JoinHostPort", Func, 0}, - {"KeepAliveConfig", Type, 23}, - {"KeepAliveConfig.Count", Field, 23}, - {"KeepAliveConfig.Enable", Field, 23}, - {"KeepAliveConfig.Idle", Field, 23}, - {"KeepAliveConfig.Interval", Field, 23}, - {"Listen", Func, 0}, - {"ListenConfig", Type, 11}, - {"ListenConfig.Control", Field, 11}, - 
{"ListenConfig.KeepAlive", Field, 13}, - {"ListenConfig.KeepAliveConfig", Field, 23}, - {"ListenIP", Func, 0}, - {"ListenMulticastUDP", Func, 0}, - {"ListenPacket", Func, 0}, - {"ListenTCP", Func, 0}, - {"ListenUDP", Func, 0}, - {"ListenUnix", Func, 0}, - {"ListenUnixgram", Func, 0}, - {"Listener", Type, 0}, - {"LookupAddr", Func, 0}, - {"LookupCNAME", Func, 0}, - {"LookupHost", Func, 0}, - {"LookupIP", Func, 0}, - {"LookupMX", Func, 0}, - {"LookupNS", Func, 1}, - {"LookupPort", Func, 0}, - {"LookupSRV", Func, 0}, - {"LookupTXT", Func, 0}, - {"MX", Type, 0}, - {"MX.Host", Field, 0}, - {"MX.Pref", Field, 0}, - {"NS", Type, 1}, - {"NS.Host", Field, 1}, - {"OpError", Type, 0}, - {"OpError.Addr", Field, 0}, - {"OpError.Err", Field, 0}, - {"OpError.Net", Field, 0}, - {"OpError.Op", Field, 0}, - {"OpError.Source", Field, 5}, - {"PacketConn", Type, 0}, - {"ParseCIDR", Func, 0}, - {"ParseError", Type, 0}, - {"ParseError.Text", Field, 0}, - {"ParseError.Type", Field, 0}, - {"ParseIP", Func, 0}, - {"ParseMAC", Func, 0}, - {"Pipe", Func, 0}, - {"ResolveIPAddr", Func, 0}, - {"ResolveTCPAddr", Func, 0}, - {"ResolveUDPAddr", Func, 0}, - {"ResolveUnixAddr", Func, 0}, - {"Resolver", Type, 8}, - {"Resolver.Dial", Field, 9}, - {"Resolver.PreferGo", Field, 8}, - {"Resolver.StrictErrors", Field, 9}, - {"SRV", Type, 0}, - {"SRV.Port", Field, 0}, - {"SRV.Priority", Field, 0}, - {"SRV.Target", Field, 0}, - {"SRV.Weight", Field, 0}, - {"SplitHostPort", Func, 0}, - {"TCPAddr", Type, 0}, - {"TCPAddr.IP", Field, 0}, - {"TCPAddr.Port", Field, 0}, - {"TCPAddr.Zone", Field, 1}, - {"TCPAddrFromAddrPort", Func, 18}, - {"TCPConn", Type, 0}, - {"TCPListener", Type, 0}, - {"UDPAddr", Type, 0}, - {"UDPAddr.IP", Field, 0}, - {"UDPAddr.Port", Field, 0}, - {"UDPAddr.Zone", Field, 1}, - {"UDPAddrFromAddrPort", Func, 18}, - {"UDPConn", Type, 0}, - {"UnixAddr", Type, 0}, - {"UnixAddr.Name", Field, 0}, - {"UnixAddr.Net", Field, 0}, - {"UnixConn", Type, 0}, - {"UnixListener", Type, 0}, - {"UnknownNetworkError", Type, 0}, + {"(*AddrError).Error", Method, 0, ""}, + {"(*AddrError).Temporary", Method, 0, ""}, + {"(*AddrError).Timeout", Method, 0, ""}, + {"(*Buffers).Read", Method, 8, ""}, + {"(*Buffers).WriteTo", Method, 8, ""}, + {"(*DNSConfigError).Error", Method, 0, ""}, + {"(*DNSConfigError).Temporary", Method, 0, ""}, + {"(*DNSConfigError).Timeout", Method, 0, ""}, + {"(*DNSConfigError).Unwrap", Method, 13, ""}, + {"(*DNSError).Error", Method, 0, ""}, + {"(*DNSError).Temporary", Method, 0, ""}, + {"(*DNSError).Timeout", Method, 0, ""}, + {"(*DNSError).Unwrap", Method, 23, ""}, + {"(*Dialer).Dial", Method, 1, ""}, + {"(*Dialer).DialContext", Method, 7, ""}, + {"(*Dialer).MultipathTCP", Method, 21, ""}, + {"(*Dialer).SetMultipathTCP", Method, 21, ""}, + {"(*IP).UnmarshalText", Method, 2, ""}, + {"(*IPAddr).Network", Method, 0, ""}, + {"(*IPAddr).String", Method, 0, ""}, + {"(*IPConn).Close", Method, 0, ""}, + {"(*IPConn).File", Method, 0, ""}, + {"(*IPConn).LocalAddr", Method, 0, ""}, + {"(*IPConn).Read", Method, 0, ""}, + {"(*IPConn).ReadFrom", Method, 0, ""}, + {"(*IPConn).ReadFromIP", Method, 0, ""}, + {"(*IPConn).ReadMsgIP", Method, 1, ""}, + {"(*IPConn).RemoteAddr", Method, 0, ""}, + {"(*IPConn).SetDeadline", Method, 0, ""}, + {"(*IPConn).SetReadBuffer", Method, 0, ""}, + {"(*IPConn).SetReadDeadline", Method, 0, ""}, + {"(*IPConn).SetWriteBuffer", Method, 0, ""}, + {"(*IPConn).SetWriteDeadline", Method, 0, ""}, + {"(*IPConn).SyscallConn", Method, 9, ""}, + {"(*IPConn).Write", Method, 0, ""}, + {"(*IPConn).WriteMsgIP", Method, 1, 
""}, + {"(*IPConn).WriteTo", Method, 0, ""}, + {"(*IPConn).WriteToIP", Method, 0, ""}, + {"(*IPNet).Contains", Method, 0, ""}, + {"(*IPNet).Network", Method, 0, ""}, + {"(*IPNet).String", Method, 0, ""}, + {"(*Interface).Addrs", Method, 0, ""}, + {"(*Interface).MulticastAddrs", Method, 0, ""}, + {"(*ListenConfig).Listen", Method, 11, ""}, + {"(*ListenConfig).ListenPacket", Method, 11, ""}, + {"(*ListenConfig).MultipathTCP", Method, 21, ""}, + {"(*ListenConfig).SetMultipathTCP", Method, 21, ""}, + {"(*OpError).Error", Method, 0, ""}, + {"(*OpError).Temporary", Method, 0, ""}, + {"(*OpError).Timeout", Method, 0, ""}, + {"(*OpError).Unwrap", Method, 13, ""}, + {"(*ParseError).Error", Method, 0, ""}, + {"(*ParseError).Temporary", Method, 17, ""}, + {"(*ParseError).Timeout", Method, 17, ""}, + {"(*Resolver).LookupAddr", Method, 8, ""}, + {"(*Resolver).LookupCNAME", Method, 8, ""}, + {"(*Resolver).LookupHost", Method, 8, ""}, + {"(*Resolver).LookupIP", Method, 15, ""}, + {"(*Resolver).LookupIPAddr", Method, 8, ""}, + {"(*Resolver).LookupMX", Method, 8, ""}, + {"(*Resolver).LookupNS", Method, 8, ""}, + {"(*Resolver).LookupNetIP", Method, 18, ""}, + {"(*Resolver).LookupPort", Method, 8, ""}, + {"(*Resolver).LookupSRV", Method, 8, ""}, + {"(*Resolver).LookupTXT", Method, 8, ""}, + {"(*TCPAddr).AddrPort", Method, 18, ""}, + {"(*TCPAddr).Network", Method, 0, ""}, + {"(*TCPAddr).String", Method, 0, ""}, + {"(*TCPConn).Close", Method, 0, ""}, + {"(*TCPConn).CloseRead", Method, 0, ""}, + {"(*TCPConn).CloseWrite", Method, 0, ""}, + {"(*TCPConn).File", Method, 0, ""}, + {"(*TCPConn).LocalAddr", Method, 0, ""}, + {"(*TCPConn).MultipathTCP", Method, 21, ""}, + {"(*TCPConn).Read", Method, 0, ""}, + {"(*TCPConn).ReadFrom", Method, 0, ""}, + {"(*TCPConn).RemoteAddr", Method, 0, ""}, + {"(*TCPConn).SetDeadline", Method, 0, ""}, + {"(*TCPConn).SetKeepAlive", Method, 0, ""}, + {"(*TCPConn).SetKeepAliveConfig", Method, 23, ""}, + {"(*TCPConn).SetKeepAlivePeriod", Method, 2, ""}, + {"(*TCPConn).SetLinger", Method, 0, ""}, + {"(*TCPConn).SetNoDelay", Method, 0, ""}, + {"(*TCPConn).SetReadBuffer", Method, 0, ""}, + {"(*TCPConn).SetReadDeadline", Method, 0, ""}, + {"(*TCPConn).SetWriteBuffer", Method, 0, ""}, + {"(*TCPConn).SetWriteDeadline", Method, 0, ""}, + {"(*TCPConn).SyscallConn", Method, 9, ""}, + {"(*TCPConn).Write", Method, 0, ""}, + {"(*TCPConn).WriteTo", Method, 22, ""}, + {"(*TCPListener).Accept", Method, 0, ""}, + {"(*TCPListener).AcceptTCP", Method, 0, ""}, + {"(*TCPListener).Addr", Method, 0, ""}, + {"(*TCPListener).Close", Method, 0, ""}, + {"(*TCPListener).File", Method, 0, ""}, + {"(*TCPListener).SetDeadline", Method, 0, ""}, + {"(*TCPListener).SyscallConn", Method, 10, ""}, + {"(*UDPAddr).AddrPort", Method, 18, ""}, + {"(*UDPAddr).Network", Method, 0, ""}, + {"(*UDPAddr).String", Method, 0, ""}, + {"(*UDPConn).Close", Method, 0, ""}, + {"(*UDPConn).File", Method, 0, ""}, + {"(*UDPConn).LocalAddr", Method, 0, ""}, + {"(*UDPConn).Read", Method, 0, ""}, + {"(*UDPConn).ReadFrom", Method, 0, ""}, + {"(*UDPConn).ReadFromUDP", Method, 0, ""}, + {"(*UDPConn).ReadFromUDPAddrPort", Method, 18, ""}, + {"(*UDPConn).ReadMsgUDP", Method, 1, ""}, + {"(*UDPConn).ReadMsgUDPAddrPort", Method, 18, ""}, + {"(*UDPConn).RemoteAddr", Method, 0, ""}, + {"(*UDPConn).SetDeadline", Method, 0, ""}, + {"(*UDPConn).SetReadBuffer", Method, 0, ""}, + {"(*UDPConn).SetReadDeadline", Method, 0, ""}, + {"(*UDPConn).SetWriteBuffer", Method, 0, ""}, + {"(*UDPConn).SetWriteDeadline", Method, 0, ""}, + {"(*UDPConn).SyscallConn", Method, 
9, ""}, + {"(*UDPConn).Write", Method, 0, ""}, + {"(*UDPConn).WriteMsgUDP", Method, 1, ""}, + {"(*UDPConn).WriteMsgUDPAddrPort", Method, 18, ""}, + {"(*UDPConn).WriteTo", Method, 0, ""}, + {"(*UDPConn).WriteToUDP", Method, 0, ""}, + {"(*UDPConn).WriteToUDPAddrPort", Method, 18, ""}, + {"(*UnixAddr).Network", Method, 0, ""}, + {"(*UnixAddr).String", Method, 0, ""}, + {"(*UnixConn).Close", Method, 0, ""}, + {"(*UnixConn).CloseRead", Method, 1, ""}, + {"(*UnixConn).CloseWrite", Method, 1, ""}, + {"(*UnixConn).File", Method, 0, ""}, + {"(*UnixConn).LocalAddr", Method, 0, ""}, + {"(*UnixConn).Read", Method, 0, ""}, + {"(*UnixConn).ReadFrom", Method, 0, ""}, + {"(*UnixConn).ReadFromUnix", Method, 0, ""}, + {"(*UnixConn).ReadMsgUnix", Method, 0, ""}, + {"(*UnixConn).RemoteAddr", Method, 0, ""}, + {"(*UnixConn).SetDeadline", Method, 0, ""}, + {"(*UnixConn).SetReadBuffer", Method, 0, ""}, + {"(*UnixConn).SetReadDeadline", Method, 0, ""}, + {"(*UnixConn).SetWriteBuffer", Method, 0, ""}, + {"(*UnixConn).SetWriteDeadline", Method, 0, ""}, + {"(*UnixConn).SyscallConn", Method, 9, ""}, + {"(*UnixConn).Write", Method, 0, ""}, + {"(*UnixConn).WriteMsgUnix", Method, 0, ""}, + {"(*UnixConn).WriteTo", Method, 0, ""}, + {"(*UnixConn).WriteToUnix", Method, 0, ""}, + {"(*UnixListener).Accept", Method, 0, ""}, + {"(*UnixListener).AcceptUnix", Method, 0, ""}, + {"(*UnixListener).Addr", Method, 0, ""}, + {"(*UnixListener).Close", Method, 0, ""}, + {"(*UnixListener).File", Method, 0, ""}, + {"(*UnixListener).SetDeadline", Method, 0, ""}, + {"(*UnixListener).SetUnlinkOnClose", Method, 8, ""}, + {"(*UnixListener).SyscallConn", Method, 10, ""}, + {"(Flags).String", Method, 0, ""}, + {"(HardwareAddr).String", Method, 0, ""}, + {"(IP).AppendText", Method, 24, ""}, + {"(IP).DefaultMask", Method, 0, ""}, + {"(IP).Equal", Method, 0, ""}, + {"(IP).IsGlobalUnicast", Method, 0, ""}, + {"(IP).IsInterfaceLocalMulticast", Method, 0, ""}, + {"(IP).IsLinkLocalMulticast", Method, 0, ""}, + {"(IP).IsLinkLocalUnicast", Method, 0, ""}, + {"(IP).IsLoopback", Method, 0, ""}, + {"(IP).IsMulticast", Method, 0, ""}, + {"(IP).IsPrivate", Method, 17, ""}, + {"(IP).IsUnspecified", Method, 0, ""}, + {"(IP).MarshalText", Method, 2, ""}, + {"(IP).Mask", Method, 0, ""}, + {"(IP).String", Method, 0, ""}, + {"(IP).To16", Method, 0, ""}, + {"(IP).To4", Method, 0, ""}, + {"(IPMask).Size", Method, 0, ""}, + {"(IPMask).String", Method, 0, ""}, + {"(InvalidAddrError).Error", Method, 0, ""}, + {"(InvalidAddrError).Temporary", Method, 0, ""}, + {"(InvalidAddrError).Timeout", Method, 0, ""}, + {"(UnknownNetworkError).Error", Method, 0, ""}, + {"(UnknownNetworkError).Temporary", Method, 0, ""}, + {"(UnknownNetworkError).Timeout", Method, 0, ""}, + {"Addr", Type, 0, ""}, + {"AddrError", Type, 0, ""}, + {"AddrError.Addr", Field, 0, ""}, + {"AddrError.Err", Field, 0, ""}, + {"Buffers", Type, 8, ""}, + {"CIDRMask", Func, 0, "func(ones int, bits int) IPMask"}, + {"Conn", Type, 0, ""}, + {"DNSConfigError", Type, 0, ""}, + {"DNSConfigError.Err", Field, 0, ""}, + {"DNSError", Type, 0, ""}, + {"DNSError.Err", Field, 0, ""}, + {"DNSError.IsNotFound", Field, 13, ""}, + {"DNSError.IsTemporary", Field, 6, ""}, + {"DNSError.IsTimeout", Field, 0, ""}, + {"DNSError.Name", Field, 0, ""}, + {"DNSError.Server", Field, 0, ""}, + {"DNSError.UnwrapErr", Field, 23, ""}, + {"DefaultResolver", Var, 8, ""}, + {"Dial", Func, 0, "func(network string, address string) (Conn, error)"}, + {"DialIP", Func, 0, "func(network string, laddr *IPAddr, raddr *IPAddr) (*IPConn, error)"}, + 
{"DialTCP", Func, 0, "func(network string, laddr *TCPAddr, raddr *TCPAddr) (*TCPConn, error)"}, + {"DialTimeout", Func, 0, "func(network string, address string, timeout time.Duration) (Conn, error)"}, + {"DialUDP", Func, 0, "func(network string, laddr *UDPAddr, raddr *UDPAddr) (*UDPConn, error)"}, + {"DialUnix", Func, 0, "func(network string, laddr *UnixAddr, raddr *UnixAddr) (*UnixConn, error)"}, + {"Dialer", Type, 1, ""}, + {"Dialer.Cancel", Field, 6, ""}, + {"Dialer.Control", Field, 11, ""}, + {"Dialer.ControlContext", Field, 20, ""}, + {"Dialer.Deadline", Field, 1, ""}, + {"Dialer.DualStack", Field, 2, ""}, + {"Dialer.FallbackDelay", Field, 5, ""}, + {"Dialer.KeepAlive", Field, 3, ""}, + {"Dialer.KeepAliveConfig", Field, 23, ""}, + {"Dialer.LocalAddr", Field, 1, ""}, + {"Dialer.Resolver", Field, 8, ""}, + {"Dialer.Timeout", Field, 1, ""}, + {"ErrClosed", Var, 16, ""}, + {"ErrWriteToConnected", Var, 0, ""}, + {"Error", Type, 0, ""}, + {"FileConn", Func, 0, "func(f *os.File) (c Conn, err error)"}, + {"FileListener", Func, 0, "func(f *os.File) (ln Listener, err error)"}, + {"FilePacketConn", Func, 0, "func(f *os.File) (c PacketConn, err error)"}, + {"FlagBroadcast", Const, 0, ""}, + {"FlagLoopback", Const, 0, ""}, + {"FlagMulticast", Const, 0, ""}, + {"FlagPointToPoint", Const, 0, ""}, + {"FlagRunning", Const, 20, ""}, + {"FlagUp", Const, 0, ""}, + {"Flags", Type, 0, ""}, + {"HardwareAddr", Type, 0, ""}, + {"IP", Type, 0, ""}, + {"IPAddr", Type, 0, ""}, + {"IPAddr.IP", Field, 0, ""}, + {"IPAddr.Zone", Field, 1, ""}, + {"IPConn", Type, 0, ""}, + {"IPMask", Type, 0, ""}, + {"IPNet", Type, 0, ""}, + {"IPNet.IP", Field, 0, ""}, + {"IPNet.Mask", Field, 0, ""}, + {"IPv4", Func, 0, "func(a byte, b byte, c byte, d byte) IP"}, + {"IPv4Mask", Func, 0, "func(a byte, b byte, c byte, d byte) IPMask"}, + {"IPv4allrouter", Var, 0, ""}, + {"IPv4allsys", Var, 0, ""}, + {"IPv4bcast", Var, 0, ""}, + {"IPv4len", Const, 0, ""}, + {"IPv4zero", Var, 0, ""}, + {"IPv6interfacelocalallnodes", Var, 0, ""}, + {"IPv6len", Const, 0, ""}, + {"IPv6linklocalallnodes", Var, 0, ""}, + {"IPv6linklocalallrouters", Var, 0, ""}, + {"IPv6loopback", Var, 0, ""}, + {"IPv6unspecified", Var, 0, ""}, + {"IPv6zero", Var, 0, ""}, + {"Interface", Type, 0, ""}, + {"Interface.Flags", Field, 0, ""}, + {"Interface.HardwareAddr", Field, 0, ""}, + {"Interface.Index", Field, 0, ""}, + {"Interface.MTU", Field, 0, ""}, + {"Interface.Name", Field, 0, ""}, + {"InterfaceAddrs", Func, 0, "func() ([]Addr, error)"}, + {"InterfaceByIndex", Func, 0, "func(index int) (*Interface, error)"}, + {"InterfaceByName", Func, 0, "func(name string) (*Interface, error)"}, + {"Interfaces", Func, 0, "func() ([]Interface, error)"}, + {"InvalidAddrError", Type, 0, ""}, + {"JoinHostPort", Func, 0, "func(host string, port string) string"}, + {"KeepAliveConfig", Type, 23, ""}, + {"KeepAliveConfig.Count", Field, 23, ""}, + {"KeepAliveConfig.Enable", Field, 23, ""}, + {"KeepAliveConfig.Idle", Field, 23, ""}, + {"KeepAliveConfig.Interval", Field, 23, ""}, + {"Listen", Func, 0, "func(network string, address string) (Listener, error)"}, + {"ListenConfig", Type, 11, ""}, + {"ListenConfig.Control", Field, 11, ""}, + {"ListenConfig.KeepAlive", Field, 13, ""}, + {"ListenConfig.KeepAliveConfig", Field, 23, ""}, + {"ListenIP", Func, 0, "func(network string, laddr *IPAddr) (*IPConn, error)"}, + {"ListenMulticastUDP", Func, 0, "func(network string, ifi *Interface, gaddr *UDPAddr) (*UDPConn, error)"}, + {"ListenPacket", Func, 0, "func(network string, address string) (PacketConn, 
error)"}, + {"ListenTCP", Func, 0, "func(network string, laddr *TCPAddr) (*TCPListener, error)"}, + {"ListenUDP", Func, 0, "func(network string, laddr *UDPAddr) (*UDPConn, error)"}, + {"ListenUnix", Func, 0, "func(network string, laddr *UnixAddr) (*UnixListener, error)"}, + {"ListenUnixgram", Func, 0, "func(network string, laddr *UnixAddr) (*UnixConn, error)"}, + {"Listener", Type, 0, ""}, + {"LookupAddr", Func, 0, "func(addr string) (names []string, err error)"}, + {"LookupCNAME", Func, 0, "func(host string) (cname string, err error)"}, + {"LookupHost", Func, 0, "func(host string) (addrs []string, err error)"}, + {"LookupIP", Func, 0, "func(host string) ([]IP, error)"}, + {"LookupMX", Func, 0, "func(name string) ([]*MX, error)"}, + {"LookupNS", Func, 1, "func(name string) ([]*NS, error)"}, + {"LookupPort", Func, 0, "func(network string, service string) (port int, err error)"}, + {"LookupSRV", Func, 0, "func(service string, proto string, name string) (cname string, addrs []*SRV, err error)"}, + {"LookupTXT", Func, 0, "func(name string) ([]string, error)"}, + {"MX", Type, 0, ""}, + {"MX.Host", Field, 0, ""}, + {"MX.Pref", Field, 0, ""}, + {"NS", Type, 1, ""}, + {"NS.Host", Field, 1, ""}, + {"OpError", Type, 0, ""}, + {"OpError.Addr", Field, 0, ""}, + {"OpError.Err", Field, 0, ""}, + {"OpError.Net", Field, 0, ""}, + {"OpError.Op", Field, 0, ""}, + {"OpError.Source", Field, 5, ""}, + {"PacketConn", Type, 0, ""}, + {"ParseCIDR", Func, 0, "func(s string) (IP, *IPNet, error)"}, + {"ParseError", Type, 0, ""}, + {"ParseError.Text", Field, 0, ""}, + {"ParseError.Type", Field, 0, ""}, + {"ParseIP", Func, 0, "func(s string) IP"}, + {"ParseMAC", Func, 0, "func(s string) (hw HardwareAddr, err error)"}, + {"Pipe", Func, 0, "func() (Conn, Conn)"}, + {"ResolveIPAddr", Func, 0, "func(network string, address string) (*IPAddr, error)"}, + {"ResolveTCPAddr", Func, 0, "func(network string, address string) (*TCPAddr, error)"}, + {"ResolveUDPAddr", Func, 0, "func(network string, address string) (*UDPAddr, error)"}, + {"ResolveUnixAddr", Func, 0, "func(network string, address string) (*UnixAddr, error)"}, + {"Resolver", Type, 8, ""}, + {"Resolver.Dial", Field, 9, ""}, + {"Resolver.PreferGo", Field, 8, ""}, + {"Resolver.StrictErrors", Field, 9, ""}, + {"SRV", Type, 0, ""}, + {"SRV.Port", Field, 0, ""}, + {"SRV.Priority", Field, 0, ""}, + {"SRV.Target", Field, 0, ""}, + {"SRV.Weight", Field, 0, ""}, + {"SplitHostPort", Func, 0, "func(hostport string) (host string, port string, err error)"}, + {"TCPAddr", Type, 0, ""}, + {"TCPAddr.IP", Field, 0, ""}, + {"TCPAddr.Port", Field, 0, ""}, + {"TCPAddr.Zone", Field, 1, ""}, + {"TCPAddrFromAddrPort", Func, 18, "func(addr netip.AddrPort) *TCPAddr"}, + {"TCPConn", Type, 0, ""}, + {"TCPListener", Type, 0, ""}, + {"UDPAddr", Type, 0, ""}, + {"UDPAddr.IP", Field, 0, ""}, + {"UDPAddr.Port", Field, 0, ""}, + {"UDPAddr.Zone", Field, 1, ""}, + {"UDPAddrFromAddrPort", Func, 18, "func(addr netip.AddrPort) *UDPAddr"}, + {"UDPConn", Type, 0, ""}, + {"UnixAddr", Type, 0, ""}, + {"UnixAddr.Name", Field, 0, ""}, + {"UnixAddr.Net", Field, 0, ""}, + {"UnixConn", Type, 0, ""}, + {"UnixListener", Type, 0, ""}, + {"UnknownNetworkError", Type, 0, ""}, }, "net/http": { - {"(*Client).CloseIdleConnections", Method, 12}, - {"(*Client).Do", Method, 0}, - {"(*Client).Get", Method, 0}, - {"(*Client).Head", Method, 0}, - {"(*Client).Post", Method, 0}, - {"(*Client).PostForm", Method, 0}, - {"(*Cookie).String", Method, 0}, - {"(*Cookie).Valid", Method, 18}, - {"(*MaxBytesError).Error", Method, 19}, - 
{"(*ProtocolError).Error", Method, 0}, - {"(*ProtocolError).Is", Method, 21}, - {"(*Protocols).SetHTTP1", Method, 24}, - {"(*Protocols).SetHTTP2", Method, 24}, - {"(*Protocols).SetUnencryptedHTTP2", Method, 24}, - {"(*Request).AddCookie", Method, 0}, - {"(*Request).BasicAuth", Method, 4}, - {"(*Request).Clone", Method, 13}, - {"(*Request).Context", Method, 7}, - {"(*Request).Cookie", Method, 0}, - {"(*Request).Cookies", Method, 0}, - {"(*Request).CookiesNamed", Method, 23}, - {"(*Request).FormFile", Method, 0}, - {"(*Request).FormValue", Method, 0}, - {"(*Request).MultipartReader", Method, 0}, - {"(*Request).ParseForm", Method, 0}, - {"(*Request).ParseMultipartForm", Method, 0}, - {"(*Request).PathValue", Method, 22}, - {"(*Request).PostFormValue", Method, 1}, - {"(*Request).ProtoAtLeast", Method, 0}, - {"(*Request).Referer", Method, 0}, - {"(*Request).SetBasicAuth", Method, 0}, - {"(*Request).SetPathValue", Method, 22}, - {"(*Request).UserAgent", Method, 0}, - {"(*Request).WithContext", Method, 7}, - {"(*Request).Write", Method, 0}, - {"(*Request).WriteProxy", Method, 0}, - {"(*Response).Cookies", Method, 0}, - {"(*Response).Location", Method, 0}, - {"(*Response).ProtoAtLeast", Method, 0}, - {"(*Response).Write", Method, 0}, - {"(*ResponseController).EnableFullDuplex", Method, 21}, - {"(*ResponseController).Flush", Method, 20}, - {"(*ResponseController).Hijack", Method, 20}, - {"(*ResponseController).SetReadDeadline", Method, 20}, - {"(*ResponseController).SetWriteDeadline", Method, 20}, - {"(*ServeMux).Handle", Method, 0}, - {"(*ServeMux).HandleFunc", Method, 0}, - {"(*ServeMux).Handler", Method, 1}, - {"(*ServeMux).ServeHTTP", Method, 0}, - {"(*Server).Close", Method, 8}, - {"(*Server).ListenAndServe", Method, 0}, - {"(*Server).ListenAndServeTLS", Method, 0}, - {"(*Server).RegisterOnShutdown", Method, 9}, - {"(*Server).Serve", Method, 0}, - {"(*Server).ServeTLS", Method, 9}, - {"(*Server).SetKeepAlivesEnabled", Method, 3}, - {"(*Server).Shutdown", Method, 8}, - {"(*Transport).CancelRequest", Method, 1}, - {"(*Transport).Clone", Method, 13}, - {"(*Transport).CloseIdleConnections", Method, 0}, - {"(*Transport).RegisterProtocol", Method, 0}, - {"(*Transport).RoundTrip", Method, 0}, - {"(ConnState).String", Method, 3}, - {"(Dir).Open", Method, 0}, - {"(HandlerFunc).ServeHTTP", Method, 0}, - {"(Header).Add", Method, 0}, - {"(Header).Clone", Method, 13}, - {"(Header).Del", Method, 0}, - {"(Header).Get", Method, 0}, - {"(Header).Set", Method, 0}, - {"(Header).Values", Method, 14}, - {"(Header).Write", Method, 0}, - {"(Header).WriteSubset", Method, 0}, - {"(Protocols).HTTP1", Method, 24}, - {"(Protocols).HTTP2", Method, 24}, - {"(Protocols).String", Method, 24}, - {"(Protocols).UnencryptedHTTP2", Method, 24}, - {"AllowQuerySemicolons", Func, 17}, - {"CanonicalHeaderKey", Func, 0}, - {"Client", Type, 0}, - {"Client.CheckRedirect", Field, 0}, - {"Client.Jar", Field, 0}, - {"Client.Timeout", Field, 3}, - {"Client.Transport", Field, 0}, - {"CloseNotifier", Type, 1}, - {"ConnState", Type, 3}, - {"Cookie", Type, 0}, - {"Cookie.Domain", Field, 0}, - {"Cookie.Expires", Field, 0}, - {"Cookie.HttpOnly", Field, 0}, - {"Cookie.MaxAge", Field, 0}, - {"Cookie.Name", Field, 0}, - {"Cookie.Partitioned", Field, 23}, - {"Cookie.Path", Field, 0}, - {"Cookie.Quoted", Field, 23}, - {"Cookie.Raw", Field, 0}, - {"Cookie.RawExpires", Field, 0}, - {"Cookie.SameSite", Field, 11}, - {"Cookie.Secure", Field, 0}, - {"Cookie.Unparsed", Field, 0}, - {"Cookie.Value", Field, 0}, - {"CookieJar", Type, 0}, - {"DefaultClient", 
Var, 0}, - {"DefaultMaxHeaderBytes", Const, 0}, - {"DefaultMaxIdleConnsPerHost", Const, 0}, - {"DefaultServeMux", Var, 0}, - {"DefaultTransport", Var, 0}, - {"DetectContentType", Func, 0}, - {"Dir", Type, 0}, - {"ErrAbortHandler", Var, 8}, - {"ErrBodyNotAllowed", Var, 0}, - {"ErrBodyReadAfterClose", Var, 0}, - {"ErrContentLength", Var, 0}, - {"ErrHandlerTimeout", Var, 0}, - {"ErrHeaderTooLong", Var, 0}, - {"ErrHijacked", Var, 0}, - {"ErrLineTooLong", Var, 0}, - {"ErrMissingBoundary", Var, 0}, - {"ErrMissingContentLength", Var, 0}, - {"ErrMissingFile", Var, 0}, - {"ErrNoCookie", Var, 0}, - {"ErrNoLocation", Var, 0}, - {"ErrNotMultipart", Var, 0}, - {"ErrNotSupported", Var, 0}, - {"ErrSchemeMismatch", Var, 21}, - {"ErrServerClosed", Var, 8}, - {"ErrShortBody", Var, 0}, - {"ErrSkipAltProtocol", Var, 6}, - {"ErrUnexpectedTrailer", Var, 0}, - {"ErrUseLastResponse", Var, 7}, - {"ErrWriteAfterFlush", Var, 0}, - {"Error", Func, 0}, - {"FS", Func, 16}, - {"File", Type, 0}, - {"FileServer", Func, 0}, - {"FileServerFS", Func, 22}, - {"FileSystem", Type, 0}, - {"Flusher", Type, 0}, - {"Get", Func, 0}, - {"HTTP2Config", Type, 24}, - {"HTTP2Config.CountError", Field, 24}, - {"HTTP2Config.MaxConcurrentStreams", Field, 24}, - {"HTTP2Config.MaxDecoderHeaderTableSize", Field, 24}, - {"HTTP2Config.MaxEncoderHeaderTableSize", Field, 24}, - {"HTTP2Config.MaxReadFrameSize", Field, 24}, - {"HTTP2Config.MaxReceiveBufferPerConnection", Field, 24}, - {"HTTP2Config.MaxReceiveBufferPerStream", Field, 24}, - {"HTTP2Config.PermitProhibitedCipherSuites", Field, 24}, - {"HTTP2Config.PingTimeout", Field, 24}, - {"HTTP2Config.SendPingTimeout", Field, 24}, - {"HTTP2Config.WriteByteTimeout", Field, 24}, - {"Handle", Func, 0}, - {"HandleFunc", Func, 0}, - {"Handler", Type, 0}, - {"HandlerFunc", Type, 0}, - {"Head", Func, 0}, - {"Header", Type, 0}, - {"Hijacker", Type, 0}, - {"ListenAndServe", Func, 0}, - {"ListenAndServeTLS", Func, 0}, - {"LocalAddrContextKey", Var, 7}, - {"MaxBytesError", Type, 19}, - {"MaxBytesError.Limit", Field, 19}, - {"MaxBytesHandler", Func, 18}, - {"MaxBytesReader", Func, 0}, - {"MethodConnect", Const, 6}, - {"MethodDelete", Const, 6}, - {"MethodGet", Const, 6}, - {"MethodHead", Const, 6}, - {"MethodOptions", Const, 6}, - {"MethodPatch", Const, 6}, - {"MethodPost", Const, 6}, - {"MethodPut", Const, 6}, - {"MethodTrace", Const, 6}, - {"NewFileTransport", Func, 0}, - {"NewFileTransportFS", Func, 22}, - {"NewRequest", Func, 0}, - {"NewRequestWithContext", Func, 13}, - {"NewResponseController", Func, 20}, - {"NewServeMux", Func, 0}, - {"NoBody", Var, 8}, - {"NotFound", Func, 0}, - {"NotFoundHandler", Func, 0}, - {"ParseCookie", Func, 23}, - {"ParseHTTPVersion", Func, 0}, - {"ParseSetCookie", Func, 23}, - {"ParseTime", Func, 1}, - {"Post", Func, 0}, - {"PostForm", Func, 0}, - {"ProtocolError", Type, 0}, - {"ProtocolError.ErrorString", Field, 0}, - {"Protocols", Type, 24}, - {"ProxyFromEnvironment", Func, 0}, - {"ProxyURL", Func, 0}, - {"PushOptions", Type, 8}, - {"PushOptions.Header", Field, 8}, - {"PushOptions.Method", Field, 8}, - {"Pusher", Type, 8}, - {"ReadRequest", Func, 0}, - {"ReadResponse", Func, 0}, - {"Redirect", Func, 0}, - {"RedirectHandler", Func, 0}, - {"Request", Type, 0}, - {"Request.Body", Field, 0}, - {"Request.Cancel", Field, 5}, - {"Request.Close", Field, 0}, - {"Request.ContentLength", Field, 0}, - {"Request.Form", Field, 0}, - {"Request.GetBody", Field, 8}, - {"Request.Header", Field, 0}, - {"Request.Host", Field, 0}, - {"Request.Method", Field, 0}, - {"Request.MultipartForm", Field, 
0}, - {"Request.Pattern", Field, 23}, - {"Request.PostForm", Field, 1}, - {"Request.Proto", Field, 0}, - {"Request.ProtoMajor", Field, 0}, - {"Request.ProtoMinor", Field, 0}, - {"Request.RemoteAddr", Field, 0}, - {"Request.RequestURI", Field, 0}, - {"Request.Response", Field, 7}, - {"Request.TLS", Field, 0}, - {"Request.Trailer", Field, 0}, - {"Request.TransferEncoding", Field, 0}, - {"Request.URL", Field, 0}, - {"Response", Type, 0}, - {"Response.Body", Field, 0}, - {"Response.Close", Field, 0}, - {"Response.ContentLength", Field, 0}, - {"Response.Header", Field, 0}, - {"Response.Proto", Field, 0}, - {"Response.ProtoMajor", Field, 0}, - {"Response.ProtoMinor", Field, 0}, - {"Response.Request", Field, 0}, - {"Response.Status", Field, 0}, - {"Response.StatusCode", Field, 0}, - {"Response.TLS", Field, 3}, - {"Response.Trailer", Field, 0}, - {"Response.TransferEncoding", Field, 0}, - {"Response.Uncompressed", Field, 7}, - {"ResponseController", Type, 20}, - {"ResponseWriter", Type, 0}, - {"RoundTripper", Type, 0}, - {"SameSite", Type, 11}, - {"SameSiteDefaultMode", Const, 11}, - {"SameSiteLaxMode", Const, 11}, - {"SameSiteNoneMode", Const, 13}, - {"SameSiteStrictMode", Const, 11}, - {"Serve", Func, 0}, - {"ServeContent", Func, 0}, - {"ServeFile", Func, 0}, - {"ServeFileFS", Func, 22}, - {"ServeMux", Type, 0}, - {"ServeTLS", Func, 9}, - {"Server", Type, 0}, - {"Server.Addr", Field, 0}, - {"Server.BaseContext", Field, 13}, - {"Server.ConnContext", Field, 13}, - {"Server.ConnState", Field, 3}, - {"Server.DisableGeneralOptionsHandler", Field, 20}, - {"Server.ErrorLog", Field, 3}, - {"Server.HTTP2", Field, 24}, - {"Server.Handler", Field, 0}, - {"Server.IdleTimeout", Field, 8}, - {"Server.MaxHeaderBytes", Field, 0}, - {"Server.Protocols", Field, 24}, - {"Server.ReadHeaderTimeout", Field, 8}, - {"Server.ReadTimeout", Field, 0}, - {"Server.TLSConfig", Field, 0}, - {"Server.TLSNextProto", Field, 1}, - {"Server.WriteTimeout", Field, 0}, - {"ServerContextKey", Var, 7}, - {"SetCookie", Func, 0}, - {"StateActive", Const, 3}, - {"StateClosed", Const, 3}, - {"StateHijacked", Const, 3}, - {"StateIdle", Const, 3}, - {"StateNew", Const, 3}, - {"StatusAccepted", Const, 0}, - {"StatusAlreadyReported", Const, 7}, - {"StatusBadGateway", Const, 0}, - {"StatusBadRequest", Const, 0}, - {"StatusConflict", Const, 0}, - {"StatusContinue", Const, 0}, - {"StatusCreated", Const, 0}, - {"StatusEarlyHints", Const, 13}, - {"StatusExpectationFailed", Const, 0}, - {"StatusFailedDependency", Const, 7}, - {"StatusForbidden", Const, 0}, - {"StatusFound", Const, 0}, - {"StatusGatewayTimeout", Const, 0}, - {"StatusGone", Const, 0}, - {"StatusHTTPVersionNotSupported", Const, 0}, - {"StatusIMUsed", Const, 7}, - {"StatusInsufficientStorage", Const, 7}, - {"StatusInternalServerError", Const, 0}, - {"StatusLengthRequired", Const, 0}, - {"StatusLocked", Const, 7}, - {"StatusLoopDetected", Const, 7}, - {"StatusMethodNotAllowed", Const, 0}, - {"StatusMisdirectedRequest", Const, 11}, - {"StatusMovedPermanently", Const, 0}, - {"StatusMultiStatus", Const, 7}, - {"StatusMultipleChoices", Const, 0}, - {"StatusNetworkAuthenticationRequired", Const, 6}, - {"StatusNoContent", Const, 0}, - {"StatusNonAuthoritativeInfo", Const, 0}, - {"StatusNotAcceptable", Const, 0}, - {"StatusNotExtended", Const, 7}, - {"StatusNotFound", Const, 0}, - {"StatusNotImplemented", Const, 0}, - {"StatusNotModified", Const, 0}, - {"StatusOK", Const, 0}, - {"StatusPartialContent", Const, 0}, - {"StatusPaymentRequired", Const, 0}, - {"StatusPermanentRedirect", Const, 7}, - 
{"StatusPreconditionFailed", Const, 0}, - {"StatusPreconditionRequired", Const, 6}, - {"StatusProcessing", Const, 7}, - {"StatusProxyAuthRequired", Const, 0}, - {"StatusRequestEntityTooLarge", Const, 0}, - {"StatusRequestHeaderFieldsTooLarge", Const, 6}, - {"StatusRequestTimeout", Const, 0}, - {"StatusRequestURITooLong", Const, 0}, - {"StatusRequestedRangeNotSatisfiable", Const, 0}, - {"StatusResetContent", Const, 0}, - {"StatusSeeOther", Const, 0}, - {"StatusServiceUnavailable", Const, 0}, - {"StatusSwitchingProtocols", Const, 0}, - {"StatusTeapot", Const, 0}, - {"StatusTemporaryRedirect", Const, 0}, - {"StatusText", Func, 0}, - {"StatusTooEarly", Const, 12}, - {"StatusTooManyRequests", Const, 6}, - {"StatusUnauthorized", Const, 0}, - {"StatusUnavailableForLegalReasons", Const, 6}, - {"StatusUnprocessableEntity", Const, 7}, - {"StatusUnsupportedMediaType", Const, 0}, - {"StatusUpgradeRequired", Const, 7}, - {"StatusUseProxy", Const, 0}, - {"StatusVariantAlsoNegotiates", Const, 7}, - {"StripPrefix", Func, 0}, - {"TimeFormat", Const, 0}, - {"TimeoutHandler", Func, 0}, - {"TrailerPrefix", Const, 8}, - {"Transport", Type, 0}, - {"Transport.Dial", Field, 0}, - {"Transport.DialContext", Field, 7}, - {"Transport.DialTLS", Field, 4}, - {"Transport.DialTLSContext", Field, 14}, - {"Transport.DisableCompression", Field, 0}, - {"Transport.DisableKeepAlives", Field, 0}, - {"Transport.ExpectContinueTimeout", Field, 6}, - {"Transport.ForceAttemptHTTP2", Field, 13}, - {"Transport.GetProxyConnectHeader", Field, 16}, - {"Transport.HTTP2", Field, 24}, - {"Transport.IdleConnTimeout", Field, 7}, - {"Transport.MaxConnsPerHost", Field, 11}, - {"Transport.MaxIdleConns", Field, 7}, - {"Transport.MaxIdleConnsPerHost", Field, 0}, - {"Transport.MaxResponseHeaderBytes", Field, 7}, - {"Transport.OnProxyConnectResponse", Field, 20}, - {"Transport.Protocols", Field, 24}, - {"Transport.Proxy", Field, 0}, - {"Transport.ProxyConnectHeader", Field, 8}, - {"Transport.ReadBufferSize", Field, 13}, - {"Transport.ResponseHeaderTimeout", Field, 1}, - {"Transport.TLSClientConfig", Field, 0}, - {"Transport.TLSHandshakeTimeout", Field, 3}, - {"Transport.TLSNextProto", Field, 6}, - {"Transport.WriteBufferSize", Field, 13}, + {"(*Client).CloseIdleConnections", Method, 12, ""}, + {"(*Client).Do", Method, 0, ""}, + {"(*Client).Get", Method, 0, ""}, + {"(*Client).Head", Method, 0, ""}, + {"(*Client).Post", Method, 0, ""}, + {"(*Client).PostForm", Method, 0, ""}, + {"(*Cookie).String", Method, 0, ""}, + {"(*Cookie).Valid", Method, 18, ""}, + {"(*MaxBytesError).Error", Method, 19, ""}, + {"(*ProtocolError).Error", Method, 0, ""}, + {"(*ProtocolError).Is", Method, 21, ""}, + {"(*Protocols).SetHTTP1", Method, 24, ""}, + {"(*Protocols).SetHTTP2", Method, 24, ""}, + {"(*Protocols).SetUnencryptedHTTP2", Method, 24, ""}, + {"(*Request).AddCookie", Method, 0, ""}, + {"(*Request).BasicAuth", Method, 4, ""}, + {"(*Request).Clone", Method, 13, ""}, + {"(*Request).Context", Method, 7, ""}, + {"(*Request).Cookie", Method, 0, ""}, + {"(*Request).Cookies", Method, 0, ""}, + {"(*Request).CookiesNamed", Method, 23, ""}, + {"(*Request).FormFile", Method, 0, ""}, + {"(*Request).FormValue", Method, 0, ""}, + {"(*Request).MultipartReader", Method, 0, ""}, + {"(*Request).ParseForm", Method, 0, ""}, + {"(*Request).ParseMultipartForm", Method, 0, ""}, + {"(*Request).PathValue", Method, 22, ""}, + {"(*Request).PostFormValue", Method, 1, ""}, + {"(*Request).ProtoAtLeast", Method, 0, ""}, + {"(*Request).Referer", Method, 0, ""}, + {"(*Request).SetBasicAuth", 
Method, 0, ""}, + {"(*Request).SetPathValue", Method, 22, ""}, + {"(*Request).UserAgent", Method, 0, ""}, + {"(*Request).WithContext", Method, 7, ""}, + {"(*Request).Write", Method, 0, ""}, + {"(*Request).WriteProxy", Method, 0, ""}, + {"(*Response).Cookies", Method, 0, ""}, + {"(*Response).Location", Method, 0, ""}, + {"(*Response).ProtoAtLeast", Method, 0, ""}, + {"(*Response).Write", Method, 0, ""}, + {"(*ResponseController).EnableFullDuplex", Method, 21, ""}, + {"(*ResponseController).Flush", Method, 20, ""}, + {"(*ResponseController).Hijack", Method, 20, ""}, + {"(*ResponseController).SetReadDeadline", Method, 20, ""}, + {"(*ResponseController).SetWriteDeadline", Method, 20, ""}, + {"(*ServeMux).Handle", Method, 0, ""}, + {"(*ServeMux).HandleFunc", Method, 0, ""}, + {"(*ServeMux).Handler", Method, 1, ""}, + {"(*ServeMux).ServeHTTP", Method, 0, ""}, + {"(*Server).Close", Method, 8, ""}, + {"(*Server).ListenAndServe", Method, 0, ""}, + {"(*Server).ListenAndServeTLS", Method, 0, ""}, + {"(*Server).RegisterOnShutdown", Method, 9, ""}, + {"(*Server).Serve", Method, 0, ""}, + {"(*Server).ServeTLS", Method, 9, ""}, + {"(*Server).SetKeepAlivesEnabled", Method, 3, ""}, + {"(*Server).Shutdown", Method, 8, ""}, + {"(*Transport).CancelRequest", Method, 1, ""}, + {"(*Transport).Clone", Method, 13, ""}, + {"(*Transport).CloseIdleConnections", Method, 0, ""}, + {"(*Transport).RegisterProtocol", Method, 0, ""}, + {"(*Transport).RoundTrip", Method, 0, ""}, + {"(ConnState).String", Method, 3, ""}, + {"(Dir).Open", Method, 0, ""}, + {"(HandlerFunc).ServeHTTP", Method, 0, ""}, + {"(Header).Add", Method, 0, ""}, + {"(Header).Clone", Method, 13, ""}, + {"(Header).Del", Method, 0, ""}, + {"(Header).Get", Method, 0, ""}, + {"(Header).Set", Method, 0, ""}, + {"(Header).Values", Method, 14, ""}, + {"(Header).Write", Method, 0, ""}, + {"(Header).WriteSubset", Method, 0, ""}, + {"(Protocols).HTTP1", Method, 24, ""}, + {"(Protocols).HTTP2", Method, 24, ""}, + {"(Protocols).String", Method, 24, ""}, + {"(Protocols).UnencryptedHTTP2", Method, 24, ""}, + {"AllowQuerySemicolons", Func, 17, "func(h Handler) Handler"}, + {"CanonicalHeaderKey", Func, 0, "func(s string) string"}, + {"Client", Type, 0, ""}, + {"Client.CheckRedirect", Field, 0, ""}, + {"Client.Jar", Field, 0, ""}, + {"Client.Timeout", Field, 3, ""}, + {"Client.Transport", Field, 0, ""}, + {"CloseNotifier", Type, 1, ""}, + {"ConnState", Type, 3, ""}, + {"Cookie", Type, 0, ""}, + {"Cookie.Domain", Field, 0, ""}, + {"Cookie.Expires", Field, 0, ""}, + {"Cookie.HttpOnly", Field, 0, ""}, + {"Cookie.MaxAge", Field, 0, ""}, + {"Cookie.Name", Field, 0, ""}, + {"Cookie.Partitioned", Field, 23, ""}, + {"Cookie.Path", Field, 0, ""}, + {"Cookie.Quoted", Field, 23, ""}, + {"Cookie.Raw", Field, 0, ""}, + {"Cookie.RawExpires", Field, 0, ""}, + {"Cookie.SameSite", Field, 11, ""}, + {"Cookie.Secure", Field, 0, ""}, + {"Cookie.Unparsed", Field, 0, ""}, + {"Cookie.Value", Field, 0, ""}, + {"CookieJar", Type, 0, ""}, + {"DefaultClient", Var, 0, ""}, + {"DefaultMaxHeaderBytes", Const, 0, ""}, + {"DefaultMaxIdleConnsPerHost", Const, 0, ""}, + {"DefaultServeMux", Var, 0, ""}, + {"DefaultTransport", Var, 0, ""}, + {"DetectContentType", Func, 0, "func(data []byte) string"}, + {"Dir", Type, 0, ""}, + {"ErrAbortHandler", Var, 8, ""}, + {"ErrBodyNotAllowed", Var, 0, ""}, + {"ErrBodyReadAfterClose", Var, 0, ""}, + {"ErrContentLength", Var, 0, ""}, + {"ErrHandlerTimeout", Var, 0, ""}, + {"ErrHeaderTooLong", Var, 0, ""}, + {"ErrHijacked", Var, 0, ""}, + {"ErrLineTooLong", Var, 0, ""}, + 
{"ErrMissingBoundary", Var, 0, ""}, + {"ErrMissingContentLength", Var, 0, ""}, + {"ErrMissingFile", Var, 0, ""}, + {"ErrNoCookie", Var, 0, ""}, + {"ErrNoLocation", Var, 0, ""}, + {"ErrNotMultipart", Var, 0, ""}, + {"ErrNotSupported", Var, 0, ""}, + {"ErrSchemeMismatch", Var, 21, ""}, + {"ErrServerClosed", Var, 8, ""}, + {"ErrShortBody", Var, 0, ""}, + {"ErrSkipAltProtocol", Var, 6, ""}, + {"ErrUnexpectedTrailer", Var, 0, ""}, + {"ErrUseLastResponse", Var, 7, ""}, + {"ErrWriteAfterFlush", Var, 0, ""}, + {"Error", Func, 0, "func(w ResponseWriter, error string, code int)"}, + {"FS", Func, 16, "func(fsys fs.FS) FileSystem"}, + {"File", Type, 0, ""}, + {"FileServer", Func, 0, "func(root FileSystem) Handler"}, + {"FileServerFS", Func, 22, "func(root fs.FS) Handler"}, + {"FileSystem", Type, 0, ""}, + {"Flusher", Type, 0, ""}, + {"Get", Func, 0, "func(url string) (resp *Response, err error)"}, + {"HTTP2Config", Type, 24, ""}, + {"HTTP2Config.CountError", Field, 24, ""}, + {"HTTP2Config.MaxConcurrentStreams", Field, 24, ""}, + {"HTTP2Config.MaxDecoderHeaderTableSize", Field, 24, ""}, + {"HTTP2Config.MaxEncoderHeaderTableSize", Field, 24, ""}, + {"HTTP2Config.MaxReadFrameSize", Field, 24, ""}, + {"HTTP2Config.MaxReceiveBufferPerConnection", Field, 24, ""}, + {"HTTP2Config.MaxReceiveBufferPerStream", Field, 24, ""}, + {"HTTP2Config.PermitProhibitedCipherSuites", Field, 24, ""}, + {"HTTP2Config.PingTimeout", Field, 24, ""}, + {"HTTP2Config.SendPingTimeout", Field, 24, ""}, + {"HTTP2Config.WriteByteTimeout", Field, 24, ""}, + {"Handle", Func, 0, "func(pattern string, handler Handler)"}, + {"HandleFunc", Func, 0, "func(pattern string, handler func(ResponseWriter, *Request))"}, + {"Handler", Type, 0, ""}, + {"HandlerFunc", Type, 0, ""}, + {"Head", Func, 0, "func(url string) (resp *Response, err error)"}, + {"Header", Type, 0, ""}, + {"Hijacker", Type, 0, ""}, + {"ListenAndServe", Func, 0, "func(addr string, handler Handler) error"}, + {"ListenAndServeTLS", Func, 0, "func(addr string, certFile string, keyFile string, handler Handler) error"}, + {"LocalAddrContextKey", Var, 7, ""}, + {"MaxBytesError", Type, 19, ""}, + {"MaxBytesError.Limit", Field, 19, ""}, + {"MaxBytesHandler", Func, 18, "func(h Handler, n int64) Handler"}, + {"MaxBytesReader", Func, 0, "func(w ResponseWriter, r io.ReadCloser, n int64) io.ReadCloser"}, + {"MethodConnect", Const, 6, ""}, + {"MethodDelete", Const, 6, ""}, + {"MethodGet", Const, 6, ""}, + {"MethodHead", Const, 6, ""}, + {"MethodOptions", Const, 6, ""}, + {"MethodPatch", Const, 6, ""}, + {"MethodPost", Const, 6, ""}, + {"MethodPut", Const, 6, ""}, + {"MethodTrace", Const, 6, ""}, + {"NewFileTransport", Func, 0, "func(fs FileSystem) RoundTripper"}, + {"NewFileTransportFS", Func, 22, "func(fsys fs.FS) RoundTripper"}, + {"NewRequest", Func, 0, "func(method string, url string, body io.Reader) (*Request, error)"}, + {"NewRequestWithContext", Func, 13, "func(ctx context.Context, method string, url string, body io.Reader) (*Request, error)"}, + {"NewResponseController", Func, 20, "func(rw ResponseWriter) *ResponseController"}, + {"NewServeMux", Func, 0, "func() *ServeMux"}, + {"NoBody", Var, 8, ""}, + {"NotFound", Func, 0, "func(w ResponseWriter, r *Request)"}, + {"NotFoundHandler", Func, 0, "func() Handler"}, + {"ParseCookie", Func, 23, "func(line string) ([]*Cookie, error)"}, + {"ParseHTTPVersion", Func, 0, "func(vers string) (major int, minor int, ok bool)"}, + {"ParseSetCookie", Func, 23, "func(line string) (*Cookie, error)"}, + {"ParseTime", Func, 1, "func(text string) (t 
time.Time, err error)"}, + {"Post", Func, 0, "func(url string, contentType string, body io.Reader) (resp *Response, err error)"}, + {"PostForm", Func, 0, "func(url string, data url.Values) (resp *Response, err error)"}, + {"ProtocolError", Type, 0, ""}, + {"ProtocolError.ErrorString", Field, 0, ""}, + {"Protocols", Type, 24, ""}, + {"ProxyFromEnvironment", Func, 0, "func(req *Request) (*url.URL, error)"}, + {"ProxyURL", Func, 0, "func(fixedURL *url.URL) func(*Request) (*url.URL, error)"}, + {"PushOptions", Type, 8, ""}, + {"PushOptions.Header", Field, 8, ""}, + {"PushOptions.Method", Field, 8, ""}, + {"Pusher", Type, 8, ""}, + {"ReadRequest", Func, 0, "func(b *bufio.Reader) (*Request, error)"}, + {"ReadResponse", Func, 0, "func(r *bufio.Reader, req *Request) (*Response, error)"}, + {"Redirect", Func, 0, "func(w ResponseWriter, r *Request, url string, code int)"}, + {"RedirectHandler", Func, 0, "func(url string, code int) Handler"}, + {"Request", Type, 0, ""}, + {"Request.Body", Field, 0, ""}, + {"Request.Cancel", Field, 5, ""}, + {"Request.Close", Field, 0, ""}, + {"Request.ContentLength", Field, 0, ""}, + {"Request.Form", Field, 0, ""}, + {"Request.GetBody", Field, 8, ""}, + {"Request.Header", Field, 0, ""}, + {"Request.Host", Field, 0, ""}, + {"Request.Method", Field, 0, ""}, + {"Request.MultipartForm", Field, 0, ""}, + {"Request.Pattern", Field, 23, ""}, + {"Request.PostForm", Field, 1, ""}, + {"Request.Proto", Field, 0, ""}, + {"Request.ProtoMajor", Field, 0, ""}, + {"Request.ProtoMinor", Field, 0, ""}, + {"Request.RemoteAddr", Field, 0, ""}, + {"Request.RequestURI", Field, 0, ""}, + {"Request.Response", Field, 7, ""}, + {"Request.TLS", Field, 0, ""}, + {"Request.Trailer", Field, 0, ""}, + {"Request.TransferEncoding", Field, 0, ""}, + {"Request.URL", Field, 0, ""}, + {"Response", Type, 0, ""}, + {"Response.Body", Field, 0, ""}, + {"Response.Close", Field, 0, ""}, + {"Response.ContentLength", Field, 0, ""}, + {"Response.Header", Field, 0, ""}, + {"Response.Proto", Field, 0, ""}, + {"Response.ProtoMajor", Field, 0, ""}, + {"Response.ProtoMinor", Field, 0, ""}, + {"Response.Request", Field, 0, ""}, + {"Response.Status", Field, 0, ""}, + {"Response.StatusCode", Field, 0, ""}, + {"Response.TLS", Field, 3, ""}, + {"Response.Trailer", Field, 0, ""}, + {"Response.TransferEncoding", Field, 0, ""}, + {"Response.Uncompressed", Field, 7, ""}, + {"ResponseController", Type, 20, ""}, + {"ResponseWriter", Type, 0, ""}, + {"RoundTripper", Type, 0, ""}, + {"SameSite", Type, 11, ""}, + {"SameSiteDefaultMode", Const, 11, ""}, + {"SameSiteLaxMode", Const, 11, ""}, + {"SameSiteNoneMode", Const, 13, ""}, + {"SameSiteStrictMode", Const, 11, ""}, + {"Serve", Func, 0, "func(l net.Listener, handler Handler) error"}, + {"ServeContent", Func, 0, "func(w ResponseWriter, req *Request, name string, modtime time.Time, content io.ReadSeeker)"}, + {"ServeFile", Func, 0, "func(w ResponseWriter, r *Request, name string)"}, + {"ServeFileFS", Func, 22, "func(w ResponseWriter, r *Request, fsys fs.FS, name string)"}, + {"ServeMux", Type, 0, ""}, + {"ServeTLS", Func, 9, "func(l net.Listener, handler Handler, certFile string, keyFile string) error"}, + {"Server", Type, 0, ""}, + {"Server.Addr", Field, 0, ""}, + {"Server.BaseContext", Field, 13, ""}, + {"Server.ConnContext", Field, 13, ""}, + {"Server.ConnState", Field, 3, ""}, + {"Server.DisableGeneralOptionsHandler", Field, 20, ""}, + {"Server.ErrorLog", Field, 3, ""}, + {"Server.HTTP2", Field, 24, ""}, + {"Server.Handler", Field, 0, ""}, + {"Server.IdleTimeout", Field, 8, 
""}, + {"Server.MaxHeaderBytes", Field, 0, ""}, + {"Server.Protocols", Field, 24, ""}, + {"Server.ReadHeaderTimeout", Field, 8, ""}, + {"Server.ReadTimeout", Field, 0, ""}, + {"Server.TLSConfig", Field, 0, ""}, + {"Server.TLSNextProto", Field, 1, ""}, + {"Server.WriteTimeout", Field, 0, ""}, + {"ServerContextKey", Var, 7, ""}, + {"SetCookie", Func, 0, "func(w ResponseWriter, cookie *Cookie)"}, + {"StateActive", Const, 3, ""}, + {"StateClosed", Const, 3, ""}, + {"StateHijacked", Const, 3, ""}, + {"StateIdle", Const, 3, ""}, + {"StateNew", Const, 3, ""}, + {"StatusAccepted", Const, 0, ""}, + {"StatusAlreadyReported", Const, 7, ""}, + {"StatusBadGateway", Const, 0, ""}, + {"StatusBadRequest", Const, 0, ""}, + {"StatusConflict", Const, 0, ""}, + {"StatusContinue", Const, 0, ""}, + {"StatusCreated", Const, 0, ""}, + {"StatusEarlyHints", Const, 13, ""}, + {"StatusExpectationFailed", Const, 0, ""}, + {"StatusFailedDependency", Const, 7, ""}, + {"StatusForbidden", Const, 0, ""}, + {"StatusFound", Const, 0, ""}, + {"StatusGatewayTimeout", Const, 0, ""}, + {"StatusGone", Const, 0, ""}, + {"StatusHTTPVersionNotSupported", Const, 0, ""}, + {"StatusIMUsed", Const, 7, ""}, + {"StatusInsufficientStorage", Const, 7, ""}, + {"StatusInternalServerError", Const, 0, ""}, + {"StatusLengthRequired", Const, 0, ""}, + {"StatusLocked", Const, 7, ""}, + {"StatusLoopDetected", Const, 7, ""}, + {"StatusMethodNotAllowed", Const, 0, ""}, + {"StatusMisdirectedRequest", Const, 11, ""}, + {"StatusMovedPermanently", Const, 0, ""}, + {"StatusMultiStatus", Const, 7, ""}, + {"StatusMultipleChoices", Const, 0, ""}, + {"StatusNetworkAuthenticationRequired", Const, 6, ""}, + {"StatusNoContent", Const, 0, ""}, + {"StatusNonAuthoritativeInfo", Const, 0, ""}, + {"StatusNotAcceptable", Const, 0, ""}, + {"StatusNotExtended", Const, 7, ""}, + {"StatusNotFound", Const, 0, ""}, + {"StatusNotImplemented", Const, 0, ""}, + {"StatusNotModified", Const, 0, ""}, + {"StatusOK", Const, 0, ""}, + {"StatusPartialContent", Const, 0, ""}, + {"StatusPaymentRequired", Const, 0, ""}, + {"StatusPermanentRedirect", Const, 7, ""}, + {"StatusPreconditionFailed", Const, 0, ""}, + {"StatusPreconditionRequired", Const, 6, ""}, + {"StatusProcessing", Const, 7, ""}, + {"StatusProxyAuthRequired", Const, 0, ""}, + {"StatusRequestEntityTooLarge", Const, 0, ""}, + {"StatusRequestHeaderFieldsTooLarge", Const, 6, ""}, + {"StatusRequestTimeout", Const, 0, ""}, + {"StatusRequestURITooLong", Const, 0, ""}, + {"StatusRequestedRangeNotSatisfiable", Const, 0, ""}, + {"StatusResetContent", Const, 0, ""}, + {"StatusSeeOther", Const, 0, ""}, + {"StatusServiceUnavailable", Const, 0, ""}, + {"StatusSwitchingProtocols", Const, 0, ""}, + {"StatusTeapot", Const, 0, ""}, + {"StatusTemporaryRedirect", Const, 0, ""}, + {"StatusText", Func, 0, "func(code int) string"}, + {"StatusTooEarly", Const, 12, ""}, + {"StatusTooManyRequests", Const, 6, ""}, + {"StatusUnauthorized", Const, 0, ""}, + {"StatusUnavailableForLegalReasons", Const, 6, ""}, + {"StatusUnprocessableEntity", Const, 7, ""}, + {"StatusUnsupportedMediaType", Const, 0, ""}, + {"StatusUpgradeRequired", Const, 7, ""}, + {"StatusUseProxy", Const, 0, ""}, + {"StatusVariantAlsoNegotiates", Const, 7, ""}, + {"StripPrefix", Func, 0, "func(prefix string, h Handler) Handler"}, + {"TimeFormat", Const, 0, ""}, + {"TimeoutHandler", Func, 0, "func(h Handler, dt time.Duration, msg string) Handler"}, + {"TrailerPrefix", Const, 8, ""}, + {"Transport", Type, 0, ""}, + {"Transport.Dial", Field, 0, ""}, + {"Transport.DialContext", Field, 7, 
""}, + {"Transport.DialTLS", Field, 4, ""}, + {"Transport.DialTLSContext", Field, 14, ""}, + {"Transport.DisableCompression", Field, 0, ""}, + {"Transport.DisableKeepAlives", Field, 0, ""}, + {"Transport.ExpectContinueTimeout", Field, 6, ""}, + {"Transport.ForceAttemptHTTP2", Field, 13, ""}, + {"Transport.GetProxyConnectHeader", Field, 16, ""}, + {"Transport.HTTP2", Field, 24, ""}, + {"Transport.IdleConnTimeout", Field, 7, ""}, + {"Transport.MaxConnsPerHost", Field, 11, ""}, + {"Transport.MaxIdleConns", Field, 7, ""}, + {"Transport.MaxIdleConnsPerHost", Field, 0, ""}, + {"Transport.MaxResponseHeaderBytes", Field, 7, ""}, + {"Transport.OnProxyConnectResponse", Field, 20, ""}, + {"Transport.Protocols", Field, 24, ""}, + {"Transport.Proxy", Field, 0, ""}, + {"Transport.ProxyConnectHeader", Field, 8, ""}, + {"Transport.ReadBufferSize", Field, 13, ""}, + {"Transport.ResponseHeaderTimeout", Field, 1, ""}, + {"Transport.TLSClientConfig", Field, 0, ""}, + {"Transport.TLSHandshakeTimeout", Field, 3, ""}, + {"Transport.TLSNextProto", Field, 6, ""}, + {"Transport.WriteBufferSize", Field, 13, ""}, }, "net/http/cgi": { - {"(*Handler).ServeHTTP", Method, 0}, - {"Handler", Type, 0}, - {"Handler.Args", Field, 0}, - {"Handler.Dir", Field, 0}, - {"Handler.Env", Field, 0}, - {"Handler.InheritEnv", Field, 0}, - {"Handler.Logger", Field, 0}, - {"Handler.Path", Field, 0}, - {"Handler.PathLocationHandler", Field, 0}, - {"Handler.Root", Field, 0}, - {"Handler.Stderr", Field, 7}, - {"Request", Func, 0}, - {"RequestFromMap", Func, 0}, - {"Serve", Func, 0}, + {"(*Handler).ServeHTTP", Method, 0, ""}, + {"Handler", Type, 0, ""}, + {"Handler.Args", Field, 0, ""}, + {"Handler.Dir", Field, 0, ""}, + {"Handler.Env", Field, 0, ""}, + {"Handler.InheritEnv", Field, 0, ""}, + {"Handler.Logger", Field, 0, ""}, + {"Handler.Path", Field, 0, ""}, + {"Handler.PathLocationHandler", Field, 0, ""}, + {"Handler.Root", Field, 0, ""}, + {"Handler.Stderr", Field, 7, ""}, + {"Request", Func, 0, "func() (*http.Request, error)"}, + {"RequestFromMap", Func, 0, "func(params map[string]string) (*http.Request, error)"}, + {"Serve", Func, 0, "func(handler http.Handler) error"}, }, "net/http/cookiejar": { - {"(*Jar).Cookies", Method, 1}, - {"(*Jar).SetCookies", Method, 1}, - {"Jar", Type, 1}, - {"New", Func, 1}, - {"Options", Type, 1}, - {"Options.PublicSuffixList", Field, 1}, - {"PublicSuffixList", Type, 1}, + {"(*Jar).Cookies", Method, 1, ""}, + {"(*Jar).SetCookies", Method, 1, ""}, + {"Jar", Type, 1, ""}, + {"New", Func, 1, "func(o *Options) (*Jar, error)"}, + {"Options", Type, 1, ""}, + {"Options.PublicSuffixList", Field, 1, ""}, + {"PublicSuffixList", Type, 1, ""}, }, "net/http/fcgi": { - {"ErrConnClosed", Var, 5}, - {"ErrRequestAborted", Var, 5}, - {"ProcessEnv", Func, 9}, - {"Serve", Func, 0}, + {"ErrConnClosed", Var, 5, ""}, + {"ErrRequestAborted", Var, 5, ""}, + {"ProcessEnv", Func, 9, "func(r *http.Request) map[string]string"}, + {"Serve", Func, 0, "func(l net.Listener, handler http.Handler) error"}, }, "net/http/httptest": { - {"(*ResponseRecorder).Flush", Method, 0}, - {"(*ResponseRecorder).Header", Method, 0}, - {"(*ResponseRecorder).Result", Method, 7}, - {"(*ResponseRecorder).Write", Method, 0}, - {"(*ResponseRecorder).WriteHeader", Method, 0}, - {"(*ResponseRecorder).WriteString", Method, 6}, - {"(*Server).Certificate", Method, 9}, - {"(*Server).Client", Method, 9}, - {"(*Server).Close", Method, 0}, - {"(*Server).CloseClientConnections", Method, 0}, - {"(*Server).Start", Method, 0}, - {"(*Server).StartTLS", Method, 0}, - 
{"DefaultRemoteAddr", Const, 0}, - {"NewRecorder", Func, 0}, - {"NewRequest", Func, 7}, - {"NewRequestWithContext", Func, 23}, - {"NewServer", Func, 0}, - {"NewTLSServer", Func, 0}, - {"NewUnstartedServer", Func, 0}, - {"ResponseRecorder", Type, 0}, - {"ResponseRecorder.Body", Field, 0}, - {"ResponseRecorder.Code", Field, 0}, - {"ResponseRecorder.Flushed", Field, 0}, - {"ResponseRecorder.HeaderMap", Field, 0}, - {"Server", Type, 0}, - {"Server.Config", Field, 0}, - {"Server.EnableHTTP2", Field, 14}, - {"Server.Listener", Field, 0}, - {"Server.TLS", Field, 0}, - {"Server.URL", Field, 0}, + {"(*ResponseRecorder).Flush", Method, 0, ""}, + {"(*ResponseRecorder).Header", Method, 0, ""}, + {"(*ResponseRecorder).Result", Method, 7, ""}, + {"(*ResponseRecorder).Write", Method, 0, ""}, + {"(*ResponseRecorder).WriteHeader", Method, 0, ""}, + {"(*ResponseRecorder).WriteString", Method, 6, ""}, + {"(*Server).Certificate", Method, 9, ""}, + {"(*Server).Client", Method, 9, ""}, + {"(*Server).Close", Method, 0, ""}, + {"(*Server).CloseClientConnections", Method, 0, ""}, + {"(*Server).Start", Method, 0, ""}, + {"(*Server).StartTLS", Method, 0, ""}, + {"DefaultRemoteAddr", Const, 0, ""}, + {"NewRecorder", Func, 0, "func() *ResponseRecorder"}, + {"NewRequest", Func, 7, "func(method string, target string, body io.Reader) *http.Request"}, + {"NewRequestWithContext", Func, 23, "func(ctx context.Context, method string, target string, body io.Reader) *http.Request"}, + {"NewServer", Func, 0, "func(handler http.Handler) *Server"}, + {"NewTLSServer", Func, 0, "func(handler http.Handler) *Server"}, + {"NewUnstartedServer", Func, 0, "func(handler http.Handler) *Server"}, + {"ResponseRecorder", Type, 0, ""}, + {"ResponseRecorder.Body", Field, 0, ""}, + {"ResponseRecorder.Code", Field, 0, ""}, + {"ResponseRecorder.Flushed", Field, 0, ""}, + {"ResponseRecorder.HeaderMap", Field, 0, ""}, + {"Server", Type, 0, ""}, + {"Server.Config", Field, 0, ""}, + {"Server.EnableHTTP2", Field, 14, ""}, + {"Server.Listener", Field, 0, ""}, + {"Server.TLS", Field, 0, ""}, + {"Server.URL", Field, 0, ""}, }, "net/http/httptrace": { - {"ClientTrace", Type, 7}, - {"ClientTrace.ConnectDone", Field, 7}, - {"ClientTrace.ConnectStart", Field, 7}, - {"ClientTrace.DNSDone", Field, 7}, - {"ClientTrace.DNSStart", Field, 7}, - {"ClientTrace.GetConn", Field, 7}, - {"ClientTrace.Got100Continue", Field, 7}, - {"ClientTrace.Got1xxResponse", Field, 11}, - {"ClientTrace.GotConn", Field, 7}, - {"ClientTrace.GotFirstResponseByte", Field, 7}, - {"ClientTrace.PutIdleConn", Field, 7}, - {"ClientTrace.TLSHandshakeDone", Field, 8}, - {"ClientTrace.TLSHandshakeStart", Field, 8}, - {"ClientTrace.Wait100Continue", Field, 7}, - {"ClientTrace.WroteHeaderField", Field, 11}, - {"ClientTrace.WroteHeaders", Field, 7}, - {"ClientTrace.WroteRequest", Field, 7}, - {"ContextClientTrace", Func, 7}, - {"DNSDoneInfo", Type, 7}, - {"DNSDoneInfo.Addrs", Field, 7}, - {"DNSDoneInfo.Coalesced", Field, 7}, - {"DNSDoneInfo.Err", Field, 7}, - {"DNSStartInfo", Type, 7}, - {"DNSStartInfo.Host", Field, 7}, - {"GotConnInfo", Type, 7}, - {"GotConnInfo.Conn", Field, 7}, - {"GotConnInfo.IdleTime", Field, 7}, - {"GotConnInfo.Reused", Field, 7}, - {"GotConnInfo.WasIdle", Field, 7}, - {"WithClientTrace", Func, 7}, - {"WroteRequestInfo", Type, 7}, - {"WroteRequestInfo.Err", Field, 7}, + {"ClientTrace", Type, 7, ""}, + {"ClientTrace.ConnectDone", Field, 7, ""}, + {"ClientTrace.ConnectStart", Field, 7, ""}, + {"ClientTrace.DNSDone", Field, 7, ""}, + {"ClientTrace.DNSStart", Field, 7, ""}, + 
{"ClientTrace.GetConn", Field, 7, ""}, + {"ClientTrace.Got100Continue", Field, 7, ""}, + {"ClientTrace.Got1xxResponse", Field, 11, ""}, + {"ClientTrace.GotConn", Field, 7, ""}, + {"ClientTrace.GotFirstResponseByte", Field, 7, ""}, + {"ClientTrace.PutIdleConn", Field, 7, ""}, + {"ClientTrace.TLSHandshakeDone", Field, 8, ""}, + {"ClientTrace.TLSHandshakeStart", Field, 8, ""}, + {"ClientTrace.Wait100Continue", Field, 7, ""}, + {"ClientTrace.WroteHeaderField", Field, 11, ""}, + {"ClientTrace.WroteHeaders", Field, 7, ""}, + {"ClientTrace.WroteRequest", Field, 7, ""}, + {"ContextClientTrace", Func, 7, "func(ctx context.Context) *ClientTrace"}, + {"DNSDoneInfo", Type, 7, ""}, + {"DNSDoneInfo.Addrs", Field, 7, ""}, + {"DNSDoneInfo.Coalesced", Field, 7, ""}, + {"DNSDoneInfo.Err", Field, 7, ""}, + {"DNSStartInfo", Type, 7, ""}, + {"DNSStartInfo.Host", Field, 7, ""}, + {"GotConnInfo", Type, 7, ""}, + {"GotConnInfo.Conn", Field, 7, ""}, + {"GotConnInfo.IdleTime", Field, 7, ""}, + {"GotConnInfo.Reused", Field, 7, ""}, + {"GotConnInfo.WasIdle", Field, 7, ""}, + {"WithClientTrace", Func, 7, "func(ctx context.Context, trace *ClientTrace) context.Context"}, + {"WroteRequestInfo", Type, 7, ""}, + {"WroteRequestInfo.Err", Field, 7, ""}, }, "net/http/httputil": { - {"(*ClientConn).Close", Method, 0}, - {"(*ClientConn).Do", Method, 0}, - {"(*ClientConn).Hijack", Method, 0}, - {"(*ClientConn).Pending", Method, 0}, - {"(*ClientConn).Read", Method, 0}, - {"(*ClientConn).Write", Method, 0}, - {"(*ProxyRequest).SetURL", Method, 20}, - {"(*ProxyRequest).SetXForwarded", Method, 20}, - {"(*ReverseProxy).ServeHTTP", Method, 0}, - {"(*ServerConn).Close", Method, 0}, - {"(*ServerConn).Hijack", Method, 0}, - {"(*ServerConn).Pending", Method, 0}, - {"(*ServerConn).Read", Method, 0}, - {"(*ServerConn).Write", Method, 0}, - {"BufferPool", Type, 6}, - {"ClientConn", Type, 0}, - {"DumpRequest", Func, 0}, - {"DumpRequestOut", Func, 0}, - {"DumpResponse", Func, 0}, - {"ErrClosed", Var, 0}, - {"ErrLineTooLong", Var, 0}, - {"ErrPersistEOF", Var, 0}, - {"ErrPipeline", Var, 0}, - {"NewChunkedReader", Func, 0}, - {"NewChunkedWriter", Func, 0}, - {"NewClientConn", Func, 0}, - {"NewProxyClientConn", Func, 0}, - {"NewServerConn", Func, 0}, - {"NewSingleHostReverseProxy", Func, 0}, - {"ProxyRequest", Type, 20}, - {"ProxyRequest.In", Field, 20}, - {"ProxyRequest.Out", Field, 20}, - {"ReverseProxy", Type, 0}, - {"ReverseProxy.BufferPool", Field, 6}, - {"ReverseProxy.Director", Field, 0}, - {"ReverseProxy.ErrorHandler", Field, 11}, - {"ReverseProxy.ErrorLog", Field, 4}, - {"ReverseProxy.FlushInterval", Field, 0}, - {"ReverseProxy.ModifyResponse", Field, 8}, - {"ReverseProxy.Rewrite", Field, 20}, - {"ReverseProxy.Transport", Field, 0}, - {"ServerConn", Type, 0}, + {"(*ClientConn).Close", Method, 0, ""}, + {"(*ClientConn).Do", Method, 0, ""}, + {"(*ClientConn).Hijack", Method, 0, ""}, + {"(*ClientConn).Pending", Method, 0, ""}, + {"(*ClientConn).Read", Method, 0, ""}, + {"(*ClientConn).Write", Method, 0, ""}, + {"(*ProxyRequest).SetURL", Method, 20, ""}, + {"(*ProxyRequest).SetXForwarded", Method, 20, ""}, + {"(*ReverseProxy).ServeHTTP", Method, 0, ""}, + {"(*ServerConn).Close", Method, 0, ""}, + {"(*ServerConn).Hijack", Method, 0, ""}, + {"(*ServerConn).Pending", Method, 0, ""}, + {"(*ServerConn).Read", Method, 0, ""}, + {"(*ServerConn).Write", Method, 0, ""}, + {"BufferPool", Type, 6, ""}, + {"ClientConn", Type, 0, ""}, + {"DumpRequest", Func, 0, "func(req *http.Request, body bool) ([]byte, error)"}, + {"DumpRequestOut", Func, 0, "func(req 
*http.Request, body bool) ([]byte, error)"}, + {"DumpResponse", Func, 0, "func(resp *http.Response, body bool) ([]byte, error)"}, + {"ErrClosed", Var, 0, ""}, + {"ErrLineTooLong", Var, 0, ""}, + {"ErrPersistEOF", Var, 0, ""}, + {"ErrPipeline", Var, 0, ""}, + {"NewChunkedReader", Func, 0, "func(r io.Reader) io.Reader"}, + {"NewChunkedWriter", Func, 0, "func(w io.Writer) io.WriteCloser"}, + {"NewClientConn", Func, 0, "func(c net.Conn, r *bufio.Reader) *ClientConn"}, + {"NewProxyClientConn", Func, 0, "func(c net.Conn, r *bufio.Reader) *ClientConn"}, + {"NewServerConn", Func, 0, "func(c net.Conn, r *bufio.Reader) *ServerConn"}, + {"NewSingleHostReverseProxy", Func, 0, "func(target *url.URL) *ReverseProxy"}, + {"ProxyRequest", Type, 20, ""}, + {"ProxyRequest.In", Field, 20, ""}, + {"ProxyRequest.Out", Field, 20, ""}, + {"ReverseProxy", Type, 0, ""}, + {"ReverseProxy.BufferPool", Field, 6, ""}, + {"ReverseProxy.Director", Field, 0, ""}, + {"ReverseProxy.ErrorHandler", Field, 11, ""}, + {"ReverseProxy.ErrorLog", Field, 4, ""}, + {"ReverseProxy.FlushInterval", Field, 0, ""}, + {"ReverseProxy.ModifyResponse", Field, 8, ""}, + {"ReverseProxy.Rewrite", Field, 20, ""}, + {"ReverseProxy.Transport", Field, 0, ""}, + {"ServerConn", Type, 0, ""}, }, "net/http/pprof": { - {"Cmdline", Func, 0}, - {"Handler", Func, 0}, - {"Index", Func, 0}, - {"Profile", Func, 0}, - {"Symbol", Func, 0}, - {"Trace", Func, 5}, + {"Cmdline", Func, 0, "func(w http.ResponseWriter, r *http.Request)"}, + {"Handler", Func, 0, "func(name string) http.Handler"}, + {"Index", Func, 0, "func(w http.ResponseWriter, r *http.Request)"}, + {"Profile", Func, 0, "func(w http.ResponseWriter, r *http.Request)"}, + {"Symbol", Func, 0, "func(w http.ResponseWriter, r *http.Request)"}, + {"Trace", Func, 5, "func(w http.ResponseWriter, r *http.Request)"}, }, "net/mail": { - {"(*Address).String", Method, 0}, - {"(*AddressParser).Parse", Method, 5}, - {"(*AddressParser).ParseList", Method, 5}, - {"(Header).AddressList", Method, 0}, - {"(Header).Date", Method, 0}, - {"(Header).Get", Method, 0}, - {"Address", Type, 0}, - {"Address.Address", Field, 0}, - {"Address.Name", Field, 0}, - {"AddressParser", Type, 5}, - {"AddressParser.WordDecoder", Field, 5}, - {"ErrHeaderNotPresent", Var, 0}, - {"Header", Type, 0}, - {"Message", Type, 0}, - {"Message.Body", Field, 0}, - {"Message.Header", Field, 0}, - {"ParseAddress", Func, 1}, - {"ParseAddressList", Func, 1}, - {"ParseDate", Func, 8}, - {"ReadMessage", Func, 0}, + {"(*Address).String", Method, 0, ""}, + {"(*AddressParser).Parse", Method, 5, ""}, + {"(*AddressParser).ParseList", Method, 5, ""}, + {"(Header).AddressList", Method, 0, ""}, + {"(Header).Date", Method, 0, ""}, + {"(Header).Get", Method, 0, ""}, + {"Address", Type, 0, ""}, + {"Address.Address", Field, 0, ""}, + {"Address.Name", Field, 0, ""}, + {"AddressParser", Type, 5, ""}, + {"AddressParser.WordDecoder", Field, 5, ""}, + {"ErrHeaderNotPresent", Var, 0, ""}, + {"Header", Type, 0, ""}, + {"Message", Type, 0, ""}, + {"Message.Body", Field, 0, ""}, + {"Message.Header", Field, 0, ""}, + {"ParseAddress", Func, 1, "func(address string) (*Address, error)"}, + {"ParseAddressList", Func, 1, "func(list string) ([]*Address, error)"}, + {"ParseDate", Func, 8, "func(date string) (time.Time, error)"}, + {"ReadMessage", Func, 0, "func(r io.Reader) (msg *Message, err error)"}, }, "net/netip": { - {"(*Addr).UnmarshalBinary", Method, 18}, - {"(*Addr).UnmarshalText", Method, 18}, - {"(*AddrPort).UnmarshalBinary", Method, 18}, - {"(*AddrPort).UnmarshalText", Method, 
18}, - {"(*Prefix).UnmarshalBinary", Method, 18}, - {"(*Prefix).UnmarshalText", Method, 18}, - {"(Addr).AppendBinary", Method, 24}, - {"(Addr).AppendText", Method, 24}, - {"(Addr).AppendTo", Method, 18}, - {"(Addr).As16", Method, 18}, - {"(Addr).As4", Method, 18}, - {"(Addr).AsSlice", Method, 18}, - {"(Addr).BitLen", Method, 18}, - {"(Addr).Compare", Method, 18}, - {"(Addr).Is4", Method, 18}, - {"(Addr).Is4In6", Method, 18}, - {"(Addr).Is6", Method, 18}, - {"(Addr).IsGlobalUnicast", Method, 18}, - {"(Addr).IsInterfaceLocalMulticast", Method, 18}, - {"(Addr).IsLinkLocalMulticast", Method, 18}, - {"(Addr).IsLinkLocalUnicast", Method, 18}, - {"(Addr).IsLoopback", Method, 18}, - {"(Addr).IsMulticast", Method, 18}, - {"(Addr).IsPrivate", Method, 18}, - {"(Addr).IsUnspecified", Method, 18}, - {"(Addr).IsValid", Method, 18}, - {"(Addr).Less", Method, 18}, - {"(Addr).MarshalBinary", Method, 18}, - {"(Addr).MarshalText", Method, 18}, - {"(Addr).Next", Method, 18}, - {"(Addr).Prefix", Method, 18}, - {"(Addr).Prev", Method, 18}, - {"(Addr).String", Method, 18}, - {"(Addr).StringExpanded", Method, 18}, - {"(Addr).Unmap", Method, 18}, - {"(Addr).WithZone", Method, 18}, - {"(Addr).Zone", Method, 18}, - {"(AddrPort).Addr", Method, 18}, - {"(AddrPort).AppendBinary", Method, 24}, - {"(AddrPort).AppendText", Method, 24}, - {"(AddrPort).AppendTo", Method, 18}, - {"(AddrPort).Compare", Method, 22}, - {"(AddrPort).IsValid", Method, 18}, - {"(AddrPort).MarshalBinary", Method, 18}, - {"(AddrPort).MarshalText", Method, 18}, - {"(AddrPort).Port", Method, 18}, - {"(AddrPort).String", Method, 18}, - {"(Prefix).Addr", Method, 18}, - {"(Prefix).AppendBinary", Method, 24}, - {"(Prefix).AppendText", Method, 24}, - {"(Prefix).AppendTo", Method, 18}, - {"(Prefix).Bits", Method, 18}, - {"(Prefix).Contains", Method, 18}, - {"(Prefix).IsSingleIP", Method, 18}, - {"(Prefix).IsValid", Method, 18}, - {"(Prefix).MarshalBinary", Method, 18}, - {"(Prefix).MarshalText", Method, 18}, - {"(Prefix).Masked", Method, 18}, - {"(Prefix).Overlaps", Method, 18}, - {"(Prefix).String", Method, 18}, - {"Addr", Type, 18}, - {"AddrFrom16", Func, 18}, - {"AddrFrom4", Func, 18}, - {"AddrFromSlice", Func, 18}, - {"AddrPort", Type, 18}, - {"AddrPortFrom", Func, 18}, - {"IPv4Unspecified", Func, 18}, - {"IPv6LinkLocalAllNodes", Func, 18}, - {"IPv6LinkLocalAllRouters", Func, 20}, - {"IPv6Loopback", Func, 20}, - {"IPv6Unspecified", Func, 18}, - {"MustParseAddr", Func, 18}, - {"MustParseAddrPort", Func, 18}, - {"MustParsePrefix", Func, 18}, - {"ParseAddr", Func, 18}, - {"ParseAddrPort", Func, 18}, - {"ParsePrefix", Func, 18}, - {"Prefix", Type, 18}, - {"PrefixFrom", Func, 18}, + {"(*Addr).UnmarshalBinary", Method, 18, ""}, + {"(*Addr).UnmarshalText", Method, 18, ""}, + {"(*AddrPort).UnmarshalBinary", Method, 18, ""}, + {"(*AddrPort).UnmarshalText", Method, 18, ""}, + {"(*Prefix).UnmarshalBinary", Method, 18, ""}, + {"(*Prefix).UnmarshalText", Method, 18, ""}, + {"(Addr).AppendBinary", Method, 24, ""}, + {"(Addr).AppendText", Method, 24, ""}, + {"(Addr).AppendTo", Method, 18, ""}, + {"(Addr).As16", Method, 18, ""}, + {"(Addr).As4", Method, 18, ""}, + {"(Addr).AsSlice", Method, 18, ""}, + {"(Addr).BitLen", Method, 18, ""}, + {"(Addr).Compare", Method, 18, ""}, + {"(Addr).Is4", Method, 18, ""}, + {"(Addr).Is4In6", Method, 18, ""}, + {"(Addr).Is6", Method, 18, ""}, + {"(Addr).IsGlobalUnicast", Method, 18, ""}, + {"(Addr).IsInterfaceLocalMulticast", Method, 18, ""}, + {"(Addr).IsLinkLocalMulticast", Method, 18, ""}, + {"(Addr).IsLinkLocalUnicast", Method, 18, 
""}, + {"(Addr).IsLoopback", Method, 18, ""}, + {"(Addr).IsMulticast", Method, 18, ""}, + {"(Addr).IsPrivate", Method, 18, ""}, + {"(Addr).IsUnspecified", Method, 18, ""}, + {"(Addr).IsValid", Method, 18, ""}, + {"(Addr).Less", Method, 18, ""}, + {"(Addr).MarshalBinary", Method, 18, ""}, + {"(Addr).MarshalText", Method, 18, ""}, + {"(Addr).Next", Method, 18, ""}, + {"(Addr).Prefix", Method, 18, ""}, + {"(Addr).Prev", Method, 18, ""}, + {"(Addr).String", Method, 18, ""}, + {"(Addr).StringExpanded", Method, 18, ""}, + {"(Addr).Unmap", Method, 18, ""}, + {"(Addr).WithZone", Method, 18, ""}, + {"(Addr).Zone", Method, 18, ""}, + {"(AddrPort).Addr", Method, 18, ""}, + {"(AddrPort).AppendBinary", Method, 24, ""}, + {"(AddrPort).AppendText", Method, 24, ""}, + {"(AddrPort).AppendTo", Method, 18, ""}, + {"(AddrPort).Compare", Method, 22, ""}, + {"(AddrPort).IsValid", Method, 18, ""}, + {"(AddrPort).MarshalBinary", Method, 18, ""}, + {"(AddrPort).MarshalText", Method, 18, ""}, + {"(AddrPort).Port", Method, 18, ""}, + {"(AddrPort).String", Method, 18, ""}, + {"(Prefix).Addr", Method, 18, ""}, + {"(Prefix).AppendBinary", Method, 24, ""}, + {"(Prefix).AppendText", Method, 24, ""}, + {"(Prefix).AppendTo", Method, 18, ""}, + {"(Prefix).Bits", Method, 18, ""}, + {"(Prefix).Contains", Method, 18, ""}, + {"(Prefix).IsSingleIP", Method, 18, ""}, + {"(Prefix).IsValid", Method, 18, ""}, + {"(Prefix).MarshalBinary", Method, 18, ""}, + {"(Prefix).MarshalText", Method, 18, ""}, + {"(Prefix).Masked", Method, 18, ""}, + {"(Prefix).Overlaps", Method, 18, ""}, + {"(Prefix).String", Method, 18, ""}, + {"Addr", Type, 18, ""}, + {"AddrFrom16", Func, 18, "func(addr [16]byte) Addr"}, + {"AddrFrom4", Func, 18, "func(addr [4]byte) Addr"}, + {"AddrFromSlice", Func, 18, "func(slice []byte) (ip Addr, ok bool)"}, + {"AddrPort", Type, 18, ""}, + {"AddrPortFrom", Func, 18, "func(ip Addr, port uint16) AddrPort"}, + {"IPv4Unspecified", Func, 18, "func() Addr"}, + {"IPv6LinkLocalAllNodes", Func, 18, "func() Addr"}, + {"IPv6LinkLocalAllRouters", Func, 20, "func() Addr"}, + {"IPv6Loopback", Func, 20, "func() Addr"}, + {"IPv6Unspecified", Func, 18, "func() Addr"}, + {"MustParseAddr", Func, 18, "func(s string) Addr"}, + {"MustParseAddrPort", Func, 18, "func(s string) AddrPort"}, + {"MustParsePrefix", Func, 18, "func(s string) Prefix"}, + {"ParseAddr", Func, 18, "func(s string) (Addr, error)"}, + {"ParseAddrPort", Func, 18, "func(s string) (AddrPort, error)"}, + {"ParsePrefix", Func, 18, "func(s string) (Prefix, error)"}, + {"Prefix", Type, 18, ""}, + {"PrefixFrom", Func, 18, "func(ip Addr, bits int) Prefix"}, }, "net/rpc": { - {"(*Client).Call", Method, 0}, - {"(*Client).Close", Method, 0}, - {"(*Client).Go", Method, 0}, - {"(*Server).Accept", Method, 0}, - {"(*Server).HandleHTTP", Method, 0}, - {"(*Server).Register", Method, 0}, - {"(*Server).RegisterName", Method, 0}, - {"(*Server).ServeCodec", Method, 0}, - {"(*Server).ServeConn", Method, 0}, - {"(*Server).ServeHTTP", Method, 0}, - {"(*Server).ServeRequest", Method, 0}, - {"(ServerError).Error", Method, 0}, - {"Accept", Func, 0}, - {"Call", Type, 0}, - {"Call.Args", Field, 0}, - {"Call.Done", Field, 0}, - {"Call.Error", Field, 0}, - {"Call.Reply", Field, 0}, - {"Call.ServiceMethod", Field, 0}, - {"Client", Type, 0}, - {"ClientCodec", Type, 0}, - {"DefaultDebugPath", Const, 0}, - {"DefaultRPCPath", Const, 0}, - {"DefaultServer", Var, 0}, - {"Dial", Func, 0}, - {"DialHTTP", Func, 0}, - {"DialHTTPPath", Func, 0}, - {"ErrShutdown", Var, 0}, - {"HandleHTTP", Func, 0}, - {"NewClient", 
Func, 0}, - {"NewClientWithCodec", Func, 0}, - {"NewServer", Func, 0}, - {"Register", Func, 0}, - {"RegisterName", Func, 0}, - {"Request", Type, 0}, - {"Request.Seq", Field, 0}, - {"Request.ServiceMethod", Field, 0}, - {"Response", Type, 0}, - {"Response.Error", Field, 0}, - {"Response.Seq", Field, 0}, - {"Response.ServiceMethod", Field, 0}, - {"ServeCodec", Func, 0}, - {"ServeConn", Func, 0}, - {"ServeRequest", Func, 0}, - {"Server", Type, 0}, - {"ServerCodec", Type, 0}, - {"ServerError", Type, 0}, + {"(*Client).Call", Method, 0, ""}, + {"(*Client).Close", Method, 0, ""}, + {"(*Client).Go", Method, 0, ""}, + {"(*Server).Accept", Method, 0, ""}, + {"(*Server).HandleHTTP", Method, 0, ""}, + {"(*Server).Register", Method, 0, ""}, + {"(*Server).RegisterName", Method, 0, ""}, + {"(*Server).ServeCodec", Method, 0, ""}, + {"(*Server).ServeConn", Method, 0, ""}, + {"(*Server).ServeHTTP", Method, 0, ""}, + {"(*Server).ServeRequest", Method, 0, ""}, + {"(ServerError).Error", Method, 0, ""}, + {"Accept", Func, 0, "func(lis net.Listener)"}, + {"Call", Type, 0, ""}, + {"Call.Args", Field, 0, ""}, + {"Call.Done", Field, 0, ""}, + {"Call.Error", Field, 0, ""}, + {"Call.Reply", Field, 0, ""}, + {"Call.ServiceMethod", Field, 0, ""}, + {"Client", Type, 0, ""}, + {"ClientCodec", Type, 0, ""}, + {"DefaultDebugPath", Const, 0, ""}, + {"DefaultRPCPath", Const, 0, ""}, + {"DefaultServer", Var, 0, ""}, + {"Dial", Func, 0, "func(network string, address string) (*Client, error)"}, + {"DialHTTP", Func, 0, "func(network string, address string) (*Client, error)"}, + {"DialHTTPPath", Func, 0, "func(network string, address string, path string) (*Client, error)"}, + {"ErrShutdown", Var, 0, ""}, + {"HandleHTTP", Func, 0, "func()"}, + {"NewClient", Func, 0, "func(conn io.ReadWriteCloser) *Client"}, + {"NewClientWithCodec", Func, 0, "func(codec ClientCodec) *Client"}, + {"NewServer", Func, 0, "func() *Server"}, + {"Register", Func, 0, "func(rcvr any) error"}, + {"RegisterName", Func, 0, "func(name string, rcvr any) error"}, + {"Request", Type, 0, ""}, + {"Request.Seq", Field, 0, ""}, + {"Request.ServiceMethod", Field, 0, ""}, + {"Response", Type, 0, ""}, + {"Response.Error", Field, 0, ""}, + {"Response.Seq", Field, 0, ""}, + {"Response.ServiceMethod", Field, 0, ""}, + {"ServeCodec", Func, 0, "func(codec ServerCodec)"}, + {"ServeConn", Func, 0, "func(conn io.ReadWriteCloser)"}, + {"ServeRequest", Func, 0, "func(codec ServerCodec) error"}, + {"Server", Type, 0, ""}, + {"ServerCodec", Type, 0, ""}, + {"ServerError", Type, 0, ""}, }, "net/rpc/jsonrpc": { - {"Dial", Func, 0}, - {"NewClient", Func, 0}, - {"NewClientCodec", Func, 0}, - {"NewServerCodec", Func, 0}, - {"ServeConn", Func, 0}, + {"Dial", Func, 0, "func(network string, address string) (*rpc.Client, error)"}, + {"NewClient", Func, 0, "func(conn io.ReadWriteCloser) *rpc.Client"}, + {"NewClientCodec", Func, 0, "func(conn io.ReadWriteCloser) rpc.ClientCodec"}, + {"NewServerCodec", Func, 0, "func(conn io.ReadWriteCloser) rpc.ServerCodec"}, + {"ServeConn", Func, 0, "func(conn io.ReadWriteCloser)"}, }, "net/smtp": { - {"(*Client).Auth", Method, 0}, - {"(*Client).Close", Method, 2}, - {"(*Client).Data", Method, 0}, - {"(*Client).Extension", Method, 0}, - {"(*Client).Hello", Method, 1}, - {"(*Client).Mail", Method, 0}, - {"(*Client).Noop", Method, 10}, - {"(*Client).Quit", Method, 0}, - {"(*Client).Rcpt", Method, 0}, - {"(*Client).Reset", Method, 0}, - {"(*Client).StartTLS", Method, 0}, - {"(*Client).TLSConnectionState", Method, 5}, - {"(*Client).Verify", Method, 0}, - 
{"Auth", Type, 0}, - {"CRAMMD5Auth", Func, 0}, - {"Client", Type, 0}, - {"Client.Text", Field, 0}, - {"Dial", Func, 0}, - {"NewClient", Func, 0}, - {"PlainAuth", Func, 0}, - {"SendMail", Func, 0}, - {"ServerInfo", Type, 0}, - {"ServerInfo.Auth", Field, 0}, - {"ServerInfo.Name", Field, 0}, - {"ServerInfo.TLS", Field, 0}, + {"(*Client).Auth", Method, 0, ""}, + {"(*Client).Close", Method, 2, ""}, + {"(*Client).Data", Method, 0, ""}, + {"(*Client).Extension", Method, 0, ""}, + {"(*Client).Hello", Method, 1, ""}, + {"(*Client).Mail", Method, 0, ""}, + {"(*Client).Noop", Method, 10, ""}, + {"(*Client).Quit", Method, 0, ""}, + {"(*Client).Rcpt", Method, 0, ""}, + {"(*Client).Reset", Method, 0, ""}, + {"(*Client).StartTLS", Method, 0, ""}, + {"(*Client).TLSConnectionState", Method, 5, ""}, + {"(*Client).Verify", Method, 0, ""}, + {"Auth", Type, 0, ""}, + {"CRAMMD5Auth", Func, 0, "func(username string, secret string) Auth"}, + {"Client", Type, 0, ""}, + {"Client.Text", Field, 0, ""}, + {"Dial", Func, 0, "func(addr string) (*Client, error)"}, + {"NewClient", Func, 0, "func(conn net.Conn, host string) (*Client, error)"}, + {"PlainAuth", Func, 0, "func(identity string, username string, password string, host string) Auth"}, + {"SendMail", Func, 0, "func(addr string, a Auth, from string, to []string, msg []byte) error"}, + {"ServerInfo", Type, 0, ""}, + {"ServerInfo.Auth", Field, 0, ""}, + {"ServerInfo.Name", Field, 0, ""}, + {"ServerInfo.TLS", Field, 0, ""}, }, "net/textproto": { - {"(*Conn).Close", Method, 0}, - {"(*Conn).Cmd", Method, 0}, - {"(*Conn).DotReader", Method, 0}, - {"(*Conn).DotWriter", Method, 0}, - {"(*Conn).EndRequest", Method, 0}, - {"(*Conn).EndResponse", Method, 0}, - {"(*Conn).Next", Method, 0}, - {"(*Conn).PrintfLine", Method, 0}, - {"(*Conn).ReadCodeLine", Method, 0}, - {"(*Conn).ReadContinuedLine", Method, 0}, - {"(*Conn).ReadContinuedLineBytes", Method, 0}, - {"(*Conn).ReadDotBytes", Method, 0}, - {"(*Conn).ReadDotLines", Method, 0}, - {"(*Conn).ReadLine", Method, 0}, - {"(*Conn).ReadLineBytes", Method, 0}, - {"(*Conn).ReadMIMEHeader", Method, 0}, - {"(*Conn).ReadResponse", Method, 0}, - {"(*Conn).StartRequest", Method, 0}, - {"(*Conn).StartResponse", Method, 0}, - {"(*Error).Error", Method, 0}, - {"(*Pipeline).EndRequest", Method, 0}, - {"(*Pipeline).EndResponse", Method, 0}, - {"(*Pipeline).Next", Method, 0}, - {"(*Pipeline).StartRequest", Method, 0}, - {"(*Pipeline).StartResponse", Method, 0}, - {"(*Reader).DotReader", Method, 0}, - {"(*Reader).ReadCodeLine", Method, 0}, - {"(*Reader).ReadContinuedLine", Method, 0}, - {"(*Reader).ReadContinuedLineBytes", Method, 0}, - {"(*Reader).ReadDotBytes", Method, 0}, - {"(*Reader).ReadDotLines", Method, 0}, - {"(*Reader).ReadLine", Method, 0}, - {"(*Reader).ReadLineBytes", Method, 0}, - {"(*Reader).ReadMIMEHeader", Method, 0}, - {"(*Reader).ReadResponse", Method, 0}, - {"(*Writer).DotWriter", Method, 0}, - {"(*Writer).PrintfLine", Method, 0}, - {"(MIMEHeader).Add", Method, 0}, - {"(MIMEHeader).Del", Method, 0}, - {"(MIMEHeader).Get", Method, 0}, - {"(MIMEHeader).Set", Method, 0}, - {"(MIMEHeader).Values", Method, 14}, - {"(ProtocolError).Error", Method, 0}, - {"CanonicalMIMEHeaderKey", Func, 0}, - {"Conn", Type, 0}, - {"Conn.Pipeline", Field, 0}, - {"Conn.Reader", Field, 0}, - {"Conn.Writer", Field, 0}, - {"Dial", Func, 0}, - {"Error", Type, 0}, - {"Error.Code", Field, 0}, - {"Error.Msg", Field, 0}, - {"MIMEHeader", Type, 0}, - {"NewConn", Func, 0}, - {"NewReader", Func, 0}, - {"NewWriter", Func, 0}, - {"Pipeline", Type, 0}, - 
{"ProtocolError", Type, 0}, - {"Reader", Type, 0}, - {"Reader.R", Field, 0}, - {"TrimBytes", Func, 1}, - {"TrimString", Func, 1}, - {"Writer", Type, 0}, - {"Writer.W", Field, 0}, + {"(*Conn).Close", Method, 0, ""}, + {"(*Conn).Cmd", Method, 0, ""}, + {"(*Conn).DotReader", Method, 0, ""}, + {"(*Conn).DotWriter", Method, 0, ""}, + {"(*Conn).EndRequest", Method, 0, ""}, + {"(*Conn).EndResponse", Method, 0, ""}, + {"(*Conn).Next", Method, 0, ""}, + {"(*Conn).PrintfLine", Method, 0, ""}, + {"(*Conn).ReadCodeLine", Method, 0, ""}, + {"(*Conn).ReadContinuedLine", Method, 0, ""}, + {"(*Conn).ReadContinuedLineBytes", Method, 0, ""}, + {"(*Conn).ReadDotBytes", Method, 0, ""}, + {"(*Conn).ReadDotLines", Method, 0, ""}, + {"(*Conn).ReadLine", Method, 0, ""}, + {"(*Conn).ReadLineBytes", Method, 0, ""}, + {"(*Conn).ReadMIMEHeader", Method, 0, ""}, + {"(*Conn).ReadResponse", Method, 0, ""}, + {"(*Conn).StartRequest", Method, 0, ""}, + {"(*Conn).StartResponse", Method, 0, ""}, + {"(*Error).Error", Method, 0, ""}, + {"(*Pipeline).EndRequest", Method, 0, ""}, + {"(*Pipeline).EndResponse", Method, 0, ""}, + {"(*Pipeline).Next", Method, 0, ""}, + {"(*Pipeline).StartRequest", Method, 0, ""}, + {"(*Pipeline).StartResponse", Method, 0, ""}, + {"(*Reader).DotReader", Method, 0, ""}, + {"(*Reader).ReadCodeLine", Method, 0, ""}, + {"(*Reader).ReadContinuedLine", Method, 0, ""}, + {"(*Reader).ReadContinuedLineBytes", Method, 0, ""}, + {"(*Reader).ReadDotBytes", Method, 0, ""}, + {"(*Reader).ReadDotLines", Method, 0, ""}, + {"(*Reader).ReadLine", Method, 0, ""}, + {"(*Reader).ReadLineBytes", Method, 0, ""}, + {"(*Reader).ReadMIMEHeader", Method, 0, ""}, + {"(*Reader).ReadResponse", Method, 0, ""}, + {"(*Writer).DotWriter", Method, 0, ""}, + {"(*Writer).PrintfLine", Method, 0, ""}, + {"(MIMEHeader).Add", Method, 0, ""}, + {"(MIMEHeader).Del", Method, 0, ""}, + {"(MIMEHeader).Get", Method, 0, ""}, + {"(MIMEHeader).Set", Method, 0, ""}, + {"(MIMEHeader).Values", Method, 14, ""}, + {"(ProtocolError).Error", Method, 0, ""}, + {"CanonicalMIMEHeaderKey", Func, 0, "func(s string) string"}, + {"Conn", Type, 0, ""}, + {"Conn.Pipeline", Field, 0, ""}, + {"Conn.Reader", Field, 0, ""}, + {"Conn.Writer", Field, 0, ""}, + {"Dial", Func, 0, "func(network string, addr string) (*Conn, error)"}, + {"Error", Type, 0, ""}, + {"Error.Code", Field, 0, ""}, + {"Error.Msg", Field, 0, ""}, + {"MIMEHeader", Type, 0, ""}, + {"NewConn", Func, 0, "func(conn io.ReadWriteCloser) *Conn"}, + {"NewReader", Func, 0, "func(r *bufio.Reader) *Reader"}, + {"NewWriter", Func, 0, "func(w *bufio.Writer) *Writer"}, + {"Pipeline", Type, 0, ""}, + {"ProtocolError", Type, 0, ""}, + {"Reader", Type, 0, ""}, + {"Reader.R", Field, 0, ""}, + {"TrimBytes", Func, 1, "func(b []byte) []byte"}, + {"TrimString", Func, 1, "func(s string) string"}, + {"Writer", Type, 0, ""}, + {"Writer.W", Field, 0, ""}, }, "net/url": { - {"(*Error).Error", Method, 0}, - {"(*Error).Temporary", Method, 6}, - {"(*Error).Timeout", Method, 6}, - {"(*Error).Unwrap", Method, 13}, - {"(*URL).AppendBinary", Method, 24}, - {"(*URL).EscapedFragment", Method, 15}, - {"(*URL).EscapedPath", Method, 5}, - {"(*URL).Hostname", Method, 8}, - {"(*URL).IsAbs", Method, 0}, - {"(*URL).JoinPath", Method, 19}, - {"(*URL).MarshalBinary", Method, 8}, - {"(*URL).Parse", Method, 0}, - {"(*URL).Port", Method, 8}, - {"(*URL).Query", Method, 0}, - {"(*URL).Redacted", Method, 15}, - {"(*URL).RequestURI", Method, 0}, - {"(*URL).ResolveReference", Method, 0}, - {"(*URL).String", Method, 0}, - {"(*URL).UnmarshalBinary", 
Method, 8}, - {"(*Userinfo).Password", Method, 0}, - {"(*Userinfo).String", Method, 0}, - {"(*Userinfo).Username", Method, 0}, - {"(EscapeError).Error", Method, 0}, - {"(InvalidHostError).Error", Method, 6}, - {"(Values).Add", Method, 0}, - {"(Values).Del", Method, 0}, - {"(Values).Encode", Method, 0}, - {"(Values).Get", Method, 0}, - {"(Values).Has", Method, 17}, - {"(Values).Set", Method, 0}, - {"Error", Type, 0}, - {"Error.Err", Field, 0}, - {"Error.Op", Field, 0}, - {"Error.URL", Field, 0}, - {"EscapeError", Type, 0}, - {"InvalidHostError", Type, 6}, - {"JoinPath", Func, 19}, - {"Parse", Func, 0}, - {"ParseQuery", Func, 0}, - {"ParseRequestURI", Func, 0}, - {"PathEscape", Func, 8}, - {"PathUnescape", Func, 8}, - {"QueryEscape", Func, 0}, - {"QueryUnescape", Func, 0}, - {"URL", Type, 0}, - {"URL.ForceQuery", Field, 7}, - {"URL.Fragment", Field, 0}, - {"URL.Host", Field, 0}, - {"URL.OmitHost", Field, 19}, - {"URL.Opaque", Field, 0}, - {"URL.Path", Field, 0}, - {"URL.RawFragment", Field, 15}, - {"URL.RawPath", Field, 5}, - {"URL.RawQuery", Field, 0}, - {"URL.Scheme", Field, 0}, - {"URL.User", Field, 0}, - {"User", Func, 0}, - {"UserPassword", Func, 0}, - {"Userinfo", Type, 0}, - {"Values", Type, 0}, + {"(*Error).Error", Method, 0, ""}, + {"(*Error).Temporary", Method, 6, ""}, + {"(*Error).Timeout", Method, 6, ""}, + {"(*Error).Unwrap", Method, 13, ""}, + {"(*URL).AppendBinary", Method, 24, ""}, + {"(*URL).EscapedFragment", Method, 15, ""}, + {"(*URL).EscapedPath", Method, 5, ""}, + {"(*URL).Hostname", Method, 8, ""}, + {"(*URL).IsAbs", Method, 0, ""}, + {"(*URL).JoinPath", Method, 19, ""}, + {"(*URL).MarshalBinary", Method, 8, ""}, + {"(*URL).Parse", Method, 0, ""}, + {"(*URL).Port", Method, 8, ""}, + {"(*URL).Query", Method, 0, ""}, + {"(*URL).Redacted", Method, 15, ""}, + {"(*URL).RequestURI", Method, 0, ""}, + {"(*URL).ResolveReference", Method, 0, ""}, + {"(*URL).String", Method, 0, ""}, + {"(*URL).UnmarshalBinary", Method, 8, ""}, + {"(*Userinfo).Password", Method, 0, ""}, + {"(*Userinfo).String", Method, 0, ""}, + {"(*Userinfo).Username", Method, 0, ""}, + {"(EscapeError).Error", Method, 0, ""}, + {"(InvalidHostError).Error", Method, 6, ""}, + {"(Values).Add", Method, 0, ""}, + {"(Values).Del", Method, 0, ""}, + {"(Values).Encode", Method, 0, ""}, + {"(Values).Get", Method, 0, ""}, + {"(Values).Has", Method, 17, ""}, + {"(Values).Set", Method, 0, ""}, + {"Error", Type, 0, ""}, + {"Error.Err", Field, 0, ""}, + {"Error.Op", Field, 0, ""}, + {"Error.URL", Field, 0, ""}, + {"EscapeError", Type, 0, ""}, + {"InvalidHostError", Type, 6, ""}, + {"JoinPath", Func, 19, "func(base string, elem ...string) (result string, err error)"}, + {"Parse", Func, 0, "func(rawURL string) (*URL, error)"}, + {"ParseQuery", Func, 0, "func(query string) (Values, error)"}, + {"ParseRequestURI", Func, 0, "func(rawURL string) (*URL, error)"}, + {"PathEscape", Func, 8, "func(s string) string"}, + {"PathUnescape", Func, 8, "func(s string) (string, error)"}, + {"QueryEscape", Func, 0, "func(s string) string"}, + {"QueryUnescape", Func, 0, "func(s string) (string, error)"}, + {"URL", Type, 0, ""}, + {"URL.ForceQuery", Field, 7, ""}, + {"URL.Fragment", Field, 0, ""}, + {"URL.Host", Field, 0, ""}, + {"URL.OmitHost", Field, 19, ""}, + {"URL.Opaque", Field, 0, ""}, + {"URL.Path", Field, 0, ""}, + {"URL.RawFragment", Field, 15, ""}, + {"URL.RawPath", Field, 5, ""}, + {"URL.RawQuery", Field, 0, ""}, + {"URL.Scheme", Field, 0, ""}, + {"URL.User", Field, 0, ""}, + {"User", Func, 0, "func(username string) *Userinfo"}, + 
{"UserPassword", Func, 0, "func(username string, password string) *Userinfo"}, + {"Userinfo", Type, 0, ""}, + {"Values", Type, 0, ""}, }, "os": { - {"(*File).Chdir", Method, 0}, - {"(*File).Chmod", Method, 0}, - {"(*File).Chown", Method, 0}, - {"(*File).Close", Method, 0}, - {"(*File).Fd", Method, 0}, - {"(*File).Name", Method, 0}, - {"(*File).Read", Method, 0}, - {"(*File).ReadAt", Method, 0}, - {"(*File).ReadDir", Method, 16}, - {"(*File).ReadFrom", Method, 15}, - {"(*File).Readdir", Method, 0}, - {"(*File).Readdirnames", Method, 0}, - {"(*File).Seek", Method, 0}, - {"(*File).SetDeadline", Method, 10}, - {"(*File).SetReadDeadline", Method, 10}, - {"(*File).SetWriteDeadline", Method, 10}, - {"(*File).Stat", Method, 0}, - {"(*File).Sync", Method, 0}, - {"(*File).SyscallConn", Method, 12}, - {"(*File).Truncate", Method, 0}, - {"(*File).Write", Method, 0}, - {"(*File).WriteAt", Method, 0}, - {"(*File).WriteString", Method, 0}, - {"(*File).WriteTo", Method, 22}, - {"(*LinkError).Error", Method, 0}, - {"(*LinkError).Unwrap", Method, 13}, - {"(*PathError).Error", Method, 0}, - {"(*PathError).Timeout", Method, 10}, - {"(*PathError).Unwrap", Method, 13}, - {"(*Process).Kill", Method, 0}, - {"(*Process).Release", Method, 0}, - {"(*Process).Signal", Method, 0}, - {"(*Process).Wait", Method, 0}, - {"(*ProcessState).ExitCode", Method, 12}, - {"(*ProcessState).Exited", Method, 0}, - {"(*ProcessState).Pid", Method, 0}, - {"(*ProcessState).String", Method, 0}, - {"(*ProcessState).Success", Method, 0}, - {"(*ProcessState).Sys", Method, 0}, - {"(*ProcessState).SysUsage", Method, 0}, - {"(*ProcessState).SystemTime", Method, 0}, - {"(*ProcessState).UserTime", Method, 0}, - {"(*Root).Chmod", Method, 25}, - {"(*Root).Chown", Method, 25}, - {"(*Root).Chtimes", Method, 25}, - {"(*Root).Close", Method, 24}, - {"(*Root).Create", Method, 24}, - {"(*Root).FS", Method, 24}, - {"(*Root).Lchown", Method, 25}, - {"(*Root).Link", Method, 25}, - {"(*Root).Lstat", Method, 24}, - {"(*Root).Mkdir", Method, 24}, - {"(*Root).Name", Method, 24}, - {"(*Root).Open", Method, 24}, - {"(*Root).OpenFile", Method, 24}, - {"(*Root).OpenRoot", Method, 24}, - {"(*Root).Readlink", Method, 25}, - {"(*Root).Remove", Method, 24}, - {"(*Root).Rename", Method, 25}, - {"(*Root).Stat", Method, 24}, - {"(*Root).Symlink", Method, 25}, - {"(*SyscallError).Error", Method, 0}, - {"(*SyscallError).Timeout", Method, 10}, - {"(*SyscallError).Unwrap", Method, 13}, - {"(FileMode).IsDir", Method, 0}, - {"(FileMode).IsRegular", Method, 1}, - {"(FileMode).Perm", Method, 0}, - {"(FileMode).String", Method, 0}, - {"Args", Var, 0}, - {"Chdir", Func, 0}, - {"Chmod", Func, 0}, - {"Chown", Func, 0}, - {"Chtimes", Func, 0}, - {"Clearenv", Func, 0}, - {"CopyFS", Func, 23}, - {"Create", Func, 0}, - {"CreateTemp", Func, 16}, - {"DevNull", Const, 0}, - {"DirEntry", Type, 16}, - {"DirFS", Func, 16}, - {"Environ", Func, 0}, - {"ErrClosed", Var, 8}, - {"ErrDeadlineExceeded", Var, 15}, - {"ErrExist", Var, 0}, - {"ErrInvalid", Var, 0}, - {"ErrNoDeadline", Var, 10}, - {"ErrNotExist", Var, 0}, - {"ErrPermission", Var, 0}, - {"ErrProcessDone", Var, 16}, - {"Executable", Func, 8}, - {"Exit", Func, 0}, - {"Expand", Func, 0}, - {"ExpandEnv", Func, 0}, - {"File", Type, 0}, - {"FileInfo", Type, 0}, - {"FileMode", Type, 0}, - {"FindProcess", Func, 0}, - {"Getegid", Func, 0}, - {"Getenv", Func, 0}, - {"Geteuid", Func, 0}, - {"Getgid", Func, 0}, - {"Getgroups", Func, 0}, - {"Getpagesize", Func, 0}, - {"Getpid", Func, 0}, - {"Getppid", Func, 0}, - {"Getuid", Func, 0}, - {"Getwd", 
Func, 0}, - {"Hostname", Func, 0}, - {"Interrupt", Var, 0}, - {"IsExist", Func, 0}, - {"IsNotExist", Func, 0}, - {"IsPathSeparator", Func, 0}, - {"IsPermission", Func, 0}, - {"IsTimeout", Func, 10}, - {"Kill", Var, 0}, - {"Lchown", Func, 0}, - {"Link", Func, 0}, - {"LinkError", Type, 0}, - {"LinkError.Err", Field, 0}, - {"LinkError.New", Field, 0}, - {"LinkError.Old", Field, 0}, - {"LinkError.Op", Field, 0}, - {"LookupEnv", Func, 5}, - {"Lstat", Func, 0}, - {"Mkdir", Func, 0}, - {"MkdirAll", Func, 0}, - {"MkdirTemp", Func, 16}, - {"ModeAppend", Const, 0}, - {"ModeCharDevice", Const, 0}, - {"ModeDevice", Const, 0}, - {"ModeDir", Const, 0}, - {"ModeExclusive", Const, 0}, - {"ModeIrregular", Const, 11}, - {"ModeNamedPipe", Const, 0}, - {"ModePerm", Const, 0}, - {"ModeSetgid", Const, 0}, - {"ModeSetuid", Const, 0}, - {"ModeSocket", Const, 0}, - {"ModeSticky", Const, 0}, - {"ModeSymlink", Const, 0}, - {"ModeTemporary", Const, 0}, - {"ModeType", Const, 0}, - {"NewFile", Func, 0}, - {"NewSyscallError", Func, 0}, - {"O_APPEND", Const, 0}, - {"O_CREATE", Const, 0}, - {"O_EXCL", Const, 0}, - {"O_RDONLY", Const, 0}, - {"O_RDWR", Const, 0}, - {"O_SYNC", Const, 0}, - {"O_TRUNC", Const, 0}, - {"O_WRONLY", Const, 0}, - {"Open", Func, 0}, - {"OpenFile", Func, 0}, - {"OpenInRoot", Func, 24}, - {"OpenRoot", Func, 24}, - {"PathError", Type, 0}, - {"PathError.Err", Field, 0}, - {"PathError.Op", Field, 0}, - {"PathError.Path", Field, 0}, - {"PathListSeparator", Const, 0}, - {"PathSeparator", Const, 0}, - {"Pipe", Func, 0}, - {"ProcAttr", Type, 0}, - {"ProcAttr.Dir", Field, 0}, - {"ProcAttr.Env", Field, 0}, - {"ProcAttr.Files", Field, 0}, - {"ProcAttr.Sys", Field, 0}, - {"Process", Type, 0}, - {"Process.Pid", Field, 0}, - {"ProcessState", Type, 0}, - {"ReadDir", Func, 16}, - {"ReadFile", Func, 16}, - {"Readlink", Func, 0}, - {"Remove", Func, 0}, - {"RemoveAll", Func, 0}, - {"Rename", Func, 0}, - {"Root", Type, 24}, - {"SEEK_CUR", Const, 0}, - {"SEEK_END", Const, 0}, - {"SEEK_SET", Const, 0}, - {"SameFile", Func, 0}, - {"Setenv", Func, 0}, - {"Signal", Type, 0}, - {"StartProcess", Func, 0}, - {"Stat", Func, 0}, - {"Stderr", Var, 0}, - {"Stdin", Var, 0}, - {"Stdout", Var, 0}, - {"Symlink", Func, 0}, - {"SyscallError", Type, 0}, - {"SyscallError.Err", Field, 0}, - {"SyscallError.Syscall", Field, 0}, - {"TempDir", Func, 0}, - {"Truncate", Func, 0}, - {"Unsetenv", Func, 4}, - {"UserCacheDir", Func, 11}, - {"UserConfigDir", Func, 13}, - {"UserHomeDir", Func, 12}, - {"WriteFile", Func, 16}, + {"(*File).Chdir", Method, 0, ""}, + {"(*File).Chmod", Method, 0, ""}, + {"(*File).Chown", Method, 0, ""}, + {"(*File).Close", Method, 0, ""}, + {"(*File).Fd", Method, 0, ""}, + {"(*File).Name", Method, 0, ""}, + {"(*File).Read", Method, 0, ""}, + {"(*File).ReadAt", Method, 0, ""}, + {"(*File).ReadDir", Method, 16, ""}, + {"(*File).ReadFrom", Method, 15, ""}, + {"(*File).Readdir", Method, 0, ""}, + {"(*File).Readdirnames", Method, 0, ""}, + {"(*File).Seek", Method, 0, ""}, + {"(*File).SetDeadline", Method, 10, ""}, + {"(*File).SetReadDeadline", Method, 10, ""}, + {"(*File).SetWriteDeadline", Method, 10, ""}, + {"(*File).Stat", Method, 0, ""}, + {"(*File).Sync", Method, 0, ""}, + {"(*File).SyscallConn", Method, 12, ""}, + {"(*File).Truncate", Method, 0, ""}, + {"(*File).Write", Method, 0, ""}, + {"(*File).WriteAt", Method, 0, ""}, + {"(*File).WriteString", Method, 0, ""}, + {"(*File).WriteTo", Method, 22, ""}, + {"(*LinkError).Error", Method, 0, ""}, + {"(*LinkError).Unwrap", Method, 13, ""}, + {"(*PathError).Error", Method, 0, 
""}, + {"(*PathError).Timeout", Method, 10, ""}, + {"(*PathError).Unwrap", Method, 13, ""}, + {"(*Process).Kill", Method, 0, ""}, + {"(*Process).Release", Method, 0, ""}, + {"(*Process).Signal", Method, 0, ""}, + {"(*Process).Wait", Method, 0, ""}, + {"(*ProcessState).ExitCode", Method, 12, ""}, + {"(*ProcessState).Exited", Method, 0, ""}, + {"(*ProcessState).Pid", Method, 0, ""}, + {"(*ProcessState).String", Method, 0, ""}, + {"(*ProcessState).Success", Method, 0, ""}, + {"(*ProcessState).Sys", Method, 0, ""}, + {"(*ProcessState).SysUsage", Method, 0, ""}, + {"(*ProcessState).SystemTime", Method, 0, ""}, + {"(*ProcessState).UserTime", Method, 0, ""}, + {"(*Root).Chmod", Method, 25, ""}, + {"(*Root).Chown", Method, 25, ""}, + {"(*Root).Chtimes", Method, 25, ""}, + {"(*Root).Close", Method, 24, ""}, + {"(*Root).Create", Method, 24, ""}, + {"(*Root).FS", Method, 24, ""}, + {"(*Root).Lchown", Method, 25, ""}, + {"(*Root).Link", Method, 25, ""}, + {"(*Root).Lstat", Method, 24, ""}, + {"(*Root).Mkdir", Method, 24, ""}, + {"(*Root).Name", Method, 24, ""}, + {"(*Root).Open", Method, 24, ""}, + {"(*Root).OpenFile", Method, 24, ""}, + {"(*Root).OpenRoot", Method, 24, ""}, + {"(*Root).Readlink", Method, 25, ""}, + {"(*Root).Remove", Method, 24, ""}, + {"(*Root).Rename", Method, 25, ""}, + {"(*Root).Stat", Method, 24, ""}, + {"(*Root).Symlink", Method, 25, ""}, + {"(*SyscallError).Error", Method, 0, ""}, + {"(*SyscallError).Timeout", Method, 10, ""}, + {"(*SyscallError).Unwrap", Method, 13, ""}, + {"(FileMode).IsDir", Method, 0, ""}, + {"(FileMode).IsRegular", Method, 1, ""}, + {"(FileMode).Perm", Method, 0, ""}, + {"(FileMode).String", Method, 0, ""}, + {"Args", Var, 0, ""}, + {"Chdir", Func, 0, "func(dir string) error"}, + {"Chmod", Func, 0, "func(name string, mode FileMode) error"}, + {"Chown", Func, 0, "func(name string, uid int, gid int) error"}, + {"Chtimes", Func, 0, "func(name string, atime time.Time, mtime time.Time) error"}, + {"Clearenv", Func, 0, "func()"}, + {"CopyFS", Func, 23, "func(dir string, fsys fs.FS) error"}, + {"Create", Func, 0, "func(name string) (*File, error)"}, + {"CreateTemp", Func, 16, "func(dir string, pattern string) (*File, error)"}, + {"DevNull", Const, 0, ""}, + {"DirEntry", Type, 16, ""}, + {"DirFS", Func, 16, "func(dir string) fs.FS"}, + {"Environ", Func, 0, "func() []string"}, + {"ErrClosed", Var, 8, ""}, + {"ErrDeadlineExceeded", Var, 15, ""}, + {"ErrExist", Var, 0, ""}, + {"ErrInvalid", Var, 0, ""}, + {"ErrNoDeadline", Var, 10, ""}, + {"ErrNotExist", Var, 0, ""}, + {"ErrPermission", Var, 0, ""}, + {"ErrProcessDone", Var, 16, ""}, + {"Executable", Func, 8, "func() (string, error)"}, + {"Exit", Func, 0, "func(code int)"}, + {"Expand", Func, 0, "func(s string, mapping func(string) string) string"}, + {"ExpandEnv", Func, 0, "func(s string) string"}, + {"File", Type, 0, ""}, + {"FileInfo", Type, 0, ""}, + {"FileMode", Type, 0, ""}, + {"FindProcess", Func, 0, "func(pid int) (*Process, error)"}, + {"Getegid", Func, 0, "func() int"}, + {"Getenv", Func, 0, "func(key string) string"}, + {"Geteuid", Func, 0, "func() int"}, + {"Getgid", Func, 0, "func() int"}, + {"Getgroups", Func, 0, "func() ([]int, error)"}, + {"Getpagesize", Func, 0, "func() int"}, + {"Getpid", Func, 0, "func() int"}, + {"Getppid", Func, 0, "func() int"}, + {"Getuid", Func, 0, "func() int"}, + {"Getwd", Func, 0, "func() (dir string, err error)"}, + {"Hostname", Func, 0, "func() (name string, err error)"}, + {"Interrupt", Var, 0, ""}, + {"IsExist", Func, 0, "func(err error) bool"}, + {"IsNotExist", Func, 
0, "func(err error) bool"}, + {"IsPathSeparator", Func, 0, "func(c uint8) bool"}, + {"IsPermission", Func, 0, "func(err error) bool"}, + {"IsTimeout", Func, 10, "func(err error) bool"}, + {"Kill", Var, 0, ""}, + {"Lchown", Func, 0, "func(name string, uid int, gid int) error"}, + {"Link", Func, 0, "func(oldname string, newname string) error"}, + {"LinkError", Type, 0, ""}, + {"LinkError.Err", Field, 0, ""}, + {"LinkError.New", Field, 0, ""}, + {"LinkError.Old", Field, 0, ""}, + {"LinkError.Op", Field, 0, ""}, + {"LookupEnv", Func, 5, "func(key string) (string, bool)"}, + {"Lstat", Func, 0, "func(name string) (FileInfo, error)"}, + {"Mkdir", Func, 0, "func(name string, perm FileMode) error"}, + {"MkdirAll", Func, 0, "func(path string, perm FileMode) error"}, + {"MkdirTemp", Func, 16, "func(dir string, pattern string) (string, error)"}, + {"ModeAppend", Const, 0, ""}, + {"ModeCharDevice", Const, 0, ""}, + {"ModeDevice", Const, 0, ""}, + {"ModeDir", Const, 0, ""}, + {"ModeExclusive", Const, 0, ""}, + {"ModeIrregular", Const, 11, ""}, + {"ModeNamedPipe", Const, 0, ""}, + {"ModePerm", Const, 0, ""}, + {"ModeSetgid", Const, 0, ""}, + {"ModeSetuid", Const, 0, ""}, + {"ModeSocket", Const, 0, ""}, + {"ModeSticky", Const, 0, ""}, + {"ModeSymlink", Const, 0, ""}, + {"ModeTemporary", Const, 0, ""}, + {"ModeType", Const, 0, ""}, + {"NewFile", Func, 0, "func(fd uintptr, name string) *File"}, + {"NewSyscallError", Func, 0, "func(syscall string, err error) error"}, + {"O_APPEND", Const, 0, ""}, + {"O_CREATE", Const, 0, ""}, + {"O_EXCL", Const, 0, ""}, + {"O_RDONLY", Const, 0, ""}, + {"O_RDWR", Const, 0, ""}, + {"O_SYNC", Const, 0, ""}, + {"O_TRUNC", Const, 0, ""}, + {"O_WRONLY", Const, 0, ""}, + {"Open", Func, 0, "func(name string) (*File, error)"}, + {"OpenFile", Func, 0, "func(name string, flag int, perm FileMode) (*File, error)"}, + {"OpenInRoot", Func, 24, "func(dir string, name string) (*File, error)"}, + {"OpenRoot", Func, 24, "func(name string) (*Root, error)"}, + {"PathError", Type, 0, ""}, + {"PathError.Err", Field, 0, ""}, + {"PathError.Op", Field, 0, ""}, + {"PathError.Path", Field, 0, ""}, + {"PathListSeparator", Const, 0, ""}, + {"PathSeparator", Const, 0, ""}, + {"Pipe", Func, 0, "func() (r *File, w *File, err error)"}, + {"ProcAttr", Type, 0, ""}, + {"ProcAttr.Dir", Field, 0, ""}, + {"ProcAttr.Env", Field, 0, ""}, + {"ProcAttr.Files", Field, 0, ""}, + {"ProcAttr.Sys", Field, 0, ""}, + {"Process", Type, 0, ""}, + {"Process.Pid", Field, 0, ""}, + {"ProcessState", Type, 0, ""}, + {"ReadDir", Func, 16, "func(name string) ([]DirEntry, error)"}, + {"ReadFile", Func, 16, "func(name string) ([]byte, error)"}, + {"Readlink", Func, 0, "func(name string) (string, error)"}, + {"Remove", Func, 0, "func(name string) error"}, + {"RemoveAll", Func, 0, "func(path string) error"}, + {"Rename", Func, 0, "func(oldpath string, newpath string) error"}, + {"Root", Type, 24, ""}, + {"SEEK_CUR", Const, 0, ""}, + {"SEEK_END", Const, 0, ""}, + {"SEEK_SET", Const, 0, ""}, + {"SameFile", Func, 0, "func(fi1 FileInfo, fi2 FileInfo) bool"}, + {"Setenv", Func, 0, "func(key string, value string) error"}, + {"Signal", Type, 0, ""}, + {"StartProcess", Func, 0, "func(name string, argv []string, attr *ProcAttr) (*Process, error)"}, + {"Stat", Func, 0, "func(name string) (FileInfo, error)"}, + {"Stderr", Var, 0, ""}, + {"Stdin", Var, 0, ""}, + {"Stdout", Var, 0, ""}, + {"Symlink", Func, 0, "func(oldname string, newname string) error"}, + {"SyscallError", Type, 0, ""}, + {"SyscallError.Err", Field, 0, ""}, + 
{"SyscallError.Syscall", Field, 0, ""}, + {"TempDir", Func, 0, "func() string"}, + {"Truncate", Func, 0, "func(name string, size int64) error"}, + {"Unsetenv", Func, 4, "func(key string) error"}, + {"UserCacheDir", Func, 11, "func() (string, error)"}, + {"UserConfigDir", Func, 13, "func() (string, error)"}, + {"UserHomeDir", Func, 12, "func() (string, error)"}, + {"WriteFile", Func, 16, "func(name string, data []byte, perm FileMode) error"}, }, "os/exec": { - {"(*Cmd).CombinedOutput", Method, 0}, - {"(*Cmd).Environ", Method, 19}, - {"(*Cmd).Output", Method, 0}, - {"(*Cmd).Run", Method, 0}, - {"(*Cmd).Start", Method, 0}, - {"(*Cmd).StderrPipe", Method, 0}, - {"(*Cmd).StdinPipe", Method, 0}, - {"(*Cmd).StdoutPipe", Method, 0}, - {"(*Cmd).String", Method, 13}, - {"(*Cmd).Wait", Method, 0}, - {"(*Error).Error", Method, 0}, - {"(*Error).Unwrap", Method, 13}, - {"(*ExitError).Error", Method, 0}, - {"(ExitError).ExitCode", Method, 12}, - {"(ExitError).Exited", Method, 0}, - {"(ExitError).Pid", Method, 0}, - {"(ExitError).String", Method, 0}, - {"(ExitError).Success", Method, 0}, - {"(ExitError).Sys", Method, 0}, - {"(ExitError).SysUsage", Method, 0}, - {"(ExitError).SystemTime", Method, 0}, - {"(ExitError).UserTime", Method, 0}, - {"Cmd", Type, 0}, - {"Cmd.Args", Field, 0}, - {"Cmd.Cancel", Field, 20}, - {"Cmd.Dir", Field, 0}, - {"Cmd.Env", Field, 0}, - {"Cmd.Err", Field, 19}, - {"Cmd.ExtraFiles", Field, 0}, - {"Cmd.Path", Field, 0}, - {"Cmd.Process", Field, 0}, - {"Cmd.ProcessState", Field, 0}, - {"Cmd.Stderr", Field, 0}, - {"Cmd.Stdin", Field, 0}, - {"Cmd.Stdout", Field, 0}, - {"Cmd.SysProcAttr", Field, 0}, - {"Cmd.WaitDelay", Field, 20}, - {"Command", Func, 0}, - {"CommandContext", Func, 7}, - {"ErrDot", Var, 19}, - {"ErrNotFound", Var, 0}, - {"ErrWaitDelay", Var, 20}, - {"Error", Type, 0}, - {"Error.Err", Field, 0}, - {"Error.Name", Field, 0}, - {"ExitError", Type, 0}, - {"ExitError.ProcessState", Field, 0}, - {"ExitError.Stderr", Field, 6}, - {"LookPath", Func, 0}, + {"(*Cmd).CombinedOutput", Method, 0, ""}, + {"(*Cmd).Environ", Method, 19, ""}, + {"(*Cmd).Output", Method, 0, ""}, + {"(*Cmd).Run", Method, 0, ""}, + {"(*Cmd).Start", Method, 0, ""}, + {"(*Cmd).StderrPipe", Method, 0, ""}, + {"(*Cmd).StdinPipe", Method, 0, ""}, + {"(*Cmd).StdoutPipe", Method, 0, ""}, + {"(*Cmd).String", Method, 13, ""}, + {"(*Cmd).Wait", Method, 0, ""}, + {"(*Error).Error", Method, 0, ""}, + {"(*Error).Unwrap", Method, 13, ""}, + {"(*ExitError).Error", Method, 0, ""}, + {"(ExitError).ExitCode", Method, 12, ""}, + {"(ExitError).Exited", Method, 0, ""}, + {"(ExitError).Pid", Method, 0, ""}, + {"(ExitError).String", Method, 0, ""}, + {"(ExitError).Success", Method, 0, ""}, + {"(ExitError).Sys", Method, 0, ""}, + {"(ExitError).SysUsage", Method, 0, ""}, + {"(ExitError).SystemTime", Method, 0, ""}, + {"(ExitError).UserTime", Method, 0, ""}, + {"Cmd", Type, 0, ""}, + {"Cmd.Args", Field, 0, ""}, + {"Cmd.Cancel", Field, 20, ""}, + {"Cmd.Dir", Field, 0, ""}, + {"Cmd.Env", Field, 0, ""}, + {"Cmd.Err", Field, 19, ""}, + {"Cmd.ExtraFiles", Field, 0, ""}, + {"Cmd.Path", Field, 0, ""}, + {"Cmd.Process", Field, 0, ""}, + {"Cmd.ProcessState", Field, 0, ""}, + {"Cmd.Stderr", Field, 0, ""}, + {"Cmd.Stdin", Field, 0, ""}, + {"Cmd.Stdout", Field, 0, ""}, + {"Cmd.SysProcAttr", Field, 0, ""}, + {"Cmd.WaitDelay", Field, 20, ""}, + {"Command", Func, 0, "func(name string, arg ...string) *Cmd"}, + {"CommandContext", Func, 7, "func(ctx context.Context, name string, arg ...string) *Cmd"}, + {"ErrDot", Var, 19, ""}, + {"ErrNotFound", Var, 
0, ""}, + {"ErrWaitDelay", Var, 20, ""}, + {"Error", Type, 0, ""}, + {"Error.Err", Field, 0, ""}, + {"Error.Name", Field, 0, ""}, + {"ExitError", Type, 0, ""}, + {"ExitError.ProcessState", Field, 0, ""}, + {"ExitError.Stderr", Field, 6, ""}, + {"LookPath", Func, 0, "func(file string) (string, error)"}, }, "os/signal": { - {"Ignore", Func, 5}, - {"Ignored", Func, 11}, - {"Notify", Func, 0}, - {"NotifyContext", Func, 16}, - {"Reset", Func, 5}, - {"Stop", Func, 1}, + {"Ignore", Func, 5, "func(sig ...os.Signal)"}, + {"Ignored", Func, 11, "func(sig os.Signal) bool"}, + {"Notify", Func, 0, "func(c chan<- os.Signal, sig ...os.Signal)"}, + {"NotifyContext", Func, 16, "func(parent context.Context, signals ...os.Signal) (ctx context.Context, stop context.CancelFunc)"}, + {"Reset", Func, 5, "func(sig ...os.Signal)"}, + {"Stop", Func, 1, "func(c chan<- os.Signal)"}, }, "os/user": { - {"(*User).GroupIds", Method, 7}, - {"(UnknownGroupError).Error", Method, 7}, - {"(UnknownGroupIdError).Error", Method, 7}, - {"(UnknownUserError).Error", Method, 0}, - {"(UnknownUserIdError).Error", Method, 0}, - {"Current", Func, 0}, - {"Group", Type, 7}, - {"Group.Gid", Field, 7}, - {"Group.Name", Field, 7}, - {"Lookup", Func, 0}, - {"LookupGroup", Func, 7}, - {"LookupGroupId", Func, 7}, - {"LookupId", Func, 0}, - {"UnknownGroupError", Type, 7}, - {"UnknownGroupIdError", Type, 7}, - {"UnknownUserError", Type, 0}, - {"UnknownUserIdError", Type, 0}, - {"User", Type, 0}, - {"User.Gid", Field, 0}, - {"User.HomeDir", Field, 0}, - {"User.Name", Field, 0}, - {"User.Uid", Field, 0}, - {"User.Username", Field, 0}, + {"(*User).GroupIds", Method, 7, ""}, + {"(UnknownGroupError).Error", Method, 7, ""}, + {"(UnknownGroupIdError).Error", Method, 7, ""}, + {"(UnknownUserError).Error", Method, 0, ""}, + {"(UnknownUserIdError).Error", Method, 0, ""}, + {"Current", Func, 0, "func() (*User, error)"}, + {"Group", Type, 7, ""}, + {"Group.Gid", Field, 7, ""}, + {"Group.Name", Field, 7, ""}, + {"Lookup", Func, 0, "func(username string) (*User, error)"}, + {"LookupGroup", Func, 7, "func(name string) (*Group, error)"}, + {"LookupGroupId", Func, 7, "func(gid string) (*Group, error)"}, + {"LookupId", Func, 0, "func(uid string) (*User, error)"}, + {"UnknownGroupError", Type, 7, ""}, + {"UnknownGroupIdError", Type, 7, ""}, + {"UnknownUserError", Type, 0, ""}, + {"UnknownUserIdError", Type, 0, ""}, + {"User", Type, 0, ""}, + {"User.Gid", Field, 0, ""}, + {"User.HomeDir", Field, 0, ""}, + {"User.Name", Field, 0, ""}, + {"User.Uid", Field, 0, ""}, + {"User.Username", Field, 0, ""}, }, "path": { - {"Base", Func, 0}, - {"Clean", Func, 0}, - {"Dir", Func, 0}, - {"ErrBadPattern", Var, 0}, - {"Ext", Func, 0}, - {"IsAbs", Func, 0}, - {"Join", Func, 0}, - {"Match", Func, 0}, - {"Split", Func, 0}, + {"Base", Func, 0, "func(path string) string"}, + {"Clean", Func, 0, "func(path string) string"}, + {"Dir", Func, 0, "func(path string) string"}, + {"ErrBadPattern", Var, 0, ""}, + {"Ext", Func, 0, "func(path string) string"}, + {"IsAbs", Func, 0, "func(path string) bool"}, + {"Join", Func, 0, "func(elem ...string) string"}, + {"Match", Func, 0, "func(pattern string, name string) (matched bool, err error)"}, + {"Split", Func, 0, "func(path string) (dir string, file string)"}, }, "path/filepath": { - {"Abs", Func, 0}, - {"Base", Func, 0}, - {"Clean", Func, 0}, - {"Dir", Func, 0}, - {"ErrBadPattern", Var, 0}, - {"EvalSymlinks", Func, 0}, - {"Ext", Func, 0}, - {"FromSlash", Func, 0}, - {"Glob", Func, 0}, - {"HasPrefix", Func, 0}, - {"IsAbs", Func, 0}, - {"IsLocal", 
Func, 20}, - {"Join", Func, 0}, - {"ListSeparator", Const, 0}, - {"Localize", Func, 23}, - {"Match", Func, 0}, - {"Rel", Func, 0}, - {"Separator", Const, 0}, - {"SkipAll", Var, 20}, - {"SkipDir", Var, 0}, - {"Split", Func, 0}, - {"SplitList", Func, 0}, - {"ToSlash", Func, 0}, - {"VolumeName", Func, 0}, - {"Walk", Func, 0}, - {"WalkDir", Func, 16}, - {"WalkFunc", Type, 0}, + {"Abs", Func, 0, "func(path string) (string, error)"}, + {"Base", Func, 0, "func(path string) string"}, + {"Clean", Func, 0, "func(path string) string"}, + {"Dir", Func, 0, "func(path string) string"}, + {"ErrBadPattern", Var, 0, ""}, + {"EvalSymlinks", Func, 0, "func(path string) (string, error)"}, + {"Ext", Func, 0, "func(path string) string"}, + {"FromSlash", Func, 0, "func(path string) string"}, + {"Glob", Func, 0, "func(pattern string) (matches []string, err error)"}, + {"HasPrefix", Func, 0, "func(p string, prefix string) bool"}, + {"IsAbs", Func, 0, "func(path string) bool"}, + {"IsLocal", Func, 20, "func(path string) bool"}, + {"Join", Func, 0, "func(elem ...string) string"}, + {"ListSeparator", Const, 0, ""}, + {"Localize", Func, 23, "func(path string) (string, error)"}, + {"Match", Func, 0, "func(pattern string, name string) (matched bool, err error)"}, + {"Rel", Func, 0, "func(basepath string, targpath string) (string, error)"}, + {"Separator", Const, 0, ""}, + {"SkipAll", Var, 20, ""}, + {"SkipDir", Var, 0, ""}, + {"Split", Func, 0, "func(path string) (dir string, file string)"}, + {"SplitList", Func, 0, "func(path string) []string"}, + {"ToSlash", Func, 0, "func(path string) string"}, + {"VolumeName", Func, 0, "func(path string) string"}, + {"Walk", Func, 0, "func(root string, fn WalkFunc) error"}, + {"WalkDir", Func, 16, "func(root string, fn fs.WalkDirFunc) error"}, + {"WalkFunc", Type, 0, ""}, }, "plugin": { - {"(*Plugin).Lookup", Method, 8}, - {"Open", Func, 8}, - {"Plugin", Type, 8}, - {"Symbol", Type, 8}, + {"(*Plugin).Lookup", Method, 8, ""}, + {"Open", Func, 8, "func(path string) (*Plugin, error)"}, + {"Plugin", Type, 8, ""}, + {"Symbol", Type, 8, ""}, }, "reflect": { - {"(*MapIter).Key", Method, 12}, - {"(*MapIter).Next", Method, 12}, - {"(*MapIter).Reset", Method, 18}, - {"(*MapIter).Value", Method, 12}, - {"(*ValueError).Error", Method, 0}, - {"(ChanDir).String", Method, 0}, - {"(Kind).String", Method, 0}, - {"(Method).IsExported", Method, 17}, - {"(StructField).IsExported", Method, 17}, - {"(StructTag).Get", Method, 0}, - {"(StructTag).Lookup", Method, 7}, - {"(Value).Addr", Method, 0}, - {"(Value).Bool", Method, 0}, - {"(Value).Bytes", Method, 0}, - {"(Value).Call", Method, 0}, - {"(Value).CallSlice", Method, 0}, - {"(Value).CanAddr", Method, 0}, - {"(Value).CanComplex", Method, 18}, - {"(Value).CanConvert", Method, 17}, - {"(Value).CanFloat", Method, 18}, - {"(Value).CanInt", Method, 18}, - {"(Value).CanInterface", Method, 0}, - {"(Value).CanSet", Method, 0}, - {"(Value).CanUint", Method, 18}, - {"(Value).Cap", Method, 0}, - {"(Value).Clear", Method, 21}, - {"(Value).Close", Method, 0}, - {"(Value).Comparable", Method, 20}, - {"(Value).Complex", Method, 0}, - {"(Value).Convert", Method, 1}, - {"(Value).Elem", Method, 0}, - {"(Value).Equal", Method, 20}, - {"(Value).Field", Method, 0}, - {"(Value).FieldByIndex", Method, 0}, - {"(Value).FieldByIndexErr", Method, 18}, - {"(Value).FieldByName", Method, 0}, - {"(Value).FieldByNameFunc", Method, 0}, - {"(Value).Float", Method, 0}, - {"(Value).Grow", Method, 20}, - {"(Value).Index", Method, 0}, - {"(Value).Int", Method, 0}, - {"(Value).Interface", 
Method, 0}, - {"(Value).InterfaceData", Method, 0}, - {"(Value).IsNil", Method, 0}, - {"(Value).IsValid", Method, 0}, - {"(Value).IsZero", Method, 13}, - {"(Value).Kind", Method, 0}, - {"(Value).Len", Method, 0}, - {"(Value).MapIndex", Method, 0}, - {"(Value).MapKeys", Method, 0}, - {"(Value).MapRange", Method, 12}, - {"(Value).Method", Method, 0}, - {"(Value).MethodByName", Method, 0}, - {"(Value).NumField", Method, 0}, - {"(Value).NumMethod", Method, 0}, - {"(Value).OverflowComplex", Method, 0}, - {"(Value).OverflowFloat", Method, 0}, - {"(Value).OverflowInt", Method, 0}, - {"(Value).OverflowUint", Method, 0}, - {"(Value).Pointer", Method, 0}, - {"(Value).Recv", Method, 0}, - {"(Value).Send", Method, 0}, - {"(Value).Seq", Method, 23}, - {"(Value).Seq2", Method, 23}, - {"(Value).Set", Method, 0}, - {"(Value).SetBool", Method, 0}, - {"(Value).SetBytes", Method, 0}, - {"(Value).SetCap", Method, 2}, - {"(Value).SetComplex", Method, 0}, - {"(Value).SetFloat", Method, 0}, - {"(Value).SetInt", Method, 0}, - {"(Value).SetIterKey", Method, 18}, - {"(Value).SetIterValue", Method, 18}, - {"(Value).SetLen", Method, 0}, - {"(Value).SetMapIndex", Method, 0}, - {"(Value).SetPointer", Method, 0}, - {"(Value).SetString", Method, 0}, - {"(Value).SetUint", Method, 0}, - {"(Value).SetZero", Method, 20}, - {"(Value).Slice", Method, 0}, - {"(Value).Slice3", Method, 2}, - {"(Value).String", Method, 0}, - {"(Value).TryRecv", Method, 0}, - {"(Value).TrySend", Method, 0}, - {"(Value).Type", Method, 0}, - {"(Value).Uint", Method, 0}, - {"(Value).UnsafeAddr", Method, 0}, - {"(Value).UnsafePointer", Method, 18}, - {"Append", Func, 0}, - {"AppendSlice", Func, 0}, - {"Array", Const, 0}, - {"ArrayOf", Func, 5}, - {"Bool", Const, 0}, - {"BothDir", Const, 0}, - {"Chan", Const, 0}, - {"ChanDir", Type, 0}, - {"ChanOf", Func, 1}, - {"Complex128", Const, 0}, - {"Complex64", Const, 0}, - {"Copy", Func, 0}, - {"DeepEqual", Func, 0}, - {"Float32", Const, 0}, - {"Float64", Const, 0}, - {"Func", Const, 0}, - {"FuncOf", Func, 5}, - {"Indirect", Func, 0}, - {"Int", Const, 0}, - {"Int16", Const, 0}, - {"Int32", Const, 0}, - {"Int64", Const, 0}, - {"Int8", Const, 0}, - {"Interface", Const, 0}, - {"Invalid", Const, 0}, - {"Kind", Type, 0}, - {"MakeChan", Func, 0}, - {"MakeFunc", Func, 1}, - {"MakeMap", Func, 0}, - {"MakeMapWithSize", Func, 9}, - {"MakeSlice", Func, 0}, - {"Map", Const, 0}, - {"MapIter", Type, 12}, - {"MapOf", Func, 1}, - {"Method", Type, 0}, - {"Method.Func", Field, 0}, - {"Method.Index", Field, 0}, - {"Method.Name", Field, 0}, - {"Method.PkgPath", Field, 0}, - {"Method.Type", Field, 0}, - {"New", Func, 0}, - {"NewAt", Func, 0}, - {"Pointer", Const, 18}, - {"PointerTo", Func, 18}, - {"Ptr", Const, 0}, - {"PtrTo", Func, 0}, - {"RecvDir", Const, 0}, - {"Select", Func, 1}, - {"SelectCase", Type, 1}, - {"SelectCase.Chan", Field, 1}, - {"SelectCase.Dir", Field, 1}, - {"SelectCase.Send", Field, 1}, - {"SelectDefault", Const, 1}, - {"SelectDir", Type, 1}, - {"SelectRecv", Const, 1}, - {"SelectSend", Const, 1}, - {"SendDir", Const, 0}, - {"Slice", Const, 0}, - {"SliceAt", Func, 23}, - {"SliceHeader", Type, 0}, - {"SliceHeader.Cap", Field, 0}, - {"SliceHeader.Data", Field, 0}, - {"SliceHeader.Len", Field, 0}, - {"SliceOf", Func, 1}, - {"String", Const, 0}, - {"StringHeader", Type, 0}, - {"StringHeader.Data", Field, 0}, - {"StringHeader.Len", Field, 0}, - {"Struct", Const, 0}, - {"StructField", Type, 0}, - {"StructField.Anonymous", Field, 0}, - {"StructField.Index", Field, 0}, - {"StructField.Name", Field, 0}, - 
{"StructField.Offset", Field, 0}, - {"StructField.PkgPath", Field, 0}, - {"StructField.Tag", Field, 0}, - {"StructField.Type", Field, 0}, - {"StructOf", Func, 7}, - {"StructTag", Type, 0}, - {"Swapper", Func, 8}, - {"Type", Type, 0}, - {"TypeFor", Func, 22}, - {"TypeOf", Func, 0}, - {"Uint", Const, 0}, - {"Uint16", Const, 0}, - {"Uint32", Const, 0}, - {"Uint64", Const, 0}, - {"Uint8", Const, 0}, - {"Uintptr", Const, 0}, - {"UnsafePointer", Const, 0}, - {"Value", Type, 0}, - {"ValueError", Type, 0}, - {"ValueError.Kind", Field, 0}, - {"ValueError.Method", Field, 0}, - {"ValueOf", Func, 0}, - {"VisibleFields", Func, 17}, - {"Zero", Func, 0}, + {"(*MapIter).Key", Method, 12, ""}, + {"(*MapIter).Next", Method, 12, ""}, + {"(*MapIter).Reset", Method, 18, ""}, + {"(*MapIter).Value", Method, 12, ""}, + {"(*ValueError).Error", Method, 0, ""}, + {"(ChanDir).String", Method, 0, ""}, + {"(Kind).String", Method, 0, ""}, + {"(Method).IsExported", Method, 17, ""}, + {"(StructField).IsExported", Method, 17, ""}, + {"(StructTag).Get", Method, 0, ""}, + {"(StructTag).Lookup", Method, 7, ""}, + {"(Value).Addr", Method, 0, ""}, + {"(Value).Bool", Method, 0, ""}, + {"(Value).Bytes", Method, 0, ""}, + {"(Value).Call", Method, 0, ""}, + {"(Value).CallSlice", Method, 0, ""}, + {"(Value).CanAddr", Method, 0, ""}, + {"(Value).CanComplex", Method, 18, ""}, + {"(Value).CanConvert", Method, 17, ""}, + {"(Value).CanFloat", Method, 18, ""}, + {"(Value).CanInt", Method, 18, ""}, + {"(Value).CanInterface", Method, 0, ""}, + {"(Value).CanSet", Method, 0, ""}, + {"(Value).CanUint", Method, 18, ""}, + {"(Value).Cap", Method, 0, ""}, + {"(Value).Clear", Method, 21, ""}, + {"(Value).Close", Method, 0, ""}, + {"(Value).Comparable", Method, 20, ""}, + {"(Value).Complex", Method, 0, ""}, + {"(Value).Convert", Method, 1, ""}, + {"(Value).Elem", Method, 0, ""}, + {"(Value).Equal", Method, 20, ""}, + {"(Value).Field", Method, 0, ""}, + {"(Value).FieldByIndex", Method, 0, ""}, + {"(Value).FieldByIndexErr", Method, 18, ""}, + {"(Value).FieldByName", Method, 0, ""}, + {"(Value).FieldByNameFunc", Method, 0, ""}, + {"(Value).Float", Method, 0, ""}, + {"(Value).Grow", Method, 20, ""}, + {"(Value).Index", Method, 0, ""}, + {"(Value).Int", Method, 0, ""}, + {"(Value).Interface", Method, 0, ""}, + {"(Value).InterfaceData", Method, 0, ""}, + {"(Value).IsNil", Method, 0, ""}, + {"(Value).IsValid", Method, 0, ""}, + {"(Value).IsZero", Method, 13, ""}, + {"(Value).Kind", Method, 0, ""}, + {"(Value).Len", Method, 0, ""}, + {"(Value).MapIndex", Method, 0, ""}, + {"(Value).MapKeys", Method, 0, ""}, + {"(Value).MapRange", Method, 12, ""}, + {"(Value).Method", Method, 0, ""}, + {"(Value).MethodByName", Method, 0, ""}, + {"(Value).NumField", Method, 0, ""}, + {"(Value).NumMethod", Method, 0, ""}, + {"(Value).OverflowComplex", Method, 0, ""}, + {"(Value).OverflowFloat", Method, 0, ""}, + {"(Value).OverflowInt", Method, 0, ""}, + {"(Value).OverflowUint", Method, 0, ""}, + {"(Value).Pointer", Method, 0, ""}, + {"(Value).Recv", Method, 0, ""}, + {"(Value).Send", Method, 0, ""}, + {"(Value).Seq", Method, 23, ""}, + {"(Value).Seq2", Method, 23, ""}, + {"(Value).Set", Method, 0, ""}, + {"(Value).SetBool", Method, 0, ""}, + {"(Value).SetBytes", Method, 0, ""}, + {"(Value).SetCap", Method, 2, ""}, + {"(Value).SetComplex", Method, 0, ""}, + {"(Value).SetFloat", Method, 0, ""}, + {"(Value).SetInt", Method, 0, ""}, + {"(Value).SetIterKey", Method, 18, ""}, + {"(Value).SetIterValue", Method, 18, ""}, + {"(Value).SetLen", Method, 0, ""}, + {"(Value).SetMapIndex", 
Method, 0, ""}, + {"(Value).SetPointer", Method, 0, ""}, + {"(Value).SetString", Method, 0, ""}, + {"(Value).SetUint", Method, 0, ""}, + {"(Value).SetZero", Method, 20, ""}, + {"(Value).Slice", Method, 0, ""}, + {"(Value).Slice3", Method, 2, ""}, + {"(Value).String", Method, 0, ""}, + {"(Value).TryRecv", Method, 0, ""}, + {"(Value).TrySend", Method, 0, ""}, + {"(Value).Type", Method, 0, ""}, + {"(Value).Uint", Method, 0, ""}, + {"(Value).UnsafeAddr", Method, 0, ""}, + {"(Value).UnsafePointer", Method, 18, ""}, + {"Append", Func, 0, "func(s Value, x ...Value) Value"}, + {"AppendSlice", Func, 0, "func(s Value, t Value) Value"}, + {"Array", Const, 0, ""}, + {"ArrayOf", Func, 5, "func(length int, elem Type) Type"}, + {"Bool", Const, 0, ""}, + {"BothDir", Const, 0, ""}, + {"Chan", Const, 0, ""}, + {"ChanDir", Type, 0, ""}, + {"ChanOf", Func, 1, "func(dir ChanDir, t Type) Type"}, + {"Complex128", Const, 0, ""}, + {"Complex64", Const, 0, ""}, + {"Copy", Func, 0, "func(dst Value, src Value) int"}, + {"DeepEqual", Func, 0, "func(x any, y any) bool"}, + {"Float32", Const, 0, ""}, + {"Float64", Const, 0, ""}, + {"Func", Const, 0, ""}, + {"FuncOf", Func, 5, "func(in []Type, out []Type, variadic bool) Type"}, + {"Indirect", Func, 0, "func(v Value) Value"}, + {"Int", Const, 0, ""}, + {"Int16", Const, 0, ""}, + {"Int32", Const, 0, ""}, + {"Int64", Const, 0, ""}, + {"Int8", Const, 0, ""}, + {"Interface", Const, 0, ""}, + {"Invalid", Const, 0, ""}, + {"Kind", Type, 0, ""}, + {"MakeChan", Func, 0, "func(typ Type, buffer int) Value"}, + {"MakeFunc", Func, 1, "func(typ Type, fn func(args []Value) (results []Value)) Value"}, + {"MakeMap", Func, 0, "func(typ Type) Value"}, + {"MakeMapWithSize", Func, 9, "func(typ Type, n int) Value"}, + {"MakeSlice", Func, 0, "func(typ Type, len int, cap int) Value"}, + {"Map", Const, 0, ""}, + {"MapIter", Type, 12, ""}, + {"MapOf", Func, 1, "func(key Type, elem Type) Type"}, + {"Method", Type, 0, ""}, + {"Method.Func", Field, 0, ""}, + {"Method.Index", Field, 0, ""}, + {"Method.Name", Field, 0, ""}, + {"Method.PkgPath", Field, 0, ""}, + {"Method.Type", Field, 0, ""}, + {"New", Func, 0, "func(typ Type) Value"}, + {"NewAt", Func, 0, "func(typ Type, p unsafe.Pointer) Value"}, + {"Pointer", Const, 18, ""}, + {"PointerTo", Func, 18, "func(t Type) Type"}, + {"Ptr", Const, 0, ""}, + {"PtrTo", Func, 0, "func(t Type) Type"}, + {"RecvDir", Const, 0, ""}, + {"Select", Func, 1, "func(cases []SelectCase) (chosen int, recv Value, recvOK bool)"}, + {"SelectCase", Type, 1, ""}, + {"SelectCase.Chan", Field, 1, ""}, + {"SelectCase.Dir", Field, 1, ""}, + {"SelectCase.Send", Field, 1, ""}, + {"SelectDefault", Const, 1, ""}, + {"SelectDir", Type, 1, ""}, + {"SelectRecv", Const, 1, ""}, + {"SelectSend", Const, 1, ""}, + {"SendDir", Const, 0, ""}, + {"Slice", Const, 0, ""}, + {"SliceAt", Func, 23, "func(typ Type, p unsafe.Pointer, n int) Value"}, + {"SliceHeader", Type, 0, ""}, + {"SliceHeader.Cap", Field, 0, ""}, + {"SliceHeader.Data", Field, 0, ""}, + {"SliceHeader.Len", Field, 0, ""}, + {"SliceOf", Func, 1, "func(t Type) Type"}, + {"String", Const, 0, ""}, + {"StringHeader", Type, 0, ""}, + {"StringHeader.Data", Field, 0, ""}, + {"StringHeader.Len", Field, 0, ""}, + {"Struct", Const, 0, ""}, + {"StructField", Type, 0, ""}, + {"StructField.Anonymous", Field, 0, ""}, + {"StructField.Index", Field, 0, ""}, + {"StructField.Name", Field, 0, ""}, + {"StructField.Offset", Field, 0, ""}, + {"StructField.PkgPath", Field, 0, ""}, + {"StructField.Tag", Field, 0, ""}, + {"StructField.Type", Field, 0, ""}, + 
{"StructOf", Func, 7, "func(fields []StructField) Type"}, + {"StructTag", Type, 0, ""}, + {"Swapper", Func, 8, "func(slice any) func(i int, j int)"}, + {"Type", Type, 0, ""}, + {"TypeFor", Func, 22, "func[T any]() Type"}, + {"TypeOf", Func, 0, "func(i any) Type"}, + {"Uint", Const, 0, ""}, + {"Uint16", Const, 0, ""}, + {"Uint32", Const, 0, ""}, + {"Uint64", Const, 0, ""}, + {"Uint8", Const, 0, ""}, + {"Uintptr", Const, 0, ""}, + {"UnsafePointer", Const, 0, ""}, + {"Value", Type, 0, ""}, + {"ValueError", Type, 0, ""}, + {"ValueError.Kind", Field, 0, ""}, + {"ValueError.Method", Field, 0, ""}, + {"ValueOf", Func, 0, "func(i any) Value"}, + {"VisibleFields", Func, 17, "func(t Type) []StructField"}, + {"Zero", Func, 0, "func(typ Type) Value"}, }, "regexp": { - {"(*Regexp).AppendText", Method, 24}, - {"(*Regexp).Copy", Method, 6}, - {"(*Regexp).Expand", Method, 0}, - {"(*Regexp).ExpandString", Method, 0}, - {"(*Regexp).Find", Method, 0}, - {"(*Regexp).FindAll", Method, 0}, - {"(*Regexp).FindAllIndex", Method, 0}, - {"(*Regexp).FindAllString", Method, 0}, - {"(*Regexp).FindAllStringIndex", Method, 0}, - {"(*Regexp).FindAllStringSubmatch", Method, 0}, - {"(*Regexp).FindAllStringSubmatchIndex", Method, 0}, - {"(*Regexp).FindAllSubmatch", Method, 0}, - {"(*Regexp).FindAllSubmatchIndex", Method, 0}, - {"(*Regexp).FindIndex", Method, 0}, - {"(*Regexp).FindReaderIndex", Method, 0}, - {"(*Regexp).FindReaderSubmatchIndex", Method, 0}, - {"(*Regexp).FindString", Method, 0}, - {"(*Regexp).FindStringIndex", Method, 0}, - {"(*Regexp).FindStringSubmatch", Method, 0}, - {"(*Regexp).FindStringSubmatchIndex", Method, 0}, - {"(*Regexp).FindSubmatch", Method, 0}, - {"(*Regexp).FindSubmatchIndex", Method, 0}, - {"(*Regexp).LiteralPrefix", Method, 0}, - {"(*Regexp).Longest", Method, 1}, - {"(*Regexp).MarshalText", Method, 21}, - {"(*Regexp).Match", Method, 0}, - {"(*Regexp).MatchReader", Method, 0}, - {"(*Regexp).MatchString", Method, 0}, - {"(*Regexp).NumSubexp", Method, 0}, - {"(*Regexp).ReplaceAll", Method, 0}, - {"(*Regexp).ReplaceAllFunc", Method, 0}, - {"(*Regexp).ReplaceAllLiteral", Method, 0}, - {"(*Regexp).ReplaceAllLiteralString", Method, 0}, - {"(*Regexp).ReplaceAllString", Method, 0}, - {"(*Regexp).ReplaceAllStringFunc", Method, 0}, - {"(*Regexp).Split", Method, 1}, - {"(*Regexp).String", Method, 0}, - {"(*Regexp).SubexpIndex", Method, 15}, - {"(*Regexp).SubexpNames", Method, 0}, - {"(*Regexp).UnmarshalText", Method, 21}, - {"Compile", Func, 0}, - {"CompilePOSIX", Func, 0}, - {"Match", Func, 0}, - {"MatchReader", Func, 0}, - {"MatchString", Func, 0}, - {"MustCompile", Func, 0}, - {"MustCompilePOSIX", Func, 0}, - {"QuoteMeta", Func, 0}, - {"Regexp", Type, 0}, + {"(*Regexp).AppendText", Method, 24, ""}, + {"(*Regexp).Copy", Method, 6, ""}, + {"(*Regexp).Expand", Method, 0, ""}, + {"(*Regexp).ExpandString", Method, 0, ""}, + {"(*Regexp).Find", Method, 0, ""}, + {"(*Regexp).FindAll", Method, 0, ""}, + {"(*Regexp).FindAllIndex", Method, 0, ""}, + {"(*Regexp).FindAllString", Method, 0, ""}, + {"(*Regexp).FindAllStringIndex", Method, 0, ""}, + {"(*Regexp).FindAllStringSubmatch", Method, 0, ""}, + {"(*Regexp).FindAllStringSubmatchIndex", Method, 0, ""}, + {"(*Regexp).FindAllSubmatch", Method, 0, ""}, + {"(*Regexp).FindAllSubmatchIndex", Method, 0, ""}, + {"(*Regexp).FindIndex", Method, 0, ""}, + {"(*Regexp).FindReaderIndex", Method, 0, ""}, + {"(*Regexp).FindReaderSubmatchIndex", Method, 0, ""}, + {"(*Regexp).FindString", Method, 0, ""}, + {"(*Regexp).FindStringIndex", Method, 0, ""}, + 
{"(*Regexp).FindStringSubmatch", Method, 0, ""}, + {"(*Regexp).FindStringSubmatchIndex", Method, 0, ""}, + {"(*Regexp).FindSubmatch", Method, 0, ""}, + {"(*Regexp).FindSubmatchIndex", Method, 0, ""}, + {"(*Regexp).LiteralPrefix", Method, 0, ""}, + {"(*Regexp).Longest", Method, 1, ""}, + {"(*Regexp).MarshalText", Method, 21, ""}, + {"(*Regexp).Match", Method, 0, ""}, + {"(*Regexp).MatchReader", Method, 0, ""}, + {"(*Regexp).MatchString", Method, 0, ""}, + {"(*Regexp).NumSubexp", Method, 0, ""}, + {"(*Regexp).ReplaceAll", Method, 0, ""}, + {"(*Regexp).ReplaceAllFunc", Method, 0, ""}, + {"(*Regexp).ReplaceAllLiteral", Method, 0, ""}, + {"(*Regexp).ReplaceAllLiteralString", Method, 0, ""}, + {"(*Regexp).ReplaceAllString", Method, 0, ""}, + {"(*Regexp).ReplaceAllStringFunc", Method, 0, ""}, + {"(*Regexp).Split", Method, 1, ""}, + {"(*Regexp).String", Method, 0, ""}, + {"(*Regexp).SubexpIndex", Method, 15, ""}, + {"(*Regexp).SubexpNames", Method, 0, ""}, + {"(*Regexp).UnmarshalText", Method, 21, ""}, + {"Compile", Func, 0, "func(expr string) (*Regexp, error)"}, + {"CompilePOSIX", Func, 0, "func(expr string) (*Regexp, error)"}, + {"Match", Func, 0, "func(pattern string, b []byte) (matched bool, err error)"}, + {"MatchReader", Func, 0, "func(pattern string, r io.RuneReader) (matched bool, err error)"}, + {"MatchString", Func, 0, "func(pattern string, s string) (matched bool, err error)"}, + {"MustCompile", Func, 0, "func(str string) *Regexp"}, + {"MustCompilePOSIX", Func, 0, "func(str string) *Regexp"}, + {"QuoteMeta", Func, 0, "func(s string) string"}, + {"Regexp", Type, 0, ""}, }, "regexp/syntax": { - {"(*Error).Error", Method, 0}, - {"(*Inst).MatchEmptyWidth", Method, 0}, - {"(*Inst).MatchRune", Method, 0}, - {"(*Inst).MatchRunePos", Method, 3}, - {"(*Inst).String", Method, 0}, - {"(*Prog).Prefix", Method, 0}, - {"(*Prog).StartCond", Method, 0}, - {"(*Prog).String", Method, 0}, - {"(*Regexp).CapNames", Method, 0}, - {"(*Regexp).Equal", Method, 0}, - {"(*Regexp).MaxCap", Method, 0}, - {"(*Regexp).Simplify", Method, 0}, - {"(*Regexp).String", Method, 0}, - {"(ErrorCode).String", Method, 0}, - {"(InstOp).String", Method, 3}, - {"(Op).String", Method, 11}, - {"ClassNL", Const, 0}, - {"Compile", Func, 0}, - {"DotNL", Const, 0}, - {"EmptyBeginLine", Const, 0}, - {"EmptyBeginText", Const, 0}, - {"EmptyEndLine", Const, 0}, - {"EmptyEndText", Const, 0}, - {"EmptyNoWordBoundary", Const, 0}, - {"EmptyOp", Type, 0}, - {"EmptyOpContext", Func, 0}, - {"EmptyWordBoundary", Const, 0}, - {"ErrInternalError", Const, 0}, - {"ErrInvalidCharClass", Const, 0}, - {"ErrInvalidCharRange", Const, 0}, - {"ErrInvalidEscape", Const, 0}, - {"ErrInvalidNamedCapture", Const, 0}, - {"ErrInvalidPerlOp", Const, 0}, - {"ErrInvalidRepeatOp", Const, 0}, - {"ErrInvalidRepeatSize", Const, 0}, - {"ErrInvalidUTF8", Const, 0}, - {"ErrLarge", Const, 20}, - {"ErrMissingBracket", Const, 0}, - {"ErrMissingParen", Const, 0}, - {"ErrMissingRepeatArgument", Const, 0}, - {"ErrNestingDepth", Const, 19}, - {"ErrTrailingBackslash", Const, 0}, - {"ErrUnexpectedParen", Const, 1}, - {"Error", Type, 0}, - {"Error.Code", Field, 0}, - {"Error.Expr", Field, 0}, - {"ErrorCode", Type, 0}, - {"Flags", Type, 0}, - {"FoldCase", Const, 0}, - {"Inst", Type, 0}, - {"Inst.Arg", Field, 0}, - {"Inst.Op", Field, 0}, - {"Inst.Out", Field, 0}, - {"Inst.Rune", Field, 0}, - {"InstAlt", Const, 0}, - {"InstAltMatch", Const, 0}, - {"InstCapture", Const, 0}, - {"InstEmptyWidth", Const, 0}, - {"InstFail", Const, 0}, - {"InstMatch", Const, 0}, - {"InstNop", Const, 0}, - 
{"InstOp", Type, 0}, - {"InstRune", Const, 0}, - {"InstRune1", Const, 0}, - {"InstRuneAny", Const, 0}, - {"InstRuneAnyNotNL", Const, 0}, - {"IsWordChar", Func, 0}, - {"Literal", Const, 0}, - {"MatchNL", Const, 0}, - {"NonGreedy", Const, 0}, - {"OneLine", Const, 0}, - {"Op", Type, 0}, - {"OpAlternate", Const, 0}, - {"OpAnyChar", Const, 0}, - {"OpAnyCharNotNL", Const, 0}, - {"OpBeginLine", Const, 0}, - {"OpBeginText", Const, 0}, - {"OpCapture", Const, 0}, - {"OpCharClass", Const, 0}, - {"OpConcat", Const, 0}, - {"OpEmptyMatch", Const, 0}, - {"OpEndLine", Const, 0}, - {"OpEndText", Const, 0}, - {"OpLiteral", Const, 0}, - {"OpNoMatch", Const, 0}, - {"OpNoWordBoundary", Const, 0}, - {"OpPlus", Const, 0}, - {"OpQuest", Const, 0}, - {"OpRepeat", Const, 0}, - {"OpStar", Const, 0}, - {"OpWordBoundary", Const, 0}, - {"POSIX", Const, 0}, - {"Parse", Func, 0}, - {"Perl", Const, 0}, - {"PerlX", Const, 0}, - {"Prog", Type, 0}, - {"Prog.Inst", Field, 0}, - {"Prog.NumCap", Field, 0}, - {"Prog.Start", Field, 0}, - {"Regexp", Type, 0}, - {"Regexp.Cap", Field, 0}, - {"Regexp.Flags", Field, 0}, - {"Regexp.Max", Field, 0}, - {"Regexp.Min", Field, 0}, - {"Regexp.Name", Field, 0}, - {"Regexp.Op", Field, 0}, - {"Regexp.Rune", Field, 0}, - {"Regexp.Rune0", Field, 0}, - {"Regexp.Sub", Field, 0}, - {"Regexp.Sub0", Field, 0}, - {"Simple", Const, 0}, - {"UnicodeGroups", Const, 0}, - {"WasDollar", Const, 0}, + {"(*Error).Error", Method, 0, ""}, + {"(*Inst).MatchEmptyWidth", Method, 0, ""}, + {"(*Inst).MatchRune", Method, 0, ""}, + {"(*Inst).MatchRunePos", Method, 3, ""}, + {"(*Inst).String", Method, 0, ""}, + {"(*Prog).Prefix", Method, 0, ""}, + {"(*Prog).StartCond", Method, 0, ""}, + {"(*Prog).String", Method, 0, ""}, + {"(*Regexp).CapNames", Method, 0, ""}, + {"(*Regexp).Equal", Method, 0, ""}, + {"(*Regexp).MaxCap", Method, 0, ""}, + {"(*Regexp).Simplify", Method, 0, ""}, + {"(*Regexp).String", Method, 0, ""}, + {"(ErrorCode).String", Method, 0, ""}, + {"(InstOp).String", Method, 3, ""}, + {"(Op).String", Method, 11, ""}, + {"ClassNL", Const, 0, ""}, + {"Compile", Func, 0, "func(re *Regexp) (*Prog, error)"}, + {"DotNL", Const, 0, ""}, + {"EmptyBeginLine", Const, 0, ""}, + {"EmptyBeginText", Const, 0, ""}, + {"EmptyEndLine", Const, 0, ""}, + {"EmptyEndText", Const, 0, ""}, + {"EmptyNoWordBoundary", Const, 0, ""}, + {"EmptyOp", Type, 0, ""}, + {"EmptyOpContext", Func, 0, "func(r1 rune, r2 rune) EmptyOp"}, + {"EmptyWordBoundary", Const, 0, ""}, + {"ErrInternalError", Const, 0, ""}, + {"ErrInvalidCharClass", Const, 0, ""}, + {"ErrInvalidCharRange", Const, 0, ""}, + {"ErrInvalidEscape", Const, 0, ""}, + {"ErrInvalidNamedCapture", Const, 0, ""}, + {"ErrInvalidPerlOp", Const, 0, ""}, + {"ErrInvalidRepeatOp", Const, 0, ""}, + {"ErrInvalidRepeatSize", Const, 0, ""}, + {"ErrInvalidUTF8", Const, 0, ""}, + {"ErrLarge", Const, 20, ""}, + {"ErrMissingBracket", Const, 0, ""}, + {"ErrMissingParen", Const, 0, ""}, + {"ErrMissingRepeatArgument", Const, 0, ""}, + {"ErrNestingDepth", Const, 19, ""}, + {"ErrTrailingBackslash", Const, 0, ""}, + {"ErrUnexpectedParen", Const, 1, ""}, + {"Error", Type, 0, ""}, + {"Error.Code", Field, 0, ""}, + {"Error.Expr", Field, 0, ""}, + {"ErrorCode", Type, 0, ""}, + {"Flags", Type, 0, ""}, + {"FoldCase", Const, 0, ""}, + {"Inst", Type, 0, ""}, + {"Inst.Arg", Field, 0, ""}, + {"Inst.Op", Field, 0, ""}, + {"Inst.Out", Field, 0, ""}, + {"Inst.Rune", Field, 0, ""}, + {"InstAlt", Const, 0, ""}, + {"InstAltMatch", Const, 0, ""}, + {"InstCapture", Const, 0, ""}, + {"InstEmptyWidth", Const, 0, ""}, + 
{"InstFail", Const, 0, ""}, + {"InstMatch", Const, 0, ""}, + {"InstNop", Const, 0, ""}, + {"InstOp", Type, 0, ""}, + {"InstRune", Const, 0, ""}, + {"InstRune1", Const, 0, ""}, + {"InstRuneAny", Const, 0, ""}, + {"InstRuneAnyNotNL", Const, 0, ""}, + {"IsWordChar", Func, 0, "func(r rune) bool"}, + {"Literal", Const, 0, ""}, + {"MatchNL", Const, 0, ""}, + {"NonGreedy", Const, 0, ""}, + {"OneLine", Const, 0, ""}, + {"Op", Type, 0, ""}, + {"OpAlternate", Const, 0, ""}, + {"OpAnyChar", Const, 0, ""}, + {"OpAnyCharNotNL", Const, 0, ""}, + {"OpBeginLine", Const, 0, ""}, + {"OpBeginText", Const, 0, ""}, + {"OpCapture", Const, 0, ""}, + {"OpCharClass", Const, 0, ""}, + {"OpConcat", Const, 0, ""}, + {"OpEmptyMatch", Const, 0, ""}, + {"OpEndLine", Const, 0, ""}, + {"OpEndText", Const, 0, ""}, + {"OpLiteral", Const, 0, ""}, + {"OpNoMatch", Const, 0, ""}, + {"OpNoWordBoundary", Const, 0, ""}, + {"OpPlus", Const, 0, ""}, + {"OpQuest", Const, 0, ""}, + {"OpRepeat", Const, 0, ""}, + {"OpStar", Const, 0, ""}, + {"OpWordBoundary", Const, 0, ""}, + {"POSIX", Const, 0, ""}, + {"Parse", Func, 0, "func(s string, flags Flags) (*Regexp, error)"}, + {"Perl", Const, 0, ""}, + {"PerlX", Const, 0, ""}, + {"Prog", Type, 0, ""}, + {"Prog.Inst", Field, 0, ""}, + {"Prog.NumCap", Field, 0, ""}, + {"Prog.Start", Field, 0, ""}, + {"Regexp", Type, 0, ""}, + {"Regexp.Cap", Field, 0, ""}, + {"Regexp.Flags", Field, 0, ""}, + {"Regexp.Max", Field, 0, ""}, + {"Regexp.Min", Field, 0, ""}, + {"Regexp.Name", Field, 0, ""}, + {"Regexp.Op", Field, 0, ""}, + {"Regexp.Rune", Field, 0, ""}, + {"Regexp.Rune0", Field, 0, ""}, + {"Regexp.Sub", Field, 0, ""}, + {"Regexp.Sub0", Field, 0, ""}, + {"Simple", Const, 0, ""}, + {"UnicodeGroups", Const, 0, ""}, + {"WasDollar", Const, 0, ""}, }, "runtime": { - {"(*BlockProfileRecord).Stack", Method, 1}, - {"(*Frames).Next", Method, 7}, - {"(*Func).Entry", Method, 0}, - {"(*Func).FileLine", Method, 0}, - {"(*Func).Name", Method, 0}, - {"(*MemProfileRecord).InUseBytes", Method, 0}, - {"(*MemProfileRecord).InUseObjects", Method, 0}, - {"(*MemProfileRecord).Stack", Method, 0}, - {"(*PanicNilError).Error", Method, 21}, - {"(*PanicNilError).RuntimeError", Method, 21}, - {"(*Pinner).Pin", Method, 21}, - {"(*Pinner).Unpin", Method, 21}, - {"(*StackRecord).Stack", Method, 0}, - {"(*TypeAssertionError).Error", Method, 0}, - {"(*TypeAssertionError).RuntimeError", Method, 0}, - {"(Cleanup).Stop", Method, 24}, - {"AddCleanup", Func, 24}, - {"BlockProfile", Func, 1}, - {"BlockProfileRecord", Type, 1}, - {"BlockProfileRecord.Count", Field, 1}, - {"BlockProfileRecord.Cycles", Field, 1}, - {"BlockProfileRecord.StackRecord", Field, 1}, - {"Breakpoint", Func, 0}, - {"CPUProfile", Func, 0}, - {"Caller", Func, 0}, - {"Callers", Func, 0}, - {"CallersFrames", Func, 7}, - {"Cleanup", Type, 24}, - {"Compiler", Const, 0}, - {"Error", Type, 0}, - {"Frame", Type, 7}, - {"Frame.Entry", Field, 7}, - {"Frame.File", Field, 7}, - {"Frame.Func", Field, 7}, - {"Frame.Function", Field, 7}, - {"Frame.Line", Field, 7}, - {"Frame.PC", Field, 7}, - {"Frames", Type, 7}, - {"Func", Type, 0}, - {"FuncForPC", Func, 0}, - {"GC", Func, 0}, - {"GOARCH", Const, 0}, - {"GOMAXPROCS", Func, 0}, - {"GOOS", Const, 0}, - {"GOROOT", Func, 0}, - {"Goexit", Func, 0}, - {"GoroutineProfile", Func, 0}, - {"Gosched", Func, 0}, - {"KeepAlive", Func, 7}, - {"LockOSThread", Func, 0}, - {"MemProfile", Func, 0}, - {"MemProfileRate", Var, 0}, - {"MemProfileRecord", Type, 0}, - {"MemProfileRecord.AllocBytes", Field, 0}, - {"MemProfileRecord.AllocObjects", Field, 0}, - 
{"MemProfileRecord.FreeBytes", Field, 0}, - {"MemProfileRecord.FreeObjects", Field, 0}, - {"MemProfileRecord.Stack0", Field, 0}, - {"MemStats", Type, 0}, - {"MemStats.Alloc", Field, 0}, - {"MemStats.BuckHashSys", Field, 0}, - {"MemStats.BySize", Field, 0}, - {"MemStats.DebugGC", Field, 0}, - {"MemStats.EnableGC", Field, 0}, - {"MemStats.Frees", Field, 0}, - {"MemStats.GCCPUFraction", Field, 5}, - {"MemStats.GCSys", Field, 2}, - {"MemStats.HeapAlloc", Field, 0}, - {"MemStats.HeapIdle", Field, 0}, - {"MemStats.HeapInuse", Field, 0}, - {"MemStats.HeapObjects", Field, 0}, - {"MemStats.HeapReleased", Field, 0}, - {"MemStats.HeapSys", Field, 0}, - {"MemStats.LastGC", Field, 0}, - {"MemStats.Lookups", Field, 0}, - {"MemStats.MCacheInuse", Field, 0}, - {"MemStats.MCacheSys", Field, 0}, - {"MemStats.MSpanInuse", Field, 0}, - {"MemStats.MSpanSys", Field, 0}, - {"MemStats.Mallocs", Field, 0}, - {"MemStats.NextGC", Field, 0}, - {"MemStats.NumForcedGC", Field, 8}, - {"MemStats.NumGC", Field, 0}, - {"MemStats.OtherSys", Field, 2}, - {"MemStats.PauseEnd", Field, 4}, - {"MemStats.PauseNs", Field, 0}, - {"MemStats.PauseTotalNs", Field, 0}, - {"MemStats.StackInuse", Field, 0}, - {"MemStats.StackSys", Field, 0}, - {"MemStats.Sys", Field, 0}, - {"MemStats.TotalAlloc", Field, 0}, - {"MutexProfile", Func, 8}, - {"NumCPU", Func, 0}, - {"NumCgoCall", Func, 0}, - {"NumGoroutine", Func, 0}, - {"PanicNilError", Type, 21}, - {"Pinner", Type, 21}, - {"ReadMemStats", Func, 0}, - {"ReadTrace", Func, 5}, - {"SetBlockProfileRate", Func, 1}, - {"SetCPUProfileRate", Func, 0}, - {"SetCgoTraceback", Func, 7}, - {"SetFinalizer", Func, 0}, - {"SetMutexProfileFraction", Func, 8}, - {"Stack", Func, 0}, - {"StackRecord", Type, 0}, - {"StackRecord.Stack0", Field, 0}, - {"StartTrace", Func, 5}, - {"StopTrace", Func, 5}, - {"ThreadCreateProfile", Func, 0}, - {"TypeAssertionError", Type, 0}, - {"UnlockOSThread", Func, 0}, - {"Version", Func, 0}, + {"(*BlockProfileRecord).Stack", Method, 1, ""}, + {"(*Frames).Next", Method, 7, ""}, + {"(*Func).Entry", Method, 0, ""}, + {"(*Func).FileLine", Method, 0, ""}, + {"(*Func).Name", Method, 0, ""}, + {"(*MemProfileRecord).InUseBytes", Method, 0, ""}, + {"(*MemProfileRecord).InUseObjects", Method, 0, ""}, + {"(*MemProfileRecord).Stack", Method, 0, ""}, + {"(*PanicNilError).Error", Method, 21, ""}, + {"(*PanicNilError).RuntimeError", Method, 21, ""}, + {"(*Pinner).Pin", Method, 21, ""}, + {"(*Pinner).Unpin", Method, 21, ""}, + {"(*StackRecord).Stack", Method, 0, ""}, + {"(*TypeAssertionError).Error", Method, 0, ""}, + {"(*TypeAssertionError).RuntimeError", Method, 0, ""}, + {"(Cleanup).Stop", Method, 24, ""}, + {"AddCleanup", Func, 24, "func[T, S any](ptr *T, cleanup func(S), arg S) Cleanup"}, + {"BlockProfile", Func, 1, "func(p []BlockProfileRecord) (n int, ok bool)"}, + {"BlockProfileRecord", Type, 1, ""}, + {"BlockProfileRecord.Count", Field, 1, ""}, + {"BlockProfileRecord.Cycles", Field, 1, ""}, + {"BlockProfileRecord.StackRecord", Field, 1, ""}, + {"Breakpoint", Func, 0, "func()"}, + {"CPUProfile", Func, 0, "func() []byte"}, + {"Caller", Func, 0, "func(skip int) (pc uintptr, file string, line int, ok bool)"}, + {"Callers", Func, 0, "func(skip int, pc []uintptr) int"}, + {"CallersFrames", Func, 7, "func(callers []uintptr) *Frames"}, + {"Cleanup", Type, 24, ""}, + {"Compiler", Const, 0, ""}, + {"Error", Type, 0, ""}, + {"Frame", Type, 7, ""}, + {"Frame.Entry", Field, 7, ""}, + {"Frame.File", Field, 7, ""}, + {"Frame.Func", Field, 7, ""}, + {"Frame.Function", Field, 7, ""}, + {"Frame.Line", 
Field, 7, ""}, + {"Frame.PC", Field, 7, ""}, + {"Frames", Type, 7, ""}, + {"Func", Type, 0, ""}, + {"FuncForPC", Func, 0, "func(pc uintptr) *Func"}, + {"GC", Func, 0, "func()"}, + {"GOARCH", Const, 0, ""}, + {"GOMAXPROCS", Func, 0, "func(n int) int"}, + {"GOOS", Const, 0, ""}, + {"GOROOT", Func, 0, "func() string"}, + {"Goexit", Func, 0, "func()"}, + {"GoroutineProfile", Func, 0, "func(p []StackRecord) (n int, ok bool)"}, + {"Gosched", Func, 0, "func()"}, + {"KeepAlive", Func, 7, "func(x any)"}, + {"LockOSThread", Func, 0, "func()"}, + {"MemProfile", Func, 0, "func(p []MemProfileRecord, inuseZero bool) (n int, ok bool)"}, + {"MemProfileRate", Var, 0, ""}, + {"MemProfileRecord", Type, 0, ""}, + {"MemProfileRecord.AllocBytes", Field, 0, ""}, + {"MemProfileRecord.AllocObjects", Field, 0, ""}, + {"MemProfileRecord.FreeBytes", Field, 0, ""}, + {"MemProfileRecord.FreeObjects", Field, 0, ""}, + {"MemProfileRecord.Stack0", Field, 0, ""}, + {"MemStats", Type, 0, ""}, + {"MemStats.Alloc", Field, 0, ""}, + {"MemStats.BuckHashSys", Field, 0, ""}, + {"MemStats.BySize", Field, 0, ""}, + {"MemStats.DebugGC", Field, 0, ""}, + {"MemStats.EnableGC", Field, 0, ""}, + {"MemStats.Frees", Field, 0, ""}, + {"MemStats.GCCPUFraction", Field, 5, ""}, + {"MemStats.GCSys", Field, 2, ""}, + {"MemStats.HeapAlloc", Field, 0, ""}, + {"MemStats.HeapIdle", Field, 0, ""}, + {"MemStats.HeapInuse", Field, 0, ""}, + {"MemStats.HeapObjects", Field, 0, ""}, + {"MemStats.HeapReleased", Field, 0, ""}, + {"MemStats.HeapSys", Field, 0, ""}, + {"MemStats.LastGC", Field, 0, ""}, + {"MemStats.Lookups", Field, 0, ""}, + {"MemStats.MCacheInuse", Field, 0, ""}, + {"MemStats.MCacheSys", Field, 0, ""}, + {"MemStats.MSpanInuse", Field, 0, ""}, + {"MemStats.MSpanSys", Field, 0, ""}, + {"MemStats.Mallocs", Field, 0, ""}, + {"MemStats.NextGC", Field, 0, ""}, + {"MemStats.NumForcedGC", Field, 8, ""}, + {"MemStats.NumGC", Field, 0, ""}, + {"MemStats.OtherSys", Field, 2, ""}, + {"MemStats.PauseEnd", Field, 4, ""}, + {"MemStats.PauseNs", Field, 0, ""}, + {"MemStats.PauseTotalNs", Field, 0, ""}, + {"MemStats.StackInuse", Field, 0, ""}, + {"MemStats.StackSys", Field, 0, ""}, + {"MemStats.Sys", Field, 0, ""}, + {"MemStats.TotalAlloc", Field, 0, ""}, + {"MutexProfile", Func, 8, "func(p []BlockProfileRecord) (n int, ok bool)"}, + {"NumCPU", Func, 0, "func() int"}, + {"NumCgoCall", Func, 0, "func() int64"}, + {"NumGoroutine", Func, 0, "func() int"}, + {"PanicNilError", Type, 21, ""}, + {"Pinner", Type, 21, ""}, + {"ReadMemStats", Func, 0, "func(m *MemStats)"}, + {"ReadTrace", Func, 5, "func() []byte"}, + {"SetBlockProfileRate", Func, 1, "func(rate int)"}, + {"SetCPUProfileRate", Func, 0, "func(hz int)"}, + {"SetCgoTraceback", Func, 7, "func(version int, traceback unsafe.Pointer, context unsafe.Pointer, symbolizer unsafe.Pointer)"}, + {"SetFinalizer", Func, 0, "func(obj any, finalizer any)"}, + {"SetMutexProfileFraction", Func, 8, "func(rate int) int"}, + {"Stack", Func, 0, "func(buf []byte, all bool) int"}, + {"StackRecord", Type, 0, ""}, + {"StackRecord.Stack0", Field, 0, ""}, + {"StartTrace", Func, 5, "func() error"}, + {"StopTrace", Func, 5, "func()"}, + {"ThreadCreateProfile", Func, 0, "func(p []StackRecord) (n int, ok bool)"}, + {"TypeAssertionError", Type, 0, ""}, + {"UnlockOSThread", Func, 0, "func()"}, + {"Version", Func, 0, "func() string"}, }, "runtime/cgo": { - {"(Handle).Delete", Method, 17}, - {"(Handle).Value", Method, 17}, - {"Handle", Type, 17}, - {"Incomplete", Type, 20}, - {"NewHandle", Func, 17}, + {"(Handle).Delete", Method, 17, ""}, 
+ {"(Handle).Value", Method, 17, ""}, + {"Handle", Type, 17, ""}, + {"Incomplete", Type, 20, ""}, + {"NewHandle", Func, 17, ""}, }, "runtime/coverage": { - {"ClearCounters", Func, 20}, - {"WriteCounters", Func, 20}, - {"WriteCountersDir", Func, 20}, - {"WriteMeta", Func, 20}, - {"WriteMetaDir", Func, 20}, + {"ClearCounters", Func, 20, "func() error"}, + {"WriteCounters", Func, 20, "func(w io.Writer) error"}, + {"WriteCountersDir", Func, 20, "func(dir string) error"}, + {"WriteMeta", Func, 20, "func(w io.Writer) error"}, + {"WriteMetaDir", Func, 20, "func(dir string) error"}, }, "runtime/debug": { - {"(*BuildInfo).String", Method, 18}, - {"BuildInfo", Type, 12}, - {"BuildInfo.Deps", Field, 12}, - {"BuildInfo.GoVersion", Field, 18}, - {"BuildInfo.Main", Field, 12}, - {"BuildInfo.Path", Field, 12}, - {"BuildInfo.Settings", Field, 18}, - {"BuildSetting", Type, 18}, - {"BuildSetting.Key", Field, 18}, - {"BuildSetting.Value", Field, 18}, - {"CrashOptions", Type, 23}, - {"FreeOSMemory", Func, 1}, - {"GCStats", Type, 1}, - {"GCStats.LastGC", Field, 1}, - {"GCStats.NumGC", Field, 1}, - {"GCStats.Pause", Field, 1}, - {"GCStats.PauseEnd", Field, 4}, - {"GCStats.PauseQuantiles", Field, 1}, - {"GCStats.PauseTotal", Field, 1}, - {"Module", Type, 12}, - {"Module.Path", Field, 12}, - {"Module.Replace", Field, 12}, - {"Module.Sum", Field, 12}, - {"Module.Version", Field, 12}, - {"ParseBuildInfo", Func, 18}, - {"PrintStack", Func, 0}, - {"ReadBuildInfo", Func, 12}, - {"ReadGCStats", Func, 1}, - {"SetCrashOutput", Func, 23}, - {"SetGCPercent", Func, 1}, - {"SetMaxStack", Func, 2}, - {"SetMaxThreads", Func, 2}, - {"SetMemoryLimit", Func, 19}, - {"SetPanicOnFault", Func, 3}, - {"SetTraceback", Func, 6}, - {"Stack", Func, 0}, - {"WriteHeapDump", Func, 3}, + {"(*BuildInfo).String", Method, 18, ""}, + {"BuildInfo", Type, 12, ""}, + {"BuildInfo.Deps", Field, 12, ""}, + {"BuildInfo.GoVersion", Field, 18, ""}, + {"BuildInfo.Main", Field, 12, ""}, + {"BuildInfo.Path", Field, 12, ""}, + {"BuildInfo.Settings", Field, 18, ""}, + {"BuildSetting", Type, 18, ""}, + {"BuildSetting.Key", Field, 18, ""}, + {"BuildSetting.Value", Field, 18, ""}, + {"CrashOptions", Type, 23, ""}, + {"FreeOSMemory", Func, 1, "func()"}, + {"GCStats", Type, 1, ""}, + {"GCStats.LastGC", Field, 1, ""}, + {"GCStats.NumGC", Field, 1, ""}, + {"GCStats.Pause", Field, 1, ""}, + {"GCStats.PauseEnd", Field, 4, ""}, + {"GCStats.PauseQuantiles", Field, 1, ""}, + {"GCStats.PauseTotal", Field, 1, ""}, + {"Module", Type, 12, ""}, + {"Module.Path", Field, 12, ""}, + {"Module.Replace", Field, 12, ""}, + {"Module.Sum", Field, 12, ""}, + {"Module.Version", Field, 12, ""}, + {"ParseBuildInfo", Func, 18, "func(data string) (bi *BuildInfo, err error)"}, + {"PrintStack", Func, 0, "func()"}, + {"ReadBuildInfo", Func, 12, "func() (info *BuildInfo, ok bool)"}, + {"ReadGCStats", Func, 1, "func(stats *GCStats)"}, + {"SetCrashOutput", Func, 23, "func(f *os.File, opts CrashOptions) error"}, + {"SetGCPercent", Func, 1, "func(percent int) int"}, + {"SetMaxStack", Func, 2, "func(bytes int) int"}, + {"SetMaxThreads", Func, 2, "func(threads int) int"}, + {"SetMemoryLimit", Func, 19, "func(limit int64) int64"}, + {"SetPanicOnFault", Func, 3, "func(enabled bool) bool"}, + {"SetTraceback", Func, 6, "func(level string)"}, + {"Stack", Func, 0, "func() []byte"}, + {"WriteHeapDump", Func, 3, "func(fd uintptr)"}, }, "runtime/metrics": { - {"(Value).Float64", Method, 16}, - {"(Value).Float64Histogram", Method, 16}, - {"(Value).Kind", Method, 16}, - {"(Value).Uint64", Method, 16}, - {"All", 
Func, 16}, - {"Description", Type, 16}, - {"Description.Cumulative", Field, 16}, - {"Description.Description", Field, 16}, - {"Description.Kind", Field, 16}, - {"Description.Name", Field, 16}, - {"Float64Histogram", Type, 16}, - {"Float64Histogram.Buckets", Field, 16}, - {"Float64Histogram.Counts", Field, 16}, - {"KindBad", Const, 16}, - {"KindFloat64", Const, 16}, - {"KindFloat64Histogram", Const, 16}, - {"KindUint64", Const, 16}, - {"Read", Func, 16}, - {"Sample", Type, 16}, - {"Sample.Name", Field, 16}, - {"Sample.Value", Field, 16}, - {"Value", Type, 16}, - {"ValueKind", Type, 16}, + {"(Value).Float64", Method, 16, ""}, + {"(Value).Float64Histogram", Method, 16, ""}, + {"(Value).Kind", Method, 16, ""}, + {"(Value).Uint64", Method, 16, ""}, + {"All", Func, 16, "func() []Description"}, + {"Description", Type, 16, ""}, + {"Description.Cumulative", Field, 16, ""}, + {"Description.Description", Field, 16, ""}, + {"Description.Kind", Field, 16, ""}, + {"Description.Name", Field, 16, ""}, + {"Float64Histogram", Type, 16, ""}, + {"Float64Histogram.Buckets", Field, 16, ""}, + {"Float64Histogram.Counts", Field, 16, ""}, + {"KindBad", Const, 16, ""}, + {"KindFloat64", Const, 16, ""}, + {"KindFloat64Histogram", Const, 16, ""}, + {"KindUint64", Const, 16, ""}, + {"Read", Func, 16, "func(m []Sample)"}, + {"Sample", Type, 16, ""}, + {"Sample.Name", Field, 16, ""}, + {"Sample.Value", Field, 16, ""}, + {"Value", Type, 16, ""}, + {"ValueKind", Type, 16, ""}, }, "runtime/pprof": { - {"(*Profile).Add", Method, 0}, - {"(*Profile).Count", Method, 0}, - {"(*Profile).Name", Method, 0}, - {"(*Profile).Remove", Method, 0}, - {"(*Profile).WriteTo", Method, 0}, - {"Do", Func, 9}, - {"ForLabels", Func, 9}, - {"Label", Func, 9}, - {"LabelSet", Type, 9}, - {"Labels", Func, 9}, - {"Lookup", Func, 0}, - {"NewProfile", Func, 0}, - {"Profile", Type, 0}, - {"Profiles", Func, 0}, - {"SetGoroutineLabels", Func, 9}, - {"StartCPUProfile", Func, 0}, - {"StopCPUProfile", Func, 0}, - {"WithLabels", Func, 9}, - {"WriteHeapProfile", Func, 0}, + {"(*Profile).Add", Method, 0, ""}, + {"(*Profile).Count", Method, 0, ""}, + {"(*Profile).Name", Method, 0, ""}, + {"(*Profile).Remove", Method, 0, ""}, + {"(*Profile).WriteTo", Method, 0, ""}, + {"Do", Func, 9, "func(ctx context.Context, labels LabelSet, f func(context.Context))"}, + {"ForLabels", Func, 9, "func(ctx context.Context, f func(key string, value string) bool)"}, + {"Label", Func, 9, "func(ctx context.Context, key string) (string, bool)"}, + {"LabelSet", Type, 9, ""}, + {"Labels", Func, 9, "func(args ...string) LabelSet"}, + {"Lookup", Func, 0, "func(name string) *Profile"}, + {"NewProfile", Func, 0, "func(name string) *Profile"}, + {"Profile", Type, 0, ""}, + {"Profiles", Func, 0, "func() []*Profile"}, + {"SetGoroutineLabels", Func, 9, "func(ctx context.Context)"}, + {"StartCPUProfile", Func, 0, "func(w io.Writer) error"}, + {"StopCPUProfile", Func, 0, "func()"}, + {"WithLabels", Func, 9, "func(ctx context.Context, labels LabelSet) context.Context"}, + {"WriteHeapProfile", Func, 0, "func(w io.Writer) error"}, }, "runtime/trace": { - {"(*Region).End", Method, 11}, - {"(*Task).End", Method, 11}, - {"IsEnabled", Func, 11}, - {"Log", Func, 11}, - {"Logf", Func, 11}, - {"NewTask", Func, 11}, - {"Region", Type, 11}, - {"Start", Func, 5}, - {"StartRegion", Func, 11}, - {"Stop", Func, 5}, - {"Task", Type, 11}, - {"WithRegion", Func, 11}, + {"(*Region).End", Method, 11, ""}, + {"(*Task).End", Method, 11, ""}, + {"IsEnabled", Func, 11, "func() bool"}, + {"Log", Func, 11, "func(ctx 
context.Context, category string, message string)"}, + {"Logf", Func, 11, "func(ctx context.Context, category string, format string, args ...any)"}, + {"NewTask", Func, 11, "func(pctx context.Context, taskType string) (ctx context.Context, task *Task)"}, + {"Region", Type, 11, ""}, + {"Start", Func, 5, "func(w io.Writer) error"}, + {"StartRegion", Func, 11, "func(ctx context.Context, regionType string) *Region"}, + {"Stop", Func, 5, "func()"}, + {"Task", Type, 11, ""}, + {"WithRegion", Func, 11, "func(ctx context.Context, regionType string, fn func())"}, }, "slices": { - {"All", Func, 23}, - {"AppendSeq", Func, 23}, - {"Backward", Func, 23}, - {"BinarySearch", Func, 21}, - {"BinarySearchFunc", Func, 21}, - {"Chunk", Func, 23}, - {"Clip", Func, 21}, - {"Clone", Func, 21}, - {"Collect", Func, 23}, - {"Compact", Func, 21}, - {"CompactFunc", Func, 21}, - {"Compare", Func, 21}, - {"CompareFunc", Func, 21}, - {"Concat", Func, 22}, - {"Contains", Func, 21}, - {"ContainsFunc", Func, 21}, - {"Delete", Func, 21}, - {"DeleteFunc", Func, 21}, - {"Equal", Func, 21}, - {"EqualFunc", Func, 21}, - {"Grow", Func, 21}, - {"Index", Func, 21}, - {"IndexFunc", Func, 21}, - {"Insert", Func, 21}, - {"IsSorted", Func, 21}, - {"IsSortedFunc", Func, 21}, - {"Max", Func, 21}, - {"MaxFunc", Func, 21}, - {"Min", Func, 21}, - {"MinFunc", Func, 21}, - {"Repeat", Func, 23}, - {"Replace", Func, 21}, - {"Reverse", Func, 21}, - {"Sort", Func, 21}, - {"SortFunc", Func, 21}, - {"SortStableFunc", Func, 21}, - {"Sorted", Func, 23}, - {"SortedFunc", Func, 23}, - {"SortedStableFunc", Func, 23}, - {"Values", Func, 23}, + {"All", Func, 23, "func[Slice ~[]E, E any](s Slice) iter.Seq2[int, E]"}, + {"AppendSeq", Func, 23, "func[Slice ~[]E, E any](s Slice, seq iter.Seq[E]) Slice"}, + {"Backward", Func, 23, "func[Slice ~[]E, E any](s Slice) iter.Seq2[int, E]"}, + {"BinarySearch", Func, 21, "func[S ~[]E, E cmp.Ordered](x S, target E) (int, bool)"}, + {"BinarySearchFunc", Func, 21, "func[S ~[]E, E, T any](x S, target T, cmp func(E, T) int) (int, bool)"}, + {"Chunk", Func, 23, "func[Slice ~[]E, E any](s Slice, n int) iter.Seq[Slice]"}, + {"Clip", Func, 21, "func[S ~[]E, E any](s S) S"}, + {"Clone", Func, 21, "func[S ~[]E, E any](s S) S"}, + {"Collect", Func, 23, "func[E any](seq iter.Seq[E]) []E"}, + {"Compact", Func, 21, "func[S ~[]E, E comparable](s S) S"}, + {"CompactFunc", Func, 21, "func[S ~[]E, E any](s S, eq func(E, E) bool) S"}, + {"Compare", Func, 21, "func[S ~[]E, E cmp.Ordered](s1 S, s2 S) int"}, + {"CompareFunc", Func, 21, "func[S1 ~[]E1, S2 ~[]E2, E1, E2 any](s1 S1, s2 S2, cmp func(E1, E2) int) int"}, + {"Concat", Func, 22, "func[S ~[]E, E any](slices ...S) S"}, + {"Contains", Func, 21, "func[S ~[]E, E comparable](s S, v E) bool"}, + {"ContainsFunc", Func, 21, "func[S ~[]E, E any](s S, f func(E) bool) bool"}, + {"Delete", Func, 21, "func[S ~[]E, E any](s S, i int, j int) S"}, + {"DeleteFunc", Func, 21, "func[S ~[]E, E any](s S, del func(E) bool) S"}, + {"Equal", Func, 21, "func[S ~[]E, E comparable](s1 S, s2 S) bool"}, + {"EqualFunc", Func, 21, "func[S1 ~[]E1, S2 ~[]E2, E1, E2 any](s1 S1, s2 S2, eq func(E1, E2) bool) bool"}, + {"Grow", Func, 21, "func[S ~[]E, E any](s S, n int) S"}, + {"Index", Func, 21, "func[S ~[]E, E comparable](s S, v E) int"}, + {"IndexFunc", Func, 21, "func[S ~[]E, E any](s S, f func(E) bool) int"}, + {"Insert", Func, 21, "func[S ~[]E, E any](s S, i int, v ...E) S"}, + {"IsSorted", Func, 21, "func[S ~[]E, E cmp.Ordered](x S) bool"}, + {"IsSortedFunc", Func, 21, "func[S ~[]E, E any](x S, cmp func(a E, b 
E) int) bool"}, + {"Max", Func, 21, "func[S ~[]E, E cmp.Ordered](x S) E"}, + {"MaxFunc", Func, 21, "func[S ~[]E, E any](x S, cmp func(a E, b E) int) E"}, + {"Min", Func, 21, "func[S ~[]E, E cmp.Ordered](x S) E"}, + {"MinFunc", Func, 21, "func[S ~[]E, E any](x S, cmp func(a E, b E) int) E"}, + {"Repeat", Func, 23, "func[S ~[]E, E any](x S, count int) S"}, + {"Replace", Func, 21, "func[S ~[]E, E any](s S, i int, j int, v ...E) S"}, + {"Reverse", Func, 21, "func[S ~[]E, E any](s S)"}, + {"Sort", Func, 21, "func[S ~[]E, E cmp.Ordered](x S)"}, + {"SortFunc", Func, 21, "func[S ~[]E, E any](x S, cmp func(a E, b E) int)"}, + {"SortStableFunc", Func, 21, "func[S ~[]E, E any](x S, cmp func(a E, b E) int)"}, + {"Sorted", Func, 23, "func[E cmp.Ordered](seq iter.Seq[E]) []E"}, + {"SortedFunc", Func, 23, "func[E any](seq iter.Seq[E], cmp func(E, E) int) []E"}, + {"SortedStableFunc", Func, 23, "func[E any](seq iter.Seq[E], cmp func(E, E) int) []E"}, + {"Values", Func, 23, "func[Slice ~[]E, E any](s Slice) iter.Seq[E]"}, }, "sort": { - {"(Float64Slice).Len", Method, 0}, - {"(Float64Slice).Less", Method, 0}, - {"(Float64Slice).Search", Method, 0}, - {"(Float64Slice).Sort", Method, 0}, - {"(Float64Slice).Swap", Method, 0}, - {"(IntSlice).Len", Method, 0}, - {"(IntSlice).Less", Method, 0}, - {"(IntSlice).Search", Method, 0}, - {"(IntSlice).Sort", Method, 0}, - {"(IntSlice).Swap", Method, 0}, - {"(StringSlice).Len", Method, 0}, - {"(StringSlice).Less", Method, 0}, - {"(StringSlice).Search", Method, 0}, - {"(StringSlice).Sort", Method, 0}, - {"(StringSlice).Swap", Method, 0}, - {"Find", Func, 19}, - {"Float64Slice", Type, 0}, - {"Float64s", Func, 0}, - {"Float64sAreSorted", Func, 0}, - {"IntSlice", Type, 0}, - {"Interface", Type, 0}, - {"Ints", Func, 0}, - {"IntsAreSorted", Func, 0}, - {"IsSorted", Func, 0}, - {"Reverse", Func, 1}, - {"Search", Func, 0}, - {"SearchFloat64s", Func, 0}, - {"SearchInts", Func, 0}, - {"SearchStrings", Func, 0}, - {"Slice", Func, 8}, - {"SliceIsSorted", Func, 8}, - {"SliceStable", Func, 8}, - {"Sort", Func, 0}, - {"Stable", Func, 2}, - {"StringSlice", Type, 0}, - {"Strings", Func, 0}, - {"StringsAreSorted", Func, 0}, + {"(Float64Slice).Len", Method, 0, ""}, + {"(Float64Slice).Less", Method, 0, ""}, + {"(Float64Slice).Search", Method, 0, ""}, + {"(Float64Slice).Sort", Method, 0, ""}, + {"(Float64Slice).Swap", Method, 0, ""}, + {"(IntSlice).Len", Method, 0, ""}, + {"(IntSlice).Less", Method, 0, ""}, + {"(IntSlice).Search", Method, 0, ""}, + {"(IntSlice).Sort", Method, 0, ""}, + {"(IntSlice).Swap", Method, 0, ""}, + {"(StringSlice).Len", Method, 0, ""}, + {"(StringSlice).Less", Method, 0, ""}, + {"(StringSlice).Search", Method, 0, ""}, + {"(StringSlice).Sort", Method, 0, ""}, + {"(StringSlice).Swap", Method, 0, ""}, + {"Find", Func, 19, "func(n int, cmp func(int) int) (i int, found bool)"}, + {"Float64Slice", Type, 0, ""}, + {"Float64s", Func, 0, "func(x []float64)"}, + {"Float64sAreSorted", Func, 0, "func(x []float64) bool"}, + {"IntSlice", Type, 0, ""}, + {"Interface", Type, 0, ""}, + {"Ints", Func, 0, "func(x []int)"}, + {"IntsAreSorted", Func, 0, "func(x []int) bool"}, + {"IsSorted", Func, 0, "func(data Interface) bool"}, + {"Reverse", Func, 1, "func(data Interface) Interface"}, + {"Search", Func, 0, "func(n int, f func(int) bool) int"}, + {"SearchFloat64s", Func, 0, "func(a []float64, x float64) int"}, + {"SearchInts", Func, 0, "func(a []int, x int) int"}, + {"SearchStrings", Func, 0, "func(a []string, x string) int"}, + {"Slice", Func, 8, "func(x any, less func(i int, j int) 
bool)"}, + {"SliceIsSorted", Func, 8, "func(x any, less func(i int, j int) bool) bool"}, + {"SliceStable", Func, 8, "func(x any, less func(i int, j int) bool)"}, + {"Sort", Func, 0, "func(data Interface)"}, + {"Stable", Func, 2, "func(data Interface)"}, + {"StringSlice", Type, 0, ""}, + {"Strings", Func, 0, "func(x []string)"}, + {"StringsAreSorted", Func, 0, "func(x []string) bool"}, }, "strconv": { - {"(*NumError).Error", Method, 0}, - {"(*NumError).Unwrap", Method, 14}, - {"AppendBool", Func, 0}, - {"AppendFloat", Func, 0}, - {"AppendInt", Func, 0}, - {"AppendQuote", Func, 0}, - {"AppendQuoteRune", Func, 0}, - {"AppendQuoteRuneToASCII", Func, 0}, - {"AppendQuoteRuneToGraphic", Func, 6}, - {"AppendQuoteToASCII", Func, 0}, - {"AppendQuoteToGraphic", Func, 6}, - {"AppendUint", Func, 0}, - {"Atoi", Func, 0}, - {"CanBackquote", Func, 0}, - {"ErrRange", Var, 0}, - {"ErrSyntax", Var, 0}, - {"FormatBool", Func, 0}, - {"FormatComplex", Func, 15}, - {"FormatFloat", Func, 0}, - {"FormatInt", Func, 0}, - {"FormatUint", Func, 0}, - {"IntSize", Const, 0}, - {"IsGraphic", Func, 6}, - {"IsPrint", Func, 0}, - {"Itoa", Func, 0}, - {"NumError", Type, 0}, - {"NumError.Err", Field, 0}, - {"NumError.Func", Field, 0}, - {"NumError.Num", Field, 0}, - {"ParseBool", Func, 0}, - {"ParseComplex", Func, 15}, - {"ParseFloat", Func, 0}, - {"ParseInt", Func, 0}, - {"ParseUint", Func, 0}, - {"Quote", Func, 0}, - {"QuoteRune", Func, 0}, - {"QuoteRuneToASCII", Func, 0}, - {"QuoteRuneToGraphic", Func, 6}, - {"QuoteToASCII", Func, 0}, - {"QuoteToGraphic", Func, 6}, - {"QuotedPrefix", Func, 17}, - {"Unquote", Func, 0}, - {"UnquoteChar", Func, 0}, + {"(*NumError).Error", Method, 0, ""}, + {"(*NumError).Unwrap", Method, 14, ""}, + {"AppendBool", Func, 0, "func(dst []byte, b bool) []byte"}, + {"AppendFloat", Func, 0, "func(dst []byte, f float64, fmt byte, prec int, bitSize int) []byte"}, + {"AppendInt", Func, 0, "func(dst []byte, i int64, base int) []byte"}, + {"AppendQuote", Func, 0, "func(dst []byte, s string) []byte"}, + {"AppendQuoteRune", Func, 0, "func(dst []byte, r rune) []byte"}, + {"AppendQuoteRuneToASCII", Func, 0, "func(dst []byte, r rune) []byte"}, + {"AppendQuoteRuneToGraphic", Func, 6, "func(dst []byte, r rune) []byte"}, + {"AppendQuoteToASCII", Func, 0, "func(dst []byte, s string) []byte"}, + {"AppendQuoteToGraphic", Func, 6, "func(dst []byte, s string) []byte"}, + {"AppendUint", Func, 0, "func(dst []byte, i uint64, base int) []byte"}, + {"Atoi", Func, 0, "func(s string) (int, error)"}, + {"CanBackquote", Func, 0, "func(s string) bool"}, + {"ErrRange", Var, 0, ""}, + {"ErrSyntax", Var, 0, ""}, + {"FormatBool", Func, 0, "func(b bool) string"}, + {"FormatComplex", Func, 15, "func(c complex128, fmt byte, prec int, bitSize int) string"}, + {"FormatFloat", Func, 0, "func(f float64, fmt byte, prec int, bitSize int) string"}, + {"FormatInt", Func, 0, "func(i int64, base int) string"}, + {"FormatUint", Func, 0, "func(i uint64, base int) string"}, + {"IntSize", Const, 0, ""}, + {"IsGraphic", Func, 6, "func(r rune) bool"}, + {"IsPrint", Func, 0, "func(r rune) bool"}, + {"Itoa", Func, 0, "func(i int) string"}, + {"NumError", Type, 0, ""}, + {"NumError.Err", Field, 0, ""}, + {"NumError.Func", Field, 0, ""}, + {"NumError.Num", Field, 0, ""}, + {"ParseBool", Func, 0, "func(str string) (bool, error)"}, + {"ParseComplex", Func, 15, "func(s string, bitSize int) (complex128, error)"}, + {"ParseFloat", Func, 0, "func(s string, bitSize int) (float64, error)"}, + {"ParseInt", Func, 0, "func(s string, base int, bitSize int) (i int64, 
err error)"}, + {"ParseUint", Func, 0, "func(s string, base int, bitSize int) (uint64, error)"}, + {"Quote", Func, 0, "func(s string) string"}, + {"QuoteRune", Func, 0, "func(r rune) string"}, + {"QuoteRuneToASCII", Func, 0, "func(r rune) string"}, + {"QuoteRuneToGraphic", Func, 6, "func(r rune) string"}, + {"QuoteToASCII", Func, 0, "func(s string) string"}, + {"QuoteToGraphic", Func, 6, "func(s string) string"}, + {"QuotedPrefix", Func, 17, "func(s string) (string, error)"}, + {"Unquote", Func, 0, "func(s string) (string, error)"}, + {"UnquoteChar", Func, 0, "func(s string, quote byte) (value rune, multibyte bool, tail string, err error)"}, }, "strings": { - {"(*Builder).Cap", Method, 12}, - {"(*Builder).Grow", Method, 10}, - {"(*Builder).Len", Method, 10}, - {"(*Builder).Reset", Method, 10}, - {"(*Builder).String", Method, 10}, - {"(*Builder).Write", Method, 10}, - {"(*Builder).WriteByte", Method, 10}, - {"(*Builder).WriteRune", Method, 10}, - {"(*Builder).WriteString", Method, 10}, - {"(*Reader).Len", Method, 0}, - {"(*Reader).Read", Method, 0}, - {"(*Reader).ReadAt", Method, 0}, - {"(*Reader).ReadByte", Method, 0}, - {"(*Reader).ReadRune", Method, 0}, - {"(*Reader).Reset", Method, 7}, - {"(*Reader).Seek", Method, 0}, - {"(*Reader).Size", Method, 5}, - {"(*Reader).UnreadByte", Method, 0}, - {"(*Reader).UnreadRune", Method, 0}, - {"(*Reader).WriteTo", Method, 1}, - {"(*Replacer).Replace", Method, 0}, - {"(*Replacer).WriteString", Method, 0}, - {"Builder", Type, 10}, - {"Clone", Func, 18}, - {"Compare", Func, 5}, - {"Contains", Func, 0}, - {"ContainsAny", Func, 0}, - {"ContainsFunc", Func, 21}, - {"ContainsRune", Func, 0}, - {"Count", Func, 0}, - {"Cut", Func, 18}, - {"CutPrefix", Func, 20}, - {"CutSuffix", Func, 20}, - {"EqualFold", Func, 0}, - {"Fields", Func, 0}, - {"FieldsFunc", Func, 0}, - {"FieldsFuncSeq", Func, 24}, - {"FieldsSeq", Func, 24}, - {"HasPrefix", Func, 0}, - {"HasSuffix", Func, 0}, - {"Index", Func, 0}, - {"IndexAny", Func, 0}, - {"IndexByte", Func, 2}, - {"IndexFunc", Func, 0}, - {"IndexRune", Func, 0}, - {"Join", Func, 0}, - {"LastIndex", Func, 0}, - {"LastIndexAny", Func, 0}, - {"LastIndexByte", Func, 5}, - {"LastIndexFunc", Func, 0}, - {"Lines", Func, 24}, - {"Map", Func, 0}, - {"NewReader", Func, 0}, - {"NewReplacer", Func, 0}, - {"Reader", Type, 0}, - {"Repeat", Func, 0}, - {"Replace", Func, 0}, - {"ReplaceAll", Func, 12}, - {"Replacer", Type, 0}, - {"Split", Func, 0}, - {"SplitAfter", Func, 0}, - {"SplitAfterN", Func, 0}, - {"SplitAfterSeq", Func, 24}, - {"SplitN", Func, 0}, - {"SplitSeq", Func, 24}, - {"Title", Func, 0}, - {"ToLower", Func, 0}, - {"ToLowerSpecial", Func, 0}, - {"ToTitle", Func, 0}, - {"ToTitleSpecial", Func, 0}, - {"ToUpper", Func, 0}, - {"ToUpperSpecial", Func, 0}, - {"ToValidUTF8", Func, 13}, - {"Trim", Func, 0}, - {"TrimFunc", Func, 0}, - {"TrimLeft", Func, 0}, - {"TrimLeftFunc", Func, 0}, - {"TrimPrefix", Func, 1}, - {"TrimRight", Func, 0}, - {"TrimRightFunc", Func, 0}, - {"TrimSpace", Func, 0}, - {"TrimSuffix", Func, 1}, + {"(*Builder).Cap", Method, 12, ""}, + {"(*Builder).Grow", Method, 10, ""}, + {"(*Builder).Len", Method, 10, ""}, + {"(*Builder).Reset", Method, 10, ""}, + {"(*Builder).String", Method, 10, ""}, + {"(*Builder).Write", Method, 10, ""}, + {"(*Builder).WriteByte", Method, 10, ""}, + {"(*Builder).WriteRune", Method, 10, ""}, + {"(*Builder).WriteString", Method, 10, ""}, + {"(*Reader).Len", Method, 0, ""}, + {"(*Reader).Read", Method, 0, ""}, + {"(*Reader).ReadAt", Method, 0, ""}, + {"(*Reader).ReadByte", Method, 0, ""}, + 
{"(*Reader).ReadRune", Method, 0, ""}, + {"(*Reader).Reset", Method, 7, ""}, + {"(*Reader).Seek", Method, 0, ""}, + {"(*Reader).Size", Method, 5, ""}, + {"(*Reader).UnreadByte", Method, 0, ""}, + {"(*Reader).UnreadRune", Method, 0, ""}, + {"(*Reader).WriteTo", Method, 1, ""}, + {"(*Replacer).Replace", Method, 0, ""}, + {"(*Replacer).WriteString", Method, 0, ""}, + {"Builder", Type, 10, ""}, + {"Clone", Func, 18, "func(s string) string"}, + {"Compare", Func, 5, "func(a string, b string) int"}, + {"Contains", Func, 0, "func(s string, substr string) bool"}, + {"ContainsAny", Func, 0, "func(s string, chars string) bool"}, + {"ContainsFunc", Func, 21, "func(s string, f func(rune) bool) bool"}, + {"ContainsRune", Func, 0, "func(s string, r rune) bool"}, + {"Count", Func, 0, "func(s string, substr string) int"}, + {"Cut", Func, 18, "func(s string, sep string) (before string, after string, found bool)"}, + {"CutPrefix", Func, 20, "func(s string, prefix string) (after string, found bool)"}, + {"CutSuffix", Func, 20, "func(s string, suffix string) (before string, found bool)"}, + {"EqualFold", Func, 0, "func(s string, t string) bool"}, + {"Fields", Func, 0, "func(s string) []string"}, + {"FieldsFunc", Func, 0, "func(s string, f func(rune) bool) []string"}, + {"FieldsFuncSeq", Func, 24, "func(s string, f func(rune) bool) iter.Seq[string]"}, + {"FieldsSeq", Func, 24, "func(s string) iter.Seq[string]"}, + {"HasPrefix", Func, 0, "func(s string, prefix string) bool"}, + {"HasSuffix", Func, 0, "func(s string, suffix string) bool"}, + {"Index", Func, 0, "func(s string, substr string) int"}, + {"IndexAny", Func, 0, "func(s string, chars string) int"}, + {"IndexByte", Func, 2, "func(s string, c byte) int"}, + {"IndexFunc", Func, 0, "func(s string, f func(rune) bool) int"}, + {"IndexRune", Func, 0, "func(s string, r rune) int"}, + {"Join", Func, 0, "func(elems []string, sep string) string"}, + {"LastIndex", Func, 0, "func(s string, substr string) int"}, + {"LastIndexAny", Func, 0, "func(s string, chars string) int"}, + {"LastIndexByte", Func, 5, "func(s string, c byte) int"}, + {"LastIndexFunc", Func, 0, "func(s string, f func(rune) bool) int"}, + {"Lines", Func, 24, "func(s string) iter.Seq[string]"}, + {"Map", Func, 0, "func(mapping func(rune) rune, s string) string"}, + {"NewReader", Func, 0, "func(s string) *Reader"}, + {"NewReplacer", Func, 0, "func(oldnew ...string) *Replacer"}, + {"Reader", Type, 0, ""}, + {"Repeat", Func, 0, "func(s string, count int) string"}, + {"Replace", Func, 0, "func(s string, old string, new string, n int) string"}, + {"ReplaceAll", Func, 12, "func(s string, old string, new string) string"}, + {"Replacer", Type, 0, ""}, + {"Split", Func, 0, "func(s string, sep string) []string"}, + {"SplitAfter", Func, 0, "func(s string, sep string) []string"}, + {"SplitAfterN", Func, 0, "func(s string, sep string, n int) []string"}, + {"SplitAfterSeq", Func, 24, "func(s string, sep string) iter.Seq[string]"}, + {"SplitN", Func, 0, "func(s string, sep string, n int) []string"}, + {"SplitSeq", Func, 24, "func(s string, sep string) iter.Seq[string]"}, + {"Title", Func, 0, "func(s string) string"}, + {"ToLower", Func, 0, "func(s string) string"}, + {"ToLowerSpecial", Func, 0, "func(c unicode.SpecialCase, s string) string"}, + {"ToTitle", Func, 0, "func(s string) string"}, + {"ToTitleSpecial", Func, 0, "func(c unicode.SpecialCase, s string) string"}, + {"ToUpper", Func, 0, "func(s string) string"}, + {"ToUpperSpecial", Func, 0, "func(c unicode.SpecialCase, s string) string"}, + {"ToValidUTF8", 
Func, 13, "func(s string, replacement string) string"}, + {"Trim", Func, 0, "func(s string, cutset string) string"}, + {"TrimFunc", Func, 0, "func(s string, f func(rune) bool) string"}, + {"TrimLeft", Func, 0, "func(s string, cutset string) string"}, + {"TrimLeftFunc", Func, 0, "func(s string, f func(rune) bool) string"}, + {"TrimPrefix", Func, 1, "func(s string, prefix string) string"}, + {"TrimRight", Func, 0, "func(s string, cutset string) string"}, + {"TrimRightFunc", Func, 0, "func(s string, f func(rune) bool) string"}, + {"TrimSpace", Func, 0, "func(s string) string"}, + {"TrimSuffix", Func, 1, "func(s string, suffix string) string"}, }, "structs": { - {"HostLayout", Type, 23}, + {"HostLayout", Type, 23, ""}, }, "sync": { - {"(*Cond).Broadcast", Method, 0}, - {"(*Cond).Signal", Method, 0}, - {"(*Cond).Wait", Method, 0}, - {"(*Map).Clear", Method, 23}, - {"(*Map).CompareAndDelete", Method, 20}, - {"(*Map).CompareAndSwap", Method, 20}, - {"(*Map).Delete", Method, 9}, - {"(*Map).Load", Method, 9}, - {"(*Map).LoadAndDelete", Method, 15}, - {"(*Map).LoadOrStore", Method, 9}, - {"(*Map).Range", Method, 9}, - {"(*Map).Store", Method, 9}, - {"(*Map).Swap", Method, 20}, - {"(*Mutex).Lock", Method, 0}, - {"(*Mutex).TryLock", Method, 18}, - {"(*Mutex).Unlock", Method, 0}, - {"(*Once).Do", Method, 0}, - {"(*Pool).Get", Method, 3}, - {"(*Pool).Put", Method, 3}, - {"(*RWMutex).Lock", Method, 0}, - {"(*RWMutex).RLock", Method, 0}, - {"(*RWMutex).RLocker", Method, 0}, - {"(*RWMutex).RUnlock", Method, 0}, - {"(*RWMutex).TryLock", Method, 18}, - {"(*RWMutex).TryRLock", Method, 18}, - {"(*RWMutex).Unlock", Method, 0}, - {"(*WaitGroup).Add", Method, 0}, - {"(*WaitGroup).Done", Method, 0}, - {"(*WaitGroup).Go", Method, 25}, - {"(*WaitGroup).Wait", Method, 0}, - {"Cond", Type, 0}, - {"Cond.L", Field, 0}, - {"Locker", Type, 0}, - {"Map", Type, 9}, - {"Mutex", Type, 0}, - {"NewCond", Func, 0}, - {"Once", Type, 0}, - {"OnceFunc", Func, 21}, - {"OnceValue", Func, 21}, - {"OnceValues", Func, 21}, - {"Pool", Type, 3}, - {"Pool.New", Field, 3}, - {"RWMutex", Type, 0}, - {"WaitGroup", Type, 0}, + {"(*Cond).Broadcast", Method, 0, ""}, + {"(*Cond).Signal", Method, 0, ""}, + {"(*Cond).Wait", Method, 0, ""}, + {"(*Map).Clear", Method, 23, ""}, + {"(*Map).CompareAndDelete", Method, 20, ""}, + {"(*Map).CompareAndSwap", Method, 20, ""}, + {"(*Map).Delete", Method, 9, ""}, + {"(*Map).Load", Method, 9, ""}, + {"(*Map).LoadAndDelete", Method, 15, ""}, + {"(*Map).LoadOrStore", Method, 9, ""}, + {"(*Map).Range", Method, 9, ""}, + {"(*Map).Store", Method, 9, ""}, + {"(*Map).Swap", Method, 20, ""}, + {"(*Mutex).Lock", Method, 0, ""}, + {"(*Mutex).TryLock", Method, 18, ""}, + {"(*Mutex).Unlock", Method, 0, ""}, + {"(*Once).Do", Method, 0, ""}, + {"(*Pool).Get", Method, 3, ""}, + {"(*Pool).Put", Method, 3, ""}, + {"(*RWMutex).Lock", Method, 0, ""}, + {"(*RWMutex).RLock", Method, 0, ""}, + {"(*RWMutex).RLocker", Method, 0, ""}, + {"(*RWMutex).RUnlock", Method, 0, ""}, + {"(*RWMutex).TryLock", Method, 18, ""}, + {"(*RWMutex).TryRLock", Method, 18, ""}, + {"(*RWMutex).Unlock", Method, 0, ""}, + {"(*WaitGroup).Add", Method, 0, ""}, + {"(*WaitGroup).Done", Method, 0, ""}, + {"(*WaitGroup).Go", Method, 25, ""}, + {"(*WaitGroup).Wait", Method, 0, ""}, + {"Cond", Type, 0, ""}, + {"Cond.L", Field, 0, ""}, + {"Locker", Type, 0, ""}, + {"Map", Type, 9, ""}, + {"Mutex", Type, 0, ""}, + {"NewCond", Func, 0, "func(l Locker) *Cond"}, + {"Once", Type, 0, ""}, + {"OnceFunc", Func, 21, "func(f func()) func()"}, + {"OnceValue", Func, 21, "func[T 
any](f func() T) func() T"}, + {"OnceValues", Func, 21, "func[T1, T2 any](f func() (T1, T2)) func() (T1, T2)"}, + {"Pool", Type, 3, ""}, + {"Pool.New", Field, 3, ""}, + {"RWMutex", Type, 0, ""}, + {"WaitGroup", Type, 0, ""}, }, "sync/atomic": { - {"(*Bool).CompareAndSwap", Method, 19}, - {"(*Bool).Load", Method, 19}, - {"(*Bool).Store", Method, 19}, - {"(*Bool).Swap", Method, 19}, - {"(*Int32).Add", Method, 19}, - {"(*Int32).And", Method, 23}, - {"(*Int32).CompareAndSwap", Method, 19}, - {"(*Int32).Load", Method, 19}, - {"(*Int32).Or", Method, 23}, - {"(*Int32).Store", Method, 19}, - {"(*Int32).Swap", Method, 19}, - {"(*Int64).Add", Method, 19}, - {"(*Int64).And", Method, 23}, - {"(*Int64).CompareAndSwap", Method, 19}, - {"(*Int64).Load", Method, 19}, - {"(*Int64).Or", Method, 23}, - {"(*Int64).Store", Method, 19}, - {"(*Int64).Swap", Method, 19}, - {"(*Pointer).CompareAndSwap", Method, 19}, - {"(*Pointer).Load", Method, 19}, - {"(*Pointer).Store", Method, 19}, - {"(*Pointer).Swap", Method, 19}, - {"(*Uint32).Add", Method, 19}, - {"(*Uint32).And", Method, 23}, - {"(*Uint32).CompareAndSwap", Method, 19}, - {"(*Uint32).Load", Method, 19}, - {"(*Uint32).Or", Method, 23}, - {"(*Uint32).Store", Method, 19}, - {"(*Uint32).Swap", Method, 19}, - {"(*Uint64).Add", Method, 19}, - {"(*Uint64).And", Method, 23}, - {"(*Uint64).CompareAndSwap", Method, 19}, - {"(*Uint64).Load", Method, 19}, - {"(*Uint64).Or", Method, 23}, - {"(*Uint64).Store", Method, 19}, - {"(*Uint64).Swap", Method, 19}, - {"(*Uintptr).Add", Method, 19}, - {"(*Uintptr).And", Method, 23}, - {"(*Uintptr).CompareAndSwap", Method, 19}, - {"(*Uintptr).Load", Method, 19}, - {"(*Uintptr).Or", Method, 23}, - {"(*Uintptr).Store", Method, 19}, - {"(*Uintptr).Swap", Method, 19}, - {"(*Value).CompareAndSwap", Method, 17}, - {"(*Value).Load", Method, 4}, - {"(*Value).Store", Method, 4}, - {"(*Value).Swap", Method, 17}, - {"AddInt32", Func, 0}, - {"AddInt64", Func, 0}, - {"AddUint32", Func, 0}, - {"AddUint64", Func, 0}, - {"AddUintptr", Func, 0}, - {"AndInt32", Func, 23}, - {"AndInt64", Func, 23}, - {"AndUint32", Func, 23}, - {"AndUint64", Func, 23}, - {"AndUintptr", Func, 23}, - {"Bool", Type, 19}, - {"CompareAndSwapInt32", Func, 0}, - {"CompareAndSwapInt64", Func, 0}, - {"CompareAndSwapPointer", Func, 0}, - {"CompareAndSwapUint32", Func, 0}, - {"CompareAndSwapUint64", Func, 0}, - {"CompareAndSwapUintptr", Func, 0}, - {"Int32", Type, 19}, - {"Int64", Type, 19}, - {"LoadInt32", Func, 0}, - {"LoadInt64", Func, 0}, - {"LoadPointer", Func, 0}, - {"LoadUint32", Func, 0}, - {"LoadUint64", Func, 0}, - {"LoadUintptr", Func, 0}, - {"OrInt32", Func, 23}, - {"OrInt64", Func, 23}, - {"OrUint32", Func, 23}, - {"OrUint64", Func, 23}, - {"OrUintptr", Func, 23}, - {"Pointer", Type, 19}, - {"StoreInt32", Func, 0}, - {"StoreInt64", Func, 0}, - {"StorePointer", Func, 0}, - {"StoreUint32", Func, 0}, - {"StoreUint64", Func, 0}, - {"StoreUintptr", Func, 0}, - {"SwapInt32", Func, 2}, - {"SwapInt64", Func, 2}, - {"SwapPointer", Func, 2}, - {"SwapUint32", Func, 2}, - {"SwapUint64", Func, 2}, - {"SwapUintptr", Func, 2}, - {"Uint32", Type, 19}, - {"Uint64", Type, 19}, - {"Uintptr", Type, 19}, - {"Value", Type, 4}, + {"(*Bool).CompareAndSwap", Method, 19, ""}, + {"(*Bool).Load", Method, 19, ""}, + {"(*Bool).Store", Method, 19, ""}, + {"(*Bool).Swap", Method, 19, ""}, + {"(*Int32).Add", Method, 19, ""}, + {"(*Int32).And", Method, 23, ""}, + {"(*Int32).CompareAndSwap", Method, 19, ""}, + {"(*Int32).Load", Method, 19, ""}, + {"(*Int32).Or", Method, 23, ""}, + {"(*Int32).Store", 
Method, 19, ""}, + {"(*Int32).Swap", Method, 19, ""}, + {"(*Int64).Add", Method, 19, ""}, + {"(*Int64).And", Method, 23, ""}, + {"(*Int64).CompareAndSwap", Method, 19, ""}, + {"(*Int64).Load", Method, 19, ""}, + {"(*Int64).Or", Method, 23, ""}, + {"(*Int64).Store", Method, 19, ""}, + {"(*Int64).Swap", Method, 19, ""}, + {"(*Pointer).CompareAndSwap", Method, 19, ""}, + {"(*Pointer).Load", Method, 19, ""}, + {"(*Pointer).Store", Method, 19, ""}, + {"(*Pointer).Swap", Method, 19, ""}, + {"(*Uint32).Add", Method, 19, ""}, + {"(*Uint32).And", Method, 23, ""}, + {"(*Uint32).CompareAndSwap", Method, 19, ""}, + {"(*Uint32).Load", Method, 19, ""}, + {"(*Uint32).Or", Method, 23, ""}, + {"(*Uint32).Store", Method, 19, ""}, + {"(*Uint32).Swap", Method, 19, ""}, + {"(*Uint64).Add", Method, 19, ""}, + {"(*Uint64).And", Method, 23, ""}, + {"(*Uint64).CompareAndSwap", Method, 19, ""}, + {"(*Uint64).Load", Method, 19, ""}, + {"(*Uint64).Or", Method, 23, ""}, + {"(*Uint64).Store", Method, 19, ""}, + {"(*Uint64).Swap", Method, 19, ""}, + {"(*Uintptr).Add", Method, 19, ""}, + {"(*Uintptr).And", Method, 23, ""}, + {"(*Uintptr).CompareAndSwap", Method, 19, ""}, + {"(*Uintptr).Load", Method, 19, ""}, + {"(*Uintptr).Or", Method, 23, ""}, + {"(*Uintptr).Store", Method, 19, ""}, + {"(*Uintptr).Swap", Method, 19, ""}, + {"(*Value).CompareAndSwap", Method, 17, ""}, + {"(*Value).Load", Method, 4, ""}, + {"(*Value).Store", Method, 4, ""}, + {"(*Value).Swap", Method, 17, ""}, + {"AddInt32", Func, 0, "func(addr *int32, delta int32) (new int32)"}, + {"AddInt64", Func, 0, "func(addr *int64, delta int64) (new int64)"}, + {"AddUint32", Func, 0, "func(addr *uint32, delta uint32) (new uint32)"}, + {"AddUint64", Func, 0, "func(addr *uint64, delta uint64) (new uint64)"}, + {"AddUintptr", Func, 0, "func(addr *uintptr, delta uintptr) (new uintptr)"}, + {"AndInt32", Func, 23, "func(addr *int32, mask int32) (old int32)"}, + {"AndInt64", Func, 23, "func(addr *int64, mask int64) (old int64)"}, + {"AndUint32", Func, 23, "func(addr *uint32, mask uint32) (old uint32)"}, + {"AndUint64", Func, 23, "func(addr *uint64, mask uint64) (old uint64)"}, + {"AndUintptr", Func, 23, "func(addr *uintptr, mask uintptr) (old uintptr)"}, + {"Bool", Type, 19, ""}, + {"CompareAndSwapInt32", Func, 0, "func(addr *int32, old int32, new int32) (swapped bool)"}, + {"CompareAndSwapInt64", Func, 0, "func(addr *int64, old int64, new int64) (swapped bool)"}, + {"CompareAndSwapPointer", Func, 0, "func(addr *unsafe.Pointer, old unsafe.Pointer, new unsafe.Pointer) (swapped bool)"}, + {"CompareAndSwapUint32", Func, 0, "func(addr *uint32, old uint32, new uint32) (swapped bool)"}, + {"CompareAndSwapUint64", Func, 0, "func(addr *uint64, old uint64, new uint64) (swapped bool)"}, + {"CompareAndSwapUintptr", Func, 0, "func(addr *uintptr, old uintptr, new uintptr) (swapped bool)"}, + {"Int32", Type, 19, ""}, + {"Int64", Type, 19, ""}, + {"LoadInt32", Func, 0, "func(addr *int32) (val int32)"}, + {"LoadInt64", Func, 0, "func(addr *int64) (val int64)"}, + {"LoadPointer", Func, 0, "func(addr *unsafe.Pointer) (val unsafe.Pointer)"}, + {"LoadUint32", Func, 0, "func(addr *uint32) (val uint32)"}, + {"LoadUint64", Func, 0, "func(addr *uint64) (val uint64)"}, + {"LoadUintptr", Func, 0, "func(addr *uintptr) (val uintptr)"}, + {"OrInt32", Func, 23, "func(addr *int32, mask int32) (old int32)"}, + {"OrInt64", Func, 23, "func(addr *int64, mask int64) (old int64)"}, + {"OrUint32", Func, 23, "func(addr *uint32, mask uint32) (old uint32)"}, + {"OrUint64", Func, 23, "func(addr *uint64, mask 
uint64) (old uint64)"}, + {"OrUintptr", Func, 23, "func(addr *uintptr, mask uintptr) (old uintptr)"}, + {"Pointer", Type, 19, ""}, + {"StoreInt32", Func, 0, "func(addr *int32, val int32)"}, + {"StoreInt64", Func, 0, "func(addr *int64, val int64)"}, + {"StorePointer", Func, 0, "func(addr *unsafe.Pointer, val unsafe.Pointer)"}, + {"StoreUint32", Func, 0, "func(addr *uint32, val uint32)"}, + {"StoreUint64", Func, 0, "func(addr *uint64, val uint64)"}, + {"StoreUintptr", Func, 0, "func(addr *uintptr, val uintptr)"}, + {"SwapInt32", Func, 2, "func(addr *int32, new int32) (old int32)"}, + {"SwapInt64", Func, 2, "func(addr *int64, new int64) (old int64)"}, + {"SwapPointer", Func, 2, "func(addr *unsafe.Pointer, new unsafe.Pointer) (old unsafe.Pointer)"}, + {"SwapUint32", Func, 2, "func(addr *uint32, new uint32) (old uint32)"}, + {"SwapUint64", Func, 2, "func(addr *uint64, new uint64) (old uint64)"}, + {"SwapUintptr", Func, 2, "func(addr *uintptr, new uintptr) (old uintptr)"}, + {"Uint32", Type, 19, ""}, + {"Uint64", Type, 19, ""}, + {"Uintptr", Type, 19, ""}, + {"Value", Type, 4, ""}, }, "syscall": { - {"(*Cmsghdr).SetLen", Method, 0}, - {"(*DLL).FindProc", Method, 0}, - {"(*DLL).MustFindProc", Method, 0}, - {"(*DLL).Release", Method, 0}, - {"(*DLLError).Error", Method, 0}, - {"(*DLLError).Unwrap", Method, 16}, - {"(*Filetime).Nanoseconds", Method, 0}, - {"(*Iovec).SetLen", Method, 0}, - {"(*LazyDLL).Handle", Method, 0}, - {"(*LazyDLL).Load", Method, 0}, - {"(*LazyDLL).NewProc", Method, 0}, - {"(*LazyProc).Addr", Method, 0}, - {"(*LazyProc).Call", Method, 0}, - {"(*LazyProc).Find", Method, 0}, - {"(*Msghdr).SetControllen", Method, 0}, - {"(*Proc).Addr", Method, 0}, - {"(*Proc).Call", Method, 0}, - {"(*PtraceRegs).PC", Method, 0}, - {"(*PtraceRegs).SetPC", Method, 0}, - {"(*RawSockaddrAny).Sockaddr", Method, 0}, - {"(*SID).Copy", Method, 0}, - {"(*SID).Len", Method, 0}, - {"(*SID).LookupAccount", Method, 0}, - {"(*SID).String", Method, 0}, - {"(*Timespec).Nano", Method, 0}, - {"(*Timespec).Unix", Method, 0}, - {"(*Timeval).Nano", Method, 0}, - {"(*Timeval).Nanoseconds", Method, 0}, - {"(*Timeval).Unix", Method, 0}, - {"(Errno).Error", Method, 0}, - {"(Errno).Is", Method, 13}, - {"(Errno).Temporary", Method, 0}, - {"(Errno).Timeout", Method, 0}, - {"(Signal).Signal", Method, 0}, - {"(Signal).String", Method, 0}, - {"(Token).Close", Method, 0}, - {"(Token).GetTokenPrimaryGroup", Method, 0}, - {"(Token).GetTokenUser", Method, 0}, - {"(Token).GetUserProfileDirectory", Method, 0}, - {"(WaitStatus).Continued", Method, 0}, - {"(WaitStatus).CoreDump", Method, 0}, - {"(WaitStatus).ExitStatus", Method, 0}, - {"(WaitStatus).Exited", Method, 0}, - {"(WaitStatus).Signal", Method, 0}, - {"(WaitStatus).Signaled", Method, 0}, - {"(WaitStatus).StopSignal", Method, 0}, - {"(WaitStatus).Stopped", Method, 0}, - {"(WaitStatus).TrapCause", Method, 0}, - {"AF_ALG", Const, 0}, - {"AF_APPLETALK", Const, 0}, - {"AF_ARP", Const, 0}, - {"AF_ASH", Const, 0}, - {"AF_ATM", Const, 0}, - {"AF_ATMPVC", Const, 0}, - {"AF_ATMSVC", Const, 0}, - {"AF_AX25", Const, 0}, - {"AF_BLUETOOTH", Const, 0}, - {"AF_BRIDGE", Const, 0}, - {"AF_CAIF", Const, 0}, - {"AF_CAN", Const, 0}, - {"AF_CCITT", Const, 0}, - {"AF_CHAOS", Const, 0}, - {"AF_CNT", Const, 0}, - {"AF_COIP", Const, 0}, - {"AF_DATAKIT", Const, 0}, - {"AF_DECnet", Const, 0}, - {"AF_DLI", Const, 0}, - {"AF_E164", Const, 0}, - {"AF_ECMA", Const, 0}, - {"AF_ECONET", Const, 0}, - {"AF_ENCAP", Const, 1}, - {"AF_FILE", Const, 0}, - {"AF_HYLINK", Const, 0}, - {"AF_IEEE80211", Const, 0}, - 
{"AF_IEEE802154", Const, 0}, - {"AF_IMPLINK", Const, 0}, - {"AF_INET", Const, 0}, - {"AF_INET6", Const, 0}, - {"AF_INET6_SDP", Const, 3}, - {"AF_INET_SDP", Const, 3}, - {"AF_IPX", Const, 0}, - {"AF_IRDA", Const, 0}, - {"AF_ISDN", Const, 0}, - {"AF_ISO", Const, 0}, - {"AF_IUCV", Const, 0}, - {"AF_KEY", Const, 0}, - {"AF_LAT", Const, 0}, - {"AF_LINK", Const, 0}, - {"AF_LLC", Const, 0}, - {"AF_LOCAL", Const, 0}, - {"AF_MAX", Const, 0}, - {"AF_MPLS", Const, 1}, - {"AF_NATM", Const, 0}, - {"AF_NDRV", Const, 0}, - {"AF_NETBEUI", Const, 0}, - {"AF_NETBIOS", Const, 0}, - {"AF_NETGRAPH", Const, 0}, - {"AF_NETLINK", Const, 0}, - {"AF_NETROM", Const, 0}, - {"AF_NS", Const, 0}, - {"AF_OROUTE", Const, 1}, - {"AF_OSI", Const, 0}, - {"AF_PACKET", Const, 0}, - {"AF_PHONET", Const, 0}, - {"AF_PPP", Const, 0}, - {"AF_PPPOX", Const, 0}, - {"AF_PUP", Const, 0}, - {"AF_RDS", Const, 0}, - {"AF_RESERVED_36", Const, 0}, - {"AF_ROSE", Const, 0}, - {"AF_ROUTE", Const, 0}, - {"AF_RXRPC", Const, 0}, - {"AF_SCLUSTER", Const, 0}, - {"AF_SECURITY", Const, 0}, - {"AF_SIP", Const, 0}, - {"AF_SLOW", Const, 0}, - {"AF_SNA", Const, 0}, - {"AF_SYSTEM", Const, 0}, - {"AF_TIPC", Const, 0}, - {"AF_UNIX", Const, 0}, - {"AF_UNSPEC", Const, 0}, - {"AF_UTUN", Const, 16}, - {"AF_VENDOR00", Const, 0}, - {"AF_VENDOR01", Const, 0}, - {"AF_VENDOR02", Const, 0}, - {"AF_VENDOR03", Const, 0}, - {"AF_VENDOR04", Const, 0}, - {"AF_VENDOR05", Const, 0}, - {"AF_VENDOR06", Const, 0}, - {"AF_VENDOR07", Const, 0}, - {"AF_VENDOR08", Const, 0}, - {"AF_VENDOR09", Const, 0}, - {"AF_VENDOR10", Const, 0}, - {"AF_VENDOR11", Const, 0}, - {"AF_VENDOR12", Const, 0}, - {"AF_VENDOR13", Const, 0}, - {"AF_VENDOR14", Const, 0}, - {"AF_VENDOR15", Const, 0}, - {"AF_VENDOR16", Const, 0}, - {"AF_VENDOR17", Const, 0}, - {"AF_VENDOR18", Const, 0}, - {"AF_VENDOR19", Const, 0}, - {"AF_VENDOR20", Const, 0}, - {"AF_VENDOR21", Const, 0}, - {"AF_VENDOR22", Const, 0}, - {"AF_VENDOR23", Const, 0}, - {"AF_VENDOR24", Const, 0}, - {"AF_VENDOR25", Const, 0}, - {"AF_VENDOR26", Const, 0}, - {"AF_VENDOR27", Const, 0}, - {"AF_VENDOR28", Const, 0}, - {"AF_VENDOR29", Const, 0}, - {"AF_VENDOR30", Const, 0}, - {"AF_VENDOR31", Const, 0}, - {"AF_VENDOR32", Const, 0}, - {"AF_VENDOR33", Const, 0}, - {"AF_VENDOR34", Const, 0}, - {"AF_VENDOR35", Const, 0}, - {"AF_VENDOR36", Const, 0}, - {"AF_VENDOR37", Const, 0}, - {"AF_VENDOR38", Const, 0}, - {"AF_VENDOR39", Const, 0}, - {"AF_VENDOR40", Const, 0}, - {"AF_VENDOR41", Const, 0}, - {"AF_VENDOR42", Const, 0}, - {"AF_VENDOR43", Const, 0}, - {"AF_VENDOR44", Const, 0}, - {"AF_VENDOR45", Const, 0}, - {"AF_VENDOR46", Const, 0}, - {"AF_VENDOR47", Const, 0}, - {"AF_WANPIPE", Const, 0}, - {"AF_X25", Const, 0}, - {"AI_CANONNAME", Const, 1}, - {"AI_NUMERICHOST", Const, 1}, - {"AI_PASSIVE", Const, 1}, - {"APPLICATION_ERROR", Const, 0}, - {"ARPHRD_ADAPT", Const, 0}, - {"ARPHRD_APPLETLK", Const, 0}, - {"ARPHRD_ARCNET", Const, 0}, - {"ARPHRD_ASH", Const, 0}, - {"ARPHRD_ATM", Const, 0}, - {"ARPHRD_AX25", Const, 0}, - {"ARPHRD_BIF", Const, 0}, - {"ARPHRD_CHAOS", Const, 0}, - {"ARPHRD_CISCO", Const, 0}, - {"ARPHRD_CSLIP", Const, 0}, - {"ARPHRD_CSLIP6", Const, 0}, - {"ARPHRD_DDCMP", Const, 0}, - {"ARPHRD_DLCI", Const, 0}, - {"ARPHRD_ECONET", Const, 0}, - {"ARPHRD_EETHER", Const, 0}, - {"ARPHRD_ETHER", Const, 0}, - {"ARPHRD_EUI64", Const, 0}, - {"ARPHRD_FCAL", Const, 0}, - {"ARPHRD_FCFABRIC", Const, 0}, - {"ARPHRD_FCPL", Const, 0}, - {"ARPHRD_FCPP", Const, 0}, - {"ARPHRD_FDDI", Const, 0}, - {"ARPHRD_FRAD", Const, 0}, - {"ARPHRD_FRELAY", Const, 1}, - {"ARPHRD_HDLC", 
Const, 0}, - {"ARPHRD_HIPPI", Const, 0}, - {"ARPHRD_HWX25", Const, 0}, - {"ARPHRD_IEEE1394", Const, 0}, - {"ARPHRD_IEEE802", Const, 0}, - {"ARPHRD_IEEE80211", Const, 0}, - {"ARPHRD_IEEE80211_PRISM", Const, 0}, - {"ARPHRD_IEEE80211_RADIOTAP", Const, 0}, - {"ARPHRD_IEEE802154", Const, 0}, - {"ARPHRD_IEEE802154_PHY", Const, 0}, - {"ARPHRD_IEEE802_TR", Const, 0}, - {"ARPHRD_INFINIBAND", Const, 0}, - {"ARPHRD_IPDDP", Const, 0}, - {"ARPHRD_IPGRE", Const, 0}, - {"ARPHRD_IRDA", Const, 0}, - {"ARPHRD_LAPB", Const, 0}, - {"ARPHRD_LOCALTLK", Const, 0}, - {"ARPHRD_LOOPBACK", Const, 0}, - {"ARPHRD_METRICOM", Const, 0}, - {"ARPHRD_NETROM", Const, 0}, - {"ARPHRD_NONE", Const, 0}, - {"ARPHRD_PIMREG", Const, 0}, - {"ARPHRD_PPP", Const, 0}, - {"ARPHRD_PRONET", Const, 0}, - {"ARPHRD_RAWHDLC", Const, 0}, - {"ARPHRD_ROSE", Const, 0}, - {"ARPHRD_RSRVD", Const, 0}, - {"ARPHRD_SIT", Const, 0}, - {"ARPHRD_SKIP", Const, 0}, - {"ARPHRD_SLIP", Const, 0}, - {"ARPHRD_SLIP6", Const, 0}, - {"ARPHRD_STRIP", Const, 1}, - {"ARPHRD_TUNNEL", Const, 0}, - {"ARPHRD_TUNNEL6", Const, 0}, - {"ARPHRD_VOID", Const, 0}, - {"ARPHRD_X25", Const, 0}, - {"AUTHTYPE_CLIENT", Const, 0}, - {"AUTHTYPE_SERVER", Const, 0}, - {"Accept", Func, 0}, - {"Accept4", Func, 1}, - {"AcceptEx", Func, 0}, - {"Access", Func, 0}, - {"Acct", Func, 0}, - {"AddrinfoW", Type, 1}, - {"AddrinfoW.Addr", Field, 1}, - {"AddrinfoW.Addrlen", Field, 1}, - {"AddrinfoW.Canonname", Field, 1}, - {"AddrinfoW.Family", Field, 1}, - {"AddrinfoW.Flags", Field, 1}, - {"AddrinfoW.Next", Field, 1}, - {"AddrinfoW.Protocol", Field, 1}, - {"AddrinfoW.Socktype", Field, 1}, - {"Adjtime", Func, 0}, - {"Adjtimex", Func, 0}, - {"AllThreadsSyscall", Func, 16}, - {"AllThreadsSyscall6", Func, 16}, - {"AttachLsf", Func, 0}, - {"B0", Const, 0}, - {"B1000000", Const, 0}, - {"B110", Const, 0}, - {"B115200", Const, 0}, - {"B1152000", Const, 0}, - {"B1200", Const, 0}, - {"B134", Const, 0}, - {"B14400", Const, 1}, - {"B150", Const, 0}, - {"B1500000", Const, 0}, - {"B1800", Const, 0}, - {"B19200", Const, 0}, - {"B200", Const, 0}, - {"B2000000", Const, 0}, - {"B230400", Const, 0}, - {"B2400", Const, 0}, - {"B2500000", Const, 0}, - {"B28800", Const, 1}, - {"B300", Const, 0}, - {"B3000000", Const, 0}, - {"B3500000", Const, 0}, - {"B38400", Const, 0}, - {"B4000000", Const, 0}, - {"B460800", Const, 0}, - {"B4800", Const, 0}, - {"B50", Const, 0}, - {"B500000", Const, 0}, - {"B57600", Const, 0}, - {"B576000", Const, 0}, - {"B600", Const, 0}, - {"B7200", Const, 1}, - {"B75", Const, 0}, - {"B76800", Const, 1}, - {"B921600", Const, 0}, - {"B9600", Const, 0}, - {"BASE_PROTOCOL", Const, 2}, - {"BIOCFEEDBACK", Const, 0}, - {"BIOCFLUSH", Const, 0}, - {"BIOCGBLEN", Const, 0}, - {"BIOCGDIRECTION", Const, 0}, - {"BIOCGDIRFILT", Const, 1}, - {"BIOCGDLT", Const, 0}, - {"BIOCGDLTLIST", Const, 0}, - {"BIOCGETBUFMODE", Const, 0}, - {"BIOCGETIF", Const, 0}, - {"BIOCGETZMAX", Const, 0}, - {"BIOCGFEEDBACK", Const, 1}, - {"BIOCGFILDROP", Const, 1}, - {"BIOCGHDRCMPLT", Const, 0}, - {"BIOCGRSIG", Const, 0}, - {"BIOCGRTIMEOUT", Const, 0}, - {"BIOCGSEESENT", Const, 0}, - {"BIOCGSTATS", Const, 0}, - {"BIOCGSTATSOLD", Const, 1}, - {"BIOCGTSTAMP", Const, 1}, - {"BIOCIMMEDIATE", Const, 0}, - {"BIOCLOCK", Const, 0}, - {"BIOCPROMISC", Const, 0}, - {"BIOCROTZBUF", Const, 0}, - {"BIOCSBLEN", Const, 0}, - {"BIOCSDIRECTION", Const, 0}, - {"BIOCSDIRFILT", Const, 1}, - {"BIOCSDLT", Const, 0}, - {"BIOCSETBUFMODE", Const, 0}, - {"BIOCSETF", Const, 0}, - {"BIOCSETFNR", Const, 0}, - {"BIOCSETIF", Const, 0}, - {"BIOCSETWF", Const, 0}, - 
{"BIOCSETZBUF", Const, 0}, - {"BIOCSFEEDBACK", Const, 1}, - {"BIOCSFILDROP", Const, 1}, - {"BIOCSHDRCMPLT", Const, 0}, - {"BIOCSRSIG", Const, 0}, - {"BIOCSRTIMEOUT", Const, 0}, - {"BIOCSSEESENT", Const, 0}, - {"BIOCSTCPF", Const, 1}, - {"BIOCSTSTAMP", Const, 1}, - {"BIOCSUDPF", Const, 1}, - {"BIOCVERSION", Const, 0}, - {"BPF_A", Const, 0}, - {"BPF_ABS", Const, 0}, - {"BPF_ADD", Const, 0}, - {"BPF_ALIGNMENT", Const, 0}, - {"BPF_ALIGNMENT32", Const, 1}, - {"BPF_ALU", Const, 0}, - {"BPF_AND", Const, 0}, - {"BPF_B", Const, 0}, - {"BPF_BUFMODE_BUFFER", Const, 0}, - {"BPF_BUFMODE_ZBUF", Const, 0}, - {"BPF_DFLTBUFSIZE", Const, 1}, - {"BPF_DIRECTION_IN", Const, 1}, - {"BPF_DIRECTION_OUT", Const, 1}, - {"BPF_DIV", Const, 0}, - {"BPF_H", Const, 0}, - {"BPF_IMM", Const, 0}, - {"BPF_IND", Const, 0}, - {"BPF_JA", Const, 0}, - {"BPF_JEQ", Const, 0}, - {"BPF_JGE", Const, 0}, - {"BPF_JGT", Const, 0}, - {"BPF_JMP", Const, 0}, - {"BPF_JSET", Const, 0}, - {"BPF_K", Const, 0}, - {"BPF_LD", Const, 0}, - {"BPF_LDX", Const, 0}, - {"BPF_LEN", Const, 0}, - {"BPF_LSH", Const, 0}, - {"BPF_MAJOR_VERSION", Const, 0}, - {"BPF_MAXBUFSIZE", Const, 0}, - {"BPF_MAXINSNS", Const, 0}, - {"BPF_MEM", Const, 0}, - {"BPF_MEMWORDS", Const, 0}, - {"BPF_MINBUFSIZE", Const, 0}, - {"BPF_MINOR_VERSION", Const, 0}, - {"BPF_MISC", Const, 0}, - {"BPF_MSH", Const, 0}, - {"BPF_MUL", Const, 0}, - {"BPF_NEG", Const, 0}, - {"BPF_OR", Const, 0}, - {"BPF_RELEASE", Const, 0}, - {"BPF_RET", Const, 0}, - {"BPF_RSH", Const, 0}, - {"BPF_ST", Const, 0}, - {"BPF_STX", Const, 0}, - {"BPF_SUB", Const, 0}, - {"BPF_TAX", Const, 0}, - {"BPF_TXA", Const, 0}, - {"BPF_T_BINTIME", Const, 1}, - {"BPF_T_BINTIME_FAST", Const, 1}, - {"BPF_T_BINTIME_MONOTONIC", Const, 1}, - {"BPF_T_BINTIME_MONOTONIC_FAST", Const, 1}, - {"BPF_T_FAST", Const, 1}, - {"BPF_T_FLAG_MASK", Const, 1}, - {"BPF_T_FORMAT_MASK", Const, 1}, - {"BPF_T_MICROTIME", Const, 1}, - {"BPF_T_MICROTIME_FAST", Const, 1}, - {"BPF_T_MICROTIME_MONOTONIC", Const, 1}, - {"BPF_T_MICROTIME_MONOTONIC_FAST", Const, 1}, - {"BPF_T_MONOTONIC", Const, 1}, - {"BPF_T_MONOTONIC_FAST", Const, 1}, - {"BPF_T_NANOTIME", Const, 1}, - {"BPF_T_NANOTIME_FAST", Const, 1}, - {"BPF_T_NANOTIME_MONOTONIC", Const, 1}, - {"BPF_T_NANOTIME_MONOTONIC_FAST", Const, 1}, - {"BPF_T_NONE", Const, 1}, - {"BPF_T_NORMAL", Const, 1}, - {"BPF_W", Const, 0}, - {"BPF_X", Const, 0}, - {"BRKINT", Const, 0}, - {"Bind", Func, 0}, - {"BindToDevice", Func, 0}, - {"BpfBuflen", Func, 0}, - {"BpfDatalink", Func, 0}, - {"BpfHdr", Type, 0}, - {"BpfHdr.Caplen", Field, 0}, - {"BpfHdr.Datalen", Field, 0}, - {"BpfHdr.Hdrlen", Field, 0}, - {"BpfHdr.Pad_cgo_0", Field, 0}, - {"BpfHdr.Tstamp", Field, 0}, - {"BpfHeadercmpl", Func, 0}, - {"BpfInsn", Type, 0}, - {"BpfInsn.Code", Field, 0}, - {"BpfInsn.Jf", Field, 0}, - {"BpfInsn.Jt", Field, 0}, - {"BpfInsn.K", Field, 0}, - {"BpfInterface", Func, 0}, - {"BpfJump", Func, 0}, - {"BpfProgram", Type, 0}, - {"BpfProgram.Insns", Field, 0}, - {"BpfProgram.Len", Field, 0}, - {"BpfProgram.Pad_cgo_0", Field, 0}, - {"BpfStat", Type, 0}, - {"BpfStat.Capt", Field, 2}, - {"BpfStat.Drop", Field, 0}, - {"BpfStat.Padding", Field, 2}, - {"BpfStat.Recv", Field, 0}, - {"BpfStats", Func, 0}, - {"BpfStmt", Func, 0}, - {"BpfTimeout", Func, 0}, - {"BpfTimeval", Type, 2}, - {"BpfTimeval.Sec", Field, 2}, - {"BpfTimeval.Usec", Field, 2}, - {"BpfVersion", Type, 0}, - {"BpfVersion.Major", Field, 0}, - {"BpfVersion.Minor", Field, 0}, - {"BpfZbuf", Type, 0}, - {"BpfZbuf.Bufa", Field, 0}, - {"BpfZbuf.Bufb", Field, 0}, - {"BpfZbuf.Buflen", Field, 0}, - 
{"BpfZbufHeader", Type, 0}, - {"BpfZbufHeader.Kernel_gen", Field, 0}, - {"BpfZbufHeader.Kernel_len", Field, 0}, - {"BpfZbufHeader.User_gen", Field, 0}, - {"BpfZbufHeader.X_bzh_pad", Field, 0}, - {"ByHandleFileInformation", Type, 0}, - {"ByHandleFileInformation.CreationTime", Field, 0}, - {"ByHandleFileInformation.FileAttributes", Field, 0}, - {"ByHandleFileInformation.FileIndexHigh", Field, 0}, - {"ByHandleFileInformation.FileIndexLow", Field, 0}, - {"ByHandleFileInformation.FileSizeHigh", Field, 0}, - {"ByHandleFileInformation.FileSizeLow", Field, 0}, - {"ByHandleFileInformation.LastAccessTime", Field, 0}, - {"ByHandleFileInformation.LastWriteTime", Field, 0}, - {"ByHandleFileInformation.NumberOfLinks", Field, 0}, - {"ByHandleFileInformation.VolumeSerialNumber", Field, 0}, - {"BytePtrFromString", Func, 1}, - {"ByteSliceFromString", Func, 1}, - {"CCR0_FLUSH", Const, 1}, - {"CERT_CHAIN_POLICY_AUTHENTICODE", Const, 0}, - {"CERT_CHAIN_POLICY_AUTHENTICODE_TS", Const, 0}, - {"CERT_CHAIN_POLICY_BASE", Const, 0}, - {"CERT_CHAIN_POLICY_BASIC_CONSTRAINTS", Const, 0}, - {"CERT_CHAIN_POLICY_EV", Const, 0}, - {"CERT_CHAIN_POLICY_MICROSOFT_ROOT", Const, 0}, - {"CERT_CHAIN_POLICY_NT_AUTH", Const, 0}, - {"CERT_CHAIN_POLICY_SSL", Const, 0}, - {"CERT_E_CN_NO_MATCH", Const, 0}, - {"CERT_E_EXPIRED", Const, 0}, - {"CERT_E_PURPOSE", Const, 0}, - {"CERT_E_ROLE", Const, 0}, - {"CERT_E_UNTRUSTEDROOT", Const, 0}, - {"CERT_STORE_ADD_ALWAYS", Const, 0}, - {"CERT_STORE_DEFER_CLOSE_UNTIL_LAST_FREE_FLAG", Const, 0}, - {"CERT_STORE_PROV_MEMORY", Const, 0}, - {"CERT_TRUST_HAS_EXCLUDED_NAME_CONSTRAINT", Const, 0}, - {"CERT_TRUST_HAS_NOT_DEFINED_NAME_CONSTRAINT", Const, 0}, - {"CERT_TRUST_HAS_NOT_PERMITTED_NAME_CONSTRAINT", Const, 0}, - {"CERT_TRUST_HAS_NOT_SUPPORTED_CRITICAL_EXT", Const, 0}, - {"CERT_TRUST_HAS_NOT_SUPPORTED_NAME_CONSTRAINT", Const, 0}, - {"CERT_TRUST_INVALID_BASIC_CONSTRAINTS", Const, 0}, - {"CERT_TRUST_INVALID_EXTENSION", Const, 0}, - {"CERT_TRUST_INVALID_NAME_CONSTRAINTS", Const, 0}, - {"CERT_TRUST_INVALID_POLICY_CONSTRAINTS", Const, 0}, - {"CERT_TRUST_IS_CYCLIC", Const, 0}, - {"CERT_TRUST_IS_EXPLICIT_DISTRUST", Const, 0}, - {"CERT_TRUST_IS_NOT_SIGNATURE_VALID", Const, 0}, - {"CERT_TRUST_IS_NOT_TIME_VALID", Const, 0}, - {"CERT_TRUST_IS_NOT_VALID_FOR_USAGE", Const, 0}, - {"CERT_TRUST_IS_OFFLINE_REVOCATION", Const, 0}, - {"CERT_TRUST_IS_REVOKED", Const, 0}, - {"CERT_TRUST_IS_UNTRUSTED_ROOT", Const, 0}, - {"CERT_TRUST_NO_ERROR", Const, 0}, - {"CERT_TRUST_NO_ISSUANCE_CHAIN_POLICY", Const, 0}, - {"CERT_TRUST_REVOCATION_STATUS_UNKNOWN", Const, 0}, - {"CFLUSH", Const, 1}, - {"CLOCAL", Const, 0}, - {"CLONE_CHILD_CLEARTID", Const, 2}, - {"CLONE_CHILD_SETTID", Const, 2}, - {"CLONE_CLEAR_SIGHAND", Const, 20}, - {"CLONE_CSIGNAL", Const, 3}, - {"CLONE_DETACHED", Const, 2}, - {"CLONE_FILES", Const, 2}, - {"CLONE_FS", Const, 2}, - {"CLONE_INTO_CGROUP", Const, 20}, - {"CLONE_IO", Const, 2}, - {"CLONE_NEWCGROUP", Const, 20}, - {"CLONE_NEWIPC", Const, 2}, - {"CLONE_NEWNET", Const, 2}, - {"CLONE_NEWNS", Const, 2}, - {"CLONE_NEWPID", Const, 2}, - {"CLONE_NEWTIME", Const, 20}, - {"CLONE_NEWUSER", Const, 2}, - {"CLONE_NEWUTS", Const, 2}, - {"CLONE_PARENT", Const, 2}, - {"CLONE_PARENT_SETTID", Const, 2}, - {"CLONE_PID", Const, 3}, - {"CLONE_PIDFD", Const, 20}, - {"CLONE_PTRACE", Const, 2}, - {"CLONE_SETTLS", Const, 2}, - {"CLONE_SIGHAND", Const, 2}, - {"CLONE_SYSVSEM", Const, 2}, - {"CLONE_THREAD", Const, 2}, - {"CLONE_UNTRACED", Const, 2}, - {"CLONE_VFORK", Const, 2}, - {"CLONE_VM", Const, 2}, - {"CPUID_CFLUSH", Const, 1}, - 
{"CREAD", Const, 0}, - {"CREATE_ALWAYS", Const, 0}, - {"CREATE_NEW", Const, 0}, - {"CREATE_NEW_PROCESS_GROUP", Const, 1}, - {"CREATE_UNICODE_ENVIRONMENT", Const, 0}, - {"CRYPT_DEFAULT_CONTAINER_OPTIONAL", Const, 0}, - {"CRYPT_DELETEKEYSET", Const, 0}, - {"CRYPT_MACHINE_KEYSET", Const, 0}, - {"CRYPT_NEWKEYSET", Const, 0}, - {"CRYPT_SILENT", Const, 0}, - {"CRYPT_VERIFYCONTEXT", Const, 0}, - {"CS5", Const, 0}, - {"CS6", Const, 0}, - {"CS7", Const, 0}, - {"CS8", Const, 0}, - {"CSIZE", Const, 0}, - {"CSTART", Const, 1}, - {"CSTATUS", Const, 1}, - {"CSTOP", Const, 1}, - {"CSTOPB", Const, 0}, - {"CSUSP", Const, 1}, - {"CTL_MAXNAME", Const, 0}, - {"CTL_NET", Const, 0}, - {"CTL_QUERY", Const, 1}, - {"CTRL_BREAK_EVENT", Const, 1}, - {"CTRL_CLOSE_EVENT", Const, 14}, - {"CTRL_C_EVENT", Const, 1}, - {"CTRL_LOGOFF_EVENT", Const, 14}, - {"CTRL_SHUTDOWN_EVENT", Const, 14}, - {"CancelIo", Func, 0}, - {"CancelIoEx", Func, 1}, - {"CertAddCertificateContextToStore", Func, 0}, - {"CertChainContext", Type, 0}, - {"CertChainContext.ChainCount", Field, 0}, - {"CertChainContext.Chains", Field, 0}, - {"CertChainContext.HasRevocationFreshnessTime", Field, 0}, - {"CertChainContext.LowerQualityChainCount", Field, 0}, - {"CertChainContext.LowerQualityChains", Field, 0}, - {"CertChainContext.RevocationFreshnessTime", Field, 0}, - {"CertChainContext.Size", Field, 0}, - {"CertChainContext.TrustStatus", Field, 0}, - {"CertChainElement", Type, 0}, - {"CertChainElement.ApplicationUsage", Field, 0}, - {"CertChainElement.CertContext", Field, 0}, - {"CertChainElement.ExtendedErrorInfo", Field, 0}, - {"CertChainElement.IssuanceUsage", Field, 0}, - {"CertChainElement.RevocationInfo", Field, 0}, - {"CertChainElement.Size", Field, 0}, - {"CertChainElement.TrustStatus", Field, 0}, - {"CertChainPara", Type, 0}, - {"CertChainPara.CacheResync", Field, 0}, - {"CertChainPara.CheckRevocationFreshnessTime", Field, 0}, - {"CertChainPara.RequestedUsage", Field, 0}, - {"CertChainPara.RequstedIssuancePolicy", Field, 0}, - {"CertChainPara.RevocationFreshnessTime", Field, 0}, - {"CertChainPara.Size", Field, 0}, - {"CertChainPara.URLRetrievalTimeout", Field, 0}, - {"CertChainPolicyPara", Type, 0}, - {"CertChainPolicyPara.ExtraPolicyPara", Field, 0}, - {"CertChainPolicyPara.Flags", Field, 0}, - {"CertChainPolicyPara.Size", Field, 0}, - {"CertChainPolicyStatus", Type, 0}, - {"CertChainPolicyStatus.ChainIndex", Field, 0}, - {"CertChainPolicyStatus.ElementIndex", Field, 0}, - {"CertChainPolicyStatus.Error", Field, 0}, - {"CertChainPolicyStatus.ExtraPolicyStatus", Field, 0}, - {"CertChainPolicyStatus.Size", Field, 0}, - {"CertCloseStore", Func, 0}, - {"CertContext", Type, 0}, - {"CertContext.CertInfo", Field, 0}, - {"CertContext.EncodedCert", Field, 0}, - {"CertContext.EncodingType", Field, 0}, - {"CertContext.Length", Field, 0}, - {"CertContext.Store", Field, 0}, - {"CertCreateCertificateContext", Func, 0}, - {"CertEnhKeyUsage", Type, 0}, - {"CertEnhKeyUsage.Length", Field, 0}, - {"CertEnhKeyUsage.UsageIdentifiers", Field, 0}, - {"CertEnumCertificatesInStore", Func, 0}, - {"CertFreeCertificateChain", Func, 0}, - {"CertFreeCertificateContext", Func, 0}, - {"CertGetCertificateChain", Func, 0}, - {"CertInfo", Type, 11}, - {"CertOpenStore", Func, 0}, - {"CertOpenSystemStore", Func, 0}, - {"CertRevocationCrlInfo", Type, 11}, - {"CertRevocationInfo", Type, 0}, - {"CertRevocationInfo.CrlInfo", Field, 0}, - {"CertRevocationInfo.FreshnessTime", Field, 0}, - {"CertRevocationInfo.HasFreshnessTime", Field, 0}, - {"CertRevocationInfo.OidSpecificInfo", Field, 0}, 
- {"CertRevocationInfo.RevocationOid", Field, 0}, - {"CertRevocationInfo.RevocationResult", Field, 0}, - {"CertRevocationInfo.Size", Field, 0}, - {"CertSimpleChain", Type, 0}, - {"CertSimpleChain.Elements", Field, 0}, - {"CertSimpleChain.HasRevocationFreshnessTime", Field, 0}, - {"CertSimpleChain.NumElements", Field, 0}, - {"CertSimpleChain.RevocationFreshnessTime", Field, 0}, - {"CertSimpleChain.Size", Field, 0}, - {"CertSimpleChain.TrustListInfo", Field, 0}, - {"CertSimpleChain.TrustStatus", Field, 0}, - {"CertTrustListInfo", Type, 11}, - {"CertTrustStatus", Type, 0}, - {"CertTrustStatus.ErrorStatus", Field, 0}, - {"CertTrustStatus.InfoStatus", Field, 0}, - {"CertUsageMatch", Type, 0}, - {"CertUsageMatch.Type", Field, 0}, - {"CertUsageMatch.Usage", Field, 0}, - {"CertVerifyCertificateChainPolicy", Func, 0}, - {"Chdir", Func, 0}, - {"CheckBpfVersion", Func, 0}, - {"Chflags", Func, 0}, - {"Chmod", Func, 0}, - {"Chown", Func, 0}, - {"Chroot", Func, 0}, - {"Clearenv", Func, 0}, - {"Close", Func, 0}, - {"CloseHandle", Func, 0}, - {"CloseOnExec", Func, 0}, - {"Closesocket", Func, 0}, - {"CmsgLen", Func, 0}, - {"CmsgSpace", Func, 0}, - {"Cmsghdr", Type, 0}, - {"Cmsghdr.Len", Field, 0}, - {"Cmsghdr.Level", Field, 0}, - {"Cmsghdr.Type", Field, 0}, - {"Cmsghdr.X__cmsg_data", Field, 0}, - {"CommandLineToArgv", Func, 0}, - {"ComputerName", Func, 0}, - {"Conn", Type, 9}, - {"Connect", Func, 0}, - {"ConnectEx", Func, 1}, - {"ConvertSidToStringSid", Func, 0}, - {"ConvertStringSidToSid", Func, 0}, - {"CopySid", Func, 0}, - {"Creat", Func, 0}, - {"CreateDirectory", Func, 0}, - {"CreateFile", Func, 0}, - {"CreateFileMapping", Func, 0}, - {"CreateHardLink", Func, 4}, - {"CreateIoCompletionPort", Func, 0}, - {"CreatePipe", Func, 0}, - {"CreateProcess", Func, 0}, - {"CreateProcessAsUser", Func, 10}, - {"CreateSymbolicLink", Func, 4}, - {"CreateToolhelp32Snapshot", Func, 4}, - {"Credential", Type, 0}, - {"Credential.Gid", Field, 0}, - {"Credential.Groups", Field, 0}, - {"Credential.NoSetGroups", Field, 9}, - {"Credential.Uid", Field, 0}, - {"CryptAcquireContext", Func, 0}, - {"CryptGenRandom", Func, 0}, - {"CryptReleaseContext", Func, 0}, - {"DIOCBSFLUSH", Const, 1}, - {"DIOCOSFPFLUSH", Const, 1}, - {"DLL", Type, 0}, - {"DLL.Handle", Field, 0}, - {"DLL.Name", Field, 0}, - {"DLLError", Type, 0}, - {"DLLError.Err", Field, 0}, - {"DLLError.Msg", Field, 0}, - {"DLLError.ObjName", Field, 0}, - {"DLT_A429", Const, 0}, - {"DLT_A653_ICM", Const, 0}, - {"DLT_AIRONET_HEADER", Const, 0}, - {"DLT_AOS", Const, 1}, - {"DLT_APPLE_IP_OVER_IEEE1394", Const, 0}, - {"DLT_ARCNET", Const, 0}, - {"DLT_ARCNET_LINUX", Const, 0}, - {"DLT_ATM_CLIP", Const, 0}, - {"DLT_ATM_RFC1483", Const, 0}, - {"DLT_AURORA", Const, 0}, - {"DLT_AX25", Const, 0}, - {"DLT_AX25_KISS", Const, 0}, - {"DLT_BACNET_MS_TP", Const, 0}, - {"DLT_BLUETOOTH_HCI_H4", Const, 0}, - {"DLT_BLUETOOTH_HCI_H4_WITH_PHDR", Const, 0}, - {"DLT_CAN20B", Const, 0}, - {"DLT_CAN_SOCKETCAN", Const, 1}, - {"DLT_CHAOS", Const, 0}, - {"DLT_CHDLC", Const, 0}, - {"DLT_CISCO_IOS", Const, 0}, - {"DLT_C_HDLC", Const, 0}, - {"DLT_C_HDLC_WITH_DIR", Const, 0}, - {"DLT_DBUS", Const, 1}, - {"DLT_DECT", Const, 1}, - {"DLT_DOCSIS", Const, 0}, - {"DLT_DVB_CI", Const, 1}, - {"DLT_ECONET", Const, 0}, - {"DLT_EN10MB", Const, 0}, - {"DLT_EN3MB", Const, 0}, - {"DLT_ENC", Const, 0}, - {"DLT_ERF", Const, 0}, - {"DLT_ERF_ETH", Const, 0}, - {"DLT_ERF_POS", Const, 0}, - {"DLT_FC_2", Const, 1}, - {"DLT_FC_2_WITH_FRAME_DELIMS", Const, 1}, - {"DLT_FDDI", Const, 0}, - {"DLT_FLEXRAY", Const, 0}, - {"DLT_FRELAY", 
Const, 0}, - {"DLT_FRELAY_WITH_DIR", Const, 0}, - {"DLT_GCOM_SERIAL", Const, 0}, - {"DLT_GCOM_T1E1", Const, 0}, - {"DLT_GPF_F", Const, 0}, - {"DLT_GPF_T", Const, 0}, - {"DLT_GPRS_LLC", Const, 0}, - {"DLT_GSMTAP_ABIS", Const, 1}, - {"DLT_GSMTAP_UM", Const, 1}, - {"DLT_HDLC", Const, 1}, - {"DLT_HHDLC", Const, 0}, - {"DLT_HIPPI", Const, 1}, - {"DLT_IBM_SN", Const, 0}, - {"DLT_IBM_SP", Const, 0}, - {"DLT_IEEE802", Const, 0}, - {"DLT_IEEE802_11", Const, 0}, - {"DLT_IEEE802_11_RADIO", Const, 0}, - {"DLT_IEEE802_11_RADIO_AVS", Const, 0}, - {"DLT_IEEE802_15_4", Const, 0}, - {"DLT_IEEE802_15_4_LINUX", Const, 0}, - {"DLT_IEEE802_15_4_NOFCS", Const, 1}, - {"DLT_IEEE802_15_4_NONASK_PHY", Const, 0}, - {"DLT_IEEE802_16_MAC_CPS", Const, 0}, - {"DLT_IEEE802_16_MAC_CPS_RADIO", Const, 0}, - {"DLT_IPFILTER", Const, 0}, - {"DLT_IPMB", Const, 0}, - {"DLT_IPMB_LINUX", Const, 0}, - {"DLT_IPNET", Const, 1}, - {"DLT_IPOIB", Const, 1}, - {"DLT_IPV4", Const, 1}, - {"DLT_IPV6", Const, 1}, - {"DLT_IP_OVER_FC", Const, 0}, - {"DLT_JUNIPER_ATM1", Const, 0}, - {"DLT_JUNIPER_ATM2", Const, 0}, - {"DLT_JUNIPER_ATM_CEMIC", Const, 1}, - {"DLT_JUNIPER_CHDLC", Const, 0}, - {"DLT_JUNIPER_ES", Const, 0}, - {"DLT_JUNIPER_ETHER", Const, 0}, - {"DLT_JUNIPER_FIBRECHANNEL", Const, 1}, - {"DLT_JUNIPER_FRELAY", Const, 0}, - {"DLT_JUNIPER_GGSN", Const, 0}, - {"DLT_JUNIPER_ISM", Const, 0}, - {"DLT_JUNIPER_MFR", Const, 0}, - {"DLT_JUNIPER_MLFR", Const, 0}, - {"DLT_JUNIPER_MLPPP", Const, 0}, - {"DLT_JUNIPER_MONITOR", Const, 0}, - {"DLT_JUNIPER_PIC_PEER", Const, 0}, - {"DLT_JUNIPER_PPP", Const, 0}, - {"DLT_JUNIPER_PPPOE", Const, 0}, - {"DLT_JUNIPER_PPPOE_ATM", Const, 0}, - {"DLT_JUNIPER_SERVICES", Const, 0}, - {"DLT_JUNIPER_SRX_E2E", Const, 1}, - {"DLT_JUNIPER_ST", Const, 0}, - {"DLT_JUNIPER_VP", Const, 0}, - {"DLT_JUNIPER_VS", Const, 1}, - {"DLT_LAPB_WITH_DIR", Const, 0}, - {"DLT_LAPD", Const, 0}, - {"DLT_LIN", Const, 0}, - {"DLT_LINUX_EVDEV", Const, 1}, - {"DLT_LINUX_IRDA", Const, 0}, - {"DLT_LINUX_LAPD", Const, 0}, - {"DLT_LINUX_PPP_WITHDIRECTION", Const, 0}, - {"DLT_LINUX_SLL", Const, 0}, - {"DLT_LOOP", Const, 0}, - {"DLT_LTALK", Const, 0}, - {"DLT_MATCHING_MAX", Const, 1}, - {"DLT_MATCHING_MIN", Const, 1}, - {"DLT_MFR", Const, 0}, - {"DLT_MOST", Const, 0}, - {"DLT_MPEG_2_TS", Const, 1}, - {"DLT_MPLS", Const, 1}, - {"DLT_MTP2", Const, 0}, - {"DLT_MTP2_WITH_PHDR", Const, 0}, - {"DLT_MTP3", Const, 0}, - {"DLT_MUX27010", Const, 1}, - {"DLT_NETANALYZER", Const, 1}, - {"DLT_NETANALYZER_TRANSPARENT", Const, 1}, - {"DLT_NFC_LLCP", Const, 1}, - {"DLT_NFLOG", Const, 1}, - {"DLT_NG40", Const, 1}, - {"DLT_NULL", Const, 0}, - {"DLT_PCI_EXP", Const, 0}, - {"DLT_PFLOG", Const, 0}, - {"DLT_PFSYNC", Const, 0}, - {"DLT_PPI", Const, 0}, - {"DLT_PPP", Const, 0}, - {"DLT_PPP_BSDOS", Const, 0}, - {"DLT_PPP_ETHER", Const, 0}, - {"DLT_PPP_PPPD", Const, 0}, - {"DLT_PPP_SERIAL", Const, 0}, - {"DLT_PPP_WITH_DIR", Const, 0}, - {"DLT_PPP_WITH_DIRECTION", Const, 0}, - {"DLT_PRISM_HEADER", Const, 0}, - {"DLT_PRONET", Const, 0}, - {"DLT_RAIF1", Const, 0}, - {"DLT_RAW", Const, 0}, - {"DLT_RAWAF_MASK", Const, 1}, - {"DLT_RIO", Const, 0}, - {"DLT_SCCP", Const, 0}, - {"DLT_SITA", Const, 0}, - {"DLT_SLIP", Const, 0}, - {"DLT_SLIP_BSDOS", Const, 0}, - {"DLT_STANAG_5066_D_PDU", Const, 1}, - {"DLT_SUNATM", Const, 0}, - {"DLT_SYMANTEC_FIREWALL", Const, 0}, - {"DLT_TZSP", Const, 0}, - {"DLT_USB", Const, 0}, - {"DLT_USB_LINUX", Const, 0}, - {"DLT_USB_LINUX_MMAPPED", Const, 1}, - {"DLT_USER0", Const, 0}, - {"DLT_USER1", Const, 0}, - {"DLT_USER10", Const, 0}, - {"DLT_USER11", 
Const, 0}, - {"DLT_USER12", Const, 0}, - {"DLT_USER13", Const, 0}, - {"DLT_USER14", Const, 0}, - {"DLT_USER15", Const, 0}, - {"DLT_USER2", Const, 0}, - {"DLT_USER3", Const, 0}, - {"DLT_USER4", Const, 0}, - {"DLT_USER5", Const, 0}, - {"DLT_USER6", Const, 0}, - {"DLT_USER7", Const, 0}, - {"DLT_USER8", Const, 0}, - {"DLT_USER9", Const, 0}, - {"DLT_WIHART", Const, 1}, - {"DLT_X2E_SERIAL", Const, 0}, - {"DLT_X2E_XORAYA", Const, 0}, - {"DNSMXData", Type, 0}, - {"DNSMXData.NameExchange", Field, 0}, - {"DNSMXData.Pad", Field, 0}, - {"DNSMXData.Preference", Field, 0}, - {"DNSPTRData", Type, 0}, - {"DNSPTRData.Host", Field, 0}, - {"DNSRecord", Type, 0}, - {"DNSRecord.Data", Field, 0}, - {"DNSRecord.Dw", Field, 0}, - {"DNSRecord.Length", Field, 0}, - {"DNSRecord.Name", Field, 0}, - {"DNSRecord.Next", Field, 0}, - {"DNSRecord.Reserved", Field, 0}, - {"DNSRecord.Ttl", Field, 0}, - {"DNSRecord.Type", Field, 0}, - {"DNSSRVData", Type, 0}, - {"DNSSRVData.Pad", Field, 0}, - {"DNSSRVData.Port", Field, 0}, - {"DNSSRVData.Priority", Field, 0}, - {"DNSSRVData.Target", Field, 0}, - {"DNSSRVData.Weight", Field, 0}, - {"DNSTXTData", Type, 0}, - {"DNSTXTData.StringArray", Field, 0}, - {"DNSTXTData.StringCount", Field, 0}, - {"DNS_INFO_NO_RECORDS", Const, 4}, - {"DNS_TYPE_A", Const, 0}, - {"DNS_TYPE_A6", Const, 0}, - {"DNS_TYPE_AAAA", Const, 0}, - {"DNS_TYPE_ADDRS", Const, 0}, - {"DNS_TYPE_AFSDB", Const, 0}, - {"DNS_TYPE_ALL", Const, 0}, - {"DNS_TYPE_ANY", Const, 0}, - {"DNS_TYPE_ATMA", Const, 0}, - {"DNS_TYPE_AXFR", Const, 0}, - {"DNS_TYPE_CERT", Const, 0}, - {"DNS_TYPE_CNAME", Const, 0}, - {"DNS_TYPE_DHCID", Const, 0}, - {"DNS_TYPE_DNAME", Const, 0}, - {"DNS_TYPE_DNSKEY", Const, 0}, - {"DNS_TYPE_DS", Const, 0}, - {"DNS_TYPE_EID", Const, 0}, - {"DNS_TYPE_GID", Const, 0}, - {"DNS_TYPE_GPOS", Const, 0}, - {"DNS_TYPE_HINFO", Const, 0}, - {"DNS_TYPE_ISDN", Const, 0}, - {"DNS_TYPE_IXFR", Const, 0}, - {"DNS_TYPE_KEY", Const, 0}, - {"DNS_TYPE_KX", Const, 0}, - {"DNS_TYPE_LOC", Const, 0}, - {"DNS_TYPE_MAILA", Const, 0}, - {"DNS_TYPE_MAILB", Const, 0}, - {"DNS_TYPE_MB", Const, 0}, - {"DNS_TYPE_MD", Const, 0}, - {"DNS_TYPE_MF", Const, 0}, - {"DNS_TYPE_MG", Const, 0}, - {"DNS_TYPE_MINFO", Const, 0}, - {"DNS_TYPE_MR", Const, 0}, - {"DNS_TYPE_MX", Const, 0}, - {"DNS_TYPE_NAPTR", Const, 0}, - {"DNS_TYPE_NBSTAT", Const, 0}, - {"DNS_TYPE_NIMLOC", Const, 0}, - {"DNS_TYPE_NS", Const, 0}, - {"DNS_TYPE_NSAP", Const, 0}, - {"DNS_TYPE_NSAPPTR", Const, 0}, - {"DNS_TYPE_NSEC", Const, 0}, - {"DNS_TYPE_NULL", Const, 0}, - {"DNS_TYPE_NXT", Const, 0}, - {"DNS_TYPE_OPT", Const, 0}, - {"DNS_TYPE_PTR", Const, 0}, - {"DNS_TYPE_PX", Const, 0}, - {"DNS_TYPE_RP", Const, 0}, - {"DNS_TYPE_RRSIG", Const, 0}, - {"DNS_TYPE_RT", Const, 0}, - {"DNS_TYPE_SIG", Const, 0}, - {"DNS_TYPE_SINK", Const, 0}, - {"DNS_TYPE_SOA", Const, 0}, - {"DNS_TYPE_SRV", Const, 0}, - {"DNS_TYPE_TEXT", Const, 0}, - {"DNS_TYPE_TKEY", Const, 0}, - {"DNS_TYPE_TSIG", Const, 0}, - {"DNS_TYPE_UID", Const, 0}, - {"DNS_TYPE_UINFO", Const, 0}, - {"DNS_TYPE_UNSPEC", Const, 0}, - {"DNS_TYPE_WINS", Const, 0}, - {"DNS_TYPE_WINSR", Const, 0}, - {"DNS_TYPE_WKS", Const, 0}, - {"DNS_TYPE_X25", Const, 0}, - {"DT_BLK", Const, 0}, - {"DT_CHR", Const, 0}, - {"DT_DIR", Const, 0}, - {"DT_FIFO", Const, 0}, - {"DT_LNK", Const, 0}, - {"DT_REG", Const, 0}, - {"DT_SOCK", Const, 0}, - {"DT_UNKNOWN", Const, 0}, - {"DT_WHT", Const, 0}, - {"DUPLICATE_CLOSE_SOURCE", Const, 0}, - {"DUPLICATE_SAME_ACCESS", Const, 0}, - {"DeleteFile", Func, 0}, - {"DetachLsf", Func, 0}, - {"DeviceIoControl", Func, 4}, - {"Dirent", 
Type, 0}, - {"Dirent.Fileno", Field, 0}, - {"Dirent.Ino", Field, 0}, - {"Dirent.Name", Field, 0}, - {"Dirent.Namlen", Field, 0}, - {"Dirent.Off", Field, 0}, - {"Dirent.Pad0", Field, 12}, - {"Dirent.Pad1", Field, 12}, - {"Dirent.Pad_cgo_0", Field, 0}, - {"Dirent.Reclen", Field, 0}, - {"Dirent.Seekoff", Field, 0}, - {"Dirent.Type", Field, 0}, - {"Dirent.X__d_padding", Field, 3}, - {"DnsNameCompare", Func, 4}, - {"DnsQuery", Func, 0}, - {"DnsRecordListFree", Func, 0}, - {"DnsSectionAdditional", Const, 4}, - {"DnsSectionAnswer", Const, 4}, - {"DnsSectionAuthority", Const, 4}, - {"DnsSectionQuestion", Const, 4}, - {"Dup", Func, 0}, - {"Dup2", Func, 0}, - {"Dup3", Func, 2}, - {"DuplicateHandle", Func, 0}, - {"E2BIG", Const, 0}, - {"EACCES", Const, 0}, - {"EADDRINUSE", Const, 0}, - {"EADDRNOTAVAIL", Const, 0}, - {"EADV", Const, 0}, - {"EAFNOSUPPORT", Const, 0}, - {"EAGAIN", Const, 0}, - {"EALREADY", Const, 0}, - {"EAUTH", Const, 0}, - {"EBADARCH", Const, 0}, - {"EBADE", Const, 0}, - {"EBADEXEC", Const, 0}, - {"EBADF", Const, 0}, - {"EBADFD", Const, 0}, - {"EBADMACHO", Const, 0}, - {"EBADMSG", Const, 0}, - {"EBADR", Const, 0}, - {"EBADRPC", Const, 0}, - {"EBADRQC", Const, 0}, - {"EBADSLT", Const, 0}, - {"EBFONT", Const, 0}, - {"EBUSY", Const, 0}, - {"ECANCELED", Const, 0}, - {"ECAPMODE", Const, 1}, - {"ECHILD", Const, 0}, - {"ECHO", Const, 0}, - {"ECHOCTL", Const, 0}, - {"ECHOE", Const, 0}, - {"ECHOK", Const, 0}, - {"ECHOKE", Const, 0}, - {"ECHONL", Const, 0}, - {"ECHOPRT", Const, 0}, - {"ECHRNG", Const, 0}, - {"ECOMM", Const, 0}, - {"ECONNABORTED", Const, 0}, - {"ECONNREFUSED", Const, 0}, - {"ECONNRESET", Const, 0}, - {"EDEADLK", Const, 0}, - {"EDEADLOCK", Const, 0}, - {"EDESTADDRREQ", Const, 0}, - {"EDEVERR", Const, 0}, - {"EDOM", Const, 0}, - {"EDOOFUS", Const, 0}, - {"EDOTDOT", Const, 0}, - {"EDQUOT", Const, 0}, - {"EEXIST", Const, 0}, - {"EFAULT", Const, 0}, - {"EFBIG", Const, 0}, - {"EFER_LMA", Const, 1}, - {"EFER_LME", Const, 1}, - {"EFER_NXE", Const, 1}, - {"EFER_SCE", Const, 1}, - {"EFTYPE", Const, 0}, - {"EHOSTDOWN", Const, 0}, - {"EHOSTUNREACH", Const, 0}, - {"EHWPOISON", Const, 0}, - {"EIDRM", Const, 0}, - {"EILSEQ", Const, 0}, - {"EINPROGRESS", Const, 0}, - {"EINTR", Const, 0}, - {"EINVAL", Const, 0}, - {"EIO", Const, 0}, - {"EIPSEC", Const, 1}, - {"EISCONN", Const, 0}, - {"EISDIR", Const, 0}, - {"EISNAM", Const, 0}, - {"EKEYEXPIRED", Const, 0}, - {"EKEYREJECTED", Const, 0}, - {"EKEYREVOKED", Const, 0}, - {"EL2HLT", Const, 0}, - {"EL2NSYNC", Const, 0}, - {"EL3HLT", Const, 0}, - {"EL3RST", Const, 0}, - {"ELAST", Const, 0}, - {"ELF_NGREG", Const, 0}, - {"ELF_PRARGSZ", Const, 0}, - {"ELIBACC", Const, 0}, - {"ELIBBAD", Const, 0}, - {"ELIBEXEC", Const, 0}, - {"ELIBMAX", Const, 0}, - {"ELIBSCN", Const, 0}, - {"ELNRNG", Const, 0}, - {"ELOOP", Const, 0}, - {"EMEDIUMTYPE", Const, 0}, - {"EMFILE", Const, 0}, - {"EMLINK", Const, 0}, - {"EMSGSIZE", Const, 0}, - {"EMT_TAGOVF", Const, 1}, - {"EMULTIHOP", Const, 0}, - {"EMUL_ENABLED", Const, 1}, - {"EMUL_LINUX", Const, 1}, - {"EMUL_LINUX32", Const, 1}, - {"EMUL_MAXID", Const, 1}, - {"EMUL_NATIVE", Const, 1}, - {"ENAMETOOLONG", Const, 0}, - {"ENAVAIL", Const, 0}, - {"ENDRUNDISC", Const, 1}, - {"ENEEDAUTH", Const, 0}, - {"ENETDOWN", Const, 0}, - {"ENETRESET", Const, 0}, - {"ENETUNREACH", Const, 0}, - {"ENFILE", Const, 0}, - {"ENOANO", Const, 0}, - {"ENOATTR", Const, 0}, - {"ENOBUFS", Const, 0}, - {"ENOCSI", Const, 0}, - {"ENODATA", Const, 0}, - {"ENODEV", Const, 0}, - {"ENOENT", Const, 0}, - {"ENOEXEC", Const, 0}, - {"ENOKEY", Const, 0}, - {"ENOLCK", 
Const, 0}, - {"ENOLINK", Const, 0}, - {"ENOMEDIUM", Const, 0}, - {"ENOMEM", Const, 0}, - {"ENOMSG", Const, 0}, - {"ENONET", Const, 0}, - {"ENOPKG", Const, 0}, - {"ENOPOLICY", Const, 0}, - {"ENOPROTOOPT", Const, 0}, - {"ENOSPC", Const, 0}, - {"ENOSR", Const, 0}, - {"ENOSTR", Const, 0}, - {"ENOSYS", Const, 0}, - {"ENOTBLK", Const, 0}, - {"ENOTCAPABLE", Const, 0}, - {"ENOTCONN", Const, 0}, - {"ENOTDIR", Const, 0}, - {"ENOTEMPTY", Const, 0}, - {"ENOTNAM", Const, 0}, - {"ENOTRECOVERABLE", Const, 0}, - {"ENOTSOCK", Const, 0}, - {"ENOTSUP", Const, 0}, - {"ENOTTY", Const, 0}, - {"ENOTUNIQ", Const, 0}, - {"ENXIO", Const, 0}, - {"EN_SW_CTL_INF", Const, 1}, - {"EN_SW_CTL_PREC", Const, 1}, - {"EN_SW_CTL_ROUND", Const, 1}, - {"EN_SW_DATACHAIN", Const, 1}, - {"EN_SW_DENORM", Const, 1}, - {"EN_SW_INVOP", Const, 1}, - {"EN_SW_OVERFLOW", Const, 1}, - {"EN_SW_PRECLOSS", Const, 1}, - {"EN_SW_UNDERFLOW", Const, 1}, - {"EN_SW_ZERODIV", Const, 1}, - {"EOPNOTSUPP", Const, 0}, - {"EOVERFLOW", Const, 0}, - {"EOWNERDEAD", Const, 0}, - {"EPERM", Const, 0}, - {"EPFNOSUPPORT", Const, 0}, - {"EPIPE", Const, 0}, - {"EPOLLERR", Const, 0}, - {"EPOLLET", Const, 0}, - {"EPOLLHUP", Const, 0}, - {"EPOLLIN", Const, 0}, - {"EPOLLMSG", Const, 0}, - {"EPOLLONESHOT", Const, 0}, - {"EPOLLOUT", Const, 0}, - {"EPOLLPRI", Const, 0}, - {"EPOLLRDBAND", Const, 0}, - {"EPOLLRDHUP", Const, 0}, - {"EPOLLRDNORM", Const, 0}, - {"EPOLLWRBAND", Const, 0}, - {"EPOLLWRNORM", Const, 0}, - {"EPOLL_CLOEXEC", Const, 0}, - {"EPOLL_CTL_ADD", Const, 0}, - {"EPOLL_CTL_DEL", Const, 0}, - {"EPOLL_CTL_MOD", Const, 0}, - {"EPOLL_NONBLOCK", Const, 0}, - {"EPROCLIM", Const, 0}, - {"EPROCUNAVAIL", Const, 0}, - {"EPROGMISMATCH", Const, 0}, - {"EPROGUNAVAIL", Const, 0}, - {"EPROTO", Const, 0}, - {"EPROTONOSUPPORT", Const, 0}, - {"EPROTOTYPE", Const, 0}, - {"EPWROFF", Const, 0}, - {"EQFULL", Const, 16}, - {"ERANGE", Const, 0}, - {"EREMCHG", Const, 0}, - {"EREMOTE", Const, 0}, - {"EREMOTEIO", Const, 0}, - {"ERESTART", Const, 0}, - {"ERFKILL", Const, 0}, - {"EROFS", Const, 0}, - {"ERPCMISMATCH", Const, 0}, - {"ERROR_ACCESS_DENIED", Const, 0}, - {"ERROR_ALREADY_EXISTS", Const, 0}, - {"ERROR_BROKEN_PIPE", Const, 0}, - {"ERROR_BUFFER_OVERFLOW", Const, 0}, - {"ERROR_DIR_NOT_EMPTY", Const, 8}, - {"ERROR_ENVVAR_NOT_FOUND", Const, 0}, - {"ERROR_FILE_EXISTS", Const, 0}, - {"ERROR_FILE_NOT_FOUND", Const, 0}, - {"ERROR_HANDLE_EOF", Const, 2}, - {"ERROR_INSUFFICIENT_BUFFER", Const, 0}, - {"ERROR_IO_PENDING", Const, 0}, - {"ERROR_MOD_NOT_FOUND", Const, 0}, - {"ERROR_MORE_DATA", Const, 3}, - {"ERROR_NETNAME_DELETED", Const, 3}, - {"ERROR_NOT_FOUND", Const, 1}, - {"ERROR_NO_MORE_FILES", Const, 0}, - {"ERROR_OPERATION_ABORTED", Const, 0}, - {"ERROR_PATH_NOT_FOUND", Const, 0}, - {"ERROR_PRIVILEGE_NOT_HELD", Const, 4}, - {"ERROR_PROC_NOT_FOUND", Const, 0}, - {"ESHLIBVERS", Const, 0}, - {"ESHUTDOWN", Const, 0}, - {"ESOCKTNOSUPPORT", Const, 0}, - {"ESPIPE", Const, 0}, - {"ESRCH", Const, 0}, - {"ESRMNT", Const, 0}, - {"ESTALE", Const, 0}, - {"ESTRPIPE", Const, 0}, - {"ETHERCAP_JUMBO_MTU", Const, 1}, - {"ETHERCAP_VLAN_HWTAGGING", Const, 1}, - {"ETHERCAP_VLAN_MTU", Const, 1}, - {"ETHERMIN", Const, 1}, - {"ETHERMTU", Const, 1}, - {"ETHERMTU_JUMBO", Const, 1}, - {"ETHERTYPE_8023", Const, 1}, - {"ETHERTYPE_AARP", Const, 1}, - {"ETHERTYPE_ACCTON", Const, 1}, - {"ETHERTYPE_AEONIC", Const, 1}, - {"ETHERTYPE_ALPHA", Const, 1}, - {"ETHERTYPE_AMBER", Const, 1}, - {"ETHERTYPE_AMOEBA", Const, 1}, - {"ETHERTYPE_AOE", Const, 1}, - {"ETHERTYPE_APOLLO", Const, 1}, - {"ETHERTYPE_APOLLODOMAIN", Const, 
1}, - {"ETHERTYPE_APPLETALK", Const, 1}, - {"ETHERTYPE_APPLITEK", Const, 1}, - {"ETHERTYPE_ARGONAUT", Const, 1}, - {"ETHERTYPE_ARP", Const, 1}, - {"ETHERTYPE_AT", Const, 1}, - {"ETHERTYPE_ATALK", Const, 1}, - {"ETHERTYPE_ATOMIC", Const, 1}, - {"ETHERTYPE_ATT", Const, 1}, - {"ETHERTYPE_ATTSTANFORD", Const, 1}, - {"ETHERTYPE_AUTOPHON", Const, 1}, - {"ETHERTYPE_AXIS", Const, 1}, - {"ETHERTYPE_BCLOOP", Const, 1}, - {"ETHERTYPE_BOFL", Const, 1}, - {"ETHERTYPE_CABLETRON", Const, 1}, - {"ETHERTYPE_CHAOS", Const, 1}, - {"ETHERTYPE_COMDESIGN", Const, 1}, - {"ETHERTYPE_COMPUGRAPHIC", Const, 1}, - {"ETHERTYPE_COUNTERPOINT", Const, 1}, - {"ETHERTYPE_CRONUS", Const, 1}, - {"ETHERTYPE_CRONUSVLN", Const, 1}, - {"ETHERTYPE_DCA", Const, 1}, - {"ETHERTYPE_DDE", Const, 1}, - {"ETHERTYPE_DEBNI", Const, 1}, - {"ETHERTYPE_DECAM", Const, 1}, - {"ETHERTYPE_DECCUST", Const, 1}, - {"ETHERTYPE_DECDIAG", Const, 1}, - {"ETHERTYPE_DECDNS", Const, 1}, - {"ETHERTYPE_DECDTS", Const, 1}, - {"ETHERTYPE_DECEXPER", Const, 1}, - {"ETHERTYPE_DECLAST", Const, 1}, - {"ETHERTYPE_DECLTM", Const, 1}, - {"ETHERTYPE_DECMUMPS", Const, 1}, - {"ETHERTYPE_DECNETBIOS", Const, 1}, - {"ETHERTYPE_DELTACON", Const, 1}, - {"ETHERTYPE_DIDDLE", Const, 1}, - {"ETHERTYPE_DLOG1", Const, 1}, - {"ETHERTYPE_DLOG2", Const, 1}, - {"ETHERTYPE_DN", Const, 1}, - {"ETHERTYPE_DOGFIGHT", Const, 1}, - {"ETHERTYPE_DSMD", Const, 1}, - {"ETHERTYPE_ECMA", Const, 1}, - {"ETHERTYPE_ENCRYPT", Const, 1}, - {"ETHERTYPE_ES", Const, 1}, - {"ETHERTYPE_EXCELAN", Const, 1}, - {"ETHERTYPE_EXPERDATA", Const, 1}, - {"ETHERTYPE_FLIP", Const, 1}, - {"ETHERTYPE_FLOWCONTROL", Const, 1}, - {"ETHERTYPE_FRARP", Const, 1}, - {"ETHERTYPE_GENDYN", Const, 1}, - {"ETHERTYPE_HAYES", Const, 1}, - {"ETHERTYPE_HIPPI_FP", Const, 1}, - {"ETHERTYPE_HITACHI", Const, 1}, - {"ETHERTYPE_HP", Const, 1}, - {"ETHERTYPE_IEEEPUP", Const, 1}, - {"ETHERTYPE_IEEEPUPAT", Const, 1}, - {"ETHERTYPE_IMLBL", Const, 1}, - {"ETHERTYPE_IMLBLDIAG", Const, 1}, - {"ETHERTYPE_IP", Const, 1}, - {"ETHERTYPE_IPAS", Const, 1}, - {"ETHERTYPE_IPV6", Const, 1}, - {"ETHERTYPE_IPX", Const, 1}, - {"ETHERTYPE_IPXNEW", Const, 1}, - {"ETHERTYPE_KALPANA", Const, 1}, - {"ETHERTYPE_LANBRIDGE", Const, 1}, - {"ETHERTYPE_LANPROBE", Const, 1}, - {"ETHERTYPE_LAT", Const, 1}, - {"ETHERTYPE_LBACK", Const, 1}, - {"ETHERTYPE_LITTLE", Const, 1}, - {"ETHERTYPE_LLDP", Const, 1}, - {"ETHERTYPE_LOGICRAFT", Const, 1}, - {"ETHERTYPE_LOOPBACK", Const, 1}, - {"ETHERTYPE_MATRA", Const, 1}, - {"ETHERTYPE_MAX", Const, 1}, - {"ETHERTYPE_MERIT", Const, 1}, - {"ETHERTYPE_MICP", Const, 1}, - {"ETHERTYPE_MOPDL", Const, 1}, - {"ETHERTYPE_MOPRC", Const, 1}, - {"ETHERTYPE_MOTOROLA", Const, 1}, - {"ETHERTYPE_MPLS", Const, 1}, - {"ETHERTYPE_MPLS_MCAST", Const, 1}, - {"ETHERTYPE_MUMPS", Const, 1}, - {"ETHERTYPE_NBPCC", Const, 1}, - {"ETHERTYPE_NBPCLAIM", Const, 1}, - {"ETHERTYPE_NBPCLREQ", Const, 1}, - {"ETHERTYPE_NBPCLRSP", Const, 1}, - {"ETHERTYPE_NBPCREQ", Const, 1}, - {"ETHERTYPE_NBPCRSP", Const, 1}, - {"ETHERTYPE_NBPDG", Const, 1}, - {"ETHERTYPE_NBPDGB", Const, 1}, - {"ETHERTYPE_NBPDLTE", Const, 1}, - {"ETHERTYPE_NBPRAR", Const, 1}, - {"ETHERTYPE_NBPRAS", Const, 1}, - {"ETHERTYPE_NBPRST", Const, 1}, - {"ETHERTYPE_NBPSCD", Const, 1}, - {"ETHERTYPE_NBPVCD", Const, 1}, - {"ETHERTYPE_NBS", Const, 1}, - {"ETHERTYPE_NCD", Const, 1}, - {"ETHERTYPE_NESTAR", Const, 1}, - {"ETHERTYPE_NETBEUI", Const, 1}, - {"ETHERTYPE_NOVELL", Const, 1}, - {"ETHERTYPE_NS", Const, 1}, - {"ETHERTYPE_NSAT", Const, 1}, - {"ETHERTYPE_NSCOMPAT", Const, 1}, - {"ETHERTYPE_NTRAILER", Const, 1}, - 
{"ETHERTYPE_OS9", Const, 1}, - {"ETHERTYPE_OS9NET", Const, 1}, - {"ETHERTYPE_PACER", Const, 1}, - {"ETHERTYPE_PAE", Const, 1}, - {"ETHERTYPE_PCS", Const, 1}, - {"ETHERTYPE_PLANNING", Const, 1}, - {"ETHERTYPE_PPP", Const, 1}, - {"ETHERTYPE_PPPOE", Const, 1}, - {"ETHERTYPE_PPPOEDISC", Const, 1}, - {"ETHERTYPE_PRIMENTS", Const, 1}, - {"ETHERTYPE_PUP", Const, 1}, - {"ETHERTYPE_PUPAT", Const, 1}, - {"ETHERTYPE_QINQ", Const, 1}, - {"ETHERTYPE_RACAL", Const, 1}, - {"ETHERTYPE_RATIONAL", Const, 1}, - {"ETHERTYPE_RAWFR", Const, 1}, - {"ETHERTYPE_RCL", Const, 1}, - {"ETHERTYPE_RDP", Const, 1}, - {"ETHERTYPE_RETIX", Const, 1}, - {"ETHERTYPE_REVARP", Const, 1}, - {"ETHERTYPE_SCA", Const, 1}, - {"ETHERTYPE_SECTRA", Const, 1}, - {"ETHERTYPE_SECUREDATA", Const, 1}, - {"ETHERTYPE_SGITW", Const, 1}, - {"ETHERTYPE_SG_BOUNCE", Const, 1}, - {"ETHERTYPE_SG_DIAG", Const, 1}, - {"ETHERTYPE_SG_NETGAMES", Const, 1}, - {"ETHERTYPE_SG_RESV", Const, 1}, - {"ETHERTYPE_SIMNET", Const, 1}, - {"ETHERTYPE_SLOW", Const, 1}, - {"ETHERTYPE_SLOWPROTOCOLS", Const, 1}, - {"ETHERTYPE_SNA", Const, 1}, - {"ETHERTYPE_SNMP", Const, 1}, - {"ETHERTYPE_SONIX", Const, 1}, - {"ETHERTYPE_SPIDER", Const, 1}, - {"ETHERTYPE_SPRITE", Const, 1}, - {"ETHERTYPE_STP", Const, 1}, - {"ETHERTYPE_TALARIS", Const, 1}, - {"ETHERTYPE_TALARISMC", Const, 1}, - {"ETHERTYPE_TCPCOMP", Const, 1}, - {"ETHERTYPE_TCPSM", Const, 1}, - {"ETHERTYPE_TEC", Const, 1}, - {"ETHERTYPE_TIGAN", Const, 1}, - {"ETHERTYPE_TRAIL", Const, 1}, - {"ETHERTYPE_TRANSETHER", Const, 1}, - {"ETHERTYPE_TYMSHARE", Const, 1}, - {"ETHERTYPE_UBBST", Const, 1}, - {"ETHERTYPE_UBDEBUG", Const, 1}, - {"ETHERTYPE_UBDIAGLOOP", Const, 1}, - {"ETHERTYPE_UBDL", Const, 1}, - {"ETHERTYPE_UBNIU", Const, 1}, - {"ETHERTYPE_UBNMC", Const, 1}, - {"ETHERTYPE_VALID", Const, 1}, - {"ETHERTYPE_VARIAN", Const, 1}, - {"ETHERTYPE_VAXELN", Const, 1}, - {"ETHERTYPE_VEECO", Const, 1}, - {"ETHERTYPE_VEXP", Const, 1}, - {"ETHERTYPE_VGLAB", Const, 1}, - {"ETHERTYPE_VINES", Const, 1}, - {"ETHERTYPE_VINESECHO", Const, 1}, - {"ETHERTYPE_VINESLOOP", Const, 1}, - {"ETHERTYPE_VITAL", Const, 1}, - {"ETHERTYPE_VLAN", Const, 1}, - {"ETHERTYPE_VLTLMAN", Const, 1}, - {"ETHERTYPE_VPROD", Const, 1}, - {"ETHERTYPE_VURESERVED", Const, 1}, - {"ETHERTYPE_WATERLOO", Const, 1}, - {"ETHERTYPE_WELLFLEET", Const, 1}, - {"ETHERTYPE_X25", Const, 1}, - {"ETHERTYPE_X75", Const, 1}, - {"ETHERTYPE_XNSSM", Const, 1}, - {"ETHERTYPE_XTP", Const, 1}, - {"ETHER_ADDR_LEN", Const, 1}, - {"ETHER_ALIGN", Const, 1}, - {"ETHER_CRC_LEN", Const, 1}, - {"ETHER_CRC_POLY_BE", Const, 1}, - {"ETHER_CRC_POLY_LE", Const, 1}, - {"ETHER_HDR_LEN", Const, 1}, - {"ETHER_MAX_DIX_LEN", Const, 1}, - {"ETHER_MAX_LEN", Const, 1}, - {"ETHER_MAX_LEN_JUMBO", Const, 1}, - {"ETHER_MIN_LEN", Const, 1}, - {"ETHER_PPPOE_ENCAP_LEN", Const, 1}, - {"ETHER_TYPE_LEN", Const, 1}, - {"ETHER_VLAN_ENCAP_LEN", Const, 1}, - {"ETH_P_1588", Const, 0}, - {"ETH_P_8021Q", Const, 0}, - {"ETH_P_802_2", Const, 0}, - {"ETH_P_802_3", Const, 0}, - {"ETH_P_AARP", Const, 0}, - {"ETH_P_ALL", Const, 0}, - {"ETH_P_AOE", Const, 0}, - {"ETH_P_ARCNET", Const, 0}, - {"ETH_P_ARP", Const, 0}, - {"ETH_P_ATALK", Const, 0}, - {"ETH_P_ATMFATE", Const, 0}, - {"ETH_P_ATMMPOA", Const, 0}, - {"ETH_P_AX25", Const, 0}, - {"ETH_P_BPQ", Const, 0}, - {"ETH_P_CAIF", Const, 0}, - {"ETH_P_CAN", Const, 0}, - {"ETH_P_CONTROL", Const, 0}, - {"ETH_P_CUST", Const, 0}, - {"ETH_P_DDCMP", Const, 0}, - {"ETH_P_DEC", Const, 0}, - {"ETH_P_DIAG", Const, 0}, - {"ETH_P_DNA_DL", Const, 0}, - {"ETH_P_DNA_RC", Const, 0}, - {"ETH_P_DNA_RT", Const, 
0}, - {"ETH_P_DSA", Const, 0}, - {"ETH_P_ECONET", Const, 0}, - {"ETH_P_EDSA", Const, 0}, - {"ETH_P_FCOE", Const, 0}, - {"ETH_P_FIP", Const, 0}, - {"ETH_P_HDLC", Const, 0}, - {"ETH_P_IEEE802154", Const, 0}, - {"ETH_P_IEEEPUP", Const, 0}, - {"ETH_P_IEEEPUPAT", Const, 0}, - {"ETH_P_IP", Const, 0}, - {"ETH_P_IPV6", Const, 0}, - {"ETH_P_IPX", Const, 0}, - {"ETH_P_IRDA", Const, 0}, - {"ETH_P_LAT", Const, 0}, - {"ETH_P_LINK_CTL", Const, 0}, - {"ETH_P_LOCALTALK", Const, 0}, - {"ETH_P_LOOP", Const, 0}, - {"ETH_P_MOBITEX", Const, 0}, - {"ETH_P_MPLS_MC", Const, 0}, - {"ETH_P_MPLS_UC", Const, 0}, - {"ETH_P_PAE", Const, 0}, - {"ETH_P_PAUSE", Const, 0}, - {"ETH_P_PHONET", Const, 0}, - {"ETH_P_PPPTALK", Const, 0}, - {"ETH_P_PPP_DISC", Const, 0}, - {"ETH_P_PPP_MP", Const, 0}, - {"ETH_P_PPP_SES", Const, 0}, - {"ETH_P_PUP", Const, 0}, - {"ETH_P_PUPAT", Const, 0}, - {"ETH_P_RARP", Const, 0}, - {"ETH_P_SCA", Const, 0}, - {"ETH_P_SLOW", Const, 0}, - {"ETH_P_SNAP", Const, 0}, - {"ETH_P_TEB", Const, 0}, - {"ETH_P_TIPC", Const, 0}, - {"ETH_P_TRAILER", Const, 0}, - {"ETH_P_TR_802_2", Const, 0}, - {"ETH_P_WAN_PPP", Const, 0}, - {"ETH_P_WCCP", Const, 0}, - {"ETH_P_X25", Const, 0}, - {"ETIME", Const, 0}, - {"ETIMEDOUT", Const, 0}, - {"ETOOMANYREFS", Const, 0}, - {"ETXTBSY", Const, 0}, - {"EUCLEAN", Const, 0}, - {"EUNATCH", Const, 0}, - {"EUSERS", Const, 0}, - {"EVFILT_AIO", Const, 0}, - {"EVFILT_FS", Const, 0}, - {"EVFILT_LIO", Const, 0}, - {"EVFILT_MACHPORT", Const, 0}, - {"EVFILT_PROC", Const, 0}, - {"EVFILT_READ", Const, 0}, - {"EVFILT_SIGNAL", Const, 0}, - {"EVFILT_SYSCOUNT", Const, 0}, - {"EVFILT_THREADMARKER", Const, 0}, - {"EVFILT_TIMER", Const, 0}, - {"EVFILT_USER", Const, 0}, - {"EVFILT_VM", Const, 0}, - {"EVFILT_VNODE", Const, 0}, - {"EVFILT_WRITE", Const, 0}, - {"EV_ADD", Const, 0}, - {"EV_CLEAR", Const, 0}, - {"EV_DELETE", Const, 0}, - {"EV_DISABLE", Const, 0}, - {"EV_DISPATCH", Const, 0}, - {"EV_DROP", Const, 3}, - {"EV_ENABLE", Const, 0}, - {"EV_EOF", Const, 0}, - {"EV_ERROR", Const, 0}, - {"EV_FLAG0", Const, 0}, - {"EV_FLAG1", Const, 0}, - {"EV_ONESHOT", Const, 0}, - {"EV_OOBAND", Const, 0}, - {"EV_POLL", Const, 0}, - {"EV_RECEIPT", Const, 0}, - {"EV_SYSFLAGS", Const, 0}, - {"EWINDOWS", Const, 0}, - {"EWOULDBLOCK", Const, 0}, - {"EXDEV", Const, 0}, - {"EXFULL", Const, 0}, - {"EXTA", Const, 0}, - {"EXTB", Const, 0}, - {"EXTPROC", Const, 0}, - {"Environ", Func, 0}, - {"EpollCreate", Func, 0}, - {"EpollCreate1", Func, 0}, - {"EpollCtl", Func, 0}, - {"EpollEvent", Type, 0}, - {"EpollEvent.Events", Field, 0}, - {"EpollEvent.Fd", Field, 0}, - {"EpollEvent.Pad", Field, 0}, - {"EpollEvent.PadFd", Field, 0}, - {"EpollWait", Func, 0}, - {"Errno", Type, 0}, - {"EscapeArg", Func, 0}, - {"Exchangedata", Func, 0}, - {"Exec", Func, 0}, - {"Exit", Func, 0}, - {"ExitProcess", Func, 0}, - {"FD_CLOEXEC", Const, 0}, - {"FD_SETSIZE", Const, 0}, - {"FILE_ACTION_ADDED", Const, 0}, - {"FILE_ACTION_MODIFIED", Const, 0}, - {"FILE_ACTION_REMOVED", Const, 0}, - {"FILE_ACTION_RENAMED_NEW_NAME", Const, 0}, - {"FILE_ACTION_RENAMED_OLD_NAME", Const, 0}, - {"FILE_APPEND_DATA", Const, 0}, - {"FILE_ATTRIBUTE_ARCHIVE", Const, 0}, - {"FILE_ATTRIBUTE_DIRECTORY", Const, 0}, - {"FILE_ATTRIBUTE_HIDDEN", Const, 0}, - {"FILE_ATTRIBUTE_NORMAL", Const, 0}, - {"FILE_ATTRIBUTE_READONLY", Const, 0}, - {"FILE_ATTRIBUTE_REPARSE_POINT", Const, 4}, - {"FILE_ATTRIBUTE_SYSTEM", Const, 0}, - {"FILE_BEGIN", Const, 0}, - {"FILE_CURRENT", Const, 0}, - {"FILE_END", Const, 0}, - {"FILE_FLAG_BACKUP_SEMANTICS", Const, 0}, - {"FILE_FLAG_OPEN_REPARSE_POINT", Const, 
4}, - {"FILE_FLAG_OVERLAPPED", Const, 0}, - {"FILE_LIST_DIRECTORY", Const, 0}, - {"FILE_MAP_COPY", Const, 0}, - {"FILE_MAP_EXECUTE", Const, 0}, - {"FILE_MAP_READ", Const, 0}, - {"FILE_MAP_WRITE", Const, 0}, - {"FILE_NOTIFY_CHANGE_ATTRIBUTES", Const, 0}, - {"FILE_NOTIFY_CHANGE_CREATION", Const, 0}, - {"FILE_NOTIFY_CHANGE_DIR_NAME", Const, 0}, - {"FILE_NOTIFY_CHANGE_FILE_NAME", Const, 0}, - {"FILE_NOTIFY_CHANGE_LAST_ACCESS", Const, 0}, - {"FILE_NOTIFY_CHANGE_LAST_WRITE", Const, 0}, - {"FILE_NOTIFY_CHANGE_SIZE", Const, 0}, - {"FILE_SHARE_DELETE", Const, 0}, - {"FILE_SHARE_READ", Const, 0}, - {"FILE_SHARE_WRITE", Const, 0}, - {"FILE_SKIP_COMPLETION_PORT_ON_SUCCESS", Const, 2}, - {"FILE_SKIP_SET_EVENT_ON_HANDLE", Const, 2}, - {"FILE_TYPE_CHAR", Const, 0}, - {"FILE_TYPE_DISK", Const, 0}, - {"FILE_TYPE_PIPE", Const, 0}, - {"FILE_TYPE_REMOTE", Const, 0}, - {"FILE_TYPE_UNKNOWN", Const, 0}, - {"FILE_WRITE_ATTRIBUTES", Const, 0}, - {"FLUSHO", Const, 0}, - {"FORMAT_MESSAGE_ALLOCATE_BUFFER", Const, 0}, - {"FORMAT_MESSAGE_ARGUMENT_ARRAY", Const, 0}, - {"FORMAT_MESSAGE_FROM_HMODULE", Const, 0}, - {"FORMAT_MESSAGE_FROM_STRING", Const, 0}, - {"FORMAT_MESSAGE_FROM_SYSTEM", Const, 0}, - {"FORMAT_MESSAGE_IGNORE_INSERTS", Const, 0}, - {"FORMAT_MESSAGE_MAX_WIDTH_MASK", Const, 0}, - {"FSCTL_GET_REPARSE_POINT", Const, 4}, - {"F_ADDFILESIGS", Const, 0}, - {"F_ADDSIGS", Const, 0}, - {"F_ALLOCATEALL", Const, 0}, - {"F_ALLOCATECONTIG", Const, 0}, - {"F_CANCEL", Const, 0}, - {"F_CHKCLEAN", Const, 0}, - {"F_CLOSEM", Const, 1}, - {"F_DUP2FD", Const, 0}, - {"F_DUP2FD_CLOEXEC", Const, 1}, - {"F_DUPFD", Const, 0}, - {"F_DUPFD_CLOEXEC", Const, 0}, - {"F_EXLCK", Const, 0}, - {"F_FINDSIGS", Const, 16}, - {"F_FLUSH_DATA", Const, 0}, - {"F_FREEZE_FS", Const, 0}, - {"F_FSCTL", Const, 1}, - {"F_FSDIRMASK", Const, 1}, - {"F_FSIN", Const, 1}, - {"F_FSINOUT", Const, 1}, - {"F_FSOUT", Const, 1}, - {"F_FSPRIV", Const, 1}, - {"F_FSVOID", Const, 1}, - {"F_FULLFSYNC", Const, 0}, - {"F_GETCODEDIR", Const, 16}, - {"F_GETFD", Const, 0}, - {"F_GETFL", Const, 0}, - {"F_GETLEASE", Const, 0}, - {"F_GETLK", Const, 0}, - {"F_GETLK64", Const, 0}, - {"F_GETLKPID", Const, 0}, - {"F_GETNOSIGPIPE", Const, 0}, - {"F_GETOWN", Const, 0}, - {"F_GETOWN_EX", Const, 0}, - {"F_GETPATH", Const, 0}, - {"F_GETPATH_MTMINFO", Const, 0}, - {"F_GETPIPE_SZ", Const, 0}, - {"F_GETPROTECTIONCLASS", Const, 0}, - {"F_GETPROTECTIONLEVEL", Const, 16}, - {"F_GETSIG", Const, 0}, - {"F_GLOBAL_NOCACHE", Const, 0}, - {"F_LOCK", Const, 0}, - {"F_LOG2PHYS", Const, 0}, - {"F_LOG2PHYS_EXT", Const, 0}, - {"F_MARKDEPENDENCY", Const, 0}, - {"F_MAXFD", Const, 1}, - {"F_NOCACHE", Const, 0}, - {"F_NODIRECT", Const, 0}, - {"F_NOTIFY", Const, 0}, - {"F_OGETLK", Const, 0}, - {"F_OK", Const, 0}, - {"F_OSETLK", Const, 0}, - {"F_OSETLKW", Const, 0}, - {"F_PARAM_MASK", Const, 1}, - {"F_PARAM_MAX", Const, 1}, - {"F_PATHPKG_CHECK", Const, 0}, - {"F_PEOFPOSMODE", Const, 0}, - {"F_PREALLOCATE", Const, 0}, - {"F_RDADVISE", Const, 0}, - {"F_RDAHEAD", Const, 0}, - {"F_RDLCK", Const, 0}, - {"F_READAHEAD", Const, 0}, - {"F_READBOOTSTRAP", Const, 0}, - {"F_SETBACKINGSTORE", Const, 0}, - {"F_SETFD", Const, 0}, - {"F_SETFL", Const, 0}, - {"F_SETLEASE", Const, 0}, - {"F_SETLK", Const, 0}, - {"F_SETLK64", Const, 0}, - {"F_SETLKW", Const, 0}, - {"F_SETLKW64", Const, 0}, - {"F_SETLKWTIMEOUT", Const, 16}, - {"F_SETLK_REMOTE", Const, 0}, - {"F_SETNOSIGPIPE", Const, 0}, - {"F_SETOWN", Const, 0}, - {"F_SETOWN_EX", Const, 0}, - {"F_SETPIPE_SZ", Const, 0}, - {"F_SETPROTECTIONCLASS", Const, 0}, - {"F_SETSIG", Const, 
0}, - {"F_SETSIZE", Const, 0}, - {"F_SHLCK", Const, 0}, - {"F_SINGLE_WRITER", Const, 16}, - {"F_TEST", Const, 0}, - {"F_THAW_FS", Const, 0}, - {"F_TLOCK", Const, 0}, - {"F_TRANSCODEKEY", Const, 16}, - {"F_ULOCK", Const, 0}, - {"F_UNLCK", Const, 0}, - {"F_UNLCKSYS", Const, 0}, - {"F_VOLPOSMODE", Const, 0}, - {"F_WRITEBOOTSTRAP", Const, 0}, - {"F_WRLCK", Const, 0}, - {"Faccessat", Func, 0}, - {"Fallocate", Func, 0}, - {"Fbootstraptransfer_t", Type, 0}, - {"Fbootstraptransfer_t.Buffer", Field, 0}, - {"Fbootstraptransfer_t.Length", Field, 0}, - {"Fbootstraptransfer_t.Offset", Field, 0}, - {"Fchdir", Func, 0}, - {"Fchflags", Func, 0}, - {"Fchmod", Func, 0}, - {"Fchmodat", Func, 0}, - {"Fchown", Func, 0}, - {"Fchownat", Func, 0}, - {"FcntlFlock", Func, 3}, - {"FdSet", Type, 0}, - {"FdSet.Bits", Field, 0}, - {"FdSet.X__fds_bits", Field, 0}, - {"Fdatasync", Func, 0}, - {"FileNotifyInformation", Type, 0}, - {"FileNotifyInformation.Action", Field, 0}, - {"FileNotifyInformation.FileName", Field, 0}, - {"FileNotifyInformation.FileNameLength", Field, 0}, - {"FileNotifyInformation.NextEntryOffset", Field, 0}, - {"Filetime", Type, 0}, - {"Filetime.HighDateTime", Field, 0}, - {"Filetime.LowDateTime", Field, 0}, - {"FindClose", Func, 0}, - {"FindFirstFile", Func, 0}, - {"FindNextFile", Func, 0}, - {"Flock", Func, 0}, - {"Flock_t", Type, 0}, - {"Flock_t.Len", Field, 0}, - {"Flock_t.Pad_cgo_0", Field, 0}, - {"Flock_t.Pad_cgo_1", Field, 3}, - {"Flock_t.Pid", Field, 0}, - {"Flock_t.Start", Field, 0}, - {"Flock_t.Sysid", Field, 0}, - {"Flock_t.Type", Field, 0}, - {"Flock_t.Whence", Field, 0}, - {"FlushBpf", Func, 0}, - {"FlushFileBuffers", Func, 0}, - {"FlushViewOfFile", Func, 0}, - {"ForkExec", Func, 0}, - {"ForkLock", Var, 0}, - {"FormatMessage", Func, 0}, - {"Fpathconf", Func, 0}, - {"FreeAddrInfoW", Func, 1}, - {"FreeEnvironmentStrings", Func, 0}, - {"FreeLibrary", Func, 0}, - {"Fsid", Type, 0}, - {"Fsid.Val", Field, 0}, - {"Fsid.X__fsid_val", Field, 2}, - {"Fsid.X__val", Field, 0}, - {"Fstat", Func, 0}, - {"Fstatat", Func, 12}, - {"Fstatfs", Func, 0}, - {"Fstore_t", Type, 0}, - {"Fstore_t.Bytesalloc", Field, 0}, - {"Fstore_t.Flags", Field, 0}, - {"Fstore_t.Length", Field, 0}, - {"Fstore_t.Offset", Field, 0}, - {"Fstore_t.Posmode", Field, 0}, - {"Fsync", Func, 0}, - {"Ftruncate", Func, 0}, - {"FullPath", Func, 4}, - {"Futimes", Func, 0}, - {"Futimesat", Func, 0}, - {"GENERIC_ALL", Const, 0}, - {"GENERIC_EXECUTE", Const, 0}, - {"GENERIC_READ", Const, 0}, - {"GENERIC_WRITE", Const, 0}, - {"GUID", Type, 1}, - {"GUID.Data1", Field, 1}, - {"GUID.Data2", Field, 1}, - {"GUID.Data3", Field, 1}, - {"GUID.Data4", Field, 1}, - {"GetAcceptExSockaddrs", Func, 0}, - {"GetAdaptersInfo", Func, 0}, - {"GetAddrInfoW", Func, 1}, - {"GetCommandLine", Func, 0}, - {"GetComputerName", Func, 0}, - {"GetConsoleMode", Func, 1}, - {"GetCurrentDirectory", Func, 0}, - {"GetCurrentProcess", Func, 0}, - {"GetEnvironmentStrings", Func, 0}, - {"GetEnvironmentVariable", Func, 0}, - {"GetExitCodeProcess", Func, 0}, - {"GetFileAttributes", Func, 0}, - {"GetFileAttributesEx", Func, 0}, - {"GetFileExInfoStandard", Const, 0}, - {"GetFileExMaxInfoLevel", Const, 0}, - {"GetFileInformationByHandle", Func, 0}, - {"GetFileType", Func, 0}, - {"GetFullPathName", Func, 0}, - {"GetHostByName", Func, 0}, - {"GetIfEntry", Func, 0}, - {"GetLastError", Func, 0}, - {"GetLengthSid", Func, 0}, - {"GetLongPathName", Func, 0}, - {"GetProcAddress", Func, 0}, - {"GetProcessTimes", Func, 0}, - {"GetProtoByName", Func, 0}, - {"GetQueuedCompletionStatus", Func, 0}, - 
{"GetServByName", Func, 0}, - {"GetShortPathName", Func, 0}, - {"GetStartupInfo", Func, 0}, - {"GetStdHandle", Func, 0}, - {"GetSystemTimeAsFileTime", Func, 0}, - {"GetTempPath", Func, 0}, - {"GetTimeZoneInformation", Func, 0}, - {"GetTokenInformation", Func, 0}, - {"GetUserNameEx", Func, 0}, - {"GetUserProfileDirectory", Func, 0}, - {"GetVersion", Func, 0}, - {"Getcwd", Func, 0}, - {"Getdents", Func, 0}, - {"Getdirentries", Func, 0}, - {"Getdtablesize", Func, 0}, - {"Getegid", Func, 0}, - {"Getenv", Func, 0}, - {"Geteuid", Func, 0}, - {"Getfsstat", Func, 0}, - {"Getgid", Func, 0}, - {"Getgroups", Func, 0}, - {"Getpagesize", Func, 0}, - {"Getpeername", Func, 0}, - {"Getpgid", Func, 0}, - {"Getpgrp", Func, 0}, - {"Getpid", Func, 0}, - {"Getppid", Func, 0}, - {"Getpriority", Func, 0}, - {"Getrlimit", Func, 0}, - {"Getrusage", Func, 0}, - {"Getsid", Func, 0}, - {"Getsockname", Func, 0}, - {"Getsockopt", Func, 1}, - {"GetsockoptByte", Func, 0}, - {"GetsockoptICMPv6Filter", Func, 2}, - {"GetsockoptIPMreq", Func, 0}, - {"GetsockoptIPMreqn", Func, 0}, - {"GetsockoptIPv6MTUInfo", Func, 2}, - {"GetsockoptIPv6Mreq", Func, 0}, - {"GetsockoptInet4Addr", Func, 0}, - {"GetsockoptInt", Func, 0}, - {"GetsockoptUcred", Func, 1}, - {"Gettid", Func, 0}, - {"Gettimeofday", Func, 0}, - {"Getuid", Func, 0}, - {"Getwd", Func, 0}, - {"Getxattr", Func, 1}, - {"HANDLE_FLAG_INHERIT", Const, 0}, - {"HKEY_CLASSES_ROOT", Const, 0}, - {"HKEY_CURRENT_CONFIG", Const, 0}, - {"HKEY_CURRENT_USER", Const, 0}, - {"HKEY_DYN_DATA", Const, 0}, - {"HKEY_LOCAL_MACHINE", Const, 0}, - {"HKEY_PERFORMANCE_DATA", Const, 0}, - {"HKEY_USERS", Const, 0}, - {"HUPCL", Const, 0}, - {"Handle", Type, 0}, - {"Hostent", Type, 0}, - {"Hostent.AddrList", Field, 0}, - {"Hostent.AddrType", Field, 0}, - {"Hostent.Aliases", Field, 0}, - {"Hostent.Length", Field, 0}, - {"Hostent.Name", Field, 0}, - {"ICANON", Const, 0}, - {"ICMP6_FILTER", Const, 2}, - {"ICMPV6_FILTER", Const, 2}, - {"ICMPv6Filter", Type, 2}, - {"ICMPv6Filter.Data", Field, 2}, - {"ICMPv6Filter.Filt", Field, 2}, - {"ICRNL", Const, 0}, - {"IEXTEN", Const, 0}, - {"IFAN_ARRIVAL", Const, 1}, - {"IFAN_DEPARTURE", Const, 1}, - {"IFA_ADDRESS", Const, 0}, - {"IFA_ANYCAST", Const, 0}, - {"IFA_BROADCAST", Const, 0}, - {"IFA_CACHEINFO", Const, 0}, - {"IFA_F_DADFAILED", Const, 0}, - {"IFA_F_DEPRECATED", Const, 0}, - {"IFA_F_HOMEADDRESS", Const, 0}, - {"IFA_F_NODAD", Const, 0}, - {"IFA_F_OPTIMISTIC", Const, 0}, - {"IFA_F_PERMANENT", Const, 0}, - {"IFA_F_SECONDARY", Const, 0}, - {"IFA_F_TEMPORARY", Const, 0}, - {"IFA_F_TENTATIVE", Const, 0}, - {"IFA_LABEL", Const, 0}, - {"IFA_LOCAL", Const, 0}, - {"IFA_MAX", Const, 0}, - {"IFA_MULTICAST", Const, 0}, - {"IFA_ROUTE", Const, 1}, - {"IFA_UNSPEC", Const, 0}, - {"IFF_ALLMULTI", Const, 0}, - {"IFF_ALTPHYS", Const, 0}, - {"IFF_AUTOMEDIA", Const, 0}, - {"IFF_BROADCAST", Const, 0}, - {"IFF_CANTCHANGE", Const, 0}, - {"IFF_CANTCONFIG", Const, 1}, - {"IFF_DEBUG", Const, 0}, - {"IFF_DRV_OACTIVE", Const, 0}, - {"IFF_DRV_RUNNING", Const, 0}, - {"IFF_DYING", Const, 0}, - {"IFF_DYNAMIC", Const, 0}, - {"IFF_LINK0", Const, 0}, - {"IFF_LINK1", Const, 0}, - {"IFF_LINK2", Const, 0}, - {"IFF_LOOPBACK", Const, 0}, - {"IFF_MASTER", Const, 0}, - {"IFF_MONITOR", Const, 0}, - {"IFF_MULTICAST", Const, 0}, - {"IFF_NOARP", Const, 0}, - {"IFF_NOTRAILERS", Const, 0}, - {"IFF_NO_PI", Const, 0}, - {"IFF_OACTIVE", Const, 0}, - {"IFF_ONE_QUEUE", Const, 0}, - {"IFF_POINTOPOINT", Const, 0}, - {"IFF_POINTTOPOINT", Const, 0}, - {"IFF_PORTSEL", Const, 0}, - {"IFF_PPROMISC", Const, 0}, - 
{"IFF_PROMISC", Const, 0}, - {"IFF_RENAMING", Const, 0}, - {"IFF_RUNNING", Const, 0}, - {"IFF_SIMPLEX", Const, 0}, - {"IFF_SLAVE", Const, 0}, - {"IFF_SMART", Const, 0}, - {"IFF_STATICARP", Const, 0}, - {"IFF_TAP", Const, 0}, - {"IFF_TUN", Const, 0}, - {"IFF_TUN_EXCL", Const, 0}, - {"IFF_UP", Const, 0}, - {"IFF_VNET_HDR", Const, 0}, - {"IFLA_ADDRESS", Const, 0}, - {"IFLA_BROADCAST", Const, 0}, - {"IFLA_COST", Const, 0}, - {"IFLA_IFALIAS", Const, 0}, - {"IFLA_IFNAME", Const, 0}, - {"IFLA_LINK", Const, 0}, - {"IFLA_LINKINFO", Const, 0}, - {"IFLA_LINKMODE", Const, 0}, - {"IFLA_MAP", Const, 0}, - {"IFLA_MASTER", Const, 0}, - {"IFLA_MAX", Const, 0}, - {"IFLA_MTU", Const, 0}, - {"IFLA_NET_NS_PID", Const, 0}, - {"IFLA_OPERSTATE", Const, 0}, - {"IFLA_PRIORITY", Const, 0}, - {"IFLA_PROTINFO", Const, 0}, - {"IFLA_QDISC", Const, 0}, - {"IFLA_STATS", Const, 0}, - {"IFLA_TXQLEN", Const, 0}, - {"IFLA_UNSPEC", Const, 0}, - {"IFLA_WEIGHT", Const, 0}, - {"IFLA_WIRELESS", Const, 0}, - {"IFNAMSIZ", Const, 0}, - {"IFT_1822", Const, 0}, - {"IFT_A12MPPSWITCH", Const, 0}, - {"IFT_AAL2", Const, 0}, - {"IFT_AAL5", Const, 0}, - {"IFT_ADSL", Const, 0}, - {"IFT_AFLANE8023", Const, 0}, - {"IFT_AFLANE8025", Const, 0}, - {"IFT_ARAP", Const, 0}, - {"IFT_ARCNET", Const, 0}, - {"IFT_ARCNETPLUS", Const, 0}, - {"IFT_ASYNC", Const, 0}, - {"IFT_ATM", Const, 0}, - {"IFT_ATMDXI", Const, 0}, - {"IFT_ATMFUNI", Const, 0}, - {"IFT_ATMIMA", Const, 0}, - {"IFT_ATMLOGICAL", Const, 0}, - {"IFT_ATMRADIO", Const, 0}, - {"IFT_ATMSUBINTERFACE", Const, 0}, - {"IFT_ATMVCIENDPT", Const, 0}, - {"IFT_ATMVIRTUAL", Const, 0}, - {"IFT_BGPPOLICYACCOUNTING", Const, 0}, - {"IFT_BLUETOOTH", Const, 1}, - {"IFT_BRIDGE", Const, 0}, - {"IFT_BSC", Const, 0}, - {"IFT_CARP", Const, 0}, - {"IFT_CCTEMUL", Const, 0}, - {"IFT_CELLULAR", Const, 0}, - {"IFT_CEPT", Const, 0}, - {"IFT_CES", Const, 0}, - {"IFT_CHANNEL", Const, 0}, - {"IFT_CNR", Const, 0}, - {"IFT_COFFEE", Const, 0}, - {"IFT_COMPOSITELINK", Const, 0}, - {"IFT_DCN", Const, 0}, - {"IFT_DIGITALPOWERLINE", Const, 0}, - {"IFT_DIGITALWRAPPEROVERHEADCHANNEL", Const, 0}, - {"IFT_DLSW", Const, 0}, - {"IFT_DOCSCABLEDOWNSTREAM", Const, 0}, - {"IFT_DOCSCABLEMACLAYER", Const, 0}, - {"IFT_DOCSCABLEUPSTREAM", Const, 0}, - {"IFT_DOCSCABLEUPSTREAMCHANNEL", Const, 1}, - {"IFT_DS0", Const, 0}, - {"IFT_DS0BUNDLE", Const, 0}, - {"IFT_DS1FDL", Const, 0}, - {"IFT_DS3", Const, 0}, - {"IFT_DTM", Const, 0}, - {"IFT_DUMMY", Const, 1}, - {"IFT_DVBASILN", Const, 0}, - {"IFT_DVBASIOUT", Const, 0}, - {"IFT_DVBRCCDOWNSTREAM", Const, 0}, - {"IFT_DVBRCCMACLAYER", Const, 0}, - {"IFT_DVBRCCUPSTREAM", Const, 0}, - {"IFT_ECONET", Const, 1}, - {"IFT_ENC", Const, 0}, - {"IFT_EON", Const, 0}, - {"IFT_EPLRS", Const, 0}, - {"IFT_ESCON", Const, 0}, - {"IFT_ETHER", Const, 0}, - {"IFT_FAITH", Const, 0}, - {"IFT_FAST", Const, 0}, - {"IFT_FASTETHER", Const, 0}, - {"IFT_FASTETHERFX", Const, 0}, - {"IFT_FDDI", Const, 0}, - {"IFT_FIBRECHANNEL", Const, 0}, - {"IFT_FRAMERELAYINTERCONNECT", Const, 0}, - {"IFT_FRAMERELAYMPI", Const, 0}, - {"IFT_FRDLCIENDPT", Const, 0}, - {"IFT_FRELAY", Const, 0}, - {"IFT_FRELAYDCE", Const, 0}, - {"IFT_FRF16MFRBUNDLE", Const, 0}, - {"IFT_FRFORWARD", Const, 0}, - {"IFT_G703AT2MB", Const, 0}, - {"IFT_G703AT64K", Const, 0}, - {"IFT_GIF", Const, 0}, - {"IFT_GIGABITETHERNET", Const, 0}, - {"IFT_GR303IDT", Const, 0}, - {"IFT_GR303RDT", Const, 0}, - {"IFT_H323GATEKEEPER", Const, 0}, - {"IFT_H323PROXY", Const, 0}, - {"IFT_HDH1822", Const, 0}, - {"IFT_HDLC", Const, 0}, - {"IFT_HDSL2", Const, 0}, - {"IFT_HIPERLAN2", Const, 0}, - 
{"IFT_HIPPI", Const, 0}, - {"IFT_HIPPIINTERFACE", Const, 0}, - {"IFT_HOSTPAD", Const, 0}, - {"IFT_HSSI", Const, 0}, - {"IFT_HY", Const, 0}, - {"IFT_IBM370PARCHAN", Const, 0}, - {"IFT_IDSL", Const, 0}, - {"IFT_IEEE1394", Const, 0}, - {"IFT_IEEE80211", Const, 0}, - {"IFT_IEEE80212", Const, 0}, - {"IFT_IEEE8023ADLAG", Const, 0}, - {"IFT_IFGSN", Const, 0}, - {"IFT_IMT", Const, 0}, - {"IFT_INFINIBAND", Const, 1}, - {"IFT_INTERLEAVE", Const, 0}, - {"IFT_IP", Const, 0}, - {"IFT_IPFORWARD", Const, 0}, - {"IFT_IPOVERATM", Const, 0}, - {"IFT_IPOVERCDLC", Const, 0}, - {"IFT_IPOVERCLAW", Const, 0}, - {"IFT_IPSWITCH", Const, 0}, - {"IFT_IPXIP", Const, 0}, - {"IFT_ISDN", Const, 0}, - {"IFT_ISDNBASIC", Const, 0}, - {"IFT_ISDNPRIMARY", Const, 0}, - {"IFT_ISDNS", Const, 0}, - {"IFT_ISDNU", Const, 0}, - {"IFT_ISO88022LLC", Const, 0}, - {"IFT_ISO88023", Const, 0}, - {"IFT_ISO88024", Const, 0}, - {"IFT_ISO88025", Const, 0}, - {"IFT_ISO88025CRFPINT", Const, 0}, - {"IFT_ISO88025DTR", Const, 0}, - {"IFT_ISO88025FIBER", Const, 0}, - {"IFT_ISO88026", Const, 0}, - {"IFT_ISUP", Const, 0}, - {"IFT_L2VLAN", Const, 0}, - {"IFT_L3IPVLAN", Const, 0}, - {"IFT_L3IPXVLAN", Const, 0}, - {"IFT_LAPB", Const, 0}, - {"IFT_LAPD", Const, 0}, - {"IFT_LAPF", Const, 0}, - {"IFT_LINEGROUP", Const, 1}, - {"IFT_LOCALTALK", Const, 0}, - {"IFT_LOOP", Const, 0}, - {"IFT_MEDIAMAILOVERIP", Const, 0}, - {"IFT_MFSIGLINK", Const, 0}, - {"IFT_MIOX25", Const, 0}, - {"IFT_MODEM", Const, 0}, - {"IFT_MPC", Const, 0}, - {"IFT_MPLS", Const, 0}, - {"IFT_MPLSTUNNEL", Const, 0}, - {"IFT_MSDSL", Const, 0}, - {"IFT_MVL", Const, 0}, - {"IFT_MYRINET", Const, 0}, - {"IFT_NFAS", Const, 0}, - {"IFT_NSIP", Const, 0}, - {"IFT_OPTICALCHANNEL", Const, 0}, - {"IFT_OPTICALTRANSPORT", Const, 0}, - {"IFT_OTHER", Const, 0}, - {"IFT_P10", Const, 0}, - {"IFT_P80", Const, 0}, - {"IFT_PARA", Const, 0}, - {"IFT_PDP", Const, 0}, - {"IFT_PFLOG", Const, 0}, - {"IFT_PFLOW", Const, 1}, - {"IFT_PFSYNC", Const, 0}, - {"IFT_PLC", Const, 0}, - {"IFT_PON155", Const, 1}, - {"IFT_PON622", Const, 1}, - {"IFT_POS", Const, 0}, - {"IFT_PPP", Const, 0}, - {"IFT_PPPMULTILINKBUNDLE", Const, 0}, - {"IFT_PROPATM", Const, 1}, - {"IFT_PROPBWAP2MP", Const, 0}, - {"IFT_PROPCNLS", Const, 0}, - {"IFT_PROPDOCSWIRELESSDOWNSTREAM", Const, 0}, - {"IFT_PROPDOCSWIRELESSMACLAYER", Const, 0}, - {"IFT_PROPDOCSWIRELESSUPSTREAM", Const, 0}, - {"IFT_PROPMUX", Const, 0}, - {"IFT_PROPVIRTUAL", Const, 0}, - {"IFT_PROPWIRELESSP2P", Const, 0}, - {"IFT_PTPSERIAL", Const, 0}, - {"IFT_PVC", Const, 0}, - {"IFT_Q2931", Const, 1}, - {"IFT_QLLC", Const, 0}, - {"IFT_RADIOMAC", Const, 0}, - {"IFT_RADSL", Const, 0}, - {"IFT_REACHDSL", Const, 0}, - {"IFT_RFC1483", Const, 0}, - {"IFT_RS232", Const, 0}, - {"IFT_RSRB", Const, 0}, - {"IFT_SDLC", Const, 0}, - {"IFT_SDSL", Const, 0}, - {"IFT_SHDSL", Const, 0}, - {"IFT_SIP", Const, 0}, - {"IFT_SIPSIG", Const, 1}, - {"IFT_SIPTG", Const, 1}, - {"IFT_SLIP", Const, 0}, - {"IFT_SMDSDXI", Const, 0}, - {"IFT_SMDSICIP", Const, 0}, - {"IFT_SONET", Const, 0}, - {"IFT_SONETOVERHEADCHANNEL", Const, 0}, - {"IFT_SONETPATH", Const, 0}, - {"IFT_SONETVT", Const, 0}, - {"IFT_SRP", Const, 0}, - {"IFT_SS7SIGLINK", Const, 0}, - {"IFT_STACKTOSTACK", Const, 0}, - {"IFT_STARLAN", Const, 0}, - {"IFT_STF", Const, 0}, - {"IFT_T1", Const, 0}, - {"IFT_TDLC", Const, 0}, - {"IFT_TELINK", Const, 1}, - {"IFT_TERMPAD", Const, 0}, - {"IFT_TR008", Const, 0}, - {"IFT_TRANSPHDLC", Const, 0}, - {"IFT_TUNNEL", Const, 0}, - {"IFT_ULTRA", Const, 0}, - {"IFT_USB", Const, 0}, - {"IFT_V11", Const, 0}, - {"IFT_V35", Const, 0}, - 
{"IFT_V36", Const, 0}, - {"IFT_V37", Const, 0}, - {"IFT_VDSL", Const, 0}, - {"IFT_VIRTUALIPADDRESS", Const, 0}, - {"IFT_VIRTUALTG", Const, 1}, - {"IFT_VOICEDID", Const, 1}, - {"IFT_VOICEEM", Const, 0}, - {"IFT_VOICEEMFGD", Const, 1}, - {"IFT_VOICEENCAP", Const, 0}, - {"IFT_VOICEFGDEANA", Const, 1}, - {"IFT_VOICEFXO", Const, 0}, - {"IFT_VOICEFXS", Const, 0}, - {"IFT_VOICEOVERATM", Const, 0}, - {"IFT_VOICEOVERCABLE", Const, 1}, - {"IFT_VOICEOVERFRAMERELAY", Const, 0}, - {"IFT_VOICEOVERIP", Const, 0}, - {"IFT_X213", Const, 0}, - {"IFT_X25", Const, 0}, - {"IFT_X25DDN", Const, 0}, - {"IFT_X25HUNTGROUP", Const, 0}, - {"IFT_X25MLP", Const, 0}, - {"IFT_X25PLE", Const, 0}, - {"IFT_XETHER", Const, 0}, - {"IGNBRK", Const, 0}, - {"IGNCR", Const, 0}, - {"IGNORE", Const, 0}, - {"IGNPAR", Const, 0}, - {"IMAXBEL", Const, 0}, - {"INFINITE", Const, 0}, - {"INLCR", Const, 0}, - {"INPCK", Const, 0}, - {"INVALID_FILE_ATTRIBUTES", Const, 0}, - {"IN_ACCESS", Const, 0}, - {"IN_ALL_EVENTS", Const, 0}, - {"IN_ATTRIB", Const, 0}, - {"IN_CLASSA_HOST", Const, 0}, - {"IN_CLASSA_MAX", Const, 0}, - {"IN_CLASSA_NET", Const, 0}, - {"IN_CLASSA_NSHIFT", Const, 0}, - {"IN_CLASSB_HOST", Const, 0}, - {"IN_CLASSB_MAX", Const, 0}, - {"IN_CLASSB_NET", Const, 0}, - {"IN_CLASSB_NSHIFT", Const, 0}, - {"IN_CLASSC_HOST", Const, 0}, - {"IN_CLASSC_NET", Const, 0}, - {"IN_CLASSC_NSHIFT", Const, 0}, - {"IN_CLASSD_HOST", Const, 0}, - {"IN_CLASSD_NET", Const, 0}, - {"IN_CLASSD_NSHIFT", Const, 0}, - {"IN_CLOEXEC", Const, 0}, - {"IN_CLOSE", Const, 0}, - {"IN_CLOSE_NOWRITE", Const, 0}, - {"IN_CLOSE_WRITE", Const, 0}, - {"IN_CREATE", Const, 0}, - {"IN_DELETE", Const, 0}, - {"IN_DELETE_SELF", Const, 0}, - {"IN_DONT_FOLLOW", Const, 0}, - {"IN_EXCL_UNLINK", Const, 0}, - {"IN_IGNORED", Const, 0}, - {"IN_ISDIR", Const, 0}, - {"IN_LINKLOCALNETNUM", Const, 0}, - {"IN_LOOPBACKNET", Const, 0}, - {"IN_MASK_ADD", Const, 0}, - {"IN_MODIFY", Const, 0}, - {"IN_MOVE", Const, 0}, - {"IN_MOVED_FROM", Const, 0}, - {"IN_MOVED_TO", Const, 0}, - {"IN_MOVE_SELF", Const, 0}, - {"IN_NONBLOCK", Const, 0}, - {"IN_ONESHOT", Const, 0}, - {"IN_ONLYDIR", Const, 0}, - {"IN_OPEN", Const, 0}, - {"IN_Q_OVERFLOW", Const, 0}, - {"IN_RFC3021_HOST", Const, 1}, - {"IN_RFC3021_MASK", Const, 1}, - {"IN_RFC3021_NET", Const, 1}, - {"IN_RFC3021_NSHIFT", Const, 1}, - {"IN_UNMOUNT", Const, 0}, - {"IOC_IN", Const, 1}, - {"IOC_INOUT", Const, 1}, - {"IOC_OUT", Const, 1}, - {"IOC_VENDOR", Const, 3}, - {"IOC_WS2", Const, 1}, - {"IO_REPARSE_TAG_SYMLINK", Const, 4}, - {"IPMreq", Type, 0}, - {"IPMreq.Interface", Field, 0}, - {"IPMreq.Multiaddr", Field, 0}, - {"IPMreqn", Type, 0}, - {"IPMreqn.Address", Field, 0}, - {"IPMreqn.Ifindex", Field, 0}, - {"IPMreqn.Multiaddr", Field, 0}, - {"IPPROTO_3PC", Const, 0}, - {"IPPROTO_ADFS", Const, 0}, - {"IPPROTO_AH", Const, 0}, - {"IPPROTO_AHIP", Const, 0}, - {"IPPROTO_APES", Const, 0}, - {"IPPROTO_ARGUS", Const, 0}, - {"IPPROTO_AX25", Const, 0}, - {"IPPROTO_BHA", Const, 0}, - {"IPPROTO_BLT", Const, 0}, - {"IPPROTO_BRSATMON", Const, 0}, - {"IPPROTO_CARP", Const, 0}, - {"IPPROTO_CFTP", Const, 0}, - {"IPPROTO_CHAOS", Const, 0}, - {"IPPROTO_CMTP", Const, 0}, - {"IPPROTO_COMP", Const, 0}, - {"IPPROTO_CPHB", Const, 0}, - {"IPPROTO_CPNX", Const, 0}, - {"IPPROTO_DCCP", Const, 0}, - {"IPPROTO_DDP", Const, 0}, - {"IPPROTO_DGP", Const, 0}, - {"IPPROTO_DIVERT", Const, 0}, - {"IPPROTO_DIVERT_INIT", Const, 3}, - {"IPPROTO_DIVERT_RESP", Const, 3}, - {"IPPROTO_DONE", Const, 0}, - {"IPPROTO_DSTOPTS", Const, 0}, - {"IPPROTO_EGP", Const, 0}, - {"IPPROTO_EMCON", Const, 0}, - 
{"IPPROTO_ENCAP", Const, 0}, - {"IPPROTO_EON", Const, 0}, - {"IPPROTO_ESP", Const, 0}, - {"IPPROTO_ETHERIP", Const, 0}, - {"IPPROTO_FRAGMENT", Const, 0}, - {"IPPROTO_GGP", Const, 0}, - {"IPPROTO_GMTP", Const, 0}, - {"IPPROTO_GRE", Const, 0}, - {"IPPROTO_HELLO", Const, 0}, - {"IPPROTO_HMP", Const, 0}, - {"IPPROTO_HOPOPTS", Const, 0}, - {"IPPROTO_ICMP", Const, 0}, - {"IPPROTO_ICMPV6", Const, 0}, - {"IPPROTO_IDP", Const, 0}, - {"IPPROTO_IDPR", Const, 0}, - {"IPPROTO_IDRP", Const, 0}, - {"IPPROTO_IGMP", Const, 0}, - {"IPPROTO_IGP", Const, 0}, - {"IPPROTO_IGRP", Const, 0}, - {"IPPROTO_IL", Const, 0}, - {"IPPROTO_INLSP", Const, 0}, - {"IPPROTO_INP", Const, 0}, - {"IPPROTO_IP", Const, 0}, - {"IPPROTO_IPCOMP", Const, 0}, - {"IPPROTO_IPCV", Const, 0}, - {"IPPROTO_IPEIP", Const, 0}, - {"IPPROTO_IPIP", Const, 0}, - {"IPPROTO_IPPC", Const, 0}, - {"IPPROTO_IPV4", Const, 0}, - {"IPPROTO_IPV6", Const, 0}, - {"IPPROTO_IPV6_ICMP", Const, 1}, - {"IPPROTO_IRTP", Const, 0}, - {"IPPROTO_KRYPTOLAN", Const, 0}, - {"IPPROTO_LARP", Const, 0}, - {"IPPROTO_LEAF1", Const, 0}, - {"IPPROTO_LEAF2", Const, 0}, - {"IPPROTO_MAX", Const, 0}, - {"IPPROTO_MAXID", Const, 0}, - {"IPPROTO_MEAS", Const, 0}, - {"IPPROTO_MH", Const, 1}, - {"IPPROTO_MHRP", Const, 0}, - {"IPPROTO_MICP", Const, 0}, - {"IPPROTO_MOBILE", Const, 0}, - {"IPPROTO_MPLS", Const, 1}, - {"IPPROTO_MTP", Const, 0}, - {"IPPROTO_MUX", Const, 0}, - {"IPPROTO_ND", Const, 0}, - {"IPPROTO_NHRP", Const, 0}, - {"IPPROTO_NONE", Const, 0}, - {"IPPROTO_NSP", Const, 0}, - {"IPPROTO_NVPII", Const, 0}, - {"IPPROTO_OLD_DIVERT", Const, 0}, - {"IPPROTO_OSPFIGP", Const, 0}, - {"IPPROTO_PFSYNC", Const, 0}, - {"IPPROTO_PGM", Const, 0}, - {"IPPROTO_PIGP", Const, 0}, - {"IPPROTO_PIM", Const, 0}, - {"IPPROTO_PRM", Const, 0}, - {"IPPROTO_PUP", Const, 0}, - {"IPPROTO_PVP", Const, 0}, - {"IPPROTO_RAW", Const, 0}, - {"IPPROTO_RCCMON", Const, 0}, - {"IPPROTO_RDP", Const, 0}, - {"IPPROTO_ROUTING", Const, 0}, - {"IPPROTO_RSVP", Const, 0}, - {"IPPROTO_RVD", Const, 0}, - {"IPPROTO_SATEXPAK", Const, 0}, - {"IPPROTO_SATMON", Const, 0}, - {"IPPROTO_SCCSP", Const, 0}, - {"IPPROTO_SCTP", Const, 0}, - {"IPPROTO_SDRP", Const, 0}, - {"IPPROTO_SEND", Const, 1}, - {"IPPROTO_SEP", Const, 0}, - {"IPPROTO_SKIP", Const, 0}, - {"IPPROTO_SPACER", Const, 0}, - {"IPPROTO_SRPC", Const, 0}, - {"IPPROTO_ST", Const, 0}, - {"IPPROTO_SVMTP", Const, 0}, - {"IPPROTO_SWIPE", Const, 0}, - {"IPPROTO_TCF", Const, 0}, - {"IPPROTO_TCP", Const, 0}, - {"IPPROTO_TLSP", Const, 0}, - {"IPPROTO_TP", Const, 0}, - {"IPPROTO_TPXX", Const, 0}, - {"IPPROTO_TRUNK1", Const, 0}, - {"IPPROTO_TRUNK2", Const, 0}, - {"IPPROTO_TTP", Const, 0}, - {"IPPROTO_UDP", Const, 0}, - {"IPPROTO_UDPLITE", Const, 0}, - {"IPPROTO_VINES", Const, 0}, - {"IPPROTO_VISA", Const, 0}, - {"IPPROTO_VMTP", Const, 0}, - {"IPPROTO_VRRP", Const, 1}, - {"IPPROTO_WBEXPAK", Const, 0}, - {"IPPROTO_WBMON", Const, 0}, - {"IPPROTO_WSN", Const, 0}, - {"IPPROTO_XNET", Const, 0}, - {"IPPROTO_XTP", Const, 0}, - {"IPV6_2292DSTOPTS", Const, 0}, - {"IPV6_2292HOPLIMIT", Const, 0}, - {"IPV6_2292HOPOPTS", Const, 0}, - {"IPV6_2292NEXTHOP", Const, 0}, - {"IPV6_2292PKTINFO", Const, 0}, - {"IPV6_2292PKTOPTIONS", Const, 0}, - {"IPV6_2292RTHDR", Const, 0}, - {"IPV6_ADDRFORM", Const, 0}, - {"IPV6_ADD_MEMBERSHIP", Const, 0}, - {"IPV6_AUTHHDR", Const, 0}, - {"IPV6_AUTH_LEVEL", Const, 1}, - {"IPV6_AUTOFLOWLABEL", Const, 0}, - {"IPV6_BINDANY", Const, 0}, - {"IPV6_BINDV6ONLY", Const, 0}, - {"IPV6_BOUND_IF", Const, 0}, - {"IPV6_CHECKSUM", Const, 0}, - {"IPV6_DEFAULT_MULTICAST_HOPS", Const, 0}, - 
{"IPV6_DEFAULT_MULTICAST_LOOP", Const, 0}, - {"IPV6_DEFHLIM", Const, 0}, - {"IPV6_DONTFRAG", Const, 0}, - {"IPV6_DROP_MEMBERSHIP", Const, 0}, - {"IPV6_DSTOPTS", Const, 0}, - {"IPV6_ESP_NETWORK_LEVEL", Const, 1}, - {"IPV6_ESP_TRANS_LEVEL", Const, 1}, - {"IPV6_FAITH", Const, 0}, - {"IPV6_FLOWINFO_MASK", Const, 0}, - {"IPV6_FLOWLABEL_MASK", Const, 0}, - {"IPV6_FRAGTTL", Const, 0}, - {"IPV6_FW_ADD", Const, 0}, - {"IPV6_FW_DEL", Const, 0}, - {"IPV6_FW_FLUSH", Const, 0}, - {"IPV6_FW_GET", Const, 0}, - {"IPV6_FW_ZERO", Const, 0}, - {"IPV6_HLIMDEC", Const, 0}, - {"IPV6_HOPLIMIT", Const, 0}, - {"IPV6_HOPOPTS", Const, 0}, - {"IPV6_IPCOMP_LEVEL", Const, 1}, - {"IPV6_IPSEC_POLICY", Const, 0}, - {"IPV6_JOIN_ANYCAST", Const, 0}, - {"IPV6_JOIN_GROUP", Const, 0}, - {"IPV6_LEAVE_ANYCAST", Const, 0}, - {"IPV6_LEAVE_GROUP", Const, 0}, - {"IPV6_MAXHLIM", Const, 0}, - {"IPV6_MAXOPTHDR", Const, 0}, - {"IPV6_MAXPACKET", Const, 0}, - {"IPV6_MAX_GROUP_SRC_FILTER", Const, 0}, - {"IPV6_MAX_MEMBERSHIPS", Const, 0}, - {"IPV6_MAX_SOCK_SRC_FILTER", Const, 0}, - {"IPV6_MIN_MEMBERSHIPS", Const, 0}, - {"IPV6_MMTU", Const, 0}, - {"IPV6_MSFILTER", Const, 0}, - {"IPV6_MTU", Const, 0}, - {"IPV6_MTU_DISCOVER", Const, 0}, - {"IPV6_MULTICAST_HOPS", Const, 0}, - {"IPV6_MULTICAST_IF", Const, 0}, - {"IPV6_MULTICAST_LOOP", Const, 0}, - {"IPV6_NEXTHOP", Const, 0}, - {"IPV6_OPTIONS", Const, 1}, - {"IPV6_PATHMTU", Const, 0}, - {"IPV6_PIPEX", Const, 1}, - {"IPV6_PKTINFO", Const, 0}, - {"IPV6_PMTUDISC_DO", Const, 0}, - {"IPV6_PMTUDISC_DONT", Const, 0}, - {"IPV6_PMTUDISC_PROBE", Const, 0}, - {"IPV6_PMTUDISC_WANT", Const, 0}, - {"IPV6_PORTRANGE", Const, 0}, - {"IPV6_PORTRANGE_DEFAULT", Const, 0}, - {"IPV6_PORTRANGE_HIGH", Const, 0}, - {"IPV6_PORTRANGE_LOW", Const, 0}, - {"IPV6_PREFER_TEMPADDR", Const, 0}, - {"IPV6_RECVDSTOPTS", Const, 0}, - {"IPV6_RECVDSTPORT", Const, 3}, - {"IPV6_RECVERR", Const, 0}, - {"IPV6_RECVHOPLIMIT", Const, 0}, - {"IPV6_RECVHOPOPTS", Const, 0}, - {"IPV6_RECVPATHMTU", Const, 0}, - {"IPV6_RECVPKTINFO", Const, 0}, - {"IPV6_RECVRTHDR", Const, 0}, - {"IPV6_RECVTCLASS", Const, 0}, - {"IPV6_ROUTER_ALERT", Const, 0}, - {"IPV6_RTABLE", Const, 1}, - {"IPV6_RTHDR", Const, 0}, - {"IPV6_RTHDRDSTOPTS", Const, 0}, - {"IPV6_RTHDR_LOOSE", Const, 0}, - {"IPV6_RTHDR_STRICT", Const, 0}, - {"IPV6_RTHDR_TYPE_0", Const, 0}, - {"IPV6_RXDSTOPTS", Const, 0}, - {"IPV6_RXHOPOPTS", Const, 0}, - {"IPV6_SOCKOPT_RESERVED1", Const, 0}, - {"IPV6_TCLASS", Const, 0}, - {"IPV6_UNICAST_HOPS", Const, 0}, - {"IPV6_USE_MIN_MTU", Const, 0}, - {"IPV6_V6ONLY", Const, 0}, - {"IPV6_VERSION", Const, 0}, - {"IPV6_VERSION_MASK", Const, 0}, - {"IPV6_XFRM_POLICY", Const, 0}, - {"IP_ADD_MEMBERSHIP", Const, 0}, - {"IP_ADD_SOURCE_MEMBERSHIP", Const, 0}, - {"IP_AUTH_LEVEL", Const, 1}, - {"IP_BINDANY", Const, 0}, - {"IP_BLOCK_SOURCE", Const, 0}, - {"IP_BOUND_IF", Const, 0}, - {"IP_DEFAULT_MULTICAST_LOOP", Const, 0}, - {"IP_DEFAULT_MULTICAST_TTL", Const, 0}, - {"IP_DF", Const, 0}, - {"IP_DIVERTFL", Const, 3}, - {"IP_DONTFRAG", Const, 0}, - {"IP_DROP_MEMBERSHIP", Const, 0}, - {"IP_DROP_SOURCE_MEMBERSHIP", Const, 0}, - {"IP_DUMMYNET3", Const, 0}, - {"IP_DUMMYNET_CONFIGURE", Const, 0}, - {"IP_DUMMYNET_DEL", Const, 0}, - {"IP_DUMMYNET_FLUSH", Const, 0}, - {"IP_DUMMYNET_GET", Const, 0}, - {"IP_EF", Const, 1}, - {"IP_ERRORMTU", Const, 1}, - {"IP_ESP_NETWORK_LEVEL", Const, 1}, - {"IP_ESP_TRANS_LEVEL", Const, 1}, - {"IP_FAITH", Const, 0}, - {"IP_FREEBIND", Const, 0}, - {"IP_FW3", Const, 0}, - {"IP_FW_ADD", Const, 0}, - {"IP_FW_DEL", Const, 0}, - {"IP_FW_FLUSH", Const, 0}, - 
{"IP_FW_GET", Const, 0}, - {"IP_FW_NAT_CFG", Const, 0}, - {"IP_FW_NAT_DEL", Const, 0}, - {"IP_FW_NAT_GET_CONFIG", Const, 0}, - {"IP_FW_NAT_GET_LOG", Const, 0}, - {"IP_FW_RESETLOG", Const, 0}, - {"IP_FW_TABLE_ADD", Const, 0}, - {"IP_FW_TABLE_DEL", Const, 0}, - {"IP_FW_TABLE_FLUSH", Const, 0}, - {"IP_FW_TABLE_GETSIZE", Const, 0}, - {"IP_FW_TABLE_LIST", Const, 0}, - {"IP_FW_ZERO", Const, 0}, - {"IP_HDRINCL", Const, 0}, - {"IP_IPCOMP_LEVEL", Const, 1}, - {"IP_IPSECFLOWINFO", Const, 1}, - {"IP_IPSEC_LOCAL_AUTH", Const, 1}, - {"IP_IPSEC_LOCAL_CRED", Const, 1}, - {"IP_IPSEC_LOCAL_ID", Const, 1}, - {"IP_IPSEC_POLICY", Const, 0}, - {"IP_IPSEC_REMOTE_AUTH", Const, 1}, - {"IP_IPSEC_REMOTE_CRED", Const, 1}, - {"IP_IPSEC_REMOTE_ID", Const, 1}, - {"IP_MAXPACKET", Const, 0}, - {"IP_MAX_GROUP_SRC_FILTER", Const, 0}, - {"IP_MAX_MEMBERSHIPS", Const, 0}, - {"IP_MAX_SOCK_MUTE_FILTER", Const, 0}, - {"IP_MAX_SOCK_SRC_FILTER", Const, 0}, - {"IP_MAX_SOURCE_FILTER", Const, 0}, - {"IP_MF", Const, 0}, - {"IP_MINFRAGSIZE", Const, 1}, - {"IP_MINTTL", Const, 0}, - {"IP_MIN_MEMBERSHIPS", Const, 0}, - {"IP_MSFILTER", Const, 0}, - {"IP_MSS", Const, 0}, - {"IP_MTU", Const, 0}, - {"IP_MTU_DISCOVER", Const, 0}, - {"IP_MULTICAST_IF", Const, 0}, - {"IP_MULTICAST_IFINDEX", Const, 0}, - {"IP_MULTICAST_LOOP", Const, 0}, - {"IP_MULTICAST_TTL", Const, 0}, - {"IP_MULTICAST_VIF", Const, 0}, - {"IP_NAT__XXX", Const, 0}, - {"IP_OFFMASK", Const, 0}, - {"IP_OLD_FW_ADD", Const, 0}, - {"IP_OLD_FW_DEL", Const, 0}, - {"IP_OLD_FW_FLUSH", Const, 0}, - {"IP_OLD_FW_GET", Const, 0}, - {"IP_OLD_FW_RESETLOG", Const, 0}, - {"IP_OLD_FW_ZERO", Const, 0}, - {"IP_ONESBCAST", Const, 0}, - {"IP_OPTIONS", Const, 0}, - {"IP_ORIGDSTADDR", Const, 0}, - {"IP_PASSSEC", Const, 0}, - {"IP_PIPEX", Const, 1}, - {"IP_PKTINFO", Const, 0}, - {"IP_PKTOPTIONS", Const, 0}, - {"IP_PMTUDISC", Const, 0}, - {"IP_PMTUDISC_DO", Const, 0}, - {"IP_PMTUDISC_DONT", Const, 0}, - {"IP_PMTUDISC_PROBE", Const, 0}, - {"IP_PMTUDISC_WANT", Const, 0}, - {"IP_PORTRANGE", Const, 0}, - {"IP_PORTRANGE_DEFAULT", Const, 0}, - {"IP_PORTRANGE_HIGH", Const, 0}, - {"IP_PORTRANGE_LOW", Const, 0}, - {"IP_RECVDSTADDR", Const, 0}, - {"IP_RECVDSTPORT", Const, 1}, - {"IP_RECVERR", Const, 0}, - {"IP_RECVIF", Const, 0}, - {"IP_RECVOPTS", Const, 0}, - {"IP_RECVORIGDSTADDR", Const, 0}, - {"IP_RECVPKTINFO", Const, 0}, - {"IP_RECVRETOPTS", Const, 0}, - {"IP_RECVRTABLE", Const, 1}, - {"IP_RECVTOS", Const, 0}, - {"IP_RECVTTL", Const, 0}, - {"IP_RETOPTS", Const, 0}, - {"IP_RF", Const, 0}, - {"IP_ROUTER_ALERT", Const, 0}, - {"IP_RSVP_OFF", Const, 0}, - {"IP_RSVP_ON", Const, 0}, - {"IP_RSVP_VIF_OFF", Const, 0}, - {"IP_RSVP_VIF_ON", Const, 0}, - {"IP_RTABLE", Const, 1}, - {"IP_SENDSRCADDR", Const, 0}, - {"IP_STRIPHDR", Const, 0}, - {"IP_TOS", Const, 0}, - {"IP_TRAFFIC_MGT_BACKGROUND", Const, 0}, - {"IP_TRANSPARENT", Const, 0}, - {"IP_TTL", Const, 0}, - {"IP_UNBLOCK_SOURCE", Const, 0}, - {"IP_XFRM_POLICY", Const, 0}, - {"IPv6MTUInfo", Type, 2}, - {"IPv6MTUInfo.Addr", Field, 2}, - {"IPv6MTUInfo.Mtu", Field, 2}, - {"IPv6Mreq", Type, 0}, - {"IPv6Mreq.Interface", Field, 0}, - {"IPv6Mreq.Multiaddr", Field, 0}, - {"ISIG", Const, 0}, - {"ISTRIP", Const, 0}, - {"IUCLC", Const, 0}, - {"IUTF8", Const, 0}, - {"IXANY", Const, 0}, - {"IXOFF", Const, 0}, - {"IXON", Const, 0}, - {"IfAddrmsg", Type, 0}, - {"IfAddrmsg.Family", Field, 0}, - {"IfAddrmsg.Flags", Field, 0}, - {"IfAddrmsg.Index", Field, 0}, - {"IfAddrmsg.Prefixlen", Field, 0}, - {"IfAddrmsg.Scope", Field, 0}, - {"IfAnnounceMsghdr", Type, 1}, - {"IfAnnounceMsghdr.Hdrlen", 
Field, 2}, - {"IfAnnounceMsghdr.Index", Field, 1}, - {"IfAnnounceMsghdr.Msglen", Field, 1}, - {"IfAnnounceMsghdr.Name", Field, 1}, - {"IfAnnounceMsghdr.Type", Field, 1}, - {"IfAnnounceMsghdr.Version", Field, 1}, - {"IfAnnounceMsghdr.What", Field, 1}, - {"IfData", Type, 0}, - {"IfData.Addrlen", Field, 0}, - {"IfData.Baudrate", Field, 0}, - {"IfData.Capabilities", Field, 2}, - {"IfData.Collisions", Field, 0}, - {"IfData.Datalen", Field, 0}, - {"IfData.Epoch", Field, 0}, - {"IfData.Hdrlen", Field, 0}, - {"IfData.Hwassist", Field, 0}, - {"IfData.Ibytes", Field, 0}, - {"IfData.Ierrors", Field, 0}, - {"IfData.Imcasts", Field, 0}, - {"IfData.Ipackets", Field, 0}, - {"IfData.Iqdrops", Field, 0}, - {"IfData.Lastchange", Field, 0}, - {"IfData.Link_state", Field, 0}, - {"IfData.Mclpool", Field, 2}, - {"IfData.Metric", Field, 0}, - {"IfData.Mtu", Field, 0}, - {"IfData.Noproto", Field, 0}, - {"IfData.Obytes", Field, 0}, - {"IfData.Oerrors", Field, 0}, - {"IfData.Omcasts", Field, 0}, - {"IfData.Opackets", Field, 0}, - {"IfData.Pad", Field, 2}, - {"IfData.Pad_cgo_0", Field, 2}, - {"IfData.Pad_cgo_1", Field, 2}, - {"IfData.Physical", Field, 0}, - {"IfData.Recvquota", Field, 0}, - {"IfData.Recvtiming", Field, 0}, - {"IfData.Reserved1", Field, 0}, - {"IfData.Reserved2", Field, 0}, - {"IfData.Spare_char1", Field, 0}, - {"IfData.Spare_char2", Field, 0}, - {"IfData.Type", Field, 0}, - {"IfData.Typelen", Field, 0}, - {"IfData.Unused1", Field, 0}, - {"IfData.Unused2", Field, 0}, - {"IfData.Xmitquota", Field, 0}, - {"IfData.Xmittiming", Field, 0}, - {"IfInfomsg", Type, 0}, - {"IfInfomsg.Change", Field, 0}, - {"IfInfomsg.Family", Field, 0}, - {"IfInfomsg.Flags", Field, 0}, - {"IfInfomsg.Index", Field, 0}, - {"IfInfomsg.Type", Field, 0}, - {"IfInfomsg.X__ifi_pad", Field, 0}, - {"IfMsghdr", Type, 0}, - {"IfMsghdr.Addrs", Field, 0}, - {"IfMsghdr.Data", Field, 0}, - {"IfMsghdr.Flags", Field, 0}, - {"IfMsghdr.Hdrlen", Field, 2}, - {"IfMsghdr.Index", Field, 0}, - {"IfMsghdr.Msglen", Field, 0}, - {"IfMsghdr.Pad1", Field, 2}, - {"IfMsghdr.Pad2", Field, 2}, - {"IfMsghdr.Pad_cgo_0", Field, 0}, - {"IfMsghdr.Pad_cgo_1", Field, 2}, - {"IfMsghdr.Tableid", Field, 2}, - {"IfMsghdr.Type", Field, 0}, - {"IfMsghdr.Version", Field, 0}, - {"IfMsghdr.Xflags", Field, 2}, - {"IfaMsghdr", Type, 0}, - {"IfaMsghdr.Addrs", Field, 0}, - {"IfaMsghdr.Flags", Field, 0}, - {"IfaMsghdr.Hdrlen", Field, 2}, - {"IfaMsghdr.Index", Field, 0}, - {"IfaMsghdr.Metric", Field, 0}, - {"IfaMsghdr.Msglen", Field, 0}, - {"IfaMsghdr.Pad1", Field, 2}, - {"IfaMsghdr.Pad2", Field, 2}, - {"IfaMsghdr.Pad_cgo_0", Field, 0}, - {"IfaMsghdr.Tableid", Field, 2}, - {"IfaMsghdr.Type", Field, 0}, - {"IfaMsghdr.Version", Field, 0}, - {"IfmaMsghdr", Type, 0}, - {"IfmaMsghdr.Addrs", Field, 0}, - {"IfmaMsghdr.Flags", Field, 0}, - {"IfmaMsghdr.Index", Field, 0}, - {"IfmaMsghdr.Msglen", Field, 0}, - {"IfmaMsghdr.Pad_cgo_0", Field, 0}, - {"IfmaMsghdr.Type", Field, 0}, - {"IfmaMsghdr.Version", Field, 0}, - {"IfmaMsghdr2", Type, 0}, - {"IfmaMsghdr2.Addrs", Field, 0}, - {"IfmaMsghdr2.Flags", Field, 0}, - {"IfmaMsghdr2.Index", Field, 0}, - {"IfmaMsghdr2.Msglen", Field, 0}, - {"IfmaMsghdr2.Pad_cgo_0", Field, 0}, - {"IfmaMsghdr2.Refcount", Field, 0}, - {"IfmaMsghdr2.Type", Field, 0}, - {"IfmaMsghdr2.Version", Field, 0}, - {"ImplementsGetwd", Const, 0}, - {"Inet4Pktinfo", Type, 0}, - {"Inet4Pktinfo.Addr", Field, 0}, - {"Inet4Pktinfo.Ifindex", Field, 0}, - {"Inet4Pktinfo.Spec_dst", Field, 0}, - {"Inet6Pktinfo", Type, 0}, - {"Inet6Pktinfo.Addr", Field, 0}, - {"Inet6Pktinfo.Ifindex", Field, 
0}, - {"InotifyAddWatch", Func, 0}, - {"InotifyEvent", Type, 0}, - {"InotifyEvent.Cookie", Field, 0}, - {"InotifyEvent.Len", Field, 0}, - {"InotifyEvent.Mask", Field, 0}, - {"InotifyEvent.Name", Field, 0}, - {"InotifyEvent.Wd", Field, 0}, - {"InotifyInit", Func, 0}, - {"InotifyInit1", Func, 0}, - {"InotifyRmWatch", Func, 0}, - {"InterfaceAddrMessage", Type, 0}, - {"InterfaceAddrMessage.Data", Field, 0}, - {"InterfaceAddrMessage.Header", Field, 0}, - {"InterfaceAnnounceMessage", Type, 1}, - {"InterfaceAnnounceMessage.Header", Field, 1}, - {"InterfaceInfo", Type, 0}, - {"InterfaceInfo.Address", Field, 0}, - {"InterfaceInfo.BroadcastAddress", Field, 0}, - {"InterfaceInfo.Flags", Field, 0}, - {"InterfaceInfo.Netmask", Field, 0}, - {"InterfaceMessage", Type, 0}, - {"InterfaceMessage.Data", Field, 0}, - {"InterfaceMessage.Header", Field, 0}, - {"InterfaceMulticastAddrMessage", Type, 0}, - {"InterfaceMulticastAddrMessage.Data", Field, 0}, - {"InterfaceMulticastAddrMessage.Header", Field, 0}, - {"InvalidHandle", Const, 0}, - {"Ioperm", Func, 0}, - {"Iopl", Func, 0}, - {"Iovec", Type, 0}, - {"Iovec.Base", Field, 0}, - {"Iovec.Len", Field, 0}, - {"IpAdapterInfo", Type, 0}, - {"IpAdapterInfo.AdapterName", Field, 0}, - {"IpAdapterInfo.Address", Field, 0}, - {"IpAdapterInfo.AddressLength", Field, 0}, - {"IpAdapterInfo.ComboIndex", Field, 0}, - {"IpAdapterInfo.CurrentIpAddress", Field, 0}, - {"IpAdapterInfo.Description", Field, 0}, - {"IpAdapterInfo.DhcpEnabled", Field, 0}, - {"IpAdapterInfo.DhcpServer", Field, 0}, - {"IpAdapterInfo.GatewayList", Field, 0}, - {"IpAdapterInfo.HaveWins", Field, 0}, - {"IpAdapterInfo.Index", Field, 0}, - {"IpAdapterInfo.IpAddressList", Field, 0}, - {"IpAdapterInfo.LeaseExpires", Field, 0}, - {"IpAdapterInfo.LeaseObtained", Field, 0}, - {"IpAdapterInfo.Next", Field, 0}, - {"IpAdapterInfo.PrimaryWinsServer", Field, 0}, - {"IpAdapterInfo.SecondaryWinsServer", Field, 0}, - {"IpAdapterInfo.Type", Field, 0}, - {"IpAddrString", Type, 0}, - {"IpAddrString.Context", Field, 0}, - {"IpAddrString.IpAddress", Field, 0}, - {"IpAddrString.IpMask", Field, 0}, - {"IpAddrString.Next", Field, 0}, - {"IpAddressString", Type, 0}, - {"IpAddressString.String", Field, 0}, - {"IpMaskString", Type, 0}, - {"IpMaskString.String", Field, 2}, - {"Issetugid", Func, 0}, - {"KEY_ALL_ACCESS", Const, 0}, - {"KEY_CREATE_LINK", Const, 0}, - {"KEY_CREATE_SUB_KEY", Const, 0}, - {"KEY_ENUMERATE_SUB_KEYS", Const, 0}, - {"KEY_EXECUTE", Const, 0}, - {"KEY_NOTIFY", Const, 0}, - {"KEY_QUERY_VALUE", Const, 0}, - {"KEY_READ", Const, 0}, - {"KEY_SET_VALUE", Const, 0}, - {"KEY_WOW64_32KEY", Const, 0}, - {"KEY_WOW64_64KEY", Const, 0}, - {"KEY_WRITE", Const, 0}, - {"Kevent", Func, 0}, - {"Kevent_t", Type, 0}, - {"Kevent_t.Data", Field, 0}, - {"Kevent_t.Fflags", Field, 0}, - {"Kevent_t.Filter", Field, 0}, - {"Kevent_t.Flags", Field, 0}, - {"Kevent_t.Ident", Field, 0}, - {"Kevent_t.Pad_cgo_0", Field, 2}, - {"Kevent_t.Udata", Field, 0}, - {"Kill", Func, 0}, - {"Klogctl", Func, 0}, - {"Kqueue", Func, 0}, - {"LANG_ENGLISH", Const, 0}, - {"LAYERED_PROTOCOL", Const, 2}, - {"LCNT_OVERLOAD_FLUSH", Const, 1}, - {"LINUX_REBOOT_CMD_CAD_OFF", Const, 0}, - {"LINUX_REBOOT_CMD_CAD_ON", Const, 0}, - {"LINUX_REBOOT_CMD_HALT", Const, 0}, - {"LINUX_REBOOT_CMD_KEXEC", Const, 0}, - {"LINUX_REBOOT_CMD_POWER_OFF", Const, 0}, - {"LINUX_REBOOT_CMD_RESTART", Const, 0}, - {"LINUX_REBOOT_CMD_RESTART2", Const, 0}, - {"LINUX_REBOOT_CMD_SW_SUSPEND", Const, 0}, - {"LINUX_REBOOT_MAGIC1", Const, 0}, - {"LINUX_REBOOT_MAGIC2", Const, 0}, - {"LOCK_EX", Const, 
0}, - {"LOCK_NB", Const, 0}, - {"LOCK_SH", Const, 0}, - {"LOCK_UN", Const, 0}, - {"LazyDLL", Type, 0}, - {"LazyDLL.Name", Field, 0}, - {"LazyProc", Type, 0}, - {"LazyProc.Name", Field, 0}, - {"Lchown", Func, 0}, - {"Linger", Type, 0}, - {"Linger.Linger", Field, 0}, - {"Linger.Onoff", Field, 0}, - {"Link", Func, 0}, - {"Listen", Func, 0}, - {"Listxattr", Func, 1}, - {"LoadCancelIoEx", Func, 1}, - {"LoadConnectEx", Func, 1}, - {"LoadCreateSymbolicLink", Func, 4}, - {"LoadDLL", Func, 0}, - {"LoadGetAddrInfo", Func, 1}, - {"LoadLibrary", Func, 0}, - {"LoadSetFileCompletionNotificationModes", Func, 2}, - {"LocalFree", Func, 0}, - {"Log2phys_t", Type, 0}, - {"Log2phys_t.Contigbytes", Field, 0}, - {"Log2phys_t.Devoffset", Field, 0}, - {"Log2phys_t.Flags", Field, 0}, - {"LookupAccountName", Func, 0}, - {"LookupAccountSid", Func, 0}, - {"LookupSID", Func, 0}, - {"LsfJump", Func, 0}, - {"LsfSocket", Func, 0}, - {"LsfStmt", Func, 0}, - {"Lstat", Func, 0}, - {"MADV_AUTOSYNC", Const, 1}, - {"MADV_CAN_REUSE", Const, 0}, - {"MADV_CORE", Const, 1}, - {"MADV_DOFORK", Const, 0}, - {"MADV_DONTFORK", Const, 0}, - {"MADV_DONTNEED", Const, 0}, - {"MADV_FREE", Const, 0}, - {"MADV_FREE_REUSABLE", Const, 0}, - {"MADV_FREE_REUSE", Const, 0}, - {"MADV_HUGEPAGE", Const, 0}, - {"MADV_HWPOISON", Const, 0}, - {"MADV_MERGEABLE", Const, 0}, - {"MADV_NOCORE", Const, 1}, - {"MADV_NOHUGEPAGE", Const, 0}, - {"MADV_NORMAL", Const, 0}, - {"MADV_NOSYNC", Const, 1}, - {"MADV_PROTECT", Const, 1}, - {"MADV_RANDOM", Const, 0}, - {"MADV_REMOVE", Const, 0}, - {"MADV_SEQUENTIAL", Const, 0}, - {"MADV_SPACEAVAIL", Const, 3}, - {"MADV_UNMERGEABLE", Const, 0}, - {"MADV_WILLNEED", Const, 0}, - {"MADV_ZERO_WIRED_PAGES", Const, 0}, - {"MAP_32BIT", Const, 0}, - {"MAP_ALIGNED_SUPER", Const, 3}, - {"MAP_ALIGNMENT_16MB", Const, 3}, - {"MAP_ALIGNMENT_1TB", Const, 3}, - {"MAP_ALIGNMENT_256TB", Const, 3}, - {"MAP_ALIGNMENT_4GB", Const, 3}, - {"MAP_ALIGNMENT_64KB", Const, 3}, - {"MAP_ALIGNMENT_64PB", Const, 3}, - {"MAP_ALIGNMENT_MASK", Const, 3}, - {"MAP_ALIGNMENT_SHIFT", Const, 3}, - {"MAP_ANON", Const, 0}, - {"MAP_ANONYMOUS", Const, 0}, - {"MAP_COPY", Const, 0}, - {"MAP_DENYWRITE", Const, 0}, - {"MAP_EXECUTABLE", Const, 0}, - {"MAP_FILE", Const, 0}, - {"MAP_FIXED", Const, 0}, - {"MAP_FLAGMASK", Const, 3}, - {"MAP_GROWSDOWN", Const, 0}, - {"MAP_HASSEMAPHORE", Const, 0}, - {"MAP_HUGETLB", Const, 0}, - {"MAP_INHERIT", Const, 3}, - {"MAP_INHERIT_COPY", Const, 3}, - {"MAP_INHERIT_DEFAULT", Const, 3}, - {"MAP_INHERIT_DONATE_COPY", Const, 3}, - {"MAP_INHERIT_NONE", Const, 3}, - {"MAP_INHERIT_SHARE", Const, 3}, - {"MAP_JIT", Const, 0}, - {"MAP_LOCKED", Const, 0}, - {"MAP_NOCACHE", Const, 0}, - {"MAP_NOCORE", Const, 1}, - {"MAP_NOEXTEND", Const, 0}, - {"MAP_NONBLOCK", Const, 0}, - {"MAP_NORESERVE", Const, 0}, - {"MAP_NOSYNC", Const, 1}, - {"MAP_POPULATE", Const, 0}, - {"MAP_PREFAULT_READ", Const, 1}, - {"MAP_PRIVATE", Const, 0}, - {"MAP_RENAME", Const, 0}, - {"MAP_RESERVED0080", Const, 0}, - {"MAP_RESERVED0100", Const, 1}, - {"MAP_SHARED", Const, 0}, - {"MAP_STACK", Const, 0}, - {"MAP_TRYFIXED", Const, 3}, - {"MAP_TYPE", Const, 0}, - {"MAP_WIRED", Const, 3}, - {"MAXIMUM_REPARSE_DATA_BUFFER_SIZE", Const, 4}, - {"MAXLEN_IFDESCR", Const, 0}, - {"MAXLEN_PHYSADDR", Const, 0}, - {"MAX_ADAPTER_ADDRESS_LENGTH", Const, 0}, - {"MAX_ADAPTER_DESCRIPTION_LENGTH", Const, 0}, - {"MAX_ADAPTER_NAME_LENGTH", Const, 0}, - {"MAX_COMPUTERNAME_LENGTH", Const, 0}, - {"MAX_INTERFACE_NAME_LEN", Const, 0}, - {"MAX_LONG_PATH", Const, 0}, - {"MAX_PATH", Const, 0}, - 
{"MAX_PROTOCOL_CHAIN", Const, 2}, - {"MCL_CURRENT", Const, 0}, - {"MCL_FUTURE", Const, 0}, - {"MNT_DETACH", Const, 0}, - {"MNT_EXPIRE", Const, 0}, - {"MNT_FORCE", Const, 0}, - {"MSG_BCAST", Const, 1}, - {"MSG_CMSG_CLOEXEC", Const, 0}, - {"MSG_COMPAT", Const, 0}, - {"MSG_CONFIRM", Const, 0}, - {"MSG_CONTROLMBUF", Const, 1}, - {"MSG_CTRUNC", Const, 0}, - {"MSG_DONTROUTE", Const, 0}, - {"MSG_DONTWAIT", Const, 0}, - {"MSG_EOF", Const, 0}, - {"MSG_EOR", Const, 0}, - {"MSG_ERRQUEUE", Const, 0}, - {"MSG_FASTOPEN", Const, 1}, - {"MSG_FIN", Const, 0}, - {"MSG_FLUSH", Const, 0}, - {"MSG_HAVEMORE", Const, 0}, - {"MSG_HOLD", Const, 0}, - {"MSG_IOVUSRSPACE", Const, 1}, - {"MSG_LENUSRSPACE", Const, 1}, - {"MSG_MCAST", Const, 1}, - {"MSG_MORE", Const, 0}, - {"MSG_NAMEMBUF", Const, 1}, - {"MSG_NBIO", Const, 0}, - {"MSG_NEEDSA", Const, 0}, - {"MSG_NOSIGNAL", Const, 0}, - {"MSG_NOTIFICATION", Const, 0}, - {"MSG_OOB", Const, 0}, - {"MSG_PEEK", Const, 0}, - {"MSG_PROXY", Const, 0}, - {"MSG_RCVMORE", Const, 0}, - {"MSG_RST", Const, 0}, - {"MSG_SEND", Const, 0}, - {"MSG_SYN", Const, 0}, - {"MSG_TRUNC", Const, 0}, - {"MSG_TRYHARD", Const, 0}, - {"MSG_USERFLAGS", Const, 1}, - {"MSG_WAITALL", Const, 0}, - {"MSG_WAITFORONE", Const, 0}, - {"MSG_WAITSTREAM", Const, 0}, - {"MS_ACTIVE", Const, 0}, - {"MS_ASYNC", Const, 0}, - {"MS_BIND", Const, 0}, - {"MS_DEACTIVATE", Const, 0}, - {"MS_DIRSYNC", Const, 0}, - {"MS_INVALIDATE", Const, 0}, - {"MS_I_VERSION", Const, 0}, - {"MS_KERNMOUNT", Const, 0}, - {"MS_KILLPAGES", Const, 0}, - {"MS_MANDLOCK", Const, 0}, - {"MS_MGC_MSK", Const, 0}, - {"MS_MGC_VAL", Const, 0}, - {"MS_MOVE", Const, 0}, - {"MS_NOATIME", Const, 0}, - {"MS_NODEV", Const, 0}, - {"MS_NODIRATIME", Const, 0}, - {"MS_NOEXEC", Const, 0}, - {"MS_NOSUID", Const, 0}, - {"MS_NOUSER", Const, 0}, - {"MS_POSIXACL", Const, 0}, - {"MS_PRIVATE", Const, 0}, - {"MS_RDONLY", Const, 0}, - {"MS_REC", Const, 0}, - {"MS_RELATIME", Const, 0}, - {"MS_REMOUNT", Const, 0}, - {"MS_RMT_MASK", Const, 0}, - {"MS_SHARED", Const, 0}, - {"MS_SILENT", Const, 0}, - {"MS_SLAVE", Const, 0}, - {"MS_STRICTATIME", Const, 0}, - {"MS_SYNC", Const, 0}, - {"MS_SYNCHRONOUS", Const, 0}, - {"MS_UNBINDABLE", Const, 0}, - {"Madvise", Func, 0}, - {"MapViewOfFile", Func, 0}, - {"MaxTokenInfoClass", Const, 0}, - {"Mclpool", Type, 2}, - {"Mclpool.Alive", Field, 2}, - {"Mclpool.Cwm", Field, 2}, - {"Mclpool.Grown", Field, 2}, - {"Mclpool.Hwm", Field, 2}, - {"Mclpool.Lwm", Field, 2}, - {"MibIfRow", Type, 0}, - {"MibIfRow.AdminStatus", Field, 0}, - {"MibIfRow.Descr", Field, 0}, - {"MibIfRow.DescrLen", Field, 0}, - {"MibIfRow.InDiscards", Field, 0}, - {"MibIfRow.InErrors", Field, 0}, - {"MibIfRow.InNUcastPkts", Field, 0}, - {"MibIfRow.InOctets", Field, 0}, - {"MibIfRow.InUcastPkts", Field, 0}, - {"MibIfRow.InUnknownProtos", Field, 0}, - {"MibIfRow.Index", Field, 0}, - {"MibIfRow.LastChange", Field, 0}, - {"MibIfRow.Mtu", Field, 0}, - {"MibIfRow.Name", Field, 0}, - {"MibIfRow.OperStatus", Field, 0}, - {"MibIfRow.OutDiscards", Field, 0}, - {"MibIfRow.OutErrors", Field, 0}, - {"MibIfRow.OutNUcastPkts", Field, 0}, - {"MibIfRow.OutOctets", Field, 0}, - {"MibIfRow.OutQLen", Field, 0}, - {"MibIfRow.OutUcastPkts", Field, 0}, - {"MibIfRow.PhysAddr", Field, 0}, - {"MibIfRow.PhysAddrLen", Field, 0}, - {"MibIfRow.Speed", Field, 0}, - {"MibIfRow.Type", Field, 0}, - {"Mkdir", Func, 0}, - {"Mkdirat", Func, 0}, - {"Mkfifo", Func, 0}, - {"Mknod", Func, 0}, - {"Mknodat", Func, 0}, - {"Mlock", Func, 0}, - {"Mlockall", Func, 0}, - {"Mmap", Func, 0}, - {"Mount", Func, 0}, - {"MoveFile", 
Func, 0}, - {"Mprotect", Func, 0}, - {"Msghdr", Type, 0}, - {"Msghdr.Control", Field, 0}, - {"Msghdr.Controllen", Field, 0}, - {"Msghdr.Flags", Field, 0}, - {"Msghdr.Iov", Field, 0}, - {"Msghdr.Iovlen", Field, 0}, - {"Msghdr.Name", Field, 0}, - {"Msghdr.Namelen", Field, 0}, - {"Msghdr.Pad_cgo_0", Field, 0}, - {"Msghdr.Pad_cgo_1", Field, 0}, - {"Munlock", Func, 0}, - {"Munlockall", Func, 0}, - {"Munmap", Func, 0}, - {"MustLoadDLL", Func, 0}, - {"NAME_MAX", Const, 0}, - {"NETLINK_ADD_MEMBERSHIP", Const, 0}, - {"NETLINK_AUDIT", Const, 0}, - {"NETLINK_BROADCAST_ERROR", Const, 0}, - {"NETLINK_CONNECTOR", Const, 0}, - {"NETLINK_DNRTMSG", Const, 0}, - {"NETLINK_DROP_MEMBERSHIP", Const, 0}, - {"NETLINK_ECRYPTFS", Const, 0}, - {"NETLINK_FIB_LOOKUP", Const, 0}, - {"NETLINK_FIREWALL", Const, 0}, - {"NETLINK_GENERIC", Const, 0}, - {"NETLINK_INET_DIAG", Const, 0}, - {"NETLINK_IP6_FW", Const, 0}, - {"NETLINK_ISCSI", Const, 0}, - {"NETLINK_KOBJECT_UEVENT", Const, 0}, - {"NETLINK_NETFILTER", Const, 0}, - {"NETLINK_NFLOG", Const, 0}, - {"NETLINK_NO_ENOBUFS", Const, 0}, - {"NETLINK_PKTINFO", Const, 0}, - {"NETLINK_RDMA", Const, 0}, - {"NETLINK_ROUTE", Const, 0}, - {"NETLINK_SCSITRANSPORT", Const, 0}, - {"NETLINK_SELINUX", Const, 0}, - {"NETLINK_UNUSED", Const, 0}, - {"NETLINK_USERSOCK", Const, 0}, - {"NETLINK_XFRM", Const, 0}, - {"NET_RT_DUMP", Const, 0}, - {"NET_RT_DUMP2", Const, 0}, - {"NET_RT_FLAGS", Const, 0}, - {"NET_RT_IFLIST", Const, 0}, - {"NET_RT_IFLIST2", Const, 0}, - {"NET_RT_IFLISTL", Const, 1}, - {"NET_RT_IFMALIST", Const, 0}, - {"NET_RT_MAXID", Const, 0}, - {"NET_RT_OIFLIST", Const, 1}, - {"NET_RT_OOIFLIST", Const, 1}, - {"NET_RT_STAT", Const, 0}, - {"NET_RT_STATS", Const, 1}, - {"NET_RT_TABLE", Const, 1}, - {"NET_RT_TRASH", Const, 0}, - {"NLA_ALIGNTO", Const, 0}, - {"NLA_F_NESTED", Const, 0}, - {"NLA_F_NET_BYTEORDER", Const, 0}, - {"NLA_HDRLEN", Const, 0}, - {"NLMSG_ALIGNTO", Const, 0}, - {"NLMSG_DONE", Const, 0}, - {"NLMSG_ERROR", Const, 0}, - {"NLMSG_HDRLEN", Const, 0}, - {"NLMSG_MIN_TYPE", Const, 0}, - {"NLMSG_NOOP", Const, 0}, - {"NLMSG_OVERRUN", Const, 0}, - {"NLM_F_ACK", Const, 0}, - {"NLM_F_APPEND", Const, 0}, - {"NLM_F_ATOMIC", Const, 0}, - {"NLM_F_CREATE", Const, 0}, - {"NLM_F_DUMP", Const, 0}, - {"NLM_F_ECHO", Const, 0}, - {"NLM_F_EXCL", Const, 0}, - {"NLM_F_MATCH", Const, 0}, - {"NLM_F_MULTI", Const, 0}, - {"NLM_F_REPLACE", Const, 0}, - {"NLM_F_REQUEST", Const, 0}, - {"NLM_F_ROOT", Const, 0}, - {"NOFLSH", Const, 0}, - {"NOTE_ABSOLUTE", Const, 0}, - {"NOTE_ATTRIB", Const, 0}, - {"NOTE_BACKGROUND", Const, 16}, - {"NOTE_CHILD", Const, 0}, - {"NOTE_CRITICAL", Const, 16}, - {"NOTE_DELETE", Const, 0}, - {"NOTE_EOF", Const, 1}, - {"NOTE_EXEC", Const, 0}, - {"NOTE_EXIT", Const, 0}, - {"NOTE_EXITSTATUS", Const, 0}, - {"NOTE_EXIT_CSERROR", Const, 16}, - {"NOTE_EXIT_DECRYPTFAIL", Const, 16}, - {"NOTE_EXIT_DETAIL", Const, 16}, - {"NOTE_EXIT_DETAIL_MASK", Const, 16}, - {"NOTE_EXIT_MEMORY", Const, 16}, - {"NOTE_EXIT_REPARENTED", Const, 16}, - {"NOTE_EXTEND", Const, 0}, - {"NOTE_FFAND", Const, 0}, - {"NOTE_FFCOPY", Const, 0}, - {"NOTE_FFCTRLMASK", Const, 0}, - {"NOTE_FFLAGSMASK", Const, 0}, - {"NOTE_FFNOP", Const, 0}, - {"NOTE_FFOR", Const, 0}, - {"NOTE_FORK", Const, 0}, - {"NOTE_LEEWAY", Const, 16}, - {"NOTE_LINK", Const, 0}, - {"NOTE_LOWAT", Const, 0}, - {"NOTE_NONE", Const, 0}, - {"NOTE_NSECONDS", Const, 0}, - {"NOTE_PCTRLMASK", Const, 0}, - {"NOTE_PDATAMASK", Const, 0}, - {"NOTE_REAP", Const, 0}, - {"NOTE_RENAME", Const, 0}, - {"NOTE_RESOURCEEND", Const, 0}, - {"NOTE_REVOKE", Const, 0}, - 
{"NOTE_SECONDS", Const, 0}, - {"NOTE_SIGNAL", Const, 0}, - {"NOTE_TRACK", Const, 0}, - {"NOTE_TRACKERR", Const, 0}, - {"NOTE_TRIGGER", Const, 0}, - {"NOTE_TRUNCATE", Const, 1}, - {"NOTE_USECONDS", Const, 0}, - {"NOTE_VM_ERROR", Const, 0}, - {"NOTE_VM_PRESSURE", Const, 0}, - {"NOTE_VM_PRESSURE_SUDDEN_TERMINATE", Const, 0}, - {"NOTE_VM_PRESSURE_TERMINATE", Const, 0}, - {"NOTE_WRITE", Const, 0}, - {"NameCanonical", Const, 0}, - {"NameCanonicalEx", Const, 0}, - {"NameDisplay", Const, 0}, - {"NameDnsDomain", Const, 0}, - {"NameFullyQualifiedDN", Const, 0}, - {"NameSamCompatible", Const, 0}, - {"NameServicePrincipal", Const, 0}, - {"NameUniqueId", Const, 0}, - {"NameUnknown", Const, 0}, - {"NameUserPrincipal", Const, 0}, - {"Nanosleep", Func, 0}, - {"NetApiBufferFree", Func, 0}, - {"NetGetJoinInformation", Func, 2}, - {"NetSetupDomainName", Const, 2}, - {"NetSetupUnjoined", Const, 2}, - {"NetSetupUnknownStatus", Const, 2}, - {"NetSetupWorkgroupName", Const, 2}, - {"NetUserGetInfo", Func, 0}, - {"NetlinkMessage", Type, 0}, - {"NetlinkMessage.Data", Field, 0}, - {"NetlinkMessage.Header", Field, 0}, - {"NetlinkRIB", Func, 0}, - {"NetlinkRouteAttr", Type, 0}, - {"NetlinkRouteAttr.Attr", Field, 0}, - {"NetlinkRouteAttr.Value", Field, 0}, - {"NetlinkRouteRequest", Type, 0}, - {"NetlinkRouteRequest.Data", Field, 0}, - {"NetlinkRouteRequest.Header", Field, 0}, - {"NewCallback", Func, 0}, - {"NewCallbackCDecl", Func, 3}, - {"NewLazyDLL", Func, 0}, - {"NlAttr", Type, 0}, - {"NlAttr.Len", Field, 0}, - {"NlAttr.Type", Field, 0}, - {"NlMsgerr", Type, 0}, - {"NlMsgerr.Error", Field, 0}, - {"NlMsgerr.Msg", Field, 0}, - {"NlMsghdr", Type, 0}, - {"NlMsghdr.Flags", Field, 0}, - {"NlMsghdr.Len", Field, 0}, - {"NlMsghdr.Pid", Field, 0}, - {"NlMsghdr.Seq", Field, 0}, - {"NlMsghdr.Type", Field, 0}, - {"NsecToFiletime", Func, 0}, - {"NsecToTimespec", Func, 0}, - {"NsecToTimeval", Func, 0}, - {"Ntohs", Func, 0}, - {"OCRNL", Const, 0}, - {"OFDEL", Const, 0}, - {"OFILL", Const, 0}, - {"OFIOGETBMAP", Const, 1}, - {"OID_PKIX_KP_SERVER_AUTH", Var, 0}, - {"OID_SERVER_GATED_CRYPTO", Var, 0}, - {"OID_SGC_NETSCAPE", Var, 0}, - {"OLCUC", Const, 0}, - {"ONLCR", Const, 0}, - {"ONLRET", Const, 0}, - {"ONOCR", Const, 0}, - {"ONOEOT", Const, 1}, - {"OPEN_ALWAYS", Const, 0}, - {"OPEN_EXISTING", Const, 0}, - {"OPOST", Const, 0}, - {"O_ACCMODE", Const, 0}, - {"O_ALERT", Const, 0}, - {"O_ALT_IO", Const, 1}, - {"O_APPEND", Const, 0}, - {"O_ASYNC", Const, 0}, - {"O_CLOEXEC", Const, 0}, - {"O_CREAT", Const, 0}, - {"O_DIRECT", Const, 0}, - {"O_DIRECTORY", Const, 0}, - {"O_DP_GETRAWENCRYPTED", Const, 16}, - {"O_DSYNC", Const, 0}, - {"O_EVTONLY", Const, 0}, - {"O_EXCL", Const, 0}, - {"O_EXEC", Const, 0}, - {"O_EXLOCK", Const, 0}, - {"O_FSYNC", Const, 0}, - {"O_LARGEFILE", Const, 0}, - {"O_NDELAY", Const, 0}, - {"O_NOATIME", Const, 0}, - {"O_NOCTTY", Const, 0}, - {"O_NOFOLLOW", Const, 0}, - {"O_NONBLOCK", Const, 0}, - {"O_NOSIGPIPE", Const, 1}, - {"O_POPUP", Const, 0}, - {"O_RDONLY", Const, 0}, - {"O_RDWR", Const, 0}, - {"O_RSYNC", Const, 0}, - {"O_SHLOCK", Const, 0}, - {"O_SYMLINK", Const, 0}, - {"O_SYNC", Const, 0}, - {"O_TRUNC", Const, 0}, - {"O_TTY_INIT", Const, 0}, - {"O_WRONLY", Const, 0}, - {"Open", Func, 0}, - {"OpenCurrentProcessToken", Func, 0}, - {"OpenProcess", Func, 0}, - {"OpenProcessToken", Func, 0}, - {"Openat", Func, 0}, - {"Overlapped", Type, 0}, - {"Overlapped.HEvent", Field, 0}, - {"Overlapped.Internal", Field, 0}, - {"Overlapped.InternalHigh", Field, 0}, - {"Overlapped.Offset", Field, 0}, - {"Overlapped.OffsetHigh", Field, 
0}, - {"PACKET_ADD_MEMBERSHIP", Const, 0}, - {"PACKET_BROADCAST", Const, 0}, - {"PACKET_DROP_MEMBERSHIP", Const, 0}, - {"PACKET_FASTROUTE", Const, 0}, - {"PACKET_HOST", Const, 0}, - {"PACKET_LOOPBACK", Const, 0}, - {"PACKET_MR_ALLMULTI", Const, 0}, - {"PACKET_MR_MULTICAST", Const, 0}, - {"PACKET_MR_PROMISC", Const, 0}, - {"PACKET_MULTICAST", Const, 0}, - {"PACKET_OTHERHOST", Const, 0}, - {"PACKET_OUTGOING", Const, 0}, - {"PACKET_RECV_OUTPUT", Const, 0}, - {"PACKET_RX_RING", Const, 0}, - {"PACKET_STATISTICS", Const, 0}, - {"PAGE_EXECUTE_READ", Const, 0}, - {"PAGE_EXECUTE_READWRITE", Const, 0}, - {"PAGE_EXECUTE_WRITECOPY", Const, 0}, - {"PAGE_READONLY", Const, 0}, - {"PAGE_READWRITE", Const, 0}, - {"PAGE_WRITECOPY", Const, 0}, - {"PARENB", Const, 0}, - {"PARMRK", Const, 0}, - {"PARODD", Const, 0}, - {"PENDIN", Const, 0}, - {"PFL_HIDDEN", Const, 2}, - {"PFL_MATCHES_PROTOCOL_ZERO", Const, 2}, - {"PFL_MULTIPLE_PROTO_ENTRIES", Const, 2}, - {"PFL_NETWORKDIRECT_PROVIDER", Const, 2}, - {"PFL_RECOMMENDED_PROTO_ENTRY", Const, 2}, - {"PF_FLUSH", Const, 1}, - {"PKCS_7_ASN_ENCODING", Const, 0}, - {"PMC5_PIPELINE_FLUSH", Const, 1}, - {"PRIO_PGRP", Const, 2}, - {"PRIO_PROCESS", Const, 2}, - {"PRIO_USER", Const, 2}, - {"PRI_IOFLUSH", Const, 1}, - {"PROCESS_QUERY_INFORMATION", Const, 0}, - {"PROCESS_TERMINATE", Const, 2}, - {"PROT_EXEC", Const, 0}, - {"PROT_GROWSDOWN", Const, 0}, - {"PROT_GROWSUP", Const, 0}, - {"PROT_NONE", Const, 0}, - {"PROT_READ", Const, 0}, - {"PROT_WRITE", Const, 0}, - {"PROV_DH_SCHANNEL", Const, 0}, - {"PROV_DSS", Const, 0}, - {"PROV_DSS_DH", Const, 0}, - {"PROV_EC_ECDSA_FULL", Const, 0}, - {"PROV_EC_ECDSA_SIG", Const, 0}, - {"PROV_EC_ECNRA_FULL", Const, 0}, - {"PROV_EC_ECNRA_SIG", Const, 0}, - {"PROV_FORTEZZA", Const, 0}, - {"PROV_INTEL_SEC", Const, 0}, - {"PROV_MS_EXCHANGE", Const, 0}, - {"PROV_REPLACE_OWF", Const, 0}, - {"PROV_RNG", Const, 0}, - {"PROV_RSA_AES", Const, 0}, - {"PROV_RSA_FULL", Const, 0}, - {"PROV_RSA_SCHANNEL", Const, 0}, - {"PROV_RSA_SIG", Const, 0}, - {"PROV_SPYRUS_LYNKS", Const, 0}, - {"PROV_SSL", Const, 0}, - {"PR_CAPBSET_DROP", Const, 0}, - {"PR_CAPBSET_READ", Const, 0}, - {"PR_CLEAR_SECCOMP_FILTER", Const, 0}, - {"PR_ENDIAN_BIG", Const, 0}, - {"PR_ENDIAN_LITTLE", Const, 0}, - {"PR_ENDIAN_PPC_LITTLE", Const, 0}, - {"PR_FPEMU_NOPRINT", Const, 0}, - {"PR_FPEMU_SIGFPE", Const, 0}, - {"PR_FP_EXC_ASYNC", Const, 0}, - {"PR_FP_EXC_DISABLED", Const, 0}, - {"PR_FP_EXC_DIV", Const, 0}, - {"PR_FP_EXC_INV", Const, 0}, - {"PR_FP_EXC_NONRECOV", Const, 0}, - {"PR_FP_EXC_OVF", Const, 0}, - {"PR_FP_EXC_PRECISE", Const, 0}, - {"PR_FP_EXC_RES", Const, 0}, - {"PR_FP_EXC_SW_ENABLE", Const, 0}, - {"PR_FP_EXC_UND", Const, 0}, - {"PR_GET_DUMPABLE", Const, 0}, - {"PR_GET_ENDIAN", Const, 0}, - {"PR_GET_FPEMU", Const, 0}, - {"PR_GET_FPEXC", Const, 0}, - {"PR_GET_KEEPCAPS", Const, 0}, - {"PR_GET_NAME", Const, 0}, - {"PR_GET_PDEATHSIG", Const, 0}, - {"PR_GET_SECCOMP", Const, 0}, - {"PR_GET_SECCOMP_FILTER", Const, 0}, - {"PR_GET_SECUREBITS", Const, 0}, - {"PR_GET_TIMERSLACK", Const, 0}, - {"PR_GET_TIMING", Const, 0}, - {"PR_GET_TSC", Const, 0}, - {"PR_GET_UNALIGN", Const, 0}, - {"PR_MCE_KILL", Const, 0}, - {"PR_MCE_KILL_CLEAR", Const, 0}, - {"PR_MCE_KILL_DEFAULT", Const, 0}, - {"PR_MCE_KILL_EARLY", Const, 0}, - {"PR_MCE_KILL_GET", Const, 0}, - {"PR_MCE_KILL_LATE", Const, 0}, - {"PR_MCE_KILL_SET", Const, 0}, - {"PR_SECCOMP_FILTER_EVENT", Const, 0}, - {"PR_SECCOMP_FILTER_SYSCALL", Const, 0}, - {"PR_SET_DUMPABLE", Const, 0}, - {"PR_SET_ENDIAN", Const, 0}, - {"PR_SET_FPEMU", Const, 0}, - 
{"PR_SET_FPEXC", Const, 0}, - {"PR_SET_KEEPCAPS", Const, 0}, - {"PR_SET_NAME", Const, 0}, - {"PR_SET_PDEATHSIG", Const, 0}, - {"PR_SET_PTRACER", Const, 0}, - {"PR_SET_SECCOMP", Const, 0}, - {"PR_SET_SECCOMP_FILTER", Const, 0}, - {"PR_SET_SECUREBITS", Const, 0}, - {"PR_SET_TIMERSLACK", Const, 0}, - {"PR_SET_TIMING", Const, 0}, - {"PR_SET_TSC", Const, 0}, - {"PR_SET_UNALIGN", Const, 0}, - {"PR_TASK_PERF_EVENTS_DISABLE", Const, 0}, - {"PR_TASK_PERF_EVENTS_ENABLE", Const, 0}, - {"PR_TIMING_STATISTICAL", Const, 0}, - {"PR_TIMING_TIMESTAMP", Const, 0}, - {"PR_TSC_ENABLE", Const, 0}, - {"PR_TSC_SIGSEGV", Const, 0}, - {"PR_UNALIGN_NOPRINT", Const, 0}, - {"PR_UNALIGN_SIGBUS", Const, 0}, - {"PTRACE_ARCH_PRCTL", Const, 0}, - {"PTRACE_ATTACH", Const, 0}, - {"PTRACE_CONT", Const, 0}, - {"PTRACE_DETACH", Const, 0}, - {"PTRACE_EVENT_CLONE", Const, 0}, - {"PTRACE_EVENT_EXEC", Const, 0}, - {"PTRACE_EVENT_EXIT", Const, 0}, - {"PTRACE_EVENT_FORK", Const, 0}, - {"PTRACE_EVENT_VFORK", Const, 0}, - {"PTRACE_EVENT_VFORK_DONE", Const, 0}, - {"PTRACE_GETCRUNCHREGS", Const, 0}, - {"PTRACE_GETEVENTMSG", Const, 0}, - {"PTRACE_GETFPREGS", Const, 0}, - {"PTRACE_GETFPXREGS", Const, 0}, - {"PTRACE_GETHBPREGS", Const, 0}, - {"PTRACE_GETREGS", Const, 0}, - {"PTRACE_GETREGSET", Const, 0}, - {"PTRACE_GETSIGINFO", Const, 0}, - {"PTRACE_GETVFPREGS", Const, 0}, - {"PTRACE_GETWMMXREGS", Const, 0}, - {"PTRACE_GET_THREAD_AREA", Const, 0}, - {"PTRACE_KILL", Const, 0}, - {"PTRACE_OLDSETOPTIONS", Const, 0}, - {"PTRACE_O_MASK", Const, 0}, - {"PTRACE_O_TRACECLONE", Const, 0}, - {"PTRACE_O_TRACEEXEC", Const, 0}, - {"PTRACE_O_TRACEEXIT", Const, 0}, - {"PTRACE_O_TRACEFORK", Const, 0}, - {"PTRACE_O_TRACESYSGOOD", Const, 0}, - {"PTRACE_O_TRACEVFORK", Const, 0}, - {"PTRACE_O_TRACEVFORKDONE", Const, 0}, - {"PTRACE_PEEKDATA", Const, 0}, - {"PTRACE_PEEKTEXT", Const, 0}, - {"PTRACE_PEEKUSR", Const, 0}, - {"PTRACE_POKEDATA", Const, 0}, - {"PTRACE_POKETEXT", Const, 0}, - {"PTRACE_POKEUSR", Const, 0}, - {"PTRACE_SETCRUNCHREGS", Const, 0}, - {"PTRACE_SETFPREGS", Const, 0}, - {"PTRACE_SETFPXREGS", Const, 0}, - {"PTRACE_SETHBPREGS", Const, 0}, - {"PTRACE_SETOPTIONS", Const, 0}, - {"PTRACE_SETREGS", Const, 0}, - {"PTRACE_SETREGSET", Const, 0}, - {"PTRACE_SETSIGINFO", Const, 0}, - {"PTRACE_SETVFPREGS", Const, 0}, - {"PTRACE_SETWMMXREGS", Const, 0}, - {"PTRACE_SET_SYSCALL", Const, 0}, - {"PTRACE_SET_THREAD_AREA", Const, 0}, - {"PTRACE_SINGLEBLOCK", Const, 0}, - {"PTRACE_SINGLESTEP", Const, 0}, - {"PTRACE_SYSCALL", Const, 0}, - {"PTRACE_SYSEMU", Const, 0}, - {"PTRACE_SYSEMU_SINGLESTEP", Const, 0}, - {"PTRACE_TRACEME", Const, 0}, - {"PT_ATTACH", Const, 0}, - {"PT_ATTACHEXC", Const, 0}, - {"PT_CONTINUE", Const, 0}, - {"PT_DATA_ADDR", Const, 0}, - {"PT_DENY_ATTACH", Const, 0}, - {"PT_DETACH", Const, 0}, - {"PT_FIRSTMACH", Const, 0}, - {"PT_FORCEQUOTA", Const, 0}, - {"PT_KILL", Const, 0}, - {"PT_MASK", Const, 1}, - {"PT_READ_D", Const, 0}, - {"PT_READ_I", Const, 0}, - {"PT_READ_U", Const, 0}, - {"PT_SIGEXC", Const, 0}, - {"PT_STEP", Const, 0}, - {"PT_TEXT_ADDR", Const, 0}, - {"PT_TEXT_END_ADDR", Const, 0}, - {"PT_THUPDATE", Const, 0}, - {"PT_TRACE_ME", Const, 0}, - {"PT_WRITE_D", Const, 0}, - {"PT_WRITE_I", Const, 0}, - {"PT_WRITE_U", Const, 0}, - {"ParseDirent", Func, 0}, - {"ParseNetlinkMessage", Func, 0}, - {"ParseNetlinkRouteAttr", Func, 0}, - {"ParseRoutingMessage", Func, 0}, - {"ParseRoutingSockaddr", Func, 0}, - {"ParseSocketControlMessage", Func, 0}, - {"ParseUnixCredentials", Func, 0}, - {"ParseUnixRights", Func, 0}, - {"PathMax", Const, 0}, - 
{"Pathconf", Func, 0}, - {"Pause", Func, 0}, - {"Pipe", Func, 0}, - {"Pipe2", Func, 1}, - {"PivotRoot", Func, 0}, - {"Pointer", Type, 11}, - {"PostQueuedCompletionStatus", Func, 0}, - {"Pread", Func, 0}, - {"Proc", Type, 0}, - {"Proc.Dll", Field, 0}, - {"Proc.Name", Field, 0}, - {"ProcAttr", Type, 0}, - {"ProcAttr.Dir", Field, 0}, - {"ProcAttr.Env", Field, 0}, - {"ProcAttr.Files", Field, 0}, - {"ProcAttr.Sys", Field, 0}, - {"Process32First", Func, 4}, - {"Process32Next", Func, 4}, - {"ProcessEntry32", Type, 4}, - {"ProcessEntry32.DefaultHeapID", Field, 4}, - {"ProcessEntry32.ExeFile", Field, 4}, - {"ProcessEntry32.Flags", Field, 4}, - {"ProcessEntry32.ModuleID", Field, 4}, - {"ProcessEntry32.ParentProcessID", Field, 4}, - {"ProcessEntry32.PriClassBase", Field, 4}, - {"ProcessEntry32.ProcessID", Field, 4}, - {"ProcessEntry32.Size", Field, 4}, - {"ProcessEntry32.Threads", Field, 4}, - {"ProcessEntry32.Usage", Field, 4}, - {"ProcessInformation", Type, 0}, - {"ProcessInformation.Process", Field, 0}, - {"ProcessInformation.ProcessId", Field, 0}, - {"ProcessInformation.Thread", Field, 0}, - {"ProcessInformation.ThreadId", Field, 0}, - {"Protoent", Type, 0}, - {"Protoent.Aliases", Field, 0}, - {"Protoent.Name", Field, 0}, - {"Protoent.Proto", Field, 0}, - {"PtraceAttach", Func, 0}, - {"PtraceCont", Func, 0}, - {"PtraceDetach", Func, 0}, - {"PtraceGetEventMsg", Func, 0}, - {"PtraceGetRegs", Func, 0}, - {"PtracePeekData", Func, 0}, - {"PtracePeekText", Func, 0}, - {"PtracePokeData", Func, 0}, - {"PtracePokeText", Func, 0}, - {"PtraceRegs", Type, 0}, - {"PtraceRegs.Cs", Field, 0}, - {"PtraceRegs.Ds", Field, 0}, - {"PtraceRegs.Eax", Field, 0}, - {"PtraceRegs.Ebp", Field, 0}, - {"PtraceRegs.Ebx", Field, 0}, - {"PtraceRegs.Ecx", Field, 0}, - {"PtraceRegs.Edi", Field, 0}, - {"PtraceRegs.Edx", Field, 0}, - {"PtraceRegs.Eflags", Field, 0}, - {"PtraceRegs.Eip", Field, 0}, - {"PtraceRegs.Es", Field, 0}, - {"PtraceRegs.Esi", Field, 0}, - {"PtraceRegs.Esp", Field, 0}, - {"PtraceRegs.Fs", Field, 0}, - {"PtraceRegs.Fs_base", Field, 0}, - {"PtraceRegs.Gs", Field, 0}, - {"PtraceRegs.Gs_base", Field, 0}, - {"PtraceRegs.Orig_eax", Field, 0}, - {"PtraceRegs.Orig_rax", Field, 0}, - {"PtraceRegs.R10", Field, 0}, - {"PtraceRegs.R11", Field, 0}, - {"PtraceRegs.R12", Field, 0}, - {"PtraceRegs.R13", Field, 0}, - {"PtraceRegs.R14", Field, 0}, - {"PtraceRegs.R15", Field, 0}, - {"PtraceRegs.R8", Field, 0}, - {"PtraceRegs.R9", Field, 0}, - {"PtraceRegs.Rax", Field, 0}, - {"PtraceRegs.Rbp", Field, 0}, - {"PtraceRegs.Rbx", Field, 0}, - {"PtraceRegs.Rcx", Field, 0}, - {"PtraceRegs.Rdi", Field, 0}, - {"PtraceRegs.Rdx", Field, 0}, - {"PtraceRegs.Rip", Field, 0}, - {"PtraceRegs.Rsi", Field, 0}, - {"PtraceRegs.Rsp", Field, 0}, - {"PtraceRegs.Ss", Field, 0}, - {"PtraceRegs.Uregs", Field, 0}, - {"PtraceRegs.Xcs", Field, 0}, - {"PtraceRegs.Xds", Field, 0}, - {"PtraceRegs.Xes", Field, 0}, - {"PtraceRegs.Xfs", Field, 0}, - {"PtraceRegs.Xgs", Field, 0}, - {"PtraceRegs.Xss", Field, 0}, - {"PtraceSetOptions", Func, 0}, - {"PtraceSetRegs", Func, 0}, - {"PtraceSingleStep", Func, 0}, - {"PtraceSyscall", Func, 1}, - {"Pwrite", Func, 0}, - {"REG_BINARY", Const, 0}, - {"REG_DWORD", Const, 0}, - {"REG_DWORD_BIG_ENDIAN", Const, 0}, - {"REG_DWORD_LITTLE_ENDIAN", Const, 0}, - {"REG_EXPAND_SZ", Const, 0}, - {"REG_FULL_RESOURCE_DESCRIPTOR", Const, 0}, - {"REG_LINK", Const, 0}, - {"REG_MULTI_SZ", Const, 0}, - {"REG_NONE", Const, 0}, - {"REG_QWORD", Const, 0}, - {"REG_QWORD_LITTLE_ENDIAN", Const, 0}, - {"REG_RESOURCE_LIST", Const, 0}, - 
{"REG_RESOURCE_REQUIREMENTS_LIST", Const, 0}, - {"REG_SZ", Const, 0}, - {"RLIMIT_AS", Const, 0}, - {"RLIMIT_CORE", Const, 0}, - {"RLIMIT_CPU", Const, 0}, - {"RLIMIT_CPU_USAGE_MONITOR", Const, 16}, - {"RLIMIT_DATA", Const, 0}, - {"RLIMIT_FSIZE", Const, 0}, - {"RLIMIT_NOFILE", Const, 0}, - {"RLIMIT_STACK", Const, 0}, - {"RLIM_INFINITY", Const, 0}, - {"RTAX_ADVMSS", Const, 0}, - {"RTAX_AUTHOR", Const, 0}, - {"RTAX_BRD", Const, 0}, - {"RTAX_CWND", Const, 0}, - {"RTAX_DST", Const, 0}, - {"RTAX_FEATURES", Const, 0}, - {"RTAX_FEATURE_ALLFRAG", Const, 0}, - {"RTAX_FEATURE_ECN", Const, 0}, - {"RTAX_FEATURE_SACK", Const, 0}, - {"RTAX_FEATURE_TIMESTAMP", Const, 0}, - {"RTAX_GATEWAY", Const, 0}, - {"RTAX_GENMASK", Const, 0}, - {"RTAX_HOPLIMIT", Const, 0}, - {"RTAX_IFA", Const, 0}, - {"RTAX_IFP", Const, 0}, - {"RTAX_INITCWND", Const, 0}, - {"RTAX_INITRWND", Const, 0}, - {"RTAX_LABEL", Const, 1}, - {"RTAX_LOCK", Const, 0}, - {"RTAX_MAX", Const, 0}, - {"RTAX_MTU", Const, 0}, - {"RTAX_NETMASK", Const, 0}, - {"RTAX_REORDERING", Const, 0}, - {"RTAX_RTO_MIN", Const, 0}, - {"RTAX_RTT", Const, 0}, - {"RTAX_RTTVAR", Const, 0}, - {"RTAX_SRC", Const, 1}, - {"RTAX_SRCMASK", Const, 1}, - {"RTAX_SSTHRESH", Const, 0}, - {"RTAX_TAG", Const, 1}, - {"RTAX_UNSPEC", Const, 0}, - {"RTAX_WINDOW", Const, 0}, - {"RTA_ALIGNTO", Const, 0}, - {"RTA_AUTHOR", Const, 0}, - {"RTA_BRD", Const, 0}, - {"RTA_CACHEINFO", Const, 0}, - {"RTA_DST", Const, 0}, - {"RTA_FLOW", Const, 0}, - {"RTA_GATEWAY", Const, 0}, - {"RTA_GENMASK", Const, 0}, - {"RTA_IFA", Const, 0}, - {"RTA_IFP", Const, 0}, - {"RTA_IIF", Const, 0}, - {"RTA_LABEL", Const, 1}, - {"RTA_MAX", Const, 0}, - {"RTA_METRICS", Const, 0}, - {"RTA_MULTIPATH", Const, 0}, - {"RTA_NETMASK", Const, 0}, - {"RTA_OIF", Const, 0}, - {"RTA_PREFSRC", Const, 0}, - {"RTA_PRIORITY", Const, 0}, - {"RTA_SRC", Const, 0}, - {"RTA_SRCMASK", Const, 1}, - {"RTA_TABLE", Const, 0}, - {"RTA_TAG", Const, 1}, - {"RTA_UNSPEC", Const, 0}, - {"RTCF_DIRECTSRC", Const, 0}, - {"RTCF_DOREDIRECT", Const, 0}, - {"RTCF_LOG", Const, 0}, - {"RTCF_MASQ", Const, 0}, - {"RTCF_NAT", Const, 0}, - {"RTCF_VALVE", Const, 0}, - {"RTF_ADDRCLASSMASK", Const, 0}, - {"RTF_ADDRCONF", Const, 0}, - {"RTF_ALLONLINK", Const, 0}, - {"RTF_ANNOUNCE", Const, 1}, - {"RTF_BLACKHOLE", Const, 0}, - {"RTF_BROADCAST", Const, 0}, - {"RTF_CACHE", Const, 0}, - {"RTF_CLONED", Const, 1}, - {"RTF_CLONING", Const, 0}, - {"RTF_CONDEMNED", Const, 0}, - {"RTF_DEFAULT", Const, 0}, - {"RTF_DELCLONE", Const, 0}, - {"RTF_DONE", Const, 0}, - {"RTF_DYNAMIC", Const, 0}, - {"RTF_FLOW", Const, 0}, - {"RTF_FMASK", Const, 0}, - {"RTF_GATEWAY", Const, 0}, - {"RTF_GWFLAG_COMPAT", Const, 3}, - {"RTF_HOST", Const, 0}, - {"RTF_IFREF", Const, 0}, - {"RTF_IFSCOPE", Const, 0}, - {"RTF_INTERFACE", Const, 0}, - {"RTF_IRTT", Const, 0}, - {"RTF_LINKRT", Const, 0}, - {"RTF_LLDATA", Const, 0}, - {"RTF_LLINFO", Const, 0}, - {"RTF_LOCAL", Const, 0}, - {"RTF_MASK", Const, 1}, - {"RTF_MODIFIED", Const, 0}, - {"RTF_MPATH", Const, 1}, - {"RTF_MPLS", Const, 1}, - {"RTF_MSS", Const, 0}, - {"RTF_MTU", Const, 0}, - {"RTF_MULTICAST", Const, 0}, - {"RTF_NAT", Const, 0}, - {"RTF_NOFORWARD", Const, 0}, - {"RTF_NONEXTHOP", Const, 0}, - {"RTF_NOPMTUDISC", Const, 0}, - {"RTF_PERMANENT_ARP", Const, 1}, - {"RTF_PINNED", Const, 0}, - {"RTF_POLICY", Const, 0}, - {"RTF_PRCLONING", Const, 0}, - {"RTF_PROTO1", Const, 0}, - {"RTF_PROTO2", Const, 0}, - {"RTF_PROTO3", Const, 0}, - {"RTF_PROXY", Const, 16}, - {"RTF_REINSTATE", Const, 0}, - {"RTF_REJECT", Const, 0}, - {"RTF_RNH_LOCKED", Const, 0}, - 
{"RTF_ROUTER", Const, 16}, - {"RTF_SOURCE", Const, 1}, - {"RTF_SRC", Const, 1}, - {"RTF_STATIC", Const, 0}, - {"RTF_STICKY", Const, 0}, - {"RTF_THROW", Const, 0}, - {"RTF_TUNNEL", Const, 1}, - {"RTF_UP", Const, 0}, - {"RTF_USETRAILERS", Const, 1}, - {"RTF_WASCLONED", Const, 0}, - {"RTF_WINDOW", Const, 0}, - {"RTF_XRESOLVE", Const, 0}, - {"RTM_ADD", Const, 0}, - {"RTM_BASE", Const, 0}, - {"RTM_CHANGE", Const, 0}, - {"RTM_CHGADDR", Const, 1}, - {"RTM_DELACTION", Const, 0}, - {"RTM_DELADDR", Const, 0}, - {"RTM_DELADDRLABEL", Const, 0}, - {"RTM_DELETE", Const, 0}, - {"RTM_DELLINK", Const, 0}, - {"RTM_DELMADDR", Const, 0}, - {"RTM_DELNEIGH", Const, 0}, - {"RTM_DELQDISC", Const, 0}, - {"RTM_DELROUTE", Const, 0}, - {"RTM_DELRULE", Const, 0}, - {"RTM_DELTCLASS", Const, 0}, - {"RTM_DELTFILTER", Const, 0}, - {"RTM_DESYNC", Const, 1}, - {"RTM_F_CLONED", Const, 0}, - {"RTM_F_EQUALIZE", Const, 0}, - {"RTM_F_NOTIFY", Const, 0}, - {"RTM_F_PREFIX", Const, 0}, - {"RTM_GET", Const, 0}, - {"RTM_GET2", Const, 0}, - {"RTM_GETACTION", Const, 0}, - {"RTM_GETADDR", Const, 0}, - {"RTM_GETADDRLABEL", Const, 0}, - {"RTM_GETANYCAST", Const, 0}, - {"RTM_GETDCB", Const, 0}, - {"RTM_GETLINK", Const, 0}, - {"RTM_GETMULTICAST", Const, 0}, - {"RTM_GETNEIGH", Const, 0}, - {"RTM_GETNEIGHTBL", Const, 0}, - {"RTM_GETQDISC", Const, 0}, - {"RTM_GETROUTE", Const, 0}, - {"RTM_GETRULE", Const, 0}, - {"RTM_GETTCLASS", Const, 0}, - {"RTM_GETTFILTER", Const, 0}, - {"RTM_IEEE80211", Const, 0}, - {"RTM_IFANNOUNCE", Const, 0}, - {"RTM_IFINFO", Const, 0}, - {"RTM_IFINFO2", Const, 0}, - {"RTM_LLINFO_UPD", Const, 1}, - {"RTM_LOCK", Const, 0}, - {"RTM_LOSING", Const, 0}, - {"RTM_MAX", Const, 0}, - {"RTM_MAXSIZE", Const, 1}, - {"RTM_MISS", Const, 0}, - {"RTM_NEWACTION", Const, 0}, - {"RTM_NEWADDR", Const, 0}, - {"RTM_NEWADDRLABEL", Const, 0}, - {"RTM_NEWLINK", Const, 0}, - {"RTM_NEWMADDR", Const, 0}, - {"RTM_NEWMADDR2", Const, 0}, - {"RTM_NEWNDUSEROPT", Const, 0}, - {"RTM_NEWNEIGH", Const, 0}, - {"RTM_NEWNEIGHTBL", Const, 0}, - {"RTM_NEWPREFIX", Const, 0}, - {"RTM_NEWQDISC", Const, 0}, - {"RTM_NEWROUTE", Const, 0}, - {"RTM_NEWRULE", Const, 0}, - {"RTM_NEWTCLASS", Const, 0}, - {"RTM_NEWTFILTER", Const, 0}, - {"RTM_NR_FAMILIES", Const, 0}, - {"RTM_NR_MSGTYPES", Const, 0}, - {"RTM_OIFINFO", Const, 1}, - {"RTM_OLDADD", Const, 0}, - {"RTM_OLDDEL", Const, 0}, - {"RTM_OOIFINFO", Const, 1}, - {"RTM_REDIRECT", Const, 0}, - {"RTM_RESOLVE", Const, 0}, - {"RTM_RTTUNIT", Const, 0}, - {"RTM_SETDCB", Const, 0}, - {"RTM_SETGATE", Const, 1}, - {"RTM_SETLINK", Const, 0}, - {"RTM_SETNEIGHTBL", Const, 0}, - {"RTM_VERSION", Const, 0}, - {"RTNH_ALIGNTO", Const, 0}, - {"RTNH_F_DEAD", Const, 0}, - {"RTNH_F_ONLINK", Const, 0}, - {"RTNH_F_PERVASIVE", Const, 0}, - {"RTNLGRP_IPV4_IFADDR", Const, 1}, - {"RTNLGRP_IPV4_MROUTE", Const, 1}, - {"RTNLGRP_IPV4_ROUTE", Const, 1}, - {"RTNLGRP_IPV4_RULE", Const, 1}, - {"RTNLGRP_IPV6_IFADDR", Const, 1}, - {"RTNLGRP_IPV6_IFINFO", Const, 1}, - {"RTNLGRP_IPV6_MROUTE", Const, 1}, - {"RTNLGRP_IPV6_PREFIX", Const, 1}, - {"RTNLGRP_IPV6_ROUTE", Const, 1}, - {"RTNLGRP_IPV6_RULE", Const, 1}, - {"RTNLGRP_LINK", Const, 1}, - {"RTNLGRP_ND_USEROPT", Const, 1}, - {"RTNLGRP_NEIGH", Const, 1}, - {"RTNLGRP_NONE", Const, 1}, - {"RTNLGRP_NOTIFY", Const, 1}, - {"RTNLGRP_TC", Const, 1}, - {"RTN_ANYCAST", Const, 0}, - {"RTN_BLACKHOLE", Const, 0}, - {"RTN_BROADCAST", Const, 0}, - {"RTN_LOCAL", Const, 0}, - {"RTN_MAX", Const, 0}, - {"RTN_MULTICAST", Const, 0}, - {"RTN_NAT", Const, 0}, - {"RTN_PROHIBIT", Const, 0}, - {"RTN_THROW", Const, 0}, - 
{"RTN_UNICAST", Const, 0}, - {"RTN_UNREACHABLE", Const, 0}, - {"RTN_UNSPEC", Const, 0}, - {"RTN_XRESOLVE", Const, 0}, - {"RTPROT_BIRD", Const, 0}, - {"RTPROT_BOOT", Const, 0}, - {"RTPROT_DHCP", Const, 0}, - {"RTPROT_DNROUTED", Const, 0}, - {"RTPROT_GATED", Const, 0}, - {"RTPROT_KERNEL", Const, 0}, - {"RTPROT_MRT", Const, 0}, - {"RTPROT_NTK", Const, 0}, - {"RTPROT_RA", Const, 0}, - {"RTPROT_REDIRECT", Const, 0}, - {"RTPROT_STATIC", Const, 0}, - {"RTPROT_UNSPEC", Const, 0}, - {"RTPROT_XORP", Const, 0}, - {"RTPROT_ZEBRA", Const, 0}, - {"RTV_EXPIRE", Const, 0}, - {"RTV_HOPCOUNT", Const, 0}, - {"RTV_MTU", Const, 0}, - {"RTV_RPIPE", Const, 0}, - {"RTV_RTT", Const, 0}, - {"RTV_RTTVAR", Const, 0}, - {"RTV_SPIPE", Const, 0}, - {"RTV_SSTHRESH", Const, 0}, - {"RTV_WEIGHT", Const, 0}, - {"RT_CACHING_CONTEXT", Const, 1}, - {"RT_CLASS_DEFAULT", Const, 0}, - {"RT_CLASS_LOCAL", Const, 0}, - {"RT_CLASS_MAIN", Const, 0}, - {"RT_CLASS_MAX", Const, 0}, - {"RT_CLASS_UNSPEC", Const, 0}, - {"RT_DEFAULT_FIB", Const, 1}, - {"RT_NORTREF", Const, 1}, - {"RT_SCOPE_HOST", Const, 0}, - {"RT_SCOPE_LINK", Const, 0}, - {"RT_SCOPE_NOWHERE", Const, 0}, - {"RT_SCOPE_SITE", Const, 0}, - {"RT_SCOPE_UNIVERSE", Const, 0}, - {"RT_TABLEID_MAX", Const, 1}, - {"RT_TABLE_COMPAT", Const, 0}, - {"RT_TABLE_DEFAULT", Const, 0}, - {"RT_TABLE_LOCAL", Const, 0}, - {"RT_TABLE_MAIN", Const, 0}, - {"RT_TABLE_MAX", Const, 0}, - {"RT_TABLE_UNSPEC", Const, 0}, - {"RUSAGE_CHILDREN", Const, 0}, - {"RUSAGE_SELF", Const, 0}, - {"RUSAGE_THREAD", Const, 0}, - {"Radvisory_t", Type, 0}, - {"Radvisory_t.Count", Field, 0}, - {"Radvisory_t.Offset", Field, 0}, - {"Radvisory_t.Pad_cgo_0", Field, 0}, - {"RawConn", Type, 9}, - {"RawSockaddr", Type, 0}, - {"RawSockaddr.Data", Field, 0}, - {"RawSockaddr.Family", Field, 0}, - {"RawSockaddr.Len", Field, 0}, - {"RawSockaddrAny", Type, 0}, - {"RawSockaddrAny.Addr", Field, 0}, - {"RawSockaddrAny.Pad", Field, 0}, - {"RawSockaddrDatalink", Type, 0}, - {"RawSockaddrDatalink.Alen", Field, 0}, - {"RawSockaddrDatalink.Data", Field, 0}, - {"RawSockaddrDatalink.Family", Field, 0}, - {"RawSockaddrDatalink.Index", Field, 0}, - {"RawSockaddrDatalink.Len", Field, 0}, - {"RawSockaddrDatalink.Nlen", Field, 0}, - {"RawSockaddrDatalink.Pad_cgo_0", Field, 2}, - {"RawSockaddrDatalink.Slen", Field, 0}, - {"RawSockaddrDatalink.Type", Field, 0}, - {"RawSockaddrInet4", Type, 0}, - {"RawSockaddrInet4.Addr", Field, 0}, - {"RawSockaddrInet4.Family", Field, 0}, - {"RawSockaddrInet4.Len", Field, 0}, - {"RawSockaddrInet4.Port", Field, 0}, - {"RawSockaddrInet4.Zero", Field, 0}, - {"RawSockaddrInet6", Type, 0}, - {"RawSockaddrInet6.Addr", Field, 0}, - {"RawSockaddrInet6.Family", Field, 0}, - {"RawSockaddrInet6.Flowinfo", Field, 0}, - {"RawSockaddrInet6.Len", Field, 0}, - {"RawSockaddrInet6.Port", Field, 0}, - {"RawSockaddrInet6.Scope_id", Field, 0}, - {"RawSockaddrLinklayer", Type, 0}, - {"RawSockaddrLinklayer.Addr", Field, 0}, - {"RawSockaddrLinklayer.Family", Field, 0}, - {"RawSockaddrLinklayer.Halen", Field, 0}, - {"RawSockaddrLinklayer.Hatype", Field, 0}, - {"RawSockaddrLinklayer.Ifindex", Field, 0}, - {"RawSockaddrLinklayer.Pkttype", Field, 0}, - {"RawSockaddrLinklayer.Protocol", Field, 0}, - {"RawSockaddrNetlink", Type, 0}, - {"RawSockaddrNetlink.Family", Field, 0}, - {"RawSockaddrNetlink.Groups", Field, 0}, - {"RawSockaddrNetlink.Pad", Field, 0}, - {"RawSockaddrNetlink.Pid", Field, 0}, - {"RawSockaddrUnix", Type, 0}, - {"RawSockaddrUnix.Family", Field, 0}, - {"RawSockaddrUnix.Len", Field, 0}, - {"RawSockaddrUnix.Pad_cgo_0", Field, 2}, - 
{"RawSockaddrUnix.Path", Field, 0}, - {"RawSyscall", Func, 0}, - {"RawSyscall6", Func, 0}, - {"Read", Func, 0}, - {"ReadConsole", Func, 1}, - {"ReadDirectoryChanges", Func, 0}, - {"ReadDirent", Func, 0}, - {"ReadFile", Func, 0}, - {"Readlink", Func, 0}, - {"Reboot", Func, 0}, - {"Recvfrom", Func, 0}, - {"Recvmsg", Func, 0}, - {"RegCloseKey", Func, 0}, - {"RegEnumKeyEx", Func, 0}, - {"RegOpenKeyEx", Func, 0}, - {"RegQueryInfoKey", Func, 0}, - {"RegQueryValueEx", Func, 0}, - {"RemoveDirectory", Func, 0}, - {"Removexattr", Func, 1}, - {"Rename", Func, 0}, - {"Renameat", Func, 0}, - {"Revoke", Func, 0}, - {"Rlimit", Type, 0}, - {"Rlimit.Cur", Field, 0}, - {"Rlimit.Max", Field, 0}, - {"Rmdir", Func, 0}, - {"RouteMessage", Type, 0}, - {"RouteMessage.Data", Field, 0}, - {"RouteMessage.Header", Field, 0}, - {"RouteRIB", Func, 0}, - {"RoutingMessage", Type, 0}, - {"RtAttr", Type, 0}, - {"RtAttr.Len", Field, 0}, - {"RtAttr.Type", Field, 0}, - {"RtGenmsg", Type, 0}, - {"RtGenmsg.Family", Field, 0}, - {"RtMetrics", Type, 0}, - {"RtMetrics.Expire", Field, 0}, - {"RtMetrics.Filler", Field, 0}, - {"RtMetrics.Hopcount", Field, 0}, - {"RtMetrics.Locks", Field, 0}, - {"RtMetrics.Mtu", Field, 0}, - {"RtMetrics.Pad", Field, 3}, - {"RtMetrics.Pksent", Field, 0}, - {"RtMetrics.Recvpipe", Field, 0}, - {"RtMetrics.Refcnt", Field, 2}, - {"RtMetrics.Rtt", Field, 0}, - {"RtMetrics.Rttvar", Field, 0}, - {"RtMetrics.Sendpipe", Field, 0}, - {"RtMetrics.Ssthresh", Field, 0}, - {"RtMetrics.Weight", Field, 0}, - {"RtMsg", Type, 0}, - {"RtMsg.Dst_len", Field, 0}, - {"RtMsg.Family", Field, 0}, - {"RtMsg.Flags", Field, 0}, - {"RtMsg.Protocol", Field, 0}, - {"RtMsg.Scope", Field, 0}, - {"RtMsg.Src_len", Field, 0}, - {"RtMsg.Table", Field, 0}, - {"RtMsg.Tos", Field, 0}, - {"RtMsg.Type", Field, 0}, - {"RtMsghdr", Type, 0}, - {"RtMsghdr.Addrs", Field, 0}, - {"RtMsghdr.Errno", Field, 0}, - {"RtMsghdr.Flags", Field, 0}, - {"RtMsghdr.Fmask", Field, 0}, - {"RtMsghdr.Hdrlen", Field, 2}, - {"RtMsghdr.Index", Field, 0}, - {"RtMsghdr.Inits", Field, 0}, - {"RtMsghdr.Mpls", Field, 2}, - {"RtMsghdr.Msglen", Field, 0}, - {"RtMsghdr.Pad_cgo_0", Field, 0}, - {"RtMsghdr.Pad_cgo_1", Field, 2}, - {"RtMsghdr.Pid", Field, 0}, - {"RtMsghdr.Priority", Field, 2}, - {"RtMsghdr.Rmx", Field, 0}, - {"RtMsghdr.Seq", Field, 0}, - {"RtMsghdr.Tableid", Field, 2}, - {"RtMsghdr.Type", Field, 0}, - {"RtMsghdr.Use", Field, 0}, - {"RtMsghdr.Version", Field, 0}, - {"RtNexthop", Type, 0}, - {"RtNexthop.Flags", Field, 0}, - {"RtNexthop.Hops", Field, 0}, - {"RtNexthop.Ifindex", Field, 0}, - {"RtNexthop.Len", Field, 0}, - {"Rusage", Type, 0}, - {"Rusage.CreationTime", Field, 0}, - {"Rusage.ExitTime", Field, 0}, - {"Rusage.Idrss", Field, 0}, - {"Rusage.Inblock", Field, 0}, - {"Rusage.Isrss", Field, 0}, - {"Rusage.Ixrss", Field, 0}, - {"Rusage.KernelTime", Field, 0}, - {"Rusage.Majflt", Field, 0}, - {"Rusage.Maxrss", Field, 0}, - {"Rusage.Minflt", Field, 0}, - {"Rusage.Msgrcv", Field, 0}, - {"Rusage.Msgsnd", Field, 0}, - {"Rusage.Nivcsw", Field, 0}, - {"Rusage.Nsignals", Field, 0}, - {"Rusage.Nswap", Field, 0}, - {"Rusage.Nvcsw", Field, 0}, - {"Rusage.Oublock", Field, 0}, - {"Rusage.Stime", Field, 0}, - {"Rusage.UserTime", Field, 0}, - {"Rusage.Utime", Field, 0}, - {"SCM_BINTIME", Const, 0}, - {"SCM_CREDENTIALS", Const, 0}, - {"SCM_CREDS", Const, 0}, - {"SCM_RIGHTS", Const, 0}, - {"SCM_TIMESTAMP", Const, 0}, - {"SCM_TIMESTAMPING", Const, 0}, - {"SCM_TIMESTAMPNS", Const, 0}, - {"SCM_TIMESTAMP_MONOTONIC", Const, 0}, - {"SHUT_RD", Const, 0}, - {"SHUT_RDWR", Const, 0}, - 
{"SHUT_WR", Const, 0}, - {"SID", Type, 0}, - {"SIDAndAttributes", Type, 0}, - {"SIDAndAttributes.Attributes", Field, 0}, - {"SIDAndAttributes.Sid", Field, 0}, - {"SIGABRT", Const, 0}, - {"SIGALRM", Const, 0}, - {"SIGBUS", Const, 0}, - {"SIGCHLD", Const, 0}, - {"SIGCLD", Const, 0}, - {"SIGCONT", Const, 0}, - {"SIGEMT", Const, 0}, - {"SIGFPE", Const, 0}, - {"SIGHUP", Const, 0}, - {"SIGILL", Const, 0}, - {"SIGINFO", Const, 0}, - {"SIGINT", Const, 0}, - {"SIGIO", Const, 0}, - {"SIGIOT", Const, 0}, - {"SIGKILL", Const, 0}, - {"SIGLIBRT", Const, 1}, - {"SIGLWP", Const, 0}, - {"SIGPIPE", Const, 0}, - {"SIGPOLL", Const, 0}, - {"SIGPROF", Const, 0}, - {"SIGPWR", Const, 0}, - {"SIGQUIT", Const, 0}, - {"SIGSEGV", Const, 0}, - {"SIGSTKFLT", Const, 0}, - {"SIGSTOP", Const, 0}, - {"SIGSYS", Const, 0}, - {"SIGTERM", Const, 0}, - {"SIGTHR", Const, 0}, - {"SIGTRAP", Const, 0}, - {"SIGTSTP", Const, 0}, - {"SIGTTIN", Const, 0}, - {"SIGTTOU", Const, 0}, - {"SIGUNUSED", Const, 0}, - {"SIGURG", Const, 0}, - {"SIGUSR1", Const, 0}, - {"SIGUSR2", Const, 0}, - {"SIGVTALRM", Const, 0}, - {"SIGWINCH", Const, 0}, - {"SIGXCPU", Const, 0}, - {"SIGXFSZ", Const, 0}, - {"SIOCADDDLCI", Const, 0}, - {"SIOCADDMULTI", Const, 0}, - {"SIOCADDRT", Const, 0}, - {"SIOCAIFADDR", Const, 0}, - {"SIOCAIFGROUP", Const, 0}, - {"SIOCALIFADDR", Const, 0}, - {"SIOCARPIPLL", Const, 0}, - {"SIOCATMARK", Const, 0}, - {"SIOCAUTOADDR", Const, 0}, - {"SIOCAUTONETMASK", Const, 0}, - {"SIOCBRDGADD", Const, 1}, - {"SIOCBRDGADDS", Const, 1}, - {"SIOCBRDGARL", Const, 1}, - {"SIOCBRDGDADDR", Const, 1}, - {"SIOCBRDGDEL", Const, 1}, - {"SIOCBRDGDELS", Const, 1}, - {"SIOCBRDGFLUSH", Const, 1}, - {"SIOCBRDGFRL", Const, 1}, - {"SIOCBRDGGCACHE", Const, 1}, - {"SIOCBRDGGFD", Const, 1}, - {"SIOCBRDGGHT", Const, 1}, - {"SIOCBRDGGIFFLGS", Const, 1}, - {"SIOCBRDGGMA", Const, 1}, - {"SIOCBRDGGPARAM", Const, 1}, - {"SIOCBRDGGPRI", Const, 1}, - {"SIOCBRDGGRL", Const, 1}, - {"SIOCBRDGGSIFS", Const, 1}, - {"SIOCBRDGGTO", Const, 1}, - {"SIOCBRDGIFS", Const, 1}, - {"SIOCBRDGRTS", Const, 1}, - {"SIOCBRDGSADDR", Const, 1}, - {"SIOCBRDGSCACHE", Const, 1}, - {"SIOCBRDGSFD", Const, 1}, - {"SIOCBRDGSHT", Const, 1}, - {"SIOCBRDGSIFCOST", Const, 1}, - {"SIOCBRDGSIFFLGS", Const, 1}, - {"SIOCBRDGSIFPRIO", Const, 1}, - {"SIOCBRDGSMA", Const, 1}, - {"SIOCBRDGSPRI", Const, 1}, - {"SIOCBRDGSPROTO", Const, 1}, - {"SIOCBRDGSTO", Const, 1}, - {"SIOCBRDGSTXHC", Const, 1}, - {"SIOCDARP", Const, 0}, - {"SIOCDELDLCI", Const, 0}, - {"SIOCDELMULTI", Const, 0}, - {"SIOCDELRT", Const, 0}, - {"SIOCDEVPRIVATE", Const, 0}, - {"SIOCDIFADDR", Const, 0}, - {"SIOCDIFGROUP", Const, 0}, - {"SIOCDIFPHYADDR", Const, 0}, - {"SIOCDLIFADDR", Const, 0}, - {"SIOCDRARP", Const, 0}, - {"SIOCGARP", Const, 0}, - {"SIOCGDRVSPEC", Const, 0}, - {"SIOCGETKALIVE", Const, 1}, - {"SIOCGETLABEL", Const, 1}, - {"SIOCGETPFLOW", Const, 1}, - {"SIOCGETPFSYNC", Const, 1}, - {"SIOCGETSGCNT", Const, 0}, - {"SIOCGETVIFCNT", Const, 0}, - {"SIOCGETVLAN", Const, 0}, - {"SIOCGHIWAT", Const, 0}, - {"SIOCGIFADDR", Const, 0}, - {"SIOCGIFADDRPREF", Const, 1}, - {"SIOCGIFALIAS", Const, 1}, - {"SIOCGIFALTMTU", Const, 0}, - {"SIOCGIFASYNCMAP", Const, 0}, - {"SIOCGIFBOND", Const, 0}, - {"SIOCGIFBR", Const, 0}, - {"SIOCGIFBRDADDR", Const, 0}, - {"SIOCGIFCAP", Const, 0}, - {"SIOCGIFCONF", Const, 0}, - {"SIOCGIFCOUNT", Const, 0}, - {"SIOCGIFDATA", Const, 1}, - {"SIOCGIFDESCR", Const, 0}, - {"SIOCGIFDEVMTU", Const, 0}, - {"SIOCGIFDLT", Const, 1}, - {"SIOCGIFDSTADDR", Const, 0}, - {"SIOCGIFENCAP", Const, 0}, - {"SIOCGIFFIB", Const, 1}, - 
{"SIOCGIFFLAGS", Const, 0}, - {"SIOCGIFGATTR", Const, 1}, - {"SIOCGIFGENERIC", Const, 0}, - {"SIOCGIFGMEMB", Const, 0}, - {"SIOCGIFGROUP", Const, 0}, - {"SIOCGIFHARDMTU", Const, 3}, - {"SIOCGIFHWADDR", Const, 0}, - {"SIOCGIFINDEX", Const, 0}, - {"SIOCGIFKPI", Const, 0}, - {"SIOCGIFMAC", Const, 0}, - {"SIOCGIFMAP", Const, 0}, - {"SIOCGIFMEDIA", Const, 0}, - {"SIOCGIFMEM", Const, 0}, - {"SIOCGIFMETRIC", Const, 0}, - {"SIOCGIFMTU", Const, 0}, - {"SIOCGIFNAME", Const, 0}, - {"SIOCGIFNETMASK", Const, 0}, - {"SIOCGIFPDSTADDR", Const, 0}, - {"SIOCGIFPFLAGS", Const, 0}, - {"SIOCGIFPHYS", Const, 0}, - {"SIOCGIFPRIORITY", Const, 1}, - {"SIOCGIFPSRCADDR", Const, 0}, - {"SIOCGIFRDOMAIN", Const, 1}, - {"SIOCGIFRTLABEL", Const, 1}, - {"SIOCGIFSLAVE", Const, 0}, - {"SIOCGIFSTATUS", Const, 0}, - {"SIOCGIFTIMESLOT", Const, 1}, - {"SIOCGIFTXQLEN", Const, 0}, - {"SIOCGIFVLAN", Const, 0}, - {"SIOCGIFWAKEFLAGS", Const, 0}, - {"SIOCGIFXFLAGS", Const, 1}, - {"SIOCGLIFADDR", Const, 0}, - {"SIOCGLIFPHYADDR", Const, 0}, - {"SIOCGLIFPHYRTABLE", Const, 1}, - {"SIOCGLIFPHYTTL", Const, 3}, - {"SIOCGLINKSTR", Const, 1}, - {"SIOCGLOWAT", Const, 0}, - {"SIOCGPGRP", Const, 0}, - {"SIOCGPRIVATE_0", Const, 0}, - {"SIOCGPRIVATE_1", Const, 0}, - {"SIOCGRARP", Const, 0}, - {"SIOCGSPPPPARAMS", Const, 3}, - {"SIOCGSTAMP", Const, 0}, - {"SIOCGSTAMPNS", Const, 0}, - {"SIOCGVH", Const, 1}, - {"SIOCGVNETID", Const, 3}, - {"SIOCIFCREATE", Const, 0}, - {"SIOCIFCREATE2", Const, 0}, - {"SIOCIFDESTROY", Const, 0}, - {"SIOCIFGCLONERS", Const, 0}, - {"SIOCINITIFADDR", Const, 1}, - {"SIOCPROTOPRIVATE", Const, 0}, - {"SIOCRSLVMULTI", Const, 0}, - {"SIOCRTMSG", Const, 0}, - {"SIOCSARP", Const, 0}, - {"SIOCSDRVSPEC", Const, 0}, - {"SIOCSETKALIVE", Const, 1}, - {"SIOCSETLABEL", Const, 1}, - {"SIOCSETPFLOW", Const, 1}, - {"SIOCSETPFSYNC", Const, 1}, - {"SIOCSETVLAN", Const, 0}, - {"SIOCSHIWAT", Const, 0}, - {"SIOCSIFADDR", Const, 0}, - {"SIOCSIFADDRPREF", Const, 1}, - {"SIOCSIFALTMTU", Const, 0}, - {"SIOCSIFASYNCMAP", Const, 0}, - {"SIOCSIFBOND", Const, 0}, - {"SIOCSIFBR", Const, 0}, - {"SIOCSIFBRDADDR", Const, 0}, - {"SIOCSIFCAP", Const, 0}, - {"SIOCSIFDESCR", Const, 0}, - {"SIOCSIFDSTADDR", Const, 0}, - {"SIOCSIFENCAP", Const, 0}, - {"SIOCSIFFIB", Const, 1}, - {"SIOCSIFFLAGS", Const, 0}, - {"SIOCSIFGATTR", Const, 1}, - {"SIOCSIFGENERIC", Const, 0}, - {"SIOCSIFHWADDR", Const, 0}, - {"SIOCSIFHWBROADCAST", Const, 0}, - {"SIOCSIFKPI", Const, 0}, - {"SIOCSIFLINK", Const, 0}, - {"SIOCSIFLLADDR", Const, 0}, - {"SIOCSIFMAC", Const, 0}, - {"SIOCSIFMAP", Const, 0}, - {"SIOCSIFMEDIA", Const, 0}, - {"SIOCSIFMEM", Const, 0}, - {"SIOCSIFMETRIC", Const, 0}, - {"SIOCSIFMTU", Const, 0}, - {"SIOCSIFNAME", Const, 0}, - {"SIOCSIFNETMASK", Const, 0}, - {"SIOCSIFPFLAGS", Const, 0}, - {"SIOCSIFPHYADDR", Const, 0}, - {"SIOCSIFPHYS", Const, 0}, - {"SIOCSIFPRIORITY", Const, 1}, - {"SIOCSIFRDOMAIN", Const, 1}, - {"SIOCSIFRTLABEL", Const, 1}, - {"SIOCSIFRVNET", Const, 0}, - {"SIOCSIFSLAVE", Const, 0}, - {"SIOCSIFTIMESLOT", Const, 1}, - {"SIOCSIFTXQLEN", Const, 0}, - {"SIOCSIFVLAN", Const, 0}, - {"SIOCSIFVNET", Const, 0}, - {"SIOCSIFXFLAGS", Const, 1}, - {"SIOCSLIFPHYADDR", Const, 0}, - {"SIOCSLIFPHYRTABLE", Const, 1}, - {"SIOCSLIFPHYTTL", Const, 3}, - {"SIOCSLINKSTR", Const, 1}, - {"SIOCSLOWAT", Const, 0}, - {"SIOCSPGRP", Const, 0}, - {"SIOCSRARP", Const, 0}, - {"SIOCSSPPPPARAMS", Const, 3}, - {"SIOCSVH", Const, 1}, - {"SIOCSVNETID", Const, 3}, - {"SIOCZIFDATA", Const, 1}, - {"SIO_GET_EXTENSION_FUNCTION_POINTER", Const, 1}, - {"SIO_GET_INTERFACE_LIST", Const, 0}, - 
{"SIO_KEEPALIVE_VALS", Const, 3}, - {"SIO_UDP_CONNRESET", Const, 4}, - {"SOCK_CLOEXEC", Const, 0}, - {"SOCK_DCCP", Const, 0}, - {"SOCK_DGRAM", Const, 0}, - {"SOCK_FLAGS_MASK", Const, 1}, - {"SOCK_MAXADDRLEN", Const, 0}, - {"SOCK_NONBLOCK", Const, 0}, - {"SOCK_NOSIGPIPE", Const, 1}, - {"SOCK_PACKET", Const, 0}, - {"SOCK_RAW", Const, 0}, - {"SOCK_RDM", Const, 0}, - {"SOCK_SEQPACKET", Const, 0}, - {"SOCK_STREAM", Const, 0}, - {"SOL_AAL", Const, 0}, - {"SOL_ATM", Const, 0}, - {"SOL_DECNET", Const, 0}, - {"SOL_ICMPV6", Const, 0}, - {"SOL_IP", Const, 0}, - {"SOL_IPV6", Const, 0}, - {"SOL_IRDA", Const, 0}, - {"SOL_PACKET", Const, 0}, - {"SOL_RAW", Const, 0}, - {"SOL_SOCKET", Const, 0}, - {"SOL_TCP", Const, 0}, - {"SOL_X25", Const, 0}, - {"SOMAXCONN", Const, 0}, - {"SO_ACCEPTCONN", Const, 0}, - {"SO_ACCEPTFILTER", Const, 0}, - {"SO_ATTACH_FILTER", Const, 0}, - {"SO_BINDANY", Const, 1}, - {"SO_BINDTODEVICE", Const, 0}, - {"SO_BINTIME", Const, 0}, - {"SO_BROADCAST", Const, 0}, - {"SO_BSDCOMPAT", Const, 0}, - {"SO_DEBUG", Const, 0}, - {"SO_DETACH_FILTER", Const, 0}, - {"SO_DOMAIN", Const, 0}, - {"SO_DONTROUTE", Const, 0}, - {"SO_DONTTRUNC", Const, 0}, - {"SO_ERROR", Const, 0}, - {"SO_KEEPALIVE", Const, 0}, - {"SO_LABEL", Const, 0}, - {"SO_LINGER", Const, 0}, - {"SO_LINGER_SEC", Const, 0}, - {"SO_LISTENINCQLEN", Const, 0}, - {"SO_LISTENQLEN", Const, 0}, - {"SO_LISTENQLIMIT", Const, 0}, - {"SO_MARK", Const, 0}, - {"SO_NETPROC", Const, 1}, - {"SO_NKE", Const, 0}, - {"SO_NOADDRERR", Const, 0}, - {"SO_NOHEADER", Const, 1}, - {"SO_NOSIGPIPE", Const, 0}, - {"SO_NOTIFYCONFLICT", Const, 0}, - {"SO_NO_CHECK", Const, 0}, - {"SO_NO_DDP", Const, 0}, - {"SO_NO_OFFLOAD", Const, 0}, - {"SO_NP_EXTENSIONS", Const, 0}, - {"SO_NREAD", Const, 0}, - {"SO_NUMRCVPKT", Const, 16}, - {"SO_NWRITE", Const, 0}, - {"SO_OOBINLINE", Const, 0}, - {"SO_OVERFLOWED", Const, 1}, - {"SO_PASSCRED", Const, 0}, - {"SO_PASSSEC", Const, 0}, - {"SO_PEERCRED", Const, 0}, - {"SO_PEERLABEL", Const, 0}, - {"SO_PEERNAME", Const, 0}, - {"SO_PEERSEC", Const, 0}, - {"SO_PRIORITY", Const, 0}, - {"SO_PROTOCOL", Const, 0}, - {"SO_PROTOTYPE", Const, 1}, - {"SO_RANDOMPORT", Const, 0}, - {"SO_RCVBUF", Const, 0}, - {"SO_RCVBUFFORCE", Const, 0}, - {"SO_RCVLOWAT", Const, 0}, - {"SO_RCVTIMEO", Const, 0}, - {"SO_RESTRICTIONS", Const, 0}, - {"SO_RESTRICT_DENYIN", Const, 0}, - {"SO_RESTRICT_DENYOUT", Const, 0}, - {"SO_RESTRICT_DENYSET", Const, 0}, - {"SO_REUSEADDR", Const, 0}, - {"SO_REUSEPORT", Const, 0}, - {"SO_REUSESHAREUID", Const, 0}, - {"SO_RTABLE", Const, 1}, - {"SO_RXQ_OVFL", Const, 0}, - {"SO_SECURITY_AUTHENTICATION", Const, 0}, - {"SO_SECURITY_ENCRYPTION_NETWORK", Const, 0}, - {"SO_SECURITY_ENCRYPTION_TRANSPORT", Const, 0}, - {"SO_SETFIB", Const, 0}, - {"SO_SNDBUF", Const, 0}, - {"SO_SNDBUFFORCE", Const, 0}, - {"SO_SNDLOWAT", Const, 0}, - {"SO_SNDTIMEO", Const, 0}, - {"SO_SPLICE", Const, 1}, - {"SO_TIMESTAMP", Const, 0}, - {"SO_TIMESTAMPING", Const, 0}, - {"SO_TIMESTAMPNS", Const, 0}, - {"SO_TIMESTAMP_MONOTONIC", Const, 0}, - {"SO_TYPE", Const, 0}, - {"SO_UPCALLCLOSEWAIT", Const, 0}, - {"SO_UPDATE_ACCEPT_CONTEXT", Const, 0}, - {"SO_UPDATE_CONNECT_CONTEXT", Const, 1}, - {"SO_USELOOPBACK", Const, 0}, - {"SO_USER_COOKIE", Const, 1}, - {"SO_VENDOR", Const, 3}, - {"SO_WANTMORE", Const, 0}, - {"SO_WANTOOBFLAG", Const, 0}, - {"SSLExtraCertChainPolicyPara", Type, 0}, - {"SSLExtraCertChainPolicyPara.AuthType", Field, 0}, - {"SSLExtraCertChainPolicyPara.Checks", Field, 0}, - {"SSLExtraCertChainPolicyPara.ServerName", Field, 0}, - 
{"SSLExtraCertChainPolicyPara.Size", Field, 0}, - {"STANDARD_RIGHTS_ALL", Const, 0}, - {"STANDARD_RIGHTS_EXECUTE", Const, 0}, - {"STANDARD_RIGHTS_READ", Const, 0}, - {"STANDARD_RIGHTS_REQUIRED", Const, 0}, - {"STANDARD_RIGHTS_WRITE", Const, 0}, - {"STARTF_USESHOWWINDOW", Const, 0}, - {"STARTF_USESTDHANDLES", Const, 0}, - {"STD_ERROR_HANDLE", Const, 0}, - {"STD_INPUT_HANDLE", Const, 0}, - {"STD_OUTPUT_HANDLE", Const, 0}, - {"SUBLANG_ENGLISH_US", Const, 0}, - {"SW_FORCEMINIMIZE", Const, 0}, - {"SW_HIDE", Const, 0}, - {"SW_MAXIMIZE", Const, 0}, - {"SW_MINIMIZE", Const, 0}, - {"SW_NORMAL", Const, 0}, - {"SW_RESTORE", Const, 0}, - {"SW_SHOW", Const, 0}, - {"SW_SHOWDEFAULT", Const, 0}, - {"SW_SHOWMAXIMIZED", Const, 0}, - {"SW_SHOWMINIMIZED", Const, 0}, - {"SW_SHOWMINNOACTIVE", Const, 0}, - {"SW_SHOWNA", Const, 0}, - {"SW_SHOWNOACTIVATE", Const, 0}, - {"SW_SHOWNORMAL", Const, 0}, - {"SYMBOLIC_LINK_FLAG_DIRECTORY", Const, 4}, - {"SYNCHRONIZE", Const, 0}, - {"SYSCTL_VERSION", Const, 1}, - {"SYSCTL_VERS_0", Const, 1}, - {"SYSCTL_VERS_1", Const, 1}, - {"SYSCTL_VERS_MASK", Const, 1}, - {"SYS_ABORT2", Const, 0}, - {"SYS_ACCEPT", Const, 0}, - {"SYS_ACCEPT4", Const, 0}, - {"SYS_ACCEPT_NOCANCEL", Const, 0}, - {"SYS_ACCESS", Const, 0}, - {"SYS_ACCESS_EXTENDED", Const, 0}, - {"SYS_ACCT", Const, 0}, - {"SYS_ADD_KEY", Const, 0}, - {"SYS_ADD_PROFIL", Const, 0}, - {"SYS_ADJFREQ", Const, 1}, - {"SYS_ADJTIME", Const, 0}, - {"SYS_ADJTIMEX", Const, 0}, - {"SYS_AFS_SYSCALL", Const, 0}, - {"SYS_AIO_CANCEL", Const, 0}, - {"SYS_AIO_ERROR", Const, 0}, - {"SYS_AIO_FSYNC", Const, 0}, - {"SYS_AIO_MLOCK", Const, 14}, - {"SYS_AIO_READ", Const, 0}, - {"SYS_AIO_RETURN", Const, 0}, - {"SYS_AIO_SUSPEND", Const, 0}, - {"SYS_AIO_SUSPEND_NOCANCEL", Const, 0}, - {"SYS_AIO_WAITCOMPLETE", Const, 14}, - {"SYS_AIO_WRITE", Const, 0}, - {"SYS_ALARM", Const, 0}, - {"SYS_ARCH_PRCTL", Const, 0}, - {"SYS_ARM_FADVISE64_64", Const, 0}, - {"SYS_ARM_SYNC_FILE_RANGE", Const, 0}, - {"SYS_ATGETMSG", Const, 0}, - {"SYS_ATPGETREQ", Const, 0}, - {"SYS_ATPGETRSP", Const, 0}, - {"SYS_ATPSNDREQ", Const, 0}, - {"SYS_ATPSNDRSP", Const, 0}, - {"SYS_ATPUTMSG", Const, 0}, - {"SYS_ATSOCKET", Const, 0}, - {"SYS_AUDIT", Const, 0}, - {"SYS_AUDITCTL", Const, 0}, - {"SYS_AUDITON", Const, 0}, - {"SYS_AUDIT_SESSION_JOIN", Const, 0}, - {"SYS_AUDIT_SESSION_PORT", Const, 0}, - {"SYS_AUDIT_SESSION_SELF", Const, 0}, - {"SYS_BDFLUSH", Const, 0}, - {"SYS_BIND", Const, 0}, - {"SYS_BINDAT", Const, 3}, - {"SYS_BREAK", Const, 0}, - {"SYS_BRK", Const, 0}, - {"SYS_BSDTHREAD_CREATE", Const, 0}, - {"SYS_BSDTHREAD_REGISTER", Const, 0}, - {"SYS_BSDTHREAD_TERMINATE", Const, 0}, - {"SYS_CAPGET", Const, 0}, - {"SYS_CAPSET", Const, 0}, - {"SYS_CAP_ENTER", Const, 0}, - {"SYS_CAP_FCNTLS_GET", Const, 1}, - {"SYS_CAP_FCNTLS_LIMIT", Const, 1}, - {"SYS_CAP_GETMODE", Const, 0}, - {"SYS_CAP_GETRIGHTS", Const, 0}, - {"SYS_CAP_IOCTLS_GET", Const, 1}, - {"SYS_CAP_IOCTLS_LIMIT", Const, 1}, - {"SYS_CAP_NEW", Const, 0}, - {"SYS_CAP_RIGHTS_GET", Const, 1}, - {"SYS_CAP_RIGHTS_LIMIT", Const, 1}, - {"SYS_CHDIR", Const, 0}, - {"SYS_CHFLAGS", Const, 0}, - {"SYS_CHFLAGSAT", Const, 3}, - {"SYS_CHMOD", Const, 0}, - {"SYS_CHMOD_EXTENDED", Const, 0}, - {"SYS_CHOWN", Const, 0}, - {"SYS_CHOWN32", Const, 0}, - {"SYS_CHROOT", Const, 0}, - {"SYS_CHUD", Const, 0}, - {"SYS_CLOCK_ADJTIME", Const, 0}, - {"SYS_CLOCK_GETCPUCLOCKID2", Const, 1}, - {"SYS_CLOCK_GETRES", Const, 0}, - {"SYS_CLOCK_GETTIME", Const, 0}, - {"SYS_CLOCK_NANOSLEEP", Const, 0}, - {"SYS_CLOCK_SETTIME", Const, 0}, - {"SYS_CLONE", Const, 0}, - 
{"SYS_CLOSE", Const, 0}, - {"SYS_CLOSEFROM", Const, 0}, - {"SYS_CLOSE_NOCANCEL", Const, 0}, - {"SYS_CONNECT", Const, 0}, - {"SYS_CONNECTAT", Const, 3}, - {"SYS_CONNECT_NOCANCEL", Const, 0}, - {"SYS_COPYFILE", Const, 0}, - {"SYS_CPUSET", Const, 0}, - {"SYS_CPUSET_GETAFFINITY", Const, 0}, - {"SYS_CPUSET_GETID", Const, 0}, - {"SYS_CPUSET_SETAFFINITY", Const, 0}, - {"SYS_CPUSET_SETID", Const, 0}, - {"SYS_CREAT", Const, 0}, - {"SYS_CREATE_MODULE", Const, 0}, - {"SYS_CSOPS", Const, 0}, - {"SYS_CSOPS_AUDITTOKEN", Const, 16}, - {"SYS_DELETE", Const, 0}, - {"SYS_DELETE_MODULE", Const, 0}, - {"SYS_DUP", Const, 0}, - {"SYS_DUP2", Const, 0}, - {"SYS_DUP3", Const, 0}, - {"SYS_EACCESS", Const, 0}, - {"SYS_EPOLL_CREATE", Const, 0}, - {"SYS_EPOLL_CREATE1", Const, 0}, - {"SYS_EPOLL_CTL", Const, 0}, - {"SYS_EPOLL_CTL_OLD", Const, 0}, - {"SYS_EPOLL_PWAIT", Const, 0}, - {"SYS_EPOLL_WAIT", Const, 0}, - {"SYS_EPOLL_WAIT_OLD", Const, 0}, - {"SYS_EVENTFD", Const, 0}, - {"SYS_EVENTFD2", Const, 0}, - {"SYS_EXCHANGEDATA", Const, 0}, - {"SYS_EXECVE", Const, 0}, - {"SYS_EXIT", Const, 0}, - {"SYS_EXIT_GROUP", Const, 0}, - {"SYS_EXTATTRCTL", Const, 0}, - {"SYS_EXTATTR_DELETE_FD", Const, 0}, - {"SYS_EXTATTR_DELETE_FILE", Const, 0}, - {"SYS_EXTATTR_DELETE_LINK", Const, 0}, - {"SYS_EXTATTR_GET_FD", Const, 0}, - {"SYS_EXTATTR_GET_FILE", Const, 0}, - {"SYS_EXTATTR_GET_LINK", Const, 0}, - {"SYS_EXTATTR_LIST_FD", Const, 0}, - {"SYS_EXTATTR_LIST_FILE", Const, 0}, - {"SYS_EXTATTR_LIST_LINK", Const, 0}, - {"SYS_EXTATTR_SET_FD", Const, 0}, - {"SYS_EXTATTR_SET_FILE", Const, 0}, - {"SYS_EXTATTR_SET_LINK", Const, 0}, - {"SYS_FACCESSAT", Const, 0}, - {"SYS_FADVISE64", Const, 0}, - {"SYS_FADVISE64_64", Const, 0}, - {"SYS_FALLOCATE", Const, 0}, - {"SYS_FANOTIFY_INIT", Const, 0}, - {"SYS_FANOTIFY_MARK", Const, 0}, - {"SYS_FCHDIR", Const, 0}, - {"SYS_FCHFLAGS", Const, 0}, - {"SYS_FCHMOD", Const, 0}, - {"SYS_FCHMODAT", Const, 0}, - {"SYS_FCHMOD_EXTENDED", Const, 0}, - {"SYS_FCHOWN", Const, 0}, - {"SYS_FCHOWN32", Const, 0}, - {"SYS_FCHOWNAT", Const, 0}, - {"SYS_FCHROOT", Const, 1}, - {"SYS_FCNTL", Const, 0}, - {"SYS_FCNTL64", Const, 0}, - {"SYS_FCNTL_NOCANCEL", Const, 0}, - {"SYS_FDATASYNC", Const, 0}, - {"SYS_FEXECVE", Const, 0}, - {"SYS_FFCLOCK_GETCOUNTER", Const, 0}, - {"SYS_FFCLOCK_GETESTIMATE", Const, 0}, - {"SYS_FFCLOCK_SETESTIMATE", Const, 0}, - {"SYS_FFSCTL", Const, 0}, - {"SYS_FGETATTRLIST", Const, 0}, - {"SYS_FGETXATTR", Const, 0}, - {"SYS_FHOPEN", Const, 0}, - {"SYS_FHSTAT", Const, 0}, - {"SYS_FHSTATFS", Const, 0}, - {"SYS_FILEPORT_MAKEFD", Const, 0}, - {"SYS_FILEPORT_MAKEPORT", Const, 0}, - {"SYS_FKTRACE", Const, 1}, - {"SYS_FLISTXATTR", Const, 0}, - {"SYS_FLOCK", Const, 0}, - {"SYS_FORK", Const, 0}, - {"SYS_FPATHCONF", Const, 0}, - {"SYS_FREEBSD6_FTRUNCATE", Const, 0}, - {"SYS_FREEBSD6_LSEEK", Const, 0}, - {"SYS_FREEBSD6_MMAP", Const, 0}, - {"SYS_FREEBSD6_PREAD", Const, 0}, - {"SYS_FREEBSD6_PWRITE", Const, 0}, - {"SYS_FREEBSD6_TRUNCATE", Const, 0}, - {"SYS_FREMOVEXATTR", Const, 0}, - {"SYS_FSCTL", Const, 0}, - {"SYS_FSETATTRLIST", Const, 0}, - {"SYS_FSETXATTR", Const, 0}, - {"SYS_FSGETPATH", Const, 0}, - {"SYS_FSTAT", Const, 0}, - {"SYS_FSTAT64", Const, 0}, - {"SYS_FSTAT64_EXTENDED", Const, 0}, - {"SYS_FSTATAT", Const, 0}, - {"SYS_FSTATAT64", Const, 0}, - {"SYS_FSTATFS", Const, 0}, - {"SYS_FSTATFS64", Const, 0}, - {"SYS_FSTATV", Const, 0}, - {"SYS_FSTATVFS1", Const, 1}, - {"SYS_FSTAT_EXTENDED", Const, 0}, - {"SYS_FSYNC", Const, 0}, - {"SYS_FSYNC_NOCANCEL", Const, 0}, - {"SYS_FSYNC_RANGE", Const, 1}, - {"SYS_FTIME", Const, 0}, - 
{"SYS_FTRUNCATE", Const, 0}, - {"SYS_FTRUNCATE64", Const, 0}, - {"SYS_FUTEX", Const, 0}, - {"SYS_FUTIMENS", Const, 1}, - {"SYS_FUTIMES", Const, 0}, - {"SYS_FUTIMESAT", Const, 0}, - {"SYS_GETATTRLIST", Const, 0}, - {"SYS_GETAUDIT", Const, 0}, - {"SYS_GETAUDIT_ADDR", Const, 0}, - {"SYS_GETAUID", Const, 0}, - {"SYS_GETCONTEXT", Const, 0}, - {"SYS_GETCPU", Const, 0}, - {"SYS_GETCWD", Const, 0}, - {"SYS_GETDENTS", Const, 0}, - {"SYS_GETDENTS64", Const, 0}, - {"SYS_GETDIRENTRIES", Const, 0}, - {"SYS_GETDIRENTRIES64", Const, 0}, - {"SYS_GETDIRENTRIESATTR", Const, 0}, - {"SYS_GETDTABLECOUNT", Const, 1}, - {"SYS_GETDTABLESIZE", Const, 0}, - {"SYS_GETEGID", Const, 0}, - {"SYS_GETEGID32", Const, 0}, - {"SYS_GETEUID", Const, 0}, - {"SYS_GETEUID32", Const, 0}, - {"SYS_GETFH", Const, 0}, - {"SYS_GETFSSTAT", Const, 0}, - {"SYS_GETFSSTAT64", Const, 0}, - {"SYS_GETGID", Const, 0}, - {"SYS_GETGID32", Const, 0}, - {"SYS_GETGROUPS", Const, 0}, - {"SYS_GETGROUPS32", Const, 0}, - {"SYS_GETHOSTUUID", Const, 0}, - {"SYS_GETITIMER", Const, 0}, - {"SYS_GETLCID", Const, 0}, - {"SYS_GETLOGIN", Const, 0}, - {"SYS_GETLOGINCLASS", Const, 0}, - {"SYS_GETPEERNAME", Const, 0}, - {"SYS_GETPGID", Const, 0}, - {"SYS_GETPGRP", Const, 0}, - {"SYS_GETPID", Const, 0}, - {"SYS_GETPMSG", Const, 0}, - {"SYS_GETPPID", Const, 0}, - {"SYS_GETPRIORITY", Const, 0}, - {"SYS_GETRESGID", Const, 0}, - {"SYS_GETRESGID32", Const, 0}, - {"SYS_GETRESUID", Const, 0}, - {"SYS_GETRESUID32", Const, 0}, - {"SYS_GETRLIMIT", Const, 0}, - {"SYS_GETRTABLE", Const, 1}, - {"SYS_GETRUSAGE", Const, 0}, - {"SYS_GETSGROUPS", Const, 0}, - {"SYS_GETSID", Const, 0}, - {"SYS_GETSOCKNAME", Const, 0}, - {"SYS_GETSOCKOPT", Const, 0}, - {"SYS_GETTHRID", Const, 1}, - {"SYS_GETTID", Const, 0}, - {"SYS_GETTIMEOFDAY", Const, 0}, - {"SYS_GETUID", Const, 0}, - {"SYS_GETUID32", Const, 0}, - {"SYS_GETVFSSTAT", Const, 1}, - {"SYS_GETWGROUPS", Const, 0}, - {"SYS_GETXATTR", Const, 0}, - {"SYS_GET_KERNEL_SYMS", Const, 0}, - {"SYS_GET_MEMPOLICY", Const, 0}, - {"SYS_GET_ROBUST_LIST", Const, 0}, - {"SYS_GET_THREAD_AREA", Const, 0}, - {"SYS_GSSD_SYSCALL", Const, 14}, - {"SYS_GTTY", Const, 0}, - {"SYS_IDENTITYSVC", Const, 0}, - {"SYS_IDLE", Const, 0}, - {"SYS_INITGROUPS", Const, 0}, - {"SYS_INIT_MODULE", Const, 0}, - {"SYS_INOTIFY_ADD_WATCH", Const, 0}, - {"SYS_INOTIFY_INIT", Const, 0}, - {"SYS_INOTIFY_INIT1", Const, 0}, - {"SYS_INOTIFY_RM_WATCH", Const, 0}, - {"SYS_IOCTL", Const, 0}, - {"SYS_IOPERM", Const, 0}, - {"SYS_IOPL", Const, 0}, - {"SYS_IOPOLICYSYS", Const, 0}, - {"SYS_IOPRIO_GET", Const, 0}, - {"SYS_IOPRIO_SET", Const, 0}, - {"SYS_IO_CANCEL", Const, 0}, - {"SYS_IO_DESTROY", Const, 0}, - {"SYS_IO_GETEVENTS", Const, 0}, - {"SYS_IO_SETUP", Const, 0}, - {"SYS_IO_SUBMIT", Const, 0}, - {"SYS_IPC", Const, 0}, - {"SYS_ISSETUGID", Const, 0}, - {"SYS_JAIL", Const, 0}, - {"SYS_JAIL_ATTACH", Const, 0}, - {"SYS_JAIL_GET", Const, 0}, - {"SYS_JAIL_REMOVE", Const, 0}, - {"SYS_JAIL_SET", Const, 0}, - {"SYS_KAS_INFO", Const, 16}, - {"SYS_KDEBUG_TRACE", Const, 0}, - {"SYS_KENV", Const, 0}, - {"SYS_KEVENT", Const, 0}, - {"SYS_KEVENT64", Const, 0}, - {"SYS_KEXEC_LOAD", Const, 0}, - {"SYS_KEYCTL", Const, 0}, - {"SYS_KILL", Const, 0}, - {"SYS_KLDFIND", Const, 0}, - {"SYS_KLDFIRSTMOD", Const, 0}, - {"SYS_KLDLOAD", Const, 0}, - {"SYS_KLDNEXT", Const, 0}, - {"SYS_KLDSTAT", Const, 0}, - {"SYS_KLDSYM", Const, 0}, - {"SYS_KLDUNLOAD", Const, 0}, - {"SYS_KLDUNLOADF", Const, 0}, - {"SYS_KMQ_NOTIFY", Const, 14}, - {"SYS_KMQ_OPEN", Const, 14}, - {"SYS_KMQ_SETATTR", Const, 14}, - {"SYS_KMQ_TIMEDRECEIVE", 
Const, 14}, - {"SYS_KMQ_TIMEDSEND", Const, 14}, - {"SYS_KMQ_UNLINK", Const, 14}, - {"SYS_KQUEUE", Const, 0}, - {"SYS_KQUEUE1", Const, 1}, - {"SYS_KSEM_CLOSE", Const, 14}, - {"SYS_KSEM_DESTROY", Const, 14}, - {"SYS_KSEM_GETVALUE", Const, 14}, - {"SYS_KSEM_INIT", Const, 14}, - {"SYS_KSEM_OPEN", Const, 14}, - {"SYS_KSEM_POST", Const, 14}, - {"SYS_KSEM_TIMEDWAIT", Const, 14}, - {"SYS_KSEM_TRYWAIT", Const, 14}, - {"SYS_KSEM_UNLINK", Const, 14}, - {"SYS_KSEM_WAIT", Const, 14}, - {"SYS_KTIMER_CREATE", Const, 0}, - {"SYS_KTIMER_DELETE", Const, 0}, - {"SYS_KTIMER_GETOVERRUN", Const, 0}, - {"SYS_KTIMER_GETTIME", Const, 0}, - {"SYS_KTIMER_SETTIME", Const, 0}, - {"SYS_KTRACE", Const, 0}, - {"SYS_LCHFLAGS", Const, 0}, - {"SYS_LCHMOD", Const, 0}, - {"SYS_LCHOWN", Const, 0}, - {"SYS_LCHOWN32", Const, 0}, - {"SYS_LEDGER", Const, 16}, - {"SYS_LGETFH", Const, 0}, - {"SYS_LGETXATTR", Const, 0}, - {"SYS_LINK", Const, 0}, - {"SYS_LINKAT", Const, 0}, - {"SYS_LIO_LISTIO", Const, 0}, - {"SYS_LISTEN", Const, 0}, - {"SYS_LISTXATTR", Const, 0}, - {"SYS_LLISTXATTR", Const, 0}, - {"SYS_LOCK", Const, 0}, - {"SYS_LOOKUP_DCOOKIE", Const, 0}, - {"SYS_LPATHCONF", Const, 0}, - {"SYS_LREMOVEXATTR", Const, 0}, - {"SYS_LSEEK", Const, 0}, - {"SYS_LSETXATTR", Const, 0}, - {"SYS_LSTAT", Const, 0}, - {"SYS_LSTAT64", Const, 0}, - {"SYS_LSTAT64_EXTENDED", Const, 0}, - {"SYS_LSTATV", Const, 0}, - {"SYS_LSTAT_EXTENDED", Const, 0}, - {"SYS_LUTIMES", Const, 0}, - {"SYS_MAC_SYSCALL", Const, 0}, - {"SYS_MADVISE", Const, 0}, - {"SYS_MADVISE1", Const, 0}, - {"SYS_MAXSYSCALL", Const, 0}, - {"SYS_MBIND", Const, 0}, - {"SYS_MIGRATE_PAGES", Const, 0}, - {"SYS_MINCORE", Const, 0}, - {"SYS_MINHERIT", Const, 0}, - {"SYS_MKCOMPLEX", Const, 0}, - {"SYS_MKDIR", Const, 0}, - {"SYS_MKDIRAT", Const, 0}, - {"SYS_MKDIR_EXTENDED", Const, 0}, - {"SYS_MKFIFO", Const, 0}, - {"SYS_MKFIFOAT", Const, 0}, - {"SYS_MKFIFO_EXTENDED", Const, 0}, - {"SYS_MKNOD", Const, 0}, - {"SYS_MKNODAT", Const, 0}, - {"SYS_MLOCK", Const, 0}, - {"SYS_MLOCKALL", Const, 0}, - {"SYS_MMAP", Const, 0}, - {"SYS_MMAP2", Const, 0}, - {"SYS_MODCTL", Const, 1}, - {"SYS_MODFIND", Const, 0}, - {"SYS_MODFNEXT", Const, 0}, - {"SYS_MODIFY_LDT", Const, 0}, - {"SYS_MODNEXT", Const, 0}, - {"SYS_MODSTAT", Const, 0}, - {"SYS_MODWATCH", Const, 0}, - {"SYS_MOUNT", Const, 0}, - {"SYS_MOVE_PAGES", Const, 0}, - {"SYS_MPROTECT", Const, 0}, - {"SYS_MPX", Const, 0}, - {"SYS_MQUERY", Const, 1}, - {"SYS_MQ_GETSETATTR", Const, 0}, - {"SYS_MQ_NOTIFY", Const, 0}, - {"SYS_MQ_OPEN", Const, 0}, - {"SYS_MQ_TIMEDRECEIVE", Const, 0}, - {"SYS_MQ_TIMEDSEND", Const, 0}, - {"SYS_MQ_UNLINK", Const, 0}, - {"SYS_MREMAP", Const, 0}, - {"SYS_MSGCTL", Const, 0}, - {"SYS_MSGGET", Const, 0}, - {"SYS_MSGRCV", Const, 0}, - {"SYS_MSGRCV_NOCANCEL", Const, 0}, - {"SYS_MSGSND", Const, 0}, - {"SYS_MSGSND_NOCANCEL", Const, 0}, - {"SYS_MSGSYS", Const, 0}, - {"SYS_MSYNC", Const, 0}, - {"SYS_MSYNC_NOCANCEL", Const, 0}, - {"SYS_MUNLOCK", Const, 0}, - {"SYS_MUNLOCKALL", Const, 0}, - {"SYS_MUNMAP", Const, 0}, - {"SYS_NAME_TO_HANDLE_AT", Const, 0}, - {"SYS_NANOSLEEP", Const, 0}, - {"SYS_NEWFSTATAT", Const, 0}, - {"SYS_NFSCLNT", Const, 0}, - {"SYS_NFSSERVCTL", Const, 0}, - {"SYS_NFSSVC", Const, 0}, - {"SYS_NFSTAT", Const, 0}, - {"SYS_NICE", Const, 0}, - {"SYS_NLM_SYSCALL", Const, 14}, - {"SYS_NLSTAT", Const, 0}, - {"SYS_NMOUNT", Const, 0}, - {"SYS_NSTAT", Const, 0}, - {"SYS_NTP_ADJTIME", Const, 0}, - {"SYS_NTP_GETTIME", Const, 0}, - {"SYS_NUMA_GETAFFINITY", Const, 14}, - {"SYS_NUMA_SETAFFINITY", Const, 14}, - {"SYS_OABI_SYSCALL_BASE", Const, 0}, - 
{"SYS_OBREAK", Const, 0}, - {"SYS_OLDFSTAT", Const, 0}, - {"SYS_OLDLSTAT", Const, 0}, - {"SYS_OLDOLDUNAME", Const, 0}, - {"SYS_OLDSTAT", Const, 0}, - {"SYS_OLDUNAME", Const, 0}, - {"SYS_OPEN", Const, 0}, - {"SYS_OPENAT", Const, 0}, - {"SYS_OPENBSD_POLL", Const, 0}, - {"SYS_OPEN_BY_HANDLE_AT", Const, 0}, - {"SYS_OPEN_DPROTECTED_NP", Const, 16}, - {"SYS_OPEN_EXTENDED", Const, 0}, - {"SYS_OPEN_NOCANCEL", Const, 0}, - {"SYS_OVADVISE", Const, 0}, - {"SYS_PACCEPT", Const, 1}, - {"SYS_PATHCONF", Const, 0}, - {"SYS_PAUSE", Const, 0}, - {"SYS_PCICONFIG_IOBASE", Const, 0}, - {"SYS_PCICONFIG_READ", Const, 0}, - {"SYS_PCICONFIG_WRITE", Const, 0}, - {"SYS_PDFORK", Const, 0}, - {"SYS_PDGETPID", Const, 0}, - {"SYS_PDKILL", Const, 0}, - {"SYS_PERF_EVENT_OPEN", Const, 0}, - {"SYS_PERSONALITY", Const, 0}, - {"SYS_PID_HIBERNATE", Const, 0}, - {"SYS_PID_RESUME", Const, 0}, - {"SYS_PID_SHUTDOWN_SOCKETS", Const, 0}, - {"SYS_PID_SUSPEND", Const, 0}, - {"SYS_PIPE", Const, 0}, - {"SYS_PIPE2", Const, 0}, - {"SYS_PIVOT_ROOT", Const, 0}, - {"SYS_PMC_CONTROL", Const, 1}, - {"SYS_PMC_GET_INFO", Const, 1}, - {"SYS_POLL", Const, 0}, - {"SYS_POLLTS", Const, 1}, - {"SYS_POLL_NOCANCEL", Const, 0}, - {"SYS_POSIX_FADVISE", Const, 0}, - {"SYS_POSIX_FALLOCATE", Const, 0}, - {"SYS_POSIX_OPENPT", Const, 0}, - {"SYS_POSIX_SPAWN", Const, 0}, - {"SYS_PPOLL", Const, 0}, - {"SYS_PRCTL", Const, 0}, - {"SYS_PREAD", Const, 0}, - {"SYS_PREAD64", Const, 0}, - {"SYS_PREADV", Const, 0}, - {"SYS_PREAD_NOCANCEL", Const, 0}, - {"SYS_PRLIMIT64", Const, 0}, - {"SYS_PROCCTL", Const, 3}, - {"SYS_PROCESS_POLICY", Const, 0}, - {"SYS_PROCESS_VM_READV", Const, 0}, - {"SYS_PROCESS_VM_WRITEV", Const, 0}, - {"SYS_PROC_INFO", Const, 0}, - {"SYS_PROF", Const, 0}, - {"SYS_PROFIL", Const, 0}, - {"SYS_PSELECT", Const, 0}, - {"SYS_PSELECT6", Const, 0}, - {"SYS_PSET_ASSIGN", Const, 1}, - {"SYS_PSET_CREATE", Const, 1}, - {"SYS_PSET_DESTROY", Const, 1}, - {"SYS_PSYNCH_CVBROAD", Const, 0}, - {"SYS_PSYNCH_CVCLRPREPOST", Const, 0}, - {"SYS_PSYNCH_CVSIGNAL", Const, 0}, - {"SYS_PSYNCH_CVWAIT", Const, 0}, - {"SYS_PSYNCH_MUTEXDROP", Const, 0}, - {"SYS_PSYNCH_MUTEXWAIT", Const, 0}, - {"SYS_PSYNCH_RW_DOWNGRADE", Const, 0}, - {"SYS_PSYNCH_RW_LONGRDLOCK", Const, 0}, - {"SYS_PSYNCH_RW_RDLOCK", Const, 0}, - {"SYS_PSYNCH_RW_UNLOCK", Const, 0}, - {"SYS_PSYNCH_RW_UNLOCK2", Const, 0}, - {"SYS_PSYNCH_RW_UPGRADE", Const, 0}, - {"SYS_PSYNCH_RW_WRLOCK", Const, 0}, - {"SYS_PSYNCH_RW_YIELDWRLOCK", Const, 0}, - {"SYS_PTRACE", Const, 0}, - {"SYS_PUTPMSG", Const, 0}, - {"SYS_PWRITE", Const, 0}, - {"SYS_PWRITE64", Const, 0}, - {"SYS_PWRITEV", Const, 0}, - {"SYS_PWRITE_NOCANCEL", Const, 0}, - {"SYS_QUERY_MODULE", Const, 0}, - {"SYS_QUOTACTL", Const, 0}, - {"SYS_RASCTL", Const, 1}, - {"SYS_RCTL_ADD_RULE", Const, 0}, - {"SYS_RCTL_GET_LIMITS", Const, 0}, - {"SYS_RCTL_GET_RACCT", Const, 0}, - {"SYS_RCTL_GET_RULES", Const, 0}, - {"SYS_RCTL_REMOVE_RULE", Const, 0}, - {"SYS_READ", Const, 0}, - {"SYS_READAHEAD", Const, 0}, - {"SYS_READDIR", Const, 0}, - {"SYS_READLINK", Const, 0}, - {"SYS_READLINKAT", Const, 0}, - {"SYS_READV", Const, 0}, - {"SYS_READV_NOCANCEL", Const, 0}, - {"SYS_READ_NOCANCEL", Const, 0}, - {"SYS_REBOOT", Const, 0}, - {"SYS_RECV", Const, 0}, - {"SYS_RECVFROM", Const, 0}, - {"SYS_RECVFROM_NOCANCEL", Const, 0}, - {"SYS_RECVMMSG", Const, 0}, - {"SYS_RECVMSG", Const, 0}, - {"SYS_RECVMSG_NOCANCEL", Const, 0}, - {"SYS_REMAP_FILE_PAGES", Const, 0}, - {"SYS_REMOVEXATTR", Const, 0}, - {"SYS_RENAME", Const, 0}, - {"SYS_RENAMEAT", Const, 0}, - {"SYS_REQUEST_KEY", Const, 0}, - 
{"SYS_RESTART_SYSCALL", Const, 0}, - {"SYS_REVOKE", Const, 0}, - {"SYS_RFORK", Const, 0}, - {"SYS_RMDIR", Const, 0}, - {"SYS_RTPRIO", Const, 0}, - {"SYS_RTPRIO_THREAD", Const, 0}, - {"SYS_RT_SIGACTION", Const, 0}, - {"SYS_RT_SIGPENDING", Const, 0}, - {"SYS_RT_SIGPROCMASK", Const, 0}, - {"SYS_RT_SIGQUEUEINFO", Const, 0}, - {"SYS_RT_SIGRETURN", Const, 0}, - {"SYS_RT_SIGSUSPEND", Const, 0}, - {"SYS_RT_SIGTIMEDWAIT", Const, 0}, - {"SYS_RT_TGSIGQUEUEINFO", Const, 0}, - {"SYS_SBRK", Const, 0}, - {"SYS_SCHED_GETAFFINITY", Const, 0}, - {"SYS_SCHED_GETPARAM", Const, 0}, - {"SYS_SCHED_GETSCHEDULER", Const, 0}, - {"SYS_SCHED_GET_PRIORITY_MAX", Const, 0}, - {"SYS_SCHED_GET_PRIORITY_MIN", Const, 0}, - {"SYS_SCHED_RR_GET_INTERVAL", Const, 0}, - {"SYS_SCHED_SETAFFINITY", Const, 0}, - {"SYS_SCHED_SETPARAM", Const, 0}, - {"SYS_SCHED_SETSCHEDULER", Const, 0}, - {"SYS_SCHED_YIELD", Const, 0}, - {"SYS_SCTP_GENERIC_RECVMSG", Const, 0}, - {"SYS_SCTP_GENERIC_SENDMSG", Const, 0}, - {"SYS_SCTP_GENERIC_SENDMSG_IOV", Const, 0}, - {"SYS_SCTP_PEELOFF", Const, 0}, - {"SYS_SEARCHFS", Const, 0}, - {"SYS_SECURITY", Const, 0}, - {"SYS_SELECT", Const, 0}, - {"SYS_SELECT_NOCANCEL", Const, 0}, - {"SYS_SEMCONFIG", Const, 1}, - {"SYS_SEMCTL", Const, 0}, - {"SYS_SEMGET", Const, 0}, - {"SYS_SEMOP", Const, 0}, - {"SYS_SEMSYS", Const, 0}, - {"SYS_SEMTIMEDOP", Const, 0}, - {"SYS_SEM_CLOSE", Const, 0}, - {"SYS_SEM_DESTROY", Const, 0}, - {"SYS_SEM_GETVALUE", Const, 0}, - {"SYS_SEM_INIT", Const, 0}, - {"SYS_SEM_OPEN", Const, 0}, - {"SYS_SEM_POST", Const, 0}, - {"SYS_SEM_TRYWAIT", Const, 0}, - {"SYS_SEM_UNLINK", Const, 0}, - {"SYS_SEM_WAIT", Const, 0}, - {"SYS_SEM_WAIT_NOCANCEL", Const, 0}, - {"SYS_SEND", Const, 0}, - {"SYS_SENDFILE", Const, 0}, - {"SYS_SENDFILE64", Const, 0}, - {"SYS_SENDMMSG", Const, 0}, - {"SYS_SENDMSG", Const, 0}, - {"SYS_SENDMSG_NOCANCEL", Const, 0}, - {"SYS_SENDTO", Const, 0}, - {"SYS_SENDTO_NOCANCEL", Const, 0}, - {"SYS_SETATTRLIST", Const, 0}, - {"SYS_SETAUDIT", Const, 0}, - {"SYS_SETAUDIT_ADDR", Const, 0}, - {"SYS_SETAUID", Const, 0}, - {"SYS_SETCONTEXT", Const, 0}, - {"SYS_SETDOMAINNAME", Const, 0}, - {"SYS_SETEGID", Const, 0}, - {"SYS_SETEUID", Const, 0}, - {"SYS_SETFIB", Const, 0}, - {"SYS_SETFSGID", Const, 0}, - {"SYS_SETFSGID32", Const, 0}, - {"SYS_SETFSUID", Const, 0}, - {"SYS_SETFSUID32", Const, 0}, - {"SYS_SETGID", Const, 0}, - {"SYS_SETGID32", Const, 0}, - {"SYS_SETGROUPS", Const, 0}, - {"SYS_SETGROUPS32", Const, 0}, - {"SYS_SETHOSTNAME", Const, 0}, - {"SYS_SETITIMER", Const, 0}, - {"SYS_SETLCID", Const, 0}, - {"SYS_SETLOGIN", Const, 0}, - {"SYS_SETLOGINCLASS", Const, 0}, - {"SYS_SETNS", Const, 0}, - {"SYS_SETPGID", Const, 0}, - {"SYS_SETPRIORITY", Const, 0}, - {"SYS_SETPRIVEXEC", Const, 0}, - {"SYS_SETREGID", Const, 0}, - {"SYS_SETREGID32", Const, 0}, - {"SYS_SETRESGID", Const, 0}, - {"SYS_SETRESGID32", Const, 0}, - {"SYS_SETRESUID", Const, 0}, - {"SYS_SETRESUID32", Const, 0}, - {"SYS_SETREUID", Const, 0}, - {"SYS_SETREUID32", Const, 0}, - {"SYS_SETRLIMIT", Const, 0}, - {"SYS_SETRTABLE", Const, 1}, - {"SYS_SETSGROUPS", Const, 0}, - {"SYS_SETSID", Const, 0}, - {"SYS_SETSOCKOPT", Const, 0}, - {"SYS_SETTID", Const, 0}, - {"SYS_SETTID_WITH_PID", Const, 0}, - {"SYS_SETTIMEOFDAY", Const, 0}, - {"SYS_SETUID", Const, 0}, - {"SYS_SETUID32", Const, 0}, - {"SYS_SETWGROUPS", Const, 0}, - {"SYS_SETXATTR", Const, 0}, - {"SYS_SET_MEMPOLICY", Const, 0}, - {"SYS_SET_ROBUST_LIST", Const, 0}, - {"SYS_SET_THREAD_AREA", Const, 0}, - {"SYS_SET_TID_ADDRESS", Const, 0}, - {"SYS_SGETMASK", Const, 0}, - 
{"SYS_SHARED_REGION_CHECK_NP", Const, 0}, - {"SYS_SHARED_REGION_MAP_AND_SLIDE_NP", Const, 0}, - {"SYS_SHMAT", Const, 0}, - {"SYS_SHMCTL", Const, 0}, - {"SYS_SHMDT", Const, 0}, - {"SYS_SHMGET", Const, 0}, - {"SYS_SHMSYS", Const, 0}, - {"SYS_SHM_OPEN", Const, 0}, - {"SYS_SHM_UNLINK", Const, 0}, - {"SYS_SHUTDOWN", Const, 0}, - {"SYS_SIGACTION", Const, 0}, - {"SYS_SIGALTSTACK", Const, 0}, - {"SYS_SIGNAL", Const, 0}, - {"SYS_SIGNALFD", Const, 0}, - {"SYS_SIGNALFD4", Const, 0}, - {"SYS_SIGPENDING", Const, 0}, - {"SYS_SIGPROCMASK", Const, 0}, - {"SYS_SIGQUEUE", Const, 0}, - {"SYS_SIGQUEUEINFO", Const, 1}, - {"SYS_SIGRETURN", Const, 0}, - {"SYS_SIGSUSPEND", Const, 0}, - {"SYS_SIGSUSPEND_NOCANCEL", Const, 0}, - {"SYS_SIGTIMEDWAIT", Const, 0}, - {"SYS_SIGWAIT", Const, 0}, - {"SYS_SIGWAITINFO", Const, 0}, - {"SYS_SOCKET", Const, 0}, - {"SYS_SOCKETCALL", Const, 0}, - {"SYS_SOCKETPAIR", Const, 0}, - {"SYS_SPLICE", Const, 0}, - {"SYS_SSETMASK", Const, 0}, - {"SYS_SSTK", Const, 0}, - {"SYS_STACK_SNAPSHOT", Const, 0}, - {"SYS_STAT", Const, 0}, - {"SYS_STAT64", Const, 0}, - {"SYS_STAT64_EXTENDED", Const, 0}, - {"SYS_STATFS", Const, 0}, - {"SYS_STATFS64", Const, 0}, - {"SYS_STATV", Const, 0}, - {"SYS_STATVFS1", Const, 1}, - {"SYS_STAT_EXTENDED", Const, 0}, - {"SYS_STIME", Const, 0}, - {"SYS_STTY", Const, 0}, - {"SYS_SWAPCONTEXT", Const, 0}, - {"SYS_SWAPCTL", Const, 1}, - {"SYS_SWAPOFF", Const, 0}, - {"SYS_SWAPON", Const, 0}, - {"SYS_SYMLINK", Const, 0}, - {"SYS_SYMLINKAT", Const, 0}, - {"SYS_SYNC", Const, 0}, - {"SYS_SYNCFS", Const, 0}, - {"SYS_SYNC_FILE_RANGE", Const, 0}, - {"SYS_SYSARCH", Const, 0}, - {"SYS_SYSCALL", Const, 0}, - {"SYS_SYSCALL_BASE", Const, 0}, - {"SYS_SYSFS", Const, 0}, - {"SYS_SYSINFO", Const, 0}, - {"SYS_SYSLOG", Const, 0}, - {"SYS_TEE", Const, 0}, - {"SYS_TGKILL", Const, 0}, - {"SYS_THREAD_SELFID", Const, 0}, - {"SYS_THR_CREATE", Const, 0}, - {"SYS_THR_EXIT", Const, 0}, - {"SYS_THR_KILL", Const, 0}, - {"SYS_THR_KILL2", Const, 0}, - {"SYS_THR_NEW", Const, 0}, - {"SYS_THR_SELF", Const, 0}, - {"SYS_THR_SET_NAME", Const, 0}, - {"SYS_THR_SUSPEND", Const, 0}, - {"SYS_THR_WAKE", Const, 0}, - {"SYS_TIME", Const, 0}, - {"SYS_TIMERFD_CREATE", Const, 0}, - {"SYS_TIMERFD_GETTIME", Const, 0}, - {"SYS_TIMERFD_SETTIME", Const, 0}, - {"SYS_TIMER_CREATE", Const, 0}, - {"SYS_TIMER_DELETE", Const, 0}, - {"SYS_TIMER_GETOVERRUN", Const, 0}, - {"SYS_TIMER_GETTIME", Const, 0}, - {"SYS_TIMER_SETTIME", Const, 0}, - {"SYS_TIMES", Const, 0}, - {"SYS_TKILL", Const, 0}, - {"SYS_TRUNCATE", Const, 0}, - {"SYS_TRUNCATE64", Const, 0}, - {"SYS_TUXCALL", Const, 0}, - {"SYS_UGETRLIMIT", Const, 0}, - {"SYS_ULIMIT", Const, 0}, - {"SYS_UMASK", Const, 0}, - {"SYS_UMASK_EXTENDED", Const, 0}, - {"SYS_UMOUNT", Const, 0}, - {"SYS_UMOUNT2", Const, 0}, - {"SYS_UNAME", Const, 0}, - {"SYS_UNDELETE", Const, 0}, - {"SYS_UNLINK", Const, 0}, - {"SYS_UNLINKAT", Const, 0}, - {"SYS_UNMOUNT", Const, 0}, - {"SYS_UNSHARE", Const, 0}, - {"SYS_USELIB", Const, 0}, - {"SYS_USTAT", Const, 0}, - {"SYS_UTIME", Const, 0}, - {"SYS_UTIMENSAT", Const, 0}, - {"SYS_UTIMES", Const, 0}, - {"SYS_UTRACE", Const, 0}, - {"SYS_UUIDGEN", Const, 0}, - {"SYS_VADVISE", Const, 1}, - {"SYS_VFORK", Const, 0}, - {"SYS_VHANGUP", Const, 0}, - {"SYS_VM86", Const, 0}, - {"SYS_VM86OLD", Const, 0}, - {"SYS_VMSPLICE", Const, 0}, - {"SYS_VM_PRESSURE_MONITOR", Const, 0}, - {"SYS_VSERVER", Const, 0}, - {"SYS_WAIT4", Const, 0}, - {"SYS_WAIT4_NOCANCEL", Const, 0}, - {"SYS_WAIT6", Const, 1}, - {"SYS_WAITEVENT", Const, 0}, - {"SYS_WAITID", Const, 0}, - {"SYS_WAITID_NOCANCEL", Const, 
0}, - {"SYS_WAITPID", Const, 0}, - {"SYS_WATCHEVENT", Const, 0}, - {"SYS_WORKQ_KERNRETURN", Const, 0}, - {"SYS_WORKQ_OPEN", Const, 0}, - {"SYS_WRITE", Const, 0}, - {"SYS_WRITEV", Const, 0}, - {"SYS_WRITEV_NOCANCEL", Const, 0}, - {"SYS_WRITE_NOCANCEL", Const, 0}, - {"SYS_YIELD", Const, 0}, - {"SYS__LLSEEK", Const, 0}, - {"SYS__LWP_CONTINUE", Const, 1}, - {"SYS__LWP_CREATE", Const, 1}, - {"SYS__LWP_CTL", Const, 1}, - {"SYS__LWP_DETACH", Const, 1}, - {"SYS__LWP_EXIT", Const, 1}, - {"SYS__LWP_GETNAME", Const, 1}, - {"SYS__LWP_GETPRIVATE", Const, 1}, - {"SYS__LWP_KILL", Const, 1}, - {"SYS__LWP_PARK", Const, 1}, - {"SYS__LWP_SELF", Const, 1}, - {"SYS__LWP_SETNAME", Const, 1}, - {"SYS__LWP_SETPRIVATE", Const, 1}, - {"SYS__LWP_SUSPEND", Const, 1}, - {"SYS__LWP_UNPARK", Const, 1}, - {"SYS__LWP_UNPARK_ALL", Const, 1}, - {"SYS__LWP_WAIT", Const, 1}, - {"SYS__LWP_WAKEUP", Const, 1}, - {"SYS__NEWSELECT", Const, 0}, - {"SYS__PSET_BIND", Const, 1}, - {"SYS__SCHED_GETAFFINITY", Const, 1}, - {"SYS__SCHED_GETPARAM", Const, 1}, - {"SYS__SCHED_SETAFFINITY", Const, 1}, - {"SYS__SCHED_SETPARAM", Const, 1}, - {"SYS__SYSCTL", Const, 0}, - {"SYS__UMTX_LOCK", Const, 0}, - {"SYS__UMTX_OP", Const, 0}, - {"SYS__UMTX_UNLOCK", Const, 0}, - {"SYS___ACL_ACLCHECK_FD", Const, 0}, - {"SYS___ACL_ACLCHECK_FILE", Const, 0}, - {"SYS___ACL_ACLCHECK_LINK", Const, 0}, - {"SYS___ACL_DELETE_FD", Const, 0}, - {"SYS___ACL_DELETE_FILE", Const, 0}, - {"SYS___ACL_DELETE_LINK", Const, 0}, - {"SYS___ACL_GET_FD", Const, 0}, - {"SYS___ACL_GET_FILE", Const, 0}, - {"SYS___ACL_GET_LINK", Const, 0}, - {"SYS___ACL_SET_FD", Const, 0}, - {"SYS___ACL_SET_FILE", Const, 0}, - {"SYS___ACL_SET_LINK", Const, 0}, - {"SYS___CAP_RIGHTS_GET", Const, 14}, - {"SYS___CLONE", Const, 1}, - {"SYS___DISABLE_THREADSIGNAL", Const, 0}, - {"SYS___GETCWD", Const, 0}, - {"SYS___GETLOGIN", Const, 1}, - {"SYS___GET_TCB", Const, 1}, - {"SYS___MAC_EXECVE", Const, 0}, - {"SYS___MAC_GETFSSTAT", Const, 0}, - {"SYS___MAC_GET_FD", Const, 0}, - {"SYS___MAC_GET_FILE", Const, 0}, - {"SYS___MAC_GET_LCID", Const, 0}, - {"SYS___MAC_GET_LCTX", Const, 0}, - {"SYS___MAC_GET_LINK", Const, 0}, - {"SYS___MAC_GET_MOUNT", Const, 0}, - {"SYS___MAC_GET_PID", Const, 0}, - {"SYS___MAC_GET_PROC", Const, 0}, - {"SYS___MAC_MOUNT", Const, 0}, - {"SYS___MAC_SET_FD", Const, 0}, - {"SYS___MAC_SET_FILE", Const, 0}, - {"SYS___MAC_SET_LCTX", Const, 0}, - {"SYS___MAC_SET_LINK", Const, 0}, - {"SYS___MAC_SET_PROC", Const, 0}, - {"SYS___MAC_SYSCALL", Const, 0}, - {"SYS___OLD_SEMWAIT_SIGNAL", Const, 0}, - {"SYS___OLD_SEMWAIT_SIGNAL_NOCANCEL", Const, 0}, - {"SYS___POSIX_CHOWN", Const, 1}, - {"SYS___POSIX_FCHOWN", Const, 1}, - {"SYS___POSIX_LCHOWN", Const, 1}, - {"SYS___POSIX_RENAME", Const, 1}, - {"SYS___PTHREAD_CANCELED", Const, 0}, - {"SYS___PTHREAD_CHDIR", Const, 0}, - {"SYS___PTHREAD_FCHDIR", Const, 0}, - {"SYS___PTHREAD_KILL", Const, 0}, - {"SYS___PTHREAD_MARKCANCEL", Const, 0}, - {"SYS___PTHREAD_SIGMASK", Const, 0}, - {"SYS___QUOTACTL", Const, 1}, - {"SYS___SEMCTL", Const, 1}, - {"SYS___SEMWAIT_SIGNAL", Const, 0}, - {"SYS___SEMWAIT_SIGNAL_NOCANCEL", Const, 0}, - {"SYS___SETLOGIN", Const, 1}, - {"SYS___SETUGID", Const, 0}, - {"SYS___SET_TCB", Const, 1}, - {"SYS___SIGACTION_SIGTRAMP", Const, 1}, - {"SYS___SIGTIMEDWAIT", Const, 1}, - {"SYS___SIGWAIT", Const, 0}, - {"SYS___SIGWAIT_NOCANCEL", Const, 0}, - {"SYS___SYSCTL", Const, 0}, - {"SYS___TFORK", Const, 1}, - {"SYS___THREXIT", Const, 1}, - {"SYS___THRSIGDIVERT", Const, 1}, - {"SYS___THRSLEEP", Const, 1}, - {"SYS___THRWAKEUP", Const, 1}, - {"S_ARCH1", Const, 
1}, - {"S_ARCH2", Const, 1}, - {"S_BLKSIZE", Const, 0}, - {"S_IEXEC", Const, 0}, - {"S_IFBLK", Const, 0}, - {"S_IFCHR", Const, 0}, - {"S_IFDIR", Const, 0}, - {"S_IFIFO", Const, 0}, - {"S_IFLNK", Const, 0}, - {"S_IFMT", Const, 0}, - {"S_IFREG", Const, 0}, - {"S_IFSOCK", Const, 0}, - {"S_IFWHT", Const, 0}, - {"S_IREAD", Const, 0}, - {"S_IRGRP", Const, 0}, - {"S_IROTH", Const, 0}, - {"S_IRUSR", Const, 0}, - {"S_IRWXG", Const, 0}, - {"S_IRWXO", Const, 0}, - {"S_IRWXU", Const, 0}, - {"S_ISGID", Const, 0}, - {"S_ISTXT", Const, 0}, - {"S_ISUID", Const, 0}, - {"S_ISVTX", Const, 0}, - {"S_IWGRP", Const, 0}, - {"S_IWOTH", Const, 0}, - {"S_IWRITE", Const, 0}, - {"S_IWUSR", Const, 0}, - {"S_IXGRP", Const, 0}, - {"S_IXOTH", Const, 0}, - {"S_IXUSR", Const, 0}, - {"S_LOGIN_SET", Const, 1}, - {"SecurityAttributes", Type, 0}, - {"SecurityAttributes.InheritHandle", Field, 0}, - {"SecurityAttributes.Length", Field, 0}, - {"SecurityAttributes.SecurityDescriptor", Field, 0}, - {"Seek", Func, 0}, - {"Select", Func, 0}, - {"Sendfile", Func, 0}, - {"Sendmsg", Func, 0}, - {"SendmsgN", Func, 3}, - {"Sendto", Func, 0}, - {"Servent", Type, 0}, - {"Servent.Aliases", Field, 0}, - {"Servent.Name", Field, 0}, - {"Servent.Port", Field, 0}, - {"Servent.Proto", Field, 0}, - {"SetBpf", Func, 0}, - {"SetBpfBuflen", Func, 0}, - {"SetBpfDatalink", Func, 0}, - {"SetBpfHeadercmpl", Func, 0}, - {"SetBpfImmediate", Func, 0}, - {"SetBpfInterface", Func, 0}, - {"SetBpfPromisc", Func, 0}, - {"SetBpfTimeout", Func, 0}, - {"SetCurrentDirectory", Func, 0}, - {"SetEndOfFile", Func, 0}, - {"SetEnvironmentVariable", Func, 0}, - {"SetFileAttributes", Func, 0}, - {"SetFileCompletionNotificationModes", Func, 2}, - {"SetFilePointer", Func, 0}, - {"SetFileTime", Func, 0}, - {"SetHandleInformation", Func, 0}, - {"SetKevent", Func, 0}, - {"SetLsfPromisc", Func, 0}, - {"SetNonblock", Func, 0}, - {"Setdomainname", Func, 0}, - {"Setegid", Func, 0}, - {"Setenv", Func, 0}, - {"Seteuid", Func, 0}, - {"Setfsgid", Func, 0}, - {"Setfsuid", Func, 0}, - {"Setgid", Func, 0}, - {"Setgroups", Func, 0}, - {"Sethostname", Func, 0}, - {"Setlogin", Func, 0}, - {"Setpgid", Func, 0}, - {"Setpriority", Func, 0}, - {"Setprivexec", Func, 0}, - {"Setregid", Func, 0}, - {"Setresgid", Func, 0}, - {"Setresuid", Func, 0}, - {"Setreuid", Func, 0}, - {"Setrlimit", Func, 0}, - {"Setsid", Func, 0}, - {"Setsockopt", Func, 0}, - {"SetsockoptByte", Func, 0}, - {"SetsockoptICMPv6Filter", Func, 2}, - {"SetsockoptIPMreq", Func, 0}, - {"SetsockoptIPMreqn", Func, 0}, - {"SetsockoptIPv6Mreq", Func, 0}, - {"SetsockoptInet4Addr", Func, 0}, - {"SetsockoptInt", Func, 0}, - {"SetsockoptLinger", Func, 0}, - {"SetsockoptString", Func, 0}, - {"SetsockoptTimeval", Func, 0}, - {"Settimeofday", Func, 0}, - {"Setuid", Func, 0}, - {"Setxattr", Func, 1}, - {"Shutdown", Func, 0}, - {"SidTypeAlias", Const, 0}, - {"SidTypeComputer", Const, 0}, - {"SidTypeDeletedAccount", Const, 0}, - {"SidTypeDomain", Const, 0}, - {"SidTypeGroup", Const, 0}, - {"SidTypeInvalid", Const, 0}, - {"SidTypeLabel", Const, 0}, - {"SidTypeUnknown", Const, 0}, - {"SidTypeUser", Const, 0}, - {"SidTypeWellKnownGroup", Const, 0}, - {"Signal", Type, 0}, - {"SizeofBpfHdr", Const, 0}, - {"SizeofBpfInsn", Const, 0}, - {"SizeofBpfProgram", Const, 0}, - {"SizeofBpfStat", Const, 0}, - {"SizeofBpfVersion", Const, 0}, - {"SizeofBpfZbuf", Const, 0}, - {"SizeofBpfZbufHeader", Const, 0}, - {"SizeofCmsghdr", Const, 0}, - {"SizeofICMPv6Filter", Const, 2}, - {"SizeofIPMreq", Const, 0}, - {"SizeofIPMreqn", Const, 0}, - {"SizeofIPv6MTUInfo", Const, 
2}, - {"SizeofIPv6Mreq", Const, 0}, - {"SizeofIfAddrmsg", Const, 0}, - {"SizeofIfAnnounceMsghdr", Const, 1}, - {"SizeofIfData", Const, 0}, - {"SizeofIfInfomsg", Const, 0}, - {"SizeofIfMsghdr", Const, 0}, - {"SizeofIfaMsghdr", Const, 0}, - {"SizeofIfmaMsghdr", Const, 0}, - {"SizeofIfmaMsghdr2", Const, 0}, - {"SizeofInet4Pktinfo", Const, 0}, - {"SizeofInet6Pktinfo", Const, 0}, - {"SizeofInotifyEvent", Const, 0}, - {"SizeofLinger", Const, 0}, - {"SizeofMsghdr", Const, 0}, - {"SizeofNlAttr", Const, 0}, - {"SizeofNlMsgerr", Const, 0}, - {"SizeofNlMsghdr", Const, 0}, - {"SizeofRtAttr", Const, 0}, - {"SizeofRtGenmsg", Const, 0}, - {"SizeofRtMetrics", Const, 0}, - {"SizeofRtMsg", Const, 0}, - {"SizeofRtMsghdr", Const, 0}, - {"SizeofRtNexthop", Const, 0}, - {"SizeofSockFilter", Const, 0}, - {"SizeofSockFprog", Const, 0}, - {"SizeofSockaddrAny", Const, 0}, - {"SizeofSockaddrDatalink", Const, 0}, - {"SizeofSockaddrInet4", Const, 0}, - {"SizeofSockaddrInet6", Const, 0}, - {"SizeofSockaddrLinklayer", Const, 0}, - {"SizeofSockaddrNetlink", Const, 0}, - {"SizeofSockaddrUnix", Const, 0}, - {"SizeofTCPInfo", Const, 1}, - {"SizeofUcred", Const, 0}, - {"SlicePtrFromStrings", Func, 1}, - {"SockFilter", Type, 0}, - {"SockFilter.Code", Field, 0}, - {"SockFilter.Jf", Field, 0}, - {"SockFilter.Jt", Field, 0}, - {"SockFilter.K", Field, 0}, - {"SockFprog", Type, 0}, - {"SockFprog.Filter", Field, 0}, - {"SockFprog.Len", Field, 0}, - {"SockFprog.Pad_cgo_0", Field, 0}, - {"Sockaddr", Type, 0}, - {"SockaddrDatalink", Type, 0}, - {"SockaddrDatalink.Alen", Field, 0}, - {"SockaddrDatalink.Data", Field, 0}, - {"SockaddrDatalink.Family", Field, 0}, - {"SockaddrDatalink.Index", Field, 0}, - {"SockaddrDatalink.Len", Field, 0}, - {"SockaddrDatalink.Nlen", Field, 0}, - {"SockaddrDatalink.Slen", Field, 0}, - {"SockaddrDatalink.Type", Field, 0}, - {"SockaddrGen", Type, 0}, - {"SockaddrInet4", Type, 0}, - {"SockaddrInet4.Addr", Field, 0}, - {"SockaddrInet4.Port", Field, 0}, - {"SockaddrInet6", Type, 0}, - {"SockaddrInet6.Addr", Field, 0}, - {"SockaddrInet6.Port", Field, 0}, - {"SockaddrInet6.ZoneId", Field, 0}, - {"SockaddrLinklayer", Type, 0}, - {"SockaddrLinklayer.Addr", Field, 0}, - {"SockaddrLinklayer.Halen", Field, 0}, - {"SockaddrLinklayer.Hatype", Field, 0}, - {"SockaddrLinklayer.Ifindex", Field, 0}, - {"SockaddrLinklayer.Pkttype", Field, 0}, - {"SockaddrLinklayer.Protocol", Field, 0}, - {"SockaddrNetlink", Type, 0}, - {"SockaddrNetlink.Family", Field, 0}, - {"SockaddrNetlink.Groups", Field, 0}, - {"SockaddrNetlink.Pad", Field, 0}, - {"SockaddrNetlink.Pid", Field, 0}, - {"SockaddrUnix", Type, 0}, - {"SockaddrUnix.Name", Field, 0}, - {"Socket", Func, 0}, - {"SocketControlMessage", Type, 0}, - {"SocketControlMessage.Data", Field, 0}, - {"SocketControlMessage.Header", Field, 0}, - {"SocketDisableIPv6", Var, 0}, - {"Socketpair", Func, 0}, - {"Splice", Func, 0}, - {"StartProcess", Func, 0}, - {"StartupInfo", Type, 0}, - {"StartupInfo.Cb", Field, 0}, - {"StartupInfo.Desktop", Field, 0}, - {"StartupInfo.FillAttribute", Field, 0}, - {"StartupInfo.Flags", Field, 0}, - {"StartupInfo.ShowWindow", Field, 0}, - {"StartupInfo.StdErr", Field, 0}, - {"StartupInfo.StdInput", Field, 0}, - {"StartupInfo.StdOutput", Field, 0}, - {"StartupInfo.Title", Field, 0}, - {"StartupInfo.X", Field, 0}, - {"StartupInfo.XCountChars", Field, 0}, - {"StartupInfo.XSize", Field, 0}, - {"StartupInfo.Y", Field, 0}, - {"StartupInfo.YCountChars", Field, 0}, - {"StartupInfo.YSize", Field, 0}, - {"Stat", Func, 0}, - {"Stat_t", Type, 0}, - {"Stat_t.Atim", Field, 0}, 
- {"Stat_t.Atim_ext", Field, 12}, - {"Stat_t.Atimespec", Field, 0}, - {"Stat_t.Birthtimespec", Field, 0}, - {"Stat_t.Blksize", Field, 0}, - {"Stat_t.Blocks", Field, 0}, - {"Stat_t.Btim_ext", Field, 12}, - {"Stat_t.Ctim", Field, 0}, - {"Stat_t.Ctim_ext", Field, 12}, - {"Stat_t.Ctimespec", Field, 0}, - {"Stat_t.Dev", Field, 0}, - {"Stat_t.Flags", Field, 0}, - {"Stat_t.Gen", Field, 0}, - {"Stat_t.Gid", Field, 0}, - {"Stat_t.Ino", Field, 0}, - {"Stat_t.Lspare", Field, 0}, - {"Stat_t.Lspare0", Field, 2}, - {"Stat_t.Lspare1", Field, 2}, - {"Stat_t.Mode", Field, 0}, - {"Stat_t.Mtim", Field, 0}, - {"Stat_t.Mtim_ext", Field, 12}, - {"Stat_t.Mtimespec", Field, 0}, - {"Stat_t.Nlink", Field, 0}, - {"Stat_t.Pad_cgo_0", Field, 0}, - {"Stat_t.Pad_cgo_1", Field, 0}, - {"Stat_t.Pad_cgo_2", Field, 0}, - {"Stat_t.Padding0", Field, 12}, - {"Stat_t.Padding1", Field, 12}, - {"Stat_t.Qspare", Field, 0}, - {"Stat_t.Rdev", Field, 0}, - {"Stat_t.Size", Field, 0}, - {"Stat_t.Spare", Field, 2}, - {"Stat_t.Uid", Field, 0}, - {"Stat_t.X__pad0", Field, 0}, - {"Stat_t.X__pad1", Field, 0}, - {"Stat_t.X__pad2", Field, 0}, - {"Stat_t.X__st_birthtim", Field, 2}, - {"Stat_t.X__st_ino", Field, 0}, - {"Stat_t.X__unused", Field, 0}, - {"Statfs", Func, 0}, - {"Statfs_t", Type, 0}, - {"Statfs_t.Asyncreads", Field, 0}, - {"Statfs_t.Asyncwrites", Field, 0}, - {"Statfs_t.Bavail", Field, 0}, - {"Statfs_t.Bfree", Field, 0}, - {"Statfs_t.Blocks", Field, 0}, - {"Statfs_t.Bsize", Field, 0}, - {"Statfs_t.Charspare", Field, 0}, - {"Statfs_t.F_asyncreads", Field, 2}, - {"Statfs_t.F_asyncwrites", Field, 2}, - {"Statfs_t.F_bavail", Field, 2}, - {"Statfs_t.F_bfree", Field, 2}, - {"Statfs_t.F_blocks", Field, 2}, - {"Statfs_t.F_bsize", Field, 2}, - {"Statfs_t.F_ctime", Field, 2}, - {"Statfs_t.F_favail", Field, 2}, - {"Statfs_t.F_ffree", Field, 2}, - {"Statfs_t.F_files", Field, 2}, - {"Statfs_t.F_flags", Field, 2}, - {"Statfs_t.F_fsid", Field, 2}, - {"Statfs_t.F_fstypename", Field, 2}, - {"Statfs_t.F_iosize", Field, 2}, - {"Statfs_t.F_mntfromname", Field, 2}, - {"Statfs_t.F_mntfromspec", Field, 3}, - {"Statfs_t.F_mntonname", Field, 2}, - {"Statfs_t.F_namemax", Field, 2}, - {"Statfs_t.F_owner", Field, 2}, - {"Statfs_t.F_spare", Field, 2}, - {"Statfs_t.F_syncreads", Field, 2}, - {"Statfs_t.F_syncwrites", Field, 2}, - {"Statfs_t.Ffree", Field, 0}, - {"Statfs_t.Files", Field, 0}, - {"Statfs_t.Flags", Field, 0}, - {"Statfs_t.Frsize", Field, 0}, - {"Statfs_t.Fsid", Field, 0}, - {"Statfs_t.Fssubtype", Field, 0}, - {"Statfs_t.Fstypename", Field, 0}, - {"Statfs_t.Iosize", Field, 0}, - {"Statfs_t.Mntfromname", Field, 0}, - {"Statfs_t.Mntonname", Field, 0}, - {"Statfs_t.Mount_info", Field, 2}, - {"Statfs_t.Namelen", Field, 0}, - {"Statfs_t.Namemax", Field, 0}, - {"Statfs_t.Owner", Field, 0}, - {"Statfs_t.Pad_cgo_0", Field, 0}, - {"Statfs_t.Pad_cgo_1", Field, 2}, - {"Statfs_t.Reserved", Field, 0}, - {"Statfs_t.Spare", Field, 0}, - {"Statfs_t.Syncreads", Field, 0}, - {"Statfs_t.Syncwrites", Field, 0}, - {"Statfs_t.Type", Field, 0}, - {"Statfs_t.Version", Field, 0}, - {"Stderr", Var, 0}, - {"Stdin", Var, 0}, - {"Stdout", Var, 0}, - {"StringBytePtr", Func, 0}, - {"StringByteSlice", Func, 0}, - {"StringSlicePtr", Func, 0}, - {"StringToSid", Func, 0}, - {"StringToUTF16", Func, 0}, - {"StringToUTF16Ptr", Func, 0}, - {"Symlink", Func, 0}, - {"Sync", Func, 0}, - {"SyncFileRange", Func, 0}, - {"SysProcAttr", Type, 0}, - {"SysProcAttr.AdditionalInheritedHandles", Field, 17}, - {"SysProcAttr.AmbientCaps", Field, 9}, - {"SysProcAttr.CgroupFD", Field, 20}, - 
{"SysProcAttr.Chroot", Field, 0}, - {"SysProcAttr.Cloneflags", Field, 2}, - {"SysProcAttr.CmdLine", Field, 0}, - {"SysProcAttr.CreationFlags", Field, 1}, - {"SysProcAttr.Credential", Field, 0}, - {"SysProcAttr.Ctty", Field, 1}, - {"SysProcAttr.Foreground", Field, 5}, - {"SysProcAttr.GidMappings", Field, 4}, - {"SysProcAttr.GidMappingsEnableSetgroups", Field, 5}, - {"SysProcAttr.HideWindow", Field, 0}, - {"SysProcAttr.Jail", Field, 21}, - {"SysProcAttr.NoInheritHandles", Field, 16}, - {"SysProcAttr.Noctty", Field, 0}, - {"SysProcAttr.ParentProcess", Field, 17}, - {"SysProcAttr.Pdeathsig", Field, 0}, - {"SysProcAttr.Pgid", Field, 5}, - {"SysProcAttr.PidFD", Field, 22}, - {"SysProcAttr.ProcessAttributes", Field, 13}, - {"SysProcAttr.Ptrace", Field, 0}, - {"SysProcAttr.Setctty", Field, 0}, - {"SysProcAttr.Setpgid", Field, 0}, - {"SysProcAttr.Setsid", Field, 0}, - {"SysProcAttr.ThreadAttributes", Field, 13}, - {"SysProcAttr.Token", Field, 10}, - {"SysProcAttr.UidMappings", Field, 4}, - {"SysProcAttr.Unshareflags", Field, 7}, - {"SysProcAttr.UseCgroupFD", Field, 20}, - {"SysProcIDMap", Type, 4}, - {"SysProcIDMap.ContainerID", Field, 4}, - {"SysProcIDMap.HostID", Field, 4}, - {"SysProcIDMap.Size", Field, 4}, - {"Syscall", Func, 0}, - {"Syscall12", Func, 0}, - {"Syscall15", Func, 0}, - {"Syscall18", Func, 12}, - {"Syscall6", Func, 0}, - {"Syscall9", Func, 0}, - {"SyscallN", Func, 18}, - {"Sysctl", Func, 0}, - {"SysctlUint32", Func, 0}, - {"Sysctlnode", Type, 2}, - {"Sysctlnode.Flags", Field, 2}, - {"Sysctlnode.Name", Field, 2}, - {"Sysctlnode.Num", Field, 2}, - {"Sysctlnode.Un", Field, 2}, - {"Sysctlnode.Ver", Field, 2}, - {"Sysctlnode.X__rsvd", Field, 2}, - {"Sysctlnode.X_sysctl_desc", Field, 2}, - {"Sysctlnode.X_sysctl_func", Field, 2}, - {"Sysctlnode.X_sysctl_parent", Field, 2}, - {"Sysctlnode.X_sysctl_size", Field, 2}, - {"Sysinfo", Func, 0}, - {"Sysinfo_t", Type, 0}, - {"Sysinfo_t.Bufferram", Field, 0}, - {"Sysinfo_t.Freehigh", Field, 0}, - {"Sysinfo_t.Freeram", Field, 0}, - {"Sysinfo_t.Freeswap", Field, 0}, - {"Sysinfo_t.Loads", Field, 0}, - {"Sysinfo_t.Pad", Field, 0}, - {"Sysinfo_t.Pad_cgo_0", Field, 0}, - {"Sysinfo_t.Pad_cgo_1", Field, 0}, - {"Sysinfo_t.Procs", Field, 0}, - {"Sysinfo_t.Sharedram", Field, 0}, - {"Sysinfo_t.Totalhigh", Field, 0}, - {"Sysinfo_t.Totalram", Field, 0}, - {"Sysinfo_t.Totalswap", Field, 0}, - {"Sysinfo_t.Unit", Field, 0}, - {"Sysinfo_t.Uptime", Field, 0}, - {"Sysinfo_t.X_f", Field, 0}, - {"Systemtime", Type, 0}, - {"Systemtime.Day", Field, 0}, - {"Systemtime.DayOfWeek", Field, 0}, - {"Systemtime.Hour", Field, 0}, - {"Systemtime.Milliseconds", Field, 0}, - {"Systemtime.Minute", Field, 0}, - {"Systemtime.Month", Field, 0}, - {"Systemtime.Second", Field, 0}, - {"Systemtime.Year", Field, 0}, - {"TCGETS", Const, 0}, - {"TCIFLUSH", Const, 1}, - {"TCIOFLUSH", Const, 1}, - {"TCOFLUSH", Const, 1}, - {"TCPInfo", Type, 1}, - {"TCPInfo.Advmss", Field, 1}, - {"TCPInfo.Ato", Field, 1}, - {"TCPInfo.Backoff", Field, 1}, - {"TCPInfo.Ca_state", Field, 1}, - {"TCPInfo.Fackets", Field, 1}, - {"TCPInfo.Last_ack_recv", Field, 1}, - {"TCPInfo.Last_ack_sent", Field, 1}, - {"TCPInfo.Last_data_recv", Field, 1}, - {"TCPInfo.Last_data_sent", Field, 1}, - {"TCPInfo.Lost", Field, 1}, - {"TCPInfo.Options", Field, 1}, - {"TCPInfo.Pad_cgo_0", Field, 1}, - {"TCPInfo.Pmtu", Field, 1}, - {"TCPInfo.Probes", Field, 1}, - {"TCPInfo.Rcv_mss", Field, 1}, - {"TCPInfo.Rcv_rtt", Field, 1}, - {"TCPInfo.Rcv_space", Field, 1}, - {"TCPInfo.Rcv_ssthresh", Field, 1}, - {"TCPInfo.Reordering", Field, 1}, - 
{"TCPInfo.Retrans", Field, 1}, - {"TCPInfo.Retransmits", Field, 1}, - {"TCPInfo.Rto", Field, 1}, - {"TCPInfo.Rtt", Field, 1}, - {"TCPInfo.Rttvar", Field, 1}, - {"TCPInfo.Sacked", Field, 1}, - {"TCPInfo.Snd_cwnd", Field, 1}, - {"TCPInfo.Snd_mss", Field, 1}, - {"TCPInfo.Snd_ssthresh", Field, 1}, - {"TCPInfo.State", Field, 1}, - {"TCPInfo.Total_retrans", Field, 1}, - {"TCPInfo.Unacked", Field, 1}, - {"TCPKeepalive", Type, 3}, - {"TCPKeepalive.Interval", Field, 3}, - {"TCPKeepalive.OnOff", Field, 3}, - {"TCPKeepalive.Time", Field, 3}, - {"TCP_CA_NAME_MAX", Const, 0}, - {"TCP_CONGCTL", Const, 1}, - {"TCP_CONGESTION", Const, 0}, - {"TCP_CONNECTIONTIMEOUT", Const, 0}, - {"TCP_CORK", Const, 0}, - {"TCP_DEFER_ACCEPT", Const, 0}, - {"TCP_ENABLE_ECN", Const, 16}, - {"TCP_INFO", Const, 0}, - {"TCP_KEEPALIVE", Const, 0}, - {"TCP_KEEPCNT", Const, 0}, - {"TCP_KEEPIDLE", Const, 0}, - {"TCP_KEEPINIT", Const, 1}, - {"TCP_KEEPINTVL", Const, 0}, - {"TCP_LINGER2", Const, 0}, - {"TCP_MAXBURST", Const, 0}, - {"TCP_MAXHLEN", Const, 0}, - {"TCP_MAXOLEN", Const, 0}, - {"TCP_MAXSEG", Const, 0}, - {"TCP_MAXWIN", Const, 0}, - {"TCP_MAX_SACK", Const, 0}, - {"TCP_MAX_WINSHIFT", Const, 0}, - {"TCP_MD5SIG", Const, 0}, - {"TCP_MD5SIG_MAXKEYLEN", Const, 0}, - {"TCP_MINMSS", Const, 0}, - {"TCP_MINMSSOVERLOAD", Const, 0}, - {"TCP_MSS", Const, 0}, - {"TCP_NODELAY", Const, 0}, - {"TCP_NOOPT", Const, 0}, - {"TCP_NOPUSH", Const, 0}, - {"TCP_NOTSENT_LOWAT", Const, 16}, - {"TCP_NSTATES", Const, 1}, - {"TCP_QUICKACK", Const, 0}, - {"TCP_RXT_CONNDROPTIME", Const, 0}, - {"TCP_RXT_FINDROP", Const, 0}, - {"TCP_SACK_ENABLE", Const, 1}, - {"TCP_SENDMOREACKS", Const, 16}, - {"TCP_SYNCNT", Const, 0}, - {"TCP_VENDOR", Const, 3}, - {"TCP_WINDOW_CLAMP", Const, 0}, - {"TCSAFLUSH", Const, 1}, - {"TCSETS", Const, 0}, - {"TF_DISCONNECT", Const, 0}, - {"TF_REUSE_SOCKET", Const, 0}, - {"TF_USE_DEFAULT_WORKER", Const, 0}, - {"TF_USE_KERNEL_APC", Const, 0}, - {"TF_USE_SYSTEM_THREAD", Const, 0}, - {"TF_WRITE_BEHIND", Const, 0}, - {"TH32CS_INHERIT", Const, 4}, - {"TH32CS_SNAPALL", Const, 4}, - {"TH32CS_SNAPHEAPLIST", Const, 4}, - {"TH32CS_SNAPMODULE", Const, 4}, - {"TH32CS_SNAPMODULE32", Const, 4}, - {"TH32CS_SNAPPROCESS", Const, 4}, - {"TH32CS_SNAPTHREAD", Const, 4}, - {"TIME_ZONE_ID_DAYLIGHT", Const, 0}, - {"TIME_ZONE_ID_STANDARD", Const, 0}, - {"TIME_ZONE_ID_UNKNOWN", Const, 0}, - {"TIOCCBRK", Const, 0}, - {"TIOCCDTR", Const, 0}, - {"TIOCCONS", Const, 0}, - {"TIOCDCDTIMESTAMP", Const, 0}, - {"TIOCDRAIN", Const, 0}, - {"TIOCDSIMICROCODE", Const, 0}, - {"TIOCEXCL", Const, 0}, - {"TIOCEXT", Const, 0}, - {"TIOCFLAG_CDTRCTS", Const, 1}, - {"TIOCFLAG_CLOCAL", Const, 1}, - {"TIOCFLAG_CRTSCTS", Const, 1}, - {"TIOCFLAG_MDMBUF", Const, 1}, - {"TIOCFLAG_PPS", Const, 1}, - {"TIOCFLAG_SOFTCAR", Const, 1}, - {"TIOCFLUSH", Const, 0}, - {"TIOCGDEV", Const, 0}, - {"TIOCGDRAINWAIT", Const, 0}, - {"TIOCGETA", Const, 0}, - {"TIOCGETD", Const, 0}, - {"TIOCGFLAGS", Const, 1}, - {"TIOCGICOUNT", Const, 0}, - {"TIOCGLCKTRMIOS", Const, 0}, - {"TIOCGLINED", Const, 1}, - {"TIOCGPGRP", Const, 0}, - {"TIOCGPTN", Const, 0}, - {"TIOCGQSIZE", Const, 1}, - {"TIOCGRANTPT", Const, 1}, - {"TIOCGRS485", Const, 0}, - {"TIOCGSERIAL", Const, 0}, - {"TIOCGSID", Const, 0}, - {"TIOCGSIZE", Const, 1}, - {"TIOCGSOFTCAR", Const, 0}, - {"TIOCGTSTAMP", Const, 1}, - {"TIOCGWINSZ", Const, 0}, - {"TIOCINQ", Const, 0}, - {"TIOCIXOFF", Const, 0}, - {"TIOCIXON", Const, 0}, - {"TIOCLINUX", Const, 0}, - {"TIOCMBIC", Const, 0}, - {"TIOCMBIS", Const, 0}, - {"TIOCMGDTRWAIT", Const, 0}, - {"TIOCMGET", Const, 
0}, - {"TIOCMIWAIT", Const, 0}, - {"TIOCMODG", Const, 0}, - {"TIOCMODS", Const, 0}, - {"TIOCMSDTRWAIT", Const, 0}, - {"TIOCMSET", Const, 0}, - {"TIOCM_CAR", Const, 0}, - {"TIOCM_CD", Const, 0}, - {"TIOCM_CTS", Const, 0}, - {"TIOCM_DCD", Const, 0}, - {"TIOCM_DSR", Const, 0}, - {"TIOCM_DTR", Const, 0}, - {"TIOCM_LE", Const, 0}, - {"TIOCM_RI", Const, 0}, - {"TIOCM_RNG", Const, 0}, - {"TIOCM_RTS", Const, 0}, - {"TIOCM_SR", Const, 0}, - {"TIOCM_ST", Const, 0}, - {"TIOCNOTTY", Const, 0}, - {"TIOCNXCL", Const, 0}, - {"TIOCOUTQ", Const, 0}, - {"TIOCPKT", Const, 0}, - {"TIOCPKT_DATA", Const, 0}, - {"TIOCPKT_DOSTOP", Const, 0}, - {"TIOCPKT_FLUSHREAD", Const, 0}, - {"TIOCPKT_FLUSHWRITE", Const, 0}, - {"TIOCPKT_IOCTL", Const, 0}, - {"TIOCPKT_NOSTOP", Const, 0}, - {"TIOCPKT_START", Const, 0}, - {"TIOCPKT_STOP", Const, 0}, - {"TIOCPTMASTER", Const, 0}, - {"TIOCPTMGET", Const, 1}, - {"TIOCPTSNAME", Const, 1}, - {"TIOCPTYGNAME", Const, 0}, - {"TIOCPTYGRANT", Const, 0}, - {"TIOCPTYUNLK", Const, 0}, - {"TIOCRCVFRAME", Const, 1}, - {"TIOCREMOTE", Const, 0}, - {"TIOCSBRK", Const, 0}, - {"TIOCSCONS", Const, 0}, - {"TIOCSCTTY", Const, 0}, - {"TIOCSDRAINWAIT", Const, 0}, - {"TIOCSDTR", Const, 0}, - {"TIOCSERCONFIG", Const, 0}, - {"TIOCSERGETLSR", Const, 0}, - {"TIOCSERGETMULTI", Const, 0}, - {"TIOCSERGSTRUCT", Const, 0}, - {"TIOCSERGWILD", Const, 0}, - {"TIOCSERSETMULTI", Const, 0}, - {"TIOCSERSWILD", Const, 0}, - {"TIOCSER_TEMT", Const, 0}, - {"TIOCSETA", Const, 0}, - {"TIOCSETAF", Const, 0}, - {"TIOCSETAW", Const, 0}, - {"TIOCSETD", Const, 0}, - {"TIOCSFLAGS", Const, 1}, - {"TIOCSIG", Const, 0}, - {"TIOCSLCKTRMIOS", Const, 0}, - {"TIOCSLINED", Const, 1}, - {"TIOCSPGRP", Const, 0}, - {"TIOCSPTLCK", Const, 0}, - {"TIOCSQSIZE", Const, 1}, - {"TIOCSRS485", Const, 0}, - {"TIOCSSERIAL", Const, 0}, - {"TIOCSSIZE", Const, 1}, - {"TIOCSSOFTCAR", Const, 0}, - {"TIOCSTART", Const, 0}, - {"TIOCSTAT", Const, 0}, - {"TIOCSTI", Const, 0}, - {"TIOCSTOP", Const, 0}, - {"TIOCSTSTAMP", Const, 1}, - {"TIOCSWINSZ", Const, 0}, - {"TIOCTIMESTAMP", Const, 0}, - {"TIOCUCNTL", Const, 0}, - {"TIOCVHANGUP", Const, 0}, - {"TIOCXMTFRAME", Const, 1}, - {"TOKEN_ADJUST_DEFAULT", Const, 0}, - {"TOKEN_ADJUST_GROUPS", Const, 0}, - {"TOKEN_ADJUST_PRIVILEGES", Const, 0}, - {"TOKEN_ADJUST_SESSIONID", Const, 11}, - {"TOKEN_ALL_ACCESS", Const, 0}, - {"TOKEN_ASSIGN_PRIMARY", Const, 0}, - {"TOKEN_DUPLICATE", Const, 0}, - {"TOKEN_EXECUTE", Const, 0}, - {"TOKEN_IMPERSONATE", Const, 0}, - {"TOKEN_QUERY", Const, 0}, - {"TOKEN_QUERY_SOURCE", Const, 0}, - {"TOKEN_READ", Const, 0}, - {"TOKEN_WRITE", Const, 0}, - {"TOSTOP", Const, 0}, - {"TRUNCATE_EXISTING", Const, 0}, - {"TUNATTACHFILTER", Const, 0}, - {"TUNDETACHFILTER", Const, 0}, - {"TUNGETFEATURES", Const, 0}, - {"TUNGETIFF", Const, 0}, - {"TUNGETSNDBUF", Const, 0}, - {"TUNGETVNETHDRSZ", Const, 0}, - {"TUNSETDEBUG", Const, 0}, - {"TUNSETGROUP", Const, 0}, - {"TUNSETIFF", Const, 0}, - {"TUNSETLINK", Const, 0}, - {"TUNSETNOCSUM", Const, 0}, - {"TUNSETOFFLOAD", Const, 0}, - {"TUNSETOWNER", Const, 0}, - {"TUNSETPERSIST", Const, 0}, - {"TUNSETSNDBUF", Const, 0}, - {"TUNSETTXFILTER", Const, 0}, - {"TUNSETVNETHDRSZ", Const, 0}, - {"Tee", Func, 0}, - {"TerminateProcess", Func, 0}, - {"Termios", Type, 0}, - {"Termios.Cc", Field, 0}, - {"Termios.Cflag", Field, 0}, - {"Termios.Iflag", Field, 0}, - {"Termios.Ispeed", Field, 0}, - {"Termios.Lflag", Field, 0}, - {"Termios.Line", Field, 0}, - {"Termios.Oflag", Field, 0}, - {"Termios.Ospeed", Field, 0}, - {"Termios.Pad_cgo_0", Field, 0}, - {"Tgkill", Func, 0}, - {"Time", 
Func, 0}, - {"Time_t", Type, 0}, - {"Times", Func, 0}, - {"Timespec", Type, 0}, - {"Timespec.Nsec", Field, 0}, - {"Timespec.Pad_cgo_0", Field, 2}, - {"Timespec.Sec", Field, 0}, - {"TimespecToNsec", Func, 0}, - {"Timeval", Type, 0}, - {"Timeval.Pad_cgo_0", Field, 0}, - {"Timeval.Sec", Field, 0}, - {"Timeval.Usec", Field, 0}, - {"Timeval32", Type, 0}, - {"Timeval32.Sec", Field, 0}, - {"Timeval32.Usec", Field, 0}, - {"TimevalToNsec", Func, 0}, - {"Timex", Type, 0}, - {"Timex.Calcnt", Field, 0}, - {"Timex.Constant", Field, 0}, - {"Timex.Errcnt", Field, 0}, - {"Timex.Esterror", Field, 0}, - {"Timex.Freq", Field, 0}, - {"Timex.Jitcnt", Field, 0}, - {"Timex.Jitter", Field, 0}, - {"Timex.Maxerror", Field, 0}, - {"Timex.Modes", Field, 0}, - {"Timex.Offset", Field, 0}, - {"Timex.Pad_cgo_0", Field, 0}, - {"Timex.Pad_cgo_1", Field, 0}, - {"Timex.Pad_cgo_2", Field, 0}, - {"Timex.Pad_cgo_3", Field, 0}, - {"Timex.Ppsfreq", Field, 0}, - {"Timex.Precision", Field, 0}, - {"Timex.Shift", Field, 0}, - {"Timex.Stabil", Field, 0}, - {"Timex.Status", Field, 0}, - {"Timex.Stbcnt", Field, 0}, - {"Timex.Tai", Field, 0}, - {"Timex.Tick", Field, 0}, - {"Timex.Time", Field, 0}, - {"Timex.Tolerance", Field, 0}, - {"Timezoneinformation", Type, 0}, - {"Timezoneinformation.Bias", Field, 0}, - {"Timezoneinformation.DaylightBias", Field, 0}, - {"Timezoneinformation.DaylightDate", Field, 0}, - {"Timezoneinformation.DaylightName", Field, 0}, - {"Timezoneinformation.StandardBias", Field, 0}, - {"Timezoneinformation.StandardDate", Field, 0}, - {"Timezoneinformation.StandardName", Field, 0}, - {"Tms", Type, 0}, - {"Tms.Cstime", Field, 0}, - {"Tms.Cutime", Field, 0}, - {"Tms.Stime", Field, 0}, - {"Tms.Utime", Field, 0}, - {"Token", Type, 0}, - {"TokenAccessInformation", Const, 0}, - {"TokenAuditPolicy", Const, 0}, - {"TokenDefaultDacl", Const, 0}, - {"TokenElevation", Const, 0}, - {"TokenElevationType", Const, 0}, - {"TokenGroups", Const, 0}, - {"TokenGroupsAndPrivileges", Const, 0}, - {"TokenHasRestrictions", Const, 0}, - {"TokenImpersonationLevel", Const, 0}, - {"TokenIntegrityLevel", Const, 0}, - {"TokenLinkedToken", Const, 0}, - {"TokenLogonSid", Const, 0}, - {"TokenMandatoryPolicy", Const, 0}, - {"TokenOrigin", Const, 0}, - {"TokenOwner", Const, 0}, - {"TokenPrimaryGroup", Const, 0}, - {"TokenPrivileges", Const, 0}, - {"TokenRestrictedSids", Const, 0}, - {"TokenSandBoxInert", Const, 0}, - {"TokenSessionId", Const, 0}, - {"TokenSessionReference", Const, 0}, - {"TokenSource", Const, 0}, - {"TokenStatistics", Const, 0}, - {"TokenType", Const, 0}, - {"TokenUIAccess", Const, 0}, - {"TokenUser", Const, 0}, - {"TokenVirtualizationAllowed", Const, 0}, - {"TokenVirtualizationEnabled", Const, 0}, - {"Tokenprimarygroup", Type, 0}, - {"Tokenprimarygroup.PrimaryGroup", Field, 0}, - {"Tokenuser", Type, 0}, - {"Tokenuser.User", Field, 0}, - {"TranslateAccountName", Func, 0}, - {"TranslateName", Func, 0}, - {"TransmitFile", Func, 0}, - {"TransmitFileBuffers", Type, 0}, - {"TransmitFileBuffers.Head", Field, 0}, - {"TransmitFileBuffers.HeadLength", Field, 0}, - {"TransmitFileBuffers.Tail", Field, 0}, - {"TransmitFileBuffers.TailLength", Field, 0}, - {"Truncate", Func, 0}, - {"UNIX_PATH_MAX", Const, 12}, - {"USAGE_MATCH_TYPE_AND", Const, 0}, - {"USAGE_MATCH_TYPE_OR", Const, 0}, - {"UTF16FromString", Func, 1}, - {"UTF16PtrFromString", Func, 1}, - {"UTF16ToString", Func, 0}, - {"Ucred", Type, 0}, - {"Ucred.Gid", Field, 0}, - {"Ucred.Pid", Field, 0}, - {"Ucred.Uid", Field, 0}, - {"Umask", Func, 0}, - {"Uname", Func, 0}, - {"Undelete", Func, 0}, - 
{"UnixCredentials", Func, 0}, - {"UnixRights", Func, 0}, - {"Unlink", Func, 0}, - {"Unlinkat", Func, 0}, - {"UnmapViewOfFile", Func, 0}, - {"Unmount", Func, 0}, - {"Unsetenv", Func, 4}, - {"Unshare", Func, 0}, - {"UserInfo10", Type, 0}, - {"UserInfo10.Comment", Field, 0}, - {"UserInfo10.FullName", Field, 0}, - {"UserInfo10.Name", Field, 0}, - {"UserInfo10.UsrComment", Field, 0}, - {"Ustat", Func, 0}, - {"Ustat_t", Type, 0}, - {"Ustat_t.Fname", Field, 0}, - {"Ustat_t.Fpack", Field, 0}, - {"Ustat_t.Pad_cgo_0", Field, 0}, - {"Ustat_t.Pad_cgo_1", Field, 0}, - {"Ustat_t.Tfree", Field, 0}, - {"Ustat_t.Tinode", Field, 0}, - {"Utimbuf", Type, 0}, - {"Utimbuf.Actime", Field, 0}, - {"Utimbuf.Modtime", Field, 0}, - {"Utime", Func, 0}, - {"Utimes", Func, 0}, - {"UtimesNano", Func, 1}, - {"Utsname", Type, 0}, - {"Utsname.Domainname", Field, 0}, - {"Utsname.Machine", Field, 0}, - {"Utsname.Nodename", Field, 0}, - {"Utsname.Release", Field, 0}, - {"Utsname.Sysname", Field, 0}, - {"Utsname.Version", Field, 0}, - {"VDISCARD", Const, 0}, - {"VDSUSP", Const, 1}, - {"VEOF", Const, 0}, - {"VEOL", Const, 0}, - {"VEOL2", Const, 0}, - {"VERASE", Const, 0}, - {"VERASE2", Const, 1}, - {"VINTR", Const, 0}, - {"VKILL", Const, 0}, - {"VLNEXT", Const, 0}, - {"VMIN", Const, 0}, - {"VQUIT", Const, 0}, - {"VREPRINT", Const, 0}, - {"VSTART", Const, 0}, - {"VSTATUS", Const, 1}, - {"VSTOP", Const, 0}, - {"VSUSP", Const, 0}, - {"VSWTC", Const, 0}, - {"VT0", Const, 1}, - {"VT1", Const, 1}, - {"VTDLY", Const, 1}, - {"VTIME", Const, 0}, - {"VWERASE", Const, 0}, - {"VirtualLock", Func, 0}, - {"VirtualUnlock", Func, 0}, - {"WAIT_ABANDONED", Const, 0}, - {"WAIT_FAILED", Const, 0}, - {"WAIT_OBJECT_0", Const, 0}, - {"WAIT_TIMEOUT", Const, 0}, - {"WALL", Const, 0}, - {"WALLSIG", Const, 1}, - {"WALTSIG", Const, 1}, - {"WCLONE", Const, 0}, - {"WCONTINUED", Const, 0}, - {"WCOREFLAG", Const, 0}, - {"WEXITED", Const, 0}, - {"WLINUXCLONE", Const, 0}, - {"WNOHANG", Const, 0}, - {"WNOTHREAD", Const, 0}, - {"WNOWAIT", Const, 0}, - {"WNOZOMBIE", Const, 1}, - {"WOPTSCHECKED", Const, 1}, - {"WORDSIZE", Const, 0}, - {"WSABuf", Type, 0}, - {"WSABuf.Buf", Field, 0}, - {"WSABuf.Len", Field, 0}, - {"WSACleanup", Func, 0}, - {"WSADESCRIPTION_LEN", Const, 0}, - {"WSAData", Type, 0}, - {"WSAData.Description", Field, 0}, - {"WSAData.HighVersion", Field, 0}, - {"WSAData.MaxSockets", Field, 0}, - {"WSAData.MaxUdpDg", Field, 0}, - {"WSAData.SystemStatus", Field, 0}, - {"WSAData.VendorInfo", Field, 0}, - {"WSAData.Version", Field, 0}, - {"WSAEACCES", Const, 2}, - {"WSAECONNABORTED", Const, 9}, - {"WSAECONNRESET", Const, 3}, - {"WSAENOPROTOOPT", Const, 23}, - {"WSAEnumProtocols", Func, 2}, - {"WSAID_CONNECTEX", Var, 1}, - {"WSAIoctl", Func, 0}, - {"WSAPROTOCOL_LEN", Const, 2}, - {"WSAProtocolChain", Type, 2}, - {"WSAProtocolChain.ChainEntries", Field, 2}, - {"WSAProtocolChain.ChainLen", Field, 2}, - {"WSAProtocolInfo", Type, 2}, - {"WSAProtocolInfo.AddressFamily", Field, 2}, - {"WSAProtocolInfo.CatalogEntryId", Field, 2}, - {"WSAProtocolInfo.MaxSockAddr", Field, 2}, - {"WSAProtocolInfo.MessageSize", Field, 2}, - {"WSAProtocolInfo.MinSockAddr", Field, 2}, - {"WSAProtocolInfo.NetworkByteOrder", Field, 2}, - {"WSAProtocolInfo.Protocol", Field, 2}, - {"WSAProtocolInfo.ProtocolChain", Field, 2}, - {"WSAProtocolInfo.ProtocolMaxOffset", Field, 2}, - {"WSAProtocolInfo.ProtocolName", Field, 2}, - {"WSAProtocolInfo.ProviderFlags", Field, 2}, - {"WSAProtocolInfo.ProviderId", Field, 2}, - {"WSAProtocolInfo.ProviderReserved", Field, 2}, - {"WSAProtocolInfo.SecurityScheme", 
Field, 2}, - {"WSAProtocolInfo.ServiceFlags1", Field, 2}, - {"WSAProtocolInfo.ServiceFlags2", Field, 2}, - {"WSAProtocolInfo.ServiceFlags3", Field, 2}, - {"WSAProtocolInfo.ServiceFlags4", Field, 2}, - {"WSAProtocolInfo.SocketType", Field, 2}, - {"WSAProtocolInfo.Version", Field, 2}, - {"WSARecv", Func, 0}, - {"WSARecvFrom", Func, 0}, - {"WSASYS_STATUS_LEN", Const, 0}, - {"WSASend", Func, 0}, - {"WSASendTo", Func, 0}, - {"WSASendto", Func, 0}, - {"WSAStartup", Func, 0}, - {"WSTOPPED", Const, 0}, - {"WTRAPPED", Const, 1}, - {"WUNTRACED", Const, 0}, - {"Wait4", Func, 0}, - {"WaitForSingleObject", Func, 0}, - {"WaitStatus", Type, 0}, - {"WaitStatus.ExitCode", Field, 0}, - {"Win32FileAttributeData", Type, 0}, - {"Win32FileAttributeData.CreationTime", Field, 0}, - {"Win32FileAttributeData.FileAttributes", Field, 0}, - {"Win32FileAttributeData.FileSizeHigh", Field, 0}, - {"Win32FileAttributeData.FileSizeLow", Field, 0}, - {"Win32FileAttributeData.LastAccessTime", Field, 0}, - {"Win32FileAttributeData.LastWriteTime", Field, 0}, - {"Win32finddata", Type, 0}, - {"Win32finddata.AlternateFileName", Field, 0}, - {"Win32finddata.CreationTime", Field, 0}, - {"Win32finddata.FileAttributes", Field, 0}, - {"Win32finddata.FileName", Field, 0}, - {"Win32finddata.FileSizeHigh", Field, 0}, - {"Win32finddata.FileSizeLow", Field, 0}, - {"Win32finddata.LastAccessTime", Field, 0}, - {"Win32finddata.LastWriteTime", Field, 0}, - {"Win32finddata.Reserved0", Field, 0}, - {"Win32finddata.Reserved1", Field, 0}, - {"Write", Func, 0}, - {"WriteConsole", Func, 1}, - {"WriteFile", Func, 0}, - {"X509_ASN_ENCODING", Const, 0}, - {"XCASE", Const, 0}, - {"XP1_CONNECTIONLESS", Const, 2}, - {"XP1_CONNECT_DATA", Const, 2}, - {"XP1_DISCONNECT_DATA", Const, 2}, - {"XP1_EXPEDITED_DATA", Const, 2}, - {"XP1_GRACEFUL_CLOSE", Const, 2}, - {"XP1_GUARANTEED_DELIVERY", Const, 2}, - {"XP1_GUARANTEED_ORDER", Const, 2}, - {"XP1_IFS_HANDLES", Const, 2}, - {"XP1_MESSAGE_ORIENTED", Const, 2}, - {"XP1_MULTIPOINT_CONTROL_PLANE", Const, 2}, - {"XP1_MULTIPOINT_DATA_PLANE", Const, 2}, - {"XP1_PARTIAL_MESSAGE", Const, 2}, - {"XP1_PSEUDO_STREAM", Const, 2}, - {"XP1_QOS_SUPPORTED", Const, 2}, - {"XP1_SAN_SUPPORT_SDP", Const, 2}, - {"XP1_SUPPORT_BROADCAST", Const, 2}, - {"XP1_SUPPORT_MULTIPOINT", Const, 2}, - {"XP1_UNI_RECV", Const, 2}, - {"XP1_UNI_SEND", Const, 2}, + {"(*Cmsghdr).SetLen", Method, 0, ""}, + {"(*DLL).FindProc", Method, 0, ""}, + {"(*DLL).MustFindProc", Method, 0, ""}, + {"(*DLL).Release", Method, 0, ""}, + {"(*DLLError).Error", Method, 0, ""}, + {"(*DLLError).Unwrap", Method, 16, ""}, + {"(*Filetime).Nanoseconds", Method, 0, ""}, + {"(*Iovec).SetLen", Method, 0, ""}, + {"(*LazyDLL).Handle", Method, 0, ""}, + {"(*LazyDLL).Load", Method, 0, ""}, + {"(*LazyDLL).NewProc", Method, 0, ""}, + {"(*LazyProc).Addr", Method, 0, ""}, + {"(*LazyProc).Call", Method, 0, ""}, + {"(*LazyProc).Find", Method, 0, ""}, + {"(*Msghdr).SetControllen", Method, 0, ""}, + {"(*Proc).Addr", Method, 0, ""}, + {"(*Proc).Call", Method, 0, ""}, + {"(*PtraceRegs).PC", Method, 0, ""}, + {"(*PtraceRegs).SetPC", Method, 0, ""}, + {"(*RawSockaddrAny).Sockaddr", Method, 0, ""}, + {"(*SID).Copy", Method, 0, ""}, + {"(*SID).Len", Method, 0, ""}, + {"(*SID).LookupAccount", Method, 0, ""}, + {"(*SID).String", Method, 0, ""}, + {"(*Timespec).Nano", Method, 0, ""}, + {"(*Timespec).Unix", Method, 0, ""}, + {"(*Timeval).Nano", Method, 0, ""}, + {"(*Timeval).Nanoseconds", Method, 0, ""}, + {"(*Timeval).Unix", Method, 0, ""}, + {"(Errno).Error", Method, 0, ""}, + {"(Errno).Is", Method, 
13, ""}, + {"(Errno).Temporary", Method, 0, ""}, + {"(Errno).Timeout", Method, 0, ""}, + {"(Signal).Signal", Method, 0, ""}, + {"(Signal).String", Method, 0, ""}, + {"(Token).Close", Method, 0, ""}, + {"(Token).GetTokenPrimaryGroup", Method, 0, ""}, + {"(Token).GetTokenUser", Method, 0, ""}, + {"(Token).GetUserProfileDirectory", Method, 0, ""}, + {"(WaitStatus).Continued", Method, 0, ""}, + {"(WaitStatus).CoreDump", Method, 0, ""}, + {"(WaitStatus).ExitStatus", Method, 0, ""}, + {"(WaitStatus).Exited", Method, 0, ""}, + {"(WaitStatus).Signal", Method, 0, ""}, + {"(WaitStatus).Signaled", Method, 0, ""}, + {"(WaitStatus).StopSignal", Method, 0, ""}, + {"(WaitStatus).Stopped", Method, 0, ""}, + {"(WaitStatus).TrapCause", Method, 0, ""}, + {"AF_ALG", Const, 0, ""}, + {"AF_APPLETALK", Const, 0, ""}, + {"AF_ARP", Const, 0, ""}, + {"AF_ASH", Const, 0, ""}, + {"AF_ATM", Const, 0, ""}, + {"AF_ATMPVC", Const, 0, ""}, + {"AF_ATMSVC", Const, 0, ""}, + {"AF_AX25", Const, 0, ""}, + {"AF_BLUETOOTH", Const, 0, ""}, + {"AF_BRIDGE", Const, 0, ""}, + {"AF_CAIF", Const, 0, ""}, + {"AF_CAN", Const, 0, ""}, + {"AF_CCITT", Const, 0, ""}, + {"AF_CHAOS", Const, 0, ""}, + {"AF_CNT", Const, 0, ""}, + {"AF_COIP", Const, 0, ""}, + {"AF_DATAKIT", Const, 0, ""}, + {"AF_DECnet", Const, 0, ""}, + {"AF_DLI", Const, 0, ""}, + {"AF_E164", Const, 0, ""}, + {"AF_ECMA", Const, 0, ""}, + {"AF_ECONET", Const, 0, ""}, + {"AF_ENCAP", Const, 1, ""}, + {"AF_FILE", Const, 0, ""}, + {"AF_HYLINK", Const, 0, ""}, + {"AF_IEEE80211", Const, 0, ""}, + {"AF_IEEE802154", Const, 0, ""}, + {"AF_IMPLINK", Const, 0, ""}, + {"AF_INET", Const, 0, ""}, + {"AF_INET6", Const, 0, ""}, + {"AF_INET6_SDP", Const, 3, ""}, + {"AF_INET_SDP", Const, 3, ""}, + {"AF_IPX", Const, 0, ""}, + {"AF_IRDA", Const, 0, ""}, + {"AF_ISDN", Const, 0, ""}, + {"AF_ISO", Const, 0, ""}, + {"AF_IUCV", Const, 0, ""}, + {"AF_KEY", Const, 0, ""}, + {"AF_LAT", Const, 0, ""}, + {"AF_LINK", Const, 0, ""}, + {"AF_LLC", Const, 0, ""}, + {"AF_LOCAL", Const, 0, ""}, + {"AF_MAX", Const, 0, ""}, + {"AF_MPLS", Const, 1, ""}, + {"AF_NATM", Const, 0, ""}, + {"AF_NDRV", Const, 0, ""}, + {"AF_NETBEUI", Const, 0, ""}, + {"AF_NETBIOS", Const, 0, ""}, + {"AF_NETGRAPH", Const, 0, ""}, + {"AF_NETLINK", Const, 0, ""}, + {"AF_NETROM", Const, 0, ""}, + {"AF_NS", Const, 0, ""}, + {"AF_OROUTE", Const, 1, ""}, + {"AF_OSI", Const, 0, ""}, + {"AF_PACKET", Const, 0, ""}, + {"AF_PHONET", Const, 0, ""}, + {"AF_PPP", Const, 0, ""}, + {"AF_PPPOX", Const, 0, ""}, + {"AF_PUP", Const, 0, ""}, + {"AF_RDS", Const, 0, ""}, + {"AF_RESERVED_36", Const, 0, ""}, + {"AF_ROSE", Const, 0, ""}, + {"AF_ROUTE", Const, 0, ""}, + {"AF_RXRPC", Const, 0, ""}, + {"AF_SCLUSTER", Const, 0, ""}, + {"AF_SECURITY", Const, 0, ""}, + {"AF_SIP", Const, 0, ""}, + {"AF_SLOW", Const, 0, ""}, + {"AF_SNA", Const, 0, ""}, + {"AF_SYSTEM", Const, 0, ""}, + {"AF_TIPC", Const, 0, ""}, + {"AF_UNIX", Const, 0, ""}, + {"AF_UNSPEC", Const, 0, ""}, + {"AF_UTUN", Const, 16, ""}, + {"AF_VENDOR00", Const, 0, ""}, + {"AF_VENDOR01", Const, 0, ""}, + {"AF_VENDOR02", Const, 0, ""}, + {"AF_VENDOR03", Const, 0, ""}, + {"AF_VENDOR04", Const, 0, ""}, + {"AF_VENDOR05", Const, 0, ""}, + {"AF_VENDOR06", Const, 0, ""}, + {"AF_VENDOR07", Const, 0, ""}, + {"AF_VENDOR08", Const, 0, ""}, + {"AF_VENDOR09", Const, 0, ""}, + {"AF_VENDOR10", Const, 0, ""}, + {"AF_VENDOR11", Const, 0, ""}, + {"AF_VENDOR12", Const, 0, ""}, + {"AF_VENDOR13", Const, 0, ""}, + {"AF_VENDOR14", Const, 0, ""}, + {"AF_VENDOR15", Const, 0, ""}, + {"AF_VENDOR16", Const, 0, ""}, + {"AF_VENDOR17", Const, 0, 
""}, + {"AF_VENDOR18", Const, 0, ""}, + {"AF_VENDOR19", Const, 0, ""}, + {"AF_VENDOR20", Const, 0, ""}, + {"AF_VENDOR21", Const, 0, ""}, + {"AF_VENDOR22", Const, 0, ""}, + {"AF_VENDOR23", Const, 0, ""}, + {"AF_VENDOR24", Const, 0, ""}, + {"AF_VENDOR25", Const, 0, ""}, + {"AF_VENDOR26", Const, 0, ""}, + {"AF_VENDOR27", Const, 0, ""}, + {"AF_VENDOR28", Const, 0, ""}, + {"AF_VENDOR29", Const, 0, ""}, + {"AF_VENDOR30", Const, 0, ""}, + {"AF_VENDOR31", Const, 0, ""}, + {"AF_VENDOR32", Const, 0, ""}, + {"AF_VENDOR33", Const, 0, ""}, + {"AF_VENDOR34", Const, 0, ""}, + {"AF_VENDOR35", Const, 0, ""}, + {"AF_VENDOR36", Const, 0, ""}, + {"AF_VENDOR37", Const, 0, ""}, + {"AF_VENDOR38", Const, 0, ""}, + {"AF_VENDOR39", Const, 0, ""}, + {"AF_VENDOR40", Const, 0, ""}, + {"AF_VENDOR41", Const, 0, ""}, + {"AF_VENDOR42", Const, 0, ""}, + {"AF_VENDOR43", Const, 0, ""}, + {"AF_VENDOR44", Const, 0, ""}, + {"AF_VENDOR45", Const, 0, ""}, + {"AF_VENDOR46", Const, 0, ""}, + {"AF_VENDOR47", Const, 0, ""}, + {"AF_WANPIPE", Const, 0, ""}, + {"AF_X25", Const, 0, ""}, + {"AI_CANONNAME", Const, 1, ""}, + {"AI_NUMERICHOST", Const, 1, ""}, + {"AI_PASSIVE", Const, 1, ""}, + {"APPLICATION_ERROR", Const, 0, ""}, + {"ARPHRD_ADAPT", Const, 0, ""}, + {"ARPHRD_APPLETLK", Const, 0, ""}, + {"ARPHRD_ARCNET", Const, 0, ""}, + {"ARPHRD_ASH", Const, 0, ""}, + {"ARPHRD_ATM", Const, 0, ""}, + {"ARPHRD_AX25", Const, 0, ""}, + {"ARPHRD_BIF", Const, 0, ""}, + {"ARPHRD_CHAOS", Const, 0, ""}, + {"ARPHRD_CISCO", Const, 0, ""}, + {"ARPHRD_CSLIP", Const, 0, ""}, + {"ARPHRD_CSLIP6", Const, 0, ""}, + {"ARPHRD_DDCMP", Const, 0, ""}, + {"ARPHRD_DLCI", Const, 0, ""}, + {"ARPHRD_ECONET", Const, 0, ""}, + {"ARPHRD_EETHER", Const, 0, ""}, + {"ARPHRD_ETHER", Const, 0, ""}, + {"ARPHRD_EUI64", Const, 0, ""}, + {"ARPHRD_FCAL", Const, 0, ""}, + {"ARPHRD_FCFABRIC", Const, 0, ""}, + {"ARPHRD_FCPL", Const, 0, ""}, + {"ARPHRD_FCPP", Const, 0, ""}, + {"ARPHRD_FDDI", Const, 0, ""}, + {"ARPHRD_FRAD", Const, 0, ""}, + {"ARPHRD_FRELAY", Const, 1, ""}, + {"ARPHRD_HDLC", Const, 0, ""}, + {"ARPHRD_HIPPI", Const, 0, ""}, + {"ARPHRD_HWX25", Const, 0, ""}, + {"ARPHRD_IEEE1394", Const, 0, ""}, + {"ARPHRD_IEEE802", Const, 0, ""}, + {"ARPHRD_IEEE80211", Const, 0, ""}, + {"ARPHRD_IEEE80211_PRISM", Const, 0, ""}, + {"ARPHRD_IEEE80211_RADIOTAP", Const, 0, ""}, + {"ARPHRD_IEEE802154", Const, 0, ""}, + {"ARPHRD_IEEE802154_PHY", Const, 0, ""}, + {"ARPHRD_IEEE802_TR", Const, 0, ""}, + {"ARPHRD_INFINIBAND", Const, 0, ""}, + {"ARPHRD_IPDDP", Const, 0, ""}, + {"ARPHRD_IPGRE", Const, 0, ""}, + {"ARPHRD_IRDA", Const, 0, ""}, + {"ARPHRD_LAPB", Const, 0, ""}, + {"ARPHRD_LOCALTLK", Const, 0, ""}, + {"ARPHRD_LOOPBACK", Const, 0, ""}, + {"ARPHRD_METRICOM", Const, 0, ""}, + {"ARPHRD_NETROM", Const, 0, ""}, + {"ARPHRD_NONE", Const, 0, ""}, + {"ARPHRD_PIMREG", Const, 0, ""}, + {"ARPHRD_PPP", Const, 0, ""}, + {"ARPHRD_PRONET", Const, 0, ""}, + {"ARPHRD_RAWHDLC", Const, 0, ""}, + {"ARPHRD_ROSE", Const, 0, ""}, + {"ARPHRD_RSRVD", Const, 0, ""}, + {"ARPHRD_SIT", Const, 0, ""}, + {"ARPHRD_SKIP", Const, 0, ""}, + {"ARPHRD_SLIP", Const, 0, ""}, + {"ARPHRD_SLIP6", Const, 0, ""}, + {"ARPHRD_STRIP", Const, 1, ""}, + {"ARPHRD_TUNNEL", Const, 0, ""}, + {"ARPHRD_TUNNEL6", Const, 0, ""}, + {"ARPHRD_VOID", Const, 0, ""}, + {"ARPHRD_X25", Const, 0, ""}, + {"AUTHTYPE_CLIENT", Const, 0, ""}, + {"AUTHTYPE_SERVER", Const, 0, ""}, + {"Accept", Func, 0, "func(fd int) (nfd int, sa Sockaddr, err error)"}, + {"Accept4", Func, 1, "func(fd int, flags int) (nfd int, sa Sockaddr, err error)"}, + {"AcceptEx", Func, 0, ""}, 
+ {"Access", Func, 0, "func(path string, mode uint32) (err error)"}, + {"Acct", Func, 0, "func(path string) (err error)"}, + {"AddrinfoW", Type, 1, ""}, + {"AddrinfoW.Addr", Field, 1, ""}, + {"AddrinfoW.Addrlen", Field, 1, ""}, + {"AddrinfoW.Canonname", Field, 1, ""}, + {"AddrinfoW.Family", Field, 1, ""}, + {"AddrinfoW.Flags", Field, 1, ""}, + {"AddrinfoW.Next", Field, 1, ""}, + {"AddrinfoW.Protocol", Field, 1, ""}, + {"AddrinfoW.Socktype", Field, 1, ""}, + {"Adjtime", Func, 0, ""}, + {"Adjtimex", Func, 0, "func(buf *Timex) (state int, err error)"}, + {"AllThreadsSyscall", Func, 16, "func(trap uintptr, a1 uintptr, a2 uintptr, a3 uintptr) (r1 uintptr, r2 uintptr, err Errno)"}, + {"AllThreadsSyscall6", Func, 16, "func(trap uintptr, a1 uintptr, a2 uintptr, a3 uintptr, a4 uintptr, a5 uintptr, a6 uintptr) (r1 uintptr, r2 uintptr, err Errno)"}, + {"AttachLsf", Func, 0, "func(fd int, i []SockFilter) error"}, + {"B0", Const, 0, ""}, + {"B1000000", Const, 0, ""}, + {"B110", Const, 0, ""}, + {"B115200", Const, 0, ""}, + {"B1152000", Const, 0, ""}, + {"B1200", Const, 0, ""}, + {"B134", Const, 0, ""}, + {"B14400", Const, 1, ""}, + {"B150", Const, 0, ""}, + {"B1500000", Const, 0, ""}, + {"B1800", Const, 0, ""}, + {"B19200", Const, 0, ""}, + {"B200", Const, 0, ""}, + {"B2000000", Const, 0, ""}, + {"B230400", Const, 0, ""}, + {"B2400", Const, 0, ""}, + {"B2500000", Const, 0, ""}, + {"B28800", Const, 1, ""}, + {"B300", Const, 0, ""}, + {"B3000000", Const, 0, ""}, + {"B3500000", Const, 0, ""}, + {"B38400", Const, 0, ""}, + {"B4000000", Const, 0, ""}, + {"B460800", Const, 0, ""}, + {"B4800", Const, 0, ""}, + {"B50", Const, 0, ""}, + {"B500000", Const, 0, ""}, + {"B57600", Const, 0, ""}, + {"B576000", Const, 0, ""}, + {"B600", Const, 0, ""}, + {"B7200", Const, 1, ""}, + {"B75", Const, 0, ""}, + {"B76800", Const, 1, ""}, + {"B921600", Const, 0, ""}, + {"B9600", Const, 0, ""}, + {"BASE_PROTOCOL", Const, 2, ""}, + {"BIOCFEEDBACK", Const, 0, ""}, + {"BIOCFLUSH", Const, 0, ""}, + {"BIOCGBLEN", Const, 0, ""}, + {"BIOCGDIRECTION", Const, 0, ""}, + {"BIOCGDIRFILT", Const, 1, ""}, + {"BIOCGDLT", Const, 0, ""}, + {"BIOCGDLTLIST", Const, 0, ""}, + {"BIOCGETBUFMODE", Const, 0, ""}, + {"BIOCGETIF", Const, 0, ""}, + {"BIOCGETZMAX", Const, 0, ""}, + {"BIOCGFEEDBACK", Const, 1, ""}, + {"BIOCGFILDROP", Const, 1, ""}, + {"BIOCGHDRCMPLT", Const, 0, ""}, + {"BIOCGRSIG", Const, 0, ""}, + {"BIOCGRTIMEOUT", Const, 0, ""}, + {"BIOCGSEESENT", Const, 0, ""}, + {"BIOCGSTATS", Const, 0, ""}, + {"BIOCGSTATSOLD", Const, 1, ""}, + {"BIOCGTSTAMP", Const, 1, ""}, + {"BIOCIMMEDIATE", Const, 0, ""}, + {"BIOCLOCK", Const, 0, ""}, + {"BIOCPROMISC", Const, 0, ""}, + {"BIOCROTZBUF", Const, 0, ""}, + {"BIOCSBLEN", Const, 0, ""}, + {"BIOCSDIRECTION", Const, 0, ""}, + {"BIOCSDIRFILT", Const, 1, ""}, + {"BIOCSDLT", Const, 0, ""}, + {"BIOCSETBUFMODE", Const, 0, ""}, + {"BIOCSETF", Const, 0, ""}, + {"BIOCSETFNR", Const, 0, ""}, + {"BIOCSETIF", Const, 0, ""}, + {"BIOCSETWF", Const, 0, ""}, + {"BIOCSETZBUF", Const, 0, ""}, + {"BIOCSFEEDBACK", Const, 1, ""}, + {"BIOCSFILDROP", Const, 1, ""}, + {"BIOCSHDRCMPLT", Const, 0, ""}, + {"BIOCSRSIG", Const, 0, ""}, + {"BIOCSRTIMEOUT", Const, 0, ""}, + {"BIOCSSEESENT", Const, 0, ""}, + {"BIOCSTCPF", Const, 1, ""}, + {"BIOCSTSTAMP", Const, 1, ""}, + {"BIOCSUDPF", Const, 1, ""}, + {"BIOCVERSION", Const, 0, ""}, + {"BPF_A", Const, 0, ""}, + {"BPF_ABS", Const, 0, ""}, + {"BPF_ADD", Const, 0, ""}, + {"BPF_ALIGNMENT", Const, 0, ""}, + {"BPF_ALIGNMENT32", Const, 1, ""}, + {"BPF_ALU", Const, 0, ""}, + {"BPF_AND", Const, 0, 
""}, + {"BPF_B", Const, 0, ""}, + {"BPF_BUFMODE_BUFFER", Const, 0, ""}, + {"BPF_BUFMODE_ZBUF", Const, 0, ""}, + {"BPF_DFLTBUFSIZE", Const, 1, ""}, + {"BPF_DIRECTION_IN", Const, 1, ""}, + {"BPF_DIRECTION_OUT", Const, 1, ""}, + {"BPF_DIV", Const, 0, ""}, + {"BPF_H", Const, 0, ""}, + {"BPF_IMM", Const, 0, ""}, + {"BPF_IND", Const, 0, ""}, + {"BPF_JA", Const, 0, ""}, + {"BPF_JEQ", Const, 0, ""}, + {"BPF_JGE", Const, 0, ""}, + {"BPF_JGT", Const, 0, ""}, + {"BPF_JMP", Const, 0, ""}, + {"BPF_JSET", Const, 0, ""}, + {"BPF_K", Const, 0, ""}, + {"BPF_LD", Const, 0, ""}, + {"BPF_LDX", Const, 0, ""}, + {"BPF_LEN", Const, 0, ""}, + {"BPF_LSH", Const, 0, ""}, + {"BPF_MAJOR_VERSION", Const, 0, ""}, + {"BPF_MAXBUFSIZE", Const, 0, ""}, + {"BPF_MAXINSNS", Const, 0, ""}, + {"BPF_MEM", Const, 0, ""}, + {"BPF_MEMWORDS", Const, 0, ""}, + {"BPF_MINBUFSIZE", Const, 0, ""}, + {"BPF_MINOR_VERSION", Const, 0, ""}, + {"BPF_MISC", Const, 0, ""}, + {"BPF_MSH", Const, 0, ""}, + {"BPF_MUL", Const, 0, ""}, + {"BPF_NEG", Const, 0, ""}, + {"BPF_OR", Const, 0, ""}, + {"BPF_RELEASE", Const, 0, ""}, + {"BPF_RET", Const, 0, ""}, + {"BPF_RSH", Const, 0, ""}, + {"BPF_ST", Const, 0, ""}, + {"BPF_STX", Const, 0, ""}, + {"BPF_SUB", Const, 0, ""}, + {"BPF_TAX", Const, 0, ""}, + {"BPF_TXA", Const, 0, ""}, + {"BPF_T_BINTIME", Const, 1, ""}, + {"BPF_T_BINTIME_FAST", Const, 1, ""}, + {"BPF_T_BINTIME_MONOTONIC", Const, 1, ""}, + {"BPF_T_BINTIME_MONOTONIC_FAST", Const, 1, ""}, + {"BPF_T_FAST", Const, 1, ""}, + {"BPF_T_FLAG_MASK", Const, 1, ""}, + {"BPF_T_FORMAT_MASK", Const, 1, ""}, + {"BPF_T_MICROTIME", Const, 1, ""}, + {"BPF_T_MICROTIME_FAST", Const, 1, ""}, + {"BPF_T_MICROTIME_MONOTONIC", Const, 1, ""}, + {"BPF_T_MICROTIME_MONOTONIC_FAST", Const, 1, ""}, + {"BPF_T_MONOTONIC", Const, 1, ""}, + {"BPF_T_MONOTONIC_FAST", Const, 1, ""}, + {"BPF_T_NANOTIME", Const, 1, ""}, + {"BPF_T_NANOTIME_FAST", Const, 1, ""}, + {"BPF_T_NANOTIME_MONOTONIC", Const, 1, ""}, + {"BPF_T_NANOTIME_MONOTONIC_FAST", Const, 1, ""}, + {"BPF_T_NONE", Const, 1, ""}, + {"BPF_T_NORMAL", Const, 1, ""}, + {"BPF_W", Const, 0, ""}, + {"BPF_X", Const, 0, ""}, + {"BRKINT", Const, 0, ""}, + {"Bind", Func, 0, "func(fd int, sa Sockaddr) (err error)"}, + {"BindToDevice", Func, 0, "func(fd int, device string) (err error)"}, + {"BpfBuflen", Func, 0, ""}, + {"BpfDatalink", Func, 0, ""}, + {"BpfHdr", Type, 0, ""}, + {"BpfHdr.Caplen", Field, 0, ""}, + {"BpfHdr.Datalen", Field, 0, ""}, + {"BpfHdr.Hdrlen", Field, 0, ""}, + {"BpfHdr.Pad_cgo_0", Field, 0, ""}, + {"BpfHdr.Tstamp", Field, 0, ""}, + {"BpfHeadercmpl", Func, 0, ""}, + {"BpfInsn", Type, 0, ""}, + {"BpfInsn.Code", Field, 0, ""}, + {"BpfInsn.Jf", Field, 0, ""}, + {"BpfInsn.Jt", Field, 0, ""}, + {"BpfInsn.K", Field, 0, ""}, + {"BpfInterface", Func, 0, ""}, + {"BpfJump", Func, 0, ""}, + {"BpfProgram", Type, 0, ""}, + {"BpfProgram.Insns", Field, 0, ""}, + {"BpfProgram.Len", Field, 0, ""}, + {"BpfProgram.Pad_cgo_0", Field, 0, ""}, + {"BpfStat", Type, 0, ""}, + {"BpfStat.Capt", Field, 2, ""}, + {"BpfStat.Drop", Field, 0, ""}, + {"BpfStat.Padding", Field, 2, ""}, + {"BpfStat.Recv", Field, 0, ""}, + {"BpfStats", Func, 0, ""}, + {"BpfStmt", Func, 0, ""}, + {"BpfTimeout", Func, 0, ""}, + {"BpfTimeval", Type, 2, ""}, + {"BpfTimeval.Sec", Field, 2, ""}, + {"BpfTimeval.Usec", Field, 2, ""}, + {"BpfVersion", Type, 0, ""}, + {"BpfVersion.Major", Field, 0, ""}, + {"BpfVersion.Minor", Field, 0, ""}, + {"BpfZbuf", Type, 0, ""}, + {"BpfZbuf.Bufa", Field, 0, ""}, + {"BpfZbuf.Bufb", Field, 0, ""}, + {"BpfZbuf.Buflen", Field, 0, ""}, + 
{"BpfZbufHeader", Type, 0, ""}, + {"BpfZbufHeader.Kernel_gen", Field, 0, ""}, + {"BpfZbufHeader.Kernel_len", Field, 0, ""}, + {"BpfZbufHeader.User_gen", Field, 0, ""}, + {"BpfZbufHeader.X_bzh_pad", Field, 0, ""}, + {"ByHandleFileInformation", Type, 0, ""}, + {"ByHandleFileInformation.CreationTime", Field, 0, ""}, + {"ByHandleFileInformation.FileAttributes", Field, 0, ""}, + {"ByHandleFileInformation.FileIndexHigh", Field, 0, ""}, + {"ByHandleFileInformation.FileIndexLow", Field, 0, ""}, + {"ByHandleFileInformation.FileSizeHigh", Field, 0, ""}, + {"ByHandleFileInformation.FileSizeLow", Field, 0, ""}, + {"ByHandleFileInformation.LastAccessTime", Field, 0, ""}, + {"ByHandleFileInformation.LastWriteTime", Field, 0, ""}, + {"ByHandleFileInformation.NumberOfLinks", Field, 0, ""}, + {"ByHandleFileInformation.VolumeSerialNumber", Field, 0, ""}, + {"BytePtrFromString", Func, 1, "func(s string) (*byte, error)"}, + {"ByteSliceFromString", Func, 1, "func(s string) ([]byte, error)"}, + {"CCR0_FLUSH", Const, 1, ""}, + {"CERT_CHAIN_POLICY_AUTHENTICODE", Const, 0, ""}, + {"CERT_CHAIN_POLICY_AUTHENTICODE_TS", Const, 0, ""}, + {"CERT_CHAIN_POLICY_BASE", Const, 0, ""}, + {"CERT_CHAIN_POLICY_BASIC_CONSTRAINTS", Const, 0, ""}, + {"CERT_CHAIN_POLICY_EV", Const, 0, ""}, + {"CERT_CHAIN_POLICY_MICROSOFT_ROOT", Const, 0, ""}, + {"CERT_CHAIN_POLICY_NT_AUTH", Const, 0, ""}, + {"CERT_CHAIN_POLICY_SSL", Const, 0, ""}, + {"CERT_E_CN_NO_MATCH", Const, 0, ""}, + {"CERT_E_EXPIRED", Const, 0, ""}, + {"CERT_E_PURPOSE", Const, 0, ""}, + {"CERT_E_ROLE", Const, 0, ""}, + {"CERT_E_UNTRUSTEDROOT", Const, 0, ""}, + {"CERT_STORE_ADD_ALWAYS", Const, 0, ""}, + {"CERT_STORE_DEFER_CLOSE_UNTIL_LAST_FREE_FLAG", Const, 0, ""}, + {"CERT_STORE_PROV_MEMORY", Const, 0, ""}, + {"CERT_TRUST_HAS_EXCLUDED_NAME_CONSTRAINT", Const, 0, ""}, + {"CERT_TRUST_HAS_NOT_DEFINED_NAME_CONSTRAINT", Const, 0, ""}, + {"CERT_TRUST_HAS_NOT_PERMITTED_NAME_CONSTRAINT", Const, 0, ""}, + {"CERT_TRUST_HAS_NOT_SUPPORTED_CRITICAL_EXT", Const, 0, ""}, + {"CERT_TRUST_HAS_NOT_SUPPORTED_NAME_CONSTRAINT", Const, 0, ""}, + {"CERT_TRUST_INVALID_BASIC_CONSTRAINTS", Const, 0, ""}, + {"CERT_TRUST_INVALID_EXTENSION", Const, 0, ""}, + {"CERT_TRUST_INVALID_NAME_CONSTRAINTS", Const, 0, ""}, + {"CERT_TRUST_INVALID_POLICY_CONSTRAINTS", Const, 0, ""}, + {"CERT_TRUST_IS_CYCLIC", Const, 0, ""}, + {"CERT_TRUST_IS_EXPLICIT_DISTRUST", Const, 0, ""}, + {"CERT_TRUST_IS_NOT_SIGNATURE_VALID", Const, 0, ""}, + {"CERT_TRUST_IS_NOT_TIME_VALID", Const, 0, ""}, + {"CERT_TRUST_IS_NOT_VALID_FOR_USAGE", Const, 0, ""}, + {"CERT_TRUST_IS_OFFLINE_REVOCATION", Const, 0, ""}, + {"CERT_TRUST_IS_REVOKED", Const, 0, ""}, + {"CERT_TRUST_IS_UNTRUSTED_ROOT", Const, 0, ""}, + {"CERT_TRUST_NO_ERROR", Const, 0, ""}, + {"CERT_TRUST_NO_ISSUANCE_CHAIN_POLICY", Const, 0, ""}, + {"CERT_TRUST_REVOCATION_STATUS_UNKNOWN", Const, 0, ""}, + {"CFLUSH", Const, 1, ""}, + {"CLOCAL", Const, 0, ""}, + {"CLONE_CHILD_CLEARTID", Const, 2, ""}, + {"CLONE_CHILD_SETTID", Const, 2, ""}, + {"CLONE_CLEAR_SIGHAND", Const, 20, ""}, + {"CLONE_CSIGNAL", Const, 3, ""}, + {"CLONE_DETACHED", Const, 2, ""}, + {"CLONE_FILES", Const, 2, ""}, + {"CLONE_FS", Const, 2, ""}, + {"CLONE_INTO_CGROUP", Const, 20, ""}, + {"CLONE_IO", Const, 2, ""}, + {"CLONE_NEWCGROUP", Const, 20, ""}, + {"CLONE_NEWIPC", Const, 2, ""}, + {"CLONE_NEWNET", Const, 2, ""}, + {"CLONE_NEWNS", Const, 2, ""}, + {"CLONE_NEWPID", Const, 2, ""}, + {"CLONE_NEWTIME", Const, 20, ""}, + {"CLONE_NEWUSER", Const, 2, ""}, + {"CLONE_NEWUTS", Const, 2, ""}, + {"CLONE_PARENT", Const, 2, ""}, + 
{"CLONE_PARENT_SETTID", Const, 2, ""}, + {"CLONE_PID", Const, 3, ""}, + {"CLONE_PIDFD", Const, 20, ""}, + {"CLONE_PTRACE", Const, 2, ""}, + {"CLONE_SETTLS", Const, 2, ""}, + {"CLONE_SIGHAND", Const, 2, ""}, + {"CLONE_SYSVSEM", Const, 2, ""}, + {"CLONE_THREAD", Const, 2, ""}, + {"CLONE_UNTRACED", Const, 2, ""}, + {"CLONE_VFORK", Const, 2, ""}, + {"CLONE_VM", Const, 2, ""}, + {"CPUID_CFLUSH", Const, 1, ""}, + {"CREAD", Const, 0, ""}, + {"CREATE_ALWAYS", Const, 0, ""}, + {"CREATE_NEW", Const, 0, ""}, + {"CREATE_NEW_PROCESS_GROUP", Const, 1, ""}, + {"CREATE_UNICODE_ENVIRONMENT", Const, 0, ""}, + {"CRYPT_DEFAULT_CONTAINER_OPTIONAL", Const, 0, ""}, + {"CRYPT_DELETEKEYSET", Const, 0, ""}, + {"CRYPT_MACHINE_KEYSET", Const, 0, ""}, + {"CRYPT_NEWKEYSET", Const, 0, ""}, + {"CRYPT_SILENT", Const, 0, ""}, + {"CRYPT_VERIFYCONTEXT", Const, 0, ""}, + {"CS5", Const, 0, ""}, + {"CS6", Const, 0, ""}, + {"CS7", Const, 0, ""}, + {"CS8", Const, 0, ""}, + {"CSIZE", Const, 0, ""}, + {"CSTART", Const, 1, ""}, + {"CSTATUS", Const, 1, ""}, + {"CSTOP", Const, 1, ""}, + {"CSTOPB", Const, 0, ""}, + {"CSUSP", Const, 1, ""}, + {"CTL_MAXNAME", Const, 0, ""}, + {"CTL_NET", Const, 0, ""}, + {"CTL_QUERY", Const, 1, ""}, + {"CTRL_BREAK_EVENT", Const, 1, ""}, + {"CTRL_CLOSE_EVENT", Const, 14, ""}, + {"CTRL_C_EVENT", Const, 1, ""}, + {"CTRL_LOGOFF_EVENT", Const, 14, ""}, + {"CTRL_SHUTDOWN_EVENT", Const, 14, ""}, + {"CancelIo", Func, 0, ""}, + {"CancelIoEx", Func, 1, ""}, + {"CertAddCertificateContextToStore", Func, 0, ""}, + {"CertChainContext", Type, 0, ""}, + {"CertChainContext.ChainCount", Field, 0, ""}, + {"CertChainContext.Chains", Field, 0, ""}, + {"CertChainContext.HasRevocationFreshnessTime", Field, 0, ""}, + {"CertChainContext.LowerQualityChainCount", Field, 0, ""}, + {"CertChainContext.LowerQualityChains", Field, 0, ""}, + {"CertChainContext.RevocationFreshnessTime", Field, 0, ""}, + {"CertChainContext.Size", Field, 0, ""}, + {"CertChainContext.TrustStatus", Field, 0, ""}, + {"CertChainElement", Type, 0, ""}, + {"CertChainElement.ApplicationUsage", Field, 0, ""}, + {"CertChainElement.CertContext", Field, 0, ""}, + {"CertChainElement.ExtendedErrorInfo", Field, 0, ""}, + {"CertChainElement.IssuanceUsage", Field, 0, ""}, + {"CertChainElement.RevocationInfo", Field, 0, ""}, + {"CertChainElement.Size", Field, 0, ""}, + {"CertChainElement.TrustStatus", Field, 0, ""}, + {"CertChainPara", Type, 0, ""}, + {"CertChainPara.CacheResync", Field, 0, ""}, + {"CertChainPara.CheckRevocationFreshnessTime", Field, 0, ""}, + {"CertChainPara.RequestedUsage", Field, 0, ""}, + {"CertChainPara.RequstedIssuancePolicy", Field, 0, ""}, + {"CertChainPara.RevocationFreshnessTime", Field, 0, ""}, + {"CertChainPara.Size", Field, 0, ""}, + {"CertChainPara.URLRetrievalTimeout", Field, 0, ""}, + {"CertChainPolicyPara", Type, 0, ""}, + {"CertChainPolicyPara.ExtraPolicyPara", Field, 0, ""}, + {"CertChainPolicyPara.Flags", Field, 0, ""}, + {"CertChainPolicyPara.Size", Field, 0, ""}, + {"CertChainPolicyStatus", Type, 0, ""}, + {"CertChainPolicyStatus.ChainIndex", Field, 0, ""}, + {"CertChainPolicyStatus.ElementIndex", Field, 0, ""}, + {"CertChainPolicyStatus.Error", Field, 0, ""}, + {"CertChainPolicyStatus.ExtraPolicyStatus", Field, 0, ""}, + {"CertChainPolicyStatus.Size", Field, 0, ""}, + {"CertCloseStore", Func, 0, ""}, + {"CertContext", Type, 0, ""}, + {"CertContext.CertInfo", Field, 0, ""}, + {"CertContext.EncodedCert", Field, 0, ""}, + {"CertContext.EncodingType", Field, 0, ""}, + {"CertContext.Length", Field, 0, ""}, + {"CertContext.Store", Field, 
0, ""}, + {"CertCreateCertificateContext", Func, 0, ""}, + {"CertEnhKeyUsage", Type, 0, ""}, + {"CertEnhKeyUsage.Length", Field, 0, ""}, + {"CertEnhKeyUsage.UsageIdentifiers", Field, 0, ""}, + {"CertEnumCertificatesInStore", Func, 0, ""}, + {"CertFreeCertificateChain", Func, 0, ""}, + {"CertFreeCertificateContext", Func, 0, ""}, + {"CertGetCertificateChain", Func, 0, ""}, + {"CertInfo", Type, 11, ""}, + {"CertOpenStore", Func, 0, ""}, + {"CertOpenSystemStore", Func, 0, ""}, + {"CertRevocationCrlInfo", Type, 11, ""}, + {"CertRevocationInfo", Type, 0, ""}, + {"CertRevocationInfo.CrlInfo", Field, 0, ""}, + {"CertRevocationInfo.FreshnessTime", Field, 0, ""}, + {"CertRevocationInfo.HasFreshnessTime", Field, 0, ""}, + {"CertRevocationInfo.OidSpecificInfo", Field, 0, ""}, + {"CertRevocationInfo.RevocationOid", Field, 0, ""}, + {"CertRevocationInfo.RevocationResult", Field, 0, ""}, + {"CertRevocationInfo.Size", Field, 0, ""}, + {"CertSimpleChain", Type, 0, ""}, + {"CertSimpleChain.Elements", Field, 0, ""}, + {"CertSimpleChain.HasRevocationFreshnessTime", Field, 0, ""}, + {"CertSimpleChain.NumElements", Field, 0, ""}, + {"CertSimpleChain.RevocationFreshnessTime", Field, 0, ""}, + {"CertSimpleChain.Size", Field, 0, ""}, + {"CertSimpleChain.TrustListInfo", Field, 0, ""}, + {"CertSimpleChain.TrustStatus", Field, 0, ""}, + {"CertTrustListInfo", Type, 11, ""}, + {"CertTrustStatus", Type, 0, ""}, + {"CertTrustStatus.ErrorStatus", Field, 0, ""}, + {"CertTrustStatus.InfoStatus", Field, 0, ""}, + {"CertUsageMatch", Type, 0, ""}, + {"CertUsageMatch.Type", Field, 0, ""}, + {"CertUsageMatch.Usage", Field, 0, ""}, + {"CertVerifyCertificateChainPolicy", Func, 0, ""}, + {"Chdir", Func, 0, "func(path string) (err error)"}, + {"CheckBpfVersion", Func, 0, ""}, + {"Chflags", Func, 0, ""}, + {"Chmod", Func, 0, "func(path string, mode uint32) (err error)"}, + {"Chown", Func, 0, "func(path string, uid int, gid int) (err error)"}, + {"Chroot", Func, 0, "func(path string) (err error)"}, + {"Clearenv", Func, 0, "func()"}, + {"Close", Func, 0, "func(fd int) (err error)"}, + {"CloseHandle", Func, 0, ""}, + {"CloseOnExec", Func, 0, "func(fd int)"}, + {"Closesocket", Func, 0, ""}, + {"CmsgLen", Func, 0, "func(datalen int) int"}, + {"CmsgSpace", Func, 0, "func(datalen int) int"}, + {"Cmsghdr", Type, 0, ""}, + {"Cmsghdr.Len", Field, 0, ""}, + {"Cmsghdr.Level", Field, 0, ""}, + {"Cmsghdr.Type", Field, 0, ""}, + {"Cmsghdr.X__cmsg_data", Field, 0, ""}, + {"CommandLineToArgv", Func, 0, ""}, + {"ComputerName", Func, 0, ""}, + {"Conn", Type, 9, ""}, + {"Connect", Func, 0, "func(fd int, sa Sockaddr) (err error)"}, + {"ConnectEx", Func, 1, ""}, + {"ConvertSidToStringSid", Func, 0, ""}, + {"ConvertStringSidToSid", Func, 0, ""}, + {"CopySid", Func, 0, ""}, + {"Creat", Func, 0, "func(path string, mode uint32) (fd int, err error)"}, + {"CreateDirectory", Func, 0, ""}, + {"CreateFile", Func, 0, ""}, + {"CreateFileMapping", Func, 0, ""}, + {"CreateHardLink", Func, 4, ""}, + {"CreateIoCompletionPort", Func, 0, ""}, + {"CreatePipe", Func, 0, ""}, + {"CreateProcess", Func, 0, ""}, + {"CreateProcessAsUser", Func, 10, ""}, + {"CreateSymbolicLink", Func, 4, ""}, + {"CreateToolhelp32Snapshot", Func, 4, ""}, + {"Credential", Type, 0, ""}, + {"Credential.Gid", Field, 0, ""}, + {"Credential.Groups", Field, 0, ""}, + {"Credential.NoSetGroups", Field, 9, ""}, + {"Credential.Uid", Field, 0, ""}, + {"CryptAcquireContext", Func, 0, ""}, + {"CryptGenRandom", Func, 0, ""}, + {"CryptReleaseContext", Func, 0, ""}, + {"DIOCBSFLUSH", Const, 1, ""}, + 
{"DIOCOSFPFLUSH", Const, 1, ""}, + {"DLL", Type, 0, ""}, + {"DLL.Handle", Field, 0, ""}, + {"DLL.Name", Field, 0, ""}, + {"DLLError", Type, 0, ""}, + {"DLLError.Err", Field, 0, ""}, + {"DLLError.Msg", Field, 0, ""}, + {"DLLError.ObjName", Field, 0, ""}, + {"DLT_A429", Const, 0, ""}, + {"DLT_A653_ICM", Const, 0, ""}, + {"DLT_AIRONET_HEADER", Const, 0, ""}, + {"DLT_AOS", Const, 1, ""}, + {"DLT_APPLE_IP_OVER_IEEE1394", Const, 0, ""}, + {"DLT_ARCNET", Const, 0, ""}, + {"DLT_ARCNET_LINUX", Const, 0, ""}, + {"DLT_ATM_CLIP", Const, 0, ""}, + {"DLT_ATM_RFC1483", Const, 0, ""}, + {"DLT_AURORA", Const, 0, ""}, + {"DLT_AX25", Const, 0, ""}, + {"DLT_AX25_KISS", Const, 0, ""}, + {"DLT_BACNET_MS_TP", Const, 0, ""}, + {"DLT_BLUETOOTH_HCI_H4", Const, 0, ""}, + {"DLT_BLUETOOTH_HCI_H4_WITH_PHDR", Const, 0, ""}, + {"DLT_CAN20B", Const, 0, ""}, + {"DLT_CAN_SOCKETCAN", Const, 1, ""}, + {"DLT_CHAOS", Const, 0, ""}, + {"DLT_CHDLC", Const, 0, ""}, + {"DLT_CISCO_IOS", Const, 0, ""}, + {"DLT_C_HDLC", Const, 0, ""}, + {"DLT_C_HDLC_WITH_DIR", Const, 0, ""}, + {"DLT_DBUS", Const, 1, ""}, + {"DLT_DECT", Const, 1, ""}, + {"DLT_DOCSIS", Const, 0, ""}, + {"DLT_DVB_CI", Const, 1, ""}, + {"DLT_ECONET", Const, 0, ""}, + {"DLT_EN10MB", Const, 0, ""}, + {"DLT_EN3MB", Const, 0, ""}, + {"DLT_ENC", Const, 0, ""}, + {"DLT_ERF", Const, 0, ""}, + {"DLT_ERF_ETH", Const, 0, ""}, + {"DLT_ERF_POS", Const, 0, ""}, + {"DLT_FC_2", Const, 1, ""}, + {"DLT_FC_2_WITH_FRAME_DELIMS", Const, 1, ""}, + {"DLT_FDDI", Const, 0, ""}, + {"DLT_FLEXRAY", Const, 0, ""}, + {"DLT_FRELAY", Const, 0, ""}, + {"DLT_FRELAY_WITH_DIR", Const, 0, ""}, + {"DLT_GCOM_SERIAL", Const, 0, ""}, + {"DLT_GCOM_T1E1", Const, 0, ""}, + {"DLT_GPF_F", Const, 0, ""}, + {"DLT_GPF_T", Const, 0, ""}, + {"DLT_GPRS_LLC", Const, 0, ""}, + {"DLT_GSMTAP_ABIS", Const, 1, ""}, + {"DLT_GSMTAP_UM", Const, 1, ""}, + {"DLT_HDLC", Const, 1, ""}, + {"DLT_HHDLC", Const, 0, ""}, + {"DLT_HIPPI", Const, 1, ""}, + {"DLT_IBM_SN", Const, 0, ""}, + {"DLT_IBM_SP", Const, 0, ""}, + {"DLT_IEEE802", Const, 0, ""}, + {"DLT_IEEE802_11", Const, 0, ""}, + {"DLT_IEEE802_11_RADIO", Const, 0, ""}, + {"DLT_IEEE802_11_RADIO_AVS", Const, 0, ""}, + {"DLT_IEEE802_15_4", Const, 0, ""}, + {"DLT_IEEE802_15_4_LINUX", Const, 0, ""}, + {"DLT_IEEE802_15_4_NOFCS", Const, 1, ""}, + {"DLT_IEEE802_15_4_NONASK_PHY", Const, 0, ""}, + {"DLT_IEEE802_16_MAC_CPS", Const, 0, ""}, + {"DLT_IEEE802_16_MAC_CPS_RADIO", Const, 0, ""}, + {"DLT_IPFILTER", Const, 0, ""}, + {"DLT_IPMB", Const, 0, ""}, + {"DLT_IPMB_LINUX", Const, 0, ""}, + {"DLT_IPNET", Const, 1, ""}, + {"DLT_IPOIB", Const, 1, ""}, + {"DLT_IPV4", Const, 1, ""}, + {"DLT_IPV6", Const, 1, ""}, + {"DLT_IP_OVER_FC", Const, 0, ""}, + {"DLT_JUNIPER_ATM1", Const, 0, ""}, + {"DLT_JUNIPER_ATM2", Const, 0, ""}, + {"DLT_JUNIPER_ATM_CEMIC", Const, 1, ""}, + {"DLT_JUNIPER_CHDLC", Const, 0, ""}, + {"DLT_JUNIPER_ES", Const, 0, ""}, + {"DLT_JUNIPER_ETHER", Const, 0, ""}, + {"DLT_JUNIPER_FIBRECHANNEL", Const, 1, ""}, + {"DLT_JUNIPER_FRELAY", Const, 0, ""}, + {"DLT_JUNIPER_GGSN", Const, 0, ""}, + {"DLT_JUNIPER_ISM", Const, 0, ""}, + {"DLT_JUNIPER_MFR", Const, 0, ""}, + {"DLT_JUNIPER_MLFR", Const, 0, ""}, + {"DLT_JUNIPER_MLPPP", Const, 0, ""}, + {"DLT_JUNIPER_MONITOR", Const, 0, ""}, + {"DLT_JUNIPER_PIC_PEER", Const, 0, ""}, + {"DLT_JUNIPER_PPP", Const, 0, ""}, + {"DLT_JUNIPER_PPPOE", Const, 0, ""}, + {"DLT_JUNIPER_PPPOE_ATM", Const, 0, ""}, + {"DLT_JUNIPER_SERVICES", Const, 0, ""}, + {"DLT_JUNIPER_SRX_E2E", Const, 1, ""}, + {"DLT_JUNIPER_ST", Const, 0, ""}, + {"DLT_JUNIPER_VP", Const, 0, ""}, + 
{"DLT_JUNIPER_VS", Const, 1, ""}, + {"DLT_LAPB_WITH_DIR", Const, 0, ""}, + {"DLT_LAPD", Const, 0, ""}, + {"DLT_LIN", Const, 0, ""}, + {"DLT_LINUX_EVDEV", Const, 1, ""}, + {"DLT_LINUX_IRDA", Const, 0, ""}, + {"DLT_LINUX_LAPD", Const, 0, ""}, + {"DLT_LINUX_PPP_WITHDIRECTION", Const, 0, ""}, + {"DLT_LINUX_SLL", Const, 0, ""}, + {"DLT_LOOP", Const, 0, ""}, + {"DLT_LTALK", Const, 0, ""}, + {"DLT_MATCHING_MAX", Const, 1, ""}, + {"DLT_MATCHING_MIN", Const, 1, ""}, + {"DLT_MFR", Const, 0, ""}, + {"DLT_MOST", Const, 0, ""}, + {"DLT_MPEG_2_TS", Const, 1, ""}, + {"DLT_MPLS", Const, 1, ""}, + {"DLT_MTP2", Const, 0, ""}, + {"DLT_MTP2_WITH_PHDR", Const, 0, ""}, + {"DLT_MTP3", Const, 0, ""}, + {"DLT_MUX27010", Const, 1, ""}, + {"DLT_NETANALYZER", Const, 1, ""}, + {"DLT_NETANALYZER_TRANSPARENT", Const, 1, ""}, + {"DLT_NFC_LLCP", Const, 1, ""}, + {"DLT_NFLOG", Const, 1, ""}, + {"DLT_NG40", Const, 1, ""}, + {"DLT_NULL", Const, 0, ""}, + {"DLT_PCI_EXP", Const, 0, ""}, + {"DLT_PFLOG", Const, 0, ""}, + {"DLT_PFSYNC", Const, 0, ""}, + {"DLT_PPI", Const, 0, ""}, + {"DLT_PPP", Const, 0, ""}, + {"DLT_PPP_BSDOS", Const, 0, ""}, + {"DLT_PPP_ETHER", Const, 0, ""}, + {"DLT_PPP_PPPD", Const, 0, ""}, + {"DLT_PPP_SERIAL", Const, 0, ""}, + {"DLT_PPP_WITH_DIR", Const, 0, ""}, + {"DLT_PPP_WITH_DIRECTION", Const, 0, ""}, + {"DLT_PRISM_HEADER", Const, 0, ""}, + {"DLT_PRONET", Const, 0, ""}, + {"DLT_RAIF1", Const, 0, ""}, + {"DLT_RAW", Const, 0, ""}, + {"DLT_RAWAF_MASK", Const, 1, ""}, + {"DLT_RIO", Const, 0, ""}, + {"DLT_SCCP", Const, 0, ""}, + {"DLT_SITA", Const, 0, ""}, + {"DLT_SLIP", Const, 0, ""}, + {"DLT_SLIP_BSDOS", Const, 0, ""}, + {"DLT_STANAG_5066_D_PDU", Const, 1, ""}, + {"DLT_SUNATM", Const, 0, ""}, + {"DLT_SYMANTEC_FIREWALL", Const, 0, ""}, + {"DLT_TZSP", Const, 0, ""}, + {"DLT_USB", Const, 0, ""}, + {"DLT_USB_LINUX", Const, 0, ""}, + {"DLT_USB_LINUX_MMAPPED", Const, 1, ""}, + {"DLT_USER0", Const, 0, ""}, + {"DLT_USER1", Const, 0, ""}, + {"DLT_USER10", Const, 0, ""}, + {"DLT_USER11", Const, 0, ""}, + {"DLT_USER12", Const, 0, ""}, + {"DLT_USER13", Const, 0, ""}, + {"DLT_USER14", Const, 0, ""}, + {"DLT_USER15", Const, 0, ""}, + {"DLT_USER2", Const, 0, ""}, + {"DLT_USER3", Const, 0, ""}, + {"DLT_USER4", Const, 0, ""}, + {"DLT_USER5", Const, 0, ""}, + {"DLT_USER6", Const, 0, ""}, + {"DLT_USER7", Const, 0, ""}, + {"DLT_USER8", Const, 0, ""}, + {"DLT_USER9", Const, 0, ""}, + {"DLT_WIHART", Const, 1, ""}, + {"DLT_X2E_SERIAL", Const, 0, ""}, + {"DLT_X2E_XORAYA", Const, 0, ""}, + {"DNSMXData", Type, 0, ""}, + {"DNSMXData.NameExchange", Field, 0, ""}, + {"DNSMXData.Pad", Field, 0, ""}, + {"DNSMXData.Preference", Field, 0, ""}, + {"DNSPTRData", Type, 0, ""}, + {"DNSPTRData.Host", Field, 0, ""}, + {"DNSRecord", Type, 0, ""}, + {"DNSRecord.Data", Field, 0, ""}, + {"DNSRecord.Dw", Field, 0, ""}, + {"DNSRecord.Length", Field, 0, ""}, + {"DNSRecord.Name", Field, 0, ""}, + {"DNSRecord.Next", Field, 0, ""}, + {"DNSRecord.Reserved", Field, 0, ""}, + {"DNSRecord.Ttl", Field, 0, ""}, + {"DNSRecord.Type", Field, 0, ""}, + {"DNSSRVData", Type, 0, ""}, + {"DNSSRVData.Pad", Field, 0, ""}, + {"DNSSRVData.Port", Field, 0, ""}, + {"DNSSRVData.Priority", Field, 0, ""}, + {"DNSSRVData.Target", Field, 0, ""}, + {"DNSSRVData.Weight", Field, 0, ""}, + {"DNSTXTData", Type, 0, ""}, + {"DNSTXTData.StringArray", Field, 0, ""}, + {"DNSTXTData.StringCount", Field, 0, ""}, + {"DNS_INFO_NO_RECORDS", Const, 4, ""}, + {"DNS_TYPE_A", Const, 0, ""}, + {"DNS_TYPE_A6", Const, 0, ""}, + {"DNS_TYPE_AAAA", Const, 0, ""}, + {"DNS_TYPE_ADDRS", Const, 0, ""}, + 
{"DNS_TYPE_AFSDB", Const, 0, ""}, + {"DNS_TYPE_ALL", Const, 0, ""}, + {"DNS_TYPE_ANY", Const, 0, ""}, + {"DNS_TYPE_ATMA", Const, 0, ""}, + {"DNS_TYPE_AXFR", Const, 0, ""}, + {"DNS_TYPE_CERT", Const, 0, ""}, + {"DNS_TYPE_CNAME", Const, 0, ""}, + {"DNS_TYPE_DHCID", Const, 0, ""}, + {"DNS_TYPE_DNAME", Const, 0, ""}, + {"DNS_TYPE_DNSKEY", Const, 0, ""}, + {"DNS_TYPE_DS", Const, 0, ""}, + {"DNS_TYPE_EID", Const, 0, ""}, + {"DNS_TYPE_GID", Const, 0, ""}, + {"DNS_TYPE_GPOS", Const, 0, ""}, + {"DNS_TYPE_HINFO", Const, 0, ""}, + {"DNS_TYPE_ISDN", Const, 0, ""}, + {"DNS_TYPE_IXFR", Const, 0, ""}, + {"DNS_TYPE_KEY", Const, 0, ""}, + {"DNS_TYPE_KX", Const, 0, ""}, + {"DNS_TYPE_LOC", Const, 0, ""}, + {"DNS_TYPE_MAILA", Const, 0, ""}, + {"DNS_TYPE_MAILB", Const, 0, ""}, + {"DNS_TYPE_MB", Const, 0, ""}, + {"DNS_TYPE_MD", Const, 0, ""}, + {"DNS_TYPE_MF", Const, 0, ""}, + {"DNS_TYPE_MG", Const, 0, ""}, + {"DNS_TYPE_MINFO", Const, 0, ""}, + {"DNS_TYPE_MR", Const, 0, ""}, + {"DNS_TYPE_MX", Const, 0, ""}, + {"DNS_TYPE_NAPTR", Const, 0, ""}, + {"DNS_TYPE_NBSTAT", Const, 0, ""}, + {"DNS_TYPE_NIMLOC", Const, 0, ""}, + {"DNS_TYPE_NS", Const, 0, ""}, + {"DNS_TYPE_NSAP", Const, 0, ""}, + {"DNS_TYPE_NSAPPTR", Const, 0, ""}, + {"DNS_TYPE_NSEC", Const, 0, ""}, + {"DNS_TYPE_NULL", Const, 0, ""}, + {"DNS_TYPE_NXT", Const, 0, ""}, + {"DNS_TYPE_OPT", Const, 0, ""}, + {"DNS_TYPE_PTR", Const, 0, ""}, + {"DNS_TYPE_PX", Const, 0, ""}, + {"DNS_TYPE_RP", Const, 0, ""}, + {"DNS_TYPE_RRSIG", Const, 0, ""}, + {"DNS_TYPE_RT", Const, 0, ""}, + {"DNS_TYPE_SIG", Const, 0, ""}, + {"DNS_TYPE_SINK", Const, 0, ""}, + {"DNS_TYPE_SOA", Const, 0, ""}, + {"DNS_TYPE_SRV", Const, 0, ""}, + {"DNS_TYPE_TEXT", Const, 0, ""}, + {"DNS_TYPE_TKEY", Const, 0, ""}, + {"DNS_TYPE_TSIG", Const, 0, ""}, + {"DNS_TYPE_UID", Const, 0, ""}, + {"DNS_TYPE_UINFO", Const, 0, ""}, + {"DNS_TYPE_UNSPEC", Const, 0, ""}, + {"DNS_TYPE_WINS", Const, 0, ""}, + {"DNS_TYPE_WINSR", Const, 0, ""}, + {"DNS_TYPE_WKS", Const, 0, ""}, + {"DNS_TYPE_X25", Const, 0, ""}, + {"DT_BLK", Const, 0, ""}, + {"DT_CHR", Const, 0, ""}, + {"DT_DIR", Const, 0, ""}, + {"DT_FIFO", Const, 0, ""}, + {"DT_LNK", Const, 0, ""}, + {"DT_REG", Const, 0, ""}, + {"DT_SOCK", Const, 0, ""}, + {"DT_UNKNOWN", Const, 0, ""}, + {"DT_WHT", Const, 0, ""}, + {"DUPLICATE_CLOSE_SOURCE", Const, 0, ""}, + {"DUPLICATE_SAME_ACCESS", Const, 0, ""}, + {"DeleteFile", Func, 0, ""}, + {"DetachLsf", Func, 0, "func(fd int) error"}, + {"DeviceIoControl", Func, 4, ""}, + {"Dirent", Type, 0, ""}, + {"Dirent.Fileno", Field, 0, ""}, + {"Dirent.Ino", Field, 0, ""}, + {"Dirent.Name", Field, 0, ""}, + {"Dirent.Namlen", Field, 0, ""}, + {"Dirent.Off", Field, 0, ""}, + {"Dirent.Pad0", Field, 12, ""}, + {"Dirent.Pad1", Field, 12, ""}, + {"Dirent.Pad_cgo_0", Field, 0, ""}, + {"Dirent.Reclen", Field, 0, ""}, + {"Dirent.Seekoff", Field, 0, ""}, + {"Dirent.Type", Field, 0, ""}, + {"Dirent.X__d_padding", Field, 3, ""}, + {"DnsNameCompare", Func, 4, ""}, + {"DnsQuery", Func, 0, ""}, + {"DnsRecordListFree", Func, 0, ""}, + {"DnsSectionAdditional", Const, 4, ""}, + {"DnsSectionAnswer", Const, 4, ""}, + {"DnsSectionAuthority", Const, 4, ""}, + {"DnsSectionQuestion", Const, 4, ""}, + {"Dup", Func, 0, "func(oldfd int) (fd int, err error)"}, + {"Dup2", Func, 0, "func(oldfd int, newfd int) (err error)"}, + {"Dup3", Func, 2, "func(oldfd int, newfd int, flags int) (err error)"}, + {"DuplicateHandle", Func, 0, ""}, + {"E2BIG", Const, 0, ""}, + {"EACCES", Const, 0, ""}, + {"EADDRINUSE", Const, 0, ""}, + {"EADDRNOTAVAIL", Const, 0, ""}, + {"EADV", Const, 0, 
""}, + {"EAFNOSUPPORT", Const, 0, ""}, + {"EAGAIN", Const, 0, ""}, + {"EALREADY", Const, 0, ""}, + {"EAUTH", Const, 0, ""}, + {"EBADARCH", Const, 0, ""}, + {"EBADE", Const, 0, ""}, + {"EBADEXEC", Const, 0, ""}, + {"EBADF", Const, 0, ""}, + {"EBADFD", Const, 0, ""}, + {"EBADMACHO", Const, 0, ""}, + {"EBADMSG", Const, 0, ""}, + {"EBADR", Const, 0, ""}, + {"EBADRPC", Const, 0, ""}, + {"EBADRQC", Const, 0, ""}, + {"EBADSLT", Const, 0, ""}, + {"EBFONT", Const, 0, ""}, + {"EBUSY", Const, 0, ""}, + {"ECANCELED", Const, 0, ""}, + {"ECAPMODE", Const, 1, ""}, + {"ECHILD", Const, 0, ""}, + {"ECHO", Const, 0, ""}, + {"ECHOCTL", Const, 0, ""}, + {"ECHOE", Const, 0, ""}, + {"ECHOK", Const, 0, ""}, + {"ECHOKE", Const, 0, ""}, + {"ECHONL", Const, 0, ""}, + {"ECHOPRT", Const, 0, ""}, + {"ECHRNG", Const, 0, ""}, + {"ECOMM", Const, 0, ""}, + {"ECONNABORTED", Const, 0, ""}, + {"ECONNREFUSED", Const, 0, ""}, + {"ECONNRESET", Const, 0, ""}, + {"EDEADLK", Const, 0, ""}, + {"EDEADLOCK", Const, 0, ""}, + {"EDESTADDRREQ", Const, 0, ""}, + {"EDEVERR", Const, 0, ""}, + {"EDOM", Const, 0, ""}, + {"EDOOFUS", Const, 0, ""}, + {"EDOTDOT", Const, 0, ""}, + {"EDQUOT", Const, 0, ""}, + {"EEXIST", Const, 0, ""}, + {"EFAULT", Const, 0, ""}, + {"EFBIG", Const, 0, ""}, + {"EFER_LMA", Const, 1, ""}, + {"EFER_LME", Const, 1, ""}, + {"EFER_NXE", Const, 1, ""}, + {"EFER_SCE", Const, 1, ""}, + {"EFTYPE", Const, 0, ""}, + {"EHOSTDOWN", Const, 0, ""}, + {"EHOSTUNREACH", Const, 0, ""}, + {"EHWPOISON", Const, 0, ""}, + {"EIDRM", Const, 0, ""}, + {"EILSEQ", Const, 0, ""}, + {"EINPROGRESS", Const, 0, ""}, + {"EINTR", Const, 0, ""}, + {"EINVAL", Const, 0, ""}, + {"EIO", Const, 0, ""}, + {"EIPSEC", Const, 1, ""}, + {"EISCONN", Const, 0, ""}, + {"EISDIR", Const, 0, ""}, + {"EISNAM", Const, 0, ""}, + {"EKEYEXPIRED", Const, 0, ""}, + {"EKEYREJECTED", Const, 0, ""}, + {"EKEYREVOKED", Const, 0, ""}, + {"EL2HLT", Const, 0, ""}, + {"EL2NSYNC", Const, 0, ""}, + {"EL3HLT", Const, 0, ""}, + {"EL3RST", Const, 0, ""}, + {"ELAST", Const, 0, ""}, + {"ELF_NGREG", Const, 0, ""}, + {"ELF_PRARGSZ", Const, 0, ""}, + {"ELIBACC", Const, 0, ""}, + {"ELIBBAD", Const, 0, ""}, + {"ELIBEXEC", Const, 0, ""}, + {"ELIBMAX", Const, 0, ""}, + {"ELIBSCN", Const, 0, ""}, + {"ELNRNG", Const, 0, ""}, + {"ELOOP", Const, 0, ""}, + {"EMEDIUMTYPE", Const, 0, ""}, + {"EMFILE", Const, 0, ""}, + {"EMLINK", Const, 0, ""}, + {"EMSGSIZE", Const, 0, ""}, + {"EMT_TAGOVF", Const, 1, ""}, + {"EMULTIHOP", Const, 0, ""}, + {"EMUL_ENABLED", Const, 1, ""}, + {"EMUL_LINUX", Const, 1, ""}, + {"EMUL_LINUX32", Const, 1, ""}, + {"EMUL_MAXID", Const, 1, ""}, + {"EMUL_NATIVE", Const, 1, ""}, + {"ENAMETOOLONG", Const, 0, ""}, + {"ENAVAIL", Const, 0, ""}, + {"ENDRUNDISC", Const, 1, ""}, + {"ENEEDAUTH", Const, 0, ""}, + {"ENETDOWN", Const, 0, ""}, + {"ENETRESET", Const, 0, ""}, + {"ENETUNREACH", Const, 0, ""}, + {"ENFILE", Const, 0, ""}, + {"ENOANO", Const, 0, ""}, + {"ENOATTR", Const, 0, ""}, + {"ENOBUFS", Const, 0, ""}, + {"ENOCSI", Const, 0, ""}, + {"ENODATA", Const, 0, ""}, + {"ENODEV", Const, 0, ""}, + {"ENOENT", Const, 0, ""}, + {"ENOEXEC", Const, 0, ""}, + {"ENOKEY", Const, 0, ""}, + {"ENOLCK", Const, 0, ""}, + {"ENOLINK", Const, 0, ""}, + {"ENOMEDIUM", Const, 0, ""}, + {"ENOMEM", Const, 0, ""}, + {"ENOMSG", Const, 0, ""}, + {"ENONET", Const, 0, ""}, + {"ENOPKG", Const, 0, ""}, + {"ENOPOLICY", Const, 0, ""}, + {"ENOPROTOOPT", Const, 0, ""}, + {"ENOSPC", Const, 0, ""}, + {"ENOSR", Const, 0, ""}, + {"ENOSTR", Const, 0, ""}, + {"ENOSYS", Const, 0, ""}, + {"ENOTBLK", Const, 0, ""}, + {"ENOTCAPABLE", 
Const, 0, ""}, + {"ENOTCONN", Const, 0, ""}, + {"ENOTDIR", Const, 0, ""}, + {"ENOTEMPTY", Const, 0, ""}, + {"ENOTNAM", Const, 0, ""}, + {"ENOTRECOVERABLE", Const, 0, ""}, + {"ENOTSOCK", Const, 0, ""}, + {"ENOTSUP", Const, 0, ""}, + {"ENOTTY", Const, 0, ""}, + {"ENOTUNIQ", Const, 0, ""}, + {"ENXIO", Const, 0, ""}, + {"EN_SW_CTL_INF", Const, 1, ""}, + {"EN_SW_CTL_PREC", Const, 1, ""}, + {"EN_SW_CTL_ROUND", Const, 1, ""}, + {"EN_SW_DATACHAIN", Const, 1, ""}, + {"EN_SW_DENORM", Const, 1, ""}, + {"EN_SW_INVOP", Const, 1, ""}, + {"EN_SW_OVERFLOW", Const, 1, ""}, + {"EN_SW_PRECLOSS", Const, 1, ""}, + {"EN_SW_UNDERFLOW", Const, 1, ""}, + {"EN_SW_ZERODIV", Const, 1, ""}, + {"EOPNOTSUPP", Const, 0, ""}, + {"EOVERFLOW", Const, 0, ""}, + {"EOWNERDEAD", Const, 0, ""}, + {"EPERM", Const, 0, ""}, + {"EPFNOSUPPORT", Const, 0, ""}, + {"EPIPE", Const, 0, ""}, + {"EPOLLERR", Const, 0, ""}, + {"EPOLLET", Const, 0, ""}, + {"EPOLLHUP", Const, 0, ""}, + {"EPOLLIN", Const, 0, ""}, + {"EPOLLMSG", Const, 0, ""}, + {"EPOLLONESHOT", Const, 0, ""}, + {"EPOLLOUT", Const, 0, ""}, + {"EPOLLPRI", Const, 0, ""}, + {"EPOLLRDBAND", Const, 0, ""}, + {"EPOLLRDHUP", Const, 0, ""}, + {"EPOLLRDNORM", Const, 0, ""}, + {"EPOLLWRBAND", Const, 0, ""}, + {"EPOLLWRNORM", Const, 0, ""}, + {"EPOLL_CLOEXEC", Const, 0, ""}, + {"EPOLL_CTL_ADD", Const, 0, ""}, + {"EPOLL_CTL_DEL", Const, 0, ""}, + {"EPOLL_CTL_MOD", Const, 0, ""}, + {"EPOLL_NONBLOCK", Const, 0, ""}, + {"EPROCLIM", Const, 0, ""}, + {"EPROCUNAVAIL", Const, 0, ""}, + {"EPROGMISMATCH", Const, 0, ""}, + {"EPROGUNAVAIL", Const, 0, ""}, + {"EPROTO", Const, 0, ""}, + {"EPROTONOSUPPORT", Const, 0, ""}, + {"EPROTOTYPE", Const, 0, ""}, + {"EPWROFF", Const, 0, ""}, + {"EQFULL", Const, 16, ""}, + {"ERANGE", Const, 0, ""}, + {"EREMCHG", Const, 0, ""}, + {"EREMOTE", Const, 0, ""}, + {"EREMOTEIO", Const, 0, ""}, + {"ERESTART", Const, 0, ""}, + {"ERFKILL", Const, 0, ""}, + {"EROFS", Const, 0, ""}, + {"ERPCMISMATCH", Const, 0, ""}, + {"ERROR_ACCESS_DENIED", Const, 0, ""}, + {"ERROR_ALREADY_EXISTS", Const, 0, ""}, + {"ERROR_BROKEN_PIPE", Const, 0, ""}, + {"ERROR_BUFFER_OVERFLOW", Const, 0, ""}, + {"ERROR_DIR_NOT_EMPTY", Const, 8, ""}, + {"ERROR_ENVVAR_NOT_FOUND", Const, 0, ""}, + {"ERROR_FILE_EXISTS", Const, 0, ""}, + {"ERROR_FILE_NOT_FOUND", Const, 0, ""}, + {"ERROR_HANDLE_EOF", Const, 2, ""}, + {"ERROR_INSUFFICIENT_BUFFER", Const, 0, ""}, + {"ERROR_IO_PENDING", Const, 0, ""}, + {"ERROR_MOD_NOT_FOUND", Const, 0, ""}, + {"ERROR_MORE_DATA", Const, 3, ""}, + {"ERROR_NETNAME_DELETED", Const, 3, ""}, + {"ERROR_NOT_FOUND", Const, 1, ""}, + {"ERROR_NO_MORE_FILES", Const, 0, ""}, + {"ERROR_OPERATION_ABORTED", Const, 0, ""}, + {"ERROR_PATH_NOT_FOUND", Const, 0, ""}, + {"ERROR_PRIVILEGE_NOT_HELD", Const, 4, ""}, + {"ERROR_PROC_NOT_FOUND", Const, 0, ""}, + {"ESHLIBVERS", Const, 0, ""}, + {"ESHUTDOWN", Const, 0, ""}, + {"ESOCKTNOSUPPORT", Const, 0, ""}, + {"ESPIPE", Const, 0, ""}, + {"ESRCH", Const, 0, ""}, + {"ESRMNT", Const, 0, ""}, + {"ESTALE", Const, 0, ""}, + {"ESTRPIPE", Const, 0, ""}, + {"ETHERCAP_JUMBO_MTU", Const, 1, ""}, + {"ETHERCAP_VLAN_HWTAGGING", Const, 1, ""}, + {"ETHERCAP_VLAN_MTU", Const, 1, ""}, + {"ETHERMIN", Const, 1, ""}, + {"ETHERMTU", Const, 1, ""}, + {"ETHERMTU_JUMBO", Const, 1, ""}, + {"ETHERTYPE_8023", Const, 1, ""}, + {"ETHERTYPE_AARP", Const, 1, ""}, + {"ETHERTYPE_ACCTON", Const, 1, ""}, + {"ETHERTYPE_AEONIC", Const, 1, ""}, + {"ETHERTYPE_ALPHA", Const, 1, ""}, + {"ETHERTYPE_AMBER", Const, 1, ""}, + {"ETHERTYPE_AMOEBA", Const, 1, ""}, + {"ETHERTYPE_AOE", Const, 1, ""}, + 
{"ETHERTYPE_APOLLO", Const, 1, ""}, + {"ETHERTYPE_APOLLODOMAIN", Const, 1, ""}, + {"ETHERTYPE_APPLETALK", Const, 1, ""}, + {"ETHERTYPE_APPLITEK", Const, 1, ""}, + {"ETHERTYPE_ARGONAUT", Const, 1, ""}, + {"ETHERTYPE_ARP", Const, 1, ""}, + {"ETHERTYPE_AT", Const, 1, ""}, + {"ETHERTYPE_ATALK", Const, 1, ""}, + {"ETHERTYPE_ATOMIC", Const, 1, ""}, + {"ETHERTYPE_ATT", Const, 1, ""}, + {"ETHERTYPE_ATTSTANFORD", Const, 1, ""}, + {"ETHERTYPE_AUTOPHON", Const, 1, ""}, + {"ETHERTYPE_AXIS", Const, 1, ""}, + {"ETHERTYPE_BCLOOP", Const, 1, ""}, + {"ETHERTYPE_BOFL", Const, 1, ""}, + {"ETHERTYPE_CABLETRON", Const, 1, ""}, + {"ETHERTYPE_CHAOS", Const, 1, ""}, + {"ETHERTYPE_COMDESIGN", Const, 1, ""}, + {"ETHERTYPE_COMPUGRAPHIC", Const, 1, ""}, + {"ETHERTYPE_COUNTERPOINT", Const, 1, ""}, + {"ETHERTYPE_CRONUS", Const, 1, ""}, + {"ETHERTYPE_CRONUSVLN", Const, 1, ""}, + {"ETHERTYPE_DCA", Const, 1, ""}, + {"ETHERTYPE_DDE", Const, 1, ""}, + {"ETHERTYPE_DEBNI", Const, 1, ""}, + {"ETHERTYPE_DECAM", Const, 1, ""}, + {"ETHERTYPE_DECCUST", Const, 1, ""}, + {"ETHERTYPE_DECDIAG", Const, 1, ""}, + {"ETHERTYPE_DECDNS", Const, 1, ""}, + {"ETHERTYPE_DECDTS", Const, 1, ""}, + {"ETHERTYPE_DECEXPER", Const, 1, ""}, + {"ETHERTYPE_DECLAST", Const, 1, ""}, + {"ETHERTYPE_DECLTM", Const, 1, ""}, + {"ETHERTYPE_DECMUMPS", Const, 1, ""}, + {"ETHERTYPE_DECNETBIOS", Const, 1, ""}, + {"ETHERTYPE_DELTACON", Const, 1, ""}, + {"ETHERTYPE_DIDDLE", Const, 1, ""}, + {"ETHERTYPE_DLOG1", Const, 1, ""}, + {"ETHERTYPE_DLOG2", Const, 1, ""}, + {"ETHERTYPE_DN", Const, 1, ""}, + {"ETHERTYPE_DOGFIGHT", Const, 1, ""}, + {"ETHERTYPE_DSMD", Const, 1, ""}, + {"ETHERTYPE_ECMA", Const, 1, ""}, + {"ETHERTYPE_ENCRYPT", Const, 1, ""}, + {"ETHERTYPE_ES", Const, 1, ""}, + {"ETHERTYPE_EXCELAN", Const, 1, ""}, + {"ETHERTYPE_EXPERDATA", Const, 1, ""}, + {"ETHERTYPE_FLIP", Const, 1, ""}, + {"ETHERTYPE_FLOWCONTROL", Const, 1, ""}, + {"ETHERTYPE_FRARP", Const, 1, ""}, + {"ETHERTYPE_GENDYN", Const, 1, ""}, + {"ETHERTYPE_HAYES", Const, 1, ""}, + {"ETHERTYPE_HIPPI_FP", Const, 1, ""}, + {"ETHERTYPE_HITACHI", Const, 1, ""}, + {"ETHERTYPE_HP", Const, 1, ""}, + {"ETHERTYPE_IEEEPUP", Const, 1, ""}, + {"ETHERTYPE_IEEEPUPAT", Const, 1, ""}, + {"ETHERTYPE_IMLBL", Const, 1, ""}, + {"ETHERTYPE_IMLBLDIAG", Const, 1, ""}, + {"ETHERTYPE_IP", Const, 1, ""}, + {"ETHERTYPE_IPAS", Const, 1, ""}, + {"ETHERTYPE_IPV6", Const, 1, ""}, + {"ETHERTYPE_IPX", Const, 1, ""}, + {"ETHERTYPE_IPXNEW", Const, 1, ""}, + {"ETHERTYPE_KALPANA", Const, 1, ""}, + {"ETHERTYPE_LANBRIDGE", Const, 1, ""}, + {"ETHERTYPE_LANPROBE", Const, 1, ""}, + {"ETHERTYPE_LAT", Const, 1, ""}, + {"ETHERTYPE_LBACK", Const, 1, ""}, + {"ETHERTYPE_LITTLE", Const, 1, ""}, + {"ETHERTYPE_LLDP", Const, 1, ""}, + {"ETHERTYPE_LOGICRAFT", Const, 1, ""}, + {"ETHERTYPE_LOOPBACK", Const, 1, ""}, + {"ETHERTYPE_MATRA", Const, 1, ""}, + {"ETHERTYPE_MAX", Const, 1, ""}, + {"ETHERTYPE_MERIT", Const, 1, ""}, + {"ETHERTYPE_MICP", Const, 1, ""}, + {"ETHERTYPE_MOPDL", Const, 1, ""}, + {"ETHERTYPE_MOPRC", Const, 1, ""}, + {"ETHERTYPE_MOTOROLA", Const, 1, ""}, + {"ETHERTYPE_MPLS", Const, 1, ""}, + {"ETHERTYPE_MPLS_MCAST", Const, 1, ""}, + {"ETHERTYPE_MUMPS", Const, 1, ""}, + {"ETHERTYPE_NBPCC", Const, 1, ""}, + {"ETHERTYPE_NBPCLAIM", Const, 1, ""}, + {"ETHERTYPE_NBPCLREQ", Const, 1, ""}, + {"ETHERTYPE_NBPCLRSP", Const, 1, ""}, + {"ETHERTYPE_NBPCREQ", Const, 1, ""}, + {"ETHERTYPE_NBPCRSP", Const, 1, ""}, + {"ETHERTYPE_NBPDG", Const, 1, ""}, + {"ETHERTYPE_NBPDGB", Const, 1, ""}, + {"ETHERTYPE_NBPDLTE", Const, 1, ""}, + {"ETHERTYPE_NBPRAR", Const, 1, 
""}, + {"ETHERTYPE_NBPRAS", Const, 1, ""}, + {"ETHERTYPE_NBPRST", Const, 1, ""}, + {"ETHERTYPE_NBPSCD", Const, 1, ""}, + {"ETHERTYPE_NBPVCD", Const, 1, ""}, + {"ETHERTYPE_NBS", Const, 1, ""}, + {"ETHERTYPE_NCD", Const, 1, ""}, + {"ETHERTYPE_NESTAR", Const, 1, ""}, + {"ETHERTYPE_NETBEUI", Const, 1, ""}, + {"ETHERTYPE_NOVELL", Const, 1, ""}, + {"ETHERTYPE_NS", Const, 1, ""}, + {"ETHERTYPE_NSAT", Const, 1, ""}, + {"ETHERTYPE_NSCOMPAT", Const, 1, ""}, + {"ETHERTYPE_NTRAILER", Const, 1, ""}, + {"ETHERTYPE_OS9", Const, 1, ""}, + {"ETHERTYPE_OS9NET", Const, 1, ""}, + {"ETHERTYPE_PACER", Const, 1, ""}, + {"ETHERTYPE_PAE", Const, 1, ""}, + {"ETHERTYPE_PCS", Const, 1, ""}, + {"ETHERTYPE_PLANNING", Const, 1, ""}, + {"ETHERTYPE_PPP", Const, 1, ""}, + {"ETHERTYPE_PPPOE", Const, 1, ""}, + {"ETHERTYPE_PPPOEDISC", Const, 1, ""}, + {"ETHERTYPE_PRIMENTS", Const, 1, ""}, + {"ETHERTYPE_PUP", Const, 1, ""}, + {"ETHERTYPE_PUPAT", Const, 1, ""}, + {"ETHERTYPE_QINQ", Const, 1, ""}, + {"ETHERTYPE_RACAL", Const, 1, ""}, + {"ETHERTYPE_RATIONAL", Const, 1, ""}, + {"ETHERTYPE_RAWFR", Const, 1, ""}, + {"ETHERTYPE_RCL", Const, 1, ""}, + {"ETHERTYPE_RDP", Const, 1, ""}, + {"ETHERTYPE_RETIX", Const, 1, ""}, + {"ETHERTYPE_REVARP", Const, 1, ""}, + {"ETHERTYPE_SCA", Const, 1, ""}, + {"ETHERTYPE_SECTRA", Const, 1, ""}, + {"ETHERTYPE_SECUREDATA", Const, 1, ""}, + {"ETHERTYPE_SGITW", Const, 1, ""}, + {"ETHERTYPE_SG_BOUNCE", Const, 1, ""}, + {"ETHERTYPE_SG_DIAG", Const, 1, ""}, + {"ETHERTYPE_SG_NETGAMES", Const, 1, ""}, + {"ETHERTYPE_SG_RESV", Const, 1, ""}, + {"ETHERTYPE_SIMNET", Const, 1, ""}, + {"ETHERTYPE_SLOW", Const, 1, ""}, + {"ETHERTYPE_SLOWPROTOCOLS", Const, 1, ""}, + {"ETHERTYPE_SNA", Const, 1, ""}, + {"ETHERTYPE_SNMP", Const, 1, ""}, + {"ETHERTYPE_SONIX", Const, 1, ""}, + {"ETHERTYPE_SPIDER", Const, 1, ""}, + {"ETHERTYPE_SPRITE", Const, 1, ""}, + {"ETHERTYPE_STP", Const, 1, ""}, + {"ETHERTYPE_TALARIS", Const, 1, ""}, + {"ETHERTYPE_TALARISMC", Const, 1, ""}, + {"ETHERTYPE_TCPCOMP", Const, 1, ""}, + {"ETHERTYPE_TCPSM", Const, 1, ""}, + {"ETHERTYPE_TEC", Const, 1, ""}, + {"ETHERTYPE_TIGAN", Const, 1, ""}, + {"ETHERTYPE_TRAIL", Const, 1, ""}, + {"ETHERTYPE_TRANSETHER", Const, 1, ""}, + {"ETHERTYPE_TYMSHARE", Const, 1, ""}, + {"ETHERTYPE_UBBST", Const, 1, ""}, + {"ETHERTYPE_UBDEBUG", Const, 1, ""}, + {"ETHERTYPE_UBDIAGLOOP", Const, 1, ""}, + {"ETHERTYPE_UBDL", Const, 1, ""}, + {"ETHERTYPE_UBNIU", Const, 1, ""}, + {"ETHERTYPE_UBNMC", Const, 1, ""}, + {"ETHERTYPE_VALID", Const, 1, ""}, + {"ETHERTYPE_VARIAN", Const, 1, ""}, + {"ETHERTYPE_VAXELN", Const, 1, ""}, + {"ETHERTYPE_VEECO", Const, 1, ""}, + {"ETHERTYPE_VEXP", Const, 1, ""}, + {"ETHERTYPE_VGLAB", Const, 1, ""}, + {"ETHERTYPE_VINES", Const, 1, ""}, + {"ETHERTYPE_VINESECHO", Const, 1, ""}, + {"ETHERTYPE_VINESLOOP", Const, 1, ""}, + {"ETHERTYPE_VITAL", Const, 1, ""}, + {"ETHERTYPE_VLAN", Const, 1, ""}, + {"ETHERTYPE_VLTLMAN", Const, 1, ""}, + {"ETHERTYPE_VPROD", Const, 1, ""}, + {"ETHERTYPE_VURESERVED", Const, 1, ""}, + {"ETHERTYPE_WATERLOO", Const, 1, ""}, + {"ETHERTYPE_WELLFLEET", Const, 1, ""}, + {"ETHERTYPE_X25", Const, 1, ""}, + {"ETHERTYPE_X75", Const, 1, ""}, + {"ETHERTYPE_XNSSM", Const, 1, ""}, + {"ETHERTYPE_XTP", Const, 1, ""}, + {"ETHER_ADDR_LEN", Const, 1, ""}, + {"ETHER_ALIGN", Const, 1, ""}, + {"ETHER_CRC_LEN", Const, 1, ""}, + {"ETHER_CRC_POLY_BE", Const, 1, ""}, + {"ETHER_CRC_POLY_LE", Const, 1, ""}, + {"ETHER_HDR_LEN", Const, 1, ""}, + {"ETHER_MAX_DIX_LEN", Const, 1, ""}, + {"ETHER_MAX_LEN", Const, 1, ""}, + {"ETHER_MAX_LEN_JUMBO", Const, 1, ""}, + 
{"ETHER_MIN_LEN", Const, 1, ""}, + {"ETHER_PPPOE_ENCAP_LEN", Const, 1, ""}, + {"ETHER_TYPE_LEN", Const, 1, ""}, + {"ETHER_VLAN_ENCAP_LEN", Const, 1, ""}, + {"ETH_P_1588", Const, 0, ""}, + {"ETH_P_8021Q", Const, 0, ""}, + {"ETH_P_802_2", Const, 0, ""}, + {"ETH_P_802_3", Const, 0, ""}, + {"ETH_P_AARP", Const, 0, ""}, + {"ETH_P_ALL", Const, 0, ""}, + {"ETH_P_AOE", Const, 0, ""}, + {"ETH_P_ARCNET", Const, 0, ""}, + {"ETH_P_ARP", Const, 0, ""}, + {"ETH_P_ATALK", Const, 0, ""}, + {"ETH_P_ATMFATE", Const, 0, ""}, + {"ETH_P_ATMMPOA", Const, 0, ""}, + {"ETH_P_AX25", Const, 0, ""}, + {"ETH_P_BPQ", Const, 0, ""}, + {"ETH_P_CAIF", Const, 0, ""}, + {"ETH_P_CAN", Const, 0, ""}, + {"ETH_P_CONTROL", Const, 0, ""}, + {"ETH_P_CUST", Const, 0, ""}, + {"ETH_P_DDCMP", Const, 0, ""}, + {"ETH_P_DEC", Const, 0, ""}, + {"ETH_P_DIAG", Const, 0, ""}, + {"ETH_P_DNA_DL", Const, 0, ""}, + {"ETH_P_DNA_RC", Const, 0, ""}, + {"ETH_P_DNA_RT", Const, 0, ""}, + {"ETH_P_DSA", Const, 0, ""}, + {"ETH_P_ECONET", Const, 0, ""}, + {"ETH_P_EDSA", Const, 0, ""}, + {"ETH_P_FCOE", Const, 0, ""}, + {"ETH_P_FIP", Const, 0, ""}, + {"ETH_P_HDLC", Const, 0, ""}, + {"ETH_P_IEEE802154", Const, 0, ""}, + {"ETH_P_IEEEPUP", Const, 0, ""}, + {"ETH_P_IEEEPUPAT", Const, 0, ""}, + {"ETH_P_IP", Const, 0, ""}, + {"ETH_P_IPV6", Const, 0, ""}, + {"ETH_P_IPX", Const, 0, ""}, + {"ETH_P_IRDA", Const, 0, ""}, + {"ETH_P_LAT", Const, 0, ""}, + {"ETH_P_LINK_CTL", Const, 0, ""}, + {"ETH_P_LOCALTALK", Const, 0, ""}, + {"ETH_P_LOOP", Const, 0, ""}, + {"ETH_P_MOBITEX", Const, 0, ""}, + {"ETH_P_MPLS_MC", Const, 0, ""}, + {"ETH_P_MPLS_UC", Const, 0, ""}, + {"ETH_P_PAE", Const, 0, ""}, + {"ETH_P_PAUSE", Const, 0, ""}, + {"ETH_P_PHONET", Const, 0, ""}, + {"ETH_P_PPPTALK", Const, 0, ""}, + {"ETH_P_PPP_DISC", Const, 0, ""}, + {"ETH_P_PPP_MP", Const, 0, ""}, + {"ETH_P_PPP_SES", Const, 0, ""}, + {"ETH_P_PUP", Const, 0, ""}, + {"ETH_P_PUPAT", Const, 0, ""}, + {"ETH_P_RARP", Const, 0, ""}, + {"ETH_P_SCA", Const, 0, ""}, + {"ETH_P_SLOW", Const, 0, ""}, + {"ETH_P_SNAP", Const, 0, ""}, + {"ETH_P_TEB", Const, 0, ""}, + {"ETH_P_TIPC", Const, 0, ""}, + {"ETH_P_TRAILER", Const, 0, ""}, + {"ETH_P_TR_802_2", Const, 0, ""}, + {"ETH_P_WAN_PPP", Const, 0, ""}, + {"ETH_P_WCCP", Const, 0, ""}, + {"ETH_P_X25", Const, 0, ""}, + {"ETIME", Const, 0, ""}, + {"ETIMEDOUT", Const, 0, ""}, + {"ETOOMANYREFS", Const, 0, ""}, + {"ETXTBSY", Const, 0, ""}, + {"EUCLEAN", Const, 0, ""}, + {"EUNATCH", Const, 0, ""}, + {"EUSERS", Const, 0, ""}, + {"EVFILT_AIO", Const, 0, ""}, + {"EVFILT_FS", Const, 0, ""}, + {"EVFILT_LIO", Const, 0, ""}, + {"EVFILT_MACHPORT", Const, 0, ""}, + {"EVFILT_PROC", Const, 0, ""}, + {"EVFILT_READ", Const, 0, ""}, + {"EVFILT_SIGNAL", Const, 0, ""}, + {"EVFILT_SYSCOUNT", Const, 0, ""}, + {"EVFILT_THREADMARKER", Const, 0, ""}, + {"EVFILT_TIMER", Const, 0, ""}, + {"EVFILT_USER", Const, 0, ""}, + {"EVFILT_VM", Const, 0, ""}, + {"EVFILT_VNODE", Const, 0, ""}, + {"EVFILT_WRITE", Const, 0, ""}, + {"EV_ADD", Const, 0, ""}, + {"EV_CLEAR", Const, 0, ""}, + {"EV_DELETE", Const, 0, ""}, + {"EV_DISABLE", Const, 0, ""}, + {"EV_DISPATCH", Const, 0, ""}, + {"EV_DROP", Const, 3, ""}, + {"EV_ENABLE", Const, 0, ""}, + {"EV_EOF", Const, 0, ""}, + {"EV_ERROR", Const, 0, ""}, + {"EV_FLAG0", Const, 0, ""}, + {"EV_FLAG1", Const, 0, ""}, + {"EV_ONESHOT", Const, 0, ""}, + {"EV_OOBAND", Const, 0, ""}, + {"EV_POLL", Const, 0, ""}, + {"EV_RECEIPT", Const, 0, ""}, + {"EV_SYSFLAGS", Const, 0, ""}, + {"EWINDOWS", Const, 0, ""}, + {"EWOULDBLOCK", Const, 0, ""}, + {"EXDEV", Const, 0, ""}, + {"EXFULL", Const, 0, 
""}, + {"EXTA", Const, 0, ""}, + {"EXTB", Const, 0, ""}, + {"EXTPROC", Const, 0, ""}, + {"Environ", Func, 0, "func() []string"}, + {"EpollCreate", Func, 0, "func(size int) (fd int, err error)"}, + {"EpollCreate1", Func, 0, "func(flag int) (fd int, err error)"}, + {"EpollCtl", Func, 0, "func(epfd int, op int, fd int, event *EpollEvent) (err error)"}, + {"EpollEvent", Type, 0, ""}, + {"EpollEvent.Events", Field, 0, ""}, + {"EpollEvent.Fd", Field, 0, ""}, + {"EpollEvent.Pad", Field, 0, ""}, + {"EpollEvent.PadFd", Field, 0, ""}, + {"EpollWait", Func, 0, "func(epfd int, events []EpollEvent, msec int) (n int, err error)"}, + {"Errno", Type, 0, ""}, + {"EscapeArg", Func, 0, ""}, + {"Exchangedata", Func, 0, ""}, + {"Exec", Func, 0, "func(argv0 string, argv []string, envv []string) (err error)"}, + {"Exit", Func, 0, "func(code int)"}, + {"ExitProcess", Func, 0, ""}, + {"FD_CLOEXEC", Const, 0, ""}, + {"FD_SETSIZE", Const, 0, ""}, + {"FILE_ACTION_ADDED", Const, 0, ""}, + {"FILE_ACTION_MODIFIED", Const, 0, ""}, + {"FILE_ACTION_REMOVED", Const, 0, ""}, + {"FILE_ACTION_RENAMED_NEW_NAME", Const, 0, ""}, + {"FILE_ACTION_RENAMED_OLD_NAME", Const, 0, ""}, + {"FILE_APPEND_DATA", Const, 0, ""}, + {"FILE_ATTRIBUTE_ARCHIVE", Const, 0, ""}, + {"FILE_ATTRIBUTE_DIRECTORY", Const, 0, ""}, + {"FILE_ATTRIBUTE_HIDDEN", Const, 0, ""}, + {"FILE_ATTRIBUTE_NORMAL", Const, 0, ""}, + {"FILE_ATTRIBUTE_READONLY", Const, 0, ""}, + {"FILE_ATTRIBUTE_REPARSE_POINT", Const, 4, ""}, + {"FILE_ATTRIBUTE_SYSTEM", Const, 0, ""}, + {"FILE_BEGIN", Const, 0, ""}, + {"FILE_CURRENT", Const, 0, ""}, + {"FILE_END", Const, 0, ""}, + {"FILE_FLAG_BACKUP_SEMANTICS", Const, 0, ""}, + {"FILE_FLAG_OPEN_REPARSE_POINT", Const, 4, ""}, + {"FILE_FLAG_OVERLAPPED", Const, 0, ""}, + {"FILE_LIST_DIRECTORY", Const, 0, ""}, + {"FILE_MAP_COPY", Const, 0, ""}, + {"FILE_MAP_EXECUTE", Const, 0, ""}, + {"FILE_MAP_READ", Const, 0, ""}, + {"FILE_MAP_WRITE", Const, 0, ""}, + {"FILE_NOTIFY_CHANGE_ATTRIBUTES", Const, 0, ""}, + {"FILE_NOTIFY_CHANGE_CREATION", Const, 0, ""}, + {"FILE_NOTIFY_CHANGE_DIR_NAME", Const, 0, ""}, + {"FILE_NOTIFY_CHANGE_FILE_NAME", Const, 0, ""}, + {"FILE_NOTIFY_CHANGE_LAST_ACCESS", Const, 0, ""}, + {"FILE_NOTIFY_CHANGE_LAST_WRITE", Const, 0, ""}, + {"FILE_NOTIFY_CHANGE_SIZE", Const, 0, ""}, + {"FILE_SHARE_DELETE", Const, 0, ""}, + {"FILE_SHARE_READ", Const, 0, ""}, + {"FILE_SHARE_WRITE", Const, 0, ""}, + {"FILE_SKIP_COMPLETION_PORT_ON_SUCCESS", Const, 2, ""}, + {"FILE_SKIP_SET_EVENT_ON_HANDLE", Const, 2, ""}, + {"FILE_TYPE_CHAR", Const, 0, ""}, + {"FILE_TYPE_DISK", Const, 0, ""}, + {"FILE_TYPE_PIPE", Const, 0, ""}, + {"FILE_TYPE_REMOTE", Const, 0, ""}, + {"FILE_TYPE_UNKNOWN", Const, 0, ""}, + {"FILE_WRITE_ATTRIBUTES", Const, 0, ""}, + {"FLUSHO", Const, 0, ""}, + {"FORMAT_MESSAGE_ALLOCATE_BUFFER", Const, 0, ""}, + {"FORMAT_MESSAGE_ARGUMENT_ARRAY", Const, 0, ""}, + {"FORMAT_MESSAGE_FROM_HMODULE", Const, 0, ""}, + {"FORMAT_MESSAGE_FROM_STRING", Const, 0, ""}, + {"FORMAT_MESSAGE_FROM_SYSTEM", Const, 0, ""}, + {"FORMAT_MESSAGE_IGNORE_INSERTS", Const, 0, ""}, + {"FORMAT_MESSAGE_MAX_WIDTH_MASK", Const, 0, ""}, + {"FSCTL_GET_REPARSE_POINT", Const, 4, ""}, + {"F_ADDFILESIGS", Const, 0, ""}, + {"F_ADDSIGS", Const, 0, ""}, + {"F_ALLOCATEALL", Const, 0, ""}, + {"F_ALLOCATECONTIG", Const, 0, ""}, + {"F_CANCEL", Const, 0, ""}, + {"F_CHKCLEAN", Const, 0, ""}, + {"F_CLOSEM", Const, 1, ""}, + {"F_DUP2FD", Const, 0, ""}, + {"F_DUP2FD_CLOEXEC", Const, 1, ""}, + {"F_DUPFD", Const, 0, ""}, + {"F_DUPFD_CLOEXEC", Const, 0, ""}, + {"F_EXLCK", Const, 0, ""}, + 
{"F_FINDSIGS", Const, 16, ""}, + {"F_FLUSH_DATA", Const, 0, ""}, + {"F_FREEZE_FS", Const, 0, ""}, + {"F_FSCTL", Const, 1, ""}, + {"F_FSDIRMASK", Const, 1, ""}, + {"F_FSIN", Const, 1, ""}, + {"F_FSINOUT", Const, 1, ""}, + {"F_FSOUT", Const, 1, ""}, + {"F_FSPRIV", Const, 1, ""}, + {"F_FSVOID", Const, 1, ""}, + {"F_FULLFSYNC", Const, 0, ""}, + {"F_GETCODEDIR", Const, 16, ""}, + {"F_GETFD", Const, 0, ""}, + {"F_GETFL", Const, 0, ""}, + {"F_GETLEASE", Const, 0, ""}, + {"F_GETLK", Const, 0, ""}, + {"F_GETLK64", Const, 0, ""}, + {"F_GETLKPID", Const, 0, ""}, + {"F_GETNOSIGPIPE", Const, 0, ""}, + {"F_GETOWN", Const, 0, ""}, + {"F_GETOWN_EX", Const, 0, ""}, + {"F_GETPATH", Const, 0, ""}, + {"F_GETPATH_MTMINFO", Const, 0, ""}, + {"F_GETPIPE_SZ", Const, 0, ""}, + {"F_GETPROTECTIONCLASS", Const, 0, ""}, + {"F_GETPROTECTIONLEVEL", Const, 16, ""}, + {"F_GETSIG", Const, 0, ""}, + {"F_GLOBAL_NOCACHE", Const, 0, ""}, + {"F_LOCK", Const, 0, ""}, + {"F_LOG2PHYS", Const, 0, ""}, + {"F_LOG2PHYS_EXT", Const, 0, ""}, + {"F_MARKDEPENDENCY", Const, 0, ""}, + {"F_MAXFD", Const, 1, ""}, + {"F_NOCACHE", Const, 0, ""}, + {"F_NODIRECT", Const, 0, ""}, + {"F_NOTIFY", Const, 0, ""}, + {"F_OGETLK", Const, 0, ""}, + {"F_OK", Const, 0, ""}, + {"F_OSETLK", Const, 0, ""}, + {"F_OSETLKW", Const, 0, ""}, + {"F_PARAM_MASK", Const, 1, ""}, + {"F_PARAM_MAX", Const, 1, ""}, + {"F_PATHPKG_CHECK", Const, 0, ""}, + {"F_PEOFPOSMODE", Const, 0, ""}, + {"F_PREALLOCATE", Const, 0, ""}, + {"F_RDADVISE", Const, 0, ""}, + {"F_RDAHEAD", Const, 0, ""}, + {"F_RDLCK", Const, 0, ""}, + {"F_READAHEAD", Const, 0, ""}, + {"F_READBOOTSTRAP", Const, 0, ""}, + {"F_SETBACKINGSTORE", Const, 0, ""}, + {"F_SETFD", Const, 0, ""}, + {"F_SETFL", Const, 0, ""}, + {"F_SETLEASE", Const, 0, ""}, + {"F_SETLK", Const, 0, ""}, + {"F_SETLK64", Const, 0, ""}, + {"F_SETLKW", Const, 0, ""}, + {"F_SETLKW64", Const, 0, ""}, + {"F_SETLKWTIMEOUT", Const, 16, ""}, + {"F_SETLK_REMOTE", Const, 0, ""}, + {"F_SETNOSIGPIPE", Const, 0, ""}, + {"F_SETOWN", Const, 0, ""}, + {"F_SETOWN_EX", Const, 0, ""}, + {"F_SETPIPE_SZ", Const, 0, ""}, + {"F_SETPROTECTIONCLASS", Const, 0, ""}, + {"F_SETSIG", Const, 0, ""}, + {"F_SETSIZE", Const, 0, ""}, + {"F_SHLCK", Const, 0, ""}, + {"F_SINGLE_WRITER", Const, 16, ""}, + {"F_TEST", Const, 0, ""}, + {"F_THAW_FS", Const, 0, ""}, + {"F_TLOCK", Const, 0, ""}, + {"F_TRANSCODEKEY", Const, 16, ""}, + {"F_ULOCK", Const, 0, ""}, + {"F_UNLCK", Const, 0, ""}, + {"F_UNLCKSYS", Const, 0, ""}, + {"F_VOLPOSMODE", Const, 0, ""}, + {"F_WRITEBOOTSTRAP", Const, 0, ""}, + {"F_WRLCK", Const, 0, ""}, + {"Faccessat", Func, 0, "func(dirfd int, path string, mode uint32, flags int) (err error)"}, + {"Fallocate", Func, 0, "func(fd int, mode uint32, off int64, len int64) (err error)"}, + {"Fbootstraptransfer_t", Type, 0, ""}, + {"Fbootstraptransfer_t.Buffer", Field, 0, ""}, + {"Fbootstraptransfer_t.Length", Field, 0, ""}, + {"Fbootstraptransfer_t.Offset", Field, 0, ""}, + {"Fchdir", Func, 0, "func(fd int) (err error)"}, + {"Fchflags", Func, 0, ""}, + {"Fchmod", Func, 0, "func(fd int, mode uint32) (err error)"}, + {"Fchmodat", Func, 0, "func(dirfd int, path string, mode uint32, flags int) error"}, + {"Fchown", Func, 0, "func(fd int, uid int, gid int) (err error)"}, + {"Fchownat", Func, 0, "func(dirfd int, path string, uid int, gid int, flags int) (err error)"}, + {"FcntlFlock", Func, 3, "func(fd uintptr, cmd int, lk *Flock_t) error"}, + {"FdSet", Type, 0, ""}, + {"FdSet.Bits", Field, 0, ""}, + {"FdSet.X__fds_bits", Field, 0, ""}, + {"Fdatasync", Func, 0, "func(fd int) (err 
error)"}, + {"FileNotifyInformation", Type, 0, ""}, + {"FileNotifyInformation.Action", Field, 0, ""}, + {"FileNotifyInformation.FileName", Field, 0, ""}, + {"FileNotifyInformation.FileNameLength", Field, 0, ""}, + {"FileNotifyInformation.NextEntryOffset", Field, 0, ""}, + {"Filetime", Type, 0, ""}, + {"Filetime.HighDateTime", Field, 0, ""}, + {"Filetime.LowDateTime", Field, 0, ""}, + {"FindClose", Func, 0, ""}, + {"FindFirstFile", Func, 0, ""}, + {"FindNextFile", Func, 0, ""}, + {"Flock", Func, 0, "func(fd int, how int) (err error)"}, + {"Flock_t", Type, 0, ""}, + {"Flock_t.Len", Field, 0, ""}, + {"Flock_t.Pad_cgo_0", Field, 0, ""}, + {"Flock_t.Pad_cgo_1", Field, 3, ""}, + {"Flock_t.Pid", Field, 0, ""}, + {"Flock_t.Start", Field, 0, ""}, + {"Flock_t.Sysid", Field, 0, ""}, + {"Flock_t.Type", Field, 0, ""}, + {"Flock_t.Whence", Field, 0, ""}, + {"FlushBpf", Func, 0, ""}, + {"FlushFileBuffers", Func, 0, ""}, + {"FlushViewOfFile", Func, 0, ""}, + {"ForkExec", Func, 0, "func(argv0 string, argv []string, attr *ProcAttr) (pid int, err error)"}, + {"ForkLock", Var, 0, ""}, + {"FormatMessage", Func, 0, ""}, + {"Fpathconf", Func, 0, ""}, + {"FreeAddrInfoW", Func, 1, ""}, + {"FreeEnvironmentStrings", Func, 0, ""}, + {"FreeLibrary", Func, 0, ""}, + {"Fsid", Type, 0, ""}, + {"Fsid.Val", Field, 0, ""}, + {"Fsid.X__fsid_val", Field, 2, ""}, + {"Fsid.X__val", Field, 0, ""}, + {"Fstat", Func, 0, "func(fd int, stat *Stat_t) (err error)"}, + {"Fstatat", Func, 12, ""}, + {"Fstatfs", Func, 0, "func(fd int, buf *Statfs_t) (err error)"}, + {"Fstore_t", Type, 0, ""}, + {"Fstore_t.Bytesalloc", Field, 0, ""}, + {"Fstore_t.Flags", Field, 0, ""}, + {"Fstore_t.Length", Field, 0, ""}, + {"Fstore_t.Offset", Field, 0, ""}, + {"Fstore_t.Posmode", Field, 0, ""}, + {"Fsync", Func, 0, "func(fd int) (err error)"}, + {"Ftruncate", Func, 0, "func(fd int, length int64) (err error)"}, + {"FullPath", Func, 4, ""}, + {"Futimes", Func, 0, "func(fd int, tv []Timeval) (err error)"}, + {"Futimesat", Func, 0, "func(dirfd int, path string, tv []Timeval) (err error)"}, + {"GENERIC_ALL", Const, 0, ""}, + {"GENERIC_EXECUTE", Const, 0, ""}, + {"GENERIC_READ", Const, 0, ""}, + {"GENERIC_WRITE", Const, 0, ""}, + {"GUID", Type, 1, ""}, + {"GUID.Data1", Field, 1, ""}, + {"GUID.Data2", Field, 1, ""}, + {"GUID.Data3", Field, 1, ""}, + {"GUID.Data4", Field, 1, ""}, + {"GetAcceptExSockaddrs", Func, 0, ""}, + {"GetAdaptersInfo", Func, 0, ""}, + {"GetAddrInfoW", Func, 1, ""}, + {"GetCommandLine", Func, 0, ""}, + {"GetComputerName", Func, 0, ""}, + {"GetConsoleMode", Func, 1, ""}, + {"GetCurrentDirectory", Func, 0, ""}, + {"GetCurrentProcess", Func, 0, ""}, + {"GetEnvironmentStrings", Func, 0, ""}, + {"GetEnvironmentVariable", Func, 0, ""}, + {"GetExitCodeProcess", Func, 0, ""}, + {"GetFileAttributes", Func, 0, ""}, + {"GetFileAttributesEx", Func, 0, ""}, + {"GetFileExInfoStandard", Const, 0, ""}, + {"GetFileExMaxInfoLevel", Const, 0, ""}, + {"GetFileInformationByHandle", Func, 0, ""}, + {"GetFileType", Func, 0, ""}, + {"GetFullPathName", Func, 0, ""}, + {"GetHostByName", Func, 0, ""}, + {"GetIfEntry", Func, 0, ""}, + {"GetLastError", Func, 0, ""}, + {"GetLengthSid", Func, 0, ""}, + {"GetLongPathName", Func, 0, ""}, + {"GetProcAddress", Func, 0, ""}, + {"GetProcessTimes", Func, 0, ""}, + {"GetProtoByName", Func, 0, ""}, + {"GetQueuedCompletionStatus", Func, 0, ""}, + {"GetServByName", Func, 0, ""}, + {"GetShortPathName", Func, 0, ""}, + {"GetStartupInfo", Func, 0, ""}, + {"GetStdHandle", Func, 0, ""}, + {"GetSystemTimeAsFileTime", Func, 0, ""}, + 
{"GetTempPath", Func, 0, ""}, + {"GetTimeZoneInformation", Func, 0, ""}, + {"GetTokenInformation", Func, 0, ""}, + {"GetUserNameEx", Func, 0, ""}, + {"GetUserProfileDirectory", Func, 0, ""}, + {"GetVersion", Func, 0, ""}, + {"Getcwd", Func, 0, "func(buf []byte) (n int, err error)"}, + {"Getdents", Func, 0, "func(fd int, buf []byte) (n int, err error)"}, + {"Getdirentries", Func, 0, ""}, + {"Getdtablesize", Func, 0, ""}, + {"Getegid", Func, 0, "func() (egid int)"}, + {"Getenv", Func, 0, "func(key string) (value string, found bool)"}, + {"Geteuid", Func, 0, "func() (euid int)"}, + {"Getfsstat", Func, 0, ""}, + {"Getgid", Func, 0, "func() (gid int)"}, + {"Getgroups", Func, 0, "func() (gids []int, err error)"}, + {"Getpagesize", Func, 0, "func() int"}, + {"Getpeername", Func, 0, "func(fd int) (sa Sockaddr, err error)"}, + {"Getpgid", Func, 0, "func(pid int) (pgid int, err error)"}, + {"Getpgrp", Func, 0, "func() (pid int)"}, + {"Getpid", Func, 0, "func() (pid int)"}, + {"Getppid", Func, 0, "func() (ppid int)"}, + {"Getpriority", Func, 0, "func(which int, who int) (prio int, err error)"}, + {"Getrlimit", Func, 0, "func(resource int, rlim *Rlimit) (err error)"}, + {"Getrusage", Func, 0, "func(who int, rusage *Rusage) (err error)"}, + {"Getsid", Func, 0, ""}, + {"Getsockname", Func, 0, "func(fd int) (sa Sockaddr, err error)"}, + {"Getsockopt", Func, 1, ""}, + {"GetsockoptByte", Func, 0, ""}, + {"GetsockoptICMPv6Filter", Func, 2, "func(fd int, level int, opt int) (*ICMPv6Filter, error)"}, + {"GetsockoptIPMreq", Func, 0, "func(fd int, level int, opt int) (*IPMreq, error)"}, + {"GetsockoptIPMreqn", Func, 0, "func(fd int, level int, opt int) (*IPMreqn, error)"}, + {"GetsockoptIPv6MTUInfo", Func, 2, "func(fd int, level int, opt int) (*IPv6MTUInfo, error)"}, + {"GetsockoptIPv6Mreq", Func, 0, "func(fd int, level int, opt int) (*IPv6Mreq, error)"}, + {"GetsockoptInet4Addr", Func, 0, "func(fd int, level int, opt int) (value [4]byte, err error)"}, + {"GetsockoptInt", Func, 0, "func(fd int, level int, opt int) (value int, err error)"}, + {"GetsockoptUcred", Func, 1, "func(fd int, level int, opt int) (*Ucred, error)"}, + {"Gettid", Func, 0, "func() (tid int)"}, + {"Gettimeofday", Func, 0, "func(tv *Timeval) (err error)"}, + {"Getuid", Func, 0, "func() (uid int)"}, + {"Getwd", Func, 0, "func() (wd string, err error)"}, + {"Getxattr", Func, 1, "func(path string, attr string, dest []byte) (sz int, err error)"}, + {"HANDLE_FLAG_INHERIT", Const, 0, ""}, + {"HKEY_CLASSES_ROOT", Const, 0, ""}, + {"HKEY_CURRENT_CONFIG", Const, 0, ""}, + {"HKEY_CURRENT_USER", Const, 0, ""}, + {"HKEY_DYN_DATA", Const, 0, ""}, + {"HKEY_LOCAL_MACHINE", Const, 0, ""}, + {"HKEY_PERFORMANCE_DATA", Const, 0, ""}, + {"HKEY_USERS", Const, 0, ""}, + {"HUPCL", Const, 0, ""}, + {"Handle", Type, 0, ""}, + {"Hostent", Type, 0, ""}, + {"Hostent.AddrList", Field, 0, ""}, + {"Hostent.AddrType", Field, 0, ""}, + {"Hostent.Aliases", Field, 0, ""}, + {"Hostent.Length", Field, 0, ""}, + {"Hostent.Name", Field, 0, ""}, + {"ICANON", Const, 0, ""}, + {"ICMP6_FILTER", Const, 2, ""}, + {"ICMPV6_FILTER", Const, 2, ""}, + {"ICMPv6Filter", Type, 2, ""}, + {"ICMPv6Filter.Data", Field, 2, ""}, + {"ICMPv6Filter.Filt", Field, 2, ""}, + {"ICRNL", Const, 0, ""}, + {"IEXTEN", Const, 0, ""}, + {"IFAN_ARRIVAL", Const, 1, ""}, + {"IFAN_DEPARTURE", Const, 1, ""}, + {"IFA_ADDRESS", Const, 0, ""}, + {"IFA_ANYCAST", Const, 0, ""}, + {"IFA_BROADCAST", Const, 0, ""}, + {"IFA_CACHEINFO", Const, 0, ""}, + {"IFA_F_DADFAILED", Const, 0, ""}, + {"IFA_F_DEPRECATED", Const, 0, ""}, + 
{"IFA_F_HOMEADDRESS", Const, 0, ""}, + {"IFA_F_NODAD", Const, 0, ""}, + {"IFA_F_OPTIMISTIC", Const, 0, ""}, + {"IFA_F_PERMANENT", Const, 0, ""}, + {"IFA_F_SECONDARY", Const, 0, ""}, + {"IFA_F_TEMPORARY", Const, 0, ""}, + {"IFA_F_TENTATIVE", Const, 0, ""}, + {"IFA_LABEL", Const, 0, ""}, + {"IFA_LOCAL", Const, 0, ""}, + {"IFA_MAX", Const, 0, ""}, + {"IFA_MULTICAST", Const, 0, ""}, + {"IFA_ROUTE", Const, 1, ""}, + {"IFA_UNSPEC", Const, 0, ""}, + {"IFF_ALLMULTI", Const, 0, ""}, + {"IFF_ALTPHYS", Const, 0, ""}, + {"IFF_AUTOMEDIA", Const, 0, ""}, + {"IFF_BROADCAST", Const, 0, ""}, + {"IFF_CANTCHANGE", Const, 0, ""}, + {"IFF_CANTCONFIG", Const, 1, ""}, + {"IFF_DEBUG", Const, 0, ""}, + {"IFF_DRV_OACTIVE", Const, 0, ""}, + {"IFF_DRV_RUNNING", Const, 0, ""}, + {"IFF_DYING", Const, 0, ""}, + {"IFF_DYNAMIC", Const, 0, ""}, + {"IFF_LINK0", Const, 0, ""}, + {"IFF_LINK1", Const, 0, ""}, + {"IFF_LINK2", Const, 0, ""}, + {"IFF_LOOPBACK", Const, 0, ""}, + {"IFF_MASTER", Const, 0, ""}, + {"IFF_MONITOR", Const, 0, ""}, + {"IFF_MULTICAST", Const, 0, ""}, + {"IFF_NOARP", Const, 0, ""}, + {"IFF_NOTRAILERS", Const, 0, ""}, + {"IFF_NO_PI", Const, 0, ""}, + {"IFF_OACTIVE", Const, 0, ""}, + {"IFF_ONE_QUEUE", Const, 0, ""}, + {"IFF_POINTOPOINT", Const, 0, ""}, + {"IFF_POINTTOPOINT", Const, 0, ""}, + {"IFF_PORTSEL", Const, 0, ""}, + {"IFF_PPROMISC", Const, 0, ""}, + {"IFF_PROMISC", Const, 0, ""}, + {"IFF_RENAMING", Const, 0, ""}, + {"IFF_RUNNING", Const, 0, ""}, + {"IFF_SIMPLEX", Const, 0, ""}, + {"IFF_SLAVE", Const, 0, ""}, + {"IFF_SMART", Const, 0, ""}, + {"IFF_STATICARP", Const, 0, ""}, + {"IFF_TAP", Const, 0, ""}, + {"IFF_TUN", Const, 0, ""}, + {"IFF_TUN_EXCL", Const, 0, ""}, + {"IFF_UP", Const, 0, ""}, + {"IFF_VNET_HDR", Const, 0, ""}, + {"IFLA_ADDRESS", Const, 0, ""}, + {"IFLA_BROADCAST", Const, 0, ""}, + {"IFLA_COST", Const, 0, ""}, + {"IFLA_IFALIAS", Const, 0, ""}, + {"IFLA_IFNAME", Const, 0, ""}, + {"IFLA_LINK", Const, 0, ""}, + {"IFLA_LINKINFO", Const, 0, ""}, + {"IFLA_LINKMODE", Const, 0, ""}, + {"IFLA_MAP", Const, 0, ""}, + {"IFLA_MASTER", Const, 0, ""}, + {"IFLA_MAX", Const, 0, ""}, + {"IFLA_MTU", Const, 0, ""}, + {"IFLA_NET_NS_PID", Const, 0, ""}, + {"IFLA_OPERSTATE", Const, 0, ""}, + {"IFLA_PRIORITY", Const, 0, ""}, + {"IFLA_PROTINFO", Const, 0, ""}, + {"IFLA_QDISC", Const, 0, ""}, + {"IFLA_STATS", Const, 0, ""}, + {"IFLA_TXQLEN", Const, 0, ""}, + {"IFLA_UNSPEC", Const, 0, ""}, + {"IFLA_WEIGHT", Const, 0, ""}, + {"IFLA_WIRELESS", Const, 0, ""}, + {"IFNAMSIZ", Const, 0, ""}, + {"IFT_1822", Const, 0, ""}, + {"IFT_A12MPPSWITCH", Const, 0, ""}, + {"IFT_AAL2", Const, 0, ""}, + {"IFT_AAL5", Const, 0, ""}, + {"IFT_ADSL", Const, 0, ""}, + {"IFT_AFLANE8023", Const, 0, ""}, + {"IFT_AFLANE8025", Const, 0, ""}, + {"IFT_ARAP", Const, 0, ""}, + {"IFT_ARCNET", Const, 0, ""}, + {"IFT_ARCNETPLUS", Const, 0, ""}, + {"IFT_ASYNC", Const, 0, ""}, + {"IFT_ATM", Const, 0, ""}, + {"IFT_ATMDXI", Const, 0, ""}, + {"IFT_ATMFUNI", Const, 0, ""}, + {"IFT_ATMIMA", Const, 0, ""}, + {"IFT_ATMLOGICAL", Const, 0, ""}, + {"IFT_ATMRADIO", Const, 0, ""}, + {"IFT_ATMSUBINTERFACE", Const, 0, ""}, + {"IFT_ATMVCIENDPT", Const, 0, ""}, + {"IFT_ATMVIRTUAL", Const, 0, ""}, + {"IFT_BGPPOLICYACCOUNTING", Const, 0, ""}, + {"IFT_BLUETOOTH", Const, 1, ""}, + {"IFT_BRIDGE", Const, 0, ""}, + {"IFT_BSC", Const, 0, ""}, + {"IFT_CARP", Const, 0, ""}, + {"IFT_CCTEMUL", Const, 0, ""}, + {"IFT_CELLULAR", Const, 0, ""}, + {"IFT_CEPT", Const, 0, ""}, + {"IFT_CES", Const, 0, ""}, + {"IFT_CHANNEL", Const, 0, ""}, + {"IFT_CNR", Const, 0, ""}, + {"IFT_COFFEE", 
Const, 0, ""}, + {"IFT_COMPOSITELINK", Const, 0, ""}, + {"IFT_DCN", Const, 0, ""}, + {"IFT_DIGITALPOWERLINE", Const, 0, ""}, + {"IFT_DIGITALWRAPPEROVERHEADCHANNEL", Const, 0, ""}, + {"IFT_DLSW", Const, 0, ""}, + {"IFT_DOCSCABLEDOWNSTREAM", Const, 0, ""}, + {"IFT_DOCSCABLEMACLAYER", Const, 0, ""}, + {"IFT_DOCSCABLEUPSTREAM", Const, 0, ""}, + {"IFT_DOCSCABLEUPSTREAMCHANNEL", Const, 1, ""}, + {"IFT_DS0", Const, 0, ""}, + {"IFT_DS0BUNDLE", Const, 0, ""}, + {"IFT_DS1FDL", Const, 0, ""}, + {"IFT_DS3", Const, 0, ""}, + {"IFT_DTM", Const, 0, ""}, + {"IFT_DUMMY", Const, 1, ""}, + {"IFT_DVBASILN", Const, 0, ""}, + {"IFT_DVBASIOUT", Const, 0, ""}, + {"IFT_DVBRCCDOWNSTREAM", Const, 0, ""}, + {"IFT_DVBRCCMACLAYER", Const, 0, ""}, + {"IFT_DVBRCCUPSTREAM", Const, 0, ""}, + {"IFT_ECONET", Const, 1, ""}, + {"IFT_ENC", Const, 0, ""}, + {"IFT_EON", Const, 0, ""}, + {"IFT_EPLRS", Const, 0, ""}, + {"IFT_ESCON", Const, 0, ""}, + {"IFT_ETHER", Const, 0, ""}, + {"IFT_FAITH", Const, 0, ""}, + {"IFT_FAST", Const, 0, ""}, + {"IFT_FASTETHER", Const, 0, ""}, + {"IFT_FASTETHERFX", Const, 0, ""}, + {"IFT_FDDI", Const, 0, ""}, + {"IFT_FIBRECHANNEL", Const, 0, ""}, + {"IFT_FRAMERELAYINTERCONNECT", Const, 0, ""}, + {"IFT_FRAMERELAYMPI", Const, 0, ""}, + {"IFT_FRDLCIENDPT", Const, 0, ""}, + {"IFT_FRELAY", Const, 0, ""}, + {"IFT_FRELAYDCE", Const, 0, ""}, + {"IFT_FRF16MFRBUNDLE", Const, 0, ""}, + {"IFT_FRFORWARD", Const, 0, ""}, + {"IFT_G703AT2MB", Const, 0, ""}, + {"IFT_G703AT64K", Const, 0, ""}, + {"IFT_GIF", Const, 0, ""}, + {"IFT_GIGABITETHERNET", Const, 0, ""}, + {"IFT_GR303IDT", Const, 0, ""}, + {"IFT_GR303RDT", Const, 0, ""}, + {"IFT_H323GATEKEEPER", Const, 0, ""}, + {"IFT_H323PROXY", Const, 0, ""}, + {"IFT_HDH1822", Const, 0, ""}, + {"IFT_HDLC", Const, 0, ""}, + {"IFT_HDSL2", Const, 0, ""}, + {"IFT_HIPERLAN2", Const, 0, ""}, + {"IFT_HIPPI", Const, 0, ""}, + {"IFT_HIPPIINTERFACE", Const, 0, ""}, + {"IFT_HOSTPAD", Const, 0, ""}, + {"IFT_HSSI", Const, 0, ""}, + {"IFT_HY", Const, 0, ""}, + {"IFT_IBM370PARCHAN", Const, 0, ""}, + {"IFT_IDSL", Const, 0, ""}, + {"IFT_IEEE1394", Const, 0, ""}, + {"IFT_IEEE80211", Const, 0, ""}, + {"IFT_IEEE80212", Const, 0, ""}, + {"IFT_IEEE8023ADLAG", Const, 0, ""}, + {"IFT_IFGSN", Const, 0, ""}, + {"IFT_IMT", Const, 0, ""}, + {"IFT_INFINIBAND", Const, 1, ""}, + {"IFT_INTERLEAVE", Const, 0, ""}, + {"IFT_IP", Const, 0, ""}, + {"IFT_IPFORWARD", Const, 0, ""}, + {"IFT_IPOVERATM", Const, 0, ""}, + {"IFT_IPOVERCDLC", Const, 0, ""}, + {"IFT_IPOVERCLAW", Const, 0, ""}, + {"IFT_IPSWITCH", Const, 0, ""}, + {"IFT_IPXIP", Const, 0, ""}, + {"IFT_ISDN", Const, 0, ""}, + {"IFT_ISDNBASIC", Const, 0, ""}, + {"IFT_ISDNPRIMARY", Const, 0, ""}, + {"IFT_ISDNS", Const, 0, ""}, + {"IFT_ISDNU", Const, 0, ""}, + {"IFT_ISO88022LLC", Const, 0, ""}, + {"IFT_ISO88023", Const, 0, ""}, + {"IFT_ISO88024", Const, 0, ""}, + {"IFT_ISO88025", Const, 0, ""}, + {"IFT_ISO88025CRFPINT", Const, 0, ""}, + {"IFT_ISO88025DTR", Const, 0, ""}, + {"IFT_ISO88025FIBER", Const, 0, ""}, + {"IFT_ISO88026", Const, 0, ""}, + {"IFT_ISUP", Const, 0, ""}, + {"IFT_L2VLAN", Const, 0, ""}, + {"IFT_L3IPVLAN", Const, 0, ""}, + {"IFT_L3IPXVLAN", Const, 0, ""}, + {"IFT_LAPB", Const, 0, ""}, + {"IFT_LAPD", Const, 0, ""}, + {"IFT_LAPF", Const, 0, ""}, + {"IFT_LINEGROUP", Const, 1, ""}, + {"IFT_LOCALTALK", Const, 0, ""}, + {"IFT_LOOP", Const, 0, ""}, + {"IFT_MEDIAMAILOVERIP", Const, 0, ""}, + {"IFT_MFSIGLINK", Const, 0, ""}, + {"IFT_MIOX25", Const, 0, ""}, + {"IFT_MODEM", Const, 0, ""}, + {"IFT_MPC", Const, 0, ""}, + {"IFT_MPLS", Const, 0, ""}, + 
{"IFT_MPLSTUNNEL", Const, 0, ""}, + {"IFT_MSDSL", Const, 0, ""}, + {"IFT_MVL", Const, 0, ""}, + {"IFT_MYRINET", Const, 0, ""}, + {"IFT_NFAS", Const, 0, ""}, + {"IFT_NSIP", Const, 0, ""}, + {"IFT_OPTICALCHANNEL", Const, 0, ""}, + {"IFT_OPTICALTRANSPORT", Const, 0, ""}, + {"IFT_OTHER", Const, 0, ""}, + {"IFT_P10", Const, 0, ""}, + {"IFT_P80", Const, 0, ""}, + {"IFT_PARA", Const, 0, ""}, + {"IFT_PDP", Const, 0, ""}, + {"IFT_PFLOG", Const, 0, ""}, + {"IFT_PFLOW", Const, 1, ""}, + {"IFT_PFSYNC", Const, 0, ""}, + {"IFT_PLC", Const, 0, ""}, + {"IFT_PON155", Const, 1, ""}, + {"IFT_PON622", Const, 1, ""}, + {"IFT_POS", Const, 0, ""}, + {"IFT_PPP", Const, 0, ""}, + {"IFT_PPPMULTILINKBUNDLE", Const, 0, ""}, + {"IFT_PROPATM", Const, 1, ""}, + {"IFT_PROPBWAP2MP", Const, 0, ""}, + {"IFT_PROPCNLS", Const, 0, ""}, + {"IFT_PROPDOCSWIRELESSDOWNSTREAM", Const, 0, ""}, + {"IFT_PROPDOCSWIRELESSMACLAYER", Const, 0, ""}, + {"IFT_PROPDOCSWIRELESSUPSTREAM", Const, 0, ""}, + {"IFT_PROPMUX", Const, 0, ""}, + {"IFT_PROPVIRTUAL", Const, 0, ""}, + {"IFT_PROPWIRELESSP2P", Const, 0, ""}, + {"IFT_PTPSERIAL", Const, 0, ""}, + {"IFT_PVC", Const, 0, ""}, + {"IFT_Q2931", Const, 1, ""}, + {"IFT_QLLC", Const, 0, ""}, + {"IFT_RADIOMAC", Const, 0, ""}, + {"IFT_RADSL", Const, 0, ""}, + {"IFT_REACHDSL", Const, 0, ""}, + {"IFT_RFC1483", Const, 0, ""}, + {"IFT_RS232", Const, 0, ""}, + {"IFT_RSRB", Const, 0, ""}, + {"IFT_SDLC", Const, 0, ""}, + {"IFT_SDSL", Const, 0, ""}, + {"IFT_SHDSL", Const, 0, ""}, + {"IFT_SIP", Const, 0, ""}, + {"IFT_SIPSIG", Const, 1, ""}, + {"IFT_SIPTG", Const, 1, ""}, + {"IFT_SLIP", Const, 0, ""}, + {"IFT_SMDSDXI", Const, 0, ""}, + {"IFT_SMDSICIP", Const, 0, ""}, + {"IFT_SONET", Const, 0, ""}, + {"IFT_SONETOVERHEADCHANNEL", Const, 0, ""}, + {"IFT_SONETPATH", Const, 0, ""}, + {"IFT_SONETVT", Const, 0, ""}, + {"IFT_SRP", Const, 0, ""}, + {"IFT_SS7SIGLINK", Const, 0, ""}, + {"IFT_STACKTOSTACK", Const, 0, ""}, + {"IFT_STARLAN", Const, 0, ""}, + {"IFT_STF", Const, 0, ""}, + {"IFT_T1", Const, 0, ""}, + {"IFT_TDLC", Const, 0, ""}, + {"IFT_TELINK", Const, 1, ""}, + {"IFT_TERMPAD", Const, 0, ""}, + {"IFT_TR008", Const, 0, ""}, + {"IFT_TRANSPHDLC", Const, 0, ""}, + {"IFT_TUNNEL", Const, 0, ""}, + {"IFT_ULTRA", Const, 0, ""}, + {"IFT_USB", Const, 0, ""}, + {"IFT_V11", Const, 0, ""}, + {"IFT_V35", Const, 0, ""}, + {"IFT_V36", Const, 0, ""}, + {"IFT_V37", Const, 0, ""}, + {"IFT_VDSL", Const, 0, ""}, + {"IFT_VIRTUALIPADDRESS", Const, 0, ""}, + {"IFT_VIRTUALTG", Const, 1, ""}, + {"IFT_VOICEDID", Const, 1, ""}, + {"IFT_VOICEEM", Const, 0, ""}, + {"IFT_VOICEEMFGD", Const, 1, ""}, + {"IFT_VOICEENCAP", Const, 0, ""}, + {"IFT_VOICEFGDEANA", Const, 1, ""}, + {"IFT_VOICEFXO", Const, 0, ""}, + {"IFT_VOICEFXS", Const, 0, ""}, + {"IFT_VOICEOVERATM", Const, 0, ""}, + {"IFT_VOICEOVERCABLE", Const, 1, ""}, + {"IFT_VOICEOVERFRAMERELAY", Const, 0, ""}, + {"IFT_VOICEOVERIP", Const, 0, ""}, + {"IFT_X213", Const, 0, ""}, + {"IFT_X25", Const, 0, ""}, + {"IFT_X25DDN", Const, 0, ""}, + {"IFT_X25HUNTGROUP", Const, 0, ""}, + {"IFT_X25MLP", Const, 0, ""}, + {"IFT_X25PLE", Const, 0, ""}, + {"IFT_XETHER", Const, 0, ""}, + {"IGNBRK", Const, 0, ""}, + {"IGNCR", Const, 0, ""}, + {"IGNORE", Const, 0, ""}, + {"IGNPAR", Const, 0, ""}, + {"IMAXBEL", Const, 0, ""}, + {"INFINITE", Const, 0, ""}, + {"INLCR", Const, 0, ""}, + {"INPCK", Const, 0, ""}, + {"INVALID_FILE_ATTRIBUTES", Const, 0, ""}, + {"IN_ACCESS", Const, 0, ""}, + {"IN_ALL_EVENTS", Const, 0, ""}, + {"IN_ATTRIB", Const, 0, ""}, + {"IN_CLASSA_HOST", Const, 0, ""}, + {"IN_CLASSA_MAX", Const, 0, ""}, + 
{"IN_CLASSA_NET", Const, 0, ""}, + {"IN_CLASSA_NSHIFT", Const, 0, ""}, + {"IN_CLASSB_HOST", Const, 0, ""}, + {"IN_CLASSB_MAX", Const, 0, ""}, + {"IN_CLASSB_NET", Const, 0, ""}, + {"IN_CLASSB_NSHIFT", Const, 0, ""}, + {"IN_CLASSC_HOST", Const, 0, ""}, + {"IN_CLASSC_NET", Const, 0, ""}, + {"IN_CLASSC_NSHIFT", Const, 0, ""}, + {"IN_CLASSD_HOST", Const, 0, ""}, + {"IN_CLASSD_NET", Const, 0, ""}, + {"IN_CLASSD_NSHIFT", Const, 0, ""}, + {"IN_CLOEXEC", Const, 0, ""}, + {"IN_CLOSE", Const, 0, ""}, + {"IN_CLOSE_NOWRITE", Const, 0, ""}, + {"IN_CLOSE_WRITE", Const, 0, ""}, + {"IN_CREATE", Const, 0, ""}, + {"IN_DELETE", Const, 0, ""}, + {"IN_DELETE_SELF", Const, 0, ""}, + {"IN_DONT_FOLLOW", Const, 0, ""}, + {"IN_EXCL_UNLINK", Const, 0, ""}, + {"IN_IGNORED", Const, 0, ""}, + {"IN_ISDIR", Const, 0, ""}, + {"IN_LINKLOCALNETNUM", Const, 0, ""}, + {"IN_LOOPBACKNET", Const, 0, ""}, + {"IN_MASK_ADD", Const, 0, ""}, + {"IN_MODIFY", Const, 0, ""}, + {"IN_MOVE", Const, 0, ""}, + {"IN_MOVED_FROM", Const, 0, ""}, + {"IN_MOVED_TO", Const, 0, ""}, + {"IN_MOVE_SELF", Const, 0, ""}, + {"IN_NONBLOCK", Const, 0, ""}, + {"IN_ONESHOT", Const, 0, ""}, + {"IN_ONLYDIR", Const, 0, ""}, + {"IN_OPEN", Const, 0, ""}, + {"IN_Q_OVERFLOW", Const, 0, ""}, + {"IN_RFC3021_HOST", Const, 1, ""}, + {"IN_RFC3021_MASK", Const, 1, ""}, + {"IN_RFC3021_NET", Const, 1, ""}, + {"IN_RFC3021_NSHIFT", Const, 1, ""}, + {"IN_UNMOUNT", Const, 0, ""}, + {"IOC_IN", Const, 1, ""}, + {"IOC_INOUT", Const, 1, ""}, + {"IOC_OUT", Const, 1, ""}, + {"IOC_VENDOR", Const, 3, ""}, + {"IOC_WS2", Const, 1, ""}, + {"IO_REPARSE_TAG_SYMLINK", Const, 4, ""}, + {"IPMreq", Type, 0, ""}, + {"IPMreq.Interface", Field, 0, ""}, + {"IPMreq.Multiaddr", Field, 0, ""}, + {"IPMreqn", Type, 0, ""}, + {"IPMreqn.Address", Field, 0, ""}, + {"IPMreqn.Ifindex", Field, 0, ""}, + {"IPMreqn.Multiaddr", Field, 0, ""}, + {"IPPROTO_3PC", Const, 0, ""}, + {"IPPROTO_ADFS", Const, 0, ""}, + {"IPPROTO_AH", Const, 0, ""}, + {"IPPROTO_AHIP", Const, 0, ""}, + {"IPPROTO_APES", Const, 0, ""}, + {"IPPROTO_ARGUS", Const, 0, ""}, + {"IPPROTO_AX25", Const, 0, ""}, + {"IPPROTO_BHA", Const, 0, ""}, + {"IPPROTO_BLT", Const, 0, ""}, + {"IPPROTO_BRSATMON", Const, 0, ""}, + {"IPPROTO_CARP", Const, 0, ""}, + {"IPPROTO_CFTP", Const, 0, ""}, + {"IPPROTO_CHAOS", Const, 0, ""}, + {"IPPROTO_CMTP", Const, 0, ""}, + {"IPPROTO_COMP", Const, 0, ""}, + {"IPPROTO_CPHB", Const, 0, ""}, + {"IPPROTO_CPNX", Const, 0, ""}, + {"IPPROTO_DCCP", Const, 0, ""}, + {"IPPROTO_DDP", Const, 0, ""}, + {"IPPROTO_DGP", Const, 0, ""}, + {"IPPROTO_DIVERT", Const, 0, ""}, + {"IPPROTO_DIVERT_INIT", Const, 3, ""}, + {"IPPROTO_DIVERT_RESP", Const, 3, ""}, + {"IPPROTO_DONE", Const, 0, ""}, + {"IPPROTO_DSTOPTS", Const, 0, ""}, + {"IPPROTO_EGP", Const, 0, ""}, + {"IPPROTO_EMCON", Const, 0, ""}, + {"IPPROTO_ENCAP", Const, 0, ""}, + {"IPPROTO_EON", Const, 0, ""}, + {"IPPROTO_ESP", Const, 0, ""}, + {"IPPROTO_ETHERIP", Const, 0, ""}, + {"IPPROTO_FRAGMENT", Const, 0, ""}, + {"IPPROTO_GGP", Const, 0, ""}, + {"IPPROTO_GMTP", Const, 0, ""}, + {"IPPROTO_GRE", Const, 0, ""}, + {"IPPROTO_HELLO", Const, 0, ""}, + {"IPPROTO_HMP", Const, 0, ""}, + {"IPPROTO_HOPOPTS", Const, 0, ""}, + {"IPPROTO_ICMP", Const, 0, ""}, + {"IPPROTO_ICMPV6", Const, 0, ""}, + {"IPPROTO_IDP", Const, 0, ""}, + {"IPPROTO_IDPR", Const, 0, ""}, + {"IPPROTO_IDRP", Const, 0, ""}, + {"IPPROTO_IGMP", Const, 0, ""}, + {"IPPROTO_IGP", Const, 0, ""}, + {"IPPROTO_IGRP", Const, 0, ""}, + {"IPPROTO_IL", Const, 0, ""}, + {"IPPROTO_INLSP", Const, 0, ""}, + {"IPPROTO_INP", Const, 0, ""}, + 
{"IPPROTO_IP", Const, 0, ""}, + {"IPPROTO_IPCOMP", Const, 0, ""}, + {"IPPROTO_IPCV", Const, 0, ""}, + {"IPPROTO_IPEIP", Const, 0, ""}, + {"IPPROTO_IPIP", Const, 0, ""}, + {"IPPROTO_IPPC", Const, 0, ""}, + {"IPPROTO_IPV4", Const, 0, ""}, + {"IPPROTO_IPV6", Const, 0, ""}, + {"IPPROTO_IPV6_ICMP", Const, 1, ""}, + {"IPPROTO_IRTP", Const, 0, ""}, + {"IPPROTO_KRYPTOLAN", Const, 0, ""}, + {"IPPROTO_LARP", Const, 0, ""}, + {"IPPROTO_LEAF1", Const, 0, ""}, + {"IPPROTO_LEAF2", Const, 0, ""}, + {"IPPROTO_MAX", Const, 0, ""}, + {"IPPROTO_MAXID", Const, 0, ""}, + {"IPPROTO_MEAS", Const, 0, ""}, + {"IPPROTO_MH", Const, 1, ""}, + {"IPPROTO_MHRP", Const, 0, ""}, + {"IPPROTO_MICP", Const, 0, ""}, + {"IPPROTO_MOBILE", Const, 0, ""}, + {"IPPROTO_MPLS", Const, 1, ""}, + {"IPPROTO_MTP", Const, 0, ""}, + {"IPPROTO_MUX", Const, 0, ""}, + {"IPPROTO_ND", Const, 0, ""}, + {"IPPROTO_NHRP", Const, 0, ""}, + {"IPPROTO_NONE", Const, 0, ""}, + {"IPPROTO_NSP", Const, 0, ""}, + {"IPPROTO_NVPII", Const, 0, ""}, + {"IPPROTO_OLD_DIVERT", Const, 0, ""}, + {"IPPROTO_OSPFIGP", Const, 0, ""}, + {"IPPROTO_PFSYNC", Const, 0, ""}, + {"IPPROTO_PGM", Const, 0, ""}, + {"IPPROTO_PIGP", Const, 0, ""}, + {"IPPROTO_PIM", Const, 0, ""}, + {"IPPROTO_PRM", Const, 0, ""}, + {"IPPROTO_PUP", Const, 0, ""}, + {"IPPROTO_PVP", Const, 0, ""}, + {"IPPROTO_RAW", Const, 0, ""}, + {"IPPROTO_RCCMON", Const, 0, ""}, + {"IPPROTO_RDP", Const, 0, ""}, + {"IPPROTO_ROUTING", Const, 0, ""}, + {"IPPROTO_RSVP", Const, 0, ""}, + {"IPPROTO_RVD", Const, 0, ""}, + {"IPPROTO_SATEXPAK", Const, 0, ""}, + {"IPPROTO_SATMON", Const, 0, ""}, + {"IPPROTO_SCCSP", Const, 0, ""}, + {"IPPROTO_SCTP", Const, 0, ""}, + {"IPPROTO_SDRP", Const, 0, ""}, + {"IPPROTO_SEND", Const, 1, ""}, + {"IPPROTO_SEP", Const, 0, ""}, + {"IPPROTO_SKIP", Const, 0, ""}, + {"IPPROTO_SPACER", Const, 0, ""}, + {"IPPROTO_SRPC", Const, 0, ""}, + {"IPPROTO_ST", Const, 0, ""}, + {"IPPROTO_SVMTP", Const, 0, ""}, + {"IPPROTO_SWIPE", Const, 0, ""}, + {"IPPROTO_TCF", Const, 0, ""}, + {"IPPROTO_TCP", Const, 0, ""}, + {"IPPROTO_TLSP", Const, 0, ""}, + {"IPPROTO_TP", Const, 0, ""}, + {"IPPROTO_TPXX", Const, 0, ""}, + {"IPPROTO_TRUNK1", Const, 0, ""}, + {"IPPROTO_TRUNK2", Const, 0, ""}, + {"IPPROTO_TTP", Const, 0, ""}, + {"IPPROTO_UDP", Const, 0, ""}, + {"IPPROTO_UDPLITE", Const, 0, ""}, + {"IPPROTO_VINES", Const, 0, ""}, + {"IPPROTO_VISA", Const, 0, ""}, + {"IPPROTO_VMTP", Const, 0, ""}, + {"IPPROTO_VRRP", Const, 1, ""}, + {"IPPROTO_WBEXPAK", Const, 0, ""}, + {"IPPROTO_WBMON", Const, 0, ""}, + {"IPPROTO_WSN", Const, 0, ""}, + {"IPPROTO_XNET", Const, 0, ""}, + {"IPPROTO_XTP", Const, 0, ""}, + {"IPV6_2292DSTOPTS", Const, 0, ""}, + {"IPV6_2292HOPLIMIT", Const, 0, ""}, + {"IPV6_2292HOPOPTS", Const, 0, ""}, + {"IPV6_2292NEXTHOP", Const, 0, ""}, + {"IPV6_2292PKTINFO", Const, 0, ""}, + {"IPV6_2292PKTOPTIONS", Const, 0, ""}, + {"IPV6_2292RTHDR", Const, 0, ""}, + {"IPV6_ADDRFORM", Const, 0, ""}, + {"IPV6_ADD_MEMBERSHIP", Const, 0, ""}, + {"IPV6_AUTHHDR", Const, 0, ""}, + {"IPV6_AUTH_LEVEL", Const, 1, ""}, + {"IPV6_AUTOFLOWLABEL", Const, 0, ""}, + {"IPV6_BINDANY", Const, 0, ""}, + {"IPV6_BINDV6ONLY", Const, 0, ""}, + {"IPV6_BOUND_IF", Const, 0, ""}, + {"IPV6_CHECKSUM", Const, 0, ""}, + {"IPV6_DEFAULT_MULTICAST_HOPS", Const, 0, ""}, + {"IPV6_DEFAULT_MULTICAST_LOOP", Const, 0, ""}, + {"IPV6_DEFHLIM", Const, 0, ""}, + {"IPV6_DONTFRAG", Const, 0, ""}, + {"IPV6_DROP_MEMBERSHIP", Const, 0, ""}, + {"IPV6_DSTOPTS", Const, 0, ""}, + {"IPV6_ESP_NETWORK_LEVEL", Const, 1, ""}, + {"IPV6_ESP_TRANS_LEVEL", Const, 1, ""}, + {"IPV6_FAITH", 
Const, 0, ""}, + {"IPV6_FLOWINFO_MASK", Const, 0, ""}, + {"IPV6_FLOWLABEL_MASK", Const, 0, ""}, + {"IPV6_FRAGTTL", Const, 0, ""}, + {"IPV6_FW_ADD", Const, 0, ""}, + {"IPV6_FW_DEL", Const, 0, ""}, + {"IPV6_FW_FLUSH", Const, 0, ""}, + {"IPV6_FW_GET", Const, 0, ""}, + {"IPV6_FW_ZERO", Const, 0, ""}, + {"IPV6_HLIMDEC", Const, 0, ""}, + {"IPV6_HOPLIMIT", Const, 0, ""}, + {"IPV6_HOPOPTS", Const, 0, ""}, + {"IPV6_IPCOMP_LEVEL", Const, 1, ""}, + {"IPV6_IPSEC_POLICY", Const, 0, ""}, + {"IPV6_JOIN_ANYCAST", Const, 0, ""}, + {"IPV6_JOIN_GROUP", Const, 0, ""}, + {"IPV6_LEAVE_ANYCAST", Const, 0, ""}, + {"IPV6_LEAVE_GROUP", Const, 0, ""}, + {"IPV6_MAXHLIM", Const, 0, ""}, + {"IPV6_MAXOPTHDR", Const, 0, ""}, + {"IPV6_MAXPACKET", Const, 0, ""}, + {"IPV6_MAX_GROUP_SRC_FILTER", Const, 0, ""}, + {"IPV6_MAX_MEMBERSHIPS", Const, 0, ""}, + {"IPV6_MAX_SOCK_SRC_FILTER", Const, 0, ""}, + {"IPV6_MIN_MEMBERSHIPS", Const, 0, ""}, + {"IPV6_MMTU", Const, 0, ""}, + {"IPV6_MSFILTER", Const, 0, ""}, + {"IPV6_MTU", Const, 0, ""}, + {"IPV6_MTU_DISCOVER", Const, 0, ""}, + {"IPV6_MULTICAST_HOPS", Const, 0, ""}, + {"IPV6_MULTICAST_IF", Const, 0, ""}, + {"IPV6_MULTICAST_LOOP", Const, 0, ""}, + {"IPV6_NEXTHOP", Const, 0, ""}, + {"IPV6_OPTIONS", Const, 1, ""}, + {"IPV6_PATHMTU", Const, 0, ""}, + {"IPV6_PIPEX", Const, 1, ""}, + {"IPV6_PKTINFO", Const, 0, ""}, + {"IPV6_PMTUDISC_DO", Const, 0, ""}, + {"IPV6_PMTUDISC_DONT", Const, 0, ""}, + {"IPV6_PMTUDISC_PROBE", Const, 0, ""}, + {"IPV6_PMTUDISC_WANT", Const, 0, ""}, + {"IPV6_PORTRANGE", Const, 0, ""}, + {"IPV6_PORTRANGE_DEFAULT", Const, 0, ""}, + {"IPV6_PORTRANGE_HIGH", Const, 0, ""}, + {"IPV6_PORTRANGE_LOW", Const, 0, ""}, + {"IPV6_PREFER_TEMPADDR", Const, 0, ""}, + {"IPV6_RECVDSTOPTS", Const, 0, ""}, + {"IPV6_RECVDSTPORT", Const, 3, ""}, + {"IPV6_RECVERR", Const, 0, ""}, + {"IPV6_RECVHOPLIMIT", Const, 0, ""}, + {"IPV6_RECVHOPOPTS", Const, 0, ""}, + {"IPV6_RECVPATHMTU", Const, 0, ""}, + {"IPV6_RECVPKTINFO", Const, 0, ""}, + {"IPV6_RECVRTHDR", Const, 0, ""}, + {"IPV6_RECVTCLASS", Const, 0, ""}, + {"IPV6_ROUTER_ALERT", Const, 0, ""}, + {"IPV6_RTABLE", Const, 1, ""}, + {"IPV6_RTHDR", Const, 0, ""}, + {"IPV6_RTHDRDSTOPTS", Const, 0, ""}, + {"IPV6_RTHDR_LOOSE", Const, 0, ""}, + {"IPV6_RTHDR_STRICT", Const, 0, ""}, + {"IPV6_RTHDR_TYPE_0", Const, 0, ""}, + {"IPV6_RXDSTOPTS", Const, 0, ""}, + {"IPV6_RXHOPOPTS", Const, 0, ""}, + {"IPV6_SOCKOPT_RESERVED1", Const, 0, ""}, + {"IPV6_TCLASS", Const, 0, ""}, + {"IPV6_UNICAST_HOPS", Const, 0, ""}, + {"IPV6_USE_MIN_MTU", Const, 0, ""}, + {"IPV6_V6ONLY", Const, 0, ""}, + {"IPV6_VERSION", Const, 0, ""}, + {"IPV6_VERSION_MASK", Const, 0, ""}, + {"IPV6_XFRM_POLICY", Const, 0, ""}, + {"IP_ADD_MEMBERSHIP", Const, 0, ""}, + {"IP_ADD_SOURCE_MEMBERSHIP", Const, 0, ""}, + {"IP_AUTH_LEVEL", Const, 1, ""}, + {"IP_BINDANY", Const, 0, ""}, + {"IP_BLOCK_SOURCE", Const, 0, ""}, + {"IP_BOUND_IF", Const, 0, ""}, + {"IP_DEFAULT_MULTICAST_LOOP", Const, 0, ""}, + {"IP_DEFAULT_MULTICAST_TTL", Const, 0, ""}, + {"IP_DF", Const, 0, ""}, + {"IP_DIVERTFL", Const, 3, ""}, + {"IP_DONTFRAG", Const, 0, ""}, + {"IP_DROP_MEMBERSHIP", Const, 0, ""}, + {"IP_DROP_SOURCE_MEMBERSHIP", Const, 0, ""}, + {"IP_DUMMYNET3", Const, 0, ""}, + {"IP_DUMMYNET_CONFIGURE", Const, 0, ""}, + {"IP_DUMMYNET_DEL", Const, 0, ""}, + {"IP_DUMMYNET_FLUSH", Const, 0, ""}, + {"IP_DUMMYNET_GET", Const, 0, ""}, + {"IP_EF", Const, 1, ""}, + {"IP_ERRORMTU", Const, 1, ""}, + {"IP_ESP_NETWORK_LEVEL", Const, 1, ""}, + {"IP_ESP_TRANS_LEVEL", Const, 1, ""}, + {"IP_FAITH", Const, 0, ""}, + {"IP_FREEBIND", Const, 0, 
""}, + {"IP_FW3", Const, 0, ""}, + {"IP_FW_ADD", Const, 0, ""}, + {"IP_FW_DEL", Const, 0, ""}, + {"IP_FW_FLUSH", Const, 0, ""}, + {"IP_FW_GET", Const, 0, ""}, + {"IP_FW_NAT_CFG", Const, 0, ""}, + {"IP_FW_NAT_DEL", Const, 0, ""}, + {"IP_FW_NAT_GET_CONFIG", Const, 0, ""}, + {"IP_FW_NAT_GET_LOG", Const, 0, ""}, + {"IP_FW_RESETLOG", Const, 0, ""}, + {"IP_FW_TABLE_ADD", Const, 0, ""}, + {"IP_FW_TABLE_DEL", Const, 0, ""}, + {"IP_FW_TABLE_FLUSH", Const, 0, ""}, + {"IP_FW_TABLE_GETSIZE", Const, 0, ""}, + {"IP_FW_TABLE_LIST", Const, 0, ""}, + {"IP_FW_ZERO", Const, 0, ""}, + {"IP_HDRINCL", Const, 0, ""}, + {"IP_IPCOMP_LEVEL", Const, 1, ""}, + {"IP_IPSECFLOWINFO", Const, 1, ""}, + {"IP_IPSEC_LOCAL_AUTH", Const, 1, ""}, + {"IP_IPSEC_LOCAL_CRED", Const, 1, ""}, + {"IP_IPSEC_LOCAL_ID", Const, 1, ""}, + {"IP_IPSEC_POLICY", Const, 0, ""}, + {"IP_IPSEC_REMOTE_AUTH", Const, 1, ""}, + {"IP_IPSEC_REMOTE_CRED", Const, 1, ""}, + {"IP_IPSEC_REMOTE_ID", Const, 1, ""}, + {"IP_MAXPACKET", Const, 0, ""}, + {"IP_MAX_GROUP_SRC_FILTER", Const, 0, ""}, + {"IP_MAX_MEMBERSHIPS", Const, 0, ""}, + {"IP_MAX_SOCK_MUTE_FILTER", Const, 0, ""}, + {"IP_MAX_SOCK_SRC_FILTER", Const, 0, ""}, + {"IP_MAX_SOURCE_FILTER", Const, 0, ""}, + {"IP_MF", Const, 0, ""}, + {"IP_MINFRAGSIZE", Const, 1, ""}, + {"IP_MINTTL", Const, 0, ""}, + {"IP_MIN_MEMBERSHIPS", Const, 0, ""}, + {"IP_MSFILTER", Const, 0, ""}, + {"IP_MSS", Const, 0, ""}, + {"IP_MTU", Const, 0, ""}, + {"IP_MTU_DISCOVER", Const, 0, ""}, + {"IP_MULTICAST_IF", Const, 0, ""}, + {"IP_MULTICAST_IFINDEX", Const, 0, ""}, + {"IP_MULTICAST_LOOP", Const, 0, ""}, + {"IP_MULTICAST_TTL", Const, 0, ""}, + {"IP_MULTICAST_VIF", Const, 0, ""}, + {"IP_NAT__XXX", Const, 0, ""}, + {"IP_OFFMASK", Const, 0, ""}, + {"IP_OLD_FW_ADD", Const, 0, ""}, + {"IP_OLD_FW_DEL", Const, 0, ""}, + {"IP_OLD_FW_FLUSH", Const, 0, ""}, + {"IP_OLD_FW_GET", Const, 0, ""}, + {"IP_OLD_FW_RESETLOG", Const, 0, ""}, + {"IP_OLD_FW_ZERO", Const, 0, ""}, + {"IP_ONESBCAST", Const, 0, ""}, + {"IP_OPTIONS", Const, 0, ""}, + {"IP_ORIGDSTADDR", Const, 0, ""}, + {"IP_PASSSEC", Const, 0, ""}, + {"IP_PIPEX", Const, 1, ""}, + {"IP_PKTINFO", Const, 0, ""}, + {"IP_PKTOPTIONS", Const, 0, ""}, + {"IP_PMTUDISC", Const, 0, ""}, + {"IP_PMTUDISC_DO", Const, 0, ""}, + {"IP_PMTUDISC_DONT", Const, 0, ""}, + {"IP_PMTUDISC_PROBE", Const, 0, ""}, + {"IP_PMTUDISC_WANT", Const, 0, ""}, + {"IP_PORTRANGE", Const, 0, ""}, + {"IP_PORTRANGE_DEFAULT", Const, 0, ""}, + {"IP_PORTRANGE_HIGH", Const, 0, ""}, + {"IP_PORTRANGE_LOW", Const, 0, ""}, + {"IP_RECVDSTADDR", Const, 0, ""}, + {"IP_RECVDSTPORT", Const, 1, ""}, + {"IP_RECVERR", Const, 0, ""}, + {"IP_RECVIF", Const, 0, ""}, + {"IP_RECVOPTS", Const, 0, ""}, + {"IP_RECVORIGDSTADDR", Const, 0, ""}, + {"IP_RECVPKTINFO", Const, 0, ""}, + {"IP_RECVRETOPTS", Const, 0, ""}, + {"IP_RECVRTABLE", Const, 1, ""}, + {"IP_RECVTOS", Const, 0, ""}, + {"IP_RECVTTL", Const, 0, ""}, + {"IP_RETOPTS", Const, 0, ""}, + {"IP_RF", Const, 0, ""}, + {"IP_ROUTER_ALERT", Const, 0, ""}, + {"IP_RSVP_OFF", Const, 0, ""}, + {"IP_RSVP_ON", Const, 0, ""}, + {"IP_RSVP_VIF_OFF", Const, 0, ""}, + {"IP_RSVP_VIF_ON", Const, 0, ""}, + {"IP_RTABLE", Const, 1, ""}, + {"IP_SENDSRCADDR", Const, 0, ""}, + {"IP_STRIPHDR", Const, 0, ""}, + {"IP_TOS", Const, 0, ""}, + {"IP_TRAFFIC_MGT_BACKGROUND", Const, 0, ""}, + {"IP_TRANSPARENT", Const, 0, ""}, + {"IP_TTL", Const, 0, ""}, + {"IP_UNBLOCK_SOURCE", Const, 0, ""}, + {"IP_XFRM_POLICY", Const, 0, ""}, + {"IPv6MTUInfo", Type, 2, ""}, + {"IPv6MTUInfo.Addr", Field, 2, ""}, + {"IPv6MTUInfo.Mtu", Field, 2, ""}, + 
{"IPv6Mreq", Type, 0, ""}, + {"IPv6Mreq.Interface", Field, 0, ""}, + {"IPv6Mreq.Multiaddr", Field, 0, ""}, + {"ISIG", Const, 0, ""}, + {"ISTRIP", Const, 0, ""}, + {"IUCLC", Const, 0, ""}, + {"IUTF8", Const, 0, ""}, + {"IXANY", Const, 0, ""}, + {"IXOFF", Const, 0, ""}, + {"IXON", Const, 0, ""}, + {"IfAddrmsg", Type, 0, ""}, + {"IfAddrmsg.Family", Field, 0, ""}, + {"IfAddrmsg.Flags", Field, 0, ""}, + {"IfAddrmsg.Index", Field, 0, ""}, + {"IfAddrmsg.Prefixlen", Field, 0, ""}, + {"IfAddrmsg.Scope", Field, 0, ""}, + {"IfAnnounceMsghdr", Type, 1, ""}, + {"IfAnnounceMsghdr.Hdrlen", Field, 2, ""}, + {"IfAnnounceMsghdr.Index", Field, 1, ""}, + {"IfAnnounceMsghdr.Msglen", Field, 1, ""}, + {"IfAnnounceMsghdr.Name", Field, 1, ""}, + {"IfAnnounceMsghdr.Type", Field, 1, ""}, + {"IfAnnounceMsghdr.Version", Field, 1, ""}, + {"IfAnnounceMsghdr.What", Field, 1, ""}, + {"IfData", Type, 0, ""}, + {"IfData.Addrlen", Field, 0, ""}, + {"IfData.Baudrate", Field, 0, ""}, + {"IfData.Capabilities", Field, 2, ""}, + {"IfData.Collisions", Field, 0, ""}, + {"IfData.Datalen", Field, 0, ""}, + {"IfData.Epoch", Field, 0, ""}, + {"IfData.Hdrlen", Field, 0, ""}, + {"IfData.Hwassist", Field, 0, ""}, + {"IfData.Ibytes", Field, 0, ""}, + {"IfData.Ierrors", Field, 0, ""}, + {"IfData.Imcasts", Field, 0, ""}, + {"IfData.Ipackets", Field, 0, ""}, + {"IfData.Iqdrops", Field, 0, ""}, + {"IfData.Lastchange", Field, 0, ""}, + {"IfData.Link_state", Field, 0, ""}, + {"IfData.Mclpool", Field, 2, ""}, + {"IfData.Metric", Field, 0, ""}, + {"IfData.Mtu", Field, 0, ""}, + {"IfData.Noproto", Field, 0, ""}, + {"IfData.Obytes", Field, 0, ""}, + {"IfData.Oerrors", Field, 0, ""}, + {"IfData.Omcasts", Field, 0, ""}, + {"IfData.Opackets", Field, 0, ""}, + {"IfData.Pad", Field, 2, ""}, + {"IfData.Pad_cgo_0", Field, 2, ""}, + {"IfData.Pad_cgo_1", Field, 2, ""}, + {"IfData.Physical", Field, 0, ""}, + {"IfData.Recvquota", Field, 0, ""}, + {"IfData.Recvtiming", Field, 0, ""}, + {"IfData.Reserved1", Field, 0, ""}, + {"IfData.Reserved2", Field, 0, ""}, + {"IfData.Spare_char1", Field, 0, ""}, + {"IfData.Spare_char2", Field, 0, ""}, + {"IfData.Type", Field, 0, ""}, + {"IfData.Typelen", Field, 0, ""}, + {"IfData.Unused1", Field, 0, ""}, + {"IfData.Unused2", Field, 0, ""}, + {"IfData.Xmitquota", Field, 0, ""}, + {"IfData.Xmittiming", Field, 0, ""}, + {"IfInfomsg", Type, 0, ""}, + {"IfInfomsg.Change", Field, 0, ""}, + {"IfInfomsg.Family", Field, 0, ""}, + {"IfInfomsg.Flags", Field, 0, ""}, + {"IfInfomsg.Index", Field, 0, ""}, + {"IfInfomsg.Type", Field, 0, ""}, + {"IfInfomsg.X__ifi_pad", Field, 0, ""}, + {"IfMsghdr", Type, 0, ""}, + {"IfMsghdr.Addrs", Field, 0, ""}, + {"IfMsghdr.Data", Field, 0, ""}, + {"IfMsghdr.Flags", Field, 0, ""}, + {"IfMsghdr.Hdrlen", Field, 2, ""}, + {"IfMsghdr.Index", Field, 0, ""}, + {"IfMsghdr.Msglen", Field, 0, ""}, + {"IfMsghdr.Pad1", Field, 2, ""}, + {"IfMsghdr.Pad2", Field, 2, ""}, + {"IfMsghdr.Pad_cgo_0", Field, 0, ""}, + {"IfMsghdr.Pad_cgo_1", Field, 2, ""}, + {"IfMsghdr.Tableid", Field, 2, ""}, + {"IfMsghdr.Type", Field, 0, ""}, + {"IfMsghdr.Version", Field, 0, ""}, + {"IfMsghdr.Xflags", Field, 2, ""}, + {"IfaMsghdr", Type, 0, ""}, + {"IfaMsghdr.Addrs", Field, 0, ""}, + {"IfaMsghdr.Flags", Field, 0, ""}, + {"IfaMsghdr.Hdrlen", Field, 2, ""}, + {"IfaMsghdr.Index", Field, 0, ""}, + {"IfaMsghdr.Metric", Field, 0, ""}, + {"IfaMsghdr.Msglen", Field, 0, ""}, + {"IfaMsghdr.Pad1", Field, 2, ""}, + {"IfaMsghdr.Pad2", Field, 2, ""}, + {"IfaMsghdr.Pad_cgo_0", Field, 0, ""}, + {"IfaMsghdr.Tableid", Field, 2, ""}, + {"IfaMsghdr.Type", 
Field, 0, ""}, + {"IfaMsghdr.Version", Field, 0, ""}, + {"IfmaMsghdr", Type, 0, ""}, + {"IfmaMsghdr.Addrs", Field, 0, ""}, + {"IfmaMsghdr.Flags", Field, 0, ""}, + {"IfmaMsghdr.Index", Field, 0, ""}, + {"IfmaMsghdr.Msglen", Field, 0, ""}, + {"IfmaMsghdr.Pad_cgo_0", Field, 0, ""}, + {"IfmaMsghdr.Type", Field, 0, ""}, + {"IfmaMsghdr.Version", Field, 0, ""}, + {"IfmaMsghdr2", Type, 0, ""}, + {"IfmaMsghdr2.Addrs", Field, 0, ""}, + {"IfmaMsghdr2.Flags", Field, 0, ""}, + {"IfmaMsghdr2.Index", Field, 0, ""}, + {"IfmaMsghdr2.Msglen", Field, 0, ""}, + {"IfmaMsghdr2.Pad_cgo_0", Field, 0, ""}, + {"IfmaMsghdr2.Refcount", Field, 0, ""}, + {"IfmaMsghdr2.Type", Field, 0, ""}, + {"IfmaMsghdr2.Version", Field, 0, ""}, + {"ImplementsGetwd", Const, 0, ""}, + {"Inet4Pktinfo", Type, 0, ""}, + {"Inet4Pktinfo.Addr", Field, 0, ""}, + {"Inet4Pktinfo.Ifindex", Field, 0, ""}, + {"Inet4Pktinfo.Spec_dst", Field, 0, ""}, + {"Inet6Pktinfo", Type, 0, ""}, + {"Inet6Pktinfo.Addr", Field, 0, ""}, + {"Inet6Pktinfo.Ifindex", Field, 0, ""}, + {"InotifyAddWatch", Func, 0, "func(fd int, pathname string, mask uint32) (watchdesc int, err error)"}, + {"InotifyEvent", Type, 0, ""}, + {"InotifyEvent.Cookie", Field, 0, ""}, + {"InotifyEvent.Len", Field, 0, ""}, + {"InotifyEvent.Mask", Field, 0, ""}, + {"InotifyEvent.Name", Field, 0, ""}, + {"InotifyEvent.Wd", Field, 0, ""}, + {"InotifyInit", Func, 0, "func() (fd int, err error)"}, + {"InotifyInit1", Func, 0, "func(flags int) (fd int, err error)"}, + {"InotifyRmWatch", Func, 0, "func(fd int, watchdesc uint32) (success int, err error)"}, + {"InterfaceAddrMessage", Type, 0, ""}, + {"InterfaceAddrMessage.Data", Field, 0, ""}, + {"InterfaceAddrMessage.Header", Field, 0, ""}, + {"InterfaceAnnounceMessage", Type, 1, ""}, + {"InterfaceAnnounceMessage.Header", Field, 1, ""}, + {"InterfaceInfo", Type, 0, ""}, + {"InterfaceInfo.Address", Field, 0, ""}, + {"InterfaceInfo.BroadcastAddress", Field, 0, ""}, + {"InterfaceInfo.Flags", Field, 0, ""}, + {"InterfaceInfo.Netmask", Field, 0, ""}, + {"InterfaceMessage", Type, 0, ""}, + {"InterfaceMessage.Data", Field, 0, ""}, + {"InterfaceMessage.Header", Field, 0, ""}, + {"InterfaceMulticastAddrMessage", Type, 0, ""}, + {"InterfaceMulticastAddrMessage.Data", Field, 0, ""}, + {"InterfaceMulticastAddrMessage.Header", Field, 0, ""}, + {"InvalidHandle", Const, 0, ""}, + {"Ioperm", Func, 0, "func(from int, num int, on int) (err error)"}, + {"Iopl", Func, 0, "func(level int) (err error)"}, + {"Iovec", Type, 0, ""}, + {"Iovec.Base", Field, 0, ""}, + {"Iovec.Len", Field, 0, ""}, + {"IpAdapterInfo", Type, 0, ""}, + {"IpAdapterInfo.AdapterName", Field, 0, ""}, + {"IpAdapterInfo.Address", Field, 0, ""}, + {"IpAdapterInfo.AddressLength", Field, 0, ""}, + {"IpAdapterInfo.ComboIndex", Field, 0, ""}, + {"IpAdapterInfo.CurrentIpAddress", Field, 0, ""}, + {"IpAdapterInfo.Description", Field, 0, ""}, + {"IpAdapterInfo.DhcpEnabled", Field, 0, ""}, + {"IpAdapterInfo.DhcpServer", Field, 0, ""}, + {"IpAdapterInfo.GatewayList", Field, 0, ""}, + {"IpAdapterInfo.HaveWins", Field, 0, ""}, + {"IpAdapterInfo.Index", Field, 0, ""}, + {"IpAdapterInfo.IpAddressList", Field, 0, ""}, + {"IpAdapterInfo.LeaseExpires", Field, 0, ""}, + {"IpAdapterInfo.LeaseObtained", Field, 0, ""}, + {"IpAdapterInfo.Next", Field, 0, ""}, + {"IpAdapterInfo.PrimaryWinsServer", Field, 0, ""}, + {"IpAdapterInfo.SecondaryWinsServer", Field, 0, ""}, + {"IpAdapterInfo.Type", Field, 0, ""}, + {"IpAddrString", Type, 0, ""}, + {"IpAddrString.Context", Field, 0, ""}, + {"IpAddrString.IpAddress", Field, 0, ""}, + 
{"IpAddrString.IpMask", Field, 0, ""}, + {"IpAddrString.Next", Field, 0, ""}, + {"IpAddressString", Type, 0, ""}, + {"IpAddressString.String", Field, 0, ""}, + {"IpMaskString", Type, 0, ""}, + {"IpMaskString.String", Field, 2, ""}, + {"Issetugid", Func, 0, ""}, + {"KEY_ALL_ACCESS", Const, 0, ""}, + {"KEY_CREATE_LINK", Const, 0, ""}, + {"KEY_CREATE_SUB_KEY", Const, 0, ""}, + {"KEY_ENUMERATE_SUB_KEYS", Const, 0, ""}, + {"KEY_EXECUTE", Const, 0, ""}, + {"KEY_NOTIFY", Const, 0, ""}, + {"KEY_QUERY_VALUE", Const, 0, ""}, + {"KEY_READ", Const, 0, ""}, + {"KEY_SET_VALUE", Const, 0, ""}, + {"KEY_WOW64_32KEY", Const, 0, ""}, + {"KEY_WOW64_64KEY", Const, 0, ""}, + {"KEY_WRITE", Const, 0, ""}, + {"Kevent", Func, 0, ""}, + {"Kevent_t", Type, 0, ""}, + {"Kevent_t.Data", Field, 0, ""}, + {"Kevent_t.Fflags", Field, 0, ""}, + {"Kevent_t.Filter", Field, 0, ""}, + {"Kevent_t.Flags", Field, 0, ""}, + {"Kevent_t.Ident", Field, 0, ""}, + {"Kevent_t.Pad_cgo_0", Field, 2, ""}, + {"Kevent_t.Udata", Field, 0, ""}, + {"Kill", Func, 0, "func(pid int, sig Signal) (err error)"}, + {"Klogctl", Func, 0, "func(typ int, buf []byte) (n int, err error)"}, + {"Kqueue", Func, 0, ""}, + {"LANG_ENGLISH", Const, 0, ""}, + {"LAYERED_PROTOCOL", Const, 2, ""}, + {"LCNT_OVERLOAD_FLUSH", Const, 1, ""}, + {"LINUX_REBOOT_CMD_CAD_OFF", Const, 0, ""}, + {"LINUX_REBOOT_CMD_CAD_ON", Const, 0, ""}, + {"LINUX_REBOOT_CMD_HALT", Const, 0, ""}, + {"LINUX_REBOOT_CMD_KEXEC", Const, 0, ""}, + {"LINUX_REBOOT_CMD_POWER_OFF", Const, 0, ""}, + {"LINUX_REBOOT_CMD_RESTART", Const, 0, ""}, + {"LINUX_REBOOT_CMD_RESTART2", Const, 0, ""}, + {"LINUX_REBOOT_CMD_SW_SUSPEND", Const, 0, ""}, + {"LINUX_REBOOT_MAGIC1", Const, 0, ""}, + {"LINUX_REBOOT_MAGIC2", Const, 0, ""}, + {"LOCK_EX", Const, 0, ""}, + {"LOCK_NB", Const, 0, ""}, + {"LOCK_SH", Const, 0, ""}, + {"LOCK_UN", Const, 0, ""}, + {"LazyDLL", Type, 0, ""}, + {"LazyDLL.Name", Field, 0, ""}, + {"LazyProc", Type, 0, ""}, + {"LazyProc.Name", Field, 0, ""}, + {"Lchown", Func, 0, "func(path string, uid int, gid int) (err error)"}, + {"Linger", Type, 0, ""}, + {"Linger.Linger", Field, 0, ""}, + {"Linger.Onoff", Field, 0, ""}, + {"Link", Func, 0, "func(oldpath string, newpath string) (err error)"}, + {"Listen", Func, 0, "func(s int, n int) (err error)"}, + {"Listxattr", Func, 1, "func(path string, dest []byte) (sz int, err error)"}, + {"LoadCancelIoEx", Func, 1, ""}, + {"LoadConnectEx", Func, 1, ""}, + {"LoadCreateSymbolicLink", Func, 4, ""}, + {"LoadDLL", Func, 0, ""}, + {"LoadGetAddrInfo", Func, 1, ""}, + {"LoadLibrary", Func, 0, ""}, + {"LoadSetFileCompletionNotificationModes", Func, 2, ""}, + {"LocalFree", Func, 0, ""}, + {"Log2phys_t", Type, 0, ""}, + {"Log2phys_t.Contigbytes", Field, 0, ""}, + {"Log2phys_t.Devoffset", Field, 0, ""}, + {"Log2phys_t.Flags", Field, 0, ""}, + {"LookupAccountName", Func, 0, ""}, + {"LookupAccountSid", Func, 0, ""}, + {"LookupSID", Func, 0, ""}, + {"LsfJump", Func, 0, "func(code int, k int, jt int, jf int) *SockFilter"}, + {"LsfSocket", Func, 0, "func(ifindex int, proto int) (int, error)"}, + {"LsfStmt", Func, 0, "func(code int, k int) *SockFilter"}, + {"Lstat", Func, 0, "func(path string, stat *Stat_t) (err error)"}, + {"MADV_AUTOSYNC", Const, 1, ""}, + {"MADV_CAN_REUSE", Const, 0, ""}, + {"MADV_CORE", Const, 1, ""}, + {"MADV_DOFORK", Const, 0, ""}, + {"MADV_DONTFORK", Const, 0, ""}, + {"MADV_DONTNEED", Const, 0, ""}, + {"MADV_FREE", Const, 0, ""}, + {"MADV_FREE_REUSABLE", Const, 0, ""}, + {"MADV_FREE_REUSE", Const, 0, ""}, + {"MADV_HUGEPAGE", Const, 0, ""}, + {"MADV_HWPOISON", 
Const, 0, ""}, + {"MADV_MERGEABLE", Const, 0, ""}, + {"MADV_NOCORE", Const, 1, ""}, + {"MADV_NOHUGEPAGE", Const, 0, ""}, + {"MADV_NORMAL", Const, 0, ""}, + {"MADV_NOSYNC", Const, 1, ""}, + {"MADV_PROTECT", Const, 1, ""}, + {"MADV_RANDOM", Const, 0, ""}, + {"MADV_REMOVE", Const, 0, ""}, + {"MADV_SEQUENTIAL", Const, 0, ""}, + {"MADV_SPACEAVAIL", Const, 3, ""}, + {"MADV_UNMERGEABLE", Const, 0, ""}, + {"MADV_WILLNEED", Const, 0, ""}, + {"MADV_ZERO_WIRED_PAGES", Const, 0, ""}, + {"MAP_32BIT", Const, 0, ""}, + {"MAP_ALIGNED_SUPER", Const, 3, ""}, + {"MAP_ALIGNMENT_16MB", Const, 3, ""}, + {"MAP_ALIGNMENT_1TB", Const, 3, ""}, + {"MAP_ALIGNMENT_256TB", Const, 3, ""}, + {"MAP_ALIGNMENT_4GB", Const, 3, ""}, + {"MAP_ALIGNMENT_64KB", Const, 3, ""}, + {"MAP_ALIGNMENT_64PB", Const, 3, ""}, + {"MAP_ALIGNMENT_MASK", Const, 3, ""}, + {"MAP_ALIGNMENT_SHIFT", Const, 3, ""}, + {"MAP_ANON", Const, 0, ""}, + {"MAP_ANONYMOUS", Const, 0, ""}, + {"MAP_COPY", Const, 0, ""}, + {"MAP_DENYWRITE", Const, 0, ""}, + {"MAP_EXECUTABLE", Const, 0, ""}, + {"MAP_FILE", Const, 0, ""}, + {"MAP_FIXED", Const, 0, ""}, + {"MAP_FLAGMASK", Const, 3, ""}, + {"MAP_GROWSDOWN", Const, 0, ""}, + {"MAP_HASSEMAPHORE", Const, 0, ""}, + {"MAP_HUGETLB", Const, 0, ""}, + {"MAP_INHERIT", Const, 3, ""}, + {"MAP_INHERIT_COPY", Const, 3, ""}, + {"MAP_INHERIT_DEFAULT", Const, 3, ""}, + {"MAP_INHERIT_DONATE_COPY", Const, 3, ""}, + {"MAP_INHERIT_NONE", Const, 3, ""}, + {"MAP_INHERIT_SHARE", Const, 3, ""}, + {"MAP_JIT", Const, 0, ""}, + {"MAP_LOCKED", Const, 0, ""}, + {"MAP_NOCACHE", Const, 0, ""}, + {"MAP_NOCORE", Const, 1, ""}, + {"MAP_NOEXTEND", Const, 0, ""}, + {"MAP_NONBLOCK", Const, 0, ""}, + {"MAP_NORESERVE", Const, 0, ""}, + {"MAP_NOSYNC", Const, 1, ""}, + {"MAP_POPULATE", Const, 0, ""}, + {"MAP_PREFAULT_READ", Const, 1, ""}, + {"MAP_PRIVATE", Const, 0, ""}, + {"MAP_RENAME", Const, 0, ""}, + {"MAP_RESERVED0080", Const, 0, ""}, + {"MAP_RESERVED0100", Const, 1, ""}, + {"MAP_SHARED", Const, 0, ""}, + {"MAP_STACK", Const, 0, ""}, + {"MAP_TRYFIXED", Const, 3, ""}, + {"MAP_TYPE", Const, 0, ""}, + {"MAP_WIRED", Const, 3, ""}, + {"MAXIMUM_REPARSE_DATA_BUFFER_SIZE", Const, 4, ""}, + {"MAXLEN_IFDESCR", Const, 0, ""}, + {"MAXLEN_PHYSADDR", Const, 0, ""}, + {"MAX_ADAPTER_ADDRESS_LENGTH", Const, 0, ""}, + {"MAX_ADAPTER_DESCRIPTION_LENGTH", Const, 0, ""}, + {"MAX_ADAPTER_NAME_LENGTH", Const, 0, ""}, + {"MAX_COMPUTERNAME_LENGTH", Const, 0, ""}, + {"MAX_INTERFACE_NAME_LEN", Const, 0, ""}, + {"MAX_LONG_PATH", Const, 0, ""}, + {"MAX_PATH", Const, 0, ""}, + {"MAX_PROTOCOL_CHAIN", Const, 2, ""}, + {"MCL_CURRENT", Const, 0, ""}, + {"MCL_FUTURE", Const, 0, ""}, + {"MNT_DETACH", Const, 0, ""}, + {"MNT_EXPIRE", Const, 0, ""}, + {"MNT_FORCE", Const, 0, ""}, + {"MSG_BCAST", Const, 1, ""}, + {"MSG_CMSG_CLOEXEC", Const, 0, ""}, + {"MSG_COMPAT", Const, 0, ""}, + {"MSG_CONFIRM", Const, 0, ""}, + {"MSG_CONTROLMBUF", Const, 1, ""}, + {"MSG_CTRUNC", Const, 0, ""}, + {"MSG_DONTROUTE", Const, 0, ""}, + {"MSG_DONTWAIT", Const, 0, ""}, + {"MSG_EOF", Const, 0, ""}, + {"MSG_EOR", Const, 0, ""}, + {"MSG_ERRQUEUE", Const, 0, ""}, + {"MSG_FASTOPEN", Const, 1, ""}, + {"MSG_FIN", Const, 0, ""}, + {"MSG_FLUSH", Const, 0, ""}, + {"MSG_HAVEMORE", Const, 0, ""}, + {"MSG_HOLD", Const, 0, ""}, + {"MSG_IOVUSRSPACE", Const, 1, ""}, + {"MSG_LENUSRSPACE", Const, 1, ""}, + {"MSG_MCAST", Const, 1, ""}, + {"MSG_MORE", Const, 0, ""}, + {"MSG_NAMEMBUF", Const, 1, ""}, + {"MSG_NBIO", Const, 0, ""}, + {"MSG_NEEDSA", Const, 0, ""}, + {"MSG_NOSIGNAL", Const, 0, ""}, + {"MSG_NOTIFICATION", Const, 0, ""}, + 
{"MSG_OOB", Const, 0, ""}, + {"MSG_PEEK", Const, 0, ""}, + {"MSG_PROXY", Const, 0, ""}, + {"MSG_RCVMORE", Const, 0, ""}, + {"MSG_RST", Const, 0, ""}, + {"MSG_SEND", Const, 0, ""}, + {"MSG_SYN", Const, 0, ""}, + {"MSG_TRUNC", Const, 0, ""}, + {"MSG_TRYHARD", Const, 0, ""}, + {"MSG_USERFLAGS", Const, 1, ""}, + {"MSG_WAITALL", Const, 0, ""}, + {"MSG_WAITFORONE", Const, 0, ""}, + {"MSG_WAITSTREAM", Const, 0, ""}, + {"MS_ACTIVE", Const, 0, ""}, + {"MS_ASYNC", Const, 0, ""}, + {"MS_BIND", Const, 0, ""}, + {"MS_DEACTIVATE", Const, 0, ""}, + {"MS_DIRSYNC", Const, 0, ""}, + {"MS_INVALIDATE", Const, 0, ""}, + {"MS_I_VERSION", Const, 0, ""}, + {"MS_KERNMOUNT", Const, 0, ""}, + {"MS_KILLPAGES", Const, 0, ""}, + {"MS_MANDLOCK", Const, 0, ""}, + {"MS_MGC_MSK", Const, 0, ""}, + {"MS_MGC_VAL", Const, 0, ""}, + {"MS_MOVE", Const, 0, ""}, + {"MS_NOATIME", Const, 0, ""}, + {"MS_NODEV", Const, 0, ""}, + {"MS_NODIRATIME", Const, 0, ""}, + {"MS_NOEXEC", Const, 0, ""}, + {"MS_NOSUID", Const, 0, ""}, + {"MS_NOUSER", Const, 0, ""}, + {"MS_POSIXACL", Const, 0, ""}, + {"MS_PRIVATE", Const, 0, ""}, + {"MS_RDONLY", Const, 0, ""}, + {"MS_REC", Const, 0, ""}, + {"MS_RELATIME", Const, 0, ""}, + {"MS_REMOUNT", Const, 0, ""}, + {"MS_RMT_MASK", Const, 0, ""}, + {"MS_SHARED", Const, 0, ""}, + {"MS_SILENT", Const, 0, ""}, + {"MS_SLAVE", Const, 0, ""}, + {"MS_STRICTATIME", Const, 0, ""}, + {"MS_SYNC", Const, 0, ""}, + {"MS_SYNCHRONOUS", Const, 0, ""}, + {"MS_UNBINDABLE", Const, 0, ""}, + {"Madvise", Func, 0, "func(b []byte, advice int) (err error)"}, + {"MapViewOfFile", Func, 0, ""}, + {"MaxTokenInfoClass", Const, 0, ""}, + {"Mclpool", Type, 2, ""}, + {"Mclpool.Alive", Field, 2, ""}, + {"Mclpool.Cwm", Field, 2, ""}, + {"Mclpool.Grown", Field, 2, ""}, + {"Mclpool.Hwm", Field, 2, ""}, + {"Mclpool.Lwm", Field, 2, ""}, + {"MibIfRow", Type, 0, ""}, + {"MibIfRow.AdminStatus", Field, 0, ""}, + {"MibIfRow.Descr", Field, 0, ""}, + {"MibIfRow.DescrLen", Field, 0, ""}, + {"MibIfRow.InDiscards", Field, 0, ""}, + {"MibIfRow.InErrors", Field, 0, ""}, + {"MibIfRow.InNUcastPkts", Field, 0, ""}, + {"MibIfRow.InOctets", Field, 0, ""}, + {"MibIfRow.InUcastPkts", Field, 0, ""}, + {"MibIfRow.InUnknownProtos", Field, 0, ""}, + {"MibIfRow.Index", Field, 0, ""}, + {"MibIfRow.LastChange", Field, 0, ""}, + {"MibIfRow.Mtu", Field, 0, ""}, + {"MibIfRow.Name", Field, 0, ""}, + {"MibIfRow.OperStatus", Field, 0, ""}, + {"MibIfRow.OutDiscards", Field, 0, ""}, + {"MibIfRow.OutErrors", Field, 0, ""}, + {"MibIfRow.OutNUcastPkts", Field, 0, ""}, + {"MibIfRow.OutOctets", Field, 0, ""}, + {"MibIfRow.OutQLen", Field, 0, ""}, + {"MibIfRow.OutUcastPkts", Field, 0, ""}, + {"MibIfRow.PhysAddr", Field, 0, ""}, + {"MibIfRow.PhysAddrLen", Field, 0, ""}, + {"MibIfRow.Speed", Field, 0, ""}, + {"MibIfRow.Type", Field, 0, ""}, + {"Mkdir", Func, 0, "func(path string, mode uint32) (err error)"}, + {"Mkdirat", Func, 0, "func(dirfd int, path string, mode uint32) (err error)"}, + {"Mkfifo", Func, 0, "func(path string, mode uint32) (err error)"}, + {"Mknod", Func, 0, "func(path string, mode uint32, dev int) (err error)"}, + {"Mknodat", Func, 0, "func(dirfd int, path string, mode uint32, dev int) (err error)"}, + {"Mlock", Func, 0, "func(b []byte) (err error)"}, + {"Mlockall", Func, 0, "func(flags int) (err error)"}, + {"Mmap", Func, 0, "func(fd int, offset int64, length int, prot int, flags int) (data []byte, err error)"}, + {"Mount", Func, 0, "func(source string, target string, fstype string, flags uintptr, data string) (err error)"}, + {"MoveFile", Func, 0, ""}, + {"Mprotect", 
Func, 0, "func(b []byte, prot int) (err error)"}, + {"Msghdr", Type, 0, ""}, + {"Msghdr.Control", Field, 0, ""}, + {"Msghdr.Controllen", Field, 0, ""}, + {"Msghdr.Flags", Field, 0, ""}, + {"Msghdr.Iov", Field, 0, ""}, + {"Msghdr.Iovlen", Field, 0, ""}, + {"Msghdr.Name", Field, 0, ""}, + {"Msghdr.Namelen", Field, 0, ""}, + {"Msghdr.Pad_cgo_0", Field, 0, ""}, + {"Msghdr.Pad_cgo_1", Field, 0, ""}, + {"Munlock", Func, 0, "func(b []byte) (err error)"}, + {"Munlockall", Func, 0, "func() (err error)"}, + {"Munmap", Func, 0, "func(b []byte) (err error)"}, + {"MustLoadDLL", Func, 0, ""}, + {"NAME_MAX", Const, 0, ""}, + {"NETLINK_ADD_MEMBERSHIP", Const, 0, ""}, + {"NETLINK_AUDIT", Const, 0, ""}, + {"NETLINK_BROADCAST_ERROR", Const, 0, ""}, + {"NETLINK_CONNECTOR", Const, 0, ""}, + {"NETLINK_DNRTMSG", Const, 0, ""}, + {"NETLINK_DROP_MEMBERSHIP", Const, 0, ""}, + {"NETLINK_ECRYPTFS", Const, 0, ""}, + {"NETLINK_FIB_LOOKUP", Const, 0, ""}, + {"NETLINK_FIREWALL", Const, 0, ""}, + {"NETLINK_GENERIC", Const, 0, ""}, + {"NETLINK_INET_DIAG", Const, 0, ""}, + {"NETLINK_IP6_FW", Const, 0, ""}, + {"NETLINK_ISCSI", Const, 0, ""}, + {"NETLINK_KOBJECT_UEVENT", Const, 0, ""}, + {"NETLINK_NETFILTER", Const, 0, ""}, + {"NETLINK_NFLOG", Const, 0, ""}, + {"NETLINK_NO_ENOBUFS", Const, 0, ""}, + {"NETLINK_PKTINFO", Const, 0, ""}, + {"NETLINK_RDMA", Const, 0, ""}, + {"NETLINK_ROUTE", Const, 0, ""}, + {"NETLINK_SCSITRANSPORT", Const, 0, ""}, + {"NETLINK_SELINUX", Const, 0, ""}, + {"NETLINK_UNUSED", Const, 0, ""}, + {"NETLINK_USERSOCK", Const, 0, ""}, + {"NETLINK_XFRM", Const, 0, ""}, + {"NET_RT_DUMP", Const, 0, ""}, + {"NET_RT_DUMP2", Const, 0, ""}, + {"NET_RT_FLAGS", Const, 0, ""}, + {"NET_RT_IFLIST", Const, 0, ""}, + {"NET_RT_IFLIST2", Const, 0, ""}, + {"NET_RT_IFLISTL", Const, 1, ""}, + {"NET_RT_IFMALIST", Const, 0, ""}, + {"NET_RT_MAXID", Const, 0, ""}, + {"NET_RT_OIFLIST", Const, 1, ""}, + {"NET_RT_OOIFLIST", Const, 1, ""}, + {"NET_RT_STAT", Const, 0, ""}, + {"NET_RT_STATS", Const, 1, ""}, + {"NET_RT_TABLE", Const, 1, ""}, + {"NET_RT_TRASH", Const, 0, ""}, + {"NLA_ALIGNTO", Const, 0, ""}, + {"NLA_F_NESTED", Const, 0, ""}, + {"NLA_F_NET_BYTEORDER", Const, 0, ""}, + {"NLA_HDRLEN", Const, 0, ""}, + {"NLMSG_ALIGNTO", Const, 0, ""}, + {"NLMSG_DONE", Const, 0, ""}, + {"NLMSG_ERROR", Const, 0, ""}, + {"NLMSG_HDRLEN", Const, 0, ""}, + {"NLMSG_MIN_TYPE", Const, 0, ""}, + {"NLMSG_NOOP", Const, 0, ""}, + {"NLMSG_OVERRUN", Const, 0, ""}, + {"NLM_F_ACK", Const, 0, ""}, + {"NLM_F_APPEND", Const, 0, ""}, + {"NLM_F_ATOMIC", Const, 0, ""}, + {"NLM_F_CREATE", Const, 0, ""}, + {"NLM_F_DUMP", Const, 0, ""}, + {"NLM_F_ECHO", Const, 0, ""}, + {"NLM_F_EXCL", Const, 0, ""}, + {"NLM_F_MATCH", Const, 0, ""}, + {"NLM_F_MULTI", Const, 0, ""}, + {"NLM_F_REPLACE", Const, 0, ""}, + {"NLM_F_REQUEST", Const, 0, ""}, + {"NLM_F_ROOT", Const, 0, ""}, + {"NOFLSH", Const, 0, ""}, + {"NOTE_ABSOLUTE", Const, 0, ""}, + {"NOTE_ATTRIB", Const, 0, ""}, + {"NOTE_BACKGROUND", Const, 16, ""}, + {"NOTE_CHILD", Const, 0, ""}, + {"NOTE_CRITICAL", Const, 16, ""}, + {"NOTE_DELETE", Const, 0, ""}, + {"NOTE_EOF", Const, 1, ""}, + {"NOTE_EXEC", Const, 0, ""}, + {"NOTE_EXIT", Const, 0, ""}, + {"NOTE_EXITSTATUS", Const, 0, ""}, + {"NOTE_EXIT_CSERROR", Const, 16, ""}, + {"NOTE_EXIT_DECRYPTFAIL", Const, 16, ""}, + {"NOTE_EXIT_DETAIL", Const, 16, ""}, + {"NOTE_EXIT_DETAIL_MASK", Const, 16, ""}, + {"NOTE_EXIT_MEMORY", Const, 16, ""}, + {"NOTE_EXIT_REPARENTED", Const, 16, ""}, + {"NOTE_EXTEND", Const, 0, ""}, + {"NOTE_FFAND", Const, 0, ""}, + {"NOTE_FFCOPY", Const, 0, ""}, + 
{"NOTE_FFCTRLMASK", Const, 0, ""}, + {"NOTE_FFLAGSMASK", Const, 0, ""}, + {"NOTE_FFNOP", Const, 0, ""}, + {"NOTE_FFOR", Const, 0, ""}, + {"NOTE_FORK", Const, 0, ""}, + {"NOTE_LEEWAY", Const, 16, ""}, + {"NOTE_LINK", Const, 0, ""}, + {"NOTE_LOWAT", Const, 0, ""}, + {"NOTE_NONE", Const, 0, ""}, + {"NOTE_NSECONDS", Const, 0, ""}, + {"NOTE_PCTRLMASK", Const, 0, ""}, + {"NOTE_PDATAMASK", Const, 0, ""}, + {"NOTE_REAP", Const, 0, ""}, + {"NOTE_RENAME", Const, 0, ""}, + {"NOTE_RESOURCEEND", Const, 0, ""}, + {"NOTE_REVOKE", Const, 0, ""}, + {"NOTE_SECONDS", Const, 0, ""}, + {"NOTE_SIGNAL", Const, 0, ""}, + {"NOTE_TRACK", Const, 0, ""}, + {"NOTE_TRACKERR", Const, 0, ""}, + {"NOTE_TRIGGER", Const, 0, ""}, + {"NOTE_TRUNCATE", Const, 1, ""}, + {"NOTE_USECONDS", Const, 0, ""}, + {"NOTE_VM_ERROR", Const, 0, ""}, + {"NOTE_VM_PRESSURE", Const, 0, ""}, + {"NOTE_VM_PRESSURE_SUDDEN_TERMINATE", Const, 0, ""}, + {"NOTE_VM_PRESSURE_TERMINATE", Const, 0, ""}, + {"NOTE_WRITE", Const, 0, ""}, + {"NameCanonical", Const, 0, ""}, + {"NameCanonicalEx", Const, 0, ""}, + {"NameDisplay", Const, 0, ""}, + {"NameDnsDomain", Const, 0, ""}, + {"NameFullyQualifiedDN", Const, 0, ""}, + {"NameSamCompatible", Const, 0, ""}, + {"NameServicePrincipal", Const, 0, ""}, + {"NameUniqueId", Const, 0, ""}, + {"NameUnknown", Const, 0, ""}, + {"NameUserPrincipal", Const, 0, ""}, + {"Nanosleep", Func, 0, "func(time *Timespec, leftover *Timespec) (err error)"}, + {"NetApiBufferFree", Func, 0, ""}, + {"NetGetJoinInformation", Func, 2, ""}, + {"NetSetupDomainName", Const, 2, ""}, + {"NetSetupUnjoined", Const, 2, ""}, + {"NetSetupUnknownStatus", Const, 2, ""}, + {"NetSetupWorkgroupName", Const, 2, ""}, + {"NetUserGetInfo", Func, 0, ""}, + {"NetlinkMessage", Type, 0, ""}, + {"NetlinkMessage.Data", Field, 0, ""}, + {"NetlinkMessage.Header", Field, 0, ""}, + {"NetlinkRIB", Func, 0, "func(proto int, family int) ([]byte, error)"}, + {"NetlinkRouteAttr", Type, 0, ""}, + {"NetlinkRouteAttr.Attr", Field, 0, ""}, + {"NetlinkRouteAttr.Value", Field, 0, ""}, + {"NetlinkRouteRequest", Type, 0, ""}, + {"NetlinkRouteRequest.Data", Field, 0, ""}, + {"NetlinkRouteRequest.Header", Field, 0, ""}, + {"NewCallback", Func, 0, ""}, + {"NewCallbackCDecl", Func, 3, ""}, + {"NewLazyDLL", Func, 0, ""}, + {"NlAttr", Type, 0, ""}, + {"NlAttr.Len", Field, 0, ""}, + {"NlAttr.Type", Field, 0, ""}, + {"NlMsgerr", Type, 0, ""}, + {"NlMsgerr.Error", Field, 0, ""}, + {"NlMsgerr.Msg", Field, 0, ""}, + {"NlMsghdr", Type, 0, ""}, + {"NlMsghdr.Flags", Field, 0, ""}, + {"NlMsghdr.Len", Field, 0, ""}, + {"NlMsghdr.Pid", Field, 0, ""}, + {"NlMsghdr.Seq", Field, 0, ""}, + {"NlMsghdr.Type", Field, 0, ""}, + {"NsecToFiletime", Func, 0, ""}, + {"NsecToTimespec", Func, 0, "func(nsec int64) Timespec"}, + {"NsecToTimeval", Func, 0, "func(nsec int64) Timeval"}, + {"Ntohs", Func, 0, ""}, + {"OCRNL", Const, 0, ""}, + {"OFDEL", Const, 0, ""}, + {"OFILL", Const, 0, ""}, + {"OFIOGETBMAP", Const, 1, ""}, + {"OID_PKIX_KP_SERVER_AUTH", Var, 0, ""}, + {"OID_SERVER_GATED_CRYPTO", Var, 0, ""}, + {"OID_SGC_NETSCAPE", Var, 0, ""}, + {"OLCUC", Const, 0, ""}, + {"ONLCR", Const, 0, ""}, + {"ONLRET", Const, 0, ""}, + {"ONOCR", Const, 0, ""}, + {"ONOEOT", Const, 1, ""}, + {"OPEN_ALWAYS", Const, 0, ""}, + {"OPEN_EXISTING", Const, 0, ""}, + {"OPOST", Const, 0, ""}, + {"O_ACCMODE", Const, 0, ""}, + {"O_ALERT", Const, 0, ""}, + {"O_ALT_IO", Const, 1, ""}, + {"O_APPEND", Const, 0, ""}, + {"O_ASYNC", Const, 0, ""}, + {"O_CLOEXEC", Const, 0, ""}, + {"O_CREAT", Const, 0, ""}, + {"O_DIRECT", Const, 0, ""}, + 
{"O_DIRECTORY", Const, 0, ""}, + {"O_DP_GETRAWENCRYPTED", Const, 16, ""}, + {"O_DSYNC", Const, 0, ""}, + {"O_EVTONLY", Const, 0, ""}, + {"O_EXCL", Const, 0, ""}, + {"O_EXEC", Const, 0, ""}, + {"O_EXLOCK", Const, 0, ""}, + {"O_FSYNC", Const, 0, ""}, + {"O_LARGEFILE", Const, 0, ""}, + {"O_NDELAY", Const, 0, ""}, + {"O_NOATIME", Const, 0, ""}, + {"O_NOCTTY", Const, 0, ""}, + {"O_NOFOLLOW", Const, 0, ""}, + {"O_NONBLOCK", Const, 0, ""}, + {"O_NOSIGPIPE", Const, 1, ""}, + {"O_POPUP", Const, 0, ""}, + {"O_RDONLY", Const, 0, ""}, + {"O_RDWR", Const, 0, ""}, + {"O_RSYNC", Const, 0, ""}, + {"O_SHLOCK", Const, 0, ""}, + {"O_SYMLINK", Const, 0, ""}, + {"O_SYNC", Const, 0, ""}, + {"O_TRUNC", Const, 0, ""}, + {"O_TTY_INIT", Const, 0, ""}, + {"O_WRONLY", Const, 0, ""}, + {"Open", Func, 0, "func(path string, mode int, perm uint32) (fd int, err error)"}, + {"OpenCurrentProcessToken", Func, 0, ""}, + {"OpenProcess", Func, 0, ""}, + {"OpenProcessToken", Func, 0, ""}, + {"Openat", Func, 0, "func(dirfd int, path string, flags int, mode uint32) (fd int, err error)"}, + {"Overlapped", Type, 0, ""}, + {"Overlapped.HEvent", Field, 0, ""}, + {"Overlapped.Internal", Field, 0, ""}, + {"Overlapped.InternalHigh", Field, 0, ""}, + {"Overlapped.Offset", Field, 0, ""}, + {"Overlapped.OffsetHigh", Field, 0, ""}, + {"PACKET_ADD_MEMBERSHIP", Const, 0, ""}, + {"PACKET_BROADCAST", Const, 0, ""}, + {"PACKET_DROP_MEMBERSHIP", Const, 0, ""}, + {"PACKET_FASTROUTE", Const, 0, ""}, + {"PACKET_HOST", Const, 0, ""}, + {"PACKET_LOOPBACK", Const, 0, ""}, + {"PACKET_MR_ALLMULTI", Const, 0, ""}, + {"PACKET_MR_MULTICAST", Const, 0, ""}, + {"PACKET_MR_PROMISC", Const, 0, ""}, + {"PACKET_MULTICAST", Const, 0, ""}, + {"PACKET_OTHERHOST", Const, 0, ""}, + {"PACKET_OUTGOING", Const, 0, ""}, + {"PACKET_RECV_OUTPUT", Const, 0, ""}, + {"PACKET_RX_RING", Const, 0, ""}, + {"PACKET_STATISTICS", Const, 0, ""}, + {"PAGE_EXECUTE_READ", Const, 0, ""}, + {"PAGE_EXECUTE_READWRITE", Const, 0, ""}, + {"PAGE_EXECUTE_WRITECOPY", Const, 0, ""}, + {"PAGE_READONLY", Const, 0, ""}, + {"PAGE_READWRITE", Const, 0, ""}, + {"PAGE_WRITECOPY", Const, 0, ""}, + {"PARENB", Const, 0, ""}, + {"PARMRK", Const, 0, ""}, + {"PARODD", Const, 0, ""}, + {"PENDIN", Const, 0, ""}, + {"PFL_HIDDEN", Const, 2, ""}, + {"PFL_MATCHES_PROTOCOL_ZERO", Const, 2, ""}, + {"PFL_MULTIPLE_PROTO_ENTRIES", Const, 2, ""}, + {"PFL_NETWORKDIRECT_PROVIDER", Const, 2, ""}, + {"PFL_RECOMMENDED_PROTO_ENTRY", Const, 2, ""}, + {"PF_FLUSH", Const, 1, ""}, + {"PKCS_7_ASN_ENCODING", Const, 0, ""}, + {"PMC5_PIPELINE_FLUSH", Const, 1, ""}, + {"PRIO_PGRP", Const, 2, ""}, + {"PRIO_PROCESS", Const, 2, ""}, + {"PRIO_USER", Const, 2, ""}, + {"PRI_IOFLUSH", Const, 1, ""}, + {"PROCESS_QUERY_INFORMATION", Const, 0, ""}, + {"PROCESS_TERMINATE", Const, 2, ""}, + {"PROT_EXEC", Const, 0, ""}, + {"PROT_GROWSDOWN", Const, 0, ""}, + {"PROT_GROWSUP", Const, 0, ""}, + {"PROT_NONE", Const, 0, ""}, + {"PROT_READ", Const, 0, ""}, + {"PROT_WRITE", Const, 0, ""}, + {"PROV_DH_SCHANNEL", Const, 0, ""}, + {"PROV_DSS", Const, 0, ""}, + {"PROV_DSS_DH", Const, 0, ""}, + {"PROV_EC_ECDSA_FULL", Const, 0, ""}, + {"PROV_EC_ECDSA_SIG", Const, 0, ""}, + {"PROV_EC_ECNRA_FULL", Const, 0, ""}, + {"PROV_EC_ECNRA_SIG", Const, 0, ""}, + {"PROV_FORTEZZA", Const, 0, ""}, + {"PROV_INTEL_SEC", Const, 0, ""}, + {"PROV_MS_EXCHANGE", Const, 0, ""}, + {"PROV_REPLACE_OWF", Const, 0, ""}, + {"PROV_RNG", Const, 0, ""}, + {"PROV_RSA_AES", Const, 0, ""}, + {"PROV_RSA_FULL", Const, 0, ""}, + {"PROV_RSA_SCHANNEL", Const, 0, ""}, + {"PROV_RSA_SIG", Const, 0, ""}, + 
{"PROV_SPYRUS_LYNKS", Const, 0, ""}, + {"PROV_SSL", Const, 0, ""}, + {"PR_CAPBSET_DROP", Const, 0, ""}, + {"PR_CAPBSET_READ", Const, 0, ""}, + {"PR_CLEAR_SECCOMP_FILTER", Const, 0, ""}, + {"PR_ENDIAN_BIG", Const, 0, ""}, + {"PR_ENDIAN_LITTLE", Const, 0, ""}, + {"PR_ENDIAN_PPC_LITTLE", Const, 0, ""}, + {"PR_FPEMU_NOPRINT", Const, 0, ""}, + {"PR_FPEMU_SIGFPE", Const, 0, ""}, + {"PR_FP_EXC_ASYNC", Const, 0, ""}, + {"PR_FP_EXC_DISABLED", Const, 0, ""}, + {"PR_FP_EXC_DIV", Const, 0, ""}, + {"PR_FP_EXC_INV", Const, 0, ""}, + {"PR_FP_EXC_NONRECOV", Const, 0, ""}, + {"PR_FP_EXC_OVF", Const, 0, ""}, + {"PR_FP_EXC_PRECISE", Const, 0, ""}, + {"PR_FP_EXC_RES", Const, 0, ""}, + {"PR_FP_EXC_SW_ENABLE", Const, 0, ""}, + {"PR_FP_EXC_UND", Const, 0, ""}, + {"PR_GET_DUMPABLE", Const, 0, ""}, + {"PR_GET_ENDIAN", Const, 0, ""}, + {"PR_GET_FPEMU", Const, 0, ""}, + {"PR_GET_FPEXC", Const, 0, ""}, + {"PR_GET_KEEPCAPS", Const, 0, ""}, + {"PR_GET_NAME", Const, 0, ""}, + {"PR_GET_PDEATHSIG", Const, 0, ""}, + {"PR_GET_SECCOMP", Const, 0, ""}, + {"PR_GET_SECCOMP_FILTER", Const, 0, ""}, + {"PR_GET_SECUREBITS", Const, 0, ""}, + {"PR_GET_TIMERSLACK", Const, 0, ""}, + {"PR_GET_TIMING", Const, 0, ""}, + {"PR_GET_TSC", Const, 0, ""}, + {"PR_GET_UNALIGN", Const, 0, ""}, + {"PR_MCE_KILL", Const, 0, ""}, + {"PR_MCE_KILL_CLEAR", Const, 0, ""}, + {"PR_MCE_KILL_DEFAULT", Const, 0, ""}, + {"PR_MCE_KILL_EARLY", Const, 0, ""}, + {"PR_MCE_KILL_GET", Const, 0, ""}, + {"PR_MCE_KILL_LATE", Const, 0, ""}, + {"PR_MCE_KILL_SET", Const, 0, ""}, + {"PR_SECCOMP_FILTER_EVENT", Const, 0, ""}, + {"PR_SECCOMP_FILTER_SYSCALL", Const, 0, ""}, + {"PR_SET_DUMPABLE", Const, 0, ""}, + {"PR_SET_ENDIAN", Const, 0, ""}, + {"PR_SET_FPEMU", Const, 0, ""}, + {"PR_SET_FPEXC", Const, 0, ""}, + {"PR_SET_KEEPCAPS", Const, 0, ""}, + {"PR_SET_NAME", Const, 0, ""}, + {"PR_SET_PDEATHSIG", Const, 0, ""}, + {"PR_SET_PTRACER", Const, 0, ""}, + {"PR_SET_SECCOMP", Const, 0, ""}, + {"PR_SET_SECCOMP_FILTER", Const, 0, ""}, + {"PR_SET_SECUREBITS", Const, 0, ""}, + {"PR_SET_TIMERSLACK", Const, 0, ""}, + {"PR_SET_TIMING", Const, 0, ""}, + {"PR_SET_TSC", Const, 0, ""}, + {"PR_SET_UNALIGN", Const, 0, ""}, + {"PR_TASK_PERF_EVENTS_DISABLE", Const, 0, ""}, + {"PR_TASK_PERF_EVENTS_ENABLE", Const, 0, ""}, + {"PR_TIMING_STATISTICAL", Const, 0, ""}, + {"PR_TIMING_TIMESTAMP", Const, 0, ""}, + {"PR_TSC_ENABLE", Const, 0, ""}, + {"PR_TSC_SIGSEGV", Const, 0, ""}, + {"PR_UNALIGN_NOPRINT", Const, 0, ""}, + {"PR_UNALIGN_SIGBUS", Const, 0, ""}, + {"PTRACE_ARCH_PRCTL", Const, 0, ""}, + {"PTRACE_ATTACH", Const, 0, ""}, + {"PTRACE_CONT", Const, 0, ""}, + {"PTRACE_DETACH", Const, 0, ""}, + {"PTRACE_EVENT_CLONE", Const, 0, ""}, + {"PTRACE_EVENT_EXEC", Const, 0, ""}, + {"PTRACE_EVENT_EXIT", Const, 0, ""}, + {"PTRACE_EVENT_FORK", Const, 0, ""}, + {"PTRACE_EVENT_VFORK", Const, 0, ""}, + {"PTRACE_EVENT_VFORK_DONE", Const, 0, ""}, + {"PTRACE_GETCRUNCHREGS", Const, 0, ""}, + {"PTRACE_GETEVENTMSG", Const, 0, ""}, + {"PTRACE_GETFPREGS", Const, 0, ""}, + {"PTRACE_GETFPXREGS", Const, 0, ""}, + {"PTRACE_GETHBPREGS", Const, 0, ""}, + {"PTRACE_GETREGS", Const, 0, ""}, + {"PTRACE_GETREGSET", Const, 0, ""}, + {"PTRACE_GETSIGINFO", Const, 0, ""}, + {"PTRACE_GETVFPREGS", Const, 0, ""}, + {"PTRACE_GETWMMXREGS", Const, 0, ""}, + {"PTRACE_GET_THREAD_AREA", Const, 0, ""}, + {"PTRACE_KILL", Const, 0, ""}, + {"PTRACE_OLDSETOPTIONS", Const, 0, ""}, + {"PTRACE_O_MASK", Const, 0, ""}, + {"PTRACE_O_TRACECLONE", Const, 0, ""}, + {"PTRACE_O_TRACEEXEC", Const, 0, ""}, + {"PTRACE_O_TRACEEXIT", Const, 0, ""}, + 
{"PTRACE_O_TRACEFORK", Const, 0, ""}, + {"PTRACE_O_TRACESYSGOOD", Const, 0, ""}, + {"PTRACE_O_TRACEVFORK", Const, 0, ""}, + {"PTRACE_O_TRACEVFORKDONE", Const, 0, ""}, + {"PTRACE_PEEKDATA", Const, 0, ""}, + {"PTRACE_PEEKTEXT", Const, 0, ""}, + {"PTRACE_PEEKUSR", Const, 0, ""}, + {"PTRACE_POKEDATA", Const, 0, ""}, + {"PTRACE_POKETEXT", Const, 0, ""}, + {"PTRACE_POKEUSR", Const, 0, ""}, + {"PTRACE_SETCRUNCHREGS", Const, 0, ""}, + {"PTRACE_SETFPREGS", Const, 0, ""}, + {"PTRACE_SETFPXREGS", Const, 0, ""}, + {"PTRACE_SETHBPREGS", Const, 0, ""}, + {"PTRACE_SETOPTIONS", Const, 0, ""}, + {"PTRACE_SETREGS", Const, 0, ""}, + {"PTRACE_SETREGSET", Const, 0, ""}, + {"PTRACE_SETSIGINFO", Const, 0, ""}, + {"PTRACE_SETVFPREGS", Const, 0, ""}, + {"PTRACE_SETWMMXREGS", Const, 0, ""}, + {"PTRACE_SET_SYSCALL", Const, 0, ""}, + {"PTRACE_SET_THREAD_AREA", Const, 0, ""}, + {"PTRACE_SINGLEBLOCK", Const, 0, ""}, + {"PTRACE_SINGLESTEP", Const, 0, ""}, + {"PTRACE_SYSCALL", Const, 0, ""}, + {"PTRACE_SYSEMU", Const, 0, ""}, + {"PTRACE_SYSEMU_SINGLESTEP", Const, 0, ""}, + {"PTRACE_TRACEME", Const, 0, ""}, + {"PT_ATTACH", Const, 0, ""}, + {"PT_ATTACHEXC", Const, 0, ""}, + {"PT_CONTINUE", Const, 0, ""}, + {"PT_DATA_ADDR", Const, 0, ""}, + {"PT_DENY_ATTACH", Const, 0, ""}, + {"PT_DETACH", Const, 0, ""}, + {"PT_FIRSTMACH", Const, 0, ""}, + {"PT_FORCEQUOTA", Const, 0, ""}, + {"PT_KILL", Const, 0, ""}, + {"PT_MASK", Const, 1, ""}, + {"PT_READ_D", Const, 0, ""}, + {"PT_READ_I", Const, 0, ""}, + {"PT_READ_U", Const, 0, ""}, + {"PT_SIGEXC", Const, 0, ""}, + {"PT_STEP", Const, 0, ""}, + {"PT_TEXT_ADDR", Const, 0, ""}, + {"PT_TEXT_END_ADDR", Const, 0, ""}, + {"PT_THUPDATE", Const, 0, ""}, + {"PT_TRACE_ME", Const, 0, ""}, + {"PT_WRITE_D", Const, 0, ""}, + {"PT_WRITE_I", Const, 0, ""}, + {"PT_WRITE_U", Const, 0, ""}, + {"ParseDirent", Func, 0, "func(buf []byte, max int, names []string) (consumed int, count int, newnames []string)"}, + {"ParseNetlinkMessage", Func, 0, "func(b []byte) ([]NetlinkMessage, error)"}, + {"ParseNetlinkRouteAttr", Func, 0, "func(m *NetlinkMessage) ([]NetlinkRouteAttr, error)"}, + {"ParseRoutingMessage", Func, 0, ""}, + {"ParseRoutingSockaddr", Func, 0, ""}, + {"ParseSocketControlMessage", Func, 0, "func(b []byte) ([]SocketControlMessage, error)"}, + {"ParseUnixCredentials", Func, 0, "func(m *SocketControlMessage) (*Ucred, error)"}, + {"ParseUnixRights", Func, 0, "func(m *SocketControlMessage) ([]int, error)"}, + {"PathMax", Const, 0, ""}, + {"Pathconf", Func, 0, ""}, + {"Pause", Func, 0, "func() (err error)"}, + {"Pipe", Func, 0, "func(p []int) error"}, + {"Pipe2", Func, 1, "func(p []int, flags int) error"}, + {"PivotRoot", Func, 0, "func(newroot string, putold string) (err error)"}, + {"Pointer", Type, 11, ""}, + {"PostQueuedCompletionStatus", Func, 0, ""}, + {"Pread", Func, 0, "func(fd int, p []byte, offset int64) (n int, err error)"}, + {"Proc", Type, 0, ""}, + {"Proc.Dll", Field, 0, ""}, + {"Proc.Name", Field, 0, ""}, + {"ProcAttr", Type, 0, ""}, + {"ProcAttr.Dir", Field, 0, ""}, + {"ProcAttr.Env", Field, 0, ""}, + {"ProcAttr.Files", Field, 0, ""}, + {"ProcAttr.Sys", Field, 0, ""}, + {"Process32First", Func, 4, ""}, + {"Process32Next", Func, 4, ""}, + {"ProcessEntry32", Type, 4, ""}, + {"ProcessEntry32.DefaultHeapID", Field, 4, ""}, + {"ProcessEntry32.ExeFile", Field, 4, ""}, + {"ProcessEntry32.Flags", Field, 4, ""}, + {"ProcessEntry32.ModuleID", Field, 4, ""}, + {"ProcessEntry32.ParentProcessID", Field, 4, ""}, + {"ProcessEntry32.PriClassBase", Field, 4, ""}, + {"ProcessEntry32.ProcessID", Field, 4, 
""}, + {"ProcessEntry32.Size", Field, 4, ""}, + {"ProcessEntry32.Threads", Field, 4, ""}, + {"ProcessEntry32.Usage", Field, 4, ""}, + {"ProcessInformation", Type, 0, ""}, + {"ProcessInformation.Process", Field, 0, ""}, + {"ProcessInformation.ProcessId", Field, 0, ""}, + {"ProcessInformation.Thread", Field, 0, ""}, + {"ProcessInformation.ThreadId", Field, 0, ""}, + {"Protoent", Type, 0, ""}, + {"Protoent.Aliases", Field, 0, ""}, + {"Protoent.Name", Field, 0, ""}, + {"Protoent.Proto", Field, 0, ""}, + {"PtraceAttach", Func, 0, "func(pid int) (err error)"}, + {"PtraceCont", Func, 0, "func(pid int, signal int) (err error)"}, + {"PtraceDetach", Func, 0, "func(pid int) (err error)"}, + {"PtraceGetEventMsg", Func, 0, "func(pid int) (msg uint, err error)"}, + {"PtraceGetRegs", Func, 0, "func(pid int, regsout *PtraceRegs) (err error)"}, + {"PtracePeekData", Func, 0, "func(pid int, addr uintptr, out []byte) (count int, err error)"}, + {"PtracePeekText", Func, 0, "func(pid int, addr uintptr, out []byte) (count int, err error)"}, + {"PtracePokeData", Func, 0, "func(pid int, addr uintptr, data []byte) (count int, err error)"}, + {"PtracePokeText", Func, 0, "func(pid int, addr uintptr, data []byte) (count int, err error)"}, + {"PtraceRegs", Type, 0, ""}, + {"PtraceRegs.Cs", Field, 0, ""}, + {"PtraceRegs.Ds", Field, 0, ""}, + {"PtraceRegs.Eax", Field, 0, ""}, + {"PtraceRegs.Ebp", Field, 0, ""}, + {"PtraceRegs.Ebx", Field, 0, ""}, + {"PtraceRegs.Ecx", Field, 0, ""}, + {"PtraceRegs.Edi", Field, 0, ""}, + {"PtraceRegs.Edx", Field, 0, ""}, + {"PtraceRegs.Eflags", Field, 0, ""}, + {"PtraceRegs.Eip", Field, 0, ""}, + {"PtraceRegs.Es", Field, 0, ""}, + {"PtraceRegs.Esi", Field, 0, ""}, + {"PtraceRegs.Esp", Field, 0, ""}, + {"PtraceRegs.Fs", Field, 0, ""}, + {"PtraceRegs.Fs_base", Field, 0, ""}, + {"PtraceRegs.Gs", Field, 0, ""}, + {"PtraceRegs.Gs_base", Field, 0, ""}, + {"PtraceRegs.Orig_eax", Field, 0, ""}, + {"PtraceRegs.Orig_rax", Field, 0, ""}, + {"PtraceRegs.R10", Field, 0, ""}, + {"PtraceRegs.R11", Field, 0, ""}, + {"PtraceRegs.R12", Field, 0, ""}, + {"PtraceRegs.R13", Field, 0, ""}, + {"PtraceRegs.R14", Field, 0, ""}, + {"PtraceRegs.R15", Field, 0, ""}, + {"PtraceRegs.R8", Field, 0, ""}, + {"PtraceRegs.R9", Field, 0, ""}, + {"PtraceRegs.Rax", Field, 0, ""}, + {"PtraceRegs.Rbp", Field, 0, ""}, + {"PtraceRegs.Rbx", Field, 0, ""}, + {"PtraceRegs.Rcx", Field, 0, ""}, + {"PtraceRegs.Rdi", Field, 0, ""}, + {"PtraceRegs.Rdx", Field, 0, ""}, + {"PtraceRegs.Rip", Field, 0, ""}, + {"PtraceRegs.Rsi", Field, 0, ""}, + {"PtraceRegs.Rsp", Field, 0, ""}, + {"PtraceRegs.Ss", Field, 0, ""}, + {"PtraceRegs.Uregs", Field, 0, ""}, + {"PtraceRegs.Xcs", Field, 0, ""}, + {"PtraceRegs.Xds", Field, 0, ""}, + {"PtraceRegs.Xes", Field, 0, ""}, + {"PtraceRegs.Xfs", Field, 0, ""}, + {"PtraceRegs.Xgs", Field, 0, ""}, + {"PtraceRegs.Xss", Field, 0, ""}, + {"PtraceSetOptions", Func, 0, "func(pid int, options int) (err error)"}, + {"PtraceSetRegs", Func, 0, "func(pid int, regs *PtraceRegs) (err error)"}, + {"PtraceSingleStep", Func, 0, "func(pid int) (err error)"}, + {"PtraceSyscall", Func, 1, "func(pid int, signal int) (err error)"}, + {"Pwrite", Func, 0, "func(fd int, p []byte, offset int64) (n int, err error)"}, + {"REG_BINARY", Const, 0, ""}, + {"REG_DWORD", Const, 0, ""}, + {"REG_DWORD_BIG_ENDIAN", Const, 0, ""}, + {"REG_DWORD_LITTLE_ENDIAN", Const, 0, ""}, + {"REG_EXPAND_SZ", Const, 0, ""}, + {"REG_FULL_RESOURCE_DESCRIPTOR", Const, 0, ""}, + {"REG_LINK", Const, 0, ""}, + {"REG_MULTI_SZ", Const, 0, ""}, + {"REG_NONE", Const, 0, 
""}, + {"REG_QWORD", Const, 0, ""}, + {"REG_QWORD_LITTLE_ENDIAN", Const, 0, ""}, + {"REG_RESOURCE_LIST", Const, 0, ""}, + {"REG_RESOURCE_REQUIREMENTS_LIST", Const, 0, ""}, + {"REG_SZ", Const, 0, ""}, + {"RLIMIT_AS", Const, 0, ""}, + {"RLIMIT_CORE", Const, 0, ""}, + {"RLIMIT_CPU", Const, 0, ""}, + {"RLIMIT_CPU_USAGE_MONITOR", Const, 16, ""}, + {"RLIMIT_DATA", Const, 0, ""}, + {"RLIMIT_FSIZE", Const, 0, ""}, + {"RLIMIT_NOFILE", Const, 0, ""}, + {"RLIMIT_STACK", Const, 0, ""}, + {"RLIM_INFINITY", Const, 0, ""}, + {"RTAX_ADVMSS", Const, 0, ""}, + {"RTAX_AUTHOR", Const, 0, ""}, + {"RTAX_BRD", Const, 0, ""}, + {"RTAX_CWND", Const, 0, ""}, + {"RTAX_DST", Const, 0, ""}, + {"RTAX_FEATURES", Const, 0, ""}, + {"RTAX_FEATURE_ALLFRAG", Const, 0, ""}, + {"RTAX_FEATURE_ECN", Const, 0, ""}, + {"RTAX_FEATURE_SACK", Const, 0, ""}, + {"RTAX_FEATURE_TIMESTAMP", Const, 0, ""}, + {"RTAX_GATEWAY", Const, 0, ""}, + {"RTAX_GENMASK", Const, 0, ""}, + {"RTAX_HOPLIMIT", Const, 0, ""}, + {"RTAX_IFA", Const, 0, ""}, + {"RTAX_IFP", Const, 0, ""}, + {"RTAX_INITCWND", Const, 0, ""}, + {"RTAX_INITRWND", Const, 0, ""}, + {"RTAX_LABEL", Const, 1, ""}, + {"RTAX_LOCK", Const, 0, ""}, + {"RTAX_MAX", Const, 0, ""}, + {"RTAX_MTU", Const, 0, ""}, + {"RTAX_NETMASK", Const, 0, ""}, + {"RTAX_REORDERING", Const, 0, ""}, + {"RTAX_RTO_MIN", Const, 0, ""}, + {"RTAX_RTT", Const, 0, ""}, + {"RTAX_RTTVAR", Const, 0, ""}, + {"RTAX_SRC", Const, 1, ""}, + {"RTAX_SRCMASK", Const, 1, ""}, + {"RTAX_SSTHRESH", Const, 0, ""}, + {"RTAX_TAG", Const, 1, ""}, + {"RTAX_UNSPEC", Const, 0, ""}, + {"RTAX_WINDOW", Const, 0, ""}, + {"RTA_ALIGNTO", Const, 0, ""}, + {"RTA_AUTHOR", Const, 0, ""}, + {"RTA_BRD", Const, 0, ""}, + {"RTA_CACHEINFO", Const, 0, ""}, + {"RTA_DST", Const, 0, ""}, + {"RTA_FLOW", Const, 0, ""}, + {"RTA_GATEWAY", Const, 0, ""}, + {"RTA_GENMASK", Const, 0, ""}, + {"RTA_IFA", Const, 0, ""}, + {"RTA_IFP", Const, 0, ""}, + {"RTA_IIF", Const, 0, ""}, + {"RTA_LABEL", Const, 1, ""}, + {"RTA_MAX", Const, 0, ""}, + {"RTA_METRICS", Const, 0, ""}, + {"RTA_MULTIPATH", Const, 0, ""}, + {"RTA_NETMASK", Const, 0, ""}, + {"RTA_OIF", Const, 0, ""}, + {"RTA_PREFSRC", Const, 0, ""}, + {"RTA_PRIORITY", Const, 0, ""}, + {"RTA_SRC", Const, 0, ""}, + {"RTA_SRCMASK", Const, 1, ""}, + {"RTA_TABLE", Const, 0, ""}, + {"RTA_TAG", Const, 1, ""}, + {"RTA_UNSPEC", Const, 0, ""}, + {"RTCF_DIRECTSRC", Const, 0, ""}, + {"RTCF_DOREDIRECT", Const, 0, ""}, + {"RTCF_LOG", Const, 0, ""}, + {"RTCF_MASQ", Const, 0, ""}, + {"RTCF_NAT", Const, 0, ""}, + {"RTCF_VALVE", Const, 0, ""}, + {"RTF_ADDRCLASSMASK", Const, 0, ""}, + {"RTF_ADDRCONF", Const, 0, ""}, + {"RTF_ALLONLINK", Const, 0, ""}, + {"RTF_ANNOUNCE", Const, 1, ""}, + {"RTF_BLACKHOLE", Const, 0, ""}, + {"RTF_BROADCAST", Const, 0, ""}, + {"RTF_CACHE", Const, 0, ""}, + {"RTF_CLONED", Const, 1, ""}, + {"RTF_CLONING", Const, 0, ""}, + {"RTF_CONDEMNED", Const, 0, ""}, + {"RTF_DEFAULT", Const, 0, ""}, + {"RTF_DELCLONE", Const, 0, ""}, + {"RTF_DONE", Const, 0, ""}, + {"RTF_DYNAMIC", Const, 0, ""}, + {"RTF_FLOW", Const, 0, ""}, + {"RTF_FMASK", Const, 0, ""}, + {"RTF_GATEWAY", Const, 0, ""}, + {"RTF_GWFLAG_COMPAT", Const, 3, ""}, + {"RTF_HOST", Const, 0, ""}, + {"RTF_IFREF", Const, 0, ""}, + {"RTF_IFSCOPE", Const, 0, ""}, + {"RTF_INTERFACE", Const, 0, ""}, + {"RTF_IRTT", Const, 0, ""}, + {"RTF_LINKRT", Const, 0, ""}, + {"RTF_LLDATA", Const, 0, ""}, + {"RTF_LLINFO", Const, 0, ""}, + {"RTF_LOCAL", Const, 0, ""}, + {"RTF_MASK", Const, 1, ""}, + {"RTF_MODIFIED", Const, 0, ""}, + {"RTF_MPATH", Const, 1, ""}, + {"RTF_MPLS", Const, 1, ""}, + 
{"RTF_MSS", Const, 0, ""}, + {"RTF_MTU", Const, 0, ""}, + {"RTF_MULTICAST", Const, 0, ""}, + {"RTF_NAT", Const, 0, ""}, + {"RTF_NOFORWARD", Const, 0, ""}, + {"RTF_NONEXTHOP", Const, 0, ""}, + {"RTF_NOPMTUDISC", Const, 0, ""}, + {"RTF_PERMANENT_ARP", Const, 1, ""}, + {"RTF_PINNED", Const, 0, ""}, + {"RTF_POLICY", Const, 0, ""}, + {"RTF_PRCLONING", Const, 0, ""}, + {"RTF_PROTO1", Const, 0, ""}, + {"RTF_PROTO2", Const, 0, ""}, + {"RTF_PROTO3", Const, 0, ""}, + {"RTF_PROXY", Const, 16, ""}, + {"RTF_REINSTATE", Const, 0, ""}, + {"RTF_REJECT", Const, 0, ""}, + {"RTF_RNH_LOCKED", Const, 0, ""}, + {"RTF_ROUTER", Const, 16, ""}, + {"RTF_SOURCE", Const, 1, ""}, + {"RTF_SRC", Const, 1, ""}, + {"RTF_STATIC", Const, 0, ""}, + {"RTF_STICKY", Const, 0, ""}, + {"RTF_THROW", Const, 0, ""}, + {"RTF_TUNNEL", Const, 1, ""}, + {"RTF_UP", Const, 0, ""}, + {"RTF_USETRAILERS", Const, 1, ""}, + {"RTF_WASCLONED", Const, 0, ""}, + {"RTF_WINDOW", Const, 0, ""}, + {"RTF_XRESOLVE", Const, 0, ""}, + {"RTM_ADD", Const, 0, ""}, + {"RTM_BASE", Const, 0, ""}, + {"RTM_CHANGE", Const, 0, ""}, + {"RTM_CHGADDR", Const, 1, ""}, + {"RTM_DELACTION", Const, 0, ""}, + {"RTM_DELADDR", Const, 0, ""}, + {"RTM_DELADDRLABEL", Const, 0, ""}, + {"RTM_DELETE", Const, 0, ""}, + {"RTM_DELLINK", Const, 0, ""}, + {"RTM_DELMADDR", Const, 0, ""}, + {"RTM_DELNEIGH", Const, 0, ""}, + {"RTM_DELQDISC", Const, 0, ""}, + {"RTM_DELROUTE", Const, 0, ""}, + {"RTM_DELRULE", Const, 0, ""}, + {"RTM_DELTCLASS", Const, 0, ""}, + {"RTM_DELTFILTER", Const, 0, ""}, + {"RTM_DESYNC", Const, 1, ""}, + {"RTM_F_CLONED", Const, 0, ""}, + {"RTM_F_EQUALIZE", Const, 0, ""}, + {"RTM_F_NOTIFY", Const, 0, ""}, + {"RTM_F_PREFIX", Const, 0, ""}, + {"RTM_GET", Const, 0, ""}, + {"RTM_GET2", Const, 0, ""}, + {"RTM_GETACTION", Const, 0, ""}, + {"RTM_GETADDR", Const, 0, ""}, + {"RTM_GETADDRLABEL", Const, 0, ""}, + {"RTM_GETANYCAST", Const, 0, ""}, + {"RTM_GETDCB", Const, 0, ""}, + {"RTM_GETLINK", Const, 0, ""}, + {"RTM_GETMULTICAST", Const, 0, ""}, + {"RTM_GETNEIGH", Const, 0, ""}, + {"RTM_GETNEIGHTBL", Const, 0, ""}, + {"RTM_GETQDISC", Const, 0, ""}, + {"RTM_GETROUTE", Const, 0, ""}, + {"RTM_GETRULE", Const, 0, ""}, + {"RTM_GETTCLASS", Const, 0, ""}, + {"RTM_GETTFILTER", Const, 0, ""}, + {"RTM_IEEE80211", Const, 0, ""}, + {"RTM_IFANNOUNCE", Const, 0, ""}, + {"RTM_IFINFO", Const, 0, ""}, + {"RTM_IFINFO2", Const, 0, ""}, + {"RTM_LLINFO_UPD", Const, 1, ""}, + {"RTM_LOCK", Const, 0, ""}, + {"RTM_LOSING", Const, 0, ""}, + {"RTM_MAX", Const, 0, ""}, + {"RTM_MAXSIZE", Const, 1, ""}, + {"RTM_MISS", Const, 0, ""}, + {"RTM_NEWACTION", Const, 0, ""}, + {"RTM_NEWADDR", Const, 0, ""}, + {"RTM_NEWADDRLABEL", Const, 0, ""}, + {"RTM_NEWLINK", Const, 0, ""}, + {"RTM_NEWMADDR", Const, 0, ""}, + {"RTM_NEWMADDR2", Const, 0, ""}, + {"RTM_NEWNDUSEROPT", Const, 0, ""}, + {"RTM_NEWNEIGH", Const, 0, ""}, + {"RTM_NEWNEIGHTBL", Const, 0, ""}, + {"RTM_NEWPREFIX", Const, 0, ""}, + {"RTM_NEWQDISC", Const, 0, ""}, + {"RTM_NEWROUTE", Const, 0, ""}, + {"RTM_NEWRULE", Const, 0, ""}, + {"RTM_NEWTCLASS", Const, 0, ""}, + {"RTM_NEWTFILTER", Const, 0, ""}, + {"RTM_NR_FAMILIES", Const, 0, ""}, + {"RTM_NR_MSGTYPES", Const, 0, ""}, + {"RTM_OIFINFO", Const, 1, ""}, + {"RTM_OLDADD", Const, 0, ""}, + {"RTM_OLDDEL", Const, 0, ""}, + {"RTM_OOIFINFO", Const, 1, ""}, + {"RTM_REDIRECT", Const, 0, ""}, + {"RTM_RESOLVE", Const, 0, ""}, + {"RTM_RTTUNIT", Const, 0, ""}, + {"RTM_SETDCB", Const, 0, ""}, + {"RTM_SETGATE", Const, 1, ""}, + {"RTM_SETLINK", Const, 0, ""}, + {"RTM_SETNEIGHTBL", Const, 0, ""}, + {"RTM_VERSION", Const, 0, 
""}, + {"RTNH_ALIGNTO", Const, 0, ""}, + {"RTNH_F_DEAD", Const, 0, ""}, + {"RTNH_F_ONLINK", Const, 0, ""}, + {"RTNH_F_PERVASIVE", Const, 0, ""}, + {"RTNLGRP_IPV4_IFADDR", Const, 1, ""}, + {"RTNLGRP_IPV4_MROUTE", Const, 1, ""}, + {"RTNLGRP_IPV4_ROUTE", Const, 1, ""}, + {"RTNLGRP_IPV4_RULE", Const, 1, ""}, + {"RTNLGRP_IPV6_IFADDR", Const, 1, ""}, + {"RTNLGRP_IPV6_IFINFO", Const, 1, ""}, + {"RTNLGRP_IPV6_MROUTE", Const, 1, ""}, + {"RTNLGRP_IPV6_PREFIX", Const, 1, ""}, + {"RTNLGRP_IPV6_ROUTE", Const, 1, ""}, + {"RTNLGRP_IPV6_RULE", Const, 1, ""}, + {"RTNLGRP_LINK", Const, 1, ""}, + {"RTNLGRP_ND_USEROPT", Const, 1, ""}, + {"RTNLGRP_NEIGH", Const, 1, ""}, + {"RTNLGRP_NONE", Const, 1, ""}, + {"RTNLGRP_NOTIFY", Const, 1, ""}, + {"RTNLGRP_TC", Const, 1, ""}, + {"RTN_ANYCAST", Const, 0, ""}, + {"RTN_BLACKHOLE", Const, 0, ""}, + {"RTN_BROADCAST", Const, 0, ""}, + {"RTN_LOCAL", Const, 0, ""}, + {"RTN_MAX", Const, 0, ""}, + {"RTN_MULTICAST", Const, 0, ""}, + {"RTN_NAT", Const, 0, ""}, + {"RTN_PROHIBIT", Const, 0, ""}, + {"RTN_THROW", Const, 0, ""}, + {"RTN_UNICAST", Const, 0, ""}, + {"RTN_UNREACHABLE", Const, 0, ""}, + {"RTN_UNSPEC", Const, 0, ""}, + {"RTN_XRESOLVE", Const, 0, ""}, + {"RTPROT_BIRD", Const, 0, ""}, + {"RTPROT_BOOT", Const, 0, ""}, + {"RTPROT_DHCP", Const, 0, ""}, + {"RTPROT_DNROUTED", Const, 0, ""}, + {"RTPROT_GATED", Const, 0, ""}, + {"RTPROT_KERNEL", Const, 0, ""}, + {"RTPROT_MRT", Const, 0, ""}, + {"RTPROT_NTK", Const, 0, ""}, + {"RTPROT_RA", Const, 0, ""}, + {"RTPROT_REDIRECT", Const, 0, ""}, + {"RTPROT_STATIC", Const, 0, ""}, + {"RTPROT_UNSPEC", Const, 0, ""}, + {"RTPROT_XORP", Const, 0, ""}, + {"RTPROT_ZEBRA", Const, 0, ""}, + {"RTV_EXPIRE", Const, 0, ""}, + {"RTV_HOPCOUNT", Const, 0, ""}, + {"RTV_MTU", Const, 0, ""}, + {"RTV_RPIPE", Const, 0, ""}, + {"RTV_RTT", Const, 0, ""}, + {"RTV_RTTVAR", Const, 0, ""}, + {"RTV_SPIPE", Const, 0, ""}, + {"RTV_SSTHRESH", Const, 0, ""}, + {"RTV_WEIGHT", Const, 0, ""}, + {"RT_CACHING_CONTEXT", Const, 1, ""}, + {"RT_CLASS_DEFAULT", Const, 0, ""}, + {"RT_CLASS_LOCAL", Const, 0, ""}, + {"RT_CLASS_MAIN", Const, 0, ""}, + {"RT_CLASS_MAX", Const, 0, ""}, + {"RT_CLASS_UNSPEC", Const, 0, ""}, + {"RT_DEFAULT_FIB", Const, 1, ""}, + {"RT_NORTREF", Const, 1, ""}, + {"RT_SCOPE_HOST", Const, 0, ""}, + {"RT_SCOPE_LINK", Const, 0, ""}, + {"RT_SCOPE_NOWHERE", Const, 0, ""}, + {"RT_SCOPE_SITE", Const, 0, ""}, + {"RT_SCOPE_UNIVERSE", Const, 0, ""}, + {"RT_TABLEID_MAX", Const, 1, ""}, + {"RT_TABLE_COMPAT", Const, 0, ""}, + {"RT_TABLE_DEFAULT", Const, 0, ""}, + {"RT_TABLE_LOCAL", Const, 0, ""}, + {"RT_TABLE_MAIN", Const, 0, ""}, + {"RT_TABLE_MAX", Const, 0, ""}, + {"RT_TABLE_UNSPEC", Const, 0, ""}, + {"RUSAGE_CHILDREN", Const, 0, ""}, + {"RUSAGE_SELF", Const, 0, ""}, + {"RUSAGE_THREAD", Const, 0, ""}, + {"Radvisory_t", Type, 0, ""}, + {"Radvisory_t.Count", Field, 0, ""}, + {"Radvisory_t.Offset", Field, 0, ""}, + {"Radvisory_t.Pad_cgo_0", Field, 0, ""}, + {"RawConn", Type, 9, ""}, + {"RawSockaddr", Type, 0, ""}, + {"RawSockaddr.Data", Field, 0, ""}, + {"RawSockaddr.Family", Field, 0, ""}, + {"RawSockaddr.Len", Field, 0, ""}, + {"RawSockaddrAny", Type, 0, ""}, + {"RawSockaddrAny.Addr", Field, 0, ""}, + {"RawSockaddrAny.Pad", Field, 0, ""}, + {"RawSockaddrDatalink", Type, 0, ""}, + {"RawSockaddrDatalink.Alen", Field, 0, ""}, + {"RawSockaddrDatalink.Data", Field, 0, ""}, + {"RawSockaddrDatalink.Family", Field, 0, ""}, + {"RawSockaddrDatalink.Index", Field, 0, ""}, + {"RawSockaddrDatalink.Len", Field, 0, ""}, + {"RawSockaddrDatalink.Nlen", Field, 0, ""}, + 
{"RawSockaddrDatalink.Pad_cgo_0", Field, 2, ""}, + {"RawSockaddrDatalink.Slen", Field, 0, ""}, + {"RawSockaddrDatalink.Type", Field, 0, ""}, + {"RawSockaddrInet4", Type, 0, ""}, + {"RawSockaddrInet4.Addr", Field, 0, ""}, + {"RawSockaddrInet4.Family", Field, 0, ""}, + {"RawSockaddrInet4.Len", Field, 0, ""}, + {"RawSockaddrInet4.Port", Field, 0, ""}, + {"RawSockaddrInet4.Zero", Field, 0, ""}, + {"RawSockaddrInet6", Type, 0, ""}, + {"RawSockaddrInet6.Addr", Field, 0, ""}, + {"RawSockaddrInet6.Family", Field, 0, ""}, + {"RawSockaddrInet6.Flowinfo", Field, 0, ""}, + {"RawSockaddrInet6.Len", Field, 0, ""}, + {"RawSockaddrInet6.Port", Field, 0, ""}, + {"RawSockaddrInet6.Scope_id", Field, 0, ""}, + {"RawSockaddrLinklayer", Type, 0, ""}, + {"RawSockaddrLinklayer.Addr", Field, 0, ""}, + {"RawSockaddrLinklayer.Family", Field, 0, ""}, + {"RawSockaddrLinklayer.Halen", Field, 0, ""}, + {"RawSockaddrLinklayer.Hatype", Field, 0, ""}, + {"RawSockaddrLinklayer.Ifindex", Field, 0, ""}, + {"RawSockaddrLinklayer.Pkttype", Field, 0, ""}, + {"RawSockaddrLinklayer.Protocol", Field, 0, ""}, + {"RawSockaddrNetlink", Type, 0, ""}, + {"RawSockaddrNetlink.Family", Field, 0, ""}, + {"RawSockaddrNetlink.Groups", Field, 0, ""}, + {"RawSockaddrNetlink.Pad", Field, 0, ""}, + {"RawSockaddrNetlink.Pid", Field, 0, ""}, + {"RawSockaddrUnix", Type, 0, ""}, + {"RawSockaddrUnix.Family", Field, 0, ""}, + {"RawSockaddrUnix.Len", Field, 0, ""}, + {"RawSockaddrUnix.Pad_cgo_0", Field, 2, ""}, + {"RawSockaddrUnix.Path", Field, 0, ""}, + {"RawSyscall", Func, 0, "func(trap uintptr, a1 uintptr, a2 uintptr, a3 uintptr) (r1 uintptr, r2 uintptr, err Errno)"}, + {"RawSyscall6", Func, 0, "func(trap uintptr, a1 uintptr, a2 uintptr, a3 uintptr, a4 uintptr, a5 uintptr, a6 uintptr) (r1 uintptr, r2 uintptr, err Errno)"}, + {"Read", Func, 0, "func(fd int, p []byte) (n int, err error)"}, + {"ReadConsole", Func, 1, ""}, + {"ReadDirectoryChanges", Func, 0, ""}, + {"ReadDirent", Func, 0, "func(fd int, buf []byte) (n int, err error)"}, + {"ReadFile", Func, 0, ""}, + {"Readlink", Func, 0, "func(path string, buf []byte) (n int, err error)"}, + {"Reboot", Func, 0, "func(cmd int) (err error)"}, + {"Recvfrom", Func, 0, "func(fd int, p []byte, flags int) (n int, from Sockaddr, err error)"}, + {"Recvmsg", Func, 0, "func(fd int, p []byte, oob []byte, flags int) (n int, oobn int, recvflags int, from Sockaddr, err error)"}, + {"RegCloseKey", Func, 0, ""}, + {"RegEnumKeyEx", Func, 0, ""}, + {"RegOpenKeyEx", Func, 0, ""}, + {"RegQueryInfoKey", Func, 0, ""}, + {"RegQueryValueEx", Func, 0, ""}, + {"RemoveDirectory", Func, 0, ""}, + {"Removexattr", Func, 1, "func(path string, attr string) (err error)"}, + {"Rename", Func, 0, "func(oldpath string, newpath string) (err error)"}, + {"Renameat", Func, 0, "func(olddirfd int, oldpath string, newdirfd int, newpath string) (err error)"}, + {"Revoke", Func, 0, ""}, + {"Rlimit", Type, 0, ""}, + {"Rlimit.Cur", Field, 0, ""}, + {"Rlimit.Max", Field, 0, ""}, + {"Rmdir", Func, 0, "func(path string) error"}, + {"RouteMessage", Type, 0, ""}, + {"RouteMessage.Data", Field, 0, ""}, + {"RouteMessage.Header", Field, 0, ""}, + {"RouteRIB", Func, 0, ""}, + {"RoutingMessage", Type, 0, ""}, + {"RtAttr", Type, 0, ""}, + {"RtAttr.Len", Field, 0, ""}, + {"RtAttr.Type", Field, 0, ""}, + {"RtGenmsg", Type, 0, ""}, + {"RtGenmsg.Family", Field, 0, ""}, + {"RtMetrics", Type, 0, ""}, + {"RtMetrics.Expire", Field, 0, ""}, + {"RtMetrics.Filler", Field, 0, ""}, + {"RtMetrics.Hopcount", Field, 0, ""}, + {"RtMetrics.Locks", Field, 0, ""}, + 
{"RtMetrics.Mtu", Field, 0, ""}, + {"RtMetrics.Pad", Field, 3, ""}, + {"RtMetrics.Pksent", Field, 0, ""}, + {"RtMetrics.Recvpipe", Field, 0, ""}, + {"RtMetrics.Refcnt", Field, 2, ""}, + {"RtMetrics.Rtt", Field, 0, ""}, + {"RtMetrics.Rttvar", Field, 0, ""}, + {"RtMetrics.Sendpipe", Field, 0, ""}, + {"RtMetrics.Ssthresh", Field, 0, ""}, + {"RtMetrics.Weight", Field, 0, ""}, + {"RtMsg", Type, 0, ""}, + {"RtMsg.Dst_len", Field, 0, ""}, + {"RtMsg.Family", Field, 0, ""}, + {"RtMsg.Flags", Field, 0, ""}, + {"RtMsg.Protocol", Field, 0, ""}, + {"RtMsg.Scope", Field, 0, ""}, + {"RtMsg.Src_len", Field, 0, ""}, + {"RtMsg.Table", Field, 0, ""}, + {"RtMsg.Tos", Field, 0, ""}, + {"RtMsg.Type", Field, 0, ""}, + {"RtMsghdr", Type, 0, ""}, + {"RtMsghdr.Addrs", Field, 0, ""}, + {"RtMsghdr.Errno", Field, 0, ""}, + {"RtMsghdr.Flags", Field, 0, ""}, + {"RtMsghdr.Fmask", Field, 0, ""}, + {"RtMsghdr.Hdrlen", Field, 2, ""}, + {"RtMsghdr.Index", Field, 0, ""}, + {"RtMsghdr.Inits", Field, 0, ""}, + {"RtMsghdr.Mpls", Field, 2, ""}, + {"RtMsghdr.Msglen", Field, 0, ""}, + {"RtMsghdr.Pad_cgo_0", Field, 0, ""}, + {"RtMsghdr.Pad_cgo_1", Field, 2, ""}, + {"RtMsghdr.Pid", Field, 0, ""}, + {"RtMsghdr.Priority", Field, 2, ""}, + {"RtMsghdr.Rmx", Field, 0, ""}, + {"RtMsghdr.Seq", Field, 0, ""}, + {"RtMsghdr.Tableid", Field, 2, ""}, + {"RtMsghdr.Type", Field, 0, ""}, + {"RtMsghdr.Use", Field, 0, ""}, + {"RtMsghdr.Version", Field, 0, ""}, + {"RtNexthop", Type, 0, ""}, + {"RtNexthop.Flags", Field, 0, ""}, + {"RtNexthop.Hops", Field, 0, ""}, + {"RtNexthop.Ifindex", Field, 0, ""}, + {"RtNexthop.Len", Field, 0, ""}, + {"Rusage", Type, 0, ""}, + {"Rusage.CreationTime", Field, 0, ""}, + {"Rusage.ExitTime", Field, 0, ""}, + {"Rusage.Idrss", Field, 0, ""}, + {"Rusage.Inblock", Field, 0, ""}, + {"Rusage.Isrss", Field, 0, ""}, + {"Rusage.Ixrss", Field, 0, ""}, + {"Rusage.KernelTime", Field, 0, ""}, + {"Rusage.Majflt", Field, 0, ""}, + {"Rusage.Maxrss", Field, 0, ""}, + {"Rusage.Minflt", Field, 0, ""}, + {"Rusage.Msgrcv", Field, 0, ""}, + {"Rusage.Msgsnd", Field, 0, ""}, + {"Rusage.Nivcsw", Field, 0, ""}, + {"Rusage.Nsignals", Field, 0, ""}, + {"Rusage.Nswap", Field, 0, ""}, + {"Rusage.Nvcsw", Field, 0, ""}, + {"Rusage.Oublock", Field, 0, ""}, + {"Rusage.Stime", Field, 0, ""}, + {"Rusage.UserTime", Field, 0, ""}, + {"Rusage.Utime", Field, 0, ""}, + {"SCM_BINTIME", Const, 0, ""}, + {"SCM_CREDENTIALS", Const, 0, ""}, + {"SCM_CREDS", Const, 0, ""}, + {"SCM_RIGHTS", Const, 0, ""}, + {"SCM_TIMESTAMP", Const, 0, ""}, + {"SCM_TIMESTAMPING", Const, 0, ""}, + {"SCM_TIMESTAMPNS", Const, 0, ""}, + {"SCM_TIMESTAMP_MONOTONIC", Const, 0, ""}, + {"SHUT_RD", Const, 0, ""}, + {"SHUT_RDWR", Const, 0, ""}, + {"SHUT_WR", Const, 0, ""}, + {"SID", Type, 0, ""}, + {"SIDAndAttributes", Type, 0, ""}, + {"SIDAndAttributes.Attributes", Field, 0, ""}, + {"SIDAndAttributes.Sid", Field, 0, ""}, + {"SIGABRT", Const, 0, ""}, + {"SIGALRM", Const, 0, ""}, + {"SIGBUS", Const, 0, ""}, + {"SIGCHLD", Const, 0, ""}, + {"SIGCLD", Const, 0, ""}, + {"SIGCONT", Const, 0, ""}, + {"SIGEMT", Const, 0, ""}, + {"SIGFPE", Const, 0, ""}, + {"SIGHUP", Const, 0, ""}, + {"SIGILL", Const, 0, ""}, + {"SIGINFO", Const, 0, ""}, + {"SIGINT", Const, 0, ""}, + {"SIGIO", Const, 0, ""}, + {"SIGIOT", Const, 0, ""}, + {"SIGKILL", Const, 0, ""}, + {"SIGLIBRT", Const, 1, ""}, + {"SIGLWP", Const, 0, ""}, + {"SIGPIPE", Const, 0, ""}, + {"SIGPOLL", Const, 0, ""}, + {"SIGPROF", Const, 0, ""}, + {"SIGPWR", Const, 0, ""}, + {"SIGQUIT", Const, 0, ""}, + {"SIGSEGV", Const, 0, ""}, + {"SIGSTKFLT", Const, 0, ""}, 
+ {"SIGSTOP", Const, 0, ""}, + {"SIGSYS", Const, 0, ""}, + {"SIGTERM", Const, 0, ""}, + {"SIGTHR", Const, 0, ""}, + {"SIGTRAP", Const, 0, ""}, + {"SIGTSTP", Const, 0, ""}, + {"SIGTTIN", Const, 0, ""}, + {"SIGTTOU", Const, 0, ""}, + {"SIGUNUSED", Const, 0, ""}, + {"SIGURG", Const, 0, ""}, + {"SIGUSR1", Const, 0, ""}, + {"SIGUSR2", Const, 0, ""}, + {"SIGVTALRM", Const, 0, ""}, + {"SIGWINCH", Const, 0, ""}, + {"SIGXCPU", Const, 0, ""}, + {"SIGXFSZ", Const, 0, ""}, + {"SIOCADDDLCI", Const, 0, ""}, + {"SIOCADDMULTI", Const, 0, ""}, + {"SIOCADDRT", Const, 0, ""}, + {"SIOCAIFADDR", Const, 0, ""}, + {"SIOCAIFGROUP", Const, 0, ""}, + {"SIOCALIFADDR", Const, 0, ""}, + {"SIOCARPIPLL", Const, 0, ""}, + {"SIOCATMARK", Const, 0, ""}, + {"SIOCAUTOADDR", Const, 0, ""}, + {"SIOCAUTONETMASK", Const, 0, ""}, + {"SIOCBRDGADD", Const, 1, ""}, + {"SIOCBRDGADDS", Const, 1, ""}, + {"SIOCBRDGARL", Const, 1, ""}, + {"SIOCBRDGDADDR", Const, 1, ""}, + {"SIOCBRDGDEL", Const, 1, ""}, + {"SIOCBRDGDELS", Const, 1, ""}, + {"SIOCBRDGFLUSH", Const, 1, ""}, + {"SIOCBRDGFRL", Const, 1, ""}, + {"SIOCBRDGGCACHE", Const, 1, ""}, + {"SIOCBRDGGFD", Const, 1, ""}, + {"SIOCBRDGGHT", Const, 1, ""}, + {"SIOCBRDGGIFFLGS", Const, 1, ""}, + {"SIOCBRDGGMA", Const, 1, ""}, + {"SIOCBRDGGPARAM", Const, 1, ""}, + {"SIOCBRDGGPRI", Const, 1, ""}, + {"SIOCBRDGGRL", Const, 1, ""}, + {"SIOCBRDGGSIFS", Const, 1, ""}, + {"SIOCBRDGGTO", Const, 1, ""}, + {"SIOCBRDGIFS", Const, 1, ""}, + {"SIOCBRDGRTS", Const, 1, ""}, + {"SIOCBRDGSADDR", Const, 1, ""}, + {"SIOCBRDGSCACHE", Const, 1, ""}, + {"SIOCBRDGSFD", Const, 1, ""}, + {"SIOCBRDGSHT", Const, 1, ""}, + {"SIOCBRDGSIFCOST", Const, 1, ""}, + {"SIOCBRDGSIFFLGS", Const, 1, ""}, + {"SIOCBRDGSIFPRIO", Const, 1, ""}, + {"SIOCBRDGSMA", Const, 1, ""}, + {"SIOCBRDGSPRI", Const, 1, ""}, + {"SIOCBRDGSPROTO", Const, 1, ""}, + {"SIOCBRDGSTO", Const, 1, ""}, + {"SIOCBRDGSTXHC", Const, 1, ""}, + {"SIOCDARP", Const, 0, ""}, + {"SIOCDELDLCI", Const, 0, ""}, + {"SIOCDELMULTI", Const, 0, ""}, + {"SIOCDELRT", Const, 0, ""}, + {"SIOCDEVPRIVATE", Const, 0, ""}, + {"SIOCDIFADDR", Const, 0, ""}, + {"SIOCDIFGROUP", Const, 0, ""}, + {"SIOCDIFPHYADDR", Const, 0, ""}, + {"SIOCDLIFADDR", Const, 0, ""}, + {"SIOCDRARP", Const, 0, ""}, + {"SIOCGARP", Const, 0, ""}, + {"SIOCGDRVSPEC", Const, 0, ""}, + {"SIOCGETKALIVE", Const, 1, ""}, + {"SIOCGETLABEL", Const, 1, ""}, + {"SIOCGETPFLOW", Const, 1, ""}, + {"SIOCGETPFSYNC", Const, 1, ""}, + {"SIOCGETSGCNT", Const, 0, ""}, + {"SIOCGETVIFCNT", Const, 0, ""}, + {"SIOCGETVLAN", Const, 0, ""}, + {"SIOCGHIWAT", Const, 0, ""}, + {"SIOCGIFADDR", Const, 0, ""}, + {"SIOCGIFADDRPREF", Const, 1, ""}, + {"SIOCGIFALIAS", Const, 1, ""}, + {"SIOCGIFALTMTU", Const, 0, ""}, + {"SIOCGIFASYNCMAP", Const, 0, ""}, + {"SIOCGIFBOND", Const, 0, ""}, + {"SIOCGIFBR", Const, 0, ""}, + {"SIOCGIFBRDADDR", Const, 0, ""}, + {"SIOCGIFCAP", Const, 0, ""}, + {"SIOCGIFCONF", Const, 0, ""}, + {"SIOCGIFCOUNT", Const, 0, ""}, + {"SIOCGIFDATA", Const, 1, ""}, + {"SIOCGIFDESCR", Const, 0, ""}, + {"SIOCGIFDEVMTU", Const, 0, ""}, + {"SIOCGIFDLT", Const, 1, ""}, + {"SIOCGIFDSTADDR", Const, 0, ""}, + {"SIOCGIFENCAP", Const, 0, ""}, + {"SIOCGIFFIB", Const, 1, ""}, + {"SIOCGIFFLAGS", Const, 0, ""}, + {"SIOCGIFGATTR", Const, 1, ""}, + {"SIOCGIFGENERIC", Const, 0, ""}, + {"SIOCGIFGMEMB", Const, 0, ""}, + {"SIOCGIFGROUP", Const, 0, ""}, + {"SIOCGIFHARDMTU", Const, 3, ""}, + {"SIOCGIFHWADDR", Const, 0, ""}, + {"SIOCGIFINDEX", Const, 0, ""}, + {"SIOCGIFKPI", Const, 0, ""}, + {"SIOCGIFMAC", Const, 0, ""}, + {"SIOCGIFMAP", Const, 0, ""}, + 
{"SIOCGIFMEDIA", Const, 0, ""}, + {"SIOCGIFMEM", Const, 0, ""}, + {"SIOCGIFMETRIC", Const, 0, ""}, + {"SIOCGIFMTU", Const, 0, ""}, + {"SIOCGIFNAME", Const, 0, ""}, + {"SIOCGIFNETMASK", Const, 0, ""}, + {"SIOCGIFPDSTADDR", Const, 0, ""}, + {"SIOCGIFPFLAGS", Const, 0, ""}, + {"SIOCGIFPHYS", Const, 0, ""}, + {"SIOCGIFPRIORITY", Const, 1, ""}, + {"SIOCGIFPSRCADDR", Const, 0, ""}, + {"SIOCGIFRDOMAIN", Const, 1, ""}, + {"SIOCGIFRTLABEL", Const, 1, ""}, + {"SIOCGIFSLAVE", Const, 0, ""}, + {"SIOCGIFSTATUS", Const, 0, ""}, + {"SIOCGIFTIMESLOT", Const, 1, ""}, + {"SIOCGIFTXQLEN", Const, 0, ""}, + {"SIOCGIFVLAN", Const, 0, ""}, + {"SIOCGIFWAKEFLAGS", Const, 0, ""}, + {"SIOCGIFXFLAGS", Const, 1, ""}, + {"SIOCGLIFADDR", Const, 0, ""}, + {"SIOCGLIFPHYADDR", Const, 0, ""}, + {"SIOCGLIFPHYRTABLE", Const, 1, ""}, + {"SIOCGLIFPHYTTL", Const, 3, ""}, + {"SIOCGLINKSTR", Const, 1, ""}, + {"SIOCGLOWAT", Const, 0, ""}, + {"SIOCGPGRP", Const, 0, ""}, + {"SIOCGPRIVATE_0", Const, 0, ""}, + {"SIOCGPRIVATE_1", Const, 0, ""}, + {"SIOCGRARP", Const, 0, ""}, + {"SIOCGSPPPPARAMS", Const, 3, ""}, + {"SIOCGSTAMP", Const, 0, ""}, + {"SIOCGSTAMPNS", Const, 0, ""}, + {"SIOCGVH", Const, 1, ""}, + {"SIOCGVNETID", Const, 3, ""}, + {"SIOCIFCREATE", Const, 0, ""}, + {"SIOCIFCREATE2", Const, 0, ""}, + {"SIOCIFDESTROY", Const, 0, ""}, + {"SIOCIFGCLONERS", Const, 0, ""}, + {"SIOCINITIFADDR", Const, 1, ""}, + {"SIOCPROTOPRIVATE", Const, 0, ""}, + {"SIOCRSLVMULTI", Const, 0, ""}, + {"SIOCRTMSG", Const, 0, ""}, + {"SIOCSARP", Const, 0, ""}, + {"SIOCSDRVSPEC", Const, 0, ""}, + {"SIOCSETKALIVE", Const, 1, ""}, + {"SIOCSETLABEL", Const, 1, ""}, + {"SIOCSETPFLOW", Const, 1, ""}, + {"SIOCSETPFSYNC", Const, 1, ""}, + {"SIOCSETVLAN", Const, 0, ""}, + {"SIOCSHIWAT", Const, 0, ""}, + {"SIOCSIFADDR", Const, 0, ""}, + {"SIOCSIFADDRPREF", Const, 1, ""}, + {"SIOCSIFALTMTU", Const, 0, ""}, + {"SIOCSIFASYNCMAP", Const, 0, ""}, + {"SIOCSIFBOND", Const, 0, ""}, + {"SIOCSIFBR", Const, 0, ""}, + {"SIOCSIFBRDADDR", Const, 0, ""}, + {"SIOCSIFCAP", Const, 0, ""}, + {"SIOCSIFDESCR", Const, 0, ""}, + {"SIOCSIFDSTADDR", Const, 0, ""}, + {"SIOCSIFENCAP", Const, 0, ""}, + {"SIOCSIFFIB", Const, 1, ""}, + {"SIOCSIFFLAGS", Const, 0, ""}, + {"SIOCSIFGATTR", Const, 1, ""}, + {"SIOCSIFGENERIC", Const, 0, ""}, + {"SIOCSIFHWADDR", Const, 0, ""}, + {"SIOCSIFHWBROADCAST", Const, 0, ""}, + {"SIOCSIFKPI", Const, 0, ""}, + {"SIOCSIFLINK", Const, 0, ""}, + {"SIOCSIFLLADDR", Const, 0, ""}, + {"SIOCSIFMAC", Const, 0, ""}, + {"SIOCSIFMAP", Const, 0, ""}, + {"SIOCSIFMEDIA", Const, 0, ""}, + {"SIOCSIFMEM", Const, 0, ""}, + {"SIOCSIFMETRIC", Const, 0, ""}, + {"SIOCSIFMTU", Const, 0, ""}, + {"SIOCSIFNAME", Const, 0, ""}, + {"SIOCSIFNETMASK", Const, 0, ""}, + {"SIOCSIFPFLAGS", Const, 0, ""}, + {"SIOCSIFPHYADDR", Const, 0, ""}, + {"SIOCSIFPHYS", Const, 0, ""}, + {"SIOCSIFPRIORITY", Const, 1, ""}, + {"SIOCSIFRDOMAIN", Const, 1, ""}, + {"SIOCSIFRTLABEL", Const, 1, ""}, + {"SIOCSIFRVNET", Const, 0, ""}, + {"SIOCSIFSLAVE", Const, 0, ""}, + {"SIOCSIFTIMESLOT", Const, 1, ""}, + {"SIOCSIFTXQLEN", Const, 0, ""}, + {"SIOCSIFVLAN", Const, 0, ""}, + {"SIOCSIFVNET", Const, 0, ""}, + {"SIOCSIFXFLAGS", Const, 1, ""}, + {"SIOCSLIFPHYADDR", Const, 0, ""}, + {"SIOCSLIFPHYRTABLE", Const, 1, ""}, + {"SIOCSLIFPHYTTL", Const, 3, ""}, + {"SIOCSLINKSTR", Const, 1, ""}, + {"SIOCSLOWAT", Const, 0, ""}, + {"SIOCSPGRP", Const, 0, ""}, + {"SIOCSRARP", Const, 0, ""}, + {"SIOCSSPPPPARAMS", Const, 3, ""}, + {"SIOCSVH", Const, 1, ""}, + {"SIOCSVNETID", Const, 3, ""}, + {"SIOCZIFDATA", Const, 1, ""}, + 
{"SIO_GET_EXTENSION_FUNCTION_POINTER", Const, 1, ""}, + {"SIO_GET_INTERFACE_LIST", Const, 0, ""}, + {"SIO_KEEPALIVE_VALS", Const, 3, ""}, + {"SIO_UDP_CONNRESET", Const, 4, ""}, + {"SOCK_CLOEXEC", Const, 0, ""}, + {"SOCK_DCCP", Const, 0, ""}, + {"SOCK_DGRAM", Const, 0, ""}, + {"SOCK_FLAGS_MASK", Const, 1, ""}, + {"SOCK_MAXADDRLEN", Const, 0, ""}, + {"SOCK_NONBLOCK", Const, 0, ""}, + {"SOCK_NOSIGPIPE", Const, 1, ""}, + {"SOCK_PACKET", Const, 0, ""}, + {"SOCK_RAW", Const, 0, ""}, + {"SOCK_RDM", Const, 0, ""}, + {"SOCK_SEQPACKET", Const, 0, ""}, + {"SOCK_STREAM", Const, 0, ""}, + {"SOL_AAL", Const, 0, ""}, + {"SOL_ATM", Const, 0, ""}, + {"SOL_DECNET", Const, 0, ""}, + {"SOL_ICMPV6", Const, 0, ""}, + {"SOL_IP", Const, 0, ""}, + {"SOL_IPV6", Const, 0, ""}, + {"SOL_IRDA", Const, 0, ""}, + {"SOL_PACKET", Const, 0, ""}, + {"SOL_RAW", Const, 0, ""}, + {"SOL_SOCKET", Const, 0, ""}, + {"SOL_TCP", Const, 0, ""}, + {"SOL_X25", Const, 0, ""}, + {"SOMAXCONN", Const, 0, ""}, + {"SO_ACCEPTCONN", Const, 0, ""}, + {"SO_ACCEPTFILTER", Const, 0, ""}, + {"SO_ATTACH_FILTER", Const, 0, ""}, + {"SO_BINDANY", Const, 1, ""}, + {"SO_BINDTODEVICE", Const, 0, ""}, + {"SO_BINTIME", Const, 0, ""}, + {"SO_BROADCAST", Const, 0, ""}, + {"SO_BSDCOMPAT", Const, 0, ""}, + {"SO_DEBUG", Const, 0, ""}, + {"SO_DETACH_FILTER", Const, 0, ""}, + {"SO_DOMAIN", Const, 0, ""}, + {"SO_DONTROUTE", Const, 0, ""}, + {"SO_DONTTRUNC", Const, 0, ""}, + {"SO_ERROR", Const, 0, ""}, + {"SO_KEEPALIVE", Const, 0, ""}, + {"SO_LABEL", Const, 0, ""}, + {"SO_LINGER", Const, 0, ""}, + {"SO_LINGER_SEC", Const, 0, ""}, + {"SO_LISTENINCQLEN", Const, 0, ""}, + {"SO_LISTENQLEN", Const, 0, ""}, + {"SO_LISTENQLIMIT", Const, 0, ""}, + {"SO_MARK", Const, 0, ""}, + {"SO_NETPROC", Const, 1, ""}, + {"SO_NKE", Const, 0, ""}, + {"SO_NOADDRERR", Const, 0, ""}, + {"SO_NOHEADER", Const, 1, ""}, + {"SO_NOSIGPIPE", Const, 0, ""}, + {"SO_NOTIFYCONFLICT", Const, 0, ""}, + {"SO_NO_CHECK", Const, 0, ""}, + {"SO_NO_DDP", Const, 0, ""}, + {"SO_NO_OFFLOAD", Const, 0, ""}, + {"SO_NP_EXTENSIONS", Const, 0, ""}, + {"SO_NREAD", Const, 0, ""}, + {"SO_NUMRCVPKT", Const, 16, ""}, + {"SO_NWRITE", Const, 0, ""}, + {"SO_OOBINLINE", Const, 0, ""}, + {"SO_OVERFLOWED", Const, 1, ""}, + {"SO_PASSCRED", Const, 0, ""}, + {"SO_PASSSEC", Const, 0, ""}, + {"SO_PEERCRED", Const, 0, ""}, + {"SO_PEERLABEL", Const, 0, ""}, + {"SO_PEERNAME", Const, 0, ""}, + {"SO_PEERSEC", Const, 0, ""}, + {"SO_PRIORITY", Const, 0, ""}, + {"SO_PROTOCOL", Const, 0, ""}, + {"SO_PROTOTYPE", Const, 1, ""}, + {"SO_RANDOMPORT", Const, 0, ""}, + {"SO_RCVBUF", Const, 0, ""}, + {"SO_RCVBUFFORCE", Const, 0, ""}, + {"SO_RCVLOWAT", Const, 0, ""}, + {"SO_RCVTIMEO", Const, 0, ""}, + {"SO_RESTRICTIONS", Const, 0, ""}, + {"SO_RESTRICT_DENYIN", Const, 0, ""}, + {"SO_RESTRICT_DENYOUT", Const, 0, ""}, + {"SO_RESTRICT_DENYSET", Const, 0, ""}, + {"SO_REUSEADDR", Const, 0, ""}, + {"SO_REUSEPORT", Const, 0, ""}, + {"SO_REUSESHAREUID", Const, 0, ""}, + {"SO_RTABLE", Const, 1, ""}, + {"SO_RXQ_OVFL", Const, 0, ""}, + {"SO_SECURITY_AUTHENTICATION", Const, 0, ""}, + {"SO_SECURITY_ENCRYPTION_NETWORK", Const, 0, ""}, + {"SO_SECURITY_ENCRYPTION_TRANSPORT", Const, 0, ""}, + {"SO_SETFIB", Const, 0, ""}, + {"SO_SNDBUF", Const, 0, ""}, + {"SO_SNDBUFFORCE", Const, 0, ""}, + {"SO_SNDLOWAT", Const, 0, ""}, + {"SO_SNDTIMEO", Const, 0, ""}, + {"SO_SPLICE", Const, 1, ""}, + {"SO_TIMESTAMP", Const, 0, ""}, + {"SO_TIMESTAMPING", Const, 0, ""}, + {"SO_TIMESTAMPNS", Const, 0, ""}, + {"SO_TIMESTAMP_MONOTONIC", Const, 0, ""}, + {"SO_TYPE", Const, 0, ""}, + 
{"SO_UPCALLCLOSEWAIT", Const, 0, ""}, + {"SO_UPDATE_ACCEPT_CONTEXT", Const, 0, ""}, + {"SO_UPDATE_CONNECT_CONTEXT", Const, 1, ""}, + {"SO_USELOOPBACK", Const, 0, ""}, + {"SO_USER_COOKIE", Const, 1, ""}, + {"SO_VENDOR", Const, 3, ""}, + {"SO_WANTMORE", Const, 0, ""}, + {"SO_WANTOOBFLAG", Const, 0, ""}, + {"SSLExtraCertChainPolicyPara", Type, 0, ""}, + {"SSLExtraCertChainPolicyPara.AuthType", Field, 0, ""}, + {"SSLExtraCertChainPolicyPara.Checks", Field, 0, ""}, + {"SSLExtraCertChainPolicyPara.ServerName", Field, 0, ""}, + {"SSLExtraCertChainPolicyPara.Size", Field, 0, ""}, + {"STANDARD_RIGHTS_ALL", Const, 0, ""}, + {"STANDARD_RIGHTS_EXECUTE", Const, 0, ""}, + {"STANDARD_RIGHTS_READ", Const, 0, ""}, + {"STANDARD_RIGHTS_REQUIRED", Const, 0, ""}, + {"STANDARD_RIGHTS_WRITE", Const, 0, ""}, + {"STARTF_USESHOWWINDOW", Const, 0, ""}, + {"STARTF_USESTDHANDLES", Const, 0, ""}, + {"STD_ERROR_HANDLE", Const, 0, ""}, + {"STD_INPUT_HANDLE", Const, 0, ""}, + {"STD_OUTPUT_HANDLE", Const, 0, ""}, + {"SUBLANG_ENGLISH_US", Const, 0, ""}, + {"SW_FORCEMINIMIZE", Const, 0, ""}, + {"SW_HIDE", Const, 0, ""}, + {"SW_MAXIMIZE", Const, 0, ""}, + {"SW_MINIMIZE", Const, 0, ""}, + {"SW_NORMAL", Const, 0, ""}, + {"SW_RESTORE", Const, 0, ""}, + {"SW_SHOW", Const, 0, ""}, + {"SW_SHOWDEFAULT", Const, 0, ""}, + {"SW_SHOWMAXIMIZED", Const, 0, ""}, + {"SW_SHOWMINIMIZED", Const, 0, ""}, + {"SW_SHOWMINNOACTIVE", Const, 0, ""}, + {"SW_SHOWNA", Const, 0, ""}, + {"SW_SHOWNOACTIVATE", Const, 0, ""}, + {"SW_SHOWNORMAL", Const, 0, ""}, + {"SYMBOLIC_LINK_FLAG_DIRECTORY", Const, 4, ""}, + {"SYNCHRONIZE", Const, 0, ""}, + {"SYSCTL_VERSION", Const, 1, ""}, + {"SYSCTL_VERS_0", Const, 1, ""}, + {"SYSCTL_VERS_1", Const, 1, ""}, + {"SYSCTL_VERS_MASK", Const, 1, ""}, + {"SYS_ABORT2", Const, 0, ""}, + {"SYS_ACCEPT", Const, 0, ""}, + {"SYS_ACCEPT4", Const, 0, ""}, + {"SYS_ACCEPT_NOCANCEL", Const, 0, ""}, + {"SYS_ACCESS", Const, 0, ""}, + {"SYS_ACCESS_EXTENDED", Const, 0, ""}, + {"SYS_ACCT", Const, 0, ""}, + {"SYS_ADD_KEY", Const, 0, ""}, + {"SYS_ADD_PROFIL", Const, 0, ""}, + {"SYS_ADJFREQ", Const, 1, ""}, + {"SYS_ADJTIME", Const, 0, ""}, + {"SYS_ADJTIMEX", Const, 0, ""}, + {"SYS_AFS_SYSCALL", Const, 0, ""}, + {"SYS_AIO_CANCEL", Const, 0, ""}, + {"SYS_AIO_ERROR", Const, 0, ""}, + {"SYS_AIO_FSYNC", Const, 0, ""}, + {"SYS_AIO_MLOCK", Const, 14, ""}, + {"SYS_AIO_READ", Const, 0, ""}, + {"SYS_AIO_RETURN", Const, 0, ""}, + {"SYS_AIO_SUSPEND", Const, 0, ""}, + {"SYS_AIO_SUSPEND_NOCANCEL", Const, 0, ""}, + {"SYS_AIO_WAITCOMPLETE", Const, 14, ""}, + {"SYS_AIO_WRITE", Const, 0, ""}, + {"SYS_ALARM", Const, 0, ""}, + {"SYS_ARCH_PRCTL", Const, 0, ""}, + {"SYS_ARM_FADVISE64_64", Const, 0, ""}, + {"SYS_ARM_SYNC_FILE_RANGE", Const, 0, ""}, + {"SYS_ATGETMSG", Const, 0, ""}, + {"SYS_ATPGETREQ", Const, 0, ""}, + {"SYS_ATPGETRSP", Const, 0, ""}, + {"SYS_ATPSNDREQ", Const, 0, ""}, + {"SYS_ATPSNDRSP", Const, 0, ""}, + {"SYS_ATPUTMSG", Const, 0, ""}, + {"SYS_ATSOCKET", Const, 0, ""}, + {"SYS_AUDIT", Const, 0, ""}, + {"SYS_AUDITCTL", Const, 0, ""}, + {"SYS_AUDITON", Const, 0, ""}, + {"SYS_AUDIT_SESSION_JOIN", Const, 0, ""}, + {"SYS_AUDIT_SESSION_PORT", Const, 0, ""}, + {"SYS_AUDIT_SESSION_SELF", Const, 0, ""}, + {"SYS_BDFLUSH", Const, 0, ""}, + {"SYS_BIND", Const, 0, ""}, + {"SYS_BINDAT", Const, 3, ""}, + {"SYS_BREAK", Const, 0, ""}, + {"SYS_BRK", Const, 0, ""}, + {"SYS_BSDTHREAD_CREATE", Const, 0, ""}, + {"SYS_BSDTHREAD_REGISTER", Const, 0, ""}, + {"SYS_BSDTHREAD_TERMINATE", Const, 0, ""}, + {"SYS_CAPGET", Const, 0, ""}, + {"SYS_CAPSET", Const, 0, ""}, + 
{"SYS_CAP_ENTER", Const, 0, ""}, + {"SYS_CAP_FCNTLS_GET", Const, 1, ""}, + {"SYS_CAP_FCNTLS_LIMIT", Const, 1, ""}, + {"SYS_CAP_GETMODE", Const, 0, ""}, + {"SYS_CAP_GETRIGHTS", Const, 0, ""}, + {"SYS_CAP_IOCTLS_GET", Const, 1, ""}, + {"SYS_CAP_IOCTLS_LIMIT", Const, 1, ""}, + {"SYS_CAP_NEW", Const, 0, ""}, + {"SYS_CAP_RIGHTS_GET", Const, 1, ""}, + {"SYS_CAP_RIGHTS_LIMIT", Const, 1, ""}, + {"SYS_CHDIR", Const, 0, ""}, + {"SYS_CHFLAGS", Const, 0, ""}, + {"SYS_CHFLAGSAT", Const, 3, ""}, + {"SYS_CHMOD", Const, 0, ""}, + {"SYS_CHMOD_EXTENDED", Const, 0, ""}, + {"SYS_CHOWN", Const, 0, ""}, + {"SYS_CHOWN32", Const, 0, ""}, + {"SYS_CHROOT", Const, 0, ""}, + {"SYS_CHUD", Const, 0, ""}, + {"SYS_CLOCK_ADJTIME", Const, 0, ""}, + {"SYS_CLOCK_GETCPUCLOCKID2", Const, 1, ""}, + {"SYS_CLOCK_GETRES", Const, 0, ""}, + {"SYS_CLOCK_GETTIME", Const, 0, ""}, + {"SYS_CLOCK_NANOSLEEP", Const, 0, ""}, + {"SYS_CLOCK_SETTIME", Const, 0, ""}, + {"SYS_CLONE", Const, 0, ""}, + {"SYS_CLOSE", Const, 0, ""}, + {"SYS_CLOSEFROM", Const, 0, ""}, + {"SYS_CLOSE_NOCANCEL", Const, 0, ""}, + {"SYS_CONNECT", Const, 0, ""}, + {"SYS_CONNECTAT", Const, 3, ""}, + {"SYS_CONNECT_NOCANCEL", Const, 0, ""}, + {"SYS_COPYFILE", Const, 0, ""}, + {"SYS_CPUSET", Const, 0, ""}, + {"SYS_CPUSET_GETAFFINITY", Const, 0, ""}, + {"SYS_CPUSET_GETID", Const, 0, ""}, + {"SYS_CPUSET_SETAFFINITY", Const, 0, ""}, + {"SYS_CPUSET_SETID", Const, 0, ""}, + {"SYS_CREAT", Const, 0, ""}, + {"SYS_CREATE_MODULE", Const, 0, ""}, + {"SYS_CSOPS", Const, 0, ""}, + {"SYS_CSOPS_AUDITTOKEN", Const, 16, ""}, + {"SYS_DELETE", Const, 0, ""}, + {"SYS_DELETE_MODULE", Const, 0, ""}, + {"SYS_DUP", Const, 0, ""}, + {"SYS_DUP2", Const, 0, ""}, + {"SYS_DUP3", Const, 0, ""}, + {"SYS_EACCESS", Const, 0, ""}, + {"SYS_EPOLL_CREATE", Const, 0, ""}, + {"SYS_EPOLL_CREATE1", Const, 0, ""}, + {"SYS_EPOLL_CTL", Const, 0, ""}, + {"SYS_EPOLL_CTL_OLD", Const, 0, ""}, + {"SYS_EPOLL_PWAIT", Const, 0, ""}, + {"SYS_EPOLL_WAIT", Const, 0, ""}, + {"SYS_EPOLL_WAIT_OLD", Const, 0, ""}, + {"SYS_EVENTFD", Const, 0, ""}, + {"SYS_EVENTFD2", Const, 0, ""}, + {"SYS_EXCHANGEDATA", Const, 0, ""}, + {"SYS_EXECVE", Const, 0, ""}, + {"SYS_EXIT", Const, 0, ""}, + {"SYS_EXIT_GROUP", Const, 0, ""}, + {"SYS_EXTATTRCTL", Const, 0, ""}, + {"SYS_EXTATTR_DELETE_FD", Const, 0, ""}, + {"SYS_EXTATTR_DELETE_FILE", Const, 0, ""}, + {"SYS_EXTATTR_DELETE_LINK", Const, 0, ""}, + {"SYS_EXTATTR_GET_FD", Const, 0, ""}, + {"SYS_EXTATTR_GET_FILE", Const, 0, ""}, + {"SYS_EXTATTR_GET_LINK", Const, 0, ""}, + {"SYS_EXTATTR_LIST_FD", Const, 0, ""}, + {"SYS_EXTATTR_LIST_FILE", Const, 0, ""}, + {"SYS_EXTATTR_LIST_LINK", Const, 0, ""}, + {"SYS_EXTATTR_SET_FD", Const, 0, ""}, + {"SYS_EXTATTR_SET_FILE", Const, 0, ""}, + {"SYS_EXTATTR_SET_LINK", Const, 0, ""}, + {"SYS_FACCESSAT", Const, 0, ""}, + {"SYS_FADVISE64", Const, 0, ""}, + {"SYS_FADVISE64_64", Const, 0, ""}, + {"SYS_FALLOCATE", Const, 0, ""}, + {"SYS_FANOTIFY_INIT", Const, 0, ""}, + {"SYS_FANOTIFY_MARK", Const, 0, ""}, + {"SYS_FCHDIR", Const, 0, ""}, + {"SYS_FCHFLAGS", Const, 0, ""}, + {"SYS_FCHMOD", Const, 0, ""}, + {"SYS_FCHMODAT", Const, 0, ""}, + {"SYS_FCHMOD_EXTENDED", Const, 0, ""}, + {"SYS_FCHOWN", Const, 0, ""}, + {"SYS_FCHOWN32", Const, 0, ""}, + {"SYS_FCHOWNAT", Const, 0, ""}, + {"SYS_FCHROOT", Const, 1, ""}, + {"SYS_FCNTL", Const, 0, ""}, + {"SYS_FCNTL64", Const, 0, ""}, + {"SYS_FCNTL_NOCANCEL", Const, 0, ""}, + {"SYS_FDATASYNC", Const, 0, ""}, + {"SYS_FEXECVE", Const, 0, ""}, + {"SYS_FFCLOCK_GETCOUNTER", Const, 0, ""}, + {"SYS_FFCLOCK_GETESTIMATE", Const, 0, ""}, + 
{"SYS_FFCLOCK_SETESTIMATE", Const, 0, ""}, + {"SYS_FFSCTL", Const, 0, ""}, + {"SYS_FGETATTRLIST", Const, 0, ""}, + {"SYS_FGETXATTR", Const, 0, ""}, + {"SYS_FHOPEN", Const, 0, ""}, + {"SYS_FHSTAT", Const, 0, ""}, + {"SYS_FHSTATFS", Const, 0, ""}, + {"SYS_FILEPORT_MAKEFD", Const, 0, ""}, + {"SYS_FILEPORT_MAKEPORT", Const, 0, ""}, + {"SYS_FKTRACE", Const, 1, ""}, + {"SYS_FLISTXATTR", Const, 0, ""}, + {"SYS_FLOCK", Const, 0, ""}, + {"SYS_FORK", Const, 0, ""}, + {"SYS_FPATHCONF", Const, 0, ""}, + {"SYS_FREEBSD6_FTRUNCATE", Const, 0, ""}, + {"SYS_FREEBSD6_LSEEK", Const, 0, ""}, + {"SYS_FREEBSD6_MMAP", Const, 0, ""}, + {"SYS_FREEBSD6_PREAD", Const, 0, ""}, + {"SYS_FREEBSD6_PWRITE", Const, 0, ""}, + {"SYS_FREEBSD6_TRUNCATE", Const, 0, ""}, + {"SYS_FREMOVEXATTR", Const, 0, ""}, + {"SYS_FSCTL", Const, 0, ""}, + {"SYS_FSETATTRLIST", Const, 0, ""}, + {"SYS_FSETXATTR", Const, 0, ""}, + {"SYS_FSGETPATH", Const, 0, ""}, + {"SYS_FSTAT", Const, 0, ""}, + {"SYS_FSTAT64", Const, 0, ""}, + {"SYS_FSTAT64_EXTENDED", Const, 0, ""}, + {"SYS_FSTATAT", Const, 0, ""}, + {"SYS_FSTATAT64", Const, 0, ""}, + {"SYS_FSTATFS", Const, 0, ""}, + {"SYS_FSTATFS64", Const, 0, ""}, + {"SYS_FSTATV", Const, 0, ""}, + {"SYS_FSTATVFS1", Const, 1, ""}, + {"SYS_FSTAT_EXTENDED", Const, 0, ""}, + {"SYS_FSYNC", Const, 0, ""}, + {"SYS_FSYNC_NOCANCEL", Const, 0, ""}, + {"SYS_FSYNC_RANGE", Const, 1, ""}, + {"SYS_FTIME", Const, 0, ""}, + {"SYS_FTRUNCATE", Const, 0, ""}, + {"SYS_FTRUNCATE64", Const, 0, ""}, + {"SYS_FUTEX", Const, 0, ""}, + {"SYS_FUTIMENS", Const, 1, ""}, + {"SYS_FUTIMES", Const, 0, ""}, + {"SYS_FUTIMESAT", Const, 0, ""}, + {"SYS_GETATTRLIST", Const, 0, ""}, + {"SYS_GETAUDIT", Const, 0, ""}, + {"SYS_GETAUDIT_ADDR", Const, 0, ""}, + {"SYS_GETAUID", Const, 0, ""}, + {"SYS_GETCONTEXT", Const, 0, ""}, + {"SYS_GETCPU", Const, 0, ""}, + {"SYS_GETCWD", Const, 0, ""}, + {"SYS_GETDENTS", Const, 0, ""}, + {"SYS_GETDENTS64", Const, 0, ""}, + {"SYS_GETDIRENTRIES", Const, 0, ""}, + {"SYS_GETDIRENTRIES64", Const, 0, ""}, + {"SYS_GETDIRENTRIESATTR", Const, 0, ""}, + {"SYS_GETDTABLECOUNT", Const, 1, ""}, + {"SYS_GETDTABLESIZE", Const, 0, ""}, + {"SYS_GETEGID", Const, 0, ""}, + {"SYS_GETEGID32", Const, 0, ""}, + {"SYS_GETEUID", Const, 0, ""}, + {"SYS_GETEUID32", Const, 0, ""}, + {"SYS_GETFH", Const, 0, ""}, + {"SYS_GETFSSTAT", Const, 0, ""}, + {"SYS_GETFSSTAT64", Const, 0, ""}, + {"SYS_GETGID", Const, 0, ""}, + {"SYS_GETGID32", Const, 0, ""}, + {"SYS_GETGROUPS", Const, 0, ""}, + {"SYS_GETGROUPS32", Const, 0, ""}, + {"SYS_GETHOSTUUID", Const, 0, ""}, + {"SYS_GETITIMER", Const, 0, ""}, + {"SYS_GETLCID", Const, 0, ""}, + {"SYS_GETLOGIN", Const, 0, ""}, + {"SYS_GETLOGINCLASS", Const, 0, ""}, + {"SYS_GETPEERNAME", Const, 0, ""}, + {"SYS_GETPGID", Const, 0, ""}, + {"SYS_GETPGRP", Const, 0, ""}, + {"SYS_GETPID", Const, 0, ""}, + {"SYS_GETPMSG", Const, 0, ""}, + {"SYS_GETPPID", Const, 0, ""}, + {"SYS_GETPRIORITY", Const, 0, ""}, + {"SYS_GETRESGID", Const, 0, ""}, + {"SYS_GETRESGID32", Const, 0, ""}, + {"SYS_GETRESUID", Const, 0, ""}, + {"SYS_GETRESUID32", Const, 0, ""}, + {"SYS_GETRLIMIT", Const, 0, ""}, + {"SYS_GETRTABLE", Const, 1, ""}, + {"SYS_GETRUSAGE", Const, 0, ""}, + {"SYS_GETSGROUPS", Const, 0, ""}, + {"SYS_GETSID", Const, 0, ""}, + {"SYS_GETSOCKNAME", Const, 0, ""}, + {"SYS_GETSOCKOPT", Const, 0, ""}, + {"SYS_GETTHRID", Const, 1, ""}, + {"SYS_GETTID", Const, 0, ""}, + {"SYS_GETTIMEOFDAY", Const, 0, ""}, + {"SYS_GETUID", Const, 0, ""}, + {"SYS_GETUID32", Const, 0, ""}, + {"SYS_GETVFSSTAT", Const, 1, ""}, + {"SYS_GETWGROUPS", Const, 0, ""}, + 
{"SYS_GETXATTR", Const, 0, ""}, + {"SYS_GET_KERNEL_SYMS", Const, 0, ""}, + {"SYS_GET_MEMPOLICY", Const, 0, ""}, + {"SYS_GET_ROBUST_LIST", Const, 0, ""}, + {"SYS_GET_THREAD_AREA", Const, 0, ""}, + {"SYS_GSSD_SYSCALL", Const, 14, ""}, + {"SYS_GTTY", Const, 0, ""}, + {"SYS_IDENTITYSVC", Const, 0, ""}, + {"SYS_IDLE", Const, 0, ""}, + {"SYS_INITGROUPS", Const, 0, ""}, + {"SYS_INIT_MODULE", Const, 0, ""}, + {"SYS_INOTIFY_ADD_WATCH", Const, 0, ""}, + {"SYS_INOTIFY_INIT", Const, 0, ""}, + {"SYS_INOTIFY_INIT1", Const, 0, ""}, + {"SYS_INOTIFY_RM_WATCH", Const, 0, ""}, + {"SYS_IOCTL", Const, 0, ""}, + {"SYS_IOPERM", Const, 0, ""}, + {"SYS_IOPL", Const, 0, ""}, + {"SYS_IOPOLICYSYS", Const, 0, ""}, + {"SYS_IOPRIO_GET", Const, 0, ""}, + {"SYS_IOPRIO_SET", Const, 0, ""}, + {"SYS_IO_CANCEL", Const, 0, ""}, + {"SYS_IO_DESTROY", Const, 0, ""}, + {"SYS_IO_GETEVENTS", Const, 0, ""}, + {"SYS_IO_SETUP", Const, 0, ""}, + {"SYS_IO_SUBMIT", Const, 0, ""}, + {"SYS_IPC", Const, 0, ""}, + {"SYS_ISSETUGID", Const, 0, ""}, + {"SYS_JAIL", Const, 0, ""}, + {"SYS_JAIL_ATTACH", Const, 0, ""}, + {"SYS_JAIL_GET", Const, 0, ""}, + {"SYS_JAIL_REMOVE", Const, 0, ""}, + {"SYS_JAIL_SET", Const, 0, ""}, + {"SYS_KAS_INFO", Const, 16, ""}, + {"SYS_KDEBUG_TRACE", Const, 0, ""}, + {"SYS_KENV", Const, 0, ""}, + {"SYS_KEVENT", Const, 0, ""}, + {"SYS_KEVENT64", Const, 0, ""}, + {"SYS_KEXEC_LOAD", Const, 0, ""}, + {"SYS_KEYCTL", Const, 0, ""}, + {"SYS_KILL", Const, 0, ""}, + {"SYS_KLDFIND", Const, 0, ""}, + {"SYS_KLDFIRSTMOD", Const, 0, ""}, + {"SYS_KLDLOAD", Const, 0, ""}, + {"SYS_KLDNEXT", Const, 0, ""}, + {"SYS_KLDSTAT", Const, 0, ""}, + {"SYS_KLDSYM", Const, 0, ""}, + {"SYS_KLDUNLOAD", Const, 0, ""}, + {"SYS_KLDUNLOADF", Const, 0, ""}, + {"SYS_KMQ_NOTIFY", Const, 14, ""}, + {"SYS_KMQ_OPEN", Const, 14, ""}, + {"SYS_KMQ_SETATTR", Const, 14, ""}, + {"SYS_KMQ_TIMEDRECEIVE", Const, 14, ""}, + {"SYS_KMQ_TIMEDSEND", Const, 14, ""}, + {"SYS_KMQ_UNLINK", Const, 14, ""}, + {"SYS_KQUEUE", Const, 0, ""}, + {"SYS_KQUEUE1", Const, 1, ""}, + {"SYS_KSEM_CLOSE", Const, 14, ""}, + {"SYS_KSEM_DESTROY", Const, 14, ""}, + {"SYS_KSEM_GETVALUE", Const, 14, ""}, + {"SYS_KSEM_INIT", Const, 14, ""}, + {"SYS_KSEM_OPEN", Const, 14, ""}, + {"SYS_KSEM_POST", Const, 14, ""}, + {"SYS_KSEM_TIMEDWAIT", Const, 14, ""}, + {"SYS_KSEM_TRYWAIT", Const, 14, ""}, + {"SYS_KSEM_UNLINK", Const, 14, ""}, + {"SYS_KSEM_WAIT", Const, 14, ""}, + {"SYS_KTIMER_CREATE", Const, 0, ""}, + {"SYS_KTIMER_DELETE", Const, 0, ""}, + {"SYS_KTIMER_GETOVERRUN", Const, 0, ""}, + {"SYS_KTIMER_GETTIME", Const, 0, ""}, + {"SYS_KTIMER_SETTIME", Const, 0, ""}, + {"SYS_KTRACE", Const, 0, ""}, + {"SYS_LCHFLAGS", Const, 0, ""}, + {"SYS_LCHMOD", Const, 0, ""}, + {"SYS_LCHOWN", Const, 0, ""}, + {"SYS_LCHOWN32", Const, 0, ""}, + {"SYS_LEDGER", Const, 16, ""}, + {"SYS_LGETFH", Const, 0, ""}, + {"SYS_LGETXATTR", Const, 0, ""}, + {"SYS_LINK", Const, 0, ""}, + {"SYS_LINKAT", Const, 0, ""}, + {"SYS_LIO_LISTIO", Const, 0, ""}, + {"SYS_LISTEN", Const, 0, ""}, + {"SYS_LISTXATTR", Const, 0, ""}, + {"SYS_LLISTXATTR", Const, 0, ""}, + {"SYS_LOCK", Const, 0, ""}, + {"SYS_LOOKUP_DCOOKIE", Const, 0, ""}, + {"SYS_LPATHCONF", Const, 0, ""}, + {"SYS_LREMOVEXATTR", Const, 0, ""}, + {"SYS_LSEEK", Const, 0, ""}, + {"SYS_LSETXATTR", Const, 0, ""}, + {"SYS_LSTAT", Const, 0, ""}, + {"SYS_LSTAT64", Const, 0, ""}, + {"SYS_LSTAT64_EXTENDED", Const, 0, ""}, + {"SYS_LSTATV", Const, 0, ""}, + {"SYS_LSTAT_EXTENDED", Const, 0, ""}, + {"SYS_LUTIMES", Const, 0, ""}, + {"SYS_MAC_SYSCALL", Const, 0, ""}, + {"SYS_MADVISE", Const, 0, ""}, + 
{"SYS_MADVISE1", Const, 0, ""}, + {"SYS_MAXSYSCALL", Const, 0, ""}, + {"SYS_MBIND", Const, 0, ""}, + {"SYS_MIGRATE_PAGES", Const, 0, ""}, + {"SYS_MINCORE", Const, 0, ""}, + {"SYS_MINHERIT", Const, 0, ""}, + {"SYS_MKCOMPLEX", Const, 0, ""}, + {"SYS_MKDIR", Const, 0, ""}, + {"SYS_MKDIRAT", Const, 0, ""}, + {"SYS_MKDIR_EXTENDED", Const, 0, ""}, + {"SYS_MKFIFO", Const, 0, ""}, + {"SYS_MKFIFOAT", Const, 0, ""}, + {"SYS_MKFIFO_EXTENDED", Const, 0, ""}, + {"SYS_MKNOD", Const, 0, ""}, + {"SYS_MKNODAT", Const, 0, ""}, + {"SYS_MLOCK", Const, 0, ""}, + {"SYS_MLOCKALL", Const, 0, ""}, + {"SYS_MMAP", Const, 0, ""}, + {"SYS_MMAP2", Const, 0, ""}, + {"SYS_MODCTL", Const, 1, ""}, + {"SYS_MODFIND", Const, 0, ""}, + {"SYS_MODFNEXT", Const, 0, ""}, + {"SYS_MODIFY_LDT", Const, 0, ""}, + {"SYS_MODNEXT", Const, 0, ""}, + {"SYS_MODSTAT", Const, 0, ""}, + {"SYS_MODWATCH", Const, 0, ""}, + {"SYS_MOUNT", Const, 0, ""}, + {"SYS_MOVE_PAGES", Const, 0, ""}, + {"SYS_MPROTECT", Const, 0, ""}, + {"SYS_MPX", Const, 0, ""}, + {"SYS_MQUERY", Const, 1, ""}, + {"SYS_MQ_GETSETATTR", Const, 0, ""}, + {"SYS_MQ_NOTIFY", Const, 0, ""}, + {"SYS_MQ_OPEN", Const, 0, ""}, + {"SYS_MQ_TIMEDRECEIVE", Const, 0, ""}, + {"SYS_MQ_TIMEDSEND", Const, 0, ""}, + {"SYS_MQ_UNLINK", Const, 0, ""}, + {"SYS_MREMAP", Const, 0, ""}, + {"SYS_MSGCTL", Const, 0, ""}, + {"SYS_MSGGET", Const, 0, ""}, + {"SYS_MSGRCV", Const, 0, ""}, + {"SYS_MSGRCV_NOCANCEL", Const, 0, ""}, + {"SYS_MSGSND", Const, 0, ""}, + {"SYS_MSGSND_NOCANCEL", Const, 0, ""}, + {"SYS_MSGSYS", Const, 0, ""}, + {"SYS_MSYNC", Const, 0, ""}, + {"SYS_MSYNC_NOCANCEL", Const, 0, ""}, + {"SYS_MUNLOCK", Const, 0, ""}, + {"SYS_MUNLOCKALL", Const, 0, ""}, + {"SYS_MUNMAP", Const, 0, ""}, + {"SYS_NAME_TO_HANDLE_AT", Const, 0, ""}, + {"SYS_NANOSLEEP", Const, 0, ""}, + {"SYS_NEWFSTATAT", Const, 0, ""}, + {"SYS_NFSCLNT", Const, 0, ""}, + {"SYS_NFSSERVCTL", Const, 0, ""}, + {"SYS_NFSSVC", Const, 0, ""}, + {"SYS_NFSTAT", Const, 0, ""}, + {"SYS_NICE", Const, 0, ""}, + {"SYS_NLM_SYSCALL", Const, 14, ""}, + {"SYS_NLSTAT", Const, 0, ""}, + {"SYS_NMOUNT", Const, 0, ""}, + {"SYS_NSTAT", Const, 0, ""}, + {"SYS_NTP_ADJTIME", Const, 0, ""}, + {"SYS_NTP_GETTIME", Const, 0, ""}, + {"SYS_NUMA_GETAFFINITY", Const, 14, ""}, + {"SYS_NUMA_SETAFFINITY", Const, 14, ""}, + {"SYS_OABI_SYSCALL_BASE", Const, 0, ""}, + {"SYS_OBREAK", Const, 0, ""}, + {"SYS_OLDFSTAT", Const, 0, ""}, + {"SYS_OLDLSTAT", Const, 0, ""}, + {"SYS_OLDOLDUNAME", Const, 0, ""}, + {"SYS_OLDSTAT", Const, 0, ""}, + {"SYS_OLDUNAME", Const, 0, ""}, + {"SYS_OPEN", Const, 0, ""}, + {"SYS_OPENAT", Const, 0, ""}, + {"SYS_OPENBSD_POLL", Const, 0, ""}, + {"SYS_OPEN_BY_HANDLE_AT", Const, 0, ""}, + {"SYS_OPEN_DPROTECTED_NP", Const, 16, ""}, + {"SYS_OPEN_EXTENDED", Const, 0, ""}, + {"SYS_OPEN_NOCANCEL", Const, 0, ""}, + {"SYS_OVADVISE", Const, 0, ""}, + {"SYS_PACCEPT", Const, 1, ""}, + {"SYS_PATHCONF", Const, 0, ""}, + {"SYS_PAUSE", Const, 0, ""}, + {"SYS_PCICONFIG_IOBASE", Const, 0, ""}, + {"SYS_PCICONFIG_READ", Const, 0, ""}, + {"SYS_PCICONFIG_WRITE", Const, 0, ""}, + {"SYS_PDFORK", Const, 0, ""}, + {"SYS_PDGETPID", Const, 0, ""}, + {"SYS_PDKILL", Const, 0, ""}, + {"SYS_PERF_EVENT_OPEN", Const, 0, ""}, + {"SYS_PERSONALITY", Const, 0, ""}, + {"SYS_PID_HIBERNATE", Const, 0, ""}, + {"SYS_PID_RESUME", Const, 0, ""}, + {"SYS_PID_SHUTDOWN_SOCKETS", Const, 0, ""}, + {"SYS_PID_SUSPEND", Const, 0, ""}, + {"SYS_PIPE", Const, 0, ""}, + {"SYS_PIPE2", Const, 0, ""}, + {"SYS_PIVOT_ROOT", Const, 0, ""}, + {"SYS_PMC_CONTROL", Const, 1, ""}, + {"SYS_PMC_GET_INFO", Const, 1, ""}, + 
{"SYS_POLL", Const, 0, ""}, + {"SYS_POLLTS", Const, 1, ""}, + {"SYS_POLL_NOCANCEL", Const, 0, ""}, + {"SYS_POSIX_FADVISE", Const, 0, ""}, + {"SYS_POSIX_FALLOCATE", Const, 0, ""}, + {"SYS_POSIX_OPENPT", Const, 0, ""}, + {"SYS_POSIX_SPAWN", Const, 0, ""}, + {"SYS_PPOLL", Const, 0, ""}, + {"SYS_PRCTL", Const, 0, ""}, + {"SYS_PREAD", Const, 0, ""}, + {"SYS_PREAD64", Const, 0, ""}, + {"SYS_PREADV", Const, 0, ""}, + {"SYS_PREAD_NOCANCEL", Const, 0, ""}, + {"SYS_PRLIMIT64", Const, 0, ""}, + {"SYS_PROCCTL", Const, 3, ""}, + {"SYS_PROCESS_POLICY", Const, 0, ""}, + {"SYS_PROCESS_VM_READV", Const, 0, ""}, + {"SYS_PROCESS_VM_WRITEV", Const, 0, ""}, + {"SYS_PROC_INFO", Const, 0, ""}, + {"SYS_PROF", Const, 0, ""}, + {"SYS_PROFIL", Const, 0, ""}, + {"SYS_PSELECT", Const, 0, ""}, + {"SYS_PSELECT6", Const, 0, ""}, + {"SYS_PSET_ASSIGN", Const, 1, ""}, + {"SYS_PSET_CREATE", Const, 1, ""}, + {"SYS_PSET_DESTROY", Const, 1, ""}, + {"SYS_PSYNCH_CVBROAD", Const, 0, ""}, + {"SYS_PSYNCH_CVCLRPREPOST", Const, 0, ""}, + {"SYS_PSYNCH_CVSIGNAL", Const, 0, ""}, + {"SYS_PSYNCH_CVWAIT", Const, 0, ""}, + {"SYS_PSYNCH_MUTEXDROP", Const, 0, ""}, + {"SYS_PSYNCH_MUTEXWAIT", Const, 0, ""}, + {"SYS_PSYNCH_RW_DOWNGRADE", Const, 0, ""}, + {"SYS_PSYNCH_RW_LONGRDLOCK", Const, 0, ""}, + {"SYS_PSYNCH_RW_RDLOCK", Const, 0, ""}, + {"SYS_PSYNCH_RW_UNLOCK", Const, 0, ""}, + {"SYS_PSYNCH_RW_UNLOCK2", Const, 0, ""}, + {"SYS_PSYNCH_RW_UPGRADE", Const, 0, ""}, + {"SYS_PSYNCH_RW_WRLOCK", Const, 0, ""}, + {"SYS_PSYNCH_RW_YIELDWRLOCK", Const, 0, ""}, + {"SYS_PTRACE", Const, 0, ""}, + {"SYS_PUTPMSG", Const, 0, ""}, + {"SYS_PWRITE", Const, 0, ""}, + {"SYS_PWRITE64", Const, 0, ""}, + {"SYS_PWRITEV", Const, 0, ""}, + {"SYS_PWRITE_NOCANCEL", Const, 0, ""}, + {"SYS_QUERY_MODULE", Const, 0, ""}, + {"SYS_QUOTACTL", Const, 0, ""}, + {"SYS_RASCTL", Const, 1, ""}, + {"SYS_RCTL_ADD_RULE", Const, 0, ""}, + {"SYS_RCTL_GET_LIMITS", Const, 0, ""}, + {"SYS_RCTL_GET_RACCT", Const, 0, ""}, + {"SYS_RCTL_GET_RULES", Const, 0, ""}, + {"SYS_RCTL_REMOVE_RULE", Const, 0, ""}, + {"SYS_READ", Const, 0, ""}, + {"SYS_READAHEAD", Const, 0, ""}, + {"SYS_READDIR", Const, 0, ""}, + {"SYS_READLINK", Const, 0, ""}, + {"SYS_READLINKAT", Const, 0, ""}, + {"SYS_READV", Const, 0, ""}, + {"SYS_READV_NOCANCEL", Const, 0, ""}, + {"SYS_READ_NOCANCEL", Const, 0, ""}, + {"SYS_REBOOT", Const, 0, ""}, + {"SYS_RECV", Const, 0, ""}, + {"SYS_RECVFROM", Const, 0, ""}, + {"SYS_RECVFROM_NOCANCEL", Const, 0, ""}, + {"SYS_RECVMMSG", Const, 0, ""}, + {"SYS_RECVMSG", Const, 0, ""}, + {"SYS_RECVMSG_NOCANCEL", Const, 0, ""}, + {"SYS_REMAP_FILE_PAGES", Const, 0, ""}, + {"SYS_REMOVEXATTR", Const, 0, ""}, + {"SYS_RENAME", Const, 0, ""}, + {"SYS_RENAMEAT", Const, 0, ""}, + {"SYS_REQUEST_KEY", Const, 0, ""}, + {"SYS_RESTART_SYSCALL", Const, 0, ""}, + {"SYS_REVOKE", Const, 0, ""}, + {"SYS_RFORK", Const, 0, ""}, + {"SYS_RMDIR", Const, 0, ""}, + {"SYS_RTPRIO", Const, 0, ""}, + {"SYS_RTPRIO_THREAD", Const, 0, ""}, + {"SYS_RT_SIGACTION", Const, 0, ""}, + {"SYS_RT_SIGPENDING", Const, 0, ""}, + {"SYS_RT_SIGPROCMASK", Const, 0, ""}, + {"SYS_RT_SIGQUEUEINFO", Const, 0, ""}, + {"SYS_RT_SIGRETURN", Const, 0, ""}, + {"SYS_RT_SIGSUSPEND", Const, 0, ""}, + {"SYS_RT_SIGTIMEDWAIT", Const, 0, ""}, + {"SYS_RT_TGSIGQUEUEINFO", Const, 0, ""}, + {"SYS_SBRK", Const, 0, ""}, + {"SYS_SCHED_GETAFFINITY", Const, 0, ""}, + {"SYS_SCHED_GETPARAM", Const, 0, ""}, + {"SYS_SCHED_GETSCHEDULER", Const, 0, ""}, + {"SYS_SCHED_GET_PRIORITY_MAX", Const, 0, ""}, + {"SYS_SCHED_GET_PRIORITY_MIN", Const, 0, ""}, + {"SYS_SCHED_RR_GET_INTERVAL", 
Const, 0, ""}, + {"SYS_SCHED_SETAFFINITY", Const, 0, ""}, + {"SYS_SCHED_SETPARAM", Const, 0, ""}, + {"SYS_SCHED_SETSCHEDULER", Const, 0, ""}, + {"SYS_SCHED_YIELD", Const, 0, ""}, + {"SYS_SCTP_GENERIC_RECVMSG", Const, 0, ""}, + {"SYS_SCTP_GENERIC_SENDMSG", Const, 0, ""}, + {"SYS_SCTP_GENERIC_SENDMSG_IOV", Const, 0, ""}, + {"SYS_SCTP_PEELOFF", Const, 0, ""}, + {"SYS_SEARCHFS", Const, 0, ""}, + {"SYS_SECURITY", Const, 0, ""}, + {"SYS_SELECT", Const, 0, ""}, + {"SYS_SELECT_NOCANCEL", Const, 0, ""}, + {"SYS_SEMCONFIG", Const, 1, ""}, + {"SYS_SEMCTL", Const, 0, ""}, + {"SYS_SEMGET", Const, 0, ""}, + {"SYS_SEMOP", Const, 0, ""}, + {"SYS_SEMSYS", Const, 0, ""}, + {"SYS_SEMTIMEDOP", Const, 0, ""}, + {"SYS_SEM_CLOSE", Const, 0, ""}, + {"SYS_SEM_DESTROY", Const, 0, ""}, + {"SYS_SEM_GETVALUE", Const, 0, ""}, + {"SYS_SEM_INIT", Const, 0, ""}, + {"SYS_SEM_OPEN", Const, 0, ""}, + {"SYS_SEM_POST", Const, 0, ""}, + {"SYS_SEM_TRYWAIT", Const, 0, ""}, + {"SYS_SEM_UNLINK", Const, 0, ""}, + {"SYS_SEM_WAIT", Const, 0, ""}, + {"SYS_SEM_WAIT_NOCANCEL", Const, 0, ""}, + {"SYS_SEND", Const, 0, ""}, + {"SYS_SENDFILE", Const, 0, ""}, + {"SYS_SENDFILE64", Const, 0, ""}, + {"SYS_SENDMMSG", Const, 0, ""}, + {"SYS_SENDMSG", Const, 0, ""}, + {"SYS_SENDMSG_NOCANCEL", Const, 0, ""}, + {"SYS_SENDTO", Const, 0, ""}, + {"SYS_SENDTO_NOCANCEL", Const, 0, ""}, + {"SYS_SETATTRLIST", Const, 0, ""}, + {"SYS_SETAUDIT", Const, 0, ""}, + {"SYS_SETAUDIT_ADDR", Const, 0, ""}, + {"SYS_SETAUID", Const, 0, ""}, + {"SYS_SETCONTEXT", Const, 0, ""}, + {"SYS_SETDOMAINNAME", Const, 0, ""}, + {"SYS_SETEGID", Const, 0, ""}, + {"SYS_SETEUID", Const, 0, ""}, + {"SYS_SETFIB", Const, 0, ""}, + {"SYS_SETFSGID", Const, 0, ""}, + {"SYS_SETFSGID32", Const, 0, ""}, + {"SYS_SETFSUID", Const, 0, ""}, + {"SYS_SETFSUID32", Const, 0, ""}, + {"SYS_SETGID", Const, 0, ""}, + {"SYS_SETGID32", Const, 0, ""}, + {"SYS_SETGROUPS", Const, 0, ""}, + {"SYS_SETGROUPS32", Const, 0, ""}, + {"SYS_SETHOSTNAME", Const, 0, ""}, + {"SYS_SETITIMER", Const, 0, ""}, + {"SYS_SETLCID", Const, 0, ""}, + {"SYS_SETLOGIN", Const, 0, ""}, + {"SYS_SETLOGINCLASS", Const, 0, ""}, + {"SYS_SETNS", Const, 0, ""}, + {"SYS_SETPGID", Const, 0, ""}, + {"SYS_SETPRIORITY", Const, 0, ""}, + {"SYS_SETPRIVEXEC", Const, 0, ""}, + {"SYS_SETREGID", Const, 0, ""}, + {"SYS_SETREGID32", Const, 0, ""}, + {"SYS_SETRESGID", Const, 0, ""}, + {"SYS_SETRESGID32", Const, 0, ""}, + {"SYS_SETRESUID", Const, 0, ""}, + {"SYS_SETRESUID32", Const, 0, ""}, + {"SYS_SETREUID", Const, 0, ""}, + {"SYS_SETREUID32", Const, 0, ""}, + {"SYS_SETRLIMIT", Const, 0, ""}, + {"SYS_SETRTABLE", Const, 1, ""}, + {"SYS_SETSGROUPS", Const, 0, ""}, + {"SYS_SETSID", Const, 0, ""}, + {"SYS_SETSOCKOPT", Const, 0, ""}, + {"SYS_SETTID", Const, 0, ""}, + {"SYS_SETTID_WITH_PID", Const, 0, ""}, + {"SYS_SETTIMEOFDAY", Const, 0, ""}, + {"SYS_SETUID", Const, 0, ""}, + {"SYS_SETUID32", Const, 0, ""}, + {"SYS_SETWGROUPS", Const, 0, ""}, + {"SYS_SETXATTR", Const, 0, ""}, + {"SYS_SET_MEMPOLICY", Const, 0, ""}, + {"SYS_SET_ROBUST_LIST", Const, 0, ""}, + {"SYS_SET_THREAD_AREA", Const, 0, ""}, + {"SYS_SET_TID_ADDRESS", Const, 0, ""}, + {"SYS_SGETMASK", Const, 0, ""}, + {"SYS_SHARED_REGION_CHECK_NP", Const, 0, ""}, + {"SYS_SHARED_REGION_MAP_AND_SLIDE_NP", Const, 0, ""}, + {"SYS_SHMAT", Const, 0, ""}, + {"SYS_SHMCTL", Const, 0, ""}, + {"SYS_SHMDT", Const, 0, ""}, + {"SYS_SHMGET", Const, 0, ""}, + {"SYS_SHMSYS", Const, 0, ""}, + {"SYS_SHM_OPEN", Const, 0, ""}, + {"SYS_SHM_UNLINK", Const, 0, ""}, + {"SYS_SHUTDOWN", Const, 0, ""}, + {"SYS_SIGACTION", Const, 0, ""}, 
+ {"SYS_SIGALTSTACK", Const, 0, ""}, + {"SYS_SIGNAL", Const, 0, ""}, + {"SYS_SIGNALFD", Const, 0, ""}, + {"SYS_SIGNALFD4", Const, 0, ""}, + {"SYS_SIGPENDING", Const, 0, ""}, + {"SYS_SIGPROCMASK", Const, 0, ""}, + {"SYS_SIGQUEUE", Const, 0, ""}, + {"SYS_SIGQUEUEINFO", Const, 1, ""}, + {"SYS_SIGRETURN", Const, 0, ""}, + {"SYS_SIGSUSPEND", Const, 0, ""}, + {"SYS_SIGSUSPEND_NOCANCEL", Const, 0, ""}, + {"SYS_SIGTIMEDWAIT", Const, 0, ""}, + {"SYS_SIGWAIT", Const, 0, ""}, + {"SYS_SIGWAITINFO", Const, 0, ""}, + {"SYS_SOCKET", Const, 0, ""}, + {"SYS_SOCKETCALL", Const, 0, ""}, + {"SYS_SOCKETPAIR", Const, 0, ""}, + {"SYS_SPLICE", Const, 0, ""}, + {"SYS_SSETMASK", Const, 0, ""}, + {"SYS_SSTK", Const, 0, ""}, + {"SYS_STACK_SNAPSHOT", Const, 0, ""}, + {"SYS_STAT", Const, 0, ""}, + {"SYS_STAT64", Const, 0, ""}, + {"SYS_STAT64_EXTENDED", Const, 0, ""}, + {"SYS_STATFS", Const, 0, ""}, + {"SYS_STATFS64", Const, 0, ""}, + {"SYS_STATV", Const, 0, ""}, + {"SYS_STATVFS1", Const, 1, ""}, + {"SYS_STAT_EXTENDED", Const, 0, ""}, + {"SYS_STIME", Const, 0, ""}, + {"SYS_STTY", Const, 0, ""}, + {"SYS_SWAPCONTEXT", Const, 0, ""}, + {"SYS_SWAPCTL", Const, 1, ""}, + {"SYS_SWAPOFF", Const, 0, ""}, + {"SYS_SWAPON", Const, 0, ""}, + {"SYS_SYMLINK", Const, 0, ""}, + {"SYS_SYMLINKAT", Const, 0, ""}, + {"SYS_SYNC", Const, 0, ""}, + {"SYS_SYNCFS", Const, 0, ""}, + {"SYS_SYNC_FILE_RANGE", Const, 0, ""}, + {"SYS_SYSARCH", Const, 0, ""}, + {"SYS_SYSCALL", Const, 0, ""}, + {"SYS_SYSCALL_BASE", Const, 0, ""}, + {"SYS_SYSFS", Const, 0, ""}, + {"SYS_SYSINFO", Const, 0, ""}, + {"SYS_SYSLOG", Const, 0, ""}, + {"SYS_TEE", Const, 0, ""}, + {"SYS_TGKILL", Const, 0, ""}, + {"SYS_THREAD_SELFID", Const, 0, ""}, + {"SYS_THR_CREATE", Const, 0, ""}, + {"SYS_THR_EXIT", Const, 0, ""}, + {"SYS_THR_KILL", Const, 0, ""}, + {"SYS_THR_KILL2", Const, 0, ""}, + {"SYS_THR_NEW", Const, 0, ""}, + {"SYS_THR_SELF", Const, 0, ""}, + {"SYS_THR_SET_NAME", Const, 0, ""}, + {"SYS_THR_SUSPEND", Const, 0, ""}, + {"SYS_THR_WAKE", Const, 0, ""}, + {"SYS_TIME", Const, 0, ""}, + {"SYS_TIMERFD_CREATE", Const, 0, ""}, + {"SYS_TIMERFD_GETTIME", Const, 0, ""}, + {"SYS_TIMERFD_SETTIME", Const, 0, ""}, + {"SYS_TIMER_CREATE", Const, 0, ""}, + {"SYS_TIMER_DELETE", Const, 0, ""}, + {"SYS_TIMER_GETOVERRUN", Const, 0, ""}, + {"SYS_TIMER_GETTIME", Const, 0, ""}, + {"SYS_TIMER_SETTIME", Const, 0, ""}, + {"SYS_TIMES", Const, 0, ""}, + {"SYS_TKILL", Const, 0, ""}, + {"SYS_TRUNCATE", Const, 0, ""}, + {"SYS_TRUNCATE64", Const, 0, ""}, + {"SYS_TUXCALL", Const, 0, ""}, + {"SYS_UGETRLIMIT", Const, 0, ""}, + {"SYS_ULIMIT", Const, 0, ""}, + {"SYS_UMASK", Const, 0, ""}, + {"SYS_UMASK_EXTENDED", Const, 0, ""}, + {"SYS_UMOUNT", Const, 0, ""}, + {"SYS_UMOUNT2", Const, 0, ""}, + {"SYS_UNAME", Const, 0, ""}, + {"SYS_UNDELETE", Const, 0, ""}, + {"SYS_UNLINK", Const, 0, ""}, + {"SYS_UNLINKAT", Const, 0, ""}, + {"SYS_UNMOUNT", Const, 0, ""}, + {"SYS_UNSHARE", Const, 0, ""}, + {"SYS_USELIB", Const, 0, ""}, + {"SYS_USTAT", Const, 0, ""}, + {"SYS_UTIME", Const, 0, ""}, + {"SYS_UTIMENSAT", Const, 0, ""}, + {"SYS_UTIMES", Const, 0, ""}, + {"SYS_UTRACE", Const, 0, ""}, + {"SYS_UUIDGEN", Const, 0, ""}, + {"SYS_VADVISE", Const, 1, ""}, + {"SYS_VFORK", Const, 0, ""}, + {"SYS_VHANGUP", Const, 0, ""}, + {"SYS_VM86", Const, 0, ""}, + {"SYS_VM86OLD", Const, 0, ""}, + {"SYS_VMSPLICE", Const, 0, ""}, + {"SYS_VM_PRESSURE_MONITOR", Const, 0, ""}, + {"SYS_VSERVER", Const, 0, ""}, + {"SYS_WAIT4", Const, 0, ""}, + {"SYS_WAIT4_NOCANCEL", Const, 0, ""}, + {"SYS_WAIT6", Const, 1, ""}, + {"SYS_WAITEVENT", Const, 0, ""}, + 
{"SYS_WAITID", Const, 0, ""}, + {"SYS_WAITID_NOCANCEL", Const, 0, ""}, + {"SYS_WAITPID", Const, 0, ""}, + {"SYS_WATCHEVENT", Const, 0, ""}, + {"SYS_WORKQ_KERNRETURN", Const, 0, ""}, + {"SYS_WORKQ_OPEN", Const, 0, ""}, + {"SYS_WRITE", Const, 0, ""}, + {"SYS_WRITEV", Const, 0, ""}, + {"SYS_WRITEV_NOCANCEL", Const, 0, ""}, + {"SYS_WRITE_NOCANCEL", Const, 0, ""}, + {"SYS_YIELD", Const, 0, ""}, + {"SYS__LLSEEK", Const, 0, ""}, + {"SYS__LWP_CONTINUE", Const, 1, ""}, + {"SYS__LWP_CREATE", Const, 1, ""}, + {"SYS__LWP_CTL", Const, 1, ""}, + {"SYS__LWP_DETACH", Const, 1, ""}, + {"SYS__LWP_EXIT", Const, 1, ""}, + {"SYS__LWP_GETNAME", Const, 1, ""}, + {"SYS__LWP_GETPRIVATE", Const, 1, ""}, + {"SYS__LWP_KILL", Const, 1, ""}, + {"SYS__LWP_PARK", Const, 1, ""}, + {"SYS__LWP_SELF", Const, 1, ""}, + {"SYS__LWP_SETNAME", Const, 1, ""}, + {"SYS__LWP_SETPRIVATE", Const, 1, ""}, + {"SYS__LWP_SUSPEND", Const, 1, ""}, + {"SYS__LWP_UNPARK", Const, 1, ""}, + {"SYS__LWP_UNPARK_ALL", Const, 1, ""}, + {"SYS__LWP_WAIT", Const, 1, ""}, + {"SYS__LWP_WAKEUP", Const, 1, ""}, + {"SYS__NEWSELECT", Const, 0, ""}, + {"SYS__PSET_BIND", Const, 1, ""}, + {"SYS__SCHED_GETAFFINITY", Const, 1, ""}, + {"SYS__SCHED_GETPARAM", Const, 1, ""}, + {"SYS__SCHED_SETAFFINITY", Const, 1, ""}, + {"SYS__SCHED_SETPARAM", Const, 1, ""}, + {"SYS__SYSCTL", Const, 0, ""}, + {"SYS__UMTX_LOCK", Const, 0, ""}, + {"SYS__UMTX_OP", Const, 0, ""}, + {"SYS__UMTX_UNLOCK", Const, 0, ""}, + {"SYS___ACL_ACLCHECK_FD", Const, 0, ""}, + {"SYS___ACL_ACLCHECK_FILE", Const, 0, ""}, + {"SYS___ACL_ACLCHECK_LINK", Const, 0, ""}, + {"SYS___ACL_DELETE_FD", Const, 0, ""}, + {"SYS___ACL_DELETE_FILE", Const, 0, ""}, + {"SYS___ACL_DELETE_LINK", Const, 0, ""}, + {"SYS___ACL_GET_FD", Const, 0, ""}, + {"SYS___ACL_GET_FILE", Const, 0, ""}, + {"SYS___ACL_GET_LINK", Const, 0, ""}, + {"SYS___ACL_SET_FD", Const, 0, ""}, + {"SYS___ACL_SET_FILE", Const, 0, ""}, + {"SYS___ACL_SET_LINK", Const, 0, ""}, + {"SYS___CAP_RIGHTS_GET", Const, 14, ""}, + {"SYS___CLONE", Const, 1, ""}, + {"SYS___DISABLE_THREADSIGNAL", Const, 0, ""}, + {"SYS___GETCWD", Const, 0, ""}, + {"SYS___GETLOGIN", Const, 1, ""}, + {"SYS___GET_TCB", Const, 1, ""}, + {"SYS___MAC_EXECVE", Const, 0, ""}, + {"SYS___MAC_GETFSSTAT", Const, 0, ""}, + {"SYS___MAC_GET_FD", Const, 0, ""}, + {"SYS___MAC_GET_FILE", Const, 0, ""}, + {"SYS___MAC_GET_LCID", Const, 0, ""}, + {"SYS___MAC_GET_LCTX", Const, 0, ""}, + {"SYS___MAC_GET_LINK", Const, 0, ""}, + {"SYS___MAC_GET_MOUNT", Const, 0, ""}, + {"SYS___MAC_GET_PID", Const, 0, ""}, + {"SYS___MAC_GET_PROC", Const, 0, ""}, + {"SYS___MAC_MOUNT", Const, 0, ""}, + {"SYS___MAC_SET_FD", Const, 0, ""}, + {"SYS___MAC_SET_FILE", Const, 0, ""}, + {"SYS___MAC_SET_LCTX", Const, 0, ""}, + {"SYS___MAC_SET_LINK", Const, 0, ""}, + {"SYS___MAC_SET_PROC", Const, 0, ""}, + {"SYS___MAC_SYSCALL", Const, 0, ""}, + {"SYS___OLD_SEMWAIT_SIGNAL", Const, 0, ""}, + {"SYS___OLD_SEMWAIT_SIGNAL_NOCANCEL", Const, 0, ""}, + {"SYS___POSIX_CHOWN", Const, 1, ""}, + {"SYS___POSIX_FCHOWN", Const, 1, ""}, + {"SYS___POSIX_LCHOWN", Const, 1, ""}, + {"SYS___POSIX_RENAME", Const, 1, ""}, + {"SYS___PTHREAD_CANCELED", Const, 0, ""}, + {"SYS___PTHREAD_CHDIR", Const, 0, ""}, + {"SYS___PTHREAD_FCHDIR", Const, 0, ""}, + {"SYS___PTHREAD_KILL", Const, 0, ""}, + {"SYS___PTHREAD_MARKCANCEL", Const, 0, ""}, + {"SYS___PTHREAD_SIGMASK", Const, 0, ""}, + {"SYS___QUOTACTL", Const, 1, ""}, + {"SYS___SEMCTL", Const, 1, ""}, + {"SYS___SEMWAIT_SIGNAL", Const, 0, ""}, + {"SYS___SEMWAIT_SIGNAL_NOCANCEL", Const, 0, ""}, + {"SYS___SETLOGIN", Const, 1, ""}, + 
{"SYS___SETUGID", Const, 0, ""}, + {"SYS___SET_TCB", Const, 1, ""}, + {"SYS___SIGACTION_SIGTRAMP", Const, 1, ""}, + {"SYS___SIGTIMEDWAIT", Const, 1, ""}, + {"SYS___SIGWAIT", Const, 0, ""}, + {"SYS___SIGWAIT_NOCANCEL", Const, 0, ""}, + {"SYS___SYSCTL", Const, 0, ""}, + {"SYS___TFORK", Const, 1, ""}, + {"SYS___THREXIT", Const, 1, ""}, + {"SYS___THRSIGDIVERT", Const, 1, ""}, + {"SYS___THRSLEEP", Const, 1, ""}, + {"SYS___THRWAKEUP", Const, 1, ""}, + {"S_ARCH1", Const, 1, ""}, + {"S_ARCH2", Const, 1, ""}, + {"S_BLKSIZE", Const, 0, ""}, + {"S_IEXEC", Const, 0, ""}, + {"S_IFBLK", Const, 0, ""}, + {"S_IFCHR", Const, 0, ""}, + {"S_IFDIR", Const, 0, ""}, + {"S_IFIFO", Const, 0, ""}, + {"S_IFLNK", Const, 0, ""}, + {"S_IFMT", Const, 0, ""}, + {"S_IFREG", Const, 0, ""}, + {"S_IFSOCK", Const, 0, ""}, + {"S_IFWHT", Const, 0, ""}, + {"S_IREAD", Const, 0, ""}, + {"S_IRGRP", Const, 0, ""}, + {"S_IROTH", Const, 0, ""}, + {"S_IRUSR", Const, 0, ""}, + {"S_IRWXG", Const, 0, ""}, + {"S_IRWXO", Const, 0, ""}, + {"S_IRWXU", Const, 0, ""}, + {"S_ISGID", Const, 0, ""}, + {"S_ISTXT", Const, 0, ""}, + {"S_ISUID", Const, 0, ""}, + {"S_ISVTX", Const, 0, ""}, + {"S_IWGRP", Const, 0, ""}, + {"S_IWOTH", Const, 0, ""}, + {"S_IWRITE", Const, 0, ""}, + {"S_IWUSR", Const, 0, ""}, + {"S_IXGRP", Const, 0, ""}, + {"S_IXOTH", Const, 0, ""}, + {"S_IXUSR", Const, 0, ""}, + {"S_LOGIN_SET", Const, 1, ""}, + {"SecurityAttributes", Type, 0, ""}, + {"SecurityAttributes.InheritHandle", Field, 0, ""}, + {"SecurityAttributes.Length", Field, 0, ""}, + {"SecurityAttributes.SecurityDescriptor", Field, 0, ""}, + {"Seek", Func, 0, "func(fd int, offset int64, whence int) (off int64, err error)"}, + {"Select", Func, 0, "func(nfd int, r *FdSet, w *FdSet, e *FdSet, timeout *Timeval) (n int, err error)"}, + {"Sendfile", Func, 0, "func(outfd int, infd int, offset *int64, count int) (written int, err error)"}, + {"Sendmsg", Func, 0, "func(fd int, p []byte, oob []byte, to Sockaddr, flags int) (err error)"}, + {"SendmsgN", Func, 3, "func(fd int, p []byte, oob []byte, to Sockaddr, flags int) (n int, err error)"}, + {"Sendto", Func, 0, "func(fd int, p []byte, flags int, to Sockaddr) (err error)"}, + {"Servent", Type, 0, ""}, + {"Servent.Aliases", Field, 0, ""}, + {"Servent.Name", Field, 0, ""}, + {"Servent.Port", Field, 0, ""}, + {"Servent.Proto", Field, 0, ""}, + {"SetBpf", Func, 0, ""}, + {"SetBpfBuflen", Func, 0, ""}, + {"SetBpfDatalink", Func, 0, ""}, + {"SetBpfHeadercmpl", Func, 0, ""}, + {"SetBpfImmediate", Func, 0, ""}, + {"SetBpfInterface", Func, 0, ""}, + {"SetBpfPromisc", Func, 0, ""}, + {"SetBpfTimeout", Func, 0, ""}, + {"SetCurrentDirectory", Func, 0, ""}, + {"SetEndOfFile", Func, 0, ""}, + {"SetEnvironmentVariable", Func, 0, ""}, + {"SetFileAttributes", Func, 0, ""}, + {"SetFileCompletionNotificationModes", Func, 2, ""}, + {"SetFilePointer", Func, 0, ""}, + {"SetFileTime", Func, 0, ""}, + {"SetHandleInformation", Func, 0, ""}, + {"SetKevent", Func, 0, ""}, + {"SetLsfPromisc", Func, 0, "func(name string, m bool) error"}, + {"SetNonblock", Func, 0, "func(fd int, nonblocking bool) (err error)"}, + {"Setdomainname", Func, 0, "func(p []byte) (err error)"}, + {"Setegid", Func, 0, "func(egid int) (err error)"}, + {"Setenv", Func, 0, "func(key string, value string) error"}, + {"Seteuid", Func, 0, "func(euid int) (err error)"}, + {"Setfsgid", Func, 0, "func(gid int) (err error)"}, + {"Setfsuid", Func, 0, "func(uid int) (err error)"}, + {"Setgid", Func, 0, "func(gid int) (err error)"}, + {"Setgroups", Func, 0, "func(gids []int) (err error)"}, + 
{"Sethostname", Func, 0, "func(p []byte) (err error)"}, + {"Setlogin", Func, 0, ""}, + {"Setpgid", Func, 0, "func(pid int, pgid int) (err error)"}, + {"Setpriority", Func, 0, "func(which int, who int, prio int) (err error)"}, + {"Setprivexec", Func, 0, ""}, + {"Setregid", Func, 0, "func(rgid int, egid int) (err error)"}, + {"Setresgid", Func, 0, "func(rgid int, egid int, sgid int) (err error)"}, + {"Setresuid", Func, 0, "func(ruid int, euid int, suid int) (err error)"}, + {"Setreuid", Func, 0, "func(ruid int, euid int) (err error)"}, + {"Setrlimit", Func, 0, "func(resource int, rlim *Rlimit) error"}, + {"Setsid", Func, 0, "func() (pid int, err error)"}, + {"Setsockopt", Func, 0, ""}, + {"SetsockoptByte", Func, 0, "func(fd int, level int, opt int, value byte) (err error)"}, + {"SetsockoptICMPv6Filter", Func, 2, "func(fd int, level int, opt int, filter *ICMPv6Filter) error"}, + {"SetsockoptIPMreq", Func, 0, "func(fd int, level int, opt int, mreq *IPMreq) (err error)"}, + {"SetsockoptIPMreqn", Func, 0, "func(fd int, level int, opt int, mreq *IPMreqn) (err error)"}, + {"SetsockoptIPv6Mreq", Func, 0, "func(fd int, level int, opt int, mreq *IPv6Mreq) (err error)"}, + {"SetsockoptInet4Addr", Func, 0, "func(fd int, level int, opt int, value [4]byte) (err error)"}, + {"SetsockoptInt", Func, 0, "func(fd int, level int, opt int, value int) (err error)"}, + {"SetsockoptLinger", Func, 0, "func(fd int, level int, opt int, l *Linger) (err error)"}, + {"SetsockoptString", Func, 0, "func(fd int, level int, opt int, s string) (err error)"}, + {"SetsockoptTimeval", Func, 0, "func(fd int, level int, opt int, tv *Timeval) (err error)"}, + {"Settimeofday", Func, 0, "func(tv *Timeval) (err error)"}, + {"Setuid", Func, 0, "func(uid int) (err error)"}, + {"Setxattr", Func, 1, "func(path string, attr string, data []byte, flags int) (err error)"}, + {"Shutdown", Func, 0, "func(fd int, how int) (err error)"}, + {"SidTypeAlias", Const, 0, ""}, + {"SidTypeComputer", Const, 0, ""}, + {"SidTypeDeletedAccount", Const, 0, ""}, + {"SidTypeDomain", Const, 0, ""}, + {"SidTypeGroup", Const, 0, ""}, + {"SidTypeInvalid", Const, 0, ""}, + {"SidTypeLabel", Const, 0, ""}, + {"SidTypeUnknown", Const, 0, ""}, + {"SidTypeUser", Const, 0, ""}, + {"SidTypeWellKnownGroup", Const, 0, ""}, + {"Signal", Type, 0, ""}, + {"SizeofBpfHdr", Const, 0, ""}, + {"SizeofBpfInsn", Const, 0, ""}, + {"SizeofBpfProgram", Const, 0, ""}, + {"SizeofBpfStat", Const, 0, ""}, + {"SizeofBpfVersion", Const, 0, ""}, + {"SizeofBpfZbuf", Const, 0, ""}, + {"SizeofBpfZbufHeader", Const, 0, ""}, + {"SizeofCmsghdr", Const, 0, ""}, + {"SizeofICMPv6Filter", Const, 2, ""}, + {"SizeofIPMreq", Const, 0, ""}, + {"SizeofIPMreqn", Const, 0, ""}, + {"SizeofIPv6MTUInfo", Const, 2, ""}, + {"SizeofIPv6Mreq", Const, 0, ""}, + {"SizeofIfAddrmsg", Const, 0, ""}, + {"SizeofIfAnnounceMsghdr", Const, 1, ""}, + {"SizeofIfData", Const, 0, ""}, + {"SizeofIfInfomsg", Const, 0, ""}, + {"SizeofIfMsghdr", Const, 0, ""}, + {"SizeofIfaMsghdr", Const, 0, ""}, + {"SizeofIfmaMsghdr", Const, 0, ""}, + {"SizeofIfmaMsghdr2", Const, 0, ""}, + {"SizeofInet4Pktinfo", Const, 0, ""}, + {"SizeofInet6Pktinfo", Const, 0, ""}, + {"SizeofInotifyEvent", Const, 0, ""}, + {"SizeofLinger", Const, 0, ""}, + {"SizeofMsghdr", Const, 0, ""}, + {"SizeofNlAttr", Const, 0, ""}, + {"SizeofNlMsgerr", Const, 0, ""}, + {"SizeofNlMsghdr", Const, 0, ""}, + {"SizeofRtAttr", Const, 0, ""}, + {"SizeofRtGenmsg", Const, 0, ""}, + {"SizeofRtMetrics", Const, 0, ""}, + {"SizeofRtMsg", Const, 0, ""}, + {"SizeofRtMsghdr", Const, 0, ""}, 
+ {"SizeofRtNexthop", Const, 0, ""}, + {"SizeofSockFilter", Const, 0, ""}, + {"SizeofSockFprog", Const, 0, ""}, + {"SizeofSockaddrAny", Const, 0, ""}, + {"SizeofSockaddrDatalink", Const, 0, ""}, + {"SizeofSockaddrInet4", Const, 0, ""}, + {"SizeofSockaddrInet6", Const, 0, ""}, + {"SizeofSockaddrLinklayer", Const, 0, ""}, + {"SizeofSockaddrNetlink", Const, 0, ""}, + {"SizeofSockaddrUnix", Const, 0, ""}, + {"SizeofTCPInfo", Const, 1, ""}, + {"SizeofUcred", Const, 0, ""}, + {"SlicePtrFromStrings", Func, 1, "func(ss []string) ([]*byte, error)"}, + {"SockFilter", Type, 0, ""}, + {"SockFilter.Code", Field, 0, ""}, + {"SockFilter.Jf", Field, 0, ""}, + {"SockFilter.Jt", Field, 0, ""}, + {"SockFilter.K", Field, 0, ""}, + {"SockFprog", Type, 0, ""}, + {"SockFprog.Filter", Field, 0, ""}, + {"SockFprog.Len", Field, 0, ""}, + {"SockFprog.Pad_cgo_0", Field, 0, ""}, + {"Sockaddr", Type, 0, ""}, + {"SockaddrDatalink", Type, 0, ""}, + {"SockaddrDatalink.Alen", Field, 0, ""}, + {"SockaddrDatalink.Data", Field, 0, ""}, + {"SockaddrDatalink.Family", Field, 0, ""}, + {"SockaddrDatalink.Index", Field, 0, ""}, + {"SockaddrDatalink.Len", Field, 0, ""}, + {"SockaddrDatalink.Nlen", Field, 0, ""}, + {"SockaddrDatalink.Slen", Field, 0, ""}, + {"SockaddrDatalink.Type", Field, 0, ""}, + {"SockaddrGen", Type, 0, ""}, + {"SockaddrInet4", Type, 0, ""}, + {"SockaddrInet4.Addr", Field, 0, ""}, + {"SockaddrInet4.Port", Field, 0, ""}, + {"SockaddrInet6", Type, 0, ""}, + {"SockaddrInet6.Addr", Field, 0, ""}, + {"SockaddrInet6.Port", Field, 0, ""}, + {"SockaddrInet6.ZoneId", Field, 0, ""}, + {"SockaddrLinklayer", Type, 0, ""}, + {"SockaddrLinklayer.Addr", Field, 0, ""}, + {"SockaddrLinklayer.Halen", Field, 0, ""}, + {"SockaddrLinklayer.Hatype", Field, 0, ""}, + {"SockaddrLinklayer.Ifindex", Field, 0, ""}, + {"SockaddrLinklayer.Pkttype", Field, 0, ""}, + {"SockaddrLinklayer.Protocol", Field, 0, ""}, + {"SockaddrNetlink", Type, 0, ""}, + {"SockaddrNetlink.Family", Field, 0, ""}, + {"SockaddrNetlink.Groups", Field, 0, ""}, + {"SockaddrNetlink.Pad", Field, 0, ""}, + {"SockaddrNetlink.Pid", Field, 0, ""}, + {"SockaddrUnix", Type, 0, ""}, + {"SockaddrUnix.Name", Field, 0, ""}, + {"Socket", Func, 0, "func(domain int, typ int, proto int) (fd int, err error)"}, + {"SocketControlMessage", Type, 0, ""}, + {"SocketControlMessage.Data", Field, 0, ""}, + {"SocketControlMessage.Header", Field, 0, ""}, + {"SocketDisableIPv6", Var, 0, ""}, + {"Socketpair", Func, 0, "func(domain int, typ int, proto int) (fd [2]int, err error)"}, + {"Splice", Func, 0, "func(rfd int, roff *int64, wfd int, woff *int64, len int, flags int) (n int64, err error)"}, + {"StartProcess", Func, 0, "func(argv0 string, argv []string, attr *ProcAttr) (pid int, handle uintptr, err error)"}, + {"StartupInfo", Type, 0, ""}, + {"StartupInfo.Cb", Field, 0, ""}, + {"StartupInfo.Desktop", Field, 0, ""}, + {"StartupInfo.FillAttribute", Field, 0, ""}, + {"StartupInfo.Flags", Field, 0, ""}, + {"StartupInfo.ShowWindow", Field, 0, ""}, + {"StartupInfo.StdErr", Field, 0, ""}, + {"StartupInfo.StdInput", Field, 0, ""}, + {"StartupInfo.StdOutput", Field, 0, ""}, + {"StartupInfo.Title", Field, 0, ""}, + {"StartupInfo.X", Field, 0, ""}, + {"StartupInfo.XCountChars", Field, 0, ""}, + {"StartupInfo.XSize", Field, 0, ""}, + {"StartupInfo.Y", Field, 0, ""}, + {"StartupInfo.YCountChars", Field, 0, ""}, + {"StartupInfo.YSize", Field, 0, ""}, + {"Stat", Func, 0, "func(path string, stat *Stat_t) (err error)"}, + {"Stat_t", Type, 0, ""}, + {"Stat_t.Atim", Field, 0, ""}, + {"Stat_t.Atim_ext", Field, 12, 
""}, + {"Stat_t.Atimespec", Field, 0, ""}, + {"Stat_t.Birthtimespec", Field, 0, ""}, + {"Stat_t.Blksize", Field, 0, ""}, + {"Stat_t.Blocks", Field, 0, ""}, + {"Stat_t.Btim_ext", Field, 12, ""}, + {"Stat_t.Ctim", Field, 0, ""}, + {"Stat_t.Ctim_ext", Field, 12, ""}, + {"Stat_t.Ctimespec", Field, 0, ""}, + {"Stat_t.Dev", Field, 0, ""}, + {"Stat_t.Flags", Field, 0, ""}, + {"Stat_t.Gen", Field, 0, ""}, + {"Stat_t.Gid", Field, 0, ""}, + {"Stat_t.Ino", Field, 0, ""}, + {"Stat_t.Lspare", Field, 0, ""}, + {"Stat_t.Lspare0", Field, 2, ""}, + {"Stat_t.Lspare1", Field, 2, ""}, + {"Stat_t.Mode", Field, 0, ""}, + {"Stat_t.Mtim", Field, 0, ""}, + {"Stat_t.Mtim_ext", Field, 12, ""}, + {"Stat_t.Mtimespec", Field, 0, ""}, + {"Stat_t.Nlink", Field, 0, ""}, + {"Stat_t.Pad_cgo_0", Field, 0, ""}, + {"Stat_t.Pad_cgo_1", Field, 0, ""}, + {"Stat_t.Pad_cgo_2", Field, 0, ""}, + {"Stat_t.Padding0", Field, 12, ""}, + {"Stat_t.Padding1", Field, 12, ""}, + {"Stat_t.Qspare", Field, 0, ""}, + {"Stat_t.Rdev", Field, 0, ""}, + {"Stat_t.Size", Field, 0, ""}, + {"Stat_t.Spare", Field, 2, ""}, + {"Stat_t.Uid", Field, 0, ""}, + {"Stat_t.X__pad0", Field, 0, ""}, + {"Stat_t.X__pad1", Field, 0, ""}, + {"Stat_t.X__pad2", Field, 0, ""}, + {"Stat_t.X__st_birthtim", Field, 2, ""}, + {"Stat_t.X__st_ino", Field, 0, ""}, + {"Stat_t.X__unused", Field, 0, ""}, + {"Statfs", Func, 0, "func(path string, buf *Statfs_t) (err error)"}, + {"Statfs_t", Type, 0, ""}, + {"Statfs_t.Asyncreads", Field, 0, ""}, + {"Statfs_t.Asyncwrites", Field, 0, ""}, + {"Statfs_t.Bavail", Field, 0, ""}, + {"Statfs_t.Bfree", Field, 0, ""}, + {"Statfs_t.Blocks", Field, 0, ""}, + {"Statfs_t.Bsize", Field, 0, ""}, + {"Statfs_t.Charspare", Field, 0, ""}, + {"Statfs_t.F_asyncreads", Field, 2, ""}, + {"Statfs_t.F_asyncwrites", Field, 2, ""}, + {"Statfs_t.F_bavail", Field, 2, ""}, + {"Statfs_t.F_bfree", Field, 2, ""}, + {"Statfs_t.F_blocks", Field, 2, ""}, + {"Statfs_t.F_bsize", Field, 2, ""}, + {"Statfs_t.F_ctime", Field, 2, ""}, + {"Statfs_t.F_favail", Field, 2, ""}, + {"Statfs_t.F_ffree", Field, 2, ""}, + {"Statfs_t.F_files", Field, 2, ""}, + {"Statfs_t.F_flags", Field, 2, ""}, + {"Statfs_t.F_fsid", Field, 2, ""}, + {"Statfs_t.F_fstypename", Field, 2, ""}, + {"Statfs_t.F_iosize", Field, 2, ""}, + {"Statfs_t.F_mntfromname", Field, 2, ""}, + {"Statfs_t.F_mntfromspec", Field, 3, ""}, + {"Statfs_t.F_mntonname", Field, 2, ""}, + {"Statfs_t.F_namemax", Field, 2, ""}, + {"Statfs_t.F_owner", Field, 2, ""}, + {"Statfs_t.F_spare", Field, 2, ""}, + {"Statfs_t.F_syncreads", Field, 2, ""}, + {"Statfs_t.F_syncwrites", Field, 2, ""}, + {"Statfs_t.Ffree", Field, 0, ""}, + {"Statfs_t.Files", Field, 0, ""}, + {"Statfs_t.Flags", Field, 0, ""}, + {"Statfs_t.Frsize", Field, 0, ""}, + {"Statfs_t.Fsid", Field, 0, ""}, + {"Statfs_t.Fssubtype", Field, 0, ""}, + {"Statfs_t.Fstypename", Field, 0, ""}, + {"Statfs_t.Iosize", Field, 0, ""}, + {"Statfs_t.Mntfromname", Field, 0, ""}, + {"Statfs_t.Mntonname", Field, 0, ""}, + {"Statfs_t.Mount_info", Field, 2, ""}, + {"Statfs_t.Namelen", Field, 0, ""}, + {"Statfs_t.Namemax", Field, 0, ""}, + {"Statfs_t.Owner", Field, 0, ""}, + {"Statfs_t.Pad_cgo_0", Field, 0, ""}, + {"Statfs_t.Pad_cgo_1", Field, 2, ""}, + {"Statfs_t.Reserved", Field, 0, ""}, + {"Statfs_t.Spare", Field, 0, ""}, + {"Statfs_t.Syncreads", Field, 0, ""}, + {"Statfs_t.Syncwrites", Field, 0, ""}, + {"Statfs_t.Type", Field, 0, ""}, + {"Statfs_t.Version", Field, 0, ""}, + {"Stderr", Var, 0, ""}, + {"Stdin", Var, 0, ""}, + {"Stdout", Var, 0, ""}, + {"StringBytePtr", Func, 0, "func(s string) 
*byte"}, + {"StringByteSlice", Func, 0, "func(s string) []byte"}, + {"StringSlicePtr", Func, 0, "func(ss []string) []*byte"}, + {"StringToSid", Func, 0, ""}, + {"StringToUTF16", Func, 0, ""}, + {"StringToUTF16Ptr", Func, 0, ""}, + {"Symlink", Func, 0, "func(oldpath string, newpath string) (err error)"}, + {"Sync", Func, 0, "func()"}, + {"SyncFileRange", Func, 0, "func(fd int, off int64, n int64, flags int) (err error)"}, + {"SysProcAttr", Type, 0, ""}, + {"SysProcAttr.AdditionalInheritedHandles", Field, 17, ""}, + {"SysProcAttr.AmbientCaps", Field, 9, ""}, + {"SysProcAttr.CgroupFD", Field, 20, ""}, + {"SysProcAttr.Chroot", Field, 0, ""}, + {"SysProcAttr.Cloneflags", Field, 2, ""}, + {"SysProcAttr.CmdLine", Field, 0, ""}, + {"SysProcAttr.CreationFlags", Field, 1, ""}, + {"SysProcAttr.Credential", Field, 0, ""}, + {"SysProcAttr.Ctty", Field, 1, ""}, + {"SysProcAttr.Foreground", Field, 5, ""}, + {"SysProcAttr.GidMappings", Field, 4, ""}, + {"SysProcAttr.GidMappingsEnableSetgroups", Field, 5, ""}, + {"SysProcAttr.HideWindow", Field, 0, ""}, + {"SysProcAttr.Jail", Field, 21, ""}, + {"SysProcAttr.NoInheritHandles", Field, 16, ""}, + {"SysProcAttr.Noctty", Field, 0, ""}, + {"SysProcAttr.ParentProcess", Field, 17, ""}, + {"SysProcAttr.Pdeathsig", Field, 0, ""}, + {"SysProcAttr.Pgid", Field, 5, ""}, + {"SysProcAttr.PidFD", Field, 22, ""}, + {"SysProcAttr.ProcessAttributes", Field, 13, ""}, + {"SysProcAttr.Ptrace", Field, 0, ""}, + {"SysProcAttr.Setctty", Field, 0, ""}, + {"SysProcAttr.Setpgid", Field, 0, ""}, + {"SysProcAttr.Setsid", Field, 0, ""}, + {"SysProcAttr.ThreadAttributes", Field, 13, ""}, + {"SysProcAttr.Token", Field, 10, ""}, + {"SysProcAttr.UidMappings", Field, 4, ""}, + {"SysProcAttr.Unshareflags", Field, 7, ""}, + {"SysProcAttr.UseCgroupFD", Field, 20, ""}, + {"SysProcIDMap", Type, 4, ""}, + {"SysProcIDMap.ContainerID", Field, 4, ""}, + {"SysProcIDMap.HostID", Field, 4, ""}, + {"SysProcIDMap.Size", Field, 4, ""}, + {"Syscall", Func, 0, "func(trap uintptr, a1 uintptr, a2 uintptr, a3 uintptr) (r1 uintptr, r2 uintptr, err Errno)"}, + {"Syscall12", Func, 0, ""}, + {"Syscall15", Func, 0, ""}, + {"Syscall18", Func, 12, ""}, + {"Syscall6", Func, 0, "func(trap uintptr, a1 uintptr, a2 uintptr, a3 uintptr, a4 uintptr, a5 uintptr, a6 uintptr) (r1 uintptr, r2 uintptr, err Errno)"}, + {"Syscall9", Func, 0, ""}, + {"SyscallN", Func, 18, ""}, + {"Sysctl", Func, 0, ""}, + {"SysctlUint32", Func, 0, ""}, + {"Sysctlnode", Type, 2, ""}, + {"Sysctlnode.Flags", Field, 2, ""}, + {"Sysctlnode.Name", Field, 2, ""}, + {"Sysctlnode.Num", Field, 2, ""}, + {"Sysctlnode.Un", Field, 2, ""}, + {"Sysctlnode.Ver", Field, 2, ""}, + {"Sysctlnode.X__rsvd", Field, 2, ""}, + {"Sysctlnode.X_sysctl_desc", Field, 2, ""}, + {"Sysctlnode.X_sysctl_func", Field, 2, ""}, + {"Sysctlnode.X_sysctl_parent", Field, 2, ""}, + {"Sysctlnode.X_sysctl_size", Field, 2, ""}, + {"Sysinfo", Func, 0, "func(info *Sysinfo_t) (err error)"}, + {"Sysinfo_t", Type, 0, ""}, + {"Sysinfo_t.Bufferram", Field, 0, ""}, + {"Sysinfo_t.Freehigh", Field, 0, ""}, + {"Sysinfo_t.Freeram", Field, 0, ""}, + {"Sysinfo_t.Freeswap", Field, 0, ""}, + {"Sysinfo_t.Loads", Field, 0, ""}, + {"Sysinfo_t.Pad", Field, 0, ""}, + {"Sysinfo_t.Pad_cgo_0", Field, 0, ""}, + {"Sysinfo_t.Pad_cgo_1", Field, 0, ""}, + {"Sysinfo_t.Procs", Field, 0, ""}, + {"Sysinfo_t.Sharedram", Field, 0, ""}, + {"Sysinfo_t.Totalhigh", Field, 0, ""}, + {"Sysinfo_t.Totalram", Field, 0, ""}, + {"Sysinfo_t.Totalswap", Field, 0, ""}, + {"Sysinfo_t.Unit", Field, 0, ""}, + {"Sysinfo_t.Uptime", Field, 0, ""}, 
+ {"Sysinfo_t.X_f", Field, 0, ""}, + {"Systemtime", Type, 0, ""}, + {"Systemtime.Day", Field, 0, ""}, + {"Systemtime.DayOfWeek", Field, 0, ""}, + {"Systemtime.Hour", Field, 0, ""}, + {"Systemtime.Milliseconds", Field, 0, ""}, + {"Systemtime.Minute", Field, 0, ""}, + {"Systemtime.Month", Field, 0, ""}, + {"Systemtime.Second", Field, 0, ""}, + {"Systemtime.Year", Field, 0, ""}, + {"TCGETS", Const, 0, ""}, + {"TCIFLUSH", Const, 1, ""}, + {"TCIOFLUSH", Const, 1, ""}, + {"TCOFLUSH", Const, 1, ""}, + {"TCPInfo", Type, 1, ""}, + {"TCPInfo.Advmss", Field, 1, ""}, + {"TCPInfo.Ato", Field, 1, ""}, + {"TCPInfo.Backoff", Field, 1, ""}, + {"TCPInfo.Ca_state", Field, 1, ""}, + {"TCPInfo.Fackets", Field, 1, ""}, + {"TCPInfo.Last_ack_recv", Field, 1, ""}, + {"TCPInfo.Last_ack_sent", Field, 1, ""}, + {"TCPInfo.Last_data_recv", Field, 1, ""}, + {"TCPInfo.Last_data_sent", Field, 1, ""}, + {"TCPInfo.Lost", Field, 1, ""}, + {"TCPInfo.Options", Field, 1, ""}, + {"TCPInfo.Pad_cgo_0", Field, 1, ""}, + {"TCPInfo.Pmtu", Field, 1, ""}, + {"TCPInfo.Probes", Field, 1, ""}, + {"TCPInfo.Rcv_mss", Field, 1, ""}, + {"TCPInfo.Rcv_rtt", Field, 1, ""}, + {"TCPInfo.Rcv_space", Field, 1, ""}, + {"TCPInfo.Rcv_ssthresh", Field, 1, ""}, + {"TCPInfo.Reordering", Field, 1, ""}, + {"TCPInfo.Retrans", Field, 1, ""}, + {"TCPInfo.Retransmits", Field, 1, ""}, + {"TCPInfo.Rto", Field, 1, ""}, + {"TCPInfo.Rtt", Field, 1, ""}, + {"TCPInfo.Rttvar", Field, 1, ""}, + {"TCPInfo.Sacked", Field, 1, ""}, + {"TCPInfo.Snd_cwnd", Field, 1, ""}, + {"TCPInfo.Snd_mss", Field, 1, ""}, + {"TCPInfo.Snd_ssthresh", Field, 1, ""}, + {"TCPInfo.State", Field, 1, ""}, + {"TCPInfo.Total_retrans", Field, 1, ""}, + {"TCPInfo.Unacked", Field, 1, ""}, + {"TCPKeepalive", Type, 3, ""}, + {"TCPKeepalive.Interval", Field, 3, ""}, + {"TCPKeepalive.OnOff", Field, 3, ""}, + {"TCPKeepalive.Time", Field, 3, ""}, + {"TCP_CA_NAME_MAX", Const, 0, ""}, + {"TCP_CONGCTL", Const, 1, ""}, + {"TCP_CONGESTION", Const, 0, ""}, + {"TCP_CONNECTIONTIMEOUT", Const, 0, ""}, + {"TCP_CORK", Const, 0, ""}, + {"TCP_DEFER_ACCEPT", Const, 0, ""}, + {"TCP_ENABLE_ECN", Const, 16, ""}, + {"TCP_INFO", Const, 0, ""}, + {"TCP_KEEPALIVE", Const, 0, ""}, + {"TCP_KEEPCNT", Const, 0, ""}, + {"TCP_KEEPIDLE", Const, 0, ""}, + {"TCP_KEEPINIT", Const, 1, ""}, + {"TCP_KEEPINTVL", Const, 0, ""}, + {"TCP_LINGER2", Const, 0, ""}, + {"TCP_MAXBURST", Const, 0, ""}, + {"TCP_MAXHLEN", Const, 0, ""}, + {"TCP_MAXOLEN", Const, 0, ""}, + {"TCP_MAXSEG", Const, 0, ""}, + {"TCP_MAXWIN", Const, 0, ""}, + {"TCP_MAX_SACK", Const, 0, ""}, + {"TCP_MAX_WINSHIFT", Const, 0, ""}, + {"TCP_MD5SIG", Const, 0, ""}, + {"TCP_MD5SIG_MAXKEYLEN", Const, 0, ""}, + {"TCP_MINMSS", Const, 0, ""}, + {"TCP_MINMSSOVERLOAD", Const, 0, ""}, + {"TCP_MSS", Const, 0, ""}, + {"TCP_NODELAY", Const, 0, ""}, + {"TCP_NOOPT", Const, 0, ""}, + {"TCP_NOPUSH", Const, 0, ""}, + {"TCP_NOTSENT_LOWAT", Const, 16, ""}, + {"TCP_NSTATES", Const, 1, ""}, + {"TCP_QUICKACK", Const, 0, ""}, + {"TCP_RXT_CONNDROPTIME", Const, 0, ""}, + {"TCP_RXT_FINDROP", Const, 0, ""}, + {"TCP_SACK_ENABLE", Const, 1, ""}, + {"TCP_SENDMOREACKS", Const, 16, ""}, + {"TCP_SYNCNT", Const, 0, ""}, + {"TCP_VENDOR", Const, 3, ""}, + {"TCP_WINDOW_CLAMP", Const, 0, ""}, + {"TCSAFLUSH", Const, 1, ""}, + {"TCSETS", Const, 0, ""}, + {"TF_DISCONNECT", Const, 0, ""}, + {"TF_REUSE_SOCKET", Const, 0, ""}, + {"TF_USE_DEFAULT_WORKER", Const, 0, ""}, + {"TF_USE_KERNEL_APC", Const, 0, ""}, + {"TF_USE_SYSTEM_THREAD", Const, 0, ""}, + {"TF_WRITE_BEHIND", Const, 0, ""}, + {"TH32CS_INHERIT", Const, 4, ""}, + 
{"TH32CS_SNAPALL", Const, 4, ""}, + {"TH32CS_SNAPHEAPLIST", Const, 4, ""}, + {"TH32CS_SNAPMODULE", Const, 4, ""}, + {"TH32CS_SNAPMODULE32", Const, 4, ""}, + {"TH32CS_SNAPPROCESS", Const, 4, ""}, + {"TH32CS_SNAPTHREAD", Const, 4, ""}, + {"TIME_ZONE_ID_DAYLIGHT", Const, 0, ""}, + {"TIME_ZONE_ID_STANDARD", Const, 0, ""}, + {"TIME_ZONE_ID_UNKNOWN", Const, 0, ""}, + {"TIOCCBRK", Const, 0, ""}, + {"TIOCCDTR", Const, 0, ""}, + {"TIOCCONS", Const, 0, ""}, + {"TIOCDCDTIMESTAMP", Const, 0, ""}, + {"TIOCDRAIN", Const, 0, ""}, + {"TIOCDSIMICROCODE", Const, 0, ""}, + {"TIOCEXCL", Const, 0, ""}, + {"TIOCEXT", Const, 0, ""}, + {"TIOCFLAG_CDTRCTS", Const, 1, ""}, + {"TIOCFLAG_CLOCAL", Const, 1, ""}, + {"TIOCFLAG_CRTSCTS", Const, 1, ""}, + {"TIOCFLAG_MDMBUF", Const, 1, ""}, + {"TIOCFLAG_PPS", Const, 1, ""}, + {"TIOCFLAG_SOFTCAR", Const, 1, ""}, + {"TIOCFLUSH", Const, 0, ""}, + {"TIOCGDEV", Const, 0, ""}, + {"TIOCGDRAINWAIT", Const, 0, ""}, + {"TIOCGETA", Const, 0, ""}, + {"TIOCGETD", Const, 0, ""}, + {"TIOCGFLAGS", Const, 1, ""}, + {"TIOCGICOUNT", Const, 0, ""}, + {"TIOCGLCKTRMIOS", Const, 0, ""}, + {"TIOCGLINED", Const, 1, ""}, + {"TIOCGPGRP", Const, 0, ""}, + {"TIOCGPTN", Const, 0, ""}, + {"TIOCGQSIZE", Const, 1, ""}, + {"TIOCGRANTPT", Const, 1, ""}, + {"TIOCGRS485", Const, 0, ""}, + {"TIOCGSERIAL", Const, 0, ""}, + {"TIOCGSID", Const, 0, ""}, + {"TIOCGSIZE", Const, 1, ""}, + {"TIOCGSOFTCAR", Const, 0, ""}, + {"TIOCGTSTAMP", Const, 1, ""}, + {"TIOCGWINSZ", Const, 0, ""}, + {"TIOCINQ", Const, 0, ""}, + {"TIOCIXOFF", Const, 0, ""}, + {"TIOCIXON", Const, 0, ""}, + {"TIOCLINUX", Const, 0, ""}, + {"TIOCMBIC", Const, 0, ""}, + {"TIOCMBIS", Const, 0, ""}, + {"TIOCMGDTRWAIT", Const, 0, ""}, + {"TIOCMGET", Const, 0, ""}, + {"TIOCMIWAIT", Const, 0, ""}, + {"TIOCMODG", Const, 0, ""}, + {"TIOCMODS", Const, 0, ""}, + {"TIOCMSDTRWAIT", Const, 0, ""}, + {"TIOCMSET", Const, 0, ""}, + {"TIOCM_CAR", Const, 0, ""}, + {"TIOCM_CD", Const, 0, ""}, + {"TIOCM_CTS", Const, 0, ""}, + {"TIOCM_DCD", Const, 0, ""}, + {"TIOCM_DSR", Const, 0, ""}, + {"TIOCM_DTR", Const, 0, ""}, + {"TIOCM_LE", Const, 0, ""}, + {"TIOCM_RI", Const, 0, ""}, + {"TIOCM_RNG", Const, 0, ""}, + {"TIOCM_RTS", Const, 0, ""}, + {"TIOCM_SR", Const, 0, ""}, + {"TIOCM_ST", Const, 0, ""}, + {"TIOCNOTTY", Const, 0, ""}, + {"TIOCNXCL", Const, 0, ""}, + {"TIOCOUTQ", Const, 0, ""}, + {"TIOCPKT", Const, 0, ""}, + {"TIOCPKT_DATA", Const, 0, ""}, + {"TIOCPKT_DOSTOP", Const, 0, ""}, + {"TIOCPKT_FLUSHREAD", Const, 0, ""}, + {"TIOCPKT_FLUSHWRITE", Const, 0, ""}, + {"TIOCPKT_IOCTL", Const, 0, ""}, + {"TIOCPKT_NOSTOP", Const, 0, ""}, + {"TIOCPKT_START", Const, 0, ""}, + {"TIOCPKT_STOP", Const, 0, ""}, + {"TIOCPTMASTER", Const, 0, ""}, + {"TIOCPTMGET", Const, 1, ""}, + {"TIOCPTSNAME", Const, 1, ""}, + {"TIOCPTYGNAME", Const, 0, ""}, + {"TIOCPTYGRANT", Const, 0, ""}, + {"TIOCPTYUNLK", Const, 0, ""}, + {"TIOCRCVFRAME", Const, 1, ""}, + {"TIOCREMOTE", Const, 0, ""}, + {"TIOCSBRK", Const, 0, ""}, + {"TIOCSCONS", Const, 0, ""}, + {"TIOCSCTTY", Const, 0, ""}, + {"TIOCSDRAINWAIT", Const, 0, ""}, + {"TIOCSDTR", Const, 0, ""}, + {"TIOCSERCONFIG", Const, 0, ""}, + {"TIOCSERGETLSR", Const, 0, ""}, + {"TIOCSERGETMULTI", Const, 0, ""}, + {"TIOCSERGSTRUCT", Const, 0, ""}, + {"TIOCSERGWILD", Const, 0, ""}, + {"TIOCSERSETMULTI", Const, 0, ""}, + {"TIOCSERSWILD", Const, 0, ""}, + {"TIOCSER_TEMT", Const, 0, ""}, + {"TIOCSETA", Const, 0, ""}, + {"TIOCSETAF", Const, 0, ""}, + {"TIOCSETAW", Const, 0, ""}, + {"TIOCSETD", Const, 0, ""}, + {"TIOCSFLAGS", Const, 1, ""}, + {"TIOCSIG", Const, 0, ""}, + 
{"TIOCSLCKTRMIOS", Const, 0, ""}, + {"TIOCSLINED", Const, 1, ""}, + {"TIOCSPGRP", Const, 0, ""}, + {"TIOCSPTLCK", Const, 0, ""}, + {"TIOCSQSIZE", Const, 1, ""}, + {"TIOCSRS485", Const, 0, ""}, + {"TIOCSSERIAL", Const, 0, ""}, + {"TIOCSSIZE", Const, 1, ""}, + {"TIOCSSOFTCAR", Const, 0, ""}, + {"TIOCSTART", Const, 0, ""}, + {"TIOCSTAT", Const, 0, ""}, + {"TIOCSTI", Const, 0, ""}, + {"TIOCSTOP", Const, 0, ""}, + {"TIOCSTSTAMP", Const, 1, ""}, + {"TIOCSWINSZ", Const, 0, ""}, + {"TIOCTIMESTAMP", Const, 0, ""}, + {"TIOCUCNTL", Const, 0, ""}, + {"TIOCVHANGUP", Const, 0, ""}, + {"TIOCXMTFRAME", Const, 1, ""}, + {"TOKEN_ADJUST_DEFAULT", Const, 0, ""}, + {"TOKEN_ADJUST_GROUPS", Const, 0, ""}, + {"TOKEN_ADJUST_PRIVILEGES", Const, 0, ""}, + {"TOKEN_ADJUST_SESSIONID", Const, 11, ""}, + {"TOKEN_ALL_ACCESS", Const, 0, ""}, + {"TOKEN_ASSIGN_PRIMARY", Const, 0, ""}, + {"TOKEN_DUPLICATE", Const, 0, ""}, + {"TOKEN_EXECUTE", Const, 0, ""}, + {"TOKEN_IMPERSONATE", Const, 0, ""}, + {"TOKEN_QUERY", Const, 0, ""}, + {"TOKEN_QUERY_SOURCE", Const, 0, ""}, + {"TOKEN_READ", Const, 0, ""}, + {"TOKEN_WRITE", Const, 0, ""}, + {"TOSTOP", Const, 0, ""}, + {"TRUNCATE_EXISTING", Const, 0, ""}, + {"TUNATTACHFILTER", Const, 0, ""}, + {"TUNDETACHFILTER", Const, 0, ""}, + {"TUNGETFEATURES", Const, 0, ""}, + {"TUNGETIFF", Const, 0, ""}, + {"TUNGETSNDBUF", Const, 0, ""}, + {"TUNGETVNETHDRSZ", Const, 0, ""}, + {"TUNSETDEBUG", Const, 0, ""}, + {"TUNSETGROUP", Const, 0, ""}, + {"TUNSETIFF", Const, 0, ""}, + {"TUNSETLINK", Const, 0, ""}, + {"TUNSETNOCSUM", Const, 0, ""}, + {"TUNSETOFFLOAD", Const, 0, ""}, + {"TUNSETOWNER", Const, 0, ""}, + {"TUNSETPERSIST", Const, 0, ""}, + {"TUNSETSNDBUF", Const, 0, ""}, + {"TUNSETTXFILTER", Const, 0, ""}, + {"TUNSETVNETHDRSZ", Const, 0, ""}, + {"Tee", Func, 0, "func(rfd int, wfd int, len int, flags int) (n int64, err error)"}, + {"TerminateProcess", Func, 0, ""}, + {"Termios", Type, 0, ""}, + {"Termios.Cc", Field, 0, ""}, + {"Termios.Cflag", Field, 0, ""}, + {"Termios.Iflag", Field, 0, ""}, + {"Termios.Ispeed", Field, 0, ""}, + {"Termios.Lflag", Field, 0, ""}, + {"Termios.Line", Field, 0, ""}, + {"Termios.Oflag", Field, 0, ""}, + {"Termios.Ospeed", Field, 0, ""}, + {"Termios.Pad_cgo_0", Field, 0, ""}, + {"Tgkill", Func, 0, "func(tgid int, tid int, sig Signal) (err error)"}, + {"Time", Func, 0, "func(t *Time_t) (tt Time_t, err error)"}, + {"Time_t", Type, 0, ""}, + {"Times", Func, 0, "func(tms *Tms) (ticks uintptr, err error)"}, + {"Timespec", Type, 0, ""}, + {"Timespec.Nsec", Field, 0, ""}, + {"Timespec.Pad_cgo_0", Field, 2, ""}, + {"Timespec.Sec", Field, 0, ""}, + {"TimespecToNsec", Func, 0, "func(ts Timespec) int64"}, + {"Timeval", Type, 0, ""}, + {"Timeval.Pad_cgo_0", Field, 0, ""}, + {"Timeval.Sec", Field, 0, ""}, + {"Timeval.Usec", Field, 0, ""}, + {"Timeval32", Type, 0, ""}, + {"Timeval32.Sec", Field, 0, ""}, + {"Timeval32.Usec", Field, 0, ""}, + {"TimevalToNsec", Func, 0, "func(tv Timeval) int64"}, + {"Timex", Type, 0, ""}, + {"Timex.Calcnt", Field, 0, ""}, + {"Timex.Constant", Field, 0, ""}, + {"Timex.Errcnt", Field, 0, ""}, + {"Timex.Esterror", Field, 0, ""}, + {"Timex.Freq", Field, 0, ""}, + {"Timex.Jitcnt", Field, 0, ""}, + {"Timex.Jitter", Field, 0, ""}, + {"Timex.Maxerror", Field, 0, ""}, + {"Timex.Modes", Field, 0, ""}, + {"Timex.Offset", Field, 0, ""}, + {"Timex.Pad_cgo_0", Field, 0, ""}, + {"Timex.Pad_cgo_1", Field, 0, ""}, + {"Timex.Pad_cgo_2", Field, 0, ""}, + {"Timex.Pad_cgo_3", Field, 0, ""}, + {"Timex.Ppsfreq", Field, 0, ""}, + {"Timex.Precision", Field, 0, ""}, + 
{"Timex.Shift", Field, 0, ""}, + {"Timex.Stabil", Field, 0, ""}, + {"Timex.Status", Field, 0, ""}, + {"Timex.Stbcnt", Field, 0, ""}, + {"Timex.Tai", Field, 0, ""}, + {"Timex.Tick", Field, 0, ""}, + {"Timex.Time", Field, 0, ""}, + {"Timex.Tolerance", Field, 0, ""}, + {"Timezoneinformation", Type, 0, ""}, + {"Timezoneinformation.Bias", Field, 0, ""}, + {"Timezoneinformation.DaylightBias", Field, 0, ""}, + {"Timezoneinformation.DaylightDate", Field, 0, ""}, + {"Timezoneinformation.DaylightName", Field, 0, ""}, + {"Timezoneinformation.StandardBias", Field, 0, ""}, + {"Timezoneinformation.StandardDate", Field, 0, ""}, + {"Timezoneinformation.StandardName", Field, 0, ""}, + {"Tms", Type, 0, ""}, + {"Tms.Cstime", Field, 0, ""}, + {"Tms.Cutime", Field, 0, ""}, + {"Tms.Stime", Field, 0, ""}, + {"Tms.Utime", Field, 0, ""}, + {"Token", Type, 0, ""}, + {"TokenAccessInformation", Const, 0, ""}, + {"TokenAuditPolicy", Const, 0, ""}, + {"TokenDefaultDacl", Const, 0, ""}, + {"TokenElevation", Const, 0, ""}, + {"TokenElevationType", Const, 0, ""}, + {"TokenGroups", Const, 0, ""}, + {"TokenGroupsAndPrivileges", Const, 0, ""}, + {"TokenHasRestrictions", Const, 0, ""}, + {"TokenImpersonationLevel", Const, 0, ""}, + {"TokenIntegrityLevel", Const, 0, ""}, + {"TokenLinkedToken", Const, 0, ""}, + {"TokenLogonSid", Const, 0, ""}, + {"TokenMandatoryPolicy", Const, 0, ""}, + {"TokenOrigin", Const, 0, ""}, + {"TokenOwner", Const, 0, ""}, + {"TokenPrimaryGroup", Const, 0, ""}, + {"TokenPrivileges", Const, 0, ""}, + {"TokenRestrictedSids", Const, 0, ""}, + {"TokenSandBoxInert", Const, 0, ""}, + {"TokenSessionId", Const, 0, ""}, + {"TokenSessionReference", Const, 0, ""}, + {"TokenSource", Const, 0, ""}, + {"TokenStatistics", Const, 0, ""}, + {"TokenType", Const, 0, ""}, + {"TokenUIAccess", Const, 0, ""}, + {"TokenUser", Const, 0, ""}, + {"TokenVirtualizationAllowed", Const, 0, ""}, + {"TokenVirtualizationEnabled", Const, 0, ""}, + {"Tokenprimarygroup", Type, 0, ""}, + {"Tokenprimarygroup.PrimaryGroup", Field, 0, ""}, + {"Tokenuser", Type, 0, ""}, + {"Tokenuser.User", Field, 0, ""}, + {"TranslateAccountName", Func, 0, ""}, + {"TranslateName", Func, 0, ""}, + {"TransmitFile", Func, 0, ""}, + {"TransmitFileBuffers", Type, 0, ""}, + {"TransmitFileBuffers.Head", Field, 0, ""}, + {"TransmitFileBuffers.HeadLength", Field, 0, ""}, + {"TransmitFileBuffers.Tail", Field, 0, ""}, + {"TransmitFileBuffers.TailLength", Field, 0, ""}, + {"Truncate", Func, 0, "func(path string, length int64) (err error)"}, + {"UNIX_PATH_MAX", Const, 12, ""}, + {"USAGE_MATCH_TYPE_AND", Const, 0, ""}, + {"USAGE_MATCH_TYPE_OR", Const, 0, ""}, + {"UTF16FromString", Func, 1, ""}, + {"UTF16PtrFromString", Func, 1, ""}, + {"UTF16ToString", Func, 0, ""}, + {"Ucred", Type, 0, ""}, + {"Ucred.Gid", Field, 0, ""}, + {"Ucred.Pid", Field, 0, ""}, + {"Ucred.Uid", Field, 0, ""}, + {"Umask", Func, 0, "func(mask int) (oldmask int)"}, + {"Uname", Func, 0, "func(buf *Utsname) (err error)"}, + {"Undelete", Func, 0, ""}, + {"UnixCredentials", Func, 0, "func(ucred *Ucred) []byte"}, + {"UnixRights", Func, 0, "func(fds ...int) []byte"}, + {"Unlink", Func, 0, "func(path string) error"}, + {"Unlinkat", Func, 0, "func(dirfd int, path string) error"}, + {"UnmapViewOfFile", Func, 0, ""}, + {"Unmount", Func, 0, "func(target string, flags int) (err error)"}, + {"Unsetenv", Func, 4, "func(key string) error"}, + {"Unshare", Func, 0, "func(flags int) (err error)"}, + {"UserInfo10", Type, 0, ""}, + {"UserInfo10.Comment", Field, 0, ""}, + {"UserInfo10.FullName", Field, 0, ""}, + 
{"UserInfo10.Name", Field, 0, ""}, + {"UserInfo10.UsrComment", Field, 0, ""}, + {"Ustat", Func, 0, "func(dev int, ubuf *Ustat_t) (err error)"}, + {"Ustat_t", Type, 0, ""}, + {"Ustat_t.Fname", Field, 0, ""}, + {"Ustat_t.Fpack", Field, 0, ""}, + {"Ustat_t.Pad_cgo_0", Field, 0, ""}, + {"Ustat_t.Pad_cgo_1", Field, 0, ""}, + {"Ustat_t.Tfree", Field, 0, ""}, + {"Ustat_t.Tinode", Field, 0, ""}, + {"Utimbuf", Type, 0, ""}, + {"Utimbuf.Actime", Field, 0, ""}, + {"Utimbuf.Modtime", Field, 0, ""}, + {"Utime", Func, 0, "func(path string, buf *Utimbuf) (err error)"}, + {"Utimes", Func, 0, "func(path string, tv []Timeval) (err error)"}, + {"UtimesNano", Func, 1, "func(path string, ts []Timespec) (err error)"}, + {"Utsname", Type, 0, ""}, + {"Utsname.Domainname", Field, 0, ""}, + {"Utsname.Machine", Field, 0, ""}, + {"Utsname.Nodename", Field, 0, ""}, + {"Utsname.Release", Field, 0, ""}, + {"Utsname.Sysname", Field, 0, ""}, + {"Utsname.Version", Field, 0, ""}, + {"VDISCARD", Const, 0, ""}, + {"VDSUSP", Const, 1, ""}, + {"VEOF", Const, 0, ""}, + {"VEOL", Const, 0, ""}, + {"VEOL2", Const, 0, ""}, + {"VERASE", Const, 0, ""}, + {"VERASE2", Const, 1, ""}, + {"VINTR", Const, 0, ""}, + {"VKILL", Const, 0, ""}, + {"VLNEXT", Const, 0, ""}, + {"VMIN", Const, 0, ""}, + {"VQUIT", Const, 0, ""}, + {"VREPRINT", Const, 0, ""}, + {"VSTART", Const, 0, ""}, + {"VSTATUS", Const, 1, ""}, + {"VSTOP", Const, 0, ""}, + {"VSUSP", Const, 0, ""}, + {"VSWTC", Const, 0, ""}, + {"VT0", Const, 1, ""}, + {"VT1", Const, 1, ""}, + {"VTDLY", Const, 1, ""}, + {"VTIME", Const, 0, ""}, + {"VWERASE", Const, 0, ""}, + {"VirtualLock", Func, 0, ""}, + {"VirtualUnlock", Func, 0, ""}, + {"WAIT_ABANDONED", Const, 0, ""}, + {"WAIT_FAILED", Const, 0, ""}, + {"WAIT_OBJECT_0", Const, 0, ""}, + {"WAIT_TIMEOUT", Const, 0, ""}, + {"WALL", Const, 0, ""}, + {"WALLSIG", Const, 1, ""}, + {"WALTSIG", Const, 1, ""}, + {"WCLONE", Const, 0, ""}, + {"WCONTINUED", Const, 0, ""}, + {"WCOREFLAG", Const, 0, ""}, + {"WEXITED", Const, 0, ""}, + {"WLINUXCLONE", Const, 0, ""}, + {"WNOHANG", Const, 0, ""}, + {"WNOTHREAD", Const, 0, ""}, + {"WNOWAIT", Const, 0, ""}, + {"WNOZOMBIE", Const, 1, ""}, + {"WOPTSCHECKED", Const, 1, ""}, + {"WORDSIZE", Const, 0, ""}, + {"WSABuf", Type, 0, ""}, + {"WSABuf.Buf", Field, 0, ""}, + {"WSABuf.Len", Field, 0, ""}, + {"WSACleanup", Func, 0, ""}, + {"WSADESCRIPTION_LEN", Const, 0, ""}, + {"WSAData", Type, 0, ""}, + {"WSAData.Description", Field, 0, ""}, + {"WSAData.HighVersion", Field, 0, ""}, + {"WSAData.MaxSockets", Field, 0, ""}, + {"WSAData.MaxUdpDg", Field, 0, ""}, + {"WSAData.SystemStatus", Field, 0, ""}, + {"WSAData.VendorInfo", Field, 0, ""}, + {"WSAData.Version", Field, 0, ""}, + {"WSAEACCES", Const, 2, ""}, + {"WSAECONNABORTED", Const, 9, ""}, + {"WSAECONNRESET", Const, 3, ""}, + {"WSAENOPROTOOPT", Const, 23, ""}, + {"WSAEnumProtocols", Func, 2, ""}, + {"WSAID_CONNECTEX", Var, 1, ""}, + {"WSAIoctl", Func, 0, ""}, + {"WSAPROTOCOL_LEN", Const, 2, ""}, + {"WSAProtocolChain", Type, 2, ""}, + {"WSAProtocolChain.ChainEntries", Field, 2, ""}, + {"WSAProtocolChain.ChainLen", Field, 2, ""}, + {"WSAProtocolInfo", Type, 2, ""}, + {"WSAProtocolInfo.AddressFamily", Field, 2, ""}, + {"WSAProtocolInfo.CatalogEntryId", Field, 2, ""}, + {"WSAProtocolInfo.MaxSockAddr", Field, 2, ""}, + {"WSAProtocolInfo.MessageSize", Field, 2, ""}, + {"WSAProtocolInfo.MinSockAddr", Field, 2, ""}, + {"WSAProtocolInfo.NetworkByteOrder", Field, 2, ""}, + {"WSAProtocolInfo.Protocol", Field, 2, ""}, + {"WSAProtocolInfo.ProtocolChain", Field, 2, ""}, + 
{"WSAProtocolInfo.ProtocolMaxOffset", Field, 2, ""}, + {"WSAProtocolInfo.ProtocolName", Field, 2, ""}, + {"WSAProtocolInfo.ProviderFlags", Field, 2, ""}, + {"WSAProtocolInfo.ProviderId", Field, 2, ""}, + {"WSAProtocolInfo.ProviderReserved", Field, 2, ""}, + {"WSAProtocolInfo.SecurityScheme", Field, 2, ""}, + {"WSAProtocolInfo.ServiceFlags1", Field, 2, ""}, + {"WSAProtocolInfo.ServiceFlags2", Field, 2, ""}, + {"WSAProtocolInfo.ServiceFlags3", Field, 2, ""}, + {"WSAProtocolInfo.ServiceFlags4", Field, 2, ""}, + {"WSAProtocolInfo.SocketType", Field, 2, ""}, + {"WSAProtocolInfo.Version", Field, 2, ""}, + {"WSARecv", Func, 0, ""}, + {"WSARecvFrom", Func, 0, ""}, + {"WSASYS_STATUS_LEN", Const, 0, ""}, + {"WSASend", Func, 0, ""}, + {"WSASendTo", Func, 0, ""}, + {"WSASendto", Func, 0, ""}, + {"WSAStartup", Func, 0, ""}, + {"WSTOPPED", Const, 0, ""}, + {"WTRAPPED", Const, 1, ""}, + {"WUNTRACED", Const, 0, ""}, + {"Wait4", Func, 0, "func(pid int, wstatus *WaitStatus, options int, rusage *Rusage) (wpid int, err error)"}, + {"WaitForSingleObject", Func, 0, ""}, + {"WaitStatus", Type, 0, ""}, + {"WaitStatus.ExitCode", Field, 0, ""}, + {"Win32FileAttributeData", Type, 0, ""}, + {"Win32FileAttributeData.CreationTime", Field, 0, ""}, + {"Win32FileAttributeData.FileAttributes", Field, 0, ""}, + {"Win32FileAttributeData.FileSizeHigh", Field, 0, ""}, + {"Win32FileAttributeData.FileSizeLow", Field, 0, ""}, + {"Win32FileAttributeData.LastAccessTime", Field, 0, ""}, + {"Win32FileAttributeData.LastWriteTime", Field, 0, ""}, + {"Win32finddata", Type, 0, ""}, + {"Win32finddata.AlternateFileName", Field, 0, ""}, + {"Win32finddata.CreationTime", Field, 0, ""}, + {"Win32finddata.FileAttributes", Field, 0, ""}, + {"Win32finddata.FileName", Field, 0, ""}, + {"Win32finddata.FileSizeHigh", Field, 0, ""}, + {"Win32finddata.FileSizeLow", Field, 0, ""}, + {"Win32finddata.LastAccessTime", Field, 0, ""}, + {"Win32finddata.LastWriteTime", Field, 0, ""}, + {"Win32finddata.Reserved0", Field, 0, ""}, + {"Win32finddata.Reserved1", Field, 0, ""}, + {"Write", Func, 0, "func(fd int, p []byte) (n int, err error)"}, + {"WriteConsole", Func, 1, ""}, + {"WriteFile", Func, 0, ""}, + {"X509_ASN_ENCODING", Const, 0, ""}, + {"XCASE", Const, 0, ""}, + {"XP1_CONNECTIONLESS", Const, 2, ""}, + {"XP1_CONNECT_DATA", Const, 2, ""}, + {"XP1_DISCONNECT_DATA", Const, 2, ""}, + {"XP1_EXPEDITED_DATA", Const, 2, ""}, + {"XP1_GRACEFUL_CLOSE", Const, 2, ""}, + {"XP1_GUARANTEED_DELIVERY", Const, 2, ""}, + {"XP1_GUARANTEED_ORDER", Const, 2, ""}, + {"XP1_IFS_HANDLES", Const, 2, ""}, + {"XP1_MESSAGE_ORIENTED", Const, 2, ""}, + {"XP1_MULTIPOINT_CONTROL_PLANE", Const, 2, ""}, + {"XP1_MULTIPOINT_DATA_PLANE", Const, 2, ""}, + {"XP1_PARTIAL_MESSAGE", Const, 2, ""}, + {"XP1_PSEUDO_STREAM", Const, 2, ""}, + {"XP1_QOS_SUPPORTED", Const, 2, ""}, + {"XP1_SAN_SUPPORT_SDP", Const, 2, ""}, + {"XP1_SUPPORT_BROADCAST", Const, 2, ""}, + {"XP1_SUPPORT_MULTIPOINT", Const, 2, ""}, + {"XP1_UNI_RECV", Const, 2, ""}, + {"XP1_UNI_SEND", Const, 2, ""}, }, "syscall/js": { - {"CopyBytesToGo", Func, 0}, - {"CopyBytesToJS", Func, 0}, - {"Error", Type, 0}, - {"Func", Type, 0}, - {"FuncOf", Func, 0}, - {"Global", Func, 0}, - {"Null", Func, 0}, - {"Type", Type, 0}, - {"TypeBoolean", Const, 0}, - {"TypeFunction", Const, 0}, - {"TypeNull", Const, 0}, - {"TypeNumber", Const, 0}, - {"TypeObject", Const, 0}, - {"TypeString", Const, 0}, - {"TypeSymbol", Const, 0}, - {"TypeUndefined", Const, 0}, - {"Undefined", Func, 0}, - {"Value", Type, 0}, - {"ValueError", Type, 0}, - {"ValueOf", Func, 0}, + 
{"CopyBytesToGo", Func, 0, ""}, + {"CopyBytesToJS", Func, 0, ""}, + {"Error", Type, 0, ""}, + {"Func", Type, 0, ""}, + {"FuncOf", Func, 0, ""}, + {"Global", Func, 0, ""}, + {"Null", Func, 0, ""}, + {"Type", Type, 0, ""}, + {"TypeBoolean", Const, 0, ""}, + {"TypeFunction", Const, 0, ""}, + {"TypeNull", Const, 0, ""}, + {"TypeNumber", Const, 0, ""}, + {"TypeObject", Const, 0, ""}, + {"TypeString", Const, 0, ""}, + {"TypeSymbol", Const, 0, ""}, + {"TypeUndefined", Const, 0, ""}, + {"Undefined", Func, 0, ""}, + {"Value", Type, 0, ""}, + {"ValueError", Type, 0, ""}, + {"ValueOf", Func, 0, ""}, }, "testing": { - {"(*B).Chdir", Method, 24}, - {"(*B).Cleanup", Method, 14}, - {"(*B).Context", Method, 24}, - {"(*B).Elapsed", Method, 20}, - {"(*B).Error", Method, 0}, - {"(*B).Errorf", Method, 0}, - {"(*B).Fail", Method, 0}, - {"(*B).FailNow", Method, 0}, - {"(*B).Failed", Method, 0}, - {"(*B).Fatal", Method, 0}, - {"(*B).Fatalf", Method, 0}, - {"(*B).Helper", Method, 9}, - {"(*B).Log", Method, 0}, - {"(*B).Logf", Method, 0}, - {"(*B).Loop", Method, 24}, - {"(*B).Name", Method, 8}, - {"(*B).ReportAllocs", Method, 1}, - {"(*B).ReportMetric", Method, 13}, - {"(*B).ResetTimer", Method, 0}, - {"(*B).Run", Method, 7}, - {"(*B).RunParallel", Method, 3}, - {"(*B).SetBytes", Method, 0}, - {"(*B).SetParallelism", Method, 3}, - {"(*B).Setenv", Method, 17}, - {"(*B).Skip", Method, 1}, - {"(*B).SkipNow", Method, 1}, - {"(*B).Skipf", Method, 1}, - {"(*B).Skipped", Method, 1}, - {"(*B).StartTimer", Method, 0}, - {"(*B).StopTimer", Method, 0}, - {"(*B).TempDir", Method, 15}, - {"(*F).Add", Method, 18}, - {"(*F).Chdir", Method, 24}, - {"(*F).Cleanup", Method, 18}, - {"(*F).Context", Method, 24}, - {"(*F).Error", Method, 18}, - {"(*F).Errorf", Method, 18}, - {"(*F).Fail", Method, 18}, - {"(*F).FailNow", Method, 18}, - {"(*F).Failed", Method, 18}, - {"(*F).Fatal", Method, 18}, - {"(*F).Fatalf", Method, 18}, - {"(*F).Fuzz", Method, 18}, - {"(*F).Helper", Method, 18}, - {"(*F).Log", Method, 18}, - {"(*F).Logf", Method, 18}, - {"(*F).Name", Method, 18}, - {"(*F).Setenv", Method, 18}, - {"(*F).Skip", Method, 18}, - {"(*F).SkipNow", Method, 18}, - {"(*F).Skipf", Method, 18}, - {"(*F).Skipped", Method, 18}, - {"(*F).TempDir", Method, 18}, - {"(*M).Run", Method, 4}, - {"(*PB).Next", Method, 3}, - {"(*T).Chdir", Method, 24}, - {"(*T).Cleanup", Method, 14}, - {"(*T).Context", Method, 24}, - {"(*T).Deadline", Method, 15}, - {"(*T).Error", Method, 0}, - {"(*T).Errorf", Method, 0}, - {"(*T).Fail", Method, 0}, - {"(*T).FailNow", Method, 0}, - {"(*T).Failed", Method, 0}, - {"(*T).Fatal", Method, 0}, - {"(*T).Fatalf", Method, 0}, - {"(*T).Helper", Method, 9}, - {"(*T).Log", Method, 0}, - {"(*T).Logf", Method, 0}, - {"(*T).Name", Method, 8}, - {"(*T).Parallel", Method, 0}, - {"(*T).Run", Method, 7}, - {"(*T).Setenv", Method, 17}, - {"(*T).Skip", Method, 1}, - {"(*T).SkipNow", Method, 1}, - {"(*T).Skipf", Method, 1}, - {"(*T).Skipped", Method, 1}, - {"(*T).TempDir", Method, 15}, - {"(BenchmarkResult).AllocedBytesPerOp", Method, 1}, - {"(BenchmarkResult).AllocsPerOp", Method, 1}, - {"(BenchmarkResult).MemString", Method, 1}, - {"(BenchmarkResult).NsPerOp", Method, 0}, - {"(BenchmarkResult).String", Method, 0}, - {"AllocsPerRun", Func, 1}, - {"B", Type, 0}, - {"B.N", Field, 0}, - {"Benchmark", Func, 0}, - {"BenchmarkResult", Type, 0}, - {"BenchmarkResult.Bytes", Field, 0}, - {"BenchmarkResult.Extra", Field, 13}, - {"BenchmarkResult.MemAllocs", Field, 1}, - {"BenchmarkResult.MemBytes", Field, 1}, - {"BenchmarkResult.N", Field, 0}, - 
{"BenchmarkResult.T", Field, 0}, - {"Cover", Type, 2}, - {"Cover.Blocks", Field, 2}, - {"Cover.Counters", Field, 2}, - {"Cover.CoveredPackages", Field, 2}, - {"Cover.Mode", Field, 2}, - {"CoverBlock", Type, 2}, - {"CoverBlock.Col0", Field, 2}, - {"CoverBlock.Col1", Field, 2}, - {"CoverBlock.Line0", Field, 2}, - {"CoverBlock.Line1", Field, 2}, - {"CoverBlock.Stmts", Field, 2}, - {"CoverMode", Func, 8}, - {"Coverage", Func, 4}, - {"F", Type, 18}, - {"Init", Func, 13}, - {"InternalBenchmark", Type, 0}, - {"InternalBenchmark.F", Field, 0}, - {"InternalBenchmark.Name", Field, 0}, - {"InternalExample", Type, 0}, - {"InternalExample.F", Field, 0}, - {"InternalExample.Name", Field, 0}, - {"InternalExample.Output", Field, 0}, - {"InternalExample.Unordered", Field, 7}, - {"InternalFuzzTarget", Type, 18}, - {"InternalFuzzTarget.Fn", Field, 18}, - {"InternalFuzzTarget.Name", Field, 18}, - {"InternalTest", Type, 0}, - {"InternalTest.F", Field, 0}, - {"InternalTest.Name", Field, 0}, - {"M", Type, 4}, - {"Main", Func, 0}, - {"MainStart", Func, 4}, - {"PB", Type, 3}, - {"RegisterCover", Func, 2}, - {"RunBenchmarks", Func, 0}, - {"RunExamples", Func, 0}, - {"RunTests", Func, 0}, - {"Short", Func, 0}, - {"T", Type, 0}, - {"TB", Type, 2}, - {"Testing", Func, 21}, - {"Verbose", Func, 1}, + {"(*B).Chdir", Method, 24, ""}, + {"(*B).Cleanup", Method, 14, ""}, + {"(*B).Context", Method, 24, ""}, + {"(*B).Elapsed", Method, 20, ""}, + {"(*B).Error", Method, 0, ""}, + {"(*B).Errorf", Method, 0, ""}, + {"(*B).Fail", Method, 0, ""}, + {"(*B).FailNow", Method, 0, ""}, + {"(*B).Failed", Method, 0, ""}, + {"(*B).Fatal", Method, 0, ""}, + {"(*B).Fatalf", Method, 0, ""}, + {"(*B).Helper", Method, 9, ""}, + {"(*B).Log", Method, 0, ""}, + {"(*B).Logf", Method, 0, ""}, + {"(*B).Loop", Method, 24, ""}, + {"(*B).Name", Method, 8, ""}, + {"(*B).ReportAllocs", Method, 1, ""}, + {"(*B).ReportMetric", Method, 13, ""}, + {"(*B).ResetTimer", Method, 0, ""}, + {"(*B).Run", Method, 7, ""}, + {"(*B).RunParallel", Method, 3, ""}, + {"(*B).SetBytes", Method, 0, ""}, + {"(*B).SetParallelism", Method, 3, ""}, + {"(*B).Setenv", Method, 17, ""}, + {"(*B).Skip", Method, 1, ""}, + {"(*B).SkipNow", Method, 1, ""}, + {"(*B).Skipf", Method, 1, ""}, + {"(*B).Skipped", Method, 1, ""}, + {"(*B).StartTimer", Method, 0, ""}, + {"(*B).StopTimer", Method, 0, ""}, + {"(*B).TempDir", Method, 15, ""}, + {"(*F).Add", Method, 18, ""}, + {"(*F).Chdir", Method, 24, ""}, + {"(*F).Cleanup", Method, 18, ""}, + {"(*F).Context", Method, 24, ""}, + {"(*F).Error", Method, 18, ""}, + {"(*F).Errorf", Method, 18, ""}, + {"(*F).Fail", Method, 18, ""}, + {"(*F).FailNow", Method, 18, ""}, + {"(*F).Failed", Method, 18, ""}, + {"(*F).Fatal", Method, 18, ""}, + {"(*F).Fatalf", Method, 18, ""}, + {"(*F).Fuzz", Method, 18, ""}, + {"(*F).Helper", Method, 18, ""}, + {"(*F).Log", Method, 18, ""}, + {"(*F).Logf", Method, 18, ""}, + {"(*F).Name", Method, 18, ""}, + {"(*F).Setenv", Method, 18, ""}, + {"(*F).Skip", Method, 18, ""}, + {"(*F).SkipNow", Method, 18, ""}, + {"(*F).Skipf", Method, 18, ""}, + {"(*F).Skipped", Method, 18, ""}, + {"(*F).TempDir", Method, 18, ""}, + {"(*M).Run", Method, 4, ""}, + {"(*PB).Next", Method, 3, ""}, + {"(*T).Chdir", Method, 24, ""}, + {"(*T).Cleanup", Method, 14, ""}, + {"(*T).Context", Method, 24, ""}, + {"(*T).Deadline", Method, 15, ""}, + {"(*T).Error", Method, 0, ""}, + {"(*T).Errorf", Method, 0, ""}, + {"(*T).Fail", Method, 0, ""}, + {"(*T).FailNow", Method, 0, ""}, + {"(*T).Failed", Method, 0, ""}, + {"(*T).Fatal", Method, 0, ""}, + 
{"(*T).Fatalf", Method, 0, ""}, + {"(*T).Helper", Method, 9, ""}, + {"(*T).Log", Method, 0, ""}, + {"(*T).Logf", Method, 0, ""}, + {"(*T).Name", Method, 8, ""}, + {"(*T).Parallel", Method, 0, ""}, + {"(*T).Run", Method, 7, ""}, + {"(*T).Setenv", Method, 17, ""}, + {"(*T).Skip", Method, 1, ""}, + {"(*T).SkipNow", Method, 1, ""}, + {"(*T).Skipf", Method, 1, ""}, + {"(*T).Skipped", Method, 1, ""}, + {"(*T).TempDir", Method, 15, ""}, + {"(BenchmarkResult).AllocedBytesPerOp", Method, 1, ""}, + {"(BenchmarkResult).AllocsPerOp", Method, 1, ""}, + {"(BenchmarkResult).MemString", Method, 1, ""}, + {"(BenchmarkResult).NsPerOp", Method, 0, ""}, + {"(BenchmarkResult).String", Method, 0, ""}, + {"AllocsPerRun", Func, 1, "func(runs int, f func()) (avg float64)"}, + {"B", Type, 0, ""}, + {"B.N", Field, 0, ""}, + {"Benchmark", Func, 0, "func(f func(b *B)) BenchmarkResult"}, + {"BenchmarkResult", Type, 0, ""}, + {"BenchmarkResult.Bytes", Field, 0, ""}, + {"BenchmarkResult.Extra", Field, 13, ""}, + {"BenchmarkResult.MemAllocs", Field, 1, ""}, + {"BenchmarkResult.MemBytes", Field, 1, ""}, + {"BenchmarkResult.N", Field, 0, ""}, + {"BenchmarkResult.T", Field, 0, ""}, + {"Cover", Type, 2, ""}, + {"Cover.Blocks", Field, 2, ""}, + {"Cover.Counters", Field, 2, ""}, + {"Cover.CoveredPackages", Field, 2, ""}, + {"Cover.Mode", Field, 2, ""}, + {"CoverBlock", Type, 2, ""}, + {"CoverBlock.Col0", Field, 2, ""}, + {"CoverBlock.Col1", Field, 2, ""}, + {"CoverBlock.Line0", Field, 2, ""}, + {"CoverBlock.Line1", Field, 2, ""}, + {"CoverBlock.Stmts", Field, 2, ""}, + {"CoverMode", Func, 8, "func() string"}, + {"Coverage", Func, 4, "func() float64"}, + {"F", Type, 18, ""}, + {"Init", Func, 13, "func()"}, + {"InternalBenchmark", Type, 0, ""}, + {"InternalBenchmark.F", Field, 0, ""}, + {"InternalBenchmark.Name", Field, 0, ""}, + {"InternalExample", Type, 0, ""}, + {"InternalExample.F", Field, 0, ""}, + {"InternalExample.Name", Field, 0, ""}, + {"InternalExample.Output", Field, 0, ""}, + {"InternalExample.Unordered", Field, 7, ""}, + {"InternalFuzzTarget", Type, 18, ""}, + {"InternalFuzzTarget.Fn", Field, 18, ""}, + {"InternalFuzzTarget.Name", Field, 18, ""}, + {"InternalTest", Type, 0, ""}, + {"InternalTest.F", Field, 0, ""}, + {"InternalTest.Name", Field, 0, ""}, + {"M", Type, 4, ""}, + {"Main", Func, 0, "func(matchString func(pat string, str string) (bool, error), tests []InternalTest, benchmarks []InternalBenchmark, examples []InternalExample)"}, + {"MainStart", Func, 4, "func(deps testDeps, tests []InternalTest, benchmarks []InternalBenchmark, fuzzTargets []InternalFuzzTarget, examples []InternalExample) *M"}, + {"PB", Type, 3, ""}, + {"RegisterCover", Func, 2, "func(c Cover)"}, + {"RunBenchmarks", Func, 0, "func(matchString func(pat string, str string) (bool, error), benchmarks []InternalBenchmark)"}, + {"RunExamples", Func, 0, "func(matchString func(pat string, str string) (bool, error), examples []InternalExample) (ok bool)"}, + {"RunTests", Func, 0, "func(matchString func(pat string, str string) (bool, error), tests []InternalTest) (ok bool)"}, + {"Short", Func, 0, "func() bool"}, + {"T", Type, 0, ""}, + {"TB", Type, 2, ""}, + {"Testing", Func, 21, "func() bool"}, + {"Verbose", Func, 1, "func() bool"}, }, "testing/fstest": { - {"(MapFS).Glob", Method, 16}, - {"(MapFS).Lstat", Method, 25}, - {"(MapFS).Open", Method, 16}, - {"(MapFS).ReadDir", Method, 16}, - {"(MapFS).ReadFile", Method, 16}, - {"(MapFS).ReadLink", Method, 25}, - {"(MapFS).Stat", Method, 16}, - {"(MapFS).Sub", Method, 16}, - {"MapFS", Type, 16}, - 
{"MapFile", Type, 16}, - {"MapFile.Data", Field, 16}, - {"MapFile.ModTime", Field, 16}, - {"MapFile.Mode", Field, 16}, - {"MapFile.Sys", Field, 16}, - {"TestFS", Func, 16}, + {"(MapFS).Glob", Method, 16, ""}, + {"(MapFS).Lstat", Method, 25, ""}, + {"(MapFS).Open", Method, 16, ""}, + {"(MapFS).ReadDir", Method, 16, ""}, + {"(MapFS).ReadFile", Method, 16, ""}, + {"(MapFS).ReadLink", Method, 25, ""}, + {"(MapFS).Stat", Method, 16, ""}, + {"(MapFS).Sub", Method, 16, ""}, + {"MapFS", Type, 16, ""}, + {"MapFile", Type, 16, ""}, + {"MapFile.Data", Field, 16, ""}, + {"MapFile.ModTime", Field, 16, ""}, + {"MapFile.Mode", Field, 16, ""}, + {"MapFile.Sys", Field, 16, ""}, + {"TestFS", Func, 16, "func(fsys fs.FS, expected ...string) error"}, }, "testing/iotest": { - {"DataErrReader", Func, 0}, - {"ErrReader", Func, 16}, - {"ErrTimeout", Var, 0}, - {"HalfReader", Func, 0}, - {"NewReadLogger", Func, 0}, - {"NewWriteLogger", Func, 0}, - {"OneByteReader", Func, 0}, - {"TestReader", Func, 16}, - {"TimeoutReader", Func, 0}, - {"TruncateWriter", Func, 0}, + {"DataErrReader", Func, 0, "func(r io.Reader) io.Reader"}, + {"ErrReader", Func, 16, "func(err error) io.Reader"}, + {"ErrTimeout", Var, 0, ""}, + {"HalfReader", Func, 0, "func(r io.Reader) io.Reader"}, + {"NewReadLogger", Func, 0, "func(prefix string, r io.Reader) io.Reader"}, + {"NewWriteLogger", Func, 0, "func(prefix string, w io.Writer) io.Writer"}, + {"OneByteReader", Func, 0, "func(r io.Reader) io.Reader"}, + {"TestReader", Func, 16, "func(r io.Reader, content []byte) error"}, + {"TimeoutReader", Func, 0, "func(r io.Reader) io.Reader"}, + {"TruncateWriter", Func, 0, "func(w io.Writer, n int64) io.Writer"}, }, "testing/quick": { - {"(*CheckEqualError).Error", Method, 0}, - {"(*CheckError).Error", Method, 0}, - {"(SetupError).Error", Method, 0}, - {"Check", Func, 0}, - {"CheckEqual", Func, 0}, - {"CheckEqualError", Type, 0}, - {"CheckEqualError.CheckError", Field, 0}, - {"CheckEqualError.Out1", Field, 0}, - {"CheckEqualError.Out2", Field, 0}, - {"CheckError", Type, 0}, - {"CheckError.Count", Field, 0}, - {"CheckError.In", Field, 0}, - {"Config", Type, 0}, - {"Config.MaxCount", Field, 0}, - {"Config.MaxCountScale", Field, 0}, - {"Config.Rand", Field, 0}, - {"Config.Values", Field, 0}, - {"Generator", Type, 0}, - {"SetupError", Type, 0}, - {"Value", Func, 0}, + {"(*CheckEqualError).Error", Method, 0, ""}, + {"(*CheckError).Error", Method, 0, ""}, + {"(SetupError).Error", Method, 0, ""}, + {"Check", Func, 0, "func(f any, config *Config) error"}, + {"CheckEqual", Func, 0, "func(f any, g any, config *Config) error"}, + {"CheckEqualError", Type, 0, ""}, + {"CheckEqualError.CheckError", Field, 0, ""}, + {"CheckEqualError.Out1", Field, 0, ""}, + {"CheckEqualError.Out2", Field, 0, ""}, + {"CheckError", Type, 0, ""}, + {"CheckError.Count", Field, 0, ""}, + {"CheckError.In", Field, 0, ""}, + {"Config", Type, 0, ""}, + {"Config.MaxCount", Field, 0, ""}, + {"Config.MaxCountScale", Field, 0, ""}, + {"Config.Rand", Field, 0, ""}, + {"Config.Values", Field, 0, ""}, + {"Generator", Type, 0, ""}, + {"SetupError", Type, 0, ""}, + {"Value", Func, 0, "func(t reflect.Type, rand *rand.Rand) (value reflect.Value, ok bool)"}, }, "testing/slogtest": { - {"Run", Func, 22}, - {"TestHandler", Func, 21}, + {"Run", Func, 22, "func(t *testing.T, newHandler func(*testing.T) slog.Handler, result func(*testing.T) map[string]any)"}, + {"TestHandler", Func, 21, "func(h slog.Handler, results func() []map[string]any) error"}, }, "text/scanner": { - {"(*Position).IsValid", Method, 0}, - 
{"(*Scanner).Init", Method, 0}, - {"(*Scanner).IsValid", Method, 0}, - {"(*Scanner).Next", Method, 0}, - {"(*Scanner).Peek", Method, 0}, - {"(*Scanner).Pos", Method, 0}, - {"(*Scanner).Scan", Method, 0}, - {"(*Scanner).TokenText", Method, 0}, - {"(Position).String", Method, 0}, - {"(Scanner).String", Method, 0}, - {"Char", Const, 0}, - {"Comment", Const, 0}, - {"EOF", Const, 0}, - {"Float", Const, 0}, - {"GoTokens", Const, 0}, - {"GoWhitespace", Const, 0}, - {"Ident", Const, 0}, - {"Int", Const, 0}, - {"Position", Type, 0}, - {"Position.Column", Field, 0}, - {"Position.Filename", Field, 0}, - {"Position.Line", Field, 0}, - {"Position.Offset", Field, 0}, - {"RawString", Const, 0}, - {"ScanChars", Const, 0}, - {"ScanComments", Const, 0}, - {"ScanFloats", Const, 0}, - {"ScanIdents", Const, 0}, - {"ScanInts", Const, 0}, - {"ScanRawStrings", Const, 0}, - {"ScanStrings", Const, 0}, - {"Scanner", Type, 0}, - {"Scanner.Error", Field, 0}, - {"Scanner.ErrorCount", Field, 0}, - {"Scanner.IsIdentRune", Field, 4}, - {"Scanner.Mode", Field, 0}, - {"Scanner.Position", Field, 0}, - {"Scanner.Whitespace", Field, 0}, - {"SkipComments", Const, 0}, - {"String", Const, 0}, - {"TokenString", Func, 0}, + {"(*Position).IsValid", Method, 0, ""}, + {"(*Scanner).Init", Method, 0, ""}, + {"(*Scanner).IsValid", Method, 0, ""}, + {"(*Scanner).Next", Method, 0, ""}, + {"(*Scanner).Peek", Method, 0, ""}, + {"(*Scanner).Pos", Method, 0, ""}, + {"(*Scanner).Scan", Method, 0, ""}, + {"(*Scanner).TokenText", Method, 0, ""}, + {"(Position).String", Method, 0, ""}, + {"(Scanner).String", Method, 0, ""}, + {"Char", Const, 0, ""}, + {"Comment", Const, 0, ""}, + {"EOF", Const, 0, ""}, + {"Float", Const, 0, ""}, + {"GoTokens", Const, 0, ""}, + {"GoWhitespace", Const, 0, ""}, + {"Ident", Const, 0, ""}, + {"Int", Const, 0, ""}, + {"Position", Type, 0, ""}, + {"Position.Column", Field, 0, ""}, + {"Position.Filename", Field, 0, ""}, + {"Position.Line", Field, 0, ""}, + {"Position.Offset", Field, 0, ""}, + {"RawString", Const, 0, ""}, + {"ScanChars", Const, 0, ""}, + {"ScanComments", Const, 0, ""}, + {"ScanFloats", Const, 0, ""}, + {"ScanIdents", Const, 0, ""}, + {"ScanInts", Const, 0, ""}, + {"ScanRawStrings", Const, 0, ""}, + {"ScanStrings", Const, 0, ""}, + {"Scanner", Type, 0, ""}, + {"Scanner.Error", Field, 0, ""}, + {"Scanner.ErrorCount", Field, 0, ""}, + {"Scanner.IsIdentRune", Field, 4, ""}, + {"Scanner.Mode", Field, 0, ""}, + {"Scanner.Position", Field, 0, ""}, + {"Scanner.Whitespace", Field, 0, ""}, + {"SkipComments", Const, 0, ""}, + {"String", Const, 0, ""}, + {"TokenString", Func, 0, "func(tok rune) string"}, }, "text/tabwriter": { - {"(*Writer).Flush", Method, 0}, - {"(*Writer).Init", Method, 0}, - {"(*Writer).Write", Method, 0}, - {"AlignRight", Const, 0}, - {"Debug", Const, 0}, - {"DiscardEmptyColumns", Const, 0}, - {"Escape", Const, 0}, - {"FilterHTML", Const, 0}, - {"NewWriter", Func, 0}, - {"StripEscape", Const, 0}, - {"TabIndent", Const, 0}, - {"Writer", Type, 0}, + {"(*Writer).Flush", Method, 0, ""}, + {"(*Writer).Init", Method, 0, ""}, + {"(*Writer).Write", Method, 0, ""}, + {"AlignRight", Const, 0, ""}, + {"Debug", Const, 0, ""}, + {"DiscardEmptyColumns", Const, 0, ""}, + {"Escape", Const, 0, ""}, + {"FilterHTML", Const, 0, ""}, + {"NewWriter", Func, 0, "func(output io.Writer, minwidth int, tabwidth int, padding int, padchar byte, flags uint) *Writer"}, + {"StripEscape", Const, 0, ""}, + {"TabIndent", Const, 0, ""}, + {"Writer", Type, 0, ""}, }, "text/template": { - {"(*Template).AddParseTree", Method, 0}, - 
{"(*Template).Clone", Method, 0}, - {"(*Template).DefinedTemplates", Method, 5}, - {"(*Template).Delims", Method, 0}, - {"(*Template).Execute", Method, 0}, - {"(*Template).ExecuteTemplate", Method, 0}, - {"(*Template).Funcs", Method, 0}, - {"(*Template).Lookup", Method, 0}, - {"(*Template).Name", Method, 0}, - {"(*Template).New", Method, 0}, - {"(*Template).Option", Method, 5}, - {"(*Template).Parse", Method, 0}, - {"(*Template).ParseFS", Method, 16}, - {"(*Template).ParseFiles", Method, 0}, - {"(*Template).ParseGlob", Method, 0}, - {"(*Template).Templates", Method, 0}, - {"(ExecError).Error", Method, 6}, - {"(ExecError).Unwrap", Method, 13}, - {"(Template).Copy", Method, 2}, - {"(Template).ErrorContext", Method, 1}, - {"ExecError", Type, 6}, - {"ExecError.Err", Field, 6}, - {"ExecError.Name", Field, 6}, - {"FuncMap", Type, 0}, - {"HTMLEscape", Func, 0}, - {"HTMLEscapeString", Func, 0}, - {"HTMLEscaper", Func, 0}, - {"IsTrue", Func, 6}, - {"JSEscape", Func, 0}, - {"JSEscapeString", Func, 0}, - {"JSEscaper", Func, 0}, - {"Must", Func, 0}, - {"New", Func, 0}, - {"ParseFS", Func, 16}, - {"ParseFiles", Func, 0}, - {"ParseGlob", Func, 0}, - {"Template", Type, 0}, - {"Template.Tree", Field, 0}, - {"URLQueryEscaper", Func, 0}, + {"(*Template).AddParseTree", Method, 0, ""}, + {"(*Template).Clone", Method, 0, ""}, + {"(*Template).DefinedTemplates", Method, 5, ""}, + {"(*Template).Delims", Method, 0, ""}, + {"(*Template).Execute", Method, 0, ""}, + {"(*Template).ExecuteTemplate", Method, 0, ""}, + {"(*Template).Funcs", Method, 0, ""}, + {"(*Template).Lookup", Method, 0, ""}, + {"(*Template).Name", Method, 0, ""}, + {"(*Template).New", Method, 0, ""}, + {"(*Template).Option", Method, 5, ""}, + {"(*Template).Parse", Method, 0, ""}, + {"(*Template).ParseFS", Method, 16, ""}, + {"(*Template).ParseFiles", Method, 0, ""}, + {"(*Template).ParseGlob", Method, 0, ""}, + {"(*Template).Templates", Method, 0, ""}, + {"(ExecError).Error", Method, 6, ""}, + {"(ExecError).Unwrap", Method, 13, ""}, + {"(Template).Copy", Method, 2, ""}, + {"(Template).ErrorContext", Method, 1, ""}, + {"ExecError", Type, 6, ""}, + {"ExecError.Err", Field, 6, ""}, + {"ExecError.Name", Field, 6, ""}, + {"FuncMap", Type, 0, ""}, + {"HTMLEscape", Func, 0, "func(w io.Writer, b []byte)"}, + {"HTMLEscapeString", Func, 0, "func(s string) string"}, + {"HTMLEscaper", Func, 0, "func(args ...any) string"}, + {"IsTrue", Func, 6, "func(val any) (truth bool, ok bool)"}, + {"JSEscape", Func, 0, "func(w io.Writer, b []byte)"}, + {"JSEscapeString", Func, 0, "func(s string) string"}, + {"JSEscaper", Func, 0, "func(args ...any) string"}, + {"Must", Func, 0, "func(t *Template, err error) *Template"}, + {"New", Func, 0, "func(name string) *Template"}, + {"ParseFS", Func, 16, "func(fsys fs.FS, patterns ...string) (*Template, error)"}, + {"ParseFiles", Func, 0, "func(filenames ...string) (*Template, error)"}, + {"ParseGlob", Func, 0, "func(pattern string) (*Template, error)"}, + {"Template", Type, 0, ""}, + {"Template.Tree", Field, 0, ""}, + {"URLQueryEscaper", Func, 0, "func(args ...any) string"}, }, "text/template/parse": { - {"(*ActionNode).Copy", Method, 0}, - {"(*ActionNode).String", Method, 0}, - {"(*BoolNode).Copy", Method, 0}, - {"(*BoolNode).String", Method, 0}, - {"(*BranchNode).Copy", Method, 4}, - {"(*BranchNode).String", Method, 0}, - {"(*BreakNode).Copy", Method, 18}, - {"(*BreakNode).String", Method, 18}, - {"(*ChainNode).Add", Method, 1}, - {"(*ChainNode).Copy", Method, 1}, - {"(*ChainNode).String", Method, 1}, - {"(*CommandNode).Copy", 
Method, 0}, - {"(*CommandNode).String", Method, 0}, - {"(*CommentNode).Copy", Method, 16}, - {"(*CommentNode).String", Method, 16}, - {"(*ContinueNode).Copy", Method, 18}, - {"(*ContinueNode).String", Method, 18}, - {"(*DotNode).Copy", Method, 0}, - {"(*DotNode).String", Method, 0}, - {"(*DotNode).Type", Method, 0}, - {"(*FieldNode).Copy", Method, 0}, - {"(*FieldNode).String", Method, 0}, - {"(*IdentifierNode).Copy", Method, 0}, - {"(*IdentifierNode).SetPos", Method, 1}, - {"(*IdentifierNode).SetTree", Method, 4}, - {"(*IdentifierNode).String", Method, 0}, - {"(*IfNode).Copy", Method, 0}, - {"(*IfNode).String", Method, 0}, - {"(*ListNode).Copy", Method, 0}, - {"(*ListNode).CopyList", Method, 0}, - {"(*ListNode).String", Method, 0}, - {"(*NilNode).Copy", Method, 1}, - {"(*NilNode).String", Method, 1}, - {"(*NilNode).Type", Method, 1}, - {"(*NumberNode).Copy", Method, 0}, - {"(*NumberNode).String", Method, 0}, - {"(*PipeNode).Copy", Method, 0}, - {"(*PipeNode).CopyPipe", Method, 0}, - {"(*PipeNode).String", Method, 0}, - {"(*RangeNode).Copy", Method, 0}, - {"(*RangeNode).String", Method, 0}, - {"(*StringNode).Copy", Method, 0}, - {"(*StringNode).String", Method, 0}, - {"(*TemplateNode).Copy", Method, 0}, - {"(*TemplateNode).String", Method, 0}, - {"(*TextNode).Copy", Method, 0}, - {"(*TextNode).String", Method, 0}, - {"(*Tree).Copy", Method, 2}, - {"(*Tree).ErrorContext", Method, 1}, - {"(*Tree).Parse", Method, 0}, - {"(*VariableNode).Copy", Method, 0}, - {"(*VariableNode).String", Method, 0}, - {"(*WithNode).Copy", Method, 0}, - {"(*WithNode).String", Method, 0}, - {"(ActionNode).Position", Method, 1}, - {"(ActionNode).Type", Method, 0}, - {"(BoolNode).Position", Method, 1}, - {"(BoolNode).Type", Method, 0}, - {"(BranchNode).Position", Method, 1}, - {"(BranchNode).Type", Method, 0}, - {"(BreakNode).Position", Method, 18}, - {"(BreakNode).Type", Method, 18}, - {"(ChainNode).Position", Method, 1}, - {"(ChainNode).Type", Method, 1}, - {"(CommandNode).Position", Method, 1}, - {"(CommandNode).Type", Method, 0}, - {"(CommentNode).Position", Method, 16}, - {"(CommentNode).Type", Method, 16}, - {"(ContinueNode).Position", Method, 18}, - {"(ContinueNode).Type", Method, 18}, - {"(DotNode).Position", Method, 1}, - {"(FieldNode).Position", Method, 1}, - {"(FieldNode).Type", Method, 0}, - {"(IdentifierNode).Position", Method, 1}, - {"(IdentifierNode).Type", Method, 0}, - {"(IfNode).Position", Method, 1}, - {"(IfNode).Type", Method, 0}, - {"(ListNode).Position", Method, 1}, - {"(ListNode).Type", Method, 0}, - {"(NilNode).Position", Method, 1}, - {"(NodeType).Type", Method, 0}, - {"(NumberNode).Position", Method, 1}, - {"(NumberNode).Type", Method, 0}, - {"(PipeNode).Position", Method, 1}, - {"(PipeNode).Type", Method, 0}, - {"(Pos).Position", Method, 1}, - {"(RangeNode).Position", Method, 1}, - {"(RangeNode).Type", Method, 0}, - {"(StringNode).Position", Method, 1}, - {"(StringNode).Type", Method, 0}, - {"(TemplateNode).Position", Method, 1}, - {"(TemplateNode).Type", Method, 0}, - {"(TextNode).Position", Method, 1}, - {"(TextNode).Type", Method, 0}, - {"(VariableNode).Position", Method, 1}, - {"(VariableNode).Type", Method, 0}, - {"(WithNode).Position", Method, 1}, - {"(WithNode).Type", Method, 0}, - {"ActionNode", Type, 0}, - {"ActionNode.Line", Field, 0}, - {"ActionNode.NodeType", Field, 0}, - {"ActionNode.Pipe", Field, 0}, - {"ActionNode.Pos", Field, 1}, - {"BoolNode", Type, 0}, - {"BoolNode.NodeType", Field, 0}, - {"BoolNode.Pos", Field, 1}, - {"BoolNode.True", Field, 0}, - {"BranchNode", Type, 0}, 
- {"BranchNode.ElseList", Field, 0}, - {"BranchNode.Line", Field, 0}, - {"BranchNode.List", Field, 0}, - {"BranchNode.NodeType", Field, 0}, - {"BranchNode.Pipe", Field, 0}, - {"BranchNode.Pos", Field, 1}, - {"BreakNode", Type, 18}, - {"BreakNode.Line", Field, 18}, - {"BreakNode.NodeType", Field, 18}, - {"BreakNode.Pos", Field, 18}, - {"ChainNode", Type, 1}, - {"ChainNode.Field", Field, 1}, - {"ChainNode.Node", Field, 1}, - {"ChainNode.NodeType", Field, 1}, - {"ChainNode.Pos", Field, 1}, - {"CommandNode", Type, 0}, - {"CommandNode.Args", Field, 0}, - {"CommandNode.NodeType", Field, 0}, - {"CommandNode.Pos", Field, 1}, - {"CommentNode", Type, 16}, - {"CommentNode.NodeType", Field, 16}, - {"CommentNode.Pos", Field, 16}, - {"CommentNode.Text", Field, 16}, - {"ContinueNode", Type, 18}, - {"ContinueNode.Line", Field, 18}, - {"ContinueNode.NodeType", Field, 18}, - {"ContinueNode.Pos", Field, 18}, - {"DotNode", Type, 0}, - {"DotNode.NodeType", Field, 4}, - {"DotNode.Pos", Field, 1}, - {"FieldNode", Type, 0}, - {"FieldNode.Ident", Field, 0}, - {"FieldNode.NodeType", Field, 0}, - {"FieldNode.Pos", Field, 1}, - {"IdentifierNode", Type, 0}, - {"IdentifierNode.Ident", Field, 0}, - {"IdentifierNode.NodeType", Field, 0}, - {"IdentifierNode.Pos", Field, 1}, - {"IfNode", Type, 0}, - {"IfNode.BranchNode", Field, 0}, - {"IsEmptyTree", Func, 0}, - {"ListNode", Type, 0}, - {"ListNode.NodeType", Field, 0}, - {"ListNode.Nodes", Field, 0}, - {"ListNode.Pos", Field, 1}, - {"Mode", Type, 16}, - {"New", Func, 0}, - {"NewIdentifier", Func, 0}, - {"NilNode", Type, 1}, - {"NilNode.NodeType", Field, 4}, - {"NilNode.Pos", Field, 1}, - {"Node", Type, 0}, - {"NodeAction", Const, 0}, - {"NodeBool", Const, 0}, - {"NodeBreak", Const, 18}, - {"NodeChain", Const, 1}, - {"NodeCommand", Const, 0}, - {"NodeComment", Const, 16}, - {"NodeContinue", Const, 18}, - {"NodeDot", Const, 0}, - {"NodeField", Const, 0}, - {"NodeIdentifier", Const, 0}, - {"NodeIf", Const, 0}, - {"NodeList", Const, 0}, - {"NodeNil", Const, 1}, - {"NodeNumber", Const, 0}, - {"NodePipe", Const, 0}, - {"NodeRange", Const, 0}, - {"NodeString", Const, 0}, - {"NodeTemplate", Const, 0}, - {"NodeText", Const, 0}, - {"NodeType", Type, 0}, - {"NodeVariable", Const, 0}, - {"NodeWith", Const, 0}, - {"NumberNode", Type, 0}, - {"NumberNode.Complex128", Field, 0}, - {"NumberNode.Float64", Field, 0}, - {"NumberNode.Int64", Field, 0}, - {"NumberNode.IsComplex", Field, 0}, - {"NumberNode.IsFloat", Field, 0}, - {"NumberNode.IsInt", Field, 0}, - {"NumberNode.IsUint", Field, 0}, - {"NumberNode.NodeType", Field, 0}, - {"NumberNode.Pos", Field, 1}, - {"NumberNode.Text", Field, 0}, - {"NumberNode.Uint64", Field, 0}, - {"Parse", Func, 0}, - {"ParseComments", Const, 16}, - {"PipeNode", Type, 0}, - {"PipeNode.Cmds", Field, 0}, - {"PipeNode.Decl", Field, 0}, - {"PipeNode.IsAssign", Field, 11}, - {"PipeNode.Line", Field, 0}, - {"PipeNode.NodeType", Field, 0}, - {"PipeNode.Pos", Field, 1}, - {"Pos", Type, 1}, - {"RangeNode", Type, 0}, - {"RangeNode.BranchNode", Field, 0}, - {"SkipFuncCheck", Const, 17}, - {"StringNode", Type, 0}, - {"StringNode.NodeType", Field, 0}, - {"StringNode.Pos", Field, 1}, - {"StringNode.Quoted", Field, 0}, - {"StringNode.Text", Field, 0}, - {"TemplateNode", Type, 0}, - {"TemplateNode.Line", Field, 0}, - {"TemplateNode.Name", Field, 0}, - {"TemplateNode.NodeType", Field, 0}, - {"TemplateNode.Pipe", Field, 0}, - {"TemplateNode.Pos", Field, 1}, - {"TextNode", Type, 0}, - {"TextNode.NodeType", Field, 0}, - {"TextNode.Pos", Field, 1}, - {"TextNode.Text", Field, 0}, - 
{"Tree", Type, 0}, - {"Tree.Mode", Field, 16}, - {"Tree.Name", Field, 0}, - {"Tree.ParseName", Field, 1}, - {"Tree.Root", Field, 0}, - {"VariableNode", Type, 0}, - {"VariableNode.Ident", Field, 0}, - {"VariableNode.NodeType", Field, 0}, - {"VariableNode.Pos", Field, 1}, - {"WithNode", Type, 0}, - {"WithNode.BranchNode", Field, 0}, + {"(*ActionNode).Copy", Method, 0, ""}, + {"(*ActionNode).String", Method, 0, ""}, + {"(*BoolNode).Copy", Method, 0, ""}, + {"(*BoolNode).String", Method, 0, ""}, + {"(*BranchNode).Copy", Method, 4, ""}, + {"(*BranchNode).String", Method, 0, ""}, + {"(*BreakNode).Copy", Method, 18, ""}, + {"(*BreakNode).String", Method, 18, ""}, + {"(*ChainNode).Add", Method, 1, ""}, + {"(*ChainNode).Copy", Method, 1, ""}, + {"(*ChainNode).String", Method, 1, ""}, + {"(*CommandNode).Copy", Method, 0, ""}, + {"(*CommandNode).String", Method, 0, ""}, + {"(*CommentNode).Copy", Method, 16, ""}, + {"(*CommentNode).String", Method, 16, ""}, + {"(*ContinueNode).Copy", Method, 18, ""}, + {"(*ContinueNode).String", Method, 18, ""}, + {"(*DotNode).Copy", Method, 0, ""}, + {"(*DotNode).String", Method, 0, ""}, + {"(*DotNode).Type", Method, 0, ""}, + {"(*FieldNode).Copy", Method, 0, ""}, + {"(*FieldNode).String", Method, 0, ""}, + {"(*IdentifierNode).Copy", Method, 0, ""}, + {"(*IdentifierNode).SetPos", Method, 1, ""}, + {"(*IdentifierNode).SetTree", Method, 4, ""}, + {"(*IdentifierNode).String", Method, 0, ""}, + {"(*IfNode).Copy", Method, 0, ""}, + {"(*IfNode).String", Method, 0, ""}, + {"(*ListNode).Copy", Method, 0, ""}, + {"(*ListNode).CopyList", Method, 0, ""}, + {"(*ListNode).String", Method, 0, ""}, + {"(*NilNode).Copy", Method, 1, ""}, + {"(*NilNode).String", Method, 1, ""}, + {"(*NilNode).Type", Method, 1, ""}, + {"(*NumberNode).Copy", Method, 0, ""}, + {"(*NumberNode).String", Method, 0, ""}, + {"(*PipeNode).Copy", Method, 0, ""}, + {"(*PipeNode).CopyPipe", Method, 0, ""}, + {"(*PipeNode).String", Method, 0, ""}, + {"(*RangeNode).Copy", Method, 0, ""}, + {"(*RangeNode).String", Method, 0, ""}, + {"(*StringNode).Copy", Method, 0, ""}, + {"(*StringNode).String", Method, 0, ""}, + {"(*TemplateNode).Copy", Method, 0, ""}, + {"(*TemplateNode).String", Method, 0, ""}, + {"(*TextNode).Copy", Method, 0, ""}, + {"(*TextNode).String", Method, 0, ""}, + {"(*Tree).Copy", Method, 2, ""}, + {"(*Tree).ErrorContext", Method, 1, ""}, + {"(*Tree).Parse", Method, 0, ""}, + {"(*VariableNode).Copy", Method, 0, ""}, + {"(*VariableNode).String", Method, 0, ""}, + {"(*WithNode).Copy", Method, 0, ""}, + {"(*WithNode).String", Method, 0, ""}, + {"(ActionNode).Position", Method, 1, ""}, + {"(ActionNode).Type", Method, 0, ""}, + {"(BoolNode).Position", Method, 1, ""}, + {"(BoolNode).Type", Method, 0, ""}, + {"(BranchNode).Position", Method, 1, ""}, + {"(BranchNode).Type", Method, 0, ""}, + {"(BreakNode).Position", Method, 18, ""}, + {"(BreakNode).Type", Method, 18, ""}, + {"(ChainNode).Position", Method, 1, ""}, + {"(ChainNode).Type", Method, 1, ""}, + {"(CommandNode).Position", Method, 1, ""}, + {"(CommandNode).Type", Method, 0, ""}, + {"(CommentNode).Position", Method, 16, ""}, + {"(CommentNode).Type", Method, 16, ""}, + {"(ContinueNode).Position", Method, 18, ""}, + {"(ContinueNode).Type", Method, 18, ""}, + {"(DotNode).Position", Method, 1, ""}, + {"(FieldNode).Position", Method, 1, ""}, + {"(FieldNode).Type", Method, 0, ""}, + {"(IdentifierNode).Position", Method, 1, ""}, + {"(IdentifierNode).Type", Method, 0, ""}, + {"(IfNode).Position", Method, 1, ""}, + {"(IfNode).Type", Method, 0, ""}, + 
{"(ListNode).Position", Method, 1, ""}, + {"(ListNode).Type", Method, 0, ""}, + {"(NilNode).Position", Method, 1, ""}, + {"(NodeType).Type", Method, 0, ""}, + {"(NumberNode).Position", Method, 1, ""}, + {"(NumberNode).Type", Method, 0, ""}, + {"(PipeNode).Position", Method, 1, ""}, + {"(PipeNode).Type", Method, 0, ""}, + {"(Pos).Position", Method, 1, ""}, + {"(RangeNode).Position", Method, 1, ""}, + {"(RangeNode).Type", Method, 0, ""}, + {"(StringNode).Position", Method, 1, ""}, + {"(StringNode).Type", Method, 0, ""}, + {"(TemplateNode).Position", Method, 1, ""}, + {"(TemplateNode).Type", Method, 0, ""}, + {"(TextNode).Position", Method, 1, ""}, + {"(TextNode).Type", Method, 0, ""}, + {"(VariableNode).Position", Method, 1, ""}, + {"(VariableNode).Type", Method, 0, ""}, + {"(WithNode).Position", Method, 1, ""}, + {"(WithNode).Type", Method, 0, ""}, + {"ActionNode", Type, 0, ""}, + {"ActionNode.Line", Field, 0, ""}, + {"ActionNode.NodeType", Field, 0, ""}, + {"ActionNode.Pipe", Field, 0, ""}, + {"ActionNode.Pos", Field, 1, ""}, + {"BoolNode", Type, 0, ""}, + {"BoolNode.NodeType", Field, 0, ""}, + {"BoolNode.Pos", Field, 1, ""}, + {"BoolNode.True", Field, 0, ""}, + {"BranchNode", Type, 0, ""}, + {"BranchNode.ElseList", Field, 0, ""}, + {"BranchNode.Line", Field, 0, ""}, + {"BranchNode.List", Field, 0, ""}, + {"BranchNode.NodeType", Field, 0, ""}, + {"BranchNode.Pipe", Field, 0, ""}, + {"BranchNode.Pos", Field, 1, ""}, + {"BreakNode", Type, 18, ""}, + {"BreakNode.Line", Field, 18, ""}, + {"BreakNode.NodeType", Field, 18, ""}, + {"BreakNode.Pos", Field, 18, ""}, + {"ChainNode", Type, 1, ""}, + {"ChainNode.Field", Field, 1, ""}, + {"ChainNode.Node", Field, 1, ""}, + {"ChainNode.NodeType", Field, 1, ""}, + {"ChainNode.Pos", Field, 1, ""}, + {"CommandNode", Type, 0, ""}, + {"CommandNode.Args", Field, 0, ""}, + {"CommandNode.NodeType", Field, 0, ""}, + {"CommandNode.Pos", Field, 1, ""}, + {"CommentNode", Type, 16, ""}, + {"CommentNode.NodeType", Field, 16, ""}, + {"CommentNode.Pos", Field, 16, ""}, + {"CommentNode.Text", Field, 16, ""}, + {"ContinueNode", Type, 18, ""}, + {"ContinueNode.Line", Field, 18, ""}, + {"ContinueNode.NodeType", Field, 18, ""}, + {"ContinueNode.Pos", Field, 18, ""}, + {"DotNode", Type, 0, ""}, + {"DotNode.NodeType", Field, 4, ""}, + {"DotNode.Pos", Field, 1, ""}, + {"FieldNode", Type, 0, ""}, + {"FieldNode.Ident", Field, 0, ""}, + {"FieldNode.NodeType", Field, 0, ""}, + {"FieldNode.Pos", Field, 1, ""}, + {"IdentifierNode", Type, 0, ""}, + {"IdentifierNode.Ident", Field, 0, ""}, + {"IdentifierNode.NodeType", Field, 0, ""}, + {"IdentifierNode.Pos", Field, 1, ""}, + {"IfNode", Type, 0, ""}, + {"IfNode.BranchNode", Field, 0, ""}, + {"IsEmptyTree", Func, 0, "func(n Node) bool"}, + {"ListNode", Type, 0, ""}, + {"ListNode.NodeType", Field, 0, ""}, + {"ListNode.Nodes", Field, 0, ""}, + {"ListNode.Pos", Field, 1, ""}, + {"Mode", Type, 16, ""}, + {"New", Func, 0, "func(name string, funcs ...map[string]any) *Tree"}, + {"NewIdentifier", Func, 0, "func(ident string) *IdentifierNode"}, + {"NilNode", Type, 1, ""}, + {"NilNode.NodeType", Field, 4, ""}, + {"NilNode.Pos", Field, 1, ""}, + {"Node", Type, 0, ""}, + {"NodeAction", Const, 0, ""}, + {"NodeBool", Const, 0, ""}, + {"NodeBreak", Const, 18, ""}, + {"NodeChain", Const, 1, ""}, + {"NodeCommand", Const, 0, ""}, + {"NodeComment", Const, 16, ""}, + {"NodeContinue", Const, 18, ""}, + {"NodeDot", Const, 0, ""}, + {"NodeField", Const, 0, ""}, + {"NodeIdentifier", Const, 0, ""}, + {"NodeIf", Const, 0, ""}, + {"NodeList", Const, 0, ""}, + 
{"NodeNil", Const, 1, ""}, + {"NodeNumber", Const, 0, ""}, + {"NodePipe", Const, 0, ""}, + {"NodeRange", Const, 0, ""}, + {"NodeString", Const, 0, ""}, + {"NodeTemplate", Const, 0, ""}, + {"NodeText", Const, 0, ""}, + {"NodeType", Type, 0, ""}, + {"NodeVariable", Const, 0, ""}, + {"NodeWith", Const, 0, ""}, + {"NumberNode", Type, 0, ""}, + {"NumberNode.Complex128", Field, 0, ""}, + {"NumberNode.Float64", Field, 0, ""}, + {"NumberNode.Int64", Field, 0, ""}, + {"NumberNode.IsComplex", Field, 0, ""}, + {"NumberNode.IsFloat", Field, 0, ""}, + {"NumberNode.IsInt", Field, 0, ""}, + {"NumberNode.IsUint", Field, 0, ""}, + {"NumberNode.NodeType", Field, 0, ""}, + {"NumberNode.Pos", Field, 1, ""}, + {"NumberNode.Text", Field, 0, ""}, + {"NumberNode.Uint64", Field, 0, ""}, + {"Parse", Func, 0, "func(name string, text string, leftDelim string, rightDelim string, funcs ...map[string]any) (map[string]*Tree, error)"}, + {"ParseComments", Const, 16, ""}, + {"PipeNode", Type, 0, ""}, + {"PipeNode.Cmds", Field, 0, ""}, + {"PipeNode.Decl", Field, 0, ""}, + {"PipeNode.IsAssign", Field, 11, ""}, + {"PipeNode.Line", Field, 0, ""}, + {"PipeNode.NodeType", Field, 0, ""}, + {"PipeNode.Pos", Field, 1, ""}, + {"Pos", Type, 1, ""}, + {"RangeNode", Type, 0, ""}, + {"RangeNode.BranchNode", Field, 0, ""}, + {"SkipFuncCheck", Const, 17, ""}, + {"StringNode", Type, 0, ""}, + {"StringNode.NodeType", Field, 0, ""}, + {"StringNode.Pos", Field, 1, ""}, + {"StringNode.Quoted", Field, 0, ""}, + {"StringNode.Text", Field, 0, ""}, + {"TemplateNode", Type, 0, ""}, + {"TemplateNode.Line", Field, 0, ""}, + {"TemplateNode.Name", Field, 0, ""}, + {"TemplateNode.NodeType", Field, 0, ""}, + {"TemplateNode.Pipe", Field, 0, ""}, + {"TemplateNode.Pos", Field, 1, ""}, + {"TextNode", Type, 0, ""}, + {"TextNode.NodeType", Field, 0, ""}, + {"TextNode.Pos", Field, 1, ""}, + {"TextNode.Text", Field, 0, ""}, + {"Tree", Type, 0, ""}, + {"Tree.Mode", Field, 16, ""}, + {"Tree.Name", Field, 0, ""}, + {"Tree.ParseName", Field, 1, ""}, + {"Tree.Root", Field, 0, ""}, + {"VariableNode", Type, 0, ""}, + {"VariableNode.Ident", Field, 0, ""}, + {"VariableNode.NodeType", Field, 0, ""}, + {"VariableNode.Pos", Field, 1, ""}, + {"WithNode", Type, 0, ""}, + {"WithNode.BranchNode", Field, 0, ""}, }, "time": { - {"(*Location).String", Method, 0}, - {"(*ParseError).Error", Method, 0}, - {"(*Ticker).Reset", Method, 15}, - {"(*Ticker).Stop", Method, 0}, - {"(*Time).GobDecode", Method, 0}, - {"(*Time).UnmarshalBinary", Method, 2}, - {"(*Time).UnmarshalJSON", Method, 0}, - {"(*Time).UnmarshalText", Method, 2}, - {"(*Timer).Reset", Method, 1}, - {"(*Timer).Stop", Method, 0}, - {"(Duration).Abs", Method, 19}, - {"(Duration).Hours", Method, 0}, - {"(Duration).Microseconds", Method, 13}, - {"(Duration).Milliseconds", Method, 13}, - {"(Duration).Minutes", Method, 0}, - {"(Duration).Nanoseconds", Method, 0}, - {"(Duration).Round", Method, 9}, - {"(Duration).Seconds", Method, 0}, - {"(Duration).String", Method, 0}, - {"(Duration).Truncate", Method, 9}, - {"(Month).String", Method, 0}, - {"(Time).Add", Method, 0}, - {"(Time).AddDate", Method, 0}, - {"(Time).After", Method, 0}, - {"(Time).AppendBinary", Method, 24}, - {"(Time).AppendFormat", Method, 5}, - {"(Time).AppendText", Method, 24}, - {"(Time).Before", Method, 0}, - {"(Time).Clock", Method, 0}, - {"(Time).Compare", Method, 20}, - {"(Time).Date", Method, 0}, - {"(Time).Day", Method, 0}, - {"(Time).Equal", Method, 0}, - {"(Time).Format", Method, 0}, - {"(Time).GoString", Method, 17}, - {"(Time).GobEncode", Method, 0}, - 
{"(Time).Hour", Method, 0}, - {"(Time).ISOWeek", Method, 0}, - {"(Time).In", Method, 0}, - {"(Time).IsDST", Method, 17}, - {"(Time).IsZero", Method, 0}, - {"(Time).Local", Method, 0}, - {"(Time).Location", Method, 0}, - {"(Time).MarshalBinary", Method, 2}, - {"(Time).MarshalJSON", Method, 0}, - {"(Time).MarshalText", Method, 2}, - {"(Time).Minute", Method, 0}, - {"(Time).Month", Method, 0}, - {"(Time).Nanosecond", Method, 0}, - {"(Time).Round", Method, 1}, - {"(Time).Second", Method, 0}, - {"(Time).String", Method, 0}, - {"(Time).Sub", Method, 0}, - {"(Time).Truncate", Method, 1}, - {"(Time).UTC", Method, 0}, - {"(Time).Unix", Method, 0}, - {"(Time).UnixMicro", Method, 17}, - {"(Time).UnixMilli", Method, 17}, - {"(Time).UnixNano", Method, 0}, - {"(Time).Weekday", Method, 0}, - {"(Time).Year", Method, 0}, - {"(Time).YearDay", Method, 1}, - {"(Time).Zone", Method, 0}, - {"(Time).ZoneBounds", Method, 19}, - {"(Weekday).String", Method, 0}, - {"ANSIC", Const, 0}, - {"After", Func, 0}, - {"AfterFunc", Func, 0}, - {"April", Const, 0}, - {"August", Const, 0}, - {"Date", Func, 0}, - {"DateOnly", Const, 20}, - {"DateTime", Const, 20}, - {"December", Const, 0}, - {"Duration", Type, 0}, - {"February", Const, 0}, - {"FixedZone", Func, 0}, - {"Friday", Const, 0}, - {"Hour", Const, 0}, - {"January", Const, 0}, - {"July", Const, 0}, - {"June", Const, 0}, - {"Kitchen", Const, 0}, - {"Layout", Const, 17}, - {"LoadLocation", Func, 0}, - {"LoadLocationFromTZData", Func, 10}, - {"Local", Var, 0}, - {"Location", Type, 0}, - {"March", Const, 0}, - {"May", Const, 0}, - {"Microsecond", Const, 0}, - {"Millisecond", Const, 0}, - {"Minute", Const, 0}, - {"Monday", Const, 0}, - {"Month", Type, 0}, - {"Nanosecond", Const, 0}, - {"NewTicker", Func, 0}, - {"NewTimer", Func, 0}, - {"November", Const, 0}, - {"Now", Func, 0}, - {"October", Const, 0}, - {"Parse", Func, 0}, - {"ParseDuration", Func, 0}, - {"ParseError", Type, 0}, - {"ParseError.Layout", Field, 0}, - {"ParseError.LayoutElem", Field, 0}, - {"ParseError.Message", Field, 0}, - {"ParseError.Value", Field, 0}, - {"ParseError.ValueElem", Field, 0}, - {"ParseInLocation", Func, 1}, - {"RFC1123", Const, 0}, - {"RFC1123Z", Const, 0}, - {"RFC3339", Const, 0}, - {"RFC3339Nano", Const, 0}, - {"RFC822", Const, 0}, - {"RFC822Z", Const, 0}, - {"RFC850", Const, 0}, - {"RubyDate", Const, 0}, - {"Saturday", Const, 0}, - {"Second", Const, 0}, - {"September", Const, 0}, - {"Since", Func, 0}, - {"Sleep", Func, 0}, - {"Stamp", Const, 0}, - {"StampMicro", Const, 0}, - {"StampMilli", Const, 0}, - {"StampNano", Const, 0}, - {"Sunday", Const, 0}, - {"Thursday", Const, 0}, - {"Tick", Func, 0}, - {"Ticker", Type, 0}, - {"Ticker.C", Field, 0}, - {"Time", Type, 0}, - {"TimeOnly", Const, 20}, - {"Timer", Type, 0}, - {"Timer.C", Field, 0}, - {"Tuesday", Const, 0}, - {"UTC", Var, 0}, - {"Unix", Func, 0}, - {"UnixDate", Const, 0}, - {"UnixMicro", Func, 17}, - {"UnixMilli", Func, 17}, - {"Until", Func, 8}, - {"Wednesday", Const, 0}, - {"Weekday", Type, 0}, + {"(*Location).String", Method, 0, ""}, + {"(*ParseError).Error", Method, 0, ""}, + {"(*Ticker).Reset", Method, 15, ""}, + {"(*Ticker).Stop", Method, 0, ""}, + {"(*Time).GobDecode", Method, 0, ""}, + {"(*Time).UnmarshalBinary", Method, 2, ""}, + {"(*Time).UnmarshalJSON", Method, 0, ""}, + {"(*Time).UnmarshalText", Method, 2, ""}, + {"(*Timer).Reset", Method, 1, ""}, + {"(*Timer).Stop", Method, 0, ""}, + {"(Duration).Abs", Method, 19, ""}, + {"(Duration).Hours", Method, 0, ""}, + {"(Duration).Microseconds", Method, 13, ""}, + 
{"(Duration).Milliseconds", Method, 13, ""}, + {"(Duration).Minutes", Method, 0, ""}, + {"(Duration).Nanoseconds", Method, 0, ""}, + {"(Duration).Round", Method, 9, ""}, + {"(Duration).Seconds", Method, 0, ""}, + {"(Duration).String", Method, 0, ""}, + {"(Duration).Truncate", Method, 9, ""}, + {"(Month).String", Method, 0, ""}, + {"(Time).Add", Method, 0, ""}, + {"(Time).AddDate", Method, 0, ""}, + {"(Time).After", Method, 0, ""}, + {"(Time).AppendBinary", Method, 24, ""}, + {"(Time).AppendFormat", Method, 5, ""}, + {"(Time).AppendText", Method, 24, ""}, + {"(Time).Before", Method, 0, ""}, + {"(Time).Clock", Method, 0, ""}, + {"(Time).Compare", Method, 20, ""}, + {"(Time).Date", Method, 0, ""}, + {"(Time).Day", Method, 0, ""}, + {"(Time).Equal", Method, 0, ""}, + {"(Time).Format", Method, 0, ""}, + {"(Time).GoString", Method, 17, ""}, + {"(Time).GobEncode", Method, 0, ""}, + {"(Time).Hour", Method, 0, ""}, + {"(Time).ISOWeek", Method, 0, ""}, + {"(Time).In", Method, 0, ""}, + {"(Time).IsDST", Method, 17, ""}, + {"(Time).IsZero", Method, 0, ""}, + {"(Time).Local", Method, 0, ""}, + {"(Time).Location", Method, 0, ""}, + {"(Time).MarshalBinary", Method, 2, ""}, + {"(Time).MarshalJSON", Method, 0, ""}, + {"(Time).MarshalText", Method, 2, ""}, + {"(Time).Minute", Method, 0, ""}, + {"(Time).Month", Method, 0, ""}, + {"(Time).Nanosecond", Method, 0, ""}, + {"(Time).Round", Method, 1, ""}, + {"(Time).Second", Method, 0, ""}, + {"(Time).String", Method, 0, ""}, + {"(Time).Sub", Method, 0, ""}, + {"(Time).Truncate", Method, 1, ""}, + {"(Time).UTC", Method, 0, ""}, + {"(Time).Unix", Method, 0, ""}, + {"(Time).UnixMicro", Method, 17, ""}, + {"(Time).UnixMilli", Method, 17, ""}, + {"(Time).UnixNano", Method, 0, ""}, + {"(Time).Weekday", Method, 0, ""}, + {"(Time).Year", Method, 0, ""}, + {"(Time).YearDay", Method, 1, ""}, + {"(Time).Zone", Method, 0, ""}, + {"(Time).ZoneBounds", Method, 19, ""}, + {"(Weekday).String", Method, 0, ""}, + {"ANSIC", Const, 0, ""}, + {"After", Func, 0, "func(d Duration) <-chan Time"}, + {"AfterFunc", Func, 0, "func(d Duration, f func()) *Timer"}, + {"April", Const, 0, ""}, + {"August", Const, 0, ""}, + {"Date", Func, 0, "func(year int, month Month, day int, hour int, min int, sec int, nsec int, loc *Location) Time"}, + {"DateOnly", Const, 20, ""}, + {"DateTime", Const, 20, ""}, + {"December", Const, 0, ""}, + {"Duration", Type, 0, ""}, + {"February", Const, 0, ""}, + {"FixedZone", Func, 0, "func(name string, offset int) *Location"}, + {"Friday", Const, 0, ""}, + {"Hour", Const, 0, ""}, + {"January", Const, 0, ""}, + {"July", Const, 0, ""}, + {"June", Const, 0, ""}, + {"Kitchen", Const, 0, ""}, + {"Layout", Const, 17, ""}, + {"LoadLocation", Func, 0, "func(name string) (*Location, error)"}, + {"LoadLocationFromTZData", Func, 10, "func(name string, data []byte) (*Location, error)"}, + {"Local", Var, 0, ""}, + {"Location", Type, 0, ""}, + {"March", Const, 0, ""}, + {"May", Const, 0, ""}, + {"Microsecond", Const, 0, ""}, + {"Millisecond", Const, 0, ""}, + {"Minute", Const, 0, ""}, + {"Monday", Const, 0, ""}, + {"Month", Type, 0, ""}, + {"Nanosecond", Const, 0, ""}, + {"NewTicker", Func, 0, "func(d Duration) *Ticker"}, + {"NewTimer", Func, 0, "func(d Duration) *Timer"}, + {"November", Const, 0, ""}, + {"Now", Func, 0, "func() Time"}, + {"October", Const, 0, ""}, + {"Parse", Func, 0, "func(layout string, value string) (Time, error)"}, + {"ParseDuration", Func, 0, "func(s string) (Duration, error)"}, + {"ParseError", Type, 0, ""}, + {"ParseError.Layout", Field, 0, ""}, + 
{"ParseError.LayoutElem", Field, 0, ""}, + {"ParseError.Message", Field, 0, ""}, + {"ParseError.Value", Field, 0, ""}, + {"ParseError.ValueElem", Field, 0, ""}, + {"ParseInLocation", Func, 1, "func(layout string, value string, loc *Location) (Time, error)"}, + {"RFC1123", Const, 0, ""}, + {"RFC1123Z", Const, 0, ""}, + {"RFC3339", Const, 0, ""}, + {"RFC3339Nano", Const, 0, ""}, + {"RFC822", Const, 0, ""}, + {"RFC822Z", Const, 0, ""}, + {"RFC850", Const, 0, ""}, + {"RubyDate", Const, 0, ""}, + {"Saturday", Const, 0, ""}, + {"Second", Const, 0, ""}, + {"September", Const, 0, ""}, + {"Since", Func, 0, "func(t Time) Duration"}, + {"Sleep", Func, 0, "func(d Duration)"}, + {"Stamp", Const, 0, ""}, + {"StampMicro", Const, 0, ""}, + {"StampMilli", Const, 0, ""}, + {"StampNano", Const, 0, ""}, + {"Sunday", Const, 0, ""}, + {"Thursday", Const, 0, ""}, + {"Tick", Func, 0, "func(d Duration) <-chan Time"}, + {"Ticker", Type, 0, ""}, + {"Ticker.C", Field, 0, ""}, + {"Time", Type, 0, ""}, + {"TimeOnly", Const, 20, ""}, + {"Timer", Type, 0, ""}, + {"Timer.C", Field, 0, ""}, + {"Tuesday", Const, 0, ""}, + {"UTC", Var, 0, ""}, + {"Unix", Func, 0, "func(sec int64, nsec int64) Time"}, + {"UnixDate", Const, 0, ""}, + {"UnixMicro", Func, 17, "func(usec int64) Time"}, + {"UnixMilli", Func, 17, "func(msec int64) Time"}, + {"Until", Func, 8, "func(t Time) Duration"}, + {"Wednesday", Const, 0, ""}, + {"Weekday", Type, 0, ""}, }, "unicode": { - {"(SpecialCase).ToLower", Method, 0}, - {"(SpecialCase).ToTitle", Method, 0}, - {"(SpecialCase).ToUpper", Method, 0}, - {"ASCII_Hex_Digit", Var, 0}, - {"Adlam", Var, 7}, - {"Ahom", Var, 5}, - {"Anatolian_Hieroglyphs", Var, 5}, - {"Arabic", Var, 0}, - {"Armenian", Var, 0}, - {"Avestan", Var, 0}, - {"AzeriCase", Var, 0}, - {"Balinese", Var, 0}, - {"Bamum", Var, 0}, - {"Bassa_Vah", Var, 4}, - {"Batak", Var, 0}, - {"Bengali", Var, 0}, - {"Bhaiksuki", Var, 7}, - {"Bidi_Control", Var, 0}, - {"Bopomofo", Var, 0}, - {"Brahmi", Var, 0}, - {"Braille", Var, 0}, - {"Buginese", Var, 0}, - {"Buhid", Var, 0}, - {"C", Var, 0}, - {"Canadian_Aboriginal", Var, 0}, - {"Carian", Var, 0}, - {"CaseRange", Type, 0}, - {"CaseRange.Delta", Field, 0}, - {"CaseRange.Hi", Field, 0}, - {"CaseRange.Lo", Field, 0}, - {"CaseRanges", Var, 0}, - {"Categories", Var, 0}, - {"Caucasian_Albanian", Var, 4}, - {"Cc", Var, 0}, - {"Cf", Var, 0}, - {"Chakma", Var, 1}, - {"Cham", Var, 0}, - {"Cherokee", Var, 0}, - {"Chorasmian", Var, 16}, - {"Co", Var, 0}, - {"Common", Var, 0}, - {"Coptic", Var, 0}, - {"Cs", Var, 0}, - {"Cuneiform", Var, 0}, - {"Cypriot", Var, 0}, - {"Cypro_Minoan", Var, 21}, - {"Cyrillic", Var, 0}, - {"Dash", Var, 0}, - {"Deprecated", Var, 0}, - {"Deseret", Var, 0}, - {"Devanagari", Var, 0}, - {"Diacritic", Var, 0}, - {"Digit", Var, 0}, - {"Dives_Akuru", Var, 16}, - {"Dogra", Var, 13}, - {"Duployan", Var, 4}, - {"Egyptian_Hieroglyphs", Var, 0}, - {"Elbasan", Var, 4}, - {"Elymaic", Var, 14}, - {"Ethiopic", Var, 0}, - {"Extender", Var, 0}, - {"FoldCategory", Var, 0}, - {"FoldScript", Var, 0}, - {"Georgian", Var, 0}, - {"Glagolitic", Var, 0}, - {"Gothic", Var, 0}, - {"Grantha", Var, 4}, - {"GraphicRanges", Var, 0}, - {"Greek", Var, 0}, - {"Gujarati", Var, 0}, - {"Gunjala_Gondi", Var, 13}, - {"Gurmukhi", Var, 0}, - {"Han", Var, 0}, - {"Hangul", Var, 0}, - {"Hanifi_Rohingya", Var, 13}, - {"Hanunoo", Var, 0}, - {"Hatran", Var, 5}, - {"Hebrew", Var, 0}, - {"Hex_Digit", Var, 0}, - {"Hiragana", Var, 0}, - {"Hyphen", Var, 0}, - {"IDS_Binary_Operator", Var, 0}, - {"IDS_Trinary_Operator", Var, 0}, - {"Ideographic", 
Var, 0}, - {"Imperial_Aramaic", Var, 0}, - {"In", Func, 2}, - {"Inherited", Var, 0}, - {"Inscriptional_Pahlavi", Var, 0}, - {"Inscriptional_Parthian", Var, 0}, - {"Is", Func, 0}, - {"IsControl", Func, 0}, - {"IsDigit", Func, 0}, - {"IsGraphic", Func, 0}, - {"IsLetter", Func, 0}, - {"IsLower", Func, 0}, - {"IsMark", Func, 0}, - {"IsNumber", Func, 0}, - {"IsOneOf", Func, 0}, - {"IsPrint", Func, 0}, - {"IsPunct", Func, 0}, - {"IsSpace", Func, 0}, - {"IsSymbol", Func, 0}, - {"IsTitle", Func, 0}, - {"IsUpper", Func, 0}, - {"Javanese", Var, 0}, - {"Join_Control", Var, 0}, - {"Kaithi", Var, 0}, - {"Kannada", Var, 0}, - {"Katakana", Var, 0}, - {"Kawi", Var, 21}, - {"Kayah_Li", Var, 0}, - {"Kharoshthi", Var, 0}, - {"Khitan_Small_Script", Var, 16}, - {"Khmer", Var, 0}, - {"Khojki", Var, 4}, - {"Khudawadi", Var, 4}, - {"L", Var, 0}, - {"Lao", Var, 0}, - {"Latin", Var, 0}, - {"Lepcha", Var, 0}, - {"Letter", Var, 0}, - {"Limbu", Var, 0}, - {"Linear_A", Var, 4}, - {"Linear_B", Var, 0}, - {"Lisu", Var, 0}, - {"Ll", Var, 0}, - {"Lm", Var, 0}, - {"Lo", Var, 0}, - {"Logical_Order_Exception", Var, 0}, - {"Lower", Var, 0}, - {"LowerCase", Const, 0}, - {"Lt", Var, 0}, - {"Lu", Var, 0}, - {"Lycian", Var, 0}, - {"Lydian", Var, 0}, - {"M", Var, 0}, - {"Mahajani", Var, 4}, - {"Makasar", Var, 13}, - {"Malayalam", Var, 0}, - {"Mandaic", Var, 0}, - {"Manichaean", Var, 4}, - {"Marchen", Var, 7}, - {"Mark", Var, 0}, - {"Masaram_Gondi", Var, 10}, - {"MaxASCII", Const, 0}, - {"MaxCase", Const, 0}, - {"MaxLatin1", Const, 0}, - {"MaxRune", Const, 0}, - {"Mc", Var, 0}, - {"Me", Var, 0}, - {"Medefaidrin", Var, 13}, - {"Meetei_Mayek", Var, 0}, - {"Mende_Kikakui", Var, 4}, - {"Meroitic_Cursive", Var, 1}, - {"Meroitic_Hieroglyphs", Var, 1}, - {"Miao", Var, 1}, - {"Mn", Var, 0}, - {"Modi", Var, 4}, - {"Mongolian", Var, 0}, - {"Mro", Var, 4}, - {"Multani", Var, 5}, - {"Myanmar", Var, 0}, - {"N", Var, 0}, - {"Nabataean", Var, 4}, - {"Nag_Mundari", Var, 21}, - {"Nandinagari", Var, 14}, - {"Nd", Var, 0}, - {"New_Tai_Lue", Var, 0}, - {"Newa", Var, 7}, - {"Nko", Var, 0}, - {"Nl", Var, 0}, - {"No", Var, 0}, - {"Noncharacter_Code_Point", Var, 0}, - {"Number", Var, 0}, - {"Nushu", Var, 10}, - {"Nyiakeng_Puachue_Hmong", Var, 14}, - {"Ogham", Var, 0}, - {"Ol_Chiki", Var, 0}, - {"Old_Hungarian", Var, 5}, - {"Old_Italic", Var, 0}, - {"Old_North_Arabian", Var, 4}, - {"Old_Permic", Var, 4}, - {"Old_Persian", Var, 0}, - {"Old_Sogdian", Var, 13}, - {"Old_South_Arabian", Var, 0}, - {"Old_Turkic", Var, 0}, - {"Old_Uyghur", Var, 21}, - {"Oriya", Var, 0}, - {"Osage", Var, 7}, - {"Osmanya", Var, 0}, - {"Other", Var, 0}, - {"Other_Alphabetic", Var, 0}, - {"Other_Default_Ignorable_Code_Point", Var, 0}, - {"Other_Grapheme_Extend", Var, 0}, - {"Other_ID_Continue", Var, 0}, - {"Other_ID_Start", Var, 0}, - {"Other_Lowercase", Var, 0}, - {"Other_Math", Var, 0}, - {"Other_Uppercase", Var, 0}, - {"P", Var, 0}, - {"Pahawh_Hmong", Var, 4}, - {"Palmyrene", Var, 4}, - {"Pattern_Syntax", Var, 0}, - {"Pattern_White_Space", Var, 0}, - {"Pau_Cin_Hau", Var, 4}, - {"Pc", Var, 0}, - {"Pd", Var, 0}, - {"Pe", Var, 0}, - {"Pf", Var, 0}, - {"Phags_Pa", Var, 0}, - {"Phoenician", Var, 0}, - {"Pi", Var, 0}, - {"Po", Var, 0}, - {"Prepended_Concatenation_Mark", Var, 7}, - {"PrintRanges", Var, 0}, - {"Properties", Var, 0}, - {"Ps", Var, 0}, - {"Psalter_Pahlavi", Var, 4}, - {"Punct", Var, 0}, - {"Quotation_Mark", Var, 0}, - {"Radical", Var, 0}, - {"Range16", Type, 0}, - {"Range16.Hi", Field, 0}, - {"Range16.Lo", Field, 0}, - {"Range16.Stride", Field, 0}, - {"Range32", Type, 0}, - 
{"Range32.Hi", Field, 0}, - {"Range32.Lo", Field, 0}, - {"Range32.Stride", Field, 0}, - {"RangeTable", Type, 0}, - {"RangeTable.LatinOffset", Field, 1}, - {"RangeTable.R16", Field, 0}, - {"RangeTable.R32", Field, 0}, - {"Regional_Indicator", Var, 10}, - {"Rejang", Var, 0}, - {"ReplacementChar", Const, 0}, - {"Runic", Var, 0}, - {"S", Var, 0}, - {"STerm", Var, 0}, - {"Samaritan", Var, 0}, - {"Saurashtra", Var, 0}, - {"Sc", Var, 0}, - {"Scripts", Var, 0}, - {"Sentence_Terminal", Var, 7}, - {"Sharada", Var, 1}, - {"Shavian", Var, 0}, - {"Siddham", Var, 4}, - {"SignWriting", Var, 5}, - {"SimpleFold", Func, 0}, - {"Sinhala", Var, 0}, - {"Sk", Var, 0}, - {"Sm", Var, 0}, - {"So", Var, 0}, - {"Soft_Dotted", Var, 0}, - {"Sogdian", Var, 13}, - {"Sora_Sompeng", Var, 1}, - {"Soyombo", Var, 10}, - {"Space", Var, 0}, - {"SpecialCase", Type, 0}, - {"Sundanese", Var, 0}, - {"Syloti_Nagri", Var, 0}, - {"Symbol", Var, 0}, - {"Syriac", Var, 0}, - {"Tagalog", Var, 0}, - {"Tagbanwa", Var, 0}, - {"Tai_Le", Var, 0}, - {"Tai_Tham", Var, 0}, - {"Tai_Viet", Var, 0}, - {"Takri", Var, 1}, - {"Tamil", Var, 0}, - {"Tangsa", Var, 21}, - {"Tangut", Var, 7}, - {"Telugu", Var, 0}, - {"Terminal_Punctuation", Var, 0}, - {"Thaana", Var, 0}, - {"Thai", Var, 0}, - {"Tibetan", Var, 0}, - {"Tifinagh", Var, 0}, - {"Tirhuta", Var, 4}, - {"Title", Var, 0}, - {"TitleCase", Const, 0}, - {"To", Func, 0}, - {"ToLower", Func, 0}, - {"ToTitle", Func, 0}, - {"ToUpper", Func, 0}, - {"Toto", Var, 21}, - {"TurkishCase", Var, 0}, - {"Ugaritic", Var, 0}, - {"Unified_Ideograph", Var, 0}, - {"Upper", Var, 0}, - {"UpperCase", Const, 0}, - {"UpperLower", Const, 0}, - {"Vai", Var, 0}, - {"Variation_Selector", Var, 0}, - {"Version", Const, 0}, - {"Vithkuqi", Var, 21}, - {"Wancho", Var, 14}, - {"Warang_Citi", Var, 4}, - {"White_Space", Var, 0}, - {"Yezidi", Var, 16}, - {"Yi", Var, 0}, - {"Z", Var, 0}, - {"Zanabazar_Square", Var, 10}, - {"Zl", Var, 0}, - {"Zp", Var, 0}, - {"Zs", Var, 0}, + {"(SpecialCase).ToLower", Method, 0, ""}, + {"(SpecialCase).ToTitle", Method, 0, ""}, + {"(SpecialCase).ToUpper", Method, 0, ""}, + {"ASCII_Hex_Digit", Var, 0, ""}, + {"Adlam", Var, 7, ""}, + {"Ahom", Var, 5, ""}, + {"Anatolian_Hieroglyphs", Var, 5, ""}, + {"Arabic", Var, 0, ""}, + {"Armenian", Var, 0, ""}, + {"Avestan", Var, 0, ""}, + {"AzeriCase", Var, 0, ""}, + {"Balinese", Var, 0, ""}, + {"Bamum", Var, 0, ""}, + {"Bassa_Vah", Var, 4, ""}, + {"Batak", Var, 0, ""}, + {"Bengali", Var, 0, ""}, + {"Bhaiksuki", Var, 7, ""}, + {"Bidi_Control", Var, 0, ""}, + {"Bopomofo", Var, 0, ""}, + {"Brahmi", Var, 0, ""}, + {"Braille", Var, 0, ""}, + {"Buginese", Var, 0, ""}, + {"Buhid", Var, 0, ""}, + {"C", Var, 0, ""}, + {"Canadian_Aboriginal", Var, 0, ""}, + {"Carian", Var, 0, ""}, + {"CaseRange", Type, 0, ""}, + {"CaseRange.Delta", Field, 0, ""}, + {"CaseRange.Hi", Field, 0, ""}, + {"CaseRange.Lo", Field, 0, ""}, + {"CaseRanges", Var, 0, ""}, + {"Categories", Var, 0, ""}, + {"Caucasian_Albanian", Var, 4, ""}, + {"Cc", Var, 0, ""}, + {"Cf", Var, 0, ""}, + {"Chakma", Var, 1, ""}, + {"Cham", Var, 0, ""}, + {"Cherokee", Var, 0, ""}, + {"Chorasmian", Var, 16, ""}, + {"Co", Var, 0, ""}, + {"Common", Var, 0, ""}, + {"Coptic", Var, 0, ""}, + {"Cs", Var, 0, ""}, + {"Cuneiform", Var, 0, ""}, + {"Cypriot", Var, 0, ""}, + {"Cypro_Minoan", Var, 21, ""}, + {"Cyrillic", Var, 0, ""}, + {"Dash", Var, 0, ""}, + {"Deprecated", Var, 0, ""}, + {"Deseret", Var, 0, ""}, + {"Devanagari", Var, 0, ""}, + {"Diacritic", Var, 0, ""}, + {"Digit", Var, 0, ""}, + {"Dives_Akuru", Var, 16, ""}, + {"Dogra", Var, 
13, ""}, + {"Duployan", Var, 4, ""}, + {"Egyptian_Hieroglyphs", Var, 0, ""}, + {"Elbasan", Var, 4, ""}, + {"Elymaic", Var, 14, ""}, + {"Ethiopic", Var, 0, ""}, + {"Extender", Var, 0, ""}, + {"FoldCategory", Var, 0, ""}, + {"FoldScript", Var, 0, ""}, + {"Georgian", Var, 0, ""}, + {"Glagolitic", Var, 0, ""}, + {"Gothic", Var, 0, ""}, + {"Grantha", Var, 4, ""}, + {"GraphicRanges", Var, 0, ""}, + {"Greek", Var, 0, ""}, + {"Gujarati", Var, 0, ""}, + {"Gunjala_Gondi", Var, 13, ""}, + {"Gurmukhi", Var, 0, ""}, + {"Han", Var, 0, ""}, + {"Hangul", Var, 0, ""}, + {"Hanifi_Rohingya", Var, 13, ""}, + {"Hanunoo", Var, 0, ""}, + {"Hatran", Var, 5, ""}, + {"Hebrew", Var, 0, ""}, + {"Hex_Digit", Var, 0, ""}, + {"Hiragana", Var, 0, ""}, + {"Hyphen", Var, 0, ""}, + {"IDS_Binary_Operator", Var, 0, ""}, + {"IDS_Trinary_Operator", Var, 0, ""}, + {"Ideographic", Var, 0, ""}, + {"Imperial_Aramaic", Var, 0, ""}, + {"In", Func, 2, "func(r rune, ranges ...*RangeTable) bool"}, + {"Inherited", Var, 0, ""}, + {"Inscriptional_Pahlavi", Var, 0, ""}, + {"Inscriptional_Parthian", Var, 0, ""}, + {"Is", Func, 0, "func(rangeTab *RangeTable, r rune) bool"}, + {"IsControl", Func, 0, "func(r rune) bool"}, + {"IsDigit", Func, 0, "func(r rune) bool"}, + {"IsGraphic", Func, 0, "func(r rune) bool"}, + {"IsLetter", Func, 0, "func(r rune) bool"}, + {"IsLower", Func, 0, "func(r rune) bool"}, + {"IsMark", Func, 0, "func(r rune) bool"}, + {"IsNumber", Func, 0, "func(r rune) bool"}, + {"IsOneOf", Func, 0, "func(ranges []*RangeTable, r rune) bool"}, + {"IsPrint", Func, 0, "func(r rune) bool"}, + {"IsPunct", Func, 0, "func(r rune) bool"}, + {"IsSpace", Func, 0, "func(r rune) bool"}, + {"IsSymbol", Func, 0, "func(r rune) bool"}, + {"IsTitle", Func, 0, "func(r rune) bool"}, + {"IsUpper", Func, 0, "func(r rune) bool"}, + {"Javanese", Var, 0, ""}, + {"Join_Control", Var, 0, ""}, + {"Kaithi", Var, 0, ""}, + {"Kannada", Var, 0, ""}, + {"Katakana", Var, 0, ""}, + {"Kawi", Var, 21, ""}, + {"Kayah_Li", Var, 0, ""}, + {"Kharoshthi", Var, 0, ""}, + {"Khitan_Small_Script", Var, 16, ""}, + {"Khmer", Var, 0, ""}, + {"Khojki", Var, 4, ""}, + {"Khudawadi", Var, 4, ""}, + {"L", Var, 0, ""}, + {"Lao", Var, 0, ""}, + {"Latin", Var, 0, ""}, + {"Lepcha", Var, 0, ""}, + {"Letter", Var, 0, ""}, + {"Limbu", Var, 0, ""}, + {"Linear_A", Var, 4, ""}, + {"Linear_B", Var, 0, ""}, + {"Lisu", Var, 0, ""}, + {"Ll", Var, 0, ""}, + {"Lm", Var, 0, ""}, + {"Lo", Var, 0, ""}, + {"Logical_Order_Exception", Var, 0, ""}, + {"Lower", Var, 0, ""}, + {"LowerCase", Const, 0, ""}, + {"Lt", Var, 0, ""}, + {"Lu", Var, 0, ""}, + {"Lycian", Var, 0, ""}, + {"Lydian", Var, 0, ""}, + {"M", Var, 0, ""}, + {"Mahajani", Var, 4, ""}, + {"Makasar", Var, 13, ""}, + {"Malayalam", Var, 0, ""}, + {"Mandaic", Var, 0, ""}, + {"Manichaean", Var, 4, ""}, + {"Marchen", Var, 7, ""}, + {"Mark", Var, 0, ""}, + {"Masaram_Gondi", Var, 10, ""}, + {"MaxASCII", Const, 0, ""}, + {"MaxCase", Const, 0, ""}, + {"MaxLatin1", Const, 0, ""}, + {"MaxRune", Const, 0, ""}, + {"Mc", Var, 0, ""}, + {"Me", Var, 0, ""}, + {"Medefaidrin", Var, 13, ""}, + {"Meetei_Mayek", Var, 0, ""}, + {"Mende_Kikakui", Var, 4, ""}, + {"Meroitic_Cursive", Var, 1, ""}, + {"Meroitic_Hieroglyphs", Var, 1, ""}, + {"Miao", Var, 1, ""}, + {"Mn", Var, 0, ""}, + {"Modi", Var, 4, ""}, + {"Mongolian", Var, 0, ""}, + {"Mro", Var, 4, ""}, + {"Multani", Var, 5, ""}, + {"Myanmar", Var, 0, ""}, + {"N", Var, 0, ""}, + {"Nabataean", Var, 4, ""}, + {"Nag_Mundari", Var, 21, ""}, + {"Nandinagari", Var, 14, ""}, + {"Nd", Var, 0, ""}, + {"New_Tai_Lue", Var, 0, ""}, 
+ {"Newa", Var, 7, ""}, + {"Nko", Var, 0, ""}, + {"Nl", Var, 0, ""}, + {"No", Var, 0, ""}, + {"Noncharacter_Code_Point", Var, 0, ""}, + {"Number", Var, 0, ""}, + {"Nushu", Var, 10, ""}, + {"Nyiakeng_Puachue_Hmong", Var, 14, ""}, + {"Ogham", Var, 0, ""}, + {"Ol_Chiki", Var, 0, ""}, + {"Old_Hungarian", Var, 5, ""}, + {"Old_Italic", Var, 0, ""}, + {"Old_North_Arabian", Var, 4, ""}, + {"Old_Permic", Var, 4, ""}, + {"Old_Persian", Var, 0, ""}, + {"Old_Sogdian", Var, 13, ""}, + {"Old_South_Arabian", Var, 0, ""}, + {"Old_Turkic", Var, 0, ""}, + {"Old_Uyghur", Var, 21, ""}, + {"Oriya", Var, 0, ""}, + {"Osage", Var, 7, ""}, + {"Osmanya", Var, 0, ""}, + {"Other", Var, 0, ""}, + {"Other_Alphabetic", Var, 0, ""}, + {"Other_Default_Ignorable_Code_Point", Var, 0, ""}, + {"Other_Grapheme_Extend", Var, 0, ""}, + {"Other_ID_Continue", Var, 0, ""}, + {"Other_ID_Start", Var, 0, ""}, + {"Other_Lowercase", Var, 0, ""}, + {"Other_Math", Var, 0, ""}, + {"Other_Uppercase", Var, 0, ""}, + {"P", Var, 0, ""}, + {"Pahawh_Hmong", Var, 4, ""}, + {"Palmyrene", Var, 4, ""}, + {"Pattern_Syntax", Var, 0, ""}, + {"Pattern_White_Space", Var, 0, ""}, + {"Pau_Cin_Hau", Var, 4, ""}, + {"Pc", Var, 0, ""}, + {"Pd", Var, 0, ""}, + {"Pe", Var, 0, ""}, + {"Pf", Var, 0, ""}, + {"Phags_Pa", Var, 0, ""}, + {"Phoenician", Var, 0, ""}, + {"Pi", Var, 0, ""}, + {"Po", Var, 0, ""}, + {"Prepended_Concatenation_Mark", Var, 7, ""}, + {"PrintRanges", Var, 0, ""}, + {"Properties", Var, 0, ""}, + {"Ps", Var, 0, ""}, + {"Psalter_Pahlavi", Var, 4, ""}, + {"Punct", Var, 0, ""}, + {"Quotation_Mark", Var, 0, ""}, + {"Radical", Var, 0, ""}, + {"Range16", Type, 0, ""}, + {"Range16.Hi", Field, 0, ""}, + {"Range16.Lo", Field, 0, ""}, + {"Range16.Stride", Field, 0, ""}, + {"Range32", Type, 0, ""}, + {"Range32.Hi", Field, 0, ""}, + {"Range32.Lo", Field, 0, ""}, + {"Range32.Stride", Field, 0, ""}, + {"RangeTable", Type, 0, ""}, + {"RangeTable.LatinOffset", Field, 1, ""}, + {"RangeTable.R16", Field, 0, ""}, + {"RangeTable.R32", Field, 0, ""}, + {"Regional_Indicator", Var, 10, ""}, + {"Rejang", Var, 0, ""}, + {"ReplacementChar", Const, 0, ""}, + {"Runic", Var, 0, ""}, + {"S", Var, 0, ""}, + {"STerm", Var, 0, ""}, + {"Samaritan", Var, 0, ""}, + {"Saurashtra", Var, 0, ""}, + {"Sc", Var, 0, ""}, + {"Scripts", Var, 0, ""}, + {"Sentence_Terminal", Var, 7, ""}, + {"Sharada", Var, 1, ""}, + {"Shavian", Var, 0, ""}, + {"Siddham", Var, 4, ""}, + {"SignWriting", Var, 5, ""}, + {"SimpleFold", Func, 0, "func(r rune) rune"}, + {"Sinhala", Var, 0, ""}, + {"Sk", Var, 0, ""}, + {"Sm", Var, 0, ""}, + {"So", Var, 0, ""}, + {"Soft_Dotted", Var, 0, ""}, + {"Sogdian", Var, 13, ""}, + {"Sora_Sompeng", Var, 1, ""}, + {"Soyombo", Var, 10, ""}, + {"Space", Var, 0, ""}, + {"SpecialCase", Type, 0, ""}, + {"Sundanese", Var, 0, ""}, + {"Syloti_Nagri", Var, 0, ""}, + {"Symbol", Var, 0, ""}, + {"Syriac", Var, 0, ""}, + {"Tagalog", Var, 0, ""}, + {"Tagbanwa", Var, 0, ""}, + {"Tai_Le", Var, 0, ""}, + {"Tai_Tham", Var, 0, ""}, + {"Tai_Viet", Var, 0, ""}, + {"Takri", Var, 1, ""}, + {"Tamil", Var, 0, ""}, + {"Tangsa", Var, 21, ""}, + {"Tangut", Var, 7, ""}, + {"Telugu", Var, 0, ""}, + {"Terminal_Punctuation", Var, 0, ""}, + {"Thaana", Var, 0, ""}, + {"Thai", Var, 0, ""}, + {"Tibetan", Var, 0, ""}, + {"Tifinagh", Var, 0, ""}, + {"Tirhuta", Var, 4, ""}, + {"Title", Var, 0, ""}, + {"TitleCase", Const, 0, ""}, + {"To", Func, 0, "func(_case int, r rune) rune"}, + {"ToLower", Func, 0, "func(r rune) rune"}, + {"ToTitle", Func, 0, "func(r rune) rune"}, + {"ToUpper", Func, 0, "func(r rune) rune"}, + 
{"Toto", Var, 21, ""}, + {"TurkishCase", Var, 0, ""}, + {"Ugaritic", Var, 0, ""}, + {"Unified_Ideograph", Var, 0, ""}, + {"Upper", Var, 0, ""}, + {"UpperCase", Const, 0, ""}, + {"UpperLower", Const, 0, ""}, + {"Vai", Var, 0, ""}, + {"Variation_Selector", Var, 0, ""}, + {"Version", Const, 0, ""}, + {"Vithkuqi", Var, 21, ""}, + {"Wancho", Var, 14, ""}, + {"Warang_Citi", Var, 4, ""}, + {"White_Space", Var, 0, ""}, + {"Yezidi", Var, 16, ""}, + {"Yi", Var, 0, ""}, + {"Z", Var, 0, ""}, + {"Zanabazar_Square", Var, 10, ""}, + {"Zl", Var, 0, ""}, + {"Zp", Var, 0, ""}, + {"Zs", Var, 0, ""}, }, "unicode/utf16": { - {"AppendRune", Func, 20}, - {"Decode", Func, 0}, - {"DecodeRune", Func, 0}, - {"Encode", Func, 0}, - {"EncodeRune", Func, 0}, - {"IsSurrogate", Func, 0}, - {"RuneLen", Func, 23}, + {"AppendRune", Func, 20, "func(a []uint16, r rune) []uint16"}, + {"Decode", Func, 0, "func(s []uint16) []rune"}, + {"DecodeRune", Func, 0, "func(r1 rune, r2 rune) rune"}, + {"Encode", Func, 0, "func(s []rune) []uint16"}, + {"EncodeRune", Func, 0, "func(r rune) (r1 rune, r2 rune)"}, + {"IsSurrogate", Func, 0, "func(r rune) bool"}, + {"RuneLen", Func, 23, "func(r rune) int"}, }, "unicode/utf8": { - {"AppendRune", Func, 18}, - {"DecodeLastRune", Func, 0}, - {"DecodeLastRuneInString", Func, 0}, - {"DecodeRune", Func, 0}, - {"DecodeRuneInString", Func, 0}, - {"EncodeRune", Func, 0}, - {"FullRune", Func, 0}, - {"FullRuneInString", Func, 0}, - {"MaxRune", Const, 0}, - {"RuneCount", Func, 0}, - {"RuneCountInString", Func, 0}, - {"RuneError", Const, 0}, - {"RuneLen", Func, 0}, - {"RuneSelf", Const, 0}, - {"RuneStart", Func, 0}, - {"UTFMax", Const, 0}, - {"Valid", Func, 0}, - {"ValidRune", Func, 1}, - {"ValidString", Func, 0}, + {"AppendRune", Func, 18, "func(p []byte, r rune) []byte"}, + {"DecodeLastRune", Func, 0, "func(p []byte) (r rune, size int)"}, + {"DecodeLastRuneInString", Func, 0, "func(s string) (r rune, size int)"}, + {"DecodeRune", Func, 0, "func(p []byte) (r rune, size int)"}, + {"DecodeRuneInString", Func, 0, "func(s string) (r rune, size int)"}, + {"EncodeRune", Func, 0, "func(p []byte, r rune) int"}, + {"FullRune", Func, 0, "func(p []byte) bool"}, + {"FullRuneInString", Func, 0, "func(s string) bool"}, + {"MaxRune", Const, 0, ""}, + {"RuneCount", Func, 0, "func(p []byte) int"}, + {"RuneCountInString", Func, 0, "func(s string) (n int)"}, + {"RuneError", Const, 0, ""}, + {"RuneLen", Func, 0, "func(r rune) int"}, + {"RuneSelf", Const, 0, ""}, + {"RuneStart", Func, 0, "func(b byte) bool"}, + {"UTFMax", Const, 0, ""}, + {"Valid", Func, 0, "func(p []byte) bool"}, + {"ValidRune", Func, 1, "func(r rune) bool"}, + {"ValidString", Func, 0, "func(s string) bool"}, }, "unique": { - {"(Handle).Value", Method, 23}, - {"Handle", Type, 23}, - {"Make", Func, 23}, + {"(Handle).Value", Method, 23, ""}, + {"Handle", Type, 23, ""}, + {"Make", Func, 23, "func[T comparable](value T) Handle[T]"}, }, "unsafe": { - {"Add", Func, 0}, - {"Alignof", Func, 0}, - {"Offsetof", Func, 0}, - {"Pointer", Type, 0}, - {"Sizeof", Func, 0}, - {"Slice", Func, 0}, - {"SliceData", Func, 0}, - {"String", Func, 0}, - {"StringData", Func, 0}, + {"Add", Func, 0, ""}, + {"Alignof", Func, 0, ""}, + {"Offsetof", Func, 0, ""}, + {"Pointer", Type, 0, ""}, + {"Sizeof", Func, 0, ""}, + {"Slice", Func, 0, ""}, + {"SliceData", Func, 0, ""}, + {"String", Func, 0, ""}, + {"StringData", Func, 0, ""}, }, "weak": { - {"(Pointer).Value", Method, 24}, - {"Make", Func, 24}, - {"Pointer", Type, 24}, + {"(Pointer).Value", Method, 24, ""}, + {"Make", Func, 24, "func[T 
any](ptr *T) Pointer[T]"}, + {"Pointer", Type, 24, ""}, }, } diff --git a/internal/stdlib/stdlib.go b/internal/stdlib/stdlib.go index 3d96d3bf686..e223e0f3405 100644 --- a/internal/stdlib/stdlib.go +++ b/internal/stdlib/stdlib.go @@ -18,6 +18,14 @@ type Symbol struct { Name string Kind Kind Version Version // Go version that first included the symbol + // Signature provides the type of a function (defined only for Kind=Func). + // Imported types are denoted as pkg.T; pkg is not fully qualified. + // TODO(adonovan): use an unambiguous encoding that is parseable. + // + // Example2: + // func[M ~map[K]V, K comparable, V any](m M) M + // func(fi fs.FileInfo, link string) (*Header, error) + Signature string // if Kind == stdlib.Func } // A Kind indicates the kind of a symbol: diff --git a/internal/stdlib/testdata/nethttp.deps b/internal/stdlib/testdata/nethttp.deps index 71e58a0c693..658c4f1635c 100644 --- a/internal/stdlib/testdata/nethttp.deps +++ b/internal/stdlib/testdata/nethttp.deps @@ -21,6 +21,7 @@ internal/runtime/sys internal/runtime/maps internal/runtime/syscall internal/stringslite +internal/trace/tracev2 runtime internal/reflectlite errors @@ -166,4 +167,5 @@ mime/multipart net/http/httptrace net/http/internal net/http/internal/ascii +net/http/internal/httpcommon net/http diff --git a/internal/stdlib/testdata/nethttp.imports b/internal/stdlib/testdata/nethttp.imports index de41e46c0fe..82dd1e613f6 100644 --- a/internal/stdlib/testdata/nethttp.imports +++ b/internal/stdlib/testdata/nethttp.imports @@ -27,6 +27,7 @@ net net/http/httptrace net/http/internal net/http/internal/ascii +net/http/internal/httpcommon net/textproto net/url os @@ -40,6 +41,7 @@ strconv strings sync sync/atomic +syscall time unicode unicode/utf8 From 1587f35e0f29901400db45729165312c35747447 Mon Sep 17 00:00:00 2001 From: xieyuschen Date: Wed, 30 Apr 2025 23:49:58 -0600 Subject: [PATCH 257/270] gopls/internal/server/completion: get correct surrounding for second lhs Currently, gopls doesn't provide an ident for the second lhs like 'lhs1, lhs2' because the expr produces a StmtExpr rathan than BadExpr. As a result, gopls cannot calculate a correct surrounding when offer a completion. This CL checks the StmtExpr and offers a correct surrounding as long as the ast node could be used as a lhs, so gopls could produce correct replace range when returning completion item as response. Fixes golang/go#72753 Change-Id: I3de2db9180fda10674ddabafdce96662cf725c7e Reviewed-on: https://go-review.googlesource.com/c/tools/+/668855 Reviewed-by: Alan Donovan Auto-Submit: Alan Donovan Reviewed-by: Hongxiang Jiang LUCI-TryBot-Result: Go LUCI --- .../internal/golang/completion/completion.go | 13 ++- .../marker/testdata/completion/issue72753.txt | 86 +++++++++++++++++++ 2 files changed, 98 insertions(+), 1 deletion(-) create mode 100644 gopls/internal/test/marker/testdata/completion/issue72753.txt diff --git a/gopls/internal/golang/completion/completion.go b/gopls/internal/golang/completion/completion.go index 35d8dfff6a2..83be9f2ed80 100644 --- a/gopls/internal/golang/completion/completion.go +++ b/gopls/internal/golang/completion/completion.go @@ -787,7 +787,6 @@ func (c *completer) containingIdent(src []byte) *ast.Ident { } fakeIdent := &ast.Ident{Name: lit, NamePos: pos} - if _, isBadDecl := c.path[0].(*ast.BadDecl); isBadDecl { // You don't get *ast.Idents at the file level, so look for bad // decls and use the manually extracted token. 
@@ -802,6 +801,18 @@ func (c *completer) containingIdent(src []byte) *ast.Ident { // is a keyword. This improves completion after an "accidental // keyword", e.g. completing to "variance" in "someFunc(var<>)". return fakeIdent + } else if block, ok := c.path[0].(*ast.BlockStmt); ok && len(block.List) != 0 { + last := block.List[len(block.List)-1] + // Handle incomplete AssignStmt with multiple left-hand vars: + // var left, right int + // left, ri‸ -> "right" + if expr, ok := last.(*ast.ExprStmt); ok && + (is[*ast.Ident](expr.X) || + is[*ast.SelectorExpr](expr.X) || + is[*ast.IndexExpr](expr.X) || + is[*ast.StarExpr](expr.X)) { + return fakeIdent + } } return nil diff --git a/gopls/internal/test/marker/testdata/completion/issue72753.txt b/gopls/internal/test/marker/testdata/completion/issue72753.txt new file mode 100644 index 00000000000..a548858492b --- /dev/null +++ b/gopls/internal/test/marker/testdata/completion/issue72753.txt @@ -0,0 +1,86 @@ +This test checks that completion gives correct completion for +incomplete AssignStmt with multiple left-hand vars. + +-- flags -- +-ignore_extra_diags + +-- settings.json -- +{ + "usePlaceholders": false +} + +-- go.mod -- +module mod.test + +go 1.21 + +-- string.go -- +package a + +func _(left, right string){ + left, ri //@acceptcompletion(re"ri()", "right", string) +} + +-- @string/string.go -- +package a + +func _(left, right string){ + left, right //@acceptcompletion(re"ri()", "right", string) +} + +-- array.go -- +package a +func _(right string) { + var left [3]int + left[0], ri //@acceptcompletion(re"ri()", "right", array) +} + +-- @array/array.go -- +package a +func _(right string) { + var left [3]int + left[0], right //@acceptcompletion(re"ri()", "right", array) +} + +-- slice.go -- +package a +func _(right string) { + var left []int + left[0], ri //@acceptcompletion(re"ri()", "right", slice) +} + +-- @slice/slice.go -- +package a +func _(right string) { + var left []int + left[0], right //@acceptcompletion(re"ri()", "right", slice) +} + +-- map.go -- +package a +func _(right string) { + var left map[int]int + left[0], ri //@acceptcompletion(re"ri()", "right", map) +} + +-- @map/map.go -- +package a +func _(right string) { + var left map[int]int + left[0], right //@acceptcompletion(re"ri()", "right", map) +} + +-- star.go -- +package a +func _(right string) { + var left *int + *left, ri //@acceptcompletion(re"ri()", "right", star) +} + +-- @star/star.go -- +package a +func _(right string) { + var left *int + *left, right //@acceptcompletion(re"ri()", "right", star) +} + From 79062274589e174c5f7047afb9ad579215fe29c3 Mon Sep 17 00:00:00 2001 From: Rob Findley Date: Tue, 29 Apr 2025 15:08:52 +0000 Subject: [PATCH 258/270] internal/mcp: add a command transport for connecting to a sidecar Add a missing CommandTransport that communicates with a sidecar process over newline-delimited stdin/stdout, the counterpart to the existing StdIOTransport. Add a test using the usual TestMain trick. 
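For illustration, a client might use the new transport roughly as sketched below. The "myserver" binary and the "greet" tool are assumptions for the example (the tool matches the one registered by the test server added in this CL), and the import path is the internal package exercised by that test:

    package main

    import (
        "context"
        "fmt"
        "log"
        "os/exec"

        "golang.org/x/tools/internal/mcp"
    )

    func main() {
        ctx := context.Background()
        // Start a hypothetical sidecar MCP server that speaks
        // newline-delimited JSON over its stdin/stdout.
        cmd := exec.Command("myserver")
        client := mcp.NewClient("client", "v0.0.1", nil)
        conn, err := client.Connect(ctx, mcp.NewCommandTransport(cmd), nil)
        if err != nil {
            log.Fatal(err)
        }
        // Closing the connection closes the child's stdin and waits for it
        // to exit, escalating to SIGTERM and then SIGKILL if it does not.
        defer conn.Close()
        // Assumes the sidecar registered a "greet" tool.
        content, err := conn.CallTool(ctx, "greet", map[string]any{"name": "user"})
        if err != nil {
            log.Fatal(err)
        }
        fmt.Println(content)
    }
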
Change-Id: I170ba78faf6b6d7fca7da1af11bf92e919b11ed5 Reviewed-on: https://go-review.googlesource.com/c/tools/+/668895 LUCI-TryBot-Result: Go LUCI Reviewed-by: Jonathan Amsterdam --- internal/mcp/cmd.go | 106 ++++++++++++++++++++++++++++++++++++++ internal/mcp/cmd_test.go | 67 ++++++++++++++++++++++++ internal/mcp/transport.go | 6 +-- 3 files changed, 175 insertions(+), 4 deletions(-) create mode 100644 internal/mcp/cmd.go create mode 100644 internal/mcp/cmd_test.go diff --git a/internal/mcp/cmd.go b/internal/mcp/cmd.go new file mode 100644 index 00000000000..6cb8fdf449f --- /dev/null +++ b/internal/mcp/cmd.go @@ -0,0 +1,106 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mcp + +import ( + "context" + "fmt" + "io" + "os/exec" + "syscall" + "time" +) + +// A CommandTransport is a [Transport] that runs a command and communicates +// with it over stdin/stdout, using newline-delimited JSON. +type CommandTransport struct { + cmd *exec.Cmd +} + +// NewCommandTransport returns a [CommandTransport] that runs the given command +// and communicates with it over stdin/stdout. +// +// The resulting transport takes ownership of the command, starting it during +// [CommandTransport.Connect], and stopping it when the connection is closed. +func NewCommandTransport(cmd *exec.Cmd) *CommandTransport { + return &CommandTransport{cmd} +} + +// Connect starts the command, and connects to it over stdin/stdout. +func (t *CommandTransport) Connect(ctx context.Context) (Stream, error) { + stdout, err := t.cmd.StdoutPipe() + if err != nil { + return nil, err + } + stdout = io.NopCloser(stdout) // close the connection by closing stdin, not stdout + stdin, err := t.cmd.StdinPipe() + if err != nil { + return nil, err + } + if err := t.cmd.Start(); err != nil { + return nil, err + } + return newIOStream(&pipeRWC{t.cmd, stdout, stdin}), nil +} + +// A pipeRWC is an io.ReadWriteCloser that communicates with a subprocess over +// stdin/stdout pipes. +type pipeRWC struct { + cmd *exec.Cmd + stdout io.ReadCloser + stdin io.WriteCloser +} + +func (s *pipeRWC) Read(p []byte) (n int, err error) { + return s.stdout.Read(p) +} + +func (s *pipeRWC) Write(p []byte) (n int, err error) { + return s.stdin.Write(p) +} + +// Close closes the input stream to the child process, and awaits normal +// termination of the command. If the command does not exit, it is signalled to +// terminate, and then eventually killed. +func (s *pipeRWC) Close() error { + // Spec: + // "For the stdio transport, the client SHOULD initiate shutdown by:... + + // "...First, closing the input stream to the child process (the server)" + if err := s.stdin.Close(); err != nil { + return fmt.Errorf("closing stdin: %v", err) + } + resChan := make(chan error, 1) + go func() { + resChan <- s.cmd.Wait() + }() + // "...Waiting for the server to exit, or sending SIGTERM if the server does not exit within a reasonable time" + wait := func() (error, bool) { + select { + case err := <-resChan: + return err, true + case <-time.After(5 * time.Second): + } + return nil, false + } + if err, ok := wait(); ok { + return err + } + // Note the condition here: if sending SIGTERM fails, don't wait and just + // move on to SIGKILL. 
+ if err := s.cmd.Process.Signal(syscall.SIGTERM); err == nil { + if err, ok := wait(); ok { + return err + } + } + // "...Sending SIGKILL if the server does not exit within a reasonable time after SIGTERM" + if err := s.cmd.Process.Kill(); err != nil { + return err + } + if err, ok := wait(); ok { + return err + } + return fmt.Errorf("unresponsive subprocess") +} diff --git a/internal/mcp/cmd_test.go b/internal/mcp/cmd_test.go new file mode 100644 index 00000000000..211784619e3 --- /dev/null +++ b/internal/mcp/cmd_test.go @@ -0,0 +1,67 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mcp_test + +import ( + "context" + "log" + "os" + "os/exec" + "testing" + + "github.com/google/go-cmp/cmp" + "golang.org/x/tools/internal/mcp" +) + +const runAsServer = "_MCP_RUN_AS_SERVER" + +func TestMain(m *testing.M) { + if os.Getenv(runAsServer) != "" { + os.Unsetenv(runAsServer) + runServer() + return + } + os.Exit(m.Run()) +} + +func runServer() { + ctx := context.Background() + + server := mcp.NewServer("greeter", "v0.0.1", nil) + server.AddTools(mcp.MakeTool("greet", "say hi", SayHi)) + + if err := server.Run(ctx, mcp.NewStdIOTransport(), nil); err != nil { + log.Fatal(err) + } +} + +func TestCmdTransport(t *testing.T) { + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + exe, err := os.Executable() + if err != nil { + t.Fatal(err) + } + cmd := exec.Command(exe) + cmd.Env = append(os.Environ(), runAsServer+"=true") + + client := mcp.NewClient("client", "v0.0.1", nil) + serverConn, err := client.Connect(ctx, mcp.NewCommandTransport(cmd), nil) + if err != nil { + log.Fatal(err) + } + got, err := serverConn.CallTool(ctx, "greet", SayHiParams{Name: "user"}) + if err != nil { + log.Fatal(err) + } + want := []mcp.Content{mcp.TextContent{Text: "Hi user"}} + if diff := cmp.Diff(want, got); diff != "" { + t.Errorf("greet returned unexpected content (-want +got):\n%s", diff) + } + if err := serverConn.Close(); err != nil { + t.Fatalf("closing server: %v", err) + } +} diff --git a/internal/mcp/transport.go b/internal/mcp/transport.go index 841cc099c10..1ca4c9db13f 100644 --- a/internal/mcp/transport.go +++ b/internal/mcp/transport.go @@ -227,13 +227,11 @@ type rwc struct { } func (r rwc) Read(p []byte) (n int, err error) { - n, err = r.rc.Read(p) - return n, err + return r.rc.Read(p) } func (r rwc) Write(p []byte) (n int, err error) { - n, err = r.wc.Write(p) - return n, err + return r.wc.Write(p) } func (r rwc) Close() error { From 37278be522ab65bacb84ce7c2e85f568dce0e63d Mon Sep 17 00:00:00 2001 From: Rob Findley Date: Mon, 28 Apr 2025 20:54:15 +0000 Subject: [PATCH 259/270] internal/mcp: add more package documentation, examples Add more documentation for the mcp package, explaining how to create and use Clients and Servers. Also add an example of using the SSEHandler to crete an MCP server. 
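As a rough sketch of the SSE flow documented here (the server name, tool, and listen address are arbitrary; the calls mirror the example test added in this CL):

    package main

    import (
        "context"
        "log"
        "net/http"

        "golang.org/x/tools/internal/mcp"
    )

    type HiParams struct {
        Name string `json:"name"`
    }

    func SayHi(ctx context.Context, cc *mcp.ClientConnection, params *HiParams) ([]mcp.Content, error) {
        return []mcp.Content{mcp.TextContent{Text: "Hi " + params.Name}}, nil
    }

    func main() {
        server := mcp.NewServer("greeter", "v0.0.1", nil)
        server.AddTools(mcp.MakeTool("greet", "say hi", SayHi))
        // Every incoming SSE session is served by the same MCP server.
        handler := mcp.NewSSEHandler(func(*http.Request) *mcp.Server { return server })
        log.Fatal(http.ListenAndServe("localhost:8080", handler))
    }

A client can then connect with mcp.NewSSEClientTransport("http://localhost:8080") and call the tool as usual.
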
Change-Id: Ib327826b2cdb1e3418482551c65569ec6f01af33 Reviewed-on: https://go-review.googlesource.com/c/tools/+/668715 Reviewed-by: Jonathan Amsterdam LUCI-TryBot-Result: Go LUCI Reviewed-by: Neal Patel Auto-Submit: Robert Findley --- internal/mcp/mcp.go | 67 ++++++++++++++++--- ...example_test.go => server_example_test.go} | 0 internal/mcp/sse.go | 10 +-- internal/mcp/sse_example_test.go | 50 ++++++++++++++ internal/mcp/sse_test.go | 6 +- 5 files changed, 117 insertions(+), 16 deletions(-) rename internal/mcp/{example_test.go => server_example_test.go} (100%) create mode 100644 internal/mcp/sse_example_test.go diff --git a/internal/mcp/mcp.go b/internal/mcp/mcp.go index b911e959b91..b492b0544e1 100644 --- a/internal/mcp/mcp.go +++ b/internal/mcp/mcp.go @@ -4,18 +4,69 @@ // The mcp package provides an SDK for writing model context protocol clients // and servers. It is a work-in-progress. As of writing, it is a prototype to -// explore the design space of client/server lifecycle and binding. +// explore the design space of client/server transport and binding. // -// To get started, create an MCP client or server with [NewClient] or -// [NewServer], then add features to your client or server using Add -// methods, then connect to a peer using a [Transport] instance and a call to -// [Client.Connect] or [Server.Connect]. +// To get started, create either a [Client] or [Server], and connect it to a +// peer using a [Transport]. The diagram below illustrates how this works: +// +// Client Server +// ⇅ (jsonrpc2) ⇅ +// ServerConnection ⇄ Client Transport ⇄ Server Transport ⇄ ClientConnection +// +// A [Client] is an MCP client, which can be configured with various client +// capabilities. Clients may be connected to one or more [Server] instances +// using the [Client.Connect] method, which creates a [ServerConnection]. +// +// Similarly, a [Server] is an MCP server, which can be configured with various +// server capabilities. Servers may be connected to one or more [Client] +// instances using the [Server.Connect] method, which creates a +// [ClientConnection]. +// +// A [Transport] connects a bidirectional [Stream] of jsonrpc2 messages. In +// practice, transports in the MCP spec are are either client transports or +// server transports. For example, the [StdIOTransport] is a server transport +// that communicates over stdin/stdout, and its counterpart is a +// [CommandTransport] that communicates with a subprocess over its +// stdin/stdout. +// +// Some transports may hide more complicated details, such as an +// [SSEClientTransport], which reads messages via server-sent events on a +// hanging GET request, and writes them to a POST endpoint. Users of this SDK +// may define their own custom Transports by implementing the [Transport] +// interface. +// +// Here's an example that creates a client that talks to an MCP server running +// as a sidecar process: +// +// import "golang.org/x/tools/internal/mcp" +// ... +// // Create a new client, with no features. +// client := mcp.NewClient("mcp-client", "v1.0.0", nil) +// // Connect to a server over stdin/stdout +// transport := mcp.NewCommandTransport(exec.Command("myserver")) +// serverConn, err := client.Connect(ctx, transport, nil) +// if err != nil { +// log.Fatal(err) +// } +// // Call a tool on the server. +// content, err := serverConn.CallTool(ctx, "greet", map[string]any{"name": "you"}) +// +// Here is an example of the corresponding server, connected over stdin/stdout: +// +// import "golang.org/x/tools/internal/mcp" +// ... 
+// // Create a server with a single tool. +// server := mcp.NewServer("greeter", "v1.0.0", nil) +// server.AddTool(mcp.MakeTool("greet", "say hi", SayHi)) +// // Run the server over stdin/stdout, until the client diconnects +// _ = server.Run(ctx, mcp.NewStdIOTransport(), nil) +// +// # TODO // -// TODO: // - Support pagination. // - Support all client/server operations. -// - Support Streamable HTTP transport. +// - Support streamable HTTP transport. // - Support multiple versions of the spec. // - Implement proper JSON schema support, with both client-side and -// server-side validation.. +// server-side validation. package mcp diff --git a/internal/mcp/example_test.go b/internal/mcp/server_example_test.go similarity index 100% rename from internal/mcp/example_test.go rename to internal/mcp/server_example_test.go diff --git a/internal/mcp/sse.go b/internal/mcp/sse.go index da364e161b3..bdc62a71cd3 100644 --- a/internal/mcp/sse.go +++ b/internal/mcp/sse.go @@ -277,14 +277,16 @@ type SSEClientTransport struct { // NewSSEClientTransport returns a new client transport that connects to the // SSE server at the provided URL. -func NewSSEClientTransport(rawURL string) (*SSEClientTransport, error) { - url, err := url.Parse(rawURL) +// +// NewSSEClientTransport panics if the given URL is invalid. +func NewSSEClientTransport(baseURL string) *SSEClientTransport { + url, err := url.Parse(baseURL) if err != nil { - return nil, err + panic(fmt.Sprintf("invalid base url: %v", err)) } return &SSEClientTransport{ sseEndpoint: url, - }, nil + } } // Connect connects through the client endpoint. diff --git a/internal/mcp/sse_example_test.go b/internal/mcp/sse_example_test.go new file mode 100644 index 00000000000..fb2445012f5 --- /dev/null +++ b/internal/mcp/sse_example_test.go @@ -0,0 +1,50 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package mcp_test + +import ( + "context" + "fmt" + "log" + "net/http" + "net/http/httptest" + + "golang.org/x/tools/internal/mcp" +) + +type AddParams struct { + X, Y int +} + +func Add(ctx context.Context, cc *mcp.ClientConnection, params *AddParams) ([]mcp.Content, error) { + return []mcp.Content{ + mcp.TextContent{Text: fmt.Sprintf("%d", params.X+params.Y)}, + }, nil +} + +func ExampleSSEHandler() { + server := mcp.NewServer("adder", "v0.0.1", nil) + server.AddTools(mcp.MakeTool("add", "add two numbers", Add)) + + handler := mcp.NewSSEHandler(func(*http.Request) *mcp.Server { return server }) + httpServer := httptest.NewServer(handler) + defer httpServer.Close() + + ctx := context.Background() + transport := mcp.NewSSEClientTransport(httpServer.URL) + serverConn, err := mcp.NewClient("test", "v1.0.0", nil).Connect(ctx, transport, nil) + if err != nil { + log.Fatal(err) + } + defer serverConn.Close() + + content, err := serverConn.CallTool(ctx, "add", AddParams{1, 2}) + if err != nil { + log.Fatal(err) + } + fmt.Println(content[0].(mcp.TextContent).Text) + + // Output: 3 +} diff --git a/internal/mcp/sse_test.go b/internal/mcp/sse_test.go index f1ae5e40725..4e41f60e3e1 100644 --- a/internal/mcp/sse_test.go +++ b/internal/mcp/sse_test.go @@ -31,11 +31,9 @@ func TestSSEServer(t *testing.T) { } } httpServer := httptest.NewServer(sseHandler) + defer httpServer.Close() - clientTransport, err := NewSSEClientTransport(httpServer.URL) - if err != nil { - t.Fatal(err) - } + clientTransport := NewSSEClientTransport(httpServer.URL) client := NewClient("testClient", "v1.0.0", nil) sc, err := client.Connect(ctx, clientTransport, nil) From ab017007cffb09e38000a13e0c075bd4203c5b91 Mon Sep 17 00:00:00 2001 From: Rob Findley Date: Tue, 29 Apr 2025 18:26:00 +0000 Subject: [PATCH 260/270] internal/mcp: add tool and schema options Implement variadic options for building tools and schemas, to allow easier customization of tool input schemas. Also, make fields required, unless they are marked as "omitempty". 
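To illustrate the new options, a tool definition might look roughly like the sketch below. The tool, property names, and handler are invented for the example; the option functions (Input, Property, Description, Required) are the ones added in this CL:

    package main

    import (
        "context"
        "fmt"

        "golang.org/x/tools/internal/mcp"
    )

    type SearchParams struct {
        Query string `json:"query"`
        Limit int    `json:"limit,omitempty"` // omitempty: optional by default
    }

    func Search(ctx context.Context, cc *mcp.ClientConnection, params *SearchParams) ([]mcp.Content, error) {
        text := fmt.Sprintf("searching for %q (limit %d)", params.Query, params.Limit)
        return []mcp.Content{mcp.TextContent{Text: text}}, nil
    }

    func main() {
        server := mcp.NewServer("searcher", "v0.0.1", nil)
        server.AddTools(mcp.MakeTool("search", "search the corpus", Search, mcp.Input(
            mcp.Property("query", mcp.Description("full-text query string")),
            // Required(true) promotes "limit", which omitempty made optional.
            mcp.Property("limit", mcp.Description("maximum number of results"), mcp.Required(true)),
        )))
        _ = server.Run(context.Background(), mcp.NewStdIOTransport(), nil)
    }

Without any options, the inferred schema already marks "query" as required (no omitempty) and leaves "limit" optional.
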
Change-Id: I7a29258fae3ec5e7ea4906ed30ed6750979de962 Reviewed-on: https://go-review.googlesource.com/c/tools/+/668955 Reviewed-by: Jonathan Amsterdam LUCI-TryBot-Result: Go LUCI --- internal/mcp/examples/hello/main.go | 6 +- internal/mcp/internal/jsonschema/infer.go | 44 ++++++-- .../mcp/internal/jsonschema/infer_test.go | 1 + internal/mcp/mcp.go | 2 +- internal/mcp/mcp_test.go | 3 +- internal/mcp/tool.go | 105 +++++++++++++++++- internal/mcp/tool_test.go | 89 +++++++++++++++ 7 files changed, 236 insertions(+), 14 deletions(-) create mode 100644 internal/mcp/tool_test.go diff --git a/internal/mcp/examples/hello/main.go b/internal/mcp/examples/hello/main.go index d9b9967ff2c..1ad5b2796d8 100644 --- a/internal/mcp/examples/hello/main.go +++ b/internal/mcp/examples/hello/main.go @@ -17,7 +17,7 @@ import ( var httpAddr = flag.String("http", "", "if set, use SSE HTTP at this address, instead of stdin/stdout") type SayHiParams struct { - Name string `json:"name" mcp:"the name to say hi to"` + Name string `json:"name"` } func SayHi(ctx context.Context, cc *mcp.ClientConnection, params *SayHiParams) ([]mcp.Content, error) { @@ -30,7 +30,9 @@ func main() { flag.Parse() server := mcp.NewServer("greeter", "v0.0.1", nil) - server.AddTools(mcp.MakeTool("greet", "say hi", SayHi)) + server.AddTools(mcp.MakeTool("greet", "say hi", SayHi, mcp.Input( + mcp.Property("name", mcp.Description("the name to say hi to")), + ))) if *httpAddr != "" { handler := mcp.NewSSEHandler(func(*http.Request) *mcp.Server { diff --git a/internal/mcp/internal/jsonschema/infer.go b/internal/mcp/internal/jsonschema/infer.go index 2c4e73411b2..9cadac79943 100644 --- a/internal/mcp/internal/jsonschema/infer.go +++ b/internal/mcp/internal/jsonschema/infer.go @@ -9,6 +9,7 @@ package jsonschema import ( "fmt" "reflect" + "slices" "strings" ) @@ -19,6 +20,21 @@ func For[T any]() (*Schema, error) { return ForType(reflect.TypeFor[T]()) } +// ForType constructs a JSON schema object for the given type. +// It translates Go types into compatible JSON schema types, as follows: +// - strings have schema type "string" +// - bools have schema type "boolean" +// - signed and unsigned integer types have schema type "integer" +// - floating point types have schema type "number" +// - slices and arrays have schema type "array", and a corresponding schema +// for items +// - maps with string key have schema type "object", and corresponding +// schema for additionalProperties +// - structs have schema type "object", and disallow additionalProperties. +// Their properties are derived from exported struct fields, using the +// struct field json name. Fields that are marked "omitempty" are +// considered optional; all other fields become required properties. +// // It returns an error if t contains (possibly recursively) any of the following Go // types, as they are incompatible with the JSON schema spec. 
// - maps with key other than 'string' @@ -75,6 +91,10 @@ func typeSchema(t reflect.Type, seen map[reflect.Type]*Schema) (*Schema, error) if err != nil { return nil, fmt.Errorf("computing element schema: %v", err) } + if t.Kind() == reflect.Array { + s.MinItems = Ptr(float64(t.Len())) + s.MaxItems = Ptr(float64(t.Len())) + } case reflect.String: s.Type = "string" @@ -86,8 +106,8 @@ func typeSchema(t reflect.Type, seen map[reflect.Type]*Schema) (*Schema, error) for i := range t.NumField() { field := t.Field(i) - name, ok := jsonName(field) - if !ok { + name, required, include := parseField(field) + if !include { continue } if s.Properties == nil { @@ -97,6 +117,9 @@ func typeSchema(t reflect.Type, seen map[reflect.Type]*Schema) (*Schema, error) if err != nil { return nil, err } + if required { + s.Required = append(s.Required, name) + } } default: @@ -105,14 +128,21 @@ func typeSchema(t reflect.Type, seen map[reflect.Type]*Schema) (*Schema, error) return s, nil } -func jsonName(f reflect.StructField) (string, bool) { +func parseField(f reflect.StructField) (name string, required, include bool) { if !f.IsExported() { - return "", false + return "", false, false } + name = f.Name + required = true if tag, ok := f.Tag.Lookup("json"); ok { - if name, _, _ := strings.Cut(tag, ","); name != "" { - return name, name != "-" + props := strings.Split(tag, ",") + if props[0] != "" { + if props[0] == "-" { + return "", false, false + } + name = props[0] } + required = !slices.Contains(props[1:], "omitempty") } - return f.Name, true + return name, required, true } diff --git a/internal/mcp/internal/jsonschema/infer_test.go b/internal/mcp/internal/jsonschema/infer_test.go index 7f3b5d0abe3..1c44041f93f 100644 --- a/internal/mcp/internal/jsonschema/infer_test.go +++ b/internal/mcp/internal/jsonschema/infer_test.go @@ -56,6 +56,7 @@ func TestForType(t *testing.T) { "P": {Type: "boolean"}, "NoSkip": {Type: "string"}, }, + Required: []string{"f", "G", "P"}, AdditionalProperties: &jsonschema.Schema{Not: &jsonschema.Schema{}}, }}, } diff --git a/internal/mcp/mcp.go b/internal/mcp/mcp.go index b492b0544e1..ccaebbd1d4b 100644 --- a/internal/mcp/mcp.go +++ b/internal/mcp/mcp.go @@ -67,6 +67,6 @@ // - Support all client/server operations. // - Support streamable HTTP transport. // - Support multiple versions of the spec. -// - Implement proper JSON schema support, with both client-side and +// - Implement full JSON schema support, with both client-side and // server-side validation. package mcp diff --git a/internal/mcp/mcp_test.go b/internal/mcp/mcp_test.go index b5878e5b7fd..dc2d38cbd91 100644 --- a/internal/mcp/mcp_test.go +++ b/internal/mcp/mcp_test.go @@ -88,7 +88,8 @@ func TestEndToEnd(t *testing.T) { Name: "greet", Description: "say hi", InputSchema: &jsonschema.Schema{ - Type: "object", + Type: "object", + Required: []string{"Name"}, Properties: map[string]*jsonschema.Schema{ "Name": {Type: "string"}, }, diff --git a/internal/mcp/tool.go b/internal/mcp/tool.go index 7d1fbbb0e9d..491e727a12b 100644 --- a/internal/mcp/tool.go +++ b/internal/mcp/tool.go @@ -7,6 +7,7 @@ package mcp import ( "context" "encoding/json" + "slices" "golang.org/x/tools/internal/mcp/internal/jsonschema" "golang.org/x/tools/internal/mcp/internal/protocol" @@ -23,13 +24,16 @@ type Tool struct { // MakeTool is a helper to make a tool using reflection on the given handler. // +// If provided, variadic [ToolOption] values may be used to customize the tool. 
+// // The input schema for the tool is extracted from the request type for the -// handler, and used to unmmarshal and validate requests to the handler. +// handler, and used to unmmarshal and validate requests to the handler. This +// schema may be customized using the [Input] option. // // It is the caller's responsibility that the handler request type can produce // a valid schema, as documented by [jsonschema.ForType]; otherwise, MakeTool // panics. -func MakeTool[TReq any](name, description string, handler func(context.Context, *ClientConnection, TReq) ([]Content, error)) *Tool { +func MakeTool[TReq any](name, description string, handler func(context.Context, *ClientConnection, TReq) ([]Content, error), opts ...ToolOption) *Tool { schema, err := jsonschema.For[TReq]() if err != nil { panic(err) @@ -51,7 +55,7 @@ func MakeTool[TReq any](name, description string, handler func(context.Context, } return res, nil } - return &Tool{ + t := &Tool{ Definition: protocol.Tool{ Name: name, Description: description, @@ -59,6 +63,10 @@ func MakeTool[TReq any](name, description string, handler func(context.Context, }, Handler: wrapped, } + for _, opt := range opts { + opt.set(t) + } + return t } // unmarshalSchema unmarshals data into v and validates the result according to @@ -68,3 +76,94 @@ func unmarshalSchema(data json.RawMessage, _ *jsonschema.Schema, v any) error { // Separate validation from assignment. return json.Unmarshal(data, v) } + +// A ToolOption configures the behavior of a Tool. +type ToolOption interface { + set(*Tool) +} + +type toolSetter func(*Tool) + +func (s toolSetter) set(t *Tool) { s(t) } + +// Input applies the provided [SchemaOption] configuration to the tool's input +// schema. +func Input(opts ...SchemaOption) ToolOption { + return toolSetter(func(t *Tool) { + for _, opt := range opts { + opt.set(t.Definition.InputSchema) + } + }) +} + +// A SchemaOption configures a jsonschema.Schema. +type SchemaOption interface { + set(s *jsonschema.Schema) +} + +type schemaSetter func(*jsonschema.Schema) + +func (s schemaSetter) set(schema *jsonschema.Schema) { s(schema) } + +// Property configures the schema for the property of the given name. +// If there is no such property in the schema, it is created. +func Property(name string, opts ...SchemaOption) SchemaOption { + return schemaSetter(func(schema *jsonschema.Schema) { + propSchema, ok := schema.Properties[name] + if !ok { + propSchema = new(jsonschema.Schema) + schema.Properties[name] = propSchema + } + // Apply the options, with special handling for Required, as it needs to be + // set on the parent schema. + for _, opt := range opts { + if req, ok := opt.(required); ok { + if req { + if !slices.Contains(schema.Required, name) { + schema.Required = append(schema.Required, name) + } + } else { + schema.Required = slices.DeleteFunc(schema.Required, func(s string) bool { + return s == name + }) + } + } else { + opt.set(propSchema) + } + } + }) +} + +// Required sets whether the associated property is required. It is only valid +// when used in a [Property] option: using Required outside of Property panics. +func Required(v bool) SchemaOption { + return required(v) +} + +type required bool + +func (required) set(s *jsonschema.Schema) { + panic("use of required outside of Property") +} + +// Enum sets the provided values as the "enum" value of the schema. +func Enum(values ...any) SchemaOption { + return schemaSetter(func(s *jsonschema.Schema) { + s.Enum = values + }) +} + +// Description sets the provided schema description. 
+func Description(description string) SchemaOption { + return schemaSetter(func(schema *jsonschema.Schema) { + schema.Description = description + }) +} + +// Schema overrides the inferred schema with a shallow copy of the given +// schema. +func Schema(schema *jsonschema.Schema) SchemaOption { + return schemaSetter(func(s *jsonschema.Schema) { + *s = *schema + }) +} diff --git a/internal/mcp/tool_test.go b/internal/mcp/tool_test.go new file mode 100644 index 00000000000..3d8201a89ad --- /dev/null +++ b/internal/mcp/tool_test.go @@ -0,0 +1,89 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mcp_test + +import ( + "context" + "testing" + + "github.com/google/go-cmp/cmp" + "golang.org/x/tools/internal/mcp" + "golang.org/x/tools/internal/mcp/internal/jsonschema" +) + +// testHandler is used for type inference in TestMakeTool. +func testHandler[T any](context.Context, *mcp.ClientConnection, T) ([]mcp.Content, error) { + panic("not implemented") +} + +func TestMakeTool(t *testing.T) { + tests := []struct { + tool *mcp.Tool + want *jsonschema.Schema + }{ + { + mcp.MakeTool("basic", "", testHandler[struct { + Name string `json:"name"` + }]), + &jsonschema.Schema{ + Type: "object", + Required: []string{"name"}, + Properties: map[string]*jsonschema.Schema{ + "name": {Type: "string"}, + }, + AdditionalProperties: &jsonschema.Schema{Not: new(jsonschema.Schema)}, + }, + }, + { + mcp.MakeTool("enum", "", testHandler[struct{ Name string }], mcp.Input( + mcp.Property("Name", mcp.Enum("x", "y", "z")), + )), + &jsonschema.Schema{ + Type: "object", + Required: []string{"Name"}, + Properties: map[string]*jsonschema.Schema{ + "Name": {Type: "string", Enum: []any{"x", "y", "z"}}, + }, + AdditionalProperties: &jsonschema.Schema{Not: new(jsonschema.Schema)}, + }, + }, + { + mcp.MakeTool("required", "", testHandler[struct { + Name string `json:"name"` + Language string `json:"language"` + X int `json:"x,omitempty"` + Y int `json:"y,omitempty"` + }], mcp.Input( + mcp.Property("x", mcp.Required(true)))), + &jsonschema.Schema{ + Type: "object", + Required: []string{"name", "language", "x"}, + Properties: map[string]*jsonschema.Schema{ + "language": {Type: "string"}, + "name": {Type: "string"}, + "x": {Type: "integer"}, + "y": {Type: "integer"}, + }, + AdditionalProperties: &jsonschema.Schema{Not: new(jsonschema.Schema)}, + }, + }, + { + mcp.MakeTool("set_schema", "", testHandler[struct { + X int `json:"x,omitempty"` + Y int `json:"y,omitempty"` + }], mcp.Input( + mcp.Schema(&jsonschema.Schema{Type: "object"})), + ), + &jsonschema.Schema{ + Type: "object", + }, + }, + } + for _, test := range tests { + if diff := cmp.Diff(test.want, test.tool.Definition.InputSchema); diff != "" { + t.Errorf("MakeTool(%v) mismatch (-want +got):\n%s", test.tool.Definition.Name, diff) + } + } +} From 80e0fd81238f7cf7eb9b049d1d483b24acfb6622 Mon Sep 17 00:00:00 2001 From: Rob Findley Date: Wed, 30 Apr 2025 01:16:23 +0000 Subject: [PATCH 261/270] internal/mcp: support prompts Add support for 'prompts/list' and 'prompts/get', and registering prompts on the server using AddPrompts. Add a 'MakePrompt' helper to construct prompts from a prompt handler using reflection. 
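For illustration, a minimal server-side sketch of the new prompt support:
a hypothetical standalone program, condensed from the hello example and
the end-to-end test in this change (all mcp and protocol identifiers are
as introduced or updated here). On the client side, the new
ServerConnection.ListPrompts and GetPrompt methods list and invoke the
registered prompts.

    package main

    import (
        "context"
        "encoding/json"

        "golang.org/x/tools/internal/mcp"
        "golang.org/x/tools/internal/mcp/internal/protocol"
    )

    type HiParams struct {
        Name string `json:"name"`
    }

    // PromptHi returns a prompt message asking for a greeting of the named
    // person. Prompt content is still constructed by hand-marshaling
    // protocol.TextContent; see the related TODOs about cleaning this up.
    func PromptHi(ctx context.Context, cc *mcp.ClientConnection, params *HiParams) (*protocol.GetPromptResult, error) {
        content, err := json.Marshal(protocol.TextContent{Type: "text", Text: "Say hi to " + params.Name})
        if err != nil {
            return nil, err
        }
        return &protocol.GetPromptResult{
            Description: "Greeting prompt",
            Messages: []protocol.PromptMessage{
                {Role: "user", Content: json.RawMessage(content)},
            },
        }, nil
    }

    func main() {
        server := mcp.NewServer("greeter", "v0.0.1", nil)
        // The prompt argument "name" (required) is inferred from HiParams,
        // and may also be adjusted with the Argument option.
        server.AddPrompts(mcp.MakePrompt("greet", "say hi", PromptHi))
    }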
Change-Id: I479cb9c9b99313cca99640c1f5ea1939363759e6 Reviewed-on: https://go-review.googlesource.com/c/tools/+/669015 LUCI-TryBot-Result: Go LUCI Reviewed-by: Jonathan Amsterdam --- internal/mcp/client.go | 43 +++++++- internal/mcp/cmd_test.go | 2 +- internal/mcp/examples/hello/main.go | 25 ++++- internal/mcp/internal/jsonschema/infer.go | 1 + internal/mcp/internal/protocol/generate.go | 71 ++++-------- internal/mcp/internal/protocol/protocol.go | 89 ++++++++++++--- internal/mcp/internal/util/util.go | 36 ++++++ internal/mcp/mcp.go | 4 + internal/mcp/mcp_test.go | 70 ++++++++++-- internal/mcp/prompt.go | 122 +++++++++++++++++++++ internal/mcp/prompt_test.go | 54 +++++++++ internal/mcp/server.go | 56 +++++++++- internal/mcp/server_example_test.go | 2 +- internal/mcp/sse_example_test.go | 2 +- internal/mcp/sse_test.go | 2 +- internal/mcp/tool.go | 38 +++++-- internal/mcp/tool_test.go | 12 +- 17 files changed, 531 insertions(+), 98 deletions(-) create mode 100644 internal/mcp/internal/util/util.go create mode 100644 internal/mcp/prompt.go create mode 100644 internal/mcp/prompt_test.go diff --git a/internal/mcp/client.go b/internal/mcp/client.go index 6883843311e..b71585227f9 100644 --- a/internal/mcp/client.go +++ b/internal/mcp/client.go @@ -150,6 +150,33 @@ func (sc *ServerConnection) Ping(ctx context.Context) error { return call(ctx, sc.conn, "ping", nil, nil) } +// ListPrompts lists prompts that are currently available on the server. +func (sc *ServerConnection) ListPrompts(ctx context.Context) ([]protocol.Prompt, error) { + var ( + params = &protocol.ListPromptsParams{} + result protocol.ListPromptsResult + ) + if err := call(ctx, sc.conn, "prompts/list", params, &result); err != nil { + return nil, err + } + return result.Prompts, nil +} + +// GetPrompt gets a prompt from the server. +func (sc *ServerConnection) GetPrompt(ctx context.Context, name string, args map[string]string) (*protocol.GetPromptResult, error) { + var ( + params = &protocol.GetPromptParams{ + Name: name, + Arguments: args, + } + result = &protocol.GetPromptResult{} + ) + if err := call(ctx, sc.conn, "prompts/get", params, result); err != nil { + return nil, err + } + return result, nil +} + // ListTools lists tools that are currently available on the server. func (sc *ServerConnection) ListTools(ctx context.Context) ([]protocol.Tool, error) { var ( @@ -164,23 +191,27 @@ func (sc *ServerConnection) ListTools(ctx context.Context) ([]protocol.Tool, err // CallTool calls the tool with the given name and arguments. // -// TODO: make the following true: +// TODO(jba): make the following true: // If the provided arguments do not conform to the schema for the given tool, // the call fails. 
-func (sc *ServerConnection) CallTool(ctx context.Context, name string, args any) (_ []Content, err error) { +func (sc *ServerConnection) CallTool(ctx context.Context, name string, args map[string]any) (_ []Content, err error) { defer func() { if err != nil { err = fmt.Errorf("calling tool %q: %w", name, err) } }() - argJSON, err := json.Marshal(args) - if err != nil { - return nil, fmt.Errorf("marshaling args: %v", err) + argsJSON := make(map[string]json.RawMessage) + for name, arg := range args { + argJSON, err := json.Marshal(arg) + if err != nil { + return nil, fmt.Errorf("marshaling argument %s: %v", name, err) + } + argsJSON[name] = argJSON } var ( params = &protocol.CallToolParams{ Name: name, - Arguments: argJSON, + Arguments: argsJSON, } result protocol.CallToolResult ) diff --git a/internal/mcp/cmd_test.go b/internal/mcp/cmd_test.go index 211784619e3..01de31c0e9d 100644 --- a/internal/mcp/cmd_test.go +++ b/internal/mcp/cmd_test.go @@ -53,7 +53,7 @@ func TestCmdTransport(t *testing.T) { if err != nil { log.Fatal(err) } - got, err := serverConn.CallTool(ctx, "greet", SayHiParams{Name: "user"}) + got, err := serverConn.CallTool(ctx, "greet", map[string]any{"name": "user"}) if err != nil { log.Fatal(err) } diff --git a/internal/mcp/examples/hello/main.go b/internal/mcp/examples/hello/main.go index 1ad5b2796d8..5f4634d3a8e 100644 --- a/internal/mcp/examples/hello/main.go +++ b/internal/mcp/examples/hello/main.go @@ -6,26 +6,46 @@ package main import ( "context" + "encoding/json" "flag" "fmt" "net/http" "os" "golang.org/x/tools/internal/mcp" + "golang.org/x/tools/internal/mcp/internal/protocol" ) var httpAddr = flag.String("http", "", "if set, use SSE HTTP at this address, instead of stdin/stdout") -type SayHiParams struct { +type HiParams struct { Name string `json:"name"` } -func SayHi(ctx context.Context, cc *mcp.ClientConnection, params *SayHiParams) ([]mcp.Content, error) { +func SayHi(ctx context.Context, cc *mcp.ClientConnection, params *HiParams) ([]mcp.Content, error) { return []mcp.Content{ mcp.TextContent{Text: "Hi " + params.Name}, }, nil } +func PromptHi(ctx context.Context, cc *mcp.ClientConnection, params *HiParams) (*protocol.GetPromptResult, error) { + // (see related TODOs about cleaning up content construction) + content, err := json.Marshal(protocol.TextContent{ + Type: "text", + Text: "Say hi to " + params.Name, + }) + if err != nil { + return nil, err + } + return &protocol.GetPromptResult{ + Description: "Code review prompt", + Messages: []protocol.PromptMessage{ + // TODO: move 'Content' to the protocol package. + {Role: "user", Content: json.RawMessage(content)}, + }, + }, nil +} + func main() { flag.Parse() @@ -33,6 +53,7 @@ func main() { server.AddTools(mcp.MakeTool("greet", "say hi", SayHi, mcp.Input( mcp.Property("name", mcp.Description("the name to say hi to")), ))) + server.AddPrompts(mcp.MakePrompt("greet", "", PromptHi)) if *httpAddr != "" { handler := mcp.NewSSEHandler(func(*http.Request) *mcp.Server { diff --git a/internal/mcp/internal/jsonschema/infer.go b/internal/mcp/internal/jsonschema/infer.go index 9cadac79943..45ecadfdb63 100644 --- a/internal/mcp/internal/jsonschema/infer.go +++ b/internal/mcp/internal/jsonschema/infer.go @@ -142,6 +142,7 @@ func parseField(f reflect.StructField) (name string, required, include bool) { } name = props[0] } + // TODO: support 'omitzero' as well. 
required = !slices.Contains(props[1:], "omitempty") } return name, required, true diff --git a/internal/mcp/internal/protocol/generate.go b/internal/mcp/internal/protocol/generate.go index 73f3cc8d332..c5ffee9964a 100644 --- a/internal/mcp/internal/protocol/generate.go +++ b/internal/mcp/internal/protocol/generate.go @@ -13,13 +13,11 @@ package main import ( "bytes" - "cmp" "encoding/json" "flag" "fmt" "go/format" "io" - "iter" "log" "net/http" "os" @@ -28,6 +26,7 @@ import ( "strings" "golang.org/x/tools/internal/mcp/internal/jsonschema" + "golang.org/x/tools/internal/mcp/internal/util" ) var schemaFile = flag.String("schema_file", "", "if set, use this file as the persistent schema file") @@ -54,31 +53,36 @@ var declarations = config{ "CallToolRequest": { Fields: config{"Params": {Name: "CallToolParams"}}, }, - "CallToolResult": { - Name: "CallToolResult", - }, + "CallToolResult": {Name: "CallToolResult"}, "CancelledNotification": { Fields: config{"Params": {Name: "CancelledParams"}}, }, "ClientCapabilities": {Name: "ClientCapabilities"}, - "Implementation": {Name: "Implementation"}, + "GetPromptRequest": { + Fields: config{"Params": {Name: "GetPromptParams"}}, + }, + "GetPromptResult": {Name: "GetPromptResult"}, + "Implementation": {Name: "Implementation"}, "InitializeRequest": { Fields: config{"Params": {Name: "InitializeParams"}}, }, - "InitializeResult": { - Name: "InitializeResult", - }, + "InitializeResult": {Name: "InitializeResult"}, "InitializedNotification": { Fields: config{"Params": {Name: "InitializedParams"}}, }, + "ListPromptsRequest": { + Fields: config{"Params": {Name: "ListPromptsParams"}}, + }, + "ListPromptsResult": {Name: "ListPromptsResult"}, "ListToolsRequest": { Fields: config{"Params": {Name: "ListToolsParams"}}, }, - "ListToolsResult": { - Name: "ListToolsResult", - }, - "RequestId": {Substitute: "any"}, // null|number|string - "Role": {Name: "Role"}, + "ListToolsResult": {Name: "ListToolsResult"}, + "Prompt": {Name: "Prompt"}, + "PromptMessage": {Name: "PromptMessage"}, + "PromptArgument": {Name: "PromptArgument"}, + "RequestId": {Substitute: "any"}, // null|number|string + "Role": {Name: "Role"}, "ServerCapabilities": { Name: "ServerCapabilities", Fields: config{ @@ -92,9 +96,7 @@ var declarations = config{ Name: "Tool", Fields: config{"InputSchema": {Substitute: "*jsonschema.Schema"}}, }, - "ToolAnnotations": { - Name: "ToolAnnotations", - }, + "ToolAnnotations": {Name: "ToolAnnotations"}, } func main() { @@ -114,7 +116,7 @@ func main() { // writing types, we collect definitions and concatenate them later. This // also allows us to sort. named := make(map[string]*bytes.Buffer) - for name, def := range sorted(schema.Definitions) { + for name, def := range util.Sorted(schema.Definitions) { config := declarations[name] if config == nil { continue @@ -142,7 +144,7 @@ import ( `) // Write out types. - for _, b := range sorted(named) { + for _, b := range util.Sorted(named) { fmt.Fprintln(buf) fmt.Fprint(buf, b.String()) } @@ -242,8 +244,8 @@ func writeType(w io.Writer, config *typeConfig, def *jsonschema.Schema, named ma // unmarshal them into a map[string]any, or delay unmarshalling with // json.RawMessage. For now, use json.RawMessage as it defers the choice. 
if def.Type == "object" && canHaveAdditionalProperties(def) { - w.Write([]byte("json.RawMessage")) - return nil + w.Write([]byte("map[string]")) + return writeType(w, nil, def.AdditionalProperties, named) } if def.Type == "" { @@ -269,7 +271,7 @@ func writeType(w io.Writer, config *typeConfig, def *jsonschema.Schema, named ma case "object": fmt.Fprintf(w, "struct {\n") - for name, fieldDef := range sorted(def.Properties) { + for name, fieldDef := range util.Sorted(def.Properties) { if fieldDef.Description != "" { fmt.Fprintf(w, "%s\n", toComment(fieldDef.Description)) } @@ -385,28 +387,3 @@ func assert(cond bool, msg string) { panic(msg) } } - -// Helpers below are copied from gopls' moremaps package. - -// sorted returns an iterator over the entries of m in key order. -func sorted[M ~map[K]V, K cmp.Ordered, V any](m M) iter.Seq2[K, V] { - // TODO(adonovan): use maps.Sorted if proposal #68598 is accepted. - return func(yield func(K, V) bool) { - keys := keySlice(m) - slices.Sort(keys) - for _, k := range keys { - if !yield(k, m[k]) { - break - } - } - } -} - -// keySlice returns the keys of the map M, like slices.Collect(maps.Keys(m)). -func keySlice[M ~map[K]V, K comparable, V any](m M) []K { - r := make([]K, 0, len(m)) - for k := range m { - r = append(r, k) - } - return r -} diff --git a/internal/mcp/internal/protocol/protocol.go b/internal/mcp/internal/protocol/protocol.go index b460e950801..b851dac0808 100644 --- a/internal/mcp/internal/protocol/protocol.go +++ b/internal/mcp/internal/protocol/protocol.go @@ -29,8 +29,8 @@ type Annotations struct { } type CallToolParams struct { - Arguments json.RawMessage `json:"arguments,omitempty"` - Name string `json:"name"` + Arguments map[string]json.RawMessage `json:"arguments,omitempty"` + Name string `json:"name"` } // The server's response to a tool call. @@ -46,8 +46,8 @@ type CallToolParams struct { type CallToolResult struct { // This result property is reserved by the protocol to allow clients and servers // to attach additional metadata to their responses. - Meta json.RawMessage `json:"_meta,omitempty"` - Content []json.RawMessage `json:"content"` + Meta map[string]json.RawMessage `json:"_meta,omitempty"` + Content []json.RawMessage `json:"content"` // Whether the tool call ended in an error. // // If not set, this is assumed to be false (the call was successful). @@ -70,14 +70,31 @@ type CancelledParams struct { // additional capabilities. type ClientCapabilities struct { // Experimental, non-standard capabilities that the client supports. - Experimental json.RawMessage `json:"experimental,omitempty"` + Experimental map[string]map[string]json.RawMessage `json:"experimental,omitempty"` // Present if the client supports listing roots. Roots *struct { // Whether the client supports notifications for changes to the roots list. ListChanged bool `json:"listChanged,omitempty"` } `json:"roots,omitempty"` // Present if the client supports sampling from an LLM. - Sampling json.RawMessage `json:"sampling,omitempty"` + Sampling map[string]json.RawMessage `json:"sampling,omitempty"` +} + +type GetPromptParams struct { + // Arguments to use for templating the prompt. + Arguments map[string]string `json:"arguments,omitempty"` + // The name of the prompt or prompt template. + Name string `json:"name"` +} + +// The server's response to a prompts/get request from the client. +type GetPromptResult struct { + // This result property is reserved by the protocol to allow clients and servers + // to attach additional metadata to their responses. 
+ Meta map[string]json.RawMessage `json:"_meta,omitempty"` + // An optional description for the prompt. + Description string `json:"description,omitempty"` + Messages []PromptMessage `json:"messages"` } // Describes the name and version of an MCP implementation. @@ -99,8 +116,8 @@ type InitializeParams struct { type InitializeResult struct { // This result property is reserved by the protocol to allow clients and servers // to attach additional metadata to their responses. - Meta json.RawMessage `json:"_meta,omitempty"` - Capabilities ServerCapabilities `json:"capabilities"` + Meta map[string]json.RawMessage `json:"_meta,omitempty"` + Capabilities ServerCapabilities `json:"capabilities"` // Instructions describing how to use the server and its features. // // This can be used by clients to improve the LLM's understanding of available @@ -114,7 +131,24 @@ type InitializeResult struct { ServerInfo Implementation `json:"serverInfo"` } -type InitializedParams json.RawMessage +type InitializedParams map[string]json.RawMessage + +type ListPromptsParams struct { + // An opaque token representing the current pagination position. If provided, + // the server should return results starting after this cursor. + Cursor string `json:"cursor,omitempty"` +} + +// The server's response to a prompts/list request from the client. +type ListPromptsResult struct { + // This result property is reserved by the protocol to allow clients and servers + // to attach additional metadata to their responses. + Meta map[string]json.RawMessage `json:"_meta,omitempty"` + // An opaque token representing the pagination position after the last returned + // result. If present, there may be more results available. + NextCursor string `json:"nextCursor,omitempty"` + Prompts []Prompt `json:"prompts"` +} type ListToolsParams struct { // An opaque token representing the current pagination position. If provided, @@ -126,19 +160,48 @@ type ListToolsParams struct { type ListToolsResult struct { // This result property is reserved by the protocol to allow clients and servers // to attach additional metadata to their responses. - Meta json.RawMessage `json:"_meta,omitempty"` + Meta map[string]json.RawMessage `json:"_meta,omitempty"` // An opaque token representing the pagination position after the last returned // result. If present, there may be more results available. NextCursor string `json:"nextCursor,omitempty"` Tools []Tool `json:"tools"` } +// A prompt or prompt template that the server offers. +type Prompt struct { + // A list of arguments to use for templating the prompt. + Arguments []PromptArgument `json:"arguments,omitempty"` + // An optional description of what this prompt provides + Description string `json:"description,omitempty"` + // The name of the prompt or prompt template. + Name string `json:"name"` +} + +// Describes an argument that a prompt can accept. +type PromptArgument struct { + // A human-readable description of the argument. + Description string `json:"description,omitempty"` + // The name of the argument. + Name string `json:"name"` + // Whether this argument must be provided. + Required bool `json:"required,omitempty"` +} + // Present if the server offers any prompt templates. type PromptCapabilities struct { // Whether this server supports notifications for changes to the prompt list. ListChanged bool `json:"listChanged,omitempty"` } +// Describes a message returned as part of a prompt. +// +// This is similar to `SamplingMessage`, but also supports the embedding of +// resources from the MCP server. 
+type PromptMessage struct { + Content json.RawMessage `json:"content"` + Role Role `json:"role"` +} + // Present if the server offers any resources to read. type ResourceCapabilities struct { // Whether this server supports notifications for changes to the resource list. @@ -155,11 +218,11 @@ type Role string // additional capabilities. type ServerCapabilities struct { // Present if the server supports argument autocompletion suggestions. - Completions json.RawMessage `json:"completions,omitempty"` + Completions map[string]json.RawMessage `json:"completions,omitempty"` // Experimental, non-standard capabilities that the server supports. - Experimental json.RawMessage `json:"experimental,omitempty"` + Experimental map[string]map[string]json.RawMessage `json:"experimental,omitempty"` // Present if the server supports sending log messages to the client. - Logging json.RawMessage `json:"logging,omitempty"` + Logging map[string]json.RawMessage `json:"logging,omitempty"` // Present if the server offers any prompt templates. Prompts *PromptCapabilities `json:"prompts,omitempty"` // Present if the server offers any resources to read. diff --git a/internal/mcp/internal/util/util.go b/internal/mcp/internal/util/util.go new file mode 100644 index 00000000000..cdc6038ede8 --- /dev/null +++ b/internal/mcp/internal/util/util.go @@ -0,0 +1,36 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package util + +import ( + "cmp" + "iter" + "slices" +) + +// Helpers below are copied from gopls' moremaps package. + +// sorted returns an iterator over the entries of m in key order. +func Sorted[M ~map[K]V, K cmp.Ordered, V any](m M) iter.Seq2[K, V] { + // TODO(adonovan): use maps.Sorted if proposal #68598 is accepted. + return func(yield func(K, V) bool) { + keys := KeySlice(m) + slices.Sort(keys) + for _, k := range keys { + if !yield(k, m[k]) { + break + } + } + } +} + +// keySlice returns the keys of the map M, like slices.Collect(maps.Keys(m)). +func KeySlice[M ~map[K]V, K comparable, V any](m M) []K { + r := make([]K, 0, len(m)) + for k := range m { + r = append(r, k) + } + return r +} diff --git a/internal/mcp/mcp.go b/internal/mcp/mcp.go index ccaebbd1d4b..f71521885e4 100644 --- a/internal/mcp/mcp.go +++ b/internal/mcp/mcp.go @@ -63,8 +63,12 @@ // // # TODO // +// - Support all content types. // - Support pagination. +// - Support completion. +// - Support oauth. // - Support all client/server operations. +// - Pass the client connection in the context. // - Support streamable HTTP transport. // - Support multiple versions of the spec. // - Implement full JSON schema support, with both client-side and diff --git a/internal/mcp/mcp_test.go b/internal/mcp/mcp_test.go index dc2d38cbd91..70fcbab27da 100644 --- a/internal/mcp/mcp_test.go +++ b/internal/mcp/mcp_test.go @@ -6,6 +6,7 @@ package mcp import ( "context" + "encoding/json" "errors" "fmt" "slices" @@ -41,9 +42,34 @@ func TestEndToEnd(t *testing.T) { // The 'fail' tool returns this error. 
failure := errors.New("mcp failure") - s.AddTools(MakeTool("fail", "just fail", func(context.Context, *ClientConnection, struct{}) ([]Content, error) { - return nil, failure - })) + s.AddTools( + MakeTool("fail", "just fail", func(context.Context, *ClientConnection, struct{}) ([]Content, error) { + return nil, failure + }), + ) + + s.AddPrompts( + MakePrompt("code_review", "do a code review", func(_ context.Context, _ *ClientConnection, params struct{ Code string }) (*protocol.GetPromptResult, error) { + // TODO(rfindley): clean up this handling of content. + content, err := json.Marshal(protocol.TextContent{ + Type: "text", + Text: "Please review the following code: " + params.Code, + }) + if err != nil { + return nil, err + } + return &protocol.GetPromptResult{ + Description: "Code review prompt", + Messages: []protocol.PromptMessage{ + // TODO: move 'Content' to the protocol package. + {Role: "user", Content: json.RawMessage(content)}, + }, + }, nil + }), + MakePrompt("fail", "", func(_ context.Context, _ *ClientConnection, params struct{}) (*protocol.GetPromptResult, error) { + return nil, failure + }), + ) // Connect the server. cc, err := s.Connect(ctx, st, nil) @@ -80,6 +106,34 @@ func TestEndToEnd(t *testing.T) { t.Fatalf("ping failed: %v", err) } + gotPrompts, err := sc.ListPrompts(ctx) + if err != nil { + t.Errorf("prompts/list failed: %v", err) + } + wantPrompts := []protocol.Prompt{ + { + Name: "code_review", + Description: "do a code review", + Arguments: []protocol.PromptArgument{{Name: "Code", Required: true}}, + }, + {Name: "fail"}, + } + if diff := cmp.Diff(wantPrompts, gotPrompts); diff != "" { + t.Fatalf("prompts/list mismatch (-want +got):\n%s", diff) + } + + gotReview, err := sc.GetPrompt(ctx, "code_review", map[string]string{"Code": "1+1"}) + if err != nil { + t.Fatal(err) + } + // TODO: assert on the full review, once content is easier to create. 
+ if got, want := gotReview.Description, "Code review prompt"; got != want { + t.Errorf("prompts/get 'code_review': got description %q, want %q", got, want) + } + if _, err := sc.GetPrompt(ctx, "fail", map[string]string{}); err == nil || !strings.Contains(err.Error(), failure.Error()) { + t.Errorf("fail returned unexpected error: got %v, want containing %v", err, failure) + } + gotTools, err := sc.ListTools(ctx) if err != nil { t.Errorf("tools/list failed: %v", err) @@ -107,7 +161,7 @@ func TestEndToEnd(t *testing.T) { t.Fatalf("tools/list mismatch (-want +got):\n%s", diff) } - gotHi, err := sc.CallTool(ctx, "greet", hiParams{"user"}) + gotHi, err := sc.CallTool(ctx, "greet", map[string]any{"name": "user"}) if err != nil { t.Fatal(err) } @@ -116,7 +170,7 @@ func TestEndToEnd(t *testing.T) { t.Errorf("tools/call 'greet' mismatch (-want +got):\n%s", diff) } - if _, err := sc.CallTool(ctx, "fail", struct{}{}); err == nil || !strings.Contains(err.Error(), failure.Error()) { + if _, err := sc.CallTool(ctx, "fail", map[string]any{}); err == nil || !strings.Contains(err.Error(), failure.Error()) { t.Errorf("fail returned unexpected error: got %v, want containing %v", err, failure) } @@ -175,12 +229,12 @@ func TestServerClosing(t *testing.T) { } wg.Done() }() - if _, err := sc.CallTool(ctx, "greet", hiParams{"user"}); err != nil { + if _, err := sc.CallTool(ctx, "greet", map[string]any{"name": "user"}); err != nil { t.Fatalf("after connecting: %v", err) } cc.Close() wg.Wait() - if _, err := sc.CallTool(ctx, "greet", hiParams{"user"}); !errors.Is(err, ErrConnectionClosed) { + if _, err := sc.CallTool(ctx, "greet", map[string]any{"name": "user"}); !errors.Is(err, ErrConnectionClosed) { t.Errorf("after disconnection, got error %v, want EOF", err) } } @@ -244,7 +298,7 @@ func TestCancellation(t *testing.T) { defer sc.Close() ctx, cancel := context.WithCancel(context.Background()) - go sc.CallTool(ctx, "slow", struct{}{}) + go sc.CallTool(ctx, "slow", map[string]any{}) <-start cancel() select { diff --git a/internal/mcp/prompt.go b/internal/mcp/prompt.go new file mode 100644 index 00000000000..f136669d8e4 --- /dev/null +++ b/internal/mcp/prompt.go @@ -0,0 +1,122 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mcp + +import ( + "context" + "encoding/json" + "fmt" + "reflect" + "slices" + + "golang.org/x/tools/internal/mcp/internal/jsonschema" + "golang.org/x/tools/internal/mcp/internal/protocol" + "golang.org/x/tools/internal/mcp/internal/util" +) + +// A PromptHandler handles a call to prompts/get. +type PromptHandler func(context.Context, *ClientConnection, map[string]string) (*protocol.GetPromptResult, error) + +// A Prompt is a prompt definition bound to a prompt handler. +type Prompt struct { + Definition protocol.Prompt + Handler PromptHandler +} + +// MakePrompt is a helper to use reflection to create a prompt for the given +// handler. +// +// The arguments for the prompt are extracted from the request type for the +// handler. The handler request type must be a struct consisting only of fields +// of type string or *string. The argument names for the resulting prompt +// definition correspond to the JSON names of the request fields, and any +// fields that are not marked "omitempty" are considered required. 
+func MakePrompt[TReq any](name, description string, handler func(context.Context, *ClientConnection, TReq) (*protocol.GetPromptResult, error), opts ...PromptOption) *Prompt { + schema, err := jsonschema.For[TReq]() + if err != nil { + panic(err) + } + if schema.Type != "object" || !reflect.DeepEqual(schema.AdditionalProperties, &jsonschema.Schema{Not: &jsonschema.Schema{}}) { + panic(fmt.Sprintf("handler request type must be a struct")) + } + prompt := &Prompt{ + Definition: protocol.Prompt{ + Name: name, + Description: description, + }, + } + required := make(map[string]bool) + for _, p := range schema.Required { + required[p] = true + } + for name, prop := range util.Sorted(schema.Properties) { + if prop.Type != "string" { + panic(fmt.Sprintf("handler type must consist only of string fields")) + } + prompt.Definition.Arguments = append(prompt.Definition.Arguments, protocol.PromptArgument{ + Name: name, + Description: prop.Description, + Required: required[name], + }) + } + prompt.Handler = func(ctx context.Context, cc *ClientConnection, args map[string]string) (*protocol.GetPromptResult, error) { + // For simplicity, just marshal and unmarshal the arguments. + // This could be avoided in the future. + rawArgs, err := json.Marshal(args) + if err != nil { + return nil, err + } + var v TReq + if err := unmarshalSchema(rawArgs, schema, &v); err != nil { + return nil, err + } + return handler(ctx, cc, v) + } + for _, opt := range opts { + opt.set(prompt) + } + return prompt +} + +// A PromptOption configures the behavior of a Prompt. +type PromptOption interface { + set(*Prompt) +} + +type promptSetter func(*Prompt) + +func (s promptSetter) set(p *Prompt) { s(p) } + +// Argument configures the 'schema' of a prompt argument. +// If the argument does not exist, it is added. +// +// Since prompt arguments are not a full JSON schema, Argument only accepts +// Required and Description, and panics when encountering any other option. +func Argument(name string, opts ...SchemaOption) PromptOption { + return promptSetter(func(p *Prompt) { + i := slices.IndexFunc(p.Definition.Arguments, func(arg protocol.PromptArgument) bool { + return arg.Name == name + }) + var arg protocol.PromptArgument + if i < 0 { + i = len(p.Definition.Arguments) + arg = protocol.PromptArgument{Name: name} + p.Definition.Arguments = append(p.Definition.Arguments, arg) + } else { + arg = p.Definition.Arguments[i] + } + for _, opt := range opts { + switch v := opt.(type) { + case required: + arg.Required = bool(v) + case description: + arg.Description = string(v) + default: + panic(fmt.Sprintf("unsupported prompt argument schema option %T", opt)) + } + } + p.Definition.Arguments[i] = arg + }) +} diff --git a/internal/mcp/prompt_test.go b/internal/mcp/prompt_test.go new file mode 100644 index 00000000000..6fccc9b936f --- /dev/null +++ b/internal/mcp/prompt_test.go @@ -0,0 +1,54 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mcp_test + +import ( + "context" + "testing" + + "github.com/google/go-cmp/cmp" + "golang.org/x/tools/internal/mcp" + "golang.org/x/tools/internal/mcp/internal/protocol" +) + +// testPromptHandler is used for type inference in TestMakePrompt. 
+func testPromptHandler[T any](context.Context, *mcp.ClientConnection, T) (*protocol.GetPromptResult, error) { + panic("not implemented") +} + +func TestMakePrompt(t *testing.T) { + tests := []struct { + prompt *mcp.Prompt + want []protocol.PromptArgument + }{ + { + mcp.MakePrompt("empty", "", testPromptHandler[struct{}]), + nil, + }, + { + mcp.MakePrompt("add_arg", "", testPromptHandler[struct{}], mcp.Argument("x")), + []protocol.PromptArgument{{Name: "x"}}, + }, + { + mcp.MakePrompt("combo", "", testPromptHandler[struct { + Name string `json:"name"` + Country string `json:"country,omitempty"` + State string + }], + mcp.Argument("name", mcp.Description("the person's name")), + mcp.Argument("State", mcp.Required(false))), + []protocol.PromptArgument{ + {Name: "State"}, + {Name: "country"}, + {Name: "name", Required: true, Description: "the person's name"}, + }, + }, + } + for _, test := range tests { + if diff := cmp.Diff(test.want, test.prompt.Definition.Arguments); diff != "" { + t.Errorf("MakePrompt(%v) mismatch (-want +got):\n%s", test.prompt.Definition.Name, diff) + } + } +} diff --git a/internal/mcp/server.go b/internal/mcp/server.go index 527cddad66b..d549db50b71 100644 --- a/internal/mcp/server.go +++ b/internal/mcp/server.go @@ -26,6 +26,7 @@ type Server struct { opts ServerOptions mu sync.Mutex + prompts []*Prompt tools []*Tool clients []*ClientConnection } @@ -53,6 +54,15 @@ func NewServer(name, version string, opts *ServerOptions) *Server { } } +// AddPrompts adds the given prompts to the server. +// +// TODO(rfindley): notify connected clients of any changes. +func (s *Server) AddPrompts(prompts ...*Prompt) { + s.mu.Lock() + defer s.mu.Unlock() + s.prompts = append(s.prompts, prompts...) +} + // AddTools adds the given tools to the server. // // TODO(rfindley): notify connected clients of any changes. @@ -71,6 +81,33 @@ func (s *Server) Clients() iter.Seq[*ClientConnection] { return slices.Values(clients) } +func (s *Server) listPrompts(_ context.Context, _ *ClientConnection, params *protocol.ListPromptsParams) (*protocol.ListPromptsResult, error) { + s.mu.Lock() + defer s.mu.Unlock() + + res := new(protocol.ListPromptsResult) + for _, p := range s.prompts { + res.Prompts = append(res.Prompts, p.Definition) + } + return res, nil +} + +func (s *Server) getPrompt(ctx context.Context, cc *ClientConnection, params *protocol.GetPromptParams) (*protocol.GetPromptResult, error) { + s.mu.Lock() + var prompt *Prompt + if i := slices.IndexFunc(s.prompts, func(t *Prompt) bool { + return t.Definition.Name == params.Name + }); i >= 0 { + prompt = s.prompts[i] + } + s.mu.Unlock() + + if prompt == nil { + return nil, fmt.Errorf("%s: unknown prompt %q", jsonrpc2.ErrInvalidParams, params.Name) + } + return prompt.Handler(ctx, cc, params.Arguments) +} + func (s *Server) listTools(_ context.Context, _ *ClientConnection, params *protocol.ListToolsParams) (*protocol.ListToolsResult, error) { s.mu.Lock() defer s.mu.Unlock() @@ -185,6 +222,12 @@ func (cc *ClientConnection) handle(ctx context.Context, req *jsonrpc2.Request) ( // The spec says that 'ping' expects an empty object result. return struct{}{}, nil + case "prompts/list": + return dispatch(ctx, cc, req, cc.server.listPrompts) + + case "prompts/get": + return dispatch(ctx, cc, req, cc.server.getPrompt) + case "tools/list": return dispatch(ctx, cc, req, cc.server.listTools) @@ -216,8 +259,11 @@ func (cc *ClientConnection) initialize(ctx context.Context, _ *ClientConnection, // TODO(rfindley): support multiple protocol versions. 
ProtocolVersion: "2024-11-05", Capabilities: protocol.ServerCapabilities{ + Prompts: &protocol.PromptCapabilities{ + ListChanged: false, // not yet supported + }, Tools: &protocol.ToolCapabilities{ - ListChanged: true, + ListChanged: false, // not yet supported }, }, Instructions: cc.server.opts.Instructions, @@ -249,5 +295,11 @@ func dispatch[TConn, TParams, TResult any](ctx context.Context, conn TConn, req if err := json.Unmarshal(req.Params, ¶ms); err != nil { return nil, err } - return f(ctx, conn, params) + // Important: avoid returning a typed nil, as it can't be handled by the + // jsonrpc2 package. + res, err := f(ctx, conn, params) + if err != nil { + return nil, err + } + return res, nil } diff --git a/internal/mcp/server_example_test.go b/internal/mcp/server_example_test.go index 679cbe0c144..4e4517b78d5 100644 --- a/internal/mcp/server_example_test.go +++ b/internal/mcp/server_example_test.go @@ -40,7 +40,7 @@ func ExampleServer() { log.Fatal(err) } - content, err := serverConnection.CallTool(ctx, "greet", SayHiParams{Name: "user"}) + content, err := serverConnection.CallTool(ctx, "greet", map[string]any{"name": "user"}) if err != nil { log.Fatal(err) } diff --git a/internal/mcp/sse_example_test.go b/internal/mcp/sse_example_test.go index fb2445012f5..6a22de348e8 100644 --- a/internal/mcp/sse_example_test.go +++ b/internal/mcp/sse_example_test.go @@ -40,7 +40,7 @@ func ExampleSSEHandler() { } defer serverConn.Close() - content, err := serverConn.CallTool(ctx, "add", AddParams{1, 2}) + content, err := serverConn.CallTool(ctx, "add", map[string]any{"x": 1, "y": 2}) if err != nil { log.Fatal(err) } diff --git a/internal/mcp/sse_test.go b/internal/mcp/sse_test.go index 4e41f60e3e1..ac2e902ffb3 100644 --- a/internal/mcp/sse_test.go +++ b/internal/mcp/sse_test.go @@ -44,7 +44,7 @@ func TestSSEServer(t *testing.T) { t.Fatal(err) } cc := <-clients - gotHi, err := sc.CallTool(ctx, "greet", hiParams{"user"}) + gotHi, err := sc.CallTool(ctx, "greet", map[string]any{"name": "user"}) if err != nil { t.Fatal(err) } diff --git a/internal/mcp/tool.go b/internal/mcp/tool.go index 491e727a12b..491f53f30e8 100644 --- a/internal/mcp/tool.go +++ b/internal/mcp/tool.go @@ -14,7 +14,7 @@ import ( ) // A ToolHandler handles a call to tools/call. -type ToolHandler func(context.Context, *ClientConnection, json.RawMessage) (*protocol.CallToolResult, error) +type ToolHandler func(context.Context, *ClientConnection, map[string]json.RawMessage) (*protocol.CallToolResult, error) // A Tool is a tool definition that is bound to a tool handler. type Tool struct { @@ -30,20 +30,30 @@ type Tool struct { // handler, and used to unmmarshal and validate requests to the handler. This // schema may be customized using the [Input] option. // -// It is the caller's responsibility that the handler request type can produce -// a valid schema, as documented by [jsonschema.ForType]; otherwise, MakeTool -// panics. +// The handler request type must translate to a valid schema, as documented by +// [jsonschema.ForType]; otherwise, MakeTool panics. +// +// TODO: just have the handler return a CallToolResult: returning []Content is +// going to be inconsistent with other server features. 
func MakeTool[TReq any](name, description string, handler func(context.Context, *ClientConnection, TReq) ([]Content, error), opts ...ToolOption) *Tool { schema, err := jsonschema.For[TReq]() if err != nil { panic(err) } - wrapped := func(ctx context.Context, cc *ClientConnection, args json.RawMessage) (*protocol.CallToolResult, error) { + wrapped := func(ctx context.Context, cc *ClientConnection, args map[string]json.RawMessage) (*protocol.CallToolResult, error) { + // For simplicity, just marshal and unmarshal the arguments. + // This could be avoided in the future. + rawArgs, err := json.Marshal(args) + if err != nil { + return nil, err + } var v TReq - if err := unmarshalSchema(args, schema, &v); err != nil { + if err := unmarshalSchema(rawArgs, schema, &v); err != nil { return nil, err } content, err := handler(ctx, cc, v) + // TODO: investigate why server errors are embedded in this strange way, + // rather than returned as jsonrpc2 server errors. if err != nil { return &protocol.CallToolResult{ Content: marshalContent([]Content{TextContent{Text: err.Error()}}), @@ -140,6 +150,8 @@ func Required(v bool) SchemaOption { return required(v) } +// required must be a distinguished type as it needs special handling to mutate +// the parent schema, and to mutate prompt arguments. type required bool func (required) set(s *jsonschema.Schema) { @@ -154,10 +166,16 @@ func Enum(values ...any) SchemaOption { } // Description sets the provided schema description. -func Description(description string) SchemaOption { - return schemaSetter(func(schema *jsonschema.Schema) { - schema.Description = description - }) +func Description(desc string) SchemaOption { + return description(desc) +} + +// description must be a distinguished type so that it can be handled by prompt +// options. +type description string + +func (d description) set(s *jsonschema.Schema) { + s.Description = string(d) } // Schema overrides the inferred schema with a shallow copy of the given diff --git a/internal/mcp/tool_test.go b/internal/mcp/tool_test.go index 3d8201a89ad..2891213c906 100644 --- a/internal/mcp/tool_test.go +++ b/internal/mcp/tool_test.go @@ -13,8 +13,8 @@ import ( "golang.org/x/tools/internal/mcp/internal/jsonschema" ) -// testHandler is used for type inference in TestMakeTool. -func testHandler[T any](context.Context, *mcp.ClientConnection, T) ([]mcp.Content, error) { +// testToolHandler is used for type inference in TestMakeTool. 
+func testToolHandler[T any](context.Context, *mcp.ClientConnection, T) ([]mcp.Content, error) { panic("not implemented") } @@ -24,7 +24,7 @@ func TestMakeTool(t *testing.T) { want *jsonschema.Schema }{ { - mcp.MakeTool("basic", "", testHandler[struct { + mcp.MakeTool("basic", "", testToolHandler[struct { Name string `json:"name"` }]), &jsonschema.Schema{ @@ -37,7 +37,7 @@ func TestMakeTool(t *testing.T) { }, }, { - mcp.MakeTool("enum", "", testHandler[struct{ Name string }], mcp.Input( + mcp.MakeTool("enum", "", testToolHandler[struct{ Name string }], mcp.Input( mcp.Property("Name", mcp.Enum("x", "y", "z")), )), &jsonschema.Schema{ @@ -50,7 +50,7 @@ func TestMakeTool(t *testing.T) { }, }, { - mcp.MakeTool("required", "", testHandler[struct { + mcp.MakeTool("required", "", testToolHandler[struct { Name string `json:"name"` Language string `json:"language"` X int `json:"x,omitempty"` @@ -70,7 +70,7 @@ func TestMakeTool(t *testing.T) { }, }, { - mcp.MakeTool("set_schema", "", testHandler[struct { + mcp.MakeTool("set_schema", "", testToolHandler[struct { X int `json:"x,omitempty"` Y int `json:"y,omitempty"` }], mcp.Input( From efd15d847c477923faddc112714c8f410452c685 Mon Sep 17 00:00:00 2001 From: Rob Findley Date: Wed, 30 Apr 2025 14:49:52 +0000 Subject: [PATCH 262/270] internal/mcp: clean up handling of content Add support for all content types, and formalize the conversion of content to and from the wire format (as a discriminated union). Change-Id: I93678ce98c02176a524d2c82425fb1267ad68bf0 Reviewed-on: https://go-review.googlesource.com/c/tools/+/669135 LUCI-TryBot-Result: Go LUCI Reviewed-by: Jonathan Amsterdam Auto-Submit: Robert Findley --- internal/mcp/client.go | 15 +-- internal/mcp/cmd_test.go | 5 +- internal/mcp/content.go | 140 +++++++++++++++------ internal/mcp/content_test.go | 75 +++++++++++ internal/mcp/examples/hello/main.go | 12 +- internal/mcp/internal/protocol/content.go | 42 +++++++ internal/mcp/internal/protocol/generate.go | 12 +- internal/mcp/internal/protocol/protocol.go | 15 +-- internal/mcp/internal/util/util.go | 9 ++ internal/mcp/mcp_test.go | 43 ++++--- internal/mcp/server_example_test.go | 4 +- internal/mcp/sse_example_test.go | 4 +- internal/mcp/sse_test.go | 5 +- internal/mcp/tool.go | 5 +- 14 files changed, 282 insertions(+), 104 deletions(-) create mode 100644 internal/mcp/content_test.go create mode 100644 internal/mcp/internal/protocol/content.go diff --git a/internal/mcp/client.go b/internal/mcp/client.go index b71585227f9..351f8b4cc5e 100644 --- a/internal/mcp/client.go +++ b/internal/mcp/client.go @@ -7,7 +7,6 @@ package mcp import ( "context" "encoding/json" - "errors" "fmt" "iter" "slices" @@ -194,7 +193,7 @@ func (sc *ServerConnection) ListTools(ctx context.Context) ([]protocol.Tool, err // TODO(jba): make the following true: // If the provided arguments do not conform to the schema for the given tool, // the call fails. 
-func (sc *ServerConnection) CallTool(ctx context.Context, name string, args map[string]any) (_ []Content, err error) { +func (sc *ServerConnection) CallTool(ctx context.Context, name string, args map[string]any) (_ *protocol.CallToolResult, err error) { defer func() { if err != nil { err = fmt.Errorf("calling tool %q: %w", name, err) @@ -218,15 +217,5 @@ func (sc *ServerConnection) CallTool(ctx context.Context, name string, args map[ if err := call(ctx, sc.conn, "tools/call", params, &result); err != nil { return nil, err } - content, err := unmarshalContent(result.Content) - if err != nil { - return nil, fmt.Errorf("unmarshaling tool content: %v", err) - } - if result.IsError { - if len(content) != 1 || !is[TextContent](content[0]) { - return nil, errors.New("malformed error content") - } - return nil, errors.New(content[0].(TextContent).Text) - } - return content, nil + return &result, nil } diff --git a/internal/mcp/cmd_test.go b/internal/mcp/cmd_test.go index 01de31c0e9d..0cf3fa36301 100644 --- a/internal/mcp/cmd_test.go +++ b/internal/mcp/cmd_test.go @@ -13,6 +13,7 @@ import ( "github.com/google/go-cmp/cmp" "golang.org/x/tools/internal/mcp" + "golang.org/x/tools/internal/mcp/internal/protocol" ) const runAsServer = "_MCP_RUN_AS_SERVER" @@ -57,7 +58,9 @@ func TestCmdTransport(t *testing.T) { if err != nil { log.Fatal(err) } - want := []mcp.Content{mcp.TextContent{Text: "Hi user"}} + want := &protocol.CallToolResult{ + Content: []protocol.Content{{Type: "text", Text: "Hi user"}}, + } if diff := cmp.Diff(want, got); diff != "" { t.Errorf("greet returned unexpected content (-want +got):\n%s", diff) } diff --git a/internal/mcp/content.go b/internal/mcp/content.go index 5f13e4834c3..f0e20136fbc 100644 --- a/internal/mcp/content.go +++ b/internal/mcp/content.go @@ -5,60 +5,120 @@ package mcp import ( - "encoding/json" "fmt" "golang.org/x/tools/internal/mcp/internal/protocol" ) -// Content is the abstract result of a Tool call. +// Content is the union of supported content types: [TextContent], +// [ImageContent], [AudioContent], and [ResourceContent]. // -// TODO: support all content types. +// ToWire converts content to its jsonrpc2 wire format. type Content interface { - toProtocol() any + ToWire() protocol.Content } -func marshalContent(content []Content) []json.RawMessage { - var msgs []json.RawMessage - for _, c := range content { - msg, err := json.Marshal(c.toProtocol()) - if err != nil { - panic(fmt.Sprintf("marshaling content: %v", err)) - } - msgs = append(msgs, msg) - } - return msgs +// TextContent is a textual content. +type TextContent struct { + Text string } -func unmarshalContent(msgs []json.RawMessage) ([]Content, error) { - var content []Content - for _, msg := range msgs { - var allContent struct { - Type string `json:"type"` - Text json.RawMessage - } - if err := json.Unmarshal(msg, &allContent); err != nil { - return nil, fmt.Errorf("content missing \"type\"") - } - switch allContent.Type { - case "text": - var text string - if err := json.Unmarshal(allContent.Text, &text); err != nil { - return nil, fmt.Errorf("unmarshalling text content: %v", err) - } - content = append(content, TextContent{Text: text}) - default: - return nil, fmt.Errorf("unsupported content type %q", allContent.Type) - } +func (c TextContent) ToWire() protocol.Content { + return protocol.Content{Type: "text", Text: c.Text} +} + +// ImageContent contains base64-encoded image data. 
+type ImageContent struct { + Data string + MimeType string +} + +func (c ImageContent) ToWire() protocol.Content { + return protocol.Content{Type: "image", MIMEType: c.MimeType, Data: c.Data} +} + +// AudioContent contains base64-encoded audio data. +type AudioContent struct { + Data string + MimeType string +} + +func (c AudioContent) ToWire() protocol.Content { + return protocol.Content{Type: "audio", MIMEType: c.MimeType, Data: c.Data} +} + +// ResourceContent contains embedded resources. +type ResourceContent struct { + Resource Resource +} + +func (r ResourceContent) ToWire() protocol.Content { + res := r.Resource.ToWire() + return protocol.Content{Type: "resource", Resource: &res} +} + +type Resource interface { + ToWire() protocol.Resource +} + +type TextResource struct { + URI string + MimeType string + Text string +} + +func (r TextResource) ToWire() protocol.Resource { + return protocol.Resource{ + URI: r.URI, + MIMEType: r.MimeType, + Text: r.Text, } - return content, nil } -// TextContent is a textual content. -type TextContent struct { - Text string +type BlobResource struct { + URI string + MimeType string + Blob string } -func (c TextContent) toProtocol() any { - return protocol.TextContent{Type: "text", Text: c.Text} +func (r BlobResource) ToWire() protocol.Resource { + blob := r.Blob + return protocol.Resource{ + URI: r.URI, + MIMEType: r.MimeType, + Blob: &blob, + } +} + +// ContentFromWireContent converts content from the jsonrpc2 wire format to a +// typed Content value. +func ContentFromWireContent(c protocol.Content) Content { + switch c.Type { + case "text": + return TextContent{Text: c.Text} + case "image": + return ImageContent{Data: c.Data, MimeType: c.MIMEType} + case "audio": + return AudioContent{Data: c.Data, MimeType: c.MIMEType} + case "resource": + r := ResourceContent{} + if c.Resource != nil { + if c.Resource.Blob != nil { + r.Resource = BlobResource{ + URI: c.Resource.URI, + MimeType: c.Resource.MIMEType, + Blob: *c.Resource.Blob, + } + } else { + r.Resource = TextResource{ + URI: c.Resource.URI, + MimeType: c.Resource.MIMEType, + Text: c.Resource.Text, + } + } + } + return r + default: + panic(fmt.Sprintf("unrecognized wire content type %q", c.Type)) + } } diff --git a/internal/mcp/content_test.go b/internal/mcp/content_test.go new file mode 100644 index 00000000000..f48f51e7689 --- /dev/null +++ b/internal/mcp/content_test.go @@ -0,0 +1,75 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package mcp_test + +import ( + "testing" + + "github.com/google/go-cmp/cmp" + "golang.org/x/tools/internal/mcp" + "golang.org/x/tools/internal/mcp/internal/protocol" +) + +func TestContent(t *testing.T) { + tests := []struct { + in mcp.Content + want protocol.Content + }{ + {mcp.TextContent{Text: "hello"}, protocol.Content{Type: "text", Text: "hello"}}, + { + mcp.ImageContent{Data: "a1b2c3", MimeType: "image/png"}, + protocol.Content{Type: "image", Data: "a1b2c3", MIMEType: "image/png"}, + }, + { + mcp.AudioContent{Data: "a1b2c3", MimeType: "audio/wav"}, + protocol.Content{Type: "audio", Data: "a1b2c3", MIMEType: "audio/wav"}, + }, + { + mcp.ResourceContent{ + Resource: mcp.TextResource{ + URI: "file://foo", + MimeType: "text", + Text: "abc", + }, + }, + protocol.Content{ + Type: "resource", + Resource: &protocol.Resource{ + URI: "file://foo", + MIMEType: "text", + Text: "abc", + }, + }, + }, + { + mcp.ResourceContent{ + Resource: mcp.BlobResource{ + URI: "file://foo", + MimeType: "text", + Blob: "a1b2c3", + }, + }, + protocol.Content{ + Type: "resource", + Resource: &protocol.Resource{ + URI: "file://foo", + MIMEType: "text", + Blob: ptr("a1b2c3"), + }, + }, + }, + } + + for _, test := range tests { + got := test.in.ToWire() + if diff := cmp.Diff(test.want, got); diff != "" { + t.Errorf("ToWire mismatch (-want +got):\n%s", diff) + } + } +} + +func ptr[T any](t T) *T { + return &t +} diff --git a/internal/mcp/examples/hello/main.go b/internal/mcp/examples/hello/main.go index 5f4634d3a8e..3f80254fd33 100644 --- a/internal/mcp/examples/hello/main.go +++ b/internal/mcp/examples/hello/main.go @@ -6,7 +6,6 @@ package main import ( "context" - "encoding/json" "flag" "fmt" "net/http" @@ -29,19 +28,10 @@ func SayHi(ctx context.Context, cc *mcp.ClientConnection, params *HiParams) ([]m } func PromptHi(ctx context.Context, cc *mcp.ClientConnection, params *HiParams) (*protocol.GetPromptResult, error) { - // (see related TODOs about cleaning up content construction) - content, err := json.Marshal(protocol.TextContent{ - Type: "text", - Text: "Say hi to " + params.Name, - }) - if err != nil { - return nil, err - } return &protocol.GetPromptResult{ Description: "Code review prompt", Messages: []protocol.PromptMessage{ - // TODO: move 'Content' to the protocol package. - {Role: "user", Content: json.RawMessage(content)}, + {Role: "user", Content: mcp.TextContent{Text: "Say hi to " + params.Name}.ToWire()}, }, }, nil } diff --git a/internal/mcp/internal/protocol/content.go b/internal/mcp/internal/protocol/content.go new file mode 100644 index 00000000000..5374b62488b --- /dev/null +++ b/internal/mcp/internal/protocol/content.go @@ -0,0 +1,42 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package protocol + +import ( + "encoding/json" + "fmt" +) + +// Content is the wire format for content, including all fields. +type Content struct { + Type string `json:"type"` + Text string `json:"text,omitempty"` + MIMEType string `json:"mimeType,omitempty"` + Data string `json:"data,omitempty"` + Resource *Resource `json:"resource,omitempty"` +} + +// Resource is the wire format for embedded resources, including all fields. 
+type Resource struct {
+	URI      string  `json:"uri"`
+	MIMEType string  `json:"mimeType,omitempty"`
+	Text     string  `json:"text"`
+	Blob     *string `json:"blob"` // blob is a pointer to distinguish empty from missing data
+}
+
+func (c *Content) UnmarshalJSON(data []byte) error {
+	type wireContent Content // for naive unmarshaling
+	var c2 wireContent
+	if err := json.Unmarshal(data, &c2); err != nil {
+		return err
+	}
+	switch c2.Type {
+	case "text", "image", "audio", "resource":
+	default:
+		return fmt.Errorf("unrecognized content type %s", c2.Type)
+	}
+	*c = Content(c2)
+	return nil
+}
diff --git a/internal/mcp/internal/protocol/generate.go b/internal/mcp/internal/protocol/generate.go
index c5ffee9964a..e8c92bc802e 100644
--- a/internal/mcp/internal/protocol/generate.go
+++ b/internal/mcp/internal/protocol/generate.go
@@ -91,7 +91,6 @@ var declarations = config{
 			"Tools": {Name: "ToolCapabilities"},
 		},
 	},
-	"TextContent": {Name: "TextContent"},
 	"Tool": {
 		Name:   "Tool",
 		Fields: config{"InputSchema": {Substitute: "*jsonschema.Schema"}},
@@ -249,8 +248,15 @@ func writeType(w io.Writer, config *typeConfig, def *jsonschema.Schema, named ma
 	}
 
 	if def.Type == "" {
-		// E.g. union types.
-		fmt.Fprintf(w, "json.RawMessage")
+		// special case: recognize Content
+		if slices.ContainsFunc(def.AnyOf, func(s *jsonschema.Schema) bool {
+			return s.Ref == "#/definitions/TextContent"
+		}) {
+			fmt.Fprintf(w, "Content")
+		} else {
+			// E.g. union types.
+			fmt.Fprintf(w, "json.RawMessage")
+		}
 	} else {
 		switch def.Type {
 		case "array":
diff --git a/internal/mcp/internal/protocol/protocol.go b/internal/mcp/internal/protocol/protocol.go
index b851dac0808..bd02bf07b74 100644
--- a/internal/mcp/internal/protocol/protocol.go
+++ b/internal/mcp/internal/protocol/protocol.go
@@ -47,7 +47,7 @@ type CallToolResult struct {
 	// This result property is reserved by the protocol to allow clients and servers
 	// to attach additional metadata to their responses.
 	Meta map[string]json.RawMessage `json:"_meta,omitempty"`
-	Content []json.RawMessage `json:"content"`
+	Content []Content `json:"content"`
 	// Whether the tool call ended in an error.
 	//
 	// If not set, this is assumed to be false (the call was successful).
@@ -198,8 +198,8 @@ type PromptCapabilities struct {
 // This is similar to `SamplingMessage`, but also supports the embedding of
 // resources from the MCP server.
 type PromptMessage struct {
-	Content json.RawMessage `json:"content"`
-	Role    Role            `json:"role"`
+	Content Content `json:"content"`
+	Role    Role    `json:"role"`
 }
 
 // Present if the server offers any resources to read.
@@ -231,15 +231,6 @@ type ServerCapabilities struct {
 	Tools *ToolCapabilities `json:"tools,omitempty"`
 }
 
-// Text provided to or from an LLM.
-type TextContent struct {
-	// Optional annotations for the client.
-	Annotations *Annotations `json:"annotations,omitempty"`
-	// The text content of the message.
-	Text string `json:"text"`
-	Type string `json:"type"`
-}
-
 // Definition for a tool the client can call.
 type Tool struct {
 	// Optional additional tool information.
diff --git a/internal/mcp/internal/util/util.go b/internal/mcp/internal/util/util.go
index cdc6038ede8..c62a6f7e0af 100644
--- a/internal/mcp/internal/util/util.go
+++ b/internal/mcp/internal/util/util.go
@@ -10,6 +10,15 @@ import (
 	"slices"
 )
 
+// Apply returns a new slice resulting from applying f to each element of x.
+func Apply[S ~[]E, E, F any](x S, f func(E) F) []F { + y := make([]F, len(x)) + for i, e := range x { + y[i] = f(e) + } + return y +} + // Helpers below are copied from gopls' moremaps package. // sorted returns an iterator over the entries of m in key order. diff --git a/internal/mcp/mcp_test.go b/internal/mcp/mcp_test.go index 70fcbab27da..a455f39c835 100644 --- a/internal/mcp/mcp_test.go +++ b/internal/mcp/mcp_test.go @@ -6,7 +6,6 @@ package mcp import ( "context" - "encoding/json" "errors" "fmt" "slices" @@ -50,19 +49,10 @@ func TestEndToEnd(t *testing.T) { s.AddPrompts( MakePrompt("code_review", "do a code review", func(_ context.Context, _ *ClientConnection, params struct{ Code string }) (*protocol.GetPromptResult, error) { - // TODO(rfindley): clean up this handling of content. - content, err := json.Marshal(protocol.TextContent{ - Type: "text", - Text: "Please review the following code: " + params.Code, - }) - if err != nil { - return nil, err - } return &protocol.GetPromptResult{ Description: "Code review prompt", Messages: []protocol.PromptMessage{ - // TODO: move 'Content' to the protocol package. - {Role: "user", Content: json.RawMessage(content)}, + {Role: "user", Content: TextContent{Text: "Please review the following code: " + params.Code}.ToWire()}, }, }, nil }), @@ -126,10 +116,17 @@ func TestEndToEnd(t *testing.T) { if err != nil { t.Fatal(err) } - // TODO: assert on the full review, once content is easier to create. - if got, want := gotReview.Description, "Code review prompt"; got != want { - t.Errorf("prompts/get 'code_review': got description %q, want %q", got, want) + wantReview := &protocol.GetPromptResult{ + Description: "Code review prompt", + Messages: []protocol.PromptMessage{{ + Content: TextContent{Text: "Please review the following code: 1+1"}.ToWire(), + Role: "user", + }}, + } + if diff := cmp.Diff(wantReview, gotReview); diff != "" { + t.Errorf("prompts/get 'code_review' mismatch (-want +got):\n%s", diff) } + if _, err := sc.GetPrompt(ctx, "fail", map[string]string{}); err == nil || !strings.Contains(err.Error(), failure.Error()) { t.Errorf("fail returned unexpected error: got %v, want containing %v", err, failure) } @@ -165,13 +162,25 @@ func TestEndToEnd(t *testing.T) { if err != nil { t.Fatal(err) } - wantHi := []Content{TextContent{Text: "hi user"}} + wantHi := &protocol.CallToolResult{ + Content: []protocol.Content{{Type: "text", Text: "hi user"}}, + } if diff := cmp.Diff(wantHi, gotHi); diff != "" { t.Errorf("tools/call 'greet' mismatch (-want +got):\n%s", diff) } - if _, err := sc.CallTool(ctx, "fail", map[string]any{}); err == nil || !strings.Contains(err.Error(), failure.Error()) { - t.Errorf("fail returned unexpected error: got %v, want containing %v", err, failure) + gotFail, err := sc.CallTool(ctx, "fail", map[string]any{}) + // Counter-intuitively, when a tool fails, we don't expect an RPC error for + // call tool: instead, the failure is embedded in the result. + if err != nil { + t.Fatal(err) + } + wantFail := &protocol.CallToolResult{ + IsError: true, + Content: []protocol.Content{{Type: "text", Text: failure.Error()}}, + } + if diff := cmp.Diff(wantFail, gotFail); diff != "" { + t.Errorf("tools/call 'fail' mismatch (-want +got):\n%s", diff) } // Disconnect. 
diff --git a/internal/mcp/server_example_test.go b/internal/mcp/server_example_test.go index 4e4517b78d5..a386903d604 100644 --- a/internal/mcp/server_example_test.go +++ b/internal/mcp/server_example_test.go @@ -40,11 +40,11 @@ func ExampleServer() { log.Fatal(err) } - content, err := serverConnection.CallTool(ctx, "greet", map[string]any{"name": "user"}) + res, err := serverConnection.CallTool(ctx, "greet", map[string]any{"name": "user"}) if err != nil { log.Fatal(err) } - fmt.Println(content[0].(mcp.TextContent).Text) + fmt.Println(res.Content[0].Text) serverConnection.Close() clientConnection.Wait() diff --git a/internal/mcp/sse_example_test.go b/internal/mcp/sse_example_test.go index 6a22de348e8..ad88a22e073 100644 --- a/internal/mcp/sse_example_test.go +++ b/internal/mcp/sse_example_test.go @@ -40,11 +40,11 @@ func ExampleSSEHandler() { } defer serverConn.Close() - content, err := serverConn.CallTool(ctx, "add", map[string]any{"x": 1, "y": 2}) + res, err := serverConn.CallTool(ctx, "add", map[string]any{"x": 1, "y": 2}) if err != nil { log.Fatal(err) } - fmt.Println(content[0].(mcp.TextContent).Text) + fmt.Println(res.Content[0].Text) // Output: 3 } diff --git a/internal/mcp/sse_test.go b/internal/mcp/sse_test.go index ac2e902ffb3..920d746d3ad 100644 --- a/internal/mcp/sse_test.go +++ b/internal/mcp/sse_test.go @@ -12,6 +12,7 @@ import ( "testing" "github.com/google/go-cmp/cmp" + "golang.org/x/tools/internal/mcp/internal/protocol" ) func TestSSEServer(t *testing.T) { @@ -48,7 +49,9 @@ func TestSSEServer(t *testing.T) { if err != nil { t.Fatal(err) } - wantHi := []Content{TextContent{Text: "hi user"}} + wantHi := &protocol.CallToolResult{ + Content: []protocol.Content{{Type: "text", Text: "hi user"}}, + } if diff := cmp.Diff(wantHi, gotHi); diff != "" { t.Errorf("tools/call 'greet' mismatch (-want +got):\n%s", diff) } diff --git a/internal/mcp/tool.go b/internal/mcp/tool.go index 491f53f30e8..f10c0286db4 100644 --- a/internal/mcp/tool.go +++ b/internal/mcp/tool.go @@ -11,6 +11,7 @@ import ( "golang.org/x/tools/internal/mcp/internal/jsonschema" "golang.org/x/tools/internal/mcp/internal/protocol" + "golang.org/x/tools/internal/mcp/internal/util" ) // A ToolHandler handles a call to tools/call. @@ -56,12 +57,12 @@ func MakeTool[TReq any](name, description string, handler func(context.Context, // rather than returned as jsonrpc2 server errors. if err != nil { return &protocol.CallToolResult{ - Content: marshalContent([]Content{TextContent{Text: err.Error()}}), + Content: []protocol.Content{TextContent{Text: err.Error()}.ToWire()}, IsError: true, }, nil } res := &protocol.CallToolResult{ - Content: marshalContent(content), + Content: util.Apply(content, Content.ToWire), } return res, nil } From c75f7e8c3155d18474d59a91b2c99d29325d75d1 Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Fri, 25 Apr 2025 11:13:31 -0400 Subject: [PATCH 263/270] jsonschema: validate object keywords Add validation for JSON objects (Go maps). The map keys are called "properties." As with arrays, annotations are needed to support the unevaluatedProperties keyword. 
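For readers unfamiliar with how the object keywords interact, the following
is a small self-contained sketch (illustrative only; it is not this package's
API) of the evaluation order the new testdata exercises: properties and
patternProperties mark the keys they evaluate, additionalProperties applies
only to keys neither of them matched, and unevaluatedProperties applies only
to keys that nothing recorded as evaluated.

package main

import (
	"fmt"
	"regexp"
)

type check func(any) bool

type objectSchema struct {
	properties            map[string]check // fixed property names
	patternProperties     map[string]check // regexp pattern -> schema
	additionalProperties  check            // nil: no constraint
	unevaluatedProperties check            // nil: no constraint
}

func (s *objectSchema) validate(obj map[string]any) error {
	evaluated := map[string]bool{} // the "evaluatedProperties" annotation
	for key, val := range obj {
		if c, ok := s.properties[key]; ok {
			if !c(val) {
				return fmt.Errorf("properties: %q failed", key)
			}
			evaluated[key] = true
		}
		for pat, c := range s.patternProperties {
			if regexp.MustCompile(pat).MatchString(key) {
				if !c(val) {
					return fmt.Errorf("patternProperties: %q failed", key)
				}
				evaluated[key] = true
			}
		}
		// additionalProperties sees only keys the two keywords above missed.
		if !evaluated[key] && s.additionalProperties != nil {
			if !s.additionalProperties(val) {
				return fmt.Errorf("additionalProperties: %q failed", key)
			}
			evaluated[key] = true
		}
	}
	// unevaluatedProperties sees only keys nothing else evaluated; in the
	// real validator the evaluated set is also merged in from subschemas
	// (allOf, anyOf, $ref, ...), which is what the annotations are for.
	if s.unevaluatedProperties != nil {
		for key, val := range obj {
			if !evaluated[key] && !s.unevaluatedProperties(val) {
				return fmt.Errorf("unevaluatedProperties: %q failed", key)
			}
		}
	}
	return nil
}

func main() {
	isInt := func(v any) bool { _, ok := v.(int); return ok }
	s := &objectSchema{
		properties:            map[string]check{"foo": isInt},
		patternProperties:     map[string]check{"^x": isInt},
		unevaluatedProperties: func(any) bool { return false }, // "unevaluatedProperties": false
	}
	fmt.Println(s.validate(map[string]any{"foo": 1, "x1": 2}))  // <nil>
	fmt.Println(s.validate(map[string]any{"foo": 1, "bar": 2})) // unevaluatedProperties: "bar" failed
}

The allProperties/evaluatedProperties annotations added in this CL play the
role of the evaluated set in the sketch, with the extra twist that they are
merged upward from in-place applicators such as allOf and $ref, but not
between cousin subschemas, as the unevaluatedProperties testdata checks.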
Change-Id: Ia01f74300f4bcdbce94065572d83c46dfaa53ae7 Reviewed-on: https://go-review.googlesource.com/c/tools/+/668256 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan --- .../mcp/internal/jsonschema/annotations.go | 4 + .../draft2020-12/additionalProperties.json | 219 +++ .../testdata/draft2020-12/boolean_schema.json | 104 ++ .../draft2020-12/dependentRequired.json | 152 ++ .../draft2020-12/dependentSchemas.json | 171 ++ .../draft2020-12/infinite-loop-detection.json | 37 + .../testdata/draft2020-12/maxProperties.json | 79 + .../testdata/draft2020-12/minProperties.json | 60 + .../draft2020-12/patternProperties.json | 176 ++ .../testdata/draft2020-12/properties.json | 242 +++ .../testdata/draft2020-12/propertyNames.json | 168 ++ .../testdata/draft2020-12/required.json | 158 ++ .../draft2020-12/unevaluatedProperties.json | 1601 +++++++++++++++++ internal/mcp/internal/jsonschema/validate.go | 140 +- .../mcp/internal/jsonschema/validate_test.go | 2 +- 15 files changed, 3310 insertions(+), 3 deletions(-) create mode 100644 internal/mcp/internal/jsonschema/testdata/draft2020-12/additionalProperties.json create mode 100644 internal/mcp/internal/jsonschema/testdata/draft2020-12/boolean_schema.json create mode 100644 internal/mcp/internal/jsonschema/testdata/draft2020-12/dependentRequired.json create mode 100644 internal/mcp/internal/jsonschema/testdata/draft2020-12/dependentSchemas.json create mode 100644 internal/mcp/internal/jsonschema/testdata/draft2020-12/infinite-loop-detection.json create mode 100644 internal/mcp/internal/jsonschema/testdata/draft2020-12/maxProperties.json create mode 100644 internal/mcp/internal/jsonschema/testdata/draft2020-12/minProperties.json create mode 100644 internal/mcp/internal/jsonschema/testdata/draft2020-12/patternProperties.json create mode 100644 internal/mcp/internal/jsonschema/testdata/draft2020-12/properties.json create mode 100644 internal/mcp/internal/jsonschema/testdata/draft2020-12/propertyNames.json create mode 100644 internal/mcp/internal/jsonschema/testdata/draft2020-12/required.json create mode 100644 internal/mcp/internal/jsonschema/testdata/draft2020-12/unevaluatedProperties.json diff --git a/internal/mcp/internal/jsonschema/annotations.go b/internal/mcp/internal/jsonschema/annotations.go index 7b0932de8d4..1b6c2a57580 100644 --- a/internal/mcp/internal/jsonschema/annotations.go +++ b/internal/mcp/internal/jsonschema/annotations.go @@ -14,6 +14,7 @@ type annotations struct { allItems bool // all items were evaluated endIndex int // 1+largest index evaluated by prefixItems evaluatedIndexes map[int]bool // set of indexes evaluated by contains + allProperties bool // all properties were evaluated evaluatedProperties map[string]bool // set of properties evaluated by various keywords } @@ -58,6 +59,9 @@ func (a *annotations) merge(b *annotations) { a.endIndex = b.endIndex } a.evaluatedIndexes = merge(a.evaluatedIndexes, b.evaluatedIndexes) + if b.allProperties { + a.allProperties = true + } a.evaluatedProperties = merge(a.evaluatedProperties, b.evaluatedProperties) } diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/additionalProperties.json b/internal/mcp/internal/jsonschema/testdata/draft2020-12/additionalProperties.json new file mode 100644 index 00000000000..9618575e208 --- /dev/null +++ b/internal/mcp/internal/jsonschema/testdata/draft2020-12/additionalProperties.json @@ -0,0 +1,219 @@ +[ + { + "description": + "additionalProperties being false does not allow other properties", + "specification": [ { "core":"10.3.2.3", 
"quote": "The value of \"additionalProperties\" MUST be a valid JSON Schema. Boolean \"false\" forbids everything." } ], + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "properties": {"foo": {}, "bar": {}}, + "patternProperties": { "^v": {} }, + "additionalProperties": false + }, + "tests": [ + { + "description": "no additional properties is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "an additional property is invalid", + "data": {"foo" : 1, "bar" : 2, "quux" : "boom"}, + "valid": false + }, + { + "description": "ignores arrays", + "data": [1, 2, 3], + "valid": true + }, + { + "description": "ignores strings", + "data": "foobarbaz", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + }, + { + "description": "patternProperties are not additional properties", + "data": {"foo":1, "vroom": 2}, + "valid": true + } + ] + }, + { + "description": "non-ASCII pattern with additionalProperties", + "specification": [ { "core":"10.3.2.3"} ], + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "patternProperties": {"^á": {}}, + "additionalProperties": false + }, + "tests": [ + { + "description": "matching the pattern is valid", + "data": {"ármányos": 2}, + "valid": true + }, + { + "description": "not matching the pattern is invalid", + "data": {"élmény": 2}, + "valid": false + } + ] + }, + { + "description": "additionalProperties with schema", + "specification": [ { "core":"10.3.2.3", "quote": "The value of \"additionalProperties\" MUST be a valid JSON Schema." } ], + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "properties": {"foo": {}, "bar": {}}, + "additionalProperties": {"type": "boolean"} + }, + "tests": [ + { + "description": "no additional properties is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "an additional valid property is valid", + "data": {"foo" : 1, "bar" : 2, "quux" : true}, + "valid": true + }, + { + "description": "an additional invalid property is invalid", + "data": {"foo" : 1, "bar" : 2, "quux" : 12}, + "valid": false + } + ] + }, + { + "description": "additionalProperties can exist by itself", + "specification": [ { "core":"10.3.2.3", "quote": "With no other applicator applying to object instances. This validates all the instance values irrespective of their property names" } ], + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "additionalProperties": {"type": "boolean"} + }, + "tests": [ + { + "description": "an additional valid property is valid", + "data": {"foo" : true}, + "valid": true + }, + { + "description": "an additional invalid property is invalid", + "data": {"foo" : 1}, + "valid": false + } + ] + }, + { + "description": "additionalProperties are allowed by default", + "specification": [ { "core":"10.3.2.3", "quote": "Omitting this keyword has the same assertion behavior as an empty schema." } ], + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "properties": {"foo": {}, "bar": {}} + }, + "tests": [ + { + "description": "additional properties are allowed", + "data": {"foo": 1, "bar": 2, "quux": true}, + "valid": true + } + ] + }, + { + "description": "additionalProperties does not look in applicators", + "specification":[ { "core": "10.2", "quote": "Subschemas of applicator keywords evaluate the instance completely independently such that the results of one such subschema MUST NOT impact the results of sibling subschemas." 
} ], + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "allOf": [ + {"properties": {"foo": {}}} + ], + "additionalProperties": {"type": "boolean"} + }, + "tests": [ + { + "description": "properties defined in allOf are not examined", + "data": {"foo": 1, "bar": true}, + "valid": false + } + ] + }, + { + "description": "additionalProperties with null valued instance properties", + "specification": [ { "core":"10.3.2.3" } ], + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "additionalProperties": { + "type": "null" + } + }, + "tests": [ + { + "description": "allows null values", + "data": {"foo": null}, + "valid": true + } + ] + }, + { + "description": "additionalProperties with propertyNames", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "propertyNames": { + "maxLength": 5 + }, + "additionalProperties": { + "type": "number" + } + }, + "tests": [ + { + "description": "Valid against both keywords", + "data": { "apple": 4 }, + "valid": true + }, + { + "description": "Valid against propertyNames, but not additionalProperties", + "data": { "fig": 2, "pear": "available" }, + "valid": false + } + ] + }, + { + "description": "dependentSchemas with additionalProperties", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "properties": {"foo2": {}}, + "dependentSchemas": { + "foo" : {}, + "foo2": { + "properties": { + "bar": {} + } + } + }, + "additionalProperties": false + }, + "tests": [ + { + "description": "additionalProperties doesn't consider dependentSchemas", + "data": {"foo": ""}, + "valid": false + }, + { + "description": "additionalProperties can't see bar", + "data": {"bar": ""}, + "valid": false + }, + { + "description": "additionalProperties can't see bar even when foo2 is present", + "data": {"foo2": "", "bar": ""}, + "valid": false + } + ] + } +] diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/boolean_schema.json b/internal/mcp/internal/jsonschema/testdata/draft2020-12/boolean_schema.json new file mode 100644 index 00000000000..6d40f23f262 --- /dev/null +++ b/internal/mcp/internal/jsonschema/testdata/draft2020-12/boolean_schema.json @@ -0,0 +1,104 @@ +[ + { + "description": "boolean schema 'true'", + "schema": true, + "tests": [ + { + "description": "number is valid", + "data": 1, + "valid": true + }, + { + "description": "string is valid", + "data": "foo", + "valid": true + }, + { + "description": "boolean true is valid", + "data": true, + "valid": true + }, + { + "description": "boolean false is valid", + "data": false, + "valid": true + }, + { + "description": "null is valid", + "data": null, + "valid": true + }, + { + "description": "object is valid", + "data": {"foo": "bar"}, + "valid": true + }, + { + "description": "empty object is valid", + "data": {}, + "valid": true + }, + { + "description": "array is valid", + "data": ["foo"], + "valid": true + }, + { + "description": "empty array is valid", + "data": [], + "valid": true + } + ] + }, + { + "description": "boolean schema 'false'", + "schema": false, + "tests": [ + { + "description": "number is invalid", + "data": 1, + "valid": false + }, + { + "description": "string is invalid", + "data": "foo", + "valid": false + }, + { + "description": "boolean true is invalid", + "data": true, + "valid": false + }, + { + "description": "boolean false is invalid", + "data": false, + "valid": false + }, + { + "description": "null is invalid", + "data": null, + "valid": false + }, + { + "description": "object 
is invalid", + "data": {"foo": "bar"}, + "valid": false + }, + { + "description": "empty object is invalid", + "data": {}, + "valid": false + }, + { + "description": "array is invalid", + "data": ["foo"], + "valid": false + }, + { + "description": "empty array is invalid", + "data": [], + "valid": false + } + ] + } +] diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/dependentRequired.json b/internal/mcp/internal/jsonschema/testdata/draft2020-12/dependentRequired.json new file mode 100644 index 00000000000..2baa38e9f48 --- /dev/null +++ b/internal/mcp/internal/jsonschema/testdata/draft2020-12/dependentRequired.json @@ -0,0 +1,152 @@ +[ + { + "description": "single dependency", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "dependentRequired": {"bar": ["foo"]} + }, + "tests": [ + { + "description": "neither", + "data": {}, + "valid": true + }, + { + "description": "nondependant", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "with dependency", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "missing dependency", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "ignores arrays", + "data": ["bar"], + "valid": true + }, + { + "description": "ignores strings", + "data": "foobar", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "empty dependents", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "dependentRequired": {"bar": []} + }, + "tests": [ + { + "description": "empty object", + "data": {}, + "valid": true + }, + { + "description": "object with one property", + "data": {"bar": 2}, + "valid": true + }, + { + "description": "non-object is valid", + "data": 1, + "valid": true + } + ] + }, + { + "description": "multiple dependents required", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "dependentRequired": {"quux": ["foo", "bar"]} + }, + "tests": [ + { + "description": "neither", + "data": {}, + "valid": true + }, + { + "description": "nondependants", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "with dependencies", + "data": {"foo": 1, "bar": 2, "quux": 3}, + "valid": true + }, + { + "description": "missing dependency", + "data": {"foo": 1, "quux": 2}, + "valid": false + }, + { + "description": "missing other dependency", + "data": {"bar": 1, "quux": 2}, + "valid": false + }, + { + "description": "missing both dependencies", + "data": {"quux": 1}, + "valid": false + } + ] + }, + { + "description": "dependencies with escaped characters", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "dependentRequired": { + "foo\nbar": ["foo\rbar"], + "foo\"bar": ["foo'bar"] + } + }, + "tests": [ + { + "description": "CRLF", + "data": { + "foo\nbar": 1, + "foo\rbar": 2 + }, + "valid": true + }, + { + "description": "quoted quotes", + "data": { + "foo'bar": 1, + "foo\"bar": 2 + }, + "valid": true + }, + { + "description": "CRLF missing dependent", + "data": { + "foo\nbar": 1, + "foo": 2 + }, + "valid": false + }, + { + "description": "quoted quotes missing dependent", + "data": { + "foo\"bar": 2 + }, + "valid": false + } + ] + } +] diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/dependentSchemas.json b/internal/mcp/internal/jsonschema/testdata/draft2020-12/dependentSchemas.json new file mode 100644 index 00000000000..1c5f0574a09 --- /dev/null +++ 
b/internal/mcp/internal/jsonschema/testdata/draft2020-12/dependentSchemas.json @@ -0,0 +1,171 @@ +[ + { + "description": "single dependency", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "dependentSchemas": { + "bar": { + "properties": { + "foo": {"type": "integer"}, + "bar": {"type": "integer"} + } + } + } + }, + "tests": [ + { + "description": "valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "no dependency", + "data": {"foo": "quux"}, + "valid": true + }, + { + "description": "wrong type", + "data": {"foo": "quux", "bar": 2}, + "valid": false + }, + { + "description": "wrong type other", + "data": {"foo": 2, "bar": "quux"}, + "valid": false + }, + { + "description": "wrong type both", + "data": {"foo": "quux", "bar": "quux"}, + "valid": false + }, + { + "description": "ignores arrays", + "data": ["bar"], + "valid": true + }, + { + "description": "ignores strings", + "data": "foobar", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "boolean subschemas", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "dependentSchemas": { + "foo": true, + "bar": false + } + }, + "tests": [ + { + "description": "object with property having schema true is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "object with property having schema false is invalid", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "object with both properties is invalid", + "data": {"foo": 1, "bar": 2}, + "valid": false + }, + { + "description": "empty object is valid", + "data": {}, + "valid": true + } + ] + }, + { + "description": "dependencies with escaped characters", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "dependentSchemas": { + "foo\tbar": {"minProperties": 4}, + "foo'bar": {"required": ["foo\"bar"]} + } + }, + "tests": [ + { + "description": "quoted tab", + "data": { + "foo\tbar": 1, + "a": 2, + "b": 3, + "c": 4 + }, + "valid": true + }, + { + "description": "quoted quote", + "data": { + "foo'bar": {"foo\"bar": 1} + }, + "valid": false + }, + { + "description": "quoted tab invalid under dependent schema", + "data": { + "foo\tbar": 1, + "a": 2 + }, + "valid": false + }, + { + "description": "quoted quote invalid under dependent schema", + "data": {"foo'bar": 1}, + "valid": false + } + ] + }, + { + "description": "dependent subschema incompatible with root", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "properties": { + "foo": {} + }, + "dependentSchemas": { + "foo": { + "properties": { + "bar": {} + }, + "additionalProperties": false + } + } + }, + "tests": [ + { + "description": "matches root", + "data": {"foo": 1}, + "valid": false + }, + { + "description": "matches dependency", + "data": {"bar": 1}, + "valid": true + }, + { + "description": "matches both", + "data": {"foo": 1, "bar": 2}, + "valid": false + }, + { + "description": "no dependency", + "data": {"baz": 1}, + "valid": true + } + ] + } +] diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/infinite-loop-detection.json b/internal/mcp/internal/jsonschema/testdata/draft2020-12/infinite-loop-detection.json new file mode 100644 index 00000000000..46f157a35a5 --- /dev/null +++ b/internal/mcp/internal/jsonschema/testdata/draft2020-12/infinite-loop-detection.json @@ -0,0 +1,37 @@ +[ + { + "description": "evaluating the same schema location against the same data location 
twice is not a sign of an infinite loop", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$defs": { + "int": { "type": "integer" } + }, + "allOf": [ + { + "properties": { + "foo": { + "$ref": "#/$defs/int" + } + } + }, + { + "additionalProperties": { + "$ref": "#/$defs/int" + } + } + ] + }, + "tests": [ + { + "description": "passing case", + "data": { "foo": 1 }, + "valid": true + }, + { + "description": "failing case", + "data": { "foo": "a string" }, + "valid": false + } + ] + } +] diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/maxProperties.json b/internal/mcp/internal/jsonschema/testdata/draft2020-12/maxProperties.json new file mode 100644 index 00000000000..73ae7316f88 --- /dev/null +++ b/internal/mcp/internal/jsonschema/testdata/draft2020-12/maxProperties.json @@ -0,0 +1,79 @@ +[ + { + "description": "maxProperties validation", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "maxProperties": 2 + }, + "tests": [ + { + "description": "shorter is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "exact length is valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "too long is invalid", + "data": {"foo": 1, "bar": 2, "baz": 3}, + "valid": false + }, + { + "description": "ignores arrays", + "data": [1, 2, 3], + "valid": true + }, + { + "description": "ignores strings", + "data": "foobar", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "maxProperties validation with a decimal", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "maxProperties": 2.0 + }, + "tests": [ + { + "description": "shorter is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "too long is invalid", + "data": {"foo": 1, "bar": 2, "baz": 3}, + "valid": false + } + ] + }, + { + "description": "maxProperties = 0 means the object is empty", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "maxProperties": 0 + }, + "tests": [ + { + "description": "no properties is valid", + "data": {}, + "valid": true + }, + { + "description": "one property is invalid", + "data": { "foo": 1 }, + "valid": false + } + ] + } +] diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/minProperties.json b/internal/mcp/internal/jsonschema/testdata/draft2020-12/minProperties.json new file mode 100644 index 00000000000..a753ad35f21 --- /dev/null +++ b/internal/mcp/internal/jsonschema/testdata/draft2020-12/minProperties.json @@ -0,0 +1,60 @@ +[ + { + "description": "minProperties validation", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "minProperties": 1 + }, + "tests": [ + { + "description": "longer is valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "exact length is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "too short is invalid", + "data": {}, + "valid": false + }, + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores strings", + "data": "", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "minProperties validation with a decimal", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "minProperties": 1.0 + }, + "tests": [ + { + "description": "longer is valid", + "data": {"foo": 1, "bar": 2}, 
+ "valid": true + }, + { + "description": "too short is invalid", + "data": {}, + "valid": false + } + ] + } +] diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/patternProperties.json b/internal/mcp/internal/jsonschema/testdata/draft2020-12/patternProperties.json new file mode 100644 index 00000000000..81829c71ffa --- /dev/null +++ b/internal/mcp/internal/jsonschema/testdata/draft2020-12/patternProperties.json @@ -0,0 +1,176 @@ +[ + { + "description": + "patternProperties validates properties matching a regex", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "patternProperties": { + "f.*o": {"type": "integer"} + } + }, + "tests": [ + { + "description": "a single valid match is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "multiple valid matches is valid", + "data": {"foo": 1, "foooooo" : 2}, + "valid": true + }, + { + "description": "a single invalid match is invalid", + "data": {"foo": "bar", "fooooo": 2}, + "valid": false + }, + { + "description": "multiple invalid matches is invalid", + "data": {"foo": "bar", "foooooo" : "baz"}, + "valid": false + }, + { + "description": "ignores arrays", + "data": ["foo"], + "valid": true + }, + { + "description": "ignores strings", + "data": "foo", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "multiple simultaneous patternProperties are validated", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "patternProperties": { + "a*": {"type": "integer"}, + "aaa*": {"maximum": 20} + } + }, + "tests": [ + { + "description": "a single valid match is valid", + "data": {"a": 21}, + "valid": true + }, + { + "description": "a simultaneous match is valid", + "data": {"aaaa": 18}, + "valid": true + }, + { + "description": "multiple matches is valid", + "data": {"a": 21, "aaaa": 18}, + "valid": true + }, + { + "description": "an invalid due to one is invalid", + "data": {"a": "bar"}, + "valid": false + }, + { + "description": "an invalid due to the other is invalid", + "data": {"aaaa": 31}, + "valid": false + }, + { + "description": "an invalid due to both is invalid", + "data": {"aaa": "foo", "aaaa": 31}, + "valid": false + } + ] + }, + { + "description": "regexes are not anchored by default and are case sensitive", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "patternProperties": { + "[0-9]{2,}": { "type": "boolean" }, + "X_": { "type": "string" } + } + }, + "tests": [ + { + "description": "non recognized members are ignored", + "data": { "answer 1": "42" }, + "valid": true + }, + { + "description": "recognized members are accounted for", + "data": { "a31b": null }, + "valid": false + }, + { + "description": "regexes are case sensitive", + "data": { "a_x_3": 3 }, + "valid": true + }, + { + "description": "regexes are case sensitive, 2", + "data": { "a_X_3": 3 }, + "valid": false + } + ] + }, + { + "description": "patternProperties with boolean schemas", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "patternProperties": { + "f.*": true, + "b.*": false + } + }, + "tests": [ + { + "description": "object with property matching schema true is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "object with property matching schema false is invalid", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "object with both properties is invalid", + "data": {"foo": 1, "bar": 2}, + "valid": 
false + }, + { + "description": "object with a property matching both true and false is invalid", + "data": {"foobar":1}, + "valid": false + }, + { + "description": "empty object is valid", + "data": {}, + "valid": true + } + ] + }, + { + "description": "patternProperties with null valued instance properties", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "patternProperties": { + "^.*bar$": {"type": "null"} + } + }, + "tests": [ + { + "description": "allows null values", + "data": {"foobar": null}, + "valid": true + } + ] + } +] diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/properties.json b/internal/mcp/internal/jsonschema/testdata/draft2020-12/properties.json new file mode 100644 index 00000000000..523dcde7c5a --- /dev/null +++ b/internal/mcp/internal/jsonschema/testdata/draft2020-12/properties.json @@ -0,0 +1,242 @@ +[ + { + "description": "object properties validation", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "properties": { + "foo": {"type": "integer"}, + "bar": {"type": "string"} + } + }, + "tests": [ + { + "description": "both properties present and valid is valid", + "data": {"foo": 1, "bar": "baz"}, + "valid": true + }, + { + "description": "one property invalid is invalid", + "data": {"foo": 1, "bar": {}}, + "valid": false + }, + { + "description": "both properties invalid is invalid", + "data": {"foo": [], "bar": {}}, + "valid": false + }, + { + "description": "doesn't invalidate other properties", + "data": {"quux": []}, + "valid": true + }, + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": + "properties, patternProperties, additionalProperties interaction", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "properties": { + "foo": {"type": "array", "maxItems": 3}, + "bar": {"type": "array"} + }, + "patternProperties": {"f.o": {"minItems": 2}}, + "additionalProperties": {"type": "integer"} + }, + "tests": [ + { + "description": "property validates property", + "data": {"foo": [1, 2]}, + "valid": true + }, + { + "description": "property invalidates property", + "data": {"foo": [1, 2, 3, 4]}, + "valid": false + }, + { + "description": "patternProperty invalidates property", + "data": {"foo": []}, + "valid": false + }, + { + "description": "patternProperty validates nonproperty", + "data": {"fxo": [1, 2]}, + "valid": true + }, + { + "description": "patternProperty invalidates nonproperty", + "data": {"fxo": []}, + "valid": false + }, + { + "description": "additionalProperty ignores property", + "data": {"bar": []}, + "valid": true + }, + { + "description": "additionalProperty validates others", + "data": {"quux": 3}, + "valid": true + }, + { + "description": "additionalProperty invalidates others", + "data": {"quux": "foo"}, + "valid": false + } + ] + }, + { + "description": "properties with boolean schema", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "properties": { + "foo": true, + "bar": false + } + }, + "tests": [ + { + "description": "no property present is valid", + "data": {}, + "valid": true + }, + { + "description": "only 'true' property present is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "only 'false' property present is invalid", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "both properties present is invalid", + "data": {"foo": 1, 
"bar": 2}, + "valid": false + } + ] + }, + { + "description": "properties with escaped characters", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "properties": { + "foo\nbar": {"type": "number"}, + "foo\"bar": {"type": "number"}, + "foo\\bar": {"type": "number"}, + "foo\rbar": {"type": "number"}, + "foo\tbar": {"type": "number"}, + "foo\fbar": {"type": "number"} + } + }, + "tests": [ + { + "description": "object with all numbers is valid", + "data": { + "foo\nbar": 1, + "foo\"bar": 1, + "foo\\bar": 1, + "foo\rbar": 1, + "foo\tbar": 1, + "foo\fbar": 1 + }, + "valid": true + }, + { + "description": "object with strings is invalid", + "data": { + "foo\nbar": "1", + "foo\"bar": "1", + "foo\\bar": "1", + "foo\rbar": "1", + "foo\tbar": "1", + "foo\fbar": "1" + }, + "valid": false + } + ] + }, + { + "description": "properties with null valued instance properties", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "properties": { + "foo": {"type": "null"} + } + }, + "tests": [ + { + "description": "allows null values", + "data": {"foo": null}, + "valid": true + } + ] + }, + { + "description": "properties whose names are Javascript object property names", + "comment": "Ensure JS implementations don't universally consider e.g. __proto__ to always be present in an object.", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "properties": { + "__proto__": {"type": "number"}, + "toString": { + "properties": { "length": { "type": "string" } } + }, + "constructor": {"type": "number"} + } + }, + "tests": [ + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + }, + { + "description": "none of the properties mentioned", + "data": {}, + "valid": true + }, + { + "description": "__proto__ not valid", + "data": { "__proto__": "foo" }, + "valid": false + }, + { + "description": "toString not valid", + "data": { "toString": { "length": 37 } }, + "valid": false + }, + { + "description": "constructor not valid", + "data": { "constructor": { "length": 37 } }, + "valid": false + }, + { + "description": "all present and valid", + "data": { + "__proto__": 12, + "toString": { "length": "foo" }, + "constructor": 37 + }, + "valid": true + } + ] + } +] diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/propertyNames.json b/internal/mcp/internal/jsonschema/testdata/draft2020-12/propertyNames.json new file mode 100644 index 00000000000..b4780088a66 --- /dev/null +++ b/internal/mcp/internal/jsonschema/testdata/draft2020-12/propertyNames.json @@ -0,0 +1,168 @@ +[ + { + "description": "propertyNames validation", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "propertyNames": {"maxLength": 3} + }, + "tests": [ + { + "description": "all property names valid", + "data": { + "f": {}, + "foo": {} + }, + "valid": true + }, + { + "description": "some property names invalid", + "data": { + "foo": {}, + "foobar": {} + }, + "valid": false + }, + { + "description": "object without properties is valid", + "data": {}, + "valid": true + }, + { + "description": "ignores arrays", + "data": [1, 2, 3, 4], + "valid": true + }, + { + "description": "ignores strings", + "data": "foobar", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "propertyNames validation with pattern", + "schema": { + "$schema": 
"https://json-schema.org/draft/2020-12/schema", + "propertyNames": { "pattern": "^a+$" } + }, + "tests": [ + { + "description": "matching property names valid", + "data": { + "a": {}, + "aa": {}, + "aaa": {} + }, + "valid": true + }, + { + "description": "non-matching property name is invalid", + "data": { + "aaA": {} + }, + "valid": false + }, + { + "description": "object without properties is valid", + "data": {}, + "valid": true + } + ] + }, + { + "description": "propertyNames with boolean schema true", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "propertyNames": true + }, + "tests": [ + { + "description": "object with any properties is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "empty object is valid", + "data": {}, + "valid": true + } + ] + }, + { + "description": "propertyNames with boolean schema false", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "propertyNames": false + }, + "tests": [ + { + "description": "object with any properties is invalid", + "data": {"foo": 1}, + "valid": false + }, + { + "description": "empty object is valid", + "data": {}, + "valid": true + } + ] + }, + { + "description": "propertyNames with const", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "propertyNames": {"const": "foo"} + }, + "tests": [ + { + "description": "object with property foo is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "object with any other property is invalid", + "data": {"bar": 1}, + "valid": false + }, + { + "description": "empty object is valid", + "data": {}, + "valid": true + } + ] + }, + { + "description": "propertyNames with enum", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "propertyNames": {"enum": ["foo", "bar"]} + }, + "tests": [ + { + "description": "object with property foo is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "object with property foo and bar is valid", + "data": {"foo": 1, "bar": 1}, + "valid": true + }, + { + "description": "object with any other property is invalid", + "data": {"baz": 1}, + "valid": false + }, + { + "description": "empty object is valid", + "data": {}, + "valid": true + } + ] + } +] diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/required.json b/internal/mcp/internal/jsonschema/testdata/draft2020-12/required.json new file mode 100644 index 00000000000..e66f29f2439 --- /dev/null +++ b/internal/mcp/internal/jsonschema/testdata/draft2020-12/required.json @@ -0,0 +1,158 @@ +[ + { + "description": "required validation", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "properties": { + "foo": {}, + "bar": {} + }, + "required": ["foo"] + }, + "tests": [ + { + "description": "present required property is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "non-present required property is invalid", + "data": {"bar": 1}, + "valid": false + }, + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores strings", + "data": "", + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "required default validation", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "properties": { + "foo": {} + } + }, + "tests": [ + { + "description": "not required by default", + "data": {}, + "valid": true + } + ] + }, + { + "description": 
"required with empty array", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "properties": { + "foo": {} + }, + "required": [] + }, + "tests": [ + { + "description": "property not required", + "data": {}, + "valid": true + } + ] + }, + { + "description": "required with escaped characters", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "required": [ + "foo\nbar", + "foo\"bar", + "foo\\bar", + "foo\rbar", + "foo\tbar", + "foo\fbar" + ] + }, + "tests": [ + { + "description": "object with all properties present is valid", + "data": { + "foo\nbar": 1, + "foo\"bar": 1, + "foo\\bar": 1, + "foo\rbar": 1, + "foo\tbar": 1, + "foo\fbar": 1 + }, + "valid": true + }, + { + "description": "object with some properties missing is invalid", + "data": { + "foo\nbar": "1", + "foo\"bar": "1" + }, + "valid": false + } + ] + }, + { + "description": "required properties whose names are Javascript object property names", + "comment": "Ensure JS implementations don't universally consider e.g. __proto__ to always be present in an object.", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "required": ["__proto__", "toString", "constructor"] + }, + "tests": [ + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores other non-objects", + "data": 12, + "valid": true + }, + { + "description": "none of the properties mentioned", + "data": {}, + "valid": false + }, + { + "description": "__proto__ present", + "data": { "__proto__": "foo" }, + "valid": false + }, + { + "description": "toString present", + "data": { "toString": { "length": 37 } }, + "valid": false + }, + { + "description": "constructor present", + "data": { "constructor": { "length": 37 } }, + "valid": false + }, + { + "description": "all present", + "data": { + "__proto__": 12, + "toString": { "length": "foo" }, + "constructor": 37 + }, + "valid": true + } + ] + } +] diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/unevaluatedProperties.json b/internal/mcp/internal/jsonschema/testdata/draft2020-12/unevaluatedProperties.json new file mode 100644 index 00000000000..ae29c9eb3b6 --- /dev/null +++ b/internal/mcp/internal/jsonschema/testdata/draft2020-12/unevaluatedProperties.json @@ -0,0 +1,1601 @@ +[ + { + "description": "unevaluatedProperties true", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "unevaluatedProperties": true + }, + "tests": [ + { + "description": "with no unevaluated properties", + "data": {}, + "valid": true + }, + { + "description": "with unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": true + } + ] + }, + { + "description": "unevaluatedProperties schema", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "unevaluatedProperties": { + "type": "string", + "minLength": 3 + } + }, + "tests": [ + { + "description": "with no unevaluated properties", + "data": {}, + "valid": true + }, + { + "description": "with valid unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": "with invalid unevaluated properties", + "data": { + "foo": "fo" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties false", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no unevaluated properties", + "data": {}, + 
"valid": true + }, + { + "description": "with unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with adjacent properties", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": "with unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with adjacent patternProperties", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "patternProperties": { + "^foo": { "type": "string" } + }, + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": "with unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with adjacent additionalProperties", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "additionalProperties": true, + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no additional properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": "with additional properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + } + ] + }, + { + "description": "unevaluatedProperties with nested properties", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "allOf": [ + { + "properties": { + "bar": { "type": "string" } + } + } + ], + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no additional properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + }, + { + "description": "with additional properties", + "data": { + "foo": "foo", + "bar": "bar", + "baz": "baz" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with nested patternProperties", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "allOf": [ + { + "patternProperties": { + "^bar": { "type": "string" } + } + } + ], + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no additional properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + }, + { + "description": "with additional properties", + "data": { + "foo": "foo", + "bar": "bar", + "baz": "baz" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with nested additionalProperties", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "allOf": [ + { + "additionalProperties": true + } + ], + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no additional properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": "with additional properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + } + ] + }, + { + "description": 
"unevaluatedProperties with nested unevaluatedProperties", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "allOf": [ + { + "unevaluatedProperties": true + } + ], + "unevaluatedProperties": { + "type": "string", + "maxLength": 2 + } + }, + "tests": [ + { + "description": "with no nested unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": "with nested unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + } + ] + }, + { + "description": "unevaluatedProperties with anyOf", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "anyOf": [ + { + "properties": { + "bar": { "const": "bar" } + }, + "required": ["bar"] + }, + { + "properties": { + "baz": { "const": "baz" } + }, + "required": ["baz"] + }, + { + "properties": { + "quux": { "const": "quux" } + }, + "required": ["quux"] + } + ], + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "when one matches and has no unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + }, + { + "description": "when one matches and has unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar", + "baz": "not-baz" + }, + "valid": false + }, + { + "description": "when two match and has no unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar", + "baz": "baz" + }, + "valid": true + }, + { + "description": "when two match and has unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar", + "baz": "baz", + "quux": "not-quux" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with oneOf", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "oneOf": [ + { + "properties": { + "bar": { "const": "bar" } + }, + "required": ["bar"] + }, + { + "properties": { + "baz": { "const": "baz" } + }, + "required": ["baz"] + } + ], + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + }, + { + "description": "with unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar", + "quux": "quux" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with not", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "not": { + "not": { + "properties": { + "bar": { "const": "bar" } + }, + "required": ["bar"] + } + }, + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with if/then/else", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "if": { + "properties": { + "foo": { "const": "then" } + }, + "required": ["foo"] + }, + "then": { + "properties": { + "bar": { "type": "string" } + }, + "required": ["bar"] + }, + "else": { + "properties": { + "baz": { "type": "string" } + }, + "required": ["baz"] + }, + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "when if is true and has no unevaluated properties", + "data": { + "foo": 
"then", + "bar": "bar" + }, + "valid": true + }, + { + "description": "when if is true and has unevaluated properties", + "data": { + "foo": "then", + "bar": "bar", + "baz": "baz" + }, + "valid": false + }, + { + "description": "when if is false and has no unevaluated properties", + "data": { + "baz": "baz" + }, + "valid": true + }, + { + "description": "when if is false and has unevaluated properties", + "data": { + "foo": "else", + "baz": "baz" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with if/then/else, then not defined", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "if": { + "properties": { + "foo": { "const": "then" } + }, + "required": ["foo"] + }, + "else": { + "properties": { + "baz": { "type": "string" } + }, + "required": ["baz"] + }, + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "when if is true and has no unevaluated properties", + "data": { + "foo": "then", + "bar": "bar" + }, + "valid": false + }, + { + "description": "when if is true and has unevaluated properties", + "data": { + "foo": "then", + "bar": "bar", + "baz": "baz" + }, + "valid": false + }, + { + "description": "when if is false and has no unevaluated properties", + "data": { + "baz": "baz" + }, + "valid": true + }, + { + "description": "when if is false and has unevaluated properties", + "data": { + "foo": "else", + "baz": "baz" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with if/then/else, else not defined", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "if": { + "properties": { + "foo": { "const": "then" } + }, + "required": ["foo"] + }, + "then": { + "properties": { + "bar": { "type": "string" } + }, + "required": ["bar"] + }, + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "when if is true and has no unevaluated properties", + "data": { + "foo": "then", + "bar": "bar" + }, + "valid": true + }, + { + "description": "when if is true and has unevaluated properties", + "data": { + "foo": "then", + "bar": "bar", + "baz": "baz" + }, + "valid": false + }, + { + "description": "when if is false and has no unevaluated properties", + "data": { + "baz": "baz" + }, + "valid": false + }, + { + "description": "when if is false and has unevaluated properties", + "data": { + "foo": "else", + "baz": "baz" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with dependentSchemas", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "dependentSchemas": { + "foo": { + "properties": { + "bar": { "const": "bar" } + }, + "required": ["bar"] + } + }, + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + }, + { + "description": "with unevaluated properties", + "data": { + "bar": "bar" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with boolean schemas", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "allOf": [true], + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": "with unevaluated properties", + "data": { + 
"bar": "bar" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with $ref", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "$ref": "#/$defs/bar", + "properties": { + "foo": { "type": "string" } + }, + "unevaluatedProperties": false, + "$defs": { + "bar": { + "properties": { + "bar": { "type": "string" } + } + } + } + }, + "tests": [ + { + "description": "with no unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + }, + { + "description": "with unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar", + "baz": "baz" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties before $ref", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "unevaluatedProperties": false, + "properties": { + "foo": { "type": "string" } + }, + "$ref": "#/$defs/bar", + "$defs": { + "bar": { + "properties": { + "bar": { "type": "string" } + } + } + } + }, + "tests": [ + { + "description": "with no unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + }, + { + "description": "with unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar", + "baz": "baz" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties with $dynamicRef", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "https://example.com/unevaluated-properties-with-dynamic-ref/derived", + + "$ref": "./baseSchema", + + "$defs": { + "derived": { + "$dynamicAnchor": "addons", + "properties": { + "bar": { "type": "string" } + } + }, + "baseSchema": { + "$id": "./baseSchema", + + "$comment": "unevaluatedProperties comes first so it's more likely to catch bugs with implementations that are sensitive to keyword ordering", + "unevaluatedProperties": false, + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "$dynamicRef": "#addons", + + "$defs": { + "defaultAddons": { + "$comment": "Needed to satisfy the bookending requirement", + "$dynamicAnchor": "addons" + } + } + } + } + }, + "tests": [ + { + "description": "with no unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + }, + { + "description": "with unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar", + "baz": "baz" + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties can't see inside cousins", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "allOf": [ + { + "properties": { + "foo": true + } + }, + { + "unevaluatedProperties": false + } + ] + }, + "tests": [ + { + "description": "always fails", + "data": { + "foo": 1 + }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties can't see inside cousins (reverse order)", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "allOf": [ + { + "unevaluatedProperties": false + }, + { + "properties": { + "foo": true + } + } + ] + }, + "tests": [ + { + "description": "always fails", + "data": { + "foo": 1 + }, + "valid": false + } + ] + }, + { + "description": "nested unevaluatedProperties, outer false, inner true, properties outside", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "allOf": [ + { + "unevaluatedProperties": true + } + ], + "unevaluatedProperties": false + }, + "tests": [ + { + "description": 
"with no nested unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": "with nested unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + } + ] + }, + { + "description": "nested unevaluatedProperties, outer false, inner true, properties inside", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "allOf": [ + { + "properties": { + "foo": { "type": "string" } + }, + "unevaluatedProperties": true + } + ], + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "with no nested unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": "with nested unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": true + } + ] + }, + { + "description": "nested unevaluatedProperties, outer true, inner false, properties outside", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "properties": { + "foo": { "type": "string" } + }, + "allOf": [ + { + "unevaluatedProperties": false + } + ], + "unevaluatedProperties": true + }, + "tests": [ + { + "description": "with no nested unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": false + }, + { + "description": "with nested unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": false + } + ] + }, + { + "description": "nested unevaluatedProperties, outer true, inner false, properties inside", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "allOf": [ + { + "properties": { + "foo": { "type": "string" } + }, + "unevaluatedProperties": false + } + ], + "unevaluatedProperties": true + }, + "tests": [ + { + "description": "with no nested unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": "with nested unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": false + } + ] + }, + { + "description": "cousin unevaluatedProperties, true and false, true with properties", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "allOf": [ + { + "properties": { + "foo": { "type": "string" } + }, + "unevaluatedProperties": true + }, + { + "unevaluatedProperties": false + } + ] + }, + "tests": [ + { + "description": "with no nested unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": false + }, + { + "description": "with nested unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": false + } + ] + }, + { + "description": "cousin unevaluatedProperties, true and false, false with properties", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "allOf": [ + { + "unevaluatedProperties": true + }, + { + "properties": { + "foo": { "type": "string" } + }, + "unevaluatedProperties": false + } + ] + }, + "tests": [ + { + "description": "with no nested unevaluated properties", + "data": { + "foo": "foo" + }, + "valid": true + }, + { + "description": "with nested unevaluated properties", + "data": { + "foo": "foo", + "bar": "bar" + }, + "valid": false + } + ] + }, + { + "description": "property is evaluated in an uncle schema to unevaluatedProperties", + "comment": "see https://stackoverflow.com/questions/66936884/deeply-nested-unevaluatedproperties-and-their-expectations", + "schema": { + "$schema": 
"https://json-schema.org/draft/2020-12/schema", + "type": "object", + "properties": { + "foo": { + "type": "object", + "properties": { + "bar": { + "type": "string" + } + }, + "unevaluatedProperties": false + } + }, + "anyOf": [ + { + "properties": { + "foo": { + "properties": { + "faz": { + "type": "string" + } + } + } + } + } + ] + }, + "tests": [ + { + "description": "no extra properties", + "data": { + "foo": { + "bar": "test" + } + }, + "valid": true + }, + { + "description": "uncle keyword evaluation is not significant", + "data": { + "foo": { + "bar": "test", + "faz": "test" + } + }, + "valid": false + } + ] + }, + { + "description": "in-place applicator siblings, allOf has unevaluated", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "allOf": [ + { + "properties": { + "foo": true + }, + "unevaluatedProperties": false + } + ], + "anyOf": [ + { + "properties": { + "bar": true + } + } + ] + }, + "tests": [ + { + "description": "base case: both properties present", + "data": { + "foo": 1, + "bar": 1 + }, + "valid": false + }, + { + "description": "in place applicator siblings, bar is missing", + "data": { + "foo": 1 + }, + "valid": true + }, + { + "description": "in place applicator siblings, foo is missing", + "data": { + "bar": 1 + }, + "valid": false + } + ] + }, + { + "description": "in-place applicator siblings, anyOf has unevaluated", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "allOf": [ + { + "properties": { + "foo": true + } + } + ], + "anyOf": [ + { + "properties": { + "bar": true + }, + "unevaluatedProperties": false + } + ] + }, + "tests": [ + { + "description": "base case: both properties present", + "data": { + "foo": 1, + "bar": 1 + }, + "valid": false + }, + { + "description": "in place applicator siblings, bar is missing", + "data": { + "foo": 1 + }, + "valid": false + }, + { + "description": "in place applicator siblings, foo is missing", + "data": { + "bar": 1 + }, + "valid": true + } + ] + }, + { + "description": "unevaluatedProperties + single cyclic ref", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "properties": { + "x": { "$ref": "#" } + }, + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "Empty is valid", + "data": {}, + "valid": true + }, + { + "description": "Single is valid", + "data": { "x": {} }, + "valid": true + }, + { + "description": "Unevaluated on 1st level is invalid", + "data": { "x": {}, "y": {} }, + "valid": false + }, + { + "description": "Nested is valid", + "data": { "x": { "x": {} } }, + "valid": true + }, + { + "description": "Unevaluated on 2nd level is invalid", + "data": { "x": { "x": {}, "y": {} } }, + "valid": false + }, + { + "description": "Deep nested is valid", + "data": { "x": { "x": { "x": {} } } }, + "valid": true + }, + { + "description": "Unevaluated on 3rd level is invalid", + "data": { "x": { "x": { "x": {}, "y": {} } } }, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties + ref inside allOf / oneOf", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$defs": { + "one": { + "properties": { "a": true } + }, + "two": { + "required": ["x"], + "properties": { "x": true } + } + }, + "allOf": [ + { "$ref": "#/$defs/one" }, + { "properties": { "b": true } }, + { + "oneOf": [ + { "$ref": "#/$defs/two" }, + { + "required": ["y"], + "properties": { "y": true } + } + ] + } + ], + "unevaluatedProperties": false + 
}, + "tests": [ + { + "description": "Empty is invalid (no x or y)", + "data": {}, + "valid": false + }, + { + "description": "a and b are invalid (no x or y)", + "data": { "a": 1, "b": 1 }, + "valid": false + }, + { + "description": "x and y are invalid", + "data": { "x": 1, "y": 1 }, + "valid": false + }, + { + "description": "a and x are valid", + "data": { "a": 1, "x": 1 }, + "valid": true + }, + { + "description": "a and y are valid", + "data": { "a": 1, "y": 1 }, + "valid": true + }, + { + "description": "a and b and x are valid", + "data": { "a": 1, "b": 1, "x": 1 }, + "valid": true + }, + { + "description": "a and b and y are valid", + "data": { "a": 1, "b": 1, "y": 1 }, + "valid": true + }, + { + "description": "a and b and x and y are invalid", + "data": { "a": 1, "b": 1, "x": 1, "y": 1 }, + "valid": false + } + ] + }, + { + "description": "dynamic evalation inside nested refs", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$defs": { + "one": { + "oneOf": [ + { "$ref": "#/$defs/two" }, + { "required": ["b"], "properties": { "b": true } }, + { "required": ["xx"], "patternProperties": { "x": true } }, + { "required": ["all"], "unevaluatedProperties": true } + ] + }, + "two": { + "oneOf": [ + { "required": ["c"], "properties": { "c": true } }, + { "required": ["d"], "properties": { "d": true } } + ] + } + }, + "oneOf": [ + { "$ref": "#/$defs/one" }, + { "required": ["a"], "properties": { "a": true } } + ], + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "Empty is invalid", + "data": {}, + "valid": false + }, + { + "description": "a is valid", + "data": { "a": 1 }, + "valid": true + }, + { + "description": "b is valid", + "data": { "b": 1 }, + "valid": true + }, + { + "description": "c is valid", + "data": { "c": 1 }, + "valid": true + }, + { + "description": "d is valid", + "data": { "d": 1 }, + "valid": true + }, + { + "description": "a + b is invalid", + "data": { "a": 1, "b": 1 }, + "valid": false + }, + { + "description": "a + c is invalid", + "data": { "a": 1, "c": 1 }, + "valid": false + }, + { + "description": "a + d is invalid", + "data": { "a": 1, "d": 1 }, + "valid": false + }, + { + "description": "b + c is invalid", + "data": { "b": 1, "c": 1 }, + "valid": false + }, + { + "description": "b + d is invalid", + "data": { "b": 1, "d": 1 }, + "valid": false + }, + { + "description": "c + d is invalid", + "data": { "c": 1, "d": 1 }, + "valid": false + }, + { + "description": "xx is valid", + "data": { "xx": 1 }, + "valid": true + }, + { + "description": "xx + foox is valid", + "data": { "xx": 1, "foox": 1 }, + "valid": true + }, + { + "description": "xx + foo is invalid", + "data": { "xx": 1, "foo": 1 }, + "valid": false + }, + { + "description": "xx + a is invalid", + "data": { "xx": 1, "a": 1 }, + "valid": false + }, + { + "description": "xx + b is invalid", + "data": { "xx": 1, "b": 1 }, + "valid": false + }, + { + "description": "xx + c is invalid", + "data": { "xx": 1, "c": 1 }, + "valid": false + }, + { + "description": "xx + d is invalid", + "data": { "xx": 1, "d": 1 }, + "valid": false + }, + { + "description": "all is valid", + "data": { "all": 1 }, + "valid": true + }, + { + "description": "all + foo is valid", + "data": { "all": 1, "foo": 1 }, + "valid": true + }, + { + "description": "all + a is invalid", + "data": { "all": 1, "a": 1 }, + "valid": false + } + ] + }, + { + "description": "non-object instances are valid", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + 
"unevaluatedProperties": false + }, + "tests": [ + { + "description": "ignores booleans", + "data": true, + "valid": true + }, + { + "description": "ignores integers", + "data": 123, + "valid": true + }, + { + "description": "ignores floats", + "data": 1.0, + "valid": true + }, + { + "description": "ignores arrays", + "data": [], + "valid": true + }, + { + "description": "ignores strings", + "data": "foo", + "valid": true + }, + { + "description": "ignores null", + "data": null, + "valid": true + } + ] + }, + { + "description": "unevaluatedProperties with null valued instance properties", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "unevaluatedProperties": { + "type": "null" + } + }, + "tests": [ + { + "description": "allows null valued properties", + "data": {"foo": null}, + "valid": true + } + ] + }, + { + "description": "unevaluatedProperties not affected by propertyNames", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "propertyNames": {"maxLength": 1}, + "unevaluatedProperties": { + "type": "number" + } + }, + "tests": [ + { + "description": "allows only number properties", + "data": {"a": 1}, + "valid": true + }, + { + "description": "string property is invalid", + "data": {"a": "b"}, + "valid": false + } + ] + }, + { + "description": "unevaluatedProperties can see annotations from if without then and else", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "if": { + "patternProperties": { + "foo": { + "type": "string" + } + } + }, + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "valid in case if is evaluated", + "data": { + "foo": "a" + }, + "valid": true + }, + { + "description": "invalid in case if is evaluated", + "data": { + "bar": "a" + }, + "valid": false + } + ] + }, + { + "description": "dependentSchemas with unevaluatedProperties", + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "properties": {"foo2": {}}, + "dependentSchemas": { + "foo" : {}, + "foo2": { + "properties": { + "bar":{} + } + } + }, + "unevaluatedProperties": false + }, + "tests": [ + { + "description": "unevaluatedProperties doesn't consider dependentSchemas", + "data": {"foo": ""}, + "valid": false + }, + { + "description": "unevaluatedProperties doesn't see bar when foo2 is absent", + "data": {"bar": ""}, + "valid": false + }, + { + "description": "unevaluatedProperties sees bar when foo2 is present", + "data": { "foo2": "", "bar": ""}, + "valid": true + } + ] + } +] diff --git a/internal/mcp/internal/jsonschema/validate.go b/internal/mcp/internal/jsonschema/validate.go index 2231914b9bc..2b847c22b59 100644 --- a/internal/mcp/internal/jsonschema/validate.go +++ b/internal/mcp/internal/jsonschema/validate.go @@ -79,11 +79,11 @@ func (st *state) validate(instance reflect.Value, schema *Schema, callerAnns *an // "number" subsumes integers if !(gotType == schema.Type || gotType == "integer" && schema.Type == "number") { - return fmt.Errorf("type: %s has type %q, want %q", instance, gotType, schema.Type) + return fmt.Errorf("type: %v has type %q, want %q", instance, gotType, schema.Type) } } else { if !(slices.Contains(schema.Types, gotType) || (gotType == "integer" && slices.Contains(schema.Types, "number"))) { - return fmt.Errorf("type: %s has type %q, want one of %q", + return fmt.Errorf("type: %v has type %q, want one of %q", instance, gotType, strings.Join(schema.Types, ", ")) } } @@ -322,6 +322,142 @@ func (st *state) validate(instance reflect.Value, schema *Schema, 
callerAnns *an } } + // objects + // https://json-schema.org/draft/2020-12/json-schema-core#section-10.3.2 + if instance.Kind() == reflect.Map { + if kt := instance.Type().Key(); kt.Kind() != reflect.String { + return fmt.Errorf("map key type %s is not a string", kt) + } + // Track the evaluated properties for just this schema, to support additionalProperties. + // If we used anns here, then we'd be including properties evaluated in subschemas + // from allOf, etc., which additionalProperties shouldn't observe. + evalProps := map[string]bool{} + for prop, schema := range schema.Properties { + val := instance.MapIndex(reflect.ValueOf(prop)) + if !val.IsValid() { + // It's OK if the instance doesn't have the property. + continue + } + if err := st.validate(val, schema, nil, append(path, prop)); err != nil { + return err + } + evalProps[prop] = true + } + if len(schema.PatternProperties) > 0 { + for vprop, val := range instance.Seq2() { + prop := vprop.String() + // Check every matching pattern. + for pattern, schema := range schema.PatternProperties { + // TODO(jba): pre-compile regexps + m, err := regexp.MatchString(pattern, prop) + if err != nil { + return err + } + if m { + if err := st.validate(val, schema, nil, append(path, prop)); err != nil { + return err + } + evalProps[prop] = true + } + } + } + } + if schema.AdditionalProperties != nil { + // Apply to all properties not handled above. + for vprop, val := range instance.Seq2() { + prop := vprop.String() + if !evalProps[prop] { + if err := st.validate(val, schema.AdditionalProperties, nil, append(path, prop)); err != nil { + return err + } + evalProps[prop] = true + } + } + } + anns.noteProperties(evalProps) + if schema.PropertyNames != nil { + for prop := range instance.Seq() { + if err := st.validate(prop, schema.PropertyNames, nil, append(path, prop.String())); err != nil { + return err + } + } + } + + // https://json-schema.org/draft/2020-12/draft-bhutton-json-schema-validation-01#section-6.5 + if schema.MinProperties != nil { + if n, m := instance.Len(), int(*schema.MinProperties); n < m { + return fmt.Errorf("minProperties: object has %d properties, less than %d", n, m) + } + } + if schema.MaxProperties != nil { + if n, m := instance.Len(), int(*schema.MaxProperties); n > m { + return fmt.Errorf("maxProperties: object has %d properties, greater than %d", n, m) + } + } + + hasProperty := func(prop string) bool { + return instance.MapIndex(reflect.ValueOf(prop)).IsValid() + } + + missingProperties := func(props []string) []string { + var missing []string + for _, p := range props { + if !hasProperty(p) { + missing = append(missing, p) + } + } + return missing + } + + if schema.Required != nil { + if m := missingProperties(schema.Required); len(m) > 0 { + return fmt.Errorf("required: missing properties: %q", m) + } + } + if schema.DependentRequired != nil { + // "Validation succeeds if, for each name that appears in both the instance + // and as a name within this keyword's value, every item in the corresponding + // array is also the name of a property in the instance." §6.5.4 + for dprop, reqs := range schema.DependentRequired { + if hasProperty(dprop) { + if m := missingProperties(reqs); len(m) > 0 { + return fmt.Errorf("dependentRequired[%q]: missing properties %q", dprop, m) + } + } + } + } + + // https://json-schema.org/draft/2020-12/json-schema-core#section-10.2.2.4 + if schema.DependentSchemas != nil { + // This does not collect annotations, although it seems like it should. 
+ for dprop, ss := range schema.DependentSchemas { + if hasProperty(dprop) { + // TODO: include dependentSchemas[dprop] in the errors. + err := st.validate(instance, ss, &anns, path) + if err != nil { + return err + } + } + } + } + if schema.UnevaluatedProperties != nil && !anns.allProperties { + // This looks a lot like AdditionalProperties, but depends on in-place keywords like allOf + // in addition to sibling keywords. + for vprop, val := range instance.Seq2() { + prop := vprop.String() + if !anns.evaluatedProperties[prop] { + if err := st.validate(val, schema.UnevaluatedProperties, nil, append(path, prop)); err != nil { + return err + } + } + } + // The spec says the annotation should be the set of evaluated properties, but we can optimize + // by setting a single boolean, since after this succeeds all properties will be validated. + // See https://json-schema.slack.com/archives/CT7FF623C/p1745592564381459. + anns.allProperties = true + } + } + if callerAnns != nil { // Our caller wants to know what we've validated. callerAnns.merge(&anns) diff --git a/internal/mcp/internal/jsonschema/validate_test.go b/internal/mcp/internal/jsonschema/validate_test.go index e86df8e2621..46ff1d3a65f 100644 --- a/internal/mcp/internal/jsonschema/validate_test.go +++ b/internal/mcp/internal/jsonschema/validate_test.go @@ -53,7 +53,7 @@ func TestValidate(t *testing.T) { t.Run(g.Description, func(t *testing.T) { rs := &ResolvedSchema{root: g.Schema} for s := range g.Schema.all() { - if s.Properties != nil || s.Required != nil || s.Defs != nil || s.UnevaluatedProperties != nil { + if s.Defs != nil || s.Ref != "" { t.Skip("schema or subschema has unimplemented keywords") } } From ca54d5908e1314c991d297bb31cd3e2ad9445c85 Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Fri, 25 Apr 2025 15:20:37 -0400 Subject: [PATCH 264/270] jsonschema: represent integer keywords as ints Some JSON Schema keywords, like minLength, are logically integers. The JSON Schema spec allows them to be written as floating-point values with zero fractional part, which is a reasonable choice, since JSON doesn't distinguish the two. We could simply validate these fields as integers in the normal course of events (namely, validating the schema against its meta-schema), but this CL does a little better: it uses *int struct fields for these keywords, preventing them from ever being non-integers. The UnmarshalJSON method still accepts integral floating-point values. Marshaling is unaffected, and the results don't change: encoding/json has always marshaled integral floats without the decimal part. Other choices: - Leave them as float64: you could then get away with Schema{MinLength: Ptr(1.5)} and though you couldn't get far with that, it's still irksome. - json.Number: same problem as float64, plus it's even harder to write as a literal. - int64: now `Ptr(1)` doesn't work (it's a *int, not a *int64). Anyway, these keywords all compare against len(x) in Go, which is of type int. 
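For illustration, a minimal sketch of the resulting behavior (not part of this
change; it assumes a _test.go file inside the jsonschema package, where Schema
and the Ptr helper are defined):

    package jsonschema

    import (
        "encoding/json"
        "fmt"
        "log"
    )

    func ExampleSchema_integerKeywords() {
        var s Schema
        // An integral floating-point value is still accepted on unmarshal,
        // and is stored as an int.
        if err := json.Unmarshal([]byte(`{"minLength": 3.0}`), &s); err != nil {
            log.Fatal(err)
        }
        fmt.Println(*s.MinLength)

        // A value with a fractional part is rejected at unmarshal time.
        err := json.Unmarshal([]byte(`{"minLength": 3.5}`), &s)
        fmt.Println(err != nil)

        // In Go source the field can only hold an integer:
        // Ptr(3) compiles here, Ptr(1.5) does not.
        _ = Schema{MinLength: Ptr(3)}

        // Output:
        // 3
        // true
    }

The *int representation makes the "must be an integer" invariant a property of
the struct type itself, rather than something every consumer of the schema has
to re-check.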
Change-Id: Ie3f08bd8a5deaf2a21344d6ab873551c011b12d5 Reviewed-on: https://go-review.googlesource.com/c/tools/+/668315 LUCI-TryBot-Result: Go LUCI Reviewed-by: Robert Findley --- internal/mcp/internal/jsonschema/infer.go | 4 +- internal/mcp/internal/jsonschema/schema.go | 61 +++++++++++++--- .../mcp/internal/jsonschema/schema_test.go | 72 ++++++++++++++++++- internal/mcp/internal/jsonschema/validate.go | 20 +++--- 4 files changed, 132 insertions(+), 25 deletions(-) diff --git a/internal/mcp/internal/jsonschema/infer.go b/internal/mcp/internal/jsonschema/infer.go index 45ecadfdb63..7fbf3fe630f 100644 --- a/internal/mcp/internal/jsonschema/infer.go +++ b/internal/mcp/internal/jsonschema/infer.go @@ -92,8 +92,8 @@ func typeSchema(t reflect.Type, seen map[reflect.Type]*Schema) (*Schema, error) return nil, fmt.Errorf("computing element schema: %v", err) } if t.Kind() == reflect.Array { - s.MinItems = Ptr(float64(t.Len())) - s.MaxItems = Ptr(float64(t.Len())) + s.MinItems = Ptr(t.Len()) + s.MaxItems = Ptr(t.Len()) } case reflect.String: diff --git a/internal/mcp/internal/jsonschema/schema.go b/internal/mcp/internal/jsonschema/schema.go index 52358f13337..444d2b82f85 100644 --- a/internal/mcp/internal/jsonschema/schema.go +++ b/internal/mcp/internal/jsonschema/schema.go @@ -8,6 +8,7 @@ package jsonschema import ( "bytes" + "cmp" "encoding/json" "errors" "fmt" @@ -67,25 +68,25 @@ type Schema struct { Maximum *float64 `json:"maximum,omitempty"` ExclusiveMinimum *float64 `json:"exclusiveMinimum,omitempty"` ExclusiveMaximum *float64 `json:"exclusiveMaximum,omitempty"` - MinLength *float64 `json:"minLength,omitempty"` - MaxLength *float64 `json:"maxLength,omitempty"` + MinLength *int `json:"minLength,omitempty"` + MaxLength *int `json:"maxLength,omitempty"` Pattern string `json:"pattern,omitempty"` // arrays PrefixItems []*Schema `json:"prefixItems,omitempty"` Items *Schema `json:"items,omitempty"` - MinItems *float64 `json:"minItems,omitempty"` - MaxItems *float64 `json:"maxItems,omitempty"` + MinItems *int `json:"minItems,omitempty"` + MaxItems *int `json:"maxItems,omitempty"` AdditionalItems *Schema `json:"additionalItems,omitempty"` UniqueItems bool `json:"uniqueItems,omitempty"` Contains *Schema `json:"contains,omitempty"` - MinContains *float64 `json:"minContains,omitempty"` - MaxContains *float64 `json:"maxContains,omitempty"` + MinContains *int `json:"minContains,omitempty"` // *int, not int: default is 1, not 0 + MaxContains *int `json:"maxContains,omitempty"` UnevaluatedItems *Schema `json:"unevaluatedItems,omitempty"` // objects - MinProperties *float64 `json:"minProperties,omitempty"` - MaxProperties *float64 `json:"maxProperties,omitempty"` + MinProperties *int `json:"minProperties,omitempty"` + MaxProperties *int `json:"maxProperties,omitempty"` Required []string `json:"required,omitempty"` DependentRequired map[string][]string `json:"dependentRequired,omitempty"` Properties map[string]*Schema `json:"properties,omitempty"` @@ -174,8 +175,17 @@ func (s *Schema) UnmarshalJSON(data []byte) error { } ms := struct { - Type json.RawMessage `json:"type,omitempty"` - Const json.RawMessage `json:"const,omitempty"` + Type json.RawMessage `json:"type,omitempty"` + Const json.RawMessage `json:"const,omitempty"` + MinLength *float64 `json:"minLength,omitempty"` + MaxLength *float64 `json:"maxLength,omitempty"` + MinItems *float64 `json:"minItems,omitempty"` + MaxItems *float64 `json:"maxItems,omitempty"` + MinProperties *float64 `json:"minProperties,omitempty"` + MaxProperties *float64 
`json:"maxProperties,omitempty"` + MinContains *float64 `json:"minContains,omitempty"` + MaxContains *float64 `json:"maxContains,omitempty"` + *schemaWithoutMethods }{ schemaWithoutMethods: (*schemaWithoutMethods)(s), @@ -192,12 +202,13 @@ func (s *Schema) UnmarshalJSON(data []byte) error { case '[': err = json.Unmarshal(ms.Type, &s.Types) default: - err = fmt.Errorf("invalid type: %q", ms.Type) + err = fmt.Errorf(`invalid value for "type": %q`, ms.Type) } } if err != nil { return err } + // Setting Const to a pointer to null will marshal properly, but won't unmarshal: // the *any is set to nil, not a pointer to nil. if len(ms.Const) > 0 { @@ -207,6 +218,34 @@ func (s *Schema) UnmarshalJSON(data []byte) error { return err } } + + // Store integer properties as ints. + setInt := func(name string, dst **int, src *float64) error { + if src == nil { + return nil + } + i := int(*src) + if float64(i) != *src { + return fmt.Errorf("%s: %f is not an int", name, *src) + } + *dst = &i + return nil + } + + err = cmp.Or( + setInt("minLength", &s.MinLength, ms.MinLength), + setInt("maxLength", &s.MaxLength, ms.MaxLength), + setInt("minItems", &s.MinItems, ms.MinItems), + setInt("maxItems", &s.MaxItems, ms.MaxItems), + setInt("minProperties", &s.MinProperties, ms.MinProperties), + setInt("maxProperties", &s.MaxProperties, ms.MaxProperties), + setInt("minContains", &s.MinContains, ms.MinContains), + setInt("maxContains", &s.MaxContains, ms.MaxContains), + ) + if err != nil { + return err + } + return nil } diff --git a/internal/mcp/internal/jsonschema/schema_test.go b/internal/mcp/internal/jsonschema/schema_test.go index 62b92d2968d..b742de3ae70 100644 --- a/internal/mcp/internal/jsonschema/schema_test.go +++ b/internal/mcp/internal/jsonschema/schema_test.go @@ -6,14 +6,16 @@ package jsonschema import ( "encoding/json" + "regexp" "testing" ) -func TestMarshal(t *testing.T) { +func TestGoRoundTrip(t *testing.T) { + // Verify that Go representations round-trip. for _, s := range []*Schema{ {Type: "null"}, {Types: []string{"null", "number"}}, - {Type: "string", MinLength: Ptr(20.0)}, + {Type: "string", MinLength: Ptr(20)}, {Minimum: Ptr(20.0)}, {Items: &Schema{Type: "integer"}}, {Const: Ptr(any(0))}, @@ -38,3 +40,69 @@ func TestMarshal(t *testing.T) { } } } + +func TestJSONRoundTrip(t *testing.T) { + // Verify that JSON texts for schemas marshal into equivalent forms. + // We don't expect everything to round-trip perfectly. For example, "true" and "false" + // will turn into their object equivalents. + // But most things should. + // Some of these cases test Schema.{UnM,M}arshalJSON. + // Most of others follow from the behavior of encoding/json, but they are still + // valuable as regression tests of this package's behavior. 
+ for _, tt := range []struct { + in, want string + }{ + {`true`, `{}`}, // boolean schemas become object schemas + {`false`, `{"not":{}}`}, + {`{"type":"", "enum":null}`, `{}`}, // empty fields are omitted + {`{"minimum":1}`, `{"minimum":1}`}, + {`{"minimum":1.0}`, `{"minimum":1}`}, // floating-point integers lose their fractional part + {`{"minLength":1.0}`, `{"minLength":1}`}, // some floats are unmarshaled into ints, but you can't tell + { + // map keys are sorted + `{"$vocabulary":{"b":true, "a":false}}`, + `{"$vocabulary":{"a":false,"b":true}}`, + }, + {`{"unk":0}`, `{}`}, // unknown fields are dropped, unfortunately + } { + var s Schema + if err := json.Unmarshal([]byte(tt.in), &s); err != nil { + t.Fatal(err) + } + data, err := json.Marshal(s) + if err != nil { + t.Fatal(err) + } + if got := string(data); got != tt.want { + t.Errorf("%s:\ngot %s\nwant %s", tt.in, got, tt.want) + } + } +} + +func TestUnmarshalErrors(t *testing.T) { + for _, tt := range []struct { + in string + want string // error must match this regexp + }{ + {`1`, "cannot unmarshal number"}, + {`{"type":1}`, `invalid value for "type"`}, + {`{"minLength":1.5}`, `minLength:.*not an int`}, + {`{"maxLength":1.5}`, `maxLength:.*not an int`}, + {`{"minItems":1.5}`, `minItems:.*not an int`}, + {`{"maxItems":1.5}`, `maxItems:.*not an int`}, + {`{"minProperties":1.5}`, `minProperties:.*not an int`}, + {`{"maxProperties":1.5}`, `maxProperties:.*not an int`}, + {`{"minContains":1.5}`, `minContains:.*not an int`}, + {`{"maxContains":1.5}`, `maxContains:.*not an int`}, + } { + var s Schema + err := json.Unmarshal([]byte(tt.in), &s) + if err == nil { + t.Fatalf("%s: no error but expected one", tt.in) + } + if !regexp.MustCompile(tt.want).MatchString(err.Error()) { + t.Errorf("%s: error %q does not match %q", tt.in, err, tt.want) + } + + } +} diff --git a/internal/mcp/internal/jsonschema/validate.go b/internal/mcp/internal/jsonschema/validate.go index 2b847c22b59..26f3844a4e9 100644 --- a/internal/mcp/internal/jsonschema/validate.go +++ b/internal/mcp/internal/jsonschema/validate.go @@ -145,12 +145,12 @@ func (st *state) validate(instance reflect.Value, schema *Schema, callerAnns *an str := instance.String() n := utf8.RuneCountInString(str) if schema.MinLength != nil { - if m := int(*schema.MinLength); n < m { + if m := *schema.MinLength; n < m { return fmt.Errorf("minLength: %q contains %d Unicode code points, fewer than %d", str, n, m) } } if schema.MaxLength != nil { - if m := int(*schema.MaxLength); n > m { + if m := *schema.MaxLength; n > m { return fmt.Errorf("maxLength: %q contains %d Unicode code points, more than %d", str, n, m) } } @@ -268,7 +268,7 @@ func (st *state) validate(instance reflect.Value, schema *Schema, callerAnns *an anns.noteIndex(i) } } - if nContains == 0 && (schema.MinContains == nil || int(*schema.MinContains) > 0) { + if nContains == 0 && (schema.MinContains == nil || *schema.MinContains > 0) { return fmt.Errorf("contains: %s does not have an item matching %s", instance, schema.Contains) } @@ -277,23 +277,23 @@ func (st *state) validate(instance reflect.Value, schema *Schema, callerAnns *an // https://json-schema.org/draft/2020-12/draft-bhutton-json-schema-validation-01#section-6.4 // TODO(jba): check that these next four keywords' values are integers. 
if schema.MinContains != nil && schema.Contains != nil { - if m := int(*schema.MinContains); nContains < m { + if m := *schema.MinContains; nContains < m { return fmt.Errorf("minContains: contains validated %d items, less than %d", nContains, m) } } if schema.MaxContains != nil && schema.Contains != nil { - if m := int(*schema.MaxContains); nContains > m { + if m := *schema.MaxContains; nContains > m { return fmt.Errorf("maxContains: contains validated %d items, greater than %d", nContains, m) } } if schema.MinItems != nil { - if m := int(*schema.MinItems); instance.Len() < m { + if m := *schema.MinItems; instance.Len() < m { return fmt.Errorf("minItems: array length %d is less than %d", instance.Len(), m) } } if schema.MaxItems != nil { - if m := int(*schema.MaxItems); instance.Len() > m { - return fmt.Errorf("minItems: array length %d is greater than %d", instance.Len(), m) + if m := *schema.MaxItems; instance.Len() > m { + return fmt.Errorf("maxItems: array length %d is greater than %d", instance.Len(), m) } } if schema.UniqueItems { @@ -385,12 +385,12 @@ func (st *state) validate(instance reflect.Value, schema *Schema, callerAnns *an // https://json-schema.org/draft/2020-12/draft-bhutton-json-schema-validation-01#section-6.5 if schema.MinProperties != nil { - if n, m := instance.Len(), int(*schema.MinProperties); n < m { + if n, m := instance.Len(), *schema.MinProperties; n < m { return fmt.Errorf("minProperties: object has %d properties, less than %d", n, m) } } if schema.MaxProperties != nil { - if n, m := instance.Len(), int(*schema.MaxProperties); n > m { + if n, m := instance.Len(), *schema.MaxProperties; n > m { return fmt.Errorf("maxProperties: object has %d properties, greater than %d", n, m) } } From 0f6a53f117b8301d7df42c33a03ad120da7469f3 Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Fri, 2 May 2025 12:40:29 -0400 Subject: [PATCH 265/270] mcp: remove ServerConnection Remove ServerConnection. - It's not in typescript or mcpgo APIs. - Amortizing a client over multiple servers doesn't have any value. Change-Id: I725b42569733d6836864d96b16898a10ae91e570 Reviewed-on: https://go-review.googlesource.com/c/tools/+/669575 LUCI-TryBot-Result: Go LUCI Reviewed-by: Robert Findley --- internal/mcp/client.go | 115 ++++++++++------------------ internal/mcp/cmd_test.go | 7 +- internal/mcp/mcp.go | 15 ++-- internal/mcp/mcp_test.go | 50 +++++------- internal/mcp/server_example_test.go | 7 +- internal/mcp/sse_example_test.go | 8 +- internal/mcp/sse_test.go | 13 ++-- internal/mcp/transport.go | 2 - 8 files changed, 83 insertions(+), 134 deletions(-) diff --git a/internal/mcp/client.go b/internal/mcp/client.go index 351f8b4cc5e..c9b5b4c40b6 100644 --- a/internal/mcp/client.go +++ b/internal/mcp/client.go @@ -2,31 +2,27 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. +// TODO: consider passing Transport to NewClient and merging {Connection,Client}Options package mcp import ( "context" "encoding/json" "fmt" - "iter" - "slices" "sync" jsonrpc2 "golang.org/x/tools/internal/jsonrpc2_v2" "golang.org/x/tools/internal/mcp/internal/protocol" ) -// A Client is an MCP client, which may be connected to one or more MCP servers +// A Client is an MCP client, which may be connected to an MCP server // using the [Client.Connect] method. -// -// TODO(rfindley): revisit the many-to-one relationship of clients and servers. -// It is a bit odd. 
type Client struct { - name string - version string - - mu sync.Mutex - servers []*ServerConnection + name string + version string + mu sync.Mutex + conn *jsonrpc2.Connection + initializeResult *protocol.InitializeResult } // NewClient creates a new Client. @@ -41,99 +37,68 @@ func NewClient(name, version string, opts *ClientOptions) *Client { } } -// Servers returns an iterator that yields the current set of server -// connections. -func (c *Client) Servers() iter.Seq[*ServerConnection] { - c.mu.Lock() - clients := slices.Clone(c.servers) - c.mu.Unlock() - return slices.Values(clients) -} - -// ClientOptions configures the behavior of the client, and apply to every -// client-server connection created using [Client.Connect]. +// ClientOptions configures the behavior of the client. type ClientOptions struct{} // bind implements the binder[*ServerConnection] interface, so that Clients can // be connected using [connect]. -func (c *Client) bind(conn *jsonrpc2.Connection) *ServerConnection { - sc := &ServerConnection{ - conn: conn, - client: c, - } +func (c *Client) bind(conn *jsonrpc2.Connection) *Client { c.mu.Lock() - c.servers = append(c.servers, sc) - c.mu.Unlock() - return sc + defer c.mu.Unlock() + c.conn = conn + return c } // disconnect implements the binder[*ServerConnection] interface, so that // Clients can be connected using [connect]. -func (c *Client) disconnect(sc *ServerConnection) { - c.mu.Lock() - defer c.mu.Unlock() - c.servers = slices.DeleteFunc(c.servers, func(sc2 *ServerConnection) bool { - return sc2 == sc - }) +func (c *Client) disconnect(*Client) { + // Do nothing. In particular, do not set conn to nil: it needs to exist so it can + // return an error. } // Connect connects the MCP client over the given transport and initializes an // MCP session. // -// It returns an initialized [ServerConnection] object that may be used to -// query the MCP server, terminate the connection (with [Connection.Close]), or -// await server termination (with [Connection.Wait]). -// // Typically, it is the responsibility of the client to close the connection // when it is no longer needed. However, if the connection is closed by the // server, calls or notifications will return an error wrapping // [ErrConnectionClosed]. -func (c *Client) Connect(ctx context.Context, t Transport, opts *ConnectionOptions) (sc *ServerConnection, err error) { +func (c *Client) Connect(ctx context.Context, t Transport, opts *ConnectionOptions) (err error) { defer func() { - if sc != nil && err != nil { - _ = sc.Close() + if err != nil { + _ = c.Close() } }() - sc, err = connect(ctx, t, opts, c) + _, err = connect(ctx, t, opts, c) if err != nil { - return nil, err + return err } params := &protocol.InitializeParams{ ClientInfo: protocol.Implementation{Name: c.name, Version: c.version}, } - if err := call(ctx, sc.conn, "initialize", params, &sc.initializeResult); err != nil { - return nil, err + if err := call(ctx, c.conn, "initialize", params, &c.initializeResult); err != nil { + return err } - if err := sc.conn.Notify(ctx, "notifications/initialized", &protocol.InitializedParams{}); err != nil { - return nil, err + if err := c.conn.Notify(ctx, "notifications/initialized", &protocol.InitializedParams{}); err != nil { + return err } - return sc, nil -} - -// A ServerConnection is a connection with an MCP server. -// -// It handles messages from the client, and can be used to send messages to the -// client. Create a connection by calling [Server.Connect]. 
-type ServerConnection struct { - conn *jsonrpc2.Connection - client *Client - initializeResult *protocol.InitializeResult + return nil } // Close performs a graceful close of the connection, preventing new requests // from being handled, and waiting for ongoing requests to return. Close then // terminates the connection. -func (cc *ServerConnection) Close() error { - return cc.conn.Close() +func (c *Client) Close() error { + return c.conn.Close() } // Wait waits for the connection to be closed by the server. // Generally, clients should be responsible for closing the connection. -func (cc *ServerConnection) Wait() error { - return cc.conn.Wait() +func (c *Client) Wait() error { + return c.conn.Wait() } -func (sc *ServerConnection) handle(ctx context.Context, req *jsonrpc2.Request) (any, error) { +func (*Client) handle(ctx context.Context, req *jsonrpc2.Request) (any, error) { // No need to check that the connection is initialized, since we initialize // it in Connect. switch req.Method { @@ -145,24 +110,24 @@ func (sc *ServerConnection) handle(ctx context.Context, req *jsonrpc2.Request) ( } // Ping makes an MCP "ping" request to the server. -func (sc *ServerConnection) Ping(ctx context.Context) error { - return call(ctx, sc.conn, "ping", nil, nil) +func (c *Client) Ping(ctx context.Context) error { + return call(ctx, c.conn, "ping", nil, nil) } // ListPrompts lists prompts that are currently available on the server. -func (sc *ServerConnection) ListPrompts(ctx context.Context) ([]protocol.Prompt, error) { +func (c *Client) ListPrompts(ctx context.Context) ([]protocol.Prompt, error) { var ( params = &protocol.ListPromptsParams{} result protocol.ListPromptsResult ) - if err := call(ctx, sc.conn, "prompts/list", params, &result); err != nil { + if err := call(ctx, c.conn, "prompts/list", params, &result); err != nil { return nil, err } return result.Prompts, nil } // GetPrompt gets a prompt from the server. -func (sc *ServerConnection) GetPrompt(ctx context.Context, name string, args map[string]string) (*protocol.GetPromptResult, error) { +func (c *Client) GetPrompt(ctx context.Context, name string, args map[string]string) (*protocol.GetPromptResult, error) { var ( params = &protocol.GetPromptParams{ Name: name, @@ -170,19 +135,19 @@ func (sc *ServerConnection) GetPrompt(ctx context.Context, name string, args map } result = &protocol.GetPromptResult{} ) - if err := call(ctx, sc.conn, "prompts/get", params, result); err != nil { + if err := call(ctx, c.conn, "prompts/get", params, result); err != nil { return nil, err } return result, nil } // ListTools lists tools that are currently available on the server. -func (sc *ServerConnection) ListTools(ctx context.Context) ([]protocol.Tool, error) { +func (c *Client) ListTools(ctx context.Context) ([]protocol.Tool, error) { var ( params = &protocol.ListToolsParams{} result protocol.ListToolsResult ) - if err := call(ctx, sc.conn, "tools/list", params, &result); err != nil { + if err := call(ctx, c.conn, "tools/list", params, &result); err != nil { return nil, err } return result.Tools, nil @@ -193,7 +158,7 @@ func (sc *ServerConnection) ListTools(ctx context.Context) ([]protocol.Tool, err // TODO(jba): make the following true: // If the provided arguments do not conform to the schema for the given tool, // the call fails. 
-func (sc *ServerConnection) CallTool(ctx context.Context, name string, args map[string]any) (_ *protocol.CallToolResult, err error) { +func (c *Client) CallTool(ctx context.Context, name string, args map[string]any) (_ *protocol.CallToolResult, err error) { defer func() { if err != nil { err = fmt.Errorf("calling tool %q: %w", name, err) @@ -214,7 +179,7 @@ func (sc *ServerConnection) CallTool(ctx context.Context, name string, args map[ } result protocol.CallToolResult ) - if err := call(ctx, sc.conn, "tools/call", params, &result); err != nil { + if err := call(ctx, c.conn, "tools/call", params, &result); err != nil { return nil, err } return &result, nil diff --git a/internal/mcp/cmd_test.go b/internal/mcp/cmd_test.go index 0cf3fa36301..822d6498883 100644 --- a/internal/mcp/cmd_test.go +++ b/internal/mcp/cmd_test.go @@ -50,11 +50,10 @@ func TestCmdTransport(t *testing.T) { cmd.Env = append(os.Environ(), runAsServer+"=true") client := mcp.NewClient("client", "v0.0.1", nil) - serverConn, err := client.Connect(ctx, mcp.NewCommandTransport(cmd), nil) - if err != nil { + if err := client.Connect(ctx, mcp.NewCommandTransport(cmd), nil); err != nil { log.Fatal(err) } - got, err := serverConn.CallTool(ctx, "greet", map[string]any{"name": "user"}) + got, err := client.CallTool(ctx, "greet", map[string]any{"name": "user"}) if err != nil { log.Fatal(err) } @@ -64,7 +63,7 @@ func TestCmdTransport(t *testing.T) { if diff := cmp.Diff(want, got); diff != "" { t.Errorf("greet returned unexpected content (-want +got):\n%s", diff) } - if err := serverConn.Close(); err != nil { + if err := client.Close(); err != nil { t.Fatalf("closing server: %v", err) } } diff --git a/internal/mcp/mcp.go b/internal/mcp/mcp.go index f71521885e4..40710b68edc 100644 --- a/internal/mcp/mcp.go +++ b/internal/mcp/mcp.go @@ -9,13 +9,13 @@ // To get started, create either a [Client] or [Server], and connect it to a // peer using a [Transport]. The diagram below illustrates how this works: // -// Client Server -// ⇅ (jsonrpc2) ⇅ -// ServerConnection ⇄ Client Transport ⇄ Server Transport ⇄ ClientConnection +// Client Server +// ⇅ (jsonrpc2) ⇅ +// Client Transport ⇄ Server Transport ⇄ ClientConnection // // A [Client] is an MCP client, which can be configured with various client -// capabilities. Clients may be connected to one or more [Server] instances -// using the [Client.Connect] method, which creates a [ServerConnection]. +// capabilities. Clients may be connected to a [Server] instance +// using the [Client.Connect] method. // // Similarly, a [Server] is an MCP server, which can be configured with various // server capabilities. Servers may be connected to one or more [Client] @@ -44,12 +44,11 @@ // client := mcp.NewClient("mcp-client", "v1.0.0", nil) // // Connect to a server over stdin/stdout // transport := mcp.NewCommandTransport(exec.Command("myserver")) -// serverConn, err := client.Connect(ctx, transport, nil) -// if err != nil { +// if err := client.Connect(ctx, transport, nil); err != nil { // log.Fatal(err) // } // // Call a tool on the server. 
-// content, err := serverConn.CallTool(ctx, "greet", map[string]any{"name": "you"}) +// content, err := client.CallTool(ctx, "greet", map[string]any{"name": "you"}) // // Here is an example of the corresponding server, connected over stdin/stdout: // diff --git a/internal/mcp/mcp_test.go b/internal/mcp/mcp_test.go index a455f39c835..e5be0f0d544 100644 --- a/internal/mcp/mcp_test.go +++ b/internal/mcp/mcp_test.go @@ -83,20 +83,15 @@ func TestEndToEnd(t *testing.T) { c := NewClient("testClient", "v1.0.0", nil) // Connect the client. - sc, err := c.Connect(ctx, ct, nil) - if err != nil { + if err := c.Connect(ctx, ct, nil); err != nil { t.Fatal(err) } - if got := slices.Collect(c.Servers()); len(got) != 1 { - t.Errorf("after connection, Servers() has length %d, want 1", len(got)) - } - - if err := sc.Ping(ctx); err != nil { + if err := c.Ping(ctx); err != nil { t.Fatalf("ping failed: %v", err) } - gotPrompts, err := sc.ListPrompts(ctx) + gotPrompts, err := c.ListPrompts(ctx) if err != nil { t.Errorf("prompts/list failed: %v", err) } @@ -112,7 +107,7 @@ func TestEndToEnd(t *testing.T) { t.Fatalf("prompts/list mismatch (-want +got):\n%s", diff) } - gotReview, err := sc.GetPrompt(ctx, "code_review", map[string]string{"Code": "1+1"}) + gotReview, err := c.GetPrompt(ctx, "code_review", map[string]string{"Code": "1+1"}) if err != nil { t.Fatal(err) } @@ -127,11 +122,11 @@ func TestEndToEnd(t *testing.T) { t.Errorf("prompts/get 'code_review' mismatch (-want +got):\n%s", diff) } - if _, err := sc.GetPrompt(ctx, "fail", map[string]string{}); err == nil || !strings.Contains(err.Error(), failure.Error()) { + if _, err := c.GetPrompt(ctx, "fail", map[string]string{}); err == nil || !strings.Contains(err.Error(), failure.Error()) { t.Errorf("fail returned unexpected error: got %v, want containing %v", err, failure) } - gotTools, err := sc.ListTools(ctx) + gotTools, err := c.ListTools(ctx) if err != nil { t.Errorf("tools/list failed: %v", err) } @@ -158,7 +153,7 @@ func TestEndToEnd(t *testing.T) { t.Fatalf("tools/list mismatch (-want +got):\n%s", diff) } - gotHi, err := sc.CallTool(ctx, "greet", map[string]any{"name": "user"}) + gotHi, err := c.CallTool(ctx, "greet", map[string]any{"name": "user"}) if err != nil { t.Fatal(err) } @@ -169,7 +164,7 @@ func TestEndToEnd(t *testing.T) { t.Errorf("tools/call 'greet' mismatch (-want +got):\n%s", diff) } - gotFail, err := sc.CallTool(ctx, "fail", map[string]any{}) + gotFail, err := c.CallTool(ctx, "fail", map[string]any{}) // Counter-intuitively, when a tool fails, we don't expect an RPC error for // call tool: instead, the failure is embedded in the result. if err != nil { @@ -184,7 +179,7 @@ func TestEndToEnd(t *testing.T) { } // Disconnect. - sc.Close() + c.Close() clientWG.Wait() // After disconnecting, neither client nor server should have any @@ -192,9 +187,6 @@ func TestEndToEnd(t *testing.T) { for range s.Clients() { t.Errorf("unexpected client after disconnection") } - for range c.Servers() { - t.Errorf("unexpected server after disconnection") - } } // basicConnection returns a new basic client-server connection configured with @@ -202,7 +194,7 @@ func TestEndToEnd(t *testing.T) { // // The caller should cancel either the client connection or server connection // when the connections are no longer needed. 
-func basicConnection(t *testing.T, tools ...*Tool) (*ClientConnection, *ServerConnection) { +func basicConnection(t *testing.T, tools ...*Tool) (*ClientConnection, *Client) { t.Helper() ctx := context.Background() @@ -218,32 +210,31 @@ func basicConnection(t *testing.T, tools ...*Tool) (*ClientConnection, *ServerCo } c := NewClient("testClient", "v1.0.0", nil) - sc, err := c.Connect(ctx, ct, nil) - if err != nil { + if err := c.Connect(ctx, ct, nil); err != nil { t.Fatal(err) } - return cc, sc + return cc, c } func TestServerClosing(t *testing.T) { - cc, sc := basicConnection(t, MakeTool("greet", "say hi", sayHi)) - defer sc.Close() + cc, c := basicConnection(t, MakeTool("greet", "say hi", sayHi)) + defer c.Close() ctx := context.Background() var wg sync.WaitGroup wg.Add(1) go func() { - if err := sc.Wait(); err != nil { + if err := c.Wait(); err != nil { t.Errorf("server connection failed: %v", err) } wg.Done() }() - if _, err := sc.CallTool(ctx, "greet", map[string]any{"name": "user"}); err != nil { + if _, err := c.CallTool(ctx, "greet", map[string]any{"name": "user"}); err != nil { t.Fatalf("after connecting: %v", err) } cc.Close() wg.Wait() - if _, err := sc.CallTool(ctx, "greet", map[string]any{"name": "user"}); !errors.Is(err, ErrConnectionClosed) { + if _, err := c.CallTool(ctx, "greet", map[string]any{"name": "user"}); !errors.Is(err, ErrConnectionClosed) { t.Errorf("after disconnection, got error %v, want EOF", err) } } @@ -264,16 +255,15 @@ func TestBatching(t *testing.T) { // 'initialize' to block. Therefore, we can only test with a size of 1. const batchSize = 1 BatchSize(ct, batchSize) - sc, err := c.Connect(ctx, ct, opts) - if err != nil { + if err := c.Connect(ctx, ct, opts); err != nil { t.Fatal(err) } - defer sc.Close() + defer c.Close() errs := make(chan error, batchSize) for i := range batchSize { go func() { - _, err := sc.ListTools(ctx) + _, err := c.ListTools(ctx) errs <- err }() time.Sleep(2 * time.Millisecond) diff --git a/internal/mcp/server_example_test.go b/internal/mcp/server_example_test.go index a386903d604..e532416cb6e 100644 --- a/internal/mcp/server_example_test.go +++ b/internal/mcp/server_example_test.go @@ -35,18 +35,17 @@ func ExampleServer() { } client := mcp.NewClient("client", "v0.0.1", nil) - serverConnection, err := client.Connect(ctx, clientTransport, nil) - if err != nil { + if err := client.Connect(ctx, clientTransport, nil); err != nil { log.Fatal(err) } - res, err := serverConnection.CallTool(ctx, "greet", map[string]any{"name": "user"}) + res, err := client.CallTool(ctx, "greet", map[string]any{"name": "user"}) if err != nil { log.Fatal(err) } fmt.Println(res.Content[0].Text) - serverConnection.Close() + client.Close() clientConnection.Wait() // Output: Hi user diff --git a/internal/mcp/sse_example_test.go b/internal/mcp/sse_example_test.go index ad88a22e073..7aeb24d1154 100644 --- a/internal/mcp/sse_example_test.go +++ b/internal/mcp/sse_example_test.go @@ -34,13 +34,13 @@ func ExampleSSEHandler() { ctx := context.Background() transport := mcp.NewSSEClientTransport(httpServer.URL) - serverConn, err := mcp.NewClient("test", "v1.0.0", nil).Connect(ctx, transport, nil) - if err != nil { + client := mcp.NewClient("test", "v1.0.0", nil) + if err := client.Connect(ctx, transport, nil); err != nil { log.Fatal(err) } - defer serverConn.Close() + defer client.Close() - res, err := serverConn.CallTool(ctx, "add", map[string]any{"x": 1, "y": 2}) + res, err := client.CallTool(ctx, "add", map[string]any{"x": 1, "y": 2}) if err != nil { log.Fatal(err) } 
diff --git a/internal/mcp/sse_test.go b/internal/mcp/sse_test.go index 920d746d3ad..57a8e746405 100644 --- a/internal/mcp/sse_test.go +++ b/internal/mcp/sse_test.go @@ -36,16 +36,15 @@ func TestSSEServer(t *testing.T) { clientTransport := NewSSEClientTransport(httpServer.URL) - client := NewClient("testClient", "v1.0.0", nil) - sc, err := client.Connect(ctx, clientTransport, nil) - if err != nil { + c := NewClient("testClient", "v1.0.0", nil) + if err := c.Connect(ctx, clientTransport, nil); err != nil { t.Fatal(err) } - if err := sc.Ping(ctx); err != nil { + if err := c.Ping(ctx); err != nil { t.Fatal(err) } cc := <-clients - gotHi, err := sc.CallTool(ctx, "greet", map[string]any{"name": "user"}) + gotHi, err := c.CallTool(ctx, "greet", map[string]any{"name": "user"}) if err != nil { t.Fatal(err) } @@ -59,11 +58,11 @@ func TestSSEServer(t *testing.T) { // Test that closing either end of the connection terminates the other // end. if closeServerFirst { - sc.Close() + c.Close() cc.Wait() } else { cc.Close() - sc.Wait() + c.Wait() } }) } diff --git a/internal/mcp/transport.go b/internal/mcp/transport.go index 1ca4c9db13f..015b9670e38 100644 --- a/internal/mcp/transport.go +++ b/internal/mcp/transport.go @@ -77,7 +77,6 @@ func NewLocalTransport() (*IOTransport, *IOTransport) { // [ServerConnection] and [ClientConnection]. type handler interface { handle(ctx context.Context, req *jsonrpc2.Request) (result any, err error) - comparable } type binder[T handler] interface { @@ -122,7 +121,6 @@ func connect[H handler](ctx context.Context, t Transport, opts *ConnectionOption }, }) assert(preempter.conn != nil, "unbound preempter") - assert(h != zero, "unbound connection") return h, nil } From 635622b8d7adfa5b93a40d8554af4836025efb90 Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Fri, 25 Apr 2025 17:53:38 -0400 Subject: [PATCH 266/270] jsonschema: using hashing for uniqueItems Determine if a slice contains unique items by hashing them. This should take linear time on average, better than the previous quadratic algorithm. Change-Id: I8dc95bb6f29d802bdbc64bc7f7e698c71eae5ce7 Reviewed-on: https://go-review.googlesource.com/c/tools/+/669455 Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI --- internal/mcp/internal/jsonschema/util.go | 87 ++++++++++++++++++- internal/mcp/internal/jsonschema/util_test.go | 54 ++++++++++++ internal/mcp/internal/jsonschema/validate.go | 29 +++++-- 3 files changed, 162 insertions(+), 8 deletions(-) diff --git a/internal/mcp/internal/jsonschema/util.go b/internal/mcp/internal/jsonschema/util.go index 0e7bd083ea0..266a324f338 100644 --- a/internal/mcp/internal/jsonschema/util.go +++ b/internal/mcp/internal/jsonschema/util.go @@ -6,11 +6,15 @@ package jsonschema import ( "bytes" + "cmp" + "encoding/binary" "encoding/json" "fmt" + "hash/maphash" "math" "math/big" "reflect" + "slices" ) // Equal reports whether two Go values representing JSON values are equal according @@ -126,14 +130,93 @@ func equalValue(x, y reflect.Value) bool { return x.String() == y.String() case reflect.Bool: return x.Bool() == y.Bool() - case reflect.Complex64, reflect.Complex128: - return x.Complex() == y.Complex() // Ints, uints and floats handled in jsonNumber, at top of function. default: panic(fmt.Sprintf("unsupported kind: %s", x.Kind())) } } +// hashValue adds v to the data hashed by h. v must not have cycles. +// hashValue panics if the value contains functions or channels, or maps whose +// key type is not string. +// It ignores unexported fields of structs. 
+// Calls to hashValue with the equal values (in the sense +// of [Equal]) result in the same sequence of values written to the hash. +func hashValue(h *maphash.Hash, v reflect.Value) { + // TODO: replace writes of basic types with WriteComparable in 1.24. + + writeUint := func(u uint64) { + var buf [8]byte + binary.BigEndian.PutUint64(buf[:], u) + h.Write(buf[:]) + } + + var write func(reflect.Value) + write = func(v reflect.Value) { + if r, ok := jsonNumber(v); ok { + // We want 1.0 and 1 to hash the same. + // big.Rats are always normalized, so they will be. + // We could do this more efficiently by handling the int and float cases + // separately, but that's premature. + writeUint(uint64(r.Sign() + 1)) + h.Write(r.Num().Bytes()) + h.Write(r.Denom().Bytes()) + return + } + switch v.Kind() { + case reflect.Invalid: + h.WriteByte(0) + case reflect.String: + h.WriteString(v.String()) + case reflect.Bool: + if v.Bool() { + h.WriteByte(1) + } else { + h.WriteByte(0) + } + case reflect.Complex64, reflect.Complex128: + c := v.Complex() + writeUint(math.Float64bits(real(c))) + writeUint(math.Float64bits(imag(c))) + case reflect.Array, reflect.Slice: + // Although we could treat []byte more efficiently, + // JSON values are unlikely to contain them. + writeUint(uint64(v.Len())) + for i := range v.Len() { + write(v.Index(i)) + } + case reflect.Interface, reflect.Pointer: + write(v.Elem()) + case reflect.Struct: + t := v.Type() + for i := range t.NumField() { + if sf := t.Field(i); sf.IsExported() { + write(v.FieldByIndex(sf.Index)) + } + } + case reflect.Map: + if v.Type().Key().Kind() != reflect.String { + panic("map with non-string key") + } + // Sort the keys so the hash is deterministic. + keys := v.MapKeys() + // Write the length. That distinguishes between, say, two consecutive + // maps with disjoint keys from one map that has the items of both. + writeUint(uint64(len(keys))) + slices.SortFunc(keys, func(x, y reflect.Value) int { return cmp.Compare(x.String(), y.String()) }) + for _, k := range keys { + write(k) + write(v.MapIndex(k)) + } + // Ints, uints and floats handled in jsonNumber, at top of function. + default: + panic(fmt.Sprintf("unsupported kind: %s", v.Kind())) + } + } + + write(v) +} + // jsonNumber converts a numeric value or a json.Number to a [big.Rat]. // If v is not a number, it returns nil, false. func jsonNumber(v reflect.Value) (*big.Rat, bool) { diff --git a/internal/mcp/internal/jsonschema/util_test.go b/internal/mcp/internal/jsonschema/util_test.go index ee79b3396c9..7b16d17a42c 100644 --- a/internal/mcp/internal/jsonschema/util_test.go +++ b/internal/mcp/internal/jsonschema/util_test.go @@ -6,6 +6,7 @@ package jsonschema import ( "encoding/json" + "hash/maphash" "reflect" "testing" ) @@ -71,3 +72,56 @@ func TestJSONType(t *testing.T) { } } + +func TestHash(t *testing.T) { + x := map[string]any{ + "s": []any{1, "foo", nil, true}, + "f": 2.5, + "m": map[string]any{ + "n": json.Number("123.456"), + "schema": &Schema{Type: "integer", UniqueItems: true}, + }, + "c": 1.2 + 3.4i, + "n": nil, + } + + seed := maphash.MakeSeed() + + hash := func(x any) uint64 { + var h maphash.Hash + h.SetSeed(seed) + hashValue(&h, reflect.ValueOf(x)) + return h.Sum64() + } + + want := hash(x) + // Run several times to verify consistency. + for range 10 { + if got := hash(x); got != want { + t.Errorf("hash values differ: %d vs. %d", got, want) + } + } + + // Check mathematically equal values. 
+ nums := []any{ + 5, + uint(5), + 5.0, + json.Number("5"), + json.Number("5.00"), + } + for i, n := range nums { + if i == 0 { + want = hash(n) + } else if got := hash(n); got != want { + t.Errorf("hashes differ between %v (%[1]T) and %v (%[2]T)", nums[0], n) + } + } + + // Check that a bare JSON `null` is OK. + var null any + if err := json.Unmarshal([]byte(`null`), &null); err != nil { + t.Fatal(err) + } + _ = hash(null) +} diff --git a/internal/mcp/internal/jsonschema/validate.go b/internal/mcp/internal/jsonschema/validate.go index 26f3844a4e9..0ff3769e8a2 100644 --- a/internal/mcp/internal/jsonschema/validate.go +++ b/internal/mcp/internal/jsonschema/validate.go @@ -6,6 +6,7 @@ package jsonschema import ( "fmt" + "hash/maphash" "math" "math/big" "reflect" @@ -297,16 +298,32 @@ func (st *state) validate(instance reflect.Value, schema *Schema, callerAnns *an } } if schema.UniqueItems { - // Determine uniqueness with O(n²) comparisons. - // TODO: optimize via hashing. - for i := range instance.Len() { - for j := i + 1; j < instance.Len(); j++ { - if equalValue(instance.Index(i), instance.Index(j)) { - return fmt.Errorf("uniqueItems: array items %d and %d are equal", i, j) + if instance.Len() > 1 { + // Hash each item and compare the hashes. + // If two hashes differ, the items differ. + // If two hashes are the same, compare the collisions for equality. + // (The same logic as hash table lookup.) + // TODO(jba): Use container/hash.Map when it becomes available (https://go.dev/issue/69559), + hashes := map[uint64][]int{} // from hash to indices + seed := maphash.MakeSeed() + for i := range instance.Len() { + item := instance.Index(i) + var h maphash.Hash + h.SetSeed(seed) + hashValue(&h, item) + hv := h.Sum64() + if sames := hashes[hv]; len(sames) > 0 { + for _, j := range sames { + if equalValue(item, instance.Index(j)) { + return fmt.Errorf("uniqueItems: array items %d and %d are equal", i, j) + } + } } + hashes[hv] = append(hashes[hv], i) } } } + // https://json-schema.org/draft/2020-12/json-schema-core#section-11.2 if schema.UnevaluatedItems != nil && !anns.allItems { // Apply this subschema to all items in the array that haven't been successfully validated. From 2f1855032061b882e2584ba9534d3d294bb88488 Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Fri, 2 May 2025 17:25:43 -0400 Subject: [PATCH 267/270] jsonschema: better errors unmarshaling ints Distinguish among non-numbers, non-integral floating-point values, and unrepresentable integers. Change-Id: I0d60b92d6b4dd0baa536476931eb75e42ed5366a Reviewed-on: https://go-review.googlesource.com/c/tools/+/669695 Reviewed-by: Alan Donovan LUCI-TryBot-Result: Go LUCI --- internal/mcp/internal/jsonschema/schema.go | 83 ++++++++++++------- .../mcp/internal/jsonschema/schema_test.go | 21 +++-- 2 files changed, 64 insertions(+), 40 deletions(-) diff --git a/internal/mcp/internal/jsonschema/schema.go b/internal/mcp/internal/jsonschema/schema.go index 444d2b82f85..dded5366b99 100644 --- a/internal/mcp/internal/jsonschema/schema.go +++ b/internal/mcp/internal/jsonschema/schema.go @@ -8,11 +8,11 @@ package jsonschema import ( "bytes" - "cmp" "encoding/json" "errors" "fmt" "iter" + "math" ) // A Schema is a JSON schema object. 
@@ -177,14 +177,14 @@ func (s *Schema) UnmarshalJSON(data []byte) error { ms := struct { Type json.RawMessage `json:"type,omitempty"` Const json.RawMessage `json:"const,omitempty"` - MinLength *float64 `json:"minLength,omitempty"` - MaxLength *float64 `json:"maxLength,omitempty"` - MinItems *float64 `json:"minItems,omitempty"` - MaxItems *float64 `json:"maxItems,omitempty"` - MinProperties *float64 `json:"minProperties,omitempty"` - MaxProperties *float64 `json:"maxProperties,omitempty"` - MinContains *float64 `json:"minContains,omitempty"` - MaxContains *float64 `json:"maxContains,omitempty"` + MinLength *integer `json:"minLength,omitempty"` + MaxLength *integer `json:"maxLength,omitempty"` + MinItems *integer `json:"minItems,omitempty"` + MaxItems *integer `json:"maxItems,omitempty"` + MinProperties *integer `json:"minProperties,omitempty"` + MaxProperties *integer `json:"maxProperties,omitempty"` + MinContains *integer `json:"minContains,omitempty"` + MaxContains *integer `json:"maxContains,omitempty"` *schemaWithoutMethods }{ @@ -219,33 +219,52 @@ func (s *Schema) UnmarshalJSON(data []byte) error { } } - // Store integer properties as ints. - setInt := func(name string, dst **int, src *float64) error { - if src == nil { - return nil + set := func(dst **int, src *integer) { + if src != nil { + *dst = Ptr(int(*src)) } - i := int(*src) - if float64(i) != *src { - return fmt.Errorf("%s: %f is not an int", name, *src) - } - *dst = &i - return nil } - err = cmp.Or( - setInt("minLength", &s.MinLength, ms.MinLength), - setInt("maxLength", &s.MaxLength, ms.MaxLength), - setInt("minItems", &s.MinItems, ms.MinItems), - setInt("maxItems", &s.MaxItems, ms.MaxItems), - setInt("minProperties", &s.MinProperties, ms.MinProperties), - setInt("maxProperties", &s.MaxProperties, ms.MaxProperties), - setInt("minContains", &s.MinContains, ms.MinContains), - setInt("maxContains", &s.MaxContains, ms.MaxContains), - ) - if err != nil { - return err - } + set(&s.MinLength, ms.MinLength) + set(&s.MaxLength, ms.MaxLength) + set(&s.MinItems, ms.MinItems) + set(&s.MaxItems, ms.MaxItems) + set(&s.MinProperties, ms.MinProperties) + set(&s.MaxProperties, ms.MaxProperties) + set(&s.MinContains, ms.MinContains) + set(&s.MaxContains, ms.MaxContains) + + return nil +} +type integer int32 // for the integer-valued fields of Schema + +func (ip *integer) UnmarshalJSON(data []byte) error { + if len(data) == 0 { + // nothing to do + return nil + } + // If there is a decimal point, src is a floating-point number. + var i int64 + if bytes.ContainsRune(data, '.') { + var f float64 + if err := json.Unmarshal(data, &f); err != nil { + return errors.New("not a number") + } + i = int64(f) + if float64(i) != f { + return errors.New("not an integer value") + } + } else { + if err := json.Unmarshal(data, &i); err != nil { + return errors.New("cannot be unmarshaled into an int") + } + } + // Ensure behavior is the same on both 32-bit and 64-bit systems. 
+ if i < math.MinInt32 || i > math.MaxInt32 { + return errors.New("integer is out of range") + } + *ip = integer(i) return nil } diff --git a/internal/mcp/internal/jsonschema/schema_test.go b/internal/mcp/internal/jsonschema/schema_test.go index b742de3ae70..2bda7818af2 100644 --- a/internal/mcp/internal/jsonschema/schema_test.go +++ b/internal/mcp/internal/jsonschema/schema_test.go @@ -6,6 +6,8 @@ package jsonschema import ( "encoding/json" + "fmt" + "math" "regexp" "testing" ) @@ -86,14 +88,17 @@ func TestUnmarshalErrors(t *testing.T) { }{ {`1`, "cannot unmarshal number"}, {`{"type":1}`, `invalid value for "type"`}, - {`{"minLength":1.5}`, `minLength:.*not an int`}, - {`{"maxLength":1.5}`, `maxLength:.*not an int`}, - {`{"minItems":1.5}`, `minItems:.*not an int`}, - {`{"maxItems":1.5}`, `maxItems:.*not an int`}, - {`{"minProperties":1.5}`, `minProperties:.*not an int`}, - {`{"maxProperties":1.5}`, `maxProperties:.*not an int`}, - {`{"minContains":1.5}`, `minContains:.*not an int`}, - {`{"maxContains":1.5}`, `maxContains:.*not an int`}, + {`{"minLength":1.5}`, `not an integer value`}, + {`{"maxLength":1.5}`, `not an integer value`}, + {`{"minItems":1.5}`, `not an integer value`}, + {`{"maxItems":1.5}`, `not an integer value`}, + {`{"minProperties":1.5}`, `not an integer value`}, + {`{"maxProperties":1.5}`, `not an integer value`}, + {`{"minContains":1.5}`, `not an integer value`}, + {`{"maxContains":1.5}`, `not an integer value`}, + {fmt.Sprintf(`{"maxContains":%d}`, int64(math.MaxInt32+1)), `out of range`}, + {`{"minLength":9e99}`, `cannot be unmarshaled`}, + {`{"minLength":"1.5"}`, `not a number`}, } { var s Schema err := json.Unmarshal([]byte(tt.in), &s) From 125cd119e1c88b64a76c416c4c9d24983b491bf6 Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Sat, 26 Apr 2025 09:10:49 -0400 Subject: [PATCH 268/270] jsonschema: pre-compile regexps This is the first CL that deals with the process of preparing a schema for validation. Perform some basic checks on the schema. Along the way, compile regexps and store them in the schema for use during validation.
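The shape of the optimization, for readers skimming this CL: compile each pattern once when the schema is prepared, and let every later validation reuse the compiled value. Below is a minimal, self-contained sketch of that shape; the schema/resolve/validate names here are illustrative only, while the real change adds unexported pattern and patternProperties fields to Schema (see resolve.go and validate.go in the diff).

    package main

    import (
    	"fmt"
    	"regexp"
    )

    type schema struct {
    	Pattern string         // pattern as written in the JSON schema
    	pattern *regexp.Regexp // compiled once, reused on every validation
    }

    func (s *schema) resolve() error {
    	if s.Pattern == "" {
    		return nil
    	}
    	re, err := regexp.Compile(s.Pattern)
    	if err != nil {
    		return fmt.Errorf("pattern: %w", err) // reject a bad schema up front
    	}
    	s.pattern = re
    	return nil
    }

    func (s *schema) validate(str string) error {
    	if s.pattern != nil && !s.pattern.MatchString(str) {
    		return fmt.Errorf("%q does not match pattern %q", str, s.Pattern)
    	}
    	return nil
    }

    func main() {
    	s := &schema{Pattern: "^[a-z]+$"}
    	if err := s.resolve(); err != nil {
    		panic(err)
    	}
    	fmt.Println(s.validate("abc")) // <nil>
    	fmt.Println(s.validate("ABC")) // does not match
    }
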
Change-Id: I0e5e6ce28656dcecb6d2d4b2fdc98998fa05b6f1 Reviewed-on: https://go-review.googlesource.com/c/tools/+/669696 LUCI-TryBot-Result: Go LUCI Reviewed-by: Alan Donovan --- .../mcp/internal/jsonschema/infer_test.go | 3 +- internal/mcp/internal/jsonschema/resolve.go | 93 +++++++++++++++++++ .../mcp/internal/jsonschema/resolve_test.go | 37 ++++++++ internal/mcp/internal/jsonschema/schema.go | 5 + internal/mcp/internal/jsonschema/validate.go | 38 ++------ .../mcp/internal/jsonschema/validate_test.go | 5 +- internal/mcp/mcp_test.go | 3 +- internal/mcp/tool_test.go | 3 +- 8 files changed, 153 insertions(+), 34 deletions(-) create mode 100644 internal/mcp/internal/jsonschema/resolve.go create mode 100644 internal/mcp/internal/jsonschema/resolve_test.go diff --git a/internal/mcp/internal/jsonschema/infer_test.go b/internal/mcp/internal/jsonschema/infer_test.go index 1c44041f93f..8b56ffc2f9e 100644 --- a/internal/mcp/internal/jsonschema/infer_test.go +++ b/internal/mcp/internal/jsonschema/infer_test.go @@ -8,6 +8,7 @@ import ( "testing" "github.com/google/go-cmp/cmp" + "github.com/google/go-cmp/cmp/cmpopts" "golang.org/x/tools/internal/mcp/internal/jsonschema" ) @@ -63,7 +64,7 @@ func TestForType(t *testing.T) { for _, test := range tests { t.Run(test.name, func(t *testing.T) { - if diff := cmp.Diff(test.want, test.got); diff != "" { + if diff := cmp.Diff(test.want, test.got, cmpopts.IgnoreUnexported(jsonschema.Schema{})); diff != "" { t.Errorf("ForType mismatch (-want +got):\n%s", diff) } }) diff --git a/internal/mcp/internal/jsonschema/resolve.go b/internal/mcp/internal/jsonschema/resolve.go new file mode 100644 index 00000000000..2a0f1abe391 --- /dev/null +++ b/internal/mcp/internal/jsonschema/resolve.go @@ -0,0 +1,93 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file deals with preparing a schema for validation, including various checks, +// optimizations, and the resolution of cross-schema references. + +package jsonschema + +import ( + "errors" + "fmt" + "regexp" +) + +// A Resolved consists of a [Schema] along with associated information needed to +// validate documents against it. +// A Resolved has been validated against its meta-schema, and all its references +// (the $ref and $dynamicRef keywords) have been resolved to their referenced Schemas. +// Call [Schema.Resolve] to obtain a Resolved from a Schema. +type Resolved struct { + root *Schema +} + +// Resolve resolves all references within the schema and performs other tasks that +// prepare the schema for validation. +func (root *Schema) Resolve() (*Resolved, error) { + // There are three steps involved in preparing a schema to validate. + // 1. Check: validate the schema against a meta-schema, and perform other well-formedness + // checks. Precompute some values along the way. + // 2. Resolve URIs: TODO. + // 3. Resolve references: TODO. + if err := root.check(); err != nil { + return nil, err + } + return &Resolved{root: root}, nil +} + +func (s *Schema) check() error { + if s == nil { + return errors.New("nil schema") + } + var errs []error + report := func(err error) { errs = append(errs, err) } + + for ss := range s.all() { + ss.checkLocal(report) + } + return errors.Join(errs...) +} + +// checkLocal checks s for validity, independently of other schemas it may refer to. +// Since checking a regexp involves compiling it, checkLocal saves those compiled regexps +// in the schema for later use. 
+// It appends the errors it finds to errs. +func (s *Schema) checkLocal(report func(error)) { + addf := func(format string, args ...any) { + report(fmt.Errorf("jsonschema.Schema: "+format, args...)) + } + + if s == nil { + addf("nil subschema") + return + } + if err := s.basicChecks(); err != nil { + report(err) + return + } + + // TODO: validate the schema's properties, + // ideally by jsonschema-validating it against the meta-schema. + + // Check and compile regexps. + if s.Pattern != "" { + re, err := regexp.Compile(s.Pattern) + if err != nil { + addf("pattern: %w", err) + } else { + s.pattern = re + } + } + if len(s.PatternProperties) > 0 { + s.patternProperties = map[*regexp.Regexp]*Schema{} + for reString, subschema := range s.PatternProperties { + re, err := regexp.Compile(reString) + if err != nil { + addf("patternProperties[%q]: %w", reString, err) + continue + } + s.patternProperties[re] = subschema + } + } +} diff --git a/internal/mcp/internal/jsonschema/resolve_test.go b/internal/mcp/internal/jsonschema/resolve_test.go new file mode 100644 index 00000000000..7e2929438f4 --- /dev/null +++ b/internal/mcp/internal/jsonschema/resolve_test.go @@ -0,0 +1,37 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package jsonschema + +import ( + "regexp" + "testing" +) + +func TestCheckLocal(t *testing.T) { + for _, tt := range []struct { + s *Schema + want string // error must be non-nil and match this regexp + }{ + {nil, "nil"}, + { + &Schema{Pattern: "]["}, + "regexp", + }, + { + &Schema{PatternProperties: map[string]*Schema{"*": nil}}, + "regexp", + }, + } { + _, err := tt.s.Resolve() + if err == nil { + t.Errorf("%s: unexpectedly passed", tt.s.json()) + continue + } + if !regexp.MustCompile(tt.want).MatchString(err.Error()) { + t.Errorf("%s: did not match\nerror: %s\nregexp: %s", + tt.s.json(), err, tt.want) + } + } +} diff --git a/internal/mcp/internal/jsonschema/schema.go b/internal/mcp/internal/jsonschema/schema.go index dded5366b99..15960b47ca8 100644 --- a/internal/mcp/internal/jsonschema/schema.go +++ b/internal/mcp/internal/jsonschema/schema.go @@ -13,6 +13,7 @@ import ( "fmt" "iter" "math" + "regexp" ) // A Schema is a JSON schema object. @@ -106,6 +107,10 @@ type Schema struct { Then *Schema `json:"then,omitempty"` Else *Schema `json:"else,omitempty"` DependentSchemas map[string]*Schema `json:"dependentSchemas,omitempty"` + + // computed fields + pattern *regexp.Regexp + patternProperties map[*regexp.Regexp]*Schema } // String returns a short description of the schema. diff --git a/internal/mcp/internal/jsonschema/validate.go b/internal/mcp/internal/jsonschema/validate.go index 0ff3769e8a2..60f03c55412 100644 --- a/internal/mcp/internal/jsonschema/validate.go +++ b/internal/mcp/internal/jsonschema/validate.go @@ -10,7 +10,6 @@ import ( "math" "math/big" "reflect" - "regexp" "slices" "strings" "unicode/utf8" @@ -19,16 +18,9 @@ import ( // The value of the "$schema" keyword for the version that we can validate. const draft202012 = "https://json-schema.org/draft/2020-12/schema" -// Temporary definition of ResolvedSchema. -// The full definition deals with references between schemas, specifically the $id, $anchor and $ref keywords. -// We'll ignore that for now. -type ResolvedSchema struct { - root *Schema -} - // Validate validates the instance, which must be a JSON value, against the schema. // It returns nil if validation is successful or an error if it is not. 
-func (rs *ResolvedSchema) Validate(instance any) error { +func (rs *Resolved) Validate(instance any) error { if s := rs.root.Schema; s != "" && s != draft202012 { return fmt.Errorf("cannot validate version %s, only %s", s, draft202012) } @@ -39,7 +31,7 @@ func (rs *ResolvedSchema) Validate(instance any) error { // state is the state of single call to ResolvedSchema.Validate. type state struct { - rs *ResolvedSchema + rs *Resolved depth int } @@ -60,10 +52,8 @@ func (st *state) validate(instance reflect.Value, schema *Schema, callerAnns *an return fmt.Errorf("max recursion depth of %d reached", st.depth) } - // Treat the nil schema like the empty schema, as accepting everything. - if schema == nil { - return nil - } + // We checked for nil schemas in [Schema.Resolve]. + assert(schema != nil, "nil schema") // Step through interfaces. if instance.IsValid() && instance.Kind() == reflect.Interface { @@ -156,15 +146,8 @@ func (st *state) validate(instance reflect.Value, schema *Schema, callerAnns *an } } - if schema.Pattern != "" { - // TODO(jba): compile regexps during schema validation. - m, err := regexp.MatchString(schema.Pattern, str) - if err != nil { - return err - } - if !m { - return fmt.Errorf("pattern: %q does not match pattern %q", str, schema.Pattern) - } + if schema.Pattern != "" && !schema.pattern.MatchString(str) { + return fmt.Errorf("pattern: %q does not match regular expression %q", str, schema.Pattern) } } @@ -364,13 +347,8 @@ func (st *state) validate(instance reflect.Value, schema *Schema, callerAnns *an for vprop, val := range instance.Seq2() { prop := vprop.String() // Check every matching pattern. - for pattern, schema := range schema.PatternProperties { - // TODO(jba): pre-compile regexps - m, err := regexp.MatchString(pattern, prop) - if err != nil { - return err - } - if m { + for re, schema := range schema.patternProperties { + if re.MatchString(prop) { if err := st.validate(val, schema, nil, append(path, prop)); err != nil { return err } diff --git a/internal/mcp/internal/jsonschema/validate_test.go b/internal/mcp/internal/jsonschema/validate_test.go index 46ff1d3a65f..10e757fd8e6 100644 --- a/internal/mcp/internal/jsonschema/validate_test.go +++ b/internal/mcp/internal/jsonschema/validate_test.go @@ -51,7 +51,10 @@ func TestValidate(t *testing.T) { } for _, g := range groups { t.Run(g.Description, func(t *testing.T) { - rs := &ResolvedSchema{root: g.Schema} + rs, err := g.Schema.Resolve() + if err != nil { + t.Fatal(err) + } for s := range g.Schema.all() { if s.Defs != nil || s.Ref != "" { t.Skip("schema or subschema has unimplemented keywords") diff --git a/internal/mcp/mcp_test.go b/internal/mcp/mcp_test.go index e5be0f0d544..3baf81e604d 100644 --- a/internal/mcp/mcp_test.go +++ b/internal/mcp/mcp_test.go @@ -15,6 +15,7 @@ import ( "time" "github.com/google/go-cmp/cmp" + "github.com/google/go-cmp/cmp/cmpopts" "golang.org/x/tools/internal/mcp/internal/jsonschema" "golang.org/x/tools/internal/mcp/internal/protocol" ) @@ -149,7 +150,7 @@ func TestEndToEnd(t *testing.T) { AdditionalProperties: falseSchema, }, }} - if diff := cmp.Diff(wantTools, gotTools); diff != "" { + if diff := cmp.Diff(wantTools, gotTools, cmpopts.IgnoreUnexported(jsonschema.Schema{})); diff != "" { t.Fatalf("tools/list mismatch (-want +got):\n%s", diff) } diff --git a/internal/mcp/tool_test.go b/internal/mcp/tool_test.go index 2891213c906..197d43aa7c6 100644 --- a/internal/mcp/tool_test.go +++ b/internal/mcp/tool_test.go @@ -9,6 +9,7 @@ import ( "testing" "github.com/google/go-cmp/cmp" + 
"github.com/google/go-cmp/cmp/cmpopts" "golang.org/x/tools/internal/mcp" "golang.org/x/tools/internal/mcp/internal/jsonschema" ) @@ -82,7 +83,7 @@ func TestMakeTool(t *testing.T) { }, } for _, test := range tests { - if diff := cmp.Diff(test.want, test.tool.Definition.InputSchema); diff != "" { + if diff := cmp.Diff(test.want, test.tool.Definition.InputSchema, cmpopts.IgnoreUnexported(jsonschema.Schema{})); diff != "" { t.Errorf("MakeTool(%v) mismatch (-want +got):\n%s", test.tool.Definition.Name, diff) } } From 7701413e8cbb7d09b75c3e96dcfc23e05cf4c199 Mon Sep 17 00:00:00 2001 From: Jonathan Amsterdam Date: Mon, 5 May 2025 15:31:09 -0400 Subject: [PATCH 269/270] internal/mcp: move jsonschema out of mcp/internal The jsonschema package is part of of the API of the mcp package, so it should not be internal. Change-Id: I9520797837329fc2cda09c2f46ec4d48d031b9d3 Reviewed-on: https://go-review.googlesource.com/c/tools/+/669996 LUCI-TryBot-Result: Go LUCI Reviewed-by: Robert Findley --- internal/mcp/internal/protocol/protocol.go | 2 +- internal/mcp/{internal => }/jsonschema/annotations.go | 0 internal/mcp/{internal => }/jsonschema/infer.go | 0 internal/mcp/{internal => }/jsonschema/infer_test.go | 2 +- internal/mcp/{internal => }/jsonschema/resolve.go | 0 internal/mcp/{internal => }/jsonschema/resolve_test.go | 0 internal/mcp/{internal => }/jsonschema/schema.go | 0 internal/mcp/{internal => }/jsonschema/schema_test.go | 0 .../{internal => }/jsonschema/testdata/draft2020-12/README.md | 0 .../jsonschema/testdata/draft2020-12/additionalProperties.json | 0 .../{internal => }/jsonschema/testdata/draft2020-12/allOf.json | 0 .../{internal => }/jsonschema/testdata/draft2020-12/anyOf.json | 0 .../jsonschema/testdata/draft2020-12/boolean_schema.json | 0 .../{internal => }/jsonschema/testdata/draft2020-12/const.json | 0 .../jsonschema/testdata/draft2020-12/contains.json | 0 .../jsonschema/testdata/draft2020-12/dependentRequired.json | 0 .../jsonschema/testdata/draft2020-12/dependentSchemas.json | 0 .../{internal => }/jsonschema/testdata/draft2020-12/enum.json | 0 .../jsonschema/testdata/draft2020-12/exclusiveMaximum.json | 0 .../jsonschema/testdata/draft2020-12/exclusiveMinimum.json | 0 .../jsonschema/testdata/draft2020-12/if-then-else.json | 0 .../testdata/draft2020-12/infinite-loop-detection.json | 0 .../{internal => }/jsonschema/testdata/draft2020-12/items.json | 0 .../jsonschema/testdata/draft2020-12/maxContains.json | 0 .../jsonschema/testdata/draft2020-12/maxItems.json | 0 .../jsonschema/testdata/draft2020-12/maxLength.json | 0 .../jsonschema/testdata/draft2020-12/maxProperties.json | 0 .../jsonschema/testdata/draft2020-12/maximum.json | 0 .../jsonschema/testdata/draft2020-12/minContains.json | 0 .../jsonschema/testdata/draft2020-12/minItems.json | 0 .../jsonschema/testdata/draft2020-12/minLength.json | 0 .../jsonschema/testdata/draft2020-12/minProperties.json | 0 .../jsonschema/testdata/draft2020-12/minimum.json | 0 .../jsonschema/testdata/draft2020-12/multipleOf.json | 0 .../{internal => }/jsonschema/testdata/draft2020-12/not.json | 0 .../{internal => }/jsonschema/testdata/draft2020-12/oneOf.json | 0 .../jsonschema/testdata/draft2020-12/pattern.json | 0 .../jsonschema/testdata/draft2020-12/patternProperties.json | 0 .../jsonschema/testdata/draft2020-12/prefixItems.json | 0 .../jsonschema/testdata/draft2020-12/properties.json | 0 .../jsonschema/testdata/draft2020-12/propertyNames.json | 0 .../jsonschema/testdata/draft2020-12/required.json | 0 .../{internal => 
}/jsonschema/testdata/draft2020-12/type.json | 0 .../jsonschema/testdata/draft2020-12/unevaluatedItems.json | 0 .../jsonschema/testdata/draft2020-12/unevaluatedProperties.json | 0 .../jsonschema/testdata/draft2020-12/uniqueItems.json | 0 internal/mcp/{internal => }/jsonschema/util.go | 0 internal/mcp/{internal => }/jsonschema/util_test.go | 0 internal/mcp/{internal => }/jsonschema/validate.go | 0 internal/mcp/{internal => }/jsonschema/validate_test.go | 0 internal/mcp/mcp_test.go | 2 +- internal/mcp/prompt.go | 2 +- internal/mcp/tool.go | 2 +- internal/mcp/tool_test.go | 2 +- 54 files changed, 6 insertions(+), 6 deletions(-) rename internal/mcp/{internal => }/jsonschema/annotations.go (100%) rename internal/mcp/{internal => }/jsonschema/infer.go (100%) rename internal/mcp/{internal => }/jsonschema/infer_test.go (97%) rename internal/mcp/{internal => }/jsonschema/resolve.go (100%) rename internal/mcp/{internal => }/jsonschema/resolve_test.go (100%) rename internal/mcp/{internal => }/jsonschema/schema.go (100%) rename internal/mcp/{internal => }/jsonschema/schema_test.go (100%) rename internal/mcp/{internal => }/jsonschema/testdata/draft2020-12/README.md (100%) rename internal/mcp/{internal => }/jsonschema/testdata/draft2020-12/additionalProperties.json (100%) rename internal/mcp/{internal => }/jsonschema/testdata/draft2020-12/allOf.json (100%) rename internal/mcp/{internal => }/jsonschema/testdata/draft2020-12/anyOf.json (100%) rename internal/mcp/{internal => }/jsonschema/testdata/draft2020-12/boolean_schema.json (100%) rename internal/mcp/{internal => }/jsonschema/testdata/draft2020-12/const.json (100%) rename internal/mcp/{internal => }/jsonschema/testdata/draft2020-12/contains.json (100%) rename internal/mcp/{internal => }/jsonschema/testdata/draft2020-12/dependentRequired.json (100%) rename internal/mcp/{internal => }/jsonschema/testdata/draft2020-12/dependentSchemas.json (100%) rename internal/mcp/{internal => }/jsonschema/testdata/draft2020-12/enum.json (100%) rename internal/mcp/{internal => }/jsonschema/testdata/draft2020-12/exclusiveMaximum.json (100%) rename internal/mcp/{internal => }/jsonschema/testdata/draft2020-12/exclusiveMinimum.json (100%) rename internal/mcp/{internal => }/jsonschema/testdata/draft2020-12/if-then-else.json (100%) rename internal/mcp/{internal => }/jsonschema/testdata/draft2020-12/infinite-loop-detection.json (100%) rename internal/mcp/{internal => }/jsonschema/testdata/draft2020-12/items.json (100%) rename internal/mcp/{internal => }/jsonschema/testdata/draft2020-12/maxContains.json (100%) rename internal/mcp/{internal => }/jsonschema/testdata/draft2020-12/maxItems.json (100%) rename internal/mcp/{internal => }/jsonschema/testdata/draft2020-12/maxLength.json (100%) rename internal/mcp/{internal => }/jsonschema/testdata/draft2020-12/maxProperties.json (100%) rename internal/mcp/{internal => }/jsonschema/testdata/draft2020-12/maximum.json (100%) rename internal/mcp/{internal => }/jsonschema/testdata/draft2020-12/minContains.json (100%) rename internal/mcp/{internal => }/jsonschema/testdata/draft2020-12/minItems.json (100%) rename internal/mcp/{internal => }/jsonschema/testdata/draft2020-12/minLength.json (100%) rename internal/mcp/{internal => }/jsonschema/testdata/draft2020-12/minProperties.json (100%) rename internal/mcp/{internal => }/jsonschema/testdata/draft2020-12/minimum.json (100%) rename internal/mcp/{internal => }/jsonschema/testdata/draft2020-12/multipleOf.json (100%) rename internal/mcp/{internal => }/jsonschema/testdata/draft2020-12/not.json 
(100%) rename internal/mcp/{internal => }/jsonschema/testdata/draft2020-12/oneOf.json (100%) rename internal/mcp/{internal => }/jsonschema/testdata/draft2020-12/pattern.json (100%) rename internal/mcp/{internal => }/jsonschema/testdata/draft2020-12/patternProperties.json (100%) rename internal/mcp/{internal => }/jsonschema/testdata/draft2020-12/prefixItems.json (100%) rename internal/mcp/{internal => }/jsonschema/testdata/draft2020-12/properties.json (100%) rename internal/mcp/{internal => }/jsonschema/testdata/draft2020-12/propertyNames.json (100%) rename internal/mcp/{internal => }/jsonschema/testdata/draft2020-12/required.json (100%) rename internal/mcp/{internal => }/jsonschema/testdata/draft2020-12/type.json (100%) rename internal/mcp/{internal => }/jsonschema/testdata/draft2020-12/unevaluatedItems.json (100%) rename internal/mcp/{internal => }/jsonschema/testdata/draft2020-12/unevaluatedProperties.json (100%) rename internal/mcp/{internal => }/jsonschema/testdata/draft2020-12/uniqueItems.json (100%) rename internal/mcp/{internal => }/jsonschema/util.go (100%) rename internal/mcp/{internal => }/jsonschema/util_test.go (100%) rename internal/mcp/{internal => }/jsonschema/validate.go (100%) rename internal/mcp/{internal => }/jsonschema/validate_test.go (100%) diff --git a/internal/mcp/internal/protocol/protocol.go b/internal/mcp/internal/protocol/protocol.go index bd02bf07b74..ead91c5b2b1 100644 --- a/internal/mcp/internal/protocol/protocol.go +++ b/internal/mcp/internal/protocol/protocol.go @@ -9,7 +9,7 @@ package protocol import ( "encoding/json" - "golang.org/x/tools/internal/mcp/internal/jsonschema" + "golang.org/x/tools/internal/mcp/jsonschema" ) // Optional annotations for the client. The client can use annotations to inform diff --git a/internal/mcp/internal/jsonschema/annotations.go b/internal/mcp/jsonschema/annotations.go similarity index 100% rename from internal/mcp/internal/jsonschema/annotations.go rename to internal/mcp/jsonschema/annotations.go diff --git a/internal/mcp/internal/jsonschema/infer.go b/internal/mcp/jsonschema/infer.go similarity index 100% rename from internal/mcp/internal/jsonschema/infer.go rename to internal/mcp/jsonschema/infer.go diff --git a/internal/mcp/internal/jsonschema/infer_test.go b/internal/mcp/jsonschema/infer_test.go similarity index 97% rename from internal/mcp/internal/jsonschema/infer_test.go rename to internal/mcp/jsonschema/infer_test.go index 8b56ffc2f9e..fe289815a2a 100644 --- a/internal/mcp/internal/jsonschema/infer_test.go +++ b/internal/mcp/jsonschema/infer_test.go @@ -9,7 +9,7 @@ import ( "github.com/google/go-cmp/cmp" "github.com/google/go-cmp/cmp/cmpopts" - "golang.org/x/tools/internal/mcp/internal/jsonschema" + "golang.org/x/tools/internal/mcp/jsonschema" ) func forType[T any]() *jsonschema.Schema { diff --git a/internal/mcp/internal/jsonschema/resolve.go b/internal/mcp/jsonschema/resolve.go similarity index 100% rename from internal/mcp/internal/jsonschema/resolve.go rename to internal/mcp/jsonschema/resolve.go diff --git a/internal/mcp/internal/jsonschema/resolve_test.go b/internal/mcp/jsonschema/resolve_test.go similarity index 100% rename from internal/mcp/internal/jsonschema/resolve_test.go rename to internal/mcp/jsonschema/resolve_test.go diff --git a/internal/mcp/internal/jsonschema/schema.go b/internal/mcp/jsonschema/schema.go similarity index 100% rename from internal/mcp/internal/jsonschema/schema.go rename to internal/mcp/jsonschema/schema.go diff --git a/internal/mcp/internal/jsonschema/schema_test.go 
b/internal/mcp/jsonschema/schema_test.go similarity index 100% rename from internal/mcp/internal/jsonschema/schema_test.go rename to internal/mcp/jsonschema/schema_test.go diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/README.md b/internal/mcp/jsonschema/testdata/draft2020-12/README.md similarity index 100% rename from internal/mcp/internal/jsonschema/testdata/draft2020-12/README.md rename to internal/mcp/jsonschema/testdata/draft2020-12/README.md diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/additionalProperties.json b/internal/mcp/jsonschema/testdata/draft2020-12/additionalProperties.json similarity index 100% rename from internal/mcp/internal/jsonschema/testdata/draft2020-12/additionalProperties.json rename to internal/mcp/jsonschema/testdata/draft2020-12/additionalProperties.json diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/allOf.json b/internal/mcp/jsonschema/testdata/draft2020-12/allOf.json similarity index 100% rename from internal/mcp/internal/jsonschema/testdata/draft2020-12/allOf.json rename to internal/mcp/jsonschema/testdata/draft2020-12/allOf.json diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/anyOf.json b/internal/mcp/jsonschema/testdata/draft2020-12/anyOf.json similarity index 100% rename from internal/mcp/internal/jsonschema/testdata/draft2020-12/anyOf.json rename to internal/mcp/jsonschema/testdata/draft2020-12/anyOf.json diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/boolean_schema.json b/internal/mcp/jsonschema/testdata/draft2020-12/boolean_schema.json similarity index 100% rename from internal/mcp/internal/jsonschema/testdata/draft2020-12/boolean_schema.json rename to internal/mcp/jsonschema/testdata/draft2020-12/boolean_schema.json diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/const.json b/internal/mcp/jsonschema/testdata/draft2020-12/const.json similarity index 100% rename from internal/mcp/internal/jsonschema/testdata/draft2020-12/const.json rename to internal/mcp/jsonschema/testdata/draft2020-12/const.json diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/contains.json b/internal/mcp/jsonschema/testdata/draft2020-12/contains.json similarity index 100% rename from internal/mcp/internal/jsonschema/testdata/draft2020-12/contains.json rename to internal/mcp/jsonschema/testdata/draft2020-12/contains.json diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/dependentRequired.json b/internal/mcp/jsonschema/testdata/draft2020-12/dependentRequired.json similarity index 100% rename from internal/mcp/internal/jsonschema/testdata/draft2020-12/dependentRequired.json rename to internal/mcp/jsonschema/testdata/draft2020-12/dependentRequired.json diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/dependentSchemas.json b/internal/mcp/jsonschema/testdata/draft2020-12/dependentSchemas.json similarity index 100% rename from internal/mcp/internal/jsonschema/testdata/draft2020-12/dependentSchemas.json rename to internal/mcp/jsonschema/testdata/draft2020-12/dependentSchemas.json diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/enum.json b/internal/mcp/jsonschema/testdata/draft2020-12/enum.json similarity index 100% rename from internal/mcp/internal/jsonschema/testdata/draft2020-12/enum.json rename to internal/mcp/jsonschema/testdata/draft2020-12/enum.json diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/exclusiveMaximum.json 
b/internal/mcp/jsonschema/testdata/draft2020-12/exclusiveMaximum.json similarity index 100% rename from internal/mcp/internal/jsonschema/testdata/draft2020-12/exclusiveMaximum.json rename to internal/mcp/jsonschema/testdata/draft2020-12/exclusiveMaximum.json diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/exclusiveMinimum.json b/internal/mcp/jsonschema/testdata/draft2020-12/exclusiveMinimum.json similarity index 100% rename from internal/mcp/internal/jsonschema/testdata/draft2020-12/exclusiveMinimum.json rename to internal/mcp/jsonschema/testdata/draft2020-12/exclusiveMinimum.json diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/if-then-else.json b/internal/mcp/jsonschema/testdata/draft2020-12/if-then-else.json similarity index 100% rename from internal/mcp/internal/jsonschema/testdata/draft2020-12/if-then-else.json rename to internal/mcp/jsonschema/testdata/draft2020-12/if-then-else.json diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/infinite-loop-detection.json b/internal/mcp/jsonschema/testdata/draft2020-12/infinite-loop-detection.json similarity index 100% rename from internal/mcp/internal/jsonschema/testdata/draft2020-12/infinite-loop-detection.json rename to internal/mcp/jsonschema/testdata/draft2020-12/infinite-loop-detection.json diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/items.json b/internal/mcp/jsonschema/testdata/draft2020-12/items.json similarity index 100% rename from internal/mcp/internal/jsonschema/testdata/draft2020-12/items.json rename to internal/mcp/jsonschema/testdata/draft2020-12/items.json diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/maxContains.json b/internal/mcp/jsonschema/testdata/draft2020-12/maxContains.json similarity index 100% rename from internal/mcp/internal/jsonschema/testdata/draft2020-12/maxContains.json rename to internal/mcp/jsonschema/testdata/draft2020-12/maxContains.json diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/maxItems.json b/internal/mcp/jsonschema/testdata/draft2020-12/maxItems.json similarity index 100% rename from internal/mcp/internal/jsonschema/testdata/draft2020-12/maxItems.json rename to internal/mcp/jsonschema/testdata/draft2020-12/maxItems.json diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/maxLength.json b/internal/mcp/jsonschema/testdata/draft2020-12/maxLength.json similarity index 100% rename from internal/mcp/internal/jsonschema/testdata/draft2020-12/maxLength.json rename to internal/mcp/jsonschema/testdata/draft2020-12/maxLength.json diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/maxProperties.json b/internal/mcp/jsonschema/testdata/draft2020-12/maxProperties.json similarity index 100% rename from internal/mcp/internal/jsonschema/testdata/draft2020-12/maxProperties.json rename to internal/mcp/jsonschema/testdata/draft2020-12/maxProperties.json diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/maximum.json b/internal/mcp/jsonschema/testdata/draft2020-12/maximum.json similarity index 100% rename from internal/mcp/internal/jsonschema/testdata/draft2020-12/maximum.json rename to internal/mcp/jsonschema/testdata/draft2020-12/maximum.json diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/minContains.json b/internal/mcp/jsonschema/testdata/draft2020-12/minContains.json similarity index 100% rename from internal/mcp/internal/jsonschema/testdata/draft2020-12/minContains.json rename to 
internal/mcp/jsonschema/testdata/draft2020-12/minContains.json diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/minItems.json b/internal/mcp/jsonschema/testdata/draft2020-12/minItems.json similarity index 100% rename from internal/mcp/internal/jsonschema/testdata/draft2020-12/minItems.json rename to internal/mcp/jsonschema/testdata/draft2020-12/minItems.json diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/minLength.json b/internal/mcp/jsonschema/testdata/draft2020-12/minLength.json similarity index 100% rename from internal/mcp/internal/jsonschema/testdata/draft2020-12/minLength.json rename to internal/mcp/jsonschema/testdata/draft2020-12/minLength.json diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/minProperties.json b/internal/mcp/jsonschema/testdata/draft2020-12/minProperties.json similarity index 100% rename from internal/mcp/internal/jsonschema/testdata/draft2020-12/minProperties.json rename to internal/mcp/jsonschema/testdata/draft2020-12/minProperties.json diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/minimum.json b/internal/mcp/jsonschema/testdata/draft2020-12/minimum.json similarity index 100% rename from internal/mcp/internal/jsonschema/testdata/draft2020-12/minimum.json rename to internal/mcp/jsonschema/testdata/draft2020-12/minimum.json diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/multipleOf.json b/internal/mcp/jsonschema/testdata/draft2020-12/multipleOf.json similarity index 100% rename from internal/mcp/internal/jsonschema/testdata/draft2020-12/multipleOf.json rename to internal/mcp/jsonschema/testdata/draft2020-12/multipleOf.json diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/not.json b/internal/mcp/jsonschema/testdata/draft2020-12/not.json similarity index 100% rename from internal/mcp/internal/jsonschema/testdata/draft2020-12/not.json rename to internal/mcp/jsonschema/testdata/draft2020-12/not.json diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/oneOf.json b/internal/mcp/jsonschema/testdata/draft2020-12/oneOf.json similarity index 100% rename from internal/mcp/internal/jsonschema/testdata/draft2020-12/oneOf.json rename to internal/mcp/jsonschema/testdata/draft2020-12/oneOf.json diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/pattern.json b/internal/mcp/jsonschema/testdata/draft2020-12/pattern.json similarity index 100% rename from internal/mcp/internal/jsonschema/testdata/draft2020-12/pattern.json rename to internal/mcp/jsonschema/testdata/draft2020-12/pattern.json diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/patternProperties.json b/internal/mcp/jsonschema/testdata/draft2020-12/patternProperties.json similarity index 100% rename from internal/mcp/internal/jsonschema/testdata/draft2020-12/patternProperties.json rename to internal/mcp/jsonschema/testdata/draft2020-12/patternProperties.json diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/prefixItems.json b/internal/mcp/jsonschema/testdata/draft2020-12/prefixItems.json similarity index 100% rename from internal/mcp/internal/jsonschema/testdata/draft2020-12/prefixItems.json rename to internal/mcp/jsonschema/testdata/draft2020-12/prefixItems.json diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/properties.json b/internal/mcp/jsonschema/testdata/draft2020-12/properties.json similarity index 100% rename from internal/mcp/internal/jsonschema/testdata/draft2020-12/properties.json rename to 
internal/mcp/jsonschema/testdata/draft2020-12/properties.json diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/propertyNames.json b/internal/mcp/jsonschema/testdata/draft2020-12/propertyNames.json similarity index 100% rename from internal/mcp/internal/jsonschema/testdata/draft2020-12/propertyNames.json rename to internal/mcp/jsonschema/testdata/draft2020-12/propertyNames.json diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/required.json b/internal/mcp/jsonschema/testdata/draft2020-12/required.json similarity index 100% rename from internal/mcp/internal/jsonschema/testdata/draft2020-12/required.json rename to internal/mcp/jsonschema/testdata/draft2020-12/required.json diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/type.json b/internal/mcp/jsonschema/testdata/draft2020-12/type.json similarity index 100% rename from internal/mcp/internal/jsonschema/testdata/draft2020-12/type.json rename to internal/mcp/jsonschema/testdata/draft2020-12/type.json diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/unevaluatedItems.json b/internal/mcp/jsonschema/testdata/draft2020-12/unevaluatedItems.json similarity index 100% rename from internal/mcp/internal/jsonschema/testdata/draft2020-12/unevaluatedItems.json rename to internal/mcp/jsonschema/testdata/draft2020-12/unevaluatedItems.json diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/unevaluatedProperties.json b/internal/mcp/jsonschema/testdata/draft2020-12/unevaluatedProperties.json similarity index 100% rename from internal/mcp/internal/jsonschema/testdata/draft2020-12/unevaluatedProperties.json rename to internal/mcp/jsonschema/testdata/draft2020-12/unevaluatedProperties.json diff --git a/internal/mcp/internal/jsonschema/testdata/draft2020-12/uniqueItems.json b/internal/mcp/jsonschema/testdata/draft2020-12/uniqueItems.json similarity index 100% rename from internal/mcp/internal/jsonschema/testdata/draft2020-12/uniqueItems.json rename to internal/mcp/jsonschema/testdata/draft2020-12/uniqueItems.json diff --git a/internal/mcp/internal/jsonschema/util.go b/internal/mcp/jsonschema/util.go similarity index 100% rename from internal/mcp/internal/jsonschema/util.go rename to internal/mcp/jsonschema/util.go diff --git a/internal/mcp/internal/jsonschema/util_test.go b/internal/mcp/jsonschema/util_test.go similarity index 100% rename from internal/mcp/internal/jsonschema/util_test.go rename to internal/mcp/jsonschema/util_test.go diff --git a/internal/mcp/internal/jsonschema/validate.go b/internal/mcp/jsonschema/validate.go similarity index 100% rename from internal/mcp/internal/jsonschema/validate.go rename to internal/mcp/jsonschema/validate.go diff --git a/internal/mcp/internal/jsonschema/validate_test.go b/internal/mcp/jsonschema/validate_test.go similarity index 100% rename from internal/mcp/internal/jsonschema/validate_test.go rename to internal/mcp/jsonschema/validate_test.go diff --git a/internal/mcp/mcp_test.go b/internal/mcp/mcp_test.go index 3baf81e604d..e6fbcd2949a 100644 --- a/internal/mcp/mcp_test.go +++ b/internal/mcp/mcp_test.go @@ -16,8 +16,8 @@ import ( "github.com/google/go-cmp/cmp" "github.com/google/go-cmp/cmp/cmpopts" - "golang.org/x/tools/internal/mcp/internal/jsonschema" "golang.org/x/tools/internal/mcp/internal/protocol" + "golang.org/x/tools/internal/mcp/jsonschema" ) type hiParams struct { diff --git a/internal/mcp/prompt.go b/internal/mcp/prompt.go index f136669d8e4..2faa3800ef9 100644 --- a/internal/mcp/prompt.go +++ b/internal/mcp/prompt.go 
@@ -11,9 +11,9 @@ import ( "reflect" "slices" - "golang.org/x/tools/internal/mcp/internal/jsonschema" "golang.org/x/tools/internal/mcp/internal/protocol" "golang.org/x/tools/internal/mcp/internal/util" + "golang.org/x/tools/internal/mcp/jsonschema" ) // A PromptHandler handles a call to prompts/get. diff --git a/internal/mcp/tool.go b/internal/mcp/tool.go index f10c0286db4..ca2e6b6b62d 100644 --- a/internal/mcp/tool.go +++ b/internal/mcp/tool.go @@ -9,9 +9,9 @@ import ( "encoding/json" "slices" - "golang.org/x/tools/internal/mcp/internal/jsonschema" "golang.org/x/tools/internal/mcp/internal/protocol" "golang.org/x/tools/internal/mcp/internal/util" + "golang.org/x/tools/internal/mcp/jsonschema" ) // A ToolHandler handles a call to tools/call. diff --git a/internal/mcp/tool_test.go b/internal/mcp/tool_test.go index 197d43aa7c6..85b5e55e931 100644 --- a/internal/mcp/tool_test.go +++ b/internal/mcp/tool_test.go @@ -11,7 +11,7 @@ import ( "github.com/google/go-cmp/cmp" "github.com/google/go-cmp/cmp/cmpopts" "golang.org/x/tools/internal/mcp" - "golang.org/x/tools/internal/mcp/internal/jsonschema" + "golang.org/x/tools/internal/mcp/jsonschema" ) // testToolHandler is used for type inference in TestMakeTool. From 64b63a6f6aeed9d09ddeb61e31b8e3ecb925f6ff Mon Sep 17 00:00:00 2001 From: Gopher Robot Date: Mon, 5 May 2025 13:08:34 -0700 Subject: [PATCH 270/270] go.mod: update golang.org/x dependencies Update golang.org/x dependencies to their latest tagged versions. Change-Id: I7b5f575a490d1c6a568b4f9441b8924c1de69732 Reviewed-on: https://go-review.googlesource.com/c/tools/+/670097 LUCI-TryBot-Result: Go LUCI Auto-Submit: Gopher Robot Reviewed-by: Dmitri Shuralyov Reviewed-by: David Chase --- go.mod | 6 +++--- go.sum | 12 ++++++------ gopls/go.mod | 6 +++--- gopls/go.sum | 18 +++++++++--------- 4 files changed, 21 insertions(+), 21 deletions(-) diff --git a/go.mod b/go.mod index 7e4e371b770..91de2267573 100644 --- a/go.mod +++ b/go.mod @@ -6,9 +6,9 @@ require ( github.com/google/go-cmp v0.6.0 github.com/yuin/goldmark v1.4.13 golang.org/x/mod v0.24.0 - golang.org/x/net v0.39.0 - golang.org/x/sync v0.13.0 + golang.org/x/net v0.40.0 + golang.org/x/sync v0.14.0 golang.org/x/telemetry v0.0.0-20240521205824-bda55230c457 ) -require golang.org/x/sys v0.32.0 // indirect +require golang.org/x/sys v0.33.0 // indirect diff --git a/go.sum b/go.sum index ff5857bd93a..6a01512f3e4 100644 --- a/go.sum +++ b/go.sum @@ -4,11 +4,11 @@ github.com/yuin/goldmark v1.4.13 h1:fVcFKWvrslecOb/tg+Cc05dkeYx540o0FuFt3nUVDoE= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= golang.org/x/mod v0.24.0 h1:ZfthKaKaT4NrhGVZHO1/WDTwGES4De8KtWO0SIbNJMU= golang.org/x/mod v0.24.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww= -golang.org/x/net v0.39.0 h1:ZCu7HMWDxpXpaiKdhzIfaltL9Lp31x/3fCP11bc6/fY= -golang.org/x/net v0.39.0/go.mod h1:X7NRbYVEA+ewNkCNyJ513WmMdQ3BineSwVtN2zD/d+E= -golang.org/x/sync v0.13.0 h1:AauUjRAJ9OSnvULf/ARrrVywoJDy0YS2AwQ98I37610= -golang.org/x/sync v0.13.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= -golang.org/x/sys v0.32.0 h1:s77OFDvIQeibCmezSnk/q6iAfkdiQaJi4VzroCFrN20= -golang.org/x/sys v0.32.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/net v0.40.0 h1:79Xs7wF06Gbdcg4kdCCIQArK11Z1hr5POQ6+fIYHNuY= +golang.org/x/net v0.40.0/go.mod h1:y0hY0exeL2Pku80/zKK7tpntoX23cqL3Oa6njdgRtds= +golang.org/x/sync v0.14.0 h1:woo0S4Yywslg6hp4eUFjTVOyKt0RookbpAHG4c1HmhQ= +golang.org/x/sync v0.14.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= 
+golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw= +golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= golang.org/x/telemetry v0.0.0-20240521205824-bda55230c457 h1:zf5N6UOrA487eEFacMePxjXAJctxKmyjKUsjA11Uzuk= golang.org/x/telemetry v0.0.0-20240521205824-bda55230c457/go.mod h1:pRgIJT+bRLFKnoM1ldnzKoxTIn14Yxz928LQRYYgIN0= diff --git a/gopls/go.mod b/gopls/go.mod index 7b3772bfab4..96c3fbb127a 100644 --- a/gopls/go.mod +++ b/gopls/go.mod @@ -7,10 +7,10 @@ require ( github.com/google/go-cmp v0.6.0 github.com/jba/templatecheck v0.7.1 golang.org/x/mod v0.24.0 - golang.org/x/sync v0.13.0 - golang.org/x/sys v0.32.0 + golang.org/x/sync v0.14.0 + golang.org/x/sys v0.33.0 golang.org/x/telemetry v0.0.0-20250417124945-06ef541f3fa3 - golang.org/x/text v0.24.0 + golang.org/x/text v0.25.0 golang.org/x/tools v0.30.0 golang.org/x/vuln v1.1.4 gopkg.in/yaml.v3 v3.0.1 diff --git a/gopls/go.sum b/gopls/go.sum index 519e4e79f81..27f999d51a4 100644 --- a/gopls/go.sum +++ b/gopls/go.sum @@ -22,7 +22,7 @@ github.com/rogpeppe/go-internal v1.13.2-0.20241226121412-a5dc8ff20d0a h1:w3tdWGK github.com/rogpeppe/go-internal v1.13.2-0.20241226121412-a5dc8ff20d0a/go.mod h1:S8kfXMp+yh77OxPD4fdM6YUknrZpQxLhvxzS4gDHENY= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= -golang.org/x/crypto v0.37.0/go.mod h1:vg+k43peMZ0pUMhYmVAWysMK35e6ioLh3wB8ZCAfbVc= +golang.org/x/crypto v0.38.0/go.mod h1:MvrbAqul58NNYPKnOra203SB9vpuZW0e+RRZV+Ggqjw= golang.org/x/exp/typeparams v0.0.0-20250218142911-aa4b98e5adaa h1:Br3+0EZZohShrmVVc85znGpxw7Ca8hsUJlrdT/JQGw8= golang.org/x/exp/typeparams v0.0.0-20250218142911-aa4b98e5adaa/go.mod h1:LKZHyeOpPuZcMgxeHjJp4p5yvxrCX1xDvH10zYHhjjQ= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= @@ -31,27 +31,27 @@ golang.org/x/mod v0.24.0 h1:ZfthKaKaT4NrhGVZHO1/WDTwGES4De8KtWO0SIbNJMU= golang.org/x/mod v0.24.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= -golang.org/x/net v0.39.0/go.mod h1:X7NRbYVEA+ewNkCNyJ513WmMdQ3BineSwVtN2zD/d+E= +golang.org/x/net v0.40.0/go.mod h1:y0hY0exeL2Pku80/zKK7tpntoX23cqL3Oa6njdgRtds= golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -golang.org/x/sync v0.13.0 h1:AauUjRAJ9OSnvULf/ARrrVywoJDy0YS2AwQ98I37610= -golang.org/x/sync v0.13.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sync v0.14.0 h1:woo0S4Yywslg6hp4eUFjTVOyKt0RookbpAHG4c1HmhQ= +golang.org/x/sync v0.14.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.32.0 h1:s77OFDvIQeibCmezSnk/q6iAfkdiQaJi4VzroCFrN20= -golang.org/x/sys v0.32.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw= +golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= golang.org/x/telemetry v0.0.0-20240521205824-bda55230c457/go.mod h1:pRgIJT+bRLFKnoM1ldnzKoxTIn14Yxz928LQRYYgIN0= golang.org/x/telemetry 
v0.0.0-20250417124945-06ef541f3fa3 h1:RXY2+rSHXvxO2Y+gKrPjYVaEoGOqh3VEXFhnWAt1Irg= golang.org/x/telemetry v0.0.0-20250417124945-06ef541f3fa3/go.mod h1:RoaXAWDwS90j6FxVKwJdBV+0HCU+llrKUGgJaxiKl6M= golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= -golang.org/x/term v0.31.0/go.mod h1:R4BeIy7D95HzImkxGkTW1UQTtP54tio2RyHz7PwK0aw= +golang.org/x/term v0.32.0/go.mod h1:uZG1FhGx848Sqfsq4/DlJr3xGGsYMu/L5GW4abiaEPQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= -golang.org/x/text v0.24.0 h1:dd5Bzh4yt5KYA8f9CJHCP4FB4D51c2c6JvN37xJJkJ0= -golang.org/x/text v0.24.0/go.mod h1:L8rBsPeo2pSS+xqN0d5u2ikmjtmoJbDBT1b7nHvFCdU= +golang.org/x/text v0.25.0 h1:qVyWApTSYLk/drJRO5mDlNYskwQznZmkpV2c8q9zls4= +golang.org/x/text v0.25.0/go.mod h1:WEdwpYrmk1qmdHvhkSTNPm3app7v4rsT8F2UD6+VHIA= golang.org/x/vuln v1.1.4 h1:Ju8QsuyhX3Hk8ma3CesTbO8vfJD9EvUBgHvkxHBzj0I= golang.org/x/vuln v1.1.4/go.mod h1:F+45wmU18ym/ca5PLTPLsSzr2KppzswxPP603ldA67s= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=