diff --git a/cmd/callgraph/main.go b/cmd/callgraph/main.go index 9e440bbafb9..e489de883d0 100644 --- a/cmd/callgraph/main.go +++ b/cmd/callgraph/main.go @@ -148,10 +148,7 @@ func init() { // If $GOMAXPROCS isn't set, use the full capacity of the machine. // For small machines, use at least 4 threads. if os.Getenv("GOMAXPROCS") == "" { - n := runtime.NumCPU() - if n < 4 { - n = 4 - } + n := max(runtime.NumCPU(), 4) runtime.GOMAXPROCS(n) } } diff --git a/cmd/deadcode/deadcode_test.go b/cmd/deadcode/deadcode_test.go index 90c067331dc..a9b8327c7d7 100644 --- a/cmd/deadcode/deadcode_test.go +++ b/cmd/deadcode/deadcode_test.go @@ -34,7 +34,6 @@ func Test(t *testing.T) { t.Fatal(err) } for _, filename := range matches { - filename := filename t.Run(filename, func(t *testing.T) { t.Parallel() diff --git a/cmd/file2fuzz/main.go b/cmd/file2fuzz/main.go index 2a86c2ece88..f9d4708cd28 100644 --- a/cmd/file2fuzz/main.go +++ b/cmd/file2fuzz/main.go @@ -34,7 +34,7 @@ import ( var encVersion1 = "go test fuzz v1" func encodeByteSlice(b []byte) []byte { - return []byte(fmt.Sprintf("%s\n[]byte(%q)", encVersion1, b)) + return fmt.Appendf(nil, "%s\n[]byte(%q)", encVersion1, b) } func usage() { diff --git a/cmd/go-contrib-init/contrib.go b/cmd/go-contrib-init/contrib.go index 9254b86388f..0ab93c90f73 100644 --- a/cmd/go-contrib-init/contrib.go +++ b/cmd/go-contrib-init/contrib.go @@ -160,44 +160,6 @@ GOPATH: %s } return } - - gopath := firstGoPath() - if gopath == "" { - log.Fatal("Your GOPATH is not set, please set it") - } - - rightdir := filepath.Join(gopath, "src", "golang.org", "x", *repo) - if !strings.HasPrefix(wd, rightdir) { - dirExists, err := exists(rightdir) - if err != nil { - log.Fatal(err) - } - if !dirExists { - log.Fatalf("The repo you want to work on is currently not on your system.\n"+ - "Run %q to obtain this repo\n"+ - "then go to the directory %q\n", - "go get -d golang.org/x/"+*repo, rightdir) - } - log.Fatalf("Your current directory is:%q\n"+ - "Working on 
golang/x/%v requires you be in %q\n", - wd, *repo, rightdir) - } -} - -func firstGoPath() string { - list := filepath.SplitList(build.Default.GOPATH) - if len(list) < 1 { - return "" - } - return list[0] -} - -func exists(path string) (bool, error) { - _, err := os.Stat(path) - if os.IsNotExist(err) { - return false, nil - } - return true, err } func inGoPath(wd string) bool { diff --git a/cmd/godex/writetype.go b/cmd/godex/writetype.go index 866f718f05f..f59760a81c6 100644 --- a/cmd/godex/writetype.go +++ b/cmd/godex/writetype.go @@ -14,6 +14,7 @@ package main import ( "go/types" + "slices" ) func (p *printer) writeType(this *types.Package, typ types.Type) { @@ -28,11 +29,9 @@ func (p *printer) writeTypeInternal(this *types.Package, typ types.Type, visited // practice deeply nested composite types with unnamed component // types are uncommon. This code is likely more efficient than // using a map. - for _, t := range visited { - if t == typ { - p.printf("○%T", typ) // cycle to typ - return - } + if slices.Contains(visited, typ) { + p.printf("○%T", typ) // cycle to typ + return } visited = append(visited, typ) @@ -72,7 +71,7 @@ func (p *printer) writeTypeInternal(this *types.Package, typ types.Type, visited p.print("struct {\n") p.indent++ - for i := 0; i < n; i++ { + for i := range n { f := t.Field(i) if !f.Anonymous() { p.printf("%s ", f.Name()) @@ -120,7 +119,7 @@ func (p *printer) writeTypeInternal(this *types.Package, typ types.Type, visited if GcCompatibilityMode { // print flattened interface // (useful to compare against gc-generated interfaces) - for i := 0; i < n; i++ { + for i := range n { m := t.Method(i) p.print(m.Name()) p.writeSignatureInternal(this, m.Type().(*types.Signature), visited) diff --git a/cmd/godoc/godoc_test.go b/cmd/godoc/godoc_test.go index 66b93f10630..7cd38574233 100644 --- a/cmd/godoc/godoc_test.go +++ b/cmd/godoc/godoc_test.go @@ -16,6 +16,7 @@ import ( "os/exec" "regexp" "runtime" + "slices" "strings" "sync" "testing" @@ -127,12 
+128,7 @@ func waitForServer(t *testing.T, ctx context.Context, url, match string, reverse // hasTag checks whether a given release tag is contained in the current version // of the go binary. func hasTag(t string) bool { - for _, v := range build.Default.ReleaseTags { - if t == v { - return true - } - } - return false + return slices.Contains(build.Default.ReleaseTags, t) } func TestURL(t *testing.T) { diff --git a/cmd/godoc/main.go b/cmd/godoc/main.go index a665be0769d..1bce091f269 100644 --- a/cmd/godoc/main.go +++ b/cmd/godoc/main.go @@ -114,7 +114,7 @@ func loggingHandler(h http.Handler) http.Handler { func handleURLFlag() { // Try up to 10 fetches, following redirects. urlstr := *urlFlag - for i := 0; i < 10; i++ { + for range 10 { // Prepare request. u, err := url.Parse(urlstr) if err != nil { diff --git a/cmd/goimports/goimports.go b/cmd/goimports/goimports.go index dcb5023a2e7..11f56e0e865 100644 --- a/cmd/goimports/goimports.go +++ b/cmd/goimports/goimports.go @@ -361,8 +361,8 @@ func replaceTempFilename(diff []byte, filename string) ([]byte, error) { } // Always print filepath with slash separator. 
f := filepath.ToSlash(filename) - bs[0] = []byte(fmt.Sprintf("--- %s%s", f+".orig", t0)) - bs[1] = []byte(fmt.Sprintf("+++ %s%s", f, t1)) + bs[0] = fmt.Appendf(nil, "--- %s%s", f+".orig", t0) + bs[1] = fmt.Appendf(nil, "+++ %s%s", f, t1) return bytes.Join(bs, []byte{'\n'}), nil } diff --git a/cmd/goyacc/yacc.go b/cmd/goyacc/yacc.go index 965a76f14dc..be084da3690 100644 --- a/cmd/goyacc/yacc.go +++ b/cmd/goyacc/yacc.go @@ -1478,7 +1478,7 @@ func symnam(i int) string { // set elements 0 through n-1 to c func aryfil(v []int, n, c int) { - for i := 0; i < n; i++ { + for i := range n { v[i] = c } } @@ -1840,7 +1840,7 @@ func closure(i int) { nexts: // initially fill the sets - for s := 0; s < n; s++ { + for s := range n { prd := curres[s] // @@ -2609,7 +2609,7 @@ func callopt() { if adb > 2 { for p = 0; p <= maxa; p += 10 { fmt.Fprintf(ftable, "%v ", p) - for i = 0; i < 10; i++ { + for i = range 10 { fmt.Fprintf(ftable, "%v ", amem[p+i]) } ftable.WriteRune('\n') @@ -2653,7 +2653,7 @@ func gin(i int) { // now, find amem place for it nextgp: - for p := 0; p < ACTSIZE; p++ { + for p := range ACTSIZE { if amem[p] != 0 { continue } @@ -3117,7 +3117,7 @@ func aryeq(a []int, b []int) int { if len(b) != n { return 0 } - for ll := 0; ll < n; ll++ { + for ll := range n { if a[ll] != b[ll] { return 0 } diff --git a/cmd/html2article/conv.go b/cmd/html2article/conv.go index 604bb1fd7cd..e2946431ce2 100644 --- a/cmd/html2article/conv.go +++ b/cmd/html2article/conv.go @@ -16,6 +16,7 @@ import ( "net/url" "os" "regexp" + "slices" "strings" "golang.org/x/net/html" @@ -270,10 +271,8 @@ func hasClass(name string) selector { return func(n *html.Node) bool { for _, a := range n.Attr { if a.Key == "class" { - for _, c := range strings.Fields(a.Val) { - if c == name { - return true - } + if slices.Contains(strings.Fields(a.Val), name) { + return true } } } diff --git a/cmd/present/main.go b/cmd/present/main.go index 340025276f9..99ed838e926 100644 --- a/cmd/present/main.go +++ 
b/cmd/present/main.go @@ -73,8 +73,8 @@ func main() { origin := &url.URL{Scheme: "http"} if *originHost != "" { - if strings.HasPrefix(*originHost, "https://") { - *originHost = strings.TrimPrefix(*originHost, "https://") + if after, ok := strings.CutPrefix(*originHost, "https://"); ok { + *originHost = after origin.Scheme = "https" } *originHost = strings.TrimPrefix(*originHost, "http://") diff --git a/cmd/present2md/main.go b/cmd/present2md/main.go index a11e57ecf8b..e23bb33daed 100644 --- a/cmd/present2md/main.go +++ b/cmd/present2md/main.go @@ -447,10 +447,10 @@ func parseInlineLink(s string) (link string, length int) { // If the URL is http://foo.com, drop the http:// // In other words, render [[http://golang.org]] as: // golang.org - if strings.HasPrefix(rawURL, url.Scheme+"://") { - simpleURL = strings.TrimPrefix(rawURL, url.Scheme+"://") - } else if strings.HasPrefix(rawURL, url.Scheme+":") { - simpleURL = strings.TrimPrefix(rawURL, url.Scheme+":") + if after, ok := strings.CutPrefix(rawURL, url.Scheme+"://"); ok { + simpleURL = after + } else if after, ok := strings.CutPrefix(rawURL, url.Scheme+":"); ok { + simpleURL = after } } return renderLink(rawURL, simpleURL), end + 2 diff --git a/cmd/signature-fuzzer/internal/fuzz-generator/gen_test.go b/cmd/signature-fuzzer/internal/fuzz-generator/gen_test.go index 4bd5bab7c38..f10a7e9a7df 100644 --- a/cmd/signature-fuzzer/internal/fuzz-generator/gen_test.go +++ b/cmd/signature-fuzzer/internal/fuzz-generator/gen_test.go @@ -35,7 +35,7 @@ func mkGenState() *genstate { func TestBasic(t *testing.T) { checkTunables(tunables) s := mkGenState() - for i := 0; i < 1000; i++ { + for i := range 1000 { s.wr = NewWrapRand(int64(i), RandCtlChecks|RandCtlPanic) fp := s.GenFunc(i, i) var buf bytes.Buffer @@ -58,7 +58,7 @@ func TestMoreComplicated(t *testing.T) { checkTunables(tunables) s := mkGenState() - for i := 0; i < 10000; i++ { + for i := range 10000 { s.wr = NewWrapRand(int64(i), RandCtlChecks|RandCtlPanic) fp := 
s.GenFunc(i, i) var buf bytes.Buffer diff --git a/cmd/signature-fuzzer/internal/fuzz-generator/generator.go b/cmd/signature-fuzzer/internal/fuzz-generator/generator.go index 6c8002f9f0c..261dd6c029b 100644 --- a/cmd/signature-fuzzer/internal/fuzz-generator/generator.go +++ b/cmd/signature-fuzzer/internal/fuzz-generator/generator.go @@ -48,6 +48,7 @@ import ( "os" "os/exec" "path/filepath" + "slices" "strconv" "strings" ) @@ -561,12 +562,7 @@ func (s *genstate) popTunables() { // See precludeSelectedTypes below for more info. func (s *genstate) redistributeFraction(toIncorporate uint8, avoid []int) { inavoid := func(j int) bool { - for _, k := range avoid { - if j == k { - return true - } - } - return false + return slices.Contains(avoid, j) } doredis := func() { @@ -631,7 +627,7 @@ func (s *genstate) GenParm(f *funcdef, depth int, mkctl bool, pidx int) parm { // Convert tf into a cumulative sum tf := s.tunables.typeFractions sum := uint8(0) - for i := 0; i < len(tf); i++ { + for i := range len(tf) { sum += tf[i] tf[i] = sum } @@ -662,7 +658,7 @@ func (s *genstate) GenParm(f *funcdef, depth int, mkctl bool, pidx int) parm { f.structdefs = append(f.structdefs, sp) tnf := int64(s.tunables.nStructFields) / int64(depth+1) nf := int(s.wr.Intn(tnf)) - for fi := 0; fi < nf; fi++ { + for range nf { fp := s.GenParm(f, depth+1, false, pidx) skComp := tunables.doSkipCompare && uint8(s.wr.Intn(100)) < s.tunables.skipCompareFraction @@ -832,7 +828,7 @@ func (s *genstate) GenFunc(fidx int, pidx int) *funcdef { needControl := f.recur f.dodefc = uint8(s.wr.Intn(100)) pTaken := uint8(s.wr.Intn(100)) < s.tunables.takenFraction - for pi := 0; pi < numParams; pi++ { + for range numParams { newparm := s.GenParm(f, 0, needControl, pidx) if !pTaken { newparm.SetAddrTaken(notAddrTaken) @@ -848,7 +844,7 @@ func (s *genstate) GenFunc(fidx int, pidx int) *funcdef { } rTaken := uint8(s.wr.Intn(100)) < s.tunables.takenFraction - for ri := 0; ri < numReturns; ri++ { + for range numReturns { r := 
s.GenReturn(f, 0, pidx) if !rTaken { r.SetAddrTaken(notAddrTaken) @@ -903,7 +899,7 @@ func (s *genstate) emitCompareFunc(f *funcdef, b *bytes.Buffer, p parm) { b.WriteString(" return ") numel := p.NumElements() ncmp := 0 - for i := 0; i < numel; i++ { + for i := range numel { lelref, lelparm := p.GenElemRef(i, "left") relref, _ := p.GenElemRef(i, "right") if lelref == "" || lelref == "_" { @@ -1501,7 +1497,7 @@ func (s *genstate) emitParamChecks(f *funcdef, b *bytes.Buffer, pidx int, value } else { numel := p.NumElements() cel := checkableElements(p) - for i := 0; i < numel; i++ { + for i := range numel { verb(4, "emitting check-code for p%d el %d value=%d", pi, i, value) elref, elparm := p.GenElemRef(i, s.genParamRef(p, pi)) valstr, value = s.GenValue(f, elparm, value, false) @@ -1535,7 +1531,7 @@ func (s *genstate) emitParamChecks(f *funcdef, b *bytes.Buffer, pidx int, value // receiver value check if f.isMethod { numel := f.receiver.NumElements() - for i := 0; i < numel; i++ { + for i := range numel { verb(4, "emitting check-code for rcvr el %d value=%d", i, value) elref, elparm := f.receiver.GenElemRef(i, "rcvr") valstr, value = s.GenValue(f, elparm, value, false) @@ -1608,7 +1604,7 @@ func (s *genstate) emitDeferChecks(f *funcdef, b *bytes.Buffer, value int) int { b.WriteString(" // check parm " + which + "\n") numel := p.NumElements() cel := checkableElements(p) - for i := 0; i < numel; i++ { + for i := range numel { elref, elparm := p.GenElemRef(i, s.genParamRef(p, pi)) if elref == "" || elref == "_" || cel == 0 { verb(4, "empty skip p%d el %d", pi, i) @@ -2058,7 +2054,7 @@ func (s *genstate) emitMain(outf *os.File, numit int, fcnmask map[int]int, pkmas for k := 0; k < s.NumTestPackages; k++ { cp := fmt.Sprintf("%s%s%d", s.Tag, CallerName, k) fmt.Fprintf(outf, " go func(ch chan bool) {\n") - for i := 0; i < numit; i++ { + for i := range numit { if shouldEmitFP(i, k, fcnmask, pkmask) { fmt.Fprintf(outf, " %s.%s%d(\"normal\")\n", cp, CallerName, i) if 
s.tunables.doReflectCall { diff --git a/cmd/stringer/golden_test.go b/cmd/stringer/golden_test.go index 2a81c0855aa..e40b7c53c91 100644 --- a/cmd/stringer/golden_test.go +++ b/cmd/stringer/golden_test.go @@ -453,7 +453,6 @@ func TestGolden(t *testing.T) { dir := t.TempDir() for _, test := range golden { - test := test t.Run(test.name, func(t *testing.T) { input := "package test\n" + test.input file := test.name + ".go" diff --git a/container/intsets/sparse_test.go b/container/intsets/sparse_test.go index cd8ec6e0840..f218e09b6a3 100644 --- a/container/intsets/sparse_test.go +++ b/container/intsets/sparse_test.go @@ -236,7 +236,7 @@ func (set *pset) check(t *testing.T, msg string) { func randomPset(prng *rand.Rand, maxSize int) *pset { set := makePset() size := int(prng.Int()) % maxSize - for i := 0; i < size; i++ { + for range size { // TODO(adonovan): benchmark how performance varies // with this sparsity parameter. n := int(prng.Int()) % 10000 @@ -252,7 +252,7 @@ func TestRandomMutations(t *testing.T) { set := makePset() prng := rand.New(rand.NewSource(0)) - for i := 0; i < 10000; i++ { + for i := range 10000 { n := int(prng.Int())%2000 - 1000 if i%2 == 0 { if debug { @@ -278,9 +278,9 @@ func TestRandomMutations(t *testing.T) { func TestLowerBound(t *testing.T) { // Use random sets of sizes from 0 to about 4000. prng := rand.New(rand.NewSource(0)) - for i := uint(0); i < 12; i++ { + for i := range uint(12) { x := randomPset(prng, 1<= j && e < found { @@ -302,7 +302,7 @@ func TestSetOperations(t *testing.T) { // For each operator, we test variations such as // Z.op(X, Y), Z.op(X, Z) and Z.op(Z, Y) to exercise // the degenerate cases of each method implementation. 
- for i := uint(0); i < 12; i++ { + for i := range uint(12) { X := randomPset(prng, 1< 0 { exitcode = 1 // analysis failed, at least partially } else if rootDiags > 0 { @@ -266,12 +265,10 @@ func printDiagnostics(graph *checker.Graph) (exitcode int) { var list []*checker.Action var total time.Duration - // TODO(adonovan): use "for act := range graph.All() { ... }" in go1.23. - graph.All()(func(act *checker.Action) bool { + for act := range graph.All() { list = append(list, act) total += act.Duration - return true - }) + } // Print actions accounting for 90% of the total. sort.Slice(list, func(i, j int) bool { diff --git a/go/analysis/passes/composite/composite.go b/go/analysis/passes/composite/composite.go index 60c6afe49f0..25c98a97bbc 100644 --- a/go/analysis/passes/composite/composite.go +++ b/go/analysis/passes/composite/composite.go @@ -115,7 +115,7 @@ func run(pass *analysis.Pass) (any, error) { missingKeys = append(missingKeys, analysis.TextEdit{ Pos: e.Pos(), End: e.Pos(), - NewText: []byte(fmt.Sprintf("%s: ", field.Name())), + NewText: fmt.Appendf(nil, "%s: ", field.Name()), }) } } diff --git a/go/analysis/passes/copylock/copylock.go b/go/analysis/passes/copylock/copylock.go index 49c14d4980d..a4e455d9b30 100644 --- a/go/analysis/passes/copylock/copylock.go +++ b/go/analysis/passes/copylock/copylock.go @@ -355,7 +355,7 @@ func lockPath(tpkg *types.Package, typ types.Type, seen map[types.Type]bool) typ } nfields := styp.NumFields() - for i := 0; i < nfields; i++ { + for i := range nfields { ftyp := styp.Field(i).Type() subpath := lockPath(tpkg, ftyp, seen) if subpath != nil { diff --git a/go/analysis/passes/fieldalignment/fieldalignment.go b/go/analysis/passes/fieldalignment/fieldalignment.go index e2ddc83b604..4987ec5afdd 100644 --- a/go/analysis/passes/fieldalignment/fieldalignment.go +++ b/go/analysis/passes/fieldalignment/fieldalignment.go @@ -168,7 +168,7 @@ func optimalOrder(str *types.Struct, sizes *gcSizes) (*types.Struct, []int) { } elems := 
make([]elem, nf) - for i := 0; i < nf; i++ { + for i := range nf { field := str.Field(i) ft := field.Type() elems[i] = elem{ @@ -312,7 +312,7 @@ func (s *gcSizes) Sizeof(T types.Type) int64 { var o int64 max := int64(1) - for i := 0; i < nf; i++ { + for i := range nf { ft := t.Field(i).Type() a, sz := s.Alignof(ft), s.Sizeof(ft) if a > max { @@ -366,7 +366,7 @@ func (s *gcSizes) ptrdata(T types.Type) int64 { } var o, p int64 - for i := 0; i < nf; i++ { + for i := range nf { ft := t.Field(i).Type() a, sz := s.Alignof(ft), s.Sizeof(ft) fp := s.ptrdata(ft) diff --git a/go/analysis/passes/httpmux/httpmux.go b/go/analysis/passes/httpmux/httpmux.go index 58d3ed5daca..655b78fd1cb 100644 --- a/go/analysis/passes/httpmux/httpmux.go +++ b/go/analysis/passes/httpmux/httpmux.go @@ -9,6 +9,7 @@ import ( "go/constant" "go/types" "regexp" + "slices" "strings" "golang.org/x/mod/semver" @@ -103,12 +104,7 @@ func isMethodNamed(f *types.Func, pkgPath string, names ...string) bool { if f.Type().(*types.Signature).Recv() == nil { return false // not a method } - for _, n := range names { - if f.Name() == n { - return true - } - } - return false // not in names + return slices.Contains(names, f.Name()) } // stringConstantExpr returns expression's string constant value. diff --git a/go/analysis/passes/nilfunc/nilfunc.go b/go/analysis/passes/nilfunc/nilfunc.go index 3ac2dcd4907..fa1883b0c34 100644 --- a/go/analysis/passes/nilfunc/nilfunc.go +++ b/go/analysis/passes/nilfunc/nilfunc.go @@ -16,7 +16,7 @@ import ( "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/internal/typesinternal" ) //go:embed doc.go @@ -55,24 +55,8 @@ func run(pass *analysis.Pass) (any, error) { return } - // Only want identifiers or selector expressions. 
- var obj types.Object - switch v := e2.(type) { - case *ast.Ident: - obj = pass.TypesInfo.Uses[v] - case *ast.SelectorExpr: - obj = pass.TypesInfo.Uses[v.Sel] - case *ast.IndexExpr, *ast.IndexListExpr: - // Check generic functions such as "f[T1,T2]". - x, _, _, _ := typeparams.UnpackIndexExpr(v) - if id, ok := x.(*ast.Ident); ok { - obj = pass.TypesInfo.Uses[id] - } - default: - return - } - // Only want functions. + obj := pass.TypesInfo.Uses[typesinternal.UsedIdent(pass.TypesInfo, e2)] if _, ok := obj.(*types.Func); !ok { return } diff --git a/go/analysis/passes/structtag/structtag.go b/go/analysis/passes/structtag/structtag.go index d926503403d..da4afd1b232 100644 --- a/go/analysis/passes/structtag/structtag.go +++ b/go/analysis/passes/structtag/structtag.go @@ -13,6 +13,7 @@ import ( "go/types" "path/filepath" "reflect" + "slices" "strconv" "strings" @@ -167,11 +168,8 @@ func checkTagDuplicates(pass *analysis.Pass, tag, key string, nearest, field *ty if i := strings.Index(val, ","); i >= 0 { if key == "xml" { // Use a separate namespace for XML attributes. - for _, opt := range strings.Split(val[i:], ",") { - if opt == "attr" { - key += " attribute" // Key is part of the error message. - break - } + if slices.Contains(strings.Split(val[i:], ","), "attr") { + key += " attribute" // Key is part of the error message. 
} } val = val[:i] diff --git a/go/analysis/passes/testinggoroutine/util.go b/go/analysis/passes/testinggoroutine/util.go index 027c99e6b0f..88e77fb4fc4 100644 --- a/go/analysis/passes/testinggoroutine/util.go +++ b/go/analysis/passes/testinggoroutine/util.go @@ -7,6 +7,7 @@ package testinggoroutine import ( "go/ast" "go/types" + "slices" "golang.org/x/tools/internal/typeparams" ) @@ -48,12 +49,7 @@ func isMethodNamed(f *types.Func, pkgPath string, names ...string) bool { if f.Type().(*types.Signature).Recv() == nil { return false } - for _, n := range names { - if f.Name() == n { - return true - } - } - return false + return slices.Contains(names, f.Name()) } func funcIdent(fun ast.Expr) *ast.Ident { diff --git a/go/ast/astutil/imports.go b/go/ast/astutil/imports.go index a6b5ed0a893..5e5601aa467 100644 --- a/go/ast/astutil/imports.go +++ b/go/ast/astutil/imports.go @@ -9,6 +9,7 @@ import ( "fmt" "go/ast" "go/token" + "slices" "strconv" "strings" ) @@ -186,7 +187,7 @@ func AddNamedImport(fset *token.FileSet, f *ast.File, name, path string) (added spec.(*ast.ImportSpec).Path.ValuePos = first.Pos() first.Specs = append(first.Specs, spec) } - f.Decls = append(f.Decls[:i], f.Decls[i+1:]...) + f.Decls = slices.Delete(f.Decls, i, i+1) i-- } diff --git a/go/ast/astutil/rewrite_test.go b/go/ast/astutil/rewrite_test.go index 57136a07cab..2e1c77034c8 100644 --- a/go/ast/astutil/rewrite_test.go +++ b/go/ast/astutil/rewrite_test.go @@ -244,7 +244,6 @@ func vardecl(name, typ string) *ast.GenDecl { func TestRewrite(t *testing.T) { t.Run("*", func(t *testing.T) { for _, test := range rewriteTests { - test := test t.Run(test.name, func(t *testing.T) { t.Parallel() fset := token.NewFileSet() diff --git a/go/ast/inspector/inspector.go b/go/ast/inspector/inspector.go index 0d5050fe405..1da4a361f0b 100644 --- a/go/ast/inspector/inspector.go +++ b/go/ast/inspector/inspector.go @@ -10,6 +10,7 @@ // builds a list of push/pop events and their node type. 
Subsequent // method calls that request a traversal scan this list, rather than walk // the AST, and perform type filtering using efficient bit sets. +// This representation is sometimes called a "balanced parenthesis tree." // // Experiments suggest the inspector's traversals are about 2.5x faster // than ast.Inspect, but it may take around 5 traversals for this @@ -50,6 +51,7 @@ type Inspector struct { //go:linkname events func events(in *Inspector) []event { return in.events } +//go:linkname packEdgeKindAndIndex func packEdgeKindAndIndex(ek edge.Kind, index int) int32 { return int32(uint32(index+1)<<7 | uint32(ek)) } diff --git a/go/buildutil/allpackages.go b/go/buildutil/allpackages.go index dfb8cd6c7b0..32886a7175f 100644 --- a/go/buildutil/allpackages.go +++ b/go/buildutil/allpackages.go @@ -52,7 +52,6 @@ func ForEachPackage(ctxt *build.Context, found func(importPath string, err error var wg sync.WaitGroup for _, root := range ctxt.SrcDirs() { - root := root wg.Add(1) go func() { allPackages(ctxt, root, ch) @@ -107,7 +106,6 @@ func allPackages(ctxt *build.Context, root string, ch chan<- item) { ch <- item{pkg, err} } for _, fi := range files { - fi := fi if fi.IsDir() { wg.Add(1) go func() { diff --git a/go/callgraph/cha/cha_test.go b/go/callgraph/cha/cha_test.go index 7795cb44de0..922541d6c56 100644 --- a/go/callgraph/cha/cha_test.go +++ b/go/callgraph/cha/cha_test.go @@ -40,7 +40,7 @@ var inputs = []string{ func expectation(f *ast.File) (string, token.Pos) { for _, c := range f.Comments { text := strings.TrimSpace(c.Text()) - if t := strings.TrimPrefix(text, "WANT:\n"); t != text { + if t, ok := strings.CutPrefix(text, "WANT:\n"); ok { return t, c.Pos() } } diff --git a/go/callgraph/rta/rta_test.go b/go/callgraph/rta/rta_test.go index 6b16484245b..8cfc73ee4db 100644 --- a/go/callgraph/rta/rta_test.go +++ b/go/callgraph/rta/rta_test.go @@ -105,7 +105,7 @@ func check(t *testing.T, f *ast.File, pkg *ssa.Package, res *rta.Result) { expectation := func(f 
*ast.File) (string, int) { for _, c := range f.Comments { text := strings.TrimSpace(c.Text()) - if t := strings.TrimPrefix(text, "WANT:\n"); t != text { + if t, ok := strings.CutPrefix(text, "WANT:\n"); ok { return t, tokFile.Line(c.Pos()) } } @@ -134,7 +134,7 @@ func check(t *testing.T, f *ast.File, pkg *ssa.Package, res *rta.Result) { // A leading "!" negates the assertion. sense := true - if rest := strings.TrimPrefix(line, "!"); rest != line { + if rest, ok := strings.CutPrefix(line, "!"); ok { sense = false line = strings.TrimSpace(rest) if line == "" { diff --git a/go/callgraph/vta/graph.go b/go/callgraph/vta/graph.go index 164018708ef..26225e7db37 100644 --- a/go/callgraph/vta/graph.go +++ b/go/callgraph/vta/graph.go @@ -8,6 +8,7 @@ import ( "fmt" "go/token" "go/types" + "iter" "golang.org/x/tools/go/ssa" "golang.org/x/tools/go/types/typeutil" @@ -270,7 +271,7 @@ func (g *vtaGraph) numNodes() int { return len(g.idx) } -func (g *vtaGraph) successors(x idx) func(yield func(y idx) bool) { +func (g *vtaGraph) successors(x idx) iter.Seq[idx] { return func(yield func(y idx) bool) { for y := range g.m[x] { if !yield(y) { diff --git a/go/callgraph/vta/graph_test.go b/go/callgraph/vta/graph_test.go index 9e780c7e4e2..725749ea6ab 100644 --- a/go/callgraph/vta/graph_test.go +++ b/go/callgraph/vta/graph_test.go @@ -148,7 +148,9 @@ func TestVtaGraph(t *testing.T) { {n4, 0}, } { sl := 0 - g.successors(g.idx[test.n])(func(_ idx) bool { sl++; return true }) + for range g.successors(g.idx[test.n]) { + sl++ + } if sl != test.l { t.Errorf("want %d successors; got %d", test.l, sl) } @@ -163,10 +165,10 @@ func vtaGraphStr(g *vtaGraph) []string { var vgs []string for n := 0; n < g.numNodes(); n++ { var succStr []string - g.successors(idx(n))(func(s idx) bool { + for s := range g.successors(idx(n)) { succStr = append(succStr, g.node[s].String()) - return true - }) + } + sort.Strings(succStr) entry := fmt.Sprintf("%v -> %v", g.node[n].String(), strings.Join(succStr, ", ")) vgs = 
append(vgs, removeModulePrefix(entry)) diff --git a/go/callgraph/vta/helpers_test.go b/go/callgraph/vta/helpers_test.go index 59a9277f759..be5e756dcd5 100644 --- a/go/callgraph/vta/helpers_test.go +++ b/go/callgraph/vta/helpers_test.go @@ -28,7 +28,7 @@ import ( func want(f *ast.File) []string { for _, c := range f.Comments { text := strings.TrimSpace(c.Text()) - if t := strings.TrimPrefix(text, "WANT:\n"); t != text { + if t, ok := strings.CutPrefix(text, "WANT:\n"); ok { return strings.Split(t, "\n") } } diff --git a/go/callgraph/vta/internal/trie/op_test.go b/go/callgraph/vta/internal/trie/op_test.go index b4610d55c22..535e7ac2775 100644 --- a/go/callgraph/vta/internal/trie/op_test.go +++ b/go/callgraph/vta/internal/trie/op_test.go @@ -12,6 +12,7 @@ import ( "time" "golang.org/x/tools/go/callgraph/vta/internal/trie" + "maps" ) // This file tests trie.Map by cross checking operations on a collection of @@ -189,12 +190,8 @@ func (c builtinCollection) Intersect(l int, r int) { func (c builtinCollection) Merge(l int, r int) { result := map[uint64]any{} - for k, v := range c[r] { - result[k] = v - } - for k, v := range c[l] { - result[k] = v - } + maps.Copy(result, c[r]) + maps.Copy(result, c[l]) c[l] = result } @@ -217,9 +214,7 @@ func (c builtinCollection) Average(l int, r int) { func (c builtinCollection) Assign(l, r int) { m := map[uint64]any{} - for k, v := range c[r] { - m[k] = v - } + maps.Copy(m, c[r]) c[l] = m } @@ -232,7 +227,7 @@ func newTriesCollection(size int) *trieCollection { b: trie.NewBuilder(), tries: make([]trie.MutMap, size), } - for i := 0; i < size; i++ { + for i := range size { tc.tries[i] = tc.b.MutEmpty() } return tc @@ -240,7 +235,7 @@ func newTriesCollection(size int) *trieCollection { func newMapsCollection(size int) *builtinCollection { maps := make(builtinCollection, size) - for i := 0; i < size; i++ { + for i := range size { maps[i] = map[uint64]any{} } return &maps @@ -290,7 +285,7 @@ func (op operation) Apply(maps mapCollection) any 
{ func distribution(dist map[opCode]int) []opCode { var codes []opCode for op, n := range dist { - for i := 0; i < n; i++ { + for range n { codes = append(codes, op) } } @@ -326,7 +321,7 @@ func randOperator(r *rand.Rand, opts options) operation { func randOperators(r *rand.Rand, numops int, opts options) []operation { ops := make([]operation, numops) - for i := 0; i < numops; i++ { + for i := range numops { ops[i] = randOperator(r, opts) } return ops diff --git a/go/callgraph/vta/propagation.go b/go/callgraph/vta/propagation.go index 1c4dcd2888e..a71c5b0034a 100644 --- a/go/callgraph/vta/propagation.go +++ b/go/callgraph/vta/propagation.go @@ -42,7 +42,7 @@ func scc(g *vtaGraph) (sccs [][]idx, idxToSccID []int) { *ns = state{pre: nextPre, lowLink: nextPre, onStack: true} stack = append(stack, n) - g.successors(n)(func(s idx) bool { + for s := range g.successors(n) { if ss := &states[s]; ss.pre == 0 { // Analyze successor s that has not been visited yet. doSCC(s) @@ -52,8 +52,7 @@ func scc(g *vtaGraph) (sccs [][]idx, idxToSccID []int) { // in the current SCC. ns.lowLink = min(ns.lowLink, ss.pre) } - return true - }) + } // if n is a root node, pop the stack and generate a new SCC. if ns.lowLink == ns.pre { @@ -166,10 +165,9 @@ func propagate(graph *vtaGraph, canon *typeutil.Map) propTypeMap { for i := len(sccs) - 1; i >= 0; i-- { nextSccs := make(map[int]empty) for _, n := range sccs[i] { - graph.successors(n)(func(succ idx) bool { + for succ := range graph.successors(n) { nextSccs[idxToSccID[succ]] = empty{} - return true - }) + } } // Propagate types to all successor SCCs. 
for nextScc := range nextSccs { diff --git a/go/callgraph/vta/propagation_test.go b/go/callgraph/vta/propagation_test.go index bc9ca1ecde6..2b36cf39bb7 100644 --- a/go/callgraph/vta/propagation_test.go +++ b/go/callgraph/vta/propagation_test.go @@ -123,17 +123,14 @@ func sccEqual(sccs1 []string, sccs2 []string) bool { // // for every edge x -> y in g, nodeToScc[x] > nodeToScc[y] func isRevTopSorted(g *vtaGraph, idxToScc []int) bool { - result := true - for n := 0; n < len(idxToScc); n++ { - g.successors(idx(n))(func(s idx) bool { + for n := range idxToScc { + for s := range g.successors(idx(n)) { if idxToScc[n] < idxToScc[s] { - result = false return false } - return true - }) + } } - return result + return true } func sccMapsConsistent(sccs [][]idx, idxToSccID []int) bool { diff --git a/go/gcexportdata/example_test.go b/go/gcexportdata/example_test.go index 852ba5a597c..d6d69a8aa54 100644 --- a/go/gcexportdata/example_test.go +++ b/go/gcexportdata/example_test.go @@ -15,6 +15,7 @@ import ( "log" "os" "path/filepath" + "slices" "strings" "golang.org/x/tools/go/gcexportdata" @@ -51,13 +52,7 @@ func ExampleRead() { // We can see all the names in Names. 
members := pkg.Scope().Names() - foundPrintln := false - for _, member := range members { - if member == "Println" { - foundPrintln = true - break - } - } + foundPrintln := slices.Contains(members, "Println") fmt.Print("Package members: ") if foundPrintln { fmt.Println("Println found") diff --git a/go/gcexportdata/gcexportdata.go b/go/gcexportdata/gcexportdata.go index 65fe2628e90..7b90bc92353 100644 --- a/go/gcexportdata/gcexportdata.go +++ b/go/gcexportdata/gcexportdata.go @@ -193,10 +193,7 @@ func Read(in io.Reader, fset *token.FileSet, imports map[string]*types.Package, return pkg, err default: - l := len(data) - if l > 10 { - l = 10 - } + l := min(len(data), 10) return nil, fmt.Errorf("unexpected export data with prefix %q for path %s", string(data[:l]), path) } } diff --git a/go/packages/external.go b/go/packages/external.go index 91bd62e83b1..f37bc651009 100644 --- a/go/packages/external.go +++ b/go/packages/external.go @@ -90,7 +90,7 @@ func findExternalDriver(cfg *Config) driver { const toolPrefix = "GOPACKAGESDRIVER=" tool := "" for _, env := range cfg.Env { - if val := strings.TrimPrefix(env, toolPrefix); val != env { + if val, ok := strings.CutPrefix(env, toolPrefix); ok { tool = val } } diff --git a/go/packages/overlay_test.go b/go/packages/overlay_test.go index 1108461926f..4a7cc68f4c7 100644 --- a/go/packages/overlay_test.go +++ b/go/packages/overlay_test.go @@ -10,6 +10,7 @@ import ( "os" "path/filepath" "reflect" + "slices" "sort" "testing" @@ -93,7 +94,7 @@ func testOverlayChangesBothPackageNames(t *testing.T, exporter packagestest.Expo if len(initial) != 3 { t.Fatalf("expected 3 packages, got %v", len(initial)) } - for i := 0; i < 3; i++ { + for i := range 3 { if ok := checkPkg(t, initial[i], want[i].id, want[i].name, want[i].count); !ok { t.Errorf("%d: got {%s %s %d}, expected %v", i, initial[i].ID, initial[i].Name, len(initial[i].Syntax), want[i]) @@ -139,7 +140,7 @@ func testOverlayChangesTestPackageName(t *testing.T, exporter 
packagestest.Expor if len(initial) != 3 { t.Fatalf("expected 3 packages, got %v", len(initial)) } - for i := 0; i < 3; i++ { + for i := range 3 { if ok := checkPkg(t, initial[i], want[i].id, want[i].name, want[i].count); !ok { t.Errorf("got {%s %s %d}, expected %v", initial[i].ID, initial[i].Name, len(initial[i].Syntax), want[i]) @@ -824,11 +825,8 @@ func testInvalidFilesBeforeOverlayContains(t *testing.T, exporter packagestest.E t.Fatalf("expected package ID %q, got %q", tt.wantID, pkg.ID) } var containsFile bool - for _, goFile := range pkg.CompiledGoFiles { - if f == goFile { - containsFile = true - break - } + if slices.Contains(pkg.CompiledGoFiles, f) { + containsFile = true } if !containsFile { t.Fatalf("expected %s in CompiledGoFiles, got %v", f, pkg.CompiledGoFiles) @@ -1054,7 +1052,7 @@ func TestOverlaysInReplace(t *testing.T) { if err := os.Mkdir(dirB, 0775); err != nil { t.Fatal(err) } - if err := os.WriteFile(filepath.Join(dirB, "go.mod"), []byte(fmt.Sprintf("module %s.com", dirB)), 0775); err != nil { + if err := os.WriteFile(filepath.Join(dirB, "go.mod"), fmt.Appendf(nil, "module %s.com", dirB), 0775); err != nil { t.Fatal(err) } if err := os.MkdirAll(filepath.Join(dirB, "inner"), 0775); err != nil { diff --git a/go/packages/packages_test.go b/go/packages/packages_test.go index 5678b265561..ae3cbb6bb2b 100644 --- a/go/packages/packages_test.go +++ b/go/packages/packages_test.go @@ -20,6 +20,7 @@ import ( "path/filepath" "reflect" "runtime" + "slices" "sort" "strings" "testing" @@ -387,7 +388,7 @@ func TestLoadArgumentListIsNotTooLong(t *testing.T) { defer exported.Cleanup() numOfPatterns := argMax/16 + 1 // the pattern below is approx. 
16 chars patterns := make([]string, numOfPatterns) - for i := 0; i < numOfPatterns; i++ { + for i := range numOfPatterns { patterns[i] = fmt.Sprintf("golang.org/mod/p%d", i) } // patterns have more than argMax number of chars combined with whitespaces b/w patterns @@ -1610,7 +1611,7 @@ EOF defer os.Setenv(pathKey, oldPath) // Clone exported.Config config := exported.Config - config.Env = append([]string{}, exported.Config.Env...) + config.Env = slices.Clone(exported.Config.Env) config.Env = append(config.Env, "GOPACKAGESDRIVER="+test.driver) pkgs, err := packages.Load(exported.Config, "golist") if err != nil { @@ -1978,7 +1979,6 @@ func testCgoNoSyntax(t *testing.T, exporter packagestest.Exporter) { packages.NeedName | packages.NeedImports, } for _, mode := range modes { - mode := mode t.Run(fmt.Sprint(mode), func(t *testing.T) { exported.Config.Mode = mode pkgs, err := packages.Load(exported.Config, "golang.org/fake/c") @@ -2787,7 +2787,7 @@ func main() { t.Fatal(err) } - exported.Config.Env = append(append([]string{}, baseEnv...), "GOPACKAGESDRIVER="+emptyDriverPath) + exported.Config.Env = append(slices.Clone(baseEnv), "GOPACKAGESDRIVER="+emptyDriverPath) initial, err := packages.Load(exported.Config, "golang.org/fake/a") if err != nil { t.Fatal(err) @@ -2807,7 +2807,7 @@ func main() { t.Fatal(err) } - exported.Config.Env = append(append([]string{}, baseEnv...), "GOPACKAGESDRIVER="+notHandledDriverPath) + exported.Config.Env = append(slices.Clone(baseEnv), "GOPACKAGESDRIVER="+notHandledDriverPath) initial, err = packages.Load(exported.Config, "golang.org/fake/a") if err != nil { t.Fatal(err) diff --git a/go/packages/packagestest/export.go b/go/packages/packagestest/export.go index 4ac4967b46b..86da99ecdf3 100644 --- a/go/packages/packagestest/export.go +++ b/go/packages/packagestest/export.go @@ -159,7 +159,6 @@ var All = []Exporter{GOPATH, Modules} func TestAll(t *testing.T, f func(*testing.T, Exporter)) { t.Helper() for _, e := range All { - e := e // in case 
f calls t.Parallel t.Run(e.Name(), func(t *testing.T) { t.Helper() f(t, e) @@ -173,7 +172,6 @@ func TestAll(t *testing.T, f func(*testing.T, Exporter)) { func BenchmarkAll(b *testing.B, f func(*testing.B, Exporter)) { b.Helper() for _, e := range All { - e := e // in case f calls t.Parallel b.Run(e.Name(), func(b *testing.B) { b.Helper() f(b, e) diff --git a/go/ssa/builder_test.go b/go/ssa/builder_test.go index 2589cc82bb6..a48723bd271 100644 --- a/go/ssa/builder_test.go +++ b/go/ssa/builder_test.go @@ -613,7 +613,6 @@ var indirect = R[int].M "(p.S[int]).M[int]", }, } { - entry := entry t.Run(entry.name, func(t *testing.T) { v := p.Var(entry.name) if v == nil { @@ -1011,7 +1010,6 @@ func TestGo117Builtins(t *testing.T) { } for _, tc := range tests { - tc := tc t.Run(tc.name, func(t *testing.T) { t.Parallel() fset := token.NewFileSet() @@ -1466,7 +1464,6 @@ func TestBuildPackageGo120(t *testing.T) { } for _, tc := range tests { - tc := tc t.Run(tc.name, func(t *testing.T) { t.Parallel() fset := token.NewFileSet() diff --git a/go/ssa/dom.go b/go/ssa/dom.go index f490986140c..78f651c8ee9 100644 --- a/go/ssa/dom.go +++ b/go/ssa/dom.go @@ -22,6 +22,7 @@ import ( "fmt" "math/big" "os" + "slices" "sort" ) @@ -43,7 +44,7 @@ func (b *BasicBlock) Dominates(c *BasicBlock) bool { // DomPreorder returns a new slice containing the blocks of f // in a preorder traversal of the dominator tree. func (f *Function) DomPreorder() []*BasicBlock { - slice := append([]*BasicBlock(nil), f.Blocks...) + slice := slices.Clone(f.Blocks) sort.Slice(slice, func(i, j int) bool { return slice[i].dom.pre < slice[j].dom.pre }) @@ -54,7 +55,7 @@ func (f *Function) DomPreorder() []*BasicBlock { // in a postorder traversal of the dominator tree. // (This is not the same as a postdominance order.) func (f *Function) DomPostorder() []*BasicBlock { - slice := append([]*BasicBlock(nil), f.Blocks...) 
+ slice := slices.Clone(f.Blocks) sort.Slice(slice, func(i, j int) bool { return slice[i].dom.post < slice[j].dom.post }) @@ -277,8 +278,8 @@ func sanityCheckDomTree(f *Function) { // Check the entire relation. O(n^2). // The Recover block (if any) must be treated specially so we skip it. ok := true - for i := 0; i < n; i++ { - for j := 0; j < n; j++ { + for i := range n { + for j := range n { b, c := f.Blocks[i], f.Blocks[j] if c == f.Recover { continue diff --git a/go/ssa/emit.go b/go/ssa/emit.go index bca79adc4e1..e53ebf5a7fd 100644 --- a/go/ssa/emit.go +++ b/go/ssa/emit.go @@ -496,7 +496,7 @@ func emitTailCall(f *Function, call *Call) { case 1: ret.Results = []Value{tuple} default: - for i := 0; i < nr; i++ { + for i := range nr { v := emitExtract(f, tuple, i) // TODO(adonovan): in principle, this is required: // v = emitConv(f, o.Type, f.Signature.Results[i].Type) diff --git a/go/ssa/func.go b/go/ssa/func.go index 010c128a9ec..2d52309b623 100644 --- a/go/ssa/func.go +++ b/go/ssa/func.go @@ -13,6 +13,7 @@ import ( "go/token" "go/types" "io" + "iter" "os" "strings" @@ -187,8 +188,7 @@ func targetedBlock(f *Function, tok token.Token) *BasicBlock { } // instrs returns an iterator that returns each reachable instruction of the SSA function. -// TODO: return an iter.Seq once x/tools is on 1.23 -func (f *Function) instrs() func(yield func(i Instruction) bool) { +func (f *Function) instrs() iter.Seq[Instruction] { return func(yield func(i Instruction) bool) { for _, block := range f.Blocks { for _, instr := range block.Instrs { @@ -817,7 +817,7 @@ func blockExit(fn *Function, block *BasicBlock, pos token.Pos) *exit { return e } -// blockExit creates a new exit to a yield fn that returns the source function. +// returnExit creates a new exit to a yield fn that returns the source function. 
func returnExit(fn *Function, pos token.Pos) *exit { e := &exit{ id: unique(fn), diff --git a/go/ssa/instantiate.go b/go/ssa/instantiate.go index 2512f32976c..20a0986e6d3 100644 --- a/go/ssa/instantiate.go +++ b/go/ssa/instantiate.go @@ -7,6 +7,7 @@ package ssa import ( "fmt" "go/types" + "slices" "sync" ) @@ -122,10 +123,5 @@ func (prog *Program) isParameterized(ts ...types.Type) bool { // handle the most common but shallow cases such as T, pkg.T, // *T without consulting the cache under the lock. - for _, t := range ts { - if prog.hasParams.Has(t) { - return true - } - } - return false + return slices.ContainsFunc(ts, prog.hasParams.Has) } diff --git a/go/ssa/interp/external.go b/go/ssa/interp/external.go index 2a3a7e5b79e..2fb683c07fe 100644 --- a/go/ssa/interp/external.go +++ b/go/ssa/interp/external.go @@ -9,6 +9,7 @@ package interp import ( "bytes" + "maps" "math" "os" "runtime" @@ -30,7 +31,7 @@ var externals = make(map[string]externalFn) func init() { // That little dot ۰ is an Arabic zero numeral (U+06F0), categories [Nd]. - for k, v := range map[string]externalFn{ + maps.Copy(externals, map[string]externalFn{ "(reflect.Value).Bool": ext۰reflect۰Value۰Bool, "(reflect.Value).CanAddr": ext۰reflect۰Value۰CanAddr, "(reflect.Value).CanInterface": ext۰reflect۰Value۰CanInterface, @@ -111,9 +112,7 @@ func init() { "strings.ToLower": ext۰strings۰ToLower, "time.Sleep": ext۰time۰Sleep, "unicode/utf8.DecodeRuneInString": ext۰unicode۰utf8۰DecodeRuneInString, - } { - externals[k] = v - } + }) } func ext۰bytes۰Equal(fr *frame, args []value) value { diff --git a/go/ssa/lift.go b/go/ssa/lift.go index 6138ca82e0e..d7c1bf5063e 100644 --- a/go/ssa/lift.go +++ b/go/ssa/lift.go @@ -374,7 +374,7 @@ func (s *blockSet) add(b *BasicBlock) bool { // returns its index, or returns -1 if empty. 
func (s *blockSet) take() int { l := s.BitLen() - for i := 0; i < l; i++ { + for i := range l { if s.Bit(i) == 1 { s.SetBit(&s.Int, i, 0) return i @@ -403,10 +403,8 @@ func liftAlloc(df domFrontier, alloc *Alloc, newPhis newPhiMap, fresh *int) bool // Don't lift result values in functions that defer // calls that may recover from panic. if fn := alloc.Parent(); fn.Recover != nil { - for _, nr := range fn.results { - if nr == alloc { - return false - } + if slices.Contains(fn.results, alloc) { + return false } } diff --git a/go/ssa/sanity.go b/go/ssa/sanity.go index 97ef886e3cf..b11680a1e1d 100644 --- a/go/ssa/sanity.go +++ b/go/ssa/sanity.go @@ -14,6 +14,7 @@ import ( "go/types" "io" "os" + "slices" "strings" ) @@ -119,13 +120,7 @@ func (s *sanity) checkInstr(idx int, instr Instruction) { case *Alloc: if !instr.Heap { - found := false - for _, l := range s.fn.Locals { - if l == instr { - found = true - break - } - } + found := slices.Contains(s.fn.Locals, instr) if !found { s.errorf("local alloc %s = %s does not appear in Function.Locals", instr.Name(), instr) } @@ -282,13 +277,7 @@ func (s *sanity) checkBlock(b *BasicBlock, index int) { // Check predecessor and successor relations are dual, // and that all blocks in CFG belong to same function. for _, a := range b.Preds { - found := false - for _, bb := range a.Succs { - if bb == b { - found = true - break - } - } + found := slices.Contains(a.Succs, b) if !found { s.errorf("expected successor edge in predecessor %s; found only: %s", a, a.Succs) } @@ -297,13 +286,7 @@ func (s *sanity) checkBlock(b *BasicBlock, index int) { } } for _, c := range b.Succs { - found := false - for _, bb := range c.Preds { - if bb == b { - found = true - break - } - } + found := slices.Contains(c.Preds, b) if !found { s.errorf("expected predecessor edge in successor %s; found only: %s", c, c.Preds) } @@ -529,12 +512,10 @@ func (s *sanity) checkFunction(fn *Function) bool { // Build the set of valid referrers. 
s.instrs = make(map[Instruction]unit) - // TODO: switch to range-over-func when x/tools updates to 1.23. // instrs are the instructions that are present in the function. - fn.instrs()(func(instr Instruction) bool { + for instr := range fn.instrs() { s.instrs[instr] = unit{} - return true - }) + } // Check all Locals allocations appear in the function instruction. for i, l := range fn.Locals { diff --git a/go/ssa/subst.go b/go/ssa/subst.go index bbe5796d703..b4ea16854ea 100644 --- a/go/ssa/subst.go +++ b/go/ssa/subst.go @@ -266,7 +266,7 @@ func (subst *subster) interface_(iface *types.Interface) *types.Interface { var methods []*types.Func initMethods := func(n int) { // copy first n explicit methods methods = make([]*types.Func, iface.NumExplicitMethods()) - for i := 0; i < n; i++ { + for i := range n { f := iface.ExplicitMethod(i) norecv := changeRecv(f.Type().(*types.Signature), nil) methods[i] = types.NewFunc(f.Pos(), f.Pkg(), f.Name(), norecv) @@ -290,7 +290,7 @@ func (subst *subster) interface_(iface *types.Interface) *types.Interface { var embeds []types.Type initEmbeds := func(n int) { // copy first n embedded types embeds = make([]types.Type, iface.NumEmbeddeds()) - for i := 0; i < n; i++ { + for i := range n { embeds[i] = iface.EmbeddedType(i) } } diff --git a/go/ssa/util.go b/go/ssa/util.go index 9a73984a6a0..e53b31ff3bb 100644 --- a/go/ssa/util.go +++ b/go/ssa/util.go @@ -385,7 +385,7 @@ func (m *typeListMap) hash(ts []types.Type) uint32 { // Some smallish prime far away from typeutil.Hash. 
n := len(ts) h := uint32(13619) + 2*uint32(n) - for i := 0; i < n; i++ { + for i := range n { h += 3 * m.hasher.Hash(ts[i]) } return h diff --git a/go/types/typeutil/callee.go b/go/types/typeutil/callee.go index 754380351e8..53b71339305 100644 --- a/go/types/typeutil/callee.go +++ b/go/types/typeutil/callee.go @@ -7,45 +7,23 @@ package typeutil import ( "go/ast" "go/types" - - "golang.org/x/tools/internal/typeparams" + _ "unsafe" // for linkname ) // Callee returns the named target of a function call, if any: // a function, method, builtin, or variable. // // Functions and methods may potentially have type parameters. +// +// Note: for calls of instantiated functions and methods, Callee returns +// the corresponding generic function or method on the generic type. func Callee(info *types.Info, call *ast.CallExpr) types.Object { - fun := ast.Unparen(call.Fun) - - // Look through type instantiation if necessary. - isInstance := false - switch fun.(type) { - case *ast.IndexExpr, *ast.IndexListExpr: - // When extracting the callee from an *IndexExpr, we need to check that - // it is a *types.Func and not a *types.Var. - // Example: Don't match a slice m within the expression `m[0]()`. - isInstance = true - fun, _, _, _ = typeparams.UnpackIndexExpr(fun) - } - - var obj types.Object - switch fun := fun.(type) { - case *ast.Ident: - obj = info.Uses[fun] // type, var, builtin, or declared func - case *ast.SelectorExpr: - if sel, ok := info.Selections[fun]; ok { - obj = sel.Obj() // method or field - } else { - obj = info.Uses[fun.Sel] // qualified identifier? - } + obj := info.Uses[usedIdent(info, call.Fun)] + if obj == nil { + return nil } if _, ok := obj.(*types.TypeName); ok { - return nil // T(x) is a conversion, not a call - } - // A Func is required to match instantiations. - if _, ok := obj.(*types.Func); isInstance && !ok { - return nil // Was not a Func. 
+ return nil } return obj } @@ -56,13 +34,52 @@ func Callee(info *types.Info, call *ast.CallExpr) types.Object { // Note: for calls of instantiated functions and methods, StaticCallee returns // the corresponding generic function or method on the generic type. func StaticCallee(info *types.Info, call *ast.CallExpr) *types.Func { - if f, ok := Callee(info, call).(*types.Func); ok && !interfaceMethod(f) { - return f + obj := info.Uses[usedIdent(info, call.Fun)] + fn, _ := obj.(*types.Func) + if fn == nil || interfaceMethod(fn) { + return nil + } + return fn +} + +// usedIdent is the implementation of [internal/typesinternal.UsedIdent]. +// It returns the identifier associated with e. +// See typesinternal.UsedIdent for a fuller description. +// This function should live in typesinternal, but cannot because it would +// create an import cycle. +// +//go:linkname usedIdent +func usedIdent(info *types.Info, e ast.Expr) *ast.Ident { + if info.Types == nil || info.Uses == nil { + panic("one of info.Types or info.Uses is nil; both must be populated") + } + // Look through type instantiation if necessary. + switch d := ast.Unparen(e).(type) { + case *ast.IndexExpr: + if info.Types[d.Index].IsType() { + e = d.X + } + case *ast.IndexListExpr: + e = d.X + } + + switch e := ast.Unparen(e).(type) { + // info.Uses always has the object we want, even for selector expressions. + // We don't need info.Selections. + // See go/types/recording.go:recordSelection. + case *ast.Ident: + return e + case *ast.SelectorExpr: + return e.Sel } return nil } +// interfaceMethod reports whether its argument is a method of an interface. +// This function should live in typesinternal, but cannot because it would create an import cycle. 
+// +//go:linkname interfaceMethod func interfaceMethod(f *types.Func) bool { - recv := f.Type().(*types.Signature).Recv() + recv := f.Signature().Recv() return recv != nil && types.IsInterface(recv.Type()) } diff --git a/go/types/typeutil/callee_test.go b/go/types/typeutil/callee_test.go index 1d48bc743a9..3f96533ffff 100644 --- a/go/types/typeutil/callee_test.go +++ b/go/types/typeutil/callee_test.go @@ -122,6 +122,7 @@ func testStaticCallee(t *testing.T, contents []string) { cfg := &types.Config{Importer: closure(packages)} info := &types.Info{ Instances: make(map[*ast.Ident]types.Instance), + Types: make(map[ast.Expr]types.TypeAndValue), Uses: make(map[*ast.Ident]types.Object), Selections: make(map[*ast.SelectorExpr]*types.Selection), FileVersions: make(map[*ast.File]string), diff --git a/godoc/index.go b/godoc/index.go index 05a1a9441ee..853337715c1 100644 --- a/godoc/index.go +++ b/godoc/index.go @@ -65,6 +65,7 @@ import ( "golang.org/x/tools/godoc/util" "golang.org/x/tools/godoc/vfs" + "maps" ) // ---------------------------------------------------------------------------- @@ -862,9 +863,7 @@ func (x *Indexer) indexGoFile(dirname string, filename string, file *token.File, dest = make(map[string]SpotKind) x.exports[pkgPath] = dest } - for k, v := range x.curPkgExports { - dest[k] = v - } + maps.Copy(dest, x.curPkgExports) } } @@ -1069,7 +1068,7 @@ func (c *Corpus) NewIndex() *Index { // convert alist into a map of alternative spellings alts := make(map[string]*AltWords) - for i := 0; i < len(alist); i++ { + for i := range alist { a := alist[i].(*AltWords) alts[a.Canon] = a } diff --git a/godoc/snippet.go b/godoc/snippet.go index 1750478606e..43c1899a093 100644 --- a/godoc/snippet.go +++ b/godoc/snippet.go @@ -14,6 +14,7 @@ import ( "fmt" "go/ast" "go/token" + "slices" ) type Snippet struct { @@ -41,10 +42,8 @@ func findSpec(list []ast.Spec, id *ast.Ident) ast.Spec { return s } case *ast.ValueSpec: - for _, n := range s.Names { - if n == id { - return s - } + 
if slices.Contains(s.Names, id) { + return s } case *ast.TypeSpec: if s.Name == id { diff --git a/godoc/static/gen_test.go b/godoc/static/gen_test.go index 1f1c62e0e9c..7b7668a558c 100644 --- a/godoc/static/gen_test.go +++ b/godoc/static/gen_test.go @@ -39,7 +39,7 @@ to see the differences.`) // TestAppendQuote ensures that AppendQuote produces a valid literal. func TestAppendQuote(t *testing.T) { var in, out bytes.Buffer - for r := rune(0); r < unicode.MaxRune; r++ { + for r := range unicode.MaxRune { in.WriteRune(r) } appendQuote(&out, in.Bytes()) diff --git a/godoc/versions_test.go b/godoc/versions_test.go index a021616ba11..7b822f69b51 100644 --- a/godoc/versions_test.go +++ b/godoc/versions_test.go @@ -6,6 +6,7 @@ package godoc import ( "go/build" + "slices" "testing" "golang.org/x/tools/internal/testenv" @@ -102,12 +103,7 @@ func TestParseVersionRow(t *testing.T) { // hasTag checks whether a given release tag is contained in the current version // of the go binary. func hasTag(t string) bool { - for _, v := range build.Default.ReleaseTags { - if t == v { - return true - } - } - return false + return slices.Contains(build.Default.ReleaseTags, t) } func TestAPIVersion(t *testing.T) { diff --git a/godoc/vfs/os.go b/godoc/vfs/os.go index 35d050946e6..fe21a58662e 100644 --- a/godoc/vfs/os.go +++ b/godoc/vfs/os.go @@ -12,6 +12,7 @@ import ( pathpkg "path" "path/filepath" "runtime" + "slices" ) // We expose a new variable because otherwise we need to copy the findGOROOT logic again @@ -45,10 +46,8 @@ type osFS struct { func isGoPath(path string) bool { for _, bp := range filepath.SplitList(build.Default.GOPATH) { - for _, gp := range filepath.SplitList(path) { - if bp == gp { - return true - } + if slices.Contains(filepath.SplitList(path), bp) { + return true } } return false diff --git a/godoc/vfs/zipfs/zipfs_test.go b/godoc/vfs/zipfs/zipfs_test.go index b6f2431b0b5..3e5a8034a5b 100644 --- a/godoc/vfs/zipfs/zipfs_test.go +++ b/godoc/vfs/zipfs/zipfs_test.go @@ -59,7 
+59,7 @@ func TestMain(t *testing.M) { os.Exit(t.Run()) } -// setups state each of the tests uses +// setup state each of the tests uses func setup() error { // create zipfs b := new(bytes.Buffer) @@ -172,7 +172,7 @@ func TestZipFSOpenSeek(t *testing.T) { defer f.Close() // test Seek() multiple times - for i := 0; i < 3; i++ { + for range 3 { all, err := io.ReadAll(f) if err != nil { t.Error(err) diff --git a/gopls/doc/analyzers.md b/gopls/doc/analyzers.md index aa95e024089..4b2bff1a63a 100644 --- a/gopls/doc/analyzers.md +++ b/gopls/doc/analyzers.md @@ -298,7 +298,7 @@ The gofix analyzer inlines functions and constants that are marked for inlining. Default: on. -Package documentation: [gofix](https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/gofix) +Package documentation: [gofix](https://pkg.go.dev/golang.org/x/tools/internal/gofix) ## `hostport`: check format of addresses passed to net.Dial @@ -476,39 +476,90 @@ Package documentation: [lostcancel](https://pkg.go.dev/golang.org/x/tools/go/ana This analyzer reports opportunities for simplifying and clarifying -existing code by using more modern features of Go, such as: - - - replacing an if/else conditional assignment by a call to the - built-in min or max functions added in go1.21; - - replacing sort.Slice(x, func(i, j int) bool) { return s[i] < s[j] } - by a call to slices.Sort(s), added in go1.21; - - replacing interface{} by the 'any' type added in go1.18; - - replacing append([]T(nil), s...) by slices.Clone(s) or - slices.Concat(s), added in go1.21; - - replacing a loop around an m[k]=v map update by a call - to one of the Collect, Copy, Clone, or Insert functions - from the maps package, added in go1.21; - - replacing []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...), - added in go1.19; - - replacing uses of context.WithCancel in tests with t.Context, added in - go1.24; - - replacing omitempty by omitzero on structs, added in go1.24; - - replacing append(s[:i], s[i+1]...) 
by slices.Delete(s, i, i+1), - added in go1.21 - - replacing a 3-clause for i := 0; i < n; i++ {} loop by - for i := range n {}, added in go1.22; - - replacing Split in "for range strings.Split(...)" by go1.24's - more efficient SplitSeq, or Fields with FieldSeq; +existing code by using more modern features of Go and its standard +library. + +Each diagnostic provides a fix. Our intent is that these fixes may +be safely applied en masse without changing the behavior of your +program. In some cases the suggested fixes are imperfect and may +lead to (for example) unused imports or unused local variables, +causing build breakage. However, these problems are generally +trivial to fix. We regard any modernizer whose fix changes program +behavior to have a serious bug and will endeavor to fix it. To apply all modernization fixes en masse, you can use the following command: - $ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -test ./... + $ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -fix -test ./... If the tool warns of conflicting fixes, you may need to run it more than once until it has applied all fixes cleanly. This command is not an officially supported interface and may change in the future. +Changes produced by this tool should be reviewed as usual before +being merged. In some cases, a loop may be replaced by a simple +function call, causing comments within the loop to be discarded. +Human judgment may be required to avoid losing comments of value. + +Each diagnostic reported by modernize has a specific category. (The +categories are listed below.) Diagnostics in some categories, such +as "efaceany" (which replaces "interface{}" with "any" where it is +safe to do so) are particularly numerous. It may ease the burden of +code review to apply fixes in two passes, the first change +consisting only of fixes of category "efaceany", the second +consisting of all others. 
This can be achieved using the -category flag: + + $ modernize -category=efaceany -fix -test ./... + $ modernize -category=-efaceany -fix -test ./... + +Categories of modernize diagnostic: + + - forvar: remove x := x variable declarations made unnecessary by the new semantics of loops in go1.22. + + - slicescontains: replace 'for i, elem := range s { if elem == needle { ...; break }' + by a call to slices.Contains, added in go1.21. + + - minmax: replace an if/else conditional assignment by a call to + the built-in min or max functions added in go1.21. + + - sortslice: replace sort.Slice(x, func(i, j int) bool) { return s[i] < s[j] } + by a call to slices.Sort(s), added in go1.21. + + - efaceany: replace interface{} by the 'any' type added in go1.18. + + - slicesclone: replace append([]T(nil), s...) by slices.Clone(s) or + slices.Concat(s), added in go1.21. + + - mapsloop: replace a loop around an m[k]=v map update by a call + to one of the Collect, Copy, Clone, or Insert functions from + the maps package, added in go1.21. + + - fmtappendf: replace []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...), + added in go1.19. + + - testingcontext: replace uses of context.WithCancel in tests + with t.Context, added in go1.24. + + - omitzero: replace omitempty by omitzero on structs, added in go1.24. + + - bloop: replace "for i := range b.N" or "for range b.N" in a + benchmark with "for b.Loop()", and remove any preceding calls + to b.StopTimer, b.StartTimer, and b.ResetTimer. + + - slicesdelete: replace append(s[:i], s[i+1]...) by + slices.Delete(s, i, i+1), added in go1.21. + + - rangeint: replace a 3-clause "for i := 0; i < n; i++" loop by + "for i := range n", added in go1.22. + + - stringsseq: replace Split in "for range strings.Split(...)" by go1.24's + more efficient SplitSeq, or Fields with FieldSeq. + + - stringscutprefix: replace some uses of HasPrefix followed by TrimPrefix with CutPrefix, + added to the strings package in go1.20. 
+ + - waitgroup: replace old complex usages of sync.WaitGroup by less complex WaitGroup.Go method in go1.25. + Default: on. Package documentation: [modernize](https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/modernize) diff --git a/gopls/doc/release/v0.19.0.md b/gopls/doc/release/v0.19.0.md index 149a474244a..f6208417ebc 100644 --- a/gopls/doc/release/v0.19.0.md +++ b/gopls/doc/release/v0.19.0.md @@ -39,3 +39,10 @@ TODO: implement global. This code action, available on a dotted import, will offer to replace the import with a regular one and qualify each use of the package with its name. + +### Auto-complete package clause for new Go files + +Gopls now automatically adds the appropriate `package` clause to newly created Go files, +so that you can immediately get started writing the interesting part. + +It requires client support for `workspace/didCreateFiles` \ No newline at end of file diff --git a/gopls/go.mod b/gopls/go.mod index da7303222d2..c09e2daf7bd 100644 --- a/gopls/go.mod +++ b/gopls/go.mod @@ -1,15 +1,15 @@ module golang.org/x/tools/gopls -go 1.24.0 +go 1.24.2 require ( github.com/google/go-cmp v0.6.0 github.com/jba/templatecheck v0.7.1 golang.org/x/mod v0.24.0 - golang.org/x/sync v0.12.0 - golang.org/x/sys v0.31.0 + golang.org/x/sync v0.13.0 + golang.org/x/sys v0.32.0 golang.org/x/telemetry v0.0.0-20250220152412-165e2f84edbc - golang.org/x/text v0.23.0 + golang.org/x/text v0.24.0 golang.org/x/tools v0.30.0 golang.org/x/vuln v1.1.4 gopkg.in/yaml.v3 v3.0.1 diff --git a/gopls/go.sum b/gopls/go.sum index 20633541388..f5a9bbde4ca 100644 --- a/gopls/go.sum +++ b/gopls/go.sum @@ -16,7 +16,7 @@ github.com/rogpeppe/go-internal v1.13.2-0.20241226121412-a5dc8ff20d0a h1:w3tdWGK github.com/rogpeppe/go-internal v1.13.2-0.20241226121412-a5dc8ff20d0a/go.mod h1:S8kfXMp+yh77OxPD4fdM6YUknrZpQxLhvxzS4gDHENY= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= golang.org/x/crypto v0.19.0/go.mod 
h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= -golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc= +golang.org/x/crypto v0.37.0/go.mod h1:vg+k43peMZ0pUMhYmVAWysMK35e6ioLh3wB8ZCAfbVc= golang.org/x/exp/typeparams v0.0.0-20250218142911-aa4b98e5adaa h1:Br3+0EZZohShrmVVc85znGpxw7Ca8hsUJlrdT/JQGw8= golang.org/x/exp/typeparams v0.0.0-20250218142911-aa4b98e5adaa/go.mod h1:LKZHyeOpPuZcMgxeHjJp4p5yvxrCX1xDvH10zYHhjjQ= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= @@ -25,27 +25,27 @@ golang.org/x/mod v0.24.0 h1:ZfthKaKaT4NrhGVZHO1/WDTwGES4De8KtWO0SIbNJMU= golang.org/x/mod v0.24.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= -golang.org/x/net v0.37.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8= +golang.org/x/net v0.39.0/go.mod h1:X7NRbYVEA+ewNkCNyJ513WmMdQ3BineSwVtN2zD/d+E= golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -golang.org/x/sync v0.12.0 h1:MHc5BpPuC30uJk597Ri8TV3CNZcTLu6B6z4lJy+g6Jw= -golang.org/x/sync v0.12.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sync v0.13.0 h1:AauUjRAJ9OSnvULf/ARrrVywoJDy0YS2AwQ98I37610= +golang.org/x/sync v0.13.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik= -golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/sys v0.32.0 h1:s77OFDvIQeibCmezSnk/q6iAfkdiQaJi4VzroCFrN20= +golang.org/x/sys v0.32.0/go.mod 
h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= golang.org/x/telemetry v0.0.0-20240521205824-bda55230c457/go.mod h1:pRgIJT+bRLFKnoM1ldnzKoxTIn14Yxz928LQRYYgIN0= golang.org/x/telemetry v0.0.0-20250220152412-165e2f84edbc h1:HS+G1Mhh2dxM8ObutfYKdjfD7zpkyeP/UxeRnJpIZtQ= golang.org/x/telemetry v0.0.0-20250220152412-165e2f84edbc/go.mod h1:bDzXkYUaHzz51CtDy5kh/jR4lgPxsdbqC37kp/dzhCc= golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= -golang.org/x/term v0.30.0/go.mod h1:NYYFdzHoI5wRh/h5tDMdMqCqPJZEuNqVR5xJLd/n67g= +golang.org/x/term v0.31.0/go.mod h1:R4BeIy7D95HzImkxGkTW1UQTtP54tio2RyHz7PwK0aw= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= -golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY= -golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4= +golang.org/x/text v0.24.0 h1:dd5Bzh4yt5KYA8f9CJHCP4FB4D51c2c6JvN37xJJkJ0= +golang.org/x/text v0.24.0/go.mod h1:L8rBsPeo2pSS+xqN0d5u2ikmjtmoJbDBT1b7nHvFCdU= golang.org/x/vuln v1.1.4 h1:Ju8QsuyhX3Hk8ma3CesTbO8vfJD9EvUBgHvkxHBzj0I= golang.org/x/vuln v1.1.4/go.mod h1:F+45wmU18ym/ca5PLTPLsSzr2KppzswxPP603ldA67s= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= diff --git a/gopls/internal/analysis/fillreturns/fillreturns.go b/gopls/internal/analysis/fillreturns/fillreturns.go index 184aac5ea1f..a90105f6f56 100644 --- a/gopls/internal/analysis/fillreturns/fillreturns.go +++ b/gopls/internal/analysis/fillreturns/fillreturns.go @@ -55,12 +55,9 @@ outer: } // Find cursor for enclosing return statement (which may be curErr itself). 
- curRet := curErr - if _, ok := curRet.Node().(*ast.ReturnStmt); !ok { - curRet, ok = moreiters.First(curErr.Ancestors((*ast.ReturnStmt)(nil))) - if !ok { - continue // no enclosing return - } + curRet, ok := moreiters.First(curErr.Enclosing((*ast.ReturnStmt)(nil))) + if !ok { + continue // no enclosing return } ret := curRet.Node().(*ast.ReturnStmt) @@ -114,7 +111,7 @@ outer: retTyps = append(retTyps, retTyp) } - curFile, _ := moreiters.First(curRet.Ancestors((*ast.File)(nil))) + curFile, _ := moreiters.First(curRet.Enclosing((*ast.File)(nil))) file := curFile.Node().(*ast.File) matches := analysisinternal.MatchingIdents(retTyps, file, ret.Pos(), info, pass.Pkg) qual := typesinternal.FileQualifier(file, pass.Pkg) @@ -230,8 +227,5 @@ func fixesError(err types.Error) bool { // enclosingFunc returns the cursor for the innermost Func{Decl,Lit} // that encloses c, if any. func enclosingFunc(c cursor.Cursor) (cursor.Cursor, bool) { - for curAncestor := range c.Ancestors((*ast.FuncDecl)(nil), (*ast.FuncLit)(nil)) { - return curAncestor, true - } - return cursor.Cursor{}, false + return moreiters.First(c.Enclosing((*ast.FuncDecl)(nil), (*ast.FuncLit)(nil))) } diff --git a/gopls/internal/analysis/hostport/hostport.go b/gopls/internal/analysis/hostport/hostport.go index a7030ae116f..d95e475d1bf 100644 --- a/gopls/internal/analysis/hostport/hostport.go +++ b/gopls/internal/analysis/hostport/hostport.go @@ -14,11 +14,10 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" "golang.org/x/tools/gopls/internal/util/safetoken" - "golang.org/x/tools/internal/analysisinternal" - "golang.org/x/tools/internal/astutil/cursor" + typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" + "golang.org/x/tools/internal/typesinternal/typeindex" ) const Doc = `check format of addresses passed to net.Dial @@ -44,20 +43,20 @@ var Analyzer = 
&analysis.Analyzer{ Name: "hostport", Doc: Doc, URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/hostport", - Requires: []*analysis.Analyzer{inspect.Analyzer}, + Requires: []*analysis.Analyzer{inspect.Analyzer, typeindexanalyzer.Analyzer}, Run: run, } func run(pass *analysis.Pass) (any, error) { - // Fast path: if the package doesn't import net and fmt, skip - // the traversal. - if !analysisinternal.Imports(pass.Pkg, "net") || - !analysisinternal.Imports(pass.Pkg, "fmt") { - return nil, nil + var ( + index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index) + info = pass.TypesInfo + fmtSprintf = index.Object("fmt", "Sprintf") + ) + if !index.Used(fmtSprintf) { + return nil, nil // fast path: package doesn't use fmt.Sprintf } - info := pass.TypesInfo - // checkAddr reports a diagnostic (and returns true) if e // is a call of the form fmt.Sprintf("%d:%d", ...). // The diagnostic includes a fix. @@ -65,96 +64,94 @@ func run(pass *analysis.Pass) (any, error) { // dialCall is non-nil if the Dial call is non-local // but within the same file. checkAddr := func(e ast.Expr, dialCall *ast.CallExpr) { - if call, ok := e.(*ast.CallExpr); ok { - obj := typeutil.Callee(info, call) - if analysisinternal.IsFunctionNamed(obj, "fmt", "Sprintf") { - // Examine format string. - formatArg := call.Args[0] - if tv := info.Types[formatArg]; tv.Value != nil { - numericPort := false - format := constant.StringVal(tv.Value) - switch format { - case "%s:%d": - // Have: fmt.Sprintf("%s:%d", host, port) - numericPort = true - - case "%s:%s": - // Have: fmt.Sprintf("%s:%s", host, portStr) - // Keep port string as is. - - default: - return - } + if call, ok := e.(*ast.CallExpr); ok && typeutil.Callee(info, call) == fmtSprintf { + // Examine format string. 
+ formatArg := call.Args[0] + if tv := info.Types[formatArg]; tv.Value != nil { + numericPort := false + format := constant.StringVal(tv.Value) + switch format { + case "%s:%d": + // Have: fmt.Sprintf("%s:%d", host, port) + numericPort = true + + case "%s:%s": + // Have: fmt.Sprintf("%s:%s", host, portStr) + // Keep port string as is. + + default: + return + } - // Use granular edits to preserve original formatting. - edits := []analysis.TextEdit{ - { - // Replace fmt.Sprintf with net.JoinHostPort. - Pos: call.Fun.Pos(), - End: call.Fun.End(), - NewText: []byte("net.JoinHostPort"), - }, - { - // Delete format string. - Pos: formatArg.Pos(), - End: call.Args[1].Pos(), - }, - } + // Use granular edits to preserve original formatting. + edits := []analysis.TextEdit{ + { + // Replace fmt.Sprintf with net.JoinHostPort. + Pos: call.Fun.Pos(), + End: call.Fun.End(), + NewText: []byte("net.JoinHostPort"), + }, + { + // Delete format string. + Pos: formatArg.Pos(), + End: call.Args[1].Pos(), + }, + } - // Turn numeric port into a string. - if numericPort { - // port => fmt.Sprintf("%d", port) - // 123 => "123" - port := call.Args[2] - newPort := fmt.Sprintf(`fmt.Sprintf("%%d", %s)`, port) - if port := info.Types[port].Value; port != nil { - if i, ok := constant.Int64Val(port); ok { - newPort = fmt.Sprintf(`"%d"`, i) // numeric constant - } + // Turn numeric port into a string. + if numericPort { + // port => fmt.Sprintf("%d", port) + // 123 => "123" + port := call.Args[2] + newPort := fmt.Sprintf(`fmt.Sprintf("%%d", %s)`, port) + if port := info.Types[port].Value; port != nil { + if i, ok := constant.Int64Val(port); ok { + newPort = fmt.Sprintf(`"%d"`, i) // numeric constant } - - edits = append(edits, analysis.TextEdit{ - Pos: port.Pos(), - End: port.End(), - NewText: []byte(newPort), - }) - } - - // Refer to Dial call, if not adjacent. 
- suffix := "" - if dialCall != nil { - suffix = fmt.Sprintf(" (passed to net.Dial at L%d)", - safetoken.StartPosition(pass.Fset, dialCall.Pos()).Line) } - pass.Report(analysis.Diagnostic{ - // Highlight the format string. - Pos: formatArg.Pos(), - End: formatArg.End(), - Message: fmt.Sprintf("address format %q does not work with IPv6%s", format, suffix), - SuggestedFixes: []analysis.SuggestedFix{{ - Message: "Replace fmt.Sprintf with net.JoinHostPort", - TextEdits: edits, - }}, + edits = append(edits, analysis.TextEdit{ + Pos: port.Pos(), + End: port.End(), + NewText: []byte(newPort), }) } + + // Refer to Dial call, if not adjacent. + suffix := "" + if dialCall != nil { + suffix = fmt.Sprintf(" (passed to net.Dial at L%d)", + safetoken.StartPosition(pass.Fset, dialCall.Pos()).Line) + } + + pass.Report(analysis.Diagnostic{ + // Highlight the format string. + Pos: formatArg.Pos(), + End: formatArg.End(), + Message: fmt.Sprintf("address format %q does not work with IPv6%s", format, suffix), + SuggestedFixes: []analysis.SuggestedFix{{ + Message: "Replace fmt.Sprintf with net.JoinHostPort", + TextEdits: edits, + }}, + }) } } } // Check address argument of each call to net.Dial et al. 
- inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - for curCall := range cursor.Root(inspect).Preorder((*ast.CallExpr)(nil)) { - call := curCall.Node().(*ast.CallExpr) - - obj := typeutil.Callee(info, call) - if analysisinternal.IsFunctionNamed(obj, "net", "Dial", "DialTimeout") || - analysisinternal.IsMethodNamed(obj, "net", "Dialer", "Dial") { - + for _, callee := range []types.Object{ + index.Object("net", "Dial"), + index.Object("net", "DialTimeout"), + index.Selection("net", "Dialer", "Dial"), + } { + for curCall := range index.Calls(callee) { + call := curCall.Node().(*ast.CallExpr) switch address := call.Args[1].(type) { case *ast.CallExpr: - // net.Dial("tcp", fmt.Sprintf("%s:%d", ...)) - checkAddr(address, nil) + if len(call.Args) == 2 { // avoid spread-call edge case + // net.Dial("tcp", fmt.Sprintf("%s:%d", ...)) + checkAddr(address, nil) + } case *ast.Ident: // addr := fmt.Sprintf("%s:%d", ...) @@ -162,25 +159,23 @@ func run(pass *analysis.Pass) (any, error) { // net.Dial("tcp", addr) // Search for decl of addrVar within common ancestor of addrVar and Dial call. + // TODO(adonovan): abstract "find RHS of statement that assigns var v". + // TODO(adonovan): reject if there are other assignments to var v. if addrVar, ok := info.Uses[address].(*types.Var); ok { - pos := addrVar.Pos() - for curAncestor := range curCall.Ancestors() { - if curIdent, ok := curAncestor.FindPos(pos, pos); ok { - // curIdent is the declaring ast.Ident of addr. - switch parent := curIdent.Parent().Node().(type) { - case *ast.AssignStmt: - if len(parent.Rhs) == 1 { - // Have: addr := fmt.Sprintf("%s:%d", ...) - checkAddr(parent.Rhs[0], call) - } - - case *ast.ValueSpec: - if len(parent.Values) == 1 { - // Have: var addr = fmt.Sprintf("%s:%d", ...) - checkAddr(parent.Values[0], call) - } + if curId, ok := index.Def(addrVar); ok { + // curIdent is the declaring ast.Ident of addr. 
+ switch parent := curId.Parent().Node().(type) { + case *ast.AssignStmt: + if len(parent.Rhs) == 1 { + // Have: addr := fmt.Sprintf("%s:%d", ...) + checkAddr(parent.Rhs[0], call) + } + + case *ast.ValueSpec: + if len(parent.Values) == 1 { + // Have: var addr = fmt.Sprintf("%s:%d", ...) + checkAddr(parent.Values[0], call) } - break } } } diff --git a/gopls/internal/analysis/maprange/cmd/maprange/main.go b/gopls/internal/analysis/maprange/cmd/maprange/main.go new file mode 100644 index 00000000000..ec1fd5ca93c --- /dev/null +++ b/gopls/internal/analysis/maprange/cmd/maprange/main.go @@ -0,0 +1,14 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// The maprange command applies the golang.org/x/tools/gopls/internal/analysis/maprange +// analysis to the specified packages of Go source code. +package main + +import ( + "golang.org/x/tools/go/analysis/singlechecker" + "golang.org/x/tools/gopls/internal/analysis/maprange" +) + +func main() { singlechecker.Main(maprange.Analyzer) } diff --git a/gopls/internal/analysis/maprange/doc.go b/gopls/internal/analysis/maprange/doc.go new file mode 100644 index 00000000000..46f465059a9 --- /dev/null +++ b/gopls/internal/analysis/maprange/doc.go @@ -0,0 +1,37 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package maprange defines an Analyzer that checks for redundant use +// of the functions maps.Keys and maps.Values in "for" statements with +// "range" clauses. 
+// +// # Analyzer maprange +// +// maprange: checks for unnecessary calls to maps.Keys and maps.Values in range statements +// +// Consider a loop written like this: +// +// for val := range maps.Values(m) { +// fmt.Println(val) +// } +// +// This should instead be written without the call to maps.Values: +// +// for _, val := range m { +// fmt.Println(val) +// } +// +// golang.org/x/exp/maps returns slices for Keys/Values instead of iterators, +// but unnecessary calls should similarly be removed: +// +// for _, key := range maps.Keys(m) { +// fmt.Println(key) +// } +// +// should be rewritten as: +// +// for key := range m { +// fmt.Println(key) +// } +package maprange diff --git a/gopls/internal/analysis/maprange/maprange.go b/gopls/internal/analysis/maprange/maprange.go new file mode 100644 index 00000000000..eed04b14e72 --- /dev/null +++ b/gopls/internal/analysis/maprange/maprange.go @@ -0,0 +1,159 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package maprange + +import ( + _ "embed" + "fmt" + "go/ast" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/gopls/internal/util/moreiters" + "golang.org/x/tools/internal/analysisinternal" + typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" + "golang.org/x/tools/internal/astutil/cursor" + "golang.org/x/tools/internal/astutil/edge" + "golang.org/x/tools/internal/typesinternal/typeindex" + "golang.org/x/tools/internal/versions" +) + +//go:embed doc.go +var doc string + +var Analyzer = &analysis.Analyzer{ + Name: "maprange", + Doc: analysisinternal.MustExtractDoc(doc, "maprange"), + URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/maprange", + Requires: []*analysis.Analyzer{typeindexanalyzer.Analyzer}, + Run: run, +} + +// This is a variable because the package name is different in Google's code base. 
+var xmaps = "golang.org/x/exp/maps" + +func run(pass *analysis.Pass) (any, error) { + switch pass.Pkg.Path() { + case "maps", xmaps: + // These packages know how to use their own APIs. + return nil, nil + } + var ( + index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index) + mapsKeys = index.Object("maps", "Keys") + mapsValues = index.Object("maps", "Values") + xmapsKeys = index.Object(xmaps, "Keys") + xmapsValues = index.Object(xmaps, "Values") + ) + for _, callee := range []types.Object{mapsKeys, mapsValues, xmapsKeys, xmapsValues} { + for curCall := range index.Calls(callee) { + if ek, _ := curCall.ParentEdge(); ek != edge.RangeStmt_X { + continue + } + analyzeRangeStmt(pass, callee, curCall) + } + } + return nil, nil +} + +// analyzeRangeStmt analyzes range statements iterating over calls to maps.Keys +// or maps.Values (from the standard library "maps" or "golang.org/x/exp/maps"). +// +// It reports a diagnostic with a suggested fix to simplify the loop by removing +// the unnecessary function call and adjusting range variables, if possible. +// For certain patterns involving x/exp/maps.Keys before Go 1.22, it reports +// a diagnostic about potential incorrect usage without a suggested fix. +// No diagnostic is reported if the range statement doesn't require changes. +func analyzeRangeStmt(pass *analysis.Pass, callee types.Object, curCall cursor.Cursor) { + var ( + call = curCall.Node().(*ast.CallExpr) + rangeStmt = curCall.Parent().Node().(*ast.RangeStmt) + pkg = callee.Pkg().Path() + fn = callee.Name() + ) + var edits []analysis.TextEdit + + // Check if the call to maps.Keys or maps.Values can be removed/replaced. + // Example: + // for range maps.Keys(m) + // ^^^^^^^^^ removeCall + // for i, _ := range maps.Keys(m) + // ^^^^^^^^^ replace with `len` + // + // If we have: for i, k := range maps.Keys(m) (only possible using x/exp/maps) + // or: for i, v = range maps.Values(m) + // do not remove the call. 
+ removeCall := !isSet(rangeStmt.Key) || !isSet(rangeStmt.Value) + replace := "" + if pkg == xmaps && isSet(rangeStmt.Key) && rangeStmt.Value == nil { + // If we have: for i := range maps.Keys(m) (using x/exp/maps), + // Replace with: for i := range len(m) + replace = "len" + canRangeOverInt := fileUses(pass.TypesInfo, curCall, "go1.22") + if !canRangeOverInt { + pass.Report(analysis.Diagnostic{ + Pos: call.Pos(), + End: call.End(), + Message: fmt.Sprintf("likely incorrect use of %s.%s (returns a slice)", pkg, fn), + }) + return + } + } + if removeCall { + edits = append(edits, analysis.TextEdit{ + Pos: call.Fun.Pos(), + End: call.Fun.End(), + NewText: []byte(replace)}) + } + // Check if the key of the range statement should be removed. + // Example: + // for _, k := range maps.Keys(m) + // ^^^ removeKey ^^^^^^^^^ removeCall + removeKey := pkg == xmaps && fn == "Keys" && !isSet(rangeStmt.Key) && isSet(rangeStmt.Value) + if removeKey { + edits = append(edits, analysis.TextEdit{ + Pos: rangeStmt.Key.Pos(), + End: rangeStmt.Value.Pos(), + }) + } + // Check if a key should be inserted to the range statement. + // Example: + // for _, v := range maps.Values(m) + // ^^^ addKey ^^^^^^^^^^^ removeCall + addKey := pkg == "maps" && fn == "Values" && isSet(rangeStmt.Key) + if addKey { + edits = append(edits, analysis.TextEdit{ + Pos: rangeStmt.Key.Pos(), + End: rangeStmt.Key.Pos(), + NewText: []byte("_, "), + }) + } + + if len(edits) > 0 { + pass.Report(analysis.Diagnostic{ + Pos: call.Pos(), + End: call.End(), + Message: fmt.Sprintf("unnecessary and inefficient call of %s.%s", pkg, fn), + SuggestedFixes: []analysis.SuggestedFix{{ + Message: fmt.Sprintf("Remove unnecessary call to %s.%s", pkg, fn), + TextEdits: edits, + }}, + }) + } +} + +// isSet reports whether an ast.Expr is a non-nil expression that is not the blank identifier. 
+func isSet(expr ast.Expr) bool { + ident, ok := expr.(*ast.Ident) + return expr != nil && (!ok || ident.Name != "_") +} + +// fileUses reports whether the file containing the specified cursor +// uses at least the specified version of Go (e.g. "go1.24"). +func fileUses(info *types.Info, c cursor.Cursor, version string) bool { + c, _ = moreiters.First(c.Enclosing((*ast.File)(nil))) + file := c.Node().(*ast.File) + return !versions.Before(info.FileVersions[file], version) +} diff --git a/gopls/internal/analysis/maprange/maprange_test.go b/gopls/internal/analysis/maprange/maprange_test.go new file mode 100644 index 00000000000..1759dc1db99 --- /dev/null +++ b/gopls/internal/analysis/maprange/maprange_test.go @@ -0,0 +1,23 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package maprange_test + +import ( + "golang.org/x/tools/go/analysis/analysistest" + "golang.org/x/tools/gopls/internal/analysis/maprange" + "golang.org/x/tools/internal/testfiles" + "path/filepath" + "testing" +) + +func TestBasic(t *testing.T) { + dir := testfiles.ExtractTxtarFileToTmp(t, filepath.Join(analysistest.TestData(), "basic.txtar")) + analysistest.RunWithSuggestedFixes(t, dir, maprange.Analyzer, "maprange") +} + +func TestOld(t *testing.T) { + dir := testfiles.ExtractTxtarFileToTmp(t, filepath.Join(analysistest.TestData(), "old.txtar")) + analysistest.RunWithSuggestedFixes(t, dir, maprange.Analyzer, "maprange") +} diff --git a/gopls/internal/analysis/maprange/testdata/basic.txtar b/gopls/internal/analysis/maprange/testdata/basic.txtar new file mode 100644 index 00000000000..1950e958218 --- /dev/null +++ b/gopls/internal/analysis/maprange/testdata/basic.txtar @@ -0,0 +1,209 @@ +Test of fixing redundant calls to maps.Keys and maps.Values +(both stdlib "maps" and "golang.org/x/exp/maps") for Go 1.24. 
+ +-- go.mod -- +module maprange + +require golang.org/x/exp v0.0.0 + +replace golang.org/x/exp => ./exp + +go 1.24 + +-- basic.go -- +package basic + +import "maps" + +func _() { + m := make(map[int]int) + + for range maps.Keys(m) { // want `unnecessary and inefficient call of maps.Keys` + } + + for range maps.Values(m) { // want `unnecessary and inefficient call of maps.Values` + } + + var x struct { + Map map[int]int + } + x.Map = make(map[int]int) + for x.Map[1] = range maps.Keys(m) { // want `unnecessary and inefficient call of maps.Keys` + } + + for x.Map[2] = range maps.Values(m) { // want `unnecessary and inefficient call of maps.Values` + } + + for k := range maps.Keys(m) { // want `unnecessary and inefficient call of maps.Keys` + _ = k + } + + for v := range maps.Values(m) { // want `unnecessary and inefficient call of maps.Values` + _ = v + } + + for range maps.Keys(x.Map) { // want `unnecessary and inefficient call of maps.Keys` + } + + for /* comment */ k := range /* comment */ maps.Keys(/* comment */ m) { // want `unnecessary and inefficient call of maps.Keys` + _ = k + } +} + +-- basic.go.golden -- +package basic + +import "maps" + +func _() { + m := make(map[int]int) + + for range m { // want `unnecessary and inefficient call of maps.Keys` + } + + for range m { // want `unnecessary and inefficient call of maps.Values` + } + + var x struct { + Map map[int]int + } + x.Map = make(map[int]int) + for x.Map[1] = range m { // want `unnecessary and inefficient call of maps.Keys` + } + + for _, x.Map[2] = range m { // want `unnecessary and inefficient call of maps.Values` + } + + for k := range m { // want `unnecessary and inefficient call of maps.Keys` + _ = k + } + + for _, v := range m { // want `unnecessary and inefficient call of maps.Values` + _ = v + } + + for range x.Map { // want `unnecessary and inefficient call of maps.Keys` + } + + for /* comment */ k := range /* comment */ /* comment */ m { // want `unnecessary and inefficient call of maps.Keys` 
+ _ = k + } +} + +-- xmaps.go -- +package basic + +import "golang.org/x/exp/maps" + +func _() { + m := make(map[int]int) + + for range maps.Keys(m) { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Keys` + } + + for range maps.Values(m) { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Values` + } + + for i := range maps.Values(m) { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Values` + _ = i + } + + var x struct { + Map map[int]int + } + x.Map = make(map[int]int) + for _, x.Map[1] = range maps.Keys(m) { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Keys` + } + + for _, x.Map[2] = range maps.Values(m) { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Values` + } + + for _, k := range maps.Keys(m) { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Keys` + _ = k + } + + for _, v := range maps.Values(m) { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Values` + _ = v + } + + for range maps.Keys(x.Map) { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Keys` + } + + for i, k := range maps.Keys(m) { // ok: this can't be straightforwardly rewritten + _, _ = i, k + } + + for _, _ = range maps.Values(m) { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Values` + } +} + +-- xmaps.go.golden -- +package basic + +import "golang.org/x/exp/maps" + +func _() { + m := make(map[int]int) + + for range m { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Keys` + } + + for range m { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Values` + } + + for i := range len(m) { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Values` + _ = i + } + + var x struct { + Map map[int]int + } + x.Map = make(map[int]int) + for x.Map[1] = range m { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Keys` + } + + for _, x.Map[2] = range m { // want `unnecessary 
and inefficient call of golang.org/x/exp/maps.Values` + } + + for k := range m { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Keys` + _ = k + } + + for _, v := range m { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Values` + _ = v + } + + for range x.Map { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Keys` + } + + for i, k := range maps.Keys(m) { // ok: this can't be straightforwardly rewritten + _, _ = i, k + } + + for _, _ = range m { // want `unnecessary and inefficient call of golang.org/x/exp/maps.Values` + } +} + +-- exp/go.mod -- +module golang.org/x/exp + +go 1.24 + +-- exp/maps/maps.go -- +package maps + +func Keys[M ~map[K]V, K comparable, V any](m M) []K { + r := make([]K, 0, len(m)) + for k := range m { + r = append(r, k) + } + return r +} + +func Values[M ~map[K]V, K comparable, V any](m M) []V { + r := make([]V, 0, len(m)) + for _, v := range m { + r = append(r, v) + } + return r +} \ No newline at end of file diff --git a/gopls/internal/analysis/maprange/testdata/old.txtar b/gopls/internal/analysis/maprange/testdata/old.txtar new file mode 100644 index 00000000000..d27ff8c2a22 --- /dev/null +++ b/gopls/internal/analysis/maprange/testdata/old.txtar @@ -0,0 +1,62 @@ +Test of fixing redundant calls to maps.Keys and maps.Values +(both stdlib "maps" and "golang.org/x/exp/maps") for Go 1.21, +before range over int made suggesting a fix for a rare case easier. 
+ +-- go.mod -- +module maprange + +require golang.org/x/exp v0.0.0 + +replace golang.org/x/exp => ./exp + +go 1.21 + +-- old.go -- +package old + +import "golang.org/x/exp/maps" + +func _() { + m := make(map[int]int) + + for i := range maps.Keys(m) { // want `likely incorrect use of golang.org/x/exp/maps.Keys \(returns a slice\)` + _ = i + } +} + +-- old.go.golden -- +package old + +import "golang.org/x/exp/maps" + +func _() { + m := make(map[int]int) + + for i := range maps.Keys(m) { // want `likely incorrect use of golang.org/x/exp/maps.Keys \(returns a slice\)` + _ = i + } +} + +-- exp/go.mod -- +module golang.org/x/exp + +go 1.21 + +-- exp/maps/maps.go -- +package maps + +func Keys[M ~map[K]V, K comparable, V any](m M) []K { + r := make([]K, 0, len(m)) + for k := range m { + r = append(r, k) + } + return r +} + +func Values[M ~map[K]V, K comparable, V any](m M) []V { + r := make([]V, 0, len(m)) + for _, v := range m { + r = append(r, v) + } + return r +} \ No newline at end of file diff --git a/gopls/internal/analysis/modernize/bloop.go b/gopls/internal/analysis/modernize/bloop.go index f851a6688e1..5bfb0b7d8e8 100644 --- a/gopls/internal/analysis/modernize/bloop.go +++ b/gopls/internal/analysis/modernize/bloop.go @@ -14,8 +14,11 @@ import ( "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/gopls/internal/util/moreiters" "golang.org/x/tools/internal/analysisinternal" + typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" "golang.org/x/tools/internal/astutil/cursor" + "golang.org/x/tools/internal/typesinternal/typeindex" ) // bloop updates benchmarks that use "for range b.N", replacing it @@ -31,7 +34,11 @@ func bloop(pass *analysis.Pass) { return } - info := pass.TypesInfo + var ( + inspect = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index) + info = pass.TypesInfo + ) 
// edits computes the text edits for a matched for/range loop // at the specified cursor. b is the *testing.B value, and @@ -76,7 +83,6 @@ func bloop(pass *analysis.Pass) { } // Find all for/range statements. - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) loops := []ast.Node{ (*ast.ForStmt)(nil), (*ast.RangeStmt)(nil), @@ -101,11 +107,11 @@ func bloop(pass *analysis.Pass) { if assign, ok := n.Init.(*ast.AssignStmt); ok && assign.Tok == token.DEFINE && len(assign.Rhs) == 1 && - isZeroLiteral(assign.Rhs[0]) && + isZeroIntLiteral(info, assign.Rhs[0]) && is[*ast.IncDecStmt](n.Post) && n.Post.(*ast.IncDecStmt).Tok == token.INC && equalSyntax(n.Post.(*ast.IncDecStmt).X, assign.Lhs[0]) && - !uses(info, body, info.Defs[assign.Lhs[0].(*ast.Ident)]) { + !uses(index, body, info.Defs[assign.Lhs[0].(*ast.Ident)]) { delStart, delEnd = n.Init.Pos(), n.Post.End() } @@ -152,9 +158,9 @@ func bloop(pass *analysis.Pass) { } // uses reports whether the subtree cur contains a use of obj. -func uses(info *types.Info, cur cursor.Cursor, obj types.Object) bool { - for curId := range cur.Preorder((*ast.Ident)(nil)) { - if info.Uses[curId.Node().(*ast.Ident)] == obj { +func uses(index *typeindex.Index, cur cursor.Cursor, obj types.Object) bool { + for use := range index.Uses(obj) { + if cur.Contains(use) { return true } } @@ -164,8 +170,5 @@ func uses(info *types.Info, cur cursor.Cursor, obj types.Object) bool { // enclosingFunc returns the cursor for the innermost Func{Decl,Lit} // that encloses c, if any. 
func enclosingFunc(c cursor.Cursor) (cursor.Cursor, bool) { - for curAncestor := range c.Ancestors((*ast.FuncDecl)(nil), (*ast.FuncLit)(nil)) { - return curAncestor, true - } - return cursor.Cursor{}, false + return moreiters.First(c.Enclosing((*ast.FuncDecl)(nil), (*ast.FuncLit)(nil))) } diff --git a/gopls/internal/analysis/modernize/doc.go b/gopls/internal/analysis/modernize/doc.go index b12abab7063..aa052540832 100644 --- a/gopls/internal/analysis/modernize/doc.go +++ b/gopls/internal/analysis/modernize/doc.go @@ -9,36 +9,87 @@ // modernize: simplify code by using modern constructs // // This analyzer reports opportunities for simplifying and clarifying -// existing code by using more modern features of Go, such as: -// -// - replacing an if/else conditional assignment by a call to the -// built-in min or max functions added in go1.21; -// - replacing sort.Slice(x, func(i, j int) bool) { return s[i] < s[j] } -// by a call to slices.Sort(s), added in go1.21; -// - replacing interface{} by the 'any' type added in go1.18; -// - replacing append([]T(nil), s...) by slices.Clone(s) or -// slices.Concat(s), added in go1.21; -// - replacing a loop around an m[k]=v map update by a call -// to one of the Collect, Copy, Clone, or Insert functions -// from the maps package, added in go1.21; -// - replacing []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...), -// added in go1.19; -// - replacing uses of context.WithCancel in tests with t.Context, added in -// go1.24; -// - replacing omitempty by omitzero on structs, added in go1.24; -// - replacing append(s[:i], s[i+1]...) by slices.Delete(s, i, i+1), -// added in go1.21 -// - replacing a 3-clause for i := 0; i < n; i++ {} loop by -// for i := range n {}, added in go1.22; -// - replacing Split in "for range strings.Split(...)" by go1.24's -// more efficient SplitSeq, or Fields with FieldSeq; +// existing code by using more modern features of Go and its standard +// library. +// +// Each diagnostic provides a fix. 
Our intent is that these fixes may +// be safely applied en masse without changing the behavior of your +// program. In some cases the suggested fixes are imperfect and may +// lead to (for example) unused imports or unused local variables, +// causing build breakage. However, these problems are generally +// trivial to fix. We regard any modernizer whose fix changes program +// behavior to have a serious bug and will endeavor to fix it. // // To apply all modernization fixes en masse, you can use the // following command: // -// $ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -test ./... +// $ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -fix -test ./... // // If the tool warns of conflicting fixes, you may need to run it more // than once until it has applied all fixes cleanly. This command is // not an officially supported interface and may change in the future. +// +// Changes produced by this tool should be reviewed as usual before +// being merged. In some cases, a loop may be replaced by a simple +// function call, causing comments within the loop to be discarded. +// Human judgment may be required to avoid losing comments of value. +// +// Each diagnostic reported by modernize has a specific category. (The +// categories are listed below.) Diagnostics in some categories, such +// as "efaceany" (which replaces "interface{}" with "any" where it is +// safe to do so) are particularly numerous. It may ease the burden of +// code review to apply fixes in two passes, the first change +// consisting only of fixes of category "efaceany", the second +// consisting of all others. This can be achieved using the -category flag: +// +// $ modernize -category=efaceany -fix -test ./... +// $ modernize -category=-efaceany -fix -test ./... +// +// Categories of modernize diagnostic: +// +// - forvar: remove x := x variable declarations made unnecessary by the new semantics of loops in go1.22. 
+// +// - slicescontains: replace 'for i, elem := range s { if elem == needle { ...; break }' +// by a call to slices.Contains, added in go1.21. +// +// - minmax: replace an if/else conditional assignment by a call to +// the built-in min or max functions added in go1.21. +// +// - sortslice: replace sort.Slice(x, func(i, j int) bool) { return s[i] < s[j] } +// by a call to slices.Sort(s), added in go1.21. +// +// - efaceany: replace interface{} by the 'any' type added in go1.18. +// +// - slicesclone: replace append([]T(nil), s...) by slices.Clone(s) or +// slices.Concat(s), added in go1.21. +// +// - mapsloop: replace a loop around an m[k]=v map update by a call +// to one of the Collect, Copy, Clone, or Insert functions from +// the maps package, added in go1.21. +// +// - fmtappendf: replace []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...), +// added in go1.19. +// +// - testingcontext: replace uses of context.WithCancel in tests +// with t.Context, added in go1.24. +// +// - omitzero: replace omitempty by omitzero on structs, added in go1.24. +// +// - bloop: replace "for i := range b.N" or "for range b.N" in a +// benchmark with "for b.Loop()", and remove any preceding calls +// to b.StopTimer, b.StartTimer, and b.ResetTimer. +// +// - slicesdelete: replace append(s[:i], s[i+1]...) by +// slices.Delete(s, i, i+1), added in go1.21. +// +// - rangeint: replace a 3-clause "for i := 0; i < n; i++" loop by +// "for i := range n", added in go1.22. +// +// - stringsseq: replace Split in "for range strings.Split(...)" by go1.24's +// more efficient SplitSeq, or Fields with FieldSeq. +// +// - stringscutprefix: replace some uses of HasPrefix followed by TrimPrefix with CutPrefix, +// added to the strings package in go1.20. +// +// - waitgroup: replace old complex usages of sync.WaitGroup by less complex WaitGroup.Go method in go1.25. 
package modernize diff --git a/gopls/internal/analysis/modernize/fmtappendf.go b/gopls/internal/analysis/modernize/fmtappendf.go index 8575827aa3e..6b01d38050e 100644 --- a/gopls/internal/analysis/modernize/fmtappendf.go +++ b/gopls/internal/analysis/modernize/fmtappendf.go @@ -5,33 +5,35 @@ package modernize import ( + "fmt" "go/ast" "go/types" "strings" "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/internal/analysisinternal" + typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" + "golang.org/x/tools/internal/astutil/edge" + "golang.org/x/tools/internal/typesinternal/typeindex" ) // The fmtappend function replaces []byte(fmt.Sprintf(...)) by -// fmt.Appendf(nil, ...). +// fmt.Appendf(nil, ...), and similarly for Sprint, Sprintln. func fmtappendf(pass *analysis.Pass) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - info := pass.TypesInfo - for curFile := range filesUsing(inspect, info, "go1.19") { - for curCallExpr := range curFile.Preorder((*ast.CallExpr)(nil)) { - conv := curCallExpr.Node().(*ast.CallExpr) - tv := info.Types[conv.Fun] - if tv.IsType() && types.Identical(tv.Type, byteSliceType) { - call, ok := conv.Args[0].(*ast.CallExpr) - if ok { - obj := typeutil.Callee(info, call) - if !analysisinternal.IsFunctionNamed(obj, "fmt", "Sprintf", "Sprintln", "Sprint") { - continue - } + index := pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index) + for _, fn := range []types.Object{ + index.Object("fmt", "Sprintf"), + index.Object("fmt", "Sprintln"), + index.Object("fmt", "Sprint"), + } { + for curCall := range index.Calls(fn) { + call := curCall.Node().(*ast.CallExpr) + if ek, idx := curCall.ParentEdge(); ek == edge.CallExpr_Args && idx == 0 { + // Is parent a T(fmt.SprintX(...)) conversion? 
+ conv := curCall.Parent().Node().(*ast.CallExpr) + tv := pass.TypesInfo.Types[conv.Fun] + if tv.IsType() && types.Identical(tv.Type, byteSliceType) && + fileUses(pass.TypesInfo, enclosingFile(curCall), "go1.19") { + // Have: []byte(fmt.SprintX(...)) // Find "Sprint" identifier. var id *ast.Ident @@ -42,13 +44,14 @@ func fmtappendf(pass *analysis.Pass) { id = e // "Sprint" after `import . "fmt"` } + old, new := fn.Name(), strings.Replace(fn.Name(), "Sprint", "Append", 1) pass.Report(analysis.Diagnostic{ Pos: conv.Pos(), End: conv.End(), Category: "fmtappendf", - Message: "Replace []byte(fmt.Sprintf...) with fmt.Appendf", + Message: fmt.Sprintf("Replace []byte(fmt.%s...) with fmt.%s", old, new), SuggestedFixes: []analysis.SuggestedFix{{ - Message: "Replace []byte(fmt.Sprintf...) with fmt.Appendf", + Message: fmt.Sprintf("Replace []byte(fmt.%s...) with fmt.%s", old, new), TextEdits: []analysis.TextEdit{ { // delete "[]byte(" @@ -63,7 +66,7 @@ func fmtappendf(pass *analysis.Pass) { { Pos: id.Pos(), End: id.End(), - NewText: []byte(strings.Replace(obj.Name(), "Sprint", "Append", 1)), + NewText: []byte(new), }, { Pos: call.Lparen + 1, diff --git a/gopls/internal/analysis/modernize/forvar.go b/gopls/internal/analysis/modernize/forvar.go new file mode 100644 index 00000000000..6f88ab77ed9 --- /dev/null +++ b/gopls/internal/analysis/modernize/forvar.go @@ -0,0 +1,95 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package modernize + +import ( + "go/ast" + "go/token" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/internal/analysisinternal" +) + +// forvar offers to fix unnecessary copying of a for variable +// +// for _, x := range foo { +// x := x // offer to remove this superfluous assignment +// } +// +// Prerequisites: +// First statement in a range loop has to be := +// where the two idents are the same, +// and the ident is defined (:=) as a variable in the for statement. +// (Note that this 'fix' does not work for three clause loops +// because the Go specification says "The variable used by each subsequent iteration +// is declared implicitly before executing the post statement and initialized to the +// value of the previous iteration's variable at that moment.") +func forvar(pass *analysis.Pass) { + info := pass.TypesInfo + + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + for curFile := range filesUsing(inspect, info, "go1.22") { + for curLoop := range curFile.Preorder((*ast.RangeStmt)(nil)) { + // in a range loop. Is the first statement var := var? + // if so, is var one of the range vars, and is it defined + // in the for statement? + // If so, decide how much to delete. 
+ loop := curLoop.Node().(*ast.RangeStmt) + if loop.Tok != token.DEFINE { + continue + } + v, stmt := loopVarRedecl(loop.Body) + if v == nil { + continue // index is not redeclared + } + if (loop.Key == nil || !equalSyntax(loop.Key, v)) && + (loop.Value == nil || !equalSyntax(loop.Value, v)) { + continue + } + astFile := curFile.Node().(*ast.File) + edits := analysisinternal.DeleteStmt(pass.Fset, astFile, stmt, bug.Reportf) + if len(edits) == 0 { + bug.Reportf("forvar failed to delete statement") + continue + } + remove := edits[0] + diag := analysis.Diagnostic{ + Pos: remove.Pos, + End: remove.End, + Category: "forvar", + Message: "copying variable is unneeded", + SuggestedFixes: []analysis.SuggestedFix{{ + Message: "Remove unneeded redeclaration", + TextEdits: []analysis.TextEdit{remove}, + }}, + } + pass.Report(diag) + } + } +} + +// if the first statement is var := var, return var and the stmt +func loopVarRedecl(body *ast.BlockStmt) (*ast.Ident, *ast.AssignStmt) { + if len(body.List) < 1 { + return nil, nil + } + stmt, ok := body.List[0].(*ast.AssignStmt) + if !ok || !isSimpleAssign(stmt) || stmt.Tok != token.DEFINE { + return nil, nil + } + if _, ok := stmt.Lhs[0].(*ast.Ident); !ok { + return nil, nil + } + if _, ok := stmt.Rhs[0].(*ast.Ident); !ok { + return nil, nil + } + if stmt.Lhs[0].(*ast.Ident).Name == stmt.Rhs[0].(*ast.Ident).Name { + return stmt.Lhs[0].(*ast.Ident), stmt + } + return nil, nil +} diff --git a/gopls/internal/analysis/modernize/maps.go b/gopls/internal/analysis/modernize/maps.go index 5577978278c..1a5e2c3eeee 100644 --- a/gopls/internal/analysis/modernize/maps.go +++ b/gopls/internal/analysis/modernize/maps.go @@ -156,16 +156,35 @@ func mapsloop(pass *analysis.Pass) { start, end token.Pos ) if mrhs != nil { - // Replace RHS of preceding m=... assignment (and loop) with expression. - start, end = mrhs.Pos(), rng.End() - newText = fmt.Appendf(nil, "%s%s(%s)", + // Replace assignment and loop with expression. + // + // m = make(...) 
+ // for k, v := range x { /* comments */ m[k] = v } + // + // -> + // + // /* comments */ + // m = maps.Copy(x) + curPrev, _ := curRange.PrevSibling() + start, end = curPrev.Node().Pos(), rng.End() + newText = fmt.Appendf(nil, "%s%s = %s%s(%s)", + allComments(file, start, end), + analysisinternal.Format(pass.Fset, m), prefix, funcName, analysisinternal.Format(pass.Fset, x)) } else { // Replace loop with call statement. + // + // for k, v := range x { /* comments */ m[k] = v } + // + // -> + // + // /* comments */ + // maps.Copy(m, x) start, end = rng.Pos(), rng.End() - newText = fmt.Appendf(nil, "%s%s(%s, %s)", + newText = fmt.Appendf(nil, "%s%s%s(%s, %s)", + allComments(file, start, end), prefix, funcName, analysisinternal.Format(pass.Fset, m), diff --git a/gopls/internal/analysis/modernize/minmax.go b/gopls/internal/analysis/modernize/minmax.go index 8888383afec..0e43ee11c3d 100644 --- a/gopls/internal/analysis/modernize/minmax.go +++ b/gopls/internal/analysis/modernize/minmax.go @@ -9,12 +9,14 @@ import ( "go/ast" "go/token" "go/types" + "strings" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/internal/analysisinternal" "golang.org/x/tools/internal/astutil/cursor" + "golang.org/x/tools/internal/typeparams" ) // The minmax pass replaces if/else statements with calls to min or max. @@ -24,6 +26,10 @@ import ( // 1. if a < b { x = a } else { x = b } => x = min(a, b) // 2. x = a; if a < b { x = b } => x = max(a, b) // +// Pattern 1 requires that a is not NaN, and pattern 2 requires that b +// is not NaN. Since this is hard to prove, we reject floating-point +// numbers. 
+// // Variants: // - all four ordered comparisons // - "x := a" or "x = a" or "var x = a" in pattern 2 @@ -32,7 +38,7 @@ func minmax(pass *analysis.Pass) { // check is called for all statements of this form: // if a < b { lhs = rhs } - check := func(curIfStmt cursor.Cursor, compare *ast.BinaryExpr) { + check := func(file *ast.File, curIfStmt cursor.Cursor, compare *ast.BinaryExpr) { var ( ifStmt = curIfStmt.Node().(*ast.IfStmt) tassign = ifStmt.Body.List[0].(*ast.AssignStmt) @@ -85,7 +91,8 @@ func minmax(pass *analysis.Pass) { // Replace IfStmt with lhs = min(a, b). Pos: ifStmt.Pos(), End: ifStmt.End(), - NewText: fmt.Appendf(nil, "%s = %s(%s, %s)", + NewText: fmt.Appendf(nil, "%s%s = %s(%s, %s)", + allComments(file, ifStmt.Pos(), ifStmt.End()), analysisinternal.Format(pass.Fset, lhs), sym, analysisinternal.Format(pass.Fset, a), @@ -144,10 +151,13 @@ func minmax(pass *analysis.Pass) { SuggestedFixes: []analysis.SuggestedFix{{ Message: fmt.Sprintf("Replace if/else with %s", sym), TextEdits: []analysis.TextEdit{{ - // Replace rhs0 and IfStmt with min(a, b) - Pos: rhs0.Pos(), + Pos: fassign.Pos(), End: ifStmt.End(), - NewText: fmt.Appendf(nil, "%s(%s, %s)", + // Replace "x := a; if ... {}" with "x = min(...)", preserving comments. + NewText: fmt.Appendf(nil, "%s %s %s %s(%s, %s)", + allComments(file, fassign.Pos(), ifStmt.End()), + analysisinternal.Format(pass.Fset, lhs), + fassign.Tok.String(), sym, analysisinternal.Format(pass.Fset, a), analysisinternal.Format(pass.Fset, b)), @@ -159,23 +169,35 @@ func minmax(pass *analysis.Pass) { } // Find all "if a < b { lhs = rhs }" statements. 
+ info := pass.TypesInfo inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - for curFile := range filesUsing(inspect, pass.TypesInfo, "go1.21") { + for curFile := range filesUsing(inspect, info, "go1.21") { + astFile := curFile.Node().(*ast.File) for curIfStmt := range curFile.Preorder((*ast.IfStmt)(nil)) { ifStmt := curIfStmt.Node().(*ast.IfStmt) - if compare, ok := ifStmt.Cond.(*ast.BinaryExpr); ok && ifStmt.Init == nil && isInequality(compare.Op) != 0 && isAssignBlock(ifStmt.Body) { - - // Have: if a < b { lhs = rhs } - check(curIfStmt, compare) + // a blank var has no type. + if tLHS := info.TypeOf(ifStmt.Body.List[0].(*ast.AssignStmt).Lhs[0]); tLHS != nil && !maybeNaN(tLHS) { + // Have: if a < b { lhs = rhs } + check(astFile, curIfStmt, compare) + } } } } } +// allComments collects all the comments from start to end. +func allComments(file *ast.File, start, end token.Pos) string { + var buf strings.Builder + for co := range analysisinternal.Comments(file, start, end) { + _, _ = fmt.Fprintf(&buf, "%s\n", co.Text) + } + return buf.String() +} + // isInequality reports non-zero if tok is one of < <= => >: // +1 for > and -1 for <. func isInequality(tok token.Token) int { @@ -206,6 +228,21 @@ func isSimpleAssign(n ast.Node) bool { len(assign.Rhs) == 1 } +// maybeNaN reports whether t is (or may be) a floating-point type. +func maybeNaN(t types.Type) bool { + // For now, we rely on core types. + // TODO(adonovan): In the post-core-types future, + // follow the approach of types.Checker.applyTypeFunc. 
+ t = typeparams.CoreType(t) + if t == nil { + return true // fail safe + } + if basic, ok := t.(*types.Basic); ok && basic.Info()&types.IsFloat != 0 { + return true + } + return false +} + // -- utils -- func is[T any](x any) bool { diff --git a/gopls/internal/analysis/modernize/modernize.go b/gopls/internal/analysis/modernize/modernize.go index 96e8b325df4..b7e943a0c51 100644 --- a/gopls/internal/analysis/modernize/modernize.go +++ b/gopls/internal/analysis/modernize/modernize.go @@ -7,11 +7,13 @@ package modernize import ( _ "embed" "go/ast" + "go/constant" "go/format" "go/token" "go/types" "iter" "regexp" + "slices" "strings" "golang.org/x/tools/go/analysis" @@ -20,6 +22,7 @@ import ( "golang.org/x/tools/gopls/internal/util/astutil" "golang.org/x/tools/gopls/internal/util/moreiters" "golang.org/x/tools/internal/analysisinternal" + typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" "golang.org/x/tools/internal/astutil/cursor" "golang.org/x/tools/internal/stdlib" "golang.org/x/tools/internal/versions" @@ -31,11 +34,20 @@ var doc string var Analyzer = &analysis.Analyzer{ Name: "modernize", Doc: analysisinternal.MustExtractDoc(doc, "modernize"), - Requires: []*analysis.Analyzer{inspect.Analyzer}, + Requires: []*analysis.Analyzer{inspect.Analyzer, typeindexanalyzer.Analyzer}, Run: run, URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/modernize", } +// Stopgap until general solution in CL 655555 lands. A change to the +// cmd/vet CLI requires a proposal whereas a change to an analyzer's +// flag set does not. +var category string + +func init() { + Analyzer.Flags.StringVar(&category, "category", "", "comma-separated list of categories to apply; with a leading '-', a list of categories to ignore") +} + func run(pass *analysis.Pass) (any, error) { // Decorate pass.Report to suppress diagnostics in generated files. 
// @@ -55,6 +67,10 @@ func run(pass *analysis.Pass) (any, error) { if diag.Category == "" { panic("Diagnostic.Category is unset") } + // TODO(adonovan): stopgap until CL 655555 lands. + if !enabledCategory(category, diag.Category) { + return + } if _, ok := generated[pass.Fset.File(diag.Pos)]; ok { return // skip checking if it's generated code } @@ -66,24 +82,20 @@ func run(pass *analysis.Pass) (any, error) { bloop(pass) efaceany(pass) fmtappendf(pass) + forvar(pass) mapsloop(pass) minmax(pass) omitzero(pass) rangeint(pass) slicescontains(pass) slicesdelete(pass) + stringscutprefix(pass) stringsseq(pass) sortslice(pass) testingContext(pass) + waitgroup(pass) - // TODO(adonovan): - // - more modernizers here; see #70815. - // - opt: interleave these micro-passes within a single inspection. - // - solve the "duplicate import" problem (#68765) when a number of - // fixes in the same file are applied in parallel and all add - // the same import. The tests exhibit the problem. - // - should all diagnostics be of the form "x can be modernized by y" - // or is that a foolish consistency? + // TODO(adonovan): opt: interleave these micro-passes within a single inspection. return nil, nil } @@ -108,14 +120,22 @@ func formatExprs(fset *token.FileSet, exprs []ast.Expr) string { return buf.String() } -// isZeroLiteral reports whether e is the literal 0. -func isZeroLiteral(e ast.Expr) bool { - lit, ok := e.(*ast.BasicLit) - return ok && lit.Kind == token.INT && lit.Value == "0" +// isZeroIntLiteral reports whether e is an integer whose value is 0. +func isZeroIntLiteral(info *types.Info, e ast.Expr) bool { + return isIntLiteral(info, e, 0) +} + +// isIntLiteral reports whether e is an integer with given value. +func isIntLiteral(info *types.Info, e ast.Expr, n int64) bool { + return info.Types[e].Value == constant.MakeInt64(n) } // filesUsing returns a cursor for each *ast.File in the inspector // that uses at least the specified version of Go (e.g. "go1.24"). 
+// +// TODO(adonovan): opt: eliminate this function, instead following the +// approach of [fmtappendf], which uses typeindex and [fileUses]. +// See "Tip" at [fileUses] for motivation. func filesUsing(inspect *inspector.Inspector, info *types.Info, version string) iter.Seq[cursor.Cursor] { return func(yield func(cursor.Cursor) bool) { for curFile := range cursor.Root(inspect).Children() { @@ -127,6 +147,25 @@ func filesUsing(inspect *inspector.Inspector, info *types.Info, version string) } } +// fileUses reports whether the specified file uses at least the +// specified version of Go (e.g. "go1.24"). +// +// Tip: we recommend using this check "late", just before calling +// pass.Report, rather than "early" (when entering each ast.File, or +// each candidate node of interest, during the traversal), because the +// operation is not free, yet is not a highly selective filter: the +// fraction of files that pass most version checks is high and +// increases over time. +func fileUses(info *types.Info, file *ast.File, version string) bool { + return !versions.Before(info.FileVersions[file], version) +} + +// enclosingFile returns the syntax tree for the file enclosing c. +func enclosingFile(c cursor.Cursor) *ast.File { + c, _ = moreiters.First(c.Enclosing((*ast.File)(nil))) + return c.Node().(*ast.File) +} + // within reports whether the current pass is analyzing one of the // specified standard packages or their dependencies. func within(pass *analysis.Pass, pkgs ...string) bool { @@ -159,3 +198,60 @@ var ( byteSliceType = types.NewSlice(types.Typ[types.Byte]) omitemptyRegex = regexp.MustCompile(`(?:^json| json):"[^"]*(,omitempty)(?:"|,[^"]*")\s?`) ) + +// enabledCategory reports whether a given category is enabled by the specified +// filter. filter is a comma-separated list of categories, optionally prefixed +// with `-` to disable all provided categories. All categories are enabled with +// an empty filter. +// +// (Will be superseded by https://go.dev/cl/655555.) 
+func enabledCategory(filter, category string) bool { + if filter == "" { + return true + } + // negation must be specified at the start + filter, exclude := strings.CutPrefix(filter, "-") + filters := strings.Split(filter, ",") + if slices.Contains(filters, category) { + return !exclude + } + return exclude +} + +// noEffects reports whether the expression has no side effects, i.e., it +// does not modify the memory state. This function is conservative: it may +// return false even when the expression has no effect. +func noEffects(info *types.Info, expr ast.Expr) bool { + noEffects := true + ast.Inspect(expr, func(n ast.Node) bool { + switch v := n.(type) { + case nil, *ast.Ident, *ast.BasicLit, *ast.BinaryExpr, *ast.ParenExpr, + *ast.SelectorExpr, *ast.IndexExpr, *ast.SliceExpr, *ast.TypeAssertExpr, + *ast.StarExpr, *ast.CompositeLit, *ast.ArrayType, *ast.StructType, + *ast.MapType, *ast.InterfaceType, *ast.KeyValueExpr: + // No effect + case *ast.UnaryExpr: + // Channel send <-ch has effects + if v.Op == token.ARROW { + noEffects = false + } + case *ast.CallExpr: + // Type conversion has no effects + if !info.Types[v].IsType() { + // TODO(adonovan): Add a case for built-in functions without side + // effects (by using callsPureBuiltin from tools/internal/refactor/inline) + + noEffects = false + } + case *ast.FuncLit: + // A FuncLit has no effects, but do not descend into it. 
+ return false + default: + // All other expressions have effects + noEffects = false + } + + return noEffects + }) + return noEffects +} diff --git a/gopls/internal/analysis/modernize/modernize_test.go b/gopls/internal/analysis/modernize/modernize_test.go index 7bdc8014389..e823e983995 100644 --- a/gopls/internal/analysis/modernize/modernize_test.go +++ b/gopls/internal/analysis/modernize/modernize_test.go @@ -17,15 +17,19 @@ func Test(t *testing.T) { "bloop", "efaceany", "fmtappendf", + "forvar", "mapsloop", "minmax", "omitzero", "rangeint", "slicescontains", "slicesdelete", + "stringscutprefix", + "stringscutprefix/bytescutprefix", "splitseq", "fieldsseq", "sortslice", "testingcontext", + "waitgroup", ) } diff --git a/gopls/internal/analysis/modernize/rangeint.go b/gopls/internal/analysis/modernize/rangeint.go index 2921bbb3468..1d3f4b5db0c 100644 --- a/gopls/internal/analysis/modernize/rangeint.go +++ b/gopls/internal/analysis/modernize/rangeint.go @@ -15,8 +15,11 @@ import ( "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" "golang.org/x/tools/internal/analysisinternal" + typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" "golang.org/x/tools/internal/astutil/cursor" "golang.org/x/tools/internal/astutil/edge" + "golang.org/x/tools/internal/typesinternal" + "golang.org/x/tools/internal/typesinternal/typeindex" ) // rangeint offers a fix to replace a 3-clause 'for' loop: @@ -31,8 +34,6 @@ import ( // - The ':=' may be replaced by '='. // - The fix may remove "i :=" if it would become unused. // -// TODO(adonovan): permit variants such as "i := int64(0)". -// // Restrictions: // - The variable i must not be assigned or address-taken within the // loop, because a "for range int" loop does not respect assignments @@ -40,13 +41,23 @@ import ( // - The limit must not be b.N, to avoid redundancy with bloop's fixes. 
// // Caveats: -// - The fix will cause the limit expression to be evaluated exactly -// once, instead of once per iteration. The limit may be a function call -// (e.g. seq.Len()). The fix may change the cardinality of side effects. +// +// The fix causes the limit expression to be evaluated exactly once, +// instead of once per iteration. So, to avoid changing the +// cardinality of side effects, the limit expression must not involve +// function calls (e.g. seq.Len()) or channel receives. Moreover, the +// value of the limit expression must be loop invariant, which in +// practice means it must take one of the following forms: +// +// - a local variable that is assigned only once and not address-taken; +// - a constant; or +// - len(s), where s has the above properties. func rangeint(pass *analysis.Pass) { info := pass.TypesInfo inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + typeindex := pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index) + for curFile := range filesUsing(inspect, info, "go1.22") { nextLoop: for curLoop := range curFile.Preorder((*ast.ForStmt)(nil)) { @@ -54,7 +65,7 @@ func rangeint(pass *analysis.Pass) { if init, ok := loop.Init.(*ast.AssignStmt); ok && isSimpleAssign(init) && is[*ast.Ident](init.Lhs[0]) && - isZeroLiteral(init.Rhs[0]) { + isZeroIntLiteral(info, init.Rhs[0]) { // Have: for i = 0; ... (or i := 0) index := init.Lhs[0].(*ast.Ident) @@ -62,13 +73,41 @@ func rangeint(pass *analysis.Pass) { compare.Op == token.LSS && equalSyntax(compare.X, init.Lhs[0]) { // Have: for i = 0; i < limit; ... {} + limit := compare.Y - // Skip loops up to b.N in benchmarks; see [bloop]. - if sel, ok := limit.(*ast.SelectorExpr); ok && - sel.Sel.Name == "N" && - analysisinternal.IsPointerToNamed(info.TypeOf(sel.X), "testing", "B") { - continue // skip b.N + // If limit is "len(slice)", simplify it to "slice". 
+ // + // (Don't replace "for i := 0; i < len(map); i++" + // with "for range m" because it's too hard to prove + // that len(m) is loop-invariant). + if call, ok := limit.(*ast.CallExpr); ok && + typeutil.Callee(info, call) == builtinLen && + is[*types.Slice](info.TypeOf(call.Args[0]).Underlying()) { + limit = call.Args[0] + } + + // Check the form of limit: must be a constant, + // or a local var that is not assigned or address-taken. + limitOK := false + if info.Types[limit].Value != nil { + limitOK = true // constant + } else if id, ok := limit.(*ast.Ident); ok { + if v, ok := info.Uses[id].(*types.Var); ok && + !(v.Exported() && typesinternal.IsPackageLevel(v)) { + // limit is a local or unexported global var. + // (An exported global may have uses we can't see.) + for cur := range typeindex.Uses(v) { + if isScalarLvalue(info, cur) { + // Limit var is assigned or address-taken. + continue nextLoop + } + } + limitOK = true + } + } + if !limitOK { + continue nextLoop } if inc, ok := loop.Post.(*ast.IncDecStmt); ok && @@ -87,7 +126,7 @@ func rangeint(pass *analysis.Pass) { // Reject if any is an l-value (assigned or address-taken): // a "for range int" loop does not respect assignments to // the loop variable. - if isScalarLvalue(curId) { + if isScalarLvalue(info, curId) { continue nextLoop } } @@ -137,11 +176,19 @@ func rangeint(pass *analysis.Pass) { // re-type check the expression to detect this case. var beforeLimit, afterLimit string if v := info.Types[limit].Value; v != nil { - beforeLimit, afterLimit = "int(", ")" + tVar := info.TypeOf(init.Rhs[0]) + + // TODO(adonovan): use a types.Qualifier that respects the existing + // imports of this file that are visible (not shadowed) at the current position, + // and adds new imports as needed, similar to analysisinternal.AddImport. 
+ // (Unfortunately types.Qualifier doesn't provide the name of the package + // member to be qualified, a qualifier cannot perform the necessary shadowing + // check for dot-imported names.) + beforeLimit, afterLimit = fmt.Sprintf("%s(", types.TypeString(tVar, types.RelativeTo(pass.Pkg))), ")" info2 := &types.Info{Types: make(map[ast.Expr]types.TypeAndValue)} if types.CheckExpr(pass.Fset, pass.Pkg, limit.Pos(), limit, info2) == nil { tLimit := types.Default(info2.TypeOf(limit)) - if types.AssignableTo(tLimit, types.Typ[types.Int]) { + if types.AssignableTo(tLimit, tVar) { beforeLimit, afterLimit = "", "" } } @@ -199,7 +246,7 @@ func rangeint(pass *analysis.Pass) { // // This function is valid only for scalars (x = ...), // not for aggregates (x.a[i] = ...) -func isScalarLvalue(curId cursor.Cursor) bool { +func isScalarLvalue(info *types.Info, curId cursor.Cursor) bool { // Unfortunately we can't simply use info.Types[e].Assignable() // as it is always true for a variable even when that variable is // used only as an r-value. So we must inspect enclosing syntax. @@ -207,15 +254,22 @@ func isScalarLvalue(curId cursor.Cursor) bool { cur := curId // Strip enclosing parens. 
- ek, _ := cur.Edge() + ek, _ := cur.ParentEdge() for ek == edge.ParenExpr_X { cur = cur.Parent() - ek, _ = cur.Edge() + ek, _ = cur.ParentEdge() } switch ek { case edge.AssignStmt_Lhs: - return true // i = j + assign := cur.Parent().Node().(*ast.AssignStmt) + if assign.Tok != token.DEFINE { + return true // i = j or i += j + } + id := curId.Node().(*ast.Ident) + if v, ok := info.Defs[id]; ok && v.Pos() != id.Pos() { + return true // reassignment of i (i, j := 1, 2) + } case edge.IncDecStmt_X: return true // i++, i-- case edge.UnaryExpr_X: diff --git a/gopls/internal/analysis/modernize/slices.go b/gopls/internal/analysis/modernize/slices.go index 7e0d9cbd92e..18e02d51ebf 100644 --- a/gopls/internal/analysis/modernize/slices.go +++ b/gopls/internal/analysis/modernize/slices.go @@ -210,13 +210,16 @@ func appendclipped(pass *analysis.Pass) { // x[:len(x):len(x)] (nonempty) res=x // x[:k:k] (nonempty) // slices.Clip(x) (nonempty) res=x +// +// TODO(adonovan): Add a check that the expression x has no side effects in +// case x[:len(x):len(x)] -> x. Now the program behavior may change. 
func clippedSlice(info *types.Info, e ast.Expr) (res ast.Expr, empty bool) { switch e := e.(type) { case *ast.SliceExpr: // x[:0:0], x[:len(x):len(x)], x[:k:k] if e.Slice3 && e.High != nil && e.Max != nil && equalSyntax(e.High, e.Max) { // x[:k:k] res = e - empty = isZeroLiteral(e.High) // x[:0:0] + empty = isZeroIntLiteral(info, e.High) // x[:0:0] if call, ok := e.High.(*ast.CallExpr); ok && typeutil.Callee(info, call) == builtinLen && equalSyntax(call.Args[0], e.X) { diff --git a/gopls/internal/analysis/modernize/slicescontains.go b/gopls/internal/analysis/modernize/slicescontains.go index b59ea452a0f..e99474df6ab 100644 --- a/gopls/internal/analysis/modernize/slicescontains.go +++ b/gopls/internal/analysis/modernize/slicescontains.go @@ -15,8 +15,10 @@ import ( "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" "golang.org/x/tools/internal/analysisinternal" + typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" "golang.org/x/tools/internal/astutil/cursor" "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/internal/typesinternal/typeindex" ) // The slicescontains pass identifies loops that can be replaced by a @@ -46,6 +48,9 @@ import ( // It may change cardinality of effects of the "needle" expression. // (Mostly this appears to be a desirable optimization, avoiding // redundantly repeated evaluation.) +// +// TODO(adonovan): Add a check that needle/predicate expression from +// if-statement has no effects. Now the program behavior may change. func slicescontains(pass *analysis.Pass) { // Skip the analyzer in packages where its // fixes would create an import cycle. 
@@ -53,7 +58,11 @@ func slicescontains(pass *analysis.Pass) { return } - info := pass.TypesInfo + var ( + inspect = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index) + info = pass.TypesInfo + ) // check is called for each RangeStmt of this form: // for i, elem := range s { if cond { ... } } @@ -141,8 +150,8 @@ func slicescontains(pass *analysis.Pass) { if !ok { panic(fmt.Sprintf("FindNode(%T) failed", n)) } - return uses(info, cur, info.Defs[rng.Key.(*ast.Ident)]) || - rng.Value != nil && uses(info, cur, info.Defs[rng.Value.(*ast.Ident)]) + return uses(index, cur, info.Defs[rng.Key.(*ast.Ident)]) || + rng.Value != nil && uses(index, cur, info.Defs[rng.Value.(*ast.Ident)]) } if usesRangeVar(body) { // Body uses range var "i" or "elem". @@ -346,7 +355,6 @@ func slicescontains(pass *analysis.Pass) { } } - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) for curFile := range filesUsing(inspect, info, "go1.21") { file := curFile.Node().(*ast.File) diff --git a/gopls/internal/analysis/modernize/slicesdelete.go b/gopls/internal/analysis/modernize/slicesdelete.go index 3c3d880f62b..493009c35be 100644 --- a/gopls/internal/analysis/modernize/slicesdelete.go +++ b/gopls/internal/analysis/modernize/slicesdelete.go @@ -94,7 +94,7 @@ func slicesdelete(pass *analysis.Pass) { slice2, ok2 := call.Args[1].(*ast.SliceExpr) if ok1 && slice1.Low == nil && !slice1.Slice3 && ok2 && slice2.High == nil && !slice2.Slice3 && - equalSyntax(slice1.X, slice2.X) && + equalSyntax(slice1.X, slice2.X) && noEffects(info, slice1.X) && increasingSliceIndices(info, slice1.High, slice2.Low) { // Have append(s[:a], s[b:]...) where we can verify a < b. 
report(file, call, slice1, slice2) diff --git a/gopls/internal/analysis/modernize/sortslice.go b/gopls/internal/analysis/modernize/sortslice.go index 0437aaf2f67..bbd04e9293d 100644 --- a/gopls/internal/analysis/modernize/sortslice.go +++ b/gopls/internal/analysis/modernize/sortslice.go @@ -10,10 +10,9 @@ import ( "go/types" "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/go/types/typeutil" "golang.org/x/tools/internal/analysisinternal" + typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" + "golang.org/x/tools/internal/typesinternal/typeindex" ) // The sortslice pass replaces sort.Slice(slice, less) with @@ -42,14 +41,13 @@ func sortslice(pass *analysis.Pass) { return } - info := pass.TypesInfo - - check := func(file *ast.File, call *ast.CallExpr) { - // call to sort.Slice? - obj := typeutil.Callee(info, call) - if !analysisinternal.IsFunctionNamed(obj, "sort", "Slice") { - return - } + var ( + info = pass.TypesInfo + index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index) + sortSlice = index.Object("sort", "Slice") + ) + for curCall := range index.Calls(sortSlice) { + call := curCall.Node().(*ast.CallExpr) if lit, ok := call.Args[1].(*ast.FuncLit); ok && len(lit.Body.List) == 1 { sig := info.Types[lit.Type].Type.(*types.Signature) @@ -68,7 +66,9 @@ func sortslice(pass *analysis.Pass) { is[*ast.Ident](index.Index) && info.Uses[index.Index.(*ast.Ident)] == v } - if isIndex(compare.X, i) && isIndex(compare.Y, j) { + file := enclosingFile(curCall) + if isIndex(compare.X, i) && isIndex(compare.Y, j) && + fileUses(info, file, "go1.21") { // Have: sort.Slice(s, func(i, j int) bool { return s[i] < s[j] }) _, prefix, importEdits := analysisinternal.AddImport( @@ -102,14 +102,4 @@ func sortslice(pass *analysis.Pass) { } } } - - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - for curFile := range filesUsing(inspect, info, 
"go1.21") { - file := curFile.Node().(*ast.File) - - for curCall := range curFile.Preorder((*ast.CallExpr)(nil)) { - call := curCall.Node().(*ast.CallExpr) - check(file, call) - } - } } diff --git a/gopls/internal/analysis/modernize/stringscutprefix.go b/gopls/internal/analysis/modernize/stringscutprefix.go new file mode 100644 index 00000000000..cd053539910 --- /dev/null +++ b/gopls/internal/analysis/modernize/stringscutprefix.go @@ -0,0 +1,205 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package modernize + +import ( + "fmt" + "go/ast" + "go/token" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/analysisinternal" + typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" + "golang.org/x/tools/internal/typesinternal/typeindex" +) + +// stringscutprefix offers a fix to replace an if statement which +// calls to the 2 patterns below with strings.CutPrefix. +// +// Patterns: +// +// 1. if strings.HasPrefix(s, pre) { use(strings.TrimPrefix(s, pre) } +// => +// if after, ok := strings.CutPrefix(s, pre); ok { use(after) } +// +// 2. if after := strings.TrimPrefix(s, pre); after != s { use(after) } +// => +// if after, ok := strings.CutPrefix(s, pre); ok { use(after) } +// +// The use must occur within the first statement of the block, and the offered fix +// only replaces the first occurrence of strings.TrimPrefix. +// +// Variants: +// - bytes.HasPrefix usage as pattern 1. 
+func stringscutprefix(pass *analysis.Pass) { + var ( + inspect = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index) + info = pass.TypesInfo + + stringsTrimPrefix = index.Object("strings", "TrimPrefix") + bytesTrimPrefix = index.Object("bytes", "TrimPrefix") + ) + if !index.Used(stringsTrimPrefix, bytesTrimPrefix) { + return + } + + const ( + category = "stringscutprefix" + fixedMessage = "Replace HasPrefix/TrimPrefix with CutPrefix" + ) + + for curFile := range filesUsing(inspect, pass.TypesInfo, "go1.20") { + for curIfStmt := range curFile.Preorder((*ast.IfStmt)(nil)) { + ifStmt := curIfStmt.Node().(*ast.IfStmt) + + // pattern1 + if call, ok := ifStmt.Cond.(*ast.CallExpr); ok && len(ifStmt.Body.List) > 0 { + obj := typeutil.Callee(info, call) + if !analysisinternal.IsFunctionNamed(obj, "strings", "HasPrefix") && + !analysisinternal.IsFunctionNamed(obj, "bytes", "HasPrefix") { + continue + } + + // Replace the first occurrence of strings.TrimPrefix(s, pre) in the first statement only, + // but not later statements in case s or pre are modified by intervening logic. + firstStmt := curIfStmt.Child(ifStmt.Body).Child(ifStmt.Body.List[0]) + for curCall := range firstStmt.Preorder((*ast.CallExpr)(nil)) { + call1 := curCall.Node().(*ast.CallExpr) + obj1 := typeutil.Callee(info, call1) + // bytesTrimPrefix or stringsTrimPrefix might be nil if the file doesn't import it, + // so we need to ensure the obj1 is not nil otherwise the call1 is not TrimPrefix and cause a panic. + if obj1 == nil || + obj1 != stringsTrimPrefix && obj1 != bytesTrimPrefix { + continue + } + // Have: if strings.HasPrefix(s0, pre0) { ...strings.TrimPrefix(s, pre)... } + var ( + s0 = call.Args[0] + pre0 = call.Args[1] + s = call1.Args[0] + pre = call1.Args[1] + ) + + // check whether the obj1 uses the exact the same argument with strings.HasPrefix + // shadow variables won't be valid because we only access the first statement. 
+ if equalSyntax(s0, s) && equalSyntax(pre0, pre) { + after := analysisinternal.FreshName(info.Scopes[ifStmt], ifStmt.Pos(), "after") + _, prefix, importEdits := analysisinternal.AddImport( + info, + curFile.Node().(*ast.File), + obj1.Pkg().Name(), + obj1.Pkg().Path(), + "CutPrefix", + call.Pos(), + ) + okVarName := analysisinternal.FreshName(info.Scopes[ifStmt], ifStmt.Pos(), "ok") + pass.Report(analysis.Diagnostic{ + // highlight at HasPrefix call. + Pos: call.Pos(), + End: call.End(), + Category: category, + Message: "HasPrefix + TrimPrefix can be simplified to CutPrefix", + SuggestedFixes: []analysis.SuggestedFix{{ + Message: fixedMessage, + // if strings.HasPrefix(s, pre) { use(strings.TrimPrefix(s, pre)) } + // ------------ ----------------- ----- -------------------------- + // if after, ok := strings.CutPrefix(s, pre); ok { use(after) } + TextEdits: append(importEdits, []analysis.TextEdit{ + { + Pos: call.Fun.Pos(), + End: call.Fun.Pos(), + NewText: []byte(fmt.Sprintf("%s, %s :=", after, okVarName)), + }, + { + Pos: call.Fun.Pos(), + End: call.Fun.End(), + NewText: fmt.Appendf(nil, "%sCutPrefix", prefix), + }, + { + Pos: call.End(), + End: call.End(), + NewText: []byte(fmt.Sprintf("; %s ", okVarName)), + }, + { + Pos: call1.Pos(), + End: call1.End(), + NewText: []byte(after), + }, + }...), + }}}, + ) + break + } + } + } + + // pattern2 + if bin, ok := ifStmt.Cond.(*ast.BinaryExpr); ok && + bin.Op == token.NEQ && + ifStmt.Init != nil && + isSimpleAssign(ifStmt.Init) { + assign := ifStmt.Init.(*ast.AssignStmt) + if call, ok := assign.Rhs[0].(*ast.CallExpr); ok && assign.Tok == token.DEFINE { + lhs := assign.Lhs[0] + obj := typeutil.Callee(info, call) + if obj == stringsTrimPrefix && + (equalSyntax(lhs, bin.X) && equalSyntax(call.Args[0], bin.Y) || + (equalSyntax(lhs, bin.Y) && equalSyntax(call.Args[0], bin.X))) { + okVarName := analysisinternal.FreshName(info.Scopes[ifStmt], ifStmt.Pos(), "ok") + // Have one of: + // if rest := TrimPrefix(s, prefix); rest != 
s { + // if rest := TrimPrefix(s, prefix); s != rest { + + // We use AddImport not to add an import (since it exists already) + // but to compute the correct prefix in the dot-import case. + _, prefix, importEdits := analysisinternal.AddImport( + info, + curFile.Node().(*ast.File), + obj.Pkg().Name(), + obj.Pkg().Path(), + "CutPrefix", + call.Pos(), + ) + + pass.Report(analysis.Diagnostic{ + // highlight from the init and the condition end. + Pos: ifStmt.Init.Pos(), + End: ifStmt.Cond.End(), + Category: category, + Message: "TrimPrefix can be simplified to CutPrefix", + SuggestedFixes: []analysis.SuggestedFix{{ + Message: fixedMessage, + // if x := strings.TrimPrefix(s, pre); x != s ... + // ---- ---------- ------ + // if x, ok := strings.CutPrefix (s, pre); ok ... + TextEdits: append(importEdits, []analysis.TextEdit{ + { + Pos: assign.Lhs[0].End(), + End: assign.Lhs[0].End(), + NewText: fmt.Appendf(nil, ", %s", okVarName), + }, + { + Pos: call.Fun.Pos(), + End: call.Fun.End(), + NewText: fmt.Appendf(nil, "%sCutPrefix", prefix), + }, + { + Pos: ifStmt.Cond.Pos(), + End: ifStmt.Cond.End(), + NewText: []byte(okVarName), + }, + }...), + }}, + }) + } + } + } + } + } +} diff --git a/gopls/internal/analysis/modernize/stringsseq.go b/gopls/internal/analysis/modernize/stringsseq.go index ca9d918912e..d32f8be754f 100644 --- a/gopls/internal/analysis/modernize/stringsseq.go +++ b/gopls/internal/analysis/modernize/stringsseq.go @@ -14,8 +14,9 @@ import ( "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/internal/analysisinternal" + typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" "golang.org/x/tools/internal/astutil/edge" + "golang.org/x/tools/internal/typesinternal/typeindex" ) // stringsseq offers a fix to replace a call to strings.Split with @@ -33,12 +34,20 @@ import ( // - bytes.SplitSeq // - bytes.FieldsSeq func stringsseq(pass *analysis.Pass) { - 
if !analysisinternal.Imports(pass.Pkg, "strings") && - !analysisinternal.Imports(pass.Pkg, "bytes") { + var ( + inspect = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index) + info = pass.TypesInfo + + stringsSplit = index.Object("strings", "Split") + stringsFields = index.Object("strings", "Fields") + bytesSplit = index.Object("bytes", "Split") + bytesFields = index.Object("bytes", "Fields") + ) + if !index.Used(stringsSplit, stringsFields, bytesSplit, bytesFields) { return } - info := pass.TypesInfo - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + for curFile := range filesUsing(inspect, info, "go1.24") { for curRange := range curFile.Preorder((*ast.RangeStmt)(nil)) { rng := curRange.Node().(*ast.RangeStmt) @@ -55,14 +64,14 @@ func stringsseq(pass *analysis.Pass) { if !ok { if id, ok := rng.X.(*ast.Ident); ok { if v, ok := info.Uses[id].(*types.Var); ok { - if ek, idx := curRange.Edge(); ek == edge.BlockStmt_List && idx > 0 { + if ek, idx := curRange.ParentEdge(); ek == edge.BlockStmt_List && idx > 0 { curPrev, _ := curRange.PrevSibling() if assign, ok := curPrev.Node().(*ast.AssignStmt); ok && assign.Tok == token.DEFINE && len(assign.Lhs) == 1 && len(assign.Rhs) == 1 && info.Defs[assign.Lhs[0].(*ast.Ident)] == v && - soleUse(info, v) == id { + soleUseIs(index, v, id) { // Have: // lines := ... 
// for _, line := range lines {...} @@ -96,9 +105,8 @@ func stringsseq(pass *analysis.Pass) { continue } - obj := typeutil.Callee(info, call) - if analysisinternal.IsFunctionNamed(obj, "strings", "Split", "Fields") || - analysisinternal.IsFunctionNamed(obj, "bytes", "Split", "Fields") { + switch obj := typeutil.Callee(info, call); obj { + case stringsSplit, stringsFields, bytesSplit, bytesFields: oldFnName := obj.Name() seqFnName := fmt.Sprintf("%sSeq", oldFnName) pass.Report(analysis.Diagnostic{ diff --git a/gopls/internal/analysis/modernize/testdata/src/fmtappendf/fmtappendf.go b/gopls/internal/analysis/modernize/testdata/src/fmtappendf/fmtappendf.go index a39a03ee786..a435b6a6461 100644 --- a/gopls/internal/analysis/modernize/testdata/src/fmtappendf/fmtappendf.go +++ b/gopls/internal/analysis/modernize/testdata/src/fmtappendf/fmtappendf.go @@ -29,8 +29,8 @@ func typealias() { } func otherprints() { - sprint := []byte(fmt.Sprint("bye %d", 1)) // want "Replace .*Sprintf.* with fmt.Appendf" + sprint := []byte(fmt.Sprint("bye %d", 1)) // want "Replace .*Sprint.* with fmt.Append" print(sprint) - sprintln := []byte(fmt.Sprintln("bye %d", 1)) // want "Replace .*Sprintf.* with fmt.Appendf" + sprintln := []byte(fmt.Sprintln("bye %d", 1)) // want "Replace .*Sprintln.* with fmt.Appendln" print(sprintln) } diff --git a/gopls/internal/analysis/modernize/testdata/src/fmtappendf/fmtappendf.go.golden b/gopls/internal/analysis/modernize/testdata/src/fmtappendf/fmtappendf.go.golden index 7c8aa7b9a5e..4fd2b136b82 100644 --- a/gopls/internal/analysis/modernize/testdata/src/fmtappendf/fmtappendf.go.golden +++ b/gopls/internal/analysis/modernize/testdata/src/fmtappendf/fmtappendf.go.golden @@ -29,8 +29,8 @@ func typealias() { } func otherprints() { - sprint := fmt.Append(nil, "bye %d", 1) // want "Replace .*Sprintf.* with fmt.Appendf" + sprint := fmt.Append(nil, "bye %d", 1) // want "Replace .*Sprint.* with fmt.Append" print(sprint) - sprintln := fmt.Appendln(nil, "bye %d", 1) // 
want "Replace .*Sprintf.* with fmt.Appendf" + sprintln := fmt.Appendln(nil, "bye %d", 1) // want "Replace .*Sprintln.* with fmt.Appendln" print(sprintln) } \ No newline at end of file diff --git a/gopls/internal/analysis/modernize/testdata/src/forvar/forvar.go b/gopls/internal/analysis/modernize/testdata/src/forvar/forvar.go new file mode 100644 index 00000000000..dd5ecd75e29 --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/forvar/forvar.go @@ -0,0 +1,62 @@ +package forvar + +func _(m map[int]int, s []int) { + // changed + for i := range s { + i := i // want "copying variable is unneeded" + go f(i) + } + for _, v := range s { + v := v // want "copying variable is unneeded" + go f(v) + } + for k, v := range m { + k := k // want "copying variable is unneeded" + v := v // nope: report only the first redeclaration + go f(k) + go f(v) + } + for _, v := range m { + v := v // want "copying variable is unneeded" + go f(v) + } + for i := range s { + /* hi */ i := i // want "copying variable is unneeded" + go f(i) + } + // nope + var i, k, v int + + for i = range s { // nope, scope change + i := i + go f(i) + } + for _, v = range s { // nope, scope change + v := v + go f(v) + } + for k = range m { // nope, scope change + k := k + go f(k) + } + for k, v = range m { // nope, scope change + k := k + v := v + go f(k) + go f(v) + } + for _, v = range m { // nope, scope change + v := v + go f(v) + } + for _, v = range m { // nope, not x := x + v := i + go f(v) + } + for i := range s { + i := (i) + go f(i) + } +} + +func f(n int) {} diff --git a/gopls/internal/analysis/modernize/testdata/src/forvar/forvar.go.golden b/gopls/internal/analysis/modernize/testdata/src/forvar/forvar.go.golden new file mode 100644 index 00000000000..35f71404c35 --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/forvar/forvar.go.golden @@ -0,0 +1,62 @@ +package forvar + +func _(m map[int]int, s []int) { + // changed + for i := range s { + // want "copying variable is 
unneeded" + go f(i) + } + for _, v := range s { + // want "copying variable is unneeded" + go f(v) + } + for k, v := range m { + // want "copying variable is unneeded" + v := v // nope: report only the first redeclaration + go f(k) + go f(v) + } + for _, v := range m { + // want "copying variable is unneeded" + go f(v) + } + for i := range s { + /* hi */ // want "copying variable is unneeded" + go f(i) + } + // nope + var i, k, v int + + for i = range s { // nope, scope change + i := i + go f(i) + } + for _, v = range s { // nope, scope change + v := v + go f(v) + } + for k = range m { // nope, scope change + k := k + go f(k) + } + for k, v = range m { // nope, scope change + k := k + v := v + go f(k) + go f(v) + } + for _, v = range m { // nope, scope change + v := v + go f(v) + } + for _, v = range m { // nope, not x := x + v := i + go f(v) + } + for i := range s { + i := (i) + go f(i) + } +} + +func f(n int) {} diff --git a/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop.go b/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop.go index 68ff9154ffd..7d0f7d17e91 100644 --- a/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop.go +++ b/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop.go @@ -16,6 +16,7 @@ type M map[int]string func useCopy(dst, src map[int]string) { // Replace loop by maps.Copy. for key, value := range src { + // A dst[key] = value // want "Replace m\\[k\\]=v loop with maps.Copy" } } @@ -23,6 +24,7 @@ func useCopy(dst, src map[int]string) { func useCopyGeneric[K comparable, V any, M ~map[K]V](dst, src M) { // Replace loop by maps.Copy. for key, value := range src { + // A dst[key] = value // want "Replace m\\[k\\]=v loop with maps.Copy" } } @@ -32,12 +34,18 @@ func useCopyNotClone(src map[int]string) { // Replace make(...) by maps.Copy. 
dst := make(map[int]string, len(src)) + // A for key, value := range src { + // B dst[key] = value // want "Replace m\\[k\\]=v loop with maps.Copy" + // C } + // A dst = map[int]string{} + // B for key, value := range src { + // C dst[key] = value // want "Replace m\\[k\\]=v loop with maps.Copy" } println(dst) @@ -126,8 +134,10 @@ func useInsert_assignableToSeq2(dst map[int]string, src func(yield func(int, str func useCollect(src iter.Seq2[int, string]) { // Replace loop and make(...) by maps.Collect. var dst map[int]string - dst = make(map[int]string) + dst = make(map[int]string) // A + // B for key, value := range src { + // C dst[key] = value // want "Replace m\\[k\\]=v loop with maps.Collect" } } @@ -137,7 +147,9 @@ func useInsert_typesDifferAssign(src iter.Seq2[int, string]) { // that is assignable to M. var dst M dst = make(M) + // A for key, value := range src { + // B dst[key] = value // want "Replace m\\[k\\]=v loop with maps.Collect" } } diff --git a/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop.go.golden b/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop.go.golden index be189673d9a..9136105b908 100644 --- a/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop.go.golden +++ b/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop.go.golden @@ -15,11 +15,15 @@ type M map[int]string func useCopy(dst, src map[int]string) { // Replace loop by maps.Copy. + // A + // want "Replace m\\[k\\]=v loop with maps.Copy" maps.Copy(dst, src) } func useCopyGeneric[K comparable, V any, M ~map[K]V](dst, src M) { // Replace loop by maps.Copy. + // A + // want "Replace m\\[k\\]=v loop with maps.Copy" maps.Copy(dst, src) } @@ -28,9 +32,17 @@ func useCopyNotClone(src map[int]string) { // Replace make(...) by maps.Copy. 
dst := make(map[int]string, len(src)) + // A + // B + // want "Replace m\\[k\\]=v loop with maps.Copy" + // C maps.Copy(dst, src) + // A dst = map[int]string{} + // B + // C + // want "Replace m\\[k\\]=v loop with maps.Copy" maps.Copy(dst, src) println(dst) } @@ -40,9 +52,11 @@ func useCopyParen(src map[int]string) { // Replace (make)(...) by maps.Clone. dst := (make)(map[int]string, len(src)) + // want "Replace m\\[k\\]=v loop with maps.Copy" maps.Copy(dst, src) dst = (map[int]string{}) + // want "Replace m\\[k\\]=v loop with maps.Copy" maps.Copy(dst, src) println(dst) } @@ -50,6 +64,7 @@ func useCopyParen(src map[int]string) { func useCopy_typesDiffer(src M) { // Replace loop but not make(...) as maps.Copy(src) would return wrong type M. dst := make(map[int]string, len(src)) + // want "Replace m\\[k\\]=v loop with maps.Copy" maps.Copy(dst, src) println(dst) } @@ -57,6 +72,7 @@ func useCopy_typesDiffer(src M) { func useCopy_typesDiffer2(src map[int]string) { // Replace loop but not make(...) as maps.Copy(src) would return wrong type map[int]string. dst := make(M, len(src)) + // want "Replace m\\[k\\]=v loop with maps.Copy" maps.Copy(dst, src) println(dst) } @@ -68,6 +84,7 @@ func useClone_typesDiffer3(src map[int]string) { // which is assignable to M. var dst M dst = make(M, len(src)) + // want "Replace m\\[k\\]=v loop with maps.Copy" maps.Copy(dst, src) println(dst) } @@ -79,6 +96,7 @@ func useClone_typesDiffer4(src map[int]string) { // which is assignable to M. var dst M dst = make(M, len(src)) + // want "Replace m\\[k\\]=v loop with maps.Copy" maps.Copy(dst, src) println(dst) } @@ -88,6 +106,7 @@ func useClone_generic[Map ~map[K]V, K comparable, V any](src Map) { // Replace loop and make(...) 
by maps.Clone dst := make(Map, len(src)) + // want "Replace m\\[k\\]=v loop with maps.Copy" maps.Copy(dst, src) println(dst) } @@ -96,12 +115,17 @@ func useClone_generic[Map ~map[K]V, K comparable, V any](src Map) { func useInsert_assignableToSeq2(dst map[int]string, src func(yield func(int, string) bool)) { // Replace loop by maps.Insert because src is assignable to iter.Seq2. + // want "Replace m\\[k\\]=v loop with maps.Insert" maps.Insert(dst, src) } func useCollect(src iter.Seq2[int, string]) { // Replace loop and make(...) by maps.Collect. var dst map[int]string + // A + // B + // C + // want "Replace m\\[k\\]=v loop with maps.Collect" dst = maps.Collect(src) } @@ -109,6 +133,9 @@ func useInsert_typesDifferAssign(src iter.Seq2[int, string]) { // Replace loop and make(...): maps.Collect returns an unnamed map type // that is assignable to M. var dst M + // A + // B + // want "Replace m\\[k\\]=v loop with maps.Collect" dst = maps.Collect(src) } @@ -116,6 +143,7 @@ func useInsert_typesDifferDeclare(src iter.Seq2[int, string]) { // Replace loop but not make(...) as maps.Collect would return an // unnamed map type that would change the type of dst. dst := make(M) + // want "Replace m\\[k\\]=v loop with maps.Insert" maps.Insert(dst, src) } diff --git a/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop_dot.go.golden b/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop_dot.go.golden index e992314cf56..6347d56360a 100644 --- a/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop_dot.go.golden +++ b/gopls/internal/analysis/modernize/testdata/src/mapsloop/mapsloop_dot.go.golden @@ -8,6 +8,7 @@ var _ = Clone[M] // force "maps" import so that each diagnostic doesn't add one func useCopyDot(dst, src map[int]string) { // Replace loop by maps.Copy. + // want "Replace m\\[k\\]=v loop with maps.Copy" Copy(dst, src) } @@ -16,6 +17,7 @@ func useCloneDot(src map[int]string) { // Replace make(...) by maps.Copy. 
dst := make(map[int]string, len(src)) + // want "Replace m\\[k\\]=v loop with maps.Copy" Copy(dst, src) println(dst) } diff --git a/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go b/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go index 44ba7c9193a..cdc767450d2 100644 --- a/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go +++ b/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go @@ -1,9 +1,12 @@ package minmax func ifmin(a, b int) { - x := a + x := a // A + // B if a < b { // want "if statement can be modernized using max" - x = b + // C + x = b // D + // E } print(x) } @@ -33,20 +36,30 @@ func ifmaxvariant(a, b int) { } func ifelsemin(a, b int) { - var x int + var x int // A + // B if a <= b { // want "if/else statement can be modernized using min" - x = a + // C + x = a // D + // E } else { - x = b + // F + x = b // G + // H } print(x) } func ifelsemax(a, b int) { - var x int + // A + var x int // B + // C if a >= b { // want "if/else statement can be modernized using max" - x = a + // D + x = a // E + // F } else { + // G x = b } print(x) @@ -115,3 +128,31 @@ func nopeHasElseBlock(x int) int { } return y } + +func fix72727(a, b int) { + o := a - 42 + // some important comment. DO NOT REMOVE. + if o < b { // want "if statement can be modernized using max" + o = b + } +} + +type myfloat float64 + +// The built-in min/max differ in their treatement of NaN, +// so reject floating-point numbers (#72829). +func nopeFloat(a, b myfloat) (res myfloat) { + if a < b { + res = a + } else { + res = b + } + return +} + +// Regression test for golang/go#72928. 
+func underscoreAssign(a, b int) { + if a > b { + _ = a + } +} diff --git a/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go.golden b/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go.golden index df1d5180f8a..b7be86bf416 100644 --- a/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go.golden +++ b/gopls/internal/analysis/modernize/testdata/src/minmax/minmax.go.golden @@ -1,33 +1,57 @@ package minmax func ifmin(a, b int) { + // A + // B + // want "if statement can be modernized using max" + // C + // D + // E x := max(a, b) print(x) } func ifmax(a, b int) { + // want "if statement can be modernized using min" x := min(a, b) print(x) } func ifminvariant(a, b int) { + // want "if statement can be modernized using min" x := min(a, b) print(x) } func ifmaxvariant(a, b int) { + // want "if statement can be modernized using min" x := min(a, b) print(x) } func ifelsemin(a, b int) { - var x int + var x int // A + // B + // want "if/else statement can be modernized using min" + // C + // D + // E + // F + // G + // H x = min(a, b) print(x) } func ifelsemax(a, b int) { - var x int + // A + var x int // B + // C + // want "if/else statement can be modernized using max" + // D + // E + // F + // G x = max(a, b) print(x) } @@ -55,6 +79,7 @@ func nopeIfStmtHasInitStmt() { // Regression test for a bug: fix was "y := max(x, y)". func oops() { x := 1 + // want "if statement can be modernized using max" y := max(x, 2) print(y) } @@ -92,3 +117,29 @@ func nopeHasElseBlock(x int) int { } return y } + +func fix72727(a, b int) { + // some important comment. DO NOT REMOVE. + // want "if statement can be modernized using max" + o := max(a-42, b) +} + +type myfloat float64 + +// The built-in min/max differ in their treatement of NaN, +// so reject floating-point numbers (#72829). +func nopeFloat(a, b myfloat) (res myfloat) { + if a < b { + res = a + } else { + res = b + } + return +} + +// Regression test for golang/go#72928. 
+func underscoreAssign(a, b int) { + if a > b { + _ = a + } +} diff --git a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go index da486dcd32c..74f3488546c 100644 --- a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go +++ b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go @@ -1,12 +1,54 @@ package rangeint +import ( + "os" + os1 "os" +) + func _(i int, s struct{ i int }, slice []int) { for i := 0; i < 10; i++ { // want "for loop can be modernized using range over int" println(i) } + for j := int(0); j < 10; j++ { // want "for loop can be modernized using range over int" + println(j) + } + for j := int8(0); j < 10; j++ { // want "for loop can be modernized using range over int" + println(j) + } + for j := int16(0); j < 10; j++ { // want "for loop can be modernized using range over int" + println(j) + } + for j := int32(0); j < 10; j++ { // want "for loop can be modernized using range over int" + println(j) + } + for j := int64(0); j < 10; j++ { // want "for loop can be modernized using range over int" + println(j) + } + for j := uint8(0); j < 10; j++ { // want "for loop can be modernized using range over int" + println(j) + } + for j := uint16(0); j < 10; j++ { // want "for loop can be modernized using range over int" + println(j) + } + for j := uint32(0); j < 10; j++ { // want "for loop can be modernized using range over int" + println(j) + } + for j := uint64(0); j < 10; j++ { // want "for loop can be modernized using range over int" + println(j) + } + for j := int8(0.); j < 10; j++ { // want "for loop can be modernized using range over int" + println(j) + } + for j := int8(.0); j < 10; j++ { // want "for loop can be modernized using range over int" + println(j) + } + for j := os.FileMode(0); j < 10; j++ { // want "for loop can be modernized using range over int" + println(j) + } + { var i int - for i = 0; i < f(); i++ { // want "for loop 
can be modernized using range over int" + for i = 0; i < 10; i++ { // want "for loop can be modernized using range over int" } // NB: no uses of i after loop. } @@ -21,6 +63,12 @@ func _(i int, s struct{ i int }, slice []int) { } // nope + for j := .0; j < 10; j++ { // nope: j is a float type + println(j) + } + for j := float64(0); j < 10; j++ { // nope: j is a float type + println(j) + } for i := 0; i < 10; { // nope: missing increment } for i := 0; i < 10; i-- { // nope: negative increment @@ -42,8 +90,57 @@ func _(i int, s struct{ i int }, slice []int) { for i := 0; i < 10; i++ { // nope: assigns i i = 8 } + + // The limit expression must be loop invariant; + // see https://github.com/golang/go/issues/72917 + for i := 0; i < f(); i++ { // nope + } + { + var s struct{ limit int } + for i := 0; i < s.limit; i++ { // nope: limit is not a const or local var + } + } + { + const k = 10 + for i := 0; i < k; i++ { // want "for loop can be modernized using range over int" + } + } + { + var limit = 10 + for i := 0; i < limit; i++ { // want "for loop can be modernized using range over int" + } + } + { + var limit = 10 + for i := 0; i < limit; i++ { // nope: limit is address-taken + } + print(&limit) + } + { + limit := 10 + limit++ + for i := 0; i < limit; i++ { // nope: limit is assigned other than by its declaration + } + } + for i := 0; i < Global; i++ { // nope: limit is an exported global var; may be updated elsewhere + } + for i := 0; i < len(table); i++ { // want "for loop can be modernized using range over int" + } + { + s := []string{} + for i := 0; i < len(s); i++ { // nope: limit is not loop-invariant + s = s[1:] + } + } + for i := 0; i < len(slice); i++ { // nope: i is incremented within loop + i += 1 + } } +var Global int + +var table = []string{"hello", "world"} + func f() int { return 0 } // Repro for part of #71847: ("for range n is invalid if the loop body contains i++"): @@ -72,8 +169,65 @@ func issue71847d() { const limit = 1e3 // float for i := 0; i < 
limit; i++ { // want "for loop can be modernized using range over int" } + for i := int(0); i < limit; i++ { // want "for loop can be modernized using range over int" + } + for i := uint(0); i < limit; i++ { // want "for loop can be modernized using range over int" + } const limit2 = 1 + 0i // complex for i := 0; i < limit2; i++ { // want "for loop can be modernized using range over int" } } + +func issue72726() { + var n, kd int + for i := 0; i < n; i++ { // want "for loop can be modernized using range over int" + // nope: j will be invisible once it's refactored to 'for j := range min(n-j, kd+1)' + for j := 0; j < min(n-j, kd+1); j++ { // nope + _, _ = i, j + } + } + + for i := 0; i < i; i++ { // nope + } + + var i int + for i = 0; i < i/2; i++ { // nope + } + + var arr []int + for i = 0; i < arr[i]; i++ { // nope + } +} + +func todo() { + for j := os1.FileMode(0); j < 10; j++ { // want "for loop can be modernized using range over int" + println(j) + } +} + +type T uint +type TAlias = uint + +func Fn(a int) T { + return T(a) +} + +func issue73037() { + var q T + for a := T(0); a < q; a++ { // want "for loop can be modernized using range over int" + println(a) + } + for a := Fn(0); a < q; a++ { + println(a) + } + var qa TAlias + for a := TAlias(0); a < qa; a++ { // want "for loop can be modernized using range over int" + println(a) + } + for a := T(0); a < 10; a++ { // want "for loop can be modernized using range over int" + for b := T(0); b < 10; b++ { // want "for loop can be modernized using range over int" + println(a, b) + } + } +} diff --git a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden index 01d28ccb92b..cdd2f118997 100644 --- a/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden +++ b/gopls/internal/analysis/modernize/testdata/src/rangeint/rangeint.go.golden @@ -1,12 +1,54 @@ package rangeint +import ( + "os" + os1 "os" +) + 
func _(i int, s struct{ i int }, slice []int) { for i := range 10 { // want "for loop can be modernized using range over int" println(i) } + for j := range 10 { // want "for loop can be modernized using range over int" + println(j) + } + for j := range int8(10) { // want "for loop can be modernized using range over int" + println(j) + } + for j := range int16(10) { // want "for loop can be modernized using range over int" + println(j) + } + for j := range int32(10) { // want "for loop can be modernized using range over int" + println(j) + } + for j := range int64(10) { // want "for loop can be modernized using range over int" + println(j) + } + for j := range uint8(10) { // want "for loop can be modernized using range over int" + println(j) + } + for j := range uint16(10) { // want "for loop can be modernized using range over int" + println(j) + } + for j := range uint32(10) { // want "for loop can be modernized using range over int" + println(j) + } + for j := range uint64(10) { // want "for loop can be modernized using range over int" + println(j) + } + for j := range int8(10) { // want "for loop can be modernized using range over int" + println(j) + } + for j := range int8(10) { // want "for loop can be modernized using range over int" + println(j) + } + for j := range os.FileMode(10) { // want "for loop can be modernized using range over int" + println(j) + } + { var i int - for i = range f() { // want "for loop can be modernized using range over int" + for i = range 10 { // want "for loop can be modernized using range over int" } // NB: no uses of i after loop. 
} @@ -21,6 +63,12 @@ func _(i int, s struct{ i int }, slice []int) { } // nope + for j := .0; j < 10; j++ { // nope: j is a float type + println(j) + } + for j := float64(0); j < 10; j++ { // nope: j is a float type + println(j) + } for i := 0; i < 10; { // nope: missing increment } for i := 0; i < 10; i-- { // nope: negative increment @@ -42,8 +90,57 @@ func _(i int, s struct{ i int }, slice []int) { for i := 0; i < 10; i++ { // nope: assigns i i = 8 } + + // The limit expression must be loop invariant; + // see https://github.com/golang/go/issues/72917 + for i := 0; i < f(); i++ { // nope + } + { + var s struct{ limit int } + for i := 0; i < s.limit; i++ { // nope: limit is not a const or local var + } + } + { + const k = 10 + for range k { // want "for loop can be modernized using range over int" + } + } + { + var limit = 10 + for range limit { // want "for loop can be modernized using range over int" + } + } + { + var limit = 10 + for i := 0; i < limit; i++ { // nope: limit is address-taken + } + print(&limit) + } + { + limit := 10 + limit++ + for i := 0; i < limit; i++ { // nope: limit is assigned other than by its declaration + } + } + for i := 0; i < Global; i++ { // nope: limit is an exported global var; may be updated elsewhere + } + for range table { // want "for loop can be modernized using range over int" + } + { + s := []string{} + for i := 0; i < len(s); i++ { // nope: limit is not loop-invariant + s = s[1:] + } + } + for i := 0; i < len(slice); i++ { // nope: i is incremented within loop + i += 1 + } } +var Global int + +var table = []string{"hello", "world"} + func f() int { return 0 } // Repro for part of #71847: ("for range n is invalid if the loop body contains i++"): @@ -72,8 +169,65 @@ func issue71847d() { const limit = 1e3 // float for range int(limit) { // want "for loop can be modernized using range over int" } + for range int(limit) { // want "for loop can be modernized using range over int" + } + for range uint(limit) { // want "for loop 
can be modernized using range over int" + } const limit2 = 1 + 0i // complex for range int(limit2) { // want "for loop can be modernized using range over int" } } + +func issue72726() { + var n, kd int + for i := range n { // want "for loop can be modernized using range over int" + // nope: j will be invisible once it's refactored to 'for j := range min(n-j, kd+1)' + for j := 0; j < min(n-j, kd+1); j++ { // nope + _, _ = i, j + } + } + + for i := 0; i < i; i++ { // nope + } + + var i int + for i = 0; i < i/2; i++ { // nope + } + + var arr []int + for i = 0; i < arr[i]; i++ { // nope + } +} + +func todo() { + for j := range os.FileMode(10) { // want "for loop can be modernized using range over int" + println(j) + } +} + +type T uint +type TAlias = uint + +func Fn(a int) T { + return T(a) +} + +func issue73037() { + var q T + for a := range q { // want "for loop can be modernized using range over int" + println(a) + } + for a := Fn(0); a < q; a++ { + println(a) + } + var qa TAlias + for a := range qa { // want "for loop can be modernized using range over int" + println(a) + } + for a := range T(10) { // want "for loop can be modernized using range over int" + for b := range T(10) { // want "for loop can be modernized using range over int" + println(a, b) + } + } +} diff --git a/gopls/internal/analysis/modernize/testdata/src/slicesdelete/slicesdelete.go b/gopls/internal/analysis/modernize/testdata/src/slicesdelete/slicesdelete.go index a710d06f2fe..0ee608d8f9f 100644 --- a/gopls/internal/analysis/modernize/testdata/src/slicesdelete/slicesdelete.go +++ b/gopls/internal/analysis/modernize/testdata/src/slicesdelete/slicesdelete.go @@ -2,6 +2,10 @@ package slicesdelete var g struct{ f []int } +func h() []int { return []int{} } + +var ch chan []int + func slicesdelete(test, other []byte, i int) { const k = 1 _ = append(test[:i], test[i+1:]...) 
// want "Replace append with slices.Delete" @@ -26,6 +30,10 @@ func slicesdelete(test, other []byte, i int) { _ = append(g.f[:i], g.f[i+k:]...) // want "Replace append with slices.Delete" + _ = append(h()[:i], h()[i+1:]...) // potentially has side effects + + _ = append((<-ch)[:i], (<-ch)[i+1:]...) // has side effects + _ = append(test[:3], test[i+1:]...) // cannot verify a < b _ = append(test[:i-4], test[i-1:]...) // want "Replace append with slices.Delete" diff --git a/gopls/internal/analysis/modernize/testdata/src/slicesdelete/slicesdelete.go.golden b/gopls/internal/analysis/modernize/testdata/src/slicesdelete/slicesdelete.go.golden index 2d9447af3a3..a15eb07dee9 100644 --- a/gopls/internal/analysis/modernize/testdata/src/slicesdelete/slicesdelete.go.golden +++ b/gopls/internal/analysis/modernize/testdata/src/slicesdelete/slicesdelete.go.golden @@ -4,35 +4,43 @@ import "slices" var g struct{ f []int } +func h() []int { return []int{} } + +var ch chan []int + func slicesdelete(test, other []byte, i int) { - const k = 1 - _ = slices.Delete(test, i, i+1) // want "Replace append with slices.Delete" + const k = 1 + _ = slices.Delete(test, i, i+1) // want "Replace append with slices.Delete" + + _ = slices.Delete(test, i+1, i+2) // want "Replace append with slices.Delete" + + _ = append(test[:i+1], test[i+1:]...) // not deleting any slice elements - _ = slices.Delete(test, i+1, i+2) // want "Replace append with slices.Delete" + _ = append(test[:i], test[i-1:]...) // not deleting any slice elements - _ = append(test[:i+1], test[i+1:]...) // not deleting any slice elements + _ = slices.Delete(test, i-1, i) // want "Replace append with slices.Delete" - _ = append(test[:i], test[i-1:]...) // not deleting any slice elements + _ = slices.Delete(test, i-2, i+1) // want "Replace append with slices.Delete" - _ = slices.Delete(test, i-1, i) // want "Replace append with slices.Delete" + _ = append(test[:i-2], other[i+1:]...) 
// different slices "test" and "other" - _ = slices.Delete(test, i-2, i+1) // want "Replace append with slices.Delete" + _ = append(test[:i-2], other[i+1+k:]...) // cannot verify a < b - _ = append(test[:i-2], other[i+1:]...) // different slices "test" and "other" + _ = append(test[:i-2], test[11:]...) // cannot verify a < b - _ = append(test[:i-2], other[i+1+k:]...) // cannot verify a < b + _ = slices.Delete(test, 1, 3) // want "Replace append with slices.Delete" - _ = append(test[:i-2], test[11:]...) // cannot verify a < b + _ = slices.Delete(g.f, i, i+k) // want "Replace append with slices.Delete" - _ = slices.Delete(test, 1, 3) // want "Replace append with slices.Delete" + _ = append(h()[:i], h()[i+1:]...) // potentially has side effects - _ = slices.Delete(g.f, i, i+k) // want "Replace append with slices.Delete" + _ = append((<-ch)[:i], (<-ch)[i+1:]...) // has side effects - _ = append(test[:3], test[i+1:]...) // cannot verify a < b + _ = append(test[:3], test[i+1:]...) // cannot verify a < b - _ = slices.Delete(test, i-4, i-1) // want "Replace append with slices.Delete" + _ = slices.Delete(test, i-4, i-1) // want "Replace append with slices.Delete" - _ = slices.Delete(test, 1+2, 3+4) // want "Replace append with slices.Delete" + _ = slices.Delete(test, 1+2, 3+4) // want "Replace append with slices.Delete" - _ = append(test[:1+2], test[i-1:]...) // cannot verify a < b -} \ No newline at end of file + _ = append(test[:1+2], test[i-1:]...) 
// cannot verify a < b +} diff --git a/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/bytescutprefix/bytescutprefix.go b/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/bytescutprefix/bytescutprefix.go new file mode 100644 index 00000000000..7c5363e6c8d --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/bytescutprefix/bytescutprefix.go @@ -0,0 +1,16 @@ +package bytescutprefix + +import ( + "bytes" +) + +func _() { + if bytes.HasPrefix(bss, bspre) { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + a := bytes.TrimPrefix(bss, bspre) + _ = a + } + if bytes.HasPrefix([]byte(""), []byte("")) { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + a := bytes.TrimPrefix([]byte(""), []byte("")) + _ = a + } +} diff --git a/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/bytescutprefix/bytescutprefix.go.golden b/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/bytescutprefix/bytescutprefix.go.golden new file mode 100644 index 00000000000..8d41a8bf343 --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/bytescutprefix/bytescutprefix.go.golden @@ -0,0 +1,16 @@ +package bytescutprefix + +import ( + "bytes" +) + +func _() { + if after, ok := bytes.CutPrefix(bss, bspre); ok { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + a := after + _ = a + } + if after, ok := bytes.CutPrefix([]byte(""), []byte("")); ok { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + a := after + _ = a + } +} diff --git a/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/bytescutprefix/bytescutprefix_dot.go b/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/bytescutprefix/bytescutprefix_dot.go new file mode 100644 index 00000000000..bfde6b7a461 --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/bytescutprefix/bytescutprefix_dot.go @@ -0,0 +1,15 @@ 
+package bytescutprefix + +import ( + . "bytes" +) + +var bss, bspre []byte + +// test supported cases of pattern 1 +func _() { + if HasPrefix(bss, bspre) { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + a := TrimPrefix(bss, bspre) + _ = a + } +} diff --git a/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/bytescutprefix/bytescutprefix_dot.go.golden b/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/bytescutprefix/bytescutprefix_dot.go.golden new file mode 100644 index 00000000000..8eb562e7940 --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/bytescutprefix/bytescutprefix_dot.go.golden @@ -0,0 +1,15 @@ +package bytescutprefix + +import ( + . "bytes" +) + +var bss, bspre []byte + +// test supported cases of pattern 1 +func _() { + if after, ok := CutPrefix(bss, bspre); ok { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + a := after + _ = a + } +} \ No newline at end of file diff --git a/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/stringscutprefix.go b/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/stringscutprefix.go new file mode 100644 index 00000000000..7679bdb6e67 --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/stringscutprefix.go @@ -0,0 +1,124 @@ +package stringscutprefix + +import ( + "strings" +) + +var ( + s, pre string +) + +// test supported cases of pattern 1 +func _() { + if strings.HasPrefix(s, pre) { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + a := strings.TrimPrefix(s, pre) + _ = a + } + if strings.HasPrefix("", "") { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + a := strings.TrimPrefix("", "") + _ = a + } + if strings.HasPrefix(s, "") { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + println([]byte(strings.TrimPrefix(s, ""))) + } + if strings.HasPrefix(s, "") { // want "HasPrefix \\+ TrimPrefix can be 
simplified to CutPrefix" + a, b := "", strings.TrimPrefix(s, "") + _, _ = a, b + } + if strings.HasPrefix(s, "") { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + a, b := strings.TrimPrefix(s, ""), strings.TrimPrefix(s, "") // only replace the first occurrence + s = "123" + b = strings.TrimPrefix(s, "") // only replace the first occurrence + _, _ = a, b + } + + var a, b string + if strings.HasPrefix(s, "") { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + a, b = "", strings.TrimPrefix(s, "") + _, _ = a, b + } +} + +// test cases that are not supported by pattern1 +func _() { + ok := strings.HasPrefix("", "") + if ok { // noop, currently it doesn't track the result usage of HasPrefix + a := strings.TrimPrefix("", "") + _ = a + } + if strings.HasPrefix(s, pre) { + a := strings.TrimPrefix("", "") // noop, as the argument isn't the same + _ = a + } + if strings.HasPrefix(s, pre) { + var result string + result = strings.TrimPrefix("", "") // noop, as we believe define is more popular. 
+ _ = result + } + if strings.HasPrefix("", "") { + a := strings.TrimPrefix(s, pre) // noop, as the argument isn't the same + _ = a + } +} + +var value0 string + +// test supported cases of pattern2 +func _() { + if after := strings.TrimPrefix(s, pre); after != s { // want "TrimPrefix can be simplified to CutPrefix" + println(after) + } + if after := strings.TrimPrefix(s, pre); s != after { // want "TrimPrefix can be simplified to CutPrefix" + println(after) + } + if after := strings.TrimPrefix(s, pre); s != after { // want "TrimPrefix can be simplified to CutPrefix" + println(strings.TrimPrefix(s, pre)) // noop here + } + if after := strings.TrimPrefix(s, ""); s != after { // want "TrimPrefix can be simplified to CutPrefix" + println(after) + } + var ok bool // define an ok variable to test the fix won't shadow it for its if stmt body + _ = ok + if after := strings.TrimPrefix(s, pre); after != s { // want "TrimPrefix can be simplified to CutPrefix" + println(after) + } + var predefined string + if predefined = strings.TrimPrefix(s, pre); s != predefined { // noop + println(predefined) + } + if predefined = strings.TrimPrefix(s, pre); s != predefined { // noop + println(&predefined) + } + var value string + if value = strings.TrimPrefix(s, pre); s != value { // noop + println(value) + } + lhsMap := make(map[string]string) + if lhsMap[""] = strings.TrimPrefix(s, pre); s != lhsMap[""] { // noop + println(lhsMap[""]) + } + arr := make([]string, 0) + if arr[0] = strings.TrimPrefix(s, pre); s != arr[0] { // noop + println(arr[0]) + } + type example struct { + field string + } + var e example + if e.field = strings.TrimPrefix(s, pre); s != e.field { // noop + println(e.field) + } +} + +// test cases that not supported by pattern2 +func _() { + if after := strings.TrimPrefix(s, pre); s != pre { // noop + println(after) + } + if after := strings.TrimPrefix(s, pre); after != pre { // noop + println(after) + } + if strings.TrimPrefix(s, pre) != s { + 
println(strings.TrimPrefix(s, pre)) + } +} diff --git a/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/stringscutprefix.go.golden b/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/stringscutprefix.go.golden new file mode 100644 index 00000000000..a6c52b08802 --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/stringscutprefix.go.golden @@ -0,0 +1,124 @@ +package stringscutprefix + +import ( + "strings" +) + +var ( + s, pre string +) + +// test supported cases of pattern 1 +func _() { + if after, ok := strings.CutPrefix(s, pre); ok { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + a := after + _ = a + } + if after, ok := strings.CutPrefix("", ""); ok { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + a := after + _ = a + } + if after, ok := strings.CutPrefix(s, ""); ok { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + println([]byte(after)) + } + if after, ok := strings.CutPrefix(s, ""); ok { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + a, b := "", after + _, _ = a, b + } + if after, ok := strings.CutPrefix(s, ""); ok { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + a, b := after, strings.TrimPrefix(s, "") // only replace the first occurrence + s = "123" + b = strings.TrimPrefix(s, "") // only replace the first occurrence + _, _ = a, b + } + + var a, b string + if after, ok := strings.CutPrefix(s, ""); ok { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + a, b = "", after + _, _ = a, b + } +} + +// test cases that are not supported by pattern1 +func _() { + ok := strings.HasPrefix("", "") + if ok { // noop, currently it doesn't track the result usage of HasPrefix + a := strings.TrimPrefix("", "") + _ = a + } + if strings.HasPrefix(s, pre) { + a := strings.TrimPrefix("", "") // noop, as the argument isn't the same + _ = a + } + if strings.HasPrefix(s, pre) { + var result string + 
result = strings.TrimPrefix("", "") // noop, as we believe define is more popular. + _ = result + } + if strings.HasPrefix("", "") { + a := strings.TrimPrefix(s, pre) // noop, as the argument isn't the same + _ = a + } +} + +var value0 string + +// test supported cases of pattern2 +func _() { + if after, ok := strings.CutPrefix(s, pre); ok { // want "TrimPrefix can be simplified to CutPrefix" + println(after) + } + if after, ok := strings.CutPrefix(s, pre); ok { // want "TrimPrefix can be simplified to CutPrefix" + println(after) + } + if after, ok := strings.CutPrefix(s, pre); ok { // want "TrimPrefix can be simplified to CutPrefix" + println(strings.TrimPrefix(s, pre)) // noop here + } + if after, ok := strings.CutPrefix(s, ""); ok { // want "TrimPrefix can be simplified to CutPrefix" + println(after) + } + var ok bool // define an ok variable to test the fix won't shadow it for its if stmt body + _ = ok + if after, ok0 := strings.CutPrefix(s, pre); ok0 { // want "TrimPrefix can be simplified to CutPrefix" + println(after) + } + var predefined string + if predefined = strings.TrimPrefix(s, pre); s != predefined { // noop + println(predefined) + } + if predefined = strings.TrimPrefix(s, pre); s != predefined { // noop + println(&predefined) + } + var value string + if value = strings.TrimPrefix(s, pre); s != value { // noop + println(value) + } + lhsMap := make(map[string]string) + if lhsMap[""] = strings.TrimPrefix(s, pre); s != lhsMap[""] { // noop + println(lhsMap[""]) + } + arr := make([]string, 0) + if arr[0] = strings.TrimPrefix(s, pre); s != arr[0] { // noop + println(arr[0]) + } + type example struct { + field string + } + var e example + if e.field = strings.TrimPrefix(s, pre); s != e.field { // noop + println(e.field) + } +} + +// test cases that not supported by pattern2 +func _() { + if after := strings.TrimPrefix(s, pre); s != pre { // noop + println(after) + } + if after := strings.TrimPrefix(s, pre); after != pre { // noop + println(after) + } + if 
strings.TrimPrefix(s, pre) != s { + println(strings.TrimPrefix(s, pre)) + } +} diff --git a/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/stringscutprefix_dot.go b/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/stringscutprefix_dot.go new file mode 100644 index 00000000000..75ce5bbe39b --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/stringscutprefix_dot.go @@ -0,0 +1,23 @@ +package stringscutprefix + +import ( + . "strings" +) + +// test supported cases of pattern 1 +func _() { + if HasPrefix(s, pre) { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + a := TrimPrefix(s, pre) + _ = a + } +} + +// test supported cases of pattern2 +func _() { + if after := TrimPrefix(s, pre); after != s { // want "TrimPrefix can be simplified to CutPrefix" + println(after) + } + if after := TrimPrefix(s, pre); s != after { // want "TrimPrefix can be simplified to CutPrefix" + println(after) + } +} diff --git a/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/stringscutprefix_dot.go.golden b/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/stringscutprefix_dot.go.golden new file mode 100644 index 00000000000..50e3b6ff0ca --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/stringscutprefix/stringscutprefix_dot.go.golden @@ -0,0 +1,23 @@ +package stringscutprefix + +import ( + . 
"strings" +) + +// test supported cases of pattern 1 +func _() { + if after, ok := CutPrefix(s, pre); ok { // want "HasPrefix \\+ TrimPrefix can be simplified to CutPrefix" + a := after + _ = a + } +} + +// test supported cases of pattern2 +func _() { + if after, ok := CutPrefix(s, pre); ok { // want "TrimPrefix can be simplified to CutPrefix" + println(after) + } + if after, ok := CutPrefix(s, pre); ok { // want "TrimPrefix can be simplified to CutPrefix" + println(after) + } +} \ No newline at end of file diff --git a/gopls/internal/analysis/modernize/testdata/src/waitgroup/waitgroup.go b/gopls/internal/analysis/modernize/testdata/src/waitgroup/waitgroup.go new file mode 100644 index 00000000000..8269235bda7 --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/waitgroup/waitgroup.go @@ -0,0 +1,152 @@ +package waitgroup + +import ( + "fmt" + "sync" +) + +// supported case for pattern 1. +func _() { + var wg sync.WaitGroup + wg.Add(1) // want "Goroutine creation can be simplified using WaitGroup.Go" + go func() { + defer wg.Done() + fmt.Println() + }() + + wg.Add(1) // want "Goroutine creation can be simplified using WaitGroup.Go" + go func() { + defer wg.Done() + }() + + for range 10 { + wg.Add(1) // want "Goroutine creation can be simplified using WaitGroup.Go" + go func() { + defer wg.Done() + fmt.Println() + }() + } +} + +// supported case for pattern 2. +func _() { + var wg sync.WaitGroup + wg.Add(1) // want "Goroutine creation can be simplified using WaitGroup.Go" + go func() { + fmt.Println() + wg.Done() + }() + + wg.Add(1) // want "Goroutine creation can be simplified using WaitGroup.Go" + go func() { + wg.Done() + }() + + for range 10 { + wg.Add(1) // want "Goroutine creation can be simplified using WaitGroup.Go" + go func() { + fmt.Println() + wg.Done() + }() + } +} + +// this function puts some wrong usages but waitgroup modernizer will still offer fixes. 
+func _() { + var wg sync.WaitGroup + wg.Add(1) // want "Goroutine creation can be simplified using WaitGroup.Go" + go func() { + defer wg.Done() + defer wg.Done() + fmt.Println() + }() + + wg.Add(1) // want "Goroutine creation can be simplified using WaitGroup.Go" + go func() { + defer wg.Done() + fmt.Println() + wg.Done() + }() + + wg.Add(1) // want "Goroutine creation can be simplified using WaitGroup.Go" + go func() { + fmt.Println() + wg.Done() + wg.Done() + }() +} + +// this function puts the unsupported cases of pattern 1. +func _() { + var wg sync.WaitGroup + wg.Add(1) + go func() {}() + + wg.Add(1) + go func(i int) { + defer wg.Done() + fmt.Println(i) + }(1) + + wg.Add(1) + go func() { + fmt.Println() + defer wg.Done() + }() + + wg.Add(1) + go func() { // noop: no wg.Done call inside function body. + fmt.Println() + }() + + go func() { // noop: no Add call before this go stmt. + defer wg.Done() + fmt.Println() + }() + + wg.Add(2) // noop: only support Add(1). + go func() { + defer wg.Done() + }() + + var wg1 sync.WaitGroup + wg1.Add(1) // noop: Add and Done should be the same object. + go func() { + defer wg.Done() + fmt.Println() + }() + + wg.Add(1) // noop: Add and Done should be the same object. + go func() { + defer wg1.Done() + fmt.Println() + }() +} + +// this function puts the unsupported cases of pattern 2. +func _() { + var wg sync.WaitGroup + wg.Add(1) + go func() { + wg.Done() + fmt.Println() + }() + + go func() { // noop: no Add call before this go stmt. + fmt.Println() + wg.Done() + }() + + var wg1 sync.WaitGroup + wg1.Add(1) // noop: Add and Done should be the same object. + go func() { + fmt.Println() + wg.Done() + }() + + wg.Add(1) // noop: Add and Done should be the same object. 
+ go func() { + fmt.Println() + wg1.Done() + }() +} diff --git a/gopls/internal/analysis/modernize/testdata/src/waitgroup/waitgroup.go.golden b/gopls/internal/analysis/modernize/testdata/src/waitgroup/waitgroup.go.golden new file mode 100644 index 00000000000..dd98429da0d --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/waitgroup/waitgroup.go.golden @@ -0,0 +1,143 @@ +package waitgroup + +import ( + "fmt" + "sync" +) + +// supported case for pattern 1. +func _() { + var wg sync.WaitGroup + // want "Goroutine creation can be simplified using WaitGroup.Go" + wg.Go(func() { + fmt.Println() + }) + + // want "Goroutine creation can be simplified using WaitGroup.Go" + wg.Go(func() { + }) + + for range 10 { + // want "Goroutine creation can be simplified using WaitGroup.Go" + wg.Go(func() { + fmt.Println() + }) + } +} + +// supported case for pattern 2. +func _() { + var wg sync.WaitGroup + // want "Goroutine creation can be simplified using WaitGroup.Go" + wg.Go(func() { + fmt.Println() + }) + + // want "Goroutine creation can be simplified using WaitGroup.Go" + wg.Go(func() { + }) + + for range 10 { + // want "Goroutine creation can be simplified using WaitGroup.Go" + wg.Go(func() { + fmt.Println() + }) + } +} + +// this function puts some wrong usages but waitgroup modernizer will still offer fixes. +func _() { + var wg sync.WaitGroup + // want "Goroutine creation can be simplified using WaitGroup.Go" + wg.Go(func() { + defer wg.Done() + fmt.Println() + }) + + // want "Goroutine creation can be simplified using WaitGroup.Go" + wg.Go(func() { + fmt.Println() + wg.Done() + }) + + // want "Goroutine creation can be simplified using WaitGroup.Go" + wg.Go(func() { + fmt.Println() + wg.Done() + }) +} + +// this function puts the unsupported cases of pattern 1. 
+func _() { + var wg sync.WaitGroup + wg.Add(1) + go func() {}() + + wg.Add(1) + go func(i int) { + defer wg.Done() + fmt.Println(i) + }(1) + + wg.Add(1) + go func() { + fmt.Println() + defer wg.Done() + }() + + wg.Add(1) + go func() { // noop: no wg.Done call inside function body. + fmt.Println() + }() + + go func() { // noop: no Add call before this go stmt. + defer wg.Done() + fmt.Println() + }() + + wg.Add(2) // noop: only support Add(1). + go func() { + defer wg.Done() + }() + + var wg1 sync.WaitGroup + wg1.Add(1) // noop: Add and Done should be the same object. + go func() { + defer wg.Done() + fmt.Println() + }() + + wg.Add(1) // noop: Add and Done should be the same object. + go func() { + defer wg1.Done() + fmt.Println() + }() +} + +// this function puts the unsupported cases of pattern 2. +func _() { + var wg sync.WaitGroup + wg.Add(1) + go func() { + wg.Done() + fmt.Println() + }() + + go func() { // noop: no Add call before this go stmt. + fmt.Println() + wg.Done() + }() + + var wg1 sync.WaitGroup + wg1.Add(1) // noop: Add and Done should be the same object. + go func() { + fmt.Println() + wg.Done() + }() + + wg.Add(1) // noop: Add and Done should be the same object. 
+ go func() { + fmt.Println() + wg1.Done() + }() +} diff --git a/gopls/internal/analysis/modernize/testdata/src/waitgroup/waitgroup_alias.go b/gopls/internal/analysis/modernize/testdata/src/waitgroup/waitgroup_alias.go new file mode 100644 index 00000000000..087edba27be --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/waitgroup/waitgroup_alias.go @@ -0,0 +1,21 @@ +package waitgroup + +import ( + "fmt" + sync1 "sync" +) + +func _() { + var wg sync1.WaitGroup + wg.Add(1) // want "Goroutine creation can be simplified using WaitGroup.Go" + go func() { + defer wg.Done() + fmt.Println() + }() + + wg.Add(1) // want "Goroutine creation can be simplified using WaitGroup.Go" + go func() { + fmt.Println() + wg.Done() + }() +} diff --git a/gopls/internal/analysis/modernize/testdata/src/waitgroup/waitgroup_alias.go.golden b/gopls/internal/analysis/modernize/testdata/src/waitgroup/waitgroup_alias.go.golden new file mode 100644 index 00000000000..377973bc689 --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/waitgroup/waitgroup_alias.go.golden @@ -0,0 +1,19 @@ +package waitgroup + +import ( + "fmt" + sync1 "sync" +) + +func _() { + var wg sync1.WaitGroup + // want "Goroutine creation can be simplified using WaitGroup.Go" + wg.Go(func() { + fmt.Println() + }) + + // want "Goroutine creation can be simplified using WaitGroup.Go" + wg.Go(func() { + fmt.Println() + }) +} \ No newline at end of file diff --git a/gopls/internal/analysis/modernize/testdata/src/waitgroup/waitgroup_dot.go b/gopls/internal/analysis/modernize/testdata/src/waitgroup/waitgroup_dot.go new file mode 100644 index 00000000000..b4d1e150dbc --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/waitgroup/waitgroup_dot.go @@ -0,0 +1,22 @@ +package waitgroup + +import ( + "fmt" + . "sync" +) + +// supported case for pattern 1. 
+func _() { + var wg WaitGroup + wg.Add(1) // want "Goroutine creation can be simplified using WaitGroup.Go" + go func() { + defer wg.Done() + fmt.Println() + }() + + wg.Add(1) // want "Goroutine creation can be simplified using WaitGroup.Go" + go func() { + fmt.Println() + wg.Done() + }() +} diff --git a/gopls/internal/analysis/modernize/testdata/src/waitgroup/waitgroup_dot.go.golden b/gopls/internal/analysis/modernize/testdata/src/waitgroup/waitgroup_dot.go.golden new file mode 100644 index 00000000000..37584be72f8 --- /dev/null +++ b/gopls/internal/analysis/modernize/testdata/src/waitgroup/waitgroup_dot.go.golden @@ -0,0 +1,20 @@ +package waitgroup + +import ( + "fmt" + . "sync" +) + +// supported case for pattern 1. +func _() { + var wg WaitGroup + // want "Goroutine creation can be simplified using WaitGroup.Go" + wg.Go(func() { + fmt.Println() + }) + + // want "Goroutine creation can be simplified using WaitGroup.Go" + wg.Go(func() { + fmt.Println() + }) +} \ No newline at end of file diff --git a/gopls/internal/analysis/modernize/testingcontext.go b/gopls/internal/analysis/modernize/testingcontext.go index 9bdc11ccfca..de52f756ab8 100644 --- a/gopls/internal/analysis/modernize/testingcontext.go +++ b/gopls/internal/analysis/modernize/testingcontext.go @@ -14,12 +14,11 @@ import ( "unicode/utf8" "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" "golang.org/x/tools/internal/analysisinternal" - "golang.org/x/tools/internal/astutil/cursor" + typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" "golang.org/x/tools/internal/astutil/edge" + "golang.org/x/tools/internal/typesinternal/typeindex" ) // The testingContext pass replaces calls to context.WithCancel from within @@ -41,38 +40,32 @@ import ( // - the call is within a test or subtest function // - the relevant testing.{T,B,F} is named and not shadowed at the call func 
testingContext(pass *analysis.Pass) { - if !analysisinternal.Imports(pass.Pkg, "testing") { - return - } + var ( + index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index) + info = pass.TypesInfo - info := pass.TypesInfo + contextWithCancel = index.Object("context", "WithCancel") + ) - // checkCall finds eligible calls to context.WithCancel to replace. - checkCall := func(cur cursor.Cursor) { +calls: + for cur := range index.Calls(contextWithCancel) { call := cur.Node().(*ast.CallExpr) - obj := typeutil.Callee(info, call) - if !analysisinternal.IsFunctionNamed(obj, "context", "WithCancel") { - return - } - - // Have: context.WithCancel(arg) + // Have: context.WithCancel(...) arg, ok := call.Args[0].(*ast.CallExpr) if !ok { - return + continue } - if obj := typeutil.Callee(info, arg); !analysisinternal.IsFunctionNamed(obj, "context", "Background", "TODO") { - return + if !analysisinternal.IsFunctionNamed(typeutil.Callee(info, arg), "context", "Background", "TODO") { + continue } - // Have: context.WithCancel(context.{Background,TODO}()) parent := cur.Parent() assign, ok := parent.Node().(*ast.AssignStmt) if !ok || assign.Tok != token.DEFINE { - return + continue } - // Have: a, b := context.WithCancel(context.{Background,TODO}()) // Check that both a and b are declared, not redeclarations. 
@@ -80,27 +73,27 @@ func testingContext(pass *analysis.Pass) { for _, expr := range assign.Lhs { id, ok := expr.(*ast.Ident) if !ok { - return + continue calls } obj, ok := info.Defs[id] if !ok { - return + continue calls } lhs = append(lhs, obj) } next, ok := parent.NextSibling() if !ok { - return + continue } defr, ok := next.Node().(*ast.DeferStmt) if !ok { - return + continue } - if soleUse(info, lhs[1]) != defr.Call.Fun { - return + deferId, ok := defr.Call.Fun.(*ast.Ident) + if !ok || !soleUseIs(index, lhs[1], deferId) { + continue // b is used elsewhere } - // Have: // a, b := context.WithCancel(context.{Background,TODO}()) // defer b() @@ -110,7 +103,7 @@ func testingContext(pass *analysis.Pass) { if curFunc, ok := enclosingFunc(cur); ok { switch n := curFunc.Node().(type) { case *ast.FuncLit: - if e, idx := curFunc.Edge(); e == edge.CallExpr_Args && idx == 1 { + if ek, idx := curFunc.ParentEdge(); ek == edge.CallExpr_Args && idx == 1 { // Have: call(..., func(...) { ...context.WithCancel(...)... }) obj := typeutil.Callee(info, curFunc.Parent().Node().(*ast.CallExpr)) if (analysisinternal.IsMethodNamed(obj, "testing", "T", "Run") || @@ -126,8 +119,7 @@ func testingContext(pass *analysis.Pass) { testObj = isTestFn(info, n) } } - - if testObj != nil { + if testObj != nil && fileUses(info, enclosingFile(cur), "go1.24") { // Have a test function. Check that we can resolve the relevant // testing.{T,B,F} at the current position. if _, obj := lhs[0].Parent().LookupParent(testObj.Name(), lhs[0].Pos()); obj == testObj { @@ -148,29 +140,19 @@ func testingContext(pass *analysis.Pass) { } } } - - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - for curFile := range filesUsing(inspect, info, "go1.24") { - for cur := range curFile.Preorder((*ast.CallExpr)(nil)) { - checkCall(cur) - } - } } -// soleUse returns the ident that refers to obj, if there is exactly one. -// -// TODO(rfindley): consider factoring to share with gopls/internal/refactor/inline. 
-func soleUse(info *types.Info, obj types.Object) (sole *ast.Ident) { - // This is not efficient, but it is called infrequently. - for id, obj2 := range info.Uses { - if obj2 == obj { - if sole != nil { - return nil // not unique - } - sole = id +// soleUseIs reports whether id is the sole Ident that uses obj. +// (It returns false if there were no uses of obj.) +func soleUseIs(index *typeindex.Index, obj types.Object, id *ast.Ident) bool { + empty := true + for use := range index.Uses(obj) { + empty = false + if use.Node() != id { + return false } } - return sole + return !empty } // isTestFn checks whether fn is a test function (TestX, BenchmarkX, FuzzX), diff --git a/gopls/internal/analysis/modernize/waitgroup.go b/gopls/internal/analysis/modernize/waitgroup.go new file mode 100644 index 00000000000..080bd4d362a --- /dev/null +++ b/gopls/internal/analysis/modernize/waitgroup.go @@ -0,0 +1,144 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package modernize + +import ( + "fmt" + "go/ast" + "slices" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/analysisinternal" + typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" + "golang.org/x/tools/internal/typesinternal/typeindex" +) + +// The waitgroup pass replaces old, more complex code with +// the WaitGroup.Go API added in go1.25. +// +// Patterns: +// +// 1. wg.Add(1); go func() { defer wg.Done(); ... }() +// => +// wg.Go(func() { ... }) +// +// 2. wg.Add(1); go func() { ...; wg.Done() }() +// => +// wg.Go(func() { ... }) +// +// The wg.Done must occur within the first statement of the block in a +// defer format or last statement of the block, and the offered fix +// only removes the first/last wg.Done call. It doesn't fix existing +// wrong usage of sync.WaitGroup.
+// +// The use of WaitGroup.Go in pattern 1 implicitly introduces a +// 'defer', which may change the behavior in the case of panic from +// the "..." logic. In this instance, the change is safe: before and +// after the transformation, an unhandled panic inevitably results in +// a fatal crash. The fact that the transformed code calls wg.Done() +// before the crash doesn't materially change anything. (If Done had +// other effects, or blocked, or if WaitGroup.Go propagated panics +// from child to parent goroutine, the argument would be different.) +func waitgroup(pass *analysis.Pass) { + var ( + index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index) + info = pass.TypesInfo + syncWaitGroupAdd = index.Selection("sync", "WaitGroup", "Add") + syncWaitGroupDone = index.Selection("sync", "WaitGroup", "Done") + ) + if !index.Used(syncWaitGroupDone) { + return + } + + for curAddCall := range index.Calls(syncWaitGroupAdd) { + // Extract receiver from wg.Add call. + addCall := curAddCall.Node().(*ast.CallExpr) + if !isIntLiteral(info, addCall.Args[0], 1) { + continue // not a call to wg.Add(1) + } + // Inv: the Args[0] check ensures addCall is not of + // the form sync.WaitGroup.Add(&wg, 1). + addCallRecv := ast.Unparen(addCall.Fun).(*ast.SelectorExpr).X + + // Following statement must be go func() { ... } (). + addStmt, ok := curAddCall.Parent().Node().(*ast.ExprStmt) + if !ok { + continue // unnecessary parens? + } + curNext, ok := curAddCall.Parent().NextSibling() + if !ok { + continue // no successor + } + goStmt, ok := curNext.Node().(*ast.GoStmt) + if !ok { + continue // not a go stmt + } + lit, ok := goStmt.Call.Fun.(*ast.FuncLit) + if !ok || len(goStmt.Call.Args) != 0 { + continue // go argument is not func(){...}() + } + list := lit.Body.List + if len(list) == 0 { + continue + } + + // Body must start with "defer wg.Done()" or end with "wg.Done()". 
+ var doneStmt ast.Stmt + if deferStmt, ok := list[0].(*ast.DeferStmt); ok && + typeutil.Callee(info, deferStmt.Call) == syncWaitGroupDone && + equalSyntax(ast.Unparen(deferStmt.Call.Fun).(*ast.SelectorExpr).X, addCallRecv) { + doneStmt = deferStmt // "defer wg.Done()" + + } else if lastStmt, ok := list[len(list)-1].(*ast.ExprStmt); ok { + if doneCall, ok := lastStmt.X.(*ast.CallExpr); ok && + typeutil.Callee(info, doneCall) == syncWaitGroupDone && + equalSyntax(ast.Unparen(doneCall.Fun).(*ast.SelectorExpr).X, addCallRecv) { + doneStmt = lastStmt // "wg.Done()" + } + } + if doneStmt == nil { + continue + } + + file := enclosingFile(curAddCall) + if !fileUses(info, file, "go1.25") { + continue + } + + pass.Report(analysis.Diagnostic{ + Pos: addCall.Pos(), + End: goStmt.End(), + Category: "waitgroup", + Message: "Goroutine creation can be simplified using WaitGroup.Go", + SuggestedFixes: []analysis.SuggestedFix{{ + Message: "Simplify by using WaitGroup.Go", + TextEdits: slices.Concat( + // delete "wg.Add(1)" + analysisinternal.DeleteStmt(pass.Fset, file, addStmt, nil), + // delete "wg.Done()" or "defer wg.Done()" + analysisinternal.DeleteStmt(pass.Fset, file, doneStmt, nil), + []analysis.TextEdit{ + // go func() + // ------ + // wg.Go(func() + { + Pos: goStmt.Pos(), + End: goStmt.Call.Pos(), + NewText: fmt.Appendf(nil, "%s.Go(", addCallRecv), + }, + // ... }() + // - + // ... } ) + { + Pos: goStmt.Call.Lparen, + End: goStmt.Call.Rparen, + }, + }, + ), + }}, + }) + } +} diff --git a/gopls/internal/analysis/nonewvars/nonewvars.go b/gopls/internal/analysis/nonewvars/nonewvars.go index b7f861ba7f1..eeae7211c97 100644 --- a/gopls/internal/analysis/nonewvars/nonewvars.go +++ b/gopls/internal/analysis/nonewvars/nonewvars.go @@ -49,14 +49,11 @@ func run(pass *analysis.Pass) (any, error) { } // Find enclosing assignment (which may be curErr itself). 
- assign, ok := curErr.Node().(*ast.AssignStmt) + curAssign, ok := moreiters.First(curErr.Enclosing((*ast.AssignStmt)(nil))) if !ok { - cur, ok := moreiters.First(curErr.Ancestors((*ast.AssignStmt)(nil))) - if !ok { - continue // no enclosing assignment - } - assign = cur.Node().(*ast.AssignStmt) + continue // no enclosing assignment } + assign := curAssign.Node().(*ast.AssignStmt) if assign.Tok != token.DEFINE { continue // not a := statement } diff --git a/gopls/internal/analysis/noresultvalues/noresultvalues.go b/gopls/internal/analysis/noresultvalues/noresultvalues.go index 6b8f9d895e4..848f6532ce0 100644 --- a/gopls/internal/analysis/noresultvalues/noresultvalues.go +++ b/gopls/internal/analysis/noresultvalues/noresultvalues.go @@ -48,7 +48,7 @@ func run(pass *analysis.Pass) (any, error) { continue // can't find errant node } // Find first enclosing return statement, if any. - if curRet, ok := moreiters.First(curErr.Ancestors((*ast.ReturnStmt)(nil))); ok { + if curRet, ok := moreiters.First(curErr.Enclosing((*ast.ReturnStmt)(nil))); ok { ret := curRet.Node() pass.Report(analysis.Diagnostic{ Pos: start, diff --git a/gopls/internal/analysis/unusedparams/unusedparams.go b/gopls/internal/analysis/unusedparams/unusedparams.go index 2986dfd6e41..12076c5f273 100644 --- a/gopls/internal/analysis/unusedparams/unusedparams.go +++ b/gopls/internal/analysis/unusedparams/unusedparams.go @@ -80,24 +80,9 @@ func run(pass *analysis.Pass) (any, error) { inspect.Preorder(filter, func(n ast.Node) { switch n := n.(type) { case *ast.CallExpr: - // Strip off any generic instantiation. - fun := n.Fun - switch fun_ := fun.(type) { - case *ast.IndexExpr: - fun = fun_.X // f[T]() (funcs[i]() is rejected below) - case *ast.IndexListExpr: - fun = fun_.X // f[K, V]() - } - + id := typesinternal.UsedIdent(pass.TypesInfo, n.Fun) // Find object: // record non-exported function, method, or func-typed var. 
- var id *ast.Ident - switch fun := fun.(type) { - case *ast.Ident: - id = fun - case *ast.SelectorExpr: - id = fun.Sel - } if id != nil && !id.IsExported() { switch pass.TypesInfo.Uses[id].(type) { case *types.Func, *types.Var: @@ -202,7 +187,7 @@ func run(pass *analysis.Pass) (any, error) { case *ast.AssignStmt: // f = func() {...} // f := func() {...} - if e, idx := c.Edge(); e == edge.AssignStmt_Rhs { + if ek, idx := c.ParentEdge(); ek == edge.AssignStmt_Rhs { // Inv: n == AssignStmt.Rhs[idx] if id, ok := parent.Lhs[idx].(*ast.Ident); ok { fn = pass.TypesInfo.ObjectOf(id) diff --git a/gopls/internal/cache/analysis.go b/gopls/internal/cache/analysis.go index 4083f49d2d6..cf5518cf79f 100644 --- a/gopls/internal/cache/analysis.go +++ b/gopls/internal/cache/analysis.go @@ -637,7 +637,7 @@ func (an *analysisNode) runCached(ctx context.Context, key file.Hash) (*analyzeS return summary, nil } -// analysisCacheKey returns a cache key that is a cryptographic digest +// cacheKey returns a cache key that is a cryptographic digest // of the all the values that might affect type checking and analysis: // the analyzer names, package metadata, names and contents of // compiled Go files, and vdeps (successor) information diff --git a/gopls/internal/cache/check.go b/gopls/internal/cache/check.go index 27d5cfa240b..909003288bc 100644 --- a/gopls/internal/cache/check.go +++ b/gopls/internal/cache/check.go @@ -637,7 +637,10 @@ func (b *typeCheckBatch) checkPackageForImport(ctx context.Context, ph *packageH go func() { exportData, err := gcimporter.IExportShallow(b.fset, pkg, bug.Reportf) if err != nil { - bug.Reportf("exporting package %v: %v", ph.mp.ID, err) + // Internal error; the stack will have been reported via + // bug.Reportf within IExportShallow, so there's not much + // to do here (issue #71067). 
+ event.Error(ctx, "IExportShallow failed", err, label.Package.Of(string(ph.mp.ID))) return } if err := filecache.Set(exportDataKind, ph.key, exportData); err != nil { diff --git a/gopls/internal/cache/filterer.go b/gopls/internal/cache/filterer.go index 0ec18369bdf..13dbd8a1b04 100644 --- a/gopls/internal/cache/filterer.go +++ b/gopls/internal/cache/filterer.go @@ -11,45 +11,55 @@ import ( "strings" ) -type Filterer struct { - // Whether a filter is excluded depends on the operator (first char of the raw filter). - // Slices filters and excluded then should have the same length. - filters []*regexp.Regexp - excluded []bool -} - -// NewFilterer computes regular expression form of all raw filters -func NewFilterer(rawFilters []string) *Filterer { - var f Filterer - for _, filter := range rawFilters { +// PathIncludeFunc creates a function that determines if a given file path +// should be included based on a set of inclusion/exclusion rules. +// +// The `rules` parameter is a slice of strings, where each string represents a +// filtering rule. Each rule consists of an operator (`+` for inclusion, `-` +// for exclusion) followed by a path pattern. See more detail of rules syntax +// at [settings.BuildOptions.DirectoryFilters]. +// +// Rules are evaluated in order, and the last matching rule determines +// whether a path is included or excluded. +// +// Examples: +// - []{"-foo"}: Exclude "foo" at the current depth. +// - []{"-**foo"}: Exclude "foo" at any depth. +// - []{"+bar"}: Include "bar" at the current depth. +// - []{"-foo", "+foo/**/bar"}: Exclude all "foo" at current depth except +// directory "bar" under "foo" at any depth. +func PathIncludeFunc(rules []string) func(string) bool { + var matchers []*regexp.Regexp + var included []bool + for _, filter := range rules { filter = path.Clean(filepath.ToSlash(filter)) // TODO(dungtuanle): fix: validate [+-] prefix. 
op, prefix := filter[0], filter[1:] - // convertFilterToRegexp adds "/" at the end of prefix to handle cases where a filter is a prefix of another filter. + // convertFilterToRegexp adds "/" at the end of prefix to handle cases + // where a filter is a prefix of another filter. // For example, it prevents [+foobar, -foo] from excluding "foobar". - f.filters = append(f.filters, convertFilterToRegexp(filepath.ToSlash(prefix))) - f.excluded = append(f.excluded, op == '-') + matchers = append(matchers, convertFilterToRegexp(filepath.ToSlash(prefix))) + included = append(included, op == '+') } - return &f -} - -// Disallow return true if the path is excluded from the filterer's filters. -func (f *Filterer) Disallow(path string) bool { - // Ensure trailing but not leading slash. - path = strings.TrimPrefix(path, "/") - if !strings.HasSuffix(path, "/") { - path += "/" - } + return func(path string) bool { + // Ensure leading and trailing slashes. + if !strings.HasPrefix(path, "/") { + path = "/" + path + } + if !strings.HasSuffix(path, "/") { + path += "/" + } - // TODO(adonovan): opt: iterate in reverse and break at first match. - excluded := false - for i, filter := range f.filters { - if filter.MatchString(path) { - excluded = f.excluded[i] // last match wins + // TODO(adonovan): opt: iterate in reverse and break at first match. + include := true + for i, filter := range matchers { + if filter.MatchString(path) { + include = included[i] // last match wins + } } + return include } - return excluded } // convertFilterToRegexp replaces glob-like operator substrings in a string file path to their equivalent regex forms. @@ -60,7 +70,7 @@ func convertFilterToRegexp(filter string) *regexp.Regexp { return regexp.MustCompile(".*") } var ret strings.Builder - ret.WriteString("^") + ret.WriteString("^/") segs := strings.Split(filter, "/") for _, seg := range segs { // Inv: seg != "" since path is clean. 
@@ -77,7 +87,7 @@ func convertFilterToRegexp(filter string) *regexp.Regexp { // BenchmarkWorkspaceSymbols time by ~20% (even though // filter CPU time increased by only by ~2.5%) when the // default filter was changed to "**/node_modules". - pattern = strings.TrimPrefix(pattern, "^.*") + pattern = strings.TrimPrefix(pattern, "^/.*") return regexp.MustCompile(pattern) } diff --git a/gopls/internal/cache/methodsets/methodsets.go b/gopls/internal/cache/methodsets/methodsets.go index 3026819ee81..2387050f2d9 100644 --- a/gopls/internal/cache/methodsets/methodsets.go +++ b/gopls/internal/cache/methodsets/methodsets.go @@ -52,6 +52,7 @@ import ( "golang.org/x/tools/go/types/objectpath" "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/gopls/internal/util/fingerprint" "golang.org/x/tools/gopls/internal/util/frob" "golang.org/x/tools/gopls/internal/util/safetoken" "golang.org/x/tools/internal/typesinternal" @@ -195,7 +196,7 @@ func implements(x, y *gobMethodSet) bool { // so a string match is sufficient. match = mx.Sum&my.Sum == my.Sum && mx.Fingerprint == my.Fingerprint } else { - match = unify(mx.parse(), my.parse()) + match = fingerprint.Matches(mx.parse(), my.parse()) } return !match } @@ -326,7 +327,7 @@ func methodSetInfo(t types.Type, setIndexInfo func(*gobMethod, *types.Func)) *go for i := 0; i < mset.Len(); i++ { m := mset.At(i).Obj().(*types.Func) id := m.Id() - fp, isTricky := fingerprint(m.Signature()) + fp, isTricky := fingerprint.Encode(m.Signature()) if isTricky { tricky = true } @@ -389,7 +390,7 @@ type gobMethod struct { ObjectPath int // object path of method relative to PkgPath // internal fields (not serialized) - tree atomic.Pointer[sexpr] // fingerprint tree, parsed on demand + tree atomic.Pointer[fingerprint.Tree] // fingerprint tree, parsed on demand } // A gobPosition records the file, offset, and length of an identifier. @@ -400,10 +401,10 @@ type gobPosition struct { // parse returns the method's parsed fingerprint tree. 
// It may return a new instance or a cached one. -func (m *gobMethod) parse() sexpr { +func (m *gobMethod) parse() fingerprint.Tree { ptr := m.tree.Load() if ptr == nil { - tree := parseFingerprint(m.Fingerprint) + tree := fingerprint.Parse(m.Fingerprint) ptr = &tree m.tree.Store(ptr) // may race; that's ok } diff --git a/gopls/internal/cache/port.go b/gopls/internal/cache/port.go index 40005bcf6d4..8caaa801b68 100644 --- a/gopls/internal/cache/port.go +++ b/gopls/internal/cache/port.go @@ -7,6 +7,7 @@ package cache import ( "bytes" "go/build" + "go/build/constraint" "go/parser" "go/token" "io" @@ -173,12 +174,16 @@ func (p port) matches(path string, content []byte) bool { // without trimming content. func trimContentForPortMatch(content []byte) []byte { buildComment := buildComment(content) - return []byte(buildComment + "\npackage p") // package name does not matter + // The package name does not matter, but +build lines + // require a blank line before the package declaration. + return []byte(buildComment + "\n\npackage p") } // buildComment returns the first matching //go:build comment in the given // content, or "" if none exists. func buildComment(content []byte) string { + var lines []string + f, err := parser.ParseFile(token.NewFileSet(), "", content, parser.PackageClauseOnly|parser.ParseComments) if err != nil { return "" @@ -186,24 +191,15 @@ func buildComment(content []byte) string { for _, cg := range f.Comments { for _, c := range cg.List { - if isGoBuildComment(c.Text) { + if constraint.IsGoBuild(c.Text) { + // A file must have only one //go:build line. return c.Text } + if constraint.IsPlusBuild(c.Text) { + // A file may have several // +build lines. + lines = append(lines, c.Text) + } } } - return "" -} - -// Adapted from go/build/build.go. -// -// TODO(rfindley): use constraint.IsGoBuild once we are on 1.19+. 
-func isGoBuildComment(line string) bool { - const goBuildComment = "//go:build" - if !strings.HasPrefix(line, goBuildComment) { - return false - } - // Report whether //go:build is followed by a word boundary. - line = strings.TrimSpace(line) - rest := line[len(goBuildComment):] - return len(rest) == 0 || len(strings.TrimSpace(rest)) < len(rest) + return strings.Join(lines, "\n") } diff --git a/gopls/internal/cache/session.go b/gopls/internal/cache/session.go index c2f57e985f7..c46fc78b975 100644 --- a/gopls/internal/cache/session.go +++ b/gopls/internal/cache/session.go @@ -169,14 +169,14 @@ func (s *Session) createView(ctx context.Context, def *viewDefinition) (*View, * // Compute a prefix match, respecting segment boundaries, by ensuring // the pattern (dir) has a trailing slash. dirPrefix := strings.TrimSuffix(string(def.folder.Dir), "/") + "/" - filterer := NewFilterer(def.folder.Options.DirectoryFilters) + pathIncluded := PathIncludeFunc(def.folder.Options.DirectoryFilters) skipPath = func(dir string) bool { uri := strings.TrimSuffix(string(protocol.URIFromPath(dir)), "/") // Note that the logic below doesn't handle the case where uri == // v.folder.Dir, because there is no point in excluding the entire // workspace folder! 
if rel := strings.TrimPrefix(uri, dirPrefix); rel != uri { - return filterer.Disallow(rel) + return !pathIncluded(rel) } return false } diff --git a/gopls/internal/cache/testfuncs/tests.go b/gopls/internal/cache/testfuncs/tests.go index 1182795b37b..e0e3ce1beca 100644 --- a/gopls/internal/cache/testfuncs/tests.go +++ b/gopls/internal/cache/testfuncs/tests.go @@ -57,6 +57,7 @@ func NewIndex(files []*parsego.File, info *types.Info) *Index { b := &indexBuilder{ fileIndex: make(map[protocol.DocumentURI]int), subNames: make(map[string]int), + visited: make(map[*types.Func]bool), } return b.build(files, info) } @@ -101,6 +102,7 @@ func (b *indexBuilder) build(files []*parsego.File, info *types.Info) *Index { } b.Files[i].Tests = append(b.Files[i].Tests, t) + b.visited[obj] = true // Check for subtests if isTest { @@ -168,27 +170,48 @@ func (b *indexBuilder) findSubtests(parent gobTest, typ *ast.FuncType, body *ast t.Location.Range, _ = file.NodeRange(call) tests = append(tests, t) - if typ, body := findFunc(files, info, body, call.Args[1]); typ != nil { + fn, typ, body := findFunc(files, info, body, call.Args[1]) + if typ == nil { + continue + } + + // Function literals don't have an associated object + if fn == nil { tests = append(tests, b.findSubtests(t, typ, body, file, files, info)...) + continue + } + + // Never recurse if the second argument is a top-level test function + if isTest, _ := isTestOrExample(fn); isTest { + continue + } + + // Don't recurse into functions that have already been visited + if b.visited[fn] { + continue } + + b.visited[fn] = true + tests = append(tests, b.findSubtests(t, typ, body, file, files, info)...) } return tests } // findFunc finds the type and body of the given expr, which may be a function -// literal or reference to a declared function. -// -// If no function is found, findFunc returns (nil, nil). 
-func findFunc(files []*parsego.File, info *types.Info, body *ast.BlockStmt, expr ast.Expr) (*ast.FuncType, *ast.BlockStmt) { +// literal or reference to a declared function. If the expression is a declared +// function, findFunc returns its [types.Func]. If the expression is a function +// literal, findFunc returns nil for the first return value. If no function is +// found, findFunc returns (nil, nil, nil). +func findFunc(files []*parsego.File, info *types.Info, body *ast.BlockStmt, expr ast.Expr) (*types.Func, *ast.FuncType, *ast.BlockStmt) { var obj types.Object switch arg := expr.(type) { case *ast.FuncLit: - return arg.Type, arg.Body + return nil, arg.Type, arg.Body case *ast.Ident: obj = info.ObjectOf(arg) if obj == nil { - return nil, nil + return nil, nil, nil } case *ast.SelectorExpr: @@ -198,12 +221,12 @@ func findFunc(files []*parsego.File, info *types.Info, body *ast.BlockStmt, expr // complex. However, those cases should be rare. sel, ok := info.Selections[arg] if !ok { - return nil, nil + return nil, nil, nil } obj = sel.Obj() default: - return nil, nil + return nil, nil, nil } if v, ok := obj.(*types.Var); ok { @@ -211,7 +234,7 @@ func findFunc(files []*parsego.File, info *types.Info, body *ast.BlockStmt, expr // the file), but that doesn't account for assignment. If the variable // is assigned multiple times, we could easily get the wrong one. 
_, _ = v, body - return nil, nil + return nil, nil, nil } for _, file := range files { @@ -228,11 +251,11 @@ func findFunc(files []*parsego.File, info *types.Info, body *ast.BlockStmt, expr } if info.ObjectOf(decl.Name) == obj { - return decl.Type, decl.Body + return obj.(*types.Func), decl.Type, decl.Body } } } - return nil, nil + return nil, nil, nil } // isTestOrExample reports whether the given func is a testing func or an @@ -308,6 +331,7 @@ type indexBuilder struct { gobPackage fileIndex map[protocol.DocumentURI]int subNames map[string]int + visited map[*types.Func]bool } // -- serial format of index -- diff --git a/gopls/internal/cache/view.go b/gopls/internal/cache/view.go index fc1ac5724ed..6bb0ae8edeb 100644 --- a/gopls/internal/cache/view.go +++ b/gopls/internal/cache/view.go @@ -477,11 +477,11 @@ func (v *View) filterFunc() func(protocol.DocumentURI) bool { modcacheFilter := "-" + strings.TrimPrefix(filepath.ToSlash(pref), "/") filters = append(filters, modcacheFilter) } - filterer := NewFilterer(filters) + pathIncluded := PathIncludeFunc(filters) v._filterFunc = func(uri protocol.DocumentURI) bool { // Only filter relative to the configured root directory. 
if pathutil.InDir(folderDir, uri.Path()) { - return relPathExcludedByFilter(strings.TrimPrefix(uri.Path(), folderDir), filterer) + return relPathExcludedByFilter(strings.TrimPrefix(uri.Path(), folderDir), pathIncluded) } return false } @@ -1264,7 +1264,7 @@ func allFilesExcluded(files []string, filterFunc func(protocol.DocumentURI) bool return true } -func relPathExcludedByFilter(path string, filterer *Filterer) bool { +func relPathExcludedByFilter(path string, pathIncluded func(string) bool) bool { path = strings.TrimPrefix(filepath.ToSlash(path), "/") - return filterer.Disallow(path) + return !pathIncluded(path) } diff --git a/gopls/internal/cache/view_test.go b/gopls/internal/cache/view_test.go index 992a3d61828..46000191e42 100644 --- a/gopls/internal/cache/view_test.go +++ b/gopls/internal/cache/view_test.go @@ -90,14 +90,14 @@ func TestFilters(t *testing.T) { } for _, tt := range tests { - filterer := NewFilterer(tt.filters) + pathIncluded := PathIncludeFunc(tt.filters) for _, inc := range tt.included { - if relPathExcludedByFilter(inc, filterer) { + if relPathExcludedByFilter(inc, pathIncluded) { t.Errorf("filters %q excluded %v, wanted included", tt.filters, inc) } } for _, exc := range tt.excluded { - if !relPathExcludedByFilter(exc, filterer) { + if !relPathExcludedByFilter(exc, pathIncluded) { t.Errorf("filters %q included %v, wanted excluded", tt.filters, exc) } } diff --git a/gopls/internal/cmd/cmd.go b/gopls/internal/cmd/cmd.go index 4a00afc4115..fed96388fb4 100644 --- a/gopls/internal/cmd/cmd.go +++ b/gopls/internal/cmd/cmd.go @@ -343,7 +343,8 @@ func (c *connection) initialize(ctx context.Context, options func(*settings.Opti // Make sure to respect configured options when sending initialize request. opts := settings.DefaultOptions(options) - // If you add an additional option here, you must update the map key in connect. 
+ // If you add an additional option here, + // you must update the map key of settings.DefaultOptions called in (*Application).connect. params.Capabilities.TextDocument.Hover = &protocol.HoverClientCapabilities{ ContentFormat: []protocol.MarkupKind{opts.PreferredContentFormat}, } @@ -351,7 +352,7 @@ func (c *connection) initialize(ctx context.Context, options func(*settings.Opti params.Capabilities.TextDocument.SemanticTokens = protocol.SemanticTokensClientCapabilities{} params.Capabilities.TextDocument.SemanticTokens.Formats = []protocol.TokenFormat{"relative"} params.Capabilities.TextDocument.SemanticTokens.Requests.Range = &protocol.Or_ClientSemanticTokensRequestOptions_range{Value: true} - //params.Capabilities.TextDocument.SemanticTokens.Requests.Range.Value = true + // params.Capabilities.TextDocument.SemanticTokens.Requests.Range.Value = true params.Capabilities.TextDocument.SemanticTokens.Requests.Full = &protocol.Or_ClientSemanticTokensRequestOptions_full{Value: true} params.Capabilities.TextDocument.SemanticTokens.TokenTypes = moreslices.ConvertStrings[string](semtok.TokenTypes) params.Capabilities.TextDocument.SemanticTokens.TokenModifiers = moreslices.ConvertStrings[string](semtok.TokenModifiers) @@ -363,6 +364,9 @@ func (c *connection) initialize(ctx context.Context, options func(*settings.Opti }, } params.Capabilities.Window.WorkDoneProgress = true + params.Capabilities.Workspace.FileOperations = &protocol.FileOperationClientCapabilities{ + DidCreate: true, + } params.InitializationOptions = map[string]any{ "symbolMatcher": string(opts.SymbolMatcher), @@ -817,10 +821,10 @@ func (c *connection) diagnoseFiles(ctx context.Context, files []protocol.Documen } func (c *connection) terminate(ctx context.Context) { - //TODO: do we need to handle errors on these calls? + // TODO: do we need to handle errors on these calls? 
c.Shutdown(ctx) - //TODO: right now calling exit terminates the process, we should rethink that - //server.Exit(ctx) + // TODO: right now calling exit terminates the process, we should rethink that + // server.Exit(ctx) } // Implement io.Closer. diff --git a/gopls/internal/cmd/integration_test.go b/gopls/internal/cmd/integration_test.go index e7ac774f5c0..6e4b450635b 100644 --- a/gopls/internal/cmd/integration_test.go +++ b/gopls/internal/cmd/integration_test.go @@ -508,6 +508,14 @@ func f() { func TestImplementations(t *testing.T) { t.Parallel() + // types.CheckExpr, now used in the rangeint modernizer, had a + // data race (#71817) that was fixed in go1.25 and backported + // to go1.24 but not to go1.23. Although in principle it could + // affect a lot of tests, it (weirdly) only seems to show up + // in this one (#72082). Rather than backport again, we + // suppress this test. + testenv.NeedsGo1Point(t, 24) + tree := writeTree(t, ` -- a.go -- package a @@ -1002,9 +1010,9 @@ type C struct{} res := gopls(t, tree, "codeaction", "-title=Browse.*doc", "a/a.go") res.checkExit(true) got := res.stdout - want := `command "Browse gopls feature documentation" [gopls.doc.features]` + + want := `command "Browse documentation for package a" [source.doc]` + "\n" + - `command "Browse documentation for package a" [source.doc]` + + `command "Browse gopls feature documentation" [gopls.doc.features]` + "\n" if got != want { t.Errorf("codeaction: got <<%s>>, want <<%s>>\nstderr:\n%s", got, want, res.stderr) diff --git a/gopls/internal/doc/api.json b/gopls/internal/doc/api.json index b9e0e78e950..0852870ba41 100644 --- a/gopls/internal/doc/api.json +++ b/gopls/internal/doc/api.json @@ -562,7 +562,7 @@ }, { "Name": "\"modernize\"", - "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code by using more modern features of Go, such as:\n\n - replacing an if/else conditional assignment by a call to the\n 
built-in min or max functions added in go1.21;\n - replacing sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21;\n - replacing interface{} by the 'any' type added in go1.18;\n - replacing append([]T(nil), s...) by slices.Clone(s) or\n slices.Concat(s), added in go1.21;\n - replacing a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions\n from the maps package, added in go1.21;\n - replacing []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...),\n added in go1.19;\n - replacing uses of context.WithCancel in tests with t.Context, added in\n go1.24;\n - replacing omitempty by omitzero on structs, added in go1.24;\n - replacing append(s[:i], s[i+1]...) by slices.Delete(s, i, i+1),\n added in go1.21\n - replacing a 3-clause for i := 0; i \u003c n; i++ {} loop by\n for i := range n {}, added in go1.22;\n - replacing Split in \"for range strings.Split(...)\" by go1.24's\n more efficient SplitSeq, or Fields with FieldSeq;\n\nTo apply all modernization fixes en masse, you can use the\nfollowing command:\n\n\t$ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -test ./...\n\nIf the tool warns of conflicting fixes, you may need to run it more\nthan once until it has applied all fixes cleanly. This command is\nnot an officially supported interface and may change in the future.", + "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code by using more modern features of Go and its standard\nlibrary.\n\nEach diagnostic provides a fix. Our intent is that these fixes may\nbe safely applied en masse without changing the behavior of your\nprogram. In some cases the suggested fixes are imperfect and may\nlead to (for example) unused imports or unused local variables,\ncausing build breakage. However, these problems are generally\ntrivial to fix. 
We regard any modernizer whose fix changes program\nbehavior to have a serious bug and will endeavor to fix it.\n\nTo apply all modernization fixes en masse, you can use the\nfollowing command:\n\n\t$ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -fix -test ./...\n\nIf the tool warns of conflicting fixes, you may need to run it more\nthan once until it has applied all fixes cleanly. This command is\nnot an officially supported interface and may change in the future.\n\nChanges produced by this tool should be reviewed as usual before\nbeing merged. In some cases, a loop may be replaced by a simple\nfunction call, causing comments within the loop to be discarded.\nHuman judgment may be required to avoid losing comments of value.\n\nEach diagnostic reported by modernize has a specific category. (The\ncategories are listed below.) Diagnostics in some categories, such\nas \"efaceany\" (which replaces \"interface{}\" with \"any\" where it is\nsafe to do so) are particularly numerous. It may ease the burden of\ncode review to apply fixes in two passes, the first change\nconsisting only of fixes of category \"efaceany\", the second\nconsisting of all others. 
This can be achieved using the -category flag:\n\n\t$ modernize -category=efaceany -fix -test ./...\n\t$ modernize -category=-efaceany -fix -test ./...\n\nCategories of modernize diagnostic:\n\n - forvar: remove x := x variable declarations made unnecessary by the new semantics of loops in go1.22.\n\n - slicescontains: replace 'for i, elem := range s { if elem == needle { ...; break }'\n by a call to slices.Contains, added in go1.21.\n\n - minmax: replace an if/else conditional assignment by a call to\n the built-in min or max functions added in go1.21.\n\n - sortslice: replace sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21.\n\n - efaceany: replace interface{} by the 'any' type added in go1.18.\n\n - slicesclone: replace append([]T(nil), s...) by slices.Clone(s) or\n slices.Concat(s), added in go1.21.\n\n - mapsloop: replace a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions from\n the maps package, added in go1.21.\n\n - fmtappendf: replace []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...),\n added in go1.19.\n\n - testingcontext: replace uses of context.WithCancel in tests\n with t.Context, added in go1.24.\n\n - omitzero: replace omitempty by omitzero on structs, added in go1.24.\n\n - bloop: replace \"for i := range b.N\" or \"for range b.N\" in a\n benchmark with \"for b.Loop()\", and remove any preceding calls\n to b.StopTimer, b.StartTimer, and b.ResetTimer.\n\n - slicesdelete: replace append(s[:i], s[i+1]...) 
by\n slices.Delete(s, i, i+1), added in go1.21.\n\n - rangeint: replace a 3-clause \"for i := 0; i \u003c n; i++\" loop by\n \"for i := range n\", added in go1.22.\n\n - stringsseq: replace Split in \"for range strings.Split(...)\" by go1.24's\n more efficient SplitSeq, or Fields with FieldSeq.\n\n - stringscutprefix: replace some uses of HasPrefix followed by TrimPrefix with CutPrefix,\n added to the strings package in go1.20.\n\n - waitgroup: replace old complex usages of sync.WaitGroup by less complex WaitGroup.Go method in go1.25.", "Default": "true", "Status": "" }, @@ -1297,7 +1297,7 @@ { "Name": "gofix", "Doc": "apply fixes based on go:fix comment directives\n\nThe gofix analyzer inlines functions and constants that are marked for inlining.", - "URL": "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/gofix", + "URL": "https://pkg.go.dev/golang.org/x/tools/internal/gofix", "Default": true }, { @@ -1338,7 +1338,7 @@ }, { "Name": "modernize", - "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code by using more modern features of Go, such as:\n\n - replacing an if/else conditional assignment by a call to the\n built-in min or max functions added in go1.21;\n - replacing sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21;\n - replacing interface{} by the 'any' type added in go1.18;\n - replacing append([]T(nil), s...) by slices.Clone(s) or\n slices.Concat(s), added in go1.21;\n - replacing a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions\n from the maps package, added in go1.21;\n - replacing []byte(fmt.Sprintf...) by fmt.Appendf(nil, ...),\n added in go1.19;\n - replacing uses of context.WithCancel in tests with t.Context, added in\n go1.24;\n - replacing omitempty by omitzero on structs, added in go1.24;\n - replacing append(s[:i], s[i+1]...) 
by slices.Delete(s, i, i+1),\n added in go1.21\n - replacing a 3-clause for i := 0; i \u003c n; i++ {} loop by\n for i := range n {}, added in go1.22;\n - replacing Split in \"for range strings.Split(...)\" by go1.24's\n more efficient SplitSeq, or Fields with FieldSeq;\n\nTo apply all modernization fixes en masse, you can use the\nfollowing command:\n\n\t$ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -test ./...\n\nIf the tool warns of conflicting fixes, you may need to run it more\nthan once until it has applied all fixes cleanly. This command is\nnot an officially supported interface and may change in the future.", + "Doc": "simplify code by using modern constructs\n\nThis analyzer reports opportunities for simplifying and clarifying\nexisting code by using more modern features of Go and its standard\nlibrary.\n\nEach diagnostic provides a fix. Our intent is that these fixes may\nbe safely applied en masse without changing the behavior of your\nprogram. In some cases the suggested fixes are imperfect and may\nlead to (for example) unused imports or unused local variables,\ncausing build breakage. However, these problems are generally\ntrivial to fix. We regard any modernizer whose fix changes program\nbehavior to have a serious bug and will endeavor to fix it.\n\nTo apply all modernization fixes en masse, you can use the\nfollowing command:\n\n\t$ go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -fix -test ./...\n\nIf the tool warns of conflicting fixes, you may need to run it more\nthan once until it has applied all fixes cleanly. This command is\nnot an officially supported interface and may change in the future.\n\nChanges produced by this tool should be reviewed as usual before\nbeing merged. 
In some cases, a loop may be replaced by a simple\nfunction call, causing comments within the loop to be discarded.\nHuman judgment may be required to avoid losing comments of value.\n\nEach diagnostic reported by modernize has a specific category. (The\ncategories are listed below.) Diagnostics in some categories, such\nas \"efaceany\" (which replaces \"interface{}\" with \"any\" where it is\nsafe to do so) are particularly numerous. It may ease the burden of\ncode review to apply fixes in two passes, the first change\nconsisting only of fixes of category \"efaceany\", the second\nconsisting of all others. This can be achieved using the -category flag:\n\n\t$ modernize -category=efaceany -fix -test ./...\n\t$ modernize -category=-efaceany -fix -test ./...\n\nCategories of modernize diagnostic:\n\n - forvar: remove x := x variable declarations made unnecessary by the new semantics of loops in go1.22.\n\n - slicescontains: replace 'for i, elem := range s { if elem == needle { ...; break }'\n by a call to slices.Contains, added in go1.21.\n\n - minmax: replace an if/else conditional assignment by a call to\n the built-in min or max functions added in go1.21.\n\n - sortslice: replace sort.Slice(x, func(i, j int) bool) { return s[i] \u003c s[j] }\n by a call to slices.Sort(s), added in go1.21.\n\n - efaceany: replace interface{} by the 'any' type added in go1.18.\n\n - slicesclone: replace append([]T(nil), s...) by slices.Clone(s) or\n slices.Concat(s), added in go1.21.\n\n - mapsloop: replace a loop around an m[k]=v map update by a call\n to one of the Collect, Copy, Clone, or Insert functions from\n the maps package, added in go1.21.\n\n - fmtappendf: replace []byte(fmt.Sprintf...) 
by fmt.Appendf(nil, ...),\n added in go1.19.\n\n - testingcontext: replace uses of context.WithCancel in tests\n with t.Context, added in go1.24.\n\n - omitzero: replace omitempty by omitzero on structs, added in go1.24.\n\n - bloop: replace \"for i := range b.N\" or \"for range b.N\" in a\n benchmark with \"for b.Loop()\", and remove any preceding calls\n to b.StopTimer, b.StartTimer, and b.ResetTimer.\n\n - slicesdelete: replace append(s[:i], s[i+1]...) by\n slices.Delete(s, i, i+1), added in go1.21.\n\n - rangeint: replace a 3-clause \"for i := 0; i \u003c n; i++\" loop by\n \"for i := range n\", added in go1.22.\n\n - stringsseq: replace Split in \"for range strings.Split(...)\" by go1.24's\n more efficient SplitSeq, or Fields with FieldSeq.\n\n - stringscutprefix: replace some uses of HasPrefix followed by TrimPrefix with CutPrefix,\n added to the strings package in go1.20.\n\n - waitgroup: replace old complex usages of sync.WaitGroup by less complex WaitGroup.Go method in go1.25.", "URL": "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/modernize", "Default": true }, diff --git a/gopls/internal/fuzzy/input.go b/gopls/internal/fuzzy/input.go index c1038163f1a..fd8575f6382 100644 --- a/gopls/internal/fuzzy/input.go +++ b/gopls/internal/fuzzy/input.go @@ -36,7 +36,7 @@ func RuneRoles(candidate []byte, reuse []RuneRole) []RuneRole { } prev, prev2 := rtNone, rtNone - for i := 0; i < len(candidate); i++ { + for i := range candidate { r := rune(candidate[i]) role := RNone @@ -122,7 +122,7 @@ func LastSegment(input string, roles []RuneRole) string { func fromChunks(chunks []string, buffer []byte) []byte { ii := 0 for _, chunk := range chunks { - for i := 0; i < len(chunk); i++ { + for i := range len(chunk) { if ii >= cap(buffer) { break } @@ -143,7 +143,7 @@ func toLower(input []byte, reuse []byte) []byte { output = make([]byte, len(input)) } - for i := 0; i < len(input); i++ { + for i := range input { r := rune(input[i]) if input[i] <= unicode.MaxASCII 
{ if 'A' <= r && r <= 'Z' { diff --git a/gopls/internal/fuzzy/input_test.go b/gopls/internal/fuzzy/input_test.go index ffe147241b6..dd751b8f0c2 100644 --- a/gopls/internal/fuzzy/input_test.go +++ b/gopls/internal/fuzzy/input_test.go @@ -127,7 +127,7 @@ func BenchmarkRoles(b *testing.B) { str := "AbstractSWTFactory" out := make([]fuzzy.RuneRole, len(str)) - for i := 0; i < b.N; i++ { + for b.Loop() { fuzzy.RuneRoles([]byte(str), out) } b.SetBytes(int64(len(str))) diff --git a/gopls/internal/fuzzy/matcher.go b/gopls/internal/fuzzy/matcher.go index 29d1b36501e..eff86efac34 100644 --- a/gopls/internal/fuzzy/matcher.go +++ b/gopls/internal/fuzzy/matcher.go @@ -134,10 +134,7 @@ func (m *Matcher) ScoreChunks(chunks []string) float32 { if sc < 0 { sc = 0 } - normalizedScore := float32(sc) * m.scoreScale - if normalizedScore > 1 { - normalizedScore = 1 - } + normalizedScore := min(float32(sc)*m.scoreScale, 1) return normalizedScore } @@ -177,7 +174,7 @@ func (m *Matcher) MatchedRanges() []int { i-- } // Reverse slice. - for i := 0; i < len(ret)/2; i++ { + for i := range len(ret) / 2 { ret[i], ret[len(ret)-1-i] = ret[len(ret)-1-i], ret[i] } return ret @@ -211,7 +208,7 @@ func (m *Matcher) computeScore(candidate []byte, candidateLower []byte) int { m.scores[0][0][0] = score(0, 0) // Start with 0. segmentsLeft, lastSegStart := 1, 0 - for i := 0; i < candLen; i++ { + for i := range candLen { if m.roles[i] == RSep { segmentsLeft++ lastSegStart = i + 1 @@ -304,7 +301,7 @@ func (m *Matcher) computeScore(candidate []byte, candidateLower []byte) int { // Third dimension encodes whether there is a gap between the previous match and the current // one. 
- for k := 0; k < 2; k++ { + for k := range 2 { sc := m.scores[i-1][j-1][k].val() + charScore isConsecutive := k == 1 || i-1 == 0 || i-1 == lastSegStart @@ -342,7 +339,7 @@ func (m *Matcher) ScoreTable(candidate string) string { var line1, line2, separator bytes.Buffer line1.WriteString("\t") line2.WriteString("\t") - for j := 0; j < len(m.pattern); j++ { + for j := range len(m.pattern) { line1.WriteString(fmt.Sprintf("%c\t\t", m.pattern[j])) separator.WriteString("----------------") } diff --git a/gopls/internal/fuzzy/matcher_test.go b/gopls/internal/fuzzy/matcher_test.go index 056da25d675..f743be0c5ef 100644 --- a/gopls/internal/fuzzy/matcher_test.go +++ b/gopls/internal/fuzzy/matcher_test.go @@ -293,8 +293,7 @@ func BenchmarkMatcher(b *testing.B) { matcher := fuzzy.NewMatcher(pattern) - b.ResetTimer() - for i := 0; i < b.N; i++ { + for b.Loop() { for _, c := range candidates { matcher.Score(c) } diff --git a/gopls/internal/fuzzy/self_test.go b/gopls/internal/fuzzy/self_test.go index 1c64f1953df..7cdb4fdef96 100644 --- a/gopls/internal/fuzzy/self_test.go +++ b/gopls/internal/fuzzy/self_test.go @@ -14,7 +14,7 @@ func BenchmarkSelf_Matcher(b *testing.B) { idents := collectIdentifiers(b) patterns := generatePatterns() - for i := 0; i < b.N; i++ { + for b.Loop() { for _, pattern := range patterns { sm := NewMatcher(pattern) for _, ident := range idents { @@ -28,7 +28,7 @@ func BenchmarkSelf_SymbolMatcher(b *testing.B) { idents := collectIdentifiers(b) patterns := generatePatterns() - for i := 0; i < b.N; i++ { + for b.Loop() { for _, pattern := range patterns { sm := NewSymbolMatcher(pattern) for _, ident := range idents { diff --git a/gopls/internal/golang/addtest.go b/gopls/internal/golang/addtest.go index 4a43a82ffee..e952874e109 100644 --- a/gopls/internal/golang/addtest.go +++ b/gopls/internal/golang/addtest.go @@ -319,7 +319,7 @@ func AddTestForFunc(ctx context.Context, snapshot *cache.Snapshot, loc protocol. // package decl based on the originating file. 
// Search for something that looks like a copyright header, to replicate // in the new file. - if c := copyrightComment(pgf.File); c != nil { + if c := CopyrightComment(pgf.File); c != nil { start, end, err := pgf.NodeOffsets(c) if err != nil { return nil, err diff --git a/gopls/internal/golang/codeaction.go b/gopls/internal/golang/codeaction.go index 74f3c2b6085..7949493a896 100644 --- a/gopls/internal/golang/codeaction.go +++ b/gopls/internal/golang/codeaction.go @@ -14,7 +14,6 @@ import ( "path/filepath" "reflect" "slices" - "sort" "strings" "golang.org/x/tools/go/ast/astutil" @@ -112,10 +111,7 @@ func CodeActions(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, } } - sort.Slice(actions, func(i, j int) bool { - return actions[i].Kind < actions[j].Kind - }) - + // Return code actions in the order their providers are listed. return actions, nil } @@ -233,6 +229,8 @@ type codeActionProducer struct { needPkg bool // fn needs type information (req.pkg) } +// Code Actions are returned in the order their producers are listed below. +// Depending on the client, this may influence the order they appear in the UI. 
var codeActionProducers = [...]codeActionProducer{ {kind: protocol.QuickFix, fn: quickFix, needPkg: true}, {kind: protocol.SourceOrganizeImports, fn: sourceOrganizeImports}, @@ -240,9 +238,8 @@ var codeActionProducers = [...]codeActionProducer{ {kind: settings.GoAssembly, fn: goAssembly, needPkg: true}, {kind: settings.GoDoc, fn: goDoc, needPkg: true}, {kind: settings.GoFreeSymbols, fn: goFreeSymbols}, - {kind: settings.GoTest, fn: goTest}, + {kind: settings.GoTest, fn: goTest, needPkg: true}, {kind: settings.GoToggleCompilerOptDetails, fn: toggleCompilerOptDetails}, - {kind: settings.GoplsDocFeatures, fn: goplsDocFeatures}, {kind: settings.RefactorExtractFunction, fn: refactorExtractFunction}, {kind: settings.RefactorExtractMethod, fn: refactorExtractMethod}, {kind: settings.RefactorExtractToNewFile, fn: refactorExtractToNewFile}, @@ -261,6 +258,7 @@ var codeActionProducers = [...]codeActionProducer{ {kind: settings.RefactorRewriteMoveParamRight, fn: refactorRewriteMoveParamRight, needPkg: true}, {kind: settings.RefactorRewriteSplitLines, fn: refactorRewriteSplitLines, needPkg: true}, {kind: settings.RefactorRewriteEliminateDotImport, fn: refactorRewriteEliminateDotImport, needPkg: true}, + {kind: settings.GoplsDocFeatures, fn: goplsDocFeatures}, // offer this one last (#72742) // Note: don't forget to update the allow-list in Server.CodeAction // when adding new query operations like GoTest and GoDoc that @@ -956,7 +954,7 @@ func goAssembly(ctx context.Context, req *codeActionsRequest) error { sym.WriteString(".") curSel, _ := req.pgf.Cursor.FindPos(req.start, req.end) - for cur := range curSel.Ancestors((*ast.FuncDecl)(nil), (*ast.ValueSpec)(nil)) { + for cur := range curSel.Enclosing((*ast.FuncDecl)(nil), (*ast.ValueSpec)(nil)) { var name string // in command title switch node := cur.Node().(type) { case *ast.FuncDecl: diff --git a/gopls/internal/golang/completion/completion.go b/gopls/internal/golang/completion/completion.go index a6c0e49c311..a3270f97909 
100644 --- a/gopls/internal/golang/completion/completion.go +++ b/gopls/internal/golang/completion/completion.go @@ -164,14 +164,14 @@ func (i *CompletionItem) addConversion(c *completer, conv conversionEdits) error // Scoring constants are used for weighting the relevance of different candidates. const ( + // lowScore indicates an irrelevant or not useful completion item. + lowScore float64 = 0.01 + // stdScore is the base score for all completion items. stdScore float64 = 1.0 // highScore indicates a very relevant completion item. highScore float64 = 10.0 - - // lowScore indicates an irrelevant or not useful completion item. - lowScore float64 = 0.01 ) // matcher matches a candidate's label against the user input. The @@ -702,7 +702,7 @@ func Completion(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, p // depend on other candidates having already been collected. c.addStatementCandidates() - c.sortItems() + sortItems(c.items) return c.items, c.getSurrounding(), nil } @@ -830,16 +830,16 @@ func (c *completer) scanToken(contents []byte) (token.Pos, token.Token, string) } } -func (c *completer) sortItems() { - sort.SliceStable(c.items, func(i, j int) bool { +func sortItems(items []CompletionItem) { + sort.SliceStable(items, func(i, j int) bool { // Sort by score first. - if c.items[i].Score != c.items[j].Score { - return c.items[i].Score > c.items[j].Score + if items[i].Score != items[j].Score { + return items[i].Score > items[j].Score } // Then sort by label so order stays consistent. This also has the // effect of preferring shorter candidates. - return c.items[i].Label < c.items[j].Label + return items[i].Label < items[j].Label }) } @@ -1177,7 +1177,10 @@ func isValidIdentifierChar(char byte) bool { // adds struct fields, interface methods, function declaration fields to completion func (c *completer) addFieldItems(fields *ast.FieldList) { - if fields == nil { + // TODO: in golang/go#72828, we get here with a nil surrounding. 
+ // This indicates a logic bug elsewhere: we should only be interrogating the + // surrounding if it is set. + if fields == nil || c.surrounding == nil { return } @@ -1998,6 +2001,8 @@ func (c *completer) structLiteralFieldName(ctx context.Context) error { // enclosingCompositeLiteral returns information about the composite literal enclosing the // position. +// It returns nil on failure; for example, if there is no type information for a +// node on path. func enclosingCompositeLiteral(path []ast.Node, pos token.Pos, info *types.Info) *compLitInfo { for _, n := range path { switch n := n.(type) { @@ -2562,7 +2567,7 @@ func inferExpectedResultTypes(c *completer, callNodeIdx int) []types.Type { switch node := c.path[callNodeIdx+1].(type) { case *ast.KeyValueExpr: enclosingCompositeLiteral := enclosingCompositeLiteral(c.path[callNodeIdx:], callNode.Pos(), c.pkg.TypesInfo()) - if !wantStructFieldCompletions(enclosingCompositeLiteral) { + if enclosingCompositeLiteral != nil && !wantStructFieldCompletions(enclosingCompositeLiteral) { expectedResults = append(expectedResults, expectedCompositeLiteralType(enclosingCompositeLiteral, callNode.Pos())) } case *ast.AssignStmt: diff --git a/gopls/internal/golang/completion/newfile.go b/gopls/internal/golang/completion/newfile.go new file mode 100644 index 00000000000..d9869a2f050 --- /dev/null +++ b/gopls/internal/golang/completion/newfile.go @@ -0,0 +1,65 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package completion + +import ( + "bytes" + "context" + "fmt" + + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/cache/parsego" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/golang" + "golang.org/x/tools/gopls/internal/protocol" +) + +// NewFile returns a document change to complete an empty go file. 
+func NewFile(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) (*protocol.DocumentChange, error) { + if bs, err := fh.Content(); err != nil || len(bs) != 0 { + return nil, err + } + meta, err := golang.NarrowestMetadataForFile(ctx, snapshot, fh.URI()) + if err != nil { + return nil, err + } + var buf bytes.Buffer + // Copy the copyright header from the first existing file that has one. + for _, fileURI := range meta.GoFiles { + if fileURI == fh.URI() { + continue + } + fh, err := snapshot.ReadFile(ctx, fileURI) + if err != nil { + continue + } + pgf, err := snapshot.ParseGo(ctx, fh, parsego.Header) + if err != nil { + continue + } + if group := golang.CopyrightComment(pgf.File); group != nil { + start, end, err := pgf.NodeOffsets(group) + if err != nil { + continue + } + buf.Write(pgf.Src[start:end]) + buf.WriteString("\n\n") + break + } + } + + pkgName, err := bestPackage(ctx, snapshot, fh.URI()) + if err != nil { + return nil, err + } + + fmt.Fprintf(&buf, "package %s\n", pkgName) + change := protocol.DocumentChangeEdit(fh, []protocol.TextEdit{{ + Range: protocol.Range{}, // insert at start of file + NewText: buf.String(), + }}) + + return &change, nil +} diff --git a/gopls/internal/golang/completion/package.go b/gopls/internal/golang/completion/package.go index 5fd6c04144d..d1698ee6580 100644 --- a/gopls/internal/golang/completion/package.go +++ b/gopls/internal/golang/completion/package.go @@ -15,6 +15,7 @@ import ( "go/token" "go/types" "path/filepath" + "sort" "strings" "unicode" @@ -27,6 +28,24 @@ import ( "golang.org/x/tools/gopls/internal/util/safetoken" ) +// bestPackage offers the best package name for a package declaration when +// one is not present in the given file. +func bestPackage(ctx context.Context, snapshot *cache.Snapshot, uri protocol.DocumentURI) (string, error) { + suggestions, err := packageSuggestions(ctx, snapshot, uri, "") + if err != nil { + return "", err + } + // sort with the same way of sortItems. 
+ sort.SliceStable(suggestions, func(i, j int) bool { + if suggestions[i].score != suggestions[j].score { + return suggestions[i].score > suggestions[j].score + } + return suggestions[i].name < suggestions[j].name + }) + + return suggestions[0].name, nil +} + // packageClauseCompletions offers completions for a package declaration when // one is not present in the given file. func packageClauseCompletions(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, position protocol.Position) ([]CompletionItem, *Selection, error) { @@ -62,7 +81,7 @@ func packageClauseCompletions(ctx context.Context, snapshot *cache.Snapshot, fh Score: pkg.score, }) } - + sortItems(items) return items, surrounding, nil } @@ -197,11 +216,20 @@ func packageSuggestions(ctx context.Context, snapshot *cache.Snapshot, fileURI p } matcher := fuzzy.NewMatcher(prefix) + var currentPackageName string + if variants, err := snapshot.MetadataForFile(ctx, fileURI); err == nil && + len(variants) != 0 { + currentPackageName = string(variants[0].Name) + } // Always try to suggest a main package defer func() { + mainScore := lowScore + if currentPackageName == "main" { + mainScore = highScore + } if score := float64(matcher.Score("main")); score > 0 { - packages = append(packages, toCandidate("main", score*lowScore)) + packages = append(packages, toCandidate("main", score*mainScore)) } }() @@ -254,15 +282,20 @@ func packageSuggestions(ctx context.Context, snapshot *cache.Snapshot, fileURI p seenPkgs[testPkgName] = struct{}{} } - // Add current directory name as a low relevance suggestion. if _, ok := seenPkgs[pkgName]; !ok { + // Add current directory name as a low relevance suggestion. + dirNameScore := lowScore + // if current package name is empty, the dir name is the best choice. 
+ if currentPackageName == "" { + dirNameScore = highScore + } if score := float64(matcher.Score(string(pkgName))); score > 0 { - packages = append(packages, toCandidate(string(pkgName), score*lowScore)) + packages = append(packages, toCandidate(string(pkgName), score*dirNameScore)) } testPkgName := pkgName + "_test" if score := float64(matcher.Score(string(testPkgName))); score > 0 { - packages = append(packages, toCandidate(string(testPkgName), score*lowScore)) + packages = append(packages, toCandidate(string(testPkgName), score*dirNameScore)) } } diff --git a/gopls/internal/golang/completion/util.go b/gopls/internal/golang/completion/util.go index 7a4729413ae..306078296c1 100644 --- a/gopls/internal/golang/completion/util.go +++ b/gopls/internal/golang/completion/util.go @@ -171,7 +171,7 @@ func deslice(T types.Type) types.Type { return nil } -// isSelector returns the enclosing *ast.SelectorExpr when pos is in the +// enclosingSelector returns the enclosing *ast.SelectorExpr when pos is in the // selector. 
func enclosingSelector(path []ast.Node, pos token.Pos) *ast.SelectorExpr { if len(path) == 0 { diff --git a/gopls/internal/golang/extracttofile.go b/gopls/internal/golang/extracttofile.go index 39fb28e624b..d3026d4ee0f 100644 --- a/gopls/internal/golang/extracttofile.go +++ b/gopls/internal/golang/extracttofile.go @@ -138,7 +138,7 @@ func ExtractToNewFile(ctx context.Context, snapshot *cache.Snapshot, fh file.Han } var buf bytes.Buffer - if c := copyrightComment(pgf.File); c != nil { + if c := CopyrightComment(pgf.File); c != nil { start, end, err := pgf.NodeOffsets(c) if err != nil { return nil, err diff --git a/gopls/internal/golang/implementation.go b/gopls/internal/golang/implementation.go index 2d9a1e93ef3..a5ab5d19a13 100644 --- a/gopls/internal/golang/implementation.go +++ b/gopls/internal/golang/implementation.go @@ -11,6 +11,7 @@ import ( "go/ast" "go/token" "go/types" + "iter" "reflect" "slices" "sort" @@ -26,10 +27,12 @@ import ( "golang.org/x/tools/gopls/internal/file" "golang.org/x/tools/gopls/internal/protocol" "golang.org/x/tools/gopls/internal/util/bug" + "golang.org/x/tools/gopls/internal/util/moreiters" "golang.org/x/tools/gopls/internal/util/safetoken" "golang.org/x/tools/internal/astutil/cursor" "golang.org/x/tools/internal/astutil/edge" "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/typesinternal" ) // This file defines the new implementation of the 'implementation' @@ -497,133 +500,327 @@ func concreteImplementsIntf(msets *typeutil.MethodSetCache, x, y types.Type) boo if !ok { return false // x lacks a method of y } - if !unify(xm.Signature(), ym.Signature()) { + if !unify(xm.Signature(), ym.Signature(), nil) { return false // signatures do not match } } return true // all methods found } -// unify reports whether the types of x and y match, allowing free -// type parameters to stand for anything at all, without regard to -// consistency of substitutions. +// unify reports whether the types of x and y match. 
// -// TODO(adonovan): implement proper unification (#63982), finding the -// most general unifier across all the interface methods. +// If unifier is nil, unify reports only whether it succeeded. +// If unifier is non-nil, it is populated with the values +// of type parameters determined during a successful unification. +// If unification succeeds without binding a type parameter, that parameter +// will not be present in the map. // -// See also: unify in cache/methodsets/fingerprint, which uses a -// similar ersatz unification approach on type fingerprints, for -// the global index. -func unify(x, y types.Type) bool { - x = types.Unalias(x) - y = types.Unalias(y) - - // For now, allow a type parameter to match anything, - // without regard to consistency of substitutions. - if is[*types.TypeParam](x) || is[*types.TypeParam](y) { - return true +// On entry, the unifier's contents are treated as the values of already-bound type +// parameters, constraining the unification. +// +// For example, if unifier is an empty (not nil) map on entry, then the types +// +// func[T any](T, int) +// +// and +// +// func[U any](bool, U) +// +// will unify, with T=bool and U=int. +// That is, the contents of unifier after unify returns will be +// +// {T: bool, U: int} +// +// where "T" is the type parameter T and "bool" is the basic type for bool. +// +// But if unifier is {T: int} is int on entry, then unification will fail, because T +// does not unify with bool. +// +// Unify does not preserve aliases. For example, given the following: +// +// type String = string +// type A[T] = T +// +// unification succeeds with T bound to string, not String. +// +// See also: unify in cache/methodsets/fingerprint, which implements +// unification for type fingerprints, for the global index. +// +// BUG: literal interfaces are not handled properly. But this function is currently +// used only for signatures, where such types are very rare. 
+func unify(x, y types.Type, unifier map[*types.TypeParam]types.Type) bool { + // bindings[tp] is the binding for type parameter tp. + // Although type parameters are nominally bound to types, each bindings[tp] + // is a pointer to a type, so unbound variables that unify can share a binding. + bindings := map[*types.TypeParam]*types.Type{} + + // Bindings is initialized with pointers to the provided types. + for tp, t := range unifier { + bindings[tp] = &t } - if reflect.TypeOf(x) != reflect.TypeOf(y) { - return false // mismatched types - } - - switch x := x.(type) { - case *types.Array: - y := y.(*types.Array) - return x.Len() == y.Len() && - unify(x.Elem(), y.Elem()) - - case *types.Basic: - y := y.(*types.Basic) - return x.Kind() == y.Kind() - - case *types.Chan: - y := y.(*types.Chan) - return x.Dir() == y.Dir() && - unify(x.Elem(), y.Elem()) - - case *types.Interface: - y := y.(*types.Interface) - // TODO(adonovan): fix: for correctness, we must check - // that both interfaces have the same set of methods - // modulo type parameters, while avoiding the risk of - // unbounded interface recursion. - // - // Since non-empty interface literals are vanishingly - // rare in methods signatures, we ignore this for now. - // If more precision is needed we could compare method - // names and arities, still without full recursion. - return x.NumMethods() == y.NumMethods() - - case *types.Map: - y := y.(*types.Map) - return unify(x.Key(), y.Key()) && - unify(x.Elem(), y.Elem()) - - case *types.Named: - y := y.(*types.Named) - if x.Origin() != y.Origin() { - return false // different named types + // bindingFor returns the *types.Type in bindings for tp if tp is not nil, + // creating one if needed. 
+ bindingFor := func(tp *types.TypeParam) *types.Type { + if tp == nil { + return nil } - xtargs := x.TypeArgs() - ytargs := y.TypeArgs() - if xtargs.Len() != ytargs.Len() { - return false // arity error (ill-typed) + b := bindings[tp] + if b == nil { + b = new(types.Type) + bindings[tp] = b } - for i := range xtargs.Len() { - if !unify(xtargs.At(i), ytargs.At(i)) { - return false // mismatched type args + return b + } + + // bind sets b to t if b does not occur in t. + bind := func(b *types.Type, t types.Type) bool { + for tp := range typeParams(t) { + if b == bindings[tp] { + return false // failed "occurs" check } } + *b = t return true + } + + // uni performs the actual unification. + depth := 0 + var uni func(x, y types.Type) bool + uni = func(x, y types.Type) bool { + // Panic if recursion gets too deep, to detect bugs before + // overflowing the stack. + depth++ + defer func() { depth-- }() + if depth > 100 { + panic("unify: max depth exceeded") + } - case *types.Pointer: - y := y.(*types.Pointer) - return unify(x.Elem(), y.Elem()) + x = types.Unalias(x) + y = types.Unalias(y) - case *types.Signature: - y := y.(*types.Signature) - return x.Variadic() == y.Variadic() && - unify(x.Params(), y.Params()) && - unify(x.Results(), y.Results()) + tpx, _ := x.(*types.TypeParam) + tpy, _ := y.(*types.TypeParam) + if tpx != nil || tpy != nil { + // Identical type params unify. + if tpx == tpy { + return true + } + bx := bindingFor(tpx) + by := bindingFor(tpy) + + // If both args are type params and neither is bound, have them share a binding. + if bx != nil && by != nil && *bx == nil && *by == nil { + // Arbitrarily give y's binding to x. + bindings[tpx] = by + return true + } + // Treat param bindings like original args in what follows. + if bx != nil && *bx != nil { + x = *bx + } + if by != nil && *by != nil { + y = *by + } + // If the x param is unbound, bind it to y. 
+ if bx != nil && *bx == nil { + return bind(bx, y) + } + // If the y param is unbound, bind it to x. + if by != nil && *by == nil { + return bind(by, x) + } + // Unify the binding of a bound parameter. + return uni(x, y) + } - case *types.Slice: - y := y.(*types.Slice) - return unify(x.Elem(), y.Elem()) + // Neither arg is a type param. - case *types.Struct: - y := y.(*types.Struct) - if x.NumFields() != y.NumFields() { - return false + if reflect.TypeOf(x) != reflect.TypeOf(y) { + return false // mismatched types } - for i := range x.NumFields() { - xf := x.Field(i) - yf := y.Field(i) - if xf.Embedded() != yf.Embedded() || - xf.Name() != yf.Name() || - x.Tag(i) != y.Tag(i) || - !xf.Exported() && xf.Pkg() != yf.Pkg() || - !unify(xf.Type(), yf.Type()) { + + switch x := x.(type) { + case *types.Array: + y := y.(*types.Array) + return x.Len() == y.Len() && + uni(x.Elem(), y.Elem()) + + case *types.Basic: + y := y.(*types.Basic) + return x.Kind() == y.Kind() + + case *types.Chan: + y := y.(*types.Chan) + return x.Dir() == y.Dir() && + uni(x.Elem(), y.Elem()) + + case *types.Interface: + y := y.(*types.Interface) + // TODO(adonovan,jba): fix: for correctness, we must check + // that both interfaces have the same set of methods + // modulo type parameters, while avoiding the risk of + // unbounded interface recursion. + // + // Since non-empty interface literals are vanishingly + // rare in methods signatures, we ignore this for now. + // If more precision is needed we could compare method + // names and arities, still without full recursion. 
+ return x.NumMethods() == y.NumMethods() + + case *types.Map: + y := y.(*types.Map) + return uni(x.Key(), y.Key()) && + uni(x.Elem(), y.Elem()) + + case *types.Named: + y := y.(*types.Named) + if x.Origin() != y.Origin() { + return false // different named types + } + xtargs := x.TypeArgs() + ytargs := y.TypeArgs() + if xtargs.Len() != ytargs.Len() { + return false // arity error (ill-typed) + } + for i := range xtargs.Len() { + if !uni(xtargs.At(i), ytargs.At(i)) { + return false // mismatched type args + } + } + return true + + case *types.Pointer: + y := y.(*types.Pointer) + return uni(x.Elem(), y.Elem()) + + case *types.Signature: + y := y.(*types.Signature) + return x.Variadic() == y.Variadic() && + uni(x.Params(), y.Params()) && + uni(x.Results(), y.Results()) + + case *types.Slice: + y := y.(*types.Slice) + return uni(x.Elem(), y.Elem()) + + case *types.Struct: + y := y.(*types.Struct) + if x.NumFields() != y.NumFields() { return false } + for i := range x.NumFields() { + xf := x.Field(i) + yf := y.Field(i) + if xf.Embedded() != yf.Embedded() || + xf.Name() != yf.Name() || + x.Tag(i) != y.Tag(i) || + !xf.Exported() && xf.Pkg() != yf.Pkg() || + !uni(xf.Type(), yf.Type()) { + return false + } + } + return true + + case *types.Tuple: + y := y.(*types.Tuple) + if x.Len() != y.Len() { + return false + } + for i := range x.Len() { + if !uni(x.At(i).Type(), y.At(i).Type()) { + return false + } + } + return true + + default: // incl. *Union, *TypeParam + panic(fmt.Sprintf("unexpected Type %#v", x)) } - return true + } - case *types.Tuple: - y := y.(*types.Tuple) - if x.Len() != y.Len() { - return false + if !uni(x, y) { + clear(unifier) + return false + } + + // Populate the input map with the resulting types. 
+ if unifier != nil { + for tparam, tptr := range bindings { + unifier[tparam] = *tptr } - for i := range x.Len() { - if !unify(x.At(i).Type(), y.At(i).Type()) { - return false + } + return true +} + +// typeParams yields all the free type parameters within t that are relevant for +// unification. +func typeParams(t types.Type) iter.Seq[*types.TypeParam] { + + return func(yield func(*types.TypeParam) bool) { + seen := map[*types.TypeParam]bool{} // yield each type param only once + + // tps(t) yields each TypeParam in t and returns false to stop. + var tps func(types.Type) bool + tps = func(t types.Type) bool { + t = types.Unalias(t) + + switch t := t.(type) { + case *types.TypeParam: + if seen[t] { + return true + } + seen[t] = true + return yield(t) + + case *types.Basic: + return true + + case *types.Array: + return tps(t.Elem()) + + case *types.Chan: + return tps(t.Elem()) + + case *types.Interface: + // TODO(jba): implement. + return true + + case *types.Map: + return tps(t.Key()) && tps(t.Elem()) + + case *types.Named: + if t.Origin() == t { + // generic type: look at type params + return moreiters.Every(t.TypeParams().TypeParams(), + func(tp *types.TypeParam) bool { return tps(tp) }) + } + // instantiated type: look at type args + return moreiters.Every(t.TypeArgs().Types(), tps) + + case *types.Pointer: + return tps(t.Elem()) + + case *types.Signature: + return tps(t.Params()) && tps(t.Results()) + + case *types.Slice: + return tps(t.Elem()) + + case *types.Struct: + return moreiters.Every(t.Fields(), + func(v *types.Var) bool { return tps(v.Type()) }) + + case *types.Tuple: + return moreiters.Every(t.Variables(), + func(v *types.Var) bool { return tps(v.Type()) }) + + default: // incl. *Union + panic(fmt.Sprintf("unexpected Type %#v", t)) } } - return true - default: // incl. 
*Union, *TypeParam - panic(fmt.Sprintf("unexpected Type %#v", x)) + tps(t) } } @@ -665,6 +862,7 @@ func pathEnclosingObjNode(f *ast.File, pos token.Pos) []ast.Node { // handled this by calling astutil.PathEnclosingInterval twice, // once for "pos" and once for "pos-1". found = n.Pos() <= pos && pos <= n.End() + case *ast.ImportSpec: if n.Path.Pos() <= pos && pos < n.Path.End() { found = true @@ -674,6 +872,7 @@ func pathEnclosingObjNode(f *ast.File, pos token.Pos) []ast.Node { path = append(path, n.Name) } } + case *ast.StarExpr: // Follow star expressions to the inner identifier. if pos == n.Star { @@ -690,7 +889,6 @@ func pathEnclosingObjNode(f *ast.File, pos token.Pos) []ast.Node { // Reverse path so leaf is first element. slices.Reverse(path) - return path } @@ -740,6 +938,9 @@ func implFuncs(pkg *cache.Package, pgf *parsego.File, pos token.Pos) ([]protocol } info := pkg.TypesInfo() + if info.Types == nil || info.Defs == nil || info.Uses == nil { + panic("one of info.Types, .Defs or .Uses is nil") + } // Find innermost enclosing FuncType or CallExpr. // @@ -821,7 +1022,7 @@ func funcUses(pkg *cache.Package, t types.Type) ([]protocol.Location, error) { if ftyp == nil { continue // missing type information } - if unify(t, ftyp) { + if unify(t, ftyp, nil) { loc, err := pgf.PosLocation(pos, end) if err != nil { return nil, err @@ -855,7 +1056,7 @@ func funcDefs(pkg *cache.Package, t types.Type) ([]protocol.Location, error) { if ftyp == nil { continue // missing type information } - if unify(t, ftyp) { + if unify(t, ftyp, nil) { pos := fn.Pos() loc, err := pgf.PosLocation(pos, pos+token.Pos(len("func"))) if err != nil { @@ -879,8 +1080,7 @@ func funcDefs(pkg *cache.Package, t types.Type) ([]protocol.Location, error) { // beneathFuncDef reports whether the specified FuncType cursor is a // child of Func{Decl,Lit}. 
func beneathFuncDef(cur cursor.Cursor) bool { - ek, _ := cur.Edge() - switch ek { + switch ek, _ := cur.ParentEdge(); ek { case edge.FuncDecl_Type, edge.FuncLit_Type: return true } @@ -892,29 +1092,10 @@ func beneathFuncDef(cur cursor.Cursor) bool { // // Tested via ../test/marker/testdata/implementation/signature.txt. func dynamicFuncCallType(info *types.Info, call *ast.CallExpr) types.Type { - fun := ast.Unparen(call.Fun) - tv := info.Types[fun] - - // Reject conversion, or call to built-in. - if !tv.IsValue() { - return nil - } - - // Reject call to named func/method. - if id, ok := fun.(*ast.Ident); ok && is[*types.Func](info.Uses[id]) { - return nil + if typesinternal.ClassifyCall(info, call) == typesinternal.CallDynamic { + return info.Types[call.Fun].Type.Underlying() } - - // Reject method selections (T.method() or x.method()) - if sel, ok := fun.(*ast.SelectorExpr); ok { - seln, ok := info.Selections[sel] - if !ok || seln.Kind() != types.FieldVal { - return nil - } - } - - // TODO(adonovan): consider x() where x : TypeParam. - return tv.Type.Underlying() // e.g. x() or x.field() + return nil } // inToken reports whether pos is within the token of diff --git a/gopls/internal/golang/implementation_test.go b/gopls/internal/golang/implementation_test.go new file mode 100644 index 00000000000..b7253bb8bf7 --- /dev/null +++ b/gopls/internal/golang/implementation_test.go @@ -0,0 +1,303 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package golang + +import ( + "go/types" + "maps" + "testing" + + "golang.org/x/tools/internal/testfiles" + "golang.org/x/tools/txtar" +) + +func TestUnify(t *testing.T) { + // Most cases from TestMatches in gopls/internal/util/fingerprint/fingerprint_test.go. 
+ const src = ` +-- go.mod -- +module example.com +go 1.24 + +-- a/a.go -- +package a + +type Int = int +type String = string + +// Eq.Equal matches casefold.Equal. +type Eq[T any] interface { Equal(T, T) bool } +type casefold struct{} +func (casefold) Equal(x, y string) bool + +// A matches AString. +type A[T any] = struct { x T } +type AString = struct { x string } + +// B matches anything! +type B[T any] = T + +func C1[T any](int, T, ...string) T { panic(0) } +func C2[U any](int, int, ...U) bool { panic(0) } +func C3(int, bool, ...string) rune +func C4(int, bool, ...string) +func C5(int, float64, bool, string) bool +func C6(int, bool, ...string) bool + +func DAny[T any](Named[T]) { panic(0) } +func DString(Named[string]) +func DInt(Named[int]) + +type Named[T any] struct { x T } + +func E1(byte) rune +func E2(uint8) int32 +func E3(int8) uint32 + +// generic vs. generic +func F1[T any](T) { panic(0) } +func F2[T any](*T) { panic(0) } +func F3[T any](T, T) { panic(0) } +func F4[U any](U, *U) {panic(0) } +func F4a[U any](U, Named[U]) {panic(0) } +func F5[T, U any](T, U, U) { panic(0) } +func F6[T any](T, int, T) { panic(0) } +func F7[T any](bool, T, T) { panic(0) } +func F8[V any](*V, int, int) { panic(0) } +func F9[V any](V, *V, V) { panic(0) } +` + type tmap = map[*types.TypeParam]types.Type + + var ( + boolType = types.Typ[types.Bool] + intType = types.Typ[types.Int] + stringType = types.Typ[types.String] + ) + + pkg := testfiles.LoadPackages(t, txtar.Parse([]byte(src)), "./a")[0] + scope := pkg.Types.Scope() + + tparam := func(name string, index int) *types.TypeParam { + obj := scope.Lookup(name) + var tps *types.TypeParamList + switch obj := obj.(type) { + case *types.Func: + tps = obj.Signature().TypeParams() + case *types.TypeName: + if n, ok := obj.Type().(*types.Named); ok { + tps = n.TypeParams() + } else { + tps = obj.Type().(*types.Alias).TypeParams() + } + default: + t.Fatalf("unsupported object of type %T", obj) + } + return tps.At(index) + } + + for 
_, test := range []struct { + x, y string // the symbols in the above source code whose types to unify + method string // optional field or method + params tmap // initial values of type params + want bool // success or failure + wantParams tmap // expected output + }{ + { + // In Eq[T], T is bound to string. + x: "Eq", + y: "casefold", + method: "Equal", + want: true, + wantParams: tmap{tparam("Eq", 0): stringType}, + }, + { + // If we unify A[T] and A[string], T should be bound to string. + x: "A", + y: "AString", + want: true, + wantParams: tmap{tparam("A", 0): stringType}, + }, + {x: "A", y: "Eq", want: false}, // completely unrelated + { + x: "B", + y: "String", + want: true, + wantParams: tmap{tparam("B", 0): stringType}, + }, + { + x: "B", + y: "Int", + want: true, + wantParams: tmap{tparam("B", 0): intType}, + }, + { + x: "B", + y: "A", + want: true, + // B's T is bound to A's struct { x T } + wantParams: tmap{tparam("B", 0): scope.Lookup("A").Type().Underlying()}, + }, + { + // C1's U unifies with C6's bool. + x: "C1", + y: "C6", + wantParams: tmap{tparam("C1", 0): boolType}, + want: true, + }, + // C1 fails to unify with C2 because C1's T must be bound to both int and bool. + {x: "C1", y: "C2", want: false}, + // The remaining "C" cases fail for less interesting reasons, usually different numbers + // or types of parameters or results. + {x: "C1", y: "C3", want: false}, + {x: "C1", y: "C4", want: false}, + {x: "C1", y: "C5", want: false}, + {x: "C2", y: "C3", want: false}, + {x: "C2", y: "C4", want: false}, + {x: "C3", y: "C4", want: false}, + { + x: "DAny", + y: "DString", + want: true, + wantParams: tmap{tparam("DAny", 0): stringType}, + }, + {x: "DString", y: "DInt", want: false}, // different instantiations of Named + {x: "E1", y: "E2", want: true}, // byte and rune are just aliases + {x: "E2", y: "E3", want: false}, + + // The following tests cover all of the type param cases of unify. 
+ { + // F1[*int] = F2[int], for example + // F1's T is bound to a pointer to F2's T. + x: "F1", + // F2's T is unbound: any instantiation works. + y: "F2", + want: true, + wantParams: tmap{tparam("F1", 0): types.NewPointer(tparam("F2", 0))}, + }, + {x: "F3", y: "F4", want: false}, // would require U identical to *U, prevented by occur check + {x: "F3", y: "F4a", want: false}, // occur check through Named[T] + { + x: "F5", + y: "F6", + want: true, + wantParams: tmap{ + tparam("F5", 0): intType, + tparam("F5", 1): intType, + tparam("F6", 0): intType, + }, + }, + {x: "F6", y: "F7", want: false}, // both are bound + { + x: "F5", + y: "F6", + params: tmap{tparam("F6", 0): intType}, // consistent with the result + want: true, + wantParams: tmap{ + tparam("F5", 0): intType, + tparam("F5", 1): intType, + tparam("F6", 0): intType, + }, + }, + { + x: "F5", + y: "F6", + params: tmap{tparam("F6", 0): boolType}, // not consistent + want: false, + }, + {x: "F6", y: "F7", want: false}, // both are bound + { + // T=*V, U=int, V=int + x: "F5", + y: "F8", + want: true, + wantParams: tmap{ + tparam("F5", 0): types.NewPointer(tparam("F8", 0)), + tparam("F5", 1): intType, + }, + }, + { + // T=*V, U=int, V=int + // Partial initial information is fine, as long as it's consistent. + x: "F5", + y: "F8", + want: true, + params: tmap{tparam("F5", 1): intType}, + wantParams: tmap{ + tparam("F5", 0): types.NewPointer(tparam("F8", 0)), + tparam("F5", 1): intType, + }, + }, + { + // T=*V, U=int, V=int + // Partial initial information is fine, as long as it's consistent. 
+ x: "F5", + y: "F8", + want: true, + params: tmap{tparam("F5", 0): types.NewPointer(tparam("F8", 0))}, + wantParams: tmap{ + tparam("F5", 0): types.NewPointer(tparam("F8", 0)), + tparam("F5", 1): intType, + }, + }, + {x: "F5", y: "F9", want: false}, // T is unbound, V is bound, and T occurs in V + { + // T bound to Named[T'] + x: "F1", + y: "DAny", + want: true, + wantParams: tmap{ + tparam("F1", 0): scope.Lookup("DAny").(*types.Func).Signature().Params().At(0).Type()}, + }, + } { + + lookup := func(name string) types.Type { + obj := scope.Lookup(name) + if obj == nil { + t.Fatalf("Lookup %s failed", name) + } + if test.method != "" { + obj, _, _ = types.LookupFieldOrMethod(obj.Type(), true, pkg.Types, test.method) + if obj == nil { + t.Fatalf("Lookup %s.%s failed", name, test.method) + } + } + return obj.Type() + } + + check := func(a, b string, want, compareParams bool) { + t.Helper() + + ta := lookup(a) + tb := lookup(b) + + var gotParams tmap + if test.params == nil { + // Get the unifier even if there are no input params. 
+ gotParams = tmap{} + } else { + gotParams = maps.Clone(test.params) + } + got := unify(ta, tb, gotParams) + if got != want { + t.Errorf("a=%s b=%s method=%s: unify returned %t for these inputs:\n- %s\n- %s", + a, b, test.method, got, ta, tb) + return + } + if !compareParams { + return + } + if !maps.EqualFunc(gotParams, test.wantParams, types.Identical) { + t.Errorf("x=%s y=%s method=%s: params: got %v, want %v", + a, b, test.method, gotParams, test.wantParams) + } + } + + check(test.x, test.y, test.want, true) + // unify is symmetric + check(test.y, test.x, test.want, true) + // unify is reflexive + check(test.x, test.x, true, false) + check(test.y, test.y, true, false) + } +} diff --git a/gopls/internal/golang/stubmethods/stubmethods.go b/gopls/internal/golang/stubmethods/stubmethods.go index a060993b1ab..43842264d70 100644 --- a/gopls/internal/golang/stubmethods/stubmethods.go +++ b/gopls/internal/golang/stubmethods/stubmethods.go @@ -54,7 +54,7 @@ type IfaceStubInfo struct { func GetIfaceStubInfo(fset *token.FileSet, info *types.Info, pgf *parsego.File, pos, end token.Pos) *IfaceStubInfo { // TODO(adonovan): simplify, using Cursor: // curErr, _ := pgf.Cursor.FindPos(pos, end) - // for cur := range curErr.Ancestors() { + // for cur := range curErr.Enclosing() { // switch n := cur.Node().(type) {... path, _ := astutil.PathEnclosingInterval(pgf.File, pos, end) for _, n := range path { diff --git a/gopls/internal/golang/util.go b/gopls/internal/golang/util.go index a81ff3fbe58..b13056e02b9 100644 --- a/gopls/internal/golang/util.go +++ b/gopls/internal/golang/util.go @@ -361,9 +361,9 @@ func AbbreviateVarName(s string) string { return b.String() } -// copyrightComment returns the copyright comment group from the input file, or +// CopyrightComment returns the copyright comment group from the input file, or // nil if not found. 
-func copyrightComment(file *ast.File) *ast.CommentGroup { +func CopyrightComment(file *ast.File) *ast.CommentGroup { if len(file.Comments) == 0 { return nil } diff --git a/gopls/internal/golang/workspace_symbol.go b/gopls/internal/golang/workspace_symbol.go index 89c144b9230..91c5ee22925 100644 --- a/gopls/internal/golang/workspace_symbol.go +++ b/gopls/internal/golang/workspace_symbol.go @@ -300,8 +300,7 @@ func collectSymbols(ctx context.Context, snapshots []*cache.Snapshot, matcherTyp // whether a URI is in any open workspace. folderURI := snapshot.Folder() - filters := snapshot.Options().DirectoryFilters - filterer := cache.NewFilterer(filters) + pathIncluded := cache.PathIncludeFunc(snapshot.Options().DirectoryFilters) folder := filepath.ToSlash(folderURI.Path()) var ( @@ -371,7 +370,7 @@ func collectSymbols(ctx context.Context, snapshots []*cache.Snapshot, matcherTyp uri := sp.Files[i] norm := filepath.ToSlash(uri.Path()) nm := strings.TrimPrefix(norm, folder) - if filterer.Disallow(nm) { + if !pathIncluded(nm) { continue } // Only scan each file once. 
diff --git a/gopls/internal/golang/workspace_symbol_test.go b/gopls/internal/golang/workspace_symbol_test.go index 4982b767754..fbfec8e1204 100644 --- a/gopls/internal/golang/workspace_symbol_test.go +++ b/gopls/internal/golang/workspace_symbol_test.go @@ -47,7 +47,7 @@ func TestParseQuery(t *testing.T) { } } -func TestFiltererDisallow(t *testing.T) { +func TestPathIncludeFunc(t *testing.T) { tests := []struct { filters []string included []string @@ -119,18 +119,24 @@ func TestFiltererDisallow(t *testing.T) { []string{"a/b/c.go", "bb"}, []string{"b/c/d.go", "b"}, }, + // golang/vscode-go#3692 + { + []string{"-**/foo", "+**/bar"}, + []string{"bar/a.go", "a/bar/b.go"}, + []string{"foo/a.go", "a/foo/b.go"}, + }, } for _, test := range tests { - filterer := cache.NewFilterer(test.filters) + pathIncluded := cache.PathIncludeFunc(test.filters) for _, inc := range test.included { - if filterer.Disallow(inc) { + if !pathIncluded(inc) { t.Errorf("Filters %v excluded %v, wanted included", test.filters, inc) } } for _, exc := range test.excluded { - if !filterer.Disallow(exc) { + if pathIncluded(exc) { t.Errorf("Filters %v included %v, wanted excluded", test.filters, exc) } } diff --git a/gopls/internal/lsprpc/lsprpc.go b/gopls/internal/lsprpc/lsprpc.go index 9255f9176bc..3d26bdd6896 100644 --- a/gopls/internal/lsprpc/lsprpc.go +++ b/gopls/internal/lsprpc/lsprpc.go @@ -392,7 +392,7 @@ func (f *forwarder) replyWithDebugAddress(outerCtx context.Context, r jsonrpc2.R addr, err = di.Serve(outerCtx, addr) if err != nil { event.Error(outerCtx, "starting debug server", err) - return r(ctx, result, outerErr) + return r(ctx, result, err) } urls := []string{"http://" + addr} modified.URLs = append(urls, modified.URLs...) 
diff --git a/gopls/internal/mod/diagnostics.go b/gopls/internal/mod/diagnostics.go index a89c148d7a7..8ad1ece05e7 100644 --- a/gopls/internal/mod/diagnostics.go +++ b/gopls/internal/mod/diagnostics.go @@ -34,7 +34,7 @@ func ParseDiagnostics(ctx context.Context, snapshot *cache.Snapshot) (map[protoc return collectDiagnostics(ctx, snapshot, parseDiagnostics) } -// Diagnostics returns diagnostics from running go mod tidy. +// TidyDiagnostics returns diagnostics from running go mod tidy. func TidyDiagnostics(ctx context.Context, snapshot *cache.Snapshot) (map[protocol.DocumentURI][]*cache.Diagnostic, error) { ctx, done := event.Start(ctx, "mod.Diagnostics", snapshot.Labels()...) defer done() diff --git a/gopls/internal/server/call_hierarchy.go b/gopls/internal/server/call_hierarchy.go index 671d4f8c81c..758a4628948 100644 --- a/gopls/internal/server/call_hierarchy.go +++ b/gopls/internal/server/call_hierarchy.go @@ -14,7 +14,7 @@ import ( ) func (s *server) PrepareCallHierarchy(ctx context.Context, params *protocol.CallHierarchyPrepareParams) ([]protocol.CallHierarchyItem, error) { - ctx, done := event.Start(ctx, "lsp.Server.prepareCallHierarchy") + ctx, done := event.Start(ctx, "server.PrepareCallHierarchy") defer done() fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) @@ -29,7 +29,7 @@ func (s *server) PrepareCallHierarchy(ctx context.Context, params *protocol.Call } func (s *server) IncomingCalls(ctx context.Context, params *protocol.CallHierarchyIncomingCallsParams) ([]protocol.CallHierarchyIncomingCall, error) { - ctx, done := event.Start(ctx, "lsp.Server.incomingCalls") + ctx, done := event.Start(ctx, "server.IncomingCalls") defer done() fh, snapshot, release, err := s.fileOf(ctx, params.Item.URI) @@ -44,7 +44,7 @@ func (s *server) IncomingCalls(ctx context.Context, params *protocol.CallHierarc } func (s *server) OutgoingCalls(ctx context.Context, params *protocol.CallHierarchyOutgoingCallsParams) ([]protocol.CallHierarchyOutgoingCall, error) 
{ - ctx, done := event.Start(ctx, "lsp.Server.outgoingCalls") + ctx, done := event.Start(ctx, "server.OutgoingCalls") defer done() fh, snapshot, release, err := s.fileOf(ctx, params.Item.URI) diff --git a/gopls/internal/server/code_action.go b/gopls/internal/server/code_action.go index c36e7c33f94..4617fad5de7 100644 --- a/gopls/internal/server/code_action.go +++ b/gopls/internal/server/code_action.go @@ -22,7 +22,7 @@ import ( ) func (s *server) CodeAction(ctx context.Context, params *protocol.CodeActionParams) ([]protocol.CodeAction, error) { - ctx, done := event.Start(ctx, "lsp.Server.codeAction") + ctx, done := event.Start(ctx, "server.CodeAction") defer done() fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) @@ -225,7 +225,7 @@ func triggerKind(params *protocol.CodeActionParams) protocol.CodeActionTriggerKi // This feature allows capable clients to preview and selectively apply the diff // instead of applying the whole thing unconditionally through workspace/applyEdit. func (s *server) ResolveCodeAction(ctx context.Context, ca *protocol.CodeAction) (*protocol.CodeAction, error) { - ctx, done := event.Start(ctx, "lsp.Server.resolveCodeAction") + ctx, done := event.Start(ctx, "server.ResolveCodeAction") defer done() // Only resolve the code action if there is Data provided. diff --git a/gopls/internal/server/code_lens.go b/gopls/internal/server/code_lens.go index 67b359e866c..2509452f0b5 100644 --- a/gopls/internal/server/code_lens.go +++ b/gopls/internal/server/code_lens.go @@ -22,7 +22,7 @@ import ( // CodeLens reports the set of available CodeLenses // (range-associated commands) in the given file. 
func (s *server) CodeLens(ctx context.Context, params *protocol.CodeLensParams) ([]protocol.CodeLens, error) { - ctx, done := event.Start(ctx, "lsp.Server.codeLens", label.URI.Of(params.TextDocument.URI)) + ctx, done := event.Start(ctx, "server.CodeLens", label.URI.Of(params.TextDocument.URI)) defer done() fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) diff --git a/gopls/internal/server/command.go b/gopls/internal/server/command.go index 0142de532c3..ca8177530e5 100644 --- a/gopls/internal/server/command.go +++ b/gopls/internal/server/command.go @@ -47,7 +47,7 @@ import ( ) func (s *server) ExecuteCommand(ctx context.Context, params *protocol.ExecuteCommandParams) (any, error) { - ctx, done := event.Start(ctx, "lsp.Server.executeCommand") + ctx, done := event.Start(ctx, "server.ExecuteCommand") defer done() // For test synchronization, always create a progress notification. @@ -1652,7 +1652,7 @@ func (c *commandHandler) DiagnoseFiles(ctx context.Context, args command.Diagnos // Though note that implementing pull diagnostics may cause some servers to // request diagnostics in an ad-hoc manner, and break our intentional pacing. 
- ctx, done := event.Start(ctx, "lsp.server.DiagnoseFiles") + ctx, done := event.Start(ctx, "commandHandler.DiagnoseFiles") defer done() snapshots := make(map[*cache.Snapshot]bool) diff --git a/gopls/internal/server/completion.go b/gopls/internal/server/completion.go index 6c185e93717..02604b2f710 100644 --- a/gopls/internal/server/completion.go +++ b/gopls/internal/server/completion.go @@ -27,7 +27,7 @@ func (s *server) Completion(ctx context.Context, params *protocol.CompletionPara recordLatency(ctx, rerr) }() - ctx, done := event.Start(ctx, "lsp.Server.completion", label.URI.Of(params.TextDocument.URI)) + ctx, done := event.Start(ctx, "server.Completion", label.URI.Of(params.TextDocument.URI)) defer done() fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) @@ -102,6 +102,8 @@ func (s *server) saveLastCompletion(uri protocol.DocumentURI, version int32, ite s.efficacyItems = items } +// toProtocolCompletionItems converts the candidates to the protocol completion items, +// the candidates must be sorted based on score as it will be respected by client side. func toProtocolCompletionItems(candidates []completion.CompletionItem, surrounding *completion.Selection, options *settings.Options) ([]protocol.CompletionItem, error) { replaceRng, err := surrounding.Range() if err != nil { diff --git a/gopls/internal/server/definition.go b/gopls/internal/server/definition.go index 5a9c020cfc5..8b9d42413be 100644 --- a/gopls/internal/server/definition.go +++ b/gopls/internal/server/definition.go @@ -24,7 +24,7 @@ func (s *server) Definition(ctx context.Context, params *protocol.DefinitionPara recordLatency(ctx, rerr) }() - ctx, done := event.Start(ctx, "lsp.Server.definition", label.URI.Of(params.TextDocument.URI)) + ctx, done := event.Start(ctx, "server.Definition", label.URI.Of(params.TextDocument.URI)) defer done() // TODO(rfindley): definition requests should be multiplexed across all views. 
@@ -46,7 +46,7 @@ func (s *server) Definition(ctx context.Context, params *protocol.DefinitionPara } func (s *server) TypeDefinition(ctx context.Context, params *protocol.TypeDefinitionParams) ([]protocol.Location, error) { - ctx, done := event.Start(ctx, "lsp.Server.typeDefinition", label.URI.Of(params.TextDocument.URI)) + ctx, done := event.Start(ctx, "server.TypeDefinition", label.URI.Of(params.TextDocument.URI)) defer done() // TODO(rfindley): type definition requests should be multiplexed across all views. diff --git a/gopls/internal/server/diagnostics.go b/gopls/internal/server/diagnostics.go index b4e764b1233..92ca54e226a 100644 --- a/gopls/internal/server/diagnostics.go +++ b/gopls/internal/server/diagnostics.go @@ -200,7 +200,7 @@ func (s *server) diagnoseChangedViews(ctx context.Context, modID uint64, lastCha // snapshot (or a subsequent snapshot in the same View) is eventually // diagnosed. func (s *server) diagnoseSnapshot(ctx context.Context, snapshot *cache.Snapshot, changedURIs []protocol.DocumentURI, delay time.Duration) { - ctx, done := event.Start(ctx, "Server.diagnoseSnapshot", snapshot.Labels()...) + ctx, done := event.Start(ctx, "server.diagnoseSnapshot", snapshot.Labels()...) defer done() if delay > 0 { @@ -241,7 +241,7 @@ func (s *server) diagnoseSnapshot(ctx context.Context, snapshot *cache.Snapshot, } func (s *server) diagnoseChangedFiles(ctx context.Context, snapshot *cache.Snapshot, uris []protocol.DocumentURI) (diagMap, error) { - ctx, done := event.Start(ctx, "Server.diagnoseChangedFiles", snapshot.Labels()...) + ctx, done := event.Start(ctx, "server.diagnoseChangedFiles", snapshot.Labels()...) defer done() toDiagnose := make(map[metadata.PackageID]*metadata.Package) @@ -311,7 +311,7 @@ func (s *server) diagnoseChangedFiles(ctx context.Context, snapshot *cache.Snaps } func (s *server) diagnose(ctx context.Context, snapshot *cache.Snapshot) (diagMap, error) { - ctx, done := event.Start(ctx, "Server.diagnose", snapshot.Labels()...) 
+ ctx, done := event.Start(ctx, "server.diagnose", snapshot.Labels()...) defer done() // Wait for a free diagnostics slot. @@ -640,7 +640,7 @@ func (s *server) updateCriticalErrorStatus(ctx context.Context, snapshot *cache. // updateDiagnostics records the result of diagnosing a snapshot, and publishes // any diagnostics that need to be updated on the client. func (s *server) updateDiagnostics(ctx context.Context, snapshot *cache.Snapshot, diagnostics diagMap, final bool) { - ctx, done := event.Start(ctx, "Server.publishDiagnostics") + ctx, done := event.Start(ctx, "server.publishDiagnostics") defer done() s.diagnosticsMu.Lock() diff --git a/gopls/internal/server/folding_range.go b/gopls/internal/server/folding_range.go index b05d5302f10..5dbfd697db4 100644 --- a/gopls/internal/server/folding_range.go +++ b/gopls/internal/server/folding_range.go @@ -15,7 +15,7 @@ import ( ) func (s *server) FoldingRange(ctx context.Context, params *protocol.FoldingRangeParams) ([]protocol.FoldingRange, error) { - ctx, done := event.Start(ctx, "lsp.Server.foldingRange", label.URI.Of(params.TextDocument.URI)) + ctx, done := event.Start(ctx, "server.FoldingRange", label.URI.Of(params.TextDocument.URI)) defer done() fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) diff --git a/gopls/internal/server/format.go b/gopls/internal/server/format.go index 1e6344dcff4..6abbb96d5b6 100644 --- a/gopls/internal/server/format.go +++ b/gopls/internal/server/format.go @@ -17,7 +17,7 @@ import ( ) func (s *server) Formatting(ctx context.Context, params *protocol.DocumentFormattingParams) ([]protocol.TextEdit, error) { - ctx, done := event.Start(ctx, "lsp.Server.formatting", label.URI.Of(params.TextDocument.URI)) + ctx, done := event.Start(ctx, "server.Formatting", label.URI.Of(params.TextDocument.URI)) defer done() fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) diff --git a/gopls/internal/server/general.go b/gopls/internal/server/general.go index 
b7b69931103..5e02b832747 100644 --- a/gopls/internal/server/general.go +++ b/gopls/internal/server/general.go @@ -38,7 +38,7 @@ import ( ) func (s *server) Initialize(ctx context.Context, params *protocol.ParamInitialize) (*protocol.InitializeResult, error) { - ctx, done := event.Start(ctx, "lsp.Server.initialize") + ctx, done := event.Start(ctx, "server.Initialize") defer done() var clientName string @@ -189,6 +189,15 @@ func (s *server) Initialize(ctx context.Context, params *protocol.ParamInitializ Supported: true, ChangeNotifications: "workspace/didChangeWorkspaceFolders", }, + FileOperations: &protocol.FileOperationOptions{ + DidCreate: &protocol.FileOperationRegistrationOptions{ + Filters: []protocol.FileOperationFilter{{ + Scheme: "file", + // gopls is only interested with files in .go extension. + Pattern: protocol.FileOperationPattern{Glob: "**/*.go"}, + }}, + }, + }, }, }, ServerInfo: &protocol.ServerInfo{ @@ -199,7 +208,7 @@ func (s *server) Initialize(ctx context.Context, params *protocol.ParamInitializ } func (s *server) Initialized(ctx context.Context, params *protocol.InitializedParams) error { - ctx, done := event.Start(ctx, "lsp.Server.initialized") + ctx, done := event.Start(ctx, "server.Initialized") defer done() s.stateMu.Lock() @@ -626,7 +635,7 @@ func (s *server) fileOf(ctx context.Context, uri protocol.DocumentURI) (file.Han // Shutdown implements the 'shutdown' LSP handler. It releases resources // associated with the server and waits for all ongoing work to complete. 
func (s *server) Shutdown(ctx context.Context) error { - ctx, done := event.Start(ctx, "lsp.Server.shutdown") + ctx, done := event.Start(ctx, "server.Shutdown") defer done() s.stateMu.Lock() @@ -653,7 +662,7 @@ func (s *server) Shutdown(ctx context.Context) error { } func (s *server) Exit(ctx context.Context) error { - ctx, done := event.Start(ctx, "lsp.Server.exit") + ctx, done := event.Start(ctx, "server.Exit") defer done() s.stateMu.Lock() diff --git a/gopls/internal/server/highlight.go b/gopls/internal/server/highlight.go index 35ffc2db2f5..04ebbfa25ec 100644 --- a/gopls/internal/server/highlight.go +++ b/gopls/internal/server/highlight.go @@ -16,7 +16,7 @@ import ( ) func (s *server) DocumentHighlight(ctx context.Context, params *protocol.DocumentHighlightParams) ([]protocol.DocumentHighlight, error) { - ctx, done := event.Start(ctx, "lsp.Server.documentHighlight", label.URI.Of(params.TextDocument.URI)) + ctx, done := event.Start(ctx, "server.DocumentHighlight", label.URI.Of(params.TextDocument.URI)) defer done() fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) diff --git a/gopls/internal/server/hover.go b/gopls/internal/server/hover.go index 80c35c09565..ed70ce493ba 100644 --- a/gopls/internal/server/hover.go +++ b/gopls/internal/server/hover.go @@ -25,7 +25,7 @@ func (s *server) Hover(ctx context.Context, params *protocol.HoverParams) (_ *pr recordLatency(ctx, rerr) }() - ctx, done := event.Start(ctx, "lsp.Server.hover", label.URI.Of(params.TextDocument.URI)) + ctx, done := event.Start(ctx, "server.Hover", label.URI.Of(params.TextDocument.URI)) defer done() fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) diff --git a/gopls/internal/server/implementation.go b/gopls/internal/server/implementation.go index 9e61ebc4d88..9b2c103b2c3 100644 --- a/gopls/internal/server/implementation.go +++ b/gopls/internal/server/implementation.go @@ -21,7 +21,7 @@ func (s *server) Implementation(ctx context.Context, params 
*protocol.Implementa recordLatency(ctx, rerr) }() - ctx, done := event.Start(ctx, "lsp.Server.implementation", label.URI.Of(params.TextDocument.URI)) + ctx, done := event.Start(ctx, "server.Implementation", label.URI.Of(params.TextDocument.URI)) defer done() fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) diff --git a/gopls/internal/server/inlay_hint.go b/gopls/internal/server/inlay_hint.go index fca8bcbc1c8..a11ab4c313a 100644 --- a/gopls/internal/server/inlay_hint.go +++ b/gopls/internal/server/inlay_hint.go @@ -16,7 +16,7 @@ import ( ) func (s *server) InlayHint(ctx context.Context, params *protocol.InlayHintParams) ([]protocol.InlayHint, error) { - ctx, done := event.Start(ctx, "lsp.Server.inlayHint", label.URI.Of(params.TextDocument.URI)) + ctx, done := event.Start(ctx, "server.InlayHint", label.URI.Of(params.TextDocument.URI)) defer done() fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) diff --git a/gopls/internal/server/link.go b/gopls/internal/server/link.go index c888904baab..cf475ca90c9 100644 --- a/gopls/internal/server/link.go +++ b/gopls/internal/server/link.go @@ -29,7 +29,7 @@ import ( ) func (s *server) DocumentLink(ctx context.Context, params *protocol.DocumentLinkParams) (links []protocol.DocumentLink, err error) { - ctx, done := event.Start(ctx, "lsp.Server.documentLink") + ctx, done := event.Start(ctx, "server.DocumentLink") defer done() fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) @@ -211,7 +211,7 @@ var acceptedSchemes = map[string]bool{ "https": true, } -// urlRegexp is the user-supplied regular expression to match URL. +// findLinksInString is the user-supplied regular expression to match URL. // srcOffset is the start offset of 'src' within m's file. 
func findLinksInString(urlRegexp *regexp.Regexp, src string, srcOffset int, m *protocol.Mapper) ([]protocol.DocumentLink, error) { var links []protocol.DocumentLink diff --git a/gopls/internal/server/references.go b/gopls/internal/server/references.go index f5019693946..8a01e96498b 100644 --- a/gopls/internal/server/references.go +++ b/gopls/internal/server/references.go @@ -22,7 +22,7 @@ func (s *server) References(ctx context.Context, params *protocol.ReferenceParam recordLatency(ctx, rerr) }() - ctx, done := event.Start(ctx, "lsp.Server.references", label.URI.Of(params.TextDocument.URI)) + ctx, done := event.Start(ctx, "server.References", label.URI.Of(params.TextDocument.URI)) defer done() fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) diff --git a/gopls/internal/server/rename.go b/gopls/internal/server/rename.go index b6fac8ba219..218740bd679 100644 --- a/gopls/internal/server/rename.go +++ b/gopls/internal/server/rename.go @@ -17,7 +17,7 @@ import ( ) func (s *server) Rename(ctx context.Context, params *protocol.RenameParams) (*protocol.WorkspaceEdit, error) { - ctx, done := event.Start(ctx, "lsp.Server.rename", label.URI.Of(params.TextDocument.URI)) + ctx, done := event.Start(ctx, "server.Rename", label.URI.Of(params.TextDocument.URI)) defer done() fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) @@ -68,7 +68,7 @@ func (s *server) Rename(ctx context.Context, params *protocol.RenameParams) (*pr // TODO(rfindley): why wouldn't we want to show an error to the user, if the // user initiated a rename request at the cursor? 
func (s *server) PrepareRename(ctx context.Context, params *protocol.PrepareRenameParams) (*protocol.PrepareRenamePlaceholder, error) { - ctx, done := event.Start(ctx, "lsp.Server.prepareRename", label.URI.Of(params.TextDocument.URI)) + ctx, done := event.Start(ctx, "server.PrepareRename", label.URI.Of(params.TextDocument.URI)) defer done() fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) diff --git a/gopls/internal/server/selection_range.go b/gopls/internal/server/selection_range.go index 484e1cf67ab..afc878b1544 100644 --- a/gopls/internal/server/selection_range.go +++ b/gopls/internal/server/selection_range.go @@ -27,7 +27,7 @@ import ( // returned for each cursor to avoid multiple round-trips when the user is // likely to issue this command multiple times in quick succession. func (s *server) SelectionRange(ctx context.Context, params *protocol.SelectionRangeParams) ([]protocol.SelectionRange, error) { - ctx, done := event.Start(ctx, "lsp.Server.selectionRange") + ctx, done := event.Start(ctx, "server.SelectionRange") defer done() fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) diff --git a/gopls/internal/server/semantic.go b/gopls/internal/server/semantic.go index f746593a3dd..f0a2e11dd98 100644 --- a/gopls/internal/server/semantic.go +++ b/gopls/internal/server/semantic.go @@ -24,7 +24,7 @@ func (s *server) SemanticTokensRange(ctx context.Context, params *protocol.Seman } func (s *server) semanticTokens(ctx context.Context, td protocol.TextDocumentIdentifier, rng *protocol.Range) (*protocol.SemanticTokens, error) { - ctx, done := event.Start(ctx, "lsp.Server.semanticTokens", label.URI.Of(td.URI)) + ctx, done := event.Start(ctx, "server.semanticTokens", label.URI.Of(td.URI)) defer done() fh, snapshot, release, err := s.fileOf(ctx, td.URI) diff --git a/gopls/internal/server/server.go b/gopls/internal/server/server.go index 033295ffb32..c22e8f19750 100644 --- a/gopls/internal/server/server.go +++ 
b/gopls/internal/server/server.go @@ -181,7 +181,7 @@ type server struct { } func (s *server) WorkDoneProgressCancel(ctx context.Context, params *protocol.WorkDoneProgressCancelParams) error { - ctx, done := event.Start(ctx, "lsp.Server.workDoneProgressCancel") + ctx, done := event.Start(ctx, "server.WorkDoneProgressCancel") defer done() return s.progress.Cancel(params.Token) diff --git a/gopls/internal/server/signature_help.go b/gopls/internal/server/signature_help.go index addcfe1e262..eb464c48e27 100644 --- a/gopls/internal/server/signature_help.go +++ b/gopls/internal/server/signature_help.go @@ -15,7 +15,7 @@ import ( ) func (s *server) SignatureHelp(ctx context.Context, params *protocol.SignatureHelpParams) (*protocol.SignatureHelp, error) { - ctx, done := event.Start(ctx, "lsp.Server.signatureHelp", label.URI.Of(params.TextDocument.URI)) + ctx, done := event.Start(ctx, "server.SignatureHelp", label.URI.Of(params.TextDocument.URI)) defer done() fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) diff --git a/gopls/internal/server/symbols.go b/gopls/internal/server/symbols.go index e35b2c75451..40df7369f51 100644 --- a/gopls/internal/server/symbols.go +++ b/gopls/internal/server/symbols.go @@ -16,7 +16,7 @@ import ( ) func (s *server) DocumentSymbol(ctx context.Context, params *protocol.DocumentSymbolParams) ([]any, error) { - ctx, done := event.Start(ctx, "lsp.Server.documentSymbol", label.URI.Of(params.TextDocument.URI)) + ctx, done := event.Start(ctx, "server.DocumentSymbol", label.URI.Of(params.TextDocument.URI)) defer done() fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) diff --git a/gopls/internal/server/text_synchronization.go b/gopls/internal/server/text_synchronization.go index ad1266d783e..ad8554d9302 100644 --- a/gopls/internal/server/text_synchronization.go +++ b/gopls/internal/server/text_synchronization.go @@ -92,7 +92,7 @@ func (m ModificationSource) String() string { } func (s *server) DidOpen(ctx 
context.Context, params *protocol.DidOpenTextDocumentParams) error { - ctx, done := event.Start(ctx, "lsp.Server.didOpen", label.URI.Of(params.TextDocument.URI)) + ctx, done := event.Start(ctx, "server.DidOpen", label.URI.Of(params.TextDocument.URI)) defer done() uri := params.TextDocument.URI @@ -121,7 +121,7 @@ func (s *server) DidOpen(ctx context.Context, params *protocol.DidOpenTextDocume } func (s *server) DidChange(ctx context.Context, params *protocol.DidChangeTextDocumentParams) error { - ctx, done := event.Start(ctx, "lsp.Server.didChange", label.URI.Of(params.TextDocument.URI)) + ctx, done := event.Start(ctx, "server.DidChange", label.URI.Of(params.TextDocument.URI)) defer done() uri := params.TextDocument.URI @@ -174,7 +174,7 @@ func (s *server) warnAboutModifyingGeneratedFiles(ctx context.Context, uri proto } func (s *server) DidChangeWatchedFiles(ctx context.Context, params *protocol.DidChangeWatchedFilesParams) error { - ctx, done := event.Start(ctx, "lsp.Server.didChangeWatchedFiles") + ctx, done := event.Start(ctx, "server.DidChangeWatchedFiles") defer done() var modifications []file.Modification @@ -190,7 +190,7 @@ func (s *server) DidChangeWatchedFiles(ctx context.Context, params *protocol.Did } func (s *server) DidSave(ctx context.Context, params *protocol.DidSaveTextDocumentParams) error { - ctx, done := event.Start(ctx, "lsp.Server.didSave", label.URI.Of(params.TextDocument.URI)) + ctx, done := event.Start(ctx, "server.DidSave", label.URI.Of(params.TextDocument.URI)) defer done() c := file.Modification{ @@ -204,7 +204,7 @@ func (s *server) DidSave(ctx context.Context, params *protocol.DidSaveTextDocume } func (s *server) DidClose(ctx context.Context, params *protocol.DidCloseTextDocumentParams) error { - ctx, done := event.Start(ctx, "lsp.Server.didClose", label.URI.Of(params.TextDocument.URI)) + ctx, done := event.Start(ctx, "server.DidClose", label.URI.Of(params.TextDocument.URI)) defer done() return s.didModifyFiles(ctx, []file.Modification{ 
diff --git a/gopls/internal/server/unimplemented.go b/gopls/internal/server/unimplemented.go index 7375dc4bb1b..d3bb07cb647 100644 --- a/gopls/internal/server/unimplemented.go +++ b/gopls/internal/server/unimplemented.go @@ -34,10 +34,6 @@ func (s *server) DidCloseNotebookDocument(context.Context, *protocol.DidCloseNot return notImplemented("DidCloseNotebookDocument") } -func (s *server) DidCreateFiles(context.Context, *protocol.CreateFilesParams) error { - return notImplemented("DidCreateFiles") -} - func (s *server) DidDeleteFiles(context.Context, *protocol.DeleteFilesParams) error { return notImplemented("DidDeleteFiles") } diff --git a/gopls/internal/server/workspace.go b/gopls/internal/server/workspace.go index 84e663c1049..ced5656c6ac 100644 --- a/gopls/internal/server/workspace.go +++ b/gopls/internal/server/workspace.go @@ -12,6 +12,8 @@ import ( "sync" "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/file" + "golang.org/x/tools/gopls/internal/golang/completion" "golang.org/x/tools/gopls/internal/protocol" "golang.org/x/tools/gopls/internal/settings" "golang.org/x/tools/internal/event" @@ -59,7 +61,7 @@ func (s *server) addView(ctx context.Context, name string, dir protocol.Document } func (s *server) DidChangeConfiguration(ctx context.Context, _ *protocol.DidChangeConfigurationParams) error { - ctx, done := event.Start(ctx, "lsp.Server.didChangeConfiguration") + ctx, done := event.Start(ctx, "server.DidChangeConfiguration") defer done() var wg sync.WaitGroup @@ -139,3 +141,31 @@ func (s *server) DidChangeConfiguration(ctx context.Context, _ *protocol.DidChan return nil } + +func (s *server) DidCreateFiles(ctx context.Context, params *protocol.CreateFilesParams) error { + ctx, done := event.Start(ctx, "server.DidCreateFiles") + defer done() + + var allChanges []protocol.DocumentChange + for _, createdFile := range params.Files { + uri := protocol.DocumentURI(createdFile.URI) + fh, snapshot, release, err := s.fileOf(ctx, uri) + 
if err != nil { + event.Error(ctx, "fail to call fileOf", err) + continue + } + defer release() + + switch snapshot.FileKind(fh) { + case file.Go: + change, err := completion.NewFile(ctx, snapshot, fh) + if err != nil { + continue + } + allChanges = append(allChanges, *change) + default: + } + } + + return applyChanges(ctx, s.client, allChanges) +} diff --git a/gopls/internal/server/workspace_symbol.go b/gopls/internal/server/workspace_symbol.go index 9eafeb015ad..f34e76f7937 100644 --- a/gopls/internal/server/workspace_symbol.go +++ b/gopls/internal/server/workspace_symbol.go @@ -20,7 +20,7 @@ func (s *server) Symbol(ctx context.Context, params *protocol.WorkspaceSymbolPar recordLatency(ctx, rerr) }() - ctx, done := event.Start(ctx, "lsp.Server.symbol") + ctx, done := event.Start(ctx, "server.Symbol") defer done() views := s.session.Views() diff --git a/gopls/internal/settings/analysis.go b/gopls/internal/settings/analysis.go index 5ba8bdd06b0..e914407fe6b 100644 --- a/gopls/internal/settings/analysis.go +++ b/gopls/internal/settings/analysis.go @@ -49,7 +49,6 @@ import ( "golang.org/x/tools/gopls/internal/analysis/deprecated" "golang.org/x/tools/gopls/internal/analysis/embeddirective" "golang.org/x/tools/gopls/internal/analysis/fillreturns" - "golang.org/x/tools/gopls/internal/analysis/gofix" "golang.org/x/tools/gopls/internal/analysis/hostport" "golang.org/x/tools/gopls/internal/analysis/infertypeargs" "golang.org/x/tools/gopls/internal/analysis/modernize" @@ -63,6 +62,7 @@ import ( "golang.org/x/tools/gopls/internal/analysis/unusedvariable" "golang.org/x/tools/gopls/internal/analysis/yield" "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/internal/gofix" ) // Analyzer augments a [analysis.Analyzer] with additional LSP configuration. 
diff --git a/gopls/internal/settings/codeactionkind.go b/gopls/internal/settings/codeactionkind.go index 09d9d419567..f6f8a4df2a4 100644 --- a/gopls/internal/settings/codeactionkind.go +++ b/gopls/internal/settings/codeactionkind.go @@ -81,6 +81,7 @@ const ( GoTest protocol.CodeActionKind = "source.test" GoToggleCompilerOptDetails protocol.CodeActionKind = "source.toggleCompilerOptDetails" AddTest protocol.CodeActionKind = "source.addTest" + OrganizeImports protocol.CodeActionKind = "source.organizeImports" // gopls GoplsDocFeatures protocol.CodeActionKind = "gopls.doc.features" diff --git a/gopls/internal/settings/settings.go b/gopls/internal/settings/settings.go index 59b2aa1b87f..a47a69b0296 100644 --- a/gopls/internal/settings/settings.go +++ b/gopls/internal/settings/settings.go @@ -1387,7 +1387,7 @@ func (o *Options) EnabledSemanticTokenModifiers() map[semtok.Modifier]bool { return copy } -// EncodeSemanticTokenTypes returns a map of types to boolean. +// EnabledSemanticTokenTypes returns a map of types to boolean. func (o *Options) EnabledSemanticTokenTypes() map[semtok.Type]bool { copy := make(map[semtok.Type]bool, len(o.SemanticTokenTypes)) for k, v := range o.SemanticTokenTypes { diff --git a/gopls/internal/telemetry/cmd/stacks/stacks.go b/gopls/internal/telemetry/cmd/stacks/stacks.go index 36a675d0eb0..f8caabd67e6 100644 --- a/gopls/internal/telemetry/cmd/stacks/stacks.go +++ b/gopls/internal/telemetry/cmd/stacks/stacks.go @@ -529,7 +529,7 @@ func parsePredicate(s string) (func(string) bool, error) { }, nil } -// claimStack maps each stack ID to its issue (if any). +// claimStacks maps each stack ID to its issue (if any). // // It returns a map of stack text to the issue that claimed it. 
// diff --git a/gopls/internal/telemetry/telemetry_test.go b/gopls/internal/telemetry/telemetry_test.go index 4c41cc40dc9..1e56012182f 100644 --- a/gopls/internal/telemetry/telemetry_test.go +++ b/gopls/internal/telemetry/telemetry_test.go @@ -168,7 +168,7 @@ func addForwardedCounters(env *Env, names []string, values []int64) { Names: names, Values: values, }) if err != nil { - env.T.Fatal(err) + env.TB.Fatal(err) } var res error env.ExecuteCommand(&protocol.ExecuteCommandParams{ @@ -176,7 +176,7 @@ func addForwardedCounters(env *Env, names []string, values []int64) { Arguments: args, }, &res) if res != nil { - env.T.Errorf("%v failed - %v", command.AddTelemetryCounters, res) + env.TB.Errorf("%v failed - %v", command.AddTelemetryCounters, res) } } diff --git a/gopls/internal/test/integration/bench/completion_test.go b/gopls/internal/test/integration/bench/completion_test.go index d84512d1f8f..48ecf0cefd6 100644 --- a/gopls/internal/test/integration/bench/completion_test.go +++ b/gopls/internal/test/integration/bench/completion_test.go @@ -69,7 +69,7 @@ func endRangeInBuffer(env *Env, name string) protocol.Range { m := protocol.NewMapper("", []byte(buffer)) rng, err := m.OffsetRange(len(buffer), len(buffer)) if err != nil { - env.T.Fatal(err) + env.TB.Fatal(err) } return rng } diff --git a/gopls/internal/test/integration/bench/repo_test.go b/gopls/internal/test/integration/bench/repo_test.go index 50370e73491..65728c00552 100644 --- a/gopls/internal/test/integration/bench/repo_test.go +++ b/gopls/internal/test/integration/bench/repo_test.go @@ -211,7 +211,7 @@ func (r *repo) sharedEnv(tb testing.TB) *Env { }) return &Env{ - T: tb, + TB: tb, Ctx: context.Background(), Editor: r.editor, Sandbox: r.sandbox, @@ -238,7 +238,7 @@ func (r *repo) newEnv(tb testing.TB, config fake.EditorConfig, forOperation stri } return &Env{ - T: tb, + TB: tb, Ctx: context.Background(), Editor: editor, Sandbox: sandbox, diff --git 
a/gopls/internal/test/integration/completion/completion_test.go b/gopls/internal/test/integration/completion/completion_test.go index 0713b1f62b9..8fa03908c01 100644 --- a/gopls/internal/test/integration/completion/completion_test.go +++ b/gopls/internal/test/integration/completion/completion_test.go @@ -53,6 +53,10 @@ func TestPackageCompletion(t *testing.T) { module mod.com go 1.12 +-- cmd/main.go -- +package main +-- cmd/testfile.go -- +package -- fruits/apple.go -- package apple @@ -95,6 +99,13 @@ package want []string editRegexp string }{ + { + name: "main package completion after package keyword", + filename: "cmd/testfile.go", + triggerRegexp: "package()", + want: []string{"package main", "package cmd", "package cmd_test"}, + editRegexp: "package", + }, { name: "package completion at valid position", filename: "fruits/testfile.go", diff --git a/gopls/internal/test/integration/env.go b/gopls/internal/test/integration/env.go index f19a426316d..822120e8324 100644 --- a/gopls/internal/test/integration/env.go +++ b/gopls/internal/test/integration/env.go @@ -21,7 +21,7 @@ import ( // wrapper methods that hide the boilerplate of plumbing contexts and checking // errors. type Env struct { - T testing.TB // TODO(rfindley): rename to TB + TB testing.TB Ctx context.Context // Most tests should not need to access the scratch area, editor, server, or @@ -311,9 +311,9 @@ func (a *Awaiter) checkConditionsLocked() { // Use AfterChange or OnceMet instead, so that the runner knows when to stop // waiting. func (e *Env) Await(expectations ...Expectation) { - e.T.Helper() + e.TB.Helper() if err := e.Awaiter.Await(e.Ctx, AllOf(expectations...)); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } @@ -321,7 +321,7 @@ func (e *Env) Await(expectations ...Expectation) { // unmeetable. If it was met, OnceMet checks that the state meets all // expectations in mustMeets. 
func (e *Env) OnceMet(pre Expectation, mustMeets ...Expectation) { - e.T.Helper() + e.TB.Helper() e.Await(OnceMet(pre, AllOf(mustMeets...))) } diff --git a/gopls/internal/test/integration/expectation.go b/gopls/internal/test/integration/expectation.go index 70a16fd6b3a..98554ddccc3 100644 --- a/gopls/internal/test/integration/expectation.go +++ b/gopls/internal/test/integration/expectation.go @@ -352,7 +352,7 @@ func (e *Env) DoneDiagnosingChanges() Expectation { // - workspace/didChangeWatchedFiles // - workspace/didChangeConfiguration func (e *Env) AfterChange(expectations ...Expectation) { - e.T.Helper() + e.TB.Helper() e.OnceMet( e.DoneDiagnosingChanges(), expectations..., diff --git a/gopls/internal/test/integration/fake/editor.go b/gopls/internal/test/integration/fake/editor.go index 170a9823cad..01f3de8aba9 100644 --- a/gopls/internal/test/integration/fake/editor.go +++ b/gopls/internal/test/integration/fake/editor.go @@ -1309,6 +1309,19 @@ func (e *Editor) Completion(ctx context.Context, loc protocol.Location) (*protoc return completions, nil } +func (e *Editor) DidCreateFiles(ctx context.Context, files ...protocol.DocumentURI) error { + if e.Server == nil { + return nil + } + params := &protocol.CreateFilesParams{} + for _, file := range files { + params.Files = append(params.Files, protocol.FileCreate{ + URI: string(file), + }) + } + return e.Server.DidCreateFiles(ctx, params) +} + func (e *Editor) SetSuggestionInsertReplaceMode(_ context.Context, useReplaceMode bool) { e.mu.Lock() defer e.mu.Unlock() diff --git a/gopls/internal/test/integration/misc/codeactions_test.go b/gopls/internal/test/integration/misc/codeactions_test.go index c62a3898e9b..d9c83186d69 100644 --- a/gopls/internal/test/integration/misc/codeactions_test.go +++ b/gopls/internal/test/integration/misc/codeactions_test.go @@ -35,25 +35,28 @@ package a func f() { g() } func g() {} + +-- issue72742/a.go -- +package main + +func main(){ + fmt.Println("helloworld") +} ` Run(t, src, func(t 
*testing.T, env *Env) { - check := func(filename string, wantKind ...protocol.CodeActionKind) { + check := func(filename string, re string, want []protocol.CodeActionKind) { env.OpenFile(filename) - loc := env.RegexpSearch(filename, `g\(\)`) + loc := env.RegexpSearch(filename, re) actions, err := env.Editor.CodeAction(env.Ctx, loc, nil, protocol.CodeActionUnknownTrigger) if err != nil { t.Fatal(err) } - type kinds = map[protocol.CodeActionKind]bool - got := make(kinds) + type kinds = []protocol.CodeActionKind + got := make(kinds, 0) for _, act := range actions { - got[act.Kind] = true - } - want := make(kinds) - for _, kind := range wantKind { - want[kind] = true + got = append(got, act.Kind) } if diff := cmp.Diff(want, got); diff != "" { @@ -63,20 +66,33 @@ func g() {} } } - check("src/a.go", + check("src/a.go", `g\(\)`, []protocol.CodeActionKind{ settings.AddTest, settings.GoAssembly, settings.GoDoc, settings.GoFreeSymbols, settings.GoToggleCompilerOptDetails, + settings.RefactorInlineCall, settings.GoplsDocFeatures, - settings.RefactorInlineCall) - check("gen/a.go", + }) + + check("gen/a.go", `g\(\)`, []protocol.CodeActionKind{ settings.GoAssembly, settings.GoDoc, settings.GoFreeSymbols, settings.GoToggleCompilerOptDetails, - settings.GoplsDocFeatures) + settings.GoplsDocFeatures, + }) + + check("issue72742/a.go", `fmt`, []protocol.CodeActionKind{ + settings.OrganizeImports, + settings.AddTest, + settings.GoAssembly, + settings.GoDoc, + settings.GoFreeSymbols, + settings.GoToggleCompilerOptDetails, + settings.GoplsDocFeatures, + }) }) } diff --git a/gopls/internal/test/integration/misc/compileropt_test.go b/gopls/internal/test/integration/misc/compileropt_test.go index 175ec640042..68138fabc43 100644 --- a/gopls/internal/test/integration/misc/compileropt_test.go +++ b/gopls/internal/test/integration/misc/compileropt_test.go @@ -166,3 +166,66 @@ func H(x int) any { return &x } ) }) } + +// TestCompilerOptDetails_config exercises that the "want optimization +// 
details" flag honors the "annotation" configuration setting. +func TestCompilerOptDetails_config(t *testing.T) { + if runtime.GOOS == "android" { + t.Skipf("the compiler optimization details code action doesn't work on Android") + } + + const mod = ` +-- go.mod -- +module mod.com +go 1.18 + +-- a/a.go -- +package a + +func F(x int) any { return &x } // escape(x escapes to heap) +func G() { defer func(){} () } // cannotInlineFunction(unhandled op DEFER) +` + + for _, escape := range []bool{true, false} { + WithOptions( + Settings{"annotations": map[string]any{"inline": true, "escape": escape}}, + ).Run(t, mod, func(t *testing.T, env *Env) { + env.OpenFile("a/a.go") + actions := env.CodeActionForFile("a/a.go", nil) + + docAction, err := codeActionByKind(actions, settings.GoToggleCompilerOptDetails) + if err != nil { + t.Fatal(err) + } + params := &protocol.ExecuteCommandParams{ + Command: docAction.Command.Command, + Arguments: docAction.Command.Arguments, + } + env.ExecuteCommand(params, nil) + + env.OnceMet( + CompletedWork(server.DiagnosticWorkTitle(server.FromToggleCompilerOptDetails), 1, true), + cond(escape, Diagnostics, NoDiagnostics)( + ForFile("a/a.go"), + AtPosition("a/a.go", 2, 7), + WithMessage("x escapes to heap"), + WithSeverityTags("optimizer details", protocol.SeverityInformation, nil), + ), + Diagnostics( + ForFile("a/a.go"), + AtPosition("a/a.go", 3, 5), + WithMessage("cannotInlineFunction(unhandled op DEFER)"), + WithSeverityTags("optimizer details", protocol.SeverityInformation, nil), + ), + ) + }) + } +} + +func cond[T any](cond bool, x, y T) T { + if cond { + return x + } else { + return y + } +} diff --git a/gopls/internal/test/integration/misc/highlight_test.go b/gopls/internal/test/integration/misc/highlight_test.go index e4da558e5d0..36bddf25057 100644 --- a/gopls/internal/test/integration/misc/highlight_test.go +++ b/gopls/internal/test/integration/misc/highlight_test.go @@ -124,7 +124,7 @@ func main() {}` } func checkHighlights(env *Env, 
loc protocol.Location, highlightCount int) { - t := env.T + t := env.TB t.Helper() highlights := env.DocumentHighlight(loc) diff --git a/gopls/internal/test/integration/misc/test_test.go b/gopls/internal/test/integration/misc/test_test.go new file mode 100644 index 00000000000..b282bf57a95 --- /dev/null +++ b/gopls/internal/test/integration/misc/test_test.go @@ -0,0 +1,82 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package misc + +// This file defines tests of the source.test ("Run tests and +// benchmarks") code action. + +import ( + "os" + "path/filepath" + "testing" + + "golang.org/x/tools/gopls/internal/protocol" + "golang.org/x/tools/gopls/internal/settings" + . "golang.org/x/tools/gopls/internal/test/integration" +) + +func TestRunTestsAndBenchmarks(t *testing.T) { + file := filepath.Join(t.TempDir(), "out") + os.Setenv("TESTFILE", file) + + const src = ` +-- go.mod -- +module example.com +go 1.19 + +-- a/a.go -- +package a + +-- a/a_test.go -- +package a + +import ( + "os" + "testing" +) + +func Test(t *testing.T) { + os.WriteFile(os.Getenv("TESTFILE"), []byte("ok"), 0644) +} + +` + Run(t, src, func(t *testing.T, env *Env) { + env.OpenFile("a/a_test.go") + loc := env.RegexpSearch("a/a_test.go", "WriteFile") + + // Request code actions. (settings.GoTest is special: + // it is returned only when explicitly requested.) + actions, err := env.Editor.Server.CodeAction(env.Ctx, &protocol.CodeActionParams{ + TextDocument: protocol.TextDocumentIdentifier{URI: loc.URI}, + Range: loc.Range, + Context: protocol.CodeActionContext{ + Only: []protocol.CodeActionKind{settings.GoTest}, + }, + }) + if err != nil { + t.Fatal(err) + } + if len(actions) != 1 { + t.Fatalf("CodeAction returned %#v, want one source.test action", actions) + } + if actions[0].Command == nil { + t.Fatalf("CodeActions()[0] has no Command") + } + + // Execute test. 
+ // (ExecuteCommand fails if the test fails.) + t.Logf("Running %s...", actions[0].Title) + env.ExecuteCommand(&protocol.ExecuteCommandParams{ + Command: actions[0].Command.Command, + Arguments: actions[0].Command.Arguments, + }, nil) + + // Check test had expected side effect. + data, err := os.ReadFile(file) + if string(data) != "ok" { + t.Fatalf("Test did not write expected content of %s; ReadFile returned (%q, %v)", file, data, err) + } + }) +} diff --git a/gopls/internal/test/integration/misc/webserver_test.go b/gopls/internal/test/integration/misc/webserver_test.go index 79a6548ee3e..691d45baa6e 100644 --- a/gopls/internal/test/integration/misc/webserver_test.go +++ b/gopls/internal/test/integration/misc/webserver_test.go @@ -589,13 +589,15 @@ func init() { checkMatch(t, true, report, `CALL runtime.printlock`) checkMatch(t, true, report, `CALL runtime.printstring`) checkMatch(t, true, report, `CALL runtime.printunlock`) - checkMatch(t, true, report, `CALL example.com/a.f.deferwrap1`) + checkMatch(t, true, report, `CALL example.com/a.f.deferwrap`) checkMatch(t, true, report, `RET`) checkMatch(t, true, report, `CALL runtime.morestack_noctxt`) } // Nested functions are also shown. - checkMatch(t, true, report, `TEXT.*example.com/a.f.deferwrap1`) + // + // The condition here was relaxed to unblock go.dev/cl/639515. + checkMatch(t, true, report, `example.com/a.f.deferwrap`) // But other functions are not. checkMatch(t, false, report, `TEXT.*example.com/a.g`) diff --git a/gopls/internal/test/integration/misc/workspace_symbol_test.go b/gopls/internal/test/integration/misc/workspace_symbol_test.go index 9420b146d85..f1148539447 100644 --- a/gopls/internal/test/integration/misc/workspace_symbol_test.go +++ b/gopls/internal/test/integration/misc/workspace_symbol_test.go @@ -8,8 +8,8 @@ import ( "testing" "github.com/google/go-cmp/cmp" - . "golang.org/x/tools/gopls/internal/test/integration" "golang.org/x/tools/gopls/internal/settings" + . 
"golang.org/x/tools/gopls/internal/test/integration" ) func TestWorkspaceSymbolMissingMetadata(t *testing.T) { @@ -103,12 +103,12 @@ const ( } func checkSymbols(env *Env, query string, want ...string) { - env.T.Helper() + env.TB.Helper() var got []string for _, info := range env.Symbol(query) { got = append(got, info.Name) } if diff := cmp.Diff(got, want); diff != "" { - env.T.Errorf("unexpected Symbol(%q) result (+want -got):\n%s", query, diff) + env.TB.Errorf("unexpected Symbol(%q) result (+want -got):\n%s", query, diff) } } diff --git a/gopls/internal/test/integration/runner.go b/gopls/internal/test/integration/runner.go index b3e98b859d3..c4609cb8f91 100644 --- a/gopls/internal/test/integration/runner.go +++ b/gopls/internal/test/integration/runner.go @@ -253,7 +253,7 @@ func ConnectGoplsEnv(t testing.TB, ctx context.Context, sandbox *fake.Sandbox, c t.Fatal(err) } env := &Env{ - T: t, + TB: t, Ctx: ctx, Sandbox: sandbox, Server: connector, @@ -266,10 +266,10 @@ func ConnectGoplsEnv(t testing.TB, ctx context.Context, sandbox *fake.Sandbox, c // longBuilders maps builders that are skipped when -short is set to a // (possibly empty) justification. var longBuilders = map[string]string{ - "openbsd-amd64-64": "go.dev/issue/42789", - "openbsd-386-64": "go.dev/issue/42789", - "openbsd-386-68": "go.dev/issue/42789", - "openbsd-amd64-68": "go.dev/issue/42789", + "x_tools-gotip-openbsd-amd64": "go.dev/issue/72145", + "x_tools-go1.24-openbsd-amd64": "go.dev/issue/72145", + "x_tools-go1.23-openbsd-amd64": "go.dev/issue/72145", + "darwin-amd64-10_12": "", "freebsd-amd64-race": "", "illumos-amd64": "", diff --git a/gopls/internal/test/integration/workspace/didcreatefiles_test.go b/gopls/internal/test/integration/workspace/didcreatefiles_test.go new file mode 100644 index 00000000000..cba0daf472e --- /dev/null +++ b/gopls/internal/test/integration/workspace/didcreatefiles_test.go @@ -0,0 +1,146 @@ +// Copyright 2025 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package workspace + +import ( + "context" + "fmt" + "testing" + + . "golang.org/x/tools/gopls/internal/test/integration" +) + +// TestAutoFillPackageDecl tests that creation of a new .go file causes +// gopls to choose a sensible package name and fill in the package declaration. +func TestAutoFillPackageDecl(t *testing.T) { + const existFiles = ` +-- go.mod -- +module mod.com + +go 1.12 + +-- dog/a_test.go -- +package dog +-- fruits/apple.go -- +package apple + +fun apple() int { + return 0 +} + +-- license/license.go -- +/* Copyright 2025 The Go Authors. All rights reserved. +Use of this source code is governed by a BSD-style +license that can be found in the LICENSE file. */ + +package license + +-- license1/license.go -- +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package license1 + +-- cmd/main.go -- +package main + +-- integration/a_test.go -- +package integration_test + +-- nopkg/testfile.go -- +package +` + for _, tc := range []struct { + name string + newfile string + want string + }{ + { + name: "new file in folder with a_test.go", + newfile: "dog/newfile.go", + want: "package dog\n", + }, + { + name: "new file in folder with go file", + newfile: "fruits/newfile.go", + want: "package apple\n", + }, + { + name: "new test file in folder with go file", + newfile: "fruits/newfile_test.go", + want: "package apple\n", + }, + { + name: "new file in folder with go file that contains license comment", + newfile: "license/newfile.go", + want: `/* Copyright 2025 The Go Authors. All rights reserved. +Use of this source code is governed by a BSD-style +license that can be found in the LICENSE file. 
*/ + +package license +`, + }, + { + name: "new file in folder with go file that contains license comment", + newfile: "license1/newfile.go", + want: `// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package license1 +`, + }, + { + name: "new file in folder with main package", + newfile: "cmd/newfile.go", + want: "package main\n", + }, + { + name: "new file in empty folder", + newfile: "empty_folder/newfile.go", + want: "package emptyfolder\n", + }, + { + name: "new file in folder with integration_test package", + newfile: "integration/newfile.go", + want: "package integration\n", + }, + { + name: "new test file in folder with integration_test package", + newfile: "integration/newfile_test.go", + want: "package integration\n", + }, + { + name: "new file in folder with incomplete package clause", + newfile: "incomplete/newfile.go", + want: "package incomplete\n", + }, + { + name: "package completion for dir name with punctuation", + newfile: "123f_r.u~its-123/newfile.go", + want: "package fruits123\n", + }, + { + name: "package completion for dir name with invalid dir name", + newfile: "123f_r.u~its-123/newfile.go", + want: "package fruits123\n", + }, + } { + t.Run(tc.name, func(t *testing.T) { + createFiles := fmt.Sprintf("%s\n-- %s --", existFiles, tc.newfile) + Run(t, createFiles, func(t *testing.T, env *Env) { + env.DidCreateFiles(env.Editor.DocumentURI(tc.newfile)) + // save buffer to ensure the edits take effects in the file system. 
+ if err := env.Editor.SaveBuffer(context.Background(), tc.newfile); err != nil { + t.Fatal(err) + } + if got := env.FileContent(tc.newfile); tc.want != got { + t.Fatalf("want '%s' but got '%s'", tc.want, got) + } + }) + }) + } +} diff --git a/gopls/internal/test/integration/workspace/packages_test.go b/gopls/internal/test/integration/workspace/packages_test.go index fdee21d822f..3420e32e084 100644 --- a/gopls/internal/test/integration/workspace/packages_test.go +++ b/gopls/internal/test/integration/workspace/packages_test.go @@ -433,6 +433,66 @@ func (X) SubtestMethod(t *testing.T) { }) } +func TestRecursiveSubtest(t *testing.T) { + const files = ` +-- go.mod -- +module foo + +-- foo_test.go -- +package foo + +import "testing" + +func TestFoo(t *testing.T) { t.Run("Foo", TestFoo) } +func TestBar(t *testing.T) { t.Run("Foo", TestFoo) } + +func TestBaz(t *testing.T) { + var sub func(t *testing.T) + sub = func(t *testing.T) { t.Run("Sub", sub) } + t.Run("Sub", sub) +} +` + + Run(t, files, func(t *testing.T, env *Env) { + checkPackages(t, env, []protocol.DocumentURI{env.Editor.DocumentURI("foo_test.go")}, false, command.NeedTests, []command.Package{ + { + Path: "foo", + ForTest: "foo", + ModulePath: "foo", + TestFiles: []command.TestFile{ + { + URI: env.Editor.DocumentURI("foo_test.go"), + Tests: []command.TestCase{ + {Name: "TestFoo"}, + {Name: "TestFoo/Foo"}, + {Name: "TestBar"}, + {Name: "TestBar/Foo"}, + {Name: "TestBaz"}, + {Name: "TestBaz/Sub"}, + }, + }, + }, + }, + }, map[string]command.Module{ + "foo": { + Path: "foo", + GoMod: env.Editor.DocumentURI("go.mod"), + }, + }, []string{ + `func TestFoo(t *testing.T) { t.Run("Foo", TestFoo) }`, + `t.Run("Foo", TestFoo)`, + `func TestBar(t *testing.T) { t.Run("Foo", TestFoo) }`, + `t.Run("Foo", TestFoo)`, + `func TestBaz(t *testing.T) { + var sub func(t *testing.T) + sub = func(t *testing.T) { t.Run("Sub", sub) } + t.Run("Sub", sub) +}`, + `t.Run("Sub", sub)`, + }) + }) +} + func checkPackages(t testing.TB, env *Env, 
files []protocol.DocumentURI, recursive bool, mode command.PackagesMode, wantPkg []command.Package, wantModule map[string]command.Module, wantSource []string) { t.Helper() diff --git a/gopls/internal/test/integration/wrappers.go b/gopls/internal/test/integration/wrappers.go index 989ae913acf..17e0cf329c4 100644 --- a/gopls/internal/test/integration/wrappers.go +++ b/gopls/internal/test/integration/wrappers.go @@ -18,19 +18,19 @@ import ( // RemoveWorkspaceFile deletes a file on disk but does nothing in the // editor. It calls t.Fatal on any error. func (e *Env) RemoveWorkspaceFile(name string) { - e.T.Helper() + e.TB.Helper() if err := e.Sandbox.Workdir.RemoveFile(e.Ctx, name); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } // ReadWorkspaceFile reads a file from the workspace, calling t.Fatal on any // error. func (e *Env) ReadWorkspaceFile(name string) string { - e.T.Helper() + e.TB.Helper() content, err := e.Sandbox.Workdir.ReadFile(name) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return string(content) } @@ -38,55 +38,55 @@ func (e *Env) ReadWorkspaceFile(name string) string { // WriteWorkspaceFile writes a file to disk but does nothing in the editor. // It calls t.Fatal on any error. func (e *Env) WriteWorkspaceFile(name, content string) { - e.T.Helper() + e.TB.Helper() if err := e.Sandbox.Workdir.WriteFile(e.Ctx, name, content); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } // WriteWorkspaceFiles deletes a file on disk but does nothing in the // editor. It calls t.Fatal on any error. func (e *Env) WriteWorkspaceFiles(files map[string]string) { - e.T.Helper() + e.TB.Helper() if err := e.Sandbox.Workdir.WriteFiles(e.Ctx, files); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } // ListFiles lists relative paths to files in the given directory. // It calls t.Fatal on any error. 
func (e *Env) ListFiles(dir string) []string { - e.T.Helper() + e.TB.Helper() paths, err := e.Sandbox.Workdir.ListFiles(dir) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return paths } // OpenFile opens a file in the editor, calling t.Fatal on any error. func (e *Env) OpenFile(name string) { - e.T.Helper() + e.TB.Helper() if err := e.Editor.OpenFile(e.Ctx, name); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } // CreateBuffer creates a buffer in the editor, calling t.Fatal on any error. func (e *Env) CreateBuffer(name string, content string) { - e.T.Helper() + e.TB.Helper() if err := e.Editor.CreateBuffer(e.Ctx, name, content); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } // BufferText returns the current buffer contents for the file with the given // relative path, calling t.Fatal if the file is not open in a buffer. func (e *Env) BufferText(name string) string { - e.T.Helper() + e.TB.Helper() text, ok := e.Editor.BufferText(name) if !ok { - e.T.Fatalf("buffer %q is not open", name) + e.TB.Fatalf("buffer %q is not open", name) } return text } @@ -94,24 +94,24 @@ func (e *Env) BufferText(name string) string { // CloseBuffer closes an editor buffer without saving, calling t.Fatal on any // error. func (e *Env) CloseBuffer(name string) { - e.T.Helper() + e.TB.Helper() if err := e.Editor.CloseBuffer(e.Ctx, name); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } // EditBuffer applies edits to an editor buffer, calling t.Fatal on any error. 
func (e *Env) EditBuffer(name string, edits ...protocol.TextEdit) { - e.T.Helper() + e.TB.Helper() if err := e.Editor.EditBuffer(e.Ctx, name, edits); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } func (e *Env) SetBufferContent(name string, content string) { - e.T.Helper() + e.TB.Helper() if err := e.Editor.SetBufferContent(e.Ctx, name, content); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } @@ -119,7 +119,7 @@ func (e *Env) SetBufferContent(name string, content string) { // editing session: it returns the buffer content for an open file, the // on-disk content for an unopened file, or "" for a non-existent file. func (e *Env) FileContent(name string) string { - e.T.Helper() + e.TB.Helper() text, ok := e.Editor.BufferText(name) if ok { return text @@ -129,7 +129,7 @@ func (e *Env) FileContent(name string) string { if errors.Is(err, os.ErrNotExist) { return "" } else { - e.T.Fatal(err) + e.TB.Fatal(err) } } return string(content) @@ -138,14 +138,14 @@ func (e *Env) FileContent(name string) string { // FileContentAt returns the file content at the given location, using the // file's mapper. func (e *Env) FileContentAt(location protocol.Location) string { - e.T.Helper() + e.TB.Helper() mapper, err := e.Editor.Mapper(location.URI.Path()) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } start, end, err := mapper.RangeOffsets(location.Range) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return string(mapper.Content[start:end]) } @@ -154,13 +154,13 @@ func (e *Env) FileContentAt(location protocol.Location) string { // buffer specified by name, calling t.Fatal on any error. It first searches // for the position in open buffers, then in workspace files. 
func (e *Env) RegexpSearch(name, re string) protocol.Location { - e.T.Helper() + e.TB.Helper() loc, err := e.Editor.RegexpSearch(name, re) if err == fake.ErrUnknownBuffer { loc, err = e.Sandbox.Workdir.RegexpSearch(name, re) } if err != nil { - e.T.Fatalf("RegexpSearch: %v, %v for %q", name, err, re) + e.TB.Fatalf("RegexpSearch: %v, %v for %q", name, err, re) } return loc } @@ -168,24 +168,24 @@ func (e *Env) RegexpSearch(name, re string) protocol.Location { // RegexpReplace replaces the first group in the first match of regexpStr with // the replace text, calling t.Fatal on any error. func (e *Env) RegexpReplace(name, regexpStr, replace string) { - e.T.Helper() + e.TB.Helper() if err := e.Editor.RegexpReplace(e.Ctx, name, regexpStr, replace); err != nil { - e.T.Fatalf("RegexpReplace: %v", err) + e.TB.Fatalf("RegexpReplace: %v", err) } } // SaveBuffer saves an editor buffer, calling t.Fatal on any error. func (e *Env) SaveBuffer(name string) { - e.T.Helper() + e.TB.Helper() if err := e.Editor.SaveBuffer(e.Ctx, name); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } func (e *Env) SaveBufferWithoutActions(name string) { - e.T.Helper() + e.TB.Helper() if err := e.Editor.SaveBufferWithoutActions(e.Ctx, name); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } @@ -194,64 +194,64 @@ func (e *Env) SaveBufferWithoutActions(name string) { // // TODO(rfindley): rename this to just 'Definition'. func (e *Env) GoToDefinition(loc protocol.Location) protocol.Location { - e.T.Helper() + e.TB.Helper() loc, err := e.Editor.Definition(e.Ctx, loc) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return loc } func (e *Env) TypeDefinition(loc protocol.Location) protocol.Location { - e.T.Helper() + e.TB.Helper() loc, err := e.Editor.TypeDefinition(e.Ctx, loc) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return loc } // FormatBuffer formats the editor buffer, calling t.Fatal on any error. 
func (e *Env) FormatBuffer(name string) { - e.T.Helper() + e.TB.Helper() if err := e.Editor.FormatBuffer(e.Ctx, name); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } // OrganizeImports processes the source.organizeImports codeAction, calling // t.Fatal on any error. func (e *Env) OrganizeImports(name string) { - e.T.Helper() + e.TB.Helper() if err := e.Editor.OrganizeImports(e.Ctx, name); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } // ApplyQuickFixes processes the quickfix codeAction, calling t.Fatal on any error. func (e *Env) ApplyQuickFixes(path string, diagnostics []protocol.Diagnostic) { - e.T.Helper() + e.TB.Helper() loc := e.Sandbox.Workdir.EntireFile(path) if err := e.Editor.ApplyQuickFixes(e.Ctx, loc, diagnostics); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } // ApplyCodeAction applies the given code action, calling t.Fatal on any error. func (e *Env) ApplyCodeAction(action protocol.CodeAction) { - e.T.Helper() + e.TB.Helper() if err := e.Editor.ApplyCodeAction(e.Ctx, action); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } // Diagnostics returns diagnostics for the given file, calling t.Fatal on any // error. func (e *Env) Diagnostics(name string) []protocol.Diagnostic { - e.T.Helper() + e.TB.Helper() diags, err := e.Editor.Diagnostics(e.Ctx, name) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return diags } @@ -259,11 +259,11 @@ func (e *Env) Diagnostics(name string) []protocol.Diagnostic { // GetQuickFixes returns the available quick fix code actions, calling t.Fatal // on any error. 
func (e *Env) GetQuickFixes(path string, diagnostics []protocol.Diagnostic) []protocol.CodeAction { - e.T.Helper() + e.TB.Helper() loc := e.Sandbox.Workdir.EntireFile(path) actions, err := e.Editor.GetQuickFixes(e.Ctx, loc, diagnostics) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return actions } @@ -271,28 +271,28 @@ func (e *Env) GetQuickFixes(path string, diagnostics []protocol.Diagnostic) []pr // Hover in the editor, calling t.Fatal on any error. // It may return (nil, zero) even on success. func (e *Env) Hover(loc protocol.Location) (*protocol.MarkupContent, protocol.Location) { - e.T.Helper() + e.TB.Helper() c, loc, err := e.Editor.Hover(e.Ctx, loc) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return c, loc } func (e *Env) DocumentLink(name string) []protocol.DocumentLink { - e.T.Helper() + e.TB.Helper() links, err := e.Editor.DocumentLink(e.Ctx, name) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return links } func (e *Env) DocumentHighlight(loc protocol.Location) []protocol.DocumentHighlight { - e.T.Helper() + e.TB.Helper() highlights, err := e.Editor.DocumentHighlight(e.Ctx, loc) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return highlights } @@ -301,9 +301,9 @@ func (e *Env) DocumentHighlight(loc protocol.Location) []protocol.DocumentHighli // It waits for the generate command to complete and checks for file changes // before returning. func (e *Env) RunGenerate(dir string) { - e.T.Helper() + e.TB.Helper() if err := e.Editor.RunGenerate(e.Ctx, dir); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } e.Await(NoOutstandingWork(IgnoreTelemetryPromptWork)) // Ideally the editor.Workspace would handle all synthetic file watching, but @@ -315,10 +315,10 @@ func (e *Env) RunGenerate(dir string) { // RunGoCommand runs the given command in the sandbox's default working // directory. 
func (e *Env) RunGoCommand(verb string, args ...string) []byte { - e.T.Helper() + e.TB.Helper() out, err := e.Sandbox.RunGoCommand(e.Ctx, "", verb, args, nil, true) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return out } @@ -326,28 +326,28 @@ func (e *Env) RunGoCommand(verb string, args ...string) []byte { // RunGoCommandInDir is like RunGoCommand, but executes in the given // relative directory of the sandbox. func (e *Env) RunGoCommandInDir(dir, verb string, args ...string) { - e.T.Helper() + e.TB.Helper() if _, err := e.Sandbox.RunGoCommand(e.Ctx, dir, verb, args, nil, true); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } // RunGoCommandInDirWithEnv is like RunGoCommand, but executes in the given // relative directory of the sandbox with the given additional environment variables. func (e *Env) RunGoCommandInDirWithEnv(dir string, env []string, verb string, args ...string) { - e.T.Helper() + e.TB.Helper() if _, err := e.Sandbox.RunGoCommand(e.Ctx, dir, verb, args, env, true); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } // GoVersion checks the version of the go command. // It returns the X in Go 1.X. func (e *Env) GoVersion() int { - e.T.Helper() + e.TB.Helper() v, err := e.Sandbox.GoVersion(e.Ctx) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return v } @@ -355,33 +355,33 @@ func (e *Env) GoVersion() int { // DumpGoSum prints the correct go.sum contents for dir in txtar format, // for use in creating integration tests. 
func (e *Env) DumpGoSum(dir string) { - e.T.Helper() + e.TB.Helper() if _, err := e.Sandbox.RunGoCommand(e.Ctx, dir, "list", []string{"-mod=mod", "./..."}, nil, true); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } sumFile := path.Join(dir, "go.sum") - e.T.Log("\n\n-- " + sumFile + " --\n" + e.ReadWorkspaceFile(sumFile)) - e.T.Fatal("see contents above") + e.TB.Log("\n\n-- " + sumFile + " --\n" + e.ReadWorkspaceFile(sumFile)) + e.TB.Fatal("see contents above") } // CheckForFileChanges triggers a manual poll of the workspace for any file // changes since creation, or since last polling. It is a workaround for the // lack of true file watching support in the fake workspace. func (e *Env) CheckForFileChanges() { - e.T.Helper() + e.TB.Helper() if err := e.Sandbox.Workdir.CheckForFileChanges(e.Ctx); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } // CodeLens calls textDocument/codeLens for the given path, calling t.Fatal on // any error. func (e *Env) CodeLens(path string) []protocol.CodeLens { - e.T.Helper() + e.TB.Helper() lens, err := e.Editor.CodeLens(e.Ctx, path) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return lens } @@ -391,9 +391,9 @@ func (e *Env) CodeLens(path string) []protocol.CodeLens { // // result is a pointer to a variable to be populated by json.Unmarshal. func (e *Env) ExecuteCodeLensCommand(path string, cmd command.Command, result any) { - e.T.Helper() + e.TB.Helper() if err := e.Editor.ExecuteCodeLensCommand(e.Ctx, path, cmd, result); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } @@ -402,9 +402,9 @@ func (e *Env) ExecuteCodeLensCommand(path string, cmd command.Command, result an // // result is a pointer to a variable to be populated by json.Unmarshal. 
func (e *Env) ExecuteCommand(params *protocol.ExecuteCommandParams, result any) { - e.T.Helper() + e.TB.Helper() if err := e.Editor.ExecuteCommand(e.Ctx, params, result); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } @@ -430,7 +430,7 @@ func (e *Env) StartProfile() (stop func() string) { // This would be a lot simpler if we generated params constructors. args, err := command.MarshalArgs(command.StartProfileArgs{}) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } params := &protocol.ExecuteCommandParams{ Command: command.StartProfile.String(), @@ -442,7 +442,7 @@ func (e *Env) StartProfile() (stop func() string) { return func() string { stopArgs, err := command.MarshalArgs(command.StopProfileArgs{}) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } stopParams := &protocol.ExecuteCommandParams{ Command: command.StopProfile.String(), @@ -457,91 +457,99 @@ func (e *Env) StartProfile() (stop func() string) { // InlayHints calls textDocument/inlayHints for the given path, calling t.Fatal on // any error. func (e *Env) InlayHints(path string) []protocol.InlayHint { - e.T.Helper() + e.TB.Helper() hints, err := e.Editor.InlayHint(e.Ctx, path) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return hints } // Symbol calls workspace/symbol func (e *Env) Symbol(query string) []protocol.SymbolInformation { - e.T.Helper() + e.TB.Helper() ans, err := e.Editor.Symbols(e.Ctx, query) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return ans } // References wraps Editor.References, calling t.Fatal on any error. func (e *Env) References(loc protocol.Location) []protocol.Location { - e.T.Helper() + e.TB.Helper() locations, err := e.Editor.References(e.Ctx, loc) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return locations } // Rename wraps Editor.Rename, calling t.Fatal on any error. 
func (e *Env) Rename(loc protocol.Location, newName string) { - e.T.Helper() + e.TB.Helper() if err := e.Editor.Rename(e.Ctx, loc, newName); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } // Implementations wraps Editor.Implementations, calling t.Fatal on any error. func (e *Env) Implementations(loc protocol.Location) []protocol.Location { - e.T.Helper() + e.TB.Helper() locations, err := e.Editor.Implementations(e.Ctx, loc) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return locations } // RenameFile wraps Editor.RenameFile, calling t.Fatal on any error. func (e *Env) RenameFile(oldPath, newPath string) { - e.T.Helper() + e.TB.Helper() if err := e.Editor.RenameFile(e.Ctx, oldPath, newPath); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } // SignatureHelp wraps Editor.SignatureHelp, calling t.Fatal on error func (e *Env) SignatureHelp(loc protocol.Location) *protocol.SignatureHelp { - e.T.Helper() + e.TB.Helper() sighelp, err := e.Editor.SignatureHelp(e.Ctx, loc) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return sighelp } // Completion executes a completion request on the server. func (e *Env) Completion(loc protocol.Location) *protocol.CompletionList { - e.T.Helper() + e.TB.Helper() completions, err := e.Editor.Completion(e.Ctx, loc) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return completions } +func (e *Env) DidCreateFiles(files ...protocol.DocumentURI) { + e.TB.Helper() + err := e.Editor.DidCreateFiles(e.Ctx, files...) + if err != nil { + e.TB.Fatal(err) + } +} + func (e *Env) SetSuggestionInsertReplaceMode(useReplaceMode bool) { - e.T.Helper() + e.TB.Helper() e.Editor.SetSuggestionInsertReplaceMode(e.Ctx, useReplaceMode) } // AcceptCompletion accepts a completion for the given item at the given // position. 
func (e *Env) AcceptCompletion(loc protocol.Location, item protocol.CompletionItem) { - e.T.Helper() + e.TB.Helper() if err := e.Editor.AcceptCompletion(e.Ctx, loc, item); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } @@ -554,38 +562,38 @@ func (e *Env) CodeActionForFile(path string, diagnostics []protocol.Diagnostic) // CodeAction calls textDocument/codeAction for a selection, // and calls t.Fatal if there were errors. func (e *Env) CodeAction(loc protocol.Location, diagnostics []protocol.Diagnostic, trigger protocol.CodeActionTriggerKind) []protocol.CodeAction { - e.T.Helper() + e.TB.Helper() actions, err := e.Editor.CodeAction(e.Ctx, loc, diagnostics, trigger) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return actions } // ChangeConfiguration updates the editor config, calling t.Fatal on any error. func (e *Env) ChangeConfiguration(newConfig fake.EditorConfig) { - e.T.Helper() + e.TB.Helper() if err := e.Editor.ChangeConfiguration(e.Ctx, newConfig); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } // ChangeWorkspaceFolders updates the editor workspace folders, calling t.Fatal // on any error. func (e *Env) ChangeWorkspaceFolders(newFolders ...string) { - e.T.Helper() + e.TB.Helper() if err := e.Editor.ChangeWorkspaceFolders(e.Ctx, newFolders); err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } } // SemanticTokensFull invokes textDocument/semanticTokens/full, calling t.Fatal // on any error. func (e *Env) SemanticTokensFull(path string) []fake.SemanticToken { - e.T.Helper() + e.TB.Helper() toks, err := e.Editor.SemanticTokensFull(e.Ctx, path) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return toks } @@ -593,10 +601,10 @@ func (e *Env) SemanticTokensFull(path string) []fake.SemanticToken { // SemanticTokensRange invokes textDocument/semanticTokens/range, calling t.Fatal // on any error. 
func (e *Env) SemanticTokensRange(loc protocol.Location) []fake.SemanticToken { - e.T.Helper() + e.TB.Helper() toks, err := e.Editor.SemanticTokensRange(e.Ctx, loc) if err != nil { - e.T.Fatal(err) + e.TB.Fatal(err) } return toks } @@ -606,9 +614,9 @@ func (e *Env) SemanticTokensRange(loc protocol.Location) []fake.SemanticToken { func (e *Env) Close() { ctx := xcontext.Detach(e.Ctx) if err := e.Editor.Close(ctx); err != nil { - e.T.Errorf("closing editor: %v", err) + e.TB.Errorf("closing editor: %v", err) } if err := e.Sandbox.Close(); err != nil { - e.T.Errorf("cleaning up sandbox: %v", err) + e.TB.Errorf("cleaning up sandbox: %v", err) } } diff --git a/gopls/internal/test/marker/marker_test.go b/gopls/internal/test/marker/marker_test.go index 3ff7da65ac5..8c27adc9018 100644 --- a/gopls/internal/test/marker/marker_test.go +++ b/gopls/internal/test/marker/marker_test.go @@ -321,7 +321,7 @@ type marker struct { func (m marker) ctx() context.Context { return m.run.env.Ctx } // T returns the testing.TB for this mark. -func (m marker) T() testing.TB { return m.run.env.T } +func (m marker) T() testing.TB { return m.run.env.TB } // server returns the LSP server for the marker test run. 
func (m marker) editor() *fake.Editor { return m.run.env.Editor } @@ -982,7 +982,7 @@ func newEnv(t *testing.T, cache *cache.Cache, files, proxyFiles map[string][]byt t.Fatal(err) } return &integration.Env{ - T: t, + TB: t, Ctx: ctx, Editor: editor, Sandbox: sandbox, @@ -1035,17 +1035,17 @@ func (c *marker) sprintf(format string, args ...any) string { func (run *markerTestRun) fmtPos(pos token.Pos) string { file := run.test.fset.File(pos) if file == nil { - run.env.T.Errorf("position %d not in test fileset", pos) + run.env.TB.Errorf("position %d not in test fileset", pos) return "" } m, err := run.env.Editor.Mapper(file.Name()) if err != nil { - run.env.T.Errorf("%s", err) + run.env.TB.Errorf("%s", err) return "" } loc, err := m.PosLocation(file, pos, pos) if err != nil { - run.env.T.Errorf("Mapper(%s).PosLocation failed: %v", file.Name(), err) + run.env.TB.Errorf("Mapper(%s).PosLocation failed: %v", file.Name(), err) } return run.fmtLoc(loc) } @@ -1055,7 +1055,7 @@ func (run *markerTestRun) fmtPos(pos token.Pos) string { // archive file. 
func (run *markerTestRun) fmtLoc(loc protocol.Location) string { if loc == (protocol.Location{}) { - run.env.T.Errorf("unable to find %s in test archive", loc) + run.env.TB.Errorf("unable to find %s in test archive", loc) return "" } lines := bytes.Count(run.test.archive.Comment, []byte("\n")) @@ -1094,12 +1094,12 @@ func (run *markerTestRun) mapLocation(loc protocol.Location) (name string, start name = run.env.Sandbox.Workdir.URIToPath(loc.URI) m, err := run.env.Editor.Mapper(name) if err != nil { - run.env.T.Errorf("internal error: %v", err) + run.env.TB.Errorf("internal error: %v", err) return } start, end, err := m.RangeOffsets(loc.Range) if err != nil { - run.env.T.Errorf("error formatting location %s: %v", loc, err) + run.env.TB.Errorf("error formatting location %s: %v", loc, err) return } startLine, startCol = m.OffsetLineCol8(start) @@ -2306,11 +2306,11 @@ func codeActionChanges(env *integration.Env, uri protocol.DocumentURI, rng proto if action.Edit != nil { if len(action.Edit.Changes) > 0 { - env.T.Errorf("internal error: discarding unexpected CodeAction{Kind=%s, Title=%q}.Edit.Changes", action.Kind, action.Title) + env.TB.Errorf("internal error: discarding unexpected CodeAction{Kind=%s, Title=%q}.Edit.Changes", action.Kind, action.Title) } if action.Edit.DocumentChanges != nil { if action.Command != nil { - env.T.Errorf("internal error: discarding unexpected CodeAction{Kind=%s, Title=%q}.Command", action.Kind, action.Title) + env.TB.Errorf("internal error: discarding unexpected CodeAction{Kind=%s, Title=%q}.Command", action.Kind, action.Title) } return action.Edit.DocumentChanges, nil } diff --git a/gopls/internal/test/marker/testdata/codeaction/inline_issue67336.txt b/gopls/internal/test/marker/testdata/codeaction/inline_issue67336.txt index 437fb474fb2..f15ca29397b 100644 --- a/gopls/internal/test/marker/testdata/codeaction/inline_issue67336.txt +++ b/gopls/internal/test/marker/testdata/codeaction/inline_issue67336.txt @@ -54,7 +54,6 @@ package c 
import ( "context" - "example.com/define/my/typ" "example.com/one/more/pkg" pkg0 "example.com/some/other/pkg" diff --git a/gopls/internal/test/marker/testdata/codeaction/removeparam_imports.txt b/gopls/internal/test/marker/testdata/codeaction/removeparam_imports.txt index d9f4f22dc7e..cd5f910a70d 100644 --- a/gopls/internal/test/marker/testdata/codeaction/removeparam_imports.txt +++ b/gopls/internal/test/marker/testdata/codeaction/removeparam_imports.txt @@ -65,9 +65,7 @@ func B(x, y c.C) { //@codeaction("x", "refactor.rewrite.removeUnusedParam", resu -- @b/a/a3.go -- package a -import ( - "mod.test/b" -) +import "mod.test/b" func _() { b.B(<-b.Chan) @@ -79,9 +77,7 @@ func _() { -- @b/a/a2.go -- package a -import ( - "mod.test/b" -) +import "mod.test/b" func _() { b.B(<-b.Chan) @@ -90,9 +86,7 @@ func _() { -- @b/a/a1.go -- package a -import ( - "mod.test/b" -) +import "mod.test/b" func _() { b.B(<-b.Chan) diff --git a/gopls/internal/test/marker/testdata/implementation/issue67041.txt b/gopls/internal/test/marker/testdata/implementation/issue67041.txt index 3b058534cd3..78965200b20 100644 --- a/gopls/internal/test/marker/testdata/implementation/issue67041.txt +++ b/gopls/internal/test/marker/testdata/implementation/issue67041.txt @@ -1,5 +1,5 @@ -This test verifies that implementations uses the correct object when querying -local implementations . As described in golang/go#67041), a bug led to it +This test verifies that Implementations uses the correct object when querying +local implementations. As described in golang/go#67041, a bug led to it comparing types from different realms. 
-- go.mod -- diff --git a/gopls/internal/cache/methodsets/fingerprint.go b/gopls/internal/util/fingerprint/fingerprint.go similarity index 62% rename from gopls/internal/cache/methodsets/fingerprint.go rename to gopls/internal/util/fingerprint/fingerprint.go index 05ccfe0911c..b279003d081 100644 --- a/gopls/internal/cache/methodsets/fingerprint.go +++ b/gopls/internal/util/fingerprint/fingerprint.go @@ -1,7 +1,13 @@ -// Copyright 2024 The Go Authors. All rights reserved. +// Copyright 2025 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -package methodsets + +// Package fingerprint defines a function to [Encode] types as strings +// with the property that identical types have equal string encodings, +// in most cases. In the remaining cases (mostly involving generic +// types), the encodings can be parsed using [Parse] into [Tree] form +// and matched using [Matches]. +package fingerprint import ( "fmt" @@ -12,6 +18,52 @@ import ( "text/scanner" ) +// Encode returns an encoding of a [types.Type] such that, in +// most cases, Encode(x) == Encode(y) iff [types.Identical](x, y). +// +// For a minority of types, mostly involving type parameters, identity +// cannot be reduced to string comparison; these types are called +// "tricky", and are indicated by the boolean result. +// +// In general, computing identity correctly for tricky types requires +// the type checker. However, the fingerprint encoding can be parsed +// by [Parse] into a [Tree] form that permits simple matching sufficient +// to allow a type parameter to unify with any subtree; see [Match]. +// +// In the standard library, 99.8% of package-level types have a +// non-tricky method-set. The most common exceptions are due to type +// parameters. +// +// fingerprint.Encode is defined only for the signature types of functions +// and methods. 
It must not be called for "untyped" basic types, nor +// the type of a generic function. +func Encode(t types.Type) (_ string, tricky bool) { return fingerprint(t) } + +// A Tree is a parsed form of a fingerprint for use with [Matches]. +type Tree struct{ tree sexpr } + +// String returns the tree in an unspecified human-readable form. +func (tree Tree) String() string { + var out strings.Builder + writeSexpr(&out, tree.tree) + return out.String() +} + +// Parse parses a fingerprint into tree form. +// +// The input must have been produced by [Encode] at the same source +// version; parsing is thus infallible. +func Parse(fp string) Tree { + return Tree{parseFingerprint(fp)} +} + +// Matches reports whether two fingerprint trees match, meaning that +// under some conditions (for example, particular instantiations of +// type parameters) the two types may be identical. +func Matches(x, y Tree) bool { + return unify(x.tree, y.tree) +} + // Fingerprint syntax // // The lexical syntax is essentially Lisp S-expressions: @@ -38,25 +90,6 @@ import ( // // field = IDENT IDENT STRING τ -- name, embedded?, tag, type -// fingerprint returns an encoding of a [types.Type] such that, in -// most cases, fingerprint(x) == fingerprint(t) iff types.Identical(x, y). -// -// For a minority of types, mostly involving type parameters, identity -// cannot be reduced to string comparison; these types are called -// "tricky", and are indicated by the boolean result. -// -// In general, computing identity correctly for tricky types requires -// the type checker. However, the fingerprint encoding can be parsed -// by [parseFingerprint] into a tree form that permits simple matching -// sufficient to allow a type parameter to unify with any subtree. -// -// In the standard library, 99.8% of package-level types have a -// non-tricky method-set. The most common exceptions are due to type -// parameters. -// -// fingerprint is defined only for the signature types of methods. 
It -// must not be called for "untyped" basic types, nor the type of a -// generic function. func fingerprint(t types.Type) (string, bool) { var buf strings.Builder tricky := false @@ -202,8 +235,6 @@ func fingerprint(t types.Type) (string, bool) { return buf.String(), tricky } -const symTypeparam = "typeparam" - // sexpr defines the representation of a fingerprint tree. type ( sexpr any // = string | int | symbol | *cons | nil @@ -272,12 +303,6 @@ func parseFingerprint(fp string) sexpr { return parse() } -func sexprString(x sexpr) string { - var out strings.Builder - writeSexpr(&out, x) - return out.String() -} - // writeSexpr formats an S-expression. // It is provided for debugging. func writeSexpr(out *strings.Builder, x sexpr) { @@ -313,45 +338,129 @@ func writeSexpr(out *strings.Builder, x sexpr) { } } -// unify reports whether the types of methods x and y match, in the -// presence of type parameters, each of which matches anything at all. -// (It's not true unification as we don't track substitutions.) -// -// TODO(adonovan): implement full unification. +// unify reports whether x and y match, in the presence of type parameters. +// The constraints on type parameters are ignored, but each type parameter must +// have a consistent binding. func unify(x, y sexpr) bool { - if isTypeParam(x) >= 0 || isTypeParam(y) >= 0 { - return true // a type parameter matches anything + + // maxTypeParam returns the maximum type parameter index in x. + var maxTypeParam func(x sexpr) int + maxTypeParam = func(x sexpr) int { + if i := typeParamIndex(x); i >= 0 { + return i + } + if c, ok := x.(*cons); ok { + return max(maxTypeParam(c.car), maxTypeParam(c.cdr)) + } + return -1 } - if reflect.TypeOf(x) != reflect.TypeOf(y) { - return false // type mismatch + + // xBindings[i] is the binding for type parameter #i in x, and similarly for y. + // Although type parameters are nominally bound to sexprs, each bindings[i] + // is a *sexpr, so unbound variables can share a binding. 
+ xBindings := make([]*sexpr, maxTypeParam(x)+1) + for i := range len(xBindings) { + xBindings[i] = new(sexpr) } - switch x := x.(type) { - case nil, string, int, symbol: - return x == y - case *cons: - y := y.(*cons) - if !unify(x.car, y.car) { + yBindings := make([]*sexpr, maxTypeParam(y)+1) + for i := range len(yBindings) { + yBindings[i] = new(sexpr) + } + + // bind sets binding b to s from bindings if it does not occur in s. + bind := func(b *sexpr, s sexpr, bindings []*sexpr) bool { + // occurs reports whether b is present in s. + var occurs func(s sexpr) bool + occurs = func(s sexpr) bool { + if j := typeParamIndex(s); j >= 0 { + return b == bindings[j] + } + if c, ok := s.(*cons); ok { + return occurs(c.car) || occurs(c.cdr) + } return false } - if x.cdr == nil { - return y.cdr == nil - } - if y.cdr == nil { + + if occurs(s) { return false } - return unify(x.cdr, y.cdr) - default: - panic(fmt.Sprintf("unify %T %T", x, y)) + *b = s + return true } + + var uni func(x, y sexpr) bool + uni = func(x, y sexpr) bool { + var bx, by *sexpr + ix := typeParamIndex(x) + if ix >= 0 { + bx = xBindings[ix] + } + iy := typeParamIndex(y) + if iy >= 0 { + by = yBindings[iy] + } + + if bx != nil || by != nil { + // If both args are type params and neither is bound, have them share a binding. + if bx != nil && by != nil && *bx == nil && *by == nil { + xBindings[ix] = yBindings[iy] + return true + } + // Treat param bindings like original args in what follows. + if bx != nil && *bx != nil { + x = *bx + } + if by != nil && *by != nil { + y = *by + } + // If the x param is unbound, bind it to y. + if bx != nil && *bx == nil { + return bind(bx, y, yBindings) + } + // If the y param is unbound, bind it to x. + if by != nil && *by == nil { + return bind(by, x, xBindings) + } + // Unify the binding of a bound parameter. + return uni(x, y) + } + + // Neither arg is a type param. 
+ if reflect.TypeOf(x) != reflect.TypeOf(y) { + return false // type mismatch + } + switch x := x.(type) { + case nil, string, int, symbol: + return x == y + case *cons: + y := y.(*cons) + if !uni(x.car, y.car) { + return false + } + if x.cdr == nil { + return y.cdr == nil + } + if y.cdr == nil { + return false + } + return uni(x.cdr, y.cdr) + default: + panic(fmt.Sprintf("unify %T %T", x, y)) + } + } + // At least one param is bound. Unify its binding with the other. + return uni(x, y) } -// isTypeParam returns the index of the type parameter, +// typeParamIndex returns the index of the type parameter, // if x has the form "(typeparam INTEGER)", otherwise -1. -func isTypeParam(x sexpr) int { +func typeParamIndex(x sexpr) int { if x, ok := x.(*cons); ok { if sym, ok := x.car.(symbol); ok && sym == symTypeparam { - return 0 + return x.cdr.(*cons).car.(int) } } return -1 } + +const symTypeparam = "typeparam" diff --git a/gopls/internal/cache/methodsets/fingerprint_test.go b/gopls/internal/util/fingerprint/fingerprint_test.go similarity index 63% rename from gopls/internal/cache/methodsets/fingerprint_test.go rename to gopls/internal/util/fingerprint/fingerprint_test.go index 795ddaa965b..40ea2ede34e 100644 --- a/gopls/internal/cache/methodsets/fingerprint_test.go +++ b/gopls/internal/util/fingerprint/fingerprint_test.go @@ -1,13 +1,8 @@ -// Copyright 2024 The Go Authors. All rights reserved. +// Copyright 2025 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -package methodsets - -// This is an internal test of [fingerprint] and [unify]. -// -// TODO(adonovan): avoid internal tests. -// Break fingerprint.go off into its own package? 
+package fingerprint_test import ( "go/types" @@ -15,15 +10,15 @@ import ( "golang.org/x/tools/go/packages" "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/internal/testenv" + "golang.org/x/tools/gopls/internal/util/fingerprint" "golang.org/x/tools/internal/testfiles" "golang.org/x/tools/txtar" ) -// Test_fingerprint runs the fingerprint encoder, decoder, and printer +// Test runs the fingerprint encoder, decoder, and printer // on the types of all package-level symbols in gopls, and ensures // that parse+print is lossless. -func Test_fingerprint(t *testing.T) { +func Test(t *testing.T) { if testing.Short() { t.Skip("skipping slow test") } @@ -54,7 +49,7 @@ func Test_fingerprint(t *testing.T) { continue // untyped constant } - fp, tricky := fingerprint(typ) // check Type encoder doesn't panic + fp, tricky := fingerprint.Encode(typ) // check Type encoder doesn't panic // All equivalent (non-tricky) types have the same fingerprint. if !tricky { @@ -66,8 +61,8 @@ func Test_fingerprint(t *testing.T) { } } - tree := parseFingerprint(fp) // check parser doesn't panic - fp2 := sexprString(tree) // check formatter doesn't pannic + tree := fingerprint.Parse(fp) // check parser doesn't panic + fp2 := tree.String() // check formatter doesn't pannic // A parse+print round-trip should be lossless. if fp != fp2 { @@ -79,12 +74,8 @@ func Test_fingerprint(t *testing.T) { } } -// Test_unify exercises the matching algorithm for generic types. -func Test_unify(t *testing.T) { - if testenv.Go1Point() < 24 { - testenv.NeedsGoExperiment(t, "aliastypeparams") // testenv.Go1Point() >= 24 implies aliastypeparams=1 - } - +// TestMatches exercises the matching algorithm for generic types. 
+func TestMatches(t *testing.T) { const src = ` -- go.mod -- module example.com @@ -113,6 +104,7 @@ func C2[U any](int, int, ...U) bool { panic(0) } func C3(int, bool, ...string) rune func C4(int, bool, ...string) func C5(int, float64, bool, string) bool +func C6(int, bool, ...string) bool func DAny[T any](Named[T]) { panic(0) } func DString(Named[string]) @@ -123,6 +115,17 @@ type Named[T any] struct { x T } func E1(byte) rune func E2(uint8) int32 func E3(int8) uint32 + +// generic vs. generic +func F1[T any](T) { panic(0) } +func F2[T any](*T) { panic(0) } +func F3[T any](T, T) { panic(0) } +func F4[U any](U, *U) { panic(0) } +func F5[T, U any](T, U, U) { panic(0) } +func F6[T any](T, int, T) { panic(0) } +func F7[T any](bool, T, T) { panic(0) } +func F8[V any](*V, int, int) { panic(0) } +func F9[V any](V, *V, V) { panic(0) } ` pkg := testfiles.LoadPackages(t, txtar.Parse([]byte(src)), "./a")[0] scope := pkg.Types.Scope() @@ -137,11 +140,12 @@ func E3(int8) uint32 {"B", "String", "", true}, {"B", "Int", "", true}, {"B", "A", "", true}, - {"C1", "C2", "", true}, // matches despite inconsistent substitution - {"C1", "C3", "", true}, + {"C1", "C2", "", false}, + {"C1", "C3", "", false}, {"C1", "C4", "", false}, {"C1", "C5", "", false}, - {"C2", "C3", "", false}, // intransitive (C1≡C2 ^ C1≡C3) + {"C1", "C6", "", true}, + {"C2", "C3", "", false}, {"C2", "C4", "", false}, {"C3", "C4", "", false}, {"DAny", "DString", "", true}, @@ -149,6 +153,13 @@ func E3(int8) uint32 {"DString", "DInt", "", false}, // different instantiations of Named {"E1", "E2", "", true}, // byte and rune are just aliases {"E2", "E3", "", false}, + // The following tests cover all of the type param cases of unify. 
+ {"F1", "F2", "", true}, // F1[*int] = F2[int] + {"F3", "F4", "", false}, // would require U identical to *U, prevented by occur check + {"F5", "F6", "", true}, // one param is bound, the other is not + {"F6", "F7", "", false}, // both are bound + {"F5", "F8", "", true}, // T=*int, U=int, V=int + {"F5", "F9", "", false}, // T is unbound, V is bound, and T occurs in V } { lookup := func(name string) types.Type { obj := scope.Lookup(name) @@ -164,20 +175,30 @@ func E3(int8) uint32 return obj.Type() } - a := lookup(test.a) - b := lookup(test.b) + check := func(sa, sb string, want bool) { + t.Helper() - afp, _ := fingerprint(a) - bfp, _ := fingerprint(b) + a := lookup(sa) + b := lookup(sb) - atree := parseFingerprint(afp) - btree := parseFingerprint(bfp) + afp, _ := fingerprint.Encode(a) + bfp, _ := fingerprint.Encode(b) - got := unify(atree, btree) - if got != test.want { - t.Errorf("a=%s b=%s method=%s: unify returned %t for these inputs:\n- %s\n- %s", - test.a, test.b, test.method, - got, sexprString(atree), sexprString(btree)) + atree := fingerprint.Parse(afp) + btree := fingerprint.Parse(bfp) + + got := fingerprint.Matches(atree, btree) + if got != want { + t.Errorf("a=%s b=%s method=%s: unify returned %t for these inputs:\n- %s\n- %s", + sa, sb, test.method, got, a, b) + } } + + check(test.a, test.b, test.want) + // Matches is symmetric + check(test.b, test.a, test.want) + // Matches is reflexive + check(test.a, test.a, true) + check(test.b, test.b, true) } } diff --git a/gopls/internal/util/moreiters/iters.go b/gopls/internal/util/moreiters/iters.go index d41cb1d3bca..69c76ccb9b6 100644 --- a/gopls/internal/util/moreiters/iters.go +++ b/gopls/internal/util/moreiters/iters.go @@ -24,3 +24,24 @@ func Contains[T comparable](seq iter.Seq[T], x T) bool { } return false } + +// Every reports whether every pred(t) for t in seq returns true, +// stopping at the first false element. 
+func Every[T any](seq iter.Seq[T], pred func(T) bool) bool { + for t := range seq { + if !pred(t) { + return false + } + } + return true +} + +// Any reports whether any pred(t) for t in seq returns true. +func Any[T any](seq iter.Seq[T], pred func(T) bool) bool { + for t := range seq { + if pred(t) { + return true + } + } + return false +} diff --git a/gopls/internal/util/persistent/map.go b/gopls/internal/util/persistent/map.go index 193f98791d8..d97a9494c41 100644 --- a/gopls/internal/util/persistent/map.go +++ b/gopls/internal/util/persistent/map.go @@ -203,6 +203,8 @@ func (pm *Map[K, V]) SetAll(other *Map[K, V]) { // Set updates the value associated with the specified key. // If release is non-nil, it will be called with entry's key and value once the // key is no longer contained in the map or any clone. +// +// TODO(adonovan): fix release, which has the wrong type. func (pm *Map[K, V]) Set(key K, value V, release func(key, value any)) { first := pm.root second := newNodeWithRef(key, value, release) diff --git a/gopls/internal/util/persistent/race_test.go b/gopls/internal/util/persistent/race_test.go new file mode 100644 index 00000000000..827791a78dc --- /dev/null +++ b/gopls/internal/util/persistent/race_test.go @@ -0,0 +1,66 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build race + +package persistent + +import ( + "context" + "maps" + "testing" + "time" + + "golang.org/x/sync/errgroup" +) + +// TestConcurrency exercises concurrent map access. +// It doesn't assert anything, but it runs under the race detector. 
+func TestConcurrency(t *testing.T) { + ctx, cancel := context.WithTimeout(t.Context(), 5*time.Second) + defer cancel() + var orig Map[int, int] // maps subset of [0-10] to itself (values aren't interesting) + for i := range 10 { + orig.Set(i, i, func(k, v any) { /* just for good measure*/ }) + } + g, ctx := errgroup.WithContext(ctx) + const N = 10 // concurrency level + g.SetLimit(N) + for range N { + g.Go(func() error { + // Each thread has its own clone of the original, + // sharing internal structures. Each map is accessed + // only by a single thread; the shared data is immutable. + m := orig.Clone() + + // Run until the timeout. + for ctx.Err() == nil { + for i := range 1000 { + key := i % 10 + + switch { + case i%2 == 0: + _, _ = m.Get(key) + case i%11 == 0: + m.Set(key, key, func(key, value any) {}) + case i%13 == 0: + _ = maps.Collect(m.All()) + case i%17 == 0: + _ = m.Delete(key) + case i%19 == 0: + _ = m.Keys() + case i%31 == 0: + _ = m.String() + case i%23 == 0: + _ = m.Clone() + } + // Don't call m.Clear(), as it would + // disentangle the various maps from each other. + } + } + return nil + }) + } + g.Wait() // no errors +} diff --git a/internal/analysisinternal/analysis.go b/internal/analysisinternal/analysis.go index 5eb7ac5a939..b22e314cf45 100644 --- a/internal/analysisinternal/analysis.go +++ b/internal/analysisinternal/analysis.go @@ -15,11 +15,14 @@ import ( "go/scanner" "go/token" "go/types" + "iter" pathpkg "path" "slices" "strings" "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/astutil/cursor" "golang.org/x/tools/internal/typesinternal" ) @@ -217,7 +220,7 @@ func CheckReadable(pass *analysis.Pass, filename string) error { // to form a qualified name, and the edit for the new import. // // In the special case that pkgpath is dot-imported then member, the -// identifer for which the import is being added, is consulted. If +// identifier for which the import is being added, is consulted. 
If // member is not shadowed at pos, AddImport returns (".", "", nil). // (AddImport accepts the caller's implicit claim that the imported // package declares member.) @@ -249,13 +252,7 @@ func AddImport(info *types.Info, file *ast.File, preferredName, pkgpath, member // We must add a new import. // Ensure we have a fresh name. - newName := preferredName - for i := 0; ; i++ { - if _, obj := scope.LookupParent(newName, pos); obj == nil { - break // fresh - } - newName = fmt.Sprintf("%s%d", preferredName, i) - } + newName := FreshName(scope, pos, preferredName) // Create a new import declaration either before the first existing // declaration (which must exist), including its comments; or @@ -295,6 +292,19 @@ func AddImport(info *types.Info, file *ast.File, preferredName, pkgpath, member }} } +// FreshName returns the name of an identifier that is undefined +// at the specified position, based on the preferred name. +func FreshName(scope *types.Scope, pos token.Pos, preferred string) string { + newName := preferred + for i := 0; ; i++ { + if _, obj := scope.LookupParent(newName, pos); obj == nil { + break // fresh + } + newName = fmt.Sprintf("%s%d", preferred, i) + } + return newName +} + // Format returns a string representation of the expression e. func Format(fset *token.FileSet, e ast.Expr) string { var buf strings.Builder @@ -487,3 +497,143 @@ func CanImport(from, to string) bool { } return true } + +// DeleteStmt returns the edits to remove stmt if it is contained +// in a BlockStmt, CaseClause, CommClause, or is the STMT in switch STMT; ... {...} +// The report function abstracts gopls' bug.Report. +func DeleteStmt(fset *token.FileSet, astFile *ast.File, stmt ast.Stmt, report func(string, ...any)) []analysis.TextEdit { + // TODO: pass in the cursor to a ast.Stmt. 
callers should provide the Cursor + insp := inspector.New([]*ast.File{astFile}) + root := cursor.Root(insp) + cstmt, ok := root.FindNode(stmt) + if !ok { + report("%s not found in file", stmt.Pos()) + return nil + } + // some paranoia + if !stmt.Pos().IsValid() || !stmt.End().IsValid() { + report("%s: stmt has invalid position", stmt.Pos()) + return nil + } + + // if the stmt is on a line by itself delete the whole line + // otherwise just delete the statement. + + // this logic would be a lot simpler with the file contents, and somewhat simpler + // if the cursors included the comments. + + tokFile := fset.File(stmt.Pos()) + lineOf := tokFile.Line + stmtStartLine, stmtEndLine := lineOf(stmt.Pos()), lineOf(stmt.End()) + + var from, to token.Pos + // bounds of adjacent syntax/comments on same line, if any + limits := func(left, right token.Pos) { + if lineOf(left) == stmtStartLine { + from = left + } + if lineOf(right) == stmtEndLine { + to = right + } + } + // TODO(pjw): there are other places a statement might be removed: + // IfStmt = "if" [ SimpleStmt ";" ] Expression Block [ "else" ( IfStmt | Block ) ] . + // (removing the blocks requires more rewriting than this routine would do) + // CommCase = "case" ( SendStmt | RecvStmt ) | "default" . + // (removing the stmt requires more rewriting, and it's unclear what the user means) + switch parent := cstmt.Parent().Node().(type) { + case *ast.SwitchStmt: + limits(parent.Switch, parent.Body.Lbrace) + case *ast.TypeSwitchStmt: + limits(parent.Switch, parent.Body.Lbrace) + if parent.Assign == stmt { + return nil // don't let the user break the type switch + } + case *ast.BlockStmt: + limits(parent.Lbrace, parent.Rbrace) + case *ast.CommClause: + limits(parent.Colon, cstmt.Parent().Parent().Node().(*ast.BlockStmt).Rbrace) + if parent.Comm == stmt { + return nil // maybe the user meant to remove the entire CommClause? 
+ } + case *ast.CaseClause: + limits(parent.Colon, cstmt.Parent().Parent().Node().(*ast.BlockStmt).Rbrace) + case *ast.ForStmt: + limits(parent.For, parent.Body.Lbrace) + + default: + return nil // not one of ours + } + + if prev, found := cstmt.PrevSibling(); found && lineOf(prev.Node().End()) == stmtStartLine { + from = prev.Node().End() // preceding statement ends on same line + } + if next, found := cstmt.NextSibling(); found && lineOf(next.Node().Pos()) == stmtEndLine { + to = next.Node().Pos() // following statement begins on same line + } + // and now for the comments +Outer: + for _, cg := range astFile.Comments { + for _, co := range cg.List { + if lineOf(co.End()) < stmtStartLine { + continue + } else if lineOf(co.Pos()) > stmtEndLine { + break Outer // no more are possible + } + if lineOf(co.End()) == stmtStartLine && co.End() < stmt.Pos() { + if !from.IsValid() || co.End() > from { + from = co.End() + continue // maybe there are more + } + } + if lineOf(co.Pos()) == stmtEndLine && co.Pos() > stmt.End() { + if !to.IsValid() || co.Pos() < to { + to = co.Pos() + continue // maybe there are more + } + } + } + } + // if either from or to is valid, just remove the statement + // otherwise remove the line + edit := analysis.TextEdit{Pos: stmt.Pos(), End: stmt.End()} + if from.IsValid() || to.IsValid() { + // remove just the statment. + // we can't tell if there is a ; or whitespace right after the statment + // ideally we'd like to remove the former and leave the latter + // (if gofmt has run, there likely won't be a ;) + // In type switches we know there's a semicolon somewhere after the statement, + // but the extra work for this special case is not worth it, as gofmt will fix it. 
+ return []analysis.TextEdit{edit} + } + // remove the whole line + for lineOf(edit.Pos) == stmtStartLine { + edit.Pos-- + } + edit.Pos++ // get back tostmtStartLine + for lineOf(edit.End) == stmtEndLine { + edit.End++ + } + return []analysis.TextEdit{edit} +} + +// Comments returns an iterator over the comments overlapping the specified interval. +func Comments(file *ast.File, start, end token.Pos) iter.Seq[*ast.Comment] { + // TODO(adonovan): optimize use binary O(log n) instead of linear O(n) search. + return func(yield func(*ast.Comment) bool) { + for _, cg := range file.Comments { + for _, co := range cg.List { + if co.Pos() > end { + return + } + if co.End() < start { + continue + } + + if !yield(co) { + return + } + } + } + } +} diff --git a/internal/analysisinternal/analysis_test.go b/internal/analysisinternal/analysis_test.go index 0b21876d386..e3c760aff5a 100644 --- a/internal/analysisinternal/analysis_test.go +++ b/internal/analysisinternal/analysis_test.go @@ -4,7 +4,16 @@ package analysisinternal -import "testing" +import ( + "go/ast" + "go/parser" + "go/token" + "slices" + "testing" + + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/astutil/cursor" +) func TestCanImport(t *testing.T) { for _, tt := range []struct { @@ -32,3 +41,260 @@ func TestCanImport(t *testing.T) { } } } + +func TestDeleteStmt(t *testing.T) { + type testCase struct { + in string + which int // count of ast.Stmt in ast.Inspect traversal to remove + want string + name string // should contain exactly one of [block,switch,case,comm,for,type] + } + tests := []testCase{ + { // do nothing when asked to remove a function body + in: "package p; func f() { }", + which: 0, + want: "package p; func f() { }", + name: "block0", + }, + { + in: "package p; func f() { abcd()}", + which: 1, + want: "package p; func f() { }", + name: "block1", + }, + { + in: "package p; func f() { a() }", + which: 1, + want: "package p; func f() { }", + name: "block2", + }, + { + in: "package 
p; func f() { a();}", + which: 1, + want: "package p; func f() { ;}", + name: "block3", + }, + { + in: "package p; func f() {\n a() \n\n}", + which: 1, + want: "package p; func f() {\n\n}", + name: "block4", + }, + { + in: "package p; func f() { a()// comment\n}", + which: 1, + want: "package p; func f() { // comment\n}", + name: "block5", + }, + { + in: "package p; func f() { /*c*/a() \n}", + which: 1, + want: "package p; func f() { /*c*/ \n}", + name: "block6", + }, + { + in: "package p; func f() { a();b();}", + which: 2, + want: "package p; func f() { a();;}", + name: "block7", + }, + { + in: "package p; func f() {\n\ta()\n\tb()\n}", + which: 2, + want: "package p; func f() {\n\ta()\n}", + name: "block8", + }, + { + in: "package p; func f() {\n\ta()\n\tb()\n\tc()\n}", + which: 2, + want: "package p; func f() {\n\ta()\n\tc()\n}", + name: "block9", + }, + { + in: "package p\nfunc f() {a()+b()}", + which: 1, + want: "package p\nfunc f() {}", + name: "block10", + }, + { + in: "package p\nfunc f() {(a()+b())}", + which: 1, + want: "package p\nfunc f() {}", + name: "block11", + }, + { + in: "package p; func f() { switch a(); b() {}}", + which: 2, // 0 is the func body, 1 is the switch statement + want: "package p; func f() { switch ; b() {}}", + name: "switch0", + }, + { + in: "package p; func f() { switch /*c*/a(); {}}", + which: 2, // 0 is the func body, 1 is the switch statement + want: "package p; func f() { switch /*c*/; {}}", + name: "switch1", + }, + { + in: "package p; func f() { switch a()/*c*/; {}}", + which: 2, // 0 is the func body, 1 is the switch statement + want: "package p; func f() { switch /*c*/; {}}", + name: "switch2", + }, + { + in: "package p; func f() { select {default: a()}}", + which: 4, // 0 is the func body, 1 is the select statement, 2 is its body, 3 is the comm clause + want: "package p; func f() { select {default: }}", + name: "comm0", + }, + { + in: "package p; func f(x chan any) { select {case x <- a: a(x)}}", + which: 5, // 0 is the 
func body, 1 is the select statement, 2 is its body, 3 is the comm clause + want: "package p; func f(x chan any) { select {case x <- a: }}", + name: "comm1", + }, + { + in: "package p; func f(x chan any) { select {case x <- a: a(x)}}", + which: 4, // 0 is the func body, 1 is the select statement, 2 is its body, 3 is the comm clause + want: "package p; func f(x chan any) { select {case x <- a: a(x)}}", + name: "comm2", + }, + { + in: "package p; func f() { switch {default: a()}}", + which: 4, // 0 is the func body, 1 is the select statement, 2 is its body + want: "package p; func f() { switch {default: }}", + name: "case0", + }, + { + in: "package p; func f() { switch {case 3: a()}}", + which: 4, // 0 is the func body, 1 is the select statement, 2 is its body + want: "package p; func f() { switch {case 3: }}", + name: "case1", + }, + { + in: "package p; func f() {for a();;b() {}}", + which: 2, + want: "package p; func f() {for ;;b() {}}", + name: "for0", + }, + { + in: "package p; func f() {for a();c();b() {}}", + which: 3, + want: "package p; func f() {for a();c(); {}}", + name: "for1", + }, + { + in: "package p; func f() {for\na();c()\nb() {}}", + which: 2, + want: "package p; func f() {for\n;c()\nb() {}}", + name: "for2", + }, + { + in: "package p; func f() {for a();\nc();b() {}}", + which: 3, + want: "package p; func f() {for a();\nc(); {}}", + name: "for3", + }, + { + in: "package p; func f() {switch a();b().(type){}}", + which: 2, + want: "package p; func f() {switch ;b().(type){}}", + name: "type0", + }, + { + in: "package p; func f() {switch a();b().(type){}}", + which: 3, + want: "package p; func f() {switch a();b().(type){}}", + name: "type1", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + fset := token.NewFileSet() + f, err := parser.ParseFile(fset, tt.name, tt.in, parser.ParseComments) + if err != nil { + t.Fatalf("%s: %v", tt.name, err) + } + insp := inspector.New([]*ast.File{f}) + root := cursor.Root(insp) + var stmt 
cursor.Cursor + cnt := 0 + for cn := range root.Preorder() { // Preorder(ast.Stmt(nil)) doesn't work + if _, ok := cn.Node().(ast.Stmt); !ok { + continue + } + if cnt == tt.which { + stmt = cn + break + } + cnt++ + } + if cnt != tt.which { + t.Fatalf("test %s does not contain desired statement %d", tt.name, tt.which) + } + edits := DeleteStmt(fset, f, stmt.Node().(ast.Stmt), nil) + if tt.want == tt.in { + if len(edits) != 0 { + t.Fatalf("%s: got %d edits, expected 0", tt.name, len(edits)) + } + return + } + if len(edits) != 1 { + t.Fatalf("%s: got %d edits, expected 1", tt.name, len(edits)) + } + tokFile := fset.File(f.Pos()) + + left := tokFile.Offset(edits[0].Pos) + right := tokFile.Offset(edits[0].End) + + got := tt.in[:left] + tt.in[right:] + if got != tt.want { + t.Errorf("%s: got\n%q, want\n%q", tt.name, got, tt.want) + } + }) + + } +} + +func TestComments(t *testing.T) { + src := ` +package main + +// A +func fn() { }` + var fset token.FileSet + f, err := parser.ParseFile(&fset, "", []byte(src), parser.ParseComments|parser.AllErrors) + if err != nil { + t.Fatal(err) + } + + commentA := f.Comments[0].List[0] + commentAMidPos := (commentA.Pos() + commentA.End()) / 2 + + want := []*ast.Comment{commentA} + testCases := []struct { + name string + start, end token.Pos + want []*ast.Comment + }{ + {name: "comment totally overlaps with given interval", start: f.Pos(), end: f.End(), want: want}, + {name: "interval from file start to mid of comment A", start: f.Pos(), end: commentAMidPos, want: want}, + {name: "interval from mid of comment A to file end", start: commentAMidPos, end: commentA.End(), want: want}, + {name: "interval from start of comment A to mid of comment A", start: commentA.Pos(), end: commentAMidPos, want: want}, + {name: "interval from mid of comment A to comment A end", start: commentAMidPos, end: commentA.End(), want: want}, + {name: "interval at the start of comment A", start: commentA.Pos(), end: commentA.Pos(), want: want}, + {name: "interval 
at the end of comment A", start: commentA.End(), end: commentA.End(), want: want}, + {name: "interval from file start to the front of comment A start", start: f.Pos(), end: commentA.Pos() - 1, want: nil}, + {name: "interval from the position after end of comment A to file end", start: commentA.End() + 1, end: f.End(), want: nil}, + } + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + var got []*ast.Comment + for co := range Comments(f, tc.start, tc.end) { + got = append(got, co) + } + if !slices.Equal(got, tc.want) { + t.Errorf("%s: got %v, want %v", tc.name, got, tc.want) + } + }) + } +} diff --git a/internal/analysisinternal/typeindex/typeindex.go b/internal/analysisinternal/typeindex/typeindex.go new file mode 100644 index 00000000000..bba21c6ea01 --- /dev/null +++ b/internal/analysisinternal/typeindex/typeindex.go @@ -0,0 +1,33 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package typeindex defines an analyzer that provides a +// [golang.org/x/tools/internal/typesinternal/typeindex.Index]. +// +// Like [golang.org/x/tools/go/analysis/passes/inspect], it is +// intended to be used as a helper by other analyzers; it reports no +// diagnostics of its own. 
+package typeindex + +import ( + "reflect" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/typesinternal/typeindex" +) + +var Analyzer = &analysis.Analyzer{ + Name: "typeindex", + Doc: "indexes of type information for later passes", + URL: "https://pkg.go.dev/golang.org/x/tools/internal/analysisinternal/typeindex", + Run: func(pass *analysis.Pass) (any, error) { + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + return typeindex.New(inspect, pass.Pkg, pass.TypesInfo), nil + }, + RunDespiteErrors: true, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + ResultType: reflect.TypeOf(new(typeindex.Index)), +} diff --git a/internal/astutil/cursor/cursor.go b/internal/astutil/cursor/cursor.go index 83a47e09058..3f015998c52 100644 --- a/internal/astutil/cursor/cursor.go +++ b/internal/astutil/cursor/cursor.go @@ -44,6 +44,37 @@ func Root(in *inspector.Inspector) Cursor { return Cursor{in, -1} } +// At returns the cursor at the specified index in the traversal, +// which must have been obtained from [Cursor.Index] on a Cursor +// belonging to the same Inspector. +func At(in *inspector.Inspector, index int32) Cursor { + if index < 0 { + panic("negative index") + } + events := events(in) + if int(index) >= len(events) { + panic("index out of range for this inspector") + } + if events[index].index < index { + panic("invalid index") // (a push, not a pop) + } + return Cursor{in, index} +} + +// Index returns the index of this cursor position within the package. +// +// Clients should not assume anything about the numeric Index value +// except that it increases monotonically throughout the traversal. +// It is provided for use with [At]. +// +// Index must not be called on the Root node. 
+func (c Cursor) Index() int32 { + if c.index < 0 { + panic("Index called on Root node") + } + return c.index +} + // Node returns the node at the current cursor position, // or nil for the cursor returned by [Inspector.Root]. func (c Cursor) Node() ast.Node { @@ -167,30 +198,29 @@ func (c Cursor) Stack(stack []Cursor) []Cursor { panic("Cursor.Stack called on Root node") } - stack = append(stack, c) - stack = slices.AppendSeq(stack, c.Ancestors()) + stack = slices.AppendSeq(stack, c.Enclosing()) slices.Reverse(stack) return stack } -// Ancestors returns an iterator over the ancestors of the current -// node, starting with [Cursor.Parent]. +// Enclosing returns an iterator over the nodes enclosing the current +// current node, starting with the Cursor itself. // -// Ancestors must not be called on the Root node (whose [Cursor.Node] returns nil). +// Enclosing must not be called on the Root node (whose [Cursor.Node] returns nil). // // The types argument, if non-empty, enables type-based filtering of -// events: the sequence includes only ancestors whose type matches an -// element of the types slice. -func (c Cursor) Ancestors(types ...ast.Node) iter.Seq[Cursor] { +// events: the sequence includes only enclosing nodes whose type +// matches an element of the types slice. 
+func (c Cursor) Enclosing(types ...ast.Node) iter.Seq[Cursor] { if c.index < 0 { - panic("Cursor.Ancestors called on Root node") + panic("Cursor.Enclosing called on Root node") } mask := maskOf(types) return func(yield func(Cursor) bool) { events := c.events() - for i := events[c.index].parent; i >= 0; i = events[i].parent { + for i := c.index; i >= 0; i = events[i].parent { if events[i].typ&mask != 0 && !yield(Cursor{c.in, i}) { break } @@ -209,25 +239,52 @@ func (c Cursor) Parent() Cursor { return Cursor{c.in, c.events()[c.index].parent} } -// Edge returns the identity of the field in the parent node +// ParentEdge returns the identity of the field in the parent node // that holds this cursor's node, and if it is a list, the index within it. // // For example, f(x, y) is a CallExpr whose three children are Idents. // f has edge kind [edge.CallExpr_Fun] and index -1. // x and y have kind [edge.CallExpr_Args] and indices 0 and 1, respectively. // -// Edge must not be called on the Root node (whose [Cursor.Node] returns nil). -// // If called on a child of the Root node, it returns ([edge.Invalid], -1). -func (c Cursor) Edge() (edge.Kind, int) { +// +// ParentEdge must not be called on the Root node (whose [Cursor.Node] returns nil). +func (c Cursor) ParentEdge() (edge.Kind, int) { if c.index < 0 { - panic("Cursor.Edge called on Root node") + panic("Cursor.ParentEdge called on Root node") } events := c.events() pop := events[c.index].index return unpackEdgeKindAndIndex(events[pop].parent) } +// ChildAt returns the cursor for the child of the +// current node identified by its edge and index. +// The index must be -1 if the edge.Kind is not a slice. +// The indicated child node must exist. +// +// ChildAt must not be called on the Root node (whose [Cursor.Node] returns nil). +// +// Invariant: c.Parent().ChildAt(c.ParentEdge()) == c. 
+func (c Cursor) ChildAt(k edge.Kind, idx int) Cursor { + target := packEdgeKindAndIndex(k, idx) + + // Unfortunately there's no shortcut to looping. + events := c.events() + i := c.index + 1 + for { + pop := events[i].index + if pop < i { + break + } + if events[pop].parent == target { + return Cursor{c.in, i} + } + i = pop + 1 + } + panic(fmt.Sprintf("ChildAt(%v, %d): no such child of %v", k, idx, c)) +} + // Child returns the cursor for n, which must be a direct child of c's Node. // // Child must not be called on the Root node (whose [Cursor.Node] returns nil). @@ -355,7 +412,7 @@ func (c Cursor) LastChild() (Cursor, bool) { // So, do not assume that the previous sibling of an ast.Stmt is also // an ast.Stmt, or if it is, that they are executed sequentially, // unless you have established that, say, its parent is a BlockStmt -// or its [Cursor.Edge] is [edge.BlockStmt_List]. +// or its [Cursor.ParentEdge] is [edge.BlockStmt_List]. // For example, given "for S1; ; S2 {}", the predecessor of S2 is S1, // even though they are not executed in sequence. func (c Cursor) Children() iter.Seq[Cursor] { @@ -367,6 +424,18 @@ func (c Cursor) Children() iter.Seq[Cursor] { } } +// Contains reports whether c contains or is equal to c2. +// +// Both Cursors must belong to the same [Inspector]; +// neither may be its Root node. +func (c Cursor) Contains(c2 Cursor) bool { + if c.in != c2.in { + panic("different inspectors") + } + events := c.events() + return c.index <= c2.index && events[c2.index].index <= events[c.index].index +} + // FindNode returns the cursor for node n if it belongs to the subtree // rooted at c. It returns zero if n is not found. 
func (c Cursor) FindNode(n ast.Node) (Cursor, bool) { diff --git a/internal/astutil/cursor/cursor_test.go b/internal/astutil/cursor/cursor_test.go index 67e91544c4d..9effae912a3 100644 --- a/internal/astutil/cursor/cursor_test.go +++ b/internal/astutil/cursor/cursor_test.go @@ -131,9 +131,8 @@ func g() { _ = curFunc.Node().(*ast.FuncDecl) // Check edge and index. - if e, idx := curFunc.Edge(); e != edge.File_Decls || idx != nfuncs { - t.Errorf("%v.Edge() = (%v, %v), want edge.File_Decls, %d", - curFunc, e, idx, nfuncs) + if k, idx := curFunc.ParentEdge(); k != edge.File_Decls || idx != nfuncs { + t.Errorf("%v.ParentEdge() = (%v, %d), want edge.File_Decls, %d", curFunc, k, idx, nfuncs) } nfuncs++ @@ -163,11 +162,10 @@ func g() { t.Errorf("curCall.Stack() = %q, want %q", got, want) } - // Ancestors = Reverse(Stack[:last]). - stack = stack[:len(stack)-1] + // Enclosing = Reverse(Stack()). slices.Reverse(stack) - if got, want := slices.Collect(curCall.Ancestors()), stack; !reflect.DeepEqual(got, want) { - t.Errorf("Ancestors = %v, Reverse(Stack - last element) = %v", got, want) + if got, want := slices.Collect(curCall.Enclosing()), stack; !reflect.DeepEqual(got, want) { + t.Errorf("Enclosing = %v, Reverse(Stack - last element) = %v", got, want) } } @@ -367,8 +365,10 @@ func TestCursor_Edge(t *testing.T) { continue // root node } - e, idx := cur.Edge() - parent := cur.Parent() + var ( + parent = cur.Parent() + e, idx = cur.ParentEdge() + ) // ast.File, child of root? if parent.Node() == nil { @@ -384,12 +384,18 @@ func TestCursor_Edge(t *testing.T) { e.NodeType(), parent.Node()) } - // Check consistency of c.Edge.Get(c.Parent().Node()) == c.Node(). + // Check c.Edge.Get(c.Parent.Node) == c.Node. if got := e.Get(parent.Node(), idx); got != cur.Node() { t.Errorf("cur=%v@%s: %s.Get(cur.Parent().Node(), %d) = %T@%s, want cur.Node()", cur, netFset.Position(cur.Node().Pos()), e, idx, got, netFset.Position(got.Pos())) } + // Check c.Parent.ChildAt(c.ParentEdge()) == c. 
+ if got := parent.ChildAt(e, idx); got != cur { + t.Errorf("cur=%v@%s: cur.Parent().ChildAt(%v, %d) = %T@%s, want cur", + cur, netFset.Position(cur.Node().Pos()), e, idx, got.Node(), netFset.Position(got.Node().Pos())) + } + // Check that reflection on the parent finds the current node. fv := reflect.ValueOf(parent.Node()).Elem().FieldByName(e.FieldName()) if idx >= 0 { @@ -408,6 +414,38 @@ func TestCursor_Edge(t *testing.T) { if cur.Parent().Child(cur.Node()) != cur { t.Errorf("Cursor.Parent.Child = %v, want %v", cur.Parent().Child(cur.Node()), cur) } + + // Check invariants of Contains: + + // A cursor contains itself. + if !cur.Contains(cur) { + t.Errorf("!cur.Contains(cur): %v", cur) + } + // A parent contains its child, but not the inverse. + if !parent.Contains(cur) { + t.Errorf("!cur.Parent().Contains(cur): %v", cur) + } + if cur.Contains(parent) { + t.Errorf("cur.Contains(cur.Parent()): %v", cur) + } + // A grandparent contains its grandchild, but not the inverse. + if grandparent := cur.Parent(); grandparent.Node() != nil { + if !grandparent.Contains(cur) { + t.Errorf("!cur.Parent().Parent().Contains(cur): %v", cur) + } + if cur.Contains(grandparent) { + t.Errorf("cur.Contains(cur.Parent().Parent()): %v", cur) + } + } + // A cursor and its uncle/aunt do not contain each other. + if uncle, ok := parent.NextSibling(); ok { + if uncle.Contains(cur) { + t.Errorf("cur.Parent().NextSibling().Contains(cur): %v", cur) + } + if cur.Contains(uncle) { + t.Errorf("cur.Contains(cur.Parent().NextSibling()): %v", cur) + } + } } } @@ -503,12 +541,12 @@ func BenchmarkInspectCalls(b *testing.B) { } }) - b.Run("CursorAncestors", func(b *testing.B) { + b.Run("CursorEnclosing", func(b *testing.B) { var ncalls int for range b.N { for cur := range cursor.Root(inspect).Preorder(callExprs...) 
{ _ = cur.Node().(*ast.CallExpr) - for range cur.Ancestors() { + for range cur.Enclosing() { } ncalls++ } diff --git a/internal/astutil/cursor/hooks.go b/internal/astutil/cursor/hooks.go index 8b61f5ddc11..0257d61d778 100644 --- a/internal/astutil/cursor/hooks.go +++ b/internal/astutil/cursor/hooks.go @@ -31,6 +31,9 @@ func maskOf(nodes []ast.Node) uint64 //go:linkname events golang.org/x/tools/go/ast/inspector.events func events(in *inspector.Inspector) []event +//go:linkname packEdgeKindAndIndex golang.org/x/tools/go/ast/inspector.packEdgeKindAndIndex +func packEdgeKindAndIndex(edge.Kind, int) int32 + //go:linkname unpackEdgeKindAndIndex golang.org/x/tools/go/ast/inspector.unpackEdgeKindAndIndex func unpackEdgeKindAndIndex(int32) (edge.Kind, int) diff --git a/internal/bisect/bisect.go b/internal/bisect/bisect.go index 5a7da4871a8..7b1d112a7cd 100644 --- a/internal/bisect/bisect.go +++ b/internal/bisect/bisect.go @@ -320,7 +320,7 @@ func AppendMarker(dst []byte, id uint64) []byte { const prefix = "[bisect-match 0x" var buf [len(prefix) + 16 + 1]byte copy(buf[:], prefix) - for i := 0; i < 16; i++ { + for i := range 16 { buf[len(prefix)+i] = "0123456789abcdef"[id>>60] id <<= 4 } @@ -504,7 +504,7 @@ func fnvString(h uint64, x string) uint64 { } func fnvUint64(h uint64, x uint64) uint64 { - for i := 0; i < 8; i++ { + for range 8 { h ^= uint64(x & 0xFF) x >>= 8 h *= prime64 @@ -513,7 +513,7 @@ func fnvUint64(h uint64, x uint64) uint64 { } func fnvUint32(h uint64, x uint32) uint64 { - for i := 0; i < 4; i++ { + for range 4 { h ^= uint64(x & 0xFF) x >>= 8 h *= prime64 diff --git a/internal/diff/diff.go b/internal/diff/diff.go index a13547b7a7e..c12bdfd2acd 100644 --- a/internal/diff/diff.go +++ b/internal/diff/diff.go @@ -7,6 +7,7 @@ package diff import ( "fmt" + "slices" "sort" "strings" ) @@ -64,7 +65,7 @@ func ApplyBytes(src []byte, edits []Edit) ([]byte, error) { // It may return a different slice. 
func validate(src string, edits []Edit) ([]Edit, int, error) { if !sort.IsSorted(editsSort(edits)) { - edits = append([]Edit(nil), edits...) + edits = slices.Clone(edits) SortEdits(edits) } diff --git a/internal/diff/diff_test.go b/internal/diff/diff_test.go index 77a20baf272..9e2a1d23997 100644 --- a/internal/diff/diff_test.go +++ b/internal/diff/diff_test.go @@ -61,7 +61,7 @@ func TestNEdits(t *testing.T) { func TestNRandom(t *testing.T) { rand.Seed(1) - for i := 0; i < 1000; i++ { + for i := range 1000 { a := randstr("abω", 16) b := randstr("abωc", 16) edits := diff.Strings(a, b) @@ -200,7 +200,7 @@ func TestRegressionOld002(t *testing.T) { func randstr(s string, n int) string { src := []rune(s) x := make([]rune, n) - for i := 0; i < n; i++ { + for i := range n { x[i] = src[rand.Intn(len(src))] } return string(x) diff --git a/internal/diff/lcs/common_test.go b/internal/diff/lcs/common_test.go index f19245e404c..68f4485fdb8 100644 --- a/internal/diff/lcs/common_test.go +++ b/internal/diff/lcs/common_test.go @@ -7,6 +7,7 @@ package lcs import ( "log" "math/rand" + "slices" "strings" "testing" ) @@ -72,10 +73,8 @@ func check(t *testing.T, str string, lcs lcs, want []string) { got.WriteString(str[dd.X : dd.X+dd.Len]) } ans := got.String() - for _, w := range want { - if ans == w { - return - } + if slices.Contains(want, ans) { + return } t.Fatalf("str=%q lcs=%v want=%q got=%q", str, lcs, want, ans) } @@ -109,7 +108,7 @@ func lcslen(l lcs) int { func randstr(s string, n int) string { src := []rune(s) x := make([]rune, n) - for i := 0; i < n; i++ { + for i := range n { x[i] = src[rand.Intn(len(src))] } return string(x) diff --git a/internal/diff/lcs/old.go b/internal/diff/lcs/old.go index 7c74b47bb1c..c0d43a6c2c7 100644 --- a/internal/diff/lcs/old.go +++ b/internal/diff/lcs/old.go @@ -377,10 +377,7 @@ func (e *editGraph) twoDone(df, db int) (int, bool) { if (df+db+e.delta)%2 != 0 { return 0, false // diagonals cannot overlap } - kmin := -db + e.delta - if -df > kmin { 
- kmin = -df - } + kmin := max(-df, -db+e.delta) kmax := db + e.delta if df < kmax { kmax = df diff --git a/internal/diff/lcs/old_test.go b/internal/diff/lcs/old_test.go index ddc3bde0ed2..2eac1af6d2f 100644 --- a/internal/diff/lcs/old_test.go +++ b/internal/diff/lcs/old_test.go @@ -107,7 +107,7 @@ func TestRegressionOld003(t *testing.T) { func TestRandOld(t *testing.T) { rand.Seed(1) - for i := 0; i < 1000; i++ { + for i := range 1000 { // TODO(adonovan): use ASCII and bytesSeqs here? The use of // non-ASCII isn't relevant to the property exercised by the test. a := []rune(randstr("abω", 16)) @@ -186,7 +186,7 @@ func genBench(set string, n int) []struct{ before, after string } { // before and after differing at least once, and about 5% rand.Seed(3) var ans []struct{ before, after string } - for i := 0; i < 24; i++ { + for range 24 { // maybe b should have an approximately known number of diffs a := randstr(set, n) cnt := 0 diff --git a/internal/diff/ndiff.go b/internal/diff/ndiff.go index fbef4d730c5..a2eef26ac77 100644 --- a/internal/diff/ndiff.go +++ b/internal/diff/ndiff.go @@ -72,7 +72,7 @@ func diffRunes(before, after []rune) []Edit { func runes(bytes []byte) []rune { n := utf8.RuneCount(bytes) runes := make([]rune, n) - for i := 0; i < n; i++ { + for i := range n { r, sz := utf8.DecodeRune(bytes) bytes = bytes[sz:] runes[i] = r diff --git a/internal/diffp/diff.go b/internal/diffp/diff.go index aa5ef81ac2e..54ab0888482 100644 --- a/internal/diffp/diff.go +++ b/internal/diffp/diff.go @@ -119,10 +119,7 @@ func Diff(oldName string, old []byte, newName string, new []byte) []byte { // End chunk with common lines for context. 
if len(ctext) > 0 { - n := end.x - start.x - if n > C { - n = C - } + n := min(end.x-start.x, C) for _, s := range x[start.x : start.x+n] { ctext = append(ctext, " "+s) count.x++ @@ -237,7 +234,7 @@ func tgs(x, y []string) []pair { for i := range T { T[i] = n + 1 } - for i := 0; i < n; i++ { + for i := range n { k := sort.Search(n, func(k int) bool { return T[k] >= J[i] }) diff --git a/internal/event/label/label.go b/internal/event/label/label.go index 7c00ca2a6da..92a39105731 100644 --- a/internal/event/label/label.go +++ b/internal/event/label/label.go @@ -8,6 +8,7 @@ import ( "fmt" "io" "reflect" + "slices" "unsafe" ) @@ -154,10 +155,8 @@ func (f *filter) Valid(index int) bool { func (f *filter) Label(index int) Label { l := f.underlying.Label(index) - for _, f := range f.keys { - if l.Key() == f { - return Label{} - } + if slices.Contains(f.keys, l.Key()) { + return Label{} } return l } diff --git a/internal/gcimporter/gcimporter_test.go b/internal/gcimporter/gcimporter_test.go index 9b38a0e1e28..9dc65fa19f6 100644 --- a/internal/gcimporter/gcimporter_test.go +++ b/internal/gcimporter/gcimporter_test.go @@ -672,7 +672,7 @@ func TestIssue15517(t *testing.T) { // file and package path are different, exposing the problem if present. // The same issue occurs with vendoring.) imports := make(map[string]*types.Package) - for i := 0; i < 3; i++ { + for range 3 { if _, err := gcimporter.Import(token.NewFileSet(), imports, "./././testdata/p", tmpdir, nil); err != nil { t.Fatal(err) } @@ -785,7 +785,7 @@ type K = StillBad[string] // Use the interface instances concurrently. 
for _, inst := range insts { var wg sync.WaitGroup - for i := 0; i < 2; i++ { + for range 2 { wg.Add(1) go func() { defer wg.Done() diff --git a/internal/gcimporter/iexport.go b/internal/gcimporter/iexport.go index 253d6493c21..780873e3ae7 100644 --- a/internal/gcimporter/iexport.go +++ b/internal/gcimporter/iexport.go @@ -236,6 +236,7 @@ import ( "io" "math/big" "reflect" + "slices" "sort" "strconv" "strings" @@ -271,10 +272,10 @@ import ( // file system, be sure to include a cryptographic digest of the executable in // the key to avoid version skew. // -// If the provided reportf func is non-nil, it will be used for reporting bugs -// encountered during export. -// TODO(rfindley): remove reportf when we are confident enough in the new -// objectpath encoding. +// If the provided reportf func is non-nil, it is used for reporting +// bugs (e.g. recovered panics) encountered during export, enabling us +// to obtain via telemetry the stack that would otherwise be lost by +// merely returning an error. func IExportShallow(fset *token.FileSet, pkg *types.Package, reportf ReportFunc) ([]byte, error) { // In principle this operation can only fail if out.Write fails, // but that's impossible for bytes.Buffer---and as a matter of @@ -283,7 +284,7 @@ func IExportShallow(fset *token.FileSet, pkg *types.Package, reportf ReportFunc) // TODO(adonovan): use byte slices throughout, avoiding copying. const bundle, shallow = false, true var out bytes.Buffer - err := iexportCommon(&out, fset, bundle, shallow, iexportVersion, []*types.Package{pkg}) + err := iexportCommon(&out, fset, bundle, shallow, iexportVersion, []*types.Package{pkg}, reportf) return out.Bytes(), err } @@ -323,20 +324,27 @@ const bundleVersion = 0 // so that calls to IImportData can override with a provided package path. 
func IExportData(out io.Writer, fset *token.FileSet, pkg *types.Package) error { const bundle, shallow = false, false - return iexportCommon(out, fset, bundle, shallow, iexportVersion, []*types.Package{pkg}) + return iexportCommon(out, fset, bundle, shallow, iexportVersion, []*types.Package{pkg}, nil) } // IExportBundle writes an indexed export bundle for pkgs to out. func IExportBundle(out io.Writer, fset *token.FileSet, pkgs []*types.Package) error { const bundle, shallow = true, false - return iexportCommon(out, fset, bundle, shallow, iexportVersion, pkgs) + return iexportCommon(out, fset, bundle, shallow, iexportVersion, pkgs, nil) } -func iexportCommon(out io.Writer, fset *token.FileSet, bundle, shallow bool, version int, pkgs []*types.Package) (err error) { +func iexportCommon(out io.Writer, fset *token.FileSet, bundle, shallow bool, version int, pkgs []*types.Package, reportf ReportFunc) (err error) { if !debug { defer func() { if e := recover(); e != nil { + // Report the stack via telemetry (see #71067). + if reportf != nil { + reportf("panic in exporter") + } if ierr, ok := e.(internalError); ok { + // internalError usually means we exported a + // bad go/types data structure: a violation + // of an implicit precondition of Export. err = ierr return } @@ -458,7 +466,7 @@ func (p *iexporter) encodeFile(w *intWriter, file *token.File, needed []uint64) w.uint64(size) // Sort the set of needed offsets. Duplicates are harmless. 
- sort.Slice(needed, func(i, j int) bool { return needed[i] < needed[j] }) + slices.Sort(needed) lines := file.Lines() // byte offset of each line start w.uint64(uint64(len(lines))) @@ -812,7 +820,7 @@ func (p *iexporter) doDecl(obj types.Object) { n := named.NumMethods() w.uint64(uint64(n)) - for i := 0; i < n; i++ { + for i := range n { m := named.Method(i) w.pos(m.Pos()) w.string(m.Name()) @@ -1089,7 +1097,7 @@ func (w *exportWriter) doTyp(t types.Type, pkg *types.Package) { w.pkg(fieldPkg) w.uint64(uint64(n)) - for i := 0; i < n; i++ { + for i := range n { f := t.Field(i) if w.p.shallow { w.objectPath(f) @@ -1138,7 +1146,7 @@ func (w *exportWriter) doTyp(t types.Type, pkg *types.Package) { w.startType(unionType) nt := t.Len() w.uint64(uint64(nt)) - for i := 0; i < nt; i++ { + for i := range nt { term := t.Term(i) w.bool(term.Tilde()) w.typ(term.Type(), pkg) @@ -1267,7 +1275,7 @@ func tparamName(exportName string) string { func (w *exportWriter) paramList(tup *types.Tuple) { n := tup.Len() w.uint64(uint64(n)) - for i := 0; i < n; i++ { + for i := range n { w.param(tup.At(i)) } } diff --git a/internal/gcimporter/iexport_test.go b/internal/gcimporter/iexport_test.go index 5707b3784a5..fa8ecd30dc1 100644 --- a/internal/gcimporter/iexport_test.go +++ b/internal/gcimporter/iexport_test.go @@ -29,7 +29,7 @@ import ( func iexport(fset *token.FileSet, version int, pkg *types.Package) ([]byte, error) { var buf bytes.Buffer const bundle, shallow = false, false - if err := gcimporter.IExportCommon(&buf, fset, bundle, shallow, version, []*types.Package{pkg}); err != nil { + if err := gcimporter.IExportCommon(&buf, fset, bundle, shallow, version, []*types.Package{pkg}, nil); err != nil { return nil, err } return buf.Bytes(), nil diff --git a/internal/gcimporter/iimport.go b/internal/gcimporter/iimport.go index bc6c9741e7d..82e6c9d2dc1 100644 --- a/internal/gcimporter/iimport.go +++ b/internal/gcimporter/iimport.go @@ -16,6 +16,7 @@ import ( "go/types" "io" "math/big" + 
"slices" "sort" "strings" @@ -314,7 +315,7 @@ func iimportCommon(fset *token.FileSet, getPackages GetPackagesFunc, data []byte pkgs = pkgList[:1] // record all referenced packages as imports - list := append(([]*types.Package)(nil), pkgList[1:]...) + list := slices.Clone(pkgList[1:]) sort.Sort(byPath(list)) pkgs[0].SetImports(list) } diff --git a/internal/gocommand/invoke.go b/internal/gocommand/invoke.go index 7ea9013447b..58721202de7 100644 --- a/internal/gocommand/invoke.go +++ b/internal/gocommand/invoke.go @@ -141,7 +141,7 @@ func (runner *Runner) runPiped(ctx context.Context, inv Invocation, stdout, stde // Wait for all in-progress go commands to return before proceeding, // to avoid load concurrency errors. - for i := 0; i < maxInFlight; i++ { + for range maxInFlight { select { case <-ctx.Done(): return ctx.Err(), ctx.Err() diff --git a/gopls/internal/analysis/gofix/cmd/gofix/main.go b/internal/gofix/cmd/gofix/main.go similarity index 89% rename from gopls/internal/analysis/gofix/cmd/gofix/main.go rename to internal/gofix/cmd/gofix/main.go index d75978f6e59..9ec77943774 100644 --- a/gopls/internal/analysis/gofix/cmd/gofix/main.go +++ b/internal/gofix/cmd/gofix/main.go @@ -10,7 +10,7 @@ package main import ( "golang.org/x/tools/go/analysis/singlechecker" - "golang.org/x/tools/gopls/internal/analysis/gofix" + "golang.org/x/tools/internal/gofix" ) func main() { singlechecker.Main(gofix.Analyzer) } diff --git a/gopls/internal/analysis/gofix/doc.go b/internal/gofix/doc.go similarity index 100% rename from gopls/internal/analysis/gofix/doc.go rename to internal/gofix/doc.go diff --git a/gopls/internal/analysis/gofix/gofix.go b/internal/gofix/gofix.go similarity index 95% rename from gopls/internal/analysis/gofix/gofix.go rename to internal/gofix/gofix.go index a2380f1d644..565272b5e46 100644 --- a/gopls/internal/analysis/gofix/gofix.go +++ b/internal/gofix/gofix.go @@ -9,6 +9,7 @@ import ( "go/ast" "go/token" "go/types" + "iter" "slices" "strings" @@ -18,7 +19,6 
@@ import ( "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/gopls/internal/util/moreiters" "golang.org/x/tools/internal/analysisinternal" internalastutil "golang.org/x/tools/internal/astutil" "golang.org/x/tools/internal/astutil/cursor" @@ -34,7 +34,7 @@ var doc string var Analyzer = &analysis.Analyzer{ Name: "gofix", Doc: analysisinternal.MustExtractDoc(doc, "gofix"), - URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/gofix", + URL: "https://pkg.go.dev/golang.org/x/tools/internal/gofix", Run: func(pass *analysis.Pass) (any, error) { return run(pass, true) }, FactTypes: []analysis.Fact{ (*goFixInlineFuncFact)(nil), @@ -47,7 +47,7 @@ var Analyzer = &analysis.Analyzer{ var DirectiveAnalyzer = &analysis.Analyzer{ Name: "gofixdirective", Doc: analysisinternal.MustExtractDoc(doc, "gofixdirective"), - URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/gofix", + URL: "https://pkg.go.dev/golang.org/x/tools/internal/gofix", Run: func(pass *analysis.Pass) (any, error) { return run(pass, false) }, FactTypes: []analysis.Fact{ (*goFixInlineFuncFact)(nil), @@ -330,7 +330,7 @@ func (a *analyzer) inlineAlias(tn *types.TypeName, curId cursor.Cursor) { // Remember the names of the alias's type params. When we check for shadowing // later, we'll ignore these because they won't appear in the replacement text. 
typeParamNames := map[*types.TypeName]bool{} - for tp := range alias.TypeParams().TypeParams() { + for tp := range listIter(alias.TypeParams()) { typeParamNames[tp.Obj()] = true } rhs := alias.Rhs() @@ -386,13 +386,13 @@ func (a *analyzer) inlineAlias(tn *types.TypeName, curId cursor.Cursor) { // pkg.Id[T] // pkg.Id[K, V] var expr ast.Expr = id - if e, _ := curId.Edge(); e == edge.SelectorExpr_Sel { + if ek, _ := curId.ParentEdge(); ek == edge.SelectorExpr_Sel { curId = curId.Parent() expr = curId.Node().(ast.Expr) } // If expr is part of an IndexExpr or IndexListExpr, we'll need that node. // Given C[int], TypeOf(C) is generic but TypeOf(C[int]) is instantiated. - switch ek, _ := curId.Edge(); ek { + switch ek, _ := curId.ParentEdge(); ek { case edge.IndexExpr_X: expr = curId.Parent().Node().(*ast.IndexExpr) case edge.IndexListExpr_X: @@ -405,7 +405,7 @@ func (a *analyzer) inlineAlias(tn *types.TypeName, curId cursor.Cursor) { // A[int, Foo] as M[int, Foo]. // Don't validate instantiation: it can't panic unless we have a bug, // in which case seeing the stack trace via telemetry would be helpful. 
- instAlias, _ := types.Instantiate(nil, alias, slices.Collect(targs.Types()), false) + instAlias, _ := types.Instantiate(nil, alias, slices.Collect(listIter(targs)), false) rhs = instAlias.(*types.Alias).Rhs() } // To get the replacement text, render the alias RHS using the package prefixes @@ -437,11 +437,11 @@ func typenames(t types.Type) []*types.TypeName { case *types.Basic: tns = append(tns, types.Universe.Lookup(t.Name()).(*types.TypeName)) case *types.Named: - for t := range t.TypeArgs().Types() { + for t := range listIter(t.TypeArgs()) { visit(t) } case *types.Alias: - for t := range t.TypeArgs().Types() { + for t := range listIter(t.TypeArgs()) { visit(t) } case *types.TypeParam: @@ -458,8 +458,8 @@ func typenames(t types.Type) []*types.TypeName { visit(t.Key()) visit(t.Elem()) case *types.Struct: - for f := range t.Fields() { - visit(f.Type()) + for i := range t.NumFields() { + visit(t.Field(i).Type()) } case *types.Signature: // Ignore the receiver: although it may be present, it has no meaning @@ -479,7 +479,7 @@ func typenames(t types.Type) []*types.TypeName { visit(t.ExplicitMethod(i).Type()) } case *types.Tuple: - for v := range t.Variables() { + for v := range listIter(t) { visit(v.Type()) } case *types.Union: @@ -548,7 +548,7 @@ func (a *analyzer) inlineConst(con *types.Const, cur cursor.Cursor) { } // If n is qualified by a package identifier, we'll need the full selector expression. var expr ast.Expr = n - if e, _ := cur.Edge(); e == edge.SelectorExpr_Sel { + if ek, _ := cur.ParentEdge(); ek == edge.SelectorExpr_Sel { expr = cur.Parent().Node().(ast.Expr) } a.reportInline("constant", "Constant", expr, edits, importPrefix+incon.RHSName) @@ -592,8 +592,10 @@ func (a *analyzer) readFile(node ast.Node) ([]byte, error) { // currentFile returns the unique ast.File for a cursor. 
func currentFile(c cursor.Cursor) *ast.File { - cf, _ := moreiters.First(c.Ancestors((*ast.File)(nil))) - return cf.Node().(*ast.File) + for cf := range c.Enclosing((*ast.File)(nil)) { + return cf.Node().(*ast.File) + } + panic("no *ast.File enclosing a cursor: impossible") } // hasFixInline reports the presence of a "//go:fix inline" directive @@ -640,3 +642,19 @@ func (*goFixInlineAliasFact) AFact() {} func discard(string, ...any) {} var builtinIota = types.Universe.Lookup("iota") + +type list[T any] interface { + Len() int + At(int) T +} + +// TODO(adonovan): eliminate in favor of go/types@go1.24 iterators. +func listIter[L list[T], T any](lst L) iter.Seq[T] { + return func(yield func(T) bool) { + for i := range lst.Len() { + if !yield(lst.At(i)) { + return + } + } + } +} diff --git a/gopls/internal/analysis/gofix/gofix_test.go b/internal/gofix/gofix_test.go similarity index 97% rename from gopls/internal/analysis/gofix/gofix_test.go rename to internal/gofix/gofix_test.go index 4acc4daf2ff..ae2df3860a8 100644 --- a/gopls/internal/analysis/gofix/gofix_test.go +++ b/internal/gofix/gofix_test.go @@ -19,6 +19,9 @@ import ( ) func TestAnalyzer(t *testing.T) { + if testenv.Go1Point() < 24 { + testenv.NeedsGoExperiment(t, "aliastypeparams") + } analysistest.RunWithSuggestedFixes(t, analysistest.TestData(), Analyzer, "a", "b") } diff --git a/gopls/internal/analysis/gofix/testdata/src/a/a.go b/internal/gofix/testdata/src/a/a.go similarity index 100% rename from gopls/internal/analysis/gofix/testdata/src/a/a.go rename to internal/gofix/testdata/src/a/a.go diff --git a/gopls/internal/analysis/gofix/testdata/src/a/a.go.golden b/internal/gofix/testdata/src/a/a.go.golden similarity index 100% rename from gopls/internal/analysis/gofix/testdata/src/a/a.go.golden rename to internal/gofix/testdata/src/a/a.go.golden diff --git a/gopls/internal/analysis/gofix/testdata/src/a/internal/d.go b/internal/gofix/testdata/src/a/internal/d.go similarity index 100% rename from 
gopls/internal/analysis/gofix/testdata/src/a/internal/d.go rename to internal/gofix/testdata/src/a/internal/d.go diff --git a/gopls/internal/analysis/gofix/testdata/src/b/b.go b/internal/gofix/testdata/src/b/b.go similarity index 100% rename from gopls/internal/analysis/gofix/testdata/src/b/b.go rename to internal/gofix/testdata/src/b/b.go diff --git a/gopls/internal/analysis/gofix/testdata/src/b/b.go.golden b/internal/gofix/testdata/src/b/b.go.golden similarity index 97% rename from gopls/internal/analysis/gofix/testdata/src/b/b.go.golden rename to internal/gofix/testdata/src/b/b.go.golden index fd8d87a2ef1..4de7f09710f 100644 --- a/gopls/internal/analysis/gofix/testdata/src/b/b.go.golden +++ b/internal/gofix/testdata/src/b/b.go.golden @@ -4,10 +4,8 @@ import a0 "a" import "io" -import ( - "a" - . "c" -) +import "a" +import . "c" func f() { a.One() // want `cannot inline call to a.One because body refers to non-exported one` diff --git a/gopls/internal/analysis/gofix/testdata/src/c/c.go b/internal/gofix/testdata/src/c/c.go similarity index 100% rename from gopls/internal/analysis/gofix/testdata/src/c/c.go rename to internal/gofix/testdata/src/c/c.go diff --git a/gopls/internal/analysis/gofix/testdata/src/directive/directive.go b/internal/gofix/testdata/src/directive/directive.go similarity index 100% rename from gopls/internal/analysis/gofix/testdata/src/directive/directive.go rename to internal/gofix/testdata/src/directive/directive.go diff --git a/gopls/internal/analysis/gofix/testdata/src/directive/directive.go.golden b/internal/gofix/testdata/src/directive/directive.go.golden similarity index 99% rename from gopls/internal/analysis/gofix/testdata/src/directive/directive.go.golden rename to internal/gofix/testdata/src/directive/directive.go.golden index 3e5b3409288..a6625e1731f 100644 --- a/gopls/internal/analysis/gofix/testdata/src/directive/directive.go.golden +++ b/internal/gofix/testdata/src/directive/directive.go.golden @@ -68,4 +68,3 @@ type E = 
map[[Uno]string][]*T // want `invalid //go:fix inline directive: array // //go:fix inline type EL = map[[2]string][]*T // want EL: `goFixInline alias` - diff --git a/internal/gopathwalk/walk.go b/internal/gopathwalk/walk.go index 984b79c2a07..5252144d046 100644 --- a/internal/gopathwalk/walk.go +++ b/internal/gopathwalk/walk.go @@ -14,6 +14,7 @@ import ( "os" "path/filepath" "runtime" + "slices" "strings" "sync" "time" @@ -195,10 +196,8 @@ func (w *walker) getIgnoredDirs(path string) []string { // shouldSkipDir reports whether the file should be skipped or not. func (w *walker) shouldSkipDir(dir string) bool { - for _, ignoredDir := range w.ignoredDirs { - if dir == ignoredDir { - return true - } + if slices.Contains(w.ignoredDirs, dir) { + return true } if w.skip != nil { // Check with the user specified callback. diff --git a/internal/imports/fix.go b/internal/imports/fix.go index 737a9bfae8f..89b96381cdc 100644 --- a/internal/imports/fix.go +++ b/internal/imports/fix.go @@ -32,6 +32,7 @@ import ( "golang.org/x/tools/internal/gocommand" "golang.org/x/tools/internal/gopathwalk" "golang.org/x/tools/internal/stdlib" + "maps" ) // importToGroup is a list of functions which map from an import path to @@ -585,7 +586,7 @@ func getFixesWithSource(ctx context.Context, fset *token.FileSet, f *ast.File, f srcDir := filepath.Dir(abs) if logf != nil { - logf("fixImports(filename=%q), srcDir=%q ...", filename, abs, srcDir) + logf("fixImports(filename=%q), srcDir=%q ...", filename, srcDir) } // First pass: looking only at f, and using the naive algorithm to @@ -968,9 +969,7 @@ func (e *ProcessEnv) CopyConfig() *ProcessEnv { resolver: nil, Env: map[string]string{}, } - for k, v := range e.Env { - copy.Env[k] = v - } + maps.Copy(copy.Env, e.Env) return copy } @@ -1003,9 +1002,7 @@ func (e *ProcessEnv) init() error { if err := json.Unmarshal(stdout.Bytes(), &goEnv); err != nil { return err } - for k, v := range goEnv { - e.Env[k] = v - } + maps.Copy(e.Env, goEnv) e.initialized = 
true return nil } diff --git a/internal/imports/fix_test.go b/internal/imports/fix_test.go index 478313aec7f..5313956dd63 100644 --- a/internal/imports/fix_test.go +++ b/internal/imports/fix_test.go @@ -2912,7 +2912,7 @@ func _() { wg sync.WaitGroup ) wg.Add(n) - for i := 0; i < n; i++ { + for range n { go func() { defer wg.Done() _, err := t.process("foo.com", "p/first.go", nil, nil) @@ -2983,7 +2983,7 @@ func TestSymbolSearchStarvation(t *testing.T) { } var candidates []pkgDistance - for i := 0; i < candCount; i++ { + for i := range candCount { name := fmt.Sprintf("bar%d", i) candidates = append(candidates, pkgDistance{ pkg: &pkg{ diff --git a/internal/imports/mod.go b/internal/imports/mod.go index 8555e3f83da..df94ec8186e 100644 --- a/internal/imports/mod.go +++ b/internal/imports/mod.go @@ -13,6 +13,7 @@ import ( "path" "path/filepath" "regexp" + "slices" "sort" "strconv" "strings" @@ -150,8 +151,8 @@ func newModuleResolver(e *ProcessEnv, moduleCacheCache *DirInfoCache) (*ModuleRe Path: "", Dir: filepath.Join(filepath.Dir(goWork), "vendor"), } - r.modsByModPath = append(append([]*gocommand.ModuleJSON{}, mainModsVendor...), r.dummyVendorMod) - r.modsByDir = append(append([]*gocommand.ModuleJSON{}, mainModsVendor...), r.dummyVendorMod) + r.modsByModPath = append(slices.Clone(mainModsVendor), r.dummyVendorMod) + r.modsByDir = append(slices.Clone(mainModsVendor), r.dummyVendorMod) } } else { // Vendor mode is off, so run go list -m ... to find everything. diff --git a/internal/imports/mod_cache.go b/internal/imports/mod_cache.go index b1192696b28..b96c9d4bf71 100644 --- a/internal/imports/mod_cache.go +++ b/internal/imports/mod_cache.go @@ -128,7 +128,7 @@ func (d *DirInfoCache) ScanAndListen(ctx context.Context, listener cacheListener // are going to be. Setting an arbitrary limit makes it much easier. 
const maxInFlight = 10 sema := make(chan struct{}, maxInFlight) - for i := 0; i < maxInFlight; i++ { + for range maxInFlight { sema <- struct{}{} } @@ -156,7 +156,7 @@ func (d *DirInfoCache) ScanAndListen(ctx context.Context, listener cacheListener d.mu.Lock() delete(d.listeners, cookie) d.mu.Unlock() - for i := 0; i < maxInFlight; i++ { + for range maxInFlight { <-sema } } diff --git a/internal/imports/mod_test.go b/internal/imports/mod_test.go index 890dc1b2e25..2862e84d184 100644 --- a/internal/imports/mod_test.go +++ b/internal/imports/mod_test.go @@ -25,6 +25,8 @@ import ( "golang.org/x/tools/internal/proxydir" "golang.org/x/tools/internal/testenv" "golang.org/x/tools/txtar" + "maps" + "slices" ) // Tests that we can find packages in the stdlib. @@ -928,12 +930,7 @@ func scanToSlice(resolver Resolver, exclude []gopathwalk.RootType) ([]*pkg, erro var result []*pkg filter := &scanCallback{ rootFound: func(root gopathwalk.Root) bool { - for _, rt := range exclude { - if root.Type == rt { - return false - } - } - return true + return !slices.Contains(exclude, root.Type) }, dirFound: func(pkg *pkg) bool { return true @@ -1023,9 +1020,7 @@ func setup(t *testing.T, extraEnv map[string]string, main, wd string) *modTest { WorkingDir: filepath.Join(mainDir, wd), GocmdRunner: &gocommand.Runner{}, } - for k, v := range extraEnv { - env.Env[k] = v - } + maps.Copy(env.Env, extraEnv) if *testDebug { env.Logf = log.Printf } diff --git a/internal/imports/sortimports.go b/internal/imports/sortimports.go index da8194fd965..67c17bc4319 100644 --- a/internal/imports/sortimports.go +++ b/internal/imports/sortimports.go @@ -11,6 +11,7 @@ import ( "go/ast" "go/token" "log" + "slices" "sort" "strconv" ) @@ -30,7 +31,7 @@ func sortImports(localPrefix string, tokFile *token.File, f *ast.File) { if len(d.Specs) == 0 { // Empty import block, remove it. - f.Decls = append(f.Decls[:i], f.Decls[i+1:]...) 
+ f.Decls = slices.Delete(f.Decls, i, i+1) } if !d.Lparen.IsValid() { @@ -91,7 +92,7 @@ func mergeImports(f *ast.File) { spec.(*ast.ImportSpec).Path.ValuePos = first.Pos() first.Specs = append(first.Specs, spec) } - f.Decls = append(f.Decls[:i], f.Decls[i+1:]...) + f.Decls = slices.Delete(f.Decls, i, i+1) i-- } } diff --git a/internal/modindex/lookup.go b/internal/modindex/lookup.go index 5499c5c67f3..bd605e0d763 100644 --- a/internal/modindex/lookup.go +++ b/internal/modindex/lookup.go @@ -120,7 +120,7 @@ func (ix *Index) Lookup(pkg, name string, prefix bool) []Candidate { px.Results = int16(n) if len(flds) >= 4 { sig := strings.Split(flds[3], " ") - for i := 0; i < len(sig); i++ { + for i := range sig { // $ cannot otherwise occur. removing the spaces // almost works, but for chan struct{}, e.g. sig[i] = strings.Replace(sig[i], "$", " ", -1) @@ -136,7 +136,7 @@ func (ix *Index) Lookup(pkg, name string, prefix bool) []Candidate { func toFields(sig []string) []Field { ans := make([]Field, len(sig)/2) - for i := 0; i < len(ans); i++ { + for i := range ans { ans[i] = Field{Arg: sig[2*i], Type: sig[2*i+1]} } return ans diff --git a/internal/packagestest/export.go b/internal/packagestest/export.go index ce992e17a90..4dd2b331736 100644 --- a/internal/packagestest/export.go +++ b/internal/packagestest/export.go @@ -155,7 +155,6 @@ var All = []Exporter{GOPATH, Modules} func TestAll(t *testing.T, f func(*testing.T, Exporter)) { t.Helper() for _, e := range All { - e := e // in case f calls t.Parallel t.Run(e.Name(), func(t *testing.T) { t.Helper() f(t, e) @@ -169,7 +168,6 @@ func TestAll(t *testing.T, f func(*testing.T, Exporter)) { func BenchmarkAll(b *testing.B, f func(*testing.B, Exporter)) { b.Helper() for _, e := range All { - e := e // in case f calls t.Parallel b.Run(e.Name(), func(b *testing.B) { b.Helper() f(b, e) diff --git a/internal/pkgbits/decoder.go b/internal/pkgbits/decoder.go index f6cb37c5c3d..c0aba26c482 100644 --- a/internal/pkgbits/decoder.go +++ 
b/internal/pkgbits/decoder.go @@ -259,7 +259,7 @@ func (r *Decoder) rawUvarint() uint64 { func readUvarint(r *strings.Reader) (uint64, error) { var x uint64 var s uint - for i := 0; i < binary.MaxVarintLen64; i++ { + for i := range binary.MaxVarintLen64 { b, err := r.ReadByte() if err != nil { if i > 0 && err == io.EOF { diff --git a/internal/proxydir/proxydir.go b/internal/proxydir/proxydir.go index dc6b6ae94e8..bbd1ab4fd26 100644 --- a/internal/proxydir/proxydir.go +++ b/internal/proxydir/proxydir.go @@ -46,7 +46,7 @@ func WriteModuleVersion(rootDir, module, ver string, files map[string][]byte) (r } // info file, just the bare bones. - infoContents := []byte(fmt.Sprintf(`{"Version": "%v", "Time":"2017-12-14T13:08:43Z"}`, ver)) + infoContents := fmt.Appendf(nil, `{"Version": "%v", "Time":"2017-12-14T13:08:43Z"}`, ver) if err := os.WriteFile(filepath.Join(dir, ver+".info"), infoContents, 0644); err != nil { return err } diff --git a/internal/refactor/inline/callee.go b/internal/refactor/inline/callee.go index b4ec43d551c..ca9426a2656 100644 --- a/internal/refactor/inline/callee.go +++ b/internal/refactor/inline/callee.go @@ -14,6 +14,7 @@ import ( "go/parser" "go/token" "go/types" + "slices" "strings" "golang.org/x/tools/go/types/typeutil" @@ -303,7 +304,7 @@ func AnalyzeCallee(logf func(string, ...any), fset *token.FileSet, pkg *types.Pa return nil, tuple.At(i).Type() } } - for i := 0; i < sig.Results().Len(); i++ { + for i := range sig.Results().Len() { expr, typ := argInfo(i) var flags returnOperandFlags if typ == types.Typ[types.UntypedNil] { // untyped nil is preserved by go/types @@ -572,11 +573,9 @@ func analyzeAssignment(info *types.Info, stack []ast.Node) (assignable, ifaceAss // Types do not need to match for an initializer with known type. 
if spec, ok := parent.(*ast.ValueSpec); ok && spec.Type != nil { - for _, v := range spec.Values { - if v == expr { - typ := info.TypeOf(spec.Type) - return true, typ == nil || types.IsInterface(typ), false - } + if slices.Contains(spec.Values, expr) { + typ := info.TypeOf(spec.Type) + return true, typ == nil || types.IsInterface(typ), false } } @@ -616,7 +615,7 @@ func analyzeAssignment(info *types.Info, stack []ast.Node) (assignable, ifaceAss return true, types.IsInterface(under.Elem()), false case *types.Struct: // Struct{k: expr} if id, _ := kv.Key.(*ast.Ident); id != nil { - for fi := 0; fi < under.NumFields(); fi++ { + for fi := range under.NumFields() { field := under.Field(fi) if info.Uses[id] == field { return true, types.IsInterface(field.Type()), false @@ -715,7 +714,7 @@ func paramTypeAtIndex(sig *types.Signature, call *ast.CallExpr, index int) types // given outer-to-inner stack, after stripping parentheses, along with the // remaining stack up to the parent node. // -// If no such context exists, returns (nil, nil). +// If no such context exists, returns (nil, nil, nil). func exprContext(stack []ast.Node) (remaining []ast.Node, parent ast.Node, expr ast.Expr) { expr, _ = stack[len(stack)-1].(ast.Expr) if expr == nil { diff --git a/internal/refactor/inline/calleefx.go b/internal/refactor/inline/calleefx.go index 11246e5b969..26dc02c010b 100644 --- a/internal/refactor/inline/calleefx.go +++ b/internal/refactor/inline/calleefx.go @@ -31,7 +31,7 @@ const ( // } // // is [1 0 -2 2], indicating reads of y and x, followed by the unknown -// effects of the g() call. and finally the read of parameter z. This +// effects of the g() call, and finally the read of parameter z. This // information is used during inlining to ascertain when it is safe // for parameter references to be replaced by their corresponding // argument expressions. 
Such substitutions are permitted only when diff --git a/internal/refactor/inline/calleefx_test.go b/internal/refactor/inline/calleefx_test.go index 1fc16aebaac..b643c7a06ac 100644 --- a/internal/refactor/inline/calleefx_test.go +++ b/internal/refactor/inline/calleefx_test.go @@ -107,7 +107,6 @@ func TestCalleeEffects(t *testing.T) { }, } for _, test := range tests { - test := test t.Run(test.descr, func(t *testing.T) { fset := token.NewFileSet() mustParse := func(filename string, content any) *ast.File { diff --git a/internal/refactor/inline/everything_test.go b/internal/refactor/inline/everything_test.go index 12b9ba47f21..a32e0709be1 100644 --- a/internal/refactor/inline/everything_test.go +++ b/internal/refactor/inline/everything_test.go @@ -13,6 +13,7 @@ import ( "log" "os" "path/filepath" + "slices" "strings" "testing" @@ -193,7 +194,7 @@ func TestEverything(t *testing.T) { t.Fatalf("transformed source does not parse: %v", err) } // Splice into original file list. - syntax := append([]*ast.File(nil), callerPkg.Syntax...) + syntax := slices.Clone(callerPkg.Syntax) for i := range callerPkg.Syntax { if syntax[i] == callerFile { syntax[i] = f diff --git a/internal/refactor/inline/free.go b/internal/refactor/inline/free.go new file mode 100644 index 00000000000..28cebeea3db --- /dev/null +++ b/internal/refactor/inline/free.go @@ -0,0 +1,376 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Copied, with considerable changes, from go/parser/resolver.go +// at af53bd2c03. + +package inline + +import ( + "go/ast" + "go/token" +) + +// freeishNames computes an approximation to the free names of the AST +// at node n based solely on syntax, inserting values into the map. +// +// In the absence of composite literals, the set of free names is exact. 
Composite +// literals introduce an ambiguity that can only be resolved with type information: +// whether F is a field name or a value in `T{F: ...}`. +// If includeComplitIdents is true, this function conservatively assumes +// T is not a struct type, so freeishNames overapproximates: the resulting +// set may contain spurious entries that are not free lexical references +// but are references to struct fields. +// If includeComplitIdents is false, this function assumes that T *is* +// a struct type, so freeishNames underapproximates: the resulting set +// may omit names that are free lexical references. +// +// The code is based on go/parser.resolveFile, but heavily simplified. Crucial +// differences are: +// - Instead of resolving names to their objects, this function merely records +// whether they are free. +// - Labels are ignored: they do not refer to values. +// - This is never called on FuncDecls or ImportSpecs, so the function +// panics if it sees one. +func freeishNames(free map[string]bool, n ast.Node, includeComplitIdents bool) { + v := &freeVisitor{free: free, includeComplitIdents: includeComplitIdents} + ast.Walk(v, n) + assert(v.scope == nil, "unbalanced scopes") +} + +// A freeVisitor holds state for a free-name analysis. +type freeVisitor struct { + scope *scope // the current innermost scope + free map[string]bool // free names seen so far + includeComplitIdents bool // include identifier key in composite literals +} + +// scope contains all the names defined in a lexical scope. +// It is like ast.Scope, but without deprecation warnings. +type scope struct { + names map[string]bool + outer *scope +} + +func (s *scope) defined(name string) bool { + for ; s != nil; s = s.outer { + if s.names[name] { + return true + } + } + return false +} + +func (v *freeVisitor) Visit(n ast.Node) ast.Visitor { + switch n := n.(type) { + + // Expressions. 
+ case *ast.Ident: + v.resolve(n) + + case *ast.FuncLit: + v.openScope() + defer v.closeScope() + v.walkFuncType(n.Type) + v.walkBody(n.Body) + + case *ast.SelectorExpr: + v.walk(n.X) + // Skip n.Sel: it cannot be free. + + case *ast.StructType: + v.openScope() + defer v.closeScope() + v.walkFieldList(n.Fields) + + case *ast.FuncType: + v.openScope() + defer v.closeScope() + v.walkFuncType(n) + + case *ast.CompositeLit: + v.walk(n.Type) + for _, e := range n.Elts { + if kv, _ := e.(*ast.KeyValueExpr); kv != nil { + if ident, _ := kv.Key.(*ast.Ident); ident != nil { + // It is not possible from syntax alone to know whether + // an identifier used as a composite literal key is + // a struct field (if n.Type is a struct) or a value + // (if n.Type is a map, slice or array). + if v.includeComplitIdents { + // Over-approximate by treating both cases as potentially + // free names. + v.resolve(ident) + } else { + // Under-approximate by ignoring potentially free names. + } + } else { + v.walk(kv.Key) + } + v.walk(kv.Value) + } else { + v.walk(e) + } + } + + case *ast.InterfaceType: + v.openScope() + defer v.closeScope() + v.walkFieldList(n.Methods) + + // Statements + case *ast.AssignStmt: + walkSlice(v, n.Rhs) + if n.Tok == token.DEFINE { + v.shortVarDecl(n.Lhs) + } else { + walkSlice(v, n.Lhs) + } + + case *ast.LabeledStmt: + // ignore labels + // TODO(jba): consider labels? + v.walk(n.Stmt) + + case *ast.BranchStmt: + // Ignore labels. + // TODO(jba): consider labels? 
+ + case *ast.BlockStmt: + v.openScope() + defer v.closeScope() + walkSlice(v, n.List) + + case *ast.IfStmt: + v.openScope() + defer v.closeScope() + v.walk(n.Init) + v.walk(n.Cond) + v.walk(n.Body) + v.walk(n.Else) + + case *ast.CaseClause: + walkSlice(v, n.List) + v.openScope() + defer v.closeScope() + walkSlice(v, n.Body) + + case *ast.SwitchStmt: + v.openScope() + defer v.closeScope() + v.walk(n.Init) + v.walk(n.Tag) + v.walkBody(n.Body) + + case *ast.TypeSwitchStmt: + if n.Init != nil { + v.openScope() + defer v.closeScope() + v.walk(n.Init) + } + v.openScope() + defer v.closeScope() + v.walk(n.Assign) + // We can use walkBody here because we don't track label scopes. + v.walkBody(n.Body) + + case *ast.CommClause: + v.openScope() + defer v.closeScope() + v.walk(n.Comm) + walkSlice(v, n.Body) + + case *ast.SelectStmt: + v.walkBody(n.Body) + + case *ast.ForStmt: + v.openScope() + defer v.closeScope() + v.walk(n.Init) + v.walk(n.Cond) + v.walk(n.Post) + v.walk(n.Body) + + case *ast.RangeStmt: + v.openScope() + defer v.closeScope() + v.walk(n.X) + var lhs []ast.Expr + if n.Key != nil { + lhs = append(lhs, n.Key) + } + if n.Value != nil { + lhs = append(lhs, n.Value) + } + if len(lhs) > 0 { + if n.Tok == token.DEFINE { + v.shortVarDecl(lhs) + } else { + walkSlice(v, lhs) + } + } + v.walk(n.Body) + + // Declarations + case *ast.GenDecl: + switch n.Tok { + case token.CONST, token.VAR: + for _, spec := range n.Specs { + spec := spec.(*ast.ValueSpec) + walkSlice(v, spec.Values) + if spec.Type != nil { + v.walk(spec.Type) + } + v.declare(spec.Names...) + } + case token.TYPE: + for _, spec := range n.Specs { + spec := spec.(*ast.TypeSpec) + // Go spec: The scope of a type identifier declared inside a + // function begins at the identifier in the TypeSpec and ends + // at the end of the innermost containing block. 
+ v.declare(spec.Name) + if spec.TypeParams != nil { + v.openScope() + defer v.closeScope() + v.walkTypeParams(spec.TypeParams) + } + v.walk(spec.Type) + } + + case token.IMPORT: + panic("encountered import declaration in free analysis") + } + + case *ast.FuncDecl: + panic("encountered top-level function declaration in free analysis") + + default: + return v + } + + return nil +} + +func (r *freeVisitor) openScope() { + r.scope = &scope{map[string]bool{}, r.scope} +} + +func (r *freeVisitor) closeScope() { + r.scope = r.scope.outer +} + +func (r *freeVisitor) walk(n ast.Node) { + if n != nil { + ast.Walk(r, n) + } +} + +// walkFuncType walks a function type. It is used for explicit +// function types, like this: +// +// type RunFunc func(context.Context) error +// +// and function literals, like this: +// +// func(a, b int) int { return a + b} +// +// neither of which have type parameters. +// Function declarations do involve type parameters, but we don't +// handle them. +func (r *freeVisitor) walkFuncType(typ *ast.FuncType) { + // The order here doesn't really matter, because names in + // a field list cannot appear in types. + // (The situation is different for type parameters, for which + // see [freeVisitor.walkTypeParams].) + r.resolveFieldList(typ.Params) + r.resolveFieldList(typ.Results) + r.declareFieldList(typ.Params) + r.declareFieldList(typ.Results) +} + +// walkTypeParams is like walkFieldList, but declares type parameters eagerly so +// that they may be resolved in the constraint expressions held in the field +// Type. 
+func (r *freeVisitor) walkTypeParams(list *ast.FieldList) { + r.declareFieldList(list) + r.resolveFieldList(list) +} + +func (r *freeVisitor) walkBody(body *ast.BlockStmt) { + if body == nil { + return + } + walkSlice(r, body.List) +} + +func (r *freeVisitor) walkFieldList(list *ast.FieldList) { + if list == nil { + return + } + r.resolveFieldList(list) // .Type may contain references + r.declareFieldList(list) // .Names declares names +} + +func (r *freeVisitor) shortVarDecl(lhs []ast.Expr) { + // Go spec: A short variable declaration may redeclare variables provided + // they were originally declared in the same block with the same type, and + // at least one of the non-blank variables is new. + // + // However, it doesn't matter to free analysis whether a variable is declared + // fresh or redeclared. + for _, x := range lhs { + // In a well-formed program each expr must be an identifier, + // but be forgiving. + if id, ok := x.(*ast.Ident); ok { + r.declare(id) + } + } +} + +func walkSlice[S ~[]E, E ast.Node](r *freeVisitor, list S) { + for _, e := range list { + r.walk(e) + } +} + +// resolveFieldList resolves the types of the fields in list. +// The companion method declareFieldList declares the names of the fields. +func (r *freeVisitor) resolveFieldList(list *ast.FieldList) { + if list == nil { + return + } + for _, f := range list.List { + r.walk(f.Type) + } +} + +// declareFieldList declares the names of the fields in list. +// (Names in a FieldList always establish new bindings.) +// The companion method resolveFieldList resolves the types of the fields. +func (r *freeVisitor) declareFieldList(list *ast.FieldList) { + if list == nil { + return + } + for _, f := range list.List { + r.declare(f.Names...) + } +} + +// resolve marks ident as free if it is not in scope. +// TODO(jba): rename: no resolution is happening. 
+func (r *freeVisitor) resolve(ident *ast.Ident) { + if s := ident.Name; s != "_" && !r.scope.defined(s) { + r.free[s] = true + } +} + +// declare adds each non-blank ident to the current scope. +func (r *freeVisitor) declare(idents ...*ast.Ident) { + for _, id := range idents { + if id.Name != "_" { + r.scope.names[id.Name] = true + } + } +} diff --git a/internal/refactor/inline/free_test.go b/internal/refactor/inline/free_test.go new file mode 100644 index 00000000000..28fa56db099 --- /dev/null +++ b/internal/refactor/inline/free_test.go @@ -0,0 +1,239 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package inline + +import ( + "fmt" + "go/ast" + "go/parser" + "go/token" + "maps" + "slices" + "strings" + "testing" +) + +func TestFreeishNames(t *testing.T) { + elems := func(m map[string]bool) string { + return strings.Join(slices.Sorted(maps.Keys(m)), " ") + } + + type testcase struct { + code string // one or more exprs, decls or stmts + want string // space-separated list of free names + } + + for _, tc := range []struct { + includeComplitIdents bool + cases []testcase + }{ + {true, []testcase{ + { + `x`, + "x", + }, + { + `x.y.z`, + "x", + }, + { + `T{a: 1, b: 2, c.d: e}`, + "a b c e T", + }, + { + `f(x)`, + "f x", + }, + { + `f.m(x)`, + "f x", + }, + { + `func(x int) int { return x + y }`, + "int y", + }, + { + `x = func(x int) int { return 2*x }()`, + "int x", + }, + { + `func(x int) (y int) { return x + y }`, + "int", + }, + { + `struct{a **int; b map[int][]bool}`, + "bool int", + }, + { + `struct{f int}{f: 0}`, + "f int", + }, + { + `interface{m1(int) bool; m2(x int) (y bool)}`, + "bool int", + }, + { + `x := 1; x++`, + "", + }, + { + `x = 1`, + "x", + }, + { + `_ = 1`, + "", + }, + { + `x, y := 1, 2; x = y + z`, + "z", + }, + { + `x, y := y, x; x = y + z`, + "x y z", + }, + { + `a, b := 0, 0; b, c := 0, 0; print(a, b, c, d)`, + "d print", 
+ }, + { + `label: x++`, + "x", + }, + { + `if x == y {x}`, + "x y", + }, + { + `if x := 1; x == y {x}`, + "y", + }, + { + `if x := 1; x == y {x} else {z}`, + "y z", + }, + { + `switch x { case 1: x; case y: z }`, + "x y z", + }, + { + `switch x := 1; x { case 1: x; case y: z }`, + "y z", + }, + { + `switch x.(type) { case int: x; case []int: y }`, + "int x y", + }, + { + `switch x := 1; x.(type) { case int: x; case []int: y }`, + "int y", + }, + { + `switch y := x.(type) { case int: x; case []int: y }`, + "int x", + }, + { + `select { case c <- 1: x; case x := <-c: 2; default: y}`, + "c x y", + }, + { + `for i := 0; i < 9; i++ { c <- j }`, + "c j", + }, + { + `for i = 0; i < 9; i++ { c <- j }`, + "c i j", + }, + { + `for i := range 9 { c <- j }`, + "c j", + }, + { + `for i = range 9 { c <- j }`, + "c i j", + }, + { + `for _, e := range []int{1, 2, x} {e}`, + "int x", + }, + { + `var x, y int; f(x, y)`, + "f int", + }, + { + `{var x, y int}; f(x, y)`, + "f int x y", + }, + { + `const x = 1; { const y = iota; return x, y }`, + "iota", + }, + { + `type t int; t(0)`, + "int", + }, + { + `type t[T ~int] struct { t T }; x = t{t: 1}.t`, // field t shadowed by type decl + "int x", + }, + { + `type t[S ~[]E, E any] S`, + "any", + }, + { + `var a [unsafe.Sizeof(func(x int) { x + y })]int`, + "int unsafe y", + }, + }}, + { + false, + []testcase{ + { + `x`, + "x", + }, + { + `x.y.z`, + "x", + }, + { + `T{a: 1, b: 2, c.d: e}`, + "c e T", // omit a and b + }, + { + `type t[T ~int] struct { t T }; x = t{t: 1}.t`, // field t shadowed by type decl + "int x", + }, + }, + }, + } { + t.Run(fmt.Sprintf("includeComplitIdents=%t", tc.includeComplitIdents), func(t *testing.T) { + for _, test := range tc.cases { + _, f := mustParse(t, "free.go", `package p; func _() {`+test.code+`}`) + n := f.Decls[0].(*ast.FuncDecl).Body + got := map[string]bool{} + want := map[string]bool{} + for _, n := range strings.Fields(test.want) { + want[n] = true + } + + freeishNames(got, n, 
tc.includeComplitIdents) + + if !maps.Equal(got, want) { + t.Errorf("\ncode %s\ngot %v\nwant %v", test.code, elems(got), elems(want)) + } + } + }) + } +} + +func mustParse(t *testing.T, filename string, content any) (*token.FileSet, *ast.File) { + fset := token.NewFileSet() + f, err := parser.ParseFile(fset, filename, content, parser.ParseComments|parser.SkipObjectResolution) + if err != nil { + t.Fatalf("ParseFile: %v", err) + } + return fset, f +} diff --git a/internal/refactor/inline/inline.go b/internal/refactor/inline/inline.go index 2b6f06242e7..edd5d836613 100644 --- a/internal/refactor/inline/inline.go +++ b/internal/refactor/inline/inline.go @@ -22,11 +22,11 @@ import ( "golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/imports" "golang.org/x/tools/internal/analysisinternal" internalastutil "golang.org/x/tools/internal/astutil" "golang.org/x/tools/internal/typeparams" "golang.org/x/tools/internal/typesinternal" + "maps" ) // A Caller describes the function call and its enclosing context. @@ -271,12 +271,12 @@ func (st *state) inline() (*Result, error) { } } - // Add new imports. - // + // Add new imports that are still used. + newImports := trimNewImports(res.newImports, res.new) // Insert new imports after last existing import, // to avoid migration of pre-import comments. // The imports will be organized below. - if len(res.newImports) > 0 { + if len(newImports) > 0 { // If we have imports to add, do so independent of the rest of the file. // Otherwise, the length of the new imports may consume floating comments, // causing them to be printed inside the imports block. @@ -329,7 +329,7 @@ func (st *state) inline() (*Result, error) { } } // Add new imports. - for _, imp := range res.newImports { + for _, imp := range newImports { // Check that the new imports are accessible. 
path, _ := strconv.Unquote(imp.spec.Path.Value) if !analysisinternal.CanImport(caller.Types.Path(), path) { @@ -355,30 +355,14 @@ func (st *state) inline() (*Result, error) { } // Delete imports referenced only by caller.Call.Fun. - // - // (We can't let imports.Process take care of it as it may - // mistake obsolete imports for missing new imports when the - // names are similar, as is common during a package migration.) for _, oldImport := range res.oldImports { specToDelete := oldImport.spec - for _, decl := range f.Decls { - if decl, ok := decl.(*ast.GenDecl); ok && decl.Tok == token.IMPORT { - decl.Specs = slices.DeleteFunc(decl.Specs, func(spec ast.Spec) bool { - imp := spec.(*ast.ImportSpec) - // Since we re-parsed the file, we can't match by identity; - // instead look for syntactic equivalence. - return imp.Path.Value == specToDelete.Path.Value && - (imp.Name != nil) == (specToDelete.Name != nil) && - (imp.Name == nil || imp.Name.Name == specToDelete.Name.Name) - }) - - // Edge case: import "foo" => import (). - if !decl.Lparen.IsValid() { - decl.Lparen = decl.TokPos + token.Pos(len("import")) - decl.Rparen = decl.Lparen + 1 - } - } + name := "" + if specToDelete.Name != nil { + name = specToDelete.Name.Name } + path, _ := strconv.Unquote(specToDelete.Path.Value) + astutil.DeleteNamedImport(caller.Fset, f, name, path) } var out bytes.Buffer @@ -387,66 +371,6 @@ func (st *state) inline() (*Result, error) { } newSrc := out.Bytes() - // Remove imports that are no longer referenced. - // - // It ought to be possible to compute the set of PkgNames used - // by the "old" code, compute the free identifiers of the - // "new" code using a syntax-only (no go/types) algorithm, and - // see if the reduction in the number of uses of any PkgName - // equals the number of times it appears in caller.Info.Uses, - // indicating that it is no longer referenced by res.new. 
- // - // However, the notorious ambiguity of resolving T{F: 0} makes this - // unreliable: without types, we can't tell whether F refers to - // a field of struct T, or a package-level const/var of a - // dot-imported (!) package. - // - // So, for now, we run imports.Process, which is - // unsatisfactory as it has to run the go command, and it - // looks at the user's module cache state--unnecessarily, - // since this step cannot add new imports. - // - // TODO(adonovan): replace with a simpler implementation since - // all the necessary imports are present but merely untidy. - // That will be faster, and also less prone to nondeterminism - // if there are bugs in our logic for import maintenance. - // - // However, golang.org/x/tools/internal/imports.ApplyFixes is - // too simple as it requires the caller to have figured out - // all the logical edits. In our case, we know all the new - // imports that are needed (see newImports), each of which can - // be specified as: - // - // &imports.ImportFix{ - // StmtInfo: imports.ImportInfo{path, name, - // IdentName: name, - // FixType: imports.AddImport, - // } - // - // but we don't know which imports are made redundant by the - // inlining itself. For example, inlining a call to - // fmt.Println may make the "fmt" import redundant. - // - // Also, both imports.Process and internal/imports.ApplyFixes - // reformat the entire file, which is not ideal for clients - // such as gopls. (That said, the point of a canonical format - // is arguably that any tool can reformat as needed without - // this being inconvenient.) - // - // We could invoke imports.Process and parse its result, - // compare against the original AST, compute a list of import - // fixes, and return that too. - - // Recompute imports only if there were existing ones. 
- if len(f.Imports) > 0 { - formatted, err := imports.Process("output", newSrc, nil) - if err != nil { - logf("cannot reformat: %v <<%s>>", err, &out) - return nil, err // cannot reformat (a bug?) - } - newSrc = formatted - } - literalized := false if call, ok := res.new.(*ast.CallExpr); ok && is[*ast.FuncLit](call.Fun) { literalized = true @@ -470,6 +394,183 @@ type newImport struct { spec *ast.ImportSpec } +// importState tracks information about imports. +type importState struct { + logf func(string, ...any) + caller *Caller + importMap map[string][]string // from package paths in the caller's file to local names + newImports []newImport // for references to free names in callee; to be added to the file + oldImports []oldImport // referenced only by caller.Call.Fun; to be removed from the file +} + +// newImportState returns an importState with initial information about the caller's imports. +func newImportState(logf func(string, ...any), caller *Caller, callee *gobCallee) *importState { + // For simplicity we ignore existing dot imports, so that a qualified + // identifier (QI) in the callee is always represented by a QI in the caller, + // allowing us to treat a QI like a selection on a package name. + is := &importState{ + logf: logf, + caller: caller, + importMap: make(map[string][]string), + } + + for _, imp := range caller.File.Imports { + if pkgName, ok := importedPkgName(caller.Info, imp); ok && + pkgName.Name() != "." && + pkgName.Name() != "_" { + + // If the import's sole use is in caller.Call.Fun of the form p.F(...), + // where p.F is a qualified identifier, the p import may not be + // necessary. + // + // Only the qualified identifier case matters, as other references to + // imported package names in the Call.Fun expression (e.g. + // x.after(3*time.Second).f() or time.Second.String()) will remain after + // inlining, as arguments. + // + // If that is the case, proactively check if any of the callee FreeObjs + // need this import. 
Doing so eagerly simplifies the resulting logic. + needed := true + sel, ok := ast.Unparen(caller.Call.Fun).(*ast.SelectorExpr) + if ok && soleUse(caller.Info, pkgName) == sel.X { + needed = false // no longer needed by caller + // Check to see if any of the inlined free objects need this package. + for _, obj := range callee.FreeObjs { + if obj.PkgPath == pkgName.Imported().Path() && obj.Shadow[pkgName.Name()] == 0 { + needed = true // needed by callee + break + } + } + } + + // Exclude imports not needed by the caller or callee after inlining; the second + // return value holds these. + if needed { + path := pkgName.Imported().Path() + is.importMap[path] = append(is.importMap[path], pkgName.Name()) + } else { + is.oldImports = append(is.oldImports, oldImport{pkgName: pkgName, spec: imp}) + } + } + } + return is +} + +// importName finds an existing import name to use in a particular shadowing +// context. It is used to determine the set of new imports in +// localName, and is also used for writing out names in inlining +// strategies below. +func (i *importState) importName(pkgPath string, shadow shadowMap) string { + for _, name := range i.importMap[pkgPath] { + // Check that either the import preexisted, or that it was newly added + // (no PkgName) but is not shadowed, either in the callee (shadows) or + // caller (caller.lookup). + if shadow[name] == 0 { + found := i.caller.lookup(name) + if is[*types.PkgName](found) || found == nil { + return name + } + } + } + return "" +} + +// localName returns the local name for a given imported package path, +// adding one if it doesn't exists. +func (i *importState) localName(pkgPath, pkgName string, shadow shadowMap) string { + // Does an import already exist that works in this shadowing context? 
+ if name := i.importName(pkgPath, shadow); name != "" { + return name + } + + newlyAdded := func(name string) bool { + return slices.ContainsFunc(i.newImports, func(n newImport) bool { return n.pkgName == name }) + } + + // shadowedInCaller reports whether a candidate package name + // already refers to a declaration in the caller. + shadowedInCaller := func(name string) bool { + obj := i.caller.lookup(name) + if obj == nil { + return false + } + // If obj will be removed, the name is available. + return !slices.ContainsFunc(i.oldImports, func(o oldImport) bool { return o.pkgName == obj }) + } + + // import added by callee + // + // Choose local PkgName based on last segment of + // package path plus, if needed, a numeric suffix to + // ensure uniqueness. + // + // "init" is not a legal PkgName. + // + // TODO(rfindley): is it worth preserving local package names for callee + // imports? Are they likely to be better or worse than the name we choose + // here? + base := pkgName + name := base + for n := 0; shadow[name] != 0 || shadowedInCaller(name) || newlyAdded(name) || name == "init"; n++ { + name = fmt.Sprintf("%s%d", base, n) + } + i.logf("adding import %s %q", name, pkgPath) + spec := &ast.ImportSpec{ + Path: &ast.BasicLit{ + Kind: token.STRING, + Value: strconv.Quote(pkgPath), + }, + } + // Use explicit pkgname (out of necessity) when it differs from the declared name, + // or (for good style) when it differs from base(pkgpath). + if name != pkgName || name != pathpkg.Base(pkgPath) { + spec.Name = makeIdent(name) + } + i.newImports = append(i.newImports, newImport{ + pkgName: name, + spec: spec, + }) + i.importMap[pkgPath] = append(i.importMap[pkgPath], name) + return name +} + +// trimNewImports removes imports that are no longer needed. +// +// The list of new imports as constructed by calls to [importState.localName] +// includes all of the packages referenced by the callee. +// But in the process of inlining, we may have dropped some of those references. 
+// For example, if the callee looked like this: +// +// func F(x int) (p.T) {... /* no mention of p */ ...} +// +// and we inlined by assignment: +// +// v := ... +// +// then the reference to package p drops away. +// +// Remove the excess imports by seeing which remain in new, the expression +// to be inlined. +// We can find those by looking at the free names in new. +// The list of free names cannot include spurious package names. +// Free-name tracking is precise except for the case of an identifier +// key in a composite literal, which names either a field or a value. +// Neither fields nor values are package names. +// Since they are not relevant to removing unused imports, we instruct +// freeishNames to omit composite-literal keys that are identifiers. +func trimNewImports(newImports []newImport, new ast.Node) []newImport { + free := map[string]bool{} + const omitComplitIdents = false + freeishNames(free, new, omitComplitIdents) + var res []newImport + for _, ni := range newImports { + if free[ni.pkgName] { + res = append(res, ni) + } + } + return res +} + type inlineCallResult struct { newImports []newImport // to add oldImports []oldImport // to remove @@ -586,144 +687,8 @@ func (st *state) inlineCall() (*inlineCallResult, error) { assign1 = func(v *types.Var) bool { return !updatedLocals[v] } } - // import map, initially populated with caller imports, and updated below - // with new imports necessary to reference free symbols in the callee. - // - // For simplicity we ignore existing dot imports, so that a qualified - // identifier (QI) in the callee is always represented by a QI in the caller, - // allowing us to treat a QI like a selection on a package name. - importMap := make(map[string][]string) // maps package path to local name(s) - var oldImports []oldImport // imports referenced only by caller.Call.Fun - - for _, imp := range caller.File.Imports { - if pkgName, ok := importedPkgName(caller.Info, imp); ok && - pkgName.Name() != "." 
&& - pkgName.Name() != "_" { - - // If the import's sole use is in caller.Call.Fun of the form p.F(...), - // where p.F is a qualified identifier, the p import may not be - // necessary. - // - // Only the qualified identifier case matters, as other references to - // imported package names in the Call.Fun expression (e.g. - // x.after(3*time.Second).f() or time.Second.String()) will remain after - // inlining, as arguments. - // - // If that is the case, proactively check if any of the callee FreeObjs - // need this import. Doing so eagerly simplifies the resulting logic. - needed := true - sel, ok := ast.Unparen(caller.Call.Fun).(*ast.SelectorExpr) - if ok && soleUse(caller.Info, pkgName) == sel.X { - needed = false // no longer needed by caller - // Check to see if any of the inlined free objects need this package. - for _, obj := range callee.FreeObjs { - if obj.PkgPath == pkgName.Imported().Path() && obj.Shadow[pkgName.Name()] == 0 { - needed = true // needed by callee - break - } - } - } - - if needed { - path := pkgName.Imported().Path() - importMap[path] = append(importMap[path], pkgName.Name()) - } else { - oldImports = append(oldImports, oldImport{pkgName: pkgName, spec: imp}) - } - } - } - - // importName finds an existing import name to use in a particular shadowing - // context. It is used to determine the set of new imports in - // getOrMakeImportName, and is also used for writing out names in inlining - // strategies below. - importName := func(pkgPath string, shadow shadowMap) string { - for _, name := range importMap[pkgPath] { - // Check that either the import preexisted, or that it was newly added - // (no PkgName) but is not shadowed, either in the callee (shadows) or - // caller (caller.lookup). - if shadow[name] == 0 { - found := caller.lookup(name) - if is[*types.PkgName](found) || found == nil { - return name - } - } - } - return "" - } - - // keep track of new imports that are necessary to reference any free names - // in the callee. 
- var newImports []newImport - - // getOrMakeImportName returns the local name for a given imported package path, - // adding one if it doesn't exists. - getOrMakeImportName := func(pkgPath, pkgName string, shadow shadowMap) string { - // Does an import already exist that works in this shadowing context? - if name := importName(pkgPath, shadow); name != "" { - return name - } - - newlyAdded := func(name string) bool { - for _, new := range newImports { - if new.pkgName == name { - return true - } - } - return false - } - - // shadowedInCaller reports whether a candidate package name - // already refers to a declaration in the caller. - shadowedInCaller := func(name string) bool { - obj := caller.lookup(name) - if obj == nil { - return false - } - // If obj will be removed, the name is available. - for _, old := range oldImports { - if old.pkgName == obj { - return false - } - } - return true - } - - // import added by callee - // - // Choose local PkgName based on last segment of - // package path plus, if needed, a numeric suffix to - // ensure uniqueness. - // - // "init" is not a legal PkgName. - // - // TODO(rfindley): is it worth preserving local package names for callee - // imports? Are they likely to be better or worse than the name we choose - // here? - base := pkgName - name := base - for n := 0; shadow[name] != 0 || shadowedInCaller(name) || newlyAdded(name) || name == "init"; n++ { - name = fmt.Sprintf("%s%d", base, n) - } - logf("adding import %s %q", name, pkgPath) - spec := &ast.ImportSpec{ - Path: &ast.BasicLit{ - Kind: token.STRING, - Value: strconv.Quote(pkgPath), - }, - } - // Use explicit pkgname (out of necessity) when it differs from the declared name, - // or (for good style) when it differs from base(pkgpath). 
- if name != pkgName || name != pathpkg.Base(pkgPath) { - spec.Name = makeIdent(name) - } - newImports = append(newImports, newImport{ - pkgName: name, - spec: spec, - }) - importMap[pkgPath] = append(importMap[pkgPath], name) - return name - } + // Extract information about the caller's imports. + istate := newImportState(logf, caller, callee) // Compute the renaming of the callee's free identifiers. objRenames := make([]ast.Expr, len(callee.FreeObjs)) // nil => no change @@ -751,7 +716,7 @@ func (st *state) inlineCall() (*inlineCallResult, error) { var newName ast.Expr if obj.Kind == "pkgname" { // Use locally appropriate import, creating as needed. - n := getOrMakeImportName(obj.PkgPath, obj.PkgName, obj.Shadow) + n := istate.localName(obj.PkgPath, obj.PkgName, obj.Shadow) newName = makeIdent(n) // imported package } else if !obj.ValidPos { // Built-in function, type, or value (e.g. nil, zero): @@ -796,7 +761,7 @@ func (st *state) inlineCall() (*inlineCallResult, error) { // Form a qualified identifier, pkg.Name. if qualify { - pkgName := getOrMakeImportName(obj.PkgPath, obj.PkgName, obj.Shadow) + pkgName := istate.localName(obj.PkgPath, obj.PkgName, obj.Shadow) newName = &ast.SelectorExpr{ X: makeIdent(pkgName), Sel: makeIdent(obj.Name), @@ -807,8 +772,8 @@ func (st *state) inlineCall() (*inlineCallResult, error) { } res := &inlineCallResult{ - newImports: newImports, - oldImports: oldImports, + newImports: istate.newImports, + oldImports: istate.oldImports, } // Parse callee function declaration. 
@@ -929,9 +894,7 @@ func (st *state) inlineCall() (*inlineCallResult, error) { elts = append(elts, arg.expr) pure = pure && arg.pure effects = effects || arg.effects - for k, v := range arg.freevars { - freevars[k] = v - } + maps.Copy(freevars, arg.freevars) } args = append(ordinary, &argument{ expr: &ast.CompositeLit{ @@ -1157,7 +1120,7 @@ func (st *state) inlineCall() (*inlineCallResult, error) { (!needBindingDecl || (bindingDecl != nil && len(bindingDecl.names) == 0)) { // Reduces to: { var (bindings); lhs... := rhs... } - if newStmts, ok := st.assignStmts(stmt, results, importName); ok { + if newStmts, ok := st.assignStmts(stmt, results, istate.importName); ok { logf("strategy: reduce assign-context call to { return exprs }") clearPositions(calleeDecl.Body) @@ -2313,7 +2276,8 @@ func createBindingDecl(logf logger, caller *Caller, args []*argument, calleeDecl free[name] = true } } - freeishNames(free, spec.Type) + const includeComplitIdents = true + freeishNames(free, spec.Type, includeComplitIdents) for name := range free { if names[name] { logf("binding decl would shadow free name %q", name) @@ -2444,36 +2408,6 @@ func freeVars(info *types.Info, e ast.Expr) map[string]bool { return free } -// freeishNames computes an over-approximation to the free names -// of the type syntax t, inserting values into the map. -// -// Because we don't have go/types annotations, we can't give an exact -// result in all cases. In particular, an array type [n]T might have a -// size such as unsafe.Sizeof(func() int{stmts...}()) and now the -// precise answer depends upon all the statement syntax too. But that -// never happens in practice. 
-func freeishNames(free map[string]bool, t ast.Expr) { - var visit func(n ast.Node) bool - visit = func(n ast.Node) bool { - switch n := n.(type) { - case *ast.Ident: - free[n.Name] = true - - case *ast.SelectorExpr: - ast.Inspect(n.X, visit) - return false // don't visit .Sel - - case *ast.Field: - ast.Inspect(n.Type, visit) - // Don't visit .Names: - // FuncType parameters, interface methods, struct fields - return false - } - return true - } - ast.Inspect(t, visit) -} - // effects reports whether an expression might change the state of the // program (through function calls and channel receives) and affect // the evaluation of subsequent expressions. @@ -3016,13 +2950,13 @@ func replaceNode(root ast.Node, from, to ast.Node) { } case reflect.Struct: - for i := 0; i < v.Type().NumField(); i++ { + for i := range v.Type().NumField() { visit(v.Field(i)) } case reflect.Slice: compact := false - for i := 0; i < v.Len(); i++ { + for i := range v.Len() { visit(v.Index(i)) if v.Index(i).IsNil() { compact = true @@ -3033,7 +2967,7 @@ func replaceNode(root ast.Node, from, to ast.Node) { // (Do this is a second pass to avoid // unnecessary writes in the common case.) 
j := 0 - for i := 0; i < v.Len(); i++ { + for i := range v.Len() { if !v.Index(i).IsNil() { v.Index(j).Set(v.Index(i)) j++ @@ -3093,7 +3027,7 @@ func clearPositions(root ast.Node) { if n != nil { v := reflect.ValueOf(n).Elem() // deref the pointer to struct fields := v.Type().NumField() - for i := 0; i < fields; i++ { + for i := range fields { f := v.Field(i) // Clearing Pos arbitrarily is destructive, // as its presence may be semantically significant @@ -3416,12 +3350,14 @@ func (st *state) assignStmts(callerStmt *ast.AssignStmt, returnOperands []ast.Ex freeNames = make(map[string]bool) // free(ish) names among rhs expressions nonTrivial = make(map[int]bool) // indexes in rhs of nontrivial result conversions ) + const includeComplitIdents = true + for i, expr := range callerStmt.Rhs { if expr == caller.Call { assert(callIdx == -1, "malformed (duplicative) AST") callIdx = i for j, returnOperand := range returnOperands { - freeishNames(freeNames, returnOperand) + freeishNames(freeNames, returnOperand, includeComplitIdents) rhs = append(rhs, returnOperand) if resultInfo[j]&nonTrivialResult != 0 { nonTrivial[i+j] = true @@ -3434,7 +3370,7 @@ func (st *state) assignStmts(callerStmt *ast.AssignStmt, returnOperands []ast.Ex // We must clone before clearing positions, since e came from the caller. 
expr = internalastutil.CloneNode(expr) clearPositions(expr) - freeishNames(freeNames, expr) + freeishNames(freeNames, expr, includeComplitIdents) rhs = append(rhs, expr) } } diff --git a/internal/refactor/inline/inline_test.go b/internal/refactor/inline/inline_test.go index 3be37d5ecde..a3934b5cd68 100644 --- a/internal/refactor/inline/inline_test.go +++ b/internal/refactor/inline/inline_test.go @@ -29,10 +29,33 @@ import ( "golang.org/x/tools/internal/expect" "golang.org/x/tools/internal/refactor/inline" "golang.org/x/tools/internal/testenv" + "golang.org/x/tools/internal/testfiles" "golang.org/x/tools/txtar" ) // TestData executes test scenarios specified by files in testdata/*.txtar. +// Each txtar file describes two sets of files, some containing Go source +// and others expected results. +// +// The Go source files and go.mod are parsed and type-checked as a Go module. +// Some of these files contain marker comments (in a form described below) describing +// the inlinings to perform and whether they should succeed or fail. A marker +// indicating success refers to another file in the txtar, not a .go +// file, that should contain the contents of the first file after inlining. +// +// The marker format for success is +// +// @inline(re"pat", wantfile) +// +// The first call in the marker's line that matches pat is inlined, and the contents +// of the resulting file must match the contents of wantfile. +// +// The marker format for failure is +// +// @inline(re"pat", re"errpat") +// +// The first argument selects the call for inlining as before, and the second +// is a regular expression that must match the text of resulting error. 
func TestData(t *testing.T) { testenv.NeedsGoPackages(t) @@ -41,7 +64,6 @@ func TestData(t *testing.T) { t.Fatal(err) } for _, file := range files { - file := file t.Run(filepath.Base(file), func(t *testing.T) { t.Parallel() @@ -56,10 +78,11 @@ func TestData(t *testing.T) { if err != nil { t.Fatal(err) } - dir := t.TempDir() - if err := extractTxtar(ar, dir); err != nil { + fs, err := txtar.FS(ar) + if err != nil { t.Fatal(err) } + dir := testfiles.CopyToTmp(t, fs) // Load packages. cfg := &packages.Config{ @@ -118,8 +141,9 @@ func TestData(t *testing.T) { var want any switch x := note.Args[1].(type) { case string, expect.Identifier: + name := fmt.Sprint(x) for _, file := range ar.Files { - if file.Name == fmt.Sprint(x) { + if file.Name == name { want = file.Data break } @@ -1769,7 +1793,6 @@ func TestRedundantConversions(t *testing.T) { func runTests(t *testing.T, tests []testcase) { for _, test := range tests { - test := test t.Run(test.descr, func(t *testing.T) { fset := token.NewFileSet() mustParse := func(filename string, content any) *ast.File { @@ -1860,7 +1883,7 @@ func runTests(t *testing.T, tests []testcase) { res, err := doIt() // Want error? - if rest := strings.TrimPrefix(test.want, "error: "); rest != test.want { + if rest, ok := strings.CutPrefix(test.want, "error: "); ok { if err == nil { t.Fatalf("unexpected success: want error matching %q", rest) } @@ -1941,20 +1964,6 @@ func checkTranscode(callee *inline.Callee) error { return nil } -// TODO(adonovan): publish this a helper (#61386). -func extractTxtar(ar *txtar.Archive, dir string) error { - for _, file := range ar.Files { - name := filepath.Join(dir, file.Name) - if err := os.MkdirAll(filepath.Dir(name), 0777); err != nil { - return err - } - if err := os.WriteFile(name, file.Data, 0666); err != nil { - return err - } - } - return nil -} - // deepHash computes a cryptographic hash of an ast.Node so that // if the data structure is mutated, the hash changes. 
// It assumes Go variables do not change address. diff --git a/internal/refactor/inline/testdata/assignment.txtar b/internal/refactor/inline/testdata/assignment.txtar index c79c1732934..e201d601480 100644 --- a/internal/refactor/inline/testdata/assignment.txtar +++ b/internal/refactor/inline/testdata/assignment.txtar @@ -103,9 +103,7 @@ func _() { -- b2 -- package a -import ( - "testdata/b" -) +import "testdata/b" func _() { var y int diff --git a/internal/refactor/inline/testdata/import-shadow-1.txtar b/internal/refactor/inline/testdata/import-shadow-1.txtar new file mode 100644 index 00000000000..dc960ac3213 --- /dev/null +++ b/internal/refactor/inline/testdata/import-shadow-1.txtar @@ -0,0 +1,48 @@ +This file is identical to import-shadow.txtar except +that the imports in a/a.go are not grouped. +That is unusual, since goimports and related tools +form groups. + +The result of inlining (bresult) also looks strange, +but again, goimports would fix it up. + +-- go.mod -- +module testdata +go 1.12 + +-- a/a.go -- +package a + +import "testdata/b" +import "log" + +func A() { + const log = "shadow" + b.B() //@ inline(re"B", bresult) +} + +var _ log.Logger + +-- b/b.go -- +package b + +import "log" + +func B() { + log.Printf("") +} + +-- bresult -- +package a + +import ( + log0 "log" +) +import "log" + +func A() { + const log = "shadow" + log0.Printf("") //@ inline(re"B", bresult) +} + +var _ log.Logger diff --git a/internal/refactor/inline/testdata/import-shadow-2.txtar b/internal/refactor/inline/testdata/import-shadow-2.txtar new file mode 100644 index 00000000000..14cd045c6c3 --- /dev/null +++ b/internal/refactor/inline/testdata/import-shadow-2.txtar @@ -0,0 +1,75 @@ +See import-shadow.txtar for a description. 
+ +-- go.mod -- +module testdata +go 1.12 + +-- a/a.go -- +package a + +import "testdata/b" + +var x b.T + +func A(b int) { + x.F() //@ inline(re"F", fresult) +} + +-- b/b.go -- +package b + +type T struct{} + +func (T) F() { + One() + Two() +} + +func One() {} +func Two() {} + +-- fresult -- +package a + +import ( + "testdata/b" + b0 "testdata/b" +) + +var x b.T + +func A(b int) { + b0.One() + b0.Two() //@ inline(re"F", fresult) +} + +-- d/d.go -- +package d + +import "testdata/e" + +func D() { + const log = "shadow" + e.E() //@ inline(re"E", eresult) +} + +-- e/e.go -- +package e + +import "log" + +func E() { + log.Printf("") +} + +-- eresult -- +package d + +import ( + log0 "log" +) + +func D() { + const log = "shadow" + log0.Printf("") //@ inline(re"E", eresult) +} diff --git a/internal/refactor/inline/testdata/import-shadow.txtar b/internal/refactor/inline/testdata/import-shadow.txtar index 9d1abdb9e95..c4ea9a61624 100644 --- a/internal/refactor/inline/testdata/import-shadow.txtar +++ b/internal/refactor/inline/testdata/import-shadow.txtar @@ -2,10 +2,10 @@ Just because a package (e.g. log) is imported by the caller, and the name log is in scope, doesn't mean the name in scope refers to the package: it could be locally shadowed. -In all three scenarios below, renaming import with a fresh name is -added because the usual name is locally shadowed: in cases 1, 2 an -existing import is shadowed by (respectively) a local constant, -parameter; in case 3 there is no existing import. +In all three scenarios in this file and import-shadow-2.txtar, a renaming +import with a fresh name is added because the usual name is locally +shadowed: in cases 1, 2 an existing import is shadowed by (respectively) +a local constant, parameter; in case 3 there is no existing import. 
-- go.mod -- module testdata @@ -14,8 +14,10 @@ go 1.12 -- a/a.go -- package a -import "testdata/b" -import "log" +import ( + "testdata/b" + "log" +) func A() { const log = "shadow" @@ -47,77 +49,3 @@ func A() { } var _ log.Logger - --- go.mod -- -module testdata -go 1.12 - --- a/a.go -- -package a - -import "testdata/b" - -var x b.T - -func A(b int) { - x.F() //@ inline(re"F", fresult) -} - --- b/b.go -- -package b - -type T struct{} - -func (T) F() { - One() - Two() -} - -func One() {} -func Two() {} - --- fresult -- -package a - -import ( - "testdata/b" - b0 "testdata/b" -) - -var x b.T - -func A(b int) { - b0.One() - b0.Two() //@ inline(re"F", fresult) -} - --- d/d.go -- -package d - -import "testdata/e" - -func D() { - const log = "shadow" - e.E() //@ inline(re"E", eresult) -} - --- e/e.go -- -package e - -import "log" - -func E() { - log.Printf("") -} - --- eresult -- -package d - -import ( - log0 "log" -) - -func D() { - const log = "shadow" - log0.Printf("") //@ inline(re"E", eresult) -} diff --git a/internal/stdlib/deps.go b/internal/stdlib/deps.go index 7cca431cd65..c50bf406b7f 100644 --- a/internal/stdlib/deps.go +++ b/internal/stdlib/deps.go @@ -12,348 +12,348 @@ type pkginfo struct { } var deps = [...]pkginfo{ - {"archive/tar", "\x03k\x03E5\x01\v\x01#\x01\x01\x02\x05\t\x02\x01\x02\x02\v"}, - {"archive/zip", "\x02\x04a\a\x16\x0205\x01+\x05\x01\x10\x03\x02\r\x04"}, - {"bufio", "\x03k}E\x13"}, - {"bytes", "n+R\x03\fG\x02\x02"}, + {"archive/tar", "\x03j\x03E6\x01\v\x01\"\x01\x01\x02\x05\n\x02\x01\x02\x02\v"}, + {"archive/zip", "\x02\x04`\a\x16\x0206\x01*\x05\x01\x11\x03\x02\r\x04"}, + {"bufio", "\x03j~E\x13"}, + {"bytes", "m+S\x03\fG\x02\x02"}, {"cmp", ""}, {"compress/bzip2", "\x02\x02\xe7\x01B"}, - {"compress/flate", "\x02l\x03z\r\x024\x01\x03"}, - {"compress/gzip", "\x02\x04a\a\x03\x15eT"}, - {"compress/lzw", "\x02l\x03z"}, - {"compress/zlib", "\x02\x04a\a\x03\x13\x01f"}, + {"compress/flate", "\x02k\x03{\r\x024\x01\x03"}, + {"compress/gzip", 
"\x02\x04`\a\x03\x15fT"}, + {"compress/lzw", "\x02k\x03{"}, + {"compress/zlib", "\x02\x04`\a\x03\x13\x01g"}, {"container/heap", "\xae\x02"}, {"container/list", ""}, {"container/ring", ""}, - {"context", "n\\h\x01\f"}, - {"crypto", "\x84\x01gD"}, + {"context", "m\\i\x01\f"}, + {"crypto", "\x83\x01hD"}, {"crypto/aes", "\x10\n\a\x8e\x02"}, - {"crypto/cipher", "\x03\x1e\x01\x01\x1d\x11\x1d,Q"}, - {"crypto/des", "\x10\x13\x1d.,\x95\x01\x03"}, - {"crypto/dsa", "@\x04*}\x0e"}, - {"crypto/ecdh", "\x03\v\f\x0e\x04\x14\x04\r\x1d}"}, - {"crypto/ecdsa", "\x0e\x05\x03\x04\x01\x0e\x16\x01\x04\f\x01\x1d}\x0e\x04K\x01"}, - {"crypto/ed25519", "\x0e\x1c\x16\n\a\x1d}D"}, - {"crypto/elliptic", "0>}\x0e9"}, - {"crypto/fips140", " \x05\x91\x01"}, - {"crypto/hkdf", "-\x12\x01.\x16"}, - {"crypto/hmac", "\x1a\x14\x11\x01\x113"}, - {"crypto/internal/boring", "\x0e\x02\rg"}, + {"crypto/cipher", "\x03\x1e\x01\x01\x1d\x11\x1c,R"}, + {"crypto/des", "\x10\x13\x1d-,\x96\x01\x03"}, + {"crypto/dsa", "@\x04)~\x0e"}, + {"crypto/ecdh", "\x03\v\f\x0e\x04\x14\x04\r\x1c~"}, + {"crypto/ecdsa", "\x0e\x05\x03\x04\x01\x0e\x16\x01\x04\f\x01\x1c~\x0e\x04K\x01"}, + {"crypto/ed25519", "\x0e\x1c\x16\n\a\x1c~D"}, + {"crypto/elliptic", "0=~\x0e9"}, + {"crypto/fips140", " \x05\x90\x01"}, + {"crypto/hkdf", "-\x12\x01-\x16"}, + {"crypto/hmac", "\x1a\x14\x11\x01\x112"}, + {"crypto/internal/boring", "\x0e\x02\rf"}, {"crypto/internal/boring/bbig", "\x1a\xdf\x01L"}, {"crypto/internal/boring/bcache", "\xb3\x02\x12"}, {"crypto/internal/boring/sig", ""}, - {"crypto/internal/cryptotest", "\x03\r\n)\x0e\x1a\x06\x13\x12#\a\t\x11\x11\x11\x1b\x01\f\f\x05\n"}, + {"crypto/internal/cryptotest", "\x03\r\n)\x0e\x19\x06\x13\x12#\a\t\x11\x12\x11\x1a\r\r\x05\n"}, {"crypto/internal/entropy", "E"}, - {"crypto/internal/fips140", ">0}9\f\x15"}, - {"crypto/internal/fips140/aes", "\x03\x1d\x03\x02\x13\x04\x01\x01\x05+\x8c\x015"}, - {"crypto/internal/fips140/aes/gcm", " \x01\x02\x02\x02\x11\x04\x01\x06+\x8a\x01"}, + {"crypto/internal/fips140", 
">/~8\r\x15"}, + {"crypto/internal/fips140/aes", "\x03\x1d\x03\x02\x13\x04\x01\x01\x05*\x8d\x015"}, + {"crypto/internal/fips140/aes/gcm", " \x01\x02\x02\x02\x11\x04\x01\x06*\x8b\x01"}, {"crypto/internal/fips140/alias", "\xc5\x02"}, - {"crypto/internal/fips140/bigmod", "%\x17\x01\x06+\x8c\x01"}, + {"crypto/internal/fips140/bigmod", "%\x17\x01\x06*\x8d\x01"}, {"crypto/internal/fips140/check", " \x0e\x06\b\x02\xad\x01Z"}, - {"crypto/internal/fips140/check/checktest", "%\xff\x01!"}, - {"crypto/internal/fips140/drbg", "\x03\x1c\x01\x01\x04\x13\x04\b\x01)}\x0f8"}, - {"crypto/internal/fips140/ecdh", "\x03\x1d\x05\x02\t\f2}\x0f8"}, - {"crypto/internal/fips140/ecdsa", "\x03\x1d\x04\x01\x02\a\x02\x068}G"}, - {"crypto/internal/fips140/ed25519", "\x03\x1d\x05\x02\x04\v8\xc1\x01\x03"}, - {"crypto/internal/fips140/edwards25519", "%\a\f\x042\x8c\x018"}, - {"crypto/internal/fips140/edwards25519/field", "%\x13\x042\x8c\x01"}, - {"crypto/internal/fips140/hkdf", "\x03\x1d\x05\t\x06:"}, - {"crypto/internal/fips140/hmac", "\x03\x1d\x14\x01\x018"}, - {"crypto/internal/fips140/mlkem", "\x03\x1d\x05\x02\x0e\x03\x042"}, - {"crypto/internal/fips140/nistec", "%\f\a\x042\x8c\x01*\x0e\x13"}, - {"crypto/internal/fips140/nistec/fiat", "%\x136\x8c\x01"}, - {"crypto/internal/fips140/pbkdf2", "\x03\x1d\x05\t\x06:"}, - {"crypto/internal/fips140/rsa", "\x03\x1d\x04\x01\x02\r\x01\x01\x026}G"}, - {"crypto/internal/fips140/sha256", "\x03\x1d\x1c\x01\x06+\x8c\x01"}, - {"crypto/internal/fips140/sha3", "\x03\x1d\x18\x04\x011\x8c\x01K"}, - {"crypto/internal/fips140/sha512", "\x03\x1d\x1c\x01\x06+\x8c\x01"}, + {"crypto/internal/fips140/check/checktest", "%\xfe\x01\""}, + {"crypto/internal/fips140/drbg", "\x03\x1c\x01\x01\x04\x13\x04\b\x01(~\x0f8"}, + {"crypto/internal/fips140/ecdh", "\x03\x1d\x05\x02\t\f1~\x0f8"}, + {"crypto/internal/fips140/ecdsa", "\x03\x1d\x04\x01\x02\a\x02\x067~G"}, + {"crypto/internal/fips140/ed25519", "\x03\x1d\x05\x02\x04\v7\xc2\x01\x03"}, + {"crypto/internal/fips140/edwards25519", 
"%\a\f\x041\x8d\x018"}, + {"crypto/internal/fips140/edwards25519/field", "%\x13\x041\x8d\x01"}, + {"crypto/internal/fips140/hkdf", "\x03\x1d\x05\t\x069"}, + {"crypto/internal/fips140/hmac", "\x03\x1d\x14\x01\x017"}, + {"crypto/internal/fips140/mlkem", "\x03\x1d\x05\x02\x0e\x03\x041"}, + {"crypto/internal/fips140/nistec", "%\f\a\x041\x8d\x01)\x0f\x13"}, + {"crypto/internal/fips140/nistec/fiat", "%\x135\x8d\x01"}, + {"crypto/internal/fips140/pbkdf2", "\x03\x1d\x05\t\x069"}, + {"crypto/internal/fips140/rsa", "\x03\x1d\x04\x01\x02\r\x01\x01\x025~G"}, + {"crypto/internal/fips140/sha256", "\x03\x1d\x1c\x01\x06*\x8d\x01"}, + {"crypto/internal/fips140/sha3", "\x03\x1d\x18\x04\x010\x8d\x01K"}, + {"crypto/internal/fips140/sha512", "\x03\x1d\x1c\x01\x06*\x8d\x01"}, {"crypto/internal/fips140/ssh", " \x05"}, - {"crypto/internal/fips140/subtle", "#\x19\xbe\x01"}, - {"crypto/internal/fips140/tls12", "\x03\x1d\x05\t\x06\x028"}, - {"crypto/internal/fips140/tls13", "\x03\x1d\x05\b\a\b2"}, + {"crypto/internal/fips140/subtle", "#"}, + {"crypto/internal/fips140/tls12", "\x03\x1d\x05\t\x06\x027"}, + {"crypto/internal/fips140/tls13", "\x03\x1d\x05\b\a\b1"}, {"crypto/internal/fips140deps", ""}, - {"crypto/internal/fips140deps/byteorder", "\x9a\x01"}, - {"crypto/internal/fips140deps/cpu", "\xae\x01\a"}, - {"crypto/internal/fips140deps/godebug", "\xb6\x01"}, - {"crypto/internal/fips140hash", "5\x1a5\xc1\x01"}, - {"crypto/internal/fips140only", "'\r\x01\x01N25"}, + {"crypto/internal/fips140deps/byteorder", "\x99\x01"}, + {"crypto/internal/fips140deps/cpu", "\xad\x01\a"}, + {"crypto/internal/fips140deps/godebug", "\xb5\x01"}, + {"crypto/internal/fips140hash", "5\x1a4\xc2\x01"}, + {"crypto/internal/fips140only", "'\r\x01\x01M26"}, {"crypto/internal/fips140test", ""}, - {"crypto/internal/hpke", "\x0e\x01\x01\x03\x1a\x1d$,`M"}, + {"crypto/internal/hpke", "\x0e\x01\x01\x03\x1a\x1d#,aM"}, {"crypto/internal/impl", "\xb0\x02"}, {"crypto/internal/randutil", "\xeb\x01\x12"}, - 
{"crypto/internal/sysrand", "\xd7\x01@\x1b\x01\f\x06"}, - {"crypto/internal/sysrand/internal/seccomp", "n"}, - {"crypto/md5", "\x0e2.\x16\x16`"}, + {"crypto/internal/sysrand", "mi\"\x1e\r\x0f\x01\x01\v\x06"}, + {"crypto/internal/sysrand/internal/seccomp", "m"}, + {"crypto/md5", "\x0e2-\x16\x16a"}, {"crypto/mlkem", "/"}, - {"crypto/pbkdf2", "2\r\x01.\x16"}, - {"crypto/rand", "\x1a\x06\a\x19\x04\x01)}\x0eL"}, - {"crypto/rc4", "#\x1d.\xc1\x01"}, - {"crypto/rsa", "\x0e\f\x01\t\x0f\f\x01\x04\x06\a\x1d\x03\x1325\r\x01"}, - {"crypto/sha1", "\x0e\f&.\x16\x16\x14L"}, - {"crypto/sha256", "\x0e\f\x1aP"}, - {"crypto/sha3", "\x0e'O\xc1\x01"}, - {"crypto/sha512", "\x0e\f\x1cN"}, - {"crypto/subtle", "8\x98\x01T"}, - {"crypto/tls", "\x03\b\x02\x01\x01\x01\x01\x02\x01\x01\x01\x03\x01\a\x01\v\x02\n\x01\b\x05\x03\x01\x01\x01\x01\x02\x01\x02\x01\x18\x02\x03\x13\x16\x14\b5\x16\x16\r\t\x01\x01\x01\x02\x01\f\x06\x02\x01"}, + {"crypto/pbkdf2", "2\r\x01-\x16"}, + {"crypto/rand", "\x1a\x06\a\x19\x04\x01(~\x0eL"}, + {"crypto/rc4", "#\x1d-\xc2\x01"}, + {"crypto/rsa", "\x0e\f\x01\t\x0f\f\x01\x04\x06\a\x1c\x03\x1326\r\x01"}, + {"crypto/sha1", "\x0e\f&-\x16\x16\x14M"}, + {"crypto/sha256", "\x0e\f\x1aO"}, + {"crypto/sha3", "\x0e'N\xc2\x01"}, + {"crypto/sha512", "\x0e\f\x1cM"}, + {"crypto/subtle", "8\x96\x01U"}, + {"crypto/tls", "\x03\b\x02\x01\x01\x01\x01\x02\x01\x01\x01\x03\x01\a\x01\v\x02\n\x01\b\x05\x03\x01\x01\x01\x01\x02\x01\x02\x01\x17\x02\x03\x13\x16\x14\b6\x16\x15\r\n\x01\x01\x01\x02\x01\f\x06\x02\x01"}, {"crypto/tls/internal/fips140tls", " \x93\x02"}, - {"crypto/x509", "\x03\v\x01\x01\x01\x01\x01\x01\x01\x011\x03\x02\x01\x01\x02\x05\x01\x0e\x06\x02\x02\x03E5\x03\t\x01\x01\x01\a\x10\x05\t\x05\v\x01\x02\r\x02\x01\x01\x02\x03\x01"}, - {"crypto/x509/internal/macos", "\x03k'\x8f\x01\v\x10\x06"}, - {"crypto/x509/pkix", "d\x06\a\x88\x01F"}, - {"database/sql", "\x03\nK\x16\x03z\f\x06\"\x05\t\x02\x03\x01\f\x02\x02\x02"}, - {"database/sql/driver", "\ra\x03\xae\x01\x10\x10"}, - {"debug/buildinfo", 
"\x03X\x02\x01\x01\b\a\x03`\x18\x02\x01+\x10\x1e"}, - {"debug/dwarf", "\x03d\a\x03z1\x12\x01\x01"}, - {"debug/elf", "\x03\x06Q\r\a\x03`\x19\x01,\x18\x01\x15"}, - {"debug/gosym", "\x03d\n\xbd\x01\x01\x01\x02"}, - {"debug/macho", "\x03\x06Q\r\n`\x1a,\x18\x01"}, - {"debug/pe", "\x03\x06Q\r\a\x03`\x1a,\x18\x01\x15"}, - {"debug/plan9obj", "g\a\x03`\x1a,"}, - {"embed", "n+:\x18\x01S"}, + {"crypto/x509", "\x03\v\x01\x01\x01\x01\x01\x01\x01\x011\x03\x02\x01\x01\x02\x05\x0e\x06\x02\x02\x03E\x033\x01\x02\t\x01\x01\x01\a\x0f\x05\x01\x06\x02\x05\f\x01\x02\r\x02\x01\x01\x02\x03\x01"}, + {"crypto/x509/pkix", "c\x06\a\x89\x01F"}, + {"database/sql", "\x03\nJ\x16\x03{\f\x06!\x05\n\x02\x03\x01\f\x02\x02\x02"}, + {"database/sql/driver", "\r`\x03\xae\x01\x11\x10"}, + {"debug/buildinfo", "\x03W\x02\x01\x01\b\a\x03`\x19\x02\x01*\x0f "}, + {"debug/dwarf", "\x03c\a\x03{0\x13\x01\x01"}, + {"debug/elf", "\x03\x06P\r\a\x03`\x1a\x01+\x19\x01\x15"}, + {"debug/gosym", "\x03c\n\xbe\x01\x01\x01\x02"}, + {"debug/macho", "\x03\x06P\r\n`\x1b+\x19\x01"}, + {"debug/pe", "\x03\x06P\r\a\x03`\x1b+\x19\x01\x15"}, + {"debug/plan9obj", "f\a\x03`\x1b+"}, + {"embed", "m+:\x19\x01S"}, {"embed/internal/embedtest", ""}, {"encoding", ""}, {"encoding/ascii85", "\xeb\x01D"}, - {"encoding/asn1", "\x03k\x03\x87\x01\x01&\x0e\x02\x01\x0f\x03\x01"}, + {"encoding/asn1", "\x03j\x03\x88\x01\x01%\x0f\x02\x01\x0f\x03\x01"}, {"encoding/base32", "\xeb\x01B\x02"}, - {"encoding/base64", "\x9a\x01QB\x02"}, - {"encoding/binary", "n}\r'\x0e\x05"}, - {"encoding/csv", "\x02\x01k\x03zE\x11\x02"}, - {"encoding/gob", "\x02`\x05\a\x03`\x1a\f\x01\x02\x1d\b\x13\x01\x0e\x02"}, - {"encoding/hex", "n\x03zB\x03"}, - {"encoding/json", "\x03\x01^\x04\b\x03z\r'\x0e\x02\x01\x02\x0f\x01\x01\x02"}, - {"encoding/pem", "\x03c\b}B\x03"}, - {"encoding/xml", "\x02\x01_\f\x03z4\x05\v\x01\x02\x0f\x02"}, - {"errors", "\xca\x01{"}, - {"expvar", "kK9\t\n\x15\r\t\x02\x03\x01\x10"}, - {"flag", "b\f\x03z,\b\x05\t\x02\x01\x0f"}, - {"fmt", 
"nE8\r\x1f\b\x0e\x02\x03\x11"}, - {"go/ast", "\x03\x01m\x0f\x01j\x03)\b\x0e\x02\x01"}, + {"encoding/base64", "f\x85\x01B\x02"}, + {"encoding/binary", "m~\r&\x0f\x05"}, + {"encoding/csv", "\x02\x01j\x03{E\x11\x02"}, + {"encoding/gob", "\x02_\x05\a\x03`\x1b\f\x01\x02\x1c\b\x14\x01\x0e\x02"}, + {"encoding/hex", "m\x03{B\x03"}, + {"encoding/json", "\x03\x01]\x04\b\x03{\r&\x0f\x02\x01\x02\x0f\x01\x01\x02"}, + {"encoding/pem", "\x03b\b~B\x03"}, + {"encoding/xml", "\x02\x01^\f\x03{3\x05\f\x01\x02\x0f\x02"}, + {"errors", "\xc9\x01|"}, + {"expvar", "jK:\t\n\x14\r\n\x02\x03\x01\x10"}, + {"flag", "a\f\x03{+\b\x05\n\x02\x01\x0f"}, + {"fmt", "mE9\r\x1e\b\x0f\x02\x03\x11"}, + {"go/ast", "\x03\x01l\x0f\x01k\x03(\b\x0f\x02\x01"}, {"go/ast/internal/tests", ""}, - {"go/build", "\x02\x01k\x03\x01\x03\x02\a\x02\x01\x17\x1e\x04\x02\t\x14\x12\x01+\x01\x04\x01\a\t\x02\x01\x11\x02\x02"}, - {"go/build/constraint", "n\xc1\x01\x01\x11\x02"}, - {"go/constant", "q\x10w\x01\x015\x01\x02\x11"}, - {"go/doc", "\x04m\x01\x06\t=-1\x11\x02\x01\x11\x02"}, - {"go/doc/comment", "\x03n\xbc\x01\x01\x01\x01\x11\x02"}, - {"go/format", "\x03n\x01\f\x01\x02jE"}, - {"go/importer", "t\a\x01\x01\x04\x01i9"}, - {"go/internal/gccgoimporter", "\x02\x01X\x13\x03\x05\v\x01g\x02,\x01\x05\x12\x01\v\b"}, - {"go/internal/gcimporter", "\x02o\x10\x01/\x05\x0e',\x16\x03\x02"}, - {"go/internal/srcimporter", "q\x01\x02\n\x03\x01i,\x01\x05\x13\x02\x13"}, - {"go/parser", "\x03k\x03\x01\x03\v\x01j\x01+\x06\x13"}, - {"go/printer", "q\x01\x03\x03\tj\r\x1f\x16\x02\x01\x02\n\x05\x02"}, - {"go/scanner", "\x03n\x10j2\x11\x01\x12\x02"}, - {"go/token", "\x04m\xbc\x01\x02\x03\x01\x0e\x02"}, - {"go/types", "\x03\x01\x06d\x03\x01\x04\b\x03\x02\x15\x1e\x06+\x04\x03\n%\a\t\x01\x01\x01\x02\x01\x0e\x02\x02"}, - {"go/version", "\xbb\x01u"}, + {"go/build", "\x02\x01j\x03\x01\x03\x02\a\x02\x01\x17\x1e\x04\x02\t\x14\x13\x01*\x01\x04\x01\a\n\x02\x01\x11\x02\x02"}, + {"go/build/constraint", "m\xc2\x01\x01\x11\x02"}, + {"go/constant", 
"p\x10x\x01\x015\x01\x02\x11"}, + {"go/doc", "\x04l\x01\x06\t=.0\x12\x02\x01\x11\x02"}, + {"go/doc/comment", "\x03m\xbd\x01\x01\x01\x01\x11\x02"}, + {"go/format", "\x03m\x01\f\x01\x02kE"}, + {"go/importer", "s\a\x01\x01\x04\x01j8"}, + {"go/internal/gccgoimporter", "\x02\x01W\x13\x03\x05\v\x01h\x02+\x01\x05\x13\x01\v\b"}, + {"go/internal/gcimporter", "\x02n\x10\x01/\x05\x0e(+\x17\x03\x02"}, + {"go/internal/srcimporter", "p\x01\x02\n\x03\x01j+\x01\x05\x14\x02\x13"}, + {"go/parser", "\x03j\x03\x01\x03\v\x01k\x01*\x06\x14"}, + {"go/printer", "p\x01\x03\x03\tk\r\x1e\x17\x02\x01\x02\n\x05\x02"}, + {"go/scanner", "\x03m\x10k1\x12\x01\x12\x02"}, + {"go/token", "\x04l\xbd\x01\x02\x03\x01\x0e\x02"}, + {"go/types", "\x03\x01\x06c\x03\x01\x04\b\x03\x02\x15\x1e\x06,\x04\x03\n$\a\n\x01\x01\x01\x02\x01\x0e\x02\x02"}, + {"go/version", "\xba\x01v"}, {"hash", "\xeb\x01"}, - {"hash/adler32", "n\x16\x16"}, - {"hash/crc32", "n\x16\x16\x14\x84\x01\x01"}, - {"hash/crc64", "n\x16\x16\x98\x01"}, - {"hash/fnv", "n\x16\x16`"}, - {"hash/maphash", "\x95\x01\x05\x1b\x03@M"}, + {"hash/adler32", "m\x16\x16"}, + {"hash/crc32", "m\x16\x16\x14\x85\x01\x01\x12"}, + {"hash/crc64", "m\x16\x16\x99\x01"}, + {"hash/fnv", "m\x16\x16a"}, + {"hash/maphash", "\x94\x01\x05\x1b\x03AM"}, {"html", "\xb0\x02\x02\x11"}, - {"html/template", "\x03h\x06\x19,5\x01\v \x05\x01\x02\x03\r\x01\x02\v\x01\x03\x02"}, - {"image", "\x02l\x1f^\x0f5\x03\x01"}, + {"html/template", "\x03g\x06\x19,6\x01\v\x1f\x05\x01\x02\x03\x0e\x01\x02\v\x01\x03\x02"}, + {"image", "\x02k\x1f_\x0f5\x03\x01"}, {"image/color", ""}, - {"image/color/palette", "\x8d\x01"}, - {"image/draw", "\x8c\x01\x01\x04"}, - {"image/gif", "\x02\x01\x05f\x03\x1b\x01\x01\x01\vQ"}, - {"image/internal/imageutil", "\x8c\x01"}, - {"image/jpeg", "\x02l\x1e\x01\x04Z"}, - {"image/png", "\x02\a^\n\x13\x02\x06\x01^D"}, - {"index/suffixarray", "\x03d\a}\r*\v\x01"}, - {"internal/abi", "\xb5\x01\x90\x01"}, + {"image/color/palette", "\x8c\x01"}, + {"image/draw", "\x8b\x01\x01\x04"}, 
+ {"image/gif", "\x02\x01\x05e\x03\x1b\x01\x01\x01\vR"}, + {"image/internal/imageutil", "\x8b\x01"}, + {"image/jpeg", "\x02k\x1e\x01\x04["}, + {"image/png", "\x02\a]\n\x13\x02\x06\x01_D"}, + {"index/suffixarray", "\x03c\a~\r)\f\x01"}, + {"internal/abi", "\xb4\x01\x91\x01"}, {"internal/asan", "\xc5\x02"}, - {"internal/bisect", "\xa4\x02\x0e\x01"}, - {"internal/buildcfg", "qG_\x06\x02\x05\v\x01"}, - {"internal/bytealg", "\xae\x01\x97\x01"}, + {"internal/bisect", "\xa3\x02\x0f\x01"}, + {"internal/buildcfg", "pG_\x06\x02\x05\f\x01"}, + {"internal/bytealg", "\xad\x01\x98\x01"}, {"internal/byteorder", ""}, {"internal/cfg", ""}, - {"internal/chacha8rand", "\x9a\x01\x1b\x90\x01"}, + {"internal/chacha8rand", "\x99\x01\x1b\x91\x01"}, {"internal/copyright", ""}, {"internal/coverage", ""}, {"internal/coverage/calloc", ""}, - {"internal/coverage/cfile", "k\x06\x17\x16\x01\x02\x01\x01\x01\x01\x01\x01\x01$\x01\x1e,\x06\a\v\x01\x03\f\x06"}, - {"internal/coverage/cformat", "\x04m-\x04I\f6\x01\x02\f"}, - {"internal/coverage/cmerge", "q-Z"}, - {"internal/coverage/decodecounter", "g\n-\v\x02@,\x18\x16"}, - {"internal/coverage/decodemeta", "\x02e\n\x17\x16\v\x02@,"}, - {"internal/coverage/encodecounter", "\x02e\n-\f\x01\x02>\f \x16"}, - {"internal/coverage/encodemeta", "\x02\x01d\n\x13\x04\x16\r\x02>,."}, - {"internal/coverage/pods", "\x04m-y\x06\x05\v\x02\x01"}, + {"internal/coverage/cfile", "j\x06\x17\x16\x01\x02\x01\x01\x01\x01\x01\x01\x01#\x01 +\x06\a\f\x01\x03\f\x06"}, + {"internal/coverage/cformat", "\x04l-\x04J\f6\x01\x02\f"}, + {"internal/coverage/cmerge", "p-["}, + {"internal/coverage/decodecounter", "f\n-\v\x02A+\x19\x16"}, + {"internal/coverage/decodemeta", "\x02d\n\x17\x16\v\x02A+"}, + {"internal/coverage/encodecounter", "\x02d\n-\f\x01\x02?\f\x1f\x17"}, + {"internal/coverage/encodemeta", "\x02\x01c\n\x13\x04\x16\r\x02?+/"}, + {"internal/coverage/pods", "\x04l-y\x06\x05\f\x02\x01"}, {"internal/coverage/rtcov", "\xc5\x02"}, - {"internal/coverage/slicereader", "g\nzZ"}, - 
{"internal/coverage/slicewriter", "qz"}, - {"internal/coverage/stringtab", "q8\x04>"}, + {"internal/coverage/slicereader", "f\n{Z"}, + {"internal/coverage/slicewriter", "p{"}, + {"internal/coverage/stringtab", "p8\x04?"}, {"internal/coverage/test", ""}, {"internal/coverage/uleb128", ""}, {"internal/cpu", "\xc5\x02"}, - {"internal/dag", "\x04m\xbc\x01\x03"}, - {"internal/diff", "\x03n\xbd\x01\x02"}, - {"internal/exportdata", "\x02\x01k\x03\x03]\x1a,\x01\x05\x12\x01\x02"}, - {"internal/filepathlite", "n+:\x19A"}, - {"internal/fmtsort", "\x04\x9b\x02\x0e"}, - {"internal/fuzz", "\x03\nA\x19\x04\x03\x03\x01\f\x0355\r\x02\x1d\x01\x05\x02\x05\v\x01\x02\x01\x01\v\x04\x02"}, + {"internal/dag", "\x04l\xbd\x01\x03"}, + {"internal/diff", "\x03m\xbe\x01\x02"}, + {"internal/exportdata", "\x02\x01j\x03\x03]\x1b+\x01\x05\x13\x01\x02"}, + {"internal/filepathlite", "m+:\x1aA"}, + {"internal/fmtsort", "\x04\x9a\x02\x0f"}, + {"internal/fuzz", "\x03\nA\x18\x04\x03\x03\x01\f\x0356\r\x02\x1c\x01\x05\x02\x05\f\x01\x02\x01\x01\v\x04\x02"}, {"internal/goarch", ""}, - {"internal/godebug", "\x97\x01 {\x01\x12"}, + {"internal/godebug", "\x96\x01 |\x01\x12"}, {"internal/godebugs", ""}, {"internal/goexperiment", ""}, {"internal/goos", ""}, - {"internal/goroot", "\x97\x02\x01\x05\x13\x02"}, + {"internal/goroot", "\x96\x02\x01\x05\x14\x02"}, {"internal/gover", "\x04"}, {"internal/goversion", ""}, {"internal/itoa", ""}, - {"internal/lazyregexp", "\x97\x02\v\x0e\x02"}, - {"internal/lazytemplate", "\xeb\x01,\x19\x02\v"}, + {"internal/lazyregexp", "\x96\x02\v\x0f\x02"}, + {"internal/lazytemplate", "\xeb\x01+\x1a\x02\v"}, {"internal/msan", "\xc5\x02"}, {"internal/nettrace", ""}, - {"internal/obscuretestdata", "f\x85\x01,"}, - {"internal/oserror", "n"}, - {"internal/pkgbits", "\x03K\x19\a\x03\x05\vj\x0e\x1e\r\v\x01"}, + {"internal/obscuretestdata", "e\x86\x01+"}, + {"internal/oserror", "m"}, + {"internal/pkgbits", "\x03K\x18\a\x03\x05\vk\x0e\x1d\r\f\x01"}, {"internal/platform", ""}, - {"internal/poll", 
"nO\x1a\x149\x0e\x01\x01\v\x06"}, - {"internal/profile", "\x03\x04g\x03z7\f\x01\x01\x0f"}, + {"internal/poll", "mO\x1a\x158\x0f\x01\x01\v\x06"}, + {"internal/profile", "\x03\x04f\x03{6\r\x01\x01\x0f"}, {"internal/profilerecord", ""}, - {"internal/race", "\x95\x01\xb0\x01"}, - {"internal/reflectlite", "\x95\x01 3+\x1a\x02"}, {"internal/syslist", ""}, - {"internal/testenv", "\x03\na\x02\x01*\x1a\x10'+\x01\x05\a\v\x01\x02\x02\x01\n"}, + {"internal/testenv", "\x03\n`\x02\x01*\x1a\x10(*\x01\x05\a\f\x01\x02\x02\x01\n"}, {"internal/testlog", "\xb2\x02\x01\x12"}, - {"internal/testpty", "n\x03f@\x1d"}, - {"internal/trace", "\x02\x01\x01\x06]\a\x03n\x03\x03\x06\x03\n5\x01\x02\x0f\x06"}, - {"internal/trace/internal/testgen", "\x03d\nl\x03\x02\x03\x011\v\x0e"}, - {"internal/trace/internal/tracev1", "\x03\x01c\a\x03t\x06\r5\x01"}, - {"internal/trace/raw", "\x02e\nq\x03\x06D\x01\x11"}, - {"internal/trace/testtrace", "\x02\x01k\x03l\x03\x06\x057\v\x02\x01"}, - {"internal/trace/tracev2", ""}, - {"internal/trace/traceviewer", "\x02^\v\x06\x1a<\x16\a\a\x04\t\n\x15\x01\x05\a\v\x01\x02\r"}, + {"internal/testpty", "m\x03\xa6\x01"}, + {"internal/trace", "\x02\x01\x01\x06\\\a\x03m\x01\x01\x06\x06\x03\n5\x01\x02\x0f"}, + {"internal/trace/event", ""}, + {"internal/trace/event/go122", "pm"}, + {"internal/trace/internal/oldtrace", "\x03\x01b\a\x03m\b\x06\r5\x01"}, + {"internal/trace/internal/testgen/go122", "\x03c\nl\x01\x01\x03\x04\x010\v\x0f"}, + {"internal/trace/raw", "\x02d\nm\b\x06D\x01\x11"}, + {"internal/trace/testtrace", "\x02\x01j\x03l\x05\x05\x056\f\x02\x01"}, + {"internal/trace/traceviewer", "\x02]\v\x06\x1a<\x16\b\a\x04\t\n\x14\x01\x05\a\f\x01\x02\r"}, {"internal/trace/traceviewer/format", ""}, - {"internal/trace/version", "qq\t"}, - {"internal/txtar", "\x03n\xa6\x01\x19"}, + {"internal/trace/version", "pm\x01\r"}, + {"internal/txtar", "\x03m\xa6\x01\x1a"}, {"internal/types/errors", "\xaf\x02"}, {"internal/unsafeheader", "\xc5\x02"}, - {"internal/xcoff", 
"Z\r\a\x03`\x1a,\x18\x01"}, - {"internal/zstd", "g\a\x03z\x0f"}, - {"io", "n\xc4\x01"}, - {"io/fs", "n+*(1\x11\x12\x04"}, - {"io/ioutil", "\xeb\x01\x01+\x16\x03"}, - {"iter", "\xc9\x01[!"}, - {"log", "qz\x05'\r\x0e\x01\f"}, + {"internal/xcoff", "Y\r\a\x03`\x1b+\x19\x01"}, + {"internal/zstd", "f\a\x03{\x0f"}, + {"io", "m\xc5\x01"}, + {"io/fs", "m+*)0\x12\x12\x04"}, + {"io/ioutil", "\xeb\x01\x01*\x17\x03"}, + {"iter", "\xc8\x01[\""}, + {"log", "p{\x05&\r\x0f\x01\f"}, {"log/internal", ""}, - {"log/slog", "\x03\nU\t\x03\x03z\x04\x01\x02\x02\x04'\x05\t\x02\x01\x02\x01\f\x02\x02\x02"}, + {"log/slog", "\x03\nT\t\x03\x03{\x04\x01\x02\x02\x04&\x05\n\x02\x01\x02\x01\f\x02\x02\x02"}, {"log/slog/internal", ""}, - {"log/slog/internal/benchmarks", "\ra\x03z\x06\x03;\x10"}, + {"log/slog/internal/benchmarks", "\r`\x03{\x06\x03;\x10"}, {"log/slog/internal/buffer", "\xb2\x02"}, {"log/slog/internal/slogtest", "\xf1\x01"}, - {"log/syslog", "n\x03~\x12\x16\x19\x02\r"}, + {"log/syslog", "m\x03\x7f\x12\x15\x1a\x02\r"}, {"maps", "\xee\x01W"}, - {"math", "\xfa\x01K"}, - {"math/big", "\x03k\x03)Q\r\x02\x021\x02\x01\x02\x13"}, + {"math", "\xad\x01MK"}, + {"math/big", "\x03j\x03)\x14>\r\x02\x023\x01\x02\x13"}, {"math/bits", "\xc5\x02"}, {"math/cmplx", "\xf8\x01\x02"}, - {"math/rand", "\xb6\x01B:\x01\x12"}, - {"math/rand/v2", "n,\x02\\\x02K"}, - {"mime", "\x02\x01c\b\x03z\f \x16\x03\x02\x0f\x02"}, - {"mime/multipart", "\x02\x01G$\x03E5\f\x01\x06\x02\x15\x02\x06\x10\x02\x01\x15"}, - {"mime/quotedprintable", "\x02\x01nz"}, - {"net", "\x04\ta+\x1d\a\x04\x05\x05\a\x01\x04\x14\x01%\x06\r\t\x05\x01\x01\v\x06\a"}, - {"net/http", "\x02\x01\x04\x04\x02=\b\x14\x01\a\x03E5\x01\x03\b\x01\x02\x02\x02\x01\x02\x06\x02\x01\x01\n\x01\x01\x05\x01\x02\x05\t\x01\x01\x01\x02\x01\f\x02\x02\x02\b\x01\x01\x01"}, - {"net/http/cgi", "\x02P\x1c\x03z\x04\b\n\x01\x13\x01\x01\x01\x04\x01\x05\x02\t\x02\x01\x0f\x0e"}, - {"net/http/cookiejar", "\x04j\x03\x90\x01\x01\b\f\x17\x03\x02\r\x04"}, - {"net/http/fcgi", 
"\x02\x01\nZ\a\x03z\x16\x01\x01\x14\x19\x02\r"}, - {"net/http/httptest", "\x02\x01\nE\x02\x1c\x01z\x04\x12\x01\n\t\x02\x18\x01\x02\r\x0e"}, - {"net/http/httptrace", "\rEo@\x14\n "}, - {"net/http/httputil", "\x02\x01\na\x03z\x04\x0f\x03\x01\x05\x02\x01\v\x01\x1a\x02\r\x0e"}, - {"net/http/internal", "\x02\x01k\x03z"}, + {"math/rand", "\xb5\x01C:\x01\x12"}, + {"math/rand/v2", "m,\x02]\x02K"}, + {"mime", "\x02\x01b\b\x03{\f\x1f\x17\x03\x02\x0f\x02"}, + {"mime/multipart", "\x02\x01G#\x03E6\f\x01\x06\x02\x14\x02\x06\x11\x02\x01\x15"}, + {"mime/quotedprintable", "\x02\x01m{"}, + {"net", "\x04\t`+\x1d\a\x04\x05\f\x01\x04\x15\x01$\x06\r\n\x05\x01\x01\v\x06\a"}, + {"net/http", "\x02\x01\x04\x04\x02=\b\x13\x01\a\x03E6\x01\x03\b\x01\x02\x02\x02\x01\x02\x06\x02\x01\n\x01\x01\x05\x01\x02\x05\n\x01\x01\x01\x02\x01\f\x02\x02\x02\b\x01\x01\x01"}, + {"net/http/cgi", "\x02P\x1b\x03{\x04\b\n\x01\x12\x01\x01\x01\x04\x01\x05\x02\n\x02\x01\x0f\x0e"}, + {"net/http/cookiejar", "\x04i\x03\x91\x01\x01\b\v\x18\x03\x02\r\x04"}, + {"net/http/fcgi", "\x02\x01\nY\a\x03{\x16\x01\x01\x13\x1a\x02\r"}, + {"net/http/httptest", "\x02\x01\nE\x02\x1b\x01{\x04\x12\x01\t\t\x02\x19\x01\x02\r\x0e"}, + {"net/http/httptrace", "\rEnA\x13\n!"}, + {"net/http/httputil", "\x02\x01\n`\x03{\x04\x0f\x03\x01\x05\x02\x01\n\x01\x1b\x02\r\x0e"}, + {"net/http/internal", "\x02\x01j\x03{"}, {"net/http/internal/ascii", "\xb0\x02\x11"}, - {"net/http/internal/httpcommon", "\ra\x03\x96\x01\x0e\x01\x18\x01\x01\x02\x1b\x02"}, {"net/http/internal/testcert", "\xb0\x02"}, - {"net/http/pprof", "\x02\x01\nd\x19,\x11$\x04\x13\x14\x01\r\x06\x02\x01\x02\x01\x0f"}, - {"net/internal/cgotest", "\xd7\x01n"}, - {"net/internal/socktest", "q\xc1\x01\x02"}, - {"net/mail", "\x02l\x03z\x04\x0f\x03\x14\x1b\x02\r\x04"}, - {"net/netip", "\x04j+\x01#;\x025\x15"}, - {"net/rpc", "\x02g\x05\x03\x10\n`\x04\x12\x01\x1d\x0e\x03\x02"}, - {"net/rpc/jsonrpc", "k\x03\x03z\x16\x11 "}, - {"net/smtp", "\x19.\v\x14\b\x03z\x16\x14\x1b"}, - {"net/textproto", 
"\x02\x01k\x03z\r\t.\x01\x02\x13"}, - {"net/url", "n\x03\x86\x01%\x11\x02\x01\x15"}, - {"os", "n+\x19\v\t\r\x03\x01\x04\x10\x018\t\x05\x01\x01\v\x06"}, - {"os/exec", "\x03\naH \x01\x14\x01+\x06\a\v\x01\x04\v"}, + {"net/http/pprof", "\x02\x01\nc\x19,\x11%\x04\x13\x13\x01\r\x06\x03\x01\x02\x01\x0f"}, + {"net/internal/cgotest", ""}, + {"net/internal/socktest", "p\xc2\x01\x02"}, + {"net/mail", "\x02k\x03{\x04\x0f\x03\x13\x1c\x02\r\x04"}, + {"net/netip", "\x04i+\x01#<\x025\x15"}, + {"net/rpc", "\x02f\x05\x03\x10\na\x04\x12\x01\x1c\x0f\x03\x02"}, + {"net/rpc/jsonrpc", "j\x03\x03{\x16\x10!"}, + {"net/smtp", "\x19.\v\x13\b\x03{\x16\x13\x1c"}, + {"net/textproto", "\x02\x01j\x03{\r\t.\x01\x02\x13"}, + {"net/url", "m\x03\x87\x01$\x12\x02\x01\x15"}, + {"os", "m+\x01\x18\x03\b\t\r\x03\x01\x04\x11\x017\n\x05\x01\x01\v\x06"}, + {"os/exec", "\x03\n`H \x01\x15\x01*\x06\a\f\x01\x04\v"}, {"os/exec/internal/fdtest", "\xb4\x02"}, - {"os/signal", "\r\x8a\x02\x16\x05\x02"}, - {"os/user", "qfM\v\x01\x02\x02\x11"}, - {"path", "n+\xaa\x01"}, - {"path/filepath", "n+\x19:+\r\t\x03\x04\x0f"}, - {"plugin", "n\xc4\x01\x13"}, - {"reflect", "n'\x04\x1c\b\f\x05\x02\x18\x06\n,\v\x03\x0f\x02\x02"}, + {"os/signal", "\r\x89\x02\x17\x05\x02"}, + {"os/user", "\x02\x01j\x03{+\r\f\x01\x02"}, + {"path", "m+\xab\x01"}, + {"path/filepath", "m+\x19;*\r\n\x03\x04\x0f"}, + {"plugin", "m"}, + {"reflect", "m'\x04\x1c\b\f\x04\x02\x1a\x06\n+\f\x03\x0f\x02\x02"}, {"reflect/internal/example1", ""}, {"reflect/internal/example2", ""}, - {"regexp", "\x03\xe8\x018\n\x02\x01\x02\x0f\x02"}, + {"regexp", "\x03\xe8\x017\v\x02\x01\x02\x0f\x02"}, {"regexp/syntax", "\xad\x02\x01\x01\x01\x11\x02"}, - {"runtime", "\x95\x01\x04\x01\x02\f\x06\a\x02\x01\x01\x0f\x04\x01\x01\x01\x01\x03\x0fc"}, - {"runtime/cgo", "\xd0\x01b\x01\x12"}, - {"runtime/coverage", "\xa0\x01K"}, - {"runtime/debug", "qUQ\r\t\x02\x01\x0f\x06"}, + {"runtime", "\x94\x01\x04\x01\x02\f\x06\a\x02\x01\x01\x0f\x03\x01\x01\x01\x01\x01\x03s"}, + {"runtime/coverage", 
"\x9f\x01L"}, + {"runtime/debug", "pUQ\r\n\x02\x01\x0f\x06"}, + {"runtime/internal/startlinetest", ""}, {"runtime/internal/wasitest", ""}, - {"runtime/metrics", "\xb7\x01A,!"}, - {"runtime/pprof", "\x02\x01\x01\x03\x06Z\a\x03$3#\r\x1f\r\t\x01\x01\x01\x02\x02\b\x03\x06"}, - {"runtime/race", ""}, - {"runtime/trace", "\rdz9\x0e\x01\x12"}, + {"runtime/metrics", "\xb6\x01B+\""}, + {"runtime/pprof", "\x02\x01\x01\x03\x06Y\a\x03$3$\r\x1e\r\n\x01\x01\x01\x02\x02\b\x03\x06"}, + {"runtime/race", "\xab\x02"}, + {"runtime/race/internal/amd64v1", ""}, + {"runtime/trace", "\rc{8\x0f\x01\x12"}, {"slices", "\x04\xea\x01\fK"}, - {"sort", "\xca\x0103"}, - {"strconv", "n+:%\x02I"}, - {"strings", "n'\x04:\x18\x03\f8\x0f\x02\x02"}, + {"sort", "\xc9\x0113"}, + {"strconv", "m+:&\x02I"}, + {"strings", "m'\x04:\x19\x03\f8\x0f\x02\x02"}, {"structs", ""}, - {"sync", "\xc9\x01\vP\x0f\x12"}, + {"sync", "\xc8\x01\vP\x10\x12"}, {"sync/atomic", "\xc5\x02"}, - {"syscall", "n'\x01\x03\x01\x1b\b\x03\x03\x06[\x0e\x01\x12"}, - {"testing", "\x03\na\x02\x01X\x0f\x13\r\x04\x1b\x06\x02\x05\x03\x05\x01\x02\x01\x02\x01\f\x02\x02\x02"}, - {"testing/fstest", "n\x03z\x01\v%\x11\x03\b\a"}, - {"testing/internal/testdeps", "\x02\v\xa7\x01'\x10,\x03\x05\x03\b\x06\x02\r"}, - {"testing/iotest", "\x03k\x03z\x04"}, - {"testing/quick", "p\x01\x87\x01\x04#\x11\x0f"}, - {"testing/slogtest", "\ra\x03\x80\x01.\x05\x11\n"}, - {"text/scanner", "\x03nz,*\x02"}, - {"text/tabwriter", "qzX"}, - {"text/template", "n\x03B8\x01\v\x1f\x01\x05\x01\x02\x05\f\x02\f\x03\x02"}, - {"text/template/parse", "\x03n\xb3\x01\v\x01\x11\x02"}, - {"time", "n+\x1d\x1d'*\x0e\x02\x11"}, - {"time/tzdata", "n\xc6\x01\x11"}, + {"syscall", "m(\x03\x01\x1b\b\x03\x03\x06\aT\x0f\x01\x12"}, + {"testing", "\x03\n`\x02\x01G\x11\x0f\x14\r\x04\x1a\x06\x02\x05\x02\a\x01\x02\x01\x02\x01\f\x02\x02\x02"}, + {"testing/fstest", "m\x03{\x01\v$\x12\x03\b\a"}, + {"testing/internal/testdeps", "\x02\v\xa6\x01'\x11+\x03\x05\x03\b\a\x02\r"}, + {"testing/iotest", 
"\x03j\x03{\x04"}, + {"testing/quick", "o\x01\x88\x01\x04\"\x12\x0f"}, + {"testing/slogtest", "\r`\x03\x81\x01-\x05\x12\n"}, + {"text/scanner", "\x03m{++\x02"}, + {"text/tabwriter", "p{X"}, + {"text/template", "m\x03B9\x01\v\x1e\x01\x05\x01\x02\x05\r\x02\f\x03\x02"}, + {"text/template/parse", "\x03m\xb3\x01\f\x01\x11\x02"}, + {"time", "m+\x1d\x1d()\x0f\x02\x11"}, + {"time/tzdata", "m\xc7\x01\x11"}, {"unicode", ""}, {"unicode/utf16", ""}, {"unicode/utf8", ""}, - {"unique", "\x95\x01>\x01P\x0e\x13\x12"}, + {"unique", "\x94\x01>\x01P\x0f\x13\x12"}, {"unsafe", ""}, - {"vendor/golang.org/x/crypto/chacha20", "\x10W\a\x8c\x01*&"}, - {"vendor/golang.org/x/crypto/chacha20poly1305", "\x10W\a\xd8\x01\x04\x01"}, - {"vendor/golang.org/x/crypto/cryptobyte", "d\n\x03\x88\x01& \n"}, + {"vendor/golang.org/x/crypto/chacha20", "\x10V\a\x8d\x01)'"}, + {"vendor/golang.org/x/crypto/chacha20poly1305", "\x10V\a\xd9\x01\x04\x01\a"}, + {"vendor/golang.org/x/crypto/cryptobyte", "c\n\x03\x89\x01%!\n"}, {"vendor/golang.org/x/crypto/cryptobyte/asn1", ""}, {"vendor/golang.org/x/crypto/internal/alias", "\xc5\x02"}, - {"vendor/golang.org/x/crypto/internal/poly1305", "Q\x16\x93\x01"}, - {"vendor/golang.org/x/net/dns/dnsmessage", "n"}, - {"vendor/golang.org/x/net/http/httpguts", "\x81\x02\x14\x1b\x13\r"}, - {"vendor/golang.org/x/net/http/httpproxy", "n\x03\x90\x01\x15\x01\x19\x13\r"}, - {"vendor/golang.org/x/net/http2/hpack", "\x03k\x03zG"}, - {"vendor/golang.org/x/net/idna", "q\x87\x018\x13\x10\x02\x01"}, - {"vendor/golang.org/x/net/nettest", "\x03d\a\x03z\x11\x05\x16\x01\f\v\x01\x02\x02\x01\n"}, - {"vendor/golang.org/x/sys/cpu", "\x97\x02\r\v\x01\x15"}, - {"vendor/golang.org/x/text/secure/bidirule", "n\xd5\x01\x11\x01"}, - {"vendor/golang.org/x/text/transform", "\x03k}X"}, - {"vendor/golang.org/x/text/unicode/bidi", "\x03\bf~?\x15"}, - {"vendor/golang.org/x/text/unicode/norm", "g\nzG\x11\x11"}, - {"weak", "\x95\x01\x8f\x01!"}, + {"vendor/golang.org/x/crypto/internal/poly1305", "Q\x15\x94\x01"}, + 
{"vendor/golang.org/x/net/dns/dnsmessage", "m"}, + {"vendor/golang.org/x/net/http/httpguts", "\x81\x02\x13\x1c\x13\r"}, + {"vendor/golang.org/x/net/http/httpproxy", "m\x03\x91\x01\x0f\x05\x01\x1a\x13\r"}, + {"vendor/golang.org/x/net/http2/hpack", "\x03j\x03{G"}, + {"vendor/golang.org/x/net/idna", "p\x88\x018\x13\x10\x02\x01"}, + {"vendor/golang.org/x/net/nettest", "\x03c\a\x03{\x11\x05\x15\x01\f\f\x01\x02\x02\x01\n"}, + {"vendor/golang.org/x/sys/cpu", "\x96\x02\r\f\x01\x15"}, + {"vendor/golang.org/x/text/secure/bidirule", "m\xd6\x01\x11\x01"}, + {"vendor/golang.org/x/text/transform", "\x03j~X"}, + {"vendor/golang.org/x/text/unicode/bidi", "\x03\be\x7f?\x15"}, + {"vendor/golang.org/x/text/unicode/norm", "f\n{G\x11\x11"}, + {"weak", "\x94\x01\x8f\x01\""}, } diff --git a/internal/stdlib/generate.go b/internal/stdlib/generate.go index 4c67d8bd797..cfef0a2438f 100644 --- a/internal/stdlib/generate.go +++ b/internal/stdlib/generate.go @@ -246,6 +246,7 @@ func deps() { cmd := exec.Command("go", "list", "-deps", "-json", "std") cmd.Stdout = stdout cmd.Stderr = os.Stderr + cmd.Env = append(os.Environ(), "CGO_ENABLED=0", "GOOS=linux", "GOARCH=amd64") if err := cmd.Run(); err != nil { log.Fatal(err) } @@ -336,6 +337,7 @@ var deps = [...]pkginfo{ cmd := exec.Command("go", "list", t.flag, "net/http") cmd.Stdout = stdout cmd.Stderr = os.Stderr + cmd.Env = append(os.Environ(), "CGO_ENABLED=0", "GOOS=linux", "GOARCH=amd64") if err := cmd.Run(); err != nil { log.Fatal(err) } diff --git a/internal/stdlib/manifest.go b/internal/stdlib/manifest.go index 00776a31b60..2b418796abb 100644 --- a/internal/stdlib/manifest.go +++ b/internal/stdlib/manifest.go @@ -7119,7 +7119,6 @@ var PackageSymbols = map[string][]Symbol{ {"FormatFileInfo", Func, 21}, {"Glob", Func, 16}, {"GlobFS", Type, 16}, - {"Lstat", Func, 25}, {"ModeAppend", Const, 16}, {"ModeCharDevice", Const, 16}, {"ModeDevice", Const, 16}, @@ -7144,8 +7143,6 @@ var PackageSymbols = map[string][]Symbol{ {"ReadDirFile", Type, 16}, 
{"ReadFile", Func, 16}, {"ReadFileFS", Type, 16}, - {"ReadLink", Func, 25}, - {"ReadLinkFS", Type, 25}, {"SkipAll", Var, 20}, {"SkipDir", Var, 16}, {"Stat", Func, 16}, @@ -9149,8 +9146,6 @@ var PackageSymbols = map[string][]Symbol{ {"(*ProcessState).SysUsage", Method, 0}, {"(*ProcessState).SystemTime", Method, 0}, {"(*ProcessState).UserTime", Method, 0}, - {"(*Root).Chmod", Method, 25}, - {"(*Root).Chown", Method, 25}, {"(*Root).Close", Method, 24}, {"(*Root).Create", Method, 24}, {"(*Root).FS", Method, 24}, @@ -16759,11 +16754,9 @@ var PackageSymbols = map[string][]Symbol{ }, "testing/fstest": { {"(MapFS).Glob", Method, 16}, - {"(MapFS).Lstat", Method, 25}, {"(MapFS).Open", Method, 16}, {"(MapFS).ReadDir", Method, 16}, {"(MapFS).ReadFile", Method, 16}, - {"(MapFS).ReadLink", Method, 25}, {"(MapFS).Stat", Method, 16}, {"(MapFS).Sub", Method, 16}, {"MapFS", Type, 16}, diff --git a/internal/stdlib/testdata/nethttp.deps b/internal/stdlib/testdata/nethttp.deps index e1235e84932..71e58a0c693 100644 --- a/internal/stdlib/testdata/nethttp.deps +++ b/internal/stdlib/testdata/nethttp.deps @@ -19,8 +19,8 @@ internal/race internal/runtime/math internal/runtime/sys internal/runtime/maps +internal/runtime/syscall internal/stringslite -internal/trace/tracev2 runtime internal/reflectlite errors @@ -122,6 +122,7 @@ crypto/internal/fips140/tls13 vendor/golang.org/x/crypto/internal/alias vendor/golang.org/x/crypto/chacha20 vendor/golang.org/x/crypto/internal/poly1305 +vendor/golang.org/x/sys/cpu vendor/golang.org/x/crypto/chacha20poly1305 crypto/internal/hpke crypto/md5 @@ -132,7 +133,6 @@ crypto/sha1 crypto/sha256 crypto/tls/internal/fips140tls crypto/dsa -crypto/x509/internal/macos encoding/hex crypto/x509/pkix encoding/base64 @@ -140,13 +140,13 @@ encoding/pem maps vendor/golang.org/x/net/dns/dnsmessage internal/nettrace +internal/singleflight weak unique net/netip -internal/routebsd -internal/singleflight net net/url +path/filepath crypto/x509 crypto/tls 
vendor/golang.org/x/text/transform @@ -162,10 +162,8 @@ vendor/golang.org/x/net/http/httpproxy vendor/golang.org/x/net/http2/hpack mime mime/quotedprintable -path/filepath mime/multipart net/http/httptrace net/http/internal net/http/internal/ascii -net/http/internal/httpcommon net/http diff --git a/internal/stdlib/testdata/nethttp.imports b/internal/stdlib/testdata/nethttp.imports index 77e78696bdd..de41e46c0fe 100644 --- a/internal/stdlib/testdata/nethttp.imports +++ b/internal/stdlib/testdata/nethttp.imports @@ -27,7 +27,6 @@ net net/http/httptrace net/http/internal net/http/internal/ascii -net/http/internal/httpcommon net/textproto net/url os diff --git a/internal/testenv/testenv.go b/internal/testenv/testenv.go index 5c541b7b19b..fa53f37f7aa 100644 --- a/internal/testenv/testenv.go +++ b/internal/testenv/testenv.go @@ -149,7 +149,7 @@ func HasTool(tool string) error { func cgoEnabled(bypassEnvironment bool) (bool, error) { cmd := exec.Command("go", "env", "CGO_ENABLED") if bypassEnvironment { - cmd.Env = append(append([]string(nil), os.Environ()...), "CGO_ENABLED=") + cmd.Env = append(os.Environ(), "CGO_ENABLED=") } out, err := cmd.Output() if err != nil { @@ -251,8 +251,8 @@ func NeedsGoPackagesEnv(t testing.TB, env []string) { t.Helper() for _, v := range env { - if strings.HasPrefix(v, "GOPACKAGESDRIVER=") { - tool := strings.TrimPrefix(v, "GOPACKAGESDRIVER=") + if after, ok := strings.CutPrefix(v, "GOPACKAGESDRIVER="); ok { + tool := after if tool == "off" { NeedsTool(t, "go") } else { diff --git a/internal/testfiles/testfiles.go b/internal/testfiles/testfiles.go index 78733976b3b..dee63c1c2f0 100644 --- a/internal/testfiles/testfiles.go +++ b/internal/testfiles/testfiles.go @@ -7,7 +7,6 @@ package testfiles import ( - "io" "io/fs" "os" "path/filepath" @@ -46,7 +45,7 @@ import ( func CopyToTmp(t testing.TB, src fs.FS, rename ...string) string { dstdir := t.TempDir() - if err := copyFS(dstdir, src); err != nil { + if err := os.CopyFS(dstdir, src); err != nil 
{ t.Fatal(err) } for _, r := range rename { @@ -64,33 +63,6 @@ func CopyToTmp(t testing.TB, src fs.FS, rename ...string) string { return dstdir } -// Copy the files in src to dst. -// Use os.CopyFS when 1.23 can be used in x/tools. -func copyFS(dstdir string, src fs.FS) error { - return fs.WalkDir(src, ".", func(path string, d fs.DirEntry, err error) error { - if err != nil { - return err - } - newpath := filepath.Join(dstdir, path) - if d.IsDir() { - return os.MkdirAll(newpath, 0777) - } - r, err := src.Open(path) - if err != nil { - return err - } - defer r.Close() - - w, err := os.Create(newpath) - if err != nil { - return err - } - defer w.Close() - _, err = io.Copy(w, r) - return err - }) -} - // ExtractTxtarFileToTmp read a txtar archive on a given path, // extracts it to a temporary directory, and returns the // temporary directory. diff --git a/internal/tokeninternal/tokeninternal.go b/internal/tokeninternal/tokeninternal.go index 0a73e2ebda3..549bb183976 100644 --- a/internal/tokeninternal/tokeninternal.go +++ b/internal/tokeninternal/tokeninternal.go @@ -9,6 +9,7 @@ package tokeninternal import ( "fmt" "go/token" + "slices" "sort" "sync" "sync/atomic" @@ -18,7 +19,29 @@ import ( // AddExistingFiles adds the specified files to the FileSet if they // are not already present. It panics if any pair of files in the // resulting FileSet would overlap. +// +// TODO(adonovan): add this a method to FileSet; see +// https://github.com/golang/go/issues/73205 func AddExistingFiles(fset *token.FileSet, files []*token.File) { + + // This function cannot be implemented as: + // + // for _, file := range files { + // if prev := fset.File(token.Pos(file.Base())); prev != nil { + // if prev != file { + // panic("FileSet contains a different file at the same base") + // } + // continue + // } + // file2 := fset.AddFile(file.Name(), file.Base(), file.Size()) + // file2.SetLines(file.Lines()) + // } + // + // because all calls to AddFile must be in increasing order. 
+ // AddExistingFiles lets us augment an existing FileSet + // sequentially, so long as all sets of files have disjoint + // ranges. + // Punch through the FileSet encapsulation. type tokenFileSet struct { // This type remained essentially consistent from go1.16 to go1.21. @@ -83,10 +106,7 @@ func AddExistingFiles(fset *token.FileSet, files []*token.File) { // of their Base. func FileSetFor(files ...*token.File) *token.FileSet { fset := token.NewFileSet() - for _, f := range files { - f2 := fset.AddFile(f.Name(), f.Base(), f.Size()) - f2.SetLines(f.Lines()) - } + AddExistingFiles(fset, files) return fset } @@ -94,12 +114,5 @@ func FileSetFor(files ...*token.File) *token.FileSet { // create copies of the token.Files in fset: they are added to the resulting // FileSet unmodified. func CloneFileSet(fset *token.FileSet) *token.FileSet { - var files []*token.File - fset.Iterate(func(f *token.File) bool { - files = append(files, f) - return true - }) - newFileSet := token.NewFileSet() - AddExistingFiles(newFileSet, files) - return newFileSet + return FileSetFor(slices.Collect(fset.Iterate)...) } diff --git a/internal/typeparams/free.go b/internal/typeparams/free.go index 0ade5c2949e..709d2fc1447 100644 --- a/internal/typeparams/free.go +++ b/internal/typeparams/free.go @@ -70,7 +70,7 @@ func (w *Free) Has(typ types.Type) (res bool) { case *types.Tuple: n := t.Len() - for i := 0; i < n; i++ { + for i := range n { if w.Has(t.At(i).Type()) { return true } diff --git a/internal/typeparams/termlist.go b/internal/typeparams/termlist.go index cbd12f80131..9bc29143f6a 100644 --- a/internal/typeparams/termlist.go +++ b/internal/typeparams/termlist.go @@ -1,3 +1,6 @@ +// Code generated by "go test -run=Generate -write=all"; DO NOT EDIT. +// Source: ../../cmd/compile/internal/types2/termlist.go + // Copyright 2021 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
@@ -7,8 +10,8 @@ package typeparams import ( - "bytes" "go/types" + "strings" ) // A termlist represents the type set represented by the union @@ -22,15 +25,18 @@ type termlist []*term // It is in normal form. var allTermlist = termlist{new(term)} +// termSep is the separator used between individual terms. +const termSep = " | " + // String prints the termlist exactly (without normalization). func (xl termlist) String() string { if len(xl) == 0 { return "∅" } - var buf bytes.Buffer + var buf strings.Builder for i, x := range xl { if i > 0 { - buf.WriteString(" | ") + buf.WriteString(termSep) } buf.WriteString(x.String()) } diff --git a/internal/typeparams/typeterm.go b/internal/typeparams/typeterm.go index 7350bb702a1..fa758cdc989 100644 --- a/internal/typeparams/typeterm.go +++ b/internal/typeparams/typeterm.go @@ -1,3 +1,6 @@ +// Code generated by "go test -run=Generate -write=all"; DO NOT EDIT. +// Source: ../../cmd/compile/internal/types2/typeterm.go + // Copyright 2021 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. diff --git a/internal/typesinternal/classify_call.go b/internal/typesinternal/classify_call.go new file mode 100644 index 00000000000..649c82b6bea --- /dev/null +++ b/internal/typesinternal/classify_call.go @@ -0,0 +1,135 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package typesinternal + +import ( + "fmt" + "go/ast" + "go/types" + _ "unsafe" +) + +// CallKind describes the function position of an [*ast.CallExpr]. 
+type CallKind int + +const ( + CallStatic CallKind = iota // static call to known function + CallInterface // dynamic call through an interface method + CallDynamic // dynamic call of a func value + CallBuiltin // call to a builtin function + CallConversion // a conversion (not a call) +) + +var callKindNames = []string{ + "CallStatic", + "CallInterface", + "CallDynamic", + "CallBuiltin", + "CallConversion", +} + +func (k CallKind) String() string { + if i := int(k); i >= 0 && i < len(callKindNames) { + return callKindNames[i] + } + return fmt.Sprintf("typeutil.CallKind(%d)", k) +} + +// ClassifyCall classifies the function position of a call expression ([*ast.CallExpr]). +// It distinguishes among true function calls, calls to builtins, and type conversions, +// and further classifies function calls as static calls (where the function is known), +// dynamic interface calls, and other dynamic calls. +// +// For the declarations: +// +// func f() {} +// func g[T any]() {} +// var v func() +// var s []func() +// type I interface { M() } +// var i I +// +// ClassifyCall returns the following: +// +// f() CallStatic +// g[int]() CallStatic +// i.M() CallInterface +// min(1, 2) CallBuiltin +// v() CallDynamic +// s[0]() CallDynamic +// int(x) CallConversion +// []byte("") CallConversion +func ClassifyCall(info *types.Info, call *ast.CallExpr) CallKind { + if info.Types == nil { + panic("ClassifyCall: info.Types is nil") + } + if info.Types[call.Fun].IsType() { + return CallConversion + } + obj := info.Uses[UsedIdent(info, call.Fun)] + // Classify the call by the type of the object, if any. + switch obj := obj.(type) { + case *types.Builtin: + return CallBuiltin + case *types.Func: + if interfaceMethod(obj) { + return CallInterface + } + return CallStatic + default: + return CallDynamic + } +} + +// UsedIdent returns the identifier such that info.Uses[UsedIdent(info, e)] +// is the [types.Object] used by e, if any. 
+// +// If e is one of various forms of reference: +// +// f, c, v, T lexical reference +// pkg.X qualified identifier +// f[T] or pkg.F[K,V] instantiations of the above kinds +// expr.f field or method value selector +// T.f method expression selector +// +// UsedIdent returns the identifier whose is associated value in [types.Info.Uses] +// is the object to which it refers. +// +// For the declarations: +// +// func F[T any] {...} +// type I interface { M() } +// var ( +// x int +// s struct { f int } +// a []int +// i I +// ) +// +// UsedIdent returns the following: +// +// Expr UsedIdent +// x x +// s.f f +// F[int] F +// i.M M +// I.M M +// min min +// int int +// 1 nil +// a[0] nil +// []byte nil +// +// Note: if e is an instantiated function or method, UsedIdent returns +// the corresponding generic function or method on the generic type. +func UsedIdent(info *types.Info, e ast.Expr) *ast.Ident { + return usedIdent(info, e) +} + +//go:linkname usedIdent golang.org/x/tools/go/types/typeutil.usedIdent +func usedIdent(info *types.Info, e ast.Expr) *ast.Ident + +//go:linkname interfaceMethod golang.org/x/tools/go/types/typeutil.interfaceMethod +func interfaceMethod(f *types.Func) bool diff --git a/internal/typesinternal/classify_call_test.go b/internal/typesinternal/classify_call_test.go new file mode 100644 index 00000000000..e875727d1a5 --- /dev/null +++ b/internal/typesinternal/classify_call_test.go @@ -0,0 +1,160 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package typesinternal_test + +import ( + "bytes" + "fmt" + "go/ast" + "go/format" + "go/importer" + "go/parser" + "go/token" + "go/types" + "testing" + + ti "golang.org/x/tools/internal/typesinternal" +) + +func TestClassifyCallAndUsed(t *testing.T) { + const src = ` + package p + + func g(int) + + type A[T any] *T + + func F[T any](T) {} + + type S struct{ f func(int) } + func (S) g(int) + + type I interface{ m(int) } + + var ( + z S + a struct{b struct{c S}} + f = g + m map[int]func() + n []func() + p *int + ) + + func tests[T int]() { + var zt T + + g(1) + f(1) + println() + z.g(1) // a concrete method + a.b.c.g(1) // same + S.g(z, 1) // method expression + z.f(1) // struct field + I(nil).m(1) // interface method, then type conversion (preorder traversal) + m[0]() // a map + n[0]() // a slice + F[int](1) // instantiated function + F[T](zt) // generic function + func() {}() // function literal + _=[]byte("") // type expression + _=A[int](p) // instantiated type + _=T(1) // type param + // parenthesized forms + (z.g)(1) + (z).g(1) + + + // A[T](1) // generic type: illegal + } + ` + + fset := token.NewFileSet() + cfg := &types.Config{ + Error: func(err error) { t.Fatal(err) }, + Importer: importer.Default(), + } + info := ti.NewTypesInfo() + // parse + f, err := parser.ParseFile(fset, "classify.go", src, 0) + if err != nil { + t.Fatal(err) + } + + // type-check + pkg, err := cfg.Check(f.Name.Name, fset, []*ast.File{f}, info) + if err != nil { + t.Fatal(err) + } + + lookup := func(sym string) types.Object { + return pkg.Scope().Lookup(sym) + } + + member := func(sym, fieldOrMethod string) types.Object { + obj, _, _ := types.LookupFieldOrMethod(lookup(sym).Type(), false, pkg, fieldOrMethod) + return obj + } + + printlnObj := types.Universe.Lookup("println") + + typeParam := lookup("tests").Type().(*types.Signature).TypeParams().At(0).Obj() + + // Expected Calls are in the order of CallExprs at the end of src, above. 
+ wants := []struct { + kind ti.CallKind + usedObj types.Object // the object obtained from the result of UsedIdent + }{ + {ti.CallStatic, lookup("g")}, // g + {ti.CallDynamic, lookup("f")}, // f + {ti.CallBuiltin, printlnObj}, // println + {ti.CallStatic, member("S", "g")}, // z.g + {ti.CallStatic, member("S", "g")}, // a.b.c.g + {ti.CallStatic, member("S", "g")}, // S.g(z, 1) + {ti.CallDynamic, member("z", "f")}, // z.f + {ti.CallInterface, member("I", "m")}, // I(nil).m + {ti.CallConversion, lookup("I")}, // I(nil) + {ti.CallDynamic, nil}, // m[0] + {ti.CallDynamic, nil}, // n[0] + {ti.CallStatic, lookup("F")}, // F[int] + {ti.CallStatic, lookup("F")}, // F[T] + {ti.CallDynamic, nil}, // f(){} + {ti.CallConversion, nil}, // []byte + {ti.CallConversion, lookup("A")}, // A[int] + {ti.CallConversion, typeParam}, // T + {ti.CallStatic, member("S", "g")}, // (z.g) + {ti.CallStatic, member("S", "g")}, // (z).g + } + + i := 0 + ast.Inspect(f, func(n ast.Node) bool { + if call, ok := n.(*ast.CallExpr); ok { + if i >= len(wants) { + t.Fatal("more calls than wants") + } + var buf bytes.Buffer + if err := format.Node(&buf, fset, n); err != nil { + t.Fatal(err) + } + prefix := fmt.Sprintf("%s (#%d)", buf.String(), i) + + gotKind := ti.ClassifyCall(info, call) + want := wants[i] + + if gotKind != want.kind { + t.Errorf("%s kind: got %s, want %s", prefix, gotKind, want.kind) + } + + w := want.usedObj + if g := info.Uses[ti.UsedIdent(info, call.Fun)]; g != w { + t.Errorf("%s used obj: got %v (%[2]T), want %v", prefix, g, w) + } + i++ + } + return true + }) + if i != len(wants) { + t.Fatal("more wants than calls") + } +} diff --git a/internal/typesinternal/typeindex/typeindex.go b/internal/typesinternal/typeindex/typeindex.go new file mode 100644 index 00000000000..34087a98fbf --- /dev/null +++ b/internal/typesinternal/typeindex/typeindex.go @@ -0,0 +1,223 @@ +// Copyright 2025 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package typeindex provides an [Index] of type information for a +// package, allowing efficient lookup of, say, whether a given symbol +// is referenced and, if so, where from; or of the [cursor.Cursor] for +// the declaration of a particular [types.Object] symbol. +package typeindex + +import ( + "encoding/binary" + "go/ast" + "go/types" + "iter" + + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/astutil/cursor" + "golang.org/x/tools/internal/astutil/edge" + "golang.org/x/tools/internal/typesinternal" +) + +// New constructs an Index for the package of type-annotated syntax +// +// TODO(adonovan): accept a FileSet too? +// We regret not requiring one in inspector.New. +func New(inspect *inspector.Inspector, pkg *types.Package, info *types.Info) *Index { + ix := &Index{ + inspect: inspect, + info: info, + packages: make(map[string]*types.Package), + def: make(map[types.Object]cursor.Cursor), + uses: make(map[types.Object]*uses), + } + + addPackage := func(pkg2 *types.Package) { + if pkg2 != nil && pkg2 != pkg { + ix.packages[pkg2.Path()] = pkg2 + } + } + + for cur := range cursor.Root(inspect).Preorder((*ast.ImportSpec)(nil), (*ast.Ident)(nil)) { + switch n := cur.Node().(type) { + case *ast.ImportSpec: + // Index direct imports, including blank ones. + if pkgname := info.PkgNameOf(n); pkgname != nil { + addPackage(pkgname.Imported()) + } + + case *ast.Ident: + // Index all defining and using identifiers. + if obj := info.Defs[n]; obj != nil { + ix.def[obj] = cur + } + + if obj := info.Uses[n]; obj != nil { + // Index indirect dependencies (via fields and methods). 
+ if !typesinternal.IsPackageLevel(obj) { + addPackage(obj.Pkg()) + } + + us, ok := ix.uses[obj] + if !ok { + us = &uses{} + us.code = us.initial[:0] + ix.uses[obj] = us + } + delta := cur.Index() - us.last + if delta < 0 { + panic("non-monotonic") + } + us.code = binary.AppendUvarint(us.code, uint64(delta)) + us.last = cur.Index() + } + } + } + return ix +} + +// An Index holds an index mapping [types.Object] symbols to their syntax. +// In effect, it is the inverse of [types.Info]. +type Index struct { + inspect *inspector.Inspector + info *types.Info + packages map[string]*types.Package // packages of all symbols referenced from this package + def map[types.Object]cursor.Cursor // Cursor of *ast.Ident that defines the Object + uses map[types.Object]*uses // Cursors of *ast.Idents that use the Object +} + +// A uses holds the list of Cursors of Idents that use a given symbol. +// +// The Uses map of [types.Info] is substantial, so it pays to compress +// its inverse mapping here, both in space and in CPU due to reduced +// allocation. A Cursor is 2 words; a Cursor.Index is 4 bytes; but +// since Cursors are naturally delivered in ascending order, we can +// use varint-encoded deltas at a cost of only ~1.7-2.2 bytes per use. +// +// Many variables have only one or two uses, so their encoded uses may +// fit in the 4 bytes of initial, saving further CPU and space +// essentially for free since the struct's size class is 4 words. +type uses struct { + code []byte // varint-encoded deltas of successive Cursor.Index values + last int32 // most recent Cursor.Index value; used during encoding + initial [4]byte // use slack in size class as initial space for code +} + +// Uses returns the sequence of Cursors of [*ast.Ident]s in this package +// that refer to obj. If obj is nil, the sequence is empty. 
+func (ix *Index) Uses(obj types.Object) iter.Seq[cursor.Cursor] { + return func(yield func(cursor.Cursor) bool) { + if uses := ix.uses[obj]; uses != nil { + var last int32 + for code := uses.code; len(code) > 0; { + delta, n := binary.Uvarint(code) + last += int32(delta) + if !yield(cursor.At(ix.inspect, last)) { + return + } + code = code[n:] + } + } + } +} + +// Used reports whether any of the specified objects are used, in +// other words, obj != nil && Uses(obj) is non-empty for some obj in objs. +// +// (This treatment of nil allows Used to be called directly on the +// result of [Index.Object] so that analyzers can conveniently skip +// packages that don't use a symbol of interest.) +func (ix *Index) Used(objs ...types.Object) bool { + for _, obj := range objs { + if obj != nil && ix.uses[obj] != nil { + return true + } + } + return false +} + +// Def returns the Cursor of the [*ast.Ident] in this package +// that declares the specified object, if any. +func (ix *Index) Def(obj types.Object) (cursor.Cursor, bool) { + cur, ok := ix.def[obj] + return cur, ok +} + +// Package returns the package of the specified path, +// or nil if it is not referenced from this package. +func (ix *Index) Package(path string) *types.Package { + return ix.packages[path] +} + +// Object returns the package-level symbol name within the package of +// the specified path, or nil if the package or symbol does not exist +// or is not visible from this package. +func (ix *Index) Object(path, name string) types.Object { + if pkg := ix.Package(path); pkg != nil { + return pkg.Scope().Lookup(name) + } + return nil +} + +// Selection returns the named method or field belonging to the +// package-level type returned by Object(path, typename). 
+func (ix *Index) Selection(path, typename, name string) types.Object { + if obj := ix.Object(path, typename); obj != nil { + if tname, ok := obj.(*types.TypeName); ok { + obj, _, _ := types.LookupFieldOrMethod(tname.Type(), true, obj.Pkg(), name) + return obj + } + } + return nil +} + +// Calls returns the sequence of cursors for *ast.CallExpr nodes that +// call the specified callee, as defined by [typeutil.Callee]. +// If callee is nil, the sequence is empty. +func (ix *Index) Calls(callee types.Object) iter.Seq[cursor.Cursor] { + return func(yield func(cursor.Cursor) bool) { + for cur := range ix.Uses(callee) { + ek, _ := cur.ParentEdge() + + // The call may be of the form f() or x.f(), + // optionally with parens; ascend from f to call. + // + // It is tempting but wrong to use the first + // CallExpr ancestor: we have to make sure the + // ident is in the CallExpr.Fun position, otherwise + // f(f, f) would have two spurious matches. + // Avoiding Enclosing is also significantly faster. + + // inverse unparen: f -> (f) + for ek == edge.ParenExpr_X { + cur = cur.Parent() + ek, _ = cur.ParentEdge() + } + + // ascend selector: f -> x.f + if ek == edge.SelectorExpr_Sel { + cur = cur.Parent() + ek, _ = cur.ParentEdge() + } + + // inverse unparen again + for ek == edge.ParenExpr_X { + cur = cur.Parent() + ek, _ = cur.ParentEdge() + } + + // ascend from f or x.f to call + if ek == edge.CallExpr_Fun { + curCall := cur.Parent() + call := curCall.Node().(*ast.CallExpr) + if typeutil.Callee(ix.info, call) == callee { + if !yield(curCall) { + return + } + } + } + } + } +} diff --git a/internal/typesinternal/typeindex/typeindex_test.go b/internal/typesinternal/typeindex/typeindex_test.go new file mode 100644 index 00000000000..c8b08dc9d00 --- /dev/null +++ b/internal/typesinternal/typeindex/typeindex_test.go @@ -0,0 +1,159 @@ +// Copyright 2025 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.24 + +package typeindex_test + +import ( + "go/ast" + "slices" + "testing" + + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/packages" + "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/astutil/cursor" + "golang.org/x/tools/internal/testenv" + "golang.org/x/tools/internal/typesinternal/typeindex" +) + +func TestIndex(t *testing.T) { + testenv.NeedsGoPackages(t) + var ( + pkg = loadNetHTTP(t) + inspect = inspector.New(pkg.Syntax) + index = typeindex.New(inspect, pkg.Types, pkg.TypesInfo) + fmtSprintf = index.Object("fmt", "Sprintf") + ) + + // Gather calls and uses of fmt.Sprintf in net/http. + var ( + wantUses []*ast.Ident + wantCalls []*ast.CallExpr + ) + for n := range inspect.PreorderSeq((*ast.CallExpr)(nil), (*ast.Ident)(nil)) { + switch n := n.(type) { + case *ast.CallExpr: + if typeutil.Callee(pkg.TypesInfo, n) == fmtSprintf { + wantCalls = append(wantCalls, n) + } + case *ast.Ident: + if pkg.TypesInfo.Uses[n] == fmtSprintf { + wantUses = append(wantUses, n) + } + } + } + // sanity check (expect about 60 of each) + if wantUses == nil || wantCalls == nil { + t.Fatalf("no calls or uses of fmt.Sprintf in net/http") + } + + var ( + gotUses []*ast.Ident + gotCalls []*ast.CallExpr + ) + for curId := range index.Uses(fmtSprintf) { + gotUses = append(gotUses, curId.Node().(*ast.Ident)) + } + for curCall := range index.Calls(fmtSprintf) { + gotCalls = append(gotCalls, curCall.Node().(*ast.CallExpr)) + } + + if !slices.Equal(gotUses, wantUses) { + t.Errorf("index.Uses(fmt.Sprintf) = %v, want %v", gotUses, wantUses) + } + if !slices.Equal(gotCalls, wantCalls) { + t.Errorf("index.Calls(fmt.Sprintf) = %v, want %v", gotCalls, wantCalls) + } +} + +func loadNetHTTP(tb testing.TB) *packages.Package { + cfg := &packages.Config{Mode: packages.LoadSyntax} + pkgs, err := packages.Load(cfg, "net/http") + if err != nil { + 
tb.Fatal(err) + } + return pkgs[0] +} + +func BenchmarkIndex(b *testing.B) { + // Load net/http, a large package, and find calls to net.Dial. + // + // There is currently exactly one, which provides an extreme + // demonstration of the performance advantage of the Index. + // + // Index construction costs approximately 7x the cursor + // traversal, so it breaks even when it replaces 7 passes. + // The cost of index lookup is approximately zero. + pkg := loadNetHTTP(b) + + // Build the Inspector (~2.8ms). + var inspect *inspector.Inspector + b.Run("inspector.New", func(b *testing.B) { + for b.Loop() { + inspect = inspector.New(pkg.Syntax) + } + }) + + // Build the Index (~6.6ms). + var index *typeindex.Index + b.Run("typeindex.New", func(b *testing.B) { + b.ReportAllocs() // 2.48MB/op + for b.Loop() { + index = typeindex.New(inspect, pkg.Types, pkg.TypesInfo) + } + }) + + target := index.Object("net", "Dial") + + var countA, countB, countC int + + // unoptimized inspect implementation (~1.6ms, 1x) + b.Run("inspect", func(b *testing.B) { + for b.Loop() { + countA = 0 + for _, file := range pkg.Syntax { + ast.Inspect(file, func(n ast.Node) bool { + if call, ok := n.(*ast.CallExpr); ok { + if typeutil.Callee(pkg.TypesInfo, call) == target { + countA++ + } + } + return true + }) + } + } + }) + if countA == 0 { + b.Errorf("target %v not found", target) + } + + // unoptimized cursor implementation (~390us, 4x faster) + b.Run("cursor", func(b *testing.B) { + for b.Loop() { + countB = 0 + for curCall := range cursor.Root(inspect).Preorder((*ast.CallExpr)(nil)) { + call := curCall.Node().(*ast.CallExpr) + if typeutil.Callee(pkg.TypesInfo, call) == target { + countB++ + } + } + } + }) + + // indexed implementation (~120ns, >10,000x faster) + b.Run("index", func(b *testing.B) { + for b.Loop() { + countC = 0 + for range index.Calls(target) { + countC++ + } + } + }) + + if countA != countB || countA != countC { + b.Fatalf("inconsistent results (inspect=%d, cursor=%d, index=%d)", 
countA, countB, countC) + } +} diff --git a/internal/typesinternal/types.go b/internal/typesinternal/types.go index edf0347ec3b..cc244689ef8 100644 --- a/internal/typesinternal/types.go +++ b/internal/typesinternal/types.go @@ -7,6 +7,7 @@ package typesinternal import ( + "go/ast" "go/token" "go/types" "reflect" @@ -127,3 +128,17 @@ func Origin(t NamedOrAlias) NamedOrAlias { func IsPackageLevel(obj types.Object) bool { return obj.Pkg() != nil && obj.Parent() == obj.Pkg().Scope() } + +// NewTypesInfo returns a *types.Info with all maps populated. +func NewTypesInfo() *types.Info { + return &types.Info{ + Types: map[ast.Expr]types.TypeAndValue{}, + Instances: map[*ast.Ident]types.Instance{}, + Defs: map[*ast.Ident]types.Object{}, + Uses: map[*ast.Ident]types.Object{}, + Implicits: map[ast.Node]types.Object{}, + Selections: map[*ast.SelectorExpr]*types.Selection{}, + Scopes: map[ast.Node]*types.Scope{}, + FileVersions: map[*ast.File]string{}, + } +} diff --git a/playground/socket/socket.go b/playground/socket/socket.go index 378edd4c3a5..c7843e59734 100644 --- a/playground/socket/socket.go +++ b/playground/socket/socket.go @@ -28,6 +28,7 @@ import ( "os/exec" "path/filepath" "runtime" + "slices" "strings" "time" "unicode/utf8" @@ -439,12 +440,7 @@ func (p *process) cmd(dir string, args ...string) *exec.Cmd { } func isNacl() bool { - for _, v := range append(Environ(), os.Environ()...) { - if v == "GOOS=nacl" { - return true - } - } - return false + return slices.Contains(append(Environ(), os.Environ()...), "GOOS=nacl") } // naclCmd returns an *exec.Cmd that executes bin under native client. 
diff --git a/playground/socket/socket_test.go b/playground/socket/socket_test.go index d410afea875..942f27e2af5 100644 --- a/playground/socket/socket_test.go +++ b/playground/socket/socket_test.go @@ -52,7 +52,7 @@ func TestLimiter(t *testing.T) { ch := make(chan *Message) go func() { var m Message - for i := 0; i < msgLimit+10; i++ { + for range msgLimit + 10 { ch <- &m } ch <- &Message{Kind: "end"} diff --git a/present/link.go b/present/link.go index ef96bf4ef6b..f6a8be1e693 100644 --- a/present/link.go +++ b/present/link.go @@ -86,10 +86,10 @@ func parseInlineLink(s string) (link string, length int) { // If the URL is http://foo.com, drop the http:// // In other words, render [[http://golang.org]] as: // golang.org - if strings.HasPrefix(rawURL, url.Scheme+"://") { - simpleURL = strings.TrimPrefix(rawURL, url.Scheme+"://") - } else if strings.HasPrefix(rawURL, url.Scheme+":") { - simpleURL = strings.TrimPrefix(rawURL, url.Scheme+":") + if after, ok := strings.CutPrefix(rawURL, url.Scheme+"://"); ok { + simpleURL = after + } else if after, ok := strings.CutPrefix(rawURL, url.Scheme+":"); ok { + simpleURL = after } } return renderLink(rawURL, simpleURL), end + 2 diff --git a/present/parse_test.go b/present/parse_test.go index dad57ea77ca..bb0fe72fad0 100644 --- a/present/parse_test.go +++ b/present/parse_test.go @@ -27,7 +27,6 @@ func TestTestdata(t *testing.T) { } files := append(filesP, filesMD...) for _, file := range files { - file := file name := filepath.Base(file) if name == "README" { continue diff --git a/refactor/eg/eg.go b/refactor/eg/eg.go index 15dfbd6ca0f..8de1fd7d1de 100644 --- a/refactor/eg/eg.go +++ b/refactor/eg/eg.go @@ -8,12 +8,14 @@ package eg // import "golang.org/x/tools/refactor/eg" import ( "bytes" + "errors" "fmt" "go/ast" "go/format" "go/printer" "go/token" "go/types" + "maps" "os" ) @@ -158,6 +160,10 @@ type Transformer struct { // described in the package documentation. // tmplInfo is the type information for tmplFile. 
func NewTransformer(fset *token.FileSet, tmplPkg *types.Package, tmplFile *ast.File, tmplInfo *types.Info, verbose bool) (*Transformer, error) { + // These maps are required by types.Info.TypeOf. + if tmplInfo.Types == nil || tmplInfo.Defs == nil || tmplInfo.Uses == nil { + return nil, errors.New("eg.NewTransformer: types.Info argument missing one of Types, Defs or Uses") + } // Check the template. beforeSig := funcSig(tmplPkg, "before") if beforeSig == nil { @@ -350,18 +356,10 @@ func stmtAndExpr(fn *ast.FuncDecl) ([]ast.Stmt, ast.Expr, error) { // mergeTypeInfo adds type info from src to dst. func mergeTypeInfo(dst, src *types.Info) { - for k, v := range src.Types { - dst.Types[k] = v - } - for k, v := range src.Defs { - dst.Defs[k] = v - } - for k, v := range src.Uses { - dst.Uses[k] = v - } - for k, v := range src.Selections { - dst.Selections[k] = v - } + maps.Copy(dst.Types, src.Types) + maps.Copy(dst.Defs, src.Defs) + maps.Copy(dst.Uses, src.Uses) + maps.Copy(dst.Selections, src.Selections) } // (debugging only) diff --git a/refactor/satisfy/find.go b/refactor/satisfy/find.go index a897c3c2fd4..766cc575387 100644 --- a/refactor/satisfy/find.go +++ b/refactor/satisfy/find.go @@ -84,6 +84,9 @@ type Finder struct { // info.{Defs,Uses,Selections,Types} must have been populated by the // type-checker. func (f *Finder) Find(info *types.Info, files []*ast.File) { + if info.Defs == nil || info.Uses == nil || info.Selections == nil || info.Types == nil { + panic("Finder.Find: one of info.{Defs,Uses,Selections.Types} is not populated") + } if f.Result == nil { f.Result = make(map[Constraint]bool) }