diff --git a/internal/golangorgx/gopls/analysis/deprecated/deprecated.go b/internal/golangorgx/gopls/analysis/deprecated/deprecated.go deleted file mode 100644 index b795344935f..00000000000 --- a/internal/golangorgx/gopls/analysis/deprecated/deprecated.go +++ /dev/null @@ -1,268 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package deprecated - -import ( - "bytes" - "go/ast" - "go/format" - "go/token" - "go/types" - "strconv" - "strings" - - _ "embed" - - "cuelang.org/go/internal/golangorgx/tools/analysisinternal" - "cuelang.org/go/internal/golangorgx/tools/typeparams" - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -//go:embed doc.go -var doc string - -var Analyzer = &analysis.Analyzer{ - Name: "deprecated", - Doc: analysisinternal.MustExtractDoc(doc, "deprecated"), - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: checkDeprecated, - FactTypes: []analysis.Fact{(*deprecationFact)(nil)}, - RunDespiteErrors: true, - URL: "https://pkg.go.dev/cuelang.org/go/internal/golangorgx/gopls/analysis/deprecated", -} - -// checkDeprecated is a simplified copy of staticcheck.CheckDeprecated. -func checkDeprecated(pass *analysis.Pass) (interface{}, error) { - inspector := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - deprs, err := collectDeprecatedNames(pass, inspector) - if err != nil || (len(deprs.packages) == 0 && len(deprs.objects) == 0) { - return nil, err - } - - reportDeprecation := func(depr *deprecationFact, node ast.Node) { - // TODO(hyangah): staticcheck.CheckDeprecated has more complex logic. Do we need it here? - // TODO(hyangah): Scrub depr.Msg. depr.Msg may contain Go comments - // markdown syntaxes but LSP diagnostics do not support markdown syntax. - - buf := new(bytes.Buffer) - if err := format.Node(buf, pass.Fset, node); err != nil { - // This shouldn't happen but let's be conservative. - buf.Reset() - buf.WriteString("declaration") - } - pass.ReportRangef(node, "%s is deprecated: %s", buf, depr.Msg) - } - - nodeFilter := []ast.Node{(*ast.SelectorExpr)(nil)} - inspector.Preorder(nodeFilter, func(node ast.Node) { - // Caveat: this misses dot-imported objects - sel, ok := node.(*ast.SelectorExpr) - if !ok { - return - } - - obj := pass.TypesInfo.ObjectOf(sel.Sel) - if obj_, ok := obj.(*types.Func); ok { - obj = typeparams.OriginMethod(obj_) - } - if obj == nil || obj.Pkg() == nil { - // skip invalid sel.Sel. - return - } - - if obj.Pkg() == pass.Pkg { - // A package is allowed to use its own deprecated objects - return - } - - // A package "foo" has two related packages "foo_test" and "foo.test", for external tests and the package main - // generated by 'go test' respectively. "foo_test" can import and use "foo", "foo.test" imports and uses "foo" - // and "foo_test". - - if strings.TrimSuffix(pass.Pkg.Path(), "_test") == obj.Pkg().Path() { - // foo_test (the external tests of foo) can use objects from foo. - return - } - if strings.TrimSuffix(pass.Pkg.Path(), ".test") == obj.Pkg().Path() { - // foo.test (the main package of foo's tests) can use objects from foo. - return - } - if strings.TrimSuffix(pass.Pkg.Path(), ".test") == strings.TrimSuffix(obj.Pkg().Path(), "_test") { - // foo.test (the main package of foo's tests) can use objects from foo's external tests. 
- return - } - - if depr, ok := deprs.objects[obj]; ok { - reportDeprecation(depr, sel) - } - }) - - for _, f := range pass.Files { - for _, spec := range f.Imports { - var imp *types.Package - var obj types.Object - if spec.Name != nil { - obj = pass.TypesInfo.ObjectOf(spec.Name) - } else { - obj = pass.TypesInfo.Implicits[spec] - } - pkgName, ok := obj.(*types.PkgName) - if !ok { - continue - } - imp = pkgName.Imported() - - path, err := strconv.Unquote(spec.Path.Value) - if err != nil { - continue - } - pkgPath := pass.Pkg.Path() - if strings.TrimSuffix(pkgPath, "_test") == path { - // foo_test can import foo - continue - } - if strings.TrimSuffix(pkgPath, ".test") == path { - // foo.test can import foo - continue - } - if strings.TrimSuffix(pkgPath, ".test") == strings.TrimSuffix(path, "_test") { - // foo.test can import foo_test - continue - } - if depr, ok := deprs.packages[imp]; ok { - reportDeprecation(depr, spec.Path) - } - } - } - return nil, nil -} - -type deprecationFact struct{ Msg string } - -func (*deprecationFact) AFact() {} -func (d *deprecationFact) String() string { return "Deprecated: " + d.Msg } - -type deprecatedNames struct { - objects map[types.Object]*deprecationFact - packages map[*types.Package]*deprecationFact -} - -// collectDeprecatedNames collects deprecated identifiers and publishes -// them both as Facts and the return value. This is a simplified copy -// of staticcheck's fact_deprecated analyzer. -func collectDeprecatedNames(pass *analysis.Pass, ins *inspector.Inspector) (deprecatedNames, error) { - extractDeprecatedMessage := func(docs []*ast.CommentGroup) string { - for _, doc := range docs { - if doc == nil { - continue - } - parts := strings.Split(doc.Text(), "\n\n") - for _, part := range parts { - if !strings.HasPrefix(part, "Deprecated: ") { - continue - } - alt := part[len("Deprecated: "):] - alt = strings.Replace(alt, "\n", " ", -1) - return strings.TrimSpace(alt) - } - } - return "" - } - - doDocs := func(names []*ast.Ident, docs *ast.CommentGroup) { - alt := extractDeprecatedMessage([]*ast.CommentGroup{docs}) - if alt == "" { - return - } - - for _, name := range names { - obj := pass.TypesInfo.ObjectOf(name) - pass.ExportObjectFact(obj, &deprecationFact{alt}) - } - } - - var docs []*ast.CommentGroup - for _, f := range pass.Files { - docs = append(docs, f.Doc) - } - if alt := extractDeprecatedMessage(docs); alt != "" { - // Don't mark package syscall as deprecated, even though - // it is. A lot of people still use it for simple - // constants like SIGKILL, and I am not comfortable - // telling them to use x/sys for that. - if pass.Pkg.Path() != "syscall" { - pass.ExportPackageFact(&deprecationFact{alt}) - } - } - nodeFilter := []ast.Node{ - (*ast.GenDecl)(nil), - (*ast.FuncDecl)(nil), - (*ast.TypeSpec)(nil), - (*ast.ValueSpec)(nil), - (*ast.File)(nil), - (*ast.StructType)(nil), - (*ast.InterfaceType)(nil), - } - ins.Preorder(nodeFilter, func(node ast.Node) { - var names []*ast.Ident - var docs *ast.CommentGroup - switch node := node.(type) { - case *ast.GenDecl: - switch node.Tok { - case token.TYPE, token.CONST, token.VAR: - docs = node.Doc - for i := range node.Specs { - switch n := node.Specs[i].(type) { - case *ast.ValueSpec: - names = append(names, n.Names...) 
- case *ast.TypeSpec: - names = append(names, n.Name) - } - } - default: - return - } - case *ast.FuncDecl: - docs = node.Doc - names = []*ast.Ident{node.Name} - case *ast.TypeSpec: - docs = node.Doc - names = []*ast.Ident{node.Name} - case *ast.ValueSpec: - docs = node.Doc - names = node.Names - case *ast.StructType: - for _, field := range node.Fields.List { - doDocs(field.Names, field.Doc) - } - case *ast.InterfaceType: - for _, field := range node.Methods.List { - doDocs(field.Names, field.Doc) - } - } - if docs != nil && len(names) > 0 { - doDocs(names, docs) - } - }) - - // Every identifier is potentially deprecated, so we will need - // to look up facts a lot. Construct maps of all facts propagated - // to this pass for fast lookup. - out := deprecatedNames{ - objects: map[types.Object]*deprecationFact{}, - packages: map[*types.Package]*deprecationFact{}, - } - for _, fact := range pass.AllObjectFacts() { - out.objects[fact.Object] = fact.Fact.(*deprecationFact) - } - for _, fact := range pass.AllPackageFacts() { - out.packages[fact.Package] = fact.Fact.(*deprecationFact) - } - - return out, nil -} diff --git a/internal/golangorgx/gopls/analysis/deprecated/doc.go b/internal/golangorgx/gopls/analysis/deprecated/doc.go deleted file mode 100644 index 0d96b86b302..00000000000 --- a/internal/golangorgx/gopls/analysis/deprecated/doc.go +++ /dev/null @@ -1,16 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package deprecated defines an Analyzer that marks deprecated symbols and package imports. -// -// # Analyzer deprecated -// -// deprecated: check for use of deprecated identifiers -// -// The deprecated analyzer looks for deprecated symbols and package -// imports. -// -// See https://go.dev/wiki/Deprecated to learn about Go's convention -// for documenting and signaling deprecated identifiers. -package deprecated diff --git a/internal/golangorgx/gopls/analysis/embeddirective/doc.go b/internal/golangorgx/gopls/analysis/embeddirective/doc.go deleted file mode 100644 index bfed47f14f4..00000000000 --- a/internal/golangorgx/gopls/analysis/embeddirective/doc.go +++ /dev/null @@ -1,18 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package embeddirective defines an Analyzer that validates //go:embed directives. -// The analyzer defers fixes to its parent golang.Analyzer. -// -// # Analyzer embed -// -// embed: check //go:embed directive usage -// -// This analyzer checks that the embed package is imported if //go:embed -// directives are present, providing a suggested fix to add the import if -// it is missing. -// -// This analyzer also checks that //go:embed directives precede the -// declaration of a single variable. -package embeddirective diff --git a/internal/golangorgx/gopls/analysis/embeddirective/embeddirective.go b/internal/golangorgx/gopls/analysis/embeddirective/embeddirective.go deleted file mode 100644 index 752d66ad22e..00000000000 --- a/internal/golangorgx/gopls/analysis/embeddirective/embeddirective.go +++ /dev/null @@ -1,165 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package embeddirective - -import ( - _ "embed" - "go/ast" - "go/token" - "go/types" - "strings" - - "cuelang.org/go/internal/golangorgx/tools/analysisinternal" - "golang.org/x/tools/go/analysis" -) - -//go:embed doc.go -var doc string - -var Analyzer = &analysis.Analyzer{ - Name: "embed", - Doc: analysisinternal.MustExtractDoc(doc, "embed"), - Run: run, - RunDespiteErrors: true, - URL: "https://pkg.go.dev/cuelang.org/go/internal/golangorgx/gopls/analysis/embeddirective", -} - -const FixCategory = "addembedimport" // recognized by gopls ApplyFix - -func run(pass *analysis.Pass) (interface{}, error) { - for _, f := range pass.Files { - comments := embedDirectiveComments(f) - if len(comments) == 0 { - continue // nothing to check - } - - hasEmbedImport := false - for _, imp := range f.Imports { - if imp.Path.Value == `"embed"` { - hasEmbedImport = true - break - } - } - - for _, c := range comments { - pos, end := c.Pos(), c.Pos()+token.Pos(len("//go:embed")) - - if !hasEmbedImport { - pass.Report(analysis.Diagnostic{ - Pos: pos, - End: end, - Message: `must import "embed" when using go:embed directives`, - Category: FixCategory, - SuggestedFixes: []analysis.SuggestedFix{{ - Message: `Add missing "embed" import`, - // No TextEdits => computed by a gopls command. - }}, - }) - } - - var msg string - spec := nextVarSpec(c, f) - switch { - case spec == nil: - msg = `go:embed directives must precede a "var" declaration` - case len(spec.Names) != 1: - msg = "declarations following go:embed directives must define a single variable" - case len(spec.Values) > 0: - msg = "declarations following go:embed directives must not specify a value" - case !embeddableType(pass.TypesInfo.Defs[spec.Names[0]]): - msg = "declarations following go:embed directives must be of type string, []byte or embed.FS" - } - if msg != "" { - pass.Report(analysis.Diagnostic{ - Pos: pos, - End: end, - Message: msg, - }) - } - } - } - return nil, nil -} - -// embedDirectiveComments returns all comments in f that contains a //go:embed directive. -func embedDirectiveComments(f *ast.File) []*ast.Comment { - comments := []*ast.Comment{} - for _, cg := range f.Comments { - for _, c := range cg.List { - if strings.HasPrefix(c.Text, "//go:embed ") { - comments = append(comments, c) - } - } - } - return comments -} - -// nextVarSpec returns the ValueSpec for the variable declaration immediately following -// the go:embed comment, or nil if the next declaration is not a variable declaration. -func nextVarSpec(com *ast.Comment, f *ast.File) *ast.ValueSpec { - // Embed directives must be followed by a declaration of one variable with no value. - // There may be comments and empty lines between the directive and the declaration. - var nextDecl ast.Decl - for _, d := range f.Decls { - if com.End() < d.End() { - nextDecl = d - break - } - } - if nextDecl == nil || nextDecl.Pos() == token.NoPos { - return nil - } - decl, ok := nextDecl.(*ast.GenDecl) - if !ok { - return nil - } - if decl.Tok != token.VAR { - return nil - } - - // var declarations can be both freestanding and blocks (with parenthesis). - // Only the first variable spec following the directive is interesting. - var nextSpec ast.Spec - for _, s := range decl.Specs { - if com.End() < s.End() { - nextSpec = s - break - } - } - if nextSpec == nil { - return nil - } - spec, ok := nextSpec.(*ast.ValueSpec) - if !ok { - // Invalid AST, but keep going. - return nil - } - return spec -} - -// embeddableType in go:embed directives are string, []byte or embed.FS. 
-func embeddableType(o types.Object) bool { - if o == nil { - return false - } - - // For embed.FS the underlying type is an implementation detail. - // As long as the named type resolves to embed.FS, it is OK. - if named, ok := o.Type().(*types.Named); ok { - obj := named.Obj() - if obj.Pkg() != nil && obj.Pkg().Path() == "embed" && obj.Name() == "FS" { - return true - } - } - - switch v := o.Type().Underlying().(type) { - case *types.Basic: - return types.Identical(v, types.Typ[types.String]) - case *types.Slice: - return types.Identical(v.Elem(), types.Typ[types.Byte]) - } - - return false -} diff --git a/internal/golangorgx/gopls/analysis/fillreturns/doc.go b/internal/golangorgx/gopls/analysis/fillreturns/doc.go deleted file mode 100644 index 584aec47db9..00000000000 --- a/internal/golangorgx/gopls/analysis/fillreturns/doc.go +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package fillreturns defines an Analyzer that will attempt to -// automatically fill in a return statement that has missing -// values with zero value elements. -// -// # Analyzer fillreturns -// -// fillreturns: suggest fixes for errors due to an incorrect number of return values -// -// This checker provides suggested fixes for type errors of the -// type "wrong number of return values (want %d, got %d)". For example: -// -// func m() (int, string, *bool, error) { -// return -// } -// -// will turn into -// -// func m() (int, string, *bool, error) { -// return 0, "", nil, nil -// } -// -// This functionality is similar to https://github.com/sqs/goreturns. -package fillreturns diff --git a/internal/golangorgx/gopls/analysis/fillreturns/fillreturns.go b/internal/golangorgx/gopls/analysis/fillreturns/fillreturns.go deleted file mode 100644 index 893db0771c5..00000000000 --- a/internal/golangorgx/gopls/analysis/fillreturns/fillreturns.go +++ /dev/null @@ -1,264 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package fillreturns - -import ( - "bytes" - _ "embed" - "fmt" - "go/ast" - "go/format" - "go/types" - "regexp" - "strings" - - "cuelang.org/go/internal/golangorgx/tools/analysisinternal" - "cuelang.org/go/internal/golangorgx/tools/fuzzy" - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/ast/astutil" -) - -//go:embed doc.go -var doc string - -var Analyzer = &analysis.Analyzer{ - Name: "fillreturns", - Doc: analysisinternal.MustExtractDoc(doc, "fillreturns"), - Run: run, - RunDespiteErrors: true, - URL: "https://pkg.go.dev/cuelang.org/go/internal/golangorgx/gopls/analysis/fillreturns", -} - -func run(pass *analysis.Pass) (interface{}, error) { - info := pass.TypesInfo - if info == nil { - return nil, fmt.Errorf("nil TypeInfo") - } - -outer: - for _, typeErr := range pass.TypeErrors { - // Filter out the errors that are not relevant to this analyzer. - if !FixesError(typeErr) { - continue - } - var file *ast.File - for _, f := range pass.Files { - if f.Pos() <= typeErr.Pos && typeErr.Pos <= f.End() { - file = f - break - } - } - if file == nil { - continue - } - - // Get the end position of the error. - // (This heuristic assumes that the buffer is formatted, - // at least up to the end position of the error.) 
- var buf bytes.Buffer - if err := format.Node(&buf, pass.Fset, file); err != nil { - continue - } - typeErrEndPos := analysisinternal.TypeErrorEndPos(pass.Fset, buf.Bytes(), typeErr.Pos) - - // TODO(rfindley): much of the error handling code below returns, when it - // should probably continue. - - // Get the path for the relevant range. - path, _ := astutil.PathEnclosingInterval(file, typeErr.Pos, typeErrEndPos) - if len(path) == 0 { - return nil, nil - } - - // Find the enclosing return statement. - var ret *ast.ReturnStmt - var retIdx int - for i, n := range path { - if r, ok := n.(*ast.ReturnStmt); ok { - ret = r - retIdx = i - break - } - } - if ret == nil { - return nil, nil - } - - // Get the function type that encloses the ReturnStmt. - var enclosingFunc *ast.FuncType - for _, n := range path[retIdx+1:] { - switch node := n.(type) { - case *ast.FuncLit: - enclosingFunc = node.Type - case *ast.FuncDecl: - enclosingFunc = node.Type - } - if enclosingFunc != nil { - break - } - } - if enclosingFunc == nil || enclosingFunc.Results == nil { - continue - } - - // Skip any generic enclosing functions, since type parameters don't - // have 0 values. - // TODO(rfindley): We should be able to handle this if the return - // values are all concrete types. - if tparams := enclosingFunc.TypeParams; tparams != nil && tparams.NumFields() > 0 { - return nil, nil - } - - // Find the function declaration that encloses the ReturnStmt. - var outer *ast.FuncDecl - for _, p := range path { - if p, ok := p.(*ast.FuncDecl); ok { - outer = p - break - } - } - if outer == nil { - return nil, nil - } - - // Skip any return statements that contain function calls with multiple - // return values. - for _, expr := range ret.Results { - e, ok := expr.(*ast.CallExpr) - if !ok { - continue - } - if tup, ok := info.TypeOf(e).(*types.Tuple); ok && tup.Len() > 1 { - continue outer - } - } - - // Duplicate the return values to track which values have been matched. - remaining := make([]ast.Expr, len(ret.Results)) - copy(remaining, ret.Results) - - fixed := make([]ast.Expr, len(enclosingFunc.Results.List)) - - // For each value in the return function declaration, find the leftmost element - // in the return statement that has the desired type. If no such element exists, - // fill in the missing value with the appropriate "zero" value. - // Beware that type information may be incomplete. - var retTyps []types.Type - for _, ret := range enclosingFunc.Results.List { - retTyp := info.TypeOf(ret.Type) - if retTyp == nil { - return nil, nil - } - retTyps = append(retTyps, retTyp) - } - matches := analysisinternal.MatchingIdents(retTyps, file, ret.Pos(), info, pass.Pkg) - for i, retTyp := range retTyps { - var match ast.Expr - var idx int - for j, val := range remaining { - if t := info.TypeOf(val); t == nil || !matchingTypes(t, retTyp) { - continue - } - if !analysisinternal.IsZeroValue(val) { - match, idx = val, j - break - } - // If the current match is a "zero" value, we keep searching in - // case we find a non-"zero" value match. If we do not find a - // non-"zero" value, we will use the "zero" value. - match, idx = val, j - } - - if match != nil { - fixed[i] = match - remaining = append(remaining[:idx], remaining[idx+1:]...) - } else { - names, ok := matches[retTyp] - if !ok { - return nil, fmt.Errorf("invalid return type: %v", retTyp) - } - // Find the identifier most similar to the return type. - // If no identifier matches the pattern, generate a zero value. 
- if best := fuzzy.BestMatch(retTyp.String(), names); best != "" { - fixed[i] = ast.NewIdent(best) - } else if zero := analysisinternal.ZeroValue(file, pass.Pkg, retTyp); zero != nil { - fixed[i] = zero - } else { - return nil, nil - } - } - } - - // Remove any non-matching "zero values" from the leftover values. - var nonZeroRemaining []ast.Expr - for _, expr := range remaining { - if !analysisinternal.IsZeroValue(expr) { - nonZeroRemaining = append(nonZeroRemaining, expr) - } - } - // Append leftover return values to end of new return statement. - fixed = append(fixed, nonZeroRemaining...) - - newRet := &ast.ReturnStmt{ - Return: ret.Pos(), - Results: fixed, - } - - // Convert the new return statement AST to text. - var newBuf bytes.Buffer - if err := format.Node(&newBuf, pass.Fset, newRet); err != nil { - return nil, err - } - - pass.Report(analysis.Diagnostic{ - Pos: typeErr.Pos, - End: typeErrEndPos, - Message: typeErr.Msg, - SuggestedFixes: []analysis.SuggestedFix{{ - Message: "Fill in return values", - TextEdits: []analysis.TextEdit{{ - Pos: ret.Pos(), - End: ret.End(), - NewText: newBuf.Bytes(), - }}, - }}, - }) - } - return nil, nil -} - -func matchingTypes(want, got types.Type) bool { - if want == got || types.Identical(want, got) { - return true - } - // Code segment to help check for untyped equality from (golang/go#32146). - if rhs, ok := want.(*types.Basic); ok && rhs.Info()&types.IsUntyped > 0 { - if lhs, ok := got.Underlying().(*types.Basic); ok { - return rhs.Info()&types.IsConstType == lhs.Info()&types.IsConstType - } - } - return types.AssignableTo(want, got) || types.ConvertibleTo(want, got) -} - -// Error messages have changed across Go versions. These regexps capture recent -// incarnations. -// -// TODO(rfindley): once error codes are exported and exposed via go/packages, -// use error codes rather than string matching here. -var wrongReturnNumRegexes = []*regexp.Regexp{ - regexp.MustCompile(`wrong number of return values \(want (\d+), got (\d+)\)`), - regexp.MustCompile(`too many return values`), - regexp.MustCompile(`not enough return values`), -} - -func FixesError(err types.Error) bool { - msg := strings.TrimSpace(err.Msg) - for _, rx := range wrongReturnNumRegexes { - if rx.MatchString(msg) { - return true - } - } - return false -} diff --git a/internal/golangorgx/gopls/analysis/fillstruct/fillstruct.go b/internal/golangorgx/gopls/analysis/fillstruct/fillstruct.go deleted file mode 100644 index 03dd0c427b6..00000000000 --- a/internal/golangorgx/gopls/analysis/fillstruct/fillstruct.go +++ /dev/null @@ -1,502 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package fillstruct defines an Analyzer that automatically -// fills in a struct declaration with zero value elements for each field. -// -// The analyzer's diagnostic is merely a prompt. -// The actual fix is created by a separate direct call from gopls to -// the SuggestedFixes function. -// Tests of Analyzer.Run can be found in ./testdata/src. -// Tests of the SuggestedFixes logic live in ../../testdata/fillstruct. 
-package fillstruct - -import ( - "bytes" - "fmt" - "go/ast" - "go/format" - "go/token" - "go/types" - "strings" - "unicode" - - "cuelang.org/go/internal/golangorgx/gopls/util/safetoken" - "cuelang.org/go/internal/golangorgx/tools/analysisinternal" - "cuelang.org/go/internal/golangorgx/tools/fuzzy" - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/go/ast/inspector" -) - -// Diagnose computes diagnostics for fillable struct literals overlapping with -// the provided start and end position. -// -// The diagnostic contains a lazy fix; the actual patch is computed -// (via the ApplyFix command) by a call to [SuggestedFix]. -// -// If either start or end is invalid, the entire package is inspected. -func Diagnose(inspect *inspector.Inspector, start, end token.Pos, pkg *types.Package, info *types.Info) []analysis.Diagnostic { - var diags []analysis.Diagnostic - nodeFilter := []ast.Node{(*ast.CompositeLit)(nil)} - inspect.Preorder(nodeFilter, func(n ast.Node) { - expr := n.(*ast.CompositeLit) - - if (start.IsValid() && expr.End() < start) || (end.IsValid() && expr.Pos() > end) { - return // non-overlapping - } - - typ := info.TypeOf(expr) - if typ == nil { - return - } - - // Find reference to the type declaration of the struct being initialized. - typ = deref(typ) - tStruct, ok := typ.Underlying().(*types.Struct) - if !ok { - return - } - // Inv: typ is the possibly-named struct type. - - fieldCount := tStruct.NumFields() - - // Skip any struct that is already populated or that has no fields. - if fieldCount == 0 || fieldCount == len(expr.Elts) { - return - } - - // Are any fields in need of filling? - var fillableFields []string - for i := 0; i < fieldCount; i++ { - field := tStruct.Field(i) - // Ignore fields that are not accessible in the current package. - if field.Pkg() != nil && field.Pkg() != pkg && !field.Exported() { - continue - } - fillableFields = append(fillableFields, fmt.Sprintf("%s: %s", field.Name(), field.Type().String())) - } - if len(fillableFields) == 0 { - return - } - - // Derive a name for the struct type. - var name string - if typ != tStruct { - // named struct type (e.g. pkg.S[T]) - name = types.TypeString(typ, types.RelativeTo(pkg)) - } else { - // anonymous struct type - totalFields := len(fillableFields) - const maxLen = 20 - // Find the index to cut off printing of fields. - var i, fieldLen int - for i = range fillableFields { - if fieldLen > maxLen { - break - } - fieldLen += len(fillableFields[i]) - } - fillableFields = fillableFields[:i] - if i < totalFields { - fillableFields = append(fillableFields, "...") - } - name = fmt.Sprintf("anonymous struct{ %s }", strings.Join(fillableFields, ", ")) - } - diags = append(diags, analysis.Diagnostic{ - Message: fmt.Sprintf("%s literal has missing fields", name), - Pos: expr.Pos(), - End: expr.End(), - Category: FixCategory, - SuggestedFixes: []analysis.SuggestedFix{{ - Message: fmt.Sprintf("Fill %s", name), - // No TextEdits => computed later by gopls. - }}, - }) - }) - - return diags -} - -const FixCategory = "fillstruct" // recognized by gopls ApplyFix - -// SuggestedFix computes the suggested fix for the kinds of -// diagnostics produced by the Analyzer above. 
-func SuggestedFix(fset *token.FileSet, start, end token.Pos, content []byte, file *ast.File, pkg *types.Package, info *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) { - if info == nil { - return nil, nil, fmt.Errorf("nil types.Info") - } - - pos := start // don't use the end - - // TODO(rstambler): Using ast.Inspect would probably be more efficient than - // calling PathEnclosingInterval. Switch this approach. - path, _ := astutil.PathEnclosingInterval(file, pos, pos) - if len(path) == 0 { - return nil, nil, fmt.Errorf("no enclosing ast.Node") - } - var expr *ast.CompositeLit - for _, n := range path { - if node, ok := n.(*ast.CompositeLit); ok { - expr = node - break - } - } - - typ := info.TypeOf(expr) - if typ == nil { - return nil, nil, fmt.Errorf("no composite literal") - } - - // Find reference to the type declaration of the struct being initialized. - typ = deref(typ) - tStruct, ok := typ.Underlying().(*types.Struct) - if !ok { - return nil, nil, fmt.Errorf("%s is not a (pointer to) struct type", - types.TypeString(typ, types.RelativeTo(pkg))) - } - // Inv: typ is the possibly-named struct type. - - fieldCount := tStruct.NumFields() - - // Check which types have already been filled in. (we only want to fill in - // the unfilled types, or else we'll blat user-supplied details) - prefilledFields := map[string]ast.Expr{} - for _, e := range expr.Elts { - if kv, ok := e.(*ast.KeyValueExpr); ok { - if key, ok := kv.Key.(*ast.Ident); ok { - prefilledFields[key.Name] = kv.Value - } - } - } - - // Use a new fileset to build up a token.File for the new composite - // literal. We need one line for foo{, one line for }, and one line for - // each field we're going to set. format.Node only cares about line - // numbers, so we don't need to set columns, and each line can be - // 1 byte long. - // TODO(adonovan): why is this necessary? The position information - // is going to be wrong for the existing trees in prefilledFields. - // Can't the formatter just do its best with an empty fileset? - fakeFset := token.NewFileSet() - tok := fakeFset.AddFile("", -1, fieldCount+2) - - line := 2 // account for 1-based lines and the left brace - var fieldTyps []types.Type - for i := 0; i < fieldCount; i++ { - field := tStruct.Field(i) - // Ignore fields that are not accessible in the current package. - if field.Pkg() != nil && field.Pkg() != pkg && !field.Exported() { - fieldTyps = append(fieldTyps, nil) - continue - } - fieldTyps = append(fieldTyps, field.Type()) - } - matches := analysisinternal.MatchingIdents(fieldTyps, file, start, info, pkg) - var elts []ast.Expr - for i, fieldTyp := range fieldTyps { - if fieldTyp == nil { - continue // TODO(adonovan): is this reachable? - } - fieldName := tStruct.Field(i).Name() - - tok.AddLine(line - 1) // add 1 byte per line - if line > tok.LineCount() { - panic(fmt.Sprintf("invalid line number %v (of %v) for fillstruct", line, tok.LineCount())) - } - pos := tok.LineStart(line) - - kv := &ast.KeyValueExpr{ - Key: &ast.Ident{ - NamePos: pos, - Name: fieldName, - }, - Colon: pos, - } - if expr, ok := prefilledFields[fieldName]; ok { - kv.Value = expr - } else { - names, ok := matches[fieldTyp] - if !ok { - return nil, nil, fmt.Errorf("invalid struct field type: %v", fieldTyp) - } - - // Find the name most similar to the field name. - // If no name matches the pattern, generate a zero value. - // NOTE: We currently match on the name of the field key rather than the field type. 
- if best := fuzzy.BestMatch(fieldName, names); best != "" { - kv.Value = ast.NewIdent(best) - } else if v := populateValue(file, pkg, fieldTyp); v != nil { - kv.Value = v - } else { - return nil, nil, nil // no fix to suggest - } - } - elts = append(elts, kv) - line++ - } - - // If all of the struct's fields are unexported, we have nothing to do. - if len(elts) == 0 { - return nil, nil, fmt.Errorf("no elements to fill") - } - - // Add the final line for the right brace. Offset is the number of - // bytes already added plus 1. - tok.AddLine(len(elts) + 1) - line = len(elts) + 2 - if line > tok.LineCount() { - panic(fmt.Sprintf("invalid line number %v (of %v) for fillstruct", line, tok.LineCount())) - } - - cl := &ast.CompositeLit{ - Type: expr.Type, - Lbrace: tok.LineStart(1), - Elts: elts, - Rbrace: tok.LineStart(line), - } - - // Find the line on which the composite literal is declared. - split := bytes.Split(content, []byte("\n")) - lineNumber := safetoken.StartPosition(fset, expr.Lbrace).Line - firstLine := split[lineNumber-1] // lines are 1-indexed - - // Trim the whitespace from the left of the line, and use the index - // to get the amount of whitespace on the left. - trimmed := bytes.TrimLeftFunc(firstLine, unicode.IsSpace) - index := bytes.Index(firstLine, trimmed) - whitespace := firstLine[:index] - - // First pass through the formatter: turn the expr into a string. - var formatBuf bytes.Buffer - if err := format.Node(&formatBuf, fakeFset, cl); err != nil { - return nil, nil, fmt.Errorf("failed to run first format on:\n%s\ngot err: %v", cl.Type, err) - } - sug := indent(formatBuf.Bytes(), whitespace) - - if len(prefilledFields) > 0 { - // Attempt a second pass through the formatter to line up columns. - sourced, err := format.Source(sug) - if err == nil { - sug = indent(sourced, whitespace) - } - } - - return fset, &analysis.SuggestedFix{ - TextEdits: []analysis.TextEdit{ - { - Pos: expr.Pos(), - End: expr.End(), - NewText: sug, - }, - }, - }, nil -} - -// indent works line by line through str, indenting (prefixing) each line with -// ind. -func indent(str, ind []byte) []byte { - split := bytes.Split(str, []byte("\n")) - newText := bytes.NewBuffer(nil) - for i, s := range split { - if len(s) == 0 { - continue - } - // Don't add the extra indentation to the first line. - if i != 0 { - newText.Write(ind) - } - newText.Write(s) - if i < len(split)-1 { - newText.WriteByte('\n') - } - } - return newText.Bytes() -} - -// populateValue constructs an expression to fill the value of a struct field. -// -// When the type of a struct field is a basic literal or interface, we return -// default values. For other types, such as maps, slices, and channels, we create -// empty expressions such as []T{} or make(chan T) rather than using default values. -// -// The reasoning here is that users will call fillstruct with the intention of -// initializing the struct, in which case setting these fields to nil has no effect. -// -// populateValue returns nil if the value cannot be filled. 
-func populateValue(f *ast.File, pkg *types.Package, typ types.Type) ast.Expr { - switch u := typ.Underlying().(type) { - case *types.Basic: - switch { - case u.Info()&types.IsNumeric != 0: - return &ast.BasicLit{Kind: token.INT, Value: "0"} - case u.Info()&types.IsBoolean != 0: - return &ast.Ident{Name: "false"} - case u.Info()&types.IsString != 0: - return &ast.BasicLit{Kind: token.STRING, Value: `""`} - case u.Kind() == types.UnsafePointer: - return ast.NewIdent("nil") - case u.Kind() == types.Invalid: - return nil - default: - panic(fmt.Sprintf("unknown basic type %v", u)) - } - - case *types.Map: - k := analysisinternal.TypeExpr(f, pkg, u.Key()) - v := analysisinternal.TypeExpr(f, pkg, u.Elem()) - if k == nil || v == nil { - return nil - } - return &ast.CompositeLit{ - Type: &ast.MapType{ - Key: k, - Value: v, - }, - } - case *types.Slice: - s := analysisinternal.TypeExpr(f, pkg, u.Elem()) - if s == nil { - return nil - } - return &ast.CompositeLit{ - Type: &ast.ArrayType{ - Elt: s, - }, - } - - case *types.Array: - a := analysisinternal.TypeExpr(f, pkg, u.Elem()) - if a == nil { - return nil - } - return &ast.CompositeLit{ - Type: &ast.ArrayType{ - Elt: a, - Len: &ast.BasicLit{ - Kind: token.INT, Value: fmt.Sprintf("%v", u.Len()), - }, - }, - } - - case *types.Chan: - v := analysisinternal.TypeExpr(f, pkg, u.Elem()) - if v == nil { - return nil - } - dir := ast.ChanDir(u.Dir()) - if u.Dir() == types.SendRecv { - dir = ast.SEND | ast.RECV - } - return &ast.CallExpr{ - Fun: ast.NewIdent("make"), - Args: []ast.Expr{ - &ast.ChanType{ - Dir: dir, - Value: v, - }, - }, - } - - case *types.Struct: - s := analysisinternal.TypeExpr(f, pkg, typ) - if s == nil { - return nil - } - return &ast.CompositeLit{ - Type: s, - } - - case *types.Signature: - var params []*ast.Field - for i := 0; i < u.Params().Len(); i++ { - p := analysisinternal.TypeExpr(f, pkg, u.Params().At(i).Type()) - if p == nil { - return nil - } - params = append(params, &ast.Field{ - Type: p, - Names: []*ast.Ident{ - { - Name: u.Params().At(i).Name(), - }, - }, - }) - } - var returns []*ast.Field - for i := 0; i < u.Results().Len(); i++ { - r := analysisinternal.TypeExpr(f, pkg, u.Results().At(i).Type()) - if r == nil { - return nil - } - returns = append(returns, &ast.Field{ - Type: r, - }) - } - return &ast.FuncLit{ - Type: &ast.FuncType{ - Params: &ast.FieldList{ - List: params, - }, - Results: &ast.FieldList{ - List: returns, - }, - }, - Body: &ast.BlockStmt{}, - } - - case *types.Pointer: - switch u.Elem().(type) { - case *types.Basic: - return &ast.CallExpr{ - Fun: &ast.Ident{ - Name: "new", - }, - Args: []ast.Expr{ - &ast.Ident{ - Name: u.Elem().String(), - }, - }, - } - default: - x := populateValue(f, pkg, u.Elem()) - if x == nil { - return nil - } - return &ast.UnaryExpr{ - Op: token.AND, - X: x, - } - } - - case *types.Interface: - if param, ok := typ.(*types.TypeParam); ok { - // *new(T) is the zero value of a type parameter T. - // TODO(adonovan): one could give a more specific zero - // value if the type has a core type that is, say, - // always a number or a pointer. See go/ssa for details. 
- return &ast.StarExpr{ - X: &ast.CallExpr{ - Fun: ast.NewIdent("new"), - Args: []ast.Expr{ - ast.NewIdent(param.Obj().Name()), - }, - }, - } - } - - return ast.NewIdent("nil") - } - return nil -} - -func deref(t types.Type) types.Type { - for { - ptr, ok := t.Underlying().(*types.Pointer) - if !ok { - return t - } - t = ptr.Elem() - } -} diff --git a/internal/golangorgx/gopls/analysis/infertypeargs/infertypeargs.go b/internal/golangorgx/gopls/analysis/infertypeargs/infertypeargs.go deleted file mode 100644 index 0aff9049393..00000000000 --- a/internal/golangorgx/gopls/analysis/infertypeargs/infertypeargs.go +++ /dev/null @@ -1,149 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package infertypeargs - -import ( - "go/ast" - "go/token" - "go/types" - - "cuelang.org/go/internal/golangorgx/tools/typeparams" - "cuelang.org/go/internal/golangorgx/tools/versions" - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -const Doc = `check for unnecessary type arguments in call expressions - -Explicit type arguments may be omitted from call expressions if they can be -inferred from function arguments, or from other type arguments: - - func f[T any](T) {} - - func _() { - f[string]("foo") // string could be inferred - } -` - -var Analyzer = &analysis.Analyzer{ - Name: "infertypeargs", - Doc: Doc, - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: run, - URL: "https://pkg.go.dev/cuelang.org/go/internal/golangorgx/gopls/analysis/infertypeargs", -} - -func run(pass *analysis.Pass) (any, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - for _, diag := range diagnose(pass.Fset, inspect, token.NoPos, token.NoPos, pass.Pkg, pass.TypesInfo) { - pass.Report(diag) - } - return nil, nil -} - -// Diagnose reports diagnostics describing simplifications to type -// arguments overlapping with the provided start and end position. -// -// If start or end is token.NoPos, the corresponding bound is not checked -// (i.e. if both start and end are NoPos, all call expressions are considered). -func diagnose(fset *token.FileSet, inspect *inspector.Inspector, start, end token.Pos, pkg *types.Package, info *types.Info) []analysis.Diagnostic { - var diags []analysis.Diagnostic - - nodeFilter := []ast.Node{(*ast.CallExpr)(nil)} - inspect.Preorder(nodeFilter, func(node ast.Node) { - call := node.(*ast.CallExpr) - x, lbrack, indices, rbrack := typeparams.UnpackIndexExpr(call.Fun) - ident := calledIdent(x) - if ident == nil || len(indices) == 0 { - return // no explicit args, nothing to do - } - - if (start.IsValid() && call.End() < start) || (end.IsValid() && call.Pos() > end) { - return // non-overlapping - } - - // Confirm that instantiation actually occurred at this ident. - idata, ok := info.Instances[ident] - if !ok { - return // something went wrong, but fail open - } - instance := idata.Type - - // Start removing argument expressions from the right, and check if we can - // still infer the call expression. - required := len(indices) // number of type expressions that are required - for i := len(indices) - 1; i >= 0; i-- { - var fun ast.Expr - if i == 0 { - // No longer an index expression: just use the parameterized operand. 
- fun = x - } else { - fun = typeparams.PackIndexExpr(x, lbrack, indices[:i], indices[i-1].End()) - } - newCall := &ast.CallExpr{ - Fun: fun, - Lparen: call.Lparen, - Args: call.Args, - Ellipsis: call.Ellipsis, - Rparen: call.Rparen, - } - info := &types.Info{ - Instances: make(map[*ast.Ident]types.Instance), - } - versions.InitFileVersions(info) - if err := types.CheckExpr(fset, pkg, call.Pos(), newCall, info); err != nil { - // Most likely inference failed. - break - } - newIData := info.Instances[ident] - newInstance := newIData.Type - if !types.Identical(instance, newInstance) { - // The inferred result type does not match the original result type, so - // this simplification is not valid. - break - } - required = i - } - if required < len(indices) { - var s, e token.Pos - var edit analysis.TextEdit - if required == 0 { - s, e = lbrack, rbrack+1 // erase the entire index - edit = analysis.TextEdit{Pos: s, End: e} - } else { - s = indices[required].Pos() - e = rbrack - // erase from end of last arg to include last comma & white-spaces - edit = analysis.TextEdit{Pos: indices[required-1].End(), End: e} - } - // Recheck that our (narrower) fixes overlap with the requested range. - if (start.IsValid() && e < start) || (end.IsValid() && s > end) { - return // non-overlapping - } - diags = append(diags, analysis.Diagnostic{ - Pos: s, - End: e, - Message: "unnecessary type arguments", - SuggestedFixes: []analysis.SuggestedFix{{ - Message: "Simplify type arguments", - TextEdits: []analysis.TextEdit{edit}, - }}, - }) - } - }) - - return diags -} - -func calledIdent(x ast.Expr) *ast.Ident { - switch x := x.(type) { - case *ast.Ident: - return x - case *ast.SelectorExpr: - return x.Sel - } - return nil -} diff --git a/internal/golangorgx/gopls/analysis/nonewvars/doc.go b/internal/golangorgx/gopls/analysis/nonewvars/doc.go deleted file mode 100644 index b0bef847e32..00000000000 --- a/internal/golangorgx/gopls/analysis/nonewvars/doc.go +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package nonewvars defines an Analyzer that applies suggested fixes -// to errors of the type "no new variables on left side of :=". -// -// # Analyzer nonewvars -// -// nonewvars: suggested fixes for "no new vars on left side of :=" -// -// This checker provides suggested fixes for type errors of the -// type "no new vars on left side of :=". For example: -// -// z := 1 -// z := 2 -// -// will turn into -// -// z := 1 -// z = 2 -package nonewvars diff --git a/internal/golangorgx/gopls/analysis/nonewvars/nonewvars.go b/internal/golangorgx/gopls/analysis/nonewvars/nonewvars.go deleted file mode 100644 index ae097dd4e64..00000000000 --- a/internal/golangorgx/gopls/analysis/nonewvars/nonewvars.go +++ /dev/null @@ -1,89 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package nonewvars defines an Analyzer that applies suggested fixes -// to errors of the type "no new variables on left side of :=". 
-package nonewvars - -import ( - "bytes" - _ "embed" - "go/ast" - "go/format" - "go/token" - - "cuelang.org/go/internal/golangorgx/tools/analysisinternal" - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -//go:embed doc.go -var doc string - -var Analyzer = &analysis.Analyzer{ - Name: "nonewvars", - Doc: analysisinternal.MustExtractDoc(doc, "nonewvars"), - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: run, - RunDespiteErrors: true, - URL: "https://pkg.go.dev/cuelang.org/go/internal/golangorgx/gopls/analysis/nonewvars", -} - -func run(pass *analysis.Pass) (interface{}, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - if len(pass.TypeErrors) == 0 { - return nil, nil - } - - nodeFilter := []ast.Node{(*ast.AssignStmt)(nil)} - inspect.Preorder(nodeFilter, func(n ast.Node) { - assignStmt, _ := n.(*ast.AssignStmt) - // We only care about ":=". - if assignStmt.Tok != token.DEFINE { - return - } - - var file *ast.File - for _, f := range pass.Files { - if f.Pos() <= assignStmt.Pos() && assignStmt.Pos() < f.End() { - file = f - break - } - } - if file == nil { - return - } - - for _, err := range pass.TypeErrors { - if !FixesError(err.Msg) { - continue - } - if assignStmt.Pos() > err.Pos || err.Pos >= assignStmt.End() { - continue - } - var buf bytes.Buffer - if err := format.Node(&buf, pass.Fset, file); err != nil { - continue - } - pass.Report(analysis.Diagnostic{ - Pos: err.Pos, - End: analysisinternal.TypeErrorEndPos(pass.Fset, buf.Bytes(), err.Pos), - Message: err.Msg, - SuggestedFixes: []analysis.SuggestedFix{{ - Message: "Change ':=' to '='", - TextEdits: []analysis.TextEdit{{ - Pos: err.Pos, - End: err.Pos + 1, - }}, - }}, - }) - } - }) - return nil, nil -} - -func FixesError(msg string) bool { - return msg == "no new variables on left side of :=" -} diff --git a/internal/golangorgx/gopls/analysis/noresultvalues/doc.go b/internal/golangorgx/gopls/analysis/noresultvalues/doc.go deleted file mode 100644 index 87df2093e8d..00000000000 --- a/internal/golangorgx/gopls/analysis/noresultvalues/doc.go +++ /dev/null @@ -1,21 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package noresultvalues defines an Analyzer that applies suggested fixes -// to errors of the type "no result values expected". -// -// # Analyzer noresultvalues -// -// noresultvalues: suggested fixes for unexpected return values -// -// This checker provides suggested fixes for type errors of the -// type "no result values expected" or "too many return values". -// For example: -// -// func z() { return nil } -// -// will turn into -// -// func z() { return } -package noresultvalues diff --git a/internal/golangorgx/gopls/analysis/noresultvalues/noresultvalues.go b/internal/golangorgx/gopls/analysis/noresultvalues/noresultvalues.go deleted file mode 100644 index 90c6a7a5534..00000000000 --- a/internal/golangorgx/gopls/analysis/noresultvalues/noresultvalues.go +++ /dev/null @@ -1,86 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package noresultvalues - -import ( - "bytes" - "go/ast" - "go/format" - "strings" - - _ "embed" - - "cuelang.org/go/internal/golangorgx/tools/analysisinternal" - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -//go:embed doc.go -var doc string - -var Analyzer = &analysis.Analyzer{ - Name: "noresultvalues", - Doc: analysisinternal.MustExtractDoc(doc, "noresultvalues"), - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: run, - RunDespiteErrors: true, - URL: "https://pkg.go.dev/cuelang.org/go/internal/golangorgx/gopls/analysis/noresultvars", -} - -func run(pass *analysis.Pass) (interface{}, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - if len(pass.TypeErrors) == 0 { - return nil, nil - } - - nodeFilter := []ast.Node{(*ast.ReturnStmt)(nil)} - inspect.Preorder(nodeFilter, func(n ast.Node) { - retStmt, _ := n.(*ast.ReturnStmt) - - var file *ast.File - for _, f := range pass.Files { - if f.Pos() <= retStmt.Pos() && retStmt.Pos() < f.End() { - file = f - break - } - } - if file == nil { - return - } - - for _, err := range pass.TypeErrors { - if !FixesError(err.Msg) { - continue - } - if retStmt.Pos() >= err.Pos || err.Pos >= retStmt.End() { - continue - } - var buf bytes.Buffer - if err := format.Node(&buf, pass.Fset, file); err != nil { - continue - } - pass.Report(analysis.Diagnostic{ - Pos: err.Pos, - End: analysisinternal.TypeErrorEndPos(pass.Fset, buf.Bytes(), err.Pos), - Message: err.Msg, - SuggestedFixes: []analysis.SuggestedFix{{ - Message: "Delete return values", - TextEdits: []analysis.TextEdit{{ - Pos: retStmt.Pos(), - End: retStmt.End(), - NewText: []byte("return"), - }}, - }}, - }) - } - }) - return nil, nil -} - -func FixesError(msg string) bool { - return msg == "no result values expected" || - strings.HasPrefix(msg, "too many return values") && strings.Contains(msg, "want ()") -} diff --git a/internal/golangorgx/gopls/analysis/simplifycompositelit/doc.go b/internal/golangorgx/gopls/analysis/simplifycompositelit/doc.go deleted file mode 100644 index fe40596746e..00000000000 --- a/internal/golangorgx/gopls/analysis/simplifycompositelit/doc.go +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package simplifycompositelit defines an Analyzer that simplifies composite literals. -// https://github.com/golang/go/blob/master/src/cmd/gofmt/simplify.go -// https://golang.org/cmd/gofmt/#hdr-The_simplify_command -// -// # Analyzer simplifycompositelit -// -// simplifycompositelit: check for composite literal simplifications -// -// An array, slice, or map composite literal of the form: -// -// []T{T{}, T{}} -// -// will be simplified to: -// -// []T{{}, {}} -// -// This is one of the simplifications that "gofmt -s" applies. -package simplifycompositelit diff --git a/internal/golangorgx/gopls/analysis/simplifycompositelit/simplifycompositelit.go b/internal/golangorgx/gopls/analysis/simplifycompositelit/simplifycompositelit.go deleted file mode 100644 index 4358aaff4e3..00000000000 --- a/internal/golangorgx/gopls/analysis/simplifycompositelit/simplifycompositelit.go +++ /dev/null @@ -1,193 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -// Package simplifycompositelit defines an Analyzer that simplifies composite literals. -// https://github.com/golang/go/blob/master/src/cmd/gofmt/simplify.go -// https://golang.org/cmd/gofmt/#hdr-The_simplify_command -package simplifycompositelit - -import ( - "bytes" - _ "embed" - "fmt" - "go/ast" - "go/printer" - "go/token" - "reflect" - - "cuelang.org/go/internal/golangorgx/tools/analysisinternal" - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -//go:embed doc.go -var doc string - -var Analyzer = &analysis.Analyzer{ - Name: "simplifycompositelit", - Doc: analysisinternal.MustExtractDoc(doc, "simplifycompositelit"), - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: run, - URL: "https://pkg.go.dev/cuelang.org/go/internal/golangorgx/gopls/analysis/simplifycompositelit", -} - -func run(pass *analysis.Pass) (interface{}, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - nodeFilter := []ast.Node{(*ast.CompositeLit)(nil)} - inspect.Preorder(nodeFilter, func(n ast.Node) { - expr := n.(*ast.CompositeLit) - - outer := expr - var keyType, eltType ast.Expr - switch typ := outer.Type.(type) { - case *ast.ArrayType: - eltType = typ.Elt - case *ast.MapType: - keyType = typ.Key - eltType = typ.Value - } - - if eltType == nil { - return - } - var ktyp reflect.Value - if keyType != nil { - ktyp = reflect.ValueOf(keyType) - } - typ := reflect.ValueOf(eltType) - for _, x := range outer.Elts { - // look at value of indexed/named elements - if t, ok := x.(*ast.KeyValueExpr); ok { - if keyType != nil { - simplifyLiteral(pass, ktyp, keyType, t.Key) - } - x = t.Value - } - simplifyLiteral(pass, typ, eltType, x) - } - }) - return nil, nil -} - -func simplifyLiteral(pass *analysis.Pass, typ reflect.Value, astType, x ast.Expr) { - // if the element is a composite literal and its literal type - // matches the outer literal's element type exactly, the inner - // literal type may be omitted - if inner, ok := x.(*ast.CompositeLit); ok && match(typ, reflect.ValueOf(inner.Type)) { - var b bytes.Buffer - printer.Fprint(&b, pass.Fset, inner.Type) - createDiagnostic(pass, inner.Type.Pos(), inner.Type.End(), b.String()) - } - // if the outer literal's element type is a pointer type *T - // and the element is & of a composite literal of type T, - // the inner &T may be omitted. - if ptr, ok := astType.(*ast.StarExpr); ok { - if addr, ok := x.(*ast.UnaryExpr); ok && addr.Op == token.AND { - if inner, ok := addr.X.(*ast.CompositeLit); ok { - if match(reflect.ValueOf(ptr.X), reflect.ValueOf(inner.Type)) { - var b bytes.Buffer - printer.Fprint(&b, pass.Fset, inner.Type) - // Account for the & by subtracting 1 from typ.Pos(). - createDiagnostic(pass, inner.Type.Pos()-1, inner.Type.End(), "&"+b.String()) - } - } - } - } -} - -func createDiagnostic(pass *analysis.Pass, start, end token.Pos, typ string) { - pass.Report(analysis.Diagnostic{ - Pos: start, - End: end, - Message: "redundant type from array, slice, or map composite literal", - SuggestedFixes: []analysis.SuggestedFix{{ - Message: fmt.Sprintf("Remove '%s'", typ), - TextEdits: []analysis.TextEdit{{ - Pos: start, - End: end, - NewText: []byte{}, - }}, - }}, - }) -} - -// match reports whether pattern matches val, -// recording wildcard submatches in m. -// If m == nil, match checks whether pattern == val. 
-// from https://github.com/golang/go/blob/26154f31ad6c801d8bad5ef58df1e9263c6beec7/src/cmd/gofmt/rewrite.go#L160 -func match(pattern, val reflect.Value) bool { - // Otherwise, pattern and val must match recursively. - if !pattern.IsValid() || !val.IsValid() { - return !pattern.IsValid() && !val.IsValid() - } - if pattern.Type() != val.Type() { - return false - } - - // Special cases. - switch pattern.Type() { - case identType: - // For identifiers, only the names need to match - // (and none of the other *ast.Object information). - // This is a common case, handle it all here instead - // of recursing down any further via reflection. - p := pattern.Interface().(*ast.Ident) - v := val.Interface().(*ast.Ident) - return p == nil && v == nil || p != nil && v != nil && p.Name == v.Name - case objectPtrType, positionType: - // object pointers and token positions always match - return true - case callExprType: - // For calls, the Ellipsis fields (token.Position) must - // match since that is how f(x) and f(x...) are different. - // Check them here but fall through for the remaining fields. - p := pattern.Interface().(*ast.CallExpr) - v := val.Interface().(*ast.CallExpr) - if p.Ellipsis.IsValid() != v.Ellipsis.IsValid() { - return false - } - } - - p := reflect.Indirect(pattern) - v := reflect.Indirect(val) - if !p.IsValid() || !v.IsValid() { - return !p.IsValid() && !v.IsValid() - } - - switch p.Kind() { - case reflect.Slice: - if p.Len() != v.Len() { - return false - } - for i := 0; i < p.Len(); i++ { - if !match(p.Index(i), v.Index(i)) { - return false - } - } - return true - - case reflect.Struct: - for i := 0; i < p.NumField(); i++ { - if !match(p.Field(i), v.Field(i)) { - return false - } - } - return true - - case reflect.Interface: - return match(p.Elem(), v.Elem()) - } - - // Handle token integers, etc. - return p.Interface() == v.Interface() -} - -// Values/types for special cases. -var ( - identType = reflect.TypeOf((*ast.Ident)(nil)) - objectPtrType = reflect.TypeOf((*ast.Object)(nil)) - positionType = reflect.TypeOf(token.NoPos) - callExprType = reflect.TypeOf((*ast.CallExpr)(nil)) -) diff --git a/internal/golangorgx/gopls/analysis/simplifyrange/doc.go b/internal/golangorgx/gopls/analysis/simplifyrange/doc.go deleted file mode 100644 index f55ed56b35b..00000000000 --- a/internal/golangorgx/gopls/analysis/simplifyrange/doc.go +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package simplifyrange defines an Analyzer that simplifies range statements. -// https://golang.org/cmd/gofmt/#hdr-The_simplify_command -// https://github.com/golang/go/blob/master/src/cmd/gofmt/simplify.go -// -// # Analyzer simplifyrange -// -// simplifyrange: check for range statement simplifications -// -// A range of the form: -// -// for x, _ = range v {...} -// -// will be simplified to: -// -// for x = range v {...} -// -// A range of the form: -// -// for _ = range v {...} -// -// will be simplified to: -// -// for range v {...} -// -// This is one of the simplifications that "gofmt -s" applies. -package simplifyrange diff --git a/internal/golangorgx/gopls/analysis/simplifyrange/simplifyrange.go b/internal/golangorgx/gopls/analysis/simplifyrange/simplifyrange.go deleted file mode 100644 index 6fcb2ba60bf..00000000000 --- a/internal/golangorgx/gopls/analysis/simplifyrange/simplifyrange.go +++ /dev/null @@ -1,105 +0,0 @@ -// Copyright 2020 The Go Authors. 
All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package simplifyrange - -import ( - "bytes" - _ "embed" - "go/ast" - "go/printer" - "go/token" - - "cuelang.org/go/internal/golangorgx/tools/analysisinternal" - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -//go:embed doc.go -var doc string - -var Analyzer = &analysis.Analyzer{ - Name: "simplifyrange", - Doc: analysisinternal.MustExtractDoc(doc, "simplifyrange"), - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: run, - URL: "https://pkg.go.dev/cuelang.org/go/internal/golangorgx/gopls/analysis/simplifyrange", -} - -func run(pass *analysis.Pass) (interface{}, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - nodeFilter := []ast.Node{ - (*ast.RangeStmt)(nil), - } - inspect.Preorder(nodeFilter, func(n ast.Node) { - var copy *ast.RangeStmt - if stmt, ok := n.(*ast.RangeStmt); ok { - x := *stmt - copy = &x - } - if copy == nil { - return - } - end := newlineIndex(pass.Fset, copy) - - // Range statements of the form: for i, _ := range x {} - var old ast.Expr - if isBlank(copy.Value) { - old = copy.Value - copy.Value = nil - } - // Range statements of the form: for _ := range x {} - if isBlank(copy.Key) && copy.Value == nil { - old = copy.Key - copy.Key = nil - } - // Return early if neither if condition is met. - if old == nil { - return - } - pass.Report(analysis.Diagnostic{ - Pos: old.Pos(), - End: old.End(), - Message: "simplify range expression", - SuggestedFixes: suggestedFixes(pass.Fset, copy, end), - }) - }) - return nil, nil -} - -func suggestedFixes(fset *token.FileSet, rng *ast.RangeStmt, end token.Pos) []analysis.SuggestedFix { - var b bytes.Buffer - printer.Fprint(&b, fset, rng) - stmt := b.Bytes() - index := bytes.Index(stmt, []byte("\n")) - // If there is a new line character, then don't replace the body. - if index != -1 { - stmt = stmt[:index] - } - return []analysis.SuggestedFix{{ - Message: "Remove empty value", - TextEdits: []analysis.TextEdit{{ - Pos: rng.Pos(), - End: end, - NewText: stmt[:index], - }}, - }} -} - -func newlineIndex(fset *token.FileSet, rng *ast.RangeStmt) token.Pos { - var b bytes.Buffer - printer.Fprint(&b, fset, rng) - contents := b.Bytes() - index := bytes.Index(contents, []byte("\n")) - if index == -1 { - return rng.End() - } - return rng.Pos() + token.Pos(index) -} - -func isBlank(x ast.Expr) bool { - ident, ok := x.(*ast.Ident) - return ok && ident.Name == "_" -} diff --git a/internal/golangorgx/gopls/analysis/simplifyslice/doc.go b/internal/golangorgx/gopls/analysis/simplifyslice/doc.go deleted file mode 100644 index 2fb4c461054..00000000000 --- a/internal/golangorgx/gopls/analysis/simplifyslice/doc.go +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package simplifyslice defines an Analyzer that simplifies slice statements. -// https://github.com/golang/go/blob/master/src/cmd/gofmt/simplify.go -// https://golang.org/cmd/gofmt/#hdr-The_simplify_command -// -// # Analyzer simplifyslice -// -// simplifyslice: check for slice simplifications -// -// A slice expression of the form: -// -// s[a:len(s)] -// -// will be simplified to: -// -// s[a:] -// -// This is one of the simplifications that "gofmt -s" applies. 
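For reference, a small compilable sketch of the range statements the simplifyrange analyzer above flags, alongside the simplified loops its fix suggests:

	package main

	import "fmt"

	func main() {
		v := []string{"a", "b"}

		// Flagged: the blank value (and, in the second loop, the blank key)
		// is redundant.
		for i, _ := range v {
			fmt.Println(i)
		}
		for _ = range v {
			fmt.Println("tick")
		}

		// Simplified forms suggested by the fix:
		for i := range v {
			fmt.Println(i)
		}
		for range v {
			fmt.Println("tick")
		}
	}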
-package simplifyslice diff --git a/internal/golangorgx/gopls/analysis/simplifyslice/simplifyslice.go b/internal/golangorgx/gopls/analysis/simplifyslice/simplifyslice.go deleted file mode 100644 index 7f9fb18483c..00000000000 --- a/internal/golangorgx/gopls/analysis/simplifyslice/simplifyslice.go +++ /dev/null @@ -1,88 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package simplifyslice - -import ( - "bytes" - _ "embed" - "fmt" - "go/ast" - "go/printer" - - "cuelang.org/go/internal/golangorgx/tools/analysisinternal" - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -//go:embed doc.go -var doc string - -var Analyzer = &analysis.Analyzer{ - Name: "simplifyslice", - Doc: analysisinternal.MustExtractDoc(doc, "simplifyslice"), - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: run, - URL: "https://pkg.go.dev/cuelang.org/go/internal/golangorgx/gopls/analysis/simplifyslice", -} - -// Note: We could also simplify slice expressions of the form s[0:b] to s[:b] -// but we leave them as is since sometimes we want to be very explicit -// about the lower bound. -// An example where the 0 helps: -// x, y, z := b[0:2], b[2:4], b[4:6] -// An example where it does not: -// x, y := b[:n], b[n:] - -func run(pass *analysis.Pass) (interface{}, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - nodeFilter := []ast.Node{ - (*ast.SliceExpr)(nil), - } - inspect.Preorder(nodeFilter, func(n ast.Node) { - expr := n.(*ast.SliceExpr) - // - 3-index slices always require the 2nd and 3rd index - if expr.Max != nil { - return - } - s, ok := expr.X.(*ast.Ident) - // the array/slice object is a single, resolved identifier - if !ok || s.Obj == nil { - return - } - call, ok := expr.High.(*ast.CallExpr) - // the high expression is a function call with a single argument - if !ok || len(call.Args) != 1 || call.Ellipsis.IsValid() { - return - } - fun, ok := call.Fun.(*ast.Ident) - // the function called is "len" and it is not locally defined; and - // because we don't have dot imports, it must be the predefined len() - if !ok || fun.Name != "len" || fun.Obj != nil { - return - } - arg, ok := call.Args[0].(*ast.Ident) - // the len argument is the array/slice object - if !ok || arg.Obj != s.Obj { - return - } - var b bytes.Buffer - printer.Fprint(&b, pass.Fset, expr.High) - pass.Report(analysis.Diagnostic{ - Pos: expr.High.Pos(), - End: expr.High.End(), - Message: fmt.Sprintf("unneeded: %s", b.String()), - SuggestedFixes: []analysis.SuggestedFix{{ - Message: fmt.Sprintf("Remove '%s'", b.String()), - TextEdits: []analysis.TextEdit{{ - Pos: expr.High.Pos(), - End: expr.High.End(), - NewText: []byte{}, - }}, - }}, - }) - }) - return nil, nil -} diff --git a/internal/golangorgx/gopls/analysis/stubmethods/doc.go b/internal/golangorgx/gopls/analysis/stubmethods/doc.go deleted file mode 100644 index e1383cfc7e7..00000000000 --- a/internal/golangorgx/gopls/analysis/stubmethods/doc.go +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package stubmethods defines a code action for missing interface methods. 
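Likewise, a short sketch of the slice expressions the simplifyslice analyzer reports and the ones it deliberately leaves alone:

	package main

	import "fmt"

	func main() {
		s := []int{1, 2, 3, 4}
		a := 1

		fmt.Println(s[a:len(s)])   // flagged: "unneeded: len(s)"; the fix yields s[a:]
		fmt.Println(s[a:])         // already simplified
		fmt.Println(s[0:2])        // not flagged: an explicit 0 lower bound is left alone
		fmt.Println(s[a:3:len(s)]) // not flagged: 3-index slices always need the 2nd and 3rd index
	}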
-// -// # Analyzer stubmethods -// -// stubmethods: detect missing methods and fix with stub implementations -// -// This analyzer detects type-checking errors due to missing methods -// in assignments from concrete types to interface types, and offers -// a suggested fix that will create a set of stub methods so that -// the concrete type satisfies the interface. -// -// For example, this function will not compile because the value -// NegativeErr{} does not implement the "error" interface: -// -// func sqrt(x float64) (float64, error) { -// if x < 0 { -// return 0, NegativeErr{} // error: missing method -// } -// ... -// } -// -// type NegativeErr struct{} -// -// This analyzer will suggest a fix to declare this method: -// -// // Error implements error.Error. -// func (NegativeErr) Error() string { -// panic("unimplemented") -// } -// -// (At least, it appears to behave that way, but technically it -// doesn't use the SuggestedFix mechanism and the stub is created by -// logic in gopls's golang.stub function.) -package stubmethods diff --git a/internal/golangorgx/gopls/analysis/stubmethods/stubmethods.go b/internal/golangorgx/gopls/analysis/stubmethods/stubmethods.go deleted file mode 100644 index ed54da4877e..00000000000 --- a/internal/golangorgx/gopls/analysis/stubmethods/stubmethods.go +++ /dev/null @@ -1,402 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package stubmethods - -import ( - "bytes" - _ "embed" - "fmt" - "go/ast" - "go/format" - "go/token" - "go/types" - "strings" - - "cuelang.org/go/internal/golangorgx/gopls/util/typesutil" - "cuelang.org/go/internal/golangorgx/tools/analysisinternal" - "cuelang.org/go/internal/golangorgx/tools/typesinternal" - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/ast/astutil" -) - -//go:embed doc.go -var doc string - -var Analyzer = &analysis.Analyzer{ - Name: "stubmethods", - Doc: analysisinternal.MustExtractDoc(doc, "stubmethods"), - Run: run, - RunDespiteErrors: true, - URL: "https://pkg.go.dev/cuelang.org/go/internal/golangorgx/gopls/analysis/stubmethods", -} - -// TODO(rfindley): remove this thin wrapper around the stubmethods refactoring, -// and eliminate the stubmethods analyzer. -// -// Previous iterations used the analysis framework for computing refactorings, -// which proved inefficient. -func run(pass *analysis.Pass) (interface{}, error) { - for _, err := range pass.TypeErrors { - var file *ast.File - for _, f := range pass.Files { - if f.Pos() <= err.Pos && err.Pos < f.End() { - file = f - break - } - } - // Get the end position of the error. - _, _, end, ok := typesinternal.ReadGo116ErrorData(err) - if !ok { - var buf bytes.Buffer - if err := format.Node(&buf, pass.Fset, file); err != nil { - continue - } - end = analysisinternal.TypeErrorEndPos(pass.Fset, buf.Bytes(), err.Pos) - } - if diag, ok := DiagnosticForError(pass.Fset, file, err.Pos, end, err.Msg, pass.TypesInfo); ok { - pass.Report(diag) - } - } - - return nil, nil -} - -// MatchesMessage reports whether msg matches the error message sought after by -// the stubmethods fix. -func MatchesMessage(msg string) bool { - return strings.Contains(msg, "missing method") || strings.HasPrefix(msg, "cannot convert") || strings.Contains(msg, "not implement") -} - -// DiagnosticForError computes a diagnostic suggesting to implement an -// interface to fix the type checking error defined by (start, end, msg). 
-// -// If no such fix is possible, the second result is false. -func DiagnosticForError(fset *token.FileSet, file *ast.File, start, end token.Pos, msg string, info *types.Info) (analysis.Diagnostic, bool) { - if !MatchesMessage(msg) { - return analysis.Diagnostic{}, false - } - - path, _ := astutil.PathEnclosingInterval(file, start, end) - si := GetStubInfo(fset, info, path, start) - if si == nil { - return analysis.Diagnostic{}, false - } - qf := typesutil.FileQualifier(file, si.Concrete.Obj().Pkg(), info) - iface := types.TypeString(si.Interface.Type(), qf) - return analysis.Diagnostic{ - Pos: start, - End: end, - Message: msg, - Category: FixCategory, - SuggestedFixes: []analysis.SuggestedFix{{ - Message: fmt.Sprintf("Declare missing methods of %s", iface), - // No TextEdits => computed later by gopls. - }}, - }, true -} - -const FixCategory = "stubmethods" // recognized by gopls ApplyFix - -// StubInfo represents a concrete type -// that wants to stub out an interface type -type StubInfo struct { - // Interface is the interface that the client wants to implement. - // When the interface is defined, the underlying object will be a TypeName. - // Note that we keep track of types.Object instead of types.Type in order - // to keep a reference to the declaring object's package and the ast file - // in the case where the concrete type file requires a new import that happens to be renamed - // in the interface file. - // TODO(marwan-at-work): implement interface literals. - Fset *token.FileSet // the FileSet used to type-check the types below - Interface *types.TypeName - Concrete *types.Named - Pointer bool -} - -// GetStubInfo determines whether the "missing method error" -// can be used to deduced what the concrete and interface types are. -// -// TODO(adonovan): this function (and its following 5 helpers) tries -// to deduce a pair of (concrete, interface) types that are related by -// an assignment, either explicitly or through a return statement or -// function call. This is essentially what the refactor/satisfy does, -// more generally. Refactor to share logic, after auditing 'satisfy' -// for safety on ill-typed code. -func GetStubInfo(fset *token.FileSet, info *types.Info, path []ast.Node, pos token.Pos) *StubInfo { - for _, n := range path { - switch n := n.(type) { - case *ast.ValueSpec: - return fromValueSpec(fset, info, n, pos) - case *ast.ReturnStmt: - // An error here may not indicate a real error the user should know about, but it may. - // Therefore, it would be best to log it out for debugging/reporting purposes instead of ignoring - // it. However, event.Log takes a context which is not passed via the analysis package. - // TODO(marwan-at-work): properly log this error. - si, _ := fromReturnStmt(fset, info, pos, path, n) - return si - case *ast.AssignStmt: - return fromAssignStmt(fset, info, n, pos) - case *ast.CallExpr: - // Note that some call expressions don't carry the interface type - // because they don't point to a function or method declaration elsewhere. - // For eaxmple, "var Interface = (*Concrete)(nil)". In that case, continue - // this loop to encounter other possibilities such as *ast.ValueSpec or others. - si := fromCallExpr(fset, info, pos, n) - if si != nil { - return si - } - } - } - return nil -} - -// fromCallExpr tries to find an *ast.CallExpr's function declaration and -// analyzes a function call's signature against the passed in parameter to deduce -// the concrete and interface types. 
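GetStubInfo above only fires when the error position sits inside one of a handful of node kinds. A compilable sketch of those trigger contexts against io.Writer, using an invented type T; the Write method stands in for the stub the fix would generate, and without it each commented context produces the "missing method" error the analyzer matches:

	package main

	import (
		"io"
		"strings"
	)

	type T struct{}

	// GetStubInfo recognizes the error position inside these node kinds and
	// pairs up (concrete T, interface io.Writer) from each:
	//
	//	var _ io.Writer = T{}                 // *ast.ValueSpec
	//	func f() io.Writer { return T{} }     // *ast.ReturnStmt
	//	var w io.Writer; w = T{}              // *ast.AssignStmt
	//	io.Copy(T{}, strings.NewReader("x"))  // *ast.CallExpr (argument position)
	//
	// Write is the kind of stub the suggested fix declares; with it in place,
	// every context above type-checks.
	func (T) Write(p []byte) (int, error) { return len(p), nil }

	var _ io.Writer = T{}

	func main() {
		io.Copy(T{}, strings.NewReader("hi"))
	}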
-func fromCallExpr(fset *token.FileSet, info *types.Info, pos token.Pos, call *ast.CallExpr) *StubInfo { - // Find argument containing pos. - argIdx := -1 - var arg ast.Expr - for i, callArg := range call.Args { - if callArg.Pos() <= pos && pos <= callArg.End() { - argIdx = i - arg = callArg - break - } - } - if arg == nil { - return nil - } - - concType, pointer := concreteType(arg, info) - if concType == nil || concType.Obj().Pkg() == nil { - return nil - } - tv, ok := info.Types[call.Fun] - if !ok { - return nil - } - sig, ok := tv.Type.(*types.Signature) - if !ok { - return nil - } - var paramType types.Type - if sig.Variadic() && argIdx >= sig.Params().Len()-1 { - v := sig.Params().At(sig.Params().Len() - 1) - if s, _ := v.Type().(*types.Slice); s != nil { - paramType = s.Elem() - } - } else if argIdx < sig.Params().Len() { - paramType = sig.Params().At(argIdx).Type() - } - if paramType == nil { - return nil // A type error prevents us from determining the param type. - } - iface := ifaceObjFromType(paramType) - if iface == nil { - return nil - } - return &StubInfo{ - Fset: fset, - Concrete: concType, - Pointer: pointer, - Interface: iface, - } -} - -// fromReturnStmt analyzes a "return" statement to extract -// a concrete type that is trying to be returned as an interface type. -// -// For example, func() io.Writer { return myType{} } -// would return StubInfo with the interface being io.Writer and the concrete type being myType{}. -func fromReturnStmt(fset *token.FileSet, info *types.Info, pos token.Pos, path []ast.Node, ret *ast.ReturnStmt) (*StubInfo, error) { - // Find return operand containing pos. - returnIdx := -1 - for i, r := range ret.Results { - if r.Pos() <= pos && pos <= r.End() { - returnIdx = i - break - } - } - if returnIdx == -1 { - return nil, fmt.Errorf("pos %d not within return statement bounds: [%d-%d]", pos, ret.Pos(), ret.End()) - } - - concType, pointer := concreteType(ret.Results[returnIdx], info) - if concType == nil || concType.Obj().Pkg() == nil { - return nil, nil - } - funcType := enclosingFunction(path, info) - if funcType == nil { - return nil, fmt.Errorf("could not find the enclosing function of the return statement") - } - if len(funcType.Results.List) != len(ret.Results) { - return nil, fmt.Errorf("%d-operand return statement in %d-result function", - len(ret.Results), - len(funcType.Results.List)) - } - iface := ifaceType(funcType.Results.List[returnIdx].Type, info) - if iface == nil { - return nil, nil - } - return &StubInfo{ - Fset: fset, - Concrete: concType, - Pointer: pointer, - Interface: iface, - }, nil -} - -// fromValueSpec returns *StubInfo from a variable declaration such as -// var x io.Writer = &T{} -func fromValueSpec(fset *token.FileSet, info *types.Info, spec *ast.ValueSpec, pos token.Pos) *StubInfo { - // Find RHS element containing pos. - var rhs ast.Expr - for _, r := range spec.Values { - if r.Pos() <= pos && pos <= r.End() { - rhs = r - break - } - } - if rhs == nil { - return nil // e.g. pos was on the LHS (#64545) - } - - // Possible implicit/explicit conversion to interface type? - ifaceNode := spec.Type // var _ myInterface = ... 
- if call, ok := rhs.(*ast.CallExpr); ok && ifaceNode == nil && len(call.Args) == 1 { - // var _ = myInterface(v) - ifaceNode = call.Fun - rhs = call.Args[0] - } - concType, pointer := concreteType(rhs, info) - if concType == nil || concType.Obj().Pkg() == nil { - return nil - } - ifaceObj := ifaceType(ifaceNode, info) - if ifaceObj == nil { - return nil - } - return &StubInfo{ - Fset: fset, - Concrete: concType, - Interface: ifaceObj, - Pointer: pointer, - } -} - -// fromAssignStmt returns *StubInfo from a variable assignment such as -// var x io.Writer -// x = &T{} -func fromAssignStmt(fset *token.FileSet, info *types.Info, assign *ast.AssignStmt, pos token.Pos) *StubInfo { - // The interface conversion error in an assignment is against the RHS: - // - // var x io.Writer - // x = &T{} // error: missing method - // ^^^^ - // - // Find RHS element containing pos. - var lhs, rhs ast.Expr - for i, r := range assign.Rhs { - if r.Pos() <= pos && pos <= r.End() { - if i >= len(assign.Lhs) { - // This should never happen as we would get a - // "cannot assign N values to M variables" - // before we get an interface conversion error. - // But be defensive. - return nil - } - lhs = assign.Lhs[i] - rhs = r - break - } - } - if lhs == nil || rhs == nil { - return nil - } - - ifaceObj := ifaceType(lhs, info) - if ifaceObj == nil { - return nil - } - concType, pointer := concreteType(rhs, info) - if concType == nil || concType.Obj().Pkg() == nil { - return nil - } - return &StubInfo{ - Fset: fset, - Concrete: concType, - Interface: ifaceObj, - Pointer: pointer, - } -} - -// ifaceType returns the named interface type to which e refers, if any. -func ifaceType(e ast.Expr, info *types.Info) *types.TypeName { - tv, ok := info.Types[e] - if !ok { - return nil - } - return ifaceObjFromType(tv.Type) -} - -func ifaceObjFromType(t types.Type) *types.TypeName { - named, ok := t.(*types.Named) - if !ok { - return nil - } - if !types.IsInterface(named) { - return nil - } - // Interfaces defined in the "builtin" package return nil a Pkg(). - // But they are still real interfaces that we need to make a special case for. - // Therefore, protect gopls from panicking if a new interface type was added in the future. - if named.Obj().Pkg() == nil && named.Obj().Name() != "error" { - return nil - } - return named.Obj() -} - -// concreteType tries to extract the *types.Named that defines -// the concrete type given the ast.Expr where the "missing method" -// or "conversion" errors happened. If the concrete type is something -// that cannot have methods defined on it (such as basic types), this -// method will return a nil *types.Named. The second return parameter -// is a boolean that indicates whether the concreteType was defined as a -// pointer or value. -func concreteType(e ast.Expr, info *types.Info) (*types.Named, bool) { - tv, ok := info.Types[e] - if !ok { - return nil, false - } - typ := tv.Type - ptr, isPtr := typ.(*types.Pointer) - if isPtr { - typ = ptr.Elem() - } - named, ok := typ.(*types.Named) - if !ok { - return nil, false - } - return named, isPtr -} - -// enclosingFunction returns the signature and type of the function -// enclosing the given position. 
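concreteType above leans on go/types recording a TypeAndValue for every expression, stripping one level of pointer to recover the *types.Named and remembering whether the value was a pointer. A standalone sketch of that lookup, type-checking a tiny in-memory file (package p and type T are invented):

	package main

	import (
		"fmt"
		"go/ast"
		"go/parser"
		"go/token"
		"go/types"
	)

	const src = `package p
	type T struct{}
	var v = &T{}
	`

	func main() {
		fset := token.NewFileSet()
		f, err := parser.ParseFile(fset, "p.go", src, 0)
		if err != nil {
			panic(err)
		}
		info := &types.Info{Types: make(map[ast.Expr]types.TypeAndValue)}
		if _, err := (&types.Config{}).Check("p", fset, []*ast.File{f}, info); err != nil {
			panic(err)
		}

		// Find the &T{} expression and reproduce the pointer-stripping step.
		ast.Inspect(f, func(n ast.Node) bool {
			expr, ok := n.(*ast.UnaryExpr)
			if !ok || expr.Op != token.AND {
				return true
			}
			typ := info.Types[expr].Type
			ptr, isPtr := typ.(*types.Pointer)
			if isPtr {
				typ = ptr.Elem()
			}
			if named, ok := typ.(*types.Named); ok {
				fmt.Printf("concrete type %s, pointer=%v\n", named.Obj().Name(), isPtr)
			}
			return true
		})
	}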
-func enclosingFunction(path []ast.Node, info *types.Info) *ast.FuncType { - for _, node := range path { - switch t := node.(type) { - case *ast.FuncDecl: - if _, ok := info.Defs[t.Name]; ok { - return t.Type - } - case *ast.FuncLit: - if _, ok := info.Types[t]; ok { - return t.Type - } - } - } - return nil -} diff --git a/internal/golangorgx/gopls/analysis/undeclaredname/doc.go b/internal/golangorgx/gopls/analysis/undeclaredname/doc.go deleted file mode 100644 index 02989c9d75b..00000000000 --- a/internal/golangorgx/gopls/analysis/undeclaredname/doc.go +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package undeclaredname defines an Analyzer that applies suggested fixes -// to errors of the type "undeclared name: %s". -// -// # Analyzer undeclaredname -// -// undeclaredname: suggested fixes for "undeclared name: <>" -// -// This checker provides suggested fixes for type errors of the -// type "undeclared name: <>". It will either insert a new statement, -// such as: -// -// <> := -// -// or a new function declaration, such as: -// -// func <>(inferred parameters) { -// panic("implement me!") -// } -package undeclaredname diff --git a/internal/golangorgx/gopls/analysis/undeclaredname/undeclared.go b/internal/golangorgx/gopls/analysis/undeclaredname/undeclared.go deleted file mode 100644 index e0c09cccd6e..00000000000 --- a/internal/golangorgx/gopls/analysis/undeclaredname/undeclared.go +++ /dev/null @@ -1,359 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package undeclaredname - -import ( - "bytes" - _ "embed" - "fmt" - "go/ast" - "go/format" - "go/token" - "go/types" - "strings" - "unicode" - - "cuelang.org/go/internal/golangorgx/gopls/util/safetoken" - "cuelang.org/go/internal/golangorgx/tools/analysisinternal" - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/ast/astutil" -) - -//go:embed doc.go -var doc string - -var Analyzer = &analysis.Analyzer{ - Name: "undeclaredname", - Doc: analysisinternal.MustExtractDoc(doc, "undeclaredname"), - Requires: []*analysis.Analyzer{}, - Run: run, - RunDespiteErrors: true, - URL: "https://pkg.go.dev/cuelang.org/go/internal/golangorgx/gopls/analysis/undeclaredname", -} - -// The prefix for this error message changed in Go 1.20. -var undeclaredNamePrefixes = []string{"undeclared name: ", "undefined: "} - -func run(pass *analysis.Pass) (interface{}, error) { - for _, err := range pass.TypeErrors { - runForError(pass, err) - } - return nil, nil -} - -func runForError(pass *analysis.Pass, err types.Error) { - // Extract symbol name from error. - var name string - for _, prefix := range undeclaredNamePrefixes { - if !strings.HasPrefix(err.Msg, prefix) { - continue - } - name = strings.TrimPrefix(err.Msg, prefix) - } - if name == "" { - return - } - - // Find file enclosing error. - var file *ast.File - for _, f := range pass.Files { - if f.Pos() <= err.Pos && err.Pos < f.End() { - file = f - break - } - } - if file == nil { - return - } - - // Find path to identifier in the error. 
- path, _ := astutil.PathEnclosingInterval(file, err.Pos, err.Pos) - if len(path) < 2 { - return - } - ident, ok := path[0].(*ast.Ident) - if !ok || ident.Name != name { - return - } - - // Skip selector expressions because it might be too complex - // to try and provide a suggested fix for fields and methods. - if _, ok := path[1].(*ast.SelectorExpr); ok { - return - } - - // Undeclared quick fixes only work in function bodies. - inFunc := false - for i := range path { - if _, inFunc = path[i].(*ast.FuncDecl); inFunc { - if i == 0 { - return - } - if _, isBody := path[i-1].(*ast.BlockStmt); !isBody { - return - } - break - } - } - if !inFunc { - return - } - - // Offer a fix. - noun := "variable" - if isCallPosition(path) { - noun = "function" - } - pass.Report(analysis.Diagnostic{ - Pos: err.Pos, - End: err.Pos + token.Pos(len(name)), - Message: err.Msg, - Category: FixCategory, - SuggestedFixes: []analysis.SuggestedFix{{ - Message: fmt.Sprintf("Create %s %q", noun, name), - // No TextEdits => computed by a gopls command - }}, - }) -} - -const FixCategory = "undeclaredname" // recognized by gopls ApplyFix - -// SuggestedFix computes the edits for the lazy (no-edits) fix suggested by the analyzer. -func SuggestedFix(fset *token.FileSet, start, end token.Pos, content []byte, file *ast.File, pkg *types.Package, info *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) { - pos := start // don't use the end - path, _ := astutil.PathEnclosingInterval(file, pos, pos) - if len(path) < 2 { - return nil, nil, fmt.Errorf("no expression found") - } - ident, ok := path[0].(*ast.Ident) - if !ok { - return nil, nil, fmt.Errorf("no identifier found") - } - - // Check for a possible call expression, in which case we should add a - // new function declaration. - if isCallPosition(path) { - return newFunctionDeclaration(path, file, pkg, info, fset) - } - - // Get the place to insert the new statement. - insertBeforeStmt := analysisinternal.StmtToInsertVarBefore(path) - if insertBeforeStmt == nil { - return nil, nil, fmt.Errorf("could not locate insertion point") - } - - insertBefore := safetoken.StartPosition(fset, insertBeforeStmt.Pos()).Offset - - // Get the indent to add on the line after the new statement. - // Since this will have a parse error, we can not use format.Source(). - contentBeforeStmt, indent := content[:insertBefore], "\n" - if nl := bytes.LastIndex(contentBeforeStmt, []byte("\n")); nl != -1 { - indent = string(contentBeforeStmt[nl:]) - } - - // Create the new local variable statement. - newStmt := fmt.Sprintf("%s := %s", ident.Name, indent) - return fset, &analysis.SuggestedFix{ - Message: fmt.Sprintf("Create variable %q", ident.Name), - TextEdits: []analysis.TextEdit{{ - Pos: insertBeforeStmt.Pos(), - End: insertBeforeStmt.Pos(), - NewText: []byte(newStmt), - }}, - }, nil -} - -func newFunctionDeclaration(path []ast.Node, file *ast.File, pkg *types.Package, info *types.Info, fset *token.FileSet) (*token.FileSet, *analysis.SuggestedFix, error) { - if len(path) < 3 { - return nil, nil, fmt.Errorf("unexpected set of enclosing nodes: %v", path) - } - ident, ok := path[0].(*ast.Ident) - if !ok { - return nil, nil, fmt.Errorf("no name for function declaration %v (%T)", path[0], path[0]) - } - call, ok := path[1].(*ast.CallExpr) - if !ok { - return nil, nil, fmt.Errorf("no call expression found %v (%T)", path[1], path[1]) - } - - // Find the enclosing function, so that we can add the new declaration - // below. 
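Both runForError and SuggestedFix start from astutil.PathEnclosingInterval to recover the syntax surrounding the error position; the isCallPosition helper then simply checks whether path[1] is a call whose Fun is path[0]. A rough standalone illustration of what that path looks like for an undeclared call (the source snippet is invented):

	package main

	import (
		"fmt"
		"go/parser"
		"go/token"
		"strings"

		"golang.org/x/tools/go/ast/astutil"
	)

	const src = `package p

	func f() {
		undeclared()
	}
	`

	func main() {
		fset := token.NewFileSet()
		f, err := parser.ParseFile(fset, "p.go", src, 0)
		if err != nil {
			panic(err)
		}
		// Position of the undeclared identifier; in the analyzer this comes
		// from types.Error.Pos.
		pos := fset.File(f.Pos()).Pos(strings.Index(src, "undeclared"))

		path, _ := astutil.PathEnclosingInterval(f, pos, pos)
		for _, n := range path {
			fmt.Printf("%T\n", n)
		}
		// *ast.Ident, *ast.CallExpr, *ast.ExprStmt, *ast.BlockStmt,
		// *ast.FuncDecl, *ast.File
	}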
- var enclosing *ast.FuncDecl - for _, n := range path { - if n, ok := n.(*ast.FuncDecl); ok { - enclosing = n - break - } - } - // TODO(rstambler): Support the situation when there is no enclosing - // function. - if enclosing == nil { - return nil, nil, fmt.Errorf("no enclosing function found: %v", path) - } - - pos := enclosing.End() - - var paramNames []string - var paramTypes []types.Type - // keep track of all param names to later ensure uniqueness - nameCounts := map[string]int{} - for _, arg := range call.Args { - typ := info.TypeOf(arg) - if typ == nil { - return nil, nil, fmt.Errorf("unable to determine type for %s", arg) - } - - switch t := typ.(type) { - // this is the case where another function call returning multiple - // results is used as an argument - case *types.Tuple: - n := t.Len() - for i := 0; i < n; i++ { - name := typeToArgName(t.At(i).Type()) - nameCounts[name]++ - - paramNames = append(paramNames, name) - paramTypes = append(paramTypes, types.Default(t.At(i).Type())) - } - - default: - // does the argument have a name we can reuse? - // only happens in case of a *ast.Ident - var name string - if ident, ok := arg.(*ast.Ident); ok { - name = ident.Name - } - - if name == "" { - name = typeToArgName(typ) - } - - nameCounts[name]++ - - paramNames = append(paramNames, name) - paramTypes = append(paramTypes, types.Default(typ)) - } - } - - for n, c := range nameCounts { - // Any names we saw more than once will need a unique suffix added - // on. Reset the count to 1 to act as the suffix for the first - // occurrence of that name. - if c >= 2 { - nameCounts[n] = 1 - } else { - delete(nameCounts, n) - } - } - - params := &ast.FieldList{} - - for i, name := range paramNames { - if suffix, repeats := nameCounts[name]; repeats { - nameCounts[name]++ - name = fmt.Sprintf("%s%d", name, suffix) - } - - // only worth checking after previous param in the list - if i > 0 { - // if type of parameter at hand is the same as the previous one, - // add it to the previous param list of identifiers so to have: - // (s1, s2 string) - // and not - // (s1 string, s2 string) - if paramTypes[i] == paramTypes[i-1] { - params.List[len(params.List)-1].Names = append(params.List[len(params.List)-1].Names, ast.NewIdent(name)) - continue - } - } - - params.List = append(params.List, &ast.Field{ - Names: []*ast.Ident{ - ast.NewIdent(name), - }, - Type: analysisinternal.TypeExpr(file, pkg, paramTypes[i]), - }) - } - - decl := &ast.FuncDecl{ - Name: ast.NewIdent(ident.Name), - Type: &ast.FuncType{ - Params: params, - // TODO(golang/go#47558): Also handle result - // parameters here based on context of CallExpr. 
- }, - Body: &ast.BlockStmt{ - List: []ast.Stmt{ - &ast.ExprStmt{ - X: &ast.CallExpr{ - Fun: ast.NewIdent("panic"), - Args: []ast.Expr{ - &ast.BasicLit{ - Value: `"unimplemented"`, - }, - }, - }, - }, - }, - }, - } - - b := bytes.NewBufferString("\n\n") - if err := format.Node(b, fset, decl); err != nil { - return nil, nil, err - } - return fset, &analysis.SuggestedFix{ - Message: fmt.Sprintf("Create function %q", ident.Name), - TextEdits: []analysis.TextEdit{{ - Pos: pos, - End: pos, - NewText: b.Bytes(), - }}, - }, nil -} - -func typeToArgName(ty types.Type) string { - s := types.Default(ty).String() - - switch t := ty.(type) { - case *types.Basic: - // use first letter in type name for basic types - return s[0:1] - case *types.Slice: - // use element type to decide var name for slices - return typeToArgName(t.Elem()) - case *types.Array: - // use element type to decide var name for arrays - return typeToArgName(t.Elem()) - case *types.Chan: - return "ch" - } - - s = strings.TrimFunc(s, func(r rune) bool { - return !unicode.IsLetter(r) - }) - - if s == "error" { - return "err" - } - - // remove package (if present) - // and make first letter lowercase - a := []rune(s[strings.LastIndexByte(s, '.')+1:]) - a[0] = unicode.ToLower(a[0]) - return string(a) -} - -// isCallPosition reports whether the path denotes the subtree in call position, f(). -func isCallPosition(path []ast.Node) bool { - return len(path) > 1 && - is[*ast.CallExpr](path[1]) && - path[1].(*ast.CallExpr).Fun == path[0] -} - -func is[T any](x any) bool { - _, ok := x.(T) - return ok -} diff --git a/internal/golangorgx/gopls/analysis/unusedparams/doc.go b/internal/golangorgx/gopls/analysis/unusedparams/doc.go deleted file mode 100644 index 07e43c0d084..00000000000 --- a/internal/golangorgx/gopls/analysis/unusedparams/doc.go +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package unusedparams defines an analyzer that checks for unused -// parameters of functions. -// -// # Analyzer unusedparams -// -// unusedparams: check for unused parameters of functions -// -// The unusedparams analyzer checks functions to see if there are -// any parameters that are not being used. -// -// To ensure soundness, it ignores: -// - "address-taken" functions, that is, functions that are used as -// a value rather than being called directly; their signatures may -// be required to conform to a func type. -// - exported functions or methods, since they may be address-taken -// in another package. -// - unexported methods whose name matches an interface method -// declared in the same package, since the method's signature -// may be required to conform to the interface type. -// - functions with empty bodies, or containing just a call to panic. -// - parameters that are unnamed, or named "_", the blank identifier. -// -// The analyzer suggests a fix of replacing the parameter name by "_", -// but in such cases a deeper fix can be obtained by invoking the -// "Refactor: remove unused parameter" code action, which will -// eliminate the parameter entirely, along with all corresponding -// arguments at call sites, while taking care to preserve any side -// effects in the argument expressions; see -// https://github.com/golang/tools/releases/tag/gopls%2Fv0.14. 
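newFunctionDeclaration above assembles the stub purely as an AST and renders it with format.Node. A minimal sketch of that build-then-print step, with an invented function name and parameter:

	package main

	import (
		"bytes"
		"fmt"
		"go/ast"
		"go/format"
		"go/token"
	)

	func main() {
		// A synthesized declaration with no source positions, as the
		// suggested-fix code builds it.
		decl := &ast.FuncDecl{
			Name: ast.NewIdent("newFunc"),
			Type: &ast.FuncType{
				Params: &ast.FieldList{List: []*ast.Field{{
					Names: []*ast.Ident{ast.NewIdent("s")},
					Type:  ast.NewIdent("string"),
				}}},
			},
			Body: &ast.BlockStmt{List: []ast.Stmt{
				&ast.ExprStmt{X: &ast.CallExpr{
					Fun:  ast.NewIdent("panic"),
					Args: []ast.Expr{&ast.BasicLit{Kind: token.STRING, Value: `"unimplemented"`}},
				}},
			}},
		}

		var b bytes.Buffer
		if err := format.Node(&b, token.NewFileSet(), decl); err != nil {
			panic(err)
		}
		fmt.Println(b.String())
		// func newFunc(s string) {
		// 	panic("unimplemented")
		// }
	}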
-package unusedparams diff --git a/internal/golangorgx/gopls/analysis/unusedparams/unusedparams.go b/internal/golangorgx/gopls/analysis/unusedparams/unusedparams.go deleted file mode 100644 index 85422014a71..00000000000 --- a/internal/golangorgx/gopls/analysis/unusedparams/unusedparams.go +++ /dev/null @@ -1,308 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package unusedparams - -import ( - _ "embed" - "fmt" - "go/ast" - "go/types" - - "cuelang.org/go/internal/golangorgx/gopls/util/slices" - "cuelang.org/go/internal/golangorgx/tools/analysisinternal" - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -//go:embed doc.go -var doc string - -var Analyzer = &analysis.Analyzer{ - Name: "unusedparams", - Doc: analysisinternal.MustExtractDoc(doc, "unusedparams"), - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: run, - URL: "https://pkg.go.dev/cuelang.org/go/internal/golangorgx/gopls/analysis/unusedparams", -} - -const FixCategory = "unusedparam" // recognized by gopls ApplyFix - -func run(pass *analysis.Pass) (any, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - // First find all "address-taken" functions. - // We must conservatively assume that their parameters - // are all required to conform to some signature. - // - // A named function is address-taken if it is somewhere - // used not in call position: - // - // f(...) // not address-taken - // use(f) // address-taken - // - // A literal function is address-taken if it is not - // immediately bound to a variable, or if that variable is - // used not in call position: - // - // f := func() { ... }; f() used only in call position - // var f func(); f = func() { ...f()... }; f() ditto - // use(func() { ... }) address-taken - // - - // Note: this algorithm relies on the assumption that the - // analyzer is called only for the "widest" package for a - // given file: that is, p_test in preference to p, if both - // exist. Analyzing only package p may produce diagnostics - // that would be falsified based on declarations in p_test.go - // files. The gopls analysis driver does this, but most - // drivers to not, so running this command in, say, - // unitchecker or multichecker may produce incorrect results. - - // Gather global information: - // - uses of functions not in call position - // - unexported interface methods - // - all referenced variables - - usesOutsideCall := make(map[types.Object][]*ast.Ident) - unexportedIMethodNames := make(map[string]bool) - { - callPosn := make(map[*ast.Ident]bool) // all idents f appearing in f() calls - filter := []ast.Node{ - (*ast.CallExpr)(nil), - (*ast.InterfaceType)(nil), - } - inspect.Preorder(filter, func(n ast.Node) { - switch n := n.(type) { - case *ast.CallExpr: - // Strip off any generic instantiation. - fun := n.Fun - switch fun_ := fun.(type) { - case *ast.IndexExpr: - fun = fun_.X // f[T]() (funcs[i]() is rejected below) - case *ast.IndexListExpr: - fun = fun_.X // f[K, V]() - } - - // Find object: - // record non-exported function, method, or func-typed var. 
- var id *ast.Ident - switch fun := fun.(type) { - case *ast.Ident: - id = fun - case *ast.SelectorExpr: - id = fun.Sel - } - if id != nil && !id.IsExported() { - switch pass.TypesInfo.Uses[id].(type) { - case *types.Func, *types.Var: - callPosn[id] = true - } - } - - case *ast.InterfaceType: - // Record the set of names of unexported interface methods. - // (It would be more precise to record signatures but - // generics makes it tricky, and this conservative - // heuristic is close enough.) - t := pass.TypesInfo.TypeOf(n).(*types.Interface) - for i := 0; i < t.NumExplicitMethods(); i++ { - m := t.ExplicitMethod(i) - if !m.Exported() && m.Name() != "_" { - unexportedIMethodNames[m.Name()] = true - } - } - } - }) - - for id, obj := range pass.TypesInfo.Uses { - if !callPosn[id] { - // This includes "f = func() {...}", which we deal with below. - usesOutsideCall[obj] = append(usesOutsideCall[obj], id) - } - } - } - - // Find all vars (notably parameters) that are used. - usedVars := make(map[*types.Var]bool) - for _, obj := range pass.TypesInfo.Uses { - if v, ok := obj.(*types.Var); ok { - if v.IsField() { - continue // no point gathering these - } - usedVars[v] = true - } - } - - // Check each non-address-taken function's parameters are all used. - filter := []ast.Node{ - (*ast.FuncDecl)(nil), - (*ast.FuncLit)(nil), - } - inspect.WithStack(filter, func(n ast.Node, push bool, stack []ast.Node) bool { - // (We always return true so that we visit nested FuncLits.) - - if !push { - return true - } - - var ( - fn types.Object // function symbol (*Func, possibly *Var for a FuncLit) - ftype *ast.FuncType - body *ast.BlockStmt - ) - switch n := n.(type) { - case *ast.FuncDecl: - // We can't analyze non-Go functions. - if n.Body == nil { - return true - } - - // Ignore exported functions and methods: we - // must assume they may be address-taken in - // another package. - if n.Name.IsExported() { - return true - } - - // Ignore methods that match the name of any - // interface method declared in this package, - // as the method's signature may need to conform - // to the interface. - if n.Recv != nil && unexportedIMethodNames[n.Name.Name] { - return true - } - - fn = pass.TypesInfo.Defs[n.Name].(*types.Func) - ftype, body = n.Type, n.Body - - case *ast.FuncLit: - // Find the symbol for the variable (if any) - // to which the FuncLit is bound. - // (We don't bother to allow ParenExprs.) - switch parent := stack[len(stack)-2].(type) { - case *ast.AssignStmt: - // f = func() {...} - // f := func() {...} - for i, rhs := range parent.Rhs { - if rhs == n { - if id, ok := parent.Lhs[i].(*ast.Ident); ok { - fn = pass.TypesInfo.ObjectOf(id) - - // Edge case: f = func() {...} - // should not count as a use. - if pass.TypesInfo.Uses[id] != nil { - usesOutsideCall[fn] = slices.Remove(usesOutsideCall[fn], id) - } - - if fn == nil && id.Name == "_" { - // Edge case: _ = func() {...} - // has no var. Fake one. - fn = types.NewVar(id.Pos(), pass.Pkg, id.Name, pass.TypesInfo.TypeOf(n)) - } - } - break - } - } - - case *ast.ValueSpec: - // var f = func() { ... } - // (unless f is an exported package-level var) - for i, val := range parent.Values { - if val == n { - v := pass.TypesInfo.Defs[parent.Names[i]] - if !(v.Parent() == pass.Pkg.Scope() && v.Exported()) { - fn = v - } - break - } - } - } - - ftype, body = n.Type, n.Body - } - - // Ignore address-taken functions and methods: unused - // parameters may be needed to conform to a func type. 
- if fn == nil || len(usesOutsideCall[fn]) > 0 { - return true - } - - // If there are no parameters, there are no unused parameters. - if ftype.Params.NumFields() == 0 { - return true - } - - // To reduce false positives, ignore functions with an - // empty or panic body. - // - // We choose not to ignore functions whose body is a - // single return statement (as earlier versions did) - // func f() { return } - // func f() { return g(...) } - // as we suspect that was just heuristic to reduce - // false positives in the earlier unsound algorithm. - switch len(body.List) { - case 0: - // Empty body. Although the parameter is - // unnecessary, it's pretty obvious to the - // reader that that's the case, so we allow it. - return true // func f() {} - case 1: - if stmt, ok := body.List[0].(*ast.ExprStmt); ok { - // We allow a panic body, as it is often a - // placeholder for a future implementation: - // func f() { panic(...) } - if call, ok := stmt.X.(*ast.CallExpr); ok { - if fun, ok := call.Fun.(*ast.Ident); ok && fun.Name == "panic" { - return true - } - } - } - } - - // Report each unused parameter. - for _, field := range ftype.Params.List { - for _, id := range field.Names { - if id.Name == "_" { - continue - } - param := pass.TypesInfo.Defs[id].(*types.Var) - if !usedVars[param] { - start, end := field.Pos(), field.End() - if len(field.Names) > 1 { - start, end = id.Pos(), id.End() - } - // This diagnostic carries both an edit-based fix to - // rename the unused parameter, and a command-based fix - // to remove it (see golang.RemoveUnusedParameter). - pass.Report(analysis.Diagnostic{ - Pos: start, - End: end, - Message: fmt.Sprintf("unused parameter: %s", id.Name), - Category: FixCategory, - SuggestedFixes: []analysis.SuggestedFix{ - { - Message: `Rename parameter to "_"`, - TextEdits: []analysis.TextEdit{{ - Pos: id.Pos(), - End: id.End(), - NewText: []byte("_"), - }}, - }, - { - Message: fmt.Sprintf("Remove unused parameter %q", id.Name), - // No TextEdits => computed by gopls command - }, - }, - }) - } - } - } - - return true - }) - return nil, nil -} diff --git a/internal/golangorgx/gopls/analysis/unusedvariable/unusedvariable.go b/internal/golangorgx/gopls/analysis/unusedvariable/unusedvariable.go deleted file mode 100644 index dbc22d505b1..00000000000 --- a/internal/golangorgx/gopls/analysis/unusedvariable/unusedvariable.go +++ /dev/null @@ -1,306 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package unusedvariable defines an analyzer that checks for unused variables. -package unusedvariable - -import ( - "bytes" - "fmt" - "go/ast" - "go/format" - "go/token" - "go/types" - "strings" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/ast/astutil" -) - -const Doc = `check for unused variables and suggest fixes` - -var Analyzer = &analysis.Analyzer{ - Name: "unusedvariable", - Doc: Doc, - Requires: []*analysis.Analyzer{}, - Run: run, - RunDespiteErrors: true, // an unusedvariable diagnostic is a compile error - URL: "https://pkg.go.dev/cuelang.org/go/internal/golangorgx/gopls/analysis/unusedvariable", -} - -// The suffix for this error message changed in Go 1.20. 
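The reporting loop above attaches two fixes to every unused parameter: an edit that renames it to "_" and a command-based fix that removes it entirely. A small sketch, with invented function names, of code the analyzer would flag and the renamed form the edit produces:

	package main

	import "fmt"

	// debugf is unexported, never address-taken, and has a non-trivial body,
	// so its unused first parameter is reported: "unused parameter: format".
	func debugf(format string, level int) {
		if level > 0 {
			fmt.Println("level:", level)
		}
	}

	// After the edit-based fix the parameter is renamed to "_"; the
	// command-based fix would instead remove it and update all call sites.
	func debugf2(_ string, level int) {
		if level > 0 {
			fmt.Println("level:", level)
		}
	}

	func main() {
		debugf("x", 1)
		debugf2("x", 1)
	}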
-var unusedVariableSuffixes = []string{" declared and not used", " declared but not used"} - -func run(pass *analysis.Pass) (interface{}, error) { - for _, typeErr := range pass.TypeErrors { - for _, suffix := range unusedVariableSuffixes { - if strings.HasSuffix(typeErr.Msg, suffix) { - varName := strings.TrimSuffix(typeErr.Msg, suffix) - err := runForError(pass, typeErr, varName) - if err != nil { - return nil, err - } - } - } - } - - return nil, nil -} - -func runForError(pass *analysis.Pass, err types.Error, name string) error { - var file *ast.File - for _, f := range pass.Files { - if f.Pos() <= err.Pos && err.Pos < f.End() { - file = f - break - } - } - if file == nil { - return nil - } - - path, _ := astutil.PathEnclosingInterval(file, err.Pos, err.Pos) - if len(path) < 2 { - return nil - } - - ident, ok := path[0].(*ast.Ident) - if !ok || ident.Name != name { - return nil - } - - diag := analysis.Diagnostic{ - Pos: ident.Pos(), - End: ident.End(), - Message: err.Msg, - } - - for i := range path { - switch stmt := path[i].(type) { - case *ast.ValueSpec: - // Find GenDecl to which offending ValueSpec belongs. - if decl, ok := path[i+1].(*ast.GenDecl); ok { - fixes := removeVariableFromSpec(pass, path, stmt, decl, ident) - // fixes may be nil - if len(fixes) > 0 { - diag.SuggestedFixes = fixes - pass.Report(diag) - } - } - - case *ast.AssignStmt: - if stmt.Tok != token.DEFINE { - continue - } - - containsIdent := false - for _, expr := range stmt.Lhs { - if expr == ident { - containsIdent = true - } - } - if !containsIdent { - continue - } - - fixes := removeVariableFromAssignment(path, stmt, ident) - // fixes may be nil - if len(fixes) > 0 { - diag.SuggestedFixes = fixes - pass.Report(diag) - } - } - } - - return nil -} - -func removeVariableFromSpec(pass *analysis.Pass, path []ast.Node, stmt *ast.ValueSpec, decl *ast.GenDecl, ident *ast.Ident) []analysis.SuggestedFix { - newDecl := new(ast.GenDecl) - *newDecl = *decl - newDecl.Specs = nil - - for _, spec := range decl.Specs { - if spec != stmt { - newDecl.Specs = append(newDecl.Specs, spec) - continue - } - - newSpec := new(ast.ValueSpec) - *newSpec = *stmt - newSpec.Names = nil - - for _, n := range stmt.Names { - if n != ident { - newSpec.Names = append(newSpec.Names, n) - } - } - - if len(newSpec.Names) > 0 { - newDecl.Specs = append(newDecl.Specs, newSpec) - } - } - - // decl.End() does not include any comments, so if a comment is present we - // need to account for it when we delete the statement - end := decl.End() - if stmt.Comment != nil && stmt.Comment.End() > end { - end = stmt.Comment.End() - } - - // There are no other specs left in the declaration, the whole statement can - // be deleted - if len(newDecl.Specs) == 0 { - // Find parent DeclStmt and delete it - for _, node := range path { - if declStmt, ok := node.(*ast.DeclStmt); ok { - edits := deleteStmtFromBlock(path, declStmt) - if len(edits) == 0 { - return nil // can this happen? 
- } - return []analysis.SuggestedFix{ - { - Message: suggestedFixMessage(ident.Name), - TextEdits: edits, - }, - } - } - } - } - - var b bytes.Buffer - if err := format.Node(&b, pass.Fset, newDecl); err != nil { - return nil - } - - return []analysis.SuggestedFix{ - { - Message: suggestedFixMessage(ident.Name), - TextEdits: []analysis.TextEdit{ - { - Pos: decl.Pos(), - // Avoid adding a new empty line - End: end + 1, - NewText: b.Bytes(), - }, - }, - }, - } -} - -func removeVariableFromAssignment(path []ast.Node, stmt *ast.AssignStmt, ident *ast.Ident) []analysis.SuggestedFix { - // The only variable in the assignment is unused - if len(stmt.Lhs) == 1 { - // If LHS has only one expression to be valid it has to have 1 expression - // on RHS - // - // RHS may have side effects, preserve RHS - if exprMayHaveSideEffects(stmt.Rhs[0]) { - // Delete until RHS - return []analysis.SuggestedFix{ - { - Message: suggestedFixMessage(ident.Name), - TextEdits: []analysis.TextEdit{ - { - Pos: ident.Pos(), - End: stmt.Rhs[0].Pos(), - }, - }, - }, - } - } - - // RHS does not have any side effects, delete the whole statement - edits := deleteStmtFromBlock(path, stmt) - if len(edits) == 0 { - return nil // can this happen? - } - return []analysis.SuggestedFix{ - { - Message: suggestedFixMessage(ident.Name), - TextEdits: edits, - }, - } - } - - // Otherwise replace ident with `_` - return []analysis.SuggestedFix{ - { - Message: suggestedFixMessage(ident.Name), - TextEdits: []analysis.TextEdit{ - { - Pos: ident.Pos(), - End: ident.End(), - NewText: []byte("_"), - }, - }, - }, - } -} - -func suggestedFixMessage(name string) string { - return fmt.Sprintf("Remove variable %s", name) -} - -func deleteStmtFromBlock(path []ast.Node, stmt ast.Stmt) []analysis.TextEdit { - // Find innermost enclosing BlockStmt. - var block *ast.BlockStmt - for i := range path { - if blockStmt, ok := path[i].(*ast.BlockStmt); ok { - block = blockStmt - break - } - } - - nodeIndex := -1 - for i, blockStmt := range block.List { - if blockStmt == stmt { - nodeIndex = i - break - } - } - - // The statement we need to delete was not found in BlockStmt - if nodeIndex == -1 { - return nil - } - - // Delete until the end of the block unless there is another statement after - // the one we are trying to delete - end := block.Rbrace - if nodeIndex < len(block.List)-1 { - end = block.List[nodeIndex+1].Pos() - } - - return []analysis.TextEdit{ - { - Pos: stmt.Pos(), - End: end, - }, - } -} - -// exprMayHaveSideEffects reports whether the expression may have side effects -// (because it contains a function call or channel receive). We disregard -// runtime panics as well written programs should not encounter them. -func exprMayHaveSideEffects(expr ast.Expr) bool { - var mayHaveSideEffects bool - ast.Inspect(expr, func(n ast.Node) bool { - switch n := n.(type) { - case *ast.CallExpr: // possible function call - mayHaveSideEffects = true - return false - case *ast.UnaryExpr: - if n.Op == token.ARROW { // channel receive - mayHaveSideEffects = true - return false - } - case *ast.FuncLit: - return false // evaluating what's inside a FuncLit has no effect - } - return true - }) - - return mayHaveSideEffects -} diff --git a/internal/golangorgx/gopls/analysis/useany/useany.go b/internal/golangorgx/gopls/analysis/useany/useany.go deleted file mode 100644 index 5eb44d8ec82..00000000000 --- a/internal/golangorgx/gopls/analysis/useany/useany.go +++ /dev/null @@ -1,98 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. 
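The unusedvariable fixes above come in three shapes: keep a side-effecting RHS, delete a pure declaration outright, or blank a single name in a multi-variable assignment. A compilable sketch (next is an invented helper) showing the code after each kind of fix, with the offending originals in comments:

	package main

	import "fmt"

	func next() int {
		fmt.Println("side effect")
		return 1
	}

	func main() {
		// Before the fix (does not compile): v is declared and not used.
		//
		//	v := next()
		//
		// Because the RHS may have a side effect (it is a call), the fix
		// deletes only the "v := " part, leaving the call behind:
		next()

		// A pure RHS such as "n := 1" is deleted as a whole statement, and
		// when the assignment defines several variables only the unused name
		// is replaced by "_":
		_, used := 0, 2
		fmt.Println(used)
	}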
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package useany defines an Analyzer that checks for usage of interface{} in -// constraints, rather than the predeclared any. -package useany - -import ( - "fmt" - "go/ast" - "go/token" - "go/types" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -const Doc = `check for constraints that could be simplified to "any"` - -var Analyzer = &analysis.Analyzer{ - Name: "useany", - Doc: Doc, - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: run, - URL: "https://pkg.go.dev/cuelang.org/go/internal/golangorgx/gopls/analysis/useany", -} - -func run(pass *analysis.Pass) (interface{}, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - universeAny := types.Universe.Lookup("any") - - nodeFilter := []ast.Node{ - (*ast.TypeSpec)(nil), - (*ast.FuncType)(nil), - } - - inspect.Preorder(nodeFilter, func(node ast.Node) { - var tparams *ast.FieldList - switch node := node.(type) { - case *ast.TypeSpec: - tparams = node.TypeParams - case *ast.FuncType: - tparams = node.TypeParams - default: - panic(fmt.Sprintf("unexpected node type %T", node)) - } - if tparams.NumFields() == 0 { - return - } - - for _, field := range tparams.List { - typ := pass.TypesInfo.Types[field.Type].Type - if typ == nil { - continue // something is wrong, but not our concern - } - iface, ok := typ.Underlying().(*types.Interface) - if !ok { - continue // invalid constraint - } - - // If the constraint is the empty interface, offer a fix to use 'any' - // instead. - if iface.Empty() { - id, _ := field.Type.(*ast.Ident) - if id != nil && pass.TypesInfo.Uses[id] == universeAny { - continue - } - - diag := analysis.Diagnostic{ - Pos: field.Type.Pos(), - End: field.Type.End(), - Message: `could use "any" for this empty interface`, - } - - // Only suggest a fix to 'any' if we actually resolve the predeclared - // any in this scope. - if scope := pass.TypesInfo.Scopes[node]; scope != nil { - if _, any := scope.LookupParent("any", token.NoPos); any == universeAny { - diag.SuggestedFixes = []analysis.SuggestedFix{{ - Message: `use "any"`, - TextEdits: []analysis.TextEdit{{ - Pos: field.Type.Pos(), - End: field.Type.End(), - NewText: []byte("any"), - }}, - }} - } - } - - pass.Report(diag) - } - } - }) - return nil, nil -} diff --git a/internal/golangorgx/gopls/cache/errors.go b/internal/golangorgx/gopls/cache/errors.go index bbdd7d28c4d..dea04bfb847 100644 --- a/internal/golangorgx/gopls/cache/errors.go +++ b/internal/golangorgx/gopls/cache/errors.go @@ -14,7 +14,6 @@ import ( "go/parser" "go/scanner" "go/token" - "log" "path/filepath" "regexp" "strconv" @@ -295,65 +294,6 @@ func toSourceDiagnostic(srcAnalyzer *settings.Analyzer, gobDiag *gobDiagnostic) Tags: srcAnalyzer.Tag, } - // We cross the set of fixes (whether edit- or command-based) - // with the set of kinds, as a single fix may represent more - // than one kind of action (e.g. refactor, quickfix, fixall), - // each corresponding to a distinct client UI element - // or operation. - kinds := srcAnalyzer.ActionKinds - if len(kinds) == 0 { - kinds = []protocol.CodeActionKind{protocol.QuickFix} - } - - var fixes []SuggestedFix - for _, fix := range gobDiag.SuggestedFixes { - if len(fix.TextEdits) > 0 { - // Accumulate edit-based fixes supplied by the diagnostic itself. 
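For the useany analyzer above, a short generic example of a constraint it flags and the signature its fix suggests:

	package main

	import "fmt"

	// Keys is flagged: the interface{} constraint is an empty interface, so
	// the analyzer suggests replacing it with "any".
	func Keys[K comparable, V interface{}](m map[K]V) []K {
		r := make([]K, 0, len(m))
		for k := range m {
			r = append(r, k)
		}
		return r
	}

	// The simplified signature after applying the fix:
	func KeysSimplified[K comparable, V any](m map[K]V) []K {
		return Keys(m)
	}

	func main() {
		fmt.Println(KeysSimplified(map[string]int{"a": 1}))
	}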
- edits := make(map[protocol.DocumentURI][]protocol.TextEdit) - for _, e := range fix.TextEdits { - uri := e.Location.URI - edits[uri] = append(edits[uri], protocol.TextEdit{ - Range: e.Location.Range, - NewText: string(e.NewText), - }) - } - for _, kind := range kinds { - fixes = append(fixes, SuggestedFix{ - Title: fix.Message, - Edits: edits, - ActionKind: kind, - }) - } - - } else { - // Accumulate command-based fixes, whose edits - // are not provided by the analyzer but are computed on demand - // by logic "adjacent to" the analyzer. - // - // The analysis.Diagnostic.Category is used as the fix name. - cmd, err := command.NewApplyFixCommand(fix.Message, command.ApplyFixArgs{ - Fix: diag.Code, - URI: gobDiag.Location.URI, - Range: gobDiag.Location.Range, - }) - if err != nil { - // JSON marshalling of these argument values cannot fail. - log.Fatalf("internal error in NewApplyFixCommand: %v", err) - } - for _, kind := range kinds { - fixes = append(fixes, SuggestedFixFromCommand(cmd, kind)) - } - - // Ensure that the analyzer specifies a category for all its no-edit fixes. - // This is asserted by analysistest.RunWithSuggestedFixes, but there - // may be gaps in test coverage. - if diag.Code == "" || diag.Code == "default" { - bug.Reportf("missing Diagnostic.Code: %#v", *diag) - } - } - } - diag.SuggestedFixes = fixes - // If the fixes only delete code, assume that the diagnostic is reporting dead code. if onlyDeletions(diag.SuggestedFixes) { diag.Tags = append(diag.Tags, protocol.Unnecessary) diff --git a/internal/golangorgx/gopls/golang/add_import.go b/internal/golangorgx/gopls/golang/add_import.go deleted file mode 100644 index 673fda14e1e..00000000000 --- a/internal/golangorgx/gopls/golang/add_import.go +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package golang - -import ( - "context" - - "cuelang.org/go/internal/golangorgx/gopls/cache" - "cuelang.org/go/internal/golangorgx/gopls/file" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/tools/imports" -) - -// AddImport adds a single import statement to the given file -func AddImport(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, importPath string) ([]protocol.TextEdit, error) { - pgf, err := snapshot.ParseGo(ctx, fh, ParseFull) - if err != nil { - return nil, err - } - return ComputeOneImportFixEdits(snapshot, pgf, &imports.ImportFix{ - StmtInfo: imports.ImportInfo{ - ImportPath: importPath, - }, - FixType: imports.AddImport, - }) -} diff --git a/internal/golangorgx/gopls/golang/call_hierarchy.go b/internal/golangorgx/gopls/golang/call_hierarchy.go deleted file mode 100644 index 65e6bc33203..00000000000 --- a/internal/golangorgx/gopls/golang/call_hierarchy.go +++ /dev/null @@ -1,312 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
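The hunk removed from errors.go above distinguished edit-based fixes, which ship TextEdits, from command-based fixes, which ship none and are resolved later by gopls logic keyed on the diagnostic's category. A minimal sketch of that distinction using only the analysis package; the messages and positions are placeholders:

	package main

	import (
		"fmt"

		"golang.org/x/tools/go/analysis"
	)

	func main() {
		// Edit-based fix: the analyzer supplies concrete TextEdits that the
		// client can apply directly.
		editFix := analysis.SuggestedFix{
			Message:   "Remove 'len(s)'",
			TextEdits: []analysis.TextEdit{{Pos: 10, End: 16}},
		}

		// Command-based fix: no TextEdits at all; the edits are computed on
		// demand by a separate command (in gopls, ApplyFix, which looks up
		// the diagnostic's category).
		commandFix := analysis.SuggestedFix{
			Message: "Declare missing methods of io.Writer",
		}

		fmt.Println(len(editFix.TextEdits) > 0, len(commandFix.TextEdits) > 0) // true false
	}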
- -package golang - -import ( - "context" - "errors" - "fmt" - "go/ast" - "go/token" - "go/types" - "path/filepath" - - "cuelang.org/go/internal/golangorgx/gopls/cache" - "cuelang.org/go/internal/golangorgx/gopls/file" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/util/bug" - "cuelang.org/go/internal/golangorgx/gopls/util/safetoken" - "cuelang.org/go/internal/golangorgx/tools/event" - "cuelang.org/go/internal/golangorgx/tools/event/tag" - "golang.org/x/tools/go/ast/astutil" -) - -// PrepareCallHierarchy returns an array of CallHierarchyItem for a file and the position within the file. -func PrepareCallHierarchy(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pp protocol.Position) ([]protocol.CallHierarchyItem, error) { - ctx, done := event.Start(ctx, "golang.PrepareCallHierarchy") - defer done() - - pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) - if err != nil { - return nil, err - } - pos, err := pgf.PositionPos(pp) - if err != nil { - return nil, err - } - - _, obj, _ := referencedObject(pkg, pgf, pos) - if obj == nil { - return nil, nil - } - - if _, ok := obj.Type().Underlying().(*types.Signature); !ok { - return nil, nil - } - - declLoc, err := mapPosition(ctx, pkg.FileSet(), snapshot, obj.Pos(), adjustedObjEnd(obj)) - if err != nil { - return nil, err - } - rng := declLoc.Range - - callHierarchyItem := protocol.CallHierarchyItem{ - Name: obj.Name(), - Kind: protocol.Function, - Tags: []protocol.SymbolTag{}, - Detail: fmt.Sprintf("%s • %s", obj.Pkg().Path(), filepath.Base(declLoc.URI.Path())), - URI: declLoc.URI, - Range: rng, - SelectionRange: rng, - } - return []protocol.CallHierarchyItem{callHierarchyItem}, nil -} - -// IncomingCalls returns an array of CallHierarchyIncomingCall for a file and the position within the file. -func IncomingCalls(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pos protocol.Position) ([]protocol.CallHierarchyIncomingCall, error) { - ctx, done := event.Start(ctx, "golang.IncomingCalls") - defer done() - - refs, err := references(ctx, snapshot, fh, pos, false) - if err != nil { - if errors.Is(err, ErrNoIdentFound) || errors.Is(err, errNoObjectFound) { - return nil, nil - } - return nil, err - } - - // Group references by their enclosing function declaration. - incomingCalls := make(map[protocol.Location]*protocol.CallHierarchyIncomingCall) - for _, ref := range refs { - callItem, err := enclosingNodeCallItem(ctx, snapshot, ref.pkgPath, ref.location) - if err != nil { - event.Error(ctx, "error getting enclosing node", err, tag.Method.Of(string(ref.pkgPath))) - continue - } - loc := protocol.Location{ - URI: callItem.URI, - Range: callItem.Range, - } - call, ok := incomingCalls[loc] - if !ok { - call = &protocol.CallHierarchyIncomingCall{From: callItem} - incomingCalls[loc] = call - } - call.FromRanges = append(call.FromRanges, ref.location.Range) - } - - // Flatten the map of pointers into a slice of values. - incomingCallItems := make([]protocol.CallHierarchyIncomingCall, 0, len(incomingCalls)) - for _, callItem := range incomingCalls { - incomingCallItems = append(incomingCallItems, *callItem) - } - return incomingCallItems, nil -} - -// enclosingNodeCallItem creates a CallHierarchyItem representing the function call at loc. -func enclosingNodeCallItem(ctx context.Context, snapshot *cache.Snapshot, pkgPath PackagePath, loc protocol.Location) (protocol.CallHierarchyItem, error) { - // Parse the file containing the reference. 
- fh, err := snapshot.ReadFile(ctx, loc.URI) - if err != nil { - return protocol.CallHierarchyItem{}, err - } - // TODO(adonovan): opt: before parsing, trim the bodies of functions - // that don't contain the reference, using either a scanner-based - // implementation such as https://go.dev/play/p/KUrObH1YkX8 - // (~31% speedup), or a byte-oriented implementation (2x speedup). - pgf, err := snapshot.ParseGo(ctx, fh, ParseFull) - if err != nil { - return protocol.CallHierarchyItem{}, err - } - start, end, err := pgf.RangePos(loc.Range) - if err != nil { - return protocol.CallHierarchyItem{}, err - } - - // Find the enclosing function, if any, and the number of func literals in between. - var funcDecl *ast.FuncDecl - var funcLit *ast.FuncLit // innermost function literal - var litCount int - path, _ := astutil.PathEnclosingInterval(pgf.File, start, end) -outer: - for _, node := range path { - switch n := node.(type) { - case *ast.FuncDecl: - funcDecl = n - break outer - case *ast.FuncLit: - litCount++ - if litCount > 1 { - continue - } - funcLit = n - } - } - - nameIdent := path[len(path)-1].(*ast.File).Name - kind := protocol.Package - if funcDecl != nil { - nameIdent = funcDecl.Name - kind = protocol.Function - } - - nameStart, nameEnd := nameIdent.Pos(), nameIdent.End() - if funcLit != nil { - nameStart, nameEnd = funcLit.Type.Func, funcLit.Type.Params.Pos() - kind = protocol.Function - } - rng, err := pgf.PosRange(nameStart, nameEnd) - if err != nil { - return protocol.CallHierarchyItem{}, err - } - - name := nameIdent.Name - for i := 0; i < litCount; i++ { - name += ".func()" - } - - return protocol.CallHierarchyItem{ - Name: name, - Kind: kind, - Tags: []protocol.SymbolTag{}, - Detail: fmt.Sprintf("%s • %s", pkgPath, filepath.Base(fh.URI().Path())), - URI: loc.URI, - Range: rng, - SelectionRange: rng, - }, nil -} - -// OutgoingCalls returns an array of CallHierarchyOutgoingCall for a file and the position within the file. -func OutgoingCalls(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pp protocol.Position) ([]protocol.CallHierarchyOutgoingCall, error) { - ctx, done := event.Start(ctx, "golang.OutgoingCalls") - defer done() - - pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) - if err != nil { - return nil, err - } - pos, err := pgf.PositionPos(pp) - if err != nil { - return nil, err - } - - _, obj, _ := referencedObject(pkg, pgf, pos) - if obj == nil { - return nil, nil - } - - if _, ok := obj.Type().Underlying().(*types.Signature); !ok { - return nil, nil - } - - // Skip builtins. - if obj.Pkg() == nil { - return nil, nil - } - - if !obj.Pos().IsValid() { - return nil, bug.Errorf("internal error: object %s.%s missing position", obj.Pkg().Path(), obj.Name()) - } - - declFile := pkg.FileSet().File(obj.Pos()) - if declFile == nil { - return nil, bug.Errorf("file not found for %d", obj.Pos()) - } - - uri := protocol.URIFromPath(declFile.Name()) - offset, err := safetoken.Offset(declFile, obj.Pos()) - if err != nil { - return nil, err - } - - // Use TypecheckFull as we want to inspect the body of the function declaration. - declPkg, declPGF, err := NarrowestPackageForFile(ctx, snapshot, uri) - if err != nil { - return nil, err - } - - declPos, err := safetoken.Pos(declPGF.Tok, offset) - if err != nil { - return nil, err - } - - declNode, _, _ := findDeclInfo([]*ast.File{declPGF.File}, declPos) - if declNode == nil { - // TODO(rfindley): why don't we return an error here, or even bug.Errorf? 
- return nil, nil - // return nil, bug.Errorf("failed to find declaration for object %s.%s", obj.Pkg().Path(), obj.Name()) - } - - type callRange struct { - start, end token.Pos - } - callRanges := []callRange{} - ast.Inspect(declNode, func(n ast.Node) bool { - if call, ok := n.(*ast.CallExpr); ok { - var start, end token.Pos - switch n := call.Fun.(type) { - case *ast.SelectorExpr: - start, end = n.Sel.NamePos, call.Lparen - case *ast.Ident: - start, end = n.NamePos, call.Lparen - case *ast.FuncLit: - // while we don't add the function literal as an 'outgoing' call - // we still want to traverse into it - return true - default: - // ignore any other kind of call expressions - // for ex: direct function literal calls since that's not an 'outgoing' call - return false - } - callRanges = append(callRanges, callRange{start: start, end: end}) - } - return true - }) - - outgoingCalls := map[token.Pos]*protocol.CallHierarchyOutgoingCall{} - for _, callRange := range callRanges { - _, obj, _ := referencedObject(declPkg, declPGF, callRange.start) - if obj == nil { - continue - } - - // ignore calls to builtin functions - if obj.Pkg() == nil { - continue - } - - outgoingCall, ok := outgoingCalls[obj.Pos()] - if !ok { - loc, err := mapPosition(ctx, declPkg.FileSet(), snapshot, obj.Pos(), obj.Pos()+token.Pos(len(obj.Name()))) - if err != nil { - return nil, err - } - outgoingCall = &protocol.CallHierarchyOutgoingCall{ - To: protocol.CallHierarchyItem{ - Name: obj.Name(), - Kind: protocol.Function, - Tags: []protocol.SymbolTag{}, - Detail: fmt.Sprintf("%s • %s", obj.Pkg().Path(), filepath.Base(loc.URI.Path())), - URI: loc.URI, - Range: loc.Range, - SelectionRange: loc.Range, - }, - } - outgoingCalls[obj.Pos()] = outgoingCall - } - - rng, err := declPGF.PosRange(callRange.start, callRange.end) - if err != nil { - return nil, err - } - outgoingCall.FromRanges = append(outgoingCall.FromRanges, rng) - } - - outgoingCallItems := make([]protocol.CallHierarchyOutgoingCall, 0, len(outgoingCalls)) - for _, callItem := range outgoingCalls { - outgoingCallItems = append(outgoingCallItems, *callItem) - } - return outgoingCallItems, nil -} diff --git a/internal/golangorgx/gopls/golang/change_quote.go b/internal/golangorgx/gopls/golang/change_quote.go deleted file mode 100644 index 7aa5c004381..00000000000 --- a/internal/golangorgx/gopls/golang/change_quote.go +++ /dev/null @@ -1,84 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package golang - -import ( - "go/ast" - "go/token" - "strconv" - "strings" - - "cuelang.org/go/internal/golangorgx/gopls/file" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/util/bug" - "cuelang.org/go/internal/golangorgx/gopls/util/safetoken" - "cuelang.org/go/internal/golangorgx/tools/diff" - "golang.org/x/tools/go/ast/astutil" -) - -// ConvertStringLiteral reports whether we can convert between raw and interpreted -// string literals in the [start, end), along with a CodeAction containing the edits. 
-// -// Only the following conditions are true, the action in result is valid -// - [start, end) is enclosed by a string literal -// - if the string is interpreted string, need check whether the convert is allowed -func ConvertStringLiteral(pgf *ParsedGoFile, fh file.Handle, rng protocol.Range) (protocol.CodeAction, bool) { - startPos, endPos, err := pgf.RangePos(rng) - if err != nil { - return protocol.CodeAction{}, false // e.g. invalid range - } - path, _ := astutil.PathEnclosingInterval(pgf.File, startPos, endPos) - lit, ok := path[0].(*ast.BasicLit) - if !ok || lit.Kind != token.STRING { - return protocol.CodeAction{}, false - } - - str, err := strconv.Unquote(lit.Value) - if err != nil { - return protocol.CodeAction{}, false - } - - interpreted := lit.Value[0] == '"' - // Not all "..." strings can be represented as `...` strings. - if interpreted && !strconv.CanBackquote(strings.ReplaceAll(str, "\n", "")) { - return protocol.CodeAction{}, false - } - - var ( - title string - newText string - ) - if interpreted { - title = "Convert to raw string literal" - newText = "`" + str + "`" - } else { - title = "Convert to interpreted string literal" - newText = strconv.Quote(str) - } - - start, end, err := safetoken.Offsets(pgf.Tok, lit.Pos(), lit.End()) - if err != nil { - bug.Reportf("failed to get string literal offset by token.Pos:%v", err) - return protocol.CodeAction{}, false - } - edits := []diff.Edit{{ - Start: start, - End: end, - New: newText, - }} - pedits, err := protocol.EditsFromDiffEdits(pgf.Mapper, edits) - if err != nil { - bug.Reportf("failed to convert diff.Edit to protocol.TextEdit:%v", err) - return protocol.CodeAction{}, false - } - - return protocol.CodeAction{ - Title: title, - Kind: protocol.RefactorRewrite, - Edit: &protocol.WorkspaceEdit{ - DocumentChanges: documentChanges(fh, pedits), - }, - }, true -} diff --git a/internal/golangorgx/gopls/golang/change_signature.go b/internal/golangorgx/gopls/golang/change_signature.go deleted file mode 100644 index 42f090508c2..00000000000 --- a/internal/golangorgx/gopls/golang/change_signature.go +++ /dev/null @@ -1,574 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package golang - -import ( - "bytes" - "context" - "fmt" - "go/ast" - "go/format" - "go/parser" - "go/token" - "go/types" - "regexp" - - "cuelang.org/go/internal/golangorgx/gopls/cache" - "cuelang.org/go/internal/golangorgx/gopls/cache/parsego" - "cuelang.org/go/internal/golangorgx/gopls/file" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/util/bug" - "cuelang.org/go/internal/golangorgx/gopls/util/safetoken" - internalastutil "cuelang.org/go/internal/golangorgx/tools/astutil" - "cuelang.org/go/internal/golangorgx/tools/diff" - "cuelang.org/go/internal/golangorgx/tools/refactor/inline" - "cuelang.org/go/internal/golangorgx/tools/tokeninternal" - "cuelang.org/go/internal/golangorgx/tools/typesinternal" - "cuelang.org/go/internal/golangorgx/tools/versions" - "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/imports" -) - -// RemoveUnusedParameter computes a refactoring to remove the parameter -// indicated by the given range, which must be contained within an unused -// parameter name or field. -// -// This operation is a work in progress. Remaining TODO: -// - Handle function assignment correctly. -// - Improve the extra newlines in output. -// - Stream type checking via ForEachPackage. 
-// - Avoid unnecessary additional type checking. -func RemoveUnusedParameter(ctx context.Context, fh file.Handle, rng protocol.Range, snapshot *cache.Snapshot) ([]protocol.DocumentChanges, error) { - pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) - if err != nil { - return nil, err - } - if perrors, terrors := pkg.GetParseErrors(), pkg.GetTypeErrors(); len(perrors) > 0 || len(terrors) > 0 { - var sample string - if len(perrors) > 0 { - sample = perrors[0].Error() - } else { - sample = terrors[0].Error() - } - return nil, fmt.Errorf("can't change signatures for packages with parse or type errors: (e.g. %s)", sample) - } - - info, err := FindParam(pgf, rng) - if err != nil { - return nil, err // e.g. invalid range - } - if info.Decl.Recv != nil { - return nil, fmt.Errorf("can't change signature of methods (yet)") - } - if info.Field == nil { - return nil, fmt.Errorf("failed to find field") - } - - // Create the new declaration, which is a copy of the original decl with the - // unnecessary parameter removed. - newDecl := internalastutil.CloneNode(info.Decl) - if info.Name != nil { - names := remove(newDecl.Type.Params.List[info.FieldIndex].Names, info.NameIndex) - newDecl.Type.Params.List[info.FieldIndex].Names = names - } - if len(newDecl.Type.Params.List[info.FieldIndex].Names) == 0 { - // Unnamed, or final name was removed: in either case, remove the field. - newDecl.Type.Params.List = remove(newDecl.Type.Params.List, info.FieldIndex) - } - - // Compute inputs into building a wrapper function around the modified - // signature. - var ( - params = internalastutil.CloneNode(info.Decl.Type.Params) // "_" names will be modified - args []ast.Expr // arguments to delegate - variadic = false // whether the signature is variadic - ) - { - allNames := make(map[string]bool) // for renaming blanks - for _, fld := range params.List { - for _, n := range fld.Names { - if n.Name != "_" { - allNames[n.Name] = true - } - } - } - blanks := 0 - for i, fld := range params.List { - for j, n := range fld.Names { - if i == info.FieldIndex && j == info.NameIndex { - continue - } - if n.Name == "_" { - // Create names for blank (_) parameters so the delegating wrapper - // can refer to them. - for { - newName := fmt.Sprintf("blank%d", blanks) - blanks++ - if !allNames[newName] { - n.Name = newName - break - } - } - } - args = append(args, &ast.Ident{Name: n.Name}) - if i == len(params.List)-1 { - _, variadic = fld.Type.(*ast.Ellipsis) - } - } - } - } - - // Rewrite all referring calls. - newContent, err := rewriteCalls(ctx, signatureRewrite{ - snapshot: snapshot, - pkg: pkg, - pgf: pgf, - origDecl: info.Decl, - newDecl: newDecl, - params: params, - callArgs: args, - variadic: variadic, - }) - if err != nil { - return nil, err - } - - // Finally, rewrite the original declaration. We do this after inlining all - // calls, as there may be calls in the same file as the declaration. But none - // of the inlining should have changed the location of the original - // declaration. - { - idx := findDecl(pgf.File, info.Decl) - if idx < 0 { - return nil, bug.Errorf("didn't find original decl") - } - - src, ok := newContent[pgf.URI] - if !ok { - src = pgf.Src - } - fset := tokeninternal.FileSetFor(pgf.Tok) - src, err := rewriteSignature(fset, idx, src, newDecl) - if err != nil { - return nil, err - } - newContent[pgf.URI] = src - } - - // Translate the resulting state into document changes. 
- var changes []protocol.DocumentChanges - for uri, after := range newContent { - fh, err := snapshot.ReadFile(ctx, uri) - if err != nil { - return nil, err - } - before, err := fh.Content() - if err != nil { - return nil, err - } - edits := diff.Bytes(before, after) - mapper := protocol.NewMapper(uri, before) - pedits, err := protocol.EditsFromDiffEdits(mapper, edits) - if err != nil { - return nil, fmt.Errorf("computing edits for %s: %v", uri, err) - } - changes = append(changes, documentChanges(fh, pedits)...) - } - return changes, nil -} - -// rewriteSignature rewrites the signature of the declIdx'th declaration in src -// to use the signature of newDecl (described by fset). -// -// TODO(rfindley): I think this operation could be generalized, for example by -// using a concept of a 'nodepath' to correlate nodes between two related -// files. -// -// Note that with its current application, rewriteSignature is expected to -// succeed. Separate bug.Errorf calls are used below (rather than one call at -// the callsite) in order to have greater precision. -func rewriteSignature(fset *token.FileSet, declIdx int, src0 []byte, newDecl *ast.FuncDecl) ([]byte, error) { - // Parse the new file0 content, to locate the original params. - file0, err := parser.ParseFile(fset, "", src0, parser.ParseComments|parser.SkipObjectResolution) - if err != nil { - return nil, bug.Errorf("re-parsing declaring file failed: %v", err) - } - decl0, _ := file0.Decls[declIdx].(*ast.FuncDecl) - // Inlining shouldn't have changed the location of any declarations, but do - // a sanity check. - if decl0 == nil || decl0.Name.Name != newDecl.Name.Name { - return nil, bug.Errorf("inlining affected declaration order: found %v, not func %s", decl0, newDecl.Name.Name) - } - opening0, closing0, err := safetoken.Offsets(fset.File(decl0.Pos()), decl0.Type.Params.Opening, decl0.Type.Params.Closing) - if err != nil { - return nil, bug.Errorf("can't find params: %v", err) - } - - // Format the modified signature and apply a textual replacement. This - // minimizes comment disruption. - formattedType := FormatNode(fset, newDecl.Type) - expr, err := parser.ParseExprFrom(fset, "", []byte(formattedType), 0) - if err != nil { - return nil, bug.Errorf("parsing modified signature: %v", err) - } - newType := expr.(*ast.FuncType) - opening1, closing1, err := safetoken.Offsets(fset.File(newType.Pos()), newType.Params.Opening, newType.Params.Closing) - if err != nil { - return nil, bug.Errorf("param offsets: %v", err) - } - newParams := formattedType[opening1 : closing1+1] - - // Splice. - var buf bytes.Buffer - buf.Write(src0[:opening0]) - buf.WriteString(newParams) - buf.Write(src0[closing0+1:]) - newSrc := buf.Bytes() - if len(file0.Imports) > 0 { - formatted, err := imports.Process("output", newSrc, nil) - if err != nil { - return nil, bug.Errorf("imports.Process failed: %v", err) - } - newSrc = formatted - } - return newSrc, nil -} - -// ParamInfo records information about a param identified by a position. -type ParamInfo struct { - Decl *ast.FuncDecl // enclosing func decl (non-nil) - FieldIndex int // index of Field in Decl.Type.Params, or -1 - Field *ast.Field // enclosing field of Decl, or nil if range not among parameters - NameIndex int // index of Name in Field.Names, or nil - Name *ast.Ident // indicated name (either enclosing, or Field.Names[0] if len(Field.Names) == 1) -} - -// FindParam finds the parameter information spanned by the given range. 
-func FindParam(pgf *ParsedGoFile, rng protocol.Range) (*ParamInfo, error) { - start, end, err := pgf.RangePos(rng) - if err != nil { - return nil, err - } - - path, _ := astutil.PathEnclosingInterval(pgf.File, start, end) - var ( - id *ast.Ident - field *ast.Field - decl *ast.FuncDecl - ) - // Find the outermost enclosing node of each kind, whether or not they match - // the semantics described in the docstring. - for _, n := range path { - switch n := n.(type) { - case *ast.Ident: - id = n - case *ast.Field: - field = n - case *ast.FuncDecl: - decl = n - } - } - // Check the conditions described in the docstring. - if decl == nil { - return nil, fmt.Errorf("range is not within a function declaration") - } - info := &ParamInfo{ - FieldIndex: -1, - NameIndex: -1, - Decl: decl, - } - for fi, f := range decl.Type.Params.List { - if f == field { - info.FieldIndex = fi - info.Field = f - for ni, n := range f.Names { - if n == id { - info.NameIndex = ni - info.Name = n - break - } - } - if info.Name == nil && len(info.Field.Names) == 1 { - info.NameIndex = 0 - info.Name = info.Field.Names[0] - } - break - } - } - return info, nil -} - -// signatureRewrite defines a rewritten function signature. -// -// See rewriteCalls for more details. -type signatureRewrite struct { - snapshot *cache.Snapshot - pkg *cache.Package - pgf *parsego.File - origDecl, newDecl *ast.FuncDecl - params *ast.FieldList - callArgs []ast.Expr - variadic bool -} - -// rewriteCalls returns the document changes required to rewrite the -// signature of origDecl to that of newDecl. -// -// This is a rather complicated factoring of the rewrite operation, but is able -// to describe arbitrary rewrites. Specifically, rewriteCalls creates a -// synthetic copy of pkg, where the original function declaration is changed to -// be a trivial wrapper around the new declaration. params and callArgs are -// used to perform this delegation: params must have the same type as origDecl, -// but may have renamed parameters (such as is required for delegating blank -// parameters). callArgs are the arguments of the delegated call (i.e. using -// params). -// -// For example, consider removing the unused 'b' parameter below, rewriting -// -// func Foo(a, b, c, _ int) int { -// return a+c -// } -// -// To -// -// func Foo(a, c, _ int) int { -// return a+c -// } -// -// In this case, rewriteCalls is parameterized as follows: -// - origDecl is the original declaration -// - newDecl is the new declaration, which is a copy of origDecl less the 'b' -// parameter. -// - params is a new parameter list (a, b, c, blank0 int) to be used for the -// new wrapper. -// - callArgs is the argument list (a, c, blank0), to be used to call the new -// delegate. -// -// rewriting is expressed this way so that rewriteCalls can own the details -// of *how* this rewriting is performed. For example, as of writing it names -// the synthetic delegate G_o_p_l_s_foo, but the caller need not know this. -// -// By passing an entirely new declaration, rewriteCalls may be used for -// signature refactorings that may affect the function body, such as removing -// or adding return values. -func rewriteCalls(ctx context.Context, rw signatureRewrite) (map[protocol.DocumentURI][]byte, error) { - // tag is a unique prefix that is added to the delegated declaration. - // - // It must have a ~0% probability of causing collisions with existing names. 
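To make the Foo walkthrough above concrete, here is a rough sketch of the intermediate state this function builds before inlining, assuming the illustrative names blank0 and G_o_p_l_s_Foo (the prefix comes from the const defined just below); the exact generated text may differ:

// The delegate: a copy of the new declaration under a synthetic name,
// spliced in at the position of the original declaration.
func G_o_p_l_s_Foo(a, c, _ int) int {
	return a + c
}

// The wrapper: the original signature, with blanks renamed so they can be
// forwarded, whose body simply delegates to the new declaration.
func Foo(a, b, c, blank0 int) int {
	return G_o_p_l_s_Foo(a, c, blank0)
}

After the calls are inlined against this delegate, the synthetic prefix is stripped again (see the post function below), so none of these names survive in the final edits.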
- const tag = "G_o_p_l_s_" - - var ( - modifiedSrc []byte - modifiedFile *ast.File - modifiedDecl *ast.FuncDecl - ) - { - delegate := internalastutil.CloneNode(rw.newDecl) // clone before modifying - delegate.Name.Name = tag + delegate.Name.Name - if obj := rw.pkg.GetTypes().Scope().Lookup(delegate.Name.Name); obj != nil { - return nil, fmt.Errorf("synthetic name %q conflicts with an existing declaration", delegate.Name.Name) - } - - wrapper := internalastutil.CloneNode(rw.origDecl) - wrapper.Type.Params = rw.params - call := &ast.CallExpr{ - Fun: &ast.Ident{Name: delegate.Name.Name}, - Args: rw.callArgs, - } - if rw.variadic { - call.Ellipsis = 1 // must not be token.NoPos - } - - var stmt ast.Stmt - if delegate.Type.Results.NumFields() > 0 { - stmt = &ast.ReturnStmt{ - Results: []ast.Expr{call}, - } - } else { - stmt = &ast.ExprStmt{ - X: call, - } - } - wrapper.Body = &ast.BlockStmt{ - List: []ast.Stmt{stmt}, - } - - fset := tokeninternal.FileSetFor(rw.pgf.Tok) - var err error - modifiedSrc, err = replaceFileDecl(rw.pgf, rw.origDecl, delegate) - if err != nil { - return nil, err - } - // TODO(rfindley): we can probably get away with one fewer parse operations - // by returning the modified AST from replaceDecl. Investigate if that is - // accurate. - modifiedSrc = append(modifiedSrc, []byte("\n\n"+FormatNode(fset, wrapper))...) - modifiedFile, err = parser.ParseFile(rw.pkg.FileSet(), rw.pgf.URI.Path(), modifiedSrc, parser.ParseComments|parser.SkipObjectResolution) - if err != nil { - return nil, err - } - modifiedDecl = modifiedFile.Decls[len(modifiedFile.Decls)-1].(*ast.FuncDecl) - } - - // Type check pkg again with the modified file, to compute the synthetic - // callee. - logf := logger(ctx, "change signature", rw.snapshot.Options().VerboseOutput) - pkg2, info, err := reTypeCheck(logf, rw.pkg, map[protocol.DocumentURI]*ast.File{rw.pgf.URI: modifiedFile}, false) - if err != nil { - return nil, err - } - calleeInfo, err := inline.AnalyzeCallee(logf, rw.pkg.FileSet(), pkg2, info, modifiedDecl, modifiedSrc) - if err != nil { - return nil, fmt.Errorf("analyzing callee: %v", err) - } - - post := func(got []byte) []byte { return bytes.ReplaceAll(got, []byte(tag), nil) } - return inlineAllCalls(ctx, logf, rw.snapshot, rw.pkg, rw.pgf, rw.origDecl, calleeInfo, post) -} - -// reTypeCheck re-type checks orig with new file contents defined by fileMask. -// -// It expects that any newly added imports are already present in the -// transitive imports of orig. -// -// If expectErrors is true, reTypeCheck allows errors in the new package. -// TODO(rfindley): perhaps this should be a filter to specify which errors are -// acceptable. 
-func reTypeCheck(logf func(string, ...any), orig *cache.Package, fileMask map[protocol.DocumentURI]*ast.File, expectErrors bool) (*types.Package, *types.Info, error) { - pkg := types.NewPackage(string(orig.Metadata().PkgPath), string(orig.Metadata().Name)) - info := &types.Info{ - Types: make(map[ast.Expr]types.TypeAndValue), - Defs: make(map[*ast.Ident]types.Object), - Uses: make(map[*ast.Ident]types.Object), - Implicits: make(map[ast.Node]types.Object), - Selections: make(map[*ast.SelectorExpr]*types.Selection), - Scopes: make(map[ast.Node]*types.Scope), - Instances: make(map[*ast.Ident]types.Instance), - } - versions.InitFileVersions(info) - { - var files []*ast.File - for _, pgf := range orig.CompiledGoFiles() { - if mask, ok := fileMask[pgf.URI]; ok { - files = append(files, mask) - } else { - files = append(files, pgf.File) - } - } - - // Implement a BFS for imports in the transitive package graph. - // - // Note that this only works if any newly added imports are expected to be - // present among transitive imports. In general we cannot assume this to - // be the case, but in the special case of removing a parameter it works - // because any parameter types must be present in export data. - var importer func(importPath string) (*types.Package, error) - { - var ( - importsByPath = make(map[string]*types.Package) // cached imports - toSearch = []*types.Package{orig.GetTypes()} // packages to search - searched = make(map[string]bool) // path -> (false, if present in toSearch; true, if already searched) - ) - importer = func(path string) (*types.Package, error) { - if p, ok := importsByPath[path]; ok { - return p, nil - } - for len(toSearch) > 0 { - pkg := toSearch[0] - toSearch = toSearch[1:] - searched[pkg.Path()] = true - for _, p := range pkg.Imports() { - // TODO(rfindley): this is incorrect: p.Path() is a package path, - // whereas path is an import path. We can fix this by reporting any - // newly added imports from inlining, or by using the ImporterFrom - // interface and package metadata. - // - // TODO(rfindley): can't the inliner also be wrong here? It's - // possible that an import path means different things depending on - // the location. - importsByPath[p.Path()] = p - if _, ok := searched[p.Path()]; !ok { - searched[p.Path()] = false - toSearch = append(toSearch, p) - } - } - if p, ok := importsByPath[path]; ok { - return p, nil - } - } - return nil, fmt.Errorf("missing import") - } - } - cfg := &types.Config{ - Sizes: orig.Metadata().TypesSizes, - Importer: ImporterFunc(importer), - } - - // Copied from cache/check.go. - // TODO(rfindley): factor this out and fix goVersionRx. - // Set Go dialect. - if module := orig.Metadata().Module; module != nil && module.GoVersion != "" { - goVersion := "go" + module.GoVersion - // types.NewChecker panics if GoVersion is invalid. - // An unparsable mod file should probably stop us - // before we get here, but double check just in case. - if goVersionRx.MatchString(goVersion) { - typesinternal.SetGoVersion(cfg, goVersion) - } - } - if expectErrors { - cfg.Error = func(err error) { - logf("re-type checking: expected error: %v", err) - } - } - typesinternal.SetUsesCgo(cfg) - checker := types.NewChecker(cfg, orig.FileSet(), pkg, info) - if err := checker.Files(files); err != nil && !expectErrors { - return nil, nil, fmt.Errorf("type checking rewritten package: %v", err) - } - } - return pkg, info, nil -} - -// TODO(golang/go#63472): this looks wrong with the new Go version syntax. 
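The TODO above refers to the version pattern declared immediately below. As a quick illustration of what it accepts and rejects (versionRxExamples is a hypothetical helper in the same package, not part of the original file):

func versionRxExamples() []bool {
	return []bool{
		goVersionRx.MatchString("go1.21"),   // true: the two-part form the pattern was written for
		goVersionRx.MatchString("go1.21.0"), // false: the three-part form newer go.mod files use, so SetGoVersion is skipped
		goVersionRx.MatchString("go1"),      // false: missing minor version
	}
}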
-var goVersionRx = regexp.MustCompile(`^go([1-9][0-9]*)\.(0|[1-9][0-9]*)$`) - -func remove[T any](s []T, i int) []T { - return append(s[:i], s[i+1:]...) -} - -// replaceFileDecl replaces old with new in the file described by pgf. -// -// TODO(rfindley): generalize, and combine with rewriteSignature. -func replaceFileDecl(pgf *ParsedGoFile, old, new ast.Decl) ([]byte, error) { - i := findDecl(pgf.File, old) - if i == -1 { - return nil, bug.Errorf("didn't find old declaration") - } - start, end, err := safetoken.Offsets(pgf.Tok, old.Pos(), old.End()) - if err != nil { - return nil, err - } - var out bytes.Buffer - out.Write(pgf.Src[:start]) - fset := tokeninternal.FileSetFor(pgf.Tok) - if err := format.Node(&out, fset, new); err != nil { - return nil, bug.Errorf("formatting new node: %v", err) - } - out.Write(pgf.Src[end:]) - return out.Bytes(), nil -} - -// findDecl finds the index of decl in file.Decls. -// -// TODO: use slices.Index when it is available. -func findDecl(file *ast.File, decl ast.Decl) int { - for i, d := range file.Decls { - if d == decl { - return i - } - } - return -1 -} diff --git a/internal/golangorgx/gopls/golang/code_lens.go b/internal/golangorgx/gopls/golang/code_lens.go deleted file mode 100644 index b9647f443aa..00000000000 --- a/internal/golangorgx/gopls/golang/code_lens.go +++ /dev/null @@ -1,248 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package golang - -import ( - "context" - "go/ast" - "go/token" - "go/types" - "regexp" - "strings" - - "cuelang.org/go/internal/golangorgx/gopls/cache" - "cuelang.org/go/internal/golangorgx/gopls/file" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/protocol/command" -) - -type LensFunc func(context.Context, *cache.Snapshot, file.Handle) ([]protocol.CodeLens, error) - -// LensFuncs returns the supported lensFuncs for Go files. 
-func LensFuncs() map[command.Command]LensFunc { - return map[command.Command]LensFunc{ - command.Generate: goGenerateCodeLens, - command.Test: runTestCodeLens, - command.RegenerateCgo: regenerateCgoLens, - command.GCDetails: toggleDetailsCodeLens, - } -} - -var ( - testRe = regexp.MustCompile(`^Test([^a-z]|$)`) // TestFoo or Test but not Testable - benchmarkRe = regexp.MustCompile(`^Benchmark([^a-z]|$)`) -) - -func runTestCodeLens(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]protocol.CodeLens, error) { - var codeLens []protocol.CodeLens - - pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) - if err != nil { - return nil, err - } - fns, err := TestsAndBenchmarks(pkg, pgf) - if err != nil { - return nil, err - } - puri := fh.URI() - for _, fn := range fns.Tests { - cmd, err := command.NewTestCommand("run test", puri, []string{fn.Name}, nil) - if err != nil { - return nil, err - } - rng := protocol.Range{Start: fn.Rng.Start, End: fn.Rng.Start} - codeLens = append(codeLens, protocol.CodeLens{Range: rng, Command: &cmd}) - } - - for _, fn := range fns.Benchmarks { - cmd, err := command.NewTestCommand("run benchmark", puri, nil, []string{fn.Name}) - if err != nil { - return nil, err - } - rng := protocol.Range{Start: fn.Rng.Start, End: fn.Rng.Start} - codeLens = append(codeLens, protocol.CodeLens{Range: rng, Command: &cmd}) - } - - if len(fns.Benchmarks) > 0 { - pgf, err := snapshot.ParseGo(ctx, fh, ParseFull) - if err != nil { - return nil, err - } - // add a code lens to the top of the file which runs all benchmarks in the file - rng, err := pgf.PosRange(pgf.File.Package, pgf.File.Package) - if err != nil { - return nil, err - } - var benches []string - for _, fn := range fns.Benchmarks { - benches = append(benches, fn.Name) - } - cmd, err := command.NewTestCommand("run file benchmarks", puri, nil, benches) - if err != nil { - return nil, err - } - codeLens = append(codeLens, protocol.CodeLens{Range: rng, Command: &cmd}) - } - return codeLens, nil -} - -type TestFn struct { - Name string - Rng protocol.Range -} - -type TestFns struct { - Tests []TestFn - Benchmarks []TestFn -} - -func TestsAndBenchmarks(pkg *cache.Package, pgf *ParsedGoFile) (TestFns, error) { - var out TestFns - - if !strings.HasSuffix(pgf.URI.Path(), "_test.go") { - return out, nil - } - - for _, d := range pgf.File.Decls { - fn, ok := d.(*ast.FuncDecl) - if !ok { - continue - } - - rng, err := pgf.NodeRange(fn) - if err != nil { - return out, err - } - - if matchTestFunc(fn, pkg, testRe, "T") { - out.Tests = append(out.Tests, TestFn{fn.Name.Name, rng}) - } - - if matchTestFunc(fn, pkg, benchmarkRe, "B") { - out.Benchmarks = append(out.Benchmarks, TestFn{fn.Name.Name, rng}) - } - } - - return out, nil -} - -func matchTestFunc(fn *ast.FuncDecl, pkg *cache.Package, nameRe *regexp.Regexp, paramID string) bool { - // Make sure that the function name matches a test function. - if !nameRe.MatchString(fn.Name.Name) { - return false - } - info := pkg.GetTypesInfo() - if info == nil { - return false - } - obj := info.ObjectOf(fn.Name) - if obj == nil { - return false - } - sig, ok := obj.Type().(*types.Signature) - if !ok { - return false - } - // Test functions should have only one parameter. 
- if sig.Params().Len() != 1 { - return false - } - - // Check the type of the only parameter - paramTyp, ok := sig.Params().At(0).Type().(*types.Pointer) - if !ok { - return false - } - named, ok := paramTyp.Elem().(*types.Named) - if !ok { - return false - } - namedObj := named.Obj() - if namedObj.Pkg().Path() != "testing" { - return false - } - return namedObj.Id() == paramID -} - -func goGenerateCodeLens(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]protocol.CodeLens, error) { - pgf, err := snapshot.ParseGo(ctx, fh, ParseFull) - if err != nil { - return nil, err - } - const ggDirective = "//go:generate" - for _, c := range pgf.File.Comments { - for _, l := range c.List { - if !strings.HasPrefix(l.Text, ggDirective) { - continue - } - rng, err := pgf.PosRange(l.Pos(), l.Pos()+token.Pos(len(ggDirective))) - if err != nil { - return nil, err - } - dir := fh.URI().Dir() - nonRecursiveCmd, err := command.NewGenerateCommand("run go generate", command.GenerateArgs{Dir: dir, Recursive: false}) - if err != nil { - return nil, err - } - recursiveCmd, err := command.NewGenerateCommand("run go generate ./...", command.GenerateArgs{Dir: dir, Recursive: true}) - if err != nil { - return nil, err - } - return []protocol.CodeLens{ - {Range: rng, Command: &recursiveCmd}, - {Range: rng, Command: &nonRecursiveCmd}, - }, nil - - } - } - return nil, nil -} - -func regenerateCgoLens(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]protocol.CodeLens, error) { - pgf, err := snapshot.ParseGo(ctx, fh, ParseFull) - if err != nil { - return nil, err - } - var c *ast.ImportSpec - for _, imp := range pgf.File.Imports { - if imp.Path.Value == `"C"` { - c = imp - } - } - if c == nil { - return nil, nil - } - rng, err := pgf.NodeRange(c) - if err != nil { - return nil, err - } - puri := fh.URI() - cmd, err := command.NewRegenerateCgoCommand("regenerate cgo definitions", command.URIArg{URI: puri}) - if err != nil { - return nil, err - } - return []protocol.CodeLens{{Range: rng, Command: &cmd}}, nil -} - -func toggleDetailsCodeLens(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]protocol.CodeLens, error) { - pgf, err := snapshot.ParseGo(ctx, fh, ParseFull) - if err != nil { - return nil, err - } - if !pgf.File.Package.IsValid() { - // Without a package name we have nowhere to put the codelens, so give up. - return nil, nil - } - rng, err := pgf.PosRange(pgf.File.Package, pgf.File.Package) - if err != nil { - return nil, err - } - puri := fh.URI() - cmd, err := command.NewGCDetailsCommand("Toggle gc annotation details", puri) - if err != nil { - return nil, err - } - return []protocol.CodeLens{{Range: rng, Command: &cmd}}, nil -} diff --git a/internal/golangorgx/gopls/golang/codeaction.go b/internal/golangorgx/gopls/golang/codeaction.go deleted file mode 100644 index 9ecad36a3c4..00000000000 --- a/internal/golangorgx/gopls/golang/codeaction.go +++ /dev/null @@ -1,451 +0,0 @@ -// Copyright 2024 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package golang - -import ( - "context" - "encoding/json" - "fmt" - "go/ast" - "strings" - - "cuelang.org/go/internal/golangorgx/gopls/analysis/fillstruct" - "cuelang.org/go/internal/golangorgx/gopls/cache" - "cuelang.org/go/internal/golangorgx/gopls/cache/parsego" - "cuelang.org/go/internal/golangorgx/gopls/file" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/protocol/command" - "cuelang.org/go/internal/golangorgx/gopls/settings" - "cuelang.org/go/internal/golangorgx/gopls/util/bug" - "cuelang.org/go/internal/golangorgx/gopls/util/slices" - "cuelang.org/go/internal/golangorgx/tools/event" - "cuelang.org/go/internal/golangorgx/tools/event/tag" - "cuelang.org/go/internal/golangorgx/tools/imports" - "golang.org/x/tools/go/ast/inspector" -) - -// CodeActions returns all code actions (edits and other commands) -// available for the selected range. -func CodeActions(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, rng protocol.Range, diagnostics []protocol.Diagnostic, want map[protocol.CodeActionKind]bool) (actions []protocol.CodeAction, _ error) { - // Only compute quick fixes if there are any diagnostics to fix. - wantQuickFixes := want[protocol.QuickFix] && len(diagnostics) > 0 - - // Code actions requiring syntax information alone. - if wantQuickFixes || want[protocol.SourceOrganizeImports] || want[protocol.RefactorExtract] { - pgf, err := snapshot.ParseGo(ctx, fh, parsego.ParseFull) - if err != nil { - return nil, err - } - - // Process any missing imports and pair them with the diagnostics they fix. - if wantQuickFixes || want[protocol.SourceOrganizeImports] { - importEdits, importEditsPerFix, err := allImportsFixes(ctx, snapshot, pgf) - if err != nil { - event.Error(ctx, "imports fixes", err, tag.File.Of(fh.URI().Path())) - importEdits = nil - importEditsPerFix = nil - } - - // Separate this into a set of codeActions per diagnostic, where - // each action is the addition, removal, or renaming of one import. - if wantQuickFixes { - for _, importFix := range importEditsPerFix { - fixed := fixedByImportFix(importFix.fix, diagnostics) - if len(fixed) == 0 { - continue - } - actions = append(actions, protocol.CodeAction{ - Title: importFixTitle(importFix.fix), - Kind: protocol.QuickFix, - Edit: &protocol.WorkspaceEdit{ - DocumentChanges: documentChanges(fh, importFix.edits), - }, - Diagnostics: fixed, - }) - } - } - - // Send all of the import edits as one code action if the file is - // being organized. - if want[protocol.SourceOrganizeImports] && len(importEdits) > 0 { - actions = append(actions, protocol.CodeAction{ - Title: "Organize Imports", - Kind: protocol.SourceOrganizeImports, - Edit: &protocol.WorkspaceEdit{ - DocumentChanges: documentChanges(fh, importEdits), - }, - }) - } - } - - if want[protocol.RefactorExtract] { - extractions, err := getExtractCodeActions(pgf, rng, snapshot.Options()) - if err != nil { - return nil, err - } - actions = append(actions, extractions...) - } - } - - // Code actions requiring type information. - if want[protocol.RefactorRewrite] || - want[protocol.RefactorInline] || - want[protocol.GoTest] { - pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) - if err != nil { - return nil, err - } - if want[protocol.RefactorRewrite] { - rewrites, err := getRewriteCodeActions(pkg, pgf, fh, rng, snapshot.Options()) - if err != nil { - return nil, err - } - actions = append(actions, rewrites...) 
- } - - if want[protocol.RefactorInline] { - rewrites, err := getInlineCodeActions(pkg, pgf, rng, snapshot.Options()) - if err != nil { - return nil, err - } - actions = append(actions, rewrites...) - } - - if want[protocol.GoTest] { - fixes, err := getGoTestCodeActions(pkg, pgf, rng) - if err != nil { - return nil, err - } - actions = append(actions, fixes...) - } - } - return actions, nil -} - -func supportsResolveEdits(options *settings.Options) bool { - return options.CodeActionResolveOptions != nil && slices.Contains(options.CodeActionResolveOptions, "edit") -} - -func importFixTitle(fix *imports.ImportFix) string { - var str string - switch fix.FixType { - case imports.AddImport: - str = fmt.Sprintf("Add import: %s %q", fix.StmtInfo.Name, fix.StmtInfo.ImportPath) - case imports.DeleteImport: - str = fmt.Sprintf("Delete import: %s %q", fix.StmtInfo.Name, fix.StmtInfo.ImportPath) - case imports.SetImportName: - str = fmt.Sprintf("Rename import: %s %q", fix.StmtInfo.Name, fix.StmtInfo.ImportPath) - } - return str -} - -// fixedByImportFix filters the provided slice of diagnostics to those that -// would be fixed by the provided imports fix. -func fixedByImportFix(fix *imports.ImportFix, diagnostics []protocol.Diagnostic) []protocol.Diagnostic { - var results []protocol.Diagnostic - for _, diagnostic := range diagnostics { - switch { - // "undeclared name: X" may be an unresolved import. - case strings.HasPrefix(diagnostic.Message, "undeclared name: "): - ident := strings.TrimPrefix(diagnostic.Message, "undeclared name: ") - if ident == fix.IdentName { - results = append(results, diagnostic) - } - // "undefined: X" may be an unresolved import at Go 1.20+. - case strings.HasPrefix(diagnostic.Message, "undefined: "): - ident := strings.TrimPrefix(diagnostic.Message, "undefined: ") - if ident == fix.IdentName { - results = append(results, diagnostic) - } - // "could not import: X" may be an invalid import. - case strings.HasPrefix(diagnostic.Message, "could not import: "): - ident := strings.TrimPrefix(diagnostic.Message, "could not import: ") - if ident == fix.IdentName { - results = append(results, diagnostic) - } - // "X imported but not used" is an unused import. - // "X imported but not used as Y" is an unused import. - case strings.Contains(diagnostic.Message, " imported but not used"): - idx := strings.Index(diagnostic.Message, " imported but not used") - importPath := diagnostic.Message[:idx] - if importPath == fmt.Sprintf("%q", fix.StmtInfo.ImportPath) { - results = append(results, diagnostic) - } - } - } - return results -} - -// getExtractCodeActions returns any refactor.extract code actions for the selection. 
-func getExtractCodeActions(pgf *ParsedGoFile, rng protocol.Range, options *settings.Options) ([]protocol.CodeAction, error) { - if rng.Start == rng.End { - return nil, nil - } - - start, end, err := pgf.RangePos(rng) - if err != nil { - return nil, err - } - puri := pgf.URI - var commands []protocol.Command - if _, ok, methodOk, _ := CanExtractFunction(pgf.Tok, start, end, pgf.Src, pgf.File); ok { - cmd, err := command.NewApplyFixCommand("Extract function", command.ApplyFixArgs{ - Fix: fixExtractFunction, - URI: puri, - Range: rng, - ResolveEdits: supportsResolveEdits(options), - }) - if err != nil { - return nil, err - } - commands = append(commands, cmd) - if methodOk { - cmd, err := command.NewApplyFixCommand("Extract method", command.ApplyFixArgs{ - Fix: fixExtractMethod, - URI: puri, - Range: rng, - ResolveEdits: supportsResolveEdits(options), - }) - if err != nil { - return nil, err - } - commands = append(commands, cmd) - } - } - if _, _, ok, _ := CanExtractVariable(start, end, pgf.File); ok { - cmd, err := command.NewApplyFixCommand("Extract variable", command.ApplyFixArgs{ - Fix: fixExtractVariable, - URI: puri, - Range: rng, - ResolveEdits: supportsResolveEdits(options), - }) - if err != nil { - return nil, err - } - commands = append(commands, cmd) - } - var actions []protocol.CodeAction - for i := range commands { - actions = append(actions, newCodeAction(commands[i].Title, protocol.RefactorExtract, &commands[i], nil, options)) - } - return actions, nil -} - -func newCodeAction(title string, kind protocol.CodeActionKind, cmd *protocol.Command, diagnostics []protocol.Diagnostic, options *settings.Options) protocol.CodeAction { - action := protocol.CodeAction{ - Title: title, - Kind: kind, - Diagnostics: diagnostics, - } - if !supportsResolveEdits(options) { - action.Command = cmd - } else { - data, err := json.Marshal(cmd) - if err != nil { - panic("unable to marshal") - } - msg := json.RawMessage(data) - action.Data = &msg - } - return action -} - -// getRewriteCodeActions returns refactor.rewrite code actions available at the specified range. -func getRewriteCodeActions(pkg *cache.Package, pgf *ParsedGoFile, fh file.Handle, rng protocol.Range, options *settings.Options) (_ []protocol.CodeAction, rerr error) { - // golang/go#61693: code actions were refactored to run outside of the - // analysis framework, but as a result they lost their panic recovery. - // - // These code actions should never fail, but put back the panic recovery as a - // defensive measure. 
- defer func() { - if r := recover(); r != nil { - rerr = bug.Errorf("refactor.rewrite code actions panicked: %v", r) - } - }() - - var actions []protocol.CodeAction - - if canRemoveParameter(pkg, pgf, rng) { - cmd, err := command.NewChangeSignatureCommand("remove unused parameter", command.ChangeSignatureArgs{ - RemoveParameter: protocol.Location{ - URI: pgf.URI, - Range: rng, - }, - ResolveEdits: supportsResolveEdits(options), - }) - if err != nil { - return nil, err - } - actions = append(actions, newCodeAction("Refactor: remove unused parameter", protocol.RefactorRewrite, &cmd, nil, options)) - } - - if action, ok := ConvertStringLiteral(pgf, fh, rng); ok { - actions = append(actions, action) - } - - start, end, err := pgf.RangePos(rng) - if err != nil { - return nil, err - } - - var commands []protocol.Command - if _, ok, _ := CanInvertIfCondition(pgf.File, start, end); ok { - cmd, err := command.NewApplyFixCommand("Invert 'if' condition", command.ApplyFixArgs{ - Fix: fixInvertIfCondition, - URI: pgf.URI, - Range: rng, - ResolveEdits: supportsResolveEdits(options), - }) - if err != nil { - return nil, err - } - commands = append(commands, cmd) - } - - // N.B.: an inspector only pays for itself after ~5 passes, which means we're - // currently not getting a good deal on this inspection. - // - // TODO: Consider removing the inspection after convenienceAnalyzers are removed. - inspect := inspector.New([]*ast.File{pgf.File}) - for _, diag := range fillstruct.Diagnose(inspect, start, end, pkg.GetTypes(), pkg.GetTypesInfo()) { - rng, err := pgf.Mapper.PosRange(pgf.Tok, diag.Pos, diag.End) - if err != nil { - return nil, err - } - for _, fix := range diag.SuggestedFixes { - cmd, err := command.NewApplyFixCommand(fix.Message, command.ApplyFixArgs{ - Fix: diag.Category, - URI: pgf.URI, - Range: rng, - ResolveEdits: supportsResolveEdits(options), - }) - if err != nil { - return nil, err - } - commands = append(commands, cmd) - } - } - - for i := range commands { - actions = append(actions, newCodeAction(commands[i].Title, protocol.RefactorRewrite, &commands[i], nil, options)) - } - - return actions, nil -} - -// canRemoveParameter reports whether we can remove the function parameter -// indicated by the given [start, end) range. -// -// This is true if: -// - [start, end) is contained within an unused field or parameter name -// - ... of a non-method function declaration. -// -// (Note that the unusedparam analyzer also computes this property, but -// much more precisely, allowing it to report its findings as diagnostics.) -func canRemoveParameter(pkg *cache.Package, pgf *ParsedGoFile, rng protocol.Range) bool { - info, err := FindParam(pgf, rng) - if err != nil { - return false // e.g. invalid range - } - if info.Field == nil { - return false // range does not span a parameter - } - if info.Decl.Body == nil { - return false // external function - } - if len(info.Field.Names) == 0 { - return true // no names => field is unused - } - if info.Name == nil { - return false // no name is indicated - } - if info.Name.Name == "_" { - return true // trivially unused - } - - obj := pkg.GetTypesInfo().Defs[info.Name] - if obj == nil { - return false // something went wrong - } - - used := false - ast.Inspect(info.Decl.Body, func(node ast.Node) bool { - if n, ok := node.(*ast.Ident); ok && pkg.GetTypesInfo().Uses[n] == obj { - used = true - } - return !used // keep going until we find a use - }) - return !used -} - -// getInlineCodeActions returns refactor.inline actions available at the specified range. 
-func getInlineCodeActions(pkg *cache.Package, pgf *ParsedGoFile, rng protocol.Range, options *settings.Options) ([]protocol.CodeAction, error) { - start, end, err := pgf.RangePos(rng) - if err != nil { - return nil, err - } - - // If range is within call expression, offer inline action. - var commands []protocol.Command - if _, fn, err := EnclosingStaticCall(pkg, pgf, start, end); err == nil { - cmd, err := command.NewApplyFixCommand(fmt.Sprintf("Inline call to %s", fn.Name()), command.ApplyFixArgs{ - Fix: fixInlineCall, - URI: pgf.URI, - Range: rng, - ResolveEdits: supportsResolveEdits(options), - }) - if err != nil { - return nil, err - } - commands = append(commands, cmd) - } - - // Convert commands to actions. - var actions []protocol.CodeAction - for i := range commands { - actions = append(actions, newCodeAction(commands[i].Title, protocol.RefactorInline, &commands[i], nil, options)) - } - return actions, nil -} - -// getGoTestCodeActions returns any "run this test/benchmark" code actions for the selection. -func getGoTestCodeActions(pkg *cache.Package, pgf *ParsedGoFile, rng protocol.Range) ([]protocol.CodeAction, error) { - fns, err := TestsAndBenchmarks(pkg, pgf) - if err != nil { - return nil, err - } - - var tests, benchmarks []string - for _, fn := range fns.Tests { - if !protocol.Intersect(fn.Rng, rng) { - continue - } - tests = append(tests, fn.Name) - } - for _, fn := range fns.Benchmarks { - if !protocol.Intersect(fn.Rng, rng) { - continue - } - benchmarks = append(benchmarks, fn.Name) - } - - if len(tests) == 0 && len(benchmarks) == 0 { - return nil, nil - } - - cmd, err := command.NewTestCommand("Run tests and benchmarks", pgf.URI, tests, benchmarks) - if err != nil { - return nil, err - } - return []protocol.CodeAction{{ - Title: cmd.Title, - Kind: protocol.GoTest, - Command: &cmd, - }}, nil -} - -func documentChanges(fh file.Handle, edits []protocol.TextEdit) []protocol.DocumentChanges { - return protocol.TextEditsToDocumentChanges(fh.URI(), fh.Version(), edits) -} diff --git a/internal/golangorgx/gopls/golang/comment.go b/internal/golangorgx/gopls/golang/comment.go deleted file mode 100644 index 105294013e1..00000000000 --- a/internal/golangorgx/gopls/golang/comment.go +++ /dev/null @@ -1,386 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !go1.19 -// +build !go1.19 - -package golang - -import ( - "bytes" - "io" - "regexp" - "strings" - "unicode" - "unicode/utf8" - - "cuelang.org/go/internal/golangorgx/gopls/settings" -) - -// CommentToMarkdown converts comment text to formatted markdown. -// The comment was prepared by DocReader, -// so it is known not to have leading, trailing blank lines -// nor to have trailing spaces at the end of lines. -// The comment markers have already been removed. -// -// Each line is converted into a markdown line and empty lines are just converted to -// newlines. Heading are prefixed with `### ` to make it a markdown heading. -// -// A span of indented lines retains a 4 space prefix block, with the common indent -// prefix removed unless empty, in which case it will be converted to a newline. -// -// URLs in the comment text are converted into links. 
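Before the implementation, a rough before/after sketch of the rules just described, using a hypothetical doc comment; the real output also escapes punctuation such as '.', which is elided here:

const exampleDoc = "Package demo frobnicates.\n" +
	"\n" +
	"Usage\n" +
	"\n" +
	"Call Frob with a positive count.\n" +
	"\n" +
	"\tdemo.Frob(3)\n"

// CommentToMarkdown(exampleDoc, nil) produces, roughly:
//
//	Package demo frobnicates.
//
//	### Usage
//
//	Call Frob with a positive count.
//
//	    demo.Frob(3)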
-func CommentToMarkdown(text string, _ *settings.Options) string { - buf := &bytes.Buffer{} - commentToMarkdown(buf, text) - return buf.String() -} - -var ( - mdNewline = []byte("\n") - mdHeader = []byte("### ") - mdIndent = []byte(" ") - mdLinkStart = []byte("[") - mdLinkDiv = []byte("](") - mdLinkEnd = []byte(")") -) - -func commentToMarkdown(w io.Writer, text string) { - blocks := blocks(text) - for i, b := range blocks { - switch b.op { - case opPara: - for _, line := range b.lines { - emphasize(w, line, true) - } - case opHead: - // The header block can consist of only one line. - // However, check the number of lines, just in case. - if len(b.lines) == 0 { - // Skip this block. - continue - } - header := b.lines[0] - - w.Write(mdHeader) - commentEscape(w, header, true) - // Header doesn't end with \n unlike the lines of other blocks. - w.Write(mdNewline) - case opPre: - for _, line := range b.lines { - if isBlank(line) { - w.Write(mdNewline) - continue - } - w.Write(mdIndent) - w.Write([]byte(line)) - } - } - - if i < len(blocks)-1 { - w.Write(mdNewline) - } - } -} - -const ( - ulquo = "“" - urquo = "”" -) - -var ( - markdownEscape = regexp.MustCompile(`([\\\x60*{}[\]()#+\-.!_>~|"$%&'\/:;<=?@^])`) - - unicodeQuoteReplacer = strings.NewReplacer("``", ulquo, "''", urquo) -) - -// commentEscape escapes comment text for markdown. If nice is set, -// also turn double ` and ' into “ and ”. -func commentEscape(w io.Writer, text string, nice bool) { - if nice { - text = convertQuotes(text) - } - text = escapeRegex(text) - w.Write([]byte(text)) -} - -func convertQuotes(text string) string { - return unicodeQuoteReplacer.Replace(text) -} - -func escapeRegex(text string) string { - return markdownEscape.ReplaceAllString(text, `\$1`) -} - -func emphasize(w io.Writer, line string, nice bool) { - for { - m := matchRx.FindStringSubmatchIndex(line) - if m == nil { - break - } - // m >= 6 (two parenthesized sub-regexps in matchRx, 1st one is urlRx) - - // write text before match - commentEscape(w, line[0:m[0]], nice) - - // adjust match for URLs - match := line[m[0]:m[1]] - if strings.Contains(match, "://") { - m0, m1 := m[0], m[1] - for _, s := range []string{"()", "{}", "[]"} { - open, close := s[:1], s[1:] // E.g., "(" and ")" - // require opening parentheses before closing parentheses (#22285) - if i := strings.Index(match, close); i >= 0 && i < strings.Index(match, open) { - m1 = m0 + i - match = line[m0:m1] - } - // require balanced pairs of parentheses (#5043) - for i := 0; strings.Count(match, open) != strings.Count(match, close) && i < 10; i++ { - m1 = strings.LastIndexAny(line[:m1], s) - match = line[m0:m1] - } - } - if m1 != m[1] { - // redo matching with shortened line for correct indices - m = matchRx.FindStringSubmatchIndex(line[:m[0]+len(match)]) - } - } - - // Following code has been modified from go/doc since words is always - // nil. 
All html formatting has also been transformed into markdown formatting - - // analyze match - url := "" - if m[2] >= 0 { - url = match - } - - // write match - if len(url) > 0 { - w.Write(mdLinkStart) - } - - commentEscape(w, match, nice) - - if len(url) > 0 { - w.Write(mdLinkDiv) - w.Write([]byte(urlReplacer.Replace(url))) - w.Write(mdLinkEnd) - } - - // advance - line = line[m[1]:] - } - commentEscape(w, line, nice) -} - -// Everything from here on is a copy of go/doc/comment.go - -const ( - // Regexp for Go identifiers - identRx = `[\pL_][\pL_0-9]*` - - // Regexp for URLs - // Match parens, and check later for balance - see #5043, #22285 - // Match .,:;?! within path, but not at end - see #18139, #16565 - // This excludes some rare yet valid urls ending in common punctuation - // in order to allow sentences ending in URLs. - - // protocol (required) e.g. http - protoPart = `(https?|ftp|file|gopher|mailto|nntp)` - // host (required) e.g. www.example.com or [::1]:8080 - hostPart = `([a-zA-Z0-9_@\-.\[\]:]+)` - // path+query+fragment (optional) e.g. /path/index.html?q=foo#bar - pathPart = `([.,:;?!]*[a-zA-Z0-9$'()*+&#=@~_/\-\[\]%])*` - - urlRx = protoPart + `://` + hostPart + pathPart -) - -var ( - matchRx = regexp.MustCompile(`(` + urlRx + `)|(` + identRx + `)`) - urlReplacer = strings.NewReplacer(`(`, `\(`, `)`, `\)`) -) - -func indentLen(s string) int { - i := 0 - for i < len(s) && (s[i] == ' ' || s[i] == '\t') { - i++ - } - return i -} - -func isBlank(s string) bool { - return len(s) == 0 || (len(s) == 1 && s[0] == '\n') -} - -func commonPrefix(a, b string) string { - i := 0 - for i < len(a) && i < len(b) && a[i] == b[i] { - i++ - } - return a[0:i] -} - -func unindent(block []string) { - if len(block) == 0 { - return - } - - // compute maximum common white prefix - prefix := block[0][0:indentLen(block[0])] - for _, line := range block { - if !isBlank(line) { - prefix = commonPrefix(prefix, line) - } - } - n := len(prefix) - - // remove - for i, line := range block { - if !isBlank(line) { - block[i] = line[n:] - } - } -} - -// heading returns the trimmed line if it passes as a section heading; -// otherwise it returns the empty string. -func heading(line string) string { - line = strings.TrimSpace(line) - if len(line) == 0 { - return "" - } - - // a heading must start with an uppercase letter - r, _ := utf8.DecodeRuneInString(line) - if !unicode.IsLetter(r) || !unicode.IsUpper(r) { - return "" - } - - // it must end in a letter or digit: - r, _ = utf8.DecodeLastRuneInString(line) - if !unicode.IsLetter(r) && !unicode.IsDigit(r) { - return "" - } - - // exclude lines with illegal characters. we allow "()," - if strings.ContainsAny(line, ";:!?+*/=[]{}_^°&§~%#@<\">\\") { - return "" - } - - // allow "'" for possessive "'s" only - for b := line; ; { - i := strings.IndexRune(b, '\'') - if i < 0 { - break - } - if i+1 >= len(b) || b[i+1] != 's' || (i+2 < len(b) && b[i+2] != ' ') { - return "" // not followed by "s " - } - b = b[i+2:] - } - - // allow "." 
when followed by non-space - for b := line; ; { - i := strings.IndexRune(b, '.') - if i < 0 { - break - } - if i+1 >= len(b) || b[i+1] == ' ' { - return "" // not followed by non-space - } - b = b[i+1:] - } - - return line -} - -type op int - -const ( - opPara op = iota - opHead - opPre -) - -type block struct { - op op - lines []string -} - -func blocks(text string) []block { - var ( - out []block - para []string - - lastWasBlank = false - lastWasHeading = false - ) - - close := func() { - if para != nil { - out = append(out, block{opPara, para}) - para = nil - } - } - - lines := strings.SplitAfter(text, "\n") - unindent(lines) - for i := 0; i < len(lines); { - line := lines[i] - if isBlank(line) { - // close paragraph - close() - i++ - lastWasBlank = true - continue - } - if indentLen(line) > 0 { - // close paragraph - close() - - // count indented or blank lines - j := i + 1 - for j < len(lines) && (isBlank(lines[j]) || indentLen(lines[j]) > 0) { - j++ - } - // but not trailing blank lines - for j > i && isBlank(lines[j-1]) { - j-- - } - pre := lines[i:j] - i = j - - unindent(pre) - - // put those lines in a pre block - out = append(out, block{opPre, pre}) - lastWasHeading = false - continue - } - - if lastWasBlank && !lastWasHeading && i+2 < len(lines) && - isBlank(lines[i+1]) && !isBlank(lines[i+2]) && indentLen(lines[i+2]) == 0 { - // current line is non-blank, surrounded by blank lines - // and the next non-blank line is not indented: this - // might be a heading. - if head := heading(line); head != "" { - close() - out = append(out, block{opHead, []string{head}}) - i += 2 - lastWasHeading = true - continue - } - } - - // open paragraph - lastWasBlank = false - lastWasHeading = false - para = append(para, lines[i]) - i++ - } - close() - - return out -} diff --git a/internal/golangorgx/gopls/golang/comment_go119.go b/internal/golangorgx/gopls/golang/comment_go119.go deleted file mode 100644 index c0ecadcc2da..00000000000 --- a/internal/golangorgx/gopls/golang/comment_go119.go +++ /dev/null @@ -1,54 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.19 -// +build go1.19 - -package golang - -// Starting with go1.19, the formatting of comments has changed, and there -// is a new package (go/doc/comment) for processing them. -// As long as gopls has to compile under earlier versions, tests -// have to pass with both the old and new code, which produce -// slightly different results. - -// When gopls no longer needs to compile with go1.18, the old comment.go should -// be replaced by this file, the golden test files should be updated. -// (and checkSameMarkdown() could be replaced by a simple comparison.) - -import ( - "fmt" - "go/doc/comment" - - "cuelang.org/go/internal/golangorgx/gopls/settings" -) - -// CommentToMarkdown converts comment text to formatted markdown. -// The comment was prepared by DocReader, -// so it is known not to have leading, trailing blank lines -// nor to have trailing spaces at the end of lines. -// The comment markers have already been removed. -func CommentToMarkdown(text string, options *settings.Options) string { - var p comment.Parser - doc := p.Parse(text) - var pr comment.Printer - // The default produces {#Hdr-...} tags for headings. - // vscode displays thems, which is undesirable. - // The godoc for comment.Printer says the tags - // avoid a security problem. 
- pr.HeadingID = func(*comment.Heading) string { return "" } - pr.DocLinkURL = func(link *comment.DocLink) string { - msg := fmt.Sprintf("https://%s/%s", options.LinkTarget, link.ImportPath) - if link.Name != "" { - msg += "#" - if link.Recv != "" { - msg += link.Recv + "." - } - msg += link.Name - } - return msg - } - easy := pr.Markdown(doc) - return string(easy) -} diff --git a/internal/golangorgx/gopls/golang/completion/builtin.go b/internal/golangorgx/gopls/golang/completion/builtin.go deleted file mode 100644 index 39732d86434..00000000000 --- a/internal/golangorgx/gopls/golang/completion/builtin.go +++ /dev/null @@ -1,147 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package completion - -import ( - "context" - "go/ast" - "go/types" -) - -// builtinArgKind determines the expected object kind for a builtin -// argument. It attempts to use the AST hints from builtin.go where -// possible. -func (c *completer) builtinArgKind(ctx context.Context, obj types.Object, call *ast.CallExpr) objKind { - builtin, err := c.snapshot.BuiltinFile(ctx) - if err != nil { - return 0 - } - exprIdx := exprAtPos(c.pos, call.Args) - - builtinObj := builtin.File.Scope.Lookup(obj.Name()) - if builtinObj == nil { - return 0 - } - decl, ok := builtinObj.Decl.(*ast.FuncDecl) - if !ok || exprIdx >= len(decl.Type.Params.List) { - return 0 - } - - switch ptyp := decl.Type.Params.List[exprIdx].Type.(type) { - case *ast.ChanType: - return kindChan - case *ast.ArrayType: - return kindSlice - case *ast.MapType: - return kindMap - case *ast.Ident: - switch ptyp.Name { - case "Type": - switch obj.Name() { - case "make": - return kindChan | kindSlice | kindMap - case "len": - return kindSlice | kindMap | kindArray | kindString | kindChan - case "cap": - return kindSlice | kindArray | kindChan - } - } - } - - return 0 -} - -// builtinArgType infers the type of an argument to a builtin -// function. parentInf is the inferred type info for the builtin -// call's parent node. -func (c *completer) builtinArgType(obj types.Object, call *ast.CallExpr, parentInf candidateInference) candidateInference { - var ( - exprIdx = exprAtPos(c.pos, call.Args) - - // Propagate certain properties from our parent's inference. - inf = candidateInference{ - typeName: parentInf.typeName, - modifiers: parentInf.modifiers, - } - ) - - switch obj.Name() { - case "append": - if exprIdx <= 0 { - // Infer first append() arg type as apparent return type of - // append(). - inf.objType = parentInf.objType - if parentInf.variadic { - inf.objType = types.NewSlice(inf.objType) - } - break - } - - // For non-initial append() args, infer slice type from the first - // append() arg, or from parent context. - if len(call.Args) > 0 { - inf.objType = c.pkg.GetTypesInfo().TypeOf(call.Args[0]) - } - if inf.objType == nil { - inf.objType = parentInf.objType - } - if inf.objType == nil { - break - } - - inf.objType = deslice(inf.objType) - - // Check if we are completing the variadic append() param. - inf.variadic = exprIdx == 1 && len(call.Args) <= 2 - - // Penalize the first append() argument as a candidate. You - // don't normally append a slice to itself. - if sliceChain := objChain(c.pkg.GetTypesInfo(), call.Args[0]); len(sliceChain) > 0 { - inf.penalized = append(inf.penalized, penalizedObj{objChain: sliceChain, penalty: 0.9}) - } - case "delete": - if exprIdx > 0 && len(call.Args) > 0 { - // Try to fill in expected type of map key. 
- firstArgType := c.pkg.GetTypesInfo().TypeOf(call.Args[0]) - if firstArgType != nil { - if mt, ok := firstArgType.Underlying().(*types.Map); ok { - inf.objType = mt.Key() - } - } - } - case "copy": - var t1, t2 types.Type - if len(call.Args) > 0 { - t1 = c.pkg.GetTypesInfo().TypeOf(call.Args[0]) - if len(call.Args) > 1 { - t2 = c.pkg.GetTypesInfo().TypeOf(call.Args[1]) - } - } - - // Fill in expected type of either arg if the other is already present. - if exprIdx == 1 && t1 != nil { - inf.objType = t1 - } else if exprIdx == 0 && t2 != nil { - inf.objType = t2 - } - case "new": - inf.typeName.wantTypeName = true - if parentInf.objType != nil { - // Expected type for "new" is the de-pointered parent type. - if ptr, ok := parentInf.objType.Underlying().(*types.Pointer); ok { - inf.objType = ptr.Elem() - } - } - case "make": - if exprIdx == 0 { - inf.typeName.wantTypeName = true - inf.objType = parentInf.objType - } else { - inf.objType = types.Typ[types.UntypedInt] - } - } - - return inf -} diff --git a/internal/golangorgx/gopls/golang/completion/completion.go b/internal/golangorgx/gopls/golang/completion/completion.go deleted file mode 100644 index 56e07901e3b..00000000000 --- a/internal/golangorgx/gopls/golang/completion/completion.go +++ /dev/null @@ -1,3285 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package completion provides core functionality for code completion in Go -// editors and tools. -package completion - -import ( - "context" - "fmt" - "go/ast" - "go/constant" - "go/parser" - "go/printer" - "go/scanner" - "go/token" - "go/types" - "math" - "sort" - "strconv" - "strings" - "sync" - "sync/atomic" - "time" - "unicode" - - "cuelang.org/go/internal/golangorgx/gopls/cache" - "cuelang.org/go/internal/golangorgx/gopls/cache/metadata" - "cuelang.org/go/internal/golangorgx/gopls/file" - "cuelang.org/go/internal/golangorgx/gopls/golang" - "cuelang.org/go/internal/golangorgx/gopls/golang/completion/snippet" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/settings" - goplsastutil "cuelang.org/go/internal/golangorgx/gopls/util/astutil" - "cuelang.org/go/internal/golangorgx/gopls/util/safetoken" - "cuelang.org/go/internal/golangorgx/gopls/util/typesutil" - "cuelang.org/go/internal/golangorgx/tools/event" - "cuelang.org/go/internal/golangorgx/tools/fuzzy" - "cuelang.org/go/internal/golangorgx/tools/imports" - "cuelang.org/go/internal/golangorgx/tools/typeparams" - "golang.org/x/sync/errgroup" - "golang.org/x/tools/go/ast/astutil" -) - -// A CompletionItem represents a possible completion suggested by the algorithm. -type CompletionItem struct { - - // Invariant: CompletionItem does not refer to syntax or types. - - // Label is the primary text the user sees for this completion item. - Label string - - // Detail is supplemental information to present to the user. - // This often contains the type or return type of the completion item. - Detail string - - // InsertText is the text to insert if this item is selected. - // Any of the prefix that has already been typed is not trimmed. - // The insert text does not contain snippets. - InsertText string - - Kind protocol.CompletionItemKind - Tags []protocol.CompletionItemTag - Deprecated bool // Deprecated, prefer Tags if available - - // An optional array of additional TextEdits that are applied when - // selecting this completion. 
- // - // Additional text edits should be used to change text unrelated to the current cursor position - // (for example adding an import statement at the top of the file if the completion item will - // insert an unqualified type). - AdditionalTextEdits []protocol.TextEdit - - // Depth is how many levels were searched to find this completion. - // For example when completing "foo<>", "fooBar" is depth 0, and - // "fooBar.Baz" is depth 1. - Depth int - - // Score is the internal relevance score. - // A higher score indicates that this completion item is more relevant. - Score float64 - - // snippet is the LSP snippet for the completion item. The LSP - // specification contains details about LSP snippets. For example, a - // snippet for a function with the following signature: - // - // func foo(a, b, c int) - // - // would be: - // - // foo(${1:a int}, ${2: b int}, ${3: c int}) - // - // If Placeholders is false in the CompletionOptions, the above - // snippet would instead be: - // - // foo(${1:}) - snippet *snippet.Builder - - // Documentation is the documentation for the completion item. - Documentation string - - // isSlice reports whether the underlying type of the object - // from which this candidate was derived is a slice. - // (Used to complete append() calls.) - isSlice bool -} - -// completionOptions holds completion specific configuration. -type completionOptions struct { - unimported bool - documentation bool - fullDocumentation bool - placeholders bool - snippets bool - postfix bool - matcher settings.Matcher - budget time.Duration - completeFunctionCalls bool -} - -// Snippet is a convenience returns the snippet if available, otherwise -// the InsertText. -// used for an item, depending on if the callee wants placeholders or not. -func (i *CompletionItem) Snippet() string { - if i.snippet != nil { - return i.snippet.String() - } - return i.InsertText -} - -// Scoring constants are used for weighting the relevance of different candidates. -const ( - // stdScore is the base score for all completion items. - stdScore float64 = 1.0 - - // highScore indicates a very relevant completion item. - highScore float64 = 10.0 - - // lowScore indicates an irrelevant or not useful completion item. - lowScore float64 = 0.01 -) - -// matcher matches a candidate's label against the user input. The -// returned score reflects the quality of the match. A score of zero -// indicates no match, and a score of one means a perfect match. -type matcher interface { - Score(candidateLabel string) (score float32) -} - -// prefixMatcher implements case sensitive prefix matching. -type prefixMatcher string - -func (pm prefixMatcher) Score(candidateLabel string) float32 { - if strings.HasPrefix(candidateLabel, string(pm)) { - return 1 - } - return -1 -} - -// insensitivePrefixMatcher implements case insensitive prefix matching. -type insensitivePrefixMatcher string - -func (ipm insensitivePrefixMatcher) Score(candidateLabel string) float32 { - if strings.HasPrefix(strings.ToLower(candidateLabel), string(ipm)) { - return 1 - } - return -1 -} - -// completer contains the necessary information for a single completion request. -type completer struct { - snapshot *cache.Snapshot - pkg *cache.Package - qf types.Qualifier // for qualifying typed expressions - mq golang.MetadataQualifier // for syntactic qualifying - opts *completionOptions - - // completionContext contains information about the trigger for this - // completion request. 
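To show how the matcher interface above is typically exercised, here is a small standalone sketch; the fuzzy matcher is omitted because it lives in an internal package, and the candidate labels are invented:

package main

import (
    "fmt"
    "strings"
)

// matcher, prefixMatcher and insensitivePrefixMatcher mirror the types above.
type matcher interface {
    Score(candidateLabel string) float32
}

type prefixMatcher string

func (pm prefixMatcher) Score(label string) float32 {
    if strings.HasPrefix(label, string(pm)) {
        return 1
    }
    return -1
}

type insensitivePrefixMatcher string

func (ipm insensitivePrefixMatcher) Score(label string) float32 {
    if strings.HasPrefix(strings.ToLower(label), string(ipm)) {
        return 1
    }
    return -1
}

func main() {
    candidates := []string{"ReadFile", "readDir", "WriteFile"}
    // The insensitive matcher expects an already lower-cased prefix,
    // matching what setMatcherFromPrefix does.
    for _, m := range []matcher{prefixMatcher("Read"), insensitivePrefixMatcher("read")} {
        var kept []string
        for _, c := range candidates {
            if m.Score(c) > 0 {
                kept = append(kept, c)
            }
        }
        fmt.Printf("%T keeps %v\n", m, kept)
    }
}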
- completionContext completionContext - - // fh is a handle to the file associated with this completion request. - fh file.Handle - - // filename is the name of the file associated with this completion request. - filename string - - // file is the AST of the file associated with this completion request. - file *ast.File - - // (tokFile, pos) is the position at which the request was triggered. - tokFile *token.File - pos token.Pos - - // path is the path of AST nodes enclosing the position. - path []ast.Node - - // seen is the map that ensures we do not return duplicate results. - seen map[types.Object]bool - - // items is the list of completion items returned. - items []CompletionItem - - // completionCallbacks is a list of callbacks to collect completions that - // require expensive operations. This includes operations where we search - // through the entire module cache. - completionCallbacks []func(context.Context, *imports.Options) error - - // surrounding describes the identifier surrounding the position. - surrounding *Selection - - // inference contains information we've inferred about ideal - // candidates such as the candidate's type. - inference candidateInference - - // enclosingFunc contains information about the function enclosing - // the position. - enclosingFunc *funcInfo - - // enclosingCompositeLiteral contains information about the composite literal - // enclosing the position. - enclosingCompositeLiteral *compLitInfo - - // deepState contains the current state of our deep completion search. - deepState deepCompletionState - - // matcher matches the candidates against the surrounding prefix. - matcher matcher - - // methodSetCache caches the types.NewMethodSet call, which is relatively - // expensive and can be called many times for the same type while searching - // for deep completions. - methodSetCache map[methodSetKey]*types.MethodSet - - // mapper converts the positions in the file from which the completion originated. - mapper *protocol.Mapper - - // startTime is when we started processing this completion request. It does - // not include any time the request spent in the queue. - // - // Note: in CL 503016, startTime move to *after* type checking, but it was - // subsequently determined that it was better to keep setting it *before* - // type checking, so that the completion budget best approximates the user - // experience. See golang/go#62665 for more details. - startTime time.Time - - // scopes contains all scopes defined by nodes in our path, - // including nil values for nodes that don't defined a scope. It - // also includes our package scope and the universal scope at the - // end. - scopes []*types.Scope -} - -// funcInfo holds info about a function object. -type funcInfo struct { - // sig is the function declaration enclosing the position. - sig *types.Signature - - // body is the function's body. - body *ast.BlockStmt -} - -type compLitInfo struct { - // cl is the *ast.CompositeLit enclosing the position. - cl *ast.CompositeLit - - // clType is the type of cl. - clType types.Type - - // kv is the *ast.KeyValueExpr enclosing the position, if any. - kv *ast.KeyValueExpr - - // inKey is true if we are certain the position is in the key side - // of a key-value pair. - inKey bool - - // maybeInFieldName is true if inKey is false and it is possible - // we are completing a struct field name. For example, - // "SomeStruct{<>}" will be inKey=false, but maybeInFieldName=true - // because we _could_ be completing a field name. 
- maybeInFieldName bool -} - -type importInfo struct { - importPath string - name string -} - -type methodSetKey struct { - typ types.Type - addressable bool -} - -type completionContext struct { - // triggerCharacter is the character used to trigger completion at current - // position, if any. - triggerCharacter string - - // triggerKind is information about how a completion was triggered. - triggerKind protocol.CompletionTriggerKind - - // commentCompletion is true if we are completing a comment. - commentCompletion bool - - // packageCompletion is true if we are completing a package name. - packageCompletion bool -} - -// A Selection represents the cursor position and surrounding identifier. -type Selection struct { - content string - tokFile *token.File - start, end, cursor token.Pos // relative to rng.TokFile - mapper *protocol.Mapper -} - -func (p Selection) Range() (protocol.Range, error) { - return p.mapper.PosRange(p.tokFile, p.start, p.end) -} - -func (p Selection) Prefix() string { - return p.content[:p.cursor-p.start] -} - -func (p Selection) Suffix() string { - return p.content[p.cursor-p.start:] -} - -func (c *completer) setSurrounding(ident *ast.Ident) { - if c.surrounding != nil { - return - } - if !(ident.Pos() <= c.pos && c.pos <= ident.End()) { - return - } - - c.surrounding = &Selection{ - content: ident.Name, - cursor: c.pos, - // Overwrite the prefix only. - tokFile: c.tokFile, - start: ident.Pos(), - end: ident.End(), - mapper: c.mapper, - } - - c.setMatcherFromPrefix(c.surrounding.Prefix()) -} - -func (c *completer) setMatcherFromPrefix(prefix string) { - switch c.opts.matcher { - case settings.Fuzzy: - c.matcher = fuzzy.NewMatcher(prefix) - case settings.CaseSensitive: - c.matcher = prefixMatcher(prefix) - default: - c.matcher = insensitivePrefixMatcher(strings.ToLower(prefix)) - } -} - -func (c *completer) getSurrounding() *Selection { - if c.surrounding == nil { - c.surrounding = &Selection{ - content: "", - cursor: c.pos, - tokFile: c.tokFile, - start: c.pos, - end: c.pos, - mapper: c.mapper, - } - } - return c.surrounding -} - -// candidate represents a completion candidate. -type candidate struct { - // obj is the types.Object to complete to. - // TODO(adonovan): eliminate dependence on go/types throughout this struct. - // See comment in (*completer).selector for explanation. - obj types.Object - - // score is used to rank candidates. - score float64 - - // name is the deep object name path, e.g. "foo.bar" - name string - - // detail is additional information about this item. If not specified, - // defaults to type string for the object. - detail string - - // path holds the path from the search root (excluding the candidate - // itself) for a deep candidate. - path []types.Object - - // pathInvokeMask is a bit mask tracking whether each entry in path - // should be formatted with "()" (i.e. whether it is a function - // invocation). - pathInvokeMask uint16 - - // mods contains modifications that should be applied to the - // candidate when inserted. For example, "foo" may be inserted as - // "*foo" or "foo()". - mods []typeModKind - - // addressable is true if a pointer can be taken to the candidate. - addressable bool - - // convertTo is a type that this candidate should be cast to. For - // example, if convertTo is float64, "foo" should be formatted as - // "float64(foo)". - convertTo types.Type - - // imp is the import that needs to be added to this package in order - // for this candidate to be valid. nil if no import needed. 
- imp *importInfo -} - -func (c candidate) hasMod(mod typeModKind) bool { - for _, m := range c.mods { - if m == mod { - return true - } - } - return false -} - -// ErrIsDefinition is an error that informs the user they got no -// completions because they tried to complete the name of a new object -// being defined. -type ErrIsDefinition struct { - objStr string -} - -func (e ErrIsDefinition) Error() string { - msg := "this is a definition" - if e.objStr != "" { - msg += " of " + e.objStr - } - return msg -} - -// Completion returns a list of possible candidates for completion, given a -// a file and a position. -// -// The selection is computed based on the preceding identifier and can be used by -// the client to score the quality of the completion. For instance, some clients -// may tolerate imperfect matches as valid completion results, since users may make typos. -func Completion(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, protoPos protocol.Position, protoContext protocol.CompletionContext) ([]CompletionItem, *Selection, error) { - ctx, done := event.Start(ctx, "completion.Completion") - defer done() - - startTime := time.Now() - - pkg, pgf, err := golang.NarrowestPackageForFile(ctx, snapshot, fh.URI()) - if err != nil || pgf.File.Package == token.NoPos { - // If we can't parse this file or find position for the package - // keyword, it may be missing a package declaration. Try offering - // suggestions for the package declaration. - // Note that this would be the case even if the keyword 'package' is - // present but no package name exists. - items, surrounding, innerErr := packageClauseCompletions(ctx, snapshot, fh, protoPos) - if innerErr != nil { - // return the error for GetParsedFile since it's more relevant in this situation. - return nil, nil, fmt.Errorf("getting file %s for Completion: %w (package completions: %v)", fh.URI(), err, innerErr) - } - return items, surrounding, nil - } - - pos, err := pgf.PositionPos(protoPos) - if err != nil { - return nil, nil, err - } - // Completion is based on what precedes the cursor. - // Find the path to the position before pos. - path, _ := astutil.PathEnclosingInterval(pgf.File, pos-1, pos-1) - if path == nil { - return nil, nil, fmt.Errorf("cannot find node enclosing position") - } - - // Check if completion at this position is valid. If not, return early. - switch n := path[0].(type) { - case *ast.BasicLit: - // Skip completion inside literals except for ImportSpec - if len(path) > 1 { - if _, ok := path[1].(*ast.ImportSpec); ok { - break - } - } - return nil, nil, nil - case *ast.CallExpr: - if n.Ellipsis.IsValid() && pos > n.Ellipsis && pos <= n.Ellipsis+token.Pos(len("...")) { - // Don't offer completions inside or directly after "...". For - // example, don't offer completions at "<>" in "foo(bar...<>"). - return nil, nil, nil - } - case *ast.Ident: - // reject defining identifiers - if obj, ok := pkg.GetTypesInfo().Defs[n]; ok { - if v, ok := obj.(*types.Var); ok && v.IsField() && v.Embedded() { - // An anonymous field is also a reference to a type. - } else if pgf.File.Name == n { - // Don't skip completions if Ident is for package name. 
- break - } else { - objStr := "" - if obj != nil { - qual := types.RelativeTo(pkg.GetTypes()) - objStr = types.ObjectString(obj, qual) - } - ans, sel := definition(path, obj, pgf) - if ans != nil { - sort.Slice(ans, func(i, j int) bool { - return ans[i].Score > ans[j].Score - }) - return ans, sel, nil - } - return nil, nil, ErrIsDefinition{objStr: objStr} - } - } - } - - // Collect all surrounding scopes, innermost first. - scopes := golang.CollectScopes(pkg.GetTypesInfo(), path, pos) - scopes = append(scopes, pkg.GetTypes().Scope(), types.Universe) - - opts := snapshot.Options() - c := &completer{ - pkg: pkg, - snapshot: snapshot, - qf: typesutil.FileQualifier(pgf.File, pkg.GetTypes(), pkg.GetTypesInfo()), - mq: golang.MetadataQualifierForFile(snapshot, pgf.File, pkg.Metadata()), - completionContext: completionContext{ - triggerCharacter: protoContext.TriggerCharacter, - triggerKind: protoContext.TriggerKind, - }, - fh: fh, - filename: fh.URI().Path(), - tokFile: pgf.Tok, - file: pgf.File, - path: path, - pos: pos, - seen: make(map[types.Object]bool), - enclosingFunc: enclosingFunction(path, pkg.GetTypesInfo()), - enclosingCompositeLiteral: enclosingCompositeLiteral(path, pos, pkg.GetTypesInfo()), - deepState: deepCompletionState{ - enabled: opts.DeepCompletion, - }, - opts: &completionOptions{ - matcher: opts.Matcher, - unimported: opts.CompleteUnimported, - documentation: opts.CompletionDocumentation && opts.HoverKind != settings.NoDocumentation, - fullDocumentation: opts.HoverKind == settings.FullDocumentation, - placeholders: opts.UsePlaceholders, - budget: opts.CompletionBudget, - snippets: opts.InsertTextFormat == protocol.SnippetTextFormat, - postfix: opts.ExperimentalPostfixCompletions, - completeFunctionCalls: opts.CompleteFunctionCalls, - }, - // default to a matcher that always matches - matcher: prefixMatcher(""), - methodSetCache: make(map[methodSetKey]*types.MethodSet), - mapper: pgf.Mapper, - startTime: startTime, - scopes: scopes, - } - - ctx, cancel := context.WithCancel(ctx) - defer cancel() - - // Compute the deadline for this operation. Deadline is relative to the - // search operation, not the entire completion RPC, as the work up until this - // point depends significantly on how long it took to type-check, which in - // turn depends on the timing of the request relative to other operations on - // the snapshot. Including that work in the budget leads to inconsistent - // results (and realistically, if type-checking took 200ms already, the user - // is unlikely to be significantly more bothered by e.g. another 100ms of - // search). - // - // Don't overload the context with this deadline, as we don't want to - // conflate user cancellation (=fail the operation) with our time limit - // (=stop searching and succeed with partial results). - var deadline *time.Time - if c.opts.budget > 0 { - d := startTime.Add(c.opts.budget) - deadline = &d - } - - if surrounding := c.containingIdent(pgf.Src); surrounding != nil { - c.setSurrounding(surrounding) - } - - c.inference = expectedCandidate(ctx, c) - - err = c.collectCompletions(ctx) - if err != nil { - return nil, nil, err - } - - // Deep search collected candidates and their members for more candidates. - c.deepSearch(ctx, 1, deadline) - - // At this point we have a sufficiently complete set of results, and want to - // return as close to the completion budget as possible. Previously, we - // avoided cancelling the context because it could result in partial results - // for e.g. struct fields. 
At this point, we have a minimal valid set of - // candidates, and so truncating due to context cancellation is acceptable. - if c.opts.budget > 0 { - timeoutDuration := time.Until(c.startTime.Add(c.opts.budget)) - ctx, cancel = context.WithTimeout(ctx, timeoutDuration) - defer cancel() - } - - for _, callback := range c.completionCallbacks { - if deadline == nil || time.Now().Before(*deadline) { - if err := c.snapshot.RunProcessEnvFunc(ctx, callback); err != nil { - return nil, nil, err - } - } - } - - // Search candidates populated by expensive operations like - // unimportedMembers etc. for more completion items. - c.deepSearch(ctx, 0, deadline) - - // Statement candidates offer an entire statement in certain contexts, as - // opposed to a single object. Add statement candidates last because they - // depend on other candidates having already been collected. - c.addStatementCandidates() - - c.sortItems() - return c.items, c.getSurrounding(), nil -} - -// collectCompletions adds possible completion candidates to either the deep -// search queue or completion items directly for different completion contexts. -func (c *completer) collectCompletions(ctx context.Context) error { - // Inside import blocks, return completions for unimported packages. - for _, importSpec := range c.file.Imports { - if !(importSpec.Path.Pos() <= c.pos && c.pos <= importSpec.Path.End()) { - continue - } - return c.populateImportCompletions(importSpec) - } - - // Inside comments, offer completions for the name of the relevant symbol. - for _, comment := range c.file.Comments { - if comment.Pos() < c.pos && c.pos <= comment.End() { - c.populateCommentCompletions(comment) - return nil - } - } - - // Struct literals are handled entirely separately. - if c.wantStructFieldCompletions() { - // If we are definitely completing a struct field name, deep completions - // don't make sense. - if c.enclosingCompositeLiteral.inKey { - c.deepState.enabled = false - } - return c.structLiteralFieldName(ctx) - } - - if lt := c.wantLabelCompletion(); lt != labelNone { - c.labels(lt) - return nil - } - - if c.emptySwitchStmt() { - // Empty switch statements only admit "default" and "case" keywords. - c.addKeywordItems(map[string]bool{}, highScore, CASE, DEFAULT) - return nil - } - - switch n := c.path[0].(type) { - case *ast.Ident: - if c.file.Name == n { - return c.packageNameCompletions(ctx, c.fh.URI(), n) - } else if sel, ok := c.path[1].(*ast.SelectorExpr); ok && sel.Sel == n { - // Is this the Sel part of a selector? - return c.selector(ctx, sel) - } - return c.lexical(ctx) - // The function name hasn't been typed yet, but the parens are there: - // recv.‸(arg) - case *ast.TypeAssertExpr: - // Create a fake selector expression. - // - // The name "_" is the convention used by go/parser to represent phantom - // selectors. - sel := &ast.Ident{NamePos: n.X.End() + token.Pos(len(".")), Name: "_"} - return c.selector(ctx, &ast.SelectorExpr{X: n.X, Sel: sel}) - case *ast.SelectorExpr: - return c.selector(ctx, n) - // At the file scope, only keywords are allowed. - case *ast.BadDecl, *ast.File: - c.addKeywordCompletions() - default: - // fallback to lexical completions - return c.lexical(ctx) - } - - return nil -} - -// containingIdent returns the *ast.Ident containing pos, if any. It -// synthesizes an *ast.Ident to allow completion in the face of -// certain syntax errors. -func (c *completer) containingIdent(src []byte) *ast.Ident { - // In the normal case, our leaf AST node is the identifier being completed. 
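The path computation that drives the dispatch in Completion can be reproduced in isolation. A standalone sketch (the source text and cursor offset are invented) using astutil.PathEnclosingInterval to find the syntax immediately preceding a cursor:

package main

import (
    "fmt"
    "go/parser"
    "go/token"

    "golang.org/x/tools/go/ast/astutil"
)

func main() {
    src := `package p

func f() {
	fmt.Pri
}
`
    fset := token.NewFileSet()
    // Completion must tolerate broken code, hence AllErrors; this snippet
    // happens to parse cleanly.
    file, err := parser.ParseFile(fset, "p.go", src, parser.AllErrors)
    if file == nil {
        panic(err)
    }

    // Cursor just after "fmt.Pri", expressed as a byte offset into src.
    offset := len("package p\n\nfunc f() {\n\tfmt.Pri")
    pos := fset.File(file.Pos()).Pos(offset)

    // As in Completion above, look at what precedes the cursor: pos-1.
    path, _ := astutil.PathEnclosingInterval(file, pos-1, pos-1)
    for _, n := range path {
        fmt.Printf("%T\n", n) // *ast.Ident, *ast.SelectorExpr, ..., *ast.File
    }
}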
- if ident, ok := c.path[0].(*ast.Ident); ok { - return ident - } - - pos, tkn, lit := c.scanToken(src) - if !pos.IsValid() { - return nil - } - - fakeIdent := &ast.Ident{Name: lit, NamePos: pos} - - if _, isBadDecl := c.path[0].(*ast.BadDecl); isBadDecl { - // You don't get *ast.Idents at the file level, so look for bad - // decls and use the manually extracted token. - return fakeIdent - } else if c.emptySwitchStmt() { - // Only keywords are allowed in empty switch statements. - // *ast.Idents are not parsed, so we must use the manually - // extracted token. - return fakeIdent - } else if tkn.IsKeyword() { - // Otherwise, manually extract the prefix if our containing token - // is a keyword. This improves completion after an "accidental - // keyword", e.g. completing to "variance" in "someFunc(var<>)". - return fakeIdent - } - - return nil -} - -// scanToken scans pgh's contents for the token containing pos. -func (c *completer) scanToken(contents []byte) (token.Pos, token.Token, string) { - tok := c.pkg.FileSet().File(c.pos) - - var s scanner.Scanner - s.Init(tok, contents, nil, 0) - for { - tknPos, tkn, lit := s.Scan() - if tkn == token.EOF || tknPos >= c.pos { - return token.NoPos, token.ILLEGAL, "" - } - - if len(lit) > 0 && tknPos <= c.pos && c.pos <= tknPos+token.Pos(len(lit)) { - return tknPos, tkn, lit - } - } -} - -func (c *completer) sortItems() { - sort.SliceStable(c.items, func(i, j int) bool { - // Sort by score first. - if c.items[i].Score != c.items[j].Score { - return c.items[i].Score > c.items[j].Score - } - - // Then sort by label so order stays consistent. This also has the - // effect of preferring shorter candidates. - return c.items[i].Label < c.items[j].Label - }) -} - -// emptySwitchStmt reports whether pos is in an empty switch or select -// statement. -func (c *completer) emptySwitchStmt() bool { - block, ok := c.path[0].(*ast.BlockStmt) - if !ok || len(block.List) > 0 || len(c.path) == 1 { - return false - } - - switch c.path[1].(type) { - case *ast.SwitchStmt, *ast.TypeSwitchStmt, *ast.SelectStmt: - return true - default: - return false - } -} - -// populateImportCompletions yields completions for an import path around the cursor. -// -// Completions are suggested at the directory depth of the given import path so -// that we don't overwhelm the user with a large list of possibilities. As an -// example, a completion for the prefix "golang" results in "golang.org/". -// Completions for "golang.org/" yield its subdirectories -// (i.e. "golang.org/x/"). The user is meant to accept completion suggestions -// until they reach a complete import path. -func (c *completer) populateImportCompletions(searchImport *ast.ImportSpec) error { - if !strings.HasPrefix(searchImport.Path.Value, `"`) { - return nil - } - - // deepSearch is not valuable for import completions. - c.deepState.enabled = false - - importPath := searchImport.Path.Value - - // Extract the text between the quotes (if any) in an import spec. - // prefix is the part of import path before the cursor. - prefixEnd := c.pos - searchImport.Path.Pos() - prefix := strings.Trim(importPath[:prefixEnd], `"`) - - // The number of directories in the import path gives us the depth at - // which to search. 
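The directory-depth behaviour described above, in a simplified standalone form (nextSegment and the package paths are illustrative, not gopls API):

package main

import (
    "fmt"
    "strings"
)

// nextSegment returns the candidate completion for path at the directory
// depth implied by the typed prefix, with a trailing "/" when the path
// continues deeper, mirroring the behaviour described above.
func nextSegment(prefix, path string) (string, bool) {
    depth := len(strings.Split(prefix, "/")) - 1
    parts := strings.Split(path, "/")
    if len(parts) < depth+1 {
        return "", false // fewer directories than the prefix: not a match
    }
    seg := strings.Join(parts[:depth+1], "/")
    if len(parts)-1 > depth {
        seg += "/" // keep the user triggering further completions
    }
    return seg, strings.HasPrefix(seg, prefix)
}

func main() {
    for _, p := range []string{"golang.org/x/tools", "golang.org/x/sync/errgroup"} {
        if seg, ok := nextSegment("golang.org/x/", p); ok {
            fmt.Println(seg) // golang.org/x/tools, golang.org/x/sync/
        }
    }
}

An exact-depth match corresponds to the highScore boost applied in searchImports; a longer path keeps the trailing slash so the user keeps triggering completions until the import path is complete.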
- depth := len(strings.Split(prefix, "/")) - 1 - - content := importPath - start, end := searchImport.Path.Pos(), searchImport.Path.End() - namePrefix, nameSuffix := `"`, `"` - // If a starting quote is present, adjust surrounding to either after the - // cursor or after the first slash (/), except if cursor is at the starting - // quote. Otherwise we provide a completion including the starting quote. - if strings.HasPrefix(importPath, `"`) && c.pos > searchImport.Path.Pos() { - content = content[1:] - start++ - if depth > 0 { - // Adjust textEdit start to replacement range. For ex: if current - // path was "golang.or/x/to<>ols/internal/", where <> is the cursor - // position, start of the replacement range would be after - // "golang.org/x/". - path := strings.SplitAfter(prefix, "/") - numChars := len(strings.Join(path[:len(path)-1], "")) - content = content[numChars:] - start += token.Pos(numChars) - } - namePrefix = "" - } - - // We won't provide an ending quote if one is already present, except if - // cursor is after the ending quote but still in import spec. This is - // because cursor has to be in our textEdit range. - if strings.HasSuffix(importPath, `"`) && c.pos < searchImport.Path.End() { - end-- - content = content[:len(content)-1] - nameSuffix = "" - } - - c.surrounding = &Selection{ - content: content, - cursor: c.pos, - tokFile: c.tokFile, - start: start, - end: end, - mapper: c.mapper, - } - - seenImports := make(map[string]struct{}) - for _, importSpec := range c.file.Imports { - if importSpec.Path.Value == importPath { - continue - } - seenImportPath, err := strconv.Unquote(importSpec.Path.Value) - if err != nil { - return err - } - seenImports[seenImportPath] = struct{}{} - } - - var mu sync.Mutex // guard c.items locally, since searchImports is called in parallel - seen := make(map[string]struct{}) - searchImports := func(pkg imports.ImportFix) { - path := pkg.StmtInfo.ImportPath - if _, ok := seenImports[path]; ok { - return - } - - // Any package path containing fewer directories than the search - // prefix is not a match. - pkgDirList := strings.Split(path, "/") - if len(pkgDirList) < depth+1 { - return - } - pkgToConsider := strings.Join(pkgDirList[:depth+1], "/") - - name := pkgDirList[depth] - // if we're adding an opening quote to completion too, set name to full - // package path since we'll need to overwrite that range. - if namePrefix == `"` { - name = pkgToConsider - } - - score := pkg.Relevance - if len(pkgDirList)-1 == depth { - score *= highScore - } else { - // For incomplete package paths, add a terminal slash to indicate that the - // user should keep triggering completions. - name += "/" - pkgToConsider += "/" - } - - if _, ok := seen[pkgToConsider]; ok { - return - } - seen[pkgToConsider] = struct{}{} - - mu.Lock() - defer mu.Unlock() - - name = namePrefix + name + nameSuffix - obj := types.NewPkgName(0, nil, name, types.NewPackage(pkgToConsider, name)) - c.deepState.enqueue(candidate{ - obj: obj, - detail: fmt.Sprintf("%q", pkgToConsider), - score: score, - }) - } - - c.completionCallbacks = append(c.completionCallbacks, func(ctx context.Context, opts *imports.Options) error { - return imports.GetImportPaths(ctx, searchImports, prefix, c.filename, c.pkg.GetTypes().Name(), opts.Env) - }) - return nil -} - -// populateCommentCompletions yields completions for comments preceding or in declarations. -func (c *completer) populateCommentCompletions(comment *ast.CommentGroup) { - // If the completion was triggered by a period, ignore it. 
These types of - // completions will not be useful in comments. - if c.completionContext.triggerCharacter == "." { - return - } - - // Using the comment position find the line after - file := c.pkg.FileSet().File(comment.End()) - if file == nil { - return - } - - // Deep completion doesn't work properly in comments since we don't - // have a type object to complete further. - c.deepState.enabled = false - c.completionContext.commentCompletion = true - - // Documentation isn't useful in comments, since it might end up being the - // comment itself. - c.opts.documentation = false - - commentLine := safetoken.Line(file, comment.End()) - - // comment is valid, set surrounding as word boundaries around cursor - c.setSurroundingForComment(comment) - - // Using the next line pos, grab and parse the exported symbol on that line - for _, n := range c.file.Decls { - declLine := safetoken.Line(file, n.Pos()) - // if the comment is not in, directly above or on the same line as a declaration - if declLine != commentLine && declLine != commentLine+1 && - !(n.Pos() <= comment.Pos() && comment.End() <= n.End()) { - continue - } - switch node := n.(type) { - // handle const, vars, and types - case *ast.GenDecl: - for _, spec := range node.Specs { - switch spec := spec.(type) { - case *ast.ValueSpec: - for _, name := range spec.Names { - if name.String() == "_" { - continue - } - obj := c.pkg.GetTypesInfo().ObjectOf(name) - c.deepState.enqueue(candidate{obj: obj, score: stdScore}) - } - case *ast.TypeSpec: - // add TypeSpec fields to completion - switch typeNode := spec.Type.(type) { - case *ast.StructType: - c.addFieldItems(typeNode.Fields) - case *ast.FuncType: - c.addFieldItems(typeNode.Params) - c.addFieldItems(typeNode.Results) - case *ast.InterfaceType: - c.addFieldItems(typeNode.Methods) - } - - if spec.Name.String() == "_" { - continue - } - - obj := c.pkg.GetTypesInfo().ObjectOf(spec.Name) - // Type name should get a higher score than fields but not highScore by default - // since field near a comment cursor gets a highScore - score := stdScore * 1.1 - // If type declaration is on the line after comment, give it a highScore. 
- if declLine == commentLine+1 { - score = highScore - } - - c.deepState.enqueue(candidate{obj: obj, score: score}) - } - } - // handle functions - case *ast.FuncDecl: - c.addFieldItems(node.Recv) - c.addFieldItems(node.Type.Params) - c.addFieldItems(node.Type.Results) - - // collect receiver struct fields - if node.Recv != nil { - for _, fields := range node.Recv.List { - for _, name := range fields.Names { - obj := c.pkg.GetTypesInfo().ObjectOf(name) - if obj == nil { - continue - } - - recvType := obj.Type().Underlying() - if ptr, ok := recvType.(*types.Pointer); ok { - recvType = ptr.Elem() - } - recvStruct, ok := recvType.Underlying().(*types.Struct) - if !ok { - continue - } - for i := 0; i < recvStruct.NumFields(); i++ { - field := recvStruct.Field(i) - c.deepState.enqueue(candidate{obj: field, score: lowScore}) - } - } - } - } - - if node.Name.String() == "_" { - continue - } - - obj := c.pkg.GetTypesInfo().ObjectOf(node.Name) - if obj == nil || obj.Pkg() != nil && obj.Pkg() != c.pkg.GetTypes() { - continue - } - - c.deepState.enqueue(candidate{obj: obj, score: highScore}) - } - } -} - -// sets word boundaries surrounding a cursor for a comment -func (c *completer) setSurroundingForComment(comments *ast.CommentGroup) { - var cursorComment *ast.Comment - for _, comment := range comments.List { - if c.pos >= comment.Pos() && c.pos <= comment.End() { - cursorComment = comment - break - } - } - // if cursor isn't in the comment - if cursorComment == nil { - return - } - - // index of cursor in comment text - cursorOffset := int(c.pos - cursorComment.Pos()) - start, end := cursorOffset, cursorOffset - for start > 0 && isValidIdentifierChar(cursorComment.Text[start-1]) { - start-- - } - for end < len(cursorComment.Text) && isValidIdentifierChar(cursorComment.Text[end]) { - end++ - } - - c.surrounding = &Selection{ - content: cursorComment.Text[start:end], - cursor: c.pos, - tokFile: c.tokFile, - start: token.Pos(int(cursorComment.Slash) + start), - end: token.Pos(int(cursorComment.Slash) + end), - mapper: c.mapper, - } - c.setMatcherFromPrefix(c.surrounding.Prefix()) -} - -// isValidIdentifierChar returns true if a byte is a valid go identifier -// character, i.e. unicode letter or digit or underscore. 
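A standalone sketch of the word-boundary scan that setSurroundingForComment performs around the cursor (the comment text and cursor offset are invented):

package main

import (
    "fmt"
    "unicode"
)

// isIdentChar mirrors the byte-level identifier test used above.
func isIdentChar(c byte) bool {
    r := rune(c)
    return unicode.In(r, unicode.Letter, unicode.Digit) || c == '_'
}

func main() {
    text := "// see parseConfig for details"
    cursor := len("// see parseCon") // cursor in the middle of "parseConfig"

    start, end := cursor, cursor
    for start > 0 && isIdentChar(text[start-1]) {
        start--
    }
    for end < len(text) && isIdentChar(text[end]) {
        end++
    }
    fmt.Printf("surrounding word: %q\n", text[start:end]) // "parseConfig"
}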
-func isValidIdentifierChar(char byte) bool { - charRune := rune(char) - return unicode.In(charRune, unicode.Letter, unicode.Digit) || char == '_' -} - -// adds struct fields, interface methods, function declaration fields to completion -func (c *completer) addFieldItems(fields *ast.FieldList) { - if fields == nil { - return - } - - cursor := c.surrounding.cursor - for _, field := range fields.List { - for _, name := range field.Names { - if name.String() == "_" { - continue - } - obj := c.pkg.GetTypesInfo().ObjectOf(name) - if obj == nil { - continue - } - - // if we're in a field comment/doc, score that field as more relevant - score := stdScore - if field.Comment != nil && field.Comment.Pos() <= cursor && cursor <= field.Comment.End() { - score = highScore - } else if field.Doc != nil && field.Doc.Pos() <= cursor && cursor <= field.Doc.End() { - score = highScore - } - - c.deepState.enqueue(candidate{obj: obj, score: score}) - } - } -} - -func (c *completer) wantStructFieldCompletions() bool { - clInfo := c.enclosingCompositeLiteral - if clInfo == nil { - return false - } - - return clInfo.isStruct() && (clInfo.inKey || clInfo.maybeInFieldName) -} - -func (c *completer) wantTypeName() bool { - return !c.completionContext.commentCompletion && c.inference.typeName.wantTypeName -} - -// See https://golang.org/issue/36001. Unimported completions are expensive. -const ( - maxUnimportedPackageNames = 5 - unimportedMemberTarget = 100 -) - -// selector finds completions for the specified selector expression. -func (c *completer) selector(ctx context.Context, sel *ast.SelectorExpr) error { - c.inference.objChain = objChain(c.pkg.GetTypesInfo(), sel.X) - - // True selector? - if tv, ok := c.pkg.GetTypesInfo().Types[sel.X]; ok { - c.methodsAndFields(tv.Type, tv.Addressable(), nil, c.deepState.enqueue) - c.addPostfixSnippetCandidates(ctx, sel) - return nil - } - - id, ok := sel.X.(*ast.Ident) - if !ok { - return nil - } - - // Treat sel as a qualified identifier. - var filter func(*metadata.Package) bool - needImport := false - if pkgName, ok := c.pkg.GetTypesInfo().Uses[id].(*types.PkgName); ok { - // Qualified identifier with import declaration. - imp := pkgName.Imported() - - // Known direct dependency? Expand using type information. - if _, ok := c.pkg.Metadata().DepsByPkgPath[golang.PackagePath(imp.Path())]; ok { - c.packageMembers(imp, stdScore, nil, c.deepState.enqueue) - return nil - } - - // Imported declaration with missing type information. - // Fall through to shallow completion of unimported package members. - // Match candidate packages by path. - filter = func(mp *metadata.Package) bool { - return strings.TrimPrefix(string(mp.PkgPath), "vendor/") == imp.Path() - } - } else { - // Qualified identifier without import declaration. - // Match candidate packages by name. - filter = func(mp *metadata.Package) bool { - return string(mp.Name) == id.Name - } - needImport = true - } - - // Search unimported packages. - if !c.opts.unimported { - return nil // feature disabled - } - - // The deep completion algorithm is exceedingly complex and - // deeply coupled to the now obsolete notions that all - // token.Pos values can be interpreted by as a single FileSet - // belonging to the Snapshot and that all types.Object values - // are canonicalized by a single types.Importer mapping. - // These invariants are no longer true now that gopls uses - // an incremental approach, parsing and type-checking each - // package separately. 
- // - // Consequently, completion of symbols defined in packages that - // are not currently imported by the query file cannot use the - // deep completion machinery which is based on type information. - // Instead it must use only syntax information from a quick - // parse of top-level declarations (but not function bodies). - // - // TODO(adonovan): rewrite the deep completion machinery to - // not assume global Pos/Object realms and then use export - // data instead of the quick parse approach taken here. - - // First, we search among packages in the forward transitive - // closure of the workspace. - // We'll use a fast parse to extract package members - // from those that match the name/path criterion. - all, err := c.snapshot.AllMetadata(ctx) - if err != nil { - return err - } - known := make(map[golang.PackagePath]*metadata.Package) - for _, mp := range all { - if mp.Name == "main" { - continue // not importable - } - if mp.IsIntermediateTestVariant() { - continue - } - // The only test variant we admit is "p [p.test]" - // when we are completing within "p_test [p.test]", - // as in that case we would like to offer completions - // of the test variants' additional symbols. - if mp.ForTest != "" && c.pkg.Metadata().PkgPath != mp.ForTest+"_test" { - continue - } - if !filter(mp) { - continue - } - // Prefer previous entry unless this one is its test variant. - if mp.ForTest != "" || known[mp.PkgPath] == nil { - known[mp.PkgPath] = mp - } - } - - paths := make([]string, 0, len(known)) - for path := range known { - paths = append(paths, string(path)) - } - - // Rank import paths as goimports would. - var relevances map[string]float64 - if len(paths) > 0 { - if err := c.snapshot.RunProcessEnvFunc(ctx, func(ctx context.Context, opts *imports.Options) error { - var err error - relevances, err = imports.ScoreImportPaths(ctx, opts.Env, paths) - return err - }); err != nil { - return err - } - sort.Slice(paths, func(i, j int) bool { - return relevances[paths[i]] > relevances[paths[j]] - }) - } - - // quickParse does a quick parse of a single file of package m, - // extracts exported package members and adds candidates to c.items. - // TODO(rfindley): synchronizing access to c here does not feel right. - // Consider adding a concurrency-safe API for completer. - var cMu sync.Mutex // guards c.items and c.matcher - var enough int32 // atomic bool - quickParse := func(uri protocol.DocumentURI, mp *metadata.Package) error { - if atomic.LoadInt32(&enough) != 0 { - return nil - } - - fh, err := c.snapshot.ReadFile(ctx, uri) - if err != nil { - return err - } - content, err := fh.Content() - if err != nil { - return err - } - path := string(mp.PkgPath) - forEachPackageMember(content, func(tok token.Token, id *ast.Ident, fn *ast.FuncDecl) { - if atomic.LoadInt32(&enough) != 0 { - return - } - - if !id.IsExported() { - return - } - - cMu.Lock() - score := c.matcher.Score(id.Name) - cMu.Unlock() - - if sel.Sel.Name != "_" && score == 0 { - return // not a match; avoid constructing the completion item below - } - - // The only detail is the kind and package: `var (from "example.com/foo")` - // TODO(adonovan): pretty-print FuncDecl.FuncType or TypeSpec.Type? - // TODO(adonovan): should this score consider the actual c.matcher.Score - // of the item? How does this compare with the deepState.enqueue path? 
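The syntax-only scan of exported top-level declarations described above can be sketched standalone; the source file below is invented and the printed form is illustrative rather than the CompletionItem detail string:

package main

import (
    "fmt"
    "go/ast"
    "go/parser"
    "go/token"
)

func main() {
    src := `package widgets

const MaxSize = 8

func New() *Widget { return &Widget{} }

type Widget struct{ name string }

var defaultWidget = New()
`
    fset := token.NewFileSet()
    f, err := parser.ParseFile(fset, "widgets.go", src, 0)
    if err != nil {
        panic(err)
    }

    // Walk only top-level declarations, reporting exported names with their
    // declaring keyword: roughly what a quick parse can offer when type
    // information for the package is unavailable.
    for _, d := range f.Decls {
        switch d := d.(type) {
        case *ast.FuncDecl:
            if d.Recv == nil && d.Name.IsExported() {
                fmt.Println("func", d.Name.Name)
            }
        case *ast.GenDecl:
            for _, spec := range d.Specs {
                switch s := spec.(type) {
                case *ast.TypeSpec:
                    if s.Name.IsExported() {
                        fmt.Println(d.Tok, s.Name.Name)
                    }
                case *ast.ValueSpec:
                    for _, n := range s.Names {
                        if n.IsExported() {
                            fmt.Println(d.Tok, n.Name)
                        }
                    }
                }
            }
        }
    }
}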
- item := CompletionItem{ - Label: id.Name, - Detail: fmt.Sprintf("%s (from %q)", strings.ToLower(tok.String()), mp.PkgPath), - InsertText: id.Name, - Score: float64(score) * unimportedScore(relevances[path]), - } - switch tok { - case token.FUNC: - item.Kind = protocol.FunctionCompletion - case token.VAR: - item.Kind = protocol.VariableCompletion - case token.CONST: - item.Kind = protocol.ConstantCompletion - case token.TYPE: - // Without types, we can't distinguish Class from Interface. - item.Kind = protocol.ClassCompletion - } - - if needImport { - imp := &importInfo{importPath: path} - if imports.ImportPathToAssumedName(path) != string(mp.Name) { - imp.name = string(mp.Name) - } - item.AdditionalTextEdits, _ = c.importEdits(imp) - } - - // For functions, add a parameter snippet. - if fn != nil { - paramList := func(list *ast.FieldList) []string { - var params []string - if list != nil { - var cfg printer.Config // slight overkill - param := func(name string, typ ast.Expr) { - var buf strings.Builder - buf.WriteString(name) - buf.WriteByte(' ') - cfg.Fprint(&buf, token.NewFileSet(), typ) - params = append(params, buf.String()) - } - - for _, field := range list.List { - if field.Names != nil { - for _, name := range field.Names { - param(name.Name, field.Type) - } - } else { - param("_", field.Type) - } - } - } - return params - } - - // Ideally we would eliminate the suffix of type - // parameters that are redundant with inference - // from the argument types (#51783), but it's - // quite fiddly to do using syntax alone. - // (See inferableTypeParams in format.go.) - tparams := paramList(fn.Type.TypeParams) - params := paramList(fn.Type.Params) - var sn snippet.Builder - c.functionCallSnippet(id.Name, tparams, params, &sn) - item.snippet = &sn - } - - cMu.Lock() - c.items = append(c.items, item) - if len(c.items) >= unimportedMemberTarget { - atomic.StoreInt32(&enough, 1) - } - cMu.Unlock() - }) - return nil - } - - // Extract the package-level candidates using a quick parse. - var g errgroup.Group - for _, path := range paths { - mp := known[golang.PackagePath(path)] - for _, uri := range mp.CompiledGoFiles { - uri := uri - g.Go(func() error { - return quickParse(uri, mp) - }) - } - } - if err := g.Wait(); err != nil { - return err - } - - // In addition, we search in the module cache using goimports. - ctx, cancel := context.WithCancel(ctx) - var mu sync.Mutex - add := func(pkgExport imports.PackageExport) { - if ignoreUnimportedCompletion(pkgExport.Fix) { - return - } - - mu.Lock() - defer mu.Unlock() - // TODO(adonovan): what if the actual package has a vendor/ prefix? - if _, ok := known[golang.PackagePath(pkgExport.Fix.StmtInfo.ImportPath)]; ok { - return // We got this one above. - } - - // Continue with untyped proposals. 
- pkg := types.NewPackage(pkgExport.Fix.StmtInfo.ImportPath, pkgExport.Fix.IdentName) - for _, export := range pkgExport.Exports { - score := unimportedScore(pkgExport.Fix.Relevance) - c.deepState.enqueue(candidate{ - obj: types.NewVar(0, pkg, export, nil), - score: score, - imp: &importInfo{ - importPath: pkgExport.Fix.StmtInfo.ImportPath, - name: pkgExport.Fix.StmtInfo.Name, - }, - }) - } - if len(c.items) >= unimportedMemberTarget { - cancel() - } - } - - c.completionCallbacks = append(c.completionCallbacks, func(ctx context.Context, opts *imports.Options) error { - defer cancel() - return imports.GetPackageExports(ctx, add, id.Name, c.filename, c.pkg.GetTypes().Name(), opts.Env) - }) - return nil -} - -// unimportedScore returns a score for an unimported package that is generally -// lower than other candidates. -func unimportedScore(relevance float64) float64 { - return (stdScore + .1*relevance) / 2 -} - -func (c *completer) packageMembers(pkg *types.Package, score float64, imp *importInfo, cb func(candidate)) { - scope := pkg.Scope() - for _, name := range scope.Names() { - obj := scope.Lookup(name) - cb(candidate{ - obj: obj, - score: score, - imp: imp, - addressable: isVar(obj), - }) - } -} - -// ignoreUnimportedCompletion reports whether an unimported completion -// resulting in the given import should be ignored. -func ignoreUnimportedCompletion(fix *imports.ImportFix) bool { - // golang/go#60062: don't add unimported completion to golang.org/toolchain. - return fix != nil && strings.HasPrefix(fix.StmtInfo.ImportPath, "golang.org/toolchain") -} - -func (c *completer) methodsAndFields(typ types.Type, addressable bool, imp *importInfo, cb func(candidate)) { - mset := c.methodSetCache[methodSetKey{typ, addressable}] - if mset == nil { - if addressable && !types.IsInterface(typ) && !isPointer(typ) { - // Add methods of *T, which includes methods with receiver T. - mset = types.NewMethodSet(types.NewPointer(typ)) - } else { - // Add methods of T. - mset = types.NewMethodSet(typ) - } - c.methodSetCache[methodSetKey{typ, addressable}] = mset - } - - if isStarTestingDotF(typ) && addressable { - // is that a sufficient test? (or is more care needed?) - if c.fuzz(mset, imp, cb) { - return - } - } - - for i := 0; i < mset.Len(); i++ { - cb(candidate{ - obj: mset.At(i).Obj(), - score: stdScore, - imp: imp, - addressable: addressable || isPointer(typ), - }) - } - - // Add fields of T. - eachField(typ, func(v *types.Var) { - cb(candidate{ - obj: v, - score: stdScore - 0.01, - imp: imp, - addressable: addressable || isPointer(typ), - }) - }) -} - -// isStarTestingDotF reports whether typ is *testing.F. -func isStarTestingDotF(typ types.Type) bool { - ptr, _ := typ.(*types.Pointer) - if ptr == nil { - return false - } - named, _ := ptr.Elem().(*types.Named) - if named == nil { - return false - } - obj := named.Obj() - // obj.Pkg is nil for the error type. - return obj != nil && obj.Pkg() != nil && obj.Pkg().Path() == "testing" && obj.Name() == "F" -} - -// lexical finds completions in the lexical environment. -func (c *completer) lexical(ctx context.Context) error { - var ( - builtinIota = types.Universe.Lookup("iota") - builtinNil = types.Universe.Lookup("nil") - - // TODO(rfindley): only allow "comparable" where it is valid (in constraint - // position or embedded in interface declarations). - // builtinComparable = types.Universe.Lookup("comparable") - ) - - // Track seen variables to avoid showing completions for shadowed variables. 
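The innermost-first resolution that this shadowing check depends on can be demonstrated directly with go/types; the source snippet below is invented:

package main

import (
    "fmt"
    "go/ast"
    "go/parser"
    "go/token"
    "go/types"
)

func main() {
    src := `package p

var x = 1

func f() {
	x := "shadowed"
	_ = x
}
`
    fset := token.NewFileSet()
    f, err := parser.ParseFile(fset, "p.go", src, 0)
    if err != nil {
        panic(err)
    }

    conf := types.Config{}
    pkg, err := conf.Check("p", fset, []*ast.File{f}, nil)
    if err != nil {
        panic(err)
    }

    // A position inside f's body, after the short variable declaration.
    pos := f.Decls[1].(*ast.FuncDecl).Body.Rbrace

    // LookupParent resolves the name from the innermost scope outward, so
    // the local string x wins over the package-level int x.
    _, obj := pkg.Scope().Innermost(pos).LookupParent("x", pos)
    fmt.Println(obj.Type()) // string
}

This is why the seen map only needs to record the first (innermost) hit for each name as the scopes are walked outward.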
- // This works since we look at scopes from innermost to outermost. - seen := make(map[string]struct{}) - - // Process scopes innermost first. - for i, scope := range c.scopes { - if scope == nil { - continue - } - - Names: - for _, name := range scope.Names() { - declScope, obj := scope.LookupParent(name, c.pos) - if declScope != scope { - continue // Name was declared in some enclosing scope, or not at all. - } - - // If obj's type is invalid, find the AST node that defines the lexical block - // containing the declaration of obj. Don't resolve types for packages. - if !isPkgName(obj) && !typeIsValid(obj.Type()) { - // Match the scope to its ast.Node. If the scope is the package scope, - // use the *ast.File as the starting node. - var node ast.Node - if i < len(c.path) { - node = c.path[i] - } else if i == len(c.path) { // use the *ast.File for package scope - node = c.path[i-1] - } - if node != nil { - if resolved := resolveInvalid(c.pkg.FileSet(), obj, node, c.pkg.GetTypesInfo()); resolved != nil { - obj = resolved - } - } - } - - // Don't use LHS of decl in RHS. - for _, ident := range enclosingDeclLHS(c.path) { - if obj.Pos() == ident.Pos() { - continue Names - } - } - - // Don't suggest "iota" outside of const decls. - if obj == builtinIota && !c.inConstDecl() { - continue - } - - // Rank outer scopes lower than inner. - score := stdScore * math.Pow(.99, float64(i)) - - // Dowrank "nil" a bit so it is ranked below more interesting candidates. - if obj == builtinNil { - score /= 2 - } - - // If we haven't already added a candidate for an object with this name. - if _, ok := seen[obj.Name()]; !ok { - seen[obj.Name()] = struct{}{} - c.deepState.enqueue(candidate{ - obj: obj, - score: score, - addressable: isVar(obj), - }) - } - } - } - - if c.inference.objType != nil { - if named, _ := golang.Deref(c.inference.objType).(*types.Named); named != nil { - // If we expected a named type, check the type's package for - // completion items. This is useful when the current file hasn't - // imported the type's package yet. - - if named.Obj() != nil && named.Obj().Pkg() != nil { - pkg := named.Obj().Pkg() - - // Make sure the package name isn't already in use by another - // object, and that this file doesn't import the package yet. - // TODO(adonovan): what if pkg.Path has vendor/ prefix? - if _, ok := seen[pkg.Name()]; !ok && pkg != c.pkg.GetTypes() && !alreadyImports(c.file, golang.ImportPath(pkg.Path())) { - seen[pkg.Name()] = struct{}{} - obj := types.NewPkgName(0, nil, pkg.Name(), pkg) - imp := &importInfo{ - importPath: pkg.Path(), - } - if imports.ImportPathToAssumedName(pkg.Path()) != pkg.Name() { - imp.name = pkg.Name() - } - c.deepState.enqueue(candidate{ - obj: obj, - score: stdScore, - imp: imp, - }) - } - } - } - } - - if c.opts.unimported { - if err := c.unimportedPackages(ctx, seen); err != nil { - return err - } - } - - if c.inference.typeName.isTypeParam { - // If we are completing a type param, offer each structural type. - // This ensures we suggest "[]int" and "[]float64" for a constraint - // with type union "[]int | []float64". - if t, _ := c.inference.objType.(*types.Interface); t != nil { - terms, _ := typeparams.InterfaceTermSet(t) - for _, term := range terms { - c.injectType(ctx, term.Type()) - } - } - } else { - c.injectType(ctx, c.inference.objType) - } - - // Add keyword completion items appropriate in the current context. - c.addKeywordCompletions() - - return nil -} - -// injectType manufactures candidates based on the given type. 
This is -// intended for types not discoverable via lexical search, such as -// composite and/or generic types. For example, if the type is "[]int", -// this method makes sure you get candidates "[]int{}" and "[]int" -// (the latter applies when completing a type name). -func (c *completer) injectType(ctx context.Context, t types.Type) { - if t == nil { - return - } - - t = golang.Deref(t) - - // If we have an expected type and it is _not_ a named type, handle - // it specially. Non-named types like "[]int" will never be - // considered via a lexical search, so we need to directly inject - // them. Also allow generic types since lexical search does not - // infer instantiated versions of them. - if named, _ := t.(*types.Named); named == nil || named.TypeParams().Len() > 0 { - // If our expected type is "[]int", this will add a literal - // candidate of "[]int{}". - c.literal(ctx, t, nil) - - if _, isBasic := t.(*types.Basic); !isBasic { - // If we expect a non-basic type name (e.g. "[]int"), hack up - // a named type whose name is literally "[]int". This allows - // us to reuse our object based completion machinery. - fakeNamedType := candidate{ - obj: types.NewTypeName(token.NoPos, nil, types.TypeString(t, c.qf), t), - score: stdScore, - } - // Make sure the type name matches before considering - // candidate. This cuts down on useless candidates. - if c.matchingTypeName(&fakeNamedType) { - c.deepState.enqueue(fakeNamedType) - } - } - } -} - -func (c *completer) unimportedPackages(ctx context.Context, seen map[string]struct{}) error { - var prefix string - if c.surrounding != nil { - prefix = c.surrounding.Prefix() - } - - // Don't suggest unimported packages if we have absolutely nothing - // to go on. - if prefix == "" { - return nil - } - - count := 0 - - // Search the forward transitive closure of the workspace. - all, err := c.snapshot.AllMetadata(ctx) - if err != nil { - return err - } - pkgNameByPath := make(map[golang.PackagePath]string) - var paths []string // actually PackagePaths - for _, mp := range all { - if mp.ForTest != "" { - continue // skip all test variants - } - if mp.Name == "main" { - continue // main is non-importable - } - if !strings.HasPrefix(string(mp.Name), prefix) { - continue // not a match - } - paths = append(paths, string(mp.PkgPath)) - pkgNameByPath[mp.PkgPath] = string(mp.Name) - } - - // Rank candidates using goimports' algorithm. - var relevances map[string]float64 - if len(paths) != 0 { - if err := c.snapshot.RunProcessEnvFunc(ctx, func(ctx context.Context, opts *imports.Options) error { - var err error - relevances, err = imports.ScoreImportPaths(ctx, opts.Env, paths) - return err - }); err != nil { - return err - } - } - sort.Slice(paths, func(i, j int) bool { - if relevances[paths[i]] != relevances[paths[j]] { - return relevances[paths[i]] > relevances[paths[j]] - } - - // Fall back to lexical sort to keep truncated set of candidates - // in a consistent order. - return paths[i] < paths[j] - }) - - for _, path := range paths { - name := pkgNameByPath[golang.PackagePath(path)] - if _, ok := seen[name]; ok { - continue - } - imp := &importInfo{ - importPath: path, - } - if imports.ImportPathToAssumedName(path) != name { - imp.name = name - } - if count >= maxUnimportedPackageNames { - return nil - } - c.deepState.enqueue(candidate{ - // Pass an empty *types.Package to disable deep completions. 
- obj: types.NewPkgName(0, nil, name, types.NewPackage(path, name)), - score: unimportedScore(relevances[path]), - imp: imp, - }) - count++ - } - - var mu sync.Mutex - add := func(pkg imports.ImportFix) { - if ignoreUnimportedCompletion(&pkg) { - return - } - mu.Lock() - defer mu.Unlock() - if _, ok := seen[pkg.IdentName]; ok { - return - } - if _, ok := relevances[pkg.StmtInfo.ImportPath]; ok { - return - } - - if count >= maxUnimportedPackageNames { - return - } - - // Do not add the unimported packages to seen, since we can have - // multiple packages of the same name as completion suggestions, since - // only one will be chosen. - obj := types.NewPkgName(0, nil, pkg.IdentName, types.NewPackage(pkg.StmtInfo.ImportPath, pkg.IdentName)) - c.deepState.enqueue(candidate{ - obj: obj, - score: unimportedScore(pkg.Relevance), - imp: &importInfo{ - importPath: pkg.StmtInfo.ImportPath, - name: pkg.StmtInfo.Name, - }, - }) - count++ - } - - c.completionCallbacks = append(c.completionCallbacks, func(ctx context.Context, opts *imports.Options) error { - return imports.GetAllCandidates(ctx, add, prefix, c.filename, c.pkg.GetTypes().Name(), opts.Env) - }) - - return nil -} - -// alreadyImports reports whether f has an import with the specified path. -func alreadyImports(f *ast.File, path golang.ImportPath) bool { - for _, s := range f.Imports { - if metadata.UnquoteImportPath(s) == path { - return true - } - } - return false -} - -func (c *completer) inConstDecl() bool { - for _, n := range c.path { - if decl, ok := n.(*ast.GenDecl); ok && decl.Tok == token.CONST { - return true - } - } - return false -} - -// structLiteralFieldName finds completions for struct field names inside a struct literal. -func (c *completer) structLiteralFieldName(ctx context.Context) error { - clInfo := c.enclosingCompositeLiteral - - // Mark fields of the composite literal that have already been set, - // except for the current field. - addedFields := make(map[*types.Var]bool) - for _, el := range clInfo.cl.Elts { - if kvExpr, ok := el.(*ast.KeyValueExpr); ok { - if clInfo.kv == kvExpr { - continue - } - - if key, ok := kvExpr.Key.(*ast.Ident); ok { - if used, ok := c.pkg.GetTypesInfo().Uses[key]; ok { - if usedVar, ok := used.(*types.Var); ok { - addedFields[usedVar] = true - } - } - } - } - } - - deltaScore := 0.0001 - switch t := clInfo.clType.(type) { - case *types.Struct: - for i := 0; i < t.NumFields(); i++ { - field := t.Field(i) - if !addedFields[field] { - c.deepState.enqueue(candidate{ - obj: field, - score: highScore - float64(i)*deltaScore, - }) - } - } - - // Add lexical completions if we aren't certain we are in the key part of a - // key-value pair. - if clInfo.maybeInFieldName { - return c.lexical(ctx) - } - default: - return c.lexical(ctx) - } - - return nil -} - -func (cl *compLitInfo) isStruct() bool { - _, ok := cl.clType.(*types.Struct) - return ok -} - -// enclosingCompositeLiteral returns information about the composite literal enclosing the -// position. -func enclosingCompositeLiteral(path []ast.Node, pos token.Pos, info *types.Info) *compLitInfo { - for _, n := range path { - switch n := n.(type) { - case *ast.CompositeLit: - // The enclosing node will be a composite literal if the user has just - // opened the curly brace (e.g. &x{<>) or the completion request is triggered - // from an already completed composite literal expression (e.g. &x{foo: 1, <>}) - // - // The position is not part of the composite literal unless it falls within the - // curly braces (e.g. "foo.Foo<>Struct{}"). 
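The alreadyImports check above boils down to comparing unquoted import paths. A self-contained approximation using only the standard library (the real code goes through metadata.UnquoteImportPath; plain strconv.Unquote stands in for it here, and the source string is invented):

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"strconv"
)

const src = `package p

import (
	"fmt"
	zz "strings"
)
`

// alreadyImports reports whether f has an import with the given path,
// regardless of any local rename.
func alreadyImports(f *ast.File, path string) bool {
	for _, s := range f.Imports {
		if p, err := strconv.Unquote(s.Path.Value); err == nil && p == path {
			return true
		}
	}
	return false
}

func main() {
	f, err := parser.ParseFile(token.NewFileSet(), "p.go", src, parser.ImportsOnly)
	if err != nil {
		panic(err)
	}
	fmt.Println(alreadyImports(f, "strings")) // true, even though it is renamed to zz
	fmt.Println(alreadyImports(f, "os"))      // false
}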
- if !(n.Lbrace < pos && pos <= n.Rbrace) { - // Keep searching since we may yet be inside a composite literal. - // For example "Foo{B: Ba<>{}}". - break - } - - tv, ok := info.Types[n] - if !ok { - return nil - } - - clInfo := compLitInfo{ - cl: n, - clType: golang.Deref(tv.Type).Underlying(), - } - - var ( - expr ast.Expr - hasKeys bool - ) - for _, el := range n.Elts { - // Remember the expression that the position falls in, if any. - if el.Pos() <= pos && pos <= el.End() { - expr = el - } - - if kv, ok := el.(*ast.KeyValueExpr); ok { - hasKeys = true - // If expr == el then we know the position falls in this expression, - // so also record kv as the enclosing *ast.KeyValueExpr. - if expr == el { - clInfo.kv = kv - break - } - } - } - - if clInfo.kv != nil { - // If in a *ast.KeyValueExpr, we know we are in the key if the position - // is to the left of the colon (e.g. "Foo{F<>: V}". - clInfo.inKey = pos <= clInfo.kv.Colon - } else if hasKeys { - // If we aren't in a *ast.KeyValueExpr but the composite literal has - // other *ast.KeyValueExprs, we must be on the key side of a new - // *ast.KeyValueExpr (e.g. "Foo{F: V, <>}"). - clInfo.inKey = true - } else { - switch clInfo.clType.(type) { - case *types.Struct: - if len(n.Elts) == 0 { - // If the struct literal is empty, next could be a struct field - // name or an expression (e.g. "Foo{<>}" could become "Foo{F:}" - // or "Foo{someVar}"). - clInfo.maybeInFieldName = true - } else if len(n.Elts) == 1 { - // If there is one expression and the position is in that expression - // and the expression is an identifier, we may be writing a field - // name or an expression (e.g. "Foo{F<>}"). - _, clInfo.maybeInFieldName = expr.(*ast.Ident) - } - case *types.Map: - // If we aren't in a *ast.KeyValueExpr we must be adding a new key - // to the map. - clInfo.inKey = true - } - } - - return &clInfo - default: - if breaksExpectedTypeInference(n, pos) { - return nil - } - } - } - - return nil -} - -// enclosingFunction returns the signature and body of the function -// enclosing the given position. -func enclosingFunction(path []ast.Node, info *types.Info) *funcInfo { - for _, node := range path { - switch t := node.(type) { - case *ast.FuncDecl: - if obj, ok := info.Defs[t.Name]; ok { - return &funcInfo{ - sig: obj.Type().(*types.Signature), - body: t.Body, - } - } - case *ast.FuncLit: - if typ, ok := info.Types[t]; ok { - if sig, _ := typ.Type.(*types.Signature); sig == nil { - // golang/go#49397: it should not be possible, but we somehow arrived - // here with a non-signature type, most likely due to AST mangling - // such that node.Type is not a FuncType. - return nil - } - return &funcInfo{ - sig: typ.Type.(*types.Signature), - body: t.Body, - } - } - } - } - return nil -} - -func (c *completer) expectedCompositeLiteralType() types.Type { - clInfo := c.enclosingCompositeLiteral - switch t := clInfo.clType.(type) { - case *types.Slice: - if clInfo.inKey { - return types.Typ[types.UntypedInt] - } - return t.Elem() - case *types.Array: - if clInfo.inKey { - return types.Typ[types.UntypedInt] - } - return t.Elem() - case *types.Map: - if clInfo.inKey { - return t.Key() - } - return t.Elem() - case *types.Struct: - // If we are completing a key (i.e. field name), there is no expected type. - if clInfo.inKey { - return nil - } - - // If we are in a key-value pair, but not in the key, then we must be on the - // value side. The expected type of the value will be determined from the key. 
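The brace test used by enclosingCompositeLiteral above is purely positional. A small syntactic sketch with parser.ParseExpr, no type information involved (the literal text is invented):

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
)

func main() {
	expr, err := parser.ParseExpr("T{A: 1, B: 2}")
	if err != nil {
		panic(err)
	}
	cl := expr.(*ast.CompositeLit)

	// A position counts as "inside" the literal only between the braces.
	inLiteral := func(pos token.Pos) bool { return cl.Lbrace < pos && pos <= cl.Rbrace }
	fmt.Println(inLiteral(cl.Type.End())) // false: still on the type name
	fmt.Println(inLiteral(cl.Lbrace + 1)) // true: just past "{"

	// Keys that already appear mark struct fields as used.
	for _, el := range cl.Elts {
		if kv, ok := el.(*ast.KeyValueExpr); ok {
			fmt.Println("key already present:", kv.Key.(*ast.Ident).Name)
		}
	}
}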
- if clInfo.kv != nil { - if key, ok := clInfo.kv.Key.(*ast.Ident); ok { - for i := 0; i < t.NumFields(); i++ { - if field := t.Field(i); field.Name() == key.Name { - return field.Type() - } - } - } - } else { - // If we aren't in a key-value pair and aren't in the key, we must be using - // implicit field names. - - // The order of the literal fields must match the order in the struct definition. - // Find the element that the position belongs to and suggest that field's type. - if i := exprAtPos(c.pos, clInfo.cl.Elts); i < t.NumFields() { - return t.Field(i).Type() - } - } - } - return nil -} - -// typeMod represents an operator that changes the expected type. -type typeMod struct { - mod typeModKind - arrayLen int64 -} - -type typeModKind int - -const ( - dereference typeModKind = iota // pointer indirection: "*" - reference // adds level of pointer: "&" for values, "*" for type names - chanRead // channel read operator: "<-" - sliceType // make a slice type: "[]" in "[]int" - arrayType // make an array type: "[2]" in "[2]int" - invoke // make a function call: "()" in "foo()" - takeSlice // take slice of array: "[:]" in "foo[:]" - takeDotDotDot // turn slice into variadic args: "..." in "foo..." - index // index into slice/array: "[0]" in "foo[0]" -) - -type objKind int - -const ( - kindAny objKind = 0 - kindArray objKind = 1 << iota - kindSlice - kindChan - kindMap - kindStruct - kindString - kindInt - kindBool - kindBytes - kindPtr - kindFloat - kindComplex - kindError - kindStringer - kindFunc -) - -// penalizedObj represents an object that should be disfavored as a -// completion candidate. -type penalizedObj struct { - // objChain is the full "chain", e.g. "foo.bar().baz" becomes - // []types.Object{foo, bar, baz}. - objChain []types.Object - // penalty is score penalty in the range (0, 1). - penalty float64 -} - -// candidateInference holds information we have inferred about a type that can be -// used at the current position. -type candidateInference struct { - // objType is the desired type of an object used at the query position. - objType types.Type - - // objKind is a mask of expected kinds of types such as "map", "slice", etc. - objKind objKind - - // variadic is true if we are completing the initial variadic - // parameter. For example: - // append([]T{}, <>) // objType=T variadic=true - // append([]T{}, T{}, <>) // objType=T variadic=false - variadic bool - - // modifiers are prefixes such as "*", "&" or "<-" that influence how - // a candidate type relates to the expected type. - modifiers []typeMod - - // convertibleTo is a type our candidate type must be convertible to. - convertibleTo types.Type - - // typeName holds information about the expected type name at - // position, if any. - typeName typeNameInference - - // assignees are the types that would receive a function call's - // results at the position. For example: - // - // foo := 123 - // foo, bar := <> - // - // at "<>", the assignees are [int, ]. - assignees []types.Type - - // variadicAssignees is true if we could be completing an inner - // function call that fills out an outer function call's variadic - // params. For example: - // - // func foo(int, ...string) {} - // - // foo(<>) // variadicAssignees=true - // foo(bar<>) // variadicAssignees=true - // foo(bar, baz<>) // variadicAssignees=false - variadicAssignees bool - - // penalized holds expressions that should be disfavored as - // candidates. For example, it tracks expressions already used in a - // switch statement's other cases. 
Each expression is tracked using - // its entire object "chain" allowing differentiation between - // "a.foo" and "b.foo" when "a" and "b" are the same type. - penalized []penalizedObj - - // objChain contains the chain of objects representing the - // surrounding *ast.SelectorExpr. For example, if we are completing - // "foo.bar.ba<>", objChain will contain []types.Object{foo, bar}. - objChain []types.Object -} - -// typeNameInference holds information about the expected type name at -// position. -type typeNameInference struct { - // wantTypeName is true if we expect the name of a type. - wantTypeName bool - - // modifiers are prefixes such as "*", "&" or "<-" that influence how - // a candidate type relates to the expected type. - modifiers []typeMod - - // assertableFrom is a type that must be assertable to our candidate type. - assertableFrom types.Type - - // wantComparable is true if we want a comparable type. - wantComparable bool - - // seenTypeSwitchCases tracks types that have already been used by - // the containing type switch. - seenTypeSwitchCases []types.Type - - // compLitType is true if we are completing a composite literal type - // name, e.g "foo<>{}". - compLitType bool - - // isTypeParam is true if we are completing a type instantiation parameter - isTypeParam bool -} - -// expectedCandidate returns information about the expected candidate -// for an expression at the query position. -func expectedCandidate(ctx context.Context, c *completer) (inf candidateInference) { - inf.typeName = expectTypeName(c) - - if c.enclosingCompositeLiteral != nil { - inf.objType = c.expectedCompositeLiteralType() - } - -Nodes: - for i, node := range c.path { - switch node := node.(type) { - case *ast.BinaryExpr: - // Determine if query position comes from left or right of op. - e := node.X - if c.pos < node.OpPos { - e = node.Y - } - if tv, ok := c.pkg.GetTypesInfo().Types[e]; ok { - switch node.Op { - case token.LAND, token.LOR: - // Don't infer "bool" type for "&&" or "||". Often you want - // to compose a boolean expression from non-boolean - // candidates. - default: - inf.objType = tv.Type - } - break Nodes - } - case *ast.AssignStmt: - // Only rank completions if you are on the right side of the token. - if c.pos > node.TokPos { - i := exprAtPos(c.pos, node.Rhs) - if i >= len(node.Lhs) { - i = len(node.Lhs) - 1 - } - if tv, ok := c.pkg.GetTypesInfo().Types[node.Lhs[i]]; ok { - inf.objType = tv.Type - } - - // If we have a single expression on the RHS, record the LHS - // assignees so we can favor multi-return function calls with - // matching result values. - if len(node.Rhs) <= 1 { - for _, lhs := range node.Lhs { - inf.assignees = append(inf.assignees, c.pkg.GetTypesInfo().TypeOf(lhs)) - } - } else { - // Otherwise, record our single assignee, even if its type is - // not available. We use this info to downrank functions - // with the wrong number of result values. - inf.assignees = append(inf.assignees, c.pkg.GetTypesInfo().TypeOf(node.Lhs[i])) - } - } - return inf - case *ast.ValueSpec: - if node.Type != nil && c.pos > node.Type.End() { - inf.objType = c.pkg.GetTypesInfo().TypeOf(node.Type) - } - return inf - case *ast.CallExpr: - // Only consider CallExpr args if position falls between parens. - if node.Lparen < c.pos && c.pos <= node.Rparen { - // For type conversions like "int64(foo)" we can only infer our - // desired type is convertible to int64. 
- if typ := typeConversion(node, c.pkg.GetTypesInfo()); typ != nil { - inf.convertibleTo = typ - break Nodes - } - - sig, _ := c.pkg.GetTypesInfo().Types[node.Fun].Type.(*types.Signature) - - if sig != nil && sig.TypeParams().Len() > 0 { - // If we are completing a generic func call, re-check the call expression. - // This allows type param inference to work in cases like: - // - // func foo[T any](T) {} - // foo[int](<>) // <- get "int" completions instead of "T" - // - // TODO: remove this after https://go.dev/issue/52503 - info := &types.Info{Types: make(map[ast.Expr]types.TypeAndValue)} - types.CheckExpr(c.pkg.FileSet(), c.pkg.GetTypes(), node.Fun.Pos(), node.Fun, info) - sig, _ = info.Types[node.Fun].Type.(*types.Signature) - } - - if sig != nil { - inf = c.expectedCallParamType(inf, node, sig) - } - - if funIdent, ok := node.Fun.(*ast.Ident); ok { - obj := c.pkg.GetTypesInfo().ObjectOf(funIdent) - - if obj != nil && obj.Parent() == types.Universe { - // Defer call to builtinArgType so we can provide it the - // inferred type from its parent node. - defer func() { - inf = c.builtinArgType(obj, node, inf) - inf.objKind = c.builtinArgKind(ctx, obj, node) - }() - - // The expected type of builtin arguments like append() is - // the expected type of the builtin call itself. For - // example: - // - // var foo []int = append(<>) - // - // To find the expected type at <> we "skip" the append() - // node and get the expected type one level up, which is - // []int. - continue Nodes - } - } - - return inf - } - case *ast.ReturnStmt: - if c.enclosingFunc != nil { - sig := c.enclosingFunc.sig - // Find signature result that corresponds to our return statement. - if resultIdx := exprAtPos(c.pos, node.Results); resultIdx < len(node.Results) { - if resultIdx < sig.Results().Len() { - inf.objType = sig.Results().At(resultIdx).Type() - } - } - } - return inf - case *ast.CaseClause: - if swtch, ok := findSwitchStmt(c.path[i+1:], c.pos, node).(*ast.SwitchStmt); ok { - if tv, ok := c.pkg.GetTypesInfo().Types[swtch.Tag]; ok { - inf.objType = tv.Type - - // Record which objects have already been used in the case - // statements so we don't suggest them again. - for _, cc := range swtch.Body.List { - for _, caseExpr := range cc.(*ast.CaseClause).List { - // Don't record the expression we are currently completing. - if caseExpr.Pos() < c.pos && c.pos <= caseExpr.End() { - continue - } - - if objs := objChain(c.pkg.GetTypesInfo(), caseExpr); len(objs) > 0 { - inf.penalized = append(inf.penalized, penalizedObj{objChain: objs, penalty: 0.1}) - } - } - } - } - } - return inf - case *ast.SliceExpr: - // Make sure position falls within the brackets (e.g. "foo[a:<>]"). - if node.Lbrack < c.pos && c.pos <= node.Rbrack { - inf.objType = types.Typ[types.UntypedInt] - } - return inf - case *ast.IndexExpr: - // Make sure position falls within the brackets (e.g. "foo[<>]"). 
- if node.Lbrack < c.pos && c.pos <= node.Rbrack { - if tv, ok := c.pkg.GetTypesInfo().Types[node.X]; ok { - switch t := tv.Type.Underlying().(type) { - case *types.Map: - inf.objType = t.Key() - case *types.Slice, *types.Array: - inf.objType = types.Typ[types.UntypedInt] - } - - if ct := expectedConstraint(tv.Type, 0); ct != nil { - inf.objType = ct - inf.typeName.wantTypeName = true - inf.typeName.isTypeParam = true - } - } - } - return inf - case *ast.IndexListExpr: - if node.Lbrack < c.pos && c.pos <= node.Rbrack { - if tv, ok := c.pkg.GetTypesInfo().Types[node.X]; ok { - if ct := expectedConstraint(tv.Type, exprAtPos(c.pos, node.Indices)); ct != nil { - inf.objType = ct - inf.typeName.wantTypeName = true - inf.typeName.isTypeParam = true - } - } - } - return inf - case *ast.SendStmt: - // Make sure we are on right side of arrow (e.g. "foo <- <>"). - if c.pos > node.Arrow+1 { - if tv, ok := c.pkg.GetTypesInfo().Types[node.Chan]; ok { - if ch, ok := tv.Type.Underlying().(*types.Chan); ok { - inf.objType = ch.Elem() - } - } - } - return inf - case *ast.RangeStmt: - if goplsastutil.NodeContains(node.X, c.pos) { - inf.objKind |= kindSlice | kindArray | kindMap | kindString - if node.Value == nil { - inf.objKind |= kindChan - } - } - return inf - case *ast.StarExpr: - inf.modifiers = append(inf.modifiers, typeMod{mod: dereference}) - case *ast.UnaryExpr: - switch node.Op { - case token.AND: - inf.modifiers = append(inf.modifiers, typeMod{mod: reference}) - case token.ARROW: - inf.modifiers = append(inf.modifiers, typeMod{mod: chanRead}) - } - case *ast.DeferStmt, *ast.GoStmt: - inf.objKind |= kindFunc - return inf - default: - if breaksExpectedTypeInference(node, c.pos) { - return inf - } - } - } - - return inf -} - -func (c *completer) expectedCallParamType(inf candidateInference, node *ast.CallExpr, sig *types.Signature) candidateInference { - numParams := sig.Params().Len() - if numParams == 0 { - return inf - } - - exprIdx := exprAtPos(c.pos, node.Args) - - // If we have one or zero arg expressions, we may be - // completing to a function call that returns multiple - // values, in turn getting passed in to the surrounding - // call. Record the assignees so we can favor function - // calls that return matching values. - if len(node.Args) <= 1 && exprIdx == 0 { - for i := 0; i < sig.Params().Len(); i++ { - inf.assignees = append(inf.assignees, sig.Params().At(i).Type()) - } - - // Record that we may be completing into variadic parameters. - inf.variadicAssignees = sig.Variadic() - } - - // Make sure not to run past the end of expected parameters. - if exprIdx >= numParams { - inf.objType = sig.Params().At(numParams - 1).Type() - } else { - inf.objType = sig.Params().At(exprIdx).Type() - } - - if sig.Variadic() && exprIdx >= (numParams-1) { - // If we are completing a variadic param, deslice the variadic type. - inf.objType = deslice(inf.objType) - // Record whether we are completing the initial variadic param. - inf.variadic = exprIdx == numParams-1 && len(node.Args) <= numParams - - // Check if we can infer object kind from printf verb. - inf.objKind |= printfArgKind(c.pkg.GetTypesInfo(), node, exprIdx) - } - - // If our expected type is an uninstantiated generic type param, - // swap to the constraint which will do a decent job filtering - // candidates. 
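For the IndexExpr and SendStmt cases above, the expected type comes from the container's type rather than from anything at the cursor. The relationship is easy to see with the go/types constructors alone:

package main

import (
	"fmt"
	"go/types"
)

func main() {
	m := types.NewMap(types.Typ[types.String], types.Typ[types.Int])
	fmt.Println("indexing a", m, "expects the key type:", m.Key())

	s := types.NewSlice(types.Typ[types.Float64])
	fmt.Println("indexing a", s, "expects:", types.Typ[types.UntypedInt])

	ch := types.NewChan(types.SendRecv, types.Typ[types.Bool])
	fmt.Println("sending on a", ch, "expects the element type:", ch.Elem())
}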
- if tp, _ := inf.objType.(*types.TypeParam); tp != nil { - inf.objType = tp.Constraint() - } - - return inf -} - -func expectedConstraint(t types.Type, idx int) types.Type { - var tp *types.TypeParamList - if named, _ := t.(*types.Named); named != nil { - tp = named.TypeParams() - } else if sig, _ := t.Underlying().(*types.Signature); sig != nil { - tp = sig.TypeParams() - } - if tp == nil || idx >= tp.Len() { - return nil - } - return tp.At(idx).Constraint() -} - -// objChain decomposes e into a chain of objects if possible. For -// example, "foo.bar().baz" will yield []types.Object{foo, bar, baz}. -// If any part can't be turned into an object, return nil. -func objChain(info *types.Info, e ast.Expr) []types.Object { - var objs []types.Object - - for e != nil { - switch n := e.(type) { - case *ast.Ident: - obj := info.ObjectOf(n) - if obj == nil { - return nil - } - objs = append(objs, obj) - e = nil - case *ast.SelectorExpr: - obj := info.ObjectOf(n.Sel) - if obj == nil { - return nil - } - objs = append(objs, obj) - e = n.X - case *ast.CallExpr: - if len(n.Args) > 0 { - return nil - } - e = n.Fun - default: - return nil - } - } - - // Reverse order so the layout matches the syntactic order. - for i := 0; i < len(objs)/2; i++ { - objs[i], objs[len(objs)-1-i] = objs[len(objs)-1-i], objs[i] - } - - return objs -} - -// applyTypeModifiers applies the list of type modifiers to a type. -// It returns nil if the modifiers could not be applied. -func (ci candidateInference) applyTypeModifiers(typ types.Type, addressable bool) types.Type { - for _, mod := range ci.modifiers { - switch mod.mod { - case dereference: - // For every "*" indirection operator, remove a pointer layer - // from candidate type. - if ptr, ok := typ.Underlying().(*types.Pointer); ok { - typ = ptr.Elem() - } else { - return nil - } - case reference: - // For every "&" address operator, add another pointer layer to - // candidate type, if the candidate is addressable. - if addressable { - typ = types.NewPointer(typ) - } else { - return nil - } - case chanRead: - // For every "<-" operator, remove a layer of channelness. - if ch, ok := typ.(*types.Chan); ok { - typ = ch.Elem() - } else { - return nil - } - } - } - - return typ -} - -// applyTypeNameModifiers applies the list of type modifiers to a type name. -func (ci candidateInference) applyTypeNameModifiers(typ types.Type) types.Type { - for _, mod := range ci.typeName.modifiers { - switch mod.mod { - case reference: - typ = types.NewPointer(typ) - case arrayType: - typ = types.NewArray(typ, mod.arrayLen) - case sliceType: - typ = types.NewSlice(typ) - } - } - return typ -} - -// matchesVariadic returns true if we are completing a variadic -// parameter and candType is a compatible slice type. -func (ci candidateInference) matchesVariadic(candType types.Type) bool { - return ci.variadic && ci.objType != nil && assignableTo(candType, types.NewSlice(ci.objType)) -} - -// findSwitchStmt returns an *ast.CaseClause's corresponding *ast.SwitchStmt or -// *ast.TypeSwitchStmt. path should start from the case clause's first ancestor. -func findSwitchStmt(path []ast.Node, pos token.Pos, c *ast.CaseClause) ast.Stmt { - // Make sure position falls within a "case <>:" clause. - if exprAtPos(pos, c.List) >= len(c.List) { - return nil - } - // A case clause is always nested within a block statement in a switch statement. 
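objChain above needs type information so that a.foo and b.foo resolve to different objects. A purely syntactic approximation is enough to show the traversal and the reversal step; identChain below is an invented helper, not part of the deleted file:

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
)

// identChain returns the identifier names of a selector/call chain such as
// "foo.bar().baz", outermost first, or nil if the expression is more complex.
func identChain(e ast.Expr) []string {
	var names []string
	for e != nil {
		switch n := e.(type) {
		case *ast.Ident:
			names = append(names, n.Name)
			e = nil
		case *ast.SelectorExpr:
			names = append(names, n.Sel.Name)
			e = n.X
		case *ast.CallExpr:
			if len(n.Args) > 0 {
				return nil // only zero-arg calls keep the chain simple
			}
			e = n.Fun
		default:
			return nil
		}
	}
	// The walk appends from the rightmost selector inwards, so reverse into source order.
	for i, j := 0, len(names)-1; i < j; i, j = i+1, j-1 {
		names[i], names[j] = names[j], names[i]
	}
	return names
}

func main() {
	expr, _ := parser.ParseExpr("foo.bar().baz")
	fmt.Println(identChain(expr)) // [foo bar baz]
}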
- if len(path) < 2 { - return nil - } - if _, ok := path[0].(*ast.BlockStmt); !ok { - return nil - } - switch s := path[1].(type) { - case *ast.SwitchStmt: - return s - case *ast.TypeSwitchStmt: - return s - default: - return nil - } -} - -// breaksExpectedTypeInference reports if an expression node's type is unrelated -// to its child expression node types. For example, "Foo{Bar: x.Baz(<>)}" should -// expect a function argument, not a composite literal value. -func breaksExpectedTypeInference(n ast.Node, pos token.Pos) bool { - switch n := n.(type) { - case *ast.CompositeLit: - // Doesn't break inference if pos is in type name. - // For example: "Foo<>{Bar: 123}" - return n.Type == nil || !goplsastutil.NodeContains(n.Type, pos) - case *ast.CallExpr: - // Doesn't break inference if pos is in func name. - // For example: "Foo<>(123)" - return !goplsastutil.NodeContains(n.Fun, pos) - case *ast.FuncLit, *ast.IndexExpr, *ast.SliceExpr: - return true - default: - return false - } -} - -// expectTypeName returns information about the expected type name at position. -func expectTypeName(c *completer) typeNameInference { - var inf typeNameInference - -Nodes: - for i, p := range c.path { - switch n := p.(type) { - case *ast.FieldList: - // Expect a type name if pos is in a FieldList. This applies to - // FuncType params/results, FuncDecl receiver, StructType, and - // InterfaceType. We don't need to worry about the field name - // because completion bails out early if pos is in an *ast.Ident - // that defines an object. - inf.wantTypeName = true - break Nodes - case *ast.CaseClause: - // Expect type names in type switch case clauses. - if swtch, ok := findSwitchStmt(c.path[i+1:], c.pos, n).(*ast.TypeSwitchStmt); ok { - // The case clause types must be assertable from the type switch parameter. - ast.Inspect(swtch.Assign, func(n ast.Node) bool { - if ta, ok := n.(*ast.TypeAssertExpr); ok { - inf.assertableFrom = c.pkg.GetTypesInfo().TypeOf(ta.X) - return false - } - return true - }) - inf.wantTypeName = true - - // Track the types that have already been used in this - // switch's case statements so we don't recommend them. - for _, e := range swtch.Body.List { - for _, typeExpr := range e.(*ast.CaseClause).List { - // Skip if type expression contains pos. We don't want to - // count it as already used if the user is completing it. - if typeExpr.Pos() < c.pos && c.pos <= typeExpr.End() { - continue - } - - if t := c.pkg.GetTypesInfo().TypeOf(typeExpr); t != nil { - inf.seenTypeSwitchCases = append(inf.seenTypeSwitchCases, t) - } - } - } - - break Nodes - } - return typeNameInference{} - case *ast.TypeAssertExpr: - // Expect type names in type assert expressions. - if n.Lparen < c.pos && c.pos <= n.Rparen { - // The type in parens must be assertable from the expression type. - inf.assertableFrom = c.pkg.GetTypesInfo().TypeOf(n.X) - inf.wantTypeName = true - break Nodes - } - return typeNameInference{} - case *ast.StarExpr: - inf.modifiers = append(inf.modifiers, typeMod{mod: reference}) - case *ast.CompositeLit: - // We want a type name if position is in the "Type" part of a - // composite literal (e.g. "Foo<>{}"). - if n.Type != nil && n.Type.Pos() <= c.pos && c.pos <= n.Type.End() { - inf.wantTypeName = true - inf.compLitType = true - - if i < len(c.path)-1 { - // Track preceding "&" operator. Technically it applies to - // the composite literal and not the type name, but if - // affects our type completion nonetheless. 
- if u, ok := c.path[i+1].(*ast.UnaryExpr); ok && u.Op == token.AND { - inf.modifiers = append(inf.modifiers, typeMod{mod: reference}) - } - } - } - break Nodes - case *ast.ArrayType: - // If we are inside the "Elt" part of an array type, we want a type name. - if n.Elt.Pos() <= c.pos && c.pos <= n.Elt.End() { - inf.wantTypeName = true - if n.Len == nil { - // No "Len" expression means a slice type. - inf.modifiers = append(inf.modifiers, typeMod{mod: sliceType}) - } else { - // Try to get the array type using the constant value of "Len". - tv, ok := c.pkg.GetTypesInfo().Types[n.Len] - if ok && tv.Value != nil && tv.Value.Kind() == constant.Int { - if arrayLen, ok := constant.Int64Val(tv.Value); ok { - inf.modifiers = append(inf.modifiers, typeMod{mod: arrayType, arrayLen: arrayLen}) - } - } - } - - // ArrayTypes can be nested, so keep going if our parent is an - // ArrayType. - if i < len(c.path)-1 { - if _, ok := c.path[i+1].(*ast.ArrayType); ok { - continue Nodes - } - } - - break Nodes - } - case *ast.MapType: - inf.wantTypeName = true - if n.Key != nil { - inf.wantComparable = goplsastutil.NodeContains(n.Key, c.pos) - } else { - // If the key is empty, assume we are completing the key if - // pos is directly after the "map[". - inf.wantComparable = c.pos == n.Pos()+token.Pos(len("map[")) - } - break Nodes - case *ast.ValueSpec: - inf.wantTypeName = n.Type != nil && goplsastutil.NodeContains(n.Type, c.pos) - break Nodes - case *ast.TypeSpec: - inf.wantTypeName = goplsastutil.NodeContains(n.Type, c.pos) - default: - if breaksExpectedTypeInference(p, c.pos) { - return typeNameInference{} - } - } - } - - return inf -} - -func (c *completer) fakeObj(T types.Type) *types.Var { - return types.NewVar(token.NoPos, c.pkg.GetTypes(), "", T) -} - -// derivableTypes iterates types you can derive from t. For example, -// from "foo" we might derive "&foo", and "foo()". -func derivableTypes(t types.Type, addressable bool, f func(t types.Type, addressable bool, mod typeModKind) bool) bool { - switch t := t.Underlying().(type) { - case *types.Signature: - // If t is a func type with a single result, offer the result type. - if t.Results().Len() == 1 && f(t.Results().At(0).Type(), false, invoke) { - return true - } - case *types.Array: - if f(t.Elem(), true, index) { - return true - } - // Try converting array to slice. - if f(types.NewSlice(t.Elem()), false, takeSlice) { - return true - } - case *types.Pointer: - if f(t.Elem(), false, dereference) { - return true - } - case *types.Slice: - if f(t.Elem(), true, index) { - return true - } - case *types.Map: - if f(t.Elem(), false, index) { - return true - } - case *types.Chan: - if f(t.Elem(), false, chanRead) { - return true - } - } - - // Check if c is addressable and a pointer to c matches our type inference. - if addressable && f(types.NewPointer(t), false, reference) { - return true - } - - return false -} - -// anyCandType reports whether f returns true for any candidate type -// derivable from c. It searches up to three levels of type -// modification. For example, given "foo" we could discover "***foo" -// or "*foo()". 
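Each one-step derivation enumerated by derivableTypes above corresponds to a plain go/types accessor or constructor, which is a useful way to read that switch. A minimal sketch (types chosen arbitrarily):

package main

import (
	"fmt"
	"go/token"
	"go/types"
)

func main() {
	intT := types.Typ[types.Int]

	arr := types.NewArray(intT, 4)
	fmt.Println(arr.Elem())                 // int   -> "foo[0]"
	fmt.Println(types.NewSlice(arr.Elem())) // []int -> "foo[:]"

	ptr := types.NewPointer(intT)
	fmt.Println(ptr.Elem()) // int -> "*foo"

	results := types.NewTuple(types.NewVar(token.NoPos, nil, "", intT))
	sig := types.NewSignatureType(nil, nil, nil, nil, results, false)
	fmt.Println(sig.Results().At(0).Type()) // int -> "foo()"
}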
-func (c *candidate) anyCandType(f func(t types.Type, addressable bool) bool) bool { - if c.obj == nil || c.obj.Type() == nil { - return false - } - - const maxDepth = 3 - - var searchTypes func(t types.Type, addressable bool, mods []typeModKind) bool - searchTypes = func(t types.Type, addressable bool, mods []typeModKind) bool { - if f(t, addressable) { - if len(mods) > 0 { - newMods := make([]typeModKind, len(mods)+len(c.mods)) - copy(newMods, mods) - copy(newMods[len(mods):], c.mods) - c.mods = newMods - } - return true - } - - if len(mods) == maxDepth { - return false - } - - return derivableTypes(t, addressable, func(t types.Type, addressable bool, mod typeModKind) bool { - return searchTypes(t, addressable, append(mods, mod)) - }) - } - - return searchTypes(c.obj.Type(), c.addressable, make([]typeModKind, 0, maxDepth)) -} - -// matchingCandidate reports whether cand matches our type inferences. -// It mutates cand's score in certain cases. -func (c *completer) matchingCandidate(cand *candidate) bool { - if c.completionContext.commentCompletion { - return false - } - - // Bail out early if we are completing a field name in a composite literal. - if v, ok := cand.obj.(*types.Var); ok && v.IsField() && c.wantStructFieldCompletions() { - return true - } - - if isTypeName(cand.obj) { - return c.matchingTypeName(cand) - } else if c.wantTypeName() { - // If we want a type, a non-type object never matches. - return false - } - - if c.inference.candTypeMatches(cand) { - return true - } - - candType := cand.obj.Type() - if candType == nil { - return false - } - - if sig, ok := candType.Underlying().(*types.Signature); ok { - if c.inference.assigneesMatch(cand, sig) { - // Invoke the candidate if its results are multi-assignable. - cand.mods = append(cand.mods, invoke) - return true - } - } - - // Default to invoking *types.Func candidates. This is so function - // completions in an empty statement (or other cases with no expected type) - // are invoked by default. - if isFunc(cand.obj) { - cand.mods = append(cand.mods, invoke) - } - - return false -} - -// candTypeMatches reports whether cand makes a good completion -// candidate given the candidate inference. cand's score may be -// mutated to downrank the candidate in certain situations. -func (ci *candidateInference) candTypeMatches(cand *candidate) bool { - var ( - expTypes = make([]types.Type, 0, 2) - variadicType types.Type - ) - if ci.objType != nil { - expTypes = append(expTypes, ci.objType) - - if ci.variadic { - variadicType = types.NewSlice(ci.objType) - expTypes = append(expTypes, variadicType) - } - } - - return cand.anyCandType(func(candType types.Type, addressable bool) bool { - // Take into account any type modifiers on the expected type. - candType = ci.applyTypeModifiers(candType, addressable) - if candType == nil { - return false - } - - if ci.convertibleTo != nil && convertibleTo(candType, ci.convertibleTo) { - return true - } - - for _, expType := range expTypes { - if isEmptyInterface(expType) { - continue - } - - matches := ci.typeMatches(expType, candType) - if !matches { - // If candType doesn't otherwise match, consider if we can - // convert candType directly to expType. - if considerTypeConversion(candType, expType, cand.path) { - cand.convertTo = expType - // Give a major score penalty so we always prefer directly - // assignable candidates, all else equal. 
- cand.score *= 0.5 - return true - } - - continue - } - - if expType == variadicType { - cand.mods = append(cand.mods, takeDotDotDot) - } - - // Lower candidate score for untyped conversions. This avoids - // ranking untyped constants above candidates with an exact type - // match. Don't lower score of builtin constants, e.g. "true". - if isUntyped(candType) && !types.Identical(candType, expType) && cand.obj.Parent() != types.Universe { - // Bigger penalty for deep completions into other packages to - // avoid random constants from other packages popping up all - // the time. - if len(cand.path) > 0 && isPkgName(cand.path[0]) { - cand.score *= 0.5 - } else { - cand.score *= 0.75 - } - } - - return true - } - - // If we don't have a specific expected type, fall back to coarser - // object kind checks. - if ci.objType == nil || isEmptyInterface(ci.objType) { - // If we were able to apply type modifiers to our candidate type, - // count that as a match. For example: - // - // var foo chan int - // <-fo<> - // - // We were able to apply the "<-" type modifier to "foo", so "foo" - // matches. - if len(ci.modifiers) > 0 { - return true - } - - // If we didn't have an exact type match, check if our object kind - // matches. - if ci.kindMatches(candType) { - if ci.objKind == kindFunc { - cand.mods = append(cand.mods, invoke) - } - return true - } - } - - return false - }) -} - -// considerTypeConversion returns true if we should offer a completion -// automatically converting "from" to "to". -func considerTypeConversion(from, to types.Type, path []types.Object) bool { - // Don't offer to convert deep completions from other packages. - // Otherwise there are many random package level consts/vars that - // pop up as candidates all the time. - if len(path) > 0 && isPkgName(path[0]) { - return false - } - - if _, ok := from.(*types.TypeParam); ok { - return false - } - - if !convertibleTo(from, to) { - return false - } - - // Don't offer to convert ints to strings since that probably - // doesn't do what the user wants. - if isBasicKind(from, types.IsInteger) && isBasicKind(to, types.IsString) { - return false - } - - return true -} - -// typeMatches reports whether an object of candType makes a good -// completion candidate given the expected type expType. -func (ci *candidateInference) typeMatches(expType, candType types.Type) bool { - // Handle untyped values specially since AssignableTo gives false negatives - // for them (see https://golang.org/issue/32146). - if candBasic, ok := candType.Underlying().(*types.Basic); ok { - if expBasic, ok := expType.Underlying().(*types.Basic); ok { - // Note that the candidate and/or the expected can be untyped. - // In "fo<> == 100" the expected type is untyped, and the - // candidate could also be an untyped constant. - - // Sort by is_untyped and then by is_int to simplify below logic. - a, b := candBasic.Info(), expBasic.Info() - if a&types.IsUntyped == 0 || (b&types.IsInteger > 0 && b&types.IsUntyped > 0) { - a, b = b, a - } - - // If at least one is untyped... - if a&types.IsUntyped > 0 { - switch { - // Untyped integers are compatible with floats. - case a&types.IsInteger > 0 && b&types.IsFloat > 0: - return true - - // Check if their constant kind (bool|int|float|complex|string) matches. - // This doesn't take into account the constant value, so there will be some - // false positives due to integer sign and overflow. 
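The int-to-string exclusion in considerTypeConversion above is there because the conversion is legal but rarely what anyone completing code wants: converting an integer to a string yields the UTF-8 encoding of that code point, not its decimal text. Both facts can be checked directly:

package main

import (
	"fmt"
	"go/types"
)

func main() {
	intT := types.Typ[types.Int]
	fmt.Println(types.ConvertibleTo(intT, types.Typ[types.Float64])) // true: worth offering
	fmt.Println(types.ConvertibleTo(intT, types.Typ[types.String]))  // also true, but...

	fmt.Println(string(rune(65))) // "A", not "65": why int->string is skipped
}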
- case a&types.IsConstType == b&types.IsConstType: - return true - } - } - } - } - - // AssignableTo covers the case where the types are equal, but also handles - // cases like assigning a concrete type to an interface type. - return assignableTo(candType, expType) -} - -// kindMatches reports whether candType's kind matches our expected -// kind (e.g. slice, map, etc.). -func (ci *candidateInference) kindMatches(candType types.Type) bool { - return ci.objKind > 0 && ci.objKind&candKind(candType) > 0 -} - -// assigneesMatch reports whether an invocation of sig matches the -// number and type of any assignees. -func (ci *candidateInference) assigneesMatch(cand *candidate, sig *types.Signature) bool { - if len(ci.assignees) == 0 { - return false - } - - // Uniresult functions are always usable and are handled by the - // normal, non-assignees type matching logic. - if sig.Results().Len() == 1 { - return false - } - - // Don't prefer completing into func(...interface{}) calls since all - // functions would match. - if ci.variadicAssignees && len(ci.assignees) == 1 && isEmptyInterface(deslice(ci.assignees[0])) { - return false - } - - var numberOfResultsCouldMatch bool - if ci.variadicAssignees { - numberOfResultsCouldMatch = sig.Results().Len() >= len(ci.assignees)-1 - } else { - numberOfResultsCouldMatch = sig.Results().Len() == len(ci.assignees) - } - - // If our signature doesn't return the right number of values, it's - // not a match, so downrank it. For example: - // - // var foo func() (int, int) - // a, b, c := <> // downrank "foo()" since it only returns two values - if !numberOfResultsCouldMatch { - cand.score /= 2 - return false - } - - // If at least one assignee has a valid type, and all valid - // assignees match the corresponding sig result value, the signature - // is a match. - allMatch := false - for i := 0; i < sig.Results().Len(); i++ { - var assignee types.Type - - // If we are completing into variadic parameters, deslice the - // expected variadic type. - if ci.variadicAssignees && i >= len(ci.assignees)-1 { - assignee = ci.assignees[len(ci.assignees)-1] - if elem := deslice(assignee); elem != nil { - assignee = elem - } - } else { - assignee = ci.assignees[i] - } - - if assignee == nil || assignee == types.Typ[types.Invalid] { - continue - } - - allMatch = ci.typeMatches(assignee, sig.Results().At(i).Type()) - if !allMatch { - break - } - } - return allMatch -} - -func (c *completer) matchingTypeName(cand *candidate) bool { - if !c.wantTypeName() { - return false - } - - typeMatches := func(candType types.Type) bool { - // Take into account any type name modifier prefixes. - candType = c.inference.applyTypeNameModifiers(candType) - - if from := c.inference.typeName.assertableFrom; from != nil { - // Don't suggest the starting type in type assertions. For example, - // if "foo" is an io.Writer, don't suggest "foo.(io.Writer)". - if types.Identical(from, candType) { - return false - } - - if intf, ok := from.Underlying().(*types.Interface); ok { - if !types.AssertableTo(intf, candType) { - return false - } - } - } - - if c.inference.typeName.wantComparable && !types.Comparable(candType) { - return false - } - - // Skip this type if it has already been used in another type - // switch case. 
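The flag juggling in typeMatches above is easier to follow with the raw types.BasicInfo values visible; an untyped constant carries both its kind flag and IsUntyped, which is what the swap-and-compare keys on. A small dump:

package main

import (
	"fmt"
	"go/types"
)

func main() {
	show := func(label string, t *types.Basic) {
		info := t.Info()
		fmt.Printf("%-12s untyped=%-5v integer=%-5v float=%v\n",
			label, info&types.IsUntyped > 0, info&types.IsInteger > 0, info&types.IsFloat > 0)
	}
	show("untyped int", types.Typ[types.UntypedInt])
	show("int", types.Typ[types.Int])
	show("float64", types.Typ[types.Float64])
	// The untyped-integer-vs-float pairing is the case the code above
	// accepts explicitly, since an untyped int is usable as a float.
}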
- for _, seen := range c.inference.typeName.seenTypeSwitchCases { - if types.Identical(candType, seen) { - return false - } - } - - // We can expect a type name and have an expected type in cases like: - // - // var foo []int - // foo = []i<> - // - // Where our expected type is "[]int", and we expect a type name. - if c.inference.objType != nil { - return assignableTo(candType, c.inference.objType) - } - - // Default to saying any type name is a match. - return true - } - - t := cand.obj.Type() - - if typeMatches(t) { - return true - } - - if !types.IsInterface(t) && typeMatches(types.NewPointer(t)) { - if c.inference.typeName.compLitType { - // If we are completing a composite literal type as in - // "foo<>{}", to make a pointer we must prepend "&". - cand.mods = append(cand.mods, reference) - } else { - // If we are completing a normal type name such as "foo<>", to - // make a pointer we must prepend "*". - cand.mods = append(cand.mods, dereference) - } - return true - } - - return false -} - -var ( - // "interface { Error() string }" (i.e. error) - errorIntf = types.Universe.Lookup("error").Type().Underlying().(*types.Interface) - - // "interface { String() string }" (i.e. fmt.Stringer) - stringerIntf = types.NewInterfaceType([]*types.Func{ - types.NewFunc(token.NoPos, nil, "String", types.NewSignature( - nil, - nil, - types.NewTuple(types.NewParam(token.NoPos, nil, "", types.Typ[types.String])), - false, - )), - }, nil).Complete() - - byteType = types.Universe.Lookup("byte").Type() -) - -// candKind returns the objKind of candType, if any. -func candKind(candType types.Type) objKind { - var kind objKind - - switch t := candType.Underlying().(type) { - case *types.Array: - kind |= kindArray - if t.Elem() == byteType { - kind |= kindBytes - } - case *types.Slice: - kind |= kindSlice - if t.Elem() == byteType { - kind |= kindBytes - } - case *types.Chan: - kind |= kindChan - case *types.Map: - kind |= kindMap - case *types.Pointer: - kind |= kindPtr - - // Some builtins handle array pointers as arrays, so just report a pointer - // to an array as an array. - if _, isArray := t.Elem().Underlying().(*types.Array); isArray { - kind |= kindArray - } - case *types.Basic: - switch info := t.Info(); { - case info&types.IsString > 0: - kind |= kindString - case info&types.IsInteger > 0: - kind |= kindInt - case info&types.IsFloat > 0: - kind |= kindFloat - case info&types.IsComplex > 0: - kind |= kindComplex - case info&types.IsBoolean > 0: - kind |= kindBool - } - case *types.Signature: - return kindFunc - } - - if types.Implements(candType, errorIntf) { - kind |= kindError - } - - if types.Implements(candType, stringerIntf) { - kind |= kindStringer - } - - return kind -} - -// innermostScope returns the innermost scope for c.pos. -func (c *completer) innermostScope() *types.Scope { - for _, s := range c.scopes { - if s != nil { - return s - } - } - return nil -} - -// isSlice reports whether the object's underlying type is a slice. -func isSlice(obj types.Object) bool { - if obj != nil && obj.Type() != nil { - if _, ok := obj.Type().Underlying().(*types.Slice); ok { - return true - } - } - return false -} - -// forEachPackageMember calls f(tok, id, fn) for each package-level -// TYPE/VAR/CONST/FUNC declaration in the Go source file, based on a -// quick partial parse. fn is non-nil only for function declarations. -// The AST position information is garbage. 
-func forEachPackageMember(content []byte, f func(tok token.Token, id *ast.Ident, fn *ast.FuncDecl)) { - purged := goplsastutil.PurgeFuncBodies(content) - file, _ := parser.ParseFile(token.NewFileSet(), "", purged, 0) - for _, decl := range file.Decls { - switch decl := decl.(type) { - case *ast.GenDecl: - for _, spec := range decl.Specs { - switch spec := spec.(type) { - case *ast.ValueSpec: // var/const - for _, id := range spec.Names { - f(decl.Tok, id, nil) - } - case *ast.TypeSpec: - f(decl.Tok, spec.Name, nil) - } - } - case *ast.FuncDecl: - if decl.Recv == nil { - f(token.FUNC, decl.Name, decl) - } - } - } -} diff --git a/internal/golangorgx/gopls/golang/completion/deep_completion.go b/internal/golangorgx/gopls/golang/completion/deep_completion.go deleted file mode 100644 index 9219b15b7c9..00000000000 --- a/internal/golangorgx/gopls/golang/completion/deep_completion.go +++ /dev/null @@ -1,371 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package completion - -import ( - "context" - "go/types" - "strings" - "time" -) - -// MaxDeepCompletions limits deep completion results because in most cases -// there are too many to be useful. -const MaxDeepCompletions = 3 - -// deepCompletionState stores our state as we search for deep completions. -// "deep completion" refers to searching into objects' fields and methods to -// find more completion candidates. -type deepCompletionState struct { - // enabled indicates whether deep completion is permitted. - enabled bool - - // queueClosed is used to disable adding new sub-fields to search queue - // once we're running out of our time budget. - queueClosed bool - - // thisQueue holds the current breadth first search queue. - thisQueue []candidate - - // nextQueue holds the next breadth first search iteration's queue. - nextQueue []candidate - - // highScores tracks the highest deep candidate scores we have found - // so far. This is used to avoid work for low scoring deep candidates. - highScores [MaxDeepCompletions]float64 - - // candidateCount is the count of unique deep candidates encountered - // so far. - candidateCount int -} - -// enqueue adds a candidate to the search queue. -func (s *deepCompletionState) enqueue(cand candidate) { - s.nextQueue = append(s.nextQueue, cand) -} - -// scorePenalty computes a deep candidate score penalty. A candidate is -// penalized based on depth to favor shallower candidates. We also give a -// slight bonus to unexported objects and a slight additional penalty to -// function objects. -func (s *deepCompletionState) scorePenalty(cand *candidate) float64 { - var deepPenalty float64 - for _, dc := range cand.path { - deepPenalty++ - - if !dc.Exported() { - deepPenalty -= 0.1 - } - - if _, isSig := dc.Type().Underlying().(*types.Signature); isSig { - deepPenalty += 0.1 - } - } - - // Normalize penalty to a max depth of 10. - return deepPenalty / 10 -} - -// isHighScore returns whether score is among the top MaxDeepCompletions deep -// candidate scores encountered so far. If so, it adds score to highScores, -// possibly displacing an existing high score. -func (s *deepCompletionState) isHighScore(score float64) bool { - // Invariant: s.highScores is sorted with highest score first. Unclaimed - // positions are trailing zeros. - - // If we beat an existing score then take its spot. 
- for i, deepScore := range s.highScores { - if score <= deepScore { - continue - } - - if deepScore != 0 && i != len(s.highScores)-1 { - // If this wasn't an empty slot then we need to scooch everyone - // down one spot. - copy(s.highScores[i+1:], s.highScores[i:]) - } - s.highScores[i] = score - return true - } - - return false -} - -// newPath returns path from search root for an object following a given -// candidate. -func (s *deepCompletionState) newPath(cand candidate, obj types.Object) []types.Object { - path := make([]types.Object, len(cand.path)+1) - copy(path, cand.path) - path[len(path)-1] = obj - - return path -} - -// deepSearch searches a candidate and its subordinate objects for completion -// items if deep completion is enabled and adds the valid candidates to -// completion items. -func (c *completer) deepSearch(ctx context.Context, minDepth int, deadline *time.Time) { - defer func() { - // We can return early before completing the search, so be sure to - // clear out our queues to not impact any further invocations. - c.deepState.thisQueue = c.deepState.thisQueue[:0] - c.deepState.nextQueue = c.deepState.nextQueue[:0] - }() - - depth := 0 // current depth being processed - // Stop reports whether we should stop the search immediately. - stop := func() bool { - // Context cancellation indicates that the actual completion operation was - // cancelled, so ignore minDepth and deadline. - select { - case <-ctx.Done(): - return true - default: - } - // Otherwise, only stop if we've searched at least minDepth and reached the deadline. - return depth > minDepth && deadline != nil && time.Now().After(*deadline) - } - - for len(c.deepState.nextQueue) > 0 { - depth++ - if stop() { - return - } - c.deepState.thisQueue, c.deepState.nextQueue = c.deepState.nextQueue, c.deepState.thisQueue[:0] - - outer: - for _, cand := range c.deepState.thisQueue { - obj := cand.obj - - if obj == nil { - continue - } - - // At the top level, dedupe by object. - if len(cand.path) == 0 { - if c.seen[obj] { - continue - } - c.seen[obj] = true - } - - // If obj is not accessible because it lives in another package and is - // not exported, don't treat it as a completion candidate unless it's - // a package completion candidate. - if !c.completionContext.packageCompletion && - obj.Pkg() != nil && obj.Pkg() != c.pkg.GetTypes() && !obj.Exported() { - continue - } - - // If we want a type name, don't offer non-type name candidates. - // However, do offer package names since they can contain type names, - // and do offer any candidate without a type since we aren't sure if it - // is a type name or not (i.e. unimported candidate). - if c.wantTypeName() && obj.Type() != nil && !isTypeName(obj) && !isPkgName(obj) { - continue - } - - // When searching deep, make sure we don't have a cycle in our chain. - // We don't dedupe by object because we want to allow both "foo.Baz" - // and "bar.Baz" even though "Baz" is represented the same types.Object - // in both. - for _, seenObj := range cand.path { - if seenObj == obj { - continue outer - } - } - - c.addCandidate(ctx, &cand) - - c.deepState.candidateCount++ - if c.opts.budget > 0 && c.deepState.candidateCount%100 == 0 { - if stop() { - return - } - spent := float64(time.Since(c.startTime)) / float64(c.opts.budget) - // If we are almost out of budgeted time, no further elements - // should be added to the queue. This ensures remaining time is - // used for processing current queue. 
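isHighScore above keeps a fixed-size, descending array of the best scores instead of sorting every candidate. The insertion step on its own, with the array size and sample scores invented for the example:

package main

import "fmt"

const maxKept = 3

// record inserts score into high (sorted descending, zeros are free slots)
// and reports whether it ranked among the kept scores.
func record(high *[maxKept]float64, score float64) bool {
	for i, kept := range high {
		if score <= kept {
			continue
		}
		if kept != 0 && i != len(high)-1 {
			// Not an empty slot: shift the lower scores down to make room.
			copy(high[i+1:], high[i:])
		}
		high[i] = score
		return true
	}
	return false
}

func main() {
	var high [maxKept]float64
	for _, s := range []float64{0.4, 0.9, 0.2, 0.7, 0.1} {
		kept := record(&high, s)
		fmt.Println(s, kept, high)
	}
}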
- if !c.deepState.queueClosed && spent >= 0.85 { - c.deepState.queueClosed = true - } - } - - // if deep search is disabled, don't add any more candidates. - if !c.deepState.enabled || c.deepState.queueClosed { - continue - } - - // Searching members for a type name doesn't make sense. - if isTypeName(obj) { - continue - } - if obj.Type() == nil { - continue - } - - // Don't search embedded fields because they were already included in their - // parent's fields. - if v, ok := obj.(*types.Var); ok && v.Embedded() { - continue - } - - if sig, ok := obj.Type().Underlying().(*types.Signature); ok { - // If obj is a function that takes no arguments and returns one - // value, keep searching across the function call. - if sig.Params().Len() == 0 && sig.Results().Len() == 1 { - path := c.deepState.newPath(cand, obj) - // The result of a function call is not addressable. - c.methodsAndFields(sig.Results().At(0).Type(), false, cand.imp, func(newCand candidate) { - newCand.pathInvokeMask = cand.pathInvokeMask | (1 << uint64(len(cand.path))) - newCand.path = path - c.deepState.enqueue(newCand) - }) - } - } - - path := c.deepState.newPath(cand, obj) - switch obj := obj.(type) { - case *types.PkgName: - c.packageMembers(obj.Imported(), stdScore, cand.imp, func(newCand candidate) { - newCand.pathInvokeMask = cand.pathInvokeMask - newCand.path = path - c.deepState.enqueue(newCand) - }) - default: - c.methodsAndFields(obj.Type(), cand.addressable, cand.imp, func(newCand candidate) { - newCand.pathInvokeMask = cand.pathInvokeMask - newCand.path = path - c.deepState.enqueue(newCand) - }) - } - } - } -} - -// addCandidate adds a completion candidate to suggestions, without searching -// its members for more candidates. -func (c *completer) addCandidate(ctx context.Context, cand *candidate) { - obj := cand.obj - if c.matchingCandidate(cand) { - cand.score *= highScore - - if p := c.penalty(cand); p > 0 { - cand.score *= (1 - p) - } - } else if isTypeName(obj) { - // If obj is a *types.TypeName that didn't otherwise match, check - // if a literal object of this type makes a good candidate. - - // We only care about named types (i.e. don't want builtin types). - if _, isNamed := obj.Type().(*types.Named); isNamed { - c.literal(ctx, obj.Type(), cand.imp) - } - } - - // Lower score of method calls so we prefer fields and vars over calls. - if cand.hasMod(invoke) { - if sig, ok := obj.Type().Underlying().(*types.Signature); ok && sig.Recv() != nil { - cand.score *= 0.9 - } - } - - // Prefer private objects over public ones. - if !obj.Exported() && obj.Parent() != types.Universe { - cand.score *= 1.1 - } - - // Slight penalty for index modifier (e.g. changing "foo" to - // "foo[]") to curb false positives. - if cand.hasMod(index) { - cand.score *= 0.9 - } - - // Favor shallow matches by lowering score according to depth. - cand.score -= cand.score * c.deepState.scorePenalty(cand) - - if cand.score < 0 { - cand.score = 0 - } - - cand.name = deepCandName(cand) - if item, err := c.item(ctx, *cand); err == nil { - c.items = append(c.items, item) - } -} - -// deepCandName produces the full candidate name including any -// ancestor objects. For example, "foo.bar().baz" for candidate "baz". 
-func deepCandName(cand *candidate) string { - totalLen := len(cand.obj.Name()) - for i, obj := range cand.path { - totalLen += len(obj.Name()) + 1 - if cand.pathInvokeMask&(1<<uint16(i)) > 0 { - totalLen += 2 - } - } - - var buf strings.Builder - buf.Grow(totalLen) - - for i, obj := range cand.path { - buf.WriteString(obj.Name()) - if cand.pathInvokeMask&(1<<uint16(i)) > 0 { - buf.WriteByte('(') - buf.WriteByte(')') - } - buf.WriteByte('.') - } - - buf.WriteString(cand.obj.Name()) - - return buf.String() -} - -// penalty reports a score penalty for cand in the range (0, 1). -// For example, a candidate is penalized if it has already been used -// in another switch case statement. -func (c *completer) penalty(cand *candidate) float64 { - for _, p := range c.inference.penalized { - if c.objChainMatches(cand, p.objChain) { - return p.penalty - } - } - - return 0 -} - -// objChainMatches reports whether cand combined with the surrounding -// object prefix matches chain. -func (c *completer) objChainMatches(cand *candidate, chain []types.Object) bool { - // For example, when completing: - // - // foo.ba<> - // - // If we are considering the deep candidate "bar.baz", cand is baz, - // objChain is [foo] and deepChain is [bar]. We would match the - // chain [foo, bar, baz]. - if len(chain) != len(c.inference.objChain)+len(cand.path)+1 { - return false - } - - if chain[len(chain)-1] != cand.obj { - return false - } - - for i, o := range c.inference.objChain { - if chain[i] != o { - return false - } - } - - for i, o := range cand.path { - if chain[i+len(c.inference.objChain)] != o { - return false - } - } - - return true -} diff --git a/internal/golangorgx/gopls/golang/completion/definition.go b/internal/golangorgx/gopls/golang/completion/definition.go deleted file mode 100644 index 4a956ed87be..00000000000 --- a/internal/golangorgx/gopls/golang/completion/definition.go +++ /dev/null @@ -1,160 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
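deepCandName above records which path elements were zero-argument calls in a bit mask and replays that while joining the names. The same formatting rule over plain strings, with invented inputs:

package main

import (
	"fmt"
	"strings"
)

// joinChain renders a deep candidate name such as "foo.bar().baz"; bit i of
// invokeMask marks path element i as a call.
func joinChain(path []string, leaf string, invokeMask uint16) string {
	var buf strings.Builder
	for i, name := range path {
		buf.WriteString(name)
		if invokeMask&(1<<uint16(i)) > 0 {
			buf.WriteString("()")
		}
		buf.WriteByte('.')
	}
	buf.WriteString(leaf)
	return buf.String()
}

func main() {
	fmt.Println(joinChain([]string{"foo", "bar"}, "baz", 1<<1)) // foo.bar().baz
}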
- -package completion - -import ( - "go/ast" - "go/types" - "strings" - "unicode" - "unicode/utf8" - - "cuelang.org/go/internal/golangorgx/gopls/golang" - "cuelang.org/go/internal/golangorgx/gopls/golang/completion/snippet" - "cuelang.org/go/internal/golangorgx/gopls/protocol" -) - -// some function definitions in test files can be completed -// So far, TestFoo(t *testing.T), TestMain(m *testing.M) -// BenchmarkFoo(b *testing.B), FuzzFoo(f *testing.F) - -// path[0] is known to be *ast.Ident -func definition(path []ast.Node, obj types.Object, pgf *golang.ParsedGoFile) ([]CompletionItem, *Selection) { - if _, ok := obj.(*types.Func); !ok { - return nil, nil // not a function at all - } - if !strings.HasSuffix(pgf.URI.Path(), "_test.go") { - return nil, nil // not a test file - } - - name := path[0].(*ast.Ident).Name - if len(name) == 0 { - // can't happen - return nil, nil - } - start := path[0].Pos() - end := path[0].End() - sel := &Selection{ - content: "", - cursor: start, - tokFile: pgf.Tok, - start: start, - end: end, - mapper: pgf.Mapper, - } - var ans []CompletionItem - var hasParens bool - n, ok := path[1].(*ast.FuncDecl) - if !ok { - return nil, nil // can't happen - } - if n.Recv != nil { - return nil, nil // a method, not a function - } - t := n.Type.Params - if t.Closing != t.Opening { - hasParens = true - } - - // Always suggest TestMain, if possible - if strings.HasPrefix("TestMain", name) { - if hasParens { - ans = append(ans, defItem("TestMain", obj)) - } else { - ans = append(ans, defItem("TestMain(m *testing.M)", obj)) - } - } - - // If a snippet is possible, suggest it - if strings.HasPrefix("Test", name) { - if hasParens { - ans = append(ans, defItem("Test", obj)) - } else { - ans = append(ans, defSnippet("Test", "(t *testing.T)", obj)) - } - return ans, sel - } else if strings.HasPrefix("Benchmark", name) { - if hasParens { - ans = append(ans, defItem("Benchmark", obj)) - } else { - ans = append(ans, defSnippet("Benchmark", "(b *testing.B)", obj)) - } - return ans, sel - } else if strings.HasPrefix("Fuzz", name) { - if hasParens { - ans = append(ans, defItem("Fuzz", obj)) - } else { - ans = append(ans, defSnippet("Fuzz", "(f *testing.F)", obj)) - } - return ans, sel - } - - // Fill in the argument for what the user has already typed - if got := defMatches(name, "Test", path, "(t *testing.T)"); got != "" { - ans = append(ans, defItem(got, obj)) - } else if got := defMatches(name, "Benchmark", path, "(b *testing.B)"); got != "" { - ans = append(ans, defItem(got, obj)) - } else if got := defMatches(name, "Fuzz", path, "(f *testing.F)"); got != "" { - ans = append(ans, defItem(got, obj)) - } - return ans, sel -} - -// defMatches returns text for defItem, never for defSnippet -func defMatches(name, pat string, path []ast.Node, arg string) string { - if !strings.HasPrefix(name, pat) { - return "" - } - c, _ := utf8.DecodeRuneInString(name[len(pat):]) - if unicode.IsLower(c) { - return "" - } - fd, ok := path[1].(*ast.FuncDecl) - if !ok { - // we don't know what's going on - return "" - } - fp := fd.Type.Params - if len(fp.List) > 0 { - // signature already there, nothing to suggest - return "" - } - if fp.Opening != fp.Closing { - // nothing: completion works on words, not easy to insert arg - return "" - } - // suggesting signature too - return name + arg -} - -func defSnippet(prefix, suffix string, obj types.Object) CompletionItem { - var sn snippet.Builder - sn.WriteText(prefix) - sn.WritePlaceholder(func(b *snippet.Builder) { b.WriteText("Xxx") }) - sn.WriteText(suffix + " 
{\n\t") - sn.WriteFinalTabstop() - sn.WriteText("\n}") - return CompletionItem{ - Label: prefix + "Xxx" + suffix, - Detail: "tab, type the rest of the name, then tab", - Kind: protocol.FunctionCompletion, - Depth: 0, - Score: 10, - snippet: &sn, - Documentation: prefix + " test function", - isSlice: isSlice(obj), - } -} -func defItem(val string, obj types.Object) CompletionItem { - return CompletionItem{ - Label: val, - InsertText: val, - Kind: protocol.FunctionCompletion, - Depth: 0, - Score: 9, // prefer the snippets when available - Documentation: "complete the function name", - isSlice: isSlice(obj), - } -} diff --git a/internal/golangorgx/gopls/golang/completion/format.go b/internal/golangorgx/gopls/golang/completion/format.go deleted file mode 100644 index f7ced5aed0e..00000000000 --- a/internal/golangorgx/gopls/golang/completion/format.go +++ /dev/null @@ -1,439 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package completion - -import ( - "context" - "errors" - "fmt" - "go/ast" - "go/doc" - "go/types" - "strings" - - "cuelang.org/go/internal/golangorgx/gopls/golang" - "cuelang.org/go/internal/golangorgx/gopls/golang/completion/snippet" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/util/safetoken" - "cuelang.org/go/internal/golangorgx/tools/event" - "cuelang.org/go/internal/golangorgx/tools/imports" -) - -var ( - errNoMatch = errors.New("not a surrounding match") - errLowScore = errors.New("not a high scoring candidate") -) - -// item formats a candidate to a CompletionItem. -func (c *completer) item(ctx context.Context, cand candidate) (CompletionItem, error) { - obj := cand.obj - - // if the object isn't a valid match against the surrounding, return early. - matchScore := c.matcher.Score(cand.name) - if matchScore <= 0 { - return CompletionItem{}, errNoMatch - } - cand.score *= float64(matchScore) - - // Ignore deep candidates that won't be in the MaxDeepCompletions anyway. - if len(cand.path) != 0 && !c.deepState.isHighScore(cand.score) { - return CompletionItem{}, errLowScore - } - - // Handle builtin types separately. 
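
// A minimal, self-contained illustration of the universe-scope test used just
// below: predeclared objects (builtins, predeclared types and constants) all
// report types.Universe as their parent scope, which is the signal for routing
// them to formatBuiltin. Only the standard go/types API is used here.

package main

import (
	"fmt"
	"go/types"
)

func main() {
	for _, name := range []string{"len", "true", "error", "int"} {
		obj := types.Universe.Lookup(name)
		// Every predeclared name prints true here.
		fmt.Printf("%-5s parent==Universe: %v\n", name, obj.Parent() == types.Universe)
	}
}
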
- if obj.Parent() == types.Universe { - return c.formatBuiltin(ctx, cand) - } - - var ( - label = cand.name - detail = types.TypeString(obj.Type(), c.qf) - insert = label - kind = protocol.TextCompletion - snip snippet.Builder - protocolEdits []protocol.TextEdit - ) - if obj.Type() == nil { - detail = "" - } - if isTypeName(obj) && c.wantTypeParams() { - x := cand.obj.(*types.TypeName) - if named, ok := x.Type().(*types.Named); ok { - tp := named.TypeParams() - label += golang.FormatTypeParams(tp) - insert = label // maintain invariant above (label == insert) - } - } - - snip.WriteText(insert) - - switch obj := obj.(type) { - case *types.TypeName: - detail, kind = golang.FormatType(obj.Type(), c.qf) - case *types.Const: - kind = protocol.ConstantCompletion - case *types.Var: - if _, ok := obj.Type().(*types.Struct); ok { - detail = "struct{...}" // for anonymous structs - } else if obj.IsField() { - var err error - detail, err = golang.FormatVarType(ctx, c.snapshot, c.pkg, obj, c.qf, c.mq) - if err != nil { - return CompletionItem{}, err - } - } - if obj.IsField() { - kind = protocol.FieldCompletion - c.structFieldSnippet(cand, detail, &snip) - } else { - kind = protocol.VariableCompletion - } - if obj.Type() == nil { - break - } - case *types.Func: - sig, ok := obj.Type().Underlying().(*types.Signature) - if !ok { - break - } - kind = protocol.FunctionCompletion - if sig != nil && sig.Recv() != nil { - kind = protocol.MethodCompletion - } - case *types.PkgName: - kind = protocol.ModuleCompletion - detail = fmt.Sprintf("%q", obj.Imported().Path()) - case *types.Label: - kind = protocol.ConstantCompletion - detail = "label" - } - - var prefix string - for _, mod := range cand.mods { - switch mod { - case reference: - prefix = "&" + prefix - case dereference: - prefix = "*" + prefix - case chanRead: - prefix = "<-" + prefix - } - } - - var ( - suffix string - funcType = obj.Type() - ) -Suffixes: - for _, mod := range cand.mods { - switch mod { - case invoke: - if sig, ok := funcType.Underlying().(*types.Signature); ok { - s, err := golang.NewSignature(ctx, c.snapshot, c.pkg, sig, nil, c.qf, c.mq) - if err != nil { - return CompletionItem{}, err - } - - tparams := s.TypeParams() - if len(tparams) > 0 { - // Eliminate the suffix of type parameters that are - // likely redundant because they can probably be - // inferred from the argument types (#51783). - // - // We don't bother doing the reverse inference from - // result types as result-only type parameters are - // quite unusual. - free := inferableTypeParams(sig) - for i := sig.TypeParams().Len() - 1; i >= 0; i-- { - tparam := sig.TypeParams().At(i) - if !free[tparam] { - break - } - tparams = tparams[:i] // eliminate - } - } - - c.functionCallSnippet("", tparams, s.Params(), &snip) - if sig.Results().Len() == 1 { - funcType = sig.Results().At(0).Type() - } - detail = "func" + s.Format() - } - - if !c.opts.snippets { - // Without snippets the candidate will not include "()". Don't - // add further suffixes since they will be invalid. For - // example, with snippets "foo()..." would become "foo..." - // without snippets if we added the dotDotDot. - break Suffixes - } - case takeSlice: - suffix += "[:]" - case takeDotDotDot: - suffix += "..." - case index: - snip.WriteText("[") - snip.WritePlaceholder(nil) - snip.WriteText("]") - } - } - - // If this candidate needs an additional import statement, - // add the additional text edits needed. 
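
// A small stand-alone sketch of how the modifier handling above turns a list
// of candidate modifiers into prefix and suffix text around the inserted name.
// The mod type and apply function below are illustrative stand-ins, not the
// completer's real types.

package main

import "fmt"

type mod int

const (
	reference mod = iota // take address: "&x"
	dereference          // dereference: "*x"
	chanRead             // receive: "<-x"
	takeSlice            // slice: "x[:]"
	takeDotDotDot        // spread: "x..."
)

// apply stacks prefix modifiers in front of the inserted text and appends
// suffix modifiers after it, mirroring the ordering used above.
func apply(insert string, mods []mod) string {
	var prefix, suffix string
	for _, m := range mods {
		switch m {
		case reference:
			prefix = "&" + prefix
		case dereference:
			prefix = "*" + prefix
		case chanRead:
			prefix = "<-" + prefix
		case takeSlice:
			suffix += "[:]"
		case takeDotDotDot:
			suffix += "..."
		}
	}
	return prefix + insert + suffix
}

func main() {
	fmt.Println(apply("foo", []mod{reference}))                  // &foo
	fmt.Println(apply("items", []mod{takeSlice, takeDotDotDot})) // items[:]...
}
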
- if cand.imp != nil { - addlEdits, err := c.importEdits(cand.imp) - - if err != nil { - return CompletionItem{}, err - } - - protocolEdits = append(protocolEdits, addlEdits...) - if kind != protocol.ModuleCompletion { - if detail != "" { - detail += " " - } - detail += fmt.Sprintf("(from %q)", cand.imp.importPath) - } - } - - if cand.convertTo != nil { - typeName := types.TypeString(cand.convertTo, c.qf) - - switch t := cand.convertTo.(type) { - // We need extra parens when casting to these types. For example, - // we need "(*int)(foo)", not "*int(foo)". - case *types.Pointer, *types.Signature: - typeName = "(" + typeName + ")" - case *types.Basic: - // If the types are incompatible (as determined by typeMatches), then we - // must need a conversion here. However, if the target type is untyped, - // don't suggest converting to e.g. "untyped float" (golang/go#62141). - if t.Info()&types.IsUntyped != 0 { - typeName = types.TypeString(types.Default(cand.convertTo), c.qf) - } - } - - prefix = typeName + "(" + prefix - suffix = ")" - } - - if prefix != "" { - // If we are in a selector, add an edit to place prefix before selector. - if sel := enclosingSelector(c.path, c.pos); sel != nil { - edits, err := c.editText(sel.Pos(), sel.Pos(), prefix) - if err != nil { - return CompletionItem{}, err - } - protocolEdits = append(protocolEdits, edits...) - } else { - // If there is no selector, just stick the prefix at the start. - insert = prefix + insert - snip.PrependText(prefix) - } - } - - if suffix != "" { - insert += suffix - snip.WriteText(suffix) - } - - detail = strings.TrimPrefix(detail, "untyped ") - // override computed detail with provided detail, if something is provided. - if cand.detail != "" { - detail = cand.detail - } - item := CompletionItem{ - Label: label, - InsertText: insert, - AdditionalTextEdits: protocolEdits, - Detail: detail, - Kind: kind, - Score: cand.score, - Depth: len(cand.path), - snippet: &snip, - isSlice: isSlice(obj), - } - // If the user doesn't want documentation for completion items. - if !c.opts.documentation { - return item, nil - } - pos := safetoken.StartPosition(c.pkg.FileSet(), obj.Pos()) - - // We ignore errors here, because some types, like "unsafe" or "error", - // may not have valid positions that we can use to get documentation. - if !pos.IsValid() { - return item, nil - } - - comment, err := golang.HoverDocForObject(ctx, c.snapshot, c.pkg.FileSet(), obj) - if err != nil { - event.Error(ctx, fmt.Sprintf("failed to find Hover for %q", obj.Name()), err) - return item, nil - } - if c.opts.fullDocumentation { - item.Documentation = comment.Text() - } else { - item.Documentation = doc.Synopsis(comment.Text()) - } - // The desired pattern is `^// Deprecated`, but the prefix has been removed - // TODO(rfindley): It doesn't look like this does the right thing for - // multi-line comments. - if strings.HasPrefix(comment.Text(), "Deprecated") { - if c.snapshot.Options().CompletionTags { - item.Tags = []protocol.CompletionItemTag{protocol.ComplDeprecated} - } else if c.snapshot.Options().CompletionDeprecated { - item.Deprecated = true - } - } - - return item, nil -} - -// importEdits produces the text edits necessary to add the given import to the current file. 
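
// Before the import-edit helper below, a quick illustration of the
// parenthesization applied to cand.convertTo above: conversions to pointer
// (and function) types need surrounding parentheses to parse as conversions.

package main

import "fmt"

func main() {
	x := 42
	// (*int)(&x) is a conversion; without the parentheses, *int(&x) would
	// parse as a dereference of int(&x) and fail to compile, which is why
	// pointer and signature target types are wrapped in parentheses.
	p := (*int)(&x)
	fmt.Println(*p) // 42
}
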
-func (c *completer) importEdits(imp *importInfo) ([]protocol.TextEdit, error) { - if imp == nil { - return nil, nil - } - - pgf, err := c.pkg.File(protocol.URIFromPath(c.filename)) - if err != nil { - return nil, err - } - - return golang.ComputeOneImportFixEdits(c.snapshot, pgf, &imports.ImportFix{ - StmtInfo: imports.ImportInfo{ - ImportPath: imp.importPath, - Name: imp.name, - }, - // IdentName is unused on this path and is difficult to get. - FixType: imports.AddImport, - }) -} - -func (c *completer) formatBuiltin(ctx context.Context, cand candidate) (CompletionItem, error) { - obj := cand.obj - item := CompletionItem{ - Label: obj.Name(), - InsertText: obj.Name(), - Score: cand.score, - } - switch obj.(type) { - case *types.Const: - item.Kind = protocol.ConstantCompletion - case *types.Builtin: - item.Kind = protocol.FunctionCompletion - sig, err := golang.NewBuiltinSignature(ctx, c.snapshot, obj.Name()) - if err != nil { - return CompletionItem{}, err - } - item.Detail = "func" + sig.Format() - item.snippet = &snippet.Builder{} - // The signature inferred for a built-in is instantiated, so TypeParams=∅. - c.functionCallSnippet(obj.Name(), sig.TypeParams(), sig.Params(), item.snippet) - case *types.TypeName: - if types.IsInterface(obj.Type()) { - item.Kind = protocol.InterfaceCompletion - } else { - item.Kind = protocol.ClassCompletion - } - case *types.Nil: - item.Kind = protocol.VariableCompletion - } - return item, nil -} - -// decide if the type params (if any) should be part of the completion -// which only possible for types.Named and types.Signature -// (so far, only in receivers, e.g.; func (s *GENERIC[K, V])..., which is a types.Named) -func (c *completer) wantTypeParams() bool { - // Need to be lexically in a receiver, and a child of an IndexListExpr - // (but IndexListExpr only exists with go1.18) - start := c.path[0].Pos() - for i, nd := range c.path { - if fd, ok := nd.(*ast.FuncDecl); ok { - if i > 0 && fd.Recv != nil && start < fd.Recv.End() { - return true - } else { - return false - } - } - } - return false -} - -// inferableTypeParams returns the set of type parameters -// of sig that are constrained by (inferred from) the argument types. -func inferableTypeParams(sig *types.Signature) map[*types.TypeParam]bool { - free := make(map[*types.TypeParam]bool) - - // visit adds to free all the free type parameters of t. - var visit func(t types.Type) - visit = func(t types.Type) { - switch t := t.(type) { - case *types.Array: - visit(t.Elem()) - case *types.Chan: - visit(t.Elem()) - case *types.Map: - visit(t.Key()) - visit(t.Elem()) - case *types.Pointer: - visit(t.Elem()) - case *types.Slice: - visit(t.Elem()) - case *types.Interface: - for i := 0; i < t.NumExplicitMethods(); i++ { - visit(t.ExplicitMethod(i).Type()) - } - for i := 0; i < t.NumEmbeddeds(); i++ { - visit(t.EmbeddedType(i)) - } - case *types.Union: - for i := 0; i < t.Len(); i++ { - visit(t.Term(i).Type()) - } - case *types.Signature: - if tp := t.TypeParams(); tp != nil { - // Generic signatures only appear as the type of generic - // function declarations, so this isn't really reachable. 
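
// inferableTypeParams feeds the suffix-trimming logic in item(). A concrete
// example of why inferable parameters can be dropped: in the sketch below
// (mapSlice is an illustrative generic function, not part of this package),
// both type parameters occur in the argument types, so a call site can omit
// the explicit [T, U] instantiation and the completion can drop that suffix.

package main

import "fmt"

// mapSlice's type parameters both appear in its parameter types, so they are
// inferable from the arguments at any call site.
func mapSlice[T, U any](in []T, f func(T) U) []U {
	out := make([]U, len(in))
	for i, v := range in {
		out[i] = f(v)
	}
	return out
}

func main() {
	// No explicit mapSlice[int, int](...) is needed: T and U are inferred,
	// so offering a plain "mapSlice(...)" completion is sufficient.
	doubled := mapSlice([]int{1, 2, 3}, func(i int) int { return i * 2 })
	fmt.Println(doubled) // [2 4 6]
}
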
- for i := 0; i < tp.Len(); i++ { - visit(tp.At(i).Constraint()) - } - } - visit(t.Params()) - visit(t.Results()) - case *types.Tuple: - for i := 0; i < t.Len(); i++ { - visit(t.At(i).Type()) - } - case *types.Struct: - for i := 0; i < t.NumFields(); i++ { - visit(t.Field(i).Type()) - } - case *types.TypeParam: - free[t] = true - case *types.Basic, *types.Named: - // nop - default: - panic(t) - } - } - - visit(sig.Params()) - - // Perform induction through constraints. -restart: - for i := 0; i < sig.TypeParams().Len(); i++ { - tp := sig.TypeParams().At(i) - if free[tp] { - n := len(free) - visit(tp.Constraint()) - if len(free) > n { - goto restart // iterate until fixed point - } - } - } - return free -} diff --git a/internal/golangorgx/gopls/golang/completion/fuzz.go b/internal/golangorgx/gopls/golang/completion/fuzz.go deleted file mode 100644 index 1dedeb84f26..00000000000 --- a/internal/golangorgx/gopls/golang/completion/fuzz.go +++ /dev/null @@ -1,141 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package completion - -import ( - "fmt" - "go/ast" - "go/types" - "strings" - - "cuelang.org/go/internal/golangorgx/gopls/protocol" -) - -// golang/go#51089 -// *testing.F deserves special treatment as member use is constrained: -// The arguments to f.Fuzz are determined by the arguments to a previous f.Add -// Inside f.Fuzz only f.Failed and f.Name are allowed. -// PJW: are there other packages where we can deduce usage constraints? - -// if we find fuzz completions, then return true, as those are the only completions to offer -func (c *completer) fuzz(mset *types.MethodSet, imp *importInfo, cb func(candidate)) bool { - // 1. inside f.Fuzz? (only f.Failed and f.Name) - // 2. possible completing f.Fuzz? - // [Ident,SelectorExpr,Callexpr,ExprStmt,BlockiStmt,FuncDecl(Fuzz...)] - // 3. before f.Fuzz, same (for 2., offer choice when looking at an F) - - // does the path contain FuncLit as arg to f.Fuzz CallExpr? - inside := false -Loop: - for i, n := range c.path { - switch v := n.(type) { - case *ast.CallExpr: - if len(v.Args) != 1 { - continue Loop - } - if _, ok := v.Args[0].(*ast.FuncLit); !ok { - continue - } - if s, ok := v.Fun.(*ast.SelectorExpr); !ok || s.Sel.Name != "Fuzz" { - continue - } - if i > 2 { // avoid t.Fuzz itself in tests - inside = true - break Loop - } - } - } - if inside { - for i := 0; i < mset.Len(); i++ { - o := mset.At(i).Obj() - if o.Name() == "Failed" || o.Name() == "Name" { - cb(candidate{ - obj: o, - score: stdScore, - imp: imp, - addressable: true, - }) - } - } - return true - } - // if it could be t.Fuzz, look for the preceding t.Add - id, ok := c.path[0].(*ast.Ident) - if ok && strings.HasPrefix("Fuzz", id.Name) { - var add *ast.CallExpr - f := func(n ast.Node) bool { - if n == nil { - return true - } - call, ok := n.(*ast.CallExpr) - if !ok { - return true - } - s, ok := call.Fun.(*ast.SelectorExpr) - if !ok { - return true - } - if s.Sel.Name != "Add" { - return true - } - // Sel.X should be of type *testing.F - got := c.pkg.GetTypesInfo().Types[s.X] - if got.Type.String() == "*testing.F" { - add = call - } - return false // because we're done... - } - // look at the enclosing FuzzFoo functions - if len(c.path) < 2 { - return false - } - n := c.path[len(c.path)-2] - if _, ok := n.(*ast.FuncDecl); !ok { - // the path should start with ast.File, ast.FuncDecl, ... 
- // but it didn't, so give up - return false - } - ast.Inspect(n, f) - if add == nil { - // looks like f.Fuzz without a preceding f.Add. - // let the regular completion handle it. - return false - } - - lbl := "Fuzz(func(t *testing.T" - for i, a := range add.Args { - info := c.pkg.GetTypesInfo().TypeOf(a) - if info == nil { - return false // How could this happen, but better safe than panic. - } - lbl += fmt.Sprintf(", %c %s", 'a'+i, info) - } - lbl += ")" - xx := CompletionItem{ - Label: lbl, - InsertText: lbl, - Kind: protocol.FunctionCompletion, - Depth: 0, - Score: 10, // pretty confident the user should see this - Documentation: "argument types from f.Add", - isSlice: false, - } - c.items = append(c.items, xx) - for i := 0; i < mset.Len(); i++ { - o := mset.At(i).Obj() - if o.Name() != "Fuzz" { - cb(candidate{ - obj: o, - score: stdScore, - imp: imp, - addressable: true, - }) - } - } - return true // done - } - // let the standard processing take care of it instead - return false -} diff --git a/internal/golangorgx/gopls/golang/completion/keywords.go b/internal/golangorgx/gopls/golang/completion/keywords.go deleted file mode 100644 index 7b8c7ec1865..00000000000 --- a/internal/golangorgx/gopls/golang/completion/keywords.go +++ /dev/null @@ -1,154 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package completion - -import ( - "go/ast" - - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/util/astutil" -) - -const ( - BREAK = "break" - CASE = "case" - CHAN = "chan" - CONST = "const" - CONTINUE = "continue" - DEFAULT = "default" - DEFER = "defer" - ELSE = "else" - FALLTHROUGH = "fallthrough" - FOR = "for" - FUNC = "func" - GO = "go" - GOTO = "goto" - IF = "if" - IMPORT = "import" - INTERFACE = "interface" - MAP = "map" - PACKAGE = "package" - RANGE = "range" - RETURN = "return" - SELECT = "select" - STRUCT = "struct" - SWITCH = "switch" - TYPE = "type" - VAR = "var" -) - -// addKeywordCompletions offers keyword candidates appropriate at the position. -func (c *completer) addKeywordCompletions() { - seen := make(map[string]bool) - - if c.wantTypeName() && c.inference.objType == nil { - // If we want a type name but don't have an expected obj type, - // include "interface", "struct", "func", "chan", and "map". - - // "interface" and "struct" are more common declaring named types. - // Give them a higher score if we are in a type declaration. - structIntf, funcChanMap := stdScore, highScore - if len(c.path) > 1 { - if _, namedDecl := c.path[1].(*ast.TypeSpec); namedDecl { - structIntf, funcChanMap = highScore, stdScore - } - } - - c.addKeywordItems(seen, structIntf, STRUCT, INTERFACE) - c.addKeywordItems(seen, funcChanMap, FUNC, CHAN, MAP) - } - - // If we are at the file scope, only offer decl keywords. We don't - // get *ast.Idents at the file scope because non-keyword identifiers - // turn into *ast.BadDecl, not *ast.Ident. - if len(c.path) == 1 || isASTFile(c.path[1]) { - c.addKeywordItems(seen, stdScore, TYPE, CONST, VAR, FUNC, IMPORT) - return - } else if _, ok := c.path[0].(*ast.Ident); !ok { - // Otherwise only offer keywords if the client is completing an identifier. - return - } - - if len(c.path) > 2 { - // Offer "range" if we are in ast.ForStmt.Init. This is what the - // AST looks like before "range" is typed, e.g. "for i := r<>". 
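
// The check below hinges on whether the cursor position falls inside the for
// statement's Init clause. A rough, self-contained demonstration of that
// position-containment idea using only go/parser and go/ast (the real code
// delegates to astutil.NodeContains):

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
)

func main() {
	src := "package p\nfunc f() {\n\tfor i := 0; i < 10; i++ {\n\t}\n}\n"
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	ast.Inspect(file, func(n ast.Node) bool {
		loop, ok := n.(*ast.ForStmt)
		if !ok {
			return true
		}
		// A cursor somewhere inside "i := 0" is the situation in which
		// suggesting the "range" keyword is useful.
		pos := loop.Init.Pos() + 1
		fmt.Println("inside Init:", loop.Init.Pos() <= pos && pos <= loop.Init.End()) // true
		return false
	})
}
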
- if loop, ok := c.path[2].(*ast.ForStmt); ok && loop.Init != nil && astutil.NodeContains(loop.Init, c.pos) { - c.addKeywordItems(seen, stdScore, RANGE) - } - } - - // Only suggest keywords if we are beginning a statement. - switch n := c.path[1].(type) { - case *ast.BlockStmt, *ast.ExprStmt: - // OK - our ident must be at beginning of statement. - case *ast.CommClause: - // Make sure we aren't in the Comm statement. - if !n.Colon.IsValid() || c.pos <= n.Colon { - return - } - case *ast.CaseClause: - // Make sure we aren't in the case List. - if !n.Colon.IsValid() || c.pos <= n.Colon { - return - } - default: - return - } - - // Filter out keywords depending on scope - // Skip the first one because we want to look at the enclosing scopes - path := c.path[1:] - for i, n := range path { - switch node := n.(type) { - case *ast.CaseClause: - // only recommend "fallthrough" and "break" within the bodies of a case clause - if c.pos > node.Colon { - c.addKeywordItems(seen, stdScore, BREAK) - // "fallthrough" is only valid in switch statements. - // A case clause is always nested within a block statement in a switch statement, - // that block statement is nested within either a TypeSwitchStmt or a SwitchStmt. - if i+2 >= len(path) { - continue - } - if _, ok := path[i+2].(*ast.SwitchStmt); ok { - c.addKeywordItems(seen, stdScore, FALLTHROUGH) - } - } - case *ast.CommClause: - if c.pos > node.Colon { - c.addKeywordItems(seen, stdScore, BREAK) - } - case *ast.TypeSwitchStmt, *ast.SelectStmt, *ast.SwitchStmt: - c.addKeywordItems(seen, stdScore, CASE, DEFAULT) - case *ast.ForStmt, *ast.RangeStmt: - c.addKeywordItems(seen, stdScore, BREAK, CONTINUE) - // This is a bit weak, functions allow for many keywords - case *ast.FuncDecl: - if node.Body != nil && c.pos > node.Body.Lbrace { - c.addKeywordItems(seen, stdScore, DEFER, RETURN, FOR, GO, SWITCH, SELECT, IF, ELSE, VAR, CONST, GOTO, TYPE) - } - } - } -} - -// addKeywordItems dedupes and adds completion items for the specified -// keywords with the specified score. -func (c *completer) addKeywordItems(seen map[string]bool, score float64, kws ...string) { - for _, kw := range kws { - if seen[kw] { - continue - } - seen[kw] = true - - if matchScore := c.matcher.Score(kw); matchScore > 0 { - c.items = append(c.items, CompletionItem{ - Label: kw, - Kind: protocol.KeywordCompletion, - InsertText: kw, - Score: score * float64(matchScore), - }) - } - } -} diff --git a/internal/golangorgx/gopls/golang/completion/labels.go b/internal/golangorgx/gopls/golang/completion/labels.go deleted file mode 100644 index e4fd961e319..00000000000 --- a/internal/golangorgx/gopls/golang/completion/labels.go +++ /dev/null @@ -1,112 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package completion - -import ( - "go/ast" - "go/token" - "math" -) - -type labelType int - -const ( - labelNone labelType = iota - labelBreak - labelContinue - labelGoto -) - -// wantLabelCompletion returns true if we want (only) label -// completions at the position. -func (c *completer) wantLabelCompletion() labelType { - if _, ok := c.path[0].(*ast.Ident); ok && len(c.path) > 1 { - // We want a label if we are an *ast.Ident child of a statement - // that accepts a label, e.g. "break Lo<>". - return takesLabel(c.path[1]) - } - - return labelNone -} - -// takesLabel returns the corresponding labelType if n is a statement -// that accepts a label, otherwise labelNone. 
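
// Since takesLabel below is about break/continue/goto targets, here is a short
// reminder of what those labels look like in ordinary code: only labels on
// enclosing loop (or switch/select, for break) statements are legal targets,
// which is why label completion walks outward through enclosing statements.

package main

import "fmt"

func main() {
Outer:
	for i := 0; i < 3; i++ {
		for j := 0; j < 3; j++ {
			if j == 1 {
				// "continue Outer" may only name a label on an enclosing
				// loop, so those are the only labels worth offering here.
				continue Outer
			}
			fmt.Println(i, j)
		}
	}
}
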
-func takesLabel(n ast.Node) labelType { - if bs, ok := n.(*ast.BranchStmt); ok { - switch bs.Tok { - case token.BREAK: - return labelBreak - case token.CONTINUE: - return labelContinue - case token.GOTO: - return labelGoto - } - } - return labelNone -} - -// labels adds completion items for labels defined in the enclosing -// function. -func (c *completer) labels(lt labelType) { - if c.enclosingFunc == nil { - return - } - - addLabel := func(score float64, l *ast.LabeledStmt) { - labelObj := c.pkg.GetTypesInfo().ObjectOf(l.Label) - if labelObj != nil { - c.deepState.enqueue(candidate{obj: labelObj, score: score}) - } - } - - switch lt { - case labelBreak, labelContinue: - // "break" and "continue" only accept labels from enclosing statements. - - for i, p := range c.path { - switch p := p.(type) { - case *ast.FuncLit: - // Labels are function scoped, so don't continue out of functions. - return - case *ast.LabeledStmt: - switch p.Stmt.(type) { - case *ast.ForStmt, *ast.RangeStmt: - // Loop labels can be used for "break" or "continue". - addLabel(highScore*math.Pow(.99, float64(i)), p) - case *ast.SwitchStmt, *ast.SelectStmt, *ast.TypeSwitchStmt: - // Switch and select labels can be used only for "break". - if lt == labelBreak { - addLabel(highScore*math.Pow(.99, float64(i)), p) - } - } - } - } - case labelGoto: - // Goto accepts any label in the same function not in a nested - // block. It also doesn't take labels that would jump across - // variable definitions, but ignore that case for now. - ast.Inspect(c.enclosingFunc.body, func(n ast.Node) bool { - if n == nil { - return false - } - - switch n := n.(type) { - // Only search into block-like nodes enclosing our "goto". - // This prevents us from finding labels in nested blocks. - case *ast.BlockStmt, *ast.CommClause, *ast.CaseClause: - for _, p := range c.path { - if n == p { - return true - } - } - return false - case *ast.LabeledStmt: - addLabel(highScore, n) - } - - return true - }) - } -} diff --git a/internal/golangorgx/gopls/golang/completion/literal.go b/internal/golangorgx/gopls/golang/completion/literal.go deleted file mode 100644 index 6ba267fe277..00000000000 --- a/internal/golangorgx/gopls/golang/completion/literal.go +++ /dev/null @@ -1,591 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package completion - -import ( - "context" - "fmt" - "go/types" - "strings" - "unicode" - - "cuelang.org/go/internal/golangorgx/gopls/golang" - "cuelang.org/go/internal/golangorgx/gopls/golang/completion/snippet" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/tools/event" -) - -// literal generates composite literal, function literal, and make() -// completion items. -func (c *completer) literal(ctx context.Context, literalType types.Type, imp *importInfo) { - if !c.opts.snippets { - return - } - - expType := c.inference.objType - - if c.inference.matchesVariadic(literalType) { - // Don't offer literal slice candidates for variadic arguments. - // For example, don't offer "[]interface{}{}" in "fmt.Print(<>)". - return - } - - // Avoid literal candidates if the expected type is an empty - // interface. It isn't very useful to suggest a literal candidate of - // every possible type. - if expType != nil && isEmptyInterface(expType) { - return - } - - // We handle unnamed literal completions explicitly before searching - // for candidates. 
Avoid named-type literal completions for - // unnamed-type expected type since that results in duplicate - // candidates. For example, in - // - // type mySlice []int - // var []int = <> - // - // don't offer "mySlice{}" since we have already added a candidate - // of "[]int{}". - if _, named := literalType.(*types.Named); named && expType != nil { - if _, named := golang.Deref(expType).(*types.Named); !named { - return - } - } - - // Check if an object of type literalType would match our expected type. - cand := candidate{ - obj: c.fakeObj(literalType), - } - - switch literalType.Underlying().(type) { - // These literal types are addressable (e.g. "&[]int{}"), others are - // not (e.g. can't do "&(func(){})"). - case *types.Struct, *types.Array, *types.Slice, *types.Map: - cand.addressable = true - } - - if !c.matchingCandidate(&cand) || cand.convertTo != nil { - return - } - - var ( - qf = c.qf - sel = enclosingSelector(c.path, c.pos) - ) - - // Don't qualify the type name if we are in a selector expression - // since the package name is already present. - if sel != nil { - qf = func(_ *types.Package) string { return "" } - } - - snip, typeName := c.typeNameSnippet(literalType, qf) - - // A type name of "[]int" doesn't work very will with the matcher - // since "[" isn't a valid identifier prefix. Here we strip off the - // slice (and array) prefix yielding just "int". - matchName := typeName - switch t := literalType.(type) { - case *types.Slice: - matchName = types.TypeString(t.Elem(), qf) - case *types.Array: - matchName = types.TypeString(t.Elem(), qf) - } - - addlEdits, err := c.importEdits(imp) - if err != nil { - event.Error(ctx, "error adding import for literal candidate", err) - return - } - - // If prefix matches the type name, client may want a composite literal. - if score := c.matcher.Score(matchName); score > 0 { - if cand.hasMod(reference) { - if sel != nil { - // If we are in a selector we must place the "&" before the selector. - // For example, "foo.B<>" must complete to "&foo.Bar{}", not - // "foo.&Bar{}". - edits, err := c.editText(sel.Pos(), sel.Pos(), "&") - if err != nil { - event.Error(ctx, "error making edit for literal pointer completion", err) - return - } - addlEdits = append(addlEdits, edits...) - } else { - // Otherwise we can stick the "&" directly before the type name. - typeName = "&" + typeName - snip.PrependText("&") - } - } - - switch t := literalType.Underlying().(type) { - case *types.Struct, *types.Array, *types.Slice, *types.Map: - c.compositeLiteral(t, snip.Clone(), typeName, float64(score), addlEdits) - case *types.Signature: - // Add a literal completion for a signature type that implements - // an interface. For example, offer "http.HandlerFunc()" when - // expected type is "http.Handler". - if expType != nil && types.IsInterface(expType) { - c.basicLiteral(t, snip.Clone(), typeName, float64(score), addlEdits) - } - case *types.Basic: - // Add a literal completion for basic types that implement our - // expected interface (e.g. named string type http.Dir - // implements http.FileSystem), or are identical to our expected - // type (i.e. yielding a type conversion such as "float64()"). - if expType != nil && (types.IsInterface(expType) || types.Identical(expType, literalType)) { - c.basicLiteral(t, snip.Clone(), typeName, float64(score), addlEdits) - } - } - } - - // If prefix matches "make", client may want a "make()" - // invocation. We also include the type name to allow for more - // flexible fuzzy matching. 
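
// For reference, the candidate shapes being produced around here, written out
// as the code a user ends up with (the element types are arbitrary examples):

package main

import "fmt"

func main() {
	a := []int{}              // composite literal candidate: "[]int{}"
	b := make([]int, 0)       // make() candidate; slices require the length, so it defaults to 0
	m := make(map[string]int) // maps and channels omit the optional second argument
	fmt.Println(len(a), len(b), len(m))
}
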
- if score := c.matcher.Score("make." + matchName); !cand.hasMod(reference) && score > 0 { - switch literalType.Underlying().(type) { - case *types.Slice: - // The second argument to "make()" for slices is required, so default to "0". - c.makeCall(snip.Clone(), typeName, "0", float64(score), addlEdits) - case *types.Map, *types.Chan: - // Maps and channels don't require the second argument, so omit - // to keep things simple for now. - c.makeCall(snip.Clone(), typeName, "", float64(score), addlEdits) - } - } - - // If prefix matches "func", client may want a function literal. - if score := c.matcher.Score("func"); !cand.hasMod(reference) && score > 0 && (expType == nil || !types.IsInterface(expType)) { - switch t := literalType.Underlying().(type) { - case *types.Signature: - c.functionLiteral(ctx, t, float64(score)) - } - } -} - -// literalCandidateScore is the base score for literal candidates. -// Literal candidates match the expected type so they should be high -// scoring, but we want them ranked below lexical objects of the -// correct type, so scale down highScore. -const literalCandidateScore = highScore / 2 - -// functionLiteral adds a function literal completion item for the -// given signature. -func (c *completer) functionLiteral(ctx context.Context, sig *types.Signature, matchScore float64) { - snip := &snippet.Builder{} - snip.WriteText("func(") - - // First we generate names for each param and keep a seen count so - // we know if we need to uniquify param names. For example, - // "func(int)" will become "func(i int)", but "func(int, int64)" - // will become "func(i1 int, i2 int64)". - var ( - paramNames = make([]string, sig.Params().Len()) - paramNameCount = make(map[string]int) - hasTypeParams bool - ) - for i := 0; i < sig.Params().Len(); i++ { - var ( - p = sig.Params().At(i) - name = p.Name() - ) - - if tp, _ := p.Type().(*types.TypeParam); tp != nil && !c.typeParamInScope(tp) { - hasTypeParams = true - } - - if name == "" { - // If the param has no name in the signature, guess a name based - // on the type. Use an empty qualifier to ignore the package. - // For example, we want to name "http.Request" "r", not "hr". - typeName, err := golang.FormatVarType(ctx, c.snapshot, c.pkg, p, - func(p *types.Package) string { return "" }, - func(golang.PackageName, golang.ImportPath, golang.PackagePath) string { return "" }) - if err != nil { - // In general, the only error we should encounter while formatting is - // context cancellation. - if ctx.Err() == nil { - event.Error(ctx, "formatting var type", err) - } - return - } - name = abbreviateTypeName(typeName) - } - paramNames[i] = name - if name != "_" { - paramNameCount[name]++ - } - } - - for n, c := range paramNameCount { - // Any names we saw more than once will need a unique suffix added - // on. Reset the count to 1 to act as the suffix for the first - // name. - if c >= 2 { - paramNameCount[n] = 1 - } else { - delete(paramNameCount, n) - } - } - - for i := 0; i < sig.Params().Len(); i++ { - if hasTypeParams && !c.opts.placeholders { - // If there are type params in the args then the user must - // choose the concrete types. If placeholders are disabled just - // drop them between the parens and let them fill things in. - snip.WritePlaceholder(nil) - break - } - - if i > 0 { - snip.WriteText(", ") - } - - var ( - p = sig.Params().At(i) - name = paramNames[i] - ) - - // Uniquify names by adding on an incrementing numeric suffix. 
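
// A compact sketch of the uniquifying step described above (uniquify is an
// illustrative helper, not the completer's real code): guessed names that
// collide get an incrementing numeric suffix, so "func(int, int64)" is
// rendered as "func(i1 int, i2 int64)" rather than repeating "i".

package main

import "fmt"

func uniquify(names []string) []string {
	count := map[string]int{}
	for _, n := range names {
		count[n]++
	}
	next := map[string]int{}
	out := make([]string, len(names))
	for i, n := range names {
		if count[n] < 2 {
			out[i] = n // unique names are kept as-is
			continue
		}
		next[n]++
		out[i] = fmt.Sprintf("%s%d", n, next[n])
	}
	return out
}

func main() {
	fmt.Println(uniquify([]string{"i", "i"})) // [i1 i2]
	fmt.Println(uniquify([]string{"w", "r"})) // [w r]
}
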
- if idx, found := paramNameCount[name]; found { - paramNameCount[name]++ - name = fmt.Sprintf("%s%d", name, idx) - } - - if name != p.Name() && c.opts.placeholders { - // If we didn't use the signature's param name verbatim then we - // may have chosen a poor name. Give the user a placeholder so - // they can easily fix the name. - snip.WritePlaceholder(func(b *snippet.Builder) { - b.WriteText(name) - }) - } else { - snip.WriteText(name) - } - - // If the following param's type is identical to this one, omit - // this param's type string. For example, emit "i, j int" instead - // of "i int, j int". - if i == sig.Params().Len()-1 || !types.Identical(p.Type(), sig.Params().At(i+1).Type()) { - snip.WriteText(" ") - typeStr, err := golang.FormatVarType(ctx, c.snapshot, c.pkg, p, c.qf, c.mq) - if err != nil { - // In general, the only error we should encounter while formatting is - // context cancellation. - if ctx.Err() == nil { - event.Error(ctx, "formatting var type", err) - } - return - } - if sig.Variadic() && i == sig.Params().Len()-1 { - typeStr = strings.Replace(typeStr, "[]", "...", 1) - } - - if tp, _ := p.Type().(*types.TypeParam); tp != nil && !c.typeParamInScope(tp) { - snip.WritePlaceholder(func(snip *snippet.Builder) { - snip.WriteText(typeStr) - }) - } else { - snip.WriteText(typeStr) - } - } - } - snip.WriteText(")") - - results := sig.Results() - if results.Len() > 0 { - snip.WriteText(" ") - } - - resultsNeedParens := results.Len() > 1 || - results.Len() == 1 && results.At(0).Name() != "" - - var resultHasTypeParams bool - for i := 0; i < results.Len(); i++ { - if tp, _ := results.At(i).Type().(*types.TypeParam); tp != nil && !c.typeParamInScope(tp) { - resultHasTypeParams = true - } - } - - if resultsNeedParens { - snip.WriteText("(") - } - for i := 0; i < results.Len(); i++ { - if resultHasTypeParams && !c.opts.placeholders { - // Leave an empty tabstop if placeholders are disabled and there - // are type args that need specificying. - snip.WritePlaceholder(nil) - break - } - - if i > 0 { - snip.WriteText(", ") - } - r := results.At(i) - if name := r.Name(); name != "" { - snip.WriteText(name + " ") - } - - text, err := golang.FormatVarType(ctx, c.snapshot, c.pkg, r, c.qf, c.mq) - if err != nil { - // In general, the only error we should encounter while formatting is - // context cancellation. - if ctx.Err() == nil { - event.Error(ctx, "formatting var type", err) - } - return - } - if tp, _ := r.Type().(*types.TypeParam); tp != nil && !c.typeParamInScope(tp) { - snip.WritePlaceholder(func(snip *snippet.Builder) { - snip.WriteText(text) - }) - } else { - snip.WriteText(text) - } - } - if resultsNeedParens { - snip.WriteText(")") - } - - snip.WriteText(" {") - snip.WriteFinalTabstop() - snip.WriteText("}") - - c.items = append(c.items, CompletionItem{ - Label: "func(...) {}", - Score: matchScore * literalCandidateScore, - Kind: protocol.VariableCompletion, - snippet: snip, - }) -} - -// conventionalAcronyms contains conventional acronyms for type names -// in lower case. For example, "ctx" for "context" and "err" for "error". -var conventionalAcronyms = map[string]string{ - "context": "ctx", - "error": "err", - "tx": "tx", - "responsewriter": "w", -} - -// abbreviateTypeName abbreviates type names into acronyms. For -// example, "fooBar" is abbreviated "fb". Care is taken to ignore -// non-identifier runes. For example, "[]int" becomes "i", and -// "struct { i int }" becomes "s". 
-func abbreviateTypeName(s string) string { - var ( - b strings.Builder - useNextUpper bool - ) - - // Trim off leading non-letters. We trim everything between "[" and - // "]" to handle array types like "[someConst]int". - var inBracket bool - s = strings.TrimFunc(s, func(r rune) bool { - if inBracket { - inBracket = r != ']' - return true - } - - if r == '[' { - inBracket = true - } - - return !unicode.IsLetter(r) - }) - - if acr, ok := conventionalAcronyms[strings.ToLower(s)]; ok { - return acr - } - - for i, r := range s { - // Stop if we encounter a non-identifier rune. - if !unicode.IsLetter(r) && !unicode.IsNumber(r) { - break - } - - if i == 0 { - b.WriteRune(unicode.ToLower(r)) - } - - if unicode.IsUpper(r) { - if useNextUpper { - b.WriteRune(unicode.ToLower(r)) - useNextUpper = false - } - } else { - useNextUpper = true - } - } - - return b.String() -} - -// compositeLiteral adds a composite literal completion item for the given typeName. -func (c *completer) compositeLiteral(T types.Type, snip *snippet.Builder, typeName string, matchScore float64, edits []protocol.TextEdit) { - snip.WriteText("{") - // Don't put the tab stop inside the composite literal curlies "{}" - // for structs that have no accessible fields. - if strct, ok := T.(*types.Struct); !ok || fieldsAccessible(strct, c.pkg.GetTypes()) { - snip.WriteFinalTabstop() - } - snip.WriteText("}") - - nonSnippet := typeName + "{}" - - c.items = append(c.items, CompletionItem{ - Label: nonSnippet, - InsertText: nonSnippet, - Score: matchScore * literalCandidateScore, - Kind: protocol.VariableCompletion, - AdditionalTextEdits: edits, - snippet: snip, - }) -} - -// basicLiteral adds a literal completion item for the given basic -// type name typeName. -func (c *completer) basicLiteral(T types.Type, snip *snippet.Builder, typeName string, matchScore float64, edits []protocol.TextEdit) { - // Never give type conversions like "untyped int()". - if isUntyped(T) { - return - } - - snip.WriteText("(") - snip.WriteFinalTabstop() - snip.WriteText(")") - - nonSnippet := typeName + "()" - - c.items = append(c.items, CompletionItem{ - Label: nonSnippet, - InsertText: nonSnippet, - Detail: T.String(), - Score: matchScore * literalCandidateScore, - Kind: protocol.VariableCompletion, - AdditionalTextEdits: edits, - snippet: snip, - }) -} - -// makeCall adds a completion item for a "make()" call given a specific type. -func (c *completer) makeCall(snip *snippet.Builder, typeName string, secondArg string, matchScore float64, edits []protocol.TextEdit) { - // Keep it simple and don't add any placeholders for optional "make()" arguments. - - snip.PrependText("make(") - if secondArg != "" { - snip.WriteText(", ") - snip.WritePlaceholder(func(b *snippet.Builder) { - if c.opts.placeholders { - b.WriteText(secondArg) - } - }) - } - snip.WriteText(")") - - var nonSnippet strings.Builder - nonSnippet.WriteString("make(" + typeName) - if secondArg != "" { - nonSnippet.WriteString(", ") - nonSnippet.WriteString(secondArg) - } - nonSnippet.WriteByte(')') - - c.items = append(c.items, CompletionItem{ - Label: nonSnippet.String(), - InsertText: nonSnippet.String(), - Score: matchScore * literalCandidateScore, - Kind: protocol.FunctionCompletion, - AdditionalTextEdits: edits, - snippet: snip, - }) -} - -// Create a snippet for a type name where type params become placeholders. 
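
// A trimmed-down model of the abbreviation behaviour implemented above.
// abbrev below is a simplification, not the real function: it skips the
// bracket trimming and the conventional-acronym table.

package main

import (
	"fmt"
	"strings"
	"unicode"
)

// abbrev keeps the lowered first letter plus the start of each subsequent
// capitalized word.
func abbrev(s string) string {
	var b strings.Builder
	for i, r := range s {
		if i == 0 || unicode.IsUpper(r) {
			b.WriteRune(unicode.ToLower(r))
		}
	}
	return b.String()
}

func main() {
	fmt.Println(abbrev("fooBar"))         // fb
	fmt.Println(abbrev("Request"))        // r
	fmt.Println(abbrev("ResponseWriter")) // rw (the real code maps this case to "w")
}
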
-func (c *completer) typeNameSnippet(literalType types.Type, qf types.Qualifier) (*snippet.Builder, string) { - var ( - snip snippet.Builder - typeName string - named, _ = literalType.(*types.Named) - ) - - if named != nil && named.Obj() != nil && named.TypeParams().Len() > 0 && !c.fullyInstantiated(named) { - // We are not "fully instantiated" meaning we have type params that must be specified. - if pkg := qf(named.Obj().Pkg()); pkg != "" { - typeName = pkg + "." - } - - // We do this to get "someType" instead of "someType[T]". - typeName += named.Obj().Name() - snip.WriteText(typeName + "[") - - if c.opts.placeholders { - for i := 0; i < named.TypeParams().Len(); i++ { - if i > 0 { - snip.WriteText(", ") - } - snip.WritePlaceholder(func(snip *snippet.Builder) { - snip.WriteText(types.TypeString(named.TypeParams().At(i), qf)) - }) - } - } else { - snip.WritePlaceholder(nil) - } - snip.WriteText("]") - typeName += "[...]" - } else { - // We don't have unspecified type params so use default type formatting. - typeName = types.TypeString(literalType, qf) - snip.WriteText(typeName) - } - - return &snip, typeName -} - -// fullyInstantiated reports whether all of t's type params have -// specified type args. -func (c *completer) fullyInstantiated(t *types.Named) bool { - tps := t.TypeParams() - tas := t.TypeArgs() - - if tps.Len() != tas.Len() { - return false - } - - for i := 0; i < tas.Len(); i++ { - switch ta := tas.At(i).(type) { - case *types.TypeParam: - // A *TypeParam only counts as specified if it is currently in - // scope (i.e. we are in a generic definition). - if !c.typeParamInScope(ta) { - return false - } - case *types.Named: - if !c.fullyInstantiated(ta) { - return false - } - } - } - return true -} - -// typeParamInScope returns whether tp's object is in scope at c.pos. -// This tells you whether you are in a generic definition and can -// assume tp has been specified. -func (c *completer) typeParamInScope(tp *types.TypeParam) bool { - obj := tp.Obj() - if obj == nil { - return false - } - - scope := c.innermostScope() - if scope == nil { - return false - } - - _, foundObj := scope.LookupParent(obj.Name(), c.pos) - return obj == foundObj -} diff --git a/internal/golangorgx/gopls/golang/completion/package.go b/internal/golangorgx/gopls/golang/completion/package.go deleted file mode 100644 index f08a6c948a7..00000000000 --- a/internal/golangorgx/gopls/golang/completion/package.go +++ /dev/null @@ -1,352 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package completion - -import ( - "bytes" - "context" - "errors" - "fmt" - "go/ast" - "go/parser" - "go/scanner" - "go/token" - "go/types" - "path/filepath" - "strings" - "unicode" - - "cuelang.org/go/internal/golangorgx/gopls/cache" - "cuelang.org/go/internal/golangorgx/gopls/file" - "cuelang.org/go/internal/golangorgx/gopls/golang" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/util/safetoken" - "cuelang.org/go/internal/golangorgx/tools/fuzzy" -) - -// packageClauseCompletions offers completions for a package declaration when -// one is not present in the given file. -func packageClauseCompletions(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, position protocol.Position) ([]CompletionItem, *Selection, error) { - // We know that the AST for this file will be empty due to the missing - // package declaration, but parse it anyway to get a mapper. 
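
// The package-clause completion below leans on the fact that a file missing
// its package declaration yields an empty file AST, yet still parses as a
// bare expression, recovering the identifier the user is typing. A minimal
// demonstration with the standard parser (file name and contents are
// arbitrary examples):

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
)

func main() {
	// A buffer containing only a half-typed package clause ("pack") parses
	// fine as an expression even though it is not a valid file.
	fset := token.NewFileSet()
	expr, err := parser.ParseExprFrom(fset, "x.go", []byte("pack"), 0)
	if err != nil {
		panic(err)
	}
	if ident, ok := expr.(*ast.Ident); ok {
		fmt.Println(ident.Name) // pack
	}
}
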
- // TODO(adonovan): opt: there's no need to parse just to get a mapper. - pgf, err := snapshot.ParseGo(ctx, fh, golang.ParseFull) - if err != nil { - return nil, nil, err - } - - offset, err := pgf.Mapper.PositionOffset(position) - if err != nil { - return nil, nil, err - } - surrounding, err := packageCompletionSurrounding(pgf, offset) - if err != nil { - return nil, nil, fmt.Errorf("invalid position for package completion: %w", err) - } - - packageSuggestions, err := packageSuggestions(ctx, snapshot, fh.URI(), "") - if err != nil { - return nil, nil, err - } - - var items []CompletionItem - for _, pkg := range packageSuggestions { - insertText := fmt.Sprintf("package %s", pkg.name) - items = append(items, CompletionItem{ - Label: insertText, - Kind: protocol.ModuleCompletion, - InsertText: insertText, - Score: pkg.score, - }) - } - - return items, surrounding, nil -} - -// packageCompletionSurrounding returns surrounding for package completion if a -// package completions can be suggested at a given cursor offset. A valid location -// for package completion is above any declarations or import statements. -func packageCompletionSurrounding(pgf *golang.ParsedGoFile, offset int) (*Selection, error) { - m := pgf.Mapper - // If the file lacks a package declaration, the parser will return an empty - // AST. As a work-around, try to parse an expression from the file contents. - fset := token.NewFileSet() - expr, _ := parser.ParseExprFrom(fset, m.URI.Path(), pgf.Src, parser.Mode(0)) - if expr == nil { - return nil, fmt.Errorf("unparseable file (%s)", m.URI) - } - tok := fset.File(expr.Pos()) - cursor := tok.Pos(offset) - - // If we were able to parse out an identifier as the first expression from - // the file, it may be the beginning of a package declaration ("pack "). - // We can offer package completions if the cursor is in the identifier. - if name, ok := expr.(*ast.Ident); ok { - if cursor >= name.Pos() && cursor <= name.End() { - if !strings.HasPrefix(PACKAGE, name.Name) { - return nil, fmt.Errorf("cursor in non-matching ident") - } - return &Selection{ - content: name.Name, - cursor: cursor, - tokFile: tok, - start: name.Pos(), - end: name.End(), - mapper: m, - }, nil - } - } - - // The file is invalid, but it contains an expression that we were able to - // parse. We will use this expression to construct the cursor's - // "surrounding". - - // First, consider the possibility that we have a valid "package" keyword - // with an empty package name ("package "). "package" is parsed as an - // *ast.BadDecl since it is a keyword. This logic would allow "package" to - // appear on any line of the file as long as it's the first code expression - // in the file. - lines := strings.Split(string(pgf.Src), "\n") - cursorLine := safetoken.Line(tok, cursor) - if cursorLine <= 0 || cursorLine > len(lines) { - return nil, fmt.Errorf("invalid line number") - } - if safetoken.StartPosition(fset, expr.Pos()).Line == cursorLine { - words := strings.Fields(lines[cursorLine-1]) - if len(words) > 0 && words[0] == PACKAGE { - content := PACKAGE - // Account for spaces if there are any. - if len(words) > 1 { - content += " " - } - - start := expr.Pos() - end := token.Pos(int(expr.Pos()) + len(content) + 1) - // We have verified that we have a valid 'package' keyword as our - // first expression. Ensure that cursor is in this keyword or - // otherwise fallback to the general case. 
- if cursor >= start && cursor <= end { - return &Selection{ - content: content, - cursor: cursor, - tokFile: tok, - start: start, - end: end, - mapper: m, - }, nil - } - } - } - - // If the cursor is after the start of the expression, no package - // declaration will be valid. - if cursor > expr.Pos() { - return nil, fmt.Errorf("cursor after expression") - } - - // If the cursor is in a comment, don't offer any completions. - if cursorInComment(tok, cursor, m.Content) { - return nil, fmt.Errorf("cursor in comment") - } - - // The surrounding range in this case is the cursor. - return &Selection{ - content: "", - tokFile: tok, - start: cursor, - end: cursor, - cursor: cursor, - mapper: m, - }, nil -} - -func cursorInComment(file *token.File, cursor token.Pos, src []byte) bool { - var s scanner.Scanner - s.Init(file, src, func(_ token.Position, _ string) {}, scanner.ScanComments) - for { - pos, tok, lit := s.Scan() - if pos <= cursor && cursor <= token.Pos(int(pos)+len(lit)) { - return tok == token.COMMENT - } - if tok == token.EOF { - break - } - } - return false -} - -// packageNameCompletions returns name completions for a package clause using -// the current name as prefix. -func (c *completer) packageNameCompletions(ctx context.Context, fileURI protocol.DocumentURI, name *ast.Ident) error { - cursor := int(c.pos - name.NamePos) - if cursor < 0 || cursor > len(name.Name) { - return errors.New("cursor is not in package name identifier") - } - - c.completionContext.packageCompletion = true - - prefix := name.Name[:cursor] - packageSuggestions, err := packageSuggestions(ctx, c.snapshot, fileURI, prefix) - if err != nil { - return err - } - - for _, pkg := range packageSuggestions { - c.deepState.enqueue(pkg) - } - return nil -} - -// packageSuggestions returns a list of packages from workspace packages that -// have the given prefix and are used in the same directory as the given -// file. This also includes test packages for these packages (_test) and -// the directory name itself. -func packageSuggestions(ctx context.Context, snapshot *cache.Snapshot, fileURI protocol.DocumentURI, prefix string) (packages []candidate, err error) { - active, err := snapshot.WorkspaceMetadata(ctx) - if err != nil { - return nil, err - } - - toCandidate := func(name string, score float64) candidate { - obj := types.NewPkgName(0, nil, name, types.NewPackage("", name)) - return candidate{obj: obj, name: name, detail: name, score: score} - } - - matcher := fuzzy.NewMatcher(prefix) - - // Always try to suggest a main package - defer func() { - if score := float64(matcher.Score("main")); score > 0 { - packages = append(packages, toCandidate("main", score*lowScore)) - } - }() - - dirPath := filepath.Dir(fileURI.Path()) - dirName := filepath.Base(dirPath) - if !isValidDirName(dirName) { - return packages, nil - } - pkgName := convertDirNameToPkgName(dirName) - - seenPkgs := make(map[golang.PackageName]struct{}) - - // The `go` command by default only allows one package per directory but we - // support multiple package suggestions since gopls is build system agnostic. - for _, mp := range active { - if mp.Name == "main" || mp.Name == "" { - continue - } - if _, ok := seenPkgs[mp.Name]; ok { - continue - } - - // Only add packages that are previously used in the current directory. 
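
// The relevance test below boils down to a directory comparison: a workspace
// package is only suggested if one of its compiled files lives in the same
// directory as the file being completed. A tiny sketch (the paths are made-up
// examples):

package main

import (
	"fmt"
	"path/filepath"
)

func main() {
	current := "/ws/mod/foo/foo.go"
	candidate := "/ws/mod/foo/foo_helpers.go"
	fmt.Println(filepath.Dir(current) == filepath.Dir(candidate)) // true
}
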
- var relevantPkg bool - for _, uri := range mp.CompiledGoFiles { - if filepath.Dir(uri.Path()) == dirPath { - relevantPkg = true - break - } - } - if !relevantPkg { - continue - } - - // Add a found package used in current directory as a high relevance - // suggestion and the test package for it as a medium relevance - // suggestion. - if score := float64(matcher.Score(string(mp.Name))); score > 0 { - packages = append(packages, toCandidate(string(mp.Name), score*highScore)) - } - seenPkgs[mp.Name] = struct{}{} - - testPkgName := mp.Name + "_test" - if _, ok := seenPkgs[testPkgName]; ok || strings.HasSuffix(string(mp.Name), "_test") { - continue - } - if score := float64(matcher.Score(string(testPkgName))); score > 0 { - packages = append(packages, toCandidate(string(testPkgName), score*stdScore)) - } - seenPkgs[testPkgName] = struct{}{} - } - - // Add current directory name as a low relevance suggestion. - if _, ok := seenPkgs[pkgName]; !ok { - if score := float64(matcher.Score(string(pkgName))); score > 0 { - packages = append(packages, toCandidate(string(pkgName), score*lowScore)) - } - - testPkgName := pkgName + "_test" - if score := float64(matcher.Score(string(testPkgName))); score > 0 { - packages = append(packages, toCandidate(string(testPkgName), score*lowScore)) - } - } - - return packages, nil -} - -// isValidDirName checks whether the passed directory name can be used in -// a package path. Requirements for a package path can be found here: -// https://golang.org/ref/mod#go-mod-file-ident. -func isValidDirName(dirName string) bool { - if dirName == "" { - return false - } - - for i, ch := range dirName { - if isLetter(ch) || isDigit(ch) { - continue - } - if i == 0 { - // Directory name can start only with '_'. '.' is not allowed in module paths. - // '-' and '~' are not allowed because elements of package paths must be - // safe command-line arguments. - if ch == '_' { - continue - } - } else { - // Modules path elements can't end with '.' - if isAllowedPunctuation(ch) && (i != len(dirName)-1 || ch != '.') { - continue - } - } - - return false - } - return true -} - -// convertDirNameToPkgName converts a valid directory name to a valid package name. -// It leaves only letters and digits. All letters are mapped to lower case. -func convertDirNameToPkgName(dirName string) golang.PackageName { - var buf bytes.Buffer - for _, ch := range dirName { - switch { - case isLetter(ch): - buf.WriteRune(unicode.ToLower(ch)) - - case buf.Len() != 0 && isDigit(ch): - buf.WriteRune(ch) - } - } - return golang.PackageName(buf.String()) -} - -// isLetter and isDigit allow only ASCII characters because -// "Each path element is a non-empty string made of up ASCII letters, -// ASCII digits, and limited ASCII punctuation" -// (see https://golang.org/ref/mod#go-mod-file-ident). - -func isLetter(ch rune) bool { - return 'a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z' -} - -func isDigit(ch rune) bool { - return '0' <= ch && ch <= '9' -} - -func isAllowedPunctuation(ch rune) bool { - return ch == '_' || ch == '-' || ch == '~' || ch == '.' -} diff --git a/internal/golangorgx/gopls/golang/completion/postfix_snippets.go b/internal/golangorgx/gopls/golang/completion/postfix_snippets.go deleted file mode 100644 index c20ed9c36f7..00000000000 --- a/internal/golangorgx/gopls/golang/completion/postfix_snippets.go +++ /dev/null @@ -1,682 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package completion - -import ( - "context" - "fmt" - "go/ast" - "go/token" - "go/types" - "log" - "reflect" - "strings" - "sync" - "text/template" - - "cuelang.org/go/internal/golangorgx/gopls/cache/metadata" - "cuelang.org/go/internal/golangorgx/gopls/golang" - "cuelang.org/go/internal/golangorgx/gopls/golang/completion/snippet" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/util/safetoken" - "cuelang.org/go/internal/golangorgx/tools/event" - "cuelang.org/go/internal/golangorgx/tools/imports" -) - -// Postfix snippets are artificial methods that allow the user to -// compose common operations in an "argument oriented" fashion. For -// example, instead of "sort.Slice(someSlice, ...)" a user can expand -// "someSlice.sort!". - -// postfixTmpl represents a postfix snippet completion candidate. -type postfixTmpl struct { - // label is the completion candidate's label presented to the user. - label string - - // details is passed along to the client as the candidate's details. - details string - - // body is the template text. See postfixTmplArgs for details on the - // facilities available to the template. - body string - - tmpl *template.Template -} - -// postfixTmplArgs are the template execution arguments available to -// the postfix snippet templates. -type postfixTmplArgs struct { - // StmtOK is true if it is valid to replace the selector with a - // statement. For example: - // - // func foo() { - // bar.sort! // statement okay - // - // someMethod(bar.sort!) // statement not okay - // } - StmtOK bool - - // X is the textual SelectorExpr.X. For example, when completing - // "foo.bar.print!", "X" is "foo.bar". - X string - - // Obj is the types.Object of SelectorExpr.X, if any. - Obj types.Object - - // Type is the type of "foo.bar" in "foo.bar.print!". 
- Type types.Type - - // FuncResult are results of the enclosed function - FuncResults []*types.Var - - sel *ast.SelectorExpr - scope *types.Scope - snip snippet.Builder - importIfNeeded func(pkgPath string, scope *types.Scope) (name string, edits []protocol.TextEdit, err error) - edits []protocol.TextEdit - qf types.Qualifier - varNames map[string]bool - placeholders bool - currentTabStop int -} - -var postfixTmpls = []postfixTmpl{{ - label: "sort", - details: "sort.Slice()", - body: `{{if and (eq .Kind "slice") .StmtOK -}} -{{.Import "sort"}}.Slice({{.X}}, func({{.VarName nil "i"}}, {{.VarName nil "j"}} int) bool { - {{.Cursor}} -}) -{{- end}}`, -}, { - label: "last", - details: "s[len(s)-1]", - body: `{{if and (eq .Kind "slice") .Obj -}} -{{.X}}[len({{.X}})-1] -{{- end}}`, -}, { - label: "reverse", - details: "reverse slice", - body: `{{if and (eq .Kind "slice") .StmtOK -}} -{{$i := .VarName nil "i"}}{{$j := .VarName nil "j" -}} -for {{$i}}, {{$j}} := 0, len({{.X}})-1; {{$i}} < {{$j}}; {{$i}}, {{$j}} = {{$i}}+1, {{$j}}-1 { - {{.X}}[{{$i}}], {{.X}}[{{$j}}] = {{.X}}[{{$j}}], {{.X}}[{{$i}}] -} -{{end}}`, -}, { - label: "range", - details: "range over slice", - body: `{{if and (eq .Kind "slice") .StmtOK -}} -for {{.VarName nil "i" | .Placeholder }}, {{.VarName .ElemType "v" | .Placeholder}} := range {{.X}} { - {{.Cursor}} -} -{{- end}}`, -}, { - label: "for", - details: "range over slice by index", - body: `{{if and (eq .Kind "slice") .StmtOK -}} -for {{ .VarName nil "i" | .Placeholder }} := range {{.X}} { - {{.Cursor}} -} -{{- end}}`, -}, { - label: "forr", - details: "range over slice by index and value", - body: `{{if and (eq .Kind "slice") .StmtOK -}} -for {{.VarName nil "i" | .Placeholder }}, {{.VarName .ElemType "v" | .Placeholder }} := range {{.X}} { - {{.Cursor}} -} -{{- end}}`, -}, { - label: "append", - details: "append and re-assign slice", - body: `{{if and (eq .Kind "slice") .StmtOK .Obj -}} -{{.X}} = append({{.X}}, {{.Cursor}}) -{{- end}}`, -}, { - label: "append", - details: "append to slice", - body: `{{if and (eq .Kind "slice") (not .StmtOK) -}} -append({{.X}}, {{.Cursor}}) -{{- end}}`, -}, { - label: "copy", - details: "duplicate slice", - body: `{{if and (eq .Kind "slice") .StmtOK .Obj -}} -{{$v := (.VarName nil (printf "%sCopy" .X))}}{{$v}} := make([]{{.TypeName .ElemType}}, len({{.X}})) -copy({{$v}}, {{.X}}) -{{end}}`, -}, { - label: "range", - details: "range over map", - body: `{{if and (eq .Kind "map") .StmtOK -}} -for {{.VarName .KeyType "k" | .Placeholder}}, {{.VarName .ElemType "v" | .Placeholder}} := range {{.X}} { - {{.Cursor}} -} -{{- end}}`, -}, { - label: "for", - details: "range over map by key", - body: `{{if and (eq .Kind "map") .StmtOK -}} -for {{.VarName .KeyType "k" | .Placeholder}} := range {{.X}} { - {{.Cursor}} -} -{{- end}}`, -}, { - label: "forr", - details: "range over map by key and value", - body: `{{if and (eq .Kind "map") .StmtOK -}} -for {{.VarName .KeyType "k" | .Placeholder}}, {{.VarName .ElemType "v" | .Placeholder}} := range {{.X}} { - {{.Cursor}} -} -{{- end}}`, -}, { - label: "clear", - details: "clear map contents", - body: `{{if and (eq .Kind "map") .StmtOK -}} -{{$k := (.VarName .KeyType "k")}}for {{$k}} := range {{.X}} { - delete({{.X}}, {{$k}}) -} -{{end}}`, -}, { - label: "keys", - details: "create slice of keys", - body: `{{if and (eq .Kind "map") .StmtOK -}} -{{$keysVar := (.VarName nil "keys")}}{{$keysVar}} := make([]{{.TypeName .KeyType}}, 0, len({{.X}})) -{{$k := (.VarName .KeyType "k")}}for {{$k}} := range {{.X}} { - 
{{$keysVar}} = append({{$keysVar}}, {{$k}}) -} -{{end}}`, -}, { - label: "range", - details: "range over channel", - body: `{{if and (eq .Kind "chan") .StmtOK -}} -for {{.VarName .ElemType "e" | .Placeholder}} := range {{.X}} { - {{.Cursor}} -} -{{- end}}`, -}, { - label: "for", - details: "range over channel", - body: `{{if and (eq .Kind "chan") .StmtOK -}} -for {{.VarName .ElemType "e" | .Placeholder}} := range {{.X}} { - {{.Cursor}} -} -{{- end}}`, -}, { - label: "var", - details: "assign to variables", - body: `{{if and (eq .Kind "tuple") .StmtOK -}} -{{$a := .}}{{range $i, $v := .Tuple}}{{if $i}}, {{end}}{{$a.VarName $v.Type $v.Name | $a.Placeholder }}{{end}} := {{.X}} -{{- end}}`, -}, { - label: "var", - details: "assign to variable", - body: `{{if and (ne .Kind "tuple") .StmtOK -}} -{{.VarName .Type "" | .Placeholder }} := {{.X}} -{{- end}}`, -}, { - label: "print", - details: "print to stdout", - body: `{{if and (ne .Kind "tuple") .StmtOK -}} -{{.Import "fmt"}}.Printf("{{.EscapeQuotes .X}}: %v\n", {{.X}}) -{{- end}}`, -}, { - label: "print", - details: "print to stdout", - body: `{{if and (eq .Kind "tuple") .StmtOK -}} -{{.Import "fmt"}}.Println({{.X}}) -{{- end}}`, -}, { - label: "split", - details: "split string", - body: `{{if (eq (.TypeName .Type) "string") -}} -{{.Import "strings"}}.Split({{.X}}, "{{.Cursor}}") -{{- end}}`, -}, { - label: "join", - details: "join string slice", - body: `{{if and (eq .Kind "slice") (eq (.TypeName .ElemType) "string") -}} -{{.Import "strings"}}.Join({{.X}}, "{{.Cursor}}") -{{- end}}`, -}, { - label: "ifnotnil", - details: "if expr != nil", - body: `{{if and (or (eq .Kind "pointer") (eq .Kind "chan") (eq .Kind "signature") (eq .Kind "interface") (eq .Kind "map") (eq .Kind "slice")) .StmtOK -}} -if {{.X}} != nil { - {{.Cursor}} -} -{{- end}}`, -}, { - label: "len", - details: "len(s)", - body: `{{if (eq .Kind "slice" "map" "array" "chan") -}} -len({{.X}}) -{{- end}}`, -}, { - label: "iferr", - details: "check error and return", - body: `{{if and .StmtOK (eq (.TypeName .Type) "error") -}} -{{- $errName := (or (and .IsIdent .X) "err") -}} -if {{if not .IsIdent}}err := {{.X}}; {{end}}{{$errName}} != nil { - return {{$a := .}}{{range $i, $v := .FuncResults}} - {{- if $i}}, {{end -}} - {{- if eq ($a.TypeName $v.Type) "error" -}} - {{$a.Placeholder $errName}} - {{- else -}} - {{$a.Zero $v.Type}} - {{- end -}} - {{end}} -} -{{end}}`, -}, { - label: "iferr", - details: "check error and return", - body: `{{if and .StmtOK (eq .Kind "tuple") (len .Tuple) (eq (.TypeName .TupleLast.Type) "error") -}} -{{- $a := . -}} -if {{range $i, $v := .Tuple}}{{if $i}}, {{end}}{{if and (eq ($a.TypeName $v.Type) "error") (eq (inc $i) (len $a.Tuple))}}err{{else}}_{{end}}{{end}} := {{.X -}} -; err != nil { - return {{range $i, $v := .FuncResults}} - {{- if $i}}, {{end -}} - {{- if eq ($a.TypeName $v.Type) "error" -}} - {{$a.Placeholder "err"}} - {{- else -}} - {{$a.Zero $v.Type}} - {{- end -}} - {{end}} -} -{{end}}`, -}, { - // variferr snippets use nested placeholders, as described in - // https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#snippet_syntax, - // so that users can wrap the returned error without modifying the error - // variable name. - label: "variferr", - details: "assign variables and check error", - body: `{{if and .StmtOK (eq .Kind "tuple") (len .Tuple) (eq (.TypeName .TupleLast.Type) "error") -}} -{{- $a := . 
-}} -{{- $errName := "err" -}} -{{- range $i, $v := .Tuple -}} - {{- if $i}}, {{end -}} - {{- if and (eq ($a.TypeName $v.Type) "error") (eq (inc $i) (len $a.Tuple)) -}} - {{$errName | $a.SpecifiedPlaceholder (len $a.Tuple)}} - {{- else -}} - {{$a.VarName $v.Type $v.Name | $a.Placeholder}} - {{- end -}} -{{- end}} := {{.X}} -if {{$errName | $a.SpecifiedPlaceholder (len $a.Tuple)}} != nil { - return {{range $i, $v := .FuncResults}} - {{- if $i}}, {{end -}} - {{- if eq ($a.TypeName $v.Type) "error" -}} - {{$errName | $a.SpecifiedPlaceholder (len $a.Tuple) | - $a.SpecifiedPlaceholder (inc (len $a.Tuple))}} - {{- else -}} - {{$a.Zero $v.Type}} - {{- end -}} - {{end}} -} -{{end}}`, -}, { - label: "variferr", - details: "assign variables and check error", - body: `{{if and .StmtOK (eq (.TypeName .Type) "error") -}} -{{- $a := . -}} -{{- $errName := .VarName nil "err" -}} -{{$errName | $a.SpecifiedPlaceholder 1}} := {{.X}} -if {{$errName | $a.SpecifiedPlaceholder 1}} != nil { - return {{range $i, $v := .FuncResults}} - {{- if $i}}, {{end -}} - {{- if eq ($a.TypeName $v.Type) "error" -}} - {{$errName | $a.SpecifiedPlaceholder 1 | $a.SpecifiedPlaceholder 2}} - {{- else -}} - {{$a.Zero $v.Type}} - {{- end -}} - {{end}} -} -{{end}}`, -}} - -// Cursor indicates where the client's cursor should end up after the -// snippet is done. -func (a *postfixTmplArgs) Cursor() string { - return "$0" -} - -// Placeholder indicate a tab stop with the placeholder string, the order -// of tab stops is the same as the order of invocation -func (a *postfixTmplArgs) Placeholder(placeholder string) string { - if !a.placeholders { - placeholder = "" - } - return fmt.Sprintf("${%d:%s}", a.nextTabStop(), placeholder) -} - -// nextTabStop returns the next tab stop index for a new placeholder. -func (a *postfixTmplArgs) nextTabStop() int { - // Tab stops start from 1, so increment before returning. - a.currentTabStop++ - return a.currentTabStop -} - -// SpecifiedPlaceholder indicate a specified tab stop with the placeholder string. -// Sometimes the same tab stop appears in multiple places and their numbers -// need to be specified. e.g. variferr -func (a *postfixTmplArgs) SpecifiedPlaceholder(tabStop int, placeholder string) string { - if !a.placeholders { - placeholder = "" - } - return fmt.Sprintf("${%d:%s}", tabStop, placeholder) -} - -// Import makes sure the package corresponding to path is imported, -// returning the identifier to use to refer to the package. -func (a *postfixTmplArgs) Import(path string) (string, error) { - name, edits, err := a.importIfNeeded(path, a.scope) - if err != nil { - return "", fmt.Errorf("couldn't import %q: %w", path, err) - } - a.edits = append(a.edits, edits...) - - return name, nil -} - -func (a *postfixTmplArgs) EscapeQuotes(v string) string { - return strings.ReplaceAll(v, `"`, `\\"`) -} - -// ElemType returns the Elem() type of xType, if applicable. -func (a *postfixTmplArgs) ElemType() types.Type { - if e, _ := a.Type.(interface{ Elem() types.Type }); e != nil { - return e.Elem() - } - return nil -} - -// Kind returns the underlying kind of type, e.g. "slice", "struct", -// etc. -func (a *postfixTmplArgs) Kind() string { - t := reflect.TypeOf(a.Type.Underlying()) - return strings.ToLower(strings.TrimPrefix(t.String(), "*types.")) -} - -// KeyType returns the type of X's key. KeyType panics if X is not a -// map. -func (a *postfixTmplArgs) KeyType() types.Type { - return a.Type.Underlying().(*types.Map).Key() -} - -// Tuple returns the tuple result vars if the type of X is tuple. 
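
To make the template mechanics above concrete, here is a minimal, self-contained sketch of how a postfix body is expanded. exampleArgs is a stand-in for postfixTmplArgs with only what this illustration needs: Kind is a plain string rather than a method derived from go/types, and only Placeholder and Cursor are modeled. Treat it as a sketch of the approach, not the deleted implementation.

	package main

	import (
		"fmt"
		"os"
		"text/template"
	)

	// exampleArgs stands in for postfixTmplArgs; only the pieces the
	// template below touches are modeled.
	type exampleArgs struct {
		X              string // textual receiver, e.g. "m" in "m.range!"
		Kind           string // simplified: the real code derives this from go/types
		currentTabStop int
	}

	// Placeholder emits the next numbered LSP tab stop; numbering follows
	// the order of invocation, as described above.
	func (a *exampleArgs) Placeholder(placeholder string) string {
		a.currentTabStop++
		return fmt.Sprintf("${%d:%s}", a.currentTabStop, placeholder)
	}

	// Cursor marks where the cursor lands once the snippet is expanded.
	func (a *exampleArgs) Cursor() string { return "$0" }

	func main() {
		const body = `{{if eq .Kind "map"}}for {{.Placeholder "k"}}, {{.Placeholder "v"}} := range {{.X}} {
		{{.Cursor}}
	}{{end}}`
		tmpl := template.Must(template.New("postfix_snippet").Parse(body))
		// Expanding "m.range!" prints roughly:
		//   for ${1:k}, ${2:v} := range m {
		//   	$0
		//   }
		if err := tmpl.Execute(os.Stdout, &exampleArgs{X: "m", Kind: "map"}); err != nil {
			panic(err)
		}
		fmt.Println()
	}
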
-func (a *postfixTmplArgs) Tuple() []*types.Var { - tuple, _ := a.Type.(*types.Tuple) - if tuple == nil { - return nil - } - - typs := make([]*types.Var, 0, tuple.Len()) - for i := 0; i < tuple.Len(); i++ { - typs = append(typs, tuple.At(i)) - } - return typs -} - -// TupleLast returns the last tuple result vars if the type of X is tuple. -func (a *postfixTmplArgs) TupleLast() *types.Var { - tuple, _ := a.Type.(*types.Tuple) - if tuple == nil { - return nil - } - if tuple.Len() == 0 { - return nil - } - return tuple.At(tuple.Len() - 1) -} - -// TypeName returns the textual representation of type t. -func (a *postfixTmplArgs) TypeName(t types.Type) (string, error) { - if t == nil || t == types.Typ[types.Invalid] { - return "", fmt.Errorf("invalid type: %v", t) - } - return types.TypeString(t, a.qf), nil -} - -// Zero return the zero value representation of type t -func (a *postfixTmplArgs) Zero(t types.Type) string { - return formatZeroValue(t, a.qf) -} - -func (a *postfixTmplArgs) IsIdent() bool { - _, ok := a.sel.X.(*ast.Ident) - return ok -} - -// VarName returns a suitable variable name for the type t. If t -// implements the error interface, "err" is used. If t is not a named -// type then nonNamedDefault is used. Otherwise a name is made by -// abbreviating the type name. If the resultant name is already in -// scope, an integer is appended to make a unique name. -func (a *postfixTmplArgs) VarName(t types.Type, nonNamedDefault string) string { - if t == nil { - t = types.Typ[types.Invalid] - } - - var name string - // go/types predicates are undefined on types.Typ[types.Invalid]. - if !types.Identical(t, types.Typ[types.Invalid]) && types.Implements(t, errorIntf) { - name = "err" - } else if _, isNamed := golang.Deref(t).(*types.Named); !isNamed { - name = nonNamedDefault - } - - if name == "" { - name = types.TypeString(t, func(p *types.Package) string { - return "" - }) - name = abbreviateTypeName(name) - } - - if dot := strings.LastIndex(name, "."); dot > -1 { - name = name[dot+1:] - } - - uniqueName := name - for i := 2; ; i++ { - if s, _ := a.scope.LookupParent(uniqueName, token.NoPos); s == nil && !a.varNames[uniqueName] { - break - } - uniqueName = fmt.Sprintf("%s%d", name, i) - } - - a.varNames[uniqueName] = true - - return uniqueName -} - -func (c *completer) addPostfixSnippetCandidates(ctx context.Context, sel *ast.SelectorExpr) { - if !c.opts.postfix { - return - } - - initPostfixRules() - - if sel == nil || sel.Sel == nil { - return - } - - selType := c.pkg.GetTypesInfo().TypeOf(sel.X) - if selType == nil { - return - } - - // Skip empty tuples since there is no value to operate on. - if tuple, ok := selType.Underlying().(*types.Tuple); ok && tuple == nil { - return - } - - tokFile := c.pkg.FileSet().File(c.pos) - - // Only replace sel with a statement if sel is already a statement. - var stmtOK bool - for i, n := range c.path { - if n == sel && i < len(c.path)-1 { - switch p := c.path[i+1].(type) { - case *ast.ExprStmt: - stmtOK = true - case *ast.AssignStmt: - // In cases like: - // - // foo.<> - // bar = 123 - // - // detect that "foo." makes up the entire statement since the - // apparent selector spans lines. 
- stmtOK = safetoken.Line(tokFile, c.pos) < safetoken.Line(tokFile, p.TokPos) - } - break - } - } - - var funcResults []*types.Var - if c.enclosingFunc != nil { - results := c.enclosingFunc.sig.Results() - if results != nil { - funcResults = make([]*types.Var, results.Len()) - for i := 0; i < results.Len(); i++ { - funcResults[i] = results.At(i) - } - } - } - - scope := c.pkg.GetTypes().Scope().Innermost(c.pos) - if scope == nil { - return - } - - // afterDot is the position after selector dot, e.g. "|" in - // "foo.|print". - afterDot := sel.Sel.Pos() - - // We must detect dangling selectors such as: - // - // foo.<> - // bar - // - // and adjust afterDot so that we don't mistakenly delete the - // newline thinking "bar" is part of our selector. - if startLine := safetoken.Line(tokFile, sel.Pos()); startLine != safetoken.Line(tokFile, afterDot) { - if safetoken.Line(tokFile, c.pos) != startLine { - return - } - afterDot = c.pos - } - - for _, rule := range postfixTmpls { - // When completing foo.print<>, "print" is naturally overwritten, - // but we need to also remove "foo." so the snippet has a clean - // slate. - edits, err := c.editText(sel.Pos(), afterDot, "") - if err != nil { - event.Error(ctx, "error calculating postfix edits", err) - return - } - - tmplArgs := postfixTmplArgs{ - X: golang.FormatNode(c.pkg.FileSet(), sel.X), - StmtOK: stmtOK, - Obj: exprObj(c.pkg.GetTypesInfo(), sel.X), - Type: selType, - FuncResults: funcResults, - sel: sel, - qf: c.qf, - importIfNeeded: c.importIfNeeded, - scope: scope, - varNames: make(map[string]bool), - placeholders: c.opts.placeholders, - } - - // Feed the template straight into the snippet builder. This - // allows templates to build snippets as they are executed. - err = rule.tmpl.Execute(&tmplArgs.snip, &tmplArgs) - if err != nil { - event.Error(ctx, "error executing postfix template", err) - continue - } - - if strings.TrimSpace(tmplArgs.snip.String()) == "" { - continue - } - - score := c.matcher.Score(rule.label) - if score <= 0 { - continue - } - - c.items = append(c.items, CompletionItem{ - Label: rule.label + "!", - Detail: rule.details, - Score: float64(score) * 0.01, - Kind: protocol.SnippetCompletion, - snippet: &tmplArgs.snip, - AdditionalTextEdits: append(edits, tmplArgs.edits...), - }) - } -} - -var postfixRulesOnce sync.Once - -func initPostfixRules() { - postfixRulesOnce.Do(func() { - var idx int - for _, rule := range postfixTmpls { - var err error - rule.tmpl, err = template.New("postfix_snippet").Funcs(template.FuncMap{ - "inc": inc, - }).Parse(rule.body) - if err != nil { - log.Panicf("error parsing postfix snippet template: %v", err) - } - postfixTmpls[idx] = rule - idx++ - } - postfixTmpls = postfixTmpls[:idx] - }) -} - -func inc(i int) int { - return i + 1 -} - -// importIfNeeded returns the package identifier and any necessary -// edits to import package pkgPath. -func (c *completer) importIfNeeded(pkgPath string, scope *types.Scope) (string, []protocol.TextEdit, error) { - defaultName := imports.ImportPathToAssumedName(pkgPath) - - // Check if file already imports pkgPath. - for _, s := range c.file.Imports { - // TODO(adonovan): what if pkgPath has a vendor/ suffix? - // This may be the cause of go.dev/issue/56291. - if string(metadata.UnquoteImportPath(s)) == pkgPath { - if s.Name == nil { - return defaultName, nil, nil - } - if s.Name.Name != "_" { - return s.Name.Name, nil, nil - } - } - } - - // Give up if the package's name is already in use by another object. 
- if _, obj := scope.LookupParent(defaultName, token.NoPos); obj != nil { - return "", nil, fmt.Errorf("import name %q of %q already in use", defaultName, pkgPath) - } - - edits, err := c.importEdits(&importInfo{ - importPath: pkgPath, - }) - if err != nil { - return "", nil, err - } - - return defaultName, edits, nil -} diff --git a/internal/golangorgx/gopls/golang/completion/printf.go b/internal/golangorgx/gopls/golang/completion/printf.go deleted file mode 100644 index 43201175542..00000000000 --- a/internal/golangorgx/gopls/golang/completion/printf.go +++ /dev/null @@ -1,172 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package completion - -import ( - "go/ast" - "go/constant" - "go/types" - "strconv" - "strings" - "unicode/utf8" -) - -// printfArgKind returns the expected objKind when completing a -// printf-like operand. call is the printf-like function call, and -// argIdx is the index of call.Args being completed. -func printfArgKind(info *types.Info, call *ast.CallExpr, argIdx int) objKind { - // Printf-like function name must end in "f". - fn := exprObj(info, call.Fun) - if fn == nil || !strings.HasSuffix(fn.Name(), "f") { - return kindAny - } - - sig, _ := fn.Type().(*types.Signature) - if sig == nil { - return kindAny - } - - // Must be variadic and take at least two params. - numParams := sig.Params().Len() - if !sig.Variadic() || numParams < 2 || argIdx < numParams-1 { - return kindAny - } - - // Param preceding variadic args must be a (format) string. - if !types.Identical(sig.Params().At(numParams-2).Type(), types.Typ[types.String]) { - return kindAny - } - - // Format string must be a constant. - strArg := info.Types[call.Args[numParams-2]].Value - if strArg == nil || strArg.Kind() != constant.String { - return kindAny - } - - return formatOperandKind(constant.StringVal(strArg), argIdx-(numParams-1)+1) -} - -// formatOperandKind returns the objKind corresponding to format's -// operandIdx'th operand. -func formatOperandKind(format string, operandIdx int) objKind { - var ( - prevOperandIdx int - kind = kindAny - ) - for { - i := strings.Index(format, "%") - if i == -1 { - break - } - - var operands []formatOperand - format, operands = parsePrintfVerb(format[i+1:], prevOperandIdx) - - // Check if any this verb's operands correspond to our target - // operandIdx. - for _, v := range operands { - if v.idx == operandIdx { - if kind == kindAny { - kind = v.kind - } else if v.kind != kindAny { - // If multiple verbs refer to the same operand, take the - // intersection of their kinds. - kind &= v.kind - } - } - - prevOperandIdx = v.idx - } - } - return kind -} - -type formatOperand struct { - // idx is the one-based printf operand index. - idx int - // kind is a mask of expected kinds of objects for this operand. - kind objKind -} - -// parsePrintfVerb parses the leading printf verb in f. The opening -// "%" must already be trimmed from f. prevIdx is the previous -// operand's index, or zero if this is the first verb. The format -// string is returned with the leading verb removed. Multiple operands -// can be returned in the case of dynamic widths such as "%*.*f". 
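
As a rough illustration of the verb classification that parsePrintfVerb performs, the sketch below maps a handful of verbs to the kind of operand they expect. It is deliberately simplified: kinds are human-readable strings rather than the objKind bitmask, and flags, dynamic widths ("*"), and explicit operand indices such as "%[2]s" are ignored.

	package main

	import "fmt"

	// kindForVerb is a simplified stand-in for the verb handling in
	// parsePrintfVerb: it returns a description instead of an objKind
	// bitmask and skips flag/width/index parsing.
	func kindForVerb(verb rune) string {
		switch verb {
		case 'v', 'T':
			return "any"
		case 't':
			return "bool"
		case 'c', 'd', 'o', 'O', 'U':
			return "int"
		case 'e', 'E', 'f', 'F', 'g', 'G':
			return "float or complex"
		case 'q', 's':
			return "string, []byte, fmt.Stringer, or error"
		case 'w':
			return "error"
		default:
			return "any (assume a custom fmt.Formatter verb)"
		}
	}

	func main() {
		// Completing the second argument of Printf("%s: %d", name, <>)
		// should therefore prefer integer-valued candidates.
		fmt.Println(kindForVerb('d')) // int
	}
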
-func parsePrintfVerb(f string, prevIdx int) (string, []formatOperand) { - var verbs []formatOperand - - addVerb := func(k objKind) { - verbs = append(verbs, formatOperand{ - idx: prevIdx + 1, - kind: k, - }) - prevIdx++ - } - - for len(f) > 0 { - // Trim first rune off of f so we are guaranteed to make progress. - r, l := utf8.DecodeRuneInString(f) - f = f[l:] - - // We care about three things: - // 1. The verb, which maps directly to object kind. - // 2. Explicit operand indices like "%[2]s". - // 3. Dynamic widths using "*". - switch r { - case '%': - return f, nil - case '*': - addVerb(kindInt) - continue - case '[': - // Parse operand index as in "%[2]s". - i := strings.Index(f, "]") - if i == -1 { - return f, nil - } - - idx, err := strconv.Atoi(f[:i]) - f = f[i+1:] - if err != nil { - return f, nil - } - - prevIdx = idx - 1 - continue - case 'v', 'T': - addVerb(kindAny) - case 't': - addVerb(kindBool) - case 'c', 'd', 'o', 'O', 'U': - addVerb(kindInt) - case 'e', 'E', 'f', 'F', 'g', 'G': - addVerb(kindFloat | kindComplex) - case 'b': - addVerb(kindInt | kindFloat | kindComplex | kindBytes) - case 'q', 's': - addVerb(kindString | kindBytes | kindStringer | kindError) - case 'x', 'X': - // Omit kindStringer and kindError though technically allowed. - addVerb(kindString | kindBytes | kindInt | kindFloat | kindComplex) - case 'p': - addVerb(kindPtr | kindSlice) - case 'w': - addVerb(kindError) - case '+', '-', '#', ' ', '.', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': - // Flag or numeric width/precision value. - continue - default: - // Assume unrecognized rune is a custom fmt.Formatter verb. - addVerb(kindAny) - } - - if len(verbs) > 0 { - break - } - } - - return f, verbs -} diff --git a/internal/golangorgx/gopls/golang/completion/snippet.go b/internal/golangorgx/gopls/golang/completion/snippet.go deleted file mode 100644 index a2596bd64a7..00000000000 --- a/internal/golangorgx/gopls/golang/completion/snippet.go +++ /dev/null @@ -1,126 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package completion - -import ( - "go/ast" - - "cuelang.org/go/internal/golangorgx/gopls/golang/completion/snippet" - "cuelang.org/go/internal/golangorgx/gopls/util/safetoken" -) - -// structFieldSnippet calculates the snippet for struct literal field names. -func (c *completer) structFieldSnippet(cand candidate, detail string, snip *snippet.Builder) { - if !c.wantStructFieldCompletions() { - return - } - - // If we are in a deep completion then we can't be completing a field - // name (e.g. "Foo{f<>}" completing to "Foo{f.Bar}" should not generate - // a snippet). - if len(cand.path) > 0 { - return - } - - clInfo := c.enclosingCompositeLiteral - - // If we are already in a key-value expression, we don't want a snippet. - if clInfo.kv != nil { - return - } - - // A plain snippet turns "Foo{Ba<>" into "Foo{Bar: <>". - snip.WriteText(": ") - snip.WritePlaceholder(func(b *snippet.Builder) { - // A placeholder snippet turns "Foo{Ba<>" into "Foo{Bar: <*int*>". - if c.opts.placeholders { - b.WriteText(detail) - } - }) - - fset := c.pkg.FileSet() - - // If the cursor position is on a different line from the literal's opening brace, - // we are in a multiline literal. Ignore line directives. 
- if safetoken.StartPosition(fset, c.pos).Line != safetoken.StartPosition(fset, clInfo.cl.Lbrace).Line { - snip.WriteText(",") - } -} - -// functionCallSnippet calculates the snippet for function calls. -// -// Callers should omit the suffix of type parameters that are -// constrained by the argument types, to avoid offering completions -// that contain instantiations that are redundant because of type -// inference, such as f[int](1) for func f[T any](x T). -func (c *completer) functionCallSnippet(name string, tparams, params []string, snip *snippet.Builder) { - if !c.opts.completeFunctionCalls { - snip.WriteText(name) - return - } - - // If there is no suffix then we need to reuse existing call parens - // "()" if present. If there is an identifier suffix then we always - // need to include "()" since we don't overwrite the suffix. - if c.surrounding != nil && c.surrounding.Suffix() == "" && len(c.path) > 1 { - // If we are the left side (i.e. "Fun") part of a call expression, - // we don't want a snippet since there are already parens present. - switch n := c.path[1].(type) { - case *ast.CallExpr: - // The Lparen != Rparen check detects fudged CallExprs we - // inserted when fixing the AST. In this case, we do still need - // to insert the calling "()" parens. - if n.Fun == c.path[0] && n.Lparen != n.Rparen { - return - } - case *ast.SelectorExpr: - if len(c.path) > 2 { - if call, ok := c.path[2].(*ast.CallExpr); ok && call.Fun == c.path[1] && call.Lparen != call.Rparen { - return - } - } - } - } - - snip.WriteText(name) - - if len(tparams) > 0 { - snip.WriteText("[") - if c.opts.placeholders { - for i, tp := range tparams { - if i > 0 { - snip.WriteText(", ") - } - snip.WritePlaceholder(func(b *snippet.Builder) { - b.WriteText(tp) - }) - } - } else { - snip.WritePlaceholder(nil) - } - snip.WriteText("]") - } - - snip.WriteText("(") - - if c.opts.placeholders { - // A placeholder snippet turns "someFun<>" into "someFunc(<*i int*>, *s string*)". - for i, p := range params { - if i > 0 { - snip.WriteText(", ") - } - snip.WritePlaceholder(func(b *snippet.Builder) { - b.WriteText(p) - }) - } - } else { - // A plain snippet turns "someFun<>" into "someFunc(<>)". - if len(params) > 0 { - snip.WritePlaceholder(nil) - } - } - - snip.WriteText(")") -} diff --git a/internal/golangorgx/gopls/golang/completion/snippet/snippet_builder.go b/internal/golangorgx/gopls/golang/completion/snippet/snippet_builder.go deleted file mode 100644 index fa63e8d8324..00000000000 --- a/internal/golangorgx/gopls/golang/completion/snippet/snippet_builder.go +++ /dev/null @@ -1,111 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package snippet implements the specification for the LSP snippet format. -// -// Snippets are "tab stop" templates returned as an optional attribute of LSP -// completion candidates. As the user presses tab, they cycle through a series of -// tab stops defined in the snippet. Each tab stop can optionally have placeholder -// text, which can be pre-selected by editors. For a full description of syntax -// and features, see "Snippet Syntax" at -// https://microsoft.github.io/language-server-protocol/specifications/specification-3-14/#textDocument_completion. -// -// A typical snippet looks like "foo(${1:i int}, ${2:s string})". -package snippet - -import ( - "fmt" - "strings" -) - -// A Builder is used to build an LSP snippet piecemeal. 
-// The zero value is ready to use. Do not copy a non-zero Builder. -type Builder struct { - // currentTabStop is the index of the previous tab stop. The - // next tab stop will be currentTabStop+1. - currentTabStop int - sb strings.Builder -} - -// Escape characters defined in https://microsoft.github.io/language-server-protocol/specifications/specification-3-14/#textDocument_completion under "Grammar". -var replacer = strings.NewReplacer( - `\`, `\\`, - `}`, `\}`, - `$`, `\$`, -) - -func (b *Builder) WriteText(s string) { - replacer.WriteString(&b.sb, s) -} - -func (b *Builder) PrependText(s string) { - rawSnip := b.String() - b.sb.Reset() - b.WriteText(s) - b.sb.WriteString(rawSnip) -} - -func (b *Builder) Write(data []byte) (int, error) { - return b.sb.Write(data) -} - -// WritePlaceholder writes a tab stop and placeholder value to the Builder. -// The callback style allows for creating nested placeholders. To write an -// empty tab stop, provide a nil callback. -func (b *Builder) WritePlaceholder(fn func(*Builder)) { - fmt.Fprintf(&b.sb, "${%d:", b.nextTabStop()) - if fn != nil { - fn(b) - } - b.sb.WriteByte('}') -} - -// WriteFinalTabstop marks where cursor ends up after the user has -// cycled through all the normal tab stops. It defaults to the -// character after the snippet. -func (b *Builder) WriteFinalTabstop() { - fmt.Fprint(&b.sb, "$0") -} - -// In addition to '\', '}', and '$', snippet choices also use '|' and ',' as -// meta characters, so they must be escaped within the choices. -var choiceReplacer = strings.NewReplacer( - `\`, `\\`, - `}`, `\}`, - `$`, `\$`, - `|`, `\|`, - `,`, `\,`, -) - -// WriteChoice writes a tab stop and list of text choices to the Builder. -// The user's editor will prompt the user to choose one of the choices. -func (b *Builder) WriteChoice(choices []string) { - fmt.Fprintf(&b.sb, "${%d|", b.nextTabStop()) - for i, c := range choices { - if i != 0 { - b.sb.WriteByte(',') - } - choiceReplacer.WriteString(&b.sb, c) - } - b.sb.WriteString("|}") -} - -// String returns the built snippet string. -func (b *Builder) String() string { - return b.sb.String() -} - -// Clone returns a copy of b. -func (b *Builder) Clone() *Builder { - var clone Builder - clone.sb.WriteString(b.String()) - return &clone -} - -// nextTabStop returns the next tab stop index for a new placeholder. -func (b *Builder) nextTabStop() int { - // Tab stops start from 1, so increment before returning. - b.currentTabStop++ - return b.currentTabStop -} diff --git a/internal/golangorgx/gopls/golang/completion/statements.go b/internal/golangorgx/gopls/golang/completion/statements.go deleted file mode 100644 index ba2bee69cb5..00000000000 --- a/internal/golangorgx/gopls/golang/completion/statements.go +++ /dev/null @@ -1,420 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package completion - -import ( - "fmt" - "go/ast" - "go/token" - "go/types" - "strings" - - "cuelang.org/go/internal/golangorgx/gopls/cache" - "cuelang.org/go/internal/golangorgx/gopls/golang" - "cuelang.org/go/internal/golangorgx/gopls/golang/completion/snippet" - "cuelang.org/go/internal/golangorgx/gopls/protocol" -) - -// addStatementCandidates adds full statement completion candidates -// appropriate for the current context. 
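
As a concrete, hypothetical usage of the Builder shown above, the sketch below assembles the typical snippet from the package comment. The import path is the internal gopls package being deleted here, so this is only an illustration of the API as documented, not code intended to compile outside that module.

	package main

	import (
		"fmt"

		"cuelang.org/go/internal/golangorgx/gopls/golang/completion/snippet"
	)

	func main() {
		// Build "foo(${1:i int}, ${2:s string})" with the Builder API.
		var b snippet.Builder
		b.WriteText("foo(")
		b.WritePlaceholder(func(b *snippet.Builder) { b.WriteText("i int") })
		b.WriteText(", ")
		b.WritePlaceholder(func(b *snippet.Builder) { b.WriteText("s string") })
		b.WriteText(")")
		fmt.Println(b.String()) // foo(${1:i int}, ${2:s string})
	}
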
-func (c *completer) addStatementCandidates() { - c.addErrCheck() - c.addAssignAppend() - c.addReturnZeroValues() -} - -// addAssignAppend offers a completion candidate of the form: -// -// someSlice = append(someSlice, ) -// -// It will offer the "append" completion in either of two situations: -// -// 1. Position is in RHS of assign, prefix matches "append", and -// corresponding LHS object is a slice. For example, -// "foo = ap<>" completes to "foo = append(foo, )". -// -// 2. Prefix is an ident or selector in an *ast.ExprStmt (i.e. -// beginning of statement), and our best matching candidate is a -// slice. For example: "foo.ba" completes to "foo.bar = append(foo.bar, )". -func (c *completer) addAssignAppend() { - if len(c.path) < 3 { - return - } - - ident, _ := c.path[0].(*ast.Ident) - if ident == nil { - return - } - - var ( - // sliceText is the full name of our slice object, e.g. "s.abc" in - // "s.abc = app<>". - sliceText string - // needsLHS is true if we need to prepend the LHS slice name and - // "=" to our candidate. - needsLHS = false - fset = c.pkg.FileSet() - ) - - switch n := c.path[1].(type) { - case *ast.AssignStmt: - // We are already in an assignment. Make sure our prefix matches "append". - if c.matcher.Score("append") <= 0 { - return - } - - exprIdx := exprAtPos(c.pos, n.Rhs) - if exprIdx == len(n.Rhs) || exprIdx > len(n.Lhs)-1 { - return - } - - lhsType := c.pkg.GetTypesInfo().TypeOf(n.Lhs[exprIdx]) - if lhsType == nil { - return - } - - // Make sure our corresponding LHS object is a slice. - if _, isSlice := lhsType.Underlying().(*types.Slice); !isSlice { - return - } - - // The name or our slice is whatever's in the LHS expression. - sliceText = golang.FormatNode(fset, n.Lhs[exprIdx]) - case *ast.SelectorExpr: - // Make sure we are a selector at the beginning of a statement. - if _, parentIsExprtStmt := c.path[2].(*ast.ExprStmt); !parentIsExprtStmt { - return - } - - // So far we only know the first part of our slice name. For - // example in "s.a<>" we only know our slice begins with "s." - // since the user could still be typing. - sliceText = golang.FormatNode(fset, n.X) + "." - needsLHS = true - case *ast.ExprStmt: - needsLHS = true - default: - return - } - - var ( - label string - snip snippet.Builder - score = highScore - ) - - if needsLHS { - // Offer the long form assign + append candidate if our best - // candidate is a slice. - bestItem := c.topCandidate() - if bestItem == nil || !bestItem.isSlice { - return - } - - // Don't rank the full form assign + append candidate above the - // slice itself. - score = bestItem.Score - 0.01 - - // Fill in rest of sliceText now that we have the object name. - sliceText += bestItem.Label - - // Fill in the candidate's LHS bits. - label = fmt.Sprintf("%s = ", bestItem.Label) - snip.WriteText(label) - } - - snip.WriteText(fmt.Sprintf("append(%s, ", sliceText)) - snip.WritePlaceholder(nil) - snip.WriteText(")") - - c.items = append(c.items, CompletionItem{ - Label: label + fmt.Sprintf("append(%s, )", sliceText), - Kind: protocol.FunctionCompletion, - Score: score, - snippet: &snip, - }) -} - -// topCandidate returns the strictly highest scoring candidate -// collected so far. If the top two candidates have the same score, -// nil is returned. 
-func (c *completer) topCandidate() *CompletionItem { - var bestItem, secondBestItem *CompletionItem - for i := range c.items { - if bestItem == nil || c.items[i].Score > bestItem.Score { - bestItem = &c.items[i] - } else if secondBestItem == nil || c.items[i].Score > secondBestItem.Score { - secondBestItem = &c.items[i] - } - } - - // If secondBestItem has the same score, bestItem isn't - // the strict best. - if secondBestItem != nil && secondBestItem.Score == bestItem.Score { - return nil - } - - return bestItem -} - -// addErrCheck offers a completion candidate of the form: -// -// if err != nil { -// return nil, err -// } -// -// In the case of test functions, it offers a completion candidate of the form: -// -// if err != nil { -// t.Fatal(err) -// } -// -// The position must be in a function that returns an error, and the -// statement preceding the position must be an assignment where the -// final LHS object is an error. addErrCheck will synthesize -// zero values as necessary to make the return statement valid. -func (c *completer) addErrCheck() { - if len(c.path) < 2 || c.enclosingFunc == nil || !c.opts.placeholders { - return - } - - var ( - errorType = types.Universe.Lookup("error").Type() - result = c.enclosingFunc.sig.Results() - testVar = getTestVar(c.enclosingFunc, c.pkg) - isTest = testVar != "" - doesNotReturnErr = result.Len() == 0 || !types.Identical(result.At(result.Len()-1).Type(), errorType) - ) - // Make sure our enclosing function is a Test func or returns an error. - if !isTest && doesNotReturnErr { - return - } - - prevLine := prevStmt(c.pos, c.path) - if prevLine == nil { - return - } - - // Make sure our preceding statement was as assignment. - assign, _ := prevLine.(*ast.AssignStmt) - if assign == nil || len(assign.Lhs) == 0 { - return - } - - lastAssignee := assign.Lhs[len(assign.Lhs)-1] - - // Make sure the final assignee is an error. - if !types.Identical(c.pkg.GetTypesInfo().TypeOf(lastAssignee), errorType) { - return - } - - var ( - // errVar is e.g. "err" in "foo, err := bar()". - errVar = golang.FormatNode(c.pkg.FileSet(), lastAssignee) - - // Whether we need to include the "if" keyword in our candidate. - needsIf = true - ) - - // If the returned error from the previous statement is "_", it is not a real object. - // If we don't have an error, and the function signature takes a testing.TB that is either ignored - // or an "_", then we also can't call t.Fatal(err). - if errVar == "_" { - return - } - - // Below we try to detect if the user has already started typing "if - // err" so we can replace what they've typed with our complete - // statement. - switch n := c.path[0].(type) { - case *ast.Ident: - switch c.path[1].(type) { - case *ast.ExprStmt: - // This handles: - // - // f, err := os.Open("foo") - // i<> - - // Make sure they are typing "if". - if c.matcher.Score("if") <= 0 { - return - } - case *ast.IfStmt: - // This handles: - // - // f, err := os.Open("foo") - // if er<> - - // Make sure they are typing the error's name. - if c.matcher.Score(errVar) <= 0 { - return - } - - needsIf = false - default: - return - } - case *ast.IfStmt: - // This handles: - // - // f, err := os.Open("foo") - // if <> - - // Avoid false positives by ensuring the if's cond is a bad - // expression. For example, don't offer the completion in cases - // like "if <> somethingElse". - if _, bad := n.Cond.(*ast.BadExpr); !bad { - return - } - - // If "if" is our direct prefix, we need to include it in our - // candidate since the existing "if" will be overwritten. 
- needsIf = c.pos == n.Pos()+token.Pos(len("if")) - } - - // Build up a snippet that looks like: - // - // if err != nil { - // return , ..., ${1:err} - // } - // - // We make the error a placeholder so it is easy to alter the error. - var snip snippet.Builder - if needsIf { - snip.WriteText("if ") - } - snip.WriteText(fmt.Sprintf("%s != nil {\n\t", errVar)) - - var label string - if isTest { - snip.WriteText(fmt.Sprintf("%s.Fatal(%s)", testVar, errVar)) - label = fmt.Sprintf("%[1]s != nil { %[2]s.Fatal(%[1]s) }", errVar, testVar) - } else { - snip.WriteText("return ") - for i := 0; i < result.Len()-1; i++ { - snip.WriteText(formatZeroValue(result.At(i).Type(), c.qf)) - snip.WriteText(", ") - } - snip.WritePlaceholder(func(b *snippet.Builder) { - b.WriteText(errVar) - }) - label = fmt.Sprintf("%[1]s != nil { return %[1]s }", errVar) - } - - snip.WriteText("\n}") - - if needsIf { - label = "if " + label - } - - c.items = append(c.items, CompletionItem{ - Label: label, - Kind: protocol.SnippetCompletion, - Score: highScore, - snippet: &snip, - }) -} - -// getTestVar checks the function signature's input parameters and returns -// the name of the first parameter that implements "testing.TB". For example, -// func someFunc(t *testing.T) returns the string "t", func someFunc(b *testing.B) -// returns "b" etc. An empty string indicates that the function signature -// does not take a testing.TB parameter or does so but is ignored such -// as func someFunc(*testing.T). -func getTestVar(enclosingFunc *funcInfo, pkg *cache.Package) string { - if enclosingFunc == nil || enclosingFunc.sig == nil { - return "" - } - - var testingPkg *types.Package - for _, p := range pkg.GetTypes().Imports() { - if p.Path() == "testing" { - testingPkg = p - break - } - } - if testingPkg == nil { - return "" - } - tbObj := testingPkg.Scope().Lookup("TB") - if tbObj == nil { - return "" - } - iface, ok := tbObj.Type().Underlying().(*types.Interface) - if !ok { - return "" - } - - sig := enclosingFunc.sig - for i := 0; i < sig.Params().Len(); i++ { - param := sig.Params().At(i) - if param.Name() == "_" { - continue - } - if !types.Implements(param.Type(), iface) { - continue - } - return param.Name() - } - - return "" -} - -// addReturnZeroValues offers a snippet candidate on the form: -// -// return 0, "", nil -// -// Requires a partially or fully written return keyword at position. -// Requires current position to be in a function with more than -// zero return parameters. -func (c *completer) addReturnZeroValues() { - if len(c.path) < 2 || c.enclosingFunc == nil || !c.opts.placeholders { - return - } - result := c.enclosingFunc.sig.Results() - if result.Len() == 0 { - return - } - - // Offer just less than we expect from return as a keyword. - var score = stdScore - 0.01 - switch c.path[0].(type) { - case *ast.ReturnStmt, *ast.Ident: - f := c.matcher.Score("return") - if f <= 0 { - return - } - score *= float64(f) - default: - return - } - - // The snippet will have a placeholder over each return value. - // The label will not. 
- var snip snippet.Builder - var label strings.Builder - snip.WriteText("return ") - fmt.Fprintf(&label, "return ") - - for i := 0; i < result.Len(); i++ { - if i > 0 { - snip.WriteText(", ") - fmt.Fprintf(&label, ", ") - } - - zero := formatZeroValue(result.At(i).Type(), c.qf) - snip.WritePlaceholder(func(b *snippet.Builder) { - b.WriteText(zero) - }) - fmt.Fprintf(&label, zero) - } - - c.items = append(c.items, CompletionItem{ - Label: label.String(), - Kind: protocol.SnippetCompletion, - Score: score, - snippet: &snip, - }) -} diff --git a/internal/golangorgx/gopls/golang/completion/util.go b/internal/golangorgx/gopls/golang/completion/util.go deleted file mode 100644 index 68f8dc95157..00000000000 --- a/internal/golangorgx/gopls/golang/completion/util.go +++ /dev/null @@ -1,343 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package completion - -import ( - "go/ast" - "go/token" - "go/types" - - "cuelang.org/go/internal/golangorgx/gopls/golang" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/util/safetoken" - "cuelang.org/go/internal/golangorgx/tools/diff" - "golang.org/x/tools/go/types/typeutil" -) - -// exprAtPos returns the index of the expression containing pos. -func exprAtPos(pos token.Pos, args []ast.Expr) int { - for i, expr := range args { - if expr.Pos() <= pos && pos <= expr.End() { - return i - } - } - return len(args) -} - -// eachField invokes fn for each field that can be selected from a -// value of type T. -func eachField(T types.Type, fn func(*types.Var)) { - // TODO(adonovan): this algorithm doesn't exclude ambiguous - // selections that match more than one field/method. - // types.NewSelectionSet should do that for us. - - // for termination on recursive types - var seen typeutil.Map - - var visit func(T types.Type) - visit = func(T types.Type) { - if T, ok := golang.Deref(T).Underlying().(*types.Struct); ok { - if seen.At(T) != nil { - return - } - - for i := 0; i < T.NumFields(); i++ { - f := T.Field(i) - fn(f) - if f.Anonymous() { - seen.Set(T, true) - visit(f.Type()) - } - } - } - } - visit(T) -} - -// typeIsValid reports whether typ doesn't contain any Invalid types. -func typeIsValid(typ types.Type) bool { - // Check named types separately, because we don't want - // to call Underlying() on them to avoid problems with recursive types. - if _, ok := typ.(*types.Named); ok { - return true - } - - switch typ := typ.Underlying().(type) { - case *types.Basic: - return typ.Kind() != types.Invalid - case *types.Array: - return typeIsValid(typ.Elem()) - case *types.Slice: - return typeIsValid(typ.Elem()) - case *types.Pointer: - return typeIsValid(typ.Elem()) - case *types.Map: - return typeIsValid(typ.Key()) && typeIsValid(typ.Elem()) - case *types.Chan: - return typeIsValid(typ.Elem()) - case *types.Signature: - return typeIsValid(typ.Params()) && typeIsValid(typ.Results()) - case *types.Tuple: - for i := 0; i < typ.Len(); i++ { - if !typeIsValid(typ.At(i).Type()) { - return false - } - } - return true - case *types.Struct, *types.Interface: - // Don't bother checking structs, interfaces for validity. - return true - default: - return false - } -} - -// resolveInvalid traverses the node of the AST that defines the scope -// containing the declaration of obj, and attempts to find a user-friendly -// name for its invalid type. The resulting Object and its Type are fake. 
-func resolveInvalid(fset *token.FileSet, obj types.Object, node ast.Node, info *types.Info) types.Object { - var resultExpr ast.Expr - ast.Inspect(node, func(node ast.Node) bool { - switch n := node.(type) { - case *ast.ValueSpec: - for _, name := range n.Names { - if info.Defs[name] == obj { - resultExpr = n.Type - } - } - return false - case *ast.Field: // This case handles parameters and results of a FuncDecl or FuncLit. - for _, name := range n.Names { - if info.Defs[name] == obj { - resultExpr = n.Type - } - } - return false - default: - return true - } - }) - // Construct a fake type for the object and return a fake object with this type. - typename := golang.FormatNode(fset, resultExpr) - typ := types.NewNamed(types.NewTypeName(token.NoPos, obj.Pkg(), typename, nil), types.Typ[types.Invalid], nil) - return types.NewVar(obj.Pos(), obj.Pkg(), obj.Name(), typ) -} - -func isPointer(T types.Type) bool { - _, ok := T.(*types.Pointer) - return ok -} - -func isVar(obj types.Object) bool { - _, ok := obj.(*types.Var) - return ok -} - -func isTypeName(obj types.Object) bool { - _, ok := obj.(*types.TypeName) - return ok -} - -func isFunc(obj types.Object) bool { - _, ok := obj.(*types.Func) - return ok -} - -func isEmptyInterface(T types.Type) bool { - intf, _ := T.(*types.Interface) - return intf != nil && intf.NumMethods() == 0 && intf.IsMethodSet() -} - -func isUntyped(T types.Type) bool { - if basic, ok := T.(*types.Basic); ok { - return basic.Info()&types.IsUntyped > 0 - } - return false -} - -func isPkgName(obj types.Object) bool { - _, ok := obj.(*types.PkgName) - return ok -} - -func isASTFile(n ast.Node) bool { - _, ok := n.(*ast.File) - return ok -} - -func deslice(T types.Type) types.Type { - if slice, ok := T.Underlying().(*types.Slice); ok { - return slice.Elem() - } - return nil -} - -// isSelector returns the enclosing *ast.SelectorExpr when pos is in the -// selector. -func enclosingSelector(path []ast.Node, pos token.Pos) *ast.SelectorExpr { - if len(path) == 0 { - return nil - } - - if sel, ok := path[0].(*ast.SelectorExpr); ok { - return sel - } - - if _, ok := path[0].(*ast.Ident); ok && len(path) > 1 { - if sel, ok := path[1].(*ast.SelectorExpr); ok && pos >= sel.Sel.Pos() { - return sel - } - } - - return nil -} - -// enclosingDeclLHS returns LHS idents from containing value spec or -// assign statement. -func enclosingDeclLHS(path []ast.Node) []*ast.Ident { - for _, n := range path { - switch n := n.(type) { - case *ast.ValueSpec: - return n.Names - case *ast.AssignStmt: - ids := make([]*ast.Ident, 0, len(n.Lhs)) - for _, e := range n.Lhs { - if id, ok := e.(*ast.Ident); ok { - ids = append(ids, id) - } - } - return ids - } - } - - return nil -} - -// exprObj returns the types.Object associated with the *ast.Ident or -// *ast.SelectorExpr e. -func exprObj(info *types.Info, e ast.Expr) types.Object { - var ident *ast.Ident - switch expr := e.(type) { - case *ast.Ident: - ident = expr - case *ast.SelectorExpr: - ident = expr.Sel - default: - return nil - } - - return info.ObjectOf(ident) -} - -// typeConversion returns the type being converted to if call is a type -// conversion expression. -func typeConversion(call *ast.CallExpr, info *types.Info) types.Type { - // Type conversion (e.g. "float64(foo)"). - if fun, _ := exprObj(info, call.Fun).(*types.TypeName); fun != nil { - return fun.Type() - } - - return nil -} - -// fieldsAccessible returns whether s has at least one field accessible by p. 
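
typeConversion above relies on a go/types detail: a call whose Fun resolves to a *types.TypeName is a conversion, not a function call. The self-contained sketch below demonstrates that check by type-checking a tiny synthetic file; the source string and names are made up for illustration.

	package main

	import (
		"fmt"
		"go/ast"
		"go/parser"
		"go/token"
		"go/types"
	)

	const src = `package p
	func f(x int) float64 { return float64(x) }`

	func main() {
		fset := token.NewFileSet()
		file, err := parser.ParseFile(fset, "p.go", src, 0)
		if err != nil {
			panic(err)
		}
		// No importer is needed: the synthetic file imports nothing.
		info := &types.Info{Uses: make(map[*ast.Ident]types.Object)}
		if _, err := (&types.Config{}).Check("p", fset, []*ast.File{file}, info); err != nil {
			panic(err)
		}
		ast.Inspect(file, func(n ast.Node) bool {
			call, ok := n.(*ast.CallExpr)
			if !ok {
				return true
			}
			// Same test as exprObj/typeConversion: a *types.TypeName in
			// Fun's position marks a conversion.
			if id, ok := call.Fun.(*ast.Ident); ok {
				if tn, ok := info.Uses[id].(*types.TypeName); ok {
					fmt.Println("conversion to", tn.Type()) // conversion to float64
				}
			}
			return true
		})
	}
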
-func fieldsAccessible(s *types.Struct, p *types.Package) bool { - for i := 0; i < s.NumFields(); i++ { - f := s.Field(i) - if f.Exported() || f.Pkg() == p { - return true - } - } - return false -} - -// prevStmt returns the statement that precedes the statement containing pos. -// For example: -// -// foo := 1 -// bar(1 + 2<>) -// -// If "<>" is pos, prevStmt returns "foo := 1" -func prevStmt(pos token.Pos, path []ast.Node) ast.Stmt { - var blockLines []ast.Stmt - for i := 0; i < len(path) && blockLines == nil; i++ { - switch n := path[i].(type) { - case *ast.BlockStmt: - blockLines = n.List - case *ast.CommClause: - blockLines = n.Body - case *ast.CaseClause: - blockLines = n.Body - } - } - - for i := len(blockLines) - 1; i >= 0; i-- { - if blockLines[i].End() < pos { - return blockLines[i] - } - } - - return nil -} - -// formatZeroValue produces Go code representing the zero value of T. It -// returns the empty string if T is invalid. -func formatZeroValue(T types.Type, qf types.Qualifier) string { - switch u := T.Underlying().(type) { - case *types.Basic: - switch { - case u.Info()&types.IsNumeric > 0: - return "0" - case u.Info()&types.IsString > 0: - return `""` - case u.Info()&types.IsBoolean > 0: - return "false" - default: - return "" - } - case *types.Pointer, *types.Interface, *types.Chan, *types.Map, *types.Slice, *types.Signature: - return "nil" - default: - return types.TypeString(T, qf) + "{}" - } -} - -// isBasicKind returns whether t is a basic type of kind k. -func isBasicKind(t types.Type, k types.BasicInfo) bool { - b, _ := t.Underlying().(*types.Basic) - return b != nil && b.Info()&k > 0 -} - -func (c *completer) editText(from, to token.Pos, newText string) ([]protocol.TextEdit, error) { - start, end, err := safetoken.Offsets(c.tokFile, from, to) - if err != nil { - return nil, err // can't happen: from/to came from c - } - return protocol.EditsFromDiffEdits(c.mapper, []diff.Edit{{ - Start: start, - End: end, - New: newText, - }}) -} - -// assignableTo is like types.AssignableTo, but returns false if -// either type is invalid. -func assignableTo(x, to types.Type) bool { - if x == types.Typ[types.Invalid] || to == types.Typ[types.Invalid] { - return false - } - - return types.AssignableTo(x, to) -} - -// convertibleTo is like types.ConvertibleTo, but returns false if -// either type is invalid. -func convertibleTo(x, to types.Type) bool { - if x == types.Typ[types.Invalid] || to == types.Typ[types.Invalid] { - return false - } - - return types.ConvertibleTo(x, to) -} diff --git a/internal/golangorgx/gopls/golang/definition.go b/internal/golangorgx/gopls/golang/definition.go deleted file mode 100644 index c5b1a7d4c9c..00000000000 --- a/internal/golangorgx/gopls/golang/definition.go +++ /dev/null @@ -1,312 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package golang - -import ( - "context" - "errors" - "fmt" - "go/ast" - "go/parser" - "go/token" - "go/types" - - "cuelang.org/go/internal/golangorgx/gopls/cache" - "cuelang.org/go/internal/golangorgx/gopls/cache/metadata" - "cuelang.org/go/internal/golangorgx/gopls/cache/parsego" - "cuelang.org/go/internal/golangorgx/gopls/file" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/util/bug" - "cuelang.org/go/internal/golangorgx/tools/event" -) - -// Definition handles the textDocument/definition request for Go files. 
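
To connect formatZeroValue above with the return-statement candidates in statements.go: for a function whose results are (int, string, error), the synthesized completion is `return 0, "", nil`. The sketch below is a trimmed copy of that idea using go/types directly; zero here is an illustrative helper, not the deleted function.

	package main

	import (
		"fmt"
		"go/types"
		"strings"
	)

	// zero is a simplified zero-value formatter in the spirit of
	// formatZeroValue; it ignores qualifiers and named-type niceties.
	func zero(t types.Type) string {
		switch u := t.Underlying().(type) {
		case *types.Basic:
			switch {
			case u.Info()&types.IsNumeric != 0:
				return "0"
			case u.Info()&types.IsString != 0:
				return `""`
			case u.Info()&types.IsBoolean != 0:
				return "false"
			}
		case *types.Pointer, *types.Slice, *types.Map, *types.Chan, *types.Interface, *types.Signature:
			return "nil"
		}
		return types.TypeString(t, nil) + "{}"
	}

	func main() {
		results := []types.Type{
			types.Typ[types.Int],
			types.Typ[types.String],
			types.Universe.Lookup("error").Type(),
		}
		parts := make([]string, len(results))
		for i, t := range results {
			parts[i] = zero(t)
		}
		fmt.Println("return " + strings.Join(parts, ", ")) // return 0, "", nil
	}
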
-func Definition(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, position protocol.Position) ([]protocol.Location, error) { - ctx, done := event.Start(ctx, "golang.Definition") - defer done() - - pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) - if err != nil { - return nil, err - } - pos, err := pgf.PositionPos(position) - if err != nil { - return nil, err - } - - // Handle the case where the cursor is in an import. - importLocations, err := importDefinition(ctx, snapshot, pkg, pgf, pos) - if err != nil { - return nil, err - } - if len(importLocations) > 0 { - return importLocations, nil - } - - // Handle the case where the cursor is in the package name. - // We use "<= End" to accept a query immediately after the package name. - if pgf.File != nil && pgf.File.Name.Pos() <= pos && pos <= pgf.File.Name.End() { - // If there's no package documentation, just use current file. - declFile := pgf - for _, pgf := range pkg.CompiledGoFiles() { - if pgf.File.Name != nil && pgf.File.Doc != nil { - declFile = pgf - break - } - } - loc, err := declFile.NodeLocation(declFile.File.Name) - if err != nil { - return nil, err - } - return []protocol.Location{loc}, nil - } - - // Handle the case where the cursor is in a linkname directive. - locations, err := LinknameDefinition(ctx, snapshot, pgf.Mapper, position) - if !errors.Is(err, ErrNoLinkname) { - return locations, err - } - - // Handle the case where the cursor is in an embed directive. - locations, err = EmbedDefinition(pgf.Mapper, position) - if !errors.Is(err, ErrNoEmbed) { - return locations, err - } - - // The general case: the cursor is on an identifier. - _, obj, _ := referencedObject(pkg, pgf, pos) - if obj == nil { - return nil, nil - } - - // Handle objects with no position: builtin, unsafe. - if !obj.Pos().IsValid() { - return builtinDefinition(ctx, snapshot, obj) - } - - // Finally, map the object position. - loc, err := mapPosition(ctx, pkg.FileSet(), snapshot, obj.Pos(), adjustedObjEnd(obj)) - if err != nil { - return nil, err - } - return []protocol.Location{loc}, nil -} - -// builtinDefinition returns the location of the fake source -// declaration of a built-in in {builtin,unsafe}.go. -func builtinDefinition(ctx context.Context, snapshot *cache.Snapshot, obj types.Object) ([]protocol.Location, error) { - pgf, decl, err := builtinDecl(ctx, snapshot, obj) - if err != nil { - return nil, err - } - - loc, err := pgf.PosLocation(decl.Pos(), decl.Pos()+token.Pos(len(obj.Name()))) - if err != nil { - return nil, err - } - return []protocol.Location{loc}, nil -} - -// builtinDecl returns the parsed Go file and node corresponding to a builtin -// object, which may be a universe object or part of types.Unsafe. -func builtinDecl(ctx context.Context, snapshot *cache.Snapshot, obj types.Object) (*parsego.File, ast.Node, error) { - // getDecl returns the file-level declaration of name - // using legacy (go/ast) object resolution. - getDecl := func(file *ast.File, name string) (ast.Node, error) { - astObj := file.Scope.Lookup(name) - if astObj == nil { - // Every built-in should have documentation syntax. - // However, it is possible to reach this statement by - // commenting out declarations in {builtin,unsafe}.go. 
- return nil, fmt.Errorf("internal error: no object for %s", name) - } - decl, ok := astObj.Decl.(ast.Node) - if !ok { - return nil, bug.Errorf("internal error: no declaration for %s", obj.Name()) - } - return decl, nil - } - - var ( - pgf *ParsedGoFile - decl ast.Node - err error - ) - if obj.Pkg() == types.Unsafe { - // package "unsafe": - // parse $GOROOT/src/unsafe/unsafe.go - unsafe := snapshot.Metadata("unsafe") - if unsafe == nil { - // If the type checker somehow resolved 'unsafe', we must have metadata - // for it. - return nil, nil, bug.Errorf("no metadata for package 'unsafe'") - } - uri := unsafe.GoFiles[0] - fh, err := snapshot.ReadFile(ctx, uri) - if err != nil { - return nil, nil, err - } - pgf, err = snapshot.ParseGo(ctx, fh, ParseFull&^parser.SkipObjectResolution) - if err != nil { - return nil, nil, err - } - decl, err = getDecl(pgf.File, obj.Name()) - if err != nil { - return nil, nil, err - } - } else { - // pseudo-package "builtin": - // use parsed $GOROOT/src/builtin/builtin.go - pgf, err = snapshot.BuiltinFile(ctx) - if err != nil { - return nil, nil, err - } - - if obj.Parent() == types.Universe { - // built-in function or type - decl, err = getDecl(pgf.File, obj.Name()) - if err != nil { - return nil, nil, err - } - } else if obj.Name() == "Error" { - // error.Error method - decl, err = getDecl(pgf.File, "error") - if err != nil { - return nil, nil, err - } - decl = decl.(*ast.TypeSpec).Type.(*ast.InterfaceType).Methods.List[0] - - } else { - return nil, nil, bug.Errorf("unknown built-in %v", obj) - } - } - return pgf, decl, nil -} - -// referencedObject returns the identifier and object referenced at the -// specified position, which must be within the file pgf, for the purposes of -// definition/hover/call hierarchy operations. It returns a nil object if no -// object was found at the given position. -// -// If the returned identifier is a type-switch implicit (i.e. the x in x := -// e.(type)), the third result will be the type of the expression being -// switched on (the type of e in the example). This facilitates workarounds for -// limitations of the go/types API, which does not report an object for the -// identifier x. -// -// For embedded fields, referencedObject returns the type name object rather -// than the var (field) object. -// -// TODO(rfindley): this function exists to preserve the pre-existing behavior -// of golang.Identifier. Eliminate this helper in favor of sharing -// functionality with objectsAt, after choosing suitable primitives. -func referencedObject(pkg *cache.Package, pgf *ParsedGoFile, pos token.Pos) (*ast.Ident, types.Object, types.Type) { - path := pathEnclosingObjNode(pgf.File, pos) - if len(path) == 0 { - return nil, nil, nil - } - var obj types.Object - info := pkg.GetTypesInfo() - switch n := path[0].(type) { - case *ast.Ident: - obj = info.ObjectOf(n) - // If n is the var's declaring ident in a type switch - // [i.e. the x in x := foo.(type)], it will not have an object. In this - // case, set obj to the first implicit object (if any), and return the type - // of the expression being switched on. - // - // The type switch may have no case clauses and thus no - // implicit objects; this is a type error ("unused x"), - if obj == nil { - if implicits, typ := typeSwitchImplicits(info, path); len(implicits) > 0 { - return n, implicits[0], typ - } - } - - // If the original position was an embedded field, we want to jump - // to the field's type definition, not the field's definition. 
- if v, ok := obj.(*types.Var); ok && v.Embedded() { - // types.Info.Uses contains the embedded field's *types.TypeName. - if typeName := info.Uses[n]; typeName != nil { - obj = typeName - } - } - return n, obj, nil - } - return nil, nil, nil -} - -// importDefinition returns locations defining a package referenced by the -// import spec containing pos. -// -// If pos is not inside an import spec, it returns nil, nil. -func importDefinition(ctx context.Context, s *cache.Snapshot, pkg *cache.Package, pgf *ParsedGoFile, pos token.Pos) ([]protocol.Location, error) { - var imp *ast.ImportSpec - for _, spec := range pgf.File.Imports { - // We use "<= End" to accept a query immediately after an ImportSpec. - if spec.Path.Pos() <= pos && pos <= spec.Path.End() { - imp = spec - } - } - if imp == nil { - return nil, nil - } - - importPath := metadata.UnquoteImportPath(imp) - impID := pkg.Metadata().DepsByImpPath[importPath] - if impID == "" { - return nil, fmt.Errorf("failed to resolve import %q", importPath) - } - impMetadata := s.Metadata(impID) - if impMetadata == nil { - return nil, fmt.Errorf("missing information for package %q", impID) - } - - var locs []protocol.Location - for _, f := range impMetadata.CompiledGoFiles { - fh, err := s.ReadFile(ctx, f) - if err != nil { - if ctx.Err() != nil { - return nil, ctx.Err() - } - continue - } - pgf, err := s.ParseGo(ctx, fh, ParseHeader) - if err != nil { - if ctx.Err() != nil { - return nil, ctx.Err() - } - continue - } - loc, err := pgf.NodeLocation(pgf.File) - if err != nil { - return nil, err - } - locs = append(locs, loc) - } - - if len(locs) == 0 { - return nil, fmt.Errorf("package %q has no readable files", impID) // incl. unsafe - } - - return locs, nil -} - -// TODO(rfindley): avoid the duplicate column mapping here, by associating a -// column mapper with each file handle. -func mapPosition(ctx context.Context, fset *token.FileSet, s file.Source, start, end token.Pos) (protocol.Location, error) { - file := fset.File(start) - uri := protocol.URIFromPath(file.Name()) - fh, err := s.ReadFile(ctx, uri) - if err != nil { - return protocol.Location{}, err - } - content, err := fh.Content() - if err != nil { - return protocol.Location{}, err - } - m := protocol.NewMapper(fh.URI(), content) - return m.PosLocation(file, start, end) -} diff --git a/internal/golangorgx/gopls/golang/diagnostics.go b/internal/golangorgx/gopls/golang/diagnostics.go deleted file mode 100644 index d34f25e023f..00000000000 --- a/internal/golangorgx/gopls/golang/diagnostics.go +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package golang - -import ( - "context" - - "cuelang.org/go/internal/golangorgx/gopls/cache" - "cuelang.org/go/internal/golangorgx/gopls/cache/metadata" - "cuelang.org/go/internal/golangorgx/gopls/progress" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/settings" - "cuelang.org/go/internal/golangorgx/gopls/util/maps" -) - -// Analyze reports go/analysis-framework diagnostics in the specified package. -// -// If the provided tracker is non-nil, it may be used to provide notifications -// of the ongoing analysis pass. -func Analyze(ctx context.Context, snapshot *cache.Snapshot, pkgIDs map[PackageID]*metadata.Package, tracker *progress.Tracker) (map[protocol.DocumentURI][]*cache.Diagnostic, error) { - // Exit early if the context has been canceled. 
This also protects us - // from a race on Options, see golang/go#36699. - if ctx.Err() != nil { - return nil, ctx.Err() - } - - options := snapshot.Options() - categories := []map[string]*settings.Analyzer{ - options.DefaultAnalyzers, - options.StaticcheckAnalyzers, - } - - var analyzers []*settings.Analyzer - for _, cat := range categories { - for _, a := range cat { - analyzers = append(analyzers, a) - } - } - - analysisDiagnostics, err := snapshot.Analyze(ctx, pkgIDs, analyzers, tracker) - if err != nil { - return nil, err - } - byURI := func(d *cache.Diagnostic) protocol.DocumentURI { return d.URI } - return maps.Group(analysisDiagnostics, byURI), nil -} diff --git a/internal/golangorgx/gopls/golang/embeddirective.go b/internal/golangorgx/gopls/golang/embeddirective.go deleted file mode 100644 index bb8a59d521e..00000000000 --- a/internal/golangorgx/gopls/golang/embeddirective.go +++ /dev/null @@ -1,195 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package golang - -import ( - "errors" - "fmt" - "io/fs" - "path/filepath" - "strconv" - "strings" - "unicode" - "unicode/utf8" - - "cuelang.org/go/internal/golangorgx/gopls/protocol" -) - -// ErrNoEmbed is returned by EmbedDefinition when no embed -// directive is found at a particular position. -// As such it indicates that other definitions could be worth checking. -var ErrNoEmbed = errors.New("no embed directive found") - -var errStopWalk = errors.New("stop walk") - -// EmbedDefinition finds a file matching the embed directive at pos in the mapped file. -// If there is no embed directive at pos, returns ErrNoEmbed. -// If multiple files match the embed pattern, one is picked at random. -func EmbedDefinition(m *protocol.Mapper, pos protocol.Position) ([]protocol.Location, error) { - pattern, _ := parseEmbedDirective(m, pos) - if pattern == "" { - return nil, ErrNoEmbed - } - - // Find the first matching file. - var match string - dir := filepath.Dir(m.URI.Path()) - err := filepath.WalkDir(dir, func(abs string, d fs.DirEntry, e error) error { - if e != nil { - return e - } - rel, err := filepath.Rel(dir, abs) - if err != nil { - return err - } - ok, err := filepath.Match(pattern, rel) - if err != nil { - return err - } - if ok && !d.IsDir() { - match = abs - return errStopWalk - } - return nil - }) - if err != nil && !errors.Is(err, errStopWalk) { - return nil, err - } - if match == "" { - return nil, fmt.Errorf("%q does not match any files in %q", pattern, dir) - } - - loc := protocol.Location{ - URI: protocol.URIFromPath(match), - Range: protocol.Range{ - Start: protocol.Position{Line: 0, Character: 0}, - }, - } - return []protocol.Location{loc}, nil -} - -// parseEmbedDirective attempts to parse a go:embed directive argument at pos. -// If successful it return the directive argument and its range, else zero values are returned. 
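The walk in EmbedDefinition ultimately reduces to filepath.Match over paths relative to the directive's directory. A tiny illustrative sketch of that matching step (the pattern and paths below are hypothetical, not taken from the deleted file):

package main

import (
	"fmt"
	"path/filepath"
)

func main() {
	pattern := "testdata/*.json" // hypothetical //go:embed pattern

	for _, rel := range []string{"testdata/a.json", "testdata/a.txt", "nested/dir/a.json"} {
		ok, err := filepath.Match(pattern, rel)
		fmt.Printf("%-20s matched=%v err=%v\n", rel, ok, err)
	}
	// Only testdata/a.json matches: '*' matches a sequence of non-separator
	// characters, which is why the walk compares whole relative paths.
}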
-func parseEmbedDirective(m *protocol.Mapper, pos protocol.Position) (string, protocol.Range) { - lineStart, err := m.PositionOffset(protocol.Position{Line: pos.Line, Character: 0}) - if err != nil { - return "", protocol.Range{} - } - lineEnd, err := m.PositionOffset(protocol.Position{Line: pos.Line + 1, Character: 0}) - if err != nil { - return "", protocol.Range{} - } - - text := string(m.Content[lineStart:lineEnd]) - if !strings.HasPrefix(text, "//go:embed") { - return "", protocol.Range{} - } - text = text[len("//go:embed"):] - offset := lineStart + len("//go:embed") - - // Find the first pattern in text that covers the offset of the pos we are looking for. - findOffset, err := m.PositionOffset(pos) - if err != nil { - return "", protocol.Range{} - } - patterns, err := parseGoEmbed(text, offset) - if err != nil { - return "", protocol.Range{} - } - for _, p := range patterns { - if p.startOffset <= findOffset && findOffset <= p.endOffset { - // Found our match. - rng, err := m.OffsetRange(p.startOffset, p.endOffset) - if err != nil { - return "", protocol.Range{} - } - return p.pattern, rng - } - } - - return "", protocol.Range{} -} - -type fileEmbed struct { - pattern string - startOffset int - endOffset int -} - -// parseGoEmbed patterns that come after the directive. -// -// Copied and adapted from go/build/read.go. -// Replaced token.Position with start/end offset (including quotes if present). -func parseGoEmbed(args string, offset int) ([]fileEmbed, error) { - trimBytes := func(n int) { - offset += n - args = args[n:] - } - trimSpace := func() { - trim := strings.TrimLeftFunc(args, unicode.IsSpace) - trimBytes(len(args) - len(trim)) - } - - var list []fileEmbed - for trimSpace(); args != ""; trimSpace() { - var path string - pathOffset := offset - Switch: - switch args[0] { - default: - i := len(args) - for j, c := range args { - if unicode.IsSpace(c) { - i = j - break - } - } - path = args[:i] - trimBytes(i) - - case '`': - var ok bool - path, _, ok = strings.Cut(args[1:], "`") - if !ok { - return nil, fmt.Errorf("invalid quoted string in //go:embed: %s", args) - } - trimBytes(1 + len(path) + 1) - - case '"': - i := 1 - for ; i < len(args); i++ { - if args[i] == '\\' { - i++ - continue - } - if args[i] == '"' { - q, err := strconv.Unquote(args[:i+1]) - if err != nil { - return nil, fmt.Errorf("invalid quoted string in //go:embed: %s", args[:i+1]) - } - path = q - trimBytes(i + 1) - break Switch - } - } - if i >= len(args) { - return nil, fmt.Errorf("invalid quoted string in //go:embed: %s", args) - } - } - - if args != "" { - r, _ := utf8.DecodeRuneInString(args) - if !unicode.IsSpace(r) { - return nil, fmt.Errorf("invalid quoted string in //go:embed: %s", args) - } - } - list = append(list, fileEmbed{ - pattern: path, - startOffset: pathOffset, - endOffset: offset, - }) - } - return list, nil -} diff --git a/internal/golangorgx/gopls/golang/extract.go b/internal/golangorgx/gopls/golang/extract.go deleted file mode 100644 index b46a48afce8..00000000000 --- a/internal/golangorgx/gopls/golang/extract.go +++ /dev/null @@ -1,1361 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package golang - -import ( - "bytes" - "fmt" - "go/ast" - "go/format" - "go/parser" - "go/token" - "go/types" - "sort" - "strings" - "text/scanner" - - "cuelang.org/go/internal/golangorgx/gopls/util/bug" - "cuelang.org/go/internal/golangorgx/gopls/util/safetoken" - "cuelang.org/go/internal/golangorgx/tools/analysisinternal" - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/ast/astutil" -) - -func extractVariable(fset *token.FileSet, start, end token.Pos, src []byte, file *ast.File, pkg *types.Package, info *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) { - tokFile := fset.File(file.Pos()) - expr, path, ok, err := CanExtractVariable(start, end, file) - if !ok { - return nil, nil, fmt.Errorf("extractVariable: cannot extract %s: %v", safetoken.StartPosition(fset, start), err) - } - - // Create new AST node for extracted code. - var lhsNames []string - switch expr := expr.(type) { - // TODO: stricter rules for selectorExpr. - case *ast.BasicLit, *ast.CompositeLit, *ast.IndexExpr, *ast.SliceExpr, - *ast.UnaryExpr, *ast.BinaryExpr, *ast.SelectorExpr: - lhsName, _ := generateAvailableIdentifier(expr.Pos(), path, pkg, info, "x", 0) - lhsNames = append(lhsNames, lhsName) - case *ast.CallExpr: - tup, ok := info.TypeOf(expr).(*types.Tuple) - if !ok { - // If the call expression only has one return value, we can treat it the - // same as our standard extract variable case. - lhsName, _ := generateAvailableIdentifier(expr.Pos(), path, pkg, info, "x", 0) - lhsNames = append(lhsNames, lhsName) - break - } - idx := 0 - for i := 0; i < tup.Len(); i++ { - // Generate a unique variable for each return value. - var lhsName string - lhsName, idx = generateAvailableIdentifier(expr.Pos(), path, pkg, info, "x", idx) - lhsNames = append(lhsNames, lhsName) - } - default: - return nil, nil, fmt.Errorf("cannot extract %T", expr) - } - - insertBeforeStmt := analysisinternal.StmtToInsertVarBefore(path) - if insertBeforeStmt == nil { - return nil, nil, fmt.Errorf("cannot find location to insert extraction") - } - indent, err := calculateIndentation(src, tokFile, insertBeforeStmt) - if err != nil { - return nil, nil, err - } - newLineIndent := "\n" + indent - - lhs := strings.Join(lhsNames, ", ") - assignStmt := &ast.AssignStmt{ - Lhs: []ast.Expr{ast.NewIdent(lhs)}, - Tok: token.DEFINE, - Rhs: []ast.Expr{expr}, - } - var buf bytes.Buffer - if err := format.Node(&buf, fset, assignStmt); err != nil { - return nil, nil, err - } - assignment := strings.ReplaceAll(buf.String(), "\n", newLineIndent) + newLineIndent - - return fset, &analysis.SuggestedFix{ - TextEdits: []analysis.TextEdit{ - { - Pos: insertBeforeStmt.Pos(), - End: insertBeforeStmt.Pos(), - NewText: []byte(assignment), - }, - { - Pos: start, - End: end, - NewText: []byte(lhs), - }, - }, - }, nil -} - -// CanExtractVariable reports whether the code in the given range can be -// extracted to a variable. 
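The key primitive behind CanExtractVariable (and CanExtractFunction further down) is astutil.PathEnclosingInterval, which returns the chain of enclosing AST nodes, innermost first, together with whether the interval maps exactly onto a node. A small self-contained sketch, using a made-up function f and a pretend selection of "b*2":

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"

	"golang.org/x/tools/go/ast/astutil"
)

func main() {
	// Made-up source; pretend the user selected the expression "b*2".
	const src = `package p
func f(a, b int) int { return a + b*2 }`

	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}

	// Locate the pretend selection in the parsed file.
	var sel ast.Expr
	ast.Inspect(f, func(n ast.Node) bool {
		if be, ok := n.(*ast.BinaryExpr); ok && be.Op == token.MUL {
			sel = be
		}
		return true
	})

	path, exact := astutil.PathEnclosingInterval(f, sel.Pos(), sel.End())
	fmt.Println("exact:", exact) // true: the range maps onto one expression
	for _, n := range path {
		fmt.Printf("  %T\n", n) // *ast.BinaryExpr, *ast.BinaryExpr, *ast.ReturnStmt, ...
	}
}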
-func CanExtractVariable(start, end token.Pos, file *ast.File) (ast.Expr, []ast.Node, bool, error) { - if start == end { - return nil, nil, false, fmt.Errorf("start and end are equal") - } - path, _ := astutil.PathEnclosingInterval(file, start, end) - if len(path) == 0 { - return nil, nil, false, fmt.Errorf("no path enclosing interval") - } - for _, n := range path { - if _, ok := n.(*ast.ImportSpec); ok { - return nil, nil, false, fmt.Errorf("cannot extract variable in an import block") - } - } - node := path[0] - if start != node.Pos() || end != node.End() { - return nil, nil, false, fmt.Errorf("range does not map to an AST node") - } - expr, ok := node.(ast.Expr) - if !ok { - return nil, nil, false, fmt.Errorf("node is not an expression") - } - switch expr.(type) { - case *ast.BasicLit, *ast.CompositeLit, *ast.IndexExpr, *ast.CallExpr, - *ast.SliceExpr, *ast.UnaryExpr, *ast.BinaryExpr, *ast.SelectorExpr: - return expr, path, true, nil - } - return nil, nil, false, fmt.Errorf("cannot extract an %T to a variable", expr) -} - -// Calculate indentation for insertion. -// When inserting lines of code, we must ensure that the lines have consistent -// formatting (i.e. the proper indentation). To do so, we observe the indentation on the -// line of code on which the insertion occurs. -func calculateIndentation(content []byte, tok *token.File, insertBeforeStmt ast.Node) (string, error) { - line := safetoken.Line(tok, insertBeforeStmt.Pos()) - lineOffset, stmtOffset, err := safetoken.Offsets(tok, tok.LineStart(line), insertBeforeStmt.Pos()) - if err != nil { - return "", err - } - return string(content[lineOffset:stmtOffset]), nil -} - -// generateAvailableIdentifier adjusts the new function name until there are no collisions in scope. -// Possible collisions include other function and variable names. Returns the next index to check for prefix. -func generateAvailableIdentifier(pos token.Pos, path []ast.Node, pkg *types.Package, info *types.Info, prefix string, idx int) (string, int) { - scopes := CollectScopes(info, path, pos) - scopes = append(scopes, pkg.Scope()) - return generateIdentifier(idx, prefix, func(name string) bool { - for _, scope := range scopes { - if scope != nil && scope.Lookup(name) != nil { - return true - } - } - return false - }) -} - -func generateIdentifier(idx int, prefix string, hasCollision func(string) bool) (string, int) { - name := prefix - if idx != 0 { - name += fmt.Sprintf("%d", idx) - } - for hasCollision(name) { - idx++ - name = fmt.Sprintf("%v%d", prefix, idx) - } - return name, idx + 1 -} - -// returnVariable keeps track of the information we need to properly introduce a new variable -// that we will return in the extracted function. -type returnVariable struct { - // name is the identifier that is used on the left-hand side of the call to - // the extracted function. - name ast.Expr - // decl is the declaration of the variable. It is used in the type signature of the - // extracted function and for variable declarations. - decl *ast.Field - // zeroVal is the "zero value" of the type of the variable. It is used in a return - // statement in the extracted function. - zeroVal ast.Expr -} - -// extractMethod refactors the selected block of code into a new method. 
-func extractMethod(fset *token.FileSet, start, end token.Pos, src []byte, file *ast.File, pkg *types.Package, info *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) { - return extractFunctionMethod(fset, start, end, src, file, pkg, info, true) -} - -// extractFunction refactors the selected block of code into a new function. -func extractFunction(fset *token.FileSet, start, end token.Pos, src []byte, file *ast.File, pkg *types.Package, info *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) { - return extractFunctionMethod(fset, start, end, src, file, pkg, info, false) -} - -// extractFunctionMethod refactors the selected block of code into a new function/method. -// It also replaces the selected block of code with a call to the extracted -// function. First, we manually adjust the selection range. We remove trailing -// and leading whitespace characters to ensure the range is precisely bounded -// by AST nodes. Next, we determine the variables that will be the parameters -// and return values of the extracted function/method. Lastly, we construct the call -// of the function/method and insert this call as well as the extracted function/method into -// their proper locations. -func extractFunctionMethod(fset *token.FileSet, start, end token.Pos, src []byte, file *ast.File, pkg *types.Package, info *types.Info, isMethod bool) (*token.FileSet, *analysis.SuggestedFix, error) { - errorPrefix := "extractFunction" - if isMethod { - errorPrefix = "extractMethod" - } - - tok := fset.File(file.Pos()) - if tok == nil { - return nil, nil, bug.Errorf("no file for position") - } - p, ok, methodOk, err := CanExtractFunction(tok, start, end, src, file) - if (!ok && !isMethod) || (!methodOk && isMethod) { - return nil, nil, fmt.Errorf("%s: cannot extract %s: %v", errorPrefix, - safetoken.StartPosition(fset, start), err) - } - tok, path, start, end, outer, node := p.tok, p.path, p.start, p.end, p.outer, p.node - fileScope := info.Scopes[file] - if fileScope == nil { - return nil, nil, fmt.Errorf("%s: file scope is empty", errorPrefix) - } - pkgScope := fileScope.Parent() - if pkgScope == nil { - return nil, nil, fmt.Errorf("%s: package scope is empty", errorPrefix) - } - - // A return statement is non-nested if its parent node is equal to the parent node - // of the first node in the selection. These cases must be handled separately because - // non-nested return statements are guaranteed to execute. - var retStmts []*ast.ReturnStmt - var hasNonNestedReturn bool - startParent := findParent(outer, node) - ast.Inspect(outer, func(n ast.Node) bool { - if n == nil { - return false - } - if n.Pos() < start || n.End() > end { - return n.Pos() <= end - } - ret, ok := n.(*ast.ReturnStmt) - if !ok { - return true - } - if findParent(outer, n) == startParent { - hasNonNestedReturn = true - } - retStmts = append(retStmts, ret) - return false - }) - containsReturnStatement := len(retStmts) > 0 - - // Now that we have determined the correct range for the selection block, - // we must determine the signature of the extracted function. We will then replace - // the block with an assignment statement that calls the extracted function with - // the appropriate parameters and return values. 
- variables, err := collectFreeVars(info, file, fileScope, pkgScope, start, end, path[0]) - if err != nil { - return nil, nil, err - } - - var ( - receiverUsed bool - receiver *ast.Field - receiverName string - receiverObj types.Object - ) - if isMethod { - if outer == nil || outer.Recv == nil || len(outer.Recv.List) == 0 { - return nil, nil, fmt.Errorf("%s: cannot extract need method receiver", errorPrefix) - } - receiver = outer.Recv.List[0] - if len(receiver.Names) == 0 || receiver.Names[0] == nil { - return nil, nil, fmt.Errorf("%s: cannot extract need method receiver name", errorPrefix) - } - recvName := receiver.Names[0] - receiverName = recvName.Name - receiverObj = info.ObjectOf(recvName) - } - - var ( - params, returns []ast.Expr // used when calling the extracted function - paramTypes, returnTypes []*ast.Field // used in the signature of the extracted function - uninitialized []types.Object // vars we will need to initialize before the call - ) - - // Avoid duplicates while traversing vars and uninitialized. - seenVars := make(map[types.Object]ast.Expr) - seenUninitialized := make(map[types.Object]struct{}) - - // Some variables on the left-hand side of our assignment statement may be free. If our - // selection begins in the same scope in which the free variable is defined, we can - // redefine it in our assignment statement. See the following example, where 'b' and - // 'err' (both free variables) can be redefined in the second funcCall() while maintaining - // correctness. - // - // - // Not Redefined: - // - // a, err := funcCall() - // var b int - // b, err = funcCall() - // - // Redefined: - // - // a, err := funcCall() - // b, err := funcCall() - // - // We track the number of free variables that can be redefined to maintain our preference - // of using "x, y, z := fn()" style assignment statements. - var canRedefineCount int - - // Each identifier in the selected block must become (1) a parameter to the - // extracted function, (2) a return value of the extracted function, or (3) a local - // variable in the extracted function. Determine the outcome(s) for each variable - // based on whether it is free, altered within the selected block, and used outside - // of the selected block. - for _, v := range variables { - if _, ok := seenVars[v.obj]; ok { - continue - } - if v.obj.Name() == "_" { - // The blank identifier is always a local variable - continue - } - typ := analysisinternal.TypeExpr(file, pkg, v.obj.Type()) - if typ == nil { - return nil, nil, fmt.Errorf("nil AST expression for type: %v", v.obj.Name()) - } - seenVars[v.obj] = typ - identifier := ast.NewIdent(v.obj.Name()) - // An identifier must meet three conditions to become a return value of the - // extracted function. (1) its value must be defined or reassigned within - // the selection (isAssigned), (2) it must be used at least once after the - // selection (isUsed), and (3) its first use after the selection - // cannot be its own reassignment or redefinition (objOverriden). 
- vscope := v.obj.Parent() - if vscope == nil { - return nil, nil, fmt.Errorf("parent nil") - } - isUsed, firstUseAfter := objUsed(info, end, vscope.End(), v.obj) - if v.assigned && isUsed && !varOverridden(info, firstUseAfter, v.obj, v.free, outer) { - returnTypes = append(returnTypes, &ast.Field{Type: typ}) - returns = append(returns, identifier) - if !v.free { - uninitialized = append(uninitialized, v.obj) - - } else { - // In go1.22, Scope.Pos for function scopes changed (#60752): - // it used to start at the body ('{'), now it starts at "func". - // - // The second condition below handles the case when - // v's block is the FuncDecl.Body itself. - if vscope.Pos() == startParent.Pos() || - startParent == outer.Body && vscope == info.Scopes[outer.Type] { - canRedefineCount++ - } - } - } - // An identifier must meet two conditions to become a parameter of the - // extracted function. (1) it must be free (isFree), and (2) its first - // use within the selection cannot be its own definition (isDefined). - if v.free && !v.defined { - // Skip the selector for a method. - if isMethod && v.obj == receiverObj { - receiverUsed = true - continue - } - params = append(params, identifier) - paramTypes = append(paramTypes, &ast.Field{ - Names: []*ast.Ident{identifier}, - Type: typ, - }) - } - } - - reorderParams(params, paramTypes) - - // Find the function literal that encloses the selection. The enclosing function literal - // may not be the enclosing function declaration (i.e. 'outer'). For example, in the - // following block: - // - // func main() { - // ast.Inspect(node, func(n ast.Node) bool { - // v := 1 // this line extracted - // return true - // }) - // } - // - // 'outer' is main(). However, the extracted selection most directly belongs to - // the anonymous function literal, the second argument of ast.Inspect(). We use the - // enclosing function literal to determine the proper return types for return statements - // within the selection. We still need the enclosing function declaration because this is - // the top-level declaration. We inspect the top-level declaration to look for variables - // as well as for code replacement. - enclosing := outer.Type - for _, p := range path { - if p == enclosing { - break - } - if fl, ok := p.(*ast.FuncLit); ok { - enclosing = fl.Type - break - } - } - - // We put the selection in a constructed file. We can then traverse and edit - // the extracted selection without modifying the original AST. - startOffset, endOffset, err := safetoken.Offsets(tok, start, end) - if err != nil { - return nil, nil, err - } - selection := src[startOffset:endOffset] - extractedBlock, err := parseBlockStmt(fset, selection) - if err != nil { - return nil, nil, err - } - - // We need to account for return statements in the selected block, as they will complicate - // the logical flow of the extracted function. See the following example, where ** denotes - // the range to be extracted. - // - // Before: - // - // func _() int { - // a := 1 - // b := 2 - // **if a == b { - // return a - // }** - // ... - // } - // - // After: - // - // func _() int { - // a := 1 - // b := 2 - // cond0, ret0 := x0(a, b) - // if cond0 { - // return ret0 - // } - // ... - // } - // - // func x0(a int, b int) (bool, int) { - // if a == b { - // return true, a - // } - // return false, 0 - // } - // - // We handle returns by adding an additional boolean return value to the extracted function. - // This bool reports whether the original function would have returned. 
Because the - // extracted selection contains a return statement, we must also add the types in the - // return signature of the enclosing function to the return signature of the - // extracted function. We then add an extra if statement checking this boolean value - // in the original function. If the condition is met, the original function should - // return a value, mimicking the functionality of the original return statement(s) - // in the selection. - // - // If there is a return that is guaranteed to execute (hasNonNestedReturns=true), then - // we don't need to include this additional condition check and can simply return. - // - // Before: - // - // func _() int { - // a := 1 - // b := 2 - // **if a == b { - // return a - // } - // return b** - // } - // - // After: - // - // func _() int { - // a := 1 - // b := 2 - // return x0(a, b) - // } - // - // func x0(a int, b int) int { - // if a == b { - // return a - // } - // return b - // } - - var retVars []*returnVariable - var ifReturn *ast.IfStmt - if containsReturnStatement { - if !hasNonNestedReturn { - // The selected block contained return statements, so we have to modify the - // signature of the extracted function as described above. Adjust all of - // the return statements in the extracted function to reflect this change in - // signature. - if err := adjustReturnStatements(returnTypes, seenVars, file, pkg, extractedBlock); err != nil { - return nil, nil, err - } - } - // Collect the additional return values and types needed to accommodate return - // statements in the selection. Update the type signature of the extracted - // function and construct the if statement that will be inserted in the enclosing - // function. - retVars, ifReturn, err = generateReturnInfo(enclosing, pkg, path, file, info, start, hasNonNestedReturn) - if err != nil { - return nil, nil, err - } - } - - // Add a return statement to the end of the new function. This return statement must include - // the values for the types of the original extracted function signature and (if a return - // statement is present in the selection) enclosing function signature. - // This only needs to be done if the selections does not have a non-nested return, otherwise - // it already terminates with a return statement. - hasReturnValues := len(returns)+len(retVars) > 0 - if hasReturnValues && !hasNonNestedReturn { - extractedBlock.List = append(extractedBlock.List, &ast.ReturnStmt{ - Results: append(returns, getZeroVals(retVars)...), - }) - } - - // Construct the appropriate call to the extracted function. - // We must meet two conditions to use ":=" instead of '='. (1) there must be at least - // one variable on the lhs that is uninitialized (non-free) prior to the assignment. - // (2) all of the initialized (free) variables on the lhs must be able to be redefined. - sym := token.ASSIGN - canDefineCount := len(uninitialized) + canRedefineCount - canDefine := len(uninitialized)+len(retVars) > 0 && canDefineCount == len(returns) - if canDefine { - sym = token.DEFINE - } - var name, funName string - if isMethod { - name = "newMethod" - // TODO(suzmue): generate a name that does not conflict for "newMethod". - funName = name - } else { - name = "newFunction" - funName, _ = generateAvailableIdentifier(start, path, pkg, info, name, 0) - } - extractedFunCall := generateFuncCall(hasNonNestedReturn, hasReturnValues, params, - append(returns, getNames(retVars)...), funName, sym, receiverName) - - // Build the extracted function. 
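The extracted function is assembled as a synthetic *ast.FuncDecl and rendered with go/format before being spliced into the suggested fix. A minimal sketch of that assembly step (the names newFunction, a, and b are placeholders, not the values computed by the code around this point):

package main

import (
	"bytes"
	"fmt"
	"go/ast"
	"go/format"
	"go/token"
)

func main() {
	// Placeholder signature and body standing in for the computed paramTypes,
	// returnTypes, and extracted block.
	newFunc := &ast.FuncDecl{
		Name: ast.NewIdent("newFunction"),
		Type: &ast.FuncType{
			Params: &ast.FieldList{List: []*ast.Field{{
				Names: []*ast.Ident{ast.NewIdent("a"), ast.NewIdent("b")},
				Type:  ast.NewIdent("int"),
			}}},
			Results: &ast.FieldList{List: []*ast.Field{{Type: ast.NewIdent("int")}}},
		},
		Body: &ast.BlockStmt{List: []ast.Stmt{
			&ast.ReturnStmt{Results: []ast.Expr{
				&ast.BinaryExpr{X: ast.NewIdent("a"), Op: token.ADD, Y: ast.NewIdent("b")},
			}},
		}},
	}

	var buf bytes.Buffer
	if err := format.Node(&buf, token.NewFileSet(), newFunc); err != nil {
		panic(err)
	}
	fmt.Println(buf.String())
	// Output:
	// func newFunction(a, b int) int {
	// 	return a + b
	// }
}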
- newFunc := &ast.FuncDecl{ - Name: ast.NewIdent(funName), - Type: &ast.FuncType{ - Params: &ast.FieldList{List: paramTypes}, - Results: &ast.FieldList{List: append(returnTypes, getDecls(retVars)...)}, - }, - Body: extractedBlock, - } - if isMethod { - var names []*ast.Ident - if receiverUsed { - names = append(names, ast.NewIdent(receiverName)) - } - newFunc.Recv = &ast.FieldList{ - List: []*ast.Field{{ - Names: names, - Type: receiver.Type, - }}, - } - } - - // Create variable declarations for any identifiers that need to be initialized prior to - // calling the extracted function. We do not manually initialize variables if every return - // value is uninitialized. We can use := to initialize the variables in this situation. - var declarations []ast.Stmt - if canDefineCount != len(returns) { - declarations = initializeVars(uninitialized, retVars, seenUninitialized, seenVars) - } - - var declBuf, replaceBuf, newFuncBuf, ifBuf, commentBuf bytes.Buffer - if err := format.Node(&declBuf, fset, declarations); err != nil { - return nil, nil, err - } - if err := format.Node(&replaceBuf, fset, extractedFunCall); err != nil { - return nil, nil, err - } - if ifReturn != nil { - if err := format.Node(&ifBuf, fset, ifReturn); err != nil { - return nil, nil, err - } - } - if err := format.Node(&newFuncBuf, fset, newFunc); err != nil { - return nil, nil, err - } - // Find all the comments within the range and print them to be put somewhere. - // TODO(suzmue): print these in the extracted function at the correct place. - for _, cg := range file.Comments { - if cg.Pos().IsValid() && cg.Pos() < end && cg.Pos() >= start { - for _, c := range cg.List { - fmt.Fprintln(&commentBuf, c.Text) - } - } - } - - // We're going to replace the whole enclosing function, - // so preserve the text before and after the selected block. - outerStart, outerEnd, err := safetoken.Offsets(tok, outer.Pos(), outer.End()) - if err != nil { - return nil, nil, err - } - before := src[outerStart:startOffset] - after := src[endOffset:outerEnd] - indent, err := calculateIndentation(src, tok, node) - if err != nil { - return nil, nil, err - } - newLineIndent := "\n" + indent - - var fullReplacement strings.Builder - fullReplacement.Write(before) - if commentBuf.Len() > 0 { - comments := strings.ReplaceAll(commentBuf.String(), "\n", newLineIndent) - fullReplacement.WriteString(comments) - } - if declBuf.Len() > 0 { // add any initializations, if needed - initializations := strings.ReplaceAll(declBuf.String(), "\n", newLineIndent) + - newLineIndent - fullReplacement.WriteString(initializations) - } - fullReplacement.Write(replaceBuf.Bytes()) // call the extracted function - if ifBuf.Len() > 0 { // add the if statement below the function call, if needed - ifstatement := newLineIndent + - strings.ReplaceAll(ifBuf.String(), "\n", newLineIndent) - fullReplacement.WriteString(ifstatement) - } - fullReplacement.Write(after) - fullReplacement.WriteString("\n\n") // add newlines after the enclosing function - fullReplacement.Write(newFuncBuf.Bytes()) // insert the extracted function - - return fset, &analysis.SuggestedFix{ - TextEdits: []analysis.TextEdit{{ - Pos: outer.Pos(), - End: outer.End(), - NewText: []byte(fullReplacement.String()), - }}, - }, nil -} - -// isSelector reports if e is the selector expr , . 
-func isSelector(e ast.Expr, x, sel string) bool { - selectorExpr, ok := e.(*ast.SelectorExpr) - if !ok { - return false - } - ident, ok := selectorExpr.X.(*ast.Ident) - if !ok { - return false - } - return ident.Name == x && selectorExpr.Sel.Name == sel -} - -// reorderParams reorders the given parameters in-place to follow common Go conventions. -func reorderParams(params []ast.Expr, paramTypes []*ast.Field) { - // Move Context parameter (if any) to front. - for i, t := range paramTypes { - if isSelector(t.Type, "context", "Context") { - p, t := params[i], paramTypes[i] - copy(params[1:], params[:i]) - copy(paramTypes[1:], paramTypes[:i]) - params[0], paramTypes[0] = p, t - break - } - } -} - -// adjustRangeForCommentsAndWhiteSpace adjusts the given range to exclude unnecessary leading or -// trailing whitespace characters from selection as well as leading or trailing comments. -// In the following example, each line of the if statement is indented once. There are also two -// extra spaces after the sclosing bracket before the line break and a comment. -// -// \tif (true) { -// \t _ = 1 -// \t} // hello \n -// -// By default, a valid range begins at 'if' and ends at the first whitespace character -// after the '}'. But, users are likely to highlight full lines rather than adjusting -// their cursors for whitespace. To support this use case, we must manually adjust the -// ranges to match the correct AST node. In this particular example, we would adjust -// rng.Start forward to the start of 'if' and rng.End backward to after '}'. -func adjustRangeForCommentsAndWhiteSpace(tok *token.File, start, end token.Pos, content []byte, file *ast.File) (token.Pos, token.Pos, error) { - // Adjust the end of the range to after leading whitespace and comments. - prevStart := token.NoPos - startComment := sort.Search(len(file.Comments), func(i int) bool { - // Find the index for the first comment that ends after range start. - return file.Comments[i].End() > start - }) - for prevStart != start { - prevStart = start - // If start is within a comment, move start to the end - // of the comment group. - if startComment < len(file.Comments) && file.Comments[startComment].Pos() <= start && start < file.Comments[startComment].End() { - start = file.Comments[startComment].End() - startComment++ - } - // Move forwards to find a non-whitespace character. - offset, err := safetoken.Offset(tok, start) - if err != nil { - return 0, 0, err - } - for offset < len(content) && isGoWhiteSpace(content[offset]) { - offset++ - } - start = tok.Pos(offset) - } - - // Adjust the end of the range to before trailing whitespace and comments. - prevEnd := token.NoPos - endComment := sort.Search(len(file.Comments), func(i int) bool { - // Find the index for the first comment that ends after the range end. - return file.Comments[i].End() >= end - }) - // Search will return n if not found, so we need to adjust if there are no - // comments that would match. - if endComment == len(file.Comments) { - endComment = -1 - } - for prevEnd != end { - prevEnd = end - // If end is within a comment, move end to the start - // of the comment group. - if endComment >= 0 && file.Comments[endComment].Pos() < end && end <= file.Comments[endComment].End() { - end = file.Comments[endComment].Pos() - endComment-- - } - // Move backwards to find a non-whitespace character. 
- offset, err := safetoken.Offset(tok, end) - if err != nil { - return 0, 0, err - } - for offset > 0 && isGoWhiteSpace(content[offset-1]) { - offset-- - } - end = tok.Pos(offset) - } - - return start, end, nil -} - -// isGoWhiteSpace returns true if b is a considered white space in -// Go as defined by scanner.GoWhitespace. -func isGoWhiteSpace(b byte) bool { - return uint64(scanner.GoWhitespace)&(1< not free - } - return obj, true - } - // sel returns non-nil if n denotes a selection o.x.y that is referenced by the - // span and defined either within the span or in the lexical environment. The bool - // return value acts as an indicator for where it was defined. - var sel func(n *ast.SelectorExpr) (types.Object, bool) - sel = func(n *ast.SelectorExpr) (types.Object, bool) { - switch x := astutil.Unparen(n.X).(type) { - case *ast.SelectorExpr: - return sel(x) - case *ast.Ident: - return id(x) - } - return nil, false - } - seen := make(map[types.Object]*variable) - firstUseIn := make(map[types.Object]token.Pos) - var vars []types.Object - ast.Inspect(node, func(n ast.Node) bool { - if n == nil { - return false - } - if start <= n.Pos() && n.End() <= end { - var obj types.Object - var isFree, prune bool - switch n := n.(type) { - case *ast.Ident: - obj, isFree = id(n) - case *ast.SelectorExpr: - obj, isFree = sel(n) - prune = true - } - if obj != nil { - seen[obj] = &variable{ - obj: obj, - free: isFree, - } - vars = append(vars, obj) - // Find the first time that the object is used in the selection. - first, ok := firstUseIn[obj] - if !ok || n.Pos() < first { - firstUseIn[obj] = n.Pos() - } - if prune { - return false - } - } - } - return n.Pos() <= end - }) - - // Find identifiers that are initialized or whose values are altered at some - // point in the selected block. For example, in a selected block from lines 2-4, - // variables x, y, and z are included in assigned. However, in a selected block - // from lines 3-4, only variables y and z are included in assigned. - // - // 1: var a int - // 2: var x int - // 3: y := 3 - // 4: z := x + a - // - ast.Inspect(node, func(n ast.Node) bool { - if n == nil { - return false - } - if n.Pos() < start || n.End() > end { - return n.Pos() <= end - } - switch n := n.(type) { - case *ast.AssignStmt: - for _, assignment := range n.Lhs { - lhs, ok := assignment.(*ast.Ident) - if !ok { - continue - } - obj, _ := id(lhs) - if obj == nil { - continue - } - if _, ok := seen[obj]; !ok { - continue - } - seen[obj].assigned = true - if n.Tok != token.DEFINE { - continue - } - // Find identifiers that are defined prior to being used - // elsewhere in the selection. - // TODO: Include identifiers that are assigned prior to being - // used elsewhere in the selection. Then, change the assignment - // to a definition in the extracted function. - if firstUseIn[obj] != lhs.Pos() { - continue - } - // Ensure that the object is not used in its own re-definition. 
- // For example: - // var f float64 - // f, e := math.Frexp(f) - for _, expr := range n.Rhs { - if referencesObj(info, expr, obj) { - continue - } - if _, ok := seen[obj]; !ok { - continue - } - seen[obj].defined = true - break - } - } - return false - case *ast.DeclStmt: - gen, ok := n.Decl.(*ast.GenDecl) - if !ok { - return false - } - for _, spec := range gen.Specs { - vSpecs, ok := spec.(*ast.ValueSpec) - if !ok { - continue - } - for _, vSpec := range vSpecs.Names { - obj, _ := id(vSpec) - if obj == nil { - continue - } - if _, ok := seen[obj]; !ok { - continue - } - seen[obj].assigned = true - } - } - return false - case *ast.IncDecStmt: - if ident, ok := n.X.(*ast.Ident); !ok { - return false - } else if obj, _ := id(ident); obj == nil { - return false - } else { - if _, ok := seen[obj]; !ok { - return false - } - seen[obj].assigned = true - } - } - return true - }) - var variables []*variable - for _, obj := range vars { - v, ok := seen[obj] - if !ok { - return nil, fmt.Errorf("no seen types.Object for %v", obj) - } - variables = append(variables, v) - } - return variables, nil -} - -// referencesObj checks whether the given object appears in the given expression. -func referencesObj(info *types.Info, expr ast.Expr, obj types.Object) bool { - var hasObj bool - ast.Inspect(expr, func(n ast.Node) bool { - if n == nil { - return false - } - ident, ok := n.(*ast.Ident) - if !ok { - return true - } - objUse := info.Uses[ident] - if obj == objUse { - hasObj = true - return false - } - return false - }) - return hasObj -} - -type fnExtractParams struct { - tok *token.File - start, end token.Pos - path []ast.Node - outer *ast.FuncDecl - node ast.Node -} - -// CanExtractFunction reports whether the code in the given range can be -// extracted to a function. -func CanExtractFunction(tok *token.File, start, end token.Pos, src []byte, file *ast.File) (*fnExtractParams, bool, bool, error) { - if start == end { - return nil, false, false, fmt.Errorf("start and end are equal") - } - var err error - start, end, err = adjustRangeForCommentsAndWhiteSpace(tok, start, end, src, file) - if err != nil { - return nil, false, false, err - } - path, _ := astutil.PathEnclosingInterval(file, start, end) - if len(path) == 0 { - return nil, false, false, fmt.Errorf("no path enclosing interval") - } - // Node that encloses the selection must be a statement. - // TODO: Support function extraction for an expression. - _, ok := path[0].(ast.Stmt) - if !ok { - return nil, false, false, fmt.Errorf("node is not a statement") - } - - // Find the function declaration that encloses the selection. - var outer *ast.FuncDecl - for _, p := range path { - if p, ok := p.(*ast.FuncDecl); ok { - outer = p - break - } - } - if outer == nil { - return nil, false, false, fmt.Errorf("no enclosing function") - } - - // Find the nodes at the start and end of the selection. - var startNode, endNode ast.Node - ast.Inspect(outer, func(n ast.Node) bool { - if n == nil { - return false - } - // Do not override 'start' with a node that begins at the same location - // but is nested further from 'outer'. - if startNode == nil && n.Pos() == start && n.End() <= end { - startNode = n - } - if endNode == nil && n.End() == end && n.Pos() >= start { - endNode = n - } - return n.Pos() <= end - }) - if startNode == nil || endNode == nil { - return nil, false, false, fmt.Errorf("range does not map to AST nodes") - } - // If the region is a blockStmt, use the first and last nodes in the block - // statement. - // { ... } => { ... 
} - if blockStmt, ok := startNode.(*ast.BlockStmt); ok { - if len(blockStmt.List) == 0 { - return nil, false, false, fmt.Errorf("range maps to empty block statement") - } - startNode, endNode = blockStmt.List[0], blockStmt.List[len(blockStmt.List)-1] - start, end = startNode.Pos(), endNode.End() - } - return &fnExtractParams{ - tok: tok, - start: start, - end: end, - path: path, - outer: outer, - node: startNode, - }, true, outer.Recv != nil, nil -} - -// objUsed checks if the object is used within the range. It returns the first -// occurrence of the object in the range, if it exists. -func objUsed(info *types.Info, start, end token.Pos, obj types.Object) (bool, *ast.Ident) { - var firstUse *ast.Ident - for id, objUse := range info.Uses { - if obj != objUse { - continue - } - if id.Pos() < start || id.End() > end { - continue - } - if firstUse == nil || id.Pos() < firstUse.Pos() { - firstUse = id - } - } - return firstUse != nil, firstUse -} - -// varOverridden traverses the given AST node until we find the given identifier. Then, we -// examine the occurrence of the given identifier and check for (1) whether the identifier -// is being redefined. If the identifier is free, we also check for (2) whether the identifier -// is being reassigned. We will not include an identifier in the return statement of the -// extracted function if it meets one of the above conditions. -func varOverridden(info *types.Info, firstUse *ast.Ident, obj types.Object, isFree bool, node ast.Node) bool { - var isOverriden bool - ast.Inspect(node, func(n ast.Node) bool { - if n == nil { - return false - } - assignment, ok := n.(*ast.AssignStmt) - if !ok { - return true - } - // A free variable is initialized prior to the selection. We can always reassign - // this variable after the selection because it has already been defined. - // Conversely, a non-free variable is initialized within the selection. Thus, we - // cannot reassign this variable after the selection unless it is initialized and - // returned by the extracted function. - if !isFree && assignment.Tok == token.ASSIGN { - return false - } - for _, assigned := range assignment.Lhs { - ident, ok := assigned.(*ast.Ident) - // Check if we found the first use of the identifier. - if !ok || ident != firstUse { - continue - } - objUse := info.Uses[ident] - if objUse == nil || objUse != obj { - continue - } - // Ensure that the object is not used in its own definition. - // For example: - // var f float64 - // f, e := math.Frexp(f) - for _, expr := range assignment.Rhs { - if referencesObj(info, expr, obj) { - return false - } - } - isOverriden = true - return false - } - return false - }) - return isOverriden -} - -// parseBlockStmt generates an AST file from the given text. We then return the portion of the -// file that represents the text. 
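parseBlockStmt relies on a common trick: wrap an arbitrary statement list in a throwaway package and function so the parser will accept it, then pull the function body back out. A short standalone sketch of the same wrapping (the snippet text is arbitrary):

package main

import (
	"fmt"
	"go/ast"
	"go/format"
	"go/parser"
	"go/token"
	"os"
)

func main() {
	snippet := "x := 1\nif x > 0 { x++ }" // arbitrary selected statements

	// Same wrapping as parseBlockStmt: make the snippet a legal file.
	text := "package main\nfunc _() { " + snippet + " }"

	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "", text, 0)
	if err != nil {
		panic(err)
	}

	body := f.Decls[0].(*ast.FuncDecl).Body // the snippet as a *ast.BlockStmt
	fmt.Println("statements:", len(body.List))
	_ = format.Node(os.Stdout, fset, body)
}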
-func parseBlockStmt(fset *token.FileSet, src []byte) (*ast.BlockStmt, error) { - text := "package main\nfunc _() { " + string(src) + " }" - extract, err := parser.ParseFile(fset, "", text, 0) - if err != nil { - return nil, err - } - if len(extract.Decls) == 0 { - return nil, fmt.Errorf("parsed file does not contain any declarations") - } - decl, ok := extract.Decls[0].(*ast.FuncDecl) - if !ok { - return nil, fmt.Errorf("parsed file does not contain expected function declaration") - } - if decl.Body == nil { - return nil, fmt.Errorf("extracted function has no body") - } - return decl.Body, nil -} - -// generateReturnInfo generates the information we need to adjust the return statements and -// signature of the extracted function. We prepare names, signatures, and "zero values" that -// represent the new variables. We also use this information to construct the if statement that -// is inserted below the call to the extracted function. -func generateReturnInfo(enclosing *ast.FuncType, pkg *types.Package, path []ast.Node, file *ast.File, info *types.Info, pos token.Pos, hasNonNestedReturns bool) ([]*returnVariable, *ast.IfStmt, error) { - var retVars []*returnVariable - var cond *ast.Ident - if !hasNonNestedReturns { - // Generate information for the added bool value. - name, _ := generateAvailableIdentifier(pos, path, pkg, info, "shouldReturn", 0) - cond = &ast.Ident{Name: name} - retVars = append(retVars, &returnVariable{ - name: cond, - decl: &ast.Field{Type: ast.NewIdent("bool")}, - zeroVal: ast.NewIdent("false"), - }) - } - // Generate information for the values in the return signature of the enclosing function. - if enclosing.Results != nil { - idx := 0 - for _, field := range enclosing.Results.List { - typ := info.TypeOf(field.Type) - if typ == nil { - return nil, nil, fmt.Errorf( - "failed type conversion, AST expression: %T", field.Type) - } - expr := analysisinternal.TypeExpr(file, pkg, typ) - if expr == nil { - return nil, nil, fmt.Errorf("nil AST expression") - } - var name string - name, idx = generateAvailableIdentifier(pos, path, pkg, info, "returnValue", idx) - retVars = append(retVars, &returnVariable{ - name: ast.NewIdent(name), - decl: &ast.Field{Type: expr}, - zeroVal: analysisinternal.ZeroValue(file, pkg, typ), - }) - } - } - var ifReturn *ast.IfStmt - if !hasNonNestedReturns { - // Create the return statement for the enclosing function. We must exclude the variable - // for the condition of the if statement (cond) from the return statement. - ifReturn = &ast.IfStmt{ - Cond: cond, - Body: &ast.BlockStmt{ - List: []ast.Stmt{&ast.ReturnStmt{Results: getNames(retVars)[1:]}}, - }, - } - } - return retVars, ifReturn, nil -} - -// adjustReturnStatements adds "zero values" of the given types to each return statement -// in the given AST node. -func adjustReturnStatements(returnTypes []*ast.Field, seenVars map[types.Object]ast.Expr, file *ast.File, pkg *types.Package, extractedBlock *ast.BlockStmt) error { - var zeroVals []ast.Expr - // Create "zero values" for each type. - for _, returnType := range returnTypes { - var val ast.Expr - for obj, typ := range seenVars { - if typ != returnType.Type { - continue - } - val = analysisinternal.ZeroValue(file, pkg, obj.Type()) - break - } - if val == nil { - return fmt.Errorf( - "could not find matching AST expression for %T", returnType.Type) - } - zeroVals = append(zeroVals, val) - } - // Add "zero values" to each return statement. 
- // The bool reports whether the enclosing function should return after calling the - // extracted function. We set the bool to 'true' because, if these return statements - // execute, the extracted function terminates early, and the enclosing function must - // return as well. - zeroVals = append(zeroVals, ast.NewIdent("true")) - ast.Inspect(extractedBlock, func(n ast.Node) bool { - if n == nil { - return false - } - if n, ok := n.(*ast.ReturnStmt); ok { - n.Results = append(zeroVals, n.Results...) - return false - } - return true - }) - return nil -} - -// generateFuncCall constructs a call expression for the extracted function, described by the -// given parameters and return variables. -func generateFuncCall(hasNonNestedReturn, hasReturnVals bool, params, returns []ast.Expr, name string, token token.Token, selector string) ast.Node { - var replace ast.Node - callExpr := &ast.CallExpr{ - Fun: ast.NewIdent(name), - Args: params, - } - if selector != "" { - callExpr = &ast.CallExpr{ - Fun: &ast.SelectorExpr{ - X: ast.NewIdent(selector), - Sel: ast.NewIdent(name), - }, - Args: params, - } - } - if hasReturnVals { - if hasNonNestedReturn { - // Create a return statement that returns the result of the function call. - replace = &ast.ReturnStmt{ - Return: 0, - Results: []ast.Expr{callExpr}, - } - } else { - // Assign the result of the function call. - replace = &ast.AssignStmt{ - Lhs: returns, - Tok: token, - Rhs: []ast.Expr{callExpr}, - } - } - } else { - replace = callExpr - } - return replace -} - -// initializeVars creates variable declarations, if needed. -// Our preference is to replace the selected block with an "x, y, z := fn()" style -// assignment statement. We can use this style when all of the variables in the -// extracted function's return statement are either not defined prior to the extracted block -// or can be safely redefined. However, for example, if z is already defined -// in a different scope, we replace the selected block with: -// -// var x int -// var y string -// x, y, z = fn() -func initializeVars(uninitialized []types.Object, retVars []*returnVariable, seenUninitialized map[types.Object]struct{}, seenVars map[types.Object]ast.Expr) []ast.Stmt { - var declarations []ast.Stmt - for _, obj := range uninitialized { - if _, ok := seenUninitialized[obj]; ok { - continue - } - seenUninitialized[obj] = struct{}{} - valSpec := &ast.ValueSpec{ - Names: []*ast.Ident{ast.NewIdent(obj.Name())}, - Type: seenVars[obj], - } - genDecl := &ast.GenDecl{ - Tok: token.VAR, - Specs: []ast.Spec{valSpec}, - } - declarations = append(declarations, &ast.DeclStmt{Decl: genDecl}) - } - // Each variable added from a return statement in the selection - // must be initialized. - for i, retVar := range retVars { - n := retVar.name.(*ast.Ident) - valSpec := &ast.ValueSpec{ - Names: []*ast.Ident{n}, - Type: retVars[i].decl.Type, - } - genDecl := &ast.GenDecl{ - Tok: token.VAR, - Specs: []ast.Spec{valSpec}, - } - declarations = append(declarations, &ast.DeclStmt{Decl: genDecl}) - } - return declarations -} - -// getNames returns the names from the given list of returnVariable. -func getNames(retVars []*returnVariable) []ast.Expr { - var names []ast.Expr - for _, retVar := range retVars { - names = append(names, retVar.name) - } - return names -} - -// getZeroVals returns the "zero values" from the given list of returnVariable. 
-func getZeroVals(retVars []*returnVariable) []ast.Expr { - var zvs []ast.Expr - for _, retVar := range retVars { - zvs = append(zvs, retVar.zeroVal) - } - return zvs -} - -// getDecls returns the declarations from the given list of returnVariable. -func getDecls(retVars []*returnVariable) []*ast.Field { - var decls []*ast.Field - for _, retVar := range retVars { - decls = append(decls, retVar.decl) - } - return decls -} diff --git a/internal/golangorgx/gopls/golang/fix.go b/internal/golangorgx/gopls/golang/fix.go deleted file mode 100644 index 5fcd1a3fd94..00000000000 --- a/internal/golangorgx/gopls/golang/fix.go +++ /dev/null @@ -1,223 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package golang - -import ( - "context" - "fmt" - "go/ast" - "go/token" - "go/types" - - "cuelang.org/go/internal/golangorgx/gopls/analysis/embeddirective" - "cuelang.org/go/internal/golangorgx/gopls/analysis/fillstruct" - "cuelang.org/go/internal/golangorgx/gopls/analysis/stubmethods" - "cuelang.org/go/internal/golangorgx/gopls/analysis/undeclaredname" - "cuelang.org/go/internal/golangorgx/gopls/analysis/unusedparams" - "cuelang.org/go/internal/golangorgx/gopls/cache" - "cuelang.org/go/internal/golangorgx/gopls/cache/parsego" - "cuelang.org/go/internal/golangorgx/gopls/file" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/util/bug" - "cuelang.org/go/internal/golangorgx/tools/imports" - "golang.org/x/tools/go/analysis" -) - -// A fixer is a function that suggests a fix for a diagnostic produced -// by the analysis framework. This is done outside of the analyzer Run -// function so that the construction of expensive fixes can be -// deferred until they are requested by the user. -// -// The actual diagnostic is not provided; only its position, as the -// triple (pgf, start, end); the resulting SuggestedFix implicitly -// relates to that file. -// -// The supplied token positions (start, end) must belong to -// pkg.FileSet(), and the returned positions -// (SuggestedFix.TextEdits[*].{Pos,End}) must belong to the returned -// FileSet. -// -// A fixer may return (nil, nil) if no fix is available. -type fixer func(ctx context.Context, s *cache.Snapshot, pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) - -// A singleFileFixer is a Fixer that inspects only a single file, -// and does not depend on data types from the cache package. -// -// TODO(adonovan): move fillstruct and undeclaredname into this -// package, so we can remove the import restriction and push -// the singleFile wrapper down into each singleFileFixer? -type singleFileFixer func(fset *token.FileSet, start, end token.Pos, src []byte, file *ast.File, pkg *types.Package, info *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) - -// singleFile adapts a single-file fixer to a Fixer. -func singleFile(fixer1 singleFileFixer) fixer { - return func(ctx context.Context, snapshot *cache.Snapshot, pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) { - return fixer1(pkg.FileSet(), start, end, pgf.Src, pgf.File, pkg.GetTypes(), pkg.GetTypesInfo()) - } -} - -// Names of ApplyFix.Fix created directly by the CodeAction handler. 
-const ( - fixExtractVariable = "extract_variable" - fixExtractFunction = "extract_function" - fixExtractMethod = "extract_method" - fixInlineCall = "inline_call" - fixInvertIfCondition = "invert_if_condition" -) - -// ApplyFix applies the specified kind of suggested fix to the given -// file and range, returning the resulting edits. -// -// A fix kind is either the Category of an analysis.Diagnostic that -// had a SuggestedFix with no edits; or the name of a fix agreed upon -// by [CodeActions] and this function. -// Fix kinds identify fixes in the command protocol. -// -// TODO(adonovan): come up with a better mechanism for registering the -// connection between analyzers, code actions, and fixers. A flaw of -// the current approach is that the same Category could in theory -// apply to a Diagnostic with several lazy fixes, making them -// impossible to distinguish. It would more precise if there was a -// SuggestedFix.Category field, or some other way to squirrel metadata -// in the fix. -func ApplyFix(ctx context.Context, fix string, snapshot *cache.Snapshot, fh file.Handle, rng protocol.Range) ([]protocol.TextDocumentEdit, error) { - // This can't be expressed as an entry in the fixer table below - // because it operates in the protocol (not go/{token,ast}) domain. - // (Sigh; perhaps it was a mistake to factor out the - // NarrowestPackageForFile/RangePos/suggestedFixToEdits - // steps.) - if fix == unusedparams.FixCategory { - changes, err := RemoveUnusedParameter(ctx, fh, rng, snapshot) - if err != nil { - return nil, err - } - // Unwrap TextDocumentEdits again! - var edits []protocol.TextDocumentEdit - for _, change := range changes { - edits = append(edits, *change.TextDocumentEdit) - } - return edits, nil - } - - fixers := map[string]fixer{ - // Fixes for analyzer-provided diagnostics. - // These match the Diagnostic.Category. - embeddirective.FixCategory: addEmbedImport, - fillstruct.FixCategory: singleFile(fillstruct.SuggestedFix), - stubmethods.FixCategory: stubMethodsFixer, - undeclaredname.FixCategory: singleFile(undeclaredname.SuggestedFix), - - // Ad-hoc fixers: these are used when the command is - // constructed directly by logic in server/code_action. - fixExtractFunction: singleFile(extractFunction), - fixExtractMethod: singleFile(extractMethod), - fixExtractVariable: singleFile(extractVariable), - fixInlineCall: inlineCall, - fixInvertIfCondition: singleFile(invertIfCondition), - } - fixer, ok := fixers[fix] - if !ok { - return nil, fmt.Errorf("no suggested fix function for %s", fix) - } - pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) - if err != nil { - return nil, err - } - start, end, err := pgf.RangePos(rng) - if err != nil { - return nil, err - } - fixFset, suggestion, err := fixer(ctx, snapshot, pkg, pgf, start, end) - if err != nil { - return nil, err - } - if suggestion == nil { - return nil, nil - } - return suggestedFixToEdits(ctx, snapshot, fixFset, suggestion) -} - -// suggestedFixToEdits converts the suggestion's edits from analysis form into protocol form. 
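suggestedFixToEdits must translate the token.Pos offsets carried by analysis edits into the line/character coordinates LSP expects; the real conversion goes through protocol.Mapper because LSP characters are 0-based UTF-16 code units rather than bytes. A rough sketch of the first half of that translation using only go/token (the file contents are made up):

package main

import (
	"fmt"
	"go/parser"
	"go/token"
)

func main() {
	const src = "package p\n\nvar answer = 42\n" // hypothetical file contents

	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}

	pos := f.Decls[0].Pos() // start of "var answer = 42"
	p := fset.Position(pos)
	fmt.Printf("token.Pos %d -> %s:%d:%d\n", pos, p.Filename, p.Line, p.Column)
	// go/token reports 1-based lines and byte columns; an LSP Position for the
	// same spot would be {Line: p.Line - 1, Character: <UTF-16 count on the line>}.
}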
-func suggestedFixToEdits(ctx context.Context, snapshot *cache.Snapshot, fset *token.FileSet, suggestion *analysis.SuggestedFix) ([]protocol.TextDocumentEdit, error) { - editsPerFile := map[protocol.DocumentURI]*protocol.TextDocumentEdit{} - for _, edit := range suggestion.TextEdits { - tokFile := fset.File(edit.Pos) - if tokFile == nil { - return nil, bug.Errorf("no file for edit position") - } - end := edit.End - if !end.IsValid() { - end = edit.Pos - } - fh, err := snapshot.ReadFile(ctx, protocol.URIFromPath(tokFile.Name())) - if err != nil { - return nil, err - } - te, ok := editsPerFile[fh.URI()] - if !ok { - te = &protocol.TextDocumentEdit{ - TextDocument: protocol.OptionalVersionedTextDocumentIdentifier{ - Version: fh.Version(), - TextDocumentIdentifier: protocol.TextDocumentIdentifier{ - URI: fh.URI(), - }, - }, - } - editsPerFile[fh.URI()] = te - } - content, err := fh.Content() - if err != nil { - return nil, err - } - m := protocol.NewMapper(fh.URI(), content) // TODO(adonovan): opt: memoize in map - rng, err := m.PosRange(tokFile, edit.Pos, end) - if err != nil { - return nil, err - } - te.Edits = append(te.Edits, protocol.Or_TextDocumentEdit_edits_Elem{ - Value: protocol.TextEdit{ - Range: rng, - NewText: string(edit.NewText), - }, - }) - } - var edits []protocol.TextDocumentEdit - for _, edit := range editsPerFile { - edits = append(edits, *edit) - } - return edits, nil -} - -// addEmbedImport adds a missing embed "embed" import with blank name. -func addEmbedImport(ctx context.Context, snapshot *cache.Snapshot, pkg *cache.Package, pgf *parsego.File, _, _ token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) { - // Like golang.AddImport, but with _ as Name and using our pgf. - protoEdits, err := ComputeOneImportFixEdits(snapshot, pgf, &imports.ImportFix{ - StmtInfo: imports.ImportInfo{ - ImportPath: "embed", - Name: "_", - }, - FixType: imports.AddImport, - }) - if err != nil { - return nil, nil, fmt.Errorf("compute edits: %w", err) - } - - var edits []analysis.TextEdit - for _, e := range protoEdits { - start, end, err := pgf.RangePos(e.Range) - if err != nil { - return nil, nil, err // e.g. invalid range - } - edits = append(edits, analysis.TextEdit{ - Pos: start, - End: end, - NewText: []byte(e.NewText), - }) - } - - return pkg.FileSet(), &analysis.SuggestedFix{ - Message: "Add embed import", - TextEdits: edits, - }, nil -} diff --git a/internal/golangorgx/gopls/golang/folding_range.go b/internal/golangorgx/gopls/golang/folding_range.go deleted file mode 100644 index c490129f5fe..00000000000 --- a/internal/golangorgx/gopls/golang/folding_range.go +++ /dev/null @@ -1,196 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package golang - -import ( - "context" - "go/ast" - "go/token" - "sort" - "strings" - - "cuelang.org/go/internal/golangorgx/gopls/cache" - "cuelang.org/go/internal/golangorgx/gopls/file" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/util/bug" - "cuelang.org/go/internal/golangorgx/gopls/util/safetoken" -) - -// FoldingRangeInfo holds range and kind info of folding for an ast.Node -type FoldingRangeInfo struct { - MappedRange protocol.MappedRange - Kind protocol.FoldingRangeKind -} - -// FoldingRange gets all of the folding range for f. 
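For a *ast.BlockStmt, the folding region computed below runs from just past the opening brace to the closing brace, and is only useful when the braces sit on different lines. A simplified standalone sketch of that calculation (the sample source is made up; the real code additionally honors lineFoldingOnly and maps positions through the protocol mapper):

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
)

func main() {
	const src = `package p

func f() {
	println("a")
	println("b")
}
`
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}

	ast.Inspect(f, func(n ast.Node) bool {
		b, ok := n.(*ast.BlockStmt)
		if !ok {
			return true
		}
		start, end := fset.Position(b.Lbrace+1), fset.Position(b.Rbrace)
		if start.Line != end.Line {
			fmt.Printf("foldable region: lines %d-%d\n", start.Line, end.Line) // lines 3-6
		}
		return true
	})
}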
-func FoldingRange(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, lineFoldingOnly bool) (ranges []*FoldingRangeInfo, err error) { - // TODO(suzmue): consider limiting the number of folding ranges returned, and - // implement a way to prioritize folding ranges in that case. - pgf, err := snapshot.ParseGo(ctx, fh, ParseFull) - if err != nil { - return nil, err - } - - // With parse errors, we wouldn't be able to produce accurate folding info. - // LSP protocol (3.16) currently does not have a way to handle this case - // (https://github.com/microsoft/language-server-protocol/issues/1200). - // We cannot return an error either because we are afraid some editors - // may not handle errors nicely. As a workaround, we now return an empty - // result and let the client handle this case by double check the file - // contents (i.e. if the file is not empty and the folding range result - // is empty, raise an internal error). - if pgf.ParseErr != nil { - return nil, nil - } - - // Get folding ranges for comments separately as they are not walked by ast.Inspect. - ranges = append(ranges, commentsFoldingRange(pgf)...) - - visit := func(n ast.Node) bool { - rng := foldingRangeFunc(pgf, n, lineFoldingOnly) - if rng != nil { - ranges = append(ranges, rng) - } - return true - } - // Walk the ast and collect folding ranges. - ast.Inspect(pgf.File, visit) - - sort.Slice(ranges, func(i, j int) bool { - irng := ranges[i].MappedRange.Range() - jrng := ranges[j].MappedRange.Range() - return protocol.CompareRange(irng, jrng) < 0 - }) - - return ranges, nil -} - -// foldingRangeFunc calculates the line folding range for ast.Node n -func foldingRangeFunc(pgf *ParsedGoFile, n ast.Node, lineFoldingOnly bool) *FoldingRangeInfo { - // TODO(suzmue): include trailing empty lines before the closing - // parenthesis/brace. - var kind protocol.FoldingRangeKind - var start, end token.Pos - switch n := n.(type) { - case *ast.BlockStmt: - // Fold between positions of or lines between "{" and "}". - var startList, endList token.Pos - if num := len(n.List); num != 0 { - startList, endList = n.List[0].Pos(), n.List[num-1].End() - } - start, end = validLineFoldingRange(pgf.Tok, n.Lbrace, n.Rbrace, startList, endList, lineFoldingOnly) - case *ast.CaseClause: - // Fold from position of ":" to end. - start, end = n.Colon+1, n.End() - case *ast.CommClause: - // Fold from position of ":" to end. - start, end = n.Colon+1, n.End() - case *ast.CallExpr: - // Fold from position of "(" to position of ")". - start, end = n.Lparen+1, n.Rparen - case *ast.FieldList: - // Fold between positions of or lines between opening parenthesis/brace and closing parenthesis/brace. - var startList, endList token.Pos - if num := len(n.List); num != 0 { - startList, endList = n.List[0].Pos(), n.List[num-1].End() - } - start, end = validLineFoldingRange(pgf.Tok, n.Opening, n.Closing, startList, endList, lineFoldingOnly) - case *ast.GenDecl: - // If this is an import declaration, set the kind to be protocol.Imports. - if n.Tok == token.IMPORT { - kind = protocol.Imports - } - // Fold between positions of or lines between "(" and ")". - var startSpecs, endSpecs token.Pos - if num := len(n.Specs); num != 0 { - startSpecs, endSpecs = n.Specs[0].Pos(), n.Specs[num-1].End() - } - start, end = validLineFoldingRange(pgf.Tok, n.Lparen, n.Rparen, startSpecs, endSpecs, lineFoldingOnly) - case *ast.BasicLit: - // Fold raw string literals from position of "`" to position of "`". 
- if n.Kind == token.STRING && len(n.Value) >= 2 && n.Value[0] == '`' && n.Value[len(n.Value)-1] == '`' { - start, end = n.Pos(), n.End() - } - case *ast.CompositeLit: - // Fold between positions of or lines between "{" and "}". - var startElts, endElts token.Pos - if num := len(n.Elts); num != 0 { - startElts, endElts = n.Elts[0].Pos(), n.Elts[num-1].End() - } - start, end = validLineFoldingRange(pgf.Tok, n.Lbrace, n.Rbrace, startElts, endElts, lineFoldingOnly) - } - - // Check that folding positions are valid. - if !start.IsValid() || !end.IsValid() { - return nil - } - // in line folding mode, do not fold if the start and end lines are the same. - if lineFoldingOnly && safetoken.Line(pgf.Tok, start) == safetoken.Line(pgf.Tok, end) { - return nil - } - mrng, err := pgf.PosMappedRange(start, end) - if err != nil { - bug.Errorf("%w", err) // can't happen - } - return &FoldingRangeInfo{ - MappedRange: mrng, - Kind: kind, - } -} - -// validLineFoldingRange returns start and end token.Pos for folding range if the range is valid. -// returns token.NoPos otherwise, which fails token.IsValid check -func validLineFoldingRange(tokFile *token.File, open, close, start, end token.Pos, lineFoldingOnly bool) (token.Pos, token.Pos) { - if lineFoldingOnly { - if !open.IsValid() || !close.IsValid() { - return token.NoPos, token.NoPos - } - - // Don't want to fold if the start/end is on the same line as the open/close - // as an example, the example below should *not* fold: - // var x = [2]string{"d", - // "e" } - if safetoken.Line(tokFile, open) == safetoken.Line(tokFile, start) || - safetoken.Line(tokFile, close) == safetoken.Line(tokFile, end) { - return token.NoPos, token.NoPos - } - - return open + 1, end - } - return open + 1, close -} - -// commentsFoldingRange returns the folding ranges for all comment blocks in file. -// The folding range starts at the end of the first line of the comment block, and ends at the end of the -// comment block and has kind protocol.Comment. -func commentsFoldingRange(pgf *ParsedGoFile) (comments []*FoldingRangeInfo) { - tokFile := pgf.Tok - for _, commentGrp := range pgf.File.Comments { - startGrpLine, endGrpLine := safetoken.Line(tokFile, commentGrp.Pos()), safetoken.Line(tokFile, commentGrp.End()) - if startGrpLine == endGrpLine { - // Don't fold single line comments. - continue - } - - firstComment := commentGrp.List[0] - startPos, endLinePos := firstComment.Pos(), firstComment.End() - startCmmntLine, endCmmntLine := safetoken.Line(tokFile, startPos), safetoken.Line(tokFile, endLinePos) - if startCmmntLine != endCmmntLine { - // If the first comment spans multiple lines, then we want to have the - // folding range start at the end of the first line. - endLinePos = token.Pos(int(startPos) + len(strings.Split(firstComment.Text, "\n")[0])) - } - mrng, err := pgf.PosMappedRange(endLinePos, commentGrp.End()) - if err != nil { - bug.Errorf("%w", err) // can't happen - } - comments = append(comments, &FoldingRangeInfo{ - // Fold from the end of the first line comment to the end of the comment block. - MappedRange: mrng, - Kind: protocol.Comment, - }) - } - return comments -} diff --git a/internal/golangorgx/gopls/golang/gc_annotations.go b/internal/golangorgx/gopls/golang/gc_annotations.go deleted file mode 100644 index 6ab1c566f30..00000000000 --- a/internal/golangorgx/gopls/golang/gc_annotations.go +++ /dev/null @@ -1,208 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package golang - -import ( - "bytes" - "context" - "encoding/json" - "fmt" - "os" - "path/filepath" - "strings" - - "cuelang.org/go/internal/golangorgx/gopls/cache" - "cuelang.org/go/internal/golangorgx/gopls/cache/metadata" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/settings" - "cuelang.org/go/internal/golangorgx/tools/gocommand" -) - -func GCOptimizationDetails(ctx context.Context, snapshot *cache.Snapshot, mp *metadata.Package) (map[protocol.DocumentURI][]*cache.Diagnostic, error) { - if len(mp.CompiledGoFiles) == 0 { - return nil, nil - } - pkgDir := filepath.Dir(mp.CompiledGoFiles[0].Path()) - outDir := filepath.Join(os.TempDir(), fmt.Sprintf("cuepls-%d.details", os.Getpid())) - - if err := os.MkdirAll(outDir, 0700); err != nil { - return nil, err - } - tmpFile, err := os.CreateTemp(os.TempDir(), "cuepls-x") - if err != nil { - return nil, err - } - tmpFile.Close() // ignore error - defer os.Remove(tmpFile.Name()) - - outDirURI := protocol.URIFromPath(outDir) - // GC details doesn't handle Windows URIs in the form of "file:///C:/...", - // so rewrite them to "file://C:/...". See golang/go#41614. - if !strings.HasPrefix(outDir, "/") { - outDirURI = protocol.DocumentURI(strings.Replace(string(outDirURI), "file:///", "file://", 1)) - } - inv := &gocommand.Invocation{ - Verb: "build", - Args: []string{ - fmt.Sprintf("-gcflags=-json=0,%s", outDirURI), - fmt.Sprintf("-o=%s", tmpFile.Name()), - ".", - }, - WorkingDir: pkgDir, - } - _, err = snapshot.RunGoCommandDirect(ctx, cache.Normal, inv) - if err != nil { - return nil, err - } - files, err := findJSONFiles(outDir) - if err != nil { - return nil, err - } - reports := make(map[protocol.DocumentURI][]*cache.Diagnostic) - opts := snapshot.Options() - var parseError error - for _, fn := range files { - uri, diagnostics, err := parseDetailsFile(fn, opts) - if err != nil { - // expect errors for all the files, save 1 - parseError = err - } - fh := snapshot.FindFile(uri) - if fh == nil { - continue - } - if pkgDir != filepath.Dir(fh.URI().Path()) { - // https://github.com/golang/go/issues/42198 - // sometimes the detail diagnostics generated for files - // outside the package can never be taken back. - continue - } - reports[fh.URI()] = diagnostics - } - return reports, parseError -} - -func parseDetailsFile(filename string, options *settings.Options) (protocol.DocumentURI, []*cache.Diagnostic, error) { - buf, err := os.ReadFile(filename) - if err != nil { - return "", nil, err - } - var ( - uri protocol.DocumentURI - i int - diagnostics []*cache.Diagnostic - ) - type metadata struct { - File string `json:"file,omitempty"` - } - for dec := json.NewDecoder(bytes.NewReader(buf)); dec.More(); { - // The first element always contains metadata. 
- if i == 0 { - i++ - m := new(metadata) - if err := dec.Decode(m); err != nil { - return "", nil, err - } - if !strings.HasSuffix(m.File, ".go") { - continue // - } - uri = protocol.URIFromPath(m.File) - continue - } - d := new(protocol.Diagnostic) - if err := dec.Decode(d); err != nil { - return "", nil, err - } - d.Tags = []protocol.DiagnosticTag{} // must be an actual slice - msg := d.Code.(string) - if msg != "" { - msg = fmt.Sprintf("%s(%s)", msg, d.Message) - } - if !showDiagnostic(msg, d.Source, options) { - continue - } - var related []protocol.DiagnosticRelatedInformation - for _, ri := range d.RelatedInformation { - // TODO(rfindley): The compiler uses LSP-like JSON to encode gc details, - // however the positions it uses are 1-based UTF-8: - // https://github.com/golang/go/blob/master/src/cmd/compile/internal/logopt/log_opts.go - // - // Here, we adjust for 0-based positions, but do not translate UTF-8 to UTF-16. - related = append(related, protocol.DiagnosticRelatedInformation{ - Location: protocol.Location{ - URI: ri.Location.URI, - Range: zeroIndexedRange(ri.Location.Range), - }, - Message: ri.Message, - }) - } - diagnostic := &cache.Diagnostic{ - URI: uri, - Range: zeroIndexedRange(d.Range), - Message: msg, - Severity: d.Severity, - Source: cache.OptimizationDetailsError, // d.Source is always "go compiler" as of 1.16, use our own - Tags: d.Tags, - Related: related, - } - diagnostics = append(diagnostics, diagnostic) - i++ - } - return uri, diagnostics, nil -} - -// showDiagnostic reports whether a given diagnostic should be shown to the end -// user, given the current options. -func showDiagnostic(msg, source string, o *settings.Options) bool { - if source != "go compiler" { - return false - } - if o.Annotations == nil { - return true - } - switch { - case strings.HasPrefix(msg, "canInline") || - strings.HasPrefix(msg, "cannotInline") || - strings.HasPrefix(msg, "inlineCall"): - return o.Annotations[settings.Inline] - case strings.HasPrefix(msg, "escape") || msg == "leak": - return o.Annotations[settings.Escape] - case strings.HasPrefix(msg, "nilcheck"): - return o.Annotations[settings.Nil] - case strings.HasPrefix(msg, "isInBounds") || - strings.HasPrefix(msg, "isSliceInBounds"): - return o.Annotations[settings.Bounds] - } - return false -} - -// The range produced by the compiler is 1-indexed, so subtract range by 1. -func zeroIndexedRange(rng protocol.Range) protocol.Range { - return protocol.Range{ - Start: protocol.Position{ - Line: rng.Start.Line - 1, - Character: rng.Start.Character - 1, - }, - End: protocol.Position{ - Line: rng.End.Line - 1, - Character: rng.End.Character - 1, - }, - } -} - -func findJSONFiles(dir string) ([]string, error) { - ans := []string{} - f := func(path string, fi os.FileInfo, _ error) error { - if fi.IsDir() { - return nil - } - if strings.HasSuffix(path, ".json") { - ans = append(ans, path) - } - return nil - } - err := filepath.Walk(dir, f) - return ans, err -} diff --git a/internal/golangorgx/gopls/golang/highlight.go b/internal/golangorgx/gopls/golang/highlight.go deleted file mode 100644 index e252e453773..00000000000 --- a/internal/golangorgx/gopls/golang/highlight.go +++ /dev/null @@ -1,526 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
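For reference, parseDetailsFile above streams the compiler's -gcflags=-json output: the first JSON value emitted for each file is a metadata record naming the Go source file, and the remaining values are LSP-style diagnostics whose positions are 1-based and must be shifted to the 0-based positions clients expect (see zeroIndexedRange). The following is a minimal, self-contained sketch of that streaming-decode pattern only; the meta, pos, and diag types and the sample diagnostic are illustrative stand-ins, not the deleted gopls types.

package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

// meta mirrors the leading element of each gc-details stream: it only
// names the source file the following diagnostics refer to.
type meta struct {
	File string `json:"file,omitempty"`
}

// pos and diag are illustrative stand-ins for the LSP-like records the
// compiler writes; the real code decodes into protocol.Diagnostic.
type pos struct {
	Line, Character int
}

type diag struct {
	Message string `json:"message"`
	Range   struct {
		Start, End pos
	} `json:"range"`
}

func main() {
	// Two concatenated JSON values: metadata first, then one diagnostic
	// whose positions are 1-based, as the compiler emits them.
	input := `{"file":"main.go"}
{"message":"canInlineFunction","range":{"start":{"line":3,"character":6},"end":{"line":3,"character":10}}}`

	dec := json.NewDecoder(strings.NewReader(input))

	var m meta
	if err := dec.Decode(&m); err != nil {
		panic(err)
	}
	for dec.More() {
		var d diag
		if err := dec.Decode(&d); err != nil {
			panic(err)
		}
		// Shift to 0-based positions, as zeroIndexedRange does above.
		// (Like the deleted code, this only adjusts the indices; it does
		// not translate UTF-8 columns to UTF-16.)
		d.Range.Start.Line--
		d.Range.Start.Character--
		d.Range.End.Line--
		d.Range.End.Character--
		fmt.Printf("%s %+v: %s\n", m.File, d.Range, d.Message)
	}
}
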
- -package golang - -import ( - "context" - "fmt" - "go/ast" - "go/token" - "go/types" - - "cuelang.org/go/internal/golangorgx/gopls/cache" - "cuelang.org/go/internal/golangorgx/gopls/file" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/util/typesutil" - "cuelang.org/go/internal/golangorgx/tools/event" - "golang.org/x/tools/go/ast/astutil" -) - -func Highlight(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, position protocol.Position) ([]protocol.Range, error) { - ctx, done := event.Start(ctx, "golang.Highlight") - defer done() - - // We always want fully parsed files for highlight, regardless - // of whether the file belongs to a workspace package. - pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) - if err != nil { - return nil, fmt.Errorf("getting package for Highlight: %w", err) - } - - pos, err := pgf.PositionPos(position) - if err != nil { - return nil, err - } - path, _ := astutil.PathEnclosingInterval(pgf.File, pos, pos) - if len(path) == 0 { - return nil, fmt.Errorf("no enclosing position found for %v:%v", position.Line, position.Character) - } - // If start == end for astutil.PathEnclosingInterval, the 1-char interval - // following start is used instead. As a result, we might not get an exact - // match so we should check the 1-char interval to the left of the passed - // in position to see if that is an exact match. - if _, ok := path[0].(*ast.Ident); !ok { - if p, _ := astutil.PathEnclosingInterval(pgf.File, pos-1, pos-1); p != nil { - switch p[0].(type) { - case *ast.Ident, *ast.SelectorExpr: - path = p // use preceding ident/selector - } - } - } - result, err := highlightPath(path, pgf.File, pkg.GetTypesInfo()) - if err != nil { - return nil, err - } - var ranges []protocol.Range - for rng := range result { - rng, err := pgf.PosRange(rng.start, rng.end) - if err != nil { - return nil, err - } - ranges = append(ranges, rng) - } - return ranges, nil -} - -// highlightPath returns ranges to highlight for the given enclosing path, -// which should be the result of astutil.PathEnclosingInterval. -func highlightPath(path []ast.Node, file *ast.File, info *types.Info) (map[posRange]struct{}, error) { - result := make(map[posRange]struct{}) - switch node := path[0].(type) { - case *ast.BasicLit: - // Import path string literal? - if len(path) > 1 { - if imp, ok := path[1].(*ast.ImportSpec); ok { - highlight := func(n ast.Node) { - result[posRange{start: n.Pos(), end: n.End()}] = struct{}{} - } - - // Highlight the import itself... - highlight(imp) - - // ...and all references to it in the file. - if pkgname, ok := typesutil.ImportedPkgName(info, imp); ok { - ast.Inspect(file, func(n ast.Node) bool { - if id, ok := n.(*ast.Ident); ok && - info.Uses[id] == pkgname { - highlight(id) - } - return true - }) - } - return result, nil - } - } - highlightFuncControlFlow(path, result) - case *ast.ReturnStmt, *ast.FuncDecl, *ast.FuncType: - highlightFuncControlFlow(path, result) - case *ast.Ident: - // Check if ident is inside return or func decl. - highlightFuncControlFlow(path, result) - highlightIdentifier(node, file, info, result) - case *ast.ForStmt, *ast.RangeStmt: - highlightLoopControlFlow(path, info, result) - case *ast.SwitchStmt: - highlightSwitchFlow(path, info, result) - case *ast.BranchStmt: - // BREAK can exit a loop, switch or select, while CONTINUE exit a loop so - // these need to be handled separately. They can also be embedded in any - // other loop/switch/select if they have a label. 
TODO: add support for - // GOTO and FALLTHROUGH as well. - switch node.Tok { - case token.BREAK: - if node.Label != nil { - highlightLabeledFlow(path, info, node, result) - } else { - highlightUnlabeledBreakFlow(path, info, result) - } - case token.CONTINUE: - if node.Label != nil { - highlightLabeledFlow(path, info, node, result) - } else { - highlightLoopControlFlow(path, info, result) - } - } - default: - // If the cursor is in an unidentified area, return empty results. - return nil, nil - } - return result, nil -} - -type posRange struct { - start, end token.Pos -} - -// highlightFuncControlFlow adds highlight ranges to the result map to -// associate results and result parameters. -// -// Specifically, if the cursor is in a result or result parameter, all -// results and result parameters with the same index are highlighted. If the -// cursor is in a 'func' or 'return' keyword, the func keyword as well as all -// returns from that func are highlighted. -// -// As a special case, if the cursor is within a complicated expression, control -// flow highlighting is disabled, as it would highlight too much. -func highlightFuncControlFlow(path []ast.Node, result map[posRange]unit) { - - var ( - funcType *ast.FuncType // type of enclosing func, or nil - funcBody *ast.BlockStmt // body of enclosing func, or nil - returnStmt *ast.ReturnStmt // enclosing ReturnStmt within the func, or nil - ) - -findEnclosingFunc: - for i, n := range path { - switch n := n.(type) { - // TODO(rfindley, low priority): these pre-existing cases for KeyValueExpr - // and CallExpr appear to avoid highlighting when the cursor is in a - // complicated expression. However, the basis for this heuristic is - // unclear. Can we formalize a rationale? - case *ast.KeyValueExpr: - // If cursor is in a key: value expr, we don't want control flow highlighting. - return - - case *ast.CallExpr: - // If cursor is an arg in a callExpr, we don't want control flow highlighting. - if i > 0 { - for _, arg := range n.Args { - if arg == path[i-1] { - return - } - } - } - - case *ast.FuncLit: - funcType = n.Type - funcBody = n.Body - break findEnclosingFunc - - case *ast.FuncDecl: - funcType = n.Type - funcBody = n.Body - break findEnclosingFunc - - case *ast.ReturnStmt: - returnStmt = n - } - } - - if funcType == nil { - return // cursor is not in a function - } - - // Helper functions for inspecting the current location. - var ( - pos = path[0].Pos() - inSpan = func(start, end token.Pos) bool { return start <= pos && pos < end } - inNode = func(n ast.Node) bool { return inSpan(n.Pos(), n.End()) } - ) - - inResults := funcType.Results != nil && inNode(funcType.Results) - - // If the cursor is on a "return" or "func" keyword, but not highlighting any - // specific field or expression, we should highlight all of the exit points - // of the function, including the "return" and "func" keywords. - funcEnd := funcType.Func + token.Pos(len("func")) - highlightAll := path[0] == returnStmt || inSpan(funcType.Func, funcEnd) - var highlightIndexes map[int]bool - - if highlightAll { - // Add the "func" part of the func declaration. - result[posRange{ - start: funcType.Func, - end: funcEnd, - }] = unit{} - } else if returnStmt == nil && !inResults { - return // nothing to highlight - } else { - // If we're not highighting the entire return statement, we need to collect - // specific result indexes to highlight. This may be more than one index if - // the cursor is on a multi-name result field, but not in any specific name. 
- if !highlightAll { - highlightIndexes = make(map[int]bool) - if returnStmt != nil { - for i, n := range returnStmt.Results { - if inNode(n) { - highlightIndexes[i] = true - break - } - } - } - - // Scan fields, either adding highlights according to the highlightIndexes - // computed above, or accounting for the cursor position within the result - // list. - // (We do both at once to avoid repeating the cumbersome field traversal.) - i := 0 - findField: - for _, field := range funcType.Results.List { - for j, name := range field.Names { - if inNode(name) || highlightIndexes[i+j] { - result[posRange{name.Pos(), name.End()}] = unit{} - highlightIndexes[i+j] = true - break findField // found/highlighted the specific name - } - } - // If the cursor is in a field but not in a name (e.g. in the space, or - // the type), highlight the whole field. - // - // Note that this may not be ideal if we're at e.g. - // - // (x,‸y int, z int8) - // - // ...where it would make more sense to highlight only y. But we don't - // reach this function if not in a func, return, ident, or basiclit. - if inNode(field) || highlightIndexes[i] { - result[posRange{field.Pos(), field.End()}] = unit{} - highlightIndexes[i] = true - if inNode(field) { - for j := range field.Names { - highlightIndexes[i+j] = true - } - } - break findField // found/highlighted the field - } - - n := len(field.Names) - if n == 0 { - n = 1 - } - i += n - } - } - } - - if funcBody != nil { - ast.Inspect(funcBody, func(n ast.Node) bool { - switch n := n.(type) { - case *ast.FuncDecl, *ast.FuncLit: - // Don't traverse into any functions other than enclosingFunc. - return false - case *ast.ReturnStmt: - if highlightAll { - // Add the entire return statement. - result[posRange{n.Pos(), n.End()}] = unit{} - } else { - // Add the highlighted indexes. - for i, expr := range n.Results { - if highlightIndexes[i] { - result[posRange{expr.Pos(), expr.End()}] = unit{} - } - } - } - return false - - } - return true - }) - } -} - -// highlightUnlabeledBreakFlow highlights the innermost enclosing for/range/switch or swlect -func highlightUnlabeledBreakFlow(path []ast.Node, info *types.Info, result map[posRange]struct{}) { - // Reverse walk the path until we find closest loop, select, or switch. - for _, n := range path { - switch n.(type) { - case *ast.ForStmt, *ast.RangeStmt: - highlightLoopControlFlow(path, info, result) - return // only highlight the innermost statement - case *ast.SwitchStmt: - highlightSwitchFlow(path, info, result) - return - case *ast.SelectStmt: - // TODO: add highlight when breaking a select. - return - } - } -} - -// highlightLabeledFlow highlights the enclosing labeled for, range, -// or switch statement denoted by a labeled break or continue stmt. 
-func highlightLabeledFlow(path []ast.Node, info *types.Info, stmt *ast.BranchStmt, result map[posRange]struct{}) { - use := info.Uses[stmt.Label] - if use == nil { - return - } - for _, n := range path { - if label, ok := n.(*ast.LabeledStmt); ok && info.Defs[label.Label] == use { - switch label.Stmt.(type) { - case *ast.ForStmt, *ast.RangeStmt: - highlightLoopControlFlow([]ast.Node{label.Stmt, label}, info, result) - case *ast.SwitchStmt: - highlightSwitchFlow([]ast.Node{label.Stmt, label}, info, result) - } - return - } - } -} - -func labelFor(path []ast.Node) *ast.Ident { - if len(path) > 1 { - if n, ok := path[1].(*ast.LabeledStmt); ok { - return n.Label - } - } - return nil -} - -func highlightLoopControlFlow(path []ast.Node, info *types.Info, result map[posRange]struct{}) { - var loop ast.Node - var loopLabel *ast.Ident - stmtLabel := labelFor(path) -Outer: - // Reverse walk the path till we get to the for loop. - for i := range path { - switch n := path[i].(type) { - case *ast.ForStmt, *ast.RangeStmt: - loopLabel = labelFor(path[i:]) - - if stmtLabel == nil || loopLabel == stmtLabel { - loop = n - break Outer - } - } - } - if loop == nil { - return - } - - // Add the for statement. - rng := posRange{ - start: loop.Pos(), - end: loop.Pos() + token.Pos(len("for")), - } - result[rng] = struct{}{} - - // Traverse AST to find branch statements within the same for-loop. - ast.Inspect(loop, func(n ast.Node) bool { - switch n.(type) { - case *ast.ForStmt, *ast.RangeStmt: - return loop == n - case *ast.SwitchStmt, *ast.SelectStmt: - return false - } - b, ok := n.(*ast.BranchStmt) - if !ok { - return true - } - if b.Label == nil || info.Uses[b.Label] == info.Defs[loopLabel] { - result[posRange{start: b.Pos(), end: b.End()}] = struct{}{} - } - return true - }) - - // Find continue statements in the same loop or switches/selects. - ast.Inspect(loop, func(n ast.Node) bool { - switch n.(type) { - case *ast.ForStmt, *ast.RangeStmt: - return loop == n - } - - if n, ok := n.(*ast.BranchStmt); ok && n.Tok == token.CONTINUE { - result[posRange{start: n.Pos(), end: n.End()}] = struct{}{} - } - return true - }) - - // We don't need to check other for loops if we aren't looking for labeled statements. - if loopLabel == nil { - return - } - - // Find labeled branch statements in any loop. - ast.Inspect(loop, func(n ast.Node) bool { - b, ok := n.(*ast.BranchStmt) - if !ok { - return true - } - // statement with labels that matches the loop - if b.Label != nil && info.Uses[b.Label] == info.Defs[loopLabel] { - result[posRange{start: b.Pos(), end: b.End()}] = struct{}{} - } - return true - }) -} - -func highlightSwitchFlow(path []ast.Node, info *types.Info, result map[posRange]struct{}) { - var switchNode ast.Node - var switchNodeLabel *ast.Ident - stmtLabel := labelFor(path) -Outer: - // Reverse walk the path till we get to the switch statement. - for i := range path { - switch n := path[i].(type) { - case *ast.SwitchStmt: - switchNodeLabel = labelFor(path[i:]) - if stmtLabel == nil || switchNodeLabel == stmtLabel { - switchNode = n - break Outer - } - } - } - // Cursor is not in a switch statement - if switchNode == nil { - return - } - - // Add the switch statement. - rng := posRange{ - start: switchNode.Pos(), - end: switchNode.Pos() + token.Pos(len("switch")), - } - result[rng] = struct{}{} - - // Traverse AST to find break statements within the same switch. 
- ast.Inspect(switchNode, func(n ast.Node) bool { - switch n.(type) { - case *ast.SwitchStmt: - return switchNode == n - case *ast.ForStmt, *ast.RangeStmt, *ast.SelectStmt: - return false - } - - b, ok := n.(*ast.BranchStmt) - if !ok || b.Tok != token.BREAK { - return true - } - - if b.Label == nil || info.Uses[b.Label] == info.Defs[switchNodeLabel] { - result[posRange{start: b.Pos(), end: b.End()}] = struct{}{} - } - return true - }) - - // We don't need to check other switches if we aren't looking for labeled statements. - if switchNodeLabel == nil { - return - } - - // Find labeled break statements in any switch - ast.Inspect(switchNode, func(n ast.Node) bool { - b, ok := n.(*ast.BranchStmt) - if !ok || b.Tok != token.BREAK { - return true - } - - if b.Label != nil && info.Uses[b.Label] == info.Defs[switchNodeLabel] { - result[posRange{start: b.Pos(), end: b.End()}] = struct{}{} - } - - return true - }) -} - -func highlightIdentifier(id *ast.Ident, file *ast.File, info *types.Info, result map[posRange]struct{}) { - highlight := func(n ast.Node) { - result[posRange{start: n.Pos(), end: n.End()}] = struct{}{} - } - - // obj may be nil if the Ident is undefined. - // In this case, the behavior expected by tests is - // to match other undefined Idents of the same name. - obj := info.ObjectOf(id) - - ast.Inspect(file, func(n ast.Node) bool { - switch n := n.(type) { - case *ast.Ident: - if n.Name == id.Name && info.ObjectOf(n) == obj { - highlight(n) - } - - case *ast.ImportSpec: - pkgname, ok := typesutil.ImportedPkgName(info, n) - if ok && pkgname == obj { - if n.Name != nil { - highlight(n.Name) - } else { - highlight(n) - } - } - } - return true - }) -} diff --git a/internal/golangorgx/gopls/golang/hover.go b/internal/golangorgx/gopls/golang/hover.go deleted file mode 100644 index a267e2ed365..00000000000 --- a/internal/golangorgx/gopls/golang/hover.go +++ /dev/null @@ -1,1274 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package golang - -import ( - "bytes" - "context" - "encoding/json" - "fmt" - "go/ast" - "go/constant" - "go/doc" - "go/format" - "go/token" - "go/types" - "io/fs" - "path/filepath" - "strconv" - "strings" - "text/tabwriter" - "time" - "unicode/utf8" - - "cuelang.org/go/internal/golangorgx/gopls/cache" - "cuelang.org/go/internal/golangorgx/gopls/cache/metadata" - "cuelang.org/go/internal/golangorgx/gopls/cache/parsego" - "cuelang.org/go/internal/golangorgx/gopls/file" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/settings" - "cuelang.org/go/internal/golangorgx/gopls/util/bug" - "cuelang.org/go/internal/golangorgx/gopls/util/safetoken" - "cuelang.org/go/internal/golangorgx/gopls/util/slices" - "cuelang.org/go/internal/golangorgx/gopls/util/typesutil" - "cuelang.org/go/internal/golangorgx/tools/event" - "cuelang.org/go/internal/golangorgx/tools/tokeninternal" - "golang.org/x/text/unicode/runenames" - "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/go/types/typeutil" -) - -// hoverJSON contains the structured result of a hover query. It is -// formatted in one of several formats as determined by the HoverKind -// setting, one of which is JSON. -// -// We believe this is used only by govim. -// TODO(adonovan): see if we can wean all clients of this interface. -type hoverJSON struct { - // Synopsis is a single sentence synopsis of the symbol's documentation. 
- Synopsis string `json:"synopsis"` - - // FullDocumentation is the symbol's full documentation. - FullDocumentation string `json:"fullDocumentation"` - - // Signature is the symbol's signature. - Signature string `json:"signature"` - - // SingleLine is a single line describing the symbol. - // This is recommended only for use in clients that show a single line for hover. - SingleLine string `json:"singleLine"` - - // SymbolName is the human-readable name to use for the symbol in links. - SymbolName string `json:"symbolName"` - - // LinkPath is the pkg.go.dev link for the given symbol. - // For example, the "go/ast" part of "pkg.go.dev/go/ast#Node". - LinkPath string `json:"linkPath"` - - // LinkAnchor is the pkg.go.dev link anchor for the given symbol. - // For example, the "Node" part of "pkg.go.dev/go/ast#Node". - LinkAnchor string `json:"linkAnchor"` - - // New fields go below, and are unexported. The existing - // exported fields are underspecified and have already - // constrained our movements too much. A detailed JSON - // interface might be nice, but it needs a design and a - // precise specification. - - // typeDecl is the declaration syntax for a type, - // or "" for a non-type. - typeDecl string - - // methods is the list of descriptions of methods of a type, - // omitting any that are obvious from typeDecl. - // It is "" for a non-type. - methods string - - // promotedFields is the list of descriptions of accessible - // fields of a (struct) type that were promoted through an - // embedded field. - promotedFields string -} - -// Hover implements the "textDocument/hover" RPC for Go files. -func Hover(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, position protocol.Position) (*protocol.Hover, error) { - ctx, done := event.Start(ctx, "golang.Hover") - defer done() - - rng, h, err := hover(ctx, snapshot, fh, position) - if err != nil { - return nil, err - } - if h == nil { - return nil, nil - } - hover, err := formatHover(h, snapshot.Options()) - if err != nil { - return nil, err - } - return &protocol.Hover{ - Contents: protocol.MarkupContent{ - Kind: snapshot.Options().PreferredContentFormat, - Value: hover, - }, - Range: rng, - }, nil -} - -// hover computes hover information at the given position. If we do not support -// hovering at the position, it returns _, nil, nil: an error is only returned -// if the position is valid but we fail to compute hover information. -func hover(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pp protocol.Position) (protocol.Range, *hoverJSON, error) { - pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) - if err != nil { - return protocol.Range{}, nil, err - } - pos, err := pgf.PositionPos(pp) - if err != nil { - return protocol.Range{}, nil, err - } - - // Handle hovering over import paths, which do not have an associated - // identifier. - for _, spec := range pgf.File.Imports { - // We are inclusive of the end point here to allow hovering when the cursor - // is just after the import path. - if spec.Path.Pos() <= pos && pos <= spec.Path.End() { - return hoverImport(ctx, snapshot, pkg, pgf, spec) - } - } - - // Handle hovering over the package name, which does not have an associated - // object. - // As with import paths, we allow hovering just after the package name. - if pgf.File.Name != nil && pgf.File.Name.Pos() <= pos && pos <= pgf.File.Name.Pos() { - return hoverPackageName(pkg, pgf) - } - - // Handle hovering over (non-import-path) literals. 
- if path, _ := astutil.PathEnclosingInterval(pgf.File, pos, pos); len(path) > 0 { - if lit, _ := path[0].(*ast.BasicLit); lit != nil { - return hoverLit(pgf, lit, pos) - } - } - - // Handle hovering over embed directive argument. - pattern, embedRng := parseEmbedDirective(pgf.Mapper, pp) - if pattern != "" { - return hoverEmbed(fh, embedRng, pattern) - } - - // Handle linkname directive by overriding what to look for. - var linkedRange *protocol.Range // range referenced by linkname directive, or nil - if pkgPath, name, offset := parseLinkname(pgf.Mapper, pp); pkgPath != "" && name != "" { - // rng covering 2nd linkname argument: pkgPath.name. - rng, err := pgf.PosRange(pgf.Tok.Pos(offset), pgf.Tok.Pos(offset+len(pkgPath)+len(".")+len(name))) - if err != nil { - return protocol.Range{}, nil, fmt.Errorf("range over linkname arg: %w", err) - } - linkedRange = &rng - - pkg, pgf, pos, err = findLinkname(ctx, snapshot, PackagePath(pkgPath), name) - if err != nil { - return protocol.Range{}, nil, fmt.Errorf("find linkname: %w", err) - } - } - - // The general case: compute hover information for the object referenced by - // the identifier at pos. - ident, obj, selectedType := referencedObject(pkg, pgf, pos) - if obj == nil || ident == nil { - return protocol.Range{}, nil, nil // no object to hover - } - - // Unless otherwise specified, rng covers the ident being hovered. - var rng protocol.Range - if linkedRange != nil { - rng = *linkedRange - } else { - rng, err = pgf.NodeRange(ident) - if err != nil { - return protocol.Range{}, nil, err - } - } - - // By convention, we qualify hover information relative to the package - // from which the request originated. - qf := typesutil.FileQualifier(pgf.File, pkg.GetTypes(), pkg.GetTypesInfo()) - - // Handle type switch identifiers as a special case, since they don't have an - // object. - // - // There's not much useful information to provide. - if selectedType != nil { - fakeObj := types.NewVar(obj.Pos(), obj.Pkg(), obj.Name(), selectedType) - signature := types.ObjectString(fakeObj, qf) - return rng, &hoverJSON{ - Signature: signature, - SingleLine: signature, - SymbolName: fakeObj.Name(), - }, nil - } - - // Handle builtins, which don't have a package or position. - if !obj.Pos().IsValid() { - h, err := hoverBuiltin(ctx, snapshot, obj) - return rng, h, err - } - - // For all other objects, consider the full syntax of their declaration in - // order to correctly compute their documentation, signature, and link. - // - // Beware: decl{PGF,Pos} are not necessarily associated with pkg.FileSet(). - declPGF, declPos, err := parseFull(ctx, snapshot, pkg.FileSet(), obj.Pos()) - if err != nil { - return protocol.Range{}, nil, fmt.Errorf("re-parsing declaration of %s: %v", obj.Name(), err) - } - decl, spec, field := findDeclInfo([]*ast.File{declPGF.File}, declPos) // may be nil^3 - comment := chooseDocComment(decl, spec, field) - docText := comment.Text() - - // By default, types.ObjectString provides a reasonable signature. - signature := objectString(obj, qf, declPos, declPGF.Tok, spec) - singleLineSignature := signature - - // TODO(rfindley): we could do much better for inferred signatures. 
- if inferred := inferredSignature(pkg.GetTypesInfo(), ident); inferred != nil { - if s := inferredSignatureString(obj, qf, inferred); s != "" { - signature = s - } - } - - var typeDecl, methods, fields string - - // For "objects defined by a type spec", the signature produced by - // objectString is insufficient: - // (1) large structs are formatted poorly, with no newlines - // (2) we lose inline comments - // Furthermore, we include a summary of their method set. - _, isTypeName := obj.(*types.TypeName) - _, isTypeParam := obj.Type().(*types.TypeParam) - if isTypeName && !isTypeParam { - spec, ok := spec.(*ast.TypeSpec) - if !ok { - // We cannot find a TypeSpec for this type or alias declaration - // (that is not a type parameter or a built-in). - // This should be impossible even for ill-formed trees; - // we suspect that AST repair may be creating inconsistent - // positions. Don't report a bug in that case. (#64241) - errorf := fmt.Errorf - if !declPGF.Fixed() { - errorf = bug.Errorf - } - return protocol.Range{}, nil, errorf("type name %q without type spec", obj.Name()) - } - - // Format the type's declaration syntax. - { - // Don't duplicate comments. - spec2 := *spec - spec2.Doc = nil - spec2.Comment = nil - - var b strings.Builder - b.WriteString("type ") - fset := tokeninternal.FileSetFor(declPGF.Tok) - // TODO(adonovan): use a smarter formatter that omits - // inaccessible fields (non-exported ones from other packages). - if err := format.Node(&b, fset, &spec2); err != nil { - return protocol.Range{}, nil, err - } - typeDecl = b.String() - } - - // Promoted fields - // - // Show a table of accessible fields of the (struct) - // type that may not be visible in the syntax (above) - // due to promotion through embedded fields. - // - // Example: - // - // // Embedded fields: - // foo int // through x.y - // z string // through x.y - if prom := promotedFields(obj.Type(), pkg.GetTypes()); len(prom) > 0 { - var b strings.Builder - b.WriteString("// Embedded fields:\n") - w := tabwriter.NewWriter(&b, 0, 8, 1, ' ', 0) - for _, f := range prom { - fmt.Fprintf(w, "%s\t%s\t// through %s\t\n", - f.field.Name(), - types.TypeString(f.field.Type(), qf), - f.path) - } - w.Flush() - b.WriteByte('\n') - fields = b.String() - } - - // -- methods -- - - // For an interface type, explicit methods will have - // already been displayed when the node was formatted - // above. Don't list these again. - var skip map[string]bool - if iface, ok := spec.Type.(*ast.InterfaceType); ok { - if iface.Methods.List != nil { - for _, m := range iface.Methods.List { - if len(m.Names) == 1 { - if skip == nil { - skip = make(map[string]bool) - } - skip[m.Names[0].Name] = true - } - } - } - } - - // Display all the type's accessible methods, - // including those that require a pointer receiver, - // and those promoted from embedded struct fields or - // embedded interfaces. - var b strings.Builder - for _, m := range typeutil.IntuitiveMethodSet(obj.Type(), nil) { - if !accessibleTo(m.Obj(), pkg.GetTypes()) { - continue // inaccessible - } - if skip[m.Obj().Name()] { - continue // redundant with format.Node above - } - if b.Len() > 0 { - b.WriteByte('\n') - } - - // Use objectString for its prettier rendering of method receivers. - b.WriteString(objectString(m.Obj(), qf, token.NoPos, nil, nil)) - } - methods = b.String() - - signature = typeDecl + "\n" + methods - } - - // Compute link data (on pkg.go.dev or other documentation host). - // - // If linkPath is empty, the symbol is not linkable. 
- var ( - linkName string // => link title, always non-empty - linkPath string // => link path - anchor string // link anchor - linkMeta *metadata.Package // metadata for the linked package - ) - { - linkMeta = findFileInDeps(snapshot, pkg.Metadata(), declPGF.URI) - if linkMeta == nil { - return protocol.Range{}, nil, bug.Errorf("no package data for %s", declPGF.URI) - } - - // For package names, we simply link to their imported package. - if pkgName, ok := obj.(*types.PkgName); ok { - linkName = pkgName.Name() - linkPath = pkgName.Imported().Path() - impID := linkMeta.DepsByPkgPath[PackagePath(pkgName.Imported().Path())] - linkMeta = snapshot.Metadata(impID) - if linkMeta == nil { - // Broken imports have fake package paths, so it is not a bug if we - // don't have metadata. As of writing, there is no way to distinguish - // broken imports from a true bug where expected metadata is missing. - return protocol.Range{}, nil, fmt.Errorf("no package data for %s", declPGF.URI) - } - } else { - // For all others, check whether the object is in the package scope, or - // an exported field or method of an object in the package scope. - // - // We try to match pkgsite's heuristics for what is linkable, and what is - // not. - var recv types.Object - switch obj := obj.(type) { - case *types.Func: - sig := obj.Type().(*types.Signature) - if sig.Recv() != nil { - tname := typeToObject(sig.Recv().Type()) - if tname != nil { // beware typed nil - recv = tname - } - } - case *types.Var: - if obj.IsField() { - if spec, ok := spec.(*ast.TypeSpec); ok { - typeName := spec.Name - scopeObj, _ := obj.Pkg().Scope().Lookup(typeName.Name).(*types.TypeName) - if scopeObj != nil { - if st, _ := scopeObj.Type().Underlying().(*types.Struct); st != nil { - for i := 0; i < st.NumFields(); i++ { - if obj == st.Field(i) { - recv = scopeObj - } - } - } - } - } - } - } - - // Even if the object is not available in package documentation, it may - // be embedded in a documented receiver. Detect this by searching - // enclosing selector expressions. - // - // TODO(rfindley): pkgsite doesn't document fields from embedding, just - // methods. - if recv == nil || !recv.Exported() { - path := pathEnclosingObjNode(pgf.File, pos) - if enclosing := searchForEnclosing(pkg.GetTypesInfo(), path); enclosing != nil { - recv = enclosing - } else { - recv = nil // note: just recv = ... could result in a typed nil. - } - } - - pkg := obj.Pkg() - if recv != nil { - linkName = fmt.Sprintf("(%s.%s).%s", pkg.Name(), recv.Name(), obj.Name()) - if obj.Exported() && recv.Exported() && pkg.Scope().Lookup(recv.Name()) == recv { - linkPath = pkg.Path() - anchor = fmt.Sprintf("%s.%s", recv.Name(), obj.Name()) - } - } else { - linkName = fmt.Sprintf("%s.%s", pkg.Name(), obj.Name()) - if obj.Exported() && pkg.Scope().Lookup(obj.Name()) == obj { - linkPath = pkg.Path() - anchor = obj.Name() - } - } - } - } - - if snapshot.IsGoPrivatePath(linkPath) || linkMeta.ForTest != "" { - linkPath = "" - } else if linkMeta.Module != nil && linkMeta.Module.Version != "" { - mod := linkMeta.Module - linkPath = strings.Replace(linkPath, mod.Path, mod.Path+"@"+mod.Version, 1) - } - - return rng, &hoverJSON{ - Synopsis: doc.Synopsis(docText), - FullDocumentation: docText, - SingleLine: singleLineSignature, - SymbolName: linkName, - Signature: signature, - LinkPath: linkPath, - LinkAnchor: anchor, - typeDecl: typeDecl, - methods: methods, - promotedFields: fields, - }, nil -} - -// hoverBuiltin computes hover information when hovering over a builtin -// identifier. 
-func hoverBuiltin(ctx context.Context, snapshot *cache.Snapshot, obj types.Object) (*hoverJSON, error) { - // Special handling for error.Error, which is the only builtin method. - // - // TODO(rfindley): can this be unified with the handling below? - if obj.Name() == "Error" { - signature := obj.String() - return &hoverJSON{ - Signature: signature, - SingleLine: signature, - // TODO(rfindley): these are better than the current behavior. - // SymbolName: "(error).Error", - // LinkPath: "builtin", - // LinkAnchor: "error.Error", - }, nil - } - - pgf, node, err := builtinDecl(ctx, snapshot, obj) - if err != nil { - return nil, err - } - - var comment *ast.CommentGroup - path, _ := astutil.PathEnclosingInterval(pgf.File, node.Pos(), node.End()) - for _, n := range path { - switch n := n.(type) { - case *ast.GenDecl: - // Separate documentation and signature. - comment = n.Doc - node2 := *n - node2.Doc = nil - node = &node2 - case *ast.FuncDecl: - // Ditto. - comment = n.Doc - node2 := *n - node2.Doc = nil - node = &node2 - } - } - - signature := FormatNodeFile(pgf.Tok, node) - // Replace fake types with their common equivalent. - // TODO(rfindley): we should instead use obj.Type(), which would have the - // *actual* types of the builtin call. - signature = replacer.Replace(signature) - - docText := comment.Text() - return &hoverJSON{ - Synopsis: doc.Synopsis(docText), - FullDocumentation: docText, - Signature: signature, - SingleLine: obj.String(), - SymbolName: obj.Name(), - LinkPath: "builtin", - LinkAnchor: obj.Name(), - }, nil -} - -// hoverImport computes hover information when hovering over the import path of -// imp in the file pgf of pkg. -// -// If we do not have metadata for the hovered import, it returns _ -func hoverImport(ctx context.Context, snapshot *cache.Snapshot, pkg *cache.Package, pgf *ParsedGoFile, imp *ast.ImportSpec) (protocol.Range, *hoverJSON, error) { - rng, err := pgf.NodeRange(imp.Path) - if err != nil { - return protocol.Range{}, nil, err - } - - importPath := metadata.UnquoteImportPath(imp) - if importPath == "" { - return protocol.Range{}, nil, fmt.Errorf("invalid import path") - } - impID := pkg.Metadata().DepsByImpPath[importPath] - if impID == "" { - return protocol.Range{}, nil, fmt.Errorf("no package data for import %q", importPath) - } - impMetadata := snapshot.Metadata(impID) - if impMetadata == nil { - return protocol.Range{}, nil, bug.Errorf("failed to resolve import ID %q", impID) - } - - // Find the first file with a package doc comment. - var comment *ast.CommentGroup - for _, f := range impMetadata.CompiledGoFiles { - fh, err := snapshot.ReadFile(ctx, f) - if err != nil { - if ctx.Err() != nil { - return protocol.Range{}, nil, ctx.Err() - } - continue - } - pgf, err := snapshot.ParseGo(ctx, fh, ParseHeader) - if err != nil { - if ctx.Err() != nil { - return protocol.Range{}, nil, ctx.Err() - } - continue - } - if pgf.File.Doc != nil { - comment = pgf.File.Doc - break - } - } - - docText := comment.Text() - return rng, &hoverJSON{ - Synopsis: doc.Synopsis(docText), - FullDocumentation: docText, - }, nil -} - -// hoverPackageName computes hover information for the package name of the file -// pgf in pkg. 
-func hoverPackageName(pkg *cache.Package, pgf *ParsedGoFile) (protocol.Range, *hoverJSON, error) { - var comment *ast.CommentGroup - for _, pgf := range pkg.CompiledGoFiles() { - if pgf.File.Doc != nil { - comment = pgf.File.Doc - break - } - } - rng, err := pgf.NodeRange(pgf.File.Name) - if err != nil { - return protocol.Range{}, nil, err - } - docText := comment.Text() - return rng, &hoverJSON{ - Synopsis: doc.Synopsis(docText), - FullDocumentation: docText, - // Note: including a signature is redundant, since the cursor is already on the - // package name. - }, nil -} - -// hoverLit computes hover information when hovering over the basic literal lit -// in the file pgf. The provided pos must be the exact position of the cursor, -// as it is used to extract the hovered rune in strings. -// -// For example, hovering over "\u2211" in "foo \u2211 bar" yields: -// -// '∑', U+2211, N-ARY SUMMATION -func hoverLit(pgf *ParsedGoFile, lit *ast.BasicLit, pos token.Pos) (protocol.Range, *hoverJSON, error) { - var ( - value string // if non-empty, a constant value to format in hover - r rune // if non-zero, format a description of this rune in hover - start, end token.Pos // hover span - ) - // Extract a rune from the current position. - // 'Ω', "...Ω...", or 0x03A9 => 'Ω', U+03A9, GREEK CAPITAL LETTER OMEGA - switch lit.Kind { - case token.CHAR: - s, err := strconv.Unquote(lit.Value) - if err != nil { - // If the conversion fails, it's because of an invalid syntax, therefore - // there is no rune to be found. - return protocol.Range{}, nil, nil - } - r, _ = utf8.DecodeRuneInString(s) - if r == utf8.RuneError { - return protocol.Range{}, nil, fmt.Errorf("rune error") - } - start, end = lit.Pos(), lit.End() - - case token.INT: - // Short literals (e.g. 99 decimal, 07 octal) are uninteresting. - if len(lit.Value) < 3 { - return protocol.Range{}, nil, nil - } - - v := constant.MakeFromLiteral(lit.Value, lit.Kind, 0) - if v.Kind() != constant.Int { - return protocol.Range{}, nil, nil - } - - switch lit.Value[:2] { - case "0x", "0X": - // As a special case, try to recognize hexadecimal literals as runes if - // they are within the range of valid unicode values. - if v, ok := constant.Int64Val(v); ok && v > 0 && v <= utf8.MaxRune && utf8.ValidRune(rune(v)) { - r = rune(v) - } - fallthrough - case "0o", "0O", "0b", "0B": - // Format the decimal value of non-decimal literals. - value = v.ExactString() - start, end = lit.Pos(), lit.End() - default: - return protocol.Range{}, nil, nil - } - - case token.STRING: - // It's a string, scan only if it contains a unicode escape sequence under or before the - // current cursor position. - litOffset, err := safetoken.Offset(pgf.Tok, lit.Pos()) - if err != nil { - return protocol.Range{}, nil, err - } - offset, err := safetoken.Offset(pgf.Tok, pos) - if err != nil { - return protocol.Range{}, nil, err - } - for i := offset - litOffset; i > 0; i-- { - // Start at the cursor position and search backward for the beginning of a rune escape sequence. - rr, _ := utf8.DecodeRuneInString(lit.Value[i:]) - if rr == utf8.RuneError { - return protocol.Range{}, nil, fmt.Errorf("rune error") - } - if rr == '\\' { - // Got the beginning, decode it. - var tail string - r, _, tail, err = strconv.UnquoteChar(lit.Value[i:], '"') - if err != nil { - // If the conversion fails, it's because of an invalid syntax, - // therefore is no rune to be found. - return protocol.Range{}, nil, nil - } - // Only the rune escape sequence part of the string has to be highlighted, recompute the range. 
- runeLen := len(lit.Value) - (i + len(tail)) - start = token.Pos(int(lit.Pos()) + i) - end = token.Pos(int(start) + runeLen) - break - } - } - } - - if value == "" && r == 0 { // nothing to format - return protocol.Range{}, nil, nil - } - - rng, err := pgf.PosRange(start, end) - if err != nil { - return protocol.Range{}, nil, err - } - - var b strings.Builder - if value != "" { - b.WriteString(value) - } - if r != 0 { - runeName := runenames.Name(r) - if len(runeName) > 0 && runeName[0] == '<' { - // Check if the rune looks like an HTML tag. If so, trim the surrounding <> - // characters to work around https://github.com/microsoft/vscode/issues/124042. - runeName = strings.TrimRight(runeName[1:], ">") - } - if b.Len() > 0 { - b.WriteString(", ") - } - if strconv.IsPrint(r) { - fmt.Fprintf(&b, "'%c', ", r) - } - fmt.Fprintf(&b, "U+%04X, %s", r, runeName) - } - hover := b.String() - return rng, &hoverJSON{ - Synopsis: hover, - FullDocumentation: hover, - }, nil -} - -// hoverEmbed computes hover information for a filepath.Match pattern. -// Assumes that the pattern is relative to the location of fh. -func hoverEmbed(fh file.Handle, rng protocol.Range, pattern string) (protocol.Range, *hoverJSON, error) { - s := &strings.Builder{} - - dir := filepath.Dir(fh.URI().Path()) - var matches []string - err := filepath.WalkDir(dir, func(abs string, d fs.DirEntry, e error) error { - if e != nil { - return e - } - rel, err := filepath.Rel(dir, abs) - if err != nil { - return err - } - ok, err := filepath.Match(pattern, rel) - if err != nil { - return err - } - if ok && !d.IsDir() { - matches = append(matches, rel) - } - return nil - }) - if err != nil { - return protocol.Range{}, nil, err - } - - for _, m := range matches { - // TODO: Renders each file as separate markdown paragraphs. - // If forcing (a single) newline is possible it might be more clear. - fmt.Fprintf(s, "%s\n\n", m) - } - - json := &hoverJSON{ - Signature: fmt.Sprintf("Embedding %q", pattern), - Synopsis: s.String(), - FullDocumentation: s.String(), - } - return rng, json, nil -} - -// inferredSignatureString is a wrapper around the types.ObjectString function -// that adds more information to inferred signatures. It will return an empty string -// if the passed types.Object is not a signature. -func inferredSignatureString(obj types.Object, qf types.Qualifier, inferred *types.Signature) string { - // If the signature type was inferred, prefer the inferred signature with a - // comment showing the generic signature. - if sig, _ := obj.Type().(*types.Signature); sig != nil && sig.TypeParams().Len() > 0 && inferred != nil { - obj2 := types.NewFunc(obj.Pos(), obj.Pkg(), obj.Name(), inferred) - str := types.ObjectString(obj2, qf) - // Try to avoid overly long lines. - if len(str) > 60 { - str += "\n" - } else { - str += " " - } - str += "// " + types.TypeString(sig, qf) - return str - } - return "" -} - -// objectString is a wrapper around the types.ObjectString function. -// It handles adding more information to the object string. -// If spec is non-nil, it may be used to format additional declaration -// syntax, and file must be the token.File describing its positions. -// -// Precondition: obj is not a built-in function or method. 
-func objectString(obj types.Object, qf types.Qualifier, declPos token.Pos, file *token.File, spec ast.Spec) string { - str := types.ObjectString(obj, qf) - - switch obj := obj.(type) { - case *types.Func: - // We fork ObjectString to improve its rendering of methods: - // specifically, we show the receiver name, - // and replace the period in (T).f by a space (#62190). - - sig := obj.Type().(*types.Signature) - - var buf bytes.Buffer - buf.WriteString("func ") - if recv := sig.Recv(); recv != nil { - buf.WriteByte('(') - if _, ok := recv.Type().(*types.Interface); ok { - // gcimporter creates abstract methods of - // named interfaces using the interface type - // (not the named type) as the receiver. - // Don't print it in full. - buf.WriteString("interface") - } else { - // Show receiver name (go/types does not). - name := recv.Name() - if name != "" && name != "_" { - buf.WriteString(name) - buf.WriteString(" ") - } - types.WriteType(&buf, recv.Type(), qf) - } - buf.WriteByte(')') - buf.WriteByte(' ') // space (go/types uses a period) - } else if s := qf(obj.Pkg()); s != "" { - buf.WriteString(s) - buf.WriteString(".") - } - buf.WriteString(obj.Name()) - types.WriteSignature(&buf, sig, qf) - str = buf.String() - - case *types.Const: - // Show value of a constant. - var ( - declaration = obj.Val().String() // default formatted declaration - comment = "" // if non-empty, a clarifying comment - ) - - // Try to use the original declaration. - switch obj.Val().Kind() { - case constant.String: - // Usually the original declaration of a string doesn't carry much information. - // Also strings can be very long. So, just use the constant's value. - - default: - if spec, _ := spec.(*ast.ValueSpec); spec != nil { - for i, name := range spec.Names { - if declPos == name.Pos() { - if i < len(spec.Values) { - originalDeclaration := FormatNodeFile(file, spec.Values[i]) - if originalDeclaration != declaration { - comment = declaration - declaration = originalDeclaration - } - } - break - } - } - } - } - - // Special formatting cases. - switch typ := obj.Type().(type) { - case *types.Named: - // Try to add a formatted duration as an inline comment. - pkg := typ.Obj().Pkg() - if pkg.Path() == "time" && typ.Obj().Name() == "Duration" { - if d, ok := constant.Int64Val(obj.Val()); ok { - comment = time.Duration(d).String() - } - } - } - if comment == declaration { - comment = "" - } - - str += " = " + declaration - if comment != "" { - str += " // " + comment - } - } - return str -} - -// HoverDocForObject returns the best doc comment for obj (for which -// fset provides file/line information). -// -// TODO(rfindley): there appears to be zero(!) tests for this functionality. 
-func HoverDocForObject(ctx context.Context, snapshot *cache.Snapshot, fset *token.FileSet, obj types.Object) (*ast.CommentGroup, error) { - if _, isTypeName := obj.(*types.TypeName); isTypeName { - if _, isTypeParam := obj.Type().(*types.TypeParam); isTypeParam { - return nil, nil - } - } - - pgf, pos, err := parseFull(ctx, snapshot, fset, obj.Pos()) - if err != nil { - return nil, fmt.Errorf("re-parsing: %v", err) - } - - decl, spec, field := findDeclInfo([]*ast.File{pgf.File}, pos) - return chooseDocComment(decl, spec, field), nil -} - -func chooseDocComment(decl ast.Decl, spec ast.Spec, field *ast.Field) *ast.CommentGroup { - if field != nil { - if field.Doc != nil { - return field.Doc - } - if field.Comment != nil { - return field.Comment - } - return nil - } - switch decl := decl.(type) { - case *ast.FuncDecl: - return decl.Doc - case *ast.GenDecl: - switch spec := spec.(type) { - case *ast.ValueSpec: - if spec.Doc != nil { - return spec.Doc - } - if decl.Doc != nil { - return decl.Doc - } - return spec.Comment - case *ast.TypeSpec: - if spec.Doc != nil { - return spec.Doc - } - if decl.Doc != nil { - return decl.Doc - } - return spec.Comment - } - } - return nil -} - -// parseFull fully parses the file corresponding to position pos (for -// which fset provides file/line information). -// -// It returns the resulting parsego.File as well as new pos contained -// in the parsed file. -// -// BEWARE: the provided FileSet is used only to interpret the provided -// pos; the resulting File and Pos may belong to the same or a -// different FileSet, such as one synthesized by the parser cache, if -// parse-caching is enabled. -func parseFull(ctx context.Context, snapshot *cache.Snapshot, fset *token.FileSet, pos token.Pos) (*parsego.File, token.Pos, error) { - f := fset.File(pos) - if f == nil { - return nil, 0, bug.Errorf("internal error: no file for position %d", pos) - } - - uri := protocol.URIFromPath(f.Name()) - fh, err := snapshot.ReadFile(ctx, uri) - if err != nil { - return nil, 0, err - } - - pgf, err := snapshot.ParseGo(ctx, fh, ParseFull) - if err != nil { - return nil, 0, err - } - - offset, err := safetoken.Offset(f, pos) - if err != nil { - return nil, 0, bug.Errorf("offset out of bounds in %q", uri) - } - - fullPos, err := safetoken.Pos(pgf.Tok, offset) - if err != nil { - return nil, 0, err - } - - return pgf, fullPos, nil -} - -func formatHover(h *hoverJSON, options *settings.Options) (string, error) { - maybeMarkdown := func(s string) string { - if s != "" && options.PreferredContentFormat == protocol.Markdown { - s = fmt.Sprintf("```go\n%s\n```", strings.Trim(s, "\n")) - } - return s - } - - switch options.HoverKind { - case settings.SingleLine: - return h.SingleLine, nil - - case settings.NoDocumentation: - return maybeMarkdown(h.Signature), nil - - case settings.Structured: - b, err := json.Marshal(h) - if err != nil { - return "", err - } - return string(b), nil - - case settings.SynopsisDocumentation, - settings.FullDocumentation: - // For types, we display TypeDecl and Methods, - // but not Signature, which is redundant (= TypeDecl + "\n" + Methods). - // For all other symbols, we display Signature; - // TypeDecl and Methods are empty. - // (This awkwardness is to preserve JSON compatibility.) 
- parts := []string{ - maybeMarkdown(h.Signature), - maybeMarkdown(h.typeDecl), - formatDoc(h, options), - maybeMarkdown(h.promotedFields), - maybeMarkdown(h.methods), - formatLink(h, options), - } - if h.typeDecl != "" { - parts[0] = "" // type: suppress redundant Signature - } - parts = slices.Remove(parts, "") - - var b strings.Builder - for i, part := range parts { - if i > 0 { - if options.PreferredContentFormat == protocol.Markdown { - b.WriteString("\n\n") - } else { - b.WriteByte('\n') - } - } - b.WriteString(part) - } - return b.String(), nil - - default: - return "", fmt.Errorf("invalid HoverKind: %v", options.HoverKind) - } -} - -func formatLink(h *hoverJSON, options *settings.Options) string { - if !options.LinksInHover || options.LinkTarget == "" || h.LinkPath == "" { - return "" - } - plainLink := cache.BuildLink(options.LinkTarget, h.LinkPath, h.LinkAnchor) - switch options.PreferredContentFormat { - case protocol.Markdown: - return fmt.Sprintf("[`%s` on %s](%s)", h.SymbolName, options.LinkTarget, plainLink) - case protocol.PlainText: - return "" - default: - return plainLink - } -} - -func formatDoc(h *hoverJSON, options *settings.Options) string { - var doc string - switch options.HoverKind { - case settings.SynopsisDocumentation: - doc = h.Synopsis - case settings.FullDocumentation: - doc = h.FullDocumentation - } - if options.PreferredContentFormat == protocol.Markdown { - return CommentToMarkdown(doc, options) - } - return doc -} - -// findDeclInfo returns the syntax nodes involved in the declaration of the -// types.Object with position pos, searching the given list of file syntax -// trees. -// -// Pos may be the position of the name-defining identifier in a FuncDecl, -// ValueSpec, TypeSpec, Field, or as a special case the position of -// Ellipsis.Elt in an ellipsis field. -// -// If found, the resulting decl, spec, and field will be the inner-most -// instance of each node type surrounding pos. -// -// If field is non-nil, pos is the position of a field Var. If field is nil and -// spec is non-nil, pos is the position of a Var, Const, or TypeName object. If -// both field and spec are nil and decl is non-nil, pos is the position of a -// Func object. -// -// It returns a nil decl if no object-defining node is found at pos. -// -// TODO(rfindley): this function has tricky semantics, and may be worth unit -// testing and/or refactoring. -func findDeclInfo(files []*ast.File, pos token.Pos) (decl ast.Decl, spec ast.Spec, field *ast.Field) { - // panic(found{}) breaks off the traversal and - // causes the function to return normally. - type found struct{} - defer func() { - switch x := recover().(type) { - case nil: - case found: - default: - panic(x) - } - }() - - // Visit the files in search of the node at pos. - stack := make([]ast.Node, 0, 20) - // Allocate the closure once, outside the loop. - f := func(n ast.Node) bool { - if n != nil { - stack = append(stack, n) // push - } else { - stack = stack[:len(stack)-1] // pop - return false - } - - // Skip subtrees (incl. files) that don't contain the search point. - if !(n.Pos() <= pos && pos < n.End()) { - return false - } - - switch n := n.(type) { - case *ast.Field: - findEnclosingDeclAndSpec := func() { - for i := len(stack) - 1; i >= 0; i-- { - switch n := stack[i].(type) { - case ast.Spec: - spec = n - case ast.Decl: - decl = n - return - } - } - } - - // Check each field name since you can have - // multiple names for the same type expression. 
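// For illustration (a sketch; the declarations are hypothetical): a single
// *ast.Field may bind several names to one type expression,
//
//	struct{ X, Y int }   // one Field, Names = [X, Y]
//	func(a, b string)    // one parameter Field, Names = [a, b]
//
// which is why pos must be compared against every name below.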
- for _, id := range n.Names { - if id.Pos() == pos { - field = n - findEnclosingDeclAndSpec() - panic(found{}) - } - } - - // Check *ast.Field itself. This handles embedded - // fields which have no associated *ast.Ident name. - if n.Pos() == pos { - field = n - findEnclosingDeclAndSpec() - panic(found{}) - } - - // Also check "X" in "...X". This makes it easy to format variadic - // signature params properly. - // - // TODO(rfindley): I don't understand this comment. How does finding the - // field in this case make it easier to format variadic signature params? - if ell, ok := n.Type.(*ast.Ellipsis); ok && ell.Elt != nil && ell.Elt.Pos() == pos { - field = n - findEnclosingDeclAndSpec() - panic(found{}) - } - - case *ast.FuncDecl: - if n.Name.Pos() == pos { - decl = n - panic(found{}) - } - - case *ast.GenDecl: - for _, s := range n.Specs { - switch s := s.(type) { - case *ast.TypeSpec: - if s.Name.Pos() == pos { - decl = n - spec = s - panic(found{}) - } - case *ast.ValueSpec: - for _, id := range s.Names { - if id.Pos() == pos { - decl = n - spec = s - panic(found{}) - } - } - } - } - } - return true - } - for _, file := range files { - ast.Inspect(file, f) - } - - return nil, nil, nil -} - -type promotedField struct { - path string // path (e.g. "x.y" through embedded fields) - field *types.Var -} - -// promotedFields returns the list of accessible promoted fields of a struct type t. -// (Logic plundered from x/tools/cmd/guru/describe.go.) -func promotedFields(t types.Type, from *types.Package) []promotedField { - wantField := func(f *types.Var) bool { - if !accessibleTo(f, from) { - return false - } - // Check that the field is not shadowed. - obj, _, _ := types.LookupFieldOrMethod(t, true, f.Pkg(), f.Name()) - return obj == f - } - - var fields []promotedField - var visit func(t types.Type, stack []*types.Named) - visit = func(t types.Type, stack []*types.Named) { - tStruct, ok := Deref(t).Underlying().(*types.Struct) - if !ok { - return - } - fieldloop: - for i := 0; i < tStruct.NumFields(); i++ { - f := tStruct.Field(i) - - // Handle recursion through anonymous fields. - if f.Anonymous() { - tf := f.Type() - if ptr, ok := tf.(*types.Pointer); ok { - tf = ptr.Elem() - } - if named, ok := tf.(*types.Named); ok { // (be defensive) - // If we've already visited this named type - // on this path, break the cycle. - for _, x := range stack { - if x.Origin() == named.Origin() { - continue fieldloop - } - } - visit(f.Type(), append(stack, named)) - } - } - - // Save accessible promoted fields. - if len(stack) > 0 && wantField(f) { - var path strings.Builder - for i, t := range stack { - if i > 0 { - path.WriteByte('.') - } - path.WriteString(t.Obj().Name()) - } - fields = append(fields, promotedField{ - path: path.String(), - field: f, - }) - } - } - } - visit(t, nil) - - return fields -} - -func accessibleTo(obj types.Object, pkg *types.Package) bool { - return obj.Exported() || obj.Pkg() == pkg -} diff --git a/internal/golangorgx/gopls/golang/identifier.go b/internal/golangorgx/gopls/golang/identifier.go deleted file mode 100644 index 28f89757057..00000000000 --- a/internal/golangorgx/gopls/golang/identifier.go +++ /dev/null @@ -1,172 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package golang - -import ( - "errors" - "go/ast" - "go/types" -) - -// ErrNoIdentFound is error returned when no identifier is found at a particular position -var ErrNoIdentFound = errors.New("no identifier found") - -// inferredSignature determines the resolved non-generic signature for an -// identifier in an instantiation expression. -// -// If no such signature exists, it returns nil. -func inferredSignature(info *types.Info, id *ast.Ident) *types.Signature { - inst := info.Instances[id] - sig, _ := inst.Type.(*types.Signature) - return sig -} - -func searchForEnclosing(info *types.Info, path []ast.Node) *types.TypeName { - for _, n := range path { - switch n := n.(type) { - case *ast.SelectorExpr: - if sel, ok := info.Selections[n]; ok { - recv := Deref(sel.Recv()) - - // Keep track of the last exported type seen. - var exported *types.TypeName - if named, ok := recv.(*types.Named); ok && named.Obj().Exported() { - exported = named.Obj() - } - // We don't want the last element, as that's the field or - // method itself. - for _, index := range sel.Index()[:len(sel.Index())-1] { - if r, ok := recv.Underlying().(*types.Struct); ok { - recv = Deref(r.Field(index).Type()) - if named, ok := recv.(*types.Named); ok && named.Obj().Exported() { - exported = named.Obj() - } - } - } - return exported - } - } - } - return nil -} - -// typeToObject returns the relevant type name for the given type, after -// unwrapping pointers, arrays, slices, channels, and function signatures with -// a single non-error result, and ignoring built-in named types. -func typeToObject(typ types.Type) *types.TypeName { - switch typ := typ.(type) { - case *types.Named: - // TODO(rfindley): this should use typeparams.NamedTypeOrigin. - return typ.Obj() - case *types.Pointer: - return typeToObject(typ.Elem()) - case *types.Array: - return typeToObject(typ.Elem()) - case *types.Slice: - return typeToObject(typ.Elem()) - case *types.Chan: - return typeToObject(typ.Elem()) - case *types.Signature: - // Try to find a return value of a named type. If there's only one - // such value, jump to its type definition. - var res *types.TypeName - - results := typ.Results() - for i := 0; i < results.Len(); i++ { - obj := typeToObject(results.At(i).Type()) - if obj == nil || hasErrorType(obj) { - // Skip builtins. TODO(rfindley): should comparable be handled here as well? - continue - } - if res != nil { - // The function/method must have only one return value of a named type. - return nil - } - - res = obj - } - return res - default: - return nil - } -} - -func hasErrorType(obj types.Object) bool { - return types.IsInterface(obj.Type()) && obj.Pkg() == nil && obj.Name() == "error" -} - -// typeSwitchImplicits returns all the implicit type switch objects that -// correspond to the leaf *ast.Ident. It also returns the original type -// associated with the identifier (outside of a case clause). -func typeSwitchImplicits(info *types.Info, path []ast.Node) ([]types.Object, types.Type) { - ident, _ := path[0].(*ast.Ident) - if ident == nil { - return nil, nil - } - - var ( - ts *ast.TypeSwitchStmt - assign *ast.AssignStmt - cc *ast.CaseClause - obj = info.ObjectOf(ident) - ) - - // Walk our ancestors to determine if our leaf ident refers to a - // type switch variable, e.g. the "a" from "switch a := b.(type)". -Outer: - for i := 1; i < len(path); i++ { - switch n := path[i].(type) { - case *ast.AssignStmt: - // Check if ident is the "a" in "a := foo.(type)". The "a" in - // this case has no types.Object, so check for ident equality. 
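// For illustration (a sketch; the identifiers are hypothetical):
//
//	switch a := x.(type) { // this "a" has no types.Object of its own
//	case int:
//		return a // this "a" resolves to the clause's implicit object
//	}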
- if len(n.Lhs) == 1 && n.Lhs[0] == ident { - assign = n - } - case *ast.CaseClause: - // Check if ident is a use of "a" within a case clause. Each - // case clause implicitly maps "a" to a different types.Object, - // so check if ident's object is the case clause's implicit - // object. - if obj != nil && info.Implicits[n] == obj { - cc = n - } - case *ast.TypeSwitchStmt: - // Look for the type switch that owns our previously found - // *ast.AssignStmt or *ast.CaseClause. - if n.Assign == assign { - ts = n - break Outer - } - - for _, stmt := range n.Body.List { - if stmt == cc { - ts = n - break Outer - } - } - } - } - if ts == nil { - return nil, nil - } - // Our leaf ident refers to a type switch variable. Fan out to the - // type switch's implicit case clause objects. - var objs []types.Object - for _, cc := range ts.Body.List { - if ccObj := info.Implicits[cc]; ccObj != nil { - objs = append(objs, ccObj) - } - } - // The right-hand side of a type switch should only have one - // element, and we need to track its type in order to generate - // hover information for implicit type switch variables. - var typ types.Type - if assign, ok := ts.Assign.(*ast.AssignStmt); ok && len(assign.Rhs) == 1 { - if rhs := assign.Rhs[0].(*ast.TypeAssertExpr); ok { - typ = info.TypeOf(rhs.X) - } - } - return objs, typ -} diff --git a/internal/golangorgx/gopls/golang/implementation.go b/internal/golangorgx/gopls/golang/implementation.go deleted file mode 100644 index e4722645a2b..00000000000 --- a/internal/golangorgx/gopls/golang/implementation.go +++ /dev/null @@ -1,497 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package golang - -import ( - "context" - "errors" - "fmt" - "go/ast" - "go/token" - "go/types" - "reflect" - "sort" - "strings" - "sync" - - "cuelang.org/go/internal/golangorgx/gopls/cache" - "cuelang.org/go/internal/golangorgx/gopls/cache/metadata" - "cuelang.org/go/internal/golangorgx/gopls/cache/methodsets" - "cuelang.org/go/internal/golangorgx/gopls/file" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/util/bug" - "cuelang.org/go/internal/golangorgx/gopls/util/safetoken" - "cuelang.org/go/internal/golangorgx/tools/event" - "golang.org/x/sync/errgroup" -) - -// This file defines the new implementation of the 'implementation' -// operator that does not require type-checker data structures for an -// unbounded number of packages. -// -// TODO(adonovan): -// - Audit to ensure robustness in face of type errors. -// - Eliminate false positives due to 'tricky' cases of the global algorithm. -// - Ensure we have test coverage of: -// type aliases -// nil, PkgName, Builtin (all errors) -// any (empty result) -// method of unnamed interface type (e.g. var x interface { f() }) -// (the global algorithm may find implementations of this type -// but will not include it in the index.) - -// Implementation returns a new sorted array of locations of -// declarations of types that implement (or are implemented by) the -// type referred to at the given position. -// -// If the position denotes a method, the computation is applied to its -// receiver type and then its corresponding methods are returned. 
-func Implementation(ctx context.Context, snapshot *cache.Snapshot, f file.Handle, pp protocol.Position) ([]protocol.Location, error) { - ctx, done := event.Start(ctx, "golang.Implementation") - defer done() - - locs, err := implementations(ctx, snapshot, f, pp) - if err != nil { - return nil, err - } - - // Sort and de-duplicate locations. - sort.Slice(locs, func(i, j int) bool { - return protocol.CompareLocation(locs[i], locs[j]) < 0 - }) - out := locs[:0] - for _, loc := range locs { - if len(out) == 0 || out[len(out)-1] != loc { - out = append(out, loc) - } - } - locs = out - - return locs, nil -} - -func implementations(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pp protocol.Position) ([]protocol.Location, error) { - obj, pkg, err := implementsObj(ctx, snapshot, fh.URI(), pp) - if err != nil { - return nil, err - } - - var localPkgs []*cache.Package - if obj.Pos().IsValid() { // no local package for error or error.Error - declPosn := safetoken.StartPosition(pkg.FileSet(), obj.Pos()) - // Type-check the declaring package (incl. variants) for use - // by the "local" search, which uses type information to - // enumerate all types within the package that satisfy the - // query type, even those defined local to a function. - declURI := protocol.URIFromPath(declPosn.Filename) - declMPs, err := snapshot.MetadataForFile(ctx, declURI) - if err != nil { - return nil, err - } - metadata.RemoveIntermediateTestVariants(&declMPs) - if len(declMPs) == 0 { - return nil, fmt.Errorf("no packages for file %s", declURI) - } - ids := make([]PackageID, len(declMPs)) - for i, mp := range declMPs { - ids[i] = mp.ID - } - localPkgs, err = snapshot.TypeCheck(ctx, ids...) - if err != nil { - return nil, err - } - } - - // Is the selected identifier a type name or method? - // (For methods, report the corresponding method names.) - var queryType types.Type - var queryMethodID string - switch obj := obj.(type) { - case *types.TypeName: - queryType = obj.Type() - case *types.Func: - // For methods, use the receiver type, which may be anonymous. - if recv := obj.Type().(*types.Signature).Recv(); recv != nil { - queryType = recv.Type() - queryMethodID = obj.Id() - } - } - if queryType == nil { - return nil, bug.Errorf("%s is not a type or method", obj.Name()) // should have been handled by implementsObj - } - - // Compute the method-set fingerprint used as a key to the global search. - key, hasMethods := methodsets.KeyOf(queryType) - if !hasMethods { - // A type with no methods yields an empty result. - // (No point reporting that every type satisfies 'any'.) - return nil, nil - } - - // The global search needs to look at every package in the - // forward transitive closure of the workspace; see package - // ./methodsets. - // - // For now we do all the type checking before beginning the search. - // TODO(adonovan): opt: search in parallel topological order - // so that we can overlap index lookup with typechecking. - // I suspect a number of algorithms on the result of TypeCheck could - // be optimized by being applied as soon as each package is available. 
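// That is, the global index covers (roughly) every package reachable from
// the workspace via imports; the declaring package itself is excluded from
// the global IDs below, since it is already covered by the local search.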
- globalMetas, err := snapshot.AllMetadata(ctx) - if err != nil { - return nil, err - } - metadata.RemoveIntermediateTestVariants(&globalMetas) - globalIDs := make([]PackageID, 0, len(globalMetas)) - - var pkgPath PackagePath - if obj.Pkg() != nil { // nil for error - pkgPath = PackagePath(obj.Pkg().Path()) - } - for _, mp := range globalMetas { - if mp.PkgPath == pkgPath { - continue // declaring package is handled by local implementation - } - globalIDs = append(globalIDs, mp.ID) - } - indexes, err := snapshot.MethodSets(ctx, globalIDs...) - if err != nil { - return nil, fmt.Errorf("querying method sets: %v", err) - } - - // Search local and global packages in parallel. - var ( - group errgroup.Group - locsMu sync.Mutex - locs []protocol.Location - ) - // local search - for _, localPkg := range localPkgs { - localPkg := localPkg - group.Go(func() error { - localLocs, err := localImplementations(ctx, snapshot, localPkg, queryType, queryMethodID) - if err != nil { - return err - } - locsMu.Lock() - locs = append(locs, localLocs...) - locsMu.Unlock() - return nil - }) - } - // global search - for _, index := range indexes { - index := index - group.Go(func() error { - for _, res := range index.Search(key, queryMethodID) { - loc := res.Location - // Map offsets to protocol.Locations in parallel (may involve I/O). - group.Go(func() error { - ploc, err := offsetToLocation(ctx, snapshot, loc.Filename, loc.Start, loc.End) - if err != nil { - return err - } - locsMu.Lock() - locs = append(locs, ploc) - locsMu.Unlock() - return nil - }) - } - return nil - }) - } - if err := group.Wait(); err != nil { - return nil, err - } - - return locs, nil -} - -// offsetToLocation converts an offset-based position to a protocol.Location, -// which requires reading the file. -func offsetToLocation(ctx context.Context, snapshot *cache.Snapshot, filename string, start, end int) (protocol.Location, error) { - uri := protocol.URIFromPath(filename) - fh, err := snapshot.ReadFile(ctx, uri) - if err != nil { - return protocol.Location{}, err // cancelled, perhaps - } - content, err := fh.Content() - if err != nil { - return protocol.Location{}, err // nonexistent or deleted ("can't happen") - } - m := protocol.NewMapper(uri, content) - return m.OffsetLocation(start, end) -} - -// implementsObj returns the object to query for implementations, which is a -// type name or method. -// -// The returned Package is the narrowest package containing ppos, which is the -// package using the resulting obj but not necessarily the declaring package. -func implementsObj(ctx context.Context, snapshot *cache.Snapshot, uri protocol.DocumentURI, ppos protocol.Position) (types.Object, *cache.Package, error) { - pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, uri) - if err != nil { - return nil, nil, err - } - pos, err := pgf.PositionPos(ppos) - if err != nil { - return nil, nil, err - } - - // This function inherits the limitation of its predecessor in - // requiring the selection to be an identifier (of a type or - // method). But there's no fundamental reason why one could - // not pose this query about any selected piece of syntax that - // has a type and thus a method set. - // (If LSP was more thorough about passing text selections as - // intervals to queries, you could ask about the method set of a - // subexpression such as x.f().) - - // TODO(adonovan): simplify: use objectsAt? 
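// For illustration (a sketch; the expression is hypothetical): selecting
// "Method" in x.Method() yields a path ordered leaf first,
//
//	[*ast.Ident, *ast.SelectorExpr, *ast.CallExpr, ...]
//
// so path[0] below is expected to be the identifier itself.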
- path := pathEnclosingObjNode(pgf.File, pos) - if path == nil { - return nil, nil, ErrNoIdentFound - } - id, ok := path[0].(*ast.Ident) - if !ok { - return nil, nil, ErrNoIdentFound - } - - // Is the object a type or method? Reject other kinds. - obj := pkg.GetTypesInfo().Uses[id] - if obj == nil { - // Check uses first (unlike ObjectOf) so that T in - // struct{T} is treated as a reference to a type, - // not a declaration of a field. - obj = pkg.GetTypesInfo().Defs[id] - } - switch obj := obj.(type) { - case *types.TypeName: - // ok - case *types.Func: - if obj.Type().(*types.Signature).Recv() == nil { - return nil, nil, fmt.Errorf("%s is a function, not a method", id.Name) - } - case nil: - return nil, nil, fmt.Errorf("%s denotes unknown object", id.Name) - default: - // e.g. *types.Var -> "var". - kind := strings.ToLower(strings.TrimPrefix(reflect.TypeOf(obj).String(), "*types.")) - return nil, nil, fmt.Errorf("%s is a %s, not a type", id.Name, kind) - } - - return obj, pkg, nil -} - -// localImplementations searches within pkg for declarations of all -// types that are assignable to/from the query type, and returns a new -// unordered array of their locations. -// -// If methodID is non-empty, the function instead returns the location -// of each type's method (if any) of that ID. -// -// ("Local" refers to the search within the same package, but this -// function's results may include type declarations that are local to -// a function body. The global search index excludes such types -// because reliably naming such types is hard.) -func localImplementations(ctx context.Context, snapshot *cache.Snapshot, pkg *cache.Package, queryType types.Type, methodID string) ([]protocol.Location, error) { - queryType = methodsets.EnsurePointer(queryType) - - // Scan through all type declarations in the syntax. - var locs []protocol.Location - var methodLocs []methodsets.Location - for _, pgf := range pkg.CompiledGoFiles() { - ast.Inspect(pgf.File, func(n ast.Node) bool { - spec, ok := n.(*ast.TypeSpec) - if !ok { - return true // not a type declaration - } - def := pkg.GetTypesInfo().Defs[spec.Name] - if def == nil { - return true // "can't happen" for types - } - if def.(*types.TypeName).IsAlias() { - return true // skip type aliases to avoid duplicate reporting - } - candidateType := methodsets.EnsurePointer(def.Type()) - - // The historical behavior enshrined by this - // function rejects cases where both are - // (nontrivial) interface types? - // That seems like useful information. - // TODO(adonovan): UX: report I/I pairs too? - // The same question appears in the global algorithm (methodsets). - if !concreteImplementsIntf(candidateType, queryType) { - return true // not assignable - } - - // Ignore types with empty method sets. - // (No point reporting that every type satisfies 'any'.) - mset := types.NewMethodSet(candidateType) - if mset.Len() == 0 { - return true - } - - if methodID == "" { - // Found matching type. - locs = append(locs, mustLocation(pgf, spec.Name)) - return true - } - - // Find corresponding method. - // - // We can't use LookupFieldOrMethod because it requires - // the methodID's types.Package, which we don't know. - // We could recursively search pkg.Imports for it, - // but it's easier to walk the method set. 
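// For an exported method, Id() is just its name (e.g. "Read"); for an
// unexported one it is qualified by package path (e.g. "example.com/m.read",
// a hypothetical path), so the comparison below keeps same-named unexported
// methods from different packages distinct.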
- for i := 0; i < mset.Len(); i++ { - method := mset.At(i).Obj() - if method.Id() == methodID { - posn := safetoken.StartPosition(pkg.FileSet(), method.Pos()) - methodLocs = append(methodLocs, methodsets.Location{ - Filename: posn.Filename, - Start: posn.Offset, - End: posn.Offset + len(method.Name()), - }) - break - } - } - return true - }) - } - - // Finally convert method positions to protocol form by reading the files. - for _, mloc := range methodLocs { - loc, err := offsetToLocation(ctx, snapshot, mloc.Filename, mloc.Start, mloc.End) - if err != nil { - return nil, err - } - locs = append(locs, loc) - } - - // Special case: for types that satisfy error, report builtin.go (see #59527). - if types.Implements(queryType, errorInterfaceType) { - loc, err := errorLocation(ctx, snapshot) - if err != nil { - return nil, err - } - locs = append(locs, loc) - } - - return locs, nil -} - -var errorInterfaceType = types.Universe.Lookup("error").Type().Underlying().(*types.Interface) - -// errorLocation returns the location of the 'error' type in builtin.go. -func errorLocation(ctx context.Context, snapshot *cache.Snapshot) (protocol.Location, error) { - pgf, err := snapshot.BuiltinFile(ctx) - if err != nil { - return protocol.Location{}, err - } - for _, decl := range pgf.File.Decls { - if decl, ok := decl.(*ast.GenDecl); ok { - for _, spec := range decl.Specs { - if spec, ok := spec.(*ast.TypeSpec); ok && spec.Name.Name == "error" { - return pgf.NodeLocation(spec.Name) - } - } - } - } - return protocol.Location{}, fmt.Errorf("built-in error type not found") -} - -// concreteImplementsIntf returns true if a is an interface type implemented by -// concrete type b, or vice versa. -func concreteImplementsIntf(a, b types.Type) bool { - aIsIntf, bIsIntf := types.IsInterface(a), types.IsInterface(b) - - // Make sure exactly one is an interface type. - if aIsIntf == bIsIntf { - return false - } - - // Rearrange if needed so "a" is the concrete type. - if aIsIntf { - a, b = b, a - } - - // TODO(adonovan): this should really use GenericAssignableTo - // to report (e.g.) "ArrayList[T] implements List[T]", but - // GenericAssignableTo doesn't work correctly on pointers to - // generic named types. Thus the legacy implementation and the - // "local" part of implementations fail to report generics. - // The global algorithm based on subsets does the right thing. - return types.AssignableTo(a, b) -} - -var ( - // TODO(adonovan): why do various RPC handlers related to - // IncomingCalls return (nil, nil) on the protocol in response - // to this error? That seems like a violation of the protocol. - // Is it perhaps a workaround for VSCode behavior? - errNoObjectFound = errors.New("no object found") -) - -// pathEnclosingObjNode returns the AST path to the object-defining -// node associated with pos. "Object-defining" means either an -// *ast.Ident mapped directly to a types.Object or an ast.Node mapped -// implicitly to a types.Object. -func pathEnclosingObjNode(f *ast.File, pos token.Pos) []ast.Node { - var ( - path []ast.Node - found bool - ) - - ast.Inspect(f, func(n ast.Node) bool { - if found { - return false - } - - if n == nil { - path = path[:len(path)-1] - return false - } - - path = append(path, n) - - switch n := n.(type) { - case *ast.Ident: - // Include the position directly after identifier. This handles - // the common case where the cursor is right after the - // identifier the user is currently typing. 
Previously we - // handled this by calling astutil.PathEnclosingInterval twice, - // once for "pos" and once for "pos-1". - found = n.Pos() <= pos && pos <= n.End() - case *ast.ImportSpec: - if n.Path.Pos() <= pos && pos < n.Path.End() { - found = true - // If import spec has a name, add name to path even though - // position isn't in the name. - if n.Name != nil { - path = append(path, n.Name) - } - } - case *ast.StarExpr: - // Follow star expressions to the inner identifier. - if pos == n.Star { - pos = n.X.Pos() - } - } - - return !found - }) - - if len(path) == 0 { - return nil - } - - // Reverse path so leaf is first element. - for i := 0; i < len(path)/2; i++ { - path[i], path[len(path)-1-i] = path[len(path)-1-i], path[i] - } - - return path -} diff --git a/internal/golangorgx/gopls/golang/inlay_hint.go b/internal/golangorgx/gopls/golang/inlay_hint.go deleted file mode 100644 index 3de633b844b..00000000000 --- a/internal/golangorgx/gopls/golang/inlay_hint.go +++ /dev/null @@ -1,396 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package golang - -import ( - "context" - "fmt" - "go/ast" - "go/constant" - "go/token" - "go/types" - "strings" - - "cuelang.org/go/internal/golangorgx/gopls/cache" - "cuelang.org/go/internal/golangorgx/gopls/file" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/util/typesutil" - "cuelang.org/go/internal/golangorgx/tools/event" -) - -const ( - maxLabelLength = 28 -) - -type InlayHintFunc func(node ast.Node, m *protocol.Mapper, tf *token.File, info *types.Info, q *types.Qualifier) []protocol.InlayHint - -type Hint struct { - Name string - Doc string - Run InlayHintFunc -} - -const ( - ParameterNames = "parameterNames" - AssignVariableTypes = "assignVariableTypes" - ConstantValues = "constantValues" - RangeVariableTypes = "rangeVariableTypes" - CompositeLiteralTypes = "compositeLiteralTypes" - CompositeLiteralFieldNames = "compositeLiteralFields" - FunctionTypeParameters = "functionTypeParameters" -) - -var AllInlayHints = map[string]*Hint{ - AssignVariableTypes: { - Name: AssignVariableTypes, - Doc: "Enable/disable inlay hints for variable types in assign statements:\n```go\n\ti/* int*/, j/* int*/ := 0, len(r)-1\n```", - Run: assignVariableTypes, - }, - ParameterNames: { - Name: ParameterNames, - Doc: "Enable/disable inlay hints for parameter names:\n```go\n\tparseInt(/* str: */ \"123\", /* radix: */ 8)\n```", - Run: parameterNames, - }, - ConstantValues: { - Name: ConstantValues, - Doc: "Enable/disable inlay hints for constant values:\n```go\n\tconst (\n\t\tKindNone Kind = iota/* = 0*/\n\t\tKindPrint/* = 1*/\n\t\tKindPrintf/* = 2*/\n\t\tKindErrorf/* = 3*/\n\t)\n```", - Run: constantValues, - }, - RangeVariableTypes: { - Name: RangeVariableTypes, - Doc: "Enable/disable inlay hints for variable types in range statements:\n```go\n\tfor k/* int*/, v/* string*/ := range []string{} {\n\t\tfmt.Println(k, v)\n\t}\n```", - Run: rangeVariableTypes, - }, - CompositeLiteralTypes: { - Name: CompositeLiteralTypes, - Doc: "Enable/disable inlay hints for composite literal types:\n```go\n\tfor _, c := range []struct {\n\t\tin, want string\n\t}{\n\t\t/*struct{ in string; want string }*/{\"Hello, world\", \"dlrow ,olleH\"},\n\t}\n```", - Run: compositeLiteralTypes, - }, - CompositeLiteralFieldNames: { - Name: CompositeLiteralFieldNames, - Doc: "Enable/disable inlay hints for composite literal field 
names:\n```go\n\t{/*in: */\"Hello, world\", /*want: */\"dlrow ,olleH\"}\n```", - Run: compositeLiteralFields, - }, - FunctionTypeParameters: { - Name: FunctionTypeParameters, - Doc: "Enable/disable inlay hints for implicit type parameters on generic functions:\n```go\n\tmyFoo/*[int, string]*/(1, \"hello\")\n```", - Run: funcTypeParams, - }, -} - -func InlayHint(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pRng protocol.Range) ([]protocol.InlayHint, error) { - ctx, done := event.Start(ctx, "golang.InlayHint") - defer done() - - pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) - if err != nil { - return nil, fmt.Errorf("getting file for InlayHint: %w", err) - } - - // Collect a list of the inlay hints that are enabled. - inlayHintOptions := snapshot.Options().InlayHintOptions - var enabledHints []InlayHintFunc - for hint, enabled := range inlayHintOptions.Hints { - if !enabled { - continue - } - if h, ok := AllInlayHints[hint]; ok { - enabledHints = append(enabledHints, h.Run) - } - } - if len(enabledHints) == 0 { - return nil, nil - } - - info := pkg.GetTypesInfo() - q := typesutil.FileQualifier(pgf.File, pkg.GetTypes(), info) - - // Set the range to the full file if the range is not valid. - start, end := pgf.File.Pos(), pgf.File.End() - if pRng.Start.Line < pRng.End.Line || pRng.Start.Character < pRng.End.Character { - // Adjust start and end for the specified range. - var err error - start, end, err = pgf.RangePos(pRng) - if err != nil { - return nil, err - } - } - - var hints []protocol.InlayHint - ast.Inspect(pgf.File, func(node ast.Node) bool { - // If not in range, we can stop looking. - if node == nil || node.End() < start || node.Pos() > end { - return false - } - for _, fn := range enabledHints { - hints = append(hints, fn(node, pgf.Mapper, pgf.Tok, info, &q)...) - } - return true - }) - return hints, nil -} - -func parameterNames(node ast.Node, m *protocol.Mapper, tf *token.File, info *types.Info, _ *types.Qualifier) []protocol.InlayHint { - callExpr, ok := node.(*ast.CallExpr) - if !ok { - return nil - } - signature, ok := info.TypeOf(callExpr.Fun).(*types.Signature) - if !ok { - return nil - } - - var hints []protocol.InlayHint - for i, v := range callExpr.Args { - start, err := m.PosPosition(tf, v.Pos()) - if err != nil { - continue - } - params := signature.Params() - // When a function has variadic params, we skip args after - // params.Len(). - if i > params.Len()-1 { - break - } - param := params.At(i) - // param.Name is empty for built-ins like append - if param.Name() == "" { - continue - } - // Skip the parameter name hint if the arg matches - // the parameter name. - if i, ok := v.(*ast.Ident); ok && i.Name == param.Name() { - continue - } - - label := param.Name() - if signature.Variadic() && i == params.Len()-1 { - label = label + "..." 
- } - hints = append(hints, protocol.InlayHint{ - Position: start, - Label: buildLabel(label + ":"), - Kind: protocol.Parameter, - PaddingRight: true, - }) - } - return hints -} - -func funcTypeParams(node ast.Node, m *protocol.Mapper, tf *token.File, info *types.Info, _ *types.Qualifier) []protocol.InlayHint { - ce, ok := node.(*ast.CallExpr) - if !ok { - return nil - } - id, ok := ce.Fun.(*ast.Ident) - if !ok { - return nil - } - inst := info.Instances[id] - if inst.TypeArgs == nil { - return nil - } - start, err := m.PosPosition(tf, id.End()) - if err != nil { - return nil - } - var args []string - for i := 0; i < inst.TypeArgs.Len(); i++ { - args = append(args, inst.TypeArgs.At(i).String()) - } - if len(args) == 0 { - return nil - } - return []protocol.InlayHint{{ - Position: start, - Label: buildLabel("[" + strings.Join(args, ", ") + "]"), - Kind: protocol.Type, - }} -} - -func assignVariableTypes(node ast.Node, m *protocol.Mapper, tf *token.File, info *types.Info, q *types.Qualifier) []protocol.InlayHint { - stmt, ok := node.(*ast.AssignStmt) - if !ok || stmt.Tok != token.DEFINE { - return nil - } - - var hints []protocol.InlayHint - for _, v := range stmt.Lhs { - if h := variableType(v, m, tf, info, q); h != nil { - hints = append(hints, *h) - } - } - return hints -} - -func rangeVariableTypes(node ast.Node, m *protocol.Mapper, tf *token.File, info *types.Info, q *types.Qualifier) []protocol.InlayHint { - rStmt, ok := node.(*ast.RangeStmt) - if !ok { - return nil - } - var hints []protocol.InlayHint - if h := variableType(rStmt.Key, m, tf, info, q); h != nil { - hints = append(hints, *h) - } - if h := variableType(rStmt.Value, m, tf, info, q); h != nil { - hints = append(hints, *h) - } - return hints -} - -func variableType(e ast.Expr, m *protocol.Mapper, tf *token.File, info *types.Info, q *types.Qualifier) *protocol.InlayHint { - typ := info.TypeOf(e) - if typ == nil { - return nil - } - end, err := m.PosPosition(tf, e.End()) - if err != nil { - return nil - } - return &protocol.InlayHint{ - Position: end, - Label: buildLabel(types.TypeString(typ, *q)), - Kind: protocol.Type, - PaddingLeft: true, - } -} - -func constantValues(node ast.Node, m *protocol.Mapper, tf *token.File, info *types.Info, _ *types.Qualifier) []protocol.InlayHint { - genDecl, ok := node.(*ast.GenDecl) - if !ok || genDecl.Tok != token.CONST { - return nil - } - - var hints []protocol.InlayHint - for _, v := range genDecl.Specs { - spec, ok := v.(*ast.ValueSpec) - if !ok { - continue - } - end, err := m.PosPosition(tf, v.End()) - if err != nil { - continue - } - // Show hints when values are missing or at least one value is not - // a basic literal. 
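// For illustration (a sketch; Kind is hypothetical): a declaration such as
//
//	const (
//		KindNone Kind = iota // hint "= 0"
//		KindPrint            // hint "= 1"
//	)
//
// receives hints, whereas a spec like `const n = 42` already spells out its
// value and produces none.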
- showHints := len(spec.Values) == 0 - checkValues := len(spec.Names) == len(spec.Values) - var values []string - for i, w := range spec.Names { - obj, ok := info.ObjectOf(w).(*types.Const) - if !ok || obj.Val().Kind() == constant.Unknown { - return nil - } - if checkValues { - switch spec.Values[i].(type) { - case *ast.BadExpr: - return nil - case *ast.BasicLit: - default: - if obj.Val().Kind() != constant.Bool { - showHints = true - } - } - } - values = append(values, fmt.Sprintf("%v", obj.Val())) - } - if !showHints || len(values) == 0 { - continue - } - hints = append(hints, protocol.InlayHint{ - Position: end, - Label: buildLabel("= " + strings.Join(values, ", ")), - PaddingLeft: true, - }) - } - return hints -} - -func compositeLiteralFields(node ast.Node, m *protocol.Mapper, tf *token.File, info *types.Info, _ *types.Qualifier) []protocol.InlayHint { - compLit, ok := node.(*ast.CompositeLit) - if !ok { - return nil - } - typ := info.TypeOf(compLit) - if typ == nil { - return nil - } - if t, ok := typ.(*types.Pointer); ok { - typ = t.Elem() - } - strct, ok := typ.Underlying().(*types.Struct) - if !ok { - return nil - } - - var hints []protocol.InlayHint - var allEdits []protocol.TextEdit - for i, v := range compLit.Elts { - if _, ok := v.(*ast.KeyValueExpr); !ok { - start, err := m.PosPosition(tf, v.Pos()) - if err != nil { - continue - } - if i > strct.NumFields()-1 { - break - } - hints = append(hints, protocol.InlayHint{ - Position: start, - Label: buildLabel(strct.Field(i).Name() + ":"), - Kind: protocol.Parameter, - PaddingRight: true, - }) - allEdits = append(allEdits, protocol.TextEdit{ - Range: protocol.Range{Start: start, End: start}, - NewText: strct.Field(i).Name() + ": ", - }) - } - } - // It is not allowed to have a mix of keyed and unkeyed fields, so - // have the text edits add keys to all fields. - for i := range hints { - hints[i].TextEdits = allEdits - } - return hints -} - -func compositeLiteralTypes(node ast.Node, m *protocol.Mapper, tf *token.File, info *types.Info, q *types.Qualifier) []protocol.InlayHint { - compLit, ok := node.(*ast.CompositeLit) - if !ok { - return nil - } - typ := info.TypeOf(compLit) - if typ == nil { - return nil - } - if compLit.Type != nil { - return nil - } - prefix := "" - if t, ok := typ.(*types.Pointer); ok { - typ = t.Elem() - prefix = "&" - } - // The type for this composite literal is implicit, add an inlay hint. - start, err := m.PosPosition(tf, compLit.Lbrace) - if err != nil { - return nil - } - return []protocol.InlayHint{{ - Position: start, - Label: buildLabel(fmt.Sprintf("%s%s", prefix, types.TypeString(typ, *q))), - Kind: protocol.Type, - }} -} - -func buildLabel(s string) []protocol.InlayHintLabelPart { - label := protocol.InlayHintLabelPart{ - Value: s, - } - if len(s) > maxLabelLength+len("...") { - label.Value = s[:maxLabelLength] + "..." - } - return []protocol.InlayHintLabelPart{label} -} diff --git a/internal/golangorgx/gopls/golang/inline.go b/internal/golangorgx/gopls/golang/inline.go deleted file mode 100644 index 100e0f94c6f..00000000000 --- a/internal/golangorgx/gopls/golang/inline.go +++ /dev/null @@ -1,136 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package golang - -// This file defines the refactor.inline code action. 
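// For illustration (a sketch; add1 is hypothetical): given
//
//	func add1(x int) int { return x + 1 }
//
// applying refactor.inline to the call add1(n) replaces the call with the
// callee's body, rewriting it to n + 1 (modulo any parentheses or bindings
// the inliner decides it needs).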
- -import ( - "context" - "fmt" - "go/ast" - "go/token" - "go/types" - - "cuelang.org/go/internal/golangorgx/gopls/cache" - "cuelang.org/go/internal/golangorgx/gopls/cache/parsego" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/util/safetoken" - "cuelang.org/go/internal/golangorgx/tools/diff" - "cuelang.org/go/internal/golangorgx/tools/event" - "cuelang.org/go/internal/golangorgx/tools/refactor/inline" - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/go/types/typeutil" -) - -// EnclosingStaticCall returns the innermost function call enclosing -// the selected range, along with the callee. -func EnclosingStaticCall(pkg *cache.Package, pgf *ParsedGoFile, start, end token.Pos) (*ast.CallExpr, *types.Func, error) { - path, _ := astutil.PathEnclosingInterval(pgf.File, start, end) - - var call *ast.CallExpr -loop: - for _, n := range path { - switch n := n.(type) { - case *ast.FuncLit: - break loop - case *ast.CallExpr: - call = n - break loop - } - } - if call == nil { - return nil, nil, fmt.Errorf("no enclosing call") - } - if safetoken.Line(pgf.Tok, call.Lparen) != safetoken.Line(pgf.Tok, start) { - return nil, nil, fmt.Errorf("enclosing call is not on this line") - } - fn := typeutil.StaticCallee(pkg.GetTypesInfo(), call) - if fn == nil { - return nil, nil, fmt.Errorf("not a static call to a Go function") - } - return call, fn, nil -} - -func inlineCall(ctx context.Context, snapshot *cache.Snapshot, callerPkg *cache.Package, callerPGF *parsego.File, start, end token.Pos) (_ *token.FileSet, _ *analysis.SuggestedFix, err error) { - // Find enclosing static call. - call, fn, err := EnclosingStaticCall(callerPkg, callerPGF, start, end) - if err != nil { - return nil, nil, err - } - - // Locate callee by file/line and analyze it. - calleePosn := safetoken.StartPosition(callerPkg.FileSet(), fn.Pos()) - calleePkg, calleePGF, err := NarrowestPackageForFile(ctx, snapshot, protocol.URIFromPath(calleePosn.Filename)) - if err != nil { - return nil, nil, err - } - var calleeDecl *ast.FuncDecl - for _, decl := range calleePGF.File.Decls { - if decl, ok := decl.(*ast.FuncDecl); ok { - posn := safetoken.StartPosition(calleePkg.FileSet(), decl.Name.Pos()) - if posn.Line == calleePosn.Line && posn.Column == calleePosn.Column { - calleeDecl = decl - break - } - } - } - if calleeDecl == nil { - return nil, nil, fmt.Errorf("can't find callee") - } - - // The inliner assumes that input is well-typed, - // but that is frequently not the case within gopls. - // Until we are able to harden the inliner, - // report panics as errors to avoid crashing the server. - bad := func(p *cache.Package) bool { return len(p.GetParseErrors())+len(p.GetTypeErrors()) > 0 } - if bad(calleePkg) || bad(callerPkg) { - defer func() { - if x := recover(); x != nil { - err = fmt.Errorf("inlining failed (%q), likely because inputs were ill-typed", x) - } - }() - } - - // Users can consult the gopls event log to see - // why a particular inlining strategy was chosen. - logf := logger(ctx, "inliner", snapshot.Options().VerboseOutput) - - callee, err := inline.AnalyzeCallee(logf, calleePkg.FileSet(), calleePkg.GetTypes(), calleePkg.GetTypesInfo(), calleeDecl, calleePGF.Src) - if err != nil { - return nil, nil, err - } - - // Inline the call. 
- caller := &inline.Caller{ - Fset: callerPkg.FileSet(), - Types: callerPkg.GetTypes(), - Info: callerPkg.GetTypesInfo(), - File: callerPGF.File, - Call: call, - Content: callerPGF.Src, - } - - got, err := inline.Inline(logf, caller, callee) - if err != nil { - return nil, nil, err - } - - return callerPkg.FileSet(), &analysis.SuggestedFix{ - Message: fmt.Sprintf("inline call of %v", callee), - TextEdits: diffToTextEdits(callerPGF.Tok, diff.Bytes(callerPGF.Src, got)), - }, nil -} - -// TODO(adonovan): change the inliner to instead accept an io.Writer. -func logger(ctx context.Context, name string, verbose bool) func(format string, args ...any) { - if verbose { - return func(format string, args ...any) { - event.Log(ctx, name+": "+fmt.Sprintf(format, args...)) - } - } else { - return func(string, ...any) {} - } -} diff --git a/internal/golangorgx/gopls/golang/inline_all.go b/internal/golangorgx/gopls/golang/inline_all.go deleted file mode 100644 index 7e23cd6fbd1..00000000000 --- a/internal/golangorgx/gopls/golang/inline_all.go +++ /dev/null @@ -1,275 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package golang - -import ( - "context" - "fmt" - "go/ast" - "go/parser" - "go/types" - - "cuelang.org/go/internal/golangorgx/gopls/cache" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/util/bug" - "cuelang.org/go/internal/golangorgx/tools/refactor/inline" - "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/go/types/typeutil" -) - -// inlineAllCalls inlines all calls to the original function declaration -// described by callee, returning the resulting modified file content. -// -// inlining everything is currently an expensive operation: it involves re-type -// checking every package that contains a potential call, as reported by -// References. In cases where there are multiple calls per file, inlineAllCalls -// must type check repeatedly for each additional call. -// -// The provided post processing function is applied to the resulting source -// after each transformation. This is necessary because we are using this -// function to inline synthetic wrappers for the purpose of signature -// rewriting. The delegated function has a fake name that doesn't exist in the -// snapshot, and so we can't re-type check until we replace this fake name. -// -// TODO(rfindley): this only works because removing a parameter is a very -// narrow operation. A better solution would be to allow for ad-hoc snapshots -// that expose the full machinery of real snapshots: minimal invalidation, -// batched type checking, etc. Then we could actually rewrite the declaring -// package in this snapshot (and so 'post' would not be necessary), and could -// robustly re-type check for the purpose of iterative inlining, even if the -// inlined code pulls in new imports that weren't present in export data. -// -// The code below notes where assumptions are made that only hold true in -// the case of parameter removal (annotated with 'Assumption:') -func inlineAllCalls(ctx context.Context, logf func(string, ...any), snapshot *cache.Snapshot, pkg *cache.Package, pgf *ParsedGoFile, origDecl *ast.FuncDecl, callee *inline.Callee, post func([]byte) []byte) (map[protocol.DocumentURI][]byte, error) { - // Collect references.
- var refs []protocol.Location - { - funcPos, err := pgf.Mapper.PosPosition(pgf.Tok, origDecl.Name.NamePos) - if err != nil { - return nil, err - } - fh, err := snapshot.ReadFile(ctx, pgf.URI) - if err != nil { - return nil, err - } - refs, err = References(ctx, snapshot, fh, funcPos, false) - if err != nil { - return nil, fmt.Errorf("finding references to rewrite: %v", err) - } - } - - // Type-check the narrowest package containing each reference. - // TODO(rfindley): we should expose forEachPackage in order to operate in - // parallel and to reduce peak memory for this operation. - var ( - pkgForRef = make(map[protocol.Location]PackageID) - pkgs = make(map[PackageID]*cache.Package) - ) - { - needPkgs := make(map[PackageID]struct{}) - for _, ref := range refs { - md, err := NarrowestMetadataForFile(ctx, snapshot, ref.URI) - if err != nil { - return nil, fmt.Errorf("finding ref metadata: %v", err) - } - pkgForRef[ref] = md.ID - needPkgs[md.ID] = struct{}{} - } - var pkgIDs []PackageID - for id := range needPkgs { // TODO: use maps.Keys once it is available to us - pkgIDs = append(pkgIDs, id) - } - - refPkgs, err := snapshot.TypeCheck(ctx, pkgIDs...) - if err != nil { - return nil, fmt.Errorf("type checking reference packages: %v", err) - } - - for _, p := range refPkgs { - pkgs[p.Metadata().ID] = p - } - } - - // Organize calls by top file declaration. Calls within a single file may - // affect each other, as the inlining edit may affect the surrounding scope - // or imports Therefore, when inlining subsequent calls in the same - // declaration, we must re-type check. - - type fileCalls struct { - pkg *cache.Package - pgf *ParsedGoFile - calls []*ast.CallExpr - } - - refsByFile := make(map[protocol.DocumentURI]*fileCalls) - for _, ref := range refs { - refpkg := pkgs[pkgForRef[ref]] - pgf, err := refpkg.File(ref.URI) - if err != nil { - return nil, bug.Errorf("finding %s in %s: %v", ref.URI, refpkg.Metadata().ID, err) - } - start, end, err := pgf.RangePos(ref.Range) - if err != nil { - return nil, err // e.g. invalid range - } - - // Look for the surrounding call expression. - var ( - name *ast.Ident - call *ast.CallExpr - ) - path, _ := astutil.PathEnclosingInterval(pgf.File, start, end) - name, _ = path[0].(*ast.Ident) - if _, ok := path[1].(*ast.SelectorExpr); ok { - call, _ = path[2].(*ast.CallExpr) - } else { - call, _ = path[1].(*ast.CallExpr) - } - if name == nil || call == nil { - // TODO(rfindley): handle this case with eta-abstraction: - // a reference to the target function f in a non-call position - // use(f) - // is replaced by - // use(func(...) { f(...) }) - return nil, fmt.Errorf("cannot inline: found non-call function reference %v", ref) - } - // Sanity check. - if obj := refpkg.GetTypesInfo().ObjectOf(name); obj == nil || - obj.Name() != origDecl.Name.Name || - obj.Pkg() == nil || - obj.Pkg().Path() != string(pkg.Metadata().PkgPath) { - return nil, bug.Errorf("cannot inline: corrupted reference %v", ref) - } - - callInfo, ok := refsByFile[ref.URI] - if !ok { - callInfo = &fileCalls{ - pkg: refpkg, - pgf: pgf, - } - refsByFile[ref.URI] = callInfo - } - callInfo.calls = append(callInfo.calls, call) - } - - // Inline each call within the same decl in sequence, re-typechecking after - // each one. If there is only a single call within the decl, we can avoid - // additional type checking. - // - // Assumption: inlining does not affect the package scope, so we can operate - // on separate files independently. 
- result := make(map[protocol.DocumentURI][]byte) - for uri, callInfo := range refsByFile { - var ( - calls = callInfo.calls - fset = callInfo.pkg.FileSet() - tpkg = callInfo.pkg.GetTypes() - tinfo = callInfo.pkg.GetTypesInfo() - file = callInfo.pgf.File - content = callInfo.pgf.Src - ) - - // Check for overlapping calls (such as Foo(Foo())). We can't handle these - // because inlining may change the source order of the inner call with - // respect to the inlined outer call, and so the heuristic we use to find - // the next call (counting from top-to-bottom) does not work. - for i := range calls { - if i > 0 && calls[i-1].End() > calls[i].Pos() { - return nil, fmt.Errorf("%s: can't inline overlapping call %s", uri, types.ExprString(calls[i-1])) - } - } - - currentCall := 0 - for currentCall < len(calls) { - caller := &inline.Caller{ - Fset: fset, - Types: tpkg, - Info: tinfo, - File: file, - Call: calls[currentCall], - Content: content, - } - var err error - content, err = inline.Inline(logf, caller, callee) - if err != nil { - return nil, fmt.Errorf("inlining failed: %v", err) - } - if post != nil { - content = post(content) - } - if len(calls) <= 1 { - // No need to re-type check, as we've inlined all calls. - break - } - - // TODO(rfindley): develop a theory of "trivial" inlining, which are - // inlinings that don't require re-type checking. - // - // In principle, if the inlining only involves replacing one call with - // another, the scope of the caller is unchanged and there is no need to - // type check again before inlining subsequent calls (edits should not - // overlap, and should not affect each other semantically). However, it - // feels sufficiently complicated that, to be safe, this optimization is - // deferred until later. - - file, err = parser.ParseFile(fset, uri.Path(), content, parser.ParseComments|parser.SkipObjectResolution) - if err != nil { - return nil, bug.Errorf("inlined file failed to parse: %v", err) - } - - // After inlining one call with a removed parameter, the package will - // fail to type check due to "not enough arguments". Therefore, we must - // allow type errors here. - // - // Assumption: the resulting type errors do not affect the correctness of - // subsequent inlining, because invalid arguments to a call do not affect - // anything in the surrounding scope. - // - // TODO(rfindley): improve this. - tpkg, tinfo, err = reTypeCheck(logf, callInfo.pkg, map[protocol.DocumentURI]*ast.File{uri: file}, true) - if err != nil { - return nil, bug.Errorf("type checking after inlining failed: %v", err) - } - - // Collect calls to the target function in the modified declaration. - var calls2 []*ast.CallExpr - ast.Inspect(file, func(n ast.Node) bool { - if call, ok := n.(*ast.CallExpr); ok { - fn := typeutil.StaticCallee(tinfo, call) - if fn != nil && fn.Pkg().Path() == string(pkg.Metadata().PkgPath) && fn.Name() == origDecl.Name.Name { - calls2 = append(calls2, call) - } - } - return true - }) - - // If the number of calls has increased, this process will never cease. - // If the number of calls has decreased, assume that inlining removed a - // call. - // If the number of calls didn't change, assume that inlining replaced - // a call, and move on to the next. - // - // Assumption: we're inlining a call that has at most one recursive - // reference (which holds for signature rewrites). - // - // TODO(rfindley): this isn't good enough. We should be able to support - // inlining all existing calls even if they increase calls. 
How do we - // correlate the before and after syntax? - switch { - case len(calls2) > len(calls): - return nil, fmt.Errorf("inlining increased calls %d->%d, possible recursive call? content:\n%s", len(calls), len(calls2), content) - case len(calls2) < len(calls): - calls = calls2 - case len(calls2) == len(calls): - calls = calls2 - currentCall++ - } - } - - result[callInfo.pgf.URI] = content - } - return result, nil -} diff --git a/internal/golangorgx/gopls/golang/invertifcondition.go b/internal/golangorgx/gopls/golang/invertifcondition.go deleted file mode 100644 index db64694fe07..00000000000 --- a/internal/golangorgx/gopls/golang/invertifcondition.go +++ /dev/null @@ -1,267 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package golang - -import ( - "fmt" - "go/ast" - "go/token" - "go/types" - "strings" - - "cuelang.org/go/internal/golangorgx/gopls/util/safetoken" - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/ast/astutil" -) - -// invertIfCondition is a singleFileFixFunc that inverts an if/else statement -func invertIfCondition(fset *token.FileSet, start, end token.Pos, src []byte, file *ast.File, _ *types.Package, _ *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) { - ifStatement, _, err := CanInvertIfCondition(file, start, end) - if err != nil { - return nil, nil, err - } - - var replaceElse analysis.TextEdit - - endsWithReturn, err := endsWithReturn(ifStatement.Else) - if err != nil { - return nil, nil, err - } - - if endsWithReturn { - // Replace the whole else part with an empty line and an unindented - // version of the original if body - sourcePos := safetoken.StartPosition(fset, ifStatement.Pos()) - - indent := sourcePos.Column - 1 - if indent < 0 { - indent = 0 - } - - standaloneBodyText := ifBodyToStandaloneCode(fset, ifStatement.Body, src) - replaceElse = analysis.TextEdit{ - Pos: ifStatement.Body.Rbrace + 1, // 1 == len("}") - End: ifStatement.End(), - NewText: []byte("\n\n" + strings.Repeat("\t", indent) + standaloneBodyText), - } - } else { - // Replace the else body text with the if body text - bodyStart := safetoken.StartPosition(fset, ifStatement.Body.Lbrace) - bodyEnd := safetoken.EndPosition(fset, ifStatement.Body.Rbrace+1) // 1 == len("}") - bodyText := src[bodyStart.Offset:bodyEnd.Offset] - replaceElse = analysis.TextEdit{ - Pos: ifStatement.Else.Pos(), - End: ifStatement.Else.End(), - NewText: bodyText, - } - } - - // Replace the if text with the else text - elsePosInSource := safetoken.StartPosition(fset, ifStatement.Else.Pos()) - elseEndInSource := safetoken.EndPosition(fset, ifStatement.Else.End()) - elseText := src[elsePosInSource.Offset:elseEndInSource.Offset] - replaceBodyWithElse := analysis.TextEdit{ - Pos: ifStatement.Body.Pos(), - End: ifStatement.Body.End(), - NewText: elseText, - } - - // Replace the if condition with its inverse - inverseCondition, err := invertCondition(fset, ifStatement.Cond, src) - if err != nil { - return nil, nil, err - } - replaceConditionWithInverse := analysis.TextEdit{ - Pos: ifStatement.Cond.Pos(), - End: ifStatement.Cond.End(), - NewText: inverseCondition, - } - - // Return a SuggestedFix with just that TextEdit in there - return fset, &analysis.SuggestedFix{ - TextEdits: []analysis.TextEdit{ - replaceConditionWithInverse, - replaceBodyWithElse, - replaceElse, - }, - }, nil -} - -func endsWithReturn(elseBranch ast.Stmt) (bool, error) { - elseBlock, isBlockStatement := 
elseBranch.(*ast.BlockStmt) - if !isBlockStatement { - return false, fmt.Errorf("unable to figure out whether this ends with return: %T", elseBranch) - } - - if len(elseBlock.List) == 0 { - // Empty blocks don't end in returns - return false, nil - } - - lastStatement := elseBlock.List[len(elseBlock.List)-1] - - _, lastStatementIsReturn := lastStatement.(*ast.ReturnStmt) - return lastStatementIsReturn, nil -} - -// Turn { fmt.Println("Hello") } into just fmt.Println("Hello"), with one less -// level of indentation. -// -// The first line of the result will not be indented, but all of the following -// lines will. -func ifBodyToStandaloneCode(fset *token.FileSet, ifBody *ast.BlockStmt, src []byte) string { - // Get the whole body (without the surrounding braces) as a string - bodyStart := safetoken.StartPosition(fset, ifBody.Lbrace+1) // 1 == len("}") - bodyEnd := safetoken.EndPosition(fset, ifBody.Rbrace) - bodyWithoutBraces := string(src[bodyStart.Offset:bodyEnd.Offset]) - bodyWithoutBraces = strings.TrimSpace(bodyWithoutBraces) - - // Unindent - bodyWithoutBraces = strings.ReplaceAll(bodyWithoutBraces, "\n\t", "\n") - - return bodyWithoutBraces -} - -func invertCondition(fset *token.FileSet, cond ast.Expr, src []byte) ([]byte, error) { - condStart := safetoken.StartPosition(fset, cond.Pos()) - condEnd := safetoken.EndPosition(fset, cond.End()) - oldText := string(src[condStart.Offset:condEnd.Offset]) - - switch expr := cond.(type) { - case *ast.Ident, *ast.ParenExpr, *ast.CallExpr, *ast.StarExpr, *ast.IndexExpr, *ast.IndexListExpr, *ast.SelectorExpr: - newText := "!" + oldText - if oldText == "true" { - newText = "false" - } else if oldText == "false" { - newText = "true" - } - - return []byte(newText), nil - - case *ast.UnaryExpr: - if expr.Op != token.NOT { - // This should never happen - return dumbInvert(fset, cond, src), nil - } - - inverse := expr.X - if p, isParen := inverse.(*ast.ParenExpr); isParen { - // We got !(x), remove the parentheses with the ! so we get just "x" - inverse = p.X - - start := safetoken.StartPosition(fset, inverse.Pos()) - end := safetoken.EndPosition(fset, inverse.End()) - if start.Line != end.Line { - // The expression is multi-line, so we can't remove the parentheses - inverse = expr.X - } - } - - start := safetoken.StartPosition(fset, inverse.Pos()) - end := safetoken.EndPosition(fset, inverse.End()) - textWithoutNot := src[start.Offset:end.Offset] - - return textWithoutNot, nil - - case *ast.BinaryExpr: - // These inversions are unsound for floating point NaN, but that's ok. - negations := map[token.Token]string{ - token.EQL: "!=", - token.LSS: ">=", - token.GTR: "<=", - token.NEQ: "==", - token.LEQ: ">", - token.GEQ: "<", - } - - negation, negationFound := negations[expr.Op] - if !negationFound { - return invertAndOr(fset, expr, src) - } - - xPosInSource := safetoken.StartPosition(fset, expr.X.Pos()) - opPosInSource := safetoken.StartPosition(fset, expr.OpPos) - yPosInSource := safetoken.StartPosition(fset, expr.Y.Pos()) - - textBeforeOp := string(src[xPosInSource.Offset:opPosInSource.Offset]) - - oldOpWithTrailingWhitespace := string(src[opPosInSource.Offset:yPosInSource.Offset]) - newOpWithTrailingWhitespace := negation + oldOpWithTrailingWhitespace[len(expr.Op.String()):] - - textAfterOp := string(src[yPosInSource.Offset:condEnd.Offset]) - - return []byte(textBeforeOp + newOpWithTrailingWhitespace + textAfterOp), nil - } - - return dumbInvert(fset, cond, src), nil -} - -// dumbInvert is a fallback, inverting cond into !(cond). 
-func dumbInvert(fset *token.FileSet, expr ast.Expr, src []byte) []byte { - start := safetoken.StartPosition(fset, expr.Pos()) - end := safetoken.EndPosition(fset, expr.End()) - text := string(src[start.Offset:end.Offset]) - return []byte("!(" + text + ")") -} - -func invertAndOr(fset *token.FileSet, expr *ast.BinaryExpr, src []byte) ([]byte, error) { - if expr.Op != token.LAND && expr.Op != token.LOR { - // Neither AND nor OR, don't know how to invert this - return dumbInvert(fset, expr, src), nil - } - - oppositeOp := "&&" - if expr.Op == token.LAND { - oppositeOp = "||" - } - - xEndInSource := safetoken.EndPosition(fset, expr.X.End()) - opPosInSource := safetoken.StartPosition(fset, expr.OpPos) - whitespaceAfterBefore := src[xEndInSource.Offset:opPosInSource.Offset] - - invertedBefore, err := invertCondition(fset, expr.X, src) - if err != nil { - return nil, err - } - - invertedAfter, err := invertCondition(fset, expr.Y, src) - if err != nil { - return nil, err - } - - yPosInSource := safetoken.StartPosition(fset, expr.Y.Pos()) - - oldOpWithTrailingWhitespace := string(src[opPosInSource.Offset:yPosInSource.Offset]) - newOpWithTrailingWhitespace := oppositeOp + oldOpWithTrailingWhitespace[len(expr.Op.String()):] - - return []byte(string(invertedBefore) + string(whitespaceAfterBefore) + newOpWithTrailingWhitespace + string(invertedAfter)), nil -} - -// CanInvertIfCondition reports whether we can do invert-if-condition on the -// code in the given range -func CanInvertIfCondition(file *ast.File, start, end token.Pos) (*ast.IfStmt, bool, error) { - path, _ := astutil.PathEnclosingInterval(file, start, end) - for _, node := range path { - stmt, isIfStatement := node.(*ast.IfStmt) - if !isIfStatement { - continue - } - - if stmt.Else == nil { - // Can't invert conditions without else clauses - return nil, false, fmt.Errorf("else clause required") - } - - if _, hasElseIf := stmt.Else.(*ast.IfStmt); hasElseIf { - // Can't invert conditions with else-if clauses, unclear what that - // would look like - return nil, false, fmt.Errorf("else-if not supported") - } - - return stmt, true, nil - } - - return nil, false, fmt.Errorf("not an if statement") -} diff --git a/internal/golangorgx/gopls/golang/known_packages.go b/internal/golangorgx/gopls/golang/known_packages.go deleted file mode 100644 index 227f789af9c..00000000000 --- a/internal/golangorgx/gopls/golang/known_packages.go +++ /dev/null @@ -1,137 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package golang - -import ( - "context" - "go/parser" - "go/token" - "sort" - "strings" - "sync" - "time" - - "cuelang.org/go/internal/golangorgx/gopls/cache" - "cuelang.org/go/internal/golangorgx/gopls/cache/metadata" - "cuelang.org/go/internal/golangorgx/gopls/file" - "cuelang.org/go/internal/golangorgx/tools/event" - "cuelang.org/go/internal/golangorgx/tools/imports" -) - -// KnownPackagePaths returns a new list of package paths of all known -// packages in the package graph that could potentially be imported by -// the given file. The list is ordered lexicographically, except that -// all dot-free paths (standard packages) appear before dotful ones. -// -// It is part of the gopls.list_known_packages command. 
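// Editor's sketch (hypothetical names, not part of the deleted file). The doc
// comment above describes the result ordering of KnownPackagePaths:
// lexicographic, except that dot-free paths (standard-library packages) sort
// before paths containing a dot. This minimal program shows that comparator
// in isolation.

package main

import (
	"fmt"
	"sort"
	"strings"
)

func sortStdFirst(paths []string) {
	sort.Slice(paths, func(i, j int) bool {
		iHasDot := strings.Contains(paths[i], ".")
		jHasDot := strings.Contains(paths[j], ".")
		if iHasDot != jHasDot {
			return jHasDot // the dot-free (standard) path compares less
		}
		return paths[i] < paths[j]
	})
}

func main() {
	paths := []string{"golang.org/x/tools/go/ast/astutil", "fmt", "sort", "cuelang.org/go/cue"}
	sortStdFirst(paths)
	fmt.Println(paths) // [fmt sort cuelang.org/go/cue golang.org/x/tools/go/ast/astutil]
}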
-func KnownPackagePaths(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]PackagePath, error) { - // This algorithm is expressed in terms of Metadata, not Packages, - // so it doesn't cause or wait for type checking. - - current, err := NarrowestMetadataForFile(ctx, snapshot, fh.URI()) - if err != nil { - return nil, err // e.g. context cancelled - } - - // Parse the file's imports so we can compute which - // PackagePaths are imported by this specific file. - src, err := fh.Content() - if err != nil { - return nil, err - } - file, err := parser.ParseFile(token.NewFileSet(), fh.URI().Path(), src, parser.ImportsOnly) - if err != nil { - return nil, err - } - imported := make(map[PackagePath]bool) - for _, imp := range file.Imports { - if id := current.DepsByImpPath[metadata.UnquoteImportPath(imp)]; id != "" { - if mp := snapshot.Metadata(id); mp != nil { - imported[mp.PkgPath] = true - } - } - } - - // Now find candidates among all known packages. - knownPkgs, err := snapshot.AllMetadata(ctx) - if err != nil { - return nil, err - } - seen := make(map[PackagePath]bool) - for _, knownPkg := range knownPkgs { - // package main cannot be imported - if knownPkg.Name == "main" { - continue - } - // test packages cannot be imported - if knownPkg.ForTest != "" { - continue - } - // No need to import what the file already imports. - // This check is based on PackagePath, not PackageID, - // so that all test variants are filtered out too. - if imported[knownPkg.PkgPath] { - continue - } - // make sure internal packages are importable by the file - if !metadata.IsValidImport(current.PkgPath, knownPkg.PkgPath) { - continue - } - // naive check on cyclical imports - if isDirectlyCyclical(current, knownPkg) { - continue - } - // AllMetadata may have multiple variants of a pkg. - seen[knownPkg.PkgPath] = true - } - - // Augment the set by invoking the goimports algorithm. - if err := snapshot.RunProcessEnvFunc(ctx, func(ctx context.Context, o *imports.Options) error { - ctx, cancel := context.WithTimeout(ctx, time.Millisecond*80) - defer cancel() - var seenMu sync.Mutex - wrapped := func(ifix imports.ImportFix) { - seenMu.Lock() - defer seenMu.Unlock() - // TODO(adonovan): what if the actual package path has a vendor/ prefix? - seen[PackagePath(ifix.StmtInfo.ImportPath)] = true - } - return imports.GetAllCandidates(ctx, wrapped, "", fh.URI().Path(), string(current.Name), o.Env) - }); err != nil { - // If goimports failed, proceed with just the candidates from the metadata. - event.Error(ctx, "imports.GetAllCandidates", err) - } - - // Sort lexicographically, but with std before non-std packages. - paths := make([]PackagePath, 0, len(seen)) - for path := range seen { - paths = append(paths, path) - } - sort.Slice(paths, func(i, j int) bool { - importI, importJ := paths[i], paths[j] - iHasDot := strings.Contains(string(importI), ".") - jHasDot := strings.Contains(string(importJ), ".") - if iHasDot != jHasDot { - return jHasDot // dot-free paths (standard packages) compare less - } - return importI < importJ - }) - - return paths, nil -} - -// isDirectlyCyclical checks if imported directly imports pkg. -// It does not (yet) offer a full cyclical check because showing a user -// a list of importable packages already generates a very large list -// and having a few false positives in there could be worth the -// performance snappiness. -// -// TODO(adonovan): ensure that metadata graph is always cyclic! -// Many algorithms will get confused or even stuck in the -// presence of cycles. 
Then replace this function by 'false'. -func isDirectlyCyclical(pkg, imported *metadata.Package) bool { - _, ok := imported.DepsByPkgPath[pkg.PkgPath] - return ok -} diff --git a/internal/golangorgx/gopls/golang/linkname.go b/internal/golangorgx/gopls/golang/linkname.go deleted file mode 100644 index 5e12f96a6e7..00000000000 --- a/internal/golangorgx/gopls/golang/linkname.go +++ /dev/null @@ -1,144 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package golang - -import ( - "context" - "errors" - "fmt" - "go/token" - "strings" - - "cuelang.org/go/internal/golangorgx/gopls/cache" - "cuelang.org/go/internal/golangorgx/gopls/cache/metadata" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/util/safetoken" -) - -// ErrNoLinkname is returned by LinknameDefinition when no linkname -// directive is found at a particular position. -// As such it indicates that other definitions could be worth checking. -var ErrNoLinkname = errors.New("no linkname directive found") - -// LinknameDefinition finds the definition of the linkname directive in m at pos. -// If there is no linkname directive at pos, returns ErrNoLinkname. -func LinknameDefinition(ctx context.Context, snapshot *cache.Snapshot, m *protocol.Mapper, from protocol.Position) ([]protocol.Location, error) { - pkgPath, name, _ := parseLinkname(m, from) - if pkgPath == "" { - return nil, ErrNoLinkname - } - - _, pgf, pos, err := findLinkname(ctx, snapshot, PackagePath(pkgPath), name) - if err != nil { - return nil, fmt.Errorf("find linkname: %w", err) - } - loc, err := pgf.PosLocation(pos, pos+token.Pos(len(name))) - if err != nil { - return nil, fmt.Errorf("location of linkname: %w", err) - } - return []protocol.Location{loc}, nil -} - -// parseLinkname attempts to parse a go:linkname declaration at the given pos. -// If successful, it returns -// - package path referenced -// - object name referenced -// - byte offset in mapped file of the start of the link target -// of the linkname directives 2nd argument. -// -// If the position is not in the second argument of a go:linkname directive, -// or parsing fails, it returns "", "", 0. -func parseLinkname(m *protocol.Mapper, pos protocol.Position) (pkgPath, name string, targetOffset int) { - lineStart, err := m.PositionOffset(protocol.Position{Line: pos.Line, Character: 0}) - if err != nil { - return "", "", 0 - } - lineEnd, err := m.PositionOffset(protocol.Position{Line: pos.Line + 1, Character: 0}) - if err != nil { - return "", "", 0 - } - - directive := string(m.Content[lineStart:lineEnd]) - // (Assumes no leading spaces.) - if !strings.HasPrefix(directive, "//go:linkname") { - return "", "", 0 - } - // Sometimes source code (typically tests) has another - // comment after the directive, trim that away. - if i := strings.LastIndex(directive, "//"); i != 0 { - directive = strings.TrimSpace(directive[:i]) - } - - // Looking for pkgpath in '//go:linkname f pkgpath.g'. - // (We ignore 1-arg linkname directives.) - parts := strings.Fields(directive) - if len(parts) != 3 { - return "", "", 0 - } - - // Inside 2nd arg [start, end]? - // (Assumes no trailing spaces.) 
- offset, err := m.PositionOffset(pos) - if err != nil { - return "", "", 0 - } - end := lineStart + len(directive) - start := end - len(parts[2]) - if !(start <= offset && offset <= end) { - return "", "", 0 - } - linkname := parts[2] - - // Split the pkg path from the name. - dot := strings.LastIndexByte(linkname, '.') - if dot < 0 { - return "", "", 0 - } - - return linkname[:dot], linkname[dot+1:], start -} - -// findLinkname searches dependencies of packages containing fh for an object -// with linker name matching the given package path and name. -func findLinkname(ctx context.Context, snapshot *cache.Snapshot, pkgPath PackagePath, name string) (*cache.Package, *ParsedGoFile, token.Pos, error) { - // Typically the linkname refers to a forward dependency - // or a reverse dependency, but in general it may refer - // to any package that is linked with this one. - var pkgMeta *metadata.Package - metas, err := snapshot.AllMetadata(ctx) - if err != nil { - return nil, nil, token.NoPos, err - } - metadata.RemoveIntermediateTestVariants(&metas) - for _, meta := range metas { - if meta.PkgPath == pkgPath { - pkgMeta = meta - break - } - } - if pkgMeta == nil { - return nil, nil, token.NoPos, fmt.Errorf("cannot find package %q", pkgPath) - } - - // When found, type check the desired package (snapshot.TypeCheck in TypecheckFull mode), - pkgs, err := snapshot.TypeCheck(ctx, pkgMeta.ID) - if err != nil { - return nil, nil, token.NoPos, err - } - pkg := pkgs[0] - - obj := pkg.GetTypes().Scope().Lookup(name) - if obj == nil { - return nil, nil, token.NoPos, fmt.Errorf("package %q does not define %s", pkgPath, name) - } - - objURI := safetoken.StartPosition(pkg.FileSet(), obj.Pos()) - pgf, err := pkg.File(protocol.URIFromPath(objURI.Filename)) - if err != nil { - return nil, nil, token.NoPos, err - } - - return pkg, pgf, obj.Pos(), nil -} diff --git a/internal/golangorgx/gopls/golang/origin.go b/internal/golangorgx/gopls/golang/origin.go deleted file mode 100644 index c5e84db0ceb..00000000000 --- a/internal/golangorgx/gopls/golang/origin.go +++ /dev/null @@ -1,26 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !go1.19 -// +build !go1.19 - -package golang - -import "go/types" - -// containsOrigin reports whether the provided object set contains an object -// with the same origin as the provided obj (which may be a synthetic object -// created during instantiation). -func containsOrigin(objSet map[types.Object]bool, obj types.Object) bool { - if obj == nil { - return objSet[obj] - } - // In Go 1.18, we can't use the types.Var.Origin and types.Func.Origin methods. - for target := range objSet { - if target.Pkg() == obj.Pkg() && target.Pos() == obj.Pos() && target.Name() == obj.Name() { - return true - } - } - return false -} diff --git a/internal/golangorgx/gopls/golang/origin_119.go b/internal/golangorgx/gopls/golang/origin_119.go deleted file mode 100644 index 16f6ca7c065..00000000000 --- a/internal/golangorgx/gopls/golang/origin_119.go +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -//go:build go1.19 -// +build go1.19 - -package golang - -import "go/types" - -// containsOrigin reports whether the provided object set contains an object -// with the same origin as the provided obj (which may be a synthetic object -// created during instantiation). -func containsOrigin(objSet map[types.Object]bool, obj types.Object) bool { - objOrigin := origin(obj) - for target := range objSet { - if origin(target) == objOrigin { - return true - } - } - return false -} - -func origin(obj types.Object) types.Object { - switch obj := obj.(type) { - case *types.Var: - return obj.Origin() - case *types.Func: - return obj.Origin() - } - return obj -} diff --git a/internal/golangorgx/gopls/golang/references.go b/internal/golangorgx/gopls/golang/references.go deleted file mode 100644 index 204f84e2a51..00000000000 --- a/internal/golangorgx/gopls/golang/references.go +++ /dev/null @@ -1,694 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package golang - -// This file defines the 'references' query based on a serializable -// index constructed during type checking, thus avoiding the need to -// type-check packages at search time. -// -// See the ./xrefs/ subpackage for the index construction and lookup. -// -// This implementation does not intermingle objects from distinct -// calls to TypeCheck. - -import ( - "context" - "fmt" - "go/ast" - "go/token" - "go/types" - "sort" - "strings" - "sync" - - "cuelang.org/go/internal/golangorgx/gopls/cache" - "cuelang.org/go/internal/golangorgx/gopls/cache/metadata" - "cuelang.org/go/internal/golangorgx/gopls/cache/methodsets" - "cuelang.org/go/internal/golangorgx/gopls/file" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/util/bug" - "cuelang.org/go/internal/golangorgx/gopls/util/safetoken" - "cuelang.org/go/internal/golangorgx/tools/event" - "golang.org/x/sync/errgroup" - "golang.org/x/tools/go/types/objectpath" -) - -// References returns a list of all references (sorted with -// definitions before uses) to the object denoted by the identifier at -// the given file/position, searching the entire workspace. -func References(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pp protocol.Position, includeDeclaration bool) ([]protocol.Location, error) { - references, err := references(ctx, snapshot, fh, pp, includeDeclaration) - if err != nil { - return nil, err - } - locations := make([]protocol.Location, len(references)) - for i, ref := range references { - locations[i] = ref.location - } - return locations, nil -} - -// A reference describes an identifier that refers to the same -// object as the subject of a References query. -type reference struct { - isDeclaration bool - location protocol.Location - pkgPath PackagePath // of declaring package (same for all elements of the slice) -} - -// references returns a list of all references (sorted with -// definitions before uses) to the object denoted by the identifier at -// the given file/position, searching the entire workspace. -func references(ctx context.Context, snapshot *cache.Snapshot, f file.Handle, pp protocol.Position, includeDeclaration bool) ([]reference, error) { - ctx, done := event.Start(ctx, "golang.references") - defer done() - - // Is the cursor within the package name declaration? 
- _, inPackageName, err := parsePackageNameDecl(ctx, snapshot, f, pp) - if err != nil { - return nil, err - } - - var refs []reference - if inPackageName { - refs, err = packageReferences(ctx, snapshot, f.URI()) - } else { - refs, err = ordinaryReferences(ctx, snapshot, f.URI(), pp) - } - if err != nil { - return nil, err - } - - sort.Slice(refs, func(i, j int) bool { - x, y := refs[i], refs[j] - if x.isDeclaration != y.isDeclaration { - return x.isDeclaration // decls < refs - } - return protocol.CompareLocation(x.location, y.location) < 0 - }) - - // De-duplicate by location, and optionally remove declarations. - out := refs[:0] - for _, ref := range refs { - if !includeDeclaration && ref.isDeclaration { - continue - } - if len(out) == 0 || out[len(out)-1].location != ref.location { - out = append(out, ref) - } - } - refs = out - - return refs, nil -} - -// packageReferences returns a list of references to the package -// declaration of the specified name and uri by searching among the -// import declarations of all packages that directly import the target -// package. -func packageReferences(ctx context.Context, snapshot *cache.Snapshot, uri protocol.DocumentURI) ([]reference, error) { - metas, err := snapshot.MetadataForFile(ctx, uri) - if err != nil { - return nil, err - } - if len(metas) == 0 { - return nil, fmt.Errorf("found no package containing %s", uri) - } - - var refs []reference - - // Find external references to the package declaration - // from each direct import of the package. - // - // The narrowest package is the most broadly imported, - // so we choose it for the external references. - // - // But if the file ends with _test.go then we need to - // find the package it is testing; there's no direct way - // to do that, so pick a file from the same package that - // doesn't end in _test.go and start over. - narrowest := metas[0] - if narrowest.ForTest != "" && strings.HasSuffix(string(uri), "_test.go") { - for _, f := range narrowest.CompiledGoFiles { - if !strings.HasSuffix(string(f), "_test.go") { - return packageReferences(ctx, snapshot, f) - } - } - // This package has no non-test files. - // Skip the search for external references. - // (Conceivably one could blank-import an empty package, but why?) - } else { - rdeps, err := snapshot.ReverseDependencies(ctx, narrowest.ID, false) // direct - if err != nil { - return nil, err - } - - // Restrict search to workspace packages. - workspace, err := snapshot.WorkspaceMetadata(ctx) - if err != nil { - return nil, err - } - workspaceMap := make(map[PackageID]*metadata.Package, len(workspace)) - for _, mp := range workspace { - workspaceMap[mp.ID] = mp - } - - for _, rdep := range rdeps { - if _, ok := workspaceMap[rdep.ID]; !ok { - continue - } - for _, uri := range rdep.CompiledGoFiles { - fh, err := snapshot.ReadFile(ctx, uri) - if err != nil { - return nil, err - } - f, err := snapshot.ParseGo(ctx, fh, ParseHeader) - if err != nil { - return nil, err - } - for _, imp := range f.File.Imports { - if rdep.DepsByImpPath[metadata.UnquoteImportPath(imp)] == narrowest.ID { - refs = append(refs, reference{ - isDeclaration: false, - location: mustLocation(f, imp), - pkgPath: narrowest.PkgPath, - }) - } - } - } - } - } - - // Find internal "references" to the package from - // of each package declaration in the target package itself. - // - // The widest package (possibly a test variant) has the - // greatest number of files and thus we choose it for the - // "internal" references. 
- widest := metas[len(metas)-1] // may include _test.go files - for _, uri := range widest.CompiledGoFiles { - fh, err := snapshot.ReadFile(ctx, uri) - if err != nil { - return nil, err - } - f, err := snapshot.ParseGo(ctx, fh, ParseHeader) - if err != nil { - return nil, err - } - refs = append(refs, reference{ - isDeclaration: true, // (one of many) - location: mustLocation(f, f.File.Name), - pkgPath: widest.PkgPath, - }) - } - - return refs, nil -} - -// ordinaryReferences computes references for all ordinary objects (not package declarations). -func ordinaryReferences(ctx context.Context, snapshot *cache.Snapshot, uri protocol.DocumentURI, pp protocol.Position) ([]reference, error) { - // Strategy: use the reference information computed by the - // type checker to find the declaration. First type-check this - // package to find the declaration, then type check the - // declaring package (which may be different), plus variants, - // to find local (in-package) references. - // Global references are satisfied by the index. - - // Strictly speaking, a wider package could provide a different - // declaration (e.g. because the _test.go files can change the - // meaning of a field or method selection), but the narrower - // package reports the more broadly referenced object. - pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, uri) - if err != nil { - return nil, err - } - - // Find the selected object (declaration or reference). - // For struct{T}, we choose the field (Def) over the type (Use). - pos, err := pgf.PositionPos(pp) - if err != nil { - return nil, err - } - candidates, _, err := objectsAt(pkg.GetTypesInfo(), pgf.File, pos) - if err != nil { - return nil, err - } - - // Pick first object arbitrarily. - // The case variables of a type switch have different - // types but that difference is immaterial here. - var obj types.Object - for obj = range candidates { - break - } - if obj == nil { - return nil, ErrNoIdentFound // can't happen - } - - // nil, error, error.Error, iota, or other built-in? - if obj.Pkg() == nil { - return nil, fmt.Errorf("references to builtin %q are not supported", obj.Name()) - } - if !obj.Pos().IsValid() { - if obj.Pkg().Path() != "unsafe" { - bug.Reportf("references: object %v has no position", obj) - } - return nil, fmt.Errorf("references to unsafe.%s are not supported", obj.Name()) - } - - // Find metadata of all packages containing the object's defining file. - // This may include the query pkg, and possibly other variants. - declPosn := safetoken.StartPosition(pkg.FileSet(), obj.Pos()) - declURI := protocol.URIFromPath(declPosn.Filename) - variants, err := snapshot.MetadataForFile(ctx, declURI) - if err != nil { - return nil, err - } - if len(variants) == 0 { - return nil, fmt.Errorf("no packages for file %q", declURI) // can't happen - } - // (variants must include ITVs for reverse dependency computation below.) - - // Is object exported? - // If so, compute scope and targets of the global search. - var ( - globalScope = make(map[PackageID]*metadata.Package) // (excludes ITVs) - globalTargets map[PackagePath]map[objectpath.Path]unit - expansions = make(map[PackageID]unit) // packages that caused search expansion - ) - // TODO(adonovan): what about generic functions? Need to consider both - // uninstantiated and instantiated. The latter have no objectpath. Use Origin? 
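// Editor's sketch (hypothetical code, not part of the deleted file). The
// surrounding search identifies exported targets by (package path, objectpath)
// pairs so that the same declaration can be matched across separately
// type-checked packages. This standalone program shows the objectpath round
// trip on a tiny in-memory package.

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"

	"golang.org/x/tools/go/types/objectpath"
)

func main() {
	const src = `package demo

type T struct{ Field int }

func (T) Method() {}
`
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "demo.go", src, 0)
	if err != nil {
		panic(err)
	}
	// No Importer is needed: the sample source has no imports.
	conf := types.Config{}
	pkg, err := conf.Check("example.com/demo", fset, []*ast.File{f}, nil)
	if err != nil {
		panic(err)
	}

	// Name the method by a package-relative, stable path...
	method, _, _ := types.LookupFieldOrMethod(pkg.Scope().Lookup("T").Type(), false, pkg, "Method")
	path, err := objectpath.For(method)
	if err != nil {
		panic(err)
	}
	fmt.Println("path:", path)

	// ...and resolve the path back to an object within the package.
	obj, err := objectpath.Object(pkg, path)
	if err != nil {
		panic(err)
	}
	fmt.Println("resolved:", obj)
}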
- if path, err := objectpath.For(obj); err == nil && obj.Exported() { - pkgPath := variants[0].PkgPath // (all variants have same package path) - globalTargets = map[PackagePath]map[objectpath.Path]unit{ - pkgPath: {path: {}}, // primary target - } - - // Compute set of (non-ITV) workspace packages. - // We restrict references to this subset. - workspace, err := snapshot.WorkspaceMetadata(ctx) - if err != nil { - return nil, err - } - workspaceMap := make(map[PackageID]*metadata.Package, len(workspace)) - workspaceIDs := make([]PackageID, 0, len(workspace)) - for _, mp := range workspace { - workspaceMap[mp.ID] = mp - workspaceIDs = append(workspaceIDs, mp.ID) - } - - // addRdeps expands the global scope to include the - // reverse dependencies of the specified package. - addRdeps := func(id PackageID, transitive bool) error { - rdeps, err := snapshot.ReverseDependencies(ctx, id, transitive) - if err != nil { - return err - } - for rdepID, rdep := range rdeps { - // Skip non-workspace packages. - // - // This means we also skip any expansion of the - // search that might be caused by a non-workspace - // package, possibly causing us to miss references - // to the expanded target set from workspace packages. - // - // TODO(adonovan): don't skip those expansions. - // The challenge is how to so without type-checking - // a lot of non-workspace packages not covered by - // the initial workspace load. - if _, ok := workspaceMap[rdepID]; !ok { - continue - } - - globalScope[rdepID] = rdep - } - return nil - } - - // How far need we search? - // For package-level objects, we need only search the direct importers. - // For fields and methods, we must search transitively. - transitive := obj.Pkg().Scope().Lookup(obj.Name()) != obj - - // The scope is the union of rdeps of each variant. - // (Each set is disjoint so there's no benefit to - // combining the metadata graph traversals.) - for _, mp := range variants { - if err := addRdeps(mp.ID, transitive); err != nil { - return nil, err - } - } - - // Is object a method? - // - // If so, expand the search so that the targets include - // all methods that correspond to it through interface - // satisfaction, and the scope includes the rdeps of - // the package that declares each corresponding type. - // - // 'expansions' records the packages that declared - // such types. - if recv := effectiveReceiver(obj); recv != nil { - if err := expandMethodSearch(ctx, snapshot, workspaceIDs, obj.(*types.Func), recv, addRdeps, globalTargets, expansions); err != nil { - return nil, err - } - } - } - - // The search functions will call report(loc) for each hit. - var ( - refsMu sync.Mutex - refs []reference - ) - report := func(loc protocol.Location, isDecl bool) { - ref := reference{ - isDeclaration: isDecl, - location: loc, - pkgPath: pkg.Metadata().PkgPath, - } - refsMu.Lock() - refs = append(refs, ref) - refsMu.Unlock() - } - - // Loop over the variants of the declaring package, - // and perform both the local (in-package) and global - // (cross-package) searches, in parallel. - // - // TODO(adonovan): opt: support LSP reference streaming. See: - // - https://github.com/microsoft/vscode-languageserver-node/pull/164 - // - https://github.com/microsoft/language-server-protocol/pull/182 - // - // Careful: this goroutine must not return before group.Wait. - var group errgroup.Group - - // Compute local references for each variant. - // The target objects are identified by (URI, offset). 
- for _, mp := range variants { - // We want the ordinary importable package, - // plus any test-augmented variants, since - // declarations in _test.go files may change - // the reference of a selection, or even a - // field into a method or vice versa. - // - // But we don't need intermediate test variants, - // as their local references will be covered - // already by other variants. - if mp.IsIntermediateTestVariant() { - continue - } - mp := mp - group.Go(func() error { - // TODO(adonovan): opt: batch these TypeChecks. - pkgs, err := snapshot.TypeCheck(ctx, mp.ID) - if err != nil { - return err - } - pkg := pkgs[0] - - // Find the declaration of the corresponding - // object in this package based on (URI, offset). - pgf, err := pkg.File(declURI) - if err != nil { - return err - } - pos, err := safetoken.Pos(pgf.Tok, declPosn.Offset) - if err != nil { - return err - } - objects, _, err := objectsAt(pkg.GetTypesInfo(), pgf.File, pos) - if err != nil { - return err // unreachable? (probably caught earlier) - } - - // Report the locations of the declaration(s). - // TODO(adonovan): what about for corresponding methods? Add tests. - for _, node := range objects { - report(mustLocation(pgf, node), true) - } - - // Convert targets map to set. - targets := make(map[types.Object]bool) - for obj := range objects { - targets[obj] = true - } - - return localReferences(pkg, targets, true, report) - }) - } - - // Also compute local references within packages that declare - // corresponding methods (see above), which expand the global search. - // The target objects are identified by (PkgPath, objectpath). - for id := range expansions { - id := id - group.Go(func() error { - // TODO(adonovan): opt: batch these TypeChecks. - pkgs, err := snapshot.TypeCheck(ctx, id) - if err != nil { - return err - } - pkg := pkgs[0] - - targets := make(map[types.Object]bool) - for objpath := range globalTargets[pkg.Metadata().PkgPath] { - obj, err := objectpath.Object(pkg.GetTypes(), objpath) - if err != nil { - // No such object, because it was - // declared only in the test variant. - continue - } - targets[obj] = true - } - - // Don't include corresponding types or methods - // since expansions did that already, and we don't - // want (e.g.) concrete -> interface -> concrete. - const correspond = false - return localReferences(pkg, targets, correspond, report) - }) - } - - // Compute global references for selected reverse dependencies. - group.Go(func() error { - var globalIDs []PackageID - for id := range globalScope { - globalIDs = append(globalIDs, id) - } - indexes, err := snapshot.References(ctx, globalIDs...) - if err != nil { - return err - } - for _, index := range indexes { - for _, loc := range index.Lookup(globalTargets) { - report(loc, false) - } - } - return nil - }) - - if err := group.Wait(); err != nil { - return nil, err - } - return refs, nil -} - -// expandMethodSearch expands the scope and targets of a global search -// for an exported method to include all methods in the workspace -// that correspond to it through interface satisfaction. -// -// Each package that declares a corresponding type is added to -// expansions so that we can also find local references to the type -// within the package, which of course requires type checking. -// -// The scope is expanded by a sequence of calls (not concurrent) to addRdeps. -// -// recv is the method's effective receiver type, for method-set computations. 
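// Editor's sketch (hypothetical names, not part of the deleted file). Both the
// reference search above and expandMethodSearch below fan work out with an
// errgroup.Group and collect results through a callback guarded by a mutex.
// This standalone program shows that concurrency pattern in isolation.

package main

import (
	"fmt"
	"sort"
	"strings"
	"sync"

	"golang.org/x/sync/errgroup"
)

func main() {
	files := map[string]string{
		"a.go": "alpha beta alpha",
		"b.go": "beta gamma",
		"c.go": "alpha",
	}

	var (
		mu   sync.Mutex
		hits []string
	)
	// report is the only writer of hits; callers may run concurrently.
	report := func(file string, count int) {
		mu.Lock()
		defer mu.Unlock()
		hits = append(hits, fmt.Sprintf("%s: %d", file, count))
	}

	var group errgroup.Group
	for name, content := range files {
		name, content := name, content // capture loop variables (pre-Go 1.22 style)
		group.Go(func() error {
			if n := strings.Count(content, "alpha"); n > 0 {
				report(name, n)
			}
			return nil
		})
	}
	if err := group.Wait(); err != nil {
		panic(err)
	}

	sort.Strings(hits) // goroutine completion order is nondeterministic
	fmt.Println(hits)  // [a.go: 2 c.go: 1]
}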
-func expandMethodSearch(ctx context.Context, snapshot *cache.Snapshot, workspaceIDs []PackageID, method *types.Func, recv types.Type, addRdeps func(id PackageID, transitive bool) error, targets map[PackagePath]map[objectpath.Path]unit, expansions map[PackageID]unit) error { - // Compute the method-set fingerprint used as a key to the global search. - key, hasMethods := methodsets.KeyOf(recv) - if !hasMethods { - return bug.Errorf("KeyOf(%s)={} yet %s is a method", recv, method) - } - // Search the methodset index of each package in the workspace. - indexes, err := snapshot.MethodSets(ctx, workspaceIDs...) - if err != nil { - return err - } - var mu sync.Mutex // guards addRdeps, targets, expansions - var group errgroup.Group - for i, index := range indexes { - i := i - index := index - group.Go(func() error { - // Consult index for matching methods. - results := index.Search(key, method.Name()) - if len(results) == 0 { - return nil - } - - // We have discovered one or more corresponding types. - id := workspaceIDs[i] - - mu.Lock() - defer mu.Unlock() - - // Expand global search scope to include rdeps of this pkg. - if err := addRdeps(id, true); err != nil { - return err - } - - // Mark this package so that we search within it for - // local references to the additional types/methods. - expansions[id] = unit{} - - // Add each corresponding method the to set of global search targets. - for _, res := range results { - methodPkg := PackagePath(res.PkgPath) - opaths, ok := targets[methodPkg] - if !ok { - opaths = make(map[objectpath.Path]unit) - targets[methodPkg] = opaths - } - opaths[res.ObjectPath] = unit{} - } - return nil - }) - } - return group.Wait() -} - -// localReferences traverses syntax and reports each reference to one -// of the target objects, or (if correspond is set) an object that -// corresponds to one of them via interface satisfaction. -func localReferences(pkg *cache.Package, targets map[types.Object]bool, correspond bool, report func(loc protocol.Location, isDecl bool)) error { - // If we're searching for references to a method optionally - // broaden the search to include references to corresponding - // methods of mutually assignable receiver types. - // (We use a slice, but objectsAt never returns >1 methods.) - var methodRecvs []types.Type - var methodName string // name of an arbitrary target, iff a method - if correspond { - for obj := range targets { - if t := effectiveReceiver(obj); t != nil { - methodRecvs = append(methodRecvs, t) - methodName = obj.Name() - } - } - } - - // matches reports whether obj either is or corresponds to a target. - // (Correspondence is defined as usual for interface methods.) - matches := func(obj types.Object) bool { - if containsOrigin(targets, obj) { - return true - } - if methodRecvs != nil && obj.Name() == methodName { - if orecv := effectiveReceiver(obj); orecv != nil { - for _, mrecv := range methodRecvs { - if concreteImplementsIntf(orecv, mrecv) { - return true - } - } - } - } - return false - } - - // Scan through syntax looking for uses of one of the target objects. - for _, pgf := range pkg.CompiledGoFiles() { - ast.Inspect(pgf.File, func(n ast.Node) bool { - if id, ok := n.(*ast.Ident); ok { - if obj, ok := pkg.GetTypesInfo().Uses[id]; ok && matches(obj) { - report(mustLocation(pgf, id), false) - } - } - return true - }) - } - return nil -} - -// effectiveReceiver returns the effective receiver type for method-set -// comparisons for obj, if it is a method, or nil otherwise. 
-func effectiveReceiver(obj types.Object) types.Type { - if fn, ok := obj.(*types.Func); ok { - if recv := fn.Type().(*types.Signature).Recv(); recv != nil { - return methodsets.EnsurePointer(recv.Type()) - } - } - return nil -} - -// objectsAt returns the non-empty set of objects denoted (def or use) -// by the specified position within a file syntax tree, or an error if -// none were found. -// -// The result may contain more than one element because all case -// variables of a type switch appear to be declared at the same -// position. -// -// Each object is mapped to the syntax node that was treated as an -// identifier, which is not always an ast.Ident. The second component -// of the result is the innermost node enclosing pos. -// -// TODO(adonovan): factor in common with referencedObject. -func objectsAt(info *types.Info, file *ast.File, pos token.Pos) (map[types.Object]ast.Node, ast.Node, error) { - path := pathEnclosingObjNode(file, pos) - if path == nil { - return nil, nil, ErrNoIdentFound - } - - targets := make(map[types.Object]ast.Node) - - switch leaf := path[0].(type) { - case *ast.Ident: - // If leaf represents an implicit type switch object or the type - // switch "assign" variable, expand to all of the type switch's - // implicit objects. - if implicits, _ := typeSwitchImplicits(info, path); len(implicits) > 0 { - for _, obj := range implicits { - targets[obj] = leaf - } - } else { - // Note: prior to go1.21, go/types issue #60372 causes the position - // a field Var T created for struct{*p.T} to be recorded at the - // start of the field type ("*") not the location of the T. - // This affects references and other gopls operations (issue #60369). - // TODO(adonovan): delete this comment when we drop support for go1.20. - - // For struct{T}, we prefer the defined field Var over the used TypeName. - obj := info.ObjectOf(leaf) - if obj == nil { - return nil, nil, fmt.Errorf("%w for %q", errNoObjectFound, leaf.Name) - } - targets[obj] = leaf - } - case *ast.ImportSpec: - // Look up the implicit *types.PkgName. - obj := info.Implicits[leaf] - if obj == nil { - return nil, nil, fmt.Errorf("%w for import %s", errNoObjectFound, metadata.UnquoteImportPath(leaf)) - } - targets[obj] = leaf - } - - if len(targets) == 0 { - return nil, nil, fmt.Errorf("objectAt: internal error: no targets") // can't happen - } - return targets, path[0], nil -} - -// mustLocation reports the location interval a syntax node, -// which must belong to m.File. -// -// Safe for use only by references and implementations. -func mustLocation(pgf *ParsedGoFile, n ast.Node) protocol.Location { - loc, err := pgf.NodeLocation(n) - if err != nil { - panic(err) // can't happen in references or implementations - } - return loc -} diff --git a/internal/golangorgx/gopls/golang/rename.go b/internal/golangorgx/gopls/golang/rename.go deleted file mode 100644 index d42d5592863..00000000000 --- a/internal/golangorgx/gopls/golang/rename.go +++ /dev/null @@ -1,1459 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package golang - -// TODO(adonovan): -// -// - method of generic concrete type -> arbitrary instances of same -// -// - make satisfy work across packages. -// -// - tests, tests, tests: -// - play with renamings in the k8s tree. -// - generics -// - error cases (e.g. conflicts) -// - renaming a symbol declared in the module cache -// (currently proceeds with half of the renaming!) 
-// - make sure all tests have both a local and a cross-package analogue. -// - look at coverage -// - special cases: embedded fields, interfaces, test variants, -// function-local things with uppercase names; -// packages with type errors (currently 'satisfy' rejects them), -// package with missing imports; -// -// - measure performance in k8s. -// -// - The original gorename tool assumed well-typedness, but the gopls feature -// does no such check (which actually makes it much more useful). -// Audit to ensure it is safe on ill-typed code. -// -// - Generics support was no doubt buggy before but incrementalization -// may have exacerbated it. If the problem were just about objects, -// defs and uses it would be fairly simple, but type assignability -// comes into play in the 'satisfy' check for method renamings. -// De-instantiating Vector[int] to Vector[T] changes its type. -// We need to come up with a theory for the satisfy check that -// works with generics, and across packages. We currently have no -// simple way to pass types between packages (think: objectpath for -// types), though presumably exportdata could be pressed into service. -// -// - FileID-based de-duplication of edits to different URIs for the same file. - -import ( - "context" - "errors" - "fmt" - "go/ast" - "go/token" - "go/types" - "path" - "path/filepath" - "regexp" - "sort" - "strconv" - "strings" - - "cuelang.org/go/internal/golangorgx/gopls/cache" - "cuelang.org/go/internal/golangorgx/gopls/cache/metadata" - "cuelang.org/go/internal/golangorgx/gopls/cache/parsego" - "cuelang.org/go/internal/golangorgx/gopls/file" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/util/bug" - "cuelang.org/go/internal/golangorgx/gopls/util/safetoken" - "cuelang.org/go/internal/golangorgx/tools/diff" - "cuelang.org/go/internal/golangorgx/tools/event" - "cuelang.org/go/internal/golangorgx/tools/typeparams" - "golang.org/x/mod/modfile" - "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/go/types/objectpath" - "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/refactor/satisfy" -) - -// A renamer holds state of a single call to renameObj, which renames -// an object (or several coupled objects) within a single type-checked -// syntax package. -type renamer struct { - pkg *cache.Package // the syntax package in which the renaming is applied - objsToUpdate map[types.Object]bool // records progress of calls to check - conflicts []string - from, to string - satisfyConstraints map[satisfy.Constraint]bool - msets typeutil.MethodSetCache - changeMethods bool -} - -// A PrepareItem holds the result of a "prepare rename" operation: -// the source range and value of a selected identifier. -type PrepareItem struct { - Range protocol.Range - Text string -} - -// PrepareRename searches for a valid renaming at position pp. -// -// The returned usererr is intended to be displayed to the user to explain why -// the prepare fails. Probably we could eliminate the redundancy in returning -// two errors, but for now this is done defensively. -func PrepareRename(ctx context.Context, snapshot *cache.Snapshot, f file.Handle, pp protocol.Position) (_ *PrepareItem, usererr, err error) { - ctx, done := event.Start(ctx, "golang.PrepareRename") - defer done() - - // Is the cursor within the package name declaration? 
- if pgf, inPackageName, err := parsePackageNameDecl(ctx, snapshot, f, pp); err != nil { - return nil, err, err - } else if inPackageName { - item, err := prepareRenamePackageName(ctx, snapshot, pgf) - return item, err, err - } - - // Ordinary (non-package) renaming. - // - // Type-check the current package, locate the reference at the position, - // validate the object, and report its name and range. - // - // TODO(adonovan): in all cases below, we return usererr=nil, - // which means we return (nil, nil) at the protocol - // layer. This seems like a bug, or at best an exploitation of - // knowledge of VSCode-specific behavior. Can we avoid that? - pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, f.URI()) - if err != nil { - return nil, nil, err - } - pos, err := pgf.PositionPos(pp) - if err != nil { - return nil, nil, err - } - targets, node, err := objectsAt(pkg.GetTypesInfo(), pgf.File, pos) - if err != nil { - return nil, nil, err - } - var obj types.Object - for obj = range targets { - break // pick one arbitrarily - } - if err := checkRenamable(obj); err != nil { - return nil, nil, err - } - rng, err := pgf.NodeRange(node) - if err != nil { - return nil, nil, err - } - if _, isImport := node.(*ast.ImportSpec); isImport { - // We're not really renaming the import path. - rng.End = rng.Start - } - return &PrepareItem{ - Range: rng, - Text: obj.Name(), - }, nil, nil -} - -func prepareRenamePackageName(ctx context.Context, snapshot *cache.Snapshot, pgf *ParsedGoFile) (*PrepareItem, error) { - // Does the client support file renaming? - fileRenameSupported := false - for _, op := range snapshot.Options().SupportedResourceOperations { - if op == protocol.Rename { - fileRenameSupported = true - break - } - } - if !fileRenameSupported { - return nil, errors.New("can't rename package: LSP client does not support file renaming") - } - - // Check validity of the metadata for the file's containing package. - meta, err := NarrowestMetadataForFile(ctx, snapshot, pgf.URI) - if err != nil { - return nil, err - } - if meta.Name == "main" { - return nil, fmt.Errorf("can't rename package \"main\"") - } - if strings.HasSuffix(string(meta.Name), "_test") { - return nil, fmt.Errorf("can't rename x_test packages") - } - if meta.Module == nil { - return nil, fmt.Errorf("can't rename package: missing module information for package %q", meta.PkgPath) - } - if meta.Module.Path == string(meta.PkgPath) { - return nil, fmt.Errorf("can't rename package: package path %q is the same as module path %q", meta.PkgPath, meta.Module.Path) - } - - // Return the location of the package declaration. - rng, err := pgf.NodeRange(pgf.File.Name) - if err != nil { - return nil, err - } - return &PrepareItem{ - Range: rng, - Text: string(meta.Name), - }, nil -} - -func checkRenamable(obj types.Object) error { - switch obj := obj.(type) { - case *types.Var: - if obj.Embedded() { - return fmt.Errorf("can't rename embedded fields: rename the type directly or name the field") - } - case *types.Builtin, *types.Nil: - return fmt.Errorf("%s is built in and cannot be renamed", obj.Name()) - } - if obj.Pkg() == nil || obj.Pkg().Path() == "unsafe" { - // e.g. 
error.Error, unsafe.Pointer - return fmt.Errorf("%s is built in and cannot be renamed", obj.Name()) - } - if obj.Name() == "_" { - return errors.New("can't rename \"_\"") - } - return nil -} - -// Rename returns a map of TextEdits for each file modified when renaming a -// given identifier within a package and a boolean value of true for renaming -// package and false otherwise. -func Rename(ctx context.Context, snapshot *cache.Snapshot, f file.Handle, pp protocol.Position, newName string) (map[protocol.DocumentURI][]protocol.TextEdit, bool, error) { - ctx, done := event.Start(ctx, "golang.Rename") - defer done() - - if !isValidIdentifier(newName) { - return nil, false, fmt.Errorf("invalid identifier to rename: %q", newName) - } - - // Cursor within package name declaration? - _, inPackageName, err := parsePackageNameDecl(ctx, snapshot, f, pp) - if err != nil { - return nil, false, err - } - - var editMap map[protocol.DocumentURI][]diff.Edit - if inPackageName { - editMap, err = renamePackageName(ctx, snapshot, f, PackageName(newName)) - } else { - editMap, err = renameOrdinary(ctx, snapshot, f, pp, newName) - } - if err != nil { - return nil, false, err - } - - // Convert edits to protocol form. - result := make(map[protocol.DocumentURI][]protocol.TextEdit) - for uri, edits := range editMap { - // Sort and de-duplicate edits. - // - // Overlapping edits may arise in local renamings (due - // to type switch implicits) and globals ones (due to - // processing multiple package variants). - // - // We assume renaming produces diffs that are all - // replacements (no adjacent insertions that might - // become reordered) and that are either identical or - // non-overlapping. - diff.SortEdits(edits) - filtered := edits[:0] - for i, edit := range edits { - if i == 0 || edit != filtered[len(filtered)-1] { - filtered = append(filtered, edit) - } - } - edits = filtered - - // TODO(adonovan): the logic above handles repeat edits to the - // same file URI (e.g. as a member of package p and p_test) but - // is not sufficient to handle file-system level aliasing arising - // from symbolic or hard links. For that, we should use a - // robustio-FileID-keyed map. - // See https://go.dev/cl/457615 for example. - // This really occurs in practice, e.g. kubernetes has - // vendor/k8s.io/kubectl -> ../../staging/src/k8s.io/kubectl. - fh, err := snapshot.ReadFile(ctx, uri) - if err != nil { - return nil, false, err - } - data, err := fh.Content() - if err != nil { - return nil, false, err - } - m := protocol.NewMapper(uri, data) - protocolEdits, err := protocol.EditsFromDiffEdits(m, edits) - if err != nil { - return nil, false, err - } - result[uri] = protocolEdits - } - - return result, inPackageName, nil -} - -// renameOrdinary renames an ordinary (non-package) name throughout the workspace. -func renameOrdinary(ctx context.Context, snapshot *cache.Snapshot, f file.Handle, pp protocol.Position, newName string) (map[protocol.DocumentURI][]diff.Edit, error) { - // Type-check the referring package and locate the object(s). - // - // Unlike NarrowestPackageForFile, this operation prefers the - // widest variant as, for non-exported identifiers, it is the - // only package we need. (In case you're wondering why - // 'references' doesn't also want the widest variant: it - // computes the union across all variants.) 
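// Editor's sketch (hypothetical names, not part of the deleted file). Rename
// above de-duplicates edits by sorting them and then filtering adjacent
// duplicates in place, reusing the slice's backing array via edits[:0]. This
// standalone program shows that sort-then-dedup idiom on plain strings.

package main

import (
	"fmt"
	"sort"
)

// dedupSorted sorts s and removes adjacent duplicates, reusing s's storage.
func dedupSorted(s []string) []string {
	sort.Strings(s)
	out := s[:0]
	for i, v := range s {
		if i == 0 || v != out[len(out)-1] {
			out = append(out, v)
		}
	}
	return out
}

func main() {
	edits := []string{"b", "a", "b", "c", "a"}
	fmt.Println(dedupSorted(edits)) // [a b c]
}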
- var targets map[types.Object]ast.Node - var pkg *cache.Package - { - mps, err := snapshot.MetadataForFile(ctx, f.URI()) - if err != nil { - return nil, err - } - metadata.RemoveIntermediateTestVariants(&mps) - if len(mps) == 0 { - return nil, fmt.Errorf("no package metadata for file %s", f.URI()) - } - widest := mps[len(mps)-1] // widest variant may include _test.go files - pkgs, err := snapshot.TypeCheck(ctx, widest.ID) - if err != nil { - return nil, err - } - pkg = pkgs[0] - pgf, err := pkg.File(f.URI()) - if err != nil { - return nil, err // "can't happen" - } - pos, err := pgf.PositionPos(pp) - if err != nil { - return nil, err - } - objects, _, err := objectsAt(pkg.GetTypesInfo(), pgf.File, pos) - if err != nil { - return nil, err - } - targets = objects - } - - // Pick a representative object arbitrarily. - // (All share the same name, pos, and kind.) - var obj types.Object - for obj = range targets { - break - } - if obj.Name() == newName { - return nil, fmt.Errorf("old and new names are the same: %s", newName) - } - if err := checkRenamable(obj); err != nil { - return nil, err - } - - // Find objectpath, if object is exported ("" otherwise). - var declObjPath objectpath.Path - if obj.Exported() { - // objectpath.For requires the origin of a generic function or type, not an - // instantiation (a bug?). Unfortunately we can't call Func.Origin as this - // is not available in go/types@go1.18. So we take a scenic route. - // - // Note that unlike Funcs, TypeNames are always canonical (they are "left" - // of the type parameters, unlike methods). - switch obj.(type) { // avoid "obj :=" since cases reassign the var - case *types.TypeName: - if _, ok := obj.Type().(*types.TypeParam); ok { - // As with capitalized function parameters below, type parameters are - // local. - goto skipObjectPath - } - case *types.Func: - obj = funcOrigin(obj.(*types.Func)) - case *types.Var: - // TODO(adonovan): do vars need the origin treatment too? (issue #58462) - - // Function parameter and result vars that are (unusually) - // capitalized are technically exported, even though they - // cannot be referenced, because they may affect downstream - // error messages. But we can safely treat them as local. - // - // This is not merely an optimization: the renameExported - // operation gets confused by such vars. It finds them from - // objectpath, the classifies them as local vars, but as - // they came from export data they lack syntax and the - // correct scope tree (issue #61294). - if !obj.(*types.Var).IsField() && !isPackageLevel(obj) { - goto skipObjectPath - } - } - if path, err := objectpath.For(obj); err == nil { - declObjPath = path - } - skipObjectPath: - } - - // Nonexported? Search locally. - if declObjPath == "" { - var objects []types.Object - for obj := range targets { - objects = append(objects, obj) - } - editMap, _, err := renameObjects(newName, pkg, objects...) - return editMap, err - } - - // Exported: search globally. - // - // For exported package-level var/const/func/type objects, the - // search scope is just the direct importers. - // - // For exported fields and methods, the scope is the - // transitive rdeps. (The exportedness of the field's struct - // or method's receiver is irrelevant.) - transitive := false - switch obj := obj.(type) { - case *types.TypeName: - // Renaming an exported package-level type - // requires us to inspect all transitive rdeps - // in the event that the type is embedded. - // - // TODO(adonovan): opt: this is conservative - // but inefficient. 
Instead, expand the scope - // of the search only if we actually encounter - // an embedding of the type, and only then to - // the rdeps of the embedding package. - if obj.Parent() == obj.Pkg().Scope() { - transitive = true - } - - case *types.Var: - if obj.IsField() { - transitive = true // field - } - - // TODO(adonovan): opt: process only packages that - // contain a reference (xrefs) to the target field. - - case *types.Func: - if obj.Type().(*types.Signature).Recv() != nil { - transitive = true // method - } - - // It's tempting to optimize by skipping - // packages that don't contain a reference to - // the method in the xrefs index, but we still - // need to apply the satisfy check to those - // packages to find assignment statements that - // might expands the scope of the renaming. - } - - // Type-check all the packages to inspect. - declURI := protocol.URIFromPath(pkg.FileSet().File(obj.Pos()).Name()) - pkgs, err := typeCheckReverseDependencies(ctx, snapshot, declURI, transitive) - if err != nil { - return nil, err - } - - // Apply the renaming to the (initial) object. - declPkgPath := PackagePath(obj.Pkg().Path()) - return renameExported(pkgs, declPkgPath, declObjPath, newName) -} - -// funcOrigin is a go1.18-portable implementation of (*types.Func).Origin. -func funcOrigin(fn *types.Func) *types.Func { - // Method? - if fn.Type().(*types.Signature).Recv() != nil { - return typeparams.OriginMethod(fn) - } - - // Package-level function? - // (Assume the origin has the same position.) - gen := fn.Pkg().Scope().Lookup(fn.Name()) - if gen != nil && gen.Pos() == fn.Pos() { - return gen.(*types.Func) - } - - return fn -} - -// typeCheckReverseDependencies returns the type-checked packages for -// the reverse dependencies of all packages variants containing -// file declURI. The packages are in some topological order. -// -// It includes all variants (even intermediate test variants) for the -// purposes of computing reverse dependencies, but discards ITVs for -// the actual renaming work. -// -// (This neglects obscure edge cases where a _test.go file changes the -// selectors used only in an ITV, but life is short. Also sin must be -// punished.) -func typeCheckReverseDependencies(ctx context.Context, snapshot *cache.Snapshot, declURI protocol.DocumentURI, transitive bool) ([]*cache.Package, error) { - variants, err := snapshot.MetadataForFile(ctx, declURI) - if err != nil { - return nil, err - } - // variants must include ITVs for the reverse dependency - // computation, but they are filtered out before we typecheck. - allRdeps := make(map[PackageID]*metadata.Package) - for _, variant := range variants { - rdeps, err := snapshot.ReverseDependencies(ctx, variant.ID, transitive) - if err != nil { - return nil, err - } - allRdeps[variant.ID] = variant // include self - for id, meta := range rdeps { - allRdeps[id] = meta - } - } - var ids []PackageID - for id, meta := range allRdeps { - if meta.IsIntermediateTestVariant() { - continue - } - ids = append(ids, id) - } - - // Sort the packages into some topological order of the - // (unfiltered) metadata graph. - metadata.SortPostOrder(snapshot, ids) - - // Dependencies must be visited first since they can expand - // the search set. Ideally we would process the (filtered) set - // of packages in the parallel postorder of the snapshot's - // (unfiltered) metadata graph, but this is quite tricky - // without a good graph abstraction. 
- // - // For now, we visit packages sequentially in order of - // ascending height, like an inverted breadth-first search. - // - // Type checking is by far the dominant cost, so - // overlapping it with renaming may not be worthwhile. - return snapshot.TypeCheck(ctx, ids...) -} - -// renameExported renames the object denoted by (pkgPath, objPath) -// within the specified packages, along with any other objects that -// must be renamed as a consequence. The slice of packages must be -// topologically ordered. -func renameExported(pkgs []*cache.Package, declPkgPath PackagePath, declObjPath objectpath.Path, newName string) (map[protocol.DocumentURI][]diff.Edit, error) { - - // A target is a name for an object that is stable across types.Packages. - type target struct { - pkg PackagePath - obj objectpath.Path - } - - // Populate the initial set of target objects. - // This set may grow as we discover the consequences of each renaming. - // - // TODO(adonovan): strictly, each cone of reverse dependencies - // of a single variant should have its own target map that - // monotonically expands as we go up the import graph, because - // declarations in test files can alter the set of - // package-level names and change the meaning of field and - // method selectors. So if we parallelize the graph - // visitation (see above), we should also compute the targets - // as a union of dependencies. - // - // Or we could decide that the logic below is fast enough not - // to need parallelism. In small measurements so far the - // type-checking step is about 95% and the renaming only 5%. - targets := map[target]bool{{declPkgPath, declObjPath}: true} - - // Apply the renaming operation to each package. - allEdits := make(map[protocol.DocumentURI][]diff.Edit) - for _, pkg := range pkgs { - - // Resolved target objects within package pkg. - var objects []types.Object - for t := range targets { - p := pkg.DependencyTypes(t.pkg) - if p == nil { - continue // indirect dependency of no consequence - } - obj, err := objectpath.Object(p, t.obj) - if err != nil { - // Possibly a method or an unexported type - // that is not reachable through export data? - // See https://github.com/golang/go/issues/60789. - // - // TODO(adonovan): it seems unsatisfactory that Object - // should return an error for a "valid" path. Perhaps - // we should define such paths as invalid and make - // objectpath.For compute reachability? - // Would that be a compatible change? - continue - } - objects = append(objects, obj) - } - if len(objects) == 0 { - continue // no targets of consequence to this package - } - - // Apply the renaming. - editMap, moreObjects, err := renameObjects(newName, pkg, objects...) - if err != nil { - return nil, err - } - - // It is safe to concatenate the edits as they are non-overlapping - // (or identical, in which case they will be de-duped by Rename). - for uri, edits := range editMap { - allEdits[uri] = append(allEdits[uri], edits...) - } - - // Expand the search set? - for obj := range moreObjects { - objpath, err := objectpath.For(obj) - if err != nil { - continue // not exported - } - target := target{PackagePath(obj.Pkg().Path()), objpath} - targets[target] = true - - // TODO(adonovan): methods requires dynamic - // programming of the product targets x - // packages as any package might add a new - // target (from a foward dep) as a - // consequence, and any target might imply a - // new set of rdeps. See golang/go#58461. 
- } - } - - return allEdits, nil -} - -// renamePackageName renames package declarations, imports, and go.mod files. -func renamePackageName(ctx context.Context, s *cache.Snapshot, f file.Handle, newName PackageName) (map[protocol.DocumentURI][]diff.Edit, error) { - // Rename the package decl and all imports. - renamingEdits, err := renamePackage(ctx, s, f, newName) - if err != nil { - return nil, err - } - - // Update the last component of the file's enclosing directory. - oldBase := filepath.Dir(f.URI().Path()) - newPkgDir := filepath.Join(filepath.Dir(oldBase), string(newName)) - - // Update any affected replace directives in go.mod files. - // TODO(adonovan): extract into its own function. - // - // Get all workspace modules. - // TODO(adonovan): should this operate on all go.mod files, - // irrespective of whether they are included in the workspace? - modFiles := s.View().ModFiles() - for _, m := range modFiles { - fh, err := s.ReadFile(ctx, m) - if err != nil { - return nil, err - } - pm, err := s.ParseMod(ctx, fh) - if err != nil { - return nil, err - } - - modFileDir := filepath.Dir(pm.URI.Path()) - affectedReplaces := []*modfile.Replace{} - - // Check if any replace directives need to be fixed - for _, r := range pm.File.Replace { - if !strings.HasPrefix(r.New.Path, "/") && !strings.HasPrefix(r.New.Path, "./") && !strings.HasPrefix(r.New.Path, "../") { - continue - } - - replacedPath := r.New.Path - if strings.HasPrefix(r.New.Path, "./") || strings.HasPrefix(r.New.Path, "../") { - replacedPath = filepath.Join(modFileDir, r.New.Path) - } - - // TODO: Is there a risk of converting a '\' delimited replacement to a '/' delimited replacement? - if !strings.HasPrefix(filepath.ToSlash(replacedPath)+"/", filepath.ToSlash(oldBase)+"/") { - continue // not affected by the package renaming - } - - affectedReplaces = append(affectedReplaces, r) - } - - if len(affectedReplaces) == 0 { - continue - } - copied, err := modfile.Parse("", pm.Mapper.Content, nil) - if err != nil { - return nil, err - } - - for _, r := range affectedReplaces { - replacedPath := r.New.Path - if strings.HasPrefix(r.New.Path, "./") || strings.HasPrefix(r.New.Path, "../") { - replacedPath = filepath.Join(modFileDir, r.New.Path) - } - - suffix := strings.TrimPrefix(replacedPath, oldBase) - - newReplacedPath, err := filepath.Rel(modFileDir, newPkgDir+suffix) - if err != nil { - return nil, err - } - - newReplacedPath = filepath.ToSlash(newReplacedPath) - - if !strings.HasPrefix(newReplacedPath, "/") && !strings.HasPrefix(newReplacedPath, "../") { - newReplacedPath = "./" + newReplacedPath - } - - if err := copied.AddReplace(r.Old.Path, "", newReplacedPath, ""); err != nil { - return nil, err - } - } - - copied.Cleanup() - newContent, err := copied.Format() - if err != nil { - return nil, err - } - - // Calculate the edits to be made due to the change. - edits := diff.Bytes(pm.Mapper.Content, newContent) - renamingEdits[pm.URI] = append(renamingEdits[pm.URI], edits...) - } - - return renamingEdits, nil -} - -// renamePackage computes all workspace edits required to rename the package -// described by the given metadata, to newName, by renaming its package -// directory. -// -// It updates package clauses and import paths for the renamed package as well -// as any other packages affected by the directory renaming among all packages -// known to the snapshot. 
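The go.mod handling above re-points any replace directive whose relative target lies under the renamed directory. A simplified sketch of the same modfile surgery with golang.org/x/mod/modfile; the module path, directory names, and go.mod contents are assumptions for the example.

package main

import (
	"fmt"

	"golang.org/x/mod/modfile"
)

const gomod = `module example.com/app

go 1.21

replace example.com/dep => ./internal/foo
`

func main() {
	f, err := modfile.Parse("go.mod", []byte(gomod), nil)
	if err != nil {
		panic(err)
	}

	// Suppose the package directory internal/foo was renamed to internal/bar:
	// re-point the affected replace directive, then reformat the file.
	for _, r := range f.Replace {
		if r.New.Path == "./internal/foo" {
			if err := f.AddReplace(r.Old.Path, "", "./internal/bar", ""); err != nil {
				panic(err)
			}
		}
	}
	f.Cleanup()
	out, err := f.Format()
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out))
}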
-func renamePackage(ctx context.Context, s *cache.Snapshot, f file.Handle, newName PackageName) (map[protocol.DocumentURI][]diff.Edit, error) { - if strings.HasSuffix(string(newName), "_test") { - return nil, fmt.Errorf("cannot rename to _test package") - } - - // We need metadata for the relevant package and module paths. - // These should be the same for all packages containing the file. - meta, err := NarrowestMetadataForFile(ctx, s, f.URI()) - if err != nil { - return nil, err - } - - oldPkgPath := meta.PkgPath - if meta.Module == nil { - return nil, fmt.Errorf("cannot rename package: missing module information for package %q", meta.PkgPath) - } - modulePath := PackagePath(meta.Module.Path) - if modulePath == oldPkgPath { - return nil, fmt.Errorf("cannot rename package: module path %q is the same as the package path, so renaming the package directory would have no effect", modulePath) - } - - newPathPrefix := path.Join(path.Dir(string(oldPkgPath)), string(newName)) - - // We must inspect all packages, not just direct importers, - // because we also rename subpackages, which may be unrelated. - // (If the renamed package imports a subpackage it may require - // edits to both its package and import decls.) - allMetadata, err := s.AllMetadata(ctx) - if err != nil { - return nil, err - } - - // Rename package and import declarations in all relevant packages. - edits := make(map[protocol.DocumentURI][]diff.Edit) - for _, mp := range allMetadata { - // Special case: x_test packages for the renamed package will not have the - // package path as a dir prefix, but still need their package clauses - // renamed. - if mp.PkgPath == oldPkgPath+"_test" { - if err := renamePackageClause(ctx, mp, s, newName+"_test", edits); err != nil { - return nil, err - } - continue - } - - // Subtle: check this condition before checking for valid module info - // below, because we should not fail this operation if unrelated packages - // lack module info. - if !strings.HasPrefix(string(mp.PkgPath)+"/", string(oldPkgPath)+"/") { - continue // not affected by the package renaming - } - - if mp.Module == nil { - // This check will always fail under Bazel. - return nil, fmt.Errorf("cannot rename package: missing module information for package %q", mp.PkgPath) - } - - if modulePath != PackagePath(mp.Module.Path) { - continue // don't edit imports if nested package and renaming package have different module paths - } - - // Renaming a package consists of changing its import path and package name. - suffix := strings.TrimPrefix(string(mp.PkgPath), string(oldPkgPath)) - newPath := newPathPrefix + suffix - - pkgName := mp.Name - if mp.PkgPath == oldPkgPath { - pkgName = newName - - if err := renamePackageClause(ctx, mp, s, newName, edits); err != nil { - return nil, err - } - } - - imp := ImportPath(newPath) // TODO(adonovan): what if newPath has vendor/ prefix? - if err := renameImports(ctx, s, mp, imp, pkgName, edits); err != nil { - return nil, err - } - } - - return edits, nil -} - -// renamePackageClause computes edits renaming the package clause of files in -// the package described by the given metadata, to newName. -// -// Edits are written into the edits map. -func renamePackageClause(ctx context.Context, mp *metadata.Package, snapshot *cache.Snapshot, newName PackageName, edits map[protocol.DocumentURI][]diff.Edit) error { - // Rename internal references to the package in the renaming package. 
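renamePackageClause and renameImports only need the file header, so they parse in a header-only mode and emit byte-range edits for the package name and matching import paths. Below is a rough standalone equivalent using parser.ImportsOnly (the closest standard-library analogue of gopls's ParseHeader); the import paths and the edit type are invented for the sketch.

package main

import (
	"fmt"
	"go/parser"
	"go/token"
	"strconv"
)

const src = `package color

import (
	"example.com/m/internal/color/names"
	"example.com/m/other"
)
`

// edit is a minimal stand-in for gopls's diff.Edit: replace bytes [start,end).
type edit struct {
	start, end int
	new        string
}

func main() {
	fset := token.NewFileSet()
	// ImportsOnly stops after the import declarations: enough syntax to
	// rewrite the package clause and import paths, nothing more.
	f, err := parser.ParseFile(fset, "doc.go", src, parser.ImportsOnly)
	if err != nil {
		panic(err)
	}
	tok := fset.File(f.Pos())

	var edits []edit

	// Rename the package clause: color -> colour.
	edits = append(edits, edit{
		start: tok.Offset(f.Name.Pos()),
		end:   tok.Offset(f.Name.End()),
		new:   "colour",
	})

	// Rewrite import paths that point into the renamed directory.
	for _, imp := range f.Imports {
		old, err := strconv.Unquote(imp.Path.Value)
		if err != nil || old != "example.com/m/internal/color/names" {
			continue
		}
		edits = append(edits, edit{
			start: tok.Offset(imp.Path.Pos()),
			end:   tok.Offset(imp.Path.End()),
			new:   strconv.Quote("example.com/m/internal/colour/names"),
		})
	}
	fmt.Println(edits)
}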
- for _, uri := range mp.CompiledGoFiles { - fh, err := snapshot.ReadFile(ctx, uri) - if err != nil { - return err - } - f, err := snapshot.ParseGo(ctx, fh, ParseHeader) - if err != nil { - return err - } - if f.File.Name == nil { - continue // no package declaration - } - - edit, err := posEdit(f.Tok, f.File.Name.Pos(), f.File.Name.End(), string(newName)) - if err != nil { - return err - } - edits[f.URI] = append(edits[f.URI], edit) - } - - return nil -} - -// renameImports computes the set of edits to imports resulting from renaming -// the package described by the given metadata, to a package with import path -// newPath and name newName. -// -// Edits are written into the edits map. -func renameImports(ctx context.Context, snapshot *cache.Snapshot, mp *metadata.Package, newPath ImportPath, newName PackageName, allEdits map[protocol.DocumentURI][]diff.Edit) error { - rdeps, err := snapshot.ReverseDependencies(ctx, mp.ID, false) // find direct importers - if err != nil { - return err - } - - // Pass 1: rename import paths in import declarations. - needsTypeCheck := make(map[PackageID][]protocol.DocumentURI) - for _, rdep := range rdeps { - if rdep.IsIntermediateTestVariant() { - continue // for renaming, these variants are redundant - } - - for _, uri := range rdep.CompiledGoFiles { - fh, err := snapshot.ReadFile(ctx, uri) - if err != nil { - return err - } - f, err := snapshot.ParseGo(ctx, fh, ParseHeader) - if err != nil { - return err - } - if f.File.Name == nil { - continue // no package declaration - } - for _, imp := range f.File.Imports { - if rdep.DepsByImpPath[metadata.UnquoteImportPath(imp)] != mp.ID { - continue // not the import we're looking for - } - - // If the import does not explicitly specify - // a local name, then we need to invoke the - // type checker to locate references to update. - // - // TODO(adonovan): is this actually true? - // Renaming an import with a local name can still - // cause conflicts: shadowing of built-ins, or of - // package-level decls in the same or another file. - if imp.Name == nil { - needsTypeCheck[rdep.ID] = append(needsTypeCheck[rdep.ID], uri) - } - - // Create text edit for the import path (string literal). - edit, err := posEdit(f.Tok, imp.Path.Pos(), imp.Path.End(), strconv.Quote(string(newPath))) - if err != nil { - return err - } - allEdits[uri] = append(allEdits[uri], edit) - } - } - } - - // If the imported package's name hasn't changed, - // we don't need to rename references within each file. - if newName == mp.Name { - return nil - } - - // Pass 2: rename local name (types.PkgName) of imported - // package throughout one or more files of the package. - ids := make([]PackageID, 0, len(needsTypeCheck)) - for id := range needsTypeCheck { - ids = append(ids, id) - } - pkgs, err := snapshot.TypeCheck(ctx, ids...) - if err != nil { - return err - } - for i, id := range ids { - pkg := pkgs[i] - for _, uri := range needsTypeCheck[id] { - f, err := pkg.File(uri) - if err != nil { - return err - } - for _, imp := range f.File.Imports { - if imp.Name != nil { - continue // has explicit local name - } - if rdeps[id].DepsByImpPath[metadata.UnquoteImportPath(imp)] != mp.ID { - continue // not the import we're looking for - } - - pkgname := pkg.GetTypesInfo().Implicits[imp].(*types.PkgName) - - pkgScope := pkg.GetTypes().Scope() - fileScope := pkg.GetTypesInfo().Scopes[f.File] - - localName := string(newName) - try := 0 - - // Keep trying with fresh names until one succeeds. 
- // - // TODO(adonovan): fix: this loop is not sufficient to choose a name - // that is guaranteed to be conflict-free; renameObj may still fail. - // So the retry loop should be around renameObj, and we shouldn't - // bother with scopes here. - for fileScope.Lookup(localName) != nil || pkgScope.Lookup(localName) != nil { - try++ - localName = fmt.Sprintf("%s%d", newName, try) - } - - // renameObj detects various conflicts, including: - // - new name conflicts with a package-level decl in this file; - // - new name hides a package-level decl in another file that - // is actually referenced in this file; - // - new name hides a built-in that is actually referenced - // in this file; - // - a reference in this file to the old package name would - // become shadowed by an intervening declaration that - // uses the new name. - // It returns the edits if no conflict was detected. - editMap, _, err := renameObjects(localName, pkg, pkgname) - if err != nil { - return err - } - - // If the chosen local package name matches the package's - // new name, delete the change that would have inserted - // an explicit local name, which is always the lexically - // first change. - if localName == string(newName) { - edits, ok := editMap[uri] - if !ok { - return fmt.Errorf("internal error: no changes for %s", uri) - } - diff.SortEdits(edits) - editMap[uri] = edits[1:] - } - for uri, edits := range editMap { - allEdits[uri] = append(allEdits[uri], edits...) - } - } - } - } - return nil -} - -// renameObjects computes the edits to the type-checked syntax package pkg -// required to rename a set of target objects to newName. -// -// It also returns the set of objects that were found (due to -// corresponding methods and embedded fields) to require renaming as a -// consequence of the requested renamings. -// -// It returns an error if the renaming would cause a conflict. -func renameObjects(newName string, pkg *cache.Package, targets ...types.Object) (map[protocol.DocumentURI][]diff.Edit, map[types.Object]bool, error) { - r := renamer{ - pkg: pkg, - objsToUpdate: make(map[types.Object]bool), - from: targets[0].Name(), - to: newName, - } - - // A renaming initiated at an interface method indicates the - // intention to rename abstract and concrete methods as needed - // to preserve assignability. - // TODO(adonovan): pull this into the caller. - for _, obj := range targets { - if obj, ok := obj.(*types.Func); ok { - recv := obj.Type().(*types.Signature).Recv() - if recv != nil && types.IsInterface(recv.Type().Underlying()) { - r.changeMethods = true - break - } - } - } - - // Check that the renaming of the identifier is ok. - for _, obj := range targets { - r.check(obj) - if len(r.conflicts) > 0 { - // Stop at first error. - return nil, nil, fmt.Errorf("%s", strings.Join(r.conflicts, "\n")) - } - } - - editMap, err := r.update() - if err != nil { - return nil, nil, err - } - - // Remove initial targets so that only 'consequences' remain. - for _, obj := range targets { - delete(r.objsToUpdate, obj) - } - return editMap, r.objsToUpdate, nil -} - -// Rename all references to the target objects. -func (r *renamer) update() (map[protocol.DocumentURI][]diff.Edit, error) { - result := make(map[protocol.DocumentURI][]diff.Edit) - - // shouldUpdate reports whether obj is one of (or an - // instantiation of one of) the target objects. - shouldUpdate := func(obj types.Object) bool { - return containsOrigin(r.objsToUpdate, obj) - } - - // Find all identifiers in the package that define or use a - // renamed object. 
We iterate over info as it is more efficient - // than calling ast.Inspect for each of r.pkg.CompiledGoFiles(). - type item struct { - node ast.Node // Ident, ImportSpec (obj=PkgName), or CaseClause (obj=Var) - obj types.Object - isDef bool - } - var items []item - info := r.pkg.GetTypesInfo() - for id, obj := range info.Uses { - if shouldUpdate(obj) { - items = append(items, item{id, obj, false}) - } - } - for id, obj := range info.Defs { - if shouldUpdate(obj) { - items = append(items, item{id, obj, true}) - } - } - for node, obj := range info.Implicits { - if shouldUpdate(obj) { - switch node.(type) { - case *ast.ImportSpec, *ast.CaseClause: - items = append(items, item{node, obj, true}) - } - } - } - sort.Slice(items, func(i, j int) bool { - return items[i].node.Pos() < items[j].node.Pos() - }) - - // Update each identifier, and its doc comment if it is a declaration. - for _, item := range items { - pgf, ok := enclosingFile(r.pkg, item.node.Pos()) - if !ok { - bug.Reportf("edit does not belong to syntax of package %q", r.pkg) - continue - } - - // Renaming a types.PkgName may result in the addition or removal of an identifier, - // so we deal with this separately. - if pkgName, ok := item.obj.(*types.PkgName); ok && item.isDef { - edit, err := r.updatePkgName(pgf, pkgName) - if err != nil { - return nil, err - } - result[pgf.URI] = append(result[pgf.URI], edit) - continue - } - - // Workaround the unfortunate lack of a Var object - // for x in "switch x := expr.(type) {}" by adjusting - // the case clause to the switch ident. - // This may result in duplicate edits, but we de-dup later. - if _, ok := item.node.(*ast.CaseClause); ok { - path, _ := astutil.PathEnclosingInterval(pgf.File, item.obj.Pos(), item.obj.Pos()) - item.node = path[0].(*ast.Ident) - } - - // Replace the identifier with r.to. - edit, err := posEdit(pgf.Tok, item.node.Pos(), item.node.End(), r.to) - if err != nil { - return nil, err - } - - result[pgf.URI] = append(result[pgf.URI], edit) - - if !item.isDef { // uses do not have doc comments to update. - continue - } - - doc := docComment(pgf, item.node.(*ast.Ident)) - if doc == nil { - continue - } - - // Perform the rename in doc comments declared in the original package. - // go/parser strips out \r\n returns from the comment text, so go - // line-by-line through the comment text to get the correct positions. - docRegexp := regexp.MustCompile(`\b` + r.from + `\b`) // valid identifier => valid regexp - for _, comment := range doc.List { - if isDirective(comment.Text) { - continue - } - // TODO(adonovan): why are we looping over lines? - // Just run the loop body once over the entire multiline comment. - lines := strings.Split(comment.Text, "\n") - tokFile := pgf.Tok - commentLine := safetoken.Line(tokFile, comment.Pos()) - uri := protocol.URIFromPath(tokFile.Name()) - for i, line := range lines { - lineStart := comment.Pos() - if i > 0 { - lineStart = tokFile.LineStart(commentLine + i) - } - for _, locs := range docRegexp.FindAllIndex([]byte(line), -1) { - edit, err := posEdit(tokFile, lineStart+token.Pos(locs[0]), lineStart+token.Pos(locs[1]), r.to) - if err != nil { - return nil, err // can't happen - } - result[uri] = append(result[uri], edit) - } - } - } - } - - docLinkEdits, err := r.updateCommentDocLinks() - if err != nil { - return nil, err - } - for uri, edits := range docLinkEdits { - result[uri] = append(result[uri], edits...) 
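The doc-comment pass above exploits the fact that a valid Go identifier is also a valid regular expression, so occurrences in comment text can be found with a plain word-boundary pattern. A small sketch with an invented comment string:

package main

import (
	"fmt"
	"regexp"
)

func main() {
	from, to := "Buffer", "ByteBuffer"

	// A valid identifier is a valid regexp, so no quoting is needed; the \b
	// anchors keep us from touching names that merely contain the old one.
	docRegexp := regexp.MustCompile(`\b` + from + `\b`)

	comment := "// Buffer is a variable-sized Buffer of bytes; see NewBuffer."
	for _, loc := range docRegexp.FindAllStringIndex(comment, -1) {
		fmt.Printf("replace [%d,%d) %q -> %q\n", loc[0], loc[1], comment[loc[0]:loc[1]], to)
	}
	// NewBuffer is left alone: there is no word boundary before "Buffer" there.
	fmt.Println(docRegexp.ReplaceAllString(comment, to))
}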
- } - - return result, nil -} - -// updateCommentDocLinks updates each doc comment in the package -// that refers to one of the renamed objects using a doc link -// (https://golang.org/doc/comment#doclinks) such as "[pkg.Type.Method]". -func (r *renamer) updateCommentDocLinks() (map[protocol.DocumentURI][]diff.Edit, error) { - result := make(map[protocol.DocumentURI][]diff.Edit) - var docRenamers []*docLinkRenamer - for obj := range r.objsToUpdate { - if _, ok := obj.(*types.PkgName); ok { - // The dot package name will not be referenced - if obj.Name() == "." { - continue - } - - docRenamers = append(docRenamers, &docLinkRenamer{ - isDep: false, - isPkgOrType: true, - file: r.pkg.FileSet().File(obj.Pos()), - regexp: docLinkPattern("", "", obj.Name(), true), - to: r.to, - }) - continue - } - if !obj.Exported() { - continue - } - recvName := "" - // Doc links can reference only exported package-level objects - // and methods of exported package-level named types. - if !isPackageLevel(obj) { - _, isFunc := obj.(*types.Func) - if !isFunc { - continue - } - recv := obj.Type().(*types.Signature).Recv() - if recv == nil { - continue - } - recvT := recv.Type() - if ptr, ok := recvT.(*types.Pointer); ok { - recvT = ptr.Elem() - } - named, isNamed := recvT.(*types.Named) - if !isNamed { - continue - } - // Doc links can't reference interface methods. - if types.IsInterface(named.Underlying()) { - continue - } - name := named.Origin().Obj() - if !name.Exported() || !isPackageLevel(name) { - continue - } - recvName = name.Name() - } - - // Qualify objects from other packages. - pkgName := "" - if r.pkg.GetTypes() != obj.Pkg() { - pkgName = obj.Pkg().Name() - } - _, isTypeName := obj.(*types.TypeName) - docRenamers = append(docRenamers, &docLinkRenamer{ - isDep: r.pkg.GetTypes() != obj.Pkg(), - isPkgOrType: isTypeName, - packagePath: obj.Pkg().Path(), - packageName: pkgName, - recvName: recvName, - objName: obj.Name(), - regexp: docLinkPattern(pkgName, recvName, obj.Name(), isTypeName), - to: r.to, - }) - } - for _, pgf := range r.pkg.CompiledGoFiles() { - for _, d := range docRenamers { - edits, err := d.update(pgf) - if err != nil { - return nil, err - } - if len(edits) > 0 { - result[pgf.URI] = append(result[pgf.URI], edits...) - } - } - } - return result, nil -} - -// docLinkPattern returns a regular expression that matches doclinks in comments. -// It has one submatch that indicates the symbol to be updated. -func docLinkPattern(pkgName, recvName, objName string, isPkgOrType bool) *regexp.Regexp { - // The doc link may contain a leading star, e.g. [*bytes.Buffer]. - pattern := `\[\*?` - if pkgName != "" { - pattern += pkgName + `\.` - } - if recvName != "" { - pattern += recvName + `\.` - } - // The first submatch is object name. - pattern += `(` + objName + `)` - // If the object is a *types.TypeName or *types.PkgName, also need - // match the objects referenced by them, so add `(\.\w+)*`. - if isPkgOrType { - pattern += `(?:\.\w+)*` - } - // There are two type of link in comments: - // 1. url link. e.g. [text]: url - // 2. doc link. e.g. [pkg.Name] - // in order to only match the doc link, add `([^:]|$)` in the end. 
- pattern += `\](?:[^:]|$)` - - return regexp.MustCompile(pattern) -} - -// A docLinkRenamer renames doc links of forms such as these: -// -// [Func] -// [pkg.Func] -// [RecvType.Method] -// [*Type] -// [*pkg.Type] -// [*pkg.RecvType.Method] -type docLinkRenamer struct { - isDep bool // object is from a dependency package - isPkgOrType bool // object is *types.PkgName or *types.TypeName - packagePath string - packageName string // e.g. "pkg" - recvName string // e.g. "RecvType" - objName string // e.g. "Func", "Type", "Method" - to string // new name - regexp *regexp.Regexp - - file *token.File // enclosing file, if renaming *types.PkgName -} - -// update updates doc links in the package level comments. -func (r *docLinkRenamer) update(pgf *parsego.File) (result []diff.Edit, err error) { - if r.file != nil && r.file != pgf.Tok { - return nil, nil - } - pattern := r.regexp - // If the object is in dependency package, - // the imported name in the file may be different from the original package name - if r.isDep { - for _, spec := range pgf.File.Imports { - importPath, _ := strconv.Unquote(spec.Path.Value) - if importPath == r.packagePath { - // Ignore blank imports - if spec.Name == nil || spec.Name.Name == "_" || spec.Name.Name == "." { - continue - } - if spec.Name.Name != r.packageName { - pattern = docLinkPattern(spec.Name.Name, r.recvName, r.objName, r.isPkgOrType) - } - break - } - } - } - - var edits []diff.Edit - updateDocLinks := func(doc *ast.CommentGroup) error { - if doc != nil { - for _, c := range doc.List { - for _, locs := range pattern.FindAllStringSubmatchIndex(c.Text, -1) { - // The first submatch is the object name, so the locs[2:4] is the index of object name. - edit, err := posEdit(pgf.Tok, c.Pos()+token.Pos(locs[2]), c.Pos()+token.Pos(locs[3]), r.to) - if err != nil { - return err - } - edits = append(edits, edit) - } - } - } - return nil - } - - // Update package doc comments. - err = updateDocLinks(pgf.File.Doc) - if err != nil { - return nil, err - } - for _, decl := range pgf.File.Decls { - var doc *ast.CommentGroup - switch decl := decl.(type) { - case *ast.GenDecl: - doc = decl.Doc - case *ast.FuncDecl: - doc = decl.Doc - } - err = updateDocLinks(doc) - if err != nil { - return nil, err - } - } - return edits, nil -} - -// docComment returns the doc for an identifier within the specified file. -func docComment(pgf *ParsedGoFile, id *ast.Ident) *ast.CommentGroup { - nodes, _ := astutil.PathEnclosingInterval(pgf.File, id.Pos(), id.End()) - for _, node := range nodes { - switch decl := node.(type) { - case *ast.FuncDecl: - return decl.Doc - case *ast.Field: - return decl.Doc - case *ast.GenDecl: - return decl.Doc - // For {Type,Value}Spec, if the doc on the spec is absent, - // search for the enclosing GenDecl - case *ast.TypeSpec: - if decl.Doc != nil { - return decl.Doc - } - case *ast.ValueSpec: - if decl.Doc != nil { - return decl.Doc - } - case *ast.Ident: - case *ast.AssignStmt: - // *ast.AssignStmt doesn't have an associated comment group. - // So, we try to find a comment just before the identifier. - - // Try to find a comment group only for short variable declarations (:=). - if decl.Tok != token.DEFINE { - return nil - } - - identLine := safetoken.Line(pgf.Tok, id.Pos()) - for _, comment := range nodes[len(nodes)-1].(*ast.File).Comments { - if comment.Pos() > id.Pos() { - // Comment is after the identifier. 
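The pattern built by docLinkPattern can be exercised on its own. The sketch below shows its shape for a hypothetical method bytes.Buffer.Len and how the single capture group isolates just the name to rewrite, while the trailing guard skips link-definition lines of the form "[text]: url".

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Pattern for doc links such as [bytes.Buffer.Len] or [*bytes.Buffer.Len],
	// mirroring the construction above: optional *, the qualifiers, the object
	// name as the only capture group, and a trailing guard so that link
	// definition lines like "[text]: url" are not rewritten.
	pkgName, recvName, objName := "bytes", "Buffer", "Len"
	pattern := regexp.MustCompile(
		`\[\*?` + pkgName + `\.` + recvName + `\.(` + objName + `)\](?:[^:]|$)`)

	comment := "// See [bytes.Buffer.Len] for details. [bytes.Buffer.Len]: https://example.com"
	for _, m := range pattern.FindAllStringSubmatchIndex(comment, -1) {
		// m[2]:m[3] is the range of the first capture group, i.e. the bare name.
		fmt.Printf("rename %q at [%d,%d)\n", comment[m[2]:m[3]], m[2], m[3])
	}
}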
- continue - } - - lastCommentLine := safetoken.Line(pgf.Tok, comment.End()) - if lastCommentLine+1 == identLine { - return comment - } - } - default: - return nil - } - } - return nil -} - -// updatePkgName returns the updates to rename a pkgName in the import spec by -// only modifying the package name portion of the import declaration. -func (r *renamer) updatePkgName(pgf *ParsedGoFile, pkgName *types.PkgName) (diff.Edit, error) { - // Modify ImportSpec syntax to add or remove the Name as needed. - path, _ := astutil.PathEnclosingInterval(pgf.File, pkgName.Pos(), pkgName.Pos()) - if len(path) < 2 { - return diff.Edit{}, fmt.Errorf("no path enclosing interval for %s", pkgName.Name()) - } - spec, ok := path[1].(*ast.ImportSpec) - if !ok { - return diff.Edit{}, fmt.Errorf("failed to update PkgName for %s", pkgName.Name()) - } - - newText := "" - if pkgName.Imported().Name() != r.to { - newText = r.to + " " - } - - // Replace the portion (possibly empty) of the spec before the path: - // local "path" or "path" - // -> <- -><- - return posEdit(pgf.Tok, spec.Pos(), spec.Path.Pos(), newText) -} - -// parsePackageNameDecl is a convenience function that parses and -// returns the package name declaration of file fh, and reports -// whether the position ppos lies within it. -// -// Note: also used by references. -func parsePackageNameDecl(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, ppos protocol.Position) (*ParsedGoFile, bool, error) { - pgf, err := snapshot.ParseGo(ctx, fh, ParseHeader) - if err != nil { - return nil, false, err - } - // Careful: because we used ParseHeader, - // pgf.Pos(ppos) may be beyond EOF => (0, err). - pos, _ := pgf.PositionPos(ppos) - return pgf, pgf.File.Name.Pos() <= pos && pos <= pgf.File.Name.End(), nil -} - -// enclosingFile returns the CompiledGoFile of pkg that contains the specified position. -func enclosingFile(pkg *cache.Package, pos token.Pos) (*ParsedGoFile, bool) { - for _, pgf := range pkg.CompiledGoFiles() { - if pgf.File.Pos() <= pos && pos <= pgf.File.End() { - return pgf, true - } - } - return nil, false -} - -// posEdit returns an edit to replace the (start, end) range of tf with 'new'. -func posEdit(tf *token.File, start, end token.Pos, new string) (diff.Edit, error) { - startOffset, endOffset, err := safetoken.Offsets(tf, start, end) - if err != nil { - return diff.Edit{}, err - } - return diff.Edit{Start: startOffset, End: endOffset, New: new}, nil -} diff --git a/internal/golangorgx/gopls/golang/rename_check.go b/internal/golangorgx/gopls/golang/rename_check.go deleted file mode 100644 index 4264b561a7b..00000000000 --- a/internal/golangorgx/gopls/golang/rename_check.go +++ /dev/null @@ -1,957 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. -// -// Taken from golang.org/x/tools/refactor/rename. - -package golang - -// This file defines the conflict-checking portion of the rename operation. -// -// The renamer works on a single package of type-checked syntax, and -// is called in parallel for all necessary packages in the workspace, -// possibly up to the transitive reverse dependencies of the -// declaration. Finally the union of all edits and errors is computed. -// -// Renaming one object may entail renaming of others. For example: -// -// - An embedded field couples a Var (field) and a TypeName. -// So, renaming either one requires renaming the other. 
-// If the initial object is an embedded field, we must add its -// TypeName (and its enclosing package) to the renaming set; -// this is easily discovered at the outset. -// -// Conversely, if the initial object is a TypeName, we must observe -// whether any of its references (from directly importing packages) -// is coincident with an embedded field Var and, if so, initiate a -// renaming of it. -// -// - A method of an interface type is coupled to all corresponding -// methods of types that are assigned to the interface (as -// discovered by the 'satisfy' pass). As a matter of usability, we -// require that such renamings be initiated from the interface -// method, not the concrete method. - -import ( - "fmt" - "go/ast" - "go/token" - "go/types" - "path/filepath" - "reflect" - "strings" - "unicode" - - "cuelang.org/go/internal/golangorgx/gopls/cache" - "cuelang.org/go/internal/golangorgx/gopls/util/safetoken" - "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/refactor/satisfy" -) - -// errorf reports an error (e.g. conflict) and prevents file modification. -func (r *renamer) errorf(pos token.Pos, format string, args ...interface{}) { - // Conflict error messages in the old gorename tool (whence this - // logic originated) contain rich information associated with - // multiple source lines, such as: - // - // p/a.go:1:2: renaming "x" to "y" here - // p/b.go:3:4: \t would cause this reference to "y" - // p/c.go:5:5: \t to become shadowed by this intervening declaration. - // - // Unfortunately LSP provides no means to transmit the - // structure of this error, so we format the positions briefly - // using dir/file.go where dir is the base name of the parent - // directory. - - var conflict strings.Builder - - // Add prefix of (truncated) position. - if pos != token.NoPos { - // TODO(adonovan): skip position of first error if it is - // on the same line as the renaming itself. - posn := safetoken.StartPosition(r.pkg.FileSet(), pos).String() - segments := strings.Split(filepath.ToSlash(posn), "/") - if n := len(segments); n > 2 { - segments = segments[n-2:] - } - posn = strings.Join(segments, "/") - fmt.Fprintf(&conflict, "%s:", posn) - - if !strings.HasPrefix(format, "\t") { - conflict.WriteByte(' ') - } - } - - fmt.Fprintf(&conflict, format, args...) - r.conflicts = append(r.conflicts, conflict.String()) -} - -// check performs safety checks of the renaming of the 'from' object to r.to. -func (r *renamer) check(from types.Object) { - if r.objsToUpdate[from] { - return - } - r.objsToUpdate[from] = true - - // NB: order of conditions is important. - if from_, ok := from.(*types.PkgName); ok { - r.checkInFileBlock(from_) - } else if from_, ok := from.(*types.Label); ok { - r.checkLabel(from_) - } else if isPackageLevel(from) { - r.checkInPackageBlock(from) - } else if v, ok := from.(*types.Var); ok && v.IsField() { - r.checkStructField(v) - } else if f, ok := from.(*types.Func); ok && recv(f) != nil { - r.checkMethod(f) - } else if isLocal(from) { - r.checkInLexicalScope(from) - } else { - r.errorf(from.Pos(), "unexpected %s object %q (please report a bug)\n", - objectKind(from), from) - } -} - -// checkInFileBlock performs safety checks for renames of objects in the file block, -// i.e. imported package names. -func (r *renamer) checkInFileBlock(from *types.PkgName) { - // Check import name is not "init". - if r.to == "init" { - r.errorf(from.Pos(), "%q is not a valid imported package name", r.to) - } - - // Check for conflicts between file and package block. 
- if prev := from.Pkg().Scope().Lookup(r.to); prev != nil { - r.errorf(from.Pos(), "renaming this %s %q to %q would conflict", - objectKind(from), from.Name(), r.to) - r.errorf(prev.Pos(), "\twith this package member %s", - objectKind(prev)) - return // since checkInPackageBlock would report redundant errors - } - - // Check for conflicts in lexical scope. - r.checkInLexicalScope(from) -} - -// checkInPackageBlock performs safety checks for renames of -// func/var/const/type objects in the package block. -func (r *renamer) checkInPackageBlock(from types.Object) { - // Check that there are no references to the name from another - // package if the renaming would make it unexported. - if typ := r.pkg.GetTypes(); typ != from.Pkg() && ast.IsExported(r.from) && !ast.IsExported(r.to) { - if id := someUse(r.pkg.GetTypesInfo(), from); id != nil { - r.checkExport(id, typ, from) - } - } - - // Check that in the package block, "init" is a function, and never referenced. - if r.to == "init" { - kind := objectKind(from) - if kind == "func" { - // Reject if intra-package references to it exist. - for id, obj := range r.pkg.GetTypesInfo().Uses { - if obj == from { - r.errorf(from.Pos(), - "renaming this func %q to %q would make it a package initializer", - from.Name(), r.to) - r.errorf(id.Pos(), "\tbut references to it exist") - break - } - } - } else { - r.errorf(from.Pos(), "you cannot have a %s at package level named %q", - kind, r.to) - } - } - - // Check for conflicts between package block and all file blocks. - for _, f := range r.pkg.GetSyntax() { - fileScope := r.pkg.GetTypesInfo().Scopes[f] - b, prev := fileScope.LookupParent(r.to, token.NoPos) - if b == fileScope { - r.errorf(from.Pos(), "renaming this %s %q to %q would conflict", objectKind(from), from.Name(), r.to) - var prevPos token.Pos - if prev != nil { - prevPos = prev.Pos() - } - r.errorf(prevPos, "\twith this %s", objectKind(prev)) - return // since checkInPackageBlock would report redundant errors - } - } - - // Check for conflicts in lexical scope. - r.checkInLexicalScope(from) -} - -// checkInLexicalScope performs safety checks that a renaming does not -// change the lexical reference structure of the specified package. -// -// For objects in lexical scope, there are three kinds of conflicts: -// same-, sub-, and super-block conflicts. We will illustrate all three -// using this example: -// -// var x int -// var z int -// -// func f(y int) { -// print(x) -// print(y) -// } -// -// Renaming x to z encounters a "same-block conflict", because an object -// with the new name already exists, defined in the same lexical block -// as the old object. -// -// Renaming x to y encounters a "sub-block conflict", because there exists -// a reference to x from within (what would become) a hole in its scope. -// The definition of y in an (inner) sub-block would cast a shadow in -// the scope of the renamed variable. -// -// Renaming y to x encounters a "super-block conflict". This is the -// converse situation: there is an existing definition of the new name -// (x) in an (enclosing) super-block, and the renaming would create a -// hole in its scope, within which there exist references to it. The -// new name shadows the existing definition of x in the super-block. -// -// Removing the old name (and all references to it) is always safe, and -// requires no checks. 
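These three cases can be reproduced with plain go/types: type-check the example program from the comment above and ask, at the reference to x inside f, what each candidate new name already resolves to. This is a minimal sketch of the idea, not the checker that follows.

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"
)

const src = `package p

var x int
var z int

func f(y int) {
	print(x)
	print(y)
}
`

func main() {
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	info := &types.Info{Uses: make(map[*ast.Ident]types.Object)}
	pkg, err := (&types.Config{}).Check("p", fset, []*ast.File{file}, info)
	if err != nil {
		panic(err)
	}

	// Find the reference to the package-level x inside f.
	var use *ast.Ident
	for id, obj := range info.Uses {
		if id.Name == "x" && obj.Parent() == pkg.Scope() {
			use = id
		}
	}
	if use == nil {
		panic("no reference to x found")
	}

	// At that reference, see what each candidate new name already resolves to.
	inner := pkg.Scope().Innermost(use.Pos())
	for _, to := range []string{"z", "y"} {
		if _, obj := inner.LookupParent(to, use.Pos()); obj != nil {
			fmt.Printf("renaming x to %q would conflict with the %q declared at %s\n",
				to, to, fset.Position(obj.Pos()))
		}
	}
}

Renaming x to z is the same-block case, while renaming x to y is the sub-block case: the reference inside f would become shadowed by the parameter.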
-func (r *renamer) checkInLexicalScope(from types.Object) { - b := from.Parent() // the block defining the 'from' object - if b != nil { - toBlock, to := b.LookupParent(r.to, from.Parent().End()) - if toBlock == b { - // same-block conflict - r.errorf(from.Pos(), "renaming this %s %q to %q", - objectKind(from), from.Name(), r.to) - r.errorf(to.Pos(), "\tconflicts with %s in same block", - objectKind(to)) - return - } else if toBlock != nil { - // Check for super-block conflict. - // The name r.to is defined in a superblock. - // Is that name referenced from within this block? - forEachLexicalRef(r.pkg, to, func(id *ast.Ident, block *types.Scope) bool { - _, obj := block.LookupParent(from.Name(), id.Pos()) - if obj == from { - // super-block conflict - r.errorf(from.Pos(), "renaming this %s %q to %q", - objectKind(from), from.Name(), r.to) - r.errorf(id.Pos(), "\twould shadow this reference") - r.errorf(to.Pos(), "\tto the %s declared here", - objectKind(to)) - return false // stop - } - return true - }) - } - } - // Check for sub-block conflict. - // Is there an intervening definition of r.to between - // the block defining 'from' and some reference to it? - forEachLexicalRef(r.pkg, from, func(id *ast.Ident, block *types.Scope) bool { - // Find the block that defines the found reference. - // It may be an ancestor. - fromBlock, _ := block.LookupParent(from.Name(), id.Pos()) - // See what r.to would resolve to in the same scope. - toBlock, to := block.LookupParent(r.to, id.Pos()) - if to != nil { - // sub-block conflict - if deeper(toBlock, fromBlock) { - r.errorf(from.Pos(), "renaming this %s %q to %q", - objectKind(from), from.Name(), r.to) - r.errorf(id.Pos(), "\twould cause this reference to become shadowed") - r.errorf(to.Pos(), "\tby this intervening %s definition", - objectKind(to)) - return false // stop - } - } - return true - }) - - // Renaming a type that is used as an embedded field - // requires renaming the field too. e.g. - // type T int // if we rename this to U.. - // var s struct {T} - // print(s.T) // ...this must change too - if _, ok := from.(*types.TypeName); ok { - for id, obj := range r.pkg.GetTypesInfo().Uses { - if obj == from { - if field := r.pkg.GetTypesInfo().Defs[id]; field != nil { - r.check(field) - } - } - } - } -} - -// deeper reports whether block x is lexically deeper than y. -func deeper(x, y *types.Scope) bool { - if x == y || x == nil { - return false - } else if y == nil { - return true - } else { - return deeper(x.Parent(), y.Parent()) - } -} - -// Scope and Position -// -// Consider a function f declared as: -// -// func f[T *U, U *T](p, q T) (r, s U) { var ( v T; w = v ); type (t *t; u t) } -// ^ ^ ^ ^ ^ ^ -/// {T,U} {p,q,r,s} v w t u -// -// All objects {T, U, p, q, r, s, local} belong to the same lexical -// block, the function scope, which is found in types.Info.Scopes -// for f's FuncType. (A function body's BlockStmt does not have -// an associated scope; only nested BlockStmts do.) -// -// The effective scope of each object is different: -// -// - The type parameters T and U, whose constraints may refer to each -// other, all have a scope that starts at the beginning of the -// FuncDecl.Type.Func token. -// -// - The parameter and result variables {p,q,r,s} can reference the -// type parameters but not each other, so their scopes all start at -// the end of the FuncType. -// (Prior to go1.22 it was--incorrectly--unset; see #64295). 
-// Beware also that Scope.Innermost does not currently work correctly for -// type parameters: it returns the scope of the package, not the function. -// -// - Each const or var {v,w} declared within the function body has a -// scope that begins at the end of its ValueSpec, or after the -// AssignStmt for a var declared by ":=". -// -// - Each type {t,u} in the body has a scope that that begins at -// the start of the TypeSpec, so they can be self-recursive -// but--unlike package-level types--not mutually recursive. - -// forEachLexicalRef calls fn(id, block) for each identifier id in package -// pkg that is a reference to obj in lexical scope. block is the -// lexical block enclosing the reference. If fn returns false the -// iteration is terminated and findLexicalRefs returns false. -func forEachLexicalRef(pkg *cache.Package, obj types.Object, fn func(id *ast.Ident, block *types.Scope) bool) bool { - ok := true - var stack []ast.Node - - var visit func(n ast.Node) bool - visit = func(n ast.Node) bool { - if n == nil { - stack = stack[:len(stack)-1] // pop - return false - } - if !ok { - return false // bail out - } - - stack = append(stack, n) // push - switch n := n.(type) { - case *ast.Ident: - if pkg.GetTypesInfo().Uses[n] == obj { - block := enclosingBlock(pkg.GetTypesInfo(), stack) - if !fn(n, block) { - ok = false - } - } - return visit(nil) // pop stack - - case *ast.SelectorExpr: - // don't visit n.Sel - ast.Inspect(n.X, visit) - return visit(nil) // pop stack, don't descend - - case *ast.CompositeLit: - // Handle recursion ourselves for struct literals - // so we don't visit field identifiers. - tv, ok := pkg.GetTypesInfo().Types[n] - if !ok { - return visit(nil) // pop stack, don't descend - } - // TODO(adonovan): fix: for generics, should be T.core not T.underlying. - if _, ok := Deref(tv.Type).Underlying().(*types.Struct); ok { - if n.Type != nil { - ast.Inspect(n.Type, visit) - } - for _, elt := range n.Elts { - if kv, ok := elt.(*ast.KeyValueExpr); ok { - ast.Inspect(kv.Value, visit) - } else { - ast.Inspect(elt, visit) - } - } - return visit(nil) // pop stack, don't descend - } - } - return true - } - - for _, f := range pkg.GetSyntax() { - ast.Inspect(f, visit) - if len(stack) != 0 { - panic(stack) - } - if !ok { - break - } - } - return ok -} - -// enclosingBlock returns the innermost block logically enclosing the -// specified AST node (an ast.Ident), specified in the form of a path -// from the root of the file, [file...n]. -func enclosingBlock(info *types.Info, stack []ast.Node) *types.Scope { - for i := range stack { - n := stack[len(stack)-1-i] - // For some reason, go/types always associates a - // function's scope with its FuncType. - // See comments about scope above. - switch f := n.(type) { - case *ast.FuncDecl: - n = f.Type - case *ast.FuncLit: - n = f.Type - } - if b := info.Scopes[n]; b != nil { - return b - } - } - panic("no Scope for *ast.File") -} - -func (r *renamer) checkLabel(label *types.Label) { - // Check there are no identical labels in the function's label block. - // (Label blocks don't nest, so this is easy.) - if prev := label.Parent().Lookup(r.to); prev != nil { - r.errorf(label.Pos(), "renaming this label %q to %q", label.Name(), prev.Name()) - r.errorf(prev.Pos(), "\twould conflict with this one") - } -} - -// checkStructField checks that the field renaming will not cause -// conflicts at its declaration, or ambiguity or changes to any selection. 
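The declaration-conflict half of this check reduces to a types.LookupFieldOrMethod call whose index has length 1, meaning a direct (non-promoted) member of the same struct or its method set. A small sketch with an invented type S:

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"
)

const src = `package p

type S struct {
	A int
	B int
}

func (S) C() {}
`

func main() {
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	pkg, err := (&types.Config{}).Check("p", fset, []*ast.File{file}, nil)
	if err != nil {
		panic(err)
	}
	named := pkg.Scope().Lookup("S").Type()

	// Could the field A be renamed to each candidate without clashing
	// with a direct field or method of S?
	for _, to := range []string{"B", "C", "D"} {
		obj, index, _ := types.LookupFieldOrMethod(named, true, pkg, to)
		if obj != nil && len(index) == 1 {
			fmt.Printf("renaming A to %s would conflict with the existing %s\n", to, obj)
		} else {
			fmt.Printf("renaming A to %s is free of declaration conflicts\n", to)
		}
	}
}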
-func (r *renamer) checkStructField(from *types.Var) { - - // If this is the declaring package, check that the struct - // declaration is free of field conflicts, and field/method - // conflicts. - // - // go/types offers no easy way to get from a field (or interface - // method) to its declaring struct (or interface), so we must - // ascend the AST. - if pgf, ok := enclosingFile(r.pkg, from.Pos()); ok { - path, _ := astutil.PathEnclosingInterval(pgf.File, from.Pos(), from.Pos()) - // path matches this pattern: - // [Ident SelectorExpr? StarExpr? Field FieldList StructType ParenExpr* ... File] - - // Ascend to FieldList. - var i int - for { - if _, ok := path[i].(*ast.FieldList); ok { - break - } - i++ - } - i++ - tStruct := path[i].(*ast.StructType) - i++ - // Ascend past parens (unlikely). - for { - _, ok := path[i].(*ast.ParenExpr) - if !ok { - break - } - i++ - } - if spec, ok := path[i].(*ast.TypeSpec); ok { - // This struct is also a named type. - // We must check for direct (non-promoted) field/field - // and method/field conflicts. - named := r.pkg.GetTypesInfo().Defs[spec.Name].Type() - prev, indices, _ := types.LookupFieldOrMethod(named, true, r.pkg.GetTypes(), r.to) - if len(indices) == 1 { - r.errorf(from.Pos(), "renaming this field %q to %q", - from.Name(), r.to) - r.errorf(prev.Pos(), "\twould conflict with this %s", - objectKind(prev)) - return // skip checkSelections to avoid redundant errors - } - } else { - // This struct is not a named type. - // We need only check for direct (non-promoted) field/field conflicts. - T := r.pkg.GetTypesInfo().Types[tStruct].Type.Underlying().(*types.Struct) - for i := 0; i < T.NumFields(); i++ { - if prev := T.Field(i); prev.Name() == r.to { - r.errorf(from.Pos(), "renaming this field %q to %q", - from.Name(), r.to) - r.errorf(prev.Pos(), "\twould conflict with this field") - return // skip checkSelections to avoid redundant errors - } - } - } - } - - // Renaming an anonymous field requires renaming the type too. e.g. - // print(s.T) // if we rename T to U, - // type T int // this and - // var s struct {T} // this must change too. - if from.Anonymous() { - if named, ok := from.Type().(*types.Named); ok { - r.check(named.Obj()) - } else if named, ok := Deref(from.Type()).(*types.Named); ok { - r.check(named.Obj()) - } - } - - // Check integrity of existing (field and method) selections. - r.checkSelections(from) -} - -// checkSelections checks that all uses and selections that resolve to -// the specified object would continue to do so after the renaming. -func (r *renamer) checkSelections(from types.Object) { - pkg := r.pkg - typ := pkg.GetTypes() - { - if id := someUse(pkg.GetTypesInfo(), from); id != nil { - if !r.checkExport(id, typ, from) { - return - } - } - - for syntax, sel := range pkg.GetTypesInfo().Selections { - // There may be extant selections of only the old - // name or only the new name, so we must check both. - // (If neither, the renaming is sound.) - // - // In both cases, we wish to compare the lengths - // of the implicit field path (Selection.Index) - // to see if the renaming would change it. - // - // If a selection that resolves to 'from', when renamed, - // would yield a path of the same or shorter length, - // this indicates ambiguity or a changed referent, - // analogous to same- or sub-block lexical conflict. - // - // If a selection using the name 'to' would - // yield a path of the same or shorter length, - // this indicates ambiguity or shadowing, - // analogous to same- or super-block lexical conflict. 
- - // TODO(adonovan): fix: derive from Types[syntax.X].Mode - // TODO(adonovan): test with pointer, value, addressable value. - isAddressable := true - - if sel.Obj() == from { - if obj, indices, _ := types.LookupFieldOrMethod(sel.Recv(), isAddressable, from.Pkg(), r.to); obj != nil { - // Renaming this existing selection of - // 'from' may block access to an existing - // type member named 'to'. - delta := len(indices) - len(sel.Index()) - if delta > 0 { - continue // no ambiguity - } - r.selectionConflict(from, delta, syntax, obj) - return - } - } else if sel.Obj().Name() == r.to { - if obj, indices, _ := types.LookupFieldOrMethod(sel.Recv(), isAddressable, from.Pkg(), from.Name()); obj == from { - // Renaming 'from' may cause this existing - // selection of the name 'to' to change - // its meaning. - delta := len(indices) - len(sel.Index()) - if delta > 0 { - continue // no ambiguity - } - r.selectionConflict(from, -delta, syntax, sel.Obj()) - return - } - } - } - } -} - -func (r *renamer) selectionConflict(from types.Object, delta int, syntax *ast.SelectorExpr, obj types.Object) { - r.errorf(from.Pos(), "renaming this %s %q to %q", - objectKind(from), from.Name(), r.to) - - switch { - case delta < 0: - // analogous to sub-block conflict - r.errorf(syntax.Sel.Pos(), - "\twould change the referent of this selection") - r.errorf(obj.Pos(), "\tof this %s", objectKind(obj)) - case delta == 0: - // analogous to same-block conflict - r.errorf(syntax.Sel.Pos(), - "\twould make this reference ambiguous") - r.errorf(obj.Pos(), "\twith this %s", objectKind(obj)) - case delta > 0: - // analogous to super-block conflict - r.errorf(syntax.Sel.Pos(), - "\twould shadow this selection") - r.errorf(obj.Pos(), "\tof the %s declared here", - objectKind(obj)) - } -} - -// checkMethod performs safety checks for renaming a method. -// There are three hazards: -// - declaration conflicts -// - selection ambiguity/changes -// - entailed renamings of assignable concrete/interface types. -// -// We reject renamings initiated at concrete methods if it would -// change the assignability relation. For renamings of abstract -// methods, we rename all methods transitively coupled to it via -// assignability. -func (r *renamer) checkMethod(from *types.Func) { - // e.g. error.Error - if from.Pkg() == nil { - r.errorf(from.Pos(), "you cannot rename built-in method %s", from) - return - } - - // ASSIGNABILITY: We reject renamings of concrete methods that - // would break a 'satisfy' constraint; but renamings of abstract - // methods are allowed to proceed, and we rename affected - // concrete and abstract methods as necessary. It is the - // initial method that determines the policy. - - // Check for conflict at point of declaration. - // Check to ensure preservation of assignability requirements. - R := recv(from).Type() - if types.IsInterface(R) { - // Abstract method - - // declaration - prev, _, _ := types.LookupFieldOrMethod(R, false, from.Pkg(), r.to) - if prev != nil { - r.errorf(from.Pos(), "renaming this interface method %q to %q", - from.Name(), r.to) - r.errorf(prev.Pos(), "\twould conflict with this method") - return - } - - // Check all interfaces that embed this one for - // declaration conflicts too. 
- { - // Start with named interface types (better errors) - for _, obj := range r.pkg.GetTypesInfo().Defs { - if obj, ok := obj.(*types.TypeName); ok && types.IsInterface(obj.Type()) { - f, _, _ := types.LookupFieldOrMethod( - obj.Type(), false, from.Pkg(), from.Name()) - if f == nil { - continue - } - t, _, _ := types.LookupFieldOrMethod( - obj.Type(), false, from.Pkg(), r.to) - if t == nil { - continue - } - r.errorf(from.Pos(), "renaming this interface method %q to %q", - from.Name(), r.to) - r.errorf(t.Pos(), "\twould conflict with this method") - r.errorf(obj.Pos(), "\tin named interface type %q", obj.Name()) - } - } - - // Now look at all literal interface types (includes named ones again). - for e, tv := range r.pkg.GetTypesInfo().Types { - if e, ok := e.(*ast.InterfaceType); ok { - _ = e - _ = tv.Type.(*types.Interface) - // TODO(adonovan): implement same check as above. - } - } - } - - // assignability - // - // Find the set of concrete or abstract methods directly - // coupled to abstract method 'from' by some - // satisfy.Constraint, and rename them too. - for key := range r.satisfy() { - // key = (lhs, rhs) where lhs is always an interface. - - lsel := r.msets.MethodSet(key.LHS).Lookup(from.Pkg(), from.Name()) - if lsel == nil { - continue - } - rmethods := r.msets.MethodSet(key.RHS) - rsel := rmethods.Lookup(from.Pkg(), from.Name()) - if rsel == nil { - continue - } - - // If both sides have a method of this name, - // and one of them is m, the other must be coupled. - var coupled *types.Func - switch from { - case lsel.Obj(): - coupled = rsel.Obj().(*types.Func) - case rsel.Obj(): - coupled = lsel.Obj().(*types.Func) - default: - continue - } - - // We must treat concrete-to-interface - // constraints like an implicit selection C.f of - // each interface method I.f, and check that the - // renaming leaves the selection unchanged and - // unambiguous. - // - // Fun fact: the implicit selection of C.f - // type I interface{f()} - // type C struct{I} - // func (C) g() - // var _ I = C{} // here - // yields abstract method I.f. This can make error - // messages less than obvious. - // - if !types.IsInterface(key.RHS) { - // The logic below was derived from checkSelections. - - rtosel := rmethods.Lookup(from.Pkg(), r.to) - if rtosel != nil { - rto := rtosel.Obj().(*types.Func) - delta := len(rsel.Index()) - len(rtosel.Index()) - if delta < 0 { - continue // no ambiguity - } - - // TODO(adonovan): record the constraint's position. - keyPos := token.NoPos - - r.errorf(from.Pos(), "renaming this method %q to %q", - from.Name(), r.to) - if delta == 0 { - // analogous to same-block conflict - r.errorf(keyPos, "\twould make the %s method of %s invoked via interface %s ambiguous", - r.to, key.RHS, key.LHS) - r.errorf(rto.Pos(), "\twith (%s).%s", - recv(rto).Type(), r.to) - } else { - // analogous to super-block conflict - r.errorf(keyPos, "\twould change the %s method of %s invoked via interface %s", - r.to, key.RHS, key.LHS) - r.errorf(coupled.Pos(), "\tfrom (%s).%s", - recv(coupled).Type(), r.to) - r.errorf(rto.Pos(), "\tto (%s).%s", - recv(rto).Type(), r.to) - } - return // one error is enough - } - } - - if !r.changeMethods { - // This should be unreachable. - r.errorf(from.Pos(), "internal error: during renaming of abstract method %s", from) - r.errorf(coupled.Pos(), "\tchangedMethods=false, coupled method=%s", coupled) - r.errorf(from.Pos(), "\tPlease file a bug report") - return - } - - // Rename the coupled method to preserve assignability. 
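The coupling can be seen with plain go/types on a toy pair of types: the abstract and concrete methods are found by name in their respective method sets, and renaming only one of them would leave types.Implements false. A minimal sketch; the types I and C are invented for the example.

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"
)

const src = `package p

type I interface{ F() }

type C int

func (C) F() {}

var _ I = C(0) // the satisfy constraint: C must keep implementing I
`

func main() {
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	pkg, err := (&types.Config{}).Check("p", fset, []*ast.File{file}, nil)
	if err != nil {
		panic(err)
	}

	I := pkg.Scope().Lookup("I").Type()
	C := pkg.Scope().Lookup("C").Type()

	// The two methods named F are coupled: one abstract, one concrete.
	abstract := types.NewMethodSet(I).Lookup(pkg, "F")
	concrete := types.NewMethodSet(C).Lookup(pkg, "F")
	fmt.Println("coupled:", abstract.Obj(), "<->", concrete.Obj())

	// Renaming only C.F (or only I.F) would break this assignability:
	fmt.Println("C implements I:", types.Implements(C, I.Underlying().(*types.Interface)))
}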
- r.check(coupled) - } - } else { - // Concrete method - - // declaration - prev, indices, _ := types.LookupFieldOrMethod(R, true, from.Pkg(), r.to) - if prev != nil && len(indices) == 1 { - r.errorf(from.Pos(), "renaming this method %q to %q", - from.Name(), r.to) - r.errorf(prev.Pos(), "\twould conflict with this %s", - objectKind(prev)) - return - } - - // assignability - // - // Find the set of abstract methods coupled to concrete - // method 'from' by some satisfy.Constraint, and rename - // them too. - // - // Coupling may be indirect, e.g. I.f <-> C.f via type D. - // - // type I interface {f()} - // type C int - // type (C) f() - // type D struct{C} - // var _ I = D{} - // - for key := range r.satisfy() { - // key = (lhs, rhs) where lhs is always an interface. - if types.IsInterface(key.RHS) { - continue - } - rsel := r.msets.MethodSet(key.RHS).Lookup(from.Pkg(), from.Name()) - if rsel == nil || rsel.Obj() != from { - continue // rhs does not have the method - } - lsel := r.msets.MethodSet(key.LHS).Lookup(from.Pkg(), from.Name()) - if lsel == nil { - continue - } - imeth := lsel.Obj().(*types.Func) - - // imeth is the abstract method (e.g. I.f) - // and key.RHS is the concrete coupling type (e.g. D). - if !r.changeMethods { - r.errorf(from.Pos(), "renaming this method %q to %q", - from.Name(), r.to) - var pos token.Pos - var iface string - - I := recv(imeth).Type() - if named, ok := I.(*types.Named); ok { - pos = named.Obj().Pos() - iface = "interface " + named.Obj().Name() - } else { - pos = from.Pos() - iface = I.String() - } - r.errorf(pos, "\twould make %s no longer assignable to %s", - key.RHS, iface) - r.errorf(imeth.Pos(), "\t(rename %s.%s if you intend to change both types)", - I, from.Name()) - return // one error is enough - } - - // Rename the coupled interface method to preserve assignability. - r.check(imeth) - } - } - - // Check integrity of existing (field and method) selections. - // We skip this if there were errors above, to avoid redundant errors. - r.checkSelections(from) -} - -func (r *renamer) checkExport(id *ast.Ident, pkg *types.Package, from types.Object) bool { - // Reject cross-package references if r.to is unexported. - // (Such references may be qualified identifiers or field/method - // selections.) - if !ast.IsExported(r.to) && pkg != from.Pkg() { - r.errorf(from.Pos(), - "renaming %q to %q would make it unexported", - from.Name(), r.to) - r.errorf(id.Pos(), "\tbreaking references from packages such as %q", - pkg.Path()) - return false - } - return true -} - -// satisfy returns the set of interface satisfaction constraints. -func (r *renamer) satisfy() map[satisfy.Constraint]bool { - if r.satisfyConstraints == nil { - // Compute on demand: it's expensive. - var f satisfy.Finder - pkg := r.pkg - { - // From satisfy.Finder documentation: - // - // The package must be free of type errors, and - // info.{Defs,Uses,Selections,Types} must have been populated by the - // type-checker. - // - // Only proceed if all packages have no errors. - if len(pkg.GetParseErrors()) > 0 || len(pkg.GetTypeErrors()) > 0 { - r.errorf(token.NoPos, // we don't have a position for this error. - "renaming %q to %q not possible because %q has errors", - r.from, r.to, pkg.Metadata().PkgPath) - return nil - } - f.Find(pkg.GetTypesInfo(), pkg.GetSyntax()) - } - r.satisfyConstraints = f.Result - } - return r.satisfyConstraints -} - -// -- helpers ---------------------------------------------------------- - -// recv returns the method's receiver. 
-func recv(meth *types.Func) *types.Var { - return meth.Type().(*types.Signature).Recv() -} - -// someUse returns an arbitrary use of obj within info. -func someUse(info *types.Info, obj types.Object) *ast.Ident { - for id, o := range info.Uses { - if o == obj { - return id - } - } - return nil -} - -func objectKind(obj types.Object) string { - if obj == nil { - return "nil object" - } - switch obj := obj.(type) { - case *types.PkgName: - return "imported package name" - case *types.TypeName: - return "type" - case *types.Var: - if obj.IsField() { - return "field" - } - case *types.Func: - if obj.Type().(*types.Signature).Recv() != nil { - return "method" - } - } - // label, func, var, const - return strings.ToLower(strings.TrimPrefix(reflect.TypeOf(obj).String(), "*types.")) -} - -// NB: for renamings, blank is not considered valid. -func isValidIdentifier(id string) bool { - if id == "" || id == "_" { - return false - } - for i, r := range id { - if !isLetter(r) && (i == 0 || !isDigit(r)) { - return false - } - } - return token.Lookup(id) == token.IDENT -} - -// isLocal reports whether obj is local to some function. -// Precondition: not a struct field or interface method. -func isLocal(obj types.Object) bool { - // [... 5=stmt 4=func 3=file 2=pkg 1=universe] - var depth int - for scope := obj.Parent(); scope != nil; scope = scope.Parent() { - depth++ - } - return depth >= 4 -} - -func isPackageLevel(obj types.Object) bool { - if obj == nil { - return false - } - return obj.Pkg().Scope().Lookup(obj.Name()) == obj -} - -// -- Plundered from go/scanner: --------------------------------------- - -func isLetter(ch rune) bool { - return 'a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z' || ch == '_' || ch >= 0x80 && unicode.IsLetter(ch) -} - -func isDigit(ch rune) bool { - return '0' <= ch && ch <= '9' || ch >= 0x80 && unicode.IsDigit(ch) -} diff --git a/internal/golangorgx/gopls/golang/signature_help.go b/internal/golangorgx/gopls/golang/signature_help.go deleted file mode 100644 index f8c2cc3d7c4..00000000000 --- a/internal/golangorgx/gopls/golang/signature_help.go +++ /dev/null @@ -1,204 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package golang - -import ( - "context" - "fmt" - "go/ast" - "go/token" - "go/types" - "strings" - - "cuelang.org/go/internal/golangorgx/gopls/cache" - "cuelang.org/go/internal/golangorgx/gopls/file" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/settings" - "cuelang.org/go/internal/golangorgx/gopls/util/bug" - "cuelang.org/go/internal/golangorgx/gopls/util/typesutil" - "cuelang.org/go/internal/golangorgx/tools/event" - "golang.org/x/tools/go/ast/astutil" -) - -func SignatureHelp(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, position protocol.Position) (*protocol.SignatureInformation, int, error) { - ctx, done := event.Start(ctx, "golang.SignatureHelp") - defer done() - - // We need full type-checking here, as we must type-check function bodies in - // order to provide signature help at the requested position. - pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) - if err != nil { - return nil, 0, fmt.Errorf("getting file for SignatureHelp: %w", err) - } - pos, err := pgf.PositionPos(position) - if err != nil { - return nil, 0, err - } - // Find a call expression surrounding the query position. 
- var callExpr *ast.CallExpr - path, _ := astutil.PathEnclosingInterval(pgf.File, pos, pos) - if path == nil { - return nil, 0, fmt.Errorf("cannot find node enclosing position") - } -FindCall: - for _, node := range path { - switch node := node.(type) { - case *ast.CallExpr: - if pos >= node.Lparen && pos <= node.Rparen { - callExpr = node - break FindCall - } - case *ast.FuncLit, *ast.FuncType: - // The user is within an anonymous function, - // which may be the parameter to the *ast.CallExpr. - // Don't show signature help in this case. - return nil, 0, fmt.Errorf("no signature help within a function declaration") - case *ast.BasicLit: - if node.Kind == token.STRING { - return nil, 0, fmt.Errorf("no signature help within a string literal") - } - } - - } - if callExpr == nil || callExpr.Fun == nil { - return nil, 0, fmt.Errorf("cannot find an enclosing function") - } - - info := pkg.GetTypesInfo() - - // Get the type information for the function being called. - var sig *types.Signature - if tv, ok := info.Types[callExpr.Fun]; !ok { - return nil, 0, fmt.Errorf("cannot get type for Fun %[1]T (%[1]v)", callExpr.Fun) - } else if tv.IsType() { - return nil, 0, fmt.Errorf("this is a conversion to %s, not a call", tv.Type) - } else if sig, ok = tv.Type.Underlying().(*types.Signature); !ok { - return nil, 0, fmt.Errorf("cannot find signature for Fun %[1]T (%[1]v)", callExpr.Fun) - } - // Inv: sig != nil - - qf := typesutil.FileQualifier(pgf.File, pkg.GetTypes(), info) - - // Get the object representing the function, if available. - // There is no object in certain cases such as calling a function returned by - // a function (e.g. "foo()()"). - var obj types.Object - switch t := callExpr.Fun.(type) { - case *ast.Ident: - obj = info.ObjectOf(t) - case *ast.SelectorExpr: - obj = info.ObjectOf(t.Sel) - } - - // Call to built-in? - if obj != nil && !obj.Pos().IsValid() { - // function? - if obj, ok := obj.(*types.Builtin); ok { - return builtinSignature(ctx, snapshot, callExpr, obj.Name(), pos) - } - - // method (only error.Error)? 
- if fn, ok := obj.(*types.Func); ok && fn.Name() == "Error" { - return &protocol.SignatureInformation{ - Label: "Error()", - Documentation: stringToSigInfoDocumentation("Error returns the error message.", snapshot.Options()), - }, 0, nil - } - - return nil, 0, bug.Errorf("call to unexpected built-in %v (%T)", obj, obj) - } - - activeParam := activeParameter(callExpr, sig.Params().Len(), sig.Variadic(), pos) - - var ( - name string - comment *ast.CommentGroup - ) - if obj != nil { - d, err := HoverDocForObject(ctx, snapshot, pkg.FileSet(), obj) - if err != nil { - return nil, 0, err - } - name = obj.Name() - comment = d - } else { - name = "func" - } - mq := MetadataQualifierForFile(snapshot, pgf.File, pkg.Metadata()) - s, err := NewSignature(ctx, snapshot, pkg, sig, comment, qf, mq) - if err != nil { - return nil, 0, err - } - paramInfo := make([]protocol.ParameterInformation, 0, len(s.params)) - for _, p := range s.params { - paramInfo = append(paramInfo, protocol.ParameterInformation{Label: p}) - } - return &protocol.SignatureInformation{ - Label: name + s.Format(), - Documentation: stringToSigInfoDocumentation(s.doc, snapshot.Options()), - Parameters: paramInfo, - }, activeParam, nil -} - -func builtinSignature(ctx context.Context, snapshot *cache.Snapshot, callExpr *ast.CallExpr, name string, pos token.Pos) (*protocol.SignatureInformation, int, error) { - sig, err := NewBuiltinSignature(ctx, snapshot, name) - if err != nil { - return nil, 0, err - } - paramInfo := make([]protocol.ParameterInformation, 0, len(sig.params)) - for _, p := range sig.params { - paramInfo = append(paramInfo, protocol.ParameterInformation{Label: p}) - } - activeParam := activeParameter(callExpr, len(sig.params), sig.variadic, pos) - return &protocol.SignatureInformation{ - Label: sig.name + sig.Format(), - Documentation: stringToSigInfoDocumentation(sig.doc, snapshot.Options()), - Parameters: paramInfo, - }, activeParam, nil -} - -func activeParameter(callExpr *ast.CallExpr, numParams int, variadic bool, pos token.Pos) (activeParam int) { - if len(callExpr.Args) == 0 { - return 0 - } - // First, check if the position is even in the range of the arguments. - start, end := callExpr.Lparen, callExpr.Rparen - if !(start <= pos && pos <= end) { - return 0 - } - for _, expr := range callExpr.Args { - if start == token.NoPos { - start = expr.Pos() - } - end = expr.End() - if start <= pos && pos <= end { - break - } - // Don't advance the active parameter for the last parameter of a variadic function. - if !variadic || activeParam < numParams-1 { - activeParam++ - } - start = expr.Pos() + 1 // to account for commas - } - return activeParam -} - -func stringToSigInfoDocumentation(s string, options *settings.Options) *protocol.Or_SignatureInformation_documentation { - v := s - k := protocol.PlainText - if options.PreferredContentFormat == protocol.Markdown { - v = CommentToMarkdown(s, options) - // whether or not content is newline terminated may not matter for LSP clients, - // but our tests expect trailing newlines to be stripped. - v = strings.TrimSuffix(v, "\n") // TODO(pjw): change the golden files - k = protocol.Markdown - } - return &protocol.Or_SignatureInformation_documentation{ - Value: protocol.MarkupContent{ - Kind: k, - Value: v, - }, - } -} diff --git a/internal/golangorgx/gopls/golang/stub.go b/internal/golangorgx/gopls/golang/stub.go deleted file mode 100644 index 49d8c82d1d3..00000000000 --- a/internal/golangorgx/gopls/golang/stub.go +++ /dev/null @@ -1,335 +0,0 @@ -// Copyright 2022 The Go Authors. 
All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package golang - -import ( - "bytes" - "context" - "fmt" - "go/format" - "go/parser" - "go/token" - "go/types" - "io" - pathpkg "path" - "strings" - - "cuelang.org/go/internal/golangorgx/gopls/analysis/stubmethods" - "cuelang.org/go/internal/golangorgx/gopls/cache" - "cuelang.org/go/internal/golangorgx/gopls/cache/metadata" - "cuelang.org/go/internal/golangorgx/gopls/cache/parsego" - "cuelang.org/go/internal/golangorgx/gopls/util/bug" - "cuelang.org/go/internal/golangorgx/gopls/util/safetoken" - "cuelang.org/go/internal/golangorgx/tools/diff" - "cuelang.org/go/internal/golangorgx/tools/tokeninternal" - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/ast/astutil" -) - -// stubMethodsFixer returns a suggested fix to declare the missing -// methods of the concrete type that is assigned to an interface type -// at the cursor position. -func stubMethodsFixer(ctx context.Context, snapshot *cache.Snapshot, pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) { - nodes, _ := astutil.PathEnclosingInterval(pgf.File, start, end) - si := stubmethods.GetStubInfo(pkg.FileSet(), pkg.GetTypesInfo(), nodes, start) - if si == nil { - return nil, nil, fmt.Errorf("nil interface request") - } - - // A function-local type cannot be stubbed - // since there's nowhere to put the methods. - conc := si.Concrete.Obj() - if conc.Parent() != conc.Pkg().Scope() { - return nil, nil, fmt.Errorf("local type %q cannot be stubbed", conc.Name()) - } - - // Parse the file declaring the concrete type. - // - // Beware: declPGF is not necessarily covered by pkg.FileSet() or si.Fset. - declPGF, _, err := parseFull(ctx, snapshot, si.Fset, conc.Pos()) - if err != nil { - return nil, nil, fmt.Errorf("failed to parse file %q declaring implementation type: %w", declPGF.URI, err) - } - if declPGF.Fixed() { - return nil, nil, fmt.Errorf("file contains parse errors: %s", declPGF.URI) - } - - // Find metadata for the concrete type's declaring package - // as we'll need its import mapping. - declMeta := findFileInDeps(snapshot, pkg.Metadata(), declPGF.URI) - if declMeta == nil { - return nil, nil, bug.Errorf("can't find metadata for file %s among dependencies of %s", declPGF.URI, pkg) - } - - // Record all direct methods of the current object - concreteFuncs := make(map[string]struct{}) - for i := 0; i < si.Concrete.NumMethods(); i++ { - concreteFuncs[si.Concrete.Method(i).Name()] = struct{}{} - } - - // Find subset of interface methods that the concrete type lacks. - ifaceType := si.Interface.Type().Underlying().(*types.Interface) - - type missingFn struct { - fn *types.Func - needSubtle string - } - - var ( - missing []missingFn - concreteStruct, isStruct = si.Concrete.Origin().Underlying().(*types.Struct) - ) - - for i := 0; i < ifaceType.NumMethods(); i++ { - imethod := ifaceType.Method(i) - cmethod, index, _ := types.LookupFieldOrMethod(si.Concrete, si.Pointer, imethod.Pkg(), imethod.Name()) - if cmethod == nil { - missing = append(missing, missingFn{fn: imethod}) - continue - } - - if _, ok := cmethod.(*types.Var); ok { - // len(LookupFieldOrMethod.index) = 1 => conflict, >1 => shadow. 
- return nil, nil, fmt.Errorf("adding method %s.%s would conflict with (or shadow) existing field", - conc.Name(), imethod.Name()) - } - - if _, exist := concreteFuncs[imethod.Name()]; exist { - if !types.Identical(cmethod.Type(), imethod.Type()) { - return nil, nil, fmt.Errorf("method %s.%s already exists but has the wrong type: got %s, want %s", - conc.Name(), imethod.Name(), cmethod.Type(), imethod.Type()) - } - continue - } - - mf := missingFn{fn: imethod} - if isStruct && len(index) > 0 { - field := concreteStruct.Field(index[0]) - - fn := field.Name() - if _, ok := field.Type().(*types.Pointer); ok { - fn = "*" + fn - } - - mf.needSubtle = fmt.Sprintf("// Subtle: this method shadows the method (%s).%s of %s.%s.\n", fn, imethod.Name(), si.Concrete.Obj().Name(), field.Name()) - } - - missing = append(missing, mf) - } - if len(missing) == 0 { - return nil, nil, fmt.Errorf("no missing methods found") - } - - // Build import environment for the declaring file. - // (typesutil.FileQualifier works only for complete - // import mappings, and requires types.) - importEnv := make(map[ImportPath]string) // value is local name - for _, imp := range declPGF.File.Imports { - importPath := metadata.UnquoteImportPath(imp) - var name string - if imp.Name != nil { - name = imp.Name.Name - if name == "_" { - continue - } else if name == "." { - name = "" // see types.Qualifier - } - } else { - // Use the correct name from the metadata of the imported - // package---not a guess based on the import path. - mp := snapshot.Metadata(declMeta.DepsByImpPath[importPath]) - if mp == nil { - continue // can't happen? - } - name = string(mp.Name) - } - importEnv[importPath] = name // latest alias wins - } - - // Create a package name qualifier that uses the - // locally appropriate imported package name. - // It records any needed new imports. - // TODO(adonovan): factor with golang.FormatVarType? - // - // Prior to CL 469155 this logic preserved any renaming - // imports from the file that declares the interface - // method--ostensibly the preferred name for imports of - // frequently renamed packages such as protobufs. - // Now we use the package's declared name. If this turns out - // to be a mistake, then use parseHeader(si.iface.Pos()). - // - type newImport struct{ name, importPath string } - var newImports []newImport // for AddNamedImport - qual := func(pkg *types.Package) string { - // TODO(adonovan): don't ignore vendor prefix. - // - // Ignore the current package import. - if pkg.Path() == conc.Pkg().Path() { - return "" - } - - importPath := ImportPath(pkg.Path()) - name, ok := importEnv[importPath] - if !ok { - // Insert new import using package's declared name. - // - // TODO(adonovan): resolve conflict between declared - // name and existing file-level (declPGF.File.Imports) - // or package-level (si.Concrete.Pkg.Scope) decls by - // generating a fresh name. - name = pkg.Name() - importEnv[importPath] = name - new := newImport{importPath: string(importPath)} - // For clarity, use a renaming import whenever the - // local name does not match the path's last segment. - if name != pathpkg.Base(trimVersionSuffix(new.importPath)) { - new.name = name - } - newImports = append(newImports, new) - } - return name - } - - // Format interface name (used only in a comment). - iface := si.Interface.Name() - if ipkg := si.Interface.Pkg(); ipkg != nil && ipkg != conc.Pkg() { - iface = ipkg.Name() + "." + iface - } - - // Pointer receiver? 
- var star string - if si.Pointer { - star = "*" - } - - // If there are any that have named receiver, choose the first one. - // Otherwise, use lowercase for the first letter of the object. - rn := strings.ToLower(si.Concrete.Obj().Name()[0:1]) - for i := 0; i < si.Concrete.NumMethods(); i++ { - if recv, ok := si.Concrete.Method(i).Type().(*types.Signature); ok && recv.Recv().Name() != "" { - rn = recv.Recv().Name() - break - } - } - - // Check for receiver name conflicts - checkRecvName := func(tuple *types.Tuple) bool { - for i := 0; i < tuple.Len(); i++ { - if rn == tuple.At(i).Name() { - return true - } - } - return false - } - - // Format the new methods. - var newMethods bytes.Buffer - - for index := range missing { - mrn := rn + " " - if sig, ok := missing[index].fn.Type().(*types.Signature); ok { - if checkRecvName(sig.Params()) || checkRecvName(sig.Results()) { - mrn = "" - } - } - - fmt.Fprintf(&newMethods, `// %s implements %s. -%sfunc (%s%s%s%s) %s%s { - panic("unimplemented") -} -`, - missing[index].fn.Name(), - iface, - missing[index].needSubtle, - mrn, - star, - si.Concrete.Obj().Name(), - FormatTypeParams(si.Concrete.TypeParams()), - missing[index].fn.Name(), - strings.TrimPrefix(types.TypeString(missing[index].fn.Type(), qual), "func")) - } - - // Compute insertion point for new methods: - // after the top-level declaration enclosing the (package-level) type. - insertOffset, err := safetoken.Offset(declPGF.Tok, declPGF.File.End()) - if err != nil { - return nil, nil, bug.Errorf("internal error: end position outside file bounds: %v", err) - } - concOffset, err := safetoken.Offset(si.Fset.File(conc.Pos()), conc.Pos()) - if err != nil { - return nil, nil, bug.Errorf("internal error: finding type decl offset: %v", err) - } - for _, decl := range declPGF.File.Decls { - declEndOffset, err := safetoken.Offset(declPGF.Tok, decl.End()) - if err != nil { - return nil, nil, bug.Errorf("internal error: finding decl offset: %v", err) - } - if declEndOffset > concOffset { - insertOffset = declEndOffset - break - } - } - - // Splice the new methods into the file content. - var buf bytes.Buffer - input := declPGF.Mapper.Content // unfixed content of file - buf.Write(input[:insertOffset]) - buf.WriteByte('\n') - io.Copy(&buf, &newMethods) - buf.Write(input[insertOffset:]) - - // Re-parse the file. - fset := token.NewFileSet() - newF, err := parser.ParseFile(fset, declPGF.URI.Path(), buf.Bytes(), parser.ParseComments) - if err != nil { - return nil, nil, fmt.Errorf("could not reparse file: %w", err) - } - - // Splice the new imports into the syntax tree. - for _, imp := range newImports { - astutil.AddNamedImport(fset, newF, imp.name, imp.importPath) - } - - // Pretty-print. - var output bytes.Buffer - if err := format.Node(&output, fset, newF); err != nil { - return nil, nil, fmt.Errorf("format.Node: %w", err) - } - - // Report the diff. - diffs := diff.Bytes(input, output.Bytes()) - return tokeninternal.FileSetFor(declPGF.Tok), // edits use declPGF.Tok - &analysis.SuggestedFix{TextEdits: diffToTextEdits(declPGF.Tok, diffs)}, - nil -} - -// diffToTextEdits converts diff (offset-based) edits to analysis (token.Pos) form. 
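The conversion described in the comment above amounts to mapping byte offsets back into token.Pos values via go/token. A minimal, standalone sketch of that mapping, with an illustrative offsetEdit type rather than the deleted helper's diff.Edit, is:

package main

import (
	"fmt"
	"go/token"
)

// offsetEdit is a hypothetical stand-in for an offset-based edit.
type offsetEdit struct {
	start, end int
	newText    string
}

func main() {
	src := "package p\n\nfunc f() {}\n"
	fset := token.NewFileSet()
	tf := fset.AddFile("p.go", -1, len(src))
	tf.SetLinesForContent([]byte(src))

	// Convert byte offsets into token.Pos, the position form used by
	// analysis.TextEdit.
	e := offsetEdit{start: 11, end: 11, newText: "// a comment\n"}
	pos, end := tf.Pos(e.start), tf.Pos(e.end)
	fmt.Println(fset.Position(pos), fset.Position(end)) // p.go:3:1 p.go:3:1
}

The token.File does the heavy lifting: once its line table is set from the content, offsets round-trip to positions without re-parsing.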
-func diffToTextEdits(tok *token.File, diffs []diff.Edit) []analysis.TextEdit { - edits := make([]analysis.TextEdit, 0, len(diffs)) - for _, edit := range diffs { - edits = append(edits, analysis.TextEdit{ - Pos: tok.Pos(edit.Start), - End: tok.Pos(edit.End), - NewText: []byte(edit.New), - }) - } - return edits -} - -// trimVersionSuffix removes a trailing "/v2" (etc) suffix from a module path. -// -// This is only a heuristic as to the package's declared name, and -// should only be used for stylistic decisions, such as whether it -// would be clearer to use an explicit local name in the import -// because the declared name differs from the result of this function. -// When the name matters for correctness, look up the imported -// package's Metadata.Name. -func trimVersionSuffix(path string) string { - dir, base := pathpkg.Split(path) - if len(base) > 1 && base[0] == 'v' && strings.Trim(base[1:], "0123456789") == "" { - return dir // sans "/v2" - } - return path -} diff --git a/internal/golangorgx/gopls/golang/symbols.go b/internal/golangorgx/gopls/golang/symbols.go deleted file mode 100644 index 3e1bdf3bf0c..00000000000 --- a/internal/golangorgx/gopls/golang/symbols.go +++ /dev/null @@ -1,229 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package golang - -import ( - "context" - "fmt" - "go/ast" - "go/token" - "go/types" - - "cuelang.org/go/internal/golangorgx/gopls/cache" - "cuelang.org/go/internal/golangorgx/gopls/file" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/tools/event" -) - -func DocumentSymbols(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]protocol.DocumentSymbol, error) { - ctx, done := event.Start(ctx, "golang.DocumentSymbols") - defer done() - - pgf, err := snapshot.ParseGo(ctx, fh, ParseFull) - if err != nil { - return nil, fmt.Errorf("getting file for DocumentSymbols: %w", err) - } - - // Build symbols for file declarations. When encountering a declaration with - // errors (typically because positions are invalid), we skip the declaration - // entirely. VS Code fails to show any symbols if one of the top-level - // symbols is missing position information. - var symbols []protocol.DocumentSymbol - for _, decl := range pgf.File.Decls { - switch decl := decl.(type) { - case *ast.FuncDecl: - if decl.Name.Name == "_" { - continue - } - fs, err := funcSymbol(pgf.Mapper, pgf.Tok, decl) - if err == nil { - // If function is a method, prepend the type of the method. 
- if decl.Recv != nil && len(decl.Recv.List) > 0 { - fs.Name = fmt.Sprintf("(%s).%s", types.ExprString(decl.Recv.List[0].Type), fs.Name) - } - symbols = append(symbols, fs) - } - case *ast.GenDecl: - for _, spec := range decl.Specs { - switch spec := spec.(type) { - case *ast.TypeSpec: - if spec.Name.Name == "_" { - continue - } - ts, err := typeSymbol(pgf.Mapper, pgf.Tok, spec) - if err == nil { - symbols = append(symbols, ts) - } - case *ast.ValueSpec: - for _, name := range spec.Names { - if name.Name == "_" { - continue - } - vs, err := varSymbol(pgf.Mapper, pgf.Tok, spec, name, decl.Tok == token.CONST) - if err == nil { - symbols = append(symbols, vs) - } - } - } - } - } - } - return symbols, nil -} - -func funcSymbol(m *protocol.Mapper, tf *token.File, decl *ast.FuncDecl) (protocol.DocumentSymbol, error) { - s := protocol.DocumentSymbol{ - Name: decl.Name.Name, - Kind: protocol.Function, - } - if decl.Recv != nil { - s.Kind = protocol.Method - } - var err error - s.Range, err = m.NodeRange(tf, decl) - if err != nil { - return protocol.DocumentSymbol{}, err - } - s.SelectionRange, err = m.NodeRange(tf, decl.Name) - if err != nil { - return protocol.DocumentSymbol{}, err - } - s.Detail = types.ExprString(decl.Type) - return s, nil -} - -func typeSymbol(m *protocol.Mapper, tf *token.File, spec *ast.TypeSpec) (protocol.DocumentSymbol, error) { - s := protocol.DocumentSymbol{ - Name: spec.Name.Name, - } - var err error - s.Range, err = m.NodeRange(tf, spec) - if err != nil { - return protocol.DocumentSymbol{}, err - } - s.SelectionRange, err = m.NodeRange(tf, spec.Name) - if err != nil { - return protocol.DocumentSymbol{}, err - } - s.Kind, s.Detail, s.Children = typeDetails(m, tf, spec.Type) - return s, nil -} - -func typeDetails(m *protocol.Mapper, tf *token.File, typExpr ast.Expr) (kind protocol.SymbolKind, detail string, children []protocol.DocumentSymbol) { - switch typExpr := typExpr.(type) { - case *ast.StructType: - kind = protocol.Struct - children = fieldListSymbols(m, tf, typExpr.Fields, protocol.Field) - if len(children) > 0 { - detail = "struct{...}" - } else { - detail = "struct{}" - } - - // Find interface methods and embedded types. - case *ast.InterfaceType: - kind = protocol.Interface - children = fieldListSymbols(m, tf, typExpr.Methods, protocol.Method) - if len(children) > 0 { - detail = "interface{...}" - } else { - detail = "interface{}" - } - - case *ast.FuncType: - kind = protocol.Function - detail = types.ExprString(typExpr) - - default: - kind = protocol.Class // catch-all, for cases where we don't know the kind syntactically - detail = types.ExprString(typExpr) - } - return -} - -func fieldListSymbols(m *protocol.Mapper, tf *token.File, fields *ast.FieldList, fieldKind protocol.SymbolKind) []protocol.DocumentSymbol { - if fields == nil { - return nil - } - - var symbols []protocol.DocumentSymbol - for _, field := range fields.List { - detail, children := "", []protocol.DocumentSymbol(nil) - if field.Type != nil { - _, detail, children = typeDetails(m, tf, field.Type) - } - if len(field.Names) == 0 { // embedded interface or struct field - // By default, use the formatted type details as the name of this field. - // This handles potentially invalid syntax, as well as type embeddings in - // interfaces. - child := protocol.DocumentSymbol{ - Name: detail, - Kind: protocol.Field, // consider all embeddings to be fields - Children: children, - } - - // If the field is a valid embedding, promote the type name to field - // name. 
- selection := field.Type - if id := embeddedIdent(field.Type); id != nil { - child.Name = id.Name - child.Detail = detail - selection = id - } - - if rng, err := m.NodeRange(tf, field.Type); err == nil { - child.Range = rng - } - if rng, err := m.NodeRange(tf, selection); err == nil { - child.SelectionRange = rng - } - - symbols = append(symbols, child) - } else { - for _, name := range field.Names { - child := protocol.DocumentSymbol{ - Name: name.Name, - Kind: fieldKind, - Detail: detail, - Children: children, - } - - if rng, err := m.NodeRange(tf, field); err == nil { - child.Range = rng - } - if rng, err := m.NodeRange(tf, name); err == nil { - child.SelectionRange = rng - } - - symbols = append(symbols, child) - } - } - - } - return symbols -} - -func varSymbol(m *protocol.Mapper, tf *token.File, spec *ast.ValueSpec, name *ast.Ident, isConst bool) (protocol.DocumentSymbol, error) { - s := protocol.DocumentSymbol{ - Name: name.Name, - Kind: protocol.Variable, - } - if isConst { - s.Kind = protocol.Constant - } - var err error - s.Range, err = m.NodeRange(tf, spec) - if err != nil { - return protocol.DocumentSymbol{}, err - } - s.SelectionRange, err = m.NodeRange(tf, name) - if err != nil { - return protocol.DocumentSymbol{}, err - } - if spec.Type != nil { // type may be missing from the syntax - _, s.Detail, s.Children = typeDetails(m, tf, spec.Type) - } - return s, nil -} diff --git a/internal/golangorgx/gopls/golang/type_definition.go b/internal/golangorgx/gopls/golang/type_definition.go deleted file mode 100644 index 2d2e209a6c1..00000000000 --- a/internal/golangorgx/gopls/golang/type_definition.go +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package golang - -import ( - "context" - "fmt" - "go/token" - - "cuelang.org/go/internal/golangorgx/gopls/cache" - "cuelang.org/go/internal/golangorgx/gopls/file" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/util/bug" - "cuelang.org/go/internal/golangorgx/tools/event" -) - -// TypeDefinition handles the textDocument/typeDefinition request for Go files. -func TypeDefinition(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, position protocol.Position) ([]protocol.Location, error) { - ctx, done := event.Start(ctx, "golang.TypeDefinition") - defer done() - - pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) - if err != nil { - return nil, err - } - pos, err := pgf.PositionPos(position) - if err != nil { - return nil, err - } - - // TODO(rfindley): handle type switch implicits correctly here: if the user - // jumps to the type definition of x in x := y.(type), it makes sense to jump - // to the type of y. - _, obj, _ := referencedObject(pkg, pgf, pos) - if obj == nil { - return nil, nil - } - - tname := typeToObject(obj.Type()) - if tname == nil { - return nil, fmt.Errorf("no type definition for %s", obj.Name()) - } - - if !tname.Pos().IsValid() { - // The only defined types with no position are error and comparable. 
- if tname.Name() != "error" && tname.Name() != "comparable" { - bug.Reportf("unexpected type name with no position: %s", tname) - } - return nil, nil - } - - loc, err := mapPosition(ctx, pkg.FileSet(), snapshot, tname.Pos(), tname.Pos()+token.Pos(len(tname.Name()))) - if err != nil { - return nil, err - } - return []protocol.Location{loc}, nil -} diff --git a/internal/golangorgx/gopls/golang/types_format.go b/internal/golangorgx/gopls/golang/types_format.go deleted file mode 100644 index d888b84ceec..00000000000 --- a/internal/golangorgx/gopls/golang/types_format.go +++ /dev/null @@ -1,526 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package golang - -import ( - "bytes" - "context" - "fmt" - "go/ast" - "go/doc" - "go/printer" - "go/token" - "go/types" - "strings" - - "cuelang.org/go/internal/golangorgx/gopls/cache" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/settings" - "cuelang.org/go/internal/golangorgx/gopls/util/bug" - "cuelang.org/go/internal/golangorgx/tools/event" - "cuelang.org/go/internal/golangorgx/tools/event/tag" - "cuelang.org/go/internal/golangorgx/tools/tokeninternal" - "cuelang.org/go/internal/golangorgx/tools/typeparams" -) - -// FormatType returns the detail and kind for a types.Type. -func FormatType(typ types.Type, qf types.Qualifier) (detail string, kind protocol.CompletionItemKind) { - if types.IsInterface(typ) { - detail = "interface{...}" - kind = protocol.InterfaceCompletion - } else if _, ok := typ.(*types.Struct); ok { - detail = "struct{...}" - kind = protocol.StructCompletion - } else if typ != typ.Underlying() { - detail, kind = FormatType(typ.Underlying(), qf) - } else { - detail = types.TypeString(typ, qf) - kind = protocol.ClassCompletion - } - return detail, kind -} - -type signature struct { - name, doc string - typeParams, params, results []string - variadic bool - needResultParens bool -} - -func (s *signature) Format() string { - var b strings.Builder - b.WriteByte('(') - for i, p := range s.params { - if i > 0 { - b.WriteString(", ") - } - b.WriteString(p) - } - b.WriteByte(')') - - // Add space between parameters and results. - if len(s.results) > 0 { - b.WriteByte(' ') - } - if s.needResultParens { - b.WriteByte('(') - } - for i, r := range s.results { - if i > 0 { - b.WriteString(", ") - } - b.WriteString(r) - } - if s.needResultParens { - b.WriteByte(')') - } - return b.String() -} - -func (s *signature) TypeParams() []string { - return s.typeParams -} - -func (s *signature) Params() []string { - return s.params -} - -// NewBuiltinSignature returns signature for the builtin object with a given -// name, if a builtin object with the name exists. 
-func NewBuiltinSignature(ctx context.Context, s *cache.Snapshot, name string) (*signature, error) { - builtin, err := s.BuiltinFile(ctx) - if err != nil { - return nil, err - } - obj := builtin.File.Scope.Lookup(name) - if obj == nil { - return nil, fmt.Errorf("no builtin object for %s", name) - } - decl, ok := obj.Decl.(*ast.FuncDecl) - if !ok { - return nil, fmt.Errorf("no function declaration for builtin: %s", name) - } - if decl.Type == nil { - return nil, fmt.Errorf("no type for builtin decl %s", decl.Name) - } - var variadic bool - if decl.Type.Params.List != nil { - numParams := len(decl.Type.Params.List) - lastParam := decl.Type.Params.List[numParams-1] - if _, ok := lastParam.Type.(*ast.Ellipsis); ok { - variadic = true - } - } - fset := tokeninternal.FileSetFor(builtin.Tok) - params, _ := formatFieldList(ctx, fset, decl.Type.Params, variadic) - results, needResultParens := formatFieldList(ctx, fset, decl.Type.Results, false) - d := decl.Doc.Text() - switch s.Options().HoverKind { - case settings.SynopsisDocumentation: - d = doc.Synopsis(d) - case settings.NoDocumentation: - d = "" - } - return &signature{ - doc: d, - name: name, - needResultParens: needResultParens, - params: params, - results: results, - variadic: variadic, - }, nil -} - -// replacer replaces some synthetic "type classes" used in the builtin file -// with their most common constituent type. -var replacer = strings.NewReplacer( - `ComplexType`, `complex128`, - `FloatType`, `float64`, - `IntegerType`, `int`, -) - -func formatFieldList(ctx context.Context, fset *token.FileSet, list *ast.FieldList, variadic bool) ([]string, bool) { - if list == nil { - return nil, false - } - var writeResultParens bool - var result []string - for i := 0; i < len(list.List); i++ { - if i >= 1 { - writeResultParens = true - } - p := list.List[i] - cfg := printer.Config{Mode: printer.UseSpaces | printer.TabIndent, Tabwidth: 4} - b := &bytes.Buffer{} - if err := cfg.Fprint(b, fset, p.Type); err != nil { - event.Error(ctx, "unable to print type", nil, tag.Type.Of(p.Type)) - continue - } - typ := replacer.Replace(b.String()) - if len(p.Names) == 0 { - result = append(result, typ) - } - for _, name := range p.Names { - if name.Name != "" { - if i == 0 { - writeResultParens = true - } - result = append(result, fmt.Sprintf("%s %s", name.Name, typ)) - } else { - result = append(result, typ) - } - } - } - if variadic { - result[len(result)-1] = strings.Replace(result[len(result)-1], "[]", "...", 1) - } - return result, writeResultParens -} - -// FormatTypeParams turns TypeParamList into its Go representation, such as: -// [T, Y]. Note that it does not print constraints as this is mainly used for -// formatting type params in method receivers. -func FormatTypeParams(tparams *types.TypeParamList) string { - if tparams == nil || tparams.Len() == 0 { - return "" - } - var buf bytes.Buffer - buf.WriteByte('[') - for i := 0; i < tparams.Len(); i++ { - if i > 0 { - buf.WriteString(", ") - } - buf.WriteString(tparams.At(i).Obj().Name()) - } - buf.WriteByte(']') - return buf.String() -} - -// NewSignature returns formatted signature for a types.Signature struct. -func NewSignature(ctx context.Context, s *cache.Snapshot, pkg *cache.Package, sig *types.Signature, comment *ast.CommentGroup, qf types.Qualifier, mq MetadataQualifier) (*signature, error) { - var tparams []string - tpList := sig.TypeParams() - for i := 0; i < tpList.Len(); i++ { - tparam := tpList.At(i) - // TODO: is it possible to reuse the logic from FormatVarType here? 
- s := tparam.Obj().Name() + " " + tparam.Constraint().String() - tparams = append(tparams, s) - } - - params := make([]string, 0, sig.Params().Len()) - for i := 0; i < sig.Params().Len(); i++ { - el := sig.Params().At(i) - typ, err := FormatVarType(ctx, s, pkg, el, qf, mq) - if err != nil { - return nil, err - } - p := typ - if el.Name() != "" { - p = el.Name() + " " + typ - } - params = append(params, p) - } - - var needResultParens bool - results := make([]string, 0, sig.Results().Len()) - for i := 0; i < sig.Results().Len(); i++ { - if i >= 1 { - needResultParens = true - } - el := sig.Results().At(i) - typ, err := FormatVarType(ctx, s, pkg, el, qf, mq) - if err != nil { - return nil, err - } - if el.Name() == "" { - results = append(results, typ) - } else { - if i == 0 { - needResultParens = true - } - results = append(results, el.Name()+" "+typ) - } - } - var d string - if comment != nil { - d = comment.Text() - } - switch s.Options().HoverKind { - case settings.SynopsisDocumentation: - d = doc.Synopsis(d) - case settings.NoDocumentation: - d = "" - } - return &signature{ - doc: d, - typeParams: tparams, - params: params, - results: results, - variadic: sig.Variadic(), - needResultParens: needResultParens, - }, nil -} - -// FormatVarType formats a *types.Var, accounting for type aliases. -// To do this, it looks in the AST of the file in which the object is declared. -// On any errors, it always falls back to types.TypeString. -// -// TODO(rfindley): this function could return the actual name used in syntax, -// for better parameter names. -func FormatVarType(ctx context.Context, snapshot *cache.Snapshot, srcpkg *cache.Package, obj *types.Var, qf types.Qualifier, mq MetadataQualifier) (string, error) { - // TODO(rfindley): This looks wrong. The previous comment said: - // "If the given expr refers to a type parameter, then use the - // object's Type instead of the type parameter declaration. This helps - // format the instantiated type as opposed to the original undeclared - // generic type". - // - // But of course, if obj is a type param, we are formatting a generic type - // and not an instantiated type. Handling for instantiated types must be done - // at a higher level. - // - // Left this during refactoring in order to preserve pre-existing logic. - if typeparams.IsTypeParam(obj.Type()) { - return types.TypeString(obj.Type(), qf), nil - } - - if obj.Pkg() == nil || !obj.Pos().IsValid() { - // This is defensive, though it is extremely unlikely we'll ever have a - // builtin var. - return types.TypeString(obj.Type(), qf), nil - } - - // TODO(rfindley): parsing to produce candidates can be costly; consider - // using faster methods. - targetpgf, pos, err := parseFull(ctx, snapshot, srcpkg.FileSet(), obj.Pos()) - if err != nil { - return "", err // e.g. ctx cancelled - } - - targetMeta := findFileInDeps(snapshot, srcpkg.Metadata(), targetpgf.URI) - if targetMeta == nil { - // If we have an object from type-checking, it should exist in a file in - // the forward transitive closure. - return "", bug.Errorf("failed to find file %q in deps of %q", targetpgf.URI, srcpkg.Metadata().ID) - } - - decl, spec, field := findDeclInfo([]*ast.File{targetpgf.File}, pos) - - // We can't handle type parameters correctly, so we fall back on TypeString - // for parameterized decls. 
- if decl, _ := decl.(*ast.FuncDecl); decl != nil { - if decl.Type.TypeParams.NumFields() > 0 { - return types.TypeString(obj.Type(), qf), nil // in generic function - } - if decl.Recv != nil && len(decl.Recv.List) > 0 { - rtype := decl.Recv.List[0].Type - if e, ok := rtype.(*ast.StarExpr); ok { - rtype = e.X - } - if x, _, _, _ := typeparams.UnpackIndexExpr(rtype); x != nil { - return types.TypeString(obj.Type(), qf), nil // in method of generic type - } - } - } - if spec, _ := spec.(*ast.TypeSpec); spec != nil && spec.TypeParams.NumFields() > 0 { - return types.TypeString(obj.Type(), qf), nil // in generic type decl - } - - if field == nil { - // TODO(rfindley): we should never reach here from an ordinary var, so - // should probably return an error here. - return types.TypeString(obj.Type(), qf), nil - } - expr := field.Type - - rq := requalifier(snapshot, targetpgf.File, targetMeta, mq) - - // The type names in the AST may not be correctly qualified. - // Determine the package name to use based on the package that originated - // the query and the package in which the type is declared. - // We then qualify the value by cloning the AST node and editing it. - expr = qualifyTypeExpr(expr, rq) - - // If the request came from a different package than the one in which the - // types are defined, we may need to modify the qualifiers. - return FormatNodeFile(targetpgf.Tok, expr), nil -} - -// qualifyTypeExpr clones the type expression expr after re-qualifying type -// names using the given function, which accepts the current syntactic -// qualifier (possibly "" for unqualified idents), and returns a new qualifier -// (again, possibly "" if the identifier should be unqualified). -// -// The resulting expression may be inaccurate: without type-checking we don't -// properly account for "." imported identifiers or builtins. -// -// TODO(rfindley): add many more tests for this function. -func qualifyTypeExpr(expr ast.Expr, qf func(string) string) ast.Expr { - switch expr := expr.(type) { - case *ast.ArrayType: - return &ast.ArrayType{ - Lbrack: expr.Lbrack, - Elt: qualifyTypeExpr(expr.Elt, qf), - Len: expr.Len, - } - - case *ast.BinaryExpr: - if expr.Op != token.OR { - return expr - } - return &ast.BinaryExpr{ - X: qualifyTypeExpr(expr.X, qf), - OpPos: expr.OpPos, - Op: expr.Op, - Y: qualifyTypeExpr(expr.Y, qf), - } - - case *ast.ChanType: - return &ast.ChanType{ - Arrow: expr.Arrow, - Begin: expr.Begin, - Dir: expr.Dir, - Value: qualifyTypeExpr(expr.Value, qf), - } - - case *ast.Ellipsis: - return &ast.Ellipsis{ - Ellipsis: expr.Ellipsis, - Elt: qualifyTypeExpr(expr.Elt, qf), - } - - case *ast.FuncType: - return &ast.FuncType{ - Func: expr.Func, - Params: qualifyFieldList(expr.Params, qf), - Results: qualifyFieldList(expr.Results, qf), - } - - case *ast.Ident: - // Unqualified type (builtin, package local, or dot-imported). - - // Don't qualify names that look like builtins. - // - // Without type-checking this may be inaccurate. It could be made accurate - // by doing syntactic object resolution for the entire package, but that - // does not seem worthwhile and we generally want to avoid using - // ast.Object, which may be inaccurate. 
- if obj := types.Universe.Lookup(expr.Name); obj != nil { - return expr - } - - newName := qf("") - if newName != "" { - return &ast.SelectorExpr{ - X: &ast.Ident{ - NamePos: expr.Pos(), - Name: newName, - }, - Sel: expr, - } - } - return expr - - case *ast.IndexExpr: - return &ast.IndexExpr{ - X: qualifyTypeExpr(expr.X, qf), - Lbrack: expr.Lbrack, - Index: qualifyTypeExpr(expr.Index, qf), - Rbrack: expr.Rbrack, - } - - case *ast.IndexListExpr: - indices := make([]ast.Expr, len(expr.Indices)) - for i, idx := range expr.Indices { - indices[i] = qualifyTypeExpr(idx, qf) - } - return &ast.IndexListExpr{ - X: qualifyTypeExpr(expr.X, qf), - Lbrack: expr.Lbrack, - Indices: indices, - Rbrack: expr.Rbrack, - } - - case *ast.InterfaceType: - return &ast.InterfaceType{ - Interface: expr.Interface, - Methods: qualifyFieldList(expr.Methods, qf), - Incomplete: expr.Incomplete, - } - - case *ast.MapType: - return &ast.MapType{ - Map: expr.Map, - Key: qualifyTypeExpr(expr.Key, qf), - Value: qualifyTypeExpr(expr.Value, qf), - } - - case *ast.ParenExpr: - return &ast.ParenExpr{ - Lparen: expr.Lparen, - Rparen: expr.Rparen, - X: qualifyTypeExpr(expr.X, qf), - } - - case *ast.SelectorExpr: - if id, ok := expr.X.(*ast.Ident); ok { - // qualified type - newName := qf(id.Name) - if newName == "" { - return expr.Sel - } - return &ast.SelectorExpr{ - X: &ast.Ident{ - NamePos: id.NamePos, - Name: newName, - }, - Sel: expr.Sel, - } - } - return expr - - case *ast.StarExpr: - return &ast.StarExpr{ - Star: expr.Star, - X: qualifyTypeExpr(expr.X, qf), - } - - case *ast.StructType: - return &ast.StructType{ - Struct: expr.Struct, - Fields: qualifyFieldList(expr.Fields, qf), - Incomplete: expr.Incomplete, - } - - default: - return expr - } -} - -func qualifyFieldList(fl *ast.FieldList, qf func(string) string) *ast.FieldList { - if fl == nil { - return nil - } - if fl.List == nil { - return &ast.FieldList{ - Closing: fl.Closing, - Opening: fl.Opening, - } - } - list := make([]*ast.Field, 0, len(fl.List)) - for _, f := range fl.List { - list = append(list, &ast.Field{ - Comment: f.Comment, - Doc: f.Doc, - Names: f.Names, - Tag: f.Tag, - Type: qualifyTypeExpr(f.Type, qf), - }) - } - return &ast.FieldList{ - Closing: fl.Closing, - Opening: fl.Opening, - List: list, - } -} diff --git a/internal/golangorgx/gopls/golang/util.go b/internal/golangorgx/gopls/golang/util.go index 7604cb29976..96303ed37b7 100644 --- a/internal/golangorgx/gopls/golang/util.go +++ b/internal/golangorgx/gopls/golang/util.go @@ -6,20 +6,11 @@ package golang import ( "context" - "go/ast" - "go/printer" - "go/token" - "go/types" "regexp" - "strings" "cuelang.org/go/internal/golangorgx/gopls/cache" - "cuelang.org/go/internal/golangorgx/gopls/cache/metadata" "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/util/astutil" - "cuelang.org/go/internal/golangorgx/gopls/util/bug" "cuelang.org/go/internal/golangorgx/gopls/util/safetoken" - "cuelang.org/go/internal/golangorgx/tools/tokeninternal" ) // IsGenerated gets and reads the file denoted by uri and reports @@ -50,340 +41,7 @@ func IsGenerated(ctx context.Context, snapshot *cache.Snapshot, uri protocol.Doc return false } -// adjustedObjEnd returns the end position of obj, possibly modified for -// package names. -// -// TODO(rfindley): eliminate this function, by inlining it at callsites where -// it makes sense. 
-func adjustedObjEnd(obj types.Object) token.Pos { - nameLen := len(obj.Name()) - if pkgName, ok := obj.(*types.PkgName); ok { - // An imported Go package has a package-local, unqualified name. - // When the name matches the imported package name, there is no - // identifier in the import spec with the local package name. - // - // For example: - // import "go/ast" // name "ast" matches package name - // import a "go/ast" // name "a" does not match package name - // - // When the identifier does not appear in the source, have the range - // of the object be the import path, including quotes. - if pkgName.Imported().Name() == pkgName.Name() { - nameLen = len(pkgName.Imported().Path()) + len(`""`) - } - } - return obj.Pos() + token.Pos(nameLen) -} - // Matches cgo generated comment as well as the proposed standard: // // https://golang.org/s/generatedcode var generatedRx = regexp.MustCompile(`// .*DO NOT EDIT\.?`) - -// nodeAtPos returns the index and the node whose position is contained inside -// the node list. -func nodeAtPos(nodes []ast.Node, pos token.Pos) (ast.Node, int) { - if nodes == nil { - return nil, -1 - } - for i, node := range nodes { - if node.Pos() <= pos && pos <= node.End() { - return node, i - } - } - return nil, -1 -} - -// FormatNode returns the "pretty-print" output for an ast node. -func FormatNode(fset *token.FileSet, n ast.Node) string { - var buf strings.Builder - if err := printer.Fprint(&buf, fset, n); err != nil { - // TODO(rfindley): we should use bug.Reportf here. - // We encounter this during completion.resolveInvalid. - return "" - } - return buf.String() -} - -// FormatNodeFile is like FormatNode, but requires only the token.File for the -// syntax containing the given ast node. -func FormatNodeFile(file *token.File, n ast.Node) string { - fset := tokeninternal.FileSetFor(file) - return FormatNode(fset, n) -} - -// Deref returns a pointer's element type, traversing as many levels as needed. -// Otherwise it returns typ. -// -// It can return a pointer type for cyclic types (see golang/go#45510). -func Deref(typ types.Type) types.Type { - var seen map[types.Type]struct{} - for { - p, ok := typ.Underlying().(*types.Pointer) - if !ok { - return typ - } - if _, ok := seen[p.Elem()]; ok { - return typ - } - - typ = p.Elem() - - if seen == nil { - seen = make(map[types.Type]struct{}) - } - seen[typ] = struct{}{} - } -} - -// findFileInDeps finds package metadata containing URI in the transitive -// dependencies of m. When using the Go command, the answer is unique. -func findFileInDeps(s metadata.Source, mp *metadata.Package, uri protocol.DocumentURI) *metadata.Package { - seen := make(map[PackageID]bool) - var search func(*metadata.Package) *metadata.Package - search = func(mp *metadata.Package) *metadata.Package { - if seen[mp.ID] { - return nil - } - seen[mp.ID] = true - for _, cgf := range mp.CompiledGoFiles { - if cgf == uri { - return mp - } - } - for _, dep := range mp.DepsByPkgPath { - mp := s.Metadata(dep) - if mp == nil { - bug.Reportf("nil metadata for %q", dep) - continue - } - if found := search(mp); found != nil { - return found - } - } - return nil - } - return search(mp) -} - -// CollectScopes returns all scopes in an ast path, ordered as innermost scope -// first. -func CollectScopes(info *types.Info, path []ast.Node, pos token.Pos) []*types.Scope { - // scopes[i], where i import path mapping. 
- inverseDeps := make(map[PackageID]PackagePath) - for path, id := range mp.DepsByPkgPath { - inverseDeps[id] = path - } - importsByPkgPath := make(map[PackagePath]ImportPath) // best import paths by pkgPath - for impPath, id := range mp.DepsByImpPath { - if id == "" { - continue - } - pkgPath := inverseDeps[id] - _, hasPath := importsByPkgPath[pkgPath] - _, hasImp := localNames[impPath] - // In rare cases, there may be multiple import paths with the same package - // path. In such scenarios, prefer an import path that already exists in - // the file. - if !hasPath || hasImp { - importsByPkgPath[pkgPath] = impPath - } - } - - return func(pkgName PackageName, impPath ImportPath, pkgPath PackagePath) string { - // If supplied, translate the package path to an import path in the source - // package. - if pkgPath != "" { - if srcImp := importsByPkgPath[pkgPath]; srcImp != "" { - impPath = srcImp - } - if pkgPath == mp.PkgPath { - return "" - } - } - if localName, ok := localNames[impPath]; ok && impPath != "" { - return localName - } - if pkgName != "" { - return string(pkgName) - } - idx := strings.LastIndexByte(string(impPath), '/') - return string(impPath[idx+1:]) - } -} - -// importInfo collects information about the import specified by imp, -// extracting its file-local name, package name, import path, and package path. -// -// If metadata is missing for the import, the resulting package name and -// package path may be empty, and the file local name may be guessed based on -// the import path. -// -// Note: previous versions of this helper used a PackageID->PackagePath map -// extracted from m, for extracting package path even in the case where -// metadata for a dep was missing. This should not be necessary, as we should -// always have metadata for IDs contained in DepsByPkgPath. -func importInfo(s metadata.Source, imp *ast.ImportSpec, mp *metadata.Package) (string, PackageName, ImportPath, PackagePath) { - var ( - name string // local name - pkgName PackageName - impPath = metadata.UnquoteImportPath(imp) - pkgPath PackagePath - ) - - // If the import has a local name, use it. - if imp.Name != nil { - name = imp.Name.Name - } - - // Try to find metadata for the import. If successful and there is no local - // name, the package name is the local name. - if depID := mp.DepsByImpPath[impPath]; depID != "" { - if depMP := s.Metadata(depID); depMP != nil { - if name == "" { - name = string(depMP.Name) - } - pkgName = depMP.Name - pkgPath = depMP.PkgPath - } - } - - // If the local name is still unknown, guess it based on the import path. - if name == "" { - idx := strings.LastIndexByte(string(impPath), '/') - name = string(impPath[idx+1:]) - } - return name, pkgName, impPath, pkgPath -} - -// isDirective reports whether c is a comment directive. -// -// Copied and adapted from go/src/go/ast/ast.go. -func isDirective(c string) bool { - if len(c) < 3 { - return false - } - if c[1] != '/' { - return false - } - //-style comment (no newline at the end) - c = c[2:] - if len(c) == 0 { - // empty line - return false - } - // "//line " is a line directive. - // (The // has been removed.) - if strings.HasPrefix(c, "line ") { - return true - } - - // "//[a-z0-9]+:[a-z0-9]" - // (The // has been removed.) 
- colon := strings.Index(c, ":") - if colon <= 0 || colon+1 >= len(c) { - return false - } - for i := 0; i <= colon+1; i++ { - if i == colon { - continue - } - b := c[i] - if !('a' <= b && b <= 'z' || '0' <= b && b <= '9') { - return false - } - } - return true -} - -// embeddedIdent returns the type name identifier for an embedding x, if x in a -// valid embedding. Otherwise, it returns nil. -// -// Spec: An embedded field must be specified as a type name T or as a pointer -// to a non-interface type name *T -func embeddedIdent(x ast.Expr) *ast.Ident { - if star, ok := x.(*ast.StarExpr); ok { - x = star.X - } - switch ix := x.(type) { // check for instantiated receivers - case *ast.IndexExpr: - x = ix.X - case *ast.IndexListExpr: - x = ix.X - } - switch x := x.(type) { - case *ast.Ident: - return x - case *ast.SelectorExpr: - if _, ok := x.X.(*ast.Ident); ok { - return x.Sel - } - } - return nil -} - -// An importFunc is an implementation of the single-method -// types.Importer interface based on a function value. -type ImporterFunc func(path string) (*types.Package, error) - -func (f ImporterFunc) Import(path string) (*types.Package, error) { return f(path) } diff --git a/internal/golangorgx/gopls/golang/workspace_symbol.go b/internal/golangorgx/gopls/golang/workspace_symbol.go deleted file mode 100644 index b9aec1e7107..00000000000 --- a/internal/golangorgx/gopls/golang/workspace_symbol.go +++ /dev/null @@ -1,526 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package golang - -import ( - "context" - "fmt" - "path/filepath" - "runtime" - "sort" - "strings" - "unicode" - - "cuelang.org/go/internal/golangorgx/gopls/cache" - "cuelang.org/go/internal/golangorgx/gopls/cache/metadata" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/settings" - "cuelang.org/go/internal/golangorgx/tools/event" - "cuelang.org/go/internal/golangorgx/tools/fuzzy" -) - -// maxSymbols defines the maximum number of symbol results that should ever be -// sent in response to a client. -const maxSymbols = 100 - -// WorkspaceSymbols matches symbols across all views using the given query, -// according to the match semantics parameterized by matcherType and style. -// -// The workspace symbol method is defined in the spec as follows: -// -// The workspace symbol request is sent from the client to the server to -// list project-wide symbols matching the query string. -// -// It is unclear what "project-wide" means here, but given the parameters of -// workspace/symbol do not include any workspace identifier, then it has to be -// assumed that "project-wide" means "across all workspaces". Hence why -// WorkspaceSymbols receives the views []View. -// -// However, it then becomes unclear what it would mean to call WorkspaceSymbols -// with a different configured SymbolMatcher per View. Therefore we assume that -// Session level configuration will define the SymbolMatcher to be used for the -// WorkspaceSymbols method. 
-func WorkspaceSymbols(ctx context.Context, matcher settings.SymbolMatcher, style settings.SymbolStyle, snapshots []*cache.Snapshot, query string) ([]protocol.SymbolInformation, error) { - ctx, done := event.Start(ctx, "golang.WorkspaceSymbols") - defer done() - if query == "" { - return nil, nil - } - - var s symbolizer - switch style { - case settings.DynamicSymbols: - s = dynamicSymbolMatch - case settings.FullyQualifiedSymbols: - s = fullyQualifiedSymbolMatch - case settings.PackageQualifiedSymbols: - s = packageSymbolMatch - default: - panic(fmt.Errorf("unknown symbol style: %v", style)) - } - - return collectSymbols(ctx, snapshots, matcher, s, query) -} - -// A matcherFunc returns the index and score of a symbol match. -// -// See the comment for symbolCollector for more information. -type matcherFunc func(chunks []string) (int, float64) - -// A symbolizer returns the best symbol match for a name with pkg, according to -// some heuristic. The symbol name is passed as the slice nameParts of logical -// name pieces. For example, for myType.field the caller can pass either -// []string{"myType.field"} or []string{"myType.", "field"}. -// -// See the comment for symbolCollector for more information. -// -// The space argument is an empty slice with spare capacity that may be used -// to allocate the result. -type symbolizer func(space []string, name string, pkg *metadata.Package, m matcherFunc) ([]string, float64) - -func fullyQualifiedSymbolMatch(space []string, name string, pkg *metadata.Package, matcher matcherFunc) ([]string, float64) { - if _, score := dynamicSymbolMatch(space, name, pkg, matcher); score > 0 { - return append(space, string(pkg.PkgPath), ".", name), score - } - return nil, 0 -} - -func dynamicSymbolMatch(space []string, name string, pkg *metadata.Package, matcher matcherFunc) ([]string, float64) { - if metadata.IsCommandLineArguments(pkg.ID) { - // command-line-arguments packages have a non-sensical package path, so - // just use their package name. - return packageSymbolMatch(space, name, pkg, matcher) - } - - var score float64 - - endsInPkgName := strings.HasSuffix(string(pkg.PkgPath), string(pkg.Name)) - - // If the package path does not end in the package name, we need to check the - // package-qualified symbol as an extra pass first. - if !endsInPkgName { - pkgQualified := append(space, string(pkg.Name), ".", name) - idx, score := matcher(pkgQualified) - nameStart := len(pkg.Name) + 1 - if score > 0 { - // If our match is contained entirely within the unqualified portion, - // just return that. - if idx >= nameStart { - return append(space, name), score - } - // Lower the score for matches that include the package name. - return pkgQualified, score * 0.8 - } - } - - // Now try matching the fully qualified symbol. - fullyQualified := append(space, string(pkg.PkgPath), ".", name) - idx, score := matcher(fullyQualified) - - // As above, check if we matched just the unqualified symbol name. - nameStart := len(pkg.PkgPath) + 1 - if idx >= nameStart { - return append(space, name), score - } - - // If our package path ends in the package name, we'll have skipped the - // initial pass above, so check if we matched just the package-qualified - // name. - if endsInPkgName && idx >= 0 { - pkgStart := len(pkg.PkgPath) - len(pkg.Name) - if idx >= pkgStart { - return append(space, string(pkg.Name), ".", name), score - } - } - - // Our match was not contained within the unqualified or package qualified - // symbol. Return the fully qualified symbol but discount the score. 
- return fullyQualified, score * 0.6 -} - -func packageSymbolMatch(space []string, name string, pkg *metadata.Package, matcher matcherFunc) ([]string, float64) { - qualified := append(space, string(pkg.Name), ".", name) - if _, s := matcher(qualified); s > 0 { - return qualified, s - } - return nil, 0 -} - -func buildMatcher(matcher settings.SymbolMatcher, query string) matcherFunc { - switch matcher { - case settings.SymbolFuzzy: - return parseQuery(query, newFuzzyMatcher) - case settings.SymbolFastFuzzy: - return parseQuery(query, func(query string) matcherFunc { - return fuzzy.NewSymbolMatcher(query).Match - }) - case settings.SymbolCaseSensitive: - return matchExact(query) - case settings.SymbolCaseInsensitive: - q := strings.ToLower(query) - exact := matchExact(q) - wrapper := []string{""} - return func(chunks []string) (int, float64) { - s := strings.Join(chunks, "") - wrapper[0] = strings.ToLower(s) - return exact(wrapper) - } - } - panic(fmt.Errorf("unknown symbol matcher: %v", matcher)) -} - -func newFuzzyMatcher(query string) matcherFunc { - fm := fuzzy.NewMatcher(query) - return func(chunks []string) (int, float64) { - score := float64(fm.ScoreChunks(chunks)) - ranges := fm.MatchedRanges() - if len(ranges) > 0 { - return ranges[0], score - } - return -1, score - } -} - -// parseQuery parses a field-separated symbol query, extracting the special -// characters listed below, and returns a matcherFunc corresponding to the AND -// of all field queries. -// -// Special characters: -// -// ^ match exact prefix -// $ match exact suffix -// ' match exact -// -// In all three of these special queries, matches are 'smart-cased', meaning -// they are case sensitive if the symbol query contains any upper-case -// characters, and case insensitive otherwise. -func parseQuery(q string, newMatcher func(string) matcherFunc) matcherFunc { - fields := strings.Fields(q) - if len(fields) == 0 { - return func([]string) (int, float64) { return -1, 0 } - } - var funcs []matcherFunc - for _, field := range fields { - var f matcherFunc - switch { - case strings.HasPrefix(field, "^"): - prefix := field[1:] - f = smartCase(prefix, func(chunks []string) (int, float64) { - s := strings.Join(chunks, "") - if strings.HasPrefix(s, prefix) { - return 0, 1 - } - return -1, 0 - }) - case strings.HasPrefix(field, "'"): - exact := field[1:] - f = smartCase(exact, matchExact(exact)) - case strings.HasSuffix(field, "$"): - suffix := field[0 : len(field)-1] - f = smartCase(suffix, func(chunks []string) (int, float64) { - s := strings.Join(chunks, "") - if strings.HasSuffix(s, suffix) { - return len(s) - len(suffix), 1 - } - return -1, 0 - }) - default: - f = newMatcher(field) - } - funcs = append(funcs, f) - } - if len(funcs) == 1 { - return funcs[0] - } - return comboMatcher(funcs).match -} - -func matchExact(exact string) matcherFunc { - return func(chunks []string) (int, float64) { - s := strings.Join(chunks, "") - if idx := strings.LastIndex(s, exact); idx >= 0 { - return idx, 1 - } - return -1, 0 - } -} - -// smartCase returns a matcherFunc that is case-sensitive if q contains any -// upper-case characters, and case-insensitive otherwise. 
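To make the smart-casing rule documented above concrete, here is a small self-contained sketch, assuming a plain substring match rather than the matcherFunc machinery in this file; the helper name is illustrative only. A query is matched case-sensitively only when it contains an upper-case rune:

package main

import (
	"fmt"
	"strings"
	"unicode"
)

// smartCaseContains reports whether symbol contains query, using
// case-sensitive matching only if query has an upper-case rune.
func smartCaseContains(query, symbol string) bool {
	caseSensitive := strings.IndexFunc(query, unicode.IsUpper) >= 0
	if !caseSensitive {
		query, symbol = strings.ToLower(query), strings.ToLower(symbol)
	}
	return strings.Contains(symbol, query)
}

func main() {
	fmt.Println(smartCaseContains("foo", "myFooType")) // true: insensitive
	fmt.Println(smartCaseContains("Foo", "myfootype")) // false: sensitive
}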
-func smartCase(q string, m matcherFunc) matcherFunc { - insensitive := strings.ToLower(q) == q - wrapper := []string{""} - return func(chunks []string) (int, float64) { - s := strings.Join(chunks, "") - if insensitive { - s = strings.ToLower(s) - } - wrapper[0] = s - return m(wrapper) - } -} - -type comboMatcher []matcherFunc - -func (c comboMatcher) match(chunks []string) (int, float64) { - score := 1.0 - first := 0 - for _, f := range c { - idx, s := f(chunks) - if idx < first { - first = idx - } - score *= s - } - return first, score -} - -// collectSymbols calls snapshot.Symbols to walk the syntax trees of -// all files in the views' current snapshots, and returns a sorted, -// scored list of symbols that best match the parameters. -// -// How it matches symbols is parameterized by two interfaces: -// - A matcherFunc determines how well a string symbol matches a query. It -// returns a non-negative score indicating the quality of the match. A score -// of zero indicates no match. -// - A symbolizer determines how we extract the symbol for an object. This -// enables the 'symbolStyle' configuration option. -func collectSymbols(ctx context.Context, snapshots []*cache.Snapshot, matcherType settings.SymbolMatcher, symbolizer symbolizer, query string) ([]protocol.SymbolInformation, error) { - // Extract symbols from all files. - var work []symbolFile - var roots []string - seen := make(map[protocol.DocumentURI]bool) - // TODO(adonovan): opt: parallelize this loop? How often is len > 1? - for _, snapshot := range snapshots { - // Use the root view URIs for determining (lexically) - // whether a URI is in any open workspace. - folderURI := snapshot.Folder() - roots = append(roots, strings.TrimRight(string(folderURI), "/")) - - filters := snapshot.Options().DirectoryFilters - filterer := cache.NewFilterer(filters) - folder := filepath.ToSlash(folderURI.Path()) - - workspaceOnly := true - if snapshot.Options().SymbolScope == settings.AllSymbolScope { - workspaceOnly = false - } - symbols, err := snapshot.Symbols(ctx, workspaceOnly) - if err != nil { - return nil, err - } - - for uri, syms := range symbols { - norm := filepath.ToSlash(uri.Path()) - nm := strings.TrimPrefix(norm, folder) - if filterer.Disallow(nm) { - continue - } - // Only scan each file once. - if seen[uri] { - continue - } - meta, err := NarrowestMetadataForFile(ctx, snapshot, uri) - if err != nil { - event.Error(ctx, fmt.Sprintf("missing metadata for %q", uri), err) - continue - } - seen[uri] = true - work = append(work, symbolFile{uri, meta, syms}) - } - } - - // Match symbols in parallel. - // Each worker has its own symbolStore, - // which we merge at the end. - nmatchers := runtime.GOMAXPROCS(-1) // matching is CPU bound - results := make(chan *symbolStore) - for i := 0; i < nmatchers; i++ { - go func(i int) { - matcher := buildMatcher(matcherType, query) - store := new(symbolStore) - // Assign files to workers in round-robin fashion. - for j := i; j < len(work); j += nmatchers { - matchFile(store, symbolizer, matcher, roots, work[j]) - } - results <- store - }(i) - } - - // Gather and merge results as they arrive. - var unified symbolStore - for i := 0; i < nmatchers; i++ { - store := <-results - for _, syms := range store.res { - unified.store(syms) - } - } - return unified.results(), nil -} - -// symbolFile holds symbol information for a single file. 
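
// Illustrative sketch, not part of the deleted file: a stripped-down version
// of the worker fan-out used by collectSymbols above. GOMAXPROCS workers take
// files in round-robin order, each fills a private store (so no locking is
// needed), and the partial results are merged from a channel. The file names
// and result strings are invented sample data.
package main

import (
	"fmt"
	"runtime"
)

func main() {
	files := []string{"a.go", "b.go", "c.go", "d.go", "e.go"}

	nworkers := runtime.GOMAXPROCS(-1) // matching is CPU bound
	results := make(chan []string)
	for i := 0; i < nworkers; i++ {
		go func(i int) {
			var store []string // each worker owns its own store
			for j := i; j < len(files); j += nworkers {
				store = append(store, "symbols of "+files[j])
			}
			results <- store
		}(i)
	}

	// Merge the per-worker stores as they arrive.
	var merged []string
	for i := 0; i < nworkers; i++ {
		part := <-results
		merged = append(merged, part...)
	}
	fmt.Println(len(merged), "entries:", merged)
}
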
-type symbolFile struct { - uri protocol.DocumentURI - mp *metadata.Package - syms []cache.Symbol -} - -// matchFile scans a symbol file and adds matching symbols to the store. -func matchFile(store *symbolStore, symbolizer symbolizer, matcher matcherFunc, roots []string, i symbolFile) { - space := make([]string, 0, 3) - for _, sym := range i.syms { - symbolParts, score := symbolizer(space, sym.Name, i.mp, matcher) - - // Check if the score is too low before applying any downranking. - if store.tooLow(score) { - continue - } - - // Factors to apply to the match score for the purpose of downranking - // results. - // - // These numbers were crudely calibrated based on trial-and-error using a - // small number of sample queries. Adjust as necessary. - // - // All factors are multiplicative, meaning if more than one applies they are - // multiplied together. - const ( - // nonWorkspaceFactor is applied to symbols outside the workspace. - // Developers are less likely to want to jump to code that they - // are not actively working on. - nonWorkspaceFactor = 0.5 - // nonWorkspaceUnexportedFactor is applied to unexported symbols outside - // the workspace. Since one wouldn't usually jump to unexported - // symbols to understand a package API, they are particularly irrelevant. - nonWorkspaceUnexportedFactor = 0.5 - // every field or method nesting level to access the field decreases - // the score by a factor of 1.0 - depth*depthFactor, up to a depth of - // 3. - // - // Use a small constant here, as this exists mostly to break ties - // (e.g. given a type Foo and a field x.Foo, prefer Foo). - depthFactor = 0.01 - ) - - startWord := true - exported := true - depth := 0.0 - for _, r := range sym.Name { - if startWord && !unicode.IsUpper(r) { - exported = false - } - if r == '.' { - startWord = true - depth++ - } else { - startWord = false - } - } - - // TODO(rfindley): use metadata to determine if the file is in a workspace - // package, rather than this heuristic. - inWorkspace := false - for _, root := range roots { - if strings.HasPrefix(string(i.uri), root) { - inWorkspace = true - break - } - } - - // Apply downranking based on workspace position. - if !inWorkspace { - score *= nonWorkspaceFactor - if !exported { - score *= nonWorkspaceUnexportedFactor - } - } - - // Apply downranking based on symbol depth. - if depth > 3 { - depth = 3 - } - score *= 1.0 - depth*depthFactor - - if store.tooLow(score) { - continue - } - - si := symbolInformation{ - score: score, - symbol: strings.Join(symbolParts, ""), - kind: sym.Kind, - uri: i.uri, - rng: sym.Range, - container: string(i.mp.PkgPath), - } - store.store(si) - } -} - -type symbolStore struct { - res [maxSymbols]symbolInformation -} - -// store inserts si into the sorted results, if si has a high enough score. -func (sc *symbolStore) store(si symbolInformation) { - if sc.tooLow(si.score) { - return - } - insertAt := sort.Search(len(sc.res), func(i int) bool { - // Sort by score, then symbol length, and finally lexically. 
- if sc.res[i].score != si.score { - return sc.res[i].score < si.score - } - if len(sc.res[i].symbol) != len(si.symbol) { - return len(sc.res[i].symbol) > len(si.symbol) - } - return sc.res[i].symbol > si.symbol - }) - if insertAt < len(sc.res)-1 { - copy(sc.res[insertAt+1:], sc.res[insertAt:len(sc.res)-1]) - } - sc.res[insertAt] = si -} - -func (sc *symbolStore) tooLow(score float64) bool { - return score <= sc.res[len(sc.res)-1].score -} - -func (sc *symbolStore) results() []protocol.SymbolInformation { - var res []protocol.SymbolInformation - for _, si := range sc.res { - if si.score <= 0 { - return res - } - res = append(res, si.asProtocolSymbolInformation()) - } - return res -} - -// symbolInformation is a cut-down version of protocol.SymbolInformation that -// allows struct values of this type to be used as map keys. -type symbolInformation struct { - score float64 - symbol string - container string - kind protocol.SymbolKind - uri protocol.DocumentURI - rng protocol.Range -} - -// asProtocolSymbolInformation converts s to a protocol.SymbolInformation value. -// -// TODO: work out how to handle tags if/when they are needed. -func (s symbolInformation) asProtocolSymbolInformation() protocol.SymbolInformation { - return protocol.SymbolInformation{ - Name: s.symbol, - Kind: s.kind, - Location: protocol.Location{ - URI: s.uri, - Range: s.rng, - }, - ContainerName: s.container, - } -} diff --git a/internal/golangorgx/gopls/mod/code_lens.go b/internal/golangorgx/gopls/mod/code_lens.go deleted file mode 100644 index ba3a279088f..00000000000 --- a/internal/golangorgx/gopls/mod/code_lens.go +++ /dev/null @@ -1,166 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package mod - -import ( - "context" - "fmt" - "os" - "path/filepath" - - "cuelang.org/go/internal/golangorgx/gopls/cache" - "cuelang.org/go/internal/golangorgx/gopls/file" - "cuelang.org/go/internal/golangorgx/gopls/golang" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/protocol/command" - "golang.org/x/mod/modfile" -) - -// LensFuncs returns the supported lensFuncs for go.mod files. -func LensFuncs() map[command.Command]golang.LensFunc { - return map[command.Command]golang.LensFunc{ - command.UpgradeDependency: upgradeLenses, - command.Tidy: tidyLens, - command.Vendor: vendorLens, - } -} - -func upgradeLenses(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]protocol.CodeLens, error) { - pm, err := snapshot.ParseMod(ctx, fh) - if err != nil || pm.File == nil { - return nil, err - } - uri := fh.URI() - reset, err := command.NewResetGoModDiagnosticsCommand("Reset go.mod diagnostics", command.ResetGoModDiagnosticsArgs{URIArg: command.URIArg{URI: uri}}) - if err != nil { - return nil, err - } - // Put the `Reset go.mod diagnostics` codelens on the module statement. - modrng, err := moduleStmtRange(fh, pm) - if err != nil { - return nil, err - } - lenses := []protocol.CodeLens{{Range: modrng, Command: &reset}} - if len(pm.File.Require) == 0 { - // Nothing to upgrade. 
- return lenses, nil - } - var requires []string - for _, req := range pm.File.Require { - requires = append(requires, req.Mod.Path) - } - checkUpgrade, err := command.NewCheckUpgradesCommand("Check for upgrades", command.CheckUpgradesArgs{ - URI: uri, - Modules: requires, - }) - if err != nil { - return nil, err - } - upgradeTransitive, err := command.NewUpgradeDependencyCommand("Upgrade transitive dependencies", command.DependencyArgs{ - URI: uri, - AddRequire: false, - GoCmdArgs: []string{"-d", "-u", "-t", "./..."}, - }) - if err != nil { - return nil, err - } - upgradeDirect, err := command.NewUpgradeDependencyCommand("Upgrade direct dependencies", command.DependencyArgs{ - URI: uri, - AddRequire: false, - GoCmdArgs: append([]string{"-d"}, requires...), - }) - if err != nil { - return nil, err - } - - // Put the upgrade code lenses above the first require block or statement. - rng, err := firstRequireRange(fh, pm) - if err != nil { - return nil, err - } - - return append(lenses, []protocol.CodeLens{ - {Range: rng, Command: &checkUpgrade}, - {Range: rng, Command: &upgradeTransitive}, - {Range: rng, Command: &upgradeDirect}, - }...), nil -} - -func tidyLens(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]protocol.CodeLens, error) { - pm, err := snapshot.ParseMod(ctx, fh) - if err != nil || pm.File == nil { - return nil, err - } - uri := fh.URI() - cmd, err := command.NewTidyCommand("Run go mod tidy", command.URIArgs{URIs: []protocol.DocumentURI{uri}}) - if err != nil { - return nil, err - } - rng, err := moduleStmtRange(fh, pm) - if err != nil { - return nil, err - } - return []protocol.CodeLens{{ - Range: rng, - Command: &cmd, - }}, nil -} - -func vendorLens(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]protocol.CodeLens, error) { - pm, err := snapshot.ParseMod(ctx, fh) - if err != nil || pm.File == nil { - return nil, err - } - if len(pm.File.Require) == 0 { - // Nothing to vendor. - return nil, nil - } - rng, err := moduleStmtRange(fh, pm) - if err != nil { - return nil, err - } - title := "Create vendor directory" - uri := fh.URI() - cmd, err := command.NewVendorCommand(title, command.URIArg{URI: uri}) - if err != nil { - return nil, err - } - // Change the message depending on whether or not the module already has a - // vendor directory. - vendorDir := filepath.Join(filepath.Dir(fh.URI().Path()), "vendor") - if info, _ := os.Stat(vendorDir); info != nil && info.IsDir() { - title = "Sync vendor directory" - } - return []protocol.CodeLens{{Range: rng, Command: &cmd}}, nil -} - -func moduleStmtRange(fh file.Handle, pm *cache.ParsedModule) (protocol.Range, error) { - if pm.File == nil || pm.File.Module == nil || pm.File.Module.Syntax == nil { - return protocol.Range{}, fmt.Errorf("no module statement in %s", fh.URI()) - } - syntax := pm.File.Module.Syntax - return pm.Mapper.OffsetRange(syntax.Start.Byte, syntax.End.Byte) -} - -// firstRequireRange returns the range for the first "require" in the given -// go.mod file. This is either a require block or an individual require line. 
-func firstRequireRange(fh file.Handle, pm *cache.ParsedModule) (protocol.Range, error) { - if len(pm.File.Require) == 0 { - return protocol.Range{}, fmt.Errorf("no requires in the file %s", fh.URI()) - } - var start, end modfile.Position - for _, stmt := range pm.File.Syntax.Stmt { - if b, ok := stmt.(*modfile.LineBlock); ok && len(b.Token) == 1 && b.Token[0] == "require" { - start, end = b.Span() - break - } - } - - firstRequire := pm.File.Require[0].Syntax - if start.Byte == 0 || firstRequire.Start.Byte < start.Byte { - start, end = firstRequire.Start, firstRequire.End - } - return pm.Mapper.OffsetRange(start.Byte, end.Byte) -} diff --git a/internal/golangorgx/gopls/mod/diagnostics.go b/internal/golangorgx/gopls/mod/diagnostics.go deleted file mode 100644 index 3a58221c0d4..00000000000 --- a/internal/golangorgx/gopls/mod/diagnostics.go +++ /dev/null @@ -1,245 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package mod provides core features related to go.mod file -// handling for use by Go editors and tools. -package mod - -import ( - "context" - "fmt" - "runtime" - "sort" - "strings" - "sync" - - "cuelang.org/go/internal/golangorgx/gopls/cache" - "cuelang.org/go/internal/golangorgx/gopls/file" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/protocol/command" - "cuelang.org/go/internal/golangorgx/tools/event" - "golang.org/x/mod/modfile" - "golang.org/x/mod/semver" - "golang.org/x/sync/errgroup" -) - -// ParseDiagnostics returns diagnostics from parsing the go.mod files in the workspace. -func ParseDiagnostics(ctx context.Context, snapshot *cache.Snapshot) (map[protocol.DocumentURI][]*cache.Diagnostic, error) { - ctx, done := event.Start(ctx, "mod.Diagnostics", snapshot.Labels()...) - defer done() - - return collectDiagnostics(ctx, snapshot, ModParseDiagnostics) -} - -// Diagnostics returns diagnostics from running go mod tidy. -func TidyDiagnostics(ctx context.Context, snapshot *cache.Snapshot) (map[protocol.DocumentURI][]*cache.Diagnostic, error) { - ctx, done := event.Start(ctx, "mod.Diagnostics", snapshot.Labels()...) - defer done() - - return collectDiagnostics(ctx, snapshot, ModTidyDiagnostics) -} - -// UpgradeDiagnostics returns upgrade diagnostics for the modules in the -// workspace with known upgrades. -func UpgradeDiagnostics(ctx context.Context, snapshot *cache.Snapshot) (map[protocol.DocumentURI][]*cache.Diagnostic, error) { - ctx, done := event.Start(ctx, "mod.UpgradeDiagnostics", snapshot.Labels()...) 
- defer done() - - return collectDiagnostics(ctx, snapshot, ModUpgradeDiagnostics) -} - -func collectDiagnostics(ctx context.Context, snapshot *cache.Snapshot, diagFn func(context.Context, *cache.Snapshot, file.Handle) ([]*cache.Diagnostic, error)) (map[protocol.DocumentURI][]*cache.Diagnostic, error) { - g, ctx := errgroup.WithContext(ctx) - cpulimit := runtime.GOMAXPROCS(0) - g.SetLimit(cpulimit) - - var mu sync.Mutex - reports := make(map[protocol.DocumentURI][]*cache.Diagnostic) - - for _, uri := range snapshot.View().ModFiles() { - uri := uri - g.Go(func() error { - fh, err := snapshot.ReadFile(ctx, uri) - if err != nil { - return err - } - diagnostics, err := diagFn(ctx, snapshot, fh) - if err != nil { - return err - } - for _, d := range diagnostics { - mu.Lock() - reports[d.URI] = append(reports[fh.URI()], d) - mu.Unlock() - } - return nil - }) - } - - if err := g.Wait(); err != nil { - return nil, err - } - return reports, nil -} - -// ModParseDiagnostics reports diagnostics from parsing the mod file. -func ModParseDiagnostics(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) (diagnostics []*cache.Diagnostic, err error) { - pm, err := snapshot.ParseMod(ctx, fh) - if err != nil { - if pm == nil || len(pm.ParseErrors) == 0 { - return nil, err - } - return pm.ParseErrors, nil - } - return nil, nil -} - -// ModTidyDiagnostics reports diagnostics from running go mod tidy. -func ModTidyDiagnostics(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]*cache.Diagnostic, error) { - pm, err := snapshot.ParseMod(ctx, fh) // memoized - if err != nil { - return nil, nil // errors reported by ModDiagnostics above - } - - tidied, err := snapshot.ModTidy(ctx, pm) - if err != nil { - if err != cache.ErrNoModOnDisk { - // TODO(rfindley): the check for ErrNoModOnDisk was historically determined - // to be benign, but may date back to the time when the Go command did not - // have overlay support. - // - // See if we can pass the overlay to the Go command, and eliminate this guard.. - event.Error(ctx, fmt.Sprintf("tidy: diagnosing %s", pm.URI), err) - } - return nil, nil - } - return tidied.Diagnostics, nil -} - -// ModUpgradeDiagnostics adds upgrade quick fixes for individual modules if the upgrades -// are recorded in the view. -func ModUpgradeDiagnostics(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) (upgradeDiagnostics []*cache.Diagnostic, err error) { - pm, err := snapshot.ParseMod(ctx, fh) - if err != nil { - // Don't return an error if there are parse error diagnostics to be shown, but also do not - // continue since we won't be able to show the upgrade diagnostics. - if pm != nil && len(pm.ParseErrors) != 0 { - return nil, nil - } - return nil, err - } - - upgrades := snapshot.ModuleUpgrades(fh.URI()) - for _, req := range pm.File.Require { - ver, ok := upgrades[req.Mod.Path] - if !ok || req.Mod.Version == ver { - continue - } - rng, err := pm.Mapper.OffsetRange(req.Syntax.Start.Byte, req.Syntax.End.Byte) - if err != nil { - return nil, err - } - // Upgrade to the exact version we offer the user, not the most recent. 
- title := fmt.Sprintf("%s%v", upgradeCodeActionPrefix, ver) - cmd, err := command.NewUpgradeDependencyCommand(title, command.DependencyArgs{ - URI: fh.URI(), - AddRequire: false, - GoCmdArgs: []string{req.Mod.Path + "@" + ver}, - }) - if err != nil { - return nil, err - } - upgradeDiagnostics = append(upgradeDiagnostics, &cache.Diagnostic{ - URI: fh.URI(), - Range: rng, - Severity: protocol.SeverityInformation, - Source: cache.UpgradeNotification, - Message: fmt.Sprintf("%v can be upgraded", req.Mod.Path), - SuggestedFixes: []cache.SuggestedFix{cache.SuggestedFixFromCommand(cmd, protocol.QuickFix)}, - }) - } - - return upgradeDiagnostics, nil -} - -const upgradeCodeActionPrefix = "Upgrade to " - -func sortedKeys(m map[string]bool) []string { - ret := make([]string, 0, len(m)) - for k := range m { - ret = append(ret, k) - } - sort.Strings(ret) - return ret -} - -// href returns the url for the vulnerability information. -// Eventually we should retrieve the url embedded in the osv.Entry. -// While vuln.go.dev is under development, this always returns -// the page in pkg.go.dev. -func href(vulnID string) string { - return fmt.Sprintf("https://pkg.go.dev/vuln/%s", vulnID) -} - -func getUpgradeCodeAction(fh file.Handle, req *modfile.Require, version string) (protocol.Command, error) { - cmd, err := command.NewUpgradeDependencyCommand(upgradeTitle(version), command.DependencyArgs{ - URI: fh.URI(), - AddRequire: false, - GoCmdArgs: []string{req.Mod.Path + "@" + version}, - }) - if err != nil { - return protocol.Command{}, err - } - return cmd, nil -} - -func upgradeTitle(fixedVersion string) string { - title := fmt.Sprintf("%s%v", upgradeCodeActionPrefix, fixedVersion) - return title -} - -// SelectUpgradeCodeActions takes a list of code actions for a required module -// and returns a more selective list of upgrade code actions, -// where the code actions have been deduped. Code actions unrelated to upgrade -// are deduplicated by the name. 
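
// Illustrative sketch, not part of the deleted file: the selection rule the
// function below applies to "Upgrade to ..." actions, shown in isolation.
// Among versioned titles the single highest semantic version is kept, with
// "latest" handled separately. It uses golang.org/x/mod/semver; the titles
// are invented sample data.
package main

import (
	"fmt"
	"strings"

	"golang.org/x/mod/semver"
)

func main() {
	titles := []string{
		"Upgrade to v1.2.0",
		"Upgrade to latest",
		"Upgrade to v1.10.3",
		"Upgrade to v1.9.9",
	}

	const prefix = "Upgrade to "
	var latest, best string
	for _, t := range titles {
		v := strings.TrimPrefix(t, prefix)
		switch {
		case v == "latest":
			latest = t
		case best == "" || semver.Compare(v, strings.TrimPrefix(best, prefix)) > 0:
			best = t
		}
	}
	fmt.Println("versioned upgrade:", best)   // Upgrade to v1.10.3
	fmt.Println("latest upgrade:   ", latest) // Upgrade to latest
}
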
-func SelectUpgradeCodeActions(actions []protocol.CodeAction) []protocol.CodeAction { - if len(actions) <= 1 { - return actions // return early if no sorting necessary - } - var versionedUpgrade, latestUpgrade, resetAction protocol.CodeAction - var chosenVersionedUpgrade string - var selected []protocol.CodeAction - - seenTitles := make(map[string]bool) - - for _, action := range actions { - if strings.HasPrefix(action.Title, upgradeCodeActionPrefix) { - if v := getUpgradeVersion(action); v == "latest" && latestUpgrade.Title == "" { - latestUpgrade = action - } else if versionedUpgrade.Title == "" || semver.Compare(v, chosenVersionedUpgrade) > 0 { - chosenVersionedUpgrade = v - versionedUpgrade = action - } - } else if strings.HasPrefix(action.Title, "Reset govulncheck") { - resetAction = action - } else if !seenTitles[action.Command.Title] { - seenTitles[action.Command.Title] = true - selected = append(selected, action) - } - } - if versionedUpgrade.Title != "" { - selected = append(selected, versionedUpgrade) - } - if latestUpgrade.Title != "" { - selected = append(selected, latestUpgrade) - } - if resetAction.Title != "" { - selected = append(selected, resetAction) - } - return selected -} - -func getUpgradeVersion(p protocol.CodeAction) string { - return strings.TrimPrefix(p.Title, upgradeCodeActionPrefix) -} diff --git a/internal/golangorgx/gopls/mod/format.go b/internal/golangorgx/gopls/mod/format.go deleted file mode 100644 index c7362bed339..00000000000 --- a/internal/golangorgx/gopls/mod/format.go +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package mod - -import ( - "context" - - "cuelang.org/go/internal/golangorgx/gopls/cache" - "cuelang.org/go/internal/golangorgx/gopls/file" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/tools/diff" - "cuelang.org/go/internal/golangorgx/tools/event" -) - -func Format(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]protocol.TextEdit, error) { - ctx, done := event.Start(ctx, "mod.Format") - defer done() - - pm, err := snapshot.ParseMod(ctx, fh) - if err != nil { - return nil, err - } - formatted, err := pm.File.Format() - if err != nil { - return nil, err - } - // Calculate the edits to be made due to the change. - diffs := diff.Bytes(pm.Mapper.Content, formatted) - return protocol.EditsFromDiffEdits(pm.Mapper, diffs) -} diff --git a/internal/golangorgx/gopls/mod/hover.go b/internal/golangorgx/gopls/mod/hover.go deleted file mode 100644 index db6fcf9ff9a..00000000000 --- a/internal/golangorgx/gopls/mod/hover.go +++ /dev/null @@ -1,177 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package mod - -import ( - "bytes" - "context" - "fmt" - "strings" - - "cuelang.org/go/internal/golangorgx/gopls/cache" - "cuelang.org/go/internal/golangorgx/gopls/file" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/settings" - "cuelang.org/go/internal/golangorgx/tools/event" - "golang.org/x/mod/modfile" -) - -func Hover(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, position protocol.Position) (*protocol.Hover, error) { - var found bool - for _, uri := range snapshot.View().ModFiles() { - if fh.URI() == uri { - found = true - break - } - } - - // We only provide hover information for the view's go.mod files. - if !found { - return nil, nil - } - - ctx, done := event.Start(ctx, "mod.Hover") - defer done() - - // Get the position of the cursor. - pm, err := snapshot.ParseMod(ctx, fh) - if err != nil { - return nil, fmt.Errorf("getting modfile handle: %w", err) - } - offset, err := pm.Mapper.PositionOffset(position) - if err != nil { - return nil, fmt.Errorf("computing cursor position: %w", err) - } - - return hoverOnRequireStatement(ctx, pm, offset, snapshot, fh) -} - -func hoverOnRequireStatement(ctx context.Context, pm *cache.ParsedModule, offset int, snapshot *cache.Snapshot, fh file.Handle) (*protocol.Hover, error) { - // Confirm that the cursor is at the position of a require statement. - var req *modfile.Require - var startOffset, endOffset int - for _, r := range pm.File.Require { - dep := []byte(r.Mod.Path) - s, e := r.Syntax.Start.Byte, r.Syntax.End.Byte - i := bytes.Index(pm.Mapper.Content[s:e], dep) - if i == -1 { - continue - } - // Shift the start position to the location of the - // dependency within the require statement. - startOffset, endOffset = s+i, e - if startOffset <= offset && offset <= endOffset { - req = r - break - } - } - // TODO(hyangah): find position for info about vulnerabilities in Go - - // The cursor position is not on a require statement. - if req == nil { - return nil, nil - } - - // Get the `go mod why` results for the given file. - why, err := snapshot.ModWhy(ctx, fh) - if err != nil { - return nil, err - } - explanation, ok := why[req.Mod.Path] - if !ok { - return nil, nil - } - - // Get the range to highlight for the hover. - // TODO(hyangah): adjust the hover range to include the version number - // to match the diagnostics' range. - rng, err := pm.Mapper.OffsetRange(startOffset, endOffset) - if err != nil { - return nil, err - } - options := snapshot.Options() - isPrivate := snapshot.IsGoPrivatePath(req.Mod.Path) - header := formatHeader(req.Mod.Path, options) - explanation = formatExplanation(explanation, req, options, isPrivate) - - return &protocol.Hover{ - Contents: protocol.MarkupContent{ - Kind: options.PreferredContentFormat, - Value: header + explanation, - }, - Range: rng, - }, nil -} - -func formatHeader(modpath string, options *settings.Options) string { - var b strings.Builder - // Write the heading as an H3. - b.WriteString("#### " + modpath) - if options.PreferredContentFormat == protocol.Markdown { - b.WriteString("\n\n") - } else { - b.WriteRune('\n') - } - return b.String() -} - -func fixedVersion(fixed string) string { - if fixed == "" { - return "No fix is available." - } - return "Fixed in " + fixed + "." 
-} - -func formatExplanation(text string, req *modfile.Require, options *settings.Options, isPrivate bool) string { - text = strings.TrimSuffix(text, "\n") - splt := strings.Split(text, "\n") - length := len(splt) - - var b strings.Builder - - // If the explanation is 2 lines, then it is of the form: - // # golang.org/x/text/encoding - // (main module does not need package golang.org/x/text/encoding) - if length == 2 { - b.WriteString(splt[1]) - return b.String() - } - - imp := splt[length-1] // import path - reference := imp - // See golang/go#36998: don't link to modules matching GOPRIVATE. - if !isPrivate && options.PreferredContentFormat == protocol.Markdown { - target := imp - if strings.ToLower(options.LinkTarget) == "pkg.go.dev" { - target = strings.Replace(target, req.Mod.Path, req.Mod.String(), 1) - } - reference = fmt.Sprintf("[%s](%s)", imp, cache.BuildLink(options.LinkTarget, target, "")) - } - b.WriteString("This module is necessary because " + reference + " is imported in") - - // If the explanation is 3 lines, then it is of the form: - // # golang.org/x/tools - // modtest - // golang.org/x/tools/go/packages - if length == 3 { - msg := fmt.Sprintf(" `%s`.", splt[1]) - b.WriteString(msg) - return b.String() - } - - // If the explanation is more than 3 lines, then it is of the form: - // # golang.org/x/text/language - // rsc.io/quote - // rsc.io/sampler - // golang.org/x/text/language - b.WriteString(":\n```text") - dash := "" - for _, imp := range splt[1 : length-1] { - dash += "-" - b.WriteString("\n" + dash + " " + imp) - } - b.WriteString("\n```") - return b.String() -} diff --git a/internal/golangorgx/gopls/mod/inlayhint.go b/internal/golangorgx/gopls/mod/inlayhint.go deleted file mode 100644 index 214b70d74ab..00000000000 --- a/internal/golangorgx/gopls/mod/inlayhint.go +++ /dev/null @@ -1,104 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. -package mod - -import ( - "context" - "fmt" - - "cuelang.org/go/internal/golangorgx/gopls/cache" - "cuelang.org/go/internal/golangorgx/gopls/file" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "golang.org/x/mod/modfile" -) - -func InlayHint(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, _ protocol.Range) ([]protocol.InlayHint, error) { - // Inlay hints are enabled if the client supports them. - pm, err := snapshot.ParseMod(ctx, fh) - if err != nil { - return nil, err - } - - // Compare the version of the module used in the snapshot's - // metadata (i.e. the solution to the MVS constraints computed - // by go list) with the version requested by the module, in - // both cases, taking replaces into account. Produce an - // InlayHint when the version of the module is not the one - // used. 
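
// Illustrative sketch, not part of the deleted file: the comparison described
// in the comment above, outside the gopls plumbing. The version requested in
// go.mod (after applying any replace) is compared with the version actually
// selected by the build; here the `selected` map is an invented stand-in for
// the `go list` / MVS result that gopls reads from its metadata.
package main

import (
	"fmt"
	"log"

	"golang.org/x/mod/modfile"
)

func main() {
	gomod := []byte(`module example.com/demo

go 1.21

require golang.org/x/text v0.13.0

replace golang.org/x/text => golang.org/x/text v0.14.0
`)
	f, err := modfile.Parse("go.mod", gomod, nil)
	if err != nil {
		log.Fatal(err)
	}

	// Versions actually selected by the build (stand-in data).
	selected := map[string]string{"golang.org/x/text": "v0.14.0"}

	replaced := make(map[string]string)
	for _, r := range f.Replace {
		replaced[r.Old.Path] = r.New.Version
	}

	for _, req := range f.Require {
		want := req.Mod.Version
		if v, ok := replaced[req.Mod.Path]; ok {
			want = v // the replace directive wins over the require line
		}
		if got := selected[req.Mod.Path]; got != want {
			fmt.Printf("%s: go.mod requests %s, build selects %s (inlay hint candidate)\n",
				req.Mod.Path, want, got)
		} else {
			fmt.Printf("%s: go.mod and build agree on %s\n", req.Mod.Path, want)
		}
	}
}
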
- - replaces := make(map[string]*modfile.Replace) - for _, x := range pm.File.Replace { - replaces[x.Old.Path] = x - } - - requires := make(map[string]*modfile.Require) - for _, x := range pm.File.Require { - requires[x.Mod.Path] = x - } - - am, err := snapshot.AllMetadata(ctx) - if err != nil { - return nil, err - } - - var ans []protocol.InlayHint - seen := make(map[string]bool) - for _, meta := range am { - if meta.Module == nil || seen[meta.Module.Path] { - continue - } - seen[meta.Module.Path] = true - metaVersion := meta.Module.Version - if meta.Module.Replace != nil { - metaVersion = meta.Module.Replace.Version - } - // These versions can be blank, as in gopls/go.mod's local replace - if oldrepl, ok := replaces[meta.Module.Path]; ok && oldrepl.New.Version != metaVersion { - ih := genHint(oldrepl.Syntax, oldrepl.New.Version, metaVersion, pm.Mapper) - if ih != nil { - ans = append(ans, *ih) - } - } else if oldreq, ok := requires[meta.Module.Path]; ok && oldreq.Mod.Version != metaVersion { - // maybe it was replaced: - if _, ok := replaces[meta.Module.Path]; ok { - continue - } - ih := genHint(oldreq.Syntax, oldreq.Mod.Version, metaVersion, pm.Mapper) - if ih != nil { - ans = append(ans, *ih) - } - } - } - return ans, nil -} - -func genHint(mline *modfile.Line, oldVersion, newVersion string, m *protocol.Mapper) *protocol.InlayHint { - x := mline.End.Byte // the parser has removed trailing whitespace and comments (see modfile_test.go) - x -= len(mline.Token[len(mline.Token)-1]) - line, err := m.OffsetPosition(x) - if err != nil { - return nil - } - part := protocol.InlayHintLabelPart{ - Value: newVersion, - Tooltip: &protocol.OrPTooltipPLabel{ - Value: fmt.Sprintf("The build selects version %s rather than go.mod's version %s.", newVersion, oldVersion), - }, - } - rng, err := m.OffsetRange(x, mline.End.Byte) - if err != nil { - return nil - } - te := protocol.TextEdit{ - Range: rng, - NewText: newVersion, - } - return &protocol.InlayHint{ - Position: line, - Label: []protocol.InlayHintLabelPart{part}, - Kind: protocol.Parameter, - PaddingRight: true, - TextEdits: []protocol.TextEdit{te}, - } -} diff --git a/internal/golangorgx/gopls/server/call_hierarchy.go b/internal/golangorgx/gopls/server/call_hierarchy.go deleted file mode 100644 index 2d97008bfa9..00000000000 --- a/internal/golangorgx/gopls/server/call_hierarchy.go +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package server - -import ( - "context" - - "cuelang.org/go/internal/golangorgx/gopls/file" - "cuelang.org/go/internal/golangorgx/gopls/golang" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/tools/event" -) - -func (s *server) PrepareCallHierarchy(ctx context.Context, params *protocol.CallHierarchyPrepareParams) ([]protocol.CallHierarchyItem, error) { - ctx, done := event.Start(ctx, "lsp.Server.prepareCallHierarchy") - defer done() - - fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) - if err != nil { - return nil, err - } - defer release() - if snapshot.FileKind(fh) != file.Go { - return nil, nil // empty result - } - return golang.PrepareCallHierarchy(ctx, snapshot, fh, params.Position) -} - -func (s *server) IncomingCalls(ctx context.Context, params *protocol.CallHierarchyIncomingCallsParams) ([]protocol.CallHierarchyIncomingCall, error) { - ctx, done := event.Start(ctx, "lsp.Server.incomingCalls") - defer done() - - fh, snapshot, release, err := s.fileOf(ctx, params.Item.URI) - if err != nil { - return nil, err - } - defer release() - if snapshot.FileKind(fh) != file.Go { - return nil, nil // empty result - } - return golang.IncomingCalls(ctx, snapshot, fh, params.Item.Range.Start) -} - -func (s *server) OutgoingCalls(ctx context.Context, params *protocol.CallHierarchyOutgoingCallsParams) ([]protocol.CallHierarchyOutgoingCall, error) { - ctx, done := event.Start(ctx, "lsp.Server.outgoingCalls") - defer done() - - fh, snapshot, release, err := s.fileOf(ctx, params.Item.URI) - if err != nil { - return nil, err - } - defer release() - if snapshot.FileKind(fh) != file.Go { - return nil, nil // empty result - } - return golang.OutgoingCalls(ctx, snapshot, fh, params.Item.Range.Start) -} diff --git a/internal/golangorgx/gopls/server/code_action.go b/internal/golangorgx/gopls/server/code_action.go index 4279edc2b7c..d7b6d0303f5 100644 --- a/internal/golangorgx/gopls/server/code_action.go +++ b/internal/golangorgx/gopls/server/code_action.go @@ -5,269 +5,10 @@ package server import ( - "context" - "fmt" - "sort" - "strings" - - "cuelang.org/go/internal/golangorgx/gopls/cache" "cuelang.org/go/internal/golangorgx/gopls/file" - "cuelang.org/go/internal/golangorgx/gopls/golang" - "cuelang.org/go/internal/golangorgx/gopls/mod" "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/protocol/command" - "cuelang.org/go/internal/golangorgx/tools/event" ) -func (s *server) CodeAction(ctx context.Context, params *protocol.CodeActionParams) ([]protocol.CodeAction, error) { - ctx, done := event.Start(ctx, "lsp.Server.codeAction") - defer done() - - fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) - if err != nil { - return nil, err - } - defer release() - uri := fh.URI() - - // Determine the supported actions for this file kind. - kind := snapshot.FileKind(fh) - supportedCodeActions, ok := snapshot.Options().SupportedCodeActions[kind] - if !ok { - return nil, fmt.Errorf("no supported code actions for %v file kind", kind) - } - if len(supportedCodeActions) == 0 { - return nil, nil // not an error if there are none supported - } - - // The Only field of the context specifies which code actions the client wants. - // If Only is empty, assume that the client wants all of the non-explicit code actions. - var want map[protocol.CodeActionKind]bool - { - // Explicit Code Actions are opt-in and shouldn't be returned to the client unless - // requested using Only. 
- // TODO: Add other CodeLenses such as GoGenerate, RegenerateCgo, etc.. - explicit := map[protocol.CodeActionKind]bool{ - protocol.GoTest: true, - } - - if len(params.Context.Only) == 0 { - want = supportedCodeActions - } else { - want = make(map[protocol.CodeActionKind]bool) - for _, only := range params.Context.Only { - for k, v := range supportedCodeActions { - if only == k || strings.HasPrefix(string(k), string(only)+".") { - want[k] = want[k] || v - } - } - want[only] = want[only] || explicit[only] - } - } - } - if len(want) == 0 { - return nil, fmt.Errorf("no supported code action to execute for %s, wanted %v", uri, params.Context.Only) - } - - switch kind { - case file.Mod: - var actions []protocol.CodeAction - - fixes, err := s.codeActionsMatchingDiagnostics(ctx, fh.URI(), snapshot, params.Context.Diagnostics, want) - if err != nil { - return nil, err - } - - // Group vulnerability fixes by their range, and select only the most - // appropriate upgrades. - // - // TODO(rfindley): can this instead be accomplished on the diagnosis side, - // so that code action handling remains uniform? - vulnFixes := make(map[protocol.Range][]protocol.CodeAction) - searchFixes: - for _, fix := range fixes { - for _, diag := range fix.Diagnostics { - if diag.Source == string(cache.Govulncheck) || diag.Source == string(cache.Vulncheck) { - vulnFixes[diag.Range] = append(vulnFixes[diag.Range], fix) - continue searchFixes - } - } - actions = append(actions, fix) - } - - for _, fixes := range vulnFixes { - fixes = mod.SelectUpgradeCodeActions(fixes) - actions = append(actions, fixes...) - } - - return actions, nil - - case file.Go: - // Don't suggest fixes for generated files, since they are generally - // not useful and some editors may apply them automatically on save. - if golang.IsGenerated(ctx, snapshot, uri) { - return nil, nil - } - - actions, err := s.codeActionsMatchingDiagnostics(ctx, uri, snapshot, params.Context.Diagnostics, want) - if err != nil { - return nil, err - } - - moreActions, err := golang.CodeActions(ctx, snapshot, fh, params.Range, params.Context.Diagnostics, want) - if err != nil { - return nil, err - } - actions = append(actions, moreActions...) - - return actions, nil - - default: - // Unsupported file kind for a code action. - return nil, nil - } -} - -// ResolveCodeAction resolves missing Edit information (that is, computes the -// details of the necessary patch) in the given code action using the provided -// Data field of the CodeAction, which should contain the raw json of a protocol.Command. -// -// This should be called by the client before applying code actions, when the -// client has code action resolve support. -// -// This feature allows capable clients to preview and selectively apply the diff -// instead of applying the whole thing unconditionally through workspace/applyEdit. -func (s *server) ResolveCodeAction(ctx context.Context, ca *protocol.CodeAction) (*protocol.CodeAction, error) { - ctx, done := event.Start(ctx, "lsp.Server.resolveCodeAction") - defer done() - - // Only resolve the code action if there is Data provided. 
- var cmd protocol.Command - if ca.Data != nil { - if err := protocol.UnmarshalJSON(*ca.Data, &cmd); err != nil { - return nil, err - } - } - if cmd.Command != "" { - params := &protocol.ExecuteCommandParams{ - Command: cmd.Command, - Arguments: cmd.Arguments, - } - - handler := &commandHandler{ - s: s, - params: params, - } - edit, err := command.Dispatch(ctx, params, handler) - if err != nil { - - return nil, err - } - var ok bool - if ca.Edit, ok = edit.(*protocol.WorkspaceEdit); !ok { - return nil, fmt.Errorf("unable to resolve code action %q", ca.Title) - } - } - return ca, nil -} - -// codeActionsMatchingDiagnostics fetches code actions for the provided -// diagnostics, by first attempting to unmarshal code actions directly from the -// bundled protocol.Diagnostic.Data field, and failing that by falling back on -// fetching a matching Diagnostic from the set of stored diagnostics for -// this file. -func (s *server) codeActionsMatchingDiagnostics(ctx context.Context, uri protocol.DocumentURI, snapshot *cache.Snapshot, pds []protocol.Diagnostic, want map[protocol.CodeActionKind]bool) ([]protocol.CodeAction, error) { - var actions []protocol.CodeAction - var unbundled []protocol.Diagnostic // diagnostics without bundled code actions in their Data field - for _, pd := range pds { - bundled := cache.BundledQuickFixes(pd) - if len(bundled) > 0 { - for _, fix := range bundled { - if want[fix.Kind] { - actions = append(actions, fix) - } - } - } else { - // No bundled actions: keep searching for a match. - unbundled = append(unbundled, pd) - } - } - - for _, pd := range unbundled { - for _, sd := range s.findMatchingDiagnostics(uri, pd) { - diagActions, err := codeActionsForDiagnostic(ctx, snapshot, sd, &pd, want) - if err != nil { - return nil, err - } - actions = append(actions, diagActions...) - } - } - return actions, nil -} - -func codeActionsForDiagnostic(ctx context.Context, snapshot *cache.Snapshot, sd *cache.Diagnostic, pd *protocol.Diagnostic, want map[protocol.CodeActionKind]bool) ([]protocol.CodeAction, error) { - var actions []protocol.CodeAction - for _, fix := range sd.SuggestedFixes { - if !want[fix.ActionKind] { - continue - } - changes := []protocol.DocumentChanges{} // must be a slice - for uri, edits := range fix.Edits { - fh, err := snapshot.ReadFile(ctx, uri) - if err != nil { - return nil, err - } - changes = append(changes, documentChanges(fh, edits)...) 
- } - actions = append(actions, protocol.CodeAction{ - Title: fix.Title, - Kind: fix.ActionKind, - Edit: &protocol.WorkspaceEdit{ - DocumentChanges: changes, - }, - Command: fix.Command, - Diagnostics: []protocol.Diagnostic{*pd}, - }) - } - return actions, nil -} - -func (s *server) findMatchingDiagnostics(uri protocol.DocumentURI, pd protocol.Diagnostic) []*cache.Diagnostic { - s.diagnosticsMu.Lock() - defer s.diagnosticsMu.Unlock() - - var sds []*cache.Diagnostic - for _, viewDiags := range s.diagnostics[uri].byView { - for _, sd := range viewDiags.diagnostics { - sameDiagnostic := (pd.Message == strings.TrimSpace(sd.Message) && // extra space may have been trimmed when converting to protocol.Diagnostic - protocol.CompareRange(pd.Range, sd.Range) == 0 && - pd.Source == string(sd.Source)) - - if sameDiagnostic { - sds = append(sds, sd) - } - } - } - return sds -} - -func (s *server) getSupportedCodeActions() []protocol.CodeActionKind { - allCodeActionKinds := make(map[protocol.CodeActionKind]struct{}) - for _, kinds := range s.Options().SupportedCodeActions { - for kind := range kinds { - allCodeActionKinds[kind] = struct{}{} - } - } - var result []protocol.CodeActionKind - for kind := range allCodeActionKinds { - result = append(result, kind) - } - sort.Slice(result, func(i, j int) bool { - return result[i] < result[j] - }) - return result -} - type unit = struct{} func documentChanges(fh file.Handle, edits []protocol.TextEdit) []protocol.DocumentChanges { diff --git a/internal/golangorgx/gopls/server/code_lens.go b/internal/golangorgx/gopls/server/code_lens.go deleted file mode 100644 index 76c3a4d2b04..00000000000 --- a/internal/golangorgx/gopls/server/code_lens.go +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package server - -import ( - "context" - "fmt" - "sort" - - "cuelang.org/go/internal/golangorgx/gopls/file" - "cuelang.org/go/internal/golangorgx/gopls/golang" - "cuelang.org/go/internal/golangorgx/gopls/mod" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/protocol/command" - "cuelang.org/go/internal/golangorgx/tools/event" - "cuelang.org/go/internal/golangorgx/tools/event/tag" -) - -func (s *server) CodeLens(ctx context.Context, params *protocol.CodeLensParams) ([]protocol.CodeLens, error) { - ctx, done := event.Start(ctx, "lsp.Server.codeLens", tag.URI.Of(params.TextDocument.URI)) - defer done() - - fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) - if err != nil { - return nil, err - } - defer release() - - var lenses map[command.Command]golang.LensFunc - switch snapshot.FileKind(fh) { - case file.Mod: - lenses = mod.LensFuncs() - case file.Go: - lenses = golang.LensFuncs() - default: - // Unsupported file kind for a code lens. - return nil, nil - } - var result []protocol.CodeLens - for cmd, lf := range lenses { - if !snapshot.Options().Codelenses[string(cmd)] { - continue - } - added, err := lf(ctx, snapshot, fh) - // Code lens is called on every keystroke, so we should just operate in - // a best-effort mode, ignoring errors. - if err != nil { - event.Error(ctx, fmt.Sprintf("code lens %s failed", cmd), err) - continue - } - result = append(result, added...) 
- } - sort.Slice(result, func(i, j int) bool { - a, b := result[i], result[j] - if cmp := protocol.CompareRange(a.Range, b.Range); cmp != 0 { - return cmp < 0 - } - return a.Command.Command < b.Command.Command - }) - return result, nil -} diff --git a/internal/golangorgx/gopls/server/command.go b/internal/golangorgx/gopls/server/command.go index 48e0319be27..d15a6abea0b 100644 --- a/internal/golangorgx/gopls/server/command.go +++ b/internal/golangorgx/gopls/server/command.go @@ -5,1100 +5,12 @@ package server import ( - "bytes" "context" - "encoding/json" - "errors" - "fmt" - "io" - "os" - "path/filepath" - "regexp" - "runtime" - "runtime/pprof" - "sort" - "strings" - "sync" - "cuelang.org/go/internal/golangorgx/gopls/cache" - "cuelang.org/go/internal/golangorgx/gopls/cache/metadata" - "cuelang.org/go/internal/golangorgx/gopls/cache/parsego" - "cuelang.org/go/internal/golangorgx/gopls/debug" - "cuelang.org/go/internal/golangorgx/gopls/file" - "cuelang.org/go/internal/golangorgx/gopls/golang" - "cuelang.org/go/internal/golangorgx/gopls/progress" "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/protocol/command" - "cuelang.org/go/internal/golangorgx/gopls/telemetry" - "cuelang.org/go/internal/golangorgx/gopls/util/bug" - "cuelang.org/go/internal/golangorgx/tools/diff" "cuelang.org/go/internal/golangorgx/tools/event" - "cuelang.org/go/internal/golangorgx/tools/gocommand" - "cuelang.org/go/internal/golangorgx/tools/tokeninternal" - "cuelang.org/go/internal/golangorgx/tools/xcontext" - "golang.org/x/mod/modfile" - "golang.org/x/tools/go/ast/astutil" ) -func (s *server) ExecuteCommand(ctx context.Context, params *protocol.ExecuteCommandParams) (interface{}, error) { - ctx, done := event.Start(ctx, "lsp.Server.executeCommand") - defer done() - - var found bool - for _, name := range s.Options().SupportedCommands { - if name == params.Command { - found = true - break - } - } - if !found { - return nil, fmt.Errorf("%s is not a supported command", params.Command) - } - - handler := &commandHandler{ - s: s, - params: params, - } - return command.Dispatch(ctx, params, handler) -} - -type commandHandler struct { - s *server - params *protocol.ExecuteCommandParams -} - -func (h *commandHandler) MaybePromptForTelemetry(ctx context.Context) error { - go h.s.maybePromptForTelemetry(ctx, true) - return nil -} - -func (*commandHandler) AddTelemetryCounters(_ context.Context, args command.AddTelemetryCountersArgs) error { - if len(args.Names) != len(args.Values) { - return fmt.Errorf("Names and Values must have the same length") - } - // invalid counter update requests will be silently dropped. (no audience) - telemetry.AddForwardedCounters(args.Names, args.Values) - return nil -} - -// commandConfig configures common command set-up and execution. -type commandConfig struct { - // TODO(adonovan): whether a command is synchronous or - // asynchronous is part of the server interface contract, not - // a mere implementation detail of the handler. - // Export a (command.Command).IsAsync() property so that - // clients can tell. (The tricky part is ensuring the handler - // remains consistent with the command.Command metadata, as at - // the point were we read the 'async' field below, we no - // longer know that command.Command.) - - async bool // whether to run the command asynchronously. Async commands can only return errors. - requireSave bool // whether all files must be saved for the command to work - progress string // title to use for progress reporting. 
If empty, no progress will be reported. - forView string // view to resolve to a snapshot; incompatible with forURI - forURI protocol.DocumentURI // URI to resolve to a snapshot. If unset, snapshot will be nil. -} - -// commandDeps is evaluated from a commandConfig. Note that not all fields may -// be populated, depending on which configuration is set. See comments in-line -// for details. -type commandDeps struct { - snapshot *cache.Snapshot // present if cfg.forURI was set - fh file.Handle // present if cfg.forURI was set - work *progress.WorkDone // present cfg.progress was set -} - -type commandFunc func(context.Context, commandDeps) error - -// These strings are reported as the final WorkDoneProgressEnd message -// for each workspace/executeCommand request. -const ( - CommandCanceled = "canceled" - CommandFailed = "failed" - CommandCompleted = "completed" -) - -// run performs command setup for command execution, and invokes the given run -// function. If cfg.async is set, run executes the given func in a separate -// goroutine, and returns as soon as setup is complete and the goroutine is -// scheduled. -// -// Invariant: if the resulting error is non-nil, the given run func will -// (eventually) be executed exactly once. -func (c *commandHandler) run(ctx context.Context, cfg commandConfig, run commandFunc) (err error) { - if cfg.requireSave { - var unsaved []string - for _, overlay := range c.s.session.Overlays() { - if !overlay.SameContentsOnDisk() { - unsaved = append(unsaved, overlay.URI().Path()) - } - } - if len(unsaved) > 0 { - return fmt.Errorf("All files must be saved first (unsaved: %v).", unsaved) - } - } - var deps commandDeps - var release func() - if cfg.forURI != "" && cfg.forView != "" { - return bug.Errorf("internal error: forURI=%q, forView=%q", cfg.forURI, cfg.forView) - } - if cfg.forURI != "" { - deps.fh, deps.snapshot, release, err = c.s.fileOf(ctx, cfg.forURI) - if err != nil { - return err - } - - } else if cfg.forView != "" { - view, err := c.s.session.View(cfg.forView) - if err != nil { - return err - } - deps.snapshot, release, err = view.Snapshot() - if err != nil { - return err - } - - } else { - release = func() {} - } - // Inv: release() must be called exactly once after this point. - // In the async case, runcmd may outlive run(). - - ctx, cancel := context.WithCancel(xcontext.Detach(ctx)) - if cfg.progress != "" { - deps.work = c.s.progress.Start(ctx, cfg.progress, "Running...", c.params.WorkDoneToken, cancel) - } - runcmd := func() error { - defer release() - defer cancel() - err := run(ctx, deps) - if deps.work != nil { - switch { - case errors.Is(err, context.Canceled): - deps.work.End(ctx, CommandCanceled) - case err != nil: - event.Error(ctx, "command error", err) - deps.work.End(ctx, CommandFailed) - default: - deps.work.End(ctx, CommandCompleted) - } - } - return err - } - if cfg.async { - go func() { - if err := runcmd(); err != nil { - showMessage(ctx, c.s.client, protocol.Error, err.Error()) - } - }() - return nil - } - return runcmd() -} - -func (c *commandHandler) ApplyFix(ctx context.Context, args command.ApplyFixArgs) (*protocol.WorkspaceEdit, error) { - var result *protocol.WorkspaceEdit - err := c.run(ctx, commandConfig{ - // Note: no progress here. Applying fixes should be quick. 
- forURI: args.URI, - }, func(ctx context.Context, deps commandDeps) error { - edits, err := golang.ApplyFix(ctx, args.Fix, deps.snapshot, deps.fh, args.Range) - if err != nil { - return err - } - changes := []protocol.DocumentChanges{} // must be a slice - for _, edit := range edits { - edit := edit - changes = append(changes, protocol.DocumentChanges{ - TextDocumentEdit: &edit, - }) - } - edit := protocol.WorkspaceEdit{ - DocumentChanges: changes, - } - if args.ResolveEdits { - result = &edit - return nil - } - r, err := c.s.client.ApplyEdit(ctx, &protocol.ApplyWorkspaceEditParams{ - Edit: edit, - }) - if err != nil { - return err - } - if !r.Applied { - return errors.New(r.FailureReason) - } - return nil - }) - return result, err -} - -func (c *commandHandler) RegenerateCgo(ctx context.Context, args command.URIArg) error { - return c.run(ctx, commandConfig{ - progress: "Regenerating Cgo", - }, func(ctx context.Context, _ commandDeps) error { - return c.modifyState(ctx, FromRegenerateCgo, func() (*cache.Snapshot, func(), error) { - // Resetting the view causes cgo to be regenerated via `go list`. - v, err := c.s.session.ResetView(ctx, args.URI) - if err != nil { - return nil, nil, err - } - return v.Snapshot() - }) - }) -} - -// modifyState performs an operation that modifies the snapshot state. -// -// It causes a snapshot diagnosis for the provided ModificationSource. -func (c *commandHandler) modifyState(ctx context.Context, source ModificationSource, work func() (*cache.Snapshot, func(), error)) error { - var wg sync.WaitGroup // tracks work done on behalf of this function, incl. diagnostics - wg.Add(1) - defer wg.Done() - - // Track progress on this operation for testing. - if c.s.Options().VerboseWorkDoneProgress { - work := c.s.progress.Start(ctx, DiagnosticWorkTitle(source), "Calculating file diagnostics...", nil, nil) - go func() { - wg.Wait() - work.End(ctx, "Done.") - }() - } - snapshot, release, err := work() - if err != nil { - return err - } - wg.Add(1) - go func() { - c.s.diagnoseSnapshot(snapshot, nil, 0) - release() - wg.Done() - }() - return nil -} - -func (c *commandHandler) CheckUpgrades(ctx context.Context, args command.CheckUpgradesArgs) error { - return c.run(ctx, commandConfig{ - forURI: args.URI, - progress: "Checking for upgrades", - }, func(ctx context.Context, deps commandDeps) error { - return c.modifyState(ctx, FromCheckUpgrades, func() (*cache.Snapshot, func(), error) { - upgrades, err := c.s.getUpgrades(ctx, deps.snapshot, args.URI, args.Modules) - if err != nil { - return nil, nil, err - } - return c.s.session.InvalidateView(ctx, deps.snapshot.View(), cache.StateChange{ - ModuleUpgrades: map[protocol.DocumentURI]map[string]string{args.URI: upgrades}, - }) - }) - }) -} - -func (c *commandHandler) AddDependency(ctx context.Context, args command.DependencyArgs) error { - return c.GoGetModule(ctx, args) -} - -func (c *commandHandler) UpgradeDependency(ctx context.Context, args command.DependencyArgs) error { - return c.GoGetModule(ctx, args) -} - -func (c *commandHandler) ResetGoModDiagnostics(ctx context.Context, args command.ResetGoModDiagnosticsArgs) error { - return c.run(ctx, commandConfig{ - forURI: args.URI, - }, func(ctx context.Context, deps commandDeps) error { - return c.modifyState(ctx, FromResetGoModDiagnostics, func() (*cache.Snapshot, func(), error) { - return c.s.session.InvalidateView(ctx, deps.snapshot.View(), cache.StateChange{ - ModuleUpgrades: map[protocol.DocumentURI]map[string]string{ - deps.fh.URI(): nil, - }, - }) - }) - }) -} - -func 
(c *commandHandler) GoGetModule(ctx context.Context, args command.DependencyArgs) error { - return c.run(ctx, commandConfig{ - progress: "Running go get", - forURI: args.URI, - }, func(ctx context.Context, deps commandDeps) error { - return c.s.runGoModUpdateCommands(ctx, deps.snapshot, args.URI, func(invoke func(...string) (*bytes.Buffer, error)) error { - return runGoGetModule(invoke, args.AddRequire, args.GoCmdArgs) - }) - }) -} - -// TODO(rFindley): UpdateGoSum, Tidy, and Vendor could probably all be one command. -func (c *commandHandler) UpdateGoSum(ctx context.Context, args command.URIArgs) error { - return c.run(ctx, commandConfig{ - progress: "Updating go.sum", - }, func(ctx context.Context, _ commandDeps) error { - for _, uri := range args.URIs { - fh, snapshot, release, err := c.s.fileOf(ctx, uri) - if err != nil { - return err - } - defer release() - if err := c.s.runGoModUpdateCommands(ctx, snapshot, fh.URI(), func(invoke func(...string) (*bytes.Buffer, error)) error { - _, err := invoke("list", "all") - return err - }); err != nil { - return err - } - } - return nil - }) -} - -func (c *commandHandler) Tidy(ctx context.Context, args command.URIArgs) error { - return c.run(ctx, commandConfig{ - requireSave: true, - progress: "Running go mod tidy", - }, func(ctx context.Context, _ commandDeps) error { - for _, uri := range args.URIs { - fh, snapshot, release, err := c.s.fileOf(ctx, uri) - if err != nil { - return err - } - defer release() - if err := c.s.runGoModUpdateCommands(ctx, snapshot, fh.URI(), func(invoke func(...string) (*bytes.Buffer, error)) error { - _, err := invoke("mod", "tidy") - return err - }); err != nil { - return err - } - } - return nil - }) -} - -func (c *commandHandler) Vendor(ctx context.Context, args command.URIArg) error { - return c.run(ctx, commandConfig{ - requireSave: true, - progress: "Running go mod vendor", - forURI: args.URI, - }, func(ctx context.Context, deps commandDeps) error { - // Use RunGoCommandPiped here so that we don't compete with any other go - // command invocations. go mod vendor deletes modules.txt before recreating - // it, and therefore can run into file locking issues on Windows if that - // file is in use by another process, such as go list. - // - // If golang/go#44119 is resolved, go mod vendor will instead modify - // modules.txt in-place. In that case we could theoretically allow this - // command to run concurrently. 
- stderr := new(bytes.Buffer) - err := deps.snapshot.RunGoCommandPiped(ctx, cache.Normal|cache.AllowNetwork, &gocommand.Invocation{ - Verb: "mod", - Args: []string{"vendor"}, - WorkingDir: filepath.Dir(args.URI.Path()), - }, &bytes.Buffer{}, stderr) - if err != nil { - return fmt.Errorf("running go mod vendor failed: %v\nstderr:\n%s", err, stderr.String()) - } - return nil - }) -} - -func (c *commandHandler) EditGoDirective(ctx context.Context, args command.EditGoDirectiveArgs) error { - return c.run(ctx, commandConfig{ - requireSave: true, // if go.mod isn't saved it could cause a problem - forURI: args.URI, - }, func(ctx context.Context, _ commandDeps) error { - fh, snapshot, release, err := c.s.fileOf(ctx, args.URI) - if err != nil { - return err - } - defer release() - if err := c.s.runGoModUpdateCommands(ctx, snapshot, fh.URI(), func(invoke func(...string) (*bytes.Buffer, error)) error { - _, err := invoke("mod", "edit", "-go", args.Version) - return err - }); err != nil { - return err - } - return nil - }) -} - -func (c *commandHandler) RemoveDependency(ctx context.Context, args command.RemoveDependencyArgs) error { - return c.run(ctx, commandConfig{ - progress: "Removing dependency", - forURI: args.URI, - }, func(ctx context.Context, deps commandDeps) error { - // See the documentation for OnlyDiagnostic. - // - // TODO(rfindley): In Go 1.17+, we will be able to use the go command - // without checking if the module is tidy. - if args.OnlyDiagnostic { - return c.s.runGoModUpdateCommands(ctx, deps.snapshot, args.URI, func(invoke func(...string) (*bytes.Buffer, error)) error { - if err := runGoGetModule(invoke, false, []string{args.ModulePath + "@none"}); err != nil { - return err - } - _, err := invoke("mod", "tidy") - return err - }) - } - pm, err := deps.snapshot.ParseMod(ctx, deps.fh) - if err != nil { - return err - } - edits, err := dropDependency(pm, args.ModulePath) - if err != nil { - return err - } - response, err := c.s.client.ApplyEdit(ctx, &protocol.ApplyWorkspaceEditParams{ - Edit: protocol.WorkspaceEdit{ - DocumentChanges: documentChanges(deps.fh, edits), - }, - }) - if err != nil { - return err - } - if !response.Applied { - return fmt.Errorf("edits not applied because of %s", response.FailureReason) - } - return nil - }) -} - -// dropDependency returns the edits to remove the given require from the go.mod -// file. -func dropDependency(pm *cache.ParsedModule, modulePath string) ([]protocol.TextEdit, error) { - // We need a private copy of the parsed go.mod file, since we're going to - // modify it. - copied, err := modfile.Parse("", pm.Mapper.Content, nil) - if err != nil { - return nil, err - } - if err := copied.DropRequire(modulePath); err != nil { - return nil, err - } - copied.Cleanup() - newContent, err := copied.Format() - if err != nil { - return nil, err - } - // Calculate the edits to be made due to the change. 
- diff := diff.Bytes(pm.Mapper.Content, newContent) - return protocol.EditsFromDiffEdits(pm.Mapper, diff) -} - -func (c *commandHandler) Test(ctx context.Context, uri protocol.DocumentURI, tests, benchmarks []string) error { - return c.RunTests(ctx, command.RunTestsArgs{ - URI: uri, - Tests: tests, - Benchmarks: benchmarks, - }) -} - -func (c *commandHandler) RunTests(ctx context.Context, args command.RunTestsArgs) error { - return c.run(ctx, commandConfig{ - async: true, - progress: "Running go test", - requireSave: true, - forURI: args.URI, - }, func(ctx context.Context, deps commandDeps) error { - return c.runTests(ctx, deps.snapshot, deps.work, args.URI, args.Tests, args.Benchmarks) - }) -} - -func (c *commandHandler) runTests(ctx context.Context, snapshot *cache.Snapshot, work *progress.WorkDone, uri protocol.DocumentURI, tests, benchmarks []string) error { - // TODO: fix the error reporting when this runs async. - meta, err := golang.NarrowestMetadataForFile(ctx, snapshot, uri) - if err != nil { - return err - } - pkgPath := string(meta.ForTest) - - // create output - buf := &bytes.Buffer{} - ew := progress.NewEventWriter(ctx, "test") - out := io.MultiWriter(ew, progress.NewWorkDoneWriter(ctx, work), buf) - - // Run `go test -run Func` on each test. - var failedTests int - for _, funcName := range tests { - inv := &gocommand.Invocation{ - Verb: "test", - Args: []string{pkgPath, "-v", "-count=1", fmt.Sprintf("-run=^%s$", regexp.QuoteMeta(funcName))}, - WorkingDir: filepath.Dir(uri.Path()), - } - if err := snapshot.RunGoCommandPiped(ctx, cache.Normal, inv, out, out); err != nil { - if errors.Is(err, context.Canceled) { - return err - } - failedTests++ - } - } - - // Run `go test -run=^$ -bench Func` on each test. - var failedBenchmarks int - for _, funcName := range benchmarks { - inv := &gocommand.Invocation{ - Verb: "test", - Args: []string{pkgPath, "-v", "-run=^$", fmt.Sprintf("-bench=^%s$", regexp.QuoteMeta(funcName))}, - WorkingDir: filepath.Dir(uri.Path()), - } - if err := snapshot.RunGoCommandPiped(ctx, cache.Normal, inv, out, out); err != nil { - if errors.Is(err, context.Canceled) { - return err - } - failedBenchmarks++ - } - } - - var title string - if len(tests) > 0 && len(benchmarks) > 0 { - title = "tests and benchmarks" - } else if len(tests) > 0 { - title = "tests" - } else if len(benchmarks) > 0 { - title = "benchmarks" - } else { - return errors.New("No functions were provided") - } - message := fmt.Sprintf("all %s passed", title) - if failedTests > 0 && failedBenchmarks > 0 { - message = fmt.Sprintf("%d / %d tests failed and %d / %d benchmarks failed", failedTests, len(tests), failedBenchmarks, len(benchmarks)) - } else if failedTests > 0 { - message = fmt.Sprintf("%d / %d tests failed", failedTests, len(tests)) - } else if failedBenchmarks > 0 { - message = fmt.Sprintf("%d / %d benchmarks failed", failedBenchmarks, len(benchmarks)) - } - if failedTests > 0 || failedBenchmarks > 0 { - message += "\n" + buf.String() - } - - showMessage(ctx, c.s.client, protocol.Info, message) - - if failedTests > 0 || failedBenchmarks > 0 { - return errors.New("gopls.test command failed") - } - return nil -} - -func (c *commandHandler) Generate(ctx context.Context, args command.GenerateArgs) error { - title := "Running go generate ." - if args.Recursive { - title = "Running go generate ./..." 
- } - return c.run(ctx, commandConfig{ - requireSave: true, - progress: title, - forURI: args.Dir, - }, func(ctx context.Context, deps commandDeps) error { - er := progress.NewEventWriter(ctx, "generate") - - pattern := "." - if args.Recursive { - pattern = "./..." - } - inv := &gocommand.Invocation{ - Verb: "generate", - Args: []string{"-x", pattern}, - WorkingDir: args.Dir.Path(), - } - stderr := io.MultiWriter(er, progress.NewWorkDoneWriter(ctx, deps.work)) - if err := deps.snapshot.RunGoCommandPiped(ctx, cache.AllowNetwork, inv, er, stderr); err != nil { - return err - } - return nil - }) -} - -func (c *commandHandler) GoGetPackage(ctx context.Context, args command.GoGetPackageArgs) error { - return c.run(ctx, commandConfig{ - forURI: args.URI, - progress: "Running go get", - }, func(ctx context.Context, deps commandDeps) error { - // Run on a throwaway go.mod, otherwise it'll write to the real one. - stdout, err := deps.snapshot.RunGoCommandDirect(ctx, cache.WriteTemporaryModFile|cache.AllowNetwork, &gocommand.Invocation{ - Verb: "list", - Args: []string{"-f", "{{.Module.Path}}@{{.Module.Version}}", args.Pkg}, - WorkingDir: filepath.Dir(args.URI.Path()), - }) - if err != nil { - return err - } - ver := strings.TrimSpace(stdout.String()) - return c.s.runGoModUpdateCommands(ctx, deps.snapshot, args.URI, func(invoke func(...string) (*bytes.Buffer, error)) error { - if args.AddRequire { - if err := addModuleRequire(invoke, []string{ver}); err != nil { - return err - } - } - _, err := invoke(append([]string{"get", "-d"}, args.Pkg)...) - return err - }) - }) -} - -func (s *server) runGoModUpdateCommands(ctx context.Context, snapshot *cache.Snapshot, uri protocol.DocumentURI, run func(invoke func(...string) (*bytes.Buffer, error)) error) error { - newModBytes, newSumBytes, err := snapshot.RunGoModUpdateCommands(ctx, filepath.Dir(uri.Path()), run) - if err != nil { - return err - } - modURI := snapshot.GoModForFile(uri) - sumURI := protocol.URIFromPath(strings.TrimSuffix(modURI.Path(), ".mod") + ".sum") - modEdits, err := collectFileEdits(ctx, snapshot, modURI, newModBytes) - if err != nil { - return err - } - sumEdits, err := collectFileEdits(ctx, snapshot, sumURI, newSumBytes) - if err != nil { - return err - } - return applyFileEdits(ctx, s.client, append(sumEdits, modEdits...)) -} - -// collectFileEdits collects any file edits required to transform the snapshot -// file specified by uri to the provided new content. -// -// If the file is not open, collectFileEdits simply writes the new content to -// disk. -// -// TODO(rfindley): fix this API asymmetry. It should be up to the caller to -// write the file or apply the edits. -func collectFileEdits(ctx context.Context, snapshot *cache.Snapshot, uri protocol.DocumentURI, newContent []byte) ([]protocol.TextDocumentEdit, error) { - fh, err := snapshot.ReadFile(ctx, uri) - if err != nil { - return nil, err - } - oldContent, err := fh.Content() - if err != nil && !os.IsNotExist(err) { - return nil, err - } - - if bytes.Equal(oldContent, newContent) { - return nil, nil - } - - // Sending a workspace edit to a closed file causes VS Code to open the - // file and leave it unsaved. We would rather apply the changes directly, - // especially to go.sum, which should be mostly invisible to the user. 
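The GoGetPackage handler above resolves which module provides a package by asking `go list` for {{.Module.Path}}@{{.Module.Version}}. A rough equivalent with os/exec, outside the snapshot machinery; the package path and directory used below are illustrative only:

package main

import (
	"fmt"
	"os/exec"
	"strings"
)

// moduleForPackage asks the go command which module@version provides pkg,
// mirroring the `go list -f {{.Module.Path}}@{{.Module.Version}}` invocation
// above, but run directly rather than through a snapshot.
func moduleForPackage(dir, pkg string) (string, error) {
	cmd := exec.Command("go", "list", "-f", "{{.Module.Path}}@{{.Module.Version}}", pkg)
	cmd.Dir = dir
	out, err := cmd.Output()
	if err != nil {
		return "", err
	}
	return strings.TrimSpace(string(out)), nil
}

func main() {
	// Example package path; any non-std dependency of the current module works.
	mod, err := moduleForPackage(".", "golang.org/x/mod/modfile")
	if err != nil {
		fmt.Println("go list failed:", err)
		return
	}
	fmt.Println(mod)
}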
- if !snapshot.IsOpen(uri) { - err := os.WriteFile(uri.Path(), newContent, 0666) - return nil, err - } - - m := protocol.NewMapper(fh.URI(), oldContent) - diff := diff.Bytes(oldContent, newContent) - edits, err := protocol.EditsFromDiffEdits(m, diff) - if err != nil { - return nil, err - } - return []protocol.TextDocumentEdit{{ - TextDocument: protocol.OptionalVersionedTextDocumentIdentifier{ - Version: fh.Version(), - TextDocumentIdentifier: protocol.TextDocumentIdentifier{ - URI: uri, - }, - }, - Edits: protocol.AsAnnotatedTextEdits(edits), - }}, nil -} - -func applyFileEdits(ctx context.Context, cli protocol.Client, edits []protocol.TextDocumentEdit) error { - if len(edits) == 0 { - return nil - } - response, err := cli.ApplyEdit(ctx, &protocol.ApplyWorkspaceEditParams{ - Edit: protocol.WorkspaceEdit{ - DocumentChanges: protocol.TextDocumentEditsToDocumentChanges(edits), - }, - }) - if err != nil { - return err - } - if !response.Applied { - return fmt.Errorf("edits not applied because of %s", response.FailureReason) - } - return nil -} - -func runGoGetModule(invoke func(...string) (*bytes.Buffer, error), addRequire bool, args []string) error { - if addRequire { - if err := addModuleRequire(invoke, args); err != nil { - return err - } - } - _, err := invoke(append([]string{"get", "-d"}, args...)...) - return err -} - -func addModuleRequire(invoke func(...string) (*bytes.Buffer, error), args []string) error { - // Using go get to create a new dependency results in an - // `// indirect` comment we may not want. The only way to avoid it - // is to add the require as direct first. Then we can use go get to - // update go.sum and tidy up. - _, err := invoke(append([]string{"mod", "edit", "-require"}, args...)...) - return err -} - -// TODO(rfindley): inline. 
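addModuleRequire above works around the `// indirect` comment that a bare `go get` would add for a brand-new dependency: the require is first recorded directly with `go mod edit -require`, and only then resolved with `go get`. A compact sketch of that two-step flow, assuming a plain exec-based runner in place of the invoke callback; the module version in main is illustrative:

package main

import (
	"fmt"
	"os/exec"
)

// addRequire records mod (a module@version string) as a direct requirement,
// then lets `go get` update go.sum; doing the steps in this order avoids the
// `// indirect` comment that `go get` alone would write.
func addRequire(dir, mod string) error {
	for _, args := range [][]string{
		{"mod", "edit", "-require", mod},
		{"get", "-d", mod}, // -d mirrors the invocation in the code above
	} {
		cmd := exec.Command("go", args...)
		cmd.Dir = dir
		if out, err := cmd.CombinedOutput(); err != nil {
			return fmt.Errorf("go %v: %v\n%s", args, err, out)
		}
	}
	return nil
}

func main() {
	if err := addRequire(".", "golang.org/x/mod@v0.17.0"); err != nil {
		fmt.Println(err)
	}
}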
-func (s *server) getUpgrades(ctx context.Context, snapshot *cache.Snapshot, uri protocol.DocumentURI, modules []string) (map[string]string, error) { - stdout, err := snapshot.RunGoCommandDirect(ctx, cache.Normal|cache.AllowNetwork, &gocommand.Invocation{ - Verb: "list", - Args: append([]string{"-m", "-u", "-json"}, modules...), - WorkingDir: filepath.Dir(uri.Path()), - }) - if err != nil { - return nil, err - } - - upgrades := map[string]string{} - for dec := json.NewDecoder(stdout); dec.More(); { - mod := &gocommand.ModuleJSON{} - if err := dec.Decode(mod); err != nil { - return nil, err - } - if mod.Update == nil { - continue - } - upgrades[mod.Path] = mod.Update.Version - } - return upgrades, nil -} - -func (c *commandHandler) GCDetails(ctx context.Context, uri protocol.DocumentURI) error { - return c.ToggleGCDetails(ctx, command.URIArg{URI: uri}) -} - -func (c *commandHandler) ToggleGCDetails(ctx context.Context, args command.URIArg) error { - return c.run(ctx, commandConfig{ - requireSave: true, - progress: "Toggling GC Details", - forURI: args.URI, - }, func(ctx context.Context, deps commandDeps) error { - return c.modifyState(ctx, FromToggleGCDetails, func() (*cache.Snapshot, func(), error) { - meta, err := golang.NarrowestMetadataForFile(ctx, deps.snapshot, deps.fh.URI()) - if err != nil { - return nil, nil, err - } - wantDetails := !deps.snapshot.WantGCDetails(meta.ID) // toggle the gc details state - return c.s.session.InvalidateView(ctx, deps.snapshot.View(), cache.StateChange{ - GCDetails: map[metadata.PackageID]bool{ - meta.ID: wantDetails, - }, - }) - }) - }) -} - -func (c *commandHandler) ListKnownPackages(ctx context.Context, args command.URIArg) (command.ListKnownPackagesResult, error) { - var result command.ListKnownPackagesResult - err := c.run(ctx, commandConfig{ - progress: "Listing packages", - forURI: args.URI, - }, func(ctx context.Context, deps commandDeps) error { - pkgs, err := golang.KnownPackagePaths(ctx, deps.snapshot, deps.fh) - for _, pkg := range pkgs { - result.Packages = append(result.Packages, string(pkg)) - } - return err - }) - return result, err -} - -func (c *commandHandler) ListImports(ctx context.Context, args command.URIArg) (command.ListImportsResult, error) { - var result command.ListImportsResult - err := c.run(ctx, commandConfig{ - forURI: args.URI, - }, func(ctx context.Context, deps commandDeps) error { - fh, err := deps.snapshot.ReadFile(ctx, args.URI) - if err != nil { - return err - } - pgf, err := deps.snapshot.ParseGo(ctx, fh, parsego.ParseHeader) - if err != nil { - return err - } - fset := tokeninternal.FileSetFor(pgf.Tok) - for _, group := range astutil.Imports(fset, pgf.File) { - for _, imp := range group { - if imp.Path == nil { - continue - } - var name string - if imp.Name != nil { - name = imp.Name.Name - } - result.Imports = append(result.Imports, command.FileImport{ - Path: string(metadata.UnquoteImportPath(imp)), - Name: name, - }) - } - } - meta, err := golang.NarrowestMetadataForFile(ctx, deps.snapshot, args.URI) - if err != nil { - return err // e.g. 
cancelled - } - for pkgPath := range meta.DepsByPkgPath { - result.PackageImports = append(result.PackageImports, - command.PackageImport{Path: string(pkgPath)}) - } - sort.Slice(result.PackageImports, func(i, j int) bool { - return result.PackageImports[i].Path < result.PackageImports[j].Path - }) - return nil - }) - return result, err -} - -func (c *commandHandler) AddImport(ctx context.Context, args command.AddImportArgs) error { - return c.run(ctx, commandConfig{ - progress: "Adding import", - forURI: args.URI, - }, func(ctx context.Context, deps commandDeps) error { - edits, err := golang.AddImport(ctx, deps.snapshot, deps.fh, args.ImportPath) - if err != nil { - return fmt.Errorf("could not add import: %v", err) - } - if _, err := c.s.client.ApplyEdit(ctx, &protocol.ApplyWorkspaceEditParams{ - Edit: protocol.WorkspaceEdit{ - DocumentChanges: documentChanges(deps.fh, edits), - }, - }); err != nil { - return fmt.Errorf("could not apply import edits: %v", err) - } - return nil - }) -} - -func (c *commandHandler) StartDebugging(ctx context.Context, args command.DebuggingArgs) (result command.DebuggingResult, _ error) { - addr := args.Addr - if addr == "" { - addr = "localhost:0" - } - di := debug.GetInstance(ctx) - if di == nil { - return result, errors.New("internal error: server has no debugging instance") - } - listenedAddr, err := di.Serve(ctx, addr) - if err != nil { - return result, fmt.Errorf("starting debug server: %w", err) - } - result.URLs = []string{"http://" + listenedAddr} - openClientBrowser(ctx, c.s.client, result.URLs[0]) - return result, nil -} - -func (c *commandHandler) StartProfile(ctx context.Context, args command.StartProfileArgs) (result command.StartProfileResult, _ error) { - file, err := os.CreateTemp("", "cuepls-profile-*") - if err != nil { - return result, fmt.Errorf("creating temp profile file: %v", err) - } - - c.s.ongoingProfileMu.Lock() - defer c.s.ongoingProfileMu.Unlock() - - if c.s.ongoingProfile != nil { - file.Close() // ignore error - return result, fmt.Errorf("profile already started (for %q)", c.s.ongoingProfile.Name()) - } - - if err := pprof.StartCPUProfile(file); err != nil { - file.Close() // ignore error - return result, fmt.Errorf("starting profile: %v", err) - } - - c.s.ongoingProfile = file - return result, nil -} - -func (c *commandHandler) StopProfile(ctx context.Context, args command.StopProfileArgs) (result command.StopProfileResult, _ error) { - c.s.ongoingProfileMu.Lock() - defer c.s.ongoingProfileMu.Unlock() - - prof := c.s.ongoingProfile - c.s.ongoingProfile = nil - - if prof == nil { - return result, fmt.Errorf("no ongoing profile") - } - - pprof.StopCPUProfile() - if err := prof.Close(); err != nil { - return result, fmt.Errorf("closing profile file: %v", err) - } - result.File = prof.Name() - return result, nil -} - -// MemStats implements the MemStats command. It returns an error as a -// future-proof API, but the resulting error is currently always nil. -func (c *commandHandler) MemStats(ctx context.Context) (command.MemStatsResult, error) { - // GC a few times for stable results. - runtime.GC() - runtime.GC() - runtime.GC() - var m runtime.MemStats - runtime.ReadMemStats(&m) - return command.MemStatsResult{ - HeapAlloc: m.HeapAlloc, - HeapInUse: m.HeapInuse, - TotalAlloc: m.TotalAlloc, - }, nil -} - -// WorkspaceStats implements the WorkspaceStats command, reporting information -// about the current state of the loaded workspace for the current session. 
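StartProfile and StopProfile above allow at most one CPU profile at a time, guarded by a mutex, writing to a temporary file whose name is handed back to the caller. The same lifecycle in isolation, with package-level state standing in for the server fields:

package main

import (
	"fmt"
	"os"
	"runtime/pprof"
	"sync"
)

var (
	profMu      sync.Mutex
	ongoingProf *os.File // nil when no profile is running
)

// startProfile begins a CPU profile into a temp file, refusing to start a
// second one while another is still running.
func startProfile() error {
	profMu.Lock()
	defer profMu.Unlock()
	if ongoingProf != nil {
		return fmt.Errorf("profile already started (for %q)", ongoingProf.Name())
	}
	f, err := os.CreateTemp("", "cpu-profile-*")
	if err != nil {
		return err
	}
	if err := pprof.StartCPUProfile(f); err != nil {
		f.Close() // ignore error
		return err
	}
	ongoingProf = f
	return nil
}

// stopProfile stops the ongoing profile and reports the file it was written to.
func stopProfile() (string, error) {
	profMu.Lock()
	defer profMu.Unlock()
	if ongoingProf == nil {
		return "", fmt.Errorf("no ongoing profile")
	}
	pprof.StopCPUProfile()
	name := ongoingProf.Name()
	err := ongoingProf.Close()
	ongoingProf = nil
	return name, err
}

func main() {
	if err := startProfile(); err != nil {
		fmt.Println(err)
		return
	}
	// ... CPU-bound work would go here ...
	name, err := stopProfile()
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println("profile written to", name)
}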
-func (c *commandHandler) WorkspaceStats(ctx context.Context) (command.WorkspaceStatsResult, error) { - var res command.WorkspaceStatsResult - res.Files = c.s.session.Cache().FileStats() - - for _, view := range c.s.session.Views() { - vs, err := collectViewStats(ctx, view) - if err != nil { - return res, err - } - res.Views = append(res.Views, vs) - } - return res, nil -} - -func collectViewStats(ctx context.Context, view *cache.View) (command.ViewStats, error) { - s, release, err := view.Snapshot() - if err != nil { - return command.ViewStats{}, err - } - defer release() - - allMD, err := s.AllMetadata(ctx) - if err != nil { - return command.ViewStats{}, err - } - allPackages := collectPackageStats(allMD) - - wsMD, err := s.WorkspaceMetadata(ctx) - if err != nil { - return command.ViewStats{}, err - } - workspacePackages := collectPackageStats(wsMD) - - var ids []golang.PackageID - for _, mp := range wsMD { - ids = append(ids, mp.ID) - } - - diags, err := s.PackageDiagnostics(ctx, ids...) - if err != nil { - return command.ViewStats{}, err - } - - ndiags := 0 - for _, d := range diags { - ndiags += len(d) - } - - return command.ViewStats{ - GoCommandVersion: view.GoVersionString(), - AllPackages: allPackages, - WorkspacePackages: workspacePackages, - Diagnostics: ndiags, - }, nil -} - -func collectPackageStats(mps []*metadata.Package) command.PackageStats { - var stats command.PackageStats - stats.Packages = len(mps) - modules := make(map[string]bool) - - for _, mp := range mps { - n := len(mp.CompiledGoFiles) - stats.CompiledGoFiles += n - if n > stats.LargestPackage { - stats.LargestPackage = n - } - if mp.Module != nil { - modules[mp.Module.Path] = true - } - } - stats.Modules = len(modules) - - return stats -} - -// RunGoWorkCommand invokes `go work ` with the provided arguments. -// -// args.InitFirst controls whether to first run `go work init`. This allows a -// single command to both create and recursively populate a go.work file -- as -// of writing there is no `go work init -r`. -// -// Some thought went into implementing this command. Unlike the go.mod commands -// above, this command simply invokes the go command and relies on the client -// to notify gopls of file changes via didChangeWatchedFile notifications. -// We could instead run these commands with GOWORK set to a temp file, but that -// poses the following problems: -// - directory locations in the resulting temp go.work file will be computed -// relative to the directory containing that go.work. If the go.work is in a -// tempdir, the directories will need to be translated to/from that dir. -// - it would be simpler to use a temp go.work file in the workspace -// directory, or whichever directory contains the real go.work file, but -// that sets a bad precedent of writing to a user-owned directory. We -// shouldn't start doing that. -// - Sending workspace edits to create a go.work file would require using -// the CreateFile resource operation, which would need to be tested in every -// client as we haven't used it before. We don't have time for that right -// now. -// -// Therefore, we simply require that the current go.work file is saved (if it -// exists), and delegate to the go command. 
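As the comment above explains, go.work commands are delegated to the real go command, optionally after a `go work init`, with GOWORK pinned explicitly in the environment. A sketch of that init-then-run flow using os/exec; the paths and arguments are illustrative:

package main

import (
	"fmt"
	"os"
	"os/exec"
	"path/filepath"
)

// runGoWork runs `go work <args>` in dir with GOWORK pinned to the given
// go.work path, creating the file with `go work init` first if requested.
func runGoWork(dir, gowork string, initFirst bool, args ...string) error {
	if initFirst {
		if err := invokeGoWork(dir, gowork, "init"); err != nil {
			return fmt.Errorf("running `go work init`: %v", err)
		}
	}
	return invokeGoWork(dir, gowork, args...)
}

func invokeGoWork(dir, gowork string, args ...string) error {
	cmd := exec.Command("go", append([]string{"work"}, args...)...)
	cmd.Dir = dir
	cmd.Env = append(os.Environ(), "GOWORK="+gowork)
	if out, err := cmd.CombinedOutput(); err != nil {
		return fmt.Errorf("go work %v: %v\n%s", args, err, out)
	}
	return nil
}

func main() {
	dir := "." // workspace root, for illustration
	if err := runGoWork(dir, filepath.Join(dir, "go.work"), true, "use", "."); err != nil {
		fmt.Println(err)
	}
}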
-func (c *commandHandler) RunGoWorkCommand(ctx context.Context, args command.RunGoWorkArgs) error { - return c.run(ctx, commandConfig{ - progress: "Running go work command", - forView: args.ViewID, - }, func(ctx context.Context, deps commandDeps) (runErr error) { - snapshot := deps.snapshot - view := snapshot.View() - viewDir := snapshot.Folder().Path() - - if view.Type() != cache.GoWorkView && view.GoWork() != "" { - // If we are not using an existing go.work file, GOWORK must be explicitly off. - // TODO(rfindley): what about GO111MODULE=off? - return fmt.Errorf("cannot modify go.work files when GOWORK=off") - } - - var gowork string - // If the user has explicitly set GOWORK=off, we should warn them - // explicitly and avoid potentially misleading errors below. - if view.GoWork() != "" { - gowork = view.GoWork().Path() - fh, err := snapshot.ReadFile(ctx, view.GoWork()) - if err != nil { - return err // e.g. canceled - } - if !fh.SameContentsOnDisk() { - return fmt.Errorf("must save workspace file %s before running go work commands", view.GoWork()) - } - } else { - if !args.InitFirst { - // If go.work does not exist, we should have detected that and asked - // for InitFirst. - return bug.Errorf("internal error: cannot run go work command: required go.work file not found") - } - gowork = filepath.Join(viewDir, "go.work") - if err := c.invokeGoWork(ctx, viewDir, gowork, []string{"init"}); err != nil { - return fmt.Errorf("running `go work init`: %v", err) - } - } - - return c.invokeGoWork(ctx, viewDir, gowork, args.Args) - }) -} - -func (c *commandHandler) invokeGoWork(ctx context.Context, viewDir, gowork string, args []string) error { - inv := gocommand.Invocation{ - Verb: "work", - Args: args, - WorkingDir: viewDir, - Env: append(os.Environ(), fmt.Sprintf("GOWORK=%s", gowork)), - } - if _, err := c.s.session.GoCommandRunner().Run(ctx, inv); err != nil { - return fmt.Errorf("running go work command: %v", err) - } - return nil -} - // showMessage causes the client to show a progress or error message. // // It reports whether it succeeded. If it fails, it writes an error to @@ -1114,130 +26,3 @@ func showMessage(ctx context.Context, cli protocol.Client, typ protocol.MessageT } return true } - -// openClientBrowser causes the LSP client to open the specified URL -// in an external browser. -func openClientBrowser(ctx context.Context, cli protocol.Client, url protocol.URI) { - showDocumentImpl(ctx, cli, url, nil) -} - -// openClientEditor causes the LSP client to open the specified document -// and select the indicated range. -func openClientEditor(ctx context.Context, cli protocol.Client, loc protocol.Location) { - showDocumentImpl(ctx, cli, protocol.URI(loc.URI), &loc.Range) -} - -func showDocumentImpl(ctx context.Context, cli protocol.Client, url protocol.URI, rangeOpt *protocol.Range) { - // In principle we shouldn't send a showDocument request to a - // client that doesn't support it, as reported by - // ShowDocumentClientCapabilities. But even clients that do - // support it may defer the real work of opening the document - // asynchronously, to avoid deadlocks due to rentrancy. - // - // For example: client sends request to server; server sends - // showDocument to client; client opens editor; editor causes - // new RPC to be sent to server, which is still busy with - // previous request. (This happens in eglot.) - // - // So we can't rely on the success/failure information. - // That's the reason this function doesn't return an error. 
- - // "External" means run the system-wide handler (e.g. open(1) - // on macOS or xdg-open(1) on Linux) for this URL, ignoring - // TakeFocus and Selection. Note that this may still end up - // opening the same editor (e.g. VSCode) for a file: URL. - res, err := cli.ShowDocument(ctx, &protocol.ShowDocumentParams{ - URI: url, - External: rangeOpt == nil, - TakeFocus: true, - Selection: rangeOpt, // optional - }) - if err != nil { - event.Error(ctx, "client.showDocument: %v", err) - } else if res != nil && !res.Success { - event.Log(ctx, fmt.Sprintf("client declined to open document %v", url)) - } -} - -func (c *commandHandler) ChangeSignature(ctx context.Context, args command.ChangeSignatureArgs) (*protocol.WorkspaceEdit, error) { - var result *protocol.WorkspaceEdit - err := c.run(ctx, commandConfig{ - forURI: args.RemoveParameter.URI, - }, func(ctx context.Context, deps commandDeps) error { - // For now, gopls only supports removing unused parameters. - changes, err := golang.RemoveUnusedParameter(ctx, deps.fh, args.RemoveParameter.Range, deps.snapshot) - if err != nil { - return err - } - edit := protocol.WorkspaceEdit{ - DocumentChanges: changes, - } - if args.ResolveEdits { - result = &edit - return nil - } - r, err := c.s.client.ApplyEdit(ctx, &protocol.ApplyWorkspaceEditParams{ - Edit: edit, - }) - if !r.Applied { - return fmt.Errorf("failed to apply edits: %v", r.FailureReason) - } - - return nil - }) - return result, err -} - -func (c *commandHandler) DiagnoseFiles(ctx context.Context, args command.DiagnoseFilesArgs) error { - return c.run(ctx, commandConfig{ - progress: "Diagnose files", - }, func(ctx context.Context, _ commandDeps) error { - - // TODO(rfindley): even better would be textDocument/diagnostics (golang/go#60122). - // Though note that implementing pull diagnostics may cause some servers to - // request diagnostics in an ad-hoc manner, and break our intentional pacing. - - ctx, done := event.Start(ctx, "lsp.server.DiagnoseFiles") - defer done() - - snapshots := make(map[*cache.Snapshot]bool) - for _, uri := range args.Files { - fh, snapshot, release, err := c.s.fileOf(ctx, uri) - if err != nil { - return err - } - if snapshots[snapshot] || snapshot.FileKind(fh) != file.Go { - release() - continue - } - defer release() - snapshots[snapshot] = true - } - - var wg sync.WaitGroup - for snapshot := range snapshots { - snapshot := snapshot - wg.Add(1) - go func() { - defer wg.Done() - c.s.diagnoseSnapshot(snapshot, nil, 0) - }() - } - wg.Wait() - - return nil - }) -} - -func (c *commandHandler) Views(ctx context.Context) ([]command.View, error) { - var summaries []command.View - for _, view := range c.s.session.Views() { - summaries = append(summaries, command.View{ - Type: view.Type().String(), - Root: view.Root(), - Folder: view.Folder().Dir, - EnvOverlay: view.EnvOverlay(), - }) - } - return summaries, nil -} diff --git a/internal/golangorgx/gopls/server/completion.go b/internal/golangorgx/gopls/server/completion.go deleted file mode 100644 index f86a89c1eb1..00000000000 --- a/internal/golangorgx/gopls/server/completion.go +++ /dev/null @@ -1,152 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
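DiagnoseFiles above deduplicates the snapshots behind the requested files and then diagnoses each snapshot once, concurrently, under a WaitGroup. The fan-out pattern in isolation, with diagnose standing in for the per-snapshot work:

package main

import (
	"fmt"
	"sync"
)

// diagnoseAll runs diagnose once per distinct key, concurrently, and waits
// for every goroutine to finish -- the shape used by DiagnoseFiles above.
func diagnoseAll(keys []string, diagnose func(string)) {
	seen := make(map[string]bool)
	var wg sync.WaitGroup
	for _, k := range keys {
		if seen[k] {
			continue // this snapshot is already scheduled
		}
		seen[k] = true
		wg.Add(1)
		go func(k string) {
			defer wg.Done()
			diagnose(k)
		}(k)
	}
	wg.Wait()
}

func main() {
	diagnoseAll([]string{"view1", "view2", "view1"}, func(k string) {
		fmt.Println("diagnosing snapshot for", k)
	})
}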
- -package server - -import ( - "context" - "fmt" - "strings" - - "cuelang.org/go/internal/golangorgx/gopls/file" - "cuelang.org/go/internal/golangorgx/gopls/golang" - "cuelang.org/go/internal/golangorgx/gopls/golang/completion" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/settings" - "cuelang.org/go/internal/golangorgx/gopls/telemetry" - "cuelang.org/go/internal/golangorgx/gopls/template" - "cuelang.org/go/internal/golangorgx/gopls/work" - "cuelang.org/go/internal/golangorgx/tools/event" - "cuelang.org/go/internal/golangorgx/tools/event/tag" -) - -func (s *server) Completion(ctx context.Context, params *protocol.CompletionParams) (_ *protocol.CompletionList, rerr error) { - recordLatency := telemetry.StartLatencyTimer("completion") - defer func() { - recordLatency(ctx, rerr) - }() - - ctx, done := event.Start(ctx, "lsp.Server.completion", tag.URI.Of(params.TextDocument.URI)) - defer done() - - fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) - if err != nil { - return nil, err - } - defer release() - - var candidates []completion.CompletionItem - var surrounding *completion.Selection - switch snapshot.FileKind(fh) { - case file.Go: - candidates, surrounding, err = completion.Completion(ctx, snapshot, fh, params.Position, params.Context) - case file.Mod: - candidates, surrounding = nil, nil - case file.Work: - cl, err := work.Completion(ctx, snapshot, fh, params.Position) - if err != nil { - break - } - return cl, nil - case file.Tmpl: - var cl *protocol.CompletionList - cl, err = template.Completion(ctx, snapshot, fh, params.Position, params.Context) - if err != nil { - break // use common error handling, candidates==nil - } - return cl, nil - } - if err != nil { - event.Error(ctx, "no completions found", err, tag.Position.Of(params.Position)) - } - if candidates == nil { - return &protocol.CompletionList{ - IsIncomplete: true, - Items: []protocol.CompletionItem{}, - }, nil - } - - rng, err := surrounding.Range() - if err != nil { - return nil, err - } - - // When using deep completions/fuzzy matching, report results as incomplete so - // client fetches updated completions after every key stroke. - options := snapshot.Options() - incompleteResults := options.DeepCompletion || options.Matcher == settings.Fuzzy - - items := toProtocolCompletionItems(candidates, rng, options) - - return &protocol.CompletionList{ - IsIncomplete: incompleteResults, - Items: items, - }, nil -} - -func toProtocolCompletionItems(candidates []completion.CompletionItem, rng protocol.Range, options *settings.Options) []protocol.CompletionItem { - var ( - items = make([]protocol.CompletionItem, 0, len(candidates)) - numDeepCompletionsSeen int - ) - for i, candidate := range candidates { - // Limit the number of deep completions to not overwhelm the user in cases - // with dozens of deep completion matches. - if candidate.Depth > 0 { - if !options.DeepCompletion { - continue - } - if numDeepCompletionsSeen >= completion.MaxDeepCompletions { - continue - } - numDeepCompletionsSeen++ - } - insertText := candidate.InsertText - if options.InsertTextFormat == protocol.SnippetTextFormat { - insertText = candidate.Snippet() - } - - // This can happen if the client has snippets disabled but the - // candidate only supports snippet insertion. 
- if insertText == "" { - continue - } - - doc := &protocol.Or_CompletionItem_documentation{ - Value: protocol.MarkupContent{ - Kind: protocol.Markdown, - Value: golang.CommentToMarkdown(candidate.Documentation, options), - }, - } - if options.PreferredContentFormat != protocol.Markdown { - doc.Value = candidate.Documentation - } - item := protocol.CompletionItem{ - Label: candidate.Label, - Detail: candidate.Detail, - Kind: candidate.Kind, - TextEdit: &protocol.TextEdit{ - NewText: insertText, - Range: rng, - }, - InsertTextFormat: &options.InsertTextFormat, - AdditionalTextEdits: candidate.AdditionalTextEdits, - // This is a hack so that the client sorts completion results in the order - // according to their score. This can be removed upon the resolution of - // https://github.com/Microsoft/language-server-protocol/issues/348. - SortText: fmt.Sprintf("%05d", i), - - // Trim operators (VSCode doesn't like weird characters in - // filterText). - FilterText: strings.TrimLeft(candidate.InsertText, "&*"), - - Preselect: i == 0, - Documentation: doc, - Tags: protocol.NonNilSlice(candidate.Tags), - Deprecated: candidate.Deprecated, - } - items = append(items, item) - } - return items -} diff --git a/internal/golangorgx/gopls/server/definition.go b/internal/golangorgx/gopls/server/definition.go deleted file mode 100644 index d00dda51be9..00000000000 --- a/internal/golangorgx/gopls/server/definition.go +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package server - -import ( - "context" - "fmt" - - "cuelang.org/go/internal/golangorgx/gopls/file" - "cuelang.org/go/internal/golangorgx/gopls/golang" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/telemetry" - "cuelang.org/go/internal/golangorgx/gopls/template" - "cuelang.org/go/internal/golangorgx/tools/event" - "cuelang.org/go/internal/golangorgx/tools/event/tag" -) - -func (s *server) Definition(ctx context.Context, params *protocol.DefinitionParams) (_ []protocol.Location, rerr error) { - recordLatency := telemetry.StartLatencyTimer("definition") - defer func() { - recordLatency(ctx, rerr) - }() - - ctx, done := event.Start(ctx, "lsp.Server.definition", tag.URI.Of(params.TextDocument.URI)) - defer done() - - // TODO(rfindley): definition requests should be multiplexed across all views. - fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) - if err != nil { - return nil, err - } - defer release() - switch kind := snapshot.FileKind(fh); kind { - case file.Tmpl: - return template.Definition(snapshot, fh, params.Position) - case file.Go: - return golang.Definition(ctx, snapshot, fh, params.Position) - default: - return nil, fmt.Errorf("can't find definitions for file type %s", kind) - } -} - -func (s *server) TypeDefinition(ctx context.Context, params *protocol.TypeDefinitionParams) ([]protocol.Location, error) { - ctx, done := event.Start(ctx, "lsp.Server.typeDefinition", tag.URI.Of(params.TextDocument.URI)) - defer done() - - // TODO(rfindley): type definition requests should be multiplexed across all views. 
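toProtocolCompletionItems above encodes the server-side ranking into SortText as a zero-padded index, so that the client's lexicographic sort reproduces the server's scoring order. A tiny demonstration of why the padding matters:

package main

import (
	"fmt"
	"sort"
)

func main() {
	// Zero-padded sort keys ("%05d") sort lexicographically in the same
	// order as the numeric rank, which is how LSP clients treat SortText.
	var padded []string
	for i := 0; i < 12; i++ {
		padded = append(padded, fmt.Sprintf("%05d", i))
	}
	sort.Strings(padded)
	fmt.Println(padded[:3], "...", padded[9:]) // [00000 00001 00002] ... [00009 00010 00011]

	// Without padding, "10" and "11" would sort before "2".
	plain := []string{"0", "1", "2", "10", "11"}
	sort.Strings(plain)
	fmt.Println(plain) // [0 1 10 11 2]
}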
- fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) - if err != nil { - return nil, err - } - defer release() - switch kind := snapshot.FileKind(fh); kind { - case file.Go: - return golang.TypeDefinition(ctx, snapshot, fh, params.Position) - default: - return nil, fmt.Errorf("can't find type definitions for file type %s", kind) - } -} diff --git a/internal/golangorgx/gopls/server/diagnostics.go b/internal/golangorgx/gopls/server/diagnostics.go index 963e534c300..a121461a805 100644 --- a/internal/golangorgx/gopls/server/diagnostics.go +++ b/internal/golangorgx/gopls/server/diagnostics.go @@ -20,14 +20,11 @@ import ( "cuelang.org/go/internal/golangorgx/gopls/cache/metadata" "cuelang.org/go/internal/golangorgx/gopls/file" "cuelang.org/go/internal/golangorgx/gopls/golang" - "cuelang.org/go/internal/golangorgx/gopls/mod" "cuelang.org/go/internal/golangorgx/gopls/protocol" "cuelang.org/go/internal/golangorgx/gopls/settings" "cuelang.org/go/internal/golangorgx/gopls/template" "cuelang.org/go/internal/golangorgx/gopls/util/maps" - "cuelang.org/go/internal/golangorgx/gopls/work" "cuelang.org/go/internal/golangorgx/tools/event" - "cuelang.org/go/internal/golangorgx/tools/event/keys" "cuelang.org/go/internal/golangorgx/tools/event/tag" ) @@ -335,27 +332,6 @@ func (s *server) diagnose(ctx context.Context, snapshot *cache.Snapshot) (diagMa // Diagnostics below are organized by increasing specificity: // go.work > mod > mod upgrade > mod vuln > package, etc. - // Diagnose go.work file. - workReports, workErr := work.Diagnostics(ctx, snapshot) - if ctx.Err() != nil { - return nil, ctx.Err() - } - store("diagnosing go.work file", workReports, workErr) - - // Diagnose go.mod file. - modReports, modErr := mod.ParseDiagnostics(ctx, snapshot) - if ctx.Err() != nil { - return nil, ctx.Err() - } - store("diagnosing go.mod file", modReports, modErr) - - // Diagnose go.mod upgrades. - upgradeReports, upgradeErr := mod.UpgradeDiagnostics(ctx, snapshot) - if ctx.Err() != nil { - return nil, ctx.Err() - } - store("diagnosing go.mod upgrades", upgradeReports, upgradeErr) - workspacePkgs, err := snapshot.WorkspaceMetadata(ctx) if s.shouldIgnoreError(snapshot, err) { return diagnostics, ctx.Err() @@ -397,16 +373,6 @@ func (s *server) diagnose(ctx context.Context, snapshot *cache.Snapshot) (diagMa var wg sync.WaitGroup // for potentially slow operations below - // Maybe run go mod tidy (if it has been invalidated). - // - // Since go mod tidy can be slow, we run it concurrently to diagnostics. - wg.Add(1) - go func() { - defer wg.Done() - modTidyReports, err := mod.TidyDiagnostics(ctx, snapshot) - store("running go mod tidy", modTidyReports, err) - }() - // Run type checking and go/analysis diagnosis of packages in parallel. // // For analysis, we use the *widest* package for each open file, @@ -460,13 +426,6 @@ func (s *server) diagnose(ctx context.Context, snapshot *cache.Snapshot) (diagMa } } - wg.Add(1) - go func() { - defer wg.Done() - gcDetailsReports, err := s.gcDetailsDiagnostics(ctx, snapshot, toDiagnose) - store("collecting gc_details", gcDetailsReports, err) - }() - // Package diagnostics and analysis diagnostics must both be computed and // merged before they can be reported. var pkgDiags, analysisDiags diagMap @@ -481,21 +440,6 @@ func (s *server) diagnose(ctx context.Context, snapshot *cache.Snapshot) (diagMa } }() - // Get diagnostics from analysis framework. - // This includes type-error analyzers, which suggest fixes to compiler errors. 
- wg.Add(1) - go func() { - defer wg.Done() - var err error - // TODO(rfindley): here and above, we should avoid using the first result - // if err is non-nil (though as of today it's OK). - analysisDiags, err = golang.Analyze(ctx, snapshot, toAnalyze, s.progress) - if err != nil { - event.Error(ctx, "warning: analyzing package", err, append(snapshot.Labels(), tag.Package.Of(keys.Join(maps.Keys(toDiagnose))))...) - return - } - }() - wg.Wait() // Merge analysis diagnostics with package diagnostics, and store the @@ -513,52 +457,6 @@ func (s *server) diagnose(ctx context.Context, snapshot *cache.Snapshot) (diagMa return diagnostics, nil } -func (s *server) gcDetailsDiagnostics(ctx context.Context, snapshot *cache.Snapshot, toDiagnose map[metadata.PackageID]*metadata.Package) (diagMap, error) { - // Process requested gc_details diagnostics. - // - // TODO(rfindley): this could be improved: - // 1. This should memoize its results if the package has not changed. - // 2. This should not even run gc_details if the package contains unsaved - // files. - // 3. See note below about using ReadFile. - // Consider that these points, in combination with the note below about - // races, suggest that gc_details should be tracked on the Snapshot. - var toGCDetail map[metadata.PackageID]*metadata.Package - for _, mp := range toDiagnose { - if snapshot.WantGCDetails(mp.ID) { - if toGCDetail == nil { - toGCDetail = make(map[metadata.PackageID]*metadata.Package) - } - toGCDetail[mp.ID] = mp - } - } - - diagnostics := make(diagMap) - for _, mp := range toGCDetail { - gcReports, err := golang.GCOptimizationDetails(ctx, snapshot, mp) - if err != nil { - event.Error(ctx, "warning: gc details", err, append(snapshot.Labels(), tag.Package.Of(string(mp.ID)))...) - continue - } - for uri, diags := range gcReports { - // TODO(rfindley): reading here should not be necessary: if a file has - // been deleted we should be notified, and diagnostics will eventually - // become consistent. - fh, err := snapshot.ReadFile(ctx, uri) - if err != nil { - return nil, err - } - // Don't publish gc details for unsaved buffers, since the underlying - // logic operates on the file on disk. - if fh == nil || !fh.SameContentsOnDisk() { - continue - } - diagnostics[uri] = append(diagnostics[uri], diags...) - } - } - return diagnostics, nil -} - // combineDiagnostics combines and filters list/parse/type diagnostics from // tdiags with adiags, and appends the two lists to *outT and *outA, // respectively. diff --git a/internal/golangorgx/gopls/server/folding_range.go b/internal/golangorgx/gopls/server/folding_range.go deleted file mode 100644 index 55753e37258..00000000000 --- a/internal/golangorgx/gopls/server/folding_range.go +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package server - -import ( - "context" - - "cuelang.org/go/internal/golangorgx/gopls/file" - "cuelang.org/go/internal/golangorgx/gopls/golang" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/tools/event" - "cuelang.org/go/internal/golangorgx/tools/event/tag" -) - -func (s *server) FoldingRange(ctx context.Context, params *protocol.FoldingRangeParams) ([]protocol.FoldingRange, error) { - ctx, done := event.Start(ctx, "lsp.Server.foldingRange", tag.URI.Of(params.TextDocument.URI)) - defer done() - - fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) - if err != nil { - return nil, err - } - defer release() - if snapshot.FileKind(fh) != file.Go { - return nil, nil // empty result - } - ranges, err := golang.FoldingRange(ctx, snapshot, fh, snapshot.Options().LineFoldingOnly) - if err != nil { - return nil, err - } - return toProtocolFoldingRanges(ranges) -} - -func toProtocolFoldingRanges(ranges []*golang.FoldingRangeInfo) ([]protocol.FoldingRange, error) { - result := make([]protocol.FoldingRange, 0, len(ranges)) - for _, info := range ranges { - rng := info.MappedRange.Range() - result = append(result, protocol.FoldingRange{ - StartLine: rng.Start.Line, - StartCharacter: rng.Start.Character, - EndLine: rng.End.Line, - EndCharacter: rng.End.Character, - Kind: string(info.Kind), - }) - } - return result, nil -} diff --git a/internal/golangorgx/gopls/server/format.go b/internal/golangorgx/gopls/server/format.go index c04cc96cb34..26d6072e000 100644 --- a/internal/golangorgx/gopls/server/format.go +++ b/internal/golangorgx/gopls/server/format.go @@ -9,9 +9,7 @@ import ( "cuelang.org/go/internal/golangorgx/gopls/file" "cuelang.org/go/internal/golangorgx/gopls/golang" - "cuelang.org/go/internal/golangorgx/gopls/mod" "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/work" "cuelang.org/go/internal/golangorgx/tools/event" "cuelang.org/go/internal/golangorgx/tools/event/tag" ) @@ -27,12 +25,8 @@ func (s *server) Formatting(ctx context.Context, params *protocol.DocumentFormat defer release() switch snapshot.FileKind(fh) { - case file.Mod: - return mod.Format(ctx, snapshot, fh) case file.Go: return golang.Format(ctx, snapshot, fh) - case file.Work: - return work.Format(ctx, snapshot, fh) } return nil, nil // empty result } diff --git a/internal/golangorgx/gopls/server/general.go b/internal/golangorgx/gopls/server/general.go index f89437e7b8b..3e85f268a73 100644 --- a/internal/golangorgx/gopls/server/general.go +++ b/internal/golangorgx/gopls/server/general.go @@ -112,16 +112,6 @@ func (s *server) Initialize(ctx context.Context, params *protocol.ParamInitializ } var codeActionProvider interface{} = true - if ca := params.Capabilities.TextDocument.CodeAction; len(ca.CodeActionLiteralSupport.CodeActionKind.ValueSet) > 0 { - // If the client has specified CodeActionLiteralSupport, - // send the code actions we support. - // - // Using CodeActionOptions is only valid if codeActionLiteralSupport is set. 
- codeActionProvider = &protocol.CodeActionOptions{ - CodeActionKinds: s.getSupportedCodeActions(), - ResolveProvider: true, - } - } var renameOpts interface{} = true if r := params.Capabilities.TextDocument.Rename; r != nil && r.PrepareSupport { renameOpts = protocol.RenameOptions{ diff --git a/internal/golangorgx/gopls/server/highlight.go b/internal/golangorgx/gopls/server/highlight.go deleted file mode 100644 index b99767fcc3f..00000000000 --- a/internal/golangorgx/gopls/server/highlight.go +++ /dev/null @@ -1,51 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package server - -import ( - "context" - - "cuelang.org/go/internal/golangorgx/gopls/file" - "cuelang.org/go/internal/golangorgx/gopls/golang" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/template" - "cuelang.org/go/internal/golangorgx/tools/event" - "cuelang.org/go/internal/golangorgx/tools/event/tag" -) - -func (s *server) DocumentHighlight(ctx context.Context, params *protocol.DocumentHighlightParams) ([]protocol.DocumentHighlight, error) { - ctx, done := event.Start(ctx, "lsp.Server.documentHighlight", tag.URI.Of(params.TextDocument.URI)) - defer done() - - fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) - if err != nil { - return nil, err - } - defer release() - - switch snapshot.FileKind(fh) { - case file.Tmpl: - return template.Highlight(ctx, snapshot, fh, params.Position) - case file.Go: - rngs, err := golang.Highlight(ctx, snapshot, fh, params.Position) - if err != nil { - event.Error(ctx, "no highlight", err) - } - return toProtocolHighlight(rngs), nil - } - return nil, nil // empty result -} - -func toProtocolHighlight(rngs []protocol.Range) []protocol.DocumentHighlight { - result := make([]protocol.DocumentHighlight, 0, len(rngs)) - kind := protocol.Text - for _, rng := range rngs { - result = append(result, protocol.DocumentHighlight{ - Kind: kind, - Range: rng, - }) - } - return result -} diff --git a/internal/golangorgx/gopls/server/hover.go b/internal/golangorgx/gopls/server/hover.go deleted file mode 100644 index f423bcf4707..00000000000 --- a/internal/golangorgx/gopls/server/hover.go +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package server - -import ( - "context" - - "cuelang.org/go/internal/golangorgx/gopls/file" - "cuelang.org/go/internal/golangorgx/gopls/golang" - "cuelang.org/go/internal/golangorgx/gopls/mod" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/telemetry" - "cuelang.org/go/internal/golangorgx/gopls/template" - "cuelang.org/go/internal/golangorgx/gopls/work" - "cuelang.org/go/internal/golangorgx/tools/event" - "cuelang.org/go/internal/golangorgx/tools/event/tag" -) - -func (s *server) Hover(ctx context.Context, params *protocol.HoverParams) (_ *protocol.Hover, rerr error) { - recordLatency := telemetry.StartLatencyTimer("hover") - defer func() { - recordLatency(ctx, rerr) - }() - - ctx, done := event.Start(ctx, "lsp.Server.hover", tag.URI.Of(params.TextDocument.URI)) - defer done() - - fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) - if err != nil { - return nil, err - } - defer release() - - switch snapshot.FileKind(fh) { - case file.Mod: - return mod.Hover(ctx, snapshot, fh, params.Position) - case file.Go: - return golang.Hover(ctx, snapshot, fh, params.Position) - case file.Tmpl: - return template.Hover(ctx, snapshot, fh, params.Position) - case file.Work: - return work.Hover(ctx, snapshot, fh, params.Position) - } - return nil, nil // empty result -} diff --git a/internal/golangorgx/gopls/server/implementation.go b/internal/golangorgx/gopls/server/implementation.go deleted file mode 100644 index 13a8110bb7f..00000000000 --- a/internal/golangorgx/gopls/server/implementation.go +++ /dev/null @@ -1,36 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package server - -import ( - "context" - - "cuelang.org/go/internal/golangorgx/gopls/file" - "cuelang.org/go/internal/golangorgx/gopls/golang" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/telemetry" - "cuelang.org/go/internal/golangorgx/tools/event" - "cuelang.org/go/internal/golangorgx/tools/event/tag" -) - -func (s *server) Implementation(ctx context.Context, params *protocol.ImplementationParams) (_ []protocol.Location, rerr error) { - recordLatency := telemetry.StartLatencyTimer("implementation") - defer func() { - recordLatency(ctx, rerr) - }() - - ctx, done := event.Start(ctx, "lsp.Server.implementation", tag.URI.Of(params.TextDocument.URI)) - defer done() - - fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) - if err != nil { - return nil, err - } - defer release() - if snapshot.FileKind(fh) != file.Go { - return nil, nil // empty result - } - return golang.Implementation(ctx, snapshot, fh, params.Position) -} diff --git a/internal/golangorgx/gopls/server/inlay_hint.go b/internal/golangorgx/gopls/server/inlay_hint.go deleted file mode 100644 index 48867dd2ed7..00000000000 --- a/internal/golangorgx/gopls/server/inlay_hint.go +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
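Several of the handlers being deleted here (Completion, Hover, Implementation, References) time themselves by pairing telemetry.StartLatencyTimer with a deferred call that reads the named error result. A stdlib-only sketch of that pattern; startLatencyTimer below is a hypothetical stand-in for the telemetry helper:

package main

import (
	"errors"
	"fmt"
	"time"
)

// startLatencyTimer returns a function that, when called from a defer,
// observes both the elapsed time and the handler's final error, which is
// visible through the named result.
func startLatencyTimer(op string) func(err error) {
	start := time.Now()
	return func(err error) {
		fmt.Printf("%s took %v (err=%v)\n", op, time.Since(start), err)
	}
}

func handle() (rerr error) {
	record := startLatencyTimer("implementation")
	defer func() { record(rerr) }() // rerr is read when the deferred func runs

	time.Sleep(10 * time.Millisecond)
	return errors.New("not found")
}

func main() {
	_ = handle()
}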
- -package server - -import ( - "context" - - "cuelang.org/go/internal/golangorgx/gopls/file" - "cuelang.org/go/internal/golangorgx/gopls/golang" - "cuelang.org/go/internal/golangorgx/gopls/mod" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/tools/event" - "cuelang.org/go/internal/golangorgx/tools/event/tag" -) - -func (s *server) InlayHint(ctx context.Context, params *protocol.InlayHintParams) ([]protocol.InlayHint, error) { - ctx, done := event.Start(ctx, "lsp.Server.inlayHint", tag.URI.Of(params.TextDocument.URI)) - defer done() - - fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) - if err != nil { - return nil, err - } - defer release() - - switch snapshot.FileKind(fh) { - case file.Mod: - return mod.InlayHint(ctx, snapshot, fh, params.Range) - case file.Go: - return golang.InlayHint(ctx, snapshot, fh, params.Range) - } - return nil, nil // empty result -} diff --git a/internal/golangorgx/gopls/server/link.go b/internal/golangorgx/gopls/server/link.go deleted file mode 100644 index e6133533458..00000000000 --- a/internal/golangorgx/gopls/server/link.go +++ /dev/null @@ -1,285 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package server - -import ( - "bytes" - "context" - "fmt" - "go/ast" - "go/token" - "net/url" - "regexp" - "strings" - "sync" - - "cuelang.org/go/internal/golangorgx/gopls/cache" - "cuelang.org/go/internal/golangorgx/gopls/cache/metadata" - "cuelang.org/go/internal/golangorgx/gopls/cache/parsego" - "cuelang.org/go/internal/golangorgx/gopls/file" - "cuelang.org/go/internal/golangorgx/gopls/golang" - "cuelang.org/go/internal/golangorgx/gopls/protocol" - "cuelang.org/go/internal/golangorgx/gopls/util/safetoken" - "cuelang.org/go/internal/golangorgx/tools/event" - "cuelang.org/go/internal/golangorgx/tools/event/tag" - "golang.org/x/mod/modfile" -) - -func (s *server) DocumentLink(ctx context.Context, params *protocol.DocumentLinkParams) (links []protocol.DocumentLink, err error) { - ctx, done := event.Start(ctx, "lsp.Server.documentLink") - defer done() - - fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) - if err != nil { - return nil, err - } - defer release() - - switch snapshot.FileKind(fh) { - case file.Mod: - links, err = modLinks(ctx, snapshot, fh) - case file.Go: - links, err = goLinks(ctx, snapshot, fh) - } - // Don't return errors for document links. - if err != nil { - event.Error(ctx, "failed to compute document links", err, tag.URI.Of(fh.URI())) - return nil, nil // empty result - } - return links, nil // may be empty (for other file types) -} - -func modLinks(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]protocol.DocumentLink, error) { - pm, err := snapshot.ParseMod(ctx, fh) - if err != nil { - return nil, err - } - - var links []protocol.DocumentLink - for _, req := range pm.File.Require { - if req.Syntax == nil { - continue - } - // See golang/go#36998: don't link to modules matching GOPRIVATE. - if snapshot.IsGoPrivatePath(req.Mod.Path) { - continue - } - dep := []byte(req.Mod.Path) - start, end := req.Syntax.Start.Byte, req.Syntax.End.Byte - i := bytes.Index(pm.Mapper.Content[start:end], dep) - if i == -1 { - continue - } - // Shift the start position to the location of the - // dependency within the require statement. 
- target := cache.BuildLink(snapshot.Options().LinkTarget, "mod/"+req.Mod.String(), "") - l, err := toProtocolLink(pm.Mapper, target, start+i, start+i+len(dep)) - if err != nil { - return nil, err - } - links = append(links, l) - } - // TODO(ridersofrohan): handle links for replace and exclude directives. - if syntax := pm.File.Syntax; syntax == nil { - return links, nil - } - - // Get all the links that are contained in the comments of the file. - urlRegexp := snapshot.Options().URLRegexp - for _, expr := range pm.File.Syntax.Stmt { - comments := expr.Comment() - if comments == nil { - continue - } - for _, section := range [][]modfile.Comment{comments.Before, comments.Suffix, comments.After} { - for _, comment := range section { - l, err := findLinksInString(urlRegexp, comment.Token, comment.Start.Byte, pm.Mapper) - if err != nil { - return nil, err - } - links = append(links, l...) - } - } - } - return links, nil -} - -// goLinks returns the set of hyperlink annotations for the specified Go file. -func goLinks(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]protocol.DocumentLink, error) { - - pgf, err := snapshot.ParseGo(ctx, fh, parsego.ParseFull) - if err != nil { - return nil, err - } - - var links []protocol.DocumentLink - - // Create links for import specs. - if snapshot.Options().ImportShortcut.ShowLinks() { - - // If links are to pkg.go.dev, append module version suffixes. - // This requires the import map from the package metadata. Ignore errors. - var depsByImpPath map[golang.ImportPath]golang.PackageID - if strings.ToLower(snapshot.Options().LinkTarget) == "pkg.go.dev" { - if meta, err := golang.NarrowestMetadataForFile(ctx, snapshot, fh.URI()); err == nil { - depsByImpPath = meta.DepsByImpPath - } - } - - for _, imp := range pgf.File.Imports { - importPath := metadata.UnquoteImportPath(imp) - if importPath == "" { - continue // bad import - } - // See golang/go#36998: don't link to modules matching GOPRIVATE. - if snapshot.IsGoPrivatePath(string(importPath)) { - continue - } - - urlPath := string(importPath) - - // For pkg.go.dev, append module version suffix to package import path. - if mp := snapshot.Metadata(depsByImpPath[importPath]); mp != nil && mp.Module != nil && mp.Module.Path != "" && mp.Module.Version != "" { - urlPath = strings.Replace(urlPath, mp.Module.Path, mp.Module.Path+"@"+mp.Module.Version, 1) - } - - start, end, err := safetoken.Offsets(pgf.Tok, imp.Path.Pos(), imp.Path.End()) - if err != nil { - return nil, err - } - targetURL := cache.BuildLink(snapshot.Options().LinkTarget, urlPath, "") - // Account for the quotation marks in the positions. - l, err := toProtocolLink(pgf.Mapper, targetURL, start+len(`"`), end-len(`"`)) - if err != nil { - return nil, err - } - links = append(links, l) - } - } - - urlRegexp := snapshot.Options().URLRegexp - - // Gather links found in string literals. - var str []*ast.BasicLit - ast.Inspect(pgf.File, func(node ast.Node) bool { - switch n := node.(type) { - case *ast.ImportSpec: - return false // don't process import strings again - case *ast.BasicLit: - if n.Kind == token.STRING { - str = append(str, n) - } - } - return true - }) - for _, s := range str { - strOffset, err := safetoken.Offset(pgf.Tok, s.Pos()) - if err != nil { - return nil, err - } - l, err := findLinksInString(urlRegexp, s.Value, strOffset, pgf.Mapper) - if err != nil { - return nil, err - } - links = append(links, l...) - } - - // Gather links found in comments. 
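For pkg.go.dev targets, goLinks above splices the module version into the import path by replacing the module-path prefix with path@version. The string manipulation on its own; the module path and version below are illustrative:

package main

import (
	"fmt"
	"strings"
)

// versionedDocURL builds a pkg.go.dev URL for importPath, inserting
// modVersion after the module-path prefix when both are known.
func versionedDocURL(importPath, modPath, modVersion string) string {
	urlPath := importPath
	if modPath != "" && modVersion != "" {
		urlPath = strings.Replace(urlPath, modPath, modPath+"@"+modVersion, 1)
	}
	return "https://pkg.go.dev/" + urlPath
}

func main() {
	fmt.Println(versionedDocURL("golang.org/x/mod/modfile", "golang.org/x/mod", "v0.17.0"))
	// https://pkg.go.dev/golang.org/x/mod@v0.17.0/modfile
}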
- for _, commentGroup := range pgf.File.Comments { - for _, comment := range commentGroup.List { - commentOffset, err := safetoken.Offset(pgf.Tok, comment.Pos()) - if err != nil { - return nil, err - } - l, err := findLinksInString(urlRegexp, comment.Text, commentOffset, pgf.Mapper) - if err != nil { - return nil, err - } - links = append(links, l...) - } - } - - return links, nil -} - -// acceptedSchemes controls the schemes that URLs must have to be shown to the -// user. Other schemes can't be opened by LSP clients, so linkifying them is -// distracting. See golang/go#43990. -var acceptedSchemes = map[string]bool{ - "http": true, - "https": true, -} - -// urlRegexp is the user-supplied regular expression to match URL. -// srcOffset is the start offset of 'src' within m's file. -func findLinksInString(urlRegexp *regexp.Regexp, src string, srcOffset int, m *protocol.Mapper) ([]protocol.DocumentLink, error) { - var links []protocol.DocumentLink - for _, index := range urlRegexp.FindAllIndex([]byte(src), -1) { - start, end := index[0], index[1] - link := src[start:end] - linkURL, err := url.Parse(link) - // Fallback: Linkify IP addresses as suggested in golang/go#18824. - if err != nil { - linkURL, err = url.Parse("//" + link) - // Not all potential links will be valid, so don't return this error. - if err != nil { - continue - } - } - // If the URL has no scheme, use https. - if linkURL.Scheme == "" { - linkURL.Scheme = "https" - } - if !acceptedSchemes[linkURL.Scheme] { - continue - } - - l, err := toProtocolLink(m, linkURL.String(), srcOffset+start, srcOffset+end) - if err != nil { - return nil, err - } - links = append(links, l) - } - // Handle golang/go#1234-style links. - r := getIssueRegexp() - for _, index := range r.FindAllIndex([]byte(src), -1) { - start, end := index[0], index[1] - matches := r.FindStringSubmatch(src) - if len(matches) < 4 { - continue - } - org, repo, number := matches[1], matches[2], matches[3] - targetURL := fmt.Sprintf("https://github.com/%s/%s/issues/%s", org, repo, number) - l, err := toProtocolLink(m, targetURL, srcOffset+start, srcOffset+end) - if err != nil { - return nil, err - } - links = append(links, l) - } - return links, nil -} - -func getIssueRegexp() *regexp.Regexp { - once.Do(func() { - issueRegexp = regexp.MustCompile(`(\w+)/([\w-]+)#([0-9]+)`) - }) - return issueRegexp -} - -var ( - once sync.Once - issueRegexp *regexp.Regexp -) - -func toProtocolLink(m *protocol.Mapper, targetURL string, start, end int) (protocol.DocumentLink, error) { - rng, err := m.OffsetRange(start, end) - if err != nil { - return protocol.DocumentLink{}, err - } - return protocol.DocumentLink{ - Range: rng, - Target: &targetURL, - }, nil -} diff --git a/internal/golangorgx/gopls/server/references.go b/internal/golangorgx/gopls/server/references.go deleted file mode 100644 index 63a4aaa63c1..00000000000 --- a/internal/golangorgx/gopls/server/references.go +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
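findLinksInString above also linkifies golang/go#1234-style references using the regexp (\w+)/([\w-]+)#([0-9]+). The same extraction as a self-contained sketch:

package main

import (
	"fmt"
	"regexp"
)

var issueRegexp = regexp.MustCompile(`(\w+)/([\w-]+)#([0-9]+)`)

// issueLinks returns a GitHub issue URL for every org/repo#number reference
// found in src, in the order they appear.
func issueLinks(src string) []string {
	var urls []string
	for _, m := range issueRegexp.FindAllStringSubmatch(src, -1) {
		org, repo, number := m[1], m[2], m[3]
		urls = append(urls, fmt.Sprintf("https://github.com/%s/%s/issues/%s", org, repo, number))
	}
	return urls
}

func main() {
	fmt.Println(issueLinks("See golang/go#36998 and golang/vscode-go#123 for details."))
	// [https://github.com/golang/go/issues/36998 https://github.com/golang/vscode-go/issues/123]
}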
diff --git a/internal/golangorgx/gopls/server/references.go b/internal/golangorgx/gopls/server/references.go
deleted file mode 100644
index 63a4aaa63c1..00000000000
--- a/internal/golangorgx/gopls/server/references.go
+++ /dev/null
@@ -1,40 +0,0 @@
-// Copyright 2019 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package server
-
-import (
-	"context"
-
-	"cuelang.org/go/internal/golangorgx/gopls/file"
-	"cuelang.org/go/internal/golangorgx/gopls/golang"
-	"cuelang.org/go/internal/golangorgx/gopls/protocol"
-	"cuelang.org/go/internal/golangorgx/gopls/telemetry"
-	"cuelang.org/go/internal/golangorgx/gopls/template"
-	"cuelang.org/go/internal/golangorgx/tools/event"
-	"cuelang.org/go/internal/golangorgx/tools/event/tag"
-)
-
-func (s *server) References(ctx context.Context, params *protocol.ReferenceParams) (_ []protocol.Location, rerr error) {
-	recordLatency := telemetry.StartLatencyTimer("references")
-	defer func() {
-		recordLatency(ctx, rerr)
-	}()
-
-	ctx, done := event.Start(ctx, "lsp.Server.references", tag.URI.Of(params.TextDocument.URI))
-	defer done()
-
-	fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI)
-	if err != nil {
-		return nil, err
-	}
-	defer release()
-	switch snapshot.FileKind(fh) {
-	case file.Tmpl:
-		return template.References(ctx, snapshot, fh, params)
-	case file.Go:
-		return golang.References(ctx, snapshot, fh, params.Position, params.Context.IncludeDeclaration)
-	}
-	return nil, nil // empty result
-}
diff --git a/internal/golangorgx/gopls/server/rename.go b/internal/golangorgx/gopls/server/rename.go
deleted file mode 100644
index 8b3097830cd..00000000000
--- a/internal/golangorgx/gopls/server/rename.go
+++ /dev/null
@@ -1,98 +0,0 @@
-// Copyright 2019 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package server
-
-import (
-	"context"
-	"fmt"
-	"path/filepath"
-
-	"cuelang.org/go/internal/golangorgx/gopls/file"
-	"cuelang.org/go/internal/golangorgx/gopls/golang"
-	"cuelang.org/go/internal/golangorgx/gopls/protocol"
-	"cuelang.org/go/internal/golangorgx/tools/event"
-	"cuelang.org/go/internal/golangorgx/tools/event/tag"
-)
-
-func (s *server) Rename(ctx context.Context, params *protocol.RenameParams) (*protocol.WorkspaceEdit, error) {
-	ctx, done := event.Start(ctx, "lsp.Server.rename", tag.URI.Of(params.TextDocument.URI))
-	defer done()
-
-	fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI)
-	if err != nil {
-		return nil, err
-	}
-	defer release()
-
-	if kind := snapshot.FileKind(fh); kind != file.Go {
-		return nil, fmt.Errorf("cannot rename in file of type %s", kind)
-	}
-
-	// Because we don't handle directory renaming within golang.Rename, golang.Rename returns
-	// boolean value isPkgRenaming to determine whether an DocumentChanges of type RenameFile should
-	// be added to the return protocol.WorkspaceEdit value.
-	edits, isPkgRenaming, err := golang.Rename(ctx, snapshot, fh, params.Position, params.NewName)
-	if err != nil {
-		return nil, err
-	}
-
-	docChanges := []protocol.DocumentChanges{} // must be a slice
-	for uri, e := range edits {
-		fh, err := snapshot.ReadFile(ctx, uri)
-		if err != nil {
-			return nil, err
-		}
-		docChanges = append(docChanges, documentChanges(fh, e)...)
-	}
-	if isPkgRenaming {
-		// Update the last component of the file's enclosing directory.
-		oldBase := filepath.Dir(fh.URI().Path())
-		newURI := filepath.Join(filepath.Dir(oldBase), params.NewName)
-		docChanges = append(docChanges, protocol.DocumentChanges{
-			RenameFile: &protocol.RenameFile{
-				Kind:   "rename",
-				OldURI: protocol.URIFromPath(oldBase),
-				NewURI: protocol.URIFromPath(newURI),
-			},
-		})
-	}
-	return &protocol.WorkspaceEdit{
-		DocumentChanges: docChanges,
-	}, nil
-}
-
-// PrepareRename implements the textDocument/prepareRename handler. It may
-// return (nil, nil) if there is no rename at the cursor position, but it is
-// not desirable to display an error to the user.
-//
-// TODO(rfindley): why wouldn't we want to show an error to the user, if the
-// user initiated a rename request at the cursor?
-func (s *server) PrepareRename(ctx context.Context, params *protocol.PrepareRenameParams) (*protocol.PrepareRenamePlaceholder, error) {
-	ctx, done := event.Start(ctx, "lsp.Server.prepareRename", tag.URI.Of(params.TextDocument.URI))
-	defer done()
-
-	fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI)
-	if err != nil {
-		return nil, err
-	}
-	defer release()
-
-	if kind := snapshot.FileKind(fh); kind != file.Go {
-		return nil, fmt.Errorf("cannot rename in file of type %s", kind)
-	}
-
-	// Do not return errors here, as it adds clutter.
-	// Returning a nil result means there is not a valid rename.
-	item, usererr, err := golang.PrepareRename(ctx, snapshot, fh, params.Position)
-	if err != nil {
-		// Return usererr here rather than err, to avoid cluttering the UI with
-		// internal error details.
-		return nil, usererr
-	}
-	return &protocol.PrepareRenamePlaceholder{
-		Range:       item.Range,
-		Placeholder: item.Text,
-	}, nil
-}
diff --git a/internal/golangorgx/gopls/server/selection_range.go b/internal/golangorgx/gopls/server/selection_range.go
deleted file mode 100644
index 3d03c82acf2..00000000000
--- a/internal/golangorgx/gopls/server/selection_range.go
+++ /dev/null
@@ -1,75 +0,0 @@
-// Copyright 2022 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package server
-
-import (
-	"context"
-	"fmt"
-
-	"cuelang.org/go/internal/golangorgx/gopls/cache/parsego"
-	"cuelang.org/go/internal/golangorgx/gopls/file"
-	"cuelang.org/go/internal/golangorgx/gopls/protocol"
-	"cuelang.org/go/internal/golangorgx/tools/event"
-	"golang.org/x/tools/go/ast/astutil"
-)
-
-// selectionRange defines the textDocument/selectionRange feature,
-// which, given a list of positions within a file,
-// reports a linked list of enclosing syntactic blocks, innermost first.
-//
-// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_selectionRange.
-//
-// This feature can be used by a client to implement "expand selection" in a
-// language-aware fashion. Multiple input positions are supported to allow
-// for multiple cursors, and the entire path up to the whole document is
-// returned for each cursor to avoid multiple round-trips when the user is
-// likely to issue this command multiple times in quick succession.
-func (s *server) SelectionRange(ctx context.Context, params *protocol.SelectionRangeParams) ([]protocol.SelectionRange, error) {
-	ctx, done := event.Start(ctx, "lsp.Server.selectionRange")
-	defer done()
-
-	fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI)
-	if err != nil {
-		return nil, err
-	}
-	defer release()
-
-	if kind := snapshot.FileKind(fh); kind != file.Go {
-		return nil, fmt.Errorf("SelectionRange not supported for file of type %s", kind)
-	}
-
-	pgf, err := snapshot.ParseGo(ctx, fh, parsego.ParseFull)
-	if err != nil {
-		return nil, err
-	}
-
-	result := make([]protocol.SelectionRange, len(params.Positions))
-	for i, protocolPos := range params.Positions {
-		pos, err := pgf.PositionPos(protocolPos)
-		if err != nil {
-			return nil, err
-		}
-
-		path, _ := astutil.PathEnclosingInterval(pgf.File, pos, pos)
-
-		tail := &result[i] // tail of the Parent linked list, built head first
-
-		for j, node := range path {
-			rng, err := pgf.NodeRange(node)
-			if err != nil {
-				return nil, err
-			}
-
-			// Add node to tail.
-			if j > 0 {
-				tail.Parent = &protocol.SelectionRange{}
-				tail = tail.Parent
-			}
-			tail.Range = rng
-		}
-	}
-
-	return result, nil
-}
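The doc comment above describes the result shape: for each cursor position, a chain of ranges linked innermost-to-outermost through Parent. Below is a small illustrative sketch of how a client-side "expand selection" feature would consume such a chain; the local selectionRange type is a simplified stand-in for protocol.SelectionRange, with byte offsets instead of LSP positions.

    package main

    import "fmt"

    // selectionRange is a simplified stand-in for protocol.SelectionRange:
    // a range plus a pointer to the next (strictly larger) enclosing range.
    type selectionRange struct {
        Start, End int // byte offsets; the real type uses LSP positions
        Parent     *selectionRange
    }

    // expansionSteps walks the chain the way an "expand selection" command
    // would: each step yields the next enclosing range.
    func expansionSteps(sr *selectionRange) [][2]int {
        var steps [][2]int
        for cur := sr; cur != nil; cur = cur.Parent {
            steps = append(steps, [2]int{cur.Start, cur.End})
        }
        return steps
    }

    func main() {
        // Innermost first: identifier -> enclosing expression -> whole file.
        chain := &selectionRange{Start: 42, End: 45,
            Parent: &selectionRange{Start: 30, End: 60,
                Parent: &selectionRange{Start: 0, End: 120}}}
        fmt.Println(expansionSteps(chain)) // [[42 45] [30 60] [0 120]]
    }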
diff --git a/internal/golangorgx/gopls/server/semantic.go b/internal/golangorgx/gopls/server/semantic.go
deleted file mode 100644
index 9e9a97b857b..00000000000
--- a/internal/golangorgx/gopls/server/semantic.go
+++ /dev/null
@@ -1,53 +0,0 @@
-// Copyright 2020 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package server
-
-import (
-	"context"
-	"fmt"
-
-	"cuelang.org/go/internal/golangorgx/gopls/file"
-	"cuelang.org/go/internal/golangorgx/gopls/golang"
-	"cuelang.org/go/internal/golangorgx/gopls/protocol"
-	"cuelang.org/go/internal/golangorgx/gopls/template"
-	"cuelang.org/go/internal/golangorgx/tools/event"
-	"cuelang.org/go/internal/golangorgx/tools/event/tag"
-)
-
-func (s *server) SemanticTokensFull(ctx context.Context, params *protocol.SemanticTokensParams) (*protocol.SemanticTokens, error) {
-	return s.semanticTokens(ctx, params.TextDocument, nil)
-}
-
-func (s *server) SemanticTokensRange(ctx context.Context, params *protocol.SemanticTokensRangeParams) (*protocol.SemanticTokens, error) {
-	return s.semanticTokens(ctx, params.TextDocument, &params.Range)
-}
-
-func (s *server) semanticTokens(ctx context.Context, td protocol.TextDocumentIdentifier, rng *protocol.Range) (*protocol.SemanticTokens, error) {
-	ctx, done := event.Start(ctx, "lsp.Server.semanticTokens", tag.URI.Of(td.URI))
-	defer done()
-
-	fh, snapshot, release, err := s.fileOf(ctx, td.URI)
-	if err != nil {
-		return nil, err
-	}
-	defer release()
-	if !snapshot.Options().SemanticTokens {
-		// return an error, so if the option changes
-		// the client won't remember the wrong answer
-		return nil, fmt.Errorf("semantictokens are disabled")
-	}
-
-	switch snapshot.FileKind(fh) {
-	case file.Tmpl:
-		return template.SemanticTokens(ctx, snapshot, fh.URI())
-
-	case file.Go:
-		return golang.SemanticTokens(ctx, snapshot, fh, rng)
-
-	default:
-		// TODO(adonovan): should return an error!
-		return nil, nil // empty result
-	}
-}
diff --git a/internal/golangorgx/gopls/server/signature_help.go b/internal/golangorgx/gopls/server/signature_help.go
deleted file mode 100644
index 24fd13a6acb..00000000000
--- a/internal/golangorgx/gopls/server/signature_help.go
+++ /dev/null
@@ -1,40 +0,0 @@
-// Copyright 2018 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package server
-
-import (
-	"context"
-
-	"cuelang.org/go/internal/golangorgx/gopls/file"
-	"cuelang.org/go/internal/golangorgx/gopls/golang"
-	"cuelang.org/go/internal/golangorgx/gopls/protocol"
-	"cuelang.org/go/internal/golangorgx/tools/event"
-	"cuelang.org/go/internal/golangorgx/tools/event/tag"
-)
-
-func (s *server) SignatureHelp(ctx context.Context, params *protocol.SignatureHelpParams) (*protocol.SignatureHelp, error) {
-	ctx, done := event.Start(ctx, "lsp.Server.signatureHelp", tag.URI.Of(params.TextDocument.URI))
-	defer done()
-
-	fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI)
-	defer release()
-	if err != nil {
-		return nil, err
-	}
-
-	if snapshot.FileKind(fh) != file.Go {
-		return nil, nil // empty result
-	}
-
-	info, activeParameter, err := golang.SignatureHelp(ctx, snapshot, fh, params.Position)
-	if err != nil {
-		event.Error(ctx, "no signature help", err, tag.Position.Of(params.Position))
-		return nil, nil // sic? There could be many reasons for failure.
-	}
-	return &protocol.SignatureHelp{
-		Signatures:      []protocol.SignatureInformation{*info},
-		ActiveParameter: uint32(activeParameter),
-	}, nil
-}
diff --git a/internal/golangorgx/gopls/server/symbols.go b/internal/golangorgx/gopls/server/symbols.go
deleted file mode 100644
index a25e5ed0373..00000000000
--- a/internal/golangorgx/gopls/server/symbols.go
+++ /dev/null
@@ -1,62 +0,0 @@
-// Copyright 2019 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package server
-
-import (
-	"context"
-
-	"cuelang.org/go/internal/golangorgx/gopls/file"
-	"cuelang.org/go/internal/golangorgx/gopls/golang"
-	"cuelang.org/go/internal/golangorgx/gopls/protocol"
-	"cuelang.org/go/internal/golangorgx/gopls/template"
-	"cuelang.org/go/internal/golangorgx/tools/event"
-	"cuelang.org/go/internal/golangorgx/tools/event/tag"
-)
-
-func (s *server) DocumentSymbol(ctx context.Context, params *protocol.DocumentSymbolParams) ([]any, error) {
-	ctx, done := event.Start(ctx, "lsp.Server.documentSymbol", tag.URI.Of(params.TextDocument.URI))
-	defer done()
-
-	fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI)
-	if err != nil {
-		return nil, err
-	}
-	defer release()
-
-	var docSymbols []protocol.DocumentSymbol
-	switch snapshot.FileKind(fh) {
-	case file.Tmpl:
-		docSymbols, err = template.DocumentSymbols(snapshot, fh)
-	case file.Go:
-		docSymbols, err = golang.DocumentSymbols(ctx, snapshot, fh)
-	default:
-		return nil, nil // empty result
-	}
-	if err != nil {
-		event.Error(ctx, "DocumentSymbols failed", err)
-		return nil, nil // empty result
-	}
-	// Convert the symbols to an interface array.
-	// TODO: Remove this once the lsp deprecates SymbolInformation.
-	symbols := make([]any, len(docSymbols))
-	for i, s := range docSymbols {
-		if snapshot.Options().HierarchicalDocumentSymbolSupport {
-			symbols[i] = s
-			continue
-		}
-		// If the client does not support hierarchical document symbols, then
-		// we need to be backwards compatible for now and return SymbolInformation.
-		symbols[i] = protocol.SymbolInformation{
-			Name:       s.Name,
-			Kind:       s.Kind,
-			Deprecated: s.Deprecated,
-			Location: protocol.Location{
-				URI:   params.TextDocument.URI,
-				Range: s.Range,
-			},
-		}
-	}
-	return symbols, nil
-}
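For context on the fallback above: when a client does not advertise hierarchical document symbol support, each DocumentSymbol is flattened into a SymbolInformation, which is why the handler returns []any. A rough sketch of that conversion, using simplified stand-in types rather than the real protocol package:

    package main

    import "fmt"

    // documentSymbol and symbolInformation are simplified stand-ins for the
    // two LSP symbol shapes; the real types live in the protocol package.
    type documentSymbol struct {
        Name     string
        Kind     string
        Children []documentSymbol // dropped by the flat fallback, as above
    }

    type symbolInformation struct {
        Name, Kind, URI string
    }

    // symbolResult mirrors the deleted handler's behaviour: hierarchical
    // symbols are passed through when supported, otherwise flattened.
    func symbolResult(uri string, syms []documentSymbol, hierarchical bool) []any {
        out := make([]any, len(syms))
        for i, s := range syms {
            if hierarchical {
                out[i] = s
                continue
            }
            out[i] = symbolInformation{Name: s.Name, Kind: s.Kind, URI: uri}
        }
        return out
    }

    func main() {
        syms := []documentSymbol{{Name: "server", Kind: "struct"}, {Name: "DocumentSymbol", Kind: "method"}}
        fmt.Println(symbolResult("file:///symbols.go", syms, false))
    }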
diff --git a/internal/golangorgx/gopls/server/unimplemented.go b/internal/golangorgx/gopls/server/unimplemented.go
index 9de1ae1a439..70b8000f697 100644
--- a/internal/golangorgx/gopls/server/unimplemented.go
+++ b/internal/golangorgx/gopls/server/unimplemented.go
@@ -14,14 +14,30 @@ import (
 	"cuelang.org/go/internal/golangorgx/tools/jsonrpc2"
 )
 
+func (s *server) CodeAction(ctx context.Context, params *protocol.CodeActionParams) ([]protocol.CodeAction, error) {
+	return nil, notImplemented("CodeAction")
+}
+
+func (s *server) CodeLens(ctx context.Context, params *protocol.CodeLensParams) ([]protocol.CodeLens, error) {
+	return nil, notImplemented("CodeLens")
+}
+
 func (s *server) ColorPresentation(context.Context, *protocol.ColorPresentationParams) ([]protocol.ColorPresentation, error) {
 	return nil, notImplemented("ColorPresentation")
 }
 
+func (s *server) Completion(ctx context.Context, params *protocol.CompletionParams) (_ *protocol.CompletionList, rerr error) {
+	return nil, notImplemented("Completion")
+}
+
 func (s *server) Declaration(context.Context, *protocol.DeclarationParams) (*protocol.Or_textDocument_declaration, error) {
 	return nil, notImplemented("Declaration")
 }
 
+func (s *server) Definition(ctx context.Context, params *protocol.DefinitionParams) (_ []protocol.Location, rerr error) {
+	return nil, notImplemented("Definition")
+}
+
 func (s *server) Diagnostic(context.Context, *string) (*string, error) {
 	return nil, notImplemented("Diagnostic")
 }
@@ -62,6 +78,42 @@ func (s *server) DocumentColor(context.Context, *protocol.DocumentColorParams) (
 	return nil, notImplemented("DocumentColor")
 }
 
+func (s *server) DocumentHighlight(ctx context.Context, params *protocol.DocumentHighlightParams) ([]protocol.DocumentHighlight, error) {
+	return nil, notImplemented("DocumentHighlight")
+}
+
+func (s *server) DocumentLink(ctx context.Context, params *protocol.DocumentLinkParams) (links []protocol.DocumentLink, err error) {
+	return nil, notImplemented("DocumentLink")
+}
+
+func (s *server) DocumentSymbol(ctx context.Context, params *protocol.DocumentSymbolParams) ([]interface{}, error) {
+	return nil, notImplemented("DocumentSymbol")
+}
+
+func (s *server) ExecuteCommand(ctx context.Context, params *protocol.ExecuteCommandParams) (interface{}, error) {
+	return nil, notImplemented("ExecuteCommand")
+}
+
+func (s *server) FoldingRange(ctx context.Context, params *protocol.FoldingRangeParams) ([]protocol.FoldingRange, error) {
+	return nil, notImplemented("FoldingRange")
+}
+
+func (s *server) Hover(ctx context.Context, params *protocol.HoverParams) (_ *protocol.Hover, rerr error) {
+	return nil, notImplemented("Hover")
+}
+
+func (s *server) Implementation(ctx context.Context, params *protocol.ImplementationParams) (_ []protocol.Location, rerr error) {
+	return nil, notImplemented("Implementation")
+}
+
+func (s *server) IncomingCalls(ctx context.Context, params *protocol.CallHierarchyIncomingCallsParams) ([]protocol.CallHierarchyIncomingCall, error) {
+	return nil, notImplemented("IncomingCalls")
+}
+
+func (s *server) InlayHint(ctx context.Context, params *protocol.InlayHintParams) ([]protocol.InlayHint, error) {
+	return nil, notImplemented("InlayHint")
+}
+
 func (s *server) InlineCompletion(context.Context, *protocol.InlineCompletionParams) (*protocol.Or_Result_textDocument_inlineCompletion, error) {
 	return nil, notImplemented("InlineCompletion")
 }
@@ -82,6 +134,18 @@ func (s *server) OnTypeFormatting(context.Context, *protocol.DocumentOnTypeForma
 	return nil, notImplemented("OnTypeFormatting")
 }
 
+func (s *server) OutgoingCalls(ctx context.Context, params *protocol.CallHierarchyOutgoingCallsParams) ([]protocol.CallHierarchyOutgoingCall, error) {
+	return nil, notImplemented("OutgoingCalls")
+}
+
+func (s *server) PrepareCallHierarchy(ctx context.Context, params *protocol.CallHierarchyPrepareParams) ([]protocol.CallHierarchyItem, error) {
+	return nil, notImplemented("PrepareCallHierarchy")
+}
+
+func (s *server) PrepareRename(ctx context.Context, params *protocol.PrepareRenameParams) (*protocol.PrepareRenamePlaceholder, error) {
+	return nil, notImplemented("PrepareRename")
+}
+
 func (s *server) PrepareTypeHierarchy(context.Context, *protocol.TypeHierarchyPrepareParams) ([]protocol.TypeHierarchyItem, error) {
 	return nil, notImplemented("PrepareTypeHierarchy")
 }
@@ -98,6 +162,14 @@ func (s *server) RangesFormatting(context.Context, *protocol.DocumentRangesForma
 	return nil, notImplemented("RangesFormatting")
 }
 
+func (s *server) References(ctx context.Context, params *protocol.ReferenceParams) (_ []protocol.Location, rerr error) {
+	return nil, notImplemented("References")
+}
+
+func (s *server) Rename(ctx context.Context, params *protocol.RenameParams) (*protocol.WorkspaceEdit, error) {
+	return nil, notImplemented("Rename")
+}
+
 func (s *server) Resolve(context.Context, *protocol.InlayHint) (*protocol.InlayHint, error) {
 	return nil, notImplemented("Resolve")
 }
@@ -106,6 +178,10 @@ func (s *server) ResolveCodeLens(context.Context, *protocol.CodeLens) (*protocol
 	return nil, notImplemented("ResolveCodeLens")
 }
 
+func (s *server) ResolveCodeAction(context.Context, *protocol.CodeAction) (*protocol.CodeAction, error) {
+	return nil, notImplemented("ResolveCodeAction")
+}
+
 func (s *server) ResolveCompletionItem(context.Context, *protocol.CompletionItem) (*protocol.CompletionItem, error) {
 	return nil, notImplemented("ResolveCompletionItem")
 }
@@ -118,14 +194,30 @@ func (s *server) ResolveWorkspaceSymbol(context.Context, *protocol.WorkspaceSymb
 	return nil, notImplemented("ResolveWorkspaceSymbol")
 }
 
+func (s *server) SelectionRange(ctx context.Context, params *protocol.SelectionRangeParams) ([]protocol.SelectionRange, error) {
+	return nil, notImplemented("SelectionRange")
+}
+
+func (s *server) SemanticTokensFull(ctx context.Context, params *protocol.SemanticTokensParams) (*protocol.SemanticTokens, error) {
+	return nil, notImplemented("SemanticTokens")
+}
+
 func (s *server) SemanticTokensFullDelta(context.Context, *protocol.SemanticTokensDeltaParams) (interface{}, error) {
 	return nil, notImplemented("SemanticTokensFullDelta")
 }
 
+func (s *server) SemanticTokensRange(ctx context.Context, params *protocol.SemanticTokensRangeParams) (*protocol.SemanticTokens, error) {
+	return nil, notImplemented("SemanticTokensRange")
+}
+
 func (s *server) SetTrace(context.Context, *protocol.SetTraceParams) error {
 	return notImplemented("SetTrace")
 }
 
+func (s *server) SignatureHelp(ctx context.Context, params *protocol.SignatureHelpParams) (*protocol.SignatureHelp, error) {
+	return nil, notImplemented("SignatureHelp")
+}
+
 func (s *server) Subtypes(context.Context, *protocol.TypeHierarchySubtypesParams) ([]protocol.TypeHierarchyItem, error) {
 	return nil, notImplemented("Subtypes")
 }
@@ -134,6 +226,14 @@ func (s *server) Supertypes(context.Context, *protocol.TypeHierarchySupertypesPa
 	return nil, notImplemented("Supertypes")
 }
 
+func (s *server) Symbol(ctx context.Context, params *protocol.WorkspaceSymbolParams) (_ []protocol.SymbolInformation, rerr error) {
+	return nil, notImplemented("Symbol")
+}
+
+func (s *server) TypeDefinition(ctx context.Context, params *protocol.TypeDefinitionParams) ([]protocol.Location, error) {
+	return nil, notImplemented("TypeDefinition")
+}
+
 func (s *server) WillCreateFiles(context.Context, *protocol.CreateFilesParams) (*protocol.WorkspaceEdit, error) {
 	return nil, notImplemented("WillCreateFiles")
 }
diff --git a/internal/golangorgx/gopls/server/workspace_symbol.go b/internal/golangorgx/gopls/server/workspace_symbol.go
deleted file mode 100644
index ae56704bd80..00000000000
--- a/internal/golangorgx/gopls/server/workspace_symbol.go
+++ /dev/null
@@ -1,41 +0,0 @@
-// Copyright 2020 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package server
-
-import (
-	"context"
-
-	"cuelang.org/go/internal/golangorgx/gopls/cache"
-	"cuelang.org/go/internal/golangorgx/gopls/golang"
-	"cuelang.org/go/internal/golangorgx/gopls/protocol"
-	"cuelang.org/go/internal/golangorgx/gopls/telemetry"
-	"cuelang.org/go/internal/golangorgx/tools/event"
-)
-
-func (s *server) Symbol(ctx context.Context, params *protocol.WorkspaceSymbolParams) (_ []protocol.SymbolInformation, rerr error) {
-	recordLatency := telemetry.StartLatencyTimer("symbol")
-	defer func() {
-		recordLatency(ctx, rerr)
-	}()
-
-	ctx, done := event.Start(ctx, "lsp.Server.symbol")
-	defer done()
-
-	views := s.session.Views()
-	matcher := s.Options().SymbolMatcher
-	style := s.Options().SymbolStyle
-
-	var snapshots []*cache.Snapshot
-	for _, v := range views {
-		snapshot, release, err := v.Snapshot()
-		if err != nil {
-			continue // snapshot is shutting down
-		}
-		// If err is non-nil, the snapshot is shutting down. Skip it.
-		defer release()
-		snapshots = append(snapshots, snapshot)
-	}
-	return golang.WorkspaceSymbols(ctx, matcher, style, snapshots, params.Query)
-}
diff --git a/internal/golangorgx/gopls/settings/default.go b/internal/golangorgx/gopls/settings/default.go
index baff0772c7e..ad325738626 100644
--- a/internal/golangorgx/gopls/settings/default.go
+++ b/internal/golangorgx/gopls/settings/default.go
@@ -8,7 +8,6 @@ import (
 	"sync"
 	"time"
 
-	"cuelang.org/go/internal/golangorgx/gopls/file"
 	"cuelang.org/go/internal/golangorgx/gopls/protocol"
 	"cuelang.org/go/internal/golangorgx/gopls/protocol/command"
 )
@@ -39,23 +38,6 @@ func DefaultOptions(overrides ...func(*Options)) *Options {
 			HierarchicalDocumentSymbolSupport: true,
 		},
 		ServerOptions: ServerOptions{
-			SupportedCodeActions: map[file.Kind]map[protocol.CodeActionKind]bool{
-				file.Go: {
-					protocol.SourceFixAll:          true,
-					protocol.SourceOrganizeImports: true,
-					protocol.QuickFix:              true,
-					protocol.RefactorRewrite:       true,
-					protocol.RefactorInline:        true,
-					protocol.RefactorExtract:       true,
-				},
-				file.Mod: {
-					protocol.SourceOrganizeImports: true,
-					protocol.QuickFix:              true,
-				},
-				file.Work: {},
-				file.Sum:  {},
-				file.Tmpl: {},
-			},
 			SupportedCommands: commands,
 		},
 		UserOptions: UserOptions{
diff --git a/internal/golangorgx/gopls/settings/settings.go b/internal/golangorgx/gopls/settings/settings.go
index e025fb19e2e..b94bab272a4 100644
--- a/internal/golangorgx/gopls/settings/settings.go
+++ b/internal/golangorgx/gopls/settings/settings.go
@@ -13,61 +13,9 @@ import (
 	"strings"
 	"time"
 
-	"cuelang.org/go/internal/golangorgx/gopls/analysis/deprecated"
-	"cuelang.org/go/internal/golangorgx/gopls/analysis/embeddirective"
-	"cuelang.org/go/internal/golangorgx/gopls/analysis/fillreturns"
-	"cuelang.org/go/internal/golangorgx/gopls/analysis/infertypeargs"
-	"cuelang.org/go/internal/golangorgx/gopls/analysis/nonewvars"
-	"cuelang.org/go/internal/golangorgx/gopls/analysis/noresultvalues"
-	"cuelang.org/go/internal/golangorgx/gopls/analysis/simplifycompositelit"
-	"cuelang.org/go/internal/golangorgx/gopls/analysis/simplifyrange"
-	"cuelang.org/go/internal/golangorgx/gopls/analysis/simplifyslice"
-	"cuelang.org/go/internal/golangorgx/gopls/analysis/stubmethods"
-	"cuelang.org/go/internal/golangorgx/gopls/analysis/undeclaredname"
-	"cuelang.org/go/internal/golangorgx/gopls/analysis/unusedparams"
-	"cuelang.org/go/internal/golangorgx/gopls/analysis/unusedvariable"
-	"cuelang.org/go/internal/golangorgx/gopls/analysis/useany"
 	"cuelang.org/go/internal/golangorgx/gopls/file"
 	"cuelang.org/go/internal/golangorgx/gopls/protocol"
-	"cuelang.org/go/internal/golangorgx/gopls/protocol/command"
 	"golang.org/x/tools/go/analysis"
-	"golang.org/x/tools/go/analysis/passes/appends"
-	"golang.org/x/tools/go/analysis/passes/asmdecl"
-	"golang.org/x/tools/go/analysis/passes/assign"
-	"golang.org/x/tools/go/analysis/passes/atomic"
-	"golang.org/x/tools/go/analysis/passes/atomicalign"
-	"golang.org/x/tools/go/analysis/passes/bools"
-	"golang.org/x/tools/go/analysis/passes/buildtag"
-	"golang.org/x/tools/go/analysis/passes/cgocall"
-	"golang.org/x/tools/go/analysis/passes/composite"
-	"golang.org/x/tools/go/analysis/passes/copylock"
-	"golang.org/x/tools/go/analysis/passes/deepequalerrors"
-	"golang.org/x/tools/go/analysis/passes/defers"
-	"golang.org/x/tools/go/analysis/passes/directive"
-	"golang.org/x/tools/go/analysis/passes/errorsas"
-	"golang.org/x/tools/go/analysis/passes/fieldalignment"
-	"golang.org/x/tools/go/analysis/passes/httpresponse"
-	"golang.org/x/tools/go/analysis/passes/ifaceassert"
-	"golang.org/x/tools/go/analysis/passes/loopclosure"
-	"golang.org/x/tools/go/analysis/passes/lostcancel"
-	"golang.org/x/tools/go/analysis/passes/nilfunc"
-	"golang.org/x/tools/go/analysis/passes/nilness"
-	"golang.org/x/tools/go/analysis/passes/printf"
-	"golang.org/x/tools/go/analysis/passes/shadow"
-	"golang.org/x/tools/go/analysis/passes/shift"
-	"golang.org/x/tools/go/analysis/passes/slog"
-	"golang.org/x/tools/go/analysis/passes/sortslice"
-	"golang.org/x/tools/go/analysis/passes/stdmethods"
-	"golang.org/x/tools/go/analysis/passes/stringintconv"
-	"golang.org/x/tools/go/analysis/passes/structtag"
-	"golang.org/x/tools/go/analysis/passes/testinggoroutine"
-	"golang.org/x/tools/go/analysis/passes/tests"
-	"golang.org/x/tools/go/analysis/passes/timeformat"
-	"golang.org/x/tools/go/analysis/passes/unmarshal"
-	"golang.org/x/tools/go/analysis/passes/unreachable"
-	"golang.org/x/tools/go/analysis/passes/unsafeptr"
-	"golang.org/x/tools/go/analysis/passes/unusedresult"
-	"golang.org/x/tools/go/analysis/passes/unusedwrite"
 )
 
 type Annotation string
@@ -815,12 +763,6 @@ func (o *Options) EnableAllExperiments() {
 }
 
 func (o *Options) enableAllExperimentMaps() {
-	if _, ok := o.Codelenses[string(command.GCDetails)]; !ok {
-		o.Codelenses[string(command.GCDetails)] = true
-	}
-	if _, ok := o.Analyses[unusedvariable.Analyzer.Name]; !ok {
-		o.Analyses[unusedvariable.Analyzer.Name] = true
-	}
 }
 
 // validateDirectoryFilter validates if the filter string
@@ -1371,94 +1313,7 @@ func (r *OptionResult) setStringSlice(s *[]string) {
 }
 
 func analyzers() map[string]*Analyzer {
-	return map[string]*Analyzer{
-		// The traditional vet suite:
-		appends.Analyzer.Name:   {Analyzer: appends.Analyzer, Enabled: true},
-		asmdecl.Analyzer.Name:   {Analyzer: asmdecl.Analyzer, Enabled: true},
-		assign.Analyzer.Name:    {Analyzer: assign.Analyzer, Enabled: true},
-		atomic.Analyzer.Name:    {Analyzer: atomic.Analyzer, Enabled: true},
-		bools.Analyzer.Name:     {Analyzer: bools.Analyzer, Enabled: true},
-		buildtag.Analyzer.Name:  {Analyzer: buildtag.Analyzer, Enabled: true},
-		cgocall.Analyzer.Name:   {Analyzer: cgocall.Analyzer, Enabled: true},
-		composite.Analyzer.Name: {Analyzer: composite.Analyzer, Enabled: true},
-		copylock.Analyzer.Name:  {Analyzer: copylock.Analyzer, Enabled: true},
-		defers.Analyzer.Name:    {Analyzer: defers.Analyzer, Enabled: true},
-		deprecated.Analyzer.Name: {
-			Analyzer: deprecated.Analyzer,
-			Enabled:  true,
-			Severity: protocol.SeverityHint,
-			Tag:      []protocol.DiagnosticTag{protocol.Deprecated},
-		},
-		directive.Analyzer.Name:     {Analyzer: directive.Analyzer, Enabled: true},
-		errorsas.Analyzer.Name:      {Analyzer: errorsas.Analyzer, Enabled: true},
-		httpresponse.Analyzer.Name:  {Analyzer: httpresponse.Analyzer, Enabled: true},
-		ifaceassert.Analyzer.Name:   {Analyzer: ifaceassert.Analyzer, Enabled: true},
-		loopclosure.Analyzer.Name:   {Analyzer: loopclosure.Analyzer, Enabled: true},
-		lostcancel.Analyzer.Name:    {Analyzer: lostcancel.Analyzer, Enabled: true},
-		nilfunc.Analyzer.Name:       {Analyzer: nilfunc.Analyzer, Enabled: true},
-		printf.Analyzer.Name:        {Analyzer: printf.Analyzer, Enabled: true},
-		shift.Analyzer.Name:         {Analyzer: shift.Analyzer, Enabled: true},
-		slog.Analyzer.Name:          {Analyzer: slog.Analyzer, Enabled: true},
-		stdmethods.Analyzer.Name:    {Analyzer: stdmethods.Analyzer, Enabled: true},
-		stringintconv.Analyzer.Name: {Analyzer: stringintconv.Analyzer, Enabled: true},
-		structtag.Analyzer.Name:     {Analyzer: structtag.Analyzer, Enabled: true},
-		tests.Analyzer.Name:         {Analyzer: tests.Analyzer, Enabled: true},
-		unmarshal.Analyzer.Name:     {Analyzer: unmarshal.Analyzer, Enabled: true},
-		unreachable.Analyzer.Name:   {Analyzer: unreachable.Analyzer, Enabled: true},
-		unsafeptr.Analyzer.Name:     {Analyzer: unsafeptr.Analyzer, Enabled: true},
-		unusedresult.Analyzer.Name:  {Analyzer: unusedresult.Analyzer, Enabled: true},
-
-		// Non-vet analyzers:
-		atomicalign.Analyzer.Name:      {Analyzer: atomicalign.Analyzer, Enabled: true},
-		deepequalerrors.Analyzer.Name:  {Analyzer: deepequalerrors.Analyzer, Enabled: true},
-		fieldalignment.Analyzer.Name:   {Analyzer: fieldalignment.Analyzer, Enabled: false},
-		nilness.Analyzer.Name:          {Analyzer: nilness.Analyzer, Enabled: true},
-		shadow.Analyzer.Name:           {Analyzer: shadow.Analyzer, Enabled: false},
-		sortslice.Analyzer.Name:        {Analyzer: sortslice.Analyzer, Enabled: true},
-		testinggoroutine.Analyzer.Name: {Analyzer: testinggoroutine.Analyzer, Enabled: true},
-		unusedparams.Analyzer.Name:     {Analyzer: unusedparams.Analyzer, Enabled: true},
-		unusedwrite.Analyzer.Name:      {Analyzer: unusedwrite.Analyzer, Enabled: false},
-		useany.Analyzer.Name:           {Analyzer: useany.Analyzer, Enabled: false},
-		infertypeargs.Analyzer.Name: {
-			Analyzer: infertypeargs.Analyzer,
-			Enabled:  true,
-			Severity: protocol.SeverityHint,
-		},
-		timeformat.Analyzer.Name:     {Analyzer: timeformat.Analyzer, Enabled: true},
-		embeddirective.Analyzer.Name: {Analyzer: embeddirective.Analyzer, Enabled: true},
-
-		// gofmt -s suite:
-		simplifycompositelit.Analyzer.Name: {
-			Analyzer:    simplifycompositelit.Analyzer,
-			Enabled:     true,
-			ActionKinds: []protocol.CodeActionKind{protocol.SourceFixAll, protocol.QuickFix},
-		},
-		simplifyrange.Analyzer.Name: {
-			Analyzer:    simplifyrange.Analyzer,
-			Enabled:     true,
-			ActionKinds: []protocol.CodeActionKind{protocol.SourceFixAll, protocol.QuickFix},
-		},
-		simplifyslice.Analyzer.Name: {
-			Analyzer:    simplifyslice.Analyzer,
-			Enabled:     true,
-			ActionKinds: []protocol.CodeActionKind{protocol.SourceFixAll, protocol.QuickFix},
-		},
-
-		// Type error analyzers.
-		// These analyzers enrich go/types errors with suggested fixes.
-		fillreturns.Analyzer.Name:    {Analyzer: fillreturns.Analyzer, Enabled: true},
-		nonewvars.Analyzer.Name:      {Analyzer: nonewvars.Analyzer, Enabled: true},
-		noresultvalues.Analyzer.Name: {Analyzer: noresultvalues.Analyzer, Enabled: true},
-		stubmethods.Analyzer.Name:    {Analyzer: stubmethods.Analyzer, Enabled: true},
-		undeclaredname.Analyzer.Name: {Analyzer: undeclaredname.Analyzer, Enabled: true},
-		// TODO(rfindley): why isn't the 'unusedvariable' analyzer enabled, if it
-		// is only enhancing type errors with suggested fixes?
-		//
-		// In particular, enabling this analyzer could cause unused variables to be
-		// greyed out, (due to the 'deletions only' fix). That seems like a nice UI
-		// feature.
-		unusedvariable.Analyzer.Name: {Analyzer: unusedvariable.Analyzer, Enabled: false},
-	}
+	return map[string]*Analyzer{}
 }
 
 func urlRegexp() *regexp.Regexp {