diff --git a/internal/lsp/cache/check.go b/internal/lsp/cache/check.go
index 49188f4c..db3a1708 100644
--- a/internal/lsp/cache/check.go
+++ b/internal/lsp/cache/check.go
@@ -14,6 +14,7 @@ import (
 	"strings"
 	"sync"
 
+	"golang.org/x/tools/go/analysis"
 	"golang.org/x/tools/go/packages"
 	"golang.org/x/tools/internal/lsp/source"
 )
@@ -52,7 +53,7 @@ func (v *View) parse(ctx context.Context, uri source.URI) error {
 	}
 	// Type-check package.
 	pkg, err := v.typeCheck(f.meta.pkgPath)
-	if pkg == nil || pkg.Types == nil {
+	if pkg == nil || pkg.GetTypes() == nil {
 		return err
 	}
 	// Add every file in this package to our cache.
@@ -65,8 +66,8 @@ func (v *View) parse(ctx context.Context, uri source.URI) error {
 	return nil
 }
 
-func (v *View) cachePackage(pkg *packages.Package) {
-	for _, file := range pkg.Syntax {
+func (v *View) cachePackage(pkg *Package) {
+	for _, file := range pkg.GetSyntax() {
 		// TODO: If a file is in multiple packages, which package do we store?
 		if !file.Pos().IsValid() {
 			log.Printf("invalid position for file %v", file.Name)
@@ -202,10 +203,10 @@ func (v *View) Import(pkgPath string) (*types.Package, error) {
 	if e.err != nil {
 		return nil, e.err
 	}
-	return e.pkg.Types, nil
+	return e.pkg.types, nil
 }
 
-func (v *View) typeCheck(pkgPath string) (*packages.Package, error) {
+func (v *View) typeCheck(pkgPath string) (*Package, error) {
 	meta, ok := v.mcache.packages[pkgPath]
 	if !ok {
 		return nil, fmt.Errorf("no metadata for %v", pkgPath)
@@ -217,15 +218,13 @@ func (v *View) typeCheck(pkgPath string) (*packages.Package, error) {
 	} else {
 		typ = types.NewPackage(meta.pkgPath, meta.name)
 	}
-	pkg := &packages.Package{
-		ID:              meta.id,
-		Name:            meta.name,
-		PkgPath:         meta.pkgPath,
-		CompiledGoFiles: meta.files,
-		Imports:         make(map[string]*packages.Package),
-		Fset:            v.Config.Fset,
-		Types:           typ,
-		TypesInfo: &types.Info{
+	pkg := &Package{
+		id:      meta.id,
+		pkgPath: meta.pkgPath,
+		files:   meta.files,
+		imports: make(map[string]*Package),
+		types:   typ,
+		typesInfo: &types.Info{
 			Types:      make(map[ast.Expr]types.TypeAndValue),
 			Defs:       make(map[*ast.Ident]types.Object),
 			Uses:       make(map[*ast.Ident]types.Object),
@@ -233,8 +232,7 @@ func (v *View) typeCheck(pkgPath string) (*packages.Package, error) {
 			Selections: make(map[*ast.SelectorExpr]*types.Selection),
 			Scopes:     make(map[ast.Node]*types.Scope),
 		},
-		// TODO(rstambler): Get real TypeSizes from go/packages (golang.org/issues/30139).
-		TypesSizes: &types.StdSizes{},
+		analyses: make(map[*analysis.Analyzer]*analysisEntry),
 	}
 	appendError := func(err error) {
 		v.appendPkgError(pkg, err)
@@ -243,29 +241,30 @@ func (v *View) typeCheck(pkgPath string) (*packages.Package, error) {
 	for _, err := range errs {
 		appendError(err)
 	}
-	pkg.Syntax = files
+	pkg.syntax = files
 	cfg := &types.Config{
 		Error:    appendError,
 		Importer: v,
 	}
-	check := types.NewChecker(cfg, v.Config.Fset, pkg.Types, pkg.TypesInfo)
-	check.Files(pkg.Syntax)
+	check := types.NewChecker(cfg, v.Config.Fset, pkg.types, pkg.typesInfo)
+	check.Files(pkg.syntax)
 
-	// Set imports of package to correspond to cached packages. This is
-	// necessary for go/analysis, but once we merge its approach with the
-	// current caching system, we can eliminate this.
+	// Set imports of package to correspond to cached packages.
+	// We lock the package cache, but we shouldn't get any inconsistencies
+	// because we are still holding the lock on the view.
 	v.pcache.mu.Lock()
+	defer v.pcache.mu.Unlock()
+
 	for importPath := range meta.children {
 		if importEntry, ok := v.pcache.packages[importPath]; ok {
-			pkg.Imports[importPath] = importEntry.pkg
+			pkg.imports[importPath] = importEntry.pkg
 		}
 	}
-	v.pcache.mu.Unlock()
 
 	return pkg, nil
 }
 
-func (v *View) appendPkgError(pkg *packages.Package, err error) {
+func (v *View) appendPkgError(pkg *Package, err error) {
 	if err == nil {
 		return
 	}
@@ -293,7 +292,7 @@ func (v *View) appendPkgError(pkg *packages.Package, err error) {
 			Kind: packages.TypeError,
 		})
 	}
-	pkg.Errors = append(pkg.Errors, errs...)
+	pkg.errors = append(pkg.errors, errs...)
 }
 
 // We use a counting semaphore to limit
diff --git a/internal/lsp/cache/file.go b/internal/lsp/cache/file.go
index 13a55154..4747c66f 100644
--- a/internal/lsp/cache/file.go
+++ b/internal/lsp/cache/file.go
@@ -10,7 +10,6 @@ import (
 	"go/token"
 	"io/ioutil"
 
-	"golang.org/x/tools/go/packages"
 	"golang.org/x/tools/internal/lsp/source"
 )
 
@@ -22,7 +21,7 @@ type File struct {
 	content []byte
 	ast     *ast.File
 	token   *token.File
-	pkg     *packages.Package
+	pkg     *Package
 	meta    *metadata
 	imports []*ast.ImportSpec
 }
@@ -67,7 +66,7 @@ func (f *File) GetAST(ctx context.Context) *ast.File {
 	return f.ast
 }
 
-func (f *File) GetPackage(ctx context.Context) *packages.Package {
+func (f *File) GetPackage(ctx context.Context) source.Package {
 	f.view.mu.Lock()
 	defer f.view.mu.Unlock()
 
diff --git a/internal/lsp/cache/pkg.go b/internal/lsp/cache/pkg.go
new file mode 100644
index 00000000..6fa35cc0
--- /dev/null
+++ b/internal/lsp/cache/pkg.go
@@ -0,0 +1,119 @@
+package cache
+
+import (
+	"context"
+	"go/ast"
+	"go/types"
+	"sort"
+	"sync"
+
+	"golang.org/x/tools/go/analysis"
+	"golang.org/x/tools/go/packages"
+	"golang.org/x/tools/internal/lsp/source"
+)
+
+// Package contains the type information needed by the source package.
+type Package struct {
+	id, pkgPath string
+	files       []string
+	syntax      []*ast.File
+	errors      []packages.Error
+	imports     map[string]*Package
+	types       *types.Package
+	typesInfo   *types.Info
+
+	// The analysis cache holds analysis information for all the packages in a view.
+	// Each graph node (action) is one unit of analysis.
+	// Edges express package-to-package (vertical) dependencies,
+	// and analysis-to-analysis (horizontal) dependencies.
+	mu       sync.Mutex
+	analyses map[*analysis.Analyzer]*analysisEntry
+}
+
+type analysisEntry struct {
+	ready chan struct{}
+	*source.Action
+}
+
+func (pkg *Package) GetActionGraph(ctx context.Context, a *analysis.Analyzer) (*source.Action, error) {
+	if ctx.Err() != nil {
+		return nil, ctx.Err()
+	}
+
+	pkg.mu.Lock()
+	e, ok := pkg.analyses[a]
+	if ok {
+		// cache hit
+		pkg.mu.Unlock()
+
+		// wait for entry to become ready or the context to be cancelled
+		select {
+		case <-e.ready:
+		case <-ctx.Done():
+			return nil, ctx.Err()
+		}
+	} else {
+		// cache miss
+		e = &analysisEntry{
+			ready: make(chan struct{}),
+			Action: &source.Action{
+				Analyzer: a,
+				Pkg:      pkg,
+			},
+		}
+		pkg.analyses[a] = e
+		pkg.mu.Unlock()
+
+		// This goroutine becomes responsible for populating
+		// the entry and broadcasting its readiness.
+
+		// Add a dependency on each required analyzer.
+		for _, req := range a.Requires {
+			act, err := pkg.GetActionGraph(ctx, req)
+			if err != nil {
+				return nil, err
+			}
+			e.Deps = append(e.Deps, act)
+		}
+
+		// An analysis that consumes/produces facts
+		// must run on the package's dependencies too.
+		if len(a.FactTypes) > 0 {
+			importPaths := make([]string, 0, len(pkg.imports))
+			for importPath := range pkg.imports {
+				importPaths = append(importPaths, importPath)
+			}
+			sort.Strings(importPaths) // for determinism
+			for _, importPath := range importPaths {
+				dep := pkg.imports[importPath]
+				act, err := dep.GetActionGraph(ctx, a)
+				if err != nil {
+					return nil, err
+				}
+				e.Deps = append(e.Deps, act)
+			}
+		}
+		close(e.ready)
+	}
+	return e.Action, nil
+}
+
+func (pkg *Package) GetFilenames() []string {
+	return pkg.files
+}
+
+func (pkg *Package) GetSyntax() []*ast.File {
+	return pkg.syntax
+}
+
+func (pkg *Package) GetErrors() []packages.Error {
+	return pkg.errors
+}
+
+func (pkg *Package) GetTypes() *types.Package {
+	return pkg.types
+}
+
+func (pkg *Package) GetTypesInfo() *types.Info {
+	return pkg.typesInfo
+}
diff --git a/internal/lsp/cache/view.go b/internal/lsp/cache/view.go
index 26742327..2dba8d83 100644
--- a/internal/lsp/cache/view.go
+++ b/internal/lsp/cache/view.go
@@ -43,8 +43,6 @@ type View struct {
 
 	mcache *metadataCache
 	pcache *packageCache
-
-	analysisCache *source.AnalysisCache
 }
 
 type metadataCache struct {
@@ -64,7 +62,7 @@ type packageCache struct {
 }
 
 type entry struct {
-	pkg   *packages.Package
+	pkg   *Package
 	err   error
 	ready chan struct{} // closed to broadcast ready condition
 }
@@ -98,11 +96,6 @@ func (v *View) FileSet() *token.FileSet {
 	return v.Config.Fset
 }
 
-func (v *View) GetAnalysisCache() *source.AnalysisCache {
-	v.analysisCache = source.NewAnalysisCache()
-	return v.analysisCache
-}
-
 // SetContent sets the overlay contents for a file.
 func (v *View) SetContent(ctx context.Context, uri source.URI, content []byte) error {
 	v.mu.Lock()
@@ -151,7 +144,7 @@ func (v *View) applyContentChange(uri source.URI, content []byte) {
 
 	// Remove the package and all of its reverse dependencies from the cache.
 	if f.pkg != nil {
-		v.remove(f.pkg.PkgPath)
+		v.remove(f.pkg.pkgPath)
 	}
 
 	switch {
diff --git a/internal/lsp/cmd/definition.go b/internal/lsp/cmd/definition.go
index 5a2bab6e..62889a91 100644
--- a/internal/lsp/cmd/definition.go
+++ b/internal/lsp/cmd/definition.go
@@ -124,8 +124,8 @@ func buildGuruDefinition(ctx context.Context, view source.View, ident *source.Id
 	// Behavior that attempts to match the expected output for guru. For an example
 	// of the format, see the associated definition tests.
 	buf := &bytes.Buffer{}
-	q := types.RelativeTo(pkg.Types)
-	qualifyName := ident.Declaration.Object.Pkg() != pkg.Types
+	q := types.RelativeTo(pkg.GetTypes())
+	qualifyName := ident.Declaration.Object.Pkg() != pkg.GetTypes()
 	name := ident.Name
 	var suffix interface{}
 	switch obj := ident.Declaration.Object.(type) {
diff --git a/internal/lsp/source/analysis.go b/internal/lsp/source/analysis.go
index dacac7f3..25f697ac 100644
--- a/internal/lsp/source/analysis.go
+++ b/internal/lsp/source/analysis.go
@@ -7,7 +7,9 @@
 package source
 
 import (
+	"context"
 	"fmt"
+	"go/token"
 	"go/types"
 	"log"
 	"reflect"
@@ -17,74 +19,24 @@ import (
 	"time"
 
 	"golang.org/x/tools/go/analysis"
-	"golang.org/x/tools/go/packages"
 )
 
-// AnalysisCache holds analysis information for all the packages in a view.
-type AnalysisCache struct {
-	m map[analysisKey]*action
-}
-
-func NewAnalysisCache() *AnalysisCache {
-	return &AnalysisCache{make(map[analysisKey]*action)}
-}
-
-// Each graph node (action) is one unit of analysis.
-// Edges express package-to-package (vertical) dependencies,
-// and analysis-to-analysis (horizontal) dependencies.
-type analysisKey struct {
-	*analysis.Analyzer
-	*packages.Package
-}
-
-func (c *AnalysisCache) analyze(pkgs []*packages.Package, analyzers []*analysis.Analyzer) []*action {
-	// TODO(matloob): Every time but the first, this needs to re-construct
-	// the invalidated parts of the action graph, probably under a lock?
-
-	// Construct the action graph.
-	var mkAction func(a *analysis.Analyzer, pkg *packages.Package) *action
-	mkAction = func(a *analysis.Analyzer, pkg *packages.Package) *action {
-		k := analysisKey{a, pkg}
-		act, ok := c.m[k]
-		if !ok {
-			act = &action{a: a, pkg: pkg}
-
-			// Add a dependency on each required analyzers.
-			for _, req := range a.Requires {
-				act.deps = append(act.deps, mkAction(req, pkg))
-			}
-
-			// An analysis that consumes/produces facts
-			// must run on the package's dependencies too.
-			if len(a.FactTypes) > 0 {
-				paths := make([]string, 0, len(pkg.Imports))
-				for path := range pkg.Imports {
-					paths = append(paths, path)
-				}
-				sort.Strings(paths) // for determinism
-				for _, path := range paths {
-					dep := mkAction(a, pkg.Imports[path])
-					act.deps = append(act.deps, dep)
-				}
-			}
-
-			c.m[k] = act
-		}
-		return act
-	}
-
+func analyze(ctx context.Context, v View, pkgs []Package, analyzers []*analysis.Analyzer) []*Action {
 	// Build nodes for initial packages.
-	var roots []*action
+	var roots []*Action
 	for _, a := range analyzers {
 		for _, pkg := range pkgs {
-			root := mkAction(a, pkg)
+			root, err := pkg.GetActionGraph(ctx, a)
+			if err != nil {
+				continue
+			}
 			root.isroot = true
 			roots = append(roots, root)
 		}
 	}
 
 	// Execute the graph in parallel.
-	execAll(roots)
+	execAll(v.FileSet(), roots)
 	return roots
 }
 
@@ -93,13 +45,13 @@ func (c *AnalysisCache) analyze(pkgs []*packages.Package, analyzers []*analysis.
 // one analysis to one package. Actions form a DAG, both within a
 // package (as different analyzers are applied, either in sequence or
 // parallel), and across packages (as dependencies are analyzed).
-type action struct {
+type Action struct {
 	once         sync.Once
-	a            *analysis.Analyzer
-	pkg          *packages.Package
+	Analyzer     *analysis.Analyzer
+	Pkg          Package
+	Deps         []*Action
 	pass         *analysis.Pass
 	isroot       bool
-	deps         []*action
 	objectFacts  map[objectFactKey]analysis.Fact
 	packageFacts map[packageFactKey]analysis.Fact
 	inputs       map[*analysis.Analyzer]interface{}
@@ -119,16 +71,16 @@ type packageFactKey struct {
 	typ reflect.Type
 }
 
-func (act *action) String() string {
-	return fmt.Sprintf("%s@%s", act.a, act.pkg)
+func (act *Action) String() string {
+	return fmt.Sprintf("%s@%s", act.Analyzer, act.Pkg)
 }
 
-func execAll(actions []*action) {
+func execAll(fset *token.FileSet, actions []*Action) {
 	var wg sync.WaitGroup
 	for _, act := range actions {
 		wg.Add(1)
-		work := func(act *action) {
-			act.exec()
+		work := func(act *Action) {
+			act.exec(fset)
 			wg.Done()
 		}
 		go work(act)
@@ -136,15 +88,19 @@ func execAll(actions []*action) {
 	wg.Wait()
 }
 
-func (act *action) exec() { act.once.Do(act.execOnce) }
+func (act *Action) exec(fset *token.FileSet) {
+	act.once.Do(func() {
+		act.execOnce(fset)
+	})
+}
 
-func (act *action) execOnce() {
+func (act *Action) execOnce(fset *token.FileSet) {
 	// Analyze dependencies.
-	execAll(act.deps)
+	execAll(fset, act.Deps)
 
 	// Report an error if any dependency failed.
 	var failed []string
-	for _, dep := range act.deps {
+	for _, dep := range act.Deps {
 		if dep.err != nil {
 			failed = append(failed, dep.String())
 		}
@@ -160,14 +116,14 @@ func (act *action) execOnce() {
 	inputs := make(map[*analysis.Analyzer]interface{})
 	act.objectFacts = make(map[objectFactKey]analysis.Fact)
 	act.packageFacts = make(map[packageFactKey]analysis.Fact)
-	for _, dep := range act.deps {
-		if dep.pkg == act.pkg {
+	for _, dep := range act.Deps {
+		if dep.Pkg == act.Pkg {
 			// Same package, different analysis (horizontal edge):
 			// in-memory outputs of prerequisite analyzers
 			// become inputs to this analysis pass.
-			inputs[dep.a] = dep.result
+			inputs[dep.Analyzer] = dep.result
 
-		} else if dep.a == act.a { // (always true)
+		} else if dep.Analyzer == act.Analyzer { // (always true)
 			// Same analysis, different package (vertical edge):
 			// serialized facts produced by prerequisite analysis
 			// become available to this analysis pass.
@@ -177,13 +133,13 @@ func (act *action) execOnce() {
 
 	// Run the analysis.
 	pass := &analysis.Pass{
-		Analyzer:          act.a,
-		Fset:              act.pkg.Fset,
-		Files:             act.pkg.Syntax,
-		OtherFiles:        act.pkg.OtherFiles,
-		Pkg:               act.pkg.Types,
-		TypesInfo:         act.pkg.TypesInfo,
-		TypesSizes:        act.pkg.TypesSizes,
+		Analyzer:   act.Analyzer,
+		Fset:       fset,
+		Files:      act.Pkg.GetSyntax(),
+		Pkg:        act.Pkg.GetTypes(),
+		TypesInfo:  act.Pkg.GetTypesInfo(),
+		// TODO(rstambler): Get real TypeSizes from go/packages (golang.org/issues/30139).
+		TypesSizes: &types.StdSizes{},
 		ResultOf:          inputs,
 		Report:            func(d analysis.Diagnostic) { act.diagnostics = append(act.diagnostics, d) },
 		ImportObjectFact:  act.importObjectFact,
@@ -194,7 +150,7 @@ func (act *action) execOnce() {
 	act.pass = pass
 
 	var err error
-	if act.pkg.IllTyped && !pass.Analyzer.RunDespiteErrors {
+	if len(act.Pkg.GetErrors()) > 0 && !pass.Analyzer.RunDespiteErrors {
 		err = fmt.Errorf("analysis skipped due to errors in package")
 	} else {
 		act.result, err = pass.Analyzer.Run(pass)
@@ -215,12 +171,12 @@ func (act *action) execOnce() {
 
 // inheritFacts populates act.facts with
 // those it obtains from its dependency, dep.
-func inheritFacts(act, dep *action) {
+func inheritFacts(act, dep *Action) {
 	for key, fact := range dep.objectFacts {
 		// Filter out facts related to objects
 		// that are irrelevant downstream
 		// (equivalently: not in the compiler export data).
-		if !exportedFrom(key.obj, dep.pkg.Types) {
+		if !exportedFrom(key.obj, dep.Pkg.GetTypes()) {
 			continue
 		}
 		act.objectFacts[key] = fact
@@ -260,7 +216,7 @@ func exportedFrom(obj types.Object, pkg *types.Package) bool {
 // importObjectFact implements Pass.ImportObjectFact.
 // Given a non-nil pointer ptr of type *T, where *T satisfies Fact,
 // importObjectFact copies the fact value to *ptr.
-func (act *action) importObjectFact(obj types.Object, ptr analysis.Fact) bool {
+func (act *Action) importObjectFact(obj types.Object, ptr analysis.Fact) bool {
 	if obj == nil {
 		panic("nil object")
 	}
@@ -273,14 +229,14 @@ func (act *action) importObjectFact(obj types.Object, ptr analysis.Fact) bool {
 }
 
 // exportObjectFact implements Pass.ExportObjectFact.
-func (act *action) exportObjectFact(obj types.Object, fact analysis.Fact) {
+func (act *Action) exportObjectFact(obj types.Object, fact analysis.Fact) {
 	if act.pass.ExportObjectFact == nil {
 		log.Panicf("%s: Pass.ExportObjectFact(%s, %T) called after Run", act, obj, fact)
 	}
 
-	if obj.Pkg() != act.pkg.Types {
+	if obj.Pkg() != act.Pkg.GetTypes() {
 		log.Panicf("internal error: in analysis %s of package %s: Fact.Set(%s, %T): can't set facts on objects belonging another package",
-			act.a, act.pkg, obj, fact)
+			act.Analyzer, act.Pkg, obj, fact)
 	}
 
 	key := objectFactKey{obj, factType(fact)}
@@ -290,7 +246,7 @@ func (act *action) exportObjectFact(obj types.Object, fact analysis.Fact) {
 // importPackageFact implements Pass.ImportPackageFact.
 // Given a non-nil pointer ptr of type *T, where *T satisfies Fact,
 // fact copies the fact value to *ptr.
-func (act *action) importPackageFact(pkg *types.Package, ptr analysis.Fact) bool {
+func (act *Action) importPackageFact(pkg *types.Package, ptr analysis.Fact) bool {
 	if pkg == nil {
 		panic("nil package")
 	}
@@ -303,7 +259,7 @@ func (act *action) importPackageFact(pkg *types.Package, ptr analysis.Fact) bool
 }
 
 // exportPackageFact implements Pass.ExportPackageFact.
-func (act *action) exportPackageFact(fact analysis.Fact) {
+func (act *Action) exportPackageFact(fact analysis.Fact) {
 	if act.pass.ExportPackageFact == nil {
 		log.Panicf("%s: Pass.ExportPackageFact(%T) called after Run", act, fact)
 	}
diff --git a/internal/lsp/source/completion.go b/internal/lsp/source/completion.go
index 5e673290..b776e5d9 100644
--- a/internal/lsp/source/completion.go
+++ b/internal/lsp/source/completion.go
@@ -77,16 +77,16 @@ func Completion(ctx context.Context, f File, pos token.Pos) (items []CompletionI
 	// Save certain facts about the query position, including the expected type
 	// of the completion result, the signature of the function enclosing the
 	// position.
-	typ := expectedType(path, pos, pkg.TypesInfo)
-	sig := enclosingFunction(path, pos, pkg.TypesInfo)
-	pkgStringer := qualifier(file, pkg.Types, pkg.TypesInfo)
+	typ := expectedType(path, pos, pkg.GetTypesInfo())
+	sig := enclosingFunction(path, pos, pkg.GetTypesInfo())
+	pkgStringer := qualifier(file, pkg.GetTypes(), pkg.GetTypesInfo())
 	preferTypeNames := wantTypeNames(pos, path)
 	seen := make(map[types.Object]bool)
 
 	// found adds a candidate completion.
 	// Only the first candidate of a given name is considered.
 	found := func(obj types.Object, weight float64, items []CompletionItem) []CompletionItem {
-		if obj.Pkg() != nil && obj.Pkg() != pkg.Types && !obj.Exported() {
+		if obj.Pkg() != nil && obj.Pkg() != pkg.GetTypes() && !obj.Exported() {
 			return items // inaccessible
 		}
 
@@ -107,7 +107,7 @@ func Completion(ctx context.Context, f File, pos token.Pos) (items []CompletionI
 	}
 
 	// The position is within a composite literal.
-	if items, prefix, ok := complit(path, pos, pkg.Types, pkg.TypesInfo, found); ok {
+	if items, prefix, ok := complit(path, pos, pkg.GetTypes(), pkg.GetTypesInfo(), found); ok {
 		return items, prefix, nil
 	}
 	switch n := path[0].(type) {
@@ -117,39 +117,39 @@ func Completion(ctx context.Context, f File, pos token.Pos) (items []CompletionI
 
 		// Is this the Sel part of a selector?
 		if sel, ok := path[1].(*ast.SelectorExpr); ok && sel.Sel == n {
-			items, err = selector(sel, pos, pkg.TypesInfo, found)
+			items, err = selector(sel, pos, pkg.GetTypesInfo(), found)
 			return items, prefix, err
 		}
 		// reject defining identifiers
-		if obj, ok := pkg.TypesInfo.Defs[n]; ok {
+		if obj, ok := pkg.GetTypesInfo().Defs[n]; ok {
 			if v, ok := obj.(*types.Var); ok && v.IsField() {
 				// An anonymous field is also a reference to a type.
 			} else {
 				of := ""
 				if obj != nil {
-					qual := types.RelativeTo(pkg.Types)
+					qual := types.RelativeTo(pkg.GetTypes())
 					of += ", of " + types.ObjectString(obj, qual)
 				}
 				return nil, "", fmt.Errorf("this is a definition%s", of)
 			}
 		}
 
-		items = append(items, lexical(path, pos, pkg.Types, pkg.TypesInfo, found)...)
+		items = append(items, lexical(path, pos, pkg.GetTypes(), pkg.GetTypesInfo(), found)...)
 
 	// The function name hasn't been typed yet, but the parens are there:
 	//   recv.‸(arg)
 	case *ast.TypeAssertExpr:
 		// Create a fake selector expression.
-		items, err = selector(&ast.SelectorExpr{X: n.X}, pos, pkg.TypesInfo, found)
+		items, err = selector(&ast.SelectorExpr{X: n.X}, pos, pkg.GetTypesInfo(), found)
 		return items, prefix, err
 	case *ast.SelectorExpr:
-		items, err = selector(n, pos, pkg.TypesInfo, found)
+		items, err = selector(n, pos, pkg.GetTypesInfo(), found)
 		return items, prefix, err
 	default:
 		// fallback to lexical completions
-		return lexical(path, pos, pkg.Types, pkg.TypesInfo, found), "", nil
+		return lexical(path, pos, pkg.GetTypes(), pkg.GetTypesInfo(), found), "", nil
 	}
 
 	return items, prefix, nil
 }
diff --git a/internal/lsp/source/definition.go b/internal/lsp/source/definition.go
index cca6035f..edffa049 100644
--- a/internal/lsp/source/definition.go
+++ b/internal/lsp/source/definition.go
@@ -52,7 +52,7 @@ func (i *IdentifierInfo) Hover(ctx context.Context, q types.Qualifier) (string,
 	if q == nil {
 		fAST := i.File.GetAST(ctx)
 		pkg := i.File.GetPackage(ctx)
-		q = qualifier(fAST, pkg.Types, pkg.TypesInfo)
+		q = qualifier(fAST, pkg.GetTypes(), pkg.GetTypesInfo())
 	}
 	return types.ObjectString(i.Declaration.Object, q), nil
 }
@@ -84,7 +84,7 @@ func identifier(ctx context.Context, v View, f File, pos token.Pos) (*Identifier
 	}
 	result.Name = result.ident.Name
 	result.Range = Range{Start: result.ident.Pos(), End: result.ident.End()}
-	result.Declaration.Object = pkg.TypesInfo.ObjectOf(result.ident)
+	result.Declaration.Object = pkg.GetTypesInfo().ObjectOf(result.ident)
 	if result.Declaration.Object == nil {
 		return nil, fmt.Errorf("no object for ident %v", result.Name)
 	}
@@ -101,7 +101,7 @@ func identifier(ctx context.Context, v View, f File, pos token.Pos) (*Identifier
 	if result.Declaration.Range, err = objToRange(ctx, v, result.Declaration.Object); err != nil {
 		return nil, err
 	}
-	typ := pkg.TypesInfo.TypeOf(result.ident)
+	typ := pkg.GetTypesInfo().TypeOf(result.ident)
 	if typ == nil {
 		return nil, fmt.Errorf("no type for %s", result.Name)
 	}
diff --git a/internal/lsp/source/diagnostics.go b/internal/lsp/source/diagnostics.go
index 3c3218ea..98e42074 100644
--- a/internal/lsp/source/diagnostics.go
+++ b/internal/lsp/source/diagnostics.go
@@ -61,11 +61,11 @@ func Diagnostics(ctx context.Context, v View, uri URI) (map[string][]Diagnostic,
 	pkg := f.GetPackage(ctx)
 	// Prepare the reports we will send for this package.
 	reports := make(map[string][]Diagnostic)
-	for _, filename := range pkg.CompiledGoFiles {
+	for _, filename := range pkg.GetFilenames() {
 		reports[filename] = []Diagnostic{}
 	}
 	var parseErrors, typeErrors []packages.Error
-	for _, err := range pkg.Errors {
+	for _, err := range pkg.GetErrors() {
 		switch err.Kind {
 		case packages.ParseError:
 			parseErrors = append(parseErrors, err)
@@ -117,13 +117,12 @@ func Diagnostics(ctx context.Context, v View, uri URI) (map[string][]Diagnostic,
 		return reports, nil
 	}
 	// Type checking and parsing succeeded. Run analyses.
-	runAnalyses(v.GetAnalysisCache(), pkg, func(a *analysis.Analyzer, diag analysis.Diagnostic) {
-		pos := pkg.Fset.Position(diag.Pos)
+	runAnalyses(ctx, v, pkg, func(a *analysis.Analyzer, diag analysis.Diagnostic) {
+		pos := v.FileSet().Position(diag.Pos)
 		category := a.Name
 		if diag.Category != "" {
 			category += "." + category
 		}
-
 		reports[pos.Filename] = append(reports[pos.Filename], Diagnostic{
 			Source:   category,
 			Range:    Range{Start: diag.Pos, End: diag.Pos},
@@ -186,7 +185,7 @@ func identifierEnd(content []byte, l, c int) (int, error) {
 	return bytes.IndexAny(line[c-1:], " \n,():;[]"), nil
 }
 
-func runAnalyses(c *AnalysisCache, pkg *packages.Package, report func(a *analysis.Analyzer, diag analysis.Diagnostic)) error {
+func runAnalyses(ctx context.Context, v View, pkg Package, report func(a *analysis.Analyzer, diag analysis.Diagnostic)) error {
 	// the traditional vet suite:
 	analyzers := []*analysis.Analyzer{
 		asmdecl.Analyzer,
@@ -213,7 +212,7 @@ func runAnalyses(c *AnalysisCache, pkg *packages.Package, report func(a *analysi
 		unusedresult.Analyzer,
 	}
 
-	roots := c.analyze([]*packages.Package{pkg}, analyzers)
+	roots := analyze(ctx, v, []Package{pkg}, analyzers)
 
 	// Report diagnostics and errors from root analyzers.
 	for _, r := range roots {
@@ -223,7 +222,7 @@ func runAnalyses(c *AnalysisCache, pkg *packages.Package, report func(a *analysi
 				// which isn't super useful...
 				return r.err
 			}
-			report(r.a, diag)
+			report(r.Analyzer, diag)
 		}
 	}
 
diff --git a/internal/lsp/source/signature_help.go b/internal/lsp/source/signature_help.go
index 152727ea..dda57fe8 100644
--- a/internal/lsp/source/signature_help.go
+++ b/internal/lsp/source/signature_help.go
@@ -48,9 +48,9 @@ func SignatureHelp(ctx context.Context, f File, pos token.Pos) (*SignatureInform
 	var obj types.Object
 	switch t := callExpr.Fun.(type) {
 	case *ast.Ident:
-		obj = pkg.TypesInfo.ObjectOf(t)
+		obj = pkg.GetTypesInfo().ObjectOf(t)
 	case *ast.SelectorExpr:
-		obj = pkg.TypesInfo.ObjectOf(t.Sel)
+		obj = pkg.GetTypesInfo().ObjectOf(t.Sel)
 	default:
 		return nil, fmt.Errorf("the enclosing function is malformed")
 	}
@@ -70,7 +70,7 @@ func SignatureHelp(ctx context.Context, f File, pos token.Pos) (*SignatureInform
 	if sig == nil {
 		return nil, fmt.Errorf("no function signatures found for %s", obj.Name())
 	}
-	pkgStringer := qualifier(fAST, pkg.Types, pkg.TypesInfo)
+	pkgStringer := qualifier(fAST, pkg.GetTypes(), pkg.GetTypesInfo())
 	var paramInfo []ParameterInformation
 	for i := 0; i < sig.Params().Len(); i++ {
 		param := sig.Params().At(i)
diff --git a/internal/lsp/source/view.go b/internal/lsp/source/view.go
index f9088280..c0e6f01c 100644
--- a/internal/lsp/source/view.go
+++ b/internal/lsp/source/view.go
@@ -8,7 +8,9 @@ import (
 	"context"
 	"go/ast"
 	"go/token"
+	"go/types"
 
+	"golang.org/x/tools/go/analysis"
 	"golang.org/x/tools/go/packages"
 )
 
@@ -18,7 +20,6 @@ import (
 type View interface {
 	GetFile(ctx context.Context, uri URI) (File, error)
 	SetContent(ctx context.Context, uri URI, content []byte) error
-	GetAnalysisCache() *AnalysisCache
 	FileSet() *token.FileSet
 }
 
@@ -29,11 +30,22 @@ type View interface {
 type File interface {
 	GetAST(ctx context.Context) *ast.File
 	GetFileSet(ctx context.Context) *token.FileSet
-	GetPackage(ctx context.Context) *packages.Package
+	GetPackage(ctx context.Context) Package
 	GetToken(ctx context.Context) *token.File
 	GetContent(ctx context.Context) []byte
 }
 
+// Package represents a Go package that has been type-checked. It maintains
+// only the relevant fields of a *go/packages.Package.
+type Package interface {
+	GetFilenames() []string
+	GetSyntax() []*ast.File
+	GetErrors() []packages.Error
+	GetTypes() *types.Package
+	GetTypesInfo() *types.Info
+	GetActionGraph(ctx context.Context, a *analysis.Analyzer) (*Action, error)
+}
+
 // Range represents a start and end position.
 // Because Range is based purely on two token.Pos entries, it is not self
 // contained. You need access to a token.FileSet to regain the file