internal/lsp: cache the *ast.File and *token.File on the package

This change removes the need for the ast and token fields on the *goFile
object. We switch to using source.ParseGoHandles on the package, which means
that we can easily access both the AST and token via the package, which is
already cached.

Change-Id: I5f78bbe09362f4d95eb15556617bdbd809a7a55d
Reviewed-on: https://go-review.googlesource.com/c/tools/+/185878
Run-TryBot: Rebecca Stambler <rstambler@golang.org>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Reviewed-by: Ian Cottrell <iancottrell@google.com>
This commit is contained in:
parent 502543d2ed
commit b667c4c58e
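The idea behind the change can be sketched outside of gopls. The following stand-alone Go program is an illustration only, not code from this CL: parseGoHandle and pkg are simplified stand-ins for source.ParseGoHandle and the cache's pkg type, and a bool flag stands in for the real memoize store. It shows why, once the package owns the parse handles, GetToken can be derived from a cached parse on demand instead of being stored on the file object.

package main

import (
	"context"
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
)

// parseGoHandle memoizes one parse of one file (stand-in for source.ParseGoHandle).
type parseGoHandle struct {
	fset     *token.FileSet
	filename string
	src      string

	file *ast.File // filled on first Parse
	err  error
	done bool
}

func (h *parseGoHandle) Parse(ctx context.Context) (*ast.File, error) {
	if !h.done {
		h.file, h.err = parser.ParseFile(h.fset, h.filename, h.src, 0)
		h.done = true
	}
	return h.file, h.err
}

// pkg owns the parse handles for its files, so callers derive the AST and
// *token.File from the package instead of caching them on a file object.
type pkg struct {
	fset  *token.FileSet
	files []*parseGoHandle
}

// getToken mirrors the shape of the new GetToken: parse, then look up the file.
func (p *pkg) getToken(ctx context.Context, i int) (*token.File, error) {
	file, err := p.files[i].Parse(ctx)
	if file == nil {
		return nil, err
	}
	return p.fset.File(file.Pos()), nil
}

func main() {
	fset := token.NewFileSet()
	p := &pkg{
		fset:  fset,
		files: []*parseGoHandle{{fset: fset, filename: "x.go", src: "package x\n"}},
	}
	tok, err := p.getToken(context.Background(), 0)
	fmt.Println(tok.Name(), err) // x.go <nil>
}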
@@ -17,6 +17,7 @@ import (
 	"golang.org/x/tools/go/packages"
 	"golang.org/x/tools/internal/lsp/source"
 	"golang.org/x/tools/internal/lsp/telemetry/trace"
+	"golang.org/x/tools/internal/lsp/xlog"
 	"golang.org/x/tools/internal/span"
 )
 
@@ -121,42 +122,42 @@ func (imp *importer) typeCheck(ctx context.Context, id packageID) (*pkg, error)
 		mode = source.ParseExported
 	}
 	var (
-		files []*astFile
-		phs   []source.ParseGoHandle
+		files  = make([]*ast.File, len(meta.files))
+		errors = make([]error, len(meta.files))
 		wg    sync.WaitGroup
 	)
 	for _, filename := range meta.files {
 		uri := span.FileURI(filename)
 		f, err := imp.view.getFile(ctx, uri)
 		if err != nil {
+			xlog.Errorf(ctx, "unable to get file for %s: %v", f.URI(), err)
 			continue
 		}
-		ph := imp.view.session.cache.ParseGoHandle(f.Handle(ctx), mode)
-		phs = append(phs, ph)
-		files = append(files, &astFile{
-			uri:       ph.File().Identity().URI,
-			isTrimmed: mode == source.ParseExported,
-			ph:        ph,
-		})
+		pkg.files = append(pkg.files, imp.view.session.cache.ParseGoHandle(f.Handle(ctx), mode))
 	}
-	for i, ph := range phs {
+	for i, ph := range pkg.files {
 		wg.Add(1)
 		go func(i int, ph source.ParseGoHandle) {
 			defer wg.Done()
 
-			files[i].file, files[i].err = ph.Parse(ctx)
+			files[i], errors[i] = ph.Parse(ctx)
 		}(i, ph)
 	}
 	wg.Wait()
 
-	for _, f := range files {
-		pkg.files = append(pkg.files, f)
-		if f.err != nil {
-			if f.err == context.Canceled {
-				return nil, f.err
-			}
-			imp.view.session.cache.appendPkgError(pkg, f.err)
-		}
-	}
+	var i int
+	for _, f := range files {
+		if f != nil {
+			files[i] = f
+			i++
+		}
+	}
+	for _, err := range errors {
+		if err == context.Canceled {
+			return nil, err
+		}
+		if err != nil {
+			imp.view.session.cache.appendPkgError(pkg, err)
+		}
+	}
 
@@ -192,7 +193,7 @@ func (imp *importer) typeCheck(ctx context.Context, id packageID) (*pkg, error)
 	check := types.NewChecker(cfg, imp.fset, pkg.types, pkg.typesInfo)
 
 	// Ignore type-checking errors.
-	check.Files(pkg.GetSyntax())
+	check.Files(files)
 
 	// Add every file in this package to our cache.
 	if err := imp.cachePackage(ctx, pkg, meta, mode); err != nil {
@@ -203,16 +204,17 @@ func (imp *importer) typeCheck(ctx context.Context, id packageID) (*pkg, error)
 }
 
 func (imp *importer) cachePackage(ctx context.Context, pkg *pkg, meta *metadata, mode source.ParseMode) error {
-	for _, file := range pkg.files {
-		f, err := imp.view.getFile(ctx, file.uri)
+	for _, ph := range pkg.files {
+		uri := ph.File().Identity().URI
+		f, err := imp.view.getFile(ctx, uri)
 		if err != nil {
-			return fmt.Errorf("no such file %s: %v", file.uri, err)
+			return fmt.Errorf("no such file %s: %v", uri, err)
 		}
 		gof, ok := f.(*goFile)
 		if !ok {
-			return fmt.Errorf("non Go file %s", file.uri)
+			return fmt.Errorf("non Go file %s", uri)
 		}
-		if err := imp.cachePerFile(gof, file, pkg); err != nil {
+		if err := imp.cachePerFile(gof, ph, pkg); err != nil {
 			return fmt.Errorf("failed to cache file %s: %v", gof.URI(), err)
 		}
 	}
@@ -231,7 +233,7 @@ func (imp *importer) cachePackage(ctx context.Context, pkg *pkg, meta *metadata,
 	return nil
 }
 
-func (imp *importer) cachePerFile(gof *goFile, file *astFile, p *pkg) error {
+func (imp *importer) cachePerFile(gof *goFile, ph source.ParseGoHandle, p *pkg) error {
 	gof.mu.Lock()
 	defer gof.mu.Unlock()
 
@@ -241,25 +243,11 @@ func (imp *importer) cachePerFile(gof *goFile, file *astFile, p *pkg) error {
 	}
 	gof.pkgs[p.id] = p
 
-	// Get the AST for the file.
-	gof.ast = file
-	if gof.ast == nil {
-		return fmt.Errorf("no AST information for %s", file.uri)
-	}
-	if gof.ast.file == nil {
-		return fmt.Errorf("no AST for %s", file.uri)
-	}
-	// Get the *token.File directly from the AST.
-	pos := gof.ast.file.Pos()
-	if !pos.IsValid() {
-		return fmt.Errorf("AST for %s has an invalid position", file.uri)
-	}
-	tok := imp.view.session.cache.FileSet().File(pos)
-	if tok == nil {
-		return fmt.Errorf("no *token.File for %s", file.uri)
-	}
-	gof.token = tok
-	gof.imports = gof.ast.file.Imports
+	file, err := ph.Parse(imp.ctx)
+	if file == nil {
+		return fmt.Errorf("no AST for %s: %v", ph.File().Identity().URI, err)
+	}
+	gof.imports = file.Imports
 	return nil
 }
 
@@ -34,8 +34,6 @@ type fileBase struct {
 
 	handleMu sync.Mutex
 	handle   source.FileHandle
-
-	token *token.File
 }
 
 func basename(filename string) string {
@@ -6,6 +6,7 @@ package cache
 
 import (
 	"context"
+	"fmt"
 	"go/ast"
 	"go/token"
 	"sync"
@@ -34,7 +35,6 @@ type goFile struct {
 
 	imports []*ast.ImportSpec
 
-	ast *astFile
 	pkgs map[packageID]*pkg
 	meta map[packageID]*metadata
 }
@@ -47,69 +47,49 @@ type astFile struct {
 	isTrimmed bool
 }
 
-func (f *goFile) GetToken(ctx context.Context) *token.File {
+func (f *goFile) GetToken(ctx context.Context) (*token.File, error) {
+	file, err := f.GetAST(ctx, source.ParseFull)
+	if file == nil {
+		return nil, err
+	}
+	return f.view.session.cache.fset.File(file.Pos()), nil
+}
+
+func (f *goFile) GetAST(ctx context.Context, mode source.ParseMode) (*ast.File, error) {
 	f.view.mu.Lock()
 	defer f.view.mu.Unlock()
 
-	if f.isDirty() || f.astIsTrimmed() {
+	if f.isDirty(ctx) || f.wrongParseMode(ctx, mode) {
 		if _, err := f.view.loadParseTypecheck(ctx, f); err != nil {
-			xlog.Errorf(ctx, "unable to check package for %s: %v", f.URI(), err)
-			return nil
+			return nil, fmt.Errorf("GetAST: unable to check package for %s: %v", f.URI(), err)
 		}
 	}
-	f.mu.Lock()
-	defer f.mu.Unlock()
-
-	if unexpectedAST(ctx, f) {
-		return nil
-	}
-	return f.token
-}
-
-func (f *goFile) GetAnyAST(ctx context.Context) *ast.File {
-	f.view.mu.Lock()
-	defer f.view.mu.Unlock()
-
-	if f.isDirty() {
-		if _, err := f.view.loadParseTypecheck(ctx, f); err != nil {
-			xlog.Errorf(ctx, "unable to check package for %s: %v", f.URI(), err)
-			return nil
-		}
-	}
-	f.mu.Lock()
-	defer f.mu.Unlock()
-
-	if f.ast == nil {
-		return nil
-	}
-	return f.ast.file
-}
-
-func (f *goFile) GetAST(ctx context.Context) *ast.File {
-	f.view.mu.Lock()
-	defer f.view.mu.Unlock()
-
-	if f.isDirty() || f.astIsTrimmed() {
-		if _, err := f.view.loadParseTypecheck(ctx, f); err != nil {
-			xlog.Errorf(ctx, "unable to check package for %s: %v", f.URI(), err)
-			return nil
-		}
-	}
-	f.mu.Lock()
-	defer f.mu.Unlock()
-
-	if unexpectedAST(ctx, f) {
-		return nil
-	}
-	return f.ast.file
+	fh := f.Handle(ctx)
+	// Check for a cached AST first, in case getting a trimmed version would actually cause a re-parse.
+	for _, m := range []source.ParseMode{
+		source.ParseHeader,
+		source.ParseExported,
+		source.ParseFull,
+	} {
+		if m < mode {
+			continue
+		}
+		if v, ok := f.view.session.cache.store.Cached(parseKey{
+			file: fh.Identity(),
+			mode: m,
+		}).(*parseGoData); ok {
+			return v.ast, v.err
+		}
+	}
+	ph := f.view.session.cache.ParseGoHandle(fh, mode)
+	return ph.Parse(ctx)
 }
 
 func (f *goFile) GetPackages(ctx context.Context) []source.Package {
 	f.view.mu.Lock()
 	defer f.view.mu.Unlock()
 
-	if f.isDirty() || f.astIsTrimmed() {
+	if f.isDirty(ctx) || f.wrongParseMode(ctx, source.ParseFull) {
 		if errs, err := f.view.loadParseTypecheck(ctx, f); err != nil {
 			xlog.Errorf(ctx, "unable to check package for %s: %v", f.URI(), err)
 
@@ -124,9 +104,6 @@ func (f *goFile) GetPackages(ctx context.Context) []source.Package {
 	f.mu.Lock()
 	defer f.mu.Unlock()
 
-	if unexpectedAST(ctx, f) {
-		return nil
-	}
 	var pkgs []source.Package
 	for _, pkg := range f.pkgs {
 		pkgs = append(pkgs, pkg)
@@ -149,23 +126,24 @@ func (f *goFile) GetPackage(ctx context.Context) source.Package {
 	return result
 }
 
-func unexpectedAST(ctx context.Context, f *goFile) bool {
-	// If the AST comes back nil, something has gone wrong.
-	if f.ast == nil {
-		xlog.Errorf(ctx, "expected full AST for %s, returned nil", f.URI())
-		return true
-	}
-	// If the AST comes back trimmed, something has gone wrong.
-	if f.ast.isTrimmed {
-		xlog.Errorf(ctx, "expected full AST for %s, returned trimmed", f.URI())
-		return true
-	}
+func (f *goFile) wrongParseMode(ctx context.Context, mode source.ParseMode) bool {
+	f.mu.Lock()
+	defer f.mu.Unlock()
+
+	fh := f.Handle(ctx)
+	for _, pkg := range f.pkgs {
+		for _, ph := range pkg.files {
+			if fh.Identity() == ph.File().Identity() {
+				return ph.Mode() < mode
+			}
+		}
+	}
 	return false
 }
 
 // isDirty is true if the file needs to be type-checked.
 // It assumes that the file's view's mutex is held by the caller.
-func (f *goFile) isDirty() bool {
+func (f *goFile) isDirty(ctx context.Context) bool {
 	f.mu.Lock()
 	defer f.mu.Unlock()
 
@@ -185,14 +163,16 @@ func (f *goFile) isDirty() bool {
 	if len(f.missingImports) > 0 {
 		return true
 	}
-	return f.token == nil || f.ast == nil
-}
-
-func (f *goFile) astIsTrimmed() bool {
-	f.mu.Lock()
-	defer f.mu.Unlock()
-
-	return f.ast != nil && f.ast.isTrimmed
+	fh := f.Handle(ctx)
+	for _, pkg := range f.pkgs {
+		for _, file := range pkg.files {
+			// There is a type-checked package for the current file handle.
+			if file.File().Identity() == fh.Identity() {
+				return false
+			}
+		}
+	}
+	return true
 }
 
 func (f *goFile) GetActiveReverseDeps(ctx context.Context) []source.GoFile {
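One detail of the new GetAST above is worth spelling out: before parsing, it probes the cache for any already-stored AST whose parse mode is at least as complete as the one requested, so asking for a trimmed AST never forces a re-parse when a fuller one already exists. A minimal stand-alone sketch of that probe follows; parseKey and the mode constants mirror the diff, while the map-based store is an assumption made purely for illustration.

package main

import "fmt"

type ParseMode int

const (
	ParseHeader ParseMode = iota
	ParseExported
	ParseFull
)

type parseKey struct {
	file string
	mode ParseMode
}

// cachedAST returns the first cached parse whose mode is at least as complete
// as the requested one, walking modes from most trimmed to most complete.
func cachedAST(store map[parseKey]string, uri string, mode ParseMode) (string, bool) {
	for _, m := range []ParseMode{ParseHeader, ParseExported, ParseFull} {
		if m < mode {
			continue
		}
		if v, ok := store[parseKey{file: uri, mode: m}]; ok {
			return v, true
		}
	}
	return "", false
}

func main() {
	store := map[parseKey]string{
		{file: "a.go", mode: ParseFull}: "full AST",
	}
	got, ok := cachedAST(store, "a.go", ParseExported)
	fmt.Println(got, ok) // full AST true
}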
@@ -20,7 +20,7 @@ func (v *view) loadParseTypecheck(ctx context.Context, f *goFile) ([]packages.Er
 
 	// If the AST for this file is trimmed, and we are explicitly type-checking it,
 	// don't ignore function bodies.
-	if f.astIsTrimmed() {
+	if f.wrongParseMode(ctx, source.ParseFull) {
 		v.pcache.mu.Lock()
 		f.invalidateAST(ctx)
 		v.pcache.mu.Unlock()
@@ -6,6 +6,7 @@ package cache
 
 import (
 	"context"
+	"fmt"
 	"go/token"
 )
 
@@ -14,7 +15,10 @@ type modFile struct {
 	fileBase
 }
 
-func (*modFile) GetToken(context.Context) *token.File { return nil }
+func (*modFile) GetToken(context.Context) (*token.File, error) {
+	return nil, fmt.Errorf("GetToken: not implemented")
+}
+
 func (*modFile) setContent(content []byte) {}
 func (*modFile) filename() string { return "" }
 func (*modFile) isActive() bool { return false }
@@ -22,7 +22,7 @@ type pkg struct {
 	id      packageID
 	pkgPath packagePath
 
-	files   []*astFile
+	files   []source.ParseGoHandle
 	errors  []packages.Error
 	imports map[packagePath]*pkg
 	types   *types.Package
@@ -149,17 +149,18 @@ func (pkg *pkg) PkgPath() string {
 
 func (pkg *pkg) GetFilenames() []string {
 	filenames := make([]string, 0, len(pkg.files))
-	for _, f := range pkg.files {
-		filenames = append(filenames, f.uri.Filename())
+	for _, ph := range pkg.files {
+		filenames = append(filenames, ph.File().Identity().URI.Filename())
 	}
 	return filenames
 }
 
-func (pkg *pkg) GetSyntax() []*ast.File {
+func (pkg *pkg) GetSyntax(ctx context.Context) []*ast.File {
 	var syntax []*ast.File
-	for _, f := range pkg.files {
-		if f.file != nil {
-			syntax = append(syntax, f.file)
+	for _, ph := range pkg.files {
+		file, _ := ph.Parse(ctx)
+		if file != nil {
+			syntax = append(syntax, file)
 		}
 	}
 	return syntax
@@ -6,6 +6,7 @@ package cache
 
 import (
	"context"
+	"fmt"
 	"go/token"
 )
 
@@ -14,7 +15,10 @@ type sumFile struct {
 	fileBase
 }
 
-func (*sumFile) GetToken(context.Context) *token.File { return nil }
+func (*sumFile) GetToken(context.Context) (*token.File, error) {
+	return nil, fmt.Errorf("GetToken: not implemented")
+}
+
 func (*sumFile) setContent(content []byte) {}
 func (*sumFile) filename() string { return "" }
 func (*sumFile) isActive() bool { return false }
@@ -245,8 +245,6 @@ func (f *goFile) invalidateContent(ctx context.Context) {
 // including any position and type information that depends on it.
 func (f *goFile) invalidateAST(ctx context.Context) {
 	f.mu.Lock()
-	f.ast = nil
-	f.token = nil
 	pkgs := f.pkgs
 	f.mu.Unlock()
 
@@ -287,6 +285,16 @@ func (v *view) remove(ctx context.Context, id packageID, seen map[packageID]stru
 			continue
 		}
 		gof.mu.Lock()
+		if pkg, ok := gof.pkgs[id]; ok {
+			// TODO: Ultimately, we shouldn't need this.
+			// Preemptively delete all of the cached keys if we are invalidating a package.
+			for _, ph := range pkg.files {
+				v.session.cache.store.Delete(parseKey{
+					file: ph.File().Identity(),
+					mode: ph.Mode(),
+				})
+			}
+		}
 		delete(gof.pkgs, id)
 		gof.mu.Unlock()
 	}
@@ -6,7 +6,6 @@ package lsp
 
 import (
 	"context"
-	"fmt"
 
 	"golang.org/x/tools/internal/lsp/protocol"
 	"golang.org/x/tools/internal/lsp/source"
@@ -39,9 +38,9 @@ func spanToRange(ctx context.Context, view source.View, s span.Span) (source.GoF
 	}
 	if rng.Start == rng.End {
 		// If we have a single point, assume we want the whole file.
-		tok := f.GetToken(ctx)
-		if tok == nil {
-			return nil, nil, span.Range{}, fmt.Errorf("no file information for %s", f.URI())
+		tok, err := f.GetToken(ctx)
+		if err != nil {
+			return nil, nil, span.Range{}, err
 		}
 		rng.End = tok.Pos(tok.Size())
 	}
@@ -26,13 +26,12 @@ func (s *Server) documentLink(ctx context.Context, params *protocol.DocumentLink
 	if err != nil {
 		return nil, err
 	}
-	file := f.GetAST(ctx)
+	file, err := f.GetAST(ctx, source.ParseFull)
 	if file == nil {
-		return nil, fmt.Errorf("no AST for %v", uri)
+		return nil, err
 	}
-
 	var links []protocol.DocumentLink
 
 	ast.Inspect(file, func(node ast.Node) bool {
 		switch n := node.(type) {
 		case *ast.ImportSpec:
@@ -78,6 +77,27 @@ func (s *Server) documentLink(ctx context.Context, params *protocol.DocumentLink
 	return links, nil
 }
 
+func findLinksInString(src string, pos token.Pos, view source.View, mapper *protocol.ColumnMapper) ([]protocol.DocumentLink, error) {
+	var links []protocol.DocumentLink
+	re, err := getURLRegexp()
+	if err != nil {
+		return nil, fmt.Errorf("cannot create regexp for links: %s", err.Error())
+	}
+	for _, urlIndex := range re.FindAllIndex([]byte(src), -1) {
+		start := urlIndex[0]
+		end := urlIndex[1]
+		startPos := token.Pos(int(pos) + start)
+		endPos := token.Pos(int(pos) + end)
+		target := src[start:end]
+		l, err := toProtocolLink(view, mapper, target, startPos, endPos)
+		if err != nil {
+			return nil, err
+		}
+		links = append(links, l)
+	}
+	return links, nil
+}
+
 const urlRegexpString = "(http|ftp|https)://([\\w_-]+(?:(?:\\.[\\w_-]+)+))([\\w.,@?^=%&:/~+#-]*[\\w@?^=%&/~+#-])?"
 
 var (
@@ -108,24 +128,3 @@ func toProtocolLink(view source.View, mapper *protocol.ColumnMapper, target stri
 	}
 	return l, nil
 }
-
-func findLinksInString(src string, pos token.Pos, view source.View, mapper *protocol.ColumnMapper) ([]protocol.DocumentLink, error) {
-	var links []protocol.DocumentLink
-	re, err := getURLRegexp()
-	if err != nil {
-		return nil, fmt.Errorf("cannot create regexp for links: %s", err.Error())
-	}
-	for _, urlIndex := range re.FindAllIndex([]byte(src), -1) {
-		start := urlIndex[0]
-		end := urlIndex[1]
-		startPos := token.Pos(int(pos) + start)
-		endPos := token.Pos(int(pos) + end)
-		target := src[start:end]
-		l, err := toProtocolLink(view, mapper, target, startPos, endPos)
-		if err != nil {
-			return nil, err
-		}
-		links = append(links, l)
-	}
-	return links, nil
-}
@@ -148,7 +148,7 @@ func (act *Action) execOnce(ctx context.Context, fset *token.FileSet) error {
 	pass := &analysis.Pass{
 		Analyzer:   act.Analyzer,
 		Fset:       fset,
-		Files:      act.Pkg.GetSyntax(),
+		Files:      act.Pkg.GetSyntax(ctx),
 		Pkg:        act.Pkg.GetTypes(),
 		TypesInfo:  act.Pkg.GetTypesInfo(),
 		TypesSizes: act.Pkg.GetTypesSizes(),
@@ -280,11 +280,11 @@ type CompletionOptions struct {
 func Completion(ctx context.Context, view View, f GoFile, pos token.Pos, opts CompletionOptions) ([]CompletionItem, *Selection, error) {
 	ctx, done := trace.StartSpan(ctx, "source.Completion")
 	defer done()
-	file := f.GetAST(ctx)
-	if file == nil {
-		return nil, nil, fmt.Errorf("no AST for %s", f.URI())
-	}
 
+	file, err := f.GetAST(ctx, ParseFull)
+	if file == nil {
+		return nil, nil, err
+	}
 	pkg := f.GetPackage(ctx)
 	if pkg == nil || pkg.IsIllTyped() {
 		return nil, nil, fmt.Errorf("package for %s is ill typed", f.URI())
@@ -509,6 +509,7 @@ func (c *completer) lexical() error {
 		if scope == types.Universe {
 			score *= 0.1
 		}
 
 		// If we haven't already added a candidate for an object with this name.
 		if _, ok := seen[obj.Name()]; !ok {
 			seen[obj.Name()] = struct{}{}
@@ -98,31 +98,38 @@ func (c *completer) item(cand candidate) (CompletionItem, error) {
 	if c.opts.WantDocumentaton {
 		declRange, err := objToRange(c.ctx, c.view.Session().Cache().FileSet(), obj)
 		if err != nil {
-			return CompletionItem{}, err
+			xlog.Errorf(c.ctx, "failed to get declaration range for object %s: %v", obj.Name(), err)
+			goto Return
 		}
 		pos := declRange.FileSet.Position(declRange.Start)
 		if !pos.IsValid() {
-			return CompletionItem{}, fmt.Errorf("invalid declaration position for %v", item.Label)
+			xlog.Errorf(c.ctx, "invalid declaration position for %v: %v", item.Label, err)
+			goto Return
 		}
 		uri := span.FileURI(pos.Filename)
 		f, err := c.view.GetFile(c.ctx, uri)
 		if err != nil {
-			return CompletionItem{}, err
+			xlog.Errorf(c.ctx, "unable to get file for %s: %v", uri, err)
+			goto Return
 		}
 		gof, ok := f.(GoFile)
 		if !ok {
-			return CompletionItem{}, fmt.Errorf("declaration for %s not in a Go file: %s", item.Label, uri)
+			xlog.Errorf(c.ctx, "declaration for %s not in a Go file: %s", item.Label, uri)
+			goto Return
 		}
 		ident, err := Identifier(c.ctx, c.view, gof, declRange.Start)
 		if err != nil {
-			return CompletionItem{}, err
+			xlog.Errorf(c.ctx, "no identifier for %s: %v", obj.Name(), err)
+			goto Return
 		}
 		documentation, err := ident.Documentation(c.ctx, SynopsisDocumentation)
 		if err != nil {
-			return CompletionItem{}, err
+			xlog.Errorf(c.ctx, "no documentation for %s: %v", obj.Name(), err)
+			goto Return
 		}
 		item.Documentation = documentation
 	}
+Return:
 	return item, nil
 }
 
@@ -239,9 +239,9 @@ func pointToSpan(ctx context.Context, view View, spn span.Span) span.Span {
 		xlog.Errorf(ctx, "%s is not a Go file", spn.URI())
 		return spn
 	}
-	tok := diagFile.GetToken(ctx)
-	if tok == nil {
-		xlog.Errorf(ctx, "could not find token.File for diagnostic: %v", spn.URI())
+	tok, err := diagFile.GetToken(ctx)
+	if err != nil {
+		xlog.Errorf(ctx, "could not find token.File for %s: %v", spn.URI(), err)
 		return spn
 	}
 	data, _, err := diagFile.Handle(ctx).Read(ctx)
@@ -25,9 +25,10 @@ import (
 func Format(ctx context.Context, f GoFile, rng span.Range) ([]TextEdit, error) {
 	ctx, done := trace.StartSpan(ctx, "source.Format")
 	defer done()
-	file := f.GetAST(ctx)
+
+	file, err := f.GetAST(ctx, ParseFull)
 	if file == nil {
-		return nil, fmt.Errorf("no AST for %s", f.URI())
+		return nil, err
 	}
 	pkg := f.GetPackage(ctx)
 	if hasListErrors(pkg.GetErrors()) || hasParseErrors(pkg.GetErrors()) {
@@ -18,9 +18,10 @@ import (
 func Highlight(ctx context.Context, f GoFile, pos token.Pos) ([]span.Span, error) {
 	ctx, done := trace.StartSpan(ctx, "source.Highlight")
 	defer done()
-	file := f.GetAST(ctx)
+
+	file, err := f.GetAST(ctx, ParseFull)
 	if file == nil {
-		return nil, fmt.Errorf("no AST for %s", f.URI())
+		return nil, err
 	}
 	fset := f.FileSet()
 	path, _ := astutil.PathEnclosingInterval(file, pos, pos)
@@ -65,16 +65,17 @@ func Identifier(ctx context.Context, view View, f GoFile, pos token.Pos) (*Ident
 func identifier(ctx context.Context, view View, f GoFile, pos token.Pos) (*IdentifierInfo, error) {
 	ctx, done := trace.StartSpan(ctx, "source.identifier")
 	defer done()
-	file := f.GetAST(ctx)
+
+	file, err := f.GetAST(ctx, ParseFull)
 	if file == nil {
-		return nil, fmt.Errorf("no AST for %s", f.URI())
+		return nil, err
 	}
 	pkg := f.GetPackage(ctx)
 	if pkg == nil || pkg.IsIllTyped() {
 		return nil, fmt.Errorf("pkg for %s is ill-typed", f.URI())
 	}
 	// Handle import specs separately, as there is no formal position for a package declaration.
-	if result, err := importSpec(f, file, pkg, pos); result != nil || err != nil {
+	if result, err := importSpec(ctx, f, file, pkg, pos); result != nil || err != nil {
 		return result, err
 	}
 	path, _ := astutil.PathEnclosingInterval(file, pos, pos)
@@ -121,8 +122,6 @@ func identifier(ctx context.Context, view View, f GoFile, pos token.Pos) (*Ident
 		}
 	}
 
-	var err error
-
 	// Handle builtins separately.
 	if result.decl.obj.Parent() == types.Universe {
 		decl, ok := lookupBuiltinDecl(f.View(), result.Name).(ast.Node)
@@ -235,14 +234,13 @@ func objToNode(ctx context.Context, view View, originPkg *types.Package, obj typ
 	}
 	// If the object is exported from a different package,
 	// we don't need its full AST to find the definition.
-	var declAST *ast.File
+	mode := ParseFull
 	if obj.Exported() && obj.Pkg() != originPkg {
-		declAST = declFile.GetAnyAST(ctx)
-	} else {
-		declAST = declFile.GetAST(ctx)
+		mode = ParseExported
 	}
+	declAST, err := declFile.GetAST(ctx, mode)
 	if declAST == nil {
-		return nil, fmt.Errorf("no AST for %s", f.URI())
+		return nil, err
 	}
 	path, _ := astutil.PathEnclosingInterval(declAST, rng.Start, rng.End)
 	if path == nil {
@@ -267,7 +265,7 @@ func objToNode(ctx context.Context, view View, originPkg *types.Package, obj typ
 }
 
 // importSpec handles positions inside of an *ast.ImportSpec.
-func importSpec(f GoFile, fAST *ast.File, pkg Package, pos token.Pos) (*IdentifierInfo, error) {
+func importSpec(ctx context.Context, f GoFile, fAST *ast.File, pkg Package, pos token.Pos) (*IdentifierInfo, error) {
 	var imp *ast.ImportSpec
 	for _, spec := range fAST.Imports {
 		if spec.Pos() <= pos && pos < spec.End() {
@@ -292,12 +290,12 @@ func importSpec(f GoFile, fAST *ast.File, pkg Package, pos token.Pos) (*Identifi
 	if importedPkg == nil {
 		return nil, fmt.Errorf("no import for %q", importPath)
 	}
-	if importedPkg.GetSyntax() == nil {
+	if importedPkg.GetSyntax(ctx) == nil {
 		return nil, fmt.Errorf("no syntax for for %q", importPath)
 	}
 	// Heuristic: Jump to the longest (most "interesting") file of the package.
 	var dest *ast.File
-	for _, f := range importedPkg.GetSyntax() {
+	for _, f := range importedPkg.GetSyntax(ctx) {
 		if dest == nil || f.End()-f.Pos() > dest.End()-dest.Pos() {
 			dest = f
 		}
@@ -39,6 +39,7 @@ type renamer struct {
 func (i *IdentifierInfo) Rename(ctx context.Context, newName string) (map[span.URI][]TextEdit, error) {
 	ctx, done := trace.StartSpan(ctx, "source.Rename")
 	defer done()
+
 	if i.Name == newName {
 		return nil, fmt.Errorf("old and new names are the same: %s", newName)
 	}
@@ -113,7 +113,7 @@ func (r *renamer) checkInPackageBlock(from types.Object) {
 	}
 
 	// Check for conflicts between package block and all file blocks.
-	for _, f := range pkg.GetSyntax() {
+	for _, f := range pkg.GetSyntax(r.ctx) {
 		fileScope := pkg.GetTypesInfo().Scopes[f]
 		b, prev := fileScope.LookupParent(r.to, token.NoPos)
 		if b == fileScope {
@@ -328,7 +328,7 @@ func forEachLexicalRef(ctx context.Context, pkg Package, obj types.Object, fn fu
 		return true
 	}
 
-	for _, f := range pkg.GetSyntax() {
+	for _, f := range pkg.GetSyntax(ctx) {
 		ast.Inspect(f, visit)
 		if len(stack) != 0 {
 			panic(stack)
@@ -802,7 +802,7 @@ func (r *renamer) satisfy() map[satisfy.Constraint]bool {
 				r.from, r.to, pkg.PkgPath())
 			return nil
 		}
-		f.Find(pkg.GetTypesInfo(), pkg.GetSyntax())
+		f.Find(pkg.GetTypesInfo(), pkg.GetSyntax(r.ctx))
 	}
 	r.satisfyConstraints = f.Result
 }
@@ -835,7 +835,7 @@ func someUse(info *types.Info, obj types.Object) *ast.Ident {
 //
 func pathEnclosingInterval(ctx context.Context, fset *token.FileSet, pkg Package, start, end token.Pos) (resPkg Package, path []ast.Node, exact bool) {
 	var pkgs = []Package{pkg}
-	for _, f := range pkg.GetSyntax() {
+	for _, f := range pkg.GetSyntax(ctx) {
 		for _, imp := range f.Imports {
 			if imp == nil {
 				continue
@@ -848,7 +848,7 @@ func pathEnclosingInterval(ctx context.Context, fset *token.FileSet, pkg Package
 		}
 	}
 	for _, p := range pkgs {
-		for _, f := range p.GetSyntax() {
+		for _, f := range p.GetSyntax(ctx) {
 			if f.Pos() == token.NoPos {
 				// This can happen if the parser saw
 				// too many errors and bailed out.
@@ -28,9 +28,10 @@ type ParameterInformation struct {
 func SignatureHelp(ctx context.Context, f GoFile, pos token.Pos) (*SignatureInformation, error) {
 	ctx, done := trace.StartSpan(ctx, "source.SignatureHelp")
 	defer done()
-	file := f.GetAST(ctx)
+
+	file, err := f.GetAST(ctx, ParseFull)
 	if file == nil {
-		return nil, fmt.Errorf("no AST for %s", f.URI())
+		return nil, err
 	}
 	pkg := f.GetPackage(ctx)
 	if pkg == nil || pkg.IsIllTyped() {
@@ -145,9 +145,9 @@ func (r *runner) Completion(t *testing.T, data tests.Completions, snippets tests
 		if err != nil {
 			t.Fatalf("failed for %v: %v", src, err)
 		}
-		tok := f.(source.GoFile).GetToken(ctx)
-		if tok == nil {
-			t.Fatalf("failed to get token for %v", src)
+		tok, err := f.(source.GoFile).GetToken(ctx)
+		if err != nil {
+			t.Fatalf("failed to get token for %s: %v", src.URI(), err)
 		}
 		pos := tok.Pos(src.Start().Offset())
 		list, surrounding, err := source.Completion(ctx, r.view, f.(source.GoFile), pos, source.CompletionOptions{
@@ -183,7 +183,10 @@ func (r *runner) Completion(t *testing.T, data tests.Completions, snippets tests
 		if err != nil {
 			t.Fatalf("failed for %v: %v", src, err)
 		}
-		tok := f.GetToken(ctx)
+		tok, err := f.(source.GoFile).GetToken(ctx)
+		if err != nil {
+			t.Fatalf("failed to get token for %s: %v", src.URI(), err)
+		}
 		pos := tok.Pos(src.Start().Offset())
 		list, _, err := source.Completion(ctx, r.view, f.(source.GoFile), pos, source.CompletionOptions{
 			DeepComplete: strings.Contains(string(src.URI()), "deepcomplete"),
@@ -305,7 +308,11 @@ func (r *runner) Format(t *testing.T, data tests.Formats) {
 		if err != nil {
 			t.Fatalf("failed for %v: %v", spn, err)
 		}
-		rng, err := spn.Range(span.NewTokenConverter(f.FileSet(), f.GetToken(ctx)))
+		tok, err := f.(source.GoFile).GetToken(ctx)
+		if err != nil {
+			t.Fatalf("failed to get token for %s: %v", spn.URI(), err)
+		}
+		rng, err := spn.Range(span.NewTokenConverter(f.FileSet(), tok))
 		if err != nil {
 			t.Fatalf("failed for %v: %v", spn, err)
 		}
@@ -343,7 +350,11 @@ func (r *runner) Import(t *testing.T, data tests.Imports) {
 		if err != nil {
 			t.Fatalf("failed for %v: %v", spn, err)
 		}
-		rng, err := spn.Range(span.NewTokenConverter(f.FileSet(), f.GetToken(ctx)))
+		tok, err := f.(source.GoFile).GetToken(ctx)
+		if err != nil {
+			t.Fatalf("failed to get token for %s: %v", spn.URI(), err)
+		}
+		rng, err := spn.Range(span.NewTokenConverter(f.FileSet(), tok))
 		if err != nil {
 			t.Fatalf("failed for %v: %v", spn, err)
 		}
@@ -374,7 +385,10 @@ func (r *runner) Definition(t *testing.T, data tests.Definitions) {
 		if err != nil {
 			t.Fatalf("failed for %v: %v", d.Src, err)
 		}
-		tok := f.GetToken(ctx)
+		tok, err := f.(source.GoFile).GetToken(ctx)
+		if err != nil {
+			t.Fatalf("failed to get token for %s: %v", d.Src.URI(), err)
+		}
 		pos := tok.Pos(d.Src.Start().Offset())
 		ident, err := source.Identifier(ctx, r.view, f.(source.GoFile), pos)
 		if err != nil {
@@ -417,7 +431,10 @@ func (r *runner) Highlight(t *testing.T, data tests.Highlights) {
 		if err != nil {
 			t.Fatalf("failed for %v: %v", src, err)
 		}
-		tok := f.GetToken(ctx)
+		tok, err := f.(source.GoFile).GetToken(ctx)
+		if err != nil {
+			t.Fatalf("failed to get token for %s: %v", src.URI(), err)
+		}
 		pos := tok.Pos(src.Start().Offset())
 		highlights, err := source.Highlight(ctx, f.(source.GoFile), pos)
 		if err != nil {
@@ -441,8 +458,10 @@ func (r *runner) Reference(t *testing.T, data tests.References) {
 		if err != nil {
 			t.Fatalf("failed for %v: %v", src, err)
 		}
-		tok := f.GetToken(ctx)
+		tok, err := f.(source.GoFile).GetToken(ctx)
+		if err != nil {
+			t.Fatalf("failed to get token for %s: %v", src.URI(), err)
+		}
 		pos := tok.Pos(src.Start().Offset())
 		ident, err := source.Identifier(ctx, r.view, f.(source.GoFile), pos)
 		if err != nil {
@@ -489,7 +508,10 @@ func (r *runner) Rename(t *testing.T, data tests.Renames) {
 		if err != nil {
 			t.Fatalf("failed for %v: %v", spn, err)
 		}
-		tok := f.GetToken(ctx)
+		tok, err := f.(source.GoFile).GetToken(ctx)
+		if err != nil {
+			t.Fatalf("failed to get token for %s: %v", spn.URI(), err)
+		}
 		pos := tok.Pos(spn.Start().Offset())
 
 		ident, err := source.Identifier(r.ctx, r.view, f.(source.GoFile), pos)
@@ -632,7 +654,10 @@ func (r *runner) SignatureHelp(t *testing.T, data tests.Signatures) {
 		if err != nil {
 			t.Fatalf("failed for %v: %v", spn, err)
 		}
-		tok := f.GetToken(ctx)
+		tok, err := f.(source.GoFile).GetToken(ctx)
+		if err != nil {
+			t.Fatalf("failed to get token for %s: %v", spn.URI(), err)
+		}
 		pos := tok.Pos(spn.Start().Offset())
 		gotSignature, err := source.SignatureHelp(ctx, f.(source.GoFile), pos)
 		if err != nil {
@@ -44,10 +44,11 @@ type Symbol struct {
 func DocumentSymbols(ctx context.Context, f GoFile) ([]Symbol, error) {
 	ctx, done := trace.StartSpan(ctx, "source.DocumentSymbols")
 	defer done()
+
 	fset := f.FileSet()
-	file := f.GetAST(ctx)
+	file, err := f.GetAST(ctx, ParseFull)
 	if file == nil {
-		return nil, fmt.Errorf("no AST for %s", f.URI())
+		return nil, err
 	}
 	pkg := f.GetPackage(ctx)
 	if pkg == nil || pkg.IsIllTyped() {
@@ -216,19 +216,15 @@ type File interface {
 	View() View
 	Handle(ctx context.Context) FileHandle
 	FileSet() *token.FileSet
-	GetToken(ctx context.Context) *token.File
+	GetToken(ctx context.Context) (*token.File, error)
 }
 
 // GoFile represents a Go source file that has been type-checked.
 type GoFile interface {
 	File
 
-	// GetAnyAST returns an AST that may or may not contain function bodies.
-	// It should be used in scenarios where function bodies are not necessary.
-	GetAnyAST(ctx context.Context) *ast.File
-
 	// GetAST returns the full AST for the file.
-	GetAST(ctx context.Context) *ast.File
+	GetAST(ctx context.Context, mode ParseMode) (*ast.File, error)
 
 	// GetPackage returns the package that this file belongs to.
 	GetPackage(ctx context.Context) Package
@@ -255,7 +251,7 @@ type Package interface {
 	ID() string
 	PkgPath() string
 	GetFilenames() []string
-	GetSyntax() []*ast.File
+	GetSyntax(context.Context) []*ast.File
 	GetErrors() []packages.Error
 	GetTypes() *types.Package
 	GetTypesInfo() *types.Info
@@ -22,7 +22,11 @@ func getSourceFile(ctx context.Context, v source.View, uri span.URI) (source.Fil
 	if err != nil {
 		return nil, nil, err
 	}
-	m := protocol.NewColumnMapper(f.URI(), f.URI().Filename(), f.FileSet(), f.GetToken(ctx), data)
+	tok, err := f.GetToken(ctx)
+	if err != nil {
+		return nil, nil, err
+	}
+	m := protocol.NewColumnMapper(f.URI(), f.URI().Filename(), f.FileSet(), tok, data)
 
 	return f, m, nil
 }