internal/lsp: improve error handling while parsing
If the context is canceled (or times out) during parsing, we were
previously caching the package with no *ast.Files. Any further LSP
queries against that package would fail, because the package is
already loaded but none of its files are mapped to it.

Fix by propagating non-parse errors as "fatal" errors in parseFiles.
typeCheck will propagate these errors and not cache the package.

I also fixed the package cache to not cache errors from loading
packages. If an error like "context canceled" is cached, none of the
package's files are mapped to the package, which prevents the package
from ever getting unmapped when its files are updated.

I also added a retry mechanism: if the current request is not
canceled, but the package failed to load because a previous request
was canceled, this request can try loading the package again.

Updates golang/go#32354, golang/go#32360

Change-Id: I466ddb8d336aeecf6e50f9f6d040787a86a60ca0
GitHub-Last-Rev: 5f1e7ef9c883b76a9c1b3636936d91ec0821d922
GitHub-Pull-Request: golang/tools#110
Reviewed-on: https://go-review.googlesource.com/c/tools/+/181317
Run-TryBot: Rebecca Stambler <rstambler@golang.org>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Reviewed-by: Rebecca Stambler <rstambler@golang.org>
parent a99d5a724b
commit 1d0142ba47
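The diff below is the actual change. As a standalone illustration of the caching idea described above, here is a minimal sketch; the names (entry, pkgCache, get, load) are invented for this example and are not the gopls API. It shows the two behaviors the commit message calls out: a load error caused by cancellation is not left poisoning the cache, and a later request that is itself not canceled clears the stale entry and retries.

package main

import (
	"context"
	"errors"
	"fmt"
	"sync"
)

// entry is a hypothetical cache slot: ready is closed once pkg/err are set.
type entry struct {
	ready chan struct{}
	pkg   string // stand-in for a loaded package
	err   error
}

// pkgCache is a hypothetical package cache keyed by import path.
type pkgCache struct {
	mu       sync.Mutex
	packages map[string]*entry
}

// get returns the cached package, loading it if needed. If a previous
// attempt was canceled, the stale entry is dropped and the load retried.
func (c *pkgCache) get(ctx context.Context, path string, load func(context.Context) (string, error)) (string, error) {
	c.mu.Lock()
	e, ok := c.packages[path]
	if ok {
		c.mu.Unlock()
		<-e.ready // wait for the in-flight load to finish
	} else {
		e = &entry{ready: make(chan struct{})}
		c.packages[path] = e
		c.mu.Unlock()
		e.pkg, e.err = load(ctx)
		close(e.ready)
	}
	if e.err != nil {
		// A canceled load from an earlier request should not fail this
		// request: clear the cached entry and try again.
		if errors.Is(e.err, context.Canceled) && ctx.Err() == nil {
			c.mu.Lock()
			if c.packages[path] == e {
				delete(c.packages, path)
			}
			c.mu.Unlock()
			return c.get(ctx, path, load)
		}
		return "", e.err
	}
	return e.pkg, nil
}

func main() {
	c := &pkgCache{packages: make(map[string]*entry)}
	calls := 0
	load := func(ctx context.Context) (string, error) {
		calls++
		if calls == 1 {
			return "", context.Canceled // simulate a canceled first request
		}
		return "example.com/pkg", nil
	}
	pkg, err := c.get(context.Background(), "example.com/pkg", load)
	fmt.Println(pkg, err, "loads:", calls) // example.com/pkg <nil> loads: 2
}

The real getPkg below applies the same pattern, and parseFiles additionally reports context and I/O failures as a fatal error so that typeCheck never caches a package with zero parsed files.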
@@ -45,6 +45,7 @@ func (imp *importer) getPkg(pkgPath string) (*pkg, error) {
     }
     imp.view.pcache.mu.Lock()
     e, ok := imp.view.pcache.packages[pkgPath]
+
     if ok {
         // cache hit
         imp.view.pcache.mu.Unlock()
@@ -61,9 +62,21 @@ func (imp *importer) getPkg(pkgPath string) (*pkg, error) {
         e.pkg, e.err = imp.typeCheck(pkgPath)
         close(e.ready)
     }
+
     if e.err != nil {
+        // If the import had been previously canceled, and that error cached, try again.
+        if e.err == context.Canceled && imp.ctx.Err() == nil {
+            imp.view.pcache.mu.Lock()
+            // Clear out canceled cache entry if it is still there.
+            if imp.view.pcache.packages[pkgPath] == e {
+                delete(imp.view.pcache.packages, pkgPath)
+            }
+            imp.view.pcache.mu.Unlock()
+            return imp.getPkg(pkgPath)
+        }
         return nil, e.err
     }
+
     return e.pkg, nil
 }
 
@@ -104,10 +117,19 @@ func (imp *importer) typeCheck(pkgPath string) (*pkg, error) {
     ignoreFuncBodies := imp.topLevelPkgID != pkg.id
 
     // Don't type-check function bodies if we are not in the top-level package.
-    files, errs := imp.parseFiles(meta.files, ignoreFuncBodies)
-    for _, err := range errs {
+    files, parseErrs, err := imp.parseFiles(meta.files, ignoreFuncBodies)
+    if err != nil {
+        return nil, err
+    }
+    for _, err := range parseErrs {
         appendError(err)
     }
+
+    // If something unexpected happens, don't cache a package with 0 parsed files.
+    if len(files) == 0 {
+        return nil, fmt.Errorf("no parsed files for package %s", pkg.pkgPath)
+    }
+
     pkg.syntax = files
 
     // Handle circular imports by copying previously seen imports.
@@ -52,9 +52,12 @@ func (v *view) loadParseTypecheck(ctx context.Context, f *goFile) ([]packages.Er
     }
     // Type-check package.
     pkg, err := imp.getPkg(f.meta.pkgPath)
-    if pkg == nil || pkg.IsIllTyped() {
+    if err != nil {
+        return nil, err
+    }
+    if pkg == nil || pkg.IsIllTyped() {
         return nil, fmt.Errorf("loadParseTypecheck: %s is ill typed", f.meta.pkgPath)
     }
     // If we still have not found the package for the file, something is wrong.
     if f.pkg == nil {
         return nil, fmt.Errorf("loadParseTypeCheck: no package found for %v", f.filename())
@@ -28,32 +28,48 @@ func parseFile(fset *token.FileSet, filename string, src []byte) (*ast.File, err
 var ioLimit = make(chan bool, 20)
 
 // parseFiles reads and parses the Go source files and returns the ASTs
-// of the ones that could be at least partially parsed, along with a
-// list of I/O and parse errors encountered.
+// of the ones that could be at least partially parsed, along with a list
+// of parse errors encountered, and a fatal error that prevented parsing.
 //
 // Because files are scanned in parallel, the token.Pos
 // positions of the resulting ast.Files are not ordered.
 //
-func (imp *importer) parseFiles(filenames []string, ignoreFuncBodies bool) ([]*astFile, []error) {
-    var wg sync.WaitGroup
-    n := len(filenames)
-    parsed := make([]*astFile, n)
-    errors := make([]error, n)
+func (imp *importer) parseFiles(filenames []string, ignoreFuncBodies bool) ([]*astFile, []error, error) {
+    var (
+        wg       sync.WaitGroup
+        mu       sync.Mutex
+        n        = len(filenames)
+        parsed   = make([]*astFile, n)
+        errors   = make([]error, n)
+        fatalErr error
+    )
+
+    setFatalErr := func(err error) {
+        mu.Lock()
+        fatalErr = err
+        mu.Unlock()
+    }
+
     for i, filename := range filenames {
-        if imp.ctx.Err() != nil {
-            parsed[i], errors[i] = nil, imp.ctx.Err()
-            continue
+        if err := imp.ctx.Err(); err != nil {
+            setFatalErr(err)
+            break
         }
         // First, check if we have already cached an AST for this file.
         f, err := imp.view.findFile(span.FileURI(filename))
-        if err != nil || f == nil {
-            parsed[i], errors[i] = nil, err
-            continue
+        if err != nil {
+            setFatalErr(err)
+            break
         }
+        if f == nil {
+            setFatalErr(fmt.Errorf("could not find file %s", filename))
+            break
+        }
+
         gof, ok := f.(*goFile)
         if !ok {
-            parsed[i], errors[i] = nil, fmt.Errorf("non-Go file in parse call: %v", filename)
-            continue
+            setFatalErr(fmt.Errorf("non-Go file in parse call: %s", filename))
+            break
         }
         wg.Add(1)
         go func(i int, filename string) {
@@ -71,14 +87,13 @@ func (imp *importer) parseFiles(filenames []string, ignoreFuncBodies bool) ([]*a
             }
 
             // We don't have a cached AST for this file, so we read its content and parse it.
-            data, _, err := gof.Handle(imp.ctx).Read(imp.ctx)
+            src, _, err := gof.Handle(imp.ctx).Read(imp.ctx)
             if err != nil {
-                parsed[i], errors[i] = nil, err
+                setFatalErr(err)
                 return
             }
-            src := data
             if src == nil {
-                parsed[i], errors[i] = nil, fmt.Errorf("no source for %v", filename)
+                setFatalErr(fmt.Errorf("no source for %v", filename))
                 return
             }
 
@@ -102,6 +117,10 @@ func (imp *importer) parseFiles(filenames []string, ignoreFuncBodies bool) ([]*a
     }
     wg.Wait()
 
+    if fatalErr != nil {
+        return nil, nil, fatalErr
+    }
+
     // Eliminate nils, preserving order.
     var o int
     for _, f := range parsed {
@@ -121,7 +140,7 @@ func (imp *importer) parseFiles(filenames []string, ignoreFuncBodies bool) ([]*a
     }
     errors = errors[:o]
 
-    return parsed, errors
+    return parsed, errors, nil
 }
 
 // sameFile returns true if x and y have the same basename and denote