internal/lsp: fix stuck diagnostic messages
The "analyses" cache in lsp/cache.(*Package).GetActionGraph was not getting cleared on errors. This could result in future calls to GetActionGraph waiting on the "ready" channel indefinitely. This in turn caused the goroutine in cacheAndDiagnose to block indefinitely and never send the diagnostic results back. Now we use a defer statement to always close the channel. If we did not succeed, we also clear out the cache entry and set a "succeeded = false" flag to signal waiters that they need to retry. If in the future errors other than context.Canceled/Timeout are possible, this retry behavior may need to be revisited. Fixes golang/go#30786 Change-Id: Icacc9188f1500b00f2178521ce373a2c1363f932 GitHub-Last-Rev: 7c43afd4286a69b0d35a625716e6934c72c4cef5 GitHub-Pull-Request: golang/tools#91 Reviewed-on: https://go-review.googlesource.com/c/tools/+/173977 Reviewed-by: Rebecca Stambler <rstambler@golang.org> Run-TryBot: Rebecca Stambler <rstambler@golang.org>
parent bb8aefc696
commit e54115a062
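The pattern the commit message describes is easier to follow in isolation before reading the diff. Below is a minimal, self-contained sketch of the same shape: a per-key entry with a done channel that is always closed via defer, a succeeded flag set only on success, and waiters that retry when the owning call failed. The package, type, and function names here (actioncache, cache, entry, get, compute) are invented for illustration; this is not the real lsp/cache code.

// Package actioncache is a hypothetical, stripped-down illustration of the
// caching pattern this change adopts; it is not the real lsp/cache code.
package actioncache

import (
	"context"
	"sync"
)

// entry mirrors the shape of analysisEntry in the diff below: done is always
// closed when the owning call finishes, and succeeded reports whether a
// usable result was left behind.
type entry struct {
	done      chan struct{}
	succeeded bool
	result    string
}

// cache stands in for the Package.analyses map plus its mutex.
type cache struct {
	mu      sync.Mutex
	entries map[string]*entry
	compute func(ctx context.Context, key string) (string, error)
}

// get returns the cached result for key, computing it on a cache miss.
func (c *cache) get(ctx context.Context, key string) (string, error) {
	c.mu.Lock()
	if e, ok := c.entries[key]; ok {
		// Cache hit: wait for the owning call to finish or for our own
		// context to be cancelled.
		c.mu.Unlock()
		select {
		case <-e.done:
			if !e.succeeded {
				// The owner failed or was cancelled; its entry has been
				// deleted, so retry from scratch.
				return c.get(ctx, key)
			}
			return e.result, nil
		case <-ctx.Done():
			return "", ctx.Err()
		}
	}

	// Cache miss: this call becomes the owner of the entry.
	e := &entry{done: make(chan struct{})}
	c.entries[key] = e
	c.mu.Unlock()

	defer func() {
		// On failure, drop the defunct entry so later callers recompute
		// instead of waiting on a result that will never arrive.
		if !e.succeeded {
			c.mu.Lock()
			delete(c.entries, key)
			c.mu.Unlock()
		}
		// Always close done so waiters never get stuck; this is the bug
		// being fixed.
		close(e.done)
	}()

	res, err := c.compute(ctx, key)
	if err != nil {
		return "", err
	}
	e.result = res
	e.succeeded = true
	return res, nil
}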
@@ -32,7 +32,8 @@ type Package struct {
 }
 
 type analysisEntry struct {
-	ready chan struct{}
+	done      chan struct{}
+	succeeded bool
 	*source.Action
 }
 
@@ -49,14 +50,20 @@ func (pkg *Package) GetActionGraph(ctx context.Context, a *analysis.Analyzer) (*
 
 		// wait for entry to become ready or the context to be cancelled
 		select {
-		case <-e.ready:
+		case <-e.done:
+			// If the goroutine we are waiting on was cancelled, we should retry.
+			// If errors other than cancelation/timeout become possible, it may
+			// no longer be appropriate to always retry here.
+			if !e.succeeded {
+				return pkg.GetActionGraph(ctx, a)
+			}
 		case <-ctx.Done():
 			return nil, ctx.Err()
 		}
 	} else {
 		// cache miss
 		e = &analysisEntry{
-			ready: make(chan struct{}),
+			done: make(chan struct{}),
 			Action: &source.Action{
 				Analyzer: a,
 				Pkg:      pkg,
@@ -65,6 +72,21 @@ func (pkg *Package) GetActionGraph(ctx context.Context, a *analysis.Analyzer) (*
 		pkg.analyses[a] = e
 		pkg.mu.Unlock()
 
+		defer func() {
+			// If we got an error, clear out our defunct cache entry. We don't cache
+			// errors since they could depend on our dependencies, which can change.
+			// Currently the only possible error is context.Canceled, though, which
+			// should also not be cached.
+			if !e.succeeded {
+				pkg.mu.Lock()
+				delete(pkg.analyses, a)
+				pkg.mu.Unlock()
+			}
+
+			// Always close done so waiters don't get stuck.
+			close(e.done)
+		}()
+
 		// This goroutine becomes responsible for populating
 		// the entry and broadcasting its readiness.
 
@@ -94,7 +116,8 @@ func (pkg *Package) GetActionGraph(ctx context.Context, a *analysis.Analyzer) (*
 				e.Deps = append(e.Deps, act)
 			}
 		}
-		close(e.ready)
+
+		e.succeeded = true
 	}
 	return e.Action, nil
 }
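To make the stuck-waiter scenario concrete, a test-style sketch against the hypothetical actioncache package above (again, not code from this repository) can check that a cancelled first call no longer wedges a later one:

package actioncache

import (
	"context"
	"testing"
	"time"
)

// TestCancelledOwnerDoesNotWedgeWaiters exercises the failure mode this
// commit fixes: after a cancelled computation, later callers must not
// block forever on the entry's channel.
func TestCancelledOwnerDoesNotWedgeWaiters(t *testing.T) {
	c := &cache{
		entries: make(map[string]*entry),
		compute: func(ctx context.Context, key string) (string, error) {
			if err := ctx.Err(); err != nil {
				return "", err // typically context.Canceled in the real server
			}
			return "diagnostics for " + key, nil
		},
	}

	// First call runs with an already-cancelled context and fails; the
	// deferred cleanup removes its cache entry and closes done.
	cancelled, cancel := context.WithCancel(context.Background())
	cancel()
	if _, err := c.get(cancelled, "pkg/foo"); err == nil {
		t.Fatal("expected an error from the cancelled call")
	}

	// A second call must recompute and return rather than hang.
	done := make(chan struct{})
	go func() {
		defer close(done)
		if res, err := c.get(context.Background(), "pkg/foo"); err != nil || res == "" {
			t.Errorf("second call failed: %v", err)
		}
	}()
	select {
	case <-done:
		// Success: the defunct entry was cleared and the result recomputed.
	case <-time.After(time.Second):
		t.Fatal("second call blocked; stale cache entry was not cleared")
	}
}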