Compare commits


No commits in common. "master" and "gopls/v0.1.1" have entirely different histories.

154 changed files with 2249 additions and 7682 deletions


@@ -1,83 +1,18 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// The digraph command performs queries over unlabelled directed graphs
// represented in text form. It is intended to integrate nicely with
// typical UNIX command pipelines.
//
// Since directed graphs (import graphs, reference graphs, call graphs,
// etc) often arise during software tool development and debugging, this
// command is included in the go.tools repository.
//

/*
The digraph command performs queries over unlabelled directed graphs
represented in text form. It is intended to integrate nicely with
typical UNIX command pipelines.
Usage:
your-application | digraph [command]
The support commands are:
nodes
the set of all nodes
degree
the in-degree and out-degree of each node
preds <node> ...
the set of immediate predecessors of the specified nodes
succs <node> ...
the set of immediate successors of the specified nodes
forward <node> ...
the set of nodes transitively reachable from the specified nodes
reverse <node> ...
the set of nodes that transitively reach the specified nodes
somepath <node> <node>
the list of nodes on some arbitrary path from the first node to the second
allpaths <node> <node>
the set of nodes on all paths from the first node to the second
sccs
all strongly connected components (one per line)
scc <node>
the set of nodes strongly connected to the specified one
Input format:
Each line contains zero or more words. Words are separated by unquoted
whitespace; words may contain Go-style double-quoted portions, allowing spaces
and other characters to be expressed.
Each word declares a node, and if there are more than one, an edge from the
first to each subsequent one. The graph is provided on the standard input.
For instance, the following (acyclic) graph specifies a partial order among the
subtasks of getting dressed:
$ cat clothes.txt
socks shoes
"boxer shorts" pants
pants belt shoes
shirt tie sweater
sweater jacket
hat
The line "shirt tie sweater" indicates the two edges shirt -> tie and
shirt -> sweater, not shirt -> tie -> sweater.
Example usage:
Using digraph with existing Go tools:
$ go mod graph | digraph nodes # Operate on the Go module graph.
$ go list -m all | digraph nodes # Operate on the Go package graph.
Show the transitive closure of imports of the digraph tool itself:
$ go list -f '{{.ImportPath}} {{join .Imports " "}}' ... | digraph forward golang.org/x/tools/cmd/digraph
Show which clothes (see above) must be donned before a jacket:
$ digraph reverse jacket
*/
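The input format described in the comment above (whitespace-separated words, where a word may contain a Go-style double-quoted portion so that spaces can be expressed) can be illustrated with a small splitter. This is an editorial simplification, not the tool's own parser: it only handles words that are either fully quoted or fully unquoted, and the name splitLine is invented for the example.

	package main

	import (
		"fmt"
		"strconv"
		"strings"
		"unicode"
	)

	// splitLine breaks one input line into words. A word is either a plain
	// run of non-space characters or a Go-style double-quoted string such
	// as "boxer shorts".
	func splitLine(line string) ([]string, error) {
		var words []string
		rest := strings.TrimLeftFunc(line, unicode.IsSpace)
		for rest != "" {
			if rest[0] == '"' {
				// Quoted word: find the closing quote and unquote it.
				// (Escaped quotes inside the word are not handled here.)
				end := strings.Index(rest[1:], `"`)
				if end < 0 {
					return nil, fmt.Errorf("unterminated quotation in %q", line)
				}
				word, err := strconv.Unquote(rest[:end+2])
				if err != nil {
					return nil, err
				}
				words = append(words, word)
				rest = rest[end+2:]
			} else {
				// Plain word: everything up to the next whitespace.
				i := strings.IndexFunc(rest, unicode.IsSpace)
				if i < 0 {
					i = len(rest)
				}
				words = append(words, rest[:i])
				rest = rest[i:]
			}
			rest = strings.TrimLeftFunc(rest, unicode.IsSpace)
		}
		return words, nil
	}

	func main() {
		words, _ := splitLine(`"boxer shorts" pants belt`)
		fmt.Printf("%q\n", words) // ["boxer shorts" "pants" "belt"]
	}

Each word of the resulting slice declares a node, and every word after the first also declares an edge from the first word to it.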
package main // import "golang.org/x/tools/cmd/digraph"

// TODO(adonovan):
// - support input files other than stdin
// - support alternative formats (AT&T GraphViz, CSV, etc),
//   a comment syntax, etc.
// - allow queries to nest, like Blaze query language.

import (
	"bufio"

@@ -93,41 +28,74 @@ import (
	"unicode/utf8"
)
func usage() { const Usage = `digraph: queries over directed graphs in text form.
fmt.Fprintf(os.Stderr, `Usage: your-application | digraph [command]
Graph format:
Each line contains zero or more words. Words are separated by
unquoted whitespace; words may contain Go-style double-quoted portions,
allowing spaces and other characters to be expressed.
Each field declares a node, and if there are more than one,
an edge from the first to each subsequent one.
The graph is provided on the standard input.
For instance, the following (acyclic) graph specifies a partial order
among the subtasks of getting dressed:
% cat clothes.txt
socks shoes
"boxer shorts" pants
pants belt shoes
shirt tie sweater
sweater jacket
hat
The line "shirt tie sweater" indicates the two edges shirt -> tie and
shirt -> sweater, not shirt -> tie -> sweater.
Supported queries:
The support commands are:
nodes nodes
the set of all nodes the set of all nodes
degree degree
the in-degree and out-degree of each node the in-degree and out-degree of each node.
preds <node> ... preds <label> ...
the set of immediate predecessors of the specified nodes the set of immediate predecessors of the specified nodes
succs <node> ... succs <label> ...
the set of immediate successors of the specified nodes the set of immediate successors of the specified nodes
forward <node> ... forward <label> ...
the set of nodes transitively reachable from the specified nodes the set of nodes transitively reachable from the specified nodes
reverse <node> ... reverse <label> ...
the set of nodes that transitively reach the specified nodes the set of nodes that transitively reach the specified nodes
somepath <node> <node> somepath <label> <label>
the list of nodes on some arbitrary path from the first node to the second the list of nodes on some arbitrary path from the first node to the second
allpaths <node> <node> allpaths <label> <label>
the set of nodes on all paths from the first node to the second the set of nodes on all paths from the first node to the second
sccs sccs
all strongly connected components (one per line) all strongly connected components (one per line)
scc <node> scc <label>
the set of nodes nodes strongly connected to the specified one the set of nodes nodes strongly connected to the specified one
`)
os.Exit(2) Example usage:
}
Show the transitive closure of imports of the digraph tool itself:
% go list -f '{{.ImportPath}}{{.Imports}}' ... | tr '[]' ' ' |
digraph forward golang.org/x/tools/cmd/digraph
Show which clothes (see above) must be donned before a jacket:
% digraph reverse jacket <clothes.txt
`
func main() { func main() {
flag.Usage = usage flag.Usage = func() { fmt.Fprintln(os.Stderr, Usage) }
flag.Parse() flag.Parse()
args := flag.Args() args := flag.Args()
if len(args) == 0 { if len(args) == 0 {
usage() fmt.Fprintln(os.Stderr, Usage)
return
} }
if err := digraph(args[0], args[1:]); err != nil { if err := digraph(args[0], args[1:]); err != nil {
@ -262,47 +230,6 @@ func (g graph) sccs() []nodeset {
return sccs return sccs
} }
func (g graph) allpaths(from, to string) error {
// Mark all nodes to "to".
seen := make(nodeset) // value of seen[x] indicates whether x is on some path to "to"
var visit func(node string) bool
visit = func(node string) bool {
reachesTo, ok := seen[node]
if !ok {
reachesTo = node == to
seen[node] = reachesTo
for e := range g[node] {
if visit(e) {
reachesTo = true
}
}
if reachesTo && node != to {
seen[node] = true
}
}
return reachesTo
}
visit(from)
// For each marked node, collect its marked successors.
var edges []string
for n := range seen {
for succ := range g[n] {
if seen[succ] {
edges = append(edges, n+" "+succ)
}
}
}
// Sort (so that this method is deterministic) and print edges.
sort.Strings(edges)
for _, e := range edges {
fmt.Fprintln(stdout, e)
}
return nil
}
func parse(rd io.Reader) (graph, error) { func parse(rd io.Reader) (graph, error) {
g := make(graph) g := make(graph)
@ -325,7 +252,6 @@ func parse(rd io.Reader) (graph, error) {
return g, nil return g, nil
} }
// Overridable for testing purposes.
var stdin io.Reader = os.Stdin var stdin io.Reader = os.Stdin
var stdout io.Writer = os.Stdout var stdout io.Writer = os.Stdout
@ -440,7 +366,33 @@ func digraph(cmd string, args []string) error {
if g[to] == nil { if g[to] == nil {
return fmt.Errorf("no such 'to' node %q", to) return fmt.Errorf("no such 'to' node %q", to)
} }
g.allpaths(from, to)
seen := make(nodeset) // value of seen[x] indicates whether x is on some path to 'to'
var visit func(label string) bool
visit = func(label string) bool {
reachesTo, ok := seen[label]
if !ok {
reachesTo = label == to
seen[label] = reachesTo
for e := range g[label] {
if visit(e) {
reachesTo = true
}
}
seen[label] = reachesTo
}
return reachesTo
}
if !visit(from) {
return fmt.Errorf("no path from %q to %q", from, to)
}
for label, reachesTo := range seen {
if !reachesTo {
delete(seen, label)
}
}
seen.sort().println("\n")
case "sccs": case "sccs":
if len(args) != 0 { if len(args) != 0 {
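Both versions of the allpaths code above rest on the same marking step: a depth-first search records, for every node it reaches, whether that node lies on some path to the target, and the nodes marked true are exactly the nodes on paths from the start to the target. A self-contained sketch of that step follows; the graph type and function name are illustrative, not the tool's API.

	package main

	import (
		"fmt"
		"sort"
	)

	// graph maps each node to the set of its successors.
	type graph map[string]map[string]bool

	// nodesOnPaths returns, in sorted order, every node that lies on some
	// path from 'from' to 'to', or nil if no such path exists.
	func nodesOnPaths(g graph, from, to string) []string {
		seen := make(map[string]bool) // seen[x] records whether x reaches 'to'
		var visit func(n string) bool
		visit = func(n string) bool {
			reaches, ok := seen[n]
			if !ok {
				reaches = n == to
				seen[n] = reaches // provisional mark so cycles terminate
				for succ := range g[n] {
					if visit(succ) {
						reaches = true
					}
				}
				seen[n] = reaches
			}
			return reaches
		}
		if !visit(from) {
			return nil
		}
		var nodes []string
		for n, reaches := range seen {
			if reaches {
				nodes = append(nodes, n)
			}
		}
		sort.Strings(nodes)
		return nodes
	}

	func main() {
		// Part of the clothes example from the documentation above.
		g := graph{
			"shirt":   {"tie": true, "sweater": true},
			"sweater": {"jacket": true},
		}
		fmt.Println(nodesOnPaths(g, "shirt", "jacket")) // [jacket shirt sweater]
	}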


@@ -1,6 +1,3 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main package main
import ( import (
@ -29,34 +26,35 @@ d c
` `
for _, test := range []struct { for _, test := range []struct {
name string
input string input string
cmd string cmd string
args []string args []string
want string want string
}{ }{
{"nodes", g1, "nodes", nil, "belt\nhat\njacket\npants\nshirt\nshoes\nshorts\nsocks\nsweater\ntie\n"}, {g1, "nodes", nil, "belt\nhat\njacket\npants\nshirt\nshoes\nshorts\nsocks\nsweater\ntie\n"},
{"reverse", g1, "reverse", []string{"jacket"}, "jacket\nshirt\nsweater\n"}, {g1, "reverse", []string{"jacket"}, "jacket\nshirt\nsweater\n"},
{"forward", g1, "forward", []string{"socks"}, "shoes\nsocks\n"}, {g1, "forward", []string{"socks"}, "shoes\nsocks\n"},
{"forward multiple args", g1, "forward", []string{"socks", "sweater"}, "jacket\nshoes\nsocks\nsweater\n"}, {g1, "forward", []string{"socks", "sweater"}, "jacket\nshoes\nsocks\nsweater\n"},
{"scss", g2, "sccs", nil, "a\nb\nc d\n"},
{"scc", g2, "scc", []string{"d"}, "c\nd\n"}, {g2, "allpaths", []string{"a", "d"}, "a\nb\nc\nd\n"},
{"succs", g2, "succs", []string{"a"}, "b\nc\n"},
{"preds", g2, "preds", []string{"c"}, "a\nd\n"}, {g2, "sccs", nil, "a\nb\nc d\n"},
{"preds multiple args", g2, "preds", []string{"c", "d"}, "a\nb\nc\nd\n"}, {g2, "scc", []string{"d"}, "c\nd\n"},
{g2, "succs", []string{"a"}, "b\nc\n"},
{g2, "preds", []string{"c"}, "a\nd\n"},
{g2, "preds", []string{"c", "d"}, "a\nb\nc\nd\n"},
} { } {
t.Run(test.name, func(t *testing.T) {
stdin = strings.NewReader(test.input) stdin = strings.NewReader(test.input)
stdout = new(bytes.Buffer) stdout = new(bytes.Buffer)
if err := digraph(test.cmd, test.args); err != nil { if err := digraph(test.cmd, test.args); err != nil {
t.Fatal(err) t.Error(err)
continue
} }
got := stdout.(fmt.Stringer).String() got := stdout.(fmt.Stringer).String()
if got != test.want { if got != test.want {
t.Errorf("digraph(%s, %s) = got %q, want %q", test.cmd, test.args, got, test.want) t.Errorf("digraph(%s, %s) = %q, want %q", test.cmd, test.args, got, test.want)
} }
})
} }
// TODO(adonovan): // TODO(adonovan):
@ -64,110 +62,6 @@ d c
// - test errors // - test errors
} }
func TestAllpaths(t *testing.T) {
for _, test := range []struct {
name string
in string
to string // from is always "A"
want string
}{
{
name: "Basic",
in: "A B\nB C",
to: "B",
want: "A B\n",
},
{
name: "Long",
in: "A B\nB C\n",
to: "C",
want: "A B\nB C\n",
},
{
name: "Cycle Basic",
in: "A B\nB A",
to: "B",
want: "A B\nB A\n",
},
{
name: "Cycle Path Out",
// A <-> B -> C -> D
in: "A B\nB A\nB C\nC D",
to: "C",
want: "A B\nB A\nB C\n",
},
{
name: "Cycle Path Out Further Out",
// A -> B <-> C -> D -> E
in: "A B\nB C\nC D\nC B\nD E",
to: "D",
want: "A B\nB C\nC B\nC D\n",
},
{
name: "Two Paths Basic",
// /-> C --\
// A -> B -- -> E -> F
// \-> D --/
in: "A B\nB C\nC E\nB D\nD E\nE F",
to: "E",
want: "A B\nB C\nB D\nC E\nD E\n",
},
{
name: "Two Paths With One Immediately From Start",
// /-> B -+ -> D
// A -- |
// \-> C <+
in: "A B\nA C\nB C\nB D",
to: "C",
want: "A B\nA C\nB C\n",
},
{
name: "Two Paths Further Up",
// /-> B --\
// A -- -> D -> E -> F
// \-> C --/
in: "A B\nA C\nB D\nC D\nD E\nE F",
to: "E",
want: "A B\nA C\nB D\nC D\nD E\n",
},
{
// We should include A - C - D even though it's further up the
// second path than D (which would already be in the graph by
// the time we get around to integrating the second path).
name: "Two Splits",
// /-> B --\ /-> E --\
// A -- -> D -- -> G -> H
// \-> C --/ \-> F --/
in: "A B\nA C\nB D\nC D\nD E\nD F\nE G\nF G\nG H",
to: "G",
want: "A B\nA C\nB D\nC D\nD E\nD F\nE G\nF G\n",
},
{
// D - E should not be duplicated.
name: "Two Paths - Two Splits With Gap",
// /-> B --\ /-> F --\
// A -- -> D -> E -- -> H -> I
// \-> C --/ \-> G --/
in: "A B\nA C\nB D\nC D\nD E\nE F\nE G\nF H\nG H\nH I",
to: "H",
want: "A B\nA C\nB D\nC D\nD E\nE F\nE G\nF H\nG H\n",
},
} {
t.Run(test.name, func(t *testing.T) {
stdin = strings.NewReader(test.in)
stdout = new(bytes.Buffer)
if err := digraph("allpaths", []string{"A", test.to}); err != nil {
t.Fatal(err)
}
got := stdout.(fmt.Stringer).String()
if got != test.want {
t.Errorf("digraph(allpaths, A, %s) = got %q, want %q", test.to, got, test.want)
}
})
}
}
func TestSplit(t *testing.T) { func TestSplit(t *testing.T) {
for _, test := range []struct { for _, test := range []struct {
line string line string


@@ -19,5 +19,5 @@ import (
func main() { func main() {
debug.Version += "-cmd.gopls" debug.Version += "-cmd.gopls"
tool.Main(context.Background(), cmd.New("gopls-legacy", "", nil), os.Args[1:]) tool.Main(context.Background(), cmd.New("", nil), os.Args[1:])
} }


@@ -54,7 +54,7 @@ function initNotes() {
slides.src = slidesUrl; slides.src = slidesUrl;
w.document.body.appendChild(slides); w.document.body.appendChild(slides);
var curSlide = parseInt(localStorage.getItem(destSlideKey()), 10); var curSlide = parseInt(localStorage.getItem(getDestSlideKey()), 10);
var formattedNotes = ''; var formattedNotes = '';
var section = sections[curSlide - 1]; var section = sections[curSlide - 1];
// curSlide is 0 when initialized from the first page of slides. // curSlide is 0 when initialized from the first page of slides.
@ -107,7 +107,7 @@ function updateNotes() {
// When triggered from parent window, notesWindow is null // When triggered from parent window, notesWindow is null
// The storage event listener on notesWindow will update notes // The storage event listener on notesWindow will update notes
if (!notesWindow) return; if (!notesWindow) return;
var destSlide = parseInt(localStorage.getItem(destSlideKey()), 10); var destSlide = parseInt(localStorage.getItem(getDestSlideKey()), 10);
var section = sections[destSlide - 1]; var section = sections[destSlide - 1];
var el = notesWindow.document.getElementById('presenter-notes'); var el = notesWindow.document.getElementById('presenter-notes');

go.mod

@@ -3,6 +3,6 @@ module golang.org/x/tools
go 1.11 go 1.11
require ( require (
golang.org/x/net v0.0.0-20190620200207-3b0461eec859 golang.org/x/net v0.0.0-20190311183353-d8887717615a
golang.org/x/sync v0.0.0-20190423024810-112230192c58 golang.org/x/sync v0.0.0-20190423024810-112230192c58
) )

go.sum

@@ -1,6 +1,6 @@
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859 h1:R/3boaszxrf1GEUWTVDzSKVwLmSJpwZ1yqXm8j0v2QI= golang.org/x/net v0.0.0-20190311183353-d8887717615a h1:oWX7TPOiFAMXLq8o0ikBYfCJVlRHBcsciT5bXOrH628=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/sync v0.0.0-20190423024810-112230192c58 h1:8gQV6CLnAEikrhgkHFbMAEhagSSnXWGV915qUMm9mrU= golang.org/x/sync v0.0.0-20190423024810-112230192c58 h1:8gQV6CLnAEikrhgkHFbMAEhagSSnXWGV915qUMm9mrU=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=


@@ -99,16 +99,6 @@ func (s *Set) ExportObjectFact(obj types.Object, fact analysis.Fact) {
s.mu.Unlock() s.mu.Unlock()
} }
func (s *Set) AllObjectFacts() []analysis.ObjectFact {
var facts []analysis.ObjectFact
for k, v := range s.m {
if k.obj != nil {
facts = append(facts, analysis.ObjectFact{k.obj, v})
}
}
return facts
}
// ImportPackageFact implements analysis.Pass.ImportPackageFact. // ImportPackageFact implements analysis.Pass.ImportPackageFact.
func (s *Set) ImportPackageFact(pkg *types.Package, ptr analysis.Fact) bool { func (s *Set) ImportPackageFact(pkg *types.Package, ptr analysis.Fact) bool {
if pkg == nil { if pkg == nil {
@ -132,16 +122,6 @@ func (s *Set) ExportPackageFact(fact analysis.Fact) {
s.mu.Unlock() s.mu.Unlock()
} }
func (s *Set) AllPackageFacts() []analysis.PackageFact {
var facts []analysis.PackageFact
for k, v := range s.m {
if k.obj == nil {
facts = append(facts, analysis.PackageFact{k.pkg, v})
}
}
return facts
}
// gobFact is the Gob declaration of a serialized fact. // gobFact is the Gob declaration of a serialized fact.
type gobFact struct { type gobFact struct {
PkgPath string // path of package PkgPath string // path of package


@@ -334,10 +334,8 @@ func run(fset *token.FileSet, cfg *Config, analyzers []*analysis.Analyzer) ([]re
Report: func(d analysis.Diagnostic) { act.diagnostics = append(act.diagnostics, d) }, Report: func(d analysis.Diagnostic) { act.diagnostics = append(act.diagnostics, d) },
ImportObjectFact: facts.ImportObjectFact, ImportObjectFact: facts.ImportObjectFact,
ExportObjectFact: facts.ExportObjectFact, ExportObjectFact: facts.ExportObjectFact,
AllObjectFacts: facts.AllObjectFacts,
ImportPackageFact: facts.ImportPackageFact, ImportPackageFact: facts.ImportPackageFact,
ExportPackageFact: facts.ExportPackageFact, ExportPackageFact: facts.ExportPackageFact,
AllPackageFacts: facts.AllPackageFacts,
} }
t0 := time.Now() t0 := time.Now()


@@ -149,7 +149,7 @@ func (b *builder) branchStmt(s *ast.BranchStmt) {
} }
case token.FALLTHROUGH: case token.FALLTHROUGH:
for t := b.targets; t != nil && block == nil; t = t.tail { for t := b.targets; t != nil; t = t.tail {
block = t._fallthrough block = t._fallthrough
} }


@@ -811,15 +811,7 @@ func (imp *importer) doImport(from *PackageInfo, to string) (*types.Package, err
// Import of incomplete package: this indicates a cycle. // Import of incomplete package: this indicates a cycle.
fromPath := from.Pkg.Path() fromPath := from.Pkg.Path()
if cycle := imp.findPath(path, fromPath); cycle != nil { if cycle := imp.findPath(path, fromPath); cycle != nil {
// Normalize cycle: start from alphabetically largest node. cycle = append([]string{fromPath}, cycle...)
pos, start := -1, ""
for i, s := range cycle {
if pos < 0 || s > start {
pos, start = i, s
}
}
cycle = append(cycle, cycle[:pos]...)[pos:] // rotate cycle to start from largest
cycle = append(cycle, cycle[0]) // add start node to end to show cycliness
return nil, fmt.Errorf("import cycle: %s", strings.Join(cycle, " -> ")) return nil, fmt.Errorf("import cycle: %s", strings.Join(cycle, " -> "))
} }
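The rotation one-liner in the longer version above normalizes a detected import cycle so the report always starts from the alphabetically largest node and repeats that node at the end. A standalone illustration of the same idiom; the function name normalize is invented for the example.

	package main

	import (
		"fmt"
		"strings"
	)

	func normalize(cycle []string) string {
		// Find the index of the alphabetically largest node.
		pos, start := -1, ""
		for i, s := range cycle {
			if pos < 0 || s > start {
				pos, start = i, s
			}
		}
		// Append the prefix to the end, then slice it off the front:
		// this rotates the cycle so it begins at pos.
		cycle = append(cycle, cycle[:pos]...)[pos:]
		// Repeat the first node at the end to show the cycle closing.
		cycle = append(cycle, cycle[0])
		return strings.Join(cycle, " -> ")
	}

	func main() {
		fmt.Println(normalize([]string{"b", "c", "a"})) // c -> a -> b -> c
	}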


@@ -16,28 +16,13 @@ import (
"strings" "strings"
) )
// The Driver Protocol // Driver
//
// The driver, given the inputs to a call to Load, returns metadata about the packages specified.
// This allows for different build systems to support go/packages by telling go/packages how the
// packages' source is organized.
// The driver is a binary, either specified by the GOPACKAGESDRIVER environment variable or in
// the path as gopackagesdriver. It's given the inputs to load in its argv. See the package
// documentation in doc.go for the full description of the patterns that need to be supported.
// A driver receives as a JSON-serialized driverRequest struct in standard input and will
// produce a JSON-serialized driverResponse (see definition in packages.go) in its standard output.
// driverRequest is used to provide the portion of Load's Config that is needed by a driver.
type driverRequest struct { type driverRequest struct {
Command string `json:"command"`
Mode LoadMode `json:"mode"` Mode LoadMode `json:"mode"`
// Env specifies the environment the underlying build system should be run in.
Env []string `json:"env"` Env []string `json:"env"`
// BuildFlags are flags that should be passed to the underlying build system.
BuildFlags []string `json:"build_flags"` BuildFlags []string `json:"build_flags"`
// Tests specifies whether the patterns should also return test packages.
Tests bool `json:"tests"` Tests bool `json:"tests"`
// Overlay maps file paths (relative to the driver's working directory) to the byte contents
// of overlay files.
Overlay map[string][]byte `json:"overlay"` Overlay map[string][]byte `json:"overlay"`
} }
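The driver protocol described above is small enough to sketch end to end: the driver named by GOPACKAGESDRIVER (or a gopackagesdriver binary on PATH) receives the load patterns in argv and a JSON driverRequest on stdin, and must print a JSON driverResponse on stdout. The struct definitions below are trimmed, illustrative stand-ins for the real driverRequest and driverResponse types, and the pattern handling is a placeholder for a real build-system query.

	package main

	import (
		"encoding/json"
		"log"
		"os"
	)

	// request mirrors a subset of driverRequest's fields.
	type request struct {
		Mode       int               `json:"mode"`
		Env        []string          `json:"env"`
		BuildFlags []string          `json:"build_flags"`
		Tests      bool              `json:"tests"`
		Overlay    map[string][]byte `json:"overlay"`
	}

	// pkg and response stand in for the Package and driverResponse shapes
	// consumed by go/packages; only a few fields are shown.
	type pkg struct {
		ID      string   `json:"ID"`
		PkgPath string   `json:"PkgPath,omitempty"`
		GoFiles []string `json:"GoFiles,omitempty"`
	}

	type response struct {
		Roots    []string `json:"Roots,omitempty"`
		Packages []*pkg   `json:"Packages"`
	}

	func main() {
		patterns := os.Args[1:] // the patterns passed to Load

		var req request
		if err := json.NewDecoder(os.Stdin).Decode(&req); err != nil {
			log.Fatal(err)
		}

		// A real driver would ask its build system to resolve the patterns,
		// honoring req.Mode, req.Overlay, and so on.
		var resp response
		for _, p := range patterns {
			resp.Roots = append(resp.Roots, p)
			resp.Packages = append(resp.Packages, &pkg{ID: p, PkgPath: p})
		}
		if err := json.NewEncoder(os.Stdout).Encode(&resp); err != nil {
			log.Fatal(err)
		}
	}

Pointing GOPACKAGESDRIVER at such a binary makes go/packages consult it instead of the go command.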


@@ -316,7 +316,9 @@ func runNamedQueries(cfg *Config, driver driver, response *responseDeduper, quer
startWalk := time.Now() startWalk := time.Now()
gopathwalk.Walk(roots, add, gopathwalk.Options{ModulesEnabled: modRoot != "", Debug: debug}) gopathwalk.Walk(roots, add, gopathwalk.Options{ModulesEnabled: modRoot != "", Debug: debug})
cfg.Logf("%v for walk", time.Since(startWalk)) if debug {
log.Printf("%v for walk", time.Since(startWalk))
}
// Weird special case: the top-level package in a module will be in // Weird special case: the top-level package in a module will be in
// whatever directory the user checked the repository out into. It's // whatever directory the user checked the repository out into. It's
@ -757,9 +759,11 @@ func invokeGo(cfg *Config, args ...string) (*bytes.Buffer, error) {
cmd.Dir = cfg.Dir cmd.Dir = cfg.Dir
cmd.Stdout = stdout cmd.Stdout = stdout
cmd.Stderr = stderr cmd.Stderr = stderr
if debug {
defer func(start time.Time) { defer func(start time.Time) {
cfg.Logf("%s for %v, stderr: <<%s>>\n", time.Since(start), cmdDebugStr(cmd, args...), stderr) log.Printf("%s for %v, stderr: <<%s>>\n", time.Since(start), cmdDebugStr(cmd, args...), stderr)
}(time.Now()) }(time.Now())
}
if err := cmd.Run(); err != nil { if err := cmd.Run(); err != nil {
// Check for 'go' executable not being found. // Check for 'go' executable not being found.


@@ -103,12 +103,6 @@ type Config struct {
// If Context is nil, the load cannot be cancelled. // If Context is nil, the load cannot be cancelled.
Context context.Context Context context.Context
// Logf is the logger for the config.
// If the user provides a logger, debug logging is enabled.
// If the GOPACKAGESDEBUG environment variable is set to true,
// but the logger is nil, default to log.Printf.
Logf func(format string, args ...interface{})
// Dir is the directory in which to run the build system's query tool // Dir is the directory in which to run the build system's query tool
// that provides information about the packages. // that provides information about the packages.
// If Dir is empty, the tool is run in the current directory. // If Dir is empty, the tool is run in the current directory.
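The Logf field described above gives callers a per-Config debug logger, with GOPACKAGESDEBUG=true as the environment fallback when no logger is set. A minimal sketch of opting in from client code, assuming a go/packages version that includes Config.Logf (as in the Config shown above); the load pattern is only an example.

	package main

	import (
		"fmt"
		"log"

		"golang.org/x/tools/go/packages"
	)

	func main() {
		cfg := &packages.Config{
			Mode: packages.NeedName | packages.NeedFiles,
			Logf: log.Printf, // route the loader's debug output to the standard logger
		}
		pkgs, err := packages.Load(cfg, "golang.org/x/tools/cmd/digraph")
		if err != nil {
			log.Fatal(err)
		}
		for _, p := range pkgs {
			fmt.Println(p.ID, p.GoFiles)
		}
	}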
@ -435,17 +429,6 @@ func newLoader(cfg *Config) *loader {
} }
if cfg != nil { if cfg != nil {
ld.Config = *cfg ld.Config = *cfg
// If the user has provided a logger, use it.
ld.Config.Logf = cfg.Logf
}
if ld.Config.Logf == nil {
// If the GOPACKAGESDEBUG environment variable is set to true,
// but the user has not provided a logger, default to log.Printf.
if debug {
ld.Config.Logf = log.Printf
} else {
ld.Config.Logf = func(format string, args ...interface{}) {}
}
} }
if ld.Config.Mode == 0 { if ld.Config.Mode == 0 {
ld.Config.Mode = NeedName | NeedFiles | NeedCompiledGoFiles // Preserve zero behavior of Mode for backwards compatibility. ld.Config.Mode = NeedName | NeedFiles | NeedCompiledGoFiles // Preserve zero behavior of Mode for backwards compatibility.


@@ -930,12 +930,6 @@ func testNewPackagesInOverlay(t *testing.T, exporter packagestest.Exporter) {
"b/b.go": `package b; import "golang.org/fake/c"; const B = "b" + c.C`, "b/b.go": `package b; import "golang.org/fake/c"; const B = "b" + c.C`,
"c/c.go": `package c; const C = "c"`, "c/c.go": `package c; const C = "c"`,
"d/d.go": `package d; const D = "d"`, "d/d.go": `package d; const D = "d"`,
// TODO: Remove these temporary files when golang.org/issue/33157 is resolved.
filepath.Join("e/e_temp.go"): ``,
filepath.Join("f/f_temp.go"): ``,
filepath.Join("g/g_temp.go"): ``,
filepath.Join("h/h_temp.go"): ``,
}}}) }}})
defer exported.Cleanup() defer exported.Cleanup()
@ -992,11 +986,7 @@ func testNewPackagesInOverlay(t *testing.T, exporter packagestest.Exporter) {
} { } {
exported.Config.Overlay = test.overlay exported.Config.Overlay = test.overlay
exported.Config.Mode = packages.LoadAllSyntax exported.Config.Mode = packages.LoadAllSyntax
exported.Config.Logf = t.Logf initial, err := packages.Load(exported.Config, "golang.org/fake/e")
// With an overlay, we don't know the expected import path,
// so load with the absolute path of the directory.
initial, err := packages.Load(exported.Config, filepath.Join(dir, "e"))
if err != nil { if err != nil {
t.Error(err) t.Error(err)
continue continue


@@ -2,6 +2,6 @@ module golang.org/x/tools/gopls
go 1.11 go 1.11
require golang.org/x/tools v0.0.0-20190723021737-8bb11ff117ca require golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0
replace golang.org/x/tools => ../ replace golang.org/x/tools => ../


@@ -1,8 +1,7 @@
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/sync v0.0.0-20190423024810-112230192c58 h1:8gQV6CLnAEikrhgkHFbMAEhagSSnXWGV915qUMm9mrU= golang.org/x/sync v0.0.0-20190423024810-112230192c58 h1:8gQV6CLnAEikrhgkHFbMAEhagSSnXWGV915qUMm9mrU=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/tools v0.0.0-20190723021737-8bb11ff117ca h1:SqwJrz6xPBlCUltcEHz2/p01HRPR+VGD+aYLikk8uas= golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190723021737-8bb11ff117ca/go.mod h1:jcCCGcm9btYwXyDqrUWc6MKQKKGJCWEQ3AfLSRIbEuI=


@@ -17,5 +17,5 @@ import (
) )
func main() { func main() {
tool.Main(context.Background(), cmd.New("gopls", "", nil), os.Args[1:]) tool.Main(context.Background(), cmd.New("", nil), os.Args[1:])
} }


@@ -13,6 +13,7 @@ import (
"go/parser" "go/parser"
"go/token" "go/token"
"io/ioutil" "io/ioutil"
"log"
"os" "os"
"os/exec" "os/exec"
"path" "path"
@ -67,19 +68,6 @@ func importGroup(env *ProcessEnv, importPath string) int {
return 0 return 0
} }
type importFixType int
const (
addImport importFixType = iota
deleteImport
setImportName
)
type importFix struct {
info importInfo
fixType importFixType
}
// An importInfo represents a single import statement. // An importInfo represents a single import statement.
type importInfo struct { type importInfo struct {
importPath string // import path, e.g. "crypto/rand". importPath string // import path, e.g. "crypto/rand".
@ -181,8 +169,8 @@ func collectReferences(f *ast.File) references {
return refs return refs
} }
// collectImports returns all the imports in f. // collectImports returns all the imports in f, keyed by their package name as
// Unnamed imports (., _) and "C" are ignored. // determined by pathToName. Unnamed imports (., _) and "C" are ignored.
func collectImports(f *ast.File) []*importInfo { func collectImports(f *ast.File) []*importInfo {
var imports []*importInfo var imports []*importInfo
for _, imp := range f.Imports { for _, imp := range f.Imports {
@ -258,12 +246,6 @@ type pass struct {
// loadPackageNames saves the package names for everything referenced by imports. // loadPackageNames saves the package names for everything referenced by imports.
func (p *pass) loadPackageNames(imports []*importInfo) error { func (p *pass) loadPackageNames(imports []*importInfo) error {
if p.env.Debug {
p.env.Logf("loading package names for %v packages", len(imports))
defer func() {
p.env.Logf("done loading package names for %v packages", len(imports))
}()
}
var unknown []string var unknown []string
for _, imp := range imports { for _, imp := range imports {
if _, ok := p.knownPackages[imp.importPath]; ok { if _, ok := p.knownPackages[imp.importPath]; ok {
@ -272,7 +254,7 @@ func (p *pass) loadPackageNames(imports []*importInfo) error {
unknown = append(unknown, imp.importPath) unknown = append(unknown, imp.importPath)
} }
names, err := p.env.GetResolver().loadPackageNames(unknown, p.srcDir) names, err := p.env.getResolver().loadPackageNames(unknown, p.srcDir)
if err != nil { if err != nil {
return err return err
} }
@ -303,7 +285,7 @@ func (p *pass) importIdentifier(imp *importInfo) string {
// load reads in everything necessary to run a pass, and reports whether the // load reads in everything necessary to run a pass, and reports whether the
// file already has all the imports it needs. It fills in p.missingRefs with the // file already has all the imports it needs. It fills in p.missingRefs with the
// file's missing symbols, if any, or removes unused imports if not. // file's missing symbols, if any, or removes unused imports if not.
func (p *pass) load() ([]*importFix, bool) { func (p *pass) load() bool {
p.knownPackages = map[string]*packageInfo{} p.knownPackages = map[string]*packageInfo{}
p.missingRefs = references{} p.missingRefs = references{}
p.existingImports = map[string]*importInfo{} p.existingImports = map[string]*importInfo{}
@ -331,9 +313,9 @@ func (p *pass) load() ([]*importFix, bool) {
err := p.loadPackageNames(append(imports, p.candidates...)) err := p.loadPackageNames(append(imports, p.candidates...))
if err != nil { if err != nil {
if p.env.Debug { if p.env.Debug {
p.env.Logf("loading package names: %v", err) log.Printf("loading package names: %v", err)
} }
return nil, false return false
} }
} }
for _, imp := range imports { for _, imp := range imports {
@ -352,16 +334,16 @@ func (p *pass) load() ([]*importFix, bool) {
} }
} }
if len(p.missingRefs) != 0 { if len(p.missingRefs) != 0 {
return nil, false return false
} }
return p.fix() return p.fix()
} }
// fix attempts to satisfy missing imports using p.candidates. If it finds // fix attempts to satisfy missing imports using p.candidates. If it finds
// everything, or if p.lastTry is true, it updates fixes to add the imports it found, // everything, or if p.lastTry is true, it adds the imports it found,
// delete anything unused, and update import names, and returns true. // removes anything unused, and returns true.
func (p *pass) fix() ([]*importFix, bool) { func (p *pass) fix() bool {
// Find missing imports. // Find missing imports.
var selected []*importInfo var selected []*importInfo
for left, rights := range p.missingRefs { for left, rights := range p.missingRefs {
@ -371,11 +353,10 @@ func (p *pass) fix() ([]*importFix, bool) {
} }
if !p.lastTry && len(selected) != len(p.missingRefs) { if !p.lastTry && len(selected) != len(p.missingRefs) {
return nil, false return false
} }
// Found everything, or giving up. Add the new imports and remove any unused. // Found everything, or giving up. Add the new imports and remove any unused.
var fixes []*importFix
for _, imp := range p.existingImports { for _, imp := range p.existingImports {
// We deliberately ignore globals here, because we can't be sure // We deliberately ignore globals here, because we can't be sure
// they're in the same package. People do things like put multiple // they're in the same package. People do things like put multiple
@ -383,77 +364,27 @@ func (p *pass) fix() ([]*importFix, bool) {
// remove imports if they happen to have the same name as a var in // remove imports if they happen to have the same name as a var in
// a different package. // a different package.
if _, ok := p.allRefs[p.importIdentifier(imp)]; !ok { if _, ok := p.allRefs[p.importIdentifier(imp)]; !ok {
fixes = append(fixes, &importFix{ astutil.DeleteNamedImport(p.fset, p.f, imp.name, imp.importPath)
info: *imp,
fixType: deleteImport,
})
continue
}
// An existing import may need to update its import name to be correct.
if name := p.importSpecName(imp); name != imp.name {
fixes = append(fixes, &importFix{
info: importInfo{
name: name,
importPath: imp.importPath,
},
fixType: setImportName,
})
} }
} }
for _, imp := range selected { for _, imp := range selected {
fixes = append(fixes, &importFix{ astutil.AddNamedImport(p.fset, p.f, imp.name, imp.importPath)
info: importInfo{
name: p.importSpecName(imp),
importPath: imp.importPath,
},
fixType: addImport,
})
} }
return fixes, true if p.loadRealPackageNames {
} for _, imp := range p.f.Imports {
if imp.Name != nil {
// importSpecName gets the import name of imp in the import spec. continue
// }
// When the import identifier matches the assumed import name, the import name does path := strings.Trim(imp.Path.Value, `""`)
// not appear in the import spec. ident := p.importIdentifier(&importInfo{importPath: path})
func (p *pass) importSpecName(imp *importInfo) string { if ident != importPathToAssumedName(path) {
// If we did not load the real package names, or the name is already set, imp.Name = &ast.Ident{Name: ident, NamePos: imp.Pos()}
// we just return the existing name. }
if !p.loadRealPackageNames || imp.name != "" { }
return imp.name
} }
ident := p.importIdentifier(imp)
if ident == importPathToAssumedName(imp.importPath) {
return "" // ident not needed since the assumed and real names are the same.
}
return ident
}
// apply will perform the fixes on f in order.
func apply(fset *token.FileSet, f *ast.File, fixes []*importFix) bool {
for _, fix := range fixes {
switch fix.fixType {
case deleteImport:
astutil.DeleteNamedImport(fset, f, fix.info.name, fix.info.importPath)
case addImport:
astutil.AddNamedImport(fset, f, fix.info.name, fix.info.importPath)
case setImportName:
// Find the matching import path and change the name.
for _, spec := range f.Imports {
path := strings.Trim(spec.Path.Value, `"`)
if path == fix.info.importPath {
spec.Name = &ast.Ident{
Name: fix.info.name,
NamePos: spec.Pos(),
}
}
}
}
}
return true return true
} }
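The apply function above maps each importFix onto an astutil call (AddNamedImport, DeleteNamedImport, or an in-place rename of the matching import spec). A small, self-contained illustration of the add case; the source text and file name are made up for the example.

	package main

	import (
		"go/format"
		"go/parser"
		"go/token"
		"log"
		"os"

		"golang.org/x/tools/go/ast/astutil"
	)

	func main() {
		src := "package demo\n\nfunc hello() { fmt.Println(\"hi\") }\n"
		fset := token.NewFileSet()
		f, err := parser.ParseFile(fset, "demo.go", src, parser.ParseComments)
		if err != nil {
			log.Fatal(err)
		}
		// The equivalent of an addImport fix with no explicit name:
		// inserts `import "fmt"` into the file's import declarations.
		astutil.AddNamedImport(fset, f, "", "fmt")
		if err := format.Node(os.Stdout, fset, f); err != nil {
			log.Fatal(err)
		}
	}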
@ -506,24 +437,13 @@ func (p *pass) addCandidate(imp *importInfo, pkg *packageInfo) {
var fixImports = fixImportsDefault var fixImports = fixImportsDefault
func fixImportsDefault(fset *token.FileSet, f *ast.File, filename string, env *ProcessEnv) error { func fixImportsDefault(fset *token.FileSet, f *ast.File, filename string, env *ProcessEnv) error {
fixes, err := getFixes(fset, f, filename, env)
if err != nil {
return err
}
apply(fset, f, fixes)
return err
}
// getFixes gets the import fixes that need to be made to f in order to fix the imports.
// It does not modify the ast.
func getFixes(fset *token.FileSet, f *ast.File, filename string, env *ProcessEnv) ([]*importFix, error) {
abs, err := filepath.Abs(filename) abs, err := filepath.Abs(filename)
if err != nil { if err != nil {
return nil, err return err
} }
srcDir := filepath.Dir(abs) srcDir := filepath.Dir(abs)
if env.Debug { if env.Debug {
env.Logf("fixImports(filename=%q), abs=%q, srcDir=%q ...", filename, abs, srcDir) log.Printf("fixImports(filename=%q), abs=%q, srcDir=%q ...", filename, abs, srcDir)
} }
// First pass: looking only at f, and using the naive algorithm to // First pass: looking only at f, and using the naive algorithm to
@ -531,8 +451,8 @@ func getFixes(fset *token.FileSet, f *ast.File, filename string, env *ProcessEnv
// complete. We can't add any imports yet, because we don't know // complete. We can't add any imports yet, because we don't know
// if missing references are actually package vars. // if missing references are actually package vars.
p := &pass{fset: fset, f: f, srcDir: srcDir} p := &pass{fset: fset, f: f, srcDir: srcDir}
if fixes, done := p.load(); done { if p.load() {
return fixes, nil return nil
} }
otherFiles := parseOtherFiles(fset, srcDir, filename) otherFiles := parseOtherFiles(fset, srcDir, filename)
@ -540,15 +460,15 @@ func getFixes(fset *token.FileSet, f *ast.File, filename string, env *ProcessEnv
// Second pass: add information from other files in the same package, // Second pass: add information from other files in the same package,
// like their package vars and imports. // like their package vars and imports.
p.otherFiles = otherFiles p.otherFiles = otherFiles
if fixes, done := p.load(); done { if p.load() {
return fixes, nil return nil
} }
// Now we can try adding imports from the stdlib. // Now we can try adding imports from the stdlib.
p.assumeSiblingImportsValid() p.assumeSiblingImportsValid()
addStdlibCandidates(p, p.missingRefs) addStdlibCandidates(p, p.missingRefs)
if fixes, done := p.fix(); done { if p.fix() {
return fixes, nil return nil
} }
// Third pass: get real package names where we had previously used // Third pass: get real package names where we had previously used
@ -557,25 +477,25 @@ func getFixes(fset *token.FileSet, f *ast.File, filename string, env *ProcessEnv
p = &pass{fset: fset, f: f, srcDir: srcDir, env: env} p = &pass{fset: fset, f: f, srcDir: srcDir, env: env}
p.loadRealPackageNames = true p.loadRealPackageNames = true
p.otherFiles = otherFiles p.otherFiles = otherFiles
if fixes, done := p.load(); done { if p.load() {
return fixes, nil return nil
} }
addStdlibCandidates(p, p.missingRefs) addStdlibCandidates(p, p.missingRefs)
p.assumeSiblingImportsValid() p.assumeSiblingImportsValid()
if fixes, done := p.fix(); done { if p.fix() {
return fixes, nil return nil
} }
// Go look for candidates in $GOPATH, etc. We don't necessarily load // Go look for candidates in $GOPATH, etc. We don't necessarily load
// the real exports of sibling imports, so keep assuming their contents. // the real exports of sibling imports, so keep assuming their contents.
if err := addExternalCandidates(p, p.missingRefs, filename); err != nil { if err := addExternalCandidates(p, p.missingRefs, filename); err != nil {
return nil, err return err
} }
p.lastTry = true p.lastTry = true
fixes, _ := p.fix() p.fix()
return fixes, nil return nil
} }
// ProcessEnv contains environment variables and settings that affect the use of // ProcessEnv contains environment variables and settings that affect the use of
@ -592,10 +512,7 @@ type ProcessEnv struct {
// If true, use go/packages regardless of the environment. // If true, use go/packages regardless of the environment.
ForceGoPackages bool ForceGoPackages bool
// Logf is the default logger for the ProcessEnv. resolver resolver
Logf func(format string, args ...interface{})
resolver Resolver
} }
func (e *ProcessEnv) env() []string { func (e *ProcessEnv) env() []string {
@ -617,7 +534,7 @@ func (e *ProcessEnv) env() []string {
return env return env
} }
func (e *ProcessEnv) GetResolver() Resolver { func (e *ProcessEnv) getResolver() resolver {
if e.resolver != nil { if e.resolver != nil {
return e.resolver return e.resolver
} }
@ -631,7 +548,7 @@ func (e *ProcessEnv) GetResolver() Resolver {
e.resolver = &gopathResolver{env: e} e.resolver = &gopathResolver{env: e}
return e.resolver return e.resolver
} }
e.resolver = &ModuleResolver{env: e} e.resolver = &moduleResolver{env: e}
return e.resolver return e.resolver
} }
@ -660,7 +577,7 @@ func (e *ProcessEnv) invokeGo(args ...string) (*bytes.Buffer, error) {
cmd.Dir = e.WorkingDir cmd.Dir = e.WorkingDir
if e.Debug { if e.Debug {
defer func(start time.Time) { e.Logf("%s for %v", time.Since(start), cmdDebugStr(cmd)) }(time.Now()) defer func(start time.Time) { log.Printf("%s for %v", time.Since(start), cmdDebugStr(cmd)) }(time.Now())
} }
if err := cmd.Run(); err != nil { if err := cmd.Run(); err != nil {
return nil, fmt.Errorf("running go: %v (stderr:\n%s)", err, stderr) return nil, fmt.Errorf("running go: %v (stderr:\n%s)", err, stderr)
@ -700,23 +617,20 @@ func addStdlibCandidates(pass *pass, refs references) {
} }
} }
// A Resolver does the build-system-specific parts of goimports. // A resolver does the build-system-specific parts of goimports.
type Resolver interface { type resolver interface {
// loadPackageNames loads the package names in importPaths. // loadPackageNames loads the package names in importPaths.
loadPackageNames(importPaths []string, srcDir string) (map[string]string, error) loadPackageNames(importPaths []string, srcDir string) (map[string]string, error)
// scan finds (at least) the packages satisfying refs. The returned slice is unordered. // scan finds (at least) the packages satisfying refs. The returned slice is unordered.
scan(refs references) ([]*pkg, error) scan(refs references) ([]*pkg, error)
} }
// gopackagesResolver implements resolver for GOPATH and module workspaces using go/packages. // gopathResolver implements resolver for GOPATH and module workspaces using go/packages.
type goPackagesResolver struct { type goPackagesResolver struct {
env *ProcessEnv env *ProcessEnv
} }
func (r *goPackagesResolver) loadPackageNames(importPaths []string, srcDir string) (map[string]string, error) { func (r *goPackagesResolver) loadPackageNames(importPaths []string, srcDir string) (map[string]string, error) {
if len(importPaths) == 0 {
return nil, nil
}
cfg := r.env.newPackagesConfig(packages.LoadFiles) cfg := r.env.newPackagesConfig(packages.LoadFiles)
pkgs, err := packages.Load(cfg, importPaths...) pkgs, err := packages.Load(cfg, importPaths...)
if err != nil { if err != nil {
@ -761,7 +675,7 @@ func (r *goPackagesResolver) scan(refs references) ([]*pkg, error) {
} }
func addExternalCandidates(pass *pass, refs references, filename string) error { func addExternalCandidates(pass *pass, refs references, filename string) error {
dirScan, err := pass.env.GetResolver().scan(refs) dirScan, err := pass.env.getResolver().scan(refs)
if err != nil { if err != nil {
return err return err
} }
@ -870,7 +784,7 @@ func (r *gopathResolver) loadPackageNames(importPaths []string, srcDir string) (
return names, nil return names, nil
} }
// importPathToName finds out the actual package name, as declared in its .go files. // importPathToNameGoPath finds out the actual package name, as declared in its .go files.
// If there's a problem, it returns "". // If there's a problem, it returns "".
func importPathToName(env *ProcessEnv, importPath, srcDir string) (packageName string) { func importPathToName(env *ProcessEnv, importPath, srcDir string) (packageName string) {
// Fast path for standard library without going to disk. // Fast path for standard library without going to disk.
@ -890,8 +804,8 @@ func importPathToName(env *ProcessEnv, importPath, srcDir string) (packageName s
} }
// packageDirToName is a faster version of build.Import if // packageDirToName is a faster version of build.Import if
// the only thing desired is the package name. Given a directory, // the only thing desired is the package name. It uses build.FindOnly
// packageDirToName then only parses one file in the package, // to find the directory and then only parses one file in the package,
// trusting that the files in the directory are consistent. // trusting that the files in the directory are consistent.
func packageDirToName(dir string) (packageName string, err error) { func packageDirToName(dir string) (packageName string, err error) {
d, err := os.Open(dir) d, err := os.Open(dir)
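packageDirToName, described above, avoids a full build.Import by parsing just one file's package clause. A simplified sketch of that idea; the helper name and the file-selection policy (first non-test .go file that parses) are editorial choices, not the function's exact behavior.

	package main

	import (
		"fmt"
		"go/parser"
		"go/token"
		"log"
		"os"
		"path/filepath"
		"strings"
	)

	// dirPackageName reports the package name declared by the first
	// non-test .go file in dir that parses successfully.
	func dirPackageName(dir string) (string, error) {
		entries, err := os.ReadDir(dir)
		if err != nil {
			return "", err
		}
		for _, e := range entries {
			name := e.Name()
			if e.IsDir() || !strings.HasSuffix(name, ".go") || strings.HasSuffix(name, "_test.go") {
				continue
			}
			// PackageClauseOnly stops after "package <name>", so only the
			// beginning of the file is read and parsed.
			f, err := parser.ParseFile(token.NewFileSet(), filepath.Join(dir, name), nil, parser.PackageClauseOnly)
			if err != nil {
				continue // try the next file
			}
			return f.Name.Name, nil
		}
		return "", fmt.Errorf("no parsable .go files in %s", dir)
	}

	func main() {
		name, err := dirPackageName(".")
		if err != nil {
			log.Fatal(err)
		}
		fmt.Println(name)
	}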
@ -1029,7 +943,7 @@ func VendorlessPath(ipath string) string {
// It returns nil on error or if the package name in dir does not match expectPackage. // It returns nil on error or if the package name in dir does not match expectPackage.
func loadExports(ctx context.Context, env *ProcessEnv, expectPackage string, pkg *pkg) (map[string]bool, error) { func loadExports(ctx context.Context, env *ProcessEnv, expectPackage string, pkg *pkg) (map[string]bool, error) {
if env.Debug { if env.Debug {
env.Logf("loading exports in dir %s (seeking package %s)", pkg.dir, expectPackage) log.Printf("loading exports in dir %s (seeking package %s)", pkg.dir, expectPackage)
} }
if pkg.goPackage != nil { if pkg.goPackage != nil {
exports := map[string]bool{} exports := map[string]bool{}
@ -1107,7 +1021,7 @@ func loadExports(ctx context.Context, env *ProcessEnv, expectPackage string, pkg
exportList = append(exportList, k) exportList = append(exportList, k)
} }
sort.Strings(exportList) sort.Strings(exportList)
env.Logf("loaded exports in dir %v (package %v): %v", pkg.dir, expectPackage, strings.Join(exportList, ", ")) log.Printf("loaded exports in dir %v (package %v): %v", pkg.dir, expectPackage, strings.Join(exportList, ", "))
} }
return exports, nil return exports, nil
} }
@ -1144,7 +1058,7 @@ func findImport(ctx context.Context, pass *pass, dirScan []*pkg, pkgName string,
sort.Sort(byDistanceOrImportPathShortLength(candidates)) sort.Sort(byDistanceOrImportPathShortLength(candidates))
if pass.env.Debug { if pass.env.Debug {
for i, c := range candidates { for i, c := range candidates {
pass.env.Logf("%s candidate %d/%d: %v in %v", pkgName, i+1, len(candidates), c.pkg.importPathShort, c.pkg.dir) log.Printf("%s candidate %d/%d: %v in %v", pkgName, i+1, len(candidates), c.pkg.importPathShort, c.pkg.dir)
} }
} }
@ -1184,7 +1098,7 @@ func findImport(ctx context.Context, pass *pass, dirScan []*pkg, pkgName string,
exports, err := loadExports(ctx, pass.env, pkgName, c.pkg) exports, err := loadExports(ctx, pass.env, pkgName, c.pkg)
if err != nil { if err != nil {
if pass.env.Debug { if pass.env.Debug {
pass.env.Logf("loading exports in dir %s (seeking package %s): %v", c.pkg.dir, pkgName, err) log.Printf("loading exports in dir %s (seeking package %s): %v", c.pkg.dir, pkgName, err)
} }
resc <- nil resc <- nil
return return


@@ -1855,7 +1855,7 @@ func TestImportPathToNameGoPathParse(t *testing.T) {
if strings.Contains(t.Name(), "GoPackages") { if strings.Contains(t.Name(), "GoPackages") {
t.Skip("go/packages does not ignore package main") t.Skip("go/packages does not ignore package main")
} }
r := t.env.GetResolver() r := t.env.getResolver()
srcDir := filepath.Dir(t.exported.File("example.net/pkg", "z.go")) srcDir := filepath.Dir(t.exported.File("example.net/pkg", "z.go"))
names, err := r.loadPackageNames([]string{"example.net/pkg"}, srcDir) names, err := r.loadPackageNames([]string{"example.net/pkg"}, srcDir)
if err != nil { if err != nil {


@@ -19,7 +19,6 @@ import (
"go/token" "go/token"
"io" "io"
"io/ioutil" "io/ioutil"
"log"
"regexp" "regexp"
"strconv" "strconv"
"strings" "strings"
@ -51,11 +50,6 @@ func Process(filename string, src []byte, opt *Options) ([]byte, error) {
src = b src = b
} }
// Set the logger if the user has not provided it.
if opt.Env.Logf == nil {
opt.Env.Logf = log.Printf
}
fileSet := token.NewFileSet() fileSet := token.NewFileSet()
file, adjust, err := parse(fileSet, filename, src, opt) file, adjust, err := parse(fileSet, filename, src, opt)
if err != nil { if err != nil {


@@ -4,6 +4,7 @@ import (
"bytes" "bytes"
"encoding/json" "encoding/json"
"io/ioutil" "io/ioutil"
"log"
"os" "os"
"path" "path"
"path/filepath" "path/filepath"
@ -18,39 +19,37 @@ import (
"golang.org/x/tools/internal/module" "golang.org/x/tools/internal/module"
) )
// ModuleResolver implements resolver for modules using the go command as little // moduleResolver implements resolver for modules using the go command as little
// as feasible. // as feasible.
type ModuleResolver struct { type moduleResolver struct {
env *ProcessEnv env *ProcessEnv
Initialized bool initialized bool
Main *ModuleJSON main *moduleJSON
ModsByModPath []*ModuleJSON // All modules, ordered by # of path components in module Path... modsByModPath []*moduleJSON // All modules, ordered by # of path components in module Path...
ModsByDir []*ModuleJSON // ...or Dir. modsByDir []*moduleJSON // ...or Dir.
ModCachePkgs map[string]*pkg // Packages in the mod cache, keyed by absolute directory.
} }
type ModuleJSON struct { type moduleJSON struct {
Path string // module path Path string // module path
Version string // module version Version string // module version
Versions []string // available module versions (with -versions) Versions []string // available module versions (with -versions)
Replace *ModuleJSON // replaced by this module Replace *moduleJSON // replaced by this module
Time *time.Time // time version was created Time *time.Time // time version was created
Update *ModuleJSON // available update, if any (with -u) Update *moduleJSON // available update, if any (with -u)
Main bool // is this the main module? Main bool // is this the main module?
Indirect bool // is this module only an indirect dependency of main module? Indirect bool // is this module only an indirect dependency of main module?
Dir string // directory holding files for this module, if any Dir string // directory holding files for this module, if any
GoMod string // path to go.mod file for this module, if any GoMod string // path to go.mod file for this module, if any
Error *ModuleErrorJSON // error loading module Error *moduleErrorJSON // error loading module
} }
type ModuleErrorJSON struct { type moduleErrorJSON struct {
Err string // the error itself Err string // the error itself
} }
func (r *ModuleResolver) init() error { func (r *moduleResolver) init() error {
if r.Initialized { if r.initialized {
return nil return nil
} }
stdout, err := r.env.invokeGo("list", "-m", "-json", "...") stdout, err := r.env.invokeGo("list", "-m", "-json", "...")
@ -58,47 +57,45 @@ func (r *ModuleResolver) init() error {
return err return err
} }
for dec := json.NewDecoder(stdout); dec.More(); { for dec := json.NewDecoder(stdout); dec.More(); {
mod := &ModuleJSON{} mod := &moduleJSON{}
if err := dec.Decode(mod); err != nil { if err := dec.Decode(mod); err != nil {
return err return err
} }
if mod.Dir == "" { if mod.Dir == "" {
if r.env.Debug { if r.env.Debug {
r.env.Logf("module %v has not been downloaded and will be ignored", mod.Path) log.Printf("module %v has not been downloaded and will be ignored", mod.Path)
} }
// Can't do anything with a module that's not downloaded. // Can't do anything with a module that's not downloaded.
continue continue
} }
r.ModsByModPath = append(r.ModsByModPath, mod) r.modsByModPath = append(r.modsByModPath, mod)
r.ModsByDir = append(r.ModsByDir, mod) r.modsByDir = append(r.modsByDir, mod)
if mod.Main { if mod.Main {
r.Main = mod r.main = mod
} }
} }
sort.Slice(r.ModsByModPath, func(i, j int) bool { sort.Slice(r.modsByModPath, func(i, j int) bool {
count := func(x int) int { count := func(x int) int {
return strings.Count(r.ModsByModPath[x].Path, "/") return strings.Count(r.modsByModPath[x].Path, "/")
} }
return count(j) < count(i) // descending order return count(j) < count(i) // descending order
}) })
sort.Slice(r.ModsByDir, func(i, j int) bool { sort.Slice(r.modsByDir, func(i, j int) bool {
count := func(x int) int { count := func(x int) int {
return strings.Count(r.ModsByDir[x].Dir, "/") return strings.Count(r.modsByDir[x].Dir, "/")
} }
return count(j) < count(i) // descending order return count(j) < count(i) // descending order
}) })
r.ModCachePkgs = make(map[string]*pkg) r.initialized = true
r.Initialized = true
return nil return nil
} }
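init above shells out to `go list -m -json`, whose output is a stream of JSON objects rather than a single array, so it is consumed with json.Decoder in a dec.More() loop. A standalone sketch of that decode; the struct below carries only a few of ModuleJSON's fields, and the pattern "all" is used here where the code above passes "...".

	package main

	import (
		"bytes"
		"encoding/json"
		"fmt"
		"log"
		"os/exec"
	)

	type moduleInfo struct {
		Path string // module path
		Dir  string // directory holding the module's files, if downloaded
		Main bool   // is this the main module?
	}

	func main() {
		out, err := exec.Command("go", "list", "-m", "-json", "all").Output()
		if err != nil {
			log.Fatal(err)
		}
		for dec := json.NewDecoder(bytes.NewReader(out)); dec.More(); {
			var m moduleInfo
			if err := dec.Decode(&m); err != nil {
				log.Fatal(err)
			}
			if m.Dir == "" {
				continue // not downloaded; nothing usable on disk
			}
			fmt.Printf("%s => %s (main=%v)\n", m.Path, m.Dir, m.Main)
		}
	}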
// findPackage returns the module and directory that contains the package at // findPackage returns the module and directory that contains the package at
// the given import path, or returns nil, "" if no module is in scope. // the given import path, or returns nil, "" if no module is in scope.
func (r *ModuleResolver) findPackage(importPath string) (*ModuleJSON, string) { func (r *moduleResolver) findPackage(importPath string) (*moduleJSON, string) {
for _, m := range r.ModsByModPath { for _, m := range r.modsByModPath {
if !strings.HasPrefix(importPath, m.Path) { if !strings.HasPrefix(importPath, m.Path) {
continue continue
} }
@ -127,7 +124,7 @@ func (r *ModuleResolver) findPackage(importPath string) (*ModuleJSON, string) {
// findModuleByDir returns the module that contains dir, or nil if no such // findModuleByDir returns the module that contains dir, or nil if no such
// module is in scope. // module is in scope.
func (r *ModuleResolver) findModuleByDir(dir string) *ModuleJSON { func (r *moduleResolver) findModuleByDir(dir string) *moduleJSON {
// This is quite tricky and may not be correct. dir could be: // This is quite tricky and may not be correct. dir could be:
// - a package in the main module. // - a package in the main module.
// - a replace target underneath the main module's directory. // - a replace target underneath the main module's directory.
@ -138,7 +135,7 @@ func (r *ModuleResolver) findModuleByDir(dir string) *ModuleJSON {
// - in /vendor/ in -mod=vendor mode. // - in /vendor/ in -mod=vendor mode.
// - nested module? Dunno. // - nested module? Dunno.
// Rumor has it that replace targets cannot contain other replace targets. // Rumor has it that replace targets cannot contain other replace targets.
for _, m := range r.ModsByDir { for _, m := range r.modsByDir {
if !strings.HasPrefix(dir, m.Dir) { if !strings.HasPrefix(dir, m.Dir) {
continue continue
} }
@ -154,7 +151,7 @@ func (r *ModuleResolver) findModuleByDir(dir string) *ModuleJSON {
// dirIsNestedModule reports if dir is contained in a nested module underneath // dirIsNestedModule reports if dir is contained in a nested module underneath
// mod, not actually in mod. // mod, not actually in mod.
func dirIsNestedModule(dir string, mod *ModuleJSON) bool { func dirIsNestedModule(dir string, mod *moduleJSON) bool {
if !strings.HasPrefix(dir, mod.Dir) { if !strings.HasPrefix(dir, mod.Dir) {
return false return false
} }
@ -180,7 +177,7 @@ func findModFile(dir string) string {
} }
} }
func (r *ModuleResolver) loadPackageNames(importPaths []string, srcDir string) (map[string]string, error) { func (r *moduleResolver) loadPackageNames(importPaths []string, srcDir string) (map[string]string, error) {
if err := r.init(); err != nil { if err := r.init(); err != nil {
return nil, err return nil, err
} }
@ -199,7 +196,7 @@ func (r *ModuleResolver) loadPackageNames(importPaths []string, srcDir string) (
return names, nil return names, nil
} }
func (r *ModuleResolver) scan(_ references) ([]*pkg, error) { func (r *moduleResolver) scan(_ references) ([]*pkg, error) {
if err := r.init(); err != nil { if err := r.init(); err != nil {
return nil, err return nil, err
} }
@ -208,15 +205,15 @@ func (r *ModuleResolver) scan(_ references) ([]*pkg, error) {
roots := []gopathwalk.Root{ roots := []gopathwalk.Root{
{filepath.Join(r.env.GOROOT, "/src"), gopathwalk.RootGOROOT}, {filepath.Join(r.env.GOROOT, "/src"), gopathwalk.RootGOROOT},
} }
if r.Main != nil { if r.main != nil {
roots = append(roots, gopathwalk.Root{r.Main.Dir, gopathwalk.RootCurrentModule}) roots = append(roots, gopathwalk.Root{r.main.Dir, gopathwalk.RootCurrentModule})
} }
for _, p := range filepath.SplitList(r.env.GOPATH) { for _, p := range filepath.SplitList(r.env.GOPATH) {
roots = append(roots, gopathwalk.Root{filepath.Join(p, "/pkg/mod"), gopathwalk.RootModuleCache}) roots = append(roots, gopathwalk.Root{filepath.Join(p, "/pkg/mod"), gopathwalk.RootModuleCache})
} }
// Walk replace targets, just in case they're not in any of the above. // Walk replace targets, just in case they're not in any of the above.
for _, mod := range r.ModsByModPath { for _, mod := range r.modsByModPath {
if mod.Replace != nil { if mod.Replace != nil {
roots = append(roots, gopathwalk.Root{mod.Dir, gopathwalk.RootOther}) roots = append(roots, gopathwalk.Root{mod.Dir, gopathwalk.RootOther})
} }
@ -236,15 +233,6 @@ func (r *ModuleResolver) scan(_ references) ([]*pkg, error) {
dupCheck[dir] = true dupCheck[dir] = true
absDir := dir
// Packages in the module cache are immutable. If we have
// already seen this package on a previous scan of the module
// cache, return that result.
if p, ok := r.ModCachePkgs[absDir]; ok {
result = append(result, p)
return
}
subdir := "" subdir := ""
if dir != root.Path { if dir != root.Path {
subdir = dir[len(root.Path)+len("/"):] subdir = dir[len(root.Path)+len("/"):]
@ -260,13 +248,13 @@ func (r *ModuleResolver) scan(_ references) ([]*pkg, error) {
} }
switch root.Type { switch root.Type {
case gopathwalk.RootCurrentModule: case gopathwalk.RootCurrentModule:
importPath = path.Join(r.Main.Path, filepath.ToSlash(subdir)) importPath = path.Join(r.main.Path, filepath.ToSlash(subdir))
case gopathwalk.RootModuleCache: case gopathwalk.RootModuleCache:
matches := modCacheRegexp.FindStringSubmatch(subdir) matches := modCacheRegexp.FindStringSubmatch(subdir)
modPath, err := module.DecodePath(filepath.ToSlash(matches[1])) modPath, err := module.DecodePath(filepath.ToSlash(matches[1]))
if err != nil { if err != nil {
if r.env.Debug { if r.env.Debug {
r.env.Logf("decoding module cache path %q: %v", subdir, err) log.Printf("decoding module cache path %q: %v", subdir, err)
} }
return return
} }
@ -311,18 +299,10 @@ func (r *ModuleResolver) scan(_ references) ([]*pkg, error) {
dir = canonicalDir dir = canonicalDir
} }
res := &pkg{ result = append(result, &pkg{
importPathShort: VendorlessPath(importPath), importPathShort: VendorlessPath(importPath),
dir: dir, dir: dir,
} })
switch root.Type {
case gopathwalk.RootModuleCache:
// Save the results of processing this directory.
r.ModCachePkgs[absDir] = res
}
result = append(result, res)
}, gopathwalk.Options{Debug: r.env.Debug, ModulesEnabled: true}) }, gopathwalk.Options{Debug: r.env.Debug, ModulesEnabled: true})
return result, nil return result, nil
} }
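The RootModuleCache branch above has to reverse the escaping the go command applies when it writes module directories under pkg/mod: capital letters are stored as '!' followed by the lowercase letter. The diff calls an internal module.DecodePath; here is a minimal sketch of the same decoding using the exported golang.org/x/mod/module API (my assumption; this tree uses its own internal copy), with an illustrative directory name:

package main

import (
	"fmt"
	"strings"

	"golang.org/x/mod/module"
)

func main() {
	// A directory name as it might appear under GOPATH/pkg/mod (illustrative value).
	subdir := "github.com/!burnt!sushi/toml@v0.3.1"

	escaped, version, _ := strings.Cut(subdir, "@")
	path, err := module.UnescapePath(escaped) // "!b" decodes back to "B", and so on
	if err != nil {
		panic(err)
	}
	fmt.Println(path, version) // github.com/BurntSushi/toml v0.3.1
}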

View File

@ -118,25 +118,6 @@ import _ "example.com"
mt.assertFound("example.com", "x") mt.assertFound("example.com", "x")
} }
// Tests that scanning the module cache > 1 time is able to find the same module.
func TestModMultipleScans(t *testing.T) {
mt := setup(t, `
-- go.mod --
module x
require example.com v1.0.0
-- x.go --
package x
import _ "example.com"
`, "")
defer mt.cleanup()
mt.assertScanFinds("example.com", "x")
mt.assertScanFinds("example.com", "x")
}
// Tests that -mod=vendor sort of works. Adapted from mod_getmode_vendor.txt. // Tests that -mod=vendor sort of works. Adapted from mod_getmode_vendor.txt.
func TestModeGetmodeVendor(t *testing.T) { func TestModeGetmodeVendor(t *testing.T) {
mt := setup(t, ` mt := setup(t, `
@ -159,7 +140,7 @@ import _ "rsc.io/quote"
mt.env.GOFLAGS = "" mt.env.GOFLAGS = ""
// Clear out the resolver's cache, since we've changed the environment. // Clear out the resolver's cache, since we've changed the environment.
mt.resolver = &ModuleResolver{env: mt.env} mt.resolver = &moduleResolver{env: mt.env}
mt.assertModuleFoundInDir("rsc.io/quote", "quote", `pkg.*mod.*/quote@.*$`) mt.assertModuleFoundInDir("rsc.io/quote", "quote", `pkg.*mod.*/quote@.*$`)
} }
@ -505,7 +486,7 @@ var proxyDir string
type modTest struct { type modTest struct {
*testing.T *testing.T
env *ProcessEnv env *ProcessEnv
resolver *ModuleResolver resolver *moduleResolver
cleanup func() cleanup func()
} }
@ -557,7 +538,7 @@ func setup(t *testing.T, main, wd string) *modTest {
return &modTest{ return &modTest{
T: t, T: t,
env: env, env: env,
resolver: &ModuleResolver{env: env}, resolver: &moduleResolver{env: env},
cleanup: func() { cleanup: func() {
_ = filepath.Walk(dir, func(path string, info os.FileInfo, err error) error { _ = filepath.Walk(dir, func(path string, info os.FileInfo, err error) error {
if err != nil { if err != nil {

View File

@ -1,124 +0,0 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package jsonrpc2
import (
"context"
)
// Handler is the interface used to hook into the message handling of an rpc
// connection.
type Handler interface {
// Deliver is invoked to handle incoming requests.
// If the request returns false from IsNotify then the Handler must eventually
// call Reply on the Conn with the supplied request.
// Handlers are called synchronously; they should pass the work off to a
// goroutine if they are going to take a long time.
// If Deliver returns true all subsequent handlers will be invoked with
// delivered set to true, and should not attempt to deliver the message.
Deliver(ctx context.Context, r *Request, delivered bool) bool
// Cancel is invoked for cancelled outgoing requests.
// It is okay to use the connection to send notifications, but the context will
// be in the cancelled state, so you must do it with the background context
// instead.
// If Cancel returns true all subsequent handlers will be invoked with
// cancelled set to true, and should not attempt to cancel the message.
Cancel(ctx context.Context, conn *Conn, id ID, cancelled bool) bool
// Log is invoked for all messages flowing through a Conn.
// direction indicates whether the message is being received or sent
// id is the message id, if not set it was a notification
// elapsed is the time between a call being seen and the response, and is
// negative for anything that is not a response.
// method is the method name specified in the message
// payload is the parameters for a call or notification, and the result for a
// response
// Request is called near the start of processing any request.
Request(ctx context.Context, direction Direction, r *WireRequest) context.Context
// Response is called near the start of processing any response.
Response(ctx context.Context, direction Direction, r *WireResponse) context.Context
// Done is called when any request is fully processed.
// For calls, this means the response has also been processed, for notifies
// this is as soon as the message has been written to the stream.
// If err is set, it implies the request failed.
Done(ctx context.Context, err error)
// Read is called with a count each time some data is read from the stream.
// The read calls are delayed until after the data has been interpreted so
// that it can be attributed to a request/response.
Read(ctx context.Context, bytes int64) context.Context
// Wrote is called each time some data is written to the stream.
Wrote(ctx context.Context, bytes int64) context.Context
// Error is called with errors that cannot be delivered through the normal
// mechanisms, for instance a failure to process a notify cannot be delivered
// back to the other party.
Error(ctx context.Context, err error)
}
// Direction is used to indicate to a logger whether the logged message was being
// sent or received.
type Direction bool
const (
// Send indicates the message is outgoing.
Send = Direction(true)
// Receive indicates the message is incoming.
Receive = Direction(false)
)
func (d Direction) String() string {
switch d {
case Send:
return "send"
case Receive:
return "receive"
default:
panic("unreachable")
}
}
type EmptyHandler struct{}
func (EmptyHandler) Deliver(ctx context.Context, r *Request, delivered bool) bool {
return false
}
func (EmptyHandler) Cancel(ctx context.Context, conn *Conn, id ID, cancelled bool) bool {
return false
}
func (EmptyHandler) Request(ctx context.Context, direction Direction, r *WireRequest) context.Context {
return ctx
}
func (EmptyHandler) Response(ctx context.Context, direction Direction, r *WireResponse) context.Context {
return ctx
}
func (EmptyHandler) Done(ctx context.Context, err error) {
}
func (EmptyHandler) Read(ctx context.Context, bytes int64) context.Context {
return ctx
}
func (EmptyHandler) Wrote(ctx context.Context, bytes int64) context.Context {
return ctx
}
func (EmptyHandler) Error(ctx context.Context, err error) {}
type defaultHandler struct{ EmptyHandler }
func (defaultHandler) Deliver(ctx context.Context, r *Request, delivered bool) bool {
if delivered {
return false
}
if !r.IsNotify() {
r.Reply(ctx, nil, NewErrorf(CodeMethodNotFound, "method %q not found", r.Method))
}
return true
}
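The left-hand ("master") column above keeps the interface-based hook API: handlers are stacked with Conn.AddHandler, and EmptyHandler supplies no-op methods so a concrete handler only overrides what it needs. A hedged sketch of a method-logging handler against that interface; the package name is hypothetical, and the import only works for code living inside x/tools, since the package is internal:

package mylsp // hypothetical

import (
	"context"
	"log"

	"golang.org/x/tools/internal/jsonrpc2"
)

// methodLogger logs every delivered request and leaves handling to later handlers.
type methodLogger struct {
	jsonrpc2.EmptyHandler // no-op implementations for the rest of the interface
}

func (methodLogger) Deliver(ctx context.Context, r *jsonrpc2.Request, delivered bool) bool {
	log.Printf("<- %s (notify=%v, already delivered=%v)", r.Method, r.IsNotify(), delivered)
	return false // not handled here, so the remaining handlers still see the request
}

func install(conn *jsonrpc2.Conn) {
	// Handlers are consulted in reverse order of addition, so this one runs first.
	conn.AddHandler(methodLogger{})
}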

View File

@ -13,17 +13,27 @@ import (
"fmt" "fmt"
"sync" "sync"
"sync/atomic" "sync/atomic"
"time"
"golang.org/x/tools/internal/lsp/telemetry"
"golang.org/x/tools/internal/lsp/telemetry/stats"
"golang.org/x/tools/internal/lsp/telemetry/tag"
"golang.org/x/tools/internal/lsp/telemetry/trace"
) )
// Conn is a JSON RPC 2 client server connection. // Conn is a JSON RPC 2 client server connection.
// Conn is bidirectional; it does not have a designated server or client end. // Conn is bidirectional; it does not have a designated server or client end.
type Conn struct { type Conn struct {
seq int64 // must only be accessed using atomic operations seq int64 // must only be accessed using atomic operations
handlers []Handler Handler Handler
Canceler Canceler
Logger Logger
Capacity int
RejectIfOverloaded bool
stream Stream stream Stream
err error err error
pendingMu sync.Mutex // protects the pending map pendingMu sync.Mutex // protects the pending map
pending map[ID]chan *WireResponse pending map[ID]chan *wireResponse
handlingMu sync.Mutex // protects the handling map handlingMu sync.Mutex // protects the handling map
handling map[ID]*Request handling map[ID]*Request
} }
@ -42,11 +52,91 @@ const (
type Request struct { type Request struct {
conn *Conn conn *Conn
cancel context.CancelFunc cancel context.CancelFunc
start time.Time
state requestState state requestState
nextRequest chan struct{} nextRequest chan struct{}
// The Wire values of the request. // Method is a string containing the method name to invoke.
WireRequest Method string
// Params is either a struct or an array with the parameters of the method.
Params *json.RawMessage
// The id of this request, used to tie the response back to the request.
// Will be either a string or a number. If not set, the request is a notify,
// and no response is possible.
ID *ID
}
// Handler is an option you can pass to NewConn to handle incoming requests.
// If the request returns false from IsNotify then the Handler must eventually
// call Reply on the Conn with the supplied request.
// Handlers are called synchronously; they should pass the work off to a
// goroutine if they are going to take a long time.
type Handler func(context.Context, *Request)
// Canceler is an option you can pass to NewConn which is invoked for
// cancelled outgoing requests.
// It is okay to use the connection to send notifications, but the context will
// be in the cancelled state, so you must do it with the background context
// instead.
type Canceler func(context.Context, *Conn, ID)
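On the right-hand ("gopls/v0.1.1") column the same hooks are plain fields on Conn rather than an interface. A minimal sketch of configuring them; the "ping" method and the package name are invented for illustration, and the internal import again assumes code living inside x/tools:

package mylsp // hypothetical

import (
	"context"

	"golang.org/x/tools/internal/jsonrpc2"
)

// configure installs a request handler and a canceler using the fields shown above.
func configure(conn *jsonrpc2.Conn) {
	conn.Handler = func(ctx context.Context, r *jsonrpc2.Request) {
		if r.Method == "ping" { // hypothetical method
			r.Reply(ctx, "pong", nil)
			return
		}
		if !r.IsNotify() {
			r.Reply(ctx, nil, jsonrpc2.NewErrorf(jsonrpc2.CodeMethodNotFound, "method %q not found", r.Method))
		}
	}
	conn.Canceler = func(ctx context.Context, c *jsonrpc2.Conn, id jsonrpc2.ID) {
		// A real client would send its protocol's cancel notification here
		// (LSP uses "$/cancelRequest"); the default canceler is a no-op.
	}
}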
type rpcStats struct {
server bool
method string
span trace.Span
start time.Time
received int64
sent int64
}
type statsKeyType string
const rpcStatsKey = statsKeyType("rpcStatsKey")
func start(ctx context.Context, server bool, method string, id *ID) (context.Context, *rpcStats) {
if method == "" {
panic("no method in rpc stats")
}
s := &rpcStats{
server: server,
method: method,
start: time.Now(),
}
ctx = context.WithValue(ctx, rpcStatsKey, s)
tags := make([]tag.Mutator, 0, 4)
tags = append(tags, tag.Upsert(telemetry.KeyMethod, method))
mode := telemetry.Outbound
spanKind := trace.SpanKindClient
if server {
spanKind = trace.SpanKindServer
mode = telemetry.Inbound
}
tags = append(tags, tag.Upsert(telemetry.KeyRPCDirection, mode))
if id != nil {
tags = append(tags, tag.Upsert(telemetry.KeyRPCID, id.String()))
}
ctx, s.span = trace.StartSpan(ctx, method, trace.WithSpanKind(spanKind))
ctx, _ = tag.New(ctx, tags...)
stats.Record(ctx, telemetry.Started.M(1))
return ctx, s
}
func (s *rpcStats) end(ctx context.Context, err *error) {
if err != nil && *err != nil {
ctx, _ = tag.New(ctx, tag.Upsert(telemetry.KeyStatus, "ERROR"))
} else {
ctx, _ = tag.New(ctx, tag.Upsert(telemetry.KeyStatus, "OK"))
}
elapsedTime := time.Since(s.start)
latencyMillis := float64(elapsedTime) / float64(time.Millisecond)
stats.Record(ctx,
telemetry.ReceivedBytes.M(s.received),
telemetry.SentBytes.M(s.sent),
telemetry.Latency.M(latencyMillis),
)
s.span.End()
} }
// NewErrorf builds an Error struct for the supplied message and code. // NewErrorf builds an Error struct for the supplied message and code.
@ -62,23 +152,23 @@ func NewErrorf(code int64, format string, args ...interface{}) *Error {
// You must call Run for the connection to be active. // You must call Run for the connection to be active.
func NewConn(s Stream) *Conn { func NewConn(s Stream) *Conn {
conn := &Conn{ conn := &Conn{
handlers: []Handler{defaultHandler{}},
stream: s, stream: s,
pending: make(map[ID]chan *WireResponse), pending: make(map[ID]chan *wireResponse),
handling: make(map[ID]*Request), handling: make(map[ID]*Request),
} }
// the default handler reports a method error
conn.Handler = func(ctx context.Context, r *Request) {
if !r.IsNotify() {
r.Reply(ctx, nil, NewErrorf(CodeMethodNotFound, "method %q not found", r.Method))
}
}
// the default canceler does nothing
conn.Canceler = func(context.Context, *Conn, ID) {}
// the default logger does nothing
conn.Logger = func(Direction, *ID, time.Duration, string, *json.RawMessage, *Error) {}
return conn return conn
} }
// AddHandler adds a new handler to the set the connection will invoke.
// Handlers are invoked in the reverse order of how they were added, this
// allows the most recent addition to be the first one to attempt to handle a
// message.
func (c *Conn) AddHandler(handler Handler) {
// prepend the new handlers so we use them first
c.handlers = append([]Handler{handler}, c.handlers...)
}
// Cancel cancels a pending Call on the server side. // Cancel cancels a pending Call on the server side.
// The call is identified by its id. // The call is identified by its id.
// JSON RPC 2 does not specify a cancel message, so cancellation support is not // JSON RPC 2 does not specify a cancel message, so cancellation support is not
@ -97,11 +187,14 @@ func (c *Conn) Cancel(id ID) {
// It will return as soon as the notification has been sent, as no response is // It will return as soon as the notification has been sent, as no response is
// possible. // possible.
func (c *Conn) Notify(ctx context.Context, method string, params interface{}) (err error) { func (c *Conn) Notify(ctx context.Context, method string, params interface{}) (err error) {
ctx, rpcStats := start(ctx, false, method, nil)
defer rpcStats.end(ctx, &err)
jsonParams, err := marshalToRaw(params) jsonParams, err := marshalToRaw(params)
if err != nil { if err != nil {
return fmt.Errorf("marshalling notify parameters: %v", err) return fmt.Errorf("marshalling notify parameters: %v", err)
} }
request := &WireRequest{ request := &wireRequest{
Method: method, Method: method,
Params: jsonParams, Params: jsonParams,
} }
@ -109,18 +202,9 @@ func (c *Conn) Notify(ctx context.Context, method string, params interface{}) (e
if err != nil { if err != nil {
return fmt.Errorf("marshalling notify request: %v", err) return fmt.Errorf("marshalling notify request: %v", err)
} }
for _, h := range c.handlers { c.Logger(Send, nil, -1, request.Method, request.Params, nil)
ctx = h.Request(ctx, Send, request)
}
defer func() {
for _, h := range c.handlers {
h.Done(ctx, err)
}
}()
n, err := c.stream.Write(ctx, data) n, err := c.stream.Write(ctx, data)
for _, h := range c.handlers { rpcStats.sent += n
ctx = h.Wrote(ctx, n)
}
return err return err
} }
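Because a notification carries no id, Notify returns as soon as the message has been written; nothing can come back. A short usage sketch (the method name is invented, and the hypothetical package assumes it may import the internal path):

package mylsp // hypothetical

import (
	"context"

	"golang.org/x/tools/internal/jsonrpc2"
)

// notifyExit fires a one-way message; the peer will not (and cannot) reply.
func notifyExit(ctx context.Context, conn *jsonrpc2.Conn) error {
	return conn.Notify(ctx, "exit", nil)
}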
@ -130,11 +214,13 @@ func (c *Conn) Notify(ctx context.Context, method string, params interface{}) (e
func (c *Conn) Call(ctx context.Context, method string, params, result interface{}) (err error) { func (c *Conn) Call(ctx context.Context, method string, params, result interface{}) (err error) {
// generate a new request identifier // generate a new request identifier
id := ID{Number: atomic.AddInt64(&c.seq, 1)} id := ID{Number: atomic.AddInt64(&c.seq, 1)}
ctx, rpcStats := start(ctx, false, method, &id)
defer rpcStats.end(ctx, &err)
jsonParams, err := marshalToRaw(params) jsonParams, err := marshalToRaw(params)
if err != nil { if err != nil {
return fmt.Errorf("marshalling call parameters: %v", err) return fmt.Errorf("marshalling call parameters: %v", err)
} }
request := &WireRequest{ request := &wireRequest{
ID: &id, ID: &id,
Method: method, Method: method,
Params: jsonParams, Params: jsonParams,
@ -144,12 +230,9 @@ func (c *Conn) Call(ctx context.Context, method string, params, result interface
if err != nil { if err != nil {
return fmt.Errorf("marshalling call request: %v", err) return fmt.Errorf("marshalling call request: %v", err)
} }
for _, h := range c.handlers {
ctx = h.Request(ctx, Send, request)
}
// we have to add ourselves to the pending map before we send, otherwise we // we have to add ourselves to the pending map before we send, otherwise we
// are racing the response // are racing the response
rchan := make(chan *WireResponse) rchan := make(chan *wireResponse)
c.pendingMu.Lock() c.pendingMu.Lock()
c.pending[id] = rchan c.pending[id] = rchan
c.pendingMu.Unlock() c.pendingMu.Unlock()
@ -158,15 +241,12 @@ func (c *Conn) Call(ctx context.Context, method string, params, result interface
c.pendingMu.Lock() c.pendingMu.Lock()
delete(c.pending, id) delete(c.pending, id)
c.pendingMu.Unlock() c.pendingMu.Unlock()
for _, h := range c.handlers {
h.Done(ctx, err)
}
}() }()
// now we are ready to send // now we are ready to send
before := time.Now()
c.Logger(Send, request.ID, -1, request.Method, request.Params, nil)
n, err := c.stream.Write(ctx, data) n, err := c.stream.Write(ctx, data)
for _, h := range c.handlers { rpcStats.sent += n
ctx = h.Wrote(ctx, n)
}
if err != nil { if err != nil {
// sending failed, we will never get a response, so don't leave it pending // sending failed, we will never get a response, so don't leave it pending
return err return err
@ -174,9 +254,8 @@ func (c *Conn) Call(ctx context.Context, method string, params, result interface
// now wait for the response // now wait for the response
select { select {
case response := <-rchan: case response := <-rchan:
for _, h := range c.handlers { elapsed := time.Since(before)
ctx = h.Response(ctx, Receive, response) c.Logger(Receive, response.ID, elapsed, request.Method, response.Result, response.Error)
}
// is it an error response? // is it an error response?
if response.Error != nil { if response.Error != nil {
return response.Error return response.Error
@ -190,12 +269,7 @@ func (c *Conn) Call(ctx context.Context, method string, params, result interface
return nil return nil
case <-ctx.Done(): case <-ctx.Done():
// allow the handler to propagate the cancel // allow the handler to propagate the cancel
cancelled := false c.Canceler(ctx, c, id)
for _, h := range c.handlers {
if h.Cancel(ctx, c, id, cancelled) {
cancelled = true
}
}
return ctx.Err() return ctx.Err()
} }
} }
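Call registers the pending id before writing, precisely to avoid racing the response, and then blocks until the reply arrives or ctx is done. A hedged round-trip sketch against the "one_string" method defined in the test file further down (package name hypothetical):

package mylsp // hypothetical

import (
	"context"
	"fmt"

	"golang.org/x/tools/internal/jsonrpc2"
)

// echo sends "hi" and decodes the peer's result into a string.
func echo(ctx context.Context, conn *jsonrpc2.Conn) error {
	var got string
	// Params are marshalled to JSON; the response's Result is unmarshalled into &got.
	if err := conn.Call(ctx, "one_string", "hi", &got); err != nil {
		return err // either a transport error or the peer's *jsonrpc2.Error
	}
	fmt.Println(got) // "got:hi" with the test handler shown below
	return nil
}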
@ -235,6 +309,9 @@ func (r *Request) Reply(ctx context.Context, result interface{}, err error) erro
if r.IsNotify() { if r.IsNotify() {
return fmt.Errorf("reply not invoked with a valid call") return fmt.Errorf("reply not invoked with a valid call")
} }
ctx, st := trace.StartSpan(ctx, r.Method+":reply", trace.WithSpanKind(trace.SpanKindClient))
defer st.End()
// reply ends the handling phase of a call, so if we are not yet // reply ends the handling phase of a call, so if we are not yet
// parallel we should be now. The go routine is allowed to continue // parallel we should be now. The go routine is allowed to continue
// to do work after replying, which is why it is important to unlock // to do work after replying, which is why it is important to unlock
@ -242,11 +319,12 @@ func (r *Request) Reply(ctx context.Context, result interface{}, err error) erro
r.Parallel() r.Parallel()
r.state = requestReplied r.state = requestReplied
elapsed := time.Since(r.start)
var raw *json.RawMessage var raw *json.RawMessage
if err == nil { if err == nil {
raw, err = marshalToRaw(result) raw, err = marshalToRaw(result)
} }
response := &WireResponse{ response := &wireResponse{
Result: raw, Result: raw,
ID: r.ID, ID: r.ID,
} }
@ -261,12 +339,14 @@ func (r *Request) Reply(ctx context.Context, result interface{}, err error) erro
if err != nil { if err != nil {
return err return err
} }
for _, h := range r.conn.handlers { r.conn.Logger(Send, response.ID, elapsed, r.Method, response.Result, response.Error)
ctx = h.Response(ctx, Send, response)
}
n, err := r.conn.stream.Write(ctx, data) n, err := r.conn.stream.Write(ctx, data)
for _, h := range r.conn.handlers {
ctx = h.Wrote(ctx, n) v := ctx.Value(rpcStatsKey)
if v != nil {
v.(*rpcStats).sent += n
} else {
panic("no stats available in reply")
} }
if err != nil { if err != nil {
@ -305,7 +385,7 @@ type combined struct {
// caused the termination. // caused the termination.
// It must be called exactly once for each Conn. // It must be called exactly once for each Conn.
// It returns only when the reader is closed or there is an error in the stream. // It returns only when the reader is closed or there is an error in the stream.
func (c *Conn) Run(runCtx context.Context) error { func (c *Conn) Run(ctx context.Context) error {
// we need to make the next request "lock" in an unlocked state to allow // we need to make the next request "lock" in an unlocked state to allow
// the first incoming request to proceed. All later requests are unlocked // the first incoming request to proceed. All later requests are unlocked
// by the preceding request going to parallel mode. // by the preceding request going to parallel mode.
@ -313,7 +393,7 @@ func (c *Conn) Run(runCtx context.Context) error {
close(nextRequest) close(nextRequest)
for { for {
// get the data for a message // get the data for a message
data, n, err := c.stream.Read(runCtx) data, n, err := c.stream.Read(ctx)
if err != nil { if err != nil {
// the stream failed, we cannot continue // the stream failed, we cannot continue
return err return err
@ -323,32 +403,26 @@ func (c *Conn) Run(runCtx context.Context) error {
if err := json.Unmarshal(data, msg); err != nil { if err := json.Unmarshal(data, msg); err != nil {
// a badly formed message arrived, log it and continue // a badly formed message arrived, log it and continue
// we trust the stream to have isolated the error to just this message // we trust the stream to have isolated the error to just this message
for _, h := range c.handlers { c.Logger(Receive, nil, -1, "", nil, NewErrorf(0, "unmarshal failed: %v", err))
h.Error(runCtx, fmt.Errorf("unmarshal failed: %v", err))
}
continue continue
} }
// work out which kind of message we have // work out which kind of message we have
switch { switch {
case msg.Method != "": case msg.Method != "":
// if method is set it must be a request // if method is set it must be a request
reqCtx, cancelReq := context.WithCancel(runCtx) reqCtx, cancelReq := context.WithCancel(ctx)
reqCtx, rpcStats := start(reqCtx, true, msg.Method, msg.ID)
rpcStats.received += n
thisRequest := nextRequest thisRequest := nextRequest
nextRequest = make(chan struct{}) nextRequest = make(chan struct{})
req := &Request{ req := &Request{
conn: c, conn: c,
cancel: cancelReq, cancel: cancelReq,
nextRequest: nextRequest, nextRequest: nextRequest,
WireRequest: WireRequest{ start: time.Now(),
VersionTag: msg.VersionTag,
Method: msg.Method, Method: msg.Method,
Params: msg.Params, Params: msg.Params,
ID: msg.ID, ID: msg.ID,
},
}
for _, h := range c.handlers {
reqCtx = h.Request(reqCtx, Receive, &req.WireRequest)
reqCtx = h.Read(reqCtx, n)
} }
c.setHandling(req, true) c.setHandling(req, true)
go func() { go func() {
@ -360,17 +434,11 @@ func (c *Conn) Run(runCtx context.Context) error {
req.Reply(reqCtx, nil, NewErrorf(CodeInternalError, "method %q did not reply", req.Method)) req.Reply(reqCtx, nil, NewErrorf(CodeInternalError, "method %q did not reply", req.Method))
} }
req.Parallel() req.Parallel()
for _, h := range c.handlers { rpcStats.end(reqCtx, nil)
h.Done(reqCtx, err)
}
cancelReq() cancelReq()
}() }()
delivered := false c.Logger(Receive, req.ID, -1, req.Method, req.Params, nil)
for _, h := range c.handlers { c.Handler(reqCtx, req)
if h.Deliver(reqCtx, req, delivered) {
delivered = true
}
}
}() }()
case msg.ID != nil: case msg.ID != nil:
// we have a response, get the pending entry from the map // we have a response, get the pending entry from the map
@ -381,7 +449,7 @@ func (c *Conn) Run(runCtx context.Context) error {
} }
c.pendingMu.Unlock() c.pendingMu.Unlock()
// and send the reply to the channel // and send the reply to the channel
response := &WireResponse{ response := &wireResponse{
Result: msg.Result, Result: msg.Result,
Error: msg.Error, Error: msg.Error,
ID: msg.ID, ID: msg.ID,
@ -389,9 +457,7 @@ func (c *Conn) Run(runCtx context.Context) error {
rchan <- response rchan <- response
close(rchan) close(rchan)
default: default:
for _, h := range c.handlers { c.Logger(Receive, nil, -1, "", nil, NewErrorf(0, "message not a call, notify or response, ignoring"))
h.Error(runCtx, fmt.Errorf("message not a call, notify or response, ignoring"))
}
} }
} }
} }
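The test helper later in this diff shows the intended wiring of stream, connection, and handler on the gopls/v0.1.1 side; condensed here into one hedged sketch over an arbitrary reader/writer pair (package name hypothetical):

package mylsp // hypothetical

import (
	"context"
	"io"

	"golang.org/x/tools/internal/jsonrpc2"
)

// serve runs a connection over r and w until the stream is closed or fails.
func serve(ctx context.Context, r io.ReadCloser, w io.WriteCloser, handler jsonrpc2.Handler) error {
	defer r.Close()
	defer w.Close()

	stream := jsonrpc2.NewStream(r, w) // the tests also exercise a header-framed variant
	conn := jsonrpc2.NewConn(stream)
	conn.Handler = handler
	// Run reads and dispatches messages; it returns only when the reader closes or errors.
	return conn.Run(ctx)
}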

View File

@ -10,11 +10,9 @@ import (
"flag" "flag"
"fmt" "fmt"
"io" "io"
"log"
"path" "path"
"reflect" "reflect"
"testing" "testing"
"time"
"golang.org/x/tools/internal/jsonrpc2" "golang.org/x/tools/internal/jsonrpc2"
) )
@ -108,7 +106,10 @@ func run(ctx context.Context, t *testing.T, withHeaders bool, r io.ReadCloser, w
stream = jsonrpc2.NewStream(r, w) stream = jsonrpc2.NewStream(r, w)
} }
conn := jsonrpc2.NewConn(stream) conn := jsonrpc2.NewConn(stream)
conn.AddHandler(&handle{log: *logRPC}) conn.Handler = handle
if *logRPC {
conn.Logger = jsonrpc2.Log
}
go func() { go func() {
defer func() { defer func() {
r.Close() r.Close()
@ -121,82 +122,36 @@ func run(ctx context.Context, t *testing.T, withHeaders bool, r io.ReadCloser, w
return conn return conn
} }
type handle struct { func handle(ctx context.Context, r *jsonrpc2.Request) {
log bool
}
func (h *handle) Deliver(ctx context.Context, r *jsonrpc2.Request, delivered bool) bool {
switch r.Method { switch r.Method {
case "no_args": case "no_args":
if r.Params != nil { if r.Params != nil {
r.Reply(ctx, nil, jsonrpc2.NewErrorf(jsonrpc2.CodeInvalidParams, "Expected no params")) r.Reply(ctx, nil, jsonrpc2.NewErrorf(jsonrpc2.CodeInvalidParams, "Expected no params"))
return true return
} }
r.Reply(ctx, true, nil) r.Reply(ctx, true, nil)
case "one_string": case "one_string":
var v string var v string
if err := json.Unmarshal(*r.Params, &v); err != nil { if err := json.Unmarshal(*r.Params, &v); err != nil {
r.Reply(ctx, nil, jsonrpc2.NewErrorf(jsonrpc2.CodeParseError, "%v", err.Error())) r.Reply(ctx, nil, jsonrpc2.NewErrorf(jsonrpc2.CodeParseError, "%v", err.Error()))
return true return
} }
r.Reply(ctx, "got:"+v, nil) r.Reply(ctx, "got:"+v, nil)
case "one_number": case "one_number":
var v int var v int
if err := json.Unmarshal(*r.Params, &v); err != nil { if err := json.Unmarshal(*r.Params, &v); err != nil {
r.Reply(ctx, nil, jsonrpc2.NewErrorf(jsonrpc2.CodeParseError, "%v", err.Error())) r.Reply(ctx, nil, jsonrpc2.NewErrorf(jsonrpc2.CodeParseError, "%v", err.Error()))
return true return
} }
r.Reply(ctx, fmt.Sprintf("got:%d", v), nil) r.Reply(ctx, fmt.Sprintf("got:%d", v), nil)
case "join": case "join":
var v []string var v []string
if err := json.Unmarshal(*r.Params, &v); err != nil { if err := json.Unmarshal(*r.Params, &v); err != nil {
r.Reply(ctx, nil, jsonrpc2.NewErrorf(jsonrpc2.CodeParseError, "%v", err.Error())) r.Reply(ctx, nil, jsonrpc2.NewErrorf(jsonrpc2.CodeParseError, "%v", err.Error()))
return true return
} }
r.Reply(ctx, path.Join(v...), nil) r.Reply(ctx, path.Join(v...), nil)
default: default:
r.Reply(ctx, nil, jsonrpc2.NewErrorf(jsonrpc2.CodeMethodNotFound, "method %q not found", r.Method)) r.Reply(ctx, nil, jsonrpc2.NewErrorf(jsonrpc2.CodeMethodNotFound, "method %q not found", r.Method))
} }
return true
}
func (h *handle) Cancel(ctx context.Context, conn *jsonrpc2.Conn, id jsonrpc2.ID, cancelled bool) bool {
return false
}
func (h *handle) Request(ctx context.Context, direction jsonrpc2.Direction, r *jsonrpc2.WireRequest) context.Context {
if h.log {
if r.ID != nil {
log.Printf("%v call [%v] %s %v", direction, r.ID, r.Method, r.Params)
} else {
log.Printf("%v notification %s %v", direction, r.Method, r.Params)
}
ctx = context.WithValue(ctx, "method", r.Method)
ctx = context.WithValue(ctx, "start", time.Now())
}
return ctx
}
func (h *handle) Response(ctx context.Context, direction jsonrpc2.Direction, r *jsonrpc2.WireResponse) context.Context {
if h.log {
method := ctx.Value("method")
elapsed := time.Since(ctx.Value("start").(time.Time))
log.Printf("%v response in %v [%v] %s %v", direction, elapsed, r.ID, method, r.Result)
}
return ctx
}
func (h *handle) Done(ctx context.Context, err error) {
}
func (h *handle) Read(ctx context.Context, bytes int64) context.Context {
return ctx
}
func (h *handle) Wrote(ctx context.Context, bytes int64) context.Context {
return ctx
}
func (h *handle) Error(ctx context.Context, err error) {
log.Printf("%v", err)
} }

internal/jsonrpc2/log.go (new file, 59 lines)
View File

@ -0,0 +1,59 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package jsonrpc2
import (
"encoding/json"
"log"
"time"
)
// Logger is an option you can pass to NewConn which is invoked for
// all messages flowing through a Conn.
// direction indicates whether the message is being received or sent
// id is the message id, if not set it was a notification
// elapsed is the time between a call being seen and the response, and is
// negative for anything that is not a response.
// method is the method name specified in the message
// payload is the parameters for a call or notification, and the result for a
// response
type Logger = func(direction Direction, id *ID, elapsed time.Duration, method string, payload *json.RawMessage, err *Error)
// Direction is used to indicate to a logger whether the logged message was being
// sent or received.
type Direction bool
const (
// Send indicates the message is outgoing.
Send = Direction(true)
// Receive indicates the message is incoming.
Receive = Direction(false)
)
func (d Direction) String() string {
switch d {
case Send:
return "send"
case Receive:
return "receive"
default:
panic("unreachable")
}
}
// Log is an implementation of Logger that outputs using log.Print
// It is not used by default, but is provided for easy logging in user code.
func Log(direction Direction, id *ID, elapsed time.Duration, method string, payload *json.RawMessage, err *Error) {
switch {
case err != nil:
log.Printf("%v failure [%v] %s %v", direction, id, method, err)
case id == nil:
log.Printf("%v notification %s %s", direction, method, *payload)
case elapsed >= 0:
log.Printf("%v response in %v [%v] %s %s", direction, elapsed, id, method, *payload)
default:
log.Printf("%v call [%v] %s %s", direction, id, method, *payload)
}
}
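With this API, logging is opt-in: assign jsonrpc2.Log, or any function with the Logger signature, to the connection, as the test file below does behind its -logrpc flag. A small sketch of a custom, failures-only logger (the names are mine):

package mylsp // hypothetical

import (
	"encoding/json"
	"log"
	"time"

	"golang.org/x/tools/internal/jsonrpc2"
)

// quietLogger reports only failed messages and stays silent otherwise.
func quietLogger(direction jsonrpc2.Direction, id *jsonrpc2.ID, elapsed time.Duration,
	method string, payload *json.RawMessage, err *jsonrpc2.Error) {
	if err != nil {
		log.Printf("%v failure [%v] %s: %v", direction, id, method, err)
	}
}

func enableLogging(conn *jsonrpc2.Conn) {
	conn.Logger = quietLogger // or jsonrpc2.Log to print every message
}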

View File

@ -34,8 +34,8 @@ const (
CodeServerOverloaded = -32000 CodeServerOverloaded = -32000
) )
// WireRequest is sent to a server to represent a Call or Notify operation. // wireRequest is sent to a server to represent a Call or Notify operation.
type WireRequest struct { type wireRequest struct {
// VersionTag is always encoded as the string "2.0" // VersionTag is always encoded as the string "2.0"
VersionTag VersionTag `json:"jsonrpc"` VersionTag VersionTag `json:"jsonrpc"`
// Method is a string containing the method name to invoke. // Method is a string containing the method name to invoke.
@ -48,11 +48,11 @@ type WireRequest struct {
ID *ID `json:"id,omitempty"` ID *ID `json:"id,omitempty"`
} }
// WireResponse is a reply to a Request. // wireResponse is a reply to a Request.
// It will always have the ID field set to tie it back to a request, and will // It will always have the ID field set to tie it back to a request, and will
// have either the Result or Error fields set depending on whether it is a // have either the Result or Error fields set depending on whether it is a
// success or failure response. // success or failure response.
type WireResponse struct { type wireResponse struct {
// VersionTag is always encoded as the string "2.0" // VersionTag is always encoded as the string "2.0"
VersionTag VersionTag `json:"jsonrpc"` VersionTag VersionTag `json:"jsonrpc"`
// Result is the response value, and is required on success. // Result is the response value, and is required on success.
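These structs encode standard JSON-RPC 2.0 messages. For orientation, a sketch of the four message shapes as they appear on the stream; the payload values are invented, and the error code is the spec's method-not-found value (-32601), which corresponds to CodeMethodNotFound above:

package wire // illustrative constants only

// A call: it has an id, so the peer must answer with a response carrying the same id.
const exampleCall = `{"jsonrpc":"2.0","id":1,"method":"one_string","params":"hi"}`

// A notification: no id, so no response is possible.
const exampleNotify = `{"jsonrpc":"2.0","method":"exit"}`

// A success response: Result is set and Error is absent.
const exampleResult = `{"jsonrpc":"2.0","id":1,"result":"got:hi"}`

// A failure response: Error is set instead of Result.
const exampleError = `{"jsonrpc":"2.0","id":1,"error":{"code":-32601,"message":"method \"nope\" not found"}}`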

View File

@ -14,6 +14,7 @@ import (
"golang.org/x/tools/internal/lsp/debug" "golang.org/x/tools/internal/lsp/debug"
"golang.org/x/tools/internal/lsp/source" "golang.org/x/tools/internal/lsp/source"
"golang.org/x/tools/internal/lsp/xlog"
"golang.org/x/tools/internal/memoize" "golang.org/x/tools/internal/memoize"
"golang.org/x/tools/internal/span" "golang.org/x/tools/internal/span"
) )
@ -71,11 +72,12 @@ func (c *cache) GetFile(uri span.URI) source.FileHandle {
} }
} }
func (c *cache) NewSession(ctx context.Context) source.Session { func (c *cache) NewSession(log xlog.Logger) source.Session {
index := atomic.AddInt64(&sessionIndex, 1) index := atomic.AddInt64(&sessionIndex, 1)
s := &session{ s := &session{
cache: c, cache: c,
id: strconv.FormatInt(index, 10), id: strconv.FormatInt(index, 10),
log: log,
overlays: make(map[span.URI]*overlay), overlays: make(map[span.URI]*overlay),
filesWatchMap: NewWatchMap(), filesWatchMap: NewWatchMap(),
} }

View File

@ -16,9 +16,6 @@ import (
"golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/packages" "golang.org/x/tools/go/packages"
"golang.org/x/tools/internal/lsp/source" "golang.org/x/tools/internal/lsp/source"
"golang.org/x/tools/internal/lsp/telemetry"
"golang.org/x/tools/internal/lsp/telemetry/log"
"golang.org/x/tools/internal/lsp/telemetry/trace"
"golang.org/x/tools/internal/span" "golang.org/x/tools/internal/span"
) )
@ -37,19 +34,18 @@ type importer struct {
} }
func (imp *importer) Import(pkgPath string) (*types.Package, error) { func (imp *importer) Import(pkgPath string) (*types.Package, error) {
ctx := imp.ctx
id, ok := imp.view.mcache.ids[packagePath(pkgPath)] id, ok := imp.view.mcache.ids[packagePath(pkgPath)]
if !ok { if !ok {
return nil, fmt.Errorf("no known ID for %s", pkgPath) return nil, fmt.Errorf("no known ID for %s", pkgPath)
} }
pkg, err := imp.getPkg(ctx, id) pkg, err := imp.getPkg(id)
if err != nil { if err != nil {
return nil, err return nil, err
} }
return pkg.types, nil return pkg.types, nil
} }
func (imp *importer) getPkg(ctx context.Context, id packageID) (*pkg, error) { func (imp *importer) getPkg(id packageID) (*pkg, error) {
if _, ok := imp.seen[id]; ok { if _, ok := imp.seen[id]; ok {
return nil, fmt.Errorf("circular import detected") return nil, fmt.Errorf("circular import detected")
} }
@ -69,24 +65,20 @@ func (imp *importer) getPkg(ctx context.Context, id packageID) (*pkg, error) {
// This goroutine becomes responsible for populating // This goroutine becomes responsible for populating
// the entry and broadcasting its readiness. // the entry and broadcasting its readiness.
e.pkg, e.err = imp.typeCheck(ctx, id) e.pkg, e.err = imp.typeCheck(id)
if e.err != nil {
// Don't cache failed packages. If we didn't successfully cache the package
// in each file, then this pcache entry won't get invalidated as those files
// change.
imp.view.pcache.mu.Lock()
if imp.view.pcache.packages[id] == e {
delete(imp.view.pcache.packages, id)
}
imp.view.pcache.mu.Unlock()
}
close(e.ready) close(e.ready)
} }
if e.err != nil { if e.err != nil {
// If the import had been previously canceled, and that error cached, try again. // If the import had been previously canceled, and that error cached, try again.
if e.err == context.Canceled && ctx.Err() == nil { if e.err == context.Canceled && imp.ctx.Err() == nil {
return imp.getPkg(ctx, id) imp.view.pcache.mu.Lock()
// Clear out canceled cache entry if it is still there.
if imp.view.pcache.packages[id] == e {
delete(imp.view.pcache.packages, id)
}
imp.view.pcache.mu.Unlock()
return imp.getPkg(id)
} }
return nil, e.err return nil, e.err
} }
@ -94,9 +86,7 @@ func (imp *importer) getPkg(ctx context.Context, id packageID) (*pkg, error) {
return e.pkg, nil return e.pkg, nil
} }
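The entry/ready-channel dance in getPkg above is the standard concurrent memoization pattern: the first goroutine to create an entry does the type-check and closes ready; later callers for the same id simply wait on the channel and share the result. A self-contained sketch of the pattern, detached from the cache types used here:

package memo // a generic sketch of the pattern, not gopls code

import "sync"

type entry struct {
	ready chan struct{} // closed once value and err are set
	value interface{}
	err   error
}

type Memo struct {
	mu sync.Mutex
	m  map[string]*entry
}

func New() *Memo { return &Memo{m: make(map[string]*entry)} }

// Get computes fn at most once per key; concurrent callers share the result.
func (c *Memo) Get(key string, fn func() (interface{}, error)) (interface{}, error) {
	c.mu.Lock()
	e, ok := c.m[key]
	if !ok {
		// This caller becomes responsible for populating the entry
		// and broadcasting its readiness, as in getPkg above.
		e = &entry{ready: make(chan struct{})}
		c.m[key] = e
		c.mu.Unlock()
		e.value, e.err = fn()
		close(e.ready)
	} else {
		c.mu.Unlock()
		<-e.ready // wait for the first caller to finish
	}
	return e.value, e.err
}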
func (imp *importer) typeCheck(ctx context.Context, id packageID) (*pkg, error) { func (imp *importer) typeCheck(id packageID) (*pkg, error) {
ctx, done := trace.StartSpan(ctx, "cache.importer.typeCheck", telemetry.Package.Of(id))
defer done()
meta, ok := imp.view.mcache.packages[id] meta, ok := imp.view.mcache.packages[id]
if !ok { if !ok {
return nil, fmt.Errorf("no metadata for %v", id) return nil, fmt.Errorf("no metadata for %v", id)
@ -123,42 +113,42 @@ func (imp *importer) typeCheck(ctx context.Context, id packageID) (*pkg, error)
mode = source.ParseExported mode = source.ParseExported
} }
var ( var (
files = make([]*ast.File, len(meta.files)) files []*astFile
errors = make([]error, len(meta.files)) phs []source.ParseGoHandle
wg sync.WaitGroup wg sync.WaitGroup
) )
for _, filename := range meta.files { for _, filename := range meta.files {
uri := span.FileURI(filename) uri := span.FileURI(filename)
f, err := imp.view.getFile(ctx, uri) f, err := imp.view.getFile(uri)
if err != nil { if err != nil {
log.Error(ctx, "unable to get file", err, telemetry.File.Of(f.URI()))
continue continue
} }
pkg.files = append(pkg.files, imp.view.session.cache.ParseGoHandle(f.Handle(ctx), mode)) ph := imp.view.session.cache.ParseGoHandle(f.Handle(imp.ctx), mode)
phs = append(phs, ph)
files = append(files, &astFile{
uri: ph.File().Identity().URI,
isTrimmed: mode == source.ParseExported,
ph: ph,
})
} }
for i, ph := range pkg.files { for i, ph := range phs {
wg.Add(1) wg.Add(1)
go func(i int, ph source.ParseGoHandle) { go func(i int, ph source.ParseGoHandle) {
defer wg.Done() defer wg.Done()
files[i], errors[i] = ph.Parse(ctx) files[i].file, files[i].err = ph.Parse(imp.ctx)
}(i, ph) }(i, ph)
} }
wg.Wait() wg.Wait()
var i int
for _, f := range files { for _, f := range files {
if f != nil { pkg.files = append(pkg.files, f)
files[i] = f
i++ if f.err != nil {
if f.err == context.Canceled {
return nil, f.err
} }
} imp.view.session.cache.appendPkgError(pkg, f.err)
for _, err := range errors {
if err == context.Canceled {
return nil, err
}
if err != nil {
imp.view.session.cache.appendPkgError(pkg, err)
} }
} }
@ -185,7 +175,7 @@ func (imp *importer) typeCheck(ctx context.Context, id packageID) (*pkg, error)
IgnoreFuncBodies: mode == source.ParseExported, IgnoreFuncBodies: mode == source.ParseExported,
Importer: &importer{ Importer: &importer{
view: imp.view, view: imp.view,
ctx: ctx, ctx: imp.ctx,
fset: imp.fset, fset: imp.fset,
topLevelPkgID: imp.topLevelPkgID, topLevelPkgID: imp.topLevelPkgID,
seen: seen, seen: seen,
@ -194,29 +184,28 @@ func (imp *importer) typeCheck(ctx context.Context, id packageID) (*pkg, error)
check := types.NewChecker(cfg, imp.fset, pkg.types, pkg.typesInfo) check := types.NewChecker(cfg, imp.fset, pkg.types, pkg.typesInfo)
// Ignore type-checking errors. // Ignore type-checking errors.
check.Files(files) check.Files(pkg.GetSyntax())
// Add every file in this package to our cache. // Add every file in this package to our cache.
if err := imp.cachePackage(ctx, pkg, meta, mode); err != nil { imp.cachePackage(imp.ctx, pkg, meta, mode)
return nil, err
}
return pkg, nil return pkg, nil
} }
func (imp *importer) cachePackage(ctx context.Context, pkg *pkg, meta *metadata, mode source.ParseMode) error { func (imp *importer) cachePackage(ctx context.Context, pkg *pkg, meta *metadata, mode source.ParseMode) {
for _, ph := range pkg.files { for _, file := range pkg.files {
uri := ph.File().Identity().URI f, err := imp.view.getFile(file.uri)
f, err := imp.view.getFile(ctx, uri)
if err != nil { if err != nil {
return fmt.Errorf("no such file %s: %v", uri, err) imp.view.session.log.Errorf(ctx, "no file: %v", err)
continue
} }
gof, ok := f.(*goFile) gof, ok := f.(*goFile)
if !ok { if !ok {
return fmt.Errorf("non Go file %s", uri) imp.view.session.log.Errorf(ctx, "%v is not a Go file", file.uri)
continue
} }
if err := imp.cachePerFile(gof, ph, pkg); err != nil { if err := imp.cachePerFile(gof, file, pkg); err != nil {
return fmt.Errorf("failed to cache file %s: %v", gof.URI(), err) imp.view.session.log.Errorf(ctx, "failed to cache file %s: %v", gof.URI(), err)
} }
} }
@ -224,17 +213,15 @@ func (imp *importer) cachePackage(ctx context.Context, pkg *pkg, meta *metadata,
// We lock the package cache, but we shouldn't get any inconsistencies // We lock the package cache, but we shouldn't get any inconsistencies
// because we are still holding the lock on the view. // because we are still holding the lock on the view.
for importPath := range meta.children { for importPath := range meta.children {
importPkg, err := imp.getPkg(ctx, importPath) importPkg, err := imp.getPkg(importPath)
if err != nil { if err != nil {
continue continue
} }
pkg.imports[importPkg.pkgPath] = importPkg pkg.imports[importPkg.pkgPath] = importPkg
} }
return nil
} }
func (imp *importer) cachePerFile(gof *goFile, ph source.ParseGoHandle, p *pkg) error { func (imp *importer) cachePerFile(gof *goFile, file *astFile, p *pkg) error {
gof.mu.Lock() gof.mu.Lock()
defer gof.mu.Unlock() defer gof.mu.Unlock()
@ -244,11 +231,25 @@ func (imp *importer) cachePerFile(gof *goFile, ph source.ParseGoHandle, p *pkg)
} }
gof.pkgs[p.id] = p gof.pkgs[p.id] = p
file, err := ph.Parse(imp.ctx) // Get the AST for the file.
if file == nil { gof.ast = file
return fmt.Errorf("no AST for %s: %v", ph.File().Identity().URI, err) if gof.ast == nil {
return fmt.Errorf("no AST information for %s", file.uri)
} }
gof.imports = file.Imports if gof.ast.file == nil {
return fmt.Errorf("no AST for %s", file.uri)
}
// Get the *token.File directly from the AST.
pos := gof.ast.file.Pos()
if !pos.IsValid() {
return fmt.Errorf("AST for %s has an invalid position", file.uri)
}
tok := imp.view.session.cache.FileSet().File(pos)
if tok == nil {
return fmt.Errorf("no *token.File for %s", file.uri)
}
gof.token = tok
gof.imports = gof.ast.file.Imports
return nil return nil
} }

View File

@ -10,8 +10,6 @@ import (
"os" "os"
"golang.org/x/tools/internal/lsp/source" "golang.org/x/tools/internal/lsp/source"
"golang.org/x/tools/internal/lsp/telemetry"
"golang.org/x/tools/internal/lsp/telemetry/trace"
"golang.org/x/tools/internal/span" "golang.org/x/tools/internal/span"
) )
@ -52,8 +50,6 @@ func (h *nativeFileHandle) Kind() source.FileKind {
} }
func (h *nativeFileHandle) Read(ctx context.Context) ([]byte, string, error) { func (h *nativeFileHandle) Read(ctx context.Context) ([]byte, string, error) {
ctx, done := trace.StartSpan(ctx, "cache.nativeFileHandle.Read", telemetry.File.Of(h.identity.URI.Filename()))
defer done()
//TODO: this should fail if the version is not the same as the handle //TODO: this should fail if the version is not the same as the handle
data, err := ioutil.ReadFile(h.identity.URI.Filename()) data, err := ioutil.ReadFile(h.identity.URI.Filename())
if err != nil { if err != nil {

View File

@ -34,6 +34,8 @@ type fileBase struct {
handleMu sync.Mutex handleMu sync.Mutex
handle source.FileHandle handle source.FileHandle
token *token.File
} }
func basename(filename string) string { func basename(filename string) string {

View File

@ -6,14 +6,11 @@ package cache
import ( import (
"context" "context"
"fmt"
"go/ast" "go/ast"
"go/token" "go/token"
"sync" "sync"
"golang.org/x/tools/internal/lsp/source" "golang.org/x/tools/internal/lsp/source"
"golang.org/x/tools/internal/lsp/telemetry"
"golang.org/x/tools/internal/lsp/telemetry/log"
"golang.org/x/tools/internal/span" "golang.org/x/tools/internal/span"
) )
@ -36,6 +33,7 @@ type goFile struct {
imports []*ast.ImportSpec imports []*ast.ImportSpec
ast *astFile
pkgs map[packageID]*pkg pkgs map[packageID]*pkg
meta map[packageID]*metadata meta map[packageID]*metadata
} }
@ -48,53 +46,61 @@ type astFile struct {
isTrimmed bool isTrimmed bool
} }
func (f *goFile) GetToken(ctx context.Context) (*token.File, error) { func (f *goFile) GetToken(ctx context.Context) *token.File {
file, err := f.GetAST(ctx, source.ParseFull)
if file == nil {
return nil, err
}
return f.view.session.cache.fset.File(file.Pos()), nil
}
func (f *goFile) GetAST(ctx context.Context, mode source.ParseMode) (*ast.File, error) {
f.view.mu.Lock() f.view.mu.Lock()
defer f.view.mu.Unlock() defer f.view.mu.Unlock()
ctx = telemetry.File.With(ctx, f.URI())
if f.isDirty(ctx) || f.wrongParseMode(ctx, mode) { if f.isDirty() || f.astIsTrimmed() {
if _, err := f.view.loadParseTypecheck(ctx, f); err != nil { if _, err := f.view.loadParseTypecheck(ctx, f); err != nil {
return nil, fmt.Errorf("GetAST: unable to check package for %s: %v", f.URI(), err) f.View().Session().Logger().Errorf(ctx, "unable to check package for %s: %v", f.URI(), err)
return nil
} }
} }
fh := f.Handle(ctx) if unexpectedAST(ctx, f) {
// Check for a cached AST first, in case getting a trimmed version would actually cause a re-parse. return nil
for _, m := range []source.ParseMode{
source.ParseHeader,
source.ParseExported,
source.ParseFull,
} {
if m < mode {
continue
} }
if v, ok := f.view.session.cache.store.Cached(parseKey{ return f.token
file: fh.Identity(), }
mode: m,
}).(*parseGoData); ok { func (f *goFile) GetAnyAST(ctx context.Context) *ast.File {
return v.ast, v.err f.view.mu.Lock()
defer f.view.mu.Unlock()
if f.isDirty() {
if _, err := f.view.loadParseTypecheck(ctx, f); err != nil {
f.View().Session().Logger().Errorf(ctx, "unable to check package for %s: %v", f.URI(), err)
return nil
} }
} }
ph := f.view.session.cache.ParseGoHandle(fh, mode) if f.ast == nil {
return ph.Parse(ctx) return nil
}
return f.ast.file
}
func (f *goFile) GetAST(ctx context.Context) *ast.File {
f.view.mu.Lock()
defer f.view.mu.Unlock()
if f.isDirty() || f.astIsTrimmed() {
if _, err := f.view.loadParseTypecheck(ctx, f); err != nil {
f.View().Session().Logger().Errorf(ctx, "unable to check package for %s: %v", f.URI(), err)
return nil
}
}
if unexpectedAST(ctx, f) {
return nil
}
return f.ast.file
} }
func (f *goFile) GetPackages(ctx context.Context) []source.Package { func (f *goFile) GetPackages(ctx context.Context) []source.Package {
f.view.mu.Lock() f.view.mu.Lock()
defer f.view.mu.Unlock() defer f.view.mu.Unlock()
ctx = telemetry.File.With(ctx, f.URI())
if f.isDirty(ctx) || f.wrongParseMode(ctx, source.ParseFull) { if f.isDirty() || f.astIsTrimmed() {
if errs, err := f.view.loadParseTypecheck(ctx, f); err != nil { if errs, err := f.view.loadParseTypecheck(ctx, f); err != nil {
log.Error(ctx, "unable to check package", err, telemetry.File) f.View().Session().Logger().Errorf(ctx, "unable to check package for %s: %v", f.URI(), err)
// Create diagnostics for errors if we are able to. // Create diagnostics for errors if we are able to.
if len(errs) > 0 { if len(errs) > 0 {
@ -103,10 +109,9 @@ func (f *goFile) GetPackages(ctx context.Context) []source.Package {
return nil return nil
} }
} }
if unexpectedAST(ctx, f) {
f.mu.Lock() return nil
defer f.mu.Unlock() }
var pkgs []source.Package var pkgs []source.Package
for _, pkg := range f.pkgs { for _, pkg := range f.pkgs {
pkgs = append(pkgs, pkg) pkgs = append(pkgs, pkg)
@ -129,24 +134,26 @@ func (f *goFile) GetPackage(ctx context.Context) source.Package {
return result return result
} }
func (f *goFile) wrongParseMode(ctx context.Context, mode source.ParseMode) bool { func unexpectedAST(ctx context.Context, f *goFile) bool {
f.mu.Lock() f.mu.Lock()
defer f.mu.Unlock() defer f.mu.Unlock()
fh := f.Handle(ctx) // If the AST comes back nil, something has gone wrong.
for _, pkg := range f.pkgs { if f.ast == nil {
for _, ph := range pkg.files { f.View().Session().Logger().Errorf(ctx, "expected full AST for %s, returned nil", f.URI())
if fh.Identity() == ph.File().Identity() { return true
return ph.Mode() < mode
}
} }
// If the AST comes back trimmed, something has gone wrong.
if f.ast.isTrimmed {
f.View().Session().Logger().Errorf(ctx, "expected full AST for %s, returned trimmed", f.URI())
return true
} }
return false return false
} }
// isDirty is true if the file needs to be type-checked. // isDirty is true if the file needs to be type-checked.
// It assumes that the file's view's mutex is held by the caller. // It assumes that the file's view's mutex is held by the caller.
func (f *goFile) isDirty(ctx context.Context) bool { func (f *goFile) isDirty() bool {
f.mu.Lock() f.mu.Lock()
defer f.mu.Unlock() defer f.mu.Unlock()
@ -166,16 +173,14 @@ func (f *goFile) isDirty(ctx context.Context) bool {
if len(f.missingImports) > 0 { if len(f.missingImports) > 0 {
return true return true
} }
fh := f.Handle(ctx) return f.token == nil || f.ast == nil
for _, pkg := range f.pkgs { }
for _, file := range pkg.files {
// There is a type-checked package for the current file handle. func (f *goFile) astIsTrimmed() bool {
if file.File().Identity() == fh.Identity() { f.mu.Lock()
return false defer f.mu.Unlock()
}
} return f.ast != nil && f.ast.isTrimmed
}
return true
} }
func (f *goFile) GetActiveReverseDeps(ctx context.Context) []source.GoFile { func (f *goFile) GetActiveReverseDeps(ctx context.Context) []source.GoFile {
@ -221,7 +226,7 @@ func (v *view) reverseDeps(ctx context.Context, seen map[packageID]struct{}, res
} }
for _, filename := range m.files { for _, filename := range m.files {
uri := span.FileURI(filename) uri := span.FileURI(filename)
if f, err := v.getFile(ctx, uri); err == nil && v.session.IsOpen(uri) { if f, err := v.getFile(uri); err == nil && v.session.IsOpen(uri) {
results[f.(*goFile)] = struct{}{} results[f.(*goFile)] = struct{}{}
} }
} }

View File

@ -10,10 +10,6 @@ import (
"golang.org/x/tools/go/packages" "golang.org/x/tools/go/packages"
"golang.org/x/tools/internal/lsp/source" "golang.org/x/tools/internal/lsp/source"
"golang.org/x/tools/internal/lsp/telemetry"
"golang.org/x/tools/internal/lsp/telemetry/log"
"golang.org/x/tools/internal/lsp/telemetry/tag"
"golang.org/x/tools/internal/lsp/telemetry/trace"
"golang.org/x/tools/internal/span" "golang.org/x/tools/internal/span"
) )
@ -23,9 +19,9 @@ func (v *view) loadParseTypecheck(ctx context.Context, f *goFile) ([]packages.Er
// If the AST for this file is trimmed, and we are explicitly type-checking it, // If the AST for this file is trimmed, and we are explicitly type-checking it,
// don't ignore function bodies. // don't ignore function bodies.
if f.wrongParseMode(ctx, source.ParseFull) { if f.astIsTrimmed() {
v.pcache.mu.Lock() v.pcache.mu.Lock()
f.invalidateAST(ctx) f.invalidateAST()
v.pcache.mu.Unlock() v.pcache.mu.Unlock()
} }
@ -47,10 +43,10 @@ func (v *view) loadParseTypecheck(ctx context.Context, f *goFile) ([]packages.Er
} }
// Start prefetching direct imports. // Start prefetching direct imports.
for importID := range m.children { for importID := range m.children {
go imp.getPkg(ctx, importID) go imp.getPkg(importID)
} }
// Type-check package. // Type-check package.
pkg, err := imp.getPkg(ctx, imp.topLevelPkgID) pkg, err := imp.getPkg(imp.topLevelPkgID)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -79,18 +75,22 @@ func sameSet(x, y map[packagePath]struct{}) bool {
// checkMetadata determines if we should run go/packages.Load for this file. // checkMetadata determines if we should run go/packages.Load for this file.
// If yes, update the metadata for the file and its package. // If yes, update the metadata for the file and its package.
func (v *view) checkMetadata(ctx context.Context, f *goFile) (map[packageID]*metadata, []packages.Error, error) { func (v *view) checkMetadata(ctx context.Context, f *goFile) (map[packageID]*metadata, []packages.Error, error) {
if !v.runGopackages(ctx, f) { f.mu.Lock()
defer f.mu.Unlock()
if !v.parseImports(ctx, f) {
return f.meta, nil, nil return f.meta, nil, nil
} }
// Check if the context has been canceled before calling packages.Load. // Reset the file's metadata and type information if we are re-running `go list`.
if ctx.Err() != nil { for k := range f.meta {
return nil, nil, ctx.Err() delete(f.meta, k)
}
for k := range f.pkgs {
delete(f.pkgs, k)
} }
ctx, done := trace.StartSpan(ctx, "packages.Load", telemetry.File.Of(f.filename())) pkgs, err := packages.Load(v.buildConfig(), fmt.Sprintf("file=%s", f.filename()))
defer done()
pkgs, err := packages.Load(v.Config(ctx), fmt.Sprintf("file=%s", f.filename()))
if len(pkgs) == 0 { if len(pkgs) == 0 {
if err == nil { if err == nil {
err = fmt.Errorf("go/packages.Load: no packages found for %s", f.filename()) err = fmt.Errorf("go/packages.Load: no packages found for %s", f.filename())
@ -103,91 +103,53 @@ func (v *view) checkMetadata(ctx context.Context, f *goFile) (map[packageID]*met
}, },
}, err }, err
} }
// Track missing imports as we look at the package's errors. // Track missing imports as we look at the package's errors.
missingImports := make(map[packagePath]struct{}) missingImports := make(map[packagePath]struct{})
log.Print(ctx, "go/packages.Load", tag.Of("packages", len(pkgs)))
for _, pkg := range pkgs { for _, pkg := range pkgs {
log.Print(ctx, "go/packages.Load", tag.Of("package", pkg.PkgPath), tag.Of("files", pkg.CompiledGoFiles))
// If the package comes back with errors from `go list`, // If the package comes back with errors from `go list`,
// don't bother type-checking it. // don't bother type-checking it.
if len(pkg.Errors) > 0 { if len(pkg.Errors) > 0 {
return nil, pkg.Errors, fmt.Errorf("package %s has errors, skipping type-checking", pkg.PkgPath) return nil, pkg.Errors, fmt.Errorf("package %s has errors, skipping type-checking", pkg.PkgPath)
} }
for importPath, importPkg := range pkg.Imports {
// If we encounter a package we cannot import, mark it as missing.
if importPkg.PkgPath != "unsafe" && len(importPkg.CompiledGoFiles) == 0 {
missingImports[packagePath(importPath)] = struct{}{}
}
}
// Build the import graph for this package. // Build the import graph for this package.
if err := v.link(ctx, packagePath(pkg.PkgPath), pkg, nil, missingImports); err != nil { v.link(ctx, packagePath(pkg.PkgPath), pkg, nil)
return nil, nil, err
} }
}
m, err := validateMetadata(ctx, missingImports, f)
if err != nil {
return nil, nil, err
}
return m, nil, nil
}
func validateMetadata(ctx context.Context, missingImports map[packagePath]struct{}, f *goFile) (map[packageID]*metadata, error) {
f.mu.Lock()
defer f.mu.Unlock()
// If `go list` failed to get data for the file in question (this should never happen). // If `go list` failed to get data for the file in question (this should never happen).
if len(f.meta) == 0 { if len(f.meta) == 0 {
return nil, fmt.Errorf("loadParseTypecheck: no metadata found for %v", f.filename()) return nil, nil, fmt.Errorf("loadParseTypecheck: no metadata found for %v", f.filename())
} }
// If we have already seen these missing imports before, and we have type information, // If we have already seen these missing imports before, and we have type information,
// there is no need to continue. // there is no need to continue.
if sameSet(missingImports, f.missingImports) && len(f.pkgs) != 0 { if sameSet(missingImports, f.missingImports) && len(f.pkgs) != 0 {
return nil, nil return nil, nil, nil
} }
// Otherwise, update the missing imports map. // Otherwise, update the missing imports map.
f.missingImports = missingImports f.missingImports = missingImports
return f.meta, nil
return f.meta, nil, nil
} }
// reparseImports reparses a file's package and import declarations to // reparseImports reparses a file's package and import declarations to
// determine if they have changed. // determine if they have changed.
func (v *view) runGopackages(ctx context.Context, f *goFile) (result bool) { func (v *view) parseImports(ctx context.Context, f *goFile) bool {
f.mu.Lock()
defer func() {
// Clear metadata if we are intending to re-run go/packages.
if result {
// Reset the file's metadata and type information if we are re-running `go list`.
for k := range f.meta {
delete(f.meta, k)
}
for k := range f.pkgs {
delete(f.pkgs, k)
}
}
f.mu.Unlock()
}()
if len(f.meta) == 0 || len(f.missingImports) > 0 { if len(f.meta) == 0 || len(f.missingImports) > 0 {
return true return true
} }
// Get file content in case we don't already have it. // Get file content in case we don't already have it.
parsed, err := v.session.cache.ParseGoHandle(f.Handle(ctx), source.ParseHeader).Parse(ctx) parsed, _ := v.session.cache.ParseGoHandle(f.Handle(ctx), source.ParseHeader).Parse(ctx)
if err == context.Canceled {
return false
}
if parsed == nil { if parsed == nil {
return true return true
} }
// TODO: Add support for re-running `go list` when the package name changes.
// Check if the package's name has changed, by checking if this is a filename
// we already know about, and if so, check if its package name has changed.
for _, m := range f.meta {
for _, filename := range m.files {
if filename == f.URI().Filename() {
if m.name != parsed.Name.Name {
return true
}
}
}
}
// If the package's imports have changed, re-run `go list`. // If the package's imports have changed, re-run `go list`.
if len(f.imports) != len(parsed.Imports) { if len(f.imports) != len(parsed.Imports) {
@ -199,11 +161,10 @@ func (v *view) runGopackages(ctx context.Context, f *goFile) (result bool) {
return true return true
} }
} }
return false return false
} }
func (v *view) link(ctx context.Context, pkgPath packagePath, pkg *packages.Package, parent *metadata, missingImports map[packagePath]struct{}) error { func (v *view) link(ctx context.Context, pkgPath packagePath, pkg *packages.Package, parent *metadata) *metadata {
id := packageID(pkg.ID) id := packageID(pkg.ID)
m, ok := v.mcache.packages[id] m, ok := v.mcache.packages[id]
@ -211,7 +172,7 @@ func (v *view) link(ctx context.Context, pkgPath packagePath, pkg *packages.Pack
// so relevant packages get parsed and type-checked again. // so relevant packages get parsed and type-checked again.
if ok && !filenamesIdentical(m.files, pkg.CompiledGoFiles) { if ok && !filenamesIdentical(m.files, pkg.CompiledGoFiles) {
v.pcache.mu.Lock() v.pcache.mu.Lock()
v.remove(ctx, id, make(map[packageID]struct{})) v.remove(id, make(map[packageID]struct{}))
v.pcache.mu.Unlock() v.pcache.mu.Unlock()
} }
@ -231,20 +192,16 @@ func (v *view) link(ctx context.Context, pkgPath packagePath, pkg *packages.Pack
m.name = pkg.Name m.name = pkg.Name
m.files = pkg.CompiledGoFiles m.files = pkg.CompiledGoFiles
for _, filename := range m.files { for _, filename := range m.files {
f, err := v.getFile(ctx, span.FileURI(filename)) if f, _ := v.getFile(span.FileURI(filename)); f != nil {
if err != nil { if gof, ok := f.(*goFile); ok {
log.Error(ctx, "no file", err, telemetry.File.Of(filename))
continue
}
gof, ok := f.(*goFile)
if !ok {
log.Error(ctx, "not a Go file", nil, telemetry.File.Of(filename))
continue
}
if gof.meta == nil { if gof.meta == nil {
gof.meta = make(map[packageID]*metadata) gof.meta = make(map[packageID]*metadata)
} }
gof.meta[m.id] = m gof.meta[m.id] = m
} else {
v.Session().Logger().Errorf(ctx, "not a Go file: %s", f.URI())
}
}
} }
// Connect the import graph. // Connect the import graph.
if parent != nil { if parent != nil {
@ -252,19 +209,8 @@ func (v *view) link(ctx context.Context, pkgPath packagePath, pkg *packages.Pack
parent.children[id] = true parent.children[id] = true
} }
for importPath, importPkg := range pkg.Imports { for importPath, importPkg := range pkg.Imports {
importPkgPath := packagePath(importPath)
if importPkgPath == pkgPath {
return fmt.Errorf("cycle detected in %s", importPath)
}
// Don't remember any imports with significant errors.
if importPkgPath != "unsafe" && len(pkg.CompiledGoFiles) == 0 {
missingImports[importPkgPath] = struct{}{}
continue
}
if _, ok := m.children[packageID(importPkg.ID)]; !ok { if _, ok := m.children[packageID(importPkg.ID)]; !ok {
if err := v.link(ctx, importPkgPath, importPkg, m, missingImports); err != nil { v.link(ctx, packagePath(importPath), importPkg, m)
log.Error(ctx, "error in dependency", err, telemetry.Package.Of(importPkgPath))
}
} }
} }
// Clear out any imports that have been removed. // Clear out any imports that have been removed.
@ -280,7 +226,7 @@ func (v *view) link(ctx context.Context, pkgPath packagePath, pkg *packages.Pack
delete(m.children, importID) delete(m.children, importID)
delete(child.parents, id) delete(child.parents, id)
} }
return nil return m
} }
// filenamesIdentical reports whether two sets of file names are identical. // filenamesIdentical reports whether two sets of file names are identical.
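The metadata comparison above depends on an order-insensitive comparison of file lists. The real implementation is not shown in this hunk, so the following is only a sketch, assuming set-style semantics and illustrative names:

package main

import "fmt"

// filenamesIdentical reports whether two file lists contain the same names,
// ignoring order. Sketch only; the cached implementation may differ.
func filenamesIdentical(a, b []string) bool {
	if len(a) != len(b) {
		return false
	}
	seen := make(map[string]struct{}, len(a))
	for _, name := range a {
		seen[name] = struct{}{}
	}
	for _, name := range b {
		if _, ok := seen[name]; !ok {
			return false
		}
	}
	return true
}

func main() {
	fmt.Println(filenamesIdentical([]string{"a.go", "b.go"}, []string{"b.go", "a.go"})) // true
}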

View File

@ -6,7 +6,6 @@ package cache
import ( import (
"context" "context"
"fmt"
"go/token" "go/token"
) )
@ -15,10 +14,7 @@ type modFile struct {
fileBase fileBase
} }
func (*modFile) GetToken(context.Context) (*token.File, error) { func (*modFile) GetToken(context.Context) *token.File { return nil }
return nil, fmt.Errorf("GetToken: not implemented")
}
func (*modFile) setContent(content []byte) {} func (*modFile) setContent(content []byte) {}
func (*modFile) filename() string { return "" } func (*modFile) filename() string { return "" }
func (*modFile) isActive() bool { return false } func (*modFile) isActive() bool { return false }

View File

@ -13,13 +13,11 @@ import (
"go/token" "go/token"
"golang.org/x/tools/internal/lsp/source" "golang.org/x/tools/internal/lsp/source"
"golang.org/x/tools/internal/lsp/telemetry"
"golang.org/x/tools/internal/lsp/telemetry/trace"
"golang.org/x/tools/internal/memoize" "golang.org/x/tools/internal/memoize"
) )
// Limits the number of parallel parser calls per process. // Limits the number of parallel parser calls per process.
var parseLimit = make(chan struct{}, 20) var parseLimit = make(chan bool, 20)
// parseKey uniquely identifies a parsed Go file. // parseKey uniquely identifies a parsed Go file.
type parseKey struct { type parseKey struct {
@ -75,13 +73,11 @@ func (h *parseGoHandle) Parse(ctx context.Context) (*ast.File, error) {
} }
func parseGo(ctx context.Context, c *cache, fh source.FileHandle, mode source.ParseMode) (*ast.File, error) { func parseGo(ctx context.Context, c *cache, fh source.FileHandle, mode source.ParseMode) (*ast.File, error) {
ctx, done := trace.StartSpan(ctx, "cache.parseGo", telemetry.File.Of(fh.Identity().URI.Filename()))
defer done()
buf, _, err := fh.Read(ctx) buf, _, err := fh.Read(ctx)
if err != nil { if err != nil {
return nil, err return nil, err
} }
parseLimit <- struct{}{} parseLimit <- true
defer func() { <-parseLimit }() defer func() { <-parseLimit }()
parserMode := parser.AllErrors | parser.ParseComments parserMode := parser.AllErrors | parser.ParseComments
if mode == source.ParseHeader { if mode == source.ParseHeader {
@ -141,8 +137,8 @@ func isEllipsisArray(n ast.Expr) bool {
return ok return ok
} }
// fix inspects the AST and potentially modifies any *ast.BadStmts so that it can be // fix inspects and potentially modifies any *ast.BadStmts or *ast.BadExprs in the AST.
// type-checked more effectively. // We attempt to modify the AST such that we can type-check it more effectively.
func fix(ctx context.Context, file *ast.File, tok *token.File, src []byte) error { func fix(ctx context.Context, file *ast.File, tok *token.File, src []byte) error {
var parent ast.Node var parent ast.Node
var err error var err error
@ -208,7 +204,7 @@ func parseDeferOrGoStmt(bad *ast.BadStmt, parent ast.Node, tok *token.File, src
var to, curr token.Pos var to, curr token.Pos
FindTo: FindTo:
for { for {
curr, tkn, _ = s.Scan() curr, tkn, lit = s.Scan()
// TODO(rstambler): This still needs more handling to work correctly. // TODO(rstambler): This still needs more handling to work correctly.
// We encounter a specific issue with code that looks like this: // We encounter a specific issue with code that looks like this:
// //
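The parseLimit change above (chan bool to chan struct{}) is the buffered-channel counting-semaphore pattern: the channel's capacity bounds how many parses run at once. A minimal self-contained sketch of the idea, with illustrative names and a limit of 3:

package main

import (
	"fmt"
	"sync"
)

// sem is a counting semaphore; struct{} is used because only the slot
// matters, not the value stored in it.
var sem = make(chan struct{}, 3)

func parseOne(id int) {
	sem <- struct{}{}        // acquire a slot; blocks while 3 parses are running
	defer func() { <-sem }() // release the slot
	fmt.Println("parsing", id)
}

func main() {
	var wg sync.WaitGroup
	for i := 0; i < 10; i++ {
		wg.Add(1)
		go func(i int) {
			defer wg.Done()
			parseOne(i)
		}(i)
	}
	wg.Wait()
}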

View File

@ -22,7 +22,7 @@ type pkg struct {
id packageID id packageID
pkgPath packagePath pkgPath packagePath
files []source.ParseGoHandle files []*astFile
errors []packages.Error errors []packages.Error
imports map[packagePath]*pkg imports map[packagePath]*pkg
types *types.Package types *types.Package
@ -149,18 +149,17 @@ func (pkg *pkg) PkgPath() string {
func (pkg *pkg) GetFilenames() []string { func (pkg *pkg) GetFilenames() []string {
filenames := make([]string, 0, len(pkg.files)) filenames := make([]string, 0, len(pkg.files))
for _, ph := range pkg.files { for _, f := range pkg.files {
filenames = append(filenames, ph.File().Identity().URI.Filename()) filenames = append(filenames, f.uri.Filename())
} }
return filenames return filenames
} }
func (pkg *pkg) GetSyntax(ctx context.Context) []*ast.File { func (pkg *pkg) GetSyntax() []*ast.File {
var syntax []*ast.File var syntax []*ast.File
for _, ph := range pkg.files { for _, f := range pkg.files {
file, _ := ph.Parse(ctx) if f.file != nil {
if file != nil { syntax = append(syntax, f.file)
syntax = append(syntax, file)
} }
} }
return syntax return syntax

View File

@ -16,16 +16,15 @@ import (
"golang.org/x/tools/internal/lsp/debug" "golang.org/x/tools/internal/lsp/debug"
"golang.org/x/tools/internal/lsp/source" "golang.org/x/tools/internal/lsp/source"
"golang.org/x/tools/internal/lsp/telemetry" "golang.org/x/tools/internal/lsp/xlog"
"golang.org/x/tools/internal/lsp/telemetry/log"
"golang.org/x/tools/internal/lsp/telemetry/trace"
"golang.org/x/tools/internal/span" "golang.org/x/tools/internal/span"
"golang.org/x/tools/internal/xcontext"
) )
type session struct { type session struct {
cache *cache cache *cache
id string id string
// the logger to use to communicate back with the client
log xlog.Logger
viewMu sync.Mutex viewMu sync.Mutex
views []*view views []*view
@ -45,9 +44,9 @@ type overlay struct {
hash string hash string
kind source.FileKind kind source.FileKind
// sameContentOnDisk is true if a file has been saved on disk, // onDisk is true if a file has been saved on disk,
// and therefore does not need to be part of the overlay sent to go/packages. // and therefore does not need to be part of the overlay sent to go/packages.
sameContentOnDisk bool onDisk bool
} }
func (s *session) Shutdown(ctx context.Context) { func (s *session) Shutdown(ctx context.Context) {
@ -65,18 +64,16 @@ func (s *session) Cache() source.Cache {
return s.cache return s.cache
} }
func (s *session) NewView(ctx context.Context, name string, folder span.URI) source.View { func (s *session) NewView(name string, folder span.URI) source.View {
index := atomic.AddInt64(&viewIndex, 1) index := atomic.AddInt64(&viewIndex, 1)
s.viewMu.Lock() s.viewMu.Lock()
defer s.viewMu.Unlock() defer s.viewMu.Unlock()
// We want a true background context and not a detached context here: ctx := context.Background()
// the spans need to be unrelated, and no tag values should pollute it. backgroundCtx, cancel := context.WithCancel(ctx)
baseCtx := trace.Detach(xcontext.Detach(ctx))
backgroundCtx, cancel := context.WithCancel(baseCtx)
v := &view{ v := &view{
session: s, session: s,
id: strconv.FormatInt(index, 10), id: strconv.FormatInt(index, 10),
baseCtx: baseCtx, baseCtx: ctx,
backgroundCtx: backgroundCtx, backgroundCtx: backgroundCtx,
cancel: cancel, cancel: cancel,
name: name, name: name,
@ -95,7 +92,7 @@ func (s *session) NewView(ctx context.Context, name string, folder span.URI) sou
} }
// Preemptively build the builtin package, // Preemptively build the builtin package,
// so we immediately add builtin.go to the list of ignored files. // so we immediately add builtin.go to the list of ignored files.
v.buildBuiltinPkg(ctx) v.buildBuiltinPkg()
s.views = append(s.views, v) s.views = append(s.views, v)
// we always need to drop the view map // we always need to drop the view map
@ -181,29 +178,28 @@ func (s *session) removeView(ctx context.Context, view *view) error {
return fmt.Errorf("view %s for %v not found", view.Name(), view.Folder()) return fmt.Errorf("view %s for %v not found", view.Name(), view.Folder())
} }
// TODO: Propagate the language ID through to the view. func (s *session) Logger() xlog.Logger {
func (s *session) DidOpen(ctx context.Context, uri span.URI, _ source.FileKind, text []byte) { return s.log
ctx = telemetry.File.With(ctx, uri) }
// Mark the file as open.
s.openFiles.Store(uri, true)
// Read the file on disk and compare it to the text provided. func (s *session) DidOpen(ctx context.Context, uri span.URI) {
// If it is the same as on disk, we can avoid sending it as an overlay to go/packages. s.openFiles.Store(uri, true)
s.openOverlay(ctx, uri, text)
// Mark the file as just opened so that we know to re-run packages.Load on it. // Mark the file as just opened so that we know to re-run packages.Load on it.
// We do this because we may not be aware of all of the packages the file belongs to. // We do this because we may not be aware of all of the packages the file belongs to.
// A file may be in multiple views. // A file may be in multiple views.
// For each view, get the file and mark it as just opened.
for _, view := range s.views { for _, view := range s.views {
if strings.HasPrefix(string(uri), string(view.Folder())) { if strings.HasPrefix(string(uri), string(view.Folder())) {
f, err := view.GetFile(ctx, uri) f, err := view.GetFile(ctx, uri)
if err != nil { if err != nil {
log.Error(ctx, "error getting file", nil, telemetry.File) s.log.Errorf(ctx, "error getting file for %s", uri)
return return
} }
gof, ok := f.(*goFile) gof, ok := f.(*goFile)
if !ok { if !ok {
log.Error(ctx, "not a Go file", nil, telemetry.File) s.log.Errorf(ctx, "%s is not a Go file", uri)
return return
} }
// Mark file as open. // Mark file as open.
@ -219,7 +215,7 @@ func (s *session) DidSave(uri span.URI) {
defer s.overlayMu.Unlock() defer s.overlayMu.Unlock()
if overlay, ok := s.overlays[uri]; ok { if overlay, ok := s.overlays[uri]; ok {
overlay.sameContentOnDisk = true overlay.onDisk = true
} }
} }
@ -259,30 +255,6 @@ func (s *session) SetOverlay(uri span.URI, data []byte) {
} }
} }
// openOverlay adds the file content to the overlay.
// It also checks if the provided content is equivalent to the file's content on disk.
func (s *session) openOverlay(ctx context.Context, uri span.URI, data []byte) {
s.overlayMu.Lock()
defer func() {
s.overlayMu.Unlock()
s.filesWatchMap.Notify(uri)
}()
s.overlays[uri] = &overlay{
session: s,
uri: uri,
data: data,
hash: hashContents(data),
}
_, hash, err := s.cache.GetFile(uri).Read(ctx)
if err != nil {
log.Error(ctx, "failed to read", err, telemetry.File)
return
}
if hash == s.overlays[uri].hash {
s.overlays[uri].sameContentOnDisk = true
}
}
func (s *session) readOverlay(uri span.URI) *overlay { func (s *session) readOverlay(uri span.URI) *overlay {
s.overlayMu.Lock() s.overlayMu.Lock()
defer s.overlayMu.Unlock() defer s.overlayMu.Unlock()
@ -300,7 +272,7 @@ func (s *session) buildOverlay() map[string][]byte {
overlays := make(map[string][]byte) overlays := make(map[string][]byte)
for uri, overlay := range s.overlays { for uri, overlay := range s.overlays {
if overlay.sameContentOnDisk { if overlay.onDisk {
continue continue
} }
overlays[uri.Filename()] = overlay.data overlays[uri.Filename()] = overlay.data
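The overlay bookkeeping above skips editor buffers whose content already matches the file on disk, so they are not sent to go/packages. A rough sketch of that idea, hashing both sides and shipping only the buffers that differ; the function names here are illustrative, not the exact cache API:

package main

import (
	"crypto/sha256"
	"fmt"
	"os"
)

func hashContents(data []byte) string {
	return fmt.Sprintf("%x", sha256.Sum256(data))
}

// buildOverlay returns path->content for editor buffers that differ from
// what is currently on disk.
func buildOverlay(open map[string][]byte) map[string][]byte {
	out := make(map[string][]byte)
	for path, buf := range open {
		if disk, err := os.ReadFile(path); err == nil && hashContents(disk) == hashContents(buf) {
			continue // same content on disk; no overlay needed
		}
		out[path] = buf
	}
	return out
}

func main() {
	overlay := buildOverlay(map[string][]byte{"main.go": []byte("package main\n")})
	fmt.Println(len(overlay))
}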

View File

@ -6,7 +6,6 @@ package cache
import ( import (
"context" "context"
"fmt"
"go/token" "go/token"
) )
@ -15,10 +14,7 @@ type sumFile struct {
fileBase fileBase
} }
func (*sumFile) GetToken(context.Context) (*token.File, error) { func (*sumFile) GetToken(context.Context) *token.File { return nil }
return nil, fmt.Errorf("GetToken: not implemented")
}
func (*sumFile) setContent(content []byte) {} func (*sumFile) setContent(content []byte) {}
func (*sumFile) filename() string { return "" } func (*sumFile) filename() string { return "" }
func (*sumFile) isActive() bool { return false } func (*sumFile) isActive() bool { return false }

View File

@ -6,22 +6,17 @@ package cache
import ( import (
"context" "context"
"fmt"
"go/ast" "go/ast"
"go/parser" "go/parser"
"go/token" "go/token"
"go/types" "go/types"
"os" "os"
"path/filepath" "path/filepath"
"strings"
"sync" "sync"
"golang.org/x/tools/go/packages" "golang.org/x/tools/go/packages"
"golang.org/x/tools/internal/imports"
"golang.org/x/tools/internal/lsp/debug" "golang.org/x/tools/internal/lsp/debug"
"golang.org/x/tools/internal/lsp/source" "golang.org/x/tools/internal/lsp/source"
"golang.org/x/tools/internal/lsp/telemetry"
"golang.org/x/tools/internal/lsp/telemetry/log"
"golang.org/x/tools/internal/span" "golang.org/x/tools/internal/span"
) )
@ -53,19 +48,6 @@ type view struct {
// env is the environment to use when invoking underlying tools. // env is the environment to use when invoking underlying tools.
env []string env []string
// process is the process env for this view.
// Note: this contains cached module and filesystem state.
//
// TODO(suzmue): the state cached in the process env is specific to each view,
// however, there is state that can be shared between views that is not currently
// cached, like the module cache.
processEnv *imports.ProcessEnv
// modFileVersions stores the last seen versions of the module files that are used
// by processEnvs resolver.
// TODO(suzmue): These versions may not actually be on disk.
modFileVersions map[string]string
// buildFlags is the build flags to use when invoking underlying tools. // buildFlags is the build flags to use when invoking underlying tools.
buildFlags []string buildFlags []string
@ -129,7 +111,7 @@ func (v *view) Folder() span.URI {
// Config returns the configuration used for the view's interaction with the // Config returns the configuration used for the view's interaction with the
// go/packages API. It is shared across all views. // go/packages API. It is shared across all views.
func (v *view) Config(ctx context.Context) *packages.Config { func (v *view) buildConfig() *packages.Config {
// TODO: Should we cache the config and/or overlay somewhere? // TODO: Should we cache the config and/or overlay somewhere?
return &packages.Config{ return &packages.Config{
Dir: v.folder.Filename(), Dir: v.folder.Filename(),
@ -146,112 +128,10 @@ func (v *view) Config(ctx context.Context) *packages.Config {
ParseFile: func(*token.FileSet, string, []byte) (*ast.File, error) { ParseFile: func(*token.FileSet, string, []byte) (*ast.File, error) {
panic("go/packages must not be used to parse files") panic("go/packages must not be used to parse files")
}, },
Logf: func(format string, args ...interface{}) {
log.Print(ctx, fmt.Sprintf(format, args...))
},
Tests: true, Tests: true,
} }
} }
func (v *view) RunProcessEnvFunc(ctx context.Context, fn func(*imports.Options) error, opts *imports.Options) error {
v.mu.Lock()
defer v.mu.Unlock()
if v.processEnv == nil {
v.processEnv = v.buildProcessEnv(ctx)
}
// Before running the user provided function, clear caches in the resolver.
if v.modFilesChanged() {
if r, ok := v.processEnv.GetResolver().(*imports.ModuleResolver); ok {
// Clear the resolver cache and set Initialized to false.
r.Initialized = false
r.Main = nil
r.ModsByModPath = nil
r.ModsByDir = nil
// Reset the modFileVersions.
v.modFileVersions = nil
}
}
// Run the user function.
opts.Env = v.processEnv
if err := fn(opts); err != nil {
return err
}
// If applicable, store the file versions of the 'go.mod' files that are
// looked at by the resolver.
v.storeModFileVersions()
return nil
}
func (v *view) buildProcessEnv(ctx context.Context) *imports.ProcessEnv {
cfg := v.Config(ctx)
env := &imports.ProcessEnv{
WorkingDir: cfg.Dir,
Logf: func(format string, args ...interface{}) {
log.Print(ctx, fmt.Sprintf(format, args...))
},
}
for _, kv := range cfg.Env {
split := strings.Split(kv, "=")
if len(split) < 2 {
continue
}
switch split[0] {
case "GOPATH":
env.GOPATH = split[1]
case "GOROOT":
env.GOROOT = split[1]
case "GO111MODULE":
env.GO111MODULE = split[1]
case "GOPROXY":
env.GOPROXY = split[1]
case "GOFLAGS":
env.GOFLAGS = split[1]
case "GOSUMDB":
env.GOSUMDB = split[1]
}
}
return env
}
func (v *view) modFilesChanged() bool {
// Check the versions of the 'go.mod' files of the main module
// and modules included by a replace directive. Return true if
// any of these file versions do not match.
for filename, version := range v.modFileVersions {
if version != v.fileVersion(filename) {
return true
}
}
return false
}
func (v *view) storeModFileVersions() {
// Store the mod files versions, if we are using a ModuleResolver.
r, moduleMode := v.processEnv.GetResolver().(*imports.ModuleResolver)
if !moduleMode || !r.Initialized {
return
}
v.modFileVersions = make(map[string]string)
// Get the file versions of the 'go.mod' files of the main module
// and modules included by a replace directive in the resolver.
for _, mod := range r.ModsByModPath {
if (mod.Main || mod.Replace != nil) && mod.GoMod != "" {
v.modFileVersions[mod.GoMod] = v.fileVersion(mod.GoMod)
}
}
}
func (v *view) fileVersion(filename string) string {
uri := span.FileURI(filename)
f := v.session.GetFile(uri)
return f.Identity().Version
}
func (v *view) Env() []string { func (v *view) Env() []string {
v.mu.Lock() v.mu.Lock()
defer v.mu.Unlock() defer v.mu.Unlock()
@ -263,7 +143,6 @@ func (v *view) SetEnv(env []string) {
defer v.mu.Unlock() defer v.mu.Unlock()
//TODO: this should invalidate the entire view //TODO: this should invalidate the entire view
v.env = env v.env = env
v.processEnv = nil // recompute process env
} }
func (v *view) SetBuildFlags(buildFlags []string) { func (v *view) SetBuildFlags(buildFlags []string) {
@ -307,12 +186,9 @@ func (v *view) BuiltinPackage() *ast.Package {
// buildBuiltinPkg builds the view's builtin package. // buildBuiltinPkg builds the view's builtin package.
// It assumes that the view is not active yet, // It assumes that the view is not active yet,
// i.e. it has not been added to the session's list of views. // i.e. it has not been added to the session's list of views.
func (v *view) buildBuiltinPkg(ctx context.Context) { func (v *view) buildBuiltinPkg() {
cfg := *v.Config(ctx) cfg := *v.buildConfig()
pkgs, err := packages.Load(&cfg, "builtin") pkgs, _ := packages.Load(&cfg, "builtin")
if err != nil {
log.Error(ctx, "error getting package metadata for \"builtin\" package", err)
}
if len(pkgs) != 1 { if len(pkgs) != 1 {
v.builtinPkg, _ = ast.NewPackage(cfg.Fset, nil, nil, nil) v.builtinPkg, _ = ast.NewPackage(cfg.Fset, nil, nil, nil)
return return
@ -348,33 +224,33 @@ func (v *view) SetContent(ctx context.Context, uri span.URI, content []byte) err
// invalidateContent invalidates the content of a Go file, // invalidateContent invalidates the content of a Go file,
// including any position and type information that depends on it. // including any position and type information that depends on it.
func (f *goFile) invalidateContent(ctx context.Context) { func (f *goFile) invalidateContent() {
// Mutex acquisition order here is important. It must match the order f.handleMu.Lock()
// in loadParseTypecheck to avoid deadlocks. defer f.handleMu.Unlock()
f.view.mcache.mu.Lock() f.view.mcache.mu.Lock()
defer f.view.mcache.mu.Unlock() defer f.view.mcache.mu.Unlock()
f.view.pcache.mu.Lock() f.view.pcache.mu.Lock()
defer f.view.pcache.mu.Unlock() defer f.view.pcache.mu.Unlock()
f.handleMu.Lock() f.invalidateAST()
defer f.handleMu.Unlock()
f.invalidateAST(ctx)
f.handle = nil f.handle = nil
} }
// invalidateAST invalidates the AST of a Go file, // invalidateAST invalidates the AST of a Go file,
// including any position and type information that depends on it. // including any position and type information that depends on it.
func (f *goFile) invalidateAST(ctx context.Context) { func (f *goFile) invalidateAST() {
f.mu.Lock() f.mu.Lock()
f.ast = nil
f.token = nil
pkgs := f.pkgs pkgs := f.pkgs
f.mu.Unlock() f.mu.Unlock()
// Remove the package and all of its reverse dependencies from the cache. // Remove the package and all of its reverse dependencies from the cache.
for id, pkg := range pkgs { for id, pkg := range pkgs {
if pkg != nil { if pkg != nil {
f.view.remove(ctx, id, map[packageID]struct{}{}) f.view.remove(id, map[packageID]struct{}{})
} }
} }
} }
@ -382,7 +258,7 @@ func (f *goFile) invalidateAST(ctx context.Context) {
// remove invalidates a package and its reverse dependencies in the view's // remove invalidates a package and its reverse dependencies in the view's
// package cache. It is assumed that the caller has locked the mutexes // package cache. It is assumed that the caller has locked the mutexes
// of both the mcache and the pcache. // of both the mcache and the pcache.
func (v *view) remove(ctx context.Context, id packageID, seen map[packageID]struct{}) { func (v *view) remove(id packageID, seen map[packageID]struct{}) {
if _, ok := seen[id]; ok { if _, ok := seen[id]; ok {
return return
} }
@ -392,37 +268,20 @@ func (v *view) remove(ctx context.Context, id packageID, seen map[packageID]stru
} }
seen[id] = struct{}{} seen[id] = struct{}{}
for parentID := range m.parents { for parentID := range m.parents {
v.remove(ctx, parentID, seen) v.remove(parentID, seen)
} }
// All of the files in the package may also be holding a pointer to the // All of the files in the package may also be holding a pointer to the
// invalidated package. // invalidated package.
for _, filename := range m.files { for _, filename := range m.files {
f, err := v.findFile(span.FileURI(filename)) if f, _ := v.findFile(span.FileURI(filename)); f != nil {
if err != nil { if gof, ok := f.(*goFile); ok {
log.Error(ctx, "cannot find file", err, telemetry.File.Of(f.URI()))
continue
}
gof, ok := f.(*goFile)
if !ok {
log.Error(ctx, "non-Go file", nil, telemetry.File.Of(f.URI()))
continue
}
gof.mu.Lock() gof.mu.Lock()
if pkg, ok := gof.pkgs[id]; ok {
// TODO: Ultimately, we shouldn't need this.
// Preemptively delete all of the cached keys if we are invalidating a package.
for _, ph := range pkg.files {
v.session.cache.store.Delete(parseKey{
file: ph.File().Identity(),
mode: ph.Mode(),
})
}
}
delete(gof.pkgs, id) delete(gof.pkgs, id)
gof.mu.Unlock() gof.mu.Unlock()
} }
}
}
delete(v.pcache.packages, id) delete(v.pcache.packages, id)
return
} }
// FindFile returns the file if the given URI is already a part of the view. // FindFile returns the file if the given URI is already a part of the view.
@ -442,11 +301,11 @@ func (v *view) GetFile(ctx context.Context, uri span.URI) (source.File, error) {
v.mu.Lock() v.mu.Lock()
defer v.mu.Unlock() defer v.mu.Unlock()
return v.getFile(ctx, uri) return v.getFile(uri)
} }
// getFile is the unlocked internal implementation of GetFile. // getFile is the unlocked internal implementation of GetFile.
func (v *view) getFile(ctx context.Context, uri span.URI) (viewFile, error) { func (v *view) getFile(uri span.URI) (viewFile, error) {
if f, err := v.findFile(uri); err != nil { if f, err := v.findFile(uri); err != nil {
return nil, err return nil, err
} else if f != nil { } else if f != nil {
@ -485,7 +344,7 @@ func (v *view) getFile(ctx context.Context, uri span.URI) (viewFile, error) {
if !ok { if !ok {
return return
} }
gof.invalidateContent(ctx) gof.invalidateContent()
}) })
} }
v.mapFile(uri, f) v.mapFile(uri, f)
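The remove walk above invalidates a package together with everything that transitively imports it, using a seen set to stay safe on cyclic or shared parents. A reduced sketch of that reverse-dependency walk, with the graph shape and types simplified for illustration:

package main

import "fmt"

type metadata struct {
	id      string
	parents map[string]bool // IDs of packages that import this one
}

// remove deletes id and all of its (transitive) importers from packages.
func remove(packages map[string]*metadata, id string, seen map[string]struct{}) {
	if _, ok := seen[id]; ok {
		return
	}
	m, ok := packages[id]
	if !ok {
		return
	}
	seen[id] = struct{}{}
	for parentID := range m.parents {
		remove(packages, parentID, seen)
	}
	delete(packages, id)
}

func main() {
	pkgs := map[string]*metadata{
		"a": {id: "a", parents: map[string]bool{"b": true}}, // b imports a
		"b": {id: "b", parents: map[string]bool{}},
	}
	remove(pkgs, "a", map[string]struct{}{})
	fmt.Println(len(pkgs)) // 0: a and its importer b were both invalidated
}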

View File

@ -48,15 +48,9 @@ func (w *WatchMap) Watch(key interface{}, callback func()) func() {
} }
func (w *WatchMap) Notify(key interface{}) { func (w *WatchMap) Notify(key interface{}) {
// Make a copy of the watcher callbacks so we don't need to hold
// the mutex during the callbacks (to avoid deadlocks).
w.mu.Lock() w.mu.Lock()
entries := w.watchers[key] defer w.mu.Unlock()
entriesCopy := make([]watcher, len(entries)) for _, entry := range w.watchers[key] {
copy(entriesCopy, entries)
w.mu.Unlock()
for _, entry := range entriesCopy {
entry.callback() entry.callback()
} }
} }
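The left column of the hunk above copies the watcher callbacks before invoking them so the mutex is not held during the callbacks. A stand-alone sketch of that copy-then-call pattern, with types simplified:

package main

import (
	"fmt"
	"sync"
)

type notifier struct {
	mu        sync.Mutex
	callbacks []func()
}

func (n *notifier) notify() {
	// Snapshot the callbacks under the lock, then release it before calling
	// them, so a callback that re-enters the notifier cannot deadlock.
	n.mu.Lock()
	snapshot := make([]func(), len(n.callbacks))
	copy(snapshot, n.callbacks)
	n.mu.Unlock()

	for _, cb := range snapshot {
		cb()
	}
}

func main() {
	n := &notifier{}
	n.callbacks = append(n.callbacks, func() { fmt.Println("notified") })
	n.notify()
}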

View File

@ -59,7 +59,7 @@ func (c *check) Run(ctx context.Context, args ...string) error {
for _, file := range checking { for _, file := range checking {
select { select {
case <-file.hasDiagnostics: case <-file.hasDiagnostics:
case <-time.After(30 * time.Second): case <-time.Tick(30 * time.Second):
return fmt.Errorf("timed out waiting for results from %v", file.uri) return fmt.Errorf("timed out waiting for results from %v", file.uri)
} }
file.diagnosticsMu.Lock() file.diagnosticsMu.Lock()
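The change above swaps time.Tick for time.After in a one-shot 30-second wait: time.Tick allocates a Ticker that is never stopped, while time.After is appropriate for a single timeout. A minimal sketch of the select-with-timeout pattern:

package main

import (
	"fmt"
	"time"
)

func main() {
	done := make(chan struct{})
	go func() {
		time.Sleep(50 * time.Millisecond)
		close(done)
	}()

	select {
	case <-done:
		fmt.Println("got result")
	case <-time.After(1 * time.Second):
		fmt.Println("timed out waiting for result")
	}
}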

View File

@ -5,6 +5,7 @@
package cmd_test package cmd_test
import ( import (
"context"
"fmt" "fmt"
"strings" "strings"
"testing" "testing"
@ -22,7 +23,7 @@ func (r *runner) Diagnostics(t *testing.T, data tests.Diagnostics) {
fname := uri.Filename() fname := uri.Filename()
args := []string{"-remote=internal", "check", fname} args := []string{"-remote=internal", "check", fname}
out := captureStdOut(t, func() { out := captureStdOut(t, func() {
tool.Main(r.ctx, r.app, args) tool.Main(context.Background(), r.app, args)
}) })
// parse got into a collection of reports // parse got into a collection of reports
got := map[string]struct{}{} got := map[string]struct{}{}

View File

@ -24,10 +24,8 @@ import (
"golang.org/x/tools/internal/lsp/cache" "golang.org/x/tools/internal/lsp/cache"
"golang.org/x/tools/internal/lsp/protocol" "golang.org/x/tools/internal/lsp/protocol"
"golang.org/x/tools/internal/lsp/source" "golang.org/x/tools/internal/lsp/source"
"golang.org/x/tools/internal/lsp/telemetry/ocagent"
"golang.org/x/tools/internal/span" "golang.org/x/tools/internal/span"
"golang.org/x/tools/internal/tool" "golang.org/x/tools/internal/tool"
"golang.org/x/tools/internal/xcontext"
) )
// Application is the main application as passed to tool.Main // Application is the main application as passed to tool.Main
@ -46,9 +44,6 @@ type Application struct {
// The base cache to use for sessions from this application. // The base cache to use for sessions from this application.
cache source.Cache cache source.Cache
// The name of the binary, used in help and telemetry.
name string
// The working directory to run commands in. // The working directory to run commands in.
wd string wd string
@ -60,28 +55,23 @@ type Application struct {
// Enable verbose logging // Enable verbose logging
Verbose bool `flag:"v" help:"Verbose output"` Verbose bool `flag:"v" help:"Verbose output"`
// Control ocagent export of telemetry
OCAgent string `flag:"ocagent" help:"The address of the ocagent, or off"`
} }
// Returns a new Application ready to run. // Returns a new Application ready to run.
func New(name, wd string, env []string) *Application { func New(wd string, env []string) *Application {
if wd == "" { if wd == "" {
wd, _ = os.Getwd() wd, _ = os.Getwd()
} }
app := &Application{ app := &Application{
cache: cache.New(), cache: cache.New(),
name: name,
wd: wd, wd: wd,
env: env, env: env,
OCAgent: "off", //TODO: Remove this line to default the exporter to on
} }
return app return app
} }
// Name implements tool.Application returning the binary name. // Name implements tool.Application returning the binary name.
func (app *Application) Name() string { return app.name } func (app *Application) Name() string { return "gopls" }
// Usage implements tool.Application returning empty extra argument usage. // Usage implements tool.Application returning empty extra argument usage.
func (app *Application) Usage() string { return "<command> [command-flags] [command-args]" } func (app *Application) Usage() string { return "<command> [command-flags] [command-args]" }
@ -111,7 +101,6 @@ gopls flags are:
// If no arguments are passed it will invoke the server sub command, as a // If no arguments are passed it will invoke the server sub command, as a
// temporary measure for compatibility. // temporary measure for compatibility.
func (app *Application) Run(ctx context.Context, args ...string) error { func (app *Application) Run(ctx context.Context, args ...string) error {
ocagent.Export(app.name, app.OCAgent)
app.Serve.app = app app.Serve.app = app
if len(args) == 0 { if len(args) == 0 {
tool.Main(ctx, &app.Serve, args) tool.Main(ctx, &app.Serve, args)
@ -150,7 +139,7 @@ func (app *Application) connect(ctx context.Context) (*connection, error) {
switch app.Remote { switch app.Remote {
case "": case "":
connection := newConnection(app) connection := newConnection(app)
ctx, connection.Server = lsp.NewClientServer(ctx, app.cache, connection.Client) connection.Server = lsp.NewClientServer(app.cache, connection.Client)
return connection, connection.initialize(ctx) return connection, connection.initialize(ctx)
case "internal": case "internal":
internalMu.Lock() internalMu.Lock()
@ -159,16 +148,13 @@ func (app *Application) connect(ctx context.Context) (*connection, error) {
return c, nil return c, nil
} }
connection := newConnection(app) connection := newConnection(app)
ctx := xcontext.Detach(ctx) //TODO: a way of shutting down the internal server ctx := context.Background() //TODO: a way of shutting down the internal server
cr, sw, _ := os.Pipe() cr, sw, _ := os.Pipe()
sr, cw, _ := os.Pipe() sr, cw, _ := os.Pipe()
var jc *jsonrpc2.Conn var jc *jsonrpc2.Conn
ctx, jc, connection.Server = protocol.NewClient(ctx, jsonrpc2.NewHeaderStream(cr, cw), connection.Client) jc, connection.Server, _ = protocol.NewClient(jsonrpc2.NewHeaderStream(cr, cw), connection.Client)
go jc.Run(ctx) go jc.Run(ctx)
go func() { go lsp.NewServer(app.cache, jsonrpc2.NewHeaderStream(sr, sw)).Run(ctx)
ctx, srv := lsp.NewServer(ctx, app.cache, jsonrpc2.NewHeaderStream(sr, sw))
srv.Run(ctx)
}()
if err := connection.initialize(ctx); err != nil { if err := connection.initialize(ctx); err != nil {
return nil, err return nil, err
} }
@ -182,7 +168,7 @@ func (app *Application) connect(ctx context.Context) (*connection, error) {
} }
stream := jsonrpc2.NewHeaderStream(conn, conn) stream := jsonrpc2.NewHeaderStream(conn, conn)
var jc *jsonrpc2.Conn var jc *jsonrpc2.Conn
ctx, jc, connection.Server = protocol.NewClient(ctx, stream, connection.Client) jc, connection.Server, _ = protocol.NewClient(stream, connection.Client)
go jc.Run(ctx) go jc.Run(ctx)
return connection, connection.initialize(ctx) return connection, connection.initialize(ctx)
} }
@ -348,14 +334,12 @@ func (c *cmdClient) getFile(ctx context.Context, uri span.URI) *cmdFile {
func (c *connection) AddFile(ctx context.Context, uri span.URI) *cmdFile { func (c *connection) AddFile(ctx context.Context, uri span.URI) *cmdFile {
c.Client.filesMu.Lock() c.Client.filesMu.Lock()
defer c.Client.filesMu.Unlock() defer c.Client.filesMu.Unlock()
file := c.Client.getFile(ctx, uri) file := c.Client.getFile(ctx, uri)
if !file.added { if !file.added {
file.added = true file.added = true
p := &protocol.DidOpenTextDocumentParams{} p := &protocol.DidOpenTextDocumentParams{}
p.TextDocument.URI = string(uri) p.TextDocument.URI = string(uri)
p.TextDocument.Text = string(file.mapper.Content) p.TextDocument.Text = string(file.mapper.Content)
p.TextDocument.LanguageID = source.DetectLanguage("", file.uri.Filename()).String()
if err := c.Server.DidOpen(ctx, p); err != nil { if err := c.Server.DidOpen(ctx, p); err != nil {
file.err = fmt.Errorf("%v: %v", uri, err) file.err = fmt.Errorf("%v: %v", uri, err)
} }

View File

@ -6,7 +6,6 @@ package cmd_test
import ( import (
"bytes" "bytes"
"context"
"io/ioutil" "io/ioutil"
"os" "os"
"path/filepath" "path/filepath"
@ -25,7 +24,6 @@ type runner struct {
exporter packagestest.Exporter exporter packagestest.Exporter
data *tests.Data data *tests.Data
app *cmd.Application app *cmd.Application
ctx context.Context
} }
func TestCommandLine(t *testing.T) { func TestCommandLine(t *testing.T) {
@ -39,8 +37,7 @@ func testCommandLine(t *testing.T, exporter packagestest.Exporter) {
r := &runner{ r := &runner{
exporter: exporter, exporter: exporter,
data: data, data: data,
app: cmd.New("gopls-test", data.Config.Dir, data.Exported.Config.Env), app: cmd.New(data.Config.Dir, data.Exported.Config.Env),
ctx: tests.Context(t),
} }
tests.Run(t, r, data) tests.Run(t, r, data)
} }

View File

@ -5,6 +5,7 @@
package cmd_test package cmd_test
import ( import (
"context"
"fmt" "fmt"
"os" "os"
"path/filepath" "path/filepath"
@ -55,7 +56,7 @@ func TestDefinitionHelpExample(t *testing.T) {
fmt.Sprintf("%v:#%v", thisFile, cmd.ExampleOffset)} { fmt.Sprintf("%v:#%v", thisFile, cmd.ExampleOffset)} {
args := append(baseArgs, query) args := append(baseArgs, query)
got := captureStdOut(t, func() { got := captureStdOut(t, func() {
tool.Main(tests.Context(t), cmd.New("gopls-test", "", nil), args) tool.Main(context.Background(), cmd.New("", nil), args)
}) })
if !expect.MatchString(got) { if !expect.MatchString(got) {
t.Errorf("test with %v\nexpected:\n%s\ngot:\n%s", args, expect, got) t.Errorf("test with %v\nexpected:\n%s\ngot:\n%s", args, expect, got)
@ -83,7 +84,7 @@ func (r *runner) Definition(t *testing.T, data tests.Definitions) {
uri := d.Src.URI() uri := d.Src.URI()
args = append(args, fmt.Sprint(d.Src)) args = append(args, fmt.Sprint(d.Src))
got := captureStdOut(t, func() { got := captureStdOut(t, func() {
tool.Main(r.ctx, r.app, args) tool.Main(context.Background(), r.app, args)
}) })
got = normalizePaths(r.data, got) got = normalizePaths(r.data, got)
if mode&jsonGoDef != 0 && runtime.GOOS == "windows" { if mode&jsonGoDef != 0 && runtime.GOOS == "windows" {

View File

@ -5,6 +5,7 @@
package cmd_test package cmd_test
import ( import (
"context"
"os/exec" "os/exec"
"regexp" "regexp"
"strings" "strings"
@ -37,9 +38,9 @@ func (r *runner) Format(t *testing.T, data tests.Formats) {
//TODO: our error handling differs, for now just skip unformattable files //TODO: our error handling differs, for now just skip unformattable files
continue continue
} }
app := cmd.New("gopls-test", r.data.Config.Dir, r.data.Config.Env) app := cmd.New(r.data.Config.Dir, r.data.Config.Env)
got := captureStdOut(t, func() { got := captureStdOut(t, func() {
tool.Main(r.ctx, app, append([]string{"-remote=internal", "format"}, args...)) tool.Main(context.Background(), app, append([]string{"-remote=internal", "format"}, args...))
}) })
got = normalizePaths(r.data, got) got = normalizePaths(r.data, got)
// check the first two lines are the expected file header // check the first two lines are the expected file header

View File

@ -20,9 +20,6 @@ import (
"golang.org/x/tools/internal/jsonrpc2" "golang.org/x/tools/internal/jsonrpc2"
"golang.org/x/tools/internal/lsp" "golang.org/x/tools/internal/lsp"
"golang.org/x/tools/internal/lsp/debug" "golang.org/x/tools/internal/lsp/debug"
"golang.org/x/tools/internal/lsp/telemetry"
"golang.org/x/tools/internal/lsp/telemetry/tag"
"golang.org/x/tools/internal/lsp/telemetry/trace"
"golang.org/x/tools/internal/tool" "golang.org/x/tools/internal/tool"
) )
@ -82,8 +79,8 @@ func (s *Serve) Run(ctx context.Context, args ...string) error {
} }
// For debugging purposes only. // For debugging purposes only.
run := func(ctx context.Context, srv *lsp.Server) { run := func(srv *lsp.Server) {
srv.Conn.AddHandler(&handler{loggingRPCs: s.Trace, out: out}) srv.Conn.Logger = logger(s.Trace, out)
go srv.Run(ctx) go srv.Run(ctx)
} }
if s.Address != "" { if s.Address != "" {
@ -93,8 +90,8 @@ func (s *Serve) Run(ctx context.Context, args ...string) error {
return lsp.RunServerOnPort(ctx, s.app.cache, s.Port, run) return lsp.RunServerOnPort(ctx, s.app.cache, s.Port, run)
} }
stream := jsonrpc2.NewHeaderStream(os.Stdin, os.Stdout) stream := jsonrpc2.NewHeaderStream(os.Stdin, os.Stdout)
ctx, srv := lsp.NewServer(ctx, s.app.cache, stream) srv := lsp.NewServer(s.app.cache, stream)
srv.Conn.AddHandler(&handler{loggingRPCs: s.Trace, out: out}) srv.Conn.Logger = logger(s.Trace, out)
return srv.Run(ctx) return srv.Run(ctx)
} }
@ -118,121 +115,14 @@ func (s *Serve) forward() error {
return <-errc return <-errc
} }
type handler struct { func logger(trace bool, out io.Writer) jsonrpc2.Logger {
loggingRPCs bool return func(direction jsonrpc2.Direction, id *jsonrpc2.ID, elapsed time.Duration, method string, payload *json.RawMessage, err *jsonrpc2.Error) {
out io.Writer if !trace {
}
type rpcStats struct {
method string
direction jsonrpc2.Direction
id *jsonrpc2.ID
payload *json.RawMessage
start time.Time
delivering func()
close func()
}
type statsKeyType int
const statsKey = statsKeyType(0)
func (h *handler) Deliver(ctx context.Context, r *jsonrpc2.Request, delivered bool) bool {
stats := h.getStats(ctx)
if stats != nil {
stats.delivering()
}
return false
}
func (h *handler) Cancel(ctx context.Context, conn *jsonrpc2.Conn, id jsonrpc2.ID, cancelled bool) bool {
return false
}
func (h *handler) Request(ctx context.Context, direction jsonrpc2.Direction, r *jsonrpc2.WireRequest) context.Context {
if r.Method == "" {
panic("no method in rpc stats")
}
stats := &rpcStats{
method: r.Method,
start: time.Now(),
direction: direction,
payload: r.Params,
}
ctx = context.WithValue(ctx, statsKey, stats)
mode := telemetry.Outbound
if direction == jsonrpc2.Receive {
mode = telemetry.Inbound
}
ctx, stats.close = trace.StartSpan(ctx, r.Method,
tag.Tag{Key: telemetry.Method, Value: r.Method},
tag.Tag{Key: telemetry.RPCDirection, Value: mode},
tag.Tag{Key: telemetry.RPCID, Value: r.ID},
)
telemetry.Started.Record(ctx, 1)
_, stats.delivering = trace.StartSpan(ctx, "queued")
return ctx
}
func (h *handler) Response(ctx context.Context, direction jsonrpc2.Direction, r *jsonrpc2.WireResponse) context.Context {
stats := h.getStats(ctx)
h.logRPC(direction, r.ID, 0, stats.method, r.Result, nil)
return ctx
}
func (h *handler) Done(ctx context.Context, err error) {
stats := h.getStats(ctx)
h.logRPC(stats.direction, stats.id, time.Since(stats.start), stats.method, stats.payload, err)
if err != nil {
ctx = telemetry.StatusCode.With(ctx, "ERROR")
} else {
ctx = telemetry.StatusCode.With(ctx, "OK")
}
elapsedTime := time.Since(stats.start)
latencyMillis := float64(elapsedTime) / float64(time.Millisecond)
telemetry.Latency.Record(ctx, latencyMillis)
stats.close()
}
func (h *handler) Read(ctx context.Context, bytes int64) context.Context {
telemetry.SentBytes.Record(ctx, bytes)
return ctx
}
func (h *handler) Wrote(ctx context.Context, bytes int64) context.Context {
telemetry.ReceivedBytes.Record(ctx, bytes)
return ctx
}
const eol = "\r\n\r\n\r\n"
func (h *handler) Error(ctx context.Context, err error) {
stats := h.getStats(ctx)
h.logRPC(stats.direction, stats.id, 0, stats.method, nil, err)
}
func (h *handler) getStats(ctx context.Context) *rpcStats {
stats, ok := ctx.Value(statsKey).(*rpcStats)
if !ok || stats == nil {
method, ok := ctx.Value(telemetry.Method).(string)
if !ok {
method = "???"
}
stats = &rpcStats{
method: method,
close: func() {},
}
}
return stats
}
func (h *handler) logRPC(direction jsonrpc2.Direction, id *jsonrpc2.ID, elapsed time.Duration, method string, payload *json.RawMessage, err error) {
if !h.loggingRPCs {
return return
} }
const eol = "\r\n\r\n\r\n" const eol = "\r\n\r\n\r\n"
if err != nil { if err != nil {
fmt.Fprintf(h.out, "[Error - %v] %s %s%s %v%s", time.Now().Format("3:04:05 PM"), fmt.Fprintf(out, "[Error - %v] %s %s%s %v%s", time.Now().Format("3:04:05 PM"),
direction, method, id, err, eol) direction, method, id, err, eol)
return return
} }
@ -274,5 +164,6 @@ func (h *handler) logRPC(direction jsonrpc2.Direction, id *jsonrpc2.ID, elapsed
params = "{}" params = "{}"
} }
fmt.Fprintf(outx, ".\r\nParams: %s%s", params, eol) fmt.Fprintf(outx, ".\r\nParams: %s%s", params, eol)
fmt.Fprintf(h.out, "%s", outx.String()) fmt.Fprintf(out, "%s", outx.String())
}
} }
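The deleted handler above threads per-request stats through the context and records latency when the request completes. A reduced sketch of that pattern, with illustrative names and without the tracing and metrics machinery:

package main

import (
	"context"
	"fmt"
	"time"
)

type rpcStats struct {
	method string
	start  time.Time
}

type statsKeyType int

const statsKey = statsKeyType(0)

// startRequest stashes per-request stats in the context.
func startRequest(ctx context.Context, method string) context.Context {
	return context.WithValue(ctx, statsKey, &rpcStats{method: method, start: time.Now()})
}

// finishRequest reads the stats back and records the elapsed time.
func finishRequest(ctx context.Context) {
	stats, ok := ctx.Value(statsKey).(*rpcStats)
	if !ok {
		return
	}
	latencyMillis := float64(time.Since(stats.start)) / float64(time.Millisecond)
	fmt.Printf("%s took %.2fms\n", stats.method, latencyMillis)
}

func main() {
	ctx := startRequest(context.Background(), "textDocument/hover")
	time.Sleep(10 * time.Millisecond)
	finishRequest(ctx)
}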

View File

@ -6,35 +6,15 @@ package lsp
import ( import (
"context" "context"
"fmt"
"strings" "strings"
"golang.org/x/tools/internal/lsp/protocol" "golang.org/x/tools/internal/lsp/protocol"
"golang.org/x/tools/internal/lsp/source" "golang.org/x/tools/internal/lsp/source"
"golang.org/x/tools/internal/lsp/telemetry"
"golang.org/x/tools/internal/lsp/telemetry/log"
"golang.org/x/tools/internal/span" "golang.org/x/tools/internal/span"
) )
func (s *Server) codeAction(ctx context.Context, params *protocol.CodeActionParams) ([]protocol.CodeAction, error) { func (s *Server) codeAction(ctx context.Context, params *protocol.CodeActionParams) ([]protocol.CodeAction, error) {
// The Only field of the context specifies which code actions the client wants.
// If Only is empty, assume that the client wants all of the possible code actions.
var wanted map[protocol.CodeActionKind]bool
if len(params.Context.Only) == 0 {
wanted = s.supportedCodeActions
} else {
wanted = make(map[protocol.CodeActionKind]bool)
for _, only := range params.Context.Only {
wanted[only] = s.supportedCodeActions[only]
}
}
uri := span.NewURI(params.TextDocument.URI) uri := span.NewURI(params.TextDocument.URI)
if len(wanted) == 0 {
return nil, fmt.Errorf("no supported code action to execute for %s, wanted %v", uri, params.Context.Only)
}
view := s.session.ViewOf(uri) view := s.session.ViewOf(uri)
gof, m, err := getGoFile(ctx, view, uri) gof, m, err := getGoFile(ctx, view, uri)
if err != nil { if err != nil {
@ -44,27 +24,25 @@ func (s *Server) codeAction(ctx context.Context, params *protocol.CodeActionPara
if err != nil { if err != nil {
return nil, err return nil, err
} }
var codeActions []protocol.CodeAction var codeActions []protocol.CodeAction
// TODO(rstambler): Handle params.Context.Only when VSCode-Go uses a
// version of vscode-languageclient that fixes
// https://github.com/Microsoft/vscode-languageserver-node/issues/442.
edits, err := organizeImports(ctx, view, spn) edits, err := organizeImports(ctx, view, spn)
if err != nil { if err != nil {
return nil, err return nil, err
} }
if len(edits) > 0 {
// If the user wants to see quickfixes. codeActions = append(codeActions, protocol.CodeAction{
if wanted[protocol.QuickFix] { Title: "Organize Imports",
// First, add the quick fixes reported by go/analysis. Kind: protocol.SourceOrganizeImports,
// TODO: Enable this when this actually works. For now, it's needless work. Edit: &protocol.WorkspaceEdit{
if s.wantSuggestedFixes { Changes: &map[string][]protocol.TextEdit{
qf, err := quickFixes(ctx, view, gof) string(spn.URI()): edits,
if err != nil { },
log.Error(ctx, "quick fixes failed", err, telemetry.File.Of(uri)) },
} })
codeActions = append(codeActions, qf...) // If we also have diagnostics, we can associate them with quick fixes.
}
// If we also have diagnostics for missing imports, we can associate them with quick fixes.
if findImportErrors(params.Context.Diagnostics) { if findImportErrors(params.Context.Diagnostics) {
// TODO(rstambler): Separate this into a set of codeActions per diagnostic, // TODO(rstambler): Separate this into a set of codeActions per diagnostic,
// where each action is the addition or removal of one import. // where each action is the addition or removal of one import.
@ -79,21 +57,26 @@ func (s *Server) codeAction(ctx context.Context, params *protocol.CodeActionPara
}, },
}) })
} }
diags := gof.GetPackage(ctx).GetDiagnostics()
for _, diag := range diags {
pdiag, err := toProtocolDiagnostic(ctx, view, diag)
if err != nil {
return nil, err
} }
for _, ca := range diag.SuggestedFixes {
// Add the results of import organization as source.OrganizeImports.
if wanted[protocol.SourceOrganizeImports] {
codeActions = append(codeActions, protocol.CodeAction{ codeActions = append(codeActions, protocol.CodeAction{
Title: "Organize Imports", Title: ca.Title,
Kind: protocol.SourceOrganizeImports, Kind: protocol.QuickFix, // TODO(matloob): Be more accurate about these?
Edit: &protocol.WorkspaceEdit{ Edit: &protocol.WorkspaceEdit{
Changes: &map[string][]protocol.TextEdit{ Changes: &map[string][]protocol.TextEdit{
string(spn.URI()): edits, string(spn.URI()): edits,
}, },
}, },
Diagnostics: []protocol.Diagnostic{pdiag},
}) })
} }
}
}
return codeActions, nil return codeActions, nil
} }
@ -102,7 +85,7 @@ func organizeImports(ctx context.Context, view source.View, s span.Span) ([]prot
if err != nil { if err != nil {
return nil, err return nil, err
} }
edits, err := source.Imports(ctx, view, f, rng) edits, err := source.Imports(ctx, f, rng)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -123,46 +106,9 @@ func findImportErrors(diagnostics []protocol.Diagnostic) bool {
return true return true
} }
// "X imported but not used" is an unused import. // "X imported but not used" is an unused import.
// "X imported but not used as Y" is an unused import. if strings.HasSuffix(diagnostic.Message, " imported but not used") {
if strings.Contains(diagnostic.Message, " imported but not used") {
return true return true
} }
} }
return false return false
} }
func quickFixes(ctx context.Context, view source.View, gof source.GoFile) ([]protocol.CodeAction, error) {
var codeActions []protocol.CodeAction
// TODO: This is technically racy because the diagnostics provided by the code action
// may not be the same as the ones that gopls is aware of.
// We need to figure out some way to solve this problem.
diags := gof.GetPackage(ctx).GetDiagnostics()
for _, diag := range diags {
pdiag, err := toProtocolDiagnostic(ctx, view, diag)
if err != nil {
return nil, err
}
for _, ca := range diag.SuggestedFixes {
_, m, err := getGoFile(ctx, view, diag.URI())
if err != nil {
return nil, err
}
edits, err := ToProtocolEdits(m, ca.Edits)
if err != nil {
return nil, err
}
codeActions = append(codeActions, protocol.CodeAction{
Title: ca.Title,
Kind: protocol.QuickFix, // TODO(matloob): Be more accurate about these?
Edit: &protocol.WorkspaceEdit{
Changes: &map[string][]protocol.TextEdit{
string(diag.URI()): edits,
},
},
Diagnostics: []protocol.Diagnostic{pdiag},
})
}
}
return codeActions, nil
}
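The left column above filters the server's supported code actions by the client's Context.Only list, treating an empty list as "send everything". A simplified sketch of that intersection, using plain strings in place of protocol.CodeActionKind:

package main

import "fmt"

// wantedKinds returns the kinds to compute: all supported kinds when the
// client lists none, otherwise the intersection with the supported set.
func wantedKinds(supported map[string]bool, only []string) map[string]bool {
	if len(only) == 0 {
		return supported
	}
	wanted := make(map[string]bool)
	for _, kind := range only {
		if supported[kind] {
			wanted[kind] = true
		}
	}
	return wanted
}

func main() {
	supported := map[string]bool{"quickfix": true, "source.organizeImports": true}
	fmt.Println(wantedKinds(supported, nil))
	fmt.Println(wantedKinds(supported, []string{"quickfix", "refactor"}))
}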

View File

@ -12,8 +12,6 @@ import (
"golang.org/x/tools/internal/lsp/protocol" "golang.org/x/tools/internal/lsp/protocol"
"golang.org/x/tools/internal/lsp/source" "golang.org/x/tools/internal/lsp/source"
"golang.org/x/tools/internal/lsp/telemetry/log"
"golang.org/x/tools/internal/lsp/telemetry/tag"
"golang.org/x/tools/internal/span" "golang.org/x/tools/internal/span"
) )
@ -32,16 +30,35 @@ func (s *Server) completion(ctx context.Context, params *protocol.CompletionPara
if err != nil { if err != nil {
return nil, err return nil, err
} }
candidates, surrounding, err := source.Completion(ctx, view, f, rng.Start, source.CompletionOptions{ items, surrounding, err := source.Completion(ctx, view, f, rng.Start, source.CompletionOptions{
DeepComplete: s.useDeepCompletions, DeepComplete: s.useDeepCompletions,
WantDocumentaton: s.wantCompletionDocumentation,
}) })
if err != nil { if err != nil {
log.Print(ctx, "no completions found", tag.Of("At", rng), tag.Of("Failure", err)) s.session.Logger().Infof(ctx, "no completions found for %s:%v:%v: %v", uri, int(params.Position.Line), int(params.Position.Character), err)
}
// We might need to adjust the position to account for the prefix.
insertionRng := protocol.Range{
Start: params.Position,
End: params.Position,
}
var prefix string
if surrounding != nil {
prefix = surrounding.Prefix()
spn, err := surrounding.Range.Span()
if err != nil {
s.session.Logger().Infof(ctx, "failed to get span for surrounding position: %s:%v:%v: %v", uri, int(params.Position.Line), int(params.Position.Character), err)
} else {
rng, err := m.Range(spn)
if err != nil {
s.session.Logger().Infof(ctx, "failed to convert surrounding position: %s:%v:%v: %v", uri, int(params.Position.Line), int(params.Position.Character), err)
} else {
insertionRng = rng
}
}
} }
return &protocol.CompletionList{ return &protocol.CompletionList{
IsIncomplete: false, IsIncomplete: false,
Items: s.toProtocolCompletionItems(ctx, view, m, candidates, params.Position, surrounding), Items: toProtocolCompletionItems(items, prefix, insertionRng, s.insertTextFormat, s.usePlaceholders, s.useDeepCompletions),
}, nil }, nil
} }
@ -49,54 +66,41 @@ func (s *Server) completion(ctx context.Context, params *protocol.CompletionPara
// to be useful. // to be useful.
const maxDeepCompletions = 3 const maxDeepCompletions = 3
func (s *Server) toProtocolCompletionItems(ctx context.Context, view source.View, m *protocol.ColumnMapper, candidates []source.CompletionItem, pos protocol.Position, surrounding *source.Selection) []protocol.CompletionItem { func toProtocolCompletionItems(candidates []source.CompletionItem, prefix string, rng protocol.Range, insertTextFormat protocol.InsertTextFormat, usePlaceholders bool, useDeepCompletions bool) []protocol.CompletionItem {
// Sort the candidates by score, since that is not supported by LSP yet. // Sort the candidates by score, since that is not supported by LSP yet.
sort.SliceStable(candidates, func(i, j int) bool { sort.SliceStable(candidates, func(i, j int) bool {
return candidates[i].Score > candidates[j].Score return candidates[i].Score > candidates[j].Score
}) })
// We might need to adjust the position to account for the prefix.
insertionRange := protocol.Range{
Start: pos,
End: pos,
}
var prefix string
if surrounding != nil {
prefix = strings.ToLower(surrounding.Prefix())
spn, err := surrounding.Range.Span()
if err != nil {
log.Print(ctx, "failed to get span for surrounding position: %s:%v:%v: %v", tag.Of("Position", pos), tag.Of("Failure", err))
} else {
rng, err := m.Range(spn)
if err != nil {
log.Print(ctx, "failed to convert surrounding position", tag.Of("Position", pos), tag.Of("Failure", err))
} else {
insertionRange = rng
}
}
}
var numDeepCompletionsSeen int // Matching against the prefix should be case insensitive.
prefix = strings.ToLower(prefix)
items := make([]protocol.CompletionItem, 0, len(candidates)) var (
items = make([]protocol.CompletionItem, 0, len(candidates))
numDeepCompletionsSeen int
)
for i, candidate := range candidates { for i, candidate := range candidates {
// Match against the label (case-insensitive). // Match against the label (case-insensitive).
if !strings.HasPrefix(strings.ToLower(candidate.Label), prefix) { if !strings.HasPrefix(strings.ToLower(candidate.Label), prefix) {
continue continue
} }
// Limit the number of deep completions to not overwhelm the user in cases // Limit the number of deep completions to not overwhelm the user in cases
// with dozens of deep completion matches. // with dozens of deep completion matches.
if candidate.Depth > 0 { if candidate.Depth > 0 {
if !s.useDeepCompletions { if !useDeepCompletions {
continue continue
} }
if numDeepCompletionsSeen >= maxDeepCompletions { if numDeepCompletionsSeen >= maxDeepCompletions {
continue continue
} }
numDeepCompletionsSeen++ numDeepCompletionsSeen++
} }
insertText := candidate.InsertText insertText := candidate.InsertText
if s.insertTextFormat == protocol.SnippetTextFormat { if insertTextFormat == protocol.SnippetTextFormat {
insertText = candidate.Snippet(s.usePlaceholders) insertText = candidate.Snippet(usePlaceholders)
} }
item := protocol.CompletionItem{ item := protocol.CompletionItem{
Label: candidate.Label, Label: candidate.Label,
@ -104,16 +108,15 @@ func (s *Server) toProtocolCompletionItems(ctx context.Context, view source.View
Kind: toProtocolCompletionItemKind(candidate.Kind), Kind: toProtocolCompletionItemKind(candidate.Kind),
TextEdit: &protocol.TextEdit{ TextEdit: &protocol.TextEdit{
NewText: insertText, NewText: insertText,
Range: insertionRange, Range: rng,
}, },
InsertTextFormat: s.insertTextFormat, InsertTextFormat: insertTextFormat,
// This is a hack so that the client sorts completion results in the order // This is a hack so that the client sorts completion results in the order
// according to their score. This can be removed upon the resolution of // according to their score. This can be removed upon the resolution of
// https://github.com/Microsoft/language-server-protocol/issues/348. // https://github.com/Microsoft/language-server-protocol/issues/348.
SortText: fmt.Sprintf("%05d", i), SortText: fmt.Sprintf("%05d", i),
FilterText: candidate.InsertText, FilterText: candidate.InsertText,
Preselect: i == 0, Preselect: i == 0,
Documentation: candidate.Documentation,
} }
// Trigger signature help for any function or method completion. // Trigger signature help for any function or method completion.
// This is helpful even if a function does not have parameters, // This is helpful even if a function does not have parameters,
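The completion conversion above sorts candidates by score, filters them case-insensitively against the prefix, and then encodes the rank into SortText with %05d because LSP clients sort lexically. A compact sketch of that ranking step, with the candidate type simplified:

package main

import (
	"fmt"
	"sort"
	"strings"
)

type candidate struct {
	Label string
	Score float64
}

// rank returns "SortText Label" pairs in score order for candidates that
// match the prefix; the zero-padded index preserves ranking under a
// lexical sort on the client side.
func rank(cands []candidate, prefix string) []string {
	sort.SliceStable(cands, func(i, j int) bool { return cands[i].Score > cands[j].Score })
	prefix = strings.ToLower(prefix)
	var out []string
	for i, c := range cands {
		if !strings.HasPrefix(strings.ToLower(c.Label), prefix) {
			continue
		}
		out = append(out, fmt.Sprintf("%05d %s", i, c.Label))
	}
	return out
}

func main() {
	fmt.Println(rank([]candidate{{"Println", 1}, {"Printf", 2}, {"Print", 3}}, "pri"))
}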

View File

@ -20,7 +20,7 @@ const (
) )
// Version is a manually-updated mechanism for tracking versions. // Version is a manually-updated mechanism for tracking versions.
var Version = "v0.1.3" var Version = "v0.1.1"
// This writes the version and environment information to a writer. // This writes the version and environment information to a writer.
func PrintVersionInfo(w io.Writer, verbose bool, mode PrintMode) { func PrintVersionInfo(w io.Writer, verbose bool, mode PrintMode) {

View File

@ -1,49 +0,0 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package debug
import (
"golang.org/x/tools/internal/lsp/telemetry"
"golang.org/x/tools/internal/lsp/telemetry/metric"
)
var (
// the distributions we use for histograms
bytesDistribution = []int64{1 << 10, 1 << 11, 1 << 12, 1 << 14, 1 << 16, 1 << 20}
millisecondsDistribution = []float64{0.1, 0.5, 1, 2, 5, 10, 50, 100, 500, 1000, 5000, 10000, 50000, 100000}
receivedBytes = metric.HistogramInt64{
Name: "received_bytes",
Description: "Distribution of received bytes, by method.",
Keys: []interface{}{telemetry.RPCDirection, telemetry.Method},
Buckets: bytesDistribution,
}.Record(telemetry.ReceivedBytes)
sentBytes = metric.HistogramInt64{
Name: "sent_bytes",
Description: "Distribution of sent bytes, by method.",
Keys: []interface{}{telemetry.RPCDirection, telemetry.Method},
Buckets: bytesDistribution,
}.Record(telemetry.SentBytes)
latency = metric.HistogramFloat64{
Name: "latency",
Description: "Distribution of latency in milliseconds, by method.",
Keys: []interface{}{telemetry.RPCDirection, telemetry.Method},
Buckets: millisecondsDistribution,
}.Record(telemetry.Latency)
started = metric.Scalar{
Name: "started",
Description: "Count of RPCs started by method.",
Keys: []interface{}{telemetry.RPCDirection, telemetry.Method},
}.CountInt64(telemetry.Started)
completed = metric.Scalar{
Name: "completed",
Description: "Count of RPCs completed by method and status.",
Keys: []interface{}{telemetry.RPCDirection, telemetry.Method, telemetry.StatusCode},
}.CountFloat64(telemetry.Latency)
)

View File

@ -1,111 +0,0 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package debug
import (
"bytes"
"fmt"
"net/http"
"sort"
"golang.org/x/tools/internal/lsp/telemetry/metric"
"golang.org/x/tools/internal/lsp/telemetry/tag"
"golang.org/x/tools/internal/lsp/telemetry/worker"
)
type prometheus struct {
metrics []metric.Data
}
func (p *prometheus) observeMetric(data metric.Data) {
name := data.Handle().Name()
index := sort.Search(len(p.metrics), func(i int) bool {
return p.metrics[i].Handle().Name() >= name
})
if index >= len(p.metrics) || p.metrics[index].Handle().Name() != name {
old := p.metrics
p.metrics = make([]metric.Data, len(old)+1)
copy(p.metrics, old[:index])
copy(p.metrics[index+1:], old[index:])
}
p.metrics[index] = data
}
func (p *prometheus) header(w http.ResponseWriter, name, description string, isGauge, isHistogram bool) {
kind := "counter"
if isGauge {
kind = "gauge"
}
if isHistogram {
kind = "histogram"
}
fmt.Fprintf(w, "# HELP %s %s\n", name, description)
fmt.Fprintf(w, "# TYPE %s %s\n", name, kind)
}
func (p *prometheus) row(w http.ResponseWriter, name string, group tag.List, extra string, value interface{}) {
fmt.Fprint(w, name)
buf := &bytes.Buffer{}
fmt.Fprint(buf, group)
if extra != "" {
if buf.Len() > 0 {
fmt.Fprint(buf, ",")
}
fmt.Fprint(buf, extra)
}
if buf.Len() > 0 {
fmt.Fprint(w, "{")
buf.WriteTo(w)
fmt.Fprint(w, "}")
}
fmt.Fprintf(w, " %v\n", value)
}
func (p *prometheus) serve(w http.ResponseWriter, r *http.Request) {
done := make(chan struct{})
worker.Do(func() {
defer close(done)
for _, data := range p.metrics {
switch data := data.(type) {
case *metric.Int64Data:
p.header(w, data.Info.Name, data.Info.Description, data.IsGauge, false)
for i, group := range data.Groups() {
p.row(w, data.Info.Name, group, "", data.Rows[i])
}
case *metric.Float64Data:
p.header(w, data.Info.Name, data.Info.Description, data.IsGauge, false)
for i, group := range data.Groups() {
p.row(w, data.Info.Name, group, "", data.Rows[i])
}
case *metric.HistogramInt64Data:
p.header(w, data.Info.Name, data.Info.Description, false, true)
for i, group := range data.Groups() {
row := data.Rows[i]
for j, b := range data.Info.Buckets {
p.row(w, data.Info.Name+"_bucket", group, fmt.Sprintf(`le="%v"`, b), row.Values[j])
}
p.row(w, data.Info.Name+"_bucket", group, `le="+Inf"`, row.Count)
p.row(w, data.Info.Name+"_count", group, "", row.Count)
p.row(w, data.Info.Name+"_sum", group, "", row.Sum)
}
case *metric.HistogramFloat64Data:
p.header(w, data.Info.Name, data.Info.Description, false, true)
for i, group := range data.Groups() {
row := data.Rows[i]
for j, b := range data.Info.Buckets {
p.row(w, data.Info.Name+"_bucket", group, fmt.Sprintf(`le="%v"`, b), row.Values[j])
}
p.row(w, data.Info.Name+"_bucket", group, `le="+Inf"`, row.Count)
p.row(w, data.Info.Name+"_count", group, "", row.Count)
p.row(w, data.Info.Name+"_sum", group, "", row.Sum)
}
}
}
})
<-done
}
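For orientation, the header and row helpers above target the Prometheus text exposition format; a hedged sketch of what one histogram might look like (the method label and numbers are made up, and label rendering may differ slightly from the tag.List formatting used above):
package main
import "fmt"
func main() {
	// One HELP/TYPE pair per metric, then one line per bucket plus _count and _sum.
	fmt.Println("# HELP latency Distribution of latency in milliseconds, by method.")
	fmt.Println("# TYPE latency histogram")
	fmt.Println(`latency_bucket{method="textDocument/hover",le="10"} 42`)
	fmt.Println(`latency_bucket{method="textDocument/hover",le="+Inf"} 45`)
	fmt.Println(`latency_count{method="textDocument/hover"} 45`)
	fmt.Println(`latency_sum{method="textDocument/hover"} 120.5`)
}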

View File

@ -1,209 +0,0 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package debug
import (
"fmt"
"html/template"
"log"
"net/http"
"sort"
"golang.org/x/tools/internal/lsp/telemetry"
"golang.org/x/tools/internal/lsp/telemetry/metric"
)
var rpcTmpl = template.Must(template.Must(BaseTemplate.Clone()).Parse(`
{{define "title"}}RPC Information{{end}}
{{define "body"}}
<H2>Inbound</H2>
{{template "rpcSection" .Inbound}}
<H2>Outbound</H2>
{{template "rpcSection" .Outbound}}
{{end}}
{{define "rpcSection"}}
{{range .}}<P>
<b>{{.Method}}</b> {{.Started}} <a href="/trace/{{.Method}}">traces</a> ({{.InProgress}} in progress)
<br>
<i>Latency</i> {{with .Latency}}{{.Mean}} ({{.Min}}<{{.Max}}){{end}}
<i>By bucket</i> 0s {{range .Latency.Values}}<b>{{.Count}}</b> {{.Limit}} {{end}}
<br>
<i>Received</i> {{with .Received}}{{.Mean}} ({{.Min}}<{{.Max}}){{end}}
<i>Sent</i> {{with .Sent}}{{.Mean}} ({{.Min}}<{{.Max}}){{end}}
<br>
<i>Result codes</i> {{range .Codes}}{{.Key}}={{.Count}} {{end}}
</P>
{{end}}
{{end}}
`))
type rpcs struct {
Inbound []*rpcStats
Outbound []*rpcStats
}
type rpcStats struct {
Method string
Started int64
Completed int64
InProgress int64
Latency rpcTimeHistogram
Received rpcBytesHistogram
Sent rpcBytesHistogram
Codes []*rpcCodeBucket
}
type rpcTimeHistogram struct {
Sum timeUnits
Count int64
Mean timeUnits
Min timeUnits
Max timeUnits
Values []rpcTimeBucket
}
type rpcTimeBucket struct {
Limit timeUnits
Count int64
}
type rpcBytesHistogram struct {
Sum byteUnits
Count int64
Mean byteUnits
Min byteUnits
Max byteUnits
Values []rpcBytesBucket
}
type rpcBytesBucket struct {
Limit byteUnits
Count int64
}
type rpcCodeBucket struct {
Key string
Count int64
}
func (r *rpcs) observeMetric(data metric.Data) {
for i, group := range data.Groups() {
set := &r.Inbound
if group.Get(telemetry.RPCDirection) == telemetry.Outbound {
set = &r.Outbound
}
method, ok := group.Get(telemetry.Method).(string)
if !ok {
log.Printf("Not a method... %v", group)
continue
}
index := sort.Search(len(*set), func(i int) bool {
return (*set)[i].Method >= method
})
if index >= len(*set) || (*set)[index].Method != method {
old := *set
*set = make([]*rpcStats, len(old)+1)
copy(*set, old[:index])
copy((*set)[index+1:], old[index:])
(*set)[index] = &rpcStats{Method: method}
}
stats := (*set)[index]
switch data.Handle() {
case started:
stats.Started = data.(*metric.Int64Data).Rows[i]
case completed:
status, ok := group.Get(telemetry.StatusCode).(string)
if !ok {
log.Printf("Not status... %v", group)
continue
}
var b *rpcCodeBucket
for c, entry := range stats.Codes {
if entry.Key == status {
b = stats.Codes[c]
break
}
}
if b == nil {
b = &rpcCodeBucket{Key: status}
stats.Codes = append(stats.Codes, b)
sort.Slice(stats.Codes, func(i int, j int) bool {
return stats.Codes[i].Key < stats.Codes[j].Key
})
}
b.Count = data.(*metric.Int64Data).Rows[i]
case latency:
data := data.(*metric.HistogramFloat64Data)
row := data.Rows[i]
stats.Latency.Count = row.Count
stats.Latency.Sum = timeUnits(row.Sum)
stats.Latency.Min = timeUnits(row.Min)
stats.Latency.Max = timeUnits(row.Max)
stats.Latency.Mean = timeUnits(row.Sum) / timeUnits(row.Count)
stats.Latency.Values = make([]rpcTimeBucket, len(data.Info.Buckets))
last := int64(0)
for i, b := range data.Info.Buckets {
stats.Latency.Values[i].Limit = timeUnits(b)
stats.Latency.Values[i].Count = row.Values[i] - last
last = row.Values[i]
}
case sentBytes:
data := data.(*metric.HistogramInt64Data)
row := data.Rows[i]
stats.Sent.Count = row.Count
stats.Sent.Sum = byteUnits(row.Sum)
stats.Sent.Min = byteUnits(row.Min)
stats.Sent.Max = byteUnits(row.Max)
stats.Sent.Mean = byteUnits(row.Sum) / byteUnits(row.Count)
case receivedBytes:
data := data.(*metric.HistogramInt64Data)
row := data.Rows[i]
stats.Received.Count = row.Count
stats.Received.Sum = byteUnits(row.Sum)
stats.Received.Min = byteUnits(row.Min)
stats.Received.Max = byteUnits(row.Max)
stats.Received.Mean = byteUnits(row.Sum) / byteUnits(row.Count)
}
}
for _, set := range [][]*rpcStats{r.Inbound, r.Outbound} {
for _, stats := range set {
stats.Completed = 0
for _, b := range stats.Codes {
stats.Completed += b.Count
}
stats.InProgress = stats.Started - stats.Completed
}
}
}
func (r *rpcs) getData(req *http.Request) interface{} {
return r
}
func units(v float64, suffixes []string) string {
s := ""
for _, s = range suffixes {
n := v / 1000
if n < 1 {
break
}
v = n
}
return fmt.Sprintf("%.2f%s", v, s)
}
type timeUnits float64
func (v timeUnits) String() string {
v = v * 1000 * 1000
return units(float64(v), []string{"ns", "μs", "ms", "s"})
}
type byteUnits float64
func (v byteUnits) String() string {
return units(float64(v), []string{"B", "KB", "MB", "GB", "TB"})
}
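A quick, hedged illustration of the scaling these String methods perform (units is copied here only so the sketch runs standalone):
package main
import "fmt"
func units(v float64, suffixes []string) string {
	s := ""
	for _, s = range suffixes {
		n := v / 1000
		if n < 1 {
			break
		}
		v = n
	}
	return fmt.Sprintf("%.2f%s", v, s)
}
func main() {
	// timeUnits(2.5) first scales 2.5ms to nanoseconds, then walks back up the suffixes.
	fmt.Println(units(2.5*1000*1000, []string{"ns", "μs", "ms", "s"})) // 2.50ms
	// byteUnits(1536000) renders as roughly one and a half megabytes.
	fmt.Println(units(1536000, []string{"B", "KB", "MB", "GB", "TB"})) // 1.54MB
}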

View File

@ -9,6 +9,7 @@ import (
"context" "context"
"go/token" "go/token"
"html/template" "html/template"
"log"
"net" "net"
"net/http" "net/http"
"net/http/pprof" "net/http/pprof"
@ -18,11 +19,7 @@ import (
"strconv" "strconv"
"sync" "sync"
"golang.org/x/tools/internal/lsp/telemetry/log" "golang.org/x/tools/internal/lsp/telemetry"
"golang.org/x/tools/internal/lsp/telemetry/metric"
"golang.org/x/tools/internal/lsp/telemetry/tag"
"golang.org/x/tools/internal/lsp/telemetry/trace"
"golang.org/x/tools/internal/lsp/telemetry/worker"
"golang.org/x/tools/internal/span" "golang.org/x/tools/internal/span"
) )
@ -215,25 +212,17 @@ func Serve(ctx context.Context, addr string) error {
if err != nil { if err != nil {
return err return err
} }
log.Print(ctx, "Debug serving", tag.Of("Port", listener.Addr().(*net.TCPAddr).Port)) log.Printf("Debug serving on port: %d", listener.Addr().(*net.TCPAddr).Port)
prometheus := prometheus{}
metric.RegisterObservers(prometheus.observeMetric)
rpcs := rpcs{}
metric.RegisterObservers(rpcs.observeMetric)
traces := traces{}
trace.RegisterObservers(traces.export)
go func() { go func() {
mux := http.NewServeMux() mux := http.NewServeMux()
mux.HandleFunc("/", Render(mainTmpl, func(*http.Request) interface{} { return data })) mux.HandleFunc("/", Render(mainTmpl, func(*http.Request) interface{} { return data }))
mux.HandleFunc("/debug/", Render(debugTmpl, nil)) mux.HandleFunc("/debug/", Render(debugTmpl, nil))
telemetry.Handle(mux)
mux.HandleFunc("/debug/pprof/", pprof.Index) mux.HandleFunc("/debug/pprof/", pprof.Index)
mux.HandleFunc("/debug/pprof/cmdline", pprof.Cmdline) mux.HandleFunc("/debug/pprof/cmdline", pprof.Cmdline)
mux.HandleFunc("/debug/pprof/profile", pprof.Profile) mux.HandleFunc("/debug/pprof/profile", pprof.Profile)
mux.HandleFunc("/debug/pprof/symbol", pprof.Symbol) mux.HandleFunc("/debug/pprof/symbol", pprof.Symbol)
mux.HandleFunc("/debug/pprof/trace", pprof.Trace) mux.HandleFunc("/debug/pprof/trace", pprof.Trace)
mux.HandleFunc("/metrics/", prometheus.serve)
mux.HandleFunc("/rpc/", Render(rpcTmpl, rpcs.getData))
mux.HandleFunc("/trace/", Render(traceTmpl, traces.getData))
mux.HandleFunc("/cache/", Render(cacheTmpl, getCache)) mux.HandleFunc("/cache/", Render(cacheTmpl, getCache))
mux.HandleFunc("/session/", Render(sessionTmpl, getSession)) mux.HandleFunc("/session/", Render(sessionTmpl, getSession))
mux.HandleFunc("/view/", Render(viewTmpl, getView)) mux.HandleFunc("/view/", Render(viewTmpl, getView))
@ -241,28 +230,23 @@ func Serve(ctx context.Context, addr string) error {
mux.HandleFunc("/info", Render(infoTmpl, getInfo)) mux.HandleFunc("/info", Render(infoTmpl, getInfo))
mux.HandleFunc("/memory", Render(memoryTmpl, getMemory)) mux.HandleFunc("/memory", Render(memoryTmpl, getMemory))
if err := http.Serve(listener, mux); err != nil { if err := http.Serve(listener, mux); err != nil {
log.Error(ctx, "Debug server failed", err) log.Printf("Debug server failed with %v", err)
return return
} }
log.Print(ctx, "Debug server finished") log.Printf("Debug server finished")
}() }()
return nil return nil
} }
func Render(tmpl *template.Template, fun func(*http.Request) interface{}) func(http.ResponseWriter, *http.Request) { func Render(tmpl *template.Template, fun func(*http.Request) interface{}) func(http.ResponseWriter, *http.Request) {
return func(w http.ResponseWriter, r *http.Request) { return func(w http.ResponseWriter, r *http.Request) {
done := make(chan struct{})
worker.Do(func() {
defer close(done)
var data interface{} var data interface{}
if fun != nil { if fun != nil {
data = fun(r) data = fun(r)
} }
if err := tmpl.Execute(w, data); err != nil { if err := tmpl.Execute(w, data); err != nil {
log.Error(context.Background(), "", err) log.Print(err)
} }
})
<-done
} }
} }
@ -294,10 +278,6 @@ var BaseTemplate = template.Must(template.New("").Parse(`
td.value { td.value {
text-align: right; text-align: right;
} }
ul.events {
list-style-type: none;
}
</style> </style>
{{block "head" .}}{{end}} {{block "head" .}}{{end}}
</head> </head>
@ -305,9 +285,7 @@ ul.events {
<a href="/">Main</a> <a href="/">Main</a>
<a href="/info">Info</a> <a href="/info">Info</a>
<a href="/memory">Memory</a> <a href="/memory">Memory</a>
<a href="/metrics">Metrics</a> <a href="/debug/">Debug</a>
<a href="/rpc">RPC</a>
<a href="/trace">Trace</a>
<hr> <hr>
<h1>{{template "title" .}}</h1> <h1>{{template "title" .}}</h1>
{{block "body" .}} {{block "body" .}}
@ -378,6 +356,8 @@ var debugTmpl = template.Must(template.Must(BaseTemplate.Clone()).Parse(`
{{define "title"}}GoPls Debug pages{{end}} {{define "title"}}GoPls Debug pages{{end}}
{{define "body"}} {{define "body"}}
<a href="/debug/pprof">Profiling</a> <a href="/debug/pprof">Profiling</a>
<a href="/debug/rpcz">RPCz</a>
<a href="/debug/tracez">Tracez</a>
{{end}} {{end}}
`)) `))

View File

@ -1,172 +0,0 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package debug
import (
"bytes"
"fmt"
"html/template"
"net/http"
"sort"
"strings"
"time"
"golang.org/x/tools/internal/lsp/telemetry/tag"
"golang.org/x/tools/internal/lsp/telemetry/trace"
)
var traceTmpl = template.Must(template.Must(BaseTemplate.Clone()).Parse(`
{{define "title"}}Trace Information{{end}}
{{define "body"}}
{{range .Traces}}<a href="/trace/{{.Name}}">{{.Name}}</a> last: {{.Last.Duration}}, longest: {{.Longest.Duration}}<br>{{end}}
{{if .Selected}}
<H2>{{.Selected.Name}}</H2>
{{if .Selected.Last}}<H3>Last</H3><ul>{{template "details" .Selected.Last}}</ul>{{end}}
{{if .Selected.Longest}}<H3>Longest</H3><ul>{{template "details" .Selected.Longest}}</ul>{{end}}
{{end}}
{{end}}
{{define "details"}}
<li>{{.Offset}} {{.Name}} {{.Duration}} {{.Tags}}</li>
{{if .Events}}<ul class=events>{{range .Events}}<li>{{.Offset}} {{.Tags}}</li>{{end}}</ul>{{end}}
{{if .Children}}<ul>{{range .Children}}{{template "details" .}}{{end}}</ul>{{end}}
{{end}}
`))
type traces struct {
sets map[string]*traceSet
unfinished map[trace.SpanID]*traceData
}
type traceResults struct {
Traces []*traceSet
Selected *traceSet
}
type traceSet struct {
Name string
Last *traceData
Longest *traceData
}
type traceData struct {
ID trace.SpanID
ParentID trace.SpanID
Name string
Start time.Time
Finish time.Time
Offset time.Duration
Duration time.Duration
Tags string
Events []traceEvent
Children []*traceData
}
type traceEvent struct {
Time time.Time
Offset time.Duration
Tags string
}
func (t *traces) export(span *trace.Span) {
if t.sets == nil {
t.sets = make(map[string]*traceSet)
t.unfinished = make(map[trace.SpanID]*traceData)
}
// is this a completed span?
if span.Finish.IsZero() {
t.start(span)
} else {
t.finish(span)
}
}
func (t *traces) start(span *trace.Span) {
// just starting, add it to the unfinished map
td := &traceData{
ID: span.SpanID,
ParentID: span.ParentID,
Name: span.Name,
Start: span.Start,
Tags: renderTags(span.Tags),
}
t.unfinished[span.SpanID] = td
// and wire up parents if we have them
if !span.ParentID.IsValid() {
return
}
parent, found := t.unfinished[span.ParentID]
if !found {
// trace had an invalid parent, so it cannot itself be valid
return
}
parent.Children = append(parent.Children, td)
}
func (t *traces) finish(span *trace.Span) {
// finishing, must be already in the map
td, found := t.unfinished[span.SpanID]
if !found {
return // if this happens we are in a bad place
}
delete(t.unfinished, span.SpanID)
td.Finish = span.Finish
td.Duration = span.Finish.Sub(span.Start)
td.Events = make([]traceEvent, len(span.Events))
for i, event := range span.Events {
td.Events[i] = traceEvent{
Time: event.Time,
Tags: renderTags(event.Tags),
}
}
set, ok := t.sets[span.Name]
if !ok {
set = &traceSet{Name: span.Name}
t.sets[span.Name] = set
}
set.Last = td
if set.Longest == nil || set.Last.Duration > set.Longest.Duration {
set.Longest = set.Last
}
if !td.ParentID.IsValid() {
fillOffsets(td, td.Start)
}
}
func (t *traces) getData(req *http.Request) interface{} {
if len(t.sets) == 0 {
return nil
}
data := traceResults{}
data.Traces = make([]*traceSet, 0, len(t.sets))
for _, set := range t.sets {
data.Traces = append(data.Traces, set)
}
sort.Slice(data.Traces, func(i, j int) bool { return data.Traces[i].Name < data.Traces[j].Name })
if bits := strings.SplitN(req.URL.Path, "/trace/", 2); len(bits) > 1 {
data.Selected = t.sets[bits[1]]
}
return data
}
func fillOffsets(td *traceData, start time.Time) {
td.Offset = td.Start.Sub(start)
for i := range td.Events {
td.Events[i].Offset = td.Events[i].Time.Sub(start)
}
for _, child := range td.Children {
fillOffsets(child, start)
}
}
func renderTags(tags tag.List) string {
buf := &bytes.Buffer{}
for _, tag := range tags {
fmt.Fprintf(buf, "%v=%q ", tag.Key, tag.Value)
}
return buf.String()
}

View File

@ -10,16 +10,13 @@ import (
"golang.org/x/tools/internal/lsp/protocol" "golang.org/x/tools/internal/lsp/protocol"
"golang.org/x/tools/internal/lsp/source" "golang.org/x/tools/internal/lsp/source"
"golang.org/x/tools/internal/lsp/telemetry"
"golang.org/x/tools/internal/lsp/telemetry/log"
"golang.org/x/tools/internal/span" "golang.org/x/tools/internal/span"
) )
func (s *Server) Diagnostics(ctx context.Context, view source.View, uri span.URI) { func (s *Server) Diagnostics(ctx context.Context, view source.View, uri span.URI) {
ctx = telemetry.File.With(ctx, uri)
f, err := view.GetFile(ctx, uri) f, err := view.GetFile(ctx, uri)
if err != nil { if err != nil {
log.Error(ctx, "no file", err, telemetry.File) s.session.Logger().Errorf(ctx, "no file for %s: %v", uri, err)
return return
} }
// For non-Go files, don't return any diagnostics. // For non-Go files, don't return any diagnostics.
@ -29,7 +26,7 @@ func (s *Server) Diagnostics(ctx context.Context, view source.View, uri span.URI
} }
reports, err := source.Diagnostics(ctx, view, gof, s.disabledAnalyses) reports, err := source.Diagnostics(ctx, view, gof, s.disabledAnalyses)
if err != nil { if err != nil {
log.Error(ctx, "failed to compute diagnostics", err, telemetry.File) s.session.Logger().Errorf(ctx, "failed to compute diagnostics for %s: %v", gof.URI(), err)
return return
} }
@ -41,7 +38,7 @@ func (s *Server) Diagnostics(ctx context.Context, view source.View, uri span.URI
if s.undelivered == nil { if s.undelivered == nil {
s.undelivered = make(map[span.URI][]source.Diagnostic) s.undelivered = make(map[span.URI][]source.Diagnostic)
} }
log.Error(ctx, "failed to deliver diagnostic (will retry)", err, telemetry.File) s.session.Logger().Errorf(ctx, "failed to deliver diagnostic for %s (will retry): %v", uri, err)
s.undelivered[uri] = diagnostics s.undelivered[uri] = diagnostics
continue continue
} }
@ -52,7 +49,7 @@ func (s *Server) Diagnostics(ctx context.Context, view source.View, uri span.URI
// undelivered ones (only for remaining URIs). // undelivered ones (only for remaining URIs).
for uri, diagnostics := range s.undelivered { for uri, diagnostics := range s.undelivered {
if err := s.publishDiagnostics(ctx, view, uri, diagnostics); err != nil { if err := s.publishDiagnostics(ctx, view, uri, diagnostics); err != nil {
log.Error(ctx, "failed to deliver diagnostic for (will not retry)", err, telemetry.File) s.session.Logger().Errorf(ctx, "failed to deliver diagnostic for %s (will not retry): %v", uri, err)
} }
// If we fail to deliver the same diagnostics twice, just give up. // If we fail to deliver the same diagnostics twice, just give up.
delete(s.undelivered, uri) delete(s.undelivered, uri)

View File

@ -6,6 +6,7 @@ package lsp
import ( import (
"context" "context"
"fmt"
"golang.org/x/tools/internal/lsp/protocol" "golang.org/x/tools/internal/lsp/protocol"
"golang.org/x/tools/internal/lsp/source" "golang.org/x/tools/internal/lsp/source"
@ -38,9 +39,9 @@ func spanToRange(ctx context.Context, view source.View, s span.Span) (source.GoF
} }
if rng.Start == rng.End { if rng.Start == rng.End {
// If we have a single point, assume we want the whole file. // If we have a single point, assume we want the whole file.
tok, err := f.GetToken(ctx) tok := f.GetToken(ctx)
if err != nil { if tok == nil {
return nil, nil, span.Range{}, err return nil, nil, span.Range{}, fmt.Errorf("no file information for %s", f.URI())
} }
rng.End = tok.Pos(tok.Size()) rng.End = tok.Pos(tok.Size())
} }

View File

@ -1,185 +0,0 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package fuzzy
import (
"unicode"
)
// Input specifies the type of the input. This influences how the runes are interpreted with
// respect to segmenting the input.
type Input int
const (
// Text represents a text input type. Input is not segmented.
Text Input = iota
// Filename represents a filepath input type with '/' segment delimiters.
Filename
// Symbol represents a symbol input type with '.' and ':' segment delimiters.
Symbol
)
// RuneRole specifies the role of a rune in the context of an input.
type RuneRole byte
const (
// RNone specifies a rune without any role in the input (i.e., whitespace/non-ASCII).
RNone RuneRole = iota
// RSep specifies a rune with the role of segment separator.
RSep
// RTail specifies a rune which is a lower-case tail in a word in the input.
RTail
// RUCTail specifies a rune which is an upper-case tail in a word in the input.
RUCTail
// RHead specifies a rune which is the first character in a word in the input.
RHead
)
// RuneRoles detects the role of each byte rune in an input string and stores it in the output
// slice. The rune role depends on the input type. It stops when it has parsed all the runes in the
// string or when it has filled the output. If the output is nil, a new slice is created.
func RuneRoles(str string, input Input, reuse []RuneRole) []RuneRole {
var output []RuneRole
if cap(reuse) < len(str) {
output = make([]RuneRole, 0, len(str))
} else {
output = reuse[:0]
}
prev, prev2 := rtNone, rtNone
for i := 0; i < len(str); i++ {
r := rune(str[i])
role := RNone
curr := rtLower
if str[i] <= unicode.MaxASCII {
curr = runeType(rt[str[i]] - '0')
}
if curr == rtLower {
if prev == rtNone || prev == rtPunct {
role = RHead
} else {
role = RTail
}
} else if curr == rtUpper {
role = RHead
if prev == rtUpper {
// This and previous characters are both upper case.
if i+1 == len(str) {
// This is the last character and the previous one was also uppercase -> this is UCTail
// i.e., (current char is C): aBC / BC / ABC
role = RUCTail
}
}
} else if curr == rtPunct {
switch {
case input == Filename && r == '/':
role = RSep
case input == Symbol && r == '.':
role = RSep
case input == Symbol && r == ':':
role = RSep
}
}
if curr != rtLower {
if i > 1 && output[i-1] == RHead && prev2 == rtUpper && (output[i-2] == RHead || output[i-2] == RUCTail) {
// The previous two characters were uppercase. The current one is not a lower case, so the
// previous one can't be a HEAD. Make it a UCTail.
// i.e., (last char is current char - B must be a UCTail): ABC / ZABC / AB.
output[i-1] = RUCTail
}
}
output = append(output, role)
prev2 = prev
prev = curr
}
return output
}
type runeType byte
const (
rtNone runeType = iota
rtPunct
rtLower
rtUpper
)
const rt = "00000000000000000000000000000000000000000000001122222222221000000333333333333333333333333330000002222222222222222222222222200000"
// LastSegment returns the substring representing the last segment from the input, where each
// byte has an associated RuneRole in the roles slice. This makes sense only for inputs of Symbol
// or Filename type.
func LastSegment(input string, roles []RuneRole) string {
// Exclude ending separators.
end := len(input) - 1
for end >= 0 && roles[end] == RSep {
end--
}
if end < 0 {
return ""
}
start := end - 1
for start >= 0 && roles[start] != RSep {
start--
}
return input[start+1 : end+1]
}
// ToLower transforms the input string to lower case, which is stored in the output byte slice.
// The lower casing considers only ASCII values - non-ASCII values are left unmodified.
// It stops when it has parsed all the input or when it has filled the output slice. If the output
// is nil, a new slice is created.
func ToLower(input string, reuse []byte) []byte {
output := reuse
if cap(reuse) < len(input) {
output = make([]byte, len(input))
}
for i := 0; i < len(input); i++ {
r := rune(input[i])
if r <= unicode.MaxASCII {
if 'A' <= r && r <= 'Z' {
r += 'a' - 'A'
}
}
output[i] = byte(r)
}
return output[:len(input)]
}
// WordConsumer defines a consumer for a word delimited by the [start,end) byte offsets in an input
// (start is inclusive, end is exclusive).
type WordConsumer func(start, end int)
// Words finds word delimiters in an input based on its bytes' mappings to rune roles. The offset
// delimiters for each word are fed to the provided consumer function.
func Words(roles []RuneRole, consume WordConsumer) {
var wordStart int
for i, r := range roles {
switch r {
case RUCTail, RTail:
case RHead, RNone, RSep:
if i != wordStart {
consume(wordStart, i)
}
wordStart = i
if r != RHead {
// Skip this character.
wordStart = i + 1
}
}
}
if wordStart != len(roles) {
consume(wordStart, len(roles))
}
}
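A brief, hedged usage sketch of RuneRoles and Words (runnable from inside x/tools, as the tests elsewhere in this change are; the sample symbol is made up):
package main
import (
	"fmt"
	"golang.org/x/tools/internal/lsp/fuzzy"
)
func main() {
	input := "foo.TextEdit"
	// Classify each byte, then split on the resulting word boundaries.
	roles := fuzzy.RuneRoles(input, fuzzy.Symbol, nil)
	fuzzy.Words(roles, func(start, end int) {
		fmt.Println(input[start:end])
	})
	// Prints foo, Text and Edit; the '.' separator starts a new word and is itself skipped.
}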

View File

@ -1,186 +0,0 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package fuzzy_test
import (
"bytes"
"sort"
"testing"
"golang.org/x/tools/internal/lsp/fuzzy"
)
var rolesTests = []struct {
str string
input fuzzy.Input
want string
}{
{str: "abc", want: "Ccc", input: fuzzy.Text},
{str: ".abc", want: " Ccc", input: fuzzy.Text},
{str: "abc def", want: "Ccc Ccc", input: fuzzy.Text},
{str: "SWT MyID", want: "Cuu CcCu", input: fuzzy.Text},
{str: "ID", want: "Cu", input: fuzzy.Text},
{str: "IDD", want: "Cuu", input: fuzzy.Text},
{str: " ID ", want: " Cu ", input: fuzzy.Text},
{str: "IDSome", want: "CuCccc", input: fuzzy.Text},
{str: "0123456789", want: "Cccccccccc", input: fuzzy.Text},
{str: "abcdefghigklmnopqrstuvwxyz", want: "Cccccccccccccccccccccccccc", input: fuzzy.Text},
{str: "ABCDEFGHIGKLMNOPQRSTUVWXYZ", want: "Cuuuuuuuuuuuuuuuuuuuuuuuuu", input: fuzzy.Text},
{str: "こんにちは", want: "Ccccccccccccccc", input: fuzzy.Text}, // We don't parse unicode
{str: ":/.", want: " ", input: fuzzy.Text},
// Filenames
{str: "abc/def", want: "Ccc/Ccc", input: fuzzy.Filename},
{str: " abc_def", want: " Ccc Ccc", input: fuzzy.Filename},
{str: " abc_DDf", want: " Ccc CCc", input: fuzzy.Filename},
{str: ":.", want: " ", input: fuzzy.Filename},
// Symbols
{str: "abc::def::goo", want: "Ccc//Ccc//Ccc", input: fuzzy.Symbol},
{str: "proto::Message", want: "Ccccc//Ccccccc", input: fuzzy.Symbol},
{str: "AbstractSWTFactory", want: "CcccccccCuuCcccccc", input: fuzzy.Symbol},
{str: "Abs012", want: "Cccccc", input: fuzzy.Symbol},
{str: "/", want: " ", input: fuzzy.Symbol},
{str: "fOO", want: "CCu", input: fuzzy.Symbol},
{str: "fo_oo.o_oo", want: "Cc Cc/C Cc", input: fuzzy.Symbol},
}
func rolesString(roles []fuzzy.RuneRole) string {
var buf bytes.Buffer
for _, r := range roles {
buf.WriteByte(" /cuC"[int(r)])
}
return buf.String()
}
func TestRoles(t *testing.T) {
for _, tc := range rolesTests {
gotRoles := make([]fuzzy.RuneRole, len(tc.str))
fuzzy.RuneRoles(tc.str, tc.input, gotRoles)
got := rolesString(gotRoles)
if got != tc.want {
t.Errorf("roles(%s) = %v; want %v", tc.str, got, tc.want)
}
}
}
func words(strWords ...string) [][]byte {
var ret [][]byte
for _, w := range strWords {
ret = append(ret, []byte(w))
}
return ret
}
var wordSplitTests = []struct {
input string
want []string
}{
{
input: "foo bar baz",
want: []string{"foo", "bar", "baz"},
},
{
input: "fooBarBaz",
want: []string{"foo", "Bar", "Baz"},
},
{
input: "FOOBarBAZ",
want: []string{"FOO", "Bar", "BAZ"},
},
{
input: "foo123_bar2Baz3",
want: []string{"foo123", "bar2", "Baz3"},
},
}
func TestWordSplit(t *testing.T) {
for _, tc := range wordSplitTests {
roles := fuzzy.RuneRoles(tc.input, fuzzy.Symbol, nil)
var got []string
consumer := func(i, j int) {
got = append(got, tc.input[i:j])
}
fuzzy.Words(roles, consumer)
if eq := diffStringLists(tc.want, got); !eq {
t.Errorf("input %v: (want %v -> got %v)", tc.input, tc.want, got)
}
}
}
func diffStringLists(a, b []string) bool {
if len(a) != len(b) {
return false
}
sort.Strings(a)
sort.Strings(b)
for i := range a {
if a[i] != b[i] {
return false
}
}
return true
}
var lastSegmentSplitTests = []struct {
str string
input fuzzy.Input
want string
}{
{
str: "identifier",
input: fuzzy.Symbol,
want: "identifier",
},
{
str: "two_words",
input: fuzzy.Symbol,
want: "two_words",
},
{
str: "first::second",
input: fuzzy.Symbol,
want: "second",
},
{
str: "foo.bar.FOOBar_buz123_test",
input: fuzzy.Symbol,
want: "FOOBar_buz123_test",
},
{
str: "golang.org/x/tools/internal/lsp/fuzzy_matcher.go",
input: fuzzy.Filename,
want: "fuzzy_matcher.go",
},
{
str: "golang.org/x/tools/internal/lsp/fuzzy_matcher.go",
input: fuzzy.Text,
want: "golang.org/x/tools/internal/lsp/fuzzy_matcher.go",
},
}
func TestLastSegment(t *testing.T) {
for _, tc := range lastSegmentSplitTests {
roles := fuzzy.RuneRoles(tc.str, tc.input, nil)
got := fuzzy.LastSegment(tc.str, roles)
if got != tc.want {
t.Errorf("str %v: want %v; got %v", tc.str, tc.want, got)
}
}
}
func BenchmarkRoles(b *testing.B) {
str := "AbstractSWTFactory"
out := make([]fuzzy.RuneRole, len(str))
for i := 0; i < b.N; i++ {
fuzzy.RuneRoles(str, fuzzy.Symbol, out)
}
b.SetBytes(int64(len(str)))
}

View File

@ -1,437 +0,0 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package fuzzy implements a fuzzy matching algorithm.
package fuzzy
import (
"bytes"
"fmt"
)
const (
// MaxInputSize is the maximum size of the input scored against the fuzzy matcher. Longer inputs
// will be truncated to this size.
MaxInputSize = 127
// MaxPatternSize is the maximum size of the pattern used to construct the fuzzy matcher. Longer
// patterns are truncated to this size.
MaxPatternSize = 63
)
type scoreVal int
func (s scoreVal) val() int {
return int(s) >> 1
}
func (s scoreVal) prevK() int {
return int(s) & 1
}
func score(val int, prevK int /*0 or 1*/) scoreVal {
return scoreVal(val<<1 + prevK)
}
// Matcher implements a fuzzy matching algorithm for scoring candidates against a pattern.
// The matcher does not support parallel usage.
type Matcher struct {
input Input
pattern string
patternLower []byte // lower-case version of the pattern
patternShort []byte // first characters of the pattern
caseSensitive bool // set if the pattern is mixed-case
patternRoles []RuneRole // the role of each character in the pattern
roles []RuneRole // the role of each character in the tested string
scores [MaxInputSize + 1][MaxPatternSize + 1][2]scoreVal
scoreScale float32
lastCandidateLen int // in bytes
lastCandidateMatched bool
// Here we save the last candidate in lower-case. This is basically a byte slice we reuse for
// performance reasons, so the slice is not reallocated for every candidate.
lowerBuf [MaxInputSize]byte
rolesBuf [MaxInputSize]RuneRole
}
func (m *Matcher) bestK(i, j int) int {
if m.scores[i][j][0].val() < m.scores[i][j][1].val() {
return 1
}
return 0
}
// NewMatcher returns a new fuzzy matcher for scoring candidates against the provided pattern.
func NewMatcher(pattern string, input Input) *Matcher {
if len(pattern) > MaxPatternSize {
pattern = pattern[:MaxPatternSize]
}
m := &Matcher{
input: input,
pattern: pattern,
patternLower: ToLower(pattern, nil),
}
for i, c := range m.patternLower {
if pattern[i] != c {
m.caseSensitive = true
break
}
}
if len(pattern) > 3 {
m.patternShort = m.patternLower[:3]
} else {
m.patternShort = m.patternLower
}
m.patternRoles = RuneRoles(pattern, input, nil)
if len(pattern) > 0 {
maxCharScore := 4
if input == Text {
maxCharScore = 6
}
m.scoreScale = 1 / float32(maxCharScore*len(pattern))
}
return m
}
// SetInput updates the input type for subsequent scoring attempts.
func (m *Matcher) SetInput(input Input) {
if m.input == input {
return
}
m.input = input
m.patternRoles = RuneRoles(m.pattern, input, m.patternRoles)
}
// Score returns the score of matching the candidate against the pattern.
// This is not designed for parallel use; multiple candidates must be scored sequentially.
// It returns -1 if the candidate does not match (or matches too poorly), and otherwise a score
// between 0 and 1 (1 is a perfect match).
func (m *Matcher) Score(candidate string) float32 {
if len(candidate) > MaxInputSize {
candidate = candidate[:MaxInputSize]
}
lower := ToLower(candidate, m.lowerBuf[:])
m.lastCandidateLen = len(candidate)
if len(m.pattern) == 0 {
// Empty patterns perfectly match candidates.
return 1
}
if m.match(candidate, lower) {
sc := m.computeScore(candidate, lower)
if sc > minScore/2 && !m.poorMatch() {
m.lastCandidateMatched = true
if len(m.pattern) == len(candidate) {
// Perfect match.
return 1
}
if sc < 0 {
sc = 0
}
normalizedScore := float32(sc) * m.scoreScale
if normalizedScore > 1 {
normalizedScore = 1
}
return normalizedScore
}
}
m.lastCandidateMatched = false
return -1
}
const minScore = -10000
// MatchedRanges returns the matched ranges for the last scored string as a flattened array of
// [begin, end) byte offset pairs.
func (m *Matcher) MatchedRanges() []int {
if len(m.pattern) == 0 || !m.lastCandidateMatched {
return nil
}
i, j := m.lastCandidateLen, len(m.pattern)
if m.scores[i][j][0].val() < minScore/2 && m.scores[i][j][1].val() < minScore/2 {
return nil
}
var ret []int
k := m.bestK(i, j)
for i > 0 {
take := (k == 1)
k = m.scores[i][j][k].prevK()
if take {
if len(ret) == 0 || ret[len(ret)-1] != i {
ret = append(ret, i)
ret = append(ret, i-1)
} else {
ret[len(ret)-1] = i - 1
}
j--
}
i--
}
// Reverse slice.
for i := 0; i < len(ret)/2; i++ {
ret[i], ret[len(ret)-1-i] = ret[len(ret)-1-i], ret[i]
}
return ret
}
func (m *Matcher) match(candidate string, candidateLower []byte) bool {
i, j := 0, 0
for ; i < len(candidateLower) && j < len(m.patternLower); i++ {
if candidateLower[i] == m.patternLower[j] {
j++
}
}
if j != len(m.patternLower) {
return false
}
// The input passes the simple test against pattern, so it is time to classify its characters.
// Character roles are used below to find the last segment.
m.roles = RuneRoles(candidate, m.input, m.rolesBuf[:])
if m.input != Text {
sep := len(candidateLower) - 1
for sep >= i && m.roles[sep] != RSep {
sep--
}
if sep >= i {
// We are not in the last segment, check that we have at least one character match in the last
// segment of the candidate.
return bytes.IndexByte(candidateLower[sep:], m.patternLower[len(m.pattern)-1]) != -1
}
}
return true
}
func (m *Matcher) computeScore(candidate string, candidateLower []byte) int {
pattLen, candLen := len(m.pattern), len(candidate)
for j := 0; j <= len(m.pattern); j++ {
m.scores[0][j][0] = minScore << 1
m.scores[0][j][1] = minScore << 1
}
m.scores[0][0][0] = score(0, 0) // Start with 0.
segmentsLeft, lastSegStart := 1, 0
for i := 0; i < candLen; i++ {
if m.roles[i] == RSep {
segmentsLeft++
lastSegStart = i + 1
}
}
// A per-character bonus for a consecutive match.
consecutiveBonus := 2
if m.input == Text {
// Consecutive matches for text are more important.
consecutiveBonus = 4
}
wordIdx := 0 // Word count within segment.
for i := 1; i <= candLen; i++ {
role := m.roles[i-1]
isHead := role == RHead
if isHead {
wordIdx++
} else if role == RSep && segmentsLeft > 1 {
wordIdx = 0
segmentsLeft--
}
var skipPenalty int
if segmentsLeft == 1 && isHead && m.input != Text {
// Skipping a word.
skipPenalty++
}
if i-1 == lastSegStart {
// Skipping the start of the last segment.
skipPenalty += 3
}
for j := 0; j <= pattLen; j++ {
// By default, we don't have a match. Fill in the skip data.
m.scores[i][j][1] = minScore << 1
if segmentsLeft > 1 && j == pattLen {
// The very last pattern character can only be matched in the last segment.
m.scores[i][j][0] = minScore << 1
continue
}
// Compute the skip score.
k := 0
if m.scores[i-1][j][0].val() < m.scores[i-1][j][1].val() {
k = 1
}
skipScore := m.scores[i-1][j][k].val()
// Do not penalize missing characters after the last matched segment.
if j != pattLen {
skipScore -= skipPenalty
}
m.scores[i][j][0] = score(skipScore, k)
if j == 0 || candidateLower[i-1] != m.patternLower[j-1] {
// Not a match.
continue
}
pRole := m.patternRoles[j-1]
if role == RTail && pRole == RHead {
if j > 1 {
// Not a match: a head in the pattern matches a tail character in the candidate.
continue
}
// Special treatment for the first character of the pattern. We allow
// matches in the middle of a word if they are long enough, at least
// min(3, pattern.length) characters.
if !bytes.HasPrefix(candidateLower[i-1:], m.patternShort) {
continue
}
}
// Compute the char score.
var charScore int
// Bonus 1: the char is in the candidate's last segment.
if segmentsLeft <= 1 {
charScore++
}
// Bonus 2: Case match or a Head in the pattern aligns with one in the word.
// Single-case patterns lack segmentation signals and we assume any character
// can be a head of a segment.
if candidate[i-1] == m.pattern[j-1] || role == RHead && (!m.caseSensitive || pRole == RHead) {
charScore++
}
// Penalty 1: pattern char is Head, candidate char is Tail.
if role == RTail && pRole == RHead {
charScore--
}
// Penalty 2: first pattern character matched in the middle of a word.
if j == 1 && role == RTail {
charScore -= 4
}
// Third dimension encodes whether there is a gap between the previous match and the current
// one.
for k := 0; k < 2; k++ {
sc := m.scores[i-1][j-1][k].val() + charScore
isConsecutive := k == 1 || i-1 == 0 || i-1 == lastSegStart
if isConsecutive || (m.input == Text && j-1 == 0) {
// Bonus 3: a consecutive match. First character match also gets a bonus to
// ensure prefix final match score normalizes to 1.0.
// Logically, this is a part of charScore, but we have to compute it here because it
// only applies for consecutive matches (k == 1).
sc += consecutiveBonus
}
if k == 0 {
// Penalty 3: Matching inside a segment (and previous char wasn't matched). Penalize for the lack
// of alignment.
if role == RTail || role == RUCTail {
sc -= 3
}
}
if sc > m.scores[i][j][1].val() {
m.scores[i][j][1] = score(sc, k)
}
}
}
}
result := m.scores[len(candidate)][len(m.pattern)][m.bestK(len(candidate), len(m.pattern))].val()
return result
}
// ScoreTable returns the score table computed for the provided candidate. Used only for debugging.
func (m *Matcher) ScoreTable(candidate string) string {
var buf bytes.Buffer
var line1, line2, separator bytes.Buffer
line1.WriteString("\t")
line2.WriteString("\t")
for j := 0; j < len(m.pattern); j++ {
line1.WriteString(fmt.Sprintf("%c\t\t", m.pattern[j]))
separator.WriteString("----------------")
}
buf.WriteString(line1.String())
buf.WriteString("\n")
buf.WriteString(separator.String())
buf.WriteString("\n")
for i := 1; i <= len(candidate); i++ {
line1.Reset()
line2.Reset()
line1.WriteString(fmt.Sprintf("%c\t", candidate[i-1]))
line2.WriteString("\t")
for j := 1; j <= len(m.pattern); j++ {
line1.WriteString(fmt.Sprintf("M%6d(%c)\t", m.scores[i][j][0].val(), dir(m.scores[i][j][0].prevK())))
line2.WriteString(fmt.Sprintf("H%6d(%c)\t", m.scores[i][j][1].val(), dir(m.scores[i][j][1].prevK())))
}
buf.WriteString(line1.String())
buf.WriteString("\n")
buf.WriteString(line2.String())
buf.WriteString("\n")
buf.WriteString(separator.String())
buf.WriteString("\n")
}
return buf.String()
}
func dir(prevK int) rune {
if prevK == 0 {
return 'M'
}
return 'H'
}
func (m *Matcher) poorMatch() bool {
if len(m.pattern) < 2 {
return false
}
i, j := m.lastCandidateLen, len(m.pattern)
k := m.bestK(i, j)
var counter, len int
for i > 0 {
take := (k == 1)
k = m.scores[i][j][k].prevK()
if take {
len++
if k == 0 && len < 3 && m.roles[i-1] == RTail {
// Short match in the middle of a word
counter++
if counter > 1 {
return true
}
}
j--
} else {
len = 0
}
i--
}
return false
}
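A small, hedged sketch of the matcher in Symbol mode (again assuming it runs from within x/tools; the candidates are borrowed from the test cases below): Score reports -1 for a non-match and otherwise a value in [0, 1], and MatchedRanges exposes the matched byte offsets as flattened [begin, end) pairs.
package main
import (
	"fmt"
	"golang.org/x/tools/internal/lsp/fuzzy"
)
func main() {
	m := fuzzy.NewMatcher("TEdit", fuzzy.Symbol)
	for _, candidate := range []string{"foo.TextEdit", "foo.Textedit"} {
		score := m.Score(candidate)
		if score < 0 {
			fmt.Printf("%s: no match\n", candidate)
			continue
		}
		// MatchedRanges is only valid for the last scored candidate.
		fmt.Printf("%s: score %.2f, matched ranges %v\n", candidate, score, m.MatchedRanges())
	}
}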

View File

@ -1,352 +0,0 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Benchmark results:
//
// BenchmarkMatcher-12 1000000 1615 ns/op 30.95 MB/s 0 B/op 0 allocs/op
//
package fuzzy_test
import (
"bytes"
"fmt"
"math"
"testing"
"golang.org/x/tools/internal/lsp/fuzzy"
)
func ExampleFuzzyMatcher() {
pattern := "TEdit"
candidates := []string{"fuzzy.TextEdit", "ArtEdit", "TED talks about IT"}
// Create a fuzzy matcher for the pattern.
matcher := fuzzy.NewMatcher(pattern, fuzzy.Text)
for _, candidate := range candidates {
// Compute candidate's score against the matcher.
score := matcher.Score(candidate)
if score > -1 {
// Get the substrings in the candidate matching the pattern.
ranges := matcher.MatchedRanges()
fmt.Println(ranges) // Do something with the ranges.
}
}
}
type comparator struct {
f func(val, ref float32) bool
descr string
}
var (
eq = comparator{
f: func(val, ref float32) bool {
return val == ref
},
descr: "==",
}
ge = comparator{
f: func(val, ref float32) bool {
return val >= ref
},
descr: ">=",
}
)
func (c comparator) eval(val, ref float32) bool {
return c.f(val, ref)
}
func (c comparator) String() string {
return c.descr
}
type scoreTest struct {
candidate string
comparator
ref float32
}
var matcherTests = []struct {
pattern string
input fuzzy.Input
tests []scoreTest
}{
{
pattern: "",
input: fuzzy.Text,
tests: []scoreTest{
{"def", eq, 1},
{"Ab stuff c", eq, 1},
},
},
{
pattern: "abc",
input: fuzzy.Text,
tests: []scoreTest{
{"def", eq, -1},
{"abd", eq, -1},
{"abc", ge, 0},
{"Abc", ge, 0},
{"Ab stuff c", ge, 0},
},
},
{
pattern: "Abc",
input: fuzzy.Text,
tests: []scoreTest{
{"def", eq, -1},
{"abd", eq, -1},
{"abc", ge, 0},
{"Abc", ge, 0},
{"Ab stuff c", ge, 0},
},
},
{
pattern: "subs",
input: fuzzy.Filename,
tests: []scoreTest{
{"sub/seq", ge, 0},
{"sub/seq/end", eq, -1},
{"sub/seq/base", ge, 0},
},
},
{
pattern: "subs",
input: fuzzy.Filename,
tests: []scoreTest{
{"//sub/seq", ge, 0},
{"//sub/seq/end", eq, -1},
{"//sub/seq/base", ge, 0},
},
},
}
func TestScore(t *testing.T) {
for _, tc := range matcherTests {
m := fuzzy.NewMatcher(tc.pattern, tc.input)
for _, sct := range tc.tests {
score := m.Score(sct.candidate)
if !sct.comparator.eval(score, sct.ref) {
t.Errorf("not true that m.Score(%s)[=%v] %s %v", sct.candidate, score, sct.comparator, sct.ref)
}
}
}
}
type candidateCompTest struct {
c1 string
comparator comparator
c2 string
}
var compareCandidatesTestCases = []struct {
pattern string
input fuzzy.Input
orderedCandidates []string
}{
{
pattern: "aa",
input: fuzzy.Filename,
orderedCandidates: []string{
"baab",
"bb_aa",
"a/a/a",
"aa_bb",
"aa_b",
"aabb",
"aab",
"b/aa",
},
},
{
pattern: "Foo",
input: fuzzy.Text,
orderedCandidates: []string{
"Barfoo",
"F_o_o",
"Faoo",
"F__oo",
"F_oo",
"FaoFooa",
"BarFoo",
"FooA",
"FooBar",
"Foo",
},
},
}
func TestCompareCandidateScores(t *testing.T) {
for _, tc := range compareCandidatesTestCases {
m := fuzzy.NewMatcher(tc.pattern, tc.input)
var prevScore float32
prevCand := "MIN_SCORE"
for _, cand := range tc.orderedCandidates {
score := m.Score(cand)
if prevScore > score {
t.Errorf("%s[=%v] is scored lower than %s[=%v]", cand, score, prevCand, prevScore)
}
if score < -1 || score > 1 {
t.Errorf("%s score is %v; want value between [-1, 1]", cand, score)
}
prevScore = score
prevCand = cand
}
}
}
var fuzzyMatcherTestCases = []struct {
p string
str string
want string
input fuzzy.Input
}{
// fuzzy.Filename
{p: "aa", str: "a_a/a_a", want: "[a]_a/[a]_a", input: fuzzy.Filename},
{p: "aaaa", str: "a_a/a_a", want: "[a]_[a]/[a]_[a]", input: fuzzy.Filename},
{p: "aaaa", str: "aaaa", want: "[aaaa]", input: fuzzy.Filename},
{p: "aaaa", str: "a_a/a_aaaa", want: "a_a/[a]_[aaa]a", input: fuzzy.Filename},
{p: "aaaa", str: "a_a/aaaaa", want: "a_a/[aaaa]a", input: fuzzy.Filename},
{p: "aaaa", str: "aabaaa", want: "[aa]b[aa]a", input: fuzzy.Filename},
{p: "aaaa", str: "a/baaa", want: "[a]/b[aaa]", input: fuzzy.Filename},
{p: "abcxz", str: "d/abc/abcd/oxz", want: "d/[abc]/abcd/o[xz]", input: fuzzy.Filename},
{p: "abcxz", str: "d/abcd/abc/oxz", want: "d/[abc]d/abc/o[xz]", input: fuzzy.Filename},
// fuzzy.Symbol
{p: "foo", str: "abc::foo", want: "abc::[foo]", input: fuzzy.Symbol},
{p: "foo", str: "foo.foo", want: "foo.[foo]", input: fuzzy.Symbol},
{p: "foo", str: "fo_oo.o_oo", want: "[fo]_oo.[o]_oo", input: fuzzy.Symbol},
{p: "foo", str: "fo_oo.fo_oo", want: "fo_oo.[fo]_[o]o", input: fuzzy.Symbol},
{p: "fo_o", str: "fo_oo.o_oo", want: "[f]o_oo.[o_o]o", input: fuzzy.Symbol},
{p: "fOO", str: "fo_oo.o_oo", want: "[f]o_oo.[o]_[o]o", input: fuzzy.Symbol},
{p: "tedit", str: "foo.TextEdit", want: "foo.[T]ext[Edit]", input: fuzzy.Symbol},
{p: "TEdit", str: "foo.TextEdit", want: "foo.[T]ext[Edit]", input: fuzzy.Symbol},
{p: "Tedit", str: "foo.TextEdit", want: "foo.[T]ext[Edit]", input: fuzzy.Symbol},
{p: "Tedit", str: "foo.Textedit", want: "foo.[Te]xte[dit]", input: fuzzy.Symbol},
{p: "TEdit", str: "foo.Textedit", want: "", input: fuzzy.Symbol},
{p: "te", str: "foo.Textedit", want: "foo.[Te]xtedit", input: fuzzy.Symbol},
{p: "ee", str: "foo.Textedit", want: "", input: fuzzy.Symbol}, // short middle of the word match
{p: "ex", str: "foo.Textedit", want: "foo.T[ex]tedit", input: fuzzy.Symbol},
{p: "exdi", str: "foo.Textedit", want: "", input: fuzzy.Symbol}, // short middle of the word match
{p: "exdit", str: "foo.Textedit", want: "", input: fuzzy.Symbol}, // short middle of the word match
{p: "extdit", str: "foo.Textedit", want: "foo.T[ext]e[dit]", input: fuzzy.Symbol},
{p: "e", str: "foo.Textedit", want: "foo.T[e]xtedit", input: fuzzy.Symbol},
{p: "E", str: "foo.Textedit", want: "foo.T[e]xtedit", input: fuzzy.Symbol},
{p: "ed", str: "foo.Textedit", want: "foo.Text[ed]it", input: fuzzy.Symbol},
{p: "edt", str: "foo.Textedit", want: "", input: fuzzy.Symbol}, // short middle of the word match
{p: "edit", str: "foo.Textedit", want: "foo.Text[edit]", input: fuzzy.Symbol},
{p: "edin", str: "foo.TexteditNum", want: "foo.Text[edi]t[N]um", input: fuzzy.Symbol},
{p: "n", str: "node.GoNodeMax", want: "node.Go[N]odeMax", input: fuzzy.Symbol},
{p: "N", str: "node.GoNodeMax", want: "node.Go[N]odeMax", input: fuzzy.Symbol},
{p: "completio", str: "completion", want: "[completio]n", input: fuzzy.Symbol},
{p: "completio", str: "completion.None", want: "[completi]on.N[o]ne", input: fuzzy.Symbol},
}
func TestFuzzyMatcherRanges(t *testing.T) {
for _, tc := range fuzzyMatcherTestCases {
matcher := fuzzy.NewMatcher(tc.p, tc.input)
score := matcher.Score(tc.str)
if tc.want == "" {
if score >= 0 {
t.Errorf("Score(%s, %s) = %v; want: <= 0", tc.p, tc.str, score)
}
continue
}
if score < 0 {
t.Errorf("Score(%s, %s) = %v, want: > 0", tc.p, tc.str, score)
continue
}
got := highlightMatches(tc.str, matcher)
if tc.want != got {
t.Errorf("highlightMatches(%s, %s) = %v, want: %v", tc.p, tc.str, got, tc.want)
}
}
}
var scoreTestCases = []struct {
p string
str string
want float64
}{
// Score precision up to five digits. Modify if changing the score, but make sure the new values
// are reasonable.
{p: "abc", str: "abc", want: 1},
{p: "abc", str: "Abc", want: 1},
{p: "abc", str: "Abcdef", want: 1},
{p: "strc", str: "StrCat", want: 1},
{p: "abc_def", str: "abc_def_xyz", want: 1},
{p: "abcdef", str: "abc_def_xyz", want: 0.91667},
{p: "abcxyz", str: "abc_def_xyz", want: 0.875},
{p: "sc", str: "StrCat", want: 0.75},
{p: "abc", str: "AbstrBasicCtor", want: 0.75},
{p: "foo", str: "abc::foo", want: 1},
{p: "afoo", str: "abc::foo", want: 0.9375},
{p: "abr", str: "abc::bar", want: 0.5},
{p: "br", str: "abc::bar", want: 0.375},
{p: "aar", str: "abc::bar", want: 0.16667},
{p: "edin", str: "foo.TexteditNum", want: 0},
{p: "ediu", str: "foo.TexteditNum", want: 0},
// We want the next two items to have roughly similar scores.
{p: "up", str: "unique_ptr", want: 0.75},
{p: "up", str: "upper_bound", want: 1},
}
func TestScores(t *testing.T) {
for _, tc := range scoreTestCases {
matcher := fuzzy.NewMatcher(tc.p, fuzzy.Symbol)
got := math.Round(float64(matcher.Score(tc.str))*1e5) / 1e5
if got != tc.want {
t.Errorf("Score(%s, %s) = %v, want: %v", tc.p, tc.str, got, tc.want)
}
}
}
func highlightMatches(str string, matcher *fuzzy.Matcher) string {
matches := matcher.MatchedRanges()
var buf bytes.Buffer
index := 0
for i := 0; i < len(matches)-1; i += 2 {
s, e := matches[i], matches[i+1]
fmt.Fprintf(&buf, "%s[%s]", str[index:s], str[s:e])
index = e
}
buf.WriteString(str[index:])
return buf.String()
}
func BenchmarkMatcher(b *testing.B) {
pattern := "Foo"
candidates := []string{
"F_o_o",
"Barfoo",
"Faoo",
"F__oo",
"F_oo",
"FaoFooa",
"BarFoo",
"FooA",
"FooBar",
"Foo",
}
matcher := fuzzy.NewMatcher(pattern, fuzzy.Text)
b.ResetTimer()
for i := 0; i < b.N; i++ {
for _, c := range candidates {
matcher.Score(c)
}
}
var numBytes int
for _, c := range candidates {
numBytes += len(c)
}
b.SetBytes(int64(numBytes))
}

View File

@ -15,21 +15,16 @@ import (
"golang.org/x/tools/internal/lsp/debug" "golang.org/x/tools/internal/lsp/debug"
"golang.org/x/tools/internal/lsp/protocol" "golang.org/x/tools/internal/lsp/protocol"
"golang.org/x/tools/internal/lsp/source" "golang.org/x/tools/internal/lsp/source"
"golang.org/x/tools/internal/lsp/telemetry/log"
"golang.org/x/tools/internal/lsp/telemetry/tag"
"golang.org/x/tools/internal/span" "golang.org/x/tools/internal/span"
) )
func (s *Server) initialize(ctx context.Context, params *protocol.InitializeParams) (*protocol.InitializeResult, error) { func (s *Server) initialize(ctx context.Context, params *protocol.InitializeParams) (*protocol.InitializeResult, error) {
s.stateMu.Lock() s.initializedMu.Lock()
state := s.state defer s.initializedMu.Unlock()
s.stateMu.Unlock() if s.isInitialized {
if state >= serverInitializing {
return nil, jsonrpc2.NewErrorf(jsonrpc2.CodeInvalidRequest, "server already initialized") return nil, jsonrpc2.NewErrorf(jsonrpc2.CodeInvalidRequest, "server already initialized")
} }
s.stateMu.Lock() s.isInitialized = true // mark server as initialized now
s.state = serverInitializing
s.stateMu.Unlock()
// TODO: Remove the option once we are certain there are no issues here. // TODO: Remove the option once we are certain there are no issues here.
s.textDocumentSyncKind = protocol.Incremental s.textDocumentSyncKind = protocol.Incremental
@ -39,14 +34,6 @@ func (s *Server) initialize(ctx context.Context, params *protocol.InitializePara
} }
} }
// Default to using synopsis as a default for hover information.
s.hoverKind = source.SynopsisDocumentation
s.supportedCodeActions = map[protocol.CodeActionKind]bool{
protocol.SourceOrganizeImports: true,
protocol.QuickFix: true,
}
s.setClientCapabilities(params.Capabilities) s.setClientCapabilities(params.Capabilities)
folders := params.WorkspaceFolders folders := params.WorkspaceFolders
@ -69,6 +56,7 @@ func (s *Server) initialize(ctx context.Context, params *protocol.InitializePara
return nil, err return nil, err
} }
} }
return &protocol.InitializeResult{ return &protocol.InitializeResult{
Capabilities: protocol.ServerCapabilities{ Capabilities: protocol.ServerCapabilities{
CodeActionProvider: true, CodeActionProvider: true,
@ -130,10 +118,6 @@ func (s *Server) setClientCapabilities(caps protocol.ClientCapabilities) {
} }
func (s *Server) initialized(ctx context.Context, params *protocol.InitializedParams) error { func (s *Server) initialized(ctx context.Context, params *protocol.InitializedParams) error {
s.stateMu.Lock()
s.state = serverInitialized
s.stateMu.Unlock()
if s.configurationSupported { if s.configurationSupported {
if s.dynamicConfigurationSupported { if s.dynamicConfigurationSupported {
s.client.RegisterCapability(ctx, &protocol.RegistrationParams{ s.client.RegisterCapability(ctx, &protocol.RegistrationParams{
@ -147,40 +131,27 @@ func (s *Server) initialized(ctx context.Context, params *protocol.InitializedPa
}) })
} }
for _, view := range s.session.Views() { for _, view := range s.session.Views() {
if err := s.fetchConfig(ctx, view); err != nil { config, err := s.client.Configuration(ctx, &protocol.ConfigurationParams{
Items: []protocol.ConfigurationItem{{
ScopeURI: protocol.NewURI(view.Folder()),
Section: "gopls",
}},
})
if err != nil {
return err
}
if err := s.processConfig(view, config[0]); err != nil {
return err return err
} }
} }
} }
buf := &bytes.Buffer{} buf := &bytes.Buffer{}
debug.PrintVersionInfo(buf, true, debug.PlainText) debug.PrintVersionInfo(buf, true, debug.PlainText)
log.Print(ctx, buf.String()) s.session.Logger().Infof(ctx, "%s", buf)
return nil return nil
} }
func (s *Server) fetchConfig(ctx context.Context, view source.View) error { func (s *Server) processConfig(view source.View, config interface{}) error {
configs, err := s.client.Configuration(ctx, &protocol.ConfigurationParams{
Items: []protocol.ConfigurationItem{{
ScopeURI: protocol.NewURI(view.Folder()),
Section: "gopls",
}, {
ScopeURI: protocol.NewURI(view.Folder()),
Section: view.Name(),
},
},
})
if err != nil {
return err
}
for _, config := range configs {
if err := s.processConfig(ctx, view, config); err != nil {
return err
}
}
return nil
}
func (s *Server) processConfig(ctx context.Context, view source.View, config interface{}) error {
// TODO: We should probably store and process more of the config. // TODO: We should probably store and process more of the config.
if config == nil { if config == nil {
return nil // ignore error if you don't have a config return nil // ignore error if you don't have a config
@ -213,31 +184,13 @@ func (s *Server) processConfig(ctx context.Context, view source.View, config int
} }
view.SetBuildFlags(flags) view.SetBuildFlags(flags)
} }
// Check if the user wants documentation in completion items.
if wantCompletionDocumentation, ok := c["wantCompletionDocumentation"].(bool); ok {
s.wantCompletionDocumentation = wantCompletionDocumentation
}
// Check if placeholders are enabled. // Check if placeholders are enabled.
if usePlaceholders, ok := c["usePlaceholders"].(bool); ok { if usePlaceholders, ok := c["usePlaceholders"].(bool); ok {
s.usePlaceholders = usePlaceholders s.usePlaceholders = usePlaceholders
} }
// Set the hover kind. // Check if user has disabled documentation on hover.
if hoverKind, ok := c["hoverKind"].(string); ok { if noDocsOnHover, ok := c["noDocsOnHover"].(bool); ok {
switch hoverKind { s.noDocsOnHover = noDocsOnHover
case "NoDocumentation":
s.hoverKind = source.NoDocumentation
case "SynopsisDocumentation":
s.hoverKind = source.SynopsisDocumentation
case "FullDocumentation":
s.hoverKind = source.FullDocumentation
default:
log.Error(ctx, "unsupported hover kind", nil, tag.Of("HoverKind", hoverKind))
// The default value is already set to synopsis.
}
}
// Check if the user wants to see suggested fixes from go/analysis.
if wantSuggestedFixes, ok := c["wantSuggestedFixes"].(bool); ok {
s.wantSuggestedFixes = wantSuggestedFixes
} }
// Check if the user has explicitly disabled any analyses. // Check if the user has explicitly disabled any analyses.
if disabledAnalyses, ok := c["experimentalDisabledAnalyses"].([]interface{}); ok { if disabledAnalyses, ok := c["experimentalDisabledAnalyses"].([]interface{}); ok {
@ -256,21 +209,19 @@ func (s *Server) processConfig(ctx context.Context, view source.View, config int
} }
func (s *Server) shutdown(ctx context.Context) error { func (s *Server) shutdown(ctx context.Context) error {
s.stateMu.Lock() s.initializedMu.Lock()
defer s.stateMu.Unlock() defer s.initializedMu.Unlock()
if s.state < serverInitialized { if !s.isInitialized {
return jsonrpc2.NewErrorf(jsonrpc2.CodeInvalidRequest, "server not initialized") return jsonrpc2.NewErrorf(jsonrpc2.CodeInvalidRequest, "server not initialized")
} }
// drop all the active views // drop all the active views
s.session.Shutdown(ctx) s.session.Shutdown(ctx)
s.state = serverShutDown s.isInitialized = false
return nil return nil
} }
func (s *Server) exit(ctx context.Context) error { func (s *Server) exit(ctx context.Context) error {
s.stateMu.Lock() if s.isInitialized {
defer s.stateMu.Unlock()
if s.state != serverShutDown {
os.Exit(1) os.Exit(1)
} }
os.Exit(0) os.Exit(0)

View File

@ -9,8 +9,6 @@ import (
"golang.org/x/tools/internal/lsp/protocol" "golang.org/x/tools/internal/lsp/protocol"
"golang.org/x/tools/internal/lsp/source" "golang.org/x/tools/internal/lsp/source"
"golang.org/x/tools/internal/lsp/telemetry/log"
"golang.org/x/tools/internal/lsp/telemetry/tag"
"golang.org/x/tools/internal/span" "golang.org/x/tools/internal/span"
) )
@ -31,7 +29,7 @@ func (s *Server) documentHighlight(ctx context.Context, params *protocol.TextDoc
} }
spans, err := source.Highlight(ctx, f, rng.Start) spans, err := source.Highlight(ctx, f, rng.Start)
if err != nil { if err != nil {
log.Error(ctx, "no highlight", err, tag.Of("Span", spn)) view.Session().Logger().Errorf(ctx, "no highlight for %s: %v", spn, err)
} }
return toProtocolHighlight(m, spans), nil return toProtocolHighlight(m, spans), nil
} }

View File

@ -6,6 +6,8 @@ package lsp
import ( import (
"context" "context"
"fmt"
"golang.org/x/tools/internal/lsp/protocol" "golang.org/x/tools/internal/lsp/protocol"
"golang.org/x/tools/internal/lsp/source" "golang.org/x/tools/internal/lsp/source"
"golang.org/x/tools/internal/span" "golang.org/x/tools/internal/span"
@ -28,9 +30,9 @@ func (s *Server) hover(ctx context.Context, params *protocol.TextDocumentPositio
} }
ident, err := source.Identifier(ctx, view, f, identRange.Start) ident, err := source.Identifier(ctx, view, f, identRange.Start)
if err != nil { if err != nil {
return nil, nil return nil, err
} }
hover, err := ident.Hover(ctx, s.preferredContentFormat == protocol.Markdown, s.hoverKind) hover, err := ident.Hover(ctx, s.preferredContentFormat == protocol.Markdown, !s.noDocsOnHover)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -50,3 +52,19 @@ func (s *Server) hover(ctx context.Context, params *protocol.TextDocumentPositio
Range: &rng, Range: &rng,
}, nil }, nil
} }
func markupContent(decl, doc string, kind protocol.MarkupKind) protocol.MarkupContent {
result := protocol.MarkupContent{
Kind: kind,
}
switch kind {
case protocol.PlainText:
result.Value = decl
case protocol.Markdown:
result.Value = "```go\n" + decl + "\n```"
}
if doc != "" {
result.Value = fmt.Sprintf("%s\n%s", doc, result.Value)
}
return result
}
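
The markupContent helper above formats hover text: plain-text clients get the bare declaration, Markdown clients get the declaration wrapped in a fenced Go block, and any doc comment is prepended. A standalone sketch of the same formatting rule, reimplemented here with a plain bool instead of protocol.MarkupKind:

	package main

	import "fmt"

	// markupContent mirrors the helper above: plain-text clients receive the
	// bare declaration, Markdown clients a fenced Go block, and a non-empty
	// doc comment is prepended on its own line.
	func markupContent(decl, doc string, markdown bool) string {
		value := decl
		if markdown {
			value = "```go\n" + decl + "\n```"
		}
		if doc != "" {
			value = fmt.Sprintf("%s\n%s", doc, value)
		}
		return value
	}

	func main() {
		fmt.Println(markupContent(
			"func Println(a ...interface{}) (n int, err error)",
			"Println formats using the default formats for its operands.",
			true))
	}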

View File

@ -7,16 +7,9 @@ package lsp
import ( import (
"context" "context"
"fmt" "fmt"
"go/ast"
"go/token"
"regexp"
"strconv" "strconv"
"sync"
"golang.org/x/tools/internal/lsp/protocol" "golang.org/x/tools/internal/lsp/protocol"
"golang.org/x/tools/internal/lsp/source"
"golang.org/x/tools/internal/lsp/telemetry/log"
"golang.org/x/tools/internal/lsp/telemetry/tag"
"golang.org/x/tools/internal/span" "golang.org/x/tools/internal/span"
) )
@ -27,105 +20,30 @@ func (s *Server) documentLink(ctx context.Context, params *protocol.DocumentLink
if err != nil { if err != nil {
return nil, err return nil, err
} }
file, err := f.GetAST(ctx, source.ParseFull) file := f.GetAST(ctx)
if file == nil { if file == nil {
return nil, fmt.Errorf("no AST for %v", uri)
}
// Add a Godoc link for each imported package.
var result []protocol.DocumentLink
for _, imp := range file.Imports {
spn, err := span.NewRange(view.Session().Cache().FileSet(), imp.Pos(), imp.End()).Span()
if err != nil {
return nil, err return nil, err
} }
rng, err := m.Range(spn)
var links []protocol.DocumentLink
ast.Inspect(file, func(node ast.Node) bool {
switch n := node.(type) {
case *ast.ImportSpec:
target, err := strconv.Unquote(n.Path.Value)
if err != nil { if err != nil {
log.Error(ctx, "cannot unquote import path", err, tag.Of("Path", n.Path.Value)) return nil, err
return false
} }
target = "https://godoc.org/" + target target, err := strconv.Unquote(imp.Path.Value)
l, err := toProtocolLink(view, m, target, n.Pos(), n.End())
if err != nil { if err != nil {
log.Error(ctx, "cannot initialize DocumentLink", err, tag.Of("Path", n.Path.Value))
return false
}
links = append(links, l)
return false
case *ast.BasicLit:
if n.Kind != token.STRING {
return false
}
l, err := findLinksInString(n.Value, n.Pos(), view, m)
if err != nil {
log.Error(ctx, "cannot find links in string", err)
return false
}
links = append(links, l...)
return false
}
return true
})
for _, commentGroup := range file.Comments {
for _, comment := range commentGroup.List {
l, err := findLinksInString(comment.Text, comment.Pos(), view, m)
if err != nil {
log.Error(ctx, "cannot find links in comment", err)
continue continue
} }
links = append(links, l...) target = "https://godoc.org/" + target
} result = append(result, protocol.DocumentLink{
}
return links, nil
}
func findLinksInString(src string, pos token.Pos, view source.View, mapper *protocol.ColumnMapper) ([]protocol.DocumentLink, error) {
var links []protocol.DocumentLink
re, err := getURLRegexp()
if err != nil {
return nil, fmt.Errorf("cannot create regexp for links: %s", err.Error())
}
for _, urlIndex := range re.FindAllIndex([]byte(src), -1) {
start := urlIndex[0]
end := urlIndex[1]
startPos := token.Pos(int(pos) + start)
endPos := token.Pos(int(pos) + end)
target := src[start:end]
l, err := toProtocolLink(view, mapper, target, startPos, endPos)
if err != nil {
return nil, err
}
links = append(links, l)
}
return links, nil
}
const urlRegexpString = "(http|ftp|https)://([\\w_-]+(?:(?:\\.[\\w_-]+)+))([\\w.,@?^=%&:/~+#-]*[\\w@?^=%&/~+#-])?"
var (
urlRegexp *regexp.Regexp
regexpOnce sync.Once
regexpErr error
)
func getURLRegexp() (*regexp.Regexp, error) {
regexpOnce.Do(func() {
urlRegexp, regexpErr = regexp.Compile(urlRegexpString)
})
return urlRegexp, regexpErr
}
func toProtocolLink(view source.View, mapper *protocol.ColumnMapper, target string, start, end token.Pos) (protocol.DocumentLink, error) {
spn, err := span.NewRange(view.Session().Cache().FileSet(), start, end).Span()
if err != nil {
return protocol.DocumentLink{}, err
}
rng, err := mapper.Range(spn)
if err != nil {
return protocol.DocumentLink{}, err
}
l := protocol.DocumentLink{
Range: rng, Range: rng,
Target: target, Target: target,
})
} }
return l, nil return result, nil
} }
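
The newer link provider above walks the AST and scans string literals and comments with a URL regexp that is compiled once via sync.Once. A self-contained sketch of that scan over a plain string, reusing the regexp pattern shown above:

	package main

	import (
		"fmt"
		"regexp"
		"sync"
	)

	const urlRegexpString = "(http|ftp|https)://([\\w_-]+(?:(?:\\.[\\w_-]+)+))([\\w.,@?^=%&:/~+#-]*[\\w@?^=%&/~+#-])?"

	var (
		urlRegexp  *regexp.Regexp
		regexpOnce sync.Once
	)

	// getURLRegexp compiles the pattern on first use, as in the provider above
	// (which additionally propagates the compile error instead of panicking).
	func getURLRegexp() *regexp.Regexp {
		regexpOnce.Do(func() {
			urlRegexp = regexp.MustCompile(urlRegexpString)
		})
		return urlRegexp
	}

	func main() {
		src := `// See https://godoc.org/golang.org/x/tools and http://example.com/docs for details.`
		for _, idx := range getURLRegexp().FindAllIndex([]byte(src), -1) {
			fmt.Printf("link %q at byte offsets [%d,%d)\n", src[idx[0]:idx[1]], idx[0], idx[1])
		}
	}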

View File

@ -10,7 +10,6 @@ import (
"fmt" "fmt"
"go/token" "go/token"
"os/exec" "os/exec"
"path/filepath"
"sort" "sort"
"strings" "strings"
"testing" "testing"
@ -21,6 +20,7 @@ import (
"golang.org/x/tools/internal/lsp/protocol" "golang.org/x/tools/internal/lsp/protocol"
"golang.org/x/tools/internal/lsp/source" "golang.org/x/tools/internal/lsp/source"
"golang.org/x/tools/internal/lsp/tests" "golang.org/x/tools/internal/lsp/tests"
"golang.org/x/tools/internal/lsp/xlog"
"golang.org/x/tools/internal/span" "golang.org/x/tools/internal/span"
) )
@ -31,19 +31,18 @@ func TestLSP(t *testing.T) {
type runner struct { type runner struct {
server *Server server *Server
data *tests.Data data *tests.Data
ctx context.Context
} }
const viewName = "lsp_test" const viewName = "lsp_test"
func testLSP(t *testing.T, exporter packagestest.Exporter) { func testLSP(t *testing.T, exporter packagestest.Exporter) {
ctx := tests.Context(t)
data := tests.Load(t, exporter, "testdata") data := tests.Load(t, exporter, "testdata")
defer data.Exported.Cleanup() defer data.Exported.Cleanup()
log := xlog.New(xlog.StdSink{})
cache := cache.New() cache := cache.New()
session := cache.NewSession(ctx) session := cache.NewSession(log)
view := session.NewView(ctx, viewName, span.FileURI(data.Config.Dir)) view := session.NewView(viewName, span.FileURI(data.Config.Dir))
view.SetEnv(data.Config.Env) view.SetEnv(data.Config.Env)
for filename, content := range data.Config.Overlay { for filename, content := range data.Config.Overlay {
session.SetOverlay(span.FileURI(filename), content) session.SetOverlay(span.FileURI(filename), content)
@ -52,14 +51,8 @@ func testLSP(t *testing.T, exporter packagestest.Exporter) {
server: &Server{ server: &Server{
session: session, session: session,
undelivered: make(map[span.URI][]source.Diagnostic), undelivered: make(map[span.URI][]source.Diagnostic),
supportedCodeActions: map[protocol.CodeActionKind]bool{
protocol.SourceOrganizeImports: true,
protocol.QuickFix: true,
},
hoverKind: source.SynopsisDocumentation,
}, },
data: data, data: data,
ctx: ctx,
} }
tests.Run(t, r, data) tests.Run(t, r, data)
} }
@ -68,7 +61,7 @@ func testLSP(t *testing.T, exporter packagestest.Exporter) {
func (r *runner) Diagnostics(t *testing.T, data tests.Diagnostics) { func (r *runner) Diagnostics(t *testing.T, data tests.Diagnostics) {
v := r.server.session.View(viewName) v := r.server.session.View(viewName)
for uri, want := range data { for uri, want := range data {
f, err := v.GetFile(r.ctx, uri) f, err := v.GetFile(context.Background(), uri)
if err != nil { if err != nil {
t.Fatalf("no file for %s: %v", f, err) t.Fatalf("no file for %s: %v", f, err)
} }
@ -76,7 +69,7 @@ func (r *runner) Diagnostics(t *testing.T, data tests.Diagnostics) {
if !ok { if !ok {
t.Fatalf("%s is not a Go file: %v", uri, err) t.Fatalf("%s is not a Go file: %v", uri, err)
} }
results, err := source.Diagnostics(r.ctx, v, gof, nil) results, err := source.Diagnostics(context.Background(), v, gof, nil)
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
@ -219,7 +212,7 @@ func (r *runner) Completion(t *testing.T, data tests.Completions, snippets tests
func (r *runner) runCompletion(t *testing.T, src span.Span) *protocol.CompletionList { func (r *runner) runCompletion(t *testing.T, src span.Span) *protocol.CompletionList {
t.Helper() t.Helper()
list, err := r.server.Completion(r.ctx, &protocol.CompletionParams{ list, err := r.server.Completion(context.Background(), &protocol.CompletionParams{
TextDocumentPositionParams: protocol.TextDocumentPositionParams{ TextDocumentPositionParams: protocol.TextDocumentPositionParams{
TextDocument: protocol.TextDocumentIdentifier{ TextDocument: protocol.TextDocumentIdentifier{
URI: protocol.NewURI(src.URI()), URI: protocol.NewURI(src.URI()),
@ -296,6 +289,7 @@ func summarizeCompletionItems(i int, want []source.CompletionItem, got []protoco
} }
func (r *runner) Format(t *testing.T, data tests.Formats) { func (r *runner) Format(t *testing.T, data tests.Formats) {
ctx := context.Background()
for _, spn := range data { for _, spn := range data {
uri := spn.URI() uri := spn.URI()
filename := uri.Filename() filename := uri.Filename()
@ -305,7 +299,7 @@ func (r *runner) Format(t *testing.T, data tests.Formats) {
return out, nil return out, nil
})) }))
edits, err := r.server.Formatting(r.ctx, &protocol.DocumentFormattingParams{ edits, err := r.server.Formatting(context.Background(), &protocol.DocumentFormattingParams{
TextDocument: protocol.TextDocumentIdentifier{ TextDocument: protocol.TextDocumentIdentifier{
URI: protocol.NewURI(uri), URI: protocol.NewURI(uri),
}, },
@ -316,7 +310,7 @@ func (r *runner) Format(t *testing.T, data tests.Formats) {
} }
continue continue
} }
_, m, err := getSourceFile(r.ctx, r.server.session.ViewOf(uri), uri) _, m, err := getSourceFile(ctx, r.server.session.ViewOf(uri), uri)
if err != nil { if err != nil {
t.Error(err) t.Error(err)
} }
@ -333,6 +327,7 @@ func (r *runner) Format(t *testing.T, data tests.Formats) {
} }
func (r *runner) Import(t *testing.T, data tests.Imports) { func (r *runner) Import(t *testing.T, data tests.Imports) {
ctx := context.Background()
for _, spn := range data { for _, spn := range data {
uri := spn.URI() uri := spn.URI()
filename := uri.Filename() filename := uri.Filename()
@ -342,7 +337,7 @@ func (r *runner) Import(t *testing.T, data tests.Imports) {
return out, nil return out, nil
})) }))
actions, err := r.server.CodeAction(r.ctx, &protocol.CodeActionParams{ actions, err := r.server.CodeAction(context.Background(), &protocol.CodeActionParams{
TextDocument: protocol.TextDocumentIdentifier{ TextDocument: protocol.TextDocumentIdentifier{
URI: protocol.NewURI(uri), URI: protocol.NewURI(uri),
}, },
@ -353,7 +348,7 @@ func (r *runner) Import(t *testing.T, data tests.Imports) {
} }
continue continue
} }
_, m, err := getSourceFile(r.ctx, r.server.session.ViewOf(uri), uri) _, m, err := getSourceFile(ctx, r.server.session.ViewOf(uri), uri)
if err != nil { if err != nil {
t.Error(err) t.Error(err)
} }
@ -392,13 +387,13 @@ func (r *runner) Definition(t *testing.T, data tests.Definitions) {
var locs []protocol.Location var locs []protocol.Location
var hover *protocol.Hover var hover *protocol.Hover
if d.IsType { if d.IsType {
locs, err = r.server.TypeDefinition(r.ctx, params) locs, err = r.server.TypeDefinition(context.Background(), params)
} else { } else {
locs, err = r.server.Definition(r.ctx, params) locs, err = r.server.Definition(context.Background(), params)
if err != nil { if err != nil {
t.Fatalf("failed for %v: %v", d.Src, err) t.Fatalf("failed for %v: %v", d.Src, err)
} }
hover, err = r.server.Hover(r.ctx, params) hover, err = r.server.Hover(context.Background(), params)
} }
if err != nil { if err != nil {
t.Fatalf("failed for %v: %v", d.Src, err) t.Fatalf("failed for %v: %v", d.Src, err)
@ -445,7 +440,7 @@ func (r *runner) Highlight(t *testing.T, data tests.Highlights) {
TextDocument: protocol.TextDocumentIdentifier{URI: loc.URI}, TextDocument: protocol.TextDocumentIdentifier{URI: loc.URI},
Position: loc.Range.Start, Position: loc.Range.Start,
} }
highlights, err := r.server.DocumentHighlight(r.ctx, params) highlights, err := r.server.DocumentHighlight(context.Background(), params)
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
@ -475,11 +470,7 @@ func (r *runner) Reference(t *testing.T, data tests.References) {
want := make(map[protocol.Location]bool) want := make(map[protocol.Location]bool)
for _, pos := range itemList { for _, pos := range itemList {
m, err := r.mapper(pos.URI()) loc, err := sm.Location(pos)
if err != nil {
t.Fatal(err)
}
loc, err := m.Location(pos)
if err != nil { if err != nil {
t.Fatalf("failed for %v: %v", src, err) t.Fatalf("failed for %v: %v", src, err)
} }
@ -491,26 +482,25 @@ func (r *runner) Reference(t *testing.T, data tests.References) {
Position: loc.Range.Start, Position: loc.Range.Start,
}, },
} }
got, err := r.server.References(r.ctx, params) got, err := r.server.References(context.Background(), params)
if err != nil { if err != nil {
t.Fatalf("failed for %v: %v", src, err) t.Fatalf("failed for %v: %v", src, err)
} }
if len(got) != len(want) { if len(got) != len(itemList) {
t.Errorf("references failed: different lengths got %v want %v", len(got), len(want)) t.Errorf("references failed: different lengths got %v want %v", len(got), len(itemList))
} }
for _, loc := range got { for _, loc := range got {
if !want[loc] { if !want[loc] {
t.Errorf("references failed: incorrect references got %v want %v", loc, want) t.Errorf("references failed: incorrect references got %v want %v", got, want)
} }
} }
} }
} }
func (r *runner) Rename(t *testing.T, data tests.Renames) { func (r *runner) Rename(t *testing.T, data tests.Renames) {
ctx := context.Background()
for spn, newText := range data { for spn, newText := range data {
tag := fmt.Sprintf("%s-rename", newText)
uri := spn.URI() uri := spn.URI()
filename := uri.Filename() filename := uri.Filename()
sm, err := r.mapper(uri) sm, err := r.mapper(uri)
@ -522,7 +512,7 @@ func (r *runner) Rename(t *testing.T, data tests.Renames) {
t.Fatalf("failed for %v: %v", spn, err) t.Fatalf("failed for %v: %v", spn, err)
} }
workspaceEdits, err := r.server.Rename(r.ctx, &protocol.RenameParams{ workspaceEdits, err := r.server.Rename(ctx, &protocol.RenameParams{
TextDocument: protocol.TextDocumentIdentifier{ TextDocument: protocol.TextDocumentIdentifier{
URI: protocol.NewURI(uri), URI: protocol.NewURI(uri),
}, },
@ -530,56 +520,47 @@ func (r *runner) Rename(t *testing.T, data tests.Renames) {
NewName: newText, NewName: newText,
}) })
if err != nil { if err != nil {
renamed := string(r.data.Golden(tag, filename, func() ([]byte, error) { t.Error(err)
return []byte(err.Error()), nil
}))
if err.Error() != renamed {
t.Errorf("rename failed for %s, expected:\n%v\ngot:\n%v\n", newText, renamed, err)
}
continue continue
} }
var res []string _, m, err := getSourceFile(ctx, r.server.session.ViewOf(uri), uri)
for uri, edits := range *workspaceEdits.Changes {
spnURI := span.URI(uri)
_, m, err := getSourceFile(r.ctx, r.server.session.ViewOf(span.URI(spnURI)), spnURI)
if err != nil { if err != nil {
t.Error(err) t.Error(err)
} }
changes := *workspaceEdits.Changes
if len(changes) != 1 { // Renames must only affect a single file in these tests.
t.Errorf("rename failed for %s, edited %d files, wanted 1 file", newText, len(*workspaceEdits.Changes))
continue
}
edits := changes[string(uri)]
if edits == nil {
t.Errorf("rename failed for %s, did not edit %s", newText, filename)
continue
}
sedits, err := FromProtocolEdits(m, edits) sedits, err := FromProtocolEdits(m, edits)
if err != nil { if err != nil {
t.Error(err) t.Error(err)
} }
filename := filepath.Base(m.URI.Filename()) got := applyEdits(string(m.Content), sedits)
contents := applyEdits(string(m.Content), sedits)
res = append(res, fmt.Sprintf("%s:\n%s", filename, contents))
}
// Sort on filename tag := fmt.Sprintf("%s-rename", newText)
sort.Strings(res) gorenamed := string(r.data.Golden(tag, filename, func() ([]byte, error) {
var got string
for i, val := range res {
if i != 0 {
got += "\n"
}
got += val
}
renamed := string(r.data.Golden(tag, filename, func() ([]byte, error) {
return []byte(got), nil return []byte(got), nil
})) }))
if renamed != got { if gorenamed != got {
t.Errorf("rename failed for %s, expected:\n%v\ngot:\n%v", newText, renamed, got) t.Errorf("rename failed for %s, expected:\n%v\ngot:\n%v", newText, gorenamed, got)
} }
} }
} }
func applyEdits(contents string, edits []source.TextEdit) string { func applyEdits(contents string, edits []source.TextEdit) string {
res := contents res := contents
sortSourceTextEdits(edits)
// Apply the edits from the end of the file forward // Apply the edits from the end of the file forward
// to preserve the offsets // to preserve the offsets
@ -593,6 +574,15 @@ func applyEdits(contents string, edits []source.TextEdit) string {
return res return res
} }
func sortSourceTextEdits(d []source.TextEdit) {
sort.Slice(d, func(i int, j int) bool {
if r := span.Compare(d[i].Span, d[j].Span); r != 0 {
return r < 0
}
return d[i].NewText < d[j].NewText
})
}
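
applyEdits above sorts the edits and then applies them from the end of the file toward the start, so that applying one edit never shifts the offsets of the edits still to come. The same idea on raw byte offsets, with a hypothetical edit type standing in for source.TextEdit:

	package main

	import (
		"fmt"
		"sort"
	)

	// edit is a hypothetical offset-based text edit; the test above works with
	// source.TextEdit values carrying spans rather than raw offsets.
	type edit struct {
		start, end int
		newText    string
	}

	// applyEdits applies edits back to front so earlier offsets stay valid.
	func applyEdits(contents string, edits []edit) string {
		sort.Slice(edits, func(i, j int) bool { return edits[i].start < edits[j].start })
		res := contents
		for i := len(edits) - 1; i >= 0; i-- {
			e := edits[i]
			res = res[:e.start] + e.newText + res[e.end:]
		}
		return res
	}

	func main() {
		edits := []edit{
			{start: 0, end: 5, newText: "goodbye"},
			{start: 6, end: 11, newText: "gopher"},
		}
		fmt.Println(applyEdits("hello world", edits)) // goodbye gopher
	}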
func (r *runner) Symbol(t *testing.T, data tests.Symbols) { func (r *runner) Symbol(t *testing.T, data tests.Symbols) {
for uri, expectedSymbols := range data { for uri, expectedSymbols := range data {
params := &protocol.DocumentSymbolParams{ params := &protocol.DocumentSymbolParams{
@ -600,7 +590,7 @@ func (r *runner) Symbol(t *testing.T, data tests.Symbols) {
URI: string(uri), URI: string(uri),
}, },
} }
symbols, err := r.server.DocumentSymbol(r.ctx, params) symbols, err := r.server.DocumentSymbol(context.Background(), params)
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
@ -676,32 +666,23 @@ func (r *runner) SignatureHelp(t *testing.T, data tests.Signatures) {
if err != nil { if err != nil {
t.Fatalf("failed for %v: %v", loc, err) t.Fatalf("failed for %v: %v", loc, err)
} }
gotSignatures, err := r.server.SignatureHelp(r.ctx, &protocol.TextDocumentPositionParams{ gotSignatures, err := r.server.SignatureHelp(context.Background(), &protocol.TextDocumentPositionParams{
TextDocument: protocol.TextDocumentIdentifier{ TextDocument: protocol.TextDocumentIdentifier{
URI: protocol.NewURI(spn.URI()), URI: protocol.NewURI(spn.URI()),
}, },
Position: loc.Range.Start, Position: loc.Range.Start,
}) })
if err != nil { if err != nil {
// Only fail if we got an error we did not expect.
if expectedSignatures != nil {
t.Fatal(err) t.Fatal(err)
} }
continue
}
if expectedSignatures == nil {
if gotSignatures != nil {
t.Errorf("expected no signature, got %v", gotSignatures)
}
continue
}
if diff := diffSignatures(spn, expectedSignatures, gotSignatures); diff != "" { if diff := diffSignatures(spn, expectedSignatures, gotSignatures); diff != "" {
t.Error(diff) t.Error(diff)
} }
} }
} }
func diffSignatures(spn span.Span, want *source.SignatureInformation, got *protocol.SignatureHelp) string { func diffSignatures(spn span.Span, want source.SignatureInformation, got *protocol.SignatureHelp) string {
decorate := func(f string, args ...interface{}) string { decorate := func(f string, args ...interface{}) string {
return fmt.Sprintf("Invalid signature at %s: %s", spn, fmt.Sprintf(f, args...)) return fmt.Sprintf("Invalid signature at %s: %s", spn, fmt.Sprintf(f, args...))
} }
@ -742,7 +723,7 @@ func (r *runner) Link(t *testing.T, data tests.Links) {
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
gotLinks, err := r.server.DocumentLink(r.ctx, &protocol.DocumentLinkParams{ gotLinks, err := r.server.DocumentLink(context.Background(), &protocol.DocumentLinkParams{
TextDocument: protocol.TextDocumentIdentifier{ TextDocument: protocol.TextDocumentIdentifier{
URI: protocol.NewURI(uri), URI: protocol.NewURI(uri),
}, },
@ -750,30 +731,15 @@ func (r *runner) Link(t *testing.T, data tests.Links) {
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
var notePositions []token.Position
links := make(map[span.Span]string, len(wantLinks)) links := make(map[span.Span]string, len(wantLinks))
for _, link := range wantLinks { for _, link := range wantLinks {
links[link.Src] = link.Target links[link.Src] = link.Target
notePositions = append(notePositions, link.NotePosition)
} }
for _, link := range gotLinks { for _, link := range gotLinks {
spn, err := m.RangeSpan(link.Range) spn, err := m.RangeSpan(link.Range)
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
linkInNote := false
for _, notePosition := range notePositions {
// Drop links found inside expectation note arguments, as these links are not collected by the expect package
if notePosition.Line == spn.Start().Line() &&
notePosition.Column <= spn.Start().Column() {
delete(links, spn)
linkInNote = true
}
}
if linkInNote {
continue
}
if target, ok := links[spn]; ok { if target, ok := links[spn]; ok {
delete(links, spn) delete(links, spn)
if target != link.Target { if target != link.Target {
@ -823,10 +789,10 @@ func TestBytesOffset(t *testing.T) {
{text: `a𐐀b`, pos: protocol.Position{Line: 0, Character: 4}, want: 6}, {text: `a𐐀b`, pos: protocol.Position{Line: 0, Character: 4}, want: 6},
{text: `a𐐀b`, pos: protocol.Position{Line: 0, Character: 5}, want: -1}, {text: `a𐐀b`, pos: protocol.Position{Line: 0, Character: 5}, want: -1},
{text: "aaa\nbbb\n", pos: protocol.Position{Line: 0, Character: 3}, want: 3}, {text: "aaa\nbbb\n", pos: protocol.Position{Line: 0, Character: 3}, want: 3},
{text: "aaa\nbbb\n", pos: protocol.Position{Line: 0, Character: 4}, want: 3}, {text: "aaa\nbbb\n", pos: protocol.Position{Line: 0, Character: 4}, want: -1},
{text: "aaa\nbbb\n", pos: protocol.Position{Line: 1, Character: 0}, want: 4}, {text: "aaa\nbbb\n", pos: protocol.Position{Line: 1, Character: 0}, want: 4},
{text: "aaa\nbbb\n", pos: protocol.Position{Line: 1, Character: 3}, want: 7}, {text: "aaa\nbbb\n", pos: protocol.Position{Line: 1, Character: 3}, want: 7},
{text: "aaa\nbbb\n", pos: protocol.Position{Line: 1, Character: 4}, want: 7}, {text: "aaa\nbbb\n", pos: protocol.Position{Line: 1, Character: 4}, want: -1},
{text: "aaa\nbbb\n", pos: protocol.Position{Line: 2, Character: 0}, want: 8}, {text: "aaa\nbbb\n", pos: protocol.Position{Line: 2, Character: 0}, want: 8},
{text: "aaa\nbbb\n", pos: protocol.Position{Line: 2, Character: 1}, want: -1}, {text: "aaa\nbbb\n", pos: protocol.Position{Line: 2, Character: 1}, want: -1},
{text: "aaa\nbbb\n\n", pos: protocol.Position{Line: 2, Character: 0}, want: 8}, {text: "aaa\nbbb\n\n", pos: protocol.Position{Line: 2, Character: 0}, want: 8},

View File

@ -1,40 +0,0 @@
package protocol
import (
"context"
"fmt"
"time"
"golang.org/x/tools/internal/lsp/telemetry/log"
"golang.org/x/tools/internal/lsp/telemetry/tag"
"golang.org/x/tools/internal/xcontext"
)
func init() {
log.AddLogger(logger)
}
type contextKey int
const (
clientKey = contextKey(iota)
)
func WithClient(ctx context.Context, client Client) context.Context {
return context.WithValue(ctx, clientKey, client)
}
// logger implements log.Logger in terms of the LogMessage call to a client.
func logger(ctx context.Context, at time.Time, tags tag.List) bool {
client, ok := ctx.Value(clientKey).(Client)
if !ok {
return false
}
entry := log.ToEntry(ctx, time.Time{}, tags)
msg := &LogMessageParams{Type: Info, Message: fmt.Sprint(entry)}
if entry.Error != nil {
msg.Type = Error
}
go client.LogMessage(xcontext.Detach(ctx), msg)
return true
}
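
The removed file above stashes the LSP client in the context under an unexported key so that a package-level logger can later retrieve it. That context-key pattern in isolation, with placeholder types standing in for protocol.Client:

	package main

	import (
		"context"
		"fmt"
	)

	// client is a placeholder for protocol.Client.
	type client interface {
		LogMessage(msg string)
	}

	// contextKey is unexported so no other package can collide with this key.
	type contextKey int

	const clientKey = contextKey(iota)

	// withClient attaches the client to the context, as WithClient does above.
	func withClient(ctx context.Context, c client) context.Context {
		return context.WithValue(ctx, clientKey, c)
	}

	// clientFrom retrieves it again; the ok result guards against contexts
	// that never had a client attached.
	func clientFrom(ctx context.Context) (client, bool) {
		c, ok := ctx.Value(clientKey).(client)
		return c, ok
	}

	type printClient struct{}

	func (printClient) LogMessage(msg string) { fmt.Println("client log:", msg) }

	func main() {
		ctx := withClient(context.Background(), printClient{})
		if c, ok := clientFrom(ctx); ok {
			c.LogMessage("forwarded to the editor")
		}
	}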

View File

@ -13,7 +13,7 @@ var (
namesInitializeError [int(UnknownProtocolVersion) + 1]string namesInitializeError [int(UnknownProtocolVersion) + 1]string
namesMessageType [int(Log) + 1]string namesMessageType [int(Log) + 1]string
namesFileChangeType [int(Deleted) + 1]string namesFileChangeType [int(Deleted) + 1]string
namesWatchKind [int(WatchDelete) + 1]string namesWatchKind [int(Change) + 1]string
namesCompletionTriggerKind [int(TriggerForIncompleteCompletions) + 1]string namesCompletionTriggerKind [int(TriggerForIncompleteCompletions) + 1]string
namesDiagnosticSeverity [int(SeverityHint) + 1]string namesDiagnosticSeverity [int(SeverityHint) + 1]string
namesDiagnosticTag [int(Unnecessary) + 1]string namesDiagnosticTag [int(Unnecessary) + 1]string
@ -40,9 +40,7 @@ func init() {
namesFileChangeType[int(Changed)] = "Changed" namesFileChangeType[int(Changed)] = "Changed"
namesFileChangeType[int(Deleted)] = "Deleted" namesFileChangeType[int(Deleted)] = "Deleted"
namesWatchKind[int(WatchCreate)] = "WatchCreate" namesWatchKind[int(Change)] = "Change"
namesWatchKind[int(WatchChange)] = "WatchChange"
namesWatchKind[int(WatchDelete)] = "WatchDelete"
namesCompletionTriggerKind[int(Invoked)] = "Invoked" namesCompletionTriggerKind[int(Invoked)] = "Invoked"
namesCompletionTriggerKind[int(TriggerCharacter)] = "TriggerCharacter" namesCompletionTriggerKind[int(TriggerCharacter)] = "TriggerCharacter"

View File

@ -0,0 +1,32 @@
package protocol
import (
"context"
"golang.org/x/tools/internal/lsp/xlog"
)
// logSink implements xlog.Sink in terms of the LogMessage call to a client.
type logSink struct {
client Client
}
// NewLogger returns an xlog.Sink that sends its messages using client.LogMessage.
// It maps Debug to the Log level, Info and Error to their matching levels, and
// does not support warnings.
func NewLogger(client Client) xlog.Sink {
return logSink{client: client}
}
func (s logSink) Log(ctx context.Context, level xlog.Level, message string) {
typ := Log
switch level {
case xlog.ErrorLevel:
typ = Error
case xlog.InfoLevel:
typ = Info
case xlog.DebugLevel:
typ = Log
}
s.client.LogMessage(ctx, &LogMessageParams{Type: typ, Message: message})
}
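
NewLogger above adapts an LSP client into an xlog.Sink by mapping log levels onto LSP message types: Debug becomes Log, Info and Error map to their counterparts, and warnings are never produced. The same mapping in isolation, with local stand-ins for the internal xlog and protocol types:

	package main

	import "fmt"

	// level and messageType are stand-ins for xlog.Level and protocol.MessageType.
	type level int

	const (
		debugLevel level = iota
		infoLevel
		errorLevel
	)

	type messageType int

	const (
		errorMessage   messageType = 1
		warningMessage messageType = 2
		infoMessage    messageType = 3
		logMessage     messageType = 4
	)

	// messageTypeFor mirrors logSink.Log above: Debug maps to Log, Info and
	// Error map to their counterparts, and warnings are not emitted at all.
	func messageTypeFor(l level) messageType {
		switch l {
		case errorLevel:
			return errorMessage
		case infoLevel:
			return infoMessage
		default:
			return logMessage
		}
	}

	func main() {
		fmt.Println(messageTypeFor(debugLevel), messageTypeFor(infoLevel), messageTypeFor(errorLevel)) // 4 3 1
	}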

View File

@ -8,56 +8,42 @@ import (
"context" "context"
"golang.org/x/tools/internal/jsonrpc2" "golang.org/x/tools/internal/jsonrpc2"
"golang.org/x/tools/internal/lsp/telemetry/log" "golang.org/x/tools/internal/lsp/xlog"
"golang.org/x/tools/internal/lsp/telemetry/trace"
"golang.org/x/tools/internal/xcontext"
) )
type DocumentUri = string const defaultMessageBufferSize = 20
const defaultRejectIfOverloaded = false
type canceller struct{ jsonrpc2.EmptyHandler } func canceller(ctx context.Context, conn *jsonrpc2.Conn, id jsonrpc2.ID) {
conn.Notify(context.Background(), "$/cancelRequest", &CancelParams{ID: id})
type clientHandler struct {
canceller
client Client
} }
type serverHandler struct { func NewClient(stream jsonrpc2.Stream, client Client) (*jsonrpc2.Conn, Server, xlog.Logger) {
canceller log := xlog.New(NewLogger(client))
server Server
}
func (canceller) Cancel(ctx context.Context, conn *jsonrpc2.Conn, id jsonrpc2.ID, cancelled bool) bool {
if cancelled {
return false
}
ctx = xcontext.Detach(ctx)
ctx, done := trace.StartSpan(ctx, "protocol.canceller")
defer done()
conn.Notify(ctx, "$/cancelRequest", &CancelParams{ID: id})
return true
}
func NewClient(ctx context.Context, stream jsonrpc2.Stream, client Client) (context.Context, *jsonrpc2.Conn, Server) {
ctx = WithClient(ctx, client)
conn := jsonrpc2.NewConn(stream) conn := jsonrpc2.NewConn(stream)
conn.AddHandler(&clientHandler{client: client}) conn.Capacity = defaultMessageBufferSize
return ctx, conn, &serverDispatcher{Conn: conn} conn.RejectIfOverloaded = defaultRejectIfOverloaded
conn.Handler = clientHandler(log, client)
conn.Canceler = jsonrpc2.Canceler(canceller)
return conn, &serverDispatcher{Conn: conn}, log
} }
func NewServer(ctx context.Context, stream jsonrpc2.Stream, server Server) (context.Context, *jsonrpc2.Conn, Client) { func NewServer(stream jsonrpc2.Stream, server Server) (*jsonrpc2.Conn, Client, xlog.Logger) {
conn := jsonrpc2.NewConn(stream) conn := jsonrpc2.NewConn(stream)
client := &clientDispatcher{Conn: conn} client := &clientDispatcher{Conn: conn}
ctx = WithClient(ctx, client) log := xlog.New(NewLogger(client))
conn.AddHandler(&serverHandler{server: server}) conn.Capacity = defaultMessageBufferSize
return ctx, conn, client conn.RejectIfOverloaded = defaultRejectIfOverloaded
conn.Handler = serverHandler(log, server)
conn.Canceler = jsonrpc2.Canceler(canceller)
return conn, client, log
} }
func sendParseError(ctx context.Context, req *jsonrpc2.Request, err error) { func sendParseError(ctx context.Context, log xlog.Logger, req *jsonrpc2.Request, err error) {
if _, ok := err.(*jsonrpc2.Error); !ok { if _, ok := err.(*jsonrpc2.Error); !ok {
err = jsonrpc2.NewErrorf(jsonrpc2.CodeParseError, "%v", err) err = jsonrpc2.NewErrorf(jsonrpc2.CodeParseError, "%v", err)
} }
if err := req.Reply(ctx, nil, err); err != nil { if err := req.Reply(ctx, nil, err); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
} }
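
Both canceller variants above notify the peer with a "$/cancelRequest" carrying the ID of the request being cancelled. A small sketch of what that notification looks like on the wire, using a hypothetical local mirror of protocol.CancelParams (the real jsonrpc2.ID may be a string or a number):

	package main

	import (
		"encoding/json"
		"fmt"
	)

	// cancelParams is a local mirror of protocol.CancelParams for illustration.
	type cancelParams struct {
		ID int64 `json:"id"`
	}

	// notification is the JSON-RPC 2.0 envelope for a notification; it has no
	// "id" of its own because notifications expect no response.
	type notification struct {
		JSONRPC string       `json:"jsonrpc"`
		Method  string       `json:"method"`
		Params  cancelParams `json:"params"`
	}

	func main() {
		n := notification{JSONRPC: "2.0", Method: "$/cancelRequest", Params: cancelParams{ID: 42}}
		b, _ := json.Marshal(n)
		fmt.Println(string(b))
		// {"jsonrpc":"2.0","method":"$/cancelRequest","params":{"id":42}}
	}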

View File

@ -7,7 +7,7 @@ import (
"encoding/json" "encoding/json"
"golang.org/x/tools/internal/jsonrpc2" "golang.org/x/tools/internal/jsonrpc2"
"golang.org/x/tools/internal/lsp/telemetry/log" "golang.org/x/tools/internal/lsp/xlog"
) )
type Client interface { type Client interface {
@ -23,127 +23,117 @@ type Client interface {
ApplyEdit(context.Context, *ApplyWorkspaceEditParams) (*ApplyWorkspaceEditResponse, error) ApplyEdit(context.Context, *ApplyWorkspaceEditParams) (*ApplyWorkspaceEditResponse, error)
} }
func (h clientHandler) Deliver(ctx context.Context, r *jsonrpc2.Request, delivered bool) bool { func clientHandler(log xlog.Logger, client Client) jsonrpc2.Handler {
if delivered { return func(ctx context.Context, r *jsonrpc2.Request) {
return false
}
switch r.Method { switch r.Method {
case "$/cancelRequest": case "$/cancelRequest":
var params CancelParams var params CancelParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
r.Conn().Cancel(params.ID) r.Conn().Cancel(params.ID)
return true
case "window/showMessage": // notif case "window/showMessage": // notif
var params ShowMessageParams var params ShowMessageParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
if err := h.client.ShowMessage(ctx, &params); err != nil { if err := client.ShowMessage(ctx, &params); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "window/logMessage": // notif case "window/logMessage": // notif
var params LogMessageParams var params LogMessageParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
if err := h.client.LogMessage(ctx, &params); err != nil { if err := client.LogMessage(ctx, &params); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "telemetry/event": // notif case "telemetry/event": // notif
var params interface{} var params interface{}
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
if err := h.client.Event(ctx, &params); err != nil { if err := client.Event(ctx, &params); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "textDocument/publishDiagnostics": // notif case "textDocument/publishDiagnostics": // notif
var params PublishDiagnosticsParams var params PublishDiagnosticsParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
if err := h.client.PublishDiagnostics(ctx, &params); err != nil { if err := client.PublishDiagnostics(ctx, &params); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "workspace/workspaceFolders": // req case "workspace/workspaceFolders": // req
if r.Params != nil { if r.Params != nil {
r.Reply(ctx, nil, jsonrpc2.NewErrorf(jsonrpc2.CodeInvalidParams, "Expected no params")) r.Reply(ctx, nil, jsonrpc2.NewErrorf(jsonrpc2.CodeInvalidParams, "Expected no params"))
return true return
} }
resp, err := h.client.WorkspaceFolders(ctx) resp, err := client.WorkspaceFolders(ctx)
if err := r.Reply(ctx, resp, err); err != nil { if err := r.Reply(ctx, resp, err); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "workspace/configuration": // req case "workspace/configuration": // req
var params ConfigurationParams var params ConfigurationParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
resp, err := h.client.Configuration(ctx, &params) resp, err := client.Configuration(ctx, &params)
if err := r.Reply(ctx, resp, err); err != nil { if err := r.Reply(ctx, resp, err); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "client/registerCapability": // req case "client/registerCapability": // req
var params RegistrationParams var params RegistrationParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
err := h.client.RegisterCapability(ctx, &params) err := client.RegisterCapability(ctx, &params)
if err := r.Reply(ctx, nil, err); err != nil { if err := r.Reply(ctx, nil, err); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "client/unregisterCapability": // req case "client/unregisterCapability": // req
var params UnregistrationParams var params UnregistrationParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
err := h.client.UnregisterCapability(ctx, &params) err := client.UnregisterCapability(ctx, &params)
if err := r.Reply(ctx, nil, err); err != nil { if err := r.Reply(ctx, nil, err); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "window/showMessageRequest": // req case "window/showMessageRequest": // req
var params ShowMessageRequestParams var params ShowMessageRequestParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
resp, err := h.client.ShowMessageRequest(ctx, &params) resp, err := client.ShowMessageRequest(ctx, &params)
if err := r.Reply(ctx, resp, err); err != nil { if err := r.Reply(ctx, resp, err); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "workspace/applyEdit": // req case "workspace/applyEdit": // req
var params ApplyWorkspaceEditParams var params ApplyWorkspaceEditParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
resp, err := h.client.ApplyEdit(ctx, &params) resp, err := client.ApplyEdit(ctx, &params)
if err := r.Reply(ctx, resp, err); err != nil { if err := r.Reply(ctx, resp, err); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
default: default:
return false if r.IsNotify() {
r.Reply(ctx, nil, jsonrpc2.NewErrorf(jsonrpc2.CodeMethodNotFound, "method %q not found", r.Method))
}
}
} }
} }
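
Both handler versions above follow the same dispatch shape: switch on the method name, unmarshal the raw params into the matching struct, call the client, and log rather than return any handler error. A compressed sketch of that pattern using only encoding/json and a hypothetical two-method client interface:

	package main

	import (
		"encoding/json"
		"fmt"
		"log"
	)

	// client is a hypothetical two-method stand-in for protocol.Client.
	type client interface {
		ShowMessage(text string) error
		LogMessage(text string) error
	}

	type request struct {
		Method string
		Params json.RawMessage
	}

	type messageParams struct {
		Message string `json:"message"`
	}

	// dispatch mirrors the clientHandler switch: decode the params for the
	// named method, forward the call, and log (not return) handler errors.
	func dispatch(c client, r request) {
		var params messageParams
		if err := json.Unmarshal(r.Params, &params); err != nil {
			log.Printf("parse error for %s: %v", r.Method, err)
			return
		}
		var err error
		switch r.Method {
		case "window/showMessage":
			err = c.ShowMessage(params.Message)
		case "window/logMessage":
			err = c.LogMessage(params.Message)
		default:
			log.Printf("method %q not found", r.Method)
			return
		}
		if err != nil {
			log.Printf("%s failed: %v", r.Method, err)
		}
	}

	type printClient struct{}

	func (printClient) ShowMessage(text string) error { fmt.Println("show:", text); return nil }
	func (printClient) LogMessage(text string) error  { fmt.Println("log:", text); return nil }

	func main() {
		dispatch(printClient{}, request{
			Method: "window/showMessage",
			Params: json.RawMessage(`{"message":"hello from the server"}`),
		})
	}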

View File

@ -1,7 +1,7 @@
// Package protocol contains data types and code for LSP jsonrpcs // Package protocol contains data types and code for LSP jsonrpcs
// generated automatically from vscode-languageserver-node // generated automatically from vscode-languageserver-node
// commit: 8801c20b667945f455d7e023c71d2f741caeda25 // commit: c1e8923f8ea3b1f9c61dadf97448244d9ffbf7ae
// last fetched Sat Jul 13 2019 18:33:10 GMT-0700 (Pacific Daylight Time) // last fetched Tue May 21 2019 07:36:27 GMT-0400 (Eastern Daylight Time)
package protocol package protocol
// Code generated (see typescript/README.md) DO NOT EDIT. // Code generated (see typescript/README.md) DO NOT EDIT.
@ -155,26 +155,6 @@ type FoldingRangeParams struct {
TextDocument TextDocumentIdentifier `json:"textDocument"` TextDocument TextDocumentIdentifier `json:"textDocument"`
} }
// SelectionRangeProviderOptions is
type SelectionRangeProviderOptions struct {
}
/*SelectionRangeParams defined:
* A parameter literal used in selection range requests.
*/
type SelectionRangeParams struct {
/*TextDocument defined:
* The text document.
*/
TextDocument TextDocumentIdentifier `json:"textDocument"`
/*Positions defined:
* The positions inside the text document.
*/
Positions []Position `json:"positions"`
}
/*Registration defined: /*Registration defined:
* General parameters to register for a notification or to register a provider. * General parameters to register for a notification or to register a provider.
*/ */
@ -1261,19 +1241,6 @@ type ClientCapabilities struct {
*/ */
LinkSupport bool `json:"linkSupport,omitempty"` LinkSupport bool `json:"linkSupport,omitempty"`
} `json:"declaration,omitempty"` } `json:"declaration,omitempty"`
/*SelectionRange defined:
* Capabilities specific to `textDocument/selectionRange` requests
*/
SelectionRange struct {
/*DynamicRegistration defined:
* Whether implementation supports dynamic registration for selection range providers. If this is set to `true`
* the client supports the new `(SelectionRangeProviderOptions & TextDocumentRegistrationOptions & StaticRegistrationOptions)`
* return value for the corresponding server capability as well.
*/
DynamicRegistration bool `json:"dynamicRegistration,omitempty"`
} `json:"selectionRange,omitempty"`
} `json:"textDocument,omitempty"` } `json:"textDocument,omitempty"`
/*Window defined: /*Window defined:
@ -1633,11 +1600,6 @@ type ServerCapabilities struct {
* The server provides Goto Type Definition support. * The server provides Goto Type Definition support.
*/ */
DeclarationProvider bool `json:"declarationProvider,omitempty"` // boolean | (TextDocumentRegistrationOptions & StaticRegistrationOptions) DeclarationProvider bool `json:"declarationProvider,omitempty"` // boolean | (TextDocumentRegistrationOptions & StaticRegistrationOptions)
/*SelectionRangeProvider defined:
* The server provides selection range support.
*/
SelectionRangeProvider bool `json:"selectionRangeProvider,omitempty"` // boolean | (TextDocumentRegistrationOptions & StaticRegistrationOptions & SelectionRangeProviderOptions)
} }
// InitializeParams is // InitializeParams is
@ -1664,7 +1626,7 @@ type InitializeParams struct {
* *
* @deprecated in favour of workspaceFolders. * @deprecated in favour of workspaceFolders.
*/ */
RootURI DocumentUri `json:"rootUri"` RootURI string `json:"rootUri"`
/*Capabilities defined: /*Capabilities defined:
* The capabilities provided by the client (editor or tool) * The capabilities provided by the client (editor or tool)
@ -1899,7 +1861,7 @@ type FileEvent struct {
/*URI defined: /*URI defined:
* The file's uri. * The file's uri.
*/ */
URI DocumentUri `json:"uri"` URI string `json:"uri"`
/*Type defined: /*Type defined:
* The change type. * The change type.
@ -1948,12 +1910,10 @@ type PublishDiagnosticsParams struct {
/*URI defined: /*URI defined:
* The URI for which diagnostic information is reported. * The URI for which diagnostic information is reported.
*/ */
URI DocumentUri `json:"uri"` URI string `json:"uri"`
/*Version defined: /*Version defined:
* Optionally, the version number of the document the diagnostics are published for. * Optionally, the version number of the document the diagnostics are published for.
*
* @since 3.15
*/ */
Version float64 `json:"version,omitempty"` Version float64 `json:"version,omitempty"`
@ -2304,7 +2264,7 @@ type Range struct {
type Location struct { type Location struct {
// URI is // URI is
URI DocumentUri `json:"uri"` URI string `json:"uri"`
// Range is // Range is
Range Range `json:"range"` Range Range `json:"range"`
@ -2327,7 +2287,7 @@ type LocationLink struct {
/*TargetURI defined: /*TargetURI defined:
* The target resource identifier of this link. * The target resource identifier of this link.
*/ */
TargetURI DocumentUri `json:"targetUri"` TargetURI string `json:"targetUri"`
/*TargetRange defined: /*TargetRange defined:
* The full target range of this link. If the target for example is a symbol then target range is the * The full target range of this link. If the target for example is a symbol then target range is the
@ -2568,7 +2528,7 @@ type CreateFile struct {
/*URI defined: /*URI defined:
* The resource to create. * The resource to create.
*/ */
URI DocumentUri `json:"uri"` URI string `json:"uri"`
/*Options defined: /*Options defined:
* Additional options * Additional options
@ -2605,12 +2565,12 @@ type RenameFile struct {
/*OldURI defined: /*OldURI defined:
* The old (existing) location. * The old (existing) location.
*/ */
OldURI DocumentUri `json:"oldUri"` OldURI string `json:"oldUri"`
/*NewURI defined: /*NewURI defined:
* The new location. * The new location.
*/ */
NewURI DocumentUri `json:"newUri"` NewURI string `json:"newUri"`
/*Options defined: /*Options defined:
* Rename options. * Rename options.
@ -2647,7 +2607,7 @@ type DeleteFile struct {
/*URI defined: /*URI defined:
* The file to delete. * The file to delete.
*/ */
URI DocumentUri `json:"uri"` URI string `json:"uri"`
/*Options defined: /*Options defined:
* Delete options. * Delete options.
@ -2696,7 +2656,7 @@ type TextDocumentIdentifier struct {
/*URI defined: /*URI defined:
* The text document's uri. * The text document's uri.
*/ */
URI DocumentUri `json:"uri"` URI string `json:"uri"`
} }
/*VersionedTextDocumentIdentifier defined: /*VersionedTextDocumentIdentifier defined:
@ -2724,7 +2684,7 @@ type TextDocumentItem struct {
/*URI defined: /*URI defined:
* The text document's uri. * The text document's uri.
*/ */
URI DocumentUri `json:"uri"` URI string `json:"uri"`
/*LanguageID defined: /*LanguageID defined:
* The text document's language identifier * The text document's language identifier
@ -2849,6 +2809,8 @@ type CompletionItem struct {
* and a completion item with an `insertText` of `console` is provided it * and a completion item with an `insertText` of `console` is provided it
* will only insert `sole`. Therefore it is recommended to use `textEdit` instead * will only insert `sole`. Therefore it is recommended to use `textEdit` instead
* since it avoids additional client side interpretation. * since it avoids additional client side interpretation.
*
* @deprecated Use textEdit instead.
*/ */
InsertText string `json:"insertText,omitempty"` InsertText string `json:"insertText,omitempty"`
@ -3300,23 +3262,6 @@ type DocumentLink struct {
Data interface{} `json:"data,omitempty"` Data interface{} `json:"data,omitempty"`
} }
/*SelectionRange defined:
* A selection range represents a part of a selection hierarchy. A selection range
* may have a parent selection range that contains it.
*/
type SelectionRange struct {
/*Range defined:
* The [range](#Range) of this selection range.
*/
Range Range `json:"range"`
/*Parent defined:
* The parent selection range containing this range. Therefore `parent.range` must contain `this.range`.
*/
Parent *SelectionRange `json:"parent,omitempty"`
}
/*TextDocument defined: /*TextDocument defined:
* A simple text document. Not to be implemented. * A simple text document. Not to be implemented.
*/ */
@ -3329,7 +3274,7 @@ type TextDocument struct {
* *
* @readonly * @readonly
*/ */
URI DocumentUri `json:"uri"` URI string `json:"uri"`
/*LanguageID defined: /*LanguageID defined:
* The identifier of the language associated with this document. * The identifier of the language associated with this document.
@ -3611,20 +3556,10 @@ const (
*/ */
Deleted FileChangeType = 3 Deleted FileChangeType = 3
/*WatchCreate defined: /*Change defined:
* Interested in create events.
*/
WatchCreate WatchKind = 1
/*WatchChange defined:
* Interested in change events * Interested in change events
*/ */
WatchChange WatchKind = 2 Change WatchKind = 2
/*WatchDelete defined:
* Interested in delete events
*/
WatchDelete WatchKind = 4
/*Invoked defined: /*Invoked defined:
* Completion was triggered by typing an identifier (24x7 code * Completion was triggered by typing an identifier (24x7 code
@ -4017,12 +3952,6 @@ type DocumentFilter struct {
*/ */
type DocumentSelector []DocumentFilter type DocumentSelector []DocumentFilter
// DocumentURI is a type
/**
* A tagging type for string properties that are actually URIs.
*/
type DocumentURI string
// DefinitionLink is a type // DefinitionLink is a type
/** /**
* Information about where a symbol is defined. * Information about where a symbol is defined.
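
Elsewhere in the protocol file above, WatchKind is split into WatchCreate (1), WatchChange (2) and WatchDelete (4) on the newer side, so clients can combine them as a bitmask when registering file watchers. A small sketch of combining and testing those flags, with the constants redeclared locally:

	package main

	import "fmt"

	// watchKind mirrors protocol.WatchKind; the values are powers of two so
	// they can be OR-ed together in a watcher registration.
	type watchKind int

	const (
		watchCreate watchKind = 1
		watchChange watchKind = 2
		watchDelete watchKind = 4
	)

	func main() {
		// Watch for creations and deletions, but not changes.
		kind := watchCreate | watchDelete
		fmt.Println(kind&watchChange != 0) // false
		fmt.Println(kind&watchDelete != 0) // true
		fmt.Println(kind)                  // 5
	}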

View File

@ -7,7 +7,7 @@ import (
"encoding/json" "encoding/json"
"golang.org/x/tools/internal/jsonrpc2" "golang.org/x/tools/internal/jsonrpc2"
"golang.org/x/tools/internal/lsp/telemetry/log" "golang.org/x/tools/internal/lsp/xlog"
) )
type Server interface { type Server interface {
@ -29,7 +29,6 @@ type Server interface {
ColorPresentation(context.Context, *ColorPresentationParams) ([]ColorPresentation, error) ColorPresentation(context.Context, *ColorPresentationParams) ([]ColorPresentation, error)
FoldingRange(context.Context, *FoldingRangeParams) ([]FoldingRange, error) FoldingRange(context.Context, *FoldingRangeParams) ([]FoldingRange, error)
Declaration(context.Context, *TextDocumentPositionParams) ([]DeclarationLink, error) Declaration(context.Context, *TextDocumentPositionParams) ([]DeclarationLink, error)
SelectionRange(context.Context, *SelectionRangeParams) ([]SelectionRange, error)
Initialize(context.Context, *InitializeParams) (*InitializeResult, error) Initialize(context.Context, *InitializeParams) (*InitializeResult, error)
Shutdown(context.Context) error Shutdown(context.Context) error
WillSaveWaitUntil(context.Context, *WillSaveTextDocumentParams) ([]TextEdit, error) WillSaveWaitUntil(context.Context, *WillSaveTextDocumentParams) ([]TextEdit, error)
@ -55,466 +54,414 @@ type Server interface {
ExecuteCommand(context.Context, *ExecuteCommandParams) (interface{}, error) ExecuteCommand(context.Context, *ExecuteCommandParams) (interface{}, error)
} }
func (h serverHandler) Deliver(ctx context.Context, r *jsonrpc2.Request, delivered bool) bool { func serverHandler(log xlog.Logger, server Server) jsonrpc2.Handler {
if delivered { return func(ctx context.Context, r *jsonrpc2.Request) {
return false
}
switch r.Method { switch r.Method {
case "$/cancelRequest": case "$/cancelRequest":
var params CancelParams var params CancelParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
r.Conn().Cancel(params.ID) r.Conn().Cancel(params.ID)
return true
case "workspace/didChangeWorkspaceFolders": // notif case "workspace/didChangeWorkspaceFolders": // notif
var params DidChangeWorkspaceFoldersParams var params DidChangeWorkspaceFoldersParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
if err := h.server.DidChangeWorkspaceFolders(ctx, &params); err != nil { if err := server.DidChangeWorkspaceFolders(ctx, &params); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "initialized": // notif case "initialized": // notif
var params InitializedParams var params InitializedParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
if err := h.server.Initialized(ctx, &params); err != nil { if err := server.Initialized(ctx, &params); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "exit": // notif case "exit": // notif
if err := h.server.Exit(ctx); err != nil { if err := server.Exit(ctx); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "workspace/didChangeConfiguration": // notif case "workspace/didChangeConfiguration": // notif
var params DidChangeConfigurationParams var params DidChangeConfigurationParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
if err := h.server.DidChangeConfiguration(ctx, &params); err != nil { if err := server.DidChangeConfiguration(ctx, &params); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "textDocument/didOpen": // notif case "textDocument/didOpen": // notif
var params DidOpenTextDocumentParams var params DidOpenTextDocumentParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
if err := h.server.DidOpen(ctx, &params); err != nil { if err := server.DidOpen(ctx, &params); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "textDocument/didChange": // notif case "textDocument/didChange": // notif
var params DidChangeTextDocumentParams var params DidChangeTextDocumentParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
if err := h.server.DidChange(ctx, &params); err != nil { if err := server.DidChange(ctx, &params); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "textDocument/didClose": // notif case "textDocument/didClose": // notif
var params DidCloseTextDocumentParams var params DidCloseTextDocumentParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
if err := h.server.DidClose(ctx, &params); err != nil { if err := server.DidClose(ctx, &params); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "textDocument/didSave": // notif case "textDocument/didSave": // notif
var params DidSaveTextDocumentParams var params DidSaveTextDocumentParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
if err := h.server.DidSave(ctx, &params); err != nil { if err := server.DidSave(ctx, &params); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "textDocument/willSave": // notif case "textDocument/willSave": // notif
var params WillSaveTextDocumentParams var params WillSaveTextDocumentParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
if err := h.server.WillSave(ctx, &params); err != nil { if err := server.WillSave(ctx, &params); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "workspace/didChangeWatchedFiles": // notif case "workspace/didChangeWatchedFiles": // notif
var params DidChangeWatchedFilesParams var params DidChangeWatchedFilesParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
if err := h.server.DidChangeWatchedFiles(ctx, &params); err != nil { if err := server.DidChangeWatchedFiles(ctx, &params); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "$/setTraceNotification": // notif case "$/setTraceNotification": // notif
var params SetTraceParams var params SetTraceParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
if err := h.server.SetTraceNotification(ctx, &params); err != nil { if err := server.SetTraceNotification(ctx, &params); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "$/logTraceNotification": // notif case "$/logTraceNotification": // notif
var params LogTraceParams var params LogTraceParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
if err := h.server.LogTraceNotification(ctx, &params); err != nil { if err := server.LogTraceNotification(ctx, &params); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "textDocument/implementation": // req case "textDocument/implementation": // req
var params TextDocumentPositionParams var params TextDocumentPositionParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
resp, err := h.server.Implementation(ctx, &params) resp, err := server.Implementation(ctx, &params)
if err := r.Reply(ctx, resp, err); err != nil { if err := r.Reply(ctx, resp, err); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "textDocument/typeDefinition": // req case "textDocument/typeDefinition": // req
var params TextDocumentPositionParams var params TextDocumentPositionParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
resp, err := h.server.TypeDefinition(ctx, &params) resp, err := server.TypeDefinition(ctx, &params)
if err := r.Reply(ctx, resp, err); err != nil { if err := r.Reply(ctx, resp, err); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "textDocument/documentColor": // req case "textDocument/documentColor": // req
var params DocumentColorParams var params DocumentColorParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
resp, err := h.server.DocumentColor(ctx, &params) resp, err := server.DocumentColor(ctx, &params)
if err := r.Reply(ctx, resp, err); err != nil { if err := r.Reply(ctx, resp, err); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "textDocument/colorPresentation": // req case "textDocument/colorPresentation": // req
var params ColorPresentationParams var params ColorPresentationParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
resp, err := h.server.ColorPresentation(ctx, &params) resp, err := server.ColorPresentation(ctx, &params)
if err := r.Reply(ctx, resp, err); err != nil { if err := r.Reply(ctx, resp, err); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "textDocument/foldingRange": // req case "textDocument/foldingRange": // req
var params FoldingRangeParams var params FoldingRangeParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
resp, err := h.server.FoldingRange(ctx, &params) resp, err := server.FoldingRange(ctx, &params)
if err := r.Reply(ctx, resp, err); err != nil { if err := r.Reply(ctx, resp, err); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "textDocument/declaration": // req case "textDocument/declaration": // req
var params TextDocumentPositionParams var params TextDocumentPositionParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
resp, err := h.server.Declaration(ctx, &params) resp, err := server.Declaration(ctx, &params)
if err := r.Reply(ctx, resp, err); err != nil { if err := r.Reply(ctx, resp, err); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "textDocument/selectionRange": // req
var params SelectionRangeParams
if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err)
return true
}
resp, err := h.server.SelectionRange(ctx, &params)
if err := r.Reply(ctx, resp, err); err != nil {
log.Error(ctx, "", err)
}
return true
case "initialize": // req case "initialize": // req
var params InitializeParams var params InitializeParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
resp, err := h.server.Initialize(ctx, &params) resp, err := server.Initialize(ctx, &params)
if err := r.Reply(ctx, resp, err); err != nil { if err := r.Reply(ctx, resp, err); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "shutdown": // req case "shutdown": // req
if r.Params != nil { if r.Params != nil {
r.Reply(ctx, nil, jsonrpc2.NewErrorf(jsonrpc2.CodeInvalidParams, "Expected no params")) r.Reply(ctx, nil, jsonrpc2.NewErrorf(jsonrpc2.CodeInvalidParams, "Expected no params"))
return true return
} }
err := h.server.Shutdown(ctx) err := server.Shutdown(ctx)
if err := r.Reply(ctx, nil, err); err != nil { if err := r.Reply(ctx, nil, err); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "textDocument/willSaveWaitUntil": // req case "textDocument/willSaveWaitUntil": // req
var params WillSaveTextDocumentParams var params WillSaveTextDocumentParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
resp, err := h.server.WillSaveWaitUntil(ctx, &params) resp, err := server.WillSaveWaitUntil(ctx, &params)
if err := r.Reply(ctx, resp, err); err != nil { if err := r.Reply(ctx, resp, err); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "textDocument/completion": // req case "textDocument/completion": // req
var params CompletionParams var params CompletionParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
resp, err := h.server.Completion(ctx, &params) resp, err := server.Completion(ctx, &params)
if err := r.Reply(ctx, resp, err); err != nil { if err := r.Reply(ctx, resp, err); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "completionItem/resolve": // req case "completionItem/resolve": // req
var params CompletionItem var params CompletionItem
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
resp, err := h.server.Resolve(ctx, &params) resp, err := server.Resolve(ctx, &params)
if err := r.Reply(ctx, resp, err); err != nil { if err := r.Reply(ctx, resp, err); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "textDocument/hover": // req case "textDocument/hover": // req
var params TextDocumentPositionParams var params TextDocumentPositionParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
resp, err := h.server.Hover(ctx, &params) resp, err := server.Hover(ctx, &params)
if err := r.Reply(ctx, resp, err); err != nil { if err := r.Reply(ctx, resp, err); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "textDocument/signatureHelp": // req case "textDocument/signatureHelp": // req
var params TextDocumentPositionParams var params TextDocumentPositionParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
resp, err := h.server.SignatureHelp(ctx, &params) resp, err := server.SignatureHelp(ctx, &params)
if err := r.Reply(ctx, resp, err); err != nil { if err := r.Reply(ctx, resp, err); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "textDocument/definition": // req case "textDocument/definition": // req
var params TextDocumentPositionParams var params TextDocumentPositionParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
resp, err := h.server.Definition(ctx, &params) resp, err := server.Definition(ctx, &params)
if err := r.Reply(ctx, resp, err); err != nil { if err := r.Reply(ctx, resp, err); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "textDocument/references": // req case "textDocument/references": // req
var params ReferenceParams var params ReferenceParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
resp, err := h.server.References(ctx, &params) resp, err := server.References(ctx, &params)
if err := r.Reply(ctx, resp, err); err != nil { if err := r.Reply(ctx, resp, err); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "textDocument/documentHighlight": // req case "textDocument/documentHighlight": // req
var params TextDocumentPositionParams var params TextDocumentPositionParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
resp, err := h.server.DocumentHighlight(ctx, &params) resp, err := server.DocumentHighlight(ctx, &params)
if err := r.Reply(ctx, resp, err); err != nil { if err := r.Reply(ctx, resp, err); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "textDocument/documentSymbol": // req case "textDocument/documentSymbol": // req
var params DocumentSymbolParams var params DocumentSymbolParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
resp, err := h.server.DocumentSymbol(ctx, &params) resp, err := server.DocumentSymbol(ctx, &params)
if err := r.Reply(ctx, resp, err); err != nil { if err := r.Reply(ctx, resp, err); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "workspace/symbol": // req case "workspace/symbol": // req
var params WorkspaceSymbolParams var params WorkspaceSymbolParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
resp, err := h.server.Symbol(ctx, &params) resp, err := server.Symbol(ctx, &params)
if err := r.Reply(ctx, resp, err); err != nil { if err := r.Reply(ctx, resp, err); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "textDocument/codeAction": // req case "textDocument/codeAction": // req
var params CodeActionParams var params CodeActionParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
resp, err := h.server.CodeAction(ctx, &params) resp, err := server.CodeAction(ctx, &params)
if err := r.Reply(ctx, resp, err); err != nil { if err := r.Reply(ctx, resp, err); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "textDocument/codeLens": // req case "textDocument/codeLens": // req
var params CodeLensParams var params CodeLensParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
resp, err := h.server.CodeLens(ctx, &params) resp, err := server.CodeLens(ctx, &params)
if err := r.Reply(ctx, resp, err); err != nil { if err := r.Reply(ctx, resp, err); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "codeLens/resolve": // req case "codeLens/resolve": // req
var params CodeLens var params CodeLens
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
resp, err := h.server.ResolveCodeLens(ctx, &params) resp, err := server.ResolveCodeLens(ctx, &params)
if err := r.Reply(ctx, resp, err); err != nil { if err := r.Reply(ctx, resp, err); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "textDocument/formatting": // req case "textDocument/formatting": // req
var params DocumentFormattingParams var params DocumentFormattingParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
resp, err := h.server.Formatting(ctx, &params) resp, err := server.Formatting(ctx, &params)
if err := r.Reply(ctx, resp, err); err != nil { if err := r.Reply(ctx, resp, err); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "textDocument/rangeFormatting": // req case "textDocument/rangeFormatting": // req
var params DocumentRangeFormattingParams var params DocumentRangeFormattingParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
resp, err := h.server.RangeFormatting(ctx, &params) resp, err := server.RangeFormatting(ctx, &params)
if err := r.Reply(ctx, resp, err); err != nil { if err := r.Reply(ctx, resp, err); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "textDocument/onTypeFormatting": // req case "textDocument/onTypeFormatting": // req
var params DocumentOnTypeFormattingParams var params DocumentOnTypeFormattingParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
resp, err := h.server.OnTypeFormatting(ctx, &params) resp, err := server.OnTypeFormatting(ctx, &params)
if err := r.Reply(ctx, resp, err); err != nil { if err := r.Reply(ctx, resp, err); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "textDocument/rename": // req case "textDocument/rename": // req
var params RenameParams var params RenameParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
resp, err := h.server.Rename(ctx, &params) resp, err := server.Rename(ctx, &params)
if err := r.Reply(ctx, resp, err); err != nil { if err := r.Reply(ctx, resp, err); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "textDocument/prepareRename": // req case "textDocument/prepareRename": // req
var params TextDocumentPositionParams var params TextDocumentPositionParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
resp, err := h.server.PrepareRename(ctx, &params) resp, err := server.PrepareRename(ctx, &params)
if err := r.Reply(ctx, resp, err); err != nil { if err := r.Reply(ctx, resp, err); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "textDocument/documentLink": // req case "textDocument/documentLink": // req
var params DocumentLinkParams var params DocumentLinkParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
resp, err := h.server.DocumentLink(ctx, &params) resp, err := server.DocumentLink(ctx, &params)
if err := r.Reply(ctx, resp, err); err != nil { if err := r.Reply(ctx, resp, err); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "documentLink/resolve": // req case "documentLink/resolve": // req
var params DocumentLink var params DocumentLink
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
resp, err := h.server.ResolveDocumentLink(ctx, &params) resp, err := server.ResolveDocumentLink(ctx, &params)
if err := r.Reply(ctx, resp, err); err != nil { if err := r.Reply(ctx, resp, err); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
case "workspace/executeCommand": // req case "workspace/executeCommand": // req
var params ExecuteCommandParams var params ExecuteCommandParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
resp, err := h.server.ExecuteCommand(ctx, &params) resp, err := server.ExecuteCommand(ctx, &params)
if err := r.Reply(ctx, resp, err); err != nil { if err := r.Reply(ctx, resp, err); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }
return true
default: default:
return false if r.IsNotify() {
r.Reply(ctx, nil, jsonrpc2.NewErrorf(jsonrpc2.CodeMethodNotFound, "method %q not found", r.Method))
}
}
} }
} }
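
Every request case in the generated switch above follows one shape: unmarshal the params, call the matching Server method, and reply. As a reading aid for this flattened compare view, here is a minimal hand-written sketch of that shape in the gopls/v0.1.1 style shown on the right-hand side; it assumes it sits inside the protocol package, with the internal jsonrpc2 and xlog types and the sendParseError helper exactly as they are used above.

```go
// Sketch only; the real code is generated into the protocol package.
func handleTypeDefinition(ctx context.Context, log xlog.Logger, server Server, r *jsonrpc2.Request) {
	var params TextDocumentPositionParams
	if err := json.Unmarshal(*r.Params, &params); err != nil {
		sendParseError(ctx, log, r, err) // reply with a parse error
		return
	}
	resp, err := server.TypeDefinition(ctx, &params)
	if err := r.Reply(ctx, resp, err); err != nil {
		log.Errorf(ctx, "%v", err)
	}
}
```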
@ -617,14 +564,6 @@ func (s *serverDispatcher) Declaration(ctx context.Context, params *TextDocument
return result, nil return result, nil
} }
func (s *serverDispatcher) SelectionRange(ctx context.Context, params *SelectionRangeParams) ([]SelectionRange, error) {
var result []SelectionRange
if err := s.Conn.Call(ctx, "textDocument/selectionRange", params, &result); err != nil {
return nil, err
}
return result, nil
}
func (s *serverDispatcher) Initialize(ctx context.Context, params *InitializeParams) (*InitializeResult, error) { func (s *serverDispatcher) Initialize(ctx context.Context, params *InitializeParams) (*InitializeResult, error) {
var result InitializeResult var result InitializeResult
if err := s.Conn.Call(ctx, "initialize", params, &result); err != nil { if err := s.Conn.Call(ctx, "initialize", params, &result); err != nil {

View File

@@ -4,10 +4,10 @@
 1. Make sure `node` is installed.
    As explained at the [node site](<https://nodejs.org> Node)
-   you may need `npm install @types/node` for the node runtime types
+   you may need `node install @types/node` for the node runtime types
-2. Install the typescript compiler, with `npm install typescript`.
+2. Install the typescript compiler, with `node install typescript`.
 3. Make sure `tsc` and `node` are in your execution path.
-4. Get the typescript code for the jsonrpc protocol with `git clone git@github.com:microsoft/vscode-languageserver-node.git`
+4. Get the typescript code for the jsonrpc protocol with `git clone vscode-lanuageserver-node.git`
 ## Usage

View File

@ -582,7 +582,7 @@ function generate(files: string[], options: ts.CompilerOptions): void {
} }
if (x[0].goType == 'bool') { // take it if (x[0].goType == 'bool') { // take it
if (x[1].goType == 'RenameOptions') { if (x[1].goType == 'RenameOptions') {
return ({goType: 'interface{}', gostuff: getText(node)}) return ({goType: 'RenameOptions', gostuff: getText(node)})
} }
return ({goType: 'bool', gostuff: getText(node)}) return ({goType: 'bool', gostuff: getText(node)})
} }
@ -927,7 +927,7 @@ let byName = new Map<string, Struct>();
// consts are unique. (Go consts are package-level, but Typescript's are // consts are unique. (Go consts are package-level, but Typescript's are
// not.) Use suffixes to minimize changes to gopls. // not.) Use suffixes to minimize changes to gopls.
let pref = new Map<string, string>( let pref = new Map<string, string>(
[['DiagnosticSeverity', 'Severity'], ['WatchKind', 'Watch']]) // typeName->prefix [['DiagnosticSeverity', 'Severity']]) // typeName->prefix
let suff = new Map<string, string>([ let suff = new Map<string, string>([
['CompletionItemKind', 'Completion'], ['InsertTextFormat', 'TextFormat'] ['CompletionItemKind', 'Completion'], ['InsertTextFormat', 'TextFormat']
]) ])
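
Because Go constants are package-scoped while TypeScript enum members are namespaced by their type, the pref/suff maps above rename members to keep them unique. The following is an illustrative (not verbatim) view of what that produces in the generated Go protocol package, using the standard LSP numeric values; treat the exact identifiers as an assumption.

```go
// "Severity" prefix comes from the DiagnosticSeverity entry in pref;
// "Completion" suffix comes from the CompletionItemKind entry in suff.
type DiagnosticSeverity float64

const (
	SeverityError       DiagnosticSeverity = 1
	SeverityWarning     DiagnosticSeverity = 2
	SeverityInformation DiagnosticSeverity = 3
	SeverityHint        DiagnosticSeverity = 4
)

type CompletionItemKind float64

const (
	TextCompletion     CompletionItemKind = 1
	MethodCompletion   CompletionItemKind = 2
	FunctionCompletion CompletionItemKind = 3
)
```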

View File

@ -59,7 +59,7 @@ function generate(files: string[], options: ts.CompilerOptions): void {
setReceives(); // distinguish client and server setReceives(); // distinguish client and server
// for each of Client and Server there are 3 parts to the output: // for each of Client and Server there are 3 parts to the output:
// 1. type X interface {methods} // 1. type X interface {methods}
// 2. func (h *serverHandler) Deliver(...) { switch r.method } // 2. serverHandler(...) { return func(...) { switch r.method}}
// 3. func (x *xDispatcher) Method(ctx, parm) // 3. func (x *xDispatcher) Method(ctx, parm)
not.forEach( not.forEach(
(v, k) => { (v, k) => {
@ -99,7 +99,7 @@ function sig(nm: string, a: string, b: string, names?: boolean): string {
const notNil = `if r.Params != nil { const notNil = `if r.Params != nil {
r.Reply(ctx, nil, jsonrpc2.NewErrorf(jsonrpc2.CodeInvalidParams, "Expected no params")) r.Reply(ctx, nil, jsonrpc2.NewErrorf(jsonrpc2.CodeInvalidParams, "Expected no params"))
return true return
}`; }`;
// Go code for notifications. Side is client or server, m is the request method // Go code for notifications. Side is client or server, m is the request method
function goNot(side: side, m: string) { function goNot(side: side, m: string) {
@ -113,18 +113,16 @@ function goNot(side: side, m: string) {
if (a != '') { if (a != '') {
case1 = `var params ${a} case1 = `var params ${a}
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
if err := h.${side.name}.${nm}(ctx, &params); err != nil { if err := ${side.name}.${nm}(ctx, &params); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }`;
return true`;
} else { } else {
case1 = `if err := h.${side.name}.${nm}(ctx); err != nil { case1 = `if err := ${side.name}.${nm}(ctx); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }`;
return true`;
} }
side.cases.push(`${caseHdr}\n${case1}`); side.cases.push(`${caseHdr}\n${case1}`);
@ -154,26 +152,24 @@ function goReq(side: side, m: string) {
if (a != '') { if (a != '') {
case1 = `var params ${a} case1 = `var params ${a}
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
}`; }`;
} }
const arg2 = a == '' ? '' : ', &params'; const arg2 = a == '' ? '' : ', &params';
let case2 = `if err := h.${side.name}.${nm}(ctx${arg2}); err != nil { let case2 = `if err := ${side.name}.${nm}(ctx${arg2}); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
}`; }`;
if (b != '') { if (b != '') {
case2 = `resp, err := h.${side.name}.${nm}(ctx${arg2}) case2 = `resp, err := ${side.name}.${nm}(ctx${arg2})
if err := r.Reply(ctx, resp, err); err != nil { if err := r.Reply(ctx, resp, err); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }`;
return true`;
} else { // response is nil } else { // response is nil
case2 = `err := h.${side.name}.${nm}(ctx${arg2}) case2 = `err := ${side.name}.${nm}(ctx${arg2})
if err := r.Reply(ctx, nil, err); err != nil { if err := r.Reply(ctx, nil, err); err != nil {
log.Error(ctx, "", err) log.Errorf(ctx, "%v", err)
} }`
return true`
} }
side.cases.push(`${caseHdr}\n${case1}\n${case2}`); side.cases.push(`${caseHdr}\n${case1}\n${case2}`);
@ -226,31 +222,32 @@ function output(side: side) {
"encoding/json" "encoding/json"
"golang.org/x/tools/internal/jsonrpc2" "golang.org/x/tools/internal/jsonrpc2"
"golang.org/x/tools/internal/lsp/telemetry/log" "golang.org/x/tools/internal/lsp/xlog"
) )
`); `);
const a = side.name[0].toUpperCase() + side.name.substring(1) const a = side.name[0].toUpperCase() + side.name.substring(1)
f(`type ${a} interface {`); f(`type ${a} interface {`);
side.methods.forEach((v) => { f(v) }); side.methods.forEach((v) => { f(v) });
f('}\n'); f('}\n');
f(`func (h ${side.name}Handler) Deliver(ctx context.Context, r *jsonrpc2.Request, delivered bool) bool { f(`func ${side.name}Handler(log xlog.Logger, ${side.name} ${
if delivered { side.goName}) jsonrpc2.Handler {
return false return func(ctx context.Context, r *jsonrpc2.Request) {
}
switch r.Method { switch r.Method {
case "$/cancelRequest": case "$/cancelRequest":
var params CancelParams var params CancelParams
if err := json.Unmarshal(*r.Params, &params); err != nil { if err := json.Unmarshal(*r.Params, &params); err != nil {
sendParseError(ctx, r, err) sendParseError(ctx, log, r, err)
return true return
} }
r.Conn().Cancel(params.ID) r.Conn().Cancel(params.ID)`);
return true`);
side.cases.forEach((v) => { f(v) }); side.cases.forEach((v) => { f(v) });
f(` f(`
default: default:
return false if r.IsNotify() {
r.Reply(ctx, nil, jsonrpc2.NewErrorf(jsonrpc2.CodeMethodNotFound, "method %q not found", r.Method))
} }
}
}
}`); }`);
f(` f(`
type ${side.name}Dispatcher struct { type ${side.name}Dispatcher struct {
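
The comment near the top of this file lists the three pieces emitted per side: an interface, a handler that switches on r.Method, and a dispatcher. The handler shape appears at length in tsserver.go above; the sketch below covers the other two pieces for a single method, assuming the internal jsonrpc2 API used throughout this diff (the nil params/result arguments to Conn.Call are an assumption for a method with neither).

```go
// Part 1: the interface the LSP server implements.
type Server interface {
	Shutdown(ctx context.Context) error
}

// Part 3: the dispatcher, which turns method calls into JSON-RPC calls.
type serverDispatcher struct {
	Conn *jsonrpc2.Conn
}

func (s *serverDispatcher) Shutdown(ctx context.Context) error {
	return s.Conn.Call(ctx, "shutdown", nil, nil)
}
```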

View File

@ -9,8 +9,6 @@ import (
"golang.org/x/tools/internal/lsp/protocol" "golang.org/x/tools/internal/lsp/protocol"
"golang.org/x/tools/internal/lsp/source" "golang.org/x/tools/internal/lsp/source"
"golang.org/x/tools/internal/lsp/telemetry/log"
"golang.org/x/tools/internal/lsp/telemetry/tag"
"golang.org/x/tools/internal/span" "golang.org/x/tools/internal/span"
) )
@ -36,33 +34,15 @@ func (s *Server) references(ctx context.Context, params *protocol.ReferenceParam
} }
references, err := ident.References(ctx) references, err := ident.References(ctx)
if err != nil { if err != nil {
log.Error(ctx, "no references", err, tag.Of("Identifier", ident.Name)) view.Session().Logger().Errorf(ctx, "no references for %s: %v", ident.Name, err)
} }
if params.Context.IncludeDeclaration {
// The declaration of this identifier may not be in the
// scope that we search for references, so make sure
// it is added to the beginning of the list if IncludeDeclaration
// was specified.
references = append([]*source.ReferenceInfo{
&source.ReferenceInfo{
Range: ident.DeclarationRange(),
},
}, references...)
}
// Get the location of each reference to return as the result. // Get the location of each reference to return as the result.
locations := make([]protocol.Location, 0, len(references)) locations := make([]protocol.Location, 0, len(references))
seen := make(map[span.Span]bool)
for _, ref := range references { for _, ref := range references {
refSpan, err := ref.Range.Span() refSpan, err := ref.Range.Span()
if err != nil { if err != nil {
return nil, err return nil, err
} }
if seen[refSpan] {
continue // already added this location
}
seen[refSpan] = true
_, refM, err := getSourceFile(ctx, view, refSpan.URI()) _, refM, err := getSourceFile(ctx, view, refSpan.URI())
if err != nil { if err != nil {
return nil, err return nil, err
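
The master side of this hunk prepends the declaration when IncludeDeclaration is set, since the declaration may fall outside the searched scope, and deduplicates spans before converting them to locations. The following is a simplified sketch of that post-processing, using the span type as it is used above rather than the real gopls plumbing.

```go
// Sketch only: prepend the declaration if requested, then drop duplicates.
func collectReferenceSpans(refs []span.Span, decl span.Span, includeDecl bool) []span.Span {
	if includeDecl {
		refs = append([]span.Span{decl}, refs...)
	}
	seen := make(map[span.Span]bool)
	out := make([]span.Span, 0, len(refs))
	for _, r := range refs {
		if seen[r] {
			continue // already added this location
		}
		seen[r] = true
		out = append(out, r)
	}
	return out
}
```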

View File

@ -13,36 +13,37 @@ import (
"golang.org/x/tools/internal/jsonrpc2" "golang.org/x/tools/internal/jsonrpc2"
"golang.org/x/tools/internal/lsp/protocol" "golang.org/x/tools/internal/lsp/protocol"
"golang.org/x/tools/internal/lsp/source" "golang.org/x/tools/internal/lsp/source"
"golang.org/x/tools/internal/lsp/xlog"
"golang.org/x/tools/internal/span" "golang.org/x/tools/internal/span"
) )
// NewClientServer // NewClientServer
func NewClientServer(ctx context.Context, cache source.Cache, client protocol.Client) (context.Context, *Server) { func NewClientServer(cache source.Cache, client protocol.Client) *Server {
ctx = protocol.WithClient(ctx, client) return &Server{
return ctx, &Server{
client: client, client: client,
session: cache.NewSession(ctx), session: cache.NewSession(xlog.New(protocol.NewLogger(client))),
} }
} }
// NewServer starts an LSP server on the supplied stream, and waits until the // NewServer starts an LSP server on the supplied stream, and waits until the
// stream is closed. // stream is closed.
func NewServer(ctx context.Context, cache source.Cache, stream jsonrpc2.Stream) (context.Context, *Server) { func NewServer(cache source.Cache, stream jsonrpc2.Stream) *Server {
s := &Server{} s := &Server{}
ctx, s.Conn, s.client = protocol.NewServer(ctx, stream, s) var log xlog.Logger
s.session = cache.NewSession(ctx) s.Conn, s.client, log = protocol.NewServer(stream, s)
return ctx, s s.session = cache.NewSession(log)
return s
} }
// RunServerOnPort starts an LSP server on the given port and does not exit. // RunServerOnPort starts an LSP server on the given port and does not exit.
// This function exists for debugging purposes. // This function exists for debugging purposes.
func RunServerOnPort(ctx context.Context, cache source.Cache, port int, h func(ctx context.Context, s *Server)) error { func RunServerOnPort(ctx context.Context, cache source.Cache, port int, h func(s *Server)) error {
return RunServerOnAddress(ctx, cache, fmt.Sprintf(":%v", port), h) return RunServerOnAddress(ctx, cache, fmt.Sprintf(":%v", port), h)
} }
// RunServerOnPort starts an LSP server on the given port and does not exit. // RunServerOnPort starts an LSP server on the given port and does not exit.
// This function exists for debugging purposes. // This function exists for debugging purposes.
func RunServerOnAddress(ctx context.Context, cache source.Cache, addr string, h func(ctx context.Context, s *Server)) error { func RunServerOnAddress(ctx context.Context, cache source.Cache, addr string, h func(s *Server)) error {
ln, err := net.Listen("tcp", addr) ln, err := net.Listen("tcp", addr)
if err != nil { if err != nil {
return err return err
@ -52,7 +53,7 @@ func RunServerOnAddress(ctx context.Context, cache source.Cache, addr string, h
if err != nil { if err != nil {
return err return err
} }
h(NewServer(ctx, cache, jsonrpc2.NewHeaderStream(conn, conn))) h(NewServer(cache, jsonrpc2.NewHeaderStream(conn, conn)))
} }
} }
@ -60,36 +61,23 @@ func (s *Server) Run(ctx context.Context) error {
return s.Conn.Run(ctx) return s.Conn.Run(ctx)
} }
type serverState int
const (
serverCreated = serverState(iota)
serverInitializing // set once the server has received "initialize" request
serverInitialized // set once the server has received "initialized" request
serverShutDown
)
type Server struct { type Server struct {
Conn *jsonrpc2.Conn Conn *jsonrpc2.Conn
client protocol.Client client protocol.Client
stateMu sync.Mutex initializedMu sync.Mutex
state serverState isInitialized bool // set once the server has received "initialize" request
// Configurations. // Configurations.
// TODO(rstambler): Separate these into their own struct? // TODO(rstambler): Separate these into their own struct?
usePlaceholders bool usePlaceholders bool
hoverKind source.HoverKind noDocsOnHover bool
useDeepCompletions bool useDeepCompletions bool
wantCompletionDocumentation bool
insertTextFormat protocol.InsertTextFormat insertTextFormat protocol.InsertTextFormat
configurationSupported bool configurationSupported bool
dynamicConfigurationSupported bool dynamicConfigurationSupported bool
preferredContentFormat protocol.MarkupKind preferredContentFormat protocol.MarkupKind
disabledAnalyses map[string]struct{} disabledAnalyses map[string]struct{}
wantSuggestedFixes bool
supportedCodeActions map[protocol.CodeActionKind]bool
textDocumentSyncKind protocol.TextDocumentSyncKind textDocumentSyncKind protocol.TextDocumentSyncKind
@ -173,8 +161,8 @@ func (s *Server) Completion(ctx context.Context, params *protocol.CompletionPara
return s.completion(ctx, params) return s.completion(ctx, params)
} }
func (s *Server) Resolve(ctx context.Context, item *protocol.CompletionItem) (*protocol.CompletionItem, error) { func (s *Server) CompletionResolve(context.Context, *protocol.CompletionItem) (*protocol.CompletionItem, error) {
return nil, notImplemented("completionItem/resolve") return nil, notImplemented("CompletionResolve")
} }
func (s *Server) Hover(ctx context.Context, params *protocol.TextDocumentPositionParams) (*protocol.Hover, error) { func (s *Server) Hover(ctx context.Context, params *protocol.TextDocumentPositionParams) (*protocol.Hover, error) {
@ -269,14 +257,13 @@ func (s *Server) PrepareRename(context.Context, *protocol.TextDocumentPositionPa
return nil, notImplemented("PrepareRename") return nil, notImplemented("PrepareRename")
} }
func (s *Server) Resolve(context.Context, *protocol.CompletionItem) (*protocol.CompletionItem, error) {
return nil, notImplemented("Resolve")
}
func (s *Server) SetTraceNotification(context.Context, *protocol.SetTraceParams) error { func (s *Server) SetTraceNotification(context.Context, *protocol.SetTraceParams) error {
return notImplemented("SetTraceNotification") return notImplemented("SetTraceNotification")
} }
func (s *Server) SelectionRange(context.Context, *protocol.SelectionRangeParams) ([]protocol.SelectionRange, error) {
return nil, notImplemented("SelectionRange")
}
func notImplemented(method string) *jsonrpc2.Error { func notImplemented(method string) *jsonrpc2.Error {
return jsonrpc2.NewErrorf(jsonrpc2.CodeMethodNotFound, "method %q not yet implemented", method) return jsonrpc2.NewErrorf(jsonrpc2.CodeMethodNotFound, "method %q not yet implemented", method)
} }
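
RunServerOnAddress above accepts connections in a loop and builds a Server per connection from a header-framed jsonrpc2 stream. Below is a trimmed sketch of that loop in the v0.1.1 style of the right-hand column, taking cache, NewServer, and the internal jsonrpc2 package as they appear in this diff; running each session in its own goroutine is an assumption of the sketch, not something the original code shows.

```go
// Sketch of the accept loop; error handling and the debug callback omitted.
func serveOnAddress(ctx context.Context, cache source.Cache, addr string) error {
	ln, err := net.Listen("tcp", addr)
	if err != nil {
		return err
	}
	defer ln.Close()
	for {
		conn, err := ln.Accept()
		if err != nil {
			return err
		}
		s := NewServer(cache, jsonrpc2.NewHeaderStream(conn, conn))
		go s.Run(ctx) // one LSP session per connection
	}
}
```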

View File

@ -9,8 +9,6 @@ import (
"golang.org/x/tools/internal/lsp/protocol" "golang.org/x/tools/internal/lsp/protocol"
"golang.org/x/tools/internal/lsp/source" "golang.org/x/tools/internal/lsp/source"
"golang.org/x/tools/internal/lsp/telemetry/log"
"golang.org/x/tools/internal/lsp/telemetry/tag"
"golang.org/x/tools/internal/span" "golang.org/x/tools/internal/span"
) )
@ -31,7 +29,7 @@ func (s *Server) signatureHelp(ctx context.Context, params *protocol.TextDocumen
} }
info, err := source.SignatureHelp(ctx, f, rng.Start) info, err := source.SignatureHelp(ctx, f, rng.Start)
if err != nil { if err != nil {
log.Print(ctx, "no signature help", tag.Of("At", rng), tag.Of("Failure", err)) s.session.Logger().Infof(ctx, "no signature help for %s:%v:%v : %s", uri, int(params.Position.Line), int(params.Position.Character), err)
return nil, nil return nil, nil
} }
return toProtocolSignatureHelp(info), nil return toProtocolSignatureHelp(info), nil

View File

@ -11,6 +11,7 @@ import (
"fmt" "fmt"
"go/token" "go/token"
"go/types" "go/types"
"log"
"reflect" "reflect"
"sort" "sort"
"strings" "strings"
@ -19,12 +20,9 @@ import (
"golang.org/x/sync/errgroup" "golang.org/x/sync/errgroup"
"golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis"
"golang.org/x/tools/internal/lsp/telemetry/trace"
) )
func analyze(ctx context.Context, v View, pkgs []Package, analyzers []*analysis.Analyzer) ([]*Action, error) { func analyze(ctx context.Context, v View, pkgs []Package, analyzers []*analysis.Analyzer) ([]*Action, error) {
ctx, done := trace.StartSpan(ctx, "source.analyze")
defer done()
if ctx.Err() != nil { if ctx.Err() != nil {
return nil, ctx.Err() return nil, ctx.Err()
} }
@ -147,7 +145,7 @@ func (act *Action) execOnce(ctx context.Context, fset *token.FileSet) error {
pass := &analysis.Pass{ pass := &analysis.Pass{
Analyzer: act.Analyzer, Analyzer: act.Analyzer,
Fset: fset, Fset: fset,
Files: act.Pkg.GetSyntax(ctx), Files: act.Pkg.GetSyntax(),
Pkg: act.Pkg.GetTypes(), Pkg: act.Pkg.GetTypes(),
TypesInfo: act.Pkg.GetTypesInfo(), TypesInfo: act.Pkg.GetTypesInfo(),
TypesSizes: act.Pkg.GetTypesSizes(), TypesSizes: act.Pkg.GetTypesSizes(),
@ -244,12 +242,12 @@ func (act *Action) importObjectFact(obj types.Object, ptr analysis.Fact) bool {
// exportObjectFact implements Pass.ExportObjectFact. // exportObjectFact implements Pass.ExportObjectFact.
func (act *Action) exportObjectFact(obj types.Object, fact analysis.Fact) { func (act *Action) exportObjectFact(obj types.Object, fact analysis.Fact) {
if act.pass.ExportObjectFact == nil { if act.pass.ExportObjectFact == nil {
panic(fmt.Sprintf("%s: Pass.ExportObjectFact(%s, %T) called after Run", act, obj, fact)) log.Panicf("%s: Pass.ExportObjectFact(%s, %T) called after Run", act, obj, fact)
} }
if obj.Pkg() != act.Pkg.GetTypes() { if obj.Pkg() != act.Pkg.GetTypes() {
panic(fmt.Sprintf("internal error: in analysis %s of package %s: Fact.Set(%s, %T): can't set facts on objects belonging another package", log.Panicf("internal error: in analysis %s of package %s: Fact.Set(%s, %T): can't set facts on objects belonging another package",
act.Analyzer, act.Pkg, obj, fact)) act.Analyzer, act.Pkg, obj, fact)
} }
key := objectFactKey{obj, factType(fact)} key := objectFactKey{obj, factType(fact)}
@ -283,7 +281,7 @@ func (act *Action) importPackageFact(pkg *types.Package, ptr analysis.Fact) bool
// exportPackageFact implements Pass.ExportPackageFact. // exportPackageFact implements Pass.ExportPackageFact.
func (act *Action) exportPackageFact(fact analysis.Fact) { func (act *Action) exportPackageFact(fact analysis.Fact) {
if act.pass.ExportPackageFact == nil { if act.pass.ExportPackageFact == nil {
panic(fmt.Sprintf("%s: Pass.ExportPackageFact(%T) called after Run", act, fact)) log.Panicf("%s: Pass.ExportPackageFact(%T) called after Run", act, fact)
} }
key := packageFactKey{act.pass.Pkg, factType(fact)} key := packageFactKey{act.pass.Pkg, factType(fact)}
@ -293,7 +291,7 @@ func (act *Action) exportPackageFact(fact analysis.Fact) {
func factType(fact analysis.Fact) reflect.Type { func factType(fact analysis.Fact) reflect.Type {
t := reflect.TypeOf(fact) t := reflect.TypeOf(fact)
if t.Kind() != reflect.Ptr { if t.Kind() != reflect.Ptr {
panic(fmt.Sprintf("invalid Fact type: got %T, want pointer", t)) log.Fatalf("invalid Fact type: got %T, want pointer", t)
} }
return t return t
} }
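
The fact plumbing above (factType, exportObjectFact, importObjectFact) mirrors go/analysis: fact types must be pointers and may only be attached to objects of the package under analysis. The hypothetical toy analyzer below, which is not part of gopls, shows how such facts are declared and exported with the public go/analysis API.

```go
package toyfacts

import (
	"go/types"

	"golang.org/x/tools/go/analysis"
)

// isExportedFunc is a hypothetical fact; fact types must be pointers,
// which is exactly what the factType check above enforces.
type isExportedFunc struct{}

func (*isExportedFunc) AFact() {}

var Analyzer = &analysis.Analyzer{
	Name:      "toyfacts",
	Doc:       "attaches a fact to every exported package-level function",
	FactTypes: []analysis.Fact{(*isExportedFunc)(nil)},
	Run: func(pass *analysis.Pass) (interface{}, error) {
		scope := pass.Pkg.Scope()
		for _, name := range scope.Names() {
			if fn, ok := scope.Lookup(name).(*types.Func); ok && fn.Exported() {
				pass.ExportObjectFact(fn, &isExportedFunc{})
			}
		}
		return nil, nil
	},
}
```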

View File

@ -12,9 +12,7 @@ import (
"go/types" "go/types"
"golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/go/ast/astutil"
"golang.org/x/tools/internal/lsp/fuzzy"
"golang.org/x/tools/internal/lsp/snippet" "golang.org/x/tools/internal/lsp/snippet"
"golang.org/x/tools/internal/lsp/telemetry/trace"
"golang.org/x/tools/internal/span" "golang.org/x/tools/internal/span"
) )
@ -65,9 +63,6 @@ type CompletionItem struct {
// foo(${1:a int}, ${2: b int}, ${3: c int}) // foo(${1:a int}, ${2: b int}, ${3: c int})
// //
placeholderSnippet *snippet.Builder placeholderSnippet *snippet.Builder
// Documentation is the documentation for the completion item.
Documentation string
} }
// Snippet is a convenience function that determines the snippet that should be // Snippet is a convenience function that determines the snippet that should be
@ -118,7 +113,6 @@ type completer struct {
types *types.Package types *types.Package
info *types.Info info *types.Info
qf types.Qualifier qf types.Qualifier
opts CompletionOptions
// view is the View associated with this completion request. // view is the View associated with this completion request.
view View view View
@ -154,9 +148,6 @@ type completer struct {
// deepState contains the current state of our deep completion search. // deepState contains the current state of our deep completion search.
deepState deepCompletionState deepState deepCompletionState
// matcher does fuzzy matching of the candidates for the surrounding prefix.
matcher *fuzzy.Matcher
} }
type compLitInfo struct { type compLitInfo struct {
@ -195,24 +186,23 @@ func (c *completer) setSurrounding(ident *ast.Ident) {
if c.surrounding != nil { if c.surrounding != nil {
return return
} }
if !(ident.Pos() <= c.pos && c.pos <= ident.End()) { if !(ident.Pos() <= c.pos && c.pos <= ident.End()) {
return return
} }
c.surrounding = &Selection{ c.surrounding = &Selection{
Content: ident.Name, Content: ident.Name,
Range: span.NewRange(c.view.Session().Cache().FileSet(), ident.Pos(), ident.End()), Range: span.NewRange(c.view.Session().Cache().FileSet(), ident.Pos(), ident.End()),
Cursor: c.pos, Cursor: c.pos,
} }
if c.surrounding.Prefix() != "" {
c.matcher = fuzzy.NewMatcher(c.surrounding.Prefix(), fuzzy.Symbol)
}
} }
// found adds a candidate completion. We will also search through the object's // found adds a candidate completion. We will also search through the object's
// members for more candidates. // members for more candidates.
func (c *completer) found(obj types.Object, score float64) error { func (c *completer) found(obj types.Object, score float64) {
if obj.Pkg() != nil && obj.Pkg() != c.types && !obj.Exported() { if obj.Pkg() != nil && obj.Pkg() != c.types && !obj.Exported() {
return fmt.Errorf("%s is inaccessible from %s", obj.Name(), c.types.Path()) return // inaccessible
} }
if c.inDeepCompletion() { if c.inDeepCompletion() {
@ -221,13 +211,13 @@ func (c *completer) found(obj types.Object, score float64) error {
// "bar.Baz" even though "Baz" is represented the same types.Object in both. // "bar.Baz" even though "Baz" is represented the same types.Object in both.
for _, seenObj := range c.deepState.chain { for _, seenObj := range c.deepState.chain {
if seenObj == obj { if seenObj == obj {
return nil return
} }
} }
} else { } else {
// At the top level, dedupe by object. // At the top level, dedupe by object.
if c.seen[obj] { if c.seen[obj] {
return nil return
} }
c.seen[obj] = true c.seen[obj] = true
} }
@ -243,14 +233,10 @@ func (c *completer) found(obj types.Object, score float64) error {
// Favor shallow matches by lowering weight according to depth. // Favor shallow matches by lowering weight according to depth.
cand.score -= stdScore * float64(len(c.deepState.chain)) cand.score -= stdScore * float64(len(c.deepState.chain))
item, err := c.item(cand)
if err != nil { c.items = append(c.items, c.item(cand))
return err
}
c.items = append(c.items, item)
c.deepSearch(obj) c.deepSearch(obj)
return nil
} }
// candidate represents a completion candidate. // candidate represents a completion candidate.
@ -268,7 +254,6 @@ type candidate struct {
type CompletionOptions struct { type CompletionOptions struct {
DeepComplete bool DeepComplete bool
WantDocumentaton bool
} }
// Completion returns a list of possible candidates for completion, given a // Completion returns a list of possible candidates for completion, given a
@ -278,13 +263,11 @@ type CompletionOptions struct {
// the client to score the quality of the completion. For instance, some clients // the client to score the quality of the completion. For instance, some clients
// may tolerate imperfect matches as valid completion results, since users may make typos. // may tolerate imperfect matches as valid completion results, since users may make typos.
func Completion(ctx context.Context, view View, f GoFile, pos token.Pos, opts CompletionOptions) ([]CompletionItem, *Selection, error) { func Completion(ctx context.Context, view View, f GoFile, pos token.Pos, opts CompletionOptions) ([]CompletionItem, *Selection, error) {
ctx, done := trace.StartSpan(ctx, "source.Completion") file := f.GetAST(ctx)
defer done()
file, err := f.GetAST(ctx, ParseFull)
if file == nil { if file == nil {
return nil, nil, err return nil, nil, fmt.Errorf("no AST for %s", f.URI())
} }
pkg := f.GetPackage(ctx) pkg := f.GetPackage(ctx)
if pkg == nil || pkg.IsIllTyped() { if pkg == nil || pkg.IsIllTyped() {
return nil, nil, fmt.Errorf("package for %s is ill typed", f.URI()) return nil, nil, fmt.Errorf("package for %s is ill typed", f.URI())
@ -319,7 +302,6 @@ func Completion(ctx context.Context, view View, f GoFile, pos token.Pos, opts Co
seen: make(map[types.Object]bool), seen: make(map[types.Object]bool),
enclosingFunction: enclosingFunction(path, pos, pkg.GetTypesInfo()), enclosingFunction: enclosingFunction(path, pos, pkg.GetTypesInfo()),
enclosingCompositeLiteral: clInfo, enclosingCompositeLiteral: clInfo,
opts: opts,
} }
c.deepState.enabled = opts.DeepComplete c.deepState.enabled = opts.DeepComplete
@ -509,7 +491,6 @@ func (c *completer) lexical() error {
if scope == types.Universe { if scope == types.Universe {
score *= 0.1 score *= 0.1
} }
// If we haven't already added a candidate for an object with this name. // If we haven't already added a candidate for an object with this name.
if _, ok := seen[obj.Name()]; !ok { if _, ok := seen[obj.Name()]; !ok {
seen[obj.Name()] = struct{}{} seen[obj.Name()] = struct{}{}
@ -746,9 +727,6 @@ type typeInference struct {
// assertableFrom is a type that must be assertable to our candidate type. // assertableFrom is a type that must be assertable to our candidate type.
assertableFrom types.Type assertableFrom types.Type
// convertibleTo is a type our candidate type must be convertible to.
convertibleTo types.Type
} }
// expectedType returns information about the expected type for an expression at // expectedType returns information about the expected type for an expression at
@ -765,7 +743,6 @@ func expectedType(c *completer) typeInference {
var ( var (
modifiers []typeModifier modifiers []typeModifier
typ types.Type typ types.Type
convertibleTo types.Type
) )
Nodes: Nodes:
@ -797,13 +774,6 @@ Nodes:
case *ast.CallExpr: case *ast.CallExpr:
// Only consider CallExpr args if position falls between parens. // Only consider CallExpr args if position falls between parens.
if node.Lparen <= c.pos && c.pos <= node.Rparen { if node.Lparen <= c.pos && c.pos <= node.Rparen {
// For type conversions like "int64(foo)" we can only infer our
// desired type is convertible to int64.
if typ := typeConversion(node, c.info); typ != nil {
convertibleTo = typ
break Nodes
}
if tv, ok := c.info.Types[node.Fun]; ok { if tv, ok := c.info.Types[node.Fun]; ok {
if sig, ok := tv.Type.(*types.Signature); ok { if sig, ok := tv.Type.(*types.Signature); ok {
if sig.Params().Len() == 0 { if sig.Params().Len() == 0 {
@ -892,7 +862,6 @@ Nodes:
return typeInference{ return typeInference{
objType: typ, objType: typ,
modifiers: modifiers, modifiers: modifiers,
convertibleTo: convertibleTo,
} }
} }
@ -1047,28 +1016,14 @@ func (c *completer) matchingType(cand *candidate) bool {
// are invoked by default. // are invoked by default.
cand.expandFuncCall = isFunc(cand.obj) cand.expandFuncCall = isFunc(cand.obj)
typeMatches := func(candType types.Type) bool { typeMatches := func(actual types.Type) bool {
// Take into account any type modifiers on the expected type. // Take into account any type modifiers on the expected type.
candType = c.expectedType.applyTypeModifiers(candType) actual = c.expectedType.applyTypeModifiers(actual)
if c.expectedType.objType != nil { if c.expectedType.objType != nil {
wantType := types.Default(c.expectedType.objType)
// Handle untyped values specially since AssignableTo gives false negatives
// for them (see https://golang.org/issue/32146).
if candBasic, ok := candType.(*types.Basic); ok && candBasic.Info()&types.IsUntyped > 0 {
if wantBasic, ok := wantType.Underlying().(*types.Basic); ok {
// Check that their constant kind (bool|int|float|complex|string) matches.
// This doesn't take into account the constant value, so there will be some
// false positives due to integer sign and overflow.
return candBasic.Info()&types.IsConstType == wantBasic.Info()&types.IsConstType
}
return false
}
// AssignableTo covers the case where the types are equal, but also handles // AssignableTo covers the case where the types are equal, but also handles
// cases like assigning a concrete type to an interface type. // cases like assigning a concrete type to an interface type.
return types.AssignableTo(candType, wantType) return types.AssignableTo(types.Default(actual), types.Default(c.expectedType.objType))
} }
return false return false
@ -1090,10 +1045,6 @@ func (c *completer) matchingType(cand *candidate) bool {
} }
} }
if c.expectedType.convertibleTo != nil {
return types.ConvertibleTo(objType, c.expectedType.convertibleTo)
}
return false return false
} }
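
The hunks above describe three matching rules: untyped constant candidates are compared by constant kind because AssignableTo reports false negatives for them (golang.org/issue/32146), ordinary candidates use assignability, and inside a type conversion such as int64(<>) a candidate only needs to be convertible. The function below is an illustrative condensation of those rules using go/types directly, not the gopls code itself.

```go
// typeMatches reports whether a candidate type fits the expected type under
// the rules sketched above.
func typeMatches(cand, want, convertibleTo types.Type) bool {
	if convertibleTo != nil {
		// Completing inside a conversion like int64(<>).
		return types.ConvertibleTo(cand, convertibleTo)
	}
	wantDef := types.Default(want)
	if b, ok := cand.(*types.Basic); ok && b.Info()&types.IsUntyped != 0 {
		if wb, ok := wantDef.Underlying().(*types.Basic); ok {
			// Compare constant kinds (bool|int|float|complex|string) only;
			// sign and overflow are ignored, so some false positives remain.
			return b.Info()&types.IsConstType == wb.Info()&types.IsConstType
		}
		return false
	}
	// AssignableTo also covers assigning a concrete type to an interface.
	return types.AssignableTo(types.Default(cand), wantDef)
}
```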

View File

@ -14,13 +14,10 @@ import (
"strings" "strings"
"golang.org/x/tools/internal/lsp/snippet" "golang.org/x/tools/internal/lsp/snippet"
"golang.org/x/tools/internal/lsp/telemetry/log"
"golang.org/x/tools/internal/lsp/telemetry/tag"
"golang.org/x/tools/internal/span"
) )
// formatCompletion creates a completion item for a given candidate. // formatCompletion creates a completion item for a given candidate.
func (c *completer) item(cand candidate) (CompletionItem, error) { func (c *completer) item(cand candidate) CompletionItem {
obj := cand.obj obj := cand.obj
// Handle builtin types separately. // Handle builtin types separately.
@ -43,7 +40,7 @@ func (c *completer) item(cand candidate) (CompletionItem, error) {
params := formatParams(sig.Params(), sig.Variadic(), c.qf) params := formatParams(sig.Params(), sig.Variadic(), c.qf)
plainSnippet, placeholderSnippet = c.functionCallSnippets(label, params) plainSnippet, placeholderSnippet = c.functionCallSnippets(label, params)
results, writeParens := formatResults(sig.Results(), c.qf) results, writeParens := formatResults(sig.Results(), c.qf)
detail = "func" + formatFunction(params, results, writeParens) label, detail = formatFunction(label, params, results, writeParens)
} }
switch obj := obj.(type) { switch obj := obj.(type) {
@ -86,7 +83,8 @@ func (c *completer) item(cand candidate) (CompletionItem, error) {
} }
detail = strings.TrimPrefix(detail, "untyped ") detail = strings.TrimPrefix(detail, "untyped ")
item := CompletionItem{
return CompletionItem{
Label: label, Label: label,
InsertText: insert, InsertText: insert,
Detail: detail, Detail: detail,
@ -96,42 +94,6 @@ func (c *completer) item(cand candidate) (CompletionItem, error) {
plainSnippet: plainSnippet, plainSnippet: plainSnippet,
placeholderSnippet: placeholderSnippet, placeholderSnippet: placeholderSnippet,
} }
if c.opts.WantDocumentaton {
declRange, err := objToRange(c.ctx, c.view.Session().Cache().FileSet(), obj)
if err != nil {
log.Error(c.ctx, "failed to get declaration range for object", err, tag.Of("Name", obj.Name()))
goto Return
}
pos := declRange.FileSet.Position(declRange.Start)
if !pos.IsValid() {
log.Error(c.ctx, "invalid declaration position", err, tag.Of("Label", item.Label))
goto Return
}
uri := span.FileURI(pos.Filename)
f, err := c.view.GetFile(c.ctx, uri)
if err != nil {
log.Error(c.ctx, "unable to get file", err, tag.Of("URI", uri))
goto Return
}
gof, ok := f.(GoFile)
if !ok {
log.Error(c.ctx, "declaration in a Go file", err, tag.Of("Label", item.Label))
goto Return
}
ident, err := Identifier(c.ctx, c.view, gof, declRange.Start)
if err != nil {
log.Error(c.ctx, "no identifier", err, tag.Of("Name", obj.Name()))
goto Return
}
documentation, err := ident.Documentation(c.ctx, SynopsisDocumentation)
if err != nil {
log.Error(c.ctx, "no documentation", err, tag.Of("Name", obj.Name()))
goto Return
}
item.Documentation = documentation
}
Return:
return item, nil
} }
// isParameter returns true if the given *types.Var is a parameter // isParameter returns true if the given *types.Var is a parameter
@ -148,8 +110,9 @@ func (c *completer) isParameter(v *types.Var) bool {
return false return false
} }
func (c *completer) formatBuiltin(cand candidate) (CompletionItem, error) { func (c *completer) formatBuiltin(cand candidate) CompletionItem {
obj := cand.obj obj := cand.obj
item := CompletionItem{ item := CompletionItem{
Label: obj.Name(), Label: obj.Name(),
InsertText: obj.Name(), InsertText: obj.Name(),
@ -166,8 +129,7 @@ func (c *completer) formatBuiltin(cand candidate) (CompletionItem, error) {
} }
params, _ := formatFieldList(c.ctx, c.view, decl.Type.Params) params, _ := formatFieldList(c.ctx, c.view, decl.Type.Params)
results, writeResultParens := formatFieldList(c.ctx, c.view, decl.Type.Results) results, writeResultParens := formatFieldList(c.ctx, c.view, decl.Type.Results)
item.Label = obj.Name() item.Label, item.Detail = formatFunction(obj.Name(), params, results, writeResultParens)
item.Detail = "func" + formatFunction(params, results, writeResultParens)
item.plainSnippet, item.placeholderSnippet = c.functionCallSnippets(obj.Name(), params) item.plainSnippet, item.placeholderSnippet = c.functionCallSnippets(obj.Name(), params)
case *types.TypeName: case *types.TypeName:
if types.IsInterface(obj.Type()) { if types.IsInterface(obj.Type()) {
@ -178,7 +140,7 @@ func (c *completer) formatBuiltin(cand candidate) (CompletionItem, error) {
case *types.Nil: case *types.Nil:
item.Kind = VariableCompletionItem item.Kind = VariableCompletionItem
} }
return item, nil return item
} }
var replacer = strings.NewReplacer( var replacer = strings.NewReplacer(
@ -201,7 +163,7 @@ func formatFieldList(ctx context.Context, v View, list *ast.FieldList) ([]string
cfg := printer.Config{Mode: printer.UseSpaces | printer.TabIndent, Tabwidth: 4} cfg := printer.Config{Mode: printer.UseSpaces | printer.TabIndent, Tabwidth: 4}
b := &bytes.Buffer{} b := &bytes.Buffer{}
if err := cfg.Fprint(b, v.Session().Cache().FileSet(), p.Type); err != nil { if err := cfg.Fprint(b, v.Session().Cache().FileSet(), p.Type); err != nil {
log.Error(ctx, "unable to print type", nil, tag.Of("Type", p.Type)) v.Session().Logger().Errorf(ctx, "unable to print type %v", p.Type)
continue continue
} }
typ := replacer.Replace(b.String()) typ := replacer.Replace(b.String())
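
formatFieldList above renders each parameter type with go/printer using spaces and a tab width of 4. The program below is a self-contained, runnable illustration of that printer configuration; the expression being printed is arbitrary.

```go
package main

import (
	"bytes"
	"fmt"
	"go/parser"
	"go/printer"
	"go/token"
)

func main() {
	fset := token.NewFileSet()
	expr, err := parser.ParseExprFrom(fset, "", "map[string][]*Foo", 0)
	if err != nil {
		panic(err)
	}
	cfg := printer.Config{Mode: printer.UseSpaces | printer.TabIndent, Tabwidth: 4}
	var buf bytes.Buffer
	if err := cfg.Fprint(&buf, fset, expr); err != nil {
		panic(err)
	}
	fmt.Println(buf.String()) // map[string][]*Foo
}
```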

View File

@ -34,9 +34,6 @@ import (
"golang.org/x/tools/go/analysis/passes/unsafeptr" "golang.org/x/tools/go/analysis/passes/unsafeptr"
"golang.org/x/tools/go/analysis/passes/unusedresult" "golang.org/x/tools/go/analysis/passes/unusedresult"
"golang.org/x/tools/go/packages" "golang.org/x/tools/go/packages"
"golang.org/x/tools/internal/lsp/telemetry"
"golang.org/x/tools/internal/lsp/telemetry/log"
"golang.org/x/tools/internal/lsp/telemetry/trace"
"golang.org/x/tools/internal/span" "golang.org/x/tools/internal/span"
) )
@ -62,8 +59,6 @@ const (
) )
func Diagnostics(ctx context.Context, view View, f GoFile, disabledAnalyses map[string]struct{}) (map[span.URI][]Diagnostic, error) { func Diagnostics(ctx context.Context, view View, f GoFile, disabledAnalyses map[string]struct{}) (map[span.URI][]Diagnostic, error) {
ctx, done := trace.StartSpan(ctx, "source.Diagnostics", telemetry.File.Of(f.URI()))
defer done()
pkg := f.GetPackage(ctx) pkg := f.GetPackage(ctx)
if pkg == nil { if pkg == nil {
return singleDiagnostic(f.URI(), "%s is not part of a package", f.URI()), nil return singleDiagnostic(f.URI(), "%s is not part of a package", f.URI()), nil
@ -86,7 +81,7 @@ func Diagnostics(ctx context.Context, view View, f GoFile, disabledAnalyses map[
if !diagnostics(ctx, view, pkg, reports) { if !diagnostics(ctx, view, pkg, reports) {
// If we don't have any list, parse, or type errors, run analyses. // If we don't have any list, parse, or type errors, run analyses.
if err := analyses(ctx, view, pkg, disabledAnalyses, reports); err != nil { if err := analyses(ctx, view, pkg, disabledAnalyses, reports); err != nil {
log.Error(ctx, "failed to run analyses", err, telemetry.File) view.Session().Logger().Errorf(ctx, "failed to run analyses for %s: %v", f.URI(), err)
} }
} }
// Updates to the diagnostics for this package may need to be propagated. // Updates to the diagnostics for this package may need to be propagated.
@ -109,8 +104,6 @@ type diagnosticSet struct {
} }
func diagnostics(ctx context.Context, v View, pkg Package, reports map[span.URI][]Diagnostic) bool { func diagnostics(ctx context.Context, v View, pkg Package, reports map[span.URI][]Diagnostic) bool {
ctx, done := trace.StartSpan(ctx, "source.diagnostics", telemetry.Package.Of(pkg.ID()))
defer done()
diagSets := make(map[span.URI]*diagnosticSet) diagSets := make(map[span.URI]*diagnosticSet)
for _, err := range pkg.GetErrors() { for _, err := range pkg.GetErrors() {
diag := Diagnostic{ diag := Diagnostic{
@ -236,31 +229,30 @@ func parseDiagnosticMessage(input string) span.Span {
func pointToSpan(ctx context.Context, view View, spn span.Span) span.Span { func pointToSpan(ctx context.Context, view View, spn span.Span) span.Span {
f, err := view.GetFile(ctx, spn.URI()) f, err := view.GetFile(ctx, spn.URI())
ctx = telemetry.File.With(ctx, spn.URI())
if err != nil { if err != nil {
log.Error(ctx, "could not find file for diagnostic", nil, telemetry.File) view.Session().Logger().Errorf(ctx, "could not find file for diagnostic: %v", spn.URI())
return spn return spn
} }
diagFile, ok := f.(GoFile) diagFile, ok := f.(GoFile)
if !ok { if !ok {
log.Error(ctx, "not a Go file", nil, telemetry.File) view.Session().Logger().Errorf(ctx, "%s is not a Go file", spn.URI())
return spn return spn
} }
tok, err := diagFile.GetToken(ctx) tok := diagFile.GetToken(ctx)
if err != nil { if tok == nil {
log.Error(ctx, "could not find token.File for diagnostic", err, telemetry.File) view.Session().Logger().Errorf(ctx, "could not find token.File for diagnostic: %v", spn.URI())
return spn return spn
} }
data, _, err := diagFile.Handle(ctx).Read(ctx) data, _, err := diagFile.Handle(ctx).Read(ctx)
if err != nil { if err != nil {
log.Error(ctx, "could not find content for diagnostic", err, telemetry.File) view.Session().Logger().Errorf(ctx, "could not find content for diagnostic: %v", spn.URI())
return spn return spn
} }
c := span.NewTokenConverter(diagFile.FileSet(), tok) c := span.NewTokenConverter(diagFile.FileSet(), tok)
s, err := spn.WithOffset(c) s, err := spn.WithOffset(c)
//we just don't bother producing an error if this failed //we just don't bother producing an error if this failed
if err != nil { if err != nil {
log.Error(ctx, "invalid span for diagnostic", err, telemetry.File) view.Session().Logger().Errorf(ctx, "invalid span for diagnostic: %v: %v", spn.URI(), err)
return spn return spn
} }
start := s.Start() start := s.Start()
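
pointToSpan above widens a line:column point reported by the compiler into an offset-based span so it can be rendered as a range. The sketch below keeps only the span-package calls that appear in this hunk and drops the error logging.

```go
// Sketch only: fall back to the original span if conversion fails.
func widenPoint(fset *token.FileSet, tok *token.File, spn span.Span) span.Span {
	converter := span.NewTokenConverter(fset, tok)
	if s, err := spn.WithOffset(converter); err == nil {
		return s
	}
	return spn
}
```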

View File

@ -13,101 +13,38 @@ import (
"golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/go/ast/astutil"
"golang.org/x/tools/go/packages" "golang.org/x/tools/go/packages"
"golang.org/x/tools/internal/imports" "golang.org/x/tools/imports"
"golang.org/x/tools/internal/lsp/diff" "golang.org/x/tools/internal/lsp/diff"
"golang.org/x/tools/internal/lsp/telemetry/log"
"golang.org/x/tools/internal/lsp/telemetry/trace"
"golang.org/x/tools/internal/span" "golang.org/x/tools/internal/span"
) )
// Format formats a file with a given range. // Format formats a file with a given range.
func Format(ctx context.Context, f GoFile, rng span.Range) ([]TextEdit, error) { func Format(ctx context.Context, f GoFile, rng span.Range) ([]TextEdit, error) {
ctx, done := trace.StartSpan(ctx, "source.Format") file := f.GetAST(ctx)
defer done()
file, err := f.GetAST(ctx, ParseFull)
if file == nil { if file == nil {
return nil, err return nil, fmt.Errorf("no AST for %s", f.URI())
} }
pkg := f.GetPackage(ctx) pkg := f.GetPackage(ctx)
if hasListErrors(pkg.GetErrors()) || hasParseErrors(pkg.GetErrors()) { if hasParseErrors(pkg.GetErrors()) {
// Even if this package has list or parse errors, this file may not return nil, fmt.Errorf("%s has parse errors, not formatting", f.URI())
// have any parse errors and can still be formatted. Using format.Node
// on an ast with errors may result in code being added or removed.
// Attempt to format the source of this file instead.
formatted, err := formatSource(ctx, f)
if err != nil {
return nil, err
}
return computeTextEdits(ctx, f, string(formatted)), nil
} }
path, exact := astutil.PathEnclosingInterval(file, rng.Start, rng.End) path, exact := astutil.PathEnclosingInterval(file, rng.Start, rng.End)
if !exact || len(path) == 0 { if !exact || len(path) == 0 {
return nil, fmt.Errorf("no exact AST node matching the specified range") return nil, fmt.Errorf("no exact AST node matching the specified range")
} }
node := path[0] node := path[0]
fset := f.FileSet()
buf := &bytes.Buffer{}
// format.Node changes slightly from one release to another, so the version // format.Node changes slightly from one release to another, so the version
// of Go used to build the LSP server will determine how it formats code. // of Go used to build the LSP server will determine how it formats code.
// This should be acceptable for all users, who likely be prompted to rebuild // This should be acceptable for all users, who likely be prompted to rebuild
// the LSP server on each Go release. // the LSP server on each Go release.
fset := f.FileSet()
buf := &bytes.Buffer{}
if err := format.Node(buf, fset, node); err != nil { if err := format.Node(buf, fset, node); err != nil {
return nil, err return nil, err
} }
return computeTextEdits(ctx, f, buf.String()), nil return computeTextEdits(ctx, f, buf.String()), nil
} }
func formatSource(ctx context.Context, file File) ([]byte, error) {
ctx, done := trace.StartSpan(ctx, "source.formatSource")
defer done()
data, _, err := file.Handle(ctx).Read(ctx)
if err != nil {
return nil, err
}
return format.Source(data)
}
// Imports formats a file using the goimports tool.
func Imports(ctx context.Context, view View, f GoFile, rng span.Range) ([]TextEdit, error) {
ctx, done := trace.StartSpan(ctx, "source.Imports")
defer done()
data, _, err := f.Handle(ctx).Read(ctx)
if err != nil {
return nil, err
}
pkg := f.GetPackage(ctx)
if pkg == nil || pkg.IsIllTyped() {
return nil, fmt.Errorf("no package for file %s", f.URI())
}
if hasListErrors(pkg.GetErrors()) {
return nil, fmt.Errorf("%s has list errors, not running goimports", f.URI())
}
options := &imports.Options{
// Defaults.
AllErrors: true,
Comments: true,
Fragment: true,
FormatOnly: false,
TabIndent: true,
TabWidth: 8,
}
var formatted []byte
importFn := func(opts *imports.Options) error {
formatted, err = imports.Process(f.URI().Filename(), data, opts)
return err
}
err = view.RunProcessEnvFunc(ctx, importFn, options)
if err != nil {
return nil, err
}
return computeTextEdits(ctx, f, string(formatted)), nil
}
func hasParseErrors(errors []packages.Error) bool { func hasParseErrors(errors []packages.Error) bool {
for _, err := range errors { for _, err := range errors {
if err.Kind == packages.ParseError { if err.Kind == packages.ParseError {
@ -117,21 +54,27 @@ func hasParseErrors(errors []packages.Error) bool {
return false return false
} }
func hasListErrors(errors []packages.Error) bool { // Imports formats a file using the goimports tool.
for _, err := range errors { func Imports(ctx context.Context, f GoFile, rng span.Range) ([]TextEdit, error) {
if err.Kind == packages.ListError { data, _, err := f.Handle(ctx).Read(ctx)
return true if err != nil {
return nil, err
} }
tok := f.GetToken(ctx)
if tok == nil {
return nil, fmt.Errorf("no token file for %s", f.URI())
} }
return false formatted, err := imports.Process(tok.Name(), data, nil)
if err != nil {
return nil, err
}
return computeTextEdits(ctx, f, string(formatted)), nil
} }
func computeTextEdits(ctx context.Context, file File, formatted string) (edits []TextEdit) { func computeTextEdits(ctx context.Context, file File, formatted string) (edits []TextEdit) {
ctx, done := trace.StartSpan(ctx, "source.computeTextEdits")
defer done()
data, _, err := file.Handle(ctx).Read(ctx) data, _, err := file.Handle(ctx).Read(ctx)
if err != nil { if err != nil {
log.Error(ctx, "Cannot compute text edits", err) file.View().Session().Logger().Errorf(ctx, "Cannot compute text edits: %v", err)
return nil return nil
} }
u := diff.SplitLines(string(data)) u := diff.SplitLines(string(data))
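
Aside: the comment above Format explains the fallback — when a package has list or parse errors, format.Node may add or drop code, so formatSource formats the raw bytes instead. A minimal, standalone sketch of that fallback, assuming an illustrative file name example.go (not part of gopls):

package main

import (
	"fmt"
	"go/format"
	"io/ioutil"
	"log"
)

func main() {
	// Read the raw bytes rather than relying on a possibly broken AST.
	src, err := ioutil.ReadFile("example.go")
	if err != nil {
		log.Fatal(err)
	}
	// format.Source applies gofmt-style formatting to source text directly,
	// mirroring the formatSource fallback above.
	formatted, err := format.Source(src)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Print(string(formatted))
}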

View File

@ -11,17 +11,13 @@ import (
"go/token" "go/token"
"golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/go/ast/astutil"
"golang.org/x/tools/internal/lsp/telemetry/trace"
"golang.org/x/tools/internal/span" "golang.org/x/tools/internal/span"
) )
func Highlight(ctx context.Context, f GoFile, pos token.Pos) ([]span.Span, error) { func Highlight(ctx context.Context, f GoFile, pos token.Pos) ([]span.Span, error) {
ctx, done := trace.StartSpan(ctx, "source.Highlight") file := f.GetAST(ctx)
defer done()
file, err := f.GetAST(ctx, ParseFull)
if file == nil { if file == nil {
return nil, err return nil, fmt.Errorf("no AST for %s", f.URI())
} }
fset := f.FileSet() fset := f.FileSet()
path, _ := astutil.PathEnclosingInterval(file, pos, pos) path, _ := astutil.PathEnclosingInterval(file, pos, pos)

View File

@ -10,10 +10,9 @@ import (
"go/ast" "go/ast"
"go/doc" "go/doc"
"go/format" "go/format"
"go/token"
"go/types" "go/types"
"strings" "strings"
"golang.org/x/tools/internal/lsp/telemetry/trace"
) )
type documentation struct { type documentation struct {
@ -21,71 +20,22 @@ type documentation struct {
comment *ast.CommentGroup comment *ast.CommentGroup
} }
type HoverKind int func (i *IdentifierInfo) Hover(ctx context.Context, markdownSupported, wantComments bool) (string, error) {
const (
NoDocumentation = HoverKind(iota)
SynopsisDocumentation
FullDocumentation
// TODO: Support a single-line hover mode for clients like Vim.
singleLine
)
func (i *IdentifierInfo) Hover(ctx context.Context, markdownSupported bool, hoverKind HoverKind) (string, error) {
ctx, done := trace.StartSpan(ctx, "source.Hover")
defer done()
h, err := i.decl.hover(ctx) h, err := i.decl.hover(ctx)
if err != nil { if err != nil {
return "", err return "", err
} }
c := h.comment
if !wantComments {
c = nil
}
var b strings.Builder var b strings.Builder
if comment := formatDocumentation(h.comment, hoverKind); comment != "" { return writeHover(h.source, i.File.FileSet(), &b, c, markdownSupported, i.qf)
b.WriteString(comment)
b.WriteRune('\n')
}
if markdownSupported {
b.WriteString("```go\n")
}
switch x := h.source.(type) {
case ast.Node:
if err := format.Node(&b, i.File.FileSet(), x); err != nil {
return "", err
}
case types.Object:
b.WriteString(types.ObjectString(x, i.qf))
}
if markdownSupported {
b.WriteString("\n```")
}
return b.String(), nil
}
func formatDocumentation(c *ast.CommentGroup, hoverKind HoverKind) string {
switch hoverKind {
case SynopsisDocumentation:
return doc.Synopsis((c.Text()))
case FullDocumentation:
return c.Text()
}
return ""
}
func (i *IdentifierInfo) Documentation(ctx context.Context, hoverKind HoverKind) (string, error) {
h, err := i.decl.hover(ctx)
if err != nil {
return "", err
}
return formatDocumentation(h.comment, hoverKind), nil
} }
func (d declaration) hover(ctx context.Context) (*documentation, error) { func (d declaration) hover(ctx context.Context) (*documentation, error) {
ctx, done := trace.StartSpan(ctx, "source.hover")
defer done()
obj := d.obj obj := d.obj
switch node := d.node.(type) { switch node := d.node.(type) {
case *ast.ImportSpec:
return &documentation{node, nil}, nil
case *ast.GenDecl: case *ast.GenDecl:
switch obj := obj.(type) { switch obj := obj.(type) {
case *types.TypeName, *types.Var, *types.Const, *types.Func: case *types.TypeName, *types.Var, *types.Const, *types.Func:
@ -175,3 +125,34 @@ func formatVar(node ast.Spec, obj types.Object) (*documentation, error) {
// If we weren't able to find documentation for the object. // If we weren't able to find documentation for the object.
return &documentation{obj, nil}, nil return &documentation{obj, nil}, nil
} }
// writeHover writes the hover for a given node and its documentation.
func writeHover(x interface{}, fset *token.FileSet, b *strings.Builder, c *ast.CommentGroup, markdownSupported bool, qf types.Qualifier) (string, error) {
if c != nil {
// TODO(rstambler): Improve conversion from Go docs to markdown.
b.WriteString(formatDocumentation(c))
b.WriteRune('\n')
}
if markdownSupported {
b.WriteString("```go\n")
}
switch x := x.(type) {
case ast.Node:
if err := format.Node(b, fset, x); err != nil {
return "", err
}
case types.Object:
b.WriteString(types.ObjectString(x, qf))
}
if markdownSupported {
b.WriteString("\n```")
}
return b.String(), nil
}
func formatDocumentation(c *ast.CommentGroup) string {
if c == nil {
return ""
}
return doc.Synopsis(c.Text())
}
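
Aside: the SynopsisDocumentation/FullDocumentation split above comes down to go/doc.Synopsis versus the full comment text. A tiny self-contained illustration (the comment string is made up):

package main

import (
	"fmt"
	"go/doc"
)

func main() {
	comment := "Hover returns the hover text for an identifier. Extra detail is omitted from the synopsis."
	// SynopsisDocumentation: first sentence only.
	fmt.Println(doc.Synopsis(comment))
	// FullDocumentation: the whole comment.
	fmt.Println(comment)
}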

View File

@ -13,7 +13,6 @@ import (
"strconv" "strconv"
"golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/go/ast/astutil"
"golang.org/x/tools/internal/lsp/telemetry/trace"
"golang.org/x/tools/internal/span" "golang.org/x/tools/internal/span"
) )
@ -63,12 +62,9 @@ func Identifier(ctx context.Context, view View, f GoFile, pos token.Pos) (*Ident
// identifier checks a single position for a potential identifier. // identifier checks a single position for a potential identifier.
func identifier(ctx context.Context, view View, f GoFile, pos token.Pos) (*IdentifierInfo, error) { func identifier(ctx context.Context, view View, f GoFile, pos token.Pos) (*IdentifierInfo, error) {
ctx, done := trace.StartSpan(ctx, "source.identifier") file := f.GetAST(ctx)
defer done()
file, err := f.GetAST(ctx, ParseFull)
if file == nil { if file == nil {
return nil, err return nil, fmt.Errorf("no AST for %s", f.URI())
} }
pkg := f.GetPackage(ctx) pkg := f.GetPackage(ctx)
if pkg == nil || pkg.IsIllTyped() { if pkg == nil || pkg.IsIllTyped() {
@ -122,6 +118,8 @@ func identifier(ctx context.Context, view View, f GoFile, pos token.Pos) (*Ident
} }
} }
var err error
// Handle builtins separately. // Handle builtins separately.
if result.decl.obj.Parent() == types.Universe { if result.decl.obj.Parent() == types.Universe {
decl, ok := lookupBuiltinDecl(f.View(), result.Name).(ast.Node) decl, ok := lookupBuiltinDecl(f.View(), result.Name).(ast.Node)
@ -234,13 +232,14 @@ func objToNode(ctx context.Context, view View, originPkg *types.Package, obj typ
} }
// If the object is exported from a different package, // If the object is exported from a different package,
// we don't need its full AST to find the definition. // we don't need its full AST to find the definition.
mode := ParseFull var declAST *ast.File
if obj.Exported() && obj.Pkg() != originPkg { if obj.Exported() && obj.Pkg() != originPkg {
mode = ParseExported declAST = declFile.GetAnyAST(ctx)
} else {
declAST = declFile.GetAST(ctx)
} }
declAST, err := declFile.GetAST(ctx, mode)
if declAST == nil { if declAST == nil {
return nil, err return nil, fmt.Errorf("no AST for %s", f.URI())
} }
path, _ := astutil.PathEnclosingInterval(declAST, rng.Start, rng.End) path, _ := astutil.PathEnclosingInterval(declAST, rng.Start, rng.End)
if path == nil { if path == nil {
@ -290,12 +289,12 @@ func importSpec(ctx context.Context, f GoFile, fAST *ast.File, pkg Package, pos
if importedPkg == nil { if importedPkg == nil {
return nil, fmt.Errorf("no import for %q", importPath) return nil, fmt.Errorf("no import for %q", importPath)
} }
if importedPkg.GetSyntax(ctx) == nil { if importedPkg.GetSyntax() == nil {
return nil, fmt.Errorf("no syntax for for %q", importPath) return nil, fmt.Errorf("no syntax for for %q", importPath)
} }
// Heuristic: Jump to the longest (most "interesting") file of the package. // Heuristic: Jump to the longest (most "interesting") file of the package.
var dest *ast.File var dest *ast.File
for _, f := range importedPkg.GetSyntax(ctx) { for _, f := range importedPkg.GetSyntax() {
if dest == nil || f.End()-f.Pos() > dest.End()-dest.Pos() { if dest == nil || f.End()-f.Pos() > dest.End()-dest.Pos() {
dest = f dest = f
} }
@ -304,7 +303,6 @@ func importSpec(ctx context.Context, f GoFile, fAST *ast.File, pkg Package, pos
return nil, fmt.Errorf("package %q has no files", importPath) return nil, fmt.Errorf("package %q has no files", importPath)
} }
result.decl.rng = span.NewRange(f.FileSet(), dest.Name.Pos(), dest.Name.End()) result.decl.rng = span.NewRange(f.FileSet(), dest.Name.Pos(), dest.Name.End())
result.decl.node = imp
return result, nil return result, nil
} }

View File

@ -10,7 +10,6 @@ import (
"go/ast" "go/ast"
"go/types" "go/types"
"golang.org/x/tools/internal/lsp/telemetry/trace"
"golang.org/x/tools/internal/span" "golang.org/x/tools/internal/span"
) )
@ -20,80 +19,56 @@ type ReferenceInfo struct {
Range span.Range Range span.Range
ident *ast.Ident ident *ast.Ident
obj types.Object obj types.Object
pkg Package
isDeclaration bool isDeclaration bool
} }
// References returns a list of references for a given identifier within the packages // References returns a list of references for a given identifier within a package.
// containing i.File. Declarations appear first in the result.
func (i *IdentifierInfo) References(ctx context.Context) ([]*ReferenceInfo, error) { func (i *IdentifierInfo) References(ctx context.Context) ([]*ReferenceInfo, error) {
ctx, done := trace.StartSpan(ctx, "source.References")
defer done()
var references []*ReferenceInfo var references []*ReferenceInfo
if i.pkg == nil || i.pkg.IsIllTyped() {
return nil, fmt.Errorf("package for %s is ill typed", i.File.URI())
}
info := i.pkg.GetTypesInfo()
if info == nil {
return nil, fmt.Errorf("package %s has no types info", i.pkg.PkgPath())
}
// If the object declaration is nil, assume it is an import spec and do not look for references. // If the object declaration is nil, assume it is an import spec and do not look for references.
if i.decl.obj == nil { if i.decl.obj == nil {
return nil, fmt.Errorf("no references for an import spec") return nil, fmt.Errorf("no references for an import spec")
} }
pkgs := i.File.GetPackages(ctx)
for _, pkg := range pkgs {
if pkg == nil || pkg.IsIllTyped() {
return nil, fmt.Errorf("package for %s is ill typed", i.File.URI())
}
info := pkg.GetTypesInfo()
if info == nil {
return nil, fmt.Errorf("package %s has no types info", pkg.PkgPath())
}
if i.decl.wasImplicit { if i.decl.wasImplicit {
// The definition is implicit, so we must add it separately. // The definition is implicit, so we must add it separately.
// This occurs when the variable is declared in a type switch statement // This occurs when the variable is declared in a type switch statement
// or is an implicit package name. Both implicits are local to a file. // or is an implicit package name.
references = append(references, &ReferenceInfo{ references = append(references, &ReferenceInfo{
Name: i.decl.obj.Name(), Name: i.decl.obj.Name(),
Range: i.decl.rng, Range: i.decl.rng,
obj: i.decl.obj, obj: i.decl.obj,
pkg: pkg,
isDeclaration: true, isDeclaration: true,
}) })
} }
for ident, obj := range info.Defs { for ident, obj := range info.Defs {
if obj == nil || !sameObj(obj, i.decl.obj) { if obj == nil || obj.Pos() != i.decl.obj.Pos() {
continue continue
} }
// Add the declarations at the beginning of the references list. references = append(references, &ReferenceInfo{
references = append([]*ReferenceInfo{&ReferenceInfo{ Name: ident.Name,
Name: ident.Name, Range: span.NewRange(i.File.FileSet(), ident.Pos(), ident.End()),
Range: span.NewRange(i.File.FileSet(), ident.Pos(), ident.End()), ident: ident,
ident: ident, obj: obj,
obj: obj, isDeclaration: true,
pkg: pkg, })
isDeclaration: true, }
}}, references...) for ident, obj := range info.Uses {
} if obj == nil || obj.Pos() != i.decl.obj.Pos() {
for ident, obj := range info.Uses {
if obj == nil || !sameObj(obj, i.decl.obj) {
continue continue
} }
references = append(references, &ReferenceInfo{ references = append(references, &ReferenceInfo{
Name: ident.Name, Name: ident.Name,
Range: span.NewRange(i.File.FileSet(), ident.Pos(), ident.End()), Range: span.NewRange(i.File.FileSet(), ident.Pos(), ident.End()),
ident: ident, ident: ident,
pkg: pkg,
obj: obj, obj: obj,
}) })
} }
}
return references, nil return references, nil
} }
// sameObj returns true if obj is the same as declObj.
// Objects are the same if they have the same Pos and Name.
func sameObj(obj, declObj types.Object) bool {
// TODO(suzmue): support the case where an identifier may have two different
// declaration positions.
return obj.Pos() == declObj.Pos() && obj.Name() == declObj.Name()
}
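
Aside: the reference search above walks info.Defs and info.Uses and matches objects by declaration position (sameObj above also compares names). A self-contained sketch of that approach on a toy package, not the gopls code path itself:

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"
)

const src = `package p
var x = 1
func f() int { return x + x }
`

func main() {
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	info := &types.Info{
		Defs: make(map[*ast.Ident]types.Object),
		Uses: make(map[*ast.Ident]types.Object),
	}
	if _, err := (&types.Config{}).Check("p", fset, []*ast.File{file}, info); err != nil {
		panic(err)
	}
	// Pick the object declared as "x".
	var target types.Object
	for id, obj := range info.Defs {
		if obj != nil && id.Name == "x" {
			target = obj
		}
	}
	if target == nil {
		panic("x not found")
	}
	// Declarations first, then uses, matched by position.
	for id, obj := range info.Defs {
		if obj != nil && obj.Pos() == target.Pos() {
			fmt.Println("decl:", fset.Position(id.Pos()))
		}
	}
	for id, obj := range info.Uses {
		if obj != nil && obj.Pos() == target.Pos() {
			fmt.Println("use: ", fset.Position(id.Pos()))
		}
	}
}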

View File

@ -5,17 +5,14 @@
package source package source
import ( import (
"bytes"
"context" "context"
"fmt" "fmt"
"go/ast" "go/ast"
"go/format"
"go/token" "go/token"
"go/types" "go/types"
"regexp" "regexp"
"golang.org/x/tools/go/types/typeutil" "golang.org/x/tools/go/types/typeutil"
"golang.org/x/tools/internal/lsp/telemetry/trace"
"golang.org/x/tools/internal/span" "golang.org/x/tools/internal/span"
"golang.org/x/tools/refactor/satisfy" "golang.org/x/tools/refactor/satisfy"
) )
@ -37,9 +34,6 @@ type renamer struct {
// Rename returns a map of TextEdits for each file modified when renaming a given identifier within a package. // Rename returns a map of TextEdits for each file modified when renaming a given identifier within a package.
func (i *IdentifierInfo) Rename(ctx context.Context, newName string) (map[span.URI][]TextEdit, error) { func (i *IdentifierInfo) Rename(ctx context.Context, newName string) (map[span.URI][]TextEdit, error) {
ctx, done := trace.StartSpan(ctx, "source.Rename")
defer done()
if i.Name == newName { if i.Name == newName {
return nil, fmt.Errorf("old and new names are the same: %s", newName) return nil, fmt.Errorf("old and new names are the same: %s", newName)
} }
@ -47,25 +41,25 @@ func (i *IdentifierInfo) Rename(ctx context.Context, newName string) (map[span.U
return nil, fmt.Errorf("invalid identifier to rename: %q", i.Name) return nil, fmt.Errorf("invalid identifier to rename: %q", i.Name)
} }
// Do not rename identifiers declared in another package.
if i.pkg == nil || i.pkg.IsIllTyped() { if i.pkg == nil || i.pkg.IsIllTyped() {
return nil, fmt.Errorf("package for %s is ill typed", i.File.URI()) return nil, fmt.Errorf("package for %s is ill typed", i.File.URI())
} }
// Do not rename builtin identifiers.
if i.decl.obj.Parent() == types.Universe {
return nil, fmt.Errorf("cannot rename builtin %q", i.Name)
}
// Do not rename identifiers declared in another package.
if i.pkg.GetTypes() != i.decl.obj.Pkg() { if i.pkg.GetTypes() != i.decl.obj.Pkg() {
return nil, fmt.Errorf("failed to rename because %q is declared in package %q", i.Name, i.decl.obj.Pkg().Name()) return nil, fmt.Errorf("failed to rename because %q is declared in package %q", i.Name, i.decl.obj.Pkg().Name())
} }
// TODO(suzmue): Support renaming of imported packages.
if _, ok := i.decl.obj.(*types.PkgName); ok {
return nil, fmt.Errorf("renaming imported package %s not supported", i.Name)
}
refs, err := i.References(ctx) refs, err := i.References(ctx)
if err != nil { if err != nil {
return nil, err return nil, err
} }
r := renamer{ r := renamer{
ctx: ctx,
fset: i.File.FileSet(), fset: i.File.FileSet(),
pkg: i.pkg, pkg: i.pkg,
refs: refs, refs: refs,
@ -74,9 +68,7 @@ func (i *IdentifierInfo) Rename(ctx context.Context, newName string) (map[span.U
to: newName, to: newName,
packages: make(map[*types.Package]Package), packages: make(map[*types.Package]Package),
} }
for _, from := range refs { r.packages[i.pkg.GetTypes()] = i.pkg
r.packages[from.pkg.GetTypes()] = from.pkg
}
// Check that the renaming of the identifier is ok. // Check that the renaming of the identifier is ok.
for _, from := range refs { for _, from := range refs {
@ -86,22 +78,12 @@ func (i *IdentifierInfo) Rename(ctx context.Context, newName string) (map[span.U
return nil, fmt.Errorf(r.errors) return nil, fmt.Errorf(r.errors)
} }
changes, err := r.update() return r.update(ctx)
if err != nil {
return nil, err
}
// Sort edits for each file.
for _, edits := range changes {
sortTextEdits(edits)
}
return changes, nil
} }
// Rename all references to the identifier. // Rename all references to the identifier.
func (r *renamer) update() (map[span.URI][]TextEdit, error) { func (r *renamer) update(ctx context.Context) (map[span.URI][]TextEdit, error) {
result := make(map[span.URI][]TextEdit) result := make(map[span.URI][]TextEdit)
seen := make(map[span.Span]bool)
docRegexp, err := regexp.Compile(`\b` + r.from + `\b`) docRegexp, err := regexp.Compile(`\b` + r.from + `\b`)
if err != nil { if err != nil {
@ -112,40 +94,23 @@ func (r *renamer) update() (map[span.URI][]TextEdit, error) {
if err != nil { if err != nil {
return nil, err return nil, err
} }
if seen[refSpan] {
continue
}
seen[refSpan] = true
// Renaming a types.PkgName may result in the addition or removal of an identifier,
// so we deal with this separately.
if pkgName, ok := ref.obj.(*types.PkgName); ok && ref.isDeclaration {
edit, err := r.updatePkgName(pkgName)
if err != nil {
return nil, err
}
result[refSpan.URI()] = append(result[refSpan.URI()], *edit)
continue
}
// Replace the identifier with r.to.
edit := TextEdit{ edit := TextEdit{
Span: refSpan, Span: refSpan,
NewText: r.to, NewText: r.to,
} }
result[refSpan.URI()] = append(result[refSpan.URI()], edit) result[refSpan.URI()] = append(result[refSpan.URI()], edit)
if !ref.isDeclaration || ref.ident == nil { // uses do not have doc comments to update. if !ref.isDeclaration { // not a declaration
continue continue
} }
doc := r.docComment(r.pkg, ref.ident) doc := r.docComment(r.pkg, ref.ident)
if doc == nil { if doc == nil { // no doc comment
continue continue
} }
// Perform the rename in doc comments declared in the original package. // Perform the rename in doc comments declared in the original package
for _, comment := range doc.List { for _, comment := range doc.List {
for _, locs := range docRegexp.FindAllStringIndex(comment.Text, -1) { for _, locs := range docRegexp.FindAllStringIndex(comment.Text, -1) {
rng := span.NewRange(r.fset, comment.Pos()+token.Pos(locs[0]), comment.Pos()+token.Pos(locs[1])) rng := span.NewRange(r.fset, comment.Pos()+token.Pos(locs[0]), comment.Pos()+token.Pos(locs[1]))
@ -153,7 +118,7 @@ func (r *renamer) update() (map[span.URI][]TextEdit, error) {
if err != nil { if err != nil {
return nil, err return nil, err
} }
result[spn.URI()] = append(result[spn.URI()], TextEdit{ result[refSpan.URI()] = append(result[refSpan.URI()], TextEdit{
Span: spn, Span: spn,
NewText: r.to, NewText: r.to,
}) })
@ -192,46 +157,3 @@ func (r *renamer) docComment(pkg Package, id *ast.Ident) *ast.CommentGroup {
} }
return nil return nil
} }
// updatePkgName returns the updates to rename a pkgName in the import spec
func (r *renamer) updatePkgName(pkgName *types.PkgName) (*TextEdit, error) {
// Modify ImportSpec syntax to add or remove the Name as needed.
pkg := r.packages[pkgName.Pkg()]
_, path, _ := pathEnclosingInterval(r.ctx, r.fset, pkg, pkgName.Pos(), pkgName.Pos())
if len(path) < 2 {
return nil, fmt.Errorf("failed to update PkgName for %s", pkgName.Name())
}
spec, ok := path[1].(*ast.ImportSpec)
if !ok {
return nil, fmt.Errorf("failed to update PkgName for %s", pkgName.Name())
}
var astIdent *ast.Ident // will be nil if ident is removed
if pkgName.Imported().Name() != r.to {
// ImportSpec.Name needed
astIdent = &ast.Ident{NamePos: spec.Path.Pos(), Name: r.to}
}
// Make a copy of the import spec that keeps only the name and path.
updated := &ast.ImportSpec{
Name: astIdent,
Path: spec.Path,
EndPos: spec.EndPos,
}
rng := span.NewRange(r.fset, spec.Pos(), spec.End())
spn, err := rng.Span()
if err != nil {
return nil, err
}
var buf bytes.Buffer
format.Node(&buf, r.fset, updated)
newText := buf.String()
return &TextEdit{
Span: spn,
NewText: newText,
}, nil
}
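
Aside: updatePkgName above re-prints a synthesized *ast.ImportSpec and uses the result as the replacement text. A standalone sketch of that re-printing step (the package path and local name are illustrative only):

package main

import (
	"bytes"
	"fmt"
	"go/ast"
	"go/format"
	"go/token"
)

func main() {
	// An import spec with an explicit local name; set Name to nil to drop it.
	spec := &ast.ImportSpec{
		Name: &ast.Ident{Name: "mylog"},
		Path: &ast.BasicLit{Kind: token.STRING, Value: `"log"`},
	}
	var buf bytes.Buffer
	if err := format.Node(&buf, token.NewFileSet(), spec); err != nil {
		panic(err)
	}
	fmt.Println(buf.String()) // mylog "log"
}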

View File

@ -113,7 +113,7 @@ func (r *renamer) checkInPackageBlock(from types.Object) {
} }
// Check for conflicts between package block and all file blocks. // Check for conflicts between package block and all file blocks.
for _, f := range pkg.GetSyntax(r.ctx) { for _, f := range pkg.GetSyntax() {
fileScope := pkg.GetTypesInfo().Scopes[f] fileScope := pkg.GetTypesInfo().Scopes[f]
b, prev := fileScope.LookupParent(r.to, token.NoPos) b, prev := fileScope.LookupParent(r.to, token.NoPos)
if b == fileScope { if b == fileScope {
@ -328,7 +328,7 @@ func forEachLexicalRef(ctx context.Context, pkg Package, obj types.Object, fn fu
return true return true
} }
for _, f := range pkg.GetSyntax(ctx) { for _, f := range pkg.GetSyntax() {
ast.Inspect(f, visit) ast.Inspect(f, visit)
if len(stack) != 0 { if len(stack) != 0 {
panic(stack) panic(stack)
@ -789,20 +789,7 @@ func (r *renamer) satisfy() map[satisfy.Constraint]bool {
// Compute on demand: it's expensive. // Compute on demand: it's expensive.
var f satisfy.Finder var f satisfy.Finder
for _, pkg := range r.packages { for _, pkg := range r.packages {
// From satisfy.Finder documentation: f.Find(pkg.GetTypesInfo(), pkg.GetSyntax())
//
// The package must be free of type errors, and
// info.{Defs,Uses,Selections,Types} must have been populated by the
// type-checker.
//
// Only proceed if all packages have no errors.
if errs := pkg.GetErrors(); len(errs) > 0 {
r.errorf(token.NoPos, // we don't have a position for this error.
"renaming %q to %q not possible because %q has errors",
r.from, r.to, pkg.PkgPath())
return nil
}
f.Find(pkg.GetTypesInfo(), pkg.GetSyntax(r.ctx))
} }
r.satisfyConstraints = f.Result r.satisfyConstraints = f.Result
} }
@ -835,7 +822,7 @@ func someUse(info *types.Info, obj types.Object) *ast.Ident {
// //
func pathEnclosingInterval(ctx context.Context, fset *token.FileSet, pkg Package, start, end token.Pos) (resPkg Package, path []ast.Node, exact bool) { func pathEnclosingInterval(ctx context.Context, fset *token.FileSet, pkg Package, start, end token.Pos) (resPkg Package, path []ast.Node, exact bool) {
var pkgs = []Package{pkg} var pkgs = []Package{pkg}
for _, f := range pkg.GetSyntax(ctx) { for _, f := range pkg.GetSyntax() {
for _, imp := range f.Imports { for _, imp := range f.Imports {
if imp == nil { if imp == nil {
continue continue
@ -848,7 +835,7 @@ func pathEnclosingInterval(ctx context.Context, fset *token.FileSet, pkg Package
} }
} }
for _, p := range pkgs { for _, p := range pkgs {
for _, f := range p.GetSyntax(ctx) { for _, f := range p.GetSyntax() {
if f.Pos() == token.NoPos { if f.Pos() == token.NoPos {
// This can happen if the parser saw // This can happen if the parser saw
// too many errors and bailed out. // too many errors and bailed out.

View File

@ -12,7 +12,6 @@ import (
"go/types" "go/types"
"golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/go/ast/astutil"
"golang.org/x/tools/internal/lsp/telemetry/trace"
) )
type SignatureInformation struct { type SignatureInformation struct {
@ -26,12 +25,9 @@ type ParameterInformation struct {
} }
func SignatureHelp(ctx context.Context, f GoFile, pos token.Pos) (*SignatureInformation, error) { func SignatureHelp(ctx context.Context, f GoFile, pos token.Pos) (*SignatureInformation, error) {
ctx, done := trace.StartSpan(ctx, "source.SignatureHelp") file := f.GetAST(ctx)
defer done()
file, err := f.GetAST(ctx, ParseFull)
if file == nil { if file == nil {
return nil, err return nil, fmt.Errorf("no AST for %s", f.URI())
} }
pkg := f.GetPackage(ctx) pkg := f.GetPackage(ctx)
if pkg == nil || pkg.IsIllTyped() { if pkg == nil || pkg.IsIllTyped() {
@ -44,19 +40,10 @@ func SignatureHelp(ctx context.Context, f GoFile, pos token.Pos) (*SignatureInfo
if path == nil { if path == nil {
return nil, fmt.Errorf("cannot find node enclosing position") return nil, fmt.Errorf("cannot find node enclosing position")
} }
FindCall:
for _, node := range path { for _, node := range path {
switch node := node.(type) { if c, ok := node.(*ast.CallExpr); ok && pos >= c.Lparen && pos <= c.Rparen {
case *ast.CallExpr: callExpr = c
if pos >= node.Lparen && pos <= node.Rparen { break
callExpr = node
break FindCall
}
case *ast.FuncLit, *ast.FuncType:
// The user is within an anonymous function,
// which may be the parameter to the *ast.CallExpr.
// Don't show signature help in this case.
return nil, fmt.Errorf("no signature help within a function declaration")
} }
} }
if callExpr == nil || callExpr.Fun == nil { if callExpr == nil || callExpr.Fun == nil {
@ -153,11 +140,14 @@ func signatureInformation(name string, comment *ast.CommentGroup, params, result
for _, p := range params { for _, p := range params {
paramInfo = append(paramInfo, ParameterInformation{Label: p}) paramInfo = append(paramInfo, ParameterInformation{Label: p})
} }
label := name + formatFunction(params, results, writeResultParens) label, detail := formatFunction(name, params, results, writeResultParens)
// Show return values of the function in the label.
if detail != "" {
label += " " + detail
}
return &SignatureInformation{ return &SignatureInformation{
Label: label, Label: label,
// TODO: Should we have the HoverKind apply to signature information as well? Documentation: formatDocumentation(comment),
Documentation: formatDocumentation(comment, SynopsisDocumentation),
Parameters: paramInfo, Parameters: paramInfo,
ActiveParameter: activeParam, ActiveParameter: activeParam,
} }
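
Aside: the call detection above walks the enclosing node path and stops at the first *ast.CallExpr whose parentheses bracket the position. A self-contained sketch using astutil.PathEnclosingInterval; the source text and cursor placement are made up, and the function-literal bailout above is omitted:

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"strings"

	"golang.org/x/tools/go/ast/astutil"
)

const src = `package p
func g(a, b int) int { return a + b }
var _ = g(1, 2)
`

func main() {
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	// Place the cursor between the parentheses of g(1, 2).
	pos := fset.File(file.Pos()).Pos(strings.Index(src, "1, 2"))
	path, _ := astutil.PathEnclosingInterval(file, pos, pos)
	for _, node := range path {
		if call, ok := node.(*ast.CallExpr); ok && pos >= call.Lparen && pos <= call.Rparen {
			fmt.Println("inside call at", fset.Position(call.Lparen))
			return
		}
	}
	fmt.Println("not inside a call")
}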

View File

@ -9,7 +9,6 @@ import (
"context" "context"
"fmt" "fmt"
"os/exec" "os/exec"
"path/filepath"
"sort" "sort"
"strings" "strings"
"testing" "testing"
@ -19,6 +18,7 @@ import (
"golang.org/x/tools/internal/lsp/diff" "golang.org/x/tools/internal/lsp/diff"
"golang.org/x/tools/internal/lsp/source" "golang.org/x/tools/internal/lsp/source"
"golang.org/x/tools/internal/lsp/tests" "golang.org/x/tools/internal/lsp/tests"
"golang.org/x/tools/internal/lsp/xlog"
"golang.org/x/tools/internal/span" "golang.org/x/tools/internal/span"
) )
@ -29,20 +29,18 @@ func TestSource(t *testing.T) {
type runner struct { type runner struct {
view source.View view source.View
data *tests.Data data *tests.Data
ctx context.Context
} }
func testSource(t *testing.T, exporter packagestest.Exporter) { func testSource(t *testing.T, exporter packagestest.Exporter) {
ctx := tests.Context(t)
data := tests.Load(t, exporter, "../testdata") data := tests.Load(t, exporter, "../testdata")
defer data.Exported.Cleanup() defer data.Exported.Cleanup()
log := xlog.New(xlog.StdSink{})
cache := cache.New() cache := cache.New()
session := cache.NewSession(ctx) session := cache.NewSession(log)
r := &runner{ r := &runner{
view: session.NewView(ctx, "source_test", span.FileURI(data.Config.Dir)), view: session.NewView("source_test", span.FileURI(data.Config.Dir)),
data: data, data: data,
ctx: ctx,
} }
r.view.SetEnv(data.Config.Env) r.view.SetEnv(data.Config.Env)
for filename, content := range data.Config.Overlay { for filename, content := range data.Config.Overlay {
@ -53,11 +51,11 @@ func testSource(t *testing.T, exporter packagestest.Exporter) {
func (r *runner) Diagnostics(t *testing.T, data tests.Diagnostics) { func (r *runner) Diagnostics(t *testing.T, data tests.Diagnostics) {
for uri, want := range data { for uri, want := range data {
f, err := r.view.GetFile(r.ctx, uri) f, err := r.view.GetFile(context.Background(), uri)
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
results, err := source.Diagnostics(r.ctx, r.view, f.(source.GoFile), nil) results, err := source.Diagnostics(context.Background(), r.view, f.(source.GoFile), nil)
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
@ -133,7 +131,7 @@ func summarizeDiagnostics(i int, want []source.Diagnostic, got []source.Diagnost
} }
func (r *runner) Completion(t *testing.T, data tests.Completions, snippets tests.CompletionSnippets, items tests.CompletionItems) { func (r *runner) Completion(t *testing.T, data tests.Completions, snippets tests.CompletionSnippets, items tests.CompletionItems) {
ctx := r.ctx ctx := context.Background()
for src, itemList := range data { for src, itemList := range data {
var want []source.CompletionItem var want []source.CompletionItem
for _, pos := range itemList { for _, pos := range itemList {
@ -143,9 +141,9 @@ func (r *runner) Completion(t *testing.T, data tests.Completions, snippets tests
if err != nil { if err != nil {
t.Fatalf("failed for %v: %v", src, err) t.Fatalf("failed for %v: %v", src, err)
} }
tok, err := f.(source.GoFile).GetToken(ctx) tok := f.(source.GoFile).GetToken(ctx)
if err != nil { if tok == nil {
t.Fatalf("failed to get token for %s: %v", src.URI(), err) t.Fatalf("failed to get token for %v", src)
} }
pos := tok.Pos(src.Start().Offset()) pos := tok.Pos(src.Start().Offset())
list, surrounding, err := source.Completion(ctx, r.view, f.(source.GoFile), pos, source.CompletionOptions{ list, surrounding, err := source.Completion(ctx, r.view, f.(source.GoFile), pos, source.CompletionOptions{
@ -181,10 +179,7 @@ func (r *runner) Completion(t *testing.T, data tests.Completions, snippets tests
if err != nil { if err != nil {
t.Fatalf("failed for %v: %v", src, err) t.Fatalf("failed for %v: %v", src, err)
} }
tok, err := f.(source.GoFile).GetToken(ctx) tok := f.GetToken(ctx)
if err != nil {
t.Fatalf("failed to get token for %s: %v", src.URI(), err)
}
pos := tok.Pos(src.Start().Offset()) pos := tok.Pos(src.Start().Offset())
list, _, err := source.Completion(ctx, r.view, f.(source.GoFile), pos, source.CompletionOptions{ list, _, err := source.Completion(ctx, r.view, f.(source.GoFile), pos, source.CompletionOptions{
DeepComplete: strings.Contains(string(src.URI()), "deepcomplete"), DeepComplete: strings.Contains(string(src.URI()), "deepcomplete"),
@ -293,7 +288,7 @@ func summarizeCompletionItems(i int, want []source.CompletionItem, got []source.
} }
func (r *runner) Format(t *testing.T, data tests.Formats) { func (r *runner) Format(t *testing.T, data tests.Formats) {
ctx := r.ctx ctx := context.Background()
for _, spn := range data { for _, spn := range data {
uri := spn.URI() uri := spn.URI()
filename := uri.Filename() filename := uri.Filename()
@ -306,11 +301,7 @@ func (r *runner) Format(t *testing.T, data tests.Formats) {
if err != nil { if err != nil {
t.Fatalf("failed for %v: %v", spn, err) t.Fatalf("failed for %v: %v", spn, err)
} }
tok, err := f.(source.GoFile).GetToken(ctx) rng, err := spn.Range(span.NewTokenConverter(f.FileSet(), f.GetToken(ctx)))
if err != nil {
t.Fatalf("failed to get token for %s: %v", spn.URI(), err)
}
rng, err := spn.Range(span.NewTokenConverter(f.FileSet(), tok))
if err != nil { if err != nil {
t.Fatalf("failed for %v: %v", spn, err) t.Fatalf("failed for %v: %v", spn, err)
} }
@ -335,7 +326,7 @@ func (r *runner) Format(t *testing.T, data tests.Formats) {
} }
func (r *runner) Import(t *testing.T, data tests.Imports) { func (r *runner) Import(t *testing.T, data tests.Imports) {
ctx := r.ctx ctx := context.Background()
for _, spn := range data { for _, spn := range data {
uri := spn.URI() uri := spn.URI()
filename := uri.Filename() filename := uri.Filename()
@ -348,15 +339,11 @@ func (r *runner) Import(t *testing.T, data tests.Imports) {
if err != nil { if err != nil {
t.Fatalf("failed for %v: %v", spn, err) t.Fatalf("failed for %v: %v", spn, err)
} }
tok, err := f.(source.GoFile).GetToken(ctx) rng, err := spn.Range(span.NewTokenConverter(f.FileSet(), f.GetToken(ctx)))
if err != nil {
t.Fatalf("failed to get token for %s: %v", spn.URI(), err)
}
rng, err := spn.Range(span.NewTokenConverter(f.FileSet(), tok))
if err != nil { if err != nil {
t.Fatalf("failed for %v: %v", spn, err) t.Fatalf("failed for %v: %v", spn, err)
} }
edits, err := source.Imports(ctx, r.view, f.(source.GoFile), rng) edits, err := source.Imports(ctx, f.(source.GoFile), rng)
if err != nil { if err != nil {
if goimported != "" { if goimported != "" {
t.Error(err) t.Error(err)
@ -377,22 +364,19 @@ func (r *runner) Import(t *testing.T, data tests.Imports) {
} }
func (r *runner) Definition(t *testing.T, data tests.Definitions) { func (r *runner) Definition(t *testing.T, data tests.Definitions) {
ctx := r.ctx ctx := context.Background()
for _, d := range data { for _, d := range data {
f, err := r.view.GetFile(ctx, d.Src.URI()) f, err := r.view.GetFile(ctx, d.Src.URI())
if err != nil { if err != nil {
t.Fatalf("failed for %v: %v", d.Src, err) t.Fatalf("failed for %v: %v", d.Src, err)
} }
tok, err := f.(source.GoFile).GetToken(ctx) tok := f.GetToken(ctx)
if err != nil {
t.Fatalf("failed to get token for %s: %v", d.Src.URI(), err)
}
pos := tok.Pos(d.Src.Start().Offset()) pos := tok.Pos(d.Src.Start().Offset())
ident, err := source.Identifier(ctx, r.view, f.(source.GoFile), pos) ident, err := source.Identifier(ctx, r.view, f.(source.GoFile), pos)
if err != nil { if err != nil {
t.Fatalf("failed for %v: %v", d.Src, err) t.Fatalf("failed for %v: %v", d.Src, err)
} }
hover, err := ident.Hover(ctx, false, source.SynopsisDocumentation) hover, err := ident.Hover(ctx, false, true)
if err != nil { if err != nil {
t.Fatalf("failed for %v: %v", d.Src, err) t.Fatalf("failed for %v: %v", d.Src, err)
} }
@ -422,17 +406,14 @@ func (r *runner) Definition(t *testing.T, data tests.Definitions) {
} }
func (r *runner) Highlight(t *testing.T, data tests.Highlights) { func (r *runner) Highlight(t *testing.T, data tests.Highlights) {
ctx := r.ctx ctx := context.Background()
for name, locations := range data { for name, locations := range data {
src := locations[0] src := locations[0]
f, err := r.view.GetFile(ctx, src.URI()) f, err := r.view.GetFile(ctx, src.URI())
if err != nil { if err != nil {
t.Fatalf("failed for %v: %v", src, err) t.Fatalf("failed for %v: %v", src, err)
} }
tok, err := f.(source.GoFile).GetToken(ctx) tok := f.GetToken(ctx)
if err != nil {
t.Fatalf("failed to get token for %s: %v", src.URI(), err)
}
pos := tok.Pos(src.Start().Offset()) pos := tok.Pos(src.Start().Offset())
highlights, err := source.Highlight(ctx, f.(source.GoFile), pos) highlights, err := source.Highlight(ctx, f.(source.GoFile), pos)
if err != nil { if err != nil {
@ -450,16 +431,14 @@ func (r *runner) Highlight(t *testing.T, data tests.Highlights) {
} }
func (r *runner) Reference(t *testing.T, data tests.References) { func (r *runner) Reference(t *testing.T, data tests.References) {
ctx := r.ctx ctx := context.Background()
for src, itemList := range data { for src, itemList := range data {
f, err := r.view.GetFile(ctx, src.URI()) f, err := r.view.GetFile(ctx, src.URI())
if err != nil { if err != nil {
t.Fatalf("failed for %v: %v", src, err) t.Fatalf("failed for %v: %v", src, err)
} }
tok, err := f.(source.GoFile).GetToken(ctx)
if err != nil { tok := f.GetToken(ctx)
t.Fatalf("failed to get token for %s: %v", src.URI(), err)
}
pos := tok.Pos(src.Start().Offset()) pos := tok.Pos(src.Start().Offset())
ident, err := source.Identifier(ctx, r.view, f.(source.GoFile), pos) ident, err := source.Identifier(ctx, r.view, f.(source.GoFile), pos)
if err != nil { if err != nil {
@ -471,26 +450,20 @@ func (r *runner) Reference(t *testing.T, data tests.References) {
want[pos] = true want[pos] = true
} }
refs, err := ident.References(ctx) got, err := ident.References(ctx)
if err != nil { if err != nil {
t.Fatalf("failed for %v: %v", src, err) t.Fatalf("failed for %v: %v", src, err)
} }
got := make(map[span.Span]bool) if len(got) != len(itemList) {
for _, refInfo := range refs { t.Errorf("references failed: different lengths got %v want %v", len(got), len(itemList))
}
for _, refInfo := range got {
refSpan, err := refInfo.Range.Span() refSpan, err := refInfo.Range.Span()
if err != nil { if err != nil {
t.Errorf("failed for %v item %v: %v", src, refInfo.Name, err) t.Errorf("failed for %v item %v: %v", src, refInfo.Name, err)
} }
got[refSpan] = true if !want[refSpan] {
}
if len(got) != len(want) {
t.Errorf("references failed: different lengths got %v want %v", len(got), len(want))
}
for spn, _ := range got {
if !want[spn] {
t.Errorf("references failed: incorrect references got %v want locations %v", got, want) t.Errorf("references failed: incorrect references got %v want locations %v", got, want)
} }
} }
@ -498,76 +471,56 @@ func (r *runner) Reference(t *testing.T, data tests.References) {
} }
func (r *runner) Rename(t *testing.T, data tests.Renames) { func (r *runner) Rename(t *testing.T, data tests.Renames) {
ctx := r.ctx ctx := context.Background()
for spn, newText := range data { for spn, newText := range data {
tag := fmt.Sprintf("%s-rename", newText)
f, err := r.view.GetFile(ctx, spn.URI()) f, err := r.view.GetFile(ctx, spn.URI())
if err != nil { if err != nil {
t.Fatalf("failed for %v: %v", spn, err) t.Fatalf("failed for %v: %v", spn, err)
} }
tok, err := f.(source.GoFile).GetToken(ctx) tok := f.GetToken(ctx)
if err != nil {
t.Fatalf("failed to get token for %s: %v", spn.URI(), err)
}
pos := tok.Pos(spn.Start().Offset()) pos := tok.Pos(spn.Start().Offset())
ident, err := source.Identifier(r.ctx, r.view, f.(source.GoFile), pos) ident, err := source.Identifier(context.Background(), r.view, f.(source.GoFile), pos)
if err != nil {
t.Error(err)
}
changes, err := ident.Rename(context.Background(), newText)
if err != nil { if err != nil {
t.Error(err) t.Error(err)
continue continue
} }
changes, err := ident.Rename(r.ctx, newText)
if err != nil { if len(changes) != 1 { // Renames must only affect a single file in these tests.
renamed := string(r.data.Golden(tag, spn.URI().Filename(), func() ([]byte, error) { t.Errorf("rename failed for %s, edited %d files, wanted 1 file", newText, len(changes))
return []byte(err.Error()), nil
}))
if err.Error() != renamed {
t.Errorf("rename failed for %s, expected:\n%v\ngot:\n%v\n", newText, renamed, err)
}
continue continue
} }
var res []string edits := changes[spn.URI()]
for editSpn, edits := range changes { if edits == nil {
f, err := r.view.GetFile(ctx, editSpn) t.Errorf("rename failed for %s, did not edit %s", newText, spn.URI())
if err != nil { continue
t.Fatalf("failed for %v: %v", spn, err)
} }
data, _, err := f.Handle(ctx).Read(ctx) data, _, err := f.Handle(ctx).Read(ctx)
if err != nil { if err != nil {
t.Error(err) t.Error(err)
continue continue
} }
filename := filepath.Base(editSpn.Filename())
contents := applyEdits(string(data), edits)
res = append(res, fmt.Sprintf("%s:\n%s", filename, contents))
}
// Sort on filename got := applyEdits(string(data), edits)
sort.Strings(res) tag := fmt.Sprintf("%s-rename", newText)
gorenamed := string(r.data.Golden(tag, spn.URI().Filename(), func() ([]byte, error) {
var got string
for i, val := range res {
if i != 0 {
got += "\n"
}
got += val
}
renamed := string(r.data.Golden(tag, spn.URI().Filename(), func() ([]byte, error) {
return []byte(got), nil return []byte(got), nil
})) }))
if renamed != got { if gorenamed != got {
t.Errorf("rename failed for %s, expected:\n%v\ngot:\n%v", newText, renamed, got) t.Errorf("rename failed for %s, expected:\n%v\ngot:\n%v", newText, gorenamed, got)
} }
} }
} }
func applyEdits(contents string, edits []source.TextEdit) string { func applyEdits(contents string, edits []source.TextEdit) string {
res := contents res := contents
sortSourceTextEdits(edits)
// Apply the edits from the end of the file forward // Apply the edits from the end of the file forward
// to preserve the offsets // to preserve the offsets
@ -581,8 +534,17 @@ func applyEdits(contents string, edits []source.TextEdit) string {
return res return res
} }
func sortSourceTextEdits(d []source.TextEdit) {
sort.Slice(d, func(i int, j int) bool {
if r := span.Compare(d[i].Span, d[j].Span); r != 0 {
return r < 0
}
return d[i].NewText < d[j].NewText
})
}
func (r *runner) Symbol(t *testing.T, data tests.Symbols) { func (r *runner) Symbol(t *testing.T, data tests.Symbols) {
ctx := r.ctx ctx := context.Background()
for uri, expectedSymbols := range data { for uri, expectedSymbols := range data {
f, err := r.view.GetFile(ctx, uri) f, err := r.view.GetFile(ctx, uri)
if err != nil { if err != nil {
@ -646,46 +608,37 @@ func summarizeSymbols(i int, want []source.Symbol, got []source.Symbol, reason s
} }
func (r *runner) SignatureHelp(t *testing.T, data tests.Signatures) { func (r *runner) SignatureHelp(t *testing.T, data tests.Signatures) {
ctx := r.ctx ctx := context.Background()
for spn, expectedSignature := range data { for spn, expectedSignatures := range data {
f, err := r.view.GetFile(ctx, spn.URI()) f, err := r.view.GetFile(ctx, spn.URI())
if err != nil { if err != nil {
t.Fatalf("failed for %v: %v", spn, err) t.Fatalf("failed for %v: %v", spn, err)
} }
tok, err := f.(source.GoFile).GetToken(ctx) tok := f.GetToken(ctx)
if err != nil {
t.Fatalf("failed to get token for %s: %v", spn.URI(), err)
}
pos := tok.Pos(spn.Start().Offset()) pos := tok.Pos(spn.Start().Offset())
gotSignature, err := source.SignatureHelp(ctx, f.(source.GoFile), pos) gotSignature, err := source.SignatureHelp(ctx, f.(source.GoFile), pos)
if err != nil { if err != nil {
// Only fail if we got an error we did not expect.
if expectedSignature != nil {
t.Fatalf("failed for %v: %v", spn, err) t.Fatalf("failed for %v: %v", spn, err)
} }
} if diff := diffSignatures(spn, expectedSignatures, *gotSignature); diff != "" {
if expectedSignature == nil {
if gotSignature != nil {
t.Errorf("expected no signature, got %v", gotSignature)
}
continue
}
if diff := diffSignatures(spn, expectedSignature, gotSignature); diff != "" {
t.Error(diff) t.Error(diff)
} }
} }
} }
func diffSignatures(spn span.Span, want *source.SignatureInformation, got *source.SignatureInformation) string { func diffSignatures(spn span.Span, want source.SignatureInformation, got source.SignatureInformation) string {
decorate := func(f string, args ...interface{}) string { decorate := func(f string, args ...interface{}) string {
return fmt.Sprintf("Invalid signature at %s: %s", spn, fmt.Sprintf(f, args...)) return fmt.Sprintf("Invalid signature at %s: %s", spn, fmt.Sprintf(f, args...))
} }
if want.ActiveParameter != got.ActiveParameter { if want.ActiveParameter != got.ActiveParameter {
return decorate("wanted active parameter of %d, got %f", want.ActiveParameter, got.ActiveParameter) return decorate("wanted active parameter of %d, got %f", want.ActiveParameter, got.ActiveParameter)
} }
if want.Label != got.Label { if want.Label != got.Label {
return decorate("wanted label %q, got %q", want.Label, got.Label) return decorate("wanted label %q, got %q", want.Label, got.Label)
} }
var paramParts []string var paramParts []string
for _, p := range got.Parameters { for _, p := range got.Parameters {
paramParts = append(paramParts, p.Label) paramParts = append(paramParts, p.Label)
@ -694,9 +647,10 @@ func diffSignatures(spn span.Span, want *source.SignatureInformation, got *sourc
if !strings.Contains(got.Label, paramsStr) { if !strings.Contains(got.Label, paramsStr) {
return decorate("expected signature %q to contain params %q", got.Label, paramsStr) return decorate("expected signature %q to contain params %q", got.Label, paramsStr)
} }
return "" return ""
} }
func (r *runner) Link(t *testing.T, data tests.Links) { func (r *runner) Link(t *testing.T, data tests.Links) {
// This is a pure LSP feature, no source level functionality to be tested. //This is a pure LSP feature, no source level functionality to be tested
} }
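
Aside: applyEdits above applies edits from the end of the file forward so that earlier offsets stay valid. A small illustration with a simplified edit type (byte offsets rather than spans):

package main

import (
	"fmt"
	"sort"
)

type edit struct {
	start, end int // byte offsets into the original text
	newText    string
}

func apply(contents string, edits []edit) string {
	sort.Slice(edits, func(i, j int) bool { return edits[i].start < edits[j].start })
	res := contents
	// Walk the sorted edits backwards: later replacements cannot shift
	// the offsets of earlier ones.
	for i := len(edits) - 1; i >= 0; i-- {
		e := edits[i]
		res = res[:e.start] + e.newText + res[e.end:]
	}
	return res
}

func main() {
	fmt.Println(apply("abcdef", []edit{{1, 2, "BB"}, {4, 5, "EE"}})) // aBBcdEEf
}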

View File

@ -8,10 +8,10 @@ import (
"golang.org/x/tools/internal/span" "golang.org/x/tools/internal/span"
) )
func getCodeActions(fset *token.FileSet, diag analysis.Diagnostic) ([]SuggestedFixes, error) { func getCodeActions(fset *token.FileSet, diag analysis.Diagnostic) ([]CodeAction, error) {
var cas []SuggestedFixes var cas []CodeAction
for _, fix := range diag.SuggestedFixes { for _, fix := range diag.SuggestedFixes {
var ca SuggestedFixes var ca CodeAction
ca.Title = fix.Message ca.Title = fix.Message
for _, te := range fix.TextEdits { for _, te := range fix.TextEdits {
span, err := span.NewRange(fset, te.Pos, te.End).Span() span, err := span.NewRange(fset, te.Pos, te.End).Span()
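
Aside: getCodeActions above turns each analysis.SuggestedFix into one action carrying the fix's message and text edits. A minimal sketch with a stand-in action type; the diagnostic values are fabricated:

package main

import (
	"fmt"
	"go/token"

	"golang.org/x/tools/go/analysis"
)

type action struct {
	title string
	edits []analysis.TextEdit
}

func toActions(diag analysis.Diagnostic) []action {
	var out []action
	for _, fix := range diag.SuggestedFixes {
		out = append(out, action{title: fix.Message, edits: fix.TextEdits})
	}
	return out
}

func main() {
	diag := analysis.Diagnostic{
		Pos:     token.Pos(1),
		Message: "unused variable",
		SuggestedFixes: []analysis.SuggestedFix{{
			Message:   "remove the declaration",
			TextEdits: []analysis.TextEdit{{Pos: token.Pos(1), End: token.Pos(10)}},
		}},
	}
	for _, a := range toActions(diag) {
		fmt.Println(a.title, len(a.edits))
	}
}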

View File

@ -12,7 +12,6 @@ import (
"go/token" "go/token"
"go/types" "go/types"
"golang.org/x/tools/internal/lsp/telemetry/trace"
"golang.org/x/tools/internal/span" "golang.org/x/tools/internal/span"
) )
@ -42,13 +41,10 @@ type Symbol struct {
} }
func DocumentSymbols(ctx context.Context, f GoFile) ([]Symbol, error) { func DocumentSymbols(ctx context.Context, f GoFile) ([]Symbol, error) {
ctx, done := trace.StartSpan(ctx, "source.DocumentSymbols")
defer done()
fset := f.FileSet() fset := f.FileSet()
file, err := f.GetAST(ctx, ParseFull) file := f.GetAST(ctx)
if file == nil { if file == nil {
return nil, err return nil, fmt.Errorf("no AST for %s", f.URI())
} }
pkg := f.GetPackage(ctx) pkg := f.GetPackage(ctx)
if pkg == nil || pkg.IsIllTyped() { if pkg == nil || pkg.IsIllTyped() {

View File

@ -9,41 +9,9 @@ import (
"go/ast" "go/ast"
"go/token" "go/token"
"go/types" "go/types"
"path/filepath"
"strings" "strings"
) )
func DetectLanguage(langID, filename string) FileKind {
switch langID {
case "go":
return Go
case "go.mod":
return Mod
case "go.sum":
return Sum
}
// Fallback to detecting the language based on the file extension.
switch filepath.Ext(filename) {
case ".mod":
return Mod
case ".sum":
return Sum
default: // fallback to Go
return Go
}
}
func (k FileKind) String() string {
switch k {
case Mod:
return "go.mod"
case Sum:
return "go.sum"
default:
return "go"
}
}
// indexExprAtPos returns the index of the expression containing pos. // indexExprAtPos returns the index of the expression containing pos.
func indexExprAtPos(pos token.Pos, args []ast.Expr) int { func indexExprAtPos(pos token.Pos, args []ast.Expr) int {
for i, expr := range args { for i, expr := range args {
@ -171,27 +139,6 @@ func isFunc(obj types.Object) bool {
return ok return ok
} }
// typeConversion returns the type being converted to if call is a type
// conversion expression.
func typeConversion(call *ast.CallExpr, info *types.Info) types.Type {
var ident *ast.Ident
switch expr := call.Fun.(type) {
case *ast.Ident:
ident = expr
case *ast.SelectorExpr:
ident = expr.Sel
default:
return nil
}
// Type conversion (e.g. "float64(foo)").
if fun, _ := info.ObjectOf(ident).(*types.TypeName); fun != nil {
return fun.Type()
}
return nil
}
func formatParams(tup *types.Tuple, variadic bool, qf types.Qualifier) []string { func formatParams(tup *types.Tuple, variadic bool, qf types.Qualifier) []string {
params := make([]string, 0, tup.Len()) params := make([]string, 0, tup.Len())
for i := 0; i < tup.Len(); i++ { for i := 0; i < tup.Len(); i++ {
@ -251,22 +198,17 @@ func formatType(typ types.Type, qf types.Qualifier) (detail string, kind Complet
return detail, kind return detail, kind
} }
func formatFunction(params []string, results []string, writeResultParens bool) string { func formatFunction(name string, params []string, results []string, writeResultParens bool) (string, string) {
var detail strings.Builder var label, detail strings.Builder
label.WriteString(name)
detail.WriteByte('(') label.WriteByte('(')
for i, p := range params { for i, p := range params {
if i > 0 { if i > 0 {
detail.WriteString(", ") label.WriteString(", ")
} }
detail.WriteString(p) label.WriteString(p)
}
detail.WriteByte(')')
// Add space between parameters and results.
if len(results) > 0 {
detail.WriteByte(' ')
} }
label.WriteByte(')')
if writeResultParens { if writeResultParens {
detail.WriteByte('(') detail.WriteByte('(')
@ -281,5 +223,5 @@ func formatFunction(params []string, results []string, writeResultParens bool) s
detail.WriteByte(')') detail.WriteByte(')')
} }
return detail.String() return label.String(), detail.String()
} }
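
Aside: formatFunction above assembles a signature string from the parameter and result strings. A hedged, standalone sketch of that kind of assembly, loosely following the helper (names here are illustrative, not the gopls function):

package main

import (
	"fmt"
	"strings"
)

func signatureLabel(name string, params, results []string, resultParens bool) string {
	var b strings.Builder
	b.WriteString(name)
	b.WriteByte('(')
	b.WriteString(strings.Join(params, ", "))
	b.WriteByte(')')
	if len(results) > 0 {
		b.WriteByte(' ')
		if resultParens {
			b.WriteByte('(')
		}
		b.WriteString(strings.Join(results, ", "))
		if resultParens {
			b.WriteByte(')')
		}
	}
	return b.String()
}

func main() {
	fmt.Println(signatureLabel("Join", []string{"elems []string", "sep string"}, []string{"string"}, false))
	// Join(elems []string, sep string) string
}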

View File

@ -9,13 +9,12 @@ import (
"go/ast" "go/ast"
"go/token" "go/token"
"go/types" "go/types"
"sort"
"strings" "strings"
"golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/packages" "golang.org/x/tools/go/packages"
"golang.org/x/tools/internal/imports"
"golang.org/x/tools/internal/lsp/diff" "golang.org/x/tools/internal/lsp/diff"
"golang.org/x/tools/internal/lsp/xlog"
"golang.org/x/tools/internal/span" "golang.org/x/tools/internal/span"
) )
@ -112,7 +111,7 @@ type Cache interface {
FileSystem FileSystem
// NewSession creates a new Session manager and returns it. // NewSession creates a new Session manager and returns it.
NewSession(ctx context.Context) Session NewSession(log xlog.Logger) Session
// FileSet returns the shared fileset used by all files in the system. // FileSet returns the shared fileset used by all files in the system.
FileSet() *token.FileSet FileSet() *token.FileSet
@ -130,11 +129,14 @@ type Cache interface {
// A session may have many active views at any given time. // A session may have many active views at any given time.
type Session interface { type Session interface {
// NewView creates a new View and returns it. // NewView creates a new View and returns it.
NewView(ctx context.Context, name string, folder span.URI) View NewView(name string, folder span.URI) View
// Cache returns the cache that created this session. // Cache returns the cache that created this session.
Cache() Cache Cache() Cache
// Returns the logger in use for this session.
Logger() xlog.Logger
// View returns a view with a matching name, if the session has one. // View returns a view with a matching name, if the session has one.
View(name string) View View(name string) View
@ -152,7 +154,7 @@ type Session interface {
FileSystem FileSystem
// DidOpen is invoked each time a file is opened in the editor. // DidOpen is invoked each time a file is opened in the editor.
DidOpen(ctx context.Context, uri span.URI, kind FileKind, text []byte) DidOpen(ctx context.Context, uri span.URI)
// DidSave is invoked each time an open file is saved in the editor. // DidSave is invoked each time an open file is saved in the editor.
DidSave(uri span.URI) DidSave(uri span.URI)
@ -207,12 +209,6 @@ type View interface {
// Ignore returns true if this file should be ignored by this view. // Ignore returns true if this file should be ignored by this view.
Ignore(span.URI) bool Ignore(span.URI) bool
Config(ctx context.Context) *packages.Config
// RunProcessEnvFunc runs fn with the process env for this view inserted into opts.
// Note: the process env contains cached module and filesystem state.
RunProcessEnvFunc(ctx context.Context, fn func(*imports.Options) error, opts *imports.Options) error
} }
// File represents a source file of any type. // File represents a source file of any type.
@ -221,15 +217,19 @@ type File interface {
View() View View() View
Handle(ctx context.Context) FileHandle Handle(ctx context.Context) FileHandle
FileSet() *token.FileSet FileSet() *token.FileSet
GetToken(ctx context.Context) (*token.File, error) GetToken(ctx context.Context) *token.File
} }
// GoFile represents a Go source file that has been type-checked. // GoFile represents a Go source file that has been type-checked.
type GoFile interface { type GoFile interface {
File File
// GetAnyAST returns an AST that may or may not contain function bodies.
// It should be used in scenarios where function bodies are not necessary.
GetAnyAST(ctx context.Context) *ast.File
// GetAST returns the full AST for the file. // GetAST returns the full AST for the file.
GetAST(ctx context.Context, mode ParseMode) (*ast.File, error) GetAST(ctx context.Context) *ast.File
// GetPackage returns the package that this file belongs to. // GetPackage returns the package that this file belongs to.
GetPackage(ctx context.Context) Package GetPackage(ctx context.Context) Package
@ -256,7 +256,7 @@ type Package interface {
ID() string ID() string
PkgPath() string PkgPath() string
GetFilenames() []string GetFilenames() []string
GetSyntax(context.Context) []*ast.File GetSyntax() []*ast.File
GetErrors() []packages.Error GetErrors() []packages.Error
GetTypes() *types.Package GetTypes() *types.Package
GetTypesInfo() *types.Info GetTypesInfo() *types.Info
@ -320,10 +320,3 @@ func EditsToDiff(edits []TextEdit) []*diff.Op {
} }
return ops return ops
} }
func sortTextEdits(d []TextEdit) {
// Use a stable sort to maintain the order of edits inserted at the same position.
sort.SliceStable(d, func(i int, j int) bool {
return span.Compare(d[i].Span, d[j].Span) < 0
})
}
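
Aside: sortTextEdits above uses a stable sort so that edits inserted at the same position keep their original order. A quick illustration with a simplified edit type:

package main

import (
	"fmt"
	"sort"
)

type textEdit struct {
	offset  int
	newText string
}

func main() {
	edits := []textEdit{{5, "first"}, {2, "x"}, {5, "second"}}
	sort.SliceStable(edits, func(i, j int) bool { return edits[i].offset < edits[j].offset })
	// The two inserts at offset 5 keep their relative order.
	fmt.Println(edits) // [{2 x} {5 first} {5 second}]
}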

View File

@ -9,13 +9,10 @@ import (
"golang.org/x/tools/internal/lsp/protocol" "golang.org/x/tools/internal/lsp/protocol"
"golang.org/x/tools/internal/lsp/source" "golang.org/x/tools/internal/lsp/source"
"golang.org/x/tools/internal/lsp/telemetry/trace"
"golang.org/x/tools/internal/span" "golang.org/x/tools/internal/span"
) )
func (s *Server) documentSymbol(ctx context.Context, params *protocol.DocumentSymbolParams) ([]protocol.DocumentSymbol, error) { func (s *Server) documentSymbol(ctx context.Context, params *protocol.DocumentSymbolParams) ([]protocol.DocumentSymbol, error) {
ctx, done := trace.StartSpan(ctx, "lsp.Server.documentSymbol")
defer done()
uri := span.NewURI(params.TextDocument.URI) uri := span.NewURI(params.TextDocument.URI)
view := s.session.ViewOf(uri) view := s.session.ViewOf(uri)
f, m, err := getGoFile(ctx, view, uri) f, m, err := getGoFile(ctx, view, uri)

View File

@ -1,51 +0,0 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package log
import (
"context"
"fmt"
"time"
"golang.org/x/tools/internal/lsp/telemetry/tag"
)
type Entry struct {
At time.Time
Message string
Error error
Tags tag.List
}
func ToEntry(ctx context.Context, at time.Time, tags tag.List) Entry {
//TODO: filter more efficiently for the common case of stripping prefixes only
entry := Entry{
At: at,
}
for _, t := range tags {
switch t.Key {
case MessageTag:
entry.Message = t.Value.(string)
case ErrorTag:
entry.Error = t.Value.(error)
default:
entry.Tags = append(entry.Tags, t)
}
}
return entry
}
func (e Entry) Format(f fmt.State, r rune) {
if !e.At.IsZero() {
fmt.Fprint(f, e.At.Format("2006/01/02 15:04:05 "))
}
fmt.Fprint(f, e.Message)
if e.Error != nil {
fmt.Fprintf(f, ": %v", e.Error)
}
for _, tag := range e.Tags {
fmt.Fprintf(f, "\n\t%v = %v", tag.Key, tag.Value)
}
}

View File

@ -1,92 +0,0 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package log is a context based logging package, designed to interact well
// with both the lsp protocol and the other telemetry packages.
package log
import (
"context"
"fmt"
"os"
"time"
"golang.org/x/tools/internal/lsp/telemetry/tag"
"golang.org/x/tools/internal/lsp/telemetry/worker"
)
const (
// The well known tag keys for the logging system.
MessageTag = tag.Key("message")
ErrorTag = tag.Key("error")
)
// Logger is a function that handles logging messages.
// Loggers are registered at start up, and may use information in the context
// to decide what to do with a given log message.
type Logger func(ctx context.Context, at time.Time, tags tag.List) bool
// With sends a tag list to the installed loggers.
func With(ctx context.Context, tags ...tag.Tag) {
at := time.Now()
worker.Do(func() {
deliver(ctx, at, tags)
})
}
// Print takes a message and a tag list and combines them into a single tag
// list before delivering them to the loggers.
func Print(ctx context.Context, message string, tags ...tag.Tagger) {
at := time.Now()
worker.Do(func() {
tags := append(tag.Tags(ctx, tags...), MessageTag.Of(message))
deliver(ctx, at, tags)
})
}
type errorString string
// Error allows errorString to conform to the error interface.
func (err errorString) Error() string { return string(err) }
// Error takes a message and an error and combines them with a tag list before
// delivering them to the loggers.
func Error(ctx context.Context, message string, err error, tags ...tag.Tagger) {
at := time.Now()
worker.Do(func() {
if err == nil {
err = errorString(message)
message = ""
}
tags := append(tag.Tags(ctx, tags...), MessageTag.Of(message), ErrorTag.Of(err))
deliver(ctx, at, tags)
})
}
func deliver(ctx context.Context, at time.Time, tags tag.List) {
delivered := false
for _, logger := range loggers {
if logger(ctx, at, tags) {
delivered = true
}
}
if !delivered {
// no logger processed the message, so we log to stderr just in case
Stderr(ctx, at, tags)
}
}
var loggers = []Logger{}
func AddLogger(logger Logger) {
worker.Do(func() {
loggers = append(loggers, logger)
})
}
// Stderr is a logger that logs to stderr in the standard format.
func Stderr(ctx context.Context, at time.Time, tags tag.List) bool {
fmt.Fprintf(os.Stderr, "%v\n", ToEntry(ctx, at, tags))
return true
}
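// Editor's sketch (not part of the diff): how the API above is meant to be
// wired up. The import paths are internal to x/tools, so this only compiles
// from inside that module; the logger body and messages are illustrative.
package lspexample

import (
	"context"
	"errors"
	"time"

	"golang.org/x/tools/internal/lsp/telemetry/log"
	"golang.org/x/tools/internal/lsp/telemetry/tag"
)

func init() {
	// Returning false defers to other loggers, or to the Stderr fallback in deliver.
	log.AddLogger(func(ctx context.Context, at time.Time, tags tag.List) bool {
		return false
	})
}

func doSomething(ctx context.Context) {
	log.Print(ctx, "starting reload")
	log.Error(ctx, "reload failed", errors.New("file does not exist"))
}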

View File

@ -1,412 +0,0 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package metric aggregates stats into metrics that can be exported.
package metric
import (
"context"
"sort"
"golang.org/x/tools/internal/lsp/telemetry/stats"
"golang.org/x/tools/internal/lsp/telemetry/tag"
"golang.org/x/tools/internal/lsp/telemetry/worker"
)
// Handle uniquely identifies a constructed metric.
// It can be used to detect which observed data objects belong
// to that metric.
type Handle struct {
name string
}
// Data represents a single point in the time series of a metric.
// This provides the common interface to all metrics no matter their data
// format.
// To get the actual values for the metric you must type assert to a concrete
// metric type.
type Data interface {
// Handle returns the metric handle this data is for.
Handle() Handle
// Groups reports the rows that currently exist for this metric.
Groups() []tag.List
}
// Scalar represents the construction information for a scalar metric.
type Scalar struct {
// Name is the unique name of this metric.
Name string
// Description can be used by observers to describe the metric to users.
Description string
// Keys is the set of tags that collectively describe rows of the metric.
Keys []interface{}
}
// HistogramInt64 represents the construction information for an int64 histogram metric.
type HistogramInt64 struct {
// Name is the unique name of this metric.
Name string
// Description can be used by observers to describe the metric to users.
Description string
// Keys is the set of tags that collectively describe rows of the metric.
Keys []interface{}
// Buckets holds the inclusive upper bound of each bucket in the histogram.
Buckets []int64
}
// HistogramFloat64 represents the construction information for a float64 histogram metric.
type HistogramFloat64 struct {
// Name is the unique name of this metric.
Name string
// Description can be used by observers to describe the metric to users.
Description string
// Keys is the set of tags that collectively describe rows of the metric.
Keys []interface{}
// Buckets holds the inclusive upper bound of each bucket in the histogram.
Buckets []float64
}
// Observer is the type for functions that want to observe metric values
// as they arrive.
// Each data point delivered to an observer is immutable and can be stored if
// needed.
type Observer func(Data)
// CountInt64 creates a new metric based on the Scalar information that counts
// the number of times the supplied int64 measure is set.
// Metrics of this type will use Int64Data.
func (info Scalar) CountInt64(measure *stats.Int64Measure) Handle {
data := &Int64Data{Info: &info}
measure.Subscribe(data.countInt64)
return Handle{info.Name}
}
// SumInt64 creates a new metric based on the Scalar information that sums all
// the values recorded on the int64 measure.
// Metrics of this type will use Int64Data.
func (info Scalar) SumInt64(measure *stats.Int64Measure) Handle {
data := &Int64Data{Info: &info}
measure.Subscribe(data.sum)
_ = data
return Handle{info.Name}
}
// LatestInt64 creates a new metric based on the Scalar information that tracks
// the most recent value recorded on the int64 measure.
// Metrics of this type will use Int64Data.
func (info Scalar) LatestInt64(measure *stats.Int64Measure) Handle {
data := &Int64Data{Info: &info, IsGauge: true}
measure.Subscribe(data.latest)
return Handle{info.Name}
}
// CountFloat64 creates a new metric based on the Scalar information that counts
// the number of times the supplied float64 measure is set.
// Metrics of this type will use Int64Data.
func (info Scalar) CountFloat64(measure *stats.Float64Measure) Handle {
data := &Int64Data{Info: &info}
measure.Subscribe(data.countFloat64)
return Handle{info.Name}
}
// SumFloat64 creates a new metric based on the Scalar information that sums all
// the values recorded on the float64 measure.
// Metrics of this type will use Float64Data.
func (info Scalar) SumFloat64(measure *stats.Float64Measure) Handle {
data := &Float64Data{Info: &info}
measure.Subscribe(data.sum)
return Handle{info.Name}
}
// LatestFloat64 creates a new metric based on the Scalar information that tracks
// the most recent value recorded on the float64 measure.
// Metrics of this type will use Float64Data.
func (info Scalar) LatestFloat64(measure *stats.Float64Measure) Handle {
data := &Float64Data{Info: &info, IsGauge: true}
measure.Subscribe(data.latest)
return Handle{info.Name}
}
// Record creates a new metric based on the HistogramInt64 information that
// tracks the bucketized counts of values recorded on the int64 measure.
// Metrics of this type will use HistogramInt64Data.
func (info HistogramInt64) Record(measure *stats.Int64Measure) Handle {
data := &HistogramInt64Data{Info: &info}
measure.Subscribe(data.record)
return Handle{info.Name}
}
// Record creates a new metric based on the HistogramFloat64 information that
// tracks the bucketized counts of values recorded on the float64 measure.
// Metrics of this type will use HistogramFloat64Data.
func (info HistogramFloat64) Record(measure *stats.Float64Measure) Handle {
data := &HistogramFloat64Data{Info: &info}
measure.Subscribe(data.record)
return Handle{info.Name}
}
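// Editor's sketch (not part of the diff): how the constructors above are
// intended to be used. The measure parameter, metric names, and buckets are
// hypothetical, and the internal import paths only resolve inside x/tools.
package lspexample

import (
	"golang.org/x/tools/internal/lsp/telemetry/metric"
	"golang.org/x/tools/internal/lsp/telemetry/stats"
)

func setupMetrics(latency *stats.Int64Measure) {
	// Count every value recorded on the measure.
	metric.Scalar{
		Name:        "request_count",
		Description: "number of requests handled",
	}.CountInt64(latency)

	// Bucketize the same measure; Values[i] counts recordings <= Buckets[i].
	metric.HistogramInt64{
		Name:        "request_latency_ms",
		Description: "request latency distribution in milliseconds",
		Buckets:     []int64{10, 50, 100, 500, 1000},
	}.Record(latency)
}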
// Int64Data is a concrete implementation of Data for int64 scalar metrics.
type Int64Data struct {
// Info holds the original construction information.
Info *Scalar
// IsGauge is true for metrics that track values, rather than increasing over time.
IsGauge bool
// Rows holds the per group values for the metric.
Rows []int64
groups []tag.List
}
// Float64Data is a concrete implementation of Data for float64 scalar metrics.
type Float64Data struct {
// Info holds the original construction information.
Info *Scalar
// IsGauge is true for metrics that track values, rather than increasing over time.
IsGauge bool
// Rows holds the per group values for the metric.
Rows []float64
groups []tag.List
}
// HistogramInt64Data is a concrete implementation of Data for int64 histogram metrics.
type HistogramInt64Data struct {
// Info holds the original construction information.
Info *HistogramInt64
// Rows holds the per group values for the metric.
Rows []*HistogramInt64Row
groups []tag.List
}
// HistogramInt64Row holds the values for a single row of a HistogramInt64Data.
type HistogramInt64Row struct {
// Values is the counts per bucket.
Values []int64
// Count is the total count.
Count int64
// Sum is the sum of all the values recorded.
Sum int64
// Min is the smallest recorded value.
Min int64
// Max is the largest recorded value.
Max int64
}
// HistogramFloat64Data is a concrete implementation of Data for float64 histogram metrics.
type HistogramFloat64Data struct {
// Info holds the original construction information.
Info *HistogramFloat64
// Rows holds the per group values for the metric.
Rows []*HistogramFloat64Row
groups []tag.List
}
// HistogramFloat64Row holds the values for a single row of a HistogramFloat64Data.
type HistogramFloat64Row struct {
// Values is the counts per bucket.
Values []int64
// Count is the total count.
Count int64
// Sum is the sum of all the values recorded.
Sum float64
// Min is the smallest recorded value.
Min float64
// Max is the largest recorded value.
Max float64
}
// Name returns the name of the metric this is a handle for.
func (h Handle) Name() string { return h.name }
var observers []Observer
// RegisterObservers adds new metric observers to the system.
// There is no way to unregister an observer.
func RegisterObservers(e ...Observer) {
worker.Do(func() {
observers = append(e, observers...)
})
}
// export must only be called from inside a worker
func export(m Data) {
for _, e := range observers {
e(m)
}
}
func getGroup(ctx context.Context, g *[]tag.List, keys []interface{}) (int, bool) {
group := tag.Get(ctx, keys...)
old := *g
index := sort.Search(len(old), func(i int) bool {
return !old[i].Less(group)
})
if index < len(old) && group.Equal(old[index]) {
// not a new group
return index, false
}
*g = make([]tag.List, len(old)+1)
copy(*g, old[:index])
copy((*g)[index+1:], old[index:])
(*g)[index] = group
return index, true
}
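// Editor's sketch (not part of the diff): a standalone illustration of the
// pattern getGroup uses — sort.Search finds the insertion point, and a fresh
// slice is allocated so slices already handed out are never mutated.
package main

import (
	"fmt"
	"sort"
)

func insertSorted(groups []string, g string) ([]string, int, bool) {
	index := sort.Search(len(groups), func(i int) bool { return groups[i] >= g })
	if index < len(groups) && groups[index] == g {
		return groups, index, false // existing group
	}
	out := make([]string, len(groups)+1)
	copy(out, groups[:index])
	copy(out[index+1:], groups[index:])
	out[index] = g
	return out, index, true
}

func main() {
	groups := []string{"a", "c"}
	groups, i, added := insertSorted(groups, "b")
	fmt.Println(groups, i, added) // [a b c] 1 true
}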
func (data *Int64Data) Handle() Handle { return Handle{data.Info.Name} }
func (data *Int64Data) Groups() []tag.List { return data.groups }
func (data *Int64Data) modify(ctx context.Context, f func(v int64) int64) {
worker.Do(func() {
index, insert := getGroup(ctx, &data.groups, data.Info.Keys)
old := data.Rows
if insert {
data.Rows = make([]int64, len(old)+1)
copy(data.Rows, old[:index])
copy(data.Rows[index+1:], old[index:])
} else {
data.Rows = make([]int64, len(old))
copy(data.Rows, old)
}
data.Rows[index] = f(data.Rows[index])
frozen := *data
export(&frozen)
})
}
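// Editor's note (not part of the diff): modify always copies Rows before
// updating and then exports a copy of the whole struct ("frozen"), so the Data
// values observers receive are effectively immutable snapshots that can be
// retained without racing with later updates. The Float64 and histogram
// variants below follow the same copy-on-write pattern.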
func (data *Int64Data) countInt64(ctx context.Context, measure *stats.Int64Measure, value int64) {
data.modify(ctx, func(v int64) int64 { return v + 1 })
}
func (data *Int64Data) countFloat64(ctx context.Context, measure *stats.Float64Measure, value float64) {
data.modify(ctx, func(v int64) int64 { return v + 1 })
}
func (data *Int64Data) sum(ctx context.Context, measure *stats.Int64Measure, value int64) {
data.modify(ctx, func(v int64) int64 { return v + value })
}
func (data *Int64Data) latest(ctx context.Context, measure *stats.Int64Measure, value int64) {
data.modify(ctx, func(v int64) int64 { return value })
}
func (data *Float64Data) Handle() Handle { return Handle{data.Info.Name} }
func (data *Float64Data) Groups() []tag.List { return data.groups }
func (data *Float64Data) modify(ctx context.Context, f func(v float64) float64) {
worker.Do(func() {
index, insert := getGroup(ctx, &data.groups, data.Info.Keys)
old := data.Rows
if insert {
data.Rows = make([]float64, len(old)+1)
copy(data.Rows, old[:index])
copy(data.Rows[index+1:], old[index:])
} else {
data.Rows = make([]float64, len(old))
copy(data.Rows, old)
}
data.Rows[index] = f(data.Rows[index])
frozen := *data
export(&frozen)
})
}
func (data *Float64Data) sum(ctx context.Context, measure *stats.Float64Measure, value float64) {
data.modify(ctx, func(v float64) float64 { return v + value })
}
func (data *Float64Data) latest(ctx context.Context, measure *stats.Float64Measure, value float64) {
data.modify(ctx, func(v float64) float64 { return value })
}
func (data *HistogramInt64Data) Handle() Handle { return Handle{data.Info.Name} }
func (data *HistogramInt64Data) Groups() []tag.List { return data.groups }
func (data *HistogramInt64Data) modify(ctx context.Context, f func(v *HistogramInt64Row)) {
worker.Do(func() {
index, insert := getGroup(ctx, &data.groups, data.Info.Keys)
old := data.Rows
var v HistogramInt64Row
if insert {
data.Rows = make([]*HistogramInt64Row, len(old)+1)
copy(data.Rows, old[:index])
copy(data.Rows[index+1:], old[index:])
} else {
data.Rows = make([]*HistogramInt64Row, len(old))
copy(data.Rows, old)
v = *data.Rows[index]
}
oldValues := v.Values
v.Values = make([]int64, len(data.Info.Buckets))
copy(v.Values, oldValues)
f(&v)
data.Rows[index] = &v
frozen := *data
export(&frozen)
})
}
func (data *HistogramInt64Data) record(ctx context.Context, measure *stats.Int64Measure, value int64) {
data.modify(ctx, func(v *HistogramInt64Row) {
v.Sum += value
if v.Min > value || v.Count == 0 {
v.Min = value
}
if v.Max < value || v.Count == 0 {
v.Max = value
}
v.Count++
for i, b := range data.Info.Buckets {
if value <= b {
v.Values[i]++
}
}
})
}
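// Editor's note (not part of the diff): the bucket loop above has no break, so
// a recorded value increments every bucket whose upper bound it fits under;
// Values therefore holds cumulative counts. With Buckets = [10, 100, 1000],
// recording 42 increments Values[1] and Values[2] but not Values[0].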
func (data *HistogramFloat64Data) Handle() Handle { return Handle{data.Info.Name} }
func (data *HistogramFloat64Data) Groups() []tag.List { return data.groups }
func (data *HistogramFloat64Data) modify(ctx context.Context, f func(v *HistogramFloat64Row)) {
worker.Do(func() {
index, insert := getGroup(ctx, &data.groups, data.Info.Keys)
old := data.Rows
var v HistogramFloat64Row
if insert {
data.Rows = make([]*HistogramFloat64Row, len(old)+1)
copy(data.Rows, old[:index])
copy(data.Rows[index+1:], old[index:])
} else {
data.Rows = make([]*HistogramFloat64Row, len(old))
copy(data.Rows, old)
v = *data.Rows[index]
}
oldValues := v.Values
v.Values = make([]int64, len(data.Info.Buckets))
copy(v.Values, oldValues)
f(&v)
data.Rows[index] = &v
frozen := *data
export(&frozen)
})
}
func (data *HistogramFloat64Data) record(ctx context.Context, measure *stats.Float64Measure, value float64) {
data.modify(ctx, func(v *HistogramFloat64Row) {
v.Sum += value
if v.Min > value || v.Count == 0 {
v.Min = value
}
if v.Max < value || v.Count == 0 {
v.Max = value
}
v.Count++
for i, b := range data.Info.Buckets {
if value <= b {
v.Values[i]++
}
}
})
}

Some files were not shown because too many files have changed in this diff