commit f3faf8b6e0 (parent 88e2928490)

godoc: add search results that point to documentation instead of source.

Add explicit options to Corpus to control search indexing of
documentation, Go source code, and full-text.

R=bradfitz, r
CC=golang-dev
https://golang.org/cl/24190043
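Editor's note: a minimal sketch of how an embedder might set the new Corpus indexing options this CL introduces. The import paths and the GOROOT value are assumptions based on the current x/tools layout, not part of this CL.

// Illustrative only: mirrors the cmd/godoc/main.go wiring shown below.
package main

import (
    "log"

    "golang.org/x/tools/godoc"     // assumed import path
    "golang.org/x/tools/godoc/vfs" // assumed import path
)

func main() {
    corpus := godoc.NewCorpus(vfs.OS("/usr/local/go")) // assumed GOROOT
    corpus.IndexEnabled = true   // already the default from NewCorpus
    corpus.IndexDocs = true      // documentation search results (new in this CL)
    corpus.IndexGoCode = false   // skip per-identifier source indexing
    corpus.IndexFullText = false // skip the suffix-array full-text index
    if err := corpus.Init(); err != nil {
        log.Fatal(err)
    }
    corpus.UpdateIndex() // build the search index once with the options above
}
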
@@ -190,7 +190,11 @@ func main() {
 	corpus := godoc.NewCorpus(fs)
 	corpus.Verbose = *verbose
+	corpus.MaxResults = *maxResults
 	corpus.IndexEnabled = *indexEnabled && httpMode
+	if *maxResults == 0 {
+		corpus.IndexFullText = false
+	}
 	corpus.IndexFiles = *indexFiles
 	corpus.IndexThrottle = *indexThrottle
 	if *writeIndex {

@@ -45,8 +45,25 @@ type Corpus struct {
 	// built once.
 	IndexInterval time.Duration

+	// IndexDocs enables indexing of Go documentation.
+	// This will produce search results for exported types, functions,
+	// methods, variables, and constants, and will link to the godoc
+	// documentation for those identifiers.
+	IndexDocs bool
+
+	// IndexGoCode enables indexing of Go source code.
+	// This will produce search results for internal and external identifiers
+	// and will link to both declarations and uses of those identifiers in
+	// source code.
+	IndexGoCode bool
+
+	// IndexFullText enables full-text indexing.
+	// This will provide search results for any matching text in any file that
+	// is indexed, including non-Go files (see whitelisted in index.go).
+	// Regexp searching is supported via full-text indexing.
+	IndexFullText bool
+
 	// MaxResults optionally specifies the maximum results for indexing.
-	// The default is 1000.
 	MaxResults int

 	// SummarizePackage optionally specifies a function to

@@ -85,7 +102,8 @@ type Corpus struct {
 }

 // NewCorpus returns a new Corpus from a filesystem.
-// Set any options on Corpus before calling the Corpus.Init method.
+// The returned corpus has all indexing enabled and MaxResults set to 1000.
+// Change or set any options on Corpus before calling the Corpus.Init method.
 func NewCorpus(fs vfs.FileSystem) *Corpus {
 	c := &Corpus{
 		fs: fs,

@@ -93,6 +111,9 @@ func NewCorpus(fs vfs.FileSystem) *Corpus {
 		MaxResults:    1000,
 		IndexEnabled:  true,
+		IndexDocs:     true,
+		IndexGoCode:   true,
+		IndexFullText: true,
 	}
 	return c
 }
@@ -79,6 +79,7 @@ func (p *Presentation) initFuncMap() {
 		"pkgLink":     pkgLinkFunc,
 		"srcLink":     srcLinkFunc,
 		"posLink_url": newPosLink_urlFunc(srcPosLinkFunc),
+		"docLink":     docLinkFunc,

 		// formatting of Examples
 		"example_html": p.example_htmlFunc,

@@ -297,6 +298,11 @@ func srcLinkFunc(s string) string {
 	return pathpkg.Clean("/" + s)
 }

+func docLinkFunc(s string, ident string) string {
+	s = strings.TrimPrefix(s, "/src")
+	return pathpkg.Clean("/"+s) + "/#" + ident
+}
+
 func (p *Presentation) example_textFunc(info *PageInfo, funcName, indent string) string {
 	if !p.ShowExamples {
 		return ""
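Editor's note: a standalone sketch of what the new docLink template function produces; it re-implements the two-line helper above, and the example path and identifier are made up.

// Not the godoc package itself; mirrors docLinkFunc for illustration.
package main

import (
    "fmt"
    pathpkg "path"
    "strings"
)

// docLink strips the "/src" prefix from a source path and appends an anchor
// for the identifier, yielding a documentation URL.
func docLink(s, ident string) string {
    s = strings.TrimPrefix(s, "/src")
    return pathpkg.Clean("/"+s) + "/#" + ident
}

func main() {
    // Hypothetical inputs: an Ident.Path as stored by the indexer plus a name.
    fmt.Println(docLink("/src/pkg/net/http", "NewRequest")) // /pkg/net/http/#NewRequest
}
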
godoc/index.go (342 changed lines)

@@ -44,6 +44,7 @@ import (
 	"errors"
 	"fmt"
 	"go/ast"
+	"go/doc"
 	"go/parser"
 	"go/token"
 	"index/suffixarray"

@@ -348,13 +349,44 @@ func (a *AltWords) filter(s string) *AltWords {
 	return nil
 }

+// Ident stores information about external identifiers in order to create
+// links to package documentation.
+type Ident struct {
+	Path    string // e.g. "net/http"
+	Package string // e.g. "http"
+	Name    string // e.g. "NewRequest"
+	Doc     string // e.g. "NewRequest returns a new Request..."
+}
+
+type byPackage []Ident
+
+func (s byPackage) Len() int { return len(s) }
+func (s byPackage) Less(i, j int) bool {
+	if s[i].Package == s[j].Package {
+		return s[i].Path < s[j].Path
+	}
+	return s[i].Package < s[j].Package
+}
+func (s byPackage) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
+
+// Filter creates a new Ident list where the results match the given
+// package name.
+func (s byPackage) filter(pakname string) []Ident {
+	if s == nil {
+		return nil
+	}
+	var res []Ident
+	for _, i := range s {
+		if i.Package == pakname {
+			res = append(res, i)
+		}
+	}
+	return res
+}
+
 // ----------------------------------------------------------------------------
 // Indexer

-// Adjust these flags as seems best.
-const includeMainPackages = true
-const includeTestFiles = true
-
 type IndexResult struct {
 	Decls  RunList // package-level declarations (with snippets)
 	Others RunList // all other occurrences
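Editor's note: to make the sorting and filtering behaviour concrete, here is a self-contained sketch that copies the Ident/byPackage helpers above into a throwaway program. The package data is invented.

// Throwaway illustration of the byPackage helpers above.
package main

import (
    "fmt"
    "sort"
)

type Ident struct {
    Path, Package, Name, Doc string
}

type byPackage []Ident

func (s byPackage) Len() int      { return len(s) }
func (s byPackage) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
func (s byPackage) Less(i, j int) bool {
    if s[i].Package == s[j].Package {
        return s[i].Path < s[j].Path // tie-break on import path
    }
    return s[i].Package < s[j].Package
}

// filter keeps only identifiers declared in the named package.
func (s byPackage) filter(pakname string) []Ident {
    var res []Ident
    for _, i := range s {
        if i.Package == pakname {
            res = append(res, i)
        }
    }
    return res
}

func main() {
    ids := []Ident{
        {"/src/pkg/text/template", "template", "New", ""},
        {"/src/pkg/html/template", "template", "New", ""},
        {"/src/pkg/log", "log", "New", ""},
    }
    sort.Sort(byPackage(ids))
    fmt.Println(byPackage(ids).filter("template")) // the two template.New entries, html before text
}
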
@@ -393,6 +425,7 @@ type Indexer struct {
 	packagePath   map[string]map[string]bool     // "template" => "text/template" => true
 	exports       map[string]map[string]SpotKind // "net/http" => "ListenAndServe" => FuncDecl
 	curPkgExports map[string]SpotKind
+	idents        map[SpotKind]map[string][]Ident // kind => name => list of Idents
 }

 func (x *Indexer) intern(s string) string {

@@ -441,9 +474,11 @@ func (x *Indexer) visitIdent(kind SpotKind, id *ast.Ident) {
 	}

 	if kind == Use || x.decl == nil {
+		if x.c.IndexGoCode {
 		// not a declaration or no snippet required
 		info := makeSpotInfo(kind, x.current.Line(id.Pos()), false)
 		lists.Others = append(lists.Others, Spot{x.file, info})
+		}
 	} else {
 		// a declaration with snippet
 		index := x.addSnippet(NewSnippet(x.fset, x.decl, id))

@@ -554,16 +589,6 @@ func (x *Indexer) Visit(node ast.Node) ast.Visitor {
 	return nil
 }

-func pkgName(filename string) string {
-	// use a new file set each time in order to not pollute the indexer's
-	// file set (which must stay in sync with the concatenated source code)
-	file, err := parser.ParseFile(token.NewFileSet(), filename, nil, parser.PackageClauseOnly)
-	if err != nil || file == nil {
-		return ""
-	}
-	return file.Name.Name
-}
-
 // addFile adds a file to the index if possible and returns the file set file
 // and the file's AST if it was successfully parsed as a Go file. If addFile
 // failed (that is, if the file was not added), it returns file == nil.

@@ -668,27 +693,133 @@ func isWhitelisted(filename string) bool {
 	return whitelisted[key]
 }

-func (x *Indexer) visitFile(dirname string, fi os.FileInfo, fulltextIndex bool) {
-	if fi.IsDir() {
-		return
-	}
+func (x *Indexer) indexDocs(dirname string, filename string, astFile *ast.File) {
+	pkgName := astFile.Name.Name
+	if pkgName == "main" {
+		return
+	}
+	astPkg := ast.Package{
+		Name: pkgName,
+		Files: map[string]*ast.File{
+			filename: astFile,
+		},
+	}
+	var m doc.Mode
+	docPkg := doc.New(&astPkg, pathpkg.Clean(dirname), m)
+	addIdent := func(sk SpotKind, name string, docstr string) {
+		if x.idents[sk] == nil {
+			x.idents[sk] = make(map[string][]Ident)
+		}
+		x.idents[sk][name] = append(x.idents[sk][name], Ident{
+			Path:    pathpkg.Clean(dirname),
+			Package: pkgName,
+			Name:    name,
+			Doc:     doc.Synopsis(docstr),
+		})
+	}
+	for _, c := range docPkg.Consts {
+		for _, name := range c.Names {
+			addIdent(ConstDecl, name, c.Doc)
+		}
+	}
+	for _, t := range docPkg.Types {
+		addIdent(TypeDecl, t.Name, t.Doc)
+		for _, c := range t.Consts {
+			for _, name := range c.Names {
+				addIdent(ConstDecl, name, c.Doc)
+			}
+		}
+		for _, v := range t.Vars {
+			for _, name := range v.Names {
+				addIdent(VarDecl, name, v.Doc)
+			}
+		}
+		for _, f := range t.Funcs {
+			addIdent(FuncDecl, f.Name, f.Doc)
+		}
+		for _, f := range t.Methods {
+			addIdent(MethodDecl, f.Name, f.Doc)
+			// Change the name of methods to be "<typename>.<methodname>".
+			// They will still be indexed as <methodname>.
+			idents := x.idents[MethodDecl][f.Name]
+			idents[len(idents)-1].Name = t.Name + "." + f.Name
+		}
+	}
+	for _, v := range docPkg.Vars {
+		for _, name := range v.Names {
+			addIdent(VarDecl, name, v.Doc)
+		}
+	}
+	for _, f := range docPkg.Funcs {
+		addIdent(FuncDecl, f.Name, f.Doc)
+	}
+}
+
+func (x *Indexer) indexGoFile(dirname string, filename string, file *token.File, astFile *ast.File) {
+	pkgName := astFile.Name.Name
+
+	if x.c.IndexGoCode {
+		x.current = file
+		pak := x.lookupPackage(dirname, pkgName)
+		x.file = &File{filename, pak}
+		ast.Walk(x, astFile)
+	}
+
+	if x.c.IndexDocs {
+		// Test files are already filtered out in visitFile if IndexGoCode and
+		// IndexFullText are false. Otherwise, check here.
+		isTestFile := (x.c.IndexGoCode || x.c.IndexFullText) &&
+			(strings.HasSuffix(filename, "_test.go") || strings.HasPrefix(dirname, "test/"))
+		if !isTestFile {
+			x.indexDocs(dirname, filename, astFile)
+		}
+	}
+
+	ppKey := x.intern(pkgName)
+	if _, ok := x.packagePath[ppKey]; !ok {
+		x.packagePath[ppKey] = make(map[string]bool)
+	}
+	pkgPath := x.intern(strings.TrimPrefix(dirname, "/src/pkg/"))
+	x.packagePath[ppKey][pkgPath] = true
+
+	// Merge in exported symbols found walking this file into
+	// the map for that package.
+	if len(x.curPkgExports) > 0 {
+		dest, ok := x.exports[pkgPath]
+		if !ok {
+			dest = make(map[string]SpotKind)
+			x.exports[pkgPath] = dest
+		}
+		for k, v := range x.curPkgExports {
+			dest[k] = v
+		}
+	}
+}
+
+func (x *Indexer) visitFile(dirname string, fi os.FileInfo) {
+	if fi.IsDir() || !x.c.IndexEnabled {
+		return
+	}

 	filename := pathpkg.Join(dirname, fi.Name())
-	goFile := false
+	goFile := isGoFile(fi)

 	switch {
-	case isGoFile(fi):
-		if !includeTestFiles && (!isPkgFile(fi) || strings.HasPrefix(filename, "test/")) {
+	case x.c.IndexFullText:
+		if !isWhitelisted(fi.Name()) {
 			return
 		}
-		if !includeMainPackages && pkgName(filename) == "main" {
+	case x.c.IndexGoCode:
+		if !goFile {
 			return
 		}
-		goFile = true
-
-	case !fulltextIndex || !isWhitelisted(fi.Name()):
+	case x.c.IndexDocs:
+		if !goFile ||
+			strings.HasSuffix(fi.Name(), "_test.go") ||
+			strings.HasPrefix(dirname, "test/") {
 			return
 		}
+	}

 	x.fsOpenGate <- true
 	defer func() { <-x.fsOpenGate }()

@@ -711,31 +842,7 @@ func (x *Indexer) visitFile(dirname string, fi os.FileInfo, fulltextIndex bool)
 	}

 	if fast != nil {
-		// we've got a Go file to index
-		x.current = file
-		pak := x.lookupPackage(dirname, fast.Name.Name)
-		x.file = &File{fi.Name(), pak}
-		ast.Walk(x, fast)
-
-		ppKey := x.intern(fast.Name.Name)
-		if _, ok := x.packagePath[ppKey]; !ok {
-			x.packagePath[ppKey] = make(map[string]bool)
-		}
-		pkgPath := x.intern(strings.TrimPrefix(dirname, "/src/pkg/"))
-		x.packagePath[ppKey][pkgPath] = true
-
-		// Merge in exported symbols found walking this file into
-		// the map for that package.
-		if len(x.curPkgExports) > 0 {
-			dest, ok := x.exports[pkgPath]
-			if !ok {
-				dest = make(map[string]SpotKind)
-				x.exports[pkgPath] = dest
-			}
-			for k, v := range x.curPkgExports {
-				dest[k] = v
-			}
-		}
+		x.indexGoFile(dirname, fi.Name(), file, fast)
 	}

 	// update statistics

@@ -744,6 +851,28 @@ func (x *Indexer) visitFile(dirname string, fi os.FileInfo, fulltextIndex bool)
 	x.stats.Lines += file.LineCount()
 }

+// indexOptions contains information that affects the contents of an index.
+type indexOptions struct {
+	// Docs provides documentation search results.
+	// It is only consulted if IndexEnabled is true.
+	// The default values is true.
+	Docs bool
+
+	// GoCode provides Go source code search results.
+	// It is only consulted if IndexEnabled is true.
+	// The default values is true.
+	GoCode bool
+
+	// FullText provides search results from all files.
+	// It is only consulted if IndexEnabled is true.
+	// The default values is true.
+	FullText bool
+
+	// MaxResults optionally specifies the maximum results for indexing.
+	// The default is 1000.
+	MaxResults int
+}
+
 // ----------------------------------------------------------------------------
 // Index

@@ -762,6 +891,8 @@ type Index struct {
 	importCount map[string]int                 // package path ("net/http") => count
 	packagePath map[string]map[string]bool     // "template" => "text/template" => true
 	exports     map[string]map[string]SpotKind // "net/http" => "ListenAndServe" => FuncDecl
+	idents      map[SpotKind]map[string][]Ident
+	opts        indexOptions
 }

 func canonical(w string) string { return strings.ToLower(w) }

@@ -774,12 +905,18 @@ const (
 	maxOpenDirs = 50
 )

-// NewIndex creates a new index for the .go files
-// in the directories given by dirnames.
-// The throttle parameter specifies a value between 0.0 and 1.0 that controls
-// artificial sleeping. If 0.0, the indexer always sleeps. If 1.0, the indexer
-// never sleeps.
-func NewIndex(c *Corpus, dirnames <-chan string, fulltextIndex bool, throttle float64) *Index {
+func (c *Corpus) throttle() float64 {
+	if c.IndexThrottle <= 0 {
+		return 0.9
+	}
+	if c.IndexThrottle > 1.0 {
+		return 1.0
+	}
+	return c.IndexThrottle
+}
+
+// NewIndex creates a new index for the .go files provided by the corpus.
+func (c *Corpus) NewIndex() *Index {
 	// initialize Indexer
 	// (use some reasonably sized maps to start)
 	x := &Indexer{

@@ -789,16 +926,17 @@ func NewIndex(c *Corpus, dirnames <-chan string, fulltextIndex bool, throttle fl
 		strings:     make(map[string]string),
 		packages:    make(map[Pak]*Pak, 256),
 		words:       make(map[string]*IndexResult, 8192),
-		throttle:    util.NewThrottle(throttle, 100*time.Millisecond), // run at least 0.1s at a time
+		throttle:    util.NewThrottle(c.throttle(), 100*time.Millisecond), // run at least 0.1s at a time
 		importCount: make(map[string]int),
 		packagePath: make(map[string]map[string]bool),
 		exports:     make(map[string]map[string]SpotKind),
+		idents:      make(map[SpotKind]map[string][]Ident, 4),
 	}

 	// index all files in the directories given by dirnames
 	var wg sync.WaitGroup // outstanding ReadDir + visitFile
 	dirGate := make(chan bool, maxOpenDirs)
-	for dirname := range dirnames {
+	for dirname := range c.fsDirnames() {
 		if c.IndexDirectory != nil && !c.IndexDirectory(dirname) {
 			continue
 		}

@@ -817,14 +955,14 @@ func NewIndex(c *Corpus, dirnames <-chan string, fulltextIndex bool, throttle fl
 			wg.Add(1)
 			go func(fi os.FileInfo) {
 				defer wg.Done()
-				x.visitFile(dirname, fi, fulltextIndex)
+				x.visitFile(dirname, fi)
 			}(fi)
 		}
 		}(dirname)
 	}
 	wg.Wait()

-	if !fulltextIndex {
+	if !c.IndexFullText {
 		// the file set, the current file, and the sources are
 		// not needed after indexing if no text index is built -
 		// help GC and clear them

@@ -863,10 +1001,16 @@ func NewIndex(c *Corpus, dirnames <-chan string, fulltextIndex bool, throttle fl
 	// create text index
 	var suffixes *suffixarray.Index
-	if fulltextIndex {
+	if c.IndexFullText {
 		suffixes = suffixarray.New(x.sources.Bytes())
 	}

+	for _, idMap := range x.idents {
+		for _, ir := range idMap {
+			sort.Sort(byPackage(ir))
+		}
+	}
+
 	return &Index{
 		fset:     x.fset,
 		suffixes: suffixes,

@@ -877,12 +1021,19 @@ func NewIndex(c *Corpus, dirnames <-chan string, fulltextIndex bool, throttle fl
 		importCount: x.importCount,
 		packagePath: x.packagePath,
 		exports:     x.exports,
+		idents:      x.idents,
+		opts: indexOptions{
+			Docs:       x.c.IndexDocs,
+			GoCode:     x.c.IndexGoCode,
+			FullText:   x.c.IndexFullText,
+			MaxResults: x.c.MaxResults,
+		},
 	}
 }

 var ErrFileIndexVersion = errors.New("file index version out of date")

-const fileIndexVersion = 2
+const fileIndexVersion = 3

 // fileIndex is the subset of Index that's gob-encoded for use by
 // Index.Write and Index.Read.

@@ -896,6 +1047,8 @@ type fileIndex struct {
 	ImportCount map[string]int
 	PackagePath map[string]map[string]bool
 	Exports     map[string]map[string]SpotKind
+	Idents      map[SpotKind]map[string][]Ident
+	Opts        indexOptions
 }

 func (x *fileIndex) Write(w io.Writer) error {

@@ -923,6 +1076,8 @@ func (x *Index) WriteTo(w io.Writer) (n int64, err error) {
 		ImportCount: x.importCount,
 		PackagePath: x.packagePath,
 		Exports:     x.exports,
+		Idents:      x.idents,
+		Opts:        x.opts,
 	}
 	if err := fx.Write(w); err != nil {
 		return 0, err

@@ -941,7 +1096,7 @@ func (x *Index) WriteTo(w io.Writer) (n int64, err error) {
 	return n, nil
 }

-// Read reads the index from r into x; x must not be nil.
+// ReadFrom reads the index from r into x; x must not be nil.
 // If r does not also implement io.ByteReader, it will be wrapped in a bufio.Reader.
 // If the index is from an old version, the error is ErrFileIndexVersion.
 func (x *Index) ReadFrom(r io.Reader) (n int64, err error) {

@@ -964,7 +1119,8 @@ func (x *Index) ReadFrom(r io.Reader) (n int64, err error) {
 	x.importCount = fx.ImportCount
 	x.packagePath = fx.PackagePath
 	x.exports = fx.Exports
+	x.idents = fx.Idents
+	x.opts = fx.Opts
 	if fx.Fulltext {
 		x.fset = token.NewFileSet()
 		decode := func(x interface{}) error {

@@ -1003,6 +1159,12 @@ func (x *Index) Exports() map[string]map[string]SpotKind {
 	return x.exports
 }

+// Idents returns a map from identifier type to exported
+// symbol name to the list of identifiers matching that name.
+func (x *Index) Idents() map[SpotKind]map[string][]Ident {
+	return x.idents
+}
+
 func (x *Index) lookupWord(w string) (match *LookupResult, alt *AltWords) {
 	match = x.words[w]
 	alt = x.alts[canonical(w)]

@@ -1027,47 +1189,55 @@ func isIdentifier(s string) bool {
 }

 // For a given query, which is either a single identifier or a qualified
-// identifier, Lookup returns a list of packages, a LookupResult, and a
-// list of alternative spellings, if any. Any and all results may be nil.
-// If the query syntax is wrong, an error is reported.
-func (x *Index) Lookup(query string) (paks HitList, match *LookupResult, alt *AltWords, err error) {
+// identifier, Lookup returns a SearchResult containing packages, a LookupResult, a
+// list of alternative spellings, and identifiers, if any. Any and all results
+// may be nil. If the query syntax is wrong, an error is reported.
+func (x *Index) Lookup(query string) (*SearchResult, error) {
 	ss := strings.Split(query, ".")

 	// check query syntax
 	for _, s := range ss {
 		if !isIdentifier(s) {
-			err = errors.New("all query parts must be identifiers")
-			return
+			return nil, errors.New("all query parts must be identifiers")
 		}
 	}
+	rslt := &SearchResult{
+		Query:  query,
+		Idents: make(map[SpotKind][]Ident, 5),
+	}
 	// handle simple and qualified identifiers
 	switch len(ss) {
 	case 1:
 		ident := ss[0]
-		match, alt = x.lookupWord(ident)
-		if match != nil {
+		rslt.Hit, rslt.Alt = x.lookupWord(ident)
+		if rslt.Hit != nil {
 			// found a match - filter packages with same name
 			// for the list of packages called ident, if any
-			paks = match.Others.filter(ident)
+			rslt.Pak = rslt.Hit.Others.filter(ident)
+		}
+		for k, v := range x.idents {
+			rslt.Idents[k] = v[ident]
 		}

 	case 2:
 		pakname, ident := ss[0], ss[1]
-		match, alt = x.lookupWord(ident)
-		if match != nil {
+		rslt.Hit, rslt.Alt = x.lookupWord(ident)
+		if rslt.Hit != nil {
 			// found a match - filter by package name
 			// (no paks - package names are not qualified)
-			decls := match.Decls.filter(pakname)
-			others := match.Others.filter(pakname)
-			match = &LookupResult{decls, others}
+			decls := rslt.Hit.Decls.filter(pakname)
+			others := rslt.Hit.Others.filter(pakname)
+			rslt.Hit = &LookupResult{decls, others}
+		}
+		for k, v := range x.idents {
+			rslt.Idents[k] = byPackage(v[ident]).filter(pakname)
 		}

 	default:
-		err = errors.New("query is not a (qualified) identifier")
+		return nil, errors.New("query is not a (qualified) identifier")
 	}

-	return
+	return rslt, nil
 }

 func (x *Index) Snippet(i int) *Snippet {
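Editor's note: a hedged sketch of how a caller might consume the reshaped Lookup API via Corpus.Lookup. The import paths and GOROOT value are assumptions (current x/tools layout), and indexing a full tree at startup is slow; this is orientation only, not part of the CL.

// Build a documentation-only index and print identifier hits for a query.
package main

import (
    "fmt"

    "golang.org/x/tools/godoc"     // assumed import path
    "golang.org/x/tools/godoc/vfs" // assumed import path
)

func main() {
    c := godoc.NewCorpus(vfs.OS("/usr/local/go")) // assumed GOROOT
    c.IndexGoCode = false                         // identifier/doc search only
    c.IndexFullText = false
    if err := c.Init(); err != nil {
        panic(err)
    }
    c.UpdateIndex()

    result := c.Lookup("NewRequest") // SearchResult now carries Idents alongside Hit/Alt/Pak
    for kind, ids := range result.Idents {
        for _, id := range ids {
            fmt.Printf("%s: %s.%s %s\n", kind.Name(), id.Package, id.Name, id.Doc)
        }
    }
    if result.Alert != "" {
        fmt.Println("alert:", result.Alert)
    }
}
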
@@ -1213,6 +1383,15 @@ func (c *Corpus) fsDirnames() <-chan string {
 	return ch
 }

+// CompatibleWith reports whether the Index x is compatible with the corpus
+// indexing options set in c.
+func (x *Index) CompatibleWith(c *Corpus) bool {
+	return x.opts.Docs == c.IndexDocs &&
+		x.opts.GoCode == c.IndexGoCode &&
+		x.opts.FullText == c.IndexFullText &&
+		x.opts.MaxResults == c.MaxResults
+}
+
 func (c *Corpus) readIndex(filenames string) error {
 	matches, err := filepath.Glob(filenames)
 	if err != nil {

@@ -1234,6 +1413,9 @@ func (c *Corpus) readIndex(filenames string) error {
 	if _, err := x.ReadFrom(io.MultiReader(files...)); err != nil {
 		return err
 	}
+	if !x.CompatibleWith(c) {
+		return fmt.Errorf("index file options are incompatible: %v", x.opts)
+	}
 	c.searchIndex.Set(x)
 	return nil
 }
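Editor's note: the compatibility check matters when a prebuilt index is loaded from disk, since an index written with one set of options cannot silently serve a corpus configured with another. A rough sketch of the same guard from an embedder's side follows; the filename and import path are assumptions, not part of this CL.

// Illustrative guard mirroring readIndex above.
package main

import (
    "fmt"
    "os"

    "golang.org/x/tools/godoc" // assumed import path
)

func loadIndex(c *godoc.Corpus, path string) (*godoc.Index, error) {
    f, err := os.Open(path)
    if err != nil {
        return nil, err
    }
    defer f.Close()

    x := new(godoc.Index)
    if _, err := x.ReadFrom(f); err != nil {
        return nil, err // ErrFileIndexVersion signals a stale on-disk format
    }
    if !x.CompatibleWith(c) {
        return nil, fmt.Errorf("index %s was built with different indexing options", path)
    }
    return x, nil
}

func main() {
    // Usage sketch only: corpus construction and option wiring are shown earlier.
    _ = loadIndex
}
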
@@ -1249,7 +1431,7 @@ func (c *Corpus) UpdateIndex() {
 	} else if throttle > 1.0 {
 		throttle = 1.0
 	}
-	index := NewIndex(c, c.fsDirnames(), c.MaxResults > 0, throttle)
+	index := c.NewIndex()
 	stop := time.Now()
 	c.searchIndex.Set(index)
 	if c.Verbose {

@@ -7,6 +7,7 @@ package godoc
 import (
 	"bytes"
 	"reflect"
+	"sort"
 	"strings"
 	"testing"

@@ -131,4 +132,70 @@ func testIndex(t *testing.T, ix *Index) {
 	}; !reflect.DeepEqual(got, want) {
 		t.Errorf("Exports = %v; want %v", got, want)
 	}
+
+	if got, want := ix.Idents(), map[SpotKind]map[string][]Ident{
+		ConstDecl: map[string][]Ident{
+			"Pi": []Ident{{"/src/pkg/foo", "foo", "Pi", ""}},
+		},
+		VarDecl: map[string][]Ident{
+			"Foos": []Ident{{"/src/pkg/foo", "foo", "Foos", ""}},
+		},
+		TypeDecl: map[string][]Ident{
+			"Foo": []Ident{{"/src/pkg/foo", "foo", "Foo", "Foo is stuff."}},
+		},
+		FuncDecl: map[string][]Ident{
+			"New": []Ident{{"/src/pkg/foo", "foo", "New", ""}},
+			"X":   []Ident{{"/src/pkg/other/bar", "bar", "X", ""}},
+		},
+	}; !reflect.DeepEqual(got, want) {
+		t.Errorf("Idents = %v; want %v", got, want)
+	}
+}
+
+func TestIdentResultSort(t *testing.T) {
+	for _, tc := range []struct {
+		ir  []Ident
+		exp []Ident
+	}{
+		{
+			ir: []Ident{
+				{"/a/b/pkg2", "pkg2", "MyFunc2", ""},
+				{"/b/d/pkg3", "pkg3", "MyFunc3", ""},
+				{"/a/b/pkg1", "pkg1", "MyFunc1", ""},
+			},
+			exp: []Ident{
+				{"/a/b/pkg1", "pkg1", "MyFunc1", ""},
+				{"/a/b/pkg2", "pkg2", "MyFunc2", ""},
+				{"/b/d/pkg3", "pkg3", "MyFunc3", ""},
+			},
+		},
+	} {
+		if sort.Sort(byPackage(tc.ir)); !reflect.DeepEqual(tc.ir, tc.exp) {
+			t.Errorf("got: %v, want %v", tc.ir, tc.exp)
+		}
+	}
+}
+
+func TestIdentPackageFilter(t *testing.T) {
+	for _, tc := range []struct {
+		ir  []Ident
+		pak string
+		exp []Ident
+	}{
+		{
+			ir: []Ident{
+				{"/a/b/pkg2", "pkg2", "MyFunc2", ""},
+				{"/b/d/pkg3", "pkg3", "MyFunc3", ""},
+				{"/a/b/pkg1", "pkg1", "MyFunc1", ""},
+			},
+			pak: "pkg2",
+			exp: []Ident{
+				{"/a/b/pkg2", "pkg2", "MyFunc2", ""},
+			},
+		},
+	} {
+		if res := byPackage(tc.ir).filter(tc.pak); !reflect.DeepEqual(res, tc.exp) {
+			t.Errorf("got: %v, want %v", res, tc.exp)
+		}
+	}
 }

@@ -25,30 +25,30 @@ type SearchResult struct {
 	Found    int         // number of textual occurrences found
 	Textual  []FileLines // textual matches of Query
 	Complete bool        // true if all textual occurrences of Query are reported
+	Idents   map[SpotKind][]Ident
 }

 func (c *Corpus) Lookup(query string) SearchResult {
-	var result SearchResult
-	result.Query = query
+	result := &SearchResult{Query: query}

 	index, timestamp := c.CurrentIndex()
 	if index != nil {
 		// identifier search
 		var err error
-		result.Pak, result.Hit, result.Alt, err = index.Lookup(query)
-		if err != nil && c.MaxResults <= 0 {
+		result, err = index.Lookup(query)
+		if err != nil && !c.IndexFullText {
 			// ignore the error if full text search is enabled
 			// since the query may be a valid regular expression
 			result.Alert = "Error in query string: " + err.Error()
-			return result
+			return *result
 		}

 		// full text search
-		if c.MaxResults > 0 && query != "" {
+		if c.IndexFullText && query != "" {
 			rx, err := regexp.Compile(query)
 			if err != nil {
 				result.Alert = "Error in query regular expression: " + err.Error()
-				return result
+				return *result
 			}
 			// If we get maxResults+1 results we know that there are more than
 			// maxResults results and thus the result may be incomplete (to be

@@ -72,7 +72,7 @@ func (c *Corpus) Lookup(query string) SearchResult {
 		result.Alert = "Search index disabled: no results available"
 	}

-	return result
+	return *result
 }

 func (p *Presentation) HandleSearch(w http.ResponseWriter, r *http.Request) {

@@ -84,8 +84,17 @@ func (p *Presentation) HandleSearch(w http.ResponseWriter, r *http.Request) {
 		return
 	}

+	haveResults := result.Hit != nil || len(result.Textual) > 0
+	if !haveResults {
+		for _, ir := range result.Idents {
+			if ir != nil {
+				haveResults = true
+				break
+			}
+		}
+	}
 	var title string
-	if result.Hit != nil || len(result.Textual) > 0 {
+	if haveResults {
 		title = fmt.Sprintf(`Results for query %q`, query)
 	} else {
 		title = fmt.Sprintf(`No results found for query %q`, query)

@@ -34,6 +34,23 @@ const (
 	nKinds
 )

+var (
+	// These must match the SpotKind values above.
+	name = []string{
+		"Packages",
+		"Imports",
+		"Constants",
+		"Types",
+		"Variables",
+		"Functions",
+		"Methods",
+		"Uses",
+		"Unknown",
+	}
+)
+
+func (x SpotKind) Name() string { return name[x] }
+
 func init() {
 	// sanity check: if nKinds is too large, the SpotInfo
 	// accessor functions may need to be updated

@@ -28,6 +28,21 @@
 		</table>
 	</p>
 {{end}}
+{{range $key, $val := .Idents}}
+	{{if $val}}
+		<h2 id="Global">{{$key.Name}}</h2>
+		{{range $val}}
+			{{$pkg_html := pkgLink .Path | html}}
+			{{$doc_html := docLink .Path .Name| html}}
+			<a href="/{{$pkg_html}}">{{html .Package}}</a>.<a href="{{$doc_html}}">{{.Name}}</a>
+			{{if .Doc}}
+				<p>{{comment_html .Doc}}</p>
+			{{else}}
+				<p><em>No documentation available</em></p>
+			{{end}}
+		{{end}}
+	{{end}}
+{{end}}
 {{with .Hit}}
 	{{with .Decls}}
 		<h2 id="Global">Package-level declarations</h2>

@@ -22,6 +22,13 @@ QUERY

 ---------------------------------------

+*/}}{{range $key, $val := .Idents}}{{if $val}}{{$key.Name}}
+{{range $val.Idents}}	{{.Path}}.{{.Name}}
+{{end}}
+{{end}}{{end}}{{/* .Idents */}}{{/*
+
+---------------------------------------
+
 */}}{{with .Hit}}{{with .Decls}}PACKAGE-LEVEL DECLARATIONS

 {{range .}}package {{.Pak.Name}}

@@ -1391,6 +1391,21 @@ function PlaygroundOutput(el) {
 		</table>
 	</p>
 {{end}}
+{{range $key, $val := .Idents}}
+	{{if $val}}
+		<h2 id="Global">{{$key.Name}}</h2>
+		{{range $val}}
+			{{$pkg_html := pkgLink .Path | html}}
+			{{$doc_html := docLink .Path .Name| html}}
+			<a href="/{{$pkg_html}}">{{html .Package}}</a>.<a href="{{$doc_html}}">{{.Name}}</a>
+			{{if .Doc}}
+				<p>{{comment_html .Doc}}</p>
+			{{else}}
+				<p><em>No documentation available</em></p>
+			{{end}}
+		{{end}}
+	{{end}}
+{{end}}
 {{with .Hit}}
 	{{with .Decls}}
 		<h2 id="Global">Package-level declarations</h2>

@@ -1495,6 +1510,13 @@ function PlaygroundOutput(el) {

 ---------------------------------------

+*/}}{{range $key, $val := .Idents}}{{if $val}}{{$key.Name}}
+{{range $val.Idents}}	{{.Path}}.{{.Name}}
+{{end}}
+{{end}}{{end}}{{/* .Idents */}}{{/*
+
+---------------------------------------
+
 */}}{{with .Hit}}{{with .Decls}}PACKAGE-LEVEL DECLARATIONS

 {{range .}}package {{.Pak.Name}}