tools: remove go1.8-tagged files
Change-Id: Ib52b85e1c981b6fca55c472120371a0cd37d2dc9 Reviewed-on: https://go-review.googlesource.com/32816 Reviewed-by: Robert Griesemer <gri@golang.org>
This commit is contained in:
parent
be0bbf2399
commit
701d657347
|
@ -2,8 +2,6 @@
|
||||||
// Use of this source code is governed by a BSD-style
|
// Use of this source code is governed by a BSD-style
|
||||||
// license that can be found in the LICENSE file.
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
// +build !go1.8
|
|
||||||
|
|
||||||
// Stringer is a tool to automate the creation of methods that satisfy the fmt.Stringer
|
// Stringer is a tool to automate the creation of methods that satisfy the fmt.Stringer
|
||||||
// interface. Given the name of a (signed or unsigned) integer type T that has constants
|
// interface. Given the name of a (signed or unsigned) integer type T that has constants
|
||||||
// defined, stringer will create a new self-contained Go source file implementing
|
// defined, stringer will create a new self-contained Go source file implementing
|
||||||
|
|
|
@ -1,638 +0,0 @@
|
||||||
// Copyright 2014 The Go Authors. All rights reserved.
|
|
||||||
// Use of this source code is governed by a BSD-style
|
|
||||||
// license that can be found in the LICENSE file.
|
|
||||||
|
|
||||||
// +build go1.8
|
|
||||||
|
|
||||||
// Stringer is a tool to automate the creation of methods that satisfy the fmt.Stringer
|
|
||||||
// interface. Given the name of a (signed or unsigned) integer type T that has constants
|
|
||||||
// defined, stringer will create a new self-contained Go source file implementing
|
|
||||||
// func (t T) String() string
|
|
||||||
// The file is created in the same package and directory as the package that defines T.
|
|
||||||
// It has helpful defaults designed for use with go generate.
|
|
||||||
//
|
|
||||||
// Stringer works best with constants that are consecutive values such as created using iota,
|
|
||||||
// but creates good code regardless. In the future it might also provide custom support for
|
|
||||||
// constant sets that are bit patterns.
|
|
||||||
//
|
|
||||||
// For example, given this snippet,
|
|
||||||
//
|
|
||||||
// package painkiller
|
|
||||||
//
|
|
||||||
// type Pill int
|
|
||||||
//
|
|
||||||
// const (
|
|
||||||
// Placebo Pill = iota
|
|
||||||
// Aspirin
|
|
||||||
// Ibuprofen
|
|
||||||
// Paracetamol
|
|
||||||
// Acetaminophen = Paracetamol
|
|
||||||
// )
|
|
||||||
//
|
|
||||||
// running this command
|
|
||||||
//
|
|
||||||
// stringer -type=Pill
|
|
||||||
//
|
|
||||||
// in the same directory will create the file pill_string.go, in package painkiller,
|
|
||||||
// containing a definition of
|
|
||||||
//
|
|
||||||
// func (Pill) String() string
|
|
||||||
//
|
|
||||||
// That method will translate the value of a Pill constant to the string representation
|
|
||||||
// of the respective constant name, so that the call fmt.Print(painkiller.Aspirin) will
|
|
||||||
// print the string "Aspirin".
|
|
||||||
//
|
|
||||||
// Typically this process would be run using go generate, like this:
|
|
||||||
//
|
|
||||||
// //go:generate stringer -type=Pill
|
|
||||||
//
|
|
||||||
// If multiple constants have the same value, the lexically first matching name will
|
|
||||||
// be used (in the example, Acetaminophen will print as "Paracetamol").
|
|
||||||
//
|
|
||||||
// With no arguments, it processes the package in the current directory.
|
|
||||||
// Otherwise, the arguments must name a single directory holding a Go package
|
|
||||||
// or a set of Go source files that represent a single Go package.
|
|
||||||
//
|
|
||||||
// The -type flag accepts a comma-separated list of types so a single run can
|
|
||||||
// generate methods for multiple types. The default output file is t_string.go,
|
|
||||||
// where t is the lower-cased name of the first type listed. It can be overridden
|
|
||||||
// with the -output flag.
|
|
||||||
//
|
|
||||||
package main // import "golang.org/x/tools/cmd/stringer"
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"flag"
|
|
||||||
"fmt"
|
|
||||||
"go/ast"
|
|
||||||
"go/build"
|
|
||||||
exact "go/constant"
|
|
||||||
"go/format"
|
|
||||||
"go/importer"
|
|
||||||
"go/parser"
|
|
||||||
"go/token"
|
|
||||||
"go/types"
|
|
||||||
"io/ioutil"
|
|
||||||
"log"
|
|
||||||
"os"
|
|
||||||
"path/filepath"
|
|
||||||
"sort"
|
|
||||||
"strings"
|
|
||||||
)
|
|
||||||
|
|
||||||
var (
|
|
||||||
typeNames = flag.String("type", "", "comma-separated list of type names; must be set")
|
|
||||||
output = flag.String("output", "", "output file name; default srcdir/<type>_string.go")
|
|
||||||
)
|
|
||||||
|
|
||||||
// Usage is a replacement usage function for the flags package.
|
|
||||||
func Usage() {
|
|
||||||
fmt.Fprintf(os.Stderr, "Usage of %s:\n", os.Args[0])
|
|
||||||
fmt.Fprintf(os.Stderr, "\tstringer [flags] -type T [directory]\n")
|
|
||||||
fmt.Fprintf(os.Stderr, "\tstringer [flags] -type T files... # Must be a single package\n")
|
|
||||||
fmt.Fprintf(os.Stderr, "For more information, see:\n")
|
|
||||||
fmt.Fprintf(os.Stderr, "\thttp://godoc.org/golang.org/x/tools/cmd/stringer\n")
|
|
||||||
fmt.Fprintf(os.Stderr, "Flags:\n")
|
|
||||||
flag.PrintDefaults()
|
|
||||||
}
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
log.SetFlags(0)
|
|
||||||
log.SetPrefix("stringer: ")
|
|
||||||
flag.Usage = Usage
|
|
||||||
flag.Parse()
|
|
||||||
if len(*typeNames) == 0 {
|
|
||||||
flag.Usage()
|
|
||||||
os.Exit(2)
|
|
||||||
}
|
|
||||||
types := strings.Split(*typeNames, ",")
|
|
||||||
|
|
||||||
// We accept either one directory or a list of files. Which do we have?
|
|
||||||
args := flag.Args()
|
|
||||||
if len(args) == 0 {
|
|
||||||
// Default: process whole package in current directory.
|
|
||||||
args = []string{"."}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Parse the package once.
|
|
||||||
var (
|
|
||||||
dir string
|
|
||||||
g Generator
|
|
||||||
)
|
|
||||||
if len(args) == 1 && isDirectory(args[0]) {
|
|
||||||
dir = args[0]
|
|
||||||
g.parsePackageDir(args[0])
|
|
||||||
} else {
|
|
||||||
dir = filepath.Dir(args[0])
|
|
||||||
g.parsePackageFiles(args)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Print the header and package clause.
|
|
||||||
g.Printf("// Code generated by \"stringer %s\"; DO NOT EDIT\n", strings.Join(os.Args[1:], " "))
|
|
||||||
g.Printf("\n")
|
|
||||||
g.Printf("package %s", g.pkg.name)
|
|
||||||
g.Printf("\n")
|
|
||||||
g.Printf("import \"fmt\"\n") // Used by all methods.
|
|
||||||
|
|
||||||
// Run generate for each type.
|
|
||||||
for _, typeName := range types {
|
|
||||||
g.generate(typeName)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Format the output.
|
|
||||||
src := g.format()
|
|
||||||
|
|
||||||
// Write to file.
|
|
||||||
outputName := *output
|
|
||||||
if outputName == "" {
|
|
||||||
baseName := fmt.Sprintf("%s_string.go", types[0])
|
|
||||||
outputName = filepath.Join(dir, strings.ToLower(baseName))
|
|
||||||
}
|
|
||||||
err := ioutil.WriteFile(outputName, src, 0644)
|
|
||||||
if err != nil {
|
|
||||||
log.Fatalf("writing output: %s", err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// isDirectory reports whether the named file is a directory.
|
|
||||||
func isDirectory(name string) bool {
|
|
||||||
info, err := os.Stat(name)
|
|
||||||
if err != nil {
|
|
||||||
log.Fatal(err)
|
|
||||||
}
|
|
||||||
return info.IsDir()
|
|
||||||
}
|
|
||||||
|
|
||||||
// Generator holds the state of the analysis. Primarily used to buffer
|
|
||||||
// the output for format.Source.
|
|
||||||
type Generator struct {
|
|
||||||
buf bytes.Buffer // Accumulated output.
|
|
||||||
pkg *Package // Package we are scanning.
|
|
||||||
}
|
|
||||||
|
|
||||||
func (g *Generator) Printf(format string, args ...interface{}) {
|
|
||||||
fmt.Fprintf(&g.buf, format, args...)
|
|
||||||
}
|
|
||||||
|
|
||||||
// File holds a single parsed file and associated data.
|
|
||||||
type File struct {
|
|
||||||
pkg *Package // Package to which this file belongs.
|
|
||||||
file *ast.File // Parsed AST.
|
|
||||||
// These fields are reset for each type being generated.
|
|
||||||
typeName string // Name of the constant type.
|
|
||||||
values []Value // Accumulator for constant values of that type.
|
|
||||||
}
|
|
||||||
|
|
||||||
type Package struct {
|
|
||||||
dir string
|
|
||||||
name string
|
|
||||||
defs map[*ast.Ident]types.Object
|
|
||||||
files []*File
|
|
||||||
typesPkg *types.Package
|
|
||||||
}
|
|
||||||
|
|
||||||
// parsePackageDir parses the package residing in the directory.
|
|
||||||
func (g *Generator) parsePackageDir(directory string) {
|
|
||||||
pkg, err := build.Default.ImportDir(directory, 0)
|
|
||||||
if err != nil {
|
|
||||||
log.Fatalf("cannot process directory %s: %s", directory, err)
|
|
||||||
}
|
|
||||||
var names []string
|
|
||||||
names = append(names, pkg.GoFiles...)
|
|
||||||
names = append(names, pkg.CgoFiles...)
|
|
||||||
// TODO: Need to think about constants in test files. Maybe write type_string_test.go
|
|
||||||
// in a separate pass? For later.
|
|
||||||
// names = append(names, pkg.TestGoFiles...) // These are also in the "foo" package.
|
|
||||||
names = append(names, pkg.SFiles...)
|
|
||||||
names = prefixDirectory(directory, names)
|
|
||||||
g.parsePackage(directory, names, nil)
|
|
||||||
}
|
|
||||||
|
|
||||||
// parsePackageFiles parses the package occupying the named files.
|
|
||||||
func (g *Generator) parsePackageFiles(names []string) {
|
|
||||||
g.parsePackage(".", names, nil)
|
|
||||||
}
|
|
||||||
|
|
||||||
// prefixDirectory places the directory name on the beginning of each name in the list.
|
|
||||||
func prefixDirectory(directory string, names []string) []string {
|
|
||||||
if directory == "." {
|
|
||||||
return names
|
|
||||||
}
|
|
||||||
ret := make([]string, len(names))
|
|
||||||
for i, name := range names {
|
|
||||||
ret[i] = filepath.Join(directory, name)
|
|
||||||
}
|
|
||||||
return ret
|
|
||||||
}
|
|
||||||
|
|
||||||
// parsePackage analyzes the single package constructed from the named files.
|
|
||||||
// If text is non-nil, it is a string to be used instead of the content of the file,
|
|
||||||
// to be used for testing. parsePackage exits if there is an error.
|
|
||||||
func (g *Generator) parsePackage(directory string, names []string, text interface{}) {
|
|
||||||
var files []*File
|
|
||||||
var astFiles []*ast.File
|
|
||||||
g.pkg = new(Package)
|
|
||||||
fs := token.NewFileSet()
|
|
||||||
for _, name := range names {
|
|
||||||
if !strings.HasSuffix(name, ".go") {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
parsedFile, err := parser.ParseFile(fs, name, text, 0)
|
|
||||||
if err != nil {
|
|
||||||
log.Fatalf("parsing package: %s: %s", name, err)
|
|
||||||
}
|
|
||||||
astFiles = append(astFiles, parsedFile)
|
|
||||||
files = append(files, &File{
|
|
||||||
file: parsedFile,
|
|
||||||
pkg: g.pkg,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
if len(astFiles) == 0 {
|
|
||||||
log.Fatalf("%s: no buildable Go files", directory)
|
|
||||||
}
|
|
||||||
g.pkg.name = astFiles[0].Name.Name
|
|
||||||
g.pkg.files = files
|
|
||||||
g.pkg.dir = directory
|
|
||||||
// Type check the package.
|
|
||||||
g.pkg.check(fs, astFiles)
|
|
||||||
}
|
|
||||||
|
|
||||||
// check type-checks the package. The package must be OK to proceed.
|
|
||||||
func (pkg *Package) check(fs *token.FileSet, astFiles []*ast.File) {
|
|
||||||
pkg.defs = make(map[*ast.Ident]types.Object)
|
|
||||||
config := types.Config{Importer: importer.Default(), FakeImportC: true}
|
|
||||||
info := &types.Info{
|
|
||||||
Defs: pkg.defs,
|
|
||||||
}
|
|
||||||
typesPkg, err := config.Check(pkg.dir, fs, astFiles, info)
|
|
||||||
if err != nil {
|
|
||||||
log.Fatalf("checking package: %s", err)
|
|
||||||
}
|
|
||||||
pkg.typesPkg = typesPkg
|
|
||||||
}
|
|
||||||
|
|
||||||
// generate produces the String method for the named type.
|
|
||||||
func (g *Generator) generate(typeName string) {
|
|
||||||
values := make([]Value, 0, 100)
|
|
||||||
for _, file := range g.pkg.files {
|
|
||||||
// Set the state for this run of the walker.
|
|
||||||
file.typeName = typeName
|
|
||||||
file.values = nil
|
|
||||||
if file.file != nil {
|
|
||||||
ast.Inspect(file.file, file.genDecl)
|
|
||||||
values = append(values, file.values...)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(values) == 0 {
|
|
||||||
log.Fatalf("no values defined for type %s", typeName)
|
|
||||||
}
|
|
||||||
runs := splitIntoRuns(values)
|
|
||||||
// The decision of which pattern to use depends on the number of
|
|
||||||
// runs in the numbers. If there's only one, it's easy. For more than
|
|
||||||
// one, there's a tradeoff between complexity and size of the data
|
|
||||||
// and code vs. the simplicity of a map. A map takes more space,
|
|
||||||
// but so does the code. The decision here (crossover at 10) is
|
|
||||||
// arbitrary, but considers that for large numbers of runs the cost
|
|
||||||
// of the linear scan in the switch might become important, and
|
|
||||||
// rather than use yet another algorithm such as binary search,
|
|
||||||
// we punt and use a map. In any case, the likelihood of a map
|
|
||||||
// being necessary for any realistic example other than bitmasks
|
|
||||||
// is very low. And bitmasks probably deserve their own analysis,
|
|
||||||
// to be done some other day.
|
|
||||||
switch {
|
|
||||||
case len(runs) == 1:
|
|
||||||
g.buildOneRun(runs, typeName)
|
|
||||||
case len(runs) <= 10:
|
|
||||||
g.buildMultipleRuns(runs, typeName)
|
|
||||||
default:
|
|
||||||
g.buildMap(runs, typeName)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// splitIntoRuns breaks the values into runs of contiguous sequences.
|
|
||||||
// For example, given 1,2,3,5,6,7 it returns {1,2,3},{5,6,7}.
|
|
||||||
// The input slice is known to be non-empty.
|
|
||||||
func splitIntoRuns(values []Value) [][]Value {
|
|
||||||
// We use stable sort so the lexically first name is chosen for equal elements.
|
|
||||||
sort.Stable(byValue(values))
|
|
||||||
// Remove duplicates. Stable sort has put the one we want to print first,
|
|
||||||
// so use that one. The String method won't care about which named constant
|
|
||||||
// was the argument, so the first name for the given value is the only one to keep.
|
|
||||||
// We need to do this because identical values would cause the switch or map
|
|
||||||
// to fail to compile.
|
|
||||||
j := 1
|
|
||||||
for i := 1; i < len(values); i++ {
|
|
||||||
if values[i].value != values[i-1].value {
|
|
||||||
values[j] = values[i]
|
|
||||||
j++
|
|
||||||
}
|
|
||||||
}
|
|
||||||
values = values[:j]
|
|
||||||
runs := make([][]Value, 0, 10)
|
|
||||||
for len(values) > 0 {
|
|
||||||
// One contiguous sequence per outer loop.
|
|
||||||
i := 1
|
|
||||||
for i < len(values) && values[i].value == values[i-1].value+1 {
|
|
||||||
i++
|
|
||||||
}
|
|
||||||
runs = append(runs, values[:i])
|
|
||||||
values = values[i:]
|
|
||||||
}
|
|
||||||
return runs
|
|
||||||
}
|
|
||||||
|
|
||||||
// format returns the gofmt-ed contents of the Generator's buffer.
|
|
||||||
func (g *Generator) format() []byte {
|
|
||||||
src, err := format.Source(g.buf.Bytes())
|
|
||||||
if err != nil {
|
|
||||||
// Should never happen, but can arise when developing this code.
|
|
||||||
// The user can compile the output to see the error.
|
|
||||||
log.Printf("warning: internal error: invalid Go generated: %s", err)
|
|
||||||
log.Printf("warning: compile the package to analyze the error")
|
|
||||||
return g.buf.Bytes()
|
|
||||||
}
|
|
||||||
return src
|
|
||||||
}
|
|
||||||
|
|
||||||
// Value represents a declared constant.
|
|
||||||
type Value struct {
|
|
||||||
name string // The name of the constant.
|
|
||||||
// The value is stored as a bit pattern alone. The boolean tells us
|
|
||||||
// whether to interpret it as an int64 or a uint64; the only place
|
|
||||||
// this matters is when sorting.
|
|
||||||
// Much of the time the str field is all we need; it is printed
|
|
||||||
// by Value.String.
|
|
||||||
value uint64 // Will be converted to int64 when needed.
|
|
||||||
signed bool // Whether the constant is a signed type.
|
|
||||||
str string // The string representation given by the "go/exact" package.
|
|
||||||
}
|
|
||||||
|
|
||||||
func (v *Value) String() string {
|
|
||||||
return v.str
|
|
||||||
}
|
|
||||||
|
|
||||||
// byValue lets us sort the constants into increasing order.
|
|
||||||
// We take care in the Less method to sort in signed or unsigned order,
|
|
||||||
// as appropriate.
|
|
||||||
type byValue []Value
|
|
||||||
|
|
||||||
func (b byValue) Len() int { return len(b) }
|
|
||||||
func (b byValue) Swap(i, j int) { b[i], b[j] = b[j], b[i] }
|
|
||||||
func (b byValue) Less(i, j int) bool {
|
|
||||||
if b[i].signed {
|
|
||||||
return int64(b[i].value) < int64(b[j].value)
|
|
||||||
}
|
|
||||||
return b[i].value < b[j].value
|
|
||||||
}
|
|
||||||
|
|
||||||
// genDecl processes one declaration clause.
|
|
||||||
func (f *File) genDecl(node ast.Node) bool {
|
|
||||||
decl, ok := node.(*ast.GenDecl)
|
|
||||||
if !ok || decl.Tok != token.CONST {
|
|
||||||
// We only care about const declarations.
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
// The name of the type of the constants we are declaring.
|
|
||||||
// Can change if this is a multi-element declaration.
|
|
||||||
typ := ""
|
|
||||||
// Loop over the elements of the declaration. Each element is a ValueSpec:
|
|
||||||
// a list of names possibly followed by a type, possibly followed by values.
|
|
||||||
// If the type and value are both missing, we carry down the type (and value,
|
|
||||||
// but the "go/types" package takes care of that).
|
|
||||||
for _, spec := range decl.Specs {
|
|
||||||
vspec := spec.(*ast.ValueSpec) // Guaranteed to succeed as this is CONST.
|
|
||||||
if vspec.Type == nil && len(vspec.Values) > 0 {
|
|
||||||
// "X = 1". With no type but a value, the constant is untyped.
|
|
||||||
// Skip this vspec and reset the remembered type.
|
|
||||||
typ = ""
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if vspec.Type != nil {
|
|
||||||
// "X T". We have a type. Remember it.
|
|
||||||
ident, ok := vspec.Type.(*ast.Ident)
|
|
||||||
if !ok {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
typ = ident.Name
|
|
||||||
}
|
|
||||||
if typ != f.typeName {
|
|
||||||
// This is not the type we're looking for.
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
// We now have a list of names (from one line of source code) all being
|
|
||||||
// declared with the desired type.
|
|
||||||
// Grab their names and actual values and store them in f.values.
|
|
||||||
for _, name := range vspec.Names {
|
|
||||||
if name.Name == "_" {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
// This dance lets the type checker find the values for us. It's a
|
|
||||||
// bit tricky: look up the object declared by the name, find its
|
|
||||||
// types.Const, and extract its value.
|
|
||||||
obj, ok := f.pkg.defs[name]
|
|
||||||
if !ok {
|
|
||||||
log.Fatalf("no value for constant %s", name)
|
|
||||||
}
|
|
||||||
info := obj.Type().Underlying().(*types.Basic).Info()
|
|
||||||
if info&types.IsInteger == 0 {
|
|
||||||
log.Fatalf("can't handle non-integer constant type %s", typ)
|
|
||||||
}
|
|
||||||
value := obj.(*types.Const).Val() // Guaranteed to succeed as this is CONST.
|
|
||||||
if value.Kind() != exact.Int {
|
|
||||||
log.Fatalf("can't happen: constant is not an integer %s", name)
|
|
||||||
}
|
|
||||||
i64, isInt := exact.Int64Val(value)
|
|
||||||
u64, isUint := exact.Uint64Val(value)
|
|
||||||
if !isInt && !isUint {
|
|
||||||
log.Fatalf("internal error: value of %s is not an integer: %s", name, value.String())
|
|
||||||
}
|
|
||||||
if !isInt {
|
|
||||||
u64 = uint64(i64)
|
|
||||||
}
|
|
||||||
v := Value{
|
|
||||||
name: name.Name,
|
|
||||||
value: u64,
|
|
||||||
signed: info&types.IsUnsigned == 0,
|
|
||||||
str: value.String(),
|
|
||||||
}
|
|
||||||
f.values = append(f.values, v)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
// Helpers
|
|
||||||
|
|
||||||
// usize returns the number of bits of the smallest unsigned integer
|
|
||||||
// type that will hold n. Used to create the smallest possible slice of
|
|
||||||
// integers to use as indexes into the concatenated strings.
|
|
||||||
func usize(n int) int {
|
|
||||||
switch {
|
|
||||||
case n < 1<<8:
|
|
||||||
return 8
|
|
||||||
case n < 1<<16:
|
|
||||||
return 16
|
|
||||||
default:
|
|
||||||
// 2^32 is enough constants for anyone.
|
|
||||||
return 32
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// declareIndexAndNameVars declares the index slices and concatenated names
|
|
||||||
// strings representing the runs of values.
|
|
||||||
func (g *Generator) declareIndexAndNameVars(runs [][]Value, typeName string) {
|
|
||||||
var indexes, names []string
|
|
||||||
for i, run := range runs {
|
|
||||||
index, name := g.createIndexAndNameDecl(run, typeName, fmt.Sprintf("_%d", i))
|
|
||||||
indexes = append(indexes, index)
|
|
||||||
names = append(names, name)
|
|
||||||
}
|
|
||||||
g.Printf("const (\n")
|
|
||||||
for _, name := range names {
|
|
||||||
g.Printf("\t%s\n", name)
|
|
||||||
}
|
|
||||||
g.Printf(")\n\n")
|
|
||||||
g.Printf("var (")
|
|
||||||
for _, index := range indexes {
|
|
||||||
g.Printf("\t%s\n", index)
|
|
||||||
}
|
|
||||||
g.Printf(")\n\n")
|
|
||||||
}
|
|
||||||
|
|
||||||
// declareIndexAndNameVar is the single-run version of declareIndexAndNameVars
|
|
||||||
func (g *Generator) declareIndexAndNameVar(run []Value, typeName string) {
|
|
||||||
index, name := g.createIndexAndNameDecl(run, typeName, "")
|
|
||||||
g.Printf("const %s\n", name)
|
|
||||||
g.Printf("var %s\n", index)
|
|
||||||
}
|
|
||||||
|
|
||||||
// createIndexAndNameDecl returns the pair of declarations for the run. The caller will add "const" and "var".
|
|
||||||
func (g *Generator) createIndexAndNameDecl(run []Value, typeName string, suffix string) (string, string) {
|
|
||||||
b := new(bytes.Buffer)
|
|
||||||
indexes := make([]int, len(run))
|
|
||||||
for i := range run {
|
|
||||||
b.WriteString(run[i].name)
|
|
||||||
indexes[i] = b.Len()
|
|
||||||
}
|
|
||||||
nameConst := fmt.Sprintf("_%s_name%s = %q", typeName, suffix, b.String())
|
|
||||||
nameLen := b.Len()
|
|
||||||
b.Reset()
|
|
||||||
fmt.Fprintf(b, "_%s_index%s = [...]uint%d{0, ", typeName, suffix, usize(nameLen))
|
|
||||||
for i, v := range indexes {
|
|
||||||
if i > 0 {
|
|
||||||
fmt.Fprintf(b, ", ")
|
|
||||||
}
|
|
||||||
fmt.Fprintf(b, "%d", v)
|
|
||||||
}
|
|
||||||
fmt.Fprintf(b, "}")
|
|
||||||
return b.String(), nameConst
|
|
||||||
}
|
|
||||||
|
|
||||||
// declareNameVars declares the concatenated names string representing all the values in the runs.
|
|
||||||
func (g *Generator) declareNameVars(runs [][]Value, typeName string, suffix string) {
|
|
||||||
g.Printf("const _%s_name%s = \"", typeName, suffix)
|
|
||||||
for _, run := range runs {
|
|
||||||
for i := range run {
|
|
||||||
g.Printf("%s", run[i].name)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
g.Printf("\"\n")
|
|
||||||
}
|
|
||||||
|
|
||||||
// buildOneRun generates the variables and String method for a single run of contiguous values.
|
|
||||||
func (g *Generator) buildOneRun(runs [][]Value, typeName string) {
|
|
||||||
values := runs[0]
|
|
||||||
g.Printf("\n")
|
|
||||||
g.declareIndexAndNameVar(values, typeName)
|
|
||||||
// The generated code is simple enough to write as a Printf format.
|
|
||||||
lessThanZero := ""
|
|
||||||
if values[0].signed {
|
|
||||||
lessThanZero = "i < 0 || "
|
|
||||||
}
|
|
||||||
if values[0].value == 0 { // Signed or unsigned, 0 is still 0.
|
|
||||||
g.Printf(stringOneRun, typeName, usize(len(values)), lessThanZero)
|
|
||||||
} else {
|
|
||||||
g.Printf(stringOneRunWithOffset, typeName, values[0].String(), usize(len(values)), lessThanZero)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Arguments to format are:
|
|
||||||
// [1]: type name
|
|
||||||
// [2]: size of index element (8 for uint8 etc.)
|
|
||||||
// [3]: less than zero check (for signed types)
|
|
||||||
const stringOneRun = `func (i %[1]s) String() string {
|
|
||||||
if %[3]si >= %[1]s(len(_%[1]s_index)-1) {
|
|
||||||
return fmt.Sprintf("%[1]s(%%d)", i)
|
|
||||||
}
|
|
||||||
return _%[1]s_name[_%[1]s_index[i]:_%[1]s_index[i+1]]
|
|
||||||
}
|
|
||||||
`
|
|
||||||
|
|
||||||
// Arguments to format are:
|
|
||||||
// [1]: type name
|
|
||||||
// [2]: lowest defined value for type, as a string
|
|
||||||
// [3]: size of index element (8 for uint8 etc.)
|
|
||||||
// [4]: less than zero check (for signed types)
|
|
||||||
/*
|
|
||||||
*/
|
|
||||||
const stringOneRunWithOffset = `func (i %[1]s) String() string {
|
|
||||||
i -= %[2]s
|
|
||||||
if %[4]si >= %[1]s(len(_%[1]s_index)-1) {
|
|
||||||
return fmt.Sprintf("%[1]s(%%d)", i + %[2]s)
|
|
||||||
}
|
|
||||||
return _%[1]s_name[_%[1]s_index[i] : _%[1]s_index[i+1]]
|
|
||||||
}
|
|
||||||
`
|
|
||||||
|
|
||||||
// buildMultipleRuns generates the variables and String method for multiple runs of contiguous values.
|
|
||||||
// For this pattern, a single Printf format won't do.
|
|
||||||
func (g *Generator) buildMultipleRuns(runs [][]Value, typeName string) {
|
|
||||||
g.Printf("\n")
|
|
||||||
g.declareIndexAndNameVars(runs, typeName)
|
|
||||||
g.Printf("func (i %s) String() string {\n", typeName)
|
|
||||||
g.Printf("\tswitch {\n")
|
|
||||||
for i, values := range runs {
|
|
||||||
if len(values) == 1 {
|
|
||||||
g.Printf("\tcase i == %s:\n", &values[0])
|
|
||||||
g.Printf("\t\treturn _%s_name_%d\n", typeName, i)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
g.Printf("\tcase %s <= i && i <= %s:\n", &values[0], &values[len(values)-1])
|
|
||||||
if values[0].value != 0 {
|
|
||||||
g.Printf("\t\ti -= %s\n", &values[0])
|
|
||||||
}
|
|
||||||
g.Printf("\t\treturn _%s_name_%d[_%s_index_%d[i]:_%s_index_%d[i+1]]\n",
|
|
||||||
typeName, i, typeName, i, typeName, i)
|
|
||||||
}
|
|
||||||
g.Printf("\tdefault:\n")
|
|
||||||
g.Printf("\t\treturn fmt.Sprintf(\"%s(%%d)\", i)\n", typeName)
|
|
||||||
g.Printf("\t}\n")
|
|
||||||
g.Printf("}\n")
|
|
||||||
}
|
|
||||||
|
|
||||||
// buildMap handles the case where the space is so sparse a map is a reasonable fallback.
|
|
||||||
// It's a rare situation but has simple code.
|
|
||||||
func (g *Generator) buildMap(runs [][]Value, typeName string) {
|
|
||||||
g.Printf("\n")
|
|
||||||
g.declareNameVars(runs, typeName, "")
|
|
||||||
g.Printf("\nvar _%s_map = map[%s]string{\n", typeName, typeName)
|
|
||||||
n := 0
|
|
||||||
for _, values := range runs {
|
|
||||||
for _, value := range values {
|
|
||||||
g.Printf("\t%s: _%s_name[%d:%d],\n", &value, typeName, n, n+len(value.name))
|
|
||||||
n += len(value.name)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
g.Printf("}\n\n")
|
|
||||||
g.Printf(stringMap, typeName)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Argument to format is the type name.
|
|
||||||
const stringMap = `func (i %[1]s) String() string {
|
|
||||||
if str, ok := _%[1]s_map[i]; ok {
|
|
||||||
return str
|
|
||||||
}
|
|
||||||
return fmt.Sprintf("%[1]s(%%d)", i)
|
|
||||||
}
|
|
||||||
`
|
|
|
@ -2,8 +2,6 @@
|
||||||
// Use of this source code is governed by a BSD-style
|
// Use of this source code is governed by a BSD-style
|
||||||
// license that can be found in the LICENSE file.
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
// +build !go1.8
|
|
||||||
|
|
||||||
package astutil
|
package astutil
|
||||||
|
|
||||||
// This file defines utilities for working with source positions.
|
// This file defines utilities for working with source positions.
|
||||||
|
|
|
@ -1,629 +0,0 @@
|
||||||
// Copyright 2013 The Go Authors. All rights reserved.
|
|
||||||
// Use of this source code is governed by a BSD-style
|
|
||||||
// license that can be found in the LICENSE file.
|
|
||||||
|
|
||||||
// +build go1.8
|
|
||||||
|
|
||||||
package astutil
|
|
||||||
|
|
||||||
// This file defines utilities for working with source positions.
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"go/ast"
|
|
||||||
"go/token"
|
|
||||||
"sort"
|
|
||||||
)
|
|
||||||
|
|
||||||
// PathEnclosingInterval returns the node that encloses the source
|
|
||||||
// interval [start, end), and all its ancestors up to the AST root.
|
|
||||||
//
|
|
||||||
// The definition of "enclosing" used by this function considers
|
|
||||||
// additional whitespace abutting a node to be enclosed by it.
|
|
||||||
// In this example:
|
|
||||||
//
|
|
||||||
// z := x + y // add them
|
|
||||||
// <-A->
|
|
||||||
// <----B----->
|
|
||||||
//
|
|
||||||
// the ast.BinaryExpr(+) node is considered to enclose interval B
|
|
||||||
// even though its [Pos()..End()) is actually only interval A.
|
|
||||||
// This behaviour makes user interfaces more tolerant of imperfect
|
|
||||||
// input.
|
|
||||||
//
|
|
||||||
// This function treats tokens as nodes, though they are not included
|
|
||||||
// in the result. e.g. PathEnclosingInterval("+") returns the
|
|
||||||
// enclosing ast.BinaryExpr("x + y").
|
|
||||||
//
|
|
||||||
// If start==end, the 1-char interval following start is used instead.
|
|
||||||
//
|
|
||||||
// The 'exact' result is true if the interval contains only path[0]
|
|
||||||
// and perhaps some adjacent whitespace. It is false if the interval
|
|
||||||
// overlaps multiple children of path[0], or if it contains only
|
|
||||||
// interior whitespace of path[0].
|
|
||||||
// In this example:
|
|
||||||
//
|
|
||||||
// z := x + y // add them
|
|
||||||
// <--C--> <---E-->
|
|
||||||
// ^
|
|
||||||
// D
|
|
||||||
//
|
|
||||||
// intervals C, D and E are inexact. C is contained by the
|
|
||||||
// z-assignment statement, because it spans three of its children (:=,
|
|
||||||
// x, +). So too is the 1-char interval D, because it contains only
|
|
||||||
// interior whitespace of the assignment. E is considered interior
|
|
||||||
// whitespace of the BlockStmt containing the assignment.
|
|
||||||
//
|
|
||||||
// Precondition: [start, end) both lie within the same file as root.
|
|
||||||
// TODO(adonovan): return (nil, false) in this case and remove precond.
|
|
||||||
// Requires FileSet; see loader.tokenFileContainsPos.
|
|
||||||
//
|
|
||||||
// Postcondition: path is never nil; it always contains at least 'root'.
|
|
||||||
//
|
|
||||||
func PathEnclosingInterval(root *ast.File, start, end token.Pos) (path []ast.Node, exact bool) {
	// fmt.Printf("EnclosingInterval %d %d\n", start, end) // debugging

	// visit recursively descends into the sole child whose
	// (whitespace-augmented) interval contains [start, end),
	// appending each node visited to path.
	//
	// Precondition: node.[Pos..End) and adjoining whitespace contain [start, end).
	var visit func(node ast.Node) bool
	visit = func(node ast.Node) bool {
		path = append(path, node)

		nodePos := node.Pos()
		nodeEnd := node.End()

		// fmt.Printf("visit(%T, %d, %d)\n", node, nodePos, nodeEnd) // debugging

		// Intersect [start, end) with interval of node.
		if start < nodePos {
			start = nodePos
		}
		if end > nodeEnd {
			end = nodeEnd
		}

		// Find sole child that contains [start, end).
		children := childrenOf(node)
		l := len(children)
		for i, child := range children {
			// [childPos, childEnd) is unaugmented interval of child.
			childPos := child.Pos()
			childEnd := child.End()

			// [augPos, augEnd) is whitespace-augmented interval of child.
			augPos := childPos
			augEnd := childEnd
			if i > 0 {
				augPos = children[i-1].End() // start of preceding whitespace
			}
			if i < l-1 {
				nextChildPos := children[i+1].Pos()
				// Does [start, end) lie between child and next child?
				if start >= augEnd && end <= nextChildPos {
					return false // inexact match
				}
				augEnd = nextChildPos // end of following whitespace
			}

			// fmt.Printf("\tchild %d: [%d..%d)\tcontains interval [%d..%d)?\n",
			// 	i, augPos, augEnd, start, end) // debugging

			// Does augmented child strictly contain [start, end)?
			if augPos <= start && end <= augEnd {
				_, isToken := child.(tokenNode)
				// Fake token nodes have no children; stop descending there.
				return isToken || visit(child)
			}

			// Does [start, end) overlap multiple children?
			// i.e. left-augmented child contains start
			// but LR-augmented child does not contain end.
			if start < childEnd && end > augEnd {
				break
			}
		}

		// No single child contained [start, end),
		// so node is the result.  Is it exact?

		// (It's tempting to put this condition before the
		// child loop, but it gives the wrong result in the
		// case where a node (e.g. ExprStmt) and its sole
		// child have equal intervals.)
		if start == nodePos && end == nodeEnd {
			return true // exact match
		}

		return false // inexact: overlaps multiple children
	}

	// Accept the interval endpoints in either order.
	if start > end {
		start, end = end, start
	}

	// Does the interval intersect the file at all?
	if start < root.End() && end > root.Pos() {
		if start == end {
			end = start + 1 // empty interval => interval of size 1
		}
		exact = visit(root)

		// Reverse the path: visit appended from root downward,
		// but callers expect innermost-first order.
		for i, l := 0, len(path); i < l/2; i++ {
			path[i], path[l-1-i] = path[l-1-i], path[i]
		}
	} else {
		// Selection lies within whitespace preceding the
		// first (or following the last) declaration in the file.
		// The result nonetheless always includes the ast.File.
		path = append(path, root)
	}

	return
}
|
|
||||||
|
|
||||||
// tokenNode is a dummy implementation of ast.Node representing a single
// bare token. Instances are created transiently by PathEnclosingInterval
// but never escape this package.
//
type tokenNode struct {
	pos token.Pos
	end token.Pos
}

// Pos returns the position of the token's first character.
func (n tokenNode) Pos() token.Pos { return n.pos }

// End returns the position immediately after the token.
func (n tokenNode) End() token.Pos { return n.end }

// tok wraps a token of the given length starting at pos in a fake ast.Node.
func tok(pos token.Pos, len int) ast.Node {
	return tokenNode{pos: pos, end: pos + token.Pos(len)}
}
|
|
||||||
|
|
||||||
// childrenOf returns the direct non-nil children of ast.Node n.
|
|
||||||
// It may include fake ast.Node implementations for bare tokens.
|
|
||||||
// it is not safe to call (e.g.) ast.Walk on such nodes.
|
|
||||||
//
|
|
||||||
func childrenOf(n ast.Node) []ast.Node {
|
|
||||||
var children []ast.Node
|
|
||||||
|
|
||||||
// First add nodes for all true subtrees.
|
|
||||||
ast.Inspect(n, func(node ast.Node) bool {
|
|
||||||
if node == n { // push n
|
|
||||||
return true // recur
|
|
||||||
}
|
|
||||||
if node != nil { // push child
|
|
||||||
children = append(children, node)
|
|
||||||
}
|
|
||||||
return false // no recursion
|
|
||||||
})
|
|
||||||
|
|
||||||
// Then add fake Nodes for bare tokens.
|
|
||||||
switch n := n.(type) {
|
|
||||||
case *ast.ArrayType:
|
|
||||||
children = append(children,
|
|
||||||
tok(n.Lbrack, len("[")),
|
|
||||||
tok(n.Elt.End(), len("]")))
|
|
||||||
|
|
||||||
case *ast.AssignStmt:
|
|
||||||
children = append(children,
|
|
||||||
tok(n.TokPos, len(n.Tok.String())))
|
|
||||||
|
|
||||||
case *ast.BasicLit:
|
|
||||||
children = append(children,
|
|
||||||
tok(n.ValuePos, len(n.Value)))
|
|
||||||
|
|
||||||
case *ast.BinaryExpr:
|
|
||||||
children = append(children, tok(n.OpPos, len(n.Op.String())))
|
|
||||||
|
|
||||||
case *ast.BlockStmt:
|
|
||||||
children = append(children,
|
|
||||||
tok(n.Lbrace, len("{")),
|
|
||||||
tok(n.Rbrace, len("}")))
|
|
||||||
|
|
||||||
case *ast.BranchStmt:
|
|
||||||
children = append(children,
|
|
||||||
tok(n.TokPos, len(n.Tok.String())))
|
|
||||||
|
|
||||||
case *ast.CallExpr:
|
|
||||||
children = append(children,
|
|
||||||
tok(n.Lparen, len("(")),
|
|
||||||
tok(n.Rparen, len(")")))
|
|
||||||
if n.Ellipsis != 0 {
|
|
||||||
children = append(children, tok(n.Ellipsis, len("...")))
|
|
||||||
}
|
|
||||||
|
|
||||||
case *ast.CaseClause:
|
|
||||||
if n.List == nil {
|
|
||||||
children = append(children,
|
|
||||||
tok(n.Case, len("default")))
|
|
||||||
} else {
|
|
||||||
children = append(children,
|
|
||||||
tok(n.Case, len("case")))
|
|
||||||
}
|
|
||||||
children = append(children, tok(n.Colon, len(":")))
|
|
||||||
|
|
||||||
case *ast.ChanType:
|
|
||||||
switch n.Dir {
|
|
||||||
case ast.RECV:
|
|
||||||
children = append(children, tok(n.Begin, len("<-chan")))
|
|
||||||
case ast.SEND:
|
|
||||||
children = append(children, tok(n.Begin, len("chan<-")))
|
|
||||||
case ast.RECV | ast.SEND:
|
|
||||||
children = append(children, tok(n.Begin, len("chan")))
|
|
||||||
}
|
|
||||||
|
|
||||||
case *ast.CommClause:
|
|
||||||
if n.Comm == nil {
|
|
||||||
children = append(children,
|
|
||||||
tok(n.Case, len("default")))
|
|
||||||
} else {
|
|
||||||
children = append(children,
|
|
||||||
tok(n.Case, len("case")))
|
|
||||||
}
|
|
||||||
children = append(children, tok(n.Colon, len(":")))
|
|
||||||
|
|
||||||
case *ast.Comment:
|
|
||||||
// nop
|
|
||||||
|
|
||||||
case *ast.CommentGroup:
|
|
||||||
// nop
|
|
||||||
|
|
||||||
case *ast.CompositeLit:
|
|
||||||
children = append(children,
|
|
||||||
tok(n.Lbrace, len("{")),
|
|
||||||
tok(n.Rbrace, len("{")))
|
|
||||||
|
|
||||||
case *ast.DeclStmt:
|
|
||||||
// nop
|
|
||||||
|
|
||||||
case *ast.DeferStmt:
|
|
||||||
children = append(children,
|
|
||||||
tok(n.Defer, len("defer")))
|
|
||||||
|
|
||||||
case *ast.Ellipsis:
|
|
||||||
children = append(children,
|
|
||||||
tok(n.Ellipsis, len("...")))
|
|
||||||
|
|
||||||
case *ast.EmptyStmt:
|
|
||||||
// nop
|
|
||||||
|
|
||||||
case *ast.ExprStmt:
|
|
||||||
// nop
|
|
||||||
|
|
||||||
case *ast.Field:
|
|
||||||
// TODO(adonovan): Field.{Doc,Comment,Tag}?
|
|
||||||
|
|
||||||
case *ast.FieldList:
|
|
||||||
children = append(children,
|
|
||||||
tok(n.Opening, len("(")),
|
|
||||||
tok(n.Closing, len(")")))
|
|
||||||
|
|
||||||
case *ast.File:
|
|
||||||
// TODO test: Doc
|
|
||||||
children = append(children,
|
|
||||||
tok(n.Package, len("package")))
|
|
||||||
|
|
||||||
case *ast.ForStmt:
|
|
||||||
children = append(children,
|
|
||||||
tok(n.For, len("for")))
|
|
||||||
|
|
||||||
case *ast.FuncDecl:
|
|
||||||
// TODO(adonovan): FuncDecl.Comment?
|
|
||||||
|
|
||||||
// Uniquely, FuncDecl breaks the invariant that
|
|
||||||
// preorder traversal yields tokens in lexical order:
|
|
||||||
// in fact, FuncDecl.Recv precedes FuncDecl.Type.Func.
|
|
||||||
//
|
|
||||||
// As a workaround, we inline the case for FuncType
|
|
||||||
// here and order things correctly.
|
|
||||||
//
|
|
||||||
children = nil // discard ast.Walk(FuncDecl) info subtrees
|
|
||||||
children = append(children, tok(n.Type.Func, len("func")))
|
|
||||||
if n.Recv != nil {
|
|
||||||
children = append(children, n.Recv)
|
|
||||||
}
|
|
||||||
children = append(children, n.Name)
|
|
||||||
if n.Type.Params != nil {
|
|
||||||
children = append(children, n.Type.Params)
|
|
||||||
}
|
|
||||||
if n.Type.Results != nil {
|
|
||||||
children = append(children, n.Type.Results)
|
|
||||||
}
|
|
||||||
if n.Body != nil {
|
|
||||||
children = append(children, n.Body)
|
|
||||||
}
|
|
||||||
|
|
||||||
case *ast.FuncLit:
|
|
||||||
// nop
|
|
||||||
|
|
||||||
case *ast.FuncType:
|
|
||||||
if n.Func != 0 {
|
|
||||||
children = append(children,
|
|
||||||
tok(n.Func, len("func")))
|
|
||||||
}
|
|
||||||
|
|
||||||
case *ast.GenDecl:
|
|
||||||
children = append(children,
|
|
||||||
tok(n.TokPos, len(n.Tok.String())))
|
|
||||||
if n.Lparen != 0 {
|
|
||||||
children = append(children,
|
|
||||||
tok(n.Lparen, len("(")),
|
|
||||||
tok(n.Rparen, len(")")))
|
|
||||||
}
|
|
||||||
|
|
||||||
case *ast.GoStmt:
|
|
||||||
children = append(children,
|
|
||||||
tok(n.Go, len("go")))
|
|
||||||
|
|
||||||
case *ast.Ident:
|
|
||||||
children = append(children,
|
|
||||||
tok(n.NamePos, len(n.Name)))
|
|
||||||
|
|
||||||
case *ast.IfStmt:
|
|
||||||
children = append(children,
|
|
||||||
tok(n.If, len("if")))
|
|
||||||
|
|
||||||
case *ast.ImportSpec:
|
|
||||||
// TODO(adonovan): ImportSpec.{Doc,EndPos}?
|
|
||||||
|
|
||||||
case *ast.IncDecStmt:
|
|
||||||
children = append(children,
|
|
||||||
tok(n.TokPos, len(n.Tok.String())))
|
|
||||||
|
|
||||||
case *ast.IndexExpr:
|
|
||||||
children = append(children,
|
|
||||||
tok(n.Lbrack, len("{")),
|
|
||||||
tok(n.Rbrack, len("}")))
|
|
||||||
|
|
||||||
case *ast.InterfaceType:
|
|
||||||
children = append(children,
|
|
||||||
tok(n.Interface, len("interface")))
|
|
||||||
|
|
||||||
case *ast.KeyValueExpr:
|
|
||||||
children = append(children,
|
|
||||||
tok(n.Colon, len(":")))
|
|
||||||
|
|
||||||
case *ast.LabeledStmt:
|
|
||||||
children = append(children,
|
|
||||||
tok(n.Colon, len(":")))
|
|
||||||
|
|
||||||
case *ast.MapType:
|
|
||||||
children = append(children,
|
|
||||||
tok(n.Map, len("map")))
|
|
||||||
|
|
||||||
case *ast.ParenExpr:
|
|
||||||
children = append(children,
|
|
||||||
tok(n.Lparen, len("(")),
|
|
||||||
tok(n.Rparen, len(")")))
|
|
||||||
|
|
||||||
case *ast.RangeStmt:
|
|
||||||
children = append(children,
|
|
||||||
tok(n.For, len("for")),
|
|
||||||
tok(n.TokPos, len(n.Tok.String())))
|
|
||||||
|
|
||||||
case *ast.ReturnStmt:
|
|
||||||
children = append(children,
|
|
||||||
tok(n.Return, len("return")))
|
|
||||||
|
|
||||||
case *ast.SelectStmt:
|
|
||||||
children = append(children,
|
|
||||||
tok(n.Select, len("select")))
|
|
||||||
|
|
||||||
case *ast.SelectorExpr:
|
|
||||||
// nop
|
|
||||||
|
|
||||||
case *ast.SendStmt:
|
|
||||||
children = append(children,
|
|
||||||
tok(n.Arrow, len("<-")))
|
|
||||||
|
|
||||||
case *ast.SliceExpr:
|
|
||||||
children = append(children,
|
|
||||||
tok(n.Lbrack, len("[")),
|
|
||||||
tok(n.Rbrack, len("]")))
|
|
||||||
|
|
||||||
case *ast.StarExpr:
|
|
||||||
children = append(children, tok(n.Star, len("*")))
|
|
||||||
|
|
||||||
case *ast.StructType:
|
|
||||||
children = append(children, tok(n.Struct, len("struct")))
|
|
||||||
|
|
||||||
case *ast.SwitchStmt:
|
|
||||||
children = append(children, tok(n.Switch, len("switch")))
|
|
||||||
|
|
||||||
case *ast.TypeAssertExpr:
|
|
||||||
children = append(children,
|
|
||||||
tok(n.Lparen-1, len(".")),
|
|
||||||
tok(n.Lparen, len("(")),
|
|
||||||
tok(n.Rparen, len(")")))
|
|
||||||
|
|
||||||
case *ast.TypeSpec:
|
|
||||||
// TODO(adonovan): TypeSpec.{Doc,Comment}?
|
|
||||||
|
|
||||||
case *ast.TypeSwitchStmt:
|
|
||||||
children = append(children, tok(n.Switch, len("switch")))
|
|
||||||
|
|
||||||
case *ast.UnaryExpr:
|
|
||||||
children = append(children, tok(n.OpPos, len(n.Op.String())))
|
|
||||||
|
|
||||||
case *ast.ValueSpec:
|
|
||||||
// TODO(adonovan): ValueSpec.{Doc,Comment}?
|
|
||||||
|
|
||||||
case *ast.BadDecl, *ast.BadExpr, *ast.BadStmt:
|
|
||||||
// nop
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO(adonovan): opt: merge the logic of ast.Inspect() into
|
|
||||||
// the switch above so we can make interleaved callbacks for
|
|
||||||
// both Nodes and Tokens in the right order and avoid the need
|
|
||||||
// to sort.
|
|
||||||
sort.Sort(byPos(children))
|
|
||||||
|
|
||||||
return children
|
|
||||||
}
|
|
||||||
|
|
||||||
type byPos []ast.Node
|
|
||||||
|
|
||||||
func (sl byPos) Len() int {
|
|
||||||
return len(sl)
|
|
||||||
}
|
|
||||||
func (sl byPos) Less(i, j int) bool {
|
|
||||||
return sl[i].Pos() < sl[j].Pos()
|
|
||||||
}
|
|
||||||
func (sl byPos) Swap(i, j int) {
|
|
||||||
sl[i], sl[j] = sl[j], sl[i]
|
|
||||||
}
|
|
||||||
|
|
||||||
// NodeDescription returns a description of the concrete type of n suitable
// for a user interface.
//
// TODO(adonovan): in some cases (e.g. Field, FieldList, Ident,
// StarExpr) we could be much more specific given the path to the AST
// root.  Perhaps we should do that.
//
func NodeDescription(n ast.Node) string {
	switch n := n.(type) {
	case *ast.ArrayType:
		return "array type"
	case *ast.AssignStmt:
		return "assignment"
	case *ast.BadDecl:
		return "bad declaration"
	case *ast.BadExpr:
		return "bad expression"
	case *ast.BadStmt:
		return "bad statement"
	case *ast.BasicLit:
		return "basic literal"
	case *ast.BinaryExpr:
		return fmt.Sprintf("binary %s operation", n.Op)
	case *ast.BlockStmt:
		return "block"
	case *ast.BranchStmt:
		// Distinguish by keyword; an unrecognized token falls
		// through to the panic below.
		switch n.Tok {
		case token.BREAK:
			return "break statement"
		case token.CONTINUE:
			return "continue statement"
		case token.GOTO:
			return "goto statement"
		case token.FALLTHROUGH:
			return "fall-through statement"
		}
	case *ast.CallExpr:
		if len(n.Args) == 1 && !n.Ellipsis.IsValid() {
			return "function call (or conversion)"
		}
		return "function call"
	case *ast.CaseClause:
		return "case clause"
	case *ast.ChanType:
		return "channel type"
	case *ast.CommClause:
		return "communication clause"
	case *ast.Comment:
		return "comment"
	case *ast.CommentGroup:
		return "comment group"
	case *ast.CompositeLit:
		return "composite literal"
	case *ast.DeclStmt:
		// Recurse to describe the wrapped declaration.
		return NodeDescription(n.Decl) + " statement"
	case *ast.DeferStmt:
		return "defer statement"
	case *ast.Ellipsis:
		return "ellipsis"
	case *ast.EmptyStmt:
		return "empty statement"
	case *ast.ExprStmt:
		return "expression statement"
	case *ast.Field:
		// Can be any of these:
		// struct {x, y int}  -- struct field(s)
		// struct {T}         -- anon struct field
		// interface {I}      -- interface embedding
		// interface {f()}    -- interface method
		// func (A) func(B) C -- receiver, param(s), result(s)
		return "field/method/parameter"
	case *ast.FieldList:
		return "field/method/parameter list"
	case *ast.File:
		return "source file"
	case *ast.ForStmt:
		return "for loop"
	case *ast.FuncDecl:
		return "function declaration"
	case *ast.FuncLit:
		return "function literal"
	case *ast.FuncType:
		return "function type"
	case *ast.GenDecl:
		// Distinguish by declaration keyword; an unrecognized
		// token falls through to the panic below.
		switch n.Tok {
		case token.IMPORT:
			return "import declaration"
		case token.CONST:
			return "constant declaration"
		case token.TYPE:
			return "type declaration"
		case token.VAR:
			return "variable declaration"
		}
	case *ast.GoStmt:
		return "go statement"
	case *ast.Ident:
		return "identifier"
	case *ast.IfStmt:
		return "if statement"
	case *ast.ImportSpec:
		return "import specification"
	case *ast.IncDecStmt:
		if n.Tok == token.INC {
			return "increment statement"
		}
		return "decrement statement"
	case *ast.IndexExpr:
		return "index expression"
	case *ast.InterfaceType:
		return "interface type"
	case *ast.KeyValueExpr:
		return "key/value association"
	case *ast.LabeledStmt:
		return "statement label"
	case *ast.MapType:
		return "map type"
	case *ast.Package:
		return "package"
	case *ast.ParenExpr:
		// Recurse to describe the parenthesized expression.
		return "parenthesized " + NodeDescription(n.X)
	case *ast.RangeStmt:
		return "range loop"
	case *ast.ReturnStmt:
		return "return statement"
	case *ast.SelectStmt:
		return "select statement"
	case *ast.SelectorExpr:
		return "selector"
	case *ast.SendStmt:
		return "channel send"
	case *ast.SliceExpr:
		return "slice expression"
	case *ast.StarExpr:
		return "*-operation" // load/store expr or pointer type
	case *ast.StructType:
		return "struct type"
	case *ast.SwitchStmt:
		return "switch statement"
	case *ast.TypeAssertExpr:
		return "type assertion"
	case *ast.TypeSpec:
		return "type specification"
	case *ast.TypeSwitchStmt:
		return "type switch"
	case *ast.UnaryExpr:
		return fmt.Sprintf("unary %s operation", n.Op)
	case *ast.ValueSpec:
		return "value specification"

	}
	// Unhandled node types (and unrecognized tokens in the
	// BranchStmt/GenDecl cases above) are programmer errors.
	panic(fmt.Sprintf("unexpected node type: %T", n))
}
|
|
Loading…
Reference in New Issue