mirror of
https://github.com/superseriousbusiness/gotosocial
synced 2025-06-05 21:59:39 +02:00
[bugfix] Fix Swagger spec and add test script (#2698)
* Add Swagger spec test script * Fix Swagger spec errors not related to statuses with polls * Add API tests that post a status with a poll * Fix creating a status with a poll from form params * Fix Swagger spec errors related to statuses with polls (this is the last error) * Fix Swagger spec warnings not related to unused definitions * Suppress a duplicate list update params definition that was somehow causing wrong param names * Add Swagger test to CI - updates Drone config - vendorizes go-swagger - fixes a file extension issue that caused the test script to generate JSON instead of YAML with the vendorized version * Put `Sample: ` on its own line everywhere * Remove unused id param from emojiCategoriesGet * Add 5 more pairs of profile fields to account update API Swagger * Remove Swagger prefix from dummy fields It makes the generated code look weird * Manually annotate params for statusCreate operation * Fix all remaining Swagger spec warnings - Change some models into operation parameters - Ignore models that already correspond to manually documented operation parameters but can't be trivially changed (those with file fields) * Documented that creating a status with scheduled_at isn't implemented yet * sign drone.yml * Fix filter API Swagger errors * fixup! Fix filter API Swagger errors --------- Co-authored-by: tobi <tobi.smethurst@protonmail.com>
This commit is contained in:
634
vendor/golang.org/x/tools/go/ast/astutil/enclosing.go
generated
vendored
Normal file
634
vendor/golang.org/x/tools/go/ast/astutil/enclosing.go
generated
vendored
Normal file
@ -0,0 +1,634 @@
|
||||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package astutil
|
||||
|
||||
// This file defines utilities for working with source positions.
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"sort"
|
||||
)
|
||||
|
||||
// PathEnclosingInterval returns the node that encloses the source
|
||||
// interval [start, end), and all its ancestors up to the AST root.
|
||||
//
|
||||
// The definition of "enclosing" used by this function considers
|
||||
// additional whitespace abutting a node to be enclosed by it.
|
||||
// In this example:
|
||||
//
|
||||
// z := x + y // add them
|
||||
// <-A->
|
||||
// <----B----->
|
||||
//
|
||||
// the ast.BinaryExpr(+) node is considered to enclose interval B
|
||||
// even though its [Pos()..End()) is actually only interval A.
|
||||
// This behaviour makes user interfaces more tolerant of imperfect
|
||||
// input.
|
||||
//
|
||||
// This function treats tokens as nodes, though they are not included
|
||||
// in the result. e.g. PathEnclosingInterval("+") returns the
|
||||
// enclosing ast.BinaryExpr("x + y").
|
||||
//
|
||||
// If start==end, the 1-char interval following start is used instead.
|
||||
//
|
||||
// The 'exact' result is true if the interval contains only path[0]
|
||||
// and perhaps some adjacent whitespace. It is false if the interval
|
||||
// overlaps multiple children of path[0], or if it contains only
|
||||
// interior whitespace of path[0].
|
||||
// In this example:
|
||||
//
|
||||
// z := x + y // add them
|
||||
// <--C--> <---E-->
|
||||
// ^
|
||||
// D
|
||||
//
|
||||
// intervals C, D and E are inexact. C is contained by the
|
||||
// z-assignment statement, because it spans three of its children (:=,
|
||||
// x, +). So too is the 1-char interval D, because it contains only
|
||||
// interior whitespace of the assignment. E is considered interior
|
||||
// whitespace of the BlockStmt containing the assignment.
|
||||
//
|
||||
// The resulting path is never empty; it always contains at least the
|
||||
// 'root' *ast.File. Ideally PathEnclosingInterval would reject
|
||||
// intervals that lie wholly or partially outside the range of the
|
||||
// file, but unfortunately ast.File records only the token.Pos of
|
||||
// the 'package' keyword, but not of the start of the file itself.
|
||||
func PathEnclosingInterval(root *ast.File, start, end token.Pos) (path []ast.Node, exact bool) {
|
||||
// fmt.Printf("EnclosingInterval %d %d\n", start, end) // debugging
|
||||
|
||||
// Precondition: node.[Pos..End) and adjoining whitespace contain [start, end).
|
||||
var visit func(node ast.Node) bool
|
||||
visit = func(node ast.Node) bool {
|
||||
path = append(path, node)
|
||||
|
||||
nodePos := node.Pos()
|
||||
nodeEnd := node.End()
|
||||
|
||||
// fmt.Printf("visit(%T, %d, %d)\n", node, nodePos, nodeEnd) // debugging
|
||||
|
||||
// Intersect [start, end) with interval of node.
|
||||
if start < nodePos {
|
||||
start = nodePos
|
||||
}
|
||||
if end > nodeEnd {
|
||||
end = nodeEnd
|
||||
}
|
||||
|
||||
// Find sole child that contains [start, end).
|
||||
children := childrenOf(node)
|
||||
l := len(children)
|
||||
for i, child := range children {
|
||||
// [childPos, childEnd) is unaugmented interval of child.
|
||||
childPos := child.Pos()
|
||||
childEnd := child.End()
|
||||
|
||||
// [augPos, augEnd) is whitespace-augmented interval of child.
|
||||
augPos := childPos
|
||||
augEnd := childEnd
|
||||
if i > 0 {
|
||||
augPos = children[i-1].End() // start of preceding whitespace
|
||||
}
|
||||
if i < l-1 {
|
||||
nextChildPos := children[i+1].Pos()
|
||||
// Does [start, end) lie between child and next child?
|
||||
if start >= augEnd && end <= nextChildPos {
|
||||
return false // inexact match
|
||||
}
|
||||
augEnd = nextChildPos // end of following whitespace
|
||||
}
|
||||
|
||||
// fmt.Printf("\tchild %d: [%d..%d)\tcontains interval [%d..%d)?\n",
|
||||
// i, augPos, augEnd, start, end) // debugging
|
||||
|
||||
// Does augmented child strictly contain [start, end)?
|
||||
if augPos <= start && end <= augEnd {
|
||||
_, isToken := child.(tokenNode)
|
||||
return isToken || visit(child)
|
||||
}
|
||||
|
||||
// Does [start, end) overlap multiple children?
|
||||
// i.e. left-augmented child contains start
|
||||
// but LR-augmented child does not contain end.
|
||||
if start < childEnd && end > augEnd {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// No single child contained [start, end),
|
||||
// so node is the result. Is it exact?
|
||||
|
||||
// (It's tempting to put this condition before the
|
||||
// child loop, but it gives the wrong result in the
|
||||
// case where a node (e.g. ExprStmt) and its sole
|
||||
// child have equal intervals.)
|
||||
if start == nodePos && end == nodeEnd {
|
||||
return true // exact match
|
||||
}
|
||||
|
||||
return false // inexact: overlaps multiple children
|
||||
}
|
||||
|
||||
// Ensure [start,end) is nondecreasing.
|
||||
if start > end {
|
||||
start, end = end, start
|
||||
}
|
||||
|
||||
if start < root.End() && end > root.Pos() {
|
||||
if start == end {
|
||||
end = start + 1 // empty interval => interval of size 1
|
||||
}
|
||||
exact = visit(root)
|
||||
|
||||
// Reverse the path:
|
||||
for i, l := 0, len(path); i < l/2; i++ {
|
||||
path[i], path[l-1-i] = path[l-1-i], path[i]
|
||||
}
|
||||
} else {
|
||||
// Selection lies within whitespace preceding the
|
||||
// first (or following the last) declaration in the file.
|
||||
// The result nonetheless always includes the ast.File.
|
||||
path = append(path, root)
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// tokenNode is a dummy implementation of ast.Node for a single token.
|
||||
// They are used transiently by PathEnclosingInterval but never escape
|
||||
// this package.
|
||||
type tokenNode struct {
|
||||
pos token.Pos
|
||||
end token.Pos
|
||||
}
|
||||
|
||||
func (n tokenNode) Pos() token.Pos {
|
||||
return n.pos
|
||||
}
|
||||
|
||||
func (n tokenNode) End() token.Pos {
|
||||
return n.end
|
||||
}
|
||||
|
||||
func tok(pos token.Pos, len int) ast.Node {
|
||||
return tokenNode{pos, pos + token.Pos(len)}
|
||||
}
|
||||
|
||||
// childrenOf returns the direct non-nil children of ast.Node n.
|
||||
// It may include fake ast.Node implementations for bare tokens.
|
||||
// it is not safe to call (e.g.) ast.Walk on such nodes.
|
||||
func childrenOf(n ast.Node) []ast.Node {
|
||||
var children []ast.Node
|
||||
|
||||
// First add nodes for all true subtrees.
|
||||
ast.Inspect(n, func(node ast.Node) bool {
|
||||
if node == n { // push n
|
||||
return true // recur
|
||||
}
|
||||
if node != nil { // push child
|
||||
children = append(children, node)
|
||||
}
|
||||
return false // no recursion
|
||||
})
|
||||
|
||||
// Then add fake Nodes for bare tokens.
|
||||
switch n := n.(type) {
|
||||
case *ast.ArrayType:
|
||||
children = append(children,
|
||||
tok(n.Lbrack, len("[")),
|
||||
tok(n.Elt.End(), len("]")))
|
||||
|
||||
case *ast.AssignStmt:
|
||||
children = append(children,
|
||||
tok(n.TokPos, len(n.Tok.String())))
|
||||
|
||||
case *ast.BasicLit:
|
||||
children = append(children,
|
||||
tok(n.ValuePos, len(n.Value)))
|
||||
|
||||
case *ast.BinaryExpr:
|
||||
children = append(children, tok(n.OpPos, len(n.Op.String())))
|
||||
|
||||
case *ast.BlockStmt:
|
||||
children = append(children,
|
||||
tok(n.Lbrace, len("{")),
|
||||
tok(n.Rbrace, len("}")))
|
||||
|
||||
case *ast.BranchStmt:
|
||||
children = append(children,
|
||||
tok(n.TokPos, len(n.Tok.String())))
|
||||
|
||||
case *ast.CallExpr:
|
||||
children = append(children,
|
||||
tok(n.Lparen, len("(")),
|
||||
tok(n.Rparen, len(")")))
|
||||
if n.Ellipsis != 0 {
|
||||
children = append(children, tok(n.Ellipsis, len("...")))
|
||||
}
|
||||
|
||||
case *ast.CaseClause:
|
||||
if n.List == nil {
|
||||
children = append(children,
|
||||
tok(n.Case, len("default")))
|
||||
} else {
|
||||
children = append(children,
|
||||
tok(n.Case, len("case")))
|
||||
}
|
||||
children = append(children, tok(n.Colon, len(":")))
|
||||
|
||||
case *ast.ChanType:
|
||||
switch n.Dir {
|
||||
case ast.RECV:
|
||||
children = append(children, tok(n.Begin, len("<-chan")))
|
||||
case ast.SEND:
|
||||
children = append(children, tok(n.Begin, len("chan<-")))
|
||||
case ast.RECV | ast.SEND:
|
||||
children = append(children, tok(n.Begin, len("chan")))
|
||||
}
|
||||
|
||||
case *ast.CommClause:
|
||||
if n.Comm == nil {
|
||||
children = append(children,
|
||||
tok(n.Case, len("default")))
|
||||
} else {
|
||||
children = append(children,
|
||||
tok(n.Case, len("case")))
|
||||
}
|
||||
children = append(children, tok(n.Colon, len(":")))
|
||||
|
||||
case *ast.Comment:
|
||||
// nop
|
||||
|
||||
case *ast.CommentGroup:
|
||||
// nop
|
||||
|
||||
case *ast.CompositeLit:
|
||||
children = append(children,
|
||||
tok(n.Lbrace, len("{")),
|
||||
tok(n.Rbrace, len("{")))
|
||||
|
||||
case *ast.DeclStmt:
|
||||
// nop
|
||||
|
||||
case *ast.DeferStmt:
|
||||
children = append(children,
|
||||
tok(n.Defer, len("defer")))
|
||||
|
||||
case *ast.Ellipsis:
|
||||
children = append(children,
|
||||
tok(n.Ellipsis, len("...")))
|
||||
|
||||
case *ast.EmptyStmt:
|
||||
// nop
|
||||
|
||||
case *ast.ExprStmt:
|
||||
// nop
|
||||
|
||||
case *ast.Field:
|
||||
// TODO(adonovan): Field.{Doc,Comment,Tag}?
|
||||
|
||||
case *ast.FieldList:
|
||||
children = append(children,
|
||||
tok(n.Opening, len("(")), // or len("[")
|
||||
tok(n.Closing, len(")"))) // or len("]")
|
||||
|
||||
case *ast.File:
|
||||
// TODO test: Doc
|
||||
children = append(children,
|
||||
tok(n.Package, len("package")))
|
||||
|
||||
case *ast.ForStmt:
|
||||
children = append(children,
|
||||
tok(n.For, len("for")))
|
||||
|
||||
case *ast.FuncDecl:
|
||||
// TODO(adonovan): FuncDecl.Comment?
|
||||
|
||||
// Uniquely, FuncDecl breaks the invariant that
|
||||
// preorder traversal yields tokens in lexical order:
|
||||
// in fact, FuncDecl.Recv precedes FuncDecl.Type.Func.
|
||||
//
|
||||
// As a workaround, we inline the case for FuncType
|
||||
// here and order things correctly.
|
||||
//
|
||||
children = nil // discard ast.Walk(FuncDecl) info subtrees
|
||||
children = append(children, tok(n.Type.Func, len("func")))
|
||||
if n.Recv != nil {
|
||||
children = append(children, n.Recv)
|
||||
}
|
||||
children = append(children, n.Name)
|
||||
if tparams := n.Type.TypeParams; tparams != nil {
|
||||
children = append(children, tparams)
|
||||
}
|
||||
if n.Type.Params != nil {
|
||||
children = append(children, n.Type.Params)
|
||||
}
|
||||
if n.Type.Results != nil {
|
||||
children = append(children, n.Type.Results)
|
||||
}
|
||||
if n.Body != nil {
|
||||
children = append(children, n.Body)
|
||||
}
|
||||
|
||||
case *ast.FuncLit:
|
||||
// nop
|
||||
|
||||
case *ast.FuncType:
|
||||
if n.Func != 0 {
|
||||
children = append(children,
|
||||
tok(n.Func, len("func")))
|
||||
}
|
||||
|
||||
case *ast.GenDecl:
|
||||
children = append(children,
|
||||
tok(n.TokPos, len(n.Tok.String())))
|
||||
if n.Lparen != 0 {
|
||||
children = append(children,
|
||||
tok(n.Lparen, len("(")),
|
||||
tok(n.Rparen, len(")")))
|
||||
}
|
||||
|
||||
case *ast.GoStmt:
|
||||
children = append(children,
|
||||
tok(n.Go, len("go")))
|
||||
|
||||
case *ast.Ident:
|
||||
children = append(children,
|
||||
tok(n.NamePos, len(n.Name)))
|
||||
|
||||
case *ast.IfStmt:
|
||||
children = append(children,
|
||||
tok(n.If, len("if")))
|
||||
|
||||
case *ast.ImportSpec:
|
||||
// TODO(adonovan): ImportSpec.{Doc,EndPos}?
|
||||
|
||||
case *ast.IncDecStmt:
|
||||
children = append(children,
|
||||
tok(n.TokPos, len(n.Tok.String())))
|
||||
|
||||
case *ast.IndexExpr:
|
||||
children = append(children,
|
||||
tok(n.Lbrack, len("[")),
|
||||
tok(n.Rbrack, len("]")))
|
||||
|
||||
case *ast.IndexListExpr:
|
||||
children = append(children,
|
||||
tok(n.Lbrack, len("[")),
|
||||
tok(n.Rbrack, len("]")))
|
||||
|
||||
case *ast.InterfaceType:
|
||||
children = append(children,
|
||||
tok(n.Interface, len("interface")))
|
||||
|
||||
case *ast.KeyValueExpr:
|
||||
children = append(children,
|
||||
tok(n.Colon, len(":")))
|
||||
|
||||
case *ast.LabeledStmt:
|
||||
children = append(children,
|
||||
tok(n.Colon, len(":")))
|
||||
|
||||
case *ast.MapType:
|
||||
children = append(children,
|
||||
tok(n.Map, len("map")))
|
||||
|
||||
case *ast.ParenExpr:
|
||||
children = append(children,
|
||||
tok(n.Lparen, len("(")),
|
||||
tok(n.Rparen, len(")")))
|
||||
|
||||
case *ast.RangeStmt:
|
||||
children = append(children,
|
||||
tok(n.For, len("for")),
|
||||
tok(n.TokPos, len(n.Tok.String())))
|
||||
|
||||
case *ast.ReturnStmt:
|
||||
children = append(children,
|
||||
tok(n.Return, len("return")))
|
||||
|
||||
case *ast.SelectStmt:
|
||||
children = append(children,
|
||||
tok(n.Select, len("select")))
|
||||
|
||||
case *ast.SelectorExpr:
|
||||
// nop
|
||||
|
||||
case *ast.SendStmt:
|
||||
children = append(children,
|
||||
tok(n.Arrow, len("<-")))
|
||||
|
||||
case *ast.SliceExpr:
|
||||
children = append(children,
|
||||
tok(n.Lbrack, len("[")),
|
||||
tok(n.Rbrack, len("]")))
|
||||
|
||||
case *ast.StarExpr:
|
||||
children = append(children, tok(n.Star, len("*")))
|
||||
|
||||
case *ast.StructType:
|
||||
children = append(children, tok(n.Struct, len("struct")))
|
||||
|
||||
case *ast.SwitchStmt:
|
||||
children = append(children, tok(n.Switch, len("switch")))
|
||||
|
||||
case *ast.TypeAssertExpr:
|
||||
children = append(children,
|
||||
tok(n.Lparen-1, len(".")),
|
||||
tok(n.Lparen, len("(")),
|
||||
tok(n.Rparen, len(")")))
|
||||
|
||||
case *ast.TypeSpec:
|
||||
// TODO(adonovan): TypeSpec.{Doc,Comment}?
|
||||
|
||||
case *ast.TypeSwitchStmt:
|
||||
children = append(children, tok(n.Switch, len("switch")))
|
||||
|
||||
case *ast.UnaryExpr:
|
||||
children = append(children, tok(n.OpPos, len(n.Op.String())))
|
||||
|
||||
case *ast.ValueSpec:
|
||||
// TODO(adonovan): ValueSpec.{Doc,Comment}?
|
||||
|
||||
case *ast.BadDecl, *ast.BadExpr, *ast.BadStmt:
|
||||
// nop
|
||||
}
|
||||
|
||||
// TODO(adonovan): opt: merge the logic of ast.Inspect() into
|
||||
// the switch above so we can make interleaved callbacks for
|
||||
// both Nodes and Tokens in the right order and avoid the need
|
||||
// to sort.
|
||||
sort.Sort(byPos(children))
|
||||
|
||||
return children
|
||||
}
|
||||
|
||||
type byPos []ast.Node
|
||||
|
||||
func (sl byPos) Len() int {
|
||||
return len(sl)
|
||||
}
|
||||
func (sl byPos) Less(i, j int) bool {
|
||||
return sl[i].Pos() < sl[j].Pos()
|
||||
}
|
||||
func (sl byPos) Swap(i, j int) {
|
||||
sl[i], sl[j] = sl[j], sl[i]
|
||||
}
|
||||
|
||||
// NodeDescription returns a description of the concrete type of n suitable
|
||||
// for a user interface.
|
||||
//
|
||||
// TODO(adonovan): in some cases (e.g. Field, FieldList, Ident,
|
||||
// StarExpr) we could be much more specific given the path to the AST
|
||||
// root. Perhaps we should do that.
|
||||
func NodeDescription(n ast.Node) string {
|
||||
switch n := n.(type) {
|
||||
case *ast.ArrayType:
|
||||
return "array type"
|
||||
case *ast.AssignStmt:
|
||||
return "assignment"
|
||||
case *ast.BadDecl:
|
||||
return "bad declaration"
|
||||
case *ast.BadExpr:
|
||||
return "bad expression"
|
||||
case *ast.BadStmt:
|
||||
return "bad statement"
|
||||
case *ast.BasicLit:
|
||||
return "basic literal"
|
||||
case *ast.BinaryExpr:
|
||||
return fmt.Sprintf("binary %s operation", n.Op)
|
||||
case *ast.BlockStmt:
|
||||
return "block"
|
||||
case *ast.BranchStmt:
|
||||
switch n.Tok {
|
||||
case token.BREAK:
|
||||
return "break statement"
|
||||
case token.CONTINUE:
|
||||
return "continue statement"
|
||||
case token.GOTO:
|
||||
return "goto statement"
|
||||
case token.FALLTHROUGH:
|
||||
return "fall-through statement"
|
||||
}
|
||||
case *ast.CallExpr:
|
||||
if len(n.Args) == 1 && !n.Ellipsis.IsValid() {
|
||||
return "function call (or conversion)"
|
||||
}
|
||||
return "function call"
|
||||
case *ast.CaseClause:
|
||||
return "case clause"
|
||||
case *ast.ChanType:
|
||||
return "channel type"
|
||||
case *ast.CommClause:
|
||||
return "communication clause"
|
||||
case *ast.Comment:
|
||||
return "comment"
|
||||
case *ast.CommentGroup:
|
||||
return "comment group"
|
||||
case *ast.CompositeLit:
|
||||
return "composite literal"
|
||||
case *ast.DeclStmt:
|
||||
return NodeDescription(n.Decl) + " statement"
|
||||
case *ast.DeferStmt:
|
||||
return "defer statement"
|
||||
case *ast.Ellipsis:
|
||||
return "ellipsis"
|
||||
case *ast.EmptyStmt:
|
||||
return "empty statement"
|
||||
case *ast.ExprStmt:
|
||||
return "expression statement"
|
||||
case *ast.Field:
|
||||
// Can be any of these:
|
||||
// struct {x, y int} -- struct field(s)
|
||||
// struct {T} -- anon struct field
|
||||
// interface {I} -- interface embedding
|
||||
// interface {f()} -- interface method
|
||||
// func (A) func(B) C -- receiver, param(s), result(s)
|
||||
return "field/method/parameter"
|
||||
case *ast.FieldList:
|
||||
return "field/method/parameter list"
|
||||
case *ast.File:
|
||||
return "source file"
|
||||
case *ast.ForStmt:
|
||||
return "for loop"
|
||||
case *ast.FuncDecl:
|
||||
return "function declaration"
|
||||
case *ast.FuncLit:
|
||||
return "function literal"
|
||||
case *ast.FuncType:
|
||||
return "function type"
|
||||
case *ast.GenDecl:
|
||||
switch n.Tok {
|
||||
case token.IMPORT:
|
||||
return "import declaration"
|
||||
case token.CONST:
|
||||
return "constant declaration"
|
||||
case token.TYPE:
|
||||
return "type declaration"
|
||||
case token.VAR:
|
||||
return "variable declaration"
|
||||
}
|
||||
case *ast.GoStmt:
|
||||
return "go statement"
|
||||
case *ast.Ident:
|
||||
return "identifier"
|
||||
case *ast.IfStmt:
|
||||
return "if statement"
|
||||
case *ast.ImportSpec:
|
||||
return "import specification"
|
||||
case *ast.IncDecStmt:
|
||||
if n.Tok == token.INC {
|
||||
return "increment statement"
|
||||
}
|
||||
return "decrement statement"
|
||||
case *ast.IndexExpr:
|
||||
return "index expression"
|
||||
case *ast.IndexListExpr:
|
||||
return "index list expression"
|
||||
case *ast.InterfaceType:
|
||||
return "interface type"
|
||||
case *ast.KeyValueExpr:
|
||||
return "key/value association"
|
||||
case *ast.LabeledStmt:
|
||||
return "statement label"
|
||||
case *ast.MapType:
|
||||
return "map type"
|
||||
case *ast.Package:
|
||||
return "package"
|
||||
case *ast.ParenExpr:
|
||||
return "parenthesized " + NodeDescription(n.X)
|
||||
case *ast.RangeStmt:
|
||||
return "range loop"
|
||||
case *ast.ReturnStmt:
|
||||
return "return statement"
|
||||
case *ast.SelectStmt:
|
||||
return "select statement"
|
||||
case *ast.SelectorExpr:
|
||||
return "selector"
|
||||
case *ast.SendStmt:
|
||||
return "channel send"
|
||||
case *ast.SliceExpr:
|
||||
return "slice expression"
|
||||
case *ast.StarExpr:
|
||||
return "*-operation" // load/store expr or pointer type
|
||||
case *ast.StructType:
|
||||
return "struct type"
|
||||
case *ast.SwitchStmt:
|
||||
return "switch statement"
|
||||
case *ast.TypeAssertExpr:
|
||||
return "type assertion"
|
||||
case *ast.TypeSpec:
|
||||
return "type specification"
|
||||
case *ast.TypeSwitchStmt:
|
||||
return "type switch"
|
||||
case *ast.UnaryExpr:
|
||||
return fmt.Sprintf("unary %s operation", n.Op)
|
||||
case *ast.ValueSpec:
|
||||
return "value specification"
|
||||
|
||||
}
|
||||
panic(fmt.Sprintf("unexpected node type: %T", n))
|
||||
}
|
485
vendor/golang.org/x/tools/go/ast/astutil/imports.go
generated
vendored
Normal file
485
vendor/golang.org/x/tools/go/ast/astutil/imports.go
generated
vendored
Normal file
@ -0,0 +1,485 @@
|
||||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package astutil contains common utilities for working with the Go AST.
|
||||
package astutil // import "golang.org/x/tools/go/ast/astutil"
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// AddImport adds the import path to the file f, if absent.
|
||||
func AddImport(fset *token.FileSet, f *ast.File, path string) (added bool) {
|
||||
return AddNamedImport(fset, f, "", path)
|
||||
}
|
||||
|
||||
// AddNamedImport adds the import with the given name and path to the file f, if absent.
|
||||
// If name is not empty, it is used to rename the import.
|
||||
//
|
||||
// For example, calling
|
||||
//
|
||||
// AddNamedImport(fset, f, "pathpkg", "path")
|
||||
//
|
||||
// adds
|
||||
//
|
||||
// import pathpkg "path"
|
||||
func AddNamedImport(fset *token.FileSet, f *ast.File, name, path string) (added bool) {
|
||||
if imports(f, name, path) {
|
||||
return false
|
||||
}
|
||||
|
||||
newImport := &ast.ImportSpec{
|
||||
Path: &ast.BasicLit{
|
||||
Kind: token.STRING,
|
||||
Value: strconv.Quote(path),
|
||||
},
|
||||
}
|
||||
if name != "" {
|
||||
newImport.Name = &ast.Ident{Name: name}
|
||||
}
|
||||
|
||||
// Find an import decl to add to.
|
||||
// The goal is to find an existing import
|
||||
// whose import path has the longest shared
|
||||
// prefix with path.
|
||||
var (
|
||||
bestMatch = -1 // length of longest shared prefix
|
||||
lastImport = -1 // index in f.Decls of the file's final import decl
|
||||
impDecl *ast.GenDecl // import decl containing the best match
|
||||
impIndex = -1 // spec index in impDecl containing the best match
|
||||
|
||||
isThirdPartyPath = isThirdParty(path)
|
||||
)
|
||||
for i, decl := range f.Decls {
|
||||
gen, ok := decl.(*ast.GenDecl)
|
||||
if ok && gen.Tok == token.IMPORT {
|
||||
lastImport = i
|
||||
// Do not add to import "C", to avoid disrupting the
|
||||
// association with its doc comment, breaking cgo.
|
||||
if declImports(gen, "C") {
|
||||
continue
|
||||
}
|
||||
|
||||
// Match an empty import decl if that's all that is available.
|
||||
if len(gen.Specs) == 0 && bestMatch == -1 {
|
||||
impDecl = gen
|
||||
}
|
||||
|
||||
// Compute longest shared prefix with imports in this group and find best
|
||||
// matched import spec.
|
||||
// 1. Always prefer import spec with longest shared prefix.
|
||||
// 2. While match length is 0,
|
||||
// - for stdlib package: prefer first import spec.
|
||||
// - for third party package: prefer first third party import spec.
|
||||
// We cannot use last import spec as best match for third party package
|
||||
// because grouped imports are usually placed last by goimports -local
|
||||
// flag.
|
||||
// See issue #19190.
|
||||
seenAnyThirdParty := false
|
||||
for j, spec := range gen.Specs {
|
||||
impspec := spec.(*ast.ImportSpec)
|
||||
p := importPath(impspec)
|
||||
n := matchLen(p, path)
|
||||
if n > bestMatch || (bestMatch == 0 && !seenAnyThirdParty && isThirdPartyPath) {
|
||||
bestMatch = n
|
||||
impDecl = gen
|
||||
impIndex = j
|
||||
}
|
||||
seenAnyThirdParty = seenAnyThirdParty || isThirdParty(p)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If no import decl found, add one after the last import.
|
||||
if impDecl == nil {
|
||||
impDecl = &ast.GenDecl{
|
||||
Tok: token.IMPORT,
|
||||
}
|
||||
if lastImport >= 0 {
|
||||
impDecl.TokPos = f.Decls[lastImport].End()
|
||||
} else {
|
||||
// There are no existing imports.
|
||||
// Our new import, preceded by a blank line, goes after the package declaration
|
||||
// and after the comment, if any, that starts on the same line as the
|
||||
// package declaration.
|
||||
impDecl.TokPos = f.Package
|
||||
|
||||
file := fset.File(f.Package)
|
||||
pkgLine := file.Line(f.Package)
|
||||
for _, c := range f.Comments {
|
||||
if file.Line(c.Pos()) > pkgLine {
|
||||
break
|
||||
}
|
||||
// +2 for a blank line
|
||||
impDecl.TokPos = c.End() + 2
|
||||
}
|
||||
}
|
||||
f.Decls = append(f.Decls, nil)
|
||||
copy(f.Decls[lastImport+2:], f.Decls[lastImport+1:])
|
||||
f.Decls[lastImport+1] = impDecl
|
||||
}
|
||||
|
||||
// Insert new import at insertAt.
|
||||
insertAt := 0
|
||||
if impIndex >= 0 {
|
||||
// insert after the found import
|
||||
insertAt = impIndex + 1
|
||||
}
|
||||
impDecl.Specs = append(impDecl.Specs, nil)
|
||||
copy(impDecl.Specs[insertAt+1:], impDecl.Specs[insertAt:])
|
||||
impDecl.Specs[insertAt] = newImport
|
||||
pos := impDecl.Pos()
|
||||
if insertAt > 0 {
|
||||
// If there is a comment after an existing import, preserve the comment
|
||||
// position by adding the new import after the comment.
|
||||
if spec, ok := impDecl.Specs[insertAt-1].(*ast.ImportSpec); ok && spec.Comment != nil {
|
||||
pos = spec.Comment.End()
|
||||
} else {
|
||||
// Assign same position as the previous import,
|
||||
// so that the sorter sees it as being in the same block.
|
||||
pos = impDecl.Specs[insertAt-1].Pos()
|
||||
}
|
||||
}
|
||||
if newImport.Name != nil {
|
||||
newImport.Name.NamePos = pos
|
||||
}
|
||||
newImport.Path.ValuePos = pos
|
||||
newImport.EndPos = pos
|
||||
|
||||
// Clean up parens. impDecl contains at least one spec.
|
||||
if len(impDecl.Specs) == 1 {
|
||||
// Remove unneeded parens.
|
||||
impDecl.Lparen = token.NoPos
|
||||
} else if !impDecl.Lparen.IsValid() {
|
||||
// impDecl needs parens added.
|
||||
impDecl.Lparen = impDecl.Specs[0].Pos()
|
||||
}
|
||||
|
||||
f.Imports = append(f.Imports, newImport)
|
||||
|
||||
if len(f.Decls) <= 1 {
|
||||
return true
|
||||
}
|
||||
|
||||
// Merge all the import declarations into the first one.
|
||||
var first *ast.GenDecl
|
||||
for i := 0; i < len(f.Decls); i++ {
|
||||
decl := f.Decls[i]
|
||||
gen, ok := decl.(*ast.GenDecl)
|
||||
if !ok || gen.Tok != token.IMPORT || declImports(gen, "C") {
|
||||
continue
|
||||
}
|
||||
if first == nil {
|
||||
first = gen
|
||||
continue // Don't touch the first one.
|
||||
}
|
||||
// We now know there is more than one package in this import
|
||||
// declaration. Ensure that it ends up parenthesized.
|
||||
first.Lparen = first.Pos()
|
||||
// Move the imports of the other import declaration to the first one.
|
||||
for _, spec := range gen.Specs {
|
||||
spec.(*ast.ImportSpec).Path.ValuePos = first.Pos()
|
||||
first.Specs = append(first.Specs, spec)
|
||||
}
|
||||
f.Decls = append(f.Decls[:i], f.Decls[i+1:]...)
|
||||
i--
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
func isThirdParty(importPath string) bool {
|
||||
// Third party package import path usually contains "." (".com", ".org", ...)
|
||||
// This logic is taken from golang.org/x/tools/imports package.
|
||||
return strings.Contains(importPath, ".")
|
||||
}
|
||||
|
||||
// DeleteImport deletes the import path from the file f, if present.
|
||||
// If there are duplicate import declarations, all matching ones are deleted.
|
||||
func DeleteImport(fset *token.FileSet, f *ast.File, path string) (deleted bool) {
|
||||
return DeleteNamedImport(fset, f, "", path)
|
||||
}
|
||||
|
||||
// DeleteNamedImport deletes the import with the given name and path from the file f, if present.
|
||||
// If there are duplicate import declarations, all matching ones are deleted.
|
||||
func DeleteNamedImport(fset *token.FileSet, f *ast.File, name, path string) (deleted bool) {
|
||||
var delspecs []*ast.ImportSpec
|
||||
var delcomments []*ast.CommentGroup
|
||||
|
||||
// Find the import nodes that import path, if any.
|
||||
for i := 0; i < len(f.Decls); i++ {
|
||||
decl := f.Decls[i]
|
||||
gen, ok := decl.(*ast.GenDecl)
|
||||
if !ok || gen.Tok != token.IMPORT {
|
||||
continue
|
||||
}
|
||||
for j := 0; j < len(gen.Specs); j++ {
|
||||
spec := gen.Specs[j]
|
||||
impspec := spec.(*ast.ImportSpec)
|
||||
if importName(impspec) != name || importPath(impspec) != path {
|
||||
continue
|
||||
}
|
||||
|
||||
// We found an import spec that imports path.
|
||||
// Delete it.
|
||||
delspecs = append(delspecs, impspec)
|
||||
deleted = true
|
||||
copy(gen.Specs[j:], gen.Specs[j+1:])
|
||||
gen.Specs = gen.Specs[:len(gen.Specs)-1]
|
||||
|
||||
// If this was the last import spec in this decl,
|
||||
// delete the decl, too.
|
||||
if len(gen.Specs) == 0 {
|
||||
copy(f.Decls[i:], f.Decls[i+1:])
|
||||
f.Decls = f.Decls[:len(f.Decls)-1]
|
||||
i--
|
||||
break
|
||||
} else if len(gen.Specs) == 1 {
|
||||
if impspec.Doc != nil {
|
||||
delcomments = append(delcomments, impspec.Doc)
|
||||
}
|
||||
if impspec.Comment != nil {
|
||||
delcomments = append(delcomments, impspec.Comment)
|
||||
}
|
||||
for _, cg := range f.Comments {
|
||||
// Found comment on the same line as the import spec.
|
||||
if cg.End() < impspec.Pos() && fset.Position(cg.End()).Line == fset.Position(impspec.Pos()).Line {
|
||||
delcomments = append(delcomments, cg)
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
spec := gen.Specs[0].(*ast.ImportSpec)
|
||||
|
||||
// Move the documentation right after the import decl.
|
||||
if spec.Doc != nil {
|
||||
for fset.Position(gen.TokPos).Line+1 < fset.Position(spec.Doc.Pos()).Line {
|
||||
fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line)
|
||||
}
|
||||
}
|
||||
for _, cg := range f.Comments {
|
||||
if cg.End() < spec.Pos() && fset.Position(cg.End()).Line == fset.Position(spec.Pos()).Line {
|
||||
for fset.Position(gen.TokPos).Line+1 < fset.Position(spec.Pos()).Line {
|
||||
fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line)
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
if j > 0 {
|
||||
lastImpspec := gen.Specs[j-1].(*ast.ImportSpec)
|
||||
lastLine := fset.PositionFor(lastImpspec.Path.ValuePos, false).Line
|
||||
line := fset.PositionFor(impspec.Path.ValuePos, false).Line
|
||||
|
||||
// We deleted an entry but now there may be
|
||||
// a blank line-sized hole where the import was.
|
||||
if line-lastLine > 1 || !gen.Rparen.IsValid() {
|
||||
// There was a blank line immediately preceding the deleted import,
|
||||
// so there's no need to close the hole. The right parenthesis is
|
||||
// invalid after AddImport to an import statement without parenthesis.
|
||||
// Do nothing.
|
||||
} else if line != fset.File(gen.Rparen).LineCount() {
|
||||
// There was no blank line. Close the hole.
|
||||
fset.File(gen.Rparen).MergeLine(line)
|
||||
}
|
||||
}
|
||||
j--
|
||||
}
|
||||
}
|
||||
|
||||
// Delete imports from f.Imports.
|
||||
for i := 0; i < len(f.Imports); i++ {
|
||||
imp := f.Imports[i]
|
||||
for j, del := range delspecs {
|
||||
if imp == del {
|
||||
copy(f.Imports[i:], f.Imports[i+1:])
|
||||
f.Imports = f.Imports[:len(f.Imports)-1]
|
||||
copy(delspecs[j:], delspecs[j+1:])
|
||||
delspecs = delspecs[:len(delspecs)-1]
|
||||
i--
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Delete comments from f.Comments.
|
||||
for i := 0; i < len(f.Comments); i++ {
|
||||
cg := f.Comments[i]
|
||||
for j, del := range delcomments {
|
||||
if cg == del {
|
||||
copy(f.Comments[i:], f.Comments[i+1:])
|
||||
f.Comments = f.Comments[:len(f.Comments)-1]
|
||||
copy(delcomments[j:], delcomments[j+1:])
|
||||
delcomments = delcomments[:len(delcomments)-1]
|
||||
i--
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(delspecs) > 0 {
|
||||
panic(fmt.Sprintf("deleted specs from Decls but not Imports: %v", delspecs))
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// RewriteImport rewrites any import of path oldPath to path newPath.
|
||||
func RewriteImport(fset *token.FileSet, f *ast.File, oldPath, newPath string) (rewrote bool) {
|
||||
for _, imp := range f.Imports {
|
||||
if importPath(imp) == oldPath {
|
||||
rewrote = true
|
||||
// record old End, because the default is to compute
|
||||
// it using the length of imp.Path.Value.
|
||||
imp.EndPos = imp.End()
|
||||
imp.Path.Value = strconv.Quote(newPath)
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// UsesImport reports whether a given import is used.
|
||||
func UsesImport(f *ast.File, path string) (used bool) {
|
||||
spec := importSpec(f, path)
|
||||
if spec == nil {
|
||||
return
|
||||
}
|
||||
|
||||
name := spec.Name.String()
|
||||
switch name {
|
||||
case "<nil>":
|
||||
// If the package name is not explicitly specified,
|
||||
// make an educated guess. This is not guaranteed to be correct.
|
||||
lastSlash := strings.LastIndex(path, "/")
|
||||
if lastSlash == -1 {
|
||||
name = path
|
||||
} else {
|
||||
name = path[lastSlash+1:]
|
||||
}
|
||||
case "_", ".":
|
||||
// Not sure if this import is used - err on the side of caution.
|
||||
return true
|
||||
}
|
||||
|
||||
ast.Walk(visitFn(func(n ast.Node) {
|
||||
sel, ok := n.(*ast.SelectorExpr)
|
||||
if ok && isTopName(sel.X, name) {
|
||||
used = true
|
||||
}
|
||||
}), f)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
type visitFn func(node ast.Node)
|
||||
|
||||
func (fn visitFn) Visit(node ast.Node) ast.Visitor {
|
||||
fn(node)
|
||||
return fn
|
||||
}
|
||||
|
||||
// imports reports whether f has an import with the specified name and path.
|
||||
func imports(f *ast.File, name, path string) bool {
|
||||
for _, s := range f.Imports {
|
||||
if importName(s) == name && importPath(s) == path {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// importSpec returns the import spec if f imports path,
|
||||
// or nil otherwise.
|
||||
func importSpec(f *ast.File, path string) *ast.ImportSpec {
|
||||
for _, s := range f.Imports {
|
||||
if importPath(s) == path {
|
||||
return s
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// importName returns the name of s,
|
||||
// or "" if the import is not named.
|
||||
func importName(s *ast.ImportSpec) string {
|
||||
if s.Name == nil {
|
||||
return ""
|
||||
}
|
||||
return s.Name.Name
|
||||
}
|
||||
|
||||
// importPath returns the unquoted import path of s,
|
||||
// or "" if the path is not properly quoted.
|
||||
func importPath(s *ast.ImportSpec) string {
|
||||
t, err := strconv.Unquote(s.Path.Value)
|
||||
if err != nil {
|
||||
return ""
|
||||
}
|
||||
return t
|
||||
}
|
||||
|
||||
// declImports reports whether gen contains an import of path.
|
||||
func declImports(gen *ast.GenDecl, path string) bool {
|
||||
if gen.Tok != token.IMPORT {
|
||||
return false
|
||||
}
|
||||
for _, spec := range gen.Specs {
|
||||
impspec := spec.(*ast.ImportSpec)
|
||||
if importPath(impspec) == path {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// matchLen returns the length of the longest path segment prefix shared by x and y.
|
||||
func matchLen(x, y string) int {
|
||||
n := 0
|
||||
for i := 0; i < len(x) && i < len(y) && x[i] == y[i]; i++ {
|
||||
if x[i] == '/' {
|
||||
n++
|
||||
}
|
||||
}
|
||||
return n
|
||||
}
|
||||
|
||||
// isTopName returns true if n is a top-level unresolved identifier with the given name.
|
||||
func isTopName(n ast.Expr, name string) bool {
|
||||
id, ok := n.(*ast.Ident)
|
||||
return ok && id.Name == name && id.Obj == nil
|
||||
}
|
||||
|
||||
// Imports returns the file imports grouped by paragraph.
|
||||
func Imports(fset *token.FileSet, f *ast.File) [][]*ast.ImportSpec {
|
||||
var groups [][]*ast.ImportSpec
|
||||
|
||||
for _, decl := range f.Decls {
|
||||
genDecl, ok := decl.(*ast.GenDecl)
|
||||
if !ok || genDecl.Tok != token.IMPORT {
|
||||
break
|
||||
}
|
||||
|
||||
group := []*ast.ImportSpec{}
|
||||
|
||||
var lastLine int
|
||||
for _, spec := range genDecl.Specs {
|
||||
importSpec := spec.(*ast.ImportSpec)
|
||||
pos := importSpec.Path.ValuePos
|
||||
line := fset.Position(pos).Line
|
||||
if lastLine > 0 && pos > 0 && line-lastLine > 1 {
|
||||
groups = append(groups, group)
|
||||
group = []*ast.ImportSpec{}
|
||||
}
|
||||
group = append(group, importSpec)
|
||||
lastLine = line
|
||||
}
|
||||
groups = append(groups, group)
|
||||
}
|
||||
|
||||
return groups
|
||||
}
|
486
vendor/golang.org/x/tools/go/ast/astutil/rewrite.go
generated
vendored
Normal file
486
vendor/golang.org/x/tools/go/ast/astutil/rewrite.go
generated
vendored
Normal file
@ -0,0 +1,486 @@
|
||||
// Copyright 2017 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package astutil
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"reflect"
|
||||
"sort"
|
||||
)
|
||||
|
||||
// An ApplyFunc is invoked by Apply for each node n, even if n is nil,
|
||||
// before and/or after the node's children, using a Cursor describing
|
||||
// the current node and providing operations on it.
|
||||
//
|
||||
// The return value of ApplyFunc controls the syntax tree traversal.
|
||||
// See Apply for details.
|
||||
type ApplyFunc func(*Cursor) bool
|
||||
|
||||
// Apply traverses a syntax tree recursively, starting with root,
|
||||
// and calling pre and post for each node as described below.
|
||||
// Apply returns the syntax tree, possibly modified.
|
||||
//
|
||||
// If pre is not nil, it is called for each node before the node's
|
||||
// children are traversed (pre-order). If pre returns false, no
|
||||
// children are traversed, and post is not called for that node.
|
||||
//
|
||||
// If post is not nil, and a prior call of pre didn't return false,
|
||||
// post is called for each node after its children are traversed
|
||||
// (post-order). If post returns false, traversal is terminated and
|
||||
// Apply returns immediately.
|
||||
//
|
||||
// Only fields that refer to AST nodes are considered children;
|
||||
// i.e., token.Pos, Scopes, Objects, and fields of basic types
|
||||
// (strings, etc.) are ignored.
|
||||
//
|
||||
// Children are traversed in the order in which they appear in the
|
||||
// respective node's struct definition. A package's files are
|
||||
// traversed in the filenames' alphabetical order.
|
||||
func Apply(root ast.Node, pre, post ApplyFunc) (result ast.Node) {
|
||||
parent := &struct{ ast.Node }{root}
|
||||
defer func() {
|
||||
if r := recover(); r != nil && r != abort {
|
||||
panic(r)
|
||||
}
|
||||
result = parent.Node
|
||||
}()
|
||||
a := &application{pre: pre, post: post}
|
||||
a.apply(parent, "Node", nil, root)
|
||||
return
|
||||
}
|
||||
|
||||
var abort = new(int) // singleton, to signal termination of Apply
|
||||
|
||||
// A Cursor describes a node encountered during Apply.
|
||||
// Information about the node and its parent is available
|
||||
// from the Node, Parent, Name, and Index methods.
|
||||
//
|
||||
// If p is a variable of type and value of the current parent node
|
||||
// c.Parent(), and f is the field identifier with name c.Name(),
|
||||
// the following invariants hold:
|
||||
//
|
||||
// p.f == c.Node() if c.Index() < 0
|
||||
// p.f[c.Index()] == c.Node() if c.Index() >= 0
|
||||
//
|
||||
// The methods Replace, Delete, InsertBefore, and InsertAfter
|
||||
// can be used to change the AST without disrupting Apply.
|
||||
type Cursor struct {
|
||||
parent ast.Node
|
||||
name string
|
||||
iter *iterator // valid if non-nil
|
||||
node ast.Node
|
||||
}
|
||||
|
||||
// Node returns the current Node.
|
||||
func (c *Cursor) Node() ast.Node { return c.node }
|
||||
|
||||
// Parent returns the parent of the current Node.
|
||||
func (c *Cursor) Parent() ast.Node { return c.parent }
|
||||
|
||||
// Name returns the name of the parent Node field that contains the current Node.
|
||||
// If the parent is a *ast.Package and the current Node is a *ast.File, Name returns
|
||||
// the filename for the current Node.
|
||||
func (c *Cursor) Name() string { return c.name }
|
||||
|
||||
// Index reports the index >= 0 of the current Node in the slice of Nodes that
|
||||
// contains it, or a value < 0 if the current Node is not part of a slice.
|
||||
// The index of the current node changes if InsertBefore is called while
|
||||
// processing the current node.
|
||||
func (c *Cursor) Index() int {
|
||||
if c.iter != nil {
|
||||
return c.iter.index
|
||||
}
|
||||
return -1
|
||||
}
|
||||
|
||||
// field returns the current node's parent field value.
|
||||
func (c *Cursor) field() reflect.Value {
|
||||
return reflect.Indirect(reflect.ValueOf(c.parent)).FieldByName(c.name)
|
||||
}
|
||||
|
||||
// Replace replaces the current Node with n.
|
||||
// The replacement node is not walked by Apply.
|
||||
func (c *Cursor) Replace(n ast.Node) {
|
||||
if _, ok := c.node.(*ast.File); ok {
|
||||
file, ok := n.(*ast.File)
|
||||
if !ok {
|
||||
panic("attempt to replace *ast.File with non-*ast.File")
|
||||
}
|
||||
c.parent.(*ast.Package).Files[c.name] = file
|
||||
return
|
||||
}
|
||||
|
||||
v := c.field()
|
||||
if i := c.Index(); i >= 0 {
|
||||
v = v.Index(i)
|
||||
}
|
||||
v.Set(reflect.ValueOf(n))
|
||||
}
|
||||
|
||||
// Delete deletes the current Node from its containing slice.
|
||||
// If the current Node is not part of a slice, Delete panics.
|
||||
// As a special case, if the current node is a package file,
|
||||
// Delete removes it from the package's Files map.
|
||||
func (c *Cursor) Delete() {
|
||||
if _, ok := c.node.(*ast.File); ok {
|
||||
delete(c.parent.(*ast.Package).Files, c.name)
|
||||
return
|
||||
}
|
||||
|
||||
i := c.Index()
|
||||
if i < 0 {
|
||||
panic("Delete node not contained in slice")
|
||||
}
|
||||
v := c.field()
|
||||
l := v.Len()
|
||||
reflect.Copy(v.Slice(i, l), v.Slice(i+1, l))
|
||||
v.Index(l - 1).Set(reflect.Zero(v.Type().Elem()))
|
||||
v.SetLen(l - 1)
|
||||
c.iter.step--
|
||||
}
|
||||
|
||||
// InsertAfter inserts n after the current Node in its containing slice.
|
||||
// If the current Node is not part of a slice, InsertAfter panics.
|
||||
// Apply does not walk n.
|
||||
func (c *Cursor) InsertAfter(n ast.Node) {
|
||||
i := c.Index()
|
||||
if i < 0 {
|
||||
panic("InsertAfter node not contained in slice")
|
||||
}
|
||||
v := c.field()
|
||||
v.Set(reflect.Append(v, reflect.Zero(v.Type().Elem())))
|
||||
l := v.Len()
|
||||
reflect.Copy(v.Slice(i+2, l), v.Slice(i+1, l))
|
||||
v.Index(i + 1).Set(reflect.ValueOf(n))
|
||||
c.iter.step++
|
||||
}
|
||||
|
||||
// InsertBefore inserts n before the current Node in its containing slice.
|
||||
// If the current Node is not part of a slice, InsertBefore panics.
|
||||
// Apply will not walk n.
|
||||
func (c *Cursor) InsertBefore(n ast.Node) {
|
||||
i := c.Index()
|
||||
if i < 0 {
|
||||
panic("InsertBefore node not contained in slice")
|
||||
}
|
||||
v := c.field()
|
||||
v.Set(reflect.Append(v, reflect.Zero(v.Type().Elem())))
|
||||
l := v.Len()
|
||||
reflect.Copy(v.Slice(i+1, l), v.Slice(i, l))
|
||||
v.Index(i).Set(reflect.ValueOf(n))
|
||||
c.iter.index++
|
||||
}
|
||||
|
||||
// application carries all the shared data so we can pass it around cheaply.
|
||||
type application struct {
|
||||
pre, post ApplyFunc
|
||||
cursor Cursor
|
||||
iter iterator
|
||||
}
|
||||
|
||||
func (a *application) apply(parent ast.Node, name string, iter *iterator, n ast.Node) {
|
||||
// convert typed nil into untyped nil
|
||||
if v := reflect.ValueOf(n); v.Kind() == reflect.Ptr && v.IsNil() {
|
||||
n = nil
|
||||
}
|
||||
|
||||
// avoid heap-allocating a new cursor for each apply call; reuse a.cursor instead
|
||||
saved := a.cursor
|
||||
a.cursor.parent = parent
|
||||
a.cursor.name = name
|
||||
a.cursor.iter = iter
|
||||
a.cursor.node = n
|
||||
|
||||
if a.pre != nil && !a.pre(&a.cursor) {
|
||||
a.cursor = saved
|
||||
return
|
||||
}
|
||||
|
||||
// walk children
|
||||
// (the order of the cases matches the order of the corresponding node types in go/ast)
|
||||
switch n := n.(type) {
|
||||
case nil:
|
||||
// nothing to do
|
||||
|
||||
// Comments and fields
|
||||
case *ast.Comment:
|
||||
// nothing to do
|
||||
|
||||
case *ast.CommentGroup:
|
||||
if n != nil {
|
||||
a.applyList(n, "List")
|
||||
}
|
||||
|
||||
case *ast.Field:
|
||||
a.apply(n, "Doc", nil, n.Doc)
|
||||
a.applyList(n, "Names")
|
||||
a.apply(n, "Type", nil, n.Type)
|
||||
a.apply(n, "Tag", nil, n.Tag)
|
||||
a.apply(n, "Comment", nil, n.Comment)
|
||||
|
||||
case *ast.FieldList:
|
||||
a.applyList(n, "List")
|
||||
|
||||
// Expressions
|
||||
case *ast.BadExpr, *ast.Ident, *ast.BasicLit:
|
||||
// nothing to do
|
||||
|
||||
case *ast.Ellipsis:
|
||||
a.apply(n, "Elt", nil, n.Elt)
|
||||
|
||||
case *ast.FuncLit:
|
||||
a.apply(n, "Type", nil, n.Type)
|
||||
a.apply(n, "Body", nil, n.Body)
|
||||
|
||||
case *ast.CompositeLit:
|
||||
a.apply(n, "Type", nil, n.Type)
|
||||
a.applyList(n, "Elts")
|
||||
|
||||
case *ast.ParenExpr:
|
||||
a.apply(n, "X", nil, n.X)
|
||||
|
||||
case *ast.SelectorExpr:
|
||||
a.apply(n, "X", nil, n.X)
|
||||
a.apply(n, "Sel", nil, n.Sel)
|
||||
|
||||
case *ast.IndexExpr:
|
||||
a.apply(n, "X", nil, n.X)
|
||||
a.apply(n, "Index", nil, n.Index)
|
||||
|
||||
case *ast.IndexListExpr:
|
||||
a.apply(n, "X", nil, n.X)
|
||||
a.applyList(n, "Indices")
|
||||
|
||||
case *ast.SliceExpr:
|
||||
a.apply(n, "X", nil, n.X)
|
||||
a.apply(n, "Low", nil, n.Low)
|
||||
a.apply(n, "High", nil, n.High)
|
||||
a.apply(n, "Max", nil, n.Max)
|
||||
|
||||
case *ast.TypeAssertExpr:
|
||||
a.apply(n, "X", nil, n.X)
|
||||
a.apply(n, "Type", nil, n.Type)
|
||||
|
||||
case *ast.CallExpr:
|
||||
a.apply(n, "Fun", nil, n.Fun)
|
||||
a.applyList(n, "Args")
|
||||
|
||||
case *ast.StarExpr:
|
||||
a.apply(n, "X", nil, n.X)
|
||||
|
||||
case *ast.UnaryExpr:
|
||||
a.apply(n, "X", nil, n.X)
|
||||
|
||||
case *ast.BinaryExpr:
|
||||
a.apply(n, "X", nil, n.X)
|
||||
a.apply(n, "Y", nil, n.Y)
|
||||
|
||||
case *ast.KeyValueExpr:
|
||||
a.apply(n, "Key", nil, n.Key)
|
||||
a.apply(n, "Value", nil, n.Value)
|
||||
|
||||
// Types
|
||||
case *ast.ArrayType:
|
||||
a.apply(n, "Len", nil, n.Len)
|
||||
a.apply(n, "Elt", nil, n.Elt)
|
||||
|
||||
case *ast.StructType:
|
||||
a.apply(n, "Fields", nil, n.Fields)
|
||||
|
||||
case *ast.FuncType:
|
||||
if tparams := n.TypeParams; tparams != nil {
|
||||
a.apply(n, "TypeParams", nil, tparams)
|
||||
}
|
||||
a.apply(n, "Params", nil, n.Params)
|
||||
a.apply(n, "Results", nil, n.Results)
|
||||
|
||||
case *ast.InterfaceType:
|
||||
a.apply(n, "Methods", nil, n.Methods)
|
||||
|
||||
case *ast.MapType:
|
||||
a.apply(n, "Key", nil, n.Key)
|
||||
a.apply(n, "Value", nil, n.Value)
|
||||
|
||||
case *ast.ChanType:
|
||||
a.apply(n, "Value", nil, n.Value)
|
||||
|
||||
// Statements
|
||||
case *ast.BadStmt:
|
||||
// nothing to do
|
||||
|
||||
case *ast.DeclStmt:
|
||||
a.apply(n, "Decl", nil, n.Decl)
|
||||
|
||||
case *ast.EmptyStmt:
|
||||
// nothing to do
|
||||
|
||||
case *ast.LabeledStmt:
|
||||
a.apply(n, "Label", nil, n.Label)
|
||||
a.apply(n, "Stmt", nil, n.Stmt)
|
||||
|
||||
case *ast.ExprStmt:
|
||||
a.apply(n, "X", nil, n.X)
|
||||
|
||||
case *ast.SendStmt:
|
||||
a.apply(n, "Chan", nil, n.Chan)
|
||||
a.apply(n, "Value", nil, n.Value)
|
||||
|
||||
case *ast.IncDecStmt:
|
||||
a.apply(n, "X", nil, n.X)
|
||||
|
||||
case *ast.AssignStmt:
|
||||
a.applyList(n, "Lhs")
|
||||
a.applyList(n, "Rhs")
|
||||
|
||||
case *ast.GoStmt:
|
||||
a.apply(n, "Call", nil, n.Call)
|
||||
|
||||
case *ast.DeferStmt:
|
||||
a.apply(n, "Call", nil, n.Call)
|
||||
|
||||
case *ast.ReturnStmt:
|
||||
a.applyList(n, "Results")
|
||||
|
||||
case *ast.BranchStmt:
|
||||
a.apply(n, "Label", nil, n.Label)
|
||||
|
||||
case *ast.BlockStmt:
|
||||
a.applyList(n, "List")
|
||||
|
||||
case *ast.IfStmt:
|
||||
a.apply(n, "Init", nil, n.Init)
|
||||
a.apply(n, "Cond", nil, n.Cond)
|
||||
a.apply(n, "Body", nil, n.Body)
|
||||
a.apply(n, "Else", nil, n.Else)
|
||||
|
||||
case *ast.CaseClause:
|
||||
a.applyList(n, "List")
|
||||
a.applyList(n, "Body")
|
||||
|
||||
case *ast.SwitchStmt:
|
||||
a.apply(n, "Init", nil, n.Init)
|
||||
a.apply(n, "Tag", nil, n.Tag)
|
||||
a.apply(n, "Body", nil, n.Body)
|
||||
|
||||
case *ast.TypeSwitchStmt:
|
||||
a.apply(n, "Init", nil, n.Init)
|
||||
a.apply(n, "Assign", nil, n.Assign)
|
||||
a.apply(n, "Body", nil, n.Body)
|
||||
|
||||
case *ast.CommClause:
|
||||
a.apply(n, "Comm", nil, n.Comm)
|
||||
a.applyList(n, "Body")
|
||||
|
||||
case *ast.SelectStmt:
|
||||
a.apply(n, "Body", nil, n.Body)
|
||||
|
||||
case *ast.ForStmt:
|
||||
a.apply(n, "Init", nil, n.Init)
|
||||
a.apply(n, "Cond", nil, n.Cond)
|
||||
a.apply(n, "Post", nil, n.Post)
|
||||
a.apply(n, "Body", nil, n.Body)
|
||||
|
||||
case *ast.RangeStmt:
|
||||
a.apply(n, "Key", nil, n.Key)
|
||||
a.apply(n, "Value", nil, n.Value)
|
||||
a.apply(n, "X", nil, n.X)
|
||||
a.apply(n, "Body", nil, n.Body)
|
||||
|
||||
// Declarations
|
||||
case *ast.ImportSpec:
|
||||
a.apply(n, "Doc", nil, n.Doc)
|
||||
a.apply(n, "Name", nil, n.Name)
|
||||
a.apply(n, "Path", nil, n.Path)
|
||||
a.apply(n, "Comment", nil, n.Comment)
|
||||
|
||||
case *ast.ValueSpec:
|
||||
a.apply(n, "Doc", nil, n.Doc)
|
||||
a.applyList(n, "Names")
|
||||
a.apply(n, "Type", nil, n.Type)
|
||||
a.applyList(n, "Values")
|
||||
a.apply(n, "Comment", nil, n.Comment)
|
||||
|
||||
case *ast.TypeSpec:
|
||||
a.apply(n, "Doc", nil, n.Doc)
|
||||
a.apply(n, "Name", nil, n.Name)
|
||||
if tparams := n.TypeParams; tparams != nil {
|
||||
a.apply(n, "TypeParams", nil, tparams)
|
||||
}
|
||||
a.apply(n, "Type", nil, n.Type)
|
||||
a.apply(n, "Comment", nil, n.Comment)
|
||||
|
||||
case *ast.BadDecl:
|
||||
// nothing to do
|
||||
|
||||
case *ast.GenDecl:
|
||||
a.apply(n, "Doc", nil, n.Doc)
|
||||
a.applyList(n, "Specs")
|
||||
|
||||
case *ast.FuncDecl:
|
||||
a.apply(n, "Doc", nil, n.Doc)
|
||||
a.apply(n, "Recv", nil, n.Recv)
|
||||
a.apply(n, "Name", nil, n.Name)
|
||||
a.apply(n, "Type", nil, n.Type)
|
||||
a.apply(n, "Body", nil, n.Body)
|
||||
|
||||
// Files and packages
|
||||
case *ast.File:
|
||||
a.apply(n, "Doc", nil, n.Doc)
|
||||
a.apply(n, "Name", nil, n.Name)
|
||||
a.applyList(n, "Decls")
|
||||
// Don't walk n.Comments; they have either been walked already if
|
||||
// they are Doc comments, or they can be easily walked explicitly.
|
||||
|
||||
case *ast.Package:
|
||||
// collect and sort names for reproducible behavior
|
||||
var names []string
|
||||
for name := range n.Files {
|
||||
names = append(names, name)
|
||||
}
|
||||
sort.Strings(names)
|
||||
for _, name := range names {
|
||||
a.apply(n, name, nil, n.Files[name])
|
||||
}
|
||||
|
||||
default:
|
||||
panic(fmt.Sprintf("Apply: unexpected node type %T", n))
|
||||
}
|
||||
|
||||
if a.post != nil && !a.post(&a.cursor) {
|
||||
panic(abort)
|
||||
}
|
||||
|
||||
a.cursor = saved
|
||||
}
|
||||
|
||||
// An iterator controls iteration over a slice of nodes.
|
||||
type iterator struct {
|
||||
index, step int
|
||||
}
|
||||
|
||||
func (a *application) applyList(parent ast.Node, name string) {
|
||||
// avoid heap-allocating a new iterator for each applyList call; reuse a.iter instead
|
||||
saved := a.iter
|
||||
a.iter.index = 0
|
||||
for {
|
||||
// must reload parent.name each time, since cursor modifications might change it
|
||||
v := reflect.Indirect(reflect.ValueOf(parent)).FieldByName(name)
|
||||
if a.iter.index >= v.Len() {
|
||||
break
|
||||
}
|
||||
|
||||
// element x may be nil in a bad AST - be cautious
|
||||
var x ast.Node
|
||||
if e := v.Index(a.iter.index); e.IsValid() {
|
||||
x = e.Interface().(ast.Node)
|
||||
}
|
||||
|
||||
a.iter.step = 1
|
||||
a.apply(parent, name, &a.iter, x)
|
||||
a.iter.index += a.iter.step
|
||||
}
|
||||
a.iter = saved
|
||||
}
|
18
vendor/golang.org/x/tools/go/ast/astutil/util.go
generated
vendored
Normal file
18
vendor/golang.org/x/tools/go/ast/astutil/util.go
generated
vendored
Normal file
@ -0,0 +1,18 @@
|
||||
// Copyright 2015 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package astutil
|
||||
|
||||
import "go/ast"
|
||||
|
||||
// Unparen returns e with any enclosing parentheses stripped.
|
||||
func Unparen(e ast.Expr) ast.Expr {
|
||||
for {
|
||||
p, ok := e.(*ast.ParenExpr)
|
||||
if !ok {
|
||||
return e
|
||||
}
|
||||
e = p.X
|
||||
}
|
||||
}
|
195
vendor/golang.org/x/tools/go/buildutil/allpackages.go
generated
vendored
Normal file
195
vendor/golang.org/x/tools/go/buildutil/allpackages.go
generated
vendored
Normal file
@ -0,0 +1,195 @@
// Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package buildutil provides utilities related to the go/build
// package in the standard library.
//
// All I/O is done via the build.Context file system interface, which must
// be concurrency-safe.
package buildutil // import "golang.org/x/tools/go/buildutil"

import (
	"go/build"
	"os"
	"path/filepath"
	"sort"
	"strings"
	"sync"
)

// AllPackages returns the package path of each Go package in any source
// directory of the specified build context (e.g. $GOROOT or an element
// of $GOPATH). Errors are ignored. The results are sorted.
// All package paths are canonical, and thus may contain "/vendor/".
//
// The result may include import paths for directories that contain no
// *.go files, such as "archive" (in $GOROOT/src).
//
// All I/O is done via the build.Context file system interface,
// which must be concurrency-safe.
func AllPackages(ctxt *build.Context) []string {
	var list []string
	ForEachPackage(ctxt, func(pkg string, _ error) {
		list = append(list, pkg)
	})
	sort.Strings(list)
	return list
}

// ForEachPackage calls the found function with the package path of
// each Go package it finds in any source directory of the specified
// build context (e.g. $GOROOT or an element of $GOPATH).
// All package paths are canonical, and thus may contain "/vendor/".
//
// If the package directory exists but could not be read, the second
// argument to the found function provides the error.
//
// All I/O is done via the build.Context file system interface,
// which must be concurrency-safe.
func ForEachPackage(ctxt *build.Context, found func(importPath string, err error)) {
	ch := make(chan item)

	var wg sync.WaitGroup
	for _, root := range ctxt.SrcDirs() {
		root := root
		wg.Add(1)
		go func() {
			allPackages(ctxt, root, ch)
			wg.Done()
		}()
	}
	go func() {
		wg.Wait()
		close(ch)
	}()

	// All calls to found occur in the caller's goroutine.
	for i := range ch {
		found(i.importPath, i.err)
	}
}

type item struct {
	importPath string
	err        error // (optional)
}

// We use a process-wide counting semaphore to limit
// the number of parallel calls to ReadDir.
var ioLimit = make(chan bool, 20)

func allPackages(ctxt *build.Context, root string, ch chan<- item) {
	root = filepath.Clean(root) + string(os.PathSeparator)

	var wg sync.WaitGroup

	var walkDir func(dir string)
	walkDir = func(dir string) {
		// Avoid .foo, _foo, and testdata directory trees.
		base := filepath.Base(dir)
		if base == "" || base[0] == '.' || base[0] == '_' || base == "testdata" {
			return
		}

		pkg := filepath.ToSlash(strings.TrimPrefix(dir, root))

		// Prune search if we encounter any of these import paths.
		switch pkg {
		case "builtin":
			return
		}

		ioLimit <- true
		files, err := ReadDir(ctxt, dir)
		<-ioLimit
		if pkg != "" || err != nil {
			ch <- item{pkg, err}
		}
		for _, fi := range files {
			fi := fi
			if fi.IsDir() {
				wg.Add(1)
				go func() {
					walkDir(filepath.Join(dir, fi.Name()))
					wg.Done()
				}()
			}
		}
	}

	walkDir(root)
	wg.Wait()
}

// ExpandPatterns returns the set of packages matched by patterns,
// which may have the following forms:
//
//	golang.org/x/tools/cmd/guru     # a single package
//	golang.org/x/tools/...          # all packages beneath dir
//	...                             # the entire workspace.
//
// Order is significant: a pattern preceded by '-' removes matching
// packages from the set. For example, these patterns match all encoding
// packages except encoding/xml:
//
//	encoding/... -encoding/xml
//
// A trailing slash in a pattern is ignored. (Path components of Go
// package names are separated by slash, not the platform's path separator.)
func ExpandPatterns(ctxt *build.Context, patterns []string) map[string]bool {
	// TODO(adonovan): support other features of 'go list':
	// - "std"/"cmd"/"all" meta-packages
	// - "..." not at the end of a pattern
	// - relative patterns using "./" or "../" prefix

	pkgs := make(map[string]bool)
	doPkg := func(pkg string, neg bool) {
		if neg {
			delete(pkgs, pkg)
		} else {
			pkgs[pkg] = true
		}
	}

	// Scan entire workspace if wildcards are present.
	// TODO(adonovan): opt: scan only the necessary subtrees of the workspace.
	var all []string
	for _, arg := range patterns {
		if strings.HasSuffix(arg, "...") {
			all = AllPackages(ctxt)
			break
		}
	}

	for _, arg := range patterns {
		if arg == "" {
			continue
		}

		neg := arg[0] == '-'
		if neg {
			arg = arg[1:]
		}

		if arg == "..." {
			// ... matches all packages
			for _, pkg := range all {
				doPkg(pkg, neg)
			}
		} else if dir := strings.TrimSuffix(arg, "/..."); dir != arg {
			// dir/... matches all packages beneath dir
			for _, pkg := range all {
				if strings.HasPrefix(pkg, dir) &&
					(len(pkg) == len(dir) || pkg[len(dir)] == '/') {
					doPkg(pkg, neg)
				}
			}
		} else {
			// single package
			doPkg(strings.TrimSuffix(arg, "/"), neg)
		}
	}

	return pkgs
}
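A small, hypothetical sketch (not part of the vendored file) of how AllPackages and ExpandPatterns are typically combined; the pattern strings follow the rules documented on ExpandPatterns.

package main

import (
	"fmt"
	"go/build"

	"golang.org/x/tools/go/buildutil"
)

func main() {
	ctxt := build.Default

	// All encoding packages except encoding/xml, per the pattern rules above.
	for pkg := range buildutil.ExpandPatterns(&ctxt, []string{"encoding/...", "-encoding/xml"}) {
		fmt.Println(pkg)
	}

	// Or simply every package visible in the workspace, sorted.
	fmt.Println(len(buildutil.AllPackages(&ctxt)), "packages in the workspace")
}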
111
vendor/golang.org/x/tools/go/buildutil/fakecontext.go
generated
vendored
Normal file
@ -0,0 +1,111 @@
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package buildutil

import (
	"fmt"
	"go/build"
	"io"
	"os"
	"path"
	"path/filepath"
	"sort"
	"strings"
	"time"
)

// FakeContext returns a build.Context for the fake file tree specified
// by pkgs, which maps package import paths to a mapping from file base
// names to contents.
//
// The fake Context has a GOROOT of "/go" and no GOPATH, and overrides
// the necessary file access methods to read from memory instead of the
// real file system.
//
// Unlike a real file tree, the fake one has only two levels---packages
// and files---so ReadDir("/go/src/") returns all packages under
// /go/src/ including, for instance, "math" and "math/big".
// ReadDir("/go/src/math/big") would return all the files in the
// "math/big" package.
func FakeContext(pkgs map[string]map[string]string) *build.Context {
	clean := func(filename string) string {
		f := path.Clean(filepath.ToSlash(filename))
		// Removing "/go/src" while respecting segment
		// boundaries has this unfortunate corner case:
		if f == "/go/src" {
			return ""
		}
		return strings.TrimPrefix(f, "/go/src/")
	}

	ctxt := build.Default // copy
	ctxt.GOROOT = "/go"
	ctxt.GOPATH = ""
	ctxt.Compiler = "gc"
	ctxt.IsDir = func(dir string) bool {
		dir = clean(dir)
		if dir == "" {
			return true // needed by (*build.Context).SrcDirs
		}
		return pkgs[dir] != nil
	}
	ctxt.ReadDir = func(dir string) ([]os.FileInfo, error) {
		dir = clean(dir)
		var fis []os.FileInfo
		if dir == "" {
			// enumerate packages
			for importPath := range pkgs {
				fis = append(fis, fakeDirInfo(importPath))
			}
		} else {
			// enumerate files of package
			for basename := range pkgs[dir] {
				fis = append(fis, fakeFileInfo(basename))
			}
		}
		sort.Sort(byName(fis))
		return fis, nil
	}
	ctxt.OpenFile = func(filename string) (io.ReadCloser, error) {
		filename = clean(filename)
		dir, base := path.Split(filename)
		content, ok := pkgs[path.Clean(dir)][base]
		if !ok {
			return nil, fmt.Errorf("file not found: %s", filename)
		}
		return io.NopCloser(strings.NewReader(content)), nil
	}
	ctxt.IsAbsPath = func(path string) bool {
		path = filepath.ToSlash(path)
		// Don't rely on the default (filepath.Path) since on
		// Windows, it reports virtual paths as non-absolute.
		return strings.HasPrefix(path, "/")
	}
	return &ctxt
}

type byName []os.FileInfo

func (s byName) Len() int           { return len(s) }
func (s byName) Swap(i, j int)      { s[i], s[j] = s[j], s[i] }
func (s byName) Less(i, j int) bool { return s[i].Name() < s[j].Name() }

type fakeFileInfo string

func (fi fakeFileInfo) Name() string    { return string(fi) }
func (fakeFileInfo) Sys() interface{}   { return nil }
func (fakeFileInfo) ModTime() time.Time { return time.Time{} }
func (fakeFileInfo) IsDir() bool        { return false }
func (fakeFileInfo) Size() int64        { return 0 }
func (fakeFileInfo) Mode() os.FileMode  { return 0644 }

type fakeDirInfo string

func (fd fakeDirInfo) Name() string    { return string(fd) }
func (fakeDirInfo) Sys() interface{}   { return nil }
func (fakeDirInfo) ModTime() time.Time { return time.Time{} }
func (fakeDirInfo) IsDir() bool        { return true }
func (fakeDirInfo) Size() int64        { return 0 }
func (fakeDirInfo) Mode() os.FileMode  { return 0755 }
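A hypothetical sketch of FakeContext in a test (not part of the vendored file): the outer map keys are import paths, the inner keys are file base names, and the whole tree lives in memory. The example package paths and sources are made up.

package main

import (
	"fmt"

	"golang.org/x/tools/go/buildutil"
)

func main() {
	// Two in-memory packages; note the flat "package -> file -> contents" shape.
	ctxt := buildutil.FakeContext(map[string]map[string]string{
		"example/hello": {"hello.go": `package hello; func Greet() string { return "hi" }`},
		"example/main":  {"main.go": `package main; import "example/hello"; func main() { _ = hello.Greet() }`},
	})

	fmt.Println(buildutil.AllPackages(ctxt)) // [example/hello example/main]
}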
101
vendor/golang.org/x/tools/go/buildutil/overlay.go
generated
vendored
Normal file
@ -0,0 +1,101 @@
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package buildutil

import (
	"bufio"
	"bytes"
	"fmt"
	"go/build"
	"io"
	"path/filepath"
	"strconv"
	"strings"
)

// OverlayContext overlays a build.Context with additional files from
// a map. Files in the map take precedence over other files.
//
// In addition to plain string comparison, two file names are
// considered equal if their base names match and their directory
// components point at the same directory on the file system. That is,
// symbolic links are followed for directories, but not files.
//
// A common use case for OverlayContext is to allow editors to pass in
// a set of unsaved, modified files.
//
// Currently, only the Context.OpenFile function will respect the
// overlay. This may change in the future.
func OverlayContext(orig *build.Context, overlay map[string][]byte) *build.Context {
	// TODO(dominikh): Implement IsDir, HasSubdir and ReadDir

	rc := func(data []byte) (io.ReadCloser, error) {
		return io.NopCloser(bytes.NewBuffer(data)), nil
	}

	copy := *orig // make a copy
	ctxt := &copy
	ctxt.OpenFile = func(path string) (io.ReadCloser, error) {
		// Fast path: names match exactly.
		if content, ok := overlay[path]; ok {
			return rc(content)
		}

		// Slow path: check for same file under a different
		// alias, perhaps due to a symbolic link.
		for filename, content := range overlay {
			if sameFile(path, filename) {
				return rc(content)
			}
		}

		return OpenFile(orig, path)
	}
	return ctxt
}

// ParseOverlayArchive parses an archive containing Go files and their
// contents. The result is intended to be used with OverlayContext.
//
// # Archive format
//
// The archive consists of a series of files. Each file consists of a
// name, a decimal file size and the file contents, separated by
// newlines. No newline follows after the file contents.
func ParseOverlayArchive(archive io.Reader) (map[string][]byte, error) {
	overlay := make(map[string][]byte)
	r := bufio.NewReader(archive)
	for {
		// Read file name.
		filename, err := r.ReadString('\n')
		if err != nil {
			if err == io.EOF {
				break // OK
			}
			return nil, fmt.Errorf("reading archive file name: %v", err)
		}
		filename = filepath.Clean(strings.TrimSpace(filename))

		// Read file size.
		sz, err := r.ReadString('\n')
		if err != nil {
			return nil, fmt.Errorf("reading size of archive file %s: %v", filename, err)
		}
		sz = strings.TrimSpace(sz)
		size, err := strconv.ParseUint(sz, 10, 32)
		if err != nil {
			return nil, fmt.Errorf("parsing size of archive file %s: %v", filename, err)
		}

		// Read file content.
		content := make([]byte, size)
		if _, err := io.ReadFull(r, content); err != nil {
			return nil, fmt.Errorf("reading archive file %s: %v", filename, err)
		}
		overlay[filename] = content
	}

	return overlay, nil
}
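An illustrative sketch (not part of the vendored file) showing the archive format described above and how the parsed overlay plugs into OverlayContext; the file path and contents are made up.

package main

import (
	"fmt"
	"go/build"
	"io"
	"strings"

	"golang.org/x/tools/go/buildutil"
)

func main() {
	// Archive format: file name, decimal size, then exactly that many bytes.
	const archive = "/home/user/hello/hello.go\n20\npackage hello // wip"

	overlay, err := buildutil.ParseOverlayArchive(strings.NewReader(archive))
	if err != nil {
		panic(err)
	}

	// Reads of the overlaid path now come from memory, everything else from disk.
	ctxt := buildutil.OverlayContext(&build.Default, overlay)
	rc, err := buildutil.OpenFile(ctxt, "/home/user/hello/hello.go")
	if err != nil {
		panic(err)
	}
	defer rc.Close()
	b, _ := io.ReadAll(rc)
	fmt.Printf("%s\n", b) // package hello // wip
}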
80
vendor/golang.org/x/tools/go/buildutil/tags.go
generated
vendored
Normal file
@ -0,0 +1,80 @@
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package buildutil

// This logic was copied from stringsFlag from $GOROOT/src/cmd/go/build.go.

import "fmt"

const TagsFlagDoc = "a list of `build tags` to consider satisfied during the build. " +
	"For more information about build tags, see the description of " +
	"build constraints in the documentation for the go/build package"

// TagsFlag is an implementation of the flag.Value and flag.Getter interfaces that parses
// a flag value in the same manner as go build's -tags flag and
// populates a []string slice.
//
// See $GOROOT/src/go/build/doc.go for description of build tags.
// See $GOROOT/src/cmd/go/doc.go for description of 'go build -tags' flag.
//
// Example:
//
//	flag.Var((*buildutil.TagsFlag)(&build.Default.BuildTags), "tags", buildutil.TagsFlagDoc)
type TagsFlag []string

func (v *TagsFlag) Set(s string) error {
	var err error
	*v, err = splitQuotedFields(s)
	if *v == nil {
		*v = []string{}
	}
	return err
}

func (v *TagsFlag) Get() interface{} { return *v }

func splitQuotedFields(s string) ([]string, error) {
	// Split fields allowing '' or "" around elements.
	// Quotes further inside the string do not count.
	var f []string
	for len(s) > 0 {
		for len(s) > 0 && isSpaceByte(s[0]) {
			s = s[1:]
		}
		if len(s) == 0 {
			break
		}
		// Accepted quoted string. No unescaping inside.
		if s[0] == '"' || s[0] == '\'' {
			quote := s[0]
			s = s[1:]
			i := 0
			for i < len(s) && s[i] != quote {
				i++
			}
			if i >= len(s) {
				return nil, fmt.Errorf("unterminated %c string", quote)
			}
			f = append(f, s[:i])
			s = s[i+1:]
			continue
		}
		i := 0
		for i < len(s) && !isSpaceByte(s[i]) {
			i++
		}
		f = append(f, s[:i])
		s = s[i:]
	}
	return f, nil
}

func (v *TagsFlag) String() string {
	return "<tagsFlag>"
}

func isSpaceByte(c byte) bool {
	return c == ' ' || c == '\t' || c == '\n' || c == '\r'
}
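A short sketch expanding the Example in the doc comment above (not part of the vendored file): registering the flag and parsing a hypothetical -tags value.

package main

import (
	"flag"
	"fmt"
	"go/build"

	"golang.org/x/tools/go/buildutil"
)

func main() {
	// Wire -tags straight into the default build context, as the doc comment suggests.
	flag.Var((*buildutil.TagsFlag)(&build.Default.BuildTags), "tags", buildutil.TagsFlagDoc)
	flag.Parse()

	// e.g. go run . -tags 'integration "linux amd64"'  ->  [integration linux amd64]
	fmt.Println(build.Default.BuildTags)
}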
209
vendor/golang.org/x/tools/go/buildutil/util.go
generated
vendored
Normal file
@ -0,0 +1,209 @@
// Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package buildutil

import (
	"fmt"
	"go/ast"
	"go/build"
	"go/parser"
	"go/token"
	"io"
	"io/ioutil"
	"os"
	"path"
	"path/filepath"
	"strings"
)

// ParseFile behaves like parser.ParseFile,
// but uses the build context's file system interface, if any.
//
// If file is not absolute (as defined by IsAbsPath), the (dir, file)
// components are joined using JoinPath; dir must be absolute.
//
// The displayPath function, if provided, is used to transform the
// filename that will be attached to the ASTs.
//
// TODO(adonovan): call this from go/loader.parseFiles when the tree thaws.
func ParseFile(fset *token.FileSet, ctxt *build.Context, displayPath func(string) string, dir string, file string, mode parser.Mode) (*ast.File, error) {
	if !IsAbsPath(ctxt, file) {
		file = JoinPath(ctxt, dir, file)
	}
	rd, err := OpenFile(ctxt, file)
	if err != nil {
		return nil, err
	}
	defer rd.Close() // ignore error
	if displayPath != nil {
		file = displayPath(file)
	}
	return parser.ParseFile(fset, file, rd, mode)
}

// ContainingPackage returns the package containing filename.
//
// If filename is not absolute, it is interpreted relative to working directory dir.
// All I/O is via the build context's file system interface, if any.
//
// The '...Files []string' fields of the resulting build.Package are not
// populated (build.FindOnly mode).
func ContainingPackage(ctxt *build.Context, dir, filename string) (*build.Package, error) {
	if !IsAbsPath(ctxt, filename) {
		filename = JoinPath(ctxt, dir, filename)
	}

	// We must not assume the file tree uses
	// "/" always,
	// `\` always,
	// or os.PathSeparator (which varies by platform),
	// but to make any progress, we are forced to assume that
	// paths will not use `\` unless the PathSeparator
	// is also `\`, thus we can rely on filepath.ToSlash for some sanity.

	dirSlash := path.Dir(filepath.ToSlash(filename)) + "/"

	// We assume that no source root (GOPATH[i] or GOROOT) contains any other.
	for _, srcdir := range ctxt.SrcDirs() {
		srcdirSlash := filepath.ToSlash(srcdir) + "/"
		if importPath, ok := HasSubdir(ctxt, srcdirSlash, dirSlash); ok {
			return ctxt.Import(importPath, dir, build.FindOnly)
		}
	}

	return nil, fmt.Errorf("can't find package containing %s", filename)
}

// -- Effective methods of file system interface -------------------------

// (go/build.Context defines these as methods, but does not export them.)

// HasSubdir calls ctxt.HasSubdir (if not nil) or else uses
// the local file system to answer the question.
func HasSubdir(ctxt *build.Context, root, dir string) (rel string, ok bool) {
	if f := ctxt.HasSubdir; f != nil {
		return f(root, dir)
	}

	// Try using paths we received.
	if rel, ok = hasSubdir(root, dir); ok {
		return
	}

	// Try expanding symlinks and comparing
	// expanded against unexpanded and
	// expanded against expanded.
	rootSym, _ := filepath.EvalSymlinks(root)
	dirSym, _ := filepath.EvalSymlinks(dir)

	if rel, ok = hasSubdir(rootSym, dir); ok {
		return
	}
	if rel, ok = hasSubdir(root, dirSym); ok {
		return
	}
	return hasSubdir(rootSym, dirSym)
}

func hasSubdir(root, dir string) (rel string, ok bool) {
	const sep = string(filepath.Separator)
	root = filepath.Clean(root)
	if !strings.HasSuffix(root, sep) {
		root += sep
	}

	dir = filepath.Clean(dir)
	if !strings.HasPrefix(dir, root) {
		return "", false
	}

	return filepath.ToSlash(dir[len(root):]), true
}

// FileExists returns true if the specified file exists,
// using the build context's file system interface.
func FileExists(ctxt *build.Context, path string) bool {
	if ctxt.OpenFile != nil {
		r, err := ctxt.OpenFile(path)
		if err != nil {
			return false
		}
		r.Close() // ignore error
		return true
	}
	_, err := os.Stat(path)
	return err == nil
}

// OpenFile behaves like os.Open,
// but uses the build context's file system interface, if any.
func OpenFile(ctxt *build.Context, path string) (io.ReadCloser, error) {
	if ctxt.OpenFile != nil {
		return ctxt.OpenFile(path)
	}
	return os.Open(path)
}

// IsAbsPath behaves like filepath.IsAbs,
// but uses the build context's file system interface, if any.
func IsAbsPath(ctxt *build.Context, path string) bool {
	if ctxt.IsAbsPath != nil {
		return ctxt.IsAbsPath(path)
	}
	return filepath.IsAbs(path)
}

// JoinPath behaves like filepath.Join,
// but uses the build context's file system interface, if any.
func JoinPath(ctxt *build.Context, path ...string) string {
	if ctxt.JoinPath != nil {
		return ctxt.JoinPath(path...)
	}
	return filepath.Join(path...)
}

// IsDir behaves like os.Stat plus IsDir,
// but uses the build context's file system interface, if any.
func IsDir(ctxt *build.Context, path string) bool {
	if ctxt.IsDir != nil {
		return ctxt.IsDir(path)
	}
	fi, err := os.Stat(path)
	return err == nil && fi.IsDir()
}

// ReadDir behaves like ioutil.ReadDir,
// but uses the build context's file system interface, if any.
func ReadDir(ctxt *build.Context, path string) ([]os.FileInfo, error) {
	if ctxt.ReadDir != nil {
		return ctxt.ReadDir(path)
	}
	return ioutil.ReadDir(path)
}

// SplitPathList behaves like filepath.SplitList,
// but uses the build context's file system interface, if any.
func SplitPathList(ctxt *build.Context, s string) []string {
	if ctxt.SplitPathList != nil {
		return ctxt.SplitPathList(s)
	}
	return filepath.SplitList(s)
}

// sameFile returns true if x and y have the same basename and denote
// the same file.
func sameFile(x, y string) bool {
	if path.Clean(x) == path.Clean(y) {
		return true
	}
	if filepath.Base(x) == filepath.Base(y) { // (optimisation)
		if xi, err := os.Stat(x); err == nil {
			if yi, err := os.Stat(y); err == nil {
				return os.SameFile(xi, yi)
			}
		}
	}
	return false
}
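A hypothetical sketch tying a few of these helpers together (not part of the vendored file); it assumes a standard GOROOT source tree is present on disk.

package main

import (
	"fmt"
	"go/build"
	"go/parser"
	"go/token"
	"path/filepath"

	"golang.org/x/tools/go/buildutil"
)

func main() {
	ctxt := build.Default

	// Which package owns this file? (FindOnly: no file lists are populated.)
	file := filepath.Join(ctxt.GOROOT, "src", "fmt", "print.go")
	pkg, err := buildutil.ContainingPackage(&ctxt, "/", file)
	if err != nil {
		panic(err)
	}
	fmt.Println(pkg.ImportPath) // fmt

	// Parse a file through the context's (possibly virtual) file system.
	fset := token.NewFileSet()
	f, err := buildutil.ParseFile(fset, &ctxt, nil, pkg.Dir, "print.go", parser.PackageClauseOnly)
	if err != nil {
		panic(err)
	}
	fmt.Println(f.Name.Name) // fmt
}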
219
vendor/golang.org/x/tools/go/internal/cgo/cgo.go
generated
vendored
Normal file
@ -0,0 +1,219 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package cgo handles cgo preprocessing of files containing `import "C"`.
//
// DESIGN
//
// The approach taken is to run the cgo processor on the package's
// CgoFiles and parse the output, faking the filenames of the
// resulting ASTs so that the synthetic file containing the C types is
// called "C" (e.g. "~/go/src/net/C") and the preprocessed files
// have their original names (e.g. "~/go/src/net/cgo_unix.go"),
// not the names of the actual temporary files.
//
// The advantage of this approach is its fidelity to 'go build'. The
// downside is that the token.Position.Offset for each AST node is
// incorrect, being an offset within the temporary file. Line numbers
// should still be correct because of the //line comments.
//
// The logic of this file is mostly plundered from the 'go build'
// tool, which also invokes the cgo preprocessor.
//
//
// REJECTED ALTERNATIVE
//
// An alternative approach that we explored is to extend go/types'
// Importer mechanism to provide the identity of the importing package
// so that each time `import "C"` appears it resolves to a different
// synthetic package containing just the objects needed in that case.
// The loader would invoke cgo but parse only the cgo_types.go file
// defining the package-level objects, discarding the other files
// resulting from preprocessing.
//
// The benefit of this approach would have been that source-level
// syntax information would correspond exactly to the original cgo
// file, with no preprocessing involved, making source tools like
// godoc, guru, and eg happy. However, the approach was rejected
// due to the additional complexity it would impose on go/types. (It
// made for a beautiful demo, though.)
//
// cgo files, despite their *.go extension, are not legal Go source
// files per the specification since they may refer to unexported
// members of package "C" such as C.int. Also, a function such as
// C.getpwent has in effect two types, one matching its C type and one
// which additionally returns (errno C.int). The cgo preprocessor
// uses name mangling to distinguish these two functions in the
// processed code, but go/types would need to duplicate this logic in
// its handling of function calls, analogous to the treatment of map
// lookups in which y=m[k] and y,ok=m[k] are both legal.

package cgo

import (
	"fmt"
	"go/ast"
	"go/build"
	"go/parser"
	"go/token"
	"log"
	"os"
	"os/exec"
	"path/filepath"
	"regexp"
	"strings"
)

// ProcessFiles invokes the cgo preprocessor on bp.CgoFiles, parses
// the output and returns the resulting ASTs.
func ProcessFiles(bp *build.Package, fset *token.FileSet, DisplayPath func(path string) string, mode parser.Mode) ([]*ast.File, error) {
	tmpdir, err := os.MkdirTemp("", strings.Replace(bp.ImportPath, "/", "_", -1)+"_C")
	if err != nil {
		return nil, err
	}
	defer os.RemoveAll(tmpdir)

	pkgdir := bp.Dir
	if DisplayPath != nil {
		pkgdir = DisplayPath(pkgdir)
	}

	cgoFiles, cgoDisplayFiles, err := Run(bp, pkgdir, tmpdir, false)
	if err != nil {
		return nil, err
	}
	var files []*ast.File
	for i := range cgoFiles {
		rd, err := os.Open(cgoFiles[i])
		if err != nil {
			return nil, err
		}
		display := filepath.Join(bp.Dir, cgoDisplayFiles[i])
		f, err := parser.ParseFile(fset, display, rd, mode)
		rd.Close()
		if err != nil {
			return nil, err
		}
		files = append(files, f)
	}
	return files, nil
}

var cgoRe = regexp.MustCompile(`[/\\:]`)

// Run invokes the cgo preprocessor on bp.CgoFiles and returns two
// lists of files: the resulting processed files (in temporary
// directory tmpdir) and the corresponding names of the unprocessed files.
//
// Run is adapted from (*builder).cgo in
// $GOROOT/src/cmd/go/build.go, but these features are unsupported:
// Objective C, CGOPKGPATH, CGO_FLAGS.
//
// If useabs is set to true, absolute paths of the bp.CgoFiles will be passed in
// to the cgo preprocessor. This in turn will set the // line comments
// referring to those files to use absolute paths. This is needed for
// go/packages using the legacy go list support so it is able to find
// the original files.
func Run(bp *build.Package, pkgdir, tmpdir string, useabs bool) (files, displayFiles []string, err error) {
	cgoCPPFLAGS, _, _, _ := cflags(bp, true)
	_, cgoexeCFLAGS, _, _ := cflags(bp, false)

	if len(bp.CgoPkgConfig) > 0 {
		pcCFLAGS, err := pkgConfigFlags(bp)
		if err != nil {
			return nil, nil, err
		}
		cgoCPPFLAGS = append(cgoCPPFLAGS, pcCFLAGS...)
	}

	// Allows including _cgo_export.h from .[ch] files in the package.
	cgoCPPFLAGS = append(cgoCPPFLAGS, "-I", tmpdir)

	// _cgo_gotypes.go (displayed "C") contains the type definitions.
	files = append(files, filepath.Join(tmpdir, "_cgo_gotypes.go"))
	displayFiles = append(displayFiles, "C")
	for _, fn := range bp.CgoFiles {
		// "foo.cgo1.go" (displayed "foo.go") is the processed Go source.
		f := cgoRe.ReplaceAllString(fn[:len(fn)-len("go")], "_")
		files = append(files, filepath.Join(tmpdir, f+"cgo1.go"))
		displayFiles = append(displayFiles, fn)
	}

	var cgoflags []string
	if bp.Goroot && bp.ImportPath == "runtime/cgo" {
		cgoflags = append(cgoflags, "-import_runtime_cgo=false")
	}
	if bp.Goroot && bp.ImportPath == "runtime/race" || bp.ImportPath == "runtime/cgo" {
		cgoflags = append(cgoflags, "-import_syscall=false")
	}

	var cgoFiles []string = bp.CgoFiles
	if useabs {
		cgoFiles = make([]string, len(bp.CgoFiles))
		for i := range cgoFiles {
			cgoFiles[i] = filepath.Join(pkgdir, bp.CgoFiles[i])
		}
	}

	args := stringList(
		"go", "tool", "cgo", "-objdir", tmpdir, cgoflags, "--",
		cgoCPPFLAGS, cgoexeCFLAGS, cgoFiles,
	)
	if false {
		log.Printf("Running cgo for package %q: %s (dir=%s)", bp.ImportPath, args, pkgdir)
	}
	cmd := exec.Command(args[0], args[1:]...)
	cmd.Dir = pkgdir
	cmd.Env = append(os.Environ(), "PWD="+pkgdir)
	cmd.Stdout = os.Stderr
	cmd.Stderr = os.Stderr
	if err := cmd.Run(); err != nil {
		return nil, nil, fmt.Errorf("cgo failed: %s: %s", args, err)
	}

	return files, displayFiles, nil
}

// -- unmodified from 'go build' ---------------------------------------

// Return the flags to use when invoking the C or C++ compilers, or cgo.
func cflags(p *build.Package, def bool) (cppflags, cflags, cxxflags, ldflags []string) {
	var defaults string
	if def {
		defaults = "-g -O2"
	}

	cppflags = stringList(envList("CGO_CPPFLAGS", ""), p.CgoCPPFLAGS)
	cflags = stringList(envList("CGO_CFLAGS", defaults), p.CgoCFLAGS)
	cxxflags = stringList(envList("CGO_CXXFLAGS", defaults), p.CgoCXXFLAGS)
	ldflags = stringList(envList("CGO_LDFLAGS", defaults), p.CgoLDFLAGS)
	return
}

// envList returns the value of the given environment variable broken
// into fields, using the default value when the variable is empty.
func envList(key, def string) []string {
	v := os.Getenv(key)
	if v == "" {
		v = def
	}
	return strings.Fields(v)
}

// stringList's arguments should be a sequence of string or []string values.
// stringList flattens them into a single []string.
func stringList(args ...interface{}) []string {
	var x []string
	for _, arg := range args {
		switch arg := arg.(type) {
		case []string:
			x = append(x, arg...)
		case string:
			x = append(x, arg)
		default:
			panic("stringList: invalid argument")
		}
	}
	return x
}
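A tiny standalone sketch (not part of the vendored file) of the file-name mapping Run performs, using the same regexp; the tmpdir and file name here are made up.

package main

import (
	"fmt"
	"path/filepath"
	"regexp"
)

// Mirror of the mangling in Run: path separators and colons in a cgo
// file name become underscores, and the ".go" suffix becomes ".cgo1.go".
var cgoRe = regexp.MustCompile(`[/\\:]`)

func processedName(tmpdir, fn string) string {
	f := cgoRe.ReplaceAllString(fn[:len(fn)-len("go")], "_")
	return filepath.Join(tmpdir, f+"cgo1.go")
}

func main() {
	fmt.Println(processedName("/tmp/net_C", "cgo_unix.go"))
	// /tmp/net_C/cgo_unix.cgo1.go -- parsed from here, but displayed as "cgo_unix.go"
}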
39
vendor/golang.org/x/tools/go/internal/cgo/cgo_pkgconfig.go
generated
vendored
Normal file
@ -0,0 +1,39 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package cgo

import (
	"errors"
	"fmt"
	"go/build"
	"os/exec"
	"strings"
)

// pkgConfig runs pkg-config with the specified arguments and returns the flags it prints.
func pkgConfig(mode string, pkgs []string) (flags []string, err error) {
	cmd := exec.Command("pkg-config", append([]string{mode}, pkgs...)...)
	out, err := cmd.CombinedOutput()
	if err != nil {
		s := fmt.Sprintf("%s failed: %v", strings.Join(cmd.Args, " "), err)
		if len(out) > 0 {
			s = fmt.Sprintf("%s: %s", s, out)
		}
		return nil, errors.New(s)
	}
	if len(out) > 0 {
		flags = strings.Fields(string(out))
	}
	return
}

// pkgConfigFlags calls pkg-config if needed and returns the cflags
// needed to build the package.
func pkgConfigFlags(p *build.Package) (cflags []string, err error) {
	if len(p.CgoPkgConfig) == 0 {
		return nil, nil
	}
	return pkgConfig("--cflags", p.CgoPkgConfig)
}
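Roughly the equivalent of the unexported pkgConfig helper above, written as a standalone sketch (not part of the vendored file); the package name gtk+-3.0 is only an example and pkg-config must be installed for it to succeed.

package main

import (
	"fmt"
	"os/exec"
	"strings"
)

// cflagsFor runs `pkg-config --cflags <pkgs...>` and splits its output
// into whitespace-separated flags, as pkgConfig does above.
func cflagsFor(pkgs ...string) ([]string, error) {
	out, err := exec.Command("pkg-config", append([]string{"--cflags"}, pkgs...)...).CombinedOutput()
	if err != nil {
		return nil, fmt.Errorf("pkg-config failed: %v: %s", err, out)
	}
	return strings.Fields(string(out)), nil
}

func main() {
	flags, err := cflagsFor("gtk+-3.0")
	fmt.Println(flags, err)
}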
202
vendor/golang.org/x/tools/go/loader/doc.go
generated
vendored
Normal file
@ -0,0 +1,202 @@
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package loader loads a complete Go program from source code, parsing
// and type-checking the initial packages plus their transitive closure
// of dependencies. The ASTs and the derived facts are retained for
// later use.
//
// Deprecated: This is an older API and does not have support
// for modules. Use golang.org/x/tools/go/packages instead.
//
// The package defines two primary types: Config, which specifies a
// set of initial packages to load and various other options; and
// Program, which is the result of successfully loading the packages
// specified by a configuration.
//
// The configuration can be set directly, but *Config provides various
// convenience methods to simplify the common cases, each of which can
// be called any number of times. Finally, these are followed by a
// call to Load() to actually load and type-check the program.
//
//	var conf loader.Config
//
//	// Use the command-line arguments to specify
//	// a set of initial packages to load from source.
//	// See FromArgsUsage for help.
//	rest, err := conf.FromArgs(os.Args[1:], wantTests)
//
//	// Parse the specified files and create an ad hoc package with path "foo".
//	// All files must have the same 'package' declaration.
//	conf.CreateFromFilenames("foo", "foo.go", "bar.go")
//
//	// Create an ad hoc package with path "foo" from
//	// the specified already-parsed files.
//	// All ASTs must have the same 'package' declaration.
//	conf.CreateFromFiles("foo", parsedFiles)
//
//	// Add "runtime" to the set of packages to be loaded.
//	conf.Import("runtime")
//
//	// Adds "fmt" and "fmt_test" to the set of packages
//	// to be loaded. "fmt" will include *_test.go files.
//	conf.ImportWithTests("fmt")
//
//	// Finally, load all the packages specified by the configuration.
//	prog, err := conf.Load()
//
// See examples_test.go for examples of API usage.
//
// # CONCEPTS AND TERMINOLOGY
//
// The WORKSPACE is the set of packages accessible to the loader. The
// workspace is defined by Config.Build, a *build.Context. The
// default context treats subdirectories of $GOROOT and $GOPATH as
// packages, but this behavior may be overridden.
//
// An AD HOC package is one specified as a set of source files on the
// command line. In the simplest case, it may consist of a single file
// such as $GOROOT/src/net/http/triv.go.
//
// EXTERNAL TEST packages are those comprised of a set of *_test.go
// files all with the same 'package foo_test' declaration, all in the
// same directory. (go/build.Package calls these files XTestFiles.)
//
// An IMPORTABLE package is one that can be referred to by some import
// spec. Every importable package is uniquely identified by its
// PACKAGE PATH or just PATH, a string such as "fmt", "encoding/json",
// or "cmd/vendor/golang.org/x/arch/x86/x86asm". A package path
// typically denotes a subdirectory of the workspace.
//
// An import declaration uses an IMPORT PATH to refer to a package.
// Most import declarations use the package path as the import path.
//
// Due to VENDORING (https://golang.org/s/go15vendor), the
// interpretation of an import path may depend on the directory in which
// it appears. To resolve an import path to a package path, go/build
// must search the enclosing directories for a subdirectory named
// "vendor".
//
// ad hoc packages and external test packages are NON-IMPORTABLE. The
// path of an ad hoc package is inferred from the package
// declarations of its files and is therefore not a unique package key.
// For example, Config.CreatePkgs may specify two initial ad hoc
// packages, both with path "main".
//
// An AUGMENTED package is an importable package P plus all the
// *_test.go files with same 'package foo' declaration as P.
// (go/build.Package calls these files TestFiles.)
//
// The INITIAL packages are those specified in the configuration. A
// DEPENDENCY is a package loaded to satisfy an import in an initial
// package or another dependency.
package loader

// IMPLEMENTATION NOTES
//
// 'go test', in-package test files, and import cycles
// ---------------------------------------------------
//
// An external test package may depend upon members of the augmented
// package that are not in the unaugmented package, such as functions
// that expose internals. (See bufio/export_test.go for an example.)
// So, the loader must ensure that for each external test package
// it loads, it also augments the corresponding non-test package.
//
// The import graph over n unaugmented packages must be acyclic; the
// import graph over n-1 unaugmented packages plus one augmented
// package must also be acyclic. ('go test' relies on this.) But the
// import graph over n augmented packages may contain cycles.
//
// First, all the (unaugmented) non-test packages and their
// dependencies are imported in the usual way; the loader reports an
// error if it detects an import cycle.
//
// Then, each package P for which testing is desired is augmented by
// the list P' of its in-package test files, by calling
// (*types.Checker).Files. This arrangement ensures that P' may
// reference definitions within P, but P may not reference definitions
// within P'. Furthermore, P' may import any other package, including
// ones that depend upon P, without an import cycle error.
//
// Consider two packages A and B, both of which have lists of
// in-package test files we'll call A' and B', and which have the
// following import graph edges:
//
//	B  imports A
//	B' imports A
//	A' imports B
//
// This last edge would be expected to create an error were it not
// for the special type-checking discipline above.
// Cycles of size greater than two are possible. For example:
//
//	compress/bzip2/bzip2_test.go (package bzip2)  imports "io/ioutil"
//	io/ioutil/tempfile_test.go   (package ioutil) imports "regexp"
//	regexp/exec_test.go          (package regexp) imports "compress/bzip2"
//
//
// Concurrency
// -----------
//
// Let us define the import dependency graph as follows. Each node is a
// list of files passed to (Checker).Files at once. Many of these lists
// are the production code of an importable Go package, so those nodes
// are labelled by the package's path. The remaining nodes are
// ad hoc packages and lists of in-package *_test.go files that augment
// an importable package; those nodes have no label.
//
// The edges of the graph represent import statements appearing within a
// file. An edge connects a node (a list of files) to the node it
// imports, which is importable and thus always labelled.
//
// Loading is controlled by this dependency graph.
//
// To reduce I/O latency, we start loading a package's dependencies
// asynchronously as soon as we've parsed its files and enumerated its
// imports (scanImports). This performs a preorder traversal of the
// import dependency graph.
//
// To exploit hardware parallelism, we type-check unrelated packages in
// parallel, where "unrelated" means not ordered by the partial order of
// the import dependency graph.
//
// We use a concurrency-safe non-blocking cache (importer.imported) to
// record the results of type-checking, whether success or failure. An
// entry is created in this cache by startLoad the first time the
// package is imported. The first goroutine to request an entry becomes
// responsible for completing the task and broadcasting completion to
// subsequent requestors, which block until then.
//
// Type checking occurs in (parallel) postorder: we cannot type-check a
// set of files until we have loaded and type-checked all of their
// immediate dependencies (and thus all of their transitive
// dependencies). If the input were guaranteed free of import cycles,
// this would be trivial: we could simply wait for completion of the
// dependencies and then invoke the typechecker.
//
// But as we saw in the 'go test' section above, some cycles in the
// import graph over packages are actually legal, so long as the
// cycle-forming edge originates in the in-package test files that
// augment the package. This explains why the nodes of the import
// dependency graph are not packages, but lists of files: the unlabelled
// nodes avoid the cycles. Consider packages A and B where B imports A
// and A's in-package tests AT import B. The naively constructed import
// graph over packages would contain a cycle (A+AT) --> B --> (A+AT) but
// the graph over lists of files is AT --> B --> A, where AT is an
// unlabelled node.
//
// Awaiting completion of the dependencies in a cyclic graph would
// deadlock, so we must materialize the import dependency graph (as
// importer.graph) and check whether each import edge forms a cycle. If
// x imports y, and the graph already contains a path from y to x, then
// there is an import cycle, in which case the processing of x must not
// wait for the completion of processing of y.
//
// When the type-checker makes a callback (doImport) to the loader for a
// given import edge, there are two possible cases. In the normal case,
// the dependency has already been completely type-checked; doImport
// does a cache lookup and returns it. In the cyclic case, the entry in
// the cache is still necessarily incomplete, indicating a cycle. We
// perform the cycle check again to obtain the error message, and return
// the error.
//
// The result of using concurrency is about a 2.5x speedup for stdlib_test.
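A brief, hypothetical sketch of the Config/Load workflow the package documentation describes (not part of the vendored file).

package main

import (
	"fmt"

	"golang.org/x/tools/go/loader"
)

func main() {
	var conf loader.Config

	// "fmt" plus its in-package tests; "net/http" as a plain import.
	conf.ImportWithTests("fmt")
	conf.Import("net/http")

	prog, err := conf.Load()
	if err != nil {
		panic(err)
	}
	for _, info := range prog.InitialPackages() {
		fmt.Println(info.Pkg.Path(), len(info.Files), "files")
	}
}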
1066
vendor/golang.org/x/tools/go/loader/loader.go
generated
vendored
Normal file
File diff suppressed because it is too large
123
vendor/golang.org/x/tools/go/loader/util.go
generated
vendored
Normal file
@ -0,0 +1,123 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package loader

import (
	"go/ast"
	"go/build"
	"go/parser"
	"go/token"
	"io"
	"os"
	"strconv"
	"sync"

	"golang.org/x/tools/go/buildutil"
)

// We use a counting semaphore to limit
// the number of parallel I/O calls per process.
var ioLimit = make(chan bool, 10)

// parseFiles parses the Go source files within directory dir and
// returns the ASTs of the ones that could be at least partially parsed,
// along with a list of I/O and parse errors encountered.
//
// I/O is done via ctxt, which may specify a virtual file system.
// displayPath is used to transform the filenames attached to the ASTs.
func parseFiles(fset *token.FileSet, ctxt *build.Context, displayPath func(string) string, dir string, files []string, mode parser.Mode) ([]*ast.File, []error) {
	if displayPath == nil {
		displayPath = func(path string) string { return path }
	}
	var wg sync.WaitGroup
	n := len(files)
	parsed := make([]*ast.File, n)
	errors := make([]error, n)
	for i, file := range files {
		if !buildutil.IsAbsPath(ctxt, file) {
			file = buildutil.JoinPath(ctxt, dir, file)
		}
		wg.Add(1)
		go func(i int, file string) {
			ioLimit <- true // wait
			defer func() {
				wg.Done()
				<-ioLimit // signal
			}()
			var rd io.ReadCloser
			var err error
			if ctxt.OpenFile != nil {
				rd, err = ctxt.OpenFile(file)
			} else {
				rd, err = os.Open(file)
			}
			if err != nil {
				errors[i] = err // open failed
				return
			}

			// ParseFile may return both an AST and an error.
			parsed[i], errors[i] = parser.ParseFile(fset, displayPath(file), rd, mode)
			rd.Close()
		}(i, file)
	}
	wg.Wait()

	// Eliminate nils, preserving order.
	var o int
	for _, f := range parsed {
		if f != nil {
			parsed[o] = f
			o++
		}
	}
	parsed = parsed[:o]

	o = 0
	for _, err := range errors {
		if err != nil {
			errors[o] = err
			o++
		}
	}
	errors = errors[:o]

	return parsed, errors
}

// scanImports returns the set of all import paths from all
// import specs in the specified files.
func scanImports(files []*ast.File) map[string]bool {
	imports := make(map[string]bool)
	for _, f := range files {
		for _, decl := range f.Decls {
			if decl, ok := decl.(*ast.GenDecl); ok && decl.Tok == token.IMPORT {
				for _, spec := range decl.Specs {
					spec := spec.(*ast.ImportSpec)

					// NB: do not assume the program is well-formed!
					path, err := strconv.Unquote(spec.Path.Value)
					if err != nil {
						continue // quietly ignore the error
					}
					if path == "C" {
						continue // skip pseudopackage
					}
					imports[path] = true
				}
			}
		}
	}
	return imports
}

// ---------- Internal helpers ----------

// TODO(adonovan): make this a method: func (*token.File) Contains(token.Pos)
func tokenFileContainsPos(f *token.File, pos token.Pos) bool {
	p := int(pos)
	base := f.Base()
	return base <= p && p < base+f.Size()
}
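A small sketch (not part of the vendored file) showing the containment check that tokenFileContainsPos performs, using only the exported go/token API.

package main

import (
	"fmt"
	"go/parser"
	"go/token"
)

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "x.go", "package x\nvar V int\n", 0)
	if err != nil {
		panic(err)
	}

	tf := fset.File(f.Pos()) // *token.File for "x.go"

	// Same check as tokenFileContainsPos: a Pos belongs to a file iff
	// base <= int(pos) < base+size.
	pos := f.Name.Pos()
	base, size := tf.Base(), tf.Size()
	fmt.Println(base <= int(pos) && int(pos) < base+size) // true
}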