chore: go modules integration

parent a152ca272e
commit 5535dfa03d

@@ -1,5 +1,3 @@
-// Code generated by pigeon; DO NOT EDIT.
-
 package bootcommand
 
 import (
@@ -8,9 +6,7 @@ import (
 	"fmt"
 	"io"
 	"io/ioutil"
-	"math"
 	"os"
-	"sort"
 	"strconv"
 	"strings"
 	"time"
@@ -791,85 +787,18 @@ var (
 	// errNoRule is returned when the grammar to parse has no rule.
 	errNoRule = errors.New("grammar has no rule")
 
-	// errInvalidEntrypoint is returned when the specified entrypoint rule
-	// does not exit.
-	errInvalidEntrypoint = errors.New("invalid entrypoint")
-
 	// errInvalidEncoding is returned when the source is not properly
 	// utf8-encoded.
 	errInvalidEncoding = errors.New("invalid encoding")
 
-	// errMaxExprCnt is used to signal that the maximum number of
-	// expressions have been parsed.
-	errMaxExprCnt = errors.New("max number of expresssions parsed")
+	// errNoMatch is returned if no match could be found.
+	errNoMatch = errors.New("no match found")
 )
 
 // Option is a function that can set an option on the parser. It returns
 // the previous setting as an Option.
 type Option func(*parser) Option
 
-// MaxExpressions creates an Option to stop parsing after the provided
-// number of expressions have been parsed, if the value is 0 then the parser will
-// parse for as many steps as needed (possibly an infinite number).
-//
-// The default for maxExprCnt is 0.
-func MaxExpressions(maxExprCnt uint64) Option {
-	return func(p *parser) Option {
-		oldMaxExprCnt := p.maxExprCnt
-		p.maxExprCnt = maxExprCnt
-		return MaxExpressions(oldMaxExprCnt)
-	}
-}
-
-// Entrypoint creates an Option to set the rule name to use as entrypoint.
-// The rule name must have been specified in the -alternate-entrypoints
-// if generating the parser with the -optimize-grammar flag, otherwise
-// it may have been optimized out. Passing an empty string sets the
-// entrypoint to the first rule in the grammar.
-//
-// The default is to start parsing at the first rule in the grammar.
-func Entrypoint(ruleName string) Option {
-	return func(p *parser) Option {
-		oldEntrypoint := p.entrypoint
-		p.entrypoint = ruleName
-		if ruleName == "" {
-			p.entrypoint = g.rules[0].name
-		}
-		return Entrypoint(oldEntrypoint)
-	}
-}
-
-// Statistics adds a user provided Stats struct to the parser to allow
-// the user to process the results after the parsing has finished.
-// Also the key for the "no match" counter is set.
-//
-// Example usage:
-//
-// input := "input"
-// stats := Stats{}
-// _, err := Parse("input-file", []byte(input), Statistics(&stats, "no match"))
-// if err != nil {
-// log.Panicln(err)
-// }
-// b, err := json.MarshalIndent(stats.ChoiceAltCnt, "", " ")
-// if err != nil {
-// log.Panicln(err)
-// }
-// fmt.Println(string(b))
-//
-func Statistics(stats *Stats, choiceNoMatch string) Option {
-	return func(p *parser) Option {
-		oldStats := p.Stats
-		p.Stats = stats
-		oldChoiceNoMatch := p.choiceNoMatch
-		p.choiceNoMatch = choiceNoMatch
-		if p.Stats.ChoiceAltCnt == nil {
-			p.Stats.ChoiceAltCnt = make(map[string]map[string]int)
-		}
-		return Statistics(oldStats, oldChoiceNoMatch)
-	}
-}
-
 // Debug creates an Option to set the debug flag to b. When set to true,
 // debugging information is printed to stdout while parsing.
 //
@@ -896,20 +825,6 @@ func Memoize(b bool) Option {
 	}
 }
 
-// AllowInvalidUTF8 creates an Option to allow invalid UTF-8 bytes.
-// Every invalid UTF-8 byte is treated as a utf8.RuneError (U+FFFD)
-// by character class matchers and is matched by the any matcher.
-// The returned matched value, c.text and c.offset are NOT affected.
-//
-// The default is false.
-func AllowInvalidUTF8(b bool) Option {
-	return func(p *parser) Option {
-		old := p.allowInvalidUTF8
-		p.allowInvalidUTF8 = b
-		return AllowInvalidUTF8(old)
-	}
-}
-
 // Recover creates an Option to set the recover flag to b. When set to
 // true, this causes the parser to recover from panics and convert it
 // to an error. Setting it to false can be useful while debugging to
@@ -924,37 +839,13 @@ func Recover(b bool) Option {
 	}
 }
 
-// GlobalStore creates an Option to set a key to a certain value in
-// the globalStore.
-func GlobalStore(key string, value interface{}) Option {
-	return func(p *parser) Option {
-		old := p.cur.globalStore[key]
-		p.cur.globalStore[key] = value
-		return GlobalStore(key, old)
-	}
-}
-
-// InitState creates an Option to set a key to a certain value in
-// the global "state" store.
-func InitState(key string, value interface{}) Option {
-	return func(p *parser) Option {
-		old := p.cur.state[key]
-		p.cur.state[key] = value
-		return InitState(key, old)
-	}
-}
-
 // ParseFile parses the file identified by filename.
-func ParseFile(filename string, opts ...Option) (i interface{}, err error) {
+func ParseFile(filename string, opts ...Option) (interface{}, error) {
 	f, err := os.Open(filename)
 	if err != nil {
 		return nil, err
 	}
-	defer func() {
-		if closeErr := f.Close(); closeErr != nil {
-			err = closeErr
-		}
-	}()
+	defer f.Close()
 	return ParseReader(filename, f, opts...)
 }
 
@@ -995,22 +886,8 @@ type savepoint struct {
 type current struct {
 	pos position // start position of the match
 	text []byte // raw text of the match
-
-	// state is a store for arbitrary key,value pairs that the user wants to be
-	// tied to the backtracking of the parser.
-	// This is always rolled back if a parsing rule fails.
-	state storeDict
-
-	// globalStore is a general store for the user to store arbitrary key-value
-	// pairs that they need to manage and that they do not want tied to the
-	// backtracking of the parser. This is only modified by the user and never
-	// rolled back by the parser. It is always up to the user to keep this in a
-	// consistent state.
-	globalStore storeDict
 }
 
-type storeDict map[string]interface{}
-
 // the AST types...
 
 type grammar struct {
@@ -1036,23 +913,11 @@ type actionExpr struct {
 	run func(*parser) (interface{}, error)
 }
 
-type recoveryExpr struct {
-	pos position
-	expr interface{}
-	recoverExpr interface{}
-	failureLabel []string
-}
-
 type seqExpr struct {
 	pos position
 	exprs []interface{}
 }
 
-type throwExpr struct {
-	pos position
-	label string
-}
-
 type labeledExpr struct {
 	pos position
 	label string
@@ -1075,11 +940,6 @@ type ruleRefExpr struct {
 	name string
 }
 
-type stateCodeExpr struct {
-	pos position
-	run func(*parser) error
-}
-
 type andCodeExpr struct {
 	pos position
 	run func(*parser) (bool, error)
@@ -1097,14 +957,13 @@ type litMatcher struct {
 }
 
 type charClassMatcher struct {
 	pos position
 	val string
-	basicLatinChars [128]bool
 	chars []rune
 	ranges []rune
 	classes []*unicode.RangeTable
 	ignoreCase bool
 	inverted bool
 }
 
 type anyMatcher position
@@ -1158,10 +1017,9 @@ func (e errList) Error() string {
 // parserError wraps an error with a prefix indicating the rule in which
 // the error occurred. The original error is stored in the Inner field.
 type parserError struct {
 	Inner error
 	pos position
 	prefix string
-	expected []string
 }
 
 // Error returns the error message.
@@ -1171,32 +1029,14 @@ func (p *parserError) Error() string {
 
 // newParser creates a parser with the specified input source and options.
 func newParser(filename string, b []byte, opts ...Option) *parser {
-	stats := Stats{
-		ChoiceAltCnt: make(map[string]map[string]int),
-	}
-
 	p := &parser{
 		filename: filename,
 		errs: new(errList),
 		data: b,
 		pt: savepoint{position: position{line: 1}},
 		recover: true,
-		cur: current{
-			state: make(storeDict),
-			globalStore: make(storeDict),
-		},
-		maxFailPos: position{col: 1, line: 1},
-		maxFailExpected: make([]string, 0, 20),
-		Stats: &stats,
-		// start rule is rule [0] unless an alternate entrypoint is specified
-		entrypoint: g.rules[0].name,
 	}
 	p.setOptions(opts)
-
-	if p.maxExprCnt == 0 {
-		p.maxExprCnt = math.MaxUint64
-	}
-
 	return p
 }
 
@@ -1213,30 +1053,6 @@ type resultTuple struct {
 	end savepoint
 }
 
-const choiceNoMatch = -1
-
-// Stats stores some statistics, gathered during parsing
-type Stats struct {
-	// ExprCnt counts the number of expressions processed during parsing
-	// This value is compared to the maximum number of expressions allowed
-	// (set by the MaxExpressions option).
-	ExprCnt uint64
-
-	// ChoiceAltCnt is used to count for each ordered choice expression,
-	// which alternative is used how may times.
-	// These numbers allow to optimize the order of the ordered choice expression
-	// to increase the performance of the parser
-	//
-	// The outer key of ChoiceAltCnt is composed of the name of the rule as well
-	// as the line and the column of the ordered choice.
-	// The inner key of ChoiceAltCnt is the number (one-based) of the matching alternative.
-	// For each alternative the number of matches are counted. If an ordered choice does not
-	// match, a special counter is incremented. The name of this counter is set with
-	// the parser option Statistics.
-	// For an alternative to be included in ChoiceAltCnt, it has to match at least once.
-	ChoiceAltCnt map[string]map[string]int
-}
-
 type parser struct {
 	filename string
 	pt savepoint
@@ -1245,9 +1061,9 @@ type parser struct {
 	data []byte
 	errs *errList
 
-	depth int
 	recover bool
 	debug bool
+	depth int
 
 	memoize bool
 	// memoization table for the packrat algorithm:
@@ -1261,23 +1077,8 @@ type parser struct {
 	// rule stack, allows identification of the current rule in errors
 	rstack []*rule
 
-	// parse fail
-	maxFailPos position
-	maxFailExpected []string
-	maxFailInvertExpected bool
-
-	// max number of expressions to be parsed
-	maxExprCnt uint64
-	// entrypoint for the parser
-	entrypoint string
-
-	allowInvalidUTF8 bool
-
-	*Stats
-
-	choiceNoMatch string
-	// recovery expression stack, keeps track of the currently available recovery expression, these are traversed in reverse
-	recoveryStack []map[string]interface{}
+	// stats
+	exprCnt int
 }
 
 // push a variable set on the vstack.
@@ -1312,31 +1113,6 @@ func (p *parser) popV() {
 	p.vstack = p.vstack[:len(p.vstack)-1]
 }
 
-// push a recovery expression with its labels to the recoveryStack
-func (p *parser) pushRecovery(labels []string, expr interface{}) {
-	if cap(p.recoveryStack) == len(p.recoveryStack) {
-		// create new empty slot in the stack
-		p.recoveryStack = append(p.recoveryStack, nil)
-	} else {
-		// slice to 1 more
-		p.recoveryStack = p.recoveryStack[:len(p.recoveryStack)+1]
-	}
-
-	m := make(map[string]interface{}, len(labels))
-	for _, fl := range labels {
-		m[fl] = expr
-	}
-	p.recoveryStack[len(p.recoveryStack)-1] = m
-}
-
-// pop a recovery expression from the recoveryStack
-func (p *parser) popRecovery() {
-	// GC that map
-	p.recoveryStack[len(p.recoveryStack)-1] = nil
-
-	p.recoveryStack = p.recoveryStack[:len(p.recoveryStack)-1]
-}
-
 func (p *parser) print(prefix, s string) string {
 	if !p.debug {
 		return s
@@ -1358,10 +1134,10 @@ func (p *parser) out(s string) string {
 }
 
 func (p *parser) addErr(err error) {
-	p.addErrAt(err, p.pt.position, []string{})
+	p.addErrAt(err, p.pt.position)
 }
 
-func (p *parser) addErrAt(err error, pos position, expected []string) {
+func (p *parser) addErrAt(err error, pos position) {
 	var buf bytes.Buffer
 	if p.filename != "" {
 		buf.WriteString(p.filename)
@@ -1381,29 +1157,10 @@ func (p *parser) addErrAt(err error, pos position, expected []string) {
 			buf.WriteString("rule " + rule.name)
 		}
 	}
-	pe := &parserError{Inner: err, pos: pos, prefix: buf.String(), expected: expected}
+	pe := &parserError{Inner: err, pos: pos, prefix: buf.String()}
 	p.errs.add(pe)
 }
 
-func (p *parser) failAt(fail bool, pos position, want string) {
-	// process fail if parsing fails and not inverted or parsing succeeds and invert is set
-	if fail == p.maxFailInvertExpected {
-		if pos.offset < p.maxFailPos.offset {
-			return
-		}
-
-		if pos.offset > p.maxFailPos.offset {
-			p.maxFailPos = pos
-			p.maxFailExpected = p.maxFailExpected[:0]
-		}
-
-		if p.maxFailInvertExpected {
-			want = "!" + want
-		}
-		p.maxFailExpected = append(p.maxFailExpected, want)
-	}
-}
-
 // read advances the parser to the next rune.
 func (p *parser) read() {
 	p.pt.offset += p.pt.w
@@ -1416,8 +1173,8 @@ func (p *parser) read() {
 		p.pt.col = 0
 	}
 
-	if rn == utf8.RuneError && n == 1 { // see utf8.DecodeRune
-		if !p.allowInvalidUTF8 {
+	if rn == utf8.RuneError {
+		if n == 1 {
 			p.addErr(errInvalidEncoding)
 		}
 	}
@@ -1434,43 +1191,6 @@ func (p *parser) restore(pt savepoint) {
 	p.pt = pt
 }
 
-// Cloner is implemented by any value that has a Clone method, which returns a
-// copy of the value. This is mainly used for types which are not passed by
-// value (e.g map, slice, chan) or structs that contain such types.
-//
-// This is used in conjunction with the global state feature to create proper
-// copies of the state to allow the parser to properly restore the state in
-// the case of backtracking.
-type Cloner interface {
-	Clone() interface{}
-}
-
-// clone and return parser current state.
-func (p *parser) cloneState() storeDict {
-	if p.debug {
-		defer p.out(p.in("cloneState"))
-	}
-
-	state := make(storeDict, len(p.cur.state))
-	for k, v := range p.cur.state {
-		if c, ok := v.(Cloner); ok {
-			state[k] = c.Clone()
-		} else {
-			state[k] = v
-		}
-	}
-	return state
-}
-
-// restore parser current state to the state storeDict.
-// every restoreState should applied only one time for every cloned state
-func (p *parser) restoreState(state storeDict) {
-	if p.debug {
-		defer p.out(p.in("restoreState"))
-	}
-	p.cur.state = state
-}
-
 // get the slice of bytes from the savepoint start to the current position.
 func (p *parser) sliceFrom(start savepoint) []byte {
 	return p.data[start.position.offset:p.pt.position.offset]
@@ -1536,54 +1256,19 @@ func (p *parser) parse(g *grammar) (val interface{}, err error) {
 		}()
 	}
 
-	startRule, ok := p.rules[p.entrypoint]
-	if !ok {
-		p.addErr(errInvalidEntrypoint)
-		return nil, p.errs.err()
-	}
-
+	// start rule is rule [0]
 	p.read() // advance to first rune
-	val, ok = p.parseRule(startRule)
+	val, ok := p.parseRule(g.rules[0])
 	if !ok {
 		if len(*p.errs) == 0 {
-			// If parsing fails, but no errors have been recorded, the expected values
-			// for the farthest parser position are returned as error.
-			maxFailExpectedMap := make(map[string]struct{}, len(p.maxFailExpected))
-			for _, v := range p.maxFailExpected {
-				maxFailExpectedMap[v] = struct{}{}
-			}
-			expected := make([]string, 0, len(maxFailExpectedMap))
-			eof := false
-			if _, ok := maxFailExpectedMap["!."]; ok {
-				delete(maxFailExpectedMap, "!.")
-				eof = true
-			}
-			for k := range maxFailExpectedMap {
-				expected = append(expected, k)
-			}
-			sort.Strings(expected)
-			if eof {
-				expected = append(expected, "EOF")
-			}
-			p.addErrAt(errors.New("no match found, expected: "+listJoin(expected, ", ", "or")), p.maxFailPos, expected)
+			// make sure this doesn't go out silently
+			p.addErr(errNoMatch)
 		}
-
 		return nil, p.errs.err()
 	}
 	return val, p.errs.err()
 }
 
-func listJoin(list []string, sep string, lastSep string) string {
-	switch len(list) {
-	case 0:
-		return ""
-	case 1:
-		return list[0]
-	default:
-		return fmt.Sprintf("%s %s %s", strings.Join(list[:len(list)-1], sep), lastSep, list[len(list)-1])
-	}
-}
-
 func (p *parser) parseRule(rule *rule) (interface{}, bool) {
 	if p.debug {
 		defer p.out(p.in("parseRule " + rule.name))
@@ -1615,6 +1300,7 @@ func (p *parser) parseRule(rule *rule) (interface{}, bool) {
 
 func (p *parser) parseExpr(expr interface{}) (interface{}, bool) {
 	var pt savepoint
+	var ok bool
 
 	if p.memoize {
 		res, ok := p.getMemoized(expr)
@@ -1625,13 +1311,8 @@ func (p *parser) parseExpr(expr interface{}) (interface{}, bool) {
 		pt = p.pt
 	}
 
-	p.ExprCnt++
-	if p.ExprCnt > p.maxExprCnt {
-		panic(errMaxExprCnt)
-	}
-
+	p.exprCnt++
 	var val interface{}
-	var ok bool
 	switch expr := expr.(type) {
 	case *actionExpr:
 		val, ok = p.parseActionExpr(expr)
@@ -1655,16 +1336,10 @@ func (p *parser) parseExpr(expr interface{}) (interface{}, bool) {
 		val, ok = p.parseNotExpr(expr)
 	case *oneOrMoreExpr:
 		val, ok = p.parseOneOrMoreExpr(expr)
-	case *recoveryExpr:
-		val, ok = p.parseRecoveryExpr(expr)
 	case *ruleRefExpr:
 		val, ok = p.parseRuleRefExpr(expr)
 	case *seqExpr:
 		val, ok = p.parseSeqExpr(expr)
-	case *stateCodeExpr:
-		val, ok = p.parseStateCodeExpr(expr)
-	case *throwExpr:
-		val, ok = p.parseThrowExpr(expr)
	case *zeroOrMoreExpr:
 		val, ok = p.parseZeroOrMoreExpr(expr)
 	case *zeroOrOneExpr:
@@ -1688,13 +1363,10 @@ func (p *parser) parseActionExpr(act *actionExpr) (interface{}, bool) {
 	if ok {
 		p.cur.pos = start.position
 		p.cur.text = p.sliceFrom(start)
-		state := p.cloneState()
 		actVal, err := act.run(p)
 		if err != nil {
-			p.addErrAt(err, start.position, []string{})
+			p.addErrAt(err, start.position)
 		}
-		p.restoreState(state)
-
 		val = actVal
 	}
 	if ok && p.debug {
@@ -1708,14 +1380,10 @@ func (p *parser) parseAndCodeExpr(and *andCodeExpr) (interface{}, bool) {
 		defer p.out(p.in("parseAndCodeExpr"))
 	}
 
-	state := p.cloneState()
-
 	ok, err := and.run(p)
 	if err != nil {
 		p.addErr(err)
 	}
-	p.restoreState(state)
-
 	return nil, ok
 }
 
@@ -1725,13 +1393,10 @@ func (p *parser) parseAndExpr(and *andExpr) (interface{}, bool) {
 	}
 
 	pt := p.pt
-	state := p.cloneState()
 	p.pushV()
 	_, ok := p.parseExpr(and.expr)
 	p.popV()
-	p.restoreState(state)
 	p.restore(pt)
-
 	return nil, ok
 }
 
@@ -1740,15 +1405,12 @@ func (p *parser) parseAnyMatcher(any *anyMatcher) (interface{}, bool) {
 		defer p.out(p.in("parseAnyMatcher"))
 	}
 
-	if p.pt.rn == utf8.RuneError && p.pt.w == 0 {
-		// EOF - see utf8.DecodeRune
-		p.failAt(false, p.pt.position, ".")
-		return nil, false
+	if p.pt.rn != utf8.RuneError {
+		start := p.pt
+		p.read()
+		return p.sliceFrom(start), true
 	}
-	start := p.pt
-	p.read()
-	p.failAt(true, start.position, ".")
-	return p.sliceFrom(start), true
+	return nil, false
 }
 
 func (p *parser) parseCharClassMatcher(chr *charClassMatcher) (interface{}, bool) {
@@ -1757,14 +1419,11 @@ func (p *parser) parseCharClassMatcher(chr *charClassMatcher) (interface{}, bool) {
 	}
 
 	cur := p.pt.rn
-	start := p.pt
-
 	// can't match EOF
-	if cur == utf8.RuneError && p.pt.w == 0 { // see utf8.DecodeRune
-		p.failAt(false, start.position, chr.val)
+	if cur == utf8.RuneError {
 		return nil, false
 	}
-
+	start := p.pt
 	if chr.ignoreCase {
 		cur = unicode.ToLower(cur)
 	}
@@ -1773,11 +1432,9 @@ func (p *parser) parseCharClassMatcher(chr *charClassMatcher) (interface{}, bool) {
 	for _, rn := range chr.chars {
 		if rn == cur {
 			if chr.inverted {
-				p.failAt(false, start.position, chr.val)
 				return nil, false
 			}
 			p.read()
-			p.failAt(true, start.position, chr.val)
 			return p.sliceFrom(start), true
 		}
 	}
@@ -1786,11 +1443,9 @@ func (p *parser) parseCharClassMatcher(chr *charClassMatcher) (interface{}, bool) {
 	for i := 0; i < len(chr.ranges); i += 2 {
 		if cur >= chr.ranges[i] && cur <= chr.ranges[i+1] {
 			if chr.inverted {
-				p.failAt(false, start.position, chr.val)
 				return nil, false
 			}
 			p.read()
-			p.failAt(true, start.position, chr.val)
 			return p.sliceFrom(start), true
 		}
 	}
@@ -1799,60 +1454,33 @@ func (p *parser) parseCharClassMatcher(chr *charClassMatcher) (interface{}, bool) {
 	for _, cl := range chr.classes {
 		if unicode.Is(cl, cur) {
 			if chr.inverted {
-				p.failAt(false, start.position, chr.val)
 				return nil, false
 			}
 			p.read()
-			p.failAt(true, start.position, chr.val)
 			return p.sliceFrom(start), true
 		}
 	}
 
 	if chr.inverted {
 		p.read()
-		p.failAt(true, start.position, chr.val)
 		return p.sliceFrom(start), true
 	}
-	p.failAt(false, start.position, chr.val)
 	return nil, false
 }
 
-func (p *parser) incChoiceAltCnt(ch *choiceExpr, altI int) {
-	choiceIdent := fmt.Sprintf("%s %d:%d", p.rstack[len(p.rstack)-1].name, ch.pos.line, ch.pos.col)
-	m := p.ChoiceAltCnt[choiceIdent]
-	if m == nil {
-		m = make(map[string]int)
-		p.ChoiceAltCnt[choiceIdent] = m
-	}
-	// We increment altI by 1, so the keys do not start at 0
-	alt := strconv.Itoa(altI + 1)
-	if altI == choiceNoMatch {
-		alt = p.choiceNoMatch
-	}
-	m[alt]++
-}
-
 func (p *parser) parseChoiceExpr(ch *choiceExpr) (interface{}, bool) {
 	if p.debug {
 		defer p.out(p.in("parseChoiceExpr"))
 	}
 
-	for altI, alt := range ch.alternatives {
-		// dummy assignment to prevent compile error if optimized
-		_ = altI
-
-		state := p.cloneState()
-
+	for _, alt := range ch.alternatives {
 		p.pushV()
 		val, ok := p.parseExpr(alt)
 		p.popV()
 		if ok {
-			p.incChoiceAltCnt(ch, altI)
 			return val, ok
 		}
-		p.restoreState(state)
 	}
-	p.incChoiceAltCnt(ch, choiceNoMatch)
 	return nil, false
 }
 
@@ -1876,11 +1504,6 @@ func (p *parser) parseLitMatcher(lit *litMatcher) (interface{}, bool) {
 		defer p.out(p.in("parseLitMatcher"))
 	}
 
-	ignoreCase := ""
-	if lit.ignoreCase {
-		ignoreCase = "i"
-	}
-	val := fmt.Sprintf("%q%s", lit.val, ignoreCase)
 	start := p.pt
 	for _, want := range lit.val {
 		cur := p.pt.rn
@@ -1888,13 +1511,11 @@ func (p *parser) parseLitMatcher(lit *litMatcher) (interface{}, bool) {
 			cur = unicode.ToLower(cur)
 		}
 		if cur != want {
-			p.failAt(false, start.position, val)
 			p.restore(start)
 			return nil, false
 		}
 		p.read()
 	}
-	p.failAt(true, start.position, val)
 	return p.sliceFrom(start), true
 }
 
@@ -1903,14 +1524,10 @@ func (p *parser) parseNotCodeExpr(not *notCodeExpr) (interface{}, bool) {
 		defer p.out(p.in("parseNotCodeExpr"))
 	}
 
-	state := p.cloneState()
-
 	ok, err := not.run(p)
 	if err != nil {
 		p.addErr(err)
 	}
-	p.restoreState(state)
-
 	return nil, !ok
 }
 
@@ -1920,15 +1537,10 @@ func (p *parser) parseNotExpr(not *notExpr) (interface{}, bool) {
 	}
 
 	pt := p.pt
-	state := p.cloneState()
 	p.pushV()
-	p.maxFailInvertExpected = !p.maxFailInvertExpected
 	_, ok := p.parseExpr(not.expr)
-	p.maxFailInvertExpected = !p.maxFailInvertExpected
 	p.popV()
-	p.restoreState(state)
 	p.restore(pt)
-
 	return nil, !ok
 }
 
@@ -1954,18 +1566,6 @@ func (p *parser) parseOneOrMoreExpr(expr *oneOrMoreExpr) (interface{}, bool) {
 	}
 }
 
-func (p *parser) parseRecoveryExpr(recover *recoveryExpr) (interface{}, bool) {
-	if p.debug {
-		defer p.out(p.in("parseRecoveryExpr (" + strings.Join(recover.failureLabel, ",") + ")"))
-	}
-
-	p.pushRecovery(recover.failureLabel, recover.recoverExpr)
-	val, ok := p.parseExpr(recover.expr)
-	p.popRecovery()
-
-	return val, ok
-}
-
 func (p *parser) parseRuleRefExpr(ref *ruleRefExpr) (interface{}, bool) {
 	if p.debug {
 		defer p.out(p.in("parseRuleRefExpr " + ref.name))
@@ -1988,14 +1588,12 @@ func (p *parser) parseSeqExpr(seq *seqExpr) (interface{}, bool) {
 		defer p.out(p.in("parseSeqExpr"))
 	}
 
-	vals := make([]interface{}, 0, len(seq.exprs))
+	var vals []interface{}
 
 	pt := p.pt
-	state := p.cloneState()
 	for _, expr := range seq.exprs {
 		val, ok := p.parseExpr(expr)
 		if !ok {
-			p.restoreState(state)
 			p.restore(pt)
 			return nil, false
 		}
@@ -2004,34 +1602,6 @@ func (p *parser) parseSeqExpr(seq *seqExpr) (interface{}, bool) {
 	return vals, true
 }
 
-func (p *parser) parseStateCodeExpr(state *stateCodeExpr) (interface{}, bool) {
-	if p.debug {
-		defer p.out(p.in("parseStateCodeExpr"))
-	}
-
-	err := state.run(p)
-	if err != nil {
-		p.addErr(err)
-	}
-	return nil, true
-}
-
-func (p *parser) parseThrowExpr(expr *throwExpr) (interface{}, bool) {
-	if p.debug {
-		defer p.out(p.in("parseThrowExpr"))
-	}
-
-	for i := len(p.recoveryStack) - 1; i >= 0; i-- {
-		if recoverExpr, ok := p.recoveryStack[i][expr.label]; ok {
-			if val, ok := p.parseExpr(recoverExpr); ok {
-				return val, ok
-			}
-		}
-	}
-
-	return nil, false
-}
-
 func (p *parser) parseZeroOrMoreExpr(expr *zeroOrMoreExpr) (interface{}, bool) {
 	if p.debug {
 		defer p.out(p.in("parseZeroOrMoreExpr"))
@@ -2061,3 +1631,18 @@ func (p *parser) parseZeroOrOneExpr(expr *zeroOrOneExpr) (interface{}, bool) {
 	// whether it matched or not, consider it a match
 	return val, true
 }
+
+func rangeTable(class string) *unicode.RangeTable {
+	if rt, ok := unicode.Categories[class]; ok {
+		return rt
+	}
+	if rt, ok := unicode.Properties[class]; ok {
+		return rt
+	}
+	if rt, ok := unicode.Scripts[class]; ok {
+		return rt
+	}
+
+	// cannot happen
+	panic(fmt.Sprintf("invalid Unicode class: %s", class))
+}

go.mod (+2)
@@ -6,6 +6,7 @@ require (
 	github.com/Azure/azure-sdk-for-go v30.0.0+incompatible
 	github.com/Azure/go-autorest v12.0.0+incompatible
 	github.com/Azure/go-ntlmssp v0.0.0-20180810175552-4a21cbd618b4 // indirect
+	github.com/Bowery/prompt v0.0.0-20180817134258-8a1d5376df1c // indirect
 	github.com/ChrisTrenkamp/goxpath v0.0.0-20170625215350-4fe035839290
 	github.com/NaverCloudPlatform/ncloud-sdk-go v0.0.0-20180110055012-c2e73f942591
 	github.com/Telmate/proxmox-api-go v0.0.0-20190614181158-26cd147831a4
@@ -63,6 +64,7 @@ require (
 	github.com/joyent/triton-go v0.0.0-20180116165742-545edbe0d564
 	github.com/json-iterator/go v1.1.6 // indirect
 	github.com/jtolds/gls v4.2.1+incompatible // indirect
+	github.com/kardianos/govendor v1.0.9 // indirect
 	github.com/kardianos/osext v0.0.0-20170510131534-ae77be60afb1
 	github.com/klauspost/compress v0.0.0-20160131094358-f86d2e6d8a77 // indirect
 	github.com/klauspost/cpuid v0.0.0-20160106104451-349c67577817 // indirect

go.sum (+2)
@@ -26,6 +26,7 @@ github.com/Azure/go-autorest/tracing v0.1.0 h1:TRBxC5Pj/fIuh4Qob0ZpkggbfT8RC0Sub
 github.com/Azure/go-autorest/tracing v0.1.0/go.mod h1:ROEEAFwXycQw7Sn3DXNtEedEvdeRAgDr0izn4z5Ij88=
 github.com/Azure/go-ntlmssp v0.0.0-20180810175552-4a21cbd618b4 h1:pSm8mp0T2OH2CPmPDPtwHPr3VAQaOwVF/JbllOPP4xA=
 github.com/Azure/go-ntlmssp v0.0.0-20180810175552-4a21cbd618b4/go.mod h1:chxPXzSsl7ZWRAuOIE23GDNzjWuZquvFlgA8xmpunjU=
+github.com/Bowery/prompt v0.0.0-20180817134258-8a1d5376df1c/go.mod h1:4/6eNcqZ09BZ9wLK3tZOjBA1nDj+B0728nlX5YRlSmQ=
 github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
 github.com/ChrisTrenkamp/goxpath v0.0.0-20170625215350-4fe035839290 h1:K9I21XUHNbYD3GNMmJBN0UKJCpdP+glftwNZ7Bo8kqY=
 github.com/ChrisTrenkamp/goxpath v0.0.0-20170625215350-4fe035839290/go.mod h1:nuWgzSkT5PnyOd+272uUmV0dnAnAn42Mk7PiQC5VzN4=
@@ -249,6 +250,7 @@ github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCV
 github.com/jtolds/gls v4.2.1+incompatible h1:fSuqC+Gmlu6l/ZYAoZzx2pyucC8Xza35fpRVWLVmUEE=
 github.com/jtolds/gls v4.2.1+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
 github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
+github.com/kardianos/govendor v1.0.9/go.mod h1:yvmR6q9ZZ7nSF5Wvh40v0wfP+3TwwL8zYQp+itoZSVM=
 github.com/kardianos/osext v0.0.0-20170510131534-ae77be60afb1 h1:PJPDf8OUfOK1bb/NeTKd4f1QXZItOX389VN3B6qC8ro=
 github.com/kardianos/osext v0.0.0-20170510131534-ae77be60afb1/go.mod h1:1NbS8ALrpOvjt0rHPNLyCIeMtbizbir8U//inJ+zuB8=
 github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=