Fix wsl linter warnings

parent 1f47c5b4a2
commit 3c690ef129
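For context, wsl is golangci-lint's whitespace linter: it asks for blank lines between "cuddled" statements, for example an assignment block followed directly by a loop, or a block followed directly by a return. Nearly every change in this commit is such a blank line. A minimal illustration of the shape wsl accepts, using a toy function that is not from this repository:

package main

import "fmt"

// sum shows the shape wsl accepts: a blank line separates the
// assignment from the loop, and another precedes the return.
func sum(xs []int) int {
	total := 0

	for _, x := range xs {
		total += x
	}

	return total
}

func main() {
	fmt.Println(sum([]int{1, 2, 3})) // 6
}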
@@ -7,6 +7,7 @@ linters:
- gocritic
- funlen
- godox
- gocyclo # is deprecated
linters-settings:
lll:
line-length: 180

eval.go (2 changes)

@@ -326,6 +326,7 @@ func (b *exprBucket) evaluate() (bool, error) {
func itemMatchOperator(loc interface{}, op *operator) bool {
topBytes, isKey := loc.([]byte)
topInt, isIndex := loc.(int)

if isKey {
switch op.typ {
case opTypeNameWild:

@@ -342,5 +343,6 @@ func itemMatchOperator(loc interface{}, op *operator) bool {
return topInt >= op.indexStart && (!op.hasIndexEnd || topInt <= op.indexEnd)
}
}

return false
}

@@ -131,6 +131,7 @@ func evalArrayAfterOpen(e *Eval, i *Item) evalStateFn {
default:
e.Error = errors.New(UnexpectedToken)
}

return nil
}

@@ -152,6 +153,7 @@ func evalArrayValue(e *Eval, i *Item) evalStateFn {
default:
e.Error = errors.New(UnexpectedToken)
}

return nil
}

@@ -163,21 +165,25 @@ func evalArrayAfterValue(e *Eval, i *Item) evalStateFn {
e.prevIndex = valIndex
}
}

return evalArrayValue
case jsonBracketRight:
e.location.pop()
setPrevIndex(e)

return rightBraceOrBracket(e)
case jsonError:
return evalError(e, i)
default:
e.Error = errors.New(UnexpectedToken)
}

return nil
}

func setPrevIndex(e *Eval) {
e.prevIndex = -1

peeked, ok := e.location.peek()
if ok {
if peekedIndex, intOk := peeked.(int); intOk {

@@ -194,6 +200,7 @@ func evalRootEnd(e *Eval, i *Item) evalStateFn {
e.Error = errors.New(BadStructure)
}
}

return nil
}

@@ -72,6 +72,7 @@ func TestPathQuery(t *testing.T) {

func newResult(value string, typ int, keys ...interface{}) Result {
keysChanged := make([]interface{}, len(keys))

for i, k := range keys {
switch v := k.(type) {
case string:

@@ -90,6 +91,7 @@ func newResult(value string, typ int, keys ...interface{}) Result {

func toResultArray(e *Eval) []Result {
vals := make([]Result, 0)

for {
if r, ok := e.Next(); ok {
if r != nil {

@@ -99,5 +101,6 @@ func toResultArray(e *Eval) []Result {
break
}
}

return vals
}

@@ -53,7 +53,8 @@ var opa = map[int]struct {

// Shunting-yard Algorithm (infix -> postfix)
// http://rosettacode.org/wiki/Parsing/Shunting-yard_algorithm#Go
func infixToPostFix(items []Item) (out []Item, err error) {
func infixToPostFix(items []Item) ([]Item, error) {
out := make([]Item, 0)
stack := newStack()

for _, i := range items {

@@ -62,19 +63,23 @@ func infixToPostFix(items []Item) (out []Item, err error) {
stack.push(i) // push "(" to stack
case exprParenRight:
found := false

for {
// pop item ("(" or operator) from stack
opInterface, ok := stack.pop()
if !ok {
return nil, errors.New(exprErrorMismatchedParens)
}

op := opInterface.(Item)
if op.typ == exprParenLeft {
found = true
break // discard "("
}

out = append(out, op) // add operator to result
}

if !found {
return nil, errors.New(exprErrorMismatchedParens)
}

@@ -85,12 +90,14 @@ func infixToPostFix(items []Item) (out []Item, err error) {
// consider top item on stack
opInt, _ := stack.peek()
op := opInt.(Item)

if o2, isOp := opa[op.typ]; !isOp || o1.prec > o2.prec ||
o1.prec == o2.prec && o1.rAssoc {
break
}
// top item is an operator that needs to come off
stack.pop() // pop it
stack.pop() // pop it

out = append(out, op) // add it to result
}
// push operator (the new one) to stack

@@ -104,14 +111,18 @@ func infixToPostFix(items []Item) (out []Item, err error) {
for stack.len() > 0 {
opInt, _ := stack.pop()
op := opInt.(Item)

if op.typ == exprParenLeft {
return nil, errors.New(exprErrorMismatchedParens)
}

out = append(out, op)
}
return

return out, nil
}

// nolint:gocognit
func evaluatePostFix(postFixItems []Item, pathValues map[string]Item) (interface{}, error) {
s := newStack()

@@ -121,19 +132,20 @@ func evaluatePostFix(postFixItems []Item, pathValues map[string]Item) (interface

for _, item := range postFixItems {
switch item.typ {

// VALUES
case exprBool:
val, err := strconv.ParseBool(string(item.val))
if err != nil {
return false, fmt.Errorf(exprErrorBadValue, string(item.val), exprTokenNames[exprBool])
}

s.push(val)
case exprNumber:
val, err := strconv.ParseFloat(string(item.val), 64)
if err != nil {
return false, fmt.Errorf(exprErrorBadValue, string(item.val), exprTokenNames[exprNumber])
}

s.push(val)
case exprPath:
// TODO: Handle datatypes of JSON

@@ -150,6 +162,7 @@ func evaluatePostFix(postFixItems []Item, pathValues map[string]Item) (interface
if err != nil {
return false, fmt.Errorf(exprErrorBadValue, string(item.val), jsonTokenNames[jsonNumber])
}

s.push(valFloat)
case jsonKey, jsonString:
s.push(i.val)

@@ -160,8 +173,6 @@ func evaluatePostFix(postFixItems []Item, pathValues map[string]Item) (interface
s.push(item.val)
case exprNull:
s.push(nil)

// OPERATORS
case exprOpAnd:
a, b, err := take2Bool(s, item.typ)
if err != nil {

@@ -188,18 +199,21 @@ func evaluatePostFix(postFixItems []Item, pathValues map[string]Item) (interface
if err != nil {
return false, err
}

s.push(a == b)
case float64:
a, b, err := take2Float(s, item.typ)
if err != nil {
return false, err
}

s.push(a == b)
case []byte:
a, b, err := take2ByteSlice(s, item.typ)
if err != nil {
return false, err
}

s.push(byteSlicesEqual(a, b))
}
case exprOpNeq:

@@ -221,18 +235,21 @@ func evaluatePostFix(postFixItems []Item, pathValues map[string]Item) (interface
if err != nil {
return false, err
}

s.push(a != b)
case float64:
a, b, err := take2Float(s, item.typ)
if err != nil {
return false, err
}

s.push(a != b)
case []byte:
a, b, err := take2ByteSlice(s, item.typ)
if err != nil {
return false, err
}

s.push(!byteSlicesEqual(a, b))
}
case exprOpNot:

@@ -282,24 +299,28 @@ func evaluatePostFix(postFixItems []Item, pathValues map[string]Item) (interface
if err != nil {
return false, err
}

s.push(b + a)
case exprOpPlusUn:
a, err := take1Float(s, item.typ)
if err != nil {
return false, err
}

s.push(a)
case exprOpMinus:
a, b, err := take2Float(s, item.typ)
if err != nil {
return false, err
}

s.push(b - a)
case exprOpMinusUn:
a, err := take1Float(s, item.typ)
if err != nil {
return false, err
}

s.push(0 - a)
case exprOpSlash:
a, b, err := take2Float(s, item.typ)

@@ -310,6 +331,7 @@ func evaluatePostFix(postFixItems []Item, pathValues map[string]Item) (interface
if a == 0.0 {
return false, errors.New("cannot divide by zero")
}

s.push(b / a)
case exprOpStar:
a, b, err := take2Float(s, item.typ)

@@ -337,8 +359,8 @@ func evaluatePostFix(postFixItems []Item, pathValues map[string]Item) (interface
if err != nil {
return false, err
}

s.push(!a)
// Other
default:
return false, fmt.Errorf("token not supported in evaluator: %v", exprTokenNames[item.typ])
}

@@ -349,11 +371,13 @@ func evaluatePostFix(postFixItems []Item, pathValues map[string]Item) (interface
}

endInt, _ := s.pop()

return endInt, nil
}

func take1Bool(s *stack, op int) (bool, error) {
t := exprBool

val, ok := s.pop()
if !ok {
return false, fmt.Errorf(exprErrorNotEnoughOperands, exprTokenNames[op])

@@ -363,17 +387,20 @@ func take1Bool(s *stack, op int) (bool, error) {
if !ok {
return false, exprErrorBadTypeComparison{exprTokenNames[t], (reflect.TypeOf(val)).String()}
}

return b, nil
}

func take2Bool(s *stack, op int) (bool, bool, error) {
a, aErr := take1Bool(s, op)
b, bErr := take1Bool(s, op)

return a, b, firstError(aErr, bErr)
}

func take1Float(s *stack, op int) (float64, error) {
t := exprNumber

val, ok := s.pop()
if !ok {
return 0.0, fmt.Errorf(exprErrorNotEnoughOperands, exprTokenNames[op])

@@ -383,17 +410,20 @@ func take1Float(s *stack, op int) (float64, error) {
if !ok {
return 0.0, exprErrorBadTypeComparison{exprTokenNames[t], (reflect.TypeOf(val)).String()}
}

return b, nil
}

func take2Float(s *stack, op int) (float64, float64, error) {
a, aErr := take1Float(s, op)
b, bErr := take1Float(s, op)

return a, b, firstError(aErr, bErr)
}

func take1ByteSlice(s *stack, op int) ([]byte, error) {
t := exprNumber

val, ok := s.pop()
if !ok {
return nil, fmt.Errorf(exprErrorNotEnoughOperands, exprTokenNames[op])

@@ -403,17 +433,20 @@ func take1ByteSlice(s *stack, op int) ([]byte, error) {
if !ok {
return nil, exprErrorBadTypeComparison{exprTokenNames[t], (reflect.TypeOf(val)).String()}
}

return b, nil
}

func take2ByteSlice(s *stack, op int) ([]byte, []byte, error) {
a, aErr := take1ByteSlice(s, op)
b, bErr := take1ByteSlice(s, op)

return a, b, firstError(aErr, bErr)
}

func take1Null(s *stack, op int) error {
t := exprNull

val, ok := s.pop()
if !ok {
return fmt.Errorf(exprErrorNotEnoughOperands, exprTokenNames[op])

@@ -422,11 +455,13 @@ func take1Null(s *stack, op int) error {
if v := reflect.TypeOf(val); v != nil {
return exprErrorBadTypeComparison{exprTokenNames[t], v.String()}
}

return nil
}

func take2Null(s *stack, op int) error {
aErr := take1Null(s, op)
bErr := take1Null(s, op)

return firstError(aErr, bErr)
}
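The comment above points at the Rosetta Code shunting-yard write-up that infixToPostFix and evaluatePostFix follow. As a rough, self-contained sketch of the same idea, where all names and the token handling are illustrative rather than this repository's Item/stack API, and parentheses are left out for brevity:

package main

import (
	"fmt"
	"strconv"
	"strings"
)

// prec mirrors the idea of the opa table: operator precedence,
// all left-associative in this tiny sketch.
var prec = map[string]int{"+": 1, "-": 1, "*": 2, "/": 2}

// toPostfix converts space-separated infix tokens to postfix
// using the shunting-yard algorithm.
func toPostfix(tokens []string) []string {
	var out, stack []string

	for _, t := range tokens {
		p, isOp := prec[t]
		if !isOp {
			out = append(out, t) // operand goes straight to output
			continue
		}

		// pop operators of higher or equal precedence first
		for len(stack) > 0 && prec[stack[len(stack)-1]] >= p {
			out = append(out, stack[len(stack)-1])
			stack = stack[:len(stack)-1]
		}

		stack = append(stack, t)
	}

	// flush the remaining operators
	for len(stack) > 0 {
		out = append(out, stack[len(stack)-1])
		stack = stack[:len(stack)-1]
	}

	return out
}

// evalPostfix evaluates the postfix form with a value stack,
// the same shape as evaluatePostFix above but for float64 only.
func evalPostfix(tokens []string) float64 {
	var s []float64

	for _, t := range tokens {
		if _, isOp := prec[t]; !isOp {
			v, _ := strconv.ParseFloat(t, 64)
			s = append(s, v)
			continue
		}

		a, b := s[len(s)-1], s[len(s)-2]
		s = s[:len(s)-2]

		var r float64
		switch t {
		case "+":
			r = b + a
		case "-":
			r = b - a
		case "*":
			r = b * a
		case "/":
			r = b / a
		}

		s = append(s, r)
	}

	return s[0]
}

func main() {
	post := toPostfix(strings.Fields("3 + 4 * 2"))
	fmt.Println(post, "=", evalPostfix(post)) // [3 4 2 * +] = 11
}

Run as-is, it prints the postfix form and value of 3 + 4 * 2.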
@@ -65,20 +65,24 @@ var exprTokenNames = map[int]string{
var EXPRESSION = lexExprText

func lexExprText(l lexer, state *intStack) stateFn {
ignoreSpaceRun(l)
cur := l.peek()
var next stateFn

ignoreSpaceRun(l)

cur := l.peek()
switch cur {
case '(':
l.take()
state.push(exprParenLeft)
l.emit(exprParenLeft)

next = lexExprText
case ')':
if top, ok := state.peek(); ok && top != exprParenLeft {
next = l.errorf("Received %#U but has no matching (", cur)
break
}

state.pop()
l.take()
l.emit(exprParenRight)

@@ -87,82 +91,100 @@ func lexExprText(l lexer, state *intStack) stateFn {
case '!':
l.take()
l.emit(exprOpNot)

next = lexExprText
case '+':
l.take()
l.emit(exprOpPlusUn)

next = lexExprText
case '-':
l.take()
l.emit(exprOpMinusUn)

next = lexExprText
case '@': //, '$': // Only support current key location
l.take()
takePath(l)
l.emit(exprPath)

next = lexOneValue
case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
takeNumeric(l)
l.emit(exprNumber)

next = lexOneValue
case 't':
takeExactSequence(l, bytesTrue)
l.emit(exprBool)

next = lexOneValue
case 'f':
takeExactSequence(l, bytesFalse)
l.emit(exprBool)

next = lexOneValue
case 'n':
takeExactSequence(l, bytesNull)
l.emit(exprNull)

next = lexOneValue
case '"':
err := l.takeString()
if err != nil {
return l.errorf("Could not take string because %q", err)
}

l.emit(exprString)

next = lexOneValue
case eof:
l.emit(exprEOF)
// next = nil
l.emit(exprEOF) // next = nil
default:
return l.errorf("Unrecognized sequence in expression: %#U", cur)
}

return next
}

func lexOneValue(l lexer, state *intStack) stateFn {
var next stateFn

cur := l.peek()
switch cur {
case '+':
l.take()
l.emit(exprOpPlus)

next = lexExprText
case '-':
l.take()
l.emit(exprOpMinus)

next = lexExprText
case '*':
l.take()
l.emit(exprOpStar)

next = lexExprText
case '/':
l.take()
l.emit(exprOpSlash)

next = lexExprText
case '%':
l.take()
l.emit(exprOpPercent)

next = lexExprText
case '^':
l.take()
l.emit(exprOpHat)

next = lexExprText
case '<':
l.take()

cur = l.peek()
if cur == '=' {
l.take()

@@ -170,9 +192,11 @@ func lexOneValue(l lexer, state *intStack) stateFn {
} else {
l.emit(exprOpLt)
}

next = lexExprText
case '>':
l.take()

cur = l.peek()
if cur == '=' {
l.take()

@@ -180,44 +204,58 @@ func lexOneValue(l lexer, state *intStack) stateFn {
} else {
l.emit(exprOpGt)
}

next = lexExprText
case '&':
l.take()

cur = l.take()
if cur != '&' {
return l.errorf("Expected double & instead of %#U", cur)
return l.errorf("expected double & instead of %#U", cur)
}

l.emit(exprOpAnd)

next = lexExprText
case '|':
l.take()

cur = l.take()
if cur != '|' {
return l.errorf("Expected double | instead of %#U", cur)
return l.errorf("expected double | instead of %#U", cur)
}

l.emit(exprOpOr)

next = lexExprText
case '=':
l.take()

cur = l.take()
if cur != '=' {
return l.errorf("Expected double = instead of %#U", cur)
return l.errorf("expected double = instead of %#U", cur)
}

l.emit(exprOpEq)

next = lexExprText
case '!':
l.take()

cur = l.take()
if cur != '=' {
return l.errorf("Expected = for != instead of %#U", cur)
return l.errorf("expected = for != instead of %#U", cur)
}

l.emit(exprOpNeq)

next = lexExprText
case ')':
if top, ok := state.peek(); ok && top != exprParenLeft {
next = l.errorf("Received %#U but has no matching (", cur)
next = l.errorf("received %#U but has no matching (", cur)
break
}

state.pop()
l.take()
l.emit(exprParenRight)

@@ -226,19 +264,23 @@ func lexOneValue(l lexer, state *intStack) stateFn {
case eof:
l.emit(exprEOF)
default:
return l.errorf("Unrecognized sequence in expression: %#U", cur)
return l.errorf("unrecognized sequence in expression: %#U", cur)
}

return next
}

func takeNumeric(l lexer) {
takeDigits(l)

if l.peek() == '.' {
l.take()
takeDigits(l)
}

if l.peek() == 'e' || l.peek() == 'E' {
l.take()

if l.peek() == '+' || l.peek() == '-' {
l.take()
takeDigits(l)

@@ -24,6 +24,7 @@ var expressionTests = []lexTest{

func TestExpressionTokens(t *testing.T) {
as := assert.New(t)

for _, test := range expressionTests {
lexer := NewSliceLexer([]byte(test.input), EXPRESSION)
items := readerToArray(lexer)

@@ -150,6 +150,7 @@ func TestExpressions(t *testing.T) {
// trim EOF
items = items[0 : len(items)-1]
itemsPost, err := infixToPostFix(items)

if as.NoError(err, "Could not transform to postfix\nTest: %q", test.input) {
val, err := evaluatePostFix(itemsPost, test.fields)
if as.NoError(err, "Could not evaluate postfix\nTest Input: %q\nTest Values:%q\nError:%q", test.input, test.fields, err) {

@@ -210,17 +211,18 @@ func TestBadExpressions(t *testing.T) {
items := readerToArray(lexer)
// trim EOF
items = items[0 : len(items)-1]

itemsPost, err := infixToPostFix(items)
if err != nil {
as.True(strings.Contains(err.Error(), test.expectedErrorSubstring), "Test Input: %q\nError %q does not contain %q", test.input, err.Error(), test.expectedErrorSubstring)
continue
}

if as.NoError(err, "Could not transform to postfix\nTest: %q", test.input) {
_, err := evaluatePostFix(itemsPost, test.fields)
if as.Error(err, "Could not evaluate postfix\nTest Input: %q\nTest Values:%q\nError:%s", test.input, test.fields, err) {
as.True(strings.Contains(err.Error(), test.expectedErrorSubstring), "Test Input: %q\nError %s does not contain %q", test.input, err.Error(), test.expectedErrorSubstring)
}

}
}
}

@@ -41,11 +41,11 @@ var jsonTokenNames = map[int]string{
var JSON = lexJSONRoot

func lexJSONRoot(l lexer, state *intStack) stateFn {
ignoreSpaceRun(l)
cur := l.peek()

var next stateFn

ignoreSpaceRun(l)

cur := l.peek()
switch cur {
case '{':
next = stateJSONObjectOpen

@@ -54,6 +54,7 @@ func lexJSONRoot(l lexer, state *intStack) stateFn {
default:
next = l.errorf("Expected '{' or '[' at root of JSON instead of %#U", cur)
}

return next
}

@@ -62,6 +63,7 @@ func stateJSONObjectOpen(l lexer, state *intStack) stateFn {
if cur != '{' {
return l.errorf("Expected '{' as start of object instead of %#U", cur)
}

l.emit(jsonBraceLeft)
state.push(jsonBraceLeft)

@@ -73,6 +75,7 @@ func stateJSONArrayOpen(l lexer, state *intStack) stateFn {
if cur != '[' {
return l.errorf("Expected '[' as start of array instead of %#U", cur)
}

l.emit(jsonBracketLeft)
state.push(jsonBracketLeft)

@@ -81,6 +84,7 @@ func stateJSONArrayOpen(l lexer, state *intStack) stateFn {

func stateJSONObject(l lexer, state *intStack) stateFn {
var next stateFn

cur := l.peek()
switch cur {
case '}':

@@ -88,20 +92,24 @@ func stateJSONObject(l lexer, state *intStack) stateFn {
next = l.errorf("Received %#U but has no matching '{'", cur)
break
}

l.take()
l.emit(jsonBraceRight)
state.pop()

next = stateJSONAfterValue
case '"':
next = stateJSONKey
default:
next = l.errorf("Expected '}' or \" within an object instead of %#U", cur)
}

return next
}

func stateJSONArray(l lexer, state *intStack) stateFn {
var next stateFn

cur := l.peek()
switch cur {
case ']':

@@ -109,20 +117,24 @@ func stateJSONArray(l lexer, state *intStack) stateFn {
next = l.errorf("Received %#U but has no matching '['", cur)
break
}

l.take()
l.emit(jsonBracketRight)
state.pop()

next = stateJSONAfterValue
default:
next = stateJSONValue
}

return next
}

func stateJSONAfterValue(l lexer, state *intStack) stateFn {
cur := l.take()
top, ok := state.peek()
topVal := noValue

top, ok := state.peek()
if ok {
topVal = top
}

@@ -130,6 +142,7 @@ func stateJSONAfterValue(l lexer, state *intStack) stateFn {
switch cur {
case ',':
l.emit(jsonComma)

switch topVal {
case jsonBraceLeft:
return stateJSONKey

@@ -143,6 +156,7 @@ func stateJSONAfterValue(l lexer, state *intStack) stateFn {
case '}':
l.emit(jsonBraceRight)
state.pop()

switch topVal {
case jsonBraceLeft:
return stateJSONAfterValue

@@ -154,6 +168,7 @@ func stateJSONAfterValue(l lexer, state *intStack) stateFn {
case ']':
l.emit(jsonBracketRight)
state.pop()

switch topVal {
case jsonBraceLeft:
return l.errorf("unexpected %#U in object", cur)

@@ -172,6 +187,7 @@ func stateJSONAfterValue(l lexer, state *intStack) stateFn {
default:
return l.errorf("unexpected character after json value token: %#U", cur)
}

return nil
}

@@ -179,6 +195,7 @@ func stateJSONKey(l lexer, state *intStack) stateFn {
if err := l.takeString(); err != nil {
return l.errorf(err.Error())
}

l.emit(jsonKey)

return stateJSONColon

@@ -187,8 +204,9 @@ func stateJSONKey(l lexer, state *intStack) stateFn {
func stateJSONColon(l lexer, state *intStack) stateFn {
cur := l.take()
if cur != ':' {
return l.errorf("Expected ':' after key string instead of %#U", cur)
return l.errorf("expected ':' after key string instead of %#U", cur)
}

l.emit(jsonColon)

return stateJSONValue

@@ -213,7 +231,7 @@ func stateJSONValue(l lexer, state *intStack) stateFn {
case '[':
return stateJSONArrayOpen
default:
return l.errorf("Unexpected character as start of value: %#U", cur)
return l.errorf("unexpected character as start of value: %#U", cur)
}
}

@@ -221,7 +239,9 @@ func stateJSONString(l lexer, state *intStack) stateFn {
if err := l.takeString(); err != nil {
return l.errorf(err.Error())
}

l.emit(jsonString)

return stateJSONAfterValue
}

@@ -229,13 +249,16 @@ func stateJSONNumber(l lexer, state *intStack) stateFn {
if err := takeJSONNumeric(l); err != nil {
return l.errorf(err.Error())
}

l.emit(jsonNumber)

return stateJSONAfterValue
}

func stateJSONBool(l lexer, state *intStack) stateFn {
cur := l.peek()
var match []byte

cur := l.peek()
switch cur {
case 't':
match = trueBytes

@@ -246,7 +269,9 @@ func stateJSONBool(l lexer, state *intStack) stateFn {
if !takeExactSequence(l, match) {
return l.errorf("Could not parse value as 'true' or 'false'")
}

l.emit(jsonBool)

return stateJSONAfterValue
}

@@ -254,7 +279,9 @@ func stateJSONNull(l lexer, state *intStack) stateFn {
if !takeExactSequence(l, nullBytes) {
return l.errorf("Could not parse value as 'null'")
}

l.emit(jsonNull)

return stateJSONAfterValue
}

@@ -263,6 +290,8 @@ func stateJSONAfterRoot(l lexer, state *intStack) stateFn {
if cur != eof {
return l.errorf("Expected EOF instead of %#U", cur)
}

l.emit(jsonEOF)

return nil
}
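The JSON, path, and expression lexers touched above all follow the state-function pattern: each state consumes some input, emits items, and returns the next state, and the driver loops until a state returns nil. A minimal standalone sketch of that pattern, with all types and names invented for illustration rather than taken from this repository:

package main

import "fmt"

// stateFn is the usual state-function signature: do some work,
// then return the next state, or nil to stop.
type stateFn func(l *lex) stateFn

type lex struct {
	input string // assumed to contain only lowercase letters and digits
	pos   int
	out   []string
}

// lexDigits emits a run of digits, then hands off to lexLetters.
func lexDigits(l *lex) stateFn {
	start := l.pos
	for l.pos < len(l.input) && l.input[l.pos] >= '0' && l.input[l.pos] <= '9' {
		l.pos++
	}

	if l.pos > start {
		l.out = append(l.out, "number:"+l.input[start:l.pos])
	}

	if l.pos >= len(l.input) {
		return nil // EOF ends the machine
	}

	return lexLetters
}

// lexLetters emits a run of letters, then hands back to lexDigits.
func lexLetters(l *lex) stateFn {
	start := l.pos
	for l.pos < len(l.input) && l.input[l.pos] >= 'a' && l.input[l.pos] <= 'z' {
		l.pos++
	}

	if l.pos > start {
		l.out = append(l.out, "word:"+l.input[start:l.pos])
	}

	if l.pos >= len(l.input) {
		return nil
	}

	return lexDigits
}

func main() {
	l := &lex{input: "abc123def"}

	// The driver is just a loop over state functions, the same shape
	// as the next() methods in the lexers above.
	for state := stateFn(lexDigits); state != nil; {
		state = state(l)
	}

	fmt.Println(l.out) // [word:abc number:123 word:def]
}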
@@ -63,6 +63,7 @@ func itemsToTypes(items []Item) []int {
for i, item := range items {
types[i] = item.typ
}

return types
}

@@ -161,6 +162,7 @@ var examples = []string{

func TestMixedCaseJSON(t *testing.T) {
as := assert.New(t)

for _, json := range examples {
lexer := NewSliceLexer([]byte(json), JSON)
items := readerToArray(lexer)

lexer.go (1 change)

@@ -56,5 +56,6 @@ func typesDescription(types []int, nameMap map[int]string) []string {
for i, val := range types {
vals[i] = nameMap[val]
}

return vals
}

@@ -25,6 +25,7 @@ func NewReaderLexer(rr io.Reader, initial stateFn) *ReaderLexer {
lex: newLex(initial),
lexeme: bytes.NewBuffer(make([]byte, 0, 100)),
}

return &l
}

@@ -36,6 +37,7 @@ func (l *ReaderLexer) take() int {
nr := l.nextByte
l.nextByte = noValue
l.lexeme.WriteByte(byte(nr))

return nr
}

@@ -52,6 +54,7 @@ looper:
if err == io.EOF {
return errors.New("unexpected EOF in string")
}

l.lexeme.WriteByte(curByte)

if curByte == '"' {

@@ -62,12 +65,14 @@ looper:
if err == io.EOF {
return errors.New("unexpected EOF in string")
}

l.lexeme.WriteByte(curByte)
}
}

previous = curByte
}

return nil
}

@@ -83,14 +88,16 @@ func (l *ReaderLexer) peek() int {
}

l.nextByte = int(r)

return l.nextByte
}

func (l *ReaderLexer) emit(t int) {
l.setItem(t, l.pos, l.lexeme.Bytes())
l.pos += Pos(l.lexeme.Len())
l.hasItem = true

l.pos += Pos(l.lexeme.Len())

if t == lexEOF {
// Do not capture eof character to match slice_lexer
l.item.val = []byte{}

@@ -105,6 +112,7 @@ func (l *ReaderLexer) emit(t int) {
for l.nextByte != eof {
if l.nextByte == ' ' || l.nextByte == '\t' || l.nextByte == '\r' || l.nextByte == '\n' {
l.pos++

r, err := l.bufInput.ReadByte()
if err == io.EOF {
l.nextByte = eof

@@ -130,6 +138,7 @@ func (l *ReaderLexer) ignore() {

func (l *ReaderLexer) next() (*Item, bool) {
l.lexeme.Reset()

for {
if l.currentStateFn == nil {
break

@@ -142,19 +151,23 @@ func (l *ReaderLexer) next() (*Item, bool) {
return &l.item, true
}
}

return &l.item, false
}

func (l *ReaderLexer) errorf(format string, args ...interface{}) stateFn {
l.setItem(lexError, l.pos, []byte(fmt.Sprintf(format, args...)))
l.lexeme.Truncate(0)

l.hasItem = true

return nil
}

func (l *ReaderLexer) reset() {
l.bufInput.Reset(l.input)
l.lexeme.Reset()

l.nextByte = noValue
l.pos = 0
l.lex = newLex(l.initialState)

@@ -17,6 +17,7 @@ func NewSliceLexer(input []byte, initial stateFn) *SliceLexer {
lex: newLex(initial),
input: input,
}

return l
}

@@ -24,8 +25,10 @@ func (l *SliceLexer) take() int {
if int(l.pos) >= len(l.input) {
return eof
}

r := int(l.input[l.pos])
l.pos++

return r
}

@@ -39,6 +42,7 @@ func (l *SliceLexer) takeString() error {

cur := int(l.input[curPos])
curPos++

if cur != '"' {
l.pos = curPos
return fmt.Errorf("expected \" as start of string instead of %#U", cur)

@@ -63,7 +67,9 @@ looper:

previous = cur
}

l.pos = curPos

return nil
}

@@ -71,11 +77,13 @@ func (l *SliceLexer) peek() int {
if int(l.pos) >= len(l.input) {
return eof
}

return int(l.input[l.pos])
}

func (l *SliceLexer) emit(t int) {
l.setItem(t, l.start, l.input[l.start:l.pos])

l.hasItem = true

// Ignore whitespace after this token

@@ -114,13 +122,16 @@ func (l *SliceLexer) next() (*Item, bool) {
return &l.item, true
}
}

return &l.item, false
}

func (l *SliceLexer) errorf(format string, args ...interface{}) stateFn {
l.setItem(lexError, l.start, []byte(fmt.Sprintf(format, args...)))

l.start = l.pos
l.hasItem = true

return nil
}

@@ -14,6 +14,7 @@ import (
func testLexerMethods(l lexer, as *assert.Assertions) {
s := l.peek()
as.EqualValues('{', s, "First rune should match")

r := l.take()
as.EqualValues('{', r, "First rune should match")
r = l.take()

@@ -114,6 +115,7 @@ func TestReaderLexerReset(t *testing.T) {

lexer.reset()
reader.Seek(0, 0)

ritems2 := readerToArray(lexer)

as.EqualValues(ritems, ritems2, "Item slices are not equal")

@@ -133,6 +135,7 @@ func TestLexersAgainstEachOther(t *testing.T) {

func TestLargeJSON(t *testing.T) {
as := assert.New(t)

input, err := ioutil.ReadFile("large.test")
if err != nil {
t.SkipNow()

@@ -140,12 +143,16 @@ func TestLargeJSON(t *testing.T) {
}

lexer := NewSliceLexer(input, JSON)

for {
i, ok := lexer.next()

if i.typ == jsonError {
as.Fail(string(i.val))
}

_ = i

if !ok {
break
}

@@ -154,7 +161,9 @@ func TestLargeJSON(t *testing.T) {

func benchmarkBytesLexer(input []byte, b *testing.B) {
lexer := NewSliceLexer(input, JSON)

b.ResetTimer()

for n := 0; n < b.N; n++ {
for {
_, ok := lexer.next()

@@ -169,7 +178,9 @@ func benchmarkBytesLexer(input []byte, b *testing.B) {
func benchmarkReaderLexer(input []byte, b *testing.B) {
reader := bytes.NewReader(input)
lexer := NewReaderLexer(reader, JSON)

b.ResetTimer()

for n := 0; n < b.N; n++ {
for {
_, ok := lexer.next()

@@ -185,9 +196,12 @@ func benchmarkReaderLexer(input []byte, b *testing.B) {
func benchmarkStdLibDecode(input []byte, b *testing.B) {
reader := bytes.NewReader(input)
dec := json.NewDecoder(reader)

b.ResetTimer()

for n := 0; n < b.N; n++ {
var x struct{}

dec.Decode(&x)
reader.Seek(0, 0)
}

@@ -196,8 +210,10 @@ func benchmarkStdLibDecode(input []byte, b *testing.B) {
// Not comparable to previous benchmarks
func benchmarkStdUnmarshal(input []byte, b *testing.B) {
b.ResetTimer()

for n := 0; n < b.N; n++ {
var x interface{}

err := json.Unmarshal(input, &x)
if err != nil {
b.Error(err)

@@ -210,19 +226,24 @@ func BenchmarkStdUnmarshalLarge(b *testing.B) {
if err != nil {
b.SkipNow()
}

benchmarkStdUnmarshal(input, b)
}

func BenchmarkStdLibDecodeLarge(b *testing.B) {
input, err := ioutil.ReadFile("large.test")
reader := bytes.NewReader(input)
if err != nil {
b.SkipNow()
}

reader := bytes.NewReader(input)
dec := json.NewDecoder(reader)

b.ResetTimer()

for n := 0; n < b.N; n++ {
var x struct{}

dec.Decode(&x)
reader.Seek(0, 0)
}

@@ -233,6 +254,7 @@ func BenchmarkSliceLexerLarge(b *testing.B) {
if err != nil {
b.SkipNow()
}

benchmarkBytesLexer(input, b)
}

@@ -244,7 +266,9 @@ func BenchmarkReaderLexerLarge(b *testing.B) {
// reader := io.NewReader(input)
// reader, _ := os.Open("large.test")
lexer := NewReaderLexer(input, JSON)

b.ResetTimer()

for n := 0; n < b.N; n++ {
for {
_, ok := lexer.next()

misc.go (15 changes)

@@ -10,7 +10,9 @@ func takeExponent(l lexer) error {
if r != 'e' && r != 'E' {
return nil
}

l.take()

r = l.take()
switch r {
case '+', '-':

@@ -18,12 +20,14 @@ func takeExponent(l lexer) error {
if d := l.peek(); !(d >= '0' && d <= '9') {
return fmt.Errorf("expected digit after numeric sign instead of %#U", d)
}

takeDigits(l)
case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
takeDigits(l)
default:
return fmt.Errorf("expected digit after 'e' instead of %#U", r)
}

return nil
}

@@ -35,6 +39,7 @@ func takeJSONNumeric(l lexer) error {
if d := l.peek(); !(d >= '0' && d <= '9') {
return fmt.Errorf("expected digit after dash instead of %#U", d)
}

takeDigits(l)
case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
takeDigits(l)

@@ -51,7 +56,9 @@ func takeJSONNumeric(l lexer) error {
if d := l.peek(); !(d >= '0' && d <= '9') {
return fmt.Errorf("expected digit after '.' instead of %#U", d)
}

takeDigits(l)

if err := takeExponent(l); err != nil {
return err
}

@@ -96,22 +103,27 @@ func takeExactSequence(l lexer, str []byte) bool {
return false
}
}

return true
}

func readerToArray(tr tokenReader) []Item {
vals := make([]Item, 0)

for {
i, ok := tr.next()
if !ok {
break
}

v := *i
s := make([]byte, len(v.val))
copy(s, v.val)

v.val = s
vals = append(vals, v)
}

return vals
}

@@ -121,6 +133,7 @@ func findErrors(items []Item) (Item, bool) {
return i, true
}
}

return Item{}, false
}

@@ -144,6 +157,7 @@ func firstError(errors ...error) error {
return e
}
}

return nil
}

@@ -154,6 +168,7 @@ func abs(x int) int {
case x == 0:
return 0 // return correctly abs(-0)
}

return x
}

path.go (5 changes)

@@ -49,9 +49,11 @@ func genIndexKey(tr tokenReader) (*operator, error) {
case pathWildcard:
k.typ = opTypeIndexWild
k.indexStart = 0

if t, ok = tr.next(); !ok {
return nil, errors.New("expected ] after *, but got none")
}

if t.typ != pathBracketRight {
return nil, fmt.Errorf("expected ] after * instead of %q", t.val)
}

@@ -196,8 +198,8 @@ func tokensToOperators(tr tokenReader) (*Path, error) {
if err != nil {
return nil, err
}
q.operators = append(q.operators, k)

q.operators = append(q.operators, k)
case pathKey:
keyName := p.val

@@ -220,7 +222,6 @@ func tokensToOperators(tr tokenReader) (*Path, error) {
)
case pathWildcard:
q.operators = append(q.operators, &operator{typ: opTypeNameWild})

case pathValue:
q.captureEndValue = true
case pathWhere:

@@ -44,6 +44,7 @@ var PATH = lexPathStart

func lexPathStart(l lexer, state *intStack) stateFn {
ignoreSpaceRun(l)

cur := l.take()
switch cur {
case '$':

@@ -77,6 +78,7 @@ func lexPathAfterKey(l lexer, state *intStack) stateFn {
default:
return l.errorf("Unrecognized rune after path element %#U", cur)
}

return nil
}

@@ -87,6 +89,7 @@ func lexPathExpression(l lexer, state *intStack) stateFn {
}

parenLeftCount := 1

for {
cur = l.take()
switch cur {

@@ -103,6 +106,7 @@ func lexPathExpression(l lexer, state *intStack) stateFn {
}
}
l.emit(pathExpression)

return lexPathAfterKey
}

@@ -111,19 +115,23 @@ func lexPathBracketOpen(l lexer, state *intStack) stateFn {
case '*':
l.take()
l.emit(pathWildcard)

return lexPathBracketClose
case '"':
l.takeString()
l.emit(pathKey)

return lexPathBracketClose
case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
l.take()
takeDigits(l)
l.emit(pathIndex)

return lexPathIndexRange
case eof:
l.emit(pathEOF)
}

return nil
}

@@ -132,7 +140,9 @@ func lexPathBracketClose(l lexer, state *intStack) stateFn {
if cur != ']' {
return l.errorf("Expected ] instead of %#U", cur)
}

l.emit(pathBracketRight)

return lexPathAfterKey
}

@@ -142,14 +152,17 @@ func lexKey(l lexer, state *intStack) stateFn {
case '*':
l.take()
l.emit(pathWildcard)

return lexPathAfterKey
case '"':
l.takeString()
l.emit(pathKey)

return lexPathAfterKey
case eof:
l.take()
l.emit(pathEOF)

return nil
default:
for {

@@ -157,9 +170,12 @@ func lexKey(l lexer, state *intStack) stateFn {
if v == '.' || v == '[' || v == '+' || v == '?' || v == eof {
break
}

l.take()
}

l.emit(pathKey)

return lexPathAfterKey
}
}

@@ -172,6 +188,7 @@ func lexPathIndexRange(l lexer, state *intStack) stateFn {
case ':':
l.take()
l.emit(pathIndexRange)

return lexPathIndexRangeSecond
case ']':
return lexPathBracketClose

@@ -186,6 +203,7 @@ func lexPathIndexRangeSecond(l lexer, state *intStack) stateFn {
case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
takeDigits(l)
l.emit(pathIndex)

return lexPathBracketClose
case ']':
return lexPathBracketClose

@@ -199,6 +217,8 @@ func lexPathAfterValue(l lexer, state *intStack) stateFn {
if cur != eof {
return l.errorf("Expected EOF instead of %#U", cur)
}

l.emit(pathEOF)

return nil
}

@@ -21,6 +21,7 @@ var pathTests = []lexTest{

func TestValidPaths(t *testing.T) {
as := assert.New(t)

for _, test := range pathTests {
lexer := NewSliceLexer([]byte(test.input), PATH)
types := itemsToTypes(readerToArray(lexer))

queue.go (5 changes)

@@ -18,10 +18,12 @@ func (q *Results) push(n *Result) {
nodes := make([]*Result, len(q.nodes)*2)
copy(nodes, q.nodes[q.head:])
copy(nodes[len(q.nodes)-q.head:], q.nodes[:q.head])

q.head = 0
q.tail = len(q.nodes)
q.nodes = nodes
}

q.nodes[q.tail] = n
q.tail = (q.tail + 1) % len(q.nodes)
q.count++

@@ -31,9 +33,11 @@ func (q *Results) Pop() *Result {
if q.count == 0 {
return nil
}

node := q.nodes[q.head]
q.head = (q.head + 1) % len(q.nodes)
q.count--

return node
}

@@ -41,6 +45,7 @@ func (q *Results) Peek() *Result {
if q.count == 0 {
return nil
}

return q.nodes[q.head]
}

@@ -23,6 +23,7 @@ type Result struct {
func (r *Result) Pretty(showPath bool) string {
b := bytes.NewBufferString("")
printed := false

if showPath {
for _, k := range r.Keys {
switch v := k.(type) {

@@ -32,6 +33,7 @@ func (r *Result) Pretty(showPath bool) string {
b.WriteString(fmt.Sprintf("%q", v))
}
b.WriteRune('\t')

printed = true
}
} else if r.Value == nil {

@@ -48,10 +50,13 @@ func (r *Result) Pretty(showPath bool) string {

if r.Value != nil {
printed = true

b.WriteString(fmt.Sprintf("%s", r.Value))
}

if printed {
b.WriteRune('\n')
}

return b.String()
}

run.go (5 changes)

@@ -5,23 +5,28 @@ import "io"
func EvalPathsInBytes(input []byte, paths []*Path) (*Eval, error) {
lexer := NewSliceLexer(input, JSON)
eval := newEvaluation(lexer, paths...)

return eval, nil
}

func EvalPathsInReader(r io.Reader, paths []*Path) (*Eval, error) {
lexer := NewReaderLexer(r, JSON)
eval := newEvaluation(lexer, paths...)

return eval, nil
}

func ParsePaths(pathStrings ...string) ([]*Path, error) {
paths := make([]*Path, len(pathStrings))

for x, p := range pathStrings {
path, err := parsePath(p)
if err != nil {
return nil, err
}

paths[x] = path
}

return paths, nil
}

stack.go (9 changes)

@@ -24,8 +24,10 @@ func (s *intStack) pop() (int, bool) {
if s.len() == 0 {
return 0, false
}

v, _ := s.peek()
s.values = s.values[:len(s.values)-1]

return v, true
}

@@ -33,7 +35,9 @@ func (s *intStack) peek() (int, bool) {
if s.len() == 0 {
return 0, false
}

v := s.values[len(s.values)-1]

return v, true
}

@@ -61,8 +65,10 @@ func (s *stack) pop() (interface{}, bool) {
if s.len() == 0 {
return nil, false
}

v, _ := s.peek()
s.values = s.values[:len(s.values)-1]

return v, true
}

@@ -70,7 +76,9 @@ func (s *stack) peek() (interface{}, bool) {
if s.len() == 0 {
return nil, false
}

v := s.values[len(s.values)-1]

return v, true
}

@@ -79,6 +87,7 @@ func (s *stack) clone() *stack {
values: make([]interface{}, s.len()),
}
copy(d.values, s.values)

return &d
}