2019-02-26 13:05:15 +04:00
|
|
|
package jsonpath
|
|
|
|
|
|
|
|
import (
|
|
|
|
"bytes"
|
|
|
|
"fmt"
|
|
|
|
)
|
|
|
|
|
|
|
|
// queryStateFn advances a single query by one token and returns the
// next state function to run, or nil when the query is finished.
type queryStateFn func(*query, *Eval, *Item) queryStateFn
|
|
|
|
|
|
|
|
// query tracks the matching progress of one Path against the token
// stream. Its state function is run on every token by Eval.Iterate.
type query struct {
	Path

	state       queryStateFn // next state function to run for this query
	start       int          // location depth at which this query began matching
	pos         int          // current position relative to start (see loc())
	firstType   int          // first json token type in buffer
	buffer      bytes.Buffer // accumulates captured value bytes for the current match
	resultQueue *Results     // completed results awaiting pickup by Eval.Iterate
	valLoc      stack        // capture the current location stack at capture
	errors      []error      // errors accumulated while evaluating where-clause expressions
	buckets     stack        // stack of exprBucket
}
|
|
|
|
|
|
|
|
// exprBucket holds one where-clause expression encountered at a path
// operator, together with the dependent sub-queries that feed it and
// the candidate results held until the expression is decided.
type exprBucket struct {
	operatorLoc int      // location depth of the operator owning this expression
	expression  []Item   // where-clause tokens, evaluated via evaluatePostFix
	queries     []*query // dependent sub-queries supplying values to the expression
	results     *Results // candidate results, released only if the expression is true
}
|
|
|
|
|
|
|
|
// evalStateFn advances the evaluator by one token and returns the
// next state function to run, or nil when evaluation is finished.
type evalStateFn func(*Eval, *Item) evalStateFn
|
|
|
|
|
|
|
|
// Eval drives the evaluation of one or more Path queries against a
// stream of JSON tokens produced by a tokenReader.
type Eval struct {
	tr         tokenReader       // source of JSON tokens
	levelStack intStack          // nesting-level bookkeeping used by the eval state functions
	location   stack             // current location (keys/indexes) within the document
	queries    map[string]*query // active queries, keyed by path string; finished ones are removed
	state      evalStateFn       // next evaluator state function
	prevIndex  int               // NOTE(review): appears to track the previous array index between tokens — confirm in eval state fns
	nextKey    []byte            // NOTE(review): appears to hold the pending object key — confirm in eval state fns
	copyValues bool              // whether item values ([]byte) must be copied out of the lexer's buffer

	resultQueue *Results // results ready to be returned by Iterate/Next
	Error       error    // fatal evaluation error; Iterate stops when non-nil
}
|
|
|
|
|
|
|
|
func newEvaluation(tr tokenReader, paths ...*Path) *Eval {
|
|
|
|
e := &Eval{
|
|
|
|
tr: tr,
|
|
|
|
location: *newStack(),
|
|
|
|
levelStack: *newIntStack(),
|
|
|
|
state: evalRoot,
|
2019-10-19 01:17:00 +04:00
|
|
|
queries: make(map[string]*query),
|
2019-02-26 13:05:15 +04:00
|
|
|
prevIndex: -1,
|
|
|
|
nextKey: nil,
|
|
|
|
copyValues: true, // depends on which lexer is used
|
|
|
|
resultQueue: newResults(),
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, p := range paths {
|
|
|
|
e.queries[p.stringValue] = newQuery(p)
|
|
|
|
}
|
|
|
|
// Determine whether to copy emitted item values ([]byte) from lexer
|
|
|
|
switch tr.(type) {
|
2019-10-19 01:17:00 +04:00
|
|
|
case *ReaderLexer:
|
2019-02-26 13:05:15 +04:00
|
|
|
e.copyValues = true
|
|
|
|
default:
|
|
|
|
e.copyValues = false
|
|
|
|
}
|
|
|
|
|
|
|
|
return e
|
|
|
|
}
|
|
|
|
|
|
|
|
func newQuery(p *Path) *query {
|
|
|
|
return &query{
|
|
|
|
Path: *p,
|
|
|
|
state: pathMatchOp,
|
|
|
|
start: -1,
|
|
|
|
pos: -1,
|
|
|
|
buffer: *bytes.NewBuffer(make([]byte, 0, 50)),
|
|
|
|
valLoc: *newStack(),
|
|
|
|
errors: make([]error, 0),
|
|
|
|
resultQueue: newResults(),
|
|
|
|
buckets: *newStack(),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Iterate consumes a single token from the reader, advances the
// evaluator and every active query (and their dependent where-clause
// queries) by one step, and returns any results produced by this step.
// It reports false when the token stream is exhausted, all queries
// have finished, or a fatal error has been recorded.
func (e *Eval) Iterate() (*Results, bool) {
	// Results are only valid for one iteration; start fresh each call.
	e.resultQueue.clear()

	t, ok := e.tr.next()
	if !ok || e.state == nil {
		return nil, false
	}

	// run evaluator function
	e.state = e.state(e, t)

	anyRunning := false
	// run path function for each path
	for str, query := range e.queries {
		anyRunning = true
		query.state = query.state(query, e, t)
		// A nil state means the query can never match again; deleting
		// while ranging over a Go map is safe.
		if query.state == nil {
			delete(e.queries, str)
		}

		// Surface at most one completed result from this query per step.
		if query.resultQueue.len() > 0 {
			e.resultQueue.push(query.resultQueue.Pop())
		}

		// Advance each dependent where-clause query by the same token.
		for _, b := range query.buckets.values {
			bucket := b.(exprBucket)
			for _, dq := range bucket.queries {
				dq.state = dq.state(dq, e, t)

				// NOTE(review): this re-checks query.resultQueue, not
				// dq.resultQueue — dependent queries do not push into
				// query.resultQueue, so this drains further results of
				// the outer query. Possibly intentional draining,
				// possibly a typo for dq; confirm before changing.
				if query.resultQueue.len() > 0 {
					e.resultQueue.push(query.resultQueue.Pop())
				}
			}
		}
	}

	if !anyRunning {
		return nil, false
	}

	if e.Error != nil {
		return nil, false
	}

	return e.resultQueue, true
}
|
|
|
|
|
|
|
|
func (e *Eval) Next() (*Result, bool) {
|
|
|
|
if e.resultQueue.len() > 0 {
|
|
|
|
return e.resultQueue.Pop(), true
|
|
|
|
}
|
|
|
|
|
|
|
|
for {
|
|
|
|
if _, ok := e.Iterate(); ok {
|
|
|
|
if e.resultQueue.len() > 0 {
|
|
|
|
return e.resultQueue.Pop(), true
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
break
|
|
|
|
}
|
|
|
|
}
|
2019-10-19 01:17:00 +04:00
|
|
|
|
2019-02-26 13:05:15 +04:00
|
|
|
return nil, false
|
|
|
|
}
|
|
|
|
|
|
|
|
func (q *query) loc() int {
|
|
|
|
return abs(q.pos-q.start) + q.start
|
|
|
|
}
|
|
|
|
|
|
|
|
// trySpillOver checks whether the query has backed out above the
// operator that owns the topmost expression bucket; if so, the
// bucket's where-clause is evaluated, and on a true result its held
// candidate results spill over into the next lower bucket (or into
// the query's final result queue when no bucket remains). On a false
// result the held results are silently dropped with the bucket.
func (q *query) trySpillOver() {
	if b, ok := q.buckets.peek(); ok {
		bucket := b.(exprBucket)
		// Only spill once we have moved above the owning operator.
		if q.loc() < bucket.operatorLoc {
			q.buckets.pop()

			exprRes, err := bucket.evaluate()
			if err != nil {
				// Record but do not abort; the bucket is discarded below.
				q.errors = append(q.errors, err)
			}

			if exprRes {
				next, ok := q.buckets.peek()
				var spillover *Results
				if !ok {
					// fmt.Println("Spilling over into end queue")
					spillover = q.resultQueue
				} else {
					// fmt.Println("Spilling over into lower bucket")
					nextBucket := next.(exprBucket)
					spillover = nextBucket.results
				}
				// Drain the evaluated bucket in FIFO order.
				for {
					v := bucket.results.Pop()
					if v != nil {
						spillover.push(v)
					} else {
						break
					}
				}
			}
		}
	}
}
|
|
|
|
|
|
|
|
// pathMatchOp is the query state that matches path operators against
// the evaluator's current document location, advancing (pos++) when
// the next operator matches and retreating (pos--) when the document
// has backed out above the query's position. It transitions to
// pathEndValue once every operator has matched.
func pathMatchOp(q *query, e *Eval, i *Item) queryStateFn {
	curLocation := e.location.len() - 1

	if q.loc() > curLocation {
		// Document closed a level above us: back out and let any
		// pending where-clause bucket at this depth resolve.
		q.pos--
		q.trySpillOver()
	} else if q.loc() <= curLocation {
		// Only attempt a match when the document is exactly one level
		// deeper than our current match position.
		if q.loc() == curLocation-1 {
			// Guard: there must still be an operator left for this depth.
			if len(q.operators)+q.start >= curLocation {
				current, _ := e.location.peek()
				nextOp := q.operators[abs(q.loc()-q.start)]
				if itemMatchOperator(current, nextOp) {
					q.pos++

					// A matched operator with a where-clause opens a new
					// bucket holding the expression and its sub-queries.
					if nextOp.whereClauseBytes != nil && len(nextOp.whereClause) > 0 {
						bucket := exprBucket{
							operatorLoc: q.loc(),
							expression:  nextOp.whereClause,
							queries:     make([]*query, len(nextOp.dependentPaths)),
							results:     newResults(),
						}

						// Dependent queries start anchored at the current
						// depth and always capture their end values.
						for i, p := range nextOp.dependentPaths {
							bucket.queries[i] = newQuery(p)
							bucket.queries[i].pos = q.loc()
							bucket.queries[i].start = q.loc()
							bucket.queries[i].captureEndValue = true
						}
						q.buckets.push(bucket)
					}
				}
			}
		}
	}

	// All operators matched and the document is at (or below) the
	// matched depth: begin capturing the value.
	if q.loc() == len(q.operators)+q.start && q.loc() <= curLocation {
		if q.captureEndValue {
			q.firstType = i.typ
			q.buffer.Write(i.val)
		}
		// Snapshot the location stack as the result's key path.
		q.valLoc = *e.location.clone()
		return pathEndValue
	}

	// Backed out past the root: this query can never match again.
	if q.loc() < -1 {
		return nil
	}

	return pathMatchOp
}
|
|
|
|
|
|
|
|
// pathEndValue is the query state that captures the matched value's
// tokens into the buffer while the document remains at or below the
// match depth, then emits a Result (into the query's result queue, or
// into the topmost bucket when a where-clause is pending) once the
// document backs out, and returns to pathMatchOp.
func pathEndValue(q *query, e *Eval, i *Item) queryStateFn {
	if e.location.len()-1 >= q.loc() {
		// Still inside the matched value: keep accumulating bytes.
		if q.captureEndValue {
			q.buffer.Write(i.val)
		}
	} else {
		r := &Result{Keys: q.valLoc.toArray()}
		if q.buffer.Len() > 0 {
			// Copy out of the buffer — it is truncated and reused below.
			val := make([]byte, q.buffer.Len())
			copy(val, q.buffer.Bytes())
			r.Value = val

			// Map the first captured token type to the public JSON type.
			switch q.firstType {
			case jsonBraceLeft:
				r.Type = JSONObject
			case jsonString:
				r.Type = JSONString
			case jsonBracketLeft:
				r.Type = JSONArray
			case jsonNull:
				r.Type = JSONNull
			case jsonBool:
				r.Type = JSONBool
			case jsonNumber:
				r.Type = JSONNumber
			default:
				r.Type = -1 // unrecognized first token type
			}
		}

		if q.buckets.len() == 0 {
			// No pending where-clause: the result is final.
			q.resultQueue.push(r)
		} else {
			// Hold the result in the topmost bucket until its
			// expression is evaluated (see trySpillOver).
			b, _ := q.buckets.peek()
			b.(exprBucket).results.push(r)
		}

		// Reset capture state and step back for the next match.
		q.valLoc = *newStack()
		q.buffer.Truncate(0)
		q.pos--
		return pathMatchOp
	}

	return pathEndValue
}
|
|
|
|
|
|
|
|
func (b *exprBucket) evaluate() (bool, error) {
|
|
|
|
values := make(map[string]Item)
|
2019-10-19 01:17:00 +04:00
|
|
|
|
2019-02-26 13:05:15 +04:00
|
|
|
for _, q := range b.queries {
|
|
|
|
result := q.resultQueue.Pop()
|
|
|
|
if result != nil {
|
2019-10-19 01:41:57 +04:00
|
|
|
t, err := getJSONTokenType(result.Value)
|
2019-02-26 13:05:15 +04:00
|
|
|
if err != nil {
|
|
|
|
return false, err
|
|
|
|
}
|
2019-10-19 01:17:00 +04:00
|
|
|
|
2019-02-26 13:05:15 +04:00
|
|
|
i := Item{
|
|
|
|
typ: t,
|
|
|
|
val: result.Value,
|
|
|
|
}
|
|
|
|
values[q.Path.stringValue] = i
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
res, err := evaluatePostFix(b.expression, values)
|
|
|
|
if err != nil {
|
|
|
|
return false, err
|
|
|
|
}
|
2019-10-19 01:17:00 +04:00
|
|
|
|
|
|
|
resBool, ok := res.(bool)
|
2019-02-26 13:05:15 +04:00
|
|
|
if !ok {
|
|
|
|
return false, fmt.Errorf(exprErrorFinalValueNotBool, res)
|
|
|
|
}
|
2019-10-19 01:17:00 +04:00
|
|
|
|
|
|
|
return resBool, nil
|
2019-02-26 13:05:15 +04:00
|
|
|
}
|
|
|
|
|
2019-10-19 01:00:37 +04:00
|
|
|
func itemMatchOperator(loc interface{}, op *operator) bool {
|
2019-02-26 13:05:15 +04:00
|
|
|
topBytes, isKey := loc.([]byte)
|
|
|
|
topInt, isIndex := loc.(int)
|
2019-10-19 02:14:19 +04:00
|
|
|
|
2019-02-26 13:05:15 +04:00
|
|
|
if isKey {
|
|
|
|
switch op.typ {
|
|
|
|
case opTypeNameWild:
|
|
|
|
return true
|
|
|
|
case opTypeName, opTypeNameList:
|
|
|
|
_, found := op.keyStrings[string(topBytes)]
|
|
|
|
return found
|
|
|
|
}
|
|
|
|
} else if isIndex {
|
|
|
|
switch op.typ {
|
|
|
|
case opTypeIndexWild:
|
|
|
|
return true
|
|
|
|
case opTypeIndex, opTypeIndexRange:
|
|
|
|
return topInt >= op.indexStart && (!op.hasIndexEnd || topInt <= op.indexEnd)
|
|
|
|
}
|
|
|
|
}
|
2019-10-19 02:14:19 +04:00
|
|
|
|
2019-02-26 13:05:15 +04:00
|
|
|
return false
|
|
|
|
}
|