package jsonpath
import (
"bytes"
"encoding/json"
"io/ioutil"
"os"
"strings"
"testing"
"github.com/stretchr/testify/assert"
)
func testLexerMethods ( l lexer , as * assert . Assertions ) {
s := l . peek ( )
as . EqualValues ( '{' , s , "First rune should match" )
2019-10-19 02:14:19 +04:00
2019-02-26 13:05:15 +04:00
r := l . take ( )
as . EqualValues ( '{' , r , "First rune should match" )
r = l . take ( )
as . EqualValues ( '"' , r , "Second rune should match" )
r = l . take ( )
as . EqualValues ( 'k' , r , "Third rune should match" )
// Try peeking
r = l . peek ( )
as . EqualValues ( 'e' , r , "Peek fifth rune should match" )
// Second peek should yield same result
r = l . peek ( )
as . EqualValues ( 'e' , r , "Peek fifth rune should match" )
r = l . take ( )
// Taking should yield peeked result
as . EqualValues ( 'e' , r , "Rune should match" )
// Taking should yield next result
r = l . take ( )
as . EqualValues ( 'y' , r , "Rune should match" )
r = l . take ( )
as . EqualValues ( '"' , r , "Rune should match" )
r = l . peek ( )
as . EqualValues ( ' ' , r , "Rune should match" )
l . take ( )
l . ignore ( )
r = l . peek ( )
as . EqualValues ( ':' , r , "Rune should match" )
}
// TestLexerMethods verifies that the slice-backed and reader-backed
// lexers expose identical peek/take/ignore semantics for the same input.
func TestLexerMethods(t *testing.T) {
	as := assert.New(t)

	input := `{"key" :"value"}`

	testLexerMethods(NewSliceLexer([]byte(input), JSON), as)
	testLexerMethods(NewReaderLexer(strings.NewReader(input), JSON), as)
}
const (
sampleMix = ` { "Type" : "inventory.all" , "Id" : 1 , "Digest" : "f0e8ff11922e2e988ad8a68e99dbd055be7445ee" , "NodeID" : "588be7c36150f57babf564e1a25ea1c1" , "Content" : { "sysinfo.package" : { "Entries" : [ { "Category" : "RPM_Packages" , "Details" : [ { "Tag" : "Name" , "Value" : "initscripts" } , { "Tag" : "Summary" , "Value" : "The inittab file and the /etc/init.d scripts" } , { "Tag" : "Version" , "Value" : "9.03.40" } , { "Tag" : "Arch" , "Value" : "x86_64" } , { "Tag" : "Size" , "Value" : "5720353" } , { "Tag" : "InstallTime" , "Value" : "1412965846" } , { "Tag" : "Name" , "Value" : "setup" } , { "Tag" : "Summary" , "Value" : "A set of system configuration and setup files" } , { "Tag" : "Version" , "Value" : "2.8.14" } , { "Tag" : "Arch" , "Value" : "noarch" } , { "Tag" : "Size" , "Value" : "666477" } , { "Tag" : "InstallTime" , "Value" : "1412965633" } , { "Tag" : "Name" , "Value" : "dracut" } , { "Tag" : "Summary" , "Value" : "Initramfs generator using udev" } , { "Tag" : "Version" , "Value" : "004" } , { "Tag" : "Arch" , "Value" : "noarch" } , { "Tag" : "Size" , "Value" : "257710" } , { "Tag" : "InstallTime" , "Value" : "1412965847" } , { "Tag" : "Name" , "Value" : "basesystem" } , { "Tag" : "Summary" , "Value" : "The skeleton package which defines a simple Red Hat Enterprise Linux system" } , { "Tag" : "Version" , "Value" : "10.0" } , { "Tag" : "Arch" , "Value" : "noarch" } , { "Tag" : "Size" , "Value" : "0" } , { "Tag" : "InstallTime" , "Value" : "1412965634" } , { "Tag" : "Name" , "Value" : "kernel" } , { "Tag" : "Summary" , "Value" : "The Linux kernel" } , { "Tag" : "Version" , "Value" : "2.6.32" } , { "Tag" : "Arch" , "Value" : "x86_64" } , { "Tag" : "Size" , "Value" : "126773875" } , { "Tag" : "InstallTime" , "Value" : "1412965853" } , { "Tag" : "Name" , "Value" : "e2fsprogs" } , { "Tag" : "Summary" , "Value" : "Utilities for managing ext2, ext3, and ext4 filesystems" } , { "Tag" : "Version" , "Value" : "1.41.12" } , { "Tag" : "Arch" , "Value" : 
"x86_64" } , { "Tag" : "Size" , "Value" : "2020232" } , { "Tag" : "InstallTime" , "Value" : "1412965856" } , { "Tag" : "Name" , "Value" : "curl" } , { "Tag" : "Summary" , "Value" : "A utility for getting files from remote servers (FTP, HTTP, and others)" } , { "Tag" : "Version" , "Value" : "7.19.7" } , { "Tag" : "Arch" , "Value" : "x86_64" } , { "Tag" : "Size" , "Value" : "355395" } , { "Tag" : "InstallTime" , "Value" : "1412965888" } , { "Tag" : "Name" , "Value" : "ncurses-libs" } , { "Tag" : "Summary" , "Value" : "Ncurses libraries" } , { "Tag" : "Version" , "Value" : "5.7" } , { "Tag" : "Arch" , "Value" : "x86_64" } , { "Tag" : "Size" , "Value" : "760448" } , { "Tag" : "InstallTime" , "Value" : "1412965644" } , { "Tag" : "Name" , "Value" : "audit" } , { "Tag" : "Summary" , "Value" : "User space tools for 2.6 kernel auditing" } , { "Tag" : "Version" , "Value" : "2.2" } , { "Tag" : "Arch" , "Value" : "x86_64" } , { "Tag" : "Size" , "Value" : "979444" } , { "Tag" : "InstallTime" , "Value" : "1412965889" } , { "Tag" : "Name" , "Value" : "libattr" } , { "Tag" : "Summary" , "Value" : "Dynamic library for extended attribute support" } , { "Tag" : "Version" , "Value" : "2.4.44" } , { "Tag" : "Arch" , "Value" : "x86_64" } , { "Tag" : "Size" , "Value" : "18712" } , { "Tag" : "InstallTime" , "Value" : "1412965644" } , { "Tag" : "Name" , "Value" : "ql2400-firmware" } , { "Tag" : "Summary" , "Value" : "Firmware for qlogic 2400 devices" } , { "Tag" : "Version" , "Value" : "7.03.00" } , { "Tag" : "Arch" , "Value" : "noarch" } , { "Tag" : "Size" , "Value" : "262499" } , { "Tag" : "InstallTime" , "Value" : "1412965890" } , { "Tag" : "Name" , "Value" : "zlib" } , { "Tag" : "Summary" , "Value" : "The zlib compression and decompression library" } , { "Tag" : "Version" , "Value" : "1.2.3" } , { "Tag" : "Arch" , "Value" : "x86_64" } , { "Tag" : "Size" , "Value" : "152305" } , { "Tag" : "InstallTime" , "Value" : "1412965644" } , { "Tag" : "Name" , "Value" : "libedit" } , { "Tag" : 
"Summary" , "Value" : "The NetBSD Editline library" } , { "Tag" : "Version" , "Value" : "2.11" } , { "Tag" : "Arch" , "Value" : "x86_64" } , { "Tag" : "Size" , "Value" : "185648" } , { "Tag" : "InstallTime" , "Value" : "1412986023" } , { "Tag" : "Name" , "Value" : "ntpdate" } , { "Tag" : "Summary" , "Value" : "Utility to set the date and time via NTP" } , { "Tag" : "Version" , "Value" : "4.2.6p5" } , { "Tag" : "Arch" , "Value" : "x86_64" } , { "Tag" : "Size" , "Value" : "121391" } , { "Tag" : "InstallTime" , "Value" : "1412987706" } , { "Tag" : "Name" , "Value" : "chkconfig" } , { "Tag" : "Summary" , "Value" : "A system tool for maintaining the /etc/rc*.d hierarchy" } , { "Tag" : "Version" , "Value" : "1.3.49.3" } , { "Tag" : "Arch" , "Value" : "x86_64" } , { "Tag" : "Size" , "Value" : "670132" } , { "Tag" : "InstallTime" , "Value" : "1412965645" } , { "Tag" : "Name" , "Value" : "ipmitool" } , { "Tag" : "Summary" , "Value" : "Utility for IPMI control" } , { "Tag" : "Version" , "Value" : "1.8.11" } , { "Tag" : "Arch" , "Value" : "x86_64" } , { "Tag" : "
sampleDigits = ` [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50] `
sampleStrings = ` ["abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc","abc"] `
sampleLiterals = ` [null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,false,null,true,fa
lse] `
sampleArrays = ` [[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[1]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]] `
sampleObjects = ` { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { 
"child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": { "child": "value"}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}}} `
)
// Benchmarks comparing the jsonpath lexers against encoding/json
// (Unmarshal and streaming Decode) on representative inputs: mixed
// real-world JSON, digit arrays, string arrays, literal arrays, deeply
// nested arrays, and deeply nested objects.

func BenchmarkUnmarshalMix(b *testing.B)      { benchmarkStdUnmarshal([]byte(sampleMix), b) }
func BenchmarkDecodeMix(b *testing.B)         { benchmarkStdLibDecode([]byte(sampleMix), b) }
func BenchmarkSliceMix(b *testing.B)          { benchmarkBytesLexer([]byte(sampleMix), b) }
func BenchmarkReaderMix(b *testing.B)         { benchmarkReaderLexer([]byte(sampleMix), b) }

func BenchmarkUnmarshalDigits(b *testing.B)   { benchmarkStdUnmarshal([]byte(sampleDigits), b) }
func BenchmarkDecodeDigits(b *testing.B)      { benchmarkStdLibDecode([]byte(sampleDigits), b) }
func BenchmarkSliceDigits(b *testing.B)       { benchmarkBytesLexer([]byte(sampleDigits), b) }
func BenchmarkReaderDigits(b *testing.B)      { benchmarkReaderLexer([]byte(sampleDigits), b) }

func BenchmarkUnmarshalStrings(b *testing.B)  { benchmarkStdUnmarshal([]byte(sampleStrings), b) }
func BenchmarkDecodeStrings(b *testing.B)     { benchmarkStdLibDecode([]byte(sampleStrings), b) }
func BenchmarkSliceStrings(b *testing.B)      { benchmarkBytesLexer([]byte(sampleStrings), b) }
func BenchmarkReaderStrings(b *testing.B)     { benchmarkReaderLexer([]byte(sampleStrings), b) }

func BenchmarkUnmarshalLiterals(b *testing.B) { benchmarkStdUnmarshal([]byte(sampleLiterals), b) }
func BenchmarkDecodeLiterals(b *testing.B)    { benchmarkStdLibDecode([]byte(sampleLiterals), b) }
func BenchmarkSliceLiterals(b *testing.B)     { benchmarkBytesLexer([]byte(sampleLiterals), b) }
func BenchmarkReaderLiterals(b *testing.B)    { benchmarkReaderLexer([]byte(sampleLiterals), b) }

func BenchmarkUnmarshalArrays(b *testing.B)   { benchmarkStdUnmarshal([]byte(sampleArrays), b) }
func BenchmarkDecodeArrays(b *testing.B)      { benchmarkStdLibDecode([]byte(sampleArrays), b) }
func BenchmarkSliceArrays(b *testing.B)       { benchmarkBytesLexer([]byte(sampleArrays), b) }
func BenchmarkReaderArrays(b *testing.B)      { benchmarkReaderLexer([]byte(sampleArrays), b) }

func BenchmarkUnmarshalObjects(b *testing.B)  { benchmarkStdUnmarshal([]byte(sampleObjects), b) }
func BenchmarkDecodeObjects(b *testing.B)     { benchmarkStdLibDecode([]byte(sampleObjects), b) }
func BenchmarkSliceObjects(b *testing.B)      { benchmarkBytesLexer([]byte(sampleObjects), b) }
func BenchmarkReaderObjects(b *testing.B)     { benchmarkReaderLexer([]byte(sampleObjects), b) }
// TestBytesLexerReset checks that resetting a slice-backed lexer lets it
// reproduce the exact same token stream a second time.
func TestBytesLexerReset(t *testing.T) {
	as := assert.New(t)

	l := NewSliceLexer([]byte(sampleMix), JSON)
	first := readerToArray(l)
	l.reset()
	second := readerToArray(l)

	as.EqualValues(first, second)
}
func TestReaderLexerReset ( t * testing . T ) {
as := assert . New ( t )
reader := bytes . NewReader ( [ ] byte ( sampleMix ) )
lexer := NewReaderLexer ( reader , JSON )
ritems := readerToArray ( lexer )
lexer . reset ( )
reader . Seek ( 0 , 0 )
2019-10-19 02:14:19 +04:00
2019-02-26 13:05:15 +04:00
ritems2 := readerToArray ( lexer )
as . EqualValues ( ritems , ritems2 , "Item slices are not equal" )
}
// TestLexersAgainstEachOther confirms the slice-backed and reader-backed
// lexers emit identical token streams for the same input.
func TestLexersAgainstEachOther(t *testing.T) {
	as := assert.New(t)

	fromSlice := readerToArray(NewSliceLexer([]byte(sampleMix), JSON))
	fromReader := readerToArray(NewReaderLexer(strings.NewReader(sampleMix), JSON))

	as.EqualValues(fromSlice, fromReader)
}
func TestLargeJSON ( t * testing . T ) {
as := assert . New ( t )
2019-10-19 02:14:19 +04:00
2019-02-26 13:05:15 +04:00
input , err := ioutil . ReadFile ( "large.test" )
if err != nil {
t . SkipNow ( )
return
}
lexer := NewSliceLexer ( input , JSON )
2019-10-19 02:14:19 +04:00
2019-02-26 13:05:15 +04:00
for {
i , ok := lexer . next ( )
2019-10-19 02:14:19 +04:00
2019-02-26 13:05:15 +04:00
if i . typ == jsonError {
as . Fail ( string ( i . val ) )
}
2019-10-19 02:14:19 +04:00
2019-02-26 13:05:15 +04:00
_ = i
2019-10-19 02:14:19 +04:00
2019-02-26 13:05:15 +04:00
if ! ok {
break
}
}
}
func benchmarkBytesLexer ( input [ ] byte , b * testing . B ) {
lexer := NewSliceLexer ( input , JSON )
2019-10-19 02:14:19 +04:00
2019-02-26 13:05:15 +04:00
b . ResetTimer ( )
2019-10-19 02:14:19 +04:00
2019-02-26 13:05:15 +04:00
for n := 0 ; n < b . N ; n ++ {
for {
_ , ok := lexer . next ( )
if ! ok {
break
}
}
lexer . reset ( )
}
}
func benchmarkReaderLexer ( input [ ] byte , b * testing . B ) {
reader := bytes . NewReader ( input )
lexer := NewReaderLexer ( reader , JSON )
2019-10-19 02:14:19 +04:00
2019-02-26 13:05:15 +04:00
b . ResetTimer ( )
2019-10-19 02:14:19 +04:00
2019-02-26 13:05:15 +04:00
for n := 0 ; n < b . N ; n ++ {
for {
_ , ok := lexer . next ( )
if ! ok {
break
}
}
lexer . reset ( )
reader . Seek ( 0 , 0 )
}
}
// benchmarkStdLibDecode measures encoding/json's streaming Decoder on
// input for comparison with the lexers.
func benchmarkStdLibDecode(input []byte, b *testing.B) {
	reader := bytes.NewReader(input)
	dec := json.NewDecoder(reader)

	b.ResetTimer()
	for n := 0; n < b.N; n++ {
		var x struct{}
		// NOTE(review): json.Decoder buffers internally, so seeking the
		// reader does not fully reset it; later iterations may read from
		// that buffer. Decode errors are deliberately ignored here, as in
		// the original benchmark — confirm this is the intended measurement.
		dec.Decode(&x)
		reader.Seek(0, 0)
	}
}
// benchmarkStdUnmarshal measures encoding/json.Unmarshal of input into
// an untyped interface{}.
// Not comparable to previous benchmarks
func benchmarkStdUnmarshal(input []byte, b *testing.B) {
	b.ResetTimer()
	for n := 0; n < b.N; n++ {
		var x interface{}
		if err := json.Unmarshal(input, &x); err != nil {
			b.Error(err)
		}
	}
}
func BenchmarkStdUnmarshalLarge ( b * testing . B ) {
input , err := ioutil . ReadFile ( "large.test" )
if err != nil {
b . SkipNow ( )
}
2019-10-19 02:14:19 +04:00
2019-02-26 13:05:15 +04:00
benchmarkStdUnmarshal ( input , b )
}
// BenchmarkStdLibDecodeLarge measures the streaming Decoder over the
// optional large.test fixture; skipped when the fixture is missing.
func BenchmarkStdLibDecodeLarge(b *testing.B) {
	input, err := ioutil.ReadFile("large.test")
	if err != nil {
		b.SkipNow()
	}

	reader := bytes.NewReader(input)
	dec := json.NewDecoder(reader)

	b.ResetTimer()
	for n := 0; n < b.N; n++ {
		var x struct{}
		// NOTE(review): as in benchmarkStdLibDecode, the decoder's internal
		// buffer is not reset by seeking the reader, and Decode errors are
		// ignored — confirm this matches the intended measurement.
		dec.Decode(&x)
		reader.Seek(0, 0)
	}
}
func BenchmarkSliceLexerLarge ( b * testing . B ) {
input , err := ioutil . ReadFile ( "large.test" )
if err != nil {
b . SkipNow ( )
}
2019-10-19 02:14:19 +04:00
2019-02-26 13:05:15 +04:00
benchmarkBytesLexer ( input , b )
}
func BenchmarkReaderLexerLarge ( b * testing . B ) {
input , err := os . Open ( "large.test" )
if err != nil {
b . SkipNow ( )
}
// reader := io.NewReader(input)
// reader, _ := os.Open("large.test")
lexer := NewReaderLexer ( input , JSON )
2019-10-19 02:14:19 +04:00
2019-02-26 13:05:15 +04:00
b . ResetTimer ( )
2019-10-19 02:14:19 +04:00
2019-02-26 13:05:15 +04:00
for n := 0 ; n < b . N ; n ++ {
for {
_ , ok := lexer . next ( )
if ! ok {
break
}
}
lexer . reset ( )
input . Seek ( 0 , 0 )
}
}
// lexTest describes a single lexer test case: a named input string and
// the token types the lexer is expected to emit for it.
type lexTest struct {
	name       string // human-readable test-case name
	input      string // raw input fed to the lexer
	tokenTypes []int  // expected token types, in emission order
}