feat: switch glide to govendor. (#186)
* feat: switch glide to govendor. * fix: testing
This commit is contained in:
12
vendor/github.com/buger/jsonparser/Dockerfile
generated
vendored
Normal file
12
vendor/github.com/buger/jsonparser/Dockerfile
generated
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
FROM golang:1.6
|
||||
|
||||
RUN go get github.com/Jeffail/gabs
|
||||
RUN go get github.com/bitly/go-simplejson
|
||||
RUN go get github.com/pquerna/ffjson
|
||||
RUN go get github.com/antonholmquist/jason
|
||||
RUN go get github.com/mreiferson/go-ujson
|
||||
RUN go get -tags=unsafe -u github.com/ugorji/go/codec
|
||||
RUN go get github.com/mailru/easyjson
|
||||
|
||||
WORKDIR /go/src/github.com/buger/jsonparser
|
||||
ADD . /go/src/github.com/buger/jsonparser
|
||||
21
vendor/github.com/buger/jsonparser/LICENSE
generated
vendored
Normal file
21
vendor/github.com/buger/jsonparser/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2016 Leonid Bugaev
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
36
vendor/github.com/buger/jsonparser/Makefile
generated
vendored
Normal file
36
vendor/github.com/buger/jsonparser/Makefile
generated
vendored
Normal file
@@ -0,0 +1,36 @@
|
||||
SOURCE = parser.go
|
||||
CONTAINER = jsonparser
|
||||
SOURCE_PATH = /go/src/github.com/buger/jsonparser
|
||||
BENCHMARK = JsonParser
|
||||
BENCHTIME = 5s
|
||||
TEST = .
|
||||
DRUN = docker run -v `pwd`:$(SOURCE_PATH) -i -t $(CONTAINER)
|
||||
|
||||
build:
|
||||
docker build -t $(CONTAINER) .
|
||||
|
||||
race:
|
||||
$(DRUN) --env GORACE="halt_on_error=1" go test ./. $(ARGS) -v -race -timeout 15s
|
||||
|
||||
bench:
|
||||
$(DRUN) go test $(LDFLAGS) -test.benchmem -bench $(BENCHMARK) ./benchmark/ $(ARGS) -benchtime $(BENCHTIME) -v
|
||||
|
||||
bench_local:
|
||||
$(DRUN) go test $(LDFLAGS) -test.benchmem -bench . $(ARGS) -benchtime $(BENCHTIME) -v
|
||||
|
||||
profile:
|
||||
$(DRUN) go test $(LDFLAGS) -test.benchmem -bench $(BENCHMARK) ./benchmark/ $(ARGS) -memprofile mem.mprof -v
|
||||
$(DRUN) go test $(LDFLAGS) -test.benchmem -bench $(BENCHMARK) ./benchmark/ $(ARGS) -cpuprofile cpu.out -v
|
||||
$(DRUN) go test $(LDFLAGS) -test.benchmem -bench $(BENCHMARK) ./benchmark/ $(ARGS) -c
|
||||
|
||||
test:
|
||||
$(DRUN) go test $(LDFLAGS) ./ -run $(TEST) -timeout 10s $(ARGS) -v
|
||||
|
||||
fmt:
|
||||
$(DRUN) go fmt ./...
|
||||
|
||||
vet:
|
||||
$(DRUN) go vet ./.
|
||||
|
||||
bash:
|
||||
$(DRUN) /bin/bash
|
||||
335
vendor/github.com/buger/jsonparser/README.md
generated
vendored
Normal file
335
vendor/github.com/buger/jsonparser/README.md
generated
vendored
Normal file
@@ -0,0 +1,335 @@
|
||||
[](https://goreportcard.com/report/github.com/buger/jsonparser) 
|
||||
# Alternative JSON parser for Go (so far fastest)
|
||||
|
||||
It does not require you to know the structure of the payload (eg. create structs), and allows accessing fields by providing the path to them. It is up to **10 times faster** than standard `encoding/json` package (depending on payload size and usage), **allocates no memory**. See benchmarks below.
|
||||
|
||||
## Rationale
|
||||
Originally I made this for a project that relies on a lot of 3rd party APIs that can be unpredictable and complex.
|
||||
I love simplicity and prefer to avoid external dependencies. `encoding/json` requires you to know your data structures exactly, or if you prefer to use `map[string]interface{}` instead, it will be very slow and hard to manage.
|
||||
I investigated what's on the market and found that most libraries are just wrappers around `encoding/json`. There are a few options with their own parsers (`ffjson`, `easyjson`), but they still require you to create data structures.
|
||||
|
||||
|
||||
The goal of this project is to push the JSON parser to its performance limits without sacrificing compliance or developer experience.
|
||||
|
||||
## Example
|
||||
For the given JSON our goal is to extract the user's full name, number of github followers and avatar.
|
||||
|
||||
```go
|
||||
import "github.com/buger/jsonparser"
|
||||
|
||||
...
|
||||
|
||||
data := []byte(`{
|
||||
"person": {
|
||||
"name": {
|
||||
"first": "Leonid",
|
||||
"last": "Bugaev",
|
||||
"fullName": "Leonid Bugaev"
|
||||
},
|
||||
"github": {
|
||||
"handle": "buger",
|
||||
"followers": 109
|
||||
},
|
||||
"avatars": [
|
||||
{ "url": "https://avatars1.githubusercontent.com/u/14009?v=3&s=460", "type": "thumbnail" }
|
||||
]
|
||||
},
|
||||
"company": {
|
||||
"name": "Acme"
|
||||
}
|
||||
}`)
|
||||
|
||||
// You can specify key path by providing arguments to Get function
|
||||
jsonparser.Get(data, "person", "name", "fullName")
|
||||
|
||||
// There is `GetInt` and `GetBoolean` helpers if you exactly know key data type
|
||||
jsonparser.GetInt(data, "person", "github", "followers")
|
||||
|
||||
// When you try to get object, it will return you []byte slice pointer to data containing it
|
||||
// In `company` it will be `{"name": "Acme"}`
|
||||
jsonparser.Get(data, "company")
|
||||
|
||||
// If the key doesn't exist it will throw an error
|
||||
var size int64
|
||||
if value, _, err := jsonparser.GetInt(data, "company", "size"); err == nil {
|
||||
size = value
|
||||
}
|
||||
|
||||
// You can use `ArrayEach` helper to iterate items [item1, item2 .... itemN]
|
||||
jsonparser.ArrayEach(data, func(value []byte, dataType jsonparser.ValueType, offset int, err error) {
|
||||
fmt.Println(jsonparser.Get(value, "url"))
|
||||
}, "person", "avatars")
|
||||
|
||||
// Or use can access fields by index!
|
||||
jsonparser.GetInt("person", "avatars", "[0]", "url")
|
||||
|
||||
// You can use `ObjectEach` helper to iterate objects { "key1":object1, "key2":object2, .... "keyN":objectN }
|
||||
jsonparser.ObjectEach(data, func(key []byte, value []byte, dataType jsonparser.ValueType, offset int) error {
|
||||
fmt.Printf("Key: '%s'\n Value: '%s'\n Type: %s\n", string(key), string(value), dataType)
|
||||
return nil
|
||||
}, "person", "name")
|
||||
|
||||
// The most efficient way to extract multiple keys is `EachKey`
|
||||
|
||||
paths := [][]string{
|
||||
[]string{"person", "name", "fullName"},
|
||||
[]string{"person", "avatars", "[0]", "url"},
|
||||
[]string{"company", "url"},
|
||||
}
|
||||
jsonparser.EachKey(data, func(idx int, value []byte, vt jsonparser.ValueType, err error){
|
||||
switch idx {
|
||||
case 0: // []string{"person", "name", "fullName"}
|
||||
...
|
||||
case 1: // []string{"person", "avatars", "[0]", "url"}
|
||||
...
|
||||
case 2: // []string{"company", "url"},
|
||||
...
|
||||
}
|
||||
}, paths...)
|
||||
|
||||
// For more information see docs below
|
||||
```
|
||||
|
||||
## Need to speedup your app?
|
||||
|
||||
I'm available for consulting and can help you push your app performance to the limits. Ping me at: leonsbox@gmail.com.
|
||||
|
||||
## Reference
|
||||
|
||||
Library API is really simple. You just need the `Get` method to perform any operation. The rest is just helpers around it.
|
||||
|
||||
You also can view API at [godoc.org](https://godoc.org/github.com/buger/jsonparser)
|
||||
|
||||
|
||||
### **`Get`**
|
||||
```go
|
||||
func Get(data []byte, keys ...string) (value []byte, dataType jsonparser.ValueType, offset int, err error)
|
||||
```
|
||||
Receives data structure, and key path to extract value from.
|
||||
|
||||
Returns:
|
||||
* `value` - Pointer to original data structure containing key value, or just empty slice if nothing found or error
|
||||
* `dataType` - Can be: `NotExist`, `String`, `Number`, `Object`, `Array`, `Boolean` or `Null`
|
||||
* `offset` - Offset from provided data structure where key value ends. Used mostly internally, for example for `ArrayEach` helper.
|
||||
* `err` - If the key is not found or any other parsing issue, it should return error. If key not found it also sets `dataType` to `NotExist`
|
||||
|
||||
Accepts multiple keys to specify the path to a JSON value (in case of querying nested structures).
|
||||
If no keys are provided it will try to extract the closest JSON value (simple ones or object/array), useful for reading streams or arrays, see `ArrayEach` implementation.
|
||||
|
||||
Note that keys can be an array indexes: `jsonparser.GetInt("person", "avatars", "[0]", "url")`, pretty cool, yeah?
|
||||
|
||||
### **`GetString`**
|
||||
```go
|
||||
func GetString(data []byte, keys ...string) (val string, err error)
|
||||
```
|
||||
Returns strings properly handing escaped and unicode characters. Note that this will cause additional memory allocations.
|
||||
|
||||
### **`GetUnsafeString`**
|
||||
If you need string in your app, and ready to sacrifice with support of escaped symbols in favor of speed. It returns string mapped to existing byte slice memory, without any allocations:
|
||||
```go
|
||||
s, _, := jsonparser.GetUnsafeString(data, "person", "name", "title")
|
||||
switch s {
|
||||
case 'CEO':
|
||||
...
|
||||
case 'Engineer'
|
||||
...
|
||||
...
|
||||
}
|
||||
```
|
||||
Note that `unsafe` here means that your string will exist until GC will free underlying byte slice, for most of cases it means that you can use this string only in current context, and should not pass it anywhere externally: through channels or any other way.
|
||||
|
||||
|
||||
### **`GetBoolean`**, **`GetInt`** and **`GetFloat`**
|
||||
```go
|
||||
func GetBoolean(data []byte, keys ...string) (val bool, err error)
|
||||
|
||||
func GetFloat(data []byte, keys ...string) (val float64, err error)
|
||||
|
||||
func GetInt(data []byte, keys ...string) (val float64, err error)
|
||||
```
|
||||
If you know the key type, you can use the helpers above.
|
||||
If key data type do not match, it will return error.
|
||||
|
||||
### **`ArrayEach`**
|
||||
```go
|
||||
func ArrayEach(data []byte, cb func(value []byte, dataType jsonparser.ValueType, offset int, err error), keys ...string)
|
||||
```
|
||||
Needed for iterating arrays, accepts a callback function with the same return arguments as `Get`.
|
||||
|
||||
### **`ObjectEach`**
|
||||
```go
|
||||
func ObjectEach(data []byte, callback func(key []byte, value []byte, dataType ValueType, offset int) error, keys ...string) (err error)
|
||||
```
|
||||
Needed for iterating object, accepts a callback function. Example:
|
||||
```go
|
||||
var handler func([]byte, []byte, jsonparser.ValueType, int) error
|
||||
handler = func(key []byte, value []byte, dataType jsonparser.ValueType, offset int) error {
|
||||
//do stuff here
|
||||
}
|
||||
jsonparser.ObjectEach(myJson, handler)
|
||||
```
|
||||
|
||||
|
||||
### **`EachKey`**
|
||||
```go
|
||||
func EachKey(data []byte, cb func(idx int, value []byte, dataType jsonparser.ValueType, err error), paths ...[]string)
|
||||
```
|
||||
When you need to read multiple keys, and you do not afraid of low-level API `EachKey` is your friend. It read payload only single time, and calls callback function once path is found. For example when you call multiple times `Get`, it has to process payload multiple times, each time you call it. Depending on payload `EachKey` can be multiple times faster than `Get`. Path can use nested keys as well!
|
||||
|
||||
```go
|
||||
paths := [][]string{
|
||||
[]string{"uuid"},
|
||||
[]string{"tz"},
|
||||
[]string{"ua"},
|
||||
[]string{"st"},
|
||||
}
|
||||
var data SmallPayload
|
||||
|
||||
jsonparser.EachKey(smallFixture, func(idx int, value []byte, vt jsonparser.ValueType, err error){
|
||||
switch idx {
|
||||
case 0:
|
||||
data.Uuid, _ = value
|
||||
case 1:
|
||||
v, _ := jsonparser.ParseInt(value)
|
||||
data.Tz = int(v)
|
||||
case 2:
|
||||
data.Ua, _ = value
|
||||
case 3:
|
||||
v, _ := jsonparser.ParseInt(value)
|
||||
data.St = int(v)
|
||||
}
|
||||
}, paths...)
|
||||
```
|
||||
|
||||
|
||||
## What makes it so fast?
|
||||
* It does not rely on `encoding/json`, `reflection` or `interface{}`, the only real package dependency is `bytes`.
|
||||
* Operates with JSON payload on byte level, providing you pointers to the original data structure: no memory allocation.
|
||||
* No automatic type conversions, by default everything is a []byte, but it provides you value type, so you can convert by yourself (there is few helpers included).
|
||||
* Does not parse full record, only keys you specified
|
||||
|
||||
|
||||
## Benchmarks
|
||||
|
||||
There are 3 benchmark types, trying to simulate real-life usage for small, medium and large JSON payloads.
|
||||
For each metric, the lower value is better. Time/op is in nanoseconds. Values better than standard encoding/json marked as bold text.
|
||||
Benchmarks run on standard Linode 1024 box.
|
||||
|
||||
Compared libraries:
|
||||
* https://golang.org/pkg/encoding/json
|
||||
* https://github.com/Jeffail/gabs
|
||||
* https://github.com/bitly/go-simplejson
|
||||
* https://github.com/antonholmquist/jason
|
||||
* https://github.com/mreiferson/go-ujson
|
||||
* https://github.com/ugorji/go/codec
|
||||
* https://github.com/pquerna/ffjson
|
||||
* https://github.com/mailru/easyjson
|
||||
* https://github.com/buger/jsonparser
|
||||
|
||||
#### TLDR
|
||||
If you want to skip next sections we have 2 winner: `jsonparser` and `easyjson`.
|
||||
`jsonparser` is up to 10 times faster than standard `encoding/json` package (depending on payload size and usage), and almost infinitely (literally) better in memory consumption because it operates with data on byte level, and provide direct slice pointers.
|
||||
`easyjson` wins in CPU in medium tests and frankly i'm impressed with this package: it is remarkable results considering that it is almost drop-in replacement for `encoding/json` (require some code generation).
|
||||
|
||||
It's hard to fully compare `jsonparser` and `easyjson` (or `ffson`), they a true parsers and fully process record, unlike `jsonparser` which parse only keys you specified.
|
||||
|
||||
If you are searching for a replacement for `encoding/json` while keeping structs, `easyjson` is an amazing choice. If you want to process dynamic JSON, have memory constraints, or want more control over your data, you should try `jsonparser`.
|
||||
|
||||
`jsonparser` performance heavily depends on usage, and it works best when you do not need to process full record, only some keys. The more calls you need to make, the slower it will be, in contrast `easyjson` (or `ffjson`, `encoding/json`) parser record only 1 time, and then you can make as many calls as you want.
|
||||
|
||||
With great power comes great responsibility! :)
|
||||
|
||||
|
||||
#### Small payload
|
||||
|
||||
Each test processes 190 bytes of http log as a JSON record.
|
||||
It should read multiple fields.
|
||||
https://github.com/buger/jsonparser/blob/master/benchmark/benchmark_small_payload_test.go
|
||||
|
||||
| Library | time/op | bytes/op | allocs/op |
|
||||
| --- | --- | --- | --- | --- |
|
||||
| encoding/json struct | 7879 | 880 | 18 |
|
||||
| encoding/json interface{} | 8946 | 1521 | 38|
|
||||
| Jeffail/gabs | 10053 | 1649 | 46 |
|
||||
| bitly/go-simplejson | 10128 | 2241 | 36 |
|
||||
| antonholmquist/jason | 27152 | 7237 | 101 |
|
||||
| github.com/ugorji/go/codec | 8806 | 2176 | 31 |
|
||||
| mreiferson/go-ujson | **7008** | **1409** | 37 |
|
||||
| pquerna/ffjson | **3769** | **624** | **15** |
|
||||
| mailru/easyjson | **2002** | **192** | **9** |
|
||||
| buger/jsonparser | **1367** | **0** | **0** |
|
||||
| buger/jsonparser (EachKey API) | **809** | **0** | **0** |
|
||||
|
||||
Winners are ffjson, easyjson and jsonparser, where jsonparser is up to 9.8x faster than encoding/json and 4.6x faster than ffjson, and slightly faster than easyjson.
|
||||
If you look at memory allocation, jsonparser has no rivals, as it makes no data copy and operates with raw []byte structures and pointers to it.
|
||||
|
||||
#### Medium payload
|
||||
|
||||
Each test processes a 2.4kb JSON record (based on Clearbit API).
|
||||
It should read multiple nested fields and 1 array.
|
||||
|
||||
https://github.com/buger/jsonparser/blob/master/benchmark/benchmark_medium_payload_test.go
|
||||
|
||||
| Library | time/op | bytes/op | allocs/op |
|
||||
| --- | --- | --- | --- | --- |
|
||||
| encoding/json struct | 57749 | 1336 | 29 |
|
||||
| encoding/json interface{} | 79297 | 10627 | 215 |
|
||||
| Jeffail/gabs | 83807 | 11202 | 235 |
|
||||
| bitly/go-simplejson | 88187 | 17187 | 220 |
|
||||
| antonholmquist/jason | 94099 | 19013 | 247 |
|
||||
| github.com/ugorji/go/codec | 114719 | 6712 | 152 |
|
||||
| mreiferson/go-ujson | **56972** | 11547 | 270 |
|
||||
| pquerna/ffjson | **20298** | **856** | **20** |
|
||||
| mailru/easyjson | **10512** | **336** | **12** |
|
||||
| buger/jsonparser | **15955** | **0** | **0** |
|
||||
| buger/jsonparser (EachKey API) | **8916** | **0** | **0** |
|
||||
|
||||
The difference between ffjson and jsonparser in CPU usage is smaller, while the memory consumption difference is growing. On the other hand `easyjson` shows remarkable performance for medium payload.
|
||||
|
||||
`gabs`, `go-simplejson` and `jason` are based on encoding/json and map[string]interface{} and actually only helpers for unstructured JSON, their performance correlate with `encoding/json interface{}`, and they will skip next round.
|
||||
`go-ujson` while have its own parser, shows same performance as `encoding/json`, also skips next round. Same situation with `ugorji/go/codec`, but it showed unexpectedly bad performance for complex payloads.
|
||||
|
||||
|
||||
#### Large payload
|
||||
|
||||
Each test processes a 24kb JSON record (based on Discourse API)
|
||||
It should read 2 arrays, and for each item in array get a few fields.
|
||||
Basically it means processing a full JSON file.
|
||||
|
||||
https://github.com/buger/jsonparser/blob/master/benchmark/benchmark_large_payload_test.go
|
||||
|
||||
| Library | time/op | bytes/op | allocs/op |
|
||||
| --- | --- | --- | --- | --- |
|
||||
| encoding/json struct | 748336 | 8272 | 307 |
|
||||
| encoding/json interface{} | 1224271 | 215425 | 3395 |
|
||||
| pquerna/ffjson | **312271** | **7792** | **298** |
|
||||
| mailru/easyjson | **154186** | **6992** | **288** |
|
||||
| buger/jsonparser | **85308** | **0** | **0** |
|
||||
|
||||
`jsonparser` now is a winner, but do not forget that it is way more lighweight parser than `ffson` or `easyjson`, and they have to parser all the data, while `jsonparser` parse only what you need. All `ffjson`, `easysjon` and `jsonparser` have their own parsing code, and does not depend on `encoding/json` or `interface{}`, thats one of the reasons why they are so fast. `easyjson` also use a bit of `unsafe` package to reduce memory consuption (in theory it can lead to some unexpected GC issue, but i did not tested enough)
|
||||
|
||||
Also last benchmark did not included `EachKey` test, because in this particular case we need to read lot of Array values, and using `ArrayEach` is more efficient.
|
||||
|
||||
## Questions and support
|
||||
|
||||
All bug-reports and suggestions should go though Github Issues.
|
||||
If you have some private questions you can send them directly to me: leonsbox@gmail.com
|
||||
|
||||
## Contributing
|
||||
|
||||
1. Fork it
|
||||
2. Create your feature branch (git checkout -b my-new-feature)
|
||||
3. Commit your changes (git commit -am 'Added some feature')
|
||||
4. Push to the branch (git push origin my-new-feature)
|
||||
5. Create new Pull Request
|
||||
|
||||
## Development
|
||||
|
||||
All my development happens using Docker, and repo include some Make tasks to simplify development.
|
||||
|
||||
* `make build` - builds docker image, usually can be called only once
|
||||
* `make test` - run tests
|
||||
* `make fmt` - run go fmt
|
||||
* `make bench` - run benchmarks (if you need to run only single benchmark modify `BENCHMARK` variable in make file)
|
||||
* `make profile` - runs benchmark and generate 3 files- `cpu.out`, `mem.mprof` and `benchmark.test` binary, which can be used for `go tool pprof`
|
||||
* `make bash` - enter container (i use it for running `go tool pprof` above)
|
||||
28
vendor/github.com/buger/jsonparser/bytes.go
generated
vendored
Normal file
28
vendor/github.com/buger/jsonparser/bytes.go
generated
vendored
Normal file
@@ -0,0 +1,28 @@
|
||||
package jsonparser
|
||||
|
||||
// parseInt converts a base-10 ASCII integer in bytes to an int64.
// About 3x faster than strconv.ParseInt because it does not check for
// range errors (overflow silently wraps) and supports only base 10,
// which is enough for JSON.
//
// Returns (0, false) for empty input, a bare "-", or any non-digit byte.
func parseInt(bytes []byte) (v int64, ok bool) {
	if len(bytes) == 0 {
		return 0, false
	}

	var neg bool = false
	if bytes[0] == '-' {
		neg = true
		bytes = bytes[1:]
		// Bug fix: a lone "-" with no digits is not a valid number;
		// previously it was accepted and returned 0.
		if len(bytes) == 0 {
			return 0, false
		}
	}

	for _, c := range bytes {
		if c >= '0' && c <= '9' {
			v = (10 * v) + int64(c-'0')
		} else {
			return 0, false
		}
	}

	if neg {
		return -v, true
	}
	return v, true
}
|
||||
21
vendor/github.com/buger/jsonparser/bytes_safe.go
generated
vendored
Normal file
21
vendor/github.com/buger/jsonparser/bytes_safe.go
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
// +build appengine appenginevm
|
||||
|
||||
package jsonparser
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
)
|
||||
|
||||
// See fastbytes_unsafe.go for explanation on why *[]byte is used (signatures must be consistent with those in that file)

// equalStr reports whether the byte slice *b holds exactly the string s.
func equalStr(b *[]byte, s string) bool {
	return s == string(*b)
}

// parseFloat parses *b as a 64-bit floating point number.
func parseFloat(b *[]byte) (float64, error) {
	str := string(*b)
	return strconv.ParseFloat(str, 64)
}

// bytesToString copies *b into a freshly allocated string.
func bytesToString(b *[]byte) string {
	str := string(*b)
	return str
}
|
||||
31
vendor/github.com/buger/jsonparser/bytes_unsafe.go
generated
vendored
Normal file
31
vendor/github.com/buger/jsonparser/bytes_unsafe.go
generated
vendored
Normal file
@@ -0,0 +1,31 @@
|
||||
// +build !appengine,!appenginevm
|
||||
|
||||
package jsonparser
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
//
// The reason for using *[]byte rather than []byte in parameters is an optimization. As of Go 1.6,
// the compiler cannot perfectly inline the function when using a non-pointer slice. That is,
// the non-pointer []byte parameter version is slower than if its function body is manually
// inlined, whereas the pointer []byte version is equally fast to the manually inlined
// version. Instruction count in assembly taken from "go tool compile" confirms this difference.
//
// TODO: Remove hack after Go 1.7 release
//

// equalStr compares the bytes in *b against s without allocating: the slice
// header is reinterpreted as a string header for the duration of the compare.
func equalStr(b *[]byte, s string) bool {
	str := *(*string)(unsafe.Pointer(b))
	return str == s
}

// parseFloat parses *b as a float64 without first copying the bytes into a string.
func parseFloat(b *[]byte) (float64, error) {
	str := *(*string)(unsafe.Pointer(b))
	return strconv.ParseFloat(str, 64)
}

// A hack until issue golang/go#2632 is fixed.
// See: https://github.com/golang/go/issues/2632
//
// bytesToString reinterprets *b as a string without copying. The caller must
// not mutate the underlying bytes while the returned string is in use.
func bytesToString(b *[]byte) string {
	str := *(*string)(unsafe.Pointer(b))
	return str
}
|
||||
164
vendor/github.com/buger/jsonparser/escape.go
generated
vendored
Normal file
164
vendor/github.com/buger/jsonparser/escape.go
generated
vendored
Normal file
@@ -0,0 +1,164 @@
|
||||
package jsonparser
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
// JSON Unicode stuff: see https://tools.ietf.org/html/rfc7159#section-7

const (
	supplementalPlanesOffset = 0x10000
	highSurrogateOffset      = 0xD800
	lowSurrogateOffset       = 0xDC00
)

// combineUTF16Surrogates merges a UTF-16 high/low surrogate pair into the
// single supplementary-plane rune it encodes.
func combineUTF16Surrogates(high, low rune) rune {
	return (high-highSurrogateOffset)<<10 + (low - lowSurrogateOffset) + supplementalPlanesOffset
}

const badHex = -1

// h2I returns the numeric value of the hexadecimal digit c,
// or badHex if c is not a hex digit.
func h2I(c byte) int {
	if c >= '0' && c <= '9' {
		return int(c - '0')
	}
	if c >= 'A' && c <= 'F' {
		return int(c-'A') + 10
	}
	if c >= 'a' && c <= 'f' {
		return int(c-'a') + 10
	}
	return badHex
}

// decodeSingleUnicodeEscape decodes a single \uXXXX escape sequence. The prefix \u is assumed to be present and
// is not checked.
// In JSON, these escapes can either come alone or as part of "UTF16 surrogate pairs" that must be handled together.
// This function only handles one; decodeUnicodeEscape handles this more complex case.
func decodeSingleUnicodeEscape(in []byte) (rune, bool) {
	// We need at least 6 characters total: the "\u" prefix plus 4 hex digits.
	if len(in) < 6 {
		return utf8.RuneError, false
	}

	// Convert the four hex digits to their numeric values.
	h1, h2, h3, h4 := h2I(in[2]), h2I(in[3]), h2I(in[4]), h2I(in[5])
	if h1 == badHex || h2 == badHex || h3 == badHex || h4 == badHex {
		return utf8.RuneError, false
	}

	// Compose the four nibbles into one UTF-16 code unit.
	return rune(h1<<12 + h2<<8 + h3<<4 + h4), true
}

// decodeUnicodeEscape decodes one logical Unicode escape starting at in,
// handling UTF-16 surrogate pairs. It returns the decoded rune and the number
// of input bytes consumed (6 or 12), or (utf8.RuneError, -1) on invalid input.
func decodeUnicodeEscape(in []byte) (rune, int) {
	r, ok := decodeSingleUnicodeEscape(in)
	if !ok {
		// Invalid Unicode escape.
		return utf8.RuneError, -1
	}
	if r < highSurrogateOffset {
		// Valid Unicode escape in the Basic Multilingual Plane.
		return r, 6
	}
	// r is a UTF-16 "high surrogate": a valid escape for the "low surrogate"
	// must follow. Note: the previous decodeSingleUnicodeEscape success
	// guarantees at least 6 bytes remain.
	r2, ok := decodeSingleUnicodeEscape(in[6:])
	if !ok || r2 < lowSurrogateOffset {
		// Missing or invalid UTF-16 "low surrogate".
		return utf8.RuneError, -1
	}
	// Valid UTF-16 surrogate pair.
	return combineUTF16Surrogates(r, r2), 12
}
|
||||
|
||||
// backslashCharEscapeTable: when '\X' is found for some byte X, it is to be replaced with backslashCharEscapeTable[X]
|
||||
var backslashCharEscapeTable = [...]byte{
|
||||
'"': '"',
|
||||
'\\': '\\',
|
||||
'/': '/',
|
||||
'b': '\b',
|
||||
'f': '\f',
|
||||
'n': '\n',
|
||||
'r': '\r',
|
||||
't': '\t',
|
||||
}
|
||||
|
||||
// unescapeToUTF8 unescapes the single escape sequence starting at 'in' into 'out' and returns
|
||||
// how many characters were consumed from 'in' and emitted into 'out'.
|
||||
// If a valid escape sequence does not appear as a prefix of 'in', (-1, -1) to signal the error.
|
||||
func unescapeToUTF8(in, out []byte) (inLen int, outLen int) {
|
||||
if len(in) < 2 || in[0] != '\\' {
|
||||
// Invalid escape due to insufficient characters for any escape or no initial backslash
|
||||
return -1, -1
|
||||
}
|
||||
|
||||
// https://tools.ietf.org/html/rfc7159#section-7
|
||||
switch e := in[1]; e {
|
||||
case '"', '\\', '/', 'b', 'f', 'n', 'r', 't':
|
||||
// Valid basic 2-character escapes (use lookup table)
|
||||
out[0] = backslashCharEscapeTable[e]
|
||||
return 2, 1
|
||||
case 'u':
|
||||
// Unicode escape
|
||||
if r, inLen := decodeUnicodeEscape(in); inLen == -1 {
|
||||
// Invalid Unicode escape
|
||||
return -1, -1
|
||||
} else {
|
||||
// Valid Unicode escape; re-encode as UTF8
|
||||
outLen := utf8.EncodeRune(out, r)
|
||||
return inLen, outLen
|
||||
}
|
||||
}
|
||||
|
||||
return -1, -1
|
||||
}
|
||||
|
||||
// unescape unescapes the string contained in 'in' and returns it as a slice.
|
||||
// If 'in' contains no escaped characters:
|
||||
// Returns 'in'.
|
||||
// Else, if 'out' is of sufficient capacity (guaranteed if cap(out) >= len(in)):
|
||||
// 'out' is used to build the unescaped string and is returned with no extra allocation
|
||||
// Else:
|
||||
// A new slice is allocated and returned.
|
||||
func Unescape(in, out []byte) ([]byte, error) {
|
||||
firstBackslash := bytes.IndexByte(in, '\\')
|
||||
if firstBackslash == -1 {
|
||||
return in, nil
|
||||
}
|
||||
|
||||
// Get a buffer of sufficient size (allocate if needed)
|
||||
if cap(out) < len(in) {
|
||||
out = make([]byte, len(in))
|
||||
} else {
|
||||
out = out[0:len(in)]
|
||||
}
|
||||
|
||||
// Copy the first sequence of unescaped bytes to the output and obtain a buffer pointer (subslice)
|
||||
copy(out, in[:firstBackslash])
|
||||
in = in[firstBackslash:]
|
||||
buf := out[firstBackslash:]
|
||||
|
||||
for len(in) > 0 {
|
||||
// Unescape the next escaped character
|
||||
inLen, bufLen := unescapeToUTF8(in, buf)
|
||||
if inLen == -1 {
|
||||
return nil, MalformedStringEscapeError
|
||||
}
|
||||
|
||||
in = in[inLen:]
|
||||
buf = buf[bufLen:]
|
||||
|
||||
// Copy everything up until the next backslash
|
||||
nextBackslash := bytes.IndexByte(in, '\\')
|
||||
if nextBackslash == -1 {
|
||||
copy(buf, in)
|
||||
buf = buf[len(in):]
|
||||
break
|
||||
} else {
|
||||
copy(buf, in[:nextBackslash])
|
||||
buf = buf[nextBackslash:]
|
||||
in = in[nextBackslash:]
|
||||
}
|
||||
}
|
||||
|
||||
// Trim the out buffer to the amount that was actually emitted
|
||||
return out[:len(out)-len(buf)], nil
|
||||
}
|
||||
853
vendor/github.com/buger/jsonparser/parser.go
generated
vendored
Normal file
853
vendor/github.com/buger/jsonparser/parser.go
generated
vendored
Normal file
@@ -0,0 +1,853 @@
|
||||
package jsonparser
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"math"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
// Errors
|
||||
var (
|
||||
KeyPathNotFoundError = errors.New("Key path not found")
|
||||
UnknownValueTypeError = errors.New("Unknown value type")
|
||||
MalformedJsonError = errors.New("Malformed JSON error")
|
||||
MalformedStringError = errors.New("Value is string, but can't find closing '\"' symbol")
|
||||
MalformedArrayError = errors.New("Value is array, but can't find closing ']' symbol")
|
||||
MalformedObjectError = errors.New("Value looks like object, but can't find closing '}' symbol")
|
||||
MalformedValueError = errors.New("Value looks like Number/Boolean/None, but can't find its end: ',' or '}' symbol")
|
||||
MalformedStringEscapeError = errors.New("Encountered an invalid escape sequence in a string")
|
||||
)
|
||||
|
||||
// How much stack space to allocate for unescaping JSON strings; if a string longer
|
||||
// than this needs to be escaped, it will result in a heap allocation
|
||||
const unescapeStackBufSize = 64
|
||||
|
||||
// tokenEnd returns the index of the first byte that terminates a primitive
// JSON token (whitespace, comma, or a closing brace/bracket). If no such
// terminator exists, the whole slice is the token and len(data) is returned.
func tokenEnd(data []byte) int {
	for i := 0; i < len(data); i++ {
		switch data[i] {
		case ' ', '\n', '\r', '\t', ',', '}', ']':
			return i
		}
	}
	return len(data)
}
|
||||
|
||||
// Find position of next character which is not whitespace
// (space, newline, carriage return, or tab). Returns -1 when
// the slice contains only whitespace.
func nextToken(data []byte) int {
	for i := 0; i < len(data); i++ {
		if c := data[i]; c != ' ' && c != '\n' && c != '\r' && c != '\t' {
			return i
		}
	}
	return -1
}
|
||||
|
||||
// Tries to find the end of string
// Support if string contains escaped quote symbols.
// Returns the index one past the closing '"' plus a flag reporting whether any
// backslash was seen while scanning; returns (-1, escaped) if the string never
// terminates. The input is expected to start just AFTER the opening quote.
func stringEnd(data []byte) (int, bool) {
	escaped := false
	for i, c := range data {
		if c == '"' {
			if !escaped {
				return i + 1, false
			} else {
				// Walk backwards over the run of preceding backslashes: the
				// quote really closes the string only when that run has even
				// length (each pair is a literal backslash, not an escape).
				j := i - 1
				for {
					if j < 0 || data[j] != '\\' {
						return i + 1, true // even number of backslashes
					}
					j--
					if j < 0 || data[j] != '\\' {
						break // odd number of backslashes
					}
					j--

				}
			}
		} else if c == '\\' {
			// NOTE: once set, escaped stays true for the remainder of the scan;
			// the flag means "contains an escape", not "previous byte escaped".
			escaped = true
		}
	}

	return -1, escaped
}
|
||||
|
||||
// Find end of the data structure, array or object.
// For array openSym and closeSym will be '[' and ']', for object '{' and '}'
// Returns the index one past the matching closeSym (i.e. the block's length
// relative to data[0]), or -1 if the block is unterminated. String contents
// are skipped so that brackets/braces inside strings are ignored.
func blockEnd(data []byte, openSym byte, closeSym byte) int {
	level := 0
	i := 0
	ln := len(data)

	for i < ln {
		switch data[i] {
		case '"': // If inside string, skip it
			se, _ := stringEnd(data[i+1:])
			if se == -1 {
				return -1
			}
			// se is relative to i+1; the trailing i++ below accounts for the +1.
			i += se
		case openSym: // If open symbol, increase level
			level++
		case closeSym: // If close symbol, decrease level
			level--

			// If we have returned to the original level, we're done
			if level == 0 {
				return i + 1
			}
		}
		i++
	}

	return -1
}
|
||||
|
||||
// searchKeys scans data for the nested key path given in keys and returns the
// offset just past the ':' of the final matched key, or -1 when the path is
// not found or the JSON is malformed. A path element of the form "[N]"
// selects array index N. An empty path matches offset 0.
func searchKeys(data []byte, keys ...string) int {
	keyLevel := 0 // how many path elements have been matched so far
	level := 0    // current object nesting depth
	i := 0
	ln := len(data)
	lk := len(keys)

	if lk == 0 {
		return 0
	}

	var stackbuf [unescapeStackBufSize]byte // stack-allocated array for allocation-free unescaping of small strings

	for i < ln {
		switch data[i] {
		case '"':
			i++
			keyBegin := i

			strEnd, keyEscaped := stringEnd(data[i:])
			if strEnd == -1 {
				return -1
			}
			i += strEnd
			keyEnd := i - 1

			valueOffset := nextToken(data[i:])
			if valueOffset == -1 {
				return -1
			}

			i += valueOffset

			// if string is a key, and key level match
			if data[i] == ':' && keyLevel == level-1 {
				key := data[keyBegin:keyEnd]

				// for unescape: if there are no escape sequences, this is cheap; if there are, it is a
				// bit more expensive, but causes no allocations unless len(key) > unescapeStackBufSize
				var keyUnesc []byte
				if !keyEscaped {
					keyUnesc = key
				} else if ku, err := Unescape(key, stackbuf[:]); err != nil {
					return -1
				} else {
					keyUnesc = ku
				}

				if equalStr(&keyUnesc, keys[level-1]) {
					keyLevel++
					// If we found all keys in path
					if keyLevel == lk {
						return i + 1
					}
				}
			} else {
				// The string was a value, not a matching key; step back so the
				// trailing i++ re-examines the current byte in the outer loop.
				i--
			}
		case '{':
			level++
		case '}':
			level--
			// Leaving the object that matched the last path element undoes it.
			if level == keyLevel {
				keyLevel--
			}
		case '[':
			// If we want to get array element by index
			if keyLevel == level && keys[level][0] == '[' {
				aIdx, _ := strconv.Atoi(keys[level][1 : len(keys[level])-1])

				var curIdx int
				var valueFound []byte
				var valueOffset int

				ArrayEach(data[i:], func(value []byte, dataType ValueType, offset int, err error) {
					if curIdx == aIdx {
						valueFound = value
						valueOffset = offset
					}
					curIdx += 1
				})

				if valueFound == nil {
					return -1
				} else {
					// Recurse into the selected element for the rest of the path.
					return i + valueOffset + searchKeys(valueFound, keys[level+1:]...)
				}
			} else {
				// Do not search for keys inside arrays
				if arraySkip := blockEnd(data[i:], '[', ']'); arraySkip == -1 {
					return -1
				} else {
					i += arraySkip - 1
				}
			}
		}

		i++
	}

	return -1
}
|
||||
|
||||
// bitwiseFlags[i] == 2^i (for i in 0..62). These single-bit masks are used by
// EachKey / searchKeys to track which paths and array indices have already
// been matched; this implicitly caps usable paths/indices at ~62.
// (math.Pow is exact here: powers of two up to 2^62 fit float64 precisely.)
var bitwiseFlags []int64

func init() {
	for i := 0; i < 63; i++ {
		bitwiseFlags = append(bitwiseFlags, int64(math.Pow(2, float64(i))))
	}
}
|
||||
|
||||
// sameTree reports whether the shorter of the two key paths is a prefix of
// the longer one, i.e. both paths descend through the same subtree.
func sameTree(p1, p2 []string) bool {
	n := len(p1)
	if len(p2) < n {
		n = len(p2)
	}

	for i := 0; i < n; i++ {
		if p1[i] != p2[i] {
			return false
		}
	}

	return true
}
|
||||
|
||||
// EachKey performs a single pass over data, invoking cb(pathIndex, value,
// valueType, err) once for each of the given key paths that is found. It
// returns the offset reached when all paths were matched, or -1 if the scan
// ended (or the JSON was malformed) before every path was found.
// NOTE(review): bitwiseFlags indexing limits this to ~62 paths/array indices.
func EachKey(data []byte, cb func(int, []byte, ValueType, error), paths ...[]string) int {
	var pathFlags int64 // bit per path: set once that path has been matched
	var level, pathsMatched, i int
	ln := len(data)

	// Longest path determines how much key-stack space we need.
	var maxPath int
	for _, p := range paths {
		if len(p) > maxPath {
			maxPath = len(p)
		}
	}

	var stackbuf [unescapeStackBufSize]byte // stack-allocated array for allocation-free unescaping of small strings
	pathsBuf := make([]string, maxPath)     // keys seen on the current descent, one per level

	for i < ln {
		switch data[i] {
		case '"':
			i++
			keyBegin := i

			strEnd, keyEscaped := stringEnd(data[i:])
			if strEnd == -1 {
				return -1
			}
			i += strEnd

			keyEnd := i - 1

			valueOffset := nextToken(data[i:])
			if valueOffset == -1 {
				return -1
			}

			i += valueOffset

			// if string is a key, and key level match
			if data[i] == ':' {
				match := -1
				key := data[keyBegin:keyEnd]

				// for unescape: if there are no escape sequences, this is cheap; if there are, it is a
				// bit more expensive, but causes no allocations unless len(key) > unescapeStackBufSize
				var keyUnesc []byte
				if !keyEscaped {
					keyUnesc = key
				} else if ku, err := Unescape(key, stackbuf[:]); err != nil {
					return -1
				} else {
					keyUnesc = ku
				}

				if maxPath >= level {
					pathsBuf[level-1] = bytesToString(&keyUnesc)

					// Check every not-yet-matched path that terminates at this
					// level with this key.
					for pi, p := range paths {
						if len(p) != level || pathFlags&bitwiseFlags[pi+1] != 0 || !equalStr(&keyUnesc, p[level-1]) || !sameTree(p, pathsBuf[:level]) {
							continue
						}

						match = pi

						i++
						pathsMatched++
						pathFlags |= bitwiseFlags[pi+1]

						v, dt, of, e := Get(data[i:])
						cb(pi, v, dt, e)

						if of != -1 {
							i += of
						}

						if pathsMatched == len(paths) {
							return i
						}
					}
				}

				if match == -1 {
					// No path wanted this key: skip its value (objects wholesale).
					tokenOffset := nextToken(data[i+1:])
					i += tokenOffset

					if data[i] == '{' {
						blockSkip := blockEnd(data[i:], '{', '}')
						i += blockSkip + 1
					}
				}

				// Step back so structural bytes are re-seen by the outer switch.
				switch data[i] {
				case '{', '}', '[', '"':
					i--
				}
			} else {
				i--
			}
		case '{':
			level++
		case '}':
			level--
		case '[':
			// Collect which array indices any pending "[N]" path element wants.
			var arrIdxFlags int64
			var pIdxFlags int64
			for pi, p := range paths {
				if len(p) < level+1 || pathFlags&bitwiseFlags[pi+1] != 0 || p[level][0] != '[' || !sameTree(p, pathsBuf[:level]) {
					continue
				}

				aIdx, _ := strconv.Atoi(p[level][1 : len(p[level])-1])
				arrIdxFlags |= bitwiseFlags[aIdx+1]
				pIdxFlags |= bitwiseFlags[pi+1]
			}

			if arrIdxFlags > 0 {
				level++

				var curIdx int
				arrOff, _ := ArrayEach(data[i:], func(value []byte, dataType ValueType, offset int, err error) {
					if arrIdxFlags&bitwiseFlags[curIdx+1] != 0 {
						for pi, p := range paths {
							if pIdxFlags&bitwiseFlags[pi+1] != 0 {
								aIdx, _ := strconv.Atoi(p[level-1][1 : len(p[level-1])-1])

								if curIdx == aIdx {
									of := searchKeys(value, p[level:]...)

									pathsMatched++
									pathFlags |= bitwiseFlags[pi+1]

									if of != -1 {
										v, dt, _, e := Get(value[of:])
										cb(pi, v, dt, e)
									}
								}
							}
						}
					}

					curIdx += 1
				})

				if pathsMatched == len(paths) {
					return i
				}

				i += arrOff - 1
			} else {
				// Do not search for keys inside arrays
				if arraySkip := blockEnd(data[i:], '[', ']'); arraySkip == -1 {
					return -1
				} else {
					i += arraySkip - 1
				}
			}
		case ']':
			level--
		}

		i++
	}

	return -1
}
|
||||
|
||||
// Data types available in valid JSON data.
type ValueType int

const (
	NotExist = ValueType(iota) // key path was not found
	String
	Number
	Object
	Array
	Boolean
	Null
	Unknown // value did not match any recognized JSON literal form
)
|
||||
|
||||
func (vt ValueType) String() string {
|
||||
switch vt {
|
||||
case NotExist:
|
||||
return "non-existent"
|
||||
case String:
|
||||
return "string"
|
||||
case Number:
|
||||
return "number"
|
||||
case Object:
|
||||
return "object"
|
||||
case Array:
|
||||
return "array"
|
||||
case Boolean:
|
||||
return "boolean"
|
||||
case Null:
|
||||
return "null"
|
||||
default:
|
||||
return "unknown"
|
||||
}
|
||||
}
|
||||
|
||||
// Byte forms of the fixed JSON keyword literals, compared against scalar
// values in Get and ParseBoolean.
var (
	trueLiteral  = []byte("true")
	falseLiteral = []byte("false")
	nullLiteral  = []byte("null")
)
|
||||
|
||||
/*
Get - Receives data structure, and key path to extract value from.

Returns:
`value` - Pointer to original data structure containing key value, or just empty slice if nothing found or error
`dataType` - Can be: `NotExist`, `String`, `Number`, `Object`, `Array`, `Boolean` or `Null`
`offset` - Offset from provided data structure where key value ends. Used mostly internally, for example for `ArrayEach` helper.
`err` - If key not found or any other parsing issue it should return error. If key not found it also sets `dataType` to `NotExist`

Accept multiple keys to specify path to JSON value (in case of querying nested structures).
If no keys provided it will try to extract closest JSON value (simple ones or object/array), useful for reading streams or arrays, see `ArrayEach` implementation.
*/
func Get(data []byte, keys ...string) (value []byte, dataType ValueType, offset int, err error) {
	// Resolve the key path first; offset then points just past the final ':'.
	if len(keys) > 0 {
		if offset = searchKeys(data, keys...); offset == -1 {
			return []byte{}, NotExist, -1, KeyPathNotFoundError
		}
	}

	// Go to closest value
	nO := nextToken(data[offset:])
	if nO == -1 {
		return []byte{}, NotExist, -1, MalformedJsonError
	}

	offset += nO

	endOffset := offset
	// if string value
	if data[offset] == '"' {
		dataType = String
		if idx, _ := stringEnd(data[offset+1:]); idx != -1 {
			endOffset += idx + 1
		} else {
			return []byte{}, dataType, offset, MalformedStringError
		}
	} else if data[offset] == '[' { // if array value
		dataType = Array
		// break label, for stopping nested loops
		endOffset = blockEnd(data[offset:], '[', ']')

		if endOffset == -1 {
			return []byte{}, dataType, offset, MalformedArrayError
		}

		endOffset += offset
	} else if data[offset] == '{' { // if object value
		dataType = Object
		// break label, for stopping nested loops
		endOffset = blockEnd(data[offset:], '{', '}')

		if endOffset == -1 {
			return []byte{}, dataType, offset, MalformedObjectError
		}

		endOffset += offset
	} else {
		// Number, Boolean or None
		end := tokenEnd(data[endOffset:])

		if end == -1 {
			return nil, dataType, offset, MalformedValueError
		}

		value := data[offset : endOffset+end]

		// Classify the scalar by its first byte, then verify the full literal.
		switch data[offset] {
		case 't', 'f': // true or false
			if bytes.Equal(value, trueLiteral) || bytes.Equal(value, falseLiteral) {
				dataType = Boolean
			} else {
				return nil, Unknown, offset, UnknownValueTypeError
			}
		case 'u', 'n': // undefined or null
			if bytes.Equal(value, nullLiteral) {
				dataType = Null
			} else {
				return nil, Unknown, offset, UnknownValueTypeError
			}
		case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '-':
			dataType = Number
		default:
			return nil, Unknown, offset, UnknownValueTypeError
		}

		endOffset += end
	}

	value = data[offset:endOffset]

	// Strip quotes from string values
	if dataType == String {
		value = value[1 : len(value)-1]
	}

	// Null is reported as an empty slice rather than the literal "null" bytes.
	if dataType == Null {
		value = []byte{}
	}

	return value, dataType, endOffset, nil
}
|
||||
|
||||
// ArrayEach is used when iterating arrays, accepts a callback function with the same return arguments as `Get`.
|
||||
func ArrayEach(data []byte, cb func(value []byte, dataType ValueType, offset int, err error), keys ...string) (offset int, err error) {
|
||||
if len(data) == 0 {
|
||||
return -1, MalformedObjectError
|
||||
}
|
||||
|
||||
offset = 1
|
||||
|
||||
if len(keys) > 0 {
|
||||
if offset = searchKeys(data, keys...); offset == -1 {
|
||||
return offset, KeyPathNotFoundError
|
||||
}
|
||||
|
||||
// Go to closest value
|
||||
nO := nextToken(data[offset:])
|
||||
if nO == -1 {
|
||||
return offset, MalformedJsonError
|
||||
}
|
||||
|
||||
offset += nO
|
||||
|
||||
if data[offset] != '[' {
|
||||
return offset, MalformedArrayError
|
||||
}
|
||||
|
||||
offset++
|
||||
}
|
||||
|
||||
nO := nextToken(data[offset:])
|
||||
if nO == -1 {
|
||||
return offset, MalformedJsonError
|
||||
}
|
||||
|
||||
offset += nO
|
||||
|
||||
if data[offset] == ']' {
|
||||
return offset, nil
|
||||
}
|
||||
|
||||
for true {
|
||||
v, t, o, e := Get(data[offset:])
|
||||
|
||||
if e != nil {
|
||||
return offset, e
|
||||
}
|
||||
|
||||
if o == 0 {
|
||||
break
|
||||
}
|
||||
|
||||
if t != NotExist {
|
||||
cb(v, t, offset+o-len(v), e)
|
||||
}
|
||||
|
||||
if e != nil {
|
||||
break
|
||||
}
|
||||
|
||||
offset += o
|
||||
|
||||
skipToToken := nextToken(data[offset:])
|
||||
if skipToToken == -1 {
|
||||
return offset, MalformedArrayError
|
||||
}
|
||||
offset += skipToToken
|
||||
|
||||
if data[offset] == ']' {
|
||||
break
|
||||
}
|
||||
|
||||
if data[offset] != ',' {
|
||||
return offset, MalformedArrayError
|
||||
}
|
||||
|
||||
offset++
|
||||
}
|
||||
|
||||
return offset, nil
|
||||
}
|
||||
|
||||
// ObjectEach iterates over the key-value pairs of a JSON object, invoking a given callback for each such entry.
// If keys are supplied, it first descends to the object at that key path.
// Iteration stops early (returning the callback's error) if the callback fails.
// The offset passed to the callback is where the entry's value ends in data.
func ObjectEach(data []byte, callback func(key []byte, value []byte, dataType ValueType, offset int) error, keys ...string) (err error) {
	var stackbuf [unescapeStackBufSize]byte // stack-allocated array for allocation-free unescaping of small strings
	offset := 0

	// Descend to the desired key, if requested
	if len(keys) > 0 {
		if off := searchKeys(data, keys...); off == -1 {
			return KeyPathNotFoundError
		} else {
			offset = off
		}
	}

	// Validate and skip past opening brace
	if off := nextToken(data[offset:]); off == -1 {
		return MalformedObjectError
	} else if offset += off; data[offset] != '{' {
		return MalformedObjectError
	} else {
		offset++
	}

	// Skip to the first token inside the object, or stop if we find the ending brace
	if off := nextToken(data[offset:]); off == -1 {
		return MalformedJsonError
	} else if offset += off; data[offset] == '}' {
		return nil
	}

	// Loop pre-condition: data[offset] points to what should be either the next entry's key, or the closing brace (if it's anything else, the JSON is malformed)
	for offset < len(data) {
		// Step 1: find the next key
		var key []byte

		// Check what the next token is: start of string, end of object, or something else (error)
		switch data[offset] {
		case '"':
			offset++ // accept as string and skip opening quote
		case '}':
			return nil // we found the end of the object; stop and return success
		default:
			return MalformedObjectError
		}

		// Find the end of the key string
		var keyEscaped bool
		if off, esc := stringEnd(data[offset:]); off == -1 {
			return MalformedJsonError
		} else {
			key, keyEscaped = data[offset:offset+off-1], esc
			offset += off
		}

		// Unescape the string if needed
		if keyEscaped {
			if keyUnescaped, err := Unescape(key, stackbuf[:]); err != nil {
				return MalformedStringEscapeError
			} else {
				key = keyUnescaped
			}
		}

		// Step 2: skip the colon
		if off := nextToken(data[offset:]); off == -1 {
			return MalformedJsonError
		} else if offset += off; data[offset] != ':' {
			return MalformedJsonError
		} else {
			offset++
		}

		// Step 3: find the associated value, then invoke the callback
		if value, valueType, off, err := Get(data[offset:]); err != nil {
			return err
		} else if err := callback(key, value, valueType, offset+off); err != nil { // Invoke the callback here!
			return err
		} else {
			offset += off
		}

		// Step 4: skip over the next comma to the following token, or stop if we hit the ending brace
		if off := nextToken(data[offset:]); off == -1 {
			return MalformedArrayError
		} else {
			offset += off
			switch data[offset] {
			case '}':
				return nil // Stop if we hit the close brace
			case ',':
				offset++ // Ignore the comma
			default:
				return MalformedObjectError
			}
		}

		// Skip to the next token after the comma
		if off := nextToken(data[offset:]); off == -1 {
			return MalformedArrayError
		} else {
			offset += off
		}
	}

	return MalformedObjectError // we shouldn't get here; it's expected that we will return via finding the ending brace
}
|
||||
|
||||
// GetUnsafeString returns the value retrieved by `Get`, use creates string without memory allocation by mapping string to slice memory. It does not handle escape symbols.
|
||||
func GetUnsafeString(data []byte, keys ...string) (val string, err error) {
|
||||
v, _, _, e := Get(data, keys...)
|
||||
|
||||
if e != nil {
|
||||
return "", e
|
||||
}
|
||||
|
||||
return bytesToString(&v), nil
|
||||
}
|
||||
|
||||
// GetString returns the value retrieved by `Get`, cast to a string if possible, trying to properly handle escape and utf8 symbols
|
||||
// If key data type do not match, it will return an error.
|
||||
func GetString(data []byte, keys ...string) (val string, err error) {
|
||||
v, t, _, e := Get(data, keys...)
|
||||
|
||||
if e != nil {
|
||||
return "", e
|
||||
}
|
||||
|
||||
if t != String {
|
||||
return "", fmt.Errorf("Value is not a string: %s", string(v))
|
||||
}
|
||||
|
||||
// If no escapes return raw conten
|
||||
if bytes.IndexByte(v, '\\') == -1 {
|
||||
return string(v), nil
|
||||
}
|
||||
|
||||
return ParseString(v)
|
||||
}
|
||||
|
||||
// GetFloat returns the value retrieved by `Get`, cast to a float64 if possible.
|
||||
// The offset is the same as in `Get`.
|
||||
// If key data type do not match, it will return an error.
|
||||
func GetFloat(data []byte, keys ...string) (val float64, err error) {
|
||||
v, t, _, e := Get(data, keys...)
|
||||
|
||||
if e != nil {
|
||||
return 0, e
|
||||
}
|
||||
|
||||
if t != Number {
|
||||
return 0, fmt.Errorf("Value is not a number: %s", string(v))
|
||||
}
|
||||
|
||||
return ParseFloat(v)
|
||||
}
|
||||
|
||||
// GetInt returns the value retrieved by `Get`, cast to a int64 if possible.
|
||||
// If key data type do not match, it will return an error.
|
||||
func GetInt(data []byte, keys ...string) (val int64, err error) {
|
||||
v, t, _, e := Get(data, keys...)
|
||||
|
||||
if e != nil {
|
||||
return 0, e
|
||||
}
|
||||
|
||||
if t != Number {
|
||||
return 0, fmt.Errorf("Value is not a number: %s", string(v))
|
||||
}
|
||||
|
||||
return ParseInt(v)
|
||||
}
|
||||
|
||||
// GetBoolean returns the value retrieved by `Get`, cast to a bool if possible.
|
||||
// The offset is the same as in `Get`.
|
||||
// If key data type do not match, it will return error.
|
||||
func GetBoolean(data []byte, keys ...string) (val bool, err error) {
|
||||
v, t, _, e := Get(data, keys...)
|
||||
|
||||
if e != nil {
|
||||
return false, e
|
||||
}
|
||||
|
||||
if t != Boolean {
|
||||
return false, fmt.Errorf("Value is not a boolean: %s", string(v))
|
||||
}
|
||||
|
||||
return ParseBoolean(v)
|
||||
}
|
||||
|
||||
// ParseBoolean parses a Boolean ValueType into a Go bool (not particularly useful, but here for completeness)
|
||||
func ParseBoolean(b []byte) (bool, error) {
|
||||
switch {
|
||||
case bytes.Equal(b, trueLiteral):
|
||||
return true, nil
|
||||
case bytes.Equal(b, falseLiteral):
|
||||
return false, nil
|
||||
default:
|
||||
return false, MalformedValueError
|
||||
}
|
||||
}
|
||||
|
||||
// ParseString parses a String ValueType into a Go string (the main parsing work is unescaping the JSON string)
|
||||
func ParseString(b []byte) (string, error) {
|
||||
var stackbuf [unescapeStackBufSize]byte // stack-allocated array for allocation-free unescaping of small strings
|
||||
if bU, err := Unescape(b, stackbuf[:]); err != nil {
|
||||
return "", nil
|
||||
} else {
|
||||
return string(bU), nil
|
||||
}
|
||||
}
|
||||
|
||||
// ParseNumber parses a Number ValueType into a Go float64
|
||||
func ParseFloat(b []byte) (float64, error) {
|
||||
if v, err := parseFloat(&b); err != nil {
|
||||
return 0, MalformedValueError
|
||||
} else {
|
||||
return v, nil
|
||||
}
|
||||
}
|
||||
|
||||
// ParseInt parses a Number ValueType into a Go int64
|
||||
func ParseInt(b []byte) (int64, error) {
|
||||
if v, ok := parseInt(b); !ok {
|
||||
return 0, MalformedValueError
|
||||
} else {
|
||||
return v, nil
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user