summaryrefslogtreecommitdiff
path: root/vendor/github.com/neilotoole
diff options
context:
space:
mode:
Diffstat (limited to 'vendor/github.com/neilotoole')
-rw-r--r--vendor/github.com/neilotoole/jsoncolor/.gitignore21
-rw-r--r--vendor/github.com/neilotoole/jsoncolor/.golangci.yml334
-rw-r--r--vendor/github.com/neilotoole/jsoncolor/LICENSE21
-rw-r--r--vendor/github.com/neilotoole/jsoncolor/README.md241
-rw-r--r--vendor/github.com/neilotoole/jsoncolor/SECURITY.md14
-rw-r--r--vendor/github.com/neilotoole/jsoncolor/SEGMENTIO_README.md76
-rw-r--r--vendor/github.com/neilotoole/jsoncolor/ascii.go124
-rw-r--r--vendor/github.com/neilotoole/jsoncolor/codec.go1183
-rw-r--r--vendor/github.com/neilotoole/jsoncolor/decode.go1195
-rw-r--r--vendor/github.com/neilotoole/jsoncolor/encode.go1054
-rw-r--r--vendor/github.com/neilotoole/jsoncolor/json.go459
-rw-r--r--vendor/github.com/neilotoole/jsoncolor/jsoncolor.go141
-rw-r--r--vendor/github.com/neilotoole/jsoncolor/parse.go735
-rw-r--r--vendor/github.com/neilotoole/jsoncolor/reflect.go20
-rw-r--r--vendor/github.com/neilotoole/jsoncolor/reflect_optimize.go30
-rw-r--r--vendor/github.com/neilotoole/jsoncolor/splash.pngbin0 -> 99137 bytes
-rw-r--r--vendor/github.com/neilotoole/jsoncolor/terminal.go42
-rw-r--r--vendor/github.com/neilotoole/jsoncolor/terminal_windows.go53
-rw-r--r--vendor/github.com/neilotoole/jsoncolor/token.go286
19 files changed, 6029 insertions, 0 deletions
diff --git a/vendor/github.com/neilotoole/jsoncolor/.gitignore b/vendor/github.com/neilotoole/jsoncolor/.gitignore
new file mode 100644
index 0000000..97fe1d6
--- /dev/null
+++ b/vendor/github.com/neilotoole/jsoncolor/.gitignore
@@ -0,0 +1,21 @@
+# Binaries for programs and plugins
+*.exe
+*.exe~
+*.dll
+*.so
+*.dylib
+
+# Test binary, built with `go test -c`
+*.test
+
+# Output of the go coverage tool, specifically when used with LiteIDE
+*.out
+
+# Dependency directories (remove the comment below to include it)
+# vendor/
+
+*.iml
+.idea
+TODO.md
+**/.DS_Store
+/scratch/
diff --git a/vendor/github.com/neilotoole/jsoncolor/.golangci.yml b/vendor/github.com/neilotoole/jsoncolor/.golangci.yml
new file mode 100644
index 0000000..a53fb78
--- /dev/null
+++ b/vendor/github.com/neilotoole/jsoncolor/.golangci.yml
@@ -0,0 +1,334 @@
+# This code is licensed under the terms of the MIT license.
+
+## Golden config for golangci-lint v1.54
+#
+# This is the best config for golangci-lint based on my experience and opinion.
+# It is very strict, but not extremely strict.
+# Feel free to adopt and change it for your needs.
+#
+# @neilotoole: ^^ Well, it's less strict now!
+# Based on: https://gist.github.com/maratori/47a4d00457a92aa426dbd48a18776322
+
+run:
+ # Timeout for analysis, e.g. 30s, 5m.
+ # Default: 1m
+ timeout: 5m
+
+ tests: false
+
+ skip-dirs:
+ - scratch
+
+
+
+
+
+output:
+ sort-results: true
+
+# This file contains only configs which differ from defaults.
+# All possible options can be found here https://github.com/golangci/golangci-lint/blob/master/.golangci.reference.yml
+linters-settings:
+ cyclop:
+ # The maximal code complexity to report.
+ # Default: 10
+ max-complexity: 50
+ # The maximal average package complexity.
+ # If it's higher than 0.0 (float) the check is enabled
+ # Default: 0.0
+ package-average: 10.0
+
+ errcheck:
+ # Report about not checking of errors in type assertions: `a := b.(MyStruct)`.
+ # Such cases aren't reported by default.
+ # Default: false
+ check-type-assertions: true
+
+ exhaustive:
+ # Program elements to check for exhaustiveness.
+ # Default: [ switch ]
+ check:
+ - switch
+ - map
+
+ funlen:
+ # Checks the number of lines in a function.
+ # If lower than 0, disable the check.
+ # Default: 60
+ lines: 150
+ # Checks the number of statements in a function.
+ # If lower than 0, disable the check.
+ # Default: 40
+ statements: 100
+
+ gocognit:
+ # Minimal code complexity to report
+ # Default: 30 (but we recommend 10-20)
+ min-complexity: 50
+
+ gocritic:
+ # Settings passed to gocritic.
+ # The settings key is the name of a supported gocritic checker.
+ # The list of supported checkers can be find in https://go-critic.github.io/overview.
+ settings:
+ captLocal:
+ # Whether to restrict checker to params only.
+ # Default: true
+ paramsOnly: false
+ underef:
+ # Whether to skip (*x).method() calls where x is a pointer receiver.
+ # Default: true
+ skipRecvDeref: false
+
+ gocyclo:
+ # Minimal code complexity to report.
+ # Default: 30 (but we recommend 10-20)
+ min-complexity: 50
+
+ gofumpt:
+ # Module path which contains the source code being formatted.
+ # Default: ""
+ module-path: github.com/neilotoole/jsoncolor
+ # Choose whether to use the extra rules.
+ # Default: false
+ extra-rules: true
+
+ gomnd:
+ # List of function patterns to exclude from analysis.
+ # Values always ignored: `time.Date`,
+ # `strconv.FormatInt`, `strconv.FormatUint`, `strconv.FormatFloat`,
+ # `strconv.ParseInt`, `strconv.ParseUint`, `strconv.ParseFloat`.
+ # Default: []
+ ignored-functions:
+ - make
+ - os.Chmod
+ - os.Mkdir
+ - os.MkdirAll
+ - os.OpenFile
+ - os.WriteFile
+ - prometheus.ExponentialBuckets
+ - prometheus.ExponentialBucketsRange
+ - prometheus.LinearBuckets
+ ignored-numbers:
+ - "2"
+ - "3"
+
+ gomodguard:
+ blocked:
+ # List of blocked modules.
+ # Default: []
+ modules:
+ - github.com/golang/protobuf:
+ recommendations:
+ - google.golang.org/protobuf
+ reason: "see https://developers.google.com/protocol-buffers/docs/reference/go/faq#modules"
+ - github.com/satori/go.uuid:
+ recommendations:
+ - github.com/google/uuid
+ reason: "satori's package is not maintained"
+ - github.com/gofrs/uuid:
+ recommendations:
+ - github.com/google/uuid
+          reason: "gofrs' package is not a go module"
+
+ govet:
+ # Enable all analyzers.
+ # Default: false
+ enable-all: true
+ # Disable analyzers by name.
+ # Run `go tool vet help` to see all analyzers.
+ # Default: []
+ disable:
+ - fieldalignment # too strict
+ # Settings per analyzer.
+ settings:
+ shadow:
+ # Whether to be strict about shadowing; can be noisy.
+ # Default: false
+ strict: false
+
+ lll:
+ # Max line length, lines longer will be reported.
+ # '\t' is counted as 1 character by default, and can be changed with the tab-width option.
+ # Default: 120.
+ line-length: 120
+ # Tab width in spaces.
+ # Default: 1
+ tab-width: 1
+
+ nakedret:
+ # Make an issue if func has more lines of code than this setting, and it has naked returns.
+ # Default: 30
+ max-func-lines: 0
+
+ nestif:
+ # Minimal complexity of if statements to report.
+ # Default: 5
+ min-complexity: 20
+
+ nolintlint:
+ # Exclude following linters from requiring an explanation.
+ # Default: []
+ allow-no-explanation: [ funlen, gocognit, lll ]
+ # Enable to require an explanation of nonzero length after each nolint directive.
+ # Default: false
+ require-explanation: false
+ # Enable to require nolint directives to mention the specific linter being suppressed.
+ # Default: false
+ require-specific: true
+
+ rowserrcheck:
+ # database/sql is always checked
+ # Default: []
+ packages:
+# - github.com/jmoiron/sqlx
+
+ tenv:
+ # The option `all` will run against whole test files (`_test.go`) regardless of method/function signatures.
+ # Otherwise, only methods that take `*testing.T`, `*testing.B`, and `testing.TB` as arguments are checked.
+ # Default: false
+ all: true
+
+
+linters:
+ disable-all: true
+
+ enable:
+ ## enabled by default
+ - errcheck # checking for unchecked errors, these unchecked errors can be critical bugs in some cases
+ - gosimple # specializes in simplifying a code
+ - govet # reports suspicious constructs, such as Printf calls whose arguments do not align with the format string
+ - ineffassign # detects when assignments to existing variables are not used
+ - staticcheck # is a go vet on steroids, applying a ton of static analysis checks
+ - typecheck # like the front-end of a Go compiler, parses and type-checks Go code
+ - unused # checks for unused constants, variables, functions and types
+
+
+ # ## disabled by default
+ - asasalint # checks for pass []any as any in variadic func(...any)
+ - asciicheck # checks that your code does not contain non-ASCII identifiers
+ - bidichk # checks for dangerous unicode character sequences
+ - bodyclose # checks whether HTTP response body is closed successfully
+ - cyclop # checks function and package cyclomatic complexity
+ - dupl # tool for code clone detection
+ - durationcheck # checks for two durations multiplied together
+ - errname # checks that sentinel errors are prefixed with the Err and error types are suffixed with the Error
+ - errorlint # finds code that will cause problems with the error wrapping scheme introduced in Go 1.13
+ - execinquery # checks query string in Query function which reads your Go src files and warning it finds
+ - exhaustive # checks exhaustiveness of enum switch statements
+ - exportloopref # checks for pointers to enclosing loop variables
+ - forbidigo # forbids identifiers
+ - funlen # tool for detection of long functions
+ - gochecknoinits # checks that no init functions are present in Go code
+ - gocognit # computes and checks the cognitive complexity of functions
+ - goconst # finds repeated strings that could be replaced by a constant
+ - gocritic # provides diagnostics that check for bugs, performance and style issues
+ - gocyclo # computes and checks the cyclomatic complexity of functions
+ - godot # checks if comments end in a period
+ - gofumpt
+ - goimports # in addition to fixing imports, goimports also formats your code in the same style as gofmt
+ # - gomoddirectives # manages the use of 'replace', 'retract', and 'excludes' directives in go.mod
+ - gomodguard # allow and block lists linter for direct Go module dependencies. This is different from depguard where there are different block types for example version constraints and module recommendations
+ - goprintffuncname # checks that printf-like functions are named with f at the end
+ - gosec # inspects source code for security problems
+ - lll # reports long lines
+ - loggercheck # checks key value pairs for common logger libraries (kitlog,klog,logr,zap)
+ - makezero # finds slice declarations with non-zero initial length
+ - nakedret # finds naked returns in functions greater than a specified function length
+ - nestif # reports deeply nested if statements
+ - nilerr # finds the code that returns nil even if it checks that the error is not nil
+ - nilnil # checks that there is no simultaneous return of nil error and an invalid value
+ - noctx # finds sending http request without context.Context
+ - nolintlint # reports ill-formed or insufficient nolint directives
+ - nosprintfhostport # checks for misuse of Sprintf to construct a host with port in a URL
+ - predeclared # finds code that shadows one of Go's predeclared identifiers
+ - promlinter # checks Prometheus metrics naming via promlint
+ - reassign # checks that package variables are not reassigned
+ - revive # fast, configurable, extensible, flexible, and beautiful linter for Go, drop-in replacement of golint
+ - stylecheck # is a replacement for golint
+ - tenv # detects using os.Setenv instead of t.Setenv since Go1.17
+ - testableexamples # checks if examples are testable (have an expected output)
+ - tparallel # detects inappropriate usage of t.Parallel() method in your Go test codes
+ - unconvert # removes unnecessary type conversions
+ - unparam # reports unused function parameters
+ - usestdlibvars # detects the possibility to use variables/constants from the Go standard library
+ - whitespace # detects leading and trailing whitespace
+
+ ## These three linters are disabled for now due to generics: https://github.com/golangci/golangci-lint/issues/2649
+ #- rowserrcheck # checks whether Err of rows is checked successfully # Disabled because: https://github.com/golangci/golangci-lint/issues/2649
+ #- sqlclosecheck # checks that sql.Rows and sql.Stmt are closed
+ #- wastedassign # finds wasted assignment statements
+
+
+ ## you may want to enable
+ #- decorder # checks declaration order and count of types, constants, variables and functions
+ #- exhaustruct # checks if all structure fields are initialized
+ #- gochecknoglobals # checks that no global variables exist
+ #- godox # detects FIXME, TODO and other comment keywords
+ #- goheader # checks is file header matches to pattern
+ #- gomnd # detects magic numbers
+ #- interfacebloat # checks the number of methods inside an interface
+ #- ireturn # accept interfaces, return concrete types
+ #- prealloc # [premature optimization, but can be used in some cases] finds slice declarations that could potentially be preallocated
+ #- varnamelen # [great idea, but too many false positives] checks that the length of a variable's name matches its scope
+ #- wrapcheck # checks that errors returned from external packages are wrapped
+
+ ## disabled
+ #- containedctx # detects struct contained context.Context field
+ #- contextcheck # [too many false positives] checks the function whether use a non-inherited context
+ #- depguard # [replaced by gomodguard] checks if package imports are in a list of acceptable packages
+ #- dogsled # checks assignments with too many blank identifiers (e.g. x, _, _, _, := f())
+ #- dupword # [useless without config] checks for duplicate words in the source code
+ #- errchkjson # [don't see profit + I'm against of omitting errors like in the first example https://github.com/breml/errchkjson] checks types passed to the json encoding functions. Reports unsupported types and optionally reports occasions, where the check for the returned error can be omitted
+ #- forcetypeassert # [replaced by errcheck] finds forced type assertions
+ #- goerr113 # [too strict] checks the errors handling expressions
+ #- gofmt # [replaced by goimports] checks whether code was gofmt-ed
+ #- gofumpt # [replaced by goimports, gofumports is not available yet] checks whether code was gofumpt-ed
+ #- grouper # analyzes expression groups
+ #- importas # enforces consistent import aliases
+ #- maintidx # measures the maintainability index of each function
+ #- misspell # [useless] finds commonly misspelled English words in comments
+ #- nlreturn # [too strict and mostly code is not more readable] checks for a new line before return and branch statements to increase code clarity
+ #- paralleltest # [too many false positives] detects missing usage of t.Parallel() method in your Go test
+ #- tagliatelle # checks the struct tags
+ #- thelper # detects golang test helpers without t.Helper() call and checks the consistency of test helpers
+ #- wsl # [too strict and mostly code is not more readable] whitespace linter forces you to use empty lines
+
+ ## deprecated
+ #- deadcode # [deprecated, replaced by unused] finds unused code
+ #- exhaustivestruct # [deprecated, replaced by exhaustruct] checks if all struct's fields are initialized
+ #- golint # [deprecated, replaced by revive] golint differs from gofmt. Gofmt reformats Go source code, whereas golint prints out style mistakes
+ #- ifshort # [deprecated] checks that your code uses short syntax for if-statements whenever possible
+ #- interfacer # [deprecated] suggests narrower interface types
+ #- maligned # [deprecated, replaced by govet fieldalignment] detects Go structs that would take less memory if their fields were sorted
+ #- nosnakecase # [deprecated, replaced by revive var-naming] detects snake case of variable naming and function name
+ #- scopelint # [deprecated, replaced by exportloopref] checks for unpinned variables in go programs
+ #- structcheck # [deprecated, replaced by unused] finds unused struct fields
+ #- varcheck # [deprecated, replaced by unused] finds unused global variables and constants
+
+
+issues:
+ # Maximum count of issues with the same text.
+ # Set to 0 to disable.
+ # Default: 3
+ max-same-issues: 3
+
+ exclude-rules:
+ - source: "^//\\s*go:generate\\s"
+ linters: [ lll ]
+ - source: "(noinspection|TODO)"
+ linters: [ godot ]
+ - source: "//noinspection"
+ linters: [ gocritic ]
+ - source: "^\\s+if _, ok := err\\.\\([^.]+\\.InternalError\\); ok {"
+ linters: [ errorlint ]
+ - path: "_test\\.go"
+ linters:
+ - bodyclose
+ - dupl
+ - funlen
+ - goconst
+ - gosec
+ - noctx
+ - wrapcheck
+
diff --git a/vendor/github.com/neilotoole/jsoncolor/LICENSE b/vendor/github.com/neilotoole/jsoncolor/LICENSE
new file mode 100644
index 0000000..76e1f9c
--- /dev/null
+++ b/vendor/github.com/neilotoole/jsoncolor/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2023 Neil O'Toole
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/neilotoole/jsoncolor/README.md b/vendor/github.com/neilotoole/jsoncolor/README.md
new file mode 100644
index 0000000..a8da54c
--- /dev/null
+++ b/vendor/github.com/neilotoole/jsoncolor/README.md
@@ -0,0 +1,241 @@
+[![Actions Status](https://github.com/neilotoole/jsoncolor/workflows/Go/badge.svg)](https://github.com/neilotoole/jsoncolor/actions?query=workflow%3AGo)
+[![Go Report Card](https://goreportcard.com/badge/neilotoole/jsoncolor)](https://goreportcard.com/report/neilotoole/jsoncolor)
+[![release](https://img.shields.io/badge/release-v0.7.0-green.svg)](https://github.com/neilotoole/jsoncolor/releases/tag/v0.7.0)
+[![Go Reference](https://pkg.go.dev/badge/github.com/neilotoole/jsoncolor.svg)](https://pkg.go.dev/github.com/neilotoole/jsoncolor)
+[![license](https://img.shields.io/github/license/neilotoole/jsoncolor)](./LICENSE)
+
+# jsoncolor
+
+Package `neilotoole/jsoncolor` is a drop-in replacement for stdlib
+[`encoding/json`](https://pkg.go.dev/encoding/json) that outputs colorized JSON.
+
+Why? Well, [`jq`](https://jqlang.github.io/jq/) colorizes its output by default, and color output
+is desirable for many Go CLIs. This package performs colorization (and indentation) inline
+in the encoder, and is significantly faster than stdlib at indentation.
+
+From the example [`jc`](./cmd/jc/main.go) app:
+
+![jsoncolor-output](./splash.png)
+
+## Usage
+
+Get the package per the normal mechanism (requires Go 1.16+):
+
+```shell
+go get -u github.com/neilotoole/jsoncolor
+```
+
+Then:
+
+```go
+package main
+
+import (
+ "fmt"
+ "github.com/mattn/go-colorable"
+ json "github.com/neilotoole/jsoncolor"
+ "os"
+)
+
+func main() {
+ var enc *json.Encoder
+
+ // Note: this check will fail if running inside Goland (and
+ // other IDEs?) as IsColorTerminal will return false.
+ if json.IsColorTerminal(os.Stdout) {
+ // Safe to use color
+ out := colorable.NewColorable(os.Stdout) // needed for Windows
+ enc = json.NewEncoder(out)
+
+ // DefaultColors are similar to jq
+ clrs := json.DefaultColors()
+
+ // Change some values, just for fun
+ clrs.Bool = json.Color("\x1b[36m") // Change the bool color
+ clrs.String = json.Color{} // Disable the string color
+
+ enc.SetColors(clrs)
+ } else {
+ // Can't use color; but the encoder will still work
+ enc = json.NewEncoder(os.Stdout)
+ }
+
+ m := map[string]interface{}{
+ "a": 1,
+ "b": true,
+ "c": "hello",
+ }
+
+ if err := enc.Encode(m); err != nil {
+ fmt.Fprintln(os.Stderr, err)
+ os.Exit(1)
+ }
+}
+```
+
+### Configuration
+
+To enable colorization, invoke [`enc.SetColors`](https://pkg.go.dev/github.com/neilotoole/jsoncolor#Encoder.SetColors).
+
+The [`Colors`](https://pkg.go.dev/github.com/neilotoole/jsoncolor#Colors) struct
+holds color config. The zero value and `nil` are both safe for use (resulting in no colorization).
+
+The [`DefaultColors`](https://pkg.go.dev/github.com/neilotoole/jsoncolor#DefaultColors) func
+returns a `Colors` struct that produces results similar to `jq`:
+
+```go
+// DefaultColors returns the default Colors configuration.
+// These colors largely follow jq's default colorization,
+// with some deviation.
+func DefaultColors() *Colors {
+ return &Colors{
+ Null: Color("\x1b[2m"),
+ Bool: Color("\x1b[1m"),
+ Number: Color("\x1b[36m"),
+ String: Color("\x1b[32m"),
+ Key: Color("\x1b[34;1m"),
+ Bytes: Color("\x1b[2m"),
+ Time: Color("\x1b[32;2m"),
+ Punc: Color{}, // No colorization
+ }
+}
+```
+
+As seen above, use the `Color` zero value (`Color{}`) to
+disable colorization for that JSON element.
+
+### Helper for `fatih/color`
+
+It can be inconvenient to use terminal codes, e.g. `json.Color("\x1b[36m")`.
+A helper package provides an adapter for [`fatih/color`](https://github.com/fatih/color).
+
+```go
+ // import "github.com/neilotoole/jsoncolor/helper/fatihcolor"
+ // import "github.com/fatih/color"
+ // import "github.com/mattn/go-colorable"
+
+ out := colorable.NewColorable(os.Stdout) // needed for Windows
+ enc = json.NewEncoder(out)
+
+ fclrs := fatihcolor.DefaultColors()
+ // Change some values, just for fun
+ fclrs.Number = color.New(color.FgBlue)
+ fclrs.String = color.New(color.FgCyan)
+
+ clrs := fatihcolor.ToCoreColors(fclrs)
+ enc.SetColors(clrs)
+```
+
+### Drop-in for `encoding/json`
+
+This package is a full drop-in for stdlib [`encoding/json`](https://pkg.go.dev/encoding/json)
+(thanks to the ancestral [`segmentio/encoding/json`](https://pkg.go.dev/github.com/segmentio/encoding/json)
+pkg being a full drop-in).
+
+To drop-in, just use an import alias:
+
+```go
+ import json "github.com/neilotoole/jsoncolor"
+```
+
+## Example app: `jc`
+
+See [`cmd/jc`](cmd/jc/main.go) for a trivial CLI implementation that can accept JSON input,
+and output that JSON in color.
+
+```shell
+# From project root
+$ go install ./cmd/jc
+$ cat ./testdata/sakila_actor.json | jc
+```
+
+## Benchmarks
+
+Note that this package contains [`golang_bench_test.go`](./golang_bench_test.go), which
+is inherited from `segmentj`. But here we're interested in [`benchmark_test.go:BenchmarkEncode`](./benchmark_test.go),
+which benchmarks encoding performance versus other JSON encoder packages.
+The results below benchmark the following:
+
+- Stdlib [`encoding/json`](https://pkg.go.dev/encoding/json) (`go1.17.1`).
+- [`segmentj`](https://github.com/segmentio/encoding): `v0.1.14`, which was when `jsoncolor` was forked. The newer `segmentj` code performs even better.
+- `neilotoole/jsoncolor`: (this package) `v0.6.0`.
+- [`nwidger/jsoncolor`](https://github.com/nwidger/jsoncolor): `v0.3.0`, latest at time of benchmarks.
+
+Note that two other Go JSON colorization packages ([`hokaccha/go-prettyjson`](https://github.com/hokaccha/go-prettyjson) and
+[`TylerBrock/colorjson`](https://github.com/TylerBrock/colorjson)) are excluded from
+these benchmarks because they do not provide a stdlib-compatible `Encoder` impl.
+
+```
+$ go test -bench=BenchmarkEncode -benchtime="5s"
+goarch: amd64
+pkg: github.com/neilotoole/jsoncolor
+cpu: Intel(R) Core(TM) i9-9880H CPU @ 2.30GHz
+BenchmarkEncode/stdlib_NoIndent-16 181 33047390 ns/op 8870685 B/op 120022 allocs/op
+BenchmarkEncode/stdlib_Indent-16 124 48093178 ns/op 10470366 B/op 120033 allocs/op
+BenchmarkEncode/segmentj_NoIndent-16 415 14658699 ns/op 3788911 B/op 10020 allocs/op
+BenchmarkEncode/segmentj_Indent-16 195 30628798 ns/op 5404492 B/op 10025 allocs/op
+BenchmarkEncode/neilotoole_NoIndent_NoColor-16 362 16522399 ns/op 3789034 B/op 10020 allocs/op
+BenchmarkEncode/neilotoole_Indent_NoColor-16 303 20146856 ns/op 5460753 B/op 10021 allocs/op
+BenchmarkEncode/neilotoole_NoIndent_Color-16 295 19989420 ns/op 10326019 B/op 10029 allocs/op
+BenchmarkEncode/neilotoole_Indent_Color-16 246 24714163 ns/op 11996890 B/op 10030 allocs/op
+BenchmarkEncode/nwidger_NoIndent_NoColor-16 10 541107983 ns/op 92934231 B/op 4490210 allocs/op
+BenchmarkEncode/nwidger_Indent_NoColor-16 7 798088086 ns/op 117258321 B/op 6290213 allocs/op
+BenchmarkEncode/nwidger_indent_NoIndent_Colo-16 10 542002051 ns/op 92935639 B/op 4490224 allocs/op
+BenchmarkEncode/nwidger_indent_Indent_Color-16 7 799928353 ns/op 117259195 B/op 6290220 allocs/op
+```
+
+As always, take benchmarks with a large grain of salt, as they're based on a (small) synthetic benchmark.
+More benchmarks would give a better picture (and note as well that the benchmarked `segmentj` is an older version, `v0.1.14`).
+
+All that having been said, what can we surmise from these particular results?
+
+- `segmentj` performs better than `stdlib` at all encoding tasks.
+- `jsoncolor` performs better than `segmentj` for indentation (which makes sense, as indentation is performed inline).
+- `jsoncolor` performs better than `stdlib` at all encoding tasks.
+
+Again, trust these benchmarks at your peril. Create your own benchmarks for your own workload.
+
+## Notes
+
+- The [`.golangci.yml`](./.golangci.yml) linter settings have been fiddled with to hush some
+ linting issues inherited from the `segmentio` codebase at the time of forking. Thus, the linter report
+ may not be of great use. In an ideal world, the `jsoncolor` functionality would be [ported](https://github.com/neilotoole/jsoncolor/issues/15) to a
+  more recent (and better-linted) version of the `segmentio` codebase.
+- The `segmentio` encoder (at least as of `v0.1.14`) encodes `time.Duration` as string, while `stdlib` outputs as `int64`.
+ This package follows `stdlib`.
+- The [`Colors.Punc`](https://pkg.go.dev/github.com/neilotoole/jsoncolor#Colors) field controls all
+ punctuation colorization, i.e. `[]{},:"`. It is probably worthwhile to [separate](https://github.com/neilotoole/jsoncolor/issues/16)
+ these out into individually-configurable elements.
+
+<a name="history"></a>
+## CHANGELOG
+
+History: this package is an extract of [`sq`](https://github.com/neilotoole/sq)'s JSON encoding package, which itself is a fork of the
+[`segmentio/encoding`](https://github.com/segmentio/encoding) JSON encoding package. Note that the
+original `sq` JSON encoder was forked from Segment's codebase at `v0.1.14`, so
+the codebases have drifted significantly by now.
+
+### [v0.7.1](https://github.com/neilotoole/jsoncolor/releases/tag/v0.7.1)
+
+- [#27](https://github.com/neilotoole/jsoncolor/pull/27): Improved Windows terminal color support checking.
+
+### [v0.7.0](https://github.com/neilotoole/jsoncolor/releases/tag/v0.7.0)
+
+- [#21](https://github.com/neilotoole/jsoncolor/pull/21): Support for [`encoding.TextMarshaler`](https://pkg.go.dev/encoding#TextMarshaler).
+- [#22](https://github.com/neilotoole/jsoncolor/pull/22): Removed redundant dependencies.
+- [#26](https://github.com/neilotoole/jsoncolor/pull/26): Updated dependencies.
+
+## Acknowledgments
+
+- [`jq`](https://stedolan.github.io/jq/): sine qua non.
+- [`segmentio/encoding`](https://github.com/segmentio/encoding): `jsoncolor` is layered into Segment's JSON encoder. They did the hard work. Much gratitude to that team.
+- [`sq`](https://github.com/neilotoole/sq): `jsoncolor` is effectively an extract of code created specifically for `sq`.
+- [`mattn/go-colorable`](https://github.com/mattn/go-colorable): no project is complete without `mattn` having played a role.
+- [`fatih/color`](https://github.com/fatih/color): the color library.
+- [`@hermannm`](https://github.com/hermannm): for several PRs.
+
+### Related
+
+- [`nwidger/jsoncolor`](https://github.com/nwidger/jsoncolor)
+- [`hokaccha/go-prettyjson`](https://github.com/hokaccha/go-prettyjson)
+- [`TylerBrock/colorjson`](https://github.com/TylerBrock/colorjson)
diff --git a/vendor/github.com/neilotoole/jsoncolor/SECURITY.md b/vendor/github.com/neilotoole/jsoncolor/SECURITY.md
new file mode 100644
index 0000000..af83493
--- /dev/null
+++ b/vendor/github.com/neilotoole/jsoncolor/SECURITY.md
@@ -0,0 +1,14 @@
+# Security Policy
+
+## Supported Versions
+
+
+| Version | Supported |
+| ------- | ------------------ |
+| v0.7.0 | :white_check_mark: |
+| v0.6.0 | :x: |
+
+
+## Reporting a Vulnerability
+
+Open an [issue](https://github.com/neilotoole/jsoncolor/issues/new).
diff --git a/vendor/github.com/neilotoole/jsoncolor/SEGMENTIO_README.md b/vendor/github.com/neilotoole/jsoncolor/SEGMENTIO_README.md
new file mode 100644
index 0000000..c5ed94b
--- /dev/null
+++ b/vendor/github.com/neilotoole/jsoncolor/SEGMENTIO_README.md
@@ -0,0 +1,76 @@
+# encoding/json [![GoDoc](https://godoc.org/github.com/segmentio/encoding/json?status.svg)](https://godoc.org/github.com/segmentio/encoding/json)
+
+Go package offering a replacement implementation of the standard library's
+[`encoding/json`](https://golang.org/pkg/encoding/json/) package, with much
+better performance.
+
+## Usage
+
+The exported API of this package mirrors the standard library's
+[`encoding/json`](https://golang.org/pkg/encoding/json/) package, the only
+change needed to take advantage of the performance improvements is the import
+path of the `json` package, from:
+```go
+import (
+ "encoding/json"
+)
+```
+to
+```go
+import (
+ "github.com/segmentio/encoding/json"
+)
+```
+
+One way to gain higher encoding throughput is to disable HTML escaping.
+It allows the string encoding to use a much more efficient code path which
+does not require parsing UTF-8 runes most of the time.
+
+## Performance Improvements
+
+The internal implementation uses a fair amount of unsafe operations (untyped
+code, pointer arithmetic, etc...) to avoid using reflection as much as possible,
+which is often the reason why serialization code has a large CPU and memory
+footprint.
+
+The package aims for zero unnecessary dynamic memory allocations and hot code
+paths that are mostly free from calls into the reflect package.
+
+## Compatibility with encoding/json
+
+This package aims to be a drop-in replacement, therefore it is tested to behave
+exactly like the standard library's package. However, there are still a few
+missing features that have not been ported yet:
+
+- Streaming decoder, currently the `Decoder` implementation offered by the
+package does not support progressively reading values from a JSON array (unlike
+the standard library). In our experience this is a very rare use-case, if you
+need it you're better off sticking to the standard library, or spend a bit of
+time implementing it in here ;)
+
+Note that none of those features should result in performance degradations if
+they were implemented in the package, and we welcome contributions!
+
+## Trade-offs
+
+As one would expect, we had to make a couple of trade-offs to achieve greater
+performance than the standard library, but there were also features that we
+did not want to give away.
+
+Other open-source packages offering a reduced CPU and memory footprint usually
+do so by designing a different API, or require code generation (therefore adding
+complexity to the build process). These were not acceptable conditions for us,
+as we were not willing to trade off developer productivity for better runtime
+performance. To achieve this, we chose to exactly replicate the standard
+library interfaces and behavior, which meant the package implementation was the
+only area that we were able to work with. The internals of this package make
+heavy use of unsafe pointer arithmetics and other performance optimizations,
+and therefore are not as approachable as typical Go programs. Basically, we put
+a bigger burden on maintainers to achieve better runtime cost without
+sacrificing developer productivity.
+
+For these reasons, we also don't believe that this code should be ported upstream
+to the standard `encoding/json` package. The standard library has to remain
+readable and approachable to maximize stability and maintainability, and make
+projects like this one possible because a high quality reference implementation
+already exists.
diff --git a/vendor/github.com/neilotoole/jsoncolor/ascii.go b/vendor/github.com/neilotoole/jsoncolor/ascii.go
new file mode 100644
index 0000000..6c66eb5
--- /dev/null
+++ b/vendor/github.com/neilotoole/jsoncolor/ascii.go
@@ -0,0 +1,124 @@
+package jsoncolor
+
+import "unsafe"
+
+// asciiValid returns true if b contains only ASCII characters.
+//
+// From https://github.com/segmentio/encoding/blob/v0.1.14/ascii/valid.go#L28
+//
+//go:nosplit
+func asciiValid(b []byte) bool {
+ s, n := unsafe.Pointer(&b), uintptr(len(b))
+
+ i := uintptr(0)
+ p := *(*unsafe.Pointer)(s)
+
+ for n >= 8 {
+ if ((*(*uint64)(unsafe.Pointer(uintptr(p) + i))) & 0x8080808080808080) != 0 {
+ return false
+ }
+ i += 8
+ n -= 8
+ }
+
+ if n >= 4 {
+ if ((*(*uint32)(unsafe.Pointer(uintptr(p) + i))) & 0x80808080) != 0 {
+ return false
+ }
+ i += 4
+ n -= 4
+ }
+
+ var x uint32
+ switch n {
+ case 3:
+ x = uint32(*(*uint8)(unsafe.Pointer(uintptr(p) + i))) | uint32(*(*uint16)(unsafe.Pointer(uintptr(p) + i + 1)))<<8
+ case 2:
+ x = uint32(*(*uint16)(unsafe.Pointer(uintptr(p) + i)))
+ case 1:
+ x = uint32(*(*uint8)(unsafe.Pointer(uintptr(p) + i)))
+ default:
+ return true
+ }
+ return (x & 0x80808080) == 0
+}
+
+// asciiValidPrint returns true if b contains only printable ASCII characters.
+//
+// From https://github.com/segmentio/encoding/blob/v0.1.14/ascii/valid.go#L83
+//
+//go:nosplit
+func asciiValidPrint(b []byte) bool {
+ s, n := unsafe.Pointer(&b), uintptr(len(b))
+
+ if n == 0 {
+ return true
+ }
+
+ i := uintptr(0)
+ p := *(*unsafe.Pointer)(s)
+
+ for (n - i) >= 8 {
+ x := *(*uint64)(unsafe.Pointer(uintptr(p) + i))
+ if hasLess64(x, 0x20) || hasMore64(x, 0x7e) {
+ return false
+ }
+ i += 8
+ }
+
+ if (n - i) >= 4 {
+ x := *(*uint32)(unsafe.Pointer(uintptr(p) + i))
+ if hasLess32(x, 0x20) || hasMore32(x, 0x7e) {
+ return false
+ }
+ i += 4
+ }
+
+ var x uint32
+ switch n - i {
+ case 3:
+ x = 0x20000000 | uint32(*(*uint8)(unsafe.Pointer(uintptr(p) + i))) | uint32(*(*uint16)(unsafe.Pointer(uintptr(p) + i + 1)))<<8
+ case 2:
+ x = 0x20200000 | uint32(*(*uint16)(unsafe.Pointer(uintptr(p) + i)))
+ case 1:
+ x = 0x20202000 | uint32(*(*uint8)(unsafe.Pointer(uintptr(p) + i)))
+ default:
+ return true
+ }
+ return !(hasLess32(x, 0x20) || hasMore32(x, 0x7e))
+}
+
+// https://graphics.stanford.edu/~seander/bithacks.html#HasLessInWord
+const (
+ hasLessConstL64 = (^uint64(0)) / 255
+ hasLessConstR64 = hasLessConstL64 * 128
+
+ hasLessConstL32 = (^uint32(0)) / 255
+ hasLessConstR32 = hasLessConstL32 * 128
+
+ hasMoreConstL64 = (^uint64(0)) / 255
+ hasMoreConstR64 = hasMoreConstL64 * 128
+
+ hasMoreConstL32 = (^uint32(0)) / 255
+ hasMoreConstR32 = hasMoreConstL32 * 128
+)
+
+//go:nosplit
+func hasLess64(x, n uint64) bool {
+ return ((x - (hasLessConstL64 * n)) & ^x & hasLessConstR64) != 0
+}
+
+//go:nosplit
+func hasLess32(x, n uint32) bool {
+ return ((x - (hasLessConstL32 * n)) & ^x & hasLessConstR32) != 0
+}
+
+//go:nosplit
+func hasMore64(x, n uint64) bool {
+ return (((x + (hasMoreConstL64 * (127 - n))) | x) & hasMoreConstR64) != 0
+}
+
+//go:nosplit
+func hasMore32(x, n uint32) bool {
+ return (((x + (hasMoreConstL32 * (127 - n))) | x) & hasMoreConstR32) != 0
+}
diff --git a/vendor/github.com/neilotoole/jsoncolor/codec.go b/vendor/github.com/neilotoole/jsoncolor/codec.go
new file mode 100644
index 0000000..b3b42ef
--- /dev/null
+++ b/vendor/github.com/neilotoole/jsoncolor/codec.go
@@ -0,0 +1,1183 @@
+package jsoncolor
+
+import (
+ "encoding"
+ "encoding/json"
+ "fmt"
+ "reflect"
+ "sort"
+ "strconv"
+ "strings"
+ "sync/atomic"
+ "time"
+ "unicode"
+ "unsafe"
+)
+
+type codec struct {
+ encode encodeFunc
+ decode decodeFunc
+}
+
+type encoder struct {
+ flags AppendFlags
+ clrs *Colors
+ indentr *indenter
+}
+type decoder struct{ flags ParseFlags }
+
+type encodeFunc func(encoder, []byte, unsafe.Pointer) ([]byte, error)
+type decodeFunc func(decoder, []byte, unsafe.Pointer) ([]byte, error)
+
+type emptyFunc func(unsafe.Pointer) bool
+type sortFunc func([]reflect.Value)
+
+var (
+ // Eventually consistent cache mapping go types to dynamically generated
+ // codecs.
+ //
+ // Note: using a uintptr as key instead of reflect.Type shaved ~15ns off of
+ // the ~30ns Marshal/Unmarshal functions which were dominated by the map
+ // lookup time for simple types like bool, int, etc..
+ cache unsafe.Pointer // map[unsafe.Pointer]codec
+)
+
+func cacheLoad() map[unsafe.Pointer]codec {
+ p := atomic.LoadPointer(&cache)
+ return *(*map[unsafe.Pointer]codec)(unsafe.Pointer(&p))
+}
+
+func cacheStore(typ reflect.Type, cod codec, oldCodecs map[unsafe.Pointer]codec) {
+ newCodecs := make(map[unsafe.Pointer]codec, len(oldCodecs)+1)
+ newCodecs[typeid(typ)] = cod
+
+ for t, c := range oldCodecs {
+ newCodecs[t] = c
+ }
+
+ atomic.StorePointer(&cache, *(*unsafe.Pointer)(unsafe.Pointer(&newCodecs)))
+}
+
+func typeid(t reflect.Type) unsafe.Pointer {
+ return (*iface)(unsafe.Pointer(&t)).ptr
+}
+
+func constructCachedCodec(t reflect.Type, cache map[unsafe.Pointer]codec) codec {
+ c := constructCodec(t, map[reflect.Type]*structType{}, t.Kind() == reflect.Ptr)
+
+ if inlined(t) {
+ c.encode = constructInlineValueEncodeFunc(c.encode)
+ }
+
+ cacheStore(t, c, cache)
+ return c
+}
+
+func constructCodec(t reflect.Type, seen map[reflect.Type]*structType, canAddr bool) (c codec) {
+ switch t {
+ case nullType, nil:
+ c = codec{encode: encoder.encodeNull, decode: decoder.decodeNull}
+
+ case numberType:
+ c = codec{encode: encoder.encodeNumber, decode: decoder.decodeNumber}
+
+ case bytesType:
+ c = codec{encode: encoder.encodeBytes, decode: decoder.decodeBytes}
+
+ case durationType:
+ c = codec{encode: encoder.encodeDuration, decode: decoder.decodeDuration}
+
+ case timeType:
+ c = codec{encode: encoder.encodeTime, decode: decoder.decodeTime}
+
+ case interfaceType:
+ c = codec{encode: encoder.encodeInterface, decode: decoder.decodeInterface}
+
+ case rawMessageType:
+ c = codec{encode: encoder.encodeRawMessage, decode: decoder.decodeRawMessage}
+
+ case numberPtrType:
+ c = constructPointerCodec(numberPtrType, nil)
+
+ case durationPtrType:
+ c = constructPointerCodec(durationPtrType, nil)
+
+ case timePtrType:
+ c = constructPointerCodec(timePtrType, nil)
+
+ case rawMessagePtrType:
+ c = constructPointerCodec(rawMessagePtrType, nil)
+ }
+
+ if c.encode != nil {
+ return
+ }
+
+ switch t.Kind() {
+ case reflect.Bool:
+ c = codec{encode: encoder.encodeBool, decode: decoder.decodeBool}
+
+ case reflect.Int:
+ c = codec{encode: encoder.encodeInt, decode: decoder.decodeInt}
+
+ case reflect.Int8:
+ c = codec{encode: encoder.encodeInt8, decode: decoder.decodeInt8}
+
+ case reflect.Int16:
+ c = codec{encode: encoder.encodeInt16, decode: decoder.decodeInt16}
+
+ case reflect.Int32:
+ c = codec{encode: encoder.encodeInt32, decode: decoder.decodeInt32}
+
+ case reflect.Int64:
+ c = codec{encode: encoder.encodeInt64, decode: decoder.decodeInt64}
+
+ case reflect.Uint:
+ c = codec{encode: encoder.encodeUint, decode: decoder.decodeUint}
+
+ case reflect.Uintptr:
+ c = codec{encode: encoder.encodeUintptr, decode: decoder.decodeUintptr}
+
+ case reflect.Uint8:
+ c = codec{encode: encoder.encodeUint8, decode: decoder.decodeUint8}
+
+ case reflect.Uint16:
+ c = codec{encode: encoder.encodeUint16, decode: decoder.decodeUint16}
+
+ case reflect.Uint32:
+ c = codec{encode: encoder.encodeUint32, decode: decoder.decodeUint32}
+
+ case reflect.Uint64:
+ c = codec{encode: encoder.encodeUint64, decode: decoder.decodeUint64}
+
+ case reflect.Float32:
+ c = codec{encode: encoder.encodeFloat32, decode: decoder.decodeFloat32}
+
+ case reflect.Float64:
+ c = codec{encode: encoder.encodeFloat64, decode: decoder.decodeFloat64}
+
+ case reflect.String:
+ c = codec{encode: encoder.encodeString, decode: decoder.decodeString}
+
+ case reflect.Interface:
+ c = constructInterfaceCodec(t)
+
+ case reflect.Array:
+ c = constructArrayCodec(t, seen, canAddr)
+
+ case reflect.Slice:
+ c = constructSliceCodec(t, seen)
+
+ case reflect.Map:
+ c = constructMapCodec(t, seen)
+
+ case reflect.Struct:
+ c = constructStructCodec(t, seen, canAddr)
+
+ case reflect.Ptr:
+ c = constructPointerCodec(t, seen)
+
+ default:
+ c = constructUnsupportedTypeCodec(t)
+ }
+
+ p := reflect.PtrTo(t)
+
+ if canAddr {
+ switch {
+ case p.Implements(jsonMarshalerType):
+ c.encode = constructJSONMarshalerEncodeFunc(t, true)
+ case p.Implements(textMarshalerType):
+ c.encode = constructTextMarshalerEncodeFunc(t, true)
+ }
+ }
+
+ switch {
+ case t.Implements(jsonMarshalerType):
+ c.encode = constructJSONMarshalerEncodeFunc(t, false)
+ case t.Implements(textMarshalerType):
+ c.encode = constructTextMarshalerEncodeFunc(t, false)
+ }
+
+ switch {
+ case p.Implements(jsonUnmarshalerType):
+ c.decode = constructJSONUnmarshalerDecodeFunc(t, true)
+ case p.Implements(textUnmarshalerType):
+ c.decode = constructTextUnmarshalerDecodeFunc(t, true)
+ }
+
+ return
+}
+
+func constructStringCodec(t reflect.Type, seen map[reflect.Type]*structType, canAddr bool) codec {
+ c := constructCodec(t, seen, canAddr)
+ return codec{
+ encode: constructStringEncodeFunc(c.encode),
+ decode: constructStringDecodeFunc(c.decode),
+ }
+}
+
+func constructStringEncodeFunc(encode encodeFunc) encodeFunc {
+ return func(e encoder, b []byte, p unsafe.Pointer) ([]byte, error) {
+ return e.encodeToString(b, p, encode)
+ }
+}
+
+func constructStringDecodeFunc(decode decodeFunc) decodeFunc {
+ return func(d decoder, b []byte, p unsafe.Pointer) ([]byte, error) {
+ return d.decodeFromString(b, p, decode)
+ }
+}
+
+func constructStringToIntDecodeFunc(t reflect.Type, decode decodeFunc) decodeFunc {
+ return func(d decoder, b []byte, p unsafe.Pointer) ([]byte, error) {
+ return d.decodeFromStringToInt(b, p, t, decode)
+ }
+}
+
+func constructArrayCodec(t reflect.Type, seen map[reflect.Type]*structType, canAddr bool) codec {
+ e := t.Elem()
+ c := constructCodec(e, seen, canAddr)
+ s := alignedSize(e)
+ return codec{
+ encode: constructArrayEncodeFunc(s, t, c.encode),
+ decode: constructArrayDecodeFunc(s, t, c.decode),
+ }
+}
+
+func constructArrayEncodeFunc(size uintptr, t reflect.Type, encode encodeFunc) encodeFunc {
+ n := t.Len()
+ return func(e encoder, b []byte, p unsafe.Pointer) ([]byte, error) {
+ return e.encodeArray(b, p, n, size, t, encode)
+ }
+}
+
+func constructArrayDecodeFunc(size uintptr, t reflect.Type, decode decodeFunc) decodeFunc {
+ n := t.Len()
+ return func(d decoder, b []byte, p unsafe.Pointer) ([]byte, error) {
+ return d.decodeArray(b, p, n, size, t, decode)
+ }
+}
+
+func constructSliceCodec(t reflect.Type, seen map[reflect.Type]*structType) codec {
+ e := t.Elem()
+ s := alignedSize(e)
+
+ if e.Kind() == reflect.Uint8 {
+ // Go 1.7+ behavior: slices of byte types (and aliases) may override the
+ // default encoding and decoding behaviors by implementing marshaler and
+ // unmarshaler interfaces.
+ p := reflect.PtrTo(e)
+ c := codec{}
+
+ switch {
+ case e.Implements(jsonMarshalerType):
+ c.encode = constructJSONMarshalerEncodeFunc(e, false)
+ case e.Implements(textMarshalerType):
+ c.encode = constructTextMarshalerEncodeFunc(e, false)
+ case p.Implements(jsonMarshalerType):
+ c.encode = constructJSONMarshalerEncodeFunc(e, true)
+ case p.Implements(textMarshalerType):
+ c.encode = constructTextMarshalerEncodeFunc(e, true)
+ }
+
+ switch {
+ case e.Implements(jsonUnmarshalerType):
+ c.decode = constructJSONUnmarshalerDecodeFunc(e, false)
+ case e.Implements(textUnmarshalerType):
+ c.decode = constructTextUnmarshalerDecodeFunc(e, false)
+ case p.Implements(jsonUnmarshalerType):
+ c.decode = constructJSONUnmarshalerDecodeFunc(e, true)
+ case p.Implements(textUnmarshalerType):
+ c.decode = constructTextUnmarshalerDecodeFunc(e, true)
+ }
+
+ if c.encode != nil {
+ c.encode = constructSliceEncodeFunc(s, t, c.encode)
+ } else {
+ c.encode = encoder.encodeBytes
+ }
+
+ if c.decode != nil {
+ c.decode = constructSliceDecodeFunc(s, t, c.decode)
+ } else {
+ c.decode = decoder.decodeBytes
+ }
+
+ return c
+ }
+
+ c := constructCodec(e, seen, true)
+ return codec{
+ encode: constructSliceEncodeFunc(s, t, c.encode),
+ decode: constructSliceDecodeFunc(s, t, c.decode),
+ }
+}
+
+func constructSliceEncodeFunc(size uintptr, t reflect.Type, encode encodeFunc) encodeFunc {
+ return func(e encoder, b []byte, p unsafe.Pointer) ([]byte, error) {
+ return e.encodeSlice(b, p, size, t, encode)
+ }
+}
+
+func constructSliceDecodeFunc(size uintptr, t reflect.Type, decode decodeFunc) decodeFunc {
+ return func(d decoder, b []byte, p unsafe.Pointer) ([]byte, error) {
+ return d.decodeSlice(b, p, size, t, decode)
+ }
+}
+
+func constructMapCodec(t reflect.Type, seen map[reflect.Type]*structType) codec {
+ var sortKeys sortFunc
+ k := t.Key()
+ v := t.Elem()
+
+ // Faster implementations for some common cases.
+ switch {
+ case k == stringType && v == interfaceType:
+ return codec{
+ encode: encoder.encodeMapStringInterface,
+ decode: decoder.decodeMapStringInterface,
+ }
+
+ case k == stringType && v == rawMessageType:
+ return codec{
+ encode: encoder.encodeMapStringRawMessage,
+ decode: decoder.decodeMapStringRawMessage,
+ }
+ }
+
+ kc := codec{}
+ vc := constructCodec(v, seen, false)
+
+ if k.Implements(textMarshalerType) || reflect.PtrTo(k).Implements(textUnmarshalerType) {
+ kc.encode = constructTextMarshalerEncodeFunc(k, false)
+ kc.decode = constructTextUnmarshalerDecodeFunc(k, true)
+
+ sortKeys = func(keys []reflect.Value) {
+ sort.Slice(keys, func(i, j int) bool {
+ // This is a performance abomination but the use case is rare
+ // enough that it shouldn't be a problem in practice.
+ k1, _ := keys[i].Interface().(encoding.TextMarshaler).MarshalText()
+ k2, _ := keys[j].Interface().(encoding.TextMarshaler).MarshalText()
+ return string(k1) < string(k2)
+ })
+ }
+ } else {
+ switch k.Kind() {
+ case reflect.String:
+ kc.encode = encoder.encodeKey
+ kc.decode = decoder.decodeString
+
+ sortKeys = func(keys []reflect.Value) {
+ sort.Slice(keys, func(i, j int) bool { return keys[i].String() < keys[j].String() })
+ }
+
+ case reflect.Int,
+ reflect.Int8,
+ reflect.Int16,
+ reflect.Int32,
+ reflect.Int64:
+ kc = constructStringCodec(k, seen, false)
+
+ sortKeys = func(keys []reflect.Value) {
+ sort.Slice(keys, func(i, j int) bool { return intStringsAreSorted(keys[i].Int(), keys[j].Int()) })
+ }
+
+ case reflect.Uint,
+ reflect.Uintptr,
+ reflect.Uint8,
+ reflect.Uint16,
+ reflect.Uint32,
+ reflect.Uint64:
+ kc = constructStringCodec(k, seen, false)
+
+ sortKeys = func(keys []reflect.Value) {
+ sort.Slice(keys, func(i, j int) bool { return uintStringsAreSorted(keys[i].Uint(), keys[j].Uint()) })
+ }
+
+ default:
+ return constructUnsupportedTypeCodec(t)
+ }
+ }
+
+ if inlined(v) {
+ vc.encode = constructInlineValueEncodeFunc(vc.encode)
+ }
+
+ return codec{
+ encode: constructMapEncodeFunc(t, kc.encode, vc.encode, sortKeys),
+ decode: constructMapDecodeFunc(t, kc.decode, vc.decode),
+ }
+}
+
+func constructMapEncodeFunc(t reflect.Type, encodeKey, encodeValue encodeFunc, sortKeys sortFunc) encodeFunc {
+ return func(e encoder, b []byte, p unsafe.Pointer) ([]byte, error) {
+ return e.encodeMap(b, p, t, encodeKey, encodeValue, sortKeys)
+ }
+}
+
+func constructMapDecodeFunc(t reflect.Type, decodeKey, decodeValue decodeFunc) decodeFunc {
+ kt := t.Key()
+ vt := t.Elem()
+ kz := reflect.Zero(kt)
+ vz := reflect.Zero(vt)
+ return func(d decoder, b []byte, p unsafe.Pointer) ([]byte, error) {
+ return d.decodeMap(b, p, t, kt, vt, kz, vz, decodeKey, decodeValue)
+ }
+}
+
+func constructStructCodec(t reflect.Type, seen map[reflect.Type]*structType, canAddr bool) codec {
+ st := constructStructType(t, seen, canAddr)
+ return codec{
+ encode: constructStructEncodeFunc(st),
+ decode: constructStructDecodeFunc(st),
+ }
+}
+
+func constructStructType(t reflect.Type, seen map[reflect.Type]*structType, canAddr bool) *structType {
+ // Used for preventing infinite recursion on types that have pointers to
+ // themselves.
+ st := seen[t]
+
+ if st == nil {
+ st = &structType{
+ fields: make([]structField, 0, t.NumField()),
+ fieldsIndex: make(map[string]*structField),
+ ficaseIndex: make(map[string]*structField),
+ typ: t,
+ }
+
+ seen[t] = st
+ st.fields = appendStructFields(st.fields, t, 0, seen, canAddr)
+
+ for i := range st.fields {
+ f := &st.fields[i]
+ s := strings.ToLower(f.name)
+ st.fieldsIndex[f.name] = f
+ // When there is ambiguity because multiple fields have the same
+ // case-insensitive representation, the first field must win.
+ if _, exists := st.ficaseIndex[s]; !exists {
+ st.ficaseIndex[s] = f
+ }
+ }
+ }
+
+ return st
+}
+
+func constructStructEncodeFunc(st *structType) encodeFunc {
+ return func(e encoder, b []byte, p unsafe.Pointer) ([]byte, error) {
+ return e.encodeStruct(b, p, st)
+ }
+}
+
+func constructStructDecodeFunc(st *structType) decodeFunc {
+ return func(d decoder, b []byte, p unsafe.Pointer) ([]byte, error) {
+ return d.decodeStruct(b, p, st)
+ }
+}
+
+func constructEmbeddedStructPointerCodec(t reflect.Type, unexported bool, offset uintptr, field codec) codec {
+ return codec{
+ encode: constructEmbeddedStructPointerEncodeFunc(t, unexported, offset, field.encode),
+ decode: constructEmbeddedStructPointerDecodeFunc(t, unexported, offset, field.decode),
+ }
+}
+
+func constructEmbeddedStructPointerEncodeFunc(t reflect.Type, unexported bool, offset uintptr, encode encodeFunc) encodeFunc {
+ return func(e encoder, b []byte, p unsafe.Pointer) ([]byte, error) {
+ return e.encodeEmbeddedStructPointer(b, p, t, unexported, offset, encode)
+ }
+}
+
+func constructEmbeddedStructPointerDecodeFunc(t reflect.Type, unexported bool, offset uintptr, decode decodeFunc) decodeFunc {
+ return func(d decoder, b []byte, p unsafe.Pointer) ([]byte, error) {
+ return d.decodeEmbeddedStructPointer(b, p, t, unexported, offset, decode)
+ }
+}
+
+func appendStructFields(fields []structField, t reflect.Type, offset uintptr, seen map[reflect.Type]*structType, canAddr bool) []structField {
+ type embeddedField struct {
+ index int
+ offset uintptr
+ pointer bool
+ unexported bool
+ subtype *structType
+ subfield *structField
+ }
+
+ names := make(map[string]struct{})
+ embedded := make([]embeddedField, 0, 10)
+
+ for i, n := 0, t.NumField(); i < n; i++ {
+ f := t.Field(i)
+
+ var (
+ name = f.Name
+ anonymous = f.Anonymous
+ tag = false
+ omitempty = false
+ stringify = false
+ unexported = len(f.PkgPath) != 0
+ )
+
+ if unexported && !anonymous { // unexported
+ continue
+ }
+
+ if parts := strings.Split(f.Tag.Get("json"), ","); len(parts) != 0 {
+ if len(parts[0]) != 0 {
+ name, tag = parts[0], true
+ }
+
+ if name == "-" && len(parts) == 1 { // ignored
+ continue
+ }
+
+ if !isValidTag(name) {
+ name = f.Name
+ }
+
+ for _, tag := range parts[1:] {
+ switch tag {
+ case "omitempty":
+ omitempty = true
+ case "string":
+ stringify = true
+ }
+ }
+ }
+
+ if anonymous && !tag { // embedded
+ typ := f.Type
+ ptr := f.Type.Kind() == reflect.Ptr
+
+ if ptr {
+ typ = f.Type.Elem()
+ }
+
+ if typ.Kind() == reflect.Struct {
+ // When the embedded field is inlined, the fields can be looked
+ // up by offset from the address of the wrapping object, so we
+ // simply add the embedded struct fields to the list of fields
+ // of the current struct type.
+ subtype := constructStructType(typ, seen, canAddr)
+
+ for j := range subtype.fields {
+ embedded = append(embedded, embeddedField{
+ index: i<<32 | j,
+ offset: offset + f.Offset,
+ pointer: ptr,
+ unexported: unexported,
+ subtype: subtype,
+ subfield: &subtype.fields[j],
+ })
+ }
+
+ continue
+ }
+
+ if unexported { // ignore unexported non-struct types
+ continue
+ }
+ }
+
+ codec := constructCodec(f.Type, seen, canAddr)
+
+ if stringify {
+ // https://golang.org/pkg/encoding/json/#Marshal
+ //
+ // The "string" option signals that a field is stored as JSON inside
+ // a JSON-encoded string. It applies only to fields of string,
+ // floating point, integer, or boolean types. This extra level of
+ // encoding is sometimes used when communicating with JavaScript
+ // programs:
+ typ := f.Type
+
+ if typ.Kind() == reflect.Ptr {
+ typ = typ.Elem()
+ }
+
+ switch typ.Kind() {
+ case reflect.Int,
+ reflect.Int8,
+ reflect.Int16,
+ reflect.Int32,
+ reflect.Int64,
+ reflect.Uint,
+ reflect.Uintptr,
+ reflect.Uint8,
+ reflect.Uint16,
+ reflect.Uint32,
+ reflect.Uint64:
+ codec.encode = constructStringEncodeFunc(codec.encode)
+ codec.decode = constructStringToIntDecodeFunc(typ, codec.decode)
+ case reflect.Bool,
+ reflect.Float32,
+ reflect.Float64,
+ reflect.String:
+ codec.encode = constructStringEncodeFunc(codec.encode)
+ codec.decode = constructStringDecodeFunc(codec.decode)
+ }
+ }
+
+ fields = append(fields, structField{
+ codec: codec,
+ offset: offset + f.Offset,
+ empty: emptyFuncOf(f.Type),
+ tag: tag,
+ omitempty: omitempty,
+ name: name,
+ index: i << 32,
+ typ: f.Type,
+ zero: reflect.Zero(f.Type),
+ })
+
+ names[name] = struct{}{}
+ }
+
+ // Only unambiguous embedded fields must be serialized.
+ ambiguousNames := make(map[string]int)
+ ambiguousTags := make(map[string]int)
+
+ // Embedded types can never override a field that was already present at
+ // the top-level.
+ for name := range names {
+ ambiguousNames[name]++
+ ambiguousTags[name]++
+ }
+
+ for _, embfield := range embedded {
+ ambiguousNames[embfield.subfield.name]++
+ if embfield.subfield.tag {
+ ambiguousTags[embfield.subfield.name]++
+ }
+ }
+
+ for _, embfield := range embedded {
+ subfield := *embfield.subfield
+
+ if ambiguousNames[subfield.name] > 1 && !(subfield.tag && ambiguousTags[subfield.name] == 1) {
+ continue // ambiguous embedded field
+ }
+
+ if embfield.pointer {
+ subfield.codec = constructEmbeddedStructPointerCodec(embfield.subtype.typ, embfield.unexported, subfield.offset, subfield.codec)
+ subfield.offset = embfield.offset
+ } else {
+ subfield.offset += embfield.offset
+ }
+
+ // To prevent dominant flags more than one level below the embedded one.
+ subfield.tag = false
+
+ // To ensure the order of the fields in the output is the same as in the
+ // struct type.
+ subfield.index = embfield.index
+
+ fields = append(fields, subfield)
+ }
+
+ for i := range fields {
+ fields[i].json = encodeString(fields[i].name, 0)
+ fields[i].html = encodeString(fields[i].name, EscapeHTML)
+ }
+
+ sort.Slice(fields, func(i, j int) bool { return fields[i].index < fields[j].index })
+ return fields
+}
+
+func encodeString(s string, flags AppendFlags) string {
+ b := make([]byte, 0, len(s)+2)
+ e := encoder{flags: flags}
+ b, _ = e.doEncodeString(b, unsafe.Pointer(&s))
+ return *(*string)(unsafe.Pointer(&b))
+}
+
+func constructPointerCodec(t reflect.Type, seen map[reflect.Type]*structType) codec {
+ e := t.Elem()
+ c := constructCodec(e, seen, true)
+ return codec{
+ encode: constructPointerEncodeFunc(e, c.encode),
+ decode: constructPointerDecodeFunc(e, c.decode),
+ }
+}
+
+func constructPointerEncodeFunc(t reflect.Type, encode encodeFunc) encodeFunc {
+ return func(e encoder, b []byte, p unsafe.Pointer) ([]byte, error) {
+ return e.encodePointer(b, p, t, encode)
+ }
+}
+
+func constructPointerDecodeFunc(t reflect.Type, decode decodeFunc) decodeFunc {
+ return func(d decoder, b []byte, p unsafe.Pointer) ([]byte, error) {
+ return d.decodePointer(b, p, t, decode)
+ }
+}
+
+func constructInterfaceCodec(t reflect.Type) codec {
+ return codec{
+ encode: constructMaybeEmptyInterfaceEncoderFunc(t),
+ decode: constructMaybeEmptyInterfaceDecoderFunc(t),
+ }
+}
+
+func constructMaybeEmptyInterfaceEncoderFunc(t reflect.Type) encodeFunc {
+ return func(e encoder, b []byte, p unsafe.Pointer) ([]byte, error) {
+ return e.encodeMaybeEmptyInterface(b, p, t)
+ }
+}
+
+func constructMaybeEmptyInterfaceDecoderFunc(t reflect.Type) decodeFunc {
+ return func(d decoder, b []byte, p unsafe.Pointer) ([]byte, error) {
+ return d.decodeMaybeEmptyInterface(b, p, t)
+ }
+}
+
+func constructUnsupportedTypeCodec(t reflect.Type) codec {
+ return codec{
+ encode: constructUnsupportedTypeEncodeFunc(t),
+ decode: constructUnsupportedTypeDecodeFunc(t),
+ }
+}
+
+func constructUnsupportedTypeEncodeFunc(t reflect.Type) encodeFunc {
+ return func(e encoder, b []byte, p unsafe.Pointer) ([]byte, error) {
+ return e.encodeUnsupportedTypeError(b, p, t)
+ }
+}
+
+func constructUnsupportedTypeDecodeFunc(t reflect.Type) decodeFunc {
+ return func(d decoder, b []byte, p unsafe.Pointer) ([]byte, error) {
+ return d.decodeUnmarshalTypeError(b, p, t)
+ }
+}
+
+func constructJSONMarshalerEncodeFunc(t reflect.Type, pointer bool) encodeFunc {
+ return func(e encoder, b []byte, p unsafe.Pointer) ([]byte, error) {
+ return e.encodeJSONMarshaler(b, p, t, pointer)
+ }
+}
+
+func constructJSONUnmarshalerDecodeFunc(t reflect.Type, pointer bool) decodeFunc {
+ return func(d decoder, b []byte, p unsafe.Pointer) ([]byte, error) {
+ return d.decodeJSONUnmarshaler(b, p, t, pointer)
+ }
+}
+
+func constructTextMarshalerEncodeFunc(t reflect.Type, pointer bool) encodeFunc {
+ return func(e encoder, b []byte, p unsafe.Pointer) ([]byte, error) {
+ return e.encodeTextMarshaler(b, p, t, pointer)
+ }
+}
+
+func constructTextUnmarshalerDecodeFunc(t reflect.Type, pointer bool) decodeFunc {
+ return func(d decoder, b []byte, p unsafe.Pointer) ([]byte, error) {
+ return d.decodeTextUnmarshaler(b, p, t, pointer)
+ }
+}
+
+func constructInlineValueEncodeFunc(encode encodeFunc) encodeFunc {
+ return func(e encoder, b []byte, p unsafe.Pointer) ([]byte, error) {
+ return encode(e, b, noescape(unsafe.Pointer(&p)))
+ }
+}
+
+// noescape hides a pointer from escape analysis. noescape is
+// the identity function but escape analysis doesn't think the
+// output depends on the input. noescape is inlined and currently
+// compiles down to zero instructions.
+// USE CAREFULLY!
+// This was copied from the runtime; see issues 23382 and 7921.
+//go:nosplit
+func noescape(p unsafe.Pointer) unsafe.Pointer {
+ x := uintptr(p)
+ return unsafe.Pointer(x ^ 0)
+}
+
+func alignedSize(t reflect.Type) uintptr {
+ a := t.Align()
+ s := t.Size()
+ return align(uintptr(a), uintptr(s))
+}
+
+func align(align, size uintptr) uintptr {
+ if align != 0 && (size%align) != 0 {
+ size = ((size / align) + 1) * align
+ }
+ return size
+}
+
+func inlined(t reflect.Type) bool {
+ switch t.Kind() {
+ case reflect.Ptr:
+ return true
+ case reflect.Map:
+ return true
+ case reflect.Struct:
+ return t.NumField() == 1 && inlined(t.Field(0).Type)
+ default:
+ return false
+ }
+}
+
+func isValidTag(s string) bool {
+ if s == "" {
+ return false
+ }
+ for _, c := range s {
+ switch {
+ case strings.ContainsRune("!#$%&()*+-./:<=>?@[]^_{|}~ ", c):
+ // Backslash and quote chars are reserved, but
+ // otherwise any punctuation chars are allowed
+ // in a tag name.
+ default:
+ if !unicode.IsLetter(c) && !unicode.IsDigit(c) {
+ return false
+ }
+ }
+ }
+ return true
+}
+
+func emptyFuncOf(t reflect.Type) emptyFunc {
+ switch t {
+ case bytesType, rawMessageType:
+ return func(p unsafe.Pointer) bool { return (*slice)(p).len == 0 }
+ }
+
+ switch t.Kind() {
+ case reflect.Array:
+ if t.Len() == 0 {
+ return func(unsafe.Pointer) bool { return true }
+ }
+
+ case reflect.Map:
+ return func(p unsafe.Pointer) bool { return reflect.NewAt(t, p).Elem().Len() == 0 }
+
+ case reflect.Slice:
+ return func(p unsafe.Pointer) bool { return (*slice)(p).len == 0 }
+
+ case reflect.String:
+ return func(p unsafe.Pointer) bool { return len(*(*string)(p)) == 0 }
+
+ case reflect.Bool:
+ return func(p unsafe.Pointer) bool { return !*(*bool)(p) }
+
+ case reflect.Int, reflect.Uint:
+ return func(p unsafe.Pointer) bool { return *(*uint)(p) == 0 }
+
+ case reflect.Uintptr:
+ return func(p unsafe.Pointer) bool { return *(*uintptr)(p) == 0 }
+
+ case reflect.Int8, reflect.Uint8:
+ return func(p unsafe.Pointer) bool { return *(*uint8)(p) == 0 }
+
+ case reflect.Int16, reflect.Uint16:
+ return func(p unsafe.Pointer) bool { return *(*uint16)(p) == 0 }
+
+ case reflect.Int32, reflect.Uint32:
+ return func(p unsafe.Pointer) bool { return *(*uint32)(p) == 0 }
+
+ case reflect.Int64, reflect.Uint64:
+ return func(p unsafe.Pointer) bool { return *(*uint64)(p) == 0 }
+
+ case reflect.Float32:
+ return func(p unsafe.Pointer) bool { return *(*float32)(p) == 0 }
+
+ case reflect.Float64:
+ return func(p unsafe.Pointer) bool { return *(*float64)(p) == 0 }
+
+ case reflect.Ptr:
+ return func(p unsafe.Pointer) bool { return *(*unsafe.Pointer)(p) == nil }
+
+ case reflect.Interface:
+ return func(p unsafe.Pointer) bool { return (*iface)(p).ptr == nil }
+ }
+
+ return func(unsafe.Pointer) bool { return false }
+}
+
+type iface struct {
+ typ unsafe.Pointer
+ ptr unsafe.Pointer
+}
+
+type slice struct {
+ data unsafe.Pointer
+ len int
+ cap int
+}
+
+type structType struct {
+ fields []structField
+ fieldsIndex map[string]*structField
+ ficaseIndex map[string]*structField
+ typ reflect.Type
+ inlined bool
+}
+
+type structField struct {
+ codec codec
+ offset uintptr
+ empty emptyFunc
+ tag bool
+ omitempty bool
+ json string
+ html string
+ name string
+ typ reflect.Type
+ zero reflect.Value
+ index int
+}
+
+func unmarshalTypeError(b []byte, t reflect.Type) error {
+ return &UnmarshalTypeError{Value: strconv.Quote(prefix(b)), Type: t}
+}
+
+func unmarshalOverflow(b []byte, t reflect.Type) error {
+ return &UnmarshalTypeError{Value: "number " + prefix(b) + " overflows", Type: t}
+}
+
+func unexpectedEOF(b []byte) error {
+ return syntaxError(b, "unexpected end of JSON input")
+}
+
+var syntaxErrorMsgOffset = ^uintptr(0)
+
+func init() {
+ t := reflect.TypeOf(SyntaxError{})
+ for i, n := 0, t.NumField(); i < n; i++ {
+ if f := t.Field(i); f.Type.Kind() == reflect.String {
+ syntaxErrorMsgOffset = f.Offset
+ }
+ }
+}
+
+func syntaxError(b []byte, msg string, args ...interface{}) error {
+ e := new(SyntaxError)
+ i := syntaxErrorMsgOffset
+ if i != ^uintptr(0) {
+ s := "json: " + fmt.Sprintf(msg, args...) + ": " + prefix(b)
+ p := unsafe.Pointer(e)
+ // Hack to set the unexported `msg` field.
+ *(*string)(unsafe.Pointer(uintptr(p) + i)) = s
+ }
+ return e
+}
+
+func inputError(b []byte, t reflect.Type) ([]byte, error) {
+ if len(b) == 0 {
+ return nil, unexpectedEOF(b)
+ }
+ _, r, err := parseValue(b)
+ if err != nil {
+ return r, err
+ }
+ return skipSpaces(r), unmarshalTypeError(b, t)
+}
+
+func objectKeyError(b []byte, err error) ([]byte, error) {
+ if len(b) == 0 {
+ return nil, unexpectedEOF(b)
+ }
+ switch err.(type) {
+ case *UnmarshalTypeError:
+ err = syntaxError(b, "invalid character '%c' looking for beginning of object key", b[0])
+ }
+ return b, err
+}
+
+func prefix(b []byte) string {
+ if len(b) < 32 {
+ return string(b)
+ }
+ return string(b[:32]) + "..."
+}
+
+func intStringsAreSorted(i0, i1 int64) bool {
+ var b0, b1 [32]byte
+ return string(strconv.AppendInt(b0[:0], i0, 10)) < string(strconv.AppendInt(b1[:0], i1, 10))
+}
+
+func uintStringsAreSorted(u0, u1 uint64) bool {
+ var b0, b1 [32]byte
+ return string(strconv.AppendUint(b0[:0], u0, 10)) < string(strconv.AppendUint(b1[:0], u1, 10))
+}
+
+//go:nosplit
+func stringToBytes(s string) []byte {
+ return *(*[]byte)(unsafe.Pointer(&reflect.SliceHeader{ // nolint:govet // from segment's code
+ Data: ((*reflect.StringHeader)(unsafe.Pointer(&s))).Data,
+ Len: len(s),
+ Cap: len(s),
+ }))
+}
+
+var (
+ nullType = reflect.TypeOf(nil)
+ boolType = reflect.TypeOf(false)
+
+ intType = reflect.TypeOf(int(0))
+ int8Type = reflect.TypeOf(int8(0))
+ int16Type = reflect.TypeOf(int16(0))
+ int32Type = reflect.TypeOf(int32(0))
+ int64Type = reflect.TypeOf(int64(0))
+
+ uintType = reflect.TypeOf(uint(0))
+ uint8Type = reflect.TypeOf(uint8(0))
+ uint16Type = reflect.TypeOf(uint16(0))
+ uint32Type = reflect.TypeOf(uint32(0))
+ uint64Type = reflect.TypeOf(uint64(0))
+ uintptrType = reflect.TypeOf(uintptr(0))
+
+ float32Type = reflect.TypeOf(float32(0))
+ float64Type = reflect.TypeOf(float64(0))
+
+ numberType = reflect.TypeOf(json.Number(""))
+ stringType = reflect.TypeOf("")
+ bytesType = reflect.TypeOf(([]byte)(nil))
+ durationType = reflect.TypeOf(time.Duration(0))
+ timeType = reflect.TypeOf(time.Time{})
+ rawMessageType = reflect.TypeOf(RawMessage(nil))
+
+ numberPtrType = reflect.PtrTo(numberType)
+ durationPtrType = reflect.PtrTo(durationType)
+ timePtrType = reflect.PtrTo(timeType)
+ rawMessagePtrType = reflect.PtrTo(rawMessageType)
+
+ sliceInterfaceType = reflect.TypeOf(([]interface{})(nil))
+ mapStringInterfaceType = reflect.TypeOf((map[string]interface{})(nil))
+ mapStringRawMessageType = reflect.TypeOf((map[string]RawMessage)(nil))
+
+ interfaceType = reflect.TypeOf((*interface{})(nil)).Elem()
+ jsonMarshalerType = reflect.TypeOf((*Marshaler)(nil)).Elem()
+ jsonUnmarshalerType = reflect.TypeOf((*Unmarshaler)(nil)).Elem()
+ textMarshalerType = reflect.TypeOf((*encoding.TextMarshaler)(nil)).Elem()
+ textUnmarshalerType = reflect.TypeOf((*encoding.TextUnmarshaler)(nil)).Elem()
+)
+
+// =============================================================================
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// appendDuration appends a human-readable representation of d to b.
+//
+// The function copies the implementation of time.Duration.String but prevents
+// Go from making a dynamic memory allocation on the returned value.
+func appendDuration(b []byte, d time.Duration) []byte {
+ // Largest time is 2540400h10m10.000000000s
+ var buf [32]byte
+ w := len(buf)
+
+ u := uint64(d)
+ neg := d < 0
+ if neg {
+ u = -u
+ }
+
+ if u < uint64(time.Second) {
+ // Special case: if duration is smaller than a second,
+ // use smaller units, like 1.2ms
+ var prec int
+ w--
+ buf[w] = 's'
+ w--
+ switch {
+ case u == 0:
+ return append(b, '0', 's')
+ case u < uint64(time.Microsecond):
+ // print nanoseconds
+ prec = 0
+ buf[w] = 'n'
+ case u < uint64(time.Millisecond):
+ // print microseconds
+ prec = 3
+ // U+00B5 'µ' micro sign == 0xC2 0xB5
+ w-- // Need room for two bytes.
+ copy(buf[w:], "µ")
+ default:
+ // print milliseconds
+ prec = 6
+ buf[w] = 'm'
+ }
+ w, u = fmtFrac(buf[:w], u, prec)
+ w = fmtInt(buf[:w], u)
+ } else {
+ w--
+ buf[w] = 's'
+
+ w, u = fmtFrac(buf[:w], u, 9)
+
+ // u is now integer seconds
+ w = fmtInt(buf[:w], u%60)
+ u /= 60
+
+ // u is now integer minutes
+ if u > 0 {
+ w--
+ buf[w] = 'm'
+ w = fmtInt(buf[:w], u%60)
+ u /= 60
+
+ // u is now integer hours
+ // Stop at hours because days can be different lengths.
+ if u > 0 {
+ w--
+ buf[w] = 'h'
+ w = fmtInt(buf[:w], u)
+ }
+ }
+ }
+
+ if neg {
+ w--
+ buf[w] = '-'
+ }
+
+ return append(b, buf[w:]...)
+}
+
+// fmtFrac formats the fraction of v/10**prec (e.g., ".12345") into the
+// tail of buf, omitting trailing zeros. it omits the decimal
+// point too when the fraction is 0. It returns the index where the
+// output bytes begin and the value v/10**prec.
+func fmtFrac(buf []byte, v uint64, prec int) (nw int, nv uint64) {
+ // Omit trailing zeros up to and including decimal point.
+ w := len(buf)
+ print := false
+ for i := 0; i < prec; i++ {
+ digit := v % 10
+ print = print || digit != 0
+ if print {
+ w--
+ buf[w] = byte(digit) + '0'
+ }
+ v /= 10
+ }
+ if print {
+ w--
+ buf[w] = '.'
+ }
+ return w, v
+}
+
+// fmtInt formats v into the tail of buf.
+// It returns the index where the output begins.
+func fmtInt(buf []byte, v uint64) int {
+ w := len(buf)
+ if v == 0 {
+ w--
+ buf[w] = '0'
+ } else {
+ for v > 0 {
+ w--
+ buf[w] = byte(v%10) + '0'
+ v /= 10
+ }
+ }
+ return w
+}
+
+// =============================================================================
diff --git a/vendor/github.com/neilotoole/jsoncolor/decode.go b/vendor/github.com/neilotoole/jsoncolor/decode.go
new file mode 100644
index 0000000..b30a38f
--- /dev/null
+++ b/vendor/github.com/neilotoole/jsoncolor/decode.go
@@ -0,0 +1,1195 @@
+package jsoncolor
+
+import (
+ "bytes"
+ "encoding"
+ "encoding/base64"
+ "encoding/json"
+ "fmt"
+ "math"
+ "reflect"
+ "strconv"
+ "time"
+ "unsafe"
+)
+
+func (d decoder) decodeNull(b []byte, p unsafe.Pointer) ([]byte, error) {
+ if hasNullPrefix(b) {
+ return b[4:], nil
+ }
+ return inputError(b, nullType)
+}
+
+func (d decoder) decodeBool(b []byte, p unsafe.Pointer) ([]byte, error) {
+ switch {
+ case hasTruePrefix(b):
+ *(*bool)(p) = true
+ return b[4:], nil
+
+ case hasFalsePrefix(b):
+ *(*bool)(p) = false
+ return b[5:], nil
+
+ case hasNullPrefix(b):
+ return b[4:], nil
+
+ default:
+ return inputError(b, boolType)
+ }
+}
+
+func (d decoder) decodeInt(b []byte, p unsafe.Pointer) ([]byte, error) {
+ if hasNullPrefix(b) {
+ return b[4:], nil
+ }
+
+ v, r, err := parseInt(b, intType)
+ if err != nil {
+ return r, err
+ }
+
+ *(*int)(p) = int(v)
+ return r, nil
+}
+
+func (d decoder) decodeInt8(b []byte, p unsafe.Pointer) ([]byte, error) {
+ if hasNullPrefix(b) {
+ return b[4:], nil
+ }
+
+ v, r, err := parseInt(b, int8Type)
+ if err != nil {
+ return r, err
+ }
+
+ if v < math.MinInt8 || v > math.MaxInt8 {
+ return r, unmarshalOverflow(b[:len(b)-len(r)], int8Type)
+ }
+
+ *(*int8)(p) = int8(v)
+ return r, nil
+}
+
+func (d decoder) decodeInt16(b []byte, p unsafe.Pointer) ([]byte, error) {
+ if hasNullPrefix(b) {
+ return b[4:], nil
+ }
+
+ v, r, err := parseInt(b, int16Type)
+ if err != nil {
+ return r, err
+ }
+
+ if v < math.MinInt16 || v > math.MaxInt16 {
+ return r, unmarshalOverflow(b[:len(b)-len(r)], int16Type)
+ }
+
+ *(*int16)(p) = int16(v)
+ return r, nil
+}
+
+func (d decoder) decodeInt32(b []byte, p unsafe.Pointer) ([]byte, error) {
+ if hasNullPrefix(b) {
+ return b[4:], nil
+ }
+
+ v, r, err := parseInt(b, int32Type)
+ if err != nil {
+ return r, err
+ }
+
+ if v < math.MinInt32 || v > math.MaxInt32 {
+ return r, unmarshalOverflow(b[:len(b)-len(r)], int32Type)
+ }
+
+ *(*int32)(p) = int32(v)
+ return r, nil
+}
+
+func (d decoder) decodeInt64(b []byte, p unsafe.Pointer) ([]byte, error) {
+ if hasNullPrefix(b) {
+ return b[4:], nil
+ }
+
+ v, r, err := parseInt(b, int64Type)
+ if err != nil {
+ return r, err
+ }
+
+ *(*int64)(p) = v
+ return r, nil
+}
+
+func (d decoder) decodeUint(b []byte, p unsafe.Pointer) ([]byte, error) {
+ if hasNullPrefix(b) {
+ return b[4:], nil
+ }
+
+ v, r, err := parseUint(b, uintType)
+ if err != nil {
+ return r, err
+ }
+
+ *(*uint)(p) = uint(v)
+ return r, nil
+}
+
+func (d decoder) decodeUintptr(b []byte, p unsafe.Pointer) ([]byte, error) {
+ if hasNullPrefix(b) {
+ return b[4:], nil
+ }
+
+ v, r, err := parseUint(b, uintptrType)
+ if err != nil {
+ return r, err
+ }
+
+ *(*uintptr)(p) = uintptr(v)
+ return r, nil
+}
+
+func (d decoder) decodeUint8(b []byte, p unsafe.Pointer) ([]byte, error) {
+ if hasNullPrefix(b) {
+ return b[4:], nil
+ }
+
+ v, r, err := parseUint(b, uint8Type)
+ if err != nil {
+ return r, err
+ }
+
+ if v > math.MaxUint8 {
+ return r, unmarshalOverflow(b[:len(b)-len(r)], uint8Type)
+ }
+
+ *(*uint8)(p) = uint8(v)
+ return r, nil
+}
+
+func (d decoder) decodeUint16(b []byte, p unsafe.Pointer) ([]byte, error) {
+ if hasNullPrefix(b) {
+ return b[4:], nil
+ }
+
+ v, r, err := parseUint(b, uint16Type)
+ if err != nil {
+ return r, err
+ }
+
+ if v > math.MaxUint16 {
+ return r, unmarshalOverflow(b[:len(b)-len(r)], uint16Type)
+ }
+
+ *(*uint16)(p) = uint16(v)
+ return r, nil
+}
+
+func (d decoder) decodeUint32(b []byte, p unsafe.Pointer) ([]byte, error) {
+ if hasNullPrefix(b) {
+ return b[4:], nil
+ }
+
+ v, r, err := parseUint(b, uint32Type)
+ if err != nil {
+ return r, err
+ }
+
+ if v > math.MaxUint32 {
+ return r, unmarshalOverflow(b[:len(b)-len(r)], uint32Type)
+ }
+
+ *(*uint32)(p) = uint32(v)
+ return r, nil
+}
+
+func (d decoder) decodeUint64(b []byte, p unsafe.Pointer) ([]byte, error) {
+ if hasNullPrefix(b) {
+ return b[4:], nil
+ }
+
+ v, r, err := parseUint(b, uint64Type)
+ if err != nil {
+ return r, err
+ }
+
+ *(*uint64)(p) = v
+ return r, nil
+}
+
+func (d decoder) decodeFloat32(b []byte, p unsafe.Pointer) ([]byte, error) {
+ if hasNullPrefix(b) {
+ return b[4:], nil
+ }
+
+ v, r, err := parseNumber(b)
+ if err != nil {
+ return inputError(b, float32Type)
+ }
+
+ f, err := strconv.ParseFloat(*(*string)(unsafe.Pointer(&v)), 32)
+ if err != nil {
+ return inputError(b, float32Type)
+ }
+
+ *(*float32)(p) = float32(f)
+ return r, nil
+}
+
+func (d decoder) decodeFloat64(b []byte, p unsafe.Pointer) ([]byte, error) {
+ if hasNullPrefix(b) {
+ return b[4:], nil
+ }
+
+ v, r, err := parseNumber(b)
+ if err != nil {
+ return inputError(b, float64Type)
+ }
+
+ f, err := strconv.ParseFloat(*(*string)(unsafe.Pointer(&v)), 64)
+ if err != nil {
+ return inputError(b, float64Type)
+ }
+
+ *(*float64)(p) = f
+ return r, nil
+}
+
+func (d decoder) decodeNumber(b []byte, p unsafe.Pointer) ([]byte, error) {
+ if hasNullPrefix(b) {
+ return b[4:], nil
+ }
+
+ v, r, err := parseNumber(b)
+ if err != nil {
+ return inputError(b, numberType)
+ }
+
+ if (d.flags & DontCopyNumber) != 0 {
+ *(*Number)(p) = *(*Number)(unsafe.Pointer(&v))
+ } else {
+ *(*Number)(p) = Number(v)
+ }
+
+ return r, nil
+}
+
+func (d decoder) decodeString(b []byte, p unsafe.Pointer) ([]byte, error) {
+ if hasNullPrefix(b) {
+ return b[4:], nil
+ }
+
+ s, r, new, err := parseStringUnquote(b, nil)
+ if err != nil {
+ if len(b) == 0 || b[0] != '"' {
+ return inputError(b, stringType)
+ }
+ return r, err
+ }
+
+ if new || (d.flags&DontCopyString) != 0 {
+ *(*string)(p) = *(*string)(unsafe.Pointer(&s))
+ } else {
+ *(*string)(p) = string(s)
+ }
+
+ return r, nil
+}
+
+func (d decoder) decodeFromString(b []byte, p unsafe.Pointer, decode decodeFunc) ([]byte, error) {
+ if hasNullPrefix(b) {
+ return decode(d, b, p)
+ }
+
+ v, b, _, err := parseStringUnquote(b, nil)
+ if err != nil {
+ return inputError(v, stringType)
+ }
+
+ if v, err = decode(d, v, p); err != nil {
+ return b, err
+ }
+
+ if v = skipSpaces(v); len(v) != 0 {
+ return b, syntaxError(v, "unexpected trailing tokens after string value")
+ }
+
+ return b, nil
+}
+
+func (d decoder) decodeFromStringToInt(b []byte, p unsafe.Pointer, t reflect.Type, decode decodeFunc) ([]byte, error) {
+ if hasPrefix(b, "null") {
+ return decode(d, b, p)
+ }
+
+ if len(b) > 0 && b[0] != '"' {
+ v, r, err := parseNumber(b)
+ if err == nil {
+ // The encoding/json package will return a *json.UnmarshalTypeError if
+ // the input was a floating point number representation, even tho a
+ // string is expected here.
+ isFloat := true
+ switch {
+ case bytes.IndexByte(v, '.') >= 0:
+ case bytes.IndexByte(v, 'e') >= 0:
+ case bytes.IndexByte(v, 'E') >= 0:
+ default:
+ isFloat = false
+ }
+ if isFloat {
+ _, err := strconv.ParseFloat(*(*string)(unsafe.Pointer(&v)), 64)
+ if err != nil {
+ return r, unmarshalTypeError(v, t)
+ }
+ }
+ }
+ return r, fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal unquoted value into int")
+ }
+
+ if len(b) > 1 && b[0] == '"' && b[1] == '"' {
+ return b, fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal \"\" into int")
+ }
+
+ v, b, _, err := parseStringUnquote(b, nil)
+ if err != nil {
+ return inputError(v, t)
+ }
+
+ if hasLeadingZeroes(v) {
+ // In this context the encoding/json package accepts leading zeroes because
+ // it is not constrained by the JSON syntax, remove them so the parsing
+ // functions don't return syntax errors.
+ u := make([]byte, 0, len(v))
+ i := 0
+
+ if i < len(v) && v[i] == '-' || v[i] == '+' {
+ u = append(u, v[i])
+ i++
+ }
+
+ for (i+1) < len(v) && v[i] == '0' && '0' <= v[i+1] && v[i+1] <= '9' {
+ i++
+ }
+
+ v = append(u, v[i:]...)
+ }
+
+ if r, err := decode(d, v, p); err != nil {
+ if _, isSyntaxError := err.(*SyntaxError); isSyntaxError {
+ if hasPrefix(v, "-") {
+ // The standard library interprets sequences of '-' characters
+ // as numbers but still returns type errors in this case...
+ return b, unmarshalTypeError(v, t)
+ }
+ return b, fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into int", prefix(v))
+ }
+ // When the input value was a valid number representation we retain the
+ // error returned by the decoder.
+ if _, _, err := parseNumber(v); err != nil {
+ // When the input value valid JSON we mirror the behavior of the
+ // encoding/json package and return a generic error.
+ if _, _, err := parseValue(v); err == nil {
+ return b, fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into int", prefix(v))
+ }
+ }
+ return b, err
+ } else if len(r) != 0 {
+ return r, unmarshalTypeError(v, t)
+ }
+
+ return b, nil
+}
+
+func (d decoder) decodeBytes(b []byte, p unsafe.Pointer) ([]byte, error) {
+ if hasNullPrefix(b) {
+ *(*[]byte)(p) = nil
+ return b[4:], nil
+ }
+
+ if len(b) < 2 {
+ return inputError(b, bytesType)
+ }
+
+ if b[0] != '"' {
+ // Go 1.7- behavior: bytes slices may be decoded from array of integers.
+ if len(b) > 0 && b[0] == '[' {
+ return d.decodeSlice(b, p, 1, bytesType, decoder.decodeUint8)
+ }
+ return inputError(b, bytesType)
+ }
+
+ // The input string contains escaped sequences, we need to parse it before
+ // decoding it to match the encoding/json package behvaior.
+ src, r, _, err := parseStringUnquote(b, nil)
+ if err != nil {
+ return inputError(b, bytesType)
+ }
+
+ dst := make([]byte, base64.StdEncoding.DecodedLen(len(src)))
+
+ n, err := base64.StdEncoding.Decode(dst, src)
+ if err != nil {
+ return r, err
+ }
+
+ *(*[]byte)(p) = dst[:n]
+ return r, nil
+}
+
+func (d decoder) decodeDuration(b []byte, p unsafe.Pointer) ([]byte, error) {
+ if hasNullPrefix(b) {
+ return b[4:], nil
+ }
+
+ // in order to inter-operate with the stdlib, we must be able to interpret
+ // durations passed as integer values. there's some discussion about being
+ // flexible on how durations are formatted, but for the time being, it's
+ // been punted to go2 at the earliest: https://github.com/golang/go/issues/4712
+ if len(b) > 0 && b[0] != '"' {
+ v, r, err := parseInt(b, durationType)
+ if err != nil {
+ return inputError(b, int32Type)
+ }
+
+ if v < math.MinInt64 || v > math.MaxInt64 {
+ return r, unmarshalOverflow(b[:len(b)-len(r)], int32Type)
+ }
+
+ *(*time.Duration)(p) = time.Duration(v)
+ return r, nil
+ }
+
+ if len(b) < 2 || b[0] != '"' {
+ return inputError(b, durationType)
+ }
+
+ i := bytes.IndexByte(b[1:], '"') + 1
+ if i <= 0 {
+ return inputError(b, durationType)
+ }
+
+ s := b[1:i] // trim quotes
+
+ v, err := time.ParseDuration(*(*string)(unsafe.Pointer(&s)))
+ if err != nil {
+ return inputError(b, durationType)
+ }
+
+ *(*time.Duration)(p) = v
+ return b[i+1:], nil
+}
+
+func (d decoder) decodeTime(b []byte, p unsafe.Pointer) ([]byte, error) {
+ if hasNullPrefix(b) {
+ return b[4:], nil
+ }
+
+ if len(b) < 2 || b[0] != '"' {
+ return inputError(b, timeType)
+ }
+
+ i := bytes.IndexByte(b[1:], '"') + 1
+ if i <= 0 {
+ return inputError(b, timeType)
+ }
+
+ s := b[1:i] // trim quotes
+
+ v, err := time.Parse(time.RFC3339Nano, *(*string)(unsafe.Pointer(&s)))
+ if err != nil {
+ return inputError(b, timeType)
+ }
+
+ *(*time.Time)(p) = v
+ return b[i+1:], nil
+}
+
// decodeArray decodes a JSON array from b into the fixed-size Go array
// at p, which has n elements of the given size; decode is invoked for
// each element. A JSON null is consumed but leaves the destination
// untouched. Extra JSON elements beyond n are parsed and discarded,
// matching encoding/json.
func (d decoder) decodeArray(b []byte, p unsafe.Pointer, n int, size uintptr, t reflect.Type, decode decodeFunc) ([]byte, error) {
	if hasNullPrefix(b) {
		return b[4:], nil
	}

	if len(b) < 2 || b[0] != '[' {
		return inputError(b, t)
	}
	b = b[1:]

	var err error
	for i := 0; i < n; i++ {
		b = skipSpaces(b)

		// From the second element on, require a ',' separator; a ']'
		// here means the JSON array is shorter than the Go array, which
		// is fine — the remaining elements keep their values.
		if i != 0 {
			if len(b) == 0 {
				return b, syntaxError(b, "unexpected EOF after array element")
			}
			switch b[0] {
			case ',':
				b = skipSpaces(b[1:])
			case ']':
				return b[1:], nil
			default:
				return b, syntaxError(b, "expected ',' after array element but found '%c'", b[0])
			}
		}

		// Element i lives at p + i*size.
		b, err = decode(d, b, unsafe.Pointer(uintptr(p)+(uintptr(i)*size)))
		if err != nil {
			// Prefix the error path so it reads "t.i.field...".
			if e, ok := err.(*UnmarshalTypeError); ok {
				e.Struct = t.String() + e.Struct
				e.Field = strconv.Itoa(i) + "." + e.Field
			}
			return b, err
		}
	}

	// The encoding/json package ignores extra elements found when decoding into
	// array types (which have a fixed size).
	for {
		b = skipSpaces(b)

		if len(b) == 0 {
			return b, syntaxError(b, "missing closing ']' in array value")
		}

		// NOTE(review): when b[0] is neither ',' nor ']' this falls
		// through to parseValue without requiring a separator — verify
		// against encoding/json strictness for the skipped tail.
		switch b[0] {
		case ',':
			b = skipSpaces(b[1:])
		case ']':
			return b[1:], nil
		}

		_, b, err = parseValue(b)
		if err != nil {
			return b, err
		}
	}
}
+
var (
	// This is a placeholder used to construct non-nil empty slices.
	empty struct{}
)
+
// decodeSlice decodes a JSON array from b into the slice at p, growing
// the backing storage geometrically as elements arrive; decode is
// invoked for each element of the given size. A JSON null stores an
// empty slice header. A non-array input falls back to decodeBytes when
// the element type is byte.
func (d decoder) decodeSlice(b []byte, p unsafe.Pointer, size uintptr, t reflect.Type, decode decodeFunc) ([]byte, error) {
	if hasNullPrefix(b) {
		*(*slice)(p) = slice{}
		return b[4:], nil
	}

	if len(b) < 2 {
		return inputError(b, t)
	}

	if b[0] != '[' {
		// Go 1.7- behavior: fallback to decoding as a []byte if the element
		// type is byte; allow conversions from JSON strings even tho the
		// underlying type implemented unmarshaler interfaces.
		if t.Elem().Kind() == reflect.Uint8 {
			return d.decodeBytes(b, p)
		}
		return inputError(b, t)
	}

	// Keep the start of the array so an element error can reposition b
	// past the whole array value below.
	input := b
	b = b[1:]

	// Reuse the destination's backing storage, resetting its length.
	s := (*slice)(p)
	s.len = 0

	var err error
	for {
		b = skipSpaces(b)

		if len(b) != 0 && b[0] == ']' {
			// Ensure a decoded empty array is non-nil.
			if s.data == nil {
				s.data = unsafe.Pointer(&empty)
			}
			return b[1:], nil
		}

		if s.len != 0 {
			if len(b) == 0 {
				return b, syntaxError(b, "unexpected EOF after array element")
			}
			if b[0] != ',' {
				return b, syntaxError(b, "expected ',' after array element but found '%c'", b[0])
			}
			b = skipSpaces(b[1:])
		}

		// Grow the backing array geometrically (10, 20, 40, ...).
		if s.len == s.cap {
			c := s.cap

			if c == 0 {
				c = 10
			} else {
				c *= 2
			}

			*s = extendSlice(t, s, c)
		}

		b, err = decode(d, b, unsafe.Pointer(uintptr(s.data)+(uintptr(s.len)*size)))
		if err != nil {
			// Reparse the whole array from its start so the returned
			// remainder points past the array value.
			if _, r, err := parseValue(input); err != nil {
				return r, err
			} else {
				b = r
			}
			if e, ok := err.(*UnmarshalTypeError); ok {
				e.Struct = t.String() + e.Struct
				e.Field = strconv.Itoa(s.len) + "." + e.Field
			}
			return b, err
		}

		s.len++
	}
}
+
+func (d decoder) decodeMap(b []byte, p unsafe.Pointer, t, kt, vt reflect.Type, kz, vz reflect.Value, decodeKey, decodeValue decodeFunc) ([]byte, error) {
+ if hasNullPrefix(b) {
+ *(*unsafe.Pointer)(p) = nil
+ return b[4:], nil
+ }
+
+ if len(b) < 2 || b[0] != '{' {
+ return inputError(b, t)
+ }
+ i := 0
+ m := reflect.NewAt(t, p).Elem()
+
+ k := reflect.New(kt).Elem()
+ v := reflect.New(vt).Elem()
+
+ kptr := (*iface)(unsafe.Pointer(&k)).ptr
+ vptr := (*iface)(unsafe.Pointer(&v)).ptr
+ input := b
+
+ if m.IsNil() {
+ m = reflect.MakeMap(t)
+ }
+
+ var err error
+ b = b[1:]
+ for {
+ k.Set(kz)
+ v.Set(vz)
+ b = skipSpaces(b)
+
+ if len(b) != 0 && b[0] == '}' {
+ *(*unsafe.Pointer)(p) = unsafe.Pointer(m.Pointer())
+ return b[1:], nil
+ }
+
+ if i != 0 {
+ if len(b) == 0 {
+ return b, syntaxError(b, "unexpected end of JONS input after object field value")
+ }
+ if b[0] != ',' {
+ return b, syntaxError(b, "expected ',' after object field value but found '%c'", b[0])
+ }
+ b = skipSpaces(b[1:])
+ }
+
+ if hasPrefix(b, "null") {
+ return b, syntaxError(b, "cannot decode object key string from 'null' value")
+ }
+
+ if b, err = decodeKey(d, b, kptr); err != nil {
+ return objectKeyError(b, err)
+ }
+ b = skipSpaces(b)
+
+ if len(b) == 0 {
+ return b, syntaxError(b, "unexpected end of JSON input after object field key")
+ }
+ if b[0] != ':' {
+ return b, syntaxError(b, "expected ':' after object field key but found '%c'", b[0])
+ }
+ b = skipSpaces(b[1:])
+
+ if b, err = decodeValue(d, b, vptr); err != nil {
+ if _, r, err := parseValue(input); err != nil {
+ return r, err
+ } else {
+ b = r
+ }
+ if e, ok := err.(*UnmarshalTypeError); ok {
+ e.Struct = "map[" + kt.String() + "]" + vt.String() + "{" + e.Struct + "}"
+ e.Field = fmt.Sprint(k.Interface()) + "." + e.Field
+ }
+ return b, err
+ }
+
+ m.SetMapIndex(k, v)
+ i++
+ }
+}
+
// decodeMapStringInterface is the specialized fast path of decodeMap
// for map[string]interface{} destinations, avoiding per-entry
// reflection. A JSON null stores a nil map.
func (d decoder) decodeMapStringInterface(b []byte, p unsafe.Pointer) ([]byte, error) {
	if hasNullPrefix(b) {
		*(*unsafe.Pointer)(p) = nil
		return b[4:], nil
	}

	if len(b) < 2 || b[0] != '{' {
		return inputError(b, mapStringInterfaceType)
	}

	i := 0
	m := *(*map[string]interface{})(p)

	if m == nil {
		m = make(map[string]interface{}, 64)
	}

	var err error
	var key string
	var val interface{}
	// Keep the start of the object so a value error can reposition b
	// past the whole object below.
	var input = b

	b = b[1:]
	for {
		key = ""
		val = nil

		b = skipSpaces(b)

		if len(b) != 0 && b[0] == '}' {
			// Store the map header pointer into the destination.
			*(*unsafe.Pointer)(p) = *(*unsafe.Pointer)(unsafe.Pointer(&m))
			return b[1:], nil
		}

		if i != 0 {
			if len(b) == 0 {
				return b, syntaxError(b, "unexpected end of JSON input after object field value")
			}
			if b[0] != ',' {
				return b, syntaxError(b, "expected ',' after object field value but found '%c'", b[0])
			}
			b = skipSpaces(b[1:])
		}

		if hasPrefix(b, "null") {
			return b, syntaxError(b, "cannot decode object key string from 'null' value")
		}

		b, err = d.decodeString(b, unsafe.Pointer(&key))
		if err != nil {
			return objectKeyError(b, err)
		}
		b = skipSpaces(b)

		if len(b) == 0 {
			return b, syntaxError(b, "unexpected end of JSON input after object field key")
		}
		if b[0] != ':' {
			return b, syntaxError(b, "expected ':' after object field key but found '%c'", b[0])
		}
		b = skipSpaces(b[1:])

		b, err = d.decodeInterface(b, unsafe.Pointer(&val))
		if err != nil {
			// Reparse the whole object so the remainder points past it.
			if _, r, err := parseValue(input); err != nil {
				return r, err
			} else {
				b = r
			}
			if e, ok := err.(*UnmarshalTypeError); ok {
				e.Struct = mapStringInterfaceType.String() + e.Struct
				e.Field = key + "." + e.Field
			}
			return b, err
		}

		m[key] = val
		i++
	}
}
+
// decodeMapStringRawMessage is the specialized fast path of decodeMap
// for map[string]RawMessage destinations, capturing each value
// verbatim. A JSON null stores a nil map.
func (d decoder) decodeMapStringRawMessage(b []byte, p unsafe.Pointer) ([]byte, error) {
	if hasNullPrefix(b) {
		*(*unsafe.Pointer)(p) = nil
		return b[4:], nil
	}

	if len(b) < 2 || b[0] != '{' {
		return inputError(b, mapStringRawMessageType)
	}

	i := 0
	m := *(*map[string]RawMessage)(p)

	if m == nil {
		m = make(map[string]RawMessage, 64)
	}

	var err error
	var key string
	var val RawMessage
	// Keep the start of the object so a value error can reposition b
	// past the whole object below.
	var input = b

	b = b[1:]
	for {
		key = ""
		val = nil

		b = skipSpaces(b)

		if len(b) != 0 && b[0] == '}' {
			// Store the map header pointer into the destination.
			*(*unsafe.Pointer)(p) = *(*unsafe.Pointer)(unsafe.Pointer(&m))
			return b[1:], nil
		}

		if i != 0 {
			if len(b) == 0 {
				return b, syntaxError(b, "unexpected end of JSON input after object field value")
			}
			if b[0] != ',' {
				return b, syntaxError(b, "expected ',' after object field value but found '%c'", b[0])
			}
			b = skipSpaces(b[1:])
		}

		if hasPrefix(b, "null") {
			return b, syntaxError(b, "cannot decode object key string from 'null' value")
		}

		b, err = d.decodeString(b, unsafe.Pointer(&key))
		if err != nil {
			return objectKeyError(b, err)
		}
		b = skipSpaces(b)

		if len(b) == 0 {
			return b, syntaxError(b, "unexpected end of JSON input after object field key")
		}
		if b[0] != ':' {
			return b, syntaxError(b, "expected ':' after object field key but found '%c'", b[0])
		}
		b = skipSpaces(b[1:])

		b, err = d.decodeRawMessage(b, unsafe.Pointer(&val))
		if err != nil {
			// Reparse the whole object so the remainder points past it.
			if _, r, err := parseValue(input); err != nil {
				return r, err
			} else {
				b = r
			}
			if e, ok := err.(*UnmarshalTypeError); ok {
				e.Struct = mapStringRawMessageType.String() + e.Struct
				e.Field = key + "." + e.Field
			}
			return b, err
		}

		m[key] = val
		i++
	}
}
+
// decodeStruct decodes a JSON object from b into the struct at p,
// dispatching each field through the precomputed index in st. Field
// names are matched exactly first, then case-insensitively unless
// DontMatchCaseInsensitiveStructFields is set. Unknown fields are
// skipped, or rejected when DisallowUnknownFields is set. A JSON null
// is consumed but leaves the destination untouched.
func (d decoder) decodeStruct(b []byte, p unsafe.Pointer, st *structType) ([]byte, error) {
	if hasNullPrefix(b) {
		return b[4:], nil
	}

	if len(b) < 2 || b[0] != '{' {
		return inputError(b, st.typ)
	}

	var err error
	var k []byte
	var i int

	// memory buffer used to convert short field names to lowercase
	var buf [64]byte
	var key []byte
	// Keep the start of the object so a field error can reposition b
	// past the whole object below.
	var input = b

	b = b[1:]
	for {
		b = skipSpaces(b)

		if len(b) != 0 && b[0] == '}' {
			return b[1:], nil
		}

		if i != 0 {
			if len(b) == 0 {
				return b, syntaxError(b, "unexpected end of JSON input after object field value")
			}
			if b[0] != ',' {
				return b, syntaxError(b, "expected ',' after object field value but found '%c'", b[0])
			}
			b = skipSpaces(b[1:])
		}
		i++

		if hasPrefix(b, "null") {
			return b, syntaxError(b, "cannot decode object key string from 'null' value")
		}

		k, b, _, err = parseStringUnquote(b, nil)
		if err != nil {
			return objectKeyError(b, err)
		}
		b = skipSpaces(b)

		if len(b) == 0 {
			return b, syntaxError(b, "unexpected end of JSON input after object field key")
		}
		if b[0] != ':' {
			return b, syntaxError(b, "expected ':' after object field key but found '%c'", b[0])
		}
		b = skipSpaces(b[1:])

		// Exact-name lookup first; fall back to the case-insensitive
		// index when permitted.
		f := st.fieldsIndex[string(k)]

		if f == nil && (d.flags&DontMatchCaseInsensitiveStructFields) == 0 {
			key = appendToLower(buf[:0], k)
			f = st.ficaseIndex[string(key)]
		}

		if f == nil {
			if (d.flags & DisallowUnknownFields) != 0 {
				return b, fmt.Errorf("json: unknown field %q", k)
			}
			// Unknown field: parse and discard its value.
			if _, b, err = parseValue(b); err != nil {
				return b, err
			}
			continue
		}

		// The field value lives at p + f.offset.
		if b, err = f.codec.decode(d, b, unsafe.Pointer(uintptr(p)+f.offset)); err != nil {
			// Reparse the whole object so the remainder points past it.
			if _, r, err := parseValue(input); err != nil {
				return r, err
			} else {
				b = r
			}
			if e, ok := err.(*UnmarshalTypeError); ok {
				e.Struct = st.typ.String() + e.Struct
				e.Field = string(k) + "." + e.Field
			}
			return b, err
		}
	}
}
+
+func (d decoder) decodeEmbeddedStructPointer(b []byte, p unsafe.Pointer, t reflect.Type, unexported bool, offset uintptr, decode decodeFunc) ([]byte, error) {
+ v := *(*unsafe.Pointer)(p)
+
+ if v == nil {
+ if unexported {
+ return nil, fmt.Errorf("json: cannot set embedded pointer to unexported struct: %s", t)
+ }
+ v = unsafe.Pointer(reflect.New(t).Pointer())
+ *(*unsafe.Pointer)(p) = v
+ }
+
+ return decode(d, b, unsafe.Pointer(uintptr(v)+offset))
+}
+
+func (d decoder) decodePointer(b []byte, p unsafe.Pointer, t reflect.Type, decode decodeFunc) ([]byte, error) {
+ if hasNullPrefix(b) {
+ pp := *(*unsafe.Pointer)(p)
+ if pp != nil && t.Kind() == reflect.Ptr {
+ return decode(d, b, pp)
+ }
+ *(*unsafe.Pointer)(p) = nil
+ return b[4:], nil
+ }
+
+ v := *(*unsafe.Pointer)(p)
+ if v == nil {
+ v = unsafe.Pointer(reflect.New(t).Pointer())
+ *(*unsafe.Pointer)(p) = v
+ }
+
+ return decode(d, b, v)
+}
+
+func (d decoder) decodeInterface(b []byte, p unsafe.Pointer) ([]byte, error) {
+ val := *(*interface{})(p)
+ *(*interface{})(p) = nil
+
+ if t := reflect.TypeOf(val); t != nil && t.Kind() == reflect.Ptr {
+ if v := reflect.ValueOf(val); v.IsNil() || t.Elem().Kind() != reflect.Ptr {
+ // If the destination is nil the only value that is OK to decode is
+ // `null`, and the encoding/json package always nils the destination
+ // interface value in this case.
+ if hasNullPrefix(b) {
+ *(*interface{})(p) = nil
+ return b[4:], nil
+ }
+ }
+
+ b, err := Parse(b, val, d.flags)
+ if err == nil {
+ *(*interface{})(p) = val
+ }
+ return b, err
+ }
+
+ v, b, err := parseValue(b)
+ if err != nil {
+ return b, err
+ }
+
+ switch v[0] {
+ case '{':
+ m := make(map[string]interface{})
+ v, err = d.decodeMapStringInterface(v, unsafe.Pointer(&m))
+ val = m
+
+ case '[':
+ a := make([]interface{}, 0, 10)
+ v, err = d.decodeSlice(v, unsafe.Pointer(&a), unsafe.Sizeof(a[0]), sliceInterfaceType, decoder.decodeInterface)
+ val = a
+
+ case '"':
+ s := ""
+ v, err = d.decodeString(v, unsafe.Pointer(&s))
+ val = s
+
+ case 'n':
+ v, err = d.decodeNull(v, nil)
+ val = nil
+
+ case 't', 'f':
+ x := false
+ v, err = d.decodeBool(v, unsafe.Pointer(&x))
+ val = x
+
+ case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
+ if (d.flags & UseNumber) != 0 {
+ n := Number("")
+ v, err = d.decodeNumber(v, unsafe.Pointer(&n))
+ val = n
+ } else {
+ f := 0.0
+ v, err = d.decodeFloat64(v, unsafe.Pointer(&f))
+ val = f
+ }
+
+ default:
+ return b, syntaxError(v, "expected token but found '%c'", v[0])
+ }
+
+ if err != nil {
+ return b, err
+ }
+
+ if v = skipSpaces(v); len(v) != 0 {
+ return b, syntaxError(v, "unexpected trailing trailing tokens after json value")
+ }
+
+ *(*interface{})(p) = val
+ return b, nil
+}
+
+func (d decoder) decodeMaybeEmptyInterface(b []byte, p unsafe.Pointer, t reflect.Type) ([]byte, error) {
+ if hasNullPrefix(b) {
+ *(*interface{})(p) = nil
+ return b[4:], nil
+ }
+
+ if x := reflect.NewAt(t, p).Elem(); !x.IsNil() {
+ if e := x.Elem(); e.Kind() == reflect.Ptr {
+ return Parse(b, e.Interface(), d.flags)
+ }
+ } else if t.NumMethod() == 0 { // empty interface
+ return Parse(b, (*interface{})(p), d.flags)
+ }
+
+ return d.decodeUnmarshalTypeError(b, p, t)
+}
+
+func (d decoder) decodeUnmarshalTypeError(b []byte, p unsafe.Pointer, t reflect.Type) ([]byte, error) {
+ v, b, err := parseValue(b)
+ if err != nil {
+ return b, err
+ }
+ return b, &UnmarshalTypeError{
+ Value: string(v),
+ Type: t,
+ }
+}
+
+func (d decoder) decodeRawMessage(b []byte, p unsafe.Pointer) ([]byte, error) {
+ v, r, err := parseValue(b)
+ if err != nil {
+ return inputError(b, rawMessageType)
+ }
+
+ if (d.flags & DontCopyRawMessage) == 0 {
+ v = append(make([]byte, 0, len(v)), v...)
+ }
+
+ *(*RawMessage)(p) = json.RawMessage(v)
+ return r, err
+}
+
+func (d decoder) decodeJSONUnmarshaler(b []byte, p unsafe.Pointer, t reflect.Type, pointer bool) ([]byte, error) {
+ v, b, err := parseValue(b)
+ if err != nil {
+ return b, err
+ }
+
+ if len(v) != 0 && v[0] == 'n' { // null
+ return b, nil
+ }
+
+ u := reflect.NewAt(t, p)
+ if !pointer {
+ u = u.Elem()
+ t = t.Elem()
+ }
+ if u.IsNil() {
+ u.Set(reflect.New(t))
+ }
+ return b, u.Interface().(Unmarshaler).UnmarshalJSON(v)
+}
+
// decodeTextUnmarshaler parses the next JSON value; a string is
// unquoted and handed to the destination's UnmarshalText method
// (allocating the destination when it is a nil pointer), a null is
// consumed silently, and any other value kind yields an
// UnmarshalTypeError naming that kind.
func (d decoder) decodeTextUnmarshaler(b []byte, p unsafe.Pointer, t reflect.Type, pointer bool) ([]byte, error) {
	var value string

	v, b, err := parseValue(b)
	if err != nil {
		return b, err
	}
	if len(v) == 0 {
		return inputError(v, t)
	}

	// Dispatch on the first byte of the value.
	switch v[0] {
	case 'n':
		_, _, err := parseNull(v)
		return b, err
	case '"':
		s, _, _, err := parseStringUnquote(v, nil)
		if err != nil {
			return b, err
		}
		u := reflect.NewAt(t, p)
		if !pointer {
			u = u.Elem()
			t = t.Elem()
		}
		if u.IsNil() {
			u.Set(reflect.New(t))
		}
		return b, u.Interface().(encoding.TextUnmarshaler).UnmarshalText(s)
	case '{':
		value = "object"
	case '[':
		value = "array"
	case 't':
		value = "true"
	case 'f':
		value = "false"
	case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
		value = "number"
	}

	// Non-string value: report its kind in the type error.
	return b, &UnmarshalTypeError{Value: value, Type: reflect.PtrTo(t)}
}
diff --git a/vendor/github.com/neilotoole/jsoncolor/encode.go b/vendor/github.com/neilotoole/jsoncolor/encode.go
new file mode 100644
index 0000000..4259352
--- /dev/null
+++ b/vendor/github.com/neilotoole/jsoncolor/encode.go
@@ -0,0 +1,1054 @@
+package jsoncolor
+
+import (
+ "bytes"
+ "encoding"
+ "encoding/base64"
+ "math"
+ "reflect"
+ "sort"
+ "strconv"
+ "sync"
+ "time"
+ "unicode/utf8"
+ "unsafe"
+)
+
// hex holds the lowercase hexadecimal digits used when emitting \u00XX escapes.
const hex = "0123456789abcdef"
+
// The encode* methods below share one shape: each reads a value of the named
// Go type from p and appends its JSON representation to b, delegating any
// colorization to the e.clrs append* helpers.

// encodeNull appends the JSON null literal; p is ignored.
func (e encoder) encodeNull(b []byte, p unsafe.Pointer) ([]byte, error) {
	return e.clrs.appendNull(b), nil
}

// encodeBool appends the bool at p as true/false.
func (e encoder) encodeBool(b []byte, p unsafe.Pointer) ([]byte, error) {
	return e.clrs.appendBool(b, *(*bool)(p)), nil
}

// encodeInt appends the int at p.
func (e encoder) encodeInt(b []byte, p unsafe.Pointer) ([]byte, error) {
	return e.clrs.appendInt64(b, int64(*(*int)(p))), nil
}

// encodeInt8 appends the int8 at p.
func (e encoder) encodeInt8(b []byte, p unsafe.Pointer) ([]byte, error) {
	return e.clrs.appendInt64(b, int64(*(*int8)(p))), nil
}

// encodeInt16 appends the int16 at p.
func (e encoder) encodeInt16(b []byte, p unsafe.Pointer) ([]byte, error) {
	return e.clrs.appendInt64(b, int64(*(*int16)(p))), nil
}

// encodeInt32 appends the int32 at p.
func (e encoder) encodeInt32(b []byte, p unsafe.Pointer) ([]byte, error) {
	return e.clrs.appendInt64(b, int64(*(*int32)(p))), nil
}

// encodeInt64 appends the int64 at p.
func (e encoder) encodeInt64(b []byte, p unsafe.Pointer) ([]byte, error) {
	return e.clrs.appendInt64(b, *(*int64)(p)), nil
}

// encodeUint appends the uint at p.
func (e encoder) encodeUint(b []byte, p unsafe.Pointer) ([]byte, error) {
	return e.clrs.appendUint64(b, uint64(*(*uint)(p))), nil
}

// encodeUintptr appends the uintptr at p as an unsigned decimal.
func (e encoder) encodeUintptr(b []byte, p unsafe.Pointer) ([]byte, error) {
	return e.clrs.appendUint64(b, uint64(*(*uintptr)(p))), nil
}

// encodeUint8 appends the uint8 at p.
func (e encoder) encodeUint8(b []byte, p unsafe.Pointer) ([]byte, error) {
	return e.clrs.appendUint64(b, uint64(*(*uint8)(p))), nil
}

// encodeUint16 appends the uint16 at p.
func (e encoder) encodeUint16(b []byte, p unsafe.Pointer) ([]byte, error) {
	return e.clrs.appendUint64(b, uint64(*(*uint16)(p))), nil
}

// encodeUint32 appends the uint32 at p.
func (e encoder) encodeUint32(b []byte, p unsafe.Pointer) ([]byte, error) {
	return e.clrs.appendUint64(b, uint64(*(*uint32)(p))), nil
}

// encodeUint64 appends the uint64 at p.
func (e encoder) encodeUint64(b []byte, p unsafe.Pointer) ([]byte, error) {
	return e.clrs.appendUint64(b, *(*uint64)(p)), nil
}
+
+func (e encoder) encodeFloat32(b []byte, p unsafe.Pointer) ([]byte, error) {
+ if e.clrs == nil {
+ return e.encodeFloat(b, float64(*(*float32)(p)), 32)
+ }
+
+ b = append(b, e.clrs.Number...)
+ var err error
+ b, err = e.encodeFloat(b, float64(*(*float32)(p)), 32)
+ b = append(b, ansiReset...)
+ return b, err
+}
+
+func (e encoder) encodeFloat64(b []byte, p unsafe.Pointer) ([]byte, error) {
+ if e.clrs == nil {
+ return e.encodeFloat(b, *(*float64)(p), 64)
+ }
+
+ b = append(b, e.clrs.Number...)
+ var err error
+ b, err = e.encodeFloat(b, *(*float64)(p), 64)
+ b = append(b, ansiReset...)
+ return b, err
+}
+
// encodeFloat appends f as a JSON number using ES6-style formatting, matching
// encoding/json. bits (32 or 64) selects the precision used for the
// shortest-round-trip formatting. NaN and infinities are not representable in
// JSON and yield an UnsupportedValueError, leaving b unchanged.
func (e encoder) encodeFloat(b []byte, f float64, bits int) ([]byte, error) {
	switch {
	case math.IsNaN(f):
		return b, &UnsupportedValueError{Value: reflect.ValueOf(f), Str: "NaN"}
	case math.IsInf(f, 0):
		return b, &UnsupportedValueError{Value: reflect.ValueOf(f), Str: "inf"}
	}

	// Convert as if by ES6 number to string conversion.
	// This matches most other JSON generators.
	// See golang.org/issue/6384 and golang.org/issue/14135.
	// Like fmt %g, but the exponent cutoffs are different
	// and exponents themselves are not padded to two digits.
	abs := math.Abs(f)
	fmt := byte('f')
	// Note: Must use float32 comparisons for underlying float32 value to get precise cutoffs right.
	if abs != 0 {
		if bits == 64 && (abs < 1e-6 || abs >= 1e21) || bits == 32 && (float32(abs) < 1e-6 || float32(abs) >= 1e21) {
			fmt = 'e'
		}
	}

	b = strconv.AppendFloat(b, f, fmt, -1, int(bits))

	if fmt == 'e' {
		// clean up e-09 to e-9
		n := len(b)
		if n >= 4 && b[n-4] == 'e' && b[n-3] == '-' && b[n-2] == '0' {
			b[n-2] = b[n-1]
			b = b[:n-1]
		}
	}

	return b, nil
}
+
+func (e encoder) encodeNumber(b []byte, p unsafe.Pointer) ([]byte, error) {
+ n := *(*Number)(p)
+ if n == "" {
+ n = "0"
+ }
+
+ _, _, err := parseNumber(stringToBytes(string(n)))
+ if err != nil {
+ return b, err
+ }
+
+ if e.clrs == nil {
+ return append(b, n...), nil
+ }
+
+ b = append(b, e.clrs.Number...)
+ b = append(b, n...)
+ b = append(b, ansiReset...)
+ return b, nil
+}
+
+func (e encoder) encodeKey(b []byte, p unsafe.Pointer) ([]byte, error) {
+ if e.clrs == nil {
+ return e.doEncodeString(b, p)
+ }
+
+ b = append(b, e.clrs.Key...)
+ var err error
+ b, err = e.doEncodeString(b, p)
+ b = append(b, ansiReset...)
+ return b, err
+}
+
+func (e encoder) encodeString(b []byte, p unsafe.Pointer) ([]byte, error) {
+ if e.clrs == nil {
+ return e.doEncodeString(b, p)
+ }
+
+ b = append(b, e.clrs.String...)
+ var err error
+ b, err = e.doEncodeString(b, p)
+ b = append(b, ansiReset...)
+ return b, err
+}
+
// doEncodeString appends the string at p to b as a quoted, escaped JSON
// string. Escaping matches encoding/json: '"', '\\' and control characters
// are always escaped; '<', '>' and '&' only when the EscapeHTML flag is set;
// invalid UTF-8 bytes become \ufffd; U+2028/U+2029 are always escaped for
// JavaScript compatibility. i marks the start of the pending unescaped run,
// j the scan position; runs are flushed whenever an escape is emitted.
func (e encoder) doEncodeString(b []byte, p unsafe.Pointer) ([]byte, error) {
	s := *(*string)(p)
	i := 0
	j := 0
	escapeHTML := (e.flags & EscapeHTML) != 0

	b = append(b, '"')

	for j < len(s) {
		c := s[j]

		if c >= 0x20 && c <= 0x7f && c != '\\' && c != '"' && (!escapeHTML || (c != '<' && c != '>' && c != '&')) {
			// fast path: most of the time, printable ascii characters are used
			j++
			continue
		}

		switch c {
		case '\\', '"':
			b = append(b, s[i:j]...)
			b = append(b, '\\', c)
			i = j + 1
			j = j + 1
			continue

		case '\n':
			b = append(b, s[i:j]...)
			b = append(b, '\\', 'n')
			i = j + 1
			j = j + 1
			continue

		case '\r':
			b = append(b, s[i:j]...)
			b = append(b, '\\', 'r')
			i = j + 1
			j = j + 1
			continue

		case '\t':
			b = append(b, s[i:j]...)
			b = append(b, '\\', 't')
			i = j + 1
			j = j + 1
			continue

		case '<', '>', '&':
			// Only reachable when escapeHTML is set (fast path covers the rest).
			b = append(b, s[i:j]...)
			b = append(b, `\u00`...)
			b = append(b, hex[c>>4], hex[c&0xF])
			i = j + 1
			j = j + 1
			continue
		}

		// This encodes bytes < 0x20 except for \t, \n and \r.
		if c < 0x20 {
			b = append(b, s[i:j]...)
			b = append(b, `\u00`...)
			b = append(b, hex[c>>4], hex[c&0xF])
			i = j + 1
			j = j + 1
			continue
		}

		r, size := utf8.DecodeRuneInString(s[j:])

		if r == utf8.RuneError && size == 1 {
			// Invalid UTF-8 byte: emit the replacement character.
			b = append(b, s[i:j]...)
			b = append(b, `\ufffd`...)
			i = j + size
			j = j + size
			continue
		}

		switch r {
		case '\u2028', '\u2029':
			// U+2028 is LINE SEPARATOR.
			// U+2029 is PARAGRAPH SEPARATOR.
			// They are both technically valid characters in JSON strings,
			// but don't work in JSONP, which has to be evaluated as JavaScript,
			// and can lead to security holes there. It is valid JSON to
			// escape them, so we do so unconditionally.
			// See http://timelessrepo.com/json-isnt-a-javascript-subset for discussion.
			b = append(b, s[i:j]...)
			b = append(b, `\u202`...)
			b = append(b, hex[r&0xF])
			i = j + size
			j = j + size
			continue
		}

		j += size
	}

	b = append(b, s[i:]...)
	b = append(b, '"')
	return b, nil
}
+
// encodeToString encodes the value at p with encode, then re-encodes that
// output as a quoted JSON string — presumably serving the `,string` struct
// tag (confirm at call sites). The quoted form is appended after the raw
// form in b, then slid down over it, avoiding an intermediate buffer.
func (e encoder) encodeToString(b []byte, p unsafe.Pointer, encode encodeFunc) ([]byte, error) {
	i := len(b)

	b, err := encode(e, b, p)
	if err != nil {
		return b, err
	}

	j := len(b)
	// s aliases the raw output b[i:j]; doEncodeString only reads it, so the
	// alias is safe even if subsequent appends reallocate b.
	s := b[i:]

	if b, err = e.doEncodeString(b, unsafe.Pointer(&s)); err != nil {
		return b, err
	}

	// Move the quoted form down over the raw form and truncate.
	n := copy(b[i:], b[j:])
	return b[:i+n], nil
}
+
+func (e encoder) encodeBytes(b []byte, p unsafe.Pointer) ([]byte, error) {
+ if e.clrs == nil {
+ return e.doEncodeBytes(b, p)
+ }
+
+ b = append(b, e.clrs.Bytes...)
+ var err error
+ b, err = e.doEncodeBytes(b, p)
+ return append(b, ansiReset...), err
+}
+
// doEncodeBytes appends the []byte at p as a quoted, base64
// (StdEncoding) JSON string; a nil slice encodes as null. The buffer is
// grown manually so the base64 encoder can write directly in place.
func (e encoder) doEncodeBytes(b []byte, p unsafe.Pointer) ([]byte, error) {
	v := *(*[]byte)(p)
	if v == nil {
		return e.clrs.appendNull(b), nil
	}

	// n is the encoded length plus two bytes for the surrounding quotes.
	n := base64.StdEncoding.EncodedLen(len(v)) + 2

	// Grow b when there is not enough spare capacity for n more bytes.
	if avail := cap(b) - len(b); avail < n {
		newB := make([]byte, cap(b)+(n-avail))
		copy(newB, b)
		b = newB[:len(b)]
	}

	i := len(b)
	j := len(b) + n

	b = b[:j]
	b[i] = '"'
	base64.StdEncoding.Encode(b[i+1:j-1], v)
	b[j-1] = '"'
	return b, nil
}
+
+func (e encoder) encodeDuration(b []byte, p unsafe.Pointer) ([]byte, error) {
+ // NOTE: The segmentj encoder does special handling for time.Duration (converts to string).
+ // The stdlib encoder does not. It just outputs the int64 value.
+ // We choose to follow the stdlib pattern, for fuller compatibility.
+
+ b = e.clrs.appendInt64(b, int64(*(*time.Duration)(p)))
+ return b, nil
+
+ // NOTE: if we were to follow the segmentj pattern, we'd execute the code below.
+ //if e.clrs == nil {
+ // b = append(b, '"')
+ //
+ // b = appendDuration(b, *(*time.Duration)(p))
+ // b = append(b, '"')
+ // return b, nil
+ //}
+ //
+ //b = append(b, e.clrs.Time...)
+ //b = append(b, '"')
+ //b = appendDuration(b, *(*time.Duration)(p))
+ //b = append(b, '"')
+ //b = append(b, ansiReset...)
+ //return b, nil
+}
+
+func (e encoder) encodeTime(b []byte, p unsafe.Pointer) ([]byte, error) {
+ if e.clrs == nil {
+ t := *(*time.Time)(p)
+ b = append(b, '"')
+ b = t.AppendFormat(b, time.RFC3339Nano)
+ b = append(b, '"')
+ return b, nil
+ }
+
+ t := *(*time.Time)(p)
+ b = append(b, e.clrs.Time...)
+ b = append(b, '"')
+ b = t.AppendFormat(b, time.RFC3339Nano)
+ b = append(b, '"')
+ b = append(b, ansiReset...)
+ return b, nil
+}
+
+func (e encoder) encodeArray(b []byte, p unsafe.Pointer, n int, size uintptr, t reflect.Type, encode encodeFunc) ([]byte, error) {
+ var start = len(b)
+ var err error
+
+ b = e.clrs.appendPunc(b, '[')
+
+ if n > 0 {
+ e.indentr.push()
+ for i := 0; i < n; i++ {
+ if i != 0 {
+ b = e.clrs.appendPunc(b, ',')
+ }
+
+ b = e.indentr.appendByte(b, '\n')
+ b = e.indentr.appendIndent(b)
+
+ if b, err = encode(e, b, unsafe.Pointer(uintptr(p)+(uintptr(i)*size))); err != nil {
+ return b[:start], err
+ }
+ }
+ e.indentr.pop()
+ b = e.indentr.appendByte(b, '\n')
+ b = e.indentr.appendIndent(b)
+ }
+
+ b = e.clrs.appendPunc(b, ']')
+
+ return b, nil
+}
+
+func (e encoder) encodeSlice(b []byte, p unsafe.Pointer, size uintptr, t reflect.Type, encode encodeFunc) ([]byte, error) {
+ s := (*slice)(p)
+
+ if s.data == nil && s.len == 0 && s.cap == 0 {
+ return e.clrs.appendNull(b), nil
+ }
+
+ return e.encodeArray(b, s.data, s.len, size, t, encode)
+}
+
// encodeMap appends the map at p (of type t) as a JSON object, using
// encodeKey and encodeValue for the entries. A nil map encodes as null.
// Keys are sorted when a sortFunc is supplied and SortMapKeys is set.
// On error, everything written for the object is rolled back.
func (e encoder) encodeMap(b []byte, p unsafe.Pointer, t reflect.Type, encodeKey, encodeValue encodeFunc, sortKeys sortFunc) ([]byte, error) {
	m := reflect.NewAt(t, p).Elem()
	if m.IsNil() {
		return e.clrs.appendNull(b), nil
	}

	keys := m.MapKeys()
	if sortKeys != nil && (e.flags&SortMapKeys) != 0 {
		sortKeys(keys)
	}

	var start = len(b)
	var err error
	b = e.clrs.appendPunc(b, '{')

	if len(keys) != 0 {
		b = e.indentr.appendByte(b, '\n')

		e.indentr.push()
		for i, k := range keys {
			v := m.MapIndex(k)

			if i != 0 {
				b = e.clrs.appendPunc(b, ',')
				b = e.indentr.appendByte(b, '\n')
			}

			b = e.indentr.appendIndent(b)
			// NOTE(review): the iface cast reinterprets the reflect.Value's
			// memory to grab its data pointer — layout-dependent; see iface.
			if b, err = encodeKey(e, b, (*iface)(unsafe.Pointer(&k)).ptr); err != nil {
				return b[:start], err
			}

			b = e.clrs.appendPunc(b, ':')
			b = e.indentr.appendByte(b, ' ')

			if b, err = encodeValue(e, b, (*iface)(unsafe.Pointer(&v)).ptr); err != nil {
				return b[:start], err
			}
		}
		b = e.indentr.appendByte(b, '\n')
		e.indentr.pop()
		b = e.indentr.appendIndent(b)
	}

	b = e.clrs.appendPunc(b, '}')
	return b, nil
}
+
// element is one key/value pair of a map being encoded with sorted keys.
// val is used for map[string]interface{} entries, raw for
// map[string]RawMessage entries.
type element struct {
	key string
	val interface{}
	raw RawMessage
}

// mapslice is a collection of elements sortable by key; it implements
// sort.Interface.
type mapslice struct {
	elements []element
}

func (m *mapslice) Len() int { return len(m.elements) }
func (m *mapslice) Less(i, j int) bool { return m.elements[i].key < m.elements[j].key }
func (m *mapslice) Swap(i, j int) { m.elements[i], m.elements[j] = m.elements[j], m.elements[i] }

// mapslicePool recycles mapslice scratch buffers across encode calls.
var mapslicePool = sync.Pool{
	New: func() interface{} { return new(mapslice) },
}
+
// encodeMapStringInterface appends the map[string]interface{} at p as a JSON
// object. A nil map encodes as null. Without SortMapKeys the entries are
// written in Go's arbitrary map iteration order; with it, a pooled scratch
// mapslice is sorted by key first.
func (e encoder) encodeMapStringInterface(b []byte, p unsafe.Pointer) ([]byte, error) {
	m := *(*map[string]interface{})(p)
	if m == nil {
		return e.clrs.appendNull(b), nil
	}

	if (e.flags & SortMapKeys) == 0 {
		// Optimized code path when the program does not need the map keys to be
		// sorted.
		b = e.clrs.appendPunc(b, '{')

		if len(m) != 0 {
			b = e.indentr.appendByte(b, '\n')

			var err error
			var i = 0

			e.indentr.push()
			for k, v := range m {
				if i != 0 {
					b = e.clrs.appendPunc(b, ',')
					b = e.indentr.appendByte(b, '\n')
				}

				b = e.indentr.appendIndent(b)

				b, err = e.encodeKey(b, unsafe.Pointer(&k))
				if err != nil {
					return b, err
				}

				b = e.clrs.appendPunc(b, ':')
				b = e.indentr.appendByte(b, ' ')

				// The value's dynamic type is unknown; re-dispatch via Append.
				b, err = Append(b, v, e.flags, e.clrs, e.indentr)
				if err != nil {
					return b, err
				}

				i++
			}
			b = e.indentr.appendByte(b, '\n')
			e.indentr.pop()
			b = e.indentr.appendIndent(b)
		}

		b = e.clrs.appendPunc(b, '}')
		return b, nil
	}

	// Sorted path: copy the entries into a pooled mapslice and sort by key.
	s := mapslicePool.Get().(*mapslice)
	if cap(s.elements) < len(m) {
		s.elements = make([]element, 0, align(10, uintptr(len(m))))
	}
	for key, val := range m {
		s.elements = append(s.elements, element{key: key, val: val})
	}
	sort.Sort(s)

	var start = len(b)
	var err error
	b = e.clrs.appendPunc(b, '{')

	if len(s.elements) > 0 {
		b = e.indentr.appendByte(b, '\n')

		e.indentr.push()
		for i, elem := range s.elements {
			if i != 0 {
				b = e.clrs.appendPunc(b, ',')
				b = e.indentr.appendByte(b, '\n')
			}

			b = e.indentr.appendIndent(b)

			b, _ = e.encodeKey(b, unsafe.Pointer(&elem.key))
			b = e.clrs.appendPunc(b, ':')
			b = e.indentr.appendByte(b, ' ')

			b, err = Append(b, elem.val, e.flags, e.clrs, e.indentr)
			if err != nil {
				break
			}
		}
		b = e.indentr.appendByte(b, '\n')
		e.indentr.pop()
		b = e.indentr.appendIndent(b)
	}

	// Zero the elements before pooling so values are not retained.
	for i := range s.elements {
		s.elements[i] = element{}
	}

	s.elements = s.elements[:0]
	mapslicePool.Put(s)

	if err != nil {
		return b[:start], err
	}

	b = e.clrs.appendPunc(b, '}')
	return b, nil
}
+
+func (e encoder) encodeMapStringRawMessage(b []byte, p unsafe.Pointer) ([]byte, error) {
+ m := *(*map[string]RawMessage)(p)
+ if m == nil {
+ return e.clrs.appendNull(b), nil
+ }
+
+ if (e.flags & SortMapKeys) == 0 {
+ // Optimized code path when the program does not need the map keys to be
+ // sorted.
+ b = e.clrs.appendPunc(b, '{')
+
+ if len(m) != 0 {
+ b = e.indentr.appendByte(b, '\n')
+
+ var err error
+ var i = 0
+
+ e.indentr.push()
+ for k, v := range m {
+ if i != 0 {
+ b = e.clrs.appendPunc(b, ',')
+ b = e.indentr.appendByte(b, '\n')
+ }
+
+ b = e.indentr.appendIndent(b)
+
+ b, _ = e.encodeKey(b, unsafe.Pointer(&k))
+
+ b = e.clrs.appendPunc(b, ':')
+ b = e.indentr.appendByte(b, ' ')
+
+ b, err = e.encodeRawMessage(b, unsafe.Pointer(&v))
+ if err != nil {
+ break
+ }
+
+ i++
+ }
+ b = e.indentr.appendByte(b, '\n')
+ e.indentr.pop()
+ b = e.indentr.appendIndent(b)
+ }
+
+ b = e.clrs.appendPunc(b, '}')
+ return b, nil
+ }
+
+ s := mapslicePool.Get().(*mapslice)
+ if cap(s.elements) < len(m) {
+ s.elements = make([]element, 0, align(10, uintptr(len(m))))
+ }
+ for key, raw := range m {
+ s.elements = append(s.elements, element{key: key, raw: raw})
+ }
+ sort.Sort(s)
+
+ var start = len(b)
+ var err error
+ b = e.clrs.appendPunc(b, '{')
+
+ if len(s.elements) > 0 {
+ b = e.indentr.appendByte(b, '\n')
+
+ e.indentr.push()
+
+ for i, elem := range s.elements {
+ if i != 0 {
+ b = e.clrs.appendPunc(b, ',')
+ b = e.indentr.appendByte(b, '\n')
+ }
+
+ b = e.indentr.appendIndent(b)
+
+ b, _ = e.encodeKey(b, unsafe.Pointer(&elem.key))
+ b = e.clrs.appendPunc(b, ':')
+ b = e.indentr.appendByte(b, ' ')
+
+ b, err = e.encodeRawMessage(b, unsafe.Pointer(&elem.raw))
+ if err != nil {
+ break
+ }
+ }
+ b = e.indentr.appendByte(b, '\n')
+ e.indentr.pop()
+ b = e.indentr.appendIndent(b)
+ }
+
+ for i := range s.elements {
+ s.elements[i] = element{}
+ }
+
+ s.elements = s.elements[:0]
+ mapslicePool.Put(s)
+
+ if err != nil {
+ return b[:start], err
+ }
+
+ b = e.clrs.appendPunc(b, '}')
+ return b, nil
+}
+
// encodeStruct appends the struct at p as a JSON object, emitting the
// pre-computed fields of st in order. Fields marked omitempty are skipped
// when empty; a field whose encoder reports rollback{} (e.g. a nil embedded
// struct pointer, see encodeEmbeddedStructPointer) is discarded without
// failing the whole encode. Any other error rolls back the entire object.
func (e encoder) encodeStruct(b []byte, p unsafe.Pointer, st *structType) ([]byte, error) {
	var start = len(b)
	var err error
	var k string
	var n int // number of fields actually written

	b = e.clrs.appendPunc(b, '{')

	if len(st.fields) > 0 {
		b = e.indentr.appendByte(b, '\n')
	}

	e.indentr.push()

	for i := range st.fields {
		f := &st.fields[i]
		v := unsafe.Pointer(uintptr(p) + f.offset)

		if f.omitempty && f.empty(v) {
			continue
		}

		if n != 0 {
			b = e.clrs.appendPunc(b, ',')
			b = e.indentr.appendByte(b, '\n')
		}

		// f.html / f.json hold the key in HTML-escaped and plain form,
		// prepared when the structType was built.
		if (e.flags & EscapeHTML) != 0 {
			k = f.html
		} else {
			k = f.json
		}

		// Remember where this field starts so it can be rolled back.
		lengthBeforeKey := len(b)
		b = e.indentr.appendIndent(b)

		if e.clrs == nil {
			b = append(b, k...)
		} else {
			b = append(b, e.clrs.Key...)
			b = append(b, k...)
			b = append(b, ansiReset...)
		}

		b = e.clrs.appendPunc(b, ':')

		b = e.indentr.appendByte(b, ' ')

		if b, err = f.codec.encode(e, b, v); err != nil {
			if err == (rollback{}) {
				// Drop this field entirely and keep encoding the rest.
				b = b[:lengthBeforeKey]
				continue
			}
			return b[:start], err
		}

		n++
	}

	if n > 0 {
		b = e.indentr.appendByte(b, '\n')
	}

	e.indentr.pop()
	b = e.indentr.appendIndent(b)

	b = e.clrs.appendPunc(b, '}')
	return b, nil
}
+
// rollback is a sentinel error used by encodeEmbeddedStructPointer to tell
// encodeStruct to discard the field currently being written (e.g. a nil
// embedded struct pointer) rather than fail the whole encode.
type rollback struct{}

func (rollback) Error() string { return "rollback" }
+
+func (e encoder) encodeEmbeddedStructPointer(b []byte, p unsafe.Pointer, t reflect.Type, unexported bool, offset uintptr, encode encodeFunc) ([]byte, error) {
+ p = *(*unsafe.Pointer)(p)
+ if p == nil {
+ return b, rollback{}
+ }
+ return encode(e, b, unsafe.Pointer(uintptr(p)+offset))
+}
+
+func (e encoder) encodePointer(b []byte, p unsafe.Pointer, t reflect.Type, encode encodeFunc) ([]byte, error) {
+ if p = *(*unsafe.Pointer)(p); p != nil {
+ return encode(e, b, p)
+ }
+ return e.encodeNull(b, nil)
+}
+
// encodeInterface appends the interface{} value at p by re-dispatching
// through Append on its dynamic value.
func (e encoder) encodeInterface(b []byte, p unsafe.Pointer) ([]byte, error) {
	return Append(b, *(*interface{})(p), e.flags, e.clrs, e.indentr)
}

// encodeMaybeEmptyInterface appends the value of static interface type t
// stored at p, using reflection to recover the dynamic value.
func (e encoder) encodeMaybeEmptyInterface(b []byte, p unsafe.Pointer, t reflect.Type) ([]byte, error) {
	return Append(b, reflect.NewAt(t, p).Elem().Interface(), e.flags, e.clrs, e.indentr)
}

// encodeUnsupportedTypeError always fails with an UnsupportedTypeError for t
// — presumably the codec installed for types JSON cannot represent (confirm
// in the codec construction).
func (e encoder) encodeUnsupportedTypeError(b []byte, p unsafe.Pointer, t reflect.Type) ([]byte, error) {
	return b, &UnsupportedTypeError{Type: t}
}
+
// encodeRawMessage encodes a RawMessage to bytes. Unfortunately, this
// implementation has a deficiency: it uses Unmarshal to build an
// object from the RawMessage, which in the case of a struct, results
// in a map being constructed, and thus the order of the keys is not
// guaranteed to be maintained. A superior implementation would decode and
// then re-encode (with color/indentation) the basic JSON tokens on the fly.
// Note also that if TrustRawMessage is set, and the RawMessage is
// invalid JSON (cannot be parsed by Unmarshal), then this function
// falls back to encodeRawMessageNoParseTrusted, which seems to exhibit the
// correct behavior. It's a bit of a mess, but seems to do the trick.
func (e encoder) encodeRawMessage(b []byte, p unsafe.Pointer) ([]byte, error) {
	v := *(*RawMessage)(p)

	if v == nil {
		return e.clrs.appendNull(b), nil
	}

	var s []byte

	if (e.flags & TrustRawMessage) != 0 {
		s = v
	} else {
		// Validate the message before using it.
		var err error
		s, _, err = parseValue(v)
		if err != nil {
			return b, &UnsupportedValueError{Value: reflect.ValueOf(v), Str: err.Error()}
		}
	}

	// Decode into an interface{} and re-encode, which applies color and
	// indentation (at the cost of key ordering; see note above).
	var x interface{}
	if err := Unmarshal(s, &x); err != nil {
		return e.encodeRawMessageNoParseTrusted(b, p)
	}

	return Append(b, x, e.flags, e.clrs, e.indentr)
}
+
// encodeRawMessageNoParseTrusted is a fallback method that is
// used by encodeRawMessage if it fails to parse a trusted RawMessage.
// The (invalid) JSON produced by this method is not colorized.
// This method may have wonky logic or even bugs in it; little effort
// has been expended on it because it's a rarely visited edge case.
func (e encoder) encodeRawMessageNoParseTrusted(b []byte, p unsafe.Pointer) ([]byte, error) {
	v := *(*RawMessage)(p)

	if v == nil {
		return e.clrs.appendNull(b), nil
	}

	var s []byte

	if (e.flags & TrustRawMessage) != 0 {
		s = v
	} else {
		// Validate the message before using it.
		var err error
		s, _, err = parseValue(v)
		if err != nil {
			return b, &UnsupportedValueError{Value: reflect.ValueOf(v), Str: err.Error()}
		}
	}

	// No indentation configured: emit the message (optionally HTML-escaped
	// and compacted) verbatim.
	if e.indentr == nil {
		if (e.flags & EscapeHTML) != 0 {
			return appendCompactEscapeHTML(b, s), nil
		}

		return append(b, s...), nil
	}

	// In order to get the tests inherited from the original segmentio
	// encoder to work, we need to support indentation.

	// This below is sloppy, but seems to work.
	if (e.flags & EscapeHTML) != 0 {
		s = appendCompactEscapeHTML(nil, s)
	}

	// The "prefix" arg to Indent is the current indentation.
	pre := e.indentr.appendIndent(nil)

	buf := &bytes.Buffer{}
	// And now we just make use of the existing Indent function.
	err := Indent(buf, s, string(pre), e.indentr.indent)
	if err != nil {
		return b, err
	}

	s = buf.Bytes()

	return append(b, s...), nil
}
+
+
+// encodeJSONMarshaler suffers from the same defect as encodeRawMessage; it
+// can result in keys being reordered.
+func (e encoder) encodeJSONMarshaler(b []byte, p unsafe.Pointer, t reflect.Type, pointer bool) ([]byte, error) {
+ v := reflect.NewAt(t, p)
+
+ if !pointer {
+ v = v.Elem()
+ }
+
+ switch v.Kind() {
+ case reflect.Ptr, reflect.Interface:
+ if v.IsNil() {
+ return e.clrs.appendNull(b), nil
+ }
+ }
+
+ j, err := v.Interface().(Marshaler).MarshalJSON()
+ if err != nil {
+ return b, err
+ }
+
+ // We effectively delegate to the encodeRawMessage method.
+ return Append(b, RawMessage(j), e.flags, e.clrs, e.indentr)
+}
+
+func (e encoder) encodeTextMarshaler(b []byte, p unsafe.Pointer, t reflect.Type, pointer bool) ([]byte, error) {
+ v := reflect.NewAt(t, p)
+
+ if !pointer {
+ v = v.Elem()
+ }
+
+ switch v.Kind() {
+ case reflect.Ptr, reflect.Interface:
+ if v.IsNil() {
+ return e.clrs.appendNull(b), nil
+ }
+ }
+
+ s, err := v.Interface().(encoding.TextMarshaler).MarshalText()
+ if err != nil {
+ return b, err
+ }
+
+ if e.clrs == nil {
+ return e.doEncodeString(b, unsafe.Pointer(&s))
+ }
+
+ b = append(b, e.clrs.TextMarshaler...)
+ b, err = e.doEncodeString(b, unsafe.Pointer(&s))
+ b = append(b, ansiReset...)
+ return b, err
+}
+
// appendCompactEscapeHTML appends src to dst while minifying it (dropping
// whitespace found outside of string literals) and escaping '<', '>', '&',
// U+2028 and U+2029 inside string literals with \u escapes.
func appendCompactEscapeHTML(dst []byte, src []byte) []byte {
	const hexDigits = "0123456789abcdef"

	flushFrom := 0 // start of the pending run of bytes to copy verbatim
	escaped := false
	inString := false

	for i, c := range src {
		if !inString {
			switch c {
			case '"': // entering a string literal
				inString = true
			case ' ', '\n', '\r', '\t':
				// Inter-token whitespace: flush what precedes it and skip it.
				if flushFrom < i {
					dst = append(dst, src[flushFrom:i]...)
				}
				flushFrom = i + 1
			}
			continue
		}

		if escaped {
			escaped = false
			continue
		}

		switch {
		case c == '\\':
			escaped = true

		case c == '"': // leaving the string literal
			inString = false

		case c == '<' || c == '>' || c == '&':
			if flushFrom < i {
				dst = append(dst, src[flushFrom:i]...)
			}
			dst = append(dst, `\u00`...)
			dst = append(dst, hexDigits[c>>4], hexDigits[c&0xF])
			flushFrom = i + 1

		case c == 0xE2 && i+2 < len(src) && src[i+1] == 0x80 && src[i+2]&^1 == 0xA8:
			// UTF-8 for U+2028 / U+2029 (E2 80 A8 / E2 80 A9).
			if flushFrom < i {
				dst = append(dst, src[flushFrom:i]...)
			}
			dst = append(dst, `\u202`...)
			dst = append(dst, hexDigits[src[i+2]&0xF])
			flushFrom = i + 3
		}
	}

	if flushFrom < len(src) {
		dst = append(dst, src[flushFrom:]...)
	}

	return dst
}
+
// indenter tracks indentation state while writing JSON. push and pop adjust
// the nesting depth; appendIndent emits the prefix plus one indent string
// per level; appendByte emits a single byte (the newlines/spaces that only
// appear in indented output). Every method is a no-op on a nil or disabled
// indenter, so callers never need to check.
type indenter struct {
	disabled bool
	prefix   string
	indent   string
	depth    int
}

// newIndenter returns an indenter using the given prefix and per-level
// indent string. If both are empty, the indenter is disabled and its append
// methods return their input unchanged.
func newIndenter(prefix, indent string) *indenter {
	in := &indenter{prefix: prefix, indent: indent}
	in.disabled = prefix == "" && indent == ""
	return in
}

// push increases the nesting depth by one.
func (in *indenter) push() {
	if in == nil {
		return
	}
	in.depth++
}

// pop decreases the nesting depth by one.
func (in *indenter) pop() {
	if in == nil {
		return
	}
	in.depth--
}

// appendByte appends a to b unless the indenter is nil or disabled.
func (in *indenter) appendByte(b []byte, a byte) []byte {
	if in != nil && !in.disabled {
		b = append(b, a)
	}
	return b
}

// appendIndent appends the prefix followed by depth copies of the indent
// string, unless the indenter is nil or disabled.
func (in *indenter) appendIndent(b []byte) []byte {
	if in == nil || in.disabled {
		return b
	}

	b = append(b, in.prefix...)
	for d := in.depth; d > 0; d-- {
		b = append(b, in.indent...)
	}
	return b
}
diff --git a/vendor/github.com/neilotoole/jsoncolor/json.go b/vendor/github.com/neilotoole/jsoncolor/json.go
new file mode 100644
index 0000000..3dc5b46
--- /dev/null
+++ b/vendor/github.com/neilotoole/jsoncolor/json.go
@@ -0,0 +1,459 @@
+package jsoncolor
+
+import (
+ "bytes"
+ "encoding/json"
+ "io"
+ "reflect"
+ "runtime"
+ "sync"
+ "unsafe"
+)
+
// The declarations below are type aliases for the corresponding types in the
// standard library's encoding/json package, so values of these types are
// fully interchangeable between the two packages.

// Delim is documented at https://golang.org/pkg/encoding/json/#Delim
type Delim = json.Delim

// InvalidUTF8Error is documented at https://golang.org/pkg/encoding/json/#InvalidUTF8Error
type InvalidUTF8Error = json.InvalidUTF8Error

// InvalidUnmarshalError is documented at https://golang.org/pkg/encoding/json/#InvalidUnmarshalError
type InvalidUnmarshalError = json.InvalidUnmarshalError

// Marshaler is documented at https://golang.org/pkg/encoding/json/#Marshaler
type Marshaler = json.Marshaler

// MarshalerError is documented at https://golang.org/pkg/encoding/json/#MarshalerError
type MarshalerError = json.MarshalerError

// Number is documented at https://golang.org/pkg/encoding/json/#Number
type Number = json.Number

// RawMessage is documented at https://golang.org/pkg/encoding/json/#RawMessage
type RawMessage = json.RawMessage

// A SyntaxError is a description of a JSON syntax error.
type SyntaxError = json.SyntaxError

// Token is documented at https://golang.org/pkg/encoding/json/#Token
type Token = json.Token

// UnmarshalFieldError is documented at https://golang.org/pkg/encoding/json/#UnmarshalFieldError
type UnmarshalFieldError = json.UnmarshalFieldError

// UnmarshalTypeError is documented at https://golang.org/pkg/encoding/json/#UnmarshalTypeError
type UnmarshalTypeError = json.UnmarshalTypeError

// Unmarshaler is documented at https://golang.org/pkg/encoding/json/#Unmarshaler
type Unmarshaler = json.Unmarshaler

// UnsupportedTypeError is documented at https://golang.org/pkg/encoding/json/#UnsupportedTypeError
type UnsupportedTypeError = json.UnsupportedTypeError

// UnsupportedValueError is documented at https://golang.org/pkg/encoding/json/#UnsupportedValueError
type UnsupportedValueError = json.UnsupportedValueError
+
// AppendFlags is a type used to represent configuration options that can be
// applied when formatting json output. Flags are combined with bitwise OR.
type AppendFlags int

const (
	// EscapeHTML is a formatting flag used to escape HTML in json strings.
	EscapeHTML AppendFlags = 1 << iota

	// SortMapKeys is formatting flag used to enable sorting of map keys when
	// encoding JSON (this matches the behavior of the standard encoding/json
	// package).
	SortMapKeys

	// TrustRawMessage is a performance optimization flag to skip value
	// checking of raw messages. It should only be used if the values are
	// known to be valid json (e.g., they were created by json.Unmarshal).
	TrustRawMessage
)
+
// ParseFlags is a type used to represent configuration options that can be
// applied when parsing json input. Flags are combined with bitwise OR.
type ParseFlags int

const (
	// DisallowUnknownFields is a parsing flag used to prevent decoding of
	// objects to Go struct values when a field of the input does not match
	// with any of the struct fields.
	DisallowUnknownFields ParseFlags = 1 << iota

	// UseNumber is a parsing flag used to load numeric values as Number
	// instead of float64.
	UseNumber

	// DontCopyString is a parsing flag used to provide zero-copy support when
	// loading string values from a json payload. It is not always possible to
	// avoid dynamic memory allocations, for example when a string is escaped in
	// the json data a new buffer has to be allocated, but when the `wire` value
	// can be used as content of a Go value the decoder will simply point into
	// the input buffer.
	DontCopyString

	// DontCopyNumber is a parsing flag used to provide zero-copy support when
	// loading Number values (see DontCopyString and DontCopyRawMessage).
	DontCopyNumber

	// DontCopyRawMessage is a parsing flag used to provide zero-copy support
	// when loading RawMessage values from a json payload. When used, the
	// RawMessage values will not be allocated into new memory buffers and
	// will instead point directly to the area of the input buffer where the
	// value was found.
	DontCopyRawMessage

	// DontMatchCaseInsensitiveStructFields is a parsing flag used to prevent
	// matching fields in a case-insensitive way. This can prevent degrading
	// performance on case conversions, and can also act as a stricter decoding
	// mode.
	DontMatchCaseInsensitiveStructFields

	// ZeroCopy is a parsing flag that combines all the copy optimizations
	// available in the package.
	//
	// The zero-copy optimizations are better used in request-handler style
	// code where none of the values are retained after the handler returns:
	// decoded values alias the input buffer and become invalid once it is
	// reused.
	ZeroCopy = DontCopyString | DontCopyNumber | DontCopyRawMessage
)
+
// Append acts like Marshal but appends the json representation to b instead of
// always reallocating a new slice. clrs and indentr may be nil, producing
// uncolored, unindented output.
func Append(b []byte, x interface{}, flags AppendFlags, clrs *Colors, indentr *indenter) ([]byte, error) {
	if x == nil {
		// Special case for nil values because it makes the rest of the code
		// simpler to assume that it won't be seeing nil pointers.
		return clrs.appendNull(b), nil
	}

	t := reflect.TypeOf(x)
	// Pull the data pointer straight out of the interface header; the codec
	// machinery works on raw pointers rather than reflect.Value.
	p := (*iface)(unsafe.Pointer(&x)).ptr

	cache := cacheLoad()
	c, found := cache[typeid(t)]

	if !found {
		// Slow path: build the codec for this type and add it to the cache.
		c = constructCachedCodec(t, cache)
	}

	b, err := c.encode(encoder{flags: flags, clrs: clrs, indentr: indentr}, b, p)
	// p aliases x's data but is invisible to the GC; keep x alive until the
	// encoder is done with it.
	runtime.KeepAlive(x)
	return b, err
}
+
// Compact is documented at https://golang.org/pkg/encoding/json/#Compact
// It delegates directly to the standard library.
func Compact(dst *bytes.Buffer, src []byte) error {
	return json.Compact(dst, src)
}

// HTMLEscape is documented at https://golang.org/pkg/encoding/json/#HTMLEscape
// It delegates directly to the standard library.
func HTMLEscape(dst *bytes.Buffer, src []byte) {
	json.HTMLEscape(dst, src)
}

// Indent is documented at https://golang.org/pkg/encoding/json/#Indent
// It delegates directly to the standard library.
func Indent(dst *bytes.Buffer, src []byte, prefix, indent string) error {
	return json.Indent(dst, src, prefix, indent)
}
+
+// Marshal is documented at https://golang.org/pkg/encoding/json/#Marshal
+func Marshal(x interface{}) ([]byte, error) {
+ var err error
+ var buf = encoderBufferPool.Get().(*encoderBuffer)
+
+ if buf.data, err = Append(buf.data[:0], x, EscapeHTML|SortMapKeys, nil, nil); err != nil {
+ return nil, err
+ }
+
+ b := make([]byte, len(buf.data))
+ copy(b, buf.data)
+ encoderBufferPool.Put(buf)
+ return b, nil
+}
+
+// MarshalIndent is documented at https://golang.org/pkg/encoding/json/#MarshalIndent
+func MarshalIndent(x interface{}, prefix, indent string) ([]byte, error) {
+ b, err := Marshal(x)
+
+ if err == nil {
+ tmp := &bytes.Buffer{}
+ tmp.Grow(2 * len(b))
+
+ if err = Indent(tmp, b, prefix, indent); err != nil {
+ return b, err
+ }
+
+ b = tmp.Bytes()
+ }
+
+ return b, err
+}
+
// Unmarshal is documented at https://golang.org/pkg/encoding/json/#Unmarshal
func Unmarshal(b []byte, x interface{}) error {
	r, err := Parse(b, x, 0)
	// r holds whatever input remained after the top-level value; anything
	// left over is an error.
	if len(r) != 0 {
		if _, ok := err.(*SyntaxError); !ok {
			// The encoding/json package prioritizes reporting errors caused by
			// unexpected trailing bytes over other issues; here we emulate this
			// behavior by overriding the error.
			err = syntaxError(r, "invalid character '%c' after top-level value", r[0])
		}
	}
	return err
}
+
// Parse behaves like Unmarshal but the caller can pass a set of flags to
// configure the parsing behavior. It returns the unconsumed remainder of b
// along with any error.
func Parse(b []byte, x interface{}, flags ParseFlags) ([]byte, error) {
	t := reflect.TypeOf(x)
	p := (*iface)(unsafe.Pointer(&x)).ptr

	if t == nil || p == nil || t.Kind() != reflect.Ptr {
		// x is not a usable destination. Still syntax-check the input so a
		// syntax error takes precedence over the unmarshal error, matching
		// encoding/json.
		_, r, err := parseValue(skipSpaces(b))
		r = skipSpaces(r)
		if err != nil {
			return r, err
		}
		return r, &InvalidUnmarshalError{Type: t}
	}
	t = t.Elem()

	cache := cacheLoad()
	c, found := cache[typeid(t)]

	if !found {
		// Slow path: build and cache the codec for the destination type.
		c = constructCachedCodec(t, cache)
	}

	r, err := c.decode(decoder{flags: flags}, skipSpaces(b), p)
	return skipSpaces(r), err
}
+
+// Valid is documented at https://golang.org/pkg/encoding/json/#Valid
+func Valid(data []byte) bool {
+ _, data, err := parseValue(skipSpaces(data))
+ if err != nil {
+ return false
+ }
+ return len(skipSpaces(data)) == 0
+}
+
// Decoder is documented at https://golang.org/pkg/encoding/json/#Decoder
type Decoder struct {
	reader      io.Reader  // source of JSON input
	buffer      []byte     // owned read buffer, grown as needed
	remain      []byte     // unparsed tail of buffer (leading spaces stripped)
	inputOffset int64      // byte offset consumed from reader, see InputOffset
	err         error      // sticky read error, surfaced once remain drains
	flags       ParseFlags // options applied on each Decode
}

// NewDecoder is documented at https://golang.org/pkg/encoding/json/#NewDecoder
func NewDecoder(r io.Reader) *Decoder { return &Decoder{reader: r} }
+
// Buffered is documented at https://golang.org/pkg/encoding/json/#Decoder.Buffered
// The returned reader yields the data already read from the underlying
// reader but not yet consumed by Decode.
func (dec *Decoder) Buffered() io.Reader {
	return bytes.NewReader(dec.remain)
}
+
+// Decode is documented at https://golang.org/pkg/encoding/json/#Decoder.Decode
+func (dec *Decoder) Decode(v interface{}) error {
+ raw, err := dec.readValue()
+ if err != nil {
+ return err
+ }
+ _, err = Parse(raw, v, dec.flags)
+ return err
+}
+
const (
	// minBufferSize is the initial capacity of the decoder's read buffer.
	minBufferSize = 32768
	// minReadSize is the smallest free space kept available for a read;
	// below this the buffer is doubled before reading.
	minReadSize = 4096
)
+
// readValue reads one JSON value from the buffer and returns its raw bytes. It
// is optimized for the "one JSON value per line" case.
//
// The loop alternates between parsing whatever is buffered in dec.remain and
// refilling dec.buffer from the underlying reader when a value is incomplete.
func (dec *Decoder) readValue() (v []byte, err error) {
	var n int
	var r []byte

	for {
		if len(dec.remain) != 0 {
			v, r, err = parseValue(dec.remain)
			if err == nil {
				// Complete value: advance past it and any trailing spaces.
				dec.remain, n = skipSpacesN(r)
				dec.inputOffset += int64(len(v) + n)
				return
			}
			if len(r) != 0 {
				// Parsing of the next JSON value stopped at a position other
				// than the end of the input buffer, which indicates that a
				// syntax error was encountered.
				return
			}
			// Otherwise the buffered data was merely truncated; fall through
			// and read more input.
		}

		if err = dec.err; err != nil {
			// A previous read failed; with partial data still buffered, EOF
			// becomes ErrUnexpectedEOF because the value is incomplete.
			if len(dec.remain) != 0 && err == io.EOF {
				err = io.ErrUnexpectedEOF
			}
			return
		}

		if dec.buffer == nil {
			dec.buffer = make([]byte, 0, minBufferSize)
		} else {
			// Slide the unparsed tail to the front of the buffer.
			dec.buffer = dec.buffer[:copy(dec.buffer[:cap(dec.buffer)], dec.remain)]
			dec.remain = nil
		}

		if (cap(dec.buffer) - len(dec.buffer)) < minReadSize {
			// Not enough free space for a useful read; double the buffer.
			buf := make([]byte, len(dec.buffer), 2*cap(dec.buffer))
			copy(buf, dec.buffer)
			dec.buffer = buf
		}

		n, err = io.ReadFull(dec.reader, dec.buffer[len(dec.buffer):cap(dec.buffer)])
		if n > 0 {
			dec.buffer = dec.buffer[:len(dec.buffer)+n]
			if err != nil {
				// Defer reporting a partial-read error until the buffered
				// data has been consumed.
				err = nil
			}
		} else if err == io.ErrUnexpectedEOF {
			// ReadFull reports a zero-byte read at EOF this way; normalize.
			err = io.EOF
		}
		dec.remain, n = skipSpacesN(dec.buffer)
		dec.inputOffset += int64(n)
		dec.err = err
	}
}
+
// DisallowUnknownFields is documented at https://golang.org/pkg/encoding/json/#Decoder.DisallowUnknownFields
func (dec *Decoder) DisallowUnknownFields() { dec.flags |= DisallowUnknownFields }

// UseNumber is documented at https://golang.org/pkg/encoding/json/#Decoder.UseNumber
func (dec *Decoder) UseNumber() { dec.flags |= UseNumber }

// DontCopyString is an extension to the standard encoding/json package
// which instructs the decoder to not copy strings loaded from the json
// payloads when possible.
func (dec *Decoder) DontCopyString() { dec.flags |= DontCopyString }

// DontCopyNumber is an extension to the standard encoding/json package
// which instructs the decoder to not copy numbers loaded from the json
// payloads.
func (dec *Decoder) DontCopyNumber() { dec.flags |= DontCopyNumber }

// DontCopyRawMessage is an extension to the standard encoding/json package
// which instructs the decoder to not allocate RawMessage values in separate
// memory buffers (see the documentation of the DontCopyRawMessage flag for
// more details).
func (dec *Decoder) DontCopyRawMessage() { dec.flags |= DontCopyRawMessage }

// DontMatchCaseInsensitiveStructFields is an extension to the standard
// encoding/json package which instructs the decoder to not match object fields
// against struct fields in a case-insensitive way, the field names have to
// match exactly to be decoded into the struct field values.
func (dec *Decoder) DontMatchCaseInsensitiveStructFields() {
	dec.flags |= DontMatchCaseInsensitiveStructFields
}

// ZeroCopy is an extension to the standard encoding/json package which enables
// all the copy optimizations of the decoder.
func (dec *Decoder) ZeroCopy() { dec.flags |= ZeroCopy }
+
// InputOffset returns the input stream byte offset of the current decoder position.
// The offset gives the location of the end of the most recently returned token
// and the beginning of the next token.
func (dec *Decoder) InputOffset() int64 {
	return dec.inputOffset
}
+
// Encoder is documented at https://golang.org/pkg/encoding/json/#Encoder
type Encoder struct {
	writer  io.Writer     // destination for encoded output
	buffer  *bytes.Buffer // NOTE(review): appears unused by the visible methods — confirm
	err     error         // sticky error; once set, Encode fails fast
	flags   AppendFlags   // formatting options, see SetEscapeHTML etc.
	clrs    *Colors       // optional colorization; nil means plain output
	indentr *indenter     // optional indentation; nil means compact output
}

// NewEncoder is documented at https://golang.org/pkg/encoding/json/#NewEncoder
func NewEncoder(w io.Writer) *Encoder { return &Encoder{writer: w, flags: EscapeHTML | SortMapKeys} }

// SetColors sets the colors for the encoder to use. A nil value disables
// colorization.
func (enc *Encoder) SetColors(c *Colors) {
	enc.clrs = c
}
+
+// Encode is documented at https://golang.org/pkg/encoding/json/#Encoder.Encode
+func (enc *Encoder) Encode(v interface{}) error {
+ if enc.err != nil {
+ return enc.err
+ }
+
+ var err error
+ var buf = encoderBufferPool.Get().(*encoderBuffer)
+
+ // Note: unlike the original segmentio encoder, indentation is
+ // performed via the Append function.
+ buf.data, err = Append(buf.data[:0], v, enc.flags, enc.clrs, enc.indentr)
+ if err != nil {
+ encoderBufferPool.Put(buf)
+ return err
+ }
+
+ buf.data = append(buf.data, '\n')
+ b := buf.data
+
+ if _, err := enc.writer.Write(b); err != nil {
+ enc.err = err
+ }
+
+ encoderBufferPool.Put(buf)
+ return err
+}
+
+// SetEscapeHTML is documented at https://golang.org/pkg/encoding/json/#Encoder.SetEscapeHTML
+func (enc *Encoder) SetEscapeHTML(on bool) {
+ if on {
+ enc.flags |= EscapeHTML
+ } else {
+ enc.flags &= ^EscapeHTML
+ }
+}
+
// SetIndent is documented at https://golang.org/pkg/encoding/json/#Encoder.SetIndent
// Passing two empty strings yields a disabled indenter (compact output).
func (enc *Encoder) SetIndent(prefix, indent string) {
	enc.indentr = newIndenter(prefix, indent)
}
+
+// SetSortMapKeys is an extension to the standard encoding/json package which
+// allows the program to toggle sorting of map keys on and off.
+func (enc *Encoder) SetSortMapKeys(on bool) {
+ if on {
+ enc.flags |= SortMapKeys
+ } else {
+ enc.flags &= ^SortMapKeys
+ }
+}
+
+// SetTrustRawMessage skips value checking when encoding a raw json message. It should only
+// be used if the values are known to be valid json, e.g. because they were originally created
+// by json.Unmarshal.
+func (enc *Encoder) SetTrustRawMessage(on bool) {
+ if on {
+ enc.flags |= TrustRawMessage
+ } else {
+ enc.flags &= ^TrustRawMessage
+ }
+}
+
// encoderBufferPool recycles scratch buffers across Marshal and
// Encoder.Encode calls, avoiding a fresh allocation per operation.
var encoderBufferPool = sync.Pool{
	New: func() interface{} { return &encoderBuffer{data: make([]byte, 0, 4096)} },
}

// encoderBuffer wraps the reusable byte slice stored in encoderBufferPool.
type encoderBuffer struct{ data []byte }
diff --git a/vendor/github.com/neilotoole/jsoncolor/jsoncolor.go b/vendor/github.com/neilotoole/jsoncolor/jsoncolor.go
new file mode 100644
index 0000000..4c8835c
--- /dev/null
+++ b/vendor/github.com/neilotoole/jsoncolor/jsoncolor.go
@@ -0,0 +1,141 @@
+package jsoncolor
+
+import (
+ "strconv"
+)
+
// Colors specifies colorization of JSON output. Each field
// is a Color, which is simply the bytes of the terminal color code.
// A nil *Colors disables colorization entirely.
type Colors struct {
	// Null is the color for JSON nil.
	Null Color

	// Bool is the color for boolean values.
	Bool Color

	// Number is the color for number values.
	Number Color

	// String is the color for string values.
	String Color

	// Key is the color for JSON keys.
	Key Color

	// Bytes is the color for byte data.
	Bytes Color

	// Time is the color for datetime values.
	Time Color

	// Punc is the color for JSON punctuation: []{},: etc.
	Punc Color

	// TextMarshaler is the color for values implementing encoding.TextMarshaler.
	TextMarshaler Color
}

// appendColored writes clr, then the literal s, then the ANSI reset code.
func appendColored(b []byte, clr Color, s string) []byte {
	b = append(b, clr...)
	b = append(b, s...)
	return append(b, ansiReset...)
}

// appendNull appends a colorized "null" to b.
func (c *Colors) appendNull(b []byte) []byte {
	if c == nil {
		return append(b, "null"...)
	}
	return appendColored(b, c.Null, "null")
}

// appendBool appends the colorized bool v to b.
func (c *Colors) appendBool(b []byte, v bool) []byte {
	lit := "false"
	if v {
		lit = "true"
	}
	if c == nil {
		return append(b, lit...)
	}
	return appendColored(b, c.Bool, lit)
}

// appendKey appends the colorized key v to b.
func (c *Colors) appendKey(b, v []byte) []byte {
	if c == nil {
		return append(b, v...)
	}
	b = append(b, c.Key...)
	b = append(b, v...)
	return append(b, ansiReset...)
}

// appendInt64 appends the colorized int64 v to b.
func (c *Colors) appendInt64(b []byte, v int64) []byte {
	if c == nil {
		return strconv.AppendInt(b, v, 10)
	}
	b = append(b, c.Number...)
	b = strconv.AppendInt(b, v, 10)
	return append(b, ansiReset...)
}

// appendUint64 appends the colorized uint64 v to b.
func (c *Colors) appendUint64(b []byte, v uint64) []byte {
	if c == nil {
		return strconv.AppendUint(b, v, 10)
	}
	b = append(b, c.Number...)
	b = strconv.AppendUint(b, v, 10)
	return append(b, ansiReset...)
}

// appendPunc appends the colorized punctuation mark v to b.
func (c *Colors) appendPunc(b []byte, v byte) []byte {
	if c == nil {
		return append(b, v)
	}
	b = append(b, c.Punc...)
	b = append(b, v)
	return append(b, ansiReset...)
}

// Color is used to render terminal colors. In effect, Color is
// the bytes of the ANSI prefix code. The zero value is valid (results in
// no colorization). When Color is non-zero, the encoder writes the prefix,
// then the actual value, then the ANSI reset code.
//
// Example value:
//
//	number := Color("\x1b[36m")
type Color []byte

// ansiReset is the ANSI reset escape code.
const ansiReset = "\x1b[0m"

// DefaultColors returns the default Colors configuration.
// These colors largely follow jq's default colorization,
// with some deviation.
func DefaultColors() *Colors {
	return &Colors{
		Null:          Color("\x1b[2m"),
		Bool:          Color("\x1b[1m"),
		Number:        Color("\x1b[36m"),
		String:        Color("\x1b[32m"),
		Key:           Color("\x1b[34;1m"),
		Bytes:         Color("\x1b[2m"),
		Time:          Color("\x1b[32;2m"),
		Punc:          Color{},           // No colorization
		TextMarshaler: Color("\x1b[32m"), // Same as String
	}
}
diff --git a/vendor/github.com/neilotoole/jsoncolor/parse.go b/vendor/github.com/neilotoole/jsoncolor/parse.go
new file mode 100644
index 0000000..0f43b4c
--- /dev/null
+++ b/vendor/github.com/neilotoole/jsoncolor/parse.go
@@ -0,0 +1,735 @@
+package jsoncolor
+
+import (
+ "bytes"
+ "math"
+ "reflect"
+ "unicode"
+ "unicode/utf16"
+ "unicode/utf8"
+)
+
// All space characters defined in the json specification.
const (
	sp = ' '
	ht = '\t'
	nl = '\n'
	cr = '\r'
)

// String escape and delimiter bytes.
const (
	escape = '\\'
	quote  = '"' //nolint:varcheck // from original code
)
+
// skipSpaces returns b with any leading JSON whitespace removed.
func skipSpaces(b []byte) []byte {
	s, _ := skipSpacesN(b)
	return s
}

// skipSpacesN returns b with leading JSON whitespace removed, along with
// the number of bytes skipped. When b is empty or all whitespace it
// returns a nil slice and a count of zero.
func skipSpacesN(b []byte) ([]byte, int) {
	for i, c := range b {
		if c == ' ' || c == '\t' || c == '\n' || c == '\r' {
			continue
		}
		return b[i:], i
	}
	return nil, 0
}
+
// parseInt parses a decimal representation of an int64 from b.
//
// The function is equivalent to calling strconv.ParseInt(string(b), 10, 64) but
// it prevents Go from making a memory allocation for converting a byte slice to
// a string (escape analysis fails due to the error returned by strconv.ParseInt).
//
// Because it only works with base 10 the function is also significantly faster
// than strconv.ParseInt.
func parseInt(b []byte, t reflect.Type) (int64, []byte, error) {
	var value int64
	var count int

	if len(b) == 0 {
		return 0, b, syntaxError(b, "cannot decode integer from an empty input")
	}

	if b[0] == '-' {
		// Negative values accumulate in the negative range so that
		// math.MinInt64 (whose absolute value overflows int64) parses
		// correctly.
		const max = math.MinInt64
		const lim = max / 10

		if len(b) == 1 {
			return 0, b, syntaxError(b, "cannot decode integer from '-'")
		}

		if len(b) > 2 && b[1] == '0' && '0' <= b[2] && b[2] <= '9' {
			return 0, b, syntaxError(b, "invalid leading character '0' in integer")
		}

		for _, d := range b[1:] {
			if !(d >= '0' && d <= '9') {
				if count == 0 {
					// No digits at all after the sign.
					b, err := inputError(b, t)
					return 0, b, err
				}
				break
			}

			if value < lim {
				return 0, b, unmarshalOverflow(b, t)
			}

			value *= 10
			x := int64(d - '0')

			if value < (max + x) {
				return 0, b, unmarshalOverflow(b, t)
			}

			value -= x
			count++
		}

		// Account for the leading '-' so count indexes past the whole token.
		count++
	} else {
		const max = math.MaxInt64
		const lim = max / 10

		if len(b) > 1 && b[0] == '0' && '0' <= b[1] && b[1] <= '9' {
			return 0, b, syntaxError(b, "invalid leading character '0' in integer")
		}

		for _, d := range b {
			if !(d >= '0' && d <= '9') {
				if count == 0 {
					b, err := inputError(b, t)
					return 0, b, err
				}
				break
			}
			x := int64(d - '0')

			if value > lim {
				return 0, b, unmarshalOverflow(b, t)
			}

			if value *= 10; value > (max - x) {
				return 0, b, unmarshalOverflow(b, t)
			}

			value += x
			count++
		}
	}

	if count < len(b) {
		switch b[count] {
		case '.', 'e', 'E': // was this actually a float?
			v, r, err := parseNumber(b)
			if err != nil {
				v, r = b[:count+1], b[count+1:]
			}
			return 0, r, unmarshalTypeError(v, t)
		}
	}

	return value, b[count:], nil
}
+
// parseUint is like parseInt but for unsigned integers. It returns the
// parsed value and the unconsumed remainder of b.
func parseUint(b []byte, t reflect.Type) (uint64, []byte, error) {
	const max = math.MaxUint64
	const lim = max / 10

	var value uint64
	var count int

	if len(b) == 0 {
		return 0, b, syntaxError(b, "cannot decode integer value from an empty input")
	}

	if len(b) > 1 && b[0] == '0' && '0' <= b[1] && b[1] <= '9' {
		return 0, b, syntaxError(b, "invalid leading character '0' in integer")
	}

	for _, d := range b {
		if !(d >= '0' && d <= '9') {
			if count == 0 {
				// No digits at all: not a number.
				b, err := inputError(b, t)
				return 0, b, err
			}
			break
		}
		x := uint64(d - '0')

		// Overflow checks: first that value*10 cannot wrap, then that
		// adding the next digit cannot wrap.
		if value > lim {
			return 0, b, unmarshalOverflow(b, t)
		}

		if value *= 10; value > (max - x) {
			return 0, b, unmarshalOverflow(b, t)
		}

		value += x
		count++
	}

	if count < len(b) {
		switch b[count] {
		case '.', 'e', 'E': // was this actually a float?
			v, r, err := parseNumber(b)
			if err != nil {
				v, r = b[:count+1], b[count+1:]
			}
			return 0, r, unmarshalTypeError(v, t)
		}
	}

	return value, b[count:], nil
}
+
// parseUintHex parses a hexadecimal representation of a uint64 from b.
//
// The function is equivalent to calling strconv.ParseUint(string(b), 16, 64) but
// it prevents Go from making a memory allocation for converting a byte slice to
// a string (escape analysis fails due to the error returned by strconv.ParseUint).
//
// Because it only works with base 16 the function is also significantly faster
// than strconv.ParseUint.
func parseUintHex(b []byte) (uint64, []byte, error) {
	const max = math.MaxUint64
	const lim = max / 0x10

	var value uint64
	var count int

	if len(b) == 0 {
		return 0, b, syntaxError(b, "cannot decode hexadecimal value from an empty input")
	}

parseLoop:
	for i, d := range b {
		var x uint64

		// Accept 0-9, A-F and a-f; stop at the first other byte.
		switch {
		case d >= '0' && d <= '9':
			x = uint64(d - '0')

		case d >= 'A' && d <= 'F':
			x = uint64(d-'A') + 0xA

		case d >= 'a' && d <= 'f':
			x = uint64(d-'a') + 0xA

		default:
			if i == 0 {
				return 0, b, syntaxError(b, "expected hexadecimal digit but found '%c'", d)
			}
			break parseLoop
		}

		// Two-step overflow check, mirroring parseUint.
		if value > lim {
			return 0, b, syntaxError(b, "hexadecimal value out of range")
		}

		if value *= 0x10; value > (max - x) {
			return 0, b, syntaxError(b, "hexadecimal value out of range")
		}

		value += x
		count++
	}

	return value, b[count:], nil
}
+
// parseNull consumes a leading "null" literal from b, returning the literal
// and the remaining input. Too-short input reports EOF rather than a syntax
// error so the streaming decoder can read more data.
func parseNull(b []byte) ([]byte, []byte, error) {
	if hasNullPrefix(b) {
		return b[:4], b[4:], nil
	}
	if len(b) < 4 {
		return nil, b[len(b):], unexpectedEOF(b)
	}
	return nil, b, syntaxError(b, "expected 'null' but found invalid token")
}

// parseTrue consumes a leading "true" literal from b; see parseNull for the
// error conventions.
func parseTrue(b []byte) ([]byte, []byte, error) {
	if hasTruePrefix(b) {
		return b[:4], b[4:], nil
	}
	if len(b) < 4 {
		return nil, b[len(b):], unexpectedEOF(b)
	}
	return nil, b, syntaxError(b, "expected 'true' but found invalid token")
}

// parseFalse consumes a leading "false" literal from b; see parseNull for
// the error conventions.
func parseFalse(b []byte) ([]byte, []byte, error) {
	if hasFalsePrefix(b) {
		return b[:5], b[5:], nil
	}
	if len(b) < 5 {
		return nil, b[len(b):], unexpectedEOF(b)
	}
	return nil, b, syntaxError(b, "expected 'false' but found invalid token")
}
+
// parseNumber validates and consumes a JSON number from b, returning the
// number's raw bytes (v) and the remaining input (r). It does not convert
// the value; callers use parseInt/parseUint or strconv for that.
func parseNumber(b []byte) (v, r []byte, err error) {
	if len(b) == 0 {
		r, err = b, unexpectedEOF(b)
		return
	}

	i := 0
	// sign
	if b[i] == '-' {
		i++
	}

	if i == len(b) {
		r, err = b[i:], syntaxError(b, "missing number value after sign")
		return
	}

	if b[i] < '0' || b[i] > '9' {
		r, err = b[i:], syntaxError(b, "expected digit but got '%c'", b[i])
		return
	}

	// integer part
	if b[i] == '0' {
		i++
		if i == len(b) || (b[i] != '.' && b[i] != 'e' && b[i] != 'E') {
			v, r = b[:i], b[i:]
			return
		}
		// NOTE(review): this branch looks unreachable — at this point b[i]
		// must be '.', 'e' or 'E', never a digit. Confirm against upstream.
		if '0' <= b[i] && b[i] <= '9' {
			r, err = b[i:], syntaxError(b, "cannot decode number with leading '0' character")
			return
		}
	}

	for i < len(b) && '0' <= b[i] && b[i] <= '9' {
		i++
	}

	// decimal part
	if i < len(b) && b[i] == '.' {
		i++
		decimalStart := i

		for i < len(b) {
			if c := b[i]; !('0' <= c && c <= '9') {
				// A non-digit immediately after '.' is a syntax error.
				if i == decimalStart {
					r, err = b[i:], syntaxError(b, "expected digit but found '%c'", c)
					return
				}
				break
			}
			i++
		}

		// '.' at the very end of the input.
		if i == decimalStart {
			r, err = b[i:], syntaxError(b, "expected decimal part after '.'")
			return
		}
	}

	// exponent part
	if i < len(b) && (b[i] == 'e' || b[i] == 'E') {
		i++

		if i < len(b) {
			if c := b[i]; c == '+' || c == '-' {
				i++
			}
		}

		if i == len(b) {
			r, err = b[i:], syntaxError(b, "missing exponent in number")
			return
		}

		exponentStart := i

		for i < len(b) {
			if c := b[i]; !('0' <= c && c <= '9') {
				if i == exponentStart {
					err = syntaxError(b, "expected digit but found '%c'", c)
					return
				}
				break
			}
			i++
		}
	}

	v, r = b[:i], b[i:]
	return
}
+
// parseUnicode decodes the 4 hex digits of a \uXXXX escape (b starts just
// after the "\u"). It returns the code point and the number of input bytes
// consumed (always 4 on success).
func parseUnicode(b []byte) (rune, int, error) {
	if len(b) < 4 {
		return 0, 0, syntaxError(b, "unicode code point must have at least 4 characters")
	}

	u, r, err := parseUintHex(b[:4])
	if err != nil {
		return 0, 0, syntaxError(b, "parsing unicode code point: %s", err)
	}

	// A non-empty remainder means one of the 4 bytes was not a hex digit.
	if len(r) != 0 {
		return 0, 0, syntaxError(b, "invalid unicode code point")
	}

	return rune(u), 4, nil
}
+
// parseStringFast consumes a JSON string from b. It returns the raw string
// including its surrounding quotes, the remaining input, and whether the
// string contains escape sequences (so callers know unquoting is needed).
// The fast path handles strings with no backslashes and only printable
// ASCII in a single scan.
func parseStringFast(b []byte) ([]byte, []byte, bool, error) {
	if len(b) < 2 {
		return nil, b[len(b):], false, unexpectedEOF(b)
	}
	if b[0] != '"' {
		return nil, b, false, syntaxError(b, "expected '\"' at the beginning of a string value")
	}

	// Fast path: find the next '"' and accept if no '\' and no non-printable
	// bytes occur before it.
	n := bytes.IndexByte(b[1:], '"') + 2
	if n <= 1 {
		return nil, b[len(b):], false, syntaxError(b, "missing '\"' at the end of a string value")
	}
	if bytes.IndexByte(b[1:n], '\\') < 0 && asciiValidPrint(b[1:n]) {
		return b[:n], b[n:], false, nil
	}

	// Slow path: walk the string byte by byte, validating escapes.
	for i := 1; i < len(b); i++ {
		switch b[i] {
		case '\\':
			if i++; i < len(b) {
				switch b[i] {
				case '"', '\\', '/', 'n', 'r', 't', 'f', 'b':
				case 'u':
					_, n, err := parseUnicode(b[i+1:])
					if err != nil {
						return nil, b, false, err
					}
					i += n
				default:
					return nil, b, false, syntaxError(b, "invalid character '%c' in string escape code", b[i])
				}
			}

		case '"':
			return b[:i+1], b[i+1:], true, nil

		default:
			// Raw control characters are not permitted inside JSON strings.
			if b[i] < 0x20 {
				return nil, b, false, syntaxError(b, "invalid character '%c' in string escape code", b[i])
			}
		}
	}

	return nil, b[len(b):], false, syntaxError(b, "missing '\"' at the end of a string value")
}

// parseString is parseStringFast without the escaped-content indicator.
func parseString(b []byte) ([]byte, []byte, error) {
	s, b, _, err := parseStringFast(b)
	return s, b, err
}
+
// parseStringUnquote consumes a JSON string from b and returns its unquoted,
// unescaped content. r is an optional scratch buffer reused for the decoded
// output when the string contains escapes. The returned bool reports whether
// unescaping was necessary (when false, the result aliases b's backing array).
func parseStringUnquote(b []byte, r []byte) ([]byte, []byte, bool, error) {
	s, b, escaped, err := parseStringFast(b)
	if err != nil {
		return s, b, false, err
	}

	s = s[1 : len(s)-1] // trim the quotes

	if !escaped {
		return s, b, false, nil
	}

	if r == nil {
		r = make([]byte, 0, len(s))
	}

	// Copy runs of literal bytes between escapes, decoding each escape.
	for len(s) != 0 {
		i := bytes.IndexByte(s, '\\')

		if i < 0 {
			r = appendCoerceInvalidUTF8(r, s)
			break
		}

		r = appendCoerceInvalidUTF8(r, s[:i])
		s = s[i+1:]

		c := s[0]
		switch c {
		case '"', '\\', '/':
			// simple escaped character
		case 'n':
			c = '\n'

		case 'r':
			c = '\r'

		case 't':
			c = '\t'

		case 'b':
			c = '\b'

		case 'f':
			c = '\f'

		case 'u':
			s = s[1:]

			r1, n1, err := parseUnicode(s)
			if err != nil {
				return r, b, true, err
			}
			s = s[n1:]

			if utf16.IsSurrogate(r1) {
				// A high surrogate must be followed by a \uXXXX low
				// surrogate; otherwise emit U+FFFD, matching encoding/json.
				if !hasPrefix(s, `\u`) {
					r1 = unicode.ReplacementChar
				} else {
					r2, n2, err := parseUnicode(s[2:])
					if err != nil {
						return r, b, true, err
					}
					if r1 = utf16.DecodeRune(r1, r2); r1 != unicode.ReplacementChar {
						s = s[2+n2:]
					}
				}
			}

			r = appendRune(r, r1)
			continue

		default: // not sure what this escape sequence is
			return r, b, false, syntaxError(s, "invalid character '%c' in string escape code", c)
		}

		r = append(r, c)
		s = s[1:]
	}

	return r, b, true, nil
}
+
// appendRune appends the UTF-8 encoding of r to b and returns the
// extended slice.
func appendRune(b []byte, r rune) []byte {
	var enc [utf8.UTFMax]byte
	n := utf8.EncodeRune(enc[:], r)
	return append(b, enc[:n]...)
}
+
// appendCoerceInvalidUTF8 appends s to b, re-encoding rune by rune so any
// invalid UTF-8 sequence in s is coerced to utf8.RuneError (U+FFFD).
func appendCoerceInvalidUTF8(b []byte, s []byte) []byte {
	var enc [utf8.UTFMax]byte

	// Ranging over string(s) decodes runes, substituting U+FFFD for
	// invalid sequences; each rune is then re-encoded into b.
	for _, r := range string(s) {
		n := utf8.EncodeRune(enc[:], r)
		b = append(b, enc[:n]...)
	}

	return b
}
+
// parseObject consumes a complete JSON object from b, returning the object's
// raw bytes and the remaining input. Keys and values are validated but not
// decoded.
func parseObject(b []byte) ([]byte, []byte, error) {
	if len(b) < 2 {
		return nil, b[len(b):], unexpectedEOF(b)
	}

	if b[0] != '{' {
		return nil, b, syntaxError(b, "expected '{' at the beginning of an object value")
	}

	var err error
	var a = b       // the full original input, used to slice out the object
	var n = len(b)  // original length, used to compute the consumed span
	var i = 0       // number of fields parsed so far

	b = b[1:]
	for {
		b = skipSpaces(b)

		if len(b) == 0 {
			return nil, b, syntaxError(b, "cannot decode object from empty input")
		}

		if b[0] == '}' {
			// j is the number of bytes consumed, including the closing '}'.
			j := (n - len(b)) + 1
			return a[:j], a[j:], nil
		}

		if i != 0 {
			// NOTE(review): this len(b)==0 check appears unreachable — it is
			// already handled at the top of the loop. Confirm before removal.
			if len(b) == 0 {
				return nil, b, syntaxError(b, "unexpected EOF after object field value")
			}
			if b[0] != ',' {
				return nil, b, syntaxError(b, "expected ',' after object field value but found '%c'", b[0])
			}
			b = skipSpaces(b[1:])
			if len(b) == 0 {
				return nil, b, unexpectedEOF(b)
			}
			if b[0] == '}' {
				return nil, b, syntaxError(b, "unexpected trailing comma after object field")
			}
		}

		// Field key.
		_, b, err = parseString(b)
		if err != nil {
			return nil, b, err
		}
		b = skipSpaces(b)

		if len(b) == 0 {
			return nil, b, syntaxError(b, "unexpected EOF after object field key")
		}
		if b[0] != ':' {
			return nil, b, syntaxError(b, "expected ':' after object field key but found '%c'", b[0])
		}
		b = skipSpaces(b[1:])

		// Field value.
		_, b, err = parseValue(b)
		if err != nil {
			return nil, b, err
		}

		i++
	}
}
+
+func parseArray(b []byte) ([]byte, []byte, error) {
+ if len(b) < 2 {
+ return nil, b[len(b):], unexpectedEOF(b)
+ }
+
+ if b[0] != '[' {
+ return nil, b, syntaxError(b, "expected '[' at the beginning of array value")
+ }
+
+ var err error
+ var a = b
+ var n = len(b)
+ var i = 0
+
+ b = b[1:]
+ for {
+ b = skipSpaces(b)
+
+ if len(b) == 0 {
+ return nil, b, syntaxError(b, "missing closing ']' after array value")
+ }
+
+ if b[0] == ']' {
+ j := (n - len(b)) + 1
+ return a[:j], a[j:], nil
+ }
+
+ if i != 0 {
+ if len(b) == 0 {
+ return nil, b, syntaxError(b, "unexpected EOF after array element")
+ }
+ if b[0] != ',' {
+ return nil, b, syntaxError(b, "expected ',' after array element but found '%c'", b[0])
+ }
+ b = skipSpaces(b[1:])
+ if len(b) == 0 {
+ return nil, b, unexpectedEOF(b)
+ }
+ if b[0] == ']' {
+ return nil, b, syntaxError(b, "unexpected trailing comma after object field")
+ }
+ }
+
+ _, b, err = parseValue(b)
+ if err != nil {
+ return nil, b, err
+ }
+
+ i++
+ }
+}
+
// parseValue consumes a single JSON value from b, dispatching on the first
// byte, and returns the value's raw bytes plus the remaining input. It
// expects leading whitespace to have been stripped already.
func parseValue(b []byte) ([]byte, []byte, error) {
	if len(b) != 0 {
		switch b[0] {
		case '{':
			return parseObject(b)
		case '[':
			return parseArray(b)
		case '"':
			return parseString(b)
		case 'n':
			return parseNull(b)
		case 't':
			return parseTrue(b)
		case 'f':
			return parseFalse(b)
		case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
			return parseNumber(b)
		default:
			return nil, b, syntaxError(b, "invalid character '%c' looking for beginning of value", b[0])
		}
	}
	return nil, b, syntaxError(b, "unexpected end of JSON input")
}
+
// hasNullPrefix reports whether b begins with the literal "null".
func hasNullPrefix(b []byte) bool {
	return hasPrefix(b, "null")
}

// hasTruePrefix reports whether b begins with the literal "true".
func hasTruePrefix(b []byte) bool {
	return hasPrefix(b, "true")
}

// hasFalsePrefix reports whether b begins with the literal "false".
func hasFalsePrefix(b []byte) bool {
	return hasPrefix(b, "false")
}

// hasPrefix reports whether b begins with the string s.
func hasPrefix(b []byte, s string) bool {
	return len(b) >= len(s) && string(b[:len(s)]) == s
}
+
// hasLeadingSign reports whether b starts with a '+' or '-' sign byte.
func hasLeadingSign(b []byte) bool {
	if len(b) == 0 {
		return false
	}
	return b[0] == '+' || b[0] == '-'
}

// hasLeadingZeroes reports whether b (ignoring any leading sign) starts
// with a '0' followed by another digit, e.g. "007" or "-012".
func hasLeadingZeroes(b []byte) bool {
	if hasLeadingSign(b) {
		b = b[1:]
	}
	if len(b) < 2 || b[0] != '0' {
		return false
	}
	return b[1] >= '0' && b[1] <= '9'
}
+
// appendToLower appends a lowercased copy of s to b. Pure-ASCII input uses
// a byte-level fast path; anything else falls back to rune-wise folding via
// foldRune.
func appendToLower(b, s []byte) []byte {
	if asciiValid(s) { // fast path for ascii strings
		i := 0

		for j := range s {
			c := s[j]

			if 'A' <= c && c <= 'Z' {
				// Copy the run of unchanged bytes, then the lowered byte.
				b = append(b, s[i:j]...)
				b = append(b, c+('a'-'A'))
				i = j + 1
			}
		}

		// Trailing run with no uppercase letters.
		return append(b, s[i:]...)
	}

	for _, r := range string(s) {
		b = appendRune(b, foldRune(r))
	}

	return b
}
+
// foldRune maps r through unicode.SimpleFold, then lowers any ASCII
// uppercase result so folding always lands on the lowercase ASCII variant.
func foldRune(r rune) rune {
	r = unicode.SimpleFold(r)
	if r >= 'A' && r <= 'Z' {
		r += 'a' - 'A'
	}
	return r
}
diff --git a/vendor/github.com/neilotoole/jsoncolor/reflect.go b/vendor/github.com/neilotoole/jsoncolor/reflect.go
new file mode 100644
index 0000000..0dcf174
--- /dev/null
+++ b/vendor/github.com/neilotoole/jsoncolor/reflect.go
@@ -0,0 +1,20 @@
+//go:build go1.15
+// +build go1.15
+
+package jsoncolor
+
+import (
+ "reflect"
+ "unsafe"
+)
+
// extendSlice returns a copy of *s whose backing array has been grown to a
// capacity of n elements of t's element type. t is the slice's reflect.Type
// (e.g. []T); the existing contents of *s are copied into the new array.
func extendSlice(t reflect.Type, s *slice, n int) slice {
	// Allocate a fresh [n]T array and copy the old elements into it.
	arrayType := reflect.ArrayOf(n, t.Elem())
	arrayData := reflect.New(arrayType)
	reflect.Copy(arrayData.Elem(), reflect.NewAt(t, unsafe.Pointer(s)).Elem())
	return slice{
		// NOTE(review): unsafe.Pointer(arrayData.Pointer()) round-trips a
		// pointer through uintptr, which `go vet` flags as unsafe; verify
		// against upstream before changing this vendored code.
		data: unsafe.Pointer(arrayData.Pointer()),
		len: s.len,
		cap: n,
	}
}
diff --git a/vendor/github.com/neilotoole/jsoncolor/reflect_optimize.go b/vendor/github.com/neilotoole/jsoncolor/reflect_optimize.go
new file mode 100644
index 0000000..03fc849
--- /dev/null
+++ b/vendor/github.com/neilotoole/jsoncolor/reflect_optimize.go
@@ -0,0 +1,30 @@
+//go:build !go1.15
+// +build !go1.15
+
+package jsoncolor
+
+import (
+ "reflect"
+ "unsafe"
+)
+
// unsafe_NewArray links to the runtime-internal reflect.unsafe_NewArray,
// which allocates a new array of `length` elements of the given runtime type.
//go:linkname unsafe_NewArray reflect.unsafe_NewArray
func unsafe_NewArray(rtype unsafe.Pointer, length int) unsafe.Pointer

// typedslicecopy links to the runtime-internal reflect.typedslicecopy, a
// GC-aware element copy between slices of the given element type; it returns
// the number of elements copied.
//go:linkname typedslicecopy reflect.typedslicecopy
//go:noescape
func typedslicecopy(elemType unsafe.Pointer, dst, src slice) int
+
// extendSlice returns a copy of *s whose backing array has been grown to a
// capacity of n elements, using runtime-linked helpers (see the go:linkname
// declarations in this file) instead of the reflect.ArrayOf path.
func extendSlice(t reflect.Type, s *slice, n int) slice {
	// Pull the runtime type pointer out of the reflect.Type interface value.
	elemTypeRef := t.Elem()
	elemTypePtr := ((*iface)(unsafe.Pointer(&elemTypeRef))).ptr

	d := slice{
		data: unsafe_NewArray(elemTypePtr, n),
		len: s.len,
		cap: n,
	}

	// GC-aware copy of the existing elements into the new array.
	typedslicecopy(elemTypePtr, d, *s)
	return d
}
diff --git a/vendor/github.com/neilotoole/jsoncolor/splash.png b/vendor/github.com/neilotoole/jsoncolor/splash.png
new file mode 100644
index 0000000..f962327
--- /dev/null
+++ b/vendor/github.com/neilotoole/jsoncolor/splash.png
Binary files differ
diff --git a/vendor/github.com/neilotoole/jsoncolor/terminal.go b/vendor/github.com/neilotoole/jsoncolor/terminal.go
new file mode 100644
index 0000000..e9398f9
--- /dev/null
+++ b/vendor/github.com/neilotoole/jsoncolor/terminal.go
@@ -0,0 +1,42 @@
+//go:build !windows
+
+package jsoncolor
+
+import (
+ "io"
+ "os"
+
+ "golang.org/x/term"
+)
+
+// IsColorTerminal returns true if w is a colorable terminal.
+// It respects [NO_COLOR], [FORCE_COLOR] and TERM=dumb environment variables.
+//
+// [NO_COLOR]: https://no-color.org/
+// [FORCE_COLOR]: https://force-color.org/
+func IsColorTerminal(w io.Writer) bool {
+ if os.Getenv("NO_COLOR") != "" {
+ return false
+ }
+ if os.Getenv("FORCE_COLOR") != "" {
+ return true
+ }
+ if os.Getenv("TERM") == "dumb" {
+ return false
+ }
+
+ if w == nil {
+ return false
+ }
+
+ f, ok := w.(*os.File)
+ if !ok {
+ return false
+ }
+
+ if !term.IsTerminal(int(f.Fd())) {
+ return false
+ }
+
+ return true
+}
diff --git a/vendor/github.com/neilotoole/jsoncolor/terminal_windows.go b/vendor/github.com/neilotoole/jsoncolor/terminal_windows.go
new file mode 100644
index 0000000..38259dc
--- /dev/null
+++ b/vendor/github.com/neilotoole/jsoncolor/terminal_windows.go
@@ -0,0 +1,53 @@
+package jsoncolor
+
+import (
+ "io"
+ "os"
+
+ "golang.org/x/sys/windows"
+)
+
+// IsColorTerminal returns true if w is a colorable terminal.
+// It respects [NO_COLOR], [FORCE_COLOR] and TERM=dumb environment variables.
+//
+// [NO_COLOR]: https://no-color.org/
+// [FORCE_COLOR]: https://force-color.org/
+func IsColorTerminal(w io.Writer) bool {
+ if os.Getenv("NO_COLOR") != "" {
+ return false
+ }
+ if os.Getenv("FORCE_COLOR") != "" {
+ return true
+ }
+ if os.Getenv("TERM") == "dumb" {
+ return false
+ }
+
+ if w == nil {
+ return false
+ }
+
+ f, ok := w.(*os.File)
+ if !ok {
+ return false
+ }
+ fd := f.Fd()
+
+ console := windows.Handle(fd)
+ var mode uint32
+ if err := windows.GetConsoleMode(console, &mode); err != nil {
+ return false
+ }
+
+ var want uint32 = windows.ENABLE_PROCESSED_OUTPUT | windows.ENABLE_VIRTUAL_TERMINAL_PROCESSING
+ if (mode & want) == want {
+ return true
+ }
+
+ mode |= want
+ if err := windows.SetConsoleMode(console, mode); err != nil {
+ return false
+ }
+
+ return true
+}
diff --git a/vendor/github.com/neilotoole/jsoncolor/token.go b/vendor/github.com/neilotoole/jsoncolor/token.go
new file mode 100644
index 0000000..ba3277e
--- /dev/null
+++ b/vendor/github.com/neilotoole/jsoncolor/token.go
@@ -0,0 +1,286 @@
+package jsoncolor
+
// Tokenizer is an iterator-style type which can be used to progressively parse
// through a json input.
//
// Tokenizing json is useful to build highly efficient parsing operations, for
// example when doing transformations on-the-fly as the program reads the
// input and produces the transformed json to an output buffer.
//
// Here is a common pattern to use a tokenizer:
//
//	for t := json.NewTokenizer(b); t.Next(); {
//		switch t.Delim {
//		case '{':
//			...
//		case '}':
//			...
//		case '[':
//			...
//		case ']':
//			...
//		case ':':
//			...
//		case ',':
//			...
//		}
//
//		switch {
//		case t.Value.String():
//			...
//		case t.Value.Null():
//			...
//		case t.Value.True():
//			...
//		case t.Value.False():
//			...
//		case t.Value.Number():
//			...
//		}
//	}
//
type Tokenizer struct {
	// When the tokenizer is positioned on a json delimiter this field is not
	// zero. In this case the possible values are '{', '}', '[', ']', ':', and
	// ','.
	Delim Delim

	// This field contains the raw json token that the tokenizer is pointing at.
	// When Delim is not zero, this field is a single-element byte slice
	// containing the delimiter value. Otherwise, this field holds values like
	// null, true, false, numbers, or quoted strings.
	Value RawValue

	// When the tokenizer has encountered invalid content this field is not nil.
	Err error

	// When the value is in an array or an object, this field contains the depth
	// at which it was found.
	Depth int

	// When the value is in an array or an object, this field contains the
	// position at which it was found.
	Index int

	// This field is true when the value is the key of an object.
	IsKey bool

	// Tells whether the next value read from the tokenizer is a key.
	isKey bool

	// json input for the tokenizer, pointing at data right after the last token
	// that was parsed.
	json []byte

	// Stack used to track entering and leaving arrays, objects, and keys.
	stack []state

	// buffer provides pre-allocated backing storage for stack, avoiding a
	// heap allocation for json documents nested up to 8 levels deep.
	buffer [8]state
}
+
// state records one level of nesting: the kind of scope that was entered
// ('[' or '{') and the count of values seen so far at that level.
type state struct {
	typ scope
	len int
}

// scope identifies the kind of structure the tokenizer is currently inside.
type scope int

const (
	inArray scope = iota
	inObject
)
+
+// NewTokenizer constructs a new Tokenizer which reads its json input from b.
+func NewTokenizer(b []byte) *Tokenizer { return &Tokenizer{json: b} }
+
// Reset erases the state of t and re-initializes it with the json input from b.
func (t *Tokenizer) Reset(b []byte) {
	// This code is similar to:
	//
	//	*t = Tokenizer{json: b}
	//
	// However, it does not compile down to an invocation of duff-copy, which
	// ends up being slower and prevents the code from being inlined.
	t.Delim = 0
	t.Value = nil
	t.Err = nil
	t.Depth = 0
	t.Index = 0
	t.IsKey = false
	t.isKey = false
	t.json = b
	// The stack is dropped rather than truncated; push re-seeds it from the
	// pre-allocated buffer on first use.
	t.stack = nil
}
+
+// Next returns a new tokenizer pointing at the next token, or the zero-value of
+// Tokenizer if the end of the json input has been reached.
+//
+// If the tokenizer encounters malformed json while reading the input the method
+// sets t.Err to an error describing the issue, and returns false. Once an error
+// has been encountered, the tokenizer will always fail until its input is
+// cleared by a call to its Reset method.
+func (t *Tokenizer) Next() bool {
+ if t.Err != nil {
+ return false
+ }
+
+ // Inlined code of the skipSpaces function, this give a ~15% speed boost.
+ i := 0
+skipLoop:
+ for _, c := range t.json {
+ switch c {
+ case sp, ht, nl, cr:
+ i++
+ default:
+ break skipLoop
+ }
+ }
+
+ if t.json = t.json[i:]; len(t.json) == 0 {
+ t.Reset(nil)
+ return false
+ }
+
+ var d Delim
+ var v []byte
+ var b []byte
+ var err error
+
+ switch t.json[0] {
+ case '"':
+ v, b, err = parseString(t.json)
+ case 'n':
+ v, b, err = parseNull(t.json)
+ case 't':
+ v, b, err = parseTrue(t.json)
+ case 'f':
+ v, b, err = parseFalse(t.json)
+ case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
+ v, b, err = parseNumber(t.json)
+ case '{', '}', '[', ']', ':', ',':
+ d, v, b = Delim(t.json[0]), t.json[:1], t.json[1:]
+ default:
+ v, b, err = t.json[:1], t.json[1:], syntaxError(t.json, "expected token but found '%c'", t.json[0])
+ }
+
+ t.Delim = d
+ t.Value = RawValue(v)
+ t.Err = err
+ t.Depth = t.depth()
+ t.Index = t.index()
+ t.IsKey = d == 0 && t.isKey
+ t.json = b
+
+ if d != 0 {
+ switch d {
+ case '{':
+ t.isKey = true
+ t.push(inObject)
+ case '[':
+ t.push(inArray)
+ case '}':
+ err = t.pop(inObject)
+ t.Depth--
+ t.Index = t.index()
+ case ']':
+ err = t.pop(inArray)
+ t.Depth--
+ t.Index = t.index()
+ case ':':
+ t.isKey = false
+ case ',':
+ if t.is(inObject) {
+ t.isKey = true
+ }
+ t.stack[len(t.stack)-1].len++
+ }
+ }
+
+ return (d != 0 || len(v) != 0) && err == nil
+}
+
+func (t *Tokenizer) push(typ scope) {
+ if t.stack == nil {
+ t.stack = t.buffer[:0]
+ }
+ t.stack = append(t.stack, state{typ: typ, len: 1})
+}
+
+func (t *Tokenizer) pop(expect scope) error {
+ i := len(t.stack) - 1
+
+ if i < 0 {
+ return syntaxError(t.json, "found unexpected character while tokenizing json input")
+ }
+
+ if found := t.stack[i]; expect != found.typ {
+ return syntaxError(t.json, "found unexpected character while tokenizing json input")
+ }
+
+ t.stack = t.stack[:i]
+ return nil
+}
+
+func (t *Tokenizer) is(typ scope) bool {
+ return len(t.stack) != 0 && t.stack[len(t.stack)-1].typ == typ
+}
+
// depth returns the current nesting level: the number of open arrays and
// objects surrounding the tokenizer's position.
func (t *Tokenizer) depth() int {
	return len(t.stack)
}
+
+func (t *Tokenizer) index() int {
+ if len(t.stack) == 0 {
+ return 0
+ }
+ return t.stack[len(t.stack)-1].len - 1
+}
+
// RawValue represents a raw json value, it is intended to carry null, true,
// false, number, and string values only.
type RawValue []byte

// String returns true if v contains a string value.
func (v RawValue) String() bool { return len(v) > 0 && v[0] == '"' }

// Null returns true if v contains a null value.
func (v RawValue) Null() bool { return len(v) > 0 && v[0] == 'n' }

// True returns true if v contains a true value.
func (v RawValue) True() bool { return len(v) > 0 && v[0] == 't' }

// False returns true if v contains a false value.
func (v RawValue) False() bool { return len(v) > 0 && v[0] == 'f' }

// Number returns true if v contains a number value.
func (v RawValue) Number() bool {
	if len(v) == 0 {
		return false
	}
	c := v[0]
	return c == '-' || ('0' <= c && c <= '9')
}
+
+// AppendUnquote writes the unquoted version of the string value in v into b.
+func (v RawValue) AppendUnquote(b []byte) []byte {
+ s, r, new, err := parseStringUnquote([]byte(v), b)
+ if err != nil {
+ panic(err)
+ }
+ if len(r) != 0 {
+ panic(syntaxError(r, "unexpected trailing tokens after json value"))
+ }
+ if new {
+ b = s
+ } else {
+ b = append(b, s...)
+ }
+ return b
+}
+
+// Unquote returns the unquoted version of the string value in v.
+func (v RawValue) Unquote() []byte {
+ return v.AppendUnquote(nil)
+}