aboutsummaryrefslogtreecommitdiff
path: root/vendor/gopkg.in/yaml.v2
diff options
context:
space:
mode:
Diffstat (limited to 'vendor/gopkg.in/yaml.v2')
-rw-r--r--vendor/gopkg.in/yaml.v2/.travis.yml16
-rw-r--r--vendor/gopkg.in/yaml.v2/LICENSE201
-rw-r--r--vendor/gopkg.in/yaml.v2/LICENSE.libyaml31
-rw-r--r--vendor/gopkg.in/yaml.v2/NOTICE13
-rw-r--r--vendor/gopkg.in/yaml.v2/README.md133
-rw-r--r--vendor/gopkg.in/yaml.v2/apic.go740
-rw-r--r--vendor/gopkg.in/yaml.v2/decode.go815
-rw-r--r--vendor/gopkg.in/yaml.v2/emitterc.go1685
-rw-r--r--vendor/gopkg.in/yaml.v2/encode.go390
-rw-r--r--vendor/gopkg.in/yaml.v2/parserc.go1095
-rw-r--r--vendor/gopkg.in/yaml.v2/readerc.go412
-rw-r--r--vendor/gopkg.in/yaml.v2/resolve.go258
-rw-r--r--vendor/gopkg.in/yaml.v2/scannerc.go2711
-rw-r--r--vendor/gopkg.in/yaml.v2/sorter.go113
-rw-r--r--vendor/gopkg.in/yaml.v2/writerc.go26
-rw-r--r--vendor/gopkg.in/yaml.v2/yaml.go466
-rw-r--r--vendor/gopkg.in/yaml.v2/yamlh.go739
-rw-r--r--vendor/gopkg.in/yaml.v2/yamlprivateh.go173
18 files changed, 10017 insertions, 0 deletions
diff --git a/vendor/gopkg.in/yaml.v2/.travis.yml b/vendor/gopkg.in/yaml.v2/.travis.yml
new file mode 100644
index 0000000..055480b
--- /dev/null
+++ b/vendor/gopkg.in/yaml.v2/.travis.yml
@@ -0,0 +1,16 @@
1language: go
2
3go:
4 - "1.4.x"
5 - "1.5.x"
6 - "1.6.x"
7 - "1.7.x"
8 - "1.8.x"
9 - "1.9.x"
10 - "1.10.x"
11 - "1.11.x"
12 - "1.12.x"
13 - "1.13.x"
14 - "tip"
15
16go_import_path: gopkg.in/yaml.v2
diff --git a/vendor/gopkg.in/yaml.v2/LICENSE b/vendor/gopkg.in/yaml.v2/LICENSE
new file mode 100644
index 0000000..8dada3e
--- /dev/null
+++ b/vendor/gopkg.in/yaml.v2/LICENSE
@@ -0,0 +1,201 @@
1 Apache License
2 Version 2.0, January 2004
3 http://www.apache.org/licenses/
4
5 TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6
7 1. Definitions.
8
9 "License" shall mean the terms and conditions for use, reproduction,
10 and distribution as defined by Sections 1 through 9 of this document.
11
12 "Licensor" shall mean the copyright owner or entity authorized by
13 the copyright owner that is granting the License.
14
15 "Legal Entity" shall mean the union of the acting entity and all
16 other entities that control, are controlled by, or are under common
17 control with that entity. For the purposes of this definition,
18 "control" means (i) the power, direct or indirect, to cause the
19 direction or management of such entity, whether by contract or
20 otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 outstanding shares, or (iii) beneficial ownership of such entity.
22
23 "You" (or "Your") shall mean an individual or Legal Entity
24 exercising permissions granted by this License.
25
26 "Source" form shall mean the preferred form for making modifications,
27 including but not limited to software source code, documentation
28 source, and configuration files.
29
30 "Object" form shall mean any form resulting from mechanical
31 transformation or translation of a Source form, including but
32 not limited to compiled object code, generated documentation,
33 and conversions to other media types.
34
35 "Work" shall mean the work of authorship, whether in Source or
36 Object form, made available under the License, as indicated by a
37 copyright notice that is included in or attached to the work
38 (an example is provided in the Appendix below).
39
40 "Derivative Works" shall mean any work, whether in Source or Object
41 form, that is based on (or derived from) the Work and for which the
42 editorial revisions, annotations, elaborations, or other modifications
43 represent, as a whole, an original work of authorship. For the purposes
44 of this License, Derivative Works shall not include works that remain
45 separable from, or merely link (or bind by name) to the interfaces of,
46 the Work and Derivative Works thereof.
47
48 "Contribution" shall mean any work of authorship, including
49 the original version of the Work and any modifications or additions
50 to that Work or Derivative Works thereof, that is intentionally
51 submitted to Licensor for inclusion in the Work by the copyright owner
52 or by an individual or Legal Entity authorized to submit on behalf of
53 the copyright owner. For the purposes of this definition, "submitted"
54 means any form of electronic, verbal, or written communication sent
55 to the Licensor or its representatives, including but not limited to
56 communication on electronic mailing lists, source code control systems,
57 and issue tracking systems that are managed by, or on behalf of, the
58 Licensor for the purpose of discussing and improving the Work, but
59 excluding communication that is conspicuously marked or otherwise
60 designated in writing by the copyright owner as "Not a Contribution."
61
62 "Contributor" shall mean Licensor and any individual or Legal Entity
63 on behalf of whom a Contribution has been received by Licensor and
64 subsequently incorporated within the Work.
65
66 2. Grant of Copyright License. Subject to the terms and conditions of
67 this License, each Contributor hereby grants to You a perpetual,
68 worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 copyright license to reproduce, prepare Derivative Works of,
70 publicly display, publicly perform, sublicense, and distribute the
71 Work and such Derivative Works in Source or Object form.
72
73 3. Grant of Patent License. Subject to the terms and conditions of
74 this License, each Contributor hereby grants to You a perpetual,
75 worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 (except as stated in this section) patent license to make, have made,
77 use, offer to sell, sell, import, and otherwise transfer the Work,
78 where such license applies only to those patent claims licensable
79 by such Contributor that are necessarily infringed by their
80 Contribution(s) alone or by combination of their Contribution(s)
81 with the Work to which such Contribution(s) was submitted. If You
82 institute patent litigation against any entity (including a
83 cross-claim or counterclaim in a lawsuit) alleging that the Work
84 or a Contribution incorporated within the Work constitutes direct
85 or contributory patent infringement, then any patent licenses
86 granted to You under this License for that Work shall terminate
87 as of the date such litigation is filed.
88
89 4. Redistribution. You may reproduce and distribute copies of the
90 Work or Derivative Works thereof in any medium, with or without
91 modifications, and in Source or Object form, provided that You
92 meet the following conditions:
93
94 (a) You must give any other recipients of the Work or
95 Derivative Works a copy of this License; and
96
97 (b) You must cause any modified files to carry prominent notices
98 stating that You changed the files; and
99
100 (c) You must retain, in the Source form of any Derivative Works
101 that You distribute, all copyright, patent, trademark, and
102 attribution notices from the Source form of the Work,
103 excluding those notices that do not pertain to any part of
104 the Derivative Works; and
105
106 (d) If the Work includes a "NOTICE" text file as part of its
107 distribution, then any Derivative Works that You distribute must
108 include a readable copy of the attribution notices contained
109 within such NOTICE file, excluding those notices that do not
110 pertain to any part of the Derivative Works, in at least one
111 of the following places: within a NOTICE text file distributed
112 as part of the Derivative Works; within the Source form or
113 documentation, if provided along with the Derivative Works; or,
114 within a display generated by the Derivative Works, if and
115 wherever such third-party notices normally appear. The contents
116 of the NOTICE file are for informational purposes only and
117 do not modify the License. You may add Your own attribution
118 notices within Derivative Works that You distribute, alongside
119 or as an addendum to the NOTICE text from the Work, provided
120 that such additional attribution notices cannot be construed
121 as modifying the License.
122
123 You may add Your own copyright statement to Your modifications and
124 may provide additional or different license terms and conditions
125 for use, reproduction, or distribution of Your modifications, or
126 for any such Derivative Works as a whole, provided Your use,
127 reproduction, and distribution of the Work otherwise complies with
128 the conditions stated in this License.
129
130 5. Submission of Contributions. Unless You explicitly state otherwise,
131 any Contribution intentionally submitted for inclusion in the Work
132 by You to the Licensor shall be under the terms and conditions of
133 this License, without any additional terms or conditions.
134 Notwithstanding the above, nothing herein shall supersede or modify
135 the terms of any separate license agreement you may have executed
136 with Licensor regarding such Contributions.
137
138 6. Trademarks. This License does not grant permission to use the trade
139 names, trademarks, service marks, or product names of the Licensor,
140 except as required for reasonable and customary use in describing the
141 origin of the Work and reproducing the content of the NOTICE file.
142
143 7. Disclaimer of Warranty. Unless required by applicable law or
144 agreed to in writing, Licensor provides the Work (and each
145 Contributor provides its Contributions) on an "AS IS" BASIS,
146 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 implied, including, without limitation, any warranties or conditions
148 of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 PARTICULAR PURPOSE. You are solely responsible for determining the
150 appropriateness of using or redistributing the Work and assume any
151 risks associated with Your exercise of permissions under this License.
152
153 8. Limitation of Liability. In no event and under no legal theory,
154 whether in tort (including negligence), contract, or otherwise,
155 unless required by applicable law (such as deliberate and grossly
156 negligent acts) or agreed to in writing, shall any Contributor be
157 liable to You for damages, including any direct, indirect, special,
158 incidental, or consequential damages of any character arising as a
159 result of this License or out of the use or inability to use the
160 Work (including but not limited to damages for loss of goodwill,
161 work stoppage, computer failure or malfunction, or any and all
162 other commercial damages or losses), even if such Contributor
163 has been advised of the possibility of such damages.
164
165 9. Accepting Warranty or Additional Liability. While redistributing
166 the Work or Derivative Works thereof, You may choose to offer,
167 and charge a fee for, acceptance of support, warranty, indemnity,
168 or other liability obligations and/or rights consistent with this
169 License. However, in accepting such obligations, You may act only
170 on Your own behalf and on Your sole responsibility, not on behalf
171 of any other Contributor, and only if You agree to indemnify,
172 defend, and hold each Contributor harmless for any liability
173 incurred by, or claims asserted against, such Contributor by reason
174 of your accepting any such warranty or additional liability.
175
176 END OF TERMS AND CONDITIONS
177
178 APPENDIX: How to apply the Apache License to your work.
179
180 To apply the Apache License to your work, attach the following
181 boilerplate notice, with the fields enclosed by brackets "{}"
182 replaced with your own identifying information. (Don't include
183 the brackets!) The text should be enclosed in the appropriate
184 comment syntax for the file format. We also recommend that a
185 file or class name and description of purpose be included on the
186 same "printed page" as the copyright notice for easier
187 identification within third-party archives.
188
189 Copyright {yyyy} {name of copyright owner}
190
191 Licensed under the Apache License, Version 2.0 (the "License");
192 you may not use this file except in compliance with the License.
193 You may obtain a copy of the License at
194
195 http://www.apache.org/licenses/LICENSE-2.0
196
197 Unless required by applicable law or agreed to in writing, software
198 distributed under the License is distributed on an "AS IS" BASIS,
199 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 See the License for the specific language governing permissions and
201 limitations under the License.
diff --git a/vendor/gopkg.in/yaml.v2/LICENSE.libyaml b/vendor/gopkg.in/yaml.v2/LICENSE.libyaml
new file mode 100644
index 0000000..8da58fb
--- /dev/null
+++ b/vendor/gopkg.in/yaml.v2/LICENSE.libyaml
@@ -0,0 +1,31 @@
1The following files were ported to Go from C files of libyaml, and thus
2are still covered by their original copyright and license:
3
4 apic.go
5 emitterc.go
6 parserc.go
7 readerc.go
8 scannerc.go
9 writerc.go
10 yamlh.go
11 yamlprivateh.go
12
13Copyright (c) 2006 Kirill Simonov
14
15Permission is hereby granted, free of charge, to any person obtaining a copy of
16this software and associated documentation files (the "Software"), to deal in
17the Software without restriction, including without limitation the rights to
18use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
19of the Software, and to permit persons to whom the Software is furnished to do
20so, subject to the following conditions:
21
22The above copyright notice and this permission notice shall be included in all
23copies or substantial portions of the Software.
24
25THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
26IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
27FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
28AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
29LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
30OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
31SOFTWARE.
diff --git a/vendor/gopkg.in/yaml.v2/NOTICE b/vendor/gopkg.in/yaml.v2/NOTICE
new file mode 100644
index 0000000..866d74a
--- /dev/null
+++ b/vendor/gopkg.in/yaml.v2/NOTICE
@@ -0,0 +1,13 @@
1Copyright 2011-2016 Canonical Ltd.
2
3Licensed under the Apache License, Version 2.0 (the "License");
4you may not use this file except in compliance with the License.
5You may obtain a copy of the License at
6
7 http://www.apache.org/licenses/LICENSE-2.0
8
9Unless required by applicable law or agreed to in writing, software
10distributed under the License is distributed on an "AS IS" BASIS,
11WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12See the License for the specific language governing permissions and
13limitations under the License.
diff --git a/vendor/gopkg.in/yaml.v2/README.md b/vendor/gopkg.in/yaml.v2/README.md
new file mode 100644
index 0000000..b50c6e8
--- /dev/null
+++ b/vendor/gopkg.in/yaml.v2/README.md
@@ -0,0 +1,133 @@
1# YAML support for the Go language
2
3Introduction
4------------
5
6The yaml package enables Go programs to comfortably encode and decode YAML
7values. It was developed within [Canonical](https://www.canonical.com) as
8part of the [juju](https://juju.ubuntu.com) project, and is based on a
9pure Go port of the well-known [libyaml](http://pyyaml.org/wiki/LibYAML)
10C library to parse and generate YAML data quickly and reliably.
11
12Compatibility
13-------------
14
15The yaml package supports most of YAML 1.1 and 1.2, including support for
16anchors, tags, map merging, etc. Multi-document unmarshalling is not yet
17implemented, and base-60 floats from YAML 1.1 are purposefully not
18supported since they're a poor design and are gone in YAML 1.2.
19
20Installation and usage
21----------------------
22
23The import path for the package is *gopkg.in/yaml.v2*.
24
25To install it, run:
26
27 go get gopkg.in/yaml.v2
28
29API documentation
30-----------------
31
32If opened in a browser, the import path itself leads to the API documentation:
33
34 * [https://gopkg.in/yaml.v2](https://gopkg.in/yaml.v2)
35
36API stability
37-------------
38
39The package API for yaml v2 will remain stable as described in [gopkg.in](https://gopkg.in).
40
41
42License
43-------
44
45The yaml package is licensed under the Apache License 2.0. Please see the LICENSE file for details.
46
47
48Example
49-------
50
51```Go
52package main
53
54import (
55 "fmt"
56 "log"
57
58 "gopkg.in/yaml.v2"
59)
60
61var data = `
62a: Easy!
63b:
64 c: 2
65 d: [3, 4]
66`
67
68// Note: struct fields must be public in order for unmarshal to
69// correctly populate the data.
70type T struct {
71 A string
72 B struct {
73 RenamedC int `yaml:"c"`
74 D []int `yaml:",flow"`
75 }
76}
77
78func main() {
79 t := T{}
80
81 err := yaml.Unmarshal([]byte(data), &t)
82 if err != nil {
83 log.Fatalf("error: %v", err)
84 }
85 fmt.Printf("--- t:\n%v\n\n", t)
86
87 d, err := yaml.Marshal(&t)
88 if err != nil {
89 log.Fatalf("error: %v", err)
90 }
91 fmt.Printf("--- t dump:\n%s\n\n", string(d))
92
93 m := make(map[interface{}]interface{})
94
95 err = yaml.Unmarshal([]byte(data), &m)
96 if err != nil {
97 log.Fatalf("error: %v", err)
98 }
99 fmt.Printf("--- m:\n%v\n\n", m)
100
101 d, err = yaml.Marshal(&m)
102 if err != nil {
103 log.Fatalf("error: %v", err)
104 }
105 fmt.Printf("--- m dump:\n%s\n\n", string(d))
106}
107```
108
109This example will generate the following output:
110
111```
112--- t:
113{Easy! {2 [3 4]}}
114
115--- t dump:
116a: Easy!
117b:
118 c: 2
119 d: [3, 4]
120
121
122--- m:
123map[a:Easy! b:map[c:2 d:[3 4]]]
124
125--- m dump:
126a: Easy!
127b:
128 c: 2
129 d:
130 - 3
131 - 4
132```
133
diff --git a/vendor/gopkg.in/yaml.v2/apic.go b/vendor/gopkg.in/yaml.v2/apic.go
new file mode 100644
index 0000000..d2c2308
--- /dev/null
+++ b/vendor/gopkg.in/yaml.v2/apic.go
@@ -0,0 +1,740 @@
1package yaml
2
3import (
4 "io"
5)
6
7func yaml_insert_token(parser *yaml_parser_t, pos int, token *yaml_token_t) {
8 //fmt.Println("yaml_insert_token", "pos:", pos, "typ:", token.typ, "head:", parser.tokens_head, "len:", len(parser.tokens))
9
10 // Check if we can move the queue at the beginning of the buffer.
11 if parser.tokens_head > 0 && len(parser.tokens) == cap(parser.tokens) {
12 if parser.tokens_head != len(parser.tokens) {
13 copy(parser.tokens, parser.tokens[parser.tokens_head:])
14 }
15 parser.tokens = parser.tokens[:len(parser.tokens)-parser.tokens_head]
16 parser.tokens_head = 0
17 }
18 parser.tokens = append(parser.tokens, *token)
19 if pos < 0 {
20 return
21 }
22 copy(parser.tokens[parser.tokens_head+pos+1:], parser.tokens[parser.tokens_head+pos:])
23 parser.tokens[parser.tokens_head+pos] = *token
24}
25
26// Create a new parser object.
27func yaml_parser_initialize(parser *yaml_parser_t) bool {
28 *parser = yaml_parser_t{
29 raw_buffer: make([]byte, 0, input_raw_buffer_size),
30 buffer: make([]byte, 0, input_buffer_size),
31 }
32 return true
33}
34
35// Destroy a parser object.
36func yaml_parser_delete(parser *yaml_parser_t) {
37 *parser = yaml_parser_t{}
38}
39
40// String read handler.
41func yaml_string_read_handler(parser *yaml_parser_t, buffer []byte) (n int, err error) {
42 if parser.input_pos == len(parser.input) {
43 return 0, io.EOF
44 }
45 n = copy(buffer, parser.input[parser.input_pos:])
46 parser.input_pos += n
47 return n, nil
48}
49
50// Reader read handler.
51func yaml_reader_read_handler(parser *yaml_parser_t, buffer []byte) (n int, err error) {
52 return parser.input_reader.Read(buffer)
53}
54
55// Set a string input.
56func yaml_parser_set_input_string(parser *yaml_parser_t, input []byte) {
57 if parser.read_handler != nil {
58 panic("must set the input source only once")
59 }
60 parser.read_handler = yaml_string_read_handler
61 parser.input = input
62 parser.input_pos = 0
63}
64
65// Set a file input.
66func yaml_parser_set_input_reader(parser *yaml_parser_t, r io.Reader) {
67 if parser.read_handler != nil {
68 panic("must set the input source only once")
69 }
70 parser.read_handler = yaml_reader_read_handler
71 parser.input_reader = r
72}
73
74// Set the source encoding.
75func yaml_parser_set_encoding(parser *yaml_parser_t, encoding yaml_encoding_t) {
76 if parser.encoding != yaml_ANY_ENCODING {
77 panic("must set the encoding only once")
78 }
79 parser.encoding = encoding
80}
81
82// Create a new emitter object.
83func yaml_emitter_initialize(emitter *yaml_emitter_t) {
84 *emitter = yaml_emitter_t{
85 buffer: make([]byte, output_buffer_size),
86 raw_buffer: make([]byte, 0, output_raw_buffer_size),
87 states: make([]yaml_emitter_state_t, 0, initial_stack_size),
88 events: make([]yaml_event_t, 0, initial_queue_size),
89 best_width: -1,
90 }
91}
92
93// Destroy an emitter object.
94func yaml_emitter_delete(emitter *yaml_emitter_t) {
95 *emitter = yaml_emitter_t{}
96}
97
98// String write handler.
99func yaml_string_write_handler(emitter *yaml_emitter_t, buffer []byte) error {
100 *emitter.output_buffer = append(*emitter.output_buffer, buffer...)
101 return nil
102}
103
104// yaml_writer_write_handler uses emitter.output_writer to write the
105// emitted text.
106func yaml_writer_write_handler(emitter *yaml_emitter_t, buffer []byte) error {
107 _, err := emitter.output_writer.Write(buffer)
108 return err
109}
110
111// Set a string output.
112func yaml_emitter_set_output_string(emitter *yaml_emitter_t, output_buffer *[]byte) {
113 if emitter.write_handler != nil {
114 panic("must set the output target only once")
115 }
116 emitter.write_handler = yaml_string_write_handler
117 emitter.output_buffer = output_buffer
118}
119
120// Set a file output.
121func yaml_emitter_set_output_writer(emitter *yaml_emitter_t, w io.Writer) {
122 if emitter.write_handler != nil {
123 panic("must set the output target only once")
124 }
125 emitter.write_handler = yaml_writer_write_handler
126 emitter.output_writer = w
127}
128
129// Set the output encoding.
130func yaml_emitter_set_encoding(emitter *yaml_emitter_t, encoding yaml_encoding_t) {
131 if emitter.encoding != yaml_ANY_ENCODING {
132 panic("must set the output encoding only once")
133 }
134 emitter.encoding = encoding
135}
136
137// Set the canonical output style.
138func yaml_emitter_set_canonical(emitter *yaml_emitter_t, canonical bool) {
139 emitter.canonical = canonical
140}
141
142//// Set the indentation increment.
143func yaml_emitter_set_indent(emitter *yaml_emitter_t, indent int) {
144 if indent < 2 || indent > 9 {
145 indent = 2
146 }
147 emitter.best_indent = indent
148}
149
150// Set the preferred line width.
151func yaml_emitter_set_width(emitter *yaml_emitter_t, width int) {
152 if width < 0 {
153 width = -1
154 }
155 emitter.best_width = width
156}
157
158// Set if unescaped non-ASCII characters are allowed.
159func yaml_emitter_set_unicode(emitter *yaml_emitter_t, unicode bool) {
160 emitter.unicode = unicode
161}
162
163// Set the preferred line break character.
164func yaml_emitter_set_break(emitter *yaml_emitter_t, line_break yaml_break_t) {
165 emitter.line_break = line_break
166}
167
168///*
169// * Destroy a token object.
170// */
171//
172//YAML_DECLARE(void)
173//yaml_token_delete(yaml_token_t *token)
174//{
175// assert(token); // Non-NULL token object expected.
176//
177// switch (token.type)
178// {
179// case YAML_TAG_DIRECTIVE_TOKEN:
180// yaml_free(token.data.tag_directive.handle);
181// yaml_free(token.data.tag_directive.prefix);
182// break;
183//
184// case YAML_ALIAS_TOKEN:
185// yaml_free(token.data.alias.value);
186// break;
187//
188// case YAML_ANCHOR_TOKEN:
189// yaml_free(token.data.anchor.value);
190// break;
191//
192// case YAML_TAG_TOKEN:
193// yaml_free(token.data.tag.handle);
194// yaml_free(token.data.tag.suffix);
195// break;
196//
197// case YAML_SCALAR_TOKEN:
198// yaml_free(token.data.scalar.value);
199// break;
200//
201// default:
202// break;
203// }
204//
205// memset(token, 0, sizeof(yaml_token_t));
206//}
207//
208///*
209// * Check if a string is a valid UTF-8 sequence.
210// *
211// * Check 'reader.c' for more details on UTF-8 encoding.
212// */
213//
214//static int
215//yaml_check_utf8(yaml_char_t *start, size_t length)
216//{
217// yaml_char_t *end = start+length;
218// yaml_char_t *pointer = start;
219//
220// while (pointer < end) {
221// unsigned char octet;
222// unsigned int width;
223// unsigned int value;
224// size_t k;
225//
226// octet = pointer[0];
227// width = (octet & 0x80) == 0x00 ? 1 :
228// (octet & 0xE0) == 0xC0 ? 2 :
229// (octet & 0xF0) == 0xE0 ? 3 :
230// (octet & 0xF8) == 0xF0 ? 4 : 0;
231// value = (octet & 0x80) == 0x00 ? octet & 0x7F :
232// (octet & 0xE0) == 0xC0 ? octet & 0x1F :
233// (octet & 0xF0) == 0xE0 ? octet & 0x0F :
234// (octet & 0xF8) == 0xF0 ? octet & 0x07 : 0;
235// if (!width) return 0;
236// if (pointer+width > end) return 0;
237// for (k = 1; k < width; k ++) {
238// octet = pointer[k];
239// if ((octet & 0xC0) != 0x80) return 0;
240// value = (value << 6) + (octet & 0x3F);
241// }
242// if (!((width == 1) ||
243// (width == 2 && value >= 0x80) ||
244// (width == 3 && value >= 0x800) ||
245// (width == 4 && value >= 0x10000))) return 0;
246//
247// pointer += width;
248// }
249//
250// return 1;
251//}
252//
253
254// Create STREAM-START.
255func yaml_stream_start_event_initialize(event *yaml_event_t, encoding yaml_encoding_t) {
256 *event = yaml_event_t{
257 typ: yaml_STREAM_START_EVENT,
258 encoding: encoding,
259 }
260}
261
262// Create STREAM-END.
263func yaml_stream_end_event_initialize(event *yaml_event_t) {
264 *event = yaml_event_t{
265 typ: yaml_STREAM_END_EVENT,
266 }
267}
268
269// Create DOCUMENT-START.
270func yaml_document_start_event_initialize(
271 event *yaml_event_t,
272 version_directive *yaml_version_directive_t,
273 tag_directives []yaml_tag_directive_t,
274 implicit bool,
275) {
276 *event = yaml_event_t{
277 typ: yaml_DOCUMENT_START_EVENT,
278 version_directive: version_directive,
279 tag_directives: tag_directives,
280 implicit: implicit,
281 }
282}
283
284// Create DOCUMENT-END.
285func yaml_document_end_event_initialize(event *yaml_event_t, implicit bool) {
286 *event = yaml_event_t{
287 typ: yaml_DOCUMENT_END_EVENT,
288 implicit: implicit,
289 }
290}
291
292///*
293// * Create ALIAS.
294// */
295//
296//YAML_DECLARE(int)
297//yaml_alias_event_initialize(event *yaml_event_t, anchor *yaml_char_t)
298//{
299// mark yaml_mark_t = { 0, 0, 0 }
300// anchor_copy *yaml_char_t = NULL
301//
302// assert(event) // Non-NULL event object is expected.
303// assert(anchor) // Non-NULL anchor is expected.
304//
305// if (!yaml_check_utf8(anchor, strlen((char *)anchor))) return 0
306//
307// anchor_copy = yaml_strdup(anchor)
308// if (!anchor_copy)
309// return 0
310//
311// ALIAS_EVENT_INIT(*event, anchor_copy, mark, mark)
312//
313// return 1
314//}
315
316// Create SCALAR.
317func yaml_scalar_event_initialize(event *yaml_event_t, anchor, tag, value []byte, plain_implicit, quoted_implicit bool, style yaml_scalar_style_t) bool {
318 *event = yaml_event_t{
319 typ: yaml_SCALAR_EVENT,
320 anchor: anchor,
321 tag: tag,
322 value: value,
323 implicit: plain_implicit,
324 quoted_implicit: quoted_implicit,
325 style: yaml_style_t(style),
326 }
327 return true
328}
329
330// Create SEQUENCE-START.
331func yaml_sequence_start_event_initialize(event *yaml_event_t, anchor, tag []byte, implicit bool, style yaml_sequence_style_t) bool {
332 *event = yaml_event_t{
333 typ: yaml_SEQUENCE_START_EVENT,
334 anchor: anchor,
335 tag: tag,
336 implicit: implicit,
337 style: yaml_style_t(style),
338 }
339 return true
340}
341
342// Create SEQUENCE-END.
343func yaml_sequence_end_event_initialize(event *yaml_event_t) bool {
344 *event = yaml_event_t{
345 typ: yaml_SEQUENCE_END_EVENT,
346 }
347 return true
348}
349
350// Create MAPPING-START.
351func yaml_mapping_start_event_initialize(event *yaml_event_t, anchor, tag []byte, implicit bool, style yaml_mapping_style_t) {
352 *event = yaml_event_t{
353 typ: yaml_MAPPING_START_EVENT,
354 anchor: anchor,
355 tag: tag,
356 implicit: implicit,
357 style: yaml_style_t(style),
358 }
359}
360
361// Create MAPPING-END.
362func yaml_mapping_end_event_initialize(event *yaml_event_t) {
363 *event = yaml_event_t{
364 typ: yaml_MAPPING_END_EVENT,
365 }
366}
367
368// Destroy an event object.
369func yaml_event_delete(event *yaml_event_t) {
370 *event = yaml_event_t{}
371}
372
373///*
374// * Create a document object.
375// */
376//
377//YAML_DECLARE(int)
378//yaml_document_initialize(document *yaml_document_t,
379// version_directive *yaml_version_directive_t,
380// tag_directives_start *yaml_tag_directive_t,
381// tag_directives_end *yaml_tag_directive_t,
382// start_implicit int, end_implicit int)
383//{
384// struct {
385// error yaml_error_type_t
386// } context
387// struct {
388// start *yaml_node_t
389// end *yaml_node_t
390// top *yaml_node_t
391// } nodes = { NULL, NULL, NULL }
392// version_directive_copy *yaml_version_directive_t = NULL
393// struct {
394// start *yaml_tag_directive_t
395// end *yaml_tag_directive_t
396// top *yaml_tag_directive_t
397// } tag_directives_copy = { NULL, NULL, NULL }
398// value yaml_tag_directive_t = { NULL, NULL }
399// mark yaml_mark_t = { 0, 0, 0 }
400//
401// assert(document) // Non-NULL document object is expected.
402// assert((tag_directives_start && tag_directives_end) ||
403// (tag_directives_start == tag_directives_end))
404// // Valid tag directives are expected.
405//
406// if (!STACK_INIT(&context, nodes, INITIAL_STACK_SIZE)) goto error
407//
408// if (version_directive) {
409// version_directive_copy = yaml_malloc(sizeof(yaml_version_directive_t))
410// if (!version_directive_copy) goto error
411// version_directive_copy.major = version_directive.major
412// version_directive_copy.minor = version_directive.minor
413// }
414//
415// if (tag_directives_start != tag_directives_end) {
416// tag_directive *yaml_tag_directive_t
417// if (!STACK_INIT(&context, tag_directives_copy, INITIAL_STACK_SIZE))
418// goto error
419// for (tag_directive = tag_directives_start
420// tag_directive != tag_directives_end; tag_directive ++) {
421// assert(tag_directive.handle)
422// assert(tag_directive.prefix)
423// if (!yaml_check_utf8(tag_directive.handle,
424// strlen((char *)tag_directive.handle)))
425// goto error
426// if (!yaml_check_utf8(tag_directive.prefix,
427// strlen((char *)tag_directive.prefix)))
428// goto error
429// value.handle = yaml_strdup(tag_directive.handle)
430// value.prefix = yaml_strdup(tag_directive.prefix)
431// if (!value.handle || !value.prefix) goto error
432// if (!PUSH(&context, tag_directives_copy, value))
433// goto error
434// value.handle = NULL
435// value.prefix = NULL
436// }
437// }
438//
439// DOCUMENT_INIT(*document, nodes.start, nodes.end, version_directive_copy,
440// tag_directives_copy.start, tag_directives_copy.top,
441// start_implicit, end_implicit, mark, mark)
442//
443// return 1
444//
445//error:
446// STACK_DEL(&context, nodes)
447// yaml_free(version_directive_copy)
448// while (!STACK_EMPTY(&context, tag_directives_copy)) {
449// value yaml_tag_directive_t = POP(&context, tag_directives_copy)
450// yaml_free(value.handle)
451// yaml_free(value.prefix)
452// }
453// STACK_DEL(&context, tag_directives_copy)
454// yaml_free(value.handle)
455// yaml_free(value.prefix)
456//
457// return 0
458//}
459//
460///*
461// * Destroy a document object.
462// */
463//
464//YAML_DECLARE(void)
465//yaml_document_delete(document *yaml_document_t)
466//{
467// struct {
468// error yaml_error_type_t
469// } context
470// tag_directive *yaml_tag_directive_t
471//
472// context.error = YAML_NO_ERROR // Eliminate a compiler warning.
473//
474// assert(document) // Non-NULL document object is expected.
475//
476// while (!STACK_EMPTY(&context, document.nodes)) {
477// node yaml_node_t = POP(&context, document.nodes)
478// yaml_free(node.tag)
479// switch (node.type) {
480// case YAML_SCALAR_NODE:
481// yaml_free(node.data.scalar.value)
482// break
483// case YAML_SEQUENCE_NODE:
484// STACK_DEL(&context, node.data.sequence.items)
485// break
486// case YAML_MAPPING_NODE:
487// STACK_DEL(&context, node.data.mapping.pairs)
488// break
489// default:
490// assert(0) // Should not happen.
491// }
492// }
493// STACK_DEL(&context, document.nodes)
494//
495// yaml_free(document.version_directive)
496// for (tag_directive = document.tag_directives.start
497// tag_directive != document.tag_directives.end
498// tag_directive++) {
499// yaml_free(tag_directive.handle)
500// yaml_free(tag_directive.prefix)
501// }
502// yaml_free(document.tag_directives.start)
503//
504// memset(document, 0, sizeof(yaml_document_t))
505//}
506//
507///**
508// * Get a document node.
509// */
510//
511//YAML_DECLARE(yaml_node_t *)
512//yaml_document_get_node(document *yaml_document_t, index int)
513//{
514// assert(document) // Non-NULL document object is expected.
515//
516// if (index > 0 && document.nodes.start + index <= document.nodes.top) {
517// return document.nodes.start + index - 1
518// }
519// return NULL
520//}
521//
522///**
523// * Get the root object.
524// */
525//
526//YAML_DECLARE(yaml_node_t *)
527//yaml_document_get_root_node(document *yaml_document_t)
528//{
529// assert(document) // Non-NULL document object is expected.
530//
531// if (document.nodes.top != document.nodes.start) {
532// return document.nodes.start
533// }
534// return NULL
535//}
536//
537///*
538// * Add a scalar node to a document.
539// */
540//
541//YAML_DECLARE(int)
542//yaml_document_add_scalar(document *yaml_document_t,
543// tag *yaml_char_t, value *yaml_char_t, length int,
544// style yaml_scalar_style_t)
545//{
546// struct {
547// error yaml_error_type_t
548// } context
549// mark yaml_mark_t = { 0, 0, 0 }
550// tag_copy *yaml_char_t = NULL
551// value_copy *yaml_char_t = NULL
552// node yaml_node_t
553//
554// assert(document) // Non-NULL document object is expected.
555// assert(value) // Non-NULL value is expected.
556//
557// if (!tag) {
558// tag = (yaml_char_t *)YAML_DEFAULT_SCALAR_TAG
559// }
560//
561// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error
562// tag_copy = yaml_strdup(tag)
563// if (!tag_copy) goto error
564//
565// if (length < 0) {
566// length = strlen((char *)value)
567// }
568//
569// if (!yaml_check_utf8(value, length)) goto error
570// value_copy = yaml_malloc(length+1)
571// if (!value_copy) goto error
572// memcpy(value_copy, value, length)
573// value_copy[length] = '\0'
574//
575// SCALAR_NODE_INIT(node, tag_copy, value_copy, length, style, mark, mark)
576// if (!PUSH(&context, document.nodes, node)) goto error
577//
578// return document.nodes.top - document.nodes.start
579//
580//error:
581// yaml_free(tag_copy)
582// yaml_free(value_copy)
583//
584// return 0
585//}
586//
587///*
588// * Add a sequence node to a document.
589// */
590//
591//YAML_DECLARE(int)
592//yaml_document_add_sequence(document *yaml_document_t,
593// tag *yaml_char_t, style yaml_sequence_style_t)
594//{
595// struct {
596// error yaml_error_type_t
597// } context
598// mark yaml_mark_t = { 0, 0, 0 }
599// tag_copy *yaml_char_t = NULL
600// struct {
601// start *yaml_node_item_t
602// end *yaml_node_item_t
603// top *yaml_node_item_t
604// } items = { NULL, NULL, NULL }
605// node yaml_node_t
606//
607// assert(document) // Non-NULL document object is expected.
608//
609// if (!tag) {
610// tag = (yaml_char_t *)YAML_DEFAULT_SEQUENCE_TAG
611// }
612//
613// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error
614// tag_copy = yaml_strdup(tag)
615// if (!tag_copy) goto error
616//
617// if (!STACK_INIT(&context, items, INITIAL_STACK_SIZE)) goto error
618//
619// SEQUENCE_NODE_INIT(node, tag_copy, items.start, items.end,
620// style, mark, mark)
621// if (!PUSH(&context, document.nodes, node)) goto error
622//
623// return document.nodes.top - document.nodes.start
624//
625//error:
626// STACK_DEL(&context, items)
627// yaml_free(tag_copy)
628//
629// return 0
630//}
631//
632///*
633// * Add a mapping node to a document.
634// */
635//
636//YAML_DECLARE(int)
637//yaml_document_add_mapping(document *yaml_document_t,
638// tag *yaml_char_t, style yaml_mapping_style_t)
639//{
640// struct {
641// error yaml_error_type_t
642// } context
643// mark yaml_mark_t = { 0, 0, 0 }
644// tag_copy *yaml_char_t = NULL
645// struct {
646// start *yaml_node_pair_t
647// end *yaml_node_pair_t
648// top *yaml_node_pair_t
649// } pairs = { NULL, NULL, NULL }
650// node yaml_node_t
651//
652// assert(document) // Non-NULL document object is expected.
653//
654// if (!tag) {
655// tag = (yaml_char_t *)YAML_DEFAULT_MAPPING_TAG
656// }
657//
658// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error
659// tag_copy = yaml_strdup(tag)
660// if (!tag_copy) goto error
661//
662// if (!STACK_INIT(&context, pairs, INITIAL_STACK_SIZE)) goto error
663//
664// MAPPING_NODE_INIT(node, tag_copy, pairs.start, pairs.end,
665// style, mark, mark)
666// if (!PUSH(&context, document.nodes, node)) goto error
667//
668// return document.nodes.top - document.nodes.start
669//
670//error:
671// STACK_DEL(&context, pairs)
672// yaml_free(tag_copy)
673//
674// return 0
675//}
676//
677///*
678// * Append an item to a sequence node.
679// */
680//
681//YAML_DECLARE(int)
682//yaml_document_append_sequence_item(document *yaml_document_t,
683// sequence int, item int)
684//{
685// struct {
686// error yaml_error_type_t
687// } context
688//
689// assert(document) // Non-NULL document is required.
690// assert(sequence > 0
691// && document.nodes.start + sequence <= document.nodes.top)
692// // Valid sequence id is required.
693// assert(document.nodes.start[sequence-1].type == YAML_SEQUENCE_NODE)
694// // A sequence node is required.
695// assert(item > 0 && document.nodes.start + item <= document.nodes.top)
696// // Valid item id is required.
697//
698// if (!PUSH(&context,
699// document.nodes.start[sequence-1].data.sequence.items, item))
700// return 0
701//
702// return 1
703//}
704//
705///*
706// * Append a pair of a key and a value to a mapping node.
707// */
708//
709//YAML_DECLARE(int)
710//yaml_document_append_mapping_pair(document *yaml_document_t,
711// mapping int, key int, value int)
712//{
713// struct {
714// error yaml_error_type_t
715// } context
716//
717// pair yaml_node_pair_t
718//
719// assert(document) // Non-NULL document is required.
720// assert(mapping > 0
721// && document.nodes.start + mapping <= document.nodes.top)
722// // Valid mapping id is required.
723// assert(document.nodes.start[mapping-1].type == YAML_MAPPING_NODE)
724// // A mapping node is required.
725// assert(key > 0 && document.nodes.start + key <= document.nodes.top)
726// // Valid key id is required.
727// assert(value > 0 && document.nodes.start + value <= document.nodes.top)
728// // Valid value id is required.
729//
730// pair.key = key
731// pair.value = value
732//
733// if (!PUSH(&context,
734// document.nodes.start[mapping-1].data.mapping.pairs, pair))
735// return 0
736//
737// return 1
738//}
739//
740//
diff --git a/vendor/gopkg.in/yaml.v2/decode.go b/vendor/gopkg.in/yaml.v2/decode.go
new file mode 100644
index 0000000..129bc2a
--- /dev/null
+++ b/vendor/gopkg.in/yaml.v2/decode.go
@@ -0,0 +1,815 @@
1package yaml
2
3import (
4 "encoding"
5 "encoding/base64"
6 "fmt"
7 "io"
8 "math"
9 "reflect"
10 "strconv"
11 "time"
12)
13
// Node kinds produced by the parser. documentNode marks the root of a
// parsed document; the other kinds mirror the libyaml event types.
const (
	documentNode = 1 << iota
	mappingNode
	sequenceNode
	scalarNode
	aliasNode
)
21
// node is the in-memory representation of a single YAML node, built by the
// parser from the libyaml event stream and later walked by the decoder.
type node struct {
	kind         int // one of the *Node kind constants above
	line, column int // position of the node in the input, from libyaml marks
	tag          string // explicit or resolved YAML tag (may be empty)
	// For an alias node, alias holds the resolved alias.
	alias    *node
	value    string // scalar text, or the anchor name for alias nodes
	implicit bool   // whether the tag was implicit rather than written in the source
	children []*node // document root, sequence items, or interleaved mapping keys/values
	anchors  map[string]*node // document nodes only: anchor name -> anchored node
}
33
34// ----------------------------------------------------------------------------
35// Parser, produces a node tree out of a libyaml event stream.
36
// parser wraps a libyaml parser and converts its event stream into a node
// tree, one document at a time.
type parser struct {
	parser   yaml_parser_t // underlying libyaml parser state
	event    yaml_event_t  // most recently parsed, not-yet-consumed event
	doc      *node         // document node currently being built (owns the anchors table)
	doneInit bool          // whether the STREAM-START event has been consumed
}
43
44func newParser(b []byte) *parser {
45 p := parser{}
46 if !yaml_parser_initialize(&p.parser) {
47 panic("failed to initialize YAML emitter")
48 }
49 if len(b) == 0 {
50 b = []byte{'\n'}
51 }
52 yaml_parser_set_input_string(&p.parser, b)
53 return &p
54}
55
56func newParserFromReader(r io.Reader) *parser {
57 p := parser{}
58 if !yaml_parser_initialize(&p.parser) {
59 panic("failed to initialize YAML emitter")
60 }
61 yaml_parser_set_input_reader(&p.parser, r)
62 return &p
63}
64
65func (p *parser) init() {
66 if p.doneInit {
67 return
68 }
69 p.expect(yaml_STREAM_START_EVENT)
70 p.doneInit = true
71}
72
73func (p *parser) destroy() {
74 if p.event.typ != yaml_NO_EVENT {
75 yaml_event_delete(&p.event)
76 }
77 yaml_parser_delete(&p.parser)
78}
79
// expect consumes an event from the event stream and
// checks that it's of the expected type.
func (p *parser) expect(e yaml_event_type_t) {
	if p.event.typ == yaml_NO_EVENT {
		// No event buffered by a previous peek; parse one now.
		if !yaml_parser_parse(&p.parser, &p.event) {
			p.fail()
		}
	}
	if p.event.typ == yaml_STREAM_END_EVENT {
		failf("attempted to go past the end of stream; corrupted value?")
	}
	if p.event.typ != e {
		p.parser.problem = fmt.Sprintf("expected %s event but got %s", e, p.event.typ)
		p.fail()
	}
	// Release the event and mark the buffer empty so the next peek or
	// expect call parses a fresh event.
	yaml_event_delete(&p.event)
	p.event.typ = yaml_NO_EVENT
}
98
99// peek peeks at the next event in the event stream,
100// puts the results into p.event and returns the event type.
101func (p *parser) peek() yaml_event_type_t {
102 if p.event.typ != yaml_NO_EVENT {
103 return p.event.typ
104 }
105 if !yaml_parser_parse(&p.parser, &p.event) {
106 p.fail()
107 }
108 return p.event.typ
109}
110
// fail reports the libyaml parser's current error through failf, prefixing
// the message with the most specific line number available.
func (p *parser) fail() {
	var where string
	var line int
	if p.parser.problem_mark.line != 0 {
		line = p.parser.problem_mark.line
		// Scanner errors don't iterate line before returning error
		if p.parser.error == yaml_SCANNER_ERROR {
			line++
		}
	} else if p.parser.context_mark.line != 0 {
		// No problem mark was recorded; fall back to the context mark.
		line = p.parser.context_mark.line
	}
	if line != 0 {
		where = "line " + strconv.Itoa(line) + ": "
	}
	var msg string
	if len(p.parser.problem) > 0 {
		msg = p.parser.problem
	} else {
		msg = "unknown problem parsing YAML content"
	}
	failf("%s%s", where, msg)
}
134
135func (p *parser) anchor(n *node, anchor []byte) {
136 if anchor != nil {
137 p.doc.anchors[string(anchor)] = n
138 }
139}
140
// parse converts the next event(s) in the stream into a single node,
// dispatching on the peeked event type. It returns nil at end of stream.
func (p *parser) parse() *node {
	p.init()
	switch p.peek() {
	case yaml_SCALAR_EVENT:
		return p.scalar()
	case yaml_ALIAS_EVENT:
		return p.alias()
	case yaml_MAPPING_START_EVENT:
		return p.mapping()
	case yaml_SEQUENCE_START_EVENT:
		return p.sequence()
	case yaml_DOCUMENT_START_EVENT:
		return p.document()
	case yaml_STREAM_END_EVENT:
		// Happens when attempting to decode an empty buffer.
		return nil
	default:
		panic("attempted to parse unknown event: " + p.event.typ.String())
	}
}
161
162func (p *parser) node(kind int) *node {
163 return &node{
164 kind: kind,
165 line: p.event.start_mark.line,
166 column: p.event.start_mark.column,
167 }
168}
169
// document builds a document node with its single child. The anchors table
// is attached to the document node before the child is parsed so aliases
// inside the document can resolve against it.
func (p *parser) document() *node {
	n := p.node(documentNode)
	n.anchors = make(map[string]*node)
	p.doc = n
	p.expect(yaml_DOCUMENT_START_EVENT)
	n.children = append(n.children, p.parse())
	p.expect(yaml_DOCUMENT_END_EVENT)
	return n
}
179
// alias builds an alias node, resolving the anchor name against the current
// document's anchor table. An unknown anchor is a decoding error.
func (p *parser) alias() *node {
	n := p.node(aliasNode)
	n.value = string(p.event.anchor)
	n.alias = p.doc.anchors[n.value]
	if n.alias == nil {
		failf("unknown anchor '%s' referenced", n.value)
	}
	p.expect(yaml_ALIAS_EVENT)
	return n
}
190
// scalar builds a scalar node from the current event, recording its value,
// tag, implicitness and (if present) anchor.
func (p *parser) scalar() *node {
	n := p.node(scalarNode)
	n.value = string(p.event.value)
	n.tag = string(p.event.tag)
	n.implicit = p.event.implicit
	p.anchor(n, p.event.anchor)
	p.expect(yaml_SCALAR_EVENT)
	return n
}
200
201func (p *parser) sequence() *node {
202 n := p.node(sequenceNode)
203 p.anchor(n, p.event.anchor)
204 p.expect(yaml_SEQUENCE_START_EVENT)
205 for p.peek() != yaml_SEQUENCE_END_EVENT {
206 n.children = append(n.children, p.parse())
207 }
208 p.expect(yaml_SEQUENCE_END_EVENT)
209 return n
210}
211
// mapping builds a mapping node; children holds keys and values interleaved
// as [k1, v1, k2, v2, ...].
func (p *parser) mapping() *node {
	n := p.node(mappingNode)
	p.anchor(n, p.event.anchor)
	p.expect(yaml_MAPPING_START_EVENT)
	for p.peek() != yaml_MAPPING_END_EVENT {
		n.children = append(n.children, p.parse(), p.parse())
	}
	p.expect(yaml_MAPPING_END_EVENT)
	return n
}
222
223// ----------------------------------------------------------------------------
224// Decoder, unmarshals a node into a provided value.
225
// decoder unmarshals a parsed node tree into Go values.
type decoder struct {
	doc     *node          // document currently being decoded
	aliases map[*node]bool // alias nodes being expanded, to detect self-reference
	mapType reflect.Type   // map type hint used when decoding into interface{}
	terrors []string       // accumulated type errors, surfaced as one TypeError
	strict  bool           // reject duplicate keys and unknown struct fields

	// Counters used to bound the work done by alias expansion
	// (protection against "billion laughs"-style documents).
	decodeCount int
	aliasCount  int
	aliasDepth  int
}
237
// Reflection types looked up once and reused throughout decoding.
var (
	mapItemType    = reflect.TypeOf(MapItem{})
	durationType   = reflect.TypeOf(time.Duration(0))
	defaultMapType = reflect.TypeOf(map[interface{}]interface{}{})
	ifaceType      = defaultMapType.Elem()
	timeType       = reflect.TypeOf(time.Time{})
	ptrTimeType    = reflect.TypeOf(&time.Time{})
)
246
247func newDecoder(strict bool) *decoder {
248 d := &decoder{mapType: defaultMapType, strict: strict}
249 d.aliases = make(map[*node]bool)
250 return d
251}
252
// terror records a type-mismatch error for node n being decoded into out,
// including a short excerpt of the offending value for non-collection tags.
func (d *decoder) terror(n *node, tag string, out reflect.Value) {
	if n.tag != "" {
		// Prefer the node's explicit tag over the caller-supplied one.
		tag = n.tag
	}
	value := n.value
	if tag != yaml_SEQ_TAG && tag != yaml_MAP_TAG {
		// Quote the scalar value, truncating long ones to keep messages short.
		if len(value) > 10 {
			value = " `" + value[:7] + "...`"
		} else {
			value = " `" + value + "`"
		}
	}
	d.terrors = append(d.terrors, fmt.Sprintf("line %d: cannot unmarshal %s%s into %s", n.line+1, shortTag(tag), value, out.Type()))
}
267
// callUnmarshaler invokes the custom Unmarshaler u for node n. Type errors
// raised while decoding inside the callback are handed to the Unmarshaler
// as a *TypeError; any other error aborts decoding via fail.
func (d *decoder) callUnmarshaler(n *node, u Unmarshaler) (good bool) {
	terrlen := len(d.terrors)
	err := u.UnmarshalYAML(func(v interface{}) (err error) {
		defer handleErr(&err)
		d.unmarshal(n, reflect.ValueOf(v))
		if len(d.terrors) > terrlen {
			// Move type errors from this subtree out of the global list
			// and into the error returned to the Unmarshaler.
			issues := d.terrors[terrlen:]
			d.terrors = d.terrors[:terrlen]
			return &TypeError{issues}
		}
		return nil
	})
	if e, ok := err.(*TypeError); ok {
		// The Unmarshaler returned (or propagated) type errors: record
		// them and report failure without aborting the whole decode.
		d.terrors = append(d.terrors, e.Errors...)
		return false
	}
	if err != nil {
		fail(err)
	}
	return true
}
289
// d.prepare initializes and dereferences pointers and calls UnmarshalYAML
// if a value is found to implement it.
// It returns the initialized and dereferenced out value, whether
// unmarshalling was already done by UnmarshalYAML, and if so whether
// its types unmarshalled appropriately.
//
// If n holds a null value, prepare returns before doing anything.
func (d *decoder) prepare(n *node, out reflect.Value) (newout reflect.Value, unmarshaled, good bool) {
	if n.tag == yaml_NULL_TAG || n.kind == scalarNode && n.tag == "" && (n.value == "null" || n.value == "~" || n.value == "" && n.implicit) {
		return out, false, false
	}
	again := true
	for again {
		again = false
		if out.Kind() == reflect.Ptr {
			// Allocate through nil pointers and keep dereferencing until
			// a non-pointer value is reached.
			if out.IsNil() {
				out.Set(reflect.New(out.Type().Elem()))
			}
			out = out.Elem()
			again = true
		}
		if out.CanAddr() {
			// A custom Unmarshaler takes over decoding entirely.
			if u, ok := out.Addr().Interface().(Unmarshaler); ok {
				good = d.callUnmarshaler(n, u)
				return out, true, good
			}
		}
	}
	return out, false, false
}
320
// Limits used by unmarshal to reject documents whose alias expansion would
// consume excessive resources (see allowedAliasRatio).
const (
	// 400,000 decode operations is ~500kb of dense object declarations, or
	// ~5kb of dense object declarations with 10000% alias expansion
	alias_ratio_range_low = 400000

	// 4,000,000 decode operations is ~5MB of dense object declarations, or
	// ~4.5MB of dense object declarations with 10% alias expansion
	alias_ratio_range_high = 4000000

	// alias_ratio_range is the range over which we scale allowed alias ratios
	alias_ratio_range = float64(alias_ratio_range_high - alias_ratio_range_low)
)
333
334func allowedAliasRatio(decodeCount int) float64 {
335 switch {
336 case decodeCount <= alias_ratio_range_low:
337 // allow 99% to come from alias expansion for small-to-medium documents
338 return 0.99
339 case decodeCount >= alias_ratio_range_high:
340 // allow 10% to come from alias expansion for very large documents
341 return 0.10
342 default:
343 // scale smoothly from 99% down to 10% over the range.
344 // this maps to 396,000 - 400,000 allowed alias-driven decodes over the range.
345 // 400,000 decode operations is ~100MB of allocations in worst-case scenarios (single-item maps).
346 return 0.99 - 0.89*(float64(decodeCount-alias_ratio_range_low)/alias_ratio_range)
347 }
348}
349
// unmarshal decodes node n into out, dispatching on the node kind. It also
// maintains the decode/alias counters that guard against documents whose
// alias expansion would be excessively expensive.
func (d *decoder) unmarshal(n *node, out reflect.Value) (good bool) {
	d.decodeCount++
	if d.aliasDepth > 0 {
		// Any decode performed while expanding an alias counts toward
		// the alias budget.
		d.aliasCount++
	}
	if d.aliasCount > 100 && d.decodeCount > 1000 && float64(d.aliasCount)/float64(d.decodeCount) > allowedAliasRatio(d.decodeCount) {
		failf("document contains excessive aliasing")
	}
	switch n.kind {
	case documentNode:
		return d.document(n, out)
	case aliasNode:
		return d.alias(n, out)
	}
	// prepare may dereference pointers and run a custom Unmarshaler.
	out, unmarshaled, good := d.prepare(n, out)
	if unmarshaled {
		return good
	}
	switch n.kind {
	case scalarNode:
		good = d.scalar(n, out)
	case mappingNode:
		good = d.mapping(n, out)
	case sequenceNode:
		good = d.sequence(n, out)
	default:
		panic("internal error: unknown node kind: " + strconv.Itoa(n.kind))
	}
	return good
}
380
381func (d *decoder) document(n *node, out reflect.Value) (good bool) {
382 if len(n.children) == 1 {
383 d.doc = n
384 d.unmarshal(n.children[0], out)
385 return true
386 }
387 return false
388}
389
// alias decodes the target of alias node n into out, using d.aliases to
// detect anchors whose value contains the alias itself.
func (d *decoder) alias(n *node, out reflect.Value) (good bool) {
	if d.aliases[n] {
		// TODO this could actually be allowed in some circumstances.
		failf("anchor '%s' value contains itself", n.value)
	}
	d.aliases[n] = true
	d.aliasDepth++
	good = d.unmarshal(n.alias, out)
	d.aliasDepth--
	delete(d.aliases, n)
	return good
}
402
// zeroValue is the invalid reflect.Value; assigning it via SetMapIndex
// deletes a map entry, and it is what MapIndex returns for missing keys.
var zeroValue reflect.Value
404
405func resetMap(out reflect.Value) {
406 for _, k := range out.MapKeys() {
407 out.SetMapIndex(k, zeroValue)
408 }
409}
410
// scalar decodes scalar node n into out. The node's tag is resolved first;
// the value is then converted to out's kind, trying in order: an exact
// reflect type match, encoding.TextUnmarshaler, and finally a kind-by-kind
// conversion with overflow checks. Returns false (after recording a type
// error) when no conversion applies.
func (d *decoder) scalar(n *node, out reflect.Value) bool {
	var tag string
	var resolved interface{}
	if n.tag == "" && !n.implicit {
		// Quoted/explicit plain scalars decode as strings without resolution.
		tag = yaml_STR_TAG
		resolved = n.value
	} else {
		tag, resolved = resolve(n.tag, n.value)
		if tag == yaml_BINARY_TAG {
			data, err := base64.StdEncoding.DecodeString(resolved.(string))
			if err != nil {
				failf("!!binary value contains invalid base64 data")
			}
			resolved = string(data)
		}
	}
	if resolved == nil {
		// Null value: clear the destination.
		if out.Kind() == reflect.Map && !out.CanAddr() {
			resetMap(out)
		} else {
			out.Set(reflect.Zero(out.Type()))
		}
		return true
	}
	if resolvedv := reflect.ValueOf(resolved); out.Type() == resolvedv.Type() {
		// We've resolved to exactly the type we want, so use that.
		out.Set(resolvedv)
		return true
	}
	// Perhaps we can use the value as a TextUnmarshaler to
	// set its value.
	if out.CanAddr() {
		u, ok := out.Addr().Interface().(encoding.TextUnmarshaler)
		if ok {
			var text []byte
			if tag == yaml_BINARY_TAG {
				text = []byte(resolved.(string))
			} else {
				// We let any value be unmarshaled into TextUnmarshaler.
				// That might be more lax than we'd like, but the
				// TextUnmarshaler itself should bowl out any dubious values.
				text = []byte(n.value)
			}
			err := u.UnmarshalText(text)
			if err != nil {
				fail(err)
			}
			return true
		}
	}
	switch out.Kind() {
	case reflect.String:
		if tag == yaml_BINARY_TAG {
			out.SetString(resolved.(string))
			return true
		}
		if resolved != nil {
			out.SetString(n.value)
			return true
		}
	case reflect.Interface:
		if resolved == nil {
			out.Set(reflect.Zero(out.Type()))
		} else if tag == yaml_TIMESTAMP_TAG {
			// It looks like a timestamp but for backward compatibility
			// reasons we set it as a string, so that code that unmarshals
			// timestamp-like values into interface{} will continue to
			// see a string and not a time.Time.
			// TODO(v3) Drop this.
			out.Set(reflect.ValueOf(n.value))
		} else {
			out.Set(reflect.ValueOf(resolved))
		}
		return true
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		// Convert with overflow checks; mismatched sizes fall through to
		// the type error at the bottom.
		switch resolved := resolved.(type) {
		case int:
			if !out.OverflowInt(int64(resolved)) {
				out.SetInt(int64(resolved))
				return true
			}
		case int64:
			if !out.OverflowInt(resolved) {
				out.SetInt(resolved)
				return true
			}
		case uint64:
			if resolved <= math.MaxInt64 && !out.OverflowInt(int64(resolved)) {
				out.SetInt(int64(resolved))
				return true
			}
		case float64:
			if resolved <= math.MaxInt64 && !out.OverflowInt(int64(resolved)) {
				out.SetInt(int64(resolved))
				return true
			}
		case string:
			// Strings decode into integer fields only for time.Duration,
			// via time.ParseDuration.
			if out.Type() == durationType {
				d, err := time.ParseDuration(resolved)
				if err == nil {
					out.SetInt(int64(d))
					return true
				}
			}
		}
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
		switch resolved := resolved.(type) {
		case int:
			if resolved >= 0 && !out.OverflowUint(uint64(resolved)) {
				out.SetUint(uint64(resolved))
				return true
			}
		case int64:
			if resolved >= 0 && !out.OverflowUint(uint64(resolved)) {
				out.SetUint(uint64(resolved))
				return true
			}
		case uint64:
			if !out.OverflowUint(uint64(resolved)) {
				out.SetUint(uint64(resolved))
				return true
			}
		case float64:
			if resolved <= math.MaxUint64 && !out.OverflowUint(uint64(resolved)) {
				out.SetUint(uint64(resolved))
				return true
			}
		}
	case reflect.Bool:
		switch resolved := resolved.(type) {
		case bool:
			out.SetBool(resolved)
			return true
		}
	case reflect.Float32, reflect.Float64:
		switch resolved := resolved.(type) {
		case int:
			out.SetFloat(float64(resolved))
			return true
		case int64:
			out.SetFloat(float64(resolved))
			return true
		case uint64:
			out.SetFloat(float64(resolved))
			return true
		case float64:
			out.SetFloat(resolved)
			return true
		}
	case reflect.Struct:
		if resolvedv := reflect.ValueOf(resolved); out.Type() == resolvedv.Type() {
			out.Set(resolvedv)
			return true
		}
	case reflect.Ptr:
		if out.Type().Elem() == reflect.TypeOf(resolved) {
			// TODO Does this make sense? When is out a Ptr except when decoding a nil value?
			elem := reflect.New(out.Type().Elem())
			elem.Elem().Set(reflect.ValueOf(resolved))
			out.Set(elem)
			return true
		}
	}
	d.terror(n, tag, out)
	return false
}
577
// settableValueOf returns an addressable, settable reflect.Value that
// holds a copy of i.
func settableValueOf(i interface{}) reflect.Value {
	original := reflect.ValueOf(i)
	settable := reflect.New(original.Type()).Elem()
	settable.Set(original)
	return settable
}
584
// sequence decodes sequence node n into out, which must be a slice, an
// array of exactly matching length, or an interface (which receives a
// []interface{}). Children that fail to decode are dropped.
func (d *decoder) sequence(n *node, out reflect.Value) (good bool) {
	l := len(n.children)

	var iface reflect.Value
	switch out.Kind() {
	case reflect.Slice:
		out.Set(reflect.MakeSlice(out.Type(), l, l))
	case reflect.Array:
		if l != out.Len() {
			failf("invalid array: want %d elements but got %d", out.Len(), l)
		}
	case reflect.Interface:
		// No type hints. Will have to use a generic sequence.
		iface = out
		out = settableValueOf(make([]interface{}, l))
	default:
		d.terror(n, yaml_SEQ_TAG, out)
		return false
	}
	et := out.Type().Elem()

	j := 0
	for i := 0; i < l; i++ {
		e := reflect.New(et).Elem()
		if ok := d.unmarshal(n.children[i], e); ok {
			// Keep only elements that decoded successfully.
			out.Index(j).Set(e)
			j++
		}
	}
	if out.Kind() != reflect.Array {
		// Trim the slice to the number of elements actually decoded.
		out.Set(out.Slice(0, j))
	}
	if iface.IsValid() {
		iface.Set(out)
	}
	return true
}
622
// mapping decodes mapping node n into out: structs and MapItem slices are
// delegated to mappingStruct/mappingSlice, maps are filled in place, and
// interfaces receive a new map (or slice) of the current d.mapType hint.
func (d *decoder) mapping(n *node, out reflect.Value) (good bool) {
	switch out.Kind() {
	case reflect.Struct:
		return d.mappingStruct(n, out)
	case reflect.Slice:
		return d.mappingSlice(n, out)
	case reflect.Map:
		// okay
	case reflect.Interface:
		if d.mapType.Kind() == reflect.Map {
			iface := out
			out = reflect.MakeMap(d.mapType)
			iface.Set(out)
		} else {
			// The hint is a MapItem slice (MapSlice decoding in effect).
			slicev := reflect.New(d.mapType).Elem()
			if !d.mappingSlice(n, slicev) {
				return false
			}
			out.Set(slicev)
			return true
		}
	default:
		d.terror(n, yaml_MAP_TAG, out)
		return false
	}
	outt := out.Type()
	kt := outt.Key()
	et := outt.Elem()

	// Propagate the map type hint to nested interface values only when the
	// destination is itself a generic map.
	mapType := d.mapType
	if outt.Key() == ifaceType && outt.Elem() == ifaceType {
		d.mapType = outt
	}

	if out.IsNil() {
		out.Set(reflect.MakeMap(outt))
	}
	l := len(n.children)
	for i := 0; i < l; i += 2 {
		if isMerge(n.children[i]) {
			d.merge(n.children[i+1], out)
			continue
		}
		k := reflect.New(kt).Elem()
		if d.unmarshal(n.children[i], k) {
			kkind := k.Kind()
			if kkind == reflect.Interface {
				kkind = k.Elem().Kind()
			}
			// Maps and slices are not hashable and cannot be map keys.
			if kkind == reflect.Map || kkind == reflect.Slice {
				failf("invalid map key: %#v", k.Interface())
			}
			e := reflect.New(et).Elem()
			if d.unmarshal(n.children[i+1], e) {
				d.setMapIndex(n.children[i+1], out, k, e)
			}
		}
	}
	d.mapType = mapType
	return true
}
684
// setMapIndex stores k/v into map out. In strict mode a key that is
// already present is recorded as an error instead of being overwritten.
func (d *decoder) setMapIndex(n *node, out, k, v reflect.Value) {
	if d.strict && out.MapIndex(k) != zeroValue {
		d.terrors = append(d.terrors, fmt.Sprintf("line %d: key %#v already set in map", n.line+1, k.Interface()))
		return
	}
	out.SetMapIndex(k, v)
}
692
// mappingSlice decodes mapping node n into out, which must be a []MapItem
// (the order-preserving MapSlice representation).
func (d *decoder) mappingSlice(n *node, out reflect.Value) (good bool) {
	outt := out.Type()
	if outt.Elem() != mapItemType {
		d.terror(n, yaml_MAP_TAG, out)
		return false
	}

	// Nested interface values inherit the MapItem slice representation.
	mapType := d.mapType
	d.mapType = outt

	var slice []MapItem
	var l = len(n.children)
	for i := 0; i < l; i += 2 {
		if isMerge(n.children[i]) {
			d.merge(n.children[i+1], out)
			continue
		}
		item := MapItem{}
		k := reflect.ValueOf(&item.Key).Elem()
		if d.unmarshal(n.children[i], k) {
			v := reflect.ValueOf(&item.Value).Elem()
			if d.unmarshal(n.children[i+1], v) {
				// Only pairs where both key and value decode are kept.
				slice = append(slice, item)
			}
		}
	}
	out.Set(reflect.ValueOf(slice))
	d.mapType = mapType
	return true
}
723
// mappingStruct decodes mapping node n into struct out, matching YAML keys
// against the struct's field info. Unmatched keys go to the inline map
// field if one exists; in strict mode they (and duplicate fields) are
// recorded as errors.
func (d *decoder) mappingStruct(n *node, out reflect.Value) (good bool) {
	sinfo, err := getStructInfo(out.Type())
	if err != nil {
		panic(err)
	}
	// Reusable settable string used to decode each key.
	name := settableValueOf("")
	l := len(n.children)

	var inlineMap reflect.Value
	var elemType reflect.Type
	if sinfo.InlineMap != -1 {
		inlineMap = out.Field(sinfo.InlineMap)
		inlineMap.Set(reflect.New(inlineMap.Type()).Elem())
		elemType = inlineMap.Type().Elem()
	}

	// In strict mode, track which fields were already set to catch
	// duplicate keys.
	var doneFields []bool
	if d.strict {
		doneFields = make([]bool, len(sinfo.FieldsList))
	}
	for i := 0; i < l; i += 2 {
		ni := n.children[i]
		if isMerge(ni) {
			d.merge(n.children[i+1], out)
			continue
		}
		if !d.unmarshal(ni, name) {
			continue
		}
		if info, ok := sinfo.FieldsMap[name.String()]; ok {
			if d.strict {
				if doneFields[info.Id] {
					d.terrors = append(d.terrors, fmt.Sprintf("line %d: field %s already set in type %s", ni.line+1, name.String(), out.Type()))
					continue
				}
				doneFields[info.Id] = true
			}
			var field reflect.Value
			if info.Inline == nil {
				field = out.Field(info.Num)
			} else {
				// Field belongs to an inlined (embedded) struct.
				field = out.FieldByIndex(info.Inline)
			}
			d.unmarshal(n.children[i+1], field)
		} else if sinfo.InlineMap != -1 {
			if inlineMap.IsNil() {
				inlineMap.Set(reflect.MakeMap(inlineMap.Type()))
			}
			value := reflect.New(elemType).Elem()
			d.unmarshal(n.children[i+1], value)
			d.setMapIndex(n.children[i+1], inlineMap, name, value)
		} else if d.strict {
			d.terrors = append(d.terrors, fmt.Sprintf("line %d: field %s not found in type %s", ni.line+1, name.String(), out.Type()))
		}
	}
	return true
}
781
// failWantMap panics with the error reported when a "<<" merge value is
// not a mapping or a sequence of mappings.
func failWantMap() {
	failf("map merge requires map or sequence of maps as the value")
}
785
// merge handles the value of a "<<" merge key: a mapping, an alias to a
// mapping, or a sequence of either, merged into out.
func (d *decoder) merge(n *node, out reflect.Value) {
	switch n.kind {
	case mappingNode:
		d.unmarshal(n, out)
	case aliasNode:
		if n.alias != nil && n.alias.kind != mappingNode {
			failWantMap()
		}
		d.unmarshal(n, out)
	case sequenceNode:
		// Step backwards as earlier nodes take precedence.
		for i := len(n.children) - 1; i >= 0; i-- {
			ni := n.children[i]
			if ni.kind == aliasNode {
				if ni.alias != nil && ni.alias.kind != mappingNode {
					failWantMap()
				}
			} else if ni.kind != mappingNode {
				failWantMap()
			}
			d.unmarshal(ni, out)
		}
	default:
		failWantMap()
	}
}
812
813func isMerge(n *node) bool {
814 return n.kind == scalarNode && n.value == "<<" && (n.implicit == true || n.tag == yaml_MERGE_TAG)
815}
diff --git a/vendor/gopkg.in/yaml.v2/emitterc.go b/vendor/gopkg.in/yaml.v2/emitterc.go
new file mode 100644
index 0000000..a1c2cc5
--- /dev/null
+++ b/vendor/gopkg.in/yaml.v2/emitterc.go
@@ -0,0 +1,1685 @@
1package yaml
2
3import (
4 "bytes"
5 "fmt"
6)
7
8// Flush the buffer if needed.
9func flush(emitter *yaml_emitter_t) bool {
10 if emitter.buffer_pos+5 >= len(emitter.buffer) {
11 return yaml_emitter_flush(emitter)
12 }
13 return true
14}
15
16// Put a character to the output buffer.
17func put(emitter *yaml_emitter_t, value byte) bool {
18 if emitter.buffer_pos+5 >= len(emitter.buffer) && !yaml_emitter_flush(emitter) {
19 return false
20 }
21 emitter.buffer[emitter.buffer_pos] = value
22 emitter.buffer_pos++
23 emitter.column++
24 return true
25}
26
27// Put a line break to the output buffer.
28func put_break(emitter *yaml_emitter_t) bool {
29 if emitter.buffer_pos+5 >= len(emitter.buffer) && !yaml_emitter_flush(emitter) {
30 return false
31 }
32 switch emitter.line_break {
33 case yaml_CR_BREAK:
34 emitter.buffer[emitter.buffer_pos] = '\r'
35 emitter.buffer_pos += 1
36 case yaml_LN_BREAK:
37 emitter.buffer[emitter.buffer_pos] = '\n'
38 emitter.buffer_pos += 1
39 case yaml_CRLN_BREAK:
40 emitter.buffer[emitter.buffer_pos+0] = '\r'
41 emitter.buffer[emitter.buffer_pos+1] = '\n'
42 emitter.buffer_pos += 2
43 default:
44 panic("unknown line break setting")
45 }
46 emitter.column = 0
47 emitter.line++
48 return true
49}
50
// write copies one UTF-8 encoded character (1-4 bytes, as reported by
// width) from s at *i into the output buffer, advancing *i past it and
// counting it as a single column. The fallthrough ladder copies exactly
// w bytes; a width outside 1-4 panics.
func write(emitter *yaml_emitter_t, s []byte, i *int) bool {
	if emitter.buffer_pos+5 >= len(emitter.buffer) && !yaml_emitter_flush(emitter) {
		return false
	}
	p := emitter.buffer_pos
	w := width(s[*i])
	switch w {
	case 4:
		emitter.buffer[p+3] = s[*i+3]
		fallthrough
	case 3:
		emitter.buffer[p+2] = s[*i+2]
		fallthrough
	case 2:
		emitter.buffer[p+1] = s[*i+1]
		fallthrough
	case 1:
		emitter.buffer[p+0] = s[*i+0]
	default:
		panic("unknown character width")
	}
	// One character written, regardless of how many bytes encode it.
	emitter.column++
	emitter.buffer_pos += w
	*i += w
	return true
}
78
79// Write a whole string into buffer.
80func write_all(emitter *yaml_emitter_t, s []byte) bool {
81 for i := 0; i < len(s); {
82 if !write(emitter, s, &i) {
83 return false
84 }
85 }
86 return true
87}
88
// write_break copies one line-break character from s at *i into the output.
// A '\n' is translated into the emitter's configured break via put_break;
// any other break character is copied verbatim with write, and the line/
// column counters are updated manually since write counts it as a column.
func write_break(emitter *yaml_emitter_t, s []byte, i *int) bool {
	if s[*i] == '\n' {
		if !put_break(emitter) {
			return false
		}
		*i++
	} else {
		if !write(emitter, s, i) {
			return false
		}
		emitter.column = 0
		emitter.line++
	}
	return true
}
105
106// Set an emitter error and return false.
107func yaml_emitter_set_emitter_error(emitter *yaml_emitter_t, problem string) bool {
108 emitter.error = yaml_EMITTER_ERROR
109 emitter.problem = problem
110 return false
111}
112
// yaml_emitter_emit queues an event and drains the queue through the state
// machine for as long as enough events are buffered (see
// yaml_emitter_need_more_events). Each processed event is analyzed,
// dispatched, then deleted; events_head advances past it.
func yaml_emitter_emit(emitter *yaml_emitter_t, event *yaml_event_t) bool {
	emitter.events = append(emitter.events, *event)
	for !yaml_emitter_need_more_events(emitter) {
		event := &emitter.events[emitter.events_head]
		if !yaml_emitter_analyze_event(emitter, event) {
			return false
		}
		if !yaml_emitter_state_machine(emitter, event) {
			return false
		}
		// Event consumed; release its data and advance the queue head.
		yaml_event_delete(event)
		emitter.events_head++
	}
	return true
}
129
130// Check if we need to accumulate more events before emitting.
131//
132// We accumulate extra
133// - 1 event for DOCUMENT-START
134// - 2 events for SEQUENCE-START
135// - 3 events for MAPPING-START
136//
137func yaml_emitter_need_more_events(emitter *yaml_emitter_t) bool {
138 if emitter.events_head == len(emitter.events) {
139 return true
140 }
141 var accumulate int
142 switch emitter.events[emitter.events_head].typ {
143 case yaml_DOCUMENT_START_EVENT:
144 accumulate = 1
145 break
146 case yaml_SEQUENCE_START_EVENT:
147 accumulate = 2
148 break
149 case yaml_MAPPING_START_EVENT:
150 accumulate = 3
151 break
152 default:
153 return false
154 }
155 if len(emitter.events)-emitter.events_head > accumulate {
156 return false
157 }
158 var level int
159 for i := emitter.events_head; i < len(emitter.events); i++ {
160 switch emitter.events[i].typ {
161 case yaml_STREAM_START_EVENT, yaml_DOCUMENT_START_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT:
162 level++
163 case yaml_STREAM_END_EVENT, yaml_DOCUMENT_END_EVENT, yaml_SEQUENCE_END_EVENT, yaml_MAPPING_END_EVENT:
164 level--
165 }
166 if level == 0 {
167 return false
168 }
169 }
170 return true
171}
172
// yaml_emitter_append_tag_directive appends a %TAG directive to the
// emitter's directive stack. A duplicate handle is either tolerated
// (allow_duplicates, used for the built-in defaults) or reported as an
// emitter error. The handle and prefix are deep-copied before storing.
func yaml_emitter_append_tag_directive(emitter *yaml_emitter_t, value *yaml_tag_directive_t, allow_duplicates bool) bool {
	for i := 0; i < len(emitter.tag_directives); i++ {
		if bytes.Equal(value.handle, emitter.tag_directives[i].handle) {
			if allow_duplicates {
				return true
			}
			return yaml_emitter_set_emitter_error(emitter, "duplicate %TAG directive")
		}
	}

	// [Go] Do we actually need to copy this given garbage collection
	// and the lack of deallocating destructors?
	tag_copy := yaml_tag_directive_t{
		handle: make([]byte, len(value.handle)),
		prefix: make([]byte, len(value.prefix)),
	}
	copy(tag_copy.handle, value.handle)
	copy(tag_copy.prefix, value.prefix)
	emitter.tag_directives = append(emitter.tag_directives, tag_copy)
	return true
}
195
196// Increase the indentation level.
197func yaml_emitter_increase_indent(emitter *yaml_emitter_t, flow, indentless bool) bool {
198 emitter.indents = append(emitter.indents, emitter.indent)
199 if emitter.indent < 0 {
200 if flow {
201 emitter.indent = emitter.best_indent
202 } else {
203 emitter.indent = 0
204 }
205 } else if !indentless {
206 emitter.indent += emitter.best_indent
207 }
208 return true
209}
210
// yaml_emitter_state_machine dispatches the current event to the handler
// for the emitter's current state. An unrecognized state matches the empty
// default case, falls out of the switch, and panics — that indicates a
// programming error, not a malformed document.
func yaml_emitter_state_machine(emitter *yaml_emitter_t, event *yaml_event_t) bool {
	switch emitter.state {
	default:
	case yaml_EMIT_STREAM_START_STATE:
		return yaml_emitter_emit_stream_start(emitter, event)

	case yaml_EMIT_FIRST_DOCUMENT_START_STATE:
		return yaml_emitter_emit_document_start(emitter, event, true)

	case yaml_EMIT_DOCUMENT_START_STATE:
		return yaml_emitter_emit_document_start(emitter, event, false)

	case yaml_EMIT_DOCUMENT_CONTENT_STATE:
		return yaml_emitter_emit_document_content(emitter, event)

	case yaml_EMIT_DOCUMENT_END_STATE:
		return yaml_emitter_emit_document_end(emitter, event)

	case yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE:
		return yaml_emitter_emit_flow_sequence_item(emitter, event, true)

	case yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE:
		return yaml_emitter_emit_flow_sequence_item(emitter, event, false)

	case yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE:
		return yaml_emitter_emit_flow_mapping_key(emitter, event, true)

	case yaml_EMIT_FLOW_MAPPING_KEY_STATE:
		return yaml_emitter_emit_flow_mapping_key(emitter, event, false)

	case yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE:
		return yaml_emitter_emit_flow_mapping_value(emitter, event, true)

	case yaml_EMIT_FLOW_MAPPING_VALUE_STATE:
		return yaml_emitter_emit_flow_mapping_value(emitter, event, false)

	case yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE:
		return yaml_emitter_emit_block_sequence_item(emitter, event, true)

	case yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE:
		return yaml_emitter_emit_block_sequence_item(emitter, event, false)

	case yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE:
		return yaml_emitter_emit_block_mapping_key(emitter, event, true)

	case yaml_EMIT_BLOCK_MAPPING_KEY_STATE:
		return yaml_emitter_emit_block_mapping_key(emitter, event, false)

	case yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE:
		return yaml_emitter_emit_block_mapping_value(emitter, event, true)

	case yaml_EMIT_BLOCK_MAPPING_VALUE_STATE:
		return yaml_emitter_emit_block_mapping_value(emitter, event, false)

	case yaml_EMIT_END_STATE:
		return yaml_emitter_set_emitter_error(emitter, "expected nothing after STREAM-END")
	}
	panic("invalid emitter state")
}
271
// yaml_emitter_emit_stream_start handles the initial STREAM-START event:
// it resolves the output encoding, clamps formatting knobs (indent 2-9,
// width > 2*indent or unlimited), normalizes the line break, resets the
// position counters, writes a BOM for non-UTF-8 encodings, and moves to
// the first DOCUMENT-START state.
func yaml_emitter_emit_stream_start(emitter *yaml_emitter_t, event *yaml_event_t) bool {
	if event.typ != yaml_STREAM_START_EVENT {
		return yaml_emitter_set_emitter_error(emitter, "expected STREAM-START")
	}
	if emitter.encoding == yaml_ANY_ENCODING {
		emitter.encoding = event.encoding
		if emitter.encoding == yaml_ANY_ENCODING {
			emitter.encoding = yaml_UTF8_ENCODING
		}
	}
	if emitter.best_indent < 2 || emitter.best_indent > 9 {
		emitter.best_indent = 2
	}
	if emitter.best_width >= 0 && emitter.best_width <= emitter.best_indent*2 {
		emitter.best_width = 80
	}
	if emitter.best_width < 0 {
		// Negative width means "no limit".
		emitter.best_width = 1<<31 - 1
	}
	if emitter.line_break == yaml_ANY_BREAK {
		emitter.line_break = yaml_LN_BREAK
	}

	emitter.indent = -1
	emitter.line = 0
	emitter.column = 0
	emitter.whitespace = true
	emitter.indention = true

	if emitter.encoding != yaml_UTF8_ENCODING {
		if !yaml_emitter_write_bom(emitter) {
			return false
		}
	}
	emitter.state = yaml_EMIT_FIRST_DOCUMENT_START_STATE
	return true
}
310
// yaml_emitter_emit_document_start handles a DOCUMENT-START event (validate
// and write %YAML/%TAG directives, decide whether the "---" marker can be
// omitted, then move to the document-content state) or a STREAM-END event
// (terminate an open-ended document with "...", flush, and move to the end
// state). Any other event is an emitter error.
func yaml_emitter_emit_document_start(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool {

	if event.typ == yaml_DOCUMENT_START_EVENT {

		if event.version_directive != nil {
			if !yaml_emitter_analyze_version_directive(emitter, event.version_directive) {
				return false
			}
		}

		// Validate and register the event's %TAG directives.
		for i := 0; i < len(event.tag_directives); i++ {
			tag_directive := &event.tag_directives[i]
			if !yaml_emitter_analyze_tag_directive(emitter, tag_directive) {
				return false
			}
			if !yaml_emitter_append_tag_directive(emitter, tag_directive, false) {
				return false
			}
		}

		// Register the defaults too; duplicates are allowed here.
		for i := 0; i < len(default_tag_directives); i++ {
			tag_directive := &default_tag_directives[i]
			if !yaml_emitter_append_tag_directive(emitter, tag_directive, true) {
				return false
			}
		}

		// Only the first document of a non-canonical stream may omit "---".
		implicit := event.implicit
		if !first || emitter.canonical {
			implicit = false
		}

		// Directives after an open-ended document need a "..." separator.
		if emitter.open_ended && (event.version_directive != nil || len(event.tag_directives) > 0) {
			if !yaml_emitter_write_indicator(emitter, []byte("..."), true, false, false) {
				return false
			}
			if !yaml_emitter_write_indent(emitter) {
				return false
			}
		}

		if event.version_directive != nil {
			implicit = false
			if !yaml_emitter_write_indicator(emitter, []byte("%YAML"), true, false, false) {
				return false
			}
			if !yaml_emitter_write_indicator(emitter, []byte("1.1"), true, false, false) {
				return false
			}
			if !yaml_emitter_write_indent(emitter) {
				return false
			}
		}

		if len(event.tag_directives) > 0 {
			implicit = false
			for i := 0; i < len(event.tag_directives); i++ {
				tag_directive := &event.tag_directives[i]
				if !yaml_emitter_write_indicator(emitter, []byte("%TAG"), true, false, false) {
					return false
				}
				if !yaml_emitter_write_tag_handle(emitter, tag_directive.handle) {
					return false
				}
				if !yaml_emitter_write_tag_content(emitter, tag_directive.prefix, true) {
					return false
				}
				if !yaml_emitter_write_indent(emitter) {
					return false
				}
			}
		}

		if yaml_emitter_check_empty_document(emitter) {
			implicit = false
		}
		if !implicit {
			if !yaml_emitter_write_indent(emitter) {
				return false
			}
			if !yaml_emitter_write_indicator(emitter, []byte("---"), true, false, false) {
				return false
			}
			if emitter.canonical {
				if !yaml_emitter_write_indent(emitter) {
					return false
				}
			}
		}

		emitter.state = yaml_EMIT_DOCUMENT_CONTENT_STATE
		return true
	}

	if event.typ == yaml_STREAM_END_EVENT {
		if emitter.open_ended {
			if !yaml_emitter_write_indicator(emitter, []byte("..."), true, false, false) {
				return false
			}
			if !yaml_emitter_write_indent(emitter) {
				return false
			}
		}
		if !yaml_emitter_flush(emitter) {
			return false
		}
		emitter.state = yaml_EMIT_END_STATE
		return true
	}

	return yaml_emitter_set_emitter_error(emitter, "expected DOCUMENT-START or STREAM-END")
}
424
// yaml_emitter_emit_document_content emits the document's root node:
// it schedules DOCUMENT-END as the follow-up state and delegates to
// yaml_emitter_emit_node with root context set.
func yaml_emitter_emit_document_content(emitter *yaml_emitter_t, event *yaml_event_t) bool {
	emitter.states = append(emitter.states, yaml_EMIT_DOCUMENT_END_STATE)
	return yaml_emitter_emit_node(emitter, event, true, false, false, false)
}
430
// yaml_emitter_emit_document_end handles DOCUMENT-END: writes an explicit
// "..." marker when the event is not implicit, flushes the output, clears
// the per-document tag directives, and returns to the DOCUMENT-START state
// for the next document in the stream.
func yaml_emitter_emit_document_end(emitter *yaml_emitter_t, event *yaml_event_t) bool {
	if event.typ != yaml_DOCUMENT_END_EVENT {
		return yaml_emitter_set_emitter_error(emitter, "expected DOCUMENT-END")
	}
	if !yaml_emitter_write_indent(emitter) {
		return false
	}
	if !event.implicit {
		// [Go] Allocate the slice elsewhere.
		if !yaml_emitter_write_indicator(emitter, []byte("..."), true, false, false) {
			return false
		}
		if !yaml_emitter_write_indent(emitter) {
			return false
		}
	}
	if !yaml_emitter_flush(emitter) {
		return false
	}
	emitter.state = yaml_EMIT_DOCUMENT_START_STATE
	// Tag directives are scoped to a single document.
	emitter.tag_directives = emitter.tag_directives[:0]
	return true
}
455
// yaml_emitter_emit_flow_sequence_item emits one item of a flow sequence
// ("[a, b, c]"). On the first item it opens the bracket and increases the
// indent/flow level; on SEQUENCE-END it closes the bracket and pops the
// indent and state stacks; otherwise it writes a "," separator (after the
// first item), wraps if needed, and emits the item node.
func yaml_emitter_emit_flow_sequence_item(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool {
	if first {
		if !yaml_emitter_write_indicator(emitter, []byte{'['}, true, true, false) {
			return false
		}
		if !yaml_emitter_increase_indent(emitter, true, false) {
			return false
		}
		emitter.flow_level++
	}

	if event.typ == yaml_SEQUENCE_END_EVENT {
		emitter.flow_level--
		// Pop the indent pushed when the sequence was opened.
		emitter.indent = emitter.indents[len(emitter.indents)-1]
		emitter.indents = emitter.indents[:len(emitter.indents)-1]
		if emitter.canonical && !first {
			if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) {
				return false
			}
			if !yaml_emitter_write_indent(emitter) {
				return false
			}
		}
		if !yaml_emitter_write_indicator(emitter, []byte{']'}, false, false, false) {
			return false
		}
		// Return to the state that was pending before this sequence.
		emitter.state = emitter.states[len(emitter.states)-1]
		emitter.states = emitter.states[:len(emitter.states)-1]

		return true
	}

	if !first {
		if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) {
			return false
		}
	}

	// Wrap lines in canonical mode or when past the preferred width.
	if emitter.canonical || emitter.column > emitter.best_width {
		if !yaml_emitter_write_indent(emitter) {
			return false
		}
	}
	emitter.states = append(emitter.states, yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE)
	return yaml_emitter_emit_node(emitter, event, false, true, false, false)
}
503
// yaml_emitter_emit_flow_mapping_key emits one key of a flow mapping
// ("{a: 1}"). On the first key it opens the brace and raises the
// indent/flow level; on MAPPING-END it closes the brace and pops the
// indent and state stacks; otherwise it writes separators, then either a
// simple "key:" form or the explicit "? key" form for complex keys.
func yaml_emitter_emit_flow_mapping_key(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool {
	if first {
		if !yaml_emitter_write_indicator(emitter, []byte{'{'}, true, true, false) {
			return false
		}
		if !yaml_emitter_increase_indent(emitter, true, false) {
			return false
		}
		emitter.flow_level++
	}

	if event.typ == yaml_MAPPING_END_EVENT {
		emitter.flow_level--
		// Pop the indent pushed when the mapping was opened.
		emitter.indent = emitter.indents[len(emitter.indents)-1]
		emitter.indents = emitter.indents[:len(emitter.indents)-1]
		if emitter.canonical && !first {
			if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) {
				return false
			}
			if !yaml_emitter_write_indent(emitter) {
				return false
			}
		}
		if !yaml_emitter_write_indicator(emitter, []byte{'}'}, false, false, false) {
			return false
		}
		// Return to the state pending before this mapping.
		emitter.state = emitter.states[len(emitter.states)-1]
		emitter.states = emitter.states[:len(emitter.states)-1]
		return true
	}

	if !first {
		if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) {
			return false
		}
	}
	if emitter.canonical || emitter.column > emitter.best_width {
		if !yaml_emitter_write_indent(emitter) {
			return false
		}
	}

	// Prefer the compact "key: value" form when the key qualifies.
	if !emitter.canonical && yaml_emitter_check_simple_key(emitter) {
		emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE)
		return yaml_emitter_emit_node(emitter, event, false, false, true, true)
	}
	if !yaml_emitter_write_indicator(emitter, []byte{'?'}, true, false, false) {
		return false
	}
	emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_VALUE_STATE)
	return yaml_emitter_emit_node(emitter, event, false, false, true, false)
}
557
// yaml_emitter_emit_flow_mapping_value emits the value half of a flow
// mapping entry: a bare ":" after a simple key, or a wrapped/spaced ":"
// after an explicit "?" key. The next state returns to the flow-mapping
// key handler.
func yaml_emitter_emit_flow_mapping_value(emitter *yaml_emitter_t, event *yaml_event_t, simple bool) bool {
	if simple {
		if !yaml_emitter_write_indicator(emitter, []byte{':'}, false, false, false) {
			return false
		}
	} else {
		if emitter.canonical || emitter.column > emitter.best_width {
			if !yaml_emitter_write_indent(emitter) {
				return false
			}
		}
		if !yaml_emitter_write_indicator(emitter, []byte{':'}, true, false, false) {
			return false
		}
	}
	emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_KEY_STATE)
	return yaml_emitter_emit_node(emitter, event, false, false, true, false)
}
577
// yaml_emitter_emit_block_sequence_item emits one item of a block sequence
// ("- item"). The first item raises the indent (indentless when nested
// directly under a mapping key that has not broken the line yet); a
// SEQUENCE-END pops the indent and state stacks; otherwise each item gets
// an indented "-" indicator before its node.
func yaml_emitter_emit_block_sequence_item(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool {
	if first {
		if !yaml_emitter_increase_indent(emitter, false, emitter.mapping_context && !emitter.indention) {
			return false
		}
	}
	if event.typ == yaml_SEQUENCE_END_EVENT {
		emitter.indent = emitter.indents[len(emitter.indents)-1]
		emitter.indents = emitter.indents[:len(emitter.indents)-1]
		emitter.state = emitter.states[len(emitter.states)-1]
		emitter.states = emitter.states[:len(emitter.states)-1]
		return true
	}
	if !yaml_emitter_write_indent(emitter) {
		return false
	}
	if !yaml_emitter_write_indicator(emitter, []byte{'-'}, true, false, true) {
		return false
	}
	emitter.states = append(emitter.states, yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE)
	return yaml_emitter_emit_node(emitter, event, false, true, false, false)
}
601
// yaml_emitter_emit_block_mapping_key emits one key of a block mapping.
// The first key raises the indent; a MAPPING-END pops the indent and state
// stacks; otherwise the key is written either in the compact "key:" form
// (when it qualifies as a simple key) or with an explicit "?" indicator.
func yaml_emitter_emit_block_mapping_key(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool {
	if first {
		if !yaml_emitter_increase_indent(emitter, false, false) {
			return false
		}
	}
	if event.typ == yaml_MAPPING_END_EVENT {
		emitter.indent = emitter.indents[len(emitter.indents)-1]
		emitter.indents = emitter.indents[:len(emitter.indents)-1]
		emitter.state = emitter.states[len(emitter.states)-1]
		emitter.states = emitter.states[:len(emitter.states)-1]
		return true
	}
	if !yaml_emitter_write_indent(emitter) {
		return false
	}
	if yaml_emitter_check_simple_key(emitter) {
		emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE)
		return yaml_emitter_emit_node(emitter, event, false, false, true, true)
	}
	if !yaml_emitter_write_indicator(emitter, []byte{'?'}, true, false, true) {
		return false
	}
	emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_VALUE_STATE)
	return yaml_emitter_emit_node(emitter, event, false, false, true, false)
}
629
// yaml_emitter_emit_block_mapping_value emits the value half of a block
// mapping entry: ":" directly after a simple key, or an indented ":" on
// its own line after an explicit "?" key. The next state returns to the
// block-mapping key handler.
func yaml_emitter_emit_block_mapping_value(emitter *yaml_emitter_t, event *yaml_event_t, simple bool) bool {
	if simple {
		if !yaml_emitter_write_indicator(emitter, []byte{':'}, false, false, false) {
			return false
		}
	} else {
		if !yaml_emitter_write_indent(emitter) {
			return false
		}
		if !yaml_emitter_write_indicator(emitter, []byte{':'}, true, false, true) {
			return false
		}
	}
	emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_KEY_STATE)
	return yaml_emitter_emit_node(emitter, event, false, false, true, false)
}
647
// yaml_emitter_emit_node records the current emission context flags on the
// emitter and dispatches the event to the alias/scalar/sequence/mapping
// handler. Any other event type is an emitter error.
func yaml_emitter_emit_node(emitter *yaml_emitter_t, event *yaml_event_t,
	root bool, sequence bool, mapping bool, simple_key bool) bool {

	emitter.root_context = root
	emitter.sequence_context = sequence
	emitter.mapping_context = mapping
	emitter.simple_key_context = simple_key

	switch event.typ {
	case yaml_ALIAS_EVENT:
		return yaml_emitter_emit_alias(emitter, event)
	case yaml_SCALAR_EVENT:
		return yaml_emitter_emit_scalar(emitter, event)
	case yaml_SEQUENCE_START_EVENT:
		return yaml_emitter_emit_sequence_start(emitter, event)
	case yaml_MAPPING_START_EVENT:
		return yaml_emitter_emit_mapping_start(emitter, event)
	default:
		return yaml_emitter_set_emitter_error(emitter,
			fmt.Sprintf("expected SCALAR, SEQUENCE-START, MAPPING-START, or ALIAS, but got %v", event.typ))
	}
}
671
// yaml_emitter_emit_alias emits an alias node. The "*name" text comes from
// the anchor data prepared during event analysis, so the event parameter
// itself is not consulted here. Pops the next state off the stack.
func yaml_emitter_emit_alias(emitter *yaml_emitter_t, event *yaml_event_t) bool {
	if !yaml_emitter_process_anchor(emitter) {
		return false
	}
	emitter.state = emitter.states[len(emitter.states)-1]
	emitter.states = emitter.states[:len(emitter.states)-1]
	return true
}
681
// yaml_emitter_emit_scalar emits a scalar node: choose its quoting style,
// write any anchor and tag, temporarily raise the indent for wrapped
// continuation lines, write the scalar text, then pop both the indent and
// the next state off their stacks.
func yaml_emitter_emit_scalar(emitter *yaml_emitter_t, event *yaml_event_t) bool {
	if !yaml_emitter_select_scalar_style(emitter, event) {
		return false
	}
	if !yaml_emitter_process_anchor(emitter) {
		return false
	}
	if !yaml_emitter_process_tag(emitter) {
		return false
	}
	if !yaml_emitter_increase_indent(emitter, true, false) {
		return false
	}
	if !yaml_emitter_process_scalar(emitter) {
		return false
	}
	emitter.indent = emitter.indents[len(emitter.indents)-1]
	emitter.indents = emitter.indents[:len(emitter.indents)-1]
	emitter.state = emitter.states[len(emitter.states)-1]
	emitter.states = emitter.states[:len(emitter.states)-1]
	return true
}
705
// yaml_emitter_emit_sequence_start emits a sequence opening: writes any
// anchor and tag, then chooses flow style (inside a flow collection, in
// canonical mode, when the event requests it, or when the sequence is
// empty) or block style for the items that follow.
func yaml_emitter_emit_sequence_start(emitter *yaml_emitter_t, event *yaml_event_t) bool {
	if !yaml_emitter_process_anchor(emitter) {
		return false
	}
	if !yaml_emitter_process_tag(emitter) {
		return false
	}
	if emitter.flow_level > 0 || emitter.canonical || event.sequence_style() == yaml_FLOW_SEQUENCE_STYLE ||
		yaml_emitter_check_empty_sequence(emitter) {
		emitter.state = yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE
	} else {
		emitter.state = yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE
	}
	return true
}
722
// yaml_emitter_emit_mapping_start emits a mapping opening: writes any
// anchor and tag, then chooses flow style (inside a flow collection, in
// canonical mode, when the event requests it, or when the mapping is
// empty) or block style for the keys that follow.
func yaml_emitter_emit_mapping_start(emitter *yaml_emitter_t, event *yaml_event_t) bool {
	if !yaml_emitter_process_anchor(emitter) {
		return false
	}
	if !yaml_emitter_process_tag(emitter) {
		return false
	}
	if emitter.flow_level > 0 || emitter.canonical || event.mapping_style() == yaml_FLOW_MAPPING_STYLE ||
		yaml_emitter_check_empty_mapping(emitter) {
		emitter.state = yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE
	} else {
		emitter.state = yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE
	}
	return true
}
739
// yaml_emitter_check_empty_document reports whether the document content is
// an empty scalar. This Go port deliberately always answers false (the
// original inline "[Go] Huh?" note), so the "---" marker is never dropped
// on that basis.
func yaml_emitter_check_empty_document(emitter *yaml_emitter_t) bool {
	return false // [Go] Huh?
}
744
745// Check if the next events represent an empty sequence.
746func yaml_emitter_check_empty_sequence(emitter *yaml_emitter_t) bool {
747 if len(emitter.events)-emitter.events_head < 2 {
748 return false
749 }
750 return emitter.events[emitter.events_head].typ == yaml_SEQUENCE_START_EVENT &&
751 emitter.events[emitter.events_head+1].typ == yaml_SEQUENCE_END_EVENT
752}
753
754// Check if the next events represent an empty mapping.
755func yaml_emitter_check_empty_mapping(emitter *yaml_emitter_t) bool {
756 if len(emitter.events)-emitter.events_head < 2 {
757 return false
758 }
759 return emitter.events[emitter.events_head].typ == yaml_MAPPING_START_EVENT &&
760 emitter.events[emitter.events_head+1].typ == yaml_MAPPING_END_EVENT
761}
762
// yaml_emitter_check_simple_key reports whether the next queued node can be
// written in the compact "key: value" form: a single-line scalar, an alias,
// or an empty collection, whose rendered anchor+tag+value length does not
// exceed 128 characters.
func yaml_emitter_check_simple_key(emitter *yaml_emitter_t) bool {
	length := 0
	switch emitter.events[emitter.events_head].typ {
	case yaml_ALIAS_EVENT:
		length += len(emitter.anchor_data.anchor)
	case yaml_SCALAR_EVENT:
		// Multiline scalars can never be simple keys.
		if emitter.scalar_data.multiline {
			return false
		}
		length += len(emitter.anchor_data.anchor) +
			len(emitter.tag_data.handle) +
			len(emitter.tag_data.suffix) +
			len(emitter.scalar_data.value)
	case yaml_SEQUENCE_START_EVENT:
		// Only an empty sequence ("[]") may serve as a simple key.
		if !yaml_emitter_check_empty_sequence(emitter) {
			return false
		}
		length += len(emitter.anchor_data.anchor) +
			len(emitter.tag_data.handle) +
			len(emitter.tag_data.suffix)
	case yaml_MAPPING_START_EVENT:
		// Only an empty mapping ("{}") may serve as a simple key.
		if !yaml_emitter_check_empty_mapping(emitter) {
			return false
		}
		length += len(emitter.anchor_data.anchor) +
			len(emitter.tag_data.handle) +
			len(emitter.tag_data.suffix)
	default:
		return false
	}
	return length <= 128
}
796
// yaml_emitter_select_scalar_style picks the quoting style for the pending
// scalar, starting from the event's requested style and downgrading it
// whenever the analyzed scalar data rules the style out. Canonical output
// and multiline simple keys force double quotes; disallowed plain scalars
// fall back to single quotes, and disallowed single/block styles fall back
// to double quotes. The chosen style is stored in emitter.scalar_data.
func yaml_emitter_select_scalar_style(emitter *yaml_emitter_t, event *yaml_event_t) bool {

	no_tag := len(emitter.tag_data.handle) == 0 && len(emitter.tag_data.suffix) == 0
	if no_tag && !event.implicit && !event.quoted_implicit {
		return yaml_emitter_set_emitter_error(emitter, "neither tag nor implicit flags are specified")
	}

	style := event.scalar_style()
	if style == yaml_ANY_SCALAR_STYLE {
		style = yaml_PLAIN_SCALAR_STYLE
	}
	if emitter.canonical {
		style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
	}
	if emitter.simple_key_context && emitter.scalar_data.multiline {
		style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
	}

	if style == yaml_PLAIN_SCALAR_STYLE {
		// Plain style must be allowed in the current (flow/block) context.
		if emitter.flow_level > 0 && !emitter.scalar_data.flow_plain_allowed ||
			emitter.flow_level == 0 && !emitter.scalar_data.block_plain_allowed {
			style = yaml_SINGLE_QUOTED_SCALAR_STYLE
		}
		// An empty plain scalar is ambiguous in flow context or as a key.
		if len(emitter.scalar_data.value) == 0 && (emitter.flow_level > 0 || emitter.simple_key_context) {
			style = yaml_SINGLE_QUOTED_SCALAR_STYLE
		}
		// Without a tag, plain style requires the implicit flag.
		if no_tag && !event.implicit {
			style = yaml_SINGLE_QUOTED_SCALAR_STYLE
		}
	}
	if style == yaml_SINGLE_QUOTED_SCALAR_STYLE {
		if !emitter.scalar_data.single_quoted_allowed {
			style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
		}
	}
	if style == yaml_LITERAL_SCALAR_STYLE || style == yaml_FOLDED_SCALAR_STYLE {
		// Block styles are illegal inside flow collections and simple keys.
		if !emitter.scalar_data.block_allowed || emitter.flow_level > 0 || emitter.simple_key_context {
			style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
		}
	}

	// A quoted style without a tag needs the non-specific "!" tag so the
	// parser does not resolve the value implicitly.
	if no_tag && !event.quoted_implicit && style != yaml_PLAIN_SCALAR_STYLE {
		emitter.tag_data.handle = []byte{'!'}
	}
	emitter.scalar_data.style = style
	return true
}
845
846// Write an anchor.
847func yaml_emitter_process_anchor(emitter *yaml_emitter_t) bool {
848 if emitter.anchor_data.anchor == nil {
849 return true
850 }
851 c := []byte{'&'}
852 if emitter.anchor_data.alias {
853 c[0] = '*'
854 }
855 if !yaml_emitter_write_indicator(emitter, c, true, false, false) {
856 return false
857 }
858 return yaml_emitter_write_anchor(emitter, emitter.anchor_data.anchor)
859}
860
// yaml_emitter_process_tag writes the pending tag, if any. A tag with a
// handle is written as "handle" plus an optional suffix ("!!str"); a tag
// with only a suffix is written verbatim inside the "!<...>" form.
func yaml_emitter_process_tag(emitter *yaml_emitter_t) bool {
	if len(emitter.tag_data.handle) == 0 && len(emitter.tag_data.suffix) == 0 {
		return true
	}
	if len(emitter.tag_data.handle) > 0 {
		if !yaml_emitter_write_tag_handle(emitter, emitter.tag_data.handle) {
			return false
		}
		if len(emitter.tag_data.suffix) > 0 {
			if !yaml_emitter_write_tag_content(emitter, emitter.tag_data.suffix, false) {
				return false
			}
		}
	} else {
		// [Go] Allocate these slices elsewhere.
		if !yaml_emitter_write_indicator(emitter, []byte("!<"), true, false, false) {
			return false
		}
		if !yaml_emitter_write_tag_content(emitter, emitter.tag_data.suffix, false) {
			return false
		}
		if !yaml_emitter_write_indicator(emitter, []byte{'>'}, false, false, false) {
			return false
		}
	}
	return true
}
889
// yaml_emitter_process_scalar writes the pending scalar value using the
// style selected by yaml_emitter_select_scalar_style. The allow-breaks
// flag passed to the flow styles is disabled inside simple keys, which
// must stay on one line. An unknown style panics (programming error).
func yaml_emitter_process_scalar(emitter *yaml_emitter_t) bool {
	switch emitter.scalar_data.style {
	case yaml_PLAIN_SCALAR_STYLE:
		return yaml_emitter_write_plain_scalar(emitter, emitter.scalar_data.value, !emitter.simple_key_context)

	case yaml_SINGLE_QUOTED_SCALAR_STYLE:
		return yaml_emitter_write_single_quoted_scalar(emitter, emitter.scalar_data.value, !emitter.simple_key_context)

	case yaml_DOUBLE_QUOTED_SCALAR_STYLE:
		return yaml_emitter_write_double_quoted_scalar(emitter, emitter.scalar_data.value, !emitter.simple_key_context)

	case yaml_LITERAL_SCALAR_STYLE:
		return yaml_emitter_write_literal_scalar(emitter, emitter.scalar_data.value)

	case yaml_FOLDED_SCALAR_STYLE:
		return yaml_emitter_write_folded_scalar(emitter, emitter.scalar_data.value)
	}
	panic("unknown scalar style")
}
910
911// Check if a %YAML directive is valid.
912func yaml_emitter_analyze_version_directive(emitter *yaml_emitter_t, version_directive *yaml_version_directive_t) bool {
913 if version_directive.major != 1 || version_directive.minor != 1 {
914 return yaml_emitter_set_emitter_error(emitter, "incompatible %YAML directive")
915 }
916 return true
917}
918
// yaml_emitter_analyze_tag_directive validates a %TAG directive: the handle
// must be non-empty, start and end with '!', and contain only alphanumeric
// characters in between; the prefix must be non-empty.
func yaml_emitter_analyze_tag_directive(emitter *yaml_emitter_t, tag_directive *yaml_tag_directive_t) bool {
	handle := tag_directive.handle
	prefix := tag_directive.prefix
	if len(handle) == 0 {
		return yaml_emitter_set_emitter_error(emitter, "tag handle must not be empty")
	}
	if handle[0] != '!' {
		return yaml_emitter_set_emitter_error(emitter, "tag handle must start with '!'")
	}
	if handle[len(handle)-1] != '!' {
		return yaml_emitter_set_emitter_error(emitter, "tag handle must end with '!'")
	}
	// Step over the interior characters one UTF-8 sequence at a time.
	for i := 1; i < len(handle)-1; i += width(handle[i]) {
		if !is_alpha(handle, i) {
			return yaml_emitter_set_emitter_error(emitter, "tag handle must contain alphanumerical characters only")
		}
	}
	if len(prefix) == 0 {
		return yaml_emitter_set_emitter_error(emitter, "tag prefix must not be empty")
	}
	return true
}
942
// yaml_emitter_analyze_anchor validates an anchor (or alias, per the alias
// flag): it must be non-empty and purely alphanumeric. On success the
// anchor and flag are stored in emitter.anchor_data for later writing.
func yaml_emitter_analyze_anchor(emitter *yaml_emitter_t, anchor []byte, alias bool) bool {
	if len(anchor) == 0 {
		problem := "anchor value must not be empty"
		if alias {
			problem = "alias value must not be empty"
		}
		return yaml_emitter_set_emitter_error(emitter, problem)
	}
	// Step through the value one UTF-8 sequence at a time.
	for i := 0; i < len(anchor); i += width(anchor[i]) {
		if !is_alpha(anchor, i) {
			problem := "anchor value must contain alphanumerical characters only"
			if alias {
				problem = "alias value must contain alphanumerical characters only"
			}
			return yaml_emitter_set_emitter_error(emitter, problem)
		}
	}
	emitter.anchor_data.anchor = anchor
	emitter.anchor_data.alias = alias
	return true
}
965
// yaml_emitter_analyze_tag validates a tag and splits it for output: if the
// tag starts with a registered %TAG directive prefix, it is stored as that
// directive's handle plus the remaining suffix; otherwise the whole tag
// becomes the suffix (later written in "!<...>" form).
func yaml_emitter_analyze_tag(emitter *yaml_emitter_t, tag []byte) bool {
	if len(tag) == 0 {
		return yaml_emitter_set_emitter_error(emitter, "tag value must not be empty")
	}
	for i := 0; i < len(emitter.tag_directives); i++ {
		tag_directive := &emitter.tag_directives[i]
		if bytes.HasPrefix(tag, tag_directive.prefix) {
			emitter.tag_data.handle = tag_directive.handle
			emitter.tag_data.suffix = tag[len(tag_directive.prefix):]
			return true
		}
	}
	emitter.tag_data.suffix = tag
	return true
}
982
// Check if a scalar is valid.
//
// One pass over the scalar collects layout properties (indicator
// characters, leading/trailing whitespace, line breaks, special
// characters) and records which presentation styles are permitted in
// emitter.scalar_data. Always returns true.
func yaml_emitter_analyze_scalar(emitter *yaml_emitter_t, value []byte) bool {
	var (
		block_indicators   = false
		flow_indicators    = false
		line_breaks        = false
		special_characters = false

		leading_space  = false
		leading_break  = false
		trailing_space = false
		trailing_break = false
		break_space    = false
		space_break    = false

		preceded_by_whitespace = false
		followed_by_whitespace = false
		previous_space         = false
		previous_break         = false
	)

	emitter.scalar_data.value = value

	if len(value) == 0 {
		// The empty scalar: block-plain and single-quoted only.
		emitter.scalar_data.multiline = false
		emitter.scalar_data.flow_plain_allowed = false
		emitter.scalar_data.block_plain_allowed = true
		emitter.scalar_data.single_quoted_allowed = true
		emitter.scalar_data.block_allowed = false
		return true
	}

	// A leading "---" or "..." would be confused with document markers.
	if len(value) >= 3 && ((value[0] == '-' && value[1] == '-' && value[2] == '-') || (value[0] == '.' && value[1] == '.' && value[2] == '.')) {
		block_indicators = true
		flow_indicators = true
	}

	preceded_by_whitespace = true
	for i, w := 0, 0; i < len(value); i += w {
		w = width(value[i])
		followed_by_whitespace = i+w >= len(value) || is_blank(value, i+w)

		if i == 0 {
			// Characters with special meaning at the start of a scalar.
			switch value[i] {
			case '#', ',', '[', ']', '{', '}', '&', '*', '!', '|', '>', '\'', '"', '%', '@', '`':
				flow_indicators = true
				block_indicators = true
			case '?', ':':
				flow_indicators = true
				if followed_by_whitespace {
					block_indicators = true
				}
			case '-':
				if followed_by_whitespace {
					flow_indicators = true
					block_indicators = true
				}
			}
		} else {
			// Characters with special meaning inside a scalar.
			switch value[i] {
			case ',', '?', '[', ']', '{', '}':
				flow_indicators = true
			case ':':
				flow_indicators = true
				if followed_by_whitespace {
					block_indicators = true
				}
			case '#':
				if preceded_by_whitespace {
					flow_indicators = true
					block_indicators = true
				}
			}
		}

		// Unprintable characters (or non-ASCII ones when unicode output
		// is disabled) can only be expressed with double-quoted escapes.
		if !is_printable(value, i) || !is_ascii(value, i) && !emitter.unicode {
			special_characters = true
		}
		if is_space(value, i) {
			if i == 0 {
				leading_space = true
			}
			if i+width(value[i]) == len(value) {
				trailing_space = true
			}
			if previous_break {
				break_space = true
			}
			previous_space = true
			previous_break = false
		} else if is_break(value, i) {
			line_breaks = true
			if i == 0 {
				leading_break = true
			}
			if i+width(value[i]) == len(value) {
				trailing_break = true
			}
			if previous_space {
				space_break = true
			}
			previous_space = false
			previous_break = true
		} else {
			previous_space = false
			previous_break = false
		}

		// [Go]: Why 'z'? Couldn't be the end of the string as that's the loop condition.
		preceded_by_whitespace = is_blankz(value, i)
	}

	// Start permissive, then strike out styles ruled out by the
	// properties collected above.
	emitter.scalar_data.multiline = line_breaks
	emitter.scalar_data.flow_plain_allowed = true
	emitter.scalar_data.block_plain_allowed = true
	emitter.scalar_data.single_quoted_allowed = true
	emitter.scalar_data.block_allowed = true

	if leading_space || leading_break || trailing_space || trailing_break {
		emitter.scalar_data.flow_plain_allowed = false
		emitter.scalar_data.block_plain_allowed = false
	}
	if trailing_space {
		emitter.scalar_data.block_allowed = false
	}
	if break_space {
		emitter.scalar_data.flow_plain_allowed = false
		emitter.scalar_data.block_plain_allowed = false
		emitter.scalar_data.single_quoted_allowed = false
	}
	if space_break || special_characters {
		emitter.scalar_data.flow_plain_allowed = false
		emitter.scalar_data.block_plain_allowed = false
		emitter.scalar_data.single_quoted_allowed = false
		emitter.scalar_data.block_allowed = false
	}
	if line_breaks {
		emitter.scalar_data.flow_plain_allowed = false
		emitter.scalar_data.block_plain_allowed = false
	}
	if flow_indicators {
		emitter.scalar_data.flow_plain_allowed = false
	}
	if block_indicators {
		emitter.scalar_data.block_plain_allowed = false
	}
	return true
}
1131
1132// Check if the event data is valid.
1133func yaml_emitter_analyze_event(emitter *yaml_emitter_t, event *yaml_event_t) bool {
1134
1135 emitter.anchor_data.anchor = nil
1136 emitter.tag_data.handle = nil
1137 emitter.tag_data.suffix = nil
1138 emitter.scalar_data.value = nil
1139
1140 switch event.typ {
1141 case yaml_ALIAS_EVENT:
1142 if !yaml_emitter_analyze_anchor(emitter, event.anchor, true) {
1143 return false
1144 }
1145
1146 case yaml_SCALAR_EVENT:
1147 if len(event.anchor) > 0 {
1148 if !yaml_emitter_analyze_anchor(emitter, event.anchor, false) {
1149 return false
1150 }
1151 }
1152 if len(event.tag) > 0 && (emitter.canonical || (!event.implicit && !event.quoted_implicit)) {
1153 if !yaml_emitter_analyze_tag(emitter, event.tag) {
1154 return false
1155 }
1156 }
1157 if !yaml_emitter_analyze_scalar(emitter, event.value) {
1158 return false
1159 }
1160
1161 case yaml_SEQUENCE_START_EVENT:
1162 if len(event.anchor) > 0 {
1163 if !yaml_emitter_analyze_anchor(emitter, event.anchor, false) {
1164 return false
1165 }
1166 }
1167 if len(event.tag) > 0 && (emitter.canonical || !event.implicit) {
1168 if !yaml_emitter_analyze_tag(emitter, event.tag) {
1169 return false
1170 }
1171 }
1172
1173 case yaml_MAPPING_START_EVENT:
1174 if len(event.anchor) > 0 {
1175 if !yaml_emitter_analyze_anchor(emitter, event.anchor, false) {
1176 return false
1177 }
1178 }
1179 if len(event.tag) > 0 && (emitter.canonical || !event.implicit) {
1180 if !yaml_emitter_analyze_tag(emitter, event.tag) {
1181 return false
1182 }
1183 }
1184 }
1185 return true
1186}
1187
1188// Write the BOM character.
1189func yaml_emitter_write_bom(emitter *yaml_emitter_t) bool {
1190 if !flush(emitter) {
1191 return false
1192 }
1193 pos := emitter.buffer_pos
1194 emitter.buffer[pos+0] = '\xEF'
1195 emitter.buffer[pos+1] = '\xBB'
1196 emitter.buffer[pos+2] = '\xBF'
1197 emitter.buffer_pos += 3
1198 return true
1199}
1200
1201func yaml_emitter_write_indent(emitter *yaml_emitter_t) bool {
1202 indent := emitter.indent
1203 if indent < 0 {
1204 indent = 0
1205 }
1206 if !emitter.indention || emitter.column > indent || (emitter.column == indent && !emitter.whitespace) {
1207 if !put_break(emitter) {
1208 return false
1209 }
1210 }
1211 for emitter.column < indent {
1212 if !put(emitter, ' ') {
1213 return false
1214 }
1215 }
1216 emitter.whitespace = true
1217 emitter.indention = true
1218 return true
1219}
1220
1221func yaml_emitter_write_indicator(emitter *yaml_emitter_t, indicator []byte, need_whitespace, is_whitespace, is_indention bool) bool {
1222 if need_whitespace && !emitter.whitespace {
1223 if !put(emitter, ' ') {
1224 return false
1225 }
1226 }
1227 if !write_all(emitter, indicator) {
1228 return false
1229 }
1230 emitter.whitespace = is_whitespace
1231 emitter.indention = (emitter.indention && is_indention)
1232 emitter.open_ended = false
1233 return true
1234}
1235
// yaml_emitter_write_anchor writes an anchor or alias name verbatim and
// marks the output position as non-whitespace, non-indented.
func yaml_emitter_write_anchor(emitter *yaml_emitter_t, value []byte) bool {
	if !write_all(emitter, value) {
		return false
	}
	emitter.whitespace = false
	emitter.indention = false
	return true
}
1244
1245func yaml_emitter_write_tag_handle(emitter *yaml_emitter_t, value []byte) bool {
1246 if !emitter.whitespace {
1247 if !put(emitter, ' ') {
1248 return false
1249 }
1250 }
1251 if !write_all(emitter, value) {
1252 return false
1253 }
1254 emitter.whitespace = false
1255 emitter.indention = false
1256 return true
1257}
1258
// yaml_emitter_write_tag_content writes a tag prefix or suffix,
// percent-encoding every byte that is not a letter, digit, or one of
// the URI punctuation characters allowed in a tag.
func yaml_emitter_write_tag_content(emitter *yaml_emitter_t, value []byte, need_whitespace bool) bool {
	if need_whitespace && !emitter.whitespace {
		if !put(emitter, ' ') {
			return false
		}
	}
	for i := 0; i < len(value); {
		var must_write bool
		switch value[i] {
		// URI punctuation that may appear verbatim in a tag.
		case ';', '/', '?', ':', '@', '&', '=', '+', '$', ',', '_', '.', '~', '*', '\'', '(', ')', '[', ']':
			must_write = true
		default:
			must_write = is_alpha(value, i)
		}
		if must_write {
			if !write(emitter, value, &i) {
				return false
			}
		} else {
			// Percent-encode every octet of the (possibly multi-byte)
			// character as %XY using uppercase hex digits.
			w := width(value[i])
			for k := 0; k < w; k++ {
				octet := value[i]
				i++
				if !put(emitter, '%') {
					return false
				}

				c := octet >> 4 // high nibble
				if c < 10 {
					c += '0'
				} else {
					c += 'A' - 10
				}
				if !put(emitter, c) {
					return false
				}

				c = octet & 0x0f // low nibble
				if c < 10 {
					c += '0'
				} else {
					c += 'A' - 10
				}
				if !put(emitter, c) {
					return false
				}
			}
		}
	}
	emitter.whitespace = false
	emitter.indention = false
	return true
}
1312
// yaml_emitter_write_plain_scalar writes an unquoted scalar, folding
// long lines at single interior spaces when allow_breaks permits.
func yaml_emitter_write_plain_scalar(emitter *yaml_emitter_t, value []byte, allow_breaks bool) bool {
	// Separate the scalar from the previous token.
	if !emitter.whitespace {
		if !put(emitter, ' ') {
			return false
		}
	}

	spaces := false // previous character written was a space
	breaks := false // previous character written was a line break
	for i := 0; i < len(value); {
		if is_space(value, i) {
			// Fold the line at the first space past the best width, but
			// never at consecutive spaces (the fold would swallow them).
			if allow_breaks && !spaces && emitter.column > emitter.best_width && !is_space(value, i+1) {
				if !yaml_emitter_write_indent(emitter) {
					return false
				}
				i += width(value[i])
			} else {
				if !write(emitter, value, &i) {
					return false
				}
			}
			spaces = true
		} else if is_break(value, i) {
			// Double the first '\n' of a run: a single break is folded
			// into a space on input, so an extra break preserves it.
			if !breaks && value[i] == '\n' {
				if !put_break(emitter) {
					return false
				}
			}
			if !write_break(emitter, value, &i) {
				return false
			}
			emitter.indention = true
			breaks = true
		} else {
			// Regular character; re-indent after a run of breaks.
			if breaks {
				if !yaml_emitter_write_indent(emitter) {
					return false
				}
			}
			if !write(emitter, value, &i) {
				return false
			}
			emitter.indention = false
			spaces = false
			breaks = false
		}
	}

	emitter.whitespace = false
	emitter.indention = false
	if emitter.root_context {
		emitter.open_ended = true
	}

	return true
}
1369
// yaml_emitter_write_single_quoted_scalar writes a single-quoted
// scalar. The only escape in this style is the doubled single quote;
// long lines may be folded at interior spaces when allow_breaks is set.
func yaml_emitter_write_single_quoted_scalar(emitter *yaml_emitter_t, value []byte, allow_breaks bool) bool {

	if !yaml_emitter_write_indicator(emitter, []byte{'\''}, true, false, false) {
		return false
	}

	spaces := false // previous character written was a space
	breaks := false // previous character written was a line break
	for i := 0; i < len(value); {
		if is_space(value, i) {
			// Fold only at a single interior space past the best width;
			// spaces at either end of the value must stay verbatim.
			if allow_breaks && !spaces && emitter.column > emitter.best_width && i > 0 && i < len(value)-1 && !is_space(value, i+1) {
				if !yaml_emitter_write_indent(emitter) {
					return false
				}
				i += width(value[i])
			} else {
				if !write(emitter, value, &i) {
					return false
				}
			}
			spaces = true
		} else if is_break(value, i) {
			// Double the first '\n' of a run so folding preserves it.
			if !breaks && value[i] == '\n' {
				if !put_break(emitter) {
					return false
				}
			}
			if !write_break(emitter, value, &i) {
				return false
			}
			emitter.indention = true
			breaks = true
		} else {
			if breaks {
				if !yaml_emitter_write_indent(emitter) {
					return false
				}
			}
			// Escape a single quote by writing it twice.
			if value[i] == '\'' {
				if !put(emitter, '\'') {
					return false
				}
			}
			if !write(emitter, value, &i) {
				return false
			}
			emitter.indention = false
			spaces = false
			breaks = false
		}
	}
	if !yaml_emitter_write_indicator(emitter, []byte{'\''}, false, false, false) {
		return false
	}
	emitter.whitespace = false
	emitter.indention = false
	return true
}
1428
// yaml_emitter_write_double_quoted_scalar writes a double-quoted
// scalar. This is the only style that can express arbitrary content:
// unprintable characters (or non-ASCII ones when unicode output is
// off), BOMs, line breaks, '"' and '\' are written as escape sequences.
func yaml_emitter_write_double_quoted_scalar(emitter *yaml_emitter_t, value []byte, allow_breaks bool) bool {
	spaces := false
	if !yaml_emitter_write_indicator(emitter, []byte{'"'}, true, false, false) {
		return false
	}

	for i := 0; i < len(value); {
		if !is_printable(value, i) || (!emitter.unicode && !is_ascii(value, i)) ||
			is_bom(value, i) || is_break(value, i) ||
			value[i] == '"' || value[i] == '\\' {

			octet := value[i]

			// Decode the UTF-8 sequence starting at i into rune v;
			// w holds its byte width.
			var w int
			var v rune
			switch {
			case octet&0x80 == 0x00:
				w, v = 1, rune(octet&0x7F)
			case octet&0xE0 == 0xC0:
				w, v = 2, rune(octet&0x1F)
			case octet&0xF0 == 0xE0:
				w, v = 3, rune(octet&0x0F)
			case octet&0xF8 == 0xF0:
				w, v = 4, rune(octet&0x07)
			}
			for k := 1; k < w; k++ {
				octet = value[i+k]
				v = (v << 6) + (rune(octet) & 0x3F)
			}
			i += w

			if !put(emitter, '\\') {
				return false
			}

			// Prefer the short named escapes; anything else falls back
			// to a hex escape in the default branch.
			var ok bool
			switch v {
			case 0x00:
				ok = put(emitter, '0')
			case 0x07:
				ok = put(emitter, 'a')
			case 0x08:
				ok = put(emitter, 'b')
			case 0x09:
				ok = put(emitter, 't')
			case 0x0A:
				ok = put(emitter, 'n')
			case 0x0b:
				ok = put(emitter, 'v')
			case 0x0c:
				ok = put(emitter, 'f')
			case 0x0d:
				ok = put(emitter, 'r')
			case 0x1b:
				ok = put(emitter, 'e')
			case 0x22:
				ok = put(emitter, '"')
			case 0x5c:
				ok = put(emitter, '\\')
			case 0x85:
				ok = put(emitter, 'N')
			case 0xA0:
				ok = put(emitter, '_')
			case 0x2028:
				ok = put(emitter, 'L')
			case 0x2029:
				ok = put(emitter, 'P')
			default:
				// \xXX, \uXXXX or \UXXXXXXXX depending on magnitude;
				// w is reused here as the number of hex digits.
				if v <= 0xFF {
					ok = put(emitter, 'x')
					w = 2
				} else if v <= 0xFFFF {
					ok = put(emitter, 'u')
					w = 4
				} else {
					ok = put(emitter, 'U')
					w = 8
				}
				for k := (w - 1) * 4; ok && k >= 0; k -= 4 {
					digit := byte((v >> uint(k)) & 0x0F)
					if digit < 10 {
						ok = put(emitter, digit+'0')
					} else {
						ok = put(emitter, digit+'A'-10)
					}
				}
			}
			if !ok {
				return false
			}
			spaces = false
		} else if is_space(value, i) {
			// Fold at an interior space once past the best width. When
			// the next character is also a space, emit '\' so the
			// following spaces survive the folded break.
			if allow_breaks && !spaces && emitter.column > emitter.best_width && i > 0 && i < len(value)-1 {
				if !yaml_emitter_write_indent(emitter) {
					return false
				}
				if is_space(value, i+1) {
					if !put(emitter, '\\') {
						return false
					}
				}
				i += width(value[i])
			} else if !write(emitter, value, &i) {
				return false
			}
			spaces = true
		} else {
			if !write(emitter, value, &i) {
				return false
			}
			spaces = false
		}
	}
	if !yaml_emitter_write_indicator(emitter, []byte{'"'}, false, false, false) {
		return false
	}
	emitter.whitespace = false
	emitter.indention = false
	return true
}
1549
// yaml_emitter_write_block_scalar_hints writes the optional block
// scalar header hints: an explicit indentation indicator when the
// content starts with whitespace, and a chomping indicator ('-' strip,
// '+' keep) when the content does not end in exactly one line break.
func yaml_emitter_write_block_scalar_hints(emitter *yaml_emitter_t, value []byte) bool {
	if is_space(value, 0) || is_break(value, 0) {
		// Leading whitespace would confuse the parser's automatic
		// indentation detection, so state the indent explicitly.
		indent_hint := []byte{'0' + byte(emitter.best_indent)}
		if !yaml_emitter_write_indicator(emitter, indent_hint, false, false, false) {
			return false
		}
	}

	emitter.open_ended = false

	var chomp_hint [1]byte
	if len(value) == 0 {
		chomp_hint[0] = '-'
	} else {
		// Step back to the first byte of the last character
		// (skip UTF-8 continuation bytes).
		i := len(value) - 1
		for value[i]&0xC0 == 0x80 {
			i--
		}
		if !is_break(value, i) {
			// No trailing break: strip chomping.
			chomp_hint[0] = '-'
		} else if i == 0 {
			// The whole value is a single break: keep chomping.
			chomp_hint[0] = '+'
			emitter.open_ended = true
		} else {
			// Step back to the character before the final break.
			i--
			for value[i]&0xC0 == 0x80 {
				i--
			}
			if is_break(value, i) {
				// Two or more trailing breaks: keep chomping.
				chomp_hint[0] = '+'
				emitter.open_ended = true
			}
		}
	}
	if chomp_hint[0] != 0 {
		if !yaml_emitter_write_indicator(emitter, chomp_hint[:], false, false, false) {
			return false
		}
	}
	return true
}
1591
1592func yaml_emitter_write_literal_scalar(emitter *yaml_emitter_t, value []byte) bool {
1593 if !yaml_emitter_write_indicator(emitter, []byte{'|'}, true, false, false) {
1594 return false
1595 }
1596 if !yaml_emitter_write_block_scalar_hints(emitter, value) {
1597 return false
1598 }
1599 if !put_break(emitter) {
1600 return false
1601 }
1602 emitter.indention = true
1603 emitter.whitespace = true
1604 breaks := true
1605 for i := 0; i < len(value); {
1606 if is_break(value, i) {
1607 if !write_break(emitter, value, &i) {
1608 return false
1609 }
1610 emitter.indention = true
1611 breaks = true
1612 } else {
1613 if breaks {
1614 if !yaml_emitter_write_indent(emitter) {
1615 return false
1616 }
1617 }
1618 if !write(emitter, value, &i) {
1619 return false
1620 }
1621 emitter.indention = false
1622 breaks = false
1623 }
1624 }
1625
1626 return true
1627}
1628
// yaml_emitter_write_folded_scalar writes a folded block scalar ('>'),
// in which a single line break between text lines is folded into a
// space by the parser, so the writer must double such breaks and may
// fold long text lines at interior spaces.
func yaml_emitter_write_folded_scalar(emitter *yaml_emitter_t, value []byte) bool {
	if !yaml_emitter_write_indicator(emitter, []byte{'>'}, true, false, false) {
		return false
	}
	if !yaml_emitter_write_block_scalar_hints(emitter, value) {
		return false
	}

	if !put_break(emitter) {
		return false
	}
	emitter.indention = true
	emitter.whitespace = true

	breaks := true         // inside a run of line breaks
	leading_spaces := true // current text line starts blank (not foldable)
	for i := 0; i < len(value); {
		if is_break(value, i) {
			if !breaks && !leading_spaces && value[i] == '\n' {
				// Double a single '\n' between foldable lines so it is
				// not folded away on input.
				// NOTE(review): this scan starts at k=0 (the beginning
				// of value) rather than at i; libyaml performs the same
				// scan relative to its advancing pointer — TODO confirm
				// the offset is intended in this port.
				k := 0
				for is_break(value, k) {
					k += width(value[k])
				}
				if !is_blankz(value, k) {
					if !put_break(emitter) {
						return false
					}
				}
			}
			if !write_break(emitter, value, &i) {
				return false
			}
			emitter.indention = true
			breaks = true
		} else {
			if breaks {
				if !yaml_emitter_write_indent(emitter) {
					return false
				}
				leading_spaces = is_blank(value, i)
			}
			// Fold a long text line at a single interior space.
			if !breaks && is_space(value, i) && !is_space(value, i+1) && emitter.column > emitter.best_width {
				if !yaml_emitter_write_indent(emitter) {
					return false
				}
				i += width(value[i])
			} else {
				if !write(emitter, value, &i) {
					return false
				}
			}
			emitter.indention = false
			breaks = false
		}
	}
	return true
}
diff --git a/vendor/gopkg.in/yaml.v2/encode.go b/vendor/gopkg.in/yaml.v2/encode.go
new file mode 100644
index 0000000..0ee738e
--- /dev/null
+++ b/vendor/gopkg.in/yaml.v2/encode.go
@@ -0,0 +1,390 @@
1package yaml
2
3import (
4 "encoding"
5 "fmt"
6 "io"
7 "reflect"
8 "regexp"
9 "sort"
10 "strconv"
11 "strings"
12 "time"
13 "unicode/utf8"
14)
15
// jsonNumber is the interface of the encoding/json.Number datatype.
// Repeating the interface here avoids a dependency on encoding/json, and also
// supports other libraries like jsoniter, which use a similar datatype with
// the same interface. Detecting this interface is useful when dealing with
// structures containing json.Number, which is a string under the hood. The
// encoder should prefer the use of Int64(), Float64() and string(), in that
// order, when encoding this type.
type jsonNumber interface {
	// Float64 returns the number as a float64.
	Float64() (float64, error)
	// Int64 returns the number as an int64.
	Int64() (int64, error)
	// String returns the literal text of the number.
	String() string
}
28
// encoder drives the low-level emitter to turn Go values into YAML.
type encoder struct {
	emitter yaml_emitter_t // low-level emitter state
	event   yaml_event_t   // scratch event, reused for every emit
	out     []byte         // accumulated output when writing to a string
	flow    bool           // request flow style for the next collection
	// doneInit holds whether the initial stream_start_event has been
	// emitted.
	doneInit bool
}
38
39func newEncoder() *encoder {
40 e := &encoder{}
41 yaml_emitter_initialize(&e.emitter)
42 yaml_emitter_set_output_string(&e.emitter, &e.out)
43 yaml_emitter_set_unicode(&e.emitter, true)
44 return e
45}
46
47func newEncoderWithWriter(w io.Writer) *encoder {
48 e := &encoder{}
49 yaml_emitter_initialize(&e.emitter)
50 yaml_emitter_set_output_writer(&e.emitter, w)
51 yaml_emitter_set_unicode(&e.emitter, true)
52 return e
53}
54
55func (e *encoder) init() {
56 if e.doneInit {
57 return
58 }
59 yaml_stream_start_event_initialize(&e.event, yaml_UTF8_ENCODING)
60 e.emit()
61 e.doneInit = true
62}
63
// finish closes the YAML stream by emitting a stream_end_event.
func (e *encoder) finish() {
	// NOTE(review): clearing open_ended presumably stops the emitter
	// from writing an extra document separator before the stream ends —
	// confirm against emitterc.go.
	e.emitter.open_ended = false
	yaml_stream_end_event_initialize(&e.event)
	e.emit()
}
69
// destroy releases the resources held by the underlying emitter.
func (e *encoder) destroy() {
	yaml_emitter_delete(&e.emitter)
}
73
// emit hands the pending e.event to the emitter, aborting via must/failf
// if the emitter reports a problem.
func (e *encoder) emit() {
	// This will internally delete the e.event value.
	e.must(yaml_emitter_emit(&e.emitter, &e.event))
}
78
79func (e *encoder) must(ok bool) {
80 if !ok {
81 msg := e.emitter.problem
82 if msg == "" {
83 msg = "unknown problem generating YAML content"
84 }
85 failf("%s", msg)
86 }
87}
88
// marshalDoc emits one complete YAML document: document-start, the
// marshalled value, document-end. The stream is started lazily on the
// first call via init.
func (e *encoder) marshalDoc(tag string, in reflect.Value) {
	e.init()
	yaml_document_start_event_initialize(&e.event, nil, nil, true)
	e.emit()
	e.marshal(tag, in)
	yaml_document_end_event_initialize(&e.event, true)
	e.emit()
}
97
// marshal emits a YAML node for the value in. Special interfaces
// (json.Number work-alikes, yaml.Marshaler, encoding.TextMarshaler) may
// first replace the value; the result is then dispatched on its
// reflect.Kind. The case order of the type switch is significant:
// time.Time must be intercepted before TextMarshaler.
func (e *encoder) marshal(tag string, in reflect.Value) {
	if !in.IsValid() || in.Kind() == reflect.Ptr && in.IsNil() {
		e.nilv()
		return
	}
	iface := in.Interface()
	switch m := iface.(type) {
	case jsonNumber:
		integer, err := m.Int64()
		if err == nil {
			// In this case the json.Number is a valid int64
			in = reflect.ValueOf(integer)
			break
		}
		float, err := m.Float64()
		if err == nil {
			// In this case the json.Number is a valid float64
			in = reflect.ValueOf(float)
			break
		}
		// fallback case - no number could be obtained
		in = reflect.ValueOf(m.String())
	case time.Time, *time.Time:
		// Although time.Time implements TextMarshaler,
		// we don't want to treat it as a string for YAML
		// purposes because YAML has special support for
		// timestamps.
	case Marshaler:
		v, err := m.MarshalYAML()
		if err != nil {
			fail(err)
		}
		if v == nil {
			e.nilv()
			return
		}
		// Marshal the value produced by the custom marshaller instead.
		in = reflect.ValueOf(v)
	case encoding.TextMarshaler:
		text, err := m.MarshalText()
		if err != nil {
			fail(err)
		}
		in = reflect.ValueOf(string(text))
	case nil:
		e.nilv()
		return
	}
	switch in.Kind() {
	case reflect.Interface:
		e.marshal(tag, in.Elem())
	case reflect.Map:
		e.mapv(tag, in)
	case reflect.Ptr:
		if in.Type() == ptrTimeType {
			e.timev(tag, in.Elem())
		} else {
			e.marshal(tag, in.Elem())
		}
	case reflect.Struct:
		if in.Type() == timeType {
			e.timev(tag, in)
		} else {
			e.structv(tag, in)
		}
	case reflect.Slice, reflect.Array:
		// A []MapItem slice encodes as a mapping with preserved order.
		if in.Type().Elem() == mapItemType {
			e.itemsv(tag, in)
		} else {
			e.slicev(tag, in)
		}
	case reflect.String:
		e.stringv(tag, in)
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		// time.Duration encodes as its String form (e.g. "1m30s").
		if in.Type() == durationType {
			e.stringv(tag, reflect.ValueOf(iface.(time.Duration).String()))
		} else {
			e.intv(tag, in)
		}
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
		e.uintv(tag, in)
	case reflect.Float32, reflect.Float64:
		e.floatv(tag, in)
	case reflect.Bool:
		e.boolv(tag, in)
	default:
		panic("cannot marshal type: " + in.Type().String())
	}
}
186
187func (e *encoder) mapv(tag string, in reflect.Value) {
188 e.mappingv(tag, func() {
189 keys := keyList(in.MapKeys())
190 sort.Sort(keys)
191 for _, k := range keys {
192 e.marshal("", k)
193 e.marshal("", in.MapIndex(k))
194 }
195 })
196}
197
198func (e *encoder) itemsv(tag string, in reflect.Value) {
199 e.mappingv(tag, func() {
200 slice := in.Convert(reflect.TypeOf([]MapItem{})).Interface().([]MapItem)
201 for _, item := range slice {
202 e.marshal("", reflect.ValueOf(item.Key))
203 e.marshal("", reflect.ValueOf(item.Value))
204 }
205 })
206}
207
// structv emits a struct as a YAML mapping using the cached field
// information from getStructInfo (yaml tags, inline fields, omitempty,
// flow hints).
func (e *encoder) structv(tag string, in reflect.Value) {
	sinfo, err := getStructInfo(in.Type())
	if err != nil {
		panic(err)
	}
	e.mappingv(tag, func() {
		for _, info := range sinfo.FieldsList {
			var value reflect.Value
			if info.Inline == nil {
				value = in.Field(info.Num)
			} else {
				// Inlined struct field: follow the full index path.
				value = in.FieldByIndex(info.Inline)
			}
			if info.OmitEmpty && isZero(value) {
				continue
			}
			e.marshal("", reflect.ValueOf(info.Key))
			e.flow = info.Flow
			e.marshal("", value)
		}
		if sinfo.InlineMap >= 0 {
			// An ",inline" map contributes its entries directly to this
			// mapping; its keys must not collide with named fields.
			m := in.Field(sinfo.InlineMap)
			if m.Len() > 0 {
				e.flow = false
				keys := keyList(m.MapKeys())
				sort.Sort(keys)
				for _, k := range keys {
					if _, found := sinfo.FieldsMap[k.String()]; found {
						panic(fmt.Sprintf("Can't have key %q in inlined map; conflicts with struct field", k.String()))
					}
					e.marshal("", k)
					e.flow = false
					e.marshal("", m.MapIndex(k))
				}
			}
		}
	})
}
246
247func (e *encoder) mappingv(tag string, f func()) {
248 implicit := tag == ""
249 style := yaml_BLOCK_MAPPING_STYLE
250 if e.flow {
251 e.flow = false
252 style = yaml_FLOW_MAPPING_STYLE
253 }
254 yaml_mapping_start_event_initialize(&e.event, nil, []byte(tag), implicit, style)
255 e.emit()
256 f()
257 yaml_mapping_end_event_initialize(&e.event)
258 e.emit()
259}
260
261func (e *encoder) slicev(tag string, in reflect.Value) {
262 implicit := tag == ""
263 style := yaml_BLOCK_SEQUENCE_STYLE
264 if e.flow {
265 e.flow = false
266 style = yaml_FLOW_SEQUENCE_STYLE
267 }
268 e.must(yaml_sequence_start_event_initialize(&e.event, nil, []byte(tag), implicit, style))
269 e.emit()
270 n := in.Len()
271 for i := 0; i < n; i++ {
272 e.marshal("", in.Index(i))
273 }
274 e.must(yaml_sequence_end_event_initialize(&e.event))
275 e.emit()
276}
277
// isBase60Float returns whether s is in base 60 notation as defined in
// YAML 1.1.
//
// The base 60 float notation in YAML 1.1 is a terrible idea and is unsupported
// in YAML 1.2 and by this package, but these should be marshalled quoted for
// the time being for compatibility with other parsers.
func isBase60Float(s string) bool {
	// Cheap rejection before paying for the regular expression: the
	// notation must begin with a sign or digit and contain a colon.
	if len(s) == 0 {
		return false
	}
	switch c := s[0]; {
	case c == '+' || c == '-':
	case c >= '0' && c <= '9':
	default:
		return false
	}
	if strings.IndexByte(s, ':') < 0 {
		return false
	}
	// Do the full match.
	return base60float.MatchString(s)
}

// From http://yaml.org/type/float.html, except the regular expression there
// is bogus. In practice parsers do not enforce the "\.[0-9_]*" suffix.
var base60float = regexp.MustCompile(`^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+(?:\.[0-9_]*)?$`)
299
// stringv emits a string scalar, picking a tag and style that let the
// value round-trip: invalid UTF-8 becomes base64-encoded !!binary,
// strings that would parse as another type (or as a YAML 1.1 base-60
// float) are double-quoted, and multi-line strings use literal style.
func (e *encoder) stringv(tag string, in reflect.Value) {
	var style yaml_scalar_style_t
	s := in.String()
	canUsePlain := true
	switch {
	case !utf8.ValidString(s):
		if tag == yaml_BINARY_TAG {
			failf("explicitly tagged !!binary data must be base64-encoded")
		}
		if tag != "" {
			failf("cannot marshal invalid UTF-8 data as %s", shortTag(tag))
		}
		// It can't be encoded directly as YAML so use a binary tag
		// and encode it as base64.
		tag = yaml_BINARY_TAG
		s = encodeBase64(s)
	case tag == "":
		// Check to see if it would resolve to a specific
		// tag when encoded unquoted. If it doesn't,
		// there's no need to quote it.
		rtag, _ := resolve("", s)
		canUsePlain = rtag == yaml_STR_TAG && !isBase60Float(s)
	}
	// Note: it's possible for user code to emit invalid YAML
	// if they explicitly specify a tag and a string containing
	// text that's incompatible with that tag.
	switch {
	case strings.Contains(s, "\n"):
		// Multi-line strings read best in literal block style.
		style = yaml_LITERAL_SCALAR_STYLE
	case canUsePlain:
		style = yaml_PLAIN_SCALAR_STYLE
	default:
		// Quote anything that would otherwise resolve to another type.
		style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
	}
	e.emitScalar(s, "", tag, style)
}
336
337func (e *encoder) boolv(tag string, in reflect.Value) {
338 var s string
339 if in.Bool() {
340 s = "true"
341 } else {
342 s = "false"
343 }
344 e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
345}
346
347func (e *encoder) intv(tag string, in reflect.Value) {
348 s := strconv.FormatInt(in.Int(), 10)
349 e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
350}
351
352func (e *encoder) uintv(tag string, in reflect.Value) {
353 s := strconv.FormatUint(in.Uint(), 10)
354 e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
355}
356
357func (e *encoder) timev(tag string, in reflect.Value) {
358 t := in.Interface().(time.Time)
359 s := t.Format(time.RFC3339Nano)
360 e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
361}
362
363func (e *encoder) floatv(tag string, in reflect.Value) {
364 // Issue #352: When formatting, use the precision of the underlying value
365 precision := 64
366 if in.Kind() == reflect.Float32 {
367 precision = 32
368 }
369
370 s := strconv.FormatFloat(in.Float(), 'g', -1, precision)
371 switch s {
372 case "+Inf":
373 s = ".inf"
374 case "-Inf":
375 s = "-.inf"
376 case "NaN":
377 s = ".nan"
378 }
379 e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
380}
381
// nilv emits nil as an implicit plain "null" scalar.
func (e *encoder) nilv() {
	e.emitScalar("null", "", "", yaml_PLAIN_SCALAR_STYLE)
}
385
// emitScalar emits a single scalar event. An empty tag marks the scalar
// implicit (for both plain and quoted styles), so no tag is written.
func (e *encoder) emitScalar(value, anchor, tag string, style yaml_scalar_style_t) {
	implicit := tag == ""
	e.must(yaml_scalar_event_initialize(&e.event, []byte(anchor), []byte(tag), []byte(value), implicit, implicit, style))
	e.emit()
}
diff --git a/vendor/gopkg.in/yaml.v2/parserc.go b/vendor/gopkg.in/yaml.v2/parserc.go
new file mode 100644
index 0000000..81d05df
--- /dev/null
+++ b/vendor/gopkg.in/yaml.v2/parserc.go
@@ -0,0 +1,1095 @@
1package yaml
2
3import (
4 "bytes"
5)
6
7// The parser implements the following grammar:
8//
9// stream ::= STREAM-START implicit_document? explicit_document* STREAM-END
10// implicit_document ::= block_node DOCUMENT-END*
11// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
12// block_node_or_indentless_sequence ::=
13// ALIAS
14// | properties (block_content | indentless_block_sequence)?
15// | block_content
16// | indentless_block_sequence
17// block_node ::= ALIAS
18// | properties block_content?
19// | block_content
20// flow_node ::= ALIAS
21// | properties flow_content?
22// | flow_content
23// properties ::= TAG ANCHOR? | ANCHOR TAG?
24// block_content ::= block_collection | flow_collection | SCALAR
25// flow_content ::= flow_collection | SCALAR
26// block_collection ::= block_sequence | block_mapping
27// flow_collection ::= flow_sequence | flow_mapping
28// block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
29// indentless_sequence ::= (BLOCK-ENTRY block_node?)+
30// block_mapping ::= BLOCK-MAPPING_START
31// ((KEY block_node_or_indentless_sequence?)?
32// (VALUE block_node_or_indentless_sequence?)?)*
33// BLOCK-END
34// flow_sequence ::= FLOW-SEQUENCE-START
35// (flow_sequence_entry FLOW-ENTRY)*
36// flow_sequence_entry?
37// FLOW-SEQUENCE-END
38// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
39// flow_mapping ::= FLOW-MAPPING-START
40// (flow_mapping_entry FLOW-ENTRY)*
41// flow_mapping_entry?
42// FLOW-MAPPING-END
43// flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
44
45// Peek the next token in the token queue.
46func peek_token(parser *yaml_parser_t) *yaml_token_t {
47 if parser.token_available || yaml_parser_fetch_more_tokens(parser) {
48 return &parser.tokens[parser.tokens_head]
49 }
50 return nil
51}
52
// Remove the next token from the queue (must be called after peek_token).
// Also records whether the consumed token ends the stream, which makes
// yaml_parser_parse stop producing events.
func skip_token(parser *yaml_parser_t) {
	parser.token_available = false
	parser.tokens_parsed++
	parser.stream_end_produced = parser.tokens[parser.tokens_head].typ == yaml_STREAM_END_TOKEN
	parser.tokens_head++
}
60
// Get the next event.
// yaml_parser_parse erases *event and fills it with the next parsing
// event. It returns false only on a parsing failure; once the stream has
// ended, an error was already recorded, or the parser reached its end
// state, it returns true leaving *event empty.
func yaml_parser_parse(parser *yaml_parser_t, event *yaml_event_t) bool {
	// Erase the event object.
	*event = yaml_event_t{}

	// No events after the end of the stream or error.
	if parser.stream_end_produced || parser.error != yaml_NO_ERROR || parser.state == yaml_PARSE_END_STATE {
		return true
	}

	// Generate the next event.
	return yaml_parser_state_machine(parser, event)
}
74
// Set parser error.
// yaml_parser_set_parser_error records a parser error with its position
// and returns false so callers can propagate the failure directly.
func yaml_parser_set_parser_error(parser *yaml_parser_t, problem string, problem_mark yaml_mark_t) bool {
	parser.error = yaml_PARSER_ERROR
	parser.problem = problem
	parser.problem_mark = problem_mark
	return false
}
82
// yaml_parser_set_parser_error_context is like yaml_parser_set_parser_error
// but also records the enclosing context (e.g. "while parsing a block
// mapping") and the mark where that context began. Always returns false.
func yaml_parser_set_parser_error_context(parser *yaml_parser_t, context string, context_mark yaml_mark_t, problem string, problem_mark yaml_mark_t) bool {
	parser.error = yaml_PARSER_ERROR
	parser.context = context
	parser.context_mark = context_mark
	parser.problem = problem
	parser.problem_mark = problem_mark
	return false
}
91
// State dispatcher.
// yaml_parser_state_machine routes the call to the parsing function that
// handles the parser's current state. Each handler produces exactly one
// event and updates parser.state (and possibly parser.states/parser.marks)
// for the next call. An unknown state is a programming error and panics.
func yaml_parser_state_machine(parser *yaml_parser_t, event *yaml_event_t) bool {
	//trace("yaml_parser_state_machine", "state:", parser.state.String())

	switch parser.state {
	case yaml_PARSE_STREAM_START_STATE:
		return yaml_parser_parse_stream_start(parser, event)

	case yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE:
		return yaml_parser_parse_document_start(parser, event, true)

	case yaml_PARSE_DOCUMENT_START_STATE:
		return yaml_parser_parse_document_start(parser, event, false)

	case yaml_PARSE_DOCUMENT_CONTENT_STATE:
		return yaml_parser_parse_document_content(parser, event)

	case yaml_PARSE_DOCUMENT_END_STATE:
		return yaml_parser_parse_document_end(parser, event)

	case yaml_PARSE_BLOCK_NODE_STATE:
		return yaml_parser_parse_node(parser, event, true, false)

	case yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE:
		return yaml_parser_parse_node(parser, event, true, true)

	case yaml_PARSE_FLOW_NODE_STATE:
		return yaml_parser_parse_node(parser, event, false, false)

	case yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE:
		return yaml_parser_parse_block_sequence_entry(parser, event, true)

	case yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE:
		return yaml_parser_parse_block_sequence_entry(parser, event, false)

	case yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE:
		return yaml_parser_parse_indentless_sequence_entry(parser, event)

	case yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE:
		return yaml_parser_parse_block_mapping_key(parser, event, true)

	case yaml_PARSE_BLOCK_MAPPING_KEY_STATE:
		return yaml_parser_parse_block_mapping_key(parser, event, false)

	case yaml_PARSE_BLOCK_MAPPING_VALUE_STATE:
		return yaml_parser_parse_block_mapping_value(parser, event)

	case yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE:
		return yaml_parser_parse_flow_sequence_entry(parser, event, true)

	case yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE:
		return yaml_parser_parse_flow_sequence_entry(parser, event, false)

	case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE:
		return yaml_parser_parse_flow_sequence_entry_mapping_key(parser, event)

	case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE:
		return yaml_parser_parse_flow_sequence_entry_mapping_value(parser, event)

	case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE:
		return yaml_parser_parse_flow_sequence_entry_mapping_end(parser, event)

	case yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE:
		return yaml_parser_parse_flow_mapping_key(parser, event, true)

	case yaml_PARSE_FLOW_MAPPING_KEY_STATE:
		return yaml_parser_parse_flow_mapping_key(parser, event, false)

	case yaml_PARSE_FLOW_MAPPING_VALUE_STATE:
		return yaml_parser_parse_flow_mapping_value(parser, event, false)

	case yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE:
		return yaml_parser_parse_flow_mapping_value(parser, event, true)

	default:
		panic("invalid parser state")
	}
}
170
// Parse the production:
// stream   ::= STREAM-START implicit_document? explicit_document* STREAM-END
//              ************
// yaml_parser_parse_stream_start consumes the STREAM-START token, emits the
// matching event (carrying the detected encoding), and moves the parser to
// the implicit-document-start state.
func yaml_parser_parse_stream_start(parser *yaml_parser_t, event *yaml_event_t) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}
	if token.typ != yaml_STREAM_START_TOKEN {
		return yaml_parser_set_parser_error(parser, "did not find expected <stream-start>", token.start_mark)
	}
	parser.state = yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE
	*event = yaml_event_t{
		typ:        yaml_STREAM_START_EVENT,
		start_mark: token.start_mark,
		end_mark:   token.end_mark,
		encoding:   token.encoding,
	}
	skip_token(parser)
	return true
}
192
// Parse the productions:
// implicit_document    ::= block_node DOCUMENT-END*
//                          *
// explicit_document    ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
//                          *************************
// yaml_parser_parse_document_start emits either a DOCUMENT-START event (for
// an implicit or explicit document) or a STREAM-END event when no further
// document follows. The implicit flag tells whether this call may begin a
// document without a "---" marker.
func yaml_parser_parse_document_start(parser *yaml_parser_t, event *yaml_event_t, implicit bool) bool {

	token := peek_token(parser)
	if token == nil {
		return false
	}

	// Parse extra document end indicators.
	if !implicit {
		for token.typ == yaml_DOCUMENT_END_TOKEN {
			skip_token(parser)
			token = peek_token(parser)
			if token == nil {
				return false
			}
		}
	}

	if implicit && token.typ != yaml_VERSION_DIRECTIVE_TOKEN &&
		token.typ != yaml_TAG_DIRECTIVE_TOKEN &&
		token.typ != yaml_DOCUMENT_START_TOKEN &&
		token.typ != yaml_STREAM_END_TOKEN {
		// Parse an implicit document: no directives and no "---" marker,
		// so only the default tag directives apply.
		if !yaml_parser_process_directives(parser, nil, nil) {
			return false
		}
		parser.states = append(parser.states, yaml_PARSE_DOCUMENT_END_STATE)
		parser.state = yaml_PARSE_BLOCK_NODE_STATE

		*event = yaml_event_t{
			typ:        yaml_DOCUMENT_START_EVENT,
			start_mark: token.start_mark,
			end_mark:   token.end_mark,
		}

	} else if token.typ != yaml_STREAM_END_TOKEN {
		// Parse an explicit document: directives (if any) followed by "---".
		var version_directive *yaml_version_directive_t
		var tag_directives []yaml_tag_directive_t
		start_mark := token.start_mark
		if !yaml_parser_process_directives(parser, &version_directive, &tag_directives) {
			return false
		}
		token = peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ != yaml_DOCUMENT_START_TOKEN {
			yaml_parser_set_parser_error(parser,
				"did not find expected <document start>", token.start_mark)
			return false
		}
		parser.states = append(parser.states, yaml_PARSE_DOCUMENT_END_STATE)
		parser.state = yaml_PARSE_DOCUMENT_CONTENT_STATE
		end_mark := token.end_mark

		*event = yaml_event_t{
			typ:               yaml_DOCUMENT_START_EVENT,
			start_mark:        start_mark,
			end_mark:          end_mark,
			version_directive: version_directive,
			tag_directives:    tag_directives,
			implicit:          false,
		}
		skip_token(parser)

	} else {
		// Parse the stream end.
		parser.state = yaml_PARSE_END_STATE
		*event = yaml_event_t{
			typ:        yaml_STREAM_END_EVENT,
			start_mark: token.start_mark,
			end_mark:   token.end_mark,
		}
		skip_token(parser)
	}

	return true
}
277
// Parse the productions:
// explicit_document    ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
//                                                    ***********
// yaml_parser_parse_document_content parses the (optional) root node of a
// document. If the document body is empty — the next token already starts
// another directive, document, or ends the stream — an empty scalar event
// is produced instead.
func yaml_parser_parse_document_content(parser *yaml_parser_t, event *yaml_event_t) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}
	if token.typ == yaml_VERSION_DIRECTIVE_TOKEN ||
		token.typ == yaml_TAG_DIRECTIVE_TOKEN ||
		token.typ == yaml_DOCUMENT_START_TOKEN ||
		token.typ == yaml_DOCUMENT_END_TOKEN ||
		token.typ == yaml_STREAM_END_TOKEN {
		parser.state = parser.states[len(parser.states)-1]
		parser.states = parser.states[:len(parser.states)-1]
		return yaml_parser_process_empty_scalar(parser, event,
			token.start_mark)
	}
	return yaml_parser_parse_node(parser, event, true, false)
}
299
// Parse the productions:
// implicit_document    ::= block_node DOCUMENT-END*
//                                     *************
// explicit_document    ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
//
// yaml_parser_parse_document_end emits a DOCUMENT-END event. The event is
// implicit unless an explicit "..." (DOCUMENT-END) token is present. The
// per-document %TAG directives are discarded here.
func yaml_parser_parse_document_end(parser *yaml_parser_t, event *yaml_event_t) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}

	start_mark := token.start_mark
	end_mark := token.start_mark

	implicit := true
	if token.typ == yaml_DOCUMENT_END_TOKEN {
		end_mark = token.end_mark
		skip_token(parser)
		implicit = false
	}

	// Tag directives do not carry over between documents.
	parser.tag_directives = parser.tag_directives[:0]

	parser.state = yaml_PARSE_DOCUMENT_START_STATE
	*event = yaml_event_t{
		typ:        yaml_DOCUMENT_END_EVENT,
		start_mark: start_mark,
		end_mark:   end_mark,
		implicit:   implicit,
	}
	return true
}
332
// Parse the productions:
// block_node_or_indentless_sequence    ::=
//                          ALIAS
//                          *****
//                          | properties (block_content | indentless_block_sequence)?
//                            **********  *
//                          | block_content | indentless_block_sequence
//                            *
// block_node           ::= ALIAS
//                          *****
//                          | properties block_content?
//                            ********** *
//                          | block_content
//                            *
// flow_node            ::= ALIAS
//                          *****
//                          | properties flow_content?
//                            ********** *
//                          | flow_content
//                            *
// properties           ::= TAG ANCHOR? | ANCHOR TAG?
//                          *************************
// block_content        ::= block_collection | flow_collection | SCALAR
//                                                               ******
// flow_content         ::= flow_collection | SCALAR
//                                            ******
//
// yaml_parser_parse_node parses a single node: an alias, a scalar, or the
// start of a collection. It first consumes the optional anchor/tag
// properties (in either order), resolves the tag handle against the
// active tag directives, and then dispatches on the following token.
// block selects whether block collections are allowed; indentless_sequence
// additionally allows a "-" entry at the current indentation level.
func yaml_parser_parse_node(parser *yaml_parser_t, event *yaml_event_t, block, indentless_sequence bool) bool {
	//defer trace("yaml_parser_parse_node", "block:", block, "indentless_sequence:", indentless_sequence)()

	token := peek_token(parser)
	if token == nil {
		return false
	}

	// An alias node carries no properties or content of its own.
	if token.typ == yaml_ALIAS_TOKEN {
		parser.state = parser.states[len(parser.states)-1]
		parser.states = parser.states[:len(parser.states)-1]
		*event = yaml_event_t{
			typ:        yaml_ALIAS_EVENT,
			start_mark: token.start_mark,
			end_mark:   token.end_mark,
			anchor:     token.value,
		}
		skip_token(parser)
		return true
	}

	start_mark := token.start_mark
	end_mark := token.start_mark

	var tag_token bool
	var tag_handle, tag_suffix, anchor []byte
	var tag_mark yaml_mark_t
	// Properties may appear as ANCHOR TAG? or TAG ANCHOR?.
	if token.typ == yaml_ANCHOR_TOKEN {
		anchor = token.value
		start_mark = token.start_mark
		end_mark = token.end_mark
		skip_token(parser)
		token = peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ == yaml_TAG_TOKEN {
			tag_token = true
			tag_handle = token.value
			tag_suffix = token.suffix
			tag_mark = token.start_mark
			end_mark = token.end_mark
			skip_token(parser)
			token = peek_token(parser)
			if token == nil {
				return false
			}
		}
	} else if token.typ == yaml_TAG_TOKEN {
		tag_token = true
		tag_handle = token.value
		tag_suffix = token.suffix
		start_mark = token.start_mark
		tag_mark = token.start_mark
		end_mark = token.end_mark
		skip_token(parser)
		token = peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ == yaml_ANCHOR_TOKEN {
			anchor = token.value
			end_mark = token.end_mark
			skip_token(parser)
			token = peek_token(parser)
			if token == nil {
				return false
			}
		}
	}

	// Resolve the tag handle against the active %TAG directives; an empty
	// handle means the tag was already verbatim.
	var tag []byte
	if tag_token {
		if len(tag_handle) == 0 {
			tag = tag_suffix
			tag_suffix = nil
		} else {
			for i := range parser.tag_directives {
				if bytes.Equal(parser.tag_directives[i].handle, tag_handle) {
					tag = append([]byte(nil), parser.tag_directives[i].prefix...)
					tag = append(tag, tag_suffix...)
					break
				}
			}
			if len(tag) == 0 {
				yaml_parser_set_parser_error_context(parser,
					"while parsing a node", start_mark,
					"found undefined tag handle", tag_mark)
				return false
			}
		}
	}

	implicit := len(tag) == 0
	if indentless_sequence && token.typ == yaml_BLOCK_ENTRY_TOKEN {
		end_mark = token.end_mark
		parser.state = yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE
		*event = yaml_event_t{
			typ:        yaml_SEQUENCE_START_EVENT,
			start_mark: start_mark,
			end_mark:   end_mark,
			anchor:     anchor,
			tag:        tag,
			implicit:   implicit,
			style:      yaml_style_t(yaml_BLOCK_SEQUENCE_STYLE),
		}
		return true
	}
	if token.typ == yaml_SCALAR_TOKEN {
		var plain_implicit, quoted_implicit bool
		end_mark = token.end_mark
		if (len(tag) == 0 && token.style == yaml_PLAIN_SCALAR_STYLE) || (len(tag) == 1 && tag[0] == '!') {
			plain_implicit = true
		} else if len(tag) == 0 {
			quoted_implicit = true
		}
		parser.state = parser.states[len(parser.states)-1]
		parser.states = parser.states[:len(parser.states)-1]

		*event = yaml_event_t{
			typ:             yaml_SCALAR_EVENT,
			start_mark:      start_mark,
			end_mark:        end_mark,
			anchor:          anchor,
			tag:             tag,
			value:           token.value,
			implicit:        plain_implicit,
			quoted_implicit: quoted_implicit,
			style:           yaml_style_t(token.style),
		}
		skip_token(parser)
		return true
	}
	if token.typ == yaml_FLOW_SEQUENCE_START_TOKEN {
		// [Go] Some of the events below can be merged as they differ only on style.
		end_mark = token.end_mark
		parser.state = yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE
		*event = yaml_event_t{
			typ:        yaml_SEQUENCE_START_EVENT,
			start_mark: start_mark,
			end_mark:   end_mark,
			anchor:     anchor,
			tag:        tag,
			implicit:   implicit,
			style:      yaml_style_t(yaml_FLOW_SEQUENCE_STYLE),
		}
		return true
	}
	if token.typ == yaml_FLOW_MAPPING_START_TOKEN {
		end_mark = token.end_mark
		parser.state = yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE
		*event = yaml_event_t{
			typ:        yaml_MAPPING_START_EVENT,
			start_mark: start_mark,
			end_mark:   end_mark,
			anchor:     anchor,
			tag:        tag,
			implicit:   implicit,
			style:      yaml_style_t(yaml_FLOW_MAPPING_STYLE),
		}
		return true
	}
	if block && token.typ == yaml_BLOCK_SEQUENCE_START_TOKEN {
		end_mark = token.end_mark
		parser.state = yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE
		*event = yaml_event_t{
			typ:        yaml_SEQUENCE_START_EVENT,
			start_mark: start_mark,
			end_mark:   end_mark,
			anchor:     anchor,
			tag:        tag,
			implicit:   implicit,
			style:      yaml_style_t(yaml_BLOCK_SEQUENCE_STYLE),
		}
		return true
	}
	if block && token.typ == yaml_BLOCK_MAPPING_START_TOKEN {
		end_mark = token.end_mark
		parser.state = yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE
		*event = yaml_event_t{
			typ:        yaml_MAPPING_START_EVENT,
			start_mark: start_mark,
			end_mark:   end_mark,
			anchor:     anchor,
			tag:        tag,
			implicit:   implicit,
			style:      yaml_style_t(yaml_BLOCK_MAPPING_STYLE),
		}
		return true
	}
	// Properties with no following content stand for an empty scalar.
	if len(anchor) > 0 || len(tag) > 0 {
		parser.state = parser.states[len(parser.states)-1]
		parser.states = parser.states[:len(parser.states)-1]

		*event = yaml_event_t{
			typ:             yaml_SCALAR_EVENT,
			start_mark:      start_mark,
			end_mark:        end_mark,
			anchor:          anchor,
			tag:             tag,
			implicit:        implicit,
			quoted_implicit: false,
			style:           yaml_style_t(yaml_PLAIN_SCALAR_STYLE),
		}
		return true
	}

	context := "while parsing a flow node"
	if block {
		context = "while parsing a block node"
	}
	yaml_parser_set_parser_error_context(parser, context, start_mark,
		"did not find expected node content", token.start_mark)
	return false
}
574
575// Parse the productions:
576// block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
577// ******************** *********** * *********
578//
579func yaml_parser_parse_block_sequence_entry(parser *yaml_parser_t, event *yaml_event_t, first bool) bool {
580 if first {
581 token := peek_token(parser)
582 parser.marks = append(parser.marks, token.start_mark)
583 skip_token(parser)
584 }
585
586 token := peek_token(parser)
587 if token == nil {
588 return false
589 }
590
591 if token.typ == yaml_BLOCK_ENTRY_TOKEN {
592 mark := token.end_mark
593 skip_token(parser)
594 token = peek_token(parser)
595 if token == nil {
596 return false
597 }
598 if token.typ != yaml_BLOCK_ENTRY_TOKEN && token.typ != yaml_BLOCK_END_TOKEN {
599 parser.states = append(parser.states, yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE)
600 return yaml_parser_parse_node(parser, event, true, false)
601 } else {
602 parser.state = yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE
603 return yaml_parser_process_empty_scalar(parser, event, mark)
604 }
605 }
606 if token.typ == yaml_BLOCK_END_TOKEN {
607 parser.state = parser.states[len(parser.states)-1]
608 parser.states = parser.states[:len(parser.states)-1]
609 parser.marks = parser.marks[:len(parser.marks)-1]
610
611 *event = yaml_event_t{
612 typ: yaml_SEQUENCE_END_EVENT,
613 start_mark: token.start_mark,
614 end_mark: token.end_mark,
615 }
616
617 skip_token(parser)
618 return true
619 }
620
621 context_mark := parser.marks[len(parser.marks)-1]
622 parser.marks = parser.marks[:len(parser.marks)-1]
623 return yaml_parser_set_parser_error_context(parser,
624 "while parsing a block collection", context_mark,
625 "did not find expected '-' indicator", token.start_mark)
626}
627
// Parse the productions:
// indentless_sequence  ::= (BLOCK-ENTRY block_node?)+
//                           *********** *
// yaml_parser_parse_indentless_sequence_entry parses one entry of a
// sequence whose "-" markers sit at the same indentation as the enclosing
// mapping key. The sequence ends at the first token that is not a
// BLOCK-ENTRY, without consuming it.
func yaml_parser_parse_indentless_sequence_entry(parser *yaml_parser_t, event *yaml_event_t) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}

	if token.typ == yaml_BLOCK_ENTRY_TOKEN {
		mark := token.end_mark
		skip_token(parser)
		token = peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ != yaml_BLOCK_ENTRY_TOKEN &&
			token.typ != yaml_KEY_TOKEN &&
			token.typ != yaml_VALUE_TOKEN &&
			token.typ != yaml_BLOCK_END_TOKEN {
			parser.states = append(parser.states, yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE)
			return yaml_parser_parse_node(parser, event, true, false)
		}
		// A "-" with no node content stands for an empty entry.
		parser.state = yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE
		return yaml_parser_process_empty_scalar(parser, event, mark)
	}
	parser.state = parser.states[len(parser.states)-1]
	parser.states = parser.states[:len(parser.states)-1]

	*event = yaml_event_t{
		typ:        yaml_SEQUENCE_END_EVENT,
		start_mark: token.start_mark,
		end_mark:   token.start_mark, // [Go] Shouldn't this be token.end_mark?
	}
	return true
}
664
665// Parse the productions:
666// block_mapping ::= BLOCK-MAPPING_START
667// *******************
668// ((KEY block_node_or_indentless_sequence?)?
669// *** *
670// (VALUE block_node_or_indentless_sequence?)?)*
671//
672// BLOCK-END
673// *********
674//
675func yaml_parser_parse_block_mapping_key(parser *yaml_parser_t, event *yaml_event_t, first bool) bool {
676 if first {
677 token := peek_token(parser)
678 parser.marks = append(parser.marks, token.start_mark)
679 skip_token(parser)
680 }
681
682 token := peek_token(parser)
683 if token == nil {
684 return false
685 }
686
687 if token.typ == yaml_KEY_TOKEN {
688 mark := token.end_mark
689 skip_token(parser)
690 token = peek_token(parser)
691 if token == nil {
692 return false
693 }
694 if token.typ != yaml_KEY_TOKEN &&
695 token.typ != yaml_VALUE_TOKEN &&
696 token.typ != yaml_BLOCK_END_TOKEN {
697 parser.states = append(parser.states, yaml_PARSE_BLOCK_MAPPING_VALUE_STATE)
698 return yaml_parser_parse_node(parser, event, true, true)
699 } else {
700 parser.state = yaml_PARSE_BLOCK_MAPPING_VALUE_STATE
701 return yaml_parser_process_empty_scalar(parser, event, mark)
702 }
703 } else if token.typ == yaml_BLOCK_END_TOKEN {
704 parser.state = parser.states[len(parser.states)-1]
705 parser.states = parser.states[:len(parser.states)-1]
706 parser.marks = parser.marks[:len(parser.marks)-1]
707 *event = yaml_event_t{
708 typ: yaml_MAPPING_END_EVENT,
709 start_mark: token.start_mark,
710 end_mark: token.end_mark,
711 }
712 skip_token(parser)
713 return true
714 }
715
716 context_mark := parser.marks[len(parser.marks)-1]
717 parser.marks = parser.marks[:len(parser.marks)-1]
718 return yaml_parser_set_parser_error_context(parser,
719 "while parsing a block mapping", context_mark,
720 "did not find expected key", token.start_mark)
721}
722
// Parse the productions:
// block_mapping        ::= BLOCK-MAPPING_START
//
//                          ((KEY block_node_or_indentless_sequence?)?
//
//                          (VALUE block_node_or_indentless_sequence?)?)*
//                           ***** *
//                          BLOCK-END
//
// yaml_parser_parse_block_mapping_value parses the value half of a block
// mapping entry. A missing ":" (or a ":" with no following node) yields
// an empty scalar so every key still gets a value event.
func yaml_parser_parse_block_mapping_value(parser *yaml_parser_t, event *yaml_event_t) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}
	if token.typ == yaml_VALUE_TOKEN {
		mark := token.end_mark
		skip_token(parser)
		token = peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ != yaml_KEY_TOKEN &&
			token.typ != yaml_VALUE_TOKEN &&
			token.typ != yaml_BLOCK_END_TOKEN {
			parser.states = append(parser.states, yaml_PARSE_BLOCK_MAPPING_KEY_STATE)
			return yaml_parser_parse_node(parser, event, true, true)
		}
		parser.state = yaml_PARSE_BLOCK_MAPPING_KEY_STATE
		return yaml_parser_process_empty_scalar(parser, event, mark)
	}
	parser.state = yaml_PARSE_BLOCK_MAPPING_KEY_STATE
	return yaml_parser_process_empty_scalar(parser, event, token.start_mark)
}
757
758// Parse the productions:
759// flow_sequence ::= FLOW-SEQUENCE-START
760// *******************
761// (flow_sequence_entry FLOW-ENTRY)*
762// * **********
763// flow_sequence_entry?
764// *
765// FLOW-SEQUENCE-END
766// *****************
767// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
768// *
769//
770func yaml_parser_parse_flow_sequence_entry(parser *yaml_parser_t, event *yaml_event_t, first bool) bool {
771 if first {
772 token := peek_token(parser)
773 parser.marks = append(parser.marks, token.start_mark)
774 skip_token(parser)
775 }
776 token := peek_token(parser)
777 if token == nil {
778 return false
779 }
780 if token.typ != yaml_FLOW_SEQUENCE_END_TOKEN {
781 if !first {
782 if token.typ == yaml_FLOW_ENTRY_TOKEN {
783 skip_token(parser)
784 token = peek_token(parser)
785 if token == nil {
786 return false
787 }
788 } else {
789 context_mark := parser.marks[len(parser.marks)-1]
790 parser.marks = parser.marks[:len(parser.marks)-1]
791 return yaml_parser_set_parser_error_context(parser,
792 "while parsing a flow sequence", context_mark,
793 "did not find expected ',' or ']'", token.start_mark)
794 }
795 }
796
797 if token.typ == yaml_KEY_TOKEN {
798 parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE
799 *event = yaml_event_t{
800 typ: yaml_MAPPING_START_EVENT,
801 start_mark: token.start_mark,
802 end_mark: token.end_mark,
803 implicit: true,
804 style: yaml_style_t(yaml_FLOW_MAPPING_STYLE),
805 }
806 skip_token(parser)
807 return true
808 } else if token.typ != yaml_FLOW_SEQUENCE_END_TOKEN {
809 parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE)
810 return yaml_parser_parse_node(parser, event, false, false)
811 }
812 }
813
814 parser.state = parser.states[len(parser.states)-1]
815 parser.states = parser.states[:len(parser.states)-1]
816 parser.marks = parser.marks[:len(parser.marks)-1]
817
818 *event = yaml_event_t{
819 typ: yaml_SEQUENCE_END_EVENT,
820 start_mark: token.start_mark,
821 end_mark: token.end_mark,
822 }
823
824 skip_token(parser)
825 return true
826}
827
//
// Parse the productions:
// flow_sequence_entry  ::= flow_node | KEY flow_node? (VALUE flow_node?)?
//                                      *** *
//
// yaml_parser_parse_flow_sequence_entry_mapping_key parses the key of a
// single-pair mapping embedded in a flow sequence; an absent key becomes
// an empty scalar.
func yaml_parser_parse_flow_sequence_entry_mapping_key(parser *yaml_parser_t, event *yaml_event_t) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}
	if token.typ != yaml_VALUE_TOKEN &&
		token.typ != yaml_FLOW_ENTRY_TOKEN &&
		token.typ != yaml_FLOW_SEQUENCE_END_TOKEN {
		parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE)
		return yaml_parser_parse_node(parser, event, false, false)
	}
	mark := token.end_mark
	skip_token(parser)
	parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE
	return yaml_parser_process_empty_scalar(parser, event, mark)
}
849
// Parse the productions:
// flow_sequence_entry  ::= flow_node | KEY flow_node? (VALUE flow_node?)?
//                                                      ***** *
//
// yaml_parser_parse_flow_sequence_entry_mapping_value parses the value of
// a single-pair mapping embedded in a flow sequence; an absent value
// becomes an empty scalar.
func yaml_parser_parse_flow_sequence_entry_mapping_value(parser *yaml_parser_t, event *yaml_event_t) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}
	if token.typ == yaml_VALUE_TOKEN {
		skip_token(parser)
		// NOTE(review): this ":=" shadows the outer token, so the empty
		// scalar below is marked at the VALUE token, not the one after it.
		token := peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ != yaml_FLOW_ENTRY_TOKEN && token.typ != yaml_FLOW_SEQUENCE_END_TOKEN {
			parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE)
			return yaml_parser_parse_node(parser, event, false, false)
		}
	}
	parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE
	return yaml_parser_process_empty_scalar(parser, event, token.start_mark)
}
873
// Parse the productions:
// flow_sequence_entry  ::= flow_node | KEY flow_node? (VALUE flow_node?)?
//                                                                      *
//
// yaml_parser_parse_flow_sequence_entry_mapping_end closes the implicit
// single-pair mapping and returns to parsing flow sequence entries. No
// token is consumed.
func yaml_parser_parse_flow_sequence_entry_mapping_end(parser *yaml_parser_t, event *yaml_event_t) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}
	parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE
	*event = yaml_event_t{
		typ:        yaml_MAPPING_END_EVENT,
		start_mark: token.start_mark,
		end_mark:   token.start_mark, // [Go] Shouldn't this be end_mark?
	}
	return true
}
891
892// Parse the productions:
893// flow_mapping ::= FLOW-MAPPING-START
894// ******************
895// (flow_mapping_entry FLOW-ENTRY)*
896// * **********
897// flow_mapping_entry?
898// ******************
899// FLOW-MAPPING-END
900// ****************
901// flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
902// * *** *
903//
904func yaml_parser_parse_flow_mapping_key(parser *yaml_parser_t, event *yaml_event_t, first bool) bool {
905 if first {
906 token := peek_token(parser)
907 parser.marks = append(parser.marks, token.start_mark)
908 skip_token(parser)
909 }
910
911 token := peek_token(parser)
912 if token == nil {
913 return false
914 }
915
916 if token.typ != yaml_FLOW_MAPPING_END_TOKEN {
917 if !first {
918 if token.typ == yaml_FLOW_ENTRY_TOKEN {
919 skip_token(parser)
920 token = peek_token(parser)
921 if token == nil {
922 return false
923 }
924 } else {
925 context_mark := parser.marks[len(parser.marks)-1]
926 parser.marks = parser.marks[:len(parser.marks)-1]
927 return yaml_parser_set_parser_error_context(parser,
928 "while parsing a flow mapping", context_mark,
929 "did not find expected ',' or '}'", token.start_mark)
930 }
931 }
932
933 if token.typ == yaml_KEY_TOKEN {
934 skip_token(parser)
935 token = peek_token(parser)
936 if token == nil {
937 return false
938 }
939 if token.typ != yaml_VALUE_TOKEN &&
940 token.typ != yaml_FLOW_ENTRY_TOKEN &&
941 token.typ != yaml_FLOW_MAPPING_END_TOKEN {
942 parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_VALUE_STATE)
943 return yaml_parser_parse_node(parser, event, false, false)
944 } else {
945 parser.state = yaml_PARSE_FLOW_MAPPING_VALUE_STATE
946 return yaml_parser_process_empty_scalar(parser, event, token.start_mark)
947 }
948 } else if token.typ != yaml_FLOW_MAPPING_END_TOKEN {
949 parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE)
950 return yaml_parser_parse_node(parser, event, false, false)
951 }
952 }
953
954 parser.state = parser.states[len(parser.states)-1]
955 parser.states = parser.states[:len(parser.states)-1]
956 parser.marks = parser.marks[:len(parser.marks)-1]
957 *event = yaml_event_t{
958 typ: yaml_MAPPING_END_EVENT,
959 start_mark: token.start_mark,
960 end_mark: token.end_mark,
961 }
962 skip_token(parser)
963 return true
964}
965
// Parse the productions:
// flow_mapping_entry   ::= flow_node | KEY flow_node? (VALUE flow_node?)?
//                                   *                  ***** *
//
// yaml_parser_parse_flow_mapping_value parses the value half of a flow
// mapping entry. When empty is true (the key had no value at all), an
// empty scalar is produced without consuming any token.
func yaml_parser_parse_flow_mapping_value(parser *yaml_parser_t, event *yaml_event_t, empty bool) bool {
	token := peek_token(parser)
	if token == nil {
		return false
	}
	if empty {
		parser.state = yaml_PARSE_FLOW_MAPPING_KEY_STATE
		return yaml_parser_process_empty_scalar(parser, event, token.start_mark)
	}
	if token.typ == yaml_VALUE_TOKEN {
		skip_token(parser)
		token = peek_token(parser)
		if token == nil {
			return false
		}
		if token.typ != yaml_FLOW_ENTRY_TOKEN && token.typ != yaml_FLOW_MAPPING_END_TOKEN {
			parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_KEY_STATE)
			return yaml_parser_parse_node(parser, event, false, false)
		}
	}
	parser.state = yaml_PARSE_FLOW_MAPPING_KEY_STATE
	return yaml_parser_process_empty_scalar(parser, event, token.start_mark)
}
993
// Generate an empty scalar event.
// Used wherever the grammar allows a node to be omitted (missing keys,
// values, or sequence entries); both marks point at the given position.
func yaml_parser_process_empty_scalar(parser *yaml_parser_t, event *yaml_event_t, mark yaml_mark_t) bool {
	*event = yaml_event_t{
		typ:        yaml_SCALAR_EVENT,
		start_mark: mark,
		end_mark:   mark,
		value:      nil, // Empty
		implicit:   true,
		style:      yaml_style_t(yaml_PLAIN_SCALAR_STYLE),
	}
	return true
}
1006
// default_tag_directives are the tag handles that are always in effect:
// "!" for local tags and "!!" for the standard tag:yaml.org,2002: space.
// yaml_parser_process_directives appends them after any explicit %TAG
// directives, without overriding user-defined handles.
var default_tag_directives = []yaml_tag_directive_t{
	{[]byte("!"), []byte("!")},
	{[]byte("!!"), []byte("tag:yaml.org,2002:")},
}
1011
// Parse directives.
// yaml_parser_process_directives consumes any %YAML and %TAG directive
// tokens preceding a document. A duplicate %YAML directive or a version
// other than 1.1 is an error. The parsed directives are appended to
// parser.tag_directives (duplicates rejected) and, when the ref arguments
// are non-nil, also returned through them.
func yaml_parser_process_directives(parser *yaml_parser_t,
	version_directive_ref **yaml_version_directive_t,
	tag_directives_ref *[]yaml_tag_directive_t) bool {

	var version_directive *yaml_version_directive_t
	var tag_directives []yaml_tag_directive_t

	token := peek_token(parser)
	if token == nil {
		return false
	}

	for token.typ == yaml_VERSION_DIRECTIVE_TOKEN || token.typ == yaml_TAG_DIRECTIVE_TOKEN {
		if token.typ == yaml_VERSION_DIRECTIVE_TOKEN {
			if version_directive != nil {
				yaml_parser_set_parser_error(parser,
					"found duplicate %YAML directive", token.start_mark)
				return false
			}
			if token.major != 1 || token.minor != 1 {
				yaml_parser_set_parser_error(parser,
					"found incompatible YAML document", token.start_mark)
				return false
			}
			version_directive = &yaml_version_directive_t{
				major: token.major,
				minor: token.minor,
			}
		} else if token.typ == yaml_TAG_DIRECTIVE_TOKEN {
			value := yaml_tag_directive_t{
				handle: token.value,
				prefix: token.prefix,
			}
			if !yaml_parser_append_tag_directive(parser, value, false, token.start_mark) {
				return false
			}
			tag_directives = append(tag_directives, value)
		}

		skip_token(parser)
		token = peek_token(parser)
		if token == nil {
			return false
		}
	}

	// Install the built-in "!" and "!!" handles; allow_duplicates is true
	// so explicit directives above take precedence.
	for i := range default_tag_directives {
		if !yaml_parser_append_tag_directive(parser, default_tag_directives[i], true, token.start_mark) {
			return false
		}
	}

	if version_directive_ref != nil {
		*version_directive_ref = version_directive
	}
	if tag_directives_ref != nil {
		*tag_directives_ref = tag_directives
	}
	return true
}
1073
// Append a tag directive to the directives stack.
// A directive whose handle is already present is an error unless
// allow_duplicates is set (used for the built-in defaults), in which case
// the existing directive wins and the new one is dropped. The stored
// directive holds copies of the handle and prefix bytes.
func yaml_parser_append_tag_directive(parser *yaml_parser_t, value yaml_tag_directive_t, allow_duplicates bool, mark yaml_mark_t) bool {
	for i := range parser.tag_directives {
		if bytes.Equal(value.handle, parser.tag_directives[i].handle) {
			if allow_duplicates {
				return true
			}
			return yaml_parser_set_parser_error(parser, "found duplicate %TAG directive", mark)
		}
	}

	// [Go] I suspect the copy is unnecessary. This was likely done
	// because there was no way to track ownership of the data.
	value_copy := yaml_tag_directive_t{
		handle: make([]byte, len(value.handle)),
		prefix: make([]byte, len(value.prefix)),
	}
	copy(value_copy.handle, value.handle)
	copy(value_copy.prefix, value.prefix)
	parser.tag_directives = append(parser.tag_directives, value_copy)
	return true
}
diff --git a/vendor/gopkg.in/yaml.v2/readerc.go b/vendor/gopkg.in/yaml.v2/readerc.go
new file mode 100644
index 0000000..7c1f5fa
--- /dev/null
+++ b/vendor/gopkg.in/yaml.v2/readerc.go
@@ -0,0 +1,412 @@
1package yaml
2
3import (
4 "io"
5)
6
7// Set the reader error and return 0.
8func yaml_parser_set_reader_error(parser *yaml_parser_t, problem string, offset int, value int) bool {
9 parser.error = yaml_READER_ERROR
10 parser.problem = problem
11 parser.problem_offset = offset
12 parser.problem_value = value
13 return false
14}
15
// Byte order marks used to auto-detect the input encoding.
const (
	bom_UTF8    = "\xef\xbb\xbf" // UTF-8 BOM (EF BB BF)
	bom_UTF16LE = "\xff\xfe"     // UTF-16 little-endian BOM (FF FE)
	bom_UTF16BE = "\xfe\xff"     // UTF-16 big-endian BOM (FE FF)
)
22
23// Determine the input stream encoding by checking the BOM symbol. If no BOM is
24// found, the UTF-8 encoding is assumed. Return 1 on success, 0 on failure.
25func yaml_parser_determine_encoding(parser *yaml_parser_t) bool {
26 // Ensure that we had enough bytes in the raw buffer.
27 for !parser.eof && len(parser.raw_buffer)-parser.raw_buffer_pos < 3 {
28 if !yaml_parser_update_raw_buffer(parser) {
29 return false
30 }
31 }
32
33 // Determine the encoding.
34 buf := parser.raw_buffer
35 pos := parser.raw_buffer_pos
36 avail := len(buf) - pos
37 if avail >= 2 && buf[pos] == bom_UTF16LE[0] && buf[pos+1] == bom_UTF16LE[1] {
38 parser.encoding = yaml_UTF16LE_ENCODING
39 parser.raw_buffer_pos += 2
40 parser.offset += 2
41 } else if avail >= 2 && buf[pos] == bom_UTF16BE[0] && buf[pos+1] == bom_UTF16BE[1] {
42 parser.encoding = yaml_UTF16BE_ENCODING
43 parser.raw_buffer_pos += 2
44 parser.offset += 2
45 } else if avail >= 3 && buf[pos] == bom_UTF8[0] && buf[pos+1] == bom_UTF8[1] && buf[pos+2] == bom_UTF8[2] {
46 parser.encoding = yaml_UTF8_ENCODING
47 parser.raw_buffer_pos += 3
48 parser.offset += 3
49 } else {
50 parser.encoding = yaml_UTF8_ENCODING
51 }
52 return true
53}
54
55// Update the raw buffer.
56func yaml_parser_update_raw_buffer(parser *yaml_parser_t) bool {
57 size_read := 0
58
59 // Return if the raw buffer is full.
60 if parser.raw_buffer_pos == 0 && len(parser.raw_buffer) == cap(parser.raw_buffer) {
61 return true
62 }
63
64 // Return on EOF.
65 if parser.eof {
66 return true
67 }
68
69 // Move the remaining bytes in the raw buffer to the beginning.
70 if parser.raw_buffer_pos > 0 && parser.raw_buffer_pos < len(parser.raw_buffer) {
71 copy(parser.raw_buffer, parser.raw_buffer[parser.raw_buffer_pos:])
72 }
73 parser.raw_buffer = parser.raw_buffer[:len(parser.raw_buffer)-parser.raw_buffer_pos]
74 parser.raw_buffer_pos = 0
75
76 // Call the read handler to fill the buffer.
77 size_read, err := parser.read_handler(parser, parser.raw_buffer[len(parser.raw_buffer):cap(parser.raw_buffer)])
78 parser.raw_buffer = parser.raw_buffer[:len(parser.raw_buffer)+size_read]
79 if err == io.EOF {
80 parser.eof = true
81 } else if err != nil {
82 return yaml_parser_set_reader_error(parser, "input error: "+err.Error(), parser.offset, -1)
83 }
84 return true
85}
86
// Ensure that the buffer contains at least `length` characters.
// Return true on success, false on failure.
//
// The length is supposed to be significantly less than the buffer size.
//
// NOTE: parser.buffer always stores UTF-8 — characters decoded from UTF-16
// input are re-encoded as UTF-8 at the end of the loop below. At EOF the
// buffer is padded with NUL bytes up to the requested length.
func yaml_parser_update_buffer(parser *yaml_parser_t, length int) bool {
	if parser.read_handler == nil {
		panic("read handler must be set")
	}

	// [Go] This function was changed to guarantee the requested length size at EOF.
	// The fact we need to do this is pretty awful, but the description above implies
	// for that to be the case, and there are tests

	// If the EOF flag is set and the raw buffer is empty, do nothing.
	if parser.eof && parser.raw_buffer_pos == len(parser.raw_buffer) {
		// [Go] ACTUALLY! Read the documentation of this function above.
		// This is just broken. To return true, we need to have the
		// given length in the buffer. Not doing that means every single
		// check that calls this function to make sure the buffer has a
		// given length is Go) panicking; or C) accessing invalid memory.
		//return true
	}

	// Return if the buffer contains enough characters.
	if parser.unread >= length {
		return true
	}

	// Determine the input encoding if it is not known yet.
	if parser.encoding == yaml_ANY_ENCODING {
		if !yaml_parser_determine_encoding(parser) {
			return false
		}
	}

	// Move the unread characters to the beginning of the buffer.
	buffer_len := len(parser.buffer)
	if parser.buffer_pos > 0 && parser.buffer_pos < buffer_len {
		copy(parser.buffer, parser.buffer[parser.buffer_pos:])
		buffer_len -= parser.buffer_pos
		parser.buffer_pos = 0
	} else if parser.buffer_pos == buffer_len {
		buffer_len = 0
		parser.buffer_pos = 0
	}

	// Open the whole buffer for writing, and cut it before returning.
	parser.buffer = parser.buffer[:cap(parser.buffer)]

	// Fill the buffer until it has enough characters.
	first := true
	for parser.unread < length {

		// Fill the raw buffer if necessary.
		// `first` skips the refill on the initial pass so bytes already in
		// the raw buffer are decoded before more input is requested.
		if !first || parser.raw_buffer_pos == len(parser.raw_buffer) {
			if !yaml_parser_update_raw_buffer(parser) {
				parser.buffer = parser.buffer[:buffer_len]
				return false
			}
		}
		first = false

		// Decode the raw buffer.
	inner:
		for parser.raw_buffer_pos != len(parser.raw_buffer) {
			var value rune // decoded Unicode code point
			var width int  // number of raw bytes consumed for this character

			raw_unread := len(parser.raw_buffer) - parser.raw_buffer_pos

			// Decode the next character.
			switch parser.encoding {
			case yaml_UTF8_ENCODING:
				// Decode a UTF-8 character. Check RFC 3629
				// (http://www.ietf.org/rfc/rfc3629.txt) for more details.
				//
				// The following table (taken from the RFC) is used for
				// decoding.
				//
				//    Char. number range |        UTF-8 octet sequence
				//      (hexadecimal)    |              (binary)
				//   --------------------+------------------------------------
				//   0000 0000-0000 007F | 0xxxxxxx
				//   0000 0080-0000 07FF | 110xxxxx 10xxxxxx
				//   0000 0800-0000 FFFF | 1110xxxx 10xxxxxx 10xxxxxx
				//   0001 0000-0010 FFFF | 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx
				//
				// Additionally, the characters in the range 0xD800-0xDFFF
				// are prohibited as they are reserved for use with UTF-16
				// surrogate pairs.

				// Determine the length of the UTF-8 sequence.
				octet := parser.raw_buffer[parser.raw_buffer_pos]
				switch {
				case octet&0x80 == 0x00:
					width = 1
				case octet&0xE0 == 0xC0:
					width = 2
				case octet&0xF0 == 0xE0:
					width = 3
				case octet&0xF8 == 0xF0:
					width = 4
				default:
					// The leading octet is invalid.
					return yaml_parser_set_reader_error(parser,
						"invalid leading UTF-8 octet",
						parser.offset, int(octet))
				}

				// Check if the raw buffer contains an incomplete character.
				if width > raw_unread {
					if parser.eof {
						return yaml_parser_set_reader_error(parser,
							"incomplete UTF-8 octet sequence",
							parser.offset, -1)
					}
					break inner
				}

				// Decode the leading octet.
				switch {
				case octet&0x80 == 0x00:
					value = rune(octet & 0x7F)
				case octet&0xE0 == 0xC0:
					value = rune(octet & 0x1F)
				case octet&0xF0 == 0xE0:
					value = rune(octet & 0x0F)
				case octet&0xF8 == 0xF0:
					value = rune(octet & 0x07)
				default:
					value = 0
				}

				// Check and decode the trailing octets.
				for k := 1; k < width; k++ {
					octet = parser.raw_buffer[parser.raw_buffer_pos+k]

					// Check if the octet is valid.
					if (octet & 0xC0) != 0x80 {
						return yaml_parser_set_reader_error(parser,
							"invalid trailing UTF-8 octet",
							parser.offset+k, int(octet))
					}

					// Decode the octet.
					value = (value << 6) + rune(octet&0x3F)
				}

				// Check the length of the sequence against the value
				// (rejects overlong encodings).
				switch {
				case width == 1:
				case width == 2 && value >= 0x80:
				case width == 3 && value >= 0x800:
				case width == 4 && value >= 0x10000:
				default:
					return yaml_parser_set_reader_error(parser,
						"invalid length of a UTF-8 sequence",
						parser.offset, -1)
				}

				// Check the range of the value.
				if value >= 0xD800 && value <= 0xDFFF || value > 0x10FFFF {
					return yaml_parser_set_reader_error(parser,
						"invalid Unicode character",
						parser.offset, int(value))
				}

			case yaml_UTF16LE_ENCODING, yaml_UTF16BE_ENCODING:
				// low/high select byte order within each 16-bit unit.
				var low, high int
				if parser.encoding == yaml_UTF16LE_ENCODING {
					low, high = 0, 1
				} else {
					low, high = 1, 0
				}

				// The UTF-16 encoding is not as simple as one might
				// naively think. Check RFC 2781
				// (http://www.ietf.org/rfc/rfc2781.txt).
				//
				// Normally, two subsequent bytes describe a Unicode
				// character. However a special technique (called a
				// surrogate pair) is used for specifying character
				// values larger than 0xFFFF.
				//
				// A surrogate pair consists of two pseudo-characters:
				//      high surrogate area (0xD800-0xDBFF)
				//      low surrogate area (0xDC00-0xDFFF)
				//
				// The following formulas are used for decoding
				// and encoding characters using surrogate pairs:
				//
				//  U  = U' + 0x10000   (0x01 00 00 <= U <= 0x10 FF FF)
				//  U' = yyyyyyyyyyxxxxxxxxxx   (0 <= U' <= 0x0F FF FF)
				//  W1 = 110110yyyyyyyyyy
				//  W2 = 110111xxxxxxxxxx
				//
				// where U is the character value, W1 is the high surrogate
				// area, W2 is the low surrogate area.

				// Check for incomplete UTF-16 character.
				if raw_unread < 2 {
					if parser.eof {
						return yaml_parser_set_reader_error(parser,
							"incomplete UTF-16 character",
							parser.offset, -1)
					}
					break inner
				}

				// Get the character.
				value = rune(parser.raw_buffer[parser.raw_buffer_pos+low]) +
					(rune(parser.raw_buffer[parser.raw_buffer_pos+high]) << 8)

				// Check for unexpected low surrogate area.
				if value&0xFC00 == 0xDC00 {
					return yaml_parser_set_reader_error(parser,
						"unexpected low surrogate area",
						parser.offset, int(value))
				}

				// Check for a high surrogate area.
				if value&0xFC00 == 0xD800 {
					width = 4

					// Check for incomplete surrogate pair.
					if raw_unread < 4 {
						if parser.eof {
							return yaml_parser_set_reader_error(parser,
								"incomplete UTF-16 surrogate pair",
								parser.offset, -1)
						}
						break inner
					}

					// Get the next character.
					value2 := rune(parser.raw_buffer[parser.raw_buffer_pos+low+2]) +
						(rune(parser.raw_buffer[parser.raw_buffer_pos+high+2]) << 8)

					// Check for a low surrogate area.
					if value2&0xFC00 != 0xDC00 {
						return yaml_parser_set_reader_error(parser,
							"expected low surrogate area",
							parser.offset+2, int(value2))
					}

					// Generate the value of the surrogate pair.
					value = 0x10000 + ((value & 0x3FF) << 10) + (value2 & 0x3FF)
				} else {
					width = 2
				}

			default:
				panic("impossible")
			}

			// Check if the character is in the allowed range:
			//      #x9 | #xA | #xD | [#x20-#x7E]               (8 bit)
			//      | #x85 | [#xA0-#xD7FF] | [#xE000-#xFFFD]    (16 bit)
			//      | [#x10000-#x10FFFF]                        (32 bit)
			switch {
			case value == 0x09:
			case value == 0x0A:
			case value == 0x0D:
			case value >= 0x20 && value <= 0x7E:
			case value == 0x85:
			case value >= 0xA0 && value <= 0xD7FF:
			case value >= 0xE000 && value <= 0xFFFD:
			case value >= 0x10000 && value <= 0x10FFFF:
			default:
				return yaml_parser_set_reader_error(parser,
					"control characters are not allowed",
					parser.offset, int(value))
			}

			// Move the raw pointers.
			parser.raw_buffer_pos += width
			parser.offset += width

			// Finally put the character into the buffer, re-encoded as UTF-8.
			if value <= 0x7F {
				// 0000 0000-0000 007F . 0xxxxxxx
				parser.buffer[buffer_len+0] = byte(value)
				buffer_len += 1
			} else if value <= 0x7FF {
				// 0000 0080-0000 07FF . 110xxxxx 10xxxxxx
				parser.buffer[buffer_len+0] = byte(0xC0 + (value >> 6))
				parser.buffer[buffer_len+1] = byte(0x80 + (value & 0x3F))
				buffer_len += 2
			} else if value <= 0xFFFF {
				// 0000 0800-0000 FFFF . 1110xxxx 10xxxxxx 10xxxxxx
				parser.buffer[buffer_len+0] = byte(0xE0 + (value >> 12))
				parser.buffer[buffer_len+1] = byte(0x80 + ((value >> 6) & 0x3F))
				parser.buffer[buffer_len+2] = byte(0x80 + (value & 0x3F))
				buffer_len += 3
			} else {
				// 0001 0000-0010 FFFF . 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx
				parser.buffer[buffer_len+0] = byte(0xF0 + (value >> 18))
				parser.buffer[buffer_len+1] = byte(0x80 + ((value >> 12) & 0x3F))
				parser.buffer[buffer_len+2] = byte(0x80 + ((value >> 6) & 0x3F))
				parser.buffer[buffer_len+3] = byte(0x80 + (value & 0x3F))
				buffer_len += 4
			}

			parser.unread++
		}

		// On EOF, put NUL into the buffer and return.
		if parser.eof {
			parser.buffer[buffer_len] = 0
			buffer_len++
			parser.unread++
			break
		}
	}
	// [Go] Read the documentation of this function above. To return true,
	// we need to have the given length in the buffer. Not doing that means
	// every single check that calls this function to make sure the buffer
	// has a given length is Go) panicking; or C) accessing invalid memory.
	// This happens here due to the EOF above breaking early.
	for buffer_len < length {
		parser.buffer[buffer_len] = 0
		buffer_len++
	}
	parser.buffer = parser.buffer[:buffer_len]
	return true
}
diff --git a/vendor/gopkg.in/yaml.v2/resolve.go b/vendor/gopkg.in/yaml.v2/resolve.go
new file mode 100644
index 0000000..4120e0c
--- /dev/null
+++ b/vendor/gopkg.in/yaml.v2/resolve.go
@@ -0,0 +1,258 @@
1package yaml
2
3import (
4 "encoding/base64"
5 "math"
6 "regexp"
7 "strconv"
8 "strings"
9 "time"
10)
11
// resolveMapItem holds the resolved Go value and canonical YAML tag for one
// literal keyword (e.g. "true", "null", ".inf").
type resolveMapItem struct {
	value interface{} // decoded Go value for the keyword
	tag   string      // canonical YAML tag, e.g. yaml_BOOL_TAG
}

// resolveTable maps a scalar's first byte to a coarse class hint, filled in
// by init below: 'S' sign, 'D' digit, 'M' possible keyword, '.' float;
// 0 means the scalar resolves as a plain string.
var resolveTable = make([]byte, 256)

// resolveMap maps exact keyword spellings to their resolved value and tag,
// filled in by init below.
var resolveMap = make(map[string]resolveMapItem)
19
20func init() {
21 t := resolveTable
22 t[int('+')] = 'S' // Sign
23 t[int('-')] = 'S'
24 for _, c := range "0123456789" {
25 t[int(c)] = 'D' // Digit
26 }
27 for _, c := range "yYnNtTfFoO~" {
28 t[int(c)] = 'M' // In map
29 }
30 t[int('.')] = '.' // Float (potentially in map)
31
32 var resolveMapList = []struct {
33 v interface{}
34 tag string
35 l []string
36 }{
37 {true, yaml_BOOL_TAG, []string{"y", "Y", "yes", "Yes", "YES"}},
38 {true, yaml_BOOL_TAG, []string{"true", "True", "TRUE"}},
39 {true, yaml_BOOL_TAG, []string{"on", "On", "ON"}},
40 {false, yaml_BOOL_TAG, []string{"n", "N", "no", "No", "NO"}},
41 {false, yaml_BOOL_TAG, []string{"false", "False", "FALSE"}},
42 {false, yaml_BOOL_TAG, []string{"off", "Off", "OFF"}},
43 {nil, yaml_NULL_TAG, []string{"", "~", "null", "Null", "NULL"}},
44 {math.NaN(), yaml_FLOAT_TAG, []string{".nan", ".NaN", ".NAN"}},
45 {math.Inf(+1), yaml_FLOAT_TAG, []string{".inf", ".Inf", ".INF"}},
46 {math.Inf(+1), yaml_FLOAT_TAG, []string{"+.inf", "+.Inf", "+.INF"}},
47 {math.Inf(-1), yaml_FLOAT_TAG, []string{"-.inf", "-.Inf", "-.INF"}},
48 {"<<", yaml_MERGE_TAG, []string{"<<"}},
49 }
50
51 m := resolveMap
52 for _, item := range resolveMapList {
53 for _, s := range item.l {
54 m[s] = resolveMapItem{item.v, item.tag}
55 }
56 }
57}
58
// longTagPrefix is the full URI form that the "!!" shorthand abbreviates.
const longTagPrefix = "tag:yaml.org,2002:"

// shortTag rewrites a "tag:yaml.org,2002:xxx" tag into its "!!xxx" short
// form; any other tag is returned untouched.
func shortTag(tag string) string {
	// TODO This can easily be made faster and produce less garbage.
	if !strings.HasPrefix(tag, longTagPrefix) {
		return tag
	}
	return "!!" + strings.TrimPrefix(tag, longTagPrefix)
}
68
69func longTag(tag string) string {
70 if strings.HasPrefix(tag, "!!") {
71 return longTagPrefix + tag[2:]
72 }
73 return tag
74}
75
76func resolvableTag(tag string) bool {
77 switch tag {
78 case "", yaml_STR_TAG, yaml_BOOL_TAG, yaml_INT_TAG, yaml_FLOAT_TAG, yaml_NULL_TAG, yaml_TIMESTAMP_TAG:
79 return true
80 }
81 return false
82}
83
// yamlStyleFloat matches YAML's regular float syntax: optional sign, digits
// with an optional fraction (or a leading dot), and an optional exponent.
var yamlStyleFloat = regexp.MustCompile(`^[-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)([eE][-+]?[0-9]+)?$`)
85
86func resolve(tag string, in string) (rtag string, out interface{}) {
87 if !resolvableTag(tag) {
88 return tag, in
89 }
90
91 defer func() {
92 switch tag {
93 case "", rtag, yaml_STR_TAG, yaml_BINARY_TAG:
94 return
95 case yaml_FLOAT_TAG:
96 if rtag == yaml_INT_TAG {
97 switch v := out.(type) {
98 case int64:
99 rtag = yaml_FLOAT_TAG
100 out = float64(v)
101 return
102 case int:
103 rtag = yaml_FLOAT_TAG
104 out = float64(v)
105 return
106 }
107 }
108 }
109 failf("cannot decode %s `%s` as a %s", shortTag(rtag), in, shortTag(tag))
110 }()
111
112 // Any data is accepted as a !!str or !!binary.
113 // Otherwise, the prefix is enough of a hint about what it might be.
114 hint := byte('N')
115 if in != "" {
116 hint = resolveTable[in[0]]
117 }
118 if hint != 0 && tag != yaml_STR_TAG && tag != yaml_BINARY_TAG {
119 // Handle things we can lookup in a map.
120 if item, ok := resolveMap[in]; ok {
121 return item.tag, item.value
122 }
123
124 // Base 60 floats are a bad idea, were dropped in YAML 1.2, and
125 // are purposefully unsupported here. They're still quoted on
126 // the way out for compatibility with other parser, though.
127
128 switch hint {
129 case 'M':
130 // We've already checked the map above.
131
132 case '.':
133 // Not in the map, so maybe a normal float.
134 floatv, err := strconv.ParseFloat(in, 64)
135 if err == nil {
136 return yaml_FLOAT_TAG, floatv
137 }
138
139 case 'D', 'S':
140 // Int, float, or timestamp.
141 // Only try values as a timestamp if the value is unquoted or there's an explicit
142 // !!timestamp tag.
143 if tag == "" || tag == yaml_TIMESTAMP_TAG {
144 t, ok := parseTimestamp(in)
145 if ok {
146 return yaml_TIMESTAMP_TAG, t
147 }
148 }
149
150 plain := strings.Replace(in, "_", "", -1)
151 intv, err := strconv.ParseInt(plain, 0, 64)
152 if err == nil {
153 if intv == int64(int(intv)) {
154 return yaml_INT_TAG, int(intv)
155 } else {
156 return yaml_INT_TAG, intv
157 }
158 }
159 uintv, err := strconv.ParseUint(plain, 0, 64)
160 if err == nil {
161 return yaml_INT_TAG, uintv
162 }
163 if yamlStyleFloat.MatchString(plain) {
164 floatv, err := strconv.ParseFloat(plain, 64)
165 if err == nil {
166 return yaml_FLOAT_TAG, floatv
167 }
168 }
169 if strings.HasPrefix(plain, "0b") {
170 intv, err := strconv.ParseInt(plain[2:], 2, 64)
171 if err == nil {
172 if intv == int64(int(intv)) {
173 return yaml_INT_TAG, int(intv)
174 } else {
175 return yaml_INT_TAG, intv
176 }
177 }
178 uintv, err := strconv.ParseUint(plain[2:], 2, 64)
179 if err == nil {
180 return yaml_INT_TAG, uintv
181 }
182 } else if strings.HasPrefix(plain, "-0b") {
183 intv, err := strconv.ParseInt("-" + plain[3:], 2, 64)
184 if err == nil {
185 if true || intv == int64(int(intv)) {
186 return yaml_INT_TAG, int(intv)
187 } else {
188 return yaml_INT_TAG, intv
189 }
190 }
191 }
192 default:
193 panic("resolveTable item not yet handled: " + string(rune(hint)) + " (with " + in + ")")
194 }
195 }
196 return yaml_STR_TAG, in
197}
198
// encodeBase64 encodes s as standard base64. When the encoding spans more
// than one 70-character line, every line — including the last, partial one —
// is terminated with a newline; shorter output is returned as-is.
func encodeBase64(s string) string {
	const lineLen = 70

	encoded := make([]byte, base64.StdEncoding.EncodedLen(len(s)))
	base64.StdEncoding.Encode(encoded, []byte(s))

	if len(encoded) < lineLen {
		// Fits on a single line: no folding, no trailing newline.
		return string(encoded)
	}

	// Fold into lineLen-sized chunks, each followed by '\n'.
	out := make([]byte, 0, len(encoded)+len(encoded)/lineLen+1)
	for len(encoded) > lineLen {
		out = append(out, encoded[:lineLen]...)
		out = append(out, '\n')
		encoded = encoded[lineLen:]
	}
	out = append(out, encoded...)
	out = append(out, '\n')
	return string(out)
}
223
// This is a subset of the formats allowed by the regular expression
// defined at http://yaml.org/type/timestamp.html.
var allowedTimestampFormats = []string{
	"2006-1-2T15:4:5.999999999Z07:00", // RFC3339Nano with short date fields.
	"2006-1-2t15:4:5.999999999Z07:00", // RFC3339Nano with short date fields and lower-case "t".
	"2006-1-2 15:4:5.999999999",       // space separated with no time zone
	"2006-1-2",                        // date only
	// Notable exception: time.Parse cannot handle: "2001-12-14 21:59:43.10 -5"
	// from the set of examples.
}

// parseTimestamp parses s as a timestamp string and
// reports whether it succeeded along with the parsed time.
// Timestamp formats are defined at http://yaml.org/type/timestamp.html
func parseTimestamp(s string) (time.Time, bool) {
	// TODO write code to check all the formats supported by
	// http://yaml.org/type/timestamp.html instead of using time.Parse.

	// Quick rejection: every accepted format starts with a four-digit
	// year immediately followed by '-'.
	digits := 0
	for digits < len(s) && s[digits] >= '0' && s[digits] <= '9' {
		digits++
	}
	if digits != 4 || digits == len(s) || s[digits] != '-' {
		return time.Time{}, false
	}
	for _, layout := range allowedTimestampFormats {
		if t, err := time.Parse(layout, s); err == nil {
			return t, true
		}
	}
	return time.Time{}, false
}
diff --git a/vendor/gopkg.in/yaml.v2/scannerc.go b/vendor/gopkg.in/yaml.v2/scannerc.go
new file mode 100644
index 0000000..0b9bb60
--- /dev/null
+++ b/vendor/gopkg.in/yaml.v2/scannerc.go
@@ -0,0 +1,2711 @@
1package yaml
2
3import (
4 "bytes"
5 "fmt"
6)
7
8// Introduction
9// ************
10//
11// The following notes assume that you are familiar with the YAML specification
12// (http://yaml.org/spec/1.2/spec.html). We mostly follow it, although in
// some cases we are less restrictive than it requires.
14//
15// The process of transforming a YAML stream into a sequence of events is
// divided into two steps: Scanning and Parsing.
17//
18// The Scanner transforms the input stream into a sequence of tokens, while the
19// parser transform the sequence of tokens produced by the Scanner into a
20// sequence of parsing events.
21//
22// The Scanner is rather clever and complicated. The Parser, on the contrary,
23// is a straightforward implementation of a recursive-descendant parser (or,
24// LL(1) parser, as it is usually called).
25//
26// Actually there are two issues of Scanning that might be called "clever", the
27// rest is quite straightforward. The issues are "block collection start" and
28// "simple keys". Both issues are explained below in details.
29//
30// Here the Scanning step is explained and implemented. We start with the list
31// of all the tokens produced by the Scanner together with short descriptions.
32//
33// Now, tokens:
34//
35// STREAM-START(encoding) # The stream start.
36// STREAM-END # The stream end.
37// VERSION-DIRECTIVE(major,minor) # The '%YAML' directive.
38// TAG-DIRECTIVE(handle,prefix) # The '%TAG' directive.
39// DOCUMENT-START # '---'
40// DOCUMENT-END # '...'
41// BLOCK-SEQUENCE-START # Indentation increase denoting a block
42// BLOCK-MAPPING-START # sequence or a block mapping.
43// BLOCK-END # Indentation decrease.
44// FLOW-SEQUENCE-START # '['
45// FLOW-SEQUENCE-END # ']'
//      FLOW-MAPPING-START              # '{'
//      FLOW-MAPPING-END                # '}'
48// BLOCK-ENTRY # '-'
49// FLOW-ENTRY # ','
50// KEY # '?' or nothing (simple keys).
51// VALUE # ':'
52// ALIAS(anchor) # '*anchor'
53// ANCHOR(anchor) # '&anchor'
54// TAG(handle,suffix) # '!handle!suffix'
55// SCALAR(value,style) # A scalar.
56//
57// The following two tokens are "virtual" tokens denoting the beginning and the
58// end of the stream:
59//
60// STREAM-START(encoding)
61// STREAM-END
62//
63// We pass the information about the input stream encoding with the
64// STREAM-START token.
65//
66// The next two tokens are responsible for tags:
67//
68// VERSION-DIRECTIVE(major,minor)
69// TAG-DIRECTIVE(handle,prefix)
70//
71// Example:
72//
73// %YAML 1.1
74// %TAG ! !foo
75// %TAG !yaml! tag:yaml.org,2002:
76// ---
77//
// The corresponding sequence of tokens:
79//
80// STREAM-START(utf-8)
81// VERSION-DIRECTIVE(1,1)
82// TAG-DIRECTIVE("!","!foo")
83// TAG-DIRECTIVE("!yaml","tag:yaml.org,2002:")
84// DOCUMENT-START
85// STREAM-END
86//
87// Note that the VERSION-DIRECTIVE and TAG-DIRECTIVE tokens occupy a whole
88// line.
89//
90// The document start and end indicators are represented by:
91//
92// DOCUMENT-START
93// DOCUMENT-END
94//
95// Note that if a YAML stream contains an implicit document (without '---'
96// and '...' indicators), no DOCUMENT-START and DOCUMENT-END tokens will be
97// produced.
98//
99// In the following examples, we present whole documents together with the
100// produced tokens.
101//
102// 1. An implicit document:
103//
104// 'a scalar'
105//
106// Tokens:
107//
108// STREAM-START(utf-8)
109// SCALAR("a scalar",single-quoted)
110// STREAM-END
111//
112// 2. An explicit document:
113//
114// ---
115// 'a scalar'
116// ...
117//
118// Tokens:
119//
120// STREAM-START(utf-8)
121// DOCUMENT-START
122// SCALAR("a scalar",single-quoted)
123// DOCUMENT-END
124// STREAM-END
125//
126// 3. Several documents in a stream:
127//
128// 'a scalar'
129// ---
130// 'another scalar'
131// ---
132// 'yet another scalar'
133//
134// Tokens:
135//
136// STREAM-START(utf-8)
137// SCALAR("a scalar",single-quoted)
138// DOCUMENT-START
139// SCALAR("another scalar",single-quoted)
140// DOCUMENT-START
141// SCALAR("yet another scalar",single-quoted)
142// STREAM-END
143//
144// We have already introduced the SCALAR token above. The following tokens are
145// used to describe aliases, anchors, tag, and scalars:
146//
147// ALIAS(anchor)
148// ANCHOR(anchor)
149// TAG(handle,suffix)
150// SCALAR(value,style)
151//
152// The following series of examples illustrate the usage of these tokens:
153//
154// 1. A recursive sequence:
155//
156// &A [ *A ]
157//
158// Tokens:
159//
160// STREAM-START(utf-8)
161// ANCHOR("A")
162// FLOW-SEQUENCE-START
163// ALIAS("A")
164// FLOW-SEQUENCE-END
165// STREAM-END
166//
167// 2. A tagged scalar:
168//
169// !!float "3.14" # A good approximation.
170//
171// Tokens:
172//
173// STREAM-START(utf-8)
174// TAG("!!","float")
175// SCALAR("3.14",double-quoted)
176// STREAM-END
177//
178// 3. Various scalar styles:
179//
180// --- # Implicit empty plain scalars do not produce tokens.
181// --- a plain scalar
182// --- 'a single-quoted scalar'
183// --- "a double-quoted scalar"
184// --- |-
185// a literal scalar
186// --- >-
187// a folded
188// scalar
189//
190// Tokens:
191//
192// STREAM-START(utf-8)
193// DOCUMENT-START
194// DOCUMENT-START
195// SCALAR("a plain scalar",plain)
196// DOCUMENT-START
197// SCALAR("a single-quoted scalar",single-quoted)
198// DOCUMENT-START
199// SCALAR("a double-quoted scalar",double-quoted)
200// DOCUMENT-START
201// SCALAR("a literal scalar",literal)
202// DOCUMENT-START
203// SCALAR("a folded scalar",folded)
204// STREAM-END
205//
206// Now it's time to review collection-related tokens. We will start with
207// flow collections:
208//
209// FLOW-SEQUENCE-START
210// FLOW-SEQUENCE-END
211// FLOW-MAPPING-START
212// FLOW-MAPPING-END
213// FLOW-ENTRY
214// KEY
215// VALUE
216//
217// The tokens FLOW-SEQUENCE-START, FLOW-SEQUENCE-END, FLOW-MAPPING-START, and
218// FLOW-MAPPING-END represent the indicators '[', ']', '{', and '}'
219// correspondingly. FLOW-ENTRY represent the ',' indicator. Finally the
220// indicators '?' and ':', which are used for denoting mapping keys and values,
221// are represented by the KEY and VALUE tokens.
222//
223// The following examples show flow collections:
224//
225// 1. A flow sequence:
226//
227// [item 1, item 2, item 3]
228//
229// Tokens:
230//
231// STREAM-START(utf-8)
232// FLOW-SEQUENCE-START
233// SCALAR("item 1",plain)
234// FLOW-ENTRY
235// SCALAR("item 2",plain)
236// FLOW-ENTRY
237// SCALAR("item 3",plain)
238// FLOW-SEQUENCE-END
239// STREAM-END
240//
241// 2. A flow mapping:
242//
243// {
244// a simple key: a value, # Note that the KEY token is produced.
245// ? a complex key: another value,
246// }
247//
248// Tokens:
249//
250// STREAM-START(utf-8)
251// FLOW-MAPPING-START
252// KEY
253// SCALAR("a simple key",plain)
254// VALUE
255// SCALAR("a value",plain)
256// FLOW-ENTRY
257// KEY
258// SCALAR("a complex key",plain)
259// VALUE
260// SCALAR("another value",plain)
261// FLOW-ENTRY
262// FLOW-MAPPING-END
263// STREAM-END
264//
265// A simple key is a key which is not denoted by the '?' indicator. Note that
// the Scanner still produces the KEY token whenever it encounters a simple key.
267//
268// For scanning block collections, the following tokens are used (note that we
269// repeat KEY and VALUE here):
270//
271// BLOCK-SEQUENCE-START
272// BLOCK-MAPPING-START
273// BLOCK-END
274// BLOCK-ENTRY
275// KEY
276// VALUE
277//
278// The tokens BLOCK-SEQUENCE-START and BLOCK-MAPPING-START denote indentation
279// increase that precedes a block collection (cf. the INDENT token in Python).
// The token BLOCK-END denotes indentation decrease that ends a block collection
// (cf. the DEDENT token in Python). However YAML has some syntax peculiarities
// that make detection of these tokens more complex.
283//
284// The tokens BLOCK-ENTRY, KEY, and VALUE are used to represent the indicators
285// '-', '?', and ':' correspondingly.
286//
287// The following examples show how the tokens BLOCK-SEQUENCE-START,
288// BLOCK-MAPPING-START, and BLOCK-END are emitted by the Scanner:
289//
290// 1. Block sequences:
291//
292// - item 1
293// - item 2
294// -
295// - item 3.1
296// - item 3.2
297// -
298// key 1: value 1
299// key 2: value 2
300//
301// Tokens:
302//
303// STREAM-START(utf-8)
304// BLOCK-SEQUENCE-START
305// BLOCK-ENTRY
306// SCALAR("item 1",plain)
307// BLOCK-ENTRY
308// SCALAR("item 2",plain)
309// BLOCK-ENTRY
310// BLOCK-SEQUENCE-START
311// BLOCK-ENTRY
312// SCALAR("item 3.1",plain)
313// BLOCK-ENTRY
314// SCALAR("item 3.2",plain)
315// BLOCK-END
316// BLOCK-ENTRY
317// BLOCK-MAPPING-START
318// KEY
319// SCALAR("key 1",plain)
320// VALUE
321// SCALAR("value 1",plain)
322// KEY
323// SCALAR("key 2",plain)
324// VALUE
325// SCALAR("value 2",plain)
326// BLOCK-END
327// BLOCK-END
328// STREAM-END
329//
330// 2. Block mappings:
331//
332// a simple key: a value # The KEY token is produced here.
333// ? a complex key
334// : another value
335// a mapping:
336// key 1: value 1
337// key 2: value 2
338// a sequence:
339// - item 1
340// - item 2
341//
342// Tokens:
343//
344// STREAM-START(utf-8)
345// BLOCK-MAPPING-START
346// KEY
347// SCALAR("a simple key",plain)
348// VALUE
349// SCALAR("a value",plain)
350// KEY
351// SCALAR("a complex key",plain)
352// VALUE
353// SCALAR("another value",plain)
354// KEY
355// SCALAR("a mapping",plain)
356// BLOCK-MAPPING-START
357// KEY
358// SCALAR("key 1",plain)
359// VALUE
360// SCALAR("value 1",plain)
361// KEY
362// SCALAR("key 2",plain)
363// VALUE
364// SCALAR("value 2",plain)
365// BLOCK-END
366// KEY
367// SCALAR("a sequence",plain)
368// VALUE
369// BLOCK-SEQUENCE-START
370// BLOCK-ENTRY
371// SCALAR("item 1",plain)
372// BLOCK-ENTRY
373// SCALAR("item 2",plain)
374// BLOCK-END
375// BLOCK-END
376// STREAM-END
377//
378// YAML does not always require to start a new block collection from a new
379// line. If the current line contains only '-', '?', and ':' indicators, a new
380// block collection may start at the current line. The following examples
381// illustrate this case:
382//
383// 1. Collections in a sequence:
384//
385// - - item 1
386// - item 2
387// - key 1: value 1
388// key 2: value 2
389// - ? complex key
390// : complex value
391//
392// Tokens:
393//
394// STREAM-START(utf-8)
395// BLOCK-SEQUENCE-START
396// BLOCK-ENTRY
397// BLOCK-SEQUENCE-START
398// BLOCK-ENTRY
399// SCALAR("item 1",plain)
400// BLOCK-ENTRY
401// SCALAR("item 2",plain)
402// BLOCK-END
403// BLOCK-ENTRY
404// BLOCK-MAPPING-START
405// KEY
406// SCALAR("key 1",plain)
407// VALUE
408// SCALAR("value 1",plain)
409// KEY
410// SCALAR("key 2",plain)
411// VALUE
412// SCALAR("value 2",plain)
413// BLOCK-END
414// BLOCK-ENTRY
415// BLOCK-MAPPING-START
416// KEY
417// SCALAR("complex key")
418// VALUE
419// SCALAR("complex value")
420// BLOCK-END
421// BLOCK-END
422// STREAM-END
423//
424// 2. Collections in a mapping:
425//
426// ? a sequence
427// : - item 1
428// - item 2
429// ? a mapping
430// : key 1: value 1
431// key 2: value 2
432//
433// Tokens:
434//
435// STREAM-START(utf-8)
436// BLOCK-MAPPING-START
437// KEY
438// SCALAR("a sequence",plain)
439// VALUE
440// BLOCK-SEQUENCE-START
441// BLOCK-ENTRY
442// SCALAR("item 1",plain)
443// BLOCK-ENTRY
444// SCALAR("item 2",plain)
445// BLOCK-END
446// KEY
447// SCALAR("a mapping",plain)
448// VALUE
449// BLOCK-MAPPING-START
450// KEY
451// SCALAR("key 1",plain)
452// VALUE
453// SCALAR("value 1",plain)
454// KEY
455// SCALAR("key 2",plain)
456// VALUE
457// SCALAR("value 2",plain)
458// BLOCK-END
459// BLOCK-END
460// STREAM-END
461//
462// YAML also permits non-indented sequences if they are included into a block
463// mapping. In this case, the token BLOCK-SEQUENCE-START is not produced:
464//
465// key:
466// - item 1 # BLOCK-SEQUENCE-START is NOT produced here.
467// - item 2
468//
469// Tokens:
470//
471// STREAM-START(utf-8)
472// BLOCK-MAPPING-START
473// KEY
474// SCALAR("key",plain)
475// VALUE
476// BLOCK-ENTRY
477// SCALAR("item 1",plain)
478// BLOCK-ENTRY
479// SCALAR("item 2",plain)
480// BLOCK-END
481//
482
483// Ensure that the buffer contains the required number of characters.
484// Return true on success, false on failure (reader error or memory error).
485func cache(parser *yaml_parser_t, length int) bool {
486 // [Go] This was inlined: !cache(A, B) -> unread < B && !update(A, B)
487 return parser.unread >= length || yaml_parser_update_buffer(parser, length)
488}
489
490// Advance the buffer pointer.
491func skip(parser *yaml_parser_t) {
492 parser.mark.index++
493 parser.mark.column++
494 parser.unread--
495 parser.buffer_pos += width(parser.buffer[parser.buffer_pos])
496}
497
498func skip_line(parser *yaml_parser_t) {
499 if is_crlf(parser.buffer, parser.buffer_pos) {
500 parser.mark.index += 2
501 parser.mark.column = 0
502 parser.mark.line++
503 parser.unread -= 2
504 parser.buffer_pos += 2
505 } else if is_break(parser.buffer, parser.buffer_pos) {
506 parser.mark.index++
507 parser.mark.column = 0
508 parser.mark.line++
509 parser.unread--
510 parser.buffer_pos += width(parser.buffer[parser.buffer_pos])
511 }
512}
513
514// Copy a character to a string buffer and advance pointers.
515func read(parser *yaml_parser_t, s []byte) []byte {
516 w := width(parser.buffer[parser.buffer_pos])
517 if w == 0 {
518 panic("invalid character sequence")
519 }
520 if len(s) == 0 {
521 s = make([]byte, 0, 32)
522 }
523 if w == 1 && len(s)+w <= cap(s) {
524 s = s[:len(s)+1]
525 s[len(s)-1] = parser.buffer[parser.buffer_pos]
526 parser.buffer_pos++
527 } else {
528 s = append(s, parser.buffer[parser.buffer_pos:parser.buffer_pos+w]...)
529 parser.buffer_pos += w
530 }
531 parser.mark.index++
532 parser.mark.column++
533 parser.unread--
534 return s
535}
536
537// Copy a line break character to a string buffer and advance pointers.
// read_line copies a single line break from the buffer into s, normalizing
// it to '\n' (except LS/PS, which are copied verbatim), advances the parser
// past it, and moves the mark to the start of the next line. If the current
// character is not a break, s is returned unchanged.
func read_line(parser *yaml_parser_t, s []byte) []byte {
	buf := parser.buffer
	pos := parser.buffer_pos
	switch {
	case buf[pos] == '\r' && buf[pos+1] == '\n':
		// CR LF . LF — two characters collapse into one break, so the
		// index/unread bookkeeping runs once here and once after the switch.
		s = append(s, '\n')
		parser.buffer_pos += 2
		parser.mark.index++
		parser.unread--
	case buf[pos] == '\r' || buf[pos] == '\n':
		// CR|LF . LF — a lone CR or LF becomes LF.
		s = append(s, '\n')
		parser.buffer_pos += 1
	case buf[pos] == '\xC2' && buf[pos+1] == '\x85':
		// NEL . LF — U+0085 is two bytes in UTF-8 but one character.
		s = append(s, '\n')
		parser.buffer_pos += 2
	case buf[pos] == '\xE2' && buf[pos+1] == '\x80' && (buf[pos+2] == '\xA8' || buf[pos+2] == '\xA9'):
		// LS|PS . LS|PS — U+2028/U+2029 are preserved as-is (three bytes).
		s = append(s, buf[parser.buffer_pos:pos+3]...)
		parser.buffer_pos += 3
	default:
		return s
	}
	parser.mark.index++
	parser.mark.column = 0
	parser.mark.line++
	parser.unread--
	return s
}
569
570// Get the next token.
571func yaml_parser_scan(parser *yaml_parser_t, token *yaml_token_t) bool {
572 // Erase the token object.
573 *token = yaml_token_t{} // [Go] Is this necessary?
574
575 // No tokens after STREAM-END or error.
576 if parser.stream_end_produced || parser.error != yaml_NO_ERROR {
577 return true
578 }
579
580 // Ensure that the tokens queue contains enough tokens.
581 if !parser.token_available {
582 if !yaml_parser_fetch_more_tokens(parser) {
583 return false
584 }
585 }
586
587 // Fetch the next token from the queue.
588 *token = parser.tokens[parser.tokens_head]
589 parser.tokens_head++
590 parser.tokens_parsed++
591 parser.token_available = false
592
593 if token.typ == yaml_STREAM_END_TOKEN {
594 parser.stream_end_produced = true
595 }
596 return true
597}
598
599// Set the scanner error and return false.
600func yaml_parser_set_scanner_error(parser *yaml_parser_t, context string, context_mark yaml_mark_t, problem string) bool {
601 parser.error = yaml_SCANNER_ERROR
602 parser.context = context
603 parser.context_mark = context_mark
604 parser.problem = problem
605 parser.problem_mark = parser.mark
606 return false
607}
608
609func yaml_parser_set_scanner_tag_error(parser *yaml_parser_t, directive bool, context_mark yaml_mark_t, problem string) bool {
610 context := "while parsing a tag"
611 if directive {
612 context = "while parsing a %TAG directive"
613 }
614 return yaml_parser_set_scanner_error(parser, context, context_mark, problem)
615}
616
// trace prints args prefixed with "+++" immediately and returns a closure
// that prints the same args prefixed with "---". Intended for ad-hoc
// debugging of scanner entry/exit: `defer trace("name")()`.
func trace(args ...interface{}) func() {
	enter := make([]interface{}, 0, len(args)+1)
	enter = append(enter, "+++")
	enter = append(enter, args...)
	fmt.Println(enter...)

	leave := make([]interface{}, 0, len(args)+1)
	leave = append(leave, "---")
	leave = append(leave, args...)
	return func() { fmt.Println(leave...) }
}
623
624// Ensure that the tokens queue contains at least one token which can be
625// returned to the Parser.
// Ensure that the tokens queue contains at least one token which can be
// returned to the Parser.
func yaml_parser_fetch_more_tokens(parser *yaml_parser_t) bool {
	// While we need more tokens to fetch, do it.
	for {
		if parser.tokens_head != len(parser.tokens) {
			// The queue is non-empty, but its head token may still be part
			// of an undecided potential simple key; if so, keep fetching
			// until the key is resolved (or invalidated) before returning.
			head_tok_idx, ok := parser.simple_keys_by_tok[parser.tokens_parsed]
			if !ok {
				break
			} else if valid, ok := yaml_simple_key_is_valid(parser, &parser.simple_keys[head_tok_idx]); !ok {
				return false
			} else if !valid {
				break
			}
		}
		// Fetch the next token.
		if !yaml_parser_fetch_next_token(parser) {
			return false
		}
	}

	parser.token_available = true
	return true
}
650
651// The dispatcher for token fetchers.
// The dispatcher for token fetchers: it positions the scanner at the next
// token, inspects the character(s) there, and delegates to the fetcher for
// whichever token type starts at the current position.
func yaml_parser_fetch_next_token(parser *yaml_parser_t) bool {
	// Ensure that the buffer is initialized.
	if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
		return false
	}

	// Check if we just started scanning. Fetch STREAM-START then.
	if !parser.stream_start_produced {
		return yaml_parser_fetch_stream_start(parser)
	}

	// Eat whitespaces and comments until we reach the next token.
	if !yaml_parser_scan_to_next_token(parser) {
		return false
	}

	// Check the indentation level against the current column; this emits
	// BLOCK-END tokens for every block collection being closed.
	if !yaml_parser_unroll_indent(parser, parser.mark.column) {
		return false
	}

	// Ensure that the buffer contains at least 4 characters. 4 is the length
	// of the longest indicators ('--- ' and '... ').
	if parser.unread < 4 && !yaml_parser_update_buffer(parser, 4) {
		return false
	}

	// Is it the end of the stream?
	if is_z(parser.buffer, parser.buffer_pos) {
		return yaml_parser_fetch_stream_end(parser)
	}

	// Is it a directive? ('%' only counts at the start of a line.)
	if parser.mark.column == 0 && parser.buffer[parser.buffer_pos] == '%' {
		return yaml_parser_fetch_directive(parser)
	}

	buf := parser.buffer
	pos := parser.buffer_pos

	// Is it the document start indicator? ('---' at column 0 followed by a
	// blank or end-of-input.)
	if parser.mark.column == 0 && buf[pos] == '-' && buf[pos+1] == '-' && buf[pos+2] == '-' && is_blankz(buf, pos+3) {
		return yaml_parser_fetch_document_indicator(parser, yaml_DOCUMENT_START_TOKEN)
	}

	// Is it the document end indicator? ('...' at column 0.)
	if parser.mark.column == 0 && buf[pos] == '.' && buf[pos+1] == '.' && buf[pos+2] == '.' && is_blankz(buf, pos+3) {
		return yaml_parser_fetch_document_indicator(parser, yaml_DOCUMENT_END_TOKEN)
	}

	// Is it the flow sequence start indicator?
	if buf[pos] == '[' {
		return yaml_parser_fetch_flow_collection_start(parser, yaml_FLOW_SEQUENCE_START_TOKEN)
	}

	// Is it the flow mapping start indicator?
	if parser.buffer[parser.buffer_pos] == '{' {
		return yaml_parser_fetch_flow_collection_start(parser, yaml_FLOW_MAPPING_START_TOKEN)
	}

	// Is it the flow sequence end indicator?
	if parser.buffer[parser.buffer_pos] == ']' {
		return yaml_parser_fetch_flow_collection_end(parser,
			yaml_FLOW_SEQUENCE_END_TOKEN)
	}

	// Is it the flow mapping end indicator?
	if parser.buffer[parser.buffer_pos] == '}' {
		return yaml_parser_fetch_flow_collection_end(parser,
			yaml_FLOW_MAPPING_END_TOKEN)
	}

	// Is it the flow entry indicator?
	if parser.buffer[parser.buffer_pos] == ',' {
		return yaml_parser_fetch_flow_entry(parser)
	}

	// Is it the block entry indicator? ('-' followed by a blank.)
	if parser.buffer[parser.buffer_pos] == '-' && is_blankz(parser.buffer, parser.buffer_pos+1) {
		return yaml_parser_fetch_block_entry(parser)
	}

	// Is it the key indicator? In flow context a bare '?' suffices; in block
	// context it must be followed by a blank.
	if parser.buffer[parser.buffer_pos] == '?' && (parser.flow_level > 0 || is_blankz(parser.buffer, parser.buffer_pos+1)) {
		return yaml_parser_fetch_key(parser)
	}

	// Is it the value indicator? Same rule as for '?' above.
	if parser.buffer[parser.buffer_pos] == ':' && (parser.flow_level > 0 || is_blankz(parser.buffer, parser.buffer_pos+1)) {
		return yaml_parser_fetch_value(parser)
	}

	// Is it an alias?
	if parser.buffer[parser.buffer_pos] == '*' {
		return yaml_parser_fetch_anchor(parser, yaml_ALIAS_TOKEN)
	}

	// Is it an anchor?
	if parser.buffer[parser.buffer_pos] == '&' {
		return yaml_parser_fetch_anchor(parser, yaml_ANCHOR_TOKEN)
	}

	// Is it a tag?
	if parser.buffer[parser.buffer_pos] == '!' {
		return yaml_parser_fetch_tag(parser)
	}

	// Is it a literal scalar? (Block scalars exist only in block context.)
	if parser.buffer[parser.buffer_pos] == '|' && parser.flow_level == 0 {
		return yaml_parser_fetch_block_scalar(parser, true)
	}

	// Is it a folded scalar?
	if parser.buffer[parser.buffer_pos] == '>' && parser.flow_level == 0 {
		return yaml_parser_fetch_block_scalar(parser, false)
	}

	// Is it a single-quoted scalar?
	if parser.buffer[parser.buffer_pos] == '\'' {
		return yaml_parser_fetch_flow_scalar(parser, true)
	}

	// Is it a double-quoted scalar?
	if parser.buffer[parser.buffer_pos] == '"' {
		return yaml_parser_fetch_flow_scalar(parser, false)
	}

	// Is it a plain scalar?
	//
	// A plain scalar may start with any non-blank characters except
	//
	//      '-', '?', ':', ',', '[', ']', '{', '}',
	//      '#', '&', '*', '!', '|', '>', '\'', '\"',
	//      '%', '@', '`'.
	//
	// In the block context (and, for the '-' indicator, in the flow context
	// too), it may also start with the characters
	//
	//      '-', '?', ':'
	//
	// if it is followed by a non-space character.
	//
	// The last rule is more restrictive than the specification requires.
	// [Go] Make this logic more reasonable.
	//switch parser.buffer[parser.buffer_pos] {
	//case '-', '?', ':', ',', '?', '-', ',', ':', ']', '[', '}', '{', '&', '#', '!', '*', '>', '|', '"', '\'', '@', '%', '-', '`':
	//}
	if !(is_blankz(parser.buffer, parser.buffer_pos) || parser.buffer[parser.buffer_pos] == '-' ||
		parser.buffer[parser.buffer_pos] == '?' || parser.buffer[parser.buffer_pos] == ':' ||
		parser.buffer[parser.buffer_pos] == ',' || parser.buffer[parser.buffer_pos] == '[' ||
		parser.buffer[parser.buffer_pos] == ']' || parser.buffer[parser.buffer_pos] == '{' ||
		parser.buffer[parser.buffer_pos] == '}' || parser.buffer[parser.buffer_pos] == '#' ||
		parser.buffer[parser.buffer_pos] == '&' || parser.buffer[parser.buffer_pos] == '*' ||
		parser.buffer[parser.buffer_pos] == '!' || parser.buffer[parser.buffer_pos] == '|' ||
		parser.buffer[parser.buffer_pos] == '>' || parser.buffer[parser.buffer_pos] == '\'' ||
		parser.buffer[parser.buffer_pos] == '"' || parser.buffer[parser.buffer_pos] == '%' ||
		parser.buffer[parser.buffer_pos] == '@' || parser.buffer[parser.buffer_pos] == '`') ||
		(parser.buffer[parser.buffer_pos] == '-' && !is_blank(parser.buffer, parser.buffer_pos+1)) ||
		(parser.flow_level == 0 &&
			(parser.buffer[parser.buffer_pos] == '?' || parser.buffer[parser.buffer_pos] == ':') &&
			!is_blankz(parser.buffer, parser.buffer_pos+1)) {
		return yaml_parser_fetch_plain_scalar(parser)
	}

	// If we don't determine the token type so far, it is an error.
	return yaml_parser_set_scanner_error(parser,
		"while scanning for the next token", parser.mark,
		"found character that cannot start any token")
}
821
822func yaml_simple_key_is_valid(parser *yaml_parser_t, simple_key *yaml_simple_key_t) (valid, ok bool) {
823 if !simple_key.possible {
824 return false, true
825 }
826
827 // The 1.2 specification says:
828 //
829 // "If the ? indicator is omitted, parsing needs to see past the
830 // implicit key to recognize it as such. To limit the amount of
831 // lookahead required, the “:” indicator must appear at most 1024
832 // Unicode characters beyond the start of the key. In addition, the key
833 // is restricted to a single line."
834 //
835 if simple_key.mark.line < parser.mark.line || simple_key.mark.index+1024 < parser.mark.index {
836 // Check if the potential simple key to be removed is required.
837 if simple_key.required {
838 return false, yaml_parser_set_scanner_error(parser,
839 "while scanning a simple key", simple_key.mark,
840 "could not find expected ':'")
841 }
842 simple_key.possible = false
843 return false, true
844 }
845 return true, true
846}
847
848// Check if a simple key may start at the current position and add it if
849// needed.
850func yaml_parser_save_simple_key(parser *yaml_parser_t) bool {
851 // A simple key is required at the current position if the scanner is in
852 // the block context and the current column coincides with the indentation
853 // level.
854
855 required := parser.flow_level == 0 && parser.indent == parser.mark.column
856
857 //
858 // If the current position may start a simple key, save it.
859 //
860 if parser.simple_key_allowed {
861 simple_key := yaml_simple_key_t{
862 possible: true,
863 required: required,
864 token_number: parser.tokens_parsed + (len(parser.tokens) - parser.tokens_head),
865 mark: parser.mark,
866 }
867
868 if !yaml_parser_remove_simple_key(parser) {
869 return false
870 }
871 parser.simple_keys[len(parser.simple_keys)-1] = simple_key
872 parser.simple_keys_by_tok[simple_key.token_number] = len(parser.simple_keys) - 1
873 }
874 return true
875}
876
877// Remove a potential simple key at the current flow level.
878func yaml_parser_remove_simple_key(parser *yaml_parser_t) bool {
879 i := len(parser.simple_keys) - 1
880 if parser.simple_keys[i].possible {
881 // If the key is required, it is an error.
882 if parser.simple_keys[i].required {
883 return yaml_parser_set_scanner_error(parser,
884 "while scanning a simple key", parser.simple_keys[i].mark,
885 "could not find expected ':'")
886 }
887 // Remove the key from the stack.
888 parser.simple_keys[i].possible = false
889 delete(parser.simple_keys_by_tok, parser.simple_keys[i].token_number)
890 }
891 return true
892}
893
// max_flow_level caps the nesting depth of flow collections ('[' / '{') so
// that a maliciously deep document cannot grow the simple-key stack without
// bound; exceeding it is reported as a scanner error.
const max_flow_level = 10000
896
897// Increase the flow level and resize the simple key list if needed.
898func yaml_parser_increase_flow_level(parser *yaml_parser_t) bool {
899 // Reset the simple key on the next level.
900 parser.simple_keys = append(parser.simple_keys, yaml_simple_key_t{
901 possible: false,
902 required: false,
903 token_number: parser.tokens_parsed + (len(parser.tokens) - parser.tokens_head),
904 mark: parser.mark,
905 })
906
907 // Increase the flow level.
908 parser.flow_level++
909 if parser.flow_level > max_flow_level {
910 return yaml_parser_set_scanner_error(parser,
911 "while increasing flow level", parser.simple_keys[len(parser.simple_keys)-1].mark,
912 fmt.Sprintf("exceeded max depth of %d", max_flow_level))
913 }
914 return true
915}
916
917// Decrease the flow level.
918func yaml_parser_decrease_flow_level(parser *yaml_parser_t) bool {
919 if parser.flow_level > 0 {
920 parser.flow_level--
921 last := len(parser.simple_keys) - 1
922 delete(parser.simple_keys_by_tok, parser.simple_keys[last].token_number)
923 parser.simple_keys = parser.simple_keys[:last]
924 }
925 return true
926}
927
// max_indents caps the depth of the indentation stack, limiting how deeply
// block collections may nest; exceeding it is reported as a scanner error.
const max_indents = 10000
930
// Push the current indentation level to the stack and set the new level
// if the current column is greater than the indentation level. In this
// case, append or insert the specified token into the token queue.
934func yaml_parser_roll_indent(parser *yaml_parser_t, column, number int, typ yaml_token_type_t, mark yaml_mark_t) bool {
935 // In the flow context, do nothing.
936 if parser.flow_level > 0 {
937 return true
938 }
939
940 if parser.indent < column {
941 // Push the current indentation level to the stack and set the new
942 // indentation level.
943 parser.indents = append(parser.indents, parser.indent)
944 parser.indent = column
945 if len(parser.indents) > max_indents {
946 return yaml_parser_set_scanner_error(parser,
947 "while increasing indent level", parser.simple_keys[len(parser.simple_keys)-1].mark,
948 fmt.Sprintf("exceeded max depth of %d", max_indents))
949 }
950
951 // Create a token and insert it into the queue.
952 token := yaml_token_t{
953 typ: typ,
954 start_mark: mark,
955 end_mark: mark,
956 }
957 if number > -1 {
958 number -= parser.tokens_parsed
959 }
960 yaml_insert_token(parser, number, &token)
961 }
962 return true
963}
964
965// Pop indentation levels from the indents stack until the current level
966// becomes less or equal to the column. For each indentation level, append
967// the BLOCK-END token.
968func yaml_parser_unroll_indent(parser *yaml_parser_t, column int) bool {
969 // In the flow context, do nothing.
970 if parser.flow_level > 0 {
971 return true
972 }
973
974 // Loop through the indentation levels in the stack.
975 for parser.indent > column {
976 // Create a token and append it to the queue.
977 token := yaml_token_t{
978 typ: yaml_BLOCK_END_TOKEN,
979 start_mark: parser.mark,
980 end_mark: parser.mark,
981 }
982 yaml_insert_token(parser, -1, &token)
983
984 // Pop the indentation level.
985 parser.indent = parser.indents[len(parser.indents)-1]
986 parser.indents = parser.indents[:len(parser.indents)-1]
987 }
988 return true
989}
990
991// Initialize the scanner and produce the STREAM-START token.
992func yaml_parser_fetch_stream_start(parser *yaml_parser_t) bool {
993
994 // Set the initial indentation.
995 parser.indent = -1
996
997 // Initialize the simple key stack.
998 parser.simple_keys = append(parser.simple_keys, yaml_simple_key_t{})
999
1000 parser.simple_keys_by_tok = make(map[int]int)
1001
1002 // A simple key is allowed at the beginning of the stream.
1003 parser.simple_key_allowed = true
1004
1005 // We have started.
1006 parser.stream_start_produced = true
1007
1008 // Create the STREAM-START token and append it to the queue.
1009 token := yaml_token_t{
1010 typ: yaml_STREAM_START_TOKEN,
1011 start_mark: parser.mark,
1012 end_mark: parser.mark,
1013 encoding: parser.encoding,
1014 }
1015 yaml_insert_token(parser, -1, &token)
1016 return true
1017}
1018
1019// Produce the STREAM-END token and shut down the scanner.
1020func yaml_parser_fetch_stream_end(parser *yaml_parser_t) bool {
1021
1022 // Force new line.
1023 if parser.mark.column != 0 {
1024 parser.mark.column = 0
1025 parser.mark.line++
1026 }
1027
1028 // Reset the indentation level.
1029 if !yaml_parser_unroll_indent(parser, -1) {
1030 return false
1031 }
1032
1033 // Reset simple keys.
1034 if !yaml_parser_remove_simple_key(parser) {
1035 return false
1036 }
1037
1038 parser.simple_key_allowed = false
1039
1040 // Create the STREAM-END token and append it to the queue.
1041 token := yaml_token_t{
1042 typ: yaml_STREAM_END_TOKEN,
1043 start_mark: parser.mark,
1044 end_mark: parser.mark,
1045 }
1046 yaml_insert_token(parser, -1, &token)
1047 return true
1048}
1049
1050// Produce a VERSION-DIRECTIVE or TAG-DIRECTIVE token.
1051func yaml_parser_fetch_directive(parser *yaml_parser_t) bool {
1052 // Reset the indentation level.
1053 if !yaml_parser_unroll_indent(parser, -1) {
1054 return false
1055 }
1056
1057 // Reset simple keys.
1058 if !yaml_parser_remove_simple_key(parser) {
1059 return false
1060 }
1061
1062 parser.simple_key_allowed = false
1063
1064 // Create the YAML-DIRECTIVE or TAG-DIRECTIVE token.
1065 token := yaml_token_t{}
1066 if !yaml_parser_scan_directive(parser, &token) {
1067 return false
1068 }
1069 // Append the token to the queue.
1070 yaml_insert_token(parser, -1, &token)
1071 return true
1072}
1073
1074// Produce the DOCUMENT-START or DOCUMENT-END token.
1075func yaml_parser_fetch_document_indicator(parser *yaml_parser_t, typ yaml_token_type_t) bool {
1076 // Reset the indentation level.
1077 if !yaml_parser_unroll_indent(parser, -1) {
1078 return false
1079 }
1080
1081 // Reset simple keys.
1082 if !yaml_parser_remove_simple_key(parser) {
1083 return false
1084 }
1085
1086 parser.simple_key_allowed = false
1087
1088 // Consume the token.
1089 start_mark := parser.mark
1090
1091 skip(parser)
1092 skip(parser)
1093 skip(parser)
1094
1095 end_mark := parser.mark
1096
1097 // Create the DOCUMENT-START or DOCUMENT-END token.
1098 token := yaml_token_t{
1099 typ: typ,
1100 start_mark: start_mark,
1101 end_mark: end_mark,
1102 }
1103 // Append the token to the queue.
1104 yaml_insert_token(parser, -1, &token)
1105 return true
1106}
1107
1108// Produce the FLOW-SEQUENCE-START or FLOW-MAPPING-START token.
1109func yaml_parser_fetch_flow_collection_start(parser *yaml_parser_t, typ yaml_token_type_t) bool {
1110 // The indicators '[' and '{' may start a simple key.
1111 if !yaml_parser_save_simple_key(parser) {
1112 return false
1113 }
1114
1115 // Increase the flow level.
1116 if !yaml_parser_increase_flow_level(parser) {
1117 return false
1118 }
1119
1120 // A simple key may follow the indicators '[' and '{'.
1121 parser.simple_key_allowed = true
1122
1123 // Consume the token.
1124 start_mark := parser.mark
1125 skip(parser)
1126 end_mark := parser.mark
1127
1128 // Create the FLOW-SEQUENCE-START of FLOW-MAPPING-START token.
1129 token := yaml_token_t{
1130 typ: typ,
1131 start_mark: start_mark,
1132 end_mark: end_mark,
1133 }
1134 // Append the token to the queue.
1135 yaml_insert_token(parser, -1, &token)
1136 return true
1137}
1138
1139// Produce the FLOW-SEQUENCE-END or FLOW-MAPPING-END token.
1140func yaml_parser_fetch_flow_collection_end(parser *yaml_parser_t, typ yaml_token_type_t) bool {
1141 // Reset any potential simple key on the current flow level.
1142 if !yaml_parser_remove_simple_key(parser) {
1143 return false
1144 }
1145
1146 // Decrease the flow level.
1147 if !yaml_parser_decrease_flow_level(parser) {
1148 return false
1149 }
1150
1151 // No simple keys after the indicators ']' and '}'.
1152 parser.simple_key_allowed = false
1153
1154 // Consume the token.
1155
1156 start_mark := parser.mark
1157 skip(parser)
1158 end_mark := parser.mark
1159
1160 // Create the FLOW-SEQUENCE-END of FLOW-MAPPING-END token.
1161 token := yaml_token_t{
1162 typ: typ,
1163 start_mark: start_mark,
1164 end_mark: end_mark,
1165 }
1166 // Append the token to the queue.
1167 yaml_insert_token(parser, -1, &token)
1168 return true
1169}
1170
1171// Produce the FLOW-ENTRY token.
1172func yaml_parser_fetch_flow_entry(parser *yaml_parser_t) bool {
1173 // Reset any potential simple keys on the current flow level.
1174 if !yaml_parser_remove_simple_key(parser) {
1175 return false
1176 }
1177
1178 // Simple keys are allowed after ','.
1179 parser.simple_key_allowed = true
1180
1181 // Consume the token.
1182 start_mark := parser.mark
1183 skip(parser)
1184 end_mark := parser.mark
1185
1186 // Create the FLOW-ENTRY token and append it to the queue.
1187 token := yaml_token_t{
1188 typ: yaml_FLOW_ENTRY_TOKEN,
1189 start_mark: start_mark,
1190 end_mark: end_mark,
1191 }
1192 yaml_insert_token(parser, -1, &token)
1193 return true
1194}
1195
1196// Produce the BLOCK-ENTRY token.
1197func yaml_parser_fetch_block_entry(parser *yaml_parser_t) bool {
1198 // Check if the scanner is in the block context.
1199 if parser.flow_level == 0 {
1200 // Check if we are allowed to start a new entry.
1201 if !parser.simple_key_allowed {
1202 return yaml_parser_set_scanner_error(parser, "", parser.mark,
1203 "block sequence entries are not allowed in this context")
1204 }
1205 // Add the BLOCK-SEQUENCE-START token if needed.
1206 if !yaml_parser_roll_indent(parser, parser.mark.column, -1, yaml_BLOCK_SEQUENCE_START_TOKEN, parser.mark) {
1207 return false
1208 }
1209 } else {
1210 // It is an error for the '-' indicator to occur in the flow context,
1211 // but we let the Parser detect and report about it because the Parser
1212 // is able to point to the context.
1213 }
1214
1215 // Reset any potential simple keys on the current flow level.
1216 if !yaml_parser_remove_simple_key(parser) {
1217 return false
1218 }
1219
1220 // Simple keys are allowed after '-'.
1221 parser.simple_key_allowed = true
1222
1223 // Consume the token.
1224 start_mark := parser.mark
1225 skip(parser)
1226 end_mark := parser.mark
1227
1228 // Create the BLOCK-ENTRY token and append it to the queue.
1229 token := yaml_token_t{
1230 typ: yaml_BLOCK_ENTRY_TOKEN,
1231 start_mark: start_mark,
1232 end_mark: end_mark,
1233 }
1234 yaml_insert_token(parser, -1, &token)
1235 return true
1236}
1237
1238// Produce the KEY token.
1239func yaml_parser_fetch_key(parser *yaml_parser_t) bool {
1240
1241 // In the block context, additional checks are required.
1242 if parser.flow_level == 0 {
1243 // Check if we are allowed to start a new key (not nessesary simple).
1244 if !parser.simple_key_allowed {
1245 return yaml_parser_set_scanner_error(parser, "", parser.mark,
1246 "mapping keys are not allowed in this context")
1247 }
1248 // Add the BLOCK-MAPPING-START token if needed.
1249 if !yaml_parser_roll_indent(parser, parser.mark.column, -1, yaml_BLOCK_MAPPING_START_TOKEN, parser.mark) {
1250 return false
1251 }
1252 }
1253
1254 // Reset any potential simple keys on the current flow level.
1255 if !yaml_parser_remove_simple_key(parser) {
1256 return false
1257 }
1258
1259 // Simple keys are allowed after '?' in the block context.
1260 parser.simple_key_allowed = parser.flow_level == 0
1261
1262 // Consume the token.
1263 start_mark := parser.mark
1264 skip(parser)
1265 end_mark := parser.mark
1266
1267 // Create the KEY token and append it to the queue.
1268 token := yaml_token_t{
1269 typ: yaml_KEY_TOKEN,
1270 start_mark: start_mark,
1271 end_mark: end_mark,
1272 }
1273 yaml_insert_token(parser, -1, &token)
1274 return true
1275}
1276
1277// Produce the VALUE token.
// Produce the VALUE token for ':'. When the ':' completes a previously
// saved simple key, a KEY token (and, in the block context, a
// BLOCK-MAPPING-START) is inserted retroactively at the key's position.
func yaml_parser_fetch_value(parser *yaml_parser_t) bool {

	simple_key := &parser.simple_keys[len(parser.simple_keys)-1]

	// Have we found a simple key?
	if valid, ok := yaml_simple_key_is_valid(parser, simple_key); !ok {
		return false

	} else if valid {

		// Create the KEY token and insert it into the queue.
		token := yaml_token_t{
			typ:        yaml_KEY_TOKEN,
			start_mark: simple_key.mark,
			end_mark:   simple_key.mark,
		}
		// The KEY token belongs where the key started, not at the current
		// position, so its queue slot is derived from the saved token number.
		yaml_insert_token(parser, simple_key.token_number-parser.tokens_parsed, &token)

		// In the block context, we may need to add the BLOCK-MAPPING-START token.
		if !yaml_parser_roll_indent(parser, simple_key.mark.column,
			simple_key.token_number,
			yaml_BLOCK_MAPPING_START_TOKEN, simple_key.mark) {
			return false
		}

		// Remove the simple key.
		simple_key.possible = false
		delete(parser.simple_keys_by_tok, simple_key.token_number)

		// A simple key cannot follow another simple key.
		parser.simple_key_allowed = false

	} else {
		// The ':' indicator follows a complex key.

		// In the block context, extra checks are required.
		if parser.flow_level == 0 {

			// Check if we are allowed to start a complex value.
			if !parser.simple_key_allowed {
				return yaml_parser_set_scanner_error(parser, "", parser.mark,
					"mapping values are not allowed in this context")
			}

			// Add the BLOCK-MAPPING-START token if needed.
			if !yaml_parser_roll_indent(parser, parser.mark.column, -1, yaml_BLOCK_MAPPING_START_TOKEN, parser.mark) {
				return false
			}
		}

		// Simple keys after ':' are allowed in the block context.
		parser.simple_key_allowed = parser.flow_level == 0
	}

	// Consume the token.
	start_mark := parser.mark
	skip(parser)
	end_mark := parser.mark

	// Create the VALUE token and append it to the queue.
	token := yaml_token_t{
		typ:        yaml_VALUE_TOKEN,
		start_mark: start_mark,
		end_mark:   end_mark,
	}
	yaml_insert_token(parser, -1, &token)
	return true
}
1346
1347// Produce the ALIAS or ANCHOR token.
1348func yaml_parser_fetch_anchor(parser *yaml_parser_t, typ yaml_token_type_t) bool {
1349 // An anchor or an alias could be a simple key.
1350 if !yaml_parser_save_simple_key(parser) {
1351 return false
1352 }
1353
1354 // A simple key cannot follow an anchor or an alias.
1355 parser.simple_key_allowed = false
1356
1357 // Create the ALIAS or ANCHOR token and append it to the queue.
1358 var token yaml_token_t
1359 if !yaml_parser_scan_anchor(parser, &token, typ) {
1360 return false
1361 }
1362 yaml_insert_token(parser, -1, &token)
1363 return true
1364}
1365
1366// Produce the TAG token.
1367func yaml_parser_fetch_tag(parser *yaml_parser_t) bool {
1368 // A tag could be a simple key.
1369 if !yaml_parser_save_simple_key(parser) {
1370 return false
1371 }
1372
1373 // A simple key cannot follow a tag.
1374 parser.simple_key_allowed = false
1375
1376 // Create the TAG token and append it to the queue.
1377 var token yaml_token_t
1378 if !yaml_parser_scan_tag(parser, &token) {
1379 return false
1380 }
1381 yaml_insert_token(parser, -1, &token)
1382 return true
1383}
1384
1385// Produce the SCALAR(...,literal) or SCALAR(...,folded) tokens.
1386func yaml_parser_fetch_block_scalar(parser *yaml_parser_t, literal bool) bool {
1387 // Remove any potential simple keys.
1388 if !yaml_parser_remove_simple_key(parser) {
1389 return false
1390 }
1391
1392 // A simple key may follow a block scalar.
1393 parser.simple_key_allowed = true
1394
1395 // Create the SCALAR token and append it to the queue.
1396 var token yaml_token_t
1397 if !yaml_parser_scan_block_scalar(parser, &token, literal) {
1398 return false
1399 }
1400 yaml_insert_token(parser, -1, &token)
1401 return true
1402}
1403
1404// Produce the SCALAR(...,single-quoted) or SCALAR(...,double-quoted) tokens.
1405func yaml_parser_fetch_flow_scalar(parser *yaml_parser_t, single bool) bool {
1406 // A plain scalar could be a simple key.
1407 if !yaml_parser_save_simple_key(parser) {
1408 return false
1409 }
1410
1411 // A simple key cannot follow a flow scalar.
1412 parser.simple_key_allowed = false
1413
1414 // Create the SCALAR token and append it to the queue.
1415 var token yaml_token_t
1416 if !yaml_parser_scan_flow_scalar(parser, &token, single) {
1417 return false
1418 }
1419 yaml_insert_token(parser, -1, &token)
1420 return true
1421}
1422
1423// Produce the SCALAR(...,plain) token.
1424func yaml_parser_fetch_plain_scalar(parser *yaml_parser_t) bool {
1425 // A plain scalar could be a simple key.
1426 if !yaml_parser_save_simple_key(parser) {
1427 return false
1428 }
1429
1430 // A simple key cannot follow a flow scalar.
1431 parser.simple_key_allowed = false
1432
1433 // Create the SCALAR token and append it to the queue.
1434 var token yaml_token_t
1435 if !yaml_parser_scan_plain_scalar(parser, &token) {
1436 return false
1437 }
1438 yaml_insert_token(parser, -1, &token)
1439 return true
1440}
1441
// Eat whitespaces and comments until the next token is found.
//
// Throughout this function the pattern
//	if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { return false }
// guarantees at least one character is available before it is inspected;
// it returns false only on a reader error.
func yaml_parser_scan_to_next_token(parser *yaml_parser_t) bool {

	// Until the next token is not found.
	for {
		// Allow the BOM mark to start a line.
		if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
			return false
		}
		if parser.mark.column == 0 && is_bom(parser.buffer, parser.buffer_pos) {
			skip(parser)
		}

		// Eat whitespaces.
		// Tabs are allowed:
		// - in the flow context
		// - in the block context, but not at the beginning of the line or
		// after '-', '?', or ':' (complex value).
		if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
			return false
		}

		// simple_key_allowed is true exactly at those block-context positions
		// where a tab would corrupt indentation, hence the negation below.
		for parser.buffer[parser.buffer_pos] == ' ' || ((parser.flow_level > 0 || !parser.simple_key_allowed) && parser.buffer[parser.buffer_pos] == '\t') {
			skip(parser)
			if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
				return false
			}
		}

		// Eat a comment until a line break.
		if parser.buffer[parser.buffer_pos] == '#' {
			for !is_breakz(parser.buffer, parser.buffer_pos) {
				skip(parser)
				if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
					return false
				}
			}
		}

		// If it is a line break, eat it.
		if is_break(parser.buffer, parser.buffer_pos) {
			// A break may be a two-byte CRLF sequence, so ensure two
			// characters are buffered before skip_line consumes it.
			if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
				return false
			}
			skip_line(parser)

			// In the block context, a new line may start a simple key.
			if parser.flow_level == 0 {
				parser.simple_key_allowed = true
			}
		} else {
			break // We have found a token.
		}
	}

	return true
}
1499
// Scan a YAML-DIRECTIVE or TAG-DIRECTIVE token.
//
// Scope:
//      %YAML    1.1    # a comment \n
//      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
//      %TAG    !yaml!  tag:yaml.org,2002:  \n
//      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
//
// On success the scanned token is stored into *token and true is returned;
// on any scanner or reader error, false.
func yaml_parser_scan_directive(parser *yaml_parser_t, token *yaml_token_t) bool {
	// Eat '%'.
	start_mark := parser.mark
	skip(parser)

	// Scan the directive name.
	var name []byte
	if !yaml_parser_scan_directive_name(parser, start_mark, &name) {
		return false
	}

	// Is it a YAML directive?
	if bytes.Equal(name, []byte("YAML")) {
		// Scan the VERSION directive value.
		var major, minor int8
		if !yaml_parser_scan_version_directive_value(parser, start_mark, &major, &minor) {
			return false
		}
		end_mark := parser.mark

		// Create a VERSION-DIRECTIVE token.
		*token = yaml_token_t{
			typ:        yaml_VERSION_DIRECTIVE_TOKEN,
			start_mark: start_mark,
			end_mark:   end_mark,
			major:      major,
			minor:      minor,
		}

		// Is it a TAG directive?
	} else if bytes.Equal(name, []byte("TAG")) {
		// Scan the TAG directive value.
		var handle, prefix []byte
		if !yaml_parser_scan_tag_directive_value(parser, start_mark, &handle, &prefix) {
			return false
		}
		end_mark := parser.mark

		// Create a TAG-DIRECTIVE token.
		*token = yaml_token_t{
			typ:        yaml_TAG_DIRECTIVE_TOKEN,
			start_mark: start_mark,
			end_mark:   end_mark,
			value:      handle,
			prefix:     prefix,
		}

		// Unknown directive.
	} else {
		yaml_parser_set_scanner_error(parser, "while scanning a directive",
			start_mark, "found unknown directive name")
		return false
	}

	// Eat the rest of the line including any comments.
	if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
		return false
	}

	for is_blank(parser.buffer, parser.buffer_pos) {
		skip(parser)
		if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
			return false
		}
	}

	// Consume a trailing comment, if present, up to the line break.
	if parser.buffer[parser.buffer_pos] == '#' {
		for !is_breakz(parser.buffer, parser.buffer_pos) {
			skip(parser)
			if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
				return false
			}
		}
	}

	// Check if we are at the end of the line.
	if !is_breakz(parser.buffer, parser.buffer_pos) {
		yaml_parser_set_scanner_error(parser, "while scanning a directive",
			start_mark, "did not find expected comment or line break")
		return false
	}

	// Eat a line break.
	if is_break(parser.buffer, parser.buffer_pos) {
		// Two characters may be needed in case of a CRLF break.
		if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
			return false
		}
		skip_line(parser)
	}

	return true
}
1600
// Scan the directive name.
//
// Scope:
//      %YAML   1.1     # a comment \n
//       ^^^^
//      %TAG    !yaml!  tag:yaml.org,2002:  \n
//       ^^^
//
// The name (one or more alphanumeric characters) is stored into *name.
func yaml_parser_scan_directive_name(parser *yaml_parser_t, start_mark yaml_mark_t, name *[]byte) bool {
	// Consume the directive name.
	if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
		return false
	}

	var s []byte
	for is_alpha(parser.buffer, parser.buffer_pos) {
		s = read(parser, s)
		if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
			return false
		}
	}

	// Check if the name is empty.
	if len(s) == 0 {
		yaml_parser_set_scanner_error(parser, "while scanning a directive",
			start_mark, "could not find expected directive name")
		return false
	}

	// Check for a blank character after the name.
	if !is_blankz(parser.buffer, parser.buffer_pos) {
		yaml_parser_set_scanner_error(parser, "while scanning a directive",
			start_mark, "found unexpected non-alphabetical character")
		return false
	}
	*name = s
	return true
}
1639
// Scan the value of VERSION-DIRECTIVE.
//
// Scope:
//      %YAML   1.1     # a comment \n
//           ^^^^^^
//
// Parses "<major>.<minor>" into *major and *minor.
func yaml_parser_scan_version_directive_value(parser *yaml_parser_t, start_mark yaml_mark_t, major, minor *int8) bool {
	// Eat whitespaces.
	if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
		return false
	}
	for is_blank(parser.buffer, parser.buffer_pos) {
		skip(parser)
		if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
			return false
		}
	}

	// Consume the major version number.
	if !yaml_parser_scan_version_directive_number(parser, start_mark, major) {
		return false
	}

	// Eat '.'.
	if parser.buffer[parser.buffer_pos] != '.' {
		return yaml_parser_set_scanner_error(parser, "while scanning a %YAML directive",
			start_mark, "did not find expected digit or '.' character")
	}

	skip(parser)

	// Consume the minor version number.
	if !yaml_parser_scan_version_directive_number(parser, start_mark, minor) {
		return false
	}
	return true
}
1676
// Maximum number of digits accepted in a YAML version component
// (so the value always fits in an int8).
const max_number_length = 2

// Scan the version number of VERSION-DIRECTIVE.
//
// Scope:
//      %YAML   1.1     # a comment \n
//              ^
//      %YAML   1.1     # a comment \n
//                ^
//
// Accumulates up to max_number_length decimal digits into *number.
func yaml_parser_scan_version_directive_number(parser *yaml_parser_t, start_mark yaml_mark_t, number *int8) bool {

	// Repeat while the next character is digit.
	if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
		return false
	}
	var value, length int8
	for is_digit(parser.buffer, parser.buffer_pos) {
		// Check if the number is too long.
		length++
		if length > max_number_length {
			return yaml_parser_set_scanner_error(parser, "while scanning a %YAML directive",
				start_mark, "found extremely long version number")
		}
		value = value*10 + int8(as_digit(parser.buffer, parser.buffer_pos))
		skip(parser)
		if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
			return false
		}
	}

	// Check if the number was present.
	if length == 0 {
		return yaml_parser_set_scanner_error(parser, "while scanning a %YAML directive",
			start_mark, "did not find expected version number")
	}
	*number = value
	return true
}
1715
// Scan the value of a TAG-DIRECTIVE token.
//
// Scope:
//      %TAG    !yaml!  tag:yaml.org,2002:  \n
//          ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
//
// Stores the tag handle (e.g. "!yaml!") into *handle and the tag prefix
// into *prefix. The two parts must be separated by at least one blank.
func yaml_parser_scan_tag_directive_value(parser *yaml_parser_t, start_mark yaml_mark_t, handle, prefix *[]byte) bool {
	var handle_value, prefix_value []byte

	// Eat whitespaces.
	if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
		return false
	}

	for is_blank(parser.buffer, parser.buffer_pos) {
		skip(parser)
		if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
			return false
		}
	}

	// Scan a handle.
	if !yaml_parser_scan_tag_handle(parser, true, start_mark, &handle_value) {
		return false
	}

	// Expect a whitespace.
	if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
		return false
	}
	if !is_blank(parser.buffer, parser.buffer_pos) {
		yaml_parser_set_scanner_error(parser, "while scanning a %TAG directive",
			start_mark, "did not find expected whitespace")
		return false
	}

	// Eat whitespaces.
	for is_blank(parser.buffer, parser.buffer_pos) {
		skip(parser)
		if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
			return false
		}
	}

	// Scan a prefix.
	if !yaml_parser_scan_tag_uri(parser, true, nil, start_mark, &prefix_value) {
		return false
	}

	// Expect a whitespace or line break.
	if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
		return false
	}
	if !is_blankz(parser.buffer, parser.buffer_pos) {
		yaml_parser_set_scanner_error(parser, "while scanning a %TAG directive",
			start_mark, "did not find expected whitespace or line break")
		return false
	}

	// Both parts scanned successfully; publish them to the caller.
	*handle = handle_value
	*prefix = prefix_value
	return true
}
1779
// Scan an ANCHOR ("&name") or ALIAS ("*name") token; typ selects which
// token type is produced and which error context is reported.
func yaml_parser_scan_anchor(parser *yaml_parser_t, token *yaml_token_t, typ yaml_token_type_t) bool {
	var s []byte

	// Eat the indicator character ('&' or '*').
	start_mark := parser.mark
	skip(parser)

	// Consume the value.
	if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
		return false
	}

	for is_alpha(parser.buffer, parser.buffer_pos) {
		s = read(parser, s)
		if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
			return false
		}
	}

	end_mark := parser.mark

	/*
	 * Check if length of the anchor is greater than 0 and it is followed by
	 * a whitespace character or one of the indicators:
	 *
	 *      '?', ':', ',', ']', '}', '%', '@', '`'.
	 */

	if len(s) == 0 ||
		!(is_blankz(parser.buffer, parser.buffer_pos) || parser.buffer[parser.buffer_pos] == '?' ||
			parser.buffer[parser.buffer_pos] == ':' || parser.buffer[parser.buffer_pos] == ',' ||
			parser.buffer[parser.buffer_pos] == ']' || parser.buffer[parser.buffer_pos] == '}' ||
			parser.buffer[parser.buffer_pos] == '%' || parser.buffer[parser.buffer_pos] == '@' ||
			parser.buffer[parser.buffer_pos] == '`') {
		context := "while scanning an alias"
		if typ == yaml_ANCHOR_TOKEN {
			context = "while scanning an anchor"
		}
		yaml_parser_set_scanner_error(parser, context, start_mark,
			"did not find expected alphabetic or numeric character")
		return false
	}

	// Create a token.
	*token = yaml_token_t{
		typ:        typ,
		start_mark: start_mark,
		end_mark:   end_mark,
		value:      s,
	}

	return true
}
1833
/*
 * Scan a TAG token.
 *
 * Handles all three tag forms: the verbatim '!<uri>' form, the shorthand
 * '!handle!suffix' form, and the bare '!suffix' form.  The resulting
 * token carries the handle in 'value' and the suffix in 'suffix'.
 */

func yaml_parser_scan_tag(parser *yaml_parser_t, token *yaml_token_t) bool {
	var handle, suffix []byte

	start_mark := parser.mark

	// Check if the tag is in the canonical form.  Two characters are
	// needed to look at the byte after the leading '!'.
	if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
		return false
	}

	if parser.buffer[parser.buffer_pos+1] == '<' {
		// Keep the handle as ''

		// Eat '!<'
		skip(parser)
		skip(parser)

		// Consume the tag value.
		if !yaml_parser_scan_tag_uri(parser, false, nil, start_mark, &suffix) {
			return false
		}

		// Check for '>' and eat it.
		if parser.buffer[parser.buffer_pos] != '>' {
			yaml_parser_set_scanner_error(parser, "while scanning a tag",
				start_mark, "did not find the expected '>'")
			return false
		}

		skip(parser)
	} else {
		// The tag has either the '!suffix' or the '!handle!suffix' form.

		// First, try to scan a handle.
		if !yaml_parser_scan_tag_handle(parser, false, start_mark, &handle) {
			return false
		}

		// Check if it is, indeed, handle.  A real handle starts and ends
		// with '!' and has at least one character between.
		if handle[0] == '!' && len(handle) > 1 && handle[len(handle)-1] == '!' {
			// Scan the suffix now.
			if !yaml_parser_scan_tag_uri(parser, false, nil, start_mark, &suffix) {
				return false
			}
		} else {
			// It wasn't a handle after all.  Scan the rest of the tag.
			// What was scanned so far becomes the head of the URI.
			if !yaml_parser_scan_tag_uri(parser, false, handle, start_mark, &suffix) {
				return false
			}

			// Set the handle to '!'.
			handle = []byte{'!'}

			// A special case: the '!' tag.  Set the handle to '' and the
			// suffix to '!'.
			if len(suffix) == 0 {
				handle, suffix = suffix, handle
			}
		}
	}

	// Check the character which ends the tag.
	if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
		return false
	}
	if !is_blankz(parser.buffer, parser.buffer_pos) {
		yaml_parser_set_scanner_error(parser, "while scanning a tag",
			start_mark, "did not find expected whitespace or line break")
		return false
	}

	end_mark := parser.mark

	// Create a token.
	*token = yaml_token_t{
		typ:        yaml_TAG_TOKEN,
		start_mark: start_mark,
		end_mark:   end_mark,
		value:      handle,
		suffix:     suffix,
	}
	return true
}
1921
// Scan a tag handle.
//
// A handle is '!', possibly followed by alphanumerics and a closing '!'
// (e.g. "!", "!!", "!yaml!").  When directive is true the handle is being
// scanned inside a %TAG directive, which has stricter requirements.
func yaml_parser_scan_tag_handle(parser *yaml_parser_t, directive bool, start_mark yaml_mark_t, handle *[]byte) bool {
	// Check the initial '!' character.
	if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
		return false
	}
	if parser.buffer[parser.buffer_pos] != '!' {
		yaml_parser_set_scanner_tag_error(parser, directive,
			start_mark, "did not find expected '!'")
		return false
	}

	var s []byte

	// Copy the '!' character.
	s = read(parser, s)

	// Copy all subsequent alphabetical and numerical characters.
	if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
		return false
	}
	for is_alpha(parser.buffer, parser.buffer_pos) {
		s = read(parser, s)
		if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
			return false
		}
	}

	// Check if the trailing character is '!' and copy it.
	if parser.buffer[parser.buffer_pos] == '!' {
		s = read(parser, s)
	} else {
		// It's either the '!' tag or not really a tag handle.  If it's a %TAG
		// directive, it's an error.  If it's a tag token, it must be a part of URI.
		if directive && string(s) != "!" {
			yaml_parser_set_scanner_tag_error(parser, directive,
				start_mark, "did not find expected '!'")
			return false
		}
	}

	*handle = s
	return true
}
1966
// Scan a tag URI.
//
// If head is non-empty it seeds the URI (used when a presumed handle turned
// out to be the start of a plain '!suffix' tag); its leading '!' is not
// copied.  URI-escape sequences ('%xx') are decoded as they are read.
func yaml_parser_scan_tag_uri(parser *yaml_parser_t, directive bool, head []byte, start_mark yaml_mark_t, uri *[]byte) bool {
	//size_t length = head ? strlen((char *)head) : 0
	var s []byte
	hasTag := len(head) > 0

	// Copy the head if needed.
	//
	// Note that we don't copy the leading '!' character.
	if len(head) > 1 {
		s = append(s, head[1:]...)
	}

	// Scan the tag.
	if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
		return false
	}

	// The set of characters that may appear in URI is as follows:
	//
	//      '0'-'9', 'A'-'Z', 'a'-'z', '_', '-', ';', '/', '?', ':', '@', '&',
	//      '=', '+', '$', ',', '.', '!', '~', '*', '\'', '(', ')', '[', ']',
	//      '%'.
	// [Go] Convert this into more reasonable logic.
	for is_alpha(parser.buffer, parser.buffer_pos) || parser.buffer[parser.buffer_pos] == ';' ||
		parser.buffer[parser.buffer_pos] == '/' || parser.buffer[parser.buffer_pos] == '?' ||
		parser.buffer[parser.buffer_pos] == ':' || parser.buffer[parser.buffer_pos] == '@' ||
		parser.buffer[parser.buffer_pos] == '&' || parser.buffer[parser.buffer_pos] == '=' ||
		parser.buffer[parser.buffer_pos] == '+' || parser.buffer[parser.buffer_pos] == '$' ||
		parser.buffer[parser.buffer_pos] == ',' || parser.buffer[parser.buffer_pos] == '.' ||
		parser.buffer[parser.buffer_pos] == '!' || parser.buffer[parser.buffer_pos] == '~' ||
		parser.buffer[parser.buffer_pos] == '*' || parser.buffer[parser.buffer_pos] == '\'' ||
		parser.buffer[parser.buffer_pos] == '(' || parser.buffer[parser.buffer_pos] == ')' ||
		parser.buffer[parser.buffer_pos] == '[' || parser.buffer[parser.buffer_pos] == ']' ||
		parser.buffer[parser.buffer_pos] == '%' {
		// Check if it is a URI-escape sequence.
		if parser.buffer[parser.buffer_pos] == '%' {
			if !yaml_parser_scan_uri_escapes(parser, directive, start_mark, &s) {
				return false
			}
		} else {
			s = read(parser, s)
		}
		if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
			return false
		}
		hasTag = true
	}

	// Either a head or at least one scanned character is required.
	if !hasTag {
		yaml_parser_set_scanner_tag_error(parser, directive,
			start_mark, "did not find expected tag URI")
		return false
	}
	*uri = s
	return true
}
2024
// Decode an URI-escape sequence corresponding to a single UTF-8 character.
//
// Reads one or more consecutive '%xx' octets, validates that together they
// form one well-formed UTF-8 sequence, and appends the raw bytes to *s.
func yaml_parser_scan_uri_escapes(parser *yaml_parser_t, directive bool, start_mark yaml_mark_t, s *[]byte) bool {

	// Decode the required number of characters.
	// w == 1024 is a sentinel meaning "length not yet determined"; after the
	// first octet it becomes the remaining byte count of the UTF-8 sequence.
	w := 1024
	for w > 0 {
		// Check for a URI-escaped octet: '%' plus two hex digits.
		if parser.unread < 3 && !yaml_parser_update_buffer(parser, 3) {
			return false
		}

		if !(parser.buffer[parser.buffer_pos] == '%' &&
			is_hex(parser.buffer, parser.buffer_pos+1) &&
			is_hex(parser.buffer, parser.buffer_pos+2)) {
			return yaml_parser_set_scanner_tag_error(parser, directive,
				start_mark, "did not find URI escaped octet")
		}

		// Get the octet.
		octet := byte((as_hex(parser.buffer, parser.buffer_pos+1) << 4) + as_hex(parser.buffer, parser.buffer_pos+2))

		// If it is the leading octet, determine the length of the UTF-8 sequence.
		if w == 1024 {
			w = width(octet)
			if w == 0 {
				return yaml_parser_set_scanner_tag_error(parser, directive,
					start_mark, "found an incorrect leading UTF-8 octet")
			}
		} else {
			// Check if the trailing octet is correct (10xxxxxx).
			if octet&0xC0 != 0x80 {
				return yaml_parser_set_scanner_tag_error(parser, directive,
					start_mark, "found an incorrect trailing UTF-8 octet")
			}
		}

		// Copy the octet and move the pointers past the three-byte '%xx'.
		*s = append(*s, octet)
		skip(parser)
		skip(parser)
		skip(parser)
		w--
	}
	return true
}
2070
// Scan a block scalar (literal '|' or folded '>').
//
// Parses the optional chomping ('+'/'-') and indentation indicators in
// either order, consumes the scalar body honoring the indentation level,
// folds line breaks in the folded style, applies the chomping rule to
// trailing breaks, and produces a SCALAR token.
func yaml_parser_scan_block_scalar(parser *yaml_parser_t, token *yaml_token_t, literal bool) bool {
	// Eat the indicator '|' or '>'.
	start_mark := parser.mark
	skip(parser)

	// Scan the additional block scalar indicators.
	if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
		return false
	}

	// Check for a chomping indicator.
	// chomping: -1 strip, 0 clip (default), +1 keep.
	var chomping, increment int
	if parser.buffer[parser.buffer_pos] == '+' || parser.buffer[parser.buffer_pos] == '-' {
		// Set the chomping method and eat the indicator.
		if parser.buffer[parser.buffer_pos] == '+' {
			chomping = +1
		} else {
			chomping = -1
		}
		skip(parser)

		// Check for an indentation indicator.
		if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
			return false
		}
		if is_digit(parser.buffer, parser.buffer_pos) {
			// Check that the indentation is greater than 0.
			if parser.buffer[parser.buffer_pos] == '0' {
				yaml_parser_set_scanner_error(parser, "while scanning a block scalar",
					start_mark, "found an indentation indicator equal to 0")
				return false
			}

			// Get the indentation level and eat the indicator.
			increment = as_digit(parser.buffer, parser.buffer_pos)
			skip(parser)
		}

	} else if is_digit(parser.buffer, parser.buffer_pos) {
		// Do the same as above, but in the opposite order.

		if parser.buffer[parser.buffer_pos] == '0' {
			yaml_parser_set_scanner_error(parser, "while scanning a block scalar",
				start_mark, "found an indentation indicator equal to 0")
			return false
		}
		increment = as_digit(parser.buffer, parser.buffer_pos)
		skip(parser)

		if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
			return false
		}
		if parser.buffer[parser.buffer_pos] == '+' || parser.buffer[parser.buffer_pos] == '-' {
			if parser.buffer[parser.buffer_pos] == '+' {
				chomping = +1
			} else {
				chomping = -1
			}
			skip(parser)
		}
	}

	// Eat whitespaces and comments to the end of the line.
	if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
		return false
	}
	for is_blank(parser.buffer, parser.buffer_pos) {
		skip(parser)
		if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
			return false
		}
	}
	if parser.buffer[parser.buffer_pos] == '#' {
		for !is_breakz(parser.buffer, parser.buffer_pos) {
			skip(parser)
			if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
				return false
			}
		}
	}

	// Check if we are at the end of the line.
	if !is_breakz(parser.buffer, parser.buffer_pos) {
		yaml_parser_set_scanner_error(parser, "while scanning a block scalar",
			start_mark, "did not find expected comment or line break")
		return false
	}

	// Eat a line break.
	if is_break(parser.buffer, parser.buffer_pos) {
		// Two characters may be needed for a CRLF break.
		if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
			return false
		}
		skip_line(parser)
	}

	end_mark := parser.mark

	// Set the indentation level if it was specified.
	var indent int
	if increment > 0 {
		if parser.indent >= 0 {
			indent = parser.indent + increment
		} else {
			indent = increment
		}
	}

	// Scan the leading line breaks and determine the indentation level if needed.
	var s, leading_break, trailing_breaks []byte
	if !yaml_parser_scan_block_scalar_breaks(parser, &indent, &trailing_breaks, start_mark, &end_mark) {
		return false
	}

	// Scan the block scalar content.
	if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
		return false
	}
	var leading_blank, trailing_blank bool
	for parser.mark.column == indent && !is_z(parser.buffer, parser.buffer_pos) {
		// We are at the beginning of a non-empty line.

		// Is it a trailing whitespace?
		trailing_blank = is_blank(parser.buffer, parser.buffer_pos)

		// Check if we need to fold the leading line break.
		// Folding only applies between two non-blank lines in folded style.
		if !literal && !leading_blank && !trailing_blank && len(leading_break) > 0 && leading_break[0] == '\n' {
			// Do we need to join the lines by space?
			if len(trailing_breaks) == 0 {
				s = append(s, ' ')
			}
		} else {
			s = append(s, leading_break...)
		}
		leading_break = leading_break[:0]

		// Append the remaining line breaks.
		s = append(s, trailing_breaks...)
		trailing_breaks = trailing_breaks[:0]

		// Is it a leading whitespace?
		leading_blank = is_blank(parser.buffer, parser.buffer_pos)

		// Consume the current line.
		for !is_breakz(parser.buffer, parser.buffer_pos) {
			s = read(parser, s)
			if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
				return false
			}
		}

		// Consume the line break.
		if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
			return false
		}

		leading_break = read_line(parser, leading_break)

		// Eat the following indentation spaces and line breaks.
		if !yaml_parser_scan_block_scalar_breaks(parser, &indent, &trailing_breaks, start_mark, &end_mark) {
			return false
		}
	}

	// Chomp the tail: clip (0) keeps the final break, keep (+1) also keeps
	// any trailing empty lines, strip (-1) drops everything.
	if chomping != -1 {
		s = append(s, leading_break...)
	}
	if chomping == 1 {
		s = append(s, trailing_breaks...)
	}

	// Create a token.
	*token = yaml_token_t{
		typ:        yaml_SCALAR_TOKEN,
		start_mark: start_mark,
		end_mark:   end_mark,
		value:      s,
		style:      yaml_LITERAL_SCALAR_STYLE,
	}
	if !literal {
		token.style = yaml_FOLDED_SCALAR_STYLE
	}
	return true
}
2257
// Scan indentation spaces and line breaks for a block scalar.  Determine the
// indentation level if needed.
//
// *indent == 0 means the level is still auto-detected; in that case it is
// set on return from the maximum indentation seen (but at least
// parser.indent+1 and at least 1).  Consumed breaks accumulate in *breaks
// and *end_mark tracks the position after the last consumed break.
func yaml_parser_scan_block_scalar_breaks(parser *yaml_parser_t, indent *int, breaks *[]byte, start_mark yaml_mark_t, end_mark *yaml_mark_t) bool {
	*end_mark = parser.mark

	// Eat the indentation spaces and line breaks.
	max_indent := 0
	for {
		// Eat the indentation spaces.
		if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
			return false
		}
		for (*indent == 0 || parser.mark.column < *indent) && is_space(parser.buffer, parser.buffer_pos) {
			skip(parser)
			if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
				return false
			}
		}
		if parser.mark.column > max_indent {
			max_indent = parser.mark.column
		}

		// Check for a tab character messing the indentation.
		if (*indent == 0 || parser.mark.column < *indent) && is_tab(parser.buffer, parser.buffer_pos) {
			return yaml_parser_set_scanner_error(parser, "while scanning a block scalar",
				start_mark, "found a tab character where an indentation space is expected")
		}

		// Have we found a non-empty line?
		if !is_break(parser.buffer, parser.buffer_pos) {
			break
		}

		// Consume the line break (two characters may be needed for CRLF).
		if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
			return false
		}
		// [Go] Should really be returning breaks instead.
		*breaks = read_line(parser, *breaks)
		*end_mark = parser.mark
	}

	// Determine the indentation level if needed.
	if *indent == 0 {
		*indent = max_indent
		if *indent < parser.indent+1 {
			*indent = parser.indent + 1
		}
		if *indent < 1 {
			*indent = 1
		}
	}
	return true
}
2312
2313// Scan a quoted scalar.
2314func yaml_parser_scan_flow_scalar(parser *yaml_parser_t, token *yaml_token_t, single bool) bool {
2315 // Eat the left quote.
2316 start_mark := parser.mark
2317 skip(parser)
2318
2319 // Consume the content of the quoted scalar.
2320 var s, leading_break, trailing_breaks, whitespaces []byte
2321 for {
2322 // Check that there are no document indicators at the beginning of the line.
2323 if parser.unread < 4 && !yaml_parser_update_buffer(parser, 4) {
2324 return false
2325 }
2326
2327 if parser.mark.column == 0 &&
2328 ((parser.buffer[parser.buffer_pos+0] == '-' &&
2329 parser.buffer[parser.buffer_pos+1] == '-' &&
2330 parser.buffer[parser.buffer_pos+2] == '-') ||
2331 (parser.buffer[parser.buffer_pos+0] == '.' &&
2332 parser.buffer[parser.buffer_pos+1] == '.' &&
2333 parser.buffer[parser.buffer_pos+2] == '.')) &&
2334 is_blankz(parser.buffer, parser.buffer_pos+3) {
2335 yaml_parser_set_scanner_error(parser, "while scanning a quoted scalar",
2336 start_mark, "found unexpected document indicator")
2337 return false
2338 }
2339
2340 // Check for EOF.
2341 if is_z(parser.buffer, parser.buffer_pos) {
2342 yaml_parser_set_scanner_error(parser, "while scanning a quoted scalar",
2343 start_mark, "found unexpected end of stream")
2344 return false
2345 }
2346
2347 // Consume non-blank characters.
2348 leading_blanks := false
2349 for !is_blankz(parser.buffer, parser.buffer_pos) {
2350 if single && parser.buffer[parser.buffer_pos] == '\'' && parser.buffer[parser.buffer_pos+1] == '\'' {
2351 // Is is an escaped single quote.
2352 s = append(s, '\'')
2353 skip(parser)
2354 skip(parser)
2355
2356 } else if single && parser.buffer[parser.buffer_pos] == '\'' {
2357 // It is a right single quote.
2358 break
2359 } else if !single && parser.buffer[parser.buffer_pos] == '"' {
2360 // It is a right double quote.
2361 break
2362
2363 } else if !single && parser.buffer[parser.buffer_pos] == '\\' && is_break(parser.buffer, parser.buffer_pos+1) {
2364 // It is an escaped line break.
2365 if parser.unread < 3 && !yaml_parser_update_buffer(parser, 3) {
2366 return false
2367 }
2368 skip(parser)
2369 skip_line(parser)
2370 leading_blanks = true
2371 break
2372
2373 } else if !single && parser.buffer[parser.buffer_pos] == '\\' {
2374 // It is an escape sequence.
2375 code_length := 0
2376
2377 // Check the escape character.
2378 switch parser.buffer[parser.buffer_pos+1] {
2379 case '0':
2380 s = append(s, 0)
2381 case 'a':
2382 s = append(s, '\x07')
2383 case 'b':
2384 s = append(s, '\x08')
2385 case 't', '\t':
2386 s = append(s, '\x09')
2387 case 'n':
2388 s = append(s, '\x0A')
2389 case 'v':
2390 s = append(s, '\x0B')
2391 case 'f':
2392 s = append(s, '\x0C')
2393 case 'r':
2394 s = append(s, '\x0D')
2395 case 'e':
2396 s = append(s, '\x1B')
2397 case ' ':
2398 s = append(s, '\x20')
2399 case '"':
2400 s = append(s, '"')
2401 case '\'':
2402 s = append(s, '\'')
2403 case '\\':
2404 s = append(s, '\\')
2405 case 'N': // NEL (#x85)
2406 s = append(s, '\xC2')
2407 s = append(s, '\x85')
2408 case '_': // #xA0
2409 s = append(s, '\xC2')
2410 s = append(s, '\xA0')
2411 case 'L': // LS (#x2028)
2412 s = append(s, '\xE2')
2413 s = append(s, '\x80')
2414 s = append(s, '\xA8')
2415 case 'P': // PS (#x2029)
2416 s = append(s, '\xE2')
2417 s = append(s, '\x80')
2418 s = append(s, '\xA9')
2419 case 'x':
2420 code_length = 2
2421 case 'u':
2422 code_length = 4
2423 case 'U':
2424 code_length = 8
2425 default:
2426 yaml_parser_set_scanner_error(parser, "while parsing a quoted scalar",
2427 start_mark, "found unknown escape character")
2428 return false
2429 }
2430
2431 skip(parser)
2432 skip(parser)
2433
2434 // Consume an arbitrary escape code.
2435 if code_length > 0 {
2436 var value int
2437
2438 // Scan the character value.
2439 if parser.unread < code_length && !yaml_parser_update_buffer(parser, code_length) {
2440 return false
2441 }
2442 for k := 0; k < code_length; k++ {
2443 if !is_hex(parser.buffer, parser.buffer_pos+k) {
2444 yaml_parser_set_scanner_error(parser, "while parsing a quoted scalar",
2445 start_mark, "did not find expected hexdecimal number")
2446 return false
2447 }
2448 value = (value << 4) + as_hex(parser.buffer, parser.buffer_pos+k)
2449 }
2450
2451 // Check the value and write the character.
2452 if (value >= 0xD800 && value <= 0xDFFF) || value > 0x10FFFF {
2453 yaml_parser_set_scanner_error(parser, "while parsing a quoted scalar",
2454 start_mark, "found invalid Unicode character escape code")
2455 return false
2456 }
2457 if value <= 0x7F {
2458 s = append(s, byte(value))
2459 } else if value <= 0x7FF {
2460 s = append(s, byte(0xC0+(value>>6)))
2461 s = append(s, byte(0x80+(value&0x3F)))
2462 } else if value <= 0xFFFF {
2463 s = append(s, byte(0xE0+(value>>12)))
2464 s = append(s, byte(0x80+((value>>6)&0x3F)))
2465 s = append(s, byte(0x80+(value&0x3F)))
2466 } else {
2467 s = append(s, byte(0xF0+(value>>18)))
2468 s = append(s, byte(0x80+((value>>12)&0x3F)))
2469 s = append(s, byte(0x80+((value>>6)&0x3F)))
2470 s = append(s, byte(0x80+(value&0x3F)))
2471 }
2472
2473 // Advance the pointer.
2474 for k := 0; k < code_length; k++ {
2475 skip(parser)
2476 }
2477 }
2478 } else {
2479 // It is a non-escaped non-blank character.
2480 s = read(parser, s)
2481 }
2482 if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
2483 return false
2484 }
2485 }
2486
2487 if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
2488 return false
2489 }
2490
2491 // Check if we are at the end of the scalar.
2492 if single {
2493 if parser.buffer[parser.buffer_pos] == '\'' {
2494 break
2495 }
2496 } else {
2497 if parser.buffer[parser.buffer_pos] == '"' {
2498 break
2499 }
2500 }
2501
2502 // Consume blank characters.
2503 for is_blank(parser.buffer, parser.buffer_pos) || is_break(parser.buffer, parser.buffer_pos) {
2504 if is_blank(parser.buffer, parser.buffer_pos) {
2505 // Consume a space or a tab character.
2506 if !leading_blanks {
2507 whitespaces = read(parser, whitespaces)
2508 } else {
2509 skip(parser)
2510 }
2511 } else {
2512 if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
2513 return false
2514 }
2515
2516 // Check if it is a first line break.
2517 if !leading_blanks {
2518 whitespaces = whitespaces[:0]
2519 leading_break = read_line(parser, leading_break)
2520 leading_blanks = true
2521 } else {
2522 trailing_breaks = read_line(parser, trailing_breaks)
2523 }
2524 }
2525 if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
2526 return false
2527 }
2528 }
2529
2530 // Join the whitespaces or fold line breaks.
2531 if leading_blanks {
2532 // Do we need to fold line breaks?
2533 if len(leading_break) > 0 && leading_break[0] == '\n' {
2534 if len(trailing_breaks) == 0 {
2535 s = append(s, ' ')
2536 } else {
2537 s = append(s, trailing_breaks...)
2538 }
2539 } else {
2540 s = append(s, leading_break...)
2541 s = append(s, trailing_breaks...)
2542 }
2543 trailing_breaks = trailing_breaks[:0]
2544 leading_break = leading_break[:0]
2545 } else {
2546 s = append(s, whitespaces...)
2547 whitespaces = whitespaces[:0]
2548 }
2549 }
2550
2551 // Eat the right quote.
2552 skip(parser)
2553 end_mark := parser.mark
2554
2555 // Create a token.
2556 *token = yaml_token_t{
2557 typ: yaml_SCALAR_TOKEN,
2558 start_mark: start_mark,
2559 end_mark: end_mark,
2560 value: s,
2561 style: yaml_SINGLE_QUOTED_SCALAR_STYLE,
2562 }
2563 if !single {
2564 token.style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
2565 }
2566 return true
2567}
2568
// Scan a plain scalar.
//
// Consumes an unquoted scalar starting at parser.mark and stores it into
// *token as a yaml_SCALAR_TOKEN with yaml_PLAIN_SCALAR_STYLE. Returns
// false (with a scanner error recorded on the parser) on invalid input or
// a buffer-refill failure. Line folding follows the YAML rules: a single
// line break folds to a space, additional breaks are kept literally.
func yaml_parser_scan_plain_scalar(parser *yaml_parser_t, token *yaml_token_t) bool {

	// s accumulates the scalar value; leading_break / trailing_breaks /
	// whitespaces hold pending blanks that may or may not be folded in.
	var s, leading_break, trailing_breaks, whitespaces []byte
	var leading_blanks bool
	// A plain scalar must be indented deeper than the current block context.
	var indent = parser.indent + 1

	start_mark := parser.mark
	end_mark := parser.mark

	// Consume the content of the plain scalar.
	for {
		// Check for a document indicator ("---" or "..." at column 0),
		// which terminates the scalar.
		if parser.unread < 4 && !yaml_parser_update_buffer(parser, 4) {
			return false
		}
		if parser.mark.column == 0 &&
			((parser.buffer[parser.buffer_pos+0] == '-' &&
				parser.buffer[parser.buffer_pos+1] == '-' &&
				parser.buffer[parser.buffer_pos+2] == '-') ||
				(parser.buffer[parser.buffer_pos+0] == '.' &&
					parser.buffer[parser.buffer_pos+1] == '.' &&
					parser.buffer[parser.buffer_pos+2] == '.')) &&
			is_blankz(parser.buffer, parser.buffer_pos+3) {
			break
		}

		// Check for a comment.
		if parser.buffer[parser.buffer_pos] == '#' {
			break
		}

		// Consume non-blank characters.
		for !is_blankz(parser.buffer, parser.buffer_pos) {

			// Check for indicators that may end a plain scalar:
			// ": " always ends it; in flow context the flow indicators
			// ",?[]{}" end it as well.
			if (parser.buffer[parser.buffer_pos] == ':' && is_blankz(parser.buffer, parser.buffer_pos+1)) ||
				(parser.flow_level > 0 &&
					(parser.buffer[parser.buffer_pos] == ',' ||
						parser.buffer[parser.buffer_pos] == '?' || parser.buffer[parser.buffer_pos] == '[' ||
						parser.buffer[parser.buffer_pos] == ']' || parser.buffer[parser.buffer_pos] == '{' ||
						parser.buffer[parser.buffer_pos] == '}')) {
				break
			}

			// Check if we need to join whitespaces and breaks.
			if leading_blanks || len(whitespaces) > 0 {
				if leading_blanks {
					// Do we need to fold line breaks?
					// leading_break is non-empty here: leading_blanks is
					// only set right after read_line fills it.
					if leading_break[0] == '\n' {
						if len(trailing_breaks) == 0 {
							// A single break folds into one space.
							s = append(s, ' ')
						} else {
							// Extra breaks are preserved literally.
							s = append(s, trailing_breaks...)
						}
					} else {
						// Non-LF break (e.g. CR): keep all breaks as-is.
						s = append(s, leading_break...)
						s = append(s, trailing_breaks...)
					}
					trailing_breaks = trailing_breaks[:0]
					leading_break = leading_break[:0]
					leading_blanks = false
				} else {
					// Inner whitespace is copied through unchanged.
					s = append(s, whitespaces...)
					whitespaces = whitespaces[:0]
				}
			}

			// Copy the character.
			s = read(parser, s)

			// end_mark tracks the position just past the last non-blank
			// character, so trailing blanks are excluded from the token.
			end_mark = parser.mark
			if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
				return false
			}
		}

		// Is it the end?
		if !(is_blank(parser.buffer, parser.buffer_pos) || is_break(parser.buffer, parser.buffer_pos)) {
			break
		}

		// Consume blank characters.
		if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
			return false
		}

		for is_blank(parser.buffer, parser.buffer_pos) || is_break(parser.buffer, parser.buffer_pos) {
			if is_blank(parser.buffer, parser.buffer_pos) {

				// Check for tab characters that abuse indentation.
				if leading_blanks && parser.mark.column < indent && is_tab(parser.buffer, parser.buffer_pos) {
					yaml_parser_set_scanner_error(parser, "while scanning a plain scalar",
						start_mark, "found a tab character that violates indentation")
					return false
				}

				// Consume a space or a tab character. Blanks before the
				// first break are remembered (they may be inner
				// whitespace); blanks after a break are indentation and
				// are discarded.
				if !leading_blanks {
					whitespaces = read(parser, whitespaces)
				} else {
					skip(parser)
				}
			} else {
				if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
					return false
				}

				// Check if it is a first line break.
				if !leading_blanks {
					whitespaces = whitespaces[:0]
					leading_break = read_line(parser, leading_break)
					leading_blanks = true
				} else {
					trailing_breaks = read_line(parser, trailing_breaks)
				}
			}
			if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
				return false
			}
		}

		// Check indentation level: in block context the scalar ends when a
		// continuation line is not indented far enough.
		if parser.flow_level == 0 && parser.mark.column < indent {
			break
		}
	}

	// Create a token.
	*token = yaml_token_t{
		typ:        yaml_SCALAR_TOKEN,
		start_mark: start_mark,
		end_mark:   end_mark,
		value:      s,
		style:      yaml_PLAIN_SCALAR_STYLE,
	}

	// Note that we change the 'simple_key_allowed' flag: consuming
	// trailing blanks/breaks re-enables simple keys on the next line.
	if leading_blanks {
		parser.simple_key_allowed = true
	}
	return true
}
diff --git a/vendor/gopkg.in/yaml.v2/sorter.go b/vendor/gopkg.in/yaml.v2/sorter.go
new file mode 100644
index 0000000..4c45e66
--- /dev/null
+++ b/vendor/gopkg.in/yaml.v2/sorter.go
@@ -0,0 +1,113 @@
1package yaml
2
3import (
4 "reflect"
5 "unicode"
6)
7
// keyList sorts map keys for deterministic YAML output: numeric and bool
// keys first in value order, then string keys in a "natural" order where
// embedded digit runs compare numerically ("a2" sorts before "a10").
type keyList []reflect.Value

func (l keyList) Len() int      { return len(l) }
func (l keyList) Swap(i, j int) { l[i], l[j] = l[j], l[i] }

// Less reports whether element i sorts before element j.
func (l keyList) Less(i, j int) bool {
	a := l[i]
	b := l[j]
	ak := a.Kind()
	bk := b.Kind()
	// Unwrap non-nil interfaces and pointers to compare underlying values.
	for (ak == reflect.Interface || ak == reflect.Ptr) && !a.IsNil() {
		a = a.Elem()
		ak = a.Kind()
	}
	for (bk == reflect.Interface || bk == reflect.Ptr) && !b.IsNil() {
		b = b.Elem()
		bk = b.Kind()
	}
	af, aok := keyFloat(a)
	bf, bok := keyFloat(b)
	if aok && bok {
		// Both numeric (or bool): order by float value, then by kind,
		// then by exact numeric comparison to break float-rounding ties.
		if af != bf {
			return af < bf
		}
		if ak != bk {
			return ak < bk
		}
		return numLess(a, b)
	}
	// Mixed kinds or non-strings: fall back to ordering by kind number,
	// which puts all numbers before all strings.
	if ak != reflect.String || bk != reflect.String {
		return ak < bk
	}
	// Natural string comparison: scan to the first differing rune.
	ar, br := []rune(a.String()), []rune(b.String())
	for i := 0; i < len(ar) && i < len(br); i++ {
		if ar[i] == br[i] {
			continue
		}
		al := unicode.IsLetter(ar[i])
		bl := unicode.IsLetter(br[i])
		if al && bl {
			return ar[i] < br[i]
		}
		if al || bl {
			// Digits (and other non-letters) sort before letters.
			return bl
		}
		var ai, bi int
		var an, bn int64
		if ar[i] == '0' || br[i] == '0' {
			// The difference may sit inside a digit run with leading
			// zeros; if the run started earlier with a nonzero digit,
			// seed both accumulators so the runs compare by value.
			for j := i - 1; j >= 0 && unicode.IsDigit(ar[j]); j-- {
				if ar[j] != '0' {
					an = 1
					bn = 1
					break
				}
			}
		}
		// NOTE(review): extremely long digit runs can overflow int64 here —
		// assumed not to occur for realistic map keys; confirm if needed.
		for ai = i; ai < len(ar) && unicode.IsDigit(ar[ai]); ai++ {
			an = an*10 + int64(ar[ai]-'0')
		}
		for bi = i; bi < len(br) && unicode.IsDigit(br[bi]); bi++ {
			bn = bn*10 + int64(br[bi]-'0')
		}
		if an != bn {
			return an < bn
		}
		if ai != bi {
			// Same numeric value but different run lengths (leading
			// zeros): the shorter run sorts first.
			return ai < bi
		}
		return ar[i] < br[i]
	}
	// One string is a prefix of the other: the shorter sorts first.
	return len(ar) < len(br)
}
79
80// keyFloat returns a float value for v if it is a number/bool
81// and whether it is a number/bool or not.
82func keyFloat(v reflect.Value) (f float64, ok bool) {
83 switch v.Kind() {
84 case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
85 return float64(v.Int()), true
86 case reflect.Float32, reflect.Float64:
87 return v.Float(), true
88 case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
89 return float64(v.Uint()), true
90 case reflect.Bool:
91 if v.Bool() {
92 return 1, true
93 }
94 return 0, true
95 }
96 return 0, false
97}
98
99// numLess returns whether a < b.
100// a and b must necessarily have the same kind.
101func numLess(a, b reflect.Value) bool {
102 switch a.Kind() {
103 case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
104 return a.Int() < b.Int()
105 case reflect.Float32, reflect.Float64:
106 return a.Float() < b.Float()
107 case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
108 return a.Uint() < b.Uint()
109 case reflect.Bool:
110 return !a.Bool() && b.Bool()
111 }
112 panic("not a number")
113}
diff --git a/vendor/gopkg.in/yaml.v2/writerc.go b/vendor/gopkg.in/yaml.v2/writerc.go
new file mode 100644
index 0000000..a2dde60
--- /dev/null
+++ b/vendor/gopkg.in/yaml.v2/writerc.go
@@ -0,0 +1,26 @@
1package yaml
2
3// Set the writer error and return false.
4func yaml_emitter_set_writer_error(emitter *yaml_emitter_t, problem string) bool {
5 emitter.error = yaml_WRITER_ERROR
6 emitter.problem = problem
7 return false
8}
9
10// Flush the output buffer.
11func yaml_emitter_flush(emitter *yaml_emitter_t) bool {
12 if emitter.write_handler == nil {
13 panic("write handler not set")
14 }
15
16 // Check if the buffer is empty.
17 if emitter.buffer_pos == 0 {
18 return true
19 }
20
21 if err := emitter.write_handler(emitter, emitter.buffer[:emitter.buffer_pos]); err != nil {
22 return yaml_emitter_set_writer_error(emitter, "write error: "+err.Error())
23 }
24 emitter.buffer_pos = 0
25 return true
26}
diff --git a/vendor/gopkg.in/yaml.v2/yaml.go b/vendor/gopkg.in/yaml.v2/yaml.go
new file mode 100644
index 0000000..89650e2
--- /dev/null
+++ b/vendor/gopkg.in/yaml.v2/yaml.go
@@ -0,0 +1,466 @@
1// Package yaml implements YAML support for the Go language.
2//
3// Source code and other details for the project are available at GitHub:
4//
5// https://github.com/go-yaml/yaml
6//
7package yaml
8
9import (
10 "errors"
11 "fmt"
12 "io"
13 "reflect"
14 "strings"
15 "sync"
16)
17
// MapSlice encodes and decodes as a YAML map.
// The order of keys is preserved when encoding and decoding.
// Use it instead of a plain Go map when key order matters.
type MapSlice []MapItem

// MapItem is an item in a MapSlice: a single key/value pair.
type MapItem struct {
	Key, Value interface{}
}

// The Unmarshaler interface may be implemented by types to customize their
// behavior when being unmarshaled from a YAML document. The UnmarshalYAML
// method receives a function that may be called to unmarshal the original
// YAML value into a field or variable. It is safe to call the unmarshal
// function parameter more than once if necessary.
type Unmarshaler interface {
	UnmarshalYAML(unmarshal func(interface{}) error) error
}

// The Marshaler interface may be implemented by types to customize their
// behavior when being marshaled into a YAML document. The returned value
// is marshaled in place of the original value implementing Marshaler.
//
// If an error is returned by MarshalYAML, the marshaling procedure stops
// and returns with the provided error.
type Marshaler interface {
	MarshalYAML() (interface{}, error)
}
45
// Unmarshal decodes the first document found within the in byte slice
// and assigns decoded values into the out value.
//
// Maps and pointers (to a struct, string, int, etc) are accepted as out
// values. If an internal pointer within a struct is not initialized,
// the yaml package will initialize it if necessary for unmarshalling
// the provided data. The out parameter must not be nil.
//
// The type of the decoded values should be compatible with the respective
// values in out. If one or more values cannot be decoded due to type
// mismatches, decoding continues partially until the end of the YAML
// content, and a *yaml.TypeError is returned with details for all
// missed values.
//
// Struct fields are only unmarshalled if they are exported (have an
// upper case first letter), and are unmarshalled using the field name
// lowercased as the default key. Custom keys may be defined via the
// "yaml" name in the field tag: the content preceding the first comma
// is used as the key, and the following comma-separated options are
// used to tweak the marshalling process (see Marshal).
// Conflicting names result in a runtime error.
//
// For example:
//
//     type T struct {
//         F int `yaml:"a,omitempty"`
//         B int
//     }
//     var t T
//     yaml.Unmarshal([]byte("a: 1\nb: 2"), &t)
//
// See the documentation of Marshal for the format of tags and a list of
// supported tag options.
//
func Unmarshal(in []byte, out interface{}) (err error) {
	return unmarshal(in, out, false)
}

// UnmarshalStrict is like Unmarshal except that any fields that are found
// in the data that do not have corresponding struct members, or mapping
// keys that are duplicates, will result in
// an error.
func UnmarshalStrict(in []byte, out interface{}) (err error) {
	return unmarshal(in, out, true)
}
91
92// A Decoder reads and decodes YAML values from an input stream.
93type Decoder struct {
94 strict bool
95 parser *parser
96}
97
98// NewDecoder returns a new decoder that reads from r.
99//
100// The decoder introduces its own buffering and may read
101// data from r beyond the YAML values requested.
102func NewDecoder(r io.Reader) *Decoder {
103 return &Decoder{
104 parser: newParserFromReader(r),
105 }
106}
107
108// SetStrict sets whether strict decoding behaviour is enabled when
109// decoding items in the data (see UnmarshalStrict). By default, decoding is not strict.
110func (dec *Decoder) SetStrict(strict bool) {
111 dec.strict = strict
112}
113
114// Decode reads the next YAML-encoded value from its input
115// and stores it in the value pointed to by v.
116//
117// See the documentation for Unmarshal for details about the
118// conversion of YAML into a Go value.
119func (dec *Decoder) Decode(v interface{}) (err error) {
120 d := newDecoder(dec.strict)
121 defer handleErr(&err)
122 node := dec.parser.parse()
123 if node == nil {
124 return io.EOF
125 }
126 out := reflect.ValueOf(v)
127 if out.Kind() == reflect.Ptr && !out.IsNil() {
128 out = out.Elem()
129 }
130 d.unmarshal(node, out)
131 if len(d.terrors) > 0 {
132 return &TypeError{d.terrors}
133 }
134 return nil
135}
136
137func unmarshal(in []byte, out interface{}, strict bool) (err error) {
138 defer handleErr(&err)
139 d := newDecoder(strict)
140 p := newParser(in)
141 defer p.destroy()
142 node := p.parse()
143 if node != nil {
144 v := reflect.ValueOf(out)
145 if v.Kind() == reflect.Ptr && !v.IsNil() {
146 v = v.Elem()
147 }
148 d.unmarshal(node, v)
149 }
150 if len(d.terrors) > 0 {
151 return &TypeError{d.terrors}
152 }
153 return nil
154}
155
// Marshal serializes the value provided into a YAML document. The structure
// of the generated document will reflect the structure of the value itself.
// Maps and pointers (to struct, string, int, etc) are accepted as the in value.
//
// Struct fields are only marshalled if they are exported (have an upper case
// first letter), and are marshalled using the field name lowercased as the
// default key. Custom keys may be defined via the "yaml" name in the field
// tag: the content preceding the first comma is used as the key, and the
// following comma-separated options are used to tweak the marshalling process.
// Conflicting names result in a runtime error.
//
// The field tag format accepted is:
//
//     `(...) yaml:"[<key>][,<flag1>[,<flag2>]]" (...)`
//
// The following flags are currently supported:
//
//     omitempty    Only include the field if it's not set to the zero
//                  value for the type or to empty slices or maps.
//                  Zero valued structs will be omitted if all their public
//                  fields are zero, unless they implement an IsZero
//                  method (see the IsZeroer interface type), in which
//                  case the field will be included if that method returns true.
//
//     flow         Marshal using a flow style (useful for structs,
//                  sequences and maps).
//
//     inline       Inline the field, which must be a struct or a map,
//                  causing all of its fields or keys to be processed as if
//                  they were part of the outer struct. For maps, keys must
//                  not conflict with the yaml keys of other struct fields.
//
// In addition, if the key is "-", the field is ignored.
//
// For example:
//
//     type T struct {
//         F int `yaml:"a,omitempty"`
//         B int
//     }
//     yaml.Marshal(&T{B: 2}) // Returns "b: 2\n"
//     yaml.Marshal(&T{F: 1}) // Returns "a: 1\nb: 0\n"
//
func Marshal(in interface{}) (out []byte, err error) {
	defer handleErr(&err)
	e := newEncoder()
	defer e.destroy()
	e.marshalDoc("", reflect.ValueOf(in))
	e.finish()
	out = e.out
	return
}
208
// An Encoder writes YAML values to an output stream.
type Encoder struct {
	encoder *encoder
}

// NewEncoder returns a new encoder that writes to w.
// The Encoder should be closed after use to flush all data
// to w.
func NewEncoder(w io.Writer) *Encoder {
	return &Encoder{
		encoder: newEncoderWithWriter(w),
	}
}

// Encode writes the YAML encoding of v to the stream.
// If multiple items are encoded to the stream, the
// second and subsequent document will be preceded
// with a "---" document separator, but the first will not.
//
// See the documentation for Marshal for details about the conversion of Go
// values to YAML.
func (e *Encoder) Encode(v interface{}) (err error) {
	defer handleErr(&err)
	e.encoder.marshalDoc("", reflect.ValueOf(v))
	return nil
}

// Close closes the encoder by writing any remaining data.
// It does not write a stream terminating string "...".
// Close does not close the underlying writer.
func (e *Encoder) Close() (err error) {
	defer handleErr(&err)
	e.encoder.finish()
	return nil
}
243
// handleErr recovers a panic raised by fail/failf and stores the wrapped
// error into *err. Any other panic value is re-raised untouched. It must
// be invoked directly via defer so that recover is effective.
func handleErr(err *error) {
	r := recover()
	if r == nil {
		return
	}
	e, ok := r.(yamlError)
	if !ok {
		panic(r)
	}
	*err = e.err
}

// yamlError wraps an error so handleErr can tell package-originated
// panics apart from genuine runtime panics.
type yamlError struct {
	err error
}

// fail aborts the current operation by panicking with a wrapped error.
func fail(err error) {
	panic(yamlError{err: err})
}

// failf aborts the current operation with a formatted, "yaml: "-prefixed error.
func failf(format string, args ...interface{}) {
	panic(yamlError{err: fmt.Errorf("yaml: "+format, args...)})
}
265
266// A TypeError is returned by Unmarshal when one or more fields in
267// the YAML document cannot be properly decoded into the requested
268// types. When this error is returned, the value is still
269// unmarshaled partially.
270type TypeError struct {
271 Errors []string
272}
273
274func (e *TypeError) Error() string {
275 return fmt.Sprintf("yaml: unmarshal errors:\n %s", strings.Join(e.Errors, "\n "))
276}
277
// --------------------------------------------------------------------------
// Maintain a mapping of keys to structure field indexes

// The code in this section was copied from mgo/bson.

// structInfo holds details for the serialization of fields of
// a given struct.
type structInfo struct {
	FieldsMap  map[string]fieldInfo // Fields indexed by their YAML key.
	FieldsList []fieldInfo          // Fields in declaration order.

	// InlineMap is the number of the field in the struct that
	// contains an ,inline map, or -1 if there's none.
	InlineMap int
}

// fieldInfo describes how one struct field maps onto a YAML key.
type fieldInfo struct {
	Key       string // YAML key: the tag name, or the lowercased field name.
	Num       int    // Field index within the struct.
	OmitEmpty bool   // Field carried the ",omitempty" tag flag.
	Flow      bool   // Field carried the ",flow" tag flag.
	// Id holds the unique field identifier, so we can cheaply
	// check for field duplicates without maintaining an extra map.
	Id int

	// Inline holds the field index if the field is part of an inlined struct.
	Inline []int
}

// structMap caches the computed structInfo per struct type;
// fieldMapMutex guards concurrent access to the cache.
var structMap = make(map[reflect.Type]*structInfo)
var fieldMapMutex sync.RWMutex
309
310func getStructInfo(st reflect.Type) (*structInfo, error) {
311 fieldMapMutex.RLock()
312 sinfo, found := structMap[st]
313 fieldMapMutex.RUnlock()
314 if found {
315 return sinfo, nil
316 }
317
318 n := st.NumField()
319 fieldsMap := make(map[string]fieldInfo)
320 fieldsList := make([]fieldInfo, 0, n)
321 inlineMap := -1
322 for i := 0; i != n; i++ {
323 field := st.Field(i)
324 if field.PkgPath != "" && !field.Anonymous {
325 continue // Private field
326 }
327
328 info := fieldInfo{Num: i}
329
330 tag := field.Tag.Get("yaml")
331 if tag == "" && strings.Index(string(field.Tag), ":") < 0 {
332 tag = string(field.Tag)
333 }
334 if tag == "-" {
335 continue
336 }
337
338 inline := false
339 fields := strings.Split(tag, ",")
340 if len(fields) > 1 {
341 for _, flag := range fields[1:] {
342 switch flag {
343 case "omitempty":
344 info.OmitEmpty = true
345 case "flow":
346 info.Flow = true
347 case "inline":
348 inline = true
349 default:
350 return nil, errors.New(fmt.Sprintf("Unsupported flag %q in tag %q of type %s", flag, tag, st))
351 }
352 }
353 tag = fields[0]
354 }
355
356 if inline {
357 switch field.Type.Kind() {
358 case reflect.Map:
359 if inlineMap >= 0 {
360 return nil, errors.New("Multiple ,inline maps in struct " + st.String())
361 }
362 if field.Type.Key() != reflect.TypeOf("") {
363 return nil, errors.New("Option ,inline needs a map with string keys in struct " + st.String())
364 }
365 inlineMap = info.Num
366 case reflect.Struct:
367 sinfo, err := getStructInfo(field.Type)
368 if err != nil {
369 return nil, err
370 }
371 for _, finfo := range sinfo.FieldsList {
372 if _, found := fieldsMap[finfo.Key]; found {
373 msg := "Duplicated key '" + finfo.Key + "' in struct " + st.String()
374 return nil, errors.New(msg)
375 }
376 if finfo.Inline == nil {
377 finfo.Inline = []int{i, finfo.Num}
378 } else {
379 finfo.Inline = append([]int{i}, finfo.Inline...)
380 }
381 finfo.Id = len(fieldsList)
382 fieldsMap[finfo.Key] = finfo
383 fieldsList = append(fieldsList, finfo)
384 }
385 default:
386 //return nil, errors.New("Option ,inline needs a struct value or map field")
387 return nil, errors.New("Option ,inline needs a struct value field")
388 }
389 continue
390 }
391
392 if tag != "" {
393 info.Key = tag
394 } else {
395 info.Key = strings.ToLower(field.Name)
396 }
397
398 if _, found = fieldsMap[info.Key]; found {
399 msg := "Duplicated key '" + info.Key + "' in struct " + st.String()
400 return nil, errors.New(msg)
401 }
402
403 info.Id = len(fieldsList)
404 fieldsList = append(fieldsList, info)
405 fieldsMap[info.Key] = info
406 }
407
408 sinfo = &structInfo{
409 FieldsMap: fieldsMap,
410 FieldsList: fieldsList,
411 InlineMap: inlineMap,
412 }
413
414 fieldMapMutex.Lock()
415 structMap[st] = sinfo
416 fieldMapMutex.Unlock()
417 return sinfo, nil
418}
419
420// IsZeroer is used to check whether an object is zero to
421// determine whether it should be omitted when marshaling
422// with the omitempty flag. One notable implementation
423// is time.Time.
424type IsZeroer interface {
425 IsZero() bool
426}
427
428func isZero(v reflect.Value) bool {
429 kind := v.Kind()
430 if z, ok := v.Interface().(IsZeroer); ok {
431 if (kind == reflect.Ptr || kind == reflect.Interface) && v.IsNil() {
432 return true
433 }
434 return z.IsZero()
435 }
436 switch kind {
437 case reflect.String:
438 return len(v.String()) == 0
439 case reflect.Interface, reflect.Ptr:
440 return v.IsNil()
441 case reflect.Slice:
442 return v.Len() == 0
443 case reflect.Map:
444 return v.Len() == 0
445 case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
446 return v.Int() == 0
447 case reflect.Float32, reflect.Float64:
448 return v.Float() == 0
449 case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
450 return v.Uint() == 0
451 case reflect.Bool:
452 return !v.Bool()
453 case reflect.Struct:
454 vt := v.Type()
455 for i := v.NumField() - 1; i >= 0; i-- {
456 if vt.Field(i).PkgPath != "" {
457 continue // Private field
458 }
459 if !isZero(v.Field(i)) {
460 return false
461 }
462 }
463 return true
464 }
465 return false
466}
diff --git a/vendor/gopkg.in/yaml.v2/yamlh.go b/vendor/gopkg.in/yaml.v2/yamlh.go
new file mode 100644
index 0000000..f6a9c8e
--- /dev/null
+++ b/vendor/gopkg.in/yaml.v2/yamlh.go
@@ -0,0 +1,739 @@
1package yaml
2
3import (
4 "fmt"
5 "io"
6)
7
// The version directive data (%YAML <major>.<minor>).
type yaml_version_directive_t struct {
	major int8 // The major version number.
	minor int8 // The minor version number.
}

// The tag directive data (%TAG <handle> <prefix>).
type yaml_tag_directive_t struct {
	handle []byte // The tag handle.
	prefix []byte // The tag prefix.
}

type yaml_encoding_t int

// The stream encoding.
const (
	// Let the parser choose the encoding.
	yaml_ANY_ENCODING yaml_encoding_t = iota

	yaml_UTF8_ENCODING    // The default UTF-8 encoding.
	yaml_UTF16LE_ENCODING // The UTF-16-LE encoding with BOM.
	yaml_UTF16BE_ENCODING // The UTF-16-BE encoding with BOM.
)

type yaml_break_t int

// Line break types.
const (
	// Let the parser choose the break type.
	yaml_ANY_BREAK yaml_break_t = iota

	yaml_CR_BREAK   // Use CR for line breaks (Mac style).
	yaml_LN_BREAK   // Use LN for line breaks (Unix style).
	yaml_CRLN_BREAK // Use CR LN for line breaks (DOS style).
)

type yaml_error_type_t int

// Many bad things could happen with the parser and emitter.
const (
	// No error is produced.
	yaml_NO_ERROR yaml_error_type_t = iota

	yaml_MEMORY_ERROR   // Cannot allocate or reallocate a block of memory.
	yaml_READER_ERROR   // Cannot read or decode the input stream.
	yaml_SCANNER_ERROR  // Cannot scan the input stream.
	yaml_PARSER_ERROR   // Cannot parse the input stream.
	yaml_COMPOSER_ERROR // Cannot compose a YAML document.
	yaml_WRITER_ERROR   // Cannot write to the output stream.
	yaml_EMITTER_ERROR  // Cannot emit a YAML stream.
)

// The pointer position.
type yaml_mark_t struct {
	index  int // The position index. NOTE(review): presumably a byte offset into the stream — confirm against the reader.
	line   int // The position line.
	column int // The position column.
}

// Node Styles

type yaml_style_t int8

type yaml_scalar_style_t yaml_style_t

// Scalar styles.
const (
	// Let the emitter choose the style.
	yaml_ANY_SCALAR_STYLE yaml_scalar_style_t = iota

	yaml_PLAIN_SCALAR_STYLE         // The plain scalar style.
	yaml_SINGLE_QUOTED_SCALAR_STYLE // The single-quoted scalar style.
	yaml_DOUBLE_QUOTED_SCALAR_STYLE // The double-quoted scalar style.
	yaml_LITERAL_SCALAR_STYLE       // The literal scalar style.
	yaml_FOLDED_SCALAR_STYLE        // The folded scalar style.
)

type yaml_sequence_style_t yaml_style_t

// Sequence styles.
const (
	// Let the emitter choose the style.
	yaml_ANY_SEQUENCE_STYLE yaml_sequence_style_t = iota

	yaml_BLOCK_SEQUENCE_STYLE // The block sequence style.
	yaml_FLOW_SEQUENCE_STYLE  // The flow sequence style.
)

type yaml_mapping_style_t yaml_style_t

// Mapping styles.
const (
	// Let the emitter choose the style.
	yaml_ANY_MAPPING_STYLE yaml_mapping_style_t = iota

	yaml_BLOCK_MAPPING_STYLE // The block mapping style.
	yaml_FLOW_MAPPING_STYLE  // The flow mapping style.
)
106
// Tokens

type yaml_token_type_t int

// Token types.
const (
	// An empty token.
	yaml_NO_TOKEN yaml_token_type_t = iota

	yaml_STREAM_START_TOKEN // A STREAM-START token.
	yaml_STREAM_END_TOKEN   // A STREAM-END token.

	yaml_VERSION_DIRECTIVE_TOKEN // A VERSION-DIRECTIVE token.
	yaml_TAG_DIRECTIVE_TOKEN     // A TAG-DIRECTIVE token.
	yaml_DOCUMENT_START_TOKEN    // A DOCUMENT-START token.
	yaml_DOCUMENT_END_TOKEN      // A DOCUMENT-END token.

	yaml_BLOCK_SEQUENCE_START_TOKEN // A BLOCK-SEQUENCE-START token.
	yaml_BLOCK_MAPPING_START_TOKEN  // A BLOCK-MAPPING-START token.
	yaml_BLOCK_END_TOKEN            // A BLOCK-END token.

	yaml_FLOW_SEQUENCE_START_TOKEN // A FLOW-SEQUENCE-START token.
	yaml_FLOW_SEQUENCE_END_TOKEN   // A FLOW-SEQUENCE-END token.
	yaml_FLOW_MAPPING_START_TOKEN  // A FLOW-MAPPING-START token.
	yaml_FLOW_MAPPING_END_TOKEN    // A FLOW-MAPPING-END token.

	yaml_BLOCK_ENTRY_TOKEN // A BLOCK-ENTRY token.
	yaml_FLOW_ENTRY_TOKEN  // A FLOW-ENTRY token.
	yaml_KEY_TOKEN         // A KEY token.
	yaml_VALUE_TOKEN       // A VALUE token.

	yaml_ALIAS_TOKEN  // An ALIAS token.
	yaml_ANCHOR_TOKEN // An ANCHOR token.
	yaml_TAG_TOKEN    // A TAG token.
	yaml_SCALAR_TOKEN // A SCALAR token.
)

// tokenTypeNames maps each token type to its identifier name for debugging.
var tokenTypeNames = map[yaml_token_type_t]string{
	yaml_NO_TOKEN:                   "yaml_NO_TOKEN",
	yaml_STREAM_START_TOKEN:         "yaml_STREAM_START_TOKEN",
	yaml_STREAM_END_TOKEN:           "yaml_STREAM_END_TOKEN",
	yaml_VERSION_DIRECTIVE_TOKEN:    "yaml_VERSION_DIRECTIVE_TOKEN",
	yaml_TAG_DIRECTIVE_TOKEN:        "yaml_TAG_DIRECTIVE_TOKEN",
	yaml_DOCUMENT_START_TOKEN:       "yaml_DOCUMENT_START_TOKEN",
	yaml_DOCUMENT_END_TOKEN:         "yaml_DOCUMENT_END_TOKEN",
	yaml_BLOCK_SEQUENCE_START_TOKEN: "yaml_BLOCK_SEQUENCE_START_TOKEN",
	yaml_BLOCK_MAPPING_START_TOKEN:  "yaml_BLOCK_MAPPING_START_TOKEN",
	yaml_BLOCK_END_TOKEN:            "yaml_BLOCK_END_TOKEN",
	yaml_FLOW_SEQUENCE_START_TOKEN:  "yaml_FLOW_SEQUENCE_START_TOKEN",
	yaml_FLOW_SEQUENCE_END_TOKEN:    "yaml_FLOW_SEQUENCE_END_TOKEN",
	yaml_FLOW_MAPPING_START_TOKEN:   "yaml_FLOW_MAPPING_START_TOKEN",
	yaml_FLOW_MAPPING_END_TOKEN:     "yaml_FLOW_MAPPING_END_TOKEN",
	yaml_BLOCK_ENTRY_TOKEN:          "yaml_BLOCK_ENTRY_TOKEN",
	yaml_FLOW_ENTRY_TOKEN:           "yaml_FLOW_ENTRY_TOKEN",
	yaml_KEY_TOKEN:                  "yaml_KEY_TOKEN",
	yaml_VALUE_TOKEN:                "yaml_VALUE_TOKEN",
	yaml_ALIAS_TOKEN:                "yaml_ALIAS_TOKEN",
	yaml_ANCHOR_TOKEN:               "yaml_ANCHOR_TOKEN",
	yaml_TAG_TOKEN:                  "yaml_TAG_TOKEN",
	yaml_SCALAR_TOKEN:               "yaml_SCALAR_TOKEN",
}

// String returns the identifier name of the token type, or
// "<unknown token>" for values outside the defined set.
func (tt yaml_token_type_t) String() string {
	if name, ok := tokenTypeNames[tt]; ok {
		return name
	}
	return "<unknown token>"
}
193
// The token structure. Which fields are meaningful depends on typ, as
// noted on each field below.
type yaml_token_t struct {
	// The token type.
	typ yaml_token_type_t

	// The start/end of the token.
	start_mark, end_mark yaml_mark_t

	// The stream encoding (for yaml_STREAM_START_TOKEN).
	encoding yaml_encoding_t

	// The alias/anchor/scalar value or tag/tag directive handle
	// (for yaml_ALIAS_TOKEN, yaml_ANCHOR_TOKEN, yaml_SCALAR_TOKEN, yaml_TAG_TOKEN, yaml_TAG_DIRECTIVE_TOKEN).
	value []byte

	// The tag suffix (for yaml_TAG_TOKEN).
	suffix []byte

	// The tag directive prefix (for yaml_TAG_DIRECTIVE_TOKEN).
	prefix []byte

	// The scalar style (for yaml_SCALAR_TOKEN).
	style yaml_scalar_style_t

	// The version directive major/minor (for yaml_VERSION_DIRECTIVE_TOKEN).
	major, minor int8
}
221
// Events

type yaml_event_type_t int8

// Event types.
const (
	// An empty event.
	yaml_NO_EVENT yaml_event_type_t = iota

	yaml_STREAM_START_EVENT   // A STREAM-START event.
	yaml_STREAM_END_EVENT     // A STREAM-END event.
	yaml_DOCUMENT_START_EVENT // A DOCUMENT-START event.
	yaml_DOCUMENT_END_EVENT   // A DOCUMENT-END event.
	yaml_ALIAS_EVENT          // An ALIAS event.
	yaml_SCALAR_EVENT         // A SCALAR event.
	yaml_SEQUENCE_START_EVENT // A SEQUENCE-START event.
	yaml_SEQUENCE_END_EVENT   // A SEQUENCE-END event.
	yaml_MAPPING_START_EVENT  // A MAPPING-START event.
	yaml_MAPPING_END_EVENT    // A MAPPING-END event.
)

// eventStrings is indexed by the yaml_event_type_t value, so its entry
// order must stay in sync with the constant block above.
var eventStrings = []string{
	yaml_NO_EVENT:             "none",
	yaml_STREAM_START_EVENT:   "stream start",
	yaml_STREAM_END_EVENT:     "stream end",
	yaml_DOCUMENT_START_EVENT: "document start",
	yaml_DOCUMENT_END_EVENT:   "document end",
	yaml_ALIAS_EVENT:          "alias",
	yaml_SCALAR_EVENT:         "scalar",
	yaml_SEQUENCE_START_EVENT: "sequence start",
	yaml_SEQUENCE_END_EVENT:   "sequence end",
	yaml_MAPPING_START_EVENT:  "mapping start",
	yaml_MAPPING_END_EVENT:    "mapping end",
}

// String returns a human-readable name for the event type.
func (e yaml_event_type_t) String() string {
	// Guard against values outside the table.
	if e < 0 || int(e) >= len(eventStrings) {
		return fmt.Sprintf("unknown event %d", e)
	}
	return eventStrings[e]
}
263
// The event structure.
//
// Only the fields relevant to typ carry data; see the per-field notes for
// which event types use which fields.
type yaml_event_t struct {

	// The event type.
	typ yaml_event_type_t

	// The start and end of the event.
	start_mark, end_mark yaml_mark_t

	// The document encoding (for yaml_STREAM_START_EVENT).
	encoding yaml_encoding_t

	// The version directive (for yaml_DOCUMENT_START_EVENT).
	version_directive *yaml_version_directive_t

	// The list of tag directives (for yaml_DOCUMENT_START_EVENT).
	tag_directives []yaml_tag_directive_t

	// The anchor (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT, yaml_ALIAS_EVENT).
	anchor []byte

	// The tag (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT).
	tag []byte

	// The scalar value (for yaml_SCALAR_EVENT).
	value []byte

	// Is the document start/end indicator implicit, or the tag optional?
	// (for yaml_DOCUMENT_START_EVENT, yaml_DOCUMENT_END_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT, yaml_SCALAR_EVENT).
	implicit bool

	// Is the tag optional for any non-plain style? (for yaml_SCALAR_EVENT).
	quoted_implicit bool

	// The style (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT).
	style yaml_style_t
}
301
302func (e *yaml_event_t) scalar_style() yaml_scalar_style_t { return yaml_scalar_style_t(e.style) }
303func (e *yaml_event_t) sequence_style() yaml_sequence_style_t { return yaml_sequence_style_t(e.style) }
304func (e *yaml_event_t) mapping_style() yaml_mapping_style_t { return yaml_mapping_style_t(e.style) }
305
// Nodes

// The standard YAML 1.1 core tags used when resolving node types.
const (
	yaml_NULL_TAG      = "tag:yaml.org,2002:null"      // The tag !!null with the only possible value: null.
	yaml_BOOL_TAG      = "tag:yaml.org,2002:bool"      // The tag !!bool with the values: true and false.
	yaml_STR_TAG       = "tag:yaml.org,2002:str"       // The tag !!str for string values.
	yaml_INT_TAG       = "tag:yaml.org,2002:int"       // The tag !!int for integer values.
	yaml_FLOAT_TAG     = "tag:yaml.org,2002:float"     // The tag !!float for float values.
	yaml_TIMESTAMP_TAG = "tag:yaml.org,2002:timestamp" // The tag !!timestamp for date and time values.

	yaml_SEQ_TAG = "tag:yaml.org,2002:seq" // The tag !!seq is used to denote sequences.
	yaml_MAP_TAG = "tag:yaml.org,2002:map" // The tag !!map is used to denote mapping.

	// Not in original libyaml.
	yaml_BINARY_TAG = "tag:yaml.org,2002:binary"
	yaml_MERGE_TAG  = "tag:yaml.org,2002:merge"

	yaml_DEFAULT_SCALAR_TAG   = yaml_STR_TAG // The default scalar tag is !!str.
	yaml_DEFAULT_SEQUENCE_TAG = yaml_SEQ_TAG // The default sequence tag is !!seq.
	yaml_DEFAULT_MAPPING_TAG  = yaml_MAP_TAG // The default mapping tag is !!map.
)

// yaml_node_type_t identifies the kind of a document node.
type yaml_node_type_t int

// Node types.
const (
	// An empty node.
	yaml_NO_NODE yaml_node_type_t = iota

	yaml_SCALAR_NODE   // A scalar node.
	yaml_SEQUENCE_NODE // A sequence node.
	yaml_MAPPING_NODE  // A mapping node.
)

// An element of a sequence node: the id (index) of another node in the
// document's nodes slice.
type yaml_node_item_t int

// An element of a mapping node.
type yaml_node_pair_t struct {
	key   int // The key of the element.
	value int // The value of the element.
}
348
// The node structure.
//
// Exactly one of the scalar/sequence/mapping sub-structures is meaningful,
// selected by typ.
type yaml_node_t struct {
	typ yaml_node_type_t // The node type.
	tag []byte           // The node tag.

	// The node data.

	// The scalar parameters (for yaml_SCALAR_NODE).
	scalar struct {
		value  []byte              // The scalar value.
		length int                 // The length of the scalar value.
		style  yaml_scalar_style_t // The scalar style.
	}

	// The sequence parameters (for yaml_SEQUENCE_NODE).
	sequence struct {
		items_data []yaml_node_item_t // The stack of sequence items.
		style      yaml_sequence_style_t // The sequence style.
	}

	// The mapping parameters (for yaml_MAPPING_NODE).
	// NOTE(review): pairs_start/pairs_end/pairs_top mirror libyaml's C
	// pointer-based stack, while pairs_data looks like the Go-side storage —
	// confirm actual usage elsewhere before relying on the pointer fields.
	mapping struct {
		pairs_data  []yaml_node_pair_t // The stack of mapping pairs (key, value).
		pairs_start *yaml_node_pair_t  // The beginning of the stack.
		pairs_end   *yaml_node_pair_t  // The end of the stack.
		pairs_top   *yaml_node_pair_t  // The top of the stack.
		style       yaml_mapping_style_t // The mapping style.
	}

	start_mark yaml_mark_t // The beginning of the node.
	end_mark   yaml_mark_t // The end of the node.

}
382
// The document structure.
type yaml_document_t struct {

	// The document nodes.
	nodes []yaml_node_t

	// The version directive.
	version_directive *yaml_version_directive_t

	// The list of tag directives.
	tag_directives_data  []yaml_tag_directive_t
	tag_directives_start int // The beginning of the tag directives list.
	tag_directives_end   int // The end of the tag directives list.

	// C-style int flags inherited from libyaml rather than bools.
	start_implicit int // Is the document start indicator implicit?
	end_implicit   int // Is the document end indicator implicit?

	// The start/end of the document.
	start_mark, end_mark yaml_mark_t
}
403
// The prototype of a read handler.
//
// The read handler is called when the parser needs to read more bytes from
// the source. The handler should write at most len(buffer) bytes into
// buffer and return the number of bytes written.
//
// This Go port replaces the C libyaml convention (return 1 on success, 0 on
// failure, and size_read == 0 on EOF) with an (n, err) pair shaped like
// io.Reader.Read. NOTE(review): the precise EOF signalling is interpreted
// by the caller in readerc.go — confirm there before relying on it.
type yaml_read_handler_t func(parser *yaml_parser_t, buffer []byte) (n int, err error)
420
// This structure holds information about a potential simple key.
// (In YAML, a simple key is a mapping key that is not introduced by the
// explicit '?' indicator.)
type yaml_simple_key_t struct {
	possible     bool        // Is a simple key possible?
	required     bool        // Is a simple key required?
	token_number int         // The number of the token.
	mark         yaml_mark_t // The position mark.
}

// The states of the parser.
type yaml_parser_state_t int
431
const (
	yaml_PARSE_STREAM_START_STATE yaml_parser_state_t = iota // Expect STREAM-START.

	yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE           // Expect the beginning of an implicit document.
	yaml_PARSE_DOCUMENT_START_STATE                    // Expect DOCUMENT-START.
	yaml_PARSE_DOCUMENT_CONTENT_STATE                  // Expect the content of a document.
	yaml_PARSE_DOCUMENT_END_STATE                      // Expect DOCUMENT-END.
	yaml_PARSE_BLOCK_NODE_STATE                        // Expect a block node.
	yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE // Expect a block node or indentless sequence.
	yaml_PARSE_FLOW_NODE_STATE                         // Expect a flow node.
	yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE        // Expect the first entry of a block sequence.
	yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE              // Expect an entry of a block sequence.
	yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE         // Expect an entry of an indentless sequence.
	yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE           // Expect the first key of a block mapping.
	yaml_PARSE_BLOCK_MAPPING_KEY_STATE                 // Expect a block mapping key.
	yaml_PARSE_BLOCK_MAPPING_VALUE_STATE               // Expect a block mapping value.
	yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE         // Expect the first entry of a flow sequence.
	yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE               // Expect an entry of a flow sequence.
	yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE   // Expect a key of an ordered mapping.
	yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE // Expect a value of an ordered mapping.
	yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE   // Expect the end of an ordered mapping entry.
	yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE            // Expect the first key of a flow mapping.
	yaml_PARSE_FLOW_MAPPING_KEY_STATE                  // Expect a key of a flow mapping.
	yaml_PARSE_FLOW_MAPPING_VALUE_STATE                // Expect a value of a flow mapping.
	yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE          // Expect an empty value of a flow mapping.
	yaml_PARSE_END_STATE                               // Expect nothing.
)
459
460func (ps yaml_parser_state_t) String() string {
461 switch ps {
462 case yaml_PARSE_STREAM_START_STATE:
463 return "yaml_PARSE_STREAM_START_STATE"
464 case yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE:
465 return "yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE"
466 case yaml_PARSE_DOCUMENT_START_STATE:
467 return "yaml_PARSE_DOCUMENT_START_STATE"
468 case yaml_PARSE_DOCUMENT_CONTENT_STATE:
469 return "yaml_PARSE_DOCUMENT_CONTENT_STATE"
470 case yaml_PARSE_DOCUMENT_END_STATE:
471 return "yaml_PARSE_DOCUMENT_END_STATE"
472 case yaml_PARSE_BLOCK_NODE_STATE:
473 return "yaml_PARSE_BLOCK_NODE_STATE"
474 case yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE:
475 return "yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE"
476 case yaml_PARSE_FLOW_NODE_STATE:
477 return "yaml_PARSE_FLOW_NODE_STATE"
478 case yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE:
479 return "yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE"
480 case yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE:
481 return "yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE"
482 case yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE:
483 return "yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE"
484 case yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE:
485 return "yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE"
486 case yaml_PARSE_BLOCK_MAPPING_KEY_STATE:
487 return "yaml_PARSE_BLOCK_MAPPING_KEY_STATE"
488 case yaml_PARSE_BLOCK_MAPPING_VALUE_STATE:
489 return "yaml_PARSE_BLOCK_MAPPING_VALUE_STATE"
490 case yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE:
491 return "yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE"
492 case yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE:
493 return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE"
494 case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE:
495 return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE"
496 case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE:
497 return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE"
498 case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE:
499 return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE"
500 case yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE:
501 return "yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE"
502 case yaml_PARSE_FLOW_MAPPING_KEY_STATE:
503 return "yaml_PARSE_FLOW_MAPPING_KEY_STATE"
504 case yaml_PARSE_FLOW_MAPPING_VALUE_STATE:
505 return "yaml_PARSE_FLOW_MAPPING_VALUE_STATE"
506 case yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE:
507 return "yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE"
508 case yaml_PARSE_END_STATE:
509 return "yaml_PARSE_END_STATE"
510 }
511 return "<unknown parser state>"
512}
513
// This structure holds aliases data: the binding between an anchor name
// and the node id it refers to, plus where the anchor appeared.
type yaml_alias_data_t struct {
	anchor []byte      // The anchor.
	index  int         // The node id.
	mark   yaml_mark_t // The anchor mark.
}
520
// The parser structure.
//
// Combines the state of the reader (raw input plus decoded working buffer),
// the scanner (token queue, indentation and simple-key tracking) and the
// parser proper (state-machine stacks and tag directives) — see the section
// comments below.
//
// All members are internal. Manage the structure using the
// yaml_parser_ family of functions.
type yaml_parser_t struct {

	// Error handling

	error yaml_error_type_t // Error type.

	problem string // Error description.

	// The byte about which the problem occurred.
	problem_offset int
	problem_value  int
	problem_mark   yaml_mark_t

	// The error context.
	context      string
	context_mark yaml_mark_t

	// Reader stuff

	read_handler yaml_read_handler_t // Read handler.

	input_reader io.Reader // File input data.
	input        []byte    // String input data.
	input_pos    int

	eof bool // EOF flag: no more input can be read.

	buffer     []byte // The working buffer.
	buffer_pos int    // The current position of the buffer.

	unread int // The number of unread characters in the buffer.

	raw_buffer     []byte // The raw buffer.
	raw_buffer_pos int    // The current position of the buffer.

	encoding yaml_encoding_t // The input encoding.

	offset int         // The offset of the current position (in bytes).
	mark   yaml_mark_t // The mark of the current position.

	// Scanner stuff

	stream_start_produced bool // Have we started to scan the input stream?
	stream_end_produced   bool // Have we reached the end of the input stream?

	flow_level int // The number of unclosed '[' and '{' indicators.

	tokens          []yaml_token_t // The tokens queue.
	tokens_head     int            // The head of the tokens queue.
	tokens_parsed   int            // The number of tokens fetched from the queue.
	token_available bool           // Does the tokens queue contain a token ready for dequeueing.

	indent  int   // The current indentation level.
	indents []int // The indentation levels stack.

	simple_key_allowed bool                // May a simple key occur at the current position?
	simple_keys        []yaml_simple_key_t // The stack of simple keys.
	simple_keys_by_tok map[int]int         // possible simple_key indexes indexed by token_number

	// Parser stuff

	state          yaml_parser_state_t    // The current parser state.
	states         []yaml_parser_state_t  // The parser states stack.
	marks          []yaml_mark_t          // The stack of marks.
	tag_directives []yaml_tag_directive_t // The list of TAG directives.

	// Dumper stuff

	aliases []yaml_alias_data_t // The alias data.

	document *yaml_document_t // The currently parsed document.
}
597
// Emitter Definitions

// The prototype of a write handler.
//
// The write handler is called when the emitter needs to flush the
// accumulated characters to the output. The handler should write the whole
// of buffer to the output.
//
// This Go port replaces the C libyaml convention (return 1 on success, 0 on
// failure) with a returned error: nil on success, non-nil on failure.
type yaml_write_handler_t func(emitter *yaml_emitter_t, buffer []byte) error

// yaml_emitter_state_t identifies what the emitter's state machine expects
// next.
type yaml_emitter_state_t int

// The emitter states.
const (
	// Expect STREAM-START.
	yaml_EMIT_STREAM_START_STATE yaml_emitter_state_t = iota

	yaml_EMIT_FIRST_DOCUMENT_START_STATE       // Expect the first DOCUMENT-START or STREAM-END.
	yaml_EMIT_DOCUMENT_START_STATE             // Expect DOCUMENT-START or STREAM-END.
	yaml_EMIT_DOCUMENT_CONTENT_STATE           // Expect the content of a document.
	yaml_EMIT_DOCUMENT_END_STATE               // Expect DOCUMENT-END.
	yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE   // Expect the first item of a flow sequence.
	yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE         // Expect an item of a flow sequence.
	yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE     // Expect the first key of a flow mapping.
	yaml_EMIT_FLOW_MAPPING_KEY_STATE           // Expect a key of a flow mapping.
	yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE  // Expect a value for a simple key of a flow mapping.
	yaml_EMIT_FLOW_MAPPING_VALUE_STATE         // Expect a value of a flow mapping.
	yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE  // Expect the first item of a block sequence.
	yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE        // Expect an item of a block sequence.
	yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE    // Expect the first key of a block mapping.
	yaml_EMIT_BLOCK_MAPPING_KEY_STATE          // Expect the key of a block mapping.
	yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE // Expect a value for a simple key of a block mapping.
	yaml_EMIT_BLOCK_MAPPING_VALUE_STATE        // Expect a value of a block mapping.
	yaml_EMIT_END_STATE                        // Expect nothing.
)
641
// The emitter structure.
//
// All members are internal. Manage the structure using the yaml_emitter_
// family of functions.
type yaml_emitter_t struct {

	// Error handling

	error   yaml_error_type_t // Error type.
	problem string            // Error description.

	// Writer stuff

	write_handler yaml_write_handler_t // Write handler.

	output_buffer *[]byte   // String output data.
	output_writer io.Writer // File output data.

	buffer     []byte // The working buffer.
	buffer_pos int    // The current position of the buffer.

	raw_buffer     []byte // The raw buffer.
	raw_buffer_pos int    // The current position of the buffer.

	encoding yaml_encoding_t // The stream encoding.

	// Emitter stuff

	canonical   bool         // If the output is in the canonical style?
	best_indent int          // The number of indentation spaces.
	best_width  int          // The preferred width of the output lines.
	unicode     bool         // Allow unescaped non-ASCII characters?
	line_break  yaml_break_t // The preferred line break.

	state  yaml_emitter_state_t   // The current emitter state.
	states []yaml_emitter_state_t // The stack of states.

	events      []yaml_event_t // The event queue.
	events_head int            // The head of the event queue.

	indents []int // The stack of indentation levels.

	tag_directives []yaml_tag_directive_t // The list of tag directives.

	indent int // The current indentation level.

	flow_level int // The current flow level.

	root_context       bool // Is it the document root context?
	sequence_context   bool // Is it a sequence context?
	mapping_context    bool // Is it a mapping context?
	simple_key_context bool // Is it a simple mapping key context?

	line       int  // The current line.
	column     int  // The current column.
	whitespace bool // If the last character was a whitespace?
	indention  bool // If the last character was an indentation character (' ', '-', '?', ':')?
	open_ended bool // If an explicit document end is required?

	// Anchor analysis.
	anchor_data struct {
		anchor []byte // The anchor value.
		alias  bool   // Is it an alias?
	}

	// Tag analysis.
	tag_data struct {
		handle []byte // The tag handle.
		suffix []byte // The tag suffix.
	}

	// Scalar analysis.
	scalar_data struct {
		value                 []byte              // The scalar value.
		multiline             bool                // Does the scalar contain line breaks?
		flow_plain_allowed    bool                // Can the scalar be expressed in the flow plain style?
		block_plain_allowed   bool                // Can the scalar be expressed in the block plain style?
		single_quoted_allowed bool                // Can the scalar be expressed in the single quoted style?
		block_allowed         bool                // Can the scalar be expressed in the literal or folded styles?
		style                 yaml_scalar_style_t // The output style.
	}

	// Dumper stuff

	opened bool // If the stream was already opened?
	closed bool // If the stream was already closed?

	// The information associated with the document nodes.
	anchors *struct {
		references int  // The number of references.
		anchor     int  // The anchor id.
		serialized bool // If the node has been emitted?
	}

	last_anchor_id int // The last assigned anchor id.

	document *yaml_document_t // The currently emitted document.
}
diff --git a/vendor/gopkg.in/yaml.v2/yamlprivateh.go b/vendor/gopkg.in/yaml.v2/yamlprivateh.go
new file mode 100644
index 0000000..8110ce3
--- /dev/null
+++ b/vendor/gopkg.in/yaml.v2/yamlprivateh.go
@@ -0,0 +1,173 @@
1package yaml
2
const (
	// The size of the input raw buffer.
	input_raw_buffer_size = 512

	// The size of the input buffer.
	// It should be possible to decode the whole raw buffer.
	// (The factor of 3 presumably covers the worst-case expansion when
	// transcoding raw input to UTF-8 — confirm against readerc.go.)
	input_buffer_size = input_raw_buffer_size * 3

	// The size of the output buffer.
	output_buffer_size = 128

	// The size of the output raw buffer.
	// It should be possible to encode the whole output buffer.
	output_raw_buffer_size = (output_buffer_size*2 + 2)

	// The size of other stacks and queues.
	initial_stack_size  = 16
	initial_queue_size  = 16
	initial_string_size = 16
)
23
// is_alpha reports whether b[i] is an ASCII letter, digit, '_', or '-'
// (the characters allowed in YAML anchor/alias names).
func is_alpha(b []byte, i int) bool {
	c := b[i]
	return ('0' <= c && c <= '9') ||
		('A' <= c && c <= 'Z') ||
		('a' <= c && c <= 'z') ||
		c == '_' || c == '-'
}
29
// is_digit reports whether b[i] is an ASCII decimal digit.
func is_digit(b []byte, i int) bool {
	c := b[i]
	return '0' <= c && c <= '9'
}

// as_digit returns the numeric value of the ASCII digit at b[i].
// The result is meaningful only when is_digit(b, i) holds.
func as_digit(b []byte, i int) int {
	d := b[i]
	return int(d) - '0'
}
39
// is_hex reports whether b[i] is an ASCII hexadecimal digit (either case).
func is_hex(b []byte, i int) bool {
	c := b[i]
	return ('0' <= c && c <= '9') ||
		('A' <= c && c <= 'F') ||
		('a' <= c && c <= 'f')
}

// as_hex returns the numeric value of the hexadecimal digit at b[i].
// The result is meaningful only when is_hex(b, i) holds.
func as_hex(b []byte, i int) int {
	switch c := b[i]; {
	case c >= 'a' && c <= 'f':
		return int(c) - 'a' + 10
	case c >= 'A' && c <= 'F':
		return int(c) - 'A' + 10
	default:
		return int(c) - '0'
	}
}
56
// is_ascii reports whether the byte at b[i] is in the 7-bit ASCII range.
func is_ascii(b []byte, i int) bool {
	return b[i] < 0x80
}
61
// is_printable reports whether the UTF-8 character starting at b[i] can be
// emitted without escaping: LF, printable ASCII, or a multi-byte sequence
// for #xA0-#xD7FF / #xE000-#xFFFD excluding #xFEFF (BOM), #xFFFE and #xFFFF.
// Continuation bytes are examined only for lead bytes that require it, so
// the access pattern matches the original expression exactly.
func is_printable(b []byte, i int) bool {
	switch c := b[i]; {
	case c == 0x0A: // LF
		return true
	case c >= 0x20 && c <= 0x7E: // printable ASCII
		return true
	case c == 0xC2: // first byte of #x80-#xBF range; printable from #xA0
		return b[i+1] >= 0xA0
	case c > 0xC2 && c < 0xED: // covers the rest up to #xD7FF
		return true
	case c == 0xED: // #xD000-#xD7FF printable, surrogates #xD800+ are not
		return b[i+1] < 0xA0
	case c == 0xEE: // #xE000-#xEFFF
		return true
	case c == 0xEF: // #xF000-#xFFFD, minus BOM and #xFFFE/#xFFFF
		return !(b[i+1] == 0xBB && b[i+2] == 0xBF) &&
			!(b[i+1] == 0xBF && (b[i+2] == 0xBE || b[i+2] == 0xBF))
	}
	return false
}
74
// is_z reports whether the byte at b[i] is NUL (0x00).
func is_z(b []byte, i int) bool {
	c := b[i]
	return c == 0x00
}
79
// Check if the beginning of the buffer is a BOM (the UTF-8 byte order
// mark EF BB BF).
//
// NOTE(review): the position argument i is unused — the check always looks
// at b[0:3], matching the "beginning of the buffer" wording above. Confirm
// that callers only ever test the buffer start before making it honor i.
func is_bom(b []byte, i int) bool {
	return b[0] == 0xEF && b[1] == 0xBB && b[2] == 0xBF
}
84
// is_space reports whether the byte at b[i] is an ASCII space.
func is_space(b []byte, i int) bool {
	c := b[i]
	return c == ' '
}

// is_tab reports whether the byte at b[i] is a horizontal tab.
func is_tab(b []byte, i int) bool {
	c := b[i]
	return c == '\t'
}
94
// is_blank reports whether the byte at b[i] is a space or a tab.
// Deliberately expanded inline (rather than is_space || is_tab) to keep
// the function trivially inlinable.
func is_blank(b []byte, i int) bool {
	c := b[i]
	return c == ' ' || c == '\t'
}
100
// is_break reports whether the character starting at b[i] is one of the
// YAML line breaks: CR, LF, NEL, LS or PS. Continuation bytes are only
// read when the lead byte requires them.
func is_break(b []byte, i int) bool {
	c := b[i]
	return c == '\r' || // CR (#xD)
		c == '\n' || // LF (#xA)
		(c == 0xC2 && b[i+1] == 0x85) || // NEL (#x85)
		(c == 0xE2 && b[i+1] == 0x80 && (b[i+2] == 0xA8 || b[i+2] == 0xA9)) // LS (#x2028), PS (#x2029)
}

// is_crlf reports whether b[i:] starts with the CR LF pair.
func is_crlf(b []byte, i int) bool {
	if b[i] != '\r' {
		return false
	}
	return b[i+1] == '\n'
}
113
// is_breakz reports whether the character starting at b[i] is a line break
// (CR, LF, NEL, LS, PS) or NUL. Deliberately expanded inline (rather than
// is_break(b, i) || is_z(b, i)) so the function stays cheap to inline.
func is_breakz(b []byte, i int) bool {
	c := b[i]
	return c == 0 || // NUL
		c == '\r' || // CR (#xD)
		c == '\n' || // LF (#xA)
		(c == 0xC2 && b[i+1] == 0x85) || // NEL (#x85)
		(c == 0xE2 && b[i+1] == 0x80 && (b[i+2] == 0xA8 || b[i+2] == 0xA9)) // LS (#x2028), PS (#x2029)
}
126
// is_spacez reports whether the character starting at b[i] is a space, a
// line break (CR, LF, NEL, LS, PS) or NUL. Deliberately expanded inline
// (rather than is_space(b, i) || is_breakz(b, i)) so the function stays
// cheap to inline.
func is_spacez(b []byte, i int) bool {
	c := b[i]
	return c == ' ' || // space
		c == 0 || // NUL
		c == '\r' || // CR (#xD)
		c == '\n' || // LF (#xA)
		(c == 0xC2 && b[i+1] == 0x85) || // NEL (#x85)
		(c == 0xE2 && b[i+1] == 0x80 && (b[i+2] == 0xA8 || b[i+2] == 0xA9)) // LS (#x2028), PS (#x2029)
}
140
// is_blankz reports whether the character starting at b[i] is a space, a
// tab, a line break (CR, LF, NEL, LS, PS) or NUL. Deliberately expanded
// inline (rather than is_blank(b, i) || is_breakz(b, i)) so the function
// stays cheap to inline.
func is_blankz(b []byte, i int) bool {
	c := b[i]
	return c == ' ' || c == '\t' || // blank
		c == 0 || // NUL
		c == '\r' || // CR (#xD)
		c == '\n' || // LF (#xA)
		(c == 0xC2 && b[i+1] == 0x85) || // NEL (#x85)
		(c == 0xE2 && b[i+1] == 0x80 && (b[i+2] == 0xA8 || b[i+2] == 0xA9)) // LS (#x2028), PS (#x2029)
}
154
// width returns the byte length of the UTF-8 sequence whose leading byte
// is b: 1-4 for valid lead bytes, 0 for continuation bytes (10xxxxxx) and
// invalid lead bytes (11111xxx).
//
// Don't replace these tests by a switch without first confirming that the
// switch version is still being inlined.
func width(b byte) int {
	// The four bit-mask tests are mutually exclusive, so they may run in
	// any order; the longest sequences are checked first here.
	if b&0xF8 == 0xF0 {
		return 4 // 11110xxx: 4-byte sequence
	}
	if b&0xF0 == 0xE0 {
		return 3 // 1110xxxx: 3-byte sequence
	}
	if b&0xE0 == 0xC0 {
		return 2 // 110xxxxx: 2-byte sequence
	}
	if b&0x80 == 0x00 {
		return 1 // 0xxxxxxx: single-byte (ASCII)
	}
	return 0 // continuation or invalid lead byte
}