diff --git a/.github/workflows/go.yaml b/.github/workflows/go.yaml new file mode 100644 index 00000000..d2bb00b0 --- /dev/null +++ b/.github/workflows/go.yaml @@ -0,0 +1,61 @@ +--- +name: Go +on: [push, pull_request] +jobs: + test: + name: Test + runs-on: ubuntu-20.04 + strategy: + fail-fast: false + matrix: + go: + - "1.5" + - "1.6" + - "1.7" + - "1.8" + - "1.9" + - "1.10" + - "1.11" + - "1.12" + - "1.13" + - "1.14" + - "1.15" + - "1.16.0-beta1" + - "tip" + env: + GOPATH: ${{ github.workspace }}/go + steps: + - name: Check out code into the Go module directory + uses: actions/checkout@v2 + with: + path: ${{ github.workspace }}/go/src/gopkg.in/yaml.v3 + - name: Set up Go ${{ matrix.go }} + if: matrix.go != 'tip' + uses: actions/setup-go@v2 + with: + go-version: ${{ matrix.go }} + stable: false + - name: Set up Go ${{ matrix.go }} + if: matrix.go == 'tip' + run: | + export GOROOT_BOOTSTRAP=`go env GOROOT` + export GOROOT=$HOME/gotip + mkdir $HOME/gotip + cd $HOME/gotip + + curl -s 'https://go.googlesource.com/go/+/refs/heads/master?format=JSON' | awk '/"commit"/{print substr($2,2,40);exit}' >HEAD + awk '{printf("gotip-%s",substr($0,0,7))}'
VERSION + + curl -s -o go.tar.gz https://go.googlesource.com/go/+archive/`cat HEAD`.tar.gz + tar xfz go.tar.gz + + cd src + bash make.bash + + echo "GOROOT=$GOROOT" >> $GITHUB_ENV + echo "$GOROOT/bin" >> $GITHUB_PATH + - run: go version + - run: go get -t ./... + working-directory: ${{ github.workspace }}/go/src/gopkg.in/yaml.v3 + - run: go test . + working-directory: ${{ github.workspace }}/go/src/gopkg.in/yaml.v3 diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 9f556934..00000000 --- a/.travis.yml +++ /dev/null @@ -1,12 +0,0 @@ -language: go - -go: - - 1.4 - - 1.5 - - 1.6 - - 1.7 - - 1.8 - - 1.9 - - tip - -go_import_path: gopkg.in/yaml.v2 diff --git a/LICENSE b/LICENSE index 8dada3ed..2683e4bb 100644 --- a/LICENSE +++ b/LICENSE @@ -1,201 +1,50 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION +This project is covered by two different licenses: MIT and Apache. - 1. Definitions. +#### MIT License #### - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. +The following files were ported to Go from C files of libyaml, and thus +are still covered by their original MIT license, with the additional +copyright staring in 2011 when the project was ported over: - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. + apic.go emitterc.go parserc.go readerc.go scannerc.go + writerc.go yamlh.go yamlprivateh.go - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. 
For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. +Copyright (c) 2006-2010 Kirill Simonov +Copyright (c) 2006-2011 Kirill Simonov - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
- "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). +### Apache License ### - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. +All the remaining project files are covered by the Apache license: - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." +Copyright (c) 2011-2019 Canonical Ltd - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. 
+Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. + http://www.apache.org/licenses/LICENSE-2.0 - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. 
- - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright {yyyy} {name of copyright owner} - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/LICENSE.libyaml b/LICENSE.libyaml deleted file mode 100644 index 8da58fbf..00000000 --- a/LICENSE.libyaml +++ /dev/null @@ -1,31 +0,0 @@ -The following files were ported to Go from C files of libyaml, and thus -are still covered by their original copyright and license: - - apic.go - emitterc.go - parserc.go - readerc.go - scannerc.go - writerc.go - yamlh.go - yamlprivateh.go - -Copyright (c) 2006 Kirill Simonov - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/README.md b/README.md index b50c6e87..08eb1bab 100644 --- a/README.md +++ b/README.md @@ -12,7 +12,23 @@ C library to parse and generate YAML data quickly and reliably. Compatibility ------------- -The yaml package supports most of YAML 1.1 and 1.2, including support for +The yaml package supports most of YAML 1.2, but preserves some behavior +from 1.1 for backwards compatibility. + +Specifically, as of v3 of the yaml package: + + - YAML 1.1 bools (_yes/no, on/off_) are supported as long as they are being + decoded into a typed bool value. Otherwise they behave as a string. Booleans + in YAML 1.2 are _true/false_ only. + - Octals encode and decode as _0777_ per YAML 1.1, rather than _0o777_ + as specified in YAML 1.2, because most parsers still use the old format. + Octals in the _0o777_ format are supported though, so new files work. + - Does not support base-60 floats. These are gone from YAML 1.2, and were + actually never supported by this package as it's clearly a poor choice. + +and offers backwards +compatibility with YAML 1.1 in some cases. +1.2, including support for anchors, tags, map merging, etc. Multi-document unmarshalling is not yet implemented, and base-60 floats from YAML 1.1 are purposefully not supported since they're a poor design and are gone in YAML 1.2. @@ -20,29 +36,30 @@ supported since they're a poor design and are gone in YAML 1.2. Installation and usage ---------------------- -The import path for the package is *gopkg.in/yaml.v2*. 
+The import path for the package is *gopkg.in/yaml.v3*. To install it, run: - go get gopkg.in/yaml.v2 + go get gopkg.in/yaml.v3 API documentation ----------------- If opened in a browser, the import path itself leads to the API documentation: - * [https://gopkg.in/yaml.v2](https://gopkg.in/yaml.v2) + - [https://gopkg.in/yaml.v3](https://gopkg.in/yaml.v3) API stability ------------- -The package API for yaml v2 will remain stable as described in [gopkg.in](https://gopkg.in). +The package API for yaml v3 will remain stable as described in [gopkg.in](https://gopkg.in). License ------- -The yaml package is licensed under the Apache License 2.0. Please see the LICENSE file for details. +The yaml package is licensed under the MIT and Apache License 2.0 licenses. +Please see the LICENSE file for details. Example @@ -55,7 +72,7 @@ import ( "fmt" "log" - "gopkg.in/yaml.v2" + "gopkg.in/yaml.v3" ) var data = ` diff --git a/apic.go b/apic.go index 1f7e87e6..ae7d049f 100644 --- a/apic.go +++ b/apic.go @@ -1,3 +1,25 @@ +// +// Copyright (c) 2011-2019 Canonical Ltd +// Copyright (c) 2006-2010 Kirill Simonov +// +// Permission is hereby granted, free of charge, to any person obtaining a copy of +// this software and associated documentation files (the "Software"), to deal in +// the Software without restriction, including without limitation the rights to +// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +// of the Software, and to permit persons to whom the Software is furnished to do +// so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + package yaml import ( @@ -86,6 +108,7 @@ func yaml_emitter_initialize(emitter *yaml_emitter_t) { raw_buffer: make([]byte, 0, output_raw_buffer_size), states: make([]yaml_emitter_state_t, 0, initial_stack_size), events: make([]yaml_event_t, 0, initial_queue_size), + best_width: -1, } } @@ -138,7 +161,7 @@ func yaml_emitter_set_canonical(emitter *yaml_emitter_t, canonical bool) { emitter.canonical = canonical } -//// Set the indentation increment. +// Set the indentation increment. func yaml_emitter_set_indent(emitter *yaml_emitter_t, indent int) { if indent < 2 || indent > 9 { indent = 2 @@ -288,29 +311,14 @@ func yaml_document_end_event_initialize(event *yaml_event_t, implicit bool) { } } -///* -// * Create ALIAS. -// */ -// -//YAML_DECLARE(int) -//yaml_alias_event_initialize(event *yaml_event_t, anchor *yaml_char_t) -//{ -// mark yaml_mark_t = { 0, 0, 0 } -// anchor_copy *yaml_char_t = NULL -// -// assert(event) // Non-NULL event object is expected. -// assert(anchor) // Non-NULL anchor is expected. -// -// if (!yaml_check_utf8(anchor, strlen((char *)anchor))) return 0 -// -// anchor_copy = yaml_strdup(anchor) -// if (!anchor_copy) -// return 0 -// -// ALIAS_EVENT_INIT(*event, anchor_copy, mark, mark) -// -// return 1 -//} +// Create ALIAS. +func yaml_alias_event_initialize(event *yaml_event_t, anchor []byte) bool { + *event = yaml_event_t{ + typ: yaml_ALIAS_EVENT, + anchor: anchor, + } + return true +} // Create SCALAR. 
func yaml_scalar_event_initialize(event *yaml_event_t, anchor, tag, value []byte, plain_implicit, quoted_implicit bool, style yaml_scalar_style_t) bool { diff --git a/decode.go b/decode.go index e4e56e28..0173b698 100644 --- a/decode.go +++ b/decode.go @@ -1,3 +1,18 @@ +// +// Copyright (c) 2011-2019 Canonical Ltd +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + package yaml import ( @@ -11,34 +26,16 @@ import ( "time" ) -const ( - documentNode = 1 << iota - mappingNode - sequenceNode - scalarNode - aliasNode -) - -type node struct { - kind int - line, column int - tag string - // For an alias node, alias holds the resolved alias. - alias *node - value string - implicit bool - children []*node - anchors map[string]*node -} - // ---------------------------------------------------------------------------- // Parser, produces a node tree out of a libyaml event stream. 
type parser struct { parser yaml_parser_t event yaml_event_t - doc *node + doc *Node + anchors map[string]*Node doneInit bool + textless bool } func newParser(b []byte) *parser { @@ -66,6 +63,7 @@ func (p *parser) init() { if p.doneInit { return } + p.anchors = make(map[string]*Node) p.expect(yaml_STREAM_START_EVENT) p.doneInit = true } @@ -102,7 +100,10 @@ func (p *parser) peek() yaml_event_type_t { if p.event.typ != yaml_NO_EVENT { return p.event.typ } - if !yaml_parser_parse(&p.parser, &p.event) { + // It's curious choice from the underlying API to generally return a + // positive result on success, but on this case return true in an error + // scenario. This was the source of bugs in the past (issue #666). + if !yaml_parser_parse(&p.parser, &p.event) || p.parser.error != yaml_NO_ERROR { p.fail() } return p.event.typ @@ -111,14 +112,18 @@ func (p *parser) peek() yaml_event_type_t { func (p *parser) fail() { var where string var line int - if p.parser.problem_mark.line != 0 { + if p.parser.context_mark.line != 0 { + line = p.parser.context_mark.line + // Scanner errors don't iterate line before returning error + if p.parser.error == yaml_SCANNER_ERROR { + line++ + } + } else if p.parser.problem_mark.line != 0 { line = p.parser.problem_mark.line // Scanner errors don't iterate line before returning error if p.parser.error == yaml_SCANNER_ERROR { line++ } - } else if p.parser.context_mark.line != 0 { - line = p.parser.context_mark.line } if line != 0 { where = "line " + strconv.Itoa(line) + ": " @@ -132,13 +137,14 @@ func (p *parser) fail() { failf("%s%s", where, msg) } -func (p *parser) anchor(n *node, anchor []byte) { +func (p *parser) anchor(n *Node, anchor []byte) { if anchor != nil { - p.doc.anchors[string(anchor)] = n + n.Anchor = string(anchor) + p.anchors[n.Anchor] = n } } -func (p *parser) parse() *node { +func (p *parser) parse() *Node { p.init() switch p.peek() { case yaml_SCALAR_EVENT: @@ -154,67 +160,148 @@ func (p *parser) parse() *node { case 
yaml_STREAM_END_EVENT: // Happens when attempting to decode an empty buffer. return nil + case yaml_TAIL_COMMENT_EVENT: + panic("internal error: unexpected tail comment event (please report)") default: - panic("attempted to parse unknown event: " + p.event.typ.String()) + panic("internal error: attempted to parse unknown event (please report): " + p.event.typ.String()) } } -func (p *parser) node(kind int) *node { - return &node{ - kind: kind, - line: p.event.start_mark.line, - column: p.event.start_mark.column, +func (p *parser) node(kind Kind, defaultTag, tag, value string) *Node { + var style Style + if tag != "" && tag != "!" { + tag = shortTag(tag) + style = TaggedStyle + } else if defaultTag != "" { + tag = defaultTag + } else if kind == ScalarNode { + tag, _ = resolve("", value) + } + n := &Node{ + Kind: kind, + Tag: tag, + Value: value, + Style: style, } + if !p.textless { + n.Line = p.event.start_mark.line + 1 + n.Column = p.event.start_mark.column + 1 + n.HeadComment = string(p.event.head_comment) + n.LineComment = string(p.event.line_comment) + n.FootComment = string(p.event.foot_comment) + } + return n } -func (p *parser) document() *node { - n := p.node(documentNode) - n.anchors = make(map[string]*node) +func (p *parser) parseChild(parent *Node) *Node { + child := p.parse() + parent.Content = append(parent.Content, child) + return child +} + +func (p *parser) document() *Node { + n := p.node(DocumentNode, "", "", "") p.doc = n p.expect(yaml_DOCUMENT_START_EVENT) - n.children = append(n.children, p.parse()) + p.parseChild(n) + if p.peek() == yaml_DOCUMENT_END_EVENT { + n.FootComment = string(p.event.foot_comment) + } p.expect(yaml_DOCUMENT_END_EVENT) return n } -func (p *parser) alias() *node { - n := p.node(aliasNode) - n.value = string(p.event.anchor) - n.alias = p.doc.anchors[n.value] - if n.alias == nil { - failf("unknown anchor '%s' referenced", n.value) +func (p *parser) alias() *Node { + n := p.node(AliasNode, "", "", string(p.event.anchor)) + 
n.Alias = p.anchors[n.Value] + if n.Alias == nil { + failf("unknown anchor '%s' referenced", n.Value) } p.expect(yaml_ALIAS_EVENT) return n } -func (p *parser) scalar() *node { - n := p.node(scalarNode) - n.value = string(p.event.value) - n.tag = string(p.event.tag) - n.implicit = p.event.implicit +func (p *parser) scalar() *Node { + var parsedStyle = p.event.scalar_style() + var nodeStyle Style + switch { + case parsedStyle&yaml_DOUBLE_QUOTED_SCALAR_STYLE != 0: + nodeStyle = DoubleQuotedStyle + case parsedStyle&yaml_SINGLE_QUOTED_SCALAR_STYLE != 0: + nodeStyle = SingleQuotedStyle + case parsedStyle&yaml_LITERAL_SCALAR_STYLE != 0: + nodeStyle = LiteralStyle + case parsedStyle&yaml_FOLDED_SCALAR_STYLE != 0: + nodeStyle = FoldedStyle + } + var nodeValue = string(p.event.value) + var nodeTag = string(p.event.tag) + var defaultTag string + if nodeStyle == 0 { + if nodeValue == "<<" { + defaultTag = mergeTag + } + } else { + defaultTag = strTag + } + n := p.node(ScalarNode, defaultTag, nodeTag, nodeValue) + n.Style |= nodeStyle p.anchor(n, p.event.anchor) p.expect(yaml_SCALAR_EVENT) return n } -func (p *parser) sequence() *node { - n := p.node(sequenceNode) +func (p *parser) sequence() *Node { + n := p.node(SequenceNode, seqTag, string(p.event.tag), "") + if p.event.sequence_style()&yaml_FLOW_SEQUENCE_STYLE != 0 { + n.Style |= FlowStyle + } p.anchor(n, p.event.anchor) p.expect(yaml_SEQUENCE_START_EVENT) for p.peek() != yaml_SEQUENCE_END_EVENT { - n.children = append(n.children, p.parse()) + p.parseChild(n) } + n.LineComment = string(p.event.line_comment) + n.FootComment = string(p.event.foot_comment) p.expect(yaml_SEQUENCE_END_EVENT) return n } -func (p *parser) mapping() *node { - n := p.node(mappingNode) +func (p *parser) mapping() *Node { + n := p.node(MappingNode, mapTag, string(p.event.tag), "") + block := true + if p.event.mapping_style()&yaml_FLOW_MAPPING_STYLE != 0 { + block = false + n.Style |= FlowStyle + } p.anchor(n, p.event.anchor) 
p.expect(yaml_MAPPING_START_EVENT) for p.peek() != yaml_MAPPING_END_EVENT { - n.children = append(n.children, p.parse(), p.parse()) + k := p.parseChild(n) + if block && k.FootComment != "" { + // Must be a foot comment for the prior value when being dedented. + if len(n.Content) > 2 { + n.Content[len(n.Content)-3].FootComment = k.FootComment + k.FootComment = "" + } + } + v := p.parseChild(n) + if k.FootComment == "" && v.FootComment != "" { + k.FootComment = v.FootComment + v.FootComment = "" + } + if p.peek() == yaml_TAIL_COMMENT_EVENT { + if k.FootComment == "" { + k.FootComment = string(p.event.foot_comment) + } + p.expect(yaml_TAIL_COMMENT_EVENT) + } + } + n.LineComment = string(p.event.line_comment) + n.FootComment = string(p.event.foot_comment) + if n.Style&FlowStyle == 0 && n.FootComment != "" && len(n.Content) > 1 { + n.Content[len(n.Content)-2].FootComment = n.FootComment + n.FootComment = "" } p.expect(yaml_MAPPING_END_EVENT) return n @@ -224,44 +311,70 @@ func (p *parser) mapping() *node { // Decoder, unmarshals a node into a provided value. 
type decoder struct { - doc *node - aliases map[*node]bool - mapType reflect.Type + doc *Node + aliases map[*Node]bool terrors []string - strict bool + + stringMapType reflect.Type + generalMapType reflect.Type + + knownFields bool + uniqueKeys bool + decodeCount int + aliasCount int + aliasDepth int + + mergedFields map[interface{}]bool } var ( - mapItemType = reflect.TypeOf(MapItem{}) + nodeType = reflect.TypeOf(Node{}) durationType = reflect.TypeOf(time.Duration(0)) - defaultMapType = reflect.TypeOf(map[interface{}]interface{}{}) - ifaceType = defaultMapType.Elem() + stringMapType = reflect.TypeOf(map[string]interface{}{}) + generalMapType = reflect.TypeOf(map[interface{}]interface{}{}) + ifaceType = generalMapType.Elem() timeType = reflect.TypeOf(time.Time{}) ptrTimeType = reflect.TypeOf(&time.Time{}) ) -func newDecoder(strict bool) *decoder { - d := &decoder{mapType: defaultMapType, strict: strict} - d.aliases = make(map[*node]bool) +func newDecoder() *decoder { + d := &decoder{ + stringMapType: stringMapType, + generalMapType: generalMapType, + uniqueKeys: true, + } + d.aliases = make(map[*Node]bool) return d } -func (d *decoder) terror(n *node, tag string, out reflect.Value) { - if n.tag != "" { - tag = n.tag +func (d *decoder) terror(n *Node, tag string, out reflect.Value) { + if n.Tag != "" { + tag = n.Tag } - value := n.value - if tag != yaml_SEQ_TAG && tag != yaml_MAP_TAG { + value := n.Value + if tag != seqTag && tag != mapTag { if len(value) > 10 { value = " `" + value[:7] + "...`" } else { value = " `" + value + "`" } } - d.terrors = append(d.terrors, fmt.Sprintf("line %d: cannot unmarshal %s%s into %s", n.line+1, shortTag(tag), value, out.Type())) + d.terrors = append(d.terrors, fmt.Sprintf("line %d: cannot unmarshal %s%s into %s", n.Line, shortTag(tag), value, out.Type())) } -func (d *decoder) callUnmarshaler(n *node, u Unmarshaler) (good bool) { +func (d *decoder) callUnmarshaler(n *Node, u Unmarshaler) (good bool) { + err := u.UnmarshalYAML(n) + 
if e, ok := err.(*TypeError); ok { + d.terrors = append(d.terrors, e.Errors...) + return false + } + if err != nil { + fail(err) + } + return true +} + +func (d *decoder) callObsoleteUnmarshaler(n *Node, u obsoleteUnmarshaler) (good bool) { terrlen := len(d.terrors) err := u.UnmarshalYAML(func(v interface{}) (err error) { defer handleErr(&err) @@ -290,8 +403,8 @@ func (d *decoder) callUnmarshaler(n *node, u Unmarshaler) (good bool) { // its types unmarshalled appropriately. // // If n holds a null value, prepare returns before doing anything. -func (d *decoder) prepare(n *node, out reflect.Value) (newout reflect.Value, unmarshaled, good bool) { - if n.tag == yaml_NULL_TAG || n.kind == scalarNode && n.tag == "" && (n.value == "null" || n.value == "~" || n.value == "" && n.implicit) { +func (d *decoder) prepare(n *Node, out reflect.Value) (newout reflect.Value, unmarshaled, good bool) { + if n.ShortTag() == nullTag { return out, false, false } again := true @@ -305,55 +418,127 @@ func (d *decoder) prepare(n *node, out reflect.Value) (newout reflect.Value, unm again = true } if out.CanAddr() { - if u, ok := out.Addr().Interface().(Unmarshaler); ok { + outi := out.Addr().Interface() + if u, ok := outi.(Unmarshaler); ok { good = d.callUnmarshaler(n, u) return out, true, good } + if u, ok := outi.(obsoleteUnmarshaler); ok { + good = d.callObsoleteUnmarshaler(n, u) + return out, true, good + } } } return out, false, false } -func (d *decoder) unmarshal(n *node, out reflect.Value) (good bool) { - switch n.kind { - case documentNode: +func (d *decoder) fieldByIndex(n *Node, v reflect.Value, index []int) (field reflect.Value) { + if n.ShortTag() == nullTag { + return reflect.Value{} + } + for _, num := range index { + for { + if v.Kind() == reflect.Ptr { + if v.IsNil() { + v.Set(reflect.New(v.Type().Elem())) + } + v = v.Elem() + continue + } + break + } + v = v.Field(num) + } + return v +} + +const ( + // 400,000 decode operations is ~500kb of dense object declarations, or + 
// ~5kb of dense object declarations with 10000% alias expansion + alias_ratio_range_low = 400000 + + // 4,000,000 decode operations is ~5MB of dense object declarations, or + // ~4.5MB of dense object declarations with 10% alias expansion + alias_ratio_range_high = 4000000 + + // alias_ratio_range is the range over which we scale allowed alias ratios + alias_ratio_range = float64(alias_ratio_range_high - alias_ratio_range_low) +) + +func allowedAliasRatio(decodeCount int) float64 { + switch { + case decodeCount <= alias_ratio_range_low: + // allow 99% to come from alias expansion for small-to-medium documents + return 0.99 + case decodeCount >= alias_ratio_range_high: + // allow 10% to come from alias expansion for very large documents + return 0.10 + default: + // scale smoothly from 99% down to 10% over the range. + // this maps to 396,000 - 400,000 allowed alias-driven decodes over the range. + // 400,000 decode operations is ~100MB of allocations in worst-case scenarios (single-item maps). 
+ return 0.99 - 0.89*(float64(decodeCount-alias_ratio_range_low)/alias_ratio_range) + } +} + +func (d *decoder) unmarshal(n *Node, out reflect.Value) (good bool) { + d.decodeCount++ + if d.aliasDepth > 0 { + d.aliasCount++ + } + if d.aliasCount > 100 && d.decodeCount > 1000 && float64(d.aliasCount)/float64(d.decodeCount) > allowedAliasRatio(d.decodeCount) { + failf("document contains excessive aliasing") + } + if out.Type() == nodeType { + out.Set(reflect.ValueOf(n).Elem()) + return true + } + switch n.Kind { + case DocumentNode: return d.document(n, out) - case aliasNode: + case AliasNode: return d.alias(n, out) } out, unmarshaled, good := d.prepare(n, out) if unmarshaled { return good } - switch n.kind { - case scalarNode: + switch n.Kind { + case ScalarNode: good = d.scalar(n, out) - case mappingNode: + case MappingNode: good = d.mapping(n, out) - case sequenceNode: + case SequenceNode: good = d.sequence(n, out) + case 0: + if n.IsZero() { + return d.null(out) + } + fallthrough default: - panic("internal error: unknown node kind: " + strconv.Itoa(n.kind)) + failf("cannot decode node with unknown kind %d", n.Kind) } return good } -func (d *decoder) document(n *node, out reflect.Value) (good bool) { - if len(n.children) == 1 { +func (d *decoder) document(n *Node, out reflect.Value) (good bool) { + if len(n.Content) == 1 { d.doc = n - d.unmarshal(n.children[0], out) + d.unmarshal(n.Content[0], out) return true } return false } -func (d *decoder) alias(n *node, out reflect.Value) (good bool) { +func (d *decoder) alias(n *Node, out reflect.Value) (good bool) { if d.aliases[n] { // TODO this could actually be allowed in some circumstances. 
- failf("anchor '%s' value contains itself", n.value) + failf("anchor '%s' value contains itself", n.Value) } d.aliases[n] = true - good = d.unmarshal(n.alias, out) + d.aliasDepth++ + good = d.unmarshal(n.Alias, out) + d.aliasDepth-- delete(d.aliases, n) return good } @@ -366,15 +551,26 @@ func resetMap(out reflect.Value) { } } -func (d *decoder) scalar(n *node, out reflect.Value) bool { +func (d *decoder) null(out reflect.Value) bool { + if out.CanAddr() { + switch out.Kind() { + case reflect.Interface, reflect.Ptr, reflect.Map, reflect.Slice: + out.Set(reflect.Zero(out.Type())) + return true + } + } + return false +} + +func (d *decoder) scalar(n *Node, out reflect.Value) bool { var tag string var resolved interface{} - if n.tag == "" && !n.implicit { - tag = yaml_STR_TAG - resolved = n.value + if n.indicatedString() { + tag = strTag + resolved = n.Value } else { - tag, resolved = resolve(n.tag, n.value) - if tag == yaml_BINARY_TAG { + tag, resolved = resolve(n.Tag, n.Value) + if tag == binaryTag { data, err := base64.StdEncoding.DecodeString(resolved.(string)) if err != nil { failf("!!binary value contains invalid base64 data") @@ -383,12 +579,7 @@ func (d *decoder) scalar(n *node, out reflect.Value) bool { } } if resolved == nil { - if out.Kind() == reflect.Map && !out.CanAddr() { - resetMap(out) - } else { - out.Set(reflect.Zero(out.Type())) - } - return true + return d.null(out) } if resolvedv := reflect.ValueOf(resolved); out.Type() == resolvedv.Type() { // We've resolved to exactly the type we want, so use that. @@ -401,13 +592,13 @@ func (d *decoder) scalar(n *node, out reflect.Value) bool { u, ok := out.Addr().Interface().(encoding.TextUnmarshaler) if ok { var text []byte - if tag == yaml_BINARY_TAG { + if tag == binaryTag { text = []byte(resolved.(string)) } else { // We let any value be unmarshaled into TextUnmarshaler. // That might be more lax than we'd like, but the // TextUnmarshaler itself should bowl out any dubious values. 
- text = []byte(n.value) + text = []byte(n.Value) } err := u.UnmarshalText(text) if err != nil { @@ -418,47 +609,37 @@ func (d *decoder) scalar(n *node, out reflect.Value) bool { } switch out.Kind() { case reflect.String: - if tag == yaml_BINARY_TAG { + if tag == binaryTag { out.SetString(resolved.(string)) return true } - if resolved != nil { - out.SetString(n.value) - return true - } + out.SetString(n.Value) + return true case reflect.Interface: - if resolved == nil { - out.Set(reflect.Zero(out.Type())) - } else if tag == yaml_TIMESTAMP_TAG { - // It looks like a timestamp but for backward compatibility - // reasons we set it as a string, so that code that unmarshals - // timestamp-like values into interface{} will continue to - // see a string and not a time.Time. - // TODO(v3) Drop this. - out.Set(reflect.ValueOf(n.value)) - } else { - out.Set(reflect.ValueOf(resolved)) - } + out.Set(reflect.ValueOf(resolved)) return true case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + // This used to work in v2, but it's very unfriendly. 
+ isDuration := out.Type() == durationType + switch resolved := resolved.(type) { case int: - if !out.OverflowInt(int64(resolved)) { + if !isDuration && !out.OverflowInt(int64(resolved)) { out.SetInt(int64(resolved)) return true } case int64: - if !out.OverflowInt(resolved) { + if !isDuration && !out.OverflowInt(resolved) { out.SetInt(resolved) return true } case uint64: - if resolved <= math.MaxInt64 && !out.OverflowInt(int64(resolved)) { + if !isDuration && resolved <= math.MaxInt64 && !out.OverflowInt(int64(resolved)) { out.SetInt(int64(resolved)) return true } case float64: - if resolved <= math.MaxInt64 && !out.OverflowInt(int64(resolved)) { + if !isDuration && resolved <= math.MaxInt64 && !out.OverflowInt(int64(resolved)) { out.SetInt(int64(resolved)) return true } @@ -499,6 +680,17 @@ func (d *decoder) scalar(n *node, out reflect.Value) bool { case bool: out.SetBool(resolved) return true + case string: + // This offers some compatibility with the 1.1 spec (https://yaml.org/type/bool.html). + // It only works if explicitly attempting to unmarshal into a typed bool value. + switch resolved { + case "y", "Y", "yes", "Yes", "YES", "on", "On", "ON": + out.SetBool(true) + return true + case "n", "N", "no", "No", "NO", "off", "Off", "OFF": + out.SetBool(false) + return true + } } case reflect.Float32, reflect.Float64: switch resolved := resolved.(type) { @@ -521,13 +713,7 @@ func (d *decoder) scalar(n *node, out reflect.Value) bool { return true } case reflect.Ptr: - if out.Type().Elem() == reflect.TypeOf(resolved) { - // TODO DOes this make sense? When is out a Ptr except when decoding a nil value? 
- elem := reflect.New(out.Type().Elem()) - elem.Elem().Set(reflect.ValueOf(resolved)) - out.Set(elem) - return true - } + panic("yaml internal error: please report the issue") } d.terror(n, tag, out) return false @@ -540,8 +726,8 @@ func settableValueOf(i interface{}) reflect.Value { return sv } -func (d *decoder) sequence(n *node, out reflect.Value) (good bool) { - l := len(n.children) +func (d *decoder) sequence(n *Node, out reflect.Value) (good bool) { + l := len(n.Content) var iface reflect.Value switch out.Kind() { @@ -556,7 +742,7 @@ func (d *decoder) sequence(n *node, out reflect.Value) (good bool) { iface = out out = settableValueOf(make([]interface{}, l)) default: - d.terror(n, yaml_SEQ_TAG, out) + d.terror(n, seqTag, out) return false } et := out.Type().Elem() @@ -564,7 +750,7 @@ func (d *decoder) sequence(n *node, out reflect.Value) (good bool) { j := 0 for i := 0; i < l; i++ { e := reflect.New(et).Elem() - if ok := d.unmarshal(n.children[i], e); ok { + if ok := d.unmarshal(n.Content[i], e); ok { out.Index(j).Set(e) j++ } @@ -578,51 +764,79 @@ func (d *decoder) sequence(n *node, out reflect.Value) (good bool) { return true } -func (d *decoder) mapping(n *node, out reflect.Value) (good bool) { +func (d *decoder) mapping(n *Node, out reflect.Value) (good bool) { + l := len(n.Content) + if d.uniqueKeys { + nerrs := len(d.terrors) + for i := 0; i < l; i += 2 { + ni := n.Content[i] + for j := i + 2; j < l; j += 2 { + nj := n.Content[j] + if ni.Kind == nj.Kind && ni.Value == nj.Value { + d.terrors = append(d.terrors, fmt.Sprintf("line %d: mapping key %#v already defined at line %d", nj.Line, nj.Value, ni.Line)) + } + } + } + if len(d.terrors) > nerrs { + return false + } + } switch out.Kind() { case reflect.Struct: return d.mappingStruct(n, out) - case reflect.Slice: - return d.mappingSlice(n, out) case reflect.Map: // okay case reflect.Interface: - if d.mapType.Kind() == reflect.Map { - iface := out - out = reflect.MakeMap(d.mapType) - iface.Set(out) + iface 
:= out + if isStringMap(n) { + out = reflect.MakeMap(d.stringMapType) } else { - slicev := reflect.New(d.mapType).Elem() - if !d.mappingSlice(n, slicev) { - return false - } - out.Set(slicev) - return true + out = reflect.MakeMap(d.generalMapType) } + iface.Set(out) default: - d.terror(n, yaml_MAP_TAG, out) + d.terror(n, mapTag, out) return false } + outt := out.Type() kt := outt.Key() et := outt.Elem() - mapType := d.mapType - if outt.Key() == ifaceType && outt.Elem() == ifaceType { - d.mapType = outt + stringMapType := d.stringMapType + generalMapType := d.generalMapType + if outt.Elem() == ifaceType { + if outt.Key().Kind() == reflect.String { + d.stringMapType = outt + } else if outt.Key() == ifaceType { + d.generalMapType = outt + } } + mergedFields := d.mergedFields + d.mergedFields = nil + + var mergeNode *Node + + mapIsNew := false if out.IsNil() { out.Set(reflect.MakeMap(outt)) + mapIsNew = true } - l := len(n.children) for i := 0; i < l; i += 2 { - if isMerge(n.children[i]) { - d.merge(n.children[i+1], out) + if isMerge(n.Content[i]) { + mergeNode = n.Content[i+1] continue } k := reflect.New(kt).Elem() - if d.unmarshal(n.children[i], k) { + if d.unmarshal(n.Content[i], k) { + if mergedFields != nil { + ki := k.Interface() + if mergedFields[ki] { + continue + } + mergedFields[ki] = true + } kkind := k.Kind() if kkind == reflect.Interface { kkind = k.Elem().Kind() @@ -631,87 +845,83 @@ func (d *decoder) mapping(n *node, out reflect.Value) (good bool) { failf("invalid map key: %#v", k.Interface()) } e := reflect.New(et).Elem() - if d.unmarshal(n.children[i+1], e) { - d.setMapIndex(n.children[i+1], out, k, e) + if d.unmarshal(n.Content[i+1], e) || n.Content[i+1].ShortTag() == nullTag && (mapIsNew || !out.MapIndex(k).IsValid()) { + out.SetMapIndex(k, e) } } } - d.mapType = mapType - return true -} -func (d *decoder) setMapIndex(n *node, out, k, v reflect.Value) { - if d.strict && out.MapIndex(k) != zeroValue { - d.terrors = append(d.terrors, fmt.Sprintf("line 
%d: key %#v already set in map", n.line+1, k.Interface())) - return + d.mergedFields = mergedFields + if mergeNode != nil { + d.merge(n, mergeNode, out) } - out.SetMapIndex(k, v) + + d.stringMapType = stringMapType + d.generalMapType = generalMapType + return true } -func (d *decoder) mappingSlice(n *node, out reflect.Value) (good bool) { - outt := out.Type() - if outt.Elem() != mapItemType { - d.terror(n, yaml_MAP_TAG, out) +func isStringMap(n *Node) bool { + if n.Kind != MappingNode { return false } - - mapType := d.mapType - d.mapType = outt - - var slice []MapItem - var l = len(n.children) + l := len(n.Content) for i := 0; i < l; i += 2 { - if isMerge(n.children[i]) { - d.merge(n.children[i+1], out) - continue - } - item := MapItem{} - k := reflect.ValueOf(&item.Key).Elem() - if d.unmarshal(n.children[i], k) { - v := reflect.ValueOf(&item.Value).Elem() - if d.unmarshal(n.children[i+1], v) { - slice = append(slice, item) - } + shortTag := n.Content[i].ShortTag() + if shortTag != strTag && shortTag != mergeTag { + return false } } - out.Set(reflect.ValueOf(slice)) - d.mapType = mapType return true } -func (d *decoder) mappingStruct(n *node, out reflect.Value) (good bool) { +func (d *decoder) mappingStruct(n *Node, out reflect.Value) (good bool) { sinfo, err := getStructInfo(out.Type()) if err != nil { panic(err) } - name := settableValueOf("") - l := len(n.children) var inlineMap reflect.Value var elemType reflect.Type if sinfo.InlineMap != -1 { inlineMap = out.Field(sinfo.InlineMap) - inlineMap.Set(reflect.New(inlineMap.Type()).Elem()) elemType = inlineMap.Type().Elem() } + for _, index := range sinfo.InlineUnmarshalers { + field := d.fieldByIndex(n, out, index) + d.prepare(n, field) + } + + mergedFields := d.mergedFields + d.mergedFields = nil + var mergeNode *Node var doneFields []bool - if d.strict { + if d.uniqueKeys { doneFields = make([]bool, len(sinfo.FieldsList)) } + name := settableValueOf("") + l := len(n.Content) for i := 0; i < l; i += 2 { - ni := 
n.children[i] + ni := n.Content[i] if isMerge(ni) { - d.merge(n.children[i+1], out) + mergeNode = n.Content[i+1] continue } if !d.unmarshal(ni, name) { continue } - if info, ok := sinfo.FieldsMap[name.String()]; ok { - if d.strict { + sname := name.String() + if mergedFields != nil { + if mergedFields[sname] { + continue + } + mergedFields[sname] = true + } + if info, ok := sinfo.FieldsMap[sname]; ok { + if d.uniqueKeys { if doneFields[info.Id] { - d.terrors = append(d.terrors, fmt.Sprintf("line %d: field %s already set in type %s", ni.line+1, name.String(), out.Type())) + d.terrors = append(d.terrors, fmt.Sprintf("line %d: field %s already set in type %s", ni.Line, name.String(), out.Type())) continue } doneFields[info.Id] = true @@ -720,20 +930,25 @@ func (d *decoder) mappingStruct(n *node, out reflect.Value) (good bool) { if info.Inline == nil { field = out.Field(info.Num) } else { - field = out.FieldByIndex(info.Inline) + field = d.fieldByIndex(n, out, info.Inline) } - d.unmarshal(n.children[i+1], field) + d.unmarshal(n.Content[i+1], field) } else if sinfo.InlineMap != -1 { if inlineMap.IsNil() { inlineMap.Set(reflect.MakeMap(inlineMap.Type())) } value := reflect.New(elemType).Elem() - d.unmarshal(n.children[i+1], value) - d.setMapIndex(n.children[i+1], inlineMap, name, value) - } else if d.strict { - d.terrors = append(d.terrors, fmt.Sprintf("line %d: field %s not found in type %s", ni.line+1, name.String(), out.Type())) + d.unmarshal(n.Content[i+1], value) + inlineMap.SetMapIndex(name, value) + } else if d.knownFields { + d.terrors = append(d.terrors, fmt.Sprintf("line %d: field %s not found in type %s", ni.Line, name.String(), out.Type())) } } + + d.mergedFields = mergedFields + if mergeNode != nil { + d.merge(n, mergeNode, out) + } return true } @@ -741,26 +956,34 @@ func failWantMap() { failf("map merge requires map or sequence of maps as the value") } -func (d *decoder) merge(n *node, out reflect.Value) { - switch n.kind { - case mappingNode: - 
d.unmarshal(n, out) - case aliasNode: - an, ok := d.doc.anchors[n.value] - if ok && an.kind != mappingNode { +func (d *decoder) merge(parent *Node, merge *Node, out reflect.Value) { + mergedFields := d.mergedFields + if mergedFields == nil { + d.mergedFields = make(map[interface{}]bool) + for i := 0; i < len(parent.Content); i += 2 { + k := reflect.New(ifaceType).Elem() + if d.unmarshal(parent.Content[i], k) { + d.mergedFields[k.Interface()] = true + } + } + } + + switch merge.Kind { + case MappingNode: + d.unmarshal(merge, out) + case AliasNode: + if merge.Alias != nil && merge.Alias.Kind != MappingNode { failWantMap() } - d.unmarshal(n, out) - case sequenceNode: - // Step backwards as earlier nodes take precedence. - for i := len(n.children) - 1; i >= 0; i-- { - ni := n.children[i] - if ni.kind == aliasNode { - an, ok := d.doc.anchors[ni.value] - if ok && an.kind != mappingNode { + d.unmarshal(merge, out) + case SequenceNode: + for i := 0; i < len(merge.Content); i++ { + ni := merge.Content[i] + if ni.Kind == AliasNode { + if ni.Alias != nil && ni.Alias.Kind != MappingNode { failWantMap() } - } else if ni.kind != mappingNode { + } else if ni.Kind != MappingNode { failWantMap() } d.unmarshal(ni, out) @@ -768,8 +991,10 @@ func (d *decoder) merge(n *node, out reflect.Value) { default: failWantMap() } + + d.mergedFields = mergedFields } -func isMerge(n *node) bool { - return n.kind == scalarNode && n.value == "<<" && (n.implicit == true || n.tag == yaml_MERGE_TAG) +func isMerge(n *Node) bool { + return n.Kind == ScalarNode && n.Value == "<<" && (n.Tag == "" || n.Tag == "!" || shortTag(n.Tag) == mergeTag) } diff --git a/decode_test.go b/decode_test.go index 9269f12b..72a8e465 100644 --- a/decode_test.go +++ b/decode_test.go @@ -1,7 +1,24 @@ +// +// Copyright (c) 2011-2019 Canonical Ltd +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + package yaml_test import ( + "bytes" "errors" + "fmt" "io" "math" "reflect" @@ -9,7 +26,7 @@ import ( "time" . "gopkg.in/check.v1" - "gopkg.in/yaml.v2" + "gopkg.in/yaml.v3" ) var unmarshalIntTest = 123 @@ -93,22 +110,42 @@ var unmarshalTests = []struct { //{"sexa: 190:20:30.15", map[string]interface{}{"sexa": 0}}, // Unsupported //{"notanum: .NaN", map[string]interface{}{"notanum": math.NaN()}}, // Equality of NaN fails. - // Bools from spec + // Bools are per 1.2 spec. { - "canonical: y", + "canonical: true", map[string]interface{}{"canonical": true}, }, { - "answer: NO", - map[string]interface{}{"answer": false}, + "canonical: false", + map[string]interface{}{"canonical": false}, }, { - "logical: True", - map[string]interface{}{"logical": true}, + "bool: True", + map[string]interface{}{"bool": true}, }, { - "option: on", - map[string]interface{}{"option": true}, + "bool: False", + map[string]interface{}{"bool": false}, }, { + "bool: TRUE", + map[string]interface{}{"bool": true}, + }, { + "bool: FALSE", + map[string]interface{}{"bool": false}, + }, + // For backwards compatibility with 1.1, decoding old strings into typed values still works. 
+ { "option: on", map[string]bool{"option": true}, + }, { + "option: y", + map[string]bool{"option": true}, + }, { + "option: Off", + map[string]bool{"option": false}, + }, { + "option: No", + map[string]bool{"option": false}, + }, { + "option: other", + map[string]bool{}, }, // Ints from spec { @@ -120,6 +157,15 @@ var unmarshalTests = []struct { }, { "octal: 02472256", map[string]interface{}{"octal": 685230}, + }, { + "octal: -02472256", + map[string]interface{}{"octal": -685230}, + }, { + "octal: 0o2472256", + map[string]interface{}{"octal": 685230}, + }, { + "octal: -0o2472256", + map[string]interface{}{"octal": -685230}, }, { "hexa: 0x_0A_74_AE", map[string]interface{}{"hexa": 685230}, @@ -207,7 +253,12 @@ var unmarshalTests = []struct { // Map inside interface with no type hints. { "a: {b: c}", - map[interface{}]interface{}{"a": map[interface{}]interface{}{"b": "c"}}, + map[interface{}]interface{}{"a": map[string]interface{}{"b": "c"}}, + }, + // Non-string map inside interface with no type hints. + { + "a: {b: c, 1: d}", + map[interface{}]interface{}{"a": map[interface{}]interface{}{"b": "c", 1: "d"}}, }, // Structs and type conversions. @@ -220,6 +271,9 @@ var unmarshalTests = []struct { }, { "a: {b: c}", &struct{ A *struct{ B string } }{&struct{ B string }{"c"}}, + }, { + "a: 'null'", + &struct{ A *unmarshalerType }{&unmarshalerType{"null"}}, }, { "a: {b: c}", &struct{ A map[string]string }{map[string]string{"b": "c"}}, @@ -256,7 +310,8 @@ var unmarshalTests = []struct { B int "a" }{1}, }, { - "a: y", + // Some limited backwards compatibility with the 1.1 spec. + "a: YES", &struct{ A bool }{true}, }, @@ -501,6 +556,27 @@ var unmarshalTests = []struct { }{1, inlineB{2, inlineC{3}}}, }, + // Struct inlining as a pointer. 
+ { + "a: 1\nb: 2\nc: 3\n", + &struct { + A int + C *inlineB `yaml:",inline"` + }{1, &inlineB{2, inlineC{3}}}, + }, { + "a: 1\n", + &struct { + A int + C *inlineB `yaml:",inline"` + }{1, nil}, + }, { + "a: 1\nc: 3\nd: 4\n", + &struct { + A int + C *inlineD `yaml:",inline"` + }{1, &inlineD{&inlineC{3}, 4}}, + }, + // Map inlining { "a: 1\nb: 2\nc: 3\n", @@ -527,7 +603,7 @@ var unmarshalTests = []struct { // issue #295 (allow scalars with colons in flow mappings and sequences) { "a: {b: https://github.com/go-yaml/yaml}", - map[string]interface{}{"a": map[interface{}]interface{}{ + map[string]interface{}{"a": map[string]interface{}{ "b": "https://github.com/go-yaml/yaml", }}, }, @@ -566,16 +642,10 @@ var unmarshalTests = []struct { map[string]string{"a": strings.Repeat("\x00", 52)}, }, - // Ordered maps. - { - "{b: 2, a: 1, d: 4, c: 3, sub: {e: 5}}", - &yaml.MapSlice{{"b", 2}, {"a", 1}, {"d", 4}, {"c", 3}, {"sub", yaml.MapSlice{{"e", 5}}}}, - }, - // Issue #39. { "a:\n b:\n c: d\n", - map[string]struct{ B interface{} }{"a": {map[interface{}]interface{}{"c": "d"}}}, + map[string]struct{ B interface{} }{"a": {map[string]interface{}{"c": "d"}}}, }, // Custom map type. @@ -655,12 +725,12 @@ var unmarshalTests = []struct { { // explicit timestamp tag into interface. "a: !!timestamp \"2015-01-01\"", - map[string]interface{}{"a": "2015-01-01"}, + map[string]interface{}{"a": time.Date(2015, 1, 1, 0, 0, 0, 0, time.UTC)}, }, { // implicit timestamp tag into interface. "a: 2015-01-01", - map[string]interface{}{"a": "2015-01-01"}, + map[string]interface{}{"a": time.Date(2015, 1, 1, 0, 0, 0, 0, time.UTC)}, }, // Encode empty lists as zero-length slices. @@ -691,22 +761,22 @@ var unmarshalTests = []struct { M{"ñoño": "very yes 🟔"}, }, - // YAML Float regex shouldn't match this + // This *is* in fact a float number, per the spec. #171 was a mistake. 
{ "a: 123456e1\n", - M{"a": "123456e1"}, + M{"a": 123456e1}, }, { "a: 123456E1\n", - M{"a": "123456E1"}, + M{"a": 123456e1}, }, // yaml-test-suite 3GZX: Spec Example 7.1. Alias Nodes { "First occurrence: &anchor Foo\nSecond occurrence: *anchor\nOverride anchor: &anchor Bar\nReuse anchor: *anchor\n", - map[interface{}]interface{}{ - "Reuse anchor": "Bar", + map[string]interface{}{ "First occurrence": "Foo", "Second occurrence": "Foo", "Override anchor": "Bar", + "Reuse anchor": "Bar", }, }, // Single document with garbage following it. @@ -714,9 +784,27 @@ var unmarshalTests = []struct { "---\nhello\n...\n}not yaml", "hello", }, + + // Comment scan exhausting the input buffer (issue #469). + { + "true\n#" + strings.Repeat(" ", 512*3), + "true", + }, { + "true #" + strings.Repeat(" ", 512*3), + "true", + }, + + // CRLF + { + "a: b\r\nc:\r\n- d\r\n- e\r\n", + map[string]interface{}{ + "a": "b", + "c": []interface{}{"d", "e"}, + }, + }, } -type M map[interface{}]interface{} +type M map[string]interface{} type inlineB struct { B int @@ -727,6 +815,11 @@ type inlineC struct { C int } +type inlineD struct { + C *inlineC `yaml:",inline"` + D int +} + func (s *S) TestUnmarshal(c *C) { for i, item := range unmarshalTests { c.Logf("test %d: %q", i, item.data) @@ -740,16 +833,15 @@ func (s *S) TestUnmarshal(c *C) { } } -// TODO(v3): This test should also work when unmarshaling onto an interface{}. func (s *S) TestUnmarshalFullTimestamp(c *C) { // Full timestamp in same format as encoded. This is confirmed to be // properly decoded by Python as a timestamp as well. 
var str = "2015-02-24T18:19:39.123456789-03:00" - var t time.Time + var t interface{} err := yaml.Unmarshal([]byte(str), &t) c.Assert(err, IsNil) - c.Assert(t, Equals, time.Date(2015, 2, 24, 18, 19, 39, 123456789, t.Location())) - c.Assert(t.In(time.UTC), Equals, time.Date(2015, 2, 24, 21, 19, 39, 123456789, time.UTC)) + c.Assert(t, Equals, time.Date(2015, 2, 24, 18, 19, 39, 123456789, t.(time.Time).Location())) + c.Assert(t.(time.Time).In(time.UTC), Equals, time.Date(2015, 2, 24, 21, 19, 39, 123456789, time.UTC)) } func (s *S) TestDecoderSingleDocument(c *C) { @@ -780,12 +872,12 @@ var decoderTests = []struct { }, { "a: b", []interface{}{ - map[interface{}]interface{}{"a": "b"}, + map[string]interface{}{"a": "b"}, }, }, { "---\na: b\n...\n", []interface{}{ - map[interface{}]interface{}{"a": "b"}, + map[string]interface{}{"a": "b"}, }, }, { "---\n'hello'\n...\n---\ngoodbye\n...\n", @@ -831,6 +923,13 @@ func (s *S) TestUnmarshalNaN(c *C) { c.Assert(math.IsNaN(value["notanum"].(float64)), Equals, true) } +func (s *S) TestUnmarshalDurationInt(c *C) { + // Don't accept plain ints as durations as it's unclear (issue #200). + var d time.Duration + err := yaml.Unmarshal([]byte("123"), &d) + c.Assert(err, ErrorMatches, "(?s).* line 1: cannot unmarshal !!int `123` into time.Duration") +} + var unmarshalErrorTests = []struct { data, error string }{ @@ -843,9 +942,24 @@ var unmarshalErrorTests = []struct { {"value: -", "yaml: block sequence entries are not allowed in this context"}, {"a: !!binary ==", "yaml: !!binary value contains invalid base64 data"}, {"{[.]}", `yaml: invalid map key: \[\]interface \{\}\{"\."\}`}, - {"{{.}}", `yaml: invalid map key: map\[interface\ \{\}\]interface \{\}\{".":interface \{\}\(nil\)\}`}, + {"{{.}}", `yaml: invalid map key: map\[string]interface \{\}\{".":interface \{\}\(nil\)\}`}, {"b: *a\na: &a {c: 1}", `yaml: unknown anchor 'a' referenced`}, {"%TAG !%79! 
tag:yaml.org,2002:\n---\nv: !%79!int '1'", "yaml: did not find expected whitespace"}, + {"a:\n 1:\nb\n 2:", ".*could not find expected ':'"}, + {"a: 1\nb: 2\nc 2\nd: 3\n", "^yaml: line 3: could not find expected ':'$"}, + {"0: [:!00 \xef", "yaml: incomplete UTF-8 octet sequence"}, // Issue #666 + { + "a: &a [00,00,00,00,00,00,00,00,00]\n" + + "b: &b [*a,*a,*a,*a,*a,*a,*a,*a,*a]\n" + + "c: &c [*b,*b,*b,*b,*b,*b,*b,*b,*b]\n" + + "d: &d [*c,*c,*c,*c,*c,*c,*c,*c,*c]\n" + + "e: &e [*d,*d,*d,*d,*d,*d,*d,*d,*d]\n" + + "f: &f [*e,*e,*e,*e,*e,*e,*e,*e,*e]\n" + + "g: &g [*f,*f,*f,*f,*f,*f,*f,*f,*f]\n" + + "h: &h [*g,*g,*g,*g,*g,*g,*g,*g,*g]\n" + + "i: &i [*h,*h,*h,*h,*h,*h,*h,*h,*h]\n", + "yaml: document contains excessive aliasing", + }, } func (s *S) TestUnmarshalErrors(c *C) { @@ -869,7 +983,7 @@ var unmarshalerTests = []struct { data, tag string value interface{} }{ - {"_: {hi: there}", "!!map", map[interface{}]interface{}{"hi": "there"}}, + {"_: {hi: there}", "!!map", map[string]interface{}{"hi": "there"}}, {"_: [1,A]", "!!seq", []interface{}{1, "A"}}, {"_: 10", "!!int", 10}, {"_: null", "!!null", nil}, @@ -885,8 +999,8 @@ type unmarshalerType struct { value interface{} } -func (o *unmarshalerType) UnmarshalYAML(unmarshal func(v interface{}) error) error { - if err := unmarshal(&o.value); err != nil { +func (o *unmarshalerType) UnmarshalYAML(value *yaml.Node) error { + if err := value.Decode(&o.value); err != nil { return err } if i, ok := o.value.(int); ok { @@ -905,6 +1019,39 @@ type unmarshalerValue struct { Field unmarshalerType "_" } +type unmarshalerInlined struct { + Field *unmarshalerType "_" + Inlined unmarshalerType `yaml:",inline"` +} + +type unmarshalerInlinedTwice struct { + InlinedTwice unmarshalerInlined `yaml:",inline"` +} + +type obsoleteUnmarshalerType struct { + value interface{} +} + +func (o *obsoleteUnmarshalerType) UnmarshalYAML(unmarshal func(v interface{}) error) error { + if err := unmarshal(&o.value); err != nil { + return err + } + if i, ok 
:= o.value.(int); ok { + if result, ok := unmarshalerResult[i]; ok { + return result + } + } + return nil +} + +type obsoleteUnmarshalerPointer struct { + Field *obsoleteUnmarshalerType "_" +} + +type obsoleteUnmarshalerValue struct { + Field obsoleteUnmarshalerType "_" +} + func (s *S) TestUnmarshalerPointerField(c *C) { for _, item := range unmarshalerTests { obj := &unmarshalerPointer{} @@ -917,11 +1064,22 @@ func (s *S) TestUnmarshalerPointerField(c *C) { c.Assert(obj.Field.value, DeepEquals, item.value) } } + for _, item := range unmarshalerTests { + obj := &obsoleteUnmarshalerPointer{} + err := yaml.Unmarshal([]byte(item.data), obj) + c.Assert(err, IsNil) + if item.value == nil { + c.Assert(obj.Field, IsNil) + } else { + c.Assert(obj.Field, NotNil, Commentf("Pointer not initialized (%#v)", item.value)) + c.Assert(obj.Field.value, DeepEquals, item.value) + } + } } func (s *S) TestUnmarshalerValueField(c *C) { for _, item := range unmarshalerTests { - obj := &unmarshalerValue{} + obj := &obsoleteUnmarshalerValue{} err := yaml.Unmarshal([]byte(item.data), obj) c.Assert(err, IsNil) c.Assert(obj.Field, NotNil, Commentf("Pointer not initialized (%#v)", item.value)) @@ -929,11 +1087,25 @@ func (s *S) TestUnmarshalerValueField(c *C) { } } +func (s *S) TestUnmarshalerInlinedField(c *C) { + obj := &unmarshalerInlined{} + err := yaml.Unmarshal([]byte("_: a\ninlined: b\n"), obj) + c.Assert(err, IsNil) + c.Assert(obj.Field, DeepEquals, &unmarshalerType{"a"}) + c.Assert(obj.Inlined, DeepEquals, unmarshalerType{map[string]interface{}{"_": "a", "inlined": "b"}}) + + twc := &unmarshalerInlinedTwice{} + err = yaml.Unmarshal([]byte("_: a\ninlined: b\n"), twc) + c.Assert(err, IsNil) + c.Assert(twc.InlinedTwice.Field, DeepEquals, &unmarshalerType{"a"}) + c.Assert(twc.InlinedTwice.Inlined, DeepEquals, unmarshalerType{map[string]interface{}{"_": "a", "inlined": "b"}}) +} + func (s *S) TestUnmarshalerWholeDocument(c *C) { - obj := &unmarshalerType{} + obj := 
&obsoleteUnmarshalerType{} err := yaml.Unmarshal([]byte(unmarshalerTests[0].data), obj) c.Assert(err, IsNil) - value, ok := obj.value.(map[interface{}]interface{}) + value, ok := obj.value.(map[string]interface{}) c.Assert(ok, Equals, true, Commentf("value: %#v", obj.value)) c.Assert(value["_"], DeepEquals, unmarshalerTests[0].value) } @@ -969,9 +1141,78 @@ func (s *S) TestUnmarshalerTypeError(c *C) { c.Assert(v.M["ghi"].value, Equals, 3) } +func (s *S) TestObsoleteUnmarshalerTypeError(c *C) { + unmarshalerResult[2] = &yaml.TypeError{[]string{"foo"}} + unmarshalerResult[4] = &yaml.TypeError{[]string{"bar"}} + defer func() { + delete(unmarshalerResult, 2) + delete(unmarshalerResult, 4) + }() + + type T struct { + Before int + After int + M map[string]*obsoleteUnmarshalerType + } + var v T + data := `{before: A, m: {abc: 1, def: 2, ghi: 3, jkl: 4}, after: B}` + err := yaml.Unmarshal([]byte(data), &v) + c.Assert(err, ErrorMatches, ""+ + "yaml: unmarshal errors:\n"+ + " line 1: cannot unmarshal !!str `A` into int\n"+ + " foo\n"+ + " bar\n"+ + " line 1: cannot unmarshal !!str `B` into int") + c.Assert(v.M["abc"], NotNil) + c.Assert(v.M["def"], IsNil) + c.Assert(v.M["ghi"], NotNil) + c.Assert(v.M["jkl"], IsNil) + + c.Assert(v.M["abc"].value, Equals, 1) + c.Assert(v.M["ghi"].value, Equals, 3) +} + type proxyTypeError struct{} -func (v *proxyTypeError) UnmarshalYAML(unmarshal func(interface{}) error) error { +func (v *proxyTypeError) UnmarshalYAML(node *yaml.Node) error { + var s string + var a int32 + var b int64 + if err := node.Decode(&s); err != nil { + panic(err) + } + if s == "a" { + if err := node.Decode(&b); err == nil { + panic("should have failed") + } + return node.Decode(&a) + } + if err := node.Decode(&a); err == nil { + panic("should have failed") + } + return node.Decode(&b) +} + +func (s *S) TestUnmarshalerTypeErrorProxying(c *C) { + type T struct { + Before int + After int + M map[string]*proxyTypeError + } + var v T + data := `{before: A, m: {abc: a, def: 
b}, after: B}` + err := yaml.Unmarshal([]byte(data), &v) + c.Assert(err, ErrorMatches, ""+ + "yaml: unmarshal errors:\n"+ + " line 1: cannot unmarshal !!str `A` into int\n"+ + " line 1: cannot unmarshal !!str `a` into int32\n"+ + " line 1: cannot unmarshal !!str `b` into int64\n"+ + " line 1: cannot unmarshal !!str `B` into int") +} + +type obsoleteProxyTypeError struct{} + +func (v *obsoleteProxyTypeError) UnmarshalYAML(unmarshal func(interface{}) error) error { var s string var a int32 var b int64 @@ -990,11 +1231,11 @@ func (v *proxyTypeError) UnmarshalYAML(unmarshal func(interface{}) error) error return unmarshal(&b) } -func (s *S) TestUnmarshalerTypeErrorProxying(c *C) { +func (s *S) TestObsoleteUnmarshalerTypeErrorProxying(c *C) { type T struct { Before int After int - M map[string]*proxyTypeError + M map[string]*obsoleteProxyTypeError } var v T data := `{before: A, m: {abc: a, def: b}, after: B}` @@ -1007,11 +1248,11 @@ func (s *S) TestUnmarshalerTypeErrorProxying(c *C) { " line 1: cannot unmarshal !!str `B` into int") } -type failingUnmarshaler struct{} - var failingErr = errors.New("failingErr") -func (ft *failingUnmarshaler) UnmarshalYAML(unmarshal func(interface{}) error) error { +type failingUnmarshaler struct{} + +func (ft *failingUnmarshaler) UnmarshalYAML(node *yaml.Node) error { return failingErr } @@ -1020,18 +1261,29 @@ func (s *S) TestUnmarshalerError(c *C) { c.Assert(err, Equals, failingErr) } +type obsoleteFailingUnmarshaler struct{} + +func (ft *obsoleteFailingUnmarshaler) UnmarshalYAML(unmarshal func(interface{}) error) error { + return failingErr +} + +func (s *S) TestObsoleteUnmarshalerError(c *C) { + err := yaml.Unmarshal([]byte("a: b"), &obsoleteFailingUnmarshaler{}) + c.Assert(err, Equals, failingErr) +} + type sliceUnmarshaler []int -func (su *sliceUnmarshaler) UnmarshalYAML(unmarshal func(interface{}) error) error { +func (su *sliceUnmarshaler) UnmarshalYAML(node *yaml.Node) error { var slice []int - err := unmarshal(&slice) + err := 
node.Decode(&slice) if err == nil { *su = slice return nil } var intVal int - err = unmarshal(&intVal) + err = node.Decode(&intVal) if err == nil { *su = []int{intVal} return nil @@ -1051,6 +1303,37 @@ func (s *S) TestUnmarshalerRetry(c *C) { c.Assert(su, DeepEquals, sliceUnmarshaler([]int{1})) } +type obsoleteSliceUnmarshaler []int + +func (su *obsoleteSliceUnmarshaler) UnmarshalYAML(unmarshal func(interface{}) error) error { + var slice []int + err := unmarshal(&slice) + if err == nil { + *su = slice + return nil + } + + var intVal int + err = unmarshal(&intVal) + if err == nil { + *su = []int{intVal} + return nil + } + + return err +} + +func (s *S) TestObsoleteUnmarshalerRetry(c *C) { + var su obsoleteSliceUnmarshaler + err := yaml.Unmarshal([]byte("[1, 2, 3]"), &su) + c.Assert(err, IsNil) + c.Assert(su, DeepEquals, obsoleteSliceUnmarshaler([]int{1, 2, 3})) + + err = yaml.Unmarshal([]byte("1"), &su) + c.Assert(err, IsNil) + c.Assert(su, DeepEquals, obsoleteSliceUnmarshaler([]int{1})) +} + // From http://yaml.org/type/merge.html var mergeTests = ` anchors: @@ -1108,13 +1391,18 @@ inlineSequenceMap: ` func (s *S) TestMerge(c *C) { - var want = map[interface{}]interface{}{ + var want = map[string]interface{}{ "x": 1, "y": 2, "r": 10, "label": "center/big", } + wantStringMap := make(map[string]interface{}) + for k, v := range want { + wantStringMap[fmt.Sprintf("%v", k)] = v + } + var m map[interface{}]interface{} err := yaml.Unmarshal([]byte(mergeTests), &m) c.Assert(err, IsNil) @@ -1122,6 +1410,10 @@ func (s *S) TestMerge(c *C) { if name == "anchors" { continue } + if name == "plain" { + c.Assert(test, DeepEquals, wantStringMap, Commentf("test %q failed", name)) + continue + } c.Assert(test, DeepEquals, want, Commentf("test %q failed", name)) } } @@ -1144,28 +1436,168 @@ func (s *S) TestMergeStruct(c *C) { } } -var unmarshalNullTests = []func() interface{}{ +var mergeTestsNested = ` +mergeouter1: &mergeouter1 + d: 40 + e: 50 + +mergeouter2: &mergeouter2 + e: 5 + 
f: 6 + g: 70 + +mergeinner1: &mergeinner1 + <<: *mergeouter1 + inner: + a: 1 + b: 2 + +mergeinner2: &mergeinner2 + <<: *mergeouter2 + inner: + a: -1 + b: -2 + +outer: + <<: [*mergeinner1, *mergeinner2] + f: 60 + inner: + a: 10 +` + +func (s *S) TestMergeNestedStruct(c *C) { + // Issue #818: Merging used to just unmarshal twice on the target + // value, which worked for maps as these were replaced by the new map, + // but not on struct values as these are preserved. This resulted in + // the nested data from the merged map to be mixed up with the data + // from the map being merged into. + // + // This test also prevents two potential bugs from showing up: + // + // 1) A simple implementation might just zero out the nested value + // before unmarshaling the second time, but this would clobber previous + // data that is usually respected ({C: 30} below). + // + // 2) A simple implementation might attempt to handle the key skipping + // directly by iterating over the merging map without recursion, but + // there are more complex cases that require recursion. 
+ // + // Quick summary of the fields: + // + // - A must come from outer and not overriden + // - B must not be set as its in the ignored merge + // - C should still be set as it's preset in the value + // - D should be set from the recursive merge + // - E should be set from the first recursive merge, ignored on the second + // - F should be set in the inlined map from outer, ignored later + // - G should be set in the inlined map from the second recursive merge + // + + type Inner struct { + A, B, C int + } + type Outer struct { + D, E int + Inner Inner + Inline map[string]int `yaml:",inline"` + } + type Data struct { + Outer Outer + } + + test := Data{Outer{0, 0, Inner{C: 30}, nil}} + want := Data{Outer{40, 50, Inner{A: 10, C: 30}, map[string]int{"f": 60, "g": 70}}} + + err := yaml.Unmarshal([]byte(mergeTestsNested), &test) + c.Assert(err, IsNil) + c.Assert(test, DeepEquals, want) + + // Repeat test with a map. + + var testm map[string]interface{} + var wantm = map[string]interface {} { + "f": 60, + "inner": map[string]interface{}{ + "a": 10, + }, + "d": 40, + "e": 50, + "g": 70, + } + err = yaml.Unmarshal([]byte(mergeTestsNested), &testm) + c.Assert(err, IsNil) + c.Assert(testm["outer"], DeepEquals, wantm) +} + +var unmarshalNullTests = []struct { + input string + pristine, expected func() interface{} +}{{ + "null", func() interface{} { var v interface{}; v = "v"; return &v }, + func() interface{} { var v interface{}; v = nil; return &v }, +}, { + "null", + func() interface{} { var s = "s"; return &s }, func() interface{} { var s = "s"; return &s }, +}, { + "null", func() interface{} { var s = "s"; sptr := &s; return &sptr }, + func() interface{} { var sptr *string; return &sptr }, +}, { + "null", func() interface{} { var i = 1; return &i }, + func() interface{} { var i = 1; return &i }, +}, { + "null", func() interface{} { var i = 1; iptr := &i; return &iptr }, - func() interface{} { m := map[string]int{"s": 1}; return &m }, - func() interface{} { m := 
map[string]int{"s": 1}; return m }, -} + func() interface{} { var iptr *int; return &iptr }, +}, { + "null", + func() interface{} { var m = map[string]int{"s": 1}; return &m }, + func() interface{} { var m map[string]int; return &m }, +}, { + "null", + func() interface{} { var m = map[string]int{"s": 1}; return m }, + func() interface{} { var m = map[string]int{"s": 1}; return m }, +}, { + "s2: null\ns3: null", + func() interface{} { var m = map[string]int{"s1": 1, "s2": 2}; return m }, + func() interface{} { var m = map[string]int{"s1": 1, "s2": 2, "s3": 0}; return m }, +}, { + "s2: null\ns3: null", + func() interface{} { var m = map[string]interface{}{"s1": 1, "s2": 2}; return m }, + func() interface{} { var m = map[string]interface{}{"s1": 1, "s2": nil, "s3": nil}; return m }, +}} func (s *S) TestUnmarshalNull(c *C) { for _, test := range unmarshalNullTests { - item := test() - zero := reflect.Zero(reflect.TypeOf(item).Elem()).Interface() - err := yaml.Unmarshal([]byte("null"), item) + pristine := test.pristine() + expected := test.expected() + err := yaml.Unmarshal([]byte(test.input), pristine) c.Assert(err, IsNil) - if reflect.TypeOf(item).Kind() == reflect.Map { - c.Assert(reflect.ValueOf(item).Interface(), DeepEquals, reflect.MakeMap(reflect.TypeOf(item)).Interface()) - } else { - c.Assert(reflect.ValueOf(item).Elem().Interface(), DeepEquals, zero) - } + c.Assert(pristine, DeepEquals, expected) + } +} + +func (s *S) TestUnmarshalPreservesData(c *C) { + var v struct { + A, B int + C int `yaml:"-"` } + v.A = 42 + v.C = 88 + err := yaml.Unmarshal([]byte("---"), &v) + c.Assert(err, IsNil) + c.Assert(v.A, Equals, 42) + c.Assert(v.B, Equals, 0) + c.Assert(v.C, Equals, 88) + + err = yaml.Unmarshal([]byte("b: 21\nc: 99"), &v) + c.Assert(err, IsNil) + c.Assert(v.A, Equals, 42) + c.Assert(v.B, Equals, 21) + c.Assert(v.C, Equals, 88) } func (s *S) TestUnmarshalSliceOnPreset(c *C) { @@ -1176,19 +1608,24 @@ func (s *S) TestUnmarshalSliceOnPreset(c *C) { } var 
unmarshalStrictTests = []struct { - data string - value interface{} - error string + known bool + unique bool + data string + value interface{} + error string }{{ + known: true, data: "a: 1\nc: 2\n", value: struct{ A, B int }{A: 1}, error: `yaml: unmarshal errors:\n line 2: field c not found in type struct { A int; B int }`, }, { - data: "a: 1\nb: 2\na: 3\n", - value: struct{ A, B int }{A: 3, B: 2}, - error: `yaml: unmarshal errors:\n line 3: field a already set in type struct { A int; B int }`, + unique: true, + data: "a: 1\nb: 2\na: 3\n", + value: struct{ A, B int }{A: 3, B: 2}, + error: `yaml: unmarshal errors:\n line 3: mapping key "a" already defined at line 1`, }, { - data: "c: 3\na: 1\nb: 2\nc: 4\n", + unique: true, + data: "c: 3\na: 1\nb: 2\nc: 4\n", value: struct { A int inlineB `yaml:",inline"` @@ -1201,9 +1638,10 @@ var unmarshalStrictTests = []struct { }, }, }, - error: `yaml: unmarshal errors:\n line 4: field c already set in type struct { A int; yaml_test.inlineB "yaml:\\",inline\\"" }`, + error: `yaml: unmarshal errors:\n line 4: mapping key "c" already defined at line 1`, }, { - data: "c: 0\na: 1\nb: 2\nc: 1\n", + unique: true, + data: "c: 0\na: 1\nb: 2\nc: 1\n", value: struct { A int inlineB `yaml:",inline"` @@ -1216,9 +1654,10 @@ var unmarshalStrictTests = []struct { }, }, }, - error: `yaml: unmarshal errors:\n line 4: field c already set in type struct { A int; yaml_test.inlineB "yaml:\\",inline\\"" }`, + error: `yaml: unmarshal errors:\n line 4: mapping key "c" already defined at line 1`, }, { - data: "c: 1\na: 1\nb: 2\nc: 3\n", + unique: true, + data: "c: 1\na: 1\nb: 2\nc: 3\n", value: struct { A int M map[string]interface{} `yaml:",inline"` @@ -1229,31 +1668,36 @@ var unmarshalStrictTests = []struct { "c": 3, }, }, - error: `yaml: unmarshal errors:\n line 4: key "c" already set in map`, + error: `yaml: unmarshal errors:\n line 4: mapping key "c" already defined at line 1`, }, { - data: "a: 1\n9: 2\nnull: 3\n9: 4", + unique: true, + data: "a: 
1\n9: 2\nnull: 3\n9: 4", value: map[interface{}]interface{}{ "a": 1, nil: 3, 9: 4, }, - error: `yaml: unmarshal errors:\n line 4: key 9 already set in map`, + error: `yaml: unmarshal errors:\n line 4: mapping key "9" already defined at line 2`, }} -func (s *S) TestUnmarshalStrict(c *C) { +func (s *S) TestUnmarshalKnownFields(c *C) { for i, item := range unmarshalStrictTests { c.Logf("test %d: %q", i, item.data) // First test that normal Unmarshal unmarshals to the expected value. + if !item.unique { + t := reflect.ValueOf(item.value).Type() + value := reflect.New(t) + err := yaml.Unmarshal([]byte(item.data), value.Interface()) + c.Assert(err, Equals, nil) + c.Assert(value.Elem().Interface(), DeepEquals, item.value) + } + + // Then test that it fails on the same thing with KnownFields on. t := reflect.ValueOf(item.value).Type() value := reflect.New(t) - err := yaml.Unmarshal([]byte(item.data), value.Interface()) - c.Assert(err, Equals, nil) - c.Assert(value.Elem().Interface(), DeepEquals, item.value) - - // Then test that UnmarshalStrict fails on the same thing. 
- t = reflect.ValueOf(item.value).Type() - value = reflect.New(t) - err = yaml.UnmarshalStrict([]byte(item.data), value.Interface()) + dec := yaml.NewDecoder(bytes.NewBuffer([]byte(item.data))) + dec.KnownFields(item.known) + err := dec.Decode(value.Interface()) c.Assert(err, ErrorMatches, item.error) } } diff --git a/emitterc.go b/emitterc.go index a1c2cc52..0f47c9ca 100644 --- a/emitterc.go +++ b/emitterc.go @@ -1,3 +1,25 @@ +// +// Copyright (c) 2011-2019 Canonical Ltd +// Copyright (c) 2006-2010 Kirill Simonov +// +// Permission is hereby granted, free of charge, to any person obtaining a copy of +// this software and associated documentation files (the "Software"), to deal in +// the Software without restriction, including without limitation the rights to +// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +// of the Software, and to permit persons to whom the Software is furnished to do +// so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + package yaml import ( @@ -43,8 +65,13 @@ func put_break(emitter *yaml_emitter_t) bool { default: panic("unknown line break setting") } + if emitter.column == 0 { + emitter.space_above = true + } emitter.column = 0 emitter.line++ + // [Go] Do this here and below and drop from everywhere else (see commented lines). 
+ emitter.indention = true return true } @@ -97,8 +124,13 @@ func write_break(emitter *yaml_emitter_t, s []byte, i *int) bool { if !write(emitter, s, i) { return false } + if emitter.column == 0 { + emitter.space_above = true + } emitter.column = 0 emitter.line++ + // [Go] Do this here and above and drop from everywhere else (see commented lines). + emitter.indention = true } return true } @@ -203,7 +235,14 @@ func yaml_emitter_increase_indent(emitter *yaml_emitter_t, flow, indentless bool emitter.indent = 0 } } else if !indentless { - emitter.indent += emitter.best_indent + // [Go] This was changed so that indentations are more regular. + if emitter.states[len(emitter.states)-1] == yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE { + // The first indent inside a sequence will just skip the "- " indicator. + emitter.indent += 2 + } else { + // Everything else aligns to the chosen indentation. + emitter.indent = emitter.best_indent*((emitter.indent+emitter.best_indent)/emitter.best_indent) + } } return true } @@ -228,16 +267,22 @@ func yaml_emitter_state_machine(emitter *yaml_emitter_t, event *yaml_event_t) bo return yaml_emitter_emit_document_end(emitter, event) case yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE: - return yaml_emitter_emit_flow_sequence_item(emitter, event, true) + return yaml_emitter_emit_flow_sequence_item(emitter, event, true, false) + + case yaml_EMIT_FLOW_SEQUENCE_TRAIL_ITEM_STATE: + return yaml_emitter_emit_flow_sequence_item(emitter, event, false, true) case yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE: - return yaml_emitter_emit_flow_sequence_item(emitter, event, false) + return yaml_emitter_emit_flow_sequence_item(emitter, event, false, false) case yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE: - return yaml_emitter_emit_flow_mapping_key(emitter, event, true) + return yaml_emitter_emit_flow_mapping_key(emitter, event, true, false) + + case yaml_EMIT_FLOW_MAPPING_TRAIL_KEY_STATE: + return yaml_emitter_emit_flow_mapping_key(emitter, event, false, true) case 
yaml_EMIT_FLOW_MAPPING_KEY_STATE: - return yaml_emitter_emit_flow_mapping_key(emitter, event, false) + return yaml_emitter_emit_flow_mapping_key(emitter, event, false, false) case yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE: return yaml_emitter_emit_flow_mapping_value(emitter, event, true) @@ -298,6 +343,8 @@ func yaml_emitter_emit_stream_start(emitter *yaml_emitter_t, event *yaml_event_t emitter.column = 0 emitter.whitespace = true emitter.indention = true + emitter.space_above = true + emitter.foot_indent = -1 if emitter.encoding != yaml_UTF8_ENCODING { if !yaml_emitter_write_bom(emitter) { @@ -392,13 +439,22 @@ func yaml_emitter_emit_document_start(emitter *yaml_emitter_t, event *yaml_event if !yaml_emitter_write_indicator(emitter, []byte("---"), true, false, false) { return false } - if emitter.canonical { + if emitter.canonical || true { if !yaml_emitter_write_indent(emitter) { return false } } } + if len(emitter.head_comment) > 0 { + if !yaml_emitter_process_head_comment(emitter) { + return false + } + if !put_break(emitter) { + return false + } + } + emitter.state = yaml_EMIT_DOCUMENT_CONTENT_STATE return true } @@ -425,7 +481,20 @@ func yaml_emitter_emit_document_start(emitter *yaml_emitter_t, event *yaml_event // Expect the root node. func yaml_emitter_emit_document_content(emitter *yaml_emitter_t, event *yaml_event_t) bool { emitter.states = append(emitter.states, yaml_EMIT_DOCUMENT_END_STATE) - return yaml_emitter_emit_node(emitter, event, true, false, false, false) + + if !yaml_emitter_process_head_comment(emitter) { + return false + } + if !yaml_emitter_emit_node(emitter, event, true, false, false, false) { + return false + } + if !yaml_emitter_process_line_comment(emitter) { + return false + } + if !yaml_emitter_process_foot_comment(emitter) { + return false + } + return true } // Expect DOCUMENT-END. 
@@ -433,6 +502,12 @@ func yaml_emitter_emit_document_end(emitter *yaml_emitter_t, event *yaml_event_t if event.typ != yaml_DOCUMENT_END_EVENT { return yaml_emitter_set_emitter_error(emitter, "expected DOCUMENT-END") } + // [Go] Force document foot separation. + emitter.foot_indent = 0 + if !yaml_emitter_process_foot_comment(emitter) { + return false + } + emitter.foot_indent = -1 if !yaml_emitter_write_indent(emitter) { return false } @@ -454,7 +529,7 @@ func yaml_emitter_emit_document_end(emitter *yaml_emitter_t, event *yaml_event_t } // Expect a flow item node. -func yaml_emitter_emit_flow_sequence_item(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool { +func yaml_emitter_emit_flow_sequence_item(emitter *yaml_emitter_t, event *yaml_event_t, first, trail bool) bool { if first { if !yaml_emitter_write_indicator(emitter, []byte{'['}, true, true, false) { return false @@ -466,13 +541,15 @@ func yaml_emitter_emit_flow_sequence_item(emitter *yaml_emitter_t, event *yaml_e } if event.typ == yaml_SEQUENCE_END_EVENT { - emitter.flow_level-- - emitter.indent = emitter.indents[len(emitter.indents)-1] - emitter.indents = emitter.indents[:len(emitter.indents)-1] - if emitter.canonical && !first { + if emitter.canonical && !first && !trail { if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { return false } + } + emitter.flow_level-- + emitter.indent = emitter.indents[len(emitter.indents)-1] + emitter.indents = emitter.indents[:len(emitter.indents)-1] + if emitter.column == 0 || emitter.canonical && !first { if !yaml_emitter_write_indent(emitter) { return false } @@ -480,29 +557,62 @@ func yaml_emitter_emit_flow_sequence_item(emitter *yaml_emitter_t, event *yaml_e if !yaml_emitter_write_indicator(emitter, []byte{']'}, false, false, false) { return false } + if !yaml_emitter_process_line_comment(emitter) { + return false + } + if !yaml_emitter_process_foot_comment(emitter) { + return false + } emitter.state = 
emitter.states[len(emitter.states)-1] emitter.states = emitter.states[:len(emitter.states)-1] return true } - if !first { + if !first && !trail { if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { return false } } + if !yaml_emitter_process_head_comment(emitter) { + return false + } + if emitter.column == 0 { + if !yaml_emitter_write_indent(emitter) { + return false + } + } + if emitter.canonical || emitter.column > emitter.best_width { if !yaml_emitter_write_indent(emitter) { return false } } - emitter.states = append(emitter.states, yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE) - return yaml_emitter_emit_node(emitter, event, false, true, false, false) + if len(emitter.line_comment)+len(emitter.foot_comment)+len(emitter.tail_comment) > 0 { + emitter.states = append(emitter.states, yaml_EMIT_FLOW_SEQUENCE_TRAIL_ITEM_STATE) + } else { + emitter.states = append(emitter.states, yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE) + } + if !yaml_emitter_emit_node(emitter, event, false, true, false, false) { + return false + } + if len(emitter.line_comment)+len(emitter.foot_comment)+len(emitter.tail_comment) > 0 { + if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { + return false + } + } + if !yaml_emitter_process_line_comment(emitter) { + return false + } + if !yaml_emitter_process_foot_comment(emitter) { + return false + } + return true } // Expect a flow key node. 
-func yaml_emitter_emit_flow_mapping_key(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool { +func yaml_emitter_emit_flow_mapping_key(emitter *yaml_emitter_t, event *yaml_event_t, first, trail bool) bool { if first { if !yaml_emitter_write_indicator(emitter, []byte{'{'}, true, true, false) { return false @@ -514,13 +624,18 @@ func yaml_emitter_emit_flow_mapping_key(emitter *yaml_emitter_t, event *yaml_eve } if event.typ == yaml_MAPPING_END_EVENT { + if (emitter.canonical || len(emitter.head_comment)+len(emitter.foot_comment)+len(emitter.tail_comment) > 0) && !first && !trail { + if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { + return false + } + } + if !yaml_emitter_process_head_comment(emitter) { + return false + } emitter.flow_level-- emitter.indent = emitter.indents[len(emitter.indents)-1] emitter.indents = emitter.indents[:len(emitter.indents)-1] if emitter.canonical && !first { - if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { - return false - } if !yaml_emitter_write_indent(emitter) { return false } @@ -528,16 +643,33 @@ func yaml_emitter_emit_flow_mapping_key(emitter *yaml_emitter_t, event *yaml_eve if !yaml_emitter_write_indicator(emitter, []byte{'}'}, false, false, false) { return false } + if !yaml_emitter_process_line_comment(emitter) { + return false + } + if !yaml_emitter_process_foot_comment(emitter) { + return false + } emitter.state = emitter.states[len(emitter.states)-1] emitter.states = emitter.states[:len(emitter.states)-1] return true } - if !first { + if !first && !trail { if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { return false } } + + if !yaml_emitter_process_head_comment(emitter) { + return false + } + + if emitter.column == 0 { + if !yaml_emitter_write_indent(emitter) { + return false + } + } + if emitter.canonical || emitter.column > emitter.best_width { if !yaml_emitter_write_indent(emitter) { return false @@ -571,14 +703,32 @@ func 
yaml_emitter_emit_flow_mapping_value(emitter *yaml_emitter_t, event *yaml_e return false } } - emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_KEY_STATE) - return yaml_emitter_emit_node(emitter, event, false, false, true, false) + if len(emitter.line_comment)+len(emitter.foot_comment)+len(emitter.tail_comment) > 0 { + emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_TRAIL_KEY_STATE) + } else { + emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_KEY_STATE) + } + if !yaml_emitter_emit_node(emitter, event, false, false, true, false) { + return false + } + if len(emitter.line_comment)+len(emitter.foot_comment)+len(emitter.tail_comment) > 0 { + if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { + return false + } + } + if !yaml_emitter_process_line_comment(emitter) { + return false + } + if !yaml_emitter_process_foot_comment(emitter) { + return false + } + return true } // Expect a block item node. func yaml_emitter_emit_block_sequence_item(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool { if first { - if !yaml_emitter_increase_indent(emitter, false, emitter.mapping_context && !emitter.indention) { + if !yaml_emitter_increase_indent(emitter, false, false) { return false } } @@ -589,6 +739,9 @@ func yaml_emitter_emit_block_sequence_item(emitter *yaml_emitter_t, event *yaml_ emitter.states = emitter.states[:len(emitter.states)-1] return true } + if !yaml_emitter_process_head_comment(emitter) { + return false + } if !yaml_emitter_write_indent(emitter) { return false } @@ -596,7 +749,16 @@ func yaml_emitter_emit_block_sequence_item(emitter *yaml_emitter_t, event *yaml_ return false } emitter.states = append(emitter.states, yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE) - return yaml_emitter_emit_node(emitter, event, false, true, false, false) + if !yaml_emitter_emit_node(emitter, event, false, true, false, false) { + return false + } + if !yaml_emitter_process_line_comment(emitter) { + return false + 
} + if !yaml_emitter_process_foot_comment(emitter) { + return false + } + return true } // Expect a block key node. @@ -606,6 +768,9 @@ func yaml_emitter_emit_block_mapping_key(emitter *yaml_emitter_t, event *yaml_ev return false } } + if !yaml_emitter_process_head_comment(emitter) { + return false + } if event.typ == yaml_MAPPING_END_EVENT { emitter.indent = emitter.indents[len(emitter.indents)-1] emitter.indents = emitter.indents[:len(emitter.indents)-1] @@ -616,6 +781,13 @@ func yaml_emitter_emit_block_mapping_key(emitter *yaml_emitter_t, event *yaml_ev if !yaml_emitter_write_indent(emitter) { return false } + if len(emitter.line_comment) > 0 { + // [Go] A line comment was provided for the key. That's unusual as the + // scanner associates line comments with the value. Either way, + // save the line comment and render it appropriately later. + emitter.key_line_comment = emitter.line_comment + emitter.line_comment = nil + } if yaml_emitter_check_simple_key(emitter) { emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE) return yaml_emitter_emit_node(emitter, event, false, false, true, true) @@ -641,8 +813,42 @@ func yaml_emitter_emit_block_mapping_value(emitter *yaml_emitter_t, event *yaml_ return false } } + if len(emitter.key_line_comment) > 0 { + // [Go] Line comments are generally associated with the value, but when there's + // no value on the same line as a mapping key they end up attached to the + // key itself. + if event.typ == yaml_SCALAR_EVENT { + if len(emitter.line_comment) == 0 { + // A scalar is coming and it has no line comments by itself yet, + // so just let it handle the line comment as usual. If it has a + // line comment, we can't have both so the one from the key is lost. 
+ emitter.line_comment = emitter.key_line_comment + emitter.key_line_comment = nil + } + } else if event.sequence_style() != yaml_FLOW_SEQUENCE_STYLE && (event.typ == yaml_MAPPING_START_EVENT || event.typ == yaml_SEQUENCE_START_EVENT) { + // An indented block follows, so write the comment right now. + emitter.line_comment, emitter.key_line_comment = emitter.key_line_comment, emitter.line_comment + if !yaml_emitter_process_line_comment(emitter) { + return false + } + emitter.line_comment, emitter.key_line_comment = emitter.key_line_comment, emitter.line_comment + } + } emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_KEY_STATE) - return yaml_emitter_emit_node(emitter, event, false, false, true, false) + if !yaml_emitter_emit_node(emitter, event, false, false, true, false) { + return false + } + if !yaml_emitter_process_line_comment(emitter) { + return false + } + if !yaml_emitter_process_foot_comment(emitter) { + return false + } + return true +} + +func yaml_emitter_silent_nil_event(emitter *yaml_emitter_t, event *yaml_event_t) bool { + return event.typ == yaml_SCALAR_EVENT && event.implicit && !emitter.canonical && len(emitter.scalar_data.value) == 0 } // Expect a node. @@ -908,6 +1114,71 @@ func yaml_emitter_process_scalar(emitter *yaml_emitter_t) bool { panic("unknown scalar style") } +// Write a head comment. 
+func yaml_emitter_process_head_comment(emitter *yaml_emitter_t) bool { + if len(emitter.tail_comment) > 0 { + if !yaml_emitter_write_indent(emitter) { + return false + } + if !yaml_emitter_write_comment(emitter, emitter.tail_comment) { + return false + } + emitter.tail_comment = emitter.tail_comment[:0] + emitter.foot_indent = emitter.indent + if emitter.foot_indent < 0 { + emitter.foot_indent = 0 + } + } + + if len(emitter.head_comment) == 0 { + return true + } + if !yaml_emitter_write_indent(emitter) { + return false + } + if !yaml_emitter_write_comment(emitter, emitter.head_comment) { + return false + } + emitter.head_comment = emitter.head_comment[:0] + return true +} + +// Write an line comment. +func yaml_emitter_process_line_comment(emitter *yaml_emitter_t) bool { + if len(emitter.line_comment) == 0 { + return true + } + if !emitter.whitespace { + if !put(emitter, ' ') { + return false + } + } + if !yaml_emitter_write_comment(emitter, emitter.line_comment) { + return false + } + emitter.line_comment = emitter.line_comment[:0] + return true +} + +// Write a foot comment. +func yaml_emitter_process_foot_comment(emitter *yaml_emitter_t) bool { + if len(emitter.foot_comment) == 0 { + return true + } + if !yaml_emitter_write_indent(emitter) { + return false + } + if !yaml_emitter_write_comment(emitter, emitter.foot_comment) { + return false + } + emitter.foot_comment = emitter.foot_comment[:0] + emitter.foot_indent = emitter.indent + if emitter.foot_indent < 0 { + emitter.foot_indent = 0 + } + return true +} + // Check if a %YAML directive is valid. 
func yaml_emitter_analyze_version_directive(emitter *yaml_emitter_t, version_directive *yaml_version_directive_t) bool { if version_directive.major != 1 || version_directive.minor != 1 { @@ -987,6 +1258,7 @@ func yaml_emitter_analyze_scalar(emitter *yaml_emitter_t, value []byte) bool { flow_indicators = false line_breaks = false special_characters = false + tab_characters = false leading_space = false leading_break = false @@ -1055,7 +1327,9 @@ func yaml_emitter_analyze_scalar(emitter *yaml_emitter_t, value []byte) bool { } } - if !is_printable(value, i) || !is_ascii(value, i) && !emitter.unicode { + if value[i] == '\t' { + tab_characters = true + } else if !is_printable(value, i) || !is_ascii(value, i) && !emitter.unicode { special_characters = true } if is_space(value, i) { @@ -1110,10 +1384,12 @@ func yaml_emitter_analyze_scalar(emitter *yaml_emitter_t, value []byte) bool { emitter.scalar_data.block_plain_allowed = false emitter.scalar_data.single_quoted_allowed = false } - if space_break || special_characters { + if space_break || tab_characters || special_characters { emitter.scalar_data.flow_plain_allowed = false emitter.scalar_data.block_plain_allowed = false emitter.scalar_data.single_quoted_allowed = false + } + if space_break || special_characters { emitter.scalar_data.block_allowed = false } if line_breaks { @@ -1137,6 +1413,19 @@ func yaml_emitter_analyze_event(emitter *yaml_emitter_t, event *yaml_event_t) bo emitter.tag_data.suffix = nil emitter.scalar_data.value = nil + if len(event.head_comment) > 0 { + emitter.head_comment = event.head_comment + } + if len(event.line_comment) > 0 { + emitter.line_comment = event.line_comment + } + if len(event.foot_comment) > 0 { + emitter.foot_comment = event.foot_comment + } + if len(event.tail_comment) > 0 { + emitter.tail_comment = event.tail_comment + } + switch event.typ { case yaml_ALIAS_EVENT: if !yaml_emitter_analyze_anchor(emitter, event.anchor, true) { @@ -1208,13 +1497,20 @@ func 
yaml_emitter_write_indent(emitter *yaml_emitter_t) bool { return false } } + if emitter.foot_indent == indent { + if !put_break(emitter) { + return false + } + } for emitter.column < indent { if !put(emitter, ' ') { return false } } emitter.whitespace = true - emitter.indention = true + //emitter.indention = true + emitter.space_above = false + emitter.foot_indent = -1 return true } @@ -1311,7 +1607,7 @@ func yaml_emitter_write_tag_content(emitter *yaml_emitter_t, value []byte, need_ } func yaml_emitter_write_plain_scalar(emitter *yaml_emitter_t, value []byte, allow_breaks bool) bool { - if !emitter.whitespace { + if len(value) > 0 && !emitter.whitespace { if !put(emitter, ' ') { return false } @@ -1341,7 +1637,7 @@ func yaml_emitter_write_plain_scalar(emitter *yaml_emitter_t, value []byte, allo if !write_break(emitter, value, &i) { return false } - emitter.indention = true + //emitter.indention = true breaks = true } else { if breaks { @@ -1358,7 +1654,9 @@ func yaml_emitter_write_plain_scalar(emitter *yaml_emitter_t, value []byte, allo } } - emitter.whitespace = false + if len(value) > 0 { + emitter.whitespace = false + } emitter.indention = false if emitter.root_context { emitter.open_ended = true @@ -1397,7 +1695,7 @@ func yaml_emitter_write_single_quoted_scalar(emitter *yaml_emitter_t, value []by if !write_break(emitter, value, &i) { return false } - emitter.indention = true + //emitter.indention = true breaks = true } else { if breaks { @@ -1596,10 +1894,10 @@ func yaml_emitter_write_literal_scalar(emitter *yaml_emitter_t, value []byte) bo if !yaml_emitter_write_block_scalar_hints(emitter, value) { return false } - if !put_break(emitter) { + if !yaml_emitter_process_line_comment(emitter) { return false } - emitter.indention = true + //emitter.indention = true emitter.whitespace = true breaks := true for i := 0; i < len(value); { @@ -1607,7 +1905,7 @@ func yaml_emitter_write_literal_scalar(emitter *yaml_emitter_t, value []byte) bo if !write_break(emitter, 
value, &i) { return false } - emitter.indention = true + //emitter.indention = true breaks = true } else { if breaks { @@ -1633,11 +1931,11 @@ func yaml_emitter_write_folded_scalar(emitter *yaml_emitter_t, value []byte) boo if !yaml_emitter_write_block_scalar_hints(emitter, value) { return false } - - if !put_break(emitter) { + if !yaml_emitter_process_line_comment(emitter) { return false } - emitter.indention = true + + //emitter.indention = true emitter.whitespace = true breaks := true @@ -1658,7 +1956,7 @@ func yaml_emitter_write_folded_scalar(emitter *yaml_emitter_t, value []byte) boo if !write_break(emitter, value, &i) { return false } - emitter.indention = true + //emitter.indention = true breaks = true } else { if breaks { @@ -1683,3 +1981,40 @@ func yaml_emitter_write_folded_scalar(emitter *yaml_emitter_t, value []byte) boo } return true } + +func yaml_emitter_write_comment(emitter *yaml_emitter_t, comment []byte) bool { + breaks := false + pound := false + for i := 0; i < len(comment); { + if is_break(comment, i) { + if !write_break(emitter, comment, &i) { + return false + } + //emitter.indention = true + breaks = true + pound = false + } else { + if breaks && !yaml_emitter_write_indent(emitter) { + return false + } + if !pound { + if comment[i] != '#' && (!put(emitter, '#') || !put(emitter, ' ')) { + return false + } + pound = true + } + if !write(emitter, comment, &i) { + return false + } + emitter.indention = false + breaks = false + } + } + if !breaks && !put_break(emitter) { + return false + } + + emitter.whitespace = true + //emitter.indention = true + return true +} diff --git a/encode.go b/encode.go index a14435e8..de9e72a3 100644 --- a/encode.go +++ b/encode.go @@ -1,3 +1,18 @@ +// +// Copyright (c) 2011-2019 Canonical Ltd +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + package yaml import ( @@ -14,12 +29,11 @@ import ( ) type encoder struct { - emitter yaml_emitter_t - event yaml_event_t - out []byte - flow bool - // doneInit holds whether the initial stream_start_event has been - // emitted. + emitter yaml_emitter_t + event yaml_event_t + out []byte + flow bool + indent int doneInit bool } @@ -43,6 +57,10 @@ func (e *encoder) init() { if e.doneInit { return } + if e.indent == 0 { + e.indent = 4 + } + e.emitter.best_indent = e.indent yaml_stream_start_event_initialize(&e.event, yaml_UTF8_ENCODING) e.emit() e.doneInit = true @@ -75,27 +93,51 @@ func (e *encoder) must(ok bool) { func (e *encoder) marshalDoc(tag string, in reflect.Value) { e.init() - yaml_document_start_event_initialize(&e.event, nil, nil, true) - e.emit() - e.marshal(tag, in) - yaml_document_end_event_initialize(&e.event, true) - e.emit() + var node *Node + if in.IsValid() { + node, _ = in.Interface().(*Node) + } + if node != nil && node.Kind == DocumentNode { + e.nodev(in) + } else { + yaml_document_start_event_initialize(&e.event, nil, nil, true) + e.emit() + e.marshal(tag, in) + yaml_document_end_event_initialize(&e.event, true) + e.emit() + } } func (e *encoder) marshal(tag string, in reflect.Value) { + tag = shortTag(tag) if !in.IsValid() || in.Kind() == reflect.Ptr && in.IsNil() { e.nilv() return } iface := in.Interface() - switch m := iface.(type) { - case time.Time, *time.Time: - // Although time.Time implements TextMarshaler, - // we don't want to treat it as a string for YAML - // purposes because YAML has special support for - // 
timestamps. + switch value := iface.(type) { + case *Node: + e.nodev(in) + return + case Node: + if !in.CanAddr() { + var n = reflect.New(in.Type()).Elem() + n.Set(in) + in = n + } + e.nodev(in.Addr()) + return + case time.Time: + e.timev(tag, in) + return + case *time.Time: + e.timev(tag, in.Elem()) + return + case time.Duration: + e.stringv(tag, reflect.ValueOf(value.String())) + return case Marshaler: - v, err := m.MarshalYAML() + v, err := value.MarshalYAML() if err != nil { fail(err) } @@ -103,9 +145,10 @@ func (e *encoder) marshal(tag string, in reflect.Value) { e.nilv() return } - in = reflect.ValueOf(v) + e.marshal(tag, reflect.ValueOf(v)) + return case encoding.TextMarshaler: - text, err := m.MarshalText() + text, err := value.MarshalText() if err != nil { fail(err) } @@ -120,31 +163,15 @@ func (e *encoder) marshal(tag string, in reflect.Value) { case reflect.Map: e.mapv(tag, in) case reflect.Ptr: - if in.Type() == ptrTimeType { - e.timev(tag, in.Elem()) - } else { - e.marshal(tag, in.Elem()) - } + e.marshal(tag, in.Elem()) case reflect.Struct: - if in.Type() == timeType { - e.timev(tag, in) - } else { - e.structv(tag, in) - } + e.structv(tag, in) case reflect.Slice, reflect.Array: - if in.Type().Elem() == mapItemType { - e.itemsv(tag, in) - } else { - e.slicev(tag, in) - } + e.slicev(tag, in) case reflect.String: e.stringv(tag, in) case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - if in.Type() == durationType { - e.stringv(tag, reflect.ValueOf(iface.(time.Duration).String())) - } else { - e.intv(tag, in) - } + e.intv(tag, in) case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: e.uintv(tag, in) case reflect.Float32, reflect.Float64: @@ -167,14 +194,21 @@ func (e *encoder) mapv(tag string, in reflect.Value) { }) } -func (e *encoder) itemsv(tag string, in reflect.Value) { - e.mappingv(tag, func() { - slice := in.Convert(reflect.TypeOf([]MapItem{})).Interface().([]MapItem) - for _, 
item := range slice { - e.marshal("", reflect.ValueOf(item.Key)) - e.marshal("", reflect.ValueOf(item.Value)) +func (e *encoder) fieldByIndex(v reflect.Value, index []int) (field reflect.Value) { + for _, num := range index { + for { + if v.Kind() == reflect.Ptr { + if v.IsNil() { + return reflect.Value{} + } + v = v.Elem() + continue + } + break } - }) + v = v.Field(num) + } + return v } func (e *encoder) structv(tag string, in reflect.Value) { @@ -188,7 +222,10 @@ func (e *encoder) structv(tag string, in reflect.Value) { if info.Inline == nil { value = in.Field(info.Num) } else { - value = in.FieldByIndex(info.Inline) + value = e.fieldByIndex(in, info.Inline) + if !value.IsValid() { + continue + } } if info.OmitEmpty && isZero(value) { continue @@ -205,7 +242,7 @@ func (e *encoder) structv(tag string, in reflect.Value) { sort.Sort(keys) for _, k := range keys { if _, found := sinfo.FieldsMap[k.String()]; found { - panic(fmt.Sprintf("Can't have key %q in inlined map; conflicts with struct field", k.String())) + panic(fmt.Sprintf("cannot have key %q in inlined map: conflicts with struct field", k.String())) } e.marshal("", k) e.flow = false @@ -269,13 +306,28 @@ func isBase60Float(s string) (result bool) { // is bogus. In practice parsers do not enforce the "\.[0-9_]*" suffix. var base60float = regexp.MustCompile(`^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+(?:\.[0-9_]*)?$`) +// isOldBool returns whether s is bool notation as defined in YAML 1.1. +// +// We continue to force strings that YAML 1.1 would interpret as booleans to be +// rendered as quotes strings so that the marshalled output valid for YAML 1.1 +// parsing. 
+func isOldBool(s string) (result bool) { + switch s { + case "y", "Y", "yes", "Yes", "YES", "on", "On", "ON", + "n", "N", "no", "No", "NO", "off", "Off", "OFF": + return true + default: + return false + } +} + func (e *encoder) stringv(tag string, in reflect.Value) { var style yaml_scalar_style_t s := in.String() canUsePlain := true switch { case !utf8.ValidString(s): - if tag == yaml_BINARY_TAG { + if tag == binaryTag { failf("explicitly tagged !!binary data must be base64-encoded") } if tag != "" { @@ -283,27 +335,31 @@ func (e *encoder) stringv(tag string, in reflect.Value) { } // It can't be encoded directly as YAML so use a binary tag // and encode it as base64. - tag = yaml_BINARY_TAG + tag = binaryTag s = encodeBase64(s) case tag == "": // Check to see if it would resolve to a specific // tag when encoded unquoted. If it doesn't, // there's no need to quote it. rtag, _ := resolve("", s) - canUsePlain = rtag == yaml_STR_TAG && !isBase60Float(s) + canUsePlain = rtag == strTag && !(isBase60Float(s) || isOldBool(s)) } // Note: it's possible for user code to emit invalid YAML // if they explicitly specify a tag and a string containing // text that's incompatible with that tag. 
switch { case strings.Contains(s, "\n"): - style = yaml_LITERAL_SCALAR_STYLE + if e.flow { + style = yaml_DOUBLE_QUOTED_SCALAR_STYLE + } else { + style = yaml_LITERAL_SCALAR_STYLE + } case canUsePlain: style = yaml_PLAIN_SCALAR_STYLE default: style = yaml_DOUBLE_QUOTED_SCALAR_STYLE } - e.emitScalar(s, "", tag, style) + e.emitScalar(s, "", tag, style, nil, nil, nil, nil) } func (e *encoder) boolv(tag string, in reflect.Value) { @@ -313,23 +369,23 @@ func (e *encoder) boolv(tag string, in reflect.Value) { } else { s = "false" } - e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE) + e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE, nil, nil, nil, nil) } func (e *encoder) intv(tag string, in reflect.Value) { s := strconv.FormatInt(in.Int(), 10) - e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE) + e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE, nil, nil, nil, nil) } func (e *encoder) uintv(tag string, in reflect.Value) { s := strconv.FormatUint(in.Uint(), 10) - e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE) + e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE, nil, nil, nil, nil) } func (e *encoder) timev(tag string, in reflect.Value) { t := in.Interface().(time.Time) s := t.Format(time.RFC3339Nano) - e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE) + e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE, nil, nil, nil, nil) } func (e *encoder) floatv(tag string, in reflect.Value) { @@ -348,15 +404,174 @@ func (e *encoder) floatv(tag string, in reflect.Value) { case "NaN": s = ".nan" } - e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE) + e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE, nil, nil, nil, nil) } func (e *encoder) nilv() { - e.emitScalar("null", "", "", yaml_PLAIN_SCALAR_STYLE) + e.emitScalar("null", "", "", yaml_PLAIN_SCALAR_STYLE, nil, nil, nil, nil) } -func (e *encoder) emitScalar(value, anchor, tag string, style yaml_scalar_style_t) { +func (e *encoder) emitScalar(value, anchor, tag string, style yaml_scalar_style_t, head, line, foot, tail []byte) { + // 
TODO Kill this function. Replace all initialize calls by their underlining Go literals. implicit := tag == "" + if !implicit { + tag = longTag(tag) + } e.must(yaml_scalar_event_initialize(&e.event, []byte(anchor), []byte(tag), []byte(value), implicit, implicit, style)) + e.event.head_comment = head + e.event.line_comment = line + e.event.foot_comment = foot + e.event.tail_comment = tail e.emit() } + +func (e *encoder) nodev(in reflect.Value) { + e.node(in.Interface().(*Node), "") +} + +func (e *encoder) node(node *Node, tail string) { + // Zero nodes behave as nil. + if node.Kind == 0 && node.IsZero() { + e.nilv() + return + } + + // If the tag was not explicitly requested, and dropping it won't change the + // implicit tag of the value, don't include it in the presentation. + var tag = node.Tag + var stag = shortTag(tag) + var forceQuoting bool + if tag != "" && node.Style&TaggedStyle == 0 { + if node.Kind == ScalarNode { + if stag == strTag && node.Style&(SingleQuotedStyle|DoubleQuotedStyle|LiteralStyle|FoldedStyle) != 0 { + tag = "" + } else { + rtag, _ := resolve("", node.Value) + if rtag == stag { + tag = "" + } else if stag == strTag { + tag = "" + forceQuoting = true + } + } + } else { + var rtag string + switch node.Kind { + case MappingNode: + rtag = mapTag + case SequenceNode: + rtag = seqTag + } + if rtag == stag { + tag = "" + } + } + } + + switch node.Kind { + case DocumentNode: + yaml_document_start_event_initialize(&e.event, nil, nil, true) + e.event.head_comment = []byte(node.HeadComment) + e.emit() + for _, node := range node.Content { + e.node(node, "") + } + yaml_document_end_event_initialize(&e.event, true) + e.event.foot_comment = []byte(node.FootComment) + e.emit() + + case SequenceNode: + style := yaml_BLOCK_SEQUENCE_STYLE + if node.Style&FlowStyle != 0 { + style = yaml_FLOW_SEQUENCE_STYLE + } + e.must(yaml_sequence_start_event_initialize(&e.event, []byte(node.Anchor), []byte(longTag(tag)), tag == "", style)) + e.event.head_comment = 
[]byte(node.HeadComment) + e.emit() + for _, node := range node.Content { + e.node(node, "") + } + e.must(yaml_sequence_end_event_initialize(&e.event)) + e.event.line_comment = []byte(node.LineComment) + e.event.foot_comment = []byte(node.FootComment) + e.emit() + + case MappingNode: + style := yaml_BLOCK_MAPPING_STYLE + if node.Style&FlowStyle != 0 { + style = yaml_FLOW_MAPPING_STYLE + } + yaml_mapping_start_event_initialize(&e.event, []byte(node.Anchor), []byte(longTag(tag)), tag == "", style) + e.event.tail_comment = []byte(tail) + e.event.head_comment = []byte(node.HeadComment) + e.emit() + + // The tail logic below moves the foot comment of prior keys to the following key, + // since the value for each key may be a nested structure and the foot needs to be + // processed only the entirety of the value is streamed. The last tail is processed + // with the mapping end event. + var tail string + for i := 0; i+1 < len(node.Content); i += 2 { + k := node.Content[i] + foot := k.FootComment + if foot != "" { + kopy := *k + kopy.FootComment = "" + k = &kopy + } + e.node(k, tail) + tail = foot + + v := node.Content[i+1] + e.node(v, "") + } + + yaml_mapping_end_event_initialize(&e.event) + e.event.tail_comment = []byte(tail) + e.event.line_comment = []byte(node.LineComment) + e.event.foot_comment = []byte(node.FootComment) + e.emit() + + case AliasNode: + yaml_alias_event_initialize(&e.event, []byte(node.Value)) + e.event.head_comment = []byte(node.HeadComment) + e.event.line_comment = []byte(node.LineComment) + e.event.foot_comment = []byte(node.FootComment) + e.emit() + + case ScalarNode: + value := node.Value + if !utf8.ValidString(value) { + if stag == binaryTag { + failf("explicitly tagged !!binary data must be base64-encoded") + } + if stag != "" { + failf("cannot marshal invalid UTF-8 data as %s", stag) + } + // It can't be encoded directly as YAML so use a binary tag + // and encode it as base64. 
+ tag = binaryTag + value = encodeBase64(value) + } + + style := yaml_PLAIN_SCALAR_STYLE + switch { + case node.Style&DoubleQuotedStyle != 0: + style = yaml_DOUBLE_QUOTED_SCALAR_STYLE + case node.Style&SingleQuotedStyle != 0: + style = yaml_SINGLE_QUOTED_SCALAR_STYLE + case node.Style&LiteralStyle != 0: + style = yaml_LITERAL_SCALAR_STYLE + case node.Style&FoldedStyle != 0: + style = yaml_FOLDED_SCALAR_STYLE + case strings.Contains(value, "\n"): + style = yaml_LITERAL_SCALAR_STYLE + case forceQuoting: + style = yaml_DOUBLE_QUOTED_SCALAR_STYLE + } + + e.emitScalar(value, node.Anchor, tag, style, []byte(node.HeadComment), []byte(node.LineComment), []byte(node.FootComment), []byte(tail)) + default: + failf("cannot encode node with unknown kind %d", node.Kind) + } +} diff --git a/encode_test.go b/encode_test.go index f0911a76..4a8bf2e2 100644 --- a/encode_test.go +++ b/encode_test.go @@ -1,3 +1,18 @@ +// +// Copyright (c) 2011-2019 Canonical Ltd +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + package yaml_test import ( @@ -12,7 +27,7 @@ import ( "os" . 
"gopkg.in/check.v1" - "gopkg.in/yaml.v2" + "gopkg.in/yaml.v3" ) var marshalIntTest = 123 @@ -98,16 +113,16 @@ var marshalTests = []struct { "v: \"\"\n", }, { map[string][]string{"v": []string{"A", "B"}}, - "v:\n- A\n- B\n", + "v:\n - A\n - B\n", }, { map[string][]string{"v": []string{"A", "B\nC"}}, - "v:\n- A\n- |-\n B\n C\n", + "v:\n - A\n - |-\n B\n C\n", }, { map[string][]interface{}{"v": []interface{}{"A", 1, map[string][]int{"B": []int{2, 3}}}}, - "v:\n- A\n- 1\n- B:\n - 2\n - 3\n", + "v:\n - A\n - 1\n - B:\n - 2\n - 3\n", }, { map[string]interface{}{"a": map[interface{}]interface{}{"b": "c"}}, - "a:\n b: c\n", + "a:\n b: c\n", }, { map[string]interface{}{"a": "-"}, "a: '-'\n", @@ -129,14 +144,14 @@ var marshalTests = []struct { B string } }{struct{ B string }{"c"}}, - "a:\n b: c\n", + "a:\n b: c\n", }, { &struct { A *struct { B string } }{&struct{ B string }{"c"}}, - "a:\n b: c\n", + "a:\n b: c\n", }, { &struct { A *struct { @@ -149,10 +164,10 @@ var marshalTests = []struct { "a: 1\n", }, { &struct{ A []int }{[]int{1, 2}}, - "a:\n- 1\n- 2\n", + "a:\n - 1\n - 2\n", }, { &struct{ A [2]int }{[2]int{1, 2}}, - "a:\n- 1\n- 2\n", + "a:\n - 1\n - 2\n", }, { &struct { B int "a" @@ -161,6 +176,12 @@ var marshalTests = []struct { }, { &struct{ A bool }{true}, "a: true\n", + }, { + &struct{ A string }{"true"}, + "a: \"true\"\n", + }, { + &struct{ A string }{"off"}, + "a: \"off\"\n", }, // Conditional flag @@ -246,6 +267,11 @@ var marshalTests = []struct { } "a,flow" }{struct{ B, D string }{"c", "e"}}, "a: {b: c, d: e}\n", + }, { + &struct { + A string "a,flow" + }{"b\nc"}, + "a: \"b\\nc\"\n", }, // Unexported field @@ -274,6 +300,26 @@ var marshalTests = []struct { }{1, inlineB{2, inlineC{3}}}, "a: 1\nb: 2\nc: 3\n", }, + // Struct inlining as a pointer + { + &struct { + A int + C *inlineB `yaml:",inline"` + }{1, &inlineB{2, inlineC{3}}}, + "a: 1\nb: 2\nc: 3\n", + }, { + &struct { + A int + C *inlineB `yaml:",inline"` + }{1, nil}, + "a: 1\n", + }, { + &struct { + A int + 
D *inlineD `yaml:",inline"` + }{1, &inlineD{&inlineC{3}, 4}}, + "a: 1\nc: 3\nd: 4\n", + }, // Map inlining { @@ -312,13 +358,7 @@ var marshalTests = []struct { "a: !!binary gIGC\n", }, { map[string]string{"a": strings.Repeat("\x90", 54)}, - "a: !!binary |\n " + strings.Repeat("kJCQ", 17) + "kJ\n CQ\n", - }, - - // Ordered maps. - { - &yaml.MapSlice{{"b", 2}, {"a", 1}, {"d", 4}, {"c", 3}, {"sub", yaml.MapSlice{{"e", 5}}}}, - "b: 2\na: 1\nd: 4\nc: 3\nsub:\n e: 5\n", + "a: !!binary |\n " + strings.Repeat("kJCQ", 17) + "kJ\n CQ\n", }, // Encode unicode as utf-8 rather than in escaped form. @@ -367,6 +407,93 @@ var marshalTests = []struct { map[string]string{"a": "你好 #comment"}, "a: '你好 #comment'\n", }, + + // Ensure MarshalYAML also gets called on the result of MarshalYAML itself. + { + &marshalerType{marshalerType{true}}, + "true\n", + }, { + &marshalerType{&marshalerType{true}}, + "true\n", + }, + + // Check indentation of maps inside sequences inside maps. + { + map[string]interface{}{"a": map[string]interface{}{"b": []map[string]int{{"c": 1, "d": 2}}}}, + "a:\n b:\n - c: 1\n d: 2\n", + }, + + // Strings with tabs were disallowed as literals (issue #471). + { + map[string]string{"a": "\tB\n\tC\n"}, + "a: |\n \tB\n \tC\n", + }, + + // Ensure that strings do not wrap + { + map[string]string{"a": "abcdefghijklmnopqrstuvwxyz ABCDEFGHIJKLMNOPQRSTUVWXYZ 1234567890 abcdefghijklmnopqrstuvwxyz ABCDEFGHIJKLMNOPQRSTUVWXYZ 1234567890 "}, + "a: 'abcdefghijklmnopqrstuvwxyz ABCDEFGHIJKLMNOPQRSTUVWXYZ 1234567890 abcdefghijklmnopqrstuvwxyz ABCDEFGHIJKLMNOPQRSTUVWXYZ 1234567890 '\n", + }, + + // yaml.Node + { + &struct { + Value yaml.Node + }{ + yaml.Node{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "foo", + Style: yaml.SingleQuotedStyle, + }, + }, + "value: 'foo'\n", + }, { + yaml.Node{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "foo", + Style: yaml.SingleQuotedStyle, + }, + "'foo'\n", + }, + + // Enforced tagging with shorthand notation (issue #616). 
+ { + &struct { + Value yaml.Node + }{ + yaml.Node{ + Kind: yaml.ScalarNode, + Style: yaml.TaggedStyle, + Value: "foo", + Tag: "!!str", + }, + }, + "value: !!str foo\n", + }, { + &struct { + Value yaml.Node + }{ + yaml.Node{ + Kind: yaml.MappingNode, + Style: yaml.TaggedStyle, + Tag: "!!map", + }, + }, + "value: !!map {}\n", + }, { + &struct { + Value yaml.Node + }{ + yaml.Node{ + Kind: yaml.SequenceNode, + Style: yaml.TaggedStyle, + Tag: "!!seq", + }, + }, + "value: !!seq []\n", + }, } func (s *S) TestMarshal(c *C) { @@ -426,13 +553,13 @@ var marshalErrorTests = []struct { B int inlineB ",inline" }{1, inlineB{2, inlineC{3}}}, - panic: `Duplicated key 'b' in struct struct \{ B int; .*`, + panic: `duplicated key 'b' in struct struct \{ B int; .*`, }, { value: &struct { A int B map[string]int ",inline" }{1, map[string]int{"a": 2}}, - panic: `Can't have key "a" in inlined map; conflicts with struct field`, + panic: `cannot have key "a" in inlined map: conflicts with struct field`, }} func (s *S) TestMarshalErrors(c *C) { @@ -466,8 +593,8 @@ var marshalerTests = []struct { data string value interface{} }{ - {"_:\n hi: there\n", map[interface{}]interface{}{"hi": "there"}}, - {"_:\n- 1\n- A\n", []interface{}{1, "A"}}, + {"_:\n hi: there\n", map[interface{}]interface{}{"hi": "there"}}, + {"_:\n - 1\n - A\n", []interface{}{1, "A"}}, {"_: 10\n", 10}, {"_: null\n", nil}, {"_: BAR!\n", "BAR!"}, @@ -518,6 +645,17 @@ func (s *S) TestMarshalerError(c *C) { c.Assert(err, Equals, failingErr) } +func (s *S) TestSetIndent(c *C) { + var buf bytes.Buffer + enc := yaml.NewEncoder(&buf) + enc.SetIndent(8) + err := enc.Encode(map[string]interface{}{"a": map[string]interface{}{"b": map[string]string{"c": "d"}}}) + c.Assert(err, Equals, nil) + err = enc.Close() + c.Assert(err, Equals, nil) + c.Assert(buf.String(), Equals, "a:\n b:\n c: d\n") +} + func (s *S) TestSortedOutput(c *C) { order := []interface{}{ false, @@ -563,6 +701,9 @@ func (s *S) TestSortedOutput(c *C) { "d7abc", "d12", 
"d12a", + "e2b", + "e4b", + "e21a", } m := make(map[interface{}]int) for _, k := range order { diff --git a/example_embedded_test.go b/example_embedded_test.go index 171c0931..9d17398a 100644 --- a/example_embedded_test.go +++ b/example_embedded_test.go @@ -1,10 +1,25 @@ +// +// Copyright (c) 2011-2019 Canonical Ltd +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + package yaml_test import ( "fmt" "log" - "gopkg.in/yaml.v2" + "gopkg.in/yaml.v3" ) // An example showing how to unmarshal embedded diff --git a/go.mod b/go.mod index 1934e876..f407ea32 100644 --- a/go.mod +++ b/go.mod @@ -1,4 +1,4 @@ -module "gopkg.in/yaml.v2" +module "gopkg.in/yaml.v3" require ( "gopkg.in/check.v1" v0.0.0-20161208181325-20d25e280405 diff --git a/limit_test.go b/limit_test.go new file mode 100644 index 00000000..07a3cbd4 --- /dev/null +++ b/limit_test.go @@ -0,0 +1,128 @@ +package yaml_test + +import ( + "strings" + "testing" + + . 
"gopkg.in/check.v1" + "gopkg.in/yaml.v3" +) + +var limitTests = []struct { + name string + data []byte + error string +}{ + { + name: "1000kb of maps with 100 aliases", + data: []byte(`{a: &a [{a}` + strings.Repeat(`,{a}`, 1000*1024/4-100) + `], b: &b [*a` + strings.Repeat(`,*a`, 99) + `]}`), + error: "yaml: document contains excessive aliasing", + }, { + name: "1000kb of deeply nested slices", + data: []byte(strings.Repeat(`[`, 1000*1024)), + error: "yaml: exceeded max depth of 10000", + }, { + name: "1000kb of deeply nested maps", + data: []byte("x: " + strings.Repeat(`{`, 1000*1024)), + error: "yaml: exceeded max depth of 10000", + }, { + name: "1000kb of deeply nested indents", + data: []byte(strings.Repeat(`- `, 1000*1024)), + error: "yaml: exceeded max depth of 10000", + }, { + name: "1000kb of 1000-indent lines", + data: []byte(strings.Repeat(strings.Repeat(`- `, 1000)+"\n", 1024/2)), + }, + {name: "1kb of maps", data: []byte(`a: &a [{a}` + strings.Repeat(`,{a}`, 1*1024/4-1) + `]`)}, + {name: "10kb of maps", data: []byte(`a: &a [{a}` + strings.Repeat(`,{a}`, 10*1024/4-1) + `]`)}, + {name: "100kb of maps", data: []byte(`a: &a [{a}` + strings.Repeat(`,{a}`, 100*1024/4-1) + `]`)}, + {name: "1000kb of maps", data: []byte(`a: &a [{a}` + strings.Repeat(`,{a}`, 1000*1024/4-1) + `]`)}, + {name: "1000kb slice nested at max-depth", data: []byte(strings.Repeat(`[`, 10000) + `1` + strings.Repeat(`,1`, 1000*1024/2-20000-1) + strings.Repeat(`]`, 10000))}, + {name: "1000kb slice nested in maps at max-depth", data: []byte("{a,b:\n" + strings.Repeat(" {a,b:", 10000-2) + ` [1` + strings.Repeat(",1", 1000*1024/2-6*10000-1) + `]` + strings.Repeat(`}`, 10000-1))}, + {name: "1000kb of 10000-nested lines", data: []byte(strings.Repeat(`- `+strings.Repeat(`[`, 10000)+strings.Repeat(`]`, 10000)+"\n", 1000*1024/20000))}, +} + +func (s *S) TestLimits(c *C) { + if testing.Short() { + return + } + for _, tc := range limitTests { + var v interface{} + err := yaml.Unmarshal(tc.data, &v) + 
if len(tc.error) > 0 { + c.Assert(err, ErrorMatches, tc.error, Commentf("testcase: %s", tc.name)) + } else { + c.Assert(err, IsNil, Commentf("testcase: %s", tc.name)) + } + } +} + +func Benchmark1000KB100Aliases(b *testing.B) { + benchmark(b, "1000kb of maps with 100 aliases") +} +func Benchmark1000KBDeeplyNestedSlices(b *testing.B) { + benchmark(b, "1000kb of deeply nested slices") +} +func Benchmark1000KBDeeplyNestedMaps(b *testing.B) { + benchmark(b, "1000kb of deeply nested maps") +} +func Benchmark1000KBDeeplyNestedIndents(b *testing.B) { + benchmark(b, "1000kb of deeply nested indents") +} +func Benchmark1000KB1000IndentLines(b *testing.B) { + benchmark(b, "1000kb of 1000-indent lines") +} +func Benchmark1KBMaps(b *testing.B) { + benchmark(b, "1kb of maps") +} +func Benchmark10KBMaps(b *testing.B) { + benchmark(b, "10kb of maps") +} +func Benchmark100KBMaps(b *testing.B) { + benchmark(b, "100kb of maps") +} +func Benchmark1000KBMaps(b *testing.B) { + benchmark(b, "1000kb of maps") +} + +func BenchmarkDeepSlice(b *testing.B) { + benchmark(b, "1000kb slice nested at max-depth") +} + +func BenchmarkDeepFlow(b *testing.B) { + benchmark(b, "1000kb slice nested in maps at max-depth") +} + +func Benchmark1000KBMaxDepthNested(b *testing.B) { + benchmark(b, "1000kb of 10000-nested lines") +} + +func benchmark(b *testing.B, name string) { + for _, t := range limitTests { + if t.name != name { + continue + } + + b.ResetTimer() + + for i := 0; i < b.N; i++ { + var v interface{} + err := yaml.Unmarshal(t.data, &v) + if len(t.error) > 0 { + if err == nil { + b.Errorf("expected error, got none") + } else if err.Error() != t.error { + b.Errorf("expected error '%s', got '%s'", t.error, err.Error()) + } + } else { + if err != nil { + b.Errorf("unexpected error: %v", err) + } + } + } + + return + } + + b.Errorf("testcase %q not found", name) +} diff --git a/node_test.go b/node_test.go new file mode 100644 index 00000000..b7d0c9a3 --- /dev/null +++ b/node_test.go @@ -0,0 +1,2886 
@@ +// +// Copyright (c) 2011-2019 Canonical Ltd +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package yaml_test + +import ( + "bytes" + "fmt" + "os" + + . "gopkg.in/check.v1" + "gopkg.in/yaml.v3" + "io" + "strings" +) + +var nodeTests = []struct { + yaml string + node yaml.Node +}{ + { + "null\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Value: "null", + Tag: "!!null", + Line: 1, + Column: 1, + }}, + }, + }, { + "[encode]null\n", + yaml.Node{}, + }, { + "foo\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Value: "foo", + Tag: "!!str", + Line: 1, + Column: 1, + }}, + }, + }, { + "\"foo\"\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Style: yaml.DoubleQuotedStyle, + Value: "foo", + Tag: "!!str", + Line: 1, + Column: 1, + }}, + }, + }, { + "'foo'\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Style: yaml.SingleQuotedStyle, + Value: "foo", + Tag: "!!str", + Line: 1, + Column: 1, + }}, + }, + }, { + "!!str 123\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Style: yaml.TaggedStyle, + Value: "123", + Tag: "!!str", + Line: 1, + Column: 1, + }}, + }, + }, { + // Although the node isn't 
TaggedStyle, dropping the tag would change the value. + "[encode]!!binary gIGC\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Value: "gIGC", + Tag: "!!binary", + Line: 1, + Column: 1, + }}, + }, + }, { + // Item doesn't have a tag, but needs to be binary encoded due to its content. + "[encode]!!binary gIGC\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Value: "\x80\x81\x82", + Line: 1, + Column: 1, + }}, + }, + }, { + // Same, but with strings we can just quote them. + "[encode]\"123\"\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Value: "123", + Tag: "!!str", + Line: 1, + Column: 1, + }}, + }, + }, { + "!tag:something 123\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Style: yaml.TaggedStyle, + Value: "123", + Tag: "!tag:something", + Line: 1, + Column: 1, + }}, + }, + }, { + "[encode]!tag:something 123\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Value: "123", + Tag: "!tag:something", + Line: 1, + Column: 1, + }}, + }, + }, { + "!tag:something {}\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.MappingNode, + Style: yaml.TaggedStyle | yaml.FlowStyle, + Tag: "!tag:something", + Line: 1, + Column: 1, + }}, + }, + }, { + "[encode]!tag:something {}\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.MappingNode, + Style: yaml.FlowStyle, + Tag: "!tag:something", + Line: 1, + Column: 1, + }}, + }, + }, { + "!tag:something []\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.SequenceNode, + Style: yaml.TaggedStyle | yaml.FlowStyle, + 
Tag: "!tag:something", + Line: 1, + Column: 1, + }}, + }, + }, { + "[encode]!tag:something []\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.SequenceNode, + Style: yaml.FlowStyle, + Tag: "!tag:something", + Line: 1, + Column: 1, + }}, + }, + }, { + "''\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Style: yaml.SingleQuotedStyle, + Value: "", + Tag: "!!str", + Line: 1, + Column: 1, + }}, + }, + }, { + "|\n foo\n bar\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Style: yaml.LiteralStyle, + Value: "foo\nbar\n", + Tag: "!!str", + Line: 1, + Column: 1, + }}, + }, + }, { + "true\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Value: "true", + Tag: "!!bool", + Line: 1, + Column: 1, + }}, + }, + }, { + "-10\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Value: "-10", + Tag: "!!int", + Line: 1, + Column: 1, + }}, + }, + }, { + "4294967296\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Value: "4294967296", + Tag: "!!int", + Line: 1, + Column: 1, + }}, + }, + }, { + "0.1000\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Value: "0.1000", + Tag: "!!float", + Line: 1, + Column: 1, + }}, + }, + }, { + "-.inf\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Value: "-.inf", + Tag: "!!float", + Line: 1, + Column: 1, + }}, + }, + }, { + ".nan\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Value: ".nan", + Tag: "!!float", + Line: 1, + Column: 1, + 
}}, + }, + }, { + "{}\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.MappingNode, + Style: yaml.FlowStyle, + Value: "", + Tag: "!!map", + Line: 1, + Column: 1, + }}, + }, + }, { + "a: b c\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.MappingNode, + Value: "", + Tag: "!!map", + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Value: "a", + Tag: "!!str", + Line: 1, + Column: 1, + }, { + Kind: yaml.ScalarNode, + Value: "b c", + Tag: "!!str", + Line: 1, + Column: 4, + }}, + }}, + }, + }, { + "a:\n b: c\n d: e\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Value: "a", + Tag: "!!str", + Line: 1, + Column: 1, + }, { + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 2, + Column: 3, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Value: "b", + Tag: "!!str", + Line: 2, + Column: 3, + }, { + Kind: yaml.ScalarNode, + Value: "c", + Tag: "!!str", + Line: 2, + Column: 6, + }, { + Kind: yaml.ScalarNode, + Value: "d", + Tag: "!!str", + Line: 3, + Column: 3, + }, { + Kind: yaml.ScalarNode, + Value: "e", + Tag: "!!str", + Line: 3, + Column: 6, + }}, + }}, + }}, + }, + }, { + "a:\n - b: c\n d: e\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Value: "a", + Tag: "!!str", + Line: 1, + Column: 1, + }, { + Kind: yaml.SequenceNode, + Tag: "!!seq", + Line: 2, + Column: 3, + Content: []*yaml.Node{{ + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 2, + Column: 5, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Value: "b", + Tag: "!!str", + Line: 2, + Column: 5, + }, { + Kind: yaml.ScalarNode, + Value: "c", + Tag: 
"!!str", + Line: 2, + Column: 8, + }, { + Kind: yaml.ScalarNode, + Value: "d", + Tag: "!!str", + Line: 3, + Column: 5, + }, { + Kind: yaml.ScalarNode, + Value: "e", + Tag: "!!str", + Line: 3, + Column: 8, + }}, + }}, + }}, + }}, + }, + }, { + "a: # AI\n - b\nc:\n - d\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "a", + LineComment: "# AI", + Line: 1, + Column: 1, + }, { + Kind: yaml.SequenceNode, + Tag: "!!seq", + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "b", + Line: 2, + Column: 5, + }}, + Line: 2, + Column: 3, + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "c", + Line: 3, + Column: 1, + }, { + Kind: yaml.SequenceNode, + Tag: "!!seq", + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "d", + Line: 4, + Column: 5, + }}, + Line: 4, + Column: 3, + }}, + }}, + }, + }, { + "[decode]a:\n # HM\n - # HB1\n # HB2\n b: # IB\n c # IC\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Style: 0x0, + Tag: "!!str", + Value: "a", + Line: 1, + Column: 1, + }, { + Kind: yaml.SequenceNode, + Tag: "!!seq", + Line: 3, + Column: 3, + Content: []*yaml.Node{{ + Kind: yaml.MappingNode, + Tag: "!!map", + HeadComment: "# HM", + Line: 5, + Column: 5, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "b", + HeadComment: "# HB1\n# HB2", + LineComment: "# IB", + Line: 5, + Column: 5, + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "c", + LineComment: "# IC", + Line: 6, + Column: 7, + }}, + }}, + }}, + }}, + }, + }, { + // When encoding the value above, it loses b's inline comment. 
+ "[encode]a:\n # HM\n - # HB1\n # HB2\n b: c # IC\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Style: 0x0, + Tag: "!!str", + Value: "a", + Line: 1, + Column: 1, + }, { + Kind: yaml.SequenceNode, + Tag: "!!seq", + Line: 3, + Column: 3, + Content: []*yaml.Node{{ + Kind: yaml.MappingNode, + Tag: "!!map", + HeadComment: "# HM", + Line: 5, + Column: 5, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "b", + HeadComment: "# HB1\n# HB2", + LineComment: "# IB", + Line: 5, + Column: 5, + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "c", + LineComment: "# IC", + Line: 6, + Column: 7, + }}, + }}, + }}, + }}, + }, + }, { + // Multiple cases of comment inlining next to mapping keys. + "a: | # IA\n str\nb: >- # IB\n str\nc: # IC\n - str\nd: # ID\n str:\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "a", + Line: 1, + Column: 1, + }, { + Kind: yaml.ScalarNode, + Style: yaml.LiteralStyle, + Tag: "!!str", + Value: "str\n", + LineComment: "# IA", + Line: 1, + Column: 4, + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "b", + Line: 3, + Column: 1, + }, { + Kind: yaml.ScalarNode, + Style: yaml.FoldedStyle, + Tag: "!!str", + Value: "str", + LineComment: "# IB", + Line: 3, + Column: 4, + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "c", + LineComment: "# IC", + Line: 5, + Column: 1, + }, { + Kind: yaml.SequenceNode, + Tag: "!!seq", + Line: 6, + Column: 3, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "str", + Line: 6, + Column: 5, + }}, + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "d", + LineComment: "# ID", + Line: 7, + Column: 1, + }, { + 
Kind: yaml.MappingNode, + Tag: "!!map", + Line: 8, + Column: 3, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "str", + Line: 8, + Column: 3, + }, { + Kind: yaml.ScalarNode, + Tag: "!!null", + Line: 8, + Column: 7, + }}, + }}, + }}, + }, + }, { + // Indentless sequence. + "[decode]a:\n# HM\n- # HB1\n # HB2\n b: # IB\n c # IC\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "a", + Line: 1, + Column: 1, + }, { + Kind: yaml.SequenceNode, + Tag: "!!seq", + Line: 3, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.MappingNode, + Tag: "!!map", + HeadComment: "# HM", + Line: 5, + Column: 3, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "b", + HeadComment: "# HB1\n# HB2", + LineComment: "# IB", + Line: 5, + Column: 3, + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "c", + LineComment: "# IC", + Line: 6, + Column: 5, + }}, + }}, + }}, + }}, + }, + }, { + "- a\n- b\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.SequenceNode, + Value: "", + Tag: "!!seq", + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Value: "a", + Tag: "!!str", + Line: 1, + Column: 3, + }, { + Kind: yaml.ScalarNode, + Value: "b", + Tag: "!!str", + Line: 2, + Column: 3, + }}, + }}, + }, + }, { + "- a\n- - b\n - c\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.SequenceNode, + Tag: "!!seq", + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Value: "a", + Tag: "!!str", + Line: 1, + Column: 3, + }, { + Kind: yaml.SequenceNode, + Tag: "!!seq", + Line: 2, + Column: 3, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Value: "b", + Tag: "!!str", + Line: 2, + Column: 5, + }, { + Kind: 
yaml.ScalarNode, + Value: "c", + Tag: "!!str", + Line: 3, + Column: 5, + }}, + }}, + }}, + }, + }, { + "[a, b]\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.SequenceNode, + Style: yaml.FlowStyle, + Value: "", + Tag: "!!seq", + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Value: "a", + Tag: "!!str", + Line: 1, + Column: 2, + }, { + Kind: yaml.ScalarNode, + Value: "b", + Tag: "!!str", + Line: 1, + Column: 5, + }}, + }}, + }, + }, { + "- a\n- [b, c]\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.SequenceNode, + Tag: "!!seq", + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Value: "a", + Tag: "!!str", + Line: 1, + Column: 3, + }, { + Kind: yaml.SequenceNode, + Tag: "!!seq", + Style: yaml.FlowStyle, + Line: 2, + Column: 3, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Value: "b", + Tag: "!!str", + Line: 2, + Column: 4, + }, { + Kind: yaml.ScalarNode, + Value: "c", + Tag: "!!str", + Line: 2, + Column: 7, + }}, + }}, + }}, + }, + }, { + "a: &x 1\nb: &y 2\nc: *x\nd: *y\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.MappingNode, + Line: 1, + Column: 1, + Tag: "!!map", + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Value: "a", + Tag: "!!str", + Line: 1, + Column: 1, + }, + saveNode("x", &yaml.Node{ + Kind: yaml.ScalarNode, + Value: "1", + Tag: "!!int", + Anchor: "x", + Line: 1, + Column: 4, + }), + { + Kind: yaml.ScalarNode, + Value: "b", + Tag: "!!str", + Line: 2, + Column: 1, + }, + saveNode("y", &yaml.Node{ + Kind: yaml.ScalarNode, + Value: "2", + Tag: "!!int", + Anchor: "y", + Line: 2, + Column: 4, + }), + { + Kind: yaml.ScalarNode, + Value: "c", + Tag: "!!str", + Line: 3, + Column: 1, + }, { + Kind: yaml.AliasNode, + Value: "x", + Alias: dropNode("x"), + Line: 3, + Column: 4, + }, { + Kind: yaml.ScalarNode, + Value: "d", + 
Tag: "!!str", + Line: 4, + Column: 1, + }, { + Kind: yaml.AliasNode, + Value: "y", + Tag: "", + Alias: dropNode("y"), + Line: 4, + Column: 4, + }}, + }}, + }, + }, { + + "# One\n# Two\ntrue # Three\n# Four\n# Five\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 3, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Value: "true", + Tag: "!!bool", + Line: 3, + Column: 1, + HeadComment: "# One\n# Two", + LineComment: "# Three", + FootComment: "# Four\n# Five", + }}, + }, + }, { + + "# š\ntrue # š\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 2, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Value: "true", + Tag: "!!bool", + Line: 2, + Column: 1, + HeadComment: "# š", + LineComment: "# š", + }}, + }, + }, { + + "[decode]\n# One\n\n# Two\n\n# Three\ntrue # Four\n# Five\n\n# Six\n\n# Seven\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 7, + Column: 1, + HeadComment: "# One\n\n# Two", + FootComment: "# Six\n\n# Seven", + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Value: "true", + Tag: "!!bool", + Line: 7, + Column: 1, + HeadComment: "# Three", + LineComment: "# Four", + FootComment: "# Five", + }}, + }, + }, { + // Write out the pound character if missing from comments. 
+ "[encode]# One\n# Two\ntrue # Three\n# Four\n# Five\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 3, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Value: "true", + Tag: "!!bool", + Line: 3, + Column: 1, + HeadComment: "One\nTwo\n", + LineComment: "Three\n", + FootComment: "Four\nFive\n", + }}, + }, + }, { + "[encode]# One\n# Two\ntrue # Three\n# Four\n# Five\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 3, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Value: "true", + Tag: "!!bool", + Line: 3, + Column: 1, + HeadComment: " One\n Two", + LineComment: " Three", + FootComment: " Four\n Five", + }}, + }, + }, { + "# DH1\n\n# DH2\n\n# H1\n# H2\ntrue # I\n# F1\n# F2\n\n# DF1\n\n# DF2\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 7, + Column: 1, + HeadComment: "# DH1\n\n# DH2", + FootComment: "# DF1\n\n# DF2", + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Value: "true", + Tag: "!!bool", + Line: 7, + Column: 1, + HeadComment: "# H1\n# H2", + LineComment: "# I", + FootComment: "# F1\n# F2", + }}, + }, + }, { + "# DH1\n\n# DH2\n\n# HA1\n# HA2\nka: va # IA\n# FA1\n# FA2\n\n# HB1\n# HB2\nkb: vb # IB\n# FB1\n# FB2\n\n# DF1\n\n# DF2\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 7, + Column: 1, + HeadComment: "# DH1\n\n# DH2", + FootComment: "# DF1\n\n# DF2", + Content: []*yaml.Node{{ + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 7, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Line: 7, + Column: 1, + Tag: "!!str", + Value: "ka", + HeadComment: "# HA1\n# HA2", + FootComment: "# FA1\n# FA2", + }, { + Kind: yaml.ScalarNode, + Line: 7, + Column: 5, + Tag: "!!str", + Value: "va", + LineComment: "# IA", + }, { + Kind: yaml.ScalarNode, + Line: 13, + Column: 1, + Tag: "!!str", + Value: "kb", + HeadComment: "# HB1\n# HB2", + FootComment: "# FB1\n# FB2", + }, { + Kind: yaml.ScalarNode, + Line: 13, + Column: 5, + Tag: "!!str", + Value: "vb", + LineComment: "# IB", + }}, + }}, + }, + }, { + "# 
DH1\n\n# DH2\n\n# HA1\n# HA2\n- la # IA\n# FA1\n# FA2\n\n# HB1\n# HB2\n- lb # IB\n# FB1\n# FB2\n\n# DF1\n\n# DF2\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 7, + Column: 1, + HeadComment: "# DH1\n\n# DH2", + FootComment: "# DF1\n\n# DF2", + Content: []*yaml.Node{{ + Kind: yaml.SequenceNode, + Tag: "!!seq", + Line: 7, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Line: 7, + Column: 3, + Value: "la", + HeadComment: "# HA1\n# HA2", + LineComment: "# IA", + FootComment: "# FA1\n# FA2", + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Line: 13, + Column: 3, + Value: "lb", + HeadComment: "# HB1\n# HB2", + LineComment: "# IB", + FootComment: "# FB1\n# FB2", + }}, + }}, + }, + }, { + "# DH1\n\n- la # IA\n# HB1\n- lb\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 3, + Column: 1, + HeadComment: "# DH1", + Content: []*yaml.Node{{ + Kind: yaml.SequenceNode, + Tag: "!!seq", + Line: 3, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Line: 3, + Column: 3, + Value: "la", + LineComment: "# IA", + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Line: 5, + Column: 3, + Value: "lb", + HeadComment: "# HB1", + }}, + }}, + }, + }, { + "- la # IA\n- lb # IB\n- lc # IC\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.SequenceNode, + Tag: "!!seq", + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Line: 1, + Column: 3, + Value: "la", + LineComment: "# IA", + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Line: 2, + Column: 3, + Value: "lb", + LineComment: "# IB", + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Line: 3, + Column: 3, + Value: "lc", + LineComment: "# IC", + }}, + }}, + }, + }, { + "# DH1\n\n# HL1\n- - la\n # HB1\n - lb\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 4, + Column: 1, + HeadComment: "# DH1", + Content: []*yaml.Node{{ + Kind: yaml.SequenceNode, + Tag: "!!seq", + Line: 4, + 
Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.SequenceNode, + Tag: "!!seq", + Line: 4, + Column: 3, + HeadComment: "# HL1", + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Line: 4, + Column: 5, + Value: "la", + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Line: 6, + Column: 5, + Value: "lb", + HeadComment: "# HB1", + }}, + }}, + }}, + }, + }, { + "# DH1\n\n# HL1\n- # HA1\n - la\n # HB1\n - lb\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 4, + Column: 1, + HeadComment: "# DH1", + Content: []*yaml.Node{{ + Kind: yaml.SequenceNode, + Tag: "!!seq", + Line: 4, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.SequenceNode, + Tag: "!!seq", + Line: 5, + Column: 3, + HeadComment: "# HL1", + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Line: 5, + Column: 5, + Value: "la", + HeadComment: "# HA1", + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Line: 7, + Column: 5, + Value: "lb", + HeadComment: "# HB1", + }}, + }}, + }}, + }, + }, { + "[decode]# DH1\n\n# HL1\n- # HA1\n\n - la\n # HB1\n - lb\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 4, + Column: 1, + HeadComment: "# DH1", + Content: []*yaml.Node{{ + Kind: yaml.SequenceNode, + Tag: "!!seq", + Line: 4, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.SequenceNode, + Tag: "!!seq", + Line: 6, + Column: 3, + HeadComment: "# HL1", + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Line: 6, + Column: 5, + Value: "la", + HeadComment: "# HA1\n", + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Line: 8, + Column: 5, + Value: "lb", + HeadComment: "# HB1", + }}, + }}, + }}, + }, + }, { + "# DH1\n\n# HA1\nka:\n # HB1\n kb:\n # HC1\n # HC2\n - lc # IC\n # FC1\n # FC2\n\n # HD1\n - ld # ID\n # FD1\n\n# DF1\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 4, + Column: 1, + HeadComment: "# DH1", + FootComment: "# DF1", + Content: []*yaml.Node{{ + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 4, + Column: 1, + Content: []*yaml.Node{{ + Kind: 
yaml.ScalarNode, + Tag: "!!str", + Line: 4, + Column: 1, + Value: "ka", + HeadComment: "# HA1", + }, { + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 6, + Column: 3, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Line: 6, + Column: 3, + Value: "kb", + HeadComment: "# HB1", + }, { + Kind: yaml.SequenceNode, + Line: 9, + Column: 5, + Tag: "!!seq", + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Line: 9, + Column: 7, + Value: "lc", + HeadComment: "# HC1\n# HC2", + LineComment: "# IC", + FootComment: "# FC1\n# FC2", + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Line: 14, + Column: 7, + Value: "ld", + HeadComment: "# HD1", + + LineComment: "# ID", + FootComment: "# FD1", + }}, + }}, + }}, + }}, + }, + }, { + "# DH1\n\n# HA1\nka:\n # HB1\n kb:\n # HC1\n # HC2\n - lc # IC\n # FC1\n # FC2\n\n # HD1\n - ld # ID\n # FD1\nke: ve\n\n# DF1\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 4, + Column: 1, + HeadComment: "# DH1", + FootComment: "# DF1", + Content: []*yaml.Node{{ + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 4, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Line: 4, + Column: 1, + Value: "ka", + HeadComment: "# HA1", + }, { + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 6, + Column: 3, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Line: 6, + Column: 3, + Value: "kb", + HeadComment: "# HB1", + }, { + Kind: yaml.SequenceNode, + Line: 9, + Column: 5, + Tag: "!!seq", + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Line: 9, + Column: 7, + Value: "lc", + HeadComment: "# HC1\n# HC2", + LineComment: "# IC", + FootComment: "# FC1\n# FC2", + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Line: 14, + Column: 7, + Value: "ld", + HeadComment: "# HD1", + LineComment: "# ID", + FootComment: "# FD1", + }}, + }}, + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Line: 16, + Column: 1, + Value: "ke", + }, { + Kind: yaml.ScalarNode, + Tag: 
"!!str", + Line: 16, + Column: 5, + Value: "ve", + }}, + }}, + }, + }, { + "# DH1\n\n# DH2\n\n# HA1\n# HA2\nka:\n # HB1\n # HB2\n kb:\n" + + " # HC1\n # HC2\n kc:\n # HD1\n # HD2\n kd: vd\n # FD1\n # FD2\n" + + " # FC1\n # FC2\n # FB1\n # FB2\n# FA1\n# FA2\n\n# HE1\n# HE2\nke: ve\n# FE1\n# FE2\n\n# DF1\n\n# DF2\n", + yaml.Node{ + Kind: yaml.DocumentNode, + HeadComment: "# DH1\n\n# DH2", + FootComment: "# DF1\n\n# DF2", + Line: 7, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 7, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "ka", + HeadComment: "# HA1\n# HA2", + FootComment: "# FA1\n# FA2", + Line: 7, + Column: 1, + }, { + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 10, + Column: 3, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "kb", + HeadComment: "# HB1\n# HB2", + FootComment: "# FB1\n# FB2", + Line: 10, + Column: 3, + }, { + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 13, + Column: 5, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "kc", + HeadComment: "# HC1\n# HC2", + FootComment: "# FC1\n# FC2", + Line: 13, + Column: 5, + }, { + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 16, + Column: 7, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "kd", + HeadComment: "# HD1\n# HD2", + FootComment: "# FD1\n# FD2", + Line: 16, + Column: 7, + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "vd", + Line: 16, + Column: 11, + }}, + }}, + }}, + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "ke", + HeadComment: "# HE1\n# HE2", + FootComment: "# FE1\n# FE2", + Line: 28, + Column: 1, + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "ve", + Line: 28, + Column: 5, + }}, + }}, + }, + }, { + // Same as above but indenting ke in so it's also part of ka's value. 
+ "# DH1\n\n# DH2\n\n# HA1\n# HA2\nka:\n # HB1\n # HB2\n kb:\n" + + " # HC1\n # HC2\n kc:\n # HD1\n # HD2\n kd: vd\n # FD1\n # FD2\n" + + " # FC1\n # FC2\n # FB1\n # FB2\n\n # HE1\n # HE2\n ke: ve\n # FE1\n # FE2\n# FA1\n# FA2\n\n# DF1\n\n# DF2\n", + yaml.Node{ + Kind: yaml.DocumentNode, + HeadComment: "# DH1\n\n# DH2", + FootComment: "# DF1\n\n# DF2", + Line: 7, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 7, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "ka", + HeadComment: "# HA1\n# HA2", + FootComment: "# FA1\n# FA2", + Line: 7, + Column: 1, + }, { + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 10, + Column: 3, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "kb", + HeadComment: "# HB1\n# HB2", + FootComment: "# FB1\n# FB2", + Line: 10, + Column: 3, + }, { + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 13, + Column: 5, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "kc", + HeadComment: "# HC1\n# HC2", + FootComment: "# FC1\n# FC2", + Line: 13, + Column: 5, + }, { + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 16, + Column: 7, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "kd", + HeadComment: "# HD1\n# HD2", + FootComment: "# FD1\n# FD2", + Line: 16, + Column: 7, + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "vd", + Line: 16, + Column: 11, + }}, + }}, + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "ke", + HeadComment: "# HE1\n# HE2", + FootComment: "# FE1\n# FE2", + Line: 26, + Column: 3, + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "ve", + Line: 26, + Column: 7, + }}, + }}, + }}, + }, + }, { + // Decode only due to lack of newline at the end. 
+ "[decode]# HA1\nka:\n # HB1\n kb: vb\n # FB1\n# FA1", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 2, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 2, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "ka", + HeadComment: "# HA1", + FootComment: "# FA1", + Line: 2, + Column: 1, + }, { + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 4, + Column: 3, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "kb", + HeadComment: "# HB1", + FootComment: "# FB1", + Line: 4, + Column: 3, + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "vb", + Line: 4, + Column: 7, + }}, + }}, + }}, + }, + }, { + // Same as above, but with newline at the end. + "# HA1\nka:\n # HB1\n kb: vb\n # FB1\n# FA1\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 2, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 2, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "ka", + HeadComment: "# HA1", + FootComment: "# FA1", + Line: 2, + Column: 1, + }, { + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 4, + Column: 3, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "kb", + HeadComment: "# HB1", + FootComment: "# FB1", + Line: 4, + Column: 3, + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "vb", + Line: 4, + Column: 7, + }}, + }}, + }}, + }, + }, { + // Same as above, but without FB1. 
+ "# HA1\nka:\n # HB1\n kb: vb\n# FA1\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 2, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 2, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "ka", + HeadComment: "# HA1", + FootComment: "# FA1", + Line: 2, + Column: 1, + }, { + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 4, + Column: 3, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "kb", + HeadComment: "# HB1", + Line: 4, + Column: 3, + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "vb", + Line: 4, + Column: 7, + }}, + }}, + }}, + }, + }, { + // Same as above, but with two newlines at the end. Decode-only for that. + "[decode]# HA1\nka:\n # HB1\n kb: vb\n # FB1\n# FA1\n\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 2, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 2, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "ka", + HeadComment: "# HA1", + FootComment: "# FA1", + Line: 2, + Column: 1, + }, { + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 4, + Column: 3, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "kb", + HeadComment: "# HB1", + FootComment: "# FB1", + Line: 4, + Column: 3, + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "vb", + Line: 4, + Column: 7, + }}, + }}, + }}, + }, + }, { + // Similar to above, but make HB1 look more like a footer of ka. 
+ "[decode]# HA1\nka:\n# HB1\n\n kb: vb\n# FA1\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 2, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 2, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "ka", + HeadComment: "# HA1", + FootComment: "# FA1", + Line: 2, + Column: 1, + }, { + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 5, + Column: 3, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "kb", + HeadComment: "# HB1\n", + Line: 5, + Column: 3, + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "vb", + Line: 5, + Column: 7, + }}, + }}, + }}, + }, + }, { + "ka:\n kb: vb\n# FA1\n\nkc: vc\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "ka", + Line: 1, + Column: 1, + FootComment: "# FA1", + }, { + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 2, + Column: 3, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "kb", + Line: 2, + Column: 3, + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "vb", + Line: 2, + Column: 7, + }}, + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "kc", + Line: 5, + Column: 1, + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "vc", + Line: 5, + Column: 5, + }}, + }}, + }, + }, { + "ka:\n kb: vb\n# HC1\nkc: vc\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "ka", + Line: 1, + Column: 1, + }, { + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 2, + Column: 3, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "kb", + Line: 2, + Column: 3, + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", 
+ Value: "vb", + Line: 2, + Column: 7, + }}, + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "kc", + HeadComment: "# HC1", + Line: 4, + Column: 1, + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "vc", + Line: 4, + Column: 5, + }}, + }}, + }, + }, { + // Decode only due to empty line before HC1. + "[decode]ka:\n kb: vb\n\n# HC1\nkc: vc\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "ka", + Line: 1, + Column: 1, + }, { + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 2, + Column: 3, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "kb", + Line: 2, + Column: 3, + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "vb", + Line: 2, + Column: 7, + }}, + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "kc", + HeadComment: "# HC1", + Line: 5, + Column: 1, + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "vc", + Line: 5, + Column: 5, + }}, + }}, + }, + }, { + // Decode-only due to empty lines around HC1. 
+ "[decode]ka:\n kb: vb\n\n# HC1\n\nkc: vc\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "ka", + Line: 1, + Column: 1, + }, { + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 2, + Column: 3, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "kb", + Line: 2, + Column: 3, + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "vb", + Line: 2, + Column: 7, + }}, + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "kc", + HeadComment: "# HC1\n", + Line: 6, + Column: 1, + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "vc", + Line: 6, + Column: 5, + }}, + }}, + }, + }, { + "ka: # IA\n kb: # IB\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "ka", + Line: 1, + Column: 1, + LineComment: "# IA", + }, { + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 2, + Column: 3, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "kb", + Line: 2, + Column: 3, + LineComment: "# IB", + }, { + Kind: yaml.ScalarNode, + Tag: "!!null", + Line: 2, + Column: 6, + }}, + }}, + }}, + }, + }, { + "# HA1\nka:\n # HB1\n kb: vb\n # FB1\n# HC1\n# HC2\nkc: vc\n# FC1\n# FC2\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 2, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 2, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "ka", + HeadComment: "# HA1", + Line: 2, + Column: 1, + }, { + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 4, + Column: 3, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "kb", + HeadComment: "# HB1", + FootComment: "# FB1", + Line: 
4, + Column: 3, + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "vb", + Line: 4, + Column: 7, + }}, + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "kc", + HeadComment: "# HC1\n# HC2", + FootComment: "# FC1\n# FC2", + Line: 8, + Column: 1, + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "vc", + Line: 8, + Column: 5, + }}, + }}, + }, + }, { + // Same as above, but decode only due to empty line between ka's value and kc's headers. + "[decode]# HA1\nka:\n # HB1\n kb: vb\n # FB1\n\n# HC1\n# HC2\nkc: vc\n# FC1\n# FC2\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 2, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 2, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "ka", + HeadComment: "# HA1", + Line: 2, + Column: 1, + }, { + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 4, + Column: 3, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "kb", + HeadComment: "# HB1", + FootComment: "# FB1", + Line: 4, + Column: 3, + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "vb", + Line: 4, + Column: 7, + }}, + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "kc", + HeadComment: "# HC1\n# HC2", + FootComment: "# FC1\n# FC2", + Line: 9, + Column: 1, + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "vc", + Line: 9, + Column: 5, + }}, + }}, + }, + }, { + "# H1\n[la, lb] # I\n# F1\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 2, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.SequenceNode, + Tag: "!!seq", + Style: yaml.FlowStyle, + Line: 2, + Column: 1, + HeadComment: "# H1", + LineComment: "# I", + FootComment: "# F1", + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Line: 2, + Column: 2, + Value: "la", + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Line: 2, + Column: 6, + Value: "lb", + }}, + }}, + }, + }, { + "# DH1\n\n# SH1\n[\n # HA1\n la, # IA\n # FA1\n\n # HB1\n lb, # IB\n # FB1\n]\n# 
SF1\n\n# DF1\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 4, + Column: 1, + HeadComment: "# DH1", + FootComment: "# DF1", + Content: []*yaml.Node{{ + Kind: yaml.SequenceNode, + Tag: "!!seq", + Style: yaml.FlowStyle, + Line: 4, + Column: 1, + HeadComment: "# SH1", + FootComment: "# SF1", + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Line: 6, + Column: 3, + Value: "la", + HeadComment: "# HA1", + LineComment: "# IA", + FootComment: "# FA1", + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Line: 10, + Column: 3, + Value: "lb", + HeadComment: "# HB1", + LineComment: "# IB", + FootComment: "# FB1", + }}, + }}, + }, + }, { + // Same as above, but with extra newlines before FB1 and FB2 + "[decode]# DH1\n\n# SH1\n[\n # HA1\n la, # IA\n # FA1\n\n # HB1\n lb, # IB\n\n\n # FB1\n\n# FB2\n]\n# SF1\n\n# DF1\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 4, + Column: 1, + HeadComment: "# DH1", + FootComment: "# DF1", + Content: []*yaml.Node{{ + Kind: yaml.SequenceNode, + Tag: "!!seq", + Style: yaml.FlowStyle, + Line: 4, + Column: 1, + HeadComment: "# SH1", + FootComment: "# SF1", + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Line: 6, + Column: 3, + Value: "la", + HeadComment: "# HA1", + LineComment: "# IA", + FootComment: "# FA1", + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Line: 10, + Column: 3, + Value: "lb", + HeadComment: "# HB1", + LineComment: "# IB", + FootComment: "# FB1\n\n# FB2", + }}, + }}, + }, + }, { + "# DH1\n\n# SH1\n[\n # HA1\n la,\n # FA1\n\n # HB1\n lb,\n # FB1\n]\n# SF1\n\n# DF1\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 4, + Column: 1, + HeadComment: "# DH1", + FootComment: "# DF1", + Content: []*yaml.Node{{ + Kind: yaml.SequenceNode, + Tag: "!!seq", + Style: yaml.FlowStyle, + Line: 4, + Column: 1, + HeadComment: "# SH1", + FootComment: "# SF1", + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Line: 6, + Column: 3, + Value: "la", + HeadComment: "# HA1", + FootComment: 
"# FA1", + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Line: 10, + Column: 3, + Value: "lb", + HeadComment: "# HB1", + FootComment: "# FB1", + }}, + }}, + }, + }, { + "ka:\n kb: [\n # HA1\n la,\n # FA1\n\n # HB1\n lb,\n # FB1\n ]\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.MappingNode, + Tag: "!!map", + Line: 1, + Column: 1, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "ka", + Line: 1, + Column: 1, + }, { + Kind: 0x4, + Tag: "!!map", + Line: 2, + Column: 3, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "kb", + Line: 2, + Column: 3, + }, { + Kind: yaml.SequenceNode, + Style: 0x20, + Tag: "!!seq", + Line: 2, + Column: 7, + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "la", + HeadComment: "# HA1", + FootComment: "# FA1", + Line: 4, + Column: 5, + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Value: "lb", + HeadComment: "# HB1", + FootComment: "# FB1", + Line: 8, + Column: 5, + }}, + }}, + }}, + }}, + }, + }, { + "# DH1\n\n# MH1\n{\n # HA1\n ka: va, # IA\n # FA1\n\n # HB1\n kb: vb, # IB\n # FB1\n}\n# MF1\n\n# DF1\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 4, + Column: 1, + HeadComment: "# DH1", + FootComment: "# DF1", + Content: []*yaml.Node{{ + Kind: yaml.MappingNode, + Tag: "!!map", + Style: yaml.FlowStyle, + Line: 4, + Column: 1, + HeadComment: "# MH1", + FootComment: "# MF1", + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Line: 6, + Column: 3, + Value: "ka", + HeadComment: "# HA1", + FootComment: "# FA1", + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Line: 6, + Column: 7, + Value: "va", + LineComment: "# IA", + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Line: 10, + Column: 3, + Value: "kb", + HeadComment: "# HB1", + FootComment: "# FB1", + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Line: 10, + Column: 7, + Value: "vb", + LineComment: "# IB", + }}, + }}, + }, + }, { 
+ "# DH1\n\n# MH1\n{\n # HA1\n ka: va,\n # FA1\n\n # HB1\n kb: vb,\n # FB1\n}\n# MF1\n\n# DF1\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 4, + Column: 1, + HeadComment: "# DH1", + FootComment: "# DF1", + Content: []*yaml.Node{{ + Kind: yaml.MappingNode, + Tag: "!!map", + Style: yaml.FlowStyle, + Line: 4, + Column: 1, + HeadComment: "# MH1", + FootComment: "# MF1", + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Line: 6, + Column: 3, + Value: "ka", + HeadComment: "# HA1", + FootComment: "# FA1", + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Line: 6, + Column: 7, + Value: "va", + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Line: 10, + Column: 3, + Value: "kb", + HeadComment: "# HB1", + FootComment: "# FB1", + }, { + Kind: yaml.ScalarNode, + Tag: "!!str", + Line: 10, + Column: 7, + Value: "vb", + }}, + }}, + }, + }, { + "# DH1\n\n# DH2\n\n# HA1\n# HA2\n- &x la # IA\n# FA1\n# FA2\n\n# HB1\n# HB2\n- *x # IB\n# FB1\n# FB2\n\n# DF1\n\n# DF2\n", + yaml.Node{ + Kind: yaml.DocumentNode, + Line: 7, + Column: 1, + HeadComment: "# DH1\n\n# DH2", + FootComment: "# DF1\n\n# DF2", + Content: []*yaml.Node{{ + Kind: yaml.SequenceNode, + Tag: "!!seq", + Line: 7, + Column: 1, + Content: []*yaml.Node{ + saveNode("x", &yaml.Node{ + Kind: yaml.ScalarNode, + Tag: "!!str", + Line: 7, + Column: 3, + Value: "la", + HeadComment: "# HA1\n# HA2", + LineComment: "# IA", + FootComment: "# FA1\n# FA2", + Anchor: "x", + }), { + Kind: yaml.AliasNode, + Line: 13, + Column: 3, + Value: "x", + Alias: dropNode("x"), + HeadComment: "# HB1\n# HB2", + LineComment: "# IB", + FootComment: "# FB1\n# FB2", + }, + }, + }}, + }, + }, +} + +func (s *S) TestNodeRoundtrip(c *C) { + defer os.Setenv("TZ", os.Getenv("TZ")) + os.Setenv("TZ", "UTC") + for i, item := range nodeTests { + c.Logf("test %d: %q", i, item.yaml) + + if strings.Contains(item.yaml, "#") { + var buf bytes.Buffer + fprintComments(&buf, &item.node, " ") + c.Logf(" expected comments:\n%s", buf.Bytes()) + } + + 
decode := true +			encode := true + +			testYaml := item.yaml +			if s := strings.TrimPrefix(testYaml, "[decode]"); s != testYaml { +				encode = false +				testYaml = s +			} +			if s := strings.TrimPrefix(testYaml, "[encode]"); s != testYaml { +				decode = false +				testYaml = s +			} + +			if decode { +				var node yaml.Node +				err := yaml.Unmarshal([]byte(testYaml), &node) +				c.Assert(err, IsNil) +				if strings.Contains(item.yaml, "#") { +					var buf bytes.Buffer +					fprintComments(&buf, &node, "    ") +					c.Logf("  obtained comments:\n%s", buf.Bytes()) +				} +				c.Assert(&node, DeepEquals, &item.node) +			} +			if encode { +				node := deepCopyNode(&item.node, nil) +				buf := bytes.Buffer{} +				enc := yaml.NewEncoder(&buf) +				enc.SetIndent(2) +				err := enc.Encode(node) +				c.Assert(err, IsNil) +				err = enc.Close() +				c.Assert(err, IsNil) +				c.Assert(buf.String(), Equals, testYaml) + +				// Ensure there were no mutations to the tree. +				c.Assert(node, DeepEquals, &item.node) +			} +	} +} + +func deepCopyNode(node *yaml.Node, cache map[*yaml.Node]*yaml.Node) *yaml.Node { +	if n, ok := cache[node]; ok { +		return n +	} +	if cache == nil { +		cache = make(map[*yaml.Node]*yaml.Node) +	} +	copy := *node +	cache[node] = &copy +	copy.Content = nil +	for _, elem := range node.Content { +		copy.Content = append(copy.Content, deepCopyNode(elem, cache)) +	} +	if node.Alias != nil { +		copy.Alias = deepCopyNode(node.Alias, cache) +	} +	return &copy +} + +var savedNodes = make(map[string]*yaml.Node) + +func saveNode(name string, node *yaml.Node) *yaml.Node { +	savedNodes[name] = node +	return node +} + +func peekNode(name string) *yaml.Node { +	return savedNodes[name] +} + +func dropNode(name string) *yaml.Node { +	node := savedNodes[name] +	delete(savedNodes, name) +	return node +} + +var setStringTests = []struct { +	str  string +	yaml string +	node yaml.Node +}{ +	{ +		"something simple", +		"something simple\n", +		yaml.Node{ +			Kind:  yaml.ScalarNode, +			Value: "something simple", +			Tag:   "!!str", +		}, +	}, { +		`"quoted value"`, +		"'\"quoted value\"'\n", + 
yaml.Node{ + Kind: yaml.ScalarNode, + Value: `"quoted value"`, + Tag: "!!str", + }, + }, { + "multi\nline", + "|-\n multi\n line\n", + yaml.Node{ + Kind: yaml.ScalarNode, + Value: "multi\nline", + Tag: "!!str", + Style: yaml.LiteralStyle, + }, + }, { + "123", + "\"123\"\n", + yaml.Node{ + Kind: yaml.ScalarNode, + Value: "123", + Tag: "!!str", + }, + }, { + "multi\nline\n", + "|\n multi\n line\n", + yaml.Node{ + Kind: yaml.ScalarNode, + Value: "multi\nline\n", + Tag: "!!str", + Style: yaml.LiteralStyle, + }, + }, { + "\x80\x81\x82", + "!!binary gIGC\n", + yaml.Node{ + Kind: yaml.ScalarNode, + Value: "gIGC", + Tag: "!!binary", + }, + }, +} + +func (s *S) TestSetString(c *C) { + defer os.Setenv("TZ", os.Getenv("TZ")) + os.Setenv("TZ", "UTC") + for i, item := range setStringTests { + c.Logf("test %d: %q", i, item.str) + + var node yaml.Node + + node.SetString(item.str) + + c.Assert(node, DeepEquals, item.node) + + buf := bytes.Buffer{} + enc := yaml.NewEncoder(&buf) + enc.SetIndent(2) + err := enc.Encode(&item.node) + c.Assert(err, IsNil) + err = enc.Close() + c.Assert(err, IsNil) + c.Assert(buf.String(), Equals, item.yaml) + + var doc yaml.Node + err = yaml.Unmarshal([]byte(item.yaml), &doc) + c.Assert(err, IsNil) + + var str string + err = node.Decode(&str) + c.Assert(err, IsNil) + c.Assert(str, Equals, item.str) + } +} + +var nodeEncodeDecodeTests = []struct { + value interface{} + yaml string + node yaml.Node +}{{ + "something simple", + "something simple\n", + yaml.Node{ + Kind: yaml.ScalarNode, + Value: "something simple", + Tag: "!!str", + }, +}, { + `"quoted value"`, + "'\"quoted value\"'\n", + yaml.Node{ + Kind: yaml.ScalarNode, + Style: yaml.SingleQuotedStyle, + Value: `"quoted value"`, + Tag: "!!str", + }, +}, { + 123, + "123", + yaml.Node{ + Kind: yaml.ScalarNode, + Value: `123`, + Tag: "!!int", + }, +}, { + []interface{}{1, 2}, + "[1, 2]", + yaml.Node{ + Kind: yaml.SequenceNode, + Tag: "!!seq", + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Value: 
"1", + Tag: "!!int", + }, { + Kind: yaml.ScalarNode, + Value: "2", + Tag: "!!int", + }}, + }, +}, { + map[string]interface{}{"a": "b"}, + "a: b", + yaml.Node{ + Kind: yaml.MappingNode, + Tag: "!!map", + Content: []*yaml.Node{{ + Kind: yaml.ScalarNode, + Value: "a", + Tag: "!!str", + }, { + Kind: yaml.ScalarNode, + Value: "b", + Tag: "!!str", + }}, + }, +}} + +func (s *S) TestNodeEncodeDecode(c *C) { + for i, item := range nodeEncodeDecodeTests { + c.Logf("Encode/Decode test value #%d: %#v", i, item.value) + + var v interface{} + err := item.node.Decode(&v) + c.Assert(err, IsNil) + c.Assert(v, DeepEquals, item.value) + + var n yaml.Node + err = n.Encode(item.value) + c.Assert(err, IsNil) + c.Assert(n, DeepEquals, item.node) + } +} + +func (s *S) TestNodeZeroEncodeDecode(c *C) { + // Zero node value behaves as nil when encoding... + var n yaml.Node + data, err := yaml.Marshal(&n) + c.Assert(err, IsNil) + c.Assert(string(data), Equals, "null\n") + + // ... and decoding. + var v *struct{} = &struct{}{} + c.Assert(n.Decode(&v), IsNil) + c.Assert(v, IsNil) + + // ... and even when looking for its tag. + c.Assert(n.ShortTag(), Equals, "!!null") + + // Kind zero is still unknown, though. 
+ n.Line = 1 + _, err = yaml.Marshal(&n) + c.Assert(err, ErrorMatches, "yaml: cannot encode node with unknown kind 0") + c.Assert(n.Decode(&v), ErrorMatches, "yaml: cannot decode node with unknown kind 0") +} + +func (s *S) TestNodeOmitEmpty(c *C) { + var v struct { + A int + B yaml.Node ",omitempty" + } + v.A = 1 + data, err := yaml.Marshal(&v) + c.Assert(err, IsNil) + c.Assert(string(data), Equals, "a: 1\n") + + v.B.Line = 1 + _, err = yaml.Marshal(&v) + c.Assert(err, ErrorMatches, "yaml: cannot encode node with unknown kind 0") +} + +func fprintComments(out io.Writer, node *yaml.Node, indent string) { + switch node.Kind { + case yaml.ScalarNode: + fmt.Fprintf(out, "%s<%s> ", indent, node.Value) + fprintCommentSet(out, node) + fmt.Fprintf(out, "\n") + case yaml.DocumentNode: + fmt.Fprintf(out, "%s