diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index db0a2be3..1c29078c 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -26,6 +26,20 @@ jobs:
- name: Set up Chrome
uses: browser-actions/setup-chrome@latest
+ - name: Run tests
+ run: make ci
+
+ - name: Run octocov
+ uses: k1LoW/octocov-action@v1
+ lint:
+ name: Lint
+ runs-on: ubuntu-latest
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ steps:
+ - name: Check out source code
+ uses: actions/checkout@v4
+
- name: Run lint
uses: reviewdog/action-golangci-lint@v2
with:
@@ -45,12 +59,6 @@ jobs:
go-version-file: go.mod
repo-checkout: false
go-package: ./...
-
- - name: Run tests
- run: make ci
-
- - name: Run octocov
- uses: k1LoW/octocov-action@v1
os-test:
name: Run on each OS
strategy:
diff --git a/.golangci.yml b/.golangci.yml
index 4ab95374..85bc6a1b 100644
--- a/.golangci.yml
+++ b/.golangci.yml
@@ -22,8 +22,5 @@ linters-settings:
- name: exported
disabled: false
issues:
- # include:
- # - EXC0012
- # - EXC0014
- exclude:
- - SA3000
+ exclude-dirs:
+ - tmpmod
diff --git a/.gostyle.yml b/.gostyle.yml
index fa2d8fc4..6f33b72c 100755
--- a/.gostyle.yml
+++ b/.gostyle.yml
@@ -14,3 +14,5 @@ analyzers-settings:
# medium-varname-max: 8 # max length of variable name for medium scope (default: -1)
# large-varname-max: 16 # max length of variable name for large scope (default: -1)
# very-large-varname-max: 32 # max length of variable name for very large scope (default: -1)
+exclude-files:
+ - tmpmod/**/*.go
diff --git a/.octocov.yml b/.octocov.yml
index de26a573..94c6152c 100644
--- a/.octocov.yml
+++ b/.octocov.yml
@@ -1,13 +1,17 @@
coverage:
if: true
+ exclude:
+ - 'github.com/k1LoW/runn/tmpmod/**/*'
codeToTestRatio:
code:
- '**/*.go'
- '!**/*_test.go'
- '!testutil/*.go'
+ - '!tmpmod/**/*.go'
test:
- '**/*_test.go'
- 'testutil/*.go'
+ - '!tmpmod/**/*.go'
testExecutionTime:
if: true
diff:
diff --git a/book.go b/book.go
index 29b9b9e0..bde1775b 100644
--- a/book.go
+++ b/book.go
@@ -12,7 +12,7 @@ import (
"time"
"github.com/goccy/go-json"
- "github.com/goccy/go-yaml"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml"
"github.com/k1LoW/duration"
"github.com/k1LoW/sshc/v4"
)
diff --git a/eval.go b/eval.go
index 31b002cc..81d1ff4c 100644
--- a/eval.go
+++ b/eval.go
@@ -12,7 +12,7 @@ import (
"github.com/expr-lang/expr/parser"
"github.com/expr-lang/expr/parser/lexer"
"github.com/goccy/go-json"
- "github.com/goccy/go-yaml"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml"
"github.com/k1LoW/expand"
"github.com/xlab/treeprint"
)
diff --git a/go.mod b/go.mod
index 71210e18..14c22649 100644
--- a/go.mod
+++ b/go.mod
@@ -18,9 +18,9 @@ require (
github.com/expr-lang/expr v1.16.5
github.com/fatih/color v1.16.0
github.com/gliderlabs/ssh v0.3.7
+ github.com/go-playground/validator/v10 v10.4.1
github.com/go-sql-driver/mysql v1.8.1
github.com/goccy/go-json v0.10.2
- github.com/goccy/go-yaml v1.11.3
github.com/golang-sql/sqlexp v0.1.0
github.com/google/go-cmp v0.6.0
github.com/google/go-github/v58 v58.0.0
@@ -46,6 +46,7 @@ require (
github.com/k1LoW/urlfilepath v0.1.0
github.com/lestrrat-go/backoff/v2 v2.0.8
github.com/lib/pq v1.10.9
+ github.com/mattn/go-colorable v0.1.13
github.com/mattn/go-isatty v0.0.20
github.com/minio/pkg v1.7.5
github.com/mitchellh/copystructure v1.2.0
@@ -64,6 +65,7 @@ require (
github.com/xo/dburl v0.23.0
golang.org/x/crypto v0.22.0
golang.org/x/sync v0.7.0
+ golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028
google.golang.org/grpc v1.63.2
google.golang.org/protobuf v1.33.1-0.20240408130810-98873a205002
gopkg.in/yaml.v2 v2.4.0
@@ -109,9 +111,12 @@ require (
github.com/fullstorydev/grpcurl v1.8.9 // indirect
github.com/go-logr/logr v1.4.1 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
+ github.com/go-playground/locales v0.13.0 // indirect
+ github.com/go-playground/universal-translator v0.17.0 // indirect
github.com/gobwas/httphead v0.1.0 // indirect
github.com/gobwas/pool v0.2.1 // indirect
github.com/gobwas/ws v1.3.2 // indirect
+ github.com/goccy/go-yaml v1.11.3 // indirect
github.com/gogo/protobuf v1.3.2 // indirect
github.com/golang-jwt/jwt/v4 v4.5.0 // indirect
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
@@ -133,9 +138,9 @@ require (
github.com/josharian/txtarfs v0.0.0-20210615234325-77aca6df5bca // indirect
github.com/json-iterator/go v1.1.12 // indirect
github.com/kevinburke/ssh_config v1.2.0 // indirect
+ github.com/leodido/go-urn v1.2.0 // indirect
github.com/lestrrat-go/option v1.0.1 // indirect
github.com/mailru/easyjson v0.7.7 // indirect
- github.com/mattn/go-colorable v0.1.13 // indirect
github.com/mattn/go-runewidth v0.0.15 // indirect
github.com/mattn/go-shellwords v1.0.12 // indirect
github.com/mattn/go-tty v0.0.5 // indirect
@@ -177,7 +182,6 @@ require (
golang.org/x/text v0.14.0 // indirect
golang.org/x/time v0.5.0 // indirect
golang.org/x/tools v0.19.0 // indirect
- golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 // indirect
google.golang.org/api v0.174.0 // indirect
google.golang.org/genproto v0.0.0-20240228224816-df926f6c8641 // indirect
google.golang.org/genproto/googleapis/api v0.0.0-20240314234333-6e1732d8331c // indirect
diff --git a/go.sum b/go.sum
index ecf19f08..18235985 100644
--- a/go.sum
+++ b/go.sum
@@ -133,6 +133,8 @@ github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ=
github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
+github.com/go-playground/assert/v2 v2.0.1 h1:MsBgLAaY856+nPRTKrp3/OZK38U/wa0CcBYNjji3q3A=
+github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
github.com/go-playground/locales v0.13.0 h1:HyWk6mgj5qFqCT5fjGBuRArbVDfE4hi8+e8ceBS/t7Q=
github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8=
github.com/go-playground/universal-translator v0.17.0 h1:icxd5fm+REJzpZx7ZfpaD876Lmtgy7VtROAbHHXk8no=
@@ -531,6 +533,7 @@ golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuX
golang.org/x/term v0.19.0 h1:+ThwsDv+tYfnJFhF4L8jITxu1tdTWRTZpdsWgEgjL6Q=
golang.org/x/term v0.19.0/go.mod h1:2CuTdWZ7KHSQwUzKva0cbMg6q2DMI3Mmxp+gKJbskEk=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
diff --git a/http_test.go b/http_test.go
index 521152a6..1b0378b0 100644
--- a/http_test.go
+++ b/http_test.go
@@ -13,7 +13,7 @@ import (
"testing"
"time"
- "github.com/goccy/go-yaml"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml"
"github.com/google/go-cmp/cmp"
"github.com/k1LoW/runn/testutil"
)
diff --git a/http_validator.go b/http_validator.go
index d1695cc1..712e009d 100644
--- a/http_validator.go
+++ b/http_validator.go
@@ -15,7 +15,7 @@ import (
"strconv"
"strings"
- "github.com/goccy/go-yaml"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml"
"github.com/pb33f/libopenapi"
validator "github.com/pb33f/libopenapi-validator"
verrors "github.com/pb33f/libopenapi-validator/errors"
diff --git a/loop.go b/loop.go
index 4def6c38..2d25127d 100644
--- a/loop.go
+++ b/loop.go
@@ -6,7 +6,7 @@ import (
"strings"
"time"
- "github.com/goccy/go-yaml"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml"
"github.com/lestrrat-go/backoff/v2"
)
diff --git a/parse.go b/parse.go
index 7b428683..e83f9f97 100644
--- a/parse.go
+++ b/parse.go
@@ -7,7 +7,7 @@ import (
"strings"
"time"
- "github.com/goccy/go-yaml"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml"
"github.com/k1LoW/duration"
"google.golang.org/grpc/metadata"
)
diff --git a/parse_test.go b/parse_test.go
index 49619219..daf17652 100644
--- a/parse_test.go
+++ b/parse_test.go
@@ -5,7 +5,7 @@ import (
"testing"
"time"
- "github.com/goccy/go-yaml"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml"
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
"google.golang.org/grpc/metadata"
diff --git a/runbook.go b/runbook.go
index 14e882a1..fd9ba803 100644
--- a/runbook.go
+++ b/runbook.go
@@ -10,11 +10,11 @@ import (
"strings"
"github.com/Songmu/axslogparser"
- goyaml "github.com/goccy/go-yaml"
- "github.com/goccy/go-yaml/ast"
- "github.com/goccy/go-yaml/lexer"
- "github.com/goccy/go-yaml/parser"
- "github.com/goccy/go-yaml/token"
+ goyaml "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/ast"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/lexer"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/parser"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/token"
"github.com/k1LoW/curlreq"
"github.com/k1LoW/expand"
"github.com/k1LoW/grpcurlreq"
diff --git a/runbook_test.go b/runbook_test.go
index 4a35a749..91625d5b 100644
--- a/runbook_test.go
+++ b/runbook_test.go
@@ -9,7 +9,7 @@ import (
"strings"
"testing"
- "github.com/goccy/go-yaml/token"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/token"
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
"github.com/tenntenn/golden"
diff --git a/runner_option.go b/runner_option.go
index cfa68960..558c066e 100644
--- a/runner_option.go
+++ b/runner_option.go
@@ -5,7 +5,7 @@ import (
"strconv"
"strings"
- "github.com/goccy/go-yaml"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml"
"github.com/pb33f/libopenapi"
"github.com/pb33f/libopenapi/datamodel"
)
diff --git a/testdata/book/book.yml b/testdata/book/book.yml
index 320b3328..88a470bf 100644
--- a/testdata/book/book.yml
+++ b/testdata/book/book.yml
@@ -6,6 +6,7 @@ vars:
username: alice
steps:
-
+ desc: "`SELECT * FROM users WHERE name = 'alice'`"
db:
query: SELECT * FROM users WHERE name = '{{ vars.username }}'
-
diff --git a/tmpmod/github.com/goccy/go-yaml/.codecov.yml b/tmpmod/github.com/goccy/go-yaml/.codecov.yml
new file mode 100644
index 00000000..8364eea0
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/.codecov.yml
@@ -0,0 +1,31 @@
+codecov:
+ require_ci_to_pass: yes
+
+coverage:
+ precision: 2
+ round: down
+ range: "70...100"
+
+ status:
+ project:
+ default:
+ target: 75%
+ threshold: 2%
+ patch: off
+ changes: no
+
+parsers:
+ gcov:
+ branch_detection:
+ conditional: yes
+ loop: yes
+ method: no
+ macro: no
+
+comment:
+ layout: "header,diff"
+ behavior: default
+ require_changes: no
+
+ignore:
+ - ast
diff --git a/tmpmod/github.com/goccy/go-yaml/.github/FUNDING.yml b/tmpmod/github.com/goccy/go-yaml/.github/FUNDING.yml
new file mode 100644
index 00000000..ab4b632c
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/.github/FUNDING.yml
@@ -0,0 +1 @@
+github: [goccy]
diff --git a/tmpmod/github.com/goccy/go-yaml/.github/ISSUE_TEMPLATE/bug_report.md b/tmpmod/github.com/goccy/go-yaml/.github/ISSUE_TEMPLATE/bug_report.md
new file mode 100644
index 00000000..1aad4754
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/.github/ISSUE_TEMPLATE/bug_report.md
@@ -0,0 +1,29 @@
+---
+name: Bug report
+about: Create a report to help us improve
+title: ''
+labels: bug
+assignees: ''
+
+---
+
+**Describe the bug**
+A clear and concise description of what the bug is.
+
+**To Reproduce**
+
+Please provide a minimal YAML content that reproduces the issue.
+We would be more than happy if you can share it via [Go Playground](https://go.dev/play).
+
+**Expected behavior**
+A clear and concise description of what you expected to happen.
+
+**Screenshots**
+If applicable, add screenshots to help explain your problem.
+
+**Version Variables**
+ - Go version: [e.g. 1.21 ]
+ - go-yaml's Version: [e.g. v1.11.1 ]
+
+**Additional context**
+Add any other context about the problem here.
diff --git a/tmpmod/github.com/goccy/go-yaml/.github/ISSUE_TEMPLATE/feature_request.md b/tmpmod/github.com/goccy/go-yaml/.github/ISSUE_TEMPLATE/feature_request.md
new file mode 100644
index 00000000..e301d68c
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/.github/ISSUE_TEMPLATE/feature_request.md
@@ -0,0 +1,20 @@
+---
+name: Feature request
+about: Suggest an idea for this project
+title: ''
+labels: feature request
+assignees: ''
+
+---
+
+**Is your feature request related to a problem? Please describe.**
+A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
+
+**Describe the solution you'd like**
+A clear and concise description of what you want to happen.
+
+**Describe alternatives you've considered**
+A clear and concise description of any alternative solutions or features you've considered.
+
+**Additional context**
+Add any other context or screenshots about the feature request here.
diff --git a/tmpmod/github.com/goccy/go-yaml/.github/pull_request_template.md b/tmpmod/github.com/goccy/go-yaml/.github/pull_request_template.md
new file mode 100644
index 00000000..a1d6c8e0
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/.github/pull_request_template.md
@@ -0,0 +1,4 @@
+Before submitting your PR, please confirm the following.
+
+- [ ] Describe the purpose for which you created this PR.
+- [ ] Create test code that corresponds to the modification
\ No newline at end of file
diff --git a/tmpmod/github.com/goccy/go-yaml/.github/workflows/go.yml b/tmpmod/github.com/goccy/go-yaml/.github/workflows/go.yml
new file mode 100644
index 00000000..647c692f
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/.github/workflows/go.yml
@@ -0,0 +1,79 @@
+name: Go
+on:
+ push:
+ branches:
+ - master
+ pull_request:
+jobs:
+ race-test:
+ name: Test with -race
+ strategy:
+ matrix:
+ os: [ "ubuntu-latest", "macos-latest", "windows-latest" ]
+ go-version: [ "1.19", "1.20", "1.21" ]
+ runs-on: ${{ matrix.os }}
+ steps:
+ - name: checkout
+ uses: actions/checkout@v4
+ - name: setup Go ${{ matrix.go-version }}
+ uses: actions/setup-go@v4
+ with:
+ go-version: ${{ matrix.go-version }}
+ - name: test
+ run: |
+ make test
+
+ i386-test:
+ name: Test in i386
+ strategy:
+ matrix:
+ os: [ "ubuntu-latest", "windows-latest" ]
+ go-version: [ "1.19", "1.20", "1.21" ]
+ runs-on: ${{ matrix.os }}
+ env:
+ GOARCH: "386"
+ steps:
+ - name: checkout
+ uses: actions/checkout@v4
+ - name: setup Go ${{ matrix.go-version }}
+ uses: actions/setup-go@v4
+ with:
+ go-version: ${{ matrix.go-version }}
+ - name: test
+ run: |
+ make simple-test
+
+ ycat:
+ name: ycat
+ runs-on: ubuntu-latest
+ steps:
+ - name: checkout
+ uses: actions/checkout@v4
+ - name: setup Go
+ uses: actions/setup-go@v4
+ with:
+ go-version: "1.21"
+ - name: build
+ run: |
+ make ycat/build
+ - name: run
+ run: |
+ ./ycat .github/workflows/go.yml
+
+ coverage:
+ name: Coverage
+ runs-on: ubuntu-latest
+ steps:
+ - name: checkout
+ uses: actions/checkout@v4
+ - name: setup Go
+ uses: actions/setup-go@v4
+ with:
+ go-version: "1.21"
+ - name: measure coverage
+ run: |
+ make cover
+ - uses: codecov/codecov-action@v3
+ with:
+ fail_ci_if_error: true
+ verbose: true
diff --git a/tmpmod/github.com/goccy/go-yaml/.tmpmod.log b/tmpmod/github.com/goccy/go-yaml/.tmpmod.log
new file mode 100755
index 00000000..1b742ff2
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/.tmpmod.log
@@ -0,0 +1 @@
+Use github.com/k1LoW/go-yaml@cannot-start-with-reserved-character (e8b0e1dd639ae297422c6efd920da84e2682e55c) as github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml temporarily
\ No newline at end of file
diff --git a/tmpmod/github.com/goccy/go-yaml/CHANGELOG.md b/tmpmod/github.com/goccy/go-yaml/CHANGELOG.md
new file mode 100644
index 00000000..c8f820de
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/CHANGELOG.md
@@ -0,0 +1,186 @@
+# 1.11.2 - 2023-09-15
+
+### Fix bugs
+
+- Fix quoted comments ( #370 )
+- Fix handling of space at the start or end ( #376 )
+- Fix sequence with comment ( #390 )
+
+# 1.11.1 - 2023-09-14
+
+### Fix bugs
+
+- Handle `\r` in a double-quoted string the same as `\n` ( #372 )
+- Replace loop with n.Values = append(n.Values, target.Values...) ( #380 )
+- Skip encoding an inline field if it is null ( #386 )
+- Fix comment parsing with null value ( #388 )
+
+# 1.11.0 - 2023-04-03
+
+### Features
+
+- Supports dynamically switch encode and decode processing for a given type
+
+# 1.10.1 - 2023-03-28
+
+### Features
+
+- Quote YAML 1.1 bools at encoding time for compatibility with other legacy parsers
+- Add support of 32-bit architecture
+
+### Fix bugs
+
+- Don't trim all space characters in block style sequence
+- Support strings starting with `@`
+
+# 1.10.0 - 2023-03-01
+
+### Fix bugs
+
+Reversible conversion of comments was not working in various cases, which has been corrected.
+**Breaking Change** exists in the comment map interface. However, if you are dealing with CommentMap directly, there is no problem.
+
+
+# 1.9.8 - 2022-12-19
+
+### Fix feature
+
+- Append new line at the end of file ( #329 )
+
+### Fix bugs
+
+- Fix custom marshaler ( #333, #334 )
+- Fix behavior when struct fields conflicted ( #335 )
+- Fix position calculation for literal, folded and raw folded strings ( #330 )
+
+# 1.9.7 - 2022-12-03
+
+### Fix bugs
+
+- Fix handling of quoted map key ( #328 )
+- Fix reuse of the scanning context ( #322 )
+
+## v1.9.6 - 2022-10-26
+
+### New Features
+
+- Introduce MapKeyNode interface to limit node types for map key ( #312 )
+
+### Fix bugs
+
+- Quote strings with special characters in flow mode ( #270 )
+- typeError implements PrettyPrinter interface ( #280 )
+- Fix incorrect const type ( #284 )
+- Fix large literals type inference on 32 bits ( #293 )
+- Fix UTF-8 characters ( #294 )
+- Fix decoding of unknown aliases ( #317 )
+- Fix stream encoder to insert a separator between each encoded document ( #318 )
+
+### Update
+
+- Update golang.org/x/sys ( #289 )
+- Update Go version in CI ( #295 )
+- Add test cases for missing keys to struct literals ( #300 )
+
+## v1.9.5 - 2022-01-12
+
+### New Features
+
+* Add UseSingleQuote option ( #265 )
+
+### Fix bugs
+
+* Preserve defaults while decoding nested structs ( #260 )
+* Fix minor typo in decodeInit error ( #264 )
+* Handle empty sequence entries ( #275 )
+* Fix encoding of sequence with multiline string ( #276 )
+* Fix encoding of BytesMarshaler type ( #277 )
+* Fix indentState logic for multi-line value ( #278 )
+
+## v1.9.4 - 2021-10-12
+
+### Fix bugs
+
+* Keep prev/next reference between tokens containing comments when filtering comment tokens ( #257 )
+* Supports escaping reserved keywords in PathBuilder ( #258 )
+
+## v1.9.3 - 2021-09-07
+
+### New Features
+
+* Support encoding and decoding `time.Duration` fields ( #246 )
+* Allow reserved characters for key name in YAMLPath ( #251 )
+* Support getting YAMLPath from ast.Node ( #252 )
+* Support CommentToMap option ( #253 )
+
+### Fix bugs
+
+* Fix encoding nested sequences with `yaml.IndentSequence` ( #241 )
+* Fix error reporting on inline structs in strict mode ( #244, #245 )
+* Fix encoding of large floats ( #247 )
+
+### Improve workflow
+
+* Migrate CI from CircleCI to GitHub Action ( #249 )
+* Add workflow for ycat ( #250 )
+
+## v1.9.2 - 2021-07-26
+
+### Support WithComment option ( #238 )
+
+`yaml.WithComment` is an option for encoding with comments.
+The position where you want to add a comment is represented by a YAMLPath, which is used as the key of `yaml.CommentMap`.
+Also, you can select a `Head` comment or a `Line` comment as the comment type.
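+
+A rough sketch of how this option can be used, written against the `CommentMap` shape in this vendored copy (the comment map interface later changed, see 1.10.0 above); the struct and comment texts are made up for illustration, and imports are elided as in the other snippets:
+
+```go
+cm := yaml.CommentMap{
+	"$.a": []*yaml.Comment{yaml.HeadComment(" comment for a")},
+	"$.b": []*yaml.Comment{yaml.LineComment(" comment for b")},
+}
+v := struct {
+	A int    `yaml:"a"`
+	B string `yaml:"b"`
+}{A: 1, B: "hello"}
+b, err := yaml.MarshalWithOptions(v, yaml.WithComment(cm))
+if err != nil {
+	// ...
+}
+fmt.Println(string(b))
+// output (roughly):
+// # comment for a
+// a: 1
+// b: hello # comment for b
+```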
+
+## v1.9.1 - 2021-07-20
+
+### Fix DecodeFromNode ( #237 )
+
+- Fix YAML handling where anchor exists
+
+## v1.9.0 - 2021-07-19
+
+### New features
+
+- Support encoding of comment node ( #233 )
+- Support `yaml.NodeToValue(ast.Node, interface{}, ...DecodeOption) error` ( #236 )
+ - Can convert an AST node to a value directly
+
+### Fix decoder for comment
+
+- Fix parsing of literal with comment ( #234 )
+
+### Rename API ( #235 )
+
+- Rename `MarshalWithContext` to `MarshalContext`
+- Rename `UnmarshalWithContext` to `UnmarshalContext`
+
+## v1.8.10 - 2021-07-02
+
+### Fixed bugs
+
+- Fix searching anchor by alias name ( #212 )
+- Make the scanner account for newline characters when processing multi-line text; without this, the source annotation line/column numbers (for this and all subsequent tokens) are inconsistent with plain text editors, e.g. https://github.com/goccy/go-yaml/issues/186. This addresses the issue for single- and double-quoted text only ( #210 )
+- Add error for unterminated flow mapping node ( #213 )
+- Handle missing required field validation ( #221 )
+- Nicely format unexpected node type errors ( #229 )
+- Support to encode map which has defined type key ( #231 )
+
+### New features
+
+- Support sequence indentation by EncodeOption ( #232 )
+
+## v1.8.9 - 2021-03-01
+
+### Fixed bugs
+
+- Fix origin buffer for DocumentHeader and DocumentEnd and Directive
+- Fix origin buffer for anchor value
+- Fix syntax error about map value
+- Fix parsing MergeKey ('<<') characters
+- Fix encoding of float value
+- Fix incorrect column annotation when single or double quotes are used
+
+### New features
+
+- Support to encode/decode of ast.Node directly
diff --git a/tmpmod/github.com/goccy/go-yaml/LICENSE b/tmpmod/github.com/goccy/go-yaml/LICENSE
new file mode 100644
index 00000000..04485ce6
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2019 Masaaki Goshima
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/tmpmod/github.com/goccy/go-yaml/Makefile b/tmpmod/github.com/goccy/go-yaml/Makefile
new file mode 100644
index 00000000..1b1d9239
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/Makefile
@@ -0,0 +1,19 @@
+.PHONY: test
+test:
+ go test -v -race ./...
+
+.PHONY: simple-test
+simple-test:
+ go test -v ./...
+
+.PHONY: cover
+cover:
+ go test -coverprofile=cover.out ./...
+
+.PHONY: cover-html
+cover-html: cover
+ go tool cover -html=cover.out
+
+.PHONY: ycat/build
+ycat/build:
+ go build -o ycat ./cmd/ycat
diff --git a/tmpmod/github.com/goccy/go-yaml/README.md b/tmpmod/github.com/goccy/go-yaml/README.md
new file mode 100644
index 00000000..94523491
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/README.md
@@ -0,0 +1,370 @@
+# YAML support for the Go language
+
+[![PkgGoDev](https://pkg.go.dev/badge/github.com/goccy/go-yaml)](https://pkg.go.dev/github.com/goccy/go-yaml)
+![Go](https://github.com/goccy/go-yaml/workflows/Go/badge.svg)
+[![codecov](https://codecov.io/gh/goccy/go-yaml/branch/master/graph/badge.svg)](https://codecov.io/gh/goccy/go-yaml)
+[![Go Report Card](https://goreportcard.com/badge/github.com/goccy/go-yaml)](https://goreportcard.com/report/github.com/goccy/go-yaml)
+
+
+
+# Why a new library?
+
+As of this writing, there already exists a de facto standard library for YAML processing in Go: [https://github.com/go-yaml/yaml](https://github.com/go-yaml/yaml). However, we feel that some features are lacking, namely:
+
+- Pretty format for error notifications
+- Direct manipulation of YAML abstract syntax tree
+- Support for `Anchor` and `Alias` when marshaling
+- Allow referencing elements declared in another file via anchors
+
+# Features
+
+- Pretty format for error notifications
+- Supports `Scanner`, `Lexer` and `Parser` as public APIs
+- Supports `Anchor` and `Alias` when marshaling
+- Allow referencing elements declared in another file via anchors
+- Extract value or AST by YAMLPath ( YAMLPath is like a JSONPath )
+
+# Installation
+
+```sh
+go get -u github.com/goccy/go-yaml
+```
+
+# Synopsis
+
+## 1. Simple Encode/Decode
+
+Has an interface like `go-yaml/yaml` using `reflect`
+
+```go
+var v struct {
+ A int
+ B string
+}
+v.A = 1
+v.B = "hello"
+bytes, err := yaml.Marshal(v)
+if err != nil {
+ //...
+}
+fmt.Println(string(bytes)) // "a: 1\nb: hello\n"
+```
+
+```go
+ yml := `
+%YAML 1.2
+---
+a: 1
+b: c
+`
+var v struct {
+ A int
+ B string
+}
+if err := yaml.Unmarshal([]byte(yml), &v); err != nil {
+ //...
+}
+```
+
+To control marshal/unmarshal behavior, you can use the `yaml` tag.
+
+```go
+ yml := `---
+foo: 1
+bar: c
+`
+var v struct {
+ A int `yaml:"foo"`
+ B string `yaml:"bar"`
+}
+if err := yaml.Unmarshal([]byte(yml), &v); err != nil {
+ //...
+}
+```
+
+For convenience, we also accept the `json` tag. Note that not all options from
+the `json` tag will have significance when parsing YAML documents. If both
+tags exist, the `yaml` tag takes precedence.
+
+```go
+ yml := `---
+foo: 1
+bar: c
+`
+var v struct {
+ A int `json:"foo"`
+ B string `json:"bar"`
+}
+if err := yaml.Unmarshal([]byte(yml), &v); err != nil {
+ //...
+}
+```
+
+For custom marshaling/unmarshaling, implement either the `Bytes` or the `Interface` variant of the marshaler/unmarshaler. The difference is that `BytesMarshaler`/`BytesUnmarshaler` behaves like [`encoding/json`](https://pkg.go.dev/encoding/json), while `InterfaceMarshaler`/`InterfaceUnmarshaler` behaves like [`gopkg.in/yaml.v2`](https://pkg.go.dev/gopkg.in/yaml.v2).
+
+Semantically both are the same, but they differ in performance. Because indentation matters in YAML, you cannot simply accept a valid YAML fragment from a Marshaler and expect it to work when it is attached to the parent container's serialized form. Therefore, when we use the `BytesMarshaler`, which returns `[]byte`, we must decode it once to figure out how to make it work in the given context. If you use the `InterfaceMarshaler`, we can skip that decoding step.
+
+If you are repeatedly marshaling complex objects, the latter is always better
+performance-wise. But if you are, for example, just supporting a config file
+format that is read only once, the former is probably easier to
+code.
+
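+A minimal, self-contained sketch of the `Interface` variant (not from this patch; the `Color` type and its hex format are made up for illustration):
+
+```go
+package main
+
+import (
+	"fmt"
+
+	"github.com/goccy/go-yaml"
+)
+
+// Color is a hypothetical type used only for this sketch.
+type Color struct {
+	R, G, B uint8
+}
+
+// MarshalYAML implements the Interface variant: it returns a plain Go value,
+// so the encoder does not need to re-decode a YAML fragment.
+func (c Color) MarshalYAML() (interface{}, error) {
+	return fmt.Sprintf("#%02x%02x%02x", c.R, c.G, c.B), nil
+}
+
+// UnmarshalYAML implements the Interface variant of the unmarshaler.
+func (c *Color) UnmarshalYAML(unmarshal func(interface{}) error) error {
+	var s string
+	if err := unmarshal(&s); err != nil {
+		return err
+	}
+	_, err := fmt.Sscanf(s, "#%02x%02x%02x", &c.R, &c.G, &c.B)
+	return err
+}
+
+func main() {
+	b, err := yaml.Marshal(map[string]Color{"background": {R: 255, G: 255, B: 255}})
+	if err != nil {
+		panic(err)
+	}
+	fmt.Print(string(b)) // prints something like: background: '#ffffff'
+}
+```
+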
+## 2. Reference elements declared in another file
+
+`testdata` directory contains `anchor.yml` file:
+
+```shell
+├── testdata
+ └── anchor.yml
+```
+
+And `anchor.yml` is defined as follows:
+
+```yaml
+a: &a
+ b: 1
+ c: hello
+```
+
+Then, if the `yaml.ReferenceDirs("testdata")` option is passed to `yaml.Decoder`,
+`Decoder` tries to find the anchor definition in YAML files under the `testdata` directory.
+
+```go
+buf := bytes.NewBufferString("a: *a\n")
+dec := yaml.NewDecoder(buf, yaml.ReferenceDirs("testdata"))
+var v struct {
+ A struct {
+ B int
+ C string
+ }
+}
+if err := dec.Decode(&v); err != nil {
+ //...
+}
+fmt.Printf("%+v\n", v) // {A:{B:1 C:hello}}
+```
+
+## 3. Encode with `Anchor` and `Alias`
+
+### 3.1. Explicitly declared `Anchor` name and `Alias` name
+
+If you want to use `anchor` or `alias`, you can define it as a struct tag.
+
+```go
+type T struct {
+ A int
+ B string
+}
+var v struct {
+ C *T `yaml:"c,anchor=x"`
+ D *T `yaml:"d,alias=x"`
+}
+v.C = &T{A: 1, B: "hello"}
+v.D = v.C
+bytes, err := yaml.Marshal(v)
+if err != nil {
+ panic(err)
+}
+fmt.Println(string(bytes))
+/*
+c: &x
+ a: 1
+ b: hello
+d: *x
+*/
+```
+
+### 3.2. Implicitly declared `Anchor` and `Alias` names
+
+If you do not explicitly declare the anchor name, the default behavior is to
+use the equivalent of `strings.ToLower($FieldName)` as the name of the anchor.
+
+If you do not explicitly declare the alias name AND the value is a pointer
+to another element, we look up the anchor name by finding out which anchor
+field the value is assigned to by looking up its pointer address.
+
+```go
+type T struct {
+ I int
+ S string
+}
+var v struct {
+ A *T `yaml:"a,anchor"`
+ B *T `yaml:"b,anchor"`
+ C *T `yaml:"c,alias"`
+ D *T `yaml:"d,alias"`
+}
+v.A = &T{I: 1, S: "hello"}
+v.B = &T{I: 2, S: "world"}
+v.C = v.A // C has same pointer address to A
+v.D = v.B // D has same pointer address to B
+bytes, err := yaml.Marshal(v)
+if err != nil {
+ //...
+}
+fmt.Println(string(bytes))
+/*
+a: &a
+ i: 1
+ s: hello
+b: &b
+ i: 2
+ s: world
+c: *a
+d: *b
+*/
+```
+
+### 3.3 MergeKey and Alias
+
+Merge key and alias ( `<<: *alias` ) can be used by embedding a structure with the `inline,alias` tag.
+
+```go
+type Person struct {
+ *Person `yaml:",omitempty,inline,alias"` // embed Person type for default value
+ Name string `yaml:",omitempty"`
+ Age int `yaml:",omitempty"`
+}
+defaultPerson := &Person{
+ Name: "John Smith",
+ Age: 20,
+}
+people := []*Person{
+ {
+ Person: defaultPerson, // assign default value
+ Name: "Ken", // override Name property
+ Age: 10, // override Age property
+ },
+ {
+ Person: defaultPerson, // assign default value only
+ },
+}
+var doc struct {
+ Default *Person `yaml:"default,anchor"`
+ People []*Person `yaml:"people"`
+}
+doc.Default = defaultPerson
+doc.People = people
+bytes, err := yaml.Marshal(doc)
+if err != nil {
+ //...
+}
+fmt.Println(string(bytes))
+/*
+default: &default
+ name: John Smith
+ age: 20
+people:
+- <<: *default
+ name: Ken
+ age: 10
+- <<: *default
+*/
+```
+
+## 4. Pretty Formatted Errors
+
+Error values produced during parsing have two extra features over regular
+error values.
+
+First, by default, they contain extra information on the location of the error
+from the source YAML document, to make it easier to find the error location.
+
+Second, the error messages can optionally be colorized.
+
+If you would like to control exactly what the output looks like, consider
+using `yaml.FormatError`, which accepts two boolean values to
+control turning these features on or off.
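+
+A minimal sketch of `yaml.FormatError` with both features turned on (the struct, input and resulting error are made up for illustration; imports elided as in the snippets above):
+
+```go
+var v struct {
+	A int
+}
+if err := yaml.Unmarshal([]byte("a: [1]"), &v); err != nil {
+	// colored output that includes the annotated YAML source
+	fmt.Println(yaml.FormatError(err, true, true))
+}
+```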
+
+
+
+## 5. Use YAMLPath
+
+```go
+yml := `
+store:
+ book:
+ - author: john
+ price: 10
+ - author: ken
+ price: 12
+ bicycle:
+ color: red
+ price: 19.95
+`
+path, err := yaml.PathString("$.store.book[*].author")
+if err != nil {
+ //...
+}
+var authors []string
+if err := path.Read(strings.NewReader(yml), &authors); err != nil {
+ //...
+}
+fmt.Println(authors)
+// [john ken]
+```
+
+### 5.1 Print customized error with YAML source code
+
+```go
+package main
+
+import (
+ "fmt"
+
+ "github.com/goccy/go-yaml"
+)
+
+func main() {
+ yml := `
+a: 1
+b: "hello"
+`
+ var v struct {
+ A int
+ B string
+ }
+ if err := yaml.Unmarshal([]byte(yml), &v); err != nil {
+ panic(err)
+ }
+ if v.A != 2 {
+ // output error with YAML source
+ path, err := yaml.PathString("$.a")
+ if err != nil {
+ panic(err)
+ }
+ source, err := path.AnnotateSource([]byte(yml), true)
+ if err != nil {
+ panic(err)
+ }
+ fmt.Printf("a value expected 2 but actual %d:\n%s\n", v.A, string(source))
+ }
+}
+```
+
+The output looks like the following:
+
+
+
+
+# Tools
+
+## ycat
+
+print yaml file with color
+
+
+
+### Installation
+
+```sh
+go install github.com/goccy/go-yaml/cmd/ycat@latest
+```
+
+# Looking for Sponsors
+
+I'm looking for sponsors for this library. This library is being developed as a personal project in my spare time. If you want a quick response or problem resolution when using this library in your project, please consider registering as a [sponsor](https://github.com/sponsors/goccy). I will cooperate as much as possible. Of course, this library is released under the MIT license, so you can use it freely.
+
+# License
+
+MIT
diff --git a/tmpmod/github.com/goccy/go-yaml/ast/ast.go b/tmpmod/github.com/goccy/go-yaml/ast/ast.go
new file mode 100644
index 00000000..ff1d40c6
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/ast/ast.go
@@ -0,0 +1,2115 @@
+package ast
+
+import (
+ "fmt"
+ "io"
+ "math"
+ "strconv"
+ "strings"
+
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/token"
+ "golang.org/x/xerrors"
+)
+
+var (
+ ErrInvalidTokenType = xerrors.New("invalid token type")
+ ErrInvalidAnchorName = xerrors.New("invalid anchor name")
+ ErrInvalidAliasName = xerrors.New("invalid alias name")
+)
+
+// NodeType type identifier of node
+type NodeType int
+
+const (
+ // UnknownNodeType type identifier for default
+ UnknownNodeType NodeType = iota
+ // DocumentType type identifier for document node
+ DocumentType
+ // NullType type identifier for null node
+ NullType
+ // BoolType type identifier for boolean node
+ BoolType
+ // IntegerType type identifier for integer node
+ IntegerType
+ // FloatType type identifier for float node
+ FloatType
+ // InfinityType type identifier for infinity node
+ InfinityType
+ // NanType type identifier for nan node
+ NanType
+ // StringType type identifier for string node
+ StringType
+ // MergeKeyType type identifier for merge key node
+ MergeKeyType
+ // LiteralType type identifier for literal node
+ LiteralType
+ // MappingType type identifier for mapping node
+ MappingType
+ // MappingKeyType type identifier for mapping key node
+ MappingKeyType
+ // MappingValueType type identifier for mapping value node
+ MappingValueType
+ // SequenceType type identifier for sequence node
+ SequenceType
+ // AnchorType type identifier for anchor node
+ AnchorType
+ // AliasType type identifier for alias node
+ AliasType
+ // DirectiveType type identifier for directive node
+ DirectiveType
+ // TagType type identifier for tag node
+ TagType
+ // CommentType type identifier for comment node
+ CommentType
+ // CommentGroupType type identifier for comment group node
+ CommentGroupType
+)
+
+// String node type identifier to text
+func (t NodeType) String() string {
+ switch t {
+ case UnknownNodeType:
+ return "UnknownNode"
+ case DocumentType:
+ return "Document"
+ case NullType:
+ return "Null"
+ case BoolType:
+ return "Bool"
+ case IntegerType:
+ return "Integer"
+ case FloatType:
+ return "Float"
+ case InfinityType:
+ return "Infinity"
+ case NanType:
+ return "Nan"
+ case StringType:
+ return "String"
+ case MergeKeyType:
+ return "MergeKey"
+ case LiteralType:
+ return "Literal"
+ case MappingType:
+ return "Mapping"
+ case MappingKeyType:
+ return "MappingKey"
+ case MappingValueType:
+ return "MappingValue"
+ case SequenceType:
+ return "Sequence"
+ case AnchorType:
+ return "Anchor"
+ case AliasType:
+ return "Alias"
+ case DirectiveType:
+ return "Directive"
+ case TagType:
+ return "Tag"
+ case CommentType:
+ return "Comment"
+ case CommentGroupType:
+ return "CommentGroup"
+ }
+ return ""
+}
+
+// YAMLName node type identifier to YAML structure name
+// based on https://yaml.org/spec/1.2/spec.html
+func (t NodeType) YAMLName() string {
+ switch t {
+ case UnknownNodeType:
+ return "unknown"
+ case DocumentType:
+ return "document"
+ case NullType:
+ return "null"
+ case BoolType:
+ return "boolean"
+ case IntegerType:
+ return "int"
+ case FloatType:
+ return "float"
+ case InfinityType:
+ return "inf"
+ case NanType:
+ return "nan"
+ case StringType:
+ return "string"
+ case MergeKeyType:
+ return "merge key"
+ case LiteralType:
+ return "scalar"
+ case MappingType:
+ return "mapping"
+ case MappingKeyType:
+ return "key"
+ case MappingValueType:
+ return "value"
+ case SequenceType:
+ return "sequence"
+ case AnchorType:
+ return "anchor"
+ case AliasType:
+ return "alias"
+ case DirectiveType:
+ return "directive"
+ case TagType:
+ return "tag"
+ case CommentType:
+ return "comment"
+ case CommentGroupType:
+ return "comment"
+ }
+ return ""
+}
+
+// Node type of node
+type Node interface {
+ io.Reader
+ // String node to text
+ String() string
+ // GetToken returns token instance
+ GetToken() *token.Token
+ // Type returns type of node
+ Type() NodeType
+ // AddColumn add column number to child nodes recursively
+ AddColumn(int)
+ // SetComment set comment token to node
+ SetComment(*CommentGroupNode) error
+ // GetComment returns comment token instance
+ GetComment() *CommentGroupNode
+ // GetPath returns YAMLPath for the current node
+ GetPath() string
+ // SetPath set YAMLPath for the current node
+ SetPath(string)
+ // MarshalYAML
+ MarshalYAML() ([]byte, error)
+ // already read length
+ readLen() int
+ // append read length
+ addReadLen(int)
+ // clean read length
+ clearLen()
+}
+
+// MapKeyNode type for map key node
+type MapKeyNode interface {
+ Node
+ // String node to text without comment
+ stringWithoutComment() string
+}
+
+// ScalarNode type for scalar node
+type ScalarNode interface {
+ MapKeyNode
+ GetValue() interface{}
+}
+
+type BaseNode struct {
+ Path string
+ Comment *CommentGroupNode
+ read int
+}
+
+func addCommentString(base string, node *CommentGroupNode) string {
+ return fmt.Sprintf("%s %s", base, node.String())
+}
+
+func (n *BaseNode) readLen() int {
+ return n.read
+}
+
+func (n *BaseNode) clearLen() {
+ n.read = 0
+}
+
+func (n *BaseNode) addReadLen(len int) {
+ n.read += len
+}
+
+// GetPath returns YAMLPath for the current node.
+func (n *BaseNode) GetPath() string {
+ if n == nil {
+ return ""
+ }
+ return n.Path
+}
+
+// SetPath set YAMLPath for the current node.
+func (n *BaseNode) SetPath(path string) {
+ if n == nil {
+ return
+ }
+ n.Path = path
+}
+
+// GetComment returns comment token instance
+func (n *BaseNode) GetComment() *CommentGroupNode {
+ return n.Comment
+}
+
+// SetComment set comment token
+func (n *BaseNode) SetComment(node *CommentGroupNode) error {
+ n.Comment = node
+ return nil
+}
+
+func min(a, b int) int {
+ if a < b {
+ return a
+ }
+ return b
+}
+
+func readNode(p []byte, node Node) (int, error) {
+ s := node.String()
+ readLen := node.readLen()
+ remain := len(s) - readLen
+ if remain == 0 {
+ node.clearLen()
+ return 0, io.EOF
+ }
+ size := min(remain, len(p))
+ for idx, b := range []byte(s[readLen : readLen+size]) {
+ p[idx] = byte(b)
+ }
+ node.addReadLen(size)
+ return size, nil
+}
+
+// Null create node for null value
+func Null(tk *token.Token) *NullNode {
+ return &NullNode{
+ BaseNode: &BaseNode{},
+ Token: tk,
+ }
+}
+
+// Bool create node for boolean value
+func Bool(tk *token.Token) *BoolNode {
+ b, _ := strconv.ParseBool(tk.Value)
+ return &BoolNode{
+ BaseNode: &BaseNode{},
+ Token: tk,
+ Value: b,
+ }
+}
+
+// Integer create node for integer value
+func Integer(tk *token.Token) *IntegerNode {
+ value := removeUnderScoreFromNumber(tk.Value)
+ switch tk.Type {
+ case token.BinaryIntegerType:
+ // skip two characters because binary token starts with '0b'
+ skipCharacterNum := 2
+ negativePrefix := ""
+ if value[0] == '-' {
+ skipCharacterNum++
+ negativePrefix = "-"
+ }
+ if len(negativePrefix) > 0 {
+ i, _ := strconv.ParseInt(negativePrefix+value[skipCharacterNum:], 2, 64)
+ return &IntegerNode{
+ BaseNode: &BaseNode{},
+ Token: tk,
+ Value: i,
+ }
+ }
+ i, _ := strconv.ParseUint(negativePrefix+value[skipCharacterNum:], 2, 64)
+ return &IntegerNode{
+ BaseNode: &BaseNode{},
+ Token: tk,
+ Value: i,
+ }
+ case token.OctetIntegerType:
+ // octet token starts with '0o' or '-0o' or '0' or '-0'
+ skipCharacterNum := 1
+ negativePrefix := ""
+ if value[0] == '-' {
+ skipCharacterNum++
+ if len(value) > 2 && value[2] == 'o' {
+ skipCharacterNum++
+ }
+ negativePrefix = "-"
+ } else {
+ if value[1] == 'o' {
+ skipCharacterNum++
+ }
+ }
+ if len(negativePrefix) > 0 {
+ i, _ := strconv.ParseInt(negativePrefix+value[skipCharacterNum:], 8, 64)
+ return &IntegerNode{
+ BaseNode: &BaseNode{},
+ Token: tk,
+ Value: i,
+ }
+ }
+ i, _ := strconv.ParseUint(value[skipCharacterNum:], 8, 64)
+ return &IntegerNode{
+ BaseNode: &BaseNode{},
+ Token: tk,
+ Value: i,
+ }
+ case token.HexIntegerType:
+ // hex token starts with '0x' or '-0x'
+ skipCharacterNum := 2
+ negativePrefix := ""
+ if value[0] == '-' {
+ skipCharacterNum++
+ negativePrefix = "-"
+ }
+ if len(negativePrefix) > 0 {
+ i, _ := strconv.ParseInt(negativePrefix+value[skipCharacterNum:], 16, 64)
+ return &IntegerNode{
+ BaseNode: &BaseNode{},
+ Token: tk,
+ Value: i,
+ }
+ }
+ i, _ := strconv.ParseUint(value[skipCharacterNum:], 16, 64)
+ return &IntegerNode{
+ BaseNode: &BaseNode{},
+ Token: tk,
+ Value: i,
+ }
+ }
+ if value[0] == '-' || value[0] == '+' {
+ i, _ := strconv.ParseInt(value, 10, 64)
+ return &IntegerNode{
+ BaseNode: &BaseNode{},
+ Token: tk,
+ Value: i,
+ }
+ }
+ i, _ := strconv.ParseUint(value, 10, 64)
+ return &IntegerNode{
+ BaseNode: &BaseNode{},
+ Token: tk,
+ Value: i,
+ }
+}
+
+// Float create node for float value
+func Float(tk *token.Token) *FloatNode {
+ f, _ := strconv.ParseFloat(removeUnderScoreFromNumber(tk.Value), 64)
+ return &FloatNode{
+ BaseNode: &BaseNode{},
+ Token: tk,
+ Value: f,
+ }
+}
+
+// Infinity create node for .inf or -.inf value
+func Infinity(tk *token.Token) *InfinityNode {
+ node := &InfinityNode{
+ BaseNode: &BaseNode{},
+ Token: tk,
+ }
+ switch tk.Value {
+ case ".inf", ".Inf", ".INF":
+ node.Value = math.Inf(0)
+ case "-.inf", "-.Inf", "-.INF":
+ node.Value = math.Inf(-1)
+ }
+ return node
+}
+
+// Nan create node for .nan value
+func Nan(tk *token.Token) *NanNode {
+ return &NanNode{
+ BaseNode: &BaseNode{},
+ Token: tk,
+ }
+}
+
+// String create node for string value
+func String(tk *token.Token) *StringNode {
+ return &StringNode{
+ BaseNode: &BaseNode{},
+ Token: tk,
+ Value: tk.Value,
+ }
+}
+
+// Comment create node for comment
+func Comment(tk *token.Token) *CommentNode {
+ return &CommentNode{
+ BaseNode: &BaseNode{},
+ Token: tk,
+ }
+}
+
+func CommentGroup(comments []*token.Token) *CommentGroupNode {
+ nodes := []*CommentNode{}
+ for _, comment := range comments {
+ nodes = append(nodes, Comment(comment))
+ }
+ return &CommentGroupNode{
+ BaseNode: &BaseNode{},
+ Comments: nodes,
+ }
+}
+
+// MergeKey create node for merge key ( << )
+func MergeKey(tk *token.Token) *MergeKeyNode {
+ return &MergeKeyNode{
+ BaseNode: &BaseNode{},
+ Token: tk,
+ }
+}
+
+// Mapping create node for map
+func Mapping(tk *token.Token, isFlowStyle bool, values ...*MappingValueNode) *MappingNode {
+ node := &MappingNode{
+ BaseNode: &BaseNode{},
+ Start: tk,
+ IsFlowStyle: isFlowStyle,
+ Values: []*MappingValueNode{},
+ }
+ node.Values = append(node.Values, values...)
+ return node
+}
+
+// MappingValue create node for mapping value
+func MappingValue(tk *token.Token, key MapKeyNode, value Node) *MappingValueNode {
+ return &MappingValueNode{
+ BaseNode: &BaseNode{},
+ Start: tk,
+ Key: key,
+ Value: value,
+ }
+}
+
+// MappingKey create node for map key ( '?' ).
+func MappingKey(tk *token.Token) *MappingKeyNode {
+ return &MappingKeyNode{
+ BaseNode: &BaseNode{},
+ Start: tk,
+ }
+}
+
+// Sequence create node for sequence
+func Sequence(tk *token.Token, isFlowStyle bool) *SequenceNode {
+ return &SequenceNode{
+ BaseNode: &BaseNode{},
+ Start: tk,
+ IsFlowStyle: isFlowStyle,
+ Values: []Node{},
+ }
+}
+
+func Anchor(tk *token.Token) *AnchorNode {
+ return &AnchorNode{
+ BaseNode: &BaseNode{},
+ Start: tk,
+ }
+}
+
+func Alias(tk *token.Token) *AliasNode {
+ return &AliasNode{
+ BaseNode: &BaseNode{},
+ Start: tk,
+ }
+}
+
+func Document(tk *token.Token, body Node) *DocumentNode {
+ return &DocumentNode{
+ BaseNode: &BaseNode{},
+ Start: tk,
+ Body: body,
+ }
+}
+
+func Directive(tk *token.Token) *DirectiveNode {
+ return &DirectiveNode{
+ BaseNode: &BaseNode{},
+ Start: tk,
+ }
+}
+
+func Literal(tk *token.Token) *LiteralNode {
+ return &LiteralNode{
+ BaseNode: &BaseNode{},
+ Start: tk,
+ }
+}
+
+func Tag(tk *token.Token) *TagNode {
+ return &TagNode{
+ BaseNode: &BaseNode{},
+ Start: tk,
+ }
+}
+
+// File contains all documents in YAML file
+type File struct {
+ Name string
+ Docs []*DocumentNode
+}
+
+// Read implements (io.Reader).Read
+func (f *File) Read(p []byte) (int, error) {
+ for _, doc := range f.Docs {
+ n, err := doc.Read(p)
+ if err == io.EOF {
+ continue
+ }
+ return n, nil
+ }
+ return 0, io.EOF
+}
+
+// String all documents to text
+func (f *File) String() string {
+ docs := []string{}
+ for _, doc := range f.Docs {
+ docs = append(docs, doc.String())
+ }
+ if len(docs) > 0 {
+ return strings.Join(docs, "\n") + "\n"
+ } else {
+ return ""
+ }
+}
+
+// DocumentNode type of Document
+type DocumentNode struct {
+ *BaseNode
+ Start *token.Token // position of DocumentHeader ( `---` )
+ End *token.Token // position of DocumentEnd ( `...` )
+ Body Node
+}
+
+// Read implements (io.Reader).Read
+func (d *DocumentNode) Read(p []byte) (int, error) {
+ return readNode(p, d)
+}
+
+// Type returns DocumentNodeType
+func (d *DocumentNode) Type() NodeType { return DocumentType }
+
+// GetToken returns token instance
+func (d *DocumentNode) GetToken() *token.Token {
+ return d.Body.GetToken()
+}
+
+// AddColumn add column number to child nodes recursively
+func (d *DocumentNode) AddColumn(col int) {
+ if d.Body != nil {
+ d.Body.AddColumn(col)
+ }
+}
+
+// String document to text
+func (d *DocumentNode) String() string {
+ doc := []string{}
+ if d.Start != nil {
+ doc = append(doc, d.Start.Value)
+ }
+ doc = append(doc, d.Body.String())
+ if d.End != nil {
+ doc = append(doc, d.End.Value)
+ }
+ return strings.Join(doc, "\n")
+}
+
+// MarshalYAML encodes to a YAML text
+func (d *DocumentNode) MarshalYAML() ([]byte, error) {
+ return []byte(d.String()), nil
+}
+
+func removeUnderScoreFromNumber(num string) string {
+ return strings.ReplaceAll(num, "_", "")
+}
+
+// NullNode type of null node
+type NullNode struct {
+ *BaseNode
+ Token *token.Token
+}
+
+// Read implements (io.Reader).Read
+func (n *NullNode) Read(p []byte) (int, error) {
+ return readNode(p, n)
+}
+
+// Type returns NullType
+func (n *NullNode) Type() NodeType { return NullType }
+
+// GetToken returns token instance
+func (n *NullNode) GetToken() *token.Token {
+ return n.Token
+}
+
+// AddColumn add column number to child nodes recursively
+func (n *NullNode) AddColumn(col int) {
+ n.Token.AddColumn(col)
+}
+
+// GetValue returns nil value
+func (n *NullNode) GetValue() interface{} {
+ return nil
+}
+
+// String returns `null` text
+func (n *NullNode) String() string {
+ if n.Comment != nil {
+ return addCommentString("null", n.Comment)
+ }
+ return n.stringWithoutComment()
+}
+
+func (n *NullNode) stringWithoutComment() string {
+ return "null"
+}
+
+// MarshalYAML encodes to a YAML text
+func (n *NullNode) MarshalYAML() ([]byte, error) {
+ return []byte(n.String()), nil
+}
+
+// IntegerNode type of integer node
+type IntegerNode struct {
+ *BaseNode
+ Token *token.Token
+ Value interface{} // int64 or uint64 value
+}
+
+// Read implements (io.Reader).Read
+func (n *IntegerNode) Read(p []byte) (int, error) {
+ return readNode(p, n)
+}
+
+// Type returns IntegerType
+func (n *IntegerNode) Type() NodeType { return IntegerType }
+
+// GetToken returns token instance
+func (n *IntegerNode) GetToken() *token.Token {
+ return n.Token
+}
+
+// AddColumn add column number to child nodes recursively
+func (n *IntegerNode) AddColumn(col int) {
+ n.Token.AddColumn(col)
+}
+
+// GetValue returns int64 value
+func (n *IntegerNode) GetValue() interface{} {
+ return n.Value
+}
+
+// String int64 to text
+func (n *IntegerNode) String() string {
+ if n.Comment != nil {
+ return addCommentString(n.Token.Value, n.Comment)
+ }
+ return n.stringWithoutComment()
+}
+
+func (n *IntegerNode) stringWithoutComment() string {
+ return n.Token.Value
+}
+
+// MarshalYAML encodes to a YAML text
+func (n *IntegerNode) MarshalYAML() ([]byte, error) {
+ return []byte(n.String()), nil
+}
+
+// FloatNode type of float node
+type FloatNode struct {
+ *BaseNode
+ Token *token.Token
+ Precision int
+ Value float64
+}
+
+// Read implements (io.Reader).Read
+func (n *FloatNode) Read(p []byte) (int, error) {
+ return readNode(p, n)
+}
+
+// Type returns FloatType
+func (n *FloatNode) Type() NodeType { return FloatType }
+
+// GetToken returns token instance
+func (n *FloatNode) GetToken() *token.Token {
+ return n.Token
+}
+
+// AddColumn add column number to child nodes recursively
+func (n *FloatNode) AddColumn(col int) {
+ n.Token.AddColumn(col)
+}
+
+// GetValue returns float64 value
+func (n *FloatNode) GetValue() interface{} {
+ return n.Value
+}
+
+// String float64 to text
+func (n *FloatNode) String() string {
+ if n.Comment != nil {
+ return addCommentString(n.Token.Value, n.Comment)
+ }
+ return n.stringWithoutComment()
+}
+
+func (n *FloatNode) stringWithoutComment() string {
+ return n.Token.Value
+}
+
+// MarshalYAML encodes to a YAML text
+func (n *FloatNode) MarshalYAML() ([]byte, error) {
+ return []byte(n.String()), nil
+}
+
+// StringNode type of string node
+type StringNode struct {
+ *BaseNode
+ Token *token.Token
+ Value string
+}
+
+// Read implements (io.Reader).Read
+func (n *StringNode) Read(p []byte) (int, error) {
+ return readNode(p, n)
+}
+
+// Type returns StringType
+func (n *StringNode) Type() NodeType { return StringType }
+
+// GetToken returns token instance
+func (n *StringNode) GetToken() *token.Token {
+ return n.Token
+}
+
+// AddColumn add column number to child nodes recursively
+func (n *StringNode) AddColumn(col int) {
+ n.Token.AddColumn(col)
+}
+
+// GetValue returns string value
+func (n *StringNode) GetValue() interface{} {
+ return n.Value
+}
+
+// escapeSingleQuote escapes s to a single quoted scalar.
+// https://yaml.org/spec/1.2.2/#732-single-quoted-style
+func escapeSingleQuote(s string) string {
+ var sb strings.Builder
+ growLen := len(s) + // s includes also one ' from the doubled pair
+ 2 + // opening and closing '
+ strings.Count(s, "'") // ' added by ReplaceAll
+ sb.Grow(growLen)
+ sb.WriteString("'")
+ sb.WriteString(strings.ReplaceAll(s, "'", "''"))
+ sb.WriteString("'")
+ return sb.String()
+}
+
+// String string value to text with quote or literal header if required
+func (n *StringNode) String() string {
+ switch n.Token.Type {
+ case token.SingleQuoteType:
+ quoted := escapeSingleQuote(n.Value)
+ if n.Comment != nil {
+ return addCommentString(quoted, n.Comment)
+ }
+ return quoted
+ case token.DoubleQuoteType:
+ quoted := strconv.Quote(n.Value)
+ if n.Comment != nil {
+ return addCommentString(quoted, n.Comment)
+ }
+ return quoted
+ }
+
+ lbc := token.DetectLineBreakCharacter(n.Value)
+ if strings.Contains(n.Value, lbc) {
+ // This block assumes that the line breaks in this inside scalar content and the Outside scalar content are the same.
+ // It works mostly, but inconsistencies occur if line break characters are mixed.
+ header := token.LiteralBlockHeader(n.Value)
+ space := strings.Repeat(" ", n.Token.Position.Column-1)
+ values := []string{}
+ for _, v := range strings.Split(n.Value, lbc) {
+ values = append(values, fmt.Sprintf("%s %s", space, v))
+ }
+ block := strings.TrimSuffix(strings.TrimSuffix(strings.Join(values, lbc), fmt.Sprintf("%s %s", lbc, space)), fmt.Sprintf(" %s", space))
+ return fmt.Sprintf("%s%s%s", header, lbc, block)
+ } else if len(n.Value) > 0 && (n.Value[0] == '{' || n.Value[0] == '[') {
+ return fmt.Sprintf(`'%s'`, n.Value)
+ }
+ if n.Comment != nil {
+ return addCommentString(n.Value, n.Comment)
+ }
+ return n.Value
+}
+
+func (n *StringNode) stringWithoutComment() string {
+ switch n.Token.Type {
+ case token.SingleQuoteType:
+ quoted := fmt.Sprintf(`'%s'`, n.Value)
+ return quoted
+ case token.DoubleQuoteType:
+ quoted := strconv.Quote(n.Value)
+ return quoted
+ }
+
+ lbc := token.DetectLineBreakCharacter(n.Value)
+ if strings.Contains(n.Value, lbc) {
+ // This block assumes that the line breaks in this inside scalar content and the Outside scalar content are the same.
+ // It works mostly, but inconsistencies occur if line break characters are mixed.
+ header := token.LiteralBlockHeader(n.Value)
+ space := strings.Repeat(" ", n.Token.Position.Column-1)
+ values := []string{}
+ for _, v := range strings.Split(n.Value, lbc) {
+ values = append(values, fmt.Sprintf("%s %s", space, v))
+ }
+ block := strings.TrimSuffix(strings.TrimSuffix(strings.Join(values, lbc), fmt.Sprintf("%s %s", lbc, space)), fmt.Sprintf(" %s", space))
+ return fmt.Sprintf("%s%s%s", header, lbc, block)
+ } else if len(n.Value) > 0 && (n.Value[0] == '{' || n.Value[0] == '[') {
+ return fmt.Sprintf(`'%s'`, n.Value)
+ }
+ return n.Value
+}
+
+// MarshalYAML encodes to a YAML text
+func (n *StringNode) MarshalYAML() ([]byte, error) {
+ return []byte(n.String()), nil
+}
+
+// LiteralNode type of literal node
+type LiteralNode struct {
+ *BaseNode
+ Start *token.Token
+ Value *StringNode
+}
+
+// Read implements (io.Reader).Read
+func (n *LiteralNode) Read(p []byte) (int, error) {
+ return readNode(p, n)
+}
+
+// Type returns LiteralType
+func (n *LiteralNode) Type() NodeType { return LiteralType }
+
+// GetToken returns token instance
+func (n *LiteralNode) GetToken() *token.Token {
+ return n.Start
+}
+
+// AddColumn add column number to child nodes recursively
+func (n *LiteralNode) AddColumn(col int) {
+ n.Start.AddColumn(col)
+ if n.Value != nil {
+ n.Value.AddColumn(col)
+ }
+}
+
+// GetValue returns string value
+func (n *LiteralNode) GetValue() interface{} {
+ return n.String()
+}
+
+// String literal to text
+func (n *LiteralNode) String() string {
+ origin := n.Value.GetToken().Origin
+ lit := strings.TrimRight(strings.TrimRight(origin, " "), "\n")
+ if n.Comment != nil {
+ return fmt.Sprintf("%s %s\n%s", n.Start.Value, n.Comment.String(), lit)
+ }
+ return fmt.Sprintf("%s\n%s", n.Start.Value, lit)
+}
+
+func (n *LiteralNode) stringWithoutComment() string {
+ return n.String()
+}
+
+// MarshalYAML encodes to a YAML text
+func (n *LiteralNode) MarshalYAML() ([]byte, error) {
+ return []byte(n.String()), nil
+}
+
+// MergeKeyNode type of merge key node
+type MergeKeyNode struct {
+ *BaseNode
+ Token *token.Token
+}
+
+// Read implements (io.Reader).Read
+func (n *MergeKeyNode) Read(p []byte) (int, error) {
+ return readNode(p, n)
+}
+
+// Type returns MergeKeyType
+func (n *MergeKeyNode) Type() NodeType { return MergeKeyType }
+
+// GetToken returns token instance
+func (n *MergeKeyNode) GetToken() *token.Token {
+ return n.Token
+}
+
+// GetValue returns '<<' value
+func (n *MergeKeyNode) GetValue() interface{} {
+ return n.Token.Value
+}
+
+// String returns '<<' value
+func (n *MergeKeyNode) String() string {
+ return n.stringWithoutComment()
+}
+
+func (n *MergeKeyNode) stringWithoutComment() string {
+ return n.Token.Value
+}
+
+// AddColumn add column number to child nodes recursively
+func (n *MergeKeyNode) AddColumn(col int) {
+ n.Token.AddColumn(col)
+}
+
+// MarshalYAML encodes to a YAML text
+func (n *MergeKeyNode) MarshalYAML() ([]byte, error) {
+ return []byte(n.String()), nil
+}
+
+// BoolNode type of boolean node
+type BoolNode struct {
+ *BaseNode
+ Token *token.Token
+ Value bool
+}
+
+// Read implements (io.Reader).Read
+func (n *BoolNode) Read(p []byte) (int, error) {
+ return readNode(p, n)
+}
+
+// Type returns BoolType
+func (n *BoolNode) Type() NodeType { return BoolType }
+
+// GetToken returns token instance
+func (n *BoolNode) GetToken() *token.Token {
+ return n.Token
+}
+
+// AddColumn add column number to child nodes recursively
+func (n *BoolNode) AddColumn(col int) {
+ n.Token.AddColumn(col)
+}
+
+// GetValue returns boolean value
+func (n *BoolNode) GetValue() interface{} {
+ return n.Value
+}
+
+// String boolean to text
+func (n *BoolNode) String() string {
+ if n.Comment != nil {
+ return addCommentString(n.Token.Value, n.Comment)
+ }
+ return n.stringWithoutComment()
+}
+
+func (n *BoolNode) stringWithoutComment() string {
+ return n.Token.Value
+}
+
+// MarshalYAML encodes to a YAML text
+func (n *BoolNode) MarshalYAML() ([]byte, error) {
+ return []byte(n.String()), nil
+}
+
+// InfinityNode type of infinity node
+type InfinityNode struct {
+ *BaseNode
+ Token *token.Token
+ Value float64
+}
+
+// Read implements (io.Reader).Read
+func (n *InfinityNode) Read(p []byte) (int, error) {
+ return readNode(p, n)
+}
+
+// Type returns InfinityType
+func (n *InfinityNode) Type() NodeType { return InfinityType }
+
+// GetToken returns token instance
+func (n *InfinityNode) GetToken() *token.Token {
+ return n.Token
+}
+
+// AddColumn add column number to child nodes recursively
+func (n *InfinityNode) AddColumn(col int) {
+ n.Token.AddColumn(col)
+}
+
+// GetValue returns math.Inf(0) or math.Inf(-1)
+func (n *InfinityNode) GetValue() interface{} {
+ return n.Value
+}
+
+// String infinity to text
+func (n *InfinityNode) String() string {
+ if n.Comment != nil {
+ return addCommentString(n.Token.Value, n.Comment)
+ }
+ return n.stringWithoutComment()
+}
+
+func (n *InfinityNode) stringWithoutComment() string {
+ return n.Token.Value
+}
+
+// MarshalYAML encodes to a YAML text
+func (n *InfinityNode) MarshalYAML() ([]byte, error) {
+ return []byte(n.String()), nil
+}
+
+// NanNode type of nan node
+type NanNode struct {
+ *BaseNode
+ Token *token.Token
+}
+
+// Read implements (io.Reader).Read
+func (n *NanNode) Read(p []byte) (int, error) {
+ return readNode(p, n)
+}
+
+// Type returns NanType
+func (n *NanNode) Type() NodeType { return NanType }
+
+// GetToken returns token instance
+func (n *NanNode) GetToken() *token.Token {
+ return n.Token
+}
+
+// AddColumn add column number to child nodes recursively
+func (n *NanNode) AddColumn(col int) {
+ n.Token.AddColumn(col)
+}
+
+// GetValue returns math.NaN()
+func (n *NanNode) GetValue() interface{} {
+ return math.NaN()
+}
+
+// String returns .nan
+func (n *NanNode) String() string {
+ if n.Comment != nil {
+ return addCommentString(n.Token.Value, n.Comment)
+ }
+ return n.stringWithoutComment()
+}
+
+func (n *NanNode) stringWithoutComment() string {
+ return n.Token.Value
+}
+
+// MarshalYAML encodes to a YAML text
+func (n *NanNode) MarshalYAML() ([]byte, error) {
+ return []byte(n.String()), nil
+}
+
+// MapNode interface of MappingValueNode / MappingNode
+type MapNode interface {
+ MapRange() *MapNodeIter
+}
+
+// MapNodeIter is an iterator for ranging over a MapNode
+type MapNodeIter struct {
+ values []*MappingValueNode
+ idx int
+}
+
+const (
+ startRangeIndex = -1
+)
+
+// Next advances the map iterator and reports whether there is another entry.
+// It returns false when the iterator is exhausted.
+func (m *MapNodeIter) Next() bool {
+ m.idx++
+ next := m.idx < len(m.values)
+ return next
+}
+
+// Key returns the key of the iterator's current map node entry.
+func (m *MapNodeIter) Key() MapKeyNode {
+ return m.values[m.idx].Key
+}
+
+// Value returns the value of the iterator's current map node entry.
+func (m *MapNodeIter) Value() Node {
+ return m.values[m.idx].Value
+}
+
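A minimal usage sketch (editorial illustration, not part of the vendored files; input and names are assumptions): MapRange, implemented by MappingNode and MappingValueNode below, returns this iterator.

package main

import (
	"fmt"

	"github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/ast"
	"github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/parser"
)

func main() {
	f, err := parser.ParseBytes([]byte("name: runn\nversion: 1\n"), 0)
	if err != nil {
		panic(err)
	}
	// A two-entry document body is a MappingNode, which satisfies MapNode.
	if m, ok := f.Docs[0].Body.(ast.MapNode); ok {
		for it := m.MapRange(); it.Next(); {
			fmt.Printf("%s -> %s\n", it.Key(), it.Value())
		}
	}
}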
+// MappingNode type of mapping node
+type MappingNode struct {
+ *BaseNode
+ Start *token.Token
+ End *token.Token
+ IsFlowStyle bool
+ Values []*MappingValueNode
+ FootComment *CommentGroupNode
+}
+
+func (n *MappingNode) startPos() *token.Position {
+ if len(n.Values) == 0 {
+ return n.Start.Position
+ }
+ return n.Values[0].Key.GetToken().Position
+}
+
+// Merge merges the key/value entries of the given mapping into n.
+func (n *MappingNode) Merge(target *MappingNode) {
+ keyToMapValueMap := map[string]*MappingValueNode{}
+ for _, value := range n.Values {
+ key := value.Key.String()
+ keyToMapValueMap[key] = value
+ }
+ column := n.startPos().Column - target.startPos().Column
+ target.AddColumn(column)
+ for _, value := range target.Values {
+ mapValue, exists := keyToMapValueMap[value.Key.String()]
+ if exists {
+ mapValue.Value = value.Value
+ } else {
+ n.Values = append(n.Values, value)
+ }
+ }
+}
+
+// SetIsFlowStyle sets the IsFlowStyle field recursively.
+func (n *MappingNode) SetIsFlowStyle(isFlow bool) {
+ n.IsFlowStyle = isFlow
+ for _, value := range n.Values {
+ value.SetIsFlowStyle(isFlow)
+ }
+}
+
+// Read implements (io.Reader).Read
+func (n *MappingNode) Read(p []byte) (int, error) {
+ return readNode(p, n)
+}
+
+// Type returns MappingType
+func (n *MappingNode) Type() NodeType { return MappingType }
+
+// GetToken returns token instance
+func (n *MappingNode) GetToken() *token.Token {
+ return n.Start
+}
+
+// AddColumn add column number to child nodes recursively
+func (n *MappingNode) AddColumn(col int) {
+ n.Start.AddColumn(col)
+ n.End.AddColumn(col)
+ for _, value := range n.Values {
+ value.AddColumn(col)
+ }
+}
+
+func (n *MappingNode) flowStyleString(commentMode bool) string {
+ values := []string{}
+ for _, value := range n.Values {
+ values = append(values, strings.TrimLeft(value.String(), " "))
+ }
+ mapText := fmt.Sprintf("{%s}", strings.Join(values, ", "))
+ if commentMode && n.Comment != nil {
+ return addCommentString(mapText, n.Comment)
+ }
+ return mapText
+}
+
+func (n *MappingNode) blockStyleString(commentMode bool) string {
+ values := []string{}
+ for _, value := range n.Values {
+ values = append(values, value.String())
+ }
+ mapText := strings.Join(values, "\n")
+ if commentMode && n.Comment != nil {
+ value := values[0]
+ var spaceNum int
+ for i := 0; i < len(value); i++ {
+ if value[i] != ' ' {
+ break
+ }
+ spaceNum++
+ }
+ comment := n.Comment.StringWithSpace(spaceNum)
+ return fmt.Sprintf("%s\n%s", comment, mapText)
+ }
+ return mapText
+}
+
+// String mapping values to text
+func (n *MappingNode) String() string {
+ if len(n.Values) == 0 {
+ if n.Comment != nil {
+ return addCommentString("{}", n.Comment)
+ }
+ return "{}"
+ }
+
+ commentMode := true
+ if n.IsFlowStyle || len(n.Values) == 0 {
+ return n.flowStyleString(commentMode)
+ }
+ return n.blockStyleString(commentMode)
+}
+
+// MapRange implements MapNode protocol
+func (n *MappingNode) MapRange() *MapNodeIter {
+ return &MapNodeIter{
+ idx: startRangeIndex,
+ values: n.Values,
+ }
+}
+
+// MarshalYAML encodes to a YAML text
+func (n *MappingNode) MarshalYAML() ([]byte, error) {
+ return []byte(n.String()), nil
+}
+
+// MappingKeyNode type of mapping key node
+type MappingKeyNode struct {
+ *BaseNode
+ Start *token.Token
+ Value Node
+}
+
+// Read implements (io.Reader).Read
+func (n *MappingKeyNode) Read(p []byte) (int, error) {
+ return readNode(p, n)
+}
+
+// Type returns MappingKeyType
+func (n *MappingKeyNode) Type() NodeType { return MappingKeyType }
+
+// GetToken returns token instance
+func (n *MappingKeyNode) GetToken() *token.Token {
+ return n.Start
+}
+
+// AddColumn add column number to child nodes recursively
+func (n *MappingKeyNode) AddColumn(col int) {
+ n.Start.AddColumn(col)
+ if n.Value != nil {
+ n.Value.AddColumn(col)
+ }
+}
+
+// String mapping key to text
+func (n *MappingKeyNode) String() string {
+ return n.stringWithoutComment()
+}
+
+func (n *MappingKeyNode) stringWithoutComment() string {
+ return fmt.Sprintf("%s %s", n.Start.Value, n.Value.String())
+}
+
+// MarshalYAML encodes to a YAML text
+func (n *MappingKeyNode) MarshalYAML() ([]byte, error) {
+ return []byte(n.String()), nil
+}
+
+// MappingValueNode type of mapping value
+type MappingValueNode struct {
+ *BaseNode
+ Start *token.Token
+ Key MapKeyNode
+ Value Node
+ FootComment *CommentGroupNode
+}
+
+// Replace replaces the value node.
+func (n *MappingValueNode) Replace(value Node) error {
+ column := n.Value.GetToken().Position.Column - value.GetToken().Position.Column
+ value.AddColumn(column)
+ n.Value = value
+ return nil
+}
+
+// Read implements (io.Reader).Read
+func (n *MappingValueNode) Read(p []byte) (int, error) {
+ return readNode(p, n)
+}
+
+// Type returns MappingValueType
+func (n *MappingValueNode) Type() NodeType { return MappingValueType }
+
+// GetToken returns token instance
+func (n *MappingValueNode) GetToken() *token.Token {
+ return n.Start
+}
+
+// AddColumn add column number to child nodes recursively
+func (n *MappingValueNode) AddColumn(col int) {
+ n.Start.AddColumn(col)
+ if n.Key != nil {
+ n.Key.AddColumn(col)
+ }
+ if n.Value != nil {
+ n.Value.AddColumn(col)
+ }
+}
+
+// SetIsFlowStyle sets the IsFlowStyle field recursively.
+func (n *MappingValueNode) SetIsFlowStyle(isFlow bool) {
+ switch value := n.Value.(type) {
+ case *MappingNode:
+ value.SetIsFlowStyle(isFlow)
+ case *MappingValueNode:
+ value.SetIsFlowStyle(isFlow)
+ case *SequenceNode:
+ value.SetIsFlowStyle(isFlow)
+ }
+}
+
+// String mapping value to text
+func (n *MappingValueNode) String() string {
+ var text string
+ if n.Comment != nil {
+ text = fmt.Sprintf(
+ "%s\n%s",
+ n.Comment.StringWithSpace(n.Key.GetToken().Position.Column-1),
+ n.toString(),
+ )
+ } else {
+ text = n.toString()
+ }
+ if n.FootComment != nil {
+ text += fmt.Sprintf("\n%s", n.FootComment.StringWithSpace(n.Key.GetToken().Position.Column-1))
+ }
+ return text
+}
+
+func (n *MappingValueNode) toString() string {
+ space := strings.Repeat(" ", n.Key.GetToken().Position.Column-1)
+ keyIndentLevel := n.Key.GetToken().Position.IndentLevel
+ valueIndentLevel := n.Value.GetToken().Position.IndentLevel
+ keyComment := n.Key.GetComment()
+ if _, ok := n.Value.(ScalarNode); ok {
+ return fmt.Sprintf("%s%s: %s", space, n.Key.String(), n.Value.String())
+ } else if keyIndentLevel < valueIndentLevel {
+ if keyComment != nil {
+ return fmt.Sprintf(
+ "%s%s: %s\n%s",
+ space,
+ n.Key.stringWithoutComment(),
+ keyComment.String(),
+ n.Value.String(),
+ )
+ }
+ return fmt.Sprintf("%s%s:\n%s", space, n.Key.String(), n.Value.String())
+ } else if m, ok := n.Value.(*MappingNode); ok && (m.IsFlowStyle || len(m.Values) == 0) {
+ return fmt.Sprintf("%s%s: %s", space, n.Key.String(), n.Value.String())
+ } else if s, ok := n.Value.(*SequenceNode); ok && (s.IsFlowStyle || len(s.Values) == 0) {
+ return fmt.Sprintf("%s%s: %s", space, n.Key.String(), n.Value.String())
+ } else if _, ok := n.Value.(*AnchorNode); ok {
+ return fmt.Sprintf("%s%s: %s", space, n.Key.String(), n.Value.String())
+ } else if _, ok := n.Value.(*AliasNode); ok {
+ return fmt.Sprintf("%s%s: %s", space, n.Key.String(), n.Value.String())
+ }
+ if keyComment != nil {
+ return fmt.Sprintf(
+ "%s%s: %s\n%s",
+ space,
+ n.Key.stringWithoutComment(),
+ keyComment.String(),
+ n.Value.String(),
+ )
+ }
+ if m, ok := n.Value.(*MappingNode); ok && m.Comment != nil {
+ return fmt.Sprintf(
+ "%s%s: %s",
+ space,
+ n.Key.String(),
+ strings.TrimLeft(n.Value.String(), " "),
+ )
+ }
+ return fmt.Sprintf("%s%s:\n%s", space, n.Key.String(), n.Value.String())
+}
+
+// MapRange implements MapNode protocol
+func (n *MappingValueNode) MapRange() *MapNodeIter {
+ return &MapNodeIter{
+ idx: startRangeIndex,
+ values: []*MappingValueNode{n},
+ }
+}
+
+// MarshalYAML encodes to a YAML text
+func (n *MappingValueNode) MarshalYAML() ([]byte, error) {
+ return []byte(n.String()), nil
+}
+
+// ArrayNode interface of SequenceNode
+type ArrayNode interface {
+ ArrayRange() *ArrayNodeIter
+}
+
+// ArrayNodeIter is an iterator for ranging over an ArrayNode
+type ArrayNodeIter struct {
+ values []Node
+ idx int
+}
+
+// Next advances the array iterator and reports whether there is another entry.
+// It returns false when the iterator is exhausted.
+func (m *ArrayNodeIter) Next() bool {
+ m.idx++
+ next := m.idx < len(m.values)
+ return next
+}
+
+// Value returns the value of the iterator's current array entry.
+func (m *ArrayNodeIter) Value() Node {
+ return m.values[m.idx]
+}
+
+// Len returns length of array
+func (m *ArrayNodeIter) Len() int {
+ return len(m.values)
+}
+
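A short editorial sketch (illustrative; it reuses the ast import from the MapRange sketch above): SequenceNode below implements ArrayRange, so the elements of a sequence can be visited the same way.

func printSequence(arr ast.ArrayNode) {
	for it := arr.ArrayRange(); it.Next(); {
		fmt.Println(it.Value().String())
	}
}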
+// SequenceNode type of sequence node
+type SequenceNode struct {
+ *BaseNode
+ Start *token.Token
+ End *token.Token
+ IsFlowStyle bool
+ Values []Node
+ ValueHeadComments []*CommentGroupNode
+ FootComment *CommentGroupNode
+}
+
+// Replace replaces the value node at the given index.
+func (n *SequenceNode) Replace(idx int, value Node) error {
+ if len(n.Values) <= idx {
+ return xerrors.Errorf(
+ "invalid index for sequence: sequence length is %d, but specified %d index",
+ len(n.Values), idx,
+ )
+ }
+ column := n.Values[idx].GetToken().Position.Column - value.GetToken().Position.Column
+ value.AddColumn(column)
+ n.Values[idx] = value
+ return nil
+}
+
+// Merge merges the values of the given sequence into n.
+func (n *SequenceNode) Merge(target *SequenceNode) {
+ column := n.Start.Position.Column - target.Start.Position.Column
+ target.AddColumn(column)
+ n.Values = append(n.Values, target.Values...)
+}
+
+// SetIsFlowStyle sets the IsFlowStyle field recursively.
+func (n *SequenceNode) SetIsFlowStyle(isFlow bool) {
+ n.IsFlowStyle = isFlow
+ for _, value := range n.Values {
+ switch value := value.(type) {
+ case *MappingNode:
+ value.SetIsFlowStyle(isFlow)
+ case *MappingValueNode:
+ value.SetIsFlowStyle(isFlow)
+ case *SequenceNode:
+ value.SetIsFlowStyle(isFlow)
+ }
+ }
+}
+
+// Read implements (io.Reader).Read
+func (n *SequenceNode) Read(p []byte) (int, error) {
+ return readNode(p, n)
+}
+
+// Type returns SequenceType
+func (n *SequenceNode) Type() NodeType { return SequenceType }
+
+// GetToken returns token instance
+func (n *SequenceNode) GetToken() *token.Token {
+ return n.Start
+}
+
+// AddColumn add column number to child nodes recursively
+func (n *SequenceNode) AddColumn(col int) {
+ n.Start.AddColumn(col)
+ n.End.AddColumn(col)
+ for _, value := range n.Values {
+ value.AddColumn(col)
+ }
+}
+
+func (n *SequenceNode) flowStyleString() string {
+ values := []string{}
+ for _, value := range n.Values {
+ values = append(values, value.String())
+ }
+ return fmt.Sprintf("[%s]", strings.Join(values, ", "))
+}
+
+func (n *SequenceNode) blockStyleString() string {
+ space := strings.Repeat(" ", n.Start.Position.Column-1)
+ values := []string{}
+ if n.Comment != nil {
+ values = append(values, n.Comment.StringWithSpace(n.Start.Position.Column-1))
+ }
+
+ for idx, value := range n.Values {
+ valueStr := value.String()
+ splittedValues := strings.Split(valueStr, "\n")
+ trimmedFirstValue := strings.TrimLeft(splittedValues[0], " ")
+ diffLength := len(splittedValues[0]) - len(trimmedFirstValue)
+ if len(splittedValues) > 1 && value.Type() == StringType || value.Type() == LiteralType {
+ // If multi-line string, the space characters for indent have already been added, so delete them.
+ prefix := space + " "
+ for i := 1; i < len(splittedValues); i++ {
+ splittedValues[i] = strings.TrimPrefix(splittedValues[i], prefix)
+ }
+ }
+ newValues := []string{trimmedFirstValue}
+ for i := 1; i < len(splittedValues); i++ {
+ if len(splittedValues[i]) <= diffLength {
+ // this line is \n or white space only
+ newValues = append(newValues, "")
+ continue
+ }
+ trimmed := splittedValues[i][diffLength:]
+ newValues = append(newValues, fmt.Sprintf("%s %s", space, trimmed))
+ }
+ newValue := strings.Join(newValues, "\n")
+ if len(n.ValueHeadComments) == len(n.Values) && n.ValueHeadComments[idx] != nil {
+ values = append(values, n.ValueHeadComments[idx].StringWithSpace(n.Start.Position.Column-1))
+ }
+ values = append(values, fmt.Sprintf("%s- %s", space, newValue))
+ }
+ if n.FootComment != nil {
+ values = append(values, n.FootComment.StringWithSpace(n.Start.Position.Column-1))
+ }
+ return strings.Join(values, "\n")
+}
+
+// String sequence to text
+func (n *SequenceNode) String() string {
+ if n.IsFlowStyle || len(n.Values) == 0 {
+ return n.flowStyleString()
+ }
+ return n.blockStyleString()
+}
+
+// ArrayRange implements ArrayNode protocol
+func (n *SequenceNode) ArrayRange() *ArrayNodeIter {
+ return &ArrayNodeIter{
+ idx: startRangeIndex,
+ values: n.Values,
+ }
+}
+
+// MarshalYAML encodes to a YAML text
+func (n *SequenceNode) MarshalYAML() ([]byte, error) {
+ return []byte(n.String()), nil
+}
+
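Editorial sketch (illustrative input and expected output; it reuses the imports from the MapRange sketch above, and Filter is defined near the end of this file): SetIsFlowStyle lets a parsed block sequence be re-rendered in flow style.

f, _ := parser.ParseBytes([]byte("items:\n- a\n- b\n"), 0)
for _, n := range ast.Filter(ast.SequenceType, f.Docs[0]) {
	n.(*ast.SequenceNode).SetIsFlowStyle(true)
}
fmt.Println(f.Docs[0].String()) // expected to print: items: [a, b]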
+// AnchorNode type of anchor node
+type AnchorNode struct {
+ *BaseNode
+ Start *token.Token
+ Name Node
+ Value Node
+}
+
+func (n *AnchorNode) SetName(name string) error {
+ if n.Name == nil {
+ return ErrInvalidAnchorName
+ }
+ s, ok := n.Name.(*StringNode)
+ if !ok {
+ return ErrInvalidAnchorName
+ }
+ s.Value = name
+ return nil
+}
+
+// Read implements (io.Reader).Read
+func (n *AnchorNode) Read(p []byte) (int, error) {
+ return readNode(p, n)
+}
+
+// Type returns AnchorType
+func (n *AnchorNode) Type() NodeType { return AnchorType }
+
+// GetToken returns token instance
+func (n *AnchorNode) GetToken() *token.Token {
+ return n.Start
+}
+
+// AddColumn add column number to child nodes recursively
+func (n *AnchorNode) AddColumn(col int) {
+ n.Start.AddColumn(col)
+ if n.Name != nil {
+ n.Name.AddColumn(col)
+ }
+ if n.Value != nil {
+ n.Value.AddColumn(col)
+ }
+}
+
+// String anchor to text
+func (n *AnchorNode) String() string {
+ value := n.Value.String()
+ if len(strings.Split(value, "\n")) > 1 {
+ return fmt.Sprintf("&%s\n%s", n.Name.String(), value)
+ } else if s, ok := n.Value.(*SequenceNode); ok && !s.IsFlowStyle {
+ return fmt.Sprintf("&%s\n%s", n.Name.String(), value)
+ } else if m, ok := n.Value.(*MappingNode); ok && !m.IsFlowStyle {
+ return fmt.Sprintf("&%s\n%s", n.Name.String(), value)
+ }
+ return fmt.Sprintf("&%s %s", n.Name.String(), value)
+}
+
+// MarshalYAML encodes to a YAML text
+func (n *AnchorNode) MarshalYAML() ([]byte, error) {
+ return []byte(n.String()), nil
+}
+
+// AliasNode type of alias node
+type AliasNode struct {
+ *BaseNode
+ Start *token.Token
+ Value Node
+}
+
+func (n *AliasNode) SetName(name string) error {
+ if n.Value == nil {
+ return ErrInvalidAliasName
+ }
+ s, ok := n.Value.(*StringNode)
+ if !ok {
+ return ErrInvalidAliasName
+ }
+ s.Value = name
+ return nil
+}
+
+// Read implements (io.Reader).Read
+func (n *AliasNode) Read(p []byte) (int, error) {
+ return readNode(p, n)
+}
+
+// Type returns AliasType
+func (n *AliasNode) Type() NodeType { return AliasType }
+
+// GetToken returns token instance
+func (n *AliasNode) GetToken() *token.Token {
+ return n.Start
+}
+
+// AddColumn add column number to child nodes recursively
+func (n *AliasNode) AddColumn(col int) {
+ n.Start.AddColumn(col)
+ if n.Value != nil {
+ n.Value.AddColumn(col)
+ }
+}
+
+// String alias to text
+func (n *AliasNode) String() string {
+ return fmt.Sprintf("*%s", n.Value.String())
+}
+
+// MarshalYAML encodes to a YAML text
+func (n *AliasNode) MarshalYAML() ([]byte, error) {
+ return []byte(n.String()), nil
+}
+
+// DirectiveNode type of directive node
+type DirectiveNode struct {
+ *BaseNode
+ Start *token.Token
+ Value Node
+}
+
+// Read implements (io.Reader).Read
+func (n *DirectiveNode) Read(p []byte) (int, error) {
+ return readNode(p, n)
+}
+
+// Type returns DirectiveType
+func (n *DirectiveNode) Type() NodeType { return DirectiveType }
+
+// GetToken returns token instance
+func (n *DirectiveNode) GetToken() *token.Token {
+ return n.Start
+}
+
+// AddColumn add column number to child nodes recursively
+func (n *DirectiveNode) AddColumn(col int) {
+ if n.Value != nil {
+ n.Value.AddColumn(col)
+ }
+}
+
+// String directive to text
+func (n *DirectiveNode) String() string {
+ return fmt.Sprintf("%s%s", n.Start.Value, n.Value.String())
+}
+
+// MarshalYAML encodes to a YAML text
+func (n *DirectiveNode) MarshalYAML() ([]byte, error) {
+ return []byte(n.String()), nil
+}
+
+// TagNode type of tag node
+type TagNode struct {
+ *BaseNode
+ Start *token.Token
+ Value Node
+}
+
+// Read implements (io.Reader).Read
+func (n *TagNode) Read(p []byte) (int, error) {
+ return readNode(p, n)
+}
+
+// Type returns TagType
+func (n *TagNode) Type() NodeType { return TagType }
+
+// GetToken returns token instance
+func (n *TagNode) GetToken() *token.Token {
+ return n.Start
+}
+
+// AddColumn add column number to child nodes recursively
+func (n *TagNode) AddColumn(col int) {
+ n.Start.AddColumn(col)
+ if n.Value != nil {
+ n.Value.AddColumn(col)
+ }
+}
+
+// String tag to text
+func (n *TagNode) String() string {
+ return fmt.Sprintf("%s %s", n.Start.Value, n.Value.String())
+}
+
+// MarshalYAML encodes to a YAML text
+func (n *TagNode) MarshalYAML() ([]byte, error) {
+ return []byte(n.String()), nil
+}
+
+// CommentNode type of comment node
+type CommentNode struct {
+ *BaseNode
+ Token *token.Token
+}
+
+// Read implements (io.Reader).Read
+func (n *CommentNode) Read(p []byte) (int, error) {
+ return readNode(p, n)
+}
+
+// Type returns CommentType
+func (n *CommentNode) Type() NodeType { return CommentType }
+
+// GetToken returns token instance
+func (n *CommentNode) GetToken() *token.Token { return n.Token }
+
+// AddColumn add column number to child nodes recursively
+func (n *CommentNode) AddColumn(col int) {
+ if n.Token == nil {
+ return
+ }
+ n.Token.AddColumn(col)
+}
+
+// String comment to text
+func (n *CommentNode) String() string {
+ return fmt.Sprintf("#%s", n.Token.Value)
+}
+
+// MarshalYAML encodes to a YAML text
+func (n *CommentNode) MarshalYAML() ([]byte, error) {
+ return []byte(n.String()), nil
+}
+
+// CommentGroupNode type of comment group node
+type CommentGroupNode struct {
+ *BaseNode
+ Comments []*CommentNode
+}
+
+// Read implements (io.Reader).Read
+func (n *CommentGroupNode) Read(p []byte) (int, error) {
+ return readNode(p, n)
+}
+
+// Type returns CommentType
+func (n *CommentGroupNode) Type() NodeType { return CommentType }
+
+// GetToken returns token instance
+func (n *CommentGroupNode) GetToken() *token.Token {
+ if len(n.Comments) > 0 {
+ return n.Comments[0].Token
+ }
+ return nil
+}
+
+// AddColumn add column number to child nodes recursively
+func (n *CommentGroupNode) AddColumn(col int) {
+ for _, comment := range n.Comments {
+ comment.AddColumn(col)
+ }
+}
+
+// String comment to text
+func (n *CommentGroupNode) String() string {
+ values := []string{}
+ for _, comment := range n.Comments {
+ values = append(values, comment.String())
+ }
+ return strings.Join(values, "\n")
+}
+
+func (n *CommentGroupNode) StringWithSpace(col int) string {
+ values := []string{}
+ space := strings.Repeat(" ", col)
+ for _, comment := range n.Comments {
+ values = append(values, space+comment.String())
+ }
+ return strings.Join(values, "\n")
+}
+
+// MarshalYAML encodes to a YAML text
+func (n *CommentGroupNode) MarshalYAML() ([]byte, error) {
+ return []byte(n.String()), nil
+}
+
+// Visitor has a Visit method that is invoked for each node encountered by Walk.
+// If the visitor w returned by Visit(node) is not nil,
+// Walk visits each of the children of node with the visitor w.
+type Visitor interface {
+ Visit(Node) Visitor
+}
+
+// Walk traverses an AST in depth-first order: it starts by calling v.Visit(node); node must not be nil.
+// If the visitor w returned by v.Visit(node) is not nil,
+// Walk is invoked recursively with visitor w for each of the non-nil children of node.
+func Walk(v Visitor, node Node) {
+ if v = v.Visit(node); v == nil {
+ return
+ }
+
+ switch n := node.(type) {
+ case *CommentNode:
+ case *NullNode:
+ walkComment(v, n.BaseNode)
+ case *IntegerNode:
+ walkComment(v, n.BaseNode)
+ case *FloatNode:
+ walkComment(v, n.BaseNode)
+ case *StringNode:
+ walkComment(v, n.BaseNode)
+ case *MergeKeyNode:
+ walkComment(v, n.BaseNode)
+ case *BoolNode:
+ walkComment(v, n.BaseNode)
+ case *InfinityNode:
+ walkComment(v, n.BaseNode)
+ case *NanNode:
+ walkComment(v, n.BaseNode)
+ case *LiteralNode:
+ walkComment(v, n.BaseNode)
+ Walk(v, n.Value)
+ case *DirectiveNode:
+ walkComment(v, n.BaseNode)
+ Walk(v, n.Value)
+ case *TagNode:
+ walkComment(v, n.BaseNode)
+ Walk(v, n.Value)
+ case *DocumentNode:
+ walkComment(v, n.BaseNode)
+ Walk(v, n.Body)
+ case *MappingNode:
+ walkComment(v, n.BaseNode)
+ for _, value := range n.Values {
+ Walk(v, value)
+ }
+ case *MappingKeyNode:
+ walkComment(v, n.BaseNode)
+ Walk(v, n.Value)
+ case *MappingValueNode:
+ walkComment(v, n.BaseNode)
+ Walk(v, n.Key)
+ Walk(v, n.Value)
+ case *SequenceNode:
+ walkComment(v, n.BaseNode)
+ for _, value := range n.Values {
+ Walk(v, value)
+ }
+ case *AnchorNode:
+ walkComment(v, n.BaseNode)
+ Walk(v, n.Name)
+ Walk(v, n.Value)
+ case *AliasNode:
+ walkComment(v, n.BaseNode)
+ Walk(v, n.Value)
+ }
+}
+
+func walkComment(v Visitor, base *BaseNode) {
+ if base == nil {
+ return
+ }
+ if base.Comment == nil {
+ return
+ }
+ Walk(v, base.Comment)
+}
+
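Editorial sketch (the typeCounter type is hypothetical): a Visitor that tallies node types as Walk descends.

type typeCounter struct {
	counts map[ast.NodeType]int
}

func (c *typeCounter) Visit(node ast.Node) ast.Visitor {
	if node == nil {
		return nil
	}
	c.counts[node.Type()]++
	// Returning a non-nil visitor lets Walk keep descending into children.
	return c
}

// Usage: c := &typeCounter{counts: map[ast.NodeType]int{}}; ast.Walk(c, f.Docs[0])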
+type filterWalker struct {
+ typ NodeType
+ results []Node
+}
+
+func (v *filterWalker) Visit(n Node) Visitor {
+ if v.typ == n.Type() {
+ v.results = append(v.results, n)
+ }
+ return v
+}
+
+type parentFinder struct {
+ target Node
+}
+
+func (f *parentFinder) walk(parent, node Node) Node {
+ if f.target == node {
+ return parent
+ }
+ switch n := node.(type) {
+ case *CommentNode:
+ return nil
+ case *NullNode:
+ return nil
+ case *IntegerNode:
+ return nil
+ case *FloatNode:
+ return nil
+ case *StringNode:
+ return nil
+ case *MergeKeyNode:
+ return nil
+ case *BoolNode:
+ return nil
+ case *InfinityNode:
+ return nil
+ case *NanNode:
+ return nil
+ case *LiteralNode:
+ return f.walk(node, n.Value)
+ case *DirectiveNode:
+ return f.walk(node, n.Value)
+ case *TagNode:
+ return f.walk(node, n.Value)
+ case *DocumentNode:
+ return f.walk(node, n.Body)
+ case *MappingNode:
+ for _, value := range n.Values {
+ if found := f.walk(node, value); found != nil {
+ return found
+ }
+ }
+ case *MappingKeyNode:
+ return f.walk(node, n.Value)
+ case *MappingValueNode:
+ if found := f.walk(node, n.Key); found != nil {
+ return found
+ }
+ return f.walk(node, n.Value)
+ case *SequenceNode:
+ for _, value := range n.Values {
+ if found := f.walk(node, value); found != nil {
+ return found
+ }
+ }
+ case *AnchorNode:
+ if found := f.walk(node, n.Name); found != nil {
+ return found
+ }
+ return f.walk(node, n.Value)
+ case *AliasNode:
+ return f.walk(node, n.Value)
+ }
+ return nil
+}
+
+// Parent returns the parent node of the given child node.
+func Parent(root, child Node) Node {
+ finder := &parentFinder{target: child}
+ return finder.walk(root, root)
+}
+
+// Filter returns a list of nodes that match the given type.
+func Filter(typ NodeType, node Node) []Node {
+ walker := &filterWalker{typ: typ}
+ Walk(walker, node)
+ return walker.results
+}
+
+// FilterFile returns a list of nodes that match the given type from all documents in the file.
+func FilterFile(typ NodeType, file *File) []Node {
+ results := []Node{}
+ for _, doc := range file.Docs {
+ walker := &filterWalker{typ: typ}
+ Walk(walker, doc)
+ results = append(results, walker.results...)
+ }
+ return results
+}
+
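Editorial sketch (illustrative; f is a parsed *ast.File as in the earlier sketches): FilterFile collects matching nodes across every document of a multi-document file, here all string scalars.

for _, n := range ast.FilterFile(ast.StringType, f) {
	fmt.Println(n.(*ast.StringNode).Value)
}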
+type ErrInvalidMergeType struct {
+ dst Node
+ src Node
+}
+
+func (e *ErrInvalidMergeType) Error() string {
+ return fmt.Sprintf("cannot merge %s into %s", e.src.Type(), e.dst.Type())
+}
+
+// Merge merges a document, mapping, or sequence node (src) into dst.
+func Merge(dst Node, src Node) error {
+ if doc, ok := src.(*DocumentNode); ok {
+ src = doc.Body
+ }
+ err := &ErrInvalidMergeType{dst: dst, src: src}
+ switch dst.Type() {
+ case DocumentType:
+ node := dst.(*DocumentNode)
+ return Merge(node.Body, src)
+ case MappingType:
+ node := dst.(*MappingNode)
+ target, ok := src.(*MappingNode)
+ if !ok {
+ return err
+ }
+ node.Merge(target)
+ return nil
+ case SequenceType:
+ node := dst.(*SequenceNode)
+ target, ok := src.(*SequenceNode)
+ if !ok {
+ return err
+ }
+ node.Merge(target)
+ return nil
+ }
+ return err
+}
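Editorial sketch (illustrative inputs, reusing the parser import from the first sketch): Merge overlays one parsed document onto another; MappingNode.Merge above replaces existing keys and appends new ones.

dst, _ := parser.ParseBytes([]byte("a: 1\nb: 2\n"), 0)
src, _ := parser.ParseBytes([]byte("b: 3\nc: 4\n"), 0)
if err := ast.Merge(dst.Docs[0], src.Docs[0]); err != nil {
	panic(err) // only returned when dst/src are not document, mapping, or sequence nodes
}
// dst.Docs[0].String() is expected to render a: 1, b: 3 and c: 4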
diff --git a/tmpmod/github.com/goccy/go-yaml/ast/ast_test.go b/tmpmod/github.com/goccy/go-yaml/ast/ast_test.go
new file mode 100644
index 00000000..7a162ff1
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/ast/ast_test.go
@@ -0,0 +1,36 @@
+package ast
+
+import (
+ "testing"
+
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/token"
+)
+
+func TestEscapeSingleQuote(t *testing.T) {
+ expected := `'Victor''s victory'`
+ got := escapeSingleQuote("Victor's victory")
+ if got != expected {
+ t.Fatalf("expected:%s\ngot:%s", expected, got)
+ }
+}
+
+func TestReadNode(t *testing.T) {
+ t.Run("utf-8", func(t *testing.T) {
+ value := "éɛทᛞ⠻チ▓🦄"
+ node := &StringNode{
+ BaseNode: &BaseNode{},
+ Token: &token.Token{},
+ Value: value,
+ }
+ expectedSize := len(value)
+ gotBuffer := make([]byte, expectedSize)
+ expectedBuffer := []byte(value)
+ gotSize, _ := readNode(gotBuffer, node)
+ if gotSize != expectedSize {
+ t.Fatalf("expected size:%d\ngot:%d", expectedSize, gotSize)
+ }
+ if string(gotBuffer) != string(expectedBuffer) {
+ t.Fatalf("expected buffer:%s\ngot:%s", expectedBuffer, gotBuffer)
+ }
+ })
+}
diff --git a/tmpmod/github.com/goccy/go-yaml/benchmarks/benchmark_test.go b/tmpmod/github.com/goccy/go-yaml/benchmarks/benchmark_test.go
new file mode 100644
index 00000000..de7cfd9a
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/benchmarks/benchmark_test.go
@@ -0,0 +1,52 @@
+package benchmarks
+
+import (
+ "testing"
+
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml"
+ goyaml2 "gopkg.in/yaml.v2"
+ goyaml3 "gopkg.in/yaml.v3"
+)
+
+func Benchmark(b *testing.B) {
+ const src = `---
+id: 1
+message: Hello, World
+verified: true
+elements:
+ - one
+ - 0.02
+ - null
+ - -inf
+`
+ type T struct {
+ ID int `yaml:"id"`
+ Message string `yaml:"message"`
+ Verified bool `yaml:"verified,omitempty"`
+ }
+
+ b.Run("gopkg.in/yaml.v2", func(b *testing.B) {
+ var t T
+ for i := 0; i < b.N; i++ {
+ if err := goyaml2.Unmarshal([]byte(src), &t); err != nil {
+ b.Fatal(err)
+ }
+ }
+ })
+ b.Run("gopkg.in/yaml.v3", func(b *testing.B) {
+ var t T
+ for i := 0; i < b.N; i++ {
+ if err := goyaml3.Unmarshal([]byte(src), &t); err != nil {
+ b.Fatal(err)
+ }
+ }
+ })
+ b.Run("github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml", func(b *testing.B) {
+ var t T
+ for i := 0; i < b.N; i++ {
+ if err := yaml.Unmarshal([]byte(src), &t); err != nil {
+ b.Fatal(err)
+ }
+ }
+ })
+}
diff --git a/tmpmod/github.com/goccy/go-yaml/benchmarks/go.mod b/tmpmod/github.com/goccy/go-yaml/benchmarks/go.mod
new file mode 100644
index 00000000..e1676313
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/benchmarks/go.mod
@@ -0,0 +1,11 @@
+module benchmarks
+
+go 1.12
+
+replace github.com/goccy/go-yaml => ../
+
+require (
+ github.com/goccy/go-yaml v0.0.0-00010101000000-000000000000
+ gopkg.in/yaml.v2 v2.3.0
+ gopkg.in/yaml.v3 v3.0.0-20200506231410-2ff61e1afc86
+)
diff --git a/tmpmod/github.com/goccy/go-yaml/benchmarks/go.sum b/tmpmod/github.com/goccy/go-yaml/benchmarks/go.sum
new file mode 100644
index 00000000..4bac5893
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/benchmarks/go.sum
@@ -0,0 +1,39 @@
+github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=
+github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/fatih/color v1.7.0 h1:DkWD4oS2D8LGGgTQ6IvwJJXSL5Vp2ffcQg58nFV38Ys=
+github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
+github.com/go-playground/locales v0.13.0 h1:HyWk6mgj5qFqCT5fjGBuRArbVDfE4hi8+e8ceBS/t7Q=
+github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8=
+github.com/go-playground/universal-translator v0.17.0 h1:icxd5fm+REJzpZx7ZfpaD876Lmtgy7VtROAbHHXk8no=
+github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA=
+github.com/leodido/go-urn v1.2.0 h1:hpXL4XnriNwQ/ABnpepYM/1vCLWNDfUNts8dX3xTG6Y=
+github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII=
+github.com/mattn/go-colorable v0.1.4 h1:snbPLB8fVfU9iwbbo30TPtbLRzwWu6aJS6Xh4eaaviA=
+github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
+github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
+github.com/mattn/go-isatty v0.0.10 h1:qxFzApOv4WsAL965uUPIsXzAKCZxN2p9UqdhFS4ZW10=
+github.com/mattn/go-isatty v0.0.10/go.mod h1:qgIWMr58cqv1PHHyhnkY9lrL7etaEgOFcMEpPG5Rm84=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk=
+github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
+golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20191010194322-b09406accb47 h1:/XfQ9z7ib8eEJX2hdgFTZJ/ntt0swNk5oYBziWeTCvY=
+golang.org/x/sys v0.0.0-20191010194322-b09406accb47/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
+golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898 h1:/atklqdjdhuosWIl6AIbOeHJjicWYPqR9bpxqxYG2pA=
+golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/go-playground/assert.v1 v1.2.1 h1:xoYuJVE7KT85PYWrN730RguIQO0ePzVRfFMXadIrXTM=
+gopkg.in/go-playground/assert.v1 v1.2.1/go.mod h1:9RXL0bg/zibRAgZUYszZSwO/z8Y/a8bDuhia5mkpMnE=
+gopkg.in/go-playground/validator.v9 v9.30.0 h1:Wk0Z37oBmKj9/n+tPyBHZmeL19LaCoK3Qq48VwYENss=
+gopkg.in/go-playground/validator.v9 v9.30.0/go.mod h1:+c9/zcJMFNgbLvly1L1V+PpxWdVbfP1avr/N00E2vyQ=
+gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU=
+gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v3 v3.0.0-20200506231410-2ff61e1afc86 h1:OfFoIUYv/me30yv7XlMy4F9RJw8DEm8WQ6QG1Ph4bH0=
+gopkg.in/yaml.v3 v3.0.0-20200506231410-2ff61e1afc86/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
diff --git a/tmpmod/github.com/goccy/go-yaml/cmd/ycat/ycat.go b/tmpmod/github.com/goccy/go-yaml/cmd/ycat/ycat.go
new file mode 100644
index 00000000..d273855a
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/cmd/ycat/ycat.go
@@ -0,0 +1,82 @@
+package main
+
+import (
+ "errors"
+ "fmt"
+ "os"
+
+ "github.com/fatih/color"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/lexer"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/printer"
+ "github.com/mattn/go-colorable"
+)
+
+const escape = "\x1b"
+
+func format(attr color.Attribute) string {
+ return fmt.Sprintf("%s[%dm", escape, attr)
+}
+
+func _main(args []string) error {
+ if len(args) < 2 {
+ return errors.New("ycat: usage: ycat file.yml")
+ }
+ filename := args[1]
+ bytes, err := os.ReadFile(filename)
+ if err != nil {
+ return err
+ }
+ tokens := lexer.Tokenize(string(bytes))
+ var p printer.Printer
+ p.LineNumber = true
+ p.LineNumberFormat = func(num int) string {
+ fn := color.New(color.Bold, color.FgHiWhite).SprintFunc()
+ return fn(fmt.Sprintf("%2d | ", num))
+ }
+ p.Bool = func() *printer.Property {
+ return &printer.Property{
+ Prefix: format(color.FgHiMagenta),
+ Suffix: format(color.Reset),
+ }
+ }
+ p.Number = func() *printer.Property {
+ return &printer.Property{
+ Prefix: format(color.FgHiMagenta),
+ Suffix: format(color.Reset),
+ }
+ }
+ p.MapKey = func() *printer.Property {
+ return &printer.Property{
+ Prefix: format(color.FgHiCyan),
+ Suffix: format(color.Reset),
+ }
+ }
+ p.Anchor = func() *printer.Property {
+ return &printer.Property{
+ Prefix: format(color.FgHiYellow),
+ Suffix: format(color.Reset),
+ }
+ }
+ p.Alias = func() *printer.Property {
+ return &printer.Property{
+ Prefix: format(color.FgHiYellow),
+ Suffix: format(color.Reset),
+ }
+ }
+ p.String = func() *printer.Property {
+ return &printer.Property{
+ Prefix: format(color.FgHiGreen),
+ Suffix: format(color.Reset),
+ }
+ }
+ writer := colorable.NewColorableStdout()
+ writer.Write([]byte(p.PrintTokens(tokens) + "\n"))
+ return nil
+}
+
+func main() {
+ if err := _main(os.Args); err != nil {
+ fmt.Printf("%v\n", yaml.FormatError(err, true, true))
+ }
+}
diff --git a/tmpmod/github.com/goccy/go-yaml/decode.go b/tmpmod/github.com/goccy/go-yaml/decode.go
new file mode 100644
index 00000000..75a5ac23
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/decode.go
@@ -0,0 +1,1752 @@
+package yaml
+
+import (
+ "bytes"
+ "context"
+ "encoding"
+ "encoding/base64"
+ "fmt"
+ "io"
+ "math"
+ "os"
+ "path/filepath"
+ "reflect"
+ "sort"
+ "strconv"
+ "time"
+
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/ast"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/internal/errors"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/parser"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/token"
+ "golang.org/x/xerrors"
+)
+
+// Decoder reads and decodes YAML values from an input stream.
+type Decoder struct {
+ reader io.Reader
+ referenceReaders []io.Reader
+ anchorNodeMap map[string]ast.Node
+ anchorValueMap map[string]reflect.Value
+ customUnmarshalerMap map[reflect.Type]func(interface{}, []byte) error
+ toCommentMap CommentMap
+ opts []DecodeOption
+ referenceFiles []string
+ referenceDirs []string
+ isRecursiveDir bool
+ isResolvedReference bool
+ validator StructValidator
+ disallowUnknownField bool
+ disallowDuplicateKey bool
+ useOrderedMap bool
+ useJSONUnmarshaler bool
+ parsedFile *ast.File
+ streamIndex int
+}
+
+// NewDecoder returns a new decoder that reads from r.
+func NewDecoder(r io.Reader, opts ...DecodeOption) *Decoder {
+ return &Decoder{
+ reader: r,
+ anchorNodeMap: map[string]ast.Node{},
+ anchorValueMap: map[string]reflect.Value{},
+ customUnmarshalerMap: map[reflect.Type]func(interface{}, []byte) error{},
+ opts: opts,
+ referenceReaders: []io.Reader{},
+ referenceFiles: []string{},
+ referenceDirs: []string{},
+ isRecursiveDir: false,
+ isResolvedReference: false,
+ disallowUnknownField: false,
+ disallowDuplicateKey: false,
+ useOrderedMap: false,
+ }
+}
+
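Editorial sketch (illustrative; yaml here is this vendored package, github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml, strings is the standard library, and the Decode method itself is defined further down in this file): typical use of NewDecoder.

var cfg struct {
	Name string `yaml:"name"`
	Port int    `yaml:"port"`
}
dec := yaml.NewDecoder(strings.NewReader("name: runn\nport: 8080\n"))
if err := dec.Decode(&cfg); err != nil {
	panic(err)
}
// cfg.Name == "runn", cfg.Port == 8080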
+func (d *Decoder) castToFloat(v interface{}) interface{} {
+ switch vv := v.(type) {
+ case int:
+ return float64(vv)
+ case int8:
+ return float64(vv)
+ case int16:
+ return float64(vv)
+ case int32:
+ return float64(vv)
+ case int64:
+ return float64(vv)
+ case uint:
+ return float64(vv)
+ case uint8:
+ return float64(vv)
+ case uint16:
+ return float64(vv)
+ case uint32:
+ return float64(vv)
+ case uint64:
+ return float64(vv)
+ case float32:
+ return float64(vv)
+ case float64:
+ return vv
+ case string:
+ // if error occurred, return zero value
+ f, _ := strconv.ParseFloat(vv, 64)
+ return f
+ }
+ return 0
+}
+
+func (d *Decoder) mergeValueNode(value ast.Node) ast.Node {
+ if value.Type() == ast.AliasType {
+ aliasNode := value.(*ast.AliasNode)
+ aliasName := aliasNode.Value.GetToken().Value
+ return d.anchorNodeMap[aliasName]
+ }
+ return value
+}
+
+func (d *Decoder) mapKeyNodeToString(node ast.MapKeyNode) string {
+ key := d.nodeToValue(node)
+ if key == nil {
+ return "null"
+ }
+ if k, ok := key.(string); ok {
+ return k
+ }
+ return fmt.Sprint(key)
+}
+
+func (d *Decoder) setToMapValue(node ast.Node, m map[string]interface{}) {
+ d.setPathToCommentMap(node)
+ switch n := node.(type) {
+ case *ast.MappingValueNode:
+ if n.Key.Type() == ast.MergeKeyType {
+ d.setToMapValue(d.mergeValueNode(n.Value), m)
+ } else {
+ key := d.mapKeyNodeToString(n.Key)
+ m[key] = d.nodeToValue(n.Value)
+ }
+ case *ast.MappingNode:
+ for _, value := range n.Values {
+ d.setToMapValue(value, m)
+ }
+ case *ast.AnchorNode:
+ anchorName := n.Name.GetToken().Value
+ d.anchorNodeMap[anchorName] = n.Value
+ }
+}
+
+func (d *Decoder) setToOrderedMapValue(node ast.Node, m *MapSlice) {
+ switch n := node.(type) {
+ case *ast.MappingValueNode:
+ if n.Key.Type() == ast.MergeKeyType {
+ d.setToOrderedMapValue(d.mergeValueNode(n.Value), m)
+ } else {
+ key := d.mapKeyNodeToString(n.Key)
+ *m = append(*m, MapItem{Key: key, Value: d.nodeToValue(n.Value)})
+ }
+ case *ast.MappingNode:
+ for _, value := range n.Values {
+ d.setToOrderedMapValue(value, m)
+ }
+ }
+}
+
+func (d *Decoder) setPathToCommentMap(node ast.Node) {
+ if d.toCommentMap == nil {
+ return
+ }
+ d.addHeadOrLineCommentToMap(node)
+ d.addFootCommentToMap(node)
+}
+
+func (d *Decoder) addHeadOrLineCommentToMap(node ast.Node) {
+ sequence, ok := node.(*ast.SequenceNode)
+ if ok {
+ d.addSequenceNodeCommentToMap(sequence)
+ return
+ }
+ commentGroup := node.GetComment()
+ if commentGroup == nil {
+ return
+ }
+ texts := []string{}
+ targetLine := node.GetToken().Position.Line
+ minCommentLine := math.MaxInt
+ for _, comment := range commentGroup.Comments {
+ if minCommentLine > comment.Token.Position.Line {
+ minCommentLine = comment.Token.Position.Line
+ }
+ texts = append(texts, comment.Token.Value)
+ }
+ if len(texts) == 0 {
+ return
+ }
+ commentPath := node.GetPath()
+ if minCommentLine < targetLine {
+ d.addCommentToMap(commentPath, HeadComment(texts...))
+ } else {
+ d.addCommentToMap(commentPath, LineComment(texts[0]))
+ }
+}
+
+func (d *Decoder) addSequenceNodeCommentToMap(node *ast.SequenceNode) {
+ if len(node.ValueHeadComments) != 0 {
+ for idx, headComment := range node.ValueHeadComments {
+ if headComment == nil {
+ continue
+ }
+ texts := make([]string, 0, len(headComment.Comments))
+ for _, comment := range headComment.Comments {
+ texts = append(texts, comment.Token.Value)
+ }
+ if len(texts) != 0 {
+ d.addCommentToMap(node.Values[idx].GetPath(), HeadComment(texts...))
+ }
+ }
+ }
+ firstElemHeadComment := node.GetComment()
+ if firstElemHeadComment != nil {
+ texts := make([]string, 0, len(firstElemHeadComment.Comments))
+ for _, comment := range firstElemHeadComment.Comments {
+ texts = append(texts, comment.Token.Value)
+ }
+ if len(texts) != 0 {
+ d.addCommentToMap(node.Values[0].GetPath(), HeadComment(texts...))
+ }
+ }
+}
+
+func (d *Decoder) addFootCommentToMap(node ast.Node) {
+ var (
+ footComment *ast.CommentGroupNode
+ footCommentPath string = node.GetPath()
+ )
+ switch n := node.(type) {
+ case *ast.SequenceNode:
+ if len(n.Values) != 0 {
+ footCommentPath = n.Values[len(n.Values)-1].GetPath()
+ }
+ footComment = n.FootComment
+ case *ast.MappingNode:
+ footComment = n.FootComment
+ case *ast.MappingValueNode:
+ footComment = n.FootComment
+ }
+ if footComment == nil {
+ return
+ }
+ var texts []string
+ for _, comment := range footComment.Comments {
+ texts = append(texts, comment.Token.Value)
+ }
+ if len(texts) != 0 {
+ d.addCommentToMap(footCommentPath, FootComment(texts...))
+ }
+}
+
+func (d *Decoder) addCommentToMap(path string, comment *Comment) {
+ for _, c := range d.toCommentMap[path] {
+ if c.Position == comment.Position {
+ // already added same comment
+ return
+ }
+ }
+ d.toCommentMap[path] = append(d.toCommentMap[path], comment)
+ sort.Slice(d.toCommentMap[path], func(i, j int) bool {
+ return d.toCommentMap[path][i].Position < d.toCommentMap[path][j].Position
+ })
+}
+
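Editorial sketch (the CommentMap type and the CommentToMap/UnmarshalWithOptions calls belong to this package's public API but are defined outside this excerpt, so treat the exact names as assumptions): the logic above records head, line, and foot comments into a CommentMap keyed by YAML path.

cm := yaml.CommentMap{}
var v map[string]interface{}
if err := yaml.UnmarshalWithOptions([]byte("# head\na: 1 # line\n"), &v, yaml.CommentToMap(cm)); err != nil {
	panic(err)
}
for path, comments := range cm {
	fmt.Println(path, len(comments)) // paths look like "$.a"
}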
+func (d *Decoder) nodeToValue(node ast.Node) interface{} {
+ d.setPathToCommentMap(node)
+ switch n := node.(type) {
+ case *ast.NullNode:
+ return nil
+ case *ast.StringNode:
+ return n.GetValue()
+ case *ast.IntegerNode:
+ return n.GetValue()
+ case *ast.FloatNode:
+ return n.GetValue()
+ case *ast.BoolNode:
+ return n.GetValue()
+ case *ast.InfinityNode:
+ return n.GetValue()
+ case *ast.NanNode:
+ return n.GetValue()
+ case *ast.TagNode:
+ switch token.ReservedTagKeyword(n.Start.Value) {
+ case token.TimestampTag:
+ t, _ := d.castToTime(n.Value)
+ return t
+ case token.IntegerTag:
+ i, _ := strconv.Atoi(fmt.Sprint(d.nodeToValue(n.Value)))
+ return i
+ case token.FloatTag:
+ return d.castToFloat(d.nodeToValue(n.Value))
+ case token.NullTag:
+ return nil
+ case token.BinaryTag:
+ b, _ := base64.StdEncoding.DecodeString(d.nodeToValue(n.Value).(string))
+ return b
+ case token.StringTag:
+ return d.nodeToValue(n.Value)
+ case token.MappingTag:
+ return d.nodeToValue(n.Value)
+ }
+ case *ast.AnchorNode:
+ anchorName := n.Name.GetToken().Value
+ anchorValue := d.nodeToValue(n.Value)
+ d.anchorNodeMap[anchorName] = n.Value
+ return anchorValue
+ case *ast.AliasNode:
+ aliasName := n.Value.GetToken().Value
+ node := d.anchorNodeMap[aliasName]
+ return d.nodeToValue(node)
+ case *ast.LiteralNode:
+ return n.Value.GetValue()
+ case *ast.MappingKeyNode:
+ return d.nodeToValue(n.Value)
+ case *ast.MappingValueNode:
+ if n.Key.Type() == ast.MergeKeyType {
+ value := d.mergeValueNode(n.Value)
+ if d.useOrderedMap {
+ m := MapSlice{}
+ d.setToOrderedMapValue(value, &m)
+ return m
+ }
+ m := map[string]interface{}{}
+ d.setToMapValue(value, m)
+ return m
+ }
+ key := d.mapKeyNodeToString(n.Key)
+ if d.useOrderedMap {
+ return MapSlice{{Key: key, Value: d.nodeToValue(n.Value)}}
+ }
+ return map[string]interface{}{
+ key: d.nodeToValue(n.Value),
+ }
+ case *ast.MappingNode:
+ if d.useOrderedMap {
+ m := make(MapSlice, 0, len(n.Values))
+ for _, value := range n.Values {
+ d.setToOrderedMapValue(value, &m)
+ }
+ return m
+ }
+ m := make(map[string]interface{}, len(n.Values))
+ for _, value := range n.Values {
+ d.setToMapValue(value, m)
+ }
+ return m
+ case *ast.SequenceNode:
+ v := make([]interface{}, 0, len(n.Values))
+ for _, value := range n.Values {
+ v = append(v, d.nodeToValue(value))
+ }
+ return v
+ }
+ return nil
+}
+
+func (d *Decoder) resolveAlias(node ast.Node) (ast.Node, error) {
+ switch n := node.(type) {
+ case *ast.MappingNode:
+ for idx, v := range n.Values {
+ value, err := d.resolveAlias(v)
+ if err != nil {
+ return nil, err
+ }
+ n.Values[idx] = value.(*ast.MappingValueNode)
+ }
+ case *ast.TagNode:
+ value, err := d.resolveAlias(n.Value)
+ if err != nil {
+ return nil, err
+ }
+ n.Value = value
+ case *ast.MappingKeyNode:
+ value, err := d.resolveAlias(n.Value)
+ if err != nil {
+ return nil, err
+ }
+ n.Value = value
+ case *ast.MappingValueNode:
+ if n.Key.Type() == ast.MergeKeyType && n.Value.Type() == ast.AliasType {
+ value, err := d.resolveAlias(n.Value)
+ if err != nil {
+ return nil, err
+ }
+ keyColumn := n.Key.GetToken().Position.Column
+ requiredColumn := keyColumn + 2
+ value.AddColumn(requiredColumn)
+ n.Value = value
+ } else {
+ key, err := d.resolveAlias(n.Key)
+ if err != nil {
+ return nil, err
+ }
+ n.Key = key.(ast.MapKeyNode)
+ value, err := d.resolveAlias(n.Value)
+ if err != nil {
+ return nil, err
+ }
+ n.Value = value
+ }
+ case *ast.SequenceNode:
+ for idx, v := range n.Values {
+ value, err := d.resolveAlias(v)
+ if err != nil {
+ return nil, err
+ }
+ n.Values[idx] = value
+ }
+ case *ast.AliasNode:
+ aliasName := n.Value.GetToken().Value
+ node := d.anchorNodeMap[aliasName]
+ if node == nil {
+ return nil, xerrors.Errorf("cannot find anchor by alias name %s", aliasName)
+ }
+ return d.resolveAlias(node)
+ }
+ return node, nil
+}
+
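Editorial sketch (illustrative YAML; yaml.Unmarshal is used the same way in the benchmark file above): the anchor, alias, and merge-key handling above is what resolves documents like this one.

const src = `
base: &base
  a: 1
  b: 2
override:
  <<: *base
  b: 3
`
var v map[string]interface{}
if err := yaml.Unmarshal([]byte(src), &v); err != nil {
	panic(err)
}
// v["override"] should contain a: 1 and b: 3, with the alias expanded.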
+func (d *Decoder) getMapNode(node ast.Node) (ast.MapNode, error) {
+ if _, ok := node.(*ast.NullNode); ok {
+ return nil, nil
+ }
+ if anchor, ok := node.(*ast.AnchorNode); ok {
+ mapNode, ok := anchor.Value.(ast.MapNode)
+ if ok {
+ return mapNode, nil
+ }
+ return nil, errUnexpectedNodeType(anchor.Value.Type(), ast.MappingType, node.GetToken())
+ }
+ if alias, ok := node.(*ast.AliasNode); ok {
+ aliasName := alias.Value.GetToken().Value
+ node := d.anchorNodeMap[aliasName]
+ if node == nil {
+ return nil, xerrors.Errorf("cannot find anchor by alias name %s", aliasName)
+ }
+ mapNode, ok := node.(ast.MapNode)
+ if ok {
+ return mapNode, nil
+ }
+ return nil, errUnexpectedNodeType(node.Type(), ast.MappingType, node.GetToken())
+ }
+ mapNode, ok := node.(ast.MapNode)
+ if !ok {
+ return nil, errUnexpectedNodeType(node.Type(), ast.MappingType, node.GetToken())
+ }
+ return mapNode, nil
+}
+
+func (d *Decoder) getArrayNode(node ast.Node) (ast.ArrayNode, error) {
+ if _, ok := node.(*ast.NullNode); ok {
+ return nil, nil
+ }
+ if anchor, ok := node.(*ast.AnchorNode); ok {
+ arrayNode, ok := anchor.Value.(ast.ArrayNode)
+ if ok {
+ return arrayNode, nil
+ }
+
+ return nil, errUnexpectedNodeType(anchor.Value.Type(), ast.SequenceType, node.GetToken())
+ }
+ if alias, ok := node.(*ast.AliasNode); ok {
+ aliasName := alias.Value.GetToken().Value
+ node := d.anchorNodeMap[aliasName]
+ if node == nil {
+ return nil, xerrors.Errorf("cannot find anchor by alias name %s", aliasName)
+ }
+ arrayNode, ok := node.(ast.ArrayNode)
+ if ok {
+ return arrayNode, nil
+ }
+ return nil, errUnexpectedNodeType(node.Type(), ast.SequenceType, node.GetToken())
+ }
+ arrayNode, ok := node.(ast.ArrayNode)
+ if !ok {
+ return nil, errUnexpectedNodeType(node.Type(), ast.SequenceType, node.GetToken())
+ }
+ return arrayNode, nil
+}
+
+func (d *Decoder) fileToNode(f *ast.File) ast.Node {
+ for _, doc := range f.Docs {
+ if v := d.nodeToValue(doc.Body); v != nil {
+ return doc.Body
+ }
+ }
+ return nil
+}
+
+func (d *Decoder) convertValue(v reflect.Value, typ reflect.Type, src ast.Node) (reflect.Value, error) {
+ if typ.Kind() != reflect.String {
+ if !v.Type().ConvertibleTo(typ) {
+ return reflect.Zero(typ), errTypeMismatch(typ, v.Type(), src.GetToken())
+ }
+ return v.Convert(typ), nil
+ }
+ // cast value to string
+ switch v.Type().Kind() {
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ return reflect.ValueOf(fmt.Sprint(v.Int())), nil
+ case reflect.Float32, reflect.Float64:
+ return reflect.ValueOf(fmt.Sprint(v.Float())), nil
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ return reflect.ValueOf(fmt.Sprint(v.Uint())), nil
+ case reflect.Bool:
+ return reflect.ValueOf(fmt.Sprint(v.Bool())), nil
+ }
+ if !v.Type().ConvertibleTo(typ) {
+ return reflect.Zero(typ), errTypeMismatch(typ, v.Type(), src.GetToken())
+ }
+ return v.Convert(typ), nil
+}
+
+type overflowError struct {
+ dstType reflect.Type
+ srcNum string
+}
+
+func (e *overflowError) Error() string {
+ return fmt.Sprintf("cannot unmarshal %s into Go value of type %s ( overflow )", e.srcNum, e.dstType)
+}
+
+func errOverflow(dstType reflect.Type, num string) *overflowError {
+ return &overflowError{dstType: dstType, srcNum: num}
+}
+
+func errTypeMismatch(dstType, srcType reflect.Type, token *token.Token) *errors.TypeError {
+ return &errors.TypeError{DstType: dstType, SrcType: srcType, Token: token}
+}
+
+type unknownFieldError struct {
+ err error
+}
+
+func (e *unknownFieldError) Error() string {
+ return e.err.Error()
+}
+
+func errUnknownField(msg string, tk *token.Token) *unknownFieldError {
+ return &unknownFieldError{err: errors.ErrSyntax(msg, tk)}
+}
+
+func errUnexpectedNodeType(actual, expected ast.NodeType, tk *token.Token) error {
+ return errors.ErrSyntax(fmt.Sprintf("%s was used where %s is expected", actual.YAMLName(), expected.YAMLName()), tk)
+}
+
+type duplicateKeyError struct {
+ err error
+}
+
+func (e *duplicateKeyError) Error() string {
+ return e.err.Error()
+}
+
+func errDuplicateKey(msg string, tk *token.Token) *duplicateKeyError {
+ return &duplicateKeyError{err: errors.ErrSyntax(msg, tk)}
+}
+
+func (d *Decoder) deleteStructKeys(structType reflect.Type, unknownFields map[string]ast.Node) error {
+ if structType.Kind() == reflect.Ptr {
+ structType = structType.Elem()
+ }
+ structFieldMap, err := structFieldMap(structType)
+ if err != nil {
+ return errors.Wrapf(err, "failed to create struct field map")
+ }
+
+ for j := 0; j < structType.NumField(); j++ {
+ field := structType.Field(j)
+ if isIgnoredStructField(field) {
+ continue
+ }
+
+ structField, exists := structFieldMap[field.Name]
+ if !exists {
+ continue
+ }
+
+ if structField.IsInline {
+ d.deleteStructKeys(field.Type, unknownFields)
+ } else {
+ delete(unknownFields, structField.RenderName)
+ }
+ }
+ return nil
+}
+
+func (d *Decoder) lastNode(node ast.Node) ast.Node {
+ switch n := node.(type) {
+ case *ast.MappingNode:
+ if len(n.Values) > 0 {
+ return d.lastNode(n.Values[len(n.Values)-1])
+ }
+ case *ast.MappingValueNode:
+ return d.lastNode(n.Value)
+ case *ast.SequenceNode:
+ if len(n.Values) > 0 {
+ return d.lastNode(n.Values[len(n.Values)-1])
+ }
+ }
+ return node
+}
+
+func (d *Decoder) unmarshalableDocument(node ast.Node) ([]byte, error) {
+ var err error
+ node, err = d.resolveAlias(node)
+ if err != nil {
+ return nil, err
+ }
+ doc := node.String()
+ last := d.lastNode(node)
+ if last != nil && last.Type() == ast.LiteralType {
+ doc += "\n"
+ }
+ return []byte(doc), nil
+}
+
+func (d *Decoder) unmarshalableText(node ast.Node) ([]byte, bool, error) {
+ var err error
+ node, err = d.resolveAlias(node)
+ if err != nil {
+ return nil, false, err
+ }
+ if node.Type() == ast.AnchorType {
+ node = node.(*ast.AnchorNode).Value
+ }
+ switch n := node.(type) {
+ case *ast.StringNode:
+ return []byte(n.Value), true, nil
+ case *ast.LiteralNode:
+ return []byte(n.Value.GetToken().Value), true, nil
+ default:
+ scalar, ok := n.(ast.ScalarNode)
+ if ok {
+ return []byte(fmt.Sprint(scalar.GetValue())), true, nil
+ }
+ }
+ return nil, false, nil
+}
+
+type jsonUnmarshaler interface {
+ UnmarshalJSON([]byte) error
+}
+
+func (d *Decoder) existsTypeInCustomUnmarshalerMap(t reflect.Type) bool {
+ if _, exists := d.customUnmarshalerMap[t]; exists {
+ return true
+ }
+
+ globalCustomUnmarshalerMu.Lock()
+ defer globalCustomUnmarshalerMu.Unlock()
+ if _, exists := globalCustomUnmarshalerMap[t]; exists {
+ return true
+ }
+ return false
+}
+
+func (d *Decoder) unmarshalerFromCustomUnmarshalerMap(t reflect.Type) (func(interface{}, []byte) error, bool) {
+ if unmarshaler, exists := d.customUnmarshalerMap[t]; exists {
+ return unmarshaler, exists
+ }
+
+ globalCustomUnmarshalerMu.Lock()
+ defer globalCustomUnmarshalerMu.Unlock()
+ if unmarshaler, exists := globalCustomUnmarshalerMap[t]; exists {
+ return unmarshaler, exists
+ }
+ return nil, false
+}
+
+func (d *Decoder) canDecodeByUnmarshaler(dst reflect.Value) bool {
+ ptrValue := dst.Addr()
+ if d.existsTypeInCustomUnmarshalerMap(ptrValue.Type()) {
+ return true
+ }
+ iface := ptrValue.Interface()
+ switch iface.(type) {
+ case BytesUnmarshalerContext:
+ return true
+ case BytesUnmarshaler:
+ return true
+ case InterfaceUnmarshalerContext:
+ return true
+ case InterfaceUnmarshaler:
+ return true
+ case *time.Time:
+ return true
+ case *time.Duration:
+ return true
+ case encoding.TextUnmarshaler:
+ return true
+ case jsonUnmarshaler:
+ return d.useJSONUnmarshaler
+ }
+ return false
+}
+
+func (d *Decoder) decodeByUnmarshaler(ctx context.Context, dst reflect.Value, src ast.Node) error {
+ ptrValue := dst.Addr()
+ if unmarshaler, exists := d.unmarshalerFromCustomUnmarshalerMap(ptrValue.Type()); exists {
+ b, err := d.unmarshalableDocument(src)
+ if err != nil {
+ return errors.Wrapf(err, "failed to UnmarshalYAML")
+ }
+ if err := unmarshaler(ptrValue.Interface(), b); err != nil {
+ return errors.Wrapf(err, "failed to UnmarshalYAML")
+ }
+ return nil
+ }
+ iface := ptrValue.Interface()
+
+ if unmarshaler, ok := iface.(BytesUnmarshalerContext); ok {
+ b, err := d.unmarshalableDocument(src)
+ if err != nil {
+ return errors.Wrapf(err, "failed to UnmarshalYAML")
+ }
+ if err := unmarshaler.UnmarshalYAML(ctx, b); err != nil {
+ return errors.Wrapf(err, "failed to UnmarshalYAML")
+ }
+ return nil
+ }
+
+ if unmarshaler, ok := iface.(BytesUnmarshaler); ok {
+ b, err := d.unmarshalableDocument(src)
+ if err != nil {
+ return errors.Wrapf(err, "failed to UnmarshalYAML")
+ }
+ if err := unmarshaler.UnmarshalYAML(b); err != nil {
+ return errors.Wrapf(err, "failed to UnmarshalYAML")
+ }
+ return nil
+ }
+
+ if unmarshaler, ok := iface.(InterfaceUnmarshalerContext); ok {
+ if err := unmarshaler.UnmarshalYAML(ctx, func(v interface{}) error {
+ rv := reflect.ValueOf(v)
+ if rv.Type().Kind() != reflect.Ptr {
+ return errors.ErrDecodeRequiredPointerType
+ }
+ if err := d.decodeValue(ctx, rv.Elem(), src); err != nil {
+ return errors.Wrapf(err, "failed to decode value")
+ }
+ return nil
+ }); err != nil {
+ return errors.Wrapf(err, "failed to UnmarshalYAML")
+ }
+ return nil
+ }
+
+ if unmarshaler, ok := iface.(InterfaceUnmarshaler); ok {
+ if err := unmarshaler.UnmarshalYAML(func(v interface{}) error {
+ rv := reflect.ValueOf(v)
+ if rv.Type().Kind() != reflect.Ptr {
+ return errors.ErrDecodeRequiredPointerType
+ }
+ if err := d.decodeValue(ctx, rv.Elem(), src); err != nil {
+ return errors.Wrapf(err, "failed to decode value")
+ }
+ return nil
+ }); err != nil {
+ return errors.Wrapf(err, "failed to UnmarshalYAML")
+ }
+ return nil
+ }
+
+ if _, ok := iface.(*time.Time); ok {
+ return d.decodeTime(ctx, dst, src)
+ }
+
+ if _, ok := iface.(*time.Duration); ok {
+ return d.decodeDuration(ctx, dst, src)
+ }
+
+ if unmarshaler, isText := iface.(encoding.TextUnmarshaler); isText {
+ b, ok, err := d.unmarshalableText(src)
+ if err != nil {
+ return errors.Wrapf(err, "failed to UnmarshalText")
+ }
+ if ok {
+ if err := unmarshaler.UnmarshalText(b); err != nil {
+ return errors.Wrapf(err, "failed to UnmarshalText")
+ }
+ return nil
+ }
+ }
+
+ if d.useJSONUnmarshaler {
+ if unmarshaler, ok := iface.(jsonUnmarshaler); ok {
+ b, err := d.unmarshalableDocument(src)
+ if err != nil {
+ return errors.Wrapf(err, "failed to UnmarshalJSON")
+ }
+ jsonBytes, err := YAMLToJSON(b)
+ if err != nil {
+ return errors.Wrapf(err, "failed to convert yaml to json")
+ }
+ jsonBytes = bytes.TrimRight(jsonBytes, "\n")
+ if err := unmarshaler.UnmarshalJSON(jsonBytes); err != nil {
+ return errors.Wrapf(err, "failed to UnmarshalJSON")
+ }
+ return nil
+ }
+ }
+
+ return xerrors.Errorf("does not implemented Unmarshaler")
+}
+
+var (
+ astNodeType = reflect.TypeOf((*ast.Node)(nil)).Elem()
+)
+
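+// decodeValue is the main decoding dispatcher: it records anchor values,
+// prefers unmarshaler-based decoding when available, and otherwise decodes
+// src according to the kind of dst.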
+func (d *Decoder) decodeValue(ctx context.Context, dst reflect.Value, src ast.Node) error {
+ if src.Type() == ast.AnchorType {
+ anchorName := src.(*ast.AnchorNode).Name.GetToken().Value
+ if _, exists := d.anchorValueMap[anchorName]; !exists {
+ d.anchorValueMap[anchorName] = dst
+ }
+ }
+ if d.canDecodeByUnmarshaler(dst) {
+ if err := d.decodeByUnmarshaler(ctx, dst, src); err != nil {
+ return errors.Wrapf(err, "failed to decode by unmarshaler")
+ }
+ return nil
+ }
+ valueType := dst.Type()
+ switch valueType.Kind() {
+ case reflect.Ptr:
+ if dst.IsNil() {
+ return nil
+ }
+ if src.Type() == ast.NullType {
+ // set nil value to pointer
+ dst.Set(reflect.Zero(valueType))
+ return nil
+ }
+ v := d.createDecodableValue(dst.Type())
+ if err := d.decodeValue(ctx, v, src); err != nil {
+ return errors.Wrapf(err, "failed to decode ptr value")
+ }
+ dst.Set(d.castToAssignableValue(v, dst.Type()))
+ case reflect.Interface:
+ if dst.Type() == astNodeType {
+ dst.Set(reflect.ValueOf(src))
+ return nil
+ }
+ v := reflect.ValueOf(d.nodeToValue(src))
+ if v.IsValid() {
+ dst.Set(v)
+ }
+ case reflect.Map:
+ return d.decodeMap(ctx, dst, src)
+ case reflect.Array:
+ return d.decodeArray(ctx, dst, src)
+ case reflect.Slice:
+ if mapSlice, ok := dst.Addr().Interface().(*MapSlice); ok {
+ return d.decodeMapSlice(ctx, mapSlice, src)
+ }
+ return d.decodeSlice(ctx, dst, src)
+ case reflect.Struct:
+ if mapItem, ok := dst.Addr().Interface().(*MapItem); ok {
+ return d.decodeMapItem(ctx, mapItem, src)
+ }
+ return d.decodeStruct(ctx, dst, src)
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ v := d.nodeToValue(src)
+ switch vv := v.(type) {
+ case int64:
+ if !dst.OverflowInt(vv) {
+ dst.SetInt(vv)
+ return nil
+ }
+ case uint64:
+ if vv <= math.MaxInt64 && !dst.OverflowInt(int64(vv)) {
+ dst.SetInt(int64(vv))
+ return nil
+ }
+ case float64:
+ if vv <= math.MaxInt64 && !dst.OverflowInt(int64(vv)) {
+ dst.SetInt(int64(vv))
+ return nil
+ }
+ default:
+ return errTypeMismatch(valueType, reflect.TypeOf(v), src.GetToken())
+ }
+ return errOverflow(valueType, fmt.Sprint(v))
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
+ v := d.nodeToValue(src)
+ switch vv := v.(type) {
+ case int64:
+ if 0 <= vv && !dst.OverflowUint(uint64(vv)) {
+ dst.SetUint(uint64(vv))
+ return nil
+ }
+ case uint64:
+ if !dst.OverflowUint(vv) {
+ dst.SetUint(vv)
+ return nil
+ }
+ case float64:
+ if 0 <= vv && vv <= math.MaxUint64 && !dst.OverflowUint(uint64(vv)) {
+ dst.SetUint(uint64(vv))
+ return nil
+ }
+ default:
+ return errTypeMismatch(valueType, reflect.TypeOf(v), src.GetToken())
+ }
+ return errOverflow(valueType, fmt.Sprint(v))
+ }
+ v := reflect.ValueOf(d.nodeToValue(src))
+ if v.IsValid() {
+ convertedValue, err := d.convertValue(v, dst.Type(), src)
+ if err != nil {
+ return errors.Wrapf(err, "failed to convert value")
+ }
+ dst.Set(convertedValue)
+ }
+ return nil
+}
+
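+// createDecodableValue strips pointer indirections from typ and returns a new
+// addressable zero value of the underlying type.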
+func (d *Decoder) createDecodableValue(typ reflect.Type) reflect.Value {
+ for {
+ if typ.Kind() == reflect.Ptr {
+ typ = typ.Elem()
+ continue
+ }
+ break
+ }
+ return reflect.New(typ).Elem()
+}
+
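+// castToAssignableValue re-adds pointer indirections (up to five levels) to value
+// until it becomes assignable to target; if target is not a pointer type, value is
+// returned unchanged.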
+func (d *Decoder) castToAssignableValue(value reflect.Value, target reflect.Type) reflect.Value {
+ if target.Kind() != reflect.Ptr {
+ return value
+ }
+ maxTryCount := 5
+ tryCount := 0
+ for {
+ if tryCount > maxTryCount {
+ return value
+ }
+ if value.Type().AssignableTo(target) {
+ break
+ }
+ value = value.Addr()
+ tryCount++
+ }
+ return value
+}
+
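+// createDecodedNewValue builds a new value of typ decoded from node: aliases reuse
+// the recorded anchor value, null nodes yield a zero value, and other nodes are
+// decoded on top of defaultVal when it is assignable.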
+func (d *Decoder) createDecodedNewValue(
+ ctx context.Context, typ reflect.Type, defaultVal reflect.Value, node ast.Node,
+) (reflect.Value, error) {
+ if node.Type() == ast.AliasType {
+ aliasName := node.(*ast.AliasNode).Value.GetToken().Value
+ newValue := d.anchorValueMap[aliasName]
+ if newValue.IsValid() {
+ return newValue, nil
+ }
+ }
+ if node.Type() == ast.NullType {
+ return reflect.Zero(typ), nil
+ }
+ newValue := d.createDecodableValue(typ)
+ for defaultVal.Kind() == reflect.Ptr {
+ defaultVal = defaultVal.Elem()
+ }
+ if defaultVal.IsValid() && defaultVal.Type().AssignableTo(newValue.Type()) {
+ newValue.Set(defaultVal)
+ }
+ if err := d.decodeValue(ctx, newValue, node); err != nil {
+ return newValue, errors.Wrapf(err, "failed to decode value")
+ }
+ return newValue, nil
+}
+
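+// keyToNodeMap flattens a mapping node into a map from string keys to nodes,
+// expanding merge keys (unless ignoreMergeKey is set) and validating duplicate keys;
+// getKeyOrValueNode selects whether the key node or the value node is stored.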
+func (d *Decoder) keyToNodeMap(node ast.Node, ignoreMergeKey bool, getKeyOrValueNode func(*ast.MapNodeIter) ast.Node) (map[string]ast.Node, error) {
+ mapNode, err := d.getMapNode(node)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to get map node")
+ }
+ keyMap := map[string]struct{}{}
+ keyToNodeMap := map[string]ast.Node{}
+ if mapNode == nil {
+ return keyToNodeMap, nil
+ }
+ mapIter := mapNode.MapRange()
+ for mapIter.Next() {
+ keyNode := mapIter.Key()
+ if keyNode.Type() == ast.MergeKeyType {
+ if ignoreMergeKey {
+ continue
+ }
+ mergeMap, err := d.keyToNodeMap(mapIter.Value(), ignoreMergeKey, getKeyOrValueNode)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to get keyToNodeMap by MergeKey node")
+ }
+ for k, v := range mergeMap {
+ if err := d.validateDuplicateKey(keyMap, k, v); err != nil {
+ return nil, errors.Wrapf(err, "invalid struct key")
+ }
+ keyToNodeMap[k] = v
+ }
+ } else {
+ key, ok := d.nodeToValue(keyNode).(string)
+ if !ok {
+ return nil, errors.Wrapf(err, "failed to decode map key")
+ }
+ if err := d.validateDuplicateKey(keyMap, key, keyNode); err != nil {
+ return nil, errors.Wrapf(err, "invalid struct key")
+ }
+ keyToNodeMap[key] = getKeyOrValueNode(mapIter)
+ }
+ }
+ return keyToNodeMap, nil
+}
+
+func (d *Decoder) keyToKeyNodeMap(node ast.Node, ignoreMergeKey bool) (map[string]ast.Node, error) {
+ m, err := d.keyToNodeMap(node, ignoreMergeKey, func(nodeMap *ast.MapNodeIter) ast.Node { return nodeMap.Key() })
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to get keyToNodeMap")
+ }
+ return m, nil
+}
+
+func (d *Decoder) keyToValueNodeMap(node ast.Node, ignoreMergeKey bool) (map[string]ast.Node, error) {
+ m, err := d.keyToNodeMap(node, ignoreMergeKey, func(nodeMap *ast.MapNodeIter) ast.Node { return nodeMap.Value() })
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to get keyToNodeMap")
+ }
+ return m, nil
+}
+
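+// setDefaultValueIfConflicted zeroes fields of an embedded (inline) struct whose
+// render names are also declared on the outer struct, so the outer declaration wins.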
+func (d *Decoder) setDefaultValueIfConflicted(v reflect.Value, fieldMap StructFieldMap) error {
+ typ := v.Type()
+ if typ.Kind() != reflect.Struct {
+ return nil
+ }
+ embeddedStructFieldMap, err := structFieldMap(typ)
+ if err != nil {
+ return errors.Wrapf(err, "failed to get struct field map by embedded type")
+ }
+ for i := 0; i < typ.NumField(); i++ {
+ field := typ.Field(i)
+ if isIgnoredStructField(field) {
+ continue
+ }
+ structField := embeddedStructFieldMap[field.Name]
+ if !fieldMap.isIncludedRenderName(structField.RenderName) {
+ continue
+ }
+ // if the same key name is declared, set the default value
+ fieldValue := v.Field(i)
+ if fieldValue.CanSet() {
+ fieldValue.Set(reflect.Zero(fieldValue.Type()))
+ }
+ }
+ return nil
+}
+
+// This is a subset of the formats allowed by the regular expression
+// defined at http://yaml.org/type/timestamp.html.
+var allowedTimestampFormats = []string{
+ "2006-1-2T15:4:5.999999999Z07:00", // RCF3339Nano with short date fields.
+ "2006-1-2t15:4:5.999999999Z07:00", // RFC3339Nano with short date fields and lower-case "t".
+ "2006-1-2 15:4:5.999999999", // space separated with no time zone
+ "2006-1-2", // date only
+}
+
+func (d *Decoder) castToTime(src ast.Node) (time.Time, error) {
+ if src == nil {
+ return time.Time{}, nil
+ }
+ v := d.nodeToValue(src)
+ if t, ok := v.(time.Time); ok {
+ return t, nil
+ }
+ s, ok := v.(string)
+ if !ok {
+ return time.Time{}, errTypeMismatch(reflect.TypeOf(time.Time{}), reflect.TypeOf(v), src.GetToken())
+ }
+ for _, format := range allowedTimestampFormats {
+ t, err := time.Parse(format, s)
+ if err != nil {
+ // invalid format
+ continue
+ }
+ return t, nil
+ }
+ return time.Time{}, nil
+}
+
+func (d *Decoder) decodeTime(ctx context.Context, dst reflect.Value, src ast.Node) error {
+ t, err := d.castToTime(src)
+ if err != nil {
+ return errors.Wrapf(err, "failed to convert to time")
+ }
+ dst.Set(reflect.ValueOf(t))
+ return nil
+}
+
+func (d *Decoder) castToDuration(src ast.Node) (time.Duration, error) {
+ if src == nil {
+ return 0, nil
+ }
+ v := d.nodeToValue(src)
+ if t, ok := v.(time.Duration); ok {
+ return t, nil
+ }
+ s, ok := v.(string)
+ if !ok {
+ return 0, errTypeMismatch(reflect.TypeOf(time.Duration(0)), reflect.TypeOf(v), src.GetToken())
+ }
+ t, err := time.ParseDuration(s)
+ if err != nil {
+ return 0, errors.Wrapf(err, "failed to parse duration")
+ }
+ return t, nil
+}
+
+func (d *Decoder) decodeDuration(ctx context.Context, dst reflect.Value, src ast.Node) error {
+ t, err := d.castToDuration(src)
+ if err != nil {
+ return errors.Wrapf(err, "failed to convert to duration")
+ }
+ dst.Set(reflect.ValueOf(t))
+ return nil
+}
+
+// getMergeAliasName supports a single alias only
+func (d *Decoder) getMergeAliasName(src ast.Node) string {
+ mapNode, err := d.getMapNode(src)
+ if err != nil {
+ return ""
+ }
+ if mapNode == nil {
+ return ""
+ }
+ mapIter := mapNode.MapRange()
+ for mapIter.Next() {
+ key := mapIter.Key()
+ value := mapIter.Value()
+ if key.Type() == ast.MergeKeyType && value.Type() == ast.AliasType {
+ return value.(*ast.AliasNode).Value.GetToken().Value
+ }
+ }
+ return ""
+}
+
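+// decodeStruct decodes a mapping node into a struct value, handling inline/embedded
+// fields, merge-key aliases, unknown-field checks (when enabled), and struct validation.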
+func (d *Decoder) decodeStruct(ctx context.Context, dst reflect.Value, src ast.Node) error {
+ if src == nil {
+ return nil
+ }
+ structType := dst.Type()
+ srcValue := reflect.ValueOf(src)
+ srcType := srcValue.Type()
+ if srcType.Kind() == reflect.Ptr {
+ srcType = srcType.Elem()
+ srcValue = srcValue.Elem()
+ }
+ if structType == srcType {
+ // dst value implements ast.Node
+ dst.Set(srcValue)
+ return nil
+ }
+ structFieldMap, err := structFieldMap(structType)
+ if err != nil {
+ return errors.Wrapf(err, "failed to create struct field map")
+ }
+ ignoreMergeKey := structFieldMap.hasMergeProperty()
+ keyToNodeMap, err := d.keyToValueNodeMap(src, ignoreMergeKey)
+ if err != nil {
+ return errors.Wrapf(err, "failed to get keyToValueNodeMap")
+ }
+ var unknownFields map[string]ast.Node
+ if d.disallowUnknownField {
+ unknownFields, err = d.keyToKeyNodeMap(src, ignoreMergeKey)
+ if err != nil {
+ return errors.Wrapf(err, "failed to get keyToKeyNodeMap")
+ }
+ }
+
+ aliasName := d.getMergeAliasName(src)
+ var foundErr error
+
+ for i := 0; i < structType.NumField(); i++ {
+ field := structType.Field(i)
+ if isIgnoredStructField(field) {
+ continue
+ }
+ structField := structFieldMap[field.Name]
+ if structField.IsInline {
+ fieldValue := dst.FieldByName(field.Name)
+ if structField.IsAutoAlias {
+ if aliasName != "" {
+ newFieldValue := d.anchorValueMap[aliasName]
+ if newFieldValue.IsValid() {
+ fieldValue.Set(d.castToAssignableValue(newFieldValue, fieldValue.Type()))
+ }
+ }
+ continue
+ }
+ if !fieldValue.CanSet() {
+ return xerrors.Errorf("cannot set embedded type as unexported field %s.%s", field.PkgPath, field.Name)
+ }
+ if fieldValue.Type().Kind() == reflect.Ptr && src.Type() == ast.NullType {
+ // set nil value to pointer
+ fieldValue.Set(reflect.Zero(fieldValue.Type()))
+ continue
+ }
+ mapNode := ast.Mapping(nil, false)
+ for k, v := range keyToNodeMap {
+ key := &ast.StringNode{BaseNode: &ast.BaseNode{}, Value: k}
+ mapNode.Values = append(mapNode.Values, ast.MappingValue(nil, key, v))
+ }
+ newFieldValue, err := d.createDecodedNewValue(ctx, fieldValue.Type(), fieldValue, mapNode)
+ if d.disallowUnknownField {
+ if err := d.deleteStructKeys(fieldValue.Type(), unknownFields); err != nil {
+ return errors.Wrapf(err, "cannot delete struct keys")
+ }
+ }
+
+ if err != nil {
+ if foundErr != nil {
+ continue
+ }
+ var te *errors.TypeError
+ if xerrors.As(err, &te) {
+ if te.StructFieldName != nil {
+ fieldName := fmt.Sprintf("%s.%s", structType.Name(), *te.StructFieldName)
+ te.StructFieldName = &fieldName
+ } else {
+ fieldName := fmt.Sprintf("%s.%s", structType.Name(), field.Name)
+ te.StructFieldName = &fieldName
+ }
+ foundErr = te
+ continue
+ } else {
+ foundErr = err
+ }
+ continue
+ }
+ d.setDefaultValueIfConflicted(newFieldValue, structFieldMap)
+ fieldValue.Set(d.castToAssignableValue(newFieldValue, fieldValue.Type()))
+ continue
+ }
+ v, exists := keyToNodeMap[structField.RenderName]
+ if !exists {
+ continue
+ }
+ delete(unknownFields, structField.RenderName)
+ fieldValue := dst.FieldByName(field.Name)
+ if fieldValue.Type().Kind() == reflect.Ptr && src.Type() == ast.NullType {
+ // set nil value to pointer
+ fieldValue.Set(reflect.Zero(fieldValue.Type()))
+ continue
+ }
+ newFieldValue, err := d.createDecodedNewValue(ctx, fieldValue.Type(), fieldValue, v)
+ if err != nil {
+ if foundErr != nil {
+ continue
+ }
+ var te *errors.TypeError
+ if xerrors.As(err, &te) {
+ fieldName := fmt.Sprintf("%s.%s", structType.Name(), field.Name)
+ te.StructFieldName = &fieldName
+ foundErr = te
+ } else {
+ foundErr = err
+ }
+ continue
+ }
+ fieldValue.Set(d.castToAssignableValue(newFieldValue, fieldValue.Type()))
+ }
+ if foundErr != nil {
+ return errors.Wrapf(foundErr, "failed to decode value")
+ }
+
+ // Ignore unknown fields when parsing an inline struct (recognized by a nil token).
+ // Unknown fields are expected (they could be fields from the parent struct).
+ if len(unknownFields) != 0 && d.disallowUnknownField && src.GetToken() != nil {
+ for key, node := range unknownFields {
+ return errUnknownField(fmt.Sprintf(`unknown field "%s"`, key), node.GetToken())
+ }
+ }
+
+ if d.validator != nil {
+ if err := d.validator.Struct(dst.Interface()); err != nil {
+ ev := reflect.ValueOf(err)
+ if ev.Type().Kind() == reflect.Slice {
+ for i := 0; i < ev.Len(); i++ {
+ fieldErr, ok := ev.Index(i).Interface().(FieldError)
+ if !ok {
+ continue
+ }
+ fieldName := fieldErr.StructField()
+ structField, exists := structFieldMap[fieldName]
+ if !exists {
+ continue
+ }
+ node, exists := keyToNodeMap[structField.RenderName]
+ if exists {
+ // TODO: make the FieldError message customizable
+ return errors.ErrSyntax(fmt.Sprintf("%s", err), node.GetToken())
+ } else if t := src.GetToken(); t != nil && t.Prev != nil && t.Prev.Prev != nil {
+ // A missing required field will not be in the keyToNodeMap,
+ // so the error needs to be associated with the parent of the source node.
+ return errors.ErrSyntax(fmt.Sprintf("%s", err), t.Prev.Prev)
+ }
+ }
+ }
+ return err
+ }
+ }
+ return nil
+}
+
+func (d *Decoder) decodeArray(ctx context.Context, dst reflect.Value, src ast.Node) error {
+ arrayNode, err := d.getArrayNode(src)
+ if err != nil {
+ return errors.Wrapf(err, "failed to get array node")
+ }
+ if arrayNode == nil {
+ return nil
+ }
+ iter := arrayNode.ArrayRange()
+ arrayValue := reflect.New(dst.Type()).Elem()
+ arrayType := dst.Type()
+ elemType := arrayType.Elem()
+ idx := 0
+
+ var foundErr error
+ for iter.Next() {
+ v := iter.Value()
+ if elemType.Kind() == reflect.Ptr && v.Type() == ast.NullType {
+ // set nil value to pointer
+ arrayValue.Index(idx).Set(reflect.Zero(elemType))
+ } else {
+ dstValue, err := d.createDecodedNewValue(ctx, elemType, reflect.Value{}, v)
+ if err != nil {
+ if foundErr == nil {
+ foundErr = err
+ }
+ continue
+ } else {
+ arrayValue.Index(idx).Set(d.castToAssignableValue(dstValue, elemType))
+ }
+ }
+ idx++
+ }
+ dst.Set(arrayValue)
+ if foundErr != nil {
+ return errors.Wrapf(foundErr, "failed to decode value")
+ }
+ return nil
+}
+
+func (d *Decoder) decodeSlice(ctx context.Context, dst reflect.Value, src ast.Node) error {
+ arrayNode, err := d.getArrayNode(src)
+ if err != nil {
+ return errors.Wrapf(err, "failed to get array node")
+ }
+ if arrayNode == nil {
+ return nil
+ }
+ iter := arrayNode.ArrayRange()
+ sliceType := dst.Type()
+ sliceValue := reflect.MakeSlice(sliceType, 0, iter.Len())
+ elemType := sliceType.Elem()
+
+ var foundErr error
+ for iter.Next() {
+ v := iter.Value()
+ if elemType.Kind() == reflect.Ptr && v.Type() == ast.NullType {
+ // set nil value to pointer
+ sliceValue = reflect.Append(sliceValue, reflect.Zero(elemType))
+ continue
+ }
+ dstValue, err := d.createDecodedNewValue(ctx, elemType, reflect.Value{}, v)
+ if err != nil {
+ if foundErr == nil {
+ foundErr = err
+ }
+ continue
+ }
+ sliceValue = reflect.Append(sliceValue, d.castToAssignableValue(dstValue, elemType))
+ }
+ dst.Set(sliceValue)
+ if foundErr != nil {
+ return errors.Wrapf(foundErr, "failed to decode value")
+ }
+ return nil
+}
+
+func (d *Decoder) decodeMapItem(ctx context.Context, dst *MapItem, src ast.Node) error {
+ mapNode, err := d.getMapNode(src)
+ if err != nil {
+ return errors.Wrapf(err, "failed to get map node")
+ }
+ if mapNode == nil {
+ return nil
+ }
+ mapIter := mapNode.MapRange()
+ if !mapIter.Next() {
+ return nil
+ }
+ key := mapIter.Key()
+ value := mapIter.Value()
+ if key.Type() == ast.MergeKeyType {
+ if err := d.decodeMapItem(ctx, dst, value); err != nil {
+ return errors.Wrapf(err, "failed to decode map with merge key")
+ }
+ return nil
+ }
+ *dst = MapItem{
+ Key: d.nodeToValue(key),
+ Value: d.nodeToValue(value),
+ }
+ return nil
+}
+
+func (d *Decoder) validateDuplicateKey(keyMap map[string]struct{}, key interface{}, keyNode ast.Node) error {
+ k, ok := key.(string)
+ if !ok {
+ return nil
+ }
+ if d.disallowDuplicateKey {
+ if _, exists := keyMap[k]; exists {
+ return errDuplicateKey(fmt.Sprintf(`duplicate key "%s"`, k), keyNode.GetToken())
+ }
+ }
+ keyMap[k] = struct{}{}
+ return nil
+}
+
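+// decodeMapSlice decodes a mapping node into a MapSlice, preserving key order and
+// expanding merge keys.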
+func (d *Decoder) decodeMapSlice(ctx context.Context, dst *MapSlice, src ast.Node) error {
+ mapNode, err := d.getMapNode(src)
+ if err != nil {
+ return errors.Wrapf(err, "failed to get map node")
+ }
+ if mapNode == nil {
+ return nil
+ }
+ mapSlice := MapSlice{}
+ mapIter := mapNode.MapRange()
+ keyMap := map[string]struct{}{}
+ for mapIter.Next() {
+ key := mapIter.Key()
+ value := mapIter.Value()
+ if key.Type() == ast.MergeKeyType {
+ var m MapSlice
+ if err := d.decodeMapSlice(ctx, &m, value); err != nil {
+ return errors.Wrapf(err, "failed to decode map with merge key")
+ }
+ for _, v := range m {
+ if err := d.validateDuplicateKey(keyMap, v.Key, value); err != nil {
+ return errors.Wrapf(err, "invalid map key")
+ }
+ mapSlice = append(mapSlice, v)
+ }
+ continue
+ }
+ k := d.nodeToValue(key)
+ if err := d.validateDuplicateKey(keyMap, k, key); err != nil {
+ return errors.Wrapf(err, "invalid map key")
+ }
+ mapSlice = append(mapSlice, MapItem{
+ Key: k,
+ Value: d.nodeToValue(value),
+ })
+ }
+ *dst = mapSlice
+ return nil
+}
+
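+// decodeMap decodes a mapping node into a Go map, expanding merge keys and
+// validating duplicate keys when duplicate keys are disallowed.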
+func (d *Decoder) decodeMap(ctx context.Context, dst reflect.Value, src ast.Node) error {
+ mapNode, err := d.getMapNode(src)
+ if err != nil {
+ return errors.Wrapf(err, "failed to get map node")
+ }
+ if mapNode == nil {
+ return nil
+ }
+ mapType := dst.Type()
+ mapValue := reflect.MakeMap(mapType)
+ keyType := mapValue.Type().Key()
+ valueType := mapValue.Type().Elem()
+ mapIter := mapNode.MapRange()
+ keyMap := map[string]struct{}{}
+ var foundErr error
+ for mapIter.Next() {
+ key := mapIter.Key()
+ value := mapIter.Value()
+ if key.Type() == ast.MergeKeyType {
+ if err := d.decodeMap(ctx, dst, value); err != nil {
+ return errors.Wrapf(err, "failed to decode map with merge key")
+ }
+ iter := dst.MapRange()
+ for iter.Next() {
+ if err := d.validateDuplicateKey(keyMap, iter.Key(), value); err != nil {
+ return errors.Wrapf(err, "invalid map key")
+ }
+ mapValue.SetMapIndex(iter.Key(), iter.Value())
+ }
+ continue
+ }
+ k := reflect.ValueOf(d.nodeToValue(key))
+ if k.IsValid() && k.Type().ConvertibleTo(keyType) {
+ k = k.Convert(keyType)
+ }
+ if k.IsValid() {
+ if err := d.validateDuplicateKey(keyMap, k.Interface(), key); err != nil {
+ return errors.Wrapf(err, "invalid map key")
+ }
+ }
+ if valueType.Kind() == reflect.Ptr && value.Type() == ast.NullType {
+ // set nil value to pointer
+ mapValue.SetMapIndex(k, reflect.Zero(valueType))
+ continue
+ }
+ dstValue, err := d.createDecodedNewValue(ctx, valueType, reflect.Value{}, value)
+ if err != nil {
+ if foundErr == nil {
+ foundErr = err
+ }
+ }
+ if !k.IsValid() {
+ // the key is null; use a zero value of the key type
+ mapValue.SetMapIndex(d.createDecodableValue(keyType), d.castToAssignableValue(dstValue, valueType))
+ continue
+ }
+ mapValue.SetMapIndex(k, d.castToAssignableValue(dstValue, valueType))
+ }
+ dst.Set(mapValue)
+ if foundErr != nil {
+ return errors.Wrapf(foundErr, "failed to decode value")
+ }
+ return nil
+}
+
+func (d *Decoder) fileToReader(file string) (io.Reader, error) {
+ reader, err := os.Open(file)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to open file")
+ }
+ return reader, nil
+}
+
+func (d *Decoder) isYAMLFile(file string) bool {
+ ext := filepath.Ext(file)
+ if ext == ".yml" {
+ return true
+ }
+ if ext == ".yaml" {
+ return true
+ }
+ return false
+}
+
+func (d *Decoder) readersUnderDir(dir string) ([]io.Reader, error) {
+ pattern := fmt.Sprintf("%s/*", dir)
+ matches, err := filepath.Glob(pattern)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to get files by %s", pattern)
+ }
+ readers := []io.Reader{}
+ for _, match := range matches {
+ if !d.isYAMLFile(match) {
+ continue
+ }
+ reader, err := d.fileToReader(match)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to get reader")
+ }
+ readers = append(readers, reader)
+ }
+ return readers, nil
+}
+
+func (d *Decoder) readersUnderDirRecursive(dir string) ([]io.Reader, error) {
+ readers := []io.Reader{}
+ if err := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error {
+ if !d.isYAMLFile(path) {
+ return nil
+ }
+ reader, err := d.fileToReader(path)
+ if err != nil {
+ return errors.Wrapf(err, "failed to get reader")
+ }
+ readers = append(readers, reader)
+ return nil
+ }); err != nil {
+ return nil, errors.Wrapf(err, "interrupt walk in %s", dir)
+ }
+ return readers, nil
+}
+
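+// resolveReference applies the decoder options and parses all reference files and
+// directories so that anchors defined there are registered before the main
+// document is decoded.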
+func (d *Decoder) resolveReference() error {
+ for _, opt := range d.opts {
+ if err := opt(d); err != nil {
+ return errors.Wrapf(err, "failed to exec option")
+ }
+ }
+ for _, file := range d.referenceFiles {
+ reader, err := d.fileToReader(file)
+ if err != nil {
+ return errors.Wrapf(err, "failed to get reader")
+ }
+ d.referenceReaders = append(d.referenceReaders, reader)
+ }
+ for _, dir := range d.referenceDirs {
+ if !d.isRecursiveDir {
+ readers, err := d.readersUnderDir(dir)
+ if err != nil {
+ return errors.Wrapf(err, "failed to get readers from under the %s", dir)
+ }
+ d.referenceReaders = append(d.referenceReaders, readers...)
+ } else {
+ readers, err := d.readersUnderDirRecursive(dir)
+ if err != nil {
+ return errors.Wrapf(err, "failed to get readers from under the %s", dir)
+ }
+ d.referenceReaders = append(d.referenceReaders, readers...)
+ }
+ }
+ for _, reader := range d.referenceReaders {
+ bytes, err := io.ReadAll(reader)
+ if err != nil {
+ return errors.Wrapf(err, "failed to read buffer")
+ }
+
+ // assign new anchor definition to anchorMap
+ if _, err := d.parse(bytes); err != nil {
+ return errors.Wrapf(err, "failed to decode")
+ }
+ }
+ d.isResolvedReference = true
+ return nil
+}
+
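+// parse parses the given YAML bytes (keeping comments when a comment map is set),
+// converts each document body to a value so that anchors are registered, and keeps
+// only the non-empty documents.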
+func (d *Decoder) parse(bytes []byte) (*ast.File, error) {
+ var parseMode parser.Mode
+ if d.toCommentMap != nil {
+ parseMode = parser.ParseComments
+ }
+ f, err := parser.ParseBytes(bytes, parseMode)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to parse yaml")
+ }
+ normalizedFile := &ast.File{}
+ for _, doc := range f.Docs {
+ // try to decode the ast.Node to a value and register anchor values in anchorMap
+ if v := d.nodeToValue(doc.Body); v != nil {
+ normalizedFile.Docs = append(normalizedFile.Docs, doc)
+ }
+ }
+ return normalizedFile, nil
+}
+
+func (d *Decoder) isInitialized() bool {
+ return d.parsedFile != nil
+}
+
+func (d *Decoder) decodeInit() error {
+ if !d.isResolvedReference {
+ if err := d.resolveReference(); err != nil {
+ return errors.Wrapf(err, "failed to resolve reference")
+ }
+ }
+ var buf bytes.Buffer
+ if _, err := io.Copy(&buf, d.reader); err != nil {
+ return errors.Wrapf(err, "failed to copy from reader")
+ }
+ file, err := d.parse(buf.Bytes())
+ if err != nil {
+ return errors.Wrapf(err, "failed to decode")
+ }
+ d.parsedFile = file
+ return nil
+}
+
+func (d *Decoder) decode(ctx context.Context, v reflect.Value) error {
+ if len(d.parsedFile.Docs) <= d.streamIndex {
+ return io.EOF
+ }
+ body := d.parsedFile.Docs[d.streamIndex].Body
+ if body == nil {
+ return nil
+ }
+ if err := d.decodeValue(ctx, v.Elem(), body); err != nil {
+ return errors.Wrapf(err, "failed to decode value")
+ }
+ d.streamIndex++
+ return nil
+}
+
+// Decode reads the next YAML-encoded value from its input
+// and stores it in the value pointed to by v.
+//
+// See the documentation for Unmarshal for details about the
+// conversion of YAML into a Go value.
+func (d *Decoder) Decode(v interface{}) error {
+ return d.DecodeContext(context.Background(), v)
+}
+
+// DecodeContext reads the next YAML-encoded value from its input
+// and stores it in the value pointed to by v with context.Context.
+func (d *Decoder) DecodeContext(ctx context.Context, v interface{}) error {
+ rv := reflect.ValueOf(v)
+ if rv.Type().Kind() != reflect.Ptr {
+ return errors.ErrDecodeRequiredPointerType
+ }
+ if d.isInitialized() {
+ if err := d.decode(ctx, rv); err != nil {
+ if err == io.EOF {
+ return err
+ }
+ return errors.Wrapf(err, "failed to decode")
+ }
+ return nil
+ }
+ if err := d.decodeInit(); err != nil {
+ return errors.Wrapf(err, "failed to decodeInit")
+ }
+ if err := d.decode(ctx, rv); err != nil {
+ if err == io.EOF {
+ return err
+ }
+ return errors.Wrapf(err, "failed to decode")
+ }
+ return nil
+}
+
+// DecodeFromNode decodes node into the value pointed to by v.
+func (d *Decoder) DecodeFromNode(node ast.Node, v interface{}) error {
+ return d.DecodeFromNodeContext(context.Background(), node, v)
+}
+
+// DecodeFromNodeContext decodes node into the value pointed to by v with context.Context.
+func (d *Decoder) DecodeFromNodeContext(ctx context.Context, node ast.Node, v interface{}) error {
+ rv := reflect.ValueOf(v)
+ if rv.Type().Kind() != reflect.Ptr {
+ return errors.ErrDecodeRequiredPointerType
+ }
+ if !d.isInitialized() {
+ if err := d.decodeInit(); err != nil {
+ return errors.Wrapf(err, "failed to decodInit")
+ }
+ }
+ // resolve references to anchors defined in the same file
+ d.nodeToValue(node)
+ if err := d.decodeValue(ctx, rv.Elem(), node); err != nil {
+ return errors.Wrapf(err, "failed to decode value")
+ }
+ return nil
+}
diff --git a/tmpmod/github.com/goccy/go-yaml/decode_test.go b/tmpmod/github.com/goccy/go-yaml/decode_test.go
new file mode 100644
index 00000000..61dd019d
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/decode_test.go
@@ -0,0 +1,2908 @@
+package yaml_test
+
+import (
+ "bytes"
+ "context"
+ "fmt"
+ "io"
+ "log"
+ "math"
+ "net"
+ "reflect"
+ "strconv"
+ "strings"
+ "testing"
+ "time"
+
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/ast"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/internal/errors"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/parser"
+ "golang.org/x/xerrors"
+)
+
+type Child struct {
+ B int
+ C int `yaml:"-"`
+}
+
+func TestDecoder(t *testing.T) {
+ tests := []struct {
+ source string
+ value interface{}
+ }{
+ {
+ "null\n",
+ (*struct{})(nil),
+ },
+ {
+ "v: hi\n",
+ map[string]string{"v": "hi"},
+ },
+ {
+ "v: \"true\"\n",
+ map[string]string{"v": "true"},
+ },
+ {
+ "v: \"false\"\n",
+ map[string]string{"v": "false"},
+ },
+ {
+ "v: true\n",
+ map[string]interface{}{"v": true},
+ },
+ {
+ "v: true\n",
+ map[string]string{"v": "true"},
+ },
+ {
+ "v: 10\n",
+ map[string]string{"v": "10"},
+ },
+ {
+ "v: -10\n",
+ map[string]string{"v": "-10"},
+ },
+ {
+ "v: 1.234\n",
+ map[string]string{"v": "1.234"},
+ },
+ {
+ "v: \" foo\"\n",
+ map[string]string{"v": " foo"},
+ },
+ {
+ "v: \"foo \"\n",
+ map[string]string{"v": "foo "},
+ },
+ {
+ "v: \" foo \"\n",
+ map[string]string{"v": " foo "},
+ },
+ {
+ "v: false\n",
+ map[string]bool{"v": false},
+ },
+ {
+ "v: 10\n",
+ map[string]int{"v": 10},
+ },
+ {
+ "v: 10",
+ map[string]interface{}{"v": 10},
+ },
+ {
+ "v: 0b10",
+ map[string]interface{}{"v": 2},
+ },
+ {
+ "v: -0b101010",
+ map[string]interface{}{"v": -42},
+ },
+ {
+ "v: -0b1000000000000000000000000000000000000000000000000000000000000000",
+ map[string]interface{}{"v": int64(-9223372036854775808)},
+ },
+ {
+ "v: 0xA",
+ map[string]interface{}{"v": 10},
+ },
+ {
+ "v: .1",
+ map[string]interface{}{"v": 0.1},
+ },
+ {
+ "v: -.1",
+ map[string]interface{}{"v": -0.1},
+ },
+ {
+ "v: -10\n",
+ map[string]int{"v": -10},
+ },
+ {
+ "v: 4294967296\n",
+ map[string]int64{"v": int64(4294967296)},
+ },
+ {
+ "v: 0.1\n",
+ map[string]interface{}{"v": 0.1},
+ },
+ {
+ "v: 0.99\n",
+ map[string]float32{"v": 0.99},
+ },
+ {
+ "v: -0.1\n",
+ map[string]float64{"v": -0.1},
+ },
+ {
+ "v: 6.8523e+5",
+ map[string]interface{}{"v": 6.8523e+5},
+ },
+ {
+ "v: 685.230_15e+03",
+ map[string]interface{}{"v": 685.23015e+03},
+ },
+ {
+ "v: 685_230.15",
+ map[string]interface{}{"v": 685230.15},
+ },
+ {
+ "v: 685_230.15",
+ map[string]float64{"v": 685230.15},
+ },
+ {
+ "v: 685230",
+ map[string]interface{}{"v": 685230},
+ },
+ {
+ "v: +685_230",
+ map[string]interface{}{"v": 685230},
+ },
+ {
+ "v: 02472256",
+ map[string]interface{}{"v": 685230},
+ },
+ {
+ "v: 0x_0A_74_AE",
+ map[string]interface{}{"v": 685230},
+ },
+ {
+ "v: 0b1010_0111_0100_1010_1110",
+ map[string]interface{}{"v": 685230},
+ },
+ {
+ "v: +685_230",
+ map[string]int{"v": 685230},
+ },
+
+ // Bools from spec
+ {
+ "v: True",
+ map[string]interface{}{"v": true},
+ },
+ {
+ "v: TRUE",
+ map[string]interface{}{"v": true},
+ },
+ {
+ "v: False",
+ map[string]interface{}{"v": false},
+ },
+ {
+ "v: FALSE",
+ map[string]interface{}{"v": false},
+ },
+ {
+ "v: y",
+ map[string]interface{}{"v": "y"}, // y or yes or Yes is string
+ },
+ {
+ "v: NO",
+ map[string]interface{}{"v": "NO"}, // no or No or NO is string
+ },
+ {
+ "v: on",
+ map[string]interface{}{"v": "on"}, // on is string
+ },
+
+ // Some cross type conversions
+ {
+ "v: 42",
+ map[string]uint{"v": 42},
+ }, {
+ "v: 4294967296",
+ map[string]uint64{"v": uint64(4294967296)},
+ },
+
+ // int
+ {
+ "v: 2147483647",
+ map[string]int{"v": math.MaxInt32},
+ },
+ {
+ "v: -2147483648",
+ map[string]int{"v": math.MinInt32},
+ },
+
+ // int64
+ {
+ "v: 9223372036854775807",
+ map[string]int64{"v": math.MaxInt64},
+ },
+ {
+ "v: 0b111111111111111111111111111111111111111111111111111111111111111",
+ map[string]int64{"v": math.MaxInt64},
+ },
+ {
+ "v: -9223372036854775808",
+ map[string]int64{"v": math.MinInt64},
+ },
+ {
+ "v: -0b111111111111111111111111111111111111111111111111111111111111111",
+ map[string]int64{"v": -math.MaxInt64},
+ },
+
+ // uint
+ {
+ "v: 0",
+ map[string]uint{"v": 0},
+ },
+ {
+ "v: 4294967295",
+ map[string]uint{"v": math.MaxUint32},
+ },
+
+ // uint64
+ {
+ "v: 0",
+ map[string]uint{"v": 0},
+ },
+ {
+ "v: 18446744073709551615",
+ map[string]uint64{"v": math.MaxUint64},
+ },
+ {
+ "v: 0b1111111111111111111111111111111111111111111111111111111111111111",
+ map[string]uint64{"v": math.MaxUint64},
+ },
+ {
+ "v: 9223372036854775807",
+ map[string]uint64{"v": math.MaxInt64},
+ },
+
+ // float32
+ {
+ "v: 3.40282346638528859811704183484516925440e+38",
+ map[string]float32{"v": math.MaxFloat32},
+ },
+ {
+ "v: 1.401298464324817070923729583289916131280e-45",
+ map[string]float32{"v": math.SmallestNonzeroFloat32},
+ },
+ {
+ "v: 18446744073709551615",
+ map[string]float32{"v": float32(math.MaxUint64)},
+ },
+ {
+ "v: 18446744073709551616",
+ map[string]float32{"v": float32(math.MaxUint64 + 1)},
+ },
+
+ // float64
+ {
+ "v: 1.797693134862315708145274237317043567981e+308",
+ map[string]float64{"v": math.MaxFloat64},
+ },
+ {
+ "v: 4.940656458412465441765687928682213723651e-324",
+ map[string]float64{"v": math.SmallestNonzeroFloat64},
+ },
+ {
+ "v: 18446744073709551615",
+ map[string]float64{"v": float64(math.MaxUint64)},
+ },
+ {
+ "v: 18446744073709551616",
+ map[string]float64{"v": float64(math.MaxUint64 + 1)},
+ },
+
+ // Timestamps
+ {
+ // Date only.
+ "v: 2015-01-01\n",
+ map[string]time.Time{"v": time.Date(2015, 1, 1, 0, 0, 0, 0, time.UTC)},
+ },
+ {
+ // RFC3339
+ "v: 2015-02-24T18:19:39.12Z\n",
+ map[string]time.Time{"v": time.Date(2015, 2, 24, 18, 19, 39, .12e9, time.UTC)},
+ },
+ {
+ // RFC3339 with short dates.
+ "v: 2015-2-3T3:4:5Z",
+ map[string]time.Time{"v": time.Date(2015, 2, 3, 3, 4, 5, 0, time.UTC)},
+ },
+ {
+ // ISO8601 lower case t
+ "v: 2015-02-24t18:19:39Z\n",
+ map[string]time.Time{"v": time.Date(2015, 2, 24, 18, 19, 39, 0, time.UTC)},
+ },
+ {
+ // space separated, no time zone
+ "v: 2015-02-24 18:19:39\n",
+ map[string]time.Time{"v": time.Date(2015, 2, 24, 18, 19, 39, 0, time.UTC)},
+ },
+ {
+ "v: 60s\n",
+ map[string]time.Duration{"v": time.Minute},
+ },
+ {
+ "v: -0.5h\n",
+ map[string]time.Duration{"v": -30 * time.Minute},
+ },
+
+ // Single Quoted values.
+ {
+ `'1': '2'`,
+ map[interface{}]interface{}{"1": `2`},
+ },
+ {
+ `'1': '"2"'`,
+ map[interface{}]interface{}{"1": `"2"`},
+ },
+ {
+ `'1': ''''`,
+ map[interface{}]interface{}{"1": `'`},
+ },
+ {
+ `'1': '''2'''`,
+ map[interface{}]interface{}{"1": `'2'`},
+ },
+ {
+ `'1': 'B''z'`,
+ map[interface{}]interface{}{"1": `B'z`},
+ },
+ {
+ `'1': '\'`,
+ map[interface{}]interface{}{"1": `\`},
+ },
+ {
+ `'1': '\\'`,
+ map[interface{}]interface{}{"1": `\\`},
+ },
+ {
+ `'1': '\"2\"'`,
+ map[interface{}]interface{}{"1": `\"2\"`},
+ },
+ {
+ `'1': '\\"2\\"'`,
+ map[interface{}]interface{}{"1": `\\"2\\"`},
+ },
+ {
+ "'1': ' 1\n 2\n 3'",
+ map[interface{}]interface{}{"1": " 1 2 3"},
+ },
+ {
+ "'1': '\n 2\n 3'",
+ map[interface{}]interface{}{"1": " 2 3"},
+ },
+
+ // Double Quoted values.
+ {
+ `"1": "2"`,
+ map[interface{}]interface{}{"1": `2`},
+ },
+ {
+ `"1": "\"2\""`,
+ map[interface{}]interface{}{"1": `"2"`},
+ },
+ {
+ `"1": "\""`,
+ map[interface{}]interface{}{"1": `"`},
+ },
+ {
+ `"1": "X\"z"`,
+ map[interface{}]interface{}{"1": `X"z`},
+ },
+ {
+ `"1": "\\"`,
+ map[interface{}]interface{}{"1": `\`},
+ },
+ {
+ `"1": "\\\\"`,
+ map[interface{}]interface{}{"1": `\\`},
+ },
+ {
+ `"1": "\\\"2\\\""`,
+ map[interface{}]interface{}{"1": `\"2\"`},
+ },
+ {
+ "'1': \" 1\n 2\n 3\"",
+ map[interface{}]interface{}{"1": " 1 2 3"},
+ },
+ {
+ "'1': \"\n 2\n 3\"",
+ map[interface{}]interface{}{"1": " 2 3"},
+ },
+ {
+ `"1": "a\x2Fb"`,
+ map[interface{}]interface{}{"1": `a/b`},
+ },
+ {
+ `"1": "a\u002Fb"`,
+ map[interface{}]interface{}{"1": `a/b`},
+ },
+ {
+ `"1": "a\x2Fb\u002Fc\U0000002Fd"`,
+ map[interface{}]interface{}{"1": `a/b/c/d`},
+ },
+ {
+ "'1': \"2\\n3\"",
+ map[interface{}]interface{}{"1": "2\n3"},
+ },
+ {
+ "'1': \"2\\r\\n3\"",
+ map[interface{}]interface{}{"1": "2\r\n3"},
+ },
+
+ {
+ "a: -b_c",
+ map[string]interface{}{"a": "-b_c"},
+ },
+ {
+ "a: +b_c",
+ map[string]interface{}{"a": "+b_c"},
+ },
+ {
+ "a: 50cent_of_dollar",
+ map[string]interface{}{"a": "50cent_of_dollar"},
+ },
+
+ // Nulls
+ {
+ "v:",
+ map[string]interface{}{"v": nil},
+ },
+ {
+ "v: ~",
+ map[string]interface{}{"v": nil},
+ },
+ {
+ "~: null key",
+ map[interface{}]string{nil: "null key"},
+ },
+ {
+ "v:",
+ map[string]*bool{"v": nil},
+ },
+ {
+ "v: null",
+ map[string]*string{"v": nil},
+ },
+ {
+ "v: null",
+ map[string]string{"v": ""},
+ },
+ {
+ "v: null",
+ map[string]interface{}{"v": nil},
+ },
+ {
+ "v: Null",
+ map[string]interface{}{"v": nil},
+ },
+ {
+ "v: NULL",
+ map[string]interface{}{"v": nil},
+ },
+ {
+ "v: ~",
+ map[string]*string{"v": nil},
+ },
+ {
+ "v: ~",
+ map[string]string{"v": ""},
+ },
+
+ {
+ "v: .inf\n",
+ map[string]interface{}{"v": math.Inf(0)},
+ },
+ {
+ "v: .Inf\n",
+ map[string]interface{}{"v": math.Inf(0)},
+ },
+ {
+ "v: .INF\n",
+ map[string]interface{}{"v": math.Inf(0)},
+ },
+ {
+ "v: -.inf\n",
+ map[string]interface{}{"v": math.Inf(-1)},
+ },
+ {
+ "v: -.Inf\n",
+ map[string]interface{}{"v": math.Inf(-1)},
+ },
+ {
+ "v: -.INF\n",
+ map[string]interface{}{"v": math.Inf(-1)},
+ },
+ {
+ "v: .nan\n",
+ map[string]interface{}{"v": math.NaN()},
+ },
+ {
+ "v: .NaN\n",
+ map[string]interface{}{"v": math.NaN()},
+ },
+ {
+ "v: .NAN\n",
+ map[string]interface{}{"v": math.NaN()},
+ },
+
+ // Explicit tags.
+ {
+ "v: !!float '1.1'",
+ map[string]interface{}{"v": 1.1},
+ },
+ {
+ "v: !!float 0",
+ map[string]interface{}{"v": float64(0)},
+ },
+ {
+ "v: !!float -1",
+ map[string]interface{}{"v": float64(-1)},
+ },
+ {
+ "v: !!null ''",
+ map[string]interface{}{"v": nil},
+ },
+ {
+ "v: !!timestamp \"2015-01-01\"",
+ map[string]time.Time{"v": time.Date(2015, 1, 1, 0, 0, 0, 0, time.UTC)},
+ },
+ {
+ "v: !!timestamp 2015-01-01",
+ map[string]time.Time{"v": time.Date(2015, 1, 1, 0, 0, 0, 0, time.UTC)},
+ },
+
+ // Flow sequence
+ {
+ "v: [A,B]",
+ map[string]interface{}{"v": []interface{}{"A", "B"}},
+ },
+ {
+ "v: [A,B,C,]",
+ map[string][]string{"v": []string{"A", "B", "C"}},
+ },
+ {
+ "v: [A,1,C]",
+ map[string][]string{"v": []string{"A", "1", "C"}},
+ },
+ {
+ "v: [A,1,C]",
+ map[string]interface{}{"v": []interface{}{"A", 1, "C"}},
+ },
+
+ // Block sequence
+ {
+ "v:\n - A\n - B",
+ map[string]interface{}{"v": []interface{}{"A", "B"}},
+ },
+ {
+ "v:\n - A\n - B\n - C",
+ map[string][]string{"v": []string{"A", "B", "C"}},
+ },
+ {
+ "v:\n - A\n - 1\n - C",
+ map[string][]string{"v": []string{"A", "1", "C"}},
+ },
+ {
+ "v:\n - A\n - 1\n - C",
+ map[string]interface{}{"v": []interface{}{"A", 1, "C"}},
+ },
+
+ // Map inside interface with no type hints.
+ {
+ "a: {b: c}",
+ map[interface{}]interface{}{"a": map[interface{}]interface{}{"b": "c"}},
+ },
+
+ {
+ "v: \"\"\n",
+ map[string]string{"v": ""},
+ },
+ {
+ "v:\n- A\n- B\n",
+ map[string][]string{"v": {"A", "B"}},
+ },
+ {
+ "a: '-'\n",
+ map[string]string{"a": "-"},
+ },
+ {
+ "123\n",
+ 123,
+ },
+ {
+ "hello: world\n",
+ map[string]string{"hello": "world"},
+ },
+ {
+ "hello: world\r\n",
+ map[string]string{"hello": "world"},
+ },
+ {
+ "hello: world\rGo: Gopher",
+ map[string]string{"hello": "world", "Go": "Gopher"},
+ },
+
+ // Structs and type conversions.
+ {
+ "hello: world",
+ struct{ Hello string }{"world"},
+ },
+ {
+ "a: {b: c}",
+ struct{ A struct{ B string } }{struct{ B string }{"c"}},
+ },
+ {
+ "a: {b: c}",
+ struct{ A map[string]string }{map[string]string{"b": "c"}},
+ },
+ {
+ "a:",
+ struct{ A map[string]string }{},
+ },
+ {
+ "a: 1",
+ struct{ A int }{1},
+ },
+ {
+ "a: 1",
+ struct{ A float64 }{1},
+ },
+ {
+ "a: 1.0",
+ struct{ A int }{1},
+ },
+ {
+ "a: 1.0",
+ struct{ A uint }{1},
+ },
+ {
+ "a: [1, 2]",
+ struct{ A []int }{[]int{1, 2}},
+ },
+ {
+ "a: [1, 2]",
+ struct{ A [2]int }{[2]int{1, 2}},
+ },
+ {
+ "a: 1",
+ struct{ B int }{0},
+ },
+ {
+ "a: 1",
+ struct {
+ B int `yaml:"a"`
+ }{1},
+ },
+
+ {
+ "a: 1\n",
+ yaml.MapItem{Key: "a", Value: 1},
+ },
+ {
+ "a: 1\nb: 2\nc: 3\n",
+ yaml.MapSlice{
+ {Key: "a", Value: 1},
+ {Key: "b", Value: 2},
+ {Key: "c", Value: 3},
+ },
+ },
+ {
+ "v:\n- A\n- 1\n- B:\n - 2\n - 3\n",
+ map[string]interface{}{
+ "v": []interface{}{
+ "A",
+ 1,
+ map[string][]int{
+ "B": {2, 3},
+ },
+ },
+ },
+ },
+ {
+ "a:\n b: c\n",
+ map[string]interface{}{
+ "a": map[string]string{
+ "b": "c",
+ },
+ },
+ },
+ {
+ "a: {x: 1}\n",
+ map[string]map[string]int{
+ "a": {
+ "x": 1,
+ },
+ },
+ },
+ {
+ "t2: 2018-01-09T10:40:47Z\nt4: 2098-01-09T10:40:47Z\n",
+ map[string]string{
+ "t2": "2018-01-09T10:40:47Z",
+ "t4": "2098-01-09T10:40:47Z",
+ },
+ },
+ {
+ "a: [1, 2]\n",
+ map[string][]int{
+ "a": {1, 2},
+ },
+ },
+ {
+ "a: {b: c, d: e}\n",
+ map[string]interface{}{
+ "a": map[string]string{
+ "b": "c",
+ "d": "e",
+ },
+ },
+ },
+ {
+ "a: 3s\n",
+ map[string]string{
+ "a": "3s",
+ },
+ },
+ {
+ "a: \n",
+ map[string]string{"a": ""},
+ },
+ {
+ "a: \"1:1\"\n",
+ map[string]string{"a": "1:1"},
+ },
+ {
+ "a: 1.2.3.4\n",
+ map[string]string{"a": "1.2.3.4"},
+ },
+ {
+ "a: 'b: c'\n",
+ map[string]string{"a": "b: c"},
+ },
+ {
+ "a: 'Hello #comment'\n",
+ map[string]string{"a": "Hello #comment"},
+ },
+ {
+ "a: 100.5\n",
+ map[string]interface{}{
+ "a": 100.5,
+ },
+ },
+ {
+ "a: \"\\0\"\n",
+ map[string]string{"a": "\\0"},
+ },
+ {
+ "b: 2\na: 1\nd: 4\nc: 3\nsub:\n e: 5\n",
+ map[string]interface{}{
+ "b": 2,
+ "a": 1,
+ "d": 4,
+ "c": 3,
+ "sub": map[string]int{
+ "e": 5,
+ },
+ },
+ },
+ {
+ " a : b \n",
+ map[string]string{"a": "b"},
+ },
+ {
+ "a: b # comment\nb: c\n",
+ map[string]string{
+ "a": "b",
+ "b": "c",
+ },
+ },
+ {
+ "---\na: b\n",
+ map[string]string{"a": "b"},
+ },
+ {
+ "a: b\n...\n",
+ map[string]string{"a": "b"},
+ },
+ {
+ "%YAML 1.2\n---\n",
+ (*struct{})(nil),
+ },
+ {
+ "---\n",
+ (*struct{})(nil),
+ },
+ {
+ "...",
+ (*struct{})(nil),
+ },
+ {
+ "v: go test ./...",
+ map[string]string{"v": "go test ./..."},
+ },
+ {
+ "v: echo ---",
+ map[string]string{"v": "echo ---"},
+ },
+ {
+ "v: |\n hello\n ...\n world\n",
+ map[string]string{"v": "hello\n...\nworld\n"},
+ },
+ {
+ "a: !!binary gIGC\n",
+ map[string]string{"a": "\x80\x81\x82"},
+ },
+ {
+ "a: !!binary |\n " + strings.Repeat("kJCQ", 17) + "kJ\n CQ\n",
+ map[string]string{"a": strings.Repeat("\x90", 54)},
+ },
+ {
+ "v:\n- A\n- |-\n B\n C\n",
+ map[string][]string{
+ "v": {
+ "A", "B\nC",
+ },
+ },
+ },
+ {
+ "v:\n- A\n- |-\n B\n C\n\n\n",
+ map[string][]string{
+ "v": {
+ "A", "B\nC",
+ },
+ },
+ },
+ {
+ "v:\n- A\n- >-\n B\n C\n",
+ map[string][]string{
+ "v": {
+ "A", "B C",
+ },
+ },
+ },
+ {
+ "v:\n- A\n- >-\n B\n C\n\n\n",
+ map[string][]string{
+ "v": {
+ "A", "B C",
+ },
+ },
+ },
+ {
+ "a: b\nc: d\n",
+ struct {
+ A string
+ C string `yaml:"c"`
+ }{
+ "b", "d",
+ },
+ },
+ {
+ "a: 1\nb: 2\n",
+ struct {
+ A int
+ B int `yaml:"-"`
+ }{
+ 1, 0,
+ },
+ },
+ {
+ "a: 1\nb: 2\n",
+ struct {
+ A int
+ Child `yaml:",inline"`
+ }{
+ 1,
+ Child{
+ B: 2,
+ C: 0,
+ },
+ },
+ },
+
+ // Anchors and aliases.
+ {
+ "a: &x 1\nb: &y 2\nc: *x\nd: *y\n",
+ struct{ A, B, C, D int }{1, 2, 1, 2},
+ },
+ {
+ "a: &a {c: 1}\nb: *a\n",
+ struct {
+ A, B struct {
+ C int
+ }
+ }{struct{ C int }{1}, struct{ C int }{1}},
+ },
+ {
+ "a: &a [1, 2]\nb: *a\n",
+ struct{ B []int }{[]int{1, 2}},
+ },
+
+ {
+ "tags:\n- hello-world\na: foo",
+ struct {
+ Tags []string
+ A string
+ }{Tags: []string{"hello-world"}, A: "foo"},
+ },
+ {
+ "",
+ (*struct{})(nil),
+ },
+ {
+ "{}", struct{}{},
+ },
+ {
+ "v: /a/{b}",
+ map[string]string{"v": "/a/{b}"},
+ },
+ {
+ "v: 1[]{},!%?&*",
+ map[string]string{"v": "1[]{},!%?&*"},
+ },
+ {
+ "v: user's item",
+ map[string]string{"v": "user's item"},
+ },
+ {
+ "v: [1,[2,[3,[4,5],6],7],8]",
+ map[string]interface{}{
+ "v": []interface{}{
+ 1,
+ []interface{}{
+ 2,
+ []interface{}{
+ 3,
+ []int{4, 5},
+ 6,
+ },
+ 7,
+ },
+ 8,
+ },
+ },
+ },
+ {
+ "v: {a: {b: {c: {d: e},f: g},h: i},j: k}",
+ map[string]interface{}{
+ "v": map[string]interface{}{
+ "a": map[string]interface{}{
+ "b": map[string]interface{}{
+ "c": map[string]string{
+ "d": "e",
+ },
+ "f": "g",
+ },
+ "h": "i",
+ },
+ "j": "k",
+ },
+ },
+ },
+ {
+ `---
+- a:
+ b:
+- c: d
+`,
+ []map[string]interface{}{
+ {
+ "a": map[string]interface{}{
+ "b": nil,
+ },
+ },
+ {
+ "c": "d",
+ },
+ },
+ },
+ {
+ `---
+a:
+ b:
+c: d
+`,
+ map[string]interface{}{
+ "a": map[string]interface{}{
+ "b": nil,
+ },
+ "c": "d",
+ },
+ },
+ {
+ `---
+a:
+b:
+c:
+`,
+ map[string]interface{}{
+ "a": nil,
+ "b": nil,
+ "c": nil,
+ },
+ },
+ {
+ `---
+a: go test ./...
+b:
+c:
+`,
+ map[string]interface{}{
+ "a": "go test ./...",
+ "b": nil,
+ "c": nil,
+ },
+ },
+ {
+ `---
+a: |
+ hello
+ ...
+ world
+b:
+c:
+`,
+ map[string]interface{}{
+ "a": "hello\n...\nworld\n",
+ "b": nil,
+ "c": nil,
+ },
+ },
+
+ // Multi bytes
+ {
+ "v: あいうえお\nv2: かきくけこ",
+ map[string]string{"v": "あいうえお", "v2": "かきくけこ"},
+ },
+ }
+ for _, test := range tests {
+ t.Run(test.source, func(t *testing.T) {
+ buf := bytes.NewBufferString(test.source)
+ dec := yaml.NewDecoder(buf)
+ typ := reflect.ValueOf(test.value).Type()
+ value := reflect.New(typ)
+ if err := dec.Decode(value.Interface()); err != nil {
+ if err == io.EOF {
+ return
+ }
+ t.Fatalf("%s: %+v", test.source, err)
+ }
+ actual := fmt.Sprintf("%+v", value.Elem().Interface())
+ expect := fmt.Sprintf("%+v", test.value)
+ if actual != expect {
+ t.Fatalf("failed to test [%s], actual=[%s], expect=[%s]", test.source, actual, expect)
+ }
+ })
+ }
+}
+
+func TestDecoder_TypeConversionError(t *testing.T) {
+ t.Run("type conversion for struct", func(t *testing.T) {
+ type T struct {
+ A int
+ B uint
+ C float32
+ D bool
+ }
+ type U struct {
+ *T `yaml:",inline"`
+ }
+ t.Run("string to int", func(t *testing.T) {
+ var v T
+ err := yaml.Unmarshal([]byte(`a: str`), &v)
+ if err == nil {
+ t.Fatal("expected to error")
+ }
+ msg := "cannot unmarshal string into Go struct field T.A of type int"
+ if !strings.Contains(err.Error(), msg) {
+ t.Fatalf("expected error message: %s to contain: %s", err.Error(), msg)
+ }
+ })
+ t.Run("string to bool", func(t *testing.T) {
+ var v T
+ err := yaml.Unmarshal([]byte(`d: str`), &v)
+ if err == nil {
+ t.Fatal("expected to error")
+ }
+ msg := "cannot unmarshal string into Go struct field T.D of type bool"
+ if !strings.Contains(err.Error(), msg) {
+ t.Fatalf("expected error message: %s to contain: %s", err.Error(), msg)
+ }
+ })
+ t.Run("string to int at inline", func(t *testing.T) {
+ var v U
+ err := yaml.Unmarshal([]byte(`a: str`), &v)
+ if err == nil {
+ t.Fatal("expected to error")
+ }
+ msg := "cannot unmarshal string into Go struct field U.T.A of type int"
+ if !strings.Contains(err.Error(), msg) {
+ t.Fatalf("expected error message: %s to contain: %s", err.Error(), msg)
+ }
+ })
+ })
+ t.Run("type conversion for array", func(t *testing.T) {
+ t.Run("string to int", func(t *testing.T) {
+ var v map[string][]int
+ err := yaml.Unmarshal([]byte(`v: [A,1,C]`), &v)
+ if err == nil {
+ t.Fatal("expected to error")
+ }
+ msg := "cannot unmarshal string into Go value of type int"
+ if !strings.Contains(err.Error(), msg) {
+ t.Fatalf("expected error message: %s to contain: %s", err.Error(), msg)
+ }
+ if len(v) == 0 || len(v["v"]) == 0 {
+ t.Fatal("failed to decode value")
+ }
+ if v["v"][0] != 1 {
+ t.Fatal("failed to decode value")
+ }
+ })
+ t.Run("string to int", func(t *testing.T) {
+ var v map[string][]int
+ err := yaml.Unmarshal([]byte("v:\n - A\n - 1\n - C"), &v)
+ if err == nil {
+ t.Fatal("expected to error")
+ }
+ msg := "cannot unmarshal string into Go value of type int"
+ if !strings.Contains(err.Error(), msg) {
+ t.Fatalf("expected error message: %s to contain: %s", err.Error(), msg)
+ }
+ if len(v) == 0 || len(v["v"]) == 0 {
+ t.Fatal("failed to decode value")
+ }
+ if v["v"][0] != 1 {
+ t.Fatal("failed to decode value")
+ }
+ })
+ })
+ t.Run("overflow error", func(t *testing.T) {
+ t.Run("negative number to uint", func(t *testing.T) {
+ var v map[string]uint
+ err := yaml.Unmarshal([]byte("v: -42"), &v)
+ if err == nil {
+ t.Fatal("expected to error")
+ }
+ msg := "cannot unmarshal -42 into Go value of type uint ( overflow )"
+ if !strings.Contains(err.Error(), msg) {
+ t.Fatalf("expected error message: %s to contain: %s", err.Error(), msg)
+ }
+ if v["v"] != 0 {
+ t.Fatal("failed to decode value")
+ }
+ })
+ t.Run("negative number to uint64", func(t *testing.T) {
+ var v map[string]uint64
+ err := yaml.Unmarshal([]byte("v: -4294967296"), &v)
+ if err == nil {
+ t.Fatal("expected to error")
+ }
+ msg := "cannot unmarshal -4294967296 into Go value of type uint64 ( overflow )"
+ if !strings.Contains(err.Error(), msg) {
+ t.Fatalf("expected error message: %s to contain: %s", err.Error(), msg)
+ }
+ if v["v"] != 0 {
+ t.Fatal("failed to decode value")
+ }
+ })
+ t.Run("larger number for int32", func(t *testing.T) {
+ var v map[string]int32
+ err := yaml.Unmarshal([]byte("v: 4294967297"), &v)
+ if err == nil {
+ t.Fatal("expected to error")
+ }
+ msg := "cannot unmarshal 4294967297 into Go value of type int32 ( overflow )"
+ if !strings.Contains(err.Error(), msg) {
+ t.Fatalf("expected error message: %s to contain: %s", err.Error(), msg)
+ }
+ if v["v"] != 0 {
+ t.Fatal("failed to decode value")
+ }
+ })
+ t.Run("larger number for int8", func(t *testing.T) {
+ var v map[string]int8
+ err := yaml.Unmarshal([]byte("v: 128"), &v)
+ if err == nil {
+ t.Fatal("expected to error")
+ }
+ msg := "cannot unmarshal 128 into Go value of type int8 ( overflow )"
+ if !strings.Contains(err.Error(), msg) {
+ t.Fatalf("expected error message: %s to contain: %s", err.Error(), msg)
+ }
+ if v["v"] != 0 {
+ t.Fatal("failed to decode value")
+ }
+ })
+ })
+ t.Run("type conversion for time", func(t *testing.T) {
+ type T struct {
+ A time.Time
+ B time.Duration
+ }
+ t.Run("int to time", func(t *testing.T) {
+ var v T
+ err := yaml.Unmarshal([]byte(`a: 123`), &v)
+ if err == nil {
+ t.Fatal("expected to error")
+ }
+ msg := "cannot unmarshal uint64 into Go struct field T.A of type time.Time"
+ if !strings.Contains(err.Error(), msg) {
+ t.Fatalf("expected error message: %s to contain: %s", err.Error(), msg)
+ }
+ })
+ t.Run("string to duration", func(t *testing.T) {
+ var v T
+ err := yaml.Unmarshal([]byte(`b: str`), &v)
+ if err == nil {
+ t.Fatal("expected to error")
+ }
+ msg := `time: invalid duration "str"`
+ if !strings.Contains(err.Error(), msg) {
+ t.Fatalf("expected error message: %s to contain: %s", err.Error(), msg)
+ }
+ })
+ t.Run("int to duration", func(t *testing.T) {
+ var v T
+ err := yaml.Unmarshal([]byte(`b: 10`), &v)
+ if err == nil {
+ t.Fatal("expected to error")
+ }
+ msg := "cannot unmarshal uint64 into Go struct field T.B of type time.Duration"
+ if !strings.Contains(err.Error(), msg) {
+ t.Fatalf("expected error message: %s to contain: %s", err.Error(), msg)
+ }
+ })
+ })
+}
+
+func TestDecoder_AnchorReferenceDirs(t *testing.T) {
+ buf := bytes.NewBufferString("a: *a\n")
+ dec := yaml.NewDecoder(buf, yaml.ReferenceDirs("testdata"))
+ var v struct {
+ A struct {
+ B int
+ C string
+ }
+ }
+ if err := dec.Decode(&v); err != nil {
+ t.Fatalf("%+v", err)
+ }
+ if v.A.B != 1 {
+ t.Fatal("failed to decode by reference dirs")
+ }
+ if v.A.C != "hello" {
+ t.Fatal("failed to decode by reference dirs")
+ }
+}
+
+func TestDecoder_AnchorReferenceDirsRecursive(t *testing.T) {
+ buf := bytes.NewBufferString("a: *a\n")
+ dec := yaml.NewDecoder(
+ buf,
+ yaml.RecursiveDir(true),
+ yaml.ReferenceDirs("testdata"),
+ )
+ var v struct {
+ A struct {
+ B int
+ C string
+ }
+ }
+ if err := dec.Decode(&v); err != nil {
+ t.Fatalf("%+v", err)
+ }
+ if v.A.B != 1 {
+ t.Fatal("failed to decode by reference dirs")
+ }
+ if v.A.C != "hello" {
+ t.Fatal("failed to decode by reference dirs")
+ }
+}
+
+func TestDecoder_AnchorFiles(t *testing.T) {
+ buf := bytes.NewBufferString("a: *a\n")
+ dec := yaml.NewDecoder(buf, yaml.ReferenceFiles("testdata/anchor.yml"))
+ var v struct {
+ A struct {
+ B int
+ C string
+ }
+ }
+ if err := dec.Decode(&v); err != nil {
+ t.Fatalf("%+v", err)
+ }
+ if v.A.B != 1 {
+ t.Fatal("failed to decode by reference dirs")
+ }
+ if v.A.C != "hello" {
+ t.Fatal("failed to decode by reference dirs")
+ }
+}
+
+func TestDecodeWithMergeKey(t *testing.T) {
+ yml := `
+a: &a
+ b: 1
+ c: hello
+items:
+- <<: *a
+- <<: *a
+ c: world
+`
+ type Item struct {
+ B int
+ C string
+ }
+ type T struct {
+ Items []*Item
+ }
+ buf := bytes.NewBufferString(yml)
+ dec := yaml.NewDecoder(buf)
+ var v T
+ if err := dec.Decode(&v); err != nil {
+ t.Fatalf("%+v", err)
+ }
+ if len(v.Items) != 2 {
+ t.Fatal("failed to decode with merge key")
+ }
+ if v.Items[0].B != 1 || v.Items[0].C != "hello" {
+ t.Fatal("failed to decode with merge key")
+ }
+ if v.Items[1].B != 1 || v.Items[1].C != "world" {
+ t.Fatal("failed to decode with merge key")
+ }
+ t.Run("decode with interface{}", func(t *testing.T) {
+ buf := bytes.NewBufferString(yml)
+ dec := yaml.NewDecoder(buf)
+ var v interface{}
+ if err := dec.Decode(&v); err != nil {
+ t.Fatalf("%+v", err)
+ }
+ items := v.(map[string]interface{})["items"].([]interface{})
+ if len(items) != 2 {
+ t.Fatal("failed to decode with merge key")
+ }
+ b0 := items[0].(map[string]interface{})["b"]
+ if _, ok := b0.(uint64); !ok {
+ t.Fatal("failed to decode with merge key")
+ }
+ if b0.(uint64) != 1 {
+ t.Fatal("failed to decode with merge key")
+ }
+ c0 := items[0].(map[string]interface{})["c"]
+ if _, ok := c0.(string); !ok {
+ t.Fatal("failed to decode with merge key")
+ }
+ if c0.(string) != "hello" {
+ t.Fatal("failed to decode with merge key")
+ }
+ b1 := items[1].(map[string]interface{})["b"]
+ if _, ok := b1.(uint64); !ok {
+ t.Fatal("failed to decode with merge key")
+ }
+ if b1.(uint64) != 1 {
+ t.Fatal("failed to decode with merge key")
+ }
+ c1 := items[1].(map[string]interface{})["c"]
+ if _, ok := c1.(string); !ok {
+ t.Fatal("failed to decode with merge key")
+ }
+ if c1.(string) != "world" {
+ t.Fatal("failed to decode with merge key")
+ }
+ })
+ t.Run("decode with map", func(t *testing.T) {
+ var v struct {
+ Items []map[string]interface{}
+ }
+ buf := bytes.NewBufferString(yml)
+ dec := yaml.NewDecoder(buf)
+ if err := dec.Decode(&v); err != nil {
+ t.Fatalf("%+v", err)
+ }
+ if len(v.Items) != 2 {
+ t.Fatal("failed to decode with merge key")
+ }
+ b0 := v.Items[0]["b"]
+ if _, ok := b0.(uint64); !ok {
+ t.Fatal("failed to decode with merge key")
+ }
+ if b0.(uint64) != 1 {
+ t.Fatal("failed to decode with merge key")
+ }
+ c0 := v.Items[0]["c"]
+ if _, ok := c0.(string); !ok {
+ t.Fatal("failed to decode with merge key")
+ }
+ if c0.(string) != "hello" {
+ t.Fatal("failed to decode with merge key")
+ }
+ b1 := v.Items[1]["b"]
+ if _, ok := b1.(uint64); !ok {
+ t.Fatal("failed to decode with merge key")
+ }
+ if b1.(uint64) != 1 {
+ t.Fatal("failed to decode with merge key")
+ }
+ c1 := v.Items[1]["c"]
+ if _, ok := c1.(string); !ok {
+ t.Fatal("failed to decode with merge key")
+ }
+ if c1.(string) != "world" {
+ t.Fatal("failed to decode with merge key")
+ }
+ })
+}
+
+func TestDecoder_Inline(t *testing.T) {
+ type Base struct {
+ A int
+ B string
+ }
+ yml := `---
+a: 1
+b: hello
+c: true
+`
+ var v struct {
+ *Base `yaml:",inline"`
+ C bool
+ }
+ if err := yaml.NewDecoder(strings.NewReader(yml)).Decode(&v); err != nil {
+ t.Fatalf("%+v", err)
+ }
+ if v.A != 1 {
+ t.Fatal("failed to decode with inline key")
+ }
+ if v.B != "hello" {
+ t.Fatal("failed to decode with inline key")
+ }
+ if !v.C {
+ t.Fatal("failed to decode with inline key")
+ }
+
+ t.Run("multiple inline with strict", func(t *testing.T) {
+ type Base struct {
+ A int
+ B string
+ }
+ type Base2 struct {
+ Base *Base `yaml:",inline"`
+ }
+ yml := `---
+a: 1
+b: hello
+`
+ var v struct {
+ Base2 *Base2 `yaml:",inline"`
+ }
+ if err := yaml.NewDecoder(strings.NewReader(yml), yaml.Strict()).Decode(&v); err != nil {
+ t.Fatalf("%+v", err)
+ }
+ if v.Base2.Base.A != 1 {
+ t.Fatal("failed to decode with inline key")
+ }
+ if v.Base2.Base.B != "hello" {
+ t.Fatal("failed to decode with inline key")
+ }
+ })
+}
+
+func TestDecoder_InlineAndConflictKey(t *testing.T) {
+ type Base struct {
+ A int
+ B string
+ }
+ yml := `---
+a: 1
+b: hello
+c: true
+`
+ var v struct {
+ *Base `yaml:",inline"`
+ A int
+ C bool
+ }
+ if err := yaml.NewDecoder(strings.NewReader(yml)).Decode(&v); err != nil {
+ t.Fatalf("%+v", err)
+ }
+ if v.A != 1 {
+ t.Fatal("failed to decode with inline key")
+ }
+ if v.B != "hello" {
+ t.Fatal("failed to decode with inline key")
+ }
+ if !v.C {
+ t.Fatal("failed to decode with inline key")
+ }
+ if v.Base.A != 0 {
+ t.Fatal("failed to decode with inline key")
+ }
+}
+
+func TestDecoder_InlineAndWrongTypeStrict(t *testing.T) {
+ type Base struct {
+ A int
+ B string
+ }
+ yml := `---
+a: notanint
+b: hello
+c: true
+`
+ var v struct {
+ *Base `yaml:",inline"`
+ C bool
+ }
+ err := yaml.NewDecoder(strings.NewReader(yml), yaml.Strict()).Decode(&v)
+ if err == nil {
+ t.Fatalf("expected error")
+ }
+
+ //TODO: properly check if errors are colored/have source
+ t.Logf("%s", err)
+ t.Logf("%s", yaml.FormatError(err, true, false))
+ t.Logf("%s", yaml.FormatError(err, false, true))
+ t.Logf("%s", yaml.FormatError(err, true, true))
+}
+
+func TestDecoder_InvalidCases(t *testing.T) {
+ const src = `---
+a:
+- b
+ c: d
+`
+ var v struct {
+ A []string
+ }
+ err := yaml.NewDecoder(strings.NewReader(src)).Decode(&v)
+ if err == nil {
+ t.Fatalf("expected error")
+ }
+
+ if err.Error() != yaml.FormatError(err, false, true) {
+ t.Logf("err.Error() = %s", err.Error())
+ t.Logf("yaml.FormatError(err, false, true) = %s", yaml.FormatError(err, false, true))
+ t.Fatal(`err.Error() should match yaml.FormatError(err, false, true)`)
+ }
+
+ //TODO: properly check if errors are colored/have source
+ t.Logf("%s", err)
+ t.Logf("%s", yaml.FormatError(err, true, false))
+ t.Logf("%s", yaml.FormatError(err, false, true))
+ t.Logf("%s", yaml.FormatError(err, true, true))
+}
+
+func TestDecoder_JSONTags(t *testing.T) {
+ var v struct {
+ A string `json:"a_json"` // no YAML tag
+ B string `json:"b_json" yaml:"b_yaml"` // both tags
+ }
+
+ const src = `---
+a_json: a_json_value
+b_json: b_json_value
+b_yaml: b_yaml_value
+`
+ if err := yaml.NewDecoder(strings.NewReader(src)).Decode(&v); err != nil {
+ t.Fatalf(`parsing should succeed: %s`, err)
+ }
+
+ if v.A != "a_json_value" {
+ t.Fatalf("v.A should be `a_json_value`, got `%s`", v.A)
+ }
+
+ if v.B != "b_yaml_value" {
+ t.Fatalf("v.B should be `b_yaml_value`, got `%s`", v.B)
+ }
+}
+
+func TestDecoder_DisallowUnknownField(t *testing.T) {
+ t.Run("different level keys with same name", func(t *testing.T) {
+ var v struct {
+ C Child `yaml:"c"`
+ }
+ yml := `---
+b: 1
+c:
+ b: 1
+`
+
+ err := yaml.NewDecoder(strings.NewReader(yml), yaml.DisallowUnknownField()).Decode(&v)
+ if err == nil {
+ t.Fatalf("error expected")
+ }
+ })
+ t.Run("inline", func(t *testing.T) {
+ var v struct {
+ *Child `yaml:",inline"`
+ A string `yaml:"a"`
+ }
+ yml := `---
+a: a
+b: 1
+`
+
+ if err := yaml.NewDecoder(strings.NewReader(yml), yaml.DisallowUnknownField()).Decode(&v); err != nil {
+ t.Fatalf(`parsing should succeed: %s`, err)
+ }
+ if v.A != "a" {
+ t.Fatalf("v.A should be `a`, got `%s`", v.A)
+ }
+ if v.B != 1 {
+ t.Fatalf("v.B should be 1, got %d", v.B)
+ }
+ if v.C != 0 {
+ t.Fatalf("v.C should be 0, got %d", v.C)
+ }
+ })
+ t.Run("list", func(t *testing.T) {
+ type C struct {
+ Child `yaml:",inline"`
+ }
+
+ var v struct {
+ Children []C `yaml:"children"`
+ }
+
+ yml := `---
+children:
+- b: 1
+- b: 2
+`
+
+ if err := yaml.NewDecoder(strings.NewReader(yml), yaml.DisallowUnknownField()).Decode(&v); err != nil {
+ t.Fatalf(`parsing should succeed: %s`, err)
+ }
+
+ if len(v.Children) != 2 {
+ t.Fatalf(`len(v.Children) should be 2, got %d`, len(v.Children))
+ }
+
+ if v.Children[0].B != 1 {
+ t.Fatalf(`v.Children[0].B should be 1, got %d`, v.Children[0].B)
+ }
+
+ if v.Children[1].B != 2 {
+ t.Fatalf(`v.Children[1].B should be 2, got %d`, v.Children[1].B)
+ }
+ })
+}
+
+func TestDecoder_DisallowDuplicateKey(t *testing.T) {
+ yml := `
+a: b
+a: c
+`
+ expected := `
+[3:1] duplicate key "a"
+ 2 | a: b
+> 3 | a: c
+ ^
+`
+ t.Run("map", func(t *testing.T) {
+ var v map[string]string
+ err := yaml.NewDecoder(strings.NewReader(yml), yaml.DisallowDuplicateKey()).Decode(&v)
+ if err == nil {
+ t.Fatal("decoding should fail")
+ }
+ actual := "\n" + err.Error()
+ if expected != actual {
+ t.Fatalf("expected:[%s] actual:[%s]", expected, actual)
+ }
+ })
+ t.Run("struct", func(t *testing.T) {
+ var v struct {
+ A string
+ }
+ err := yaml.NewDecoder(strings.NewReader(yml), yaml.DisallowDuplicateKey()).Decode(&v)
+ if err == nil {
+ t.Fatal("decoding should fail")
+ }
+ actual := "\n" + err.Error()
+ if expected != actual {
+ t.Fatalf("expected:[%s] actual:[%s]", expected, actual)
+ }
+ })
+}
+
+func TestDecoder_DefaultValues(t *testing.T) {
+ v := struct {
+ A string `yaml:"a"`
+ B string `yaml:"b"`
+ c string // private
+ D struct {
+ E string `yaml:"e"`
+ F struct {
+ G string `yaml:"g"`
+ } `yaml:"f"`
+ H struct {
+ I string `yaml:"i"`
+ } `yaml:",inline"`
+ } `yaml:"d"`
+ J struct {
+ K string `yaml:"k"`
+ L struct {
+ M string `yaml:"m"`
+ } `yaml:"l"`
+ N struct {
+ O string `yaml:"o"`
+ } `yaml:",inline"`
+ } `yaml:",inline"`
+ P struct {
+ Q string `yaml:"q"`
+ R struct {
+ S string `yaml:"s"`
+ } `yaml:"r"`
+ T struct {
+ U string `yaml:"u"`
+ } `yaml:",inline"`
+ } `yaml:"p"`
+ V struct {
+ W string `yaml:"w"`
+ X struct {
+ Y string `yaml:"y"`
+ } `yaml:"x"`
+ Z struct {
+ Ä string `yaml:"ä"`
+ } `yaml:",inline"`
+ } `yaml:",inline"`
+ }{
+ B: "defaultBValue",
+ c: "defaultCValue",
+ }
+
+ v.D.E = "defaultEValue"
+ v.D.F.G = "defaultGValue"
+ v.D.H.I = "defaultIValue"
+ v.J.K = "defaultKValue"
+ v.J.L.M = "defaultMValue"
+ v.J.N.O = "defaultOValue"
+ v.P.R.S = "defaultSValue"
+ v.P.T.U = "defaultUValue"
+ v.V.X.Y = "defaultYValue"
+ v.V.Z.Ä = "defaultÄValue"
+
+ const src = `---
+a: a_value
+p:
+ q: q_value
+w: w_value
+`
+ if err := yaml.NewDecoder(strings.NewReader(src)).Decode(&v); err != nil {
+ t.Fatalf(`parsing should succeed: %s`, err)
+ }
+ if v.A != "a_value" {
+ t.Fatalf("v.A should be `a_value`, got `%s`", v.A)
+ }
+
+ if v.B != "defaultBValue" {
+ t.Fatalf("v.B should be `defaultValue`, got `%s`", v.B)
+ }
+
+ if v.c != "defaultCValue" {
+ t.Fatalf("v.c should be `defaultCValue`, got `%s`", v.c)
+ }
+
+ if v.D.E != "defaultEValue" {
+ t.Fatalf("v.D.E should be `defaultEValue`, got `%s`", v.D.E)
+ }
+
+ if v.D.F.G != "defaultGValue" {
+ t.Fatalf("v.D.F.G should be `defaultGValue`, got `%s`", v.D.F.G)
+ }
+
+ if v.D.H.I != "defaultIValue" {
+ t.Fatalf("v.D.H.I should be `defaultIValue`, got `%s`", v.D.H.I)
+ }
+
+ if v.J.K != "defaultKValue" {
+ t.Fatalf("v.J.K should be `defaultKValue`, got `%s`", v.J.K)
+ }
+
+ if v.J.L.M != "defaultMValue" {
+ t.Fatalf("v.J.L.M should be `defaultMValue`, got `%s`", v.J.L.M)
+ }
+
+ if v.J.N.O != "defaultOValue" {
+ t.Fatalf("v.J.N.O should be `defaultOValue`, got `%s`", v.J.N.O)
+ }
+
+ if v.P.Q != "q_value" {
+ t.Fatalf("v.P.Q should be `q_value`, got `%s`", v.P.Q)
+ }
+
+ if v.P.R.S != "defaultSValue" {
+ t.Fatalf("v.P.R.S should be `defaultSValue`, got `%s`", v.P.R.S)
+ }
+
+ if v.P.T.U != "defaultUValue" {
+ t.Fatalf("v.P.T.U should be `defaultUValue`, got `%s`", v.P.T.U)
+ }
+
+ if v.V.W != "w_value" {
+ t.Fatalf("v.V.W should be `w_value`, got `%s`", v.V.W)
+ }
+
+ if v.V.X.Y != "defaultYValue" {
+ t.Fatalf("v.V.X.Y should be `defaultYValue`, got `%s`", v.V.X.Y)
+ }
+
+ if v.V.Z.Ä != "defaultÄValue" {
+ t.Fatalf("v.V.Z.Ä should be `defaultÄValue`, got `%s`", v.V.Z.Ä)
+ }
+}
+
+func Example_YAMLTags() {
+ yml := `---
+foo: 1
+bar: c
+A: 2
+B: d
+`
+ var v struct {
+ A int `yaml:"foo" json:"A"`
+ B string `yaml:"bar" json:"B"`
+ }
+ if err := yaml.Unmarshal([]byte(yml), &v); err != nil {
+ log.Fatal(err)
+ }
+ fmt.Println(v.A)
+ fmt.Println(v.B)
+ // OUTPUT:
+ // 1
+ // c
+}
+
+type useJSONUnmarshalerTest struct {
+ s string
+}
+
+func (t *useJSONUnmarshalerTest) UnmarshalJSON(b []byte) error {
+ s, err := strconv.Unquote(string(b))
+ if err != nil {
+ return err
+ }
+ t.s = s
+ return nil
+}
+
+func TestDecoder_UseJSONUnmarshaler(t *testing.T) {
+ var v useJSONUnmarshalerTest
+ if err := yaml.UnmarshalWithOptions([]byte(`"a"`), &v, yaml.UseJSONUnmarshaler()); err != nil {
+ t.Fatal(err)
+ }
+ if v.s != "a" {
+ t.Fatalf("unexpected decoded value: %s", v.s)
+ }
+}
+
+func TestDecoder_CustomUnmarshaler(t *testing.T) {
+ t.Run("override struct type", func(t *testing.T) {
+ type T struct {
+ Foo string `yaml:"foo"`
+ }
+ src := []byte(`foo: "bar"`)
+ var v T
+ if err := yaml.UnmarshalWithOptions(src, &v, yaml.CustomUnmarshaler[T](func(dst *T, b []byte) error {
+ if !bytes.Equal(src, b) {
+ t.Fatalf("failed to get decode target buffer. expected %q but got %q", src, b)
+ }
+ var v T
+ if err := yaml.Unmarshal(b, &v); err != nil {
+ return err
+ }
+ if v.Foo != "bar" {
+ t.Fatal("failed to decode")
+ }
+ dst.Foo = "bazbaz" // assign another value to target
+ return nil
+ })); err != nil {
+ t.Fatal(err)
+ }
+ if v.Foo != "bazbaz" {
+ t.Fatalf("failed to switch to custom unmarshaler. got: %v", v.Foo)
+ }
+ })
+ t.Run("override bytes type", func(t *testing.T) {
+ type T struct {
+ Foo []byte `yaml:"foo"`
+ }
+ src := []byte(`foo: "bar"`)
+ var v T
+ if err := yaml.UnmarshalWithOptions(src, &v, yaml.CustomUnmarshaler[[]byte](func(dst *[]byte, b []byte) error {
+ if !bytes.Equal(b, []byte(`"bar"`)) {
+ t.Fatalf("failed to get target buffer: %q", b)
+ }
+ *dst = []byte("bazbaz")
+ return nil
+ })); err != nil {
+ t.Fatal(err)
+ }
+ if !bytes.Equal(v.Foo, []byte("bazbaz")) {
+ t.Fatalf("failed to switch to custom unmarshaler. got: %q", v.Foo)
+ }
+ })
+}
+
+type unmarshalContext struct {
+ v int
+}
+
+func (c *unmarshalContext) UnmarshalYAML(ctx context.Context, b []byte) error {
+ v, ok := ctx.Value("k").(int)
+ if !ok {
+ return fmt.Errorf("cannot get valid context")
+ }
+ if v != 1 {
+ return fmt.Errorf("cannot get valid context")
+ }
+ if string(b) != "1" {
+ return fmt.Errorf("cannot get valid bytes")
+ }
+ c.v = v
+ return nil
+}
+
+func Test_UnmarshalerContext(t *testing.T) {
+ ctx := context.WithValue(context.Background(), "k", 1)
+ var v unmarshalContext
+ if err := yaml.UnmarshalContext(ctx, []byte(`1`), &v); err != nil {
+ t.Fatalf("%+v", err)
+ }
+ if v.v != 1 {
+ t.Fatal("cannot call UnmarshalYAML")
+ }
+}
+
+func TestDecoder_DecodeFromNode(t *testing.T) {
+ t.Run("has reference", func(t *testing.T) {
+ str := `
+anchor: &map
+ text: hello
+map: *map`
+ var buf bytes.Buffer
+ dec := yaml.NewDecoder(&buf)
+ f, err := parser.ParseBytes([]byte(str), 0)
+ if err != nil {
+ t.Fatalf("failed to parse: %s", err)
+ }
+ type T struct {
+ Map map[string]string
+ }
+ var v T
+ if err := dec.DecodeFromNode(f.Docs[0].Body, &v); err != nil {
+ t.Fatalf("failed to decode: %s", err)
+ }
+ actual := fmt.Sprintf("%+v", v)
+ expect := fmt.Sprintf("%+v", T{map[string]string{"text": "hello"}})
+ if actual != expect {
+ t.Fatalf("actual=[%s], expect=[%s]", actual, expect)
+ }
+ })
+ t.Run("with reference option", func(t *testing.T) {
+ anchor := strings.NewReader(`
+map: &map
+ text: hello`)
+ var buf bytes.Buffer
+ dec := yaml.NewDecoder(&buf, yaml.ReferenceReaders(anchor))
+ f, err := parser.ParseBytes([]byte("map: *map"), 0)
+ if err != nil {
+ t.Fatalf("failed to parse: %s", err)
+ }
+ type T struct {
+ Map map[string]string
+ }
+ var v T
+ if err := dec.DecodeFromNode(f.Docs[0].Body, &v); err != nil {
+ t.Fatalf("failed to decode: %s", err)
+ }
+ actual := fmt.Sprintf("%+v", v)
+ expect := fmt.Sprintf("%+v", T{map[string]string{"text": "hello"}})
+ if actual != expect {
+ t.Fatalf("actual=[%s], expect=[%s]", actual, expect)
+ }
+ })
+ t.Run("value is not pointer", func(t *testing.T) {
+ var buf bytes.Buffer
+ var v bool
+ err := yaml.NewDecoder(&buf).DecodeFromNode(nil, v)
+ if !xerrors.Is(err, errors.ErrDecodeRequiredPointerType) {
+ t.Fatalf("unexpected error: %s", err)
+ }
+ })
+}
+
+func Example_JSONTags() {
+ yml := `---
+foo: 1
+bar: c
+`
+ var v struct {
+ A int `json:"foo"`
+ B string `json:"bar"`
+ }
+ if err := yaml.Unmarshal([]byte(yml), &v); err != nil {
+ log.Fatal(err)
+ }
+ fmt.Println(v.A)
+ fmt.Println(v.B)
+ // OUTPUT:
+ // 1
+ // c
+}
+
+func Example_DisallowUnknownField() {
+ var v struct {
+ A string `yaml:"simple"`
+ C string `yaml:"complicated"`
+ }
+
+ const src = `---
+simple: string
+complecated: string
+`
+ err := yaml.NewDecoder(strings.NewReader(src), yaml.DisallowUnknownField()).Decode(&v)
+ fmt.Printf("%v\n", err)
+
+ // OUTPUT:
+ // [3:1] unknown field "complecated"
+ // 1 | ---
+ // 2 | simple: string
+ // > 3 | complecated: string
+ // ^
+}
+
+func Example_Unmarshal_Node() {
+ f, err := parser.ParseBytes([]byte("text: node example"), 0)
+ if err != nil {
+ panic(err)
+ }
+ var v struct {
+ Text string `yaml:"text"`
+ }
+ if err := yaml.NodeToValue(f.Docs[0].Body, &v); err != nil {
+ panic(err)
+ }
+ fmt.Println(v.Text)
+ // OUTPUT:
+ // node example
+}
+
+type unmarshalableYAMLStringValue string
+
+func (v *unmarshalableYAMLStringValue) UnmarshalYAML(b []byte) error {
+ var s string
+ if err := yaml.Unmarshal(b, &s); err != nil {
+ return err
+ }
+ *v = unmarshalableYAMLStringValue(s)
+ return nil
+}
+
+type unmarshalableTextStringValue string
+
+func (v *unmarshalableTextStringValue) UnmarshalText(b []byte) error {
+ *v = unmarshalableTextStringValue(string(b))
+ return nil
+}
+
+type unmarshalableStringContainer struct {
+ A unmarshalableYAMLStringValue `yaml:"a"`
+ B unmarshalableTextStringValue `yaml:"b"`
+}
+
+func TestUnmarshalableString(t *testing.T) {
+ t.Run("empty string", func(t *testing.T) {
+ t.Parallel()
+ yml := `
+a: ""
+b: ""
+`
+ var container unmarshalableStringContainer
+ if err := yaml.Unmarshal([]byte(yml), &container); err != nil {
+ t.Fatalf("failed to unmarshal %v", err)
+ }
+ if container.A != "" {
+ t.Fatalf("expected empty string, but %q is set", container.A)
+ }
+ if container.B != "" {
+ t.Fatalf("expected empty string, but %q is set", container.B)
+ }
+ })
+ t.Run("filled string", func(t *testing.T) {
+ t.Parallel()
+ yml := `
+a: "aaa"
+b: "bbb"
+`
+ var container unmarshalableStringContainer
+ if err := yaml.Unmarshal([]byte(yml), &container); err != nil {
+ t.Fatalf("failed to unmarshal %v", err)
+ }
+ if container.A != "aaa" {
+ t.Fatalf("expected \"aaa\", but %q is set", container.A)
+ }
+ if container.B != "bbb" {
+ t.Fatalf("expected \"bbb\", but %q is set", container.B)
+ }
+ })
+ t.Run("single-quoted string", func(t *testing.T) {
+ t.Parallel()
+ yml := `
+a: 'aaa'
+b: 'bbb'
+`
+ var container unmarshalableStringContainer
+ if err := yaml.Unmarshal([]byte(yml), &container); err != nil {
+ t.Fatalf("failed to unmarshal %v", err)
+ }
+ if container.A != "aaa" {
+ t.Fatalf("expected \"aaa\", but %q is set", container.A)
+ }
+ if container.B != "bbb" {
+ t.Fatalf("expected \"aaa\", but %q is set", container.B)
+ }
+ })
+ t.Run("literal", func(t *testing.T) {
+ t.Parallel()
+ yml := `
+a: |
+ a
+ b
+ c
+b: |
+ a
+ b
+ c
+`
+ var container unmarshalableStringContainer
+ if err := yaml.Unmarshal([]byte(yml), &container); err != nil {
+ t.Fatalf("failed to unmarshal %v", err)
+ }
+ if container.A != "a\nb\nc\n" {
+ t.Fatalf("expected \"a\nb\nc\n\", but %q is set", container.A)
+ }
+ if container.B != "a\nb\nc\n" {
+ t.Fatalf("expected \"a\nb\nc\n\", but %q is set", container.B)
+ }
+ })
+ t.Run("anchor/alias", func(t *testing.T) {
+ yml := `
+a: &x 1
+b: *x
+c: &y hello
+d: *y
+`
+ var v struct {
+ A, B, C, D unmarshalableTextStringValue
+ }
+ if err := yaml.Unmarshal([]byte(yml), &v); err != nil {
+ t.Fatal(err)
+ }
+ if v.A != "1" {
+ t.Fatal("failed to unmarshal")
+ }
+ if v.B != "1" {
+ t.Fatal("failed to unmarshal")
+ }
+ if v.C != "hello" {
+ t.Fatal("failed to unmarshal")
+ }
+ if v.D != "hello" {
+ t.Fatal("failed to unmarshal")
+ }
+ })
+ t.Run("net.IP", func(t *testing.T) {
+ yml := `
+a: &a 127.0.0.1
+b: *a
+`
+ var v struct {
+ A, B net.IP
+ }
+ if err := yaml.Unmarshal([]byte(yml), &v); err != nil {
+ t.Fatal(err)
+ }
+ if v.A.String() != net.IPv4(127, 0, 0, 1).String() {
+ t.Fatal("failed to unmarshal")
+ }
+ if v.B.String() != net.IPv4(127, 0, 0, 1).String() {
+ t.Fatal("failed to unmarshal")
+ }
+ })
+ t.Run("quoted map keys", func(t *testing.T) {
+ t.Parallel()
+ yml := `
+a:
+ "b" : 2
+ 'c': true
+`
+ var v struct {
+ A struct {
+ B int
+ C bool
+ }
+ }
+ if err := yaml.Unmarshal([]byte(yml), &v); err != nil {
+ t.Fatalf("failed to unmarshal %v", err)
+ }
+ if v.A.B != 2 {
+ t.Fatalf("expected a.b to equal 2 but was %d", v.A.B)
+ }
+ if !v.A.C {
+ t.Fatal("expected a.c to be true but was false")
+ }
+ })
+}
+
+type unmarshalablePtrStringContainer struct {
+ V *string `yaml:"value"`
+}
+
+func TestUnmarshalablePtrString(t *testing.T) {
+ t.Run("empty string", func(t *testing.T) {
+ t.Parallel()
+ var container unmarshalablePtrStringContainer
+ if err := yaml.Unmarshal([]byte(`value: ""`), &container); err != nil {
+ t.Fatalf("failed to unmarshal %v", err)
+ }
+ if container.V == nil || *container.V != "" {
+ t.Fatalf("expected empty string, but %q is set", *container.V)
+ }
+ })
+
+ t.Run("null", func(t *testing.T) {
+ t.Parallel()
+ var container unmarshalablePtrStringContainer
+ if err := yaml.Unmarshal([]byte(`value: null`), &container); err != nil {
+ t.Fatalf("failed to unmarshal %v", err)
+ }
+ if container.V != (*string)(nil) {
+ t.Fatalf("expected nil, but %q is set", *container.V)
+ }
+ })
+}
+
+type unmarshalableIntValue int
+
+func (v *unmarshalableIntValue) UnmarshalYAML(raw []byte) error {
+ i, err := strconv.Atoi(string(raw))
+ if err != nil {
+ return err
+ }
+ *v = unmarshalableIntValue(i)
+ return nil
+}
+
+type unmarshalableIntContainer struct {
+ V unmarshalableIntValue `yaml:"value"`
+}
+
+func TestUnmarshalableInt(t *testing.T) {
+ t.Run("empty int", func(t *testing.T) {
+ t.Parallel()
+ var container unmarshalableIntContainer
+ if err := yaml.Unmarshal([]byte(``), &container); err != nil {
+ t.Fatalf("failed to unmarshal %v", err)
+ }
+ if container.V != 0 {
+ t.Fatalf("expected empty int, but %d is set", container.V)
+ }
+ })
+ t.Run("filled int", func(t *testing.T) {
+ t.Parallel()
+ var container unmarshalableIntContainer
+ if err := yaml.Unmarshal([]byte(`value: 9`), &container); err != nil {
+ t.Fatalf("failed to unmarshal %v", err)
+ }
+ if container.V != 9 {
+ t.Fatalf("expected 9, but %d is set", container.V)
+ }
+ })
+ t.Run("filled number", func(t *testing.T) {
+ t.Parallel()
+ var container unmarshalableIntContainer
+ if err := yaml.Unmarshal([]byte(`value: 9`), &container); err != nil {
+ t.Fatalf("failed to unmarshal %v", err)
+ }
+ if container.V != 9 {
+ t.Fatalf("expected 9, but %d is set", container.V)
+ }
+ })
+}
+
+type unmarshalablePtrIntContainer struct {
+ V *int `yaml:"value"`
+}
+
+func TestUnmarshalablePtrInt(t *testing.T) {
+ t.Run("empty int", func(t *testing.T) {
+ t.Parallel()
+ var container unmarshalablePtrIntContainer
+ if err := yaml.Unmarshal([]byte(`value: 0`), &container); err != nil {
+ t.Fatalf("failed to unmarshal %v", err)
+ }
+ if container.V == nil || *container.V != 0 {
+ t.Fatalf("expected 0, but %q is set", *container.V)
+ }
+ })
+
+ t.Run("null", func(t *testing.T) {
+ t.Parallel()
+ var container unmarshalablePtrIntContainer
+ if err := yaml.Unmarshal([]byte(`value: null`), &container); err != nil {
+ t.Fatalf("failed to unmarshal %v", err)
+ }
+ if container.V != (*int)(nil) {
+ t.Fatalf("expected nil, but %q is set", *container.V)
+ }
+ })
+}
+
+type literalContainer struct {
+ v string
+}
+
+func (c *literalContainer) UnmarshalYAML(v []byte) error {
+ var lit string
+ if err := yaml.Unmarshal(v, &lit); err != nil {
+ return err
+ }
+ c.v = lit
+ return nil
+}
+
+func TestDecode_Literal(t *testing.T) {
+ yml := `---
+value: |
+ {
+ "key": "value"
+ }
+`
+ var v map[string]*literalContainer
+ if err := yaml.Unmarshal([]byte(yml), &v); err != nil {
+ t.Fatalf("failed to unmarshal %+v", err)
+ }
+ if v["value"] == nil {
+ t.Fatal("failed to unmarshal literal with bytes unmarshaler")
+ }
+ if v["value"].v == "" {
+ t.Fatal("failed to unmarshal literal with bytes unmarshaler")
+ }
+}
+
+func TestDecoder_UseOrderedMap(t *testing.T) {
+ yml := `
+a: b
+c: d
+e:
+ f: g
+ h: i
+j: k
+`
+ var v interface{}
+ if err := yaml.NewDecoder(strings.NewReader(yml), yaml.UseOrderedMap()).Decode(&v); err != nil {
+ t.Fatalf("%+v", err)
+ }
+ if _, ok := v.(yaml.MapSlice); !ok {
+ t.Fatalf("failed to convert to ordered map: %T", v)
+ }
+ bytes, err := yaml.Marshal(v)
+ if err != nil {
+ t.Fatalf("%+v", err)
+ }
+ if string(yml) != "\n"+string(bytes) {
+ t.Fatalf("expected:[%s] actual:[%s]", string(yml), "\n"+string(bytes))
+ }
+}
+
+func TestDecoder_Stream(t *testing.T) {
+ yml := `
+---
+a: b
+c: d
+---
+e: f
+g: h
+---
+i: j
+k: l
+`
+ dec := yaml.NewDecoder(strings.NewReader(yml))
+ values := []map[string]string{}
+ for {
+ var v map[string]string
+ if err := dec.Decode(&v); err != nil {
+ if err == io.EOF {
+ break
+ }
+ t.Fatalf("%+v", err)
+ }
+ values = append(values, v)
+ }
+ if len(values) != 3 {
+ t.Fatal("failed to stream decoding")
+ }
+ if values[0]["a"] != "b" {
+ t.Fatal("failed to stream decoding")
+ }
+ if values[1]["e"] != "f" {
+ t.Fatal("failed to stream decoding")
+ }
+ if values[2]["i"] != "j" {
+ t.Fatal("failed to stream decoding")
+ }
+}
+
+type unmarshalYAMLWithAliasString string
+
+func (v *unmarshalYAMLWithAliasString) UnmarshalYAML(b []byte) error {
+ var s string
+ if err := yaml.Unmarshal(b, &s); err != nil {
+ return err
+ }
+ *v = unmarshalYAMLWithAliasString(s)
+ return nil
+}
+
+type unmarshalYAMLWithAliasMap map[string]interface{}
+
+func (v *unmarshalYAMLWithAliasMap) UnmarshalYAML(b []byte) error {
+ var m map[string]interface{}
+ if err := yaml.Unmarshal(b, &m); err != nil {
+ return err
+ }
+ *v = unmarshalYAMLWithAliasMap(m)
+ return nil
+}
+
+func TestDecoder_UnmarshalYAMLWithAlias(t *testing.T) {
+ type value struct {
+ String unmarshalYAMLWithAliasString
+ Map unmarshalYAMLWithAliasMap
+ }
+ tests := []struct {
+ name string
+ yaml string
+ expectedValue value
+ err string
+ }{
+ {
+ name: "ok",
+ yaml: `
+anchors:
+ w: &w "\"hello\" \"world\""
+ map: &x
+ a: b
+ c: d
+ d: *w
+string: *w
+map:
+ <<: *x
+ e: f
+`,
+ expectedValue: value{
+ String: unmarshalYAMLWithAliasString(`"hello" "world"`),
+ Map: unmarshalYAMLWithAliasMap(map[string]interface{}{
+ "a": "b",
+ "c": "d",
+ "d": `"hello" "world"`,
+ "e": "f",
+ }),
+ },
+ },
+ {
+ name: "unknown alias",
+ yaml: `
+anchors:
+ w: &w "\"hello\" \"world\""
+ map: &x
+ a: b
+ c: d
+ d: *w
+string: *y
+map:
+ <<: *z
+ e: f
+`,
+ err: "cannot find anchor by alias name y",
+ },
+ }
+
+ for _, test := range tests {
+ t.Run(test.name, func(t *testing.T) {
+ var v value
+ err := yaml.Unmarshal([]byte(test.yaml), &v)
+
+ if test.err != "" {
+ if err == nil {
+ t.Fatal("expected to error")
+ }
+ if !strings.Contains(err.Error(), test.err) {
+ t.Fatalf("expected error message: %s to contain: %s", err.Error(), test.err)
+ }
+ } else {
+ if err != nil {
+ t.Fatalf("%+v", err)
+ }
+ if !reflect.DeepEqual(test.expectedValue, v) {
+ t.Fatalf("non matching values:\nexpected[%s]\ngot [%s]", test.expectedValue, v)
+ }
+ }
+ })
+ }
+}
+
+type unmarshalString string
+
+func (u *unmarshalString) UnmarshalYAML(b []byte) error {
+ *u = unmarshalString(string(b))
+ return nil
+}
+
+type unmarshalList struct {
+ v []map[string]unmarshalString
+}
+
+func (u *unmarshalList) UnmarshalYAML(b []byte) error {
+ expected := `
+ - b: c
+ d: |
+ hello
+
+ hello
+ f: g
+ - h: i`
+ actual := "\n" + string(b)
+ if expected != actual {
+ return xerrors.Errorf("unexpected bytes: expected [%q] but got [%q]", expected, actual)
+ }
+ var v []map[string]unmarshalString
+ if err := yaml.Unmarshal(b, &v); err != nil {
+ return err
+ }
+ u.v = v
+ return nil
+}
+
+func TestDecoder_UnmarshalBytesWithSeparatedList(t *testing.T) {
+ yml := `
+a:
+ - b: c
+ d: |
+ hello
+
+ hello
+ f: g
+ - h: i
+`
+ var v struct {
+ A unmarshalList
+ }
+ if err := yaml.Unmarshal([]byte(yml), &v); err != nil {
+ t.Fatal(err)
+ }
+ if len(v.A.v) != 2 {
+ t.Fatalf("failed to unmarshal %+v", v)
+ }
+ if len(v.A.v[0]) != 3 {
+ t.Fatalf("failed to unmarshal %+v", v.A.v[0])
+ }
+ if len(v.A.v[1]) != 1 {
+ t.Fatalf("failed to unmarshal %+v", v.A.v[1])
+ }
+}
+
+func TestDecoder_LiteralWithNewLine(t *testing.T) {
+ type A struct {
+ Node string `yaml:"b"`
+ LastNode string `yaml:"last"`
+ }
+ tests := []A{
+ A{
+ Node: "hello\nworld",
+ },
+ A{
+ Node: "hello\nworld\n",
+ },
+ A{
+ Node: "hello\nworld\n\n",
+ },
+ A{
+ LastNode: "hello\nworld",
+ },
+ A{
+ LastNode: "hello\nworld\n",
+ },
+ A{
+ LastNode: "hello\nworld\n\n",
+ },
+ }
+ // struct(want) -> Marshal -> Unmarshal -> struct(got)
+ for _, want := range tests {
+ bytes, _ := yaml.Marshal(want)
+ got := A{}
+ if err := yaml.Unmarshal(bytes, &got); err != nil {
+ t.Fatal(err)
+ }
+ if want.Node != got.Node {
+ t.Fatalf("expected:%q but got %q", want.Node, got.Node)
+ }
+ if want.LastNode != got.LastNode {
+ t.Fatalf("expected:%q but got %q", want.LastNode, got.LastNode)
+ }
+ }
+}
+
+func TestDecoder_TabCharacterAtRight(t *testing.T) {
+ yml := `
+- a: [2 , 2]
+ b: [2 , 2]
+ c: [2 , 2]`
+ var v []map[string][]int
+ if err := yaml.Unmarshal([]byte(yml), &v); err != nil {
+ t.Fatal(err)
+ }
+ if len(v) != 1 {
+ t.Fatalf("failed to unmarshal %+v", v)
+ }
+ if len(v[0]) != 3 {
+ t.Fatalf("failed to unmarshal %+v", v)
+ }
+}
+
+func TestDecoder_Canonical(t *testing.T) {
+ yml := `
+!!map {
+ ? !!str "explicit":!!str "entry",
+ ? !!str "implicit" : !!str "entry",
+ ? !!null "" : !!null "",
+}
+`
+ var v interface{}
+ if err := yaml.Unmarshal([]byte(yml), &v); err != nil {
+ t.Fatalf("%+v", err)
+ }
+ m, ok := v.(map[string]interface{})
+ if !ok {
+ t.Fatalf("failed to decode canonical yaml: %+v", v)
+ }
+ if m["explicit"] != "entry" {
+ t.Fatalf("failed to decode canonical yaml: %+v", m)
+ }
+ if m["implicit"] != "entry" {
+ t.Fatalf("failed to decode canonical yaml: %+v", m)
+ }
+ if m["null"] != nil {
+ t.Fatalf("failed to decode canonical yaml: %+v", m)
+ }
+}
+
+func TestDecoder_DecodeFromFile(t *testing.T) {
+ yml := `
+a: b
+c: d
+`
+ file, err := parser.ParseBytes([]byte(yml), 0)
+ if err != nil {
+ t.Fatal(err)
+ }
+ var v map[string]string
+ if err := yaml.NewDecoder(file).Decode(&v); err != nil {
+ t.Fatal(err)
+ }
+ if len(v) != 2 {
+ t.Fatal("failed to decode from ast.File")
+ }
+ if v["a"] != "b" {
+ t.Fatal("failed to decode from ast.File")
+ }
+ if v["c"] != "d" {
+ t.Fatal("failed to decode from ast.File")
+ }
+}
+
+func TestDecoder_DecodeWithNode(t *testing.T) {
+ t.Run("abstract node", func(t *testing.T) {
+ type T struct {
+ Text ast.Node `yaml:"text"`
+ }
+ var v T
+ if err := yaml.Unmarshal([]byte(`text: hello`), &v); err != nil {
+ t.Fatalf("%+v", err)
+ }
+ expected := "hello"
+ got := v.Text.String()
+ if expected != got {
+ t.Fatalf("failed to decode to ast.Node: expected %s but got %s", expected, got)
+ }
+ })
+ t.Run("concrete node", func(t *testing.T) {
+ type T struct {
+ Text *ast.StringNode `yaml:"text"`
+ }
+ var v T
+ if err := yaml.Unmarshal([]byte(`text: hello`), &v); err != nil {
+ t.Fatalf("%+v", err)
+ }
+ expected := "hello"
+ got := v.Text.String()
+ if expected != got {
+ t.Fatalf("failed to decode to ast.Node: expected %s but got %s", expected, got)
+ }
+ })
+}
+
+func TestRoundtripAnchorAlias(t *testing.T) {
+ t.Run("irreversible", func(t *testing.T) {
+ type foo struct {
+ K1 string
+ K2 string
+ }
+
+ type bar struct {
+ K1 string
+ K3 string
+ }
+
+ type doc struct {
+ Foo foo
+ Bar bar
+ }
+ yml := `
+foo:
+ <<: &test-anchor
+ k1: "One"
+ k2: "Two"
+
+bar:
+ <<: *test-anchor
+ k3: "Three"
+`
+ var v doc
+ if err := yaml.Unmarshal([]byte(yml), &v); err != nil {
+ t.Fatalf("%+v", err)
+ }
+ bytes, err := yaml.Marshal(v)
+ if err != nil {
+ t.Fatalf("%+v", err)
+ }
+ expected := `
+foo:
+ k1: One
+ k2: Two
+bar:
+ k1: One
+ k3: Three
+`
+ got := "\n" + string(bytes)
+ if expected != got {
+ t.Fatalf("expected:[%s] but got [%s]", expected, got)
+ }
+ })
+ t.Run("reversible", func(t *testing.T) {
+ type TestAnchor struct {
+ K1 string
+ }
+ type foo struct {
+ *TestAnchor `yaml:",inline,alias"`
+ K2 string
+ }
+ type bar struct {
+ *TestAnchor `yaml:",inline,alias"`
+ K3 string
+ }
+ type doc struct {
+ TestAnchor *TestAnchor `yaml:"test-anchor,anchor"`
+ Foo foo
+ Bar bar
+ }
+ yml := `
+test-anchor: &test-anchor
+ k1: One
+foo:
+ <<: *test-anchor
+ k2: Two
+bar:
+ <<: *test-anchor
+ k3: Three
+`
+ var v doc
+ if err := yaml.Unmarshal([]byte(yml), &v); err != nil {
+ t.Fatalf("%+v", err)
+ }
+ bytes, err := yaml.Marshal(v)
+ if err != nil {
+ t.Fatalf("%+v", err)
+ }
+ got := "\n" + string(bytes)
+ if yml != got {
+ t.Fatalf("expected:[%s] but got [%s]", yml, got)
+ }
+ })
+}
+
+func TestUnmarshalMapSliceParallel(t *testing.T) {
+ content := `
+steps:
+ req0:
+ desc: Get /users/1
+ req:
+ /users/1:
+ get: nil
+ test: |
+ current.res.status == 200
+ req1:
+ desc: Get /private
+ req:
+ /private:
+ get: nil
+ test: |
+ current.res.status == 403
+ req2:
+ desc: Get /users
+ req:
+ /users:
+ get: nil
+ test: |
+ current.res.status == 200
+`
+ type mappedSteps struct {
+ Steps yaml.MapSlice `yaml:"steps,omitempty"`
+ }
+ for i := 0; i < 100; i++ {
+ t.Run(fmt.Sprintf("i=%d", i), func(t *testing.T) {
+ t.Parallel()
+ for i := 0; i < 10; i++ {
+ m := mappedSteps{
+ Steps: yaml.MapSlice{},
+ }
+ if err := yaml.Unmarshal([]byte(content), &m); err != nil {
+ t.Fatal(err)
+ }
+ for _, s := range m.Steps {
+ _, ok := s.Value.(map[string]interface{})
+ if !ok {
+ t.Fatal("unexpected error")
+ }
+ }
+ }
+ })
+ }
+}
+
+func TestSameNameInlineStruct(t *testing.T) {
+ type X struct {
+ X float64 `yaml:"x"`
+ }
+
+ type T struct {
+ X `yaml:",inline"`
+ }
+
+ var v T
+ if err := yaml.Unmarshal([]byte(`x: 0.7`), &v); err != nil {
+ t.Fatal(err)
+ }
+ if fmt.Sprint(v.X.X) != "0.7" {
+ t.Fatalf("failed to decode")
+ }
+}
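
For orientation, the strict decoding options exercised by the tests above compose as in the following minimal sketch. It assumes only the vendored import path used throughout this diff; the YAML document and struct are illustrative, not taken from the test suite.

package main

import (
	"fmt"
	"log"
	"strings"

	yaml "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml"
)

func main() {
	// Illustrative document; an unknown or duplicated key would fail decoding
	// once the strict options below are enabled.
	src := `---
name: runn
steps:
  - desc: first step
`
	var v struct {
		Name  string `yaml:"name"`
		Steps []struct {
			Desc string `yaml:"desc"`
		} `yaml:"steps"`
	}
	dec := yaml.NewDecoder(strings.NewReader(src), yaml.DisallowUnknownField(), yaml.DisallowDuplicateKey())
	if err := dec.Decode(&v); err != nil {
		log.Fatal(err)
	}
	fmt.Println(v.Name, v.Steps[0].Desc) // runn first step
}
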
diff --git a/tmpmod/github.com/goccy/go-yaml/encode.go b/tmpmod/github.com/goccy/go-yaml/encode.go
new file mode 100644
index 00000000..aa46066b
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/encode.go
@@ -0,0 +1,875 @@
+package yaml
+
+import (
+ "context"
+ "encoding"
+ "fmt"
+ "io"
+ "math"
+ "reflect"
+ "sort"
+ "strconv"
+ "strings"
+ "time"
+
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/ast"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/internal/errors"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/parser"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/printer"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/token"
+ "golang.org/x/xerrors"
+)
+
+const (
+ // DefaultIndentSpaces is the default number of spaces used for indentation
+ DefaultIndentSpaces = 2
+)
+
+// Encoder writes YAML values to an output stream.
+type Encoder struct {
+ writer io.Writer
+ opts []EncodeOption
+ indent int
+ indentSequence bool
+ singleQuote bool
+ isFlowStyle bool
+ isJSONStyle bool
+ useJSONMarshaler bool
+ anchorCallback func(*ast.AnchorNode, interface{}) error
+ anchorPtrToNameMap map[uintptr]string
+ customMarshalerMap map[reflect.Type]func(interface{}) ([]byte, error)
+ useLiteralStyleIfMultiline bool
+ commentMap map[*Path][]*Comment
+ written bool
+
+ line int
+ column int
+ offset int
+ indentNum int
+ indentLevel int
+}
+
+// NewEncoder returns a new encoder that writes to w.
+// The Encoder should be closed after use to flush all data to w.
+func NewEncoder(w io.Writer, opts ...EncodeOption) *Encoder {
+ return &Encoder{
+ writer: w,
+ opts: opts,
+ indent: DefaultIndentSpaces,
+ anchorPtrToNameMap: map[uintptr]string{},
+ customMarshalerMap: map[reflect.Type]func(interface{}) ([]byte, error){},
+ line: 1,
+ column: 1,
+ offset: 0,
+ }
+}
+
+// Close closes the encoder by writing any remaining data.
+// It does not write a stream terminating string "...".
+func (e *Encoder) Close() error {
+ return nil
+}
+
+// Encode writes the YAML encoding of v to the stream.
+// If multiple items are encoded to the stream,
+// the second and subsequent documents will be preceded with a "---" document separator,
+// but the first will not.
+//
+// See the documentation for Marshal for details about the conversion of Go values to YAML.
+func (e *Encoder) Encode(v interface{}) error {
+ return e.EncodeContext(context.Background(), v)
+}
+
+// EncodeContext writes the YAML encoding of v to the stream with context.Context.
+func (e *Encoder) EncodeContext(ctx context.Context, v interface{}) error {
+ node, err := e.EncodeToNodeContext(ctx, v)
+ if err != nil {
+ return errors.Wrapf(err, "failed to encode to node")
+ }
+ if err := e.setCommentByCommentMap(node); err != nil {
+ return errors.Wrapf(err, "failed to set comment by comment map")
+ }
+ if !e.written {
+ e.written = true
+ } else {
+ // write document separator
+ e.writer.Write([]byte("---\n"))
+ }
+ var p printer.Printer
+ e.writer.Write(p.PrintNode(node))
+ return nil
+}
+
+// EncodeToNode converts v to ast.Node.
+func (e *Encoder) EncodeToNode(v interface{}) (ast.Node, error) {
+ return e.EncodeToNodeContext(context.Background(), v)
+}
+
+// EncodeToNodeContext converts v to ast.Node with context.Context.
+func (e *Encoder) EncodeToNodeContext(ctx context.Context, v interface{}) (ast.Node, error) {
+ for _, opt := range e.opts {
+ if err := opt(e); err != nil {
+ return nil, errors.Wrapf(err, "failed to run option for encoder")
+ }
+ }
+ node, err := e.encodeValue(ctx, reflect.ValueOf(v), 1)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to encode value")
+ }
+ return node, nil
+}
+
+func (e *Encoder) setCommentByCommentMap(node ast.Node) error {
+ if e.commentMap == nil {
+ return nil
+ }
+ for path, comments := range e.commentMap {
+ n, err := path.FilterNode(node)
+ if err != nil {
+ return errors.Wrapf(err, "failed to filter node")
+ }
+ if n == nil {
+ continue
+ }
+ for _, comment := range comments {
+ commentTokens := []*token.Token{}
+ for _, text := range comment.Texts {
+ commentTokens = append(commentTokens, token.New(text, text, nil))
+ }
+ commentGroup := ast.CommentGroup(commentTokens)
+ switch comment.Position {
+ case CommentHeadPosition:
+ if err := e.setHeadComment(node, n, commentGroup); err != nil {
+ return errors.Wrapf(err, "failed to set head comment")
+ }
+ case CommentLinePosition:
+ if err := e.setLineComment(node, n, commentGroup); err != nil {
+ return errors.Wrapf(err, "failed to set line comment")
+ }
+ case CommentFootPosition:
+ if err := e.setFootComment(node, n, commentGroup); err != nil {
+ return errors.Wrapf(err, "failed to set foot comment")
+ }
+ default:
+ return ErrUnknownCommentPositionType
+ }
+ }
+ }
+ return nil
+}
+
+func (e *Encoder) setHeadComment(node ast.Node, filtered ast.Node, comment *ast.CommentGroupNode) error {
+ parent := ast.Parent(node, filtered)
+ if parent == nil {
+ return ErrUnsupportedHeadPositionType(node)
+ }
+ switch p := parent.(type) {
+ case *ast.MappingValueNode:
+ if err := p.SetComment(comment); err != nil {
+ return errors.Wrapf(err, "failed to set comment")
+ }
+ case *ast.MappingNode:
+ if err := p.SetComment(comment); err != nil {
+ return errors.Wrapf(err, "failed to set comment")
+ }
+ case *ast.SequenceNode:
+ if len(p.ValueHeadComments) == 0 {
+ p.ValueHeadComments = make([]*ast.CommentGroupNode, len(p.Values))
+ }
+ var foundIdx int
+ for idx, v := range p.Values {
+ if v == filtered {
+ foundIdx = idx
+ break
+ }
+ }
+ p.ValueHeadComments[foundIdx] = comment
+ default:
+ return ErrUnsupportedHeadPositionType(node)
+ }
+ return nil
+}
+
+func (e *Encoder) setLineComment(node ast.Node, filtered ast.Node, comment *ast.CommentGroupNode) error {
+ switch filtered.(type) {
+ case *ast.MappingValueNode, *ast.SequenceNode:
+ // Line comment cannot be set for mapping value node.
+ // It should probably be set for the parent map node
+ if err := e.setLineCommentToParentMapNode(node, filtered, comment); err != nil {
+ return errors.Wrapf(err, "failed to set line comment to parent node")
+ }
+ default:
+ if err := filtered.SetComment(comment); err != nil {
+ return errors.Wrapf(err, "failed to set comment")
+ }
+ }
+ return nil
+}
+
+func (e *Encoder) setLineCommentToParentMapNode(node ast.Node, filtered ast.Node, comment *ast.CommentGroupNode) error {
+ parent := ast.Parent(node, filtered)
+ if parent == nil {
+ return ErrUnsupportedLinePositionType(node)
+ }
+ switch p := parent.(type) {
+ case *ast.MappingValueNode:
+ if err := p.Key.SetComment(comment); err != nil {
+ return errors.Wrapf(err, "failed to set comment")
+ }
+ case *ast.MappingNode:
+ if err := p.SetComment(comment); err != nil {
+ return errors.Wrapf(err, "failed to set comment")
+ }
+ default:
+ return ErrUnsupportedLinePositionType(parent)
+ }
+ return nil
+}
+
+func (e *Encoder) setFootComment(node ast.Node, filtered ast.Node, comment *ast.CommentGroupNode) error {
+ parent := ast.Parent(node, filtered)
+ if parent == nil {
+ return ErrUnsupportedFootPositionType(node)
+ }
+ switch n := parent.(type) {
+ case *ast.MappingValueNode:
+ n.FootComment = comment
+ case *ast.MappingNode:
+ n.FootComment = comment
+ case *ast.SequenceNode:
+ n.FootComment = comment
+ default:
+ return ErrUnsupportedFootPositionType(n)
+ }
+ return nil
+}
+
+func (e *Encoder) encodeDocument(doc []byte) (ast.Node, error) {
+ f, err := parser.ParseBytes(doc, 0)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to parse yaml")
+ }
+ for _, docNode := range f.Docs {
+ if docNode.Body != nil {
+ return docNode.Body, nil
+ }
+ }
+ return nil, nil
+}
+
+func (e *Encoder) isInvalidValue(v reflect.Value) bool {
+ if !v.IsValid() {
+ return true
+ }
+ kind := v.Type().Kind()
+ if kind == reflect.Ptr && v.IsNil() {
+ return true
+ }
+ if kind == reflect.Interface && v.IsNil() {
+ return true
+ }
+ return false
+}
+
+type jsonMarshaler interface {
+ MarshalJSON() ([]byte, error)
+}
+
+func (e *Encoder) existsTypeInCustomMarshalerMap(t reflect.Type) bool {
+ if _, exists := e.customMarshalerMap[t]; exists {
+ return true
+ }
+
+ globalCustomMarshalerMu.Lock()
+ defer globalCustomMarshalerMu.Unlock()
+ if _, exists := globalCustomMarshalerMap[t]; exists {
+ return true
+ }
+ return false
+}
+
+func (e *Encoder) marshalerFromCustomMarshalerMap(t reflect.Type) (func(interface{}) ([]byte, error), bool) {
+ if marshaler, exists := e.customMarshalerMap[t]; exists {
+ return marshaler, exists
+ }
+
+ globalCustomMarshalerMu.Lock()
+ defer globalCustomMarshalerMu.Unlock()
+ if marshaler, exists := globalCustomMarshalerMap[t]; exists {
+ return marshaler, exists
+ }
+ return nil, false
+}
+
+func (e *Encoder) canEncodeByMarshaler(v reflect.Value) bool {
+ if !v.CanInterface() {
+ return false
+ }
+ if e.existsTypeInCustomMarshalerMap(v.Type()) {
+ return true
+ }
+ iface := v.Interface()
+ switch iface.(type) {
+ case BytesMarshalerContext:
+ return true
+ case BytesMarshaler:
+ return true
+ case InterfaceMarshalerContext:
+ return true
+ case InterfaceMarshaler:
+ return true
+ case time.Time:
+ return true
+ case time.Duration:
+ return true
+ case encoding.TextMarshaler:
+ return true
+ case jsonMarshaler:
+ return e.useJSONMarshaler
+ }
+ return false
+}
+
+func (e *Encoder) encodeByMarshaler(ctx context.Context, v reflect.Value, column int) (ast.Node, error) {
+ iface := v.Interface()
+
+ if marshaler, exists := e.marshalerFromCustomMarshalerMap(v.Type()); exists {
+ doc, err := marshaler(iface)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to MarshalYAML")
+ }
+ node, err := e.encodeDocument(doc)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to encode document")
+ }
+ return node, nil
+ }
+
+ if marshaler, ok := iface.(BytesMarshalerContext); ok {
+ doc, err := marshaler.MarshalYAML(ctx)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to MarshalYAML")
+ }
+ node, err := e.encodeDocument(doc)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to encode document")
+ }
+ return node, nil
+ }
+
+ if marshaler, ok := iface.(BytesMarshaler); ok {
+ doc, err := marshaler.MarshalYAML()
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to MarshalYAML")
+ }
+ node, err := e.encodeDocument(doc)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to encode document")
+ }
+ return node, nil
+ }
+
+ if marshaler, ok := iface.(InterfaceMarshalerContext); ok {
+ marshalV, err := marshaler.MarshalYAML(ctx)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to MarshalYAML")
+ }
+ return e.encodeValue(ctx, reflect.ValueOf(marshalV), column)
+ }
+
+ if marshaler, ok := iface.(InterfaceMarshaler); ok {
+ marshalV, err := marshaler.MarshalYAML()
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to MarshalYAML")
+ }
+ return e.encodeValue(ctx, reflect.ValueOf(marshalV), column)
+ }
+
+ if t, ok := iface.(time.Time); ok {
+ return e.encodeTime(t, column), nil
+ }
+
+ if t, ok := iface.(time.Duration); ok {
+ return e.encodeDuration(t, column), nil
+ }
+
+ if marshaler, ok := iface.(encoding.TextMarshaler); ok {
+ doc, err := marshaler.MarshalText()
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to MarshalText")
+ }
+ node, err := e.encodeDocument(doc)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to encode document")
+ }
+ return node, nil
+ }
+
+ if e.useJSONMarshaler {
+ if marshaler, ok := iface.(jsonMarshaler); ok {
+ jsonBytes, err := marshaler.MarshalJSON()
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to MarshalJSON")
+ }
+ doc, err := JSONToYAML(jsonBytes)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to convert json to yaml")
+ }
+ node, err := e.encodeDocument(doc)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to encode document")
+ }
+ return node, nil
+ }
+ }
+
+ return nil, xerrors.Errorf("does not implemented Marshaler")
+}
+
+func (e *Encoder) encodeValue(ctx context.Context, v reflect.Value, column int) (ast.Node, error) {
+ if e.isInvalidValue(v) {
+ return e.encodeNil(), nil
+ }
+ if e.canEncodeByMarshaler(v) {
+ node, err := e.encodeByMarshaler(ctx, v, column)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to encode by marshaler")
+ }
+ return node, nil
+ }
+ switch v.Type().Kind() {
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ return e.encodeInt(v.Int()), nil
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
+ return e.encodeUint(v.Uint()), nil
+ case reflect.Float32:
+ return e.encodeFloat(v.Float(), 32), nil
+ case reflect.Float64:
+ return e.encodeFloat(v.Float(), 64), nil
+ case reflect.Ptr:
+ anchorName := e.anchorPtrToNameMap[v.Pointer()]
+ if anchorName != "" {
+ aliasName := anchorName
+ alias := ast.Alias(token.New("*", "*", e.pos(column)))
+ alias.Value = ast.String(token.New(aliasName, aliasName, e.pos(column)))
+ return alias, nil
+ }
+ return e.encodeValue(ctx, v.Elem(), column)
+ case reflect.Interface:
+ return e.encodeValue(ctx, v.Elem(), column)
+ case reflect.String:
+ return e.encodeString(v.String(), column), nil
+ case reflect.Bool:
+ return e.encodeBool(v.Bool()), nil
+ case reflect.Slice:
+ if mapSlice, ok := v.Interface().(MapSlice); ok {
+ return e.encodeMapSlice(ctx, mapSlice, column)
+ }
+ return e.encodeSlice(ctx, v)
+ case reflect.Array:
+ return e.encodeArray(ctx, v)
+ case reflect.Struct:
+ if v.CanInterface() {
+ if mapItem, ok := v.Interface().(MapItem); ok {
+ return e.encodeMapItem(ctx, mapItem, column)
+ }
+ if t, ok := v.Interface().(time.Time); ok {
+ return e.encodeTime(t, column), nil
+ }
+ }
+ return e.encodeStruct(ctx, v, column)
+ case reflect.Map:
+ return e.encodeMap(ctx, v, column), nil
+ default:
+ return nil, xerrors.Errorf("unknown value type %s", v.Type().String())
+ }
+}
+
+func (e *Encoder) pos(column int) *token.Position {
+ return &token.Position{
+ Line: e.line,
+ Column: column,
+ Offset: e.offset,
+ IndentNum: e.indentNum,
+ IndentLevel: e.indentLevel,
+ }
+}
+
+func (e *Encoder) encodeNil() *ast.NullNode {
+ value := "null"
+ return ast.Null(token.New(value, value, e.pos(e.column)))
+}
+
+func (e *Encoder) encodeInt(v int64) *ast.IntegerNode {
+ value := fmt.Sprint(v)
+ return ast.Integer(token.New(value, value, e.pos(e.column)))
+}
+
+func (e *Encoder) encodeUint(v uint64) *ast.IntegerNode {
+ value := fmt.Sprint(v)
+ return ast.Integer(token.New(value, value, e.pos(e.column)))
+}
+
+func (e *Encoder) encodeFloat(v float64, bitSize int) ast.Node {
+ if v == math.Inf(0) {
+ value := ".inf"
+ return ast.Infinity(token.New(value, value, e.pos(e.column)))
+ } else if v == math.Inf(-1) {
+ value := "-.inf"
+ return ast.Infinity(token.New(value, value, e.pos(e.column)))
+ } else if math.IsNaN(v) {
+ value := ".nan"
+ return ast.Nan(token.New(value, value, e.pos(e.column)))
+ }
+ value := strconv.FormatFloat(v, 'g', -1, bitSize)
+ if !strings.Contains(value, ".") && !strings.Contains(value, "e") {
+ // append a ".0" suffix to keep the value in float context
+ value = fmt.Sprintf("%s.0", value)
+ }
+ return ast.Float(token.New(value, value, e.pos(e.column)))
+}
+
+func (e *Encoder) isNeedQuoted(v string) bool {
+ if e.isJSONStyle {
+ return true
+ }
+ if e.useLiteralStyleIfMultiline && strings.ContainsAny(v, "\n\r") {
+ return false
+ }
+ if e.isFlowStyle && strings.ContainsAny(v, `]},'"`) {
+ return true
+ }
+ if token.IsNeedQuoted(v) {
+ return true
+ }
+ return false
+}
+
+func (e *Encoder) encodeString(v string, column int) *ast.StringNode {
+ if e.isNeedQuoted(v) {
+ if e.singleQuote {
+ v = quoteWith(v, '\'')
+ } else {
+ v = strconv.Quote(v)
+ }
+ }
+ return ast.String(token.New(v, v, e.pos(column)))
+}
+
+func (e *Encoder) encodeBool(v bool) *ast.BoolNode {
+ value := fmt.Sprint(v)
+ return ast.Bool(token.New(value, value, e.pos(e.column)))
+}
+
+func (e *Encoder) encodeSlice(ctx context.Context, value reflect.Value) (*ast.SequenceNode, error) {
+ if e.indentSequence {
+ e.column += e.indent
+ }
+ column := e.column
+ sequence := ast.Sequence(token.New("-", "-", e.pos(column)), e.isFlowStyle)
+ for i := 0; i < value.Len(); i++ {
+ node, err := e.encodeValue(ctx, value.Index(i), column)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to encode value for slice")
+ }
+ sequence.Values = append(sequence.Values, node)
+ }
+ if e.indentSequence {
+ e.column -= e.indent
+ }
+ return sequence, nil
+}
+
+func (e *Encoder) encodeArray(ctx context.Context, value reflect.Value) (*ast.SequenceNode, error) {
+ if e.indentSequence {
+ e.column += e.indent
+ }
+ column := e.column
+ sequence := ast.Sequence(token.New("-", "-", e.pos(column)), e.isFlowStyle)
+ for i := 0; i < value.Len(); i++ {
+ node, err := e.encodeValue(ctx, value.Index(i), column)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to encode value for array")
+ }
+ sequence.Values = append(sequence.Values, node)
+ }
+ if e.indentSequence {
+ e.column -= e.indent
+ }
+ return sequence, nil
+}
+
+func (e *Encoder) encodeMapItem(ctx context.Context, item MapItem, column int) (*ast.MappingValueNode, error) {
+ k := reflect.ValueOf(item.Key)
+ v := reflect.ValueOf(item.Value)
+ value, err := e.encodeValue(ctx, v, column)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to encode MapItem")
+ }
+ if e.isMapNode(value) {
+ value.AddColumn(e.indent)
+ }
+ return ast.MappingValue(
+ token.New("", "", e.pos(column)),
+ e.encodeString(k.Interface().(string), column),
+ value,
+ ), nil
+}
+
+func (e *Encoder) encodeMapSlice(ctx context.Context, value MapSlice, column int) (*ast.MappingNode, error) {
+ node := ast.Mapping(token.New("", "", e.pos(column)), e.isFlowStyle)
+ for _, item := range value {
+ value, err := e.encodeMapItem(ctx, item, column)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to encode MapItem for MapSlice")
+ }
+ node.Values = append(node.Values, value)
+ }
+ return node, nil
+}
+
+func (e *Encoder) isMapNode(node ast.Node) bool {
+ _, ok := node.(ast.MapNode)
+ return ok
+}
+
+func (e *Encoder) encodeMap(ctx context.Context, value reflect.Value, column int) ast.Node {
+ node := ast.Mapping(token.New("", "", e.pos(column)), e.isFlowStyle)
+ keys := make([]interface{}, len(value.MapKeys()))
+ for i, k := range value.MapKeys() {
+ keys[i] = k.Interface()
+ }
+ sort.Slice(keys, func(i, j int) bool {
+ return fmt.Sprint(keys[i]) < fmt.Sprint(keys[j])
+ })
+ for _, key := range keys {
+ k := reflect.ValueOf(key)
+ v := value.MapIndex(k)
+ value, err := e.encodeValue(ctx, v, column)
+ if err != nil {
+ return nil
+ }
+ if e.isMapNode(value) {
+ value.AddColumn(e.indent)
+ }
+ node.Values = append(node.Values, ast.MappingValue(
+ nil,
+ e.encodeString(fmt.Sprint(key), column),
+ value,
+ ))
+ }
+ return node
+}
+
+// IsZeroer is used to check whether an object is zero to determine
+// whether it should be omitted when marshaling with the omitempty flag.
+// One notable implementation is time.Time.
+type IsZeroer interface {
+ IsZero() bool
+}
+
+func (e *Encoder) isZeroValue(v reflect.Value) bool {
+ kind := v.Kind()
+ if z, ok := v.Interface().(IsZeroer); ok {
+ if (kind == reflect.Ptr || kind == reflect.Interface) && v.IsNil() {
+ return true
+ }
+ return z.IsZero()
+ }
+ switch kind {
+ case reflect.String:
+ return len(v.String()) == 0
+ case reflect.Interface, reflect.Ptr:
+ return v.IsNil()
+ case reflect.Slice:
+ return v.Len() == 0
+ case reflect.Map:
+ return v.Len() == 0
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ return v.Int() == 0
+ case reflect.Float32, reflect.Float64:
+ return v.Float() == 0
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ return v.Uint() == 0
+ case reflect.Bool:
+ return !v.Bool()
+ case reflect.Struct:
+ vt := v.Type()
+ for i := v.NumField() - 1; i >= 0; i-- {
+ if vt.Field(i).PkgPath != "" {
+ continue // private field
+ }
+ if !e.isZeroValue(v.Field(i)) {
+ return false
+ }
+ }
+ return true
+ }
+ return false
+}
+
+func (e *Encoder) encodeTime(v time.Time, column int) *ast.StringNode {
+ value := v.Format(time.RFC3339Nano)
+ if e.isJSONStyle {
+ value = strconv.Quote(value)
+ }
+ return ast.String(token.New(value, value, e.pos(column)))
+}
+
+func (e *Encoder) encodeDuration(v time.Duration, column int) *ast.StringNode {
+ value := v.String()
+ if e.isJSONStyle {
+ value = strconv.Quote(value)
+ }
+ return ast.String(token.New(value, value, e.pos(column)))
+}
+
+func (e *Encoder) encodeAnchor(anchorName string, value ast.Node, fieldValue reflect.Value, column int) (*ast.AnchorNode, error) {
+ anchorNode := ast.Anchor(token.New("&", "&", e.pos(column)))
+ anchorNode.Name = ast.String(token.New(anchorName, anchorName, e.pos(column)))
+ anchorNode.Value = value
+ if e.anchorCallback != nil {
+ if err := e.anchorCallback(anchorNode, fieldValue.Interface()); err != nil {
+ return nil, errors.Wrapf(err, "failed to marshal anchor")
+ }
+ if snode, ok := anchorNode.Name.(*ast.StringNode); ok {
+ anchorName = snode.Value
+ }
+ }
+ if fieldValue.Kind() == reflect.Ptr {
+ e.anchorPtrToNameMap[fieldValue.Pointer()] = anchorName
+ }
+ return anchorNode, nil
+}
+
+func (e *Encoder) encodeStruct(ctx context.Context, value reflect.Value, column int) (ast.Node, error) {
+ node := ast.Mapping(token.New("", "", e.pos(column)), e.isFlowStyle)
+ structType := value.Type()
+ structFieldMap, err := structFieldMap(structType)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to get struct field map")
+ }
+ hasInlineAnchorField := false
+ var inlineAnchorValue reflect.Value
+ for i := 0; i < value.NumField(); i++ {
+ field := structType.Field(i)
+ if isIgnoredStructField(field) {
+ continue
+ }
+ fieldValue := value.FieldByName(field.Name)
+ structField := structFieldMap[field.Name]
+ if structField.IsOmitEmpty && e.isZeroValue(fieldValue) {
+ // omit encoding
+ continue
+ }
+ ve := e
+ if !e.isFlowStyle && structField.IsFlow {
+ ve = &Encoder{}
+ *ve = *e
+ ve.isFlowStyle = true
+ }
+ value, err := ve.encodeValue(ctx, fieldValue, column)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to encode value")
+ }
+ if e.isMapNode(value) {
+ value.AddColumn(e.indent)
+ }
+ var key ast.MapKeyNode = e.encodeString(structField.RenderName, column)
+ switch {
+ case structField.AnchorName != "":
+ anchorNode, err := e.encodeAnchor(structField.AnchorName, value, fieldValue, column)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to encode anchor")
+ }
+ value = anchorNode
+ case structField.IsAutoAlias:
+ if fieldValue.Kind() != reflect.Ptr {
+ return nil, xerrors.Errorf(
+ "%s in struct is not pointer type. but required automatically alias detection",
+ structField.FieldName,
+ )
+ }
+ anchorName := e.anchorPtrToNameMap[fieldValue.Pointer()]
+ if anchorName == "" {
+ return nil, xerrors.Errorf(
+ "cannot find anchor name from pointer address for automatically alias detection",
+ )
+ }
+ aliasName := anchorName
+ alias := ast.Alias(token.New("*", "*", e.pos(column)))
+ alias.Value = ast.String(token.New(aliasName, aliasName, e.pos(column)))
+ value = alias
+ if structField.IsInline {
+ // if both alias and inline are used, output `<<: *alias`
+ key = ast.MergeKey(token.New("<<", "<<", e.pos(column)))
+ }
+ case structField.AliasName != "":
+ aliasName := structField.AliasName
+ alias := ast.Alias(token.New("*", "*", e.pos(column)))
+ alias.Value = ast.String(token.New(aliasName, aliasName, e.pos(column)))
+ value = alias
+ if structField.IsInline {
+ // if both alias and inline are used, output `<<: *alias`
+ key = ast.MergeKey(token.New("<<", "<<", e.pos(column)))
+ }
+ case structField.IsInline:
+ isAutoAnchor := structField.IsAutoAnchor
+ if !hasInlineAnchorField {
+ hasInlineAnchorField = isAutoAnchor
+ }
+ if isAutoAnchor {
+ inlineAnchorValue = fieldValue
+ }
+ mapNode, ok := value.(ast.MapNode)
+ if !ok {
+ // if an inline field is null, skip encoding it
+ if _, ok := value.(*ast.NullNode); ok {
+ continue
+ }
+ return nil, xerrors.Errorf("inline value is must be map or struct type")
+ }
+ mapIter := mapNode.MapRange()
+ for mapIter.Next() {
+ key := mapIter.Key()
+ value := mapIter.Value()
+ keyName := key.GetToken().Value
+ if structFieldMap.isIncludedRenderName(keyName) {
+ // if declared same key name, skip encoding this field
+ continue
+ }
+ key.AddColumn(-e.indent)
+ value.AddColumn(-e.indent)
+ node.Values = append(node.Values, ast.MappingValue(nil, key, value))
+ }
+ continue
+ case structField.IsAutoAnchor:
+ anchorNode, err := e.encodeAnchor(structField.RenderName, value, fieldValue, column)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to encode anchor")
+ }
+ value = anchorNode
+ }
+ node.Values = append(node.Values, ast.MappingValue(nil, key, value))
+ }
+ if hasInlineAnchorField {
+ node.AddColumn(e.indent)
+ anchorName := "anchor"
+ anchorNode := ast.Anchor(token.New("&", "&", e.pos(column)))
+ anchorNode.Name = ast.String(token.New(anchorName, anchorName, e.pos(column)))
+ anchorNode.Value = node
+ if e.anchorCallback != nil {
+ if err := e.anchorCallback(anchorNode, value.Addr().Interface()); err != nil {
+ return nil, errors.Wrapf(err, "failed to marshal anchor")
+ }
+ if snode, ok := anchorNode.Name.(*ast.StringNode); ok {
+ anchorName = snode.Value
+ }
+ }
+ if inlineAnchorValue.Kind() == reflect.Ptr {
+ e.anchorPtrToNameMap[inlineAnchorValue.Pointer()] = anchorName
+ }
+ return anchorNode, nil
+ }
+ return node, nil
+}
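
A minimal usage sketch of the Encoder defined above, again assuming only the vendored import path; the sample value mirrors the IndentSequence cases in the test table that follows.

package main

import (
	"bytes"
	"fmt"
	"log"

	yaml "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml"
)

func main() {
	var buf bytes.Buffer
	// IndentSequence(true) indents sequence items one level under their key.
	enc := yaml.NewEncoder(&buf, yaml.IndentSequence(true))
	defer enc.Close()
	if err := enc.Encode(map[string][]string{"v": {"A", "B"}}); err != nil {
		log.Fatal(err)
	}
	fmt.Print(buf.String())
	// Output:
	// v:
	//   - A
	//   - B
}
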
diff --git a/tmpmod/github.com/goccy/go-yaml/encode_test.go b/tmpmod/github.com/goccy/go-yaml/encode_test.go
new file mode 100644
index 00000000..5c9dae18
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/encode_test.go
@@ -0,0 +1,1637 @@
+package yaml_test
+
+import (
+ "bytes"
+ "context"
+ "fmt"
+ "math"
+ "reflect"
+ "strconv"
+ "testing"
+ "time"
+
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/parser"
+
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/ast"
+)
+
+var zero = 0
+var emptyStr = ""
+
+func TestEncoder(t *testing.T) {
+ tests := []struct {
+ source string
+ value interface{}
+ options []yaml.EncodeOption
+ }{
+ {
+ "null\n",
+ (*struct{})(nil),
+ nil,
+ },
+ {
+ "v: hi\n",
+ map[string]string{"v": "hi"},
+ nil,
+ },
+ {
+ "v: \"true\"\n",
+ map[string]string{"v": "true"},
+ nil,
+ },
+ {
+ "v: \"false\"\n",
+ map[string]string{"v": "false"},
+ nil,
+ },
+ {
+ "v: true\n",
+ map[string]interface{}{"v": true},
+ nil,
+ },
+ {
+ "v: false\n",
+ map[string]bool{"v": false},
+ nil,
+ },
+ {
+ "v: 10\n",
+ map[string]int{"v": 10},
+ nil,
+ },
+ {
+ "v: -10\n",
+ map[string]int{"v": -10},
+ nil,
+ },
+ {
+ "v: 4294967296\n",
+ map[string]int64{"v": int64(4294967296)},
+ nil,
+ },
+ {
+ "v: 0.1\n",
+ map[string]interface{}{"v": 0.1},
+ nil,
+ },
+ {
+ "v: 0.99\n",
+ map[string]float32{"v": 0.99},
+ nil,
+ },
+ {
+ "v: 0.123456789\n",
+ map[string]float64{"v": 0.123456789},
+ nil,
+ },
+ {
+ "v: -0.1\n",
+ map[string]float64{"v": -0.1},
+ nil,
+ },
+ {
+ "v: 1.0\n",
+ map[string]float64{"v": 1.0},
+ nil,
+ },
+ {
+ "v: 1e+06\n",
+ map[string]float64{"v": 1000000},
+ nil,
+ },
+ {
+ "v: .inf\n",
+ map[string]interface{}{"v": math.Inf(0)},
+ nil,
+ },
+ {
+ "v: -.inf\n",
+ map[string]interface{}{"v": math.Inf(-1)},
+ nil,
+ },
+ {
+ "v: .nan\n",
+ map[string]interface{}{"v": math.NaN()},
+ nil,
+ },
+ {
+ "v: null\n",
+ map[string]interface{}{"v": nil},
+ nil,
+ },
+ {
+ "v: \"\"\n",
+ map[string]string{"v": ""},
+ nil,
+ },
+ {
+ "v:\n- A\n- B\n",
+ map[string][]string{"v": {"A", "B"}},
+ nil,
+ },
+ {
+ "v:\n - A\n - B\n",
+ map[string][]string{"v": {"A", "B"}},
+ []yaml.EncodeOption{
+ yaml.IndentSequence(true),
+ },
+ },
+ {
+ "v:\n- A\n- B\n",
+ map[string][2]string{"v": {"A", "B"}},
+ nil,
+ },
+ {
+ "v:\n - A\n - B\n",
+ map[string][2]string{"v": {"A", "B"}},
+ []yaml.EncodeOption{
+ yaml.IndentSequence(true),
+ },
+ },
+ {
+ "a: -\n",
+ map[string]string{"a": "-"},
+ nil,
+ },
+ {
+ "123\n",
+ 123,
+ nil,
+ },
+ {
+ "hello: world\n",
+ map[string]string{"hello": "world"},
+ nil,
+ },
+ {
+ "hello: |\n hello\n world\n",
+ map[string]string{"hello": "hello\nworld\n"},
+ nil,
+ },
+ {
+ "hello: |-\n hello\n world\n",
+ map[string]string{"hello": "hello\nworld"},
+ nil,
+ },
+ {
+ "hello: |+\n hello\n world\n\n",
+ map[string]string{"hello": "hello\nworld\n\n"},
+ nil,
+ },
+ {
+ "hello:\n hello: |\n hello\n world\n",
+ map[string]map[string]string{"hello": {"hello": "hello\nworld\n"}},
+ nil,
+ },
+ {
+ "hello: |\r hello\r world\n",
+ map[string]string{"hello": "hello\rworld\r"},
+ nil,
+ },
+ {
+ "hello: |\r\n hello\r\n world\n",
+ map[string]string{"hello": "hello\r\nworld\r\n"},
+ nil,
+ },
+ {
+ "v: |-\n username: hello\n password: hello123\n",
+ map[string]interface{}{"v": "username: hello\npassword: hello123"},
+ []yaml.EncodeOption{
+ yaml.UseLiteralStyleIfMultiline(true),
+ },
+ },
+ {
+ "v: |-\n # comment\n username: hello\n password: hello123\n",
+ map[string]interface{}{"v": "# comment\nusername: hello\npassword: hello123"},
+ []yaml.EncodeOption{
+ yaml.UseLiteralStyleIfMultiline(true),
+ },
+ },
+ {
+ "v: \"# comment\\nusername: hello\\npassword: hello123\"\n",
+ map[string]interface{}{"v": "# comment\nusername: hello\npassword: hello123"},
+ []yaml.EncodeOption{
+ yaml.UseLiteralStyleIfMultiline(false),
+ },
+ },
+ {
+ "v:\n- A\n- 1\n- B:\n - 2\n - 3\n",
+ map[string]interface{}{
+ "v": []interface{}{
+ "A",
+ 1,
+ map[string][]int{
+ "B": {2, 3},
+ },
+ },
+ },
+ nil,
+ },
+ {
+ "v:\n - A\n - 1\n - B:\n - 2\n - 3\n - 2\n",
+ map[string]interface{}{
+ "v": []interface{}{
+ "A",
+ 1,
+ map[string][]int{
+ "B": {2, 3},
+ },
+ 2,
+ },
+ },
+ []yaml.EncodeOption{
+ yaml.IndentSequence(true),
+ },
+ },
+ {
+ "a:\n b: c\n",
+ map[string]interface{}{
+ "a": map[string]string{
+ "b": "c",
+ },
+ },
+ nil,
+ },
+ {
+ "t2: 2018-01-09T10:40:47Z\nt4: 2098-01-09T10:40:47Z\n",
+ map[string]string{
+ "t2": "2018-01-09T10:40:47Z",
+ "t4": "2098-01-09T10:40:47Z",
+ },
+ nil,
+ },
+ {
+ "a:\n b: c\n d: e\n",
+ map[string]interface{}{
+ "a": map[string]string{
+ "b": "c",
+ "d": "e",
+ },
+ },
+ nil,
+ },
+ {
+ "a: 3s\n",
+ map[string]string{
+ "a": "3s",
+ },
+ nil,
+ },
+ {
+ "a: \n",
+ map[string]string{"a": ""},
+ nil,
+ },
+ {
+ "a: \"1:1\"\n",
+ map[string]string{"a": "1:1"},
+ nil,
+ },
+ {
+ "a: 1.2.3.4\n",
+ map[string]string{"a": "1.2.3.4"},
+ nil,
+ },
+ {
+ "a: \"b: c\"\n",
+ map[string]string{"a": "b: c"},
+ nil,
+ },
+ {
+ "a: \"Hello #comment\"\n",
+ map[string]string{"a": "Hello #comment"},
+ nil,
+ },
+ {
+ "a: \" b\"\n",
+ map[string]string{"a": " b"},
+ nil,
+ },
+ {
+ "a: \"b \"\n",
+ map[string]string{"a": "b "},
+ nil,
+ },
+ {
+ "a: \" b \"\n",
+ map[string]string{"a": " b "},
+ nil,
+ },
+ {
+ "a: \"`b` c\"\n",
+ map[string]string{"a": "`b` c"},
+ nil,
+ },
+ {
+ "a: 100.5\n",
+ map[string]interface{}{
+ "a": 100.5,
+ },
+ nil,
+ },
+ {
+ "a: \"\\\\0\"\n",
+ map[string]string{"a": "\\0"},
+ nil,
+ },
+ {
+ "a: 1\nb: 2\nc: 3\nd: 4\nsub:\n e: 5\n",
+ map[string]interface{}{
+ "a": 1,
+ "b": 2,
+ "c": 3,
+ "d": 4,
+ "sub": map[string]int{
+ "e": 5,
+ },
+ },
+ nil,
+ },
+ {
+ "a: 1\nb: []\n",
+ struct {
+ A int
+ B []string
+ }{
+ 1, ([]string)(nil),
+ },
+ nil,
+ },
+ {
+ "a: 1\nb: []\n",
+ struct {
+ A int
+ B []string
+ }{
+ 1, []string{},
+ },
+ nil,
+ },
+ {
+ "a: {}\n",
+ struct {
+ A map[string]interface{}
+ }{
+ map[string]interface{}{},
+ },
+ nil,
+ },
+ {
+ "a: b\nc: d\n",
+ struct {
+ A string
+ C string `yaml:"c"`
+ }{
+ "b", "d",
+ },
+ nil,
+ },
+ {
+ "a: 1\n",
+ struct {
+ A int
+ B int `yaml:"-"`
+ }{
+ 1, 0,
+ },
+ nil,
+ },
+ {
+ "a: \"\"\n",
+ struct {
+ A string
+ }{
+ "",
+ },
+ nil,
+ },
+ {
+ "a: null\n",
+ struct {
+ A *string
+ }{
+ nil,
+ },
+ nil,
+ },
+ {
+ "a: \"\"\n",
+ struct {
+ A *string
+ }{
+ &emptyStr,
+ },
+ nil,
+ },
+ {
+ "a: null\n",
+ struct {
+ A *int
+ }{
+ nil,
+ },
+ nil,
+ },
+ {
+ "a: 0\n",
+ struct {
+ A *int
+ }{
+ &zero,
+ },
+ nil,
+ },
+
+ // Conditional flag
+ {
+ "a: 1\n",
+ struct {
+ A int `yaml:"a,omitempty"`
+ B int `yaml:"b,omitempty"`
+ }{1, 0},
+ nil,
+ },
+ {
+ "{}\n",
+ struct {
+ A int `yaml:"a,omitempty"`
+ B int `yaml:"b,omitempty"`
+ }{0, 0},
+ nil,
+ },
+
+ {
+ "a:\n \"y\": \"\"\n",
+ struct {
+ A *struct {
+ X string `yaml:"x,omitempty"`
+ Y string
+ }
+ }{&struct {
+ X string `yaml:"x,omitempty"`
+ Y string
+ }{}},
+ nil,
+ },
+
+ {
+ "a: {}\n",
+ struct {
+ A *struct {
+ X string `yaml:"x,omitempty"`
+ Y string `yaml:"y,omitempty"`
+ }
+ }{&struct {
+ X string `yaml:"x,omitempty"`
+ Y string `yaml:"y,omitempty"`
+ }{}},
+ nil,
+ },
+
+ {
+ "a: {x: 1}\n",
+ struct {
+ A *struct{ X, y int } `yaml:"a,omitempty,flow"`
+ }{&struct{ X, y int }{1, 2}},
+ nil,
+ },
+
+ {
+ "{}\n",
+ struct {
+ A *struct{ X, y int } `yaml:"a,omitempty,flow"`
+ }{nil},
+ nil,
+ },
+
+ {
+ "a: {x: 0}\n",
+ struct {
+ A *struct{ X, y int } `yaml:"a,omitempty,flow"`
+ }{&struct{ X, y int }{}},
+ nil,
+ },
+
+ {
+ "a: {x: 1}\n",
+ struct {
+ A struct{ X, y int } `yaml:"a,omitempty,flow"`
+ }{struct{ X, y int }{1, 2}},
+ nil,
+ },
+ {
+ "{}\n",
+ struct {
+ A struct{ X, y int } `yaml:"a,omitempty,flow"`
+ }{struct{ X, y int }{0, 1}},
+ nil,
+ },
+ {
+ "a: 1.0\n",
+ struct {
+ A float64 `yaml:"a,omitempty"`
+ B float64 `yaml:"b,omitempty"`
+ }{1, 0},
+ nil,
+ },
+ {
+ "a: 1\n",
+ struct {
+ A int
+ B []string `yaml:"b,omitempty"`
+ }{
+ 1, []string{},
+ },
+ nil,
+ },
+
+ // Flow flag
+ {
+ "a: [1, 2]\n",
+ struct {
+ A []int `yaml:"a,flow"`
+ }{[]int{1, 2}},
+ nil,
+ },
+ {
+ "a: {b: c, d: e}\n",
+ &struct {
+ A map[string]string `yaml:"a,flow"`
+ }{map[string]string{"b": "c", "d": "e"}},
+ nil,
+ },
+ {
+ "a: {b: c, d: e}\n",
+ struct {
+ A struct {
+ B, D string
+ } `yaml:"a,flow"`
+ }{struct{ B, D string }{"c", "e"}},
+ nil,
+ },
+ // Quoting in flow mode
+ {
+ `a: [b, "c,d", e]` + "\n",
+ struct {
+ A []string `yaml:"a,flow"`
+ }{[]string{"b", "c,d", "e"}},
+ []yaml.EncodeOption{
+ yaml.UseSingleQuote(false),
+ },
+ },
+ {
+ `a: [b, "c]", d]` + "\n",
+ struct {
+ A []string `yaml:"a,flow"`
+ }{[]string{"b", "c]", "d"}},
+ []yaml.EncodeOption{
+ yaml.UseSingleQuote(false),
+ },
+ },
+ {
+ `a: [b, "c}", d]` + "\n",
+ struct {
+ A []string `yaml:"a,flow"`
+ }{[]string{"b", "c}", "d"}},
+ []yaml.EncodeOption{
+ yaml.UseSingleQuote(false),
+ },
+ },
+ {
+ `a: [b, "c\"", d]` + "\n",
+ struct {
+ A []string `yaml:"a,flow"`
+ }{[]string{"b", `c"`, "d"}},
+ []yaml.EncodeOption{
+ yaml.UseSingleQuote(false),
+ },
+ },
+ {
+ `a: [b, "c'", d]` + "\n",
+ struct {
+ A []string `yaml:"a,flow"`
+ }{[]string{"b", "c'", "d"}},
+ []yaml.EncodeOption{
+ yaml.UseSingleQuote(false),
+ },
+ },
+ // No quoting in non-flow mode
+ {
+ "a:\n- b\n- c,d\n- e\n",
+ struct {
+ A []string `yaml:"a"`
+ }{[]string{"b", "c,d", "e"}},
+ nil,
+ },
+ {
+ `a: [b, "c]", d]` + "\n",
+ struct {
+ A []string `yaml:"a,flow"`
+ }{[]string{"b", "c]", "d"}},
+ nil,
+ },
+ {
+ `a: [b, "c}", d]` + "\n",
+ struct {
+ A []string `yaml:"a,flow"`
+ }{[]string{"b", "c}", "d"}},
+ nil,
+ },
+ {
+ `a: [b, "c\"", d]` + "\n",
+ struct {
+ A []string `yaml:"a,flow"`
+ }{[]string{"b", `c"`, "d"}},
+ nil,
+ },
+ {
+ `a: [b, "c'", d]` + "\n",
+ struct {
+ A []string `yaml:"a,flow"`
+ }{[]string{"b", "c'", "d"}},
+ nil,
+ },
+
+ // Multi-byte strings
+ {
+ "v: あいうえお\nv2: かきくけこ\n",
+ map[string]string{"v": "あいうえお", "v2": "かきくけこ"},
+ nil,
+ },
+
+ // time value
+ {
+ "v: 0001-01-01T00:00:00Z\n",
+ map[string]time.Time{"v": time.Time{}},
+ nil,
+ },
+ {
+ "v: 0001-01-01T00:00:00Z\n",
+ map[string]*time.Time{"v": &time.Time{}},
+ nil,
+ },
+ {
+ "v: null\n",
+ map[string]*time.Time{"v": nil},
+ nil,
+ },
+ {
+ "v: 30s\n",
+ map[string]time.Duration{"v": 30 * time.Second},
+ nil,
+ },
+ // Quote style
+ {
+ `v: '\'a\'b'` + "\n",
+ map[string]string{"v": `'a'b`},
+ []yaml.EncodeOption{
+ yaml.UseSingleQuote(true),
+ },
+ },
+ {
+ `v: "'a'b"` + "\n",
+ map[string]string{"v": `'a'b`},
+ []yaml.EncodeOption{
+ yaml.UseSingleQuote(false),
+ },
+ },
+ }
+ for _, test := range tests {
+ var buf bytes.Buffer
+ enc := yaml.NewEncoder(&buf, test.options...)
+ if err := enc.Encode(test.value); err != nil {
+ t.Fatalf("%+v", err)
+ }
+ if test.source != buf.String() {
+ t.Fatalf("expect = [%s], actual = [%s]", test.source, buf.String())
+ }
+ }
+}
+
+func TestEncodeStructIncludeMap(t *testing.T) {
+ type U struct {
+ M map[string]string
+ }
+ type T struct {
+ A U
+ }
+ bytes, err := yaml.Marshal(T{
+ A: U{
+ M: map[string]string{"x": "y"},
+ },
+ })
+ if err != nil {
+ t.Fatalf("%+v", err)
+ }
+ expect := "a:\n m:\n x: \"y\"\n"
+ actual := string(bytes)
+ if actual != expect {
+ t.Fatalf("unexpected output. expect:[%s] actual:[%s]", expect, actual)
+ }
+}
+
+func TestEncodeDefinedTypeKeyMap(t *testing.T) {
+ type K string
+ type U struct {
+ M map[K]string
+ }
+ bytes, err := yaml.Marshal(U{
+ M: map[K]string{K("x"): "y"},
+ })
+ if err != nil {
+ t.Fatalf("%+v", err)
+ }
+ expect := "m:\n x: \"y\"\n"
+ actual := string(bytes)
+ if actual != expect {
+ t.Fatalf("unexpected output. expect:[%s] actual:[%s]", expect, actual)
+ }
+}
+
+func TestEncodeWithAnchorAndAlias(t *testing.T) {
+ var buf bytes.Buffer
+ enc := yaml.NewEncoder(&buf)
+ type T struct {
+ A int
+ B string
+ }
+ var v struct {
+ A *T `yaml:"a,anchor=c"`
+ B *T `yaml:"b,alias=c"`
+ }
+ v.A = &T{A: 1, B: "hello"}
+ v.B = v.A
+ if err := enc.Encode(v); err != nil {
+ t.Fatalf("%+v", err)
+ }
+ expect := "a: &c\n a: 1\n b: hello\nb: *c\n"
+ if expect != buf.String() {
+ t.Fatalf("expect = [%s], actual = [%s]", expect, buf.String())
+ }
+}
+
+func TestEncodeWithAutoAlias(t *testing.T) {
+ var buf bytes.Buffer
+ enc := yaml.NewEncoder(&buf)
+ type T struct {
+ I int
+ S string
+ }
+ var v struct {
+ A *T `yaml:"a,anchor=a"`
+ B *T `yaml:"b,anchor=b"`
+ C *T `yaml:"c,alias"`
+ D *T `yaml:"d,alias"`
+ }
+ v.A = &T{I: 1, S: "hello"}
+ v.B = &T{I: 2, S: "world"}
+ v.C = v.A
+ v.D = v.B
+ if err := enc.Encode(v); err != nil {
+ t.Fatalf("%+v", err)
+ }
+ expect := `a: &a
+ i: 1
+ s: hello
+b: &b
+ i: 2
+ s: world
+c: *a
+d: *b
+`
+ if expect != buf.String() {
+ t.Fatalf("expect = [%s], actual = [%s]", expect, buf.String())
+ }
+}
+
+func TestEncodeWithImplicitAnchorAndAlias(t *testing.T) {
+ var buf bytes.Buffer
+ enc := yaml.NewEncoder(&buf)
+ type T struct {
+ I int
+ S string
+ }
+ var v struct {
+ A *T `yaml:"a,anchor"`
+ B *T `yaml:"b,anchor"`
+ C *T `yaml:"c,alias"`
+ D *T `yaml:"d,alias"`
+ }
+ v.A = &T{I: 1, S: "hello"}
+ v.B = &T{I: 2, S: "world"}
+ v.C = v.A
+ v.D = v.B
+ if err := enc.Encode(v); err != nil {
+ t.Fatalf("%+v", err)
+ }
+ expect := `a: &a
+ i: 1
+ s: hello
+b: &b
+ i: 2
+ s: world
+c: *a
+d: *b
+`
+ if expect != buf.String() {
+ t.Fatalf("expect = [%s], actual = [%s]", expect, buf.String())
+ }
+}
+
+func TestEncodeWithMerge(t *testing.T) {
+ type Person struct {
+ *Person `yaml:",omitempty,inline,alias"`
+ Name string `yaml:",omitempty"`
+ Age int `yaml:",omitempty"`
+ }
+ defaultPerson := &Person{
+ Name: "John Smith",
+ Age: 20,
+ }
+ people := []*Person{
+ {
+ Person: defaultPerson,
+ Name: "Ken",
+ Age: 10,
+ },
+ {
+ Person: defaultPerson,
+ },
+ }
+ var doc struct {
+ Default *Person `yaml:"default,anchor"`
+ People []*Person `yaml:"people"`
+ }
+ doc.Default = defaultPerson
+ doc.People = people
+ var buf bytes.Buffer
+ enc := yaml.NewEncoder(&buf)
+ if err := enc.Encode(doc); err != nil {
+ t.Fatalf("%+v", err)
+ }
+ expect := `default: &default
+ name: John Smith
+ age: 20
+people:
+- <<: *default
+ name: Ken
+ age: 10
+- <<: *default
+`
+ if expect != buf.String() {
+ t.Fatalf("expect = [%s], actual = [%s]", expect, buf.String())
+ }
+}
+
+func TestEncodeWithNestedYAML(t *testing.T) {
+ // Represents objects containing stringified YAML, and special chars
+ tests := []struct {
+ value interface{}
+ // If true, expect a different result depending on whether forced literal style is used
+ expectDifferent bool
+ }{
+ {
+ value: map[string]interface{}{"v": "# comment\nname: hello\npassword: hello123\nspecial: \":ghost:\"\ntext: |\n nested multiline!"},
+ expectDifferent: true,
+ },
+ {
+ value: map[string]interface{}{"v": "# comment\nusername: hello\npassword: hello123"},
+ expectDifferent: true,
+ },
+ {
+ value: map[string]interface{}{"v": "# comment\n"},
+ expectDifferent: true,
+ },
+ {
+ value: map[string]interface{}{"v": "\n"},
+ },
+ }
+
+ for _, test := range tests {
+ yamlBytesForced, err := yaml.MarshalWithOptions(test.value, yaml.UseLiteralStyleIfMultiline(true))
+ if err != nil {
+ t.Fatalf("%+v", err)
+ }
+
+ // Convert it back for proper equality testing
+ var unmarshaled interface{}
+
+ if err := yaml.Unmarshal(yamlBytesForced, &unmarshaled); err != nil {
+ t.Fatalf("%+v", err)
+ }
+
+ if !reflect.DeepEqual(test.value, unmarshaled) {
+ t.Fatalf("expected %v(%T). but actual %v(%T)", test.value, test.value, unmarshaled, unmarshaled)
+ }
+
+ if test.expectDifferent {
+ yamlBytesNotForced, err := yaml.MarshalWithOptions(test.value)
+ if err != nil {
+ t.Fatalf("%+v", err)
+ }
+
+ if string(yamlBytesForced) == string(yamlBytesNotForced) {
+ t.Fatalf("expected different strings when force literal style is not enabled. forced: %s, not forced: %s", string(yamlBytesForced), string(yamlBytesNotForced))
+ }
+ }
+ }
+}
+
+func TestEncoder_Inline(t *testing.T) {
+ type base struct {
+ A int
+ B string
+ }
+ var buf bytes.Buffer
+ enc := yaml.NewEncoder(&buf)
+ if err := enc.Encode(struct {
+ *base `yaml:",inline"`
+ C bool
+ }{
+ base: &base{
+ A: 1,
+ B: "hello",
+ },
+ C: true,
+ }); err != nil {
+ t.Fatalf("%+v", err)
+ }
+ expect := `
+a: 1
+b: hello
+c: true
+`
+ actual := "\n" + buf.String()
+ if expect != actual {
+ t.Fatalf("inline marshal error: expect=[%s] actual=[%s]", expect, actual)
+ }
+}
+
+func TestEncoder_InlineAndConflictKey(t *testing.T) {
+ type base struct {
+ A int
+ B string
+ }
+ var buf bytes.Buffer
+ enc := yaml.NewEncoder(&buf)
+ if err := enc.Encode(struct {
+ *base `yaml:",inline"`
+ A int // conflict
+ C bool
+ }{
+ base: &base{
+ A: 1,
+ B: "hello",
+ },
+ A: 0, // default value
+ C: true,
+ }); err != nil {
+ t.Fatalf("%+v", err)
+ }
+ expect := `
+b: hello
+a: 0
+c: true
+`
+ actual := "\n" + buf.String()
+ if expect != actual {
+ t.Fatalf("inline marshal error: expect=[%s] actual=[%s]", expect, actual)
+ }
+}
+
+func TestEncoder_InlineNil(t *testing.T) {
+ type base struct {
+ A int
+ B string
+ }
+ var buf bytes.Buffer
+ enc := yaml.NewEncoder(&buf)
+ if err := enc.Encode(struct {
+ *base `yaml:",inline"`
+ C bool
+ }{
+ C: true,
+ }); err != nil {
+ t.Fatalf("%+v", err)
+ }
+ expect := `
+c: true
+`
+ actual := "\n" + buf.String()
+ if expect != actual {
+ t.Fatalf("inline marshal error: expect=[%s] actual=[%s]", expect, actual)
+ }
+}
+
+func TestEncoder_Flow(t *testing.T) {
+ var buf bytes.Buffer
+ enc := yaml.NewEncoder(&buf, yaml.Flow(true))
+ var v struct {
+ A int
+ B string
+ C struct {
+ D int
+ E string
+ }
+ F []int
+ }
+ v.A = 1
+ v.B = "hello"
+ v.C.D = 3
+ v.C.E = "world"
+ v.F = []int{1, 2}
+ if err := enc.Encode(v); err != nil {
+ t.Fatalf("%+v", err)
+ }
+ expect := `
+{a: 1, b: hello, c: {d: 3, e: world}, f: [1, 2]}
+`
+ actual := "\n" + buf.String()
+ if expect != actual {
+ t.Fatalf("flow style marshal error: expect=[%s] actual=[%s]", expect, actual)
+ }
+}
+
+func TestEncoder_FlowRecursive(t *testing.T) {
+ var v struct {
+ M map[string][]int `yaml:",flow"`
+ }
+ v.M = map[string][]int{
+ "test": []int{1, 2, 3},
+ }
+ var buf bytes.Buffer
+ if err := yaml.NewEncoder(&buf).Encode(v); err != nil {
+ t.Fatalf("%+v", err)
+ }
+ expect := `
+m: {test: [1, 2, 3]}
+`
+ actual := "\n" + buf.String()
+ if expect != actual {
+ t.Fatalf("flow style marshal error: expect=[%s] actual=[%s]", expect, actual)
+ }
+}
+
+func TestEncoder_JSON(t *testing.T) {
+ var buf bytes.Buffer
+ enc := yaml.NewEncoder(&buf, yaml.JSON())
+ type st struct {
+ I int8
+ S string
+ F float32
+ }
+ if err := enc.Encode(struct {
+ I int
+ U uint
+ S string
+ F float64
+ Struct *st
+ Slice []int
+ Map map[string]interface{}
+ Time time.Time
+ Duration time.Duration
+ }{
+ I: -10,
+ U: 10,
+ S: "hello",
+ F: 3.14,
+ Struct: &st{
+ I: 2,
+ S: "world",
+ F: 1.23,
+ },
+ Slice: []int{1, 2, 3, 4, 5},
+ Map: map[string]interface{}{
+ "a": 1,
+ "b": 1.23,
+ "c": "json",
+ },
+ Time: time.Time{},
+ Duration: 5 * time.Minute,
+ }); err != nil {
+ t.Fatalf("%+v", err)
+ }
+ expect := `
+{"i": -10, "u": 10, "s": "hello", "f": 3.14, "struct": {"i": 2, "s": "world", "f": 1.23}, "slice": [1, 2, 3, 4, 5], "map": {"a": 1, "b": 1.23, "c": "json"}, "time": "0001-01-01T00:00:00Z", "duration": "5m0s"}
+`
+ actual := "\n" + buf.String()
+ if expect != actual {
+ t.Fatalf("JSON style marshal error: expect=[%s] actual=[%s]", expect, actual)
+ }
+}
+
+func TestEncoder_MarshalAnchor(t *testing.T) {
+ type Host struct {
+ Hostname string
+ Username string
+ Password string
+ }
+ type HostDecl struct {
+ Host *Host `yaml:",anchor"`
+ }
+ type Queue struct {
+ Name string `yaml:","`
+ *Host `yaml:",alias"`
+ }
+ var doc struct {
+ Hosts []*HostDecl `yaml:"hosts"`
+ Queues []*Queue `yaml:"queues"`
+ }
+ host1 := &Host{
+ Hostname: "host1.example.com",
+ Username: "userA",
+ Password: "pass1",
+ }
+ host2 := &Host{
+ Hostname: "host2.example.com",
+ Username: "userB",
+ Password: "pass2",
+ }
+ doc.Hosts = []*HostDecl{
+ {
+ Host: host1,
+ },
+ {
+ Host: host2,
+ },
+ }
+ doc.Queues = []*Queue{
+ {
+ Name: "queue",
+ Host: host1,
+ }, {
+ Name: "queue2",
+ Host: host2,
+ },
+ }
+ hostIdx := 1
+ opt := yaml.MarshalAnchor(func(anchor *ast.AnchorNode, value interface{}) error {
+ if _, ok := value.(*Host); ok {
+ nameNode := anchor.Name.(*ast.StringNode)
+ nameNode.Value = fmt.Sprintf("host%d", hostIdx)
+ hostIdx++
+ }
+ return nil
+ })
+
+ var buf bytes.Buffer
+ if err := yaml.NewEncoder(&buf, opt).Encode(doc); err != nil {
+ t.Fatalf("%+v", err)
+ }
+ expect := `
+hosts:
+- host: &host1
+ hostname: host1.example.com
+ username: userA
+ password: pass1
+- host: &host2
+ hostname: host2.example.com
+ username: userB
+ password: pass2
+queues:
+- name: queue
+ host: *host1
+- name: queue2
+ host: *host2
+`
+ if "\n"+buf.String() != expect {
+ t.Fatalf("unexpected output. %s", buf.String())
+ }
+}
+
+type useJSONMarshalerTest struct{}
+
+func (t useJSONMarshalerTest) MarshalJSON() ([]byte, error) {
+ return []byte(`{"a":[1, 2, 3]}`), nil
+}
+
+func TestEncoder_UseJSONMarshaler(t *testing.T) {
+ got, err := yaml.MarshalWithOptions(useJSONMarshalerTest{}, yaml.UseJSONMarshaler())
+ if err != nil {
+ t.Fatal(err)
+ }
+ expected := `
+a:
+- 1
+- 2
+- 3
+`
+ if expected != "\n"+string(got) {
+ t.Fatalf("failed to use json marshaler. expected [%q] but got [%q]", expected, string(got))
+ }
+}
+
+func TestEncoder_CustomMarshaler(t *testing.T) {
+ t.Run("override struct type", func(t *testing.T) {
+ type T struct {
+ Foo string `yaml:"foo"`
+ }
+ b, err := yaml.MarshalWithOptions(&T{Foo: "bar"}, yaml.CustomMarshaler[T](func(v T) ([]byte, error) {
+ return []byte(`"override"`), nil
+ }))
+ if err != nil {
+ t.Fatal(err)
+ }
+ if !bytes.Equal(b, []byte("\"override\"\n")) {
+ t.Fatalf("failed to switch to custom marshaler. got: %q", b)
+ }
+ })
+ t.Run("override bytes type", func(t *testing.T) {
+ type T struct {
+ Foo []byte `yaml:"foo"`
+ }
+ b, err := yaml.MarshalWithOptions(&T{Foo: []byte("bar")}, yaml.CustomMarshaler[[]byte](func(v []byte) ([]byte, error) {
+ if !bytes.Equal(v, []byte("bar")) {
+ t.Fatalf("failed to get src buffer: %q", v)
+ }
+ return []byte(`override`), nil
+ }))
+ if err != nil {
+ t.Fatal(err)
+ }
+ if !bytes.Equal(b, []byte("foo: override\n")) {
+ t.Fatalf("failed to switch to custom marshaler. got: %q", b)
+ }
+ })
+}
+
+func TestEncoder_MultipleDocuments(t *testing.T) {
+ var buf bytes.Buffer
+ enc := yaml.NewEncoder(&buf)
+ if err := enc.Encode(1); err != nil {
+ t.Fatalf("failed to encode: %s", err)
+ }
+ if err := enc.Encode(2); err != nil {
+ t.Fatalf("failed to encode: %s", err)
+ }
+ if actual, expect := buf.String(), "1\n---\n2\n"; actual != expect {
+ t.Errorf("expect:\n%s\nactual\n%s\n", expect, actual)
+ }
+}
+
+func Example_Marshal_Node() {
+ type T struct {
+ Text ast.Node `yaml:"text"`
+ }
+ stringNode, err := yaml.ValueToNode("node example")
+ if err != nil {
+ panic(err)
+ }
+ bytes, err := yaml.Marshal(T{Text: stringNode})
+ if err != nil {
+ panic(err)
+ }
+ fmt.Println(string(bytes))
+ // OUTPUT:
+ // text: node example
+}
+
+func Example_Marshal_ExplicitAnchorAlias() {
+ type T struct {
+ A int
+ B string
+ }
+ var v struct {
+ C *T `yaml:"c,anchor=x"`
+ D *T `yaml:"d,alias=x"`
+ }
+ v.C = &T{A: 1, B: "hello"}
+ v.D = v.C
+ bytes, err := yaml.Marshal(v)
+ if err != nil {
+ panic(err)
+ }
+ fmt.Println(string(bytes))
+ // OUTPUT:
+ // c: &x
+ // a: 1
+ // b: hello
+ // d: *x
+}
+
+func Example_Marshal_ImplicitAnchorAlias() {
+ type T struct {
+ I int
+ S string
+ }
+ var v struct {
+ A *T `yaml:"a,anchor"`
+ B *T `yaml:"b,anchor"`
+ C *T `yaml:"c,alias"`
+ D *T `yaml:"d,alias"`
+ }
+ v.A = &T{I: 1, S: "hello"}
+ v.B = &T{I: 2, S: "world"}
+ v.C = v.A // C has the same pointer address as A
+ v.D = v.B // D has the same pointer address as B
+ bytes, err := yaml.Marshal(v)
+ if err != nil {
+ panic(err)
+ }
+ fmt.Println(string(bytes))
+ // OUTPUT:
+ // a: &a
+ // i: 1
+ // s: hello
+ // b: &b
+ // i: 2
+ // s: world
+ // c: *a
+ // d: *b
+}
+
+type tMarshal []string
+
+func (t *tMarshal) MarshalYAML() ([]byte, error) {
+ var buf bytes.Buffer
+ buf.WriteString("tags:\n")
+ for i, v := range *t {
+ if i == 0 {
+ fmt.Fprintf(&buf, "- %s\n", v)
+ } else {
+ fmt.Fprintf(&buf, " %s\n", v)
+ }
+ }
+ return buf.Bytes(), nil
+}
+func Test_Marshaler(t *testing.T) {
+ const expected = `- hello-world
+`
+
+ // sanity check
+ var l []string
+ if err := yaml.Unmarshal([]byte(expected), &l); err != nil {
+ t.Fatalf("failed to parse string: %s", err)
+ }
+
+ buf, err := yaml.Marshal(tMarshal{"hello-world"})
+ if err != nil {
+ t.Fatalf("failed to marshal: %s", err)
+ }
+
+ if string(buf) != expected {
+ t.Fatalf("expected '%s', got '%s'", expected, buf)
+ }
+
+ t.Logf("%s", buf)
+}
+
+type marshalContext struct{}
+
+func (c *marshalContext) MarshalYAML(ctx context.Context) ([]byte, error) {
+ v, ok := ctx.Value("k").(int)
+ if !ok {
+ return nil, fmt.Errorf("cannot get valid context")
+ }
+ if v != 1 {
+ return nil, fmt.Errorf("cannot get valid context")
+ }
+ return []byte("1"), nil
+}
+
+func Test_MarshalerContext(t *testing.T) {
+ ctx := context.WithValue(context.Background(), "k", 1)
+ bytes, err := yaml.MarshalContext(ctx, &marshalContext{})
+ if err != nil {
+ t.Fatalf("%+v", err)
+ }
+ if string(bytes) != "1\n" {
+ t.Fatalf("failed marshal: %q", string(bytes))
+ }
+}
+
+type SlowMarshaler struct {
+ A string
+ B int
+}
+type FastMarshaler struct {
+ A string
+ B int
+}
+type TextMarshaler int64
+type TextMarshalerContainer struct {
+ Field TextMarshaler `yaml:"field"`
+}
+
+func (v SlowMarshaler) MarshalYAML() ([]byte, error) {
+ var buf bytes.Buffer
+ buf.WriteString("tags:\n")
+ buf.WriteString("- slow-marshaler\n")
+ buf.WriteString("a: " + v.A + "\n")
+ buf.WriteString("b: " + strconv.FormatInt(int64(v.B), 10) + "\n")
+ return buf.Bytes(), nil
+}
+
+func (v FastMarshaler) MarshalYAML() (interface{}, error) {
+ return yaml.MapSlice{
+ {"tags", []string{"fast-marshaler"}},
+ {"a", v.A},
+ {"b", v.B},
+ }, nil
+}
+
+func (t TextMarshaler) MarshalText() ([]byte, error) {
+ return []byte(strconv.FormatInt(int64(t), 8)), nil
+}
+
+func Example_MarshalYAML() {
+ var slow SlowMarshaler
+ slow.A = "Hello slow poke"
+ slow.B = 100
+ buf, err := yaml.Marshal(slow)
+ if err != nil {
+ panic(err.Error())
+ }
+
+ fmt.Println(string(buf))
+
+ var fast FastMarshaler
+ fast.A = "Hello speed demon"
+ fast.B = 100
+ buf, err = yaml.Marshal(fast)
+ if err != nil {
+ panic(err.Error())
+ }
+
+ fmt.Println(string(buf))
+
+ text := TextMarshalerContainer{
+ Field: 11,
+ }
+ buf, err = yaml.Marshal(text)
+ if err != nil {
+ panic(err.Error())
+ }
+
+ fmt.Println(string(buf))
+ // OUTPUT:
+ // tags:
+ // - slow-marshaler
+ // a: Hello slow poke
+ // b: 100
+ //
+ // tags:
+ // - fast-marshaler
+ // a: Hello speed demon
+ // b: 100
+ //
+ // field: 13
+}
+
+func TestIssue356(t *testing.T) {
+ tests := map[string]struct {
+ in string
+ }{
+ "content on first line": {
+ in: `args:
+ - |
+
+ key:
+ nest1: something
+ nest2:
+ nest2a: b
+`,
+ },
+ "empty first line": {
+ in: `args:
+ - |
+
+ key:
+ nest1: something
+ nest2:
+ nest2a: b
+`,
+ },
+ }
+
+ for name, test := range tests {
+ t.Run(name, func(t *testing.T) {
+ f, err := parser.ParseBytes([]byte(test.in), 0)
+ if err != nil {
+ t.Fatalf("parse: %v", err)
+ }
+ got := f.String()
+ if test.in != got {
+ t.Fatalf("failed to encode.\nexpected:\n%s\nbut got:\n%s\n", test.in, got)
+ }
+ })
+ }
+}
+
+func TestMarshalIndentWithMultipleText(t *testing.T) {
+ t.Run("depth1", func(t *testing.T) {
+ b, err := yaml.MarshalWithOptions(map[string]interface{}{
+ "key": []string{`line1
+line2
+line3`},
+ }, yaml.Indent(2))
+ if err != nil {
+ t.Fatal(err)
+ }
+ got := string(b)
+ expected := `key:
+- |-
+ line1
+ line2
+ line3
+`
+ if expected != got {
+ t.Fatalf("failed to encode.\nexpected:\n%s\nbut got:\n%s\n", expected, got)
+ }
+ })
+ t.Run("depth2", func(t *testing.T) {
+ b, err := yaml.MarshalWithOptions(map[string]interface{}{
+ "key": map[string]interface{}{
+ "key2": []string{`line1
+line2
+line3`},
+ },
+ }, yaml.Indent(2))
+ if err != nil {
+ t.Fatal(err)
+ }
+ got := string(b)
+ expected := `key:
+ key2:
+ - |-
+ line1
+ line2
+ line3
+`
+ if expected != got {
+ t.Fatalf("failed to encode.\nexpected:\n%s\nbut got:\n%s\n", expected, got)
+ }
+ })
+}
+
+type bytesMarshaler struct{}
+
+func (b *bytesMarshaler) MarshalYAML() ([]byte, error) {
+ return yaml.Marshal(map[string]interface{}{"d": "foo"})
+}
+
+func TestBytesMarshaler(t *testing.T) {
+ b, err := yaml.Marshal(map[string]interface{}{
+ "a": map[string]interface{}{
+ "b": map[string]interface{}{
+ "c": &bytesMarshaler{},
+ },
+ },
+ })
+ if err != nil {
+ t.Fatal(err)
+ }
+ expected := `
+a:
+ b:
+ c:
+ d: foo
+`
+ got := "\n" + string(b)
+ if expected != got {
+ t.Fatalf("failed to encode. expected %s but got %s", expected, got)
+ }
+}
+
+type customMapSliceOneItemMarshaler struct{}
+
+func (m *customMapSliceOneItemMarshaler) MarshalYAML() ([]byte, error) {
+ var v yaml.MapSlice
+ v = append(v, yaml.MapItem{"a", "b"})
+ return yaml.Marshal(v)
+}
+
+type customMapSliceTwoItemMarshaler struct{}
+
+func (m *customMapSliceTwoItemMarshaler) MarshalYAML() ([]byte, error) {
+ var v yaml.MapSlice
+ v = append(v, yaml.MapItem{"a", "b"})
+ v = append(v, yaml.MapItem{"b", "c"})
+ return yaml.Marshal(v)
+}
+
+func TestCustomMapSliceMarshaler(t *testing.T) {
+ type T struct {
+ A *customMapSliceOneItemMarshaler `yaml:"a"`
+ B *customMapSliceTwoItemMarshaler `yaml:"b"`
+ }
+ b, err := yaml.Marshal(&T{
+ A: &customMapSliceOneItemMarshaler{},
+ B: &customMapSliceTwoItemMarshaler{},
+ })
+ if err != nil {
+ t.Fatal(err)
+ }
+ expected := `
+a:
+ a: b
+b:
+ a: b
+ b: c
+`
+ got := "\n" + string(b)
+ if expected != got {
+ t.Fatalf("failed to encode. expected %s but got %s", expected, got)
+ }
+}
diff --git a/tmpmod/github.com/goccy/go-yaml/error.go b/tmpmod/github.com/goccy/go-yaml/error.go
new file mode 100644
index 00000000..177ba6b0
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/error.go
@@ -0,0 +1,62 @@
+package yaml
+
+import (
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/ast"
+ "golang.org/x/xerrors"
+)
+
+var (
+ ErrInvalidQuery = xerrors.New("invalid query")
+ ErrInvalidPath = xerrors.New("invalid path instance")
+ ErrInvalidPathString = xerrors.New("invalid path string")
+ ErrNotFoundNode = xerrors.New("node not found")
+ ErrUnknownCommentPositionType = xerrors.New("unknown comment position type")
+ ErrInvalidCommentMapValue = xerrors.New("invalid comment map value. it must not be nil")
+)
+
+func ErrUnsupportedHeadPositionType(node ast.Node) error {
+ return xerrors.Errorf("unsupported comment head position for %s", node.Type())
+}
+
+func ErrUnsupportedLinePositionType(node ast.Node) error {
+ return xerrors.Errorf("unsupported comment line position for %s", node.Type())
+}
+
+func ErrUnsupportedFootPositionType(node ast.Node) error {
+ return xerrors.Errorf("unsupported comment foot position for %s", node.Type())
+}
+
+// IsInvalidQueryError reports whether err is ErrInvalidQuery.
+func IsInvalidQueryError(err error) bool {
+ return xerrors.Is(err, ErrInvalidQuery)
+}
+
+// IsInvalidPathError reports whether err is ErrInvalidPath.
+func IsInvalidPathError(err error) bool {
+ return xerrors.Is(err, ErrInvalidPath)
+}
+
+// IsInvalidPathStringError reports whether err is ErrInvalidPathString.
+func IsInvalidPathStringError(err error) bool {
+ return xerrors.Is(err, ErrInvalidPathString)
+}
+
+// IsNotFoundNodeError reports whether err is ErrNotFoundNode.
+func IsNotFoundNodeError(err error) bool {
+ return xerrors.Is(err, ErrNotFoundNode)
+}
+
+// IsInvalidTokenTypeError reports whether err is ast.ErrInvalidTokenType.
+func IsInvalidTokenTypeError(err error) bool {
+ return xerrors.Is(err, ast.ErrInvalidTokenType)
+}
+
+// IsInvalidAnchorNameError reports whether err is ast.ErrInvalidAnchorName.
+func IsInvalidAnchorNameError(err error) bool {
+ return xerrors.Is(err, ast.ErrInvalidAnchorName)
+}
+
+// IsInvalidAliasNameError reports whether err is ast.ErrInvalidAliasName.
+func IsInvalidAliasNameError(err error) bool {
+ return xerrors.Is(err, ast.ErrInvalidAliasName)
+}
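
A short sketch of how these predicates are meant to be used by callers, assuming yaml.PathString (referenced by WithComment in option.go later in this diff) reports a malformed path via ErrInvalidPathString:

package main

import (
	"fmt"

	"github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml"
)

func main() {
	// "store book" is not a valid path expression, so PathString should fail.
	if _, err := yaml.PathString("store book"); err != nil {
		if yaml.IsInvalidPathStringError(err) {
			fmt.Println("invalid path string:", err)
			return
		}
		fmt.Println("unexpected error:", err)
	}
}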
diff --git a/tmpmod/github.com/goccy/go-yaml/go.mod.tmpmod b/tmpmod/github.com/goccy/go-yaml/go.mod.tmpmod
new file mode 100644
index 00000000..4550ff37
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/go.mod.tmpmod
@@ -0,0 +1,20 @@
+module github.com/goccy/go-yaml
+
+go 1.19
+
+require (
+ github.com/fatih/color v1.10.0
+ github.com/go-playground/validator/v10 v10.4.1
+ github.com/google/go-cmp v0.5.9
+ github.com/mattn/go-colorable v0.1.8
+ golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1
+)
+
+require (
+ github.com/go-playground/locales v0.13.0 // indirect
+ github.com/go-playground/universal-translator v0.17.0 // indirect
+ github.com/leodido/go-urn v1.2.0 // indirect
+ github.com/mattn/go-isatty v0.0.12 // indirect
+ golang.org/x/crypto v0.7.0 // indirect
+ golang.org/x/sys v0.6.0 // indirect
+)
diff --git a/tmpmod/github.com/goccy/go-yaml/go.sum.tmpmod b/tmpmod/github.com/goccy/go-yaml/go.sum.tmpmod
new file mode 100644
index 00000000..7249df6b
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/go.sum.tmpmod
@@ -0,0 +1,44 @@
+github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=
+github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/fatih/color v1.10.0 h1:s36xzo75JdqLaaWoiEHk767eHiwo0598uUxyfiPkDsg=
+github.com/fatih/color v1.10.0/go.mod h1:ELkj/draVOlAH/xkhN6mQ50Qd0MPOk5AAr3maGEBuJM=
+github.com/go-playground/assert/v2 v2.0.1 h1:MsBgLAaY856+nPRTKrp3/OZK38U/wa0CcBYNjji3q3A=
+github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
+github.com/go-playground/locales v0.13.0 h1:HyWk6mgj5qFqCT5fjGBuRArbVDfE4hi8+e8ceBS/t7Q=
+github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8=
+github.com/go-playground/universal-translator v0.17.0 h1:icxd5fm+REJzpZx7ZfpaD876Lmtgy7VtROAbHHXk8no=
+github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA=
+github.com/go-playground/validator/v10 v10.4.1 h1:pH2c5ADXtd66mxoE0Zm9SUhxE20r7aM3F26W0hOn+GE=
+github.com/go-playground/validator/v10 v10.4.1/go.mod h1:nlOn6nFhuKACm19sB/8EGNn9GlaMV7XkbRSipzJ0Ii4=
+github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
+github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
+github.com/leodido/go-urn v1.2.0 h1:hpXL4XnriNwQ/ABnpepYM/1vCLWNDfUNts8dX3xTG6Y=
+github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII=
+github.com/mattn/go-colorable v0.1.8 h1:c1ghPdyEDarC70ftn0y+A/Ee++9zz8ljHG1b13eJ0s8=
+github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
+github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY=
+github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk=
+github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
+golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
+golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
+golang.org/x/crypto v0.7.0 h1:AvwMYaRytfdeVt3u6mLaxYtErKYjxA2OXjJ1HHq6t3A=
+golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU=
+golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.6.0 h1:MVltZSvRTcU2ljQOhs94SXPftV6DCNnZViHeQps87pQ=
+golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
+golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE=
+golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw=
+gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
diff --git a/tmpmod/github.com/goccy/go-yaml/internal/errors/error.go b/tmpmod/github.com/goccy/go-yaml/internal/errors/error.go
new file mode 100644
index 00000000..66668ccd
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/internal/errors/error.go
@@ -0,0 +1,260 @@
+package errors
+
+import (
+ "bytes"
+ "fmt"
+ "reflect"
+
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/printer"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/token"
+ "golang.org/x/xerrors"
+)
+
+const (
+ defaultColorize = false
+ defaultIncludeSource = true
+)
+
+var (
+ ErrDecodeRequiredPointerType = xerrors.New("required pointer type value")
+)
+
+// Wrapf wraps an error so that a stack trace is recorded
+func Wrapf(err error, msg string, args ...interface{}) error {
+ return &wrapError{
+ baseError: &baseError{},
+ err: xerrors.Errorf(msg, args...),
+ nextErr: err,
+ frame: xerrors.Caller(1),
+ }
+}
+
+// ErrSyntax creates a syntax error instance from a message and a token
+func ErrSyntax(msg string, tk *token.Token) *syntaxError {
+ return &syntaxError{
+ baseError: &baseError{},
+ msg: msg,
+ token: tk,
+ frame: xerrors.Caller(1),
+ }
+}
+
+type baseError struct {
+ state fmt.State
+ verb rune
+}
+
+func (e *baseError) Error() string {
+ return ""
+}
+
+func (e *baseError) chainStateAndVerb(err error) {
+ wrapErr, ok := err.(*wrapError)
+ if ok {
+ wrapErr.state = e.state
+ wrapErr.verb = e.verb
+ }
+ syntaxErr, ok := err.(*syntaxError)
+ if ok {
+ syntaxErr.state = e.state
+ syntaxErr.verb = e.verb
+ }
+}
+
+type wrapError struct {
+ *baseError
+ err error
+ nextErr error
+ frame xerrors.Frame
+}
+
+type FormatErrorPrinter struct {
+ xerrors.Printer
+ Colored bool
+ InclSource bool
+}
+
+func (e *wrapError) As(target interface{}) bool {
+ err := e.nextErr
+ for {
+ if wrapErr, ok := err.(*wrapError); ok {
+ err = wrapErr.nextErr
+ continue
+ }
+ break
+ }
+ return xerrors.As(err, target)
+}
+
+func (e *wrapError) Unwrap() error {
+ return e.nextErr
+}
+
+func (e *wrapError) PrettyPrint(p xerrors.Printer, colored, inclSource bool) error {
+ return e.FormatError(&FormatErrorPrinter{Printer: p, Colored: colored, InclSource: inclSource})
+}
+
+func (e *wrapError) FormatError(p xerrors.Printer) error {
+ if _, ok := p.(*FormatErrorPrinter); !ok {
+ p = &FormatErrorPrinter{
+ Printer: p,
+ Colored: defaultColorize,
+ InclSource: defaultIncludeSource,
+ }
+ }
+ if e.verb == 'v' && e.state.Flag('+') {
+ // print stack trace for debugging
+ p.Print(e.err, "\n")
+ e.frame.Format(p)
+ e.chainStateAndVerb(e.nextErr)
+ return e.nextErr
+ }
+ err := e.nextErr
+ for {
+ if wrapErr, ok := err.(*wrapError); ok {
+ err = wrapErr.nextErr
+ continue
+ }
+ break
+ }
+ e.chainStateAndVerb(err)
+ if fmtErr, ok := err.(xerrors.Formatter); ok {
+ fmtErr.FormatError(p)
+ } else {
+ p.Print(err)
+ }
+ return nil
+}
+
+type wrapState struct {
+ org fmt.State
+}
+
+func (s *wrapState) Write(b []byte) (n int, err error) {
+ return s.org.Write(b)
+}
+
+func (s *wrapState) Width() (wid int, ok bool) {
+ return s.org.Width()
+}
+
+func (s *wrapState) Precision() (prec int, ok bool) {
+ return s.org.Precision()
+}
+
+func (s *wrapState) Flag(c int) bool {
+ // force 'printDetail' to true because when p.Detail() is false, xerrors.Printer outputs no text
+ if c == '#' {
+ // ignore the '#' flag because xerrors.FormatError does not set printDetail to true.
+ // ( see https://github.com/golang/xerrors/blob/master/adaptor.go#L39-L43 )
+ return false
+ }
+ return true
+}
+
+func (e *wrapError) Format(state fmt.State, verb rune) {
+ e.state = state
+ e.verb = verb
+ xerrors.FormatError(e, &wrapState{org: state}, verb)
+}
+
+func (e *wrapError) Error() string {
+ var buf bytes.Buffer
+ e.PrettyPrint(&Sink{&buf}, defaultColorize, defaultIncludeSource)
+ return buf.String()
+}
+
+type syntaxError struct {
+ *baseError
+ msg string
+ token *token.Token
+ frame xerrors.Frame
+}
+
+func (e *syntaxError) PrettyPrint(p xerrors.Printer, colored, inclSource bool) error {
+ return e.FormatError(&FormatErrorPrinter{Printer: p, Colored: colored, InclSource: inclSource})
+}
+
+func (e *syntaxError) FormatError(p xerrors.Printer) error {
+ var pp printer.Printer
+
+ var colored, inclSource bool
+ if fep, ok := p.(*FormatErrorPrinter); ok {
+ colored = fep.Colored
+ inclSource = fep.InclSource
+ }
+
+ pos := fmt.Sprintf("[%d:%d] ", e.token.Position.Line, e.token.Position.Column)
+ msg := pp.PrintErrorMessage(fmt.Sprintf("%s%s", pos, e.msg), colored)
+ if inclSource {
+ msg += "\n" + pp.PrintErrorToken(e.token, colored)
+ }
+ p.Print(msg)
+
+ if e.verb == 'v' && e.state.Flag('+') {
+ // %+v
+ // print stack trace for debugging
+ e.frame.Format(p)
+ }
+ return nil
+}
+
+type PrettyPrinter interface {
+ PrettyPrint(xerrors.Printer, bool, bool) error
+}
+type Sink struct{ *bytes.Buffer }
+
+func (es *Sink) Print(args ...interface{}) {
+ fmt.Fprint(es.Buffer, args...)
+}
+
+func (es *Sink) Printf(f string, args ...interface{}) {
+ fmt.Fprintf(es.Buffer, f, args...)
+}
+
+func (es *Sink) Detail() bool {
+ return false
+}
+
+func (e *syntaxError) Error() string {
+ var buf bytes.Buffer
+ e.PrettyPrint(&Sink{&buf}, defaultColorize, defaultIncludeSource)
+ return buf.String()
+}
+
+type TypeError struct {
+ DstType reflect.Type
+ SrcType reflect.Type
+ StructFieldName *string
+ Token *token.Token
+}
+
+func (e *TypeError) Error() string {
+ if e.StructFieldName != nil {
+ return fmt.Sprintf("cannot unmarshal %s into Go struct field %s of type %s", e.SrcType, *e.StructFieldName, e.DstType)
+ }
+ return fmt.Sprintf("cannot unmarshal %s into Go value of type %s", e.SrcType, e.DstType)
+}
+
+func (e *TypeError) PrettyPrint(p xerrors.Printer, colored, inclSource bool) error {
+ return e.FormatError(&FormatErrorPrinter{Printer: p, Colored: colored, InclSource: inclSource})
+}
+
+func (e *TypeError) FormatError(p xerrors.Printer) error {
+ var pp printer.Printer
+
+ var colored, inclSource bool
+ if fep, ok := p.(*FormatErrorPrinter); ok {
+ colored = fep.Colored
+ inclSource = fep.InclSource
+ }
+
+ pos := fmt.Sprintf("[%d:%d] ", e.Token.Position.Line, e.Token.Position.Column)
+ msg := pp.PrintErrorMessage(fmt.Sprintf("%s%s", pos, e.Error()), colored)
+ if inclSource {
+ msg += "\n" + pp.PrintErrorToken(e.Token, colored)
+ }
+ p.Print(msg)
+
+ return nil
+}
diff --git a/tmpmod/github.com/goccy/go-yaml/lexer/lexer.go b/tmpmod/github.com/goccy/go-yaml/lexer/lexer.go
new file mode 100644
index 00000000..60fa788c
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/lexer/lexer.go
@@ -0,0 +1,23 @@
+package lexer
+
+import (
+ "io"
+
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/scanner"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/token"
+)
+
+// Tokenize splits the source string into token instances
+func Tokenize(src string) token.Tokens {
+ var s scanner.Scanner
+ s.Init(src)
+ var tokens token.Tokens
+ for {
+ subTokens, err := s.Scan()
+ if err == io.EOF {
+ break
+ }
+ tokens.Add(subTokens...)
+ }
+ return tokens
+}
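
A minimal sketch of Tokenize in use; the test file below inspects the same Token fields (Value and Position):

package main

import (
	"fmt"

	"github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/lexer"
)

func main() {
	// Print line, column, and value for every token in a small document.
	for _, tk := range lexer.Tokenize("a: [1, 2]\n") {
		fmt.Printf("%d:%d %q\n", tk.Position.Line, tk.Position.Column, tk.Value)
	}
}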
diff --git a/tmpmod/github.com/goccy/go-yaml/lexer/lexer_test.go b/tmpmod/github.com/goccy/go-yaml/lexer/lexer_test.go
new file mode 100644
index 00000000..f87ac9f0
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/lexer/lexer_test.go
@@ -0,0 +1,621 @@
+package lexer_test
+
+import (
+ "sort"
+ "strings"
+ "testing"
+
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/lexer"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/token"
+)
+
+func TestTokenize(t *testing.T) {
+ sources := []string{
+ "null\n",
+ "{}\n",
+ "v: hi\n",
+ "v: \"true\"\n",
+ "v: \"false\"\n",
+ "v: true\n",
+ "v: false\n",
+ "v: 10\n",
+ "v: -10\n",
+ "v: 42\n",
+ "v: 4294967296\n",
+ "v: \"10\"\n",
+ "v: 0.1\n",
+ "v: 0.99\n",
+ "v: -0.1\n",
+ "v: .inf\n",
+ "v: -.inf\n",
+ "v: .nan\n",
+ "v: null\n",
+ "v: \"\"\n",
+ "v:\n- A\n- B\n",
+ "v:\n- A\n- |-\n B\n C\n",
+ "v:\n- A\n- 1\n- B:\n - 2\n - 3\n",
+ "a:\n b: c\n",
+ "a: '-'\n",
+ "123\n",
+ "hello: world\n",
+ "a: null\n",
+ "a: {x: 1}\n",
+ "a: [1, 2]\n",
+ "t2: 2018-01-09T10:40:47Z\nt4: 2098-01-09T10:40:47Z\n",
+ "a: {b: c, d: e}\n",
+ "a: 3s\n",
+ "a: \n",
+ "a: \"1:1\"\n",
+ "a: \"\\0\"\n",
+ "a: !!binary gIGC\n",
+ "a: !!binary |\n " + strings.Repeat("kJCQ", 17) + "kJ\n CQ\n",
+ "b: 2\na: 1\nd: 4\nc: 3\nsub:\n e: 5\n",
+ "a: 1.2.3.4\n",
+ "a: \"2015-02-24T18:19:39Z\"\n",
+ "a: 'b: c'\n",
+ "a: 'Hello #comment'\n",
+ "a: 100.5\n",
+ "a: bogus\n",
+ "\"a\": double quoted map key",
+ "'a': single quoted map key",
+ "a: \"double quoted\"\nb: \"value map\"",
+ "a: 'single quoted'\nb: 'value map'",
+ }
+ for _, src := range sources {
+ lexer.Tokenize(src).Dump()
+ }
+}
+
+type testToken struct {
+ line int
+ column int
+ value string
+}
+
+func TestSingleLineToken_ValueLineColumnPosition(t *testing.T) {
+ tests := []struct {
+ name string
+ src string
+ expect map[int]string // Column -> Value map.
+ }{
+ {
+ name: "single quote, single value array",
+ src: "test: ['test']",
+ expect: map[int]string{
+ 1: "test",
+ 5: ":",
+ 7: "[",
+ 8: "test",
+ 14: "]",
+ },
+ },
+ {
+ name: "double quote, single value array",
+ src: `test: ["test"]`,
+ expect: map[int]string{
+ 1: "test",
+ 5: ":",
+ 7: "[",
+ 8: "test",
+ 14: "]",
+ },
+ },
+ {
+ name: "no quotes, single value array",
+ src: "test: [somevalue]",
+ expect: map[int]string{
+ 1: "test",
+ 5: ":",
+ 7: "[",
+ 8: "somevalue",
+ 17: "]",
+ },
+ },
+ {
+ name: "single quote, multi value array",
+ src: "myarr: ['1','2','3', '444' , '55','66' , '77' ]",
+ expect: map[int]string{
+ 1: "myarr",
+ 6: ":",
+ 8: "[",
+ 9: "1",
+ 12: ",",
+ 13: "2",
+ 16: ",",
+ 17: "3",
+ 20: ",",
+ 22: "444",
+ 28: ",",
+ 30: "55",
+ 34: ",",
+ 35: "66",
+ 40: ",",
+ 43: "77",
+ 49: "]",
+ },
+ },
+ {
+ name: "double quote, multi value array",
+ src: `myarr: ["1","2","3", "444" , "55","66" , "77" ]`,
+ expect: map[int]string{
+ 1: "myarr",
+ 6: ":",
+ 8: "[",
+ 9: "1",
+ 12: ",",
+ 13: "2",
+ 16: ",",
+ 17: "3",
+ 20: ",",
+ 22: "444",
+ 28: ",",
+ 30: "55",
+ 34: ",",
+ 35: "66",
+ 40: ",",
+ 43: "77",
+ 49: "]",
+ },
+ },
+ {
+ name: "no quote, multi value array",
+ src: "numbers: [1, 5, 99,100, 3, 7 ]",
+ expect: map[int]string{
+ 1: "numbers",
+ 8: ":",
+ 10: "[",
+ 11: "1",
+ 12: ",",
+ 14: "5",
+ 15: ",",
+ 17: "99",
+ 19: ",",
+ 20: "100",
+ 23: ",",
+ 25: "3",
+ 26: ",",
+ 28: "7",
+ 30: "]",
+ },
+ },
+ {
+ name: "double quotes, nested arrays",
+ src: `Strings: ["1",["2",["3"]]]`,
+ expect: map[int]string{
+ 1: "Strings",
+ 8: ":",
+ 10: "[",
+ 11: "1",
+ 14: ",",
+ 15: "[",
+ 16: "2",
+ 19: ",",
+ 20: "[",
+ 21: "3",
+ 24: "]",
+ 25: "]",
+ 26: "]",
+ },
+ },
+ {
+ name: "mixed quotes, nested arrays",
+ src: `Values: [1,['2',"3",4,["5",6]]]`,
+ expect: map[int]string{
+ 1: "Values",
+ 7: ":",
+ 9: "[",
+ 10: "1",
+ 11: ",",
+ 12: "[",
+ 13: "2",
+ 16: ",",
+ 17: "3",
+ 20: ",",
+ 21: "4",
+ 22: ",",
+ 23: "[",
+ 24: "5",
+ 27: ",",
+ 28: "6",
+ 29: "]",
+ 30: "]",
+ 31: "]",
+ },
+ },
+ {
+ name: "double quote, empty array",
+ src: `Empty: ["", ""]`,
+ expect: map[int]string{
+ 1: "Empty",
+ 6: ":",
+ 8: "[",
+ 9: "",
+ 11: ",",
+ 13: "",
+ 15: "]",
+ },
+ },
+ {
+ name: "double quote key",
+ src: `"a": b`,
+ expect: map[int]string{
+ 1: "a",
+ 4: ":",
+ 6: "b",
+ },
+ },
+ {
+ name: "single quote key",
+ src: `'a': b`,
+ expect: map[int]string{
+ 1: "a",
+ 4: ":",
+ 6: "b",
+ },
+ },
+ {
+ name: "double quote key and value",
+ src: `"a": "b"`,
+ expect: map[int]string{
+ 1: "a",
+ 4: ":",
+ 6: "b",
+ },
+ },
+ {
+ name: "single quote key and value",
+ src: `'a': 'b'`,
+ expect: map[int]string{
+ 1: "a",
+ 4: ":",
+ 6: "b",
+ },
+ },
+ {
+ name: "double quote key, single quote value",
+ src: `"a": 'b'`,
+ expect: map[int]string{
+ 1: "a",
+ 4: ":",
+ 6: "b",
+ },
+ },
+ {
+ name: "single quote key, double quote value",
+ src: `'a': "b"`,
+ expect: map[int]string{
+ 1: "a",
+ 4: ":",
+ 6: "b",
+ },
+ },
+ }
+
+ for _, tc := range tests {
+ t.Run(tc.name, func(t *testing.T) {
+ got := lexer.Tokenize(tc.src)
+ sort.Slice(got, func(i, j int) bool {
+ return got[i].Position.Column < got[j].Position.Column
+ })
+ var expected []testToken
+ for k, v := range tc.expect {
+ tt := testToken{
+ line: 1,
+ column: k,
+ value: v,
+ }
+ expected = append(expected, tt)
+ }
+ sort.Slice(expected, func(i, j int) bool {
+ return expected[i].column < expected[j].column
+ })
+ if len(got) != len(expected) {
+ t.Errorf("Tokenize(%s) token count mismatch, expected:%d got:%d", tc.src, len(expected), len(got))
+ }
+ for i, tok := range got {
+ if !tokenMatches(tok, expected[i]) {
+ t.Errorf("Tokenize(%s) expected:%+v got line:%d column:%d value:%s", tc.src, expected[i], tok.Position.Line, tok.Position.Column, tok.Value)
+ }
+ }
+ })
+ }
+}
+
+func tokenMatches(t *token.Token, e testToken) bool {
+ return t != nil && t.Position != nil &&
+ t.Value == e.value &&
+ t.Position.Line == e.line &&
+ t.Position.Column == e.column
+}
+
+func TestMultiLineToken_ValueLineColumnPosition(t *testing.T) {
+ tests := []struct {
+ name string
+ src string
+ expect []testToken
+ }{
+ {
+ name: "double quote",
+ src: `one: "1 2 3 4 5"
+two: "1 2
+3 4
+5"
+three: "1 2 3 4
+5"`,
+ expect: []testToken{
+ {
+ line: 1,
+ column: 1,
+ value: "one",
+ },
+ {
+ line: 1,
+ column: 4,
+ value: ":",
+ },
+ {
+ line: 1,
+ column: 6,
+ value: "1 2 3 4 5",
+ },
+ {
+ line: 2,
+ column: 1,
+ value: "two",
+ },
+ {
+ line: 2,
+ column: 4,
+ value: ":",
+ },
+ {
+ line: 2,
+ column: 6,
+ value: "1 2 3 4 5",
+ },
+ {
+ line: 5,
+ column: 1,
+ value: "three",
+ },
+ {
+ line: 5,
+ column: 6,
+ value: ":",
+ },
+ {
+ line: 5,
+ column: 8,
+ value: "1 2 3 4 5",
+ },
+ },
+ },
+ {
+ name: "single quote in an array",
+ src: `arr: ['1', 'and
+two']
+last: 'hello'`,
+ expect: []testToken{
+ {
+ line: 1,
+ column: 1,
+ value: "arr",
+ },
+ {
+ line: 1,
+ column: 4,
+ value: ":",
+ },
+ {
+ line: 1,
+ column: 6,
+ value: "[",
+ },
+ {
+ line: 1,
+ column: 7,
+ value: "1",
+ },
+ {
+ line: 1,
+ column: 10,
+ value: ",",
+ },
+ {
+ line: 1,
+ column: 12,
+ value: "and two",
+ },
+ {
+ line: 2,
+ column: 5,
+ value: "]",
+ },
+ {
+ line: 3,
+ column: 1,
+ value: "last",
+ },
+ {
+ line: 3,
+ column: 5,
+ value: ":",
+ },
+ {
+ line: 3,
+ column: 7,
+ value: "hello",
+ },
+ },
+ },
+ {
+ name: "single quote and double quote",
+ src: `foo: "test
+
+
+
+
+bar"
+foo2: 'bar2'`,
+ expect: []testToken{
+ {
+ line: 1,
+ column: 1,
+ value: "foo",
+ },
+ {
+ line: 1,
+ column: 4,
+ value: ":",
+ },
+ {
+ line: 1,
+ column: 6,
+ value: "test bar",
+ },
+ {
+ line: 7,
+ column: 1,
+ value: "foo2",
+ },
+ {
+ line: 7,
+ column: 5,
+ value: ":",
+ },
+ {
+ line: 7,
+ column: 7,
+ value: "bar2",
+ },
+ },
+ },
+ {
+ name: "single and double quote map keys",
+ src: `"a": test
+'b': 1
+c: true`,
+ expect: []testToken{
+ {
+ line: 1,
+ column: 1,
+ value: "a",
+ },
+ {
+ line: 1,
+ column: 4,
+ value: ":",
+ },
+ {
+ line: 1,
+ column: 6,
+ value: "test",
+ },
+ {
+ line: 2,
+ column: 1,
+ value: "b",
+ },
+ {
+ line: 2,
+ column: 4,
+ value: ":",
+ },
+ {
+ line: 2,
+ column: 6,
+ value: "1",
+ },
+ {
+ line: 3,
+ column: 1,
+ value: "c",
+ },
+ {
+ line: 3,
+ column: 2,
+ value: ":",
+ },
+ {
+ line: 3,
+ column: 4,
+ value: "true",
+ },
+ },
+ },
+ {
+ name: "issue326",
+ src: `a: |
+ Text
+b: 1`,
+ expect: []testToken{
+ {
+ line: 1,
+ column: 1,
+ value: "a",
+ },
+ {
+ line: 1,
+ column: 2,
+ value: ":",
+ },
+ {
+ line: 1,
+ column: 4,
+ value: "|",
+ },
+ {
+ line: 2,
+ column: 3,
+ value: "Text\n",
+ },
+ {
+ line: 3,
+ column: 1,
+ value: "b",
+ },
+ {
+ line: 3,
+ column: 2,
+ value: ":",
+ },
+ {
+ line: 3,
+ column: 4,
+ value: "1",
+ },
+ },
+ },
+ }
+
+ for _, tc := range tests {
+ t.Run(tc.name, func(t *testing.T) {
+ got := lexer.Tokenize(tc.src)
+ sort.Slice(got, func(i, j int) bool {
+ // sort by line, then column
+ if got[i].Position.Line < got[j].Position.Line {
+ return true
+ } else if got[i].Position.Line == got[j].Position.Line {
+ return got[i].Position.Column < got[j].Position.Column
+ }
+ return false
+ })
+ sort.Slice(tc.expect, func(i, j int) bool {
+ if tc.expect[i].line < tc.expect[j].line {
+ return true
+ } else if tc.expect[i].line == tc.expect[j].line {
+ return tc.expect[i].column < tc.expect[j].column
+ }
+ return false
+ })
+ if len(got) != len(tc.expect) {
+ t.Errorf("Tokenize() token count mismatch, expected:%d got:%d", len(tc.expect), len(got))
+ }
+ for i, tok := range got {
+ if !tokenMatches(tok, tc.expect[i]) {
+ t.Errorf("Tokenize() expected:%+v got line:%d column:%d value:%s", tc.expect[i], tok.Position.Line, tok.Position.Column, tok.Value)
+ }
+ }
+ })
+ }
+}
diff --git a/tmpmod/github.com/goccy/go-yaml/option.go b/tmpmod/github.com/goccy/go-yaml/option.go
new file mode 100644
index 00000000..f36e445b
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/option.go
@@ -0,0 +1,278 @@
+package yaml
+
+import (
+ "io"
+ "reflect"
+
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/ast"
+)
+
+// DecodeOption functional option type for Decoder
+type DecodeOption func(d *Decoder) error
+
+// ReferenceReaders passes readers to the Decoder so that anchors defined in them can be referenced
+func ReferenceReaders(readers ...io.Reader) DecodeOption {
+ return func(d *Decoder) error {
+ d.referenceReaders = append(d.referenceReaders, readers...)
+ return nil
+ }
+}
+
+// ReferenceFiles passes files to the Decoder so that anchors defined in them can be referenced
+func ReferenceFiles(files ...string) DecodeOption {
+ return func(d *Decoder) error {
+ d.referenceFiles = files
+ return nil
+ }
+}
+
+// ReferenceDirs passes directories to the Decoder so that anchors defined in files under them can be referenced
+func ReferenceDirs(dirs ...string) DecodeOption {
+ return func(d *Decoder) error {
+ d.referenceDirs = dirs
+ return nil
+ }
+}
+
+// RecursiveDir makes the ReferenceDirs option search for YAML files recursively under the passed dirs
+func RecursiveDir(isRecursive bool) DecodeOption {
+ return func(d *Decoder) error {
+ d.isRecursiveDir = isRecursive
+ return nil
+ }
+}
+
+// Validator sets a StructValidator instance on the Decoder
+func Validator(v StructValidator) DecodeOption {
+ return func(d *Decoder) error {
+ d.validator = v
+ return nil
+ }
+}
+
+// Strict enables DisallowUnknownField and DisallowDuplicateKey
+func Strict() DecodeOption {
+ return func(d *Decoder) error {
+ d.disallowUnknownField = true
+ d.disallowDuplicateKey = true
+ return nil
+ }
+}
+
+// DisallowUnknownField causes the Decoder to return an error when the destination
+// is a struct and the input contains object keys which do not match any
+// non-ignored, exported fields in the destination.
+func DisallowUnknownField() DecodeOption {
+ return func(d *Decoder) error {
+ d.disallowUnknownField = true
+ return nil
+ }
+}
+
+// DisallowDuplicateKey causes the Decoder to return an error when the input contains duplicate mapping keys
+func DisallowDuplicateKey() DecodeOption {
+ return func(d *Decoder) error {
+ d.disallowDuplicateKey = true
+ return nil
+ }
+}
+
+// UseOrderedMap decodes mappings into MapSlice ( ordered map )
+// whenever there is no type specification for the destination
+func UseOrderedMap() DecodeOption {
+ return func(d *Decoder) error {
+ d.useOrderedMap = true
+ return nil
+ }
+}
+
+// UseJSONUnmarshaler converts the argument from `YAML` to `JSON` and calls `UnmarshalJSON([]byte) error` when it is implemented
+// and neither `BytesUnmarshaler` nor `InterfaceUnmarshaler` is implemented.
+func UseJSONUnmarshaler() DecodeOption {
+ return func(d *Decoder) error {
+ d.useJSONUnmarshaler = true
+ return nil
+ }
+}
+
+// CustomUnmarshaler overrides any decoding process for the type specified in generics.
+//
+// NOTE: If RegisterCustomUnmarshaler and CustomUnmarshaler of DecodeOption are specified for the same type,
+// the CustomUnmarshaler specified in DecodeOption takes precedence.
+func CustomUnmarshaler[T any](unmarshaler func(*T, []byte) error) DecodeOption {
+ return func(d *Decoder) error {
+ var typ *T
+ d.customUnmarshalerMap[reflect.TypeOf(typ)] = func(v interface{}, b []byte) error {
+ return unmarshaler(v.(*T), b)
+ }
+ return nil
+ }
+}
+
+// EncodeOption functional option type for Encoder
+type EncodeOption func(e *Encoder) error
+
+// Indent changes the number of spaces used for indentation
+func Indent(spaces int) EncodeOption {
+ return func(e *Encoder) error {
+ e.indent = spaces
+ return nil
+ }
+}
+
+// IndentSequence causes sequence values to be indented by the same number of spaces as Indent
+func IndentSequence(indent bool) EncodeOption {
+ return func(e *Encoder) error {
+ e.indentSequence = indent
+ return nil
+ }
+}
+
+// UseSingleQuote determines if single or double quotes should be preferred for strings.
+func UseSingleQuote(sq bool) EncodeOption {
+ return func(e *Encoder) error {
+ e.singleQuote = sq
+ return nil
+ }
+}
+
+// Flow enables encoding in flow style
+func Flow(isFlowStyle bool) EncodeOption {
+ return func(e *Encoder) error {
+ e.isFlowStyle = isFlowStyle
+ return nil
+ }
+}
+
+// UseLiteralStyleIfMultiline causes multiline strings to be encoded with literal block syntax,
+// no matter what characters they include
+func UseLiteralStyleIfMultiline(useLiteralStyleIfMultiline bool) EncodeOption {
+ return func(e *Encoder) error {
+ e.useLiteralStyleIfMultiline = useLiteralStyleIfMultiline
+ return nil
+ }
+}
+
+// JSON makes the Encoder emit output in JSON format.
+func JSON() EncodeOption {
+ return func(e *Encoder) error {
+ e.isJSONStyle = true
+ e.isFlowStyle = true
+ return nil
+ }
+}
+
+// MarshalAnchor registers a callback that is invoked when the Encoder finds an anchor during encoding.
+func MarshalAnchor(callback func(*ast.AnchorNode, interface{}) error) EncodeOption {
+ return func(e *Encoder) error {
+ e.anchorCallback = callback
+ return nil
+ }
+}
+
+// UseJSONMarshaler: if neither `BytesMarshaler` nor `InterfaceMarshaler`
+// nor `encoding.TextMarshaler` is implemented but `MarshalJSON() ([]byte, error)` is,
+// call `MarshalJSON` and convert the returned `JSON` to `YAML` for processing.
+func UseJSONMarshaler() EncodeOption {
+ return func(e *Encoder) error {
+ e.useJSONMarshaler = true
+ return nil
+ }
+}
+
+// CustomMarshaler overrides any encoding process for the type specified in generics.
+//
+// NOTE: If type T implements MarshalYAML for pointer receiver, the type specified in CustomMarshaler must be *T.
+// If RegisterCustomMarshaler and CustomMarshaler of EncodeOption are specified for the same type,
+// the CustomMarshaler specified in EncodeOption takes precedence.
+func CustomMarshaler[T any](marshaler func(T) ([]byte, error)) EncodeOption {
+ return func(e *Encoder) error {
+ var typ T
+ e.customMarshalerMap[reflect.TypeOf(typ)] = func(v interface{}) ([]byte, error) {
+ return marshaler(v.(T))
+ }
+ return nil
+ }
+}
+
+// CommentPosition type of the position for comment.
+type CommentPosition int
+
+const (
+ CommentHeadPosition CommentPosition = CommentPosition(iota)
+ CommentLinePosition
+ CommentFootPosition
+)
+
+func (p CommentPosition) String() string {
+ switch p {
+ case CommentHeadPosition:
+ return "Head"
+ case CommentLinePosition:
+ return "Line"
+ case CommentFootPosition:
+ return "Foot"
+ default:
+ return ""
+ }
+}
+
+// LineComment creates a one-line comment for CommentMap.
+func LineComment(text string) *Comment {
+ return &Comment{
+ Texts: []string{text},
+ Position: CommentLinePosition,
+ }
+}
+
+// HeadComment creates a multiline comment for CommentMap.
+func HeadComment(texts ...string) *Comment {
+ return &Comment{
+ Texts: texts,
+ Position: CommentHeadPosition,
+ }
+}
+
+// FootComment creates a multiline comment for CommentMap.
+func FootComment(texts ...string) *Comment {
+ return &Comment{
+ Texts: texts,
+ Position: CommentFootPosition,
+ }
+}
+
+// Comment holds the raw data for a comment.
+type Comment struct {
+ Texts []string
+ Position CommentPosition
+}
+
+// CommentMap maps comment positions (YAMLPath strings) to comment information.
+type CommentMap map[string][]*Comment
+
+// WithComment adds comments using the location and text information given in the CommentMap.
+func WithComment(cm CommentMap) EncodeOption {
+ return func(e *Encoder) error {
+ commentMap := map[*Path][]*Comment{}
+ for k, v := range cm {
+ path, err := PathString(k)
+ if err != nil {
+ return err
+ }
+ commentMap[path] = v
+ }
+ e.commentMap = commentMap
+ return nil
+ }
+}
+
+// CommentToMap applies the position and content of comments in a YAML document to a CommentMap.
+func CommentToMap(cm CommentMap) DecodeOption {
+ return func(d *Decoder) error {
+ if cm == nil {
+ return ErrInvalidCommentMapValue
+ }
+ d.toCommentMap = cm
+ return nil
+ }
+}
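
A minimal sketch of how the decode and encode options above combine to round-trip comments. It assumes the vendored package exposes the same top-level `UnmarshalWithOptions` / `MarshalWithOptions` helpers as upstream goccy/go-yaml; those functions are not part of this hunk.

```go
package main

import (
	"fmt"

	yaml "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml"
)

func main() {
	src := []byte("# head comment\nfoo: 1 # line comment\n")

	// Collect comments (keyed by YAMLPath) while decoding.
	cm := yaml.CommentMap{}
	var v map[string]int
	if err := yaml.UnmarshalWithOptions(src, &v, yaml.CommentToMap(cm)); err != nil {
		panic(err)
	}

	// Re-attach the collected comments while encoding.
	out, err := yaml.MarshalWithOptions(v, yaml.WithComment(cm))
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out))
}
```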
diff --git a/tmpmod/github.com/goccy/go-yaml/parser/context.go b/tmpmod/github.com/goccy/go-yaml/parser/context.go
new file mode 100644
index 00000000..592ed9f6
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/parser/context.go
@@ -0,0 +1,192 @@
+package parser
+
+import (
+ "fmt"
+ "strings"
+
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/token"
+)
+
+// context holds the state used while parsing.
+type context struct {
+ parent *context
+ idx int
+ size int
+ tokens token.Tokens
+ path string
+}
+
+var pathSpecialChars = []string{
+ "$", "*", ".", "[", "]",
+}
+
+func containsPathSpecialChar(path string) bool {
+ for _, char := range pathSpecialChars {
+ if strings.Contains(path, char) {
+ return true
+ }
+ }
+ return false
+}
+
+func normalizePath(path string) string {
+ if containsPathSpecialChar(path) {
+ return fmt.Sprintf("'%s'", path)
+ }
+ return path
+}
+
+func (c *context) withChild(path string) *context {
+ ctx := c.copy()
+ path = normalizePath(path)
+ ctx.path += fmt.Sprintf(".%s", path)
+ return ctx
+}
+
+func (c *context) withIndex(idx uint) *context {
+ ctx := c.copy()
+ ctx.path += fmt.Sprintf("[%d]", idx)
+ return ctx
+}
+
+func (c *context) copy() *context {
+ return &context{
+ parent: c,
+ idx: c.idx,
+ size: c.size,
+ tokens: append(token.Tokens{}, c.tokens...),
+ path: c.path,
+ }
+}
+
+func (c *context) next() bool {
+ return c.idx < c.size
+}
+
+func (c *context) previousToken() *token.Token {
+ if c.idx > 0 {
+ return c.tokens[c.idx-1]
+ }
+ return nil
+}
+
+func (c *context) insertToken(idx int, tk *token.Token) {
+ if c.parent != nil {
+ c.parent.insertToken(idx, tk)
+ }
+ if c.size < idx {
+ return
+ }
+ if c.size == idx {
+ curToken := c.tokens[c.size-1]
+ tk.Next = curToken
+ curToken.Prev = tk
+
+ c.tokens = append(c.tokens, tk)
+ c.size = len(c.tokens)
+ return
+ }
+
+ curToken := c.tokens[idx]
+ tk.Next = curToken
+ curToken.Prev = tk
+
+ c.tokens = append(c.tokens[:idx+1], c.tokens[idx:]...)
+ c.tokens[idx] = tk
+ c.size = len(c.tokens)
+}
+
+func (c *context) currentToken() *token.Token {
+ if c.idx >= c.size {
+ return nil
+ }
+ return c.tokens[c.idx]
+}
+
+func (c *context) nextToken() *token.Token {
+ if c.idx+1 >= c.size {
+ return nil
+ }
+ return c.tokens[c.idx+1]
+}
+
+func (c *context) afterNextToken() *token.Token {
+ if c.idx+2 >= c.size {
+ return nil
+ }
+ return c.tokens[c.idx+2]
+}
+
+func (c *context) nextNotCommentToken() *token.Token {
+ for i := c.idx + 1; i < c.size; i++ {
+ tk := c.tokens[i]
+ if tk.Type == token.CommentType {
+ continue
+ }
+ return tk
+ }
+ return nil
+}
+
+func (c *context) afterNextNotCommentToken() *token.Token {
+ notCommentTokenCount := 0
+ for i := c.idx + 1; i < c.size; i++ {
+ tk := c.tokens[i]
+ if tk.Type == token.CommentType {
+ continue
+ }
+ notCommentTokenCount++
+ if notCommentTokenCount == 2 {
+ return tk
+ }
+ }
+ return nil
+}
+
+func (c *context) isCurrentCommentToken() bool {
+ tk := c.currentToken()
+ if tk == nil {
+ return false
+ }
+ return tk.Type == token.CommentType
+}
+
+func (c *context) progressIgnoreComment(num int) {
+ if c.parent != nil {
+ c.parent.progressIgnoreComment(num)
+ }
+ if c.size <= c.idx+num {
+ c.idx = c.size
+ } else {
+ c.idx += num
+ }
+}
+
+func (c *context) progress(num int) {
+ if c.isCurrentCommentToken() {
+ return
+ }
+ c.progressIgnoreComment(num)
+}
+
+func newContext(tokens token.Tokens, mode Mode) *context {
+ filteredTokens := []*token.Token{}
+ if mode&ParseComments != 0 {
+ filteredTokens = tokens
+ } else {
+ for _, tk := range tokens {
+ if tk.Type == token.CommentType {
+ continue
+ }
+ // keep prev/next reference between tokens containing comments
+ // https://github.com/goccy/go-yaml/issues/254
+ filteredTokens = append(filteredTokens, tk)
+ }
+ }
+ return &context{
+ idx: 0,
+ size: len(filteredTokens),
+ tokens: token.Tokens(filteredTokens),
+ path: "$",
+ }
+}
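
The `path` field maintained by `withChild` / `withIndex` above is what surfaces as `Node.GetPath()` on the resulting AST. A small sketch that prints those paths via the exported `parser` and `ast` packages; the exact output depends on walk order, and keys containing special characters (such as `k.l`) come back quoted, e.g. `$.'k.l'`.

```go
package main

import (
	"fmt"

	"github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/ast"
	"github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/parser"
)

// pathPrinter walks the AST and prints each node's YAMLPath.
type pathPrinter struct{}

func (p *pathPrinter) Visit(node ast.Node) ast.Visitor {
	fmt.Println(node.GetPath()) // e.g. "$.a", "$.a.b[0]", "$.'k.l'"
	return p
}

func main() {
	f, err := parser.ParseBytes([]byte("a:\n  b:\n    - c\nk.l: v\n"), 0)
	if err != nil {
		panic(err)
	}
	for _, doc := range f.Docs {
		ast.Walk(&pathPrinter{}, doc.Body)
	}
}
```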
diff --git a/tmpmod/github.com/goccy/go-yaml/parser/parser.go b/tmpmod/github.com/goccy/go-yaml/parser/parser.go
new file mode 100644
index 00000000..9a813324
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/parser/parser.go
@@ -0,0 +1,743 @@
+package parser
+
+import (
+ "fmt"
+ "os"
+ "strings"
+
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/ast"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/internal/errors"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/lexer"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/token"
+ "golang.org/x/xerrors"
+)
+
+type parser struct{}
+
+func (p *parser) parseMapping(ctx *context) (*ast.MappingNode, error) {
+ mapTk := ctx.currentToken()
+ node := ast.Mapping(mapTk, true)
+ node.SetPath(ctx.path)
+ ctx.progress(1) // skip MappingStart token
+ for ctx.next() {
+ tk := ctx.currentToken()
+ if tk.Type == token.MappingEndType {
+ node.End = tk
+ return node, nil
+ } else if tk.Type == token.CollectEntryType {
+ ctx.progress(1)
+ continue
+ }
+
+ value, err := p.parseMappingValue(ctx)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to parse mapping value in mapping node")
+ }
+ mvnode, ok := value.(*ast.MappingValueNode)
+ if !ok {
+ return nil, errors.ErrSyntax("failed to parse flow mapping node", value.GetToken())
+ }
+ node.Values = append(node.Values, mvnode)
+ ctx.progress(1)
+ }
+ return nil, errors.ErrSyntax("unterminated flow mapping", node.GetToken())
+}
+
+func (p *parser) parseSequence(ctx *context) (*ast.SequenceNode, error) {
+ node := ast.Sequence(ctx.currentToken(), true)
+ node.SetPath(ctx.path)
+ ctx.progress(1) // skip SequenceStart token
+ for ctx.next() {
+ tk := ctx.currentToken()
+ if tk.Type == token.SequenceEndType {
+ node.End = tk
+ break
+ } else if tk.Type == token.CollectEntryType {
+ ctx.progress(1)
+ continue
+ }
+
+ value, err := p.parseToken(ctx.withIndex(uint(len(node.Values))), tk)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to parse sequence value in flow sequence node")
+ }
+ node.Values = append(node.Values, value)
+ ctx.progress(1)
+ }
+ return node, nil
+}
+
+func (p *parser) parseTag(ctx *context) (*ast.TagNode, error) {
+ tagToken := ctx.currentToken()
+ node := ast.Tag(tagToken)
+ node.SetPath(ctx.path)
+ ctx.progress(1) // skip tag token
+ var (
+ value ast.Node
+ err error
+ )
+ switch token.ReservedTagKeyword(tagToken.Value) {
+ case token.MappingTag,
+ token.OrderedMapTag:
+ value, err = p.parseMapping(ctx)
+ case token.IntegerTag,
+ token.FloatTag,
+ token.StringTag,
+ token.BinaryTag,
+ token.TimestampTag,
+ token.NullTag:
+ typ := ctx.currentToken().Type
+ if typ == token.LiteralType || typ == token.FoldedType {
+ value, err = p.parseLiteral(ctx)
+ } else {
+ value = p.parseScalarValue(ctx.currentToken())
+ }
+ case token.SequenceTag,
+ token.SetTag:
+ err = errors.ErrSyntax(fmt.Sprintf("sorry, currently not supported %s tag", tagToken.Value), tagToken)
+ default:
+ // custom tag
+ value, err = p.parseToken(ctx, ctx.currentToken())
+ }
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to parse tag value")
+ }
+ node.Value = value
+ return node, nil
+}
+
+func (p *parser) removeLeftSideNewLineCharacter(src string) string {
+ // CR or LF or CRLF
+ return strings.TrimLeft(strings.TrimLeft(strings.TrimLeft(src, "\r"), "\n"), "\r\n")
+}
+
+func (p *parser) existsNewLineCharacter(src string) bool {
+ if strings.Index(src, "\n") > 0 {
+ return true
+ }
+ if strings.Index(src, "\r") > 0 {
+ return true
+ }
+ return false
+}
+
+func (p *parser) validateMapKey(tk *token.Token) error {
+ if tk.Type != token.StringType {
+ return nil
+ }
+ origin := p.removeLeftSideNewLineCharacter(tk.Origin)
+ if p.existsNewLineCharacter(origin) {
+ return errors.ErrSyntax("unexpected key name", tk)
+ }
+ return nil
+}
+
+func (p *parser) createNullToken(base *token.Token) *token.Token {
+ pos := *(base.Position)
+ pos.Column++
+ return token.New("null", "null", &pos)
+}
+
+func (p *parser) parseMapValue(ctx *context, key ast.MapKeyNode, colonToken *token.Token) (ast.Node, error) {
+ node, err := p.createMapValueNode(ctx, key, colonToken)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to create map value node")
+ }
+ if node != nil && node.GetPath() == "" {
+ node.SetPath(ctx.path)
+ }
+ return node, nil
+}
+
+func (p *parser) createMapValueNode(ctx *context, key ast.MapKeyNode, colonToken *token.Token) (ast.Node, error) {
+ tk := ctx.currentToken()
+ if tk == nil {
+ nullToken := p.createNullToken(colonToken)
+ ctx.insertToken(ctx.idx, nullToken)
+ return ast.Null(nullToken), nil
+ }
+ var comment *ast.CommentGroupNode
+ if tk.Type == token.CommentType {
+ comment = p.parseCommentOnly(ctx)
+ if comment != nil {
+ comment.SetPath(ctx.withChild(key.GetToken().Value).path)
+ }
+ tk = ctx.currentToken()
+ }
+ if tk.Position.Column == key.GetToken().Position.Column && tk.Type == token.StringType {
+ // in this case,
+ // ----
+ // key:
+ // next
+
+ nullToken := p.createNullToken(colonToken)
+ ctx.insertToken(ctx.idx, nullToken)
+ nullNode := ast.Null(nullToken)
+
+ if comment != nil {
+ nullNode.SetComment(comment)
+ } else {
+ // If there is a comment, it is already bound to the key node,
+ // so remove the comment from the key to bind it to the null value.
+ keyComment := key.GetComment()
+ if keyComment != nil {
+ if err := key.SetComment(nil); err != nil {
+ return nil, err
+ }
+ nullNode.SetComment(keyComment)
+ }
+ }
+ return nullNode, nil
+ }
+
+ if tk.Position.Column < key.GetToken().Position.Column {
+ // in this case,
+ // ----
+ // key:
+ // next
+ nullToken := p.createNullToken(colonToken)
+ ctx.insertToken(ctx.idx, nullToken)
+ nullNode := ast.Null(nullToken)
+ if comment != nil {
+ nullNode.SetComment(comment)
+ }
+ return nullNode, nil
+ }
+
+ value, err := p.parseToken(ctx, ctx.currentToken())
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to parse mapping 'value' node")
+ }
+ if comment != nil {
+ value.SetComment(comment)
+ }
+ return value, nil
+}
+
+func (p *parser) validateMapValue(ctx *context, key, value ast.Node) error {
+ keyColumn := key.GetToken().Position.Column
+ valueColumn := value.GetToken().Position.Column
+ if keyColumn != valueColumn {
+ return nil
+ }
+ if value.Type() != ast.StringType {
+ return nil
+ }
+ ntk := ctx.nextToken()
+ if ntk == nil || (ntk.Type != token.MappingValueType && ntk.Type != token.SequenceEntryType) {
+ return errors.ErrSyntax("could not find expected ':' token", value.GetToken())
+ }
+ return nil
+}
+
+func (p *parser) parseMappingValue(ctx *context) (ast.Node, error) {
+ key, err := p.parseMapKey(ctx)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to parse map key")
+ }
+ keyText := key.GetToken().Value
+ key.SetPath(ctx.withChild(keyText).path)
+ if err := p.validateMapKey(key.GetToken()); err != nil {
+ return nil, errors.Wrapf(err, "validate mapping key error")
+ }
+ ctx.progress(1) // progress to mapping value token
+ tk := ctx.currentToken() // get mapping value token
+ if tk == nil {
+ return nil, errors.ErrSyntax("unexpected map", key.GetToken())
+ }
+ ctx.progress(1) // progress to value token
+ if err := p.setSameLineCommentIfExists(ctx.withChild(keyText), key); err != nil {
+ return nil, errors.Wrapf(err, "failed to set same line comment to node")
+ }
+ if key.GetComment() != nil {
+ // if current token is comment, GetComment() is not nil.
+ // then progress to value token
+ ctx.progressIgnoreComment(1)
+ }
+
+ value, err := p.parseMapValue(ctx.withChild(keyText), key, tk)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to parse map value")
+ }
+ if err := p.validateMapValue(ctx, key, value); err != nil {
+ return nil, errors.Wrapf(err, "failed to validate map value")
+ }
+
+ mvnode := ast.MappingValue(tk, key, value)
+ mvnode.SetPath(ctx.withChild(keyText).path)
+ node := ast.Mapping(tk, false, mvnode)
+ node.SetPath(ctx.withChild(keyText).path)
+
+ ntk := ctx.nextNotCommentToken()
+ antk := ctx.afterNextNotCommentToken()
+ for antk != nil && antk.Type == token.MappingValueType &&
+ ntk.Position.Column == key.GetToken().Position.Column {
+ ctx.progressIgnoreComment(1)
+ value, err := p.parseToken(ctx, ctx.currentToken())
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to parse mapping node")
+ }
+ switch value.Type() {
+ case ast.MappingType:
+ c := value.(*ast.MappingNode)
+ comment := c.GetComment()
+ for idx, v := range c.Values {
+ if idx == 0 && comment != nil {
+ if err := v.SetComment(comment); err != nil {
+ return nil, errors.Wrapf(err, "failed to set comment token to node")
+ }
+ }
+ node.Values = append(node.Values, v)
+ }
+ case ast.MappingValueType:
+ node.Values = append(node.Values, value.(*ast.MappingValueNode))
+ default:
+ return nil, xerrors.Errorf("failed to parse mapping value node. node is %s", value.Type())
+ }
+ ntk = ctx.nextNotCommentToken()
+ antk = ctx.afterNextNotCommentToken()
+ }
+ if len(node.Values) == 1 {
+ mapKeyCol := mvnode.Key.GetToken().Position.Column
+ commentTk := ctx.nextToken()
+ if commentTk != nil && commentTk.Type == token.CommentType && mapKeyCol <= commentTk.Position.Column {
+ // If the comment is in the same or deeper column as the last element column in map value,
+ // treat it as a footer comment for the last element.
+ comment := p.parseFootComment(ctx, mapKeyCol)
+ mvnode.FootComment = comment
+ }
+ return mvnode, nil
+ }
+ mapCol := node.GetToken().Position.Column
+ commentTk := ctx.nextToken()
+ if commentTk != nil && commentTk.Type == token.CommentType && mapCol <= commentTk.Position.Column {
+ // If the comment is in the same or deeper column as the last element column in map value,
+ // treat it as a footer comment for the last element.
+ comment := p.parseFootComment(ctx, mapCol)
+ node.FootComment = comment
+ }
+ return node, nil
+}
+
+func (p *parser) parseSequenceEntry(ctx *context) (*ast.SequenceNode, error) {
+ tk := ctx.currentToken()
+ sequenceNode := ast.Sequence(tk, false)
+ sequenceNode.SetPath(ctx.path)
+ curColumn := tk.Position.Column
+ for tk.Type == token.SequenceEntryType {
+ ctx.progress(1) // skip sequence token
+ tk = ctx.currentToken()
+ if tk == nil {
+ return nil, errors.ErrSyntax("empty sequence entry", ctx.previousToken())
+ }
+ var comment *ast.CommentGroupNode
+ if tk.Type == token.CommentType {
+ comment = p.parseCommentOnly(ctx)
+ tk = ctx.currentToken()
+ if tk.Type == token.SequenceEntryType {
+ ctx.progress(1) // skip sequence token
+ }
+ }
+ value, err := p.parseToken(ctx.withIndex(uint(len(sequenceNode.Values))), ctx.currentToken())
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to parse sequence")
+ }
+ if comment != nil {
+ comment.SetPath(ctx.withIndex(uint(len(sequenceNode.Values))).path)
+ sequenceNode.ValueHeadComments = append(sequenceNode.ValueHeadComments, comment)
+ } else {
+ sequenceNode.ValueHeadComments = append(sequenceNode.ValueHeadComments, nil)
+ }
+ sequenceNode.Values = append(sequenceNode.Values, value)
+ tk = ctx.nextNotCommentToken()
+ if tk == nil {
+ break
+ }
+ if tk.Type != token.SequenceEntryType {
+ break
+ }
+ if tk.Position.Column != curColumn {
+ break
+ }
+ ctx.progressIgnoreComment(1)
+ }
+ commentTk := ctx.nextToken()
+ if commentTk != nil && commentTk.Type == token.CommentType && curColumn <= commentTk.Position.Column {
+ // If the comment is in the same or deeper column as the last element column in sequence value,
+ // treat it as a footer comment for the last element.
+ comment := p.parseFootComment(ctx, curColumn)
+ sequenceNode.FootComment = comment
+ }
+ return sequenceNode, nil
+}
+
+func (p *parser) parseAnchor(ctx *context) (*ast.AnchorNode, error) {
+ tk := ctx.currentToken()
+ anchor := ast.Anchor(tk)
+ anchor.SetPath(ctx.path)
+ ntk := ctx.nextToken()
+ if ntk == nil {
+ return nil, errors.ErrSyntax("unexpected anchor. anchor name is undefined", tk)
+ }
+ ctx.progress(1) // skip anchor token
+ name, err := p.parseToken(ctx, ctx.currentToken())
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to parse anchor name node")
+ }
+ anchor.Name = name
+ ntk = ctx.nextToken()
+ if ntk == nil {
+ return nil, errors.ErrSyntax("unexpected anchor. anchor value is undefined", ctx.currentToken())
+ }
+ ctx.progress(1)
+ value, err := p.parseToken(ctx, ctx.currentToken())
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to parse anchor value node")
+ }
+ anchor.Value = value
+ return anchor, nil
+}
+
+func (p *parser) parseAlias(ctx *context) (*ast.AliasNode, error) {
+ tk := ctx.currentToken()
+ alias := ast.Alias(tk)
+ alias.SetPath(ctx.path)
+ ntk := ctx.nextToken()
+ if ntk == nil {
+ return nil, errors.ErrSyntax("unexpected alias. alias name is undefined", tk)
+ }
+ ctx.progress(1) // skip alias token
+ name, err := p.parseToken(ctx, ctx.currentToken())
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to parse alias name node")
+ }
+ alias.Value = name
+ return alias, nil
+}
+
+func (p *parser) parseMapKey(ctx *context) (ast.MapKeyNode, error) {
+ tk := ctx.currentToken()
+ if value := p.parseScalarValue(tk); value != nil {
+ return value, nil
+ }
+ switch tk.Type {
+ case token.MergeKeyType:
+ return ast.MergeKey(tk), nil
+ case token.MappingKeyType:
+ return p.parseMappingKey(ctx)
+ }
+ return nil, errors.ErrSyntax("unexpected mapping key", tk)
+}
+
+func (p *parser) parseStringValue(tk *token.Token) *ast.StringNode {
+ switch tk.Type {
+ case token.StringType,
+ token.SingleQuoteType,
+ token.DoubleQuoteType:
+ return ast.String(tk)
+ }
+ return nil
+}
+
+func (p *parser) parseScalarValueWithComment(ctx *context, tk *token.Token) (ast.ScalarNode, error) {
+ node := p.parseScalarValue(tk)
+ if node == nil {
+ return nil, nil
+ }
+ node.SetPath(ctx.path)
+ if p.isSameLineComment(ctx.nextToken(), node) {
+ ctx.progress(1)
+ if err := p.setSameLineCommentIfExists(ctx, node); err != nil {
+ return nil, errors.Wrapf(err, "failed to set same line comment to node")
+ }
+ }
+ return node, nil
+}
+
+func (p *parser) parseScalarValue(tk *token.Token) ast.ScalarNode {
+ if node := p.parseStringValue(tk); node != nil {
+ return node
+ }
+ switch tk.Type {
+ case token.NullType:
+ return ast.Null(tk)
+ case token.BoolType:
+ return ast.Bool(tk)
+ case token.IntegerType,
+ token.BinaryIntegerType,
+ token.OctetIntegerType,
+ token.HexIntegerType:
+ return ast.Integer(tk)
+ case token.FloatType:
+ return ast.Float(tk)
+ case token.InfinityType:
+ return ast.Infinity(tk)
+ case token.NanType:
+ return ast.Nan(tk)
+ }
+ return nil
+}
+
+func (p *parser) parseDirective(ctx *context) (*ast.DirectiveNode, error) {
+ node := ast.Directive(ctx.currentToken())
+ ctx.progress(1) // skip directive token
+ value, err := p.parseToken(ctx, ctx.currentToken())
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to parse directive value")
+ }
+ node.Value = value
+ ctx.progress(1)
+ tk := ctx.currentToken()
+ if tk == nil {
+ // Since current token is nil, use the previous token to specify
+ // the syntax error location.
+ return nil, errors.ErrSyntax("unexpected directive value. document not started", ctx.previousToken())
+ }
+ if tk.Type != token.DocumentHeaderType {
+ return nil, errors.ErrSyntax("unexpected directive value. document not started", ctx.currentToken())
+ }
+ return node, nil
+}
+
+func (p *parser) parseLiteral(ctx *context) (*ast.LiteralNode, error) {
+ node := ast.Literal(ctx.currentToken())
+ ctx.progress(1) // skip literal/folded token
+
+ tk := ctx.currentToken()
+ var comment *ast.CommentGroupNode
+ if tk.Type == token.CommentType {
+ comment = p.parseCommentOnly(ctx)
+ comment.SetPath(ctx.path)
+ if err := node.SetComment(comment); err != nil {
+ return nil, errors.Wrapf(err, "failed to set comment to literal")
+ }
+ tk = ctx.currentToken()
+ }
+ value, err := p.parseToken(ctx, tk)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to parse literal/folded value")
+ }
+ snode, ok := value.(*ast.StringNode)
+ if !ok {
+ return nil, errors.ErrSyntax("unexpected token. required string token", value.GetToken())
+ }
+ node.Value = snode
+ return node, nil
+}
+
+func (p *parser) isSameLineComment(tk *token.Token, node ast.Node) bool {
+ if tk == nil {
+ return false
+ }
+ if tk.Type != token.CommentType {
+ return false
+ }
+ return tk.Position.Line == node.GetToken().Position.Line
+}
+
+func (p *parser) setSameLineCommentIfExists(ctx *context, node ast.Node) error {
+ tk := ctx.currentToken()
+ if !p.isSameLineComment(tk, node) {
+ return nil
+ }
+ comment := ast.CommentGroup([]*token.Token{tk})
+ comment.SetPath(ctx.path)
+ if err := node.SetComment(comment); err != nil {
+ return errors.Wrapf(err, "failed to set comment token to ast.Node")
+ }
+ return nil
+}
+
+func (p *parser) parseDocument(ctx *context) (*ast.DocumentNode, error) {
+ startTk := ctx.currentToken()
+ ctx.progress(1) // skip document header token
+ body, err := p.parseToken(ctx, ctx.currentToken())
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to parse document body")
+ }
+ node := ast.Document(startTk, body)
+ if ntk := ctx.nextToken(); ntk != nil && ntk.Type == token.DocumentEndType {
+ node.End = ntk
+ ctx.progress(1)
+ }
+ return node, nil
+}
+
+func (p *parser) parseCommentOnly(ctx *context) *ast.CommentGroupNode {
+ commentTokens := []*token.Token{}
+ for {
+ tk := ctx.currentToken()
+ if tk == nil {
+ break
+ }
+ if tk.Type != token.CommentType {
+ break
+ }
+ commentTokens = append(commentTokens, tk)
+ ctx.progressIgnoreComment(1) // skip comment token
+ }
+ return ast.CommentGroup(commentTokens)
+}
+
+func (p *parser) parseFootComment(ctx *context, col int) *ast.CommentGroupNode {
+ commentTokens := []*token.Token{}
+ for {
+ ctx.progressIgnoreComment(1)
+ commentTokens = append(commentTokens, ctx.currentToken())
+
+ nextTk := ctx.nextToken()
+ if nextTk == nil {
+ break
+ }
+ if nextTk.Type != token.CommentType {
+ break
+ }
+ if col > nextTk.Position.Column {
+ break
+ }
+ }
+ return ast.CommentGroup(commentTokens)
+}
+
+func (p *parser) parseComment(ctx *context) (ast.Node, error) {
+ group := p.parseCommentOnly(ctx)
+ node, err := p.parseToken(ctx, ctx.currentToken())
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to parse node after comment")
+ }
+ if node == nil {
+ return group, nil
+ }
+ group.SetPath(node.GetPath())
+ if err := node.SetComment(group); err != nil {
+ return nil, errors.Wrapf(err, "failed to set comment token to node")
+ }
+ return node, nil
+}
+
+func (p *parser) parseMappingKey(ctx *context) (*ast.MappingKeyNode, error) {
+ keyTk := ctx.currentToken()
+ node := ast.MappingKey(keyTk)
+ node.SetPath(ctx.path)
+ ctx.progress(1) // skip mapping key token
+ value, err := p.parseToken(ctx.withChild(keyTk.Value), ctx.currentToken())
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to parse map key")
+ }
+ node.Value = value
+ return node, nil
+}
+
+func (p *parser) parseToken(ctx *context, tk *token.Token) (ast.Node, error) {
+ node, err := p.createNodeFromToken(ctx, tk)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to create node from token")
+ }
+ if node != nil && node.GetPath() == "" {
+ node.SetPath(ctx.path)
+ }
+ return node, nil
+}
+
+func (p *parser) createNodeFromToken(ctx *context, tk *token.Token) (ast.Node, error) {
+ if tk == nil {
+ return nil, nil
+ }
+ if tk.NextType() == token.MappingValueType {
+ node, err := p.parseMappingValue(ctx)
+ return node, err
+ }
+ node, err := p.parseScalarValueWithComment(ctx, tk)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to parse scalar value")
+ }
+ if node != nil {
+ return node, nil
+ }
+ switch tk.Type {
+ case token.CommentType:
+ return p.parseComment(ctx)
+ case token.MappingKeyType:
+ return p.parseMappingKey(ctx)
+ case token.DocumentHeaderType:
+ return p.parseDocument(ctx)
+ case token.MappingStartType:
+ return p.parseMapping(ctx)
+ case token.SequenceStartType:
+ return p.parseSequence(ctx)
+ case token.SequenceEntryType:
+ return p.parseSequenceEntry(ctx)
+ case token.AnchorType:
+ return p.parseAnchor(ctx)
+ case token.AliasType:
+ return p.parseAlias(ctx)
+ case token.DirectiveType:
+ return p.parseDirective(ctx)
+ case token.TagType:
+ return p.parseTag(ctx)
+ case token.LiteralType, token.FoldedType:
+ return p.parseLiteral(ctx)
+ }
+ return nil, nil
+}
+
+func (p *parser) parse(tokens token.Tokens, mode Mode) (*ast.File, error) {
+ ctx := newContext(tokens, mode)
+ file := &ast.File{Docs: []*ast.DocumentNode{}}
+ for ctx.next() {
+ node, err := p.parseToken(ctx, ctx.currentToken())
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to parse")
+ }
+ ctx.progressIgnoreComment(1)
+ if node == nil {
+ continue
+ }
+ if doc, ok := node.(*ast.DocumentNode); ok {
+ file.Docs = append(file.Docs, doc)
+ } else {
+ file.Docs = append(file.Docs, ast.Document(nil, node))
+ }
+ }
+ return file, nil
+}
+
+type Mode uint
+
+const (
+ ParseComments Mode = 1 << iota // parse comments and add them to AST
+)
+
+// ParseBytes parses a byte slice and returns an ast.File.
+func ParseBytes(bytes []byte, mode Mode) (*ast.File, error) {
+ tokens := lexer.Tokenize(string(bytes))
+ f, err := Parse(tokens, mode)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to parse")
+ }
+ return f, nil
+}
+
+// Parse parses token instances and returns an ast.File.
+func Parse(tokens token.Tokens, mode Mode) (*ast.File, error) {
+ var p parser
+ f, err := p.parse(tokens, mode)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to parse")
+ }
+ return f, nil
+}
+
+// ParseFile parses the named file and returns an ast.File.
+func ParseFile(filename string, mode Mode) (*ast.File, error) {
+ file, err := os.ReadFile(filename)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to read file: %s", filename)
+ }
+ f, err := ParseBytes(file, mode)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to parse")
+ }
+ f.Name = filename
+ return f, nil
+}
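
For reference, the entry points defined above are typically used as in the sketch below; passing `parser.ParseComments` keeps comment nodes in the AST so the file prints back with its comments (mirroring the tests that follow).

```go
package main

import (
	"fmt"

	"github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/parser"
)

func main() {
	src := `# head comment
foo: 1 # line comment
bar:
  - a
  - b
`
	// Mode 0 drops comment tokens; parser.ParseComments keeps them in the AST.
	f, err := parser.ParseBytes([]byte(src), parser.ParseComments)
	if err != nil {
		panic(err)
	}
	fmt.Println(f.String()) // prints the document back, comments included
}
```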
diff --git a/tmpmod/github.com/goccy/go-yaml/parser/parser_test.go b/tmpmod/github.com/goccy/go-yaml/parser/parser_test.go
new file mode 100644
index 00000000..67f7641e
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/parser/parser_test.go
@@ -0,0 +1,1010 @@
+package parser_test
+
+import (
+ "fmt"
+ "path/filepath"
+ "reflect"
+ "strings"
+ "testing"
+
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/ast"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/lexer"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/parser"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/token"
+)
+
+func TestParser(t *testing.T) {
+ sources := []string{
+ "null\n",
+ "{}\n",
+ "v: hi\n",
+ "v: \"true\"\n",
+ "v: \"false\"\n",
+ "v: true\n",
+ "v: false\n",
+ "v: 10\n",
+ "v: -10\n",
+ "v: 42\n",
+ "v: 4294967296\n",
+ "v: \"10\"\n",
+ "v: 0.1\n",
+ "v: 0.99\n",
+ "v: -0.1\n",
+ "v: .inf\n",
+ "v: -.inf\n",
+ "v: .nan\n",
+ "v: null\n",
+ "v: \"\"\n",
+ "v:\n- A\n- B\n",
+ "a: '-'\n",
+ "123\n",
+ "hello: world\n",
+ "a: null\n",
+ "v:\n- A\n- 1\n- B:\n - 2\n - 3\n",
+ "a:\n b: c\n",
+ "a: {x: 1}\n",
+ "t2: 2018-01-09T10:40:47Z\nt4: 2098-01-09T10:40:47Z\n",
+ "a: [1, 2]\n",
+ "a: {b: c, d: e}\n",
+ "a: 3s\n",
+ "a: \n",
+ "a: \"1:1\"\n",
+ "a: 1.2.3.4\n",
+ "a: \"2015-02-24T18:19:39Z\"\n",
+ "a: 'b: c'\n",
+ "a: 'Hello #comment'\n",
+ "a: abc <> ghi",
+ "a: <-\n B\n C\n",
+ "v: |-\n 0\n",
+ "v: |-\n 0\nx: 0",
+ `"a\n1\nb"`,
+ `{"a":"b"}`,
+ `!!map {
+ ? !!str "explicit":!!str "entry",
+ ? !!str "implicit" : !!str "entry",
+ ? !!null "" : !!null "",
+}`,
+ "\"a\": a\n\"b\": b",
+ "'a': a\n'b': b",
+ }
+ for _, src := range sources {
+ if _, err := parser.Parse(lexer.Tokenize(src), 0); err != nil {
+ t.Fatalf("parse error: source [%s]: %+v", src, err)
+ }
+ }
+}
+
+func TestParseComplicatedDocument(t *testing.T) {
+ tests := []struct {
+ source string
+ expect string
+ }{
+ {
+ `
+american:
+ - Boston Red Sox
+ - Detroit Tigers
+ - New York Yankees
+national:
+ - New York Mets
+ - Chicago Cubs
+ - Atlanta Braves
+`, `
+american:
+ - Boston Red Sox
+ - Detroit Tigers
+ - New York Yankees
+national:
+ - New York Mets
+ - Chicago Cubs
+ - Atlanta Braves
+`,
+ },
+ {
+ `
+a:
+ b: c
+ d: e
+ f: g
+h:
+ i: j
+ k:
+ l: m
+ n: o
+ p: q
+r: s
+`, `
+a:
+ b: c
+ d: e
+ f: g
+h:
+ i: j
+ k:
+ l: m
+ n: o
+ p: q
+r: s
+`,
+ },
+ {
+ `
+- a:
+ - b
+ - c
+- d
+`, `
+- a:
+ - b
+ - c
+- d
+`,
+ },
+ {
+ `
+- a
+- b
+- c
+ - d
+ - e
+- f
+`, `
+- a
+- b
+- c - d - e
+- f
+`,
+ },
+ {
+ `
+a: 0 - 1
+`,
+ `
+a: 0 - 1
+`,
+ },
+ {`
+- a:
+ b: c
+ d: e
+- f:
+ g: h
+`,
+ `
+- a:
+ b: c
+ d: e
+- f: null
+ g: h
+`,
+ },
+ {
+ `
+a:
+ b
+ c
+d: e
+`, `
+a: b c
+d: e
+`,
+ },
+ {
+ `
+a
+b
+c
+`, `
+a b c
+`,
+ },
+ {
+ `
+a:
+ - b
+ - c
+`, `
+a:
+ - b
+ - c
+`,
+ },
+ {
+ `
+- a :
+ b: c
+`, `
+- a: null
+ b: c
+`,
+ },
+ {
+ `
+- a:
+ b
+ c
+ d
+ hoge: fuga
+`, `
+- a: b c d
+ hoge: fuga
+`,
+ },
+ {
+ `
+- a # ' " # - : %
+- b # " # - : % '
+- c # # - : % ' "
+- d # - : % ' " #
+- e # : % ' " # -
+- f # % ' : # - :
+`,
+ `
+- a
+- b
+- c
+- d
+- e
+- f
+`,
+ },
+ {
+ `
+# comment
+a: # comment
+# comment
+ b: c # comment
+ # comment
+d: e # comment
+# comment
+`,
+ `
+a:
+ b: c
+d: e
+`,
+ },
+ {
+ `
+a: b#notcomment
+`,
+ `
+a: b#notcomment
+`,
+ },
+ {
+ `
+anchored: &anchor foo
+aliased: *anchor
+`,
+ `
+anchored: &anchor foo
+aliased: *anchor
+`,
+ },
+ {
+ `
+---
+- &CENTER { x: 1, y: 2 }
+- &LEFT { x: 0, y: 2 }
+- &BIG { r: 10 }
+- &SMALL { r: 1 }
+
+# All the following maps are equal:
+
+- # Explicit keys
+ x: 1
+ y: 2
+ r: 10
+ label: center/big
+
+- # Merge one map
+ << : *CENTER
+ r: 10
+ label: center/big
+
+- # Merge multiple maps
+ << : [ *CENTER, *BIG ]
+ label: center/big
+
+- # Override
+ << : [ *BIG, *LEFT, *SMALL ]
+ x: 1
+ label: center/big
+`,
+ `
+---
+- &CENTER {x: 1, y: 2}
+- &LEFT {x: 0, y: 2}
+- &BIG {r: 10}
+- &SMALL {r: 1}
+- x: 1
+ y: 2
+ r: 10
+ label: center/big
+- <<: *CENTER
+ r: 10
+ label: center/big
+- <<: [*CENTER, *BIG]
+ label: center/big
+- <<: [*BIG, *LEFT, *SMALL]
+ x: 1
+ label: center/big
+`,
+ },
+ {
+ `
+a:
+- - b
+- - c
+ - d
+`,
+ `
+a:
+- - b
+- - c
+ - d
+`,
+ },
+ {
+ `
+a:
+ b:
+ c: d
+ e:
+ f: g
+ h: i
+j: k
+`,
+ `
+a:
+ b:
+ c: d
+ e:
+ f: g
+ h: i
+j: k
+`,
+ },
+ {
+ `
+---
+a: 1
+b: 2
+...
+---
+c: 3
+d: 4
+...
+`,
+ `
+---
+a: 1
+b: 2
+...
+---
+c: 3
+d: 4
+...
+`,
+ },
+ {
+ `
+a:
+ b: |
+ {
+ [ 1, 2 ]
+ }
+ c: d
+`,
+ `
+a:
+ b: |
+ {
+ [ 1, 2 ]
+ }
+ c: d
+`,
+ },
+ {
+ `
+|
+ hoge
+ fuga
+ piyo`,
+ `
+|
+ hoge
+ fuga
+ piyo
+`,
+ },
+ {
+ `
+a: |
+ bbbbbbb
+
+
+ ccccccc
+d: eeeeeeeeeeeeeeeee
+`,
+ `
+a: |
+ bbbbbbb
+
+
+ ccccccc
+d: eeeeeeeeeeeeeeeee
+`,
+ },
+ {
+ `
+a: b
+ c
+`,
+ `
+a: b c
+`,
+ },
+ {
+ `
+a:
+ b: c
+`,
+ `
+a:
+ b: c
+`,
+ },
+ {
+ `
+a: b
+c: d
+`,
+ `
+a: b
+c: d
+`,
+ },
+ {
+ `
+- ab - cd
+- ef - gh
+`,
+ `
+- ab - cd
+- ef - gh
+`,
+ },
+ {
+ `
+- 0 - 1
+ - 2 - 3
+`,
+ `
+- 0 - 1 - 2 - 3
+`,
+ },
+ {
+ `
+a - b - c: value
+`,
+ `
+a - b - c: value
+`,
+ },
+ {
+ `
+a:
+-
+ b: c
+ d: e
+-
+ f: g
+ h: i
+`,
+ `
+a:
+- b: c
+ d: e
+- f: g
+ h: i
+`,
+ },
+ {
+ `
+a: |-
+ value
+b: c
+`,
+ `
+a: |-
+ value
+b: c
+`,
+ },
+ {
+ `
+a: |+
+ value
+b: c
+`,
+ `
+a: |+
+ value
+b: c
+`,
+ },
+ {
+ `
+- key1: val
+ key2:
+ (
+ foo
+ +
+ bar
+ )
+`,
+ `
+- key1: val
+ key2: ( foo + bar )
+`,
+ },
+ {
+ `
+"a": b
+'c': d
+"e": "f"
+g: "h"
+i: 'j'
+`,
+ `
+"a": b
+'c': d
+"e": "f"
+g: "h"
+i: 'j'
+`,
+ },
+ }
+
+ for _, test := range tests {
+ t.Run(test.source, func(t *testing.T) {
+ tokens := lexer.Tokenize(test.source)
+ f, err := parser.Parse(tokens, 0)
+ if err != nil {
+ t.Fatalf("%+v", err)
+ }
+ var v Visitor
+ for _, doc := range f.Docs {
+ ast.Walk(&v, doc.Body)
+ }
+ expect := fmt.Sprintf("\n%+v", f)
+ if test.expect != expect {
+ tokens.Dump()
+ t.Fatalf("unexpected output: [%s] != [%s]", test.expect, expect)
+ }
+ })
+ }
+}
+
+func TestNewLineChar(t *testing.T) {
+ for _, f := range []string{
+ "lf.yml",
+ "cr.yml",
+ "crlf.yml",
+ } {
+ ast, err := parser.ParseFile(filepath.Join("testdata", f), 0)
+ if err != nil {
+ t.Fatalf("%+v", err)
+ }
+ actual := fmt.Sprintf("%v", ast)
+ expect := `a: "a"
+b: 1
+`
+ if expect != actual {
+ t.Fatal("unexpected result")
+ }
+ }
+}
+
+func TestSyntaxError(t *testing.T) {
+ tests := []struct {
+ source string
+ expect string
+ }{
+ {
+ `
+a:
+- b
+ c: d
+ e: f
+ g: h`,
+ `
+[3:3] unexpected key name
+ 2 | a:
+> 3 | - b
+ 4 | c: d
+ ^
+ 5 | e: f
+ 6 | g: h`,
+ },
+ {
+ `
+a
+- b: c`,
+ `
+[2:1] unexpected key name
+> 2 | a
+ 3 | - b: c
+ ^
+`,
+ },
+ {
+ `%YAML 1.1 {}`,
+ `
+[1:2] unexpected directive value. document not started
+> 1 | %YAML 1.1 {}
+ ^
+`,
+ },
+ {
+ `{invalid`,
+ `
+[1:2] unexpected map
+> 1 | {invalid
+ ^
+`,
+ },
+ {
+ `{ "key": "value" `,
+ `
+[1:1] unterminated flow mapping
+> 1 | { "key": "value"
+ ^
+`,
+ },
+ {
+ `
+a:
+- b: c
+- `,
+ `
+[4:1] empty sequence entry
+ 2 | a:
+ 3 | - b: c
+> 4 | -
+ ^
+`,
+ },
+ }
+ for _, test := range tests {
+ t.Run(test.source, func(t *testing.T) {
+ _, err := parser.ParseBytes([]byte(test.source), 0)
+ if err == nil {
+ t.Fatal("cannot catch syntax error")
+ }
+ actual := "\n" + err.Error()
+ if test.expect != actual {
+ t.Fatalf("expected: [%s] but got [%s]", test.expect, actual)
+ }
+ })
+ }
+}
+
+func TestComment(t *testing.T) {
+ tests := []struct {
+ name string
+ yaml string
+ }{
+ {
+ name: "map with comment",
+ yaml: `
+# commentA
+a: #commentB
+ # commentC
+ b: c # commentD
+ # commentE
+ d: e # commentF
+ # commentG
+ f: g # commentH
+# commentI
+f: g # commentJ
+# commentK
+`,
+ },
+ {
+ name: "sequence with comment",
+ yaml: `
+# commentA
+- a # commentB
+# commentC
+- b: # commentD
+ # commentE
+ - d # commentF
+ - e # commentG
+# commentH
+`,
+ },
+ {
+ name: "anchor and alias",
+ yaml: `
+a: &x b # commentA
+c: *x # commentB
+`,
+ },
+ {
+ name: "multiline",
+ yaml: `
+# foo comment
+# foo comment2
+foo: # map key comment
+ # bar above comment
+ # bar above comment2
+ bar: 10 # comment for bar
+ # baz above comment
+ # baz above comment2
+ baz: bbbb # comment for baz
+ piyo: # sequence key comment
+ # sequence1 above comment 1
+ # sequence1 above comment 2
+ - sequence1 # sequence1
+ # sequence2 above comment 1
+ # sequence2 above comment 2
+ - sequence2 # sequence2
+ # sequence3 above comment 1
+ # sequence3 above comment 2
+ - false # sequence3
+# foo2 comment
+# foo2 comment2
+foo2: &anchor text # anchor comment
+# foo3 comment
+# foo3 comment2
+foo3: *anchor # alias comment
+`,
+ },
+ {
+ name: "literal",
+ yaml: `
+foo: | # comment
+ x: 42
+`,
+ },
+ {
+ name: "folded",
+ yaml: `
+foo: > # comment
+ x: 42
+`,
+ },
+ }
+ for _, test := range tests {
+ t.Run(test.name, func(t *testing.T) {
+ f, err := parser.ParseBytes([]byte(test.yaml), parser.ParseComments)
+ if err != nil {
+ t.Fatalf("%+v", err)
+ }
+ got := "\n" + f.String()
+ if test.yaml != got {
+ t.Fatalf("expected:%s\ngot:%s", test.yaml, got)
+ }
+ })
+ }
+}
+
+func TestCommentWithNull(t *testing.T) {
+ t.Run("same line", func(t *testing.T) {
+ content := `
+foo:
+ bar: # comment
+ baz: 1
+`
+ expected := `
+foo:
+ bar: null # comment
+ baz: 1`
+ f, err := parser.ParseBytes([]byte(content), parser.ParseComments)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if len(f.Docs) != 1 {
+ t.Fatal("failed to parse content with same line comment")
+ }
+ if f.Docs[0].String() != strings.TrimPrefix(expected, "\n") {
+ t.Fatal("failed to parse comment")
+ }
+ })
+ t.Run("next line", func(t *testing.T) {
+ content := `
+foo:
+ bar:
+ # comment
+ baz: 1
+`
+ expected := `
+foo:
+ bar: null # comment
+ baz: 1`
+ f, err := parser.ParseBytes([]byte(content), parser.ParseComments)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if len(f.Docs) != 1 {
+ t.Fatal("failed to parse content with next line comment")
+ }
+ if f.Docs[0].String() != strings.TrimPrefix(expected, "\n") {
+ t.Fatal("failed to parse comment")
+ }
+ })
+ t.Run("next line and different indent", func(t *testing.T) {
+ content := `
+foo:
+ bar:
+ # comment
+baz: 1`
+ f, err := parser.ParseBytes([]byte(content), parser.ParseComments)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if len(f.Docs) != 1 {
+ t.Fatal("failed to parse content with next line comment")
+ }
+ expected := `
+foo:
+ bar: null # comment
+baz: 1`
+ if f.Docs[0].String() != strings.TrimPrefix(expected, "\n") {
+ t.Fatal("failed to parse comment")
+ }
+ })
+}
+
+func TestSequenceComment(t *testing.T) {
+ content := `
+foo:
+ - # comment
+ bar: 1
+baz:
+ - xxx
+`
+ f, err := parser.ParseBytes([]byte(content), parser.ParseComments)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if len(f.Docs) != 1 {
+ t.Fatal("failed to parse content with next line with sequence")
+ }
+ expected := `
+foo:
+ # comment
+ - bar: 1
+baz:
+ - xxx`
+ if f.Docs[0].String() != strings.TrimPrefix(expected, "\n") {
+ t.Fatal("failed to parse comment")
+ }
+ t.Run("foo[0].bar", func(t *testing.T) {
+ path, err := yaml.PathString("$.foo[0].bar")
+ if err != nil {
+ t.Fatal(err)
+ }
+ v, err := path.FilterFile(f)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if v.String() != "1" {
+ t.Fatal("failed to get foo[0].bar value")
+ }
+ })
+ t.Run("baz[0]", func(t *testing.T) {
+ path, err := yaml.PathString("$.baz[0]")
+ if err != nil {
+ t.Fatal(err)
+ }
+ v, err := path.FilterFile(f)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if v.String() != "xxx" {
+ t.Fatal("failed to get baz[0] value")
+ }
+ })
+}
+
+func TestNodePath(t *testing.T) {
+ yml := `
+a: # commentA
+ b: # commentB
+ c: foo # commentC
+ d: bar # commentD
+ e: baz # commentE
+ f: # commentF
+ g: hoge # commentG
+ h: # commentH
+ - list1 # comment list1
+ - list2 # comment list2
+ - list3 # comment list3
+ i: fuga # commentI
+j: piyo # commentJ
+k.l.m.n: moge # commentKLMN
+o#p: hogera # commentOP
+q#.r: hogehoge # commentQR
+`
+ f, err := parser.ParseBytes([]byte(yml), parser.ParseComments)
+ if err != nil {
+ t.Fatalf("%+v", err)
+ }
+ var capturer pathCapturer
+ for _, doc := range f.Docs {
+ ast.Walk(&capturer, doc.Body)
+ }
+ commentPaths := []string{}
+ for i := 0; i < capturer.capturedNum; i++ {
+ if capturer.orderedTypes[i] == ast.CommentType {
+ commentPaths = append(commentPaths, capturer.orderedPaths[i])
+ }
+ }
+ expectedPaths := []string{
+ "$.a",
+ "$.a.b",
+ "$.a.b.c",
+ "$.a.b.d",
+ "$.a.b.e",
+ "$.a.f",
+ "$.a.f.g",
+ "$.a.h",
+ "$.a.h[0]",
+ "$.a.h[1]",
+ "$.a.h[2]",
+ "$.a.i",
+ "$.j",
+ "$.'k.l.m.n'",
+ "$.o#p",
+ "$.'q#.r'",
+ }
+ if !reflect.DeepEqual(expectedPaths, commentPaths) {
+ t.Fatalf("failed to get YAMLPath to the comment node:\nexpected[%s]\ngot [%s]", expectedPaths, commentPaths)
+ }
+}
+
+type pathCapturer struct {
+ capturedNum int
+ orderedPaths []string
+ orderedTypes []ast.NodeType
+ orderedTokens []*token.Token
+}
+
+func (c *pathCapturer) Visit(node ast.Node) ast.Visitor {
+ c.capturedNum++
+ c.orderedPaths = append(c.orderedPaths, node.GetPath())
+ c.orderedTypes = append(c.orderedTypes, node.Type())
+ c.orderedTokens = append(c.orderedTokens, node.GetToken())
+ return c
+}
+
+type Visitor struct {
+}
+
+func (v *Visitor) Visit(node ast.Node) ast.Visitor {
+ tk := node.GetToken()
+ tk.Prev = nil
+ tk.Next = nil
+ return v
+}
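
The tests above pull values out of a parsed file with `yaml.PathString`; that API is defined in `path.go` below. A minimal standalone sketch of the same pattern:

```go
package main

import (
	"fmt"

	yaml "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml"
	"github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/parser"
)

func main() {
	f, err := parser.ParseBytes([]byte("foo:\n  - bar: 1\n"), 0)
	if err != nil {
		panic(err)
	}
	p, err := yaml.PathString("$.foo[0].bar")
	if err != nil {
		panic(err)
	}
	node, err := p.FilterFile(f)
	if err != nil {
		panic(err)
	}
	fmt.Println(node.String()) // 1
}
```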
diff --git a/tmpmod/github.com/goccy/go-yaml/parser/testdata/cr.yml b/tmpmod/github.com/goccy/go-yaml/parser/testdata/cr.yml
new file mode 100644
index 00000000..37b52a6e
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/parser/testdata/cr.yml
@@ -0,0 +1 @@
+a: "a"
b: 1
\ No newline at end of file
diff --git a/tmpmod/github.com/goccy/go-yaml/parser/testdata/crlf.yml b/tmpmod/github.com/goccy/go-yaml/parser/testdata/crlf.yml
new file mode 100644
index 00000000..85929f99
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/parser/testdata/crlf.yml
@@ -0,0 +1,3 @@
+a: "a"
+
+b: 1
diff --git a/tmpmod/github.com/goccy/go-yaml/parser/testdata/lf.yml b/tmpmod/github.com/goccy/go-yaml/parser/testdata/lf.yml
new file mode 100644
index 00000000..d2fe51f3
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/parser/testdata/lf.yml
@@ -0,0 +1,3 @@
+a: "a"
+
+b: 1
diff --git a/tmpmod/github.com/goccy/go-yaml/path.go b/tmpmod/github.com/goccy/go-yaml/path.go
new file mode 100644
index 00000000..2c7197de
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/path.go
@@ -0,0 +1,814 @@
+package yaml
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "strconv"
+ "strings"
+
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/ast"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/internal/errors"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/parser"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/printer"
+)
+
+// PathString creates a Path from its string representation.
+//
+// YAMLPath rule
+// $ : the root object/element
+// . : child operator
+// .. : recursive descent
+// [num] : object/element of array by number
+// [*] : all objects/elements for array.
+//
+// If you want to use reserved characters such as `.` and `*` in a key name,
+// enclose the key in single quotes as follows ( $.foo.'bar.baz-*'.hoge ).
+// If the quoted key itself contains a single quote, escape it with `\` ( $.foo.'bar.baz\'s value'.hoge ).
+func PathString(s string) (*Path, error) {
+ buf := []rune(s)
+ length := len(buf)
+ cursor := 0
+ builder := &PathBuilder{}
+ for cursor < length {
+ c := buf[cursor]
+ switch c {
+ case '$':
+ builder = builder.Root()
+ cursor++
+ case '.':
+ b, buf, c, err := parsePathDot(builder, buf, cursor)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to parse path of dot")
+ }
+ length = len(buf)
+ builder = b
+ cursor = c
+ case '[':
+ b, buf, c, err := parsePathIndex(builder, buf, cursor)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to parse path of index")
+ }
+ length = len(buf)
+ builder = b
+ cursor = c
+ default:
+ return nil, errors.Wrapf(ErrInvalidPathString, "invalid path at %d", cursor)
+ }
+ }
+ return builder.Build(), nil
+}
+
+func parsePathRecursive(b *PathBuilder, buf []rune, cursor int) (*PathBuilder, []rune, int, error) {
+ length := len(buf)
+ cursor += 2 // skip .. characters
+ start := cursor
+ for ; cursor < length; cursor++ {
+ c := buf[cursor]
+ switch c {
+ case '$':
+ return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "specified '$' after '..' character")
+ case '*':
+ return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "specified '*' after '..' character")
+ case '.', '[':
+ goto end
+ case ']':
+ return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "specified ']' after '..' character")
+ }
+ }
+end:
+ if start == cursor {
+ return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "not found recursive selector")
+ }
+ return b.Recursive(string(buf[start:cursor])), buf, cursor, nil
+}
+
+func parsePathDot(b *PathBuilder, buf []rune, cursor int) (*PathBuilder, []rune, int, error) {
+ length := len(buf)
+ if cursor+1 < length && buf[cursor+1] == '.' {
+ b, buf, c, err := parsePathRecursive(b, buf, cursor)
+ if err != nil {
+ return nil, nil, 0, errors.Wrapf(err, "failed to parse path of recursive")
+ }
+ return b, buf, c, nil
+ }
+ cursor++ // skip . character
+ start := cursor
+
+ // if started single quote, looking for end single quote char
+ if cursor < length && buf[cursor] == '\'' {
+ return parseQuotedKey(b, buf, cursor)
+ }
+ for ; cursor < length; cursor++ {
+ c := buf[cursor]
+ switch c {
+ case '$':
+ return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "specified '$' after '.' character")
+ case '*':
+ return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "specified '*' after '.' character")
+ case '.', '[':
+ goto end
+ case ']':
+ return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "specified ']' after '.' character")
+ }
+ }
+end:
+ if start == cursor {
+ return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "could not find by empty key")
+ }
+ return b.child(string(buf[start:cursor])), buf, cursor, nil
+}
+
+func parseQuotedKey(b *PathBuilder, buf []rune, cursor int) (*PathBuilder, []rune, int, error) {
+ cursor++ // skip single quote
+ start := cursor
+ length := len(buf)
+ var foundEndDelim bool
+ for ; cursor < length; cursor++ {
+ switch buf[cursor] {
+ case '\\':
+ buf = append(append([]rune{}, buf[:cursor]...), buf[cursor+1:]...)
+ length = len(buf)
+ case '\'':
+ foundEndDelim = true
+ goto end
+ }
+ }
+end:
+ if !foundEndDelim {
+ return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "could not find end delimiter for key")
+ }
+ if start == cursor {
+ return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "could not find by empty key")
+ }
+ selector := buf[start:cursor]
+ cursor++
+ if cursor < length {
+ switch buf[cursor] {
+ case '$':
+ return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "specified '$' after '.' character")
+ case '*':
+ return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "specified '*' after '.' character")
+ case ']':
+ return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "specified ']' after '.' character")
+ }
+ }
+ return b.child(string(selector)), buf, cursor, nil
+}
+
+func parsePathIndex(b *PathBuilder, buf []rune, cursor int) (*PathBuilder, []rune, int, error) {
+ length := len(buf)
+ cursor++ // skip '[' character
+ if length <= cursor {
+ return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "unexpected end of YAML Path")
+ }
+ c := buf[cursor]
+ switch c {
+ case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '*':
+ start := cursor
+ cursor++
+ for ; cursor < length; cursor++ {
+ c := buf[cursor]
+ switch c {
+ case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
+ continue
+ }
+ break
+ }
+ if buf[cursor] != ']' {
+ return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "invalid character %s at %d", string(buf[cursor]), cursor)
+ }
+ numOrAll := string(buf[start:cursor])
+ if numOrAll == "*" {
+ return b.IndexAll(), buf, cursor + 1, nil
+ }
+ num, err := strconv.ParseInt(numOrAll, 10, 64)
+ if err != nil {
+ return nil, nil, 0, errors.Wrapf(err, "failed to parse number")
+ }
+ return b.Index(uint(num)), buf, cursor + 1, nil
+ }
+ return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "invalid character %s at %d", c, cursor)
+}
+
+// Path represents a YAMLPath (similar to a JSONPath).
+type Path struct {
+ node pathNode
+}
+
+// String returns the textual representation of the path.
+func (p *Path) String() string {
+ return p.node.String()
+}
+
+// Read decodes from r and stores the value extracted by the YAMLPath into v.
+func (p *Path) Read(r io.Reader, v interface{}) error {
+ node, err := p.ReadNode(r)
+ if err != nil {
+ return errors.Wrapf(err, "failed to read node")
+ }
+ if err := Unmarshal([]byte(node.String()), v); err != nil {
+ return errors.Wrapf(err, "failed to unmarshal")
+ }
+ return nil
+}
+
+// ReadNode builds an AST from r and extracts the node selected by the YAMLPath.
+func (p *Path) ReadNode(r io.Reader) (ast.Node, error) {
+ if p.node == nil {
+ return nil, ErrInvalidPath
+ }
+ var buf bytes.Buffer
+ if _, err := io.Copy(&buf, r); err != nil {
+ return nil, errors.Wrapf(err, "failed to copy from reader")
+ }
+ f, err := parser.ParseBytes(buf.Bytes(), 0)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to parse yaml")
+ }
+ node, err := p.FilterFile(f)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to filter from ast.File")
+ }
+ return node, nil
+}
+
+// Filter extracts the value selected by the YAMLPath from target and stores it in v.
+func (p *Path) Filter(target, v interface{}) error {
+ b, err := Marshal(target)
+ if err != nil {
+ return errors.Wrapf(err, "failed to marshal target value")
+ }
+ if err := p.Read(bytes.NewBuffer(b), v); err != nil {
+ return errors.Wrapf(err, "failed to read")
+ }
+ return nil
+}
+
+// FilterFile extracts the node selected by the YAMLPath from an ast.File.
+func (p *Path) FilterFile(f *ast.File) (ast.Node, error) {
+ for _, doc := range f.Docs {
+ node, err := p.FilterNode(doc.Body)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to filter node by path ( %s )", p.node)
+ }
+ if node != nil {
+ return node, nil
+ }
+ }
+ return nil, errors.Wrapf(ErrNotFoundNode, "failed to find path ( %s )", p.node)
+}
+
+// FilterNode extracts the node selected by the YAMLPath from the given node.
+func (p *Path) FilterNode(node ast.Node) (ast.Node, error) {
+ n, err := p.node.filter(node)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to filter node by path ( %s )", p.node)
+ }
+ return n, nil
+}
+
+// MergeFromReader merges YAML text read from src into an ast.File.
+func (p *Path) MergeFromReader(dst *ast.File, src io.Reader) error {
+ var buf bytes.Buffer
+ if _, err := io.Copy(&buf, src); err != nil {
+ return errors.Wrapf(err, "failed to copy from reader")
+ }
+ file, err := parser.ParseBytes(buf.Bytes(), 0)
+ if err != nil {
+ return errors.Wrapf(err, "failed to parse")
+ }
+ if err := p.MergeFromFile(dst, file); err != nil {
+ return errors.Wrapf(err, "failed to merge file")
+ }
+ return nil
+}
+
+// MergeFromFile merges one ast.File into another.
+func (p *Path) MergeFromFile(dst *ast.File, src *ast.File) error {
+ base, err := p.FilterFile(dst)
+ if err != nil {
+ return errors.Wrapf(err, "failed to filter file")
+ }
+ for _, doc := range src.Docs {
+ if err := ast.Merge(base, doc); err != nil {
+ return errors.Wrapf(err, "failed to merge")
+ }
+ }
+ return nil
+}
+
+// MergeFromNode merges an ast.Node into an ast.File.
+func (p *Path) MergeFromNode(dst *ast.File, src ast.Node) error {
+ base, err := p.FilterFile(dst)
+ if err != nil {
+ return errors.Wrapf(err, "failed to filter file")
+ }
+ if err := ast.Merge(base, src); err != nil {
+ return errors.Wrapf(err, "failed to merge")
+ }
+ return nil
+}
+
+// ReplaceWithReader replaces the selected node in an ast.File with YAML read from an io.Reader.
+func (p *Path) ReplaceWithReader(dst *ast.File, src io.Reader) error {
+ var buf bytes.Buffer
+ if _, err := io.Copy(&buf, src); err != nil {
+ return errors.Wrapf(err, "failed to copy from reader")
+ }
+ file, err := parser.ParseBytes(buf.Bytes(), 0)
+ if err != nil {
+ return errors.Wrapf(err, "failed to parse")
+ }
+ if err := p.ReplaceWithFile(dst, file); err != nil {
+ return errors.Wrapf(err, "failed to replace file")
+ }
+ return nil
+}
+
+// ReplaceWithFile replaces the selected node in an ast.File with the contents of another ast.File.
+func (p *Path) ReplaceWithFile(dst *ast.File, src *ast.File) error {
+ for _, doc := range src.Docs {
+ if err := p.ReplaceWithNode(dst, doc); err != nil {
+ return errors.Wrapf(err, "failed to replace file by path ( %s )", p.node)
+ }
+ }
+ return nil
+}
+
+// ReplaceWithNode replaces the selected node in an ast.File with an ast.Node.
+func (p *Path) ReplaceWithNode(dst *ast.File, node ast.Node) error {
+ for _, doc := range dst.Docs {
+ if node.Type() == ast.DocumentType {
+ node = node.(*ast.DocumentNode).Body
+ }
+ if err := p.node.replace(doc.Body, node); err != nil {
+ return errors.Wrapf(err, "failed to replace node by path ( %s )", p.node)
+ }
+ }
+ return nil
+}
+
+// AnnotateSource adds an annotation to the passed source ( see section 5.1 in README.md ).
+func (p *Path) AnnotateSource(source []byte, colored bool) ([]byte, error) {
+ file, err := parser.ParseBytes([]byte(source), 0)
+ if err != nil {
+ return nil, err
+ }
+ node, err := p.FilterFile(file)
+ if err != nil {
+ return nil, err
+ }
+ var pp printer.Printer
+ return []byte(pp.PrintErrorToken(node.GetToken(), colored)), nil
+}
+
+// PathBuilder represents a builder for YAMLPath.
+type PathBuilder struct {
+ root *rootNode
+ node pathNode
+}
+
+// Root adds '$' to the current path.
+func (b *PathBuilder) Root() *PathBuilder {
+ root := newRootNode()
+ return &PathBuilder{root: root, node: root}
+}
+
+// IndexAll adds '[*]' to the current path.
+func (b *PathBuilder) IndexAll() *PathBuilder {
+ b.node = b.node.chain(newIndexAllNode())
+ return b
+}
+
+// Recursive adds '..selector' to the current path.
+func (b *PathBuilder) Recursive(selector string) *PathBuilder {
+ b.node = b.node.chain(newRecursiveNode(selector))
+ return b
+}
+
+func (b *PathBuilder) containsReservedPathCharacters(path string) bool {
+ if strings.Contains(path, ".") {
+ return true
+ }
+ if strings.Contains(path, "*") {
+ return true
+ }
+ return false
+}
+
+func (b *PathBuilder) enclosedSingleQuote(name string) bool {
+ return strings.HasPrefix(name, "'") && strings.HasSuffix(name, "'")
+}
+
+func (b *PathBuilder) normalizeSelectorName(name string) string {
+ if b.enclosedSingleQuote(name) {
+ // already escaped name
+ return name
+ }
+ if b.containsReservedPathCharacters(name) {
+ escapedName := strings.ReplaceAll(name, `'`, `\'`)
+ return "'" + escapedName + "'"
+ }
+ return name
+}
+
+func (b *PathBuilder) child(name string) *PathBuilder {
+ b.node = b.node.chain(newSelectorNode(name))
+ return b
+}
+
+// Child adds '.name' to the current path.
+func (b *PathBuilder) Child(name string) *PathBuilder {
+ return b.child(b.normalizeSelectorName(name))
+}
+
+// Index adds '[idx]' to the current path.
+func (b *PathBuilder) Index(idx uint) *PathBuilder {
+ b.node = b.node.chain(newIndexNode(idx))
+ return b
+}
+
+// Build builds the YAMLPath.
+func (b *PathBuilder) Build() *Path {
+ return &Path{node: b.root}
+}
+
+type pathNode interface {
+ fmt.Stringer
+ chain(pathNode) pathNode
+ filter(ast.Node) (ast.Node, error)
+ replace(ast.Node, ast.Node) error
+}
+
+type basePathNode struct {
+ child pathNode
+}
+
+func (n *basePathNode) chain(node pathNode) pathNode {
+ n.child = node
+ return node
+}
+
+type rootNode struct {
+ *basePathNode
+}
+
+func newRootNode() *rootNode {
+ return &rootNode{basePathNode: &basePathNode{}}
+}
+
+func (n *rootNode) String() string {
+ s := "$"
+ if n.child != nil {
+ s += n.child.String()
+ }
+ return s
+}
+
+func (n *rootNode) filter(node ast.Node) (ast.Node, error) {
+ if n.child == nil {
+ return nil, nil
+ }
+ filtered, err := n.child.filter(node)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to filter")
+ }
+ return filtered, nil
+}
+
+func (n *rootNode) replace(node ast.Node, target ast.Node) error {
+ if n.child == nil {
+ return nil
+ }
+ if err := n.child.replace(node, target); err != nil {
+ return errors.Wrapf(err, "failed to replace")
+ }
+ return nil
+}
+
+type selectorNode struct {
+ *basePathNode
+ selector string
+}
+
+func newSelectorNode(selector string) *selectorNode {
+ return &selectorNode{
+ basePathNode: &basePathNode{},
+ selector: selector,
+ }
+}
+
+func (n *selectorNode) filter(node ast.Node) (ast.Node, error) {
+ selector := n.selector
+ if len(selector) > 1 && selector[0] == '\'' && selector[len(selector)-1] == '\'' {
+ selector = selector[1 : len(selector)-1]
+ }
+ switch node.Type() {
+ case ast.MappingType:
+ for _, value := range node.(*ast.MappingNode).Values {
+ key := value.Key.GetToken().Value
+ if len(key) > 0 {
+ switch key[0] {
+ case '"':
+ var err error
+ key, err = strconv.Unquote(key)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to unquote")
+ }
+ case '\'':
+ if len(key) > 1 && key[len(key)-1] == '\'' {
+ key = key[1 : len(key)-1]
+ }
+ }
+ }
+ if key == selector {
+ if n.child == nil {
+ return value.Value, nil
+ }
+ filtered, err := n.child.filter(value.Value)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to filter")
+ }
+ return filtered, nil
+ }
+ }
+ case ast.MappingValueType:
+ value := node.(*ast.MappingValueNode)
+ key := value.Key.GetToken().Value
+ if key == selector {
+ if n.child == nil {
+ return value.Value, nil
+ }
+ filtered, err := n.child.filter(value.Value)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to filter")
+ }
+ return filtered, nil
+ }
+ default:
+ return nil, errors.Wrapf(ErrInvalidQuery, "expected node type is map or map value. but got %s", node.Type())
+ }
+ return nil, nil
+}
+
+func (n *selectorNode) replaceMapValue(value *ast.MappingValueNode, target ast.Node) error {
+ key := value.Key.GetToken().Value
+ if key != n.selector {
+ return nil
+ }
+ if n.child == nil {
+ if err := value.Replace(target); err != nil {
+ return errors.Wrapf(err, "failed to replace")
+ }
+ } else {
+ if err := n.child.replace(value.Value, target); err != nil {
+ return errors.Wrapf(err, "failed to replace")
+ }
+ }
+ return nil
+}
+
+func (n *selectorNode) replace(node ast.Node, target ast.Node) error {
+ switch node.Type() {
+ case ast.MappingType:
+ for _, value := range node.(*ast.MappingNode).Values {
+ if err := n.replaceMapValue(value, target); err != nil {
+ return errors.Wrapf(err, "failed to replace map value")
+ }
+ }
+ case ast.MappingValueType:
+ value := node.(*ast.MappingValueNode)
+ if err := n.replaceMapValue(value, target); err != nil {
+ return errors.Wrapf(err, "failed to replace map value")
+ }
+ default:
+ return errors.Wrapf(ErrInvalidQuery, "expected node type is map or map value. but got %s", node.Type())
+ }
+ return nil
+}
+
+func (n *selectorNode) String() string {
+ var builder PathBuilder
+ selector := builder.normalizeSelectorName(n.selector)
+ s := fmt.Sprintf(".%s", selector)
+ if n.child != nil {
+ s += n.child.String()
+ }
+ return s
+}
+
+type indexNode struct {
+ *basePathNode
+ selector uint
+}
+
+func newIndexNode(selector uint) *indexNode {
+ return &indexNode{
+ basePathNode: &basePathNode{},
+ selector: selector,
+ }
+}
+
+func (n *indexNode) filter(node ast.Node) (ast.Node, error) {
+ if node.Type() != ast.SequenceType {
+ return nil, errors.Wrapf(ErrInvalidQuery, "expected sequence type node. but got %s", node.Type())
+ }
+ sequence := node.(*ast.SequenceNode)
+ if n.selector >= uint(len(sequence.Values)) {
+		return nil, errors.Wrapf(ErrInvalidQuery, "expected index is %d. but got sequences has %d items", n.selector, len(sequence.Values))
+ }
+ value := sequence.Values[n.selector]
+ if n.child == nil {
+ return value, nil
+ }
+ filtered, err := n.child.filter(value)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to filter")
+ }
+ return filtered, nil
+}
+
+func (n *indexNode) replace(node ast.Node, target ast.Node) error {
+ if node.Type() != ast.SequenceType {
+ return errors.Wrapf(ErrInvalidQuery, "expected sequence type node. but got %s", node.Type())
+ }
+ sequence := node.(*ast.SequenceNode)
+ if n.selector >= uint(len(sequence.Values)) {
+		return errors.Wrapf(ErrInvalidQuery, "expected index is %d. but got sequences has %d items", n.selector, len(sequence.Values))
+ }
+ if n.child == nil {
+ if err := sequence.Replace(int(n.selector), target); err != nil {
+ return errors.Wrapf(err, "failed to replace")
+ }
+ return nil
+ }
+ if err := n.child.replace(sequence.Values[n.selector], target); err != nil {
+ return errors.Wrapf(err, "failed to replace")
+ }
+ return nil
+}
+
+func (n *indexNode) String() string {
+ s := fmt.Sprintf("[%d]", n.selector)
+ if n.child != nil {
+ s += n.child.String()
+ }
+ return s
+}
+
+type indexAllNode struct {
+ *basePathNode
+}
+
+func newIndexAllNode() *indexAllNode {
+ return &indexAllNode{
+ basePathNode: &basePathNode{},
+ }
+}
+
+func (n *indexAllNode) String() string {
+ s := "[*]"
+ if n.child != nil {
+ s += n.child.String()
+ }
+ return s
+}
+
+func (n *indexAllNode) filter(node ast.Node) (ast.Node, error) {
+ if node.Type() != ast.SequenceType {
+ return nil, errors.Wrapf(ErrInvalidQuery, "expected sequence type node. but got %s", node.Type())
+ }
+ sequence := node.(*ast.SequenceNode)
+ if n.child == nil {
+ return sequence, nil
+ }
+ out := *sequence
+ out.Values = []ast.Node{}
+ for _, value := range sequence.Values {
+ filtered, err := n.child.filter(value)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to filter")
+ }
+ out.Values = append(out.Values, filtered)
+ }
+ return &out, nil
+}
+
+func (n *indexAllNode) replace(node ast.Node, target ast.Node) error {
+ if node.Type() != ast.SequenceType {
+ return errors.Wrapf(ErrInvalidQuery, "expected sequence type node. but got %s", node.Type())
+ }
+ sequence := node.(*ast.SequenceNode)
+ if n.child == nil {
+ for idx := range sequence.Values {
+ if err := sequence.Replace(idx, target); err != nil {
+ return errors.Wrapf(err, "failed to replace")
+ }
+ }
+ return nil
+ }
+ for _, value := range sequence.Values {
+ if err := n.child.replace(value, target); err != nil {
+ return errors.Wrapf(err, "failed to replace")
+ }
+ }
+ return nil
+}
+
+type recursiveNode struct {
+ *basePathNode
+ selector string
+}
+
+func newRecursiveNode(selector string) *recursiveNode {
+ return &recursiveNode{
+ basePathNode: &basePathNode{},
+ selector: selector,
+ }
+}
+
+func (n *recursiveNode) String() string {
+ s := fmt.Sprintf("..%s", n.selector)
+ if n.child != nil {
+ s += n.child.String()
+ }
+ return s
+}
+
+func (n *recursiveNode) filterNode(node ast.Node) (*ast.SequenceNode, error) {
+ sequence := &ast.SequenceNode{BaseNode: &ast.BaseNode{}}
+ switch typedNode := node.(type) {
+ case *ast.MappingNode:
+ for _, value := range typedNode.Values {
+ seq, err := n.filterNode(value)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to filter")
+ }
+ sequence.Values = append(sequence.Values, seq.Values...)
+ }
+ case *ast.MappingValueNode:
+ key := typedNode.Key.GetToken().Value
+ if n.selector == key {
+ sequence.Values = append(sequence.Values, typedNode.Value)
+ }
+ seq, err := n.filterNode(typedNode.Value)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to filter")
+ }
+ sequence.Values = append(sequence.Values, seq.Values...)
+ case *ast.SequenceNode:
+ for _, value := range typedNode.Values {
+ seq, err := n.filterNode(value)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to filter")
+ }
+ sequence.Values = append(sequence.Values, seq.Values...)
+ }
+ }
+ return sequence, nil
+}
+
+func (n *recursiveNode) filter(node ast.Node) (ast.Node, error) {
+ sequence, err := n.filterNode(node)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to filter")
+ }
+ sequence.Start = node.GetToken()
+ return sequence, nil
+}
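+
+// For illustration only ( not part of upstream goccy/go-yaml ): a recursive path such as
+// "$..author" walks every mapping and sequence under the node and collects each value whose
+// key matches the selector into a synthetic sequence, e.g. for the document in path_test.go:
+//
+//	path, _ := PathString("$..author")
+//	var authors []string
+//	_ = path.Read(strings.NewReader(yml), &authors) // authors == []string{"john", "ken"}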
+
+func (n *recursiveNode) replaceNode(node ast.Node, target ast.Node) error {
+ switch typedNode := node.(type) {
+ case *ast.MappingNode:
+ for _, value := range typedNode.Values {
+ if err := n.replaceNode(value, target); err != nil {
+ return errors.Wrapf(err, "failed to replace")
+ }
+ }
+ case *ast.MappingValueNode:
+ key := typedNode.Key.GetToken().Value
+ if n.selector == key {
+ if err := typedNode.Replace(target); err != nil {
+ return errors.Wrapf(err, "failed to replace")
+ }
+ }
+ if err := n.replaceNode(typedNode.Value, target); err != nil {
+ return errors.Wrapf(err, "failed to replace")
+ }
+ case *ast.SequenceNode:
+ for _, value := range typedNode.Values {
+ if err := n.replaceNode(value, target); err != nil {
+ return errors.Wrapf(err, "failed to replace")
+ }
+ }
+ }
+ return nil
+}
+
+func (n *recursiveNode) replace(node ast.Node, target ast.Node) error {
+ if err := n.replaceNode(node, target); err != nil {
+ return errors.Wrapf(err, "failed to replace")
+ }
+ return nil
+}
diff --git a/tmpmod/github.com/goccy/go-yaml/path_test.go b/tmpmod/github.com/goccy/go-yaml/path_test.go
new file mode 100644
index 00000000..e26a7192
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/path_test.go
@@ -0,0 +1,666 @@
+package yaml_test
+
+import (
+ "fmt"
+ "log"
+ "reflect"
+ "strings"
+ "testing"
+
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/parser"
+)
+
+func builder() *yaml.PathBuilder { return &yaml.PathBuilder{} }
+
+func TestPathBuilder(t *testing.T) {
+ tests := []struct {
+ expected string
+ path *yaml.Path
+ }{
+ {
+ expected: `$.a.b[0]`,
+ path: builder().Root().Child("a").Child("b").Index(0).Build(),
+ },
+ {
+ expected: `$.'a.b'.'c*d'`,
+ path: builder().Root().Child("a.b").Child("c*d").Build(),
+ },
+ {
+ expected: `$.'a.b-*'.c`,
+ path: builder().Root().Child("a.b-*").Child("c").Build(),
+ },
+ {
+ expected: `$.'a'.b`,
+ path: builder().Root().Child("'a'").Child("b").Build(),
+ },
+ {
+ expected: `$.'a.b'.c`,
+ path: builder().Root().Child("'a.b'").Child("c").Build(),
+ },
+ }
+ for _, test := range tests {
+ t.Run(test.expected, func(t *testing.T) {
+ expected := test.expected
+ got := test.path.String()
+ if expected != got {
+ t.Fatalf("failed to build path. expected:[%q] but got:[%q]", expected, got)
+ }
+ })
+ }
+}
+
+func TestPath(t *testing.T) {
+ yml := `
+store:
+ book:
+ - author: john
+ price: 10
+ - author: ken
+ price: 12
+ bicycle:
+ color: red
+ price: 19.95
+ bicycle*unicycle:
+ price: 20.25
+`
+ tests := []struct {
+ name string
+ path *yaml.Path
+ expected interface{}
+ }{
+ {
+ name: "$.store.book[0].author",
+ path: builder().Root().Child("store").Child("book").Index(0).Child("author").Build(),
+ expected: "john",
+ },
+ {
+ name: "$.store.book[1].price",
+ path: builder().Root().Child("store").Child("book").Index(1).Child("price").Build(),
+ expected: uint64(12),
+ },
+ {
+ name: "$.store.book[*].author",
+ path: builder().Root().Child("store").Child("book").IndexAll().Child("author").Build(),
+ expected: []interface{}{"john", "ken"},
+ },
+ {
+ name: "$.store.book[0]",
+ path: builder().Root().Child("store").Child("book").Index(0).Build(),
+ expected: map[string]interface{}{"author": "john", "price": uint64(10)},
+ },
+ {
+ name: "$..author",
+ path: builder().Root().Recursive("author").Build(),
+ expected: []interface{}{"john", "ken"},
+ },
+ {
+ name: "$.store.bicycle.price",
+ path: builder().Root().Child("store").Child("bicycle").Child("price").Build(),
+ expected: float64(19.95),
+ },
+ {
+ name: `$.store.'bicycle*unicycle'.price`,
+ path: builder().Root().Child("store").Child(`bicycle*unicycle`).Child("price").Build(),
+ expected: float64(20.25),
+ },
+ }
+ t.Run("PathString", func(t *testing.T) {
+ for _, test := range tests {
+ t.Run(test.name, func(t *testing.T) {
+ path, err := yaml.PathString(test.name)
+ if err != nil {
+ t.Fatalf("%+v", err)
+ }
+ if test.name != path.String() {
+ t.Fatalf("expected %s but actual %s", test.name, path.String())
+ }
+ })
+ }
+ })
+ t.Run("string", func(t *testing.T) {
+ for _, test := range tests {
+ t.Run(test.name, func(t *testing.T) {
+ if test.name != test.path.String() {
+ t.Fatalf("expected %s but actual %s", test.name, test.path.String())
+ }
+ })
+ }
+ })
+ t.Run("read", func(t *testing.T) {
+ for _, test := range tests {
+ t.Run(test.name, func(t *testing.T) {
+ var v interface{}
+ if err := test.path.Read(strings.NewReader(yml), &v); err != nil {
+ t.Fatalf("%+v", err)
+ }
+ if !reflect.DeepEqual(test.expected, v) {
+ t.Fatalf("expected %v(%T). but actual %v(%T)", test.expected, test.expected, v, v)
+ }
+ })
+ }
+ })
+ t.Run("filter", func(t *testing.T) {
+ var target interface{}
+ if err := yaml.Unmarshal([]byte(yml), &target); err != nil {
+ t.Fatalf("failed to unmarshal: %+v", err)
+ }
+ for _, test := range tests {
+ t.Run(test.name, func(t *testing.T) {
+ var v interface{}
+ if err := test.path.Filter(target, &v); err != nil {
+ t.Fatalf("%+v", err)
+ }
+ if !reflect.DeepEqual(test.expected, v) {
+ t.Fatalf("expected %v(%T). but actual %v(%T)", test.expected, test.expected, v, v)
+ }
+ })
+ }
+ })
+}
+
+func TestPath_ReservedKeyword(t *testing.T) {
+ tests := []struct {
+ name string
+ path string
+ src string
+ expected interface{}
+ failure bool
+ }{
+ {
+ name: "quoted path",
+ path: `$.'a.b.c'.foo`,
+ src: `
+a.b.c:
+ foo: bar
+`,
+ expected: "bar",
+ },
+ {
+ name: "contains quote key",
+ path: `$.a'b`,
+ src: `a'b: 10`,
+ expected: uint64(10),
+ },
+ {
+ name: "escaped quote",
+ path: `$.'alice\'s age'`,
+ src: `alice's age: 10`,
+ expected: uint64(10),
+ },
+ {
+ name: "directly use white space",
+ path: `$.a b`,
+ src: `a b: 10`,
+ expected: uint64(10),
+ },
+ {
+ name: "empty quoted key",
+ path: `$.''`,
+ src: `a: 10`,
+ failure: true,
+ },
+ {
+ name: "unterminated quote",
+ path: `$.'abcd`,
+ src: `abcd: 10`,
+ failure: true,
+ },
+ }
+ for _, test := range tests {
+ t.Run(test.name, func(t *testing.T) {
+ path, err := yaml.PathString(test.path)
+ if test.failure {
+ if err == nil {
+ t.Fatal("expected error")
+ }
+ return
+ } else {
+ if err != nil {
+ t.Fatalf("%+v", err)
+ }
+ }
+ file, err := parser.ParseBytes([]byte(test.src), 0)
+ if err != nil {
+ t.Fatal(err)
+ }
+ var v interface{}
+ if err := path.Read(file, &v); err != nil {
+ t.Fatalf("%+v", err)
+ }
+ if v != test.expected {
+ t.Fatalf("failed to get value. expected:[%v] but got:[%v]", test.expected, v)
+ }
+ })
+ }
+}
+
+func TestPath_Invalid(t *testing.T) {
+ tests := []struct {
+ path string
+ src string
+ }{
+ {
+ path: "$.wrong",
+ src: "foo: bar",
+ },
+ }
+ for _, test := range tests {
+ path, err := yaml.PathString(test.path)
+ if err != nil {
+ t.Fatal(err)
+ }
+ t.Run("path.Read", func(t *testing.T) {
+ file, err := parser.ParseBytes([]byte(test.src), 0)
+ if err != nil {
+ t.Fatal(err)
+ }
+ var v interface{}
+ err = path.Read(file, &v)
+ if err == nil {
+ t.Fatal("expected error")
+ }
+ if !yaml.IsNotFoundNodeError(err) {
+ t.Fatalf("unexpected error %s", err)
+ }
+ })
+ t.Run("path.ReadNode", func(t *testing.T) {
+ file, err := parser.ParseBytes([]byte(test.src), 0)
+ if err != nil {
+ t.Fatal(err)
+ }
+ _, err = path.ReadNode(file)
+ if err == nil {
+ t.Fatal("expected error")
+ }
+ if !yaml.IsNotFoundNodeError(err) {
+ t.Fatalf("unexpected error %s", err)
+ }
+ })
+ }
+}
+
+func TestPath_Merge(t *testing.T) {
+ tests := []struct {
+ path string
+ dst string
+ src string
+ expected string
+ }{
+ {
+ path: "$.c",
+ dst: `
+a: 1
+b: 2
+c:
+ d: 3
+ e: 4
+`,
+ src: `
+f: 5
+g: 6
+`,
+ expected: `
+a: 1
+b: 2
+c:
+ d: 3
+ e: 4
+ f: 5
+ g: 6
+`,
+ },
+ {
+ path: "$.a.b",
+ dst: `
+a:
+ b:
+ - 1
+ - 2
+`,
+ src: `
+- 3
+- map:
+ - 4
+ - 5
+`,
+ expected: `
+a:
+ b:
+ - 1
+ - 2
+ - 3
+ - map:
+ - 4
+ - 5
+`,
+ },
+ }
+ for _, test := range tests {
+ t.Run(test.path, func(t *testing.T) {
+ path, err := yaml.PathString(test.path)
+ if err != nil {
+ t.Fatalf("%+v", err)
+ }
+ t.Run("FromReader", func(t *testing.T) {
+ file, err := parser.ParseBytes([]byte(test.dst), 0)
+ if err != nil {
+ t.Fatalf("%+v", err)
+ }
+ if err := path.MergeFromReader(file, strings.NewReader(test.src)); err != nil {
+ t.Fatalf("%+v", err)
+ }
+ actual := "\n" + file.String()
+ if test.expected != actual {
+ t.Fatalf("expected: %q. but got %q", test.expected, actual)
+ }
+ })
+ t.Run("FromFile", func(t *testing.T) {
+ file, err := parser.ParseBytes([]byte(test.dst), 0)
+ if err != nil {
+ t.Fatalf("%+v", err)
+ }
+ src, err := parser.ParseBytes([]byte(test.src), 0)
+ if err != nil {
+ t.Fatalf("%+v", err)
+ }
+ if err := path.MergeFromFile(file, src); err != nil {
+ t.Fatalf("%+v", err)
+ }
+ actual := "\n" + file.String()
+ if test.expected != actual {
+ t.Fatalf("expected: %q. but got %q", test.expected, actual)
+ }
+ })
+ t.Run("FromNode", func(t *testing.T) {
+ file, err := parser.ParseBytes([]byte(test.dst), 0)
+ if err != nil {
+ t.Fatalf("%+v", err)
+ }
+ src, err := parser.ParseBytes([]byte(test.src), 0)
+ if err != nil {
+ t.Fatalf("%+v", err)
+ }
+ if len(src.Docs) == 0 {
+ t.Fatalf("failed to parse")
+ }
+ if err := path.MergeFromNode(file, src.Docs[0]); err != nil {
+ t.Fatalf("%+v", err)
+ }
+ actual := "\n" + file.String()
+ if test.expected != actual {
+ t.Fatalf("expected: %q. but got %q", test.expected, actual)
+ }
+ })
+ })
+ }
+}
+
+func TestPath_Replace(t *testing.T) {
+ tests := []struct {
+ path string
+ dst string
+ src string
+ expected string
+ }{
+ {
+ path: "$.a",
+ dst: `
+a: 1
+b: 2
+`,
+ src: `3`,
+ expected: `
+a: 3
+b: 2
+`,
+ },
+ {
+ path: "$.b",
+ dst: `
+b: 1
+c: 2
+`,
+ src: `
+d: e
+f:
+ g: h
+ i: j
+`,
+ expected: `
+b:
+ d: e
+ f:
+ g: h
+ i: j
+c: 2
+`,
+ },
+ {
+ path: "$.a.b[0]",
+ dst: `
+a:
+ b:
+ - hello
+c: 2
+`,
+ src: `world`,
+ expected: `
+a:
+ b:
+ - world
+c: 2
+`,
+ },
+
+ {
+ path: "$.books[*].author",
+ dst: `
+books:
+ - name: book_a
+ author: none
+ - name: book_b
+ author: none
+pictures:
+ - name: picture_a
+ author: none
+ - name: picture_b
+ author: none
+building:
+ author: none
+`,
+ src: `ken`,
+ expected: `
+books:
+ - name: book_a
+ author: ken
+ - name: book_b
+ author: ken
+pictures:
+ - name: picture_a
+ author: none
+ - name: picture_b
+ author: none
+building:
+ author: none
+`,
+ },
+ {
+ path: "$..author",
+ dst: `
+books:
+ - name: book_a
+ author: none
+ - name: book_b
+ author: none
+pictures:
+ - name: picture_a
+ author: none
+ - name: picture_b
+ author: none
+building:
+ author: none
+`,
+ src: `ken`,
+ expected: `
+books:
+ - name: book_a
+ author: ken
+ - name: book_b
+ author: ken
+pictures:
+ - name: picture_a
+ author: ken
+ - name: picture_b
+ author: ken
+building:
+ author: ken
+`,
+ },
+ }
+ for _, test := range tests {
+ t.Run(test.path, func(t *testing.T) {
+ path, err := yaml.PathString(test.path)
+ if err != nil {
+ t.Fatalf("%+v", err)
+ }
+ t.Run("WithReader", func(t *testing.T) {
+ file, err := parser.ParseBytes([]byte(test.dst), 0)
+ if err != nil {
+ t.Fatalf("%+v", err)
+ }
+ if err := path.ReplaceWithReader(file, strings.NewReader(test.src)); err != nil {
+ t.Fatalf("%+v", err)
+ }
+ actual := "\n" + file.String()
+ if test.expected != actual {
+ t.Fatalf("expected: %q. but got %q", test.expected, actual)
+ }
+ })
+ t.Run("WithFile", func(t *testing.T) {
+ file, err := parser.ParseBytes([]byte(test.dst), 0)
+ if err != nil {
+ t.Fatalf("%+v", err)
+ }
+ src, err := parser.ParseBytes([]byte(test.src), 0)
+ if err != nil {
+ t.Fatalf("%+v", err)
+ }
+ if err := path.ReplaceWithFile(file, src); err != nil {
+ t.Fatalf("%+v", err)
+ }
+ actual := "\n" + file.String()
+ if test.expected != actual {
+ t.Fatalf("expected: %q. but got %q", test.expected, actual)
+ }
+ })
+ t.Run("WithNode", func(t *testing.T) {
+ file, err := parser.ParseBytes([]byte(test.dst), 0)
+ if err != nil {
+ t.Fatalf("%+v", err)
+ }
+ src, err := parser.ParseBytes([]byte(test.src), 0)
+ if err != nil {
+ t.Fatalf("%+v", err)
+ }
+ if len(src.Docs) == 0 {
+ t.Fatalf("failed to parse")
+ }
+ if err := path.ReplaceWithNode(file, src.Docs[0]); err != nil {
+ t.Fatalf("%+v", err)
+ }
+ actual := "\n" + file.String()
+ if test.expected != actual {
+ t.Fatalf("expected: %q. but got %q", test.expected, actual)
+ }
+ })
+ })
+ }
+}
+
+func ExamplePath_AnnotateSource() {
+ yml := `
+a: 1
+b: "hello"
+`
+ var v struct {
+ A int
+ B string
+ }
+ if err := yaml.Unmarshal([]byte(yml), &v); err != nil {
+ panic(err)
+ }
+ if v.A != 2 {
+ // output error with YAML source
+ path, err := yaml.PathString("$.a")
+ if err != nil {
+ log.Fatal(err)
+ }
+ source, err := path.AnnotateSource([]byte(yml), false)
+ if err != nil {
+ log.Fatal(err)
+ }
+ fmt.Printf("a value expected 2 but actual %d:\n%s\n", v.A, string(source))
+ }
+ // OUTPUT:
+ // a value expected 2 but actual 1:
+ // > 2 | a: 1
+ // ^
+ // 3 | b: "hello"
+}
+
+func ExamplePath_AnnotateSourceWithComment() {
+ yml := `
+# This is my document
+doc:
+ # This comment should be line 3
+ map:
+ # And below should be line 5
+ - value1
+ - value2
+ other: value3
+ `
+ path, err := yaml.PathString("$.doc.map[0]")
+ if err != nil {
+ log.Fatal(err)
+ }
+ msg, err := path.AnnotateSource([]byte(yml), false)
+ if err != nil {
+ log.Fatal(err)
+ }
+ fmt.Println(string(msg))
+ // OUTPUT:
+ // 4 | # This comment should be line 3
+ // 5 | map:
+ // 6 | # And below should be line 5
+ // > 7 | - value1
+ // ^
+ // 8 | - value2
+ // 9 | other: value3
+ // 10 |
+}
+
+func ExamplePath_PathString() {
+ yml := `
+store:
+ book:
+ - author: john
+ price: 10
+ - author: ken
+ price: 12
+ bicycle:
+ color: red
+ price: 19.95
+`
+ path, err := yaml.PathString("$.store.book[*].author")
+ if err != nil {
+ log.Fatal(err)
+ }
+ var authors []string
+ if err := path.Read(strings.NewReader(yml), &authors); err != nil {
+ log.Fatal(err)
+ }
+ fmt.Println(authors)
+ // OUTPUT:
+ // [john ken]
+}
diff --git a/tmpmod/github.com/goccy/go-yaml/printer/printer.go b/tmpmod/github.com/goccy/go-yaml/printer/printer.go
new file mode 100644
index 00000000..28f01787
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/printer/printer.go
@@ -0,0 +1,352 @@
+package printer
+
+import (
+ "fmt"
+ "math"
+ "strings"
+
+ "github.com/fatih/color"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/ast"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/token"
+)
+
+// Property is an additional property set for each token.
+type Property struct {
+ Prefix string
+ Suffix string
+}
+
+// PrintFunc returns a Property instance.
+type PrintFunc func() *Property
+
+// Printer creates text from a token collection or an AST.
+type Printer struct {
+ LineNumber bool
+ LineNumberFormat func(num int) string
+ MapKey PrintFunc
+ Anchor PrintFunc
+ Alias PrintFunc
+ Bool PrintFunc
+ String PrintFunc
+ Number PrintFunc
+}
+
+func defaultLineNumberFormat(num int) string {
+ return fmt.Sprintf("%2d | ", num)
+}
+
+func (p *Printer) property(tk *token.Token) *Property {
+ prop := &Property{}
+ switch tk.PreviousType() {
+ case token.AnchorType:
+ if p.Anchor != nil {
+ return p.Anchor()
+ }
+ return prop
+ case token.AliasType:
+ if p.Alias != nil {
+ return p.Alias()
+ }
+ return prop
+ }
+ switch tk.NextType() {
+ case token.MappingValueType:
+ if p.MapKey != nil {
+ return p.MapKey()
+ }
+ return prop
+ }
+ switch tk.Type {
+ case token.BoolType:
+ if p.Bool != nil {
+ return p.Bool()
+ }
+ return prop
+ case token.AnchorType:
+ if p.Anchor != nil {
+ return p.Anchor()
+ }
+ return prop
+ case token.AliasType:
+		if p.Alias != nil {
+ return p.Alias()
+ }
+ return prop
+ case token.StringType, token.SingleQuoteType, token.DoubleQuoteType:
+ if p.String != nil {
+ return p.String()
+ }
+ return prop
+ case token.IntegerType, token.FloatType:
+ if p.Number != nil {
+ return p.Number()
+ }
+ return prop
+ default:
+ }
+ return prop
+}
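+
+// For illustration only ( not part of upstream goccy/go-yaml ): property resolves print
+// properties in the following order of precedence: the previous token's type ( anchor or
+// alias name ), then the next token's type ( a token followed by ':' is a map key ), and
+// finally the token's own type ( bool, anchor, alias, string, number ).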
+
+// PrintTokens creates text from a token collection.
+func (p *Printer) PrintTokens(tokens token.Tokens) string {
+ if len(tokens) == 0 {
+ return ""
+ }
+ if p.LineNumber {
+ if p.LineNumberFormat == nil {
+ p.LineNumberFormat = defaultLineNumberFormat
+ }
+ }
+ texts := []string{}
+ lineNumber := tokens[0].Position.Line
+ for _, tk := range tokens {
+ lines := strings.Split(tk.Origin, "\n")
+ prop := p.property(tk)
+ header := ""
+ if p.LineNumber {
+ header = p.LineNumberFormat(lineNumber)
+ }
+ if len(lines) == 1 {
+ line := prop.Prefix + lines[0] + prop.Suffix
+ if len(texts) == 0 {
+ texts = append(texts, header+line)
+ lineNumber++
+ } else {
+ text := texts[len(texts)-1]
+ texts[len(texts)-1] = text + line
+ }
+ } else {
+ for idx, src := range lines {
+ if p.LineNumber {
+ header = p.LineNumberFormat(lineNumber)
+ }
+ line := prop.Prefix + src + prop.Suffix
+ if idx == 0 {
+ if len(texts) == 0 {
+ texts = append(texts, header+line)
+ lineNumber++
+ } else {
+ text := texts[len(texts)-1]
+ texts[len(texts)-1] = text + line
+ }
+ } else {
+ texts = append(texts, fmt.Sprintf("%s%s", header, line))
+ lineNumber++
+ }
+ }
+ }
+ }
+ return strings.Join(texts, "\n")
+}
+
+// PrintNode creates text from an ast.Node.
+func (p *Printer) PrintNode(node ast.Node) []byte {
+ return []byte(fmt.Sprintf("%+v\n", node))
+}
+
+const escape = "\x1b"
+
+func format(attr color.Attribute) string {
+ return fmt.Sprintf("%s[%dm", escape, attr)
+}
+
+func (p *Printer) setDefaultColorSet() {
+ p.Bool = func() *Property {
+ return &Property{
+ Prefix: format(color.FgHiMagenta),
+ Suffix: format(color.Reset),
+ }
+ }
+ p.Number = func() *Property {
+ return &Property{
+ Prefix: format(color.FgHiMagenta),
+ Suffix: format(color.Reset),
+ }
+ }
+ p.MapKey = func() *Property {
+ return &Property{
+ Prefix: format(color.FgHiCyan),
+ Suffix: format(color.Reset),
+ }
+ }
+ p.Anchor = func() *Property {
+ return &Property{
+ Prefix: format(color.FgHiYellow),
+ Suffix: format(color.Reset),
+ }
+ }
+ p.Alias = func() *Property {
+ return &Property{
+ Prefix: format(color.FgHiYellow),
+ Suffix: format(color.Reset),
+ }
+ }
+ p.String = func() *Property {
+ return &Property{
+ Prefix: format(color.FgHiGreen),
+ Suffix: format(color.Reset),
+ }
+ }
+}
+
+func (p *Printer) PrintErrorMessage(msg string, isColored bool) string {
+ if isColored {
+ return fmt.Sprintf("%s%s%s",
+ format(color.FgHiRed),
+ msg,
+ format(color.Reset),
+ )
+ }
+ return msg
+}
+
+func (p *Printer) removeLeftSideNewLineChar(src string) string {
+ return strings.TrimLeft(strings.TrimLeft(strings.TrimLeft(src, "\r"), "\n"), "\r\n")
+}
+
+func (p *Printer) removeRightSideNewLineChar(src string) string {
+ return strings.TrimRight(strings.TrimRight(strings.TrimRight(src, "\r"), "\n"), "\r\n")
+}
+
+func (p *Printer) removeRightSideWhiteSpaceChar(src string) string {
+ return p.removeRightSideNewLineChar(strings.TrimRight(src, " "))
+}
+
+func (p *Printer) newLineCount(s string) int {
+ src := []rune(s)
+ size := len(src)
+ cnt := 0
+ for i := 0; i < size; i++ {
+ c := src[i]
+ switch c {
+ case '\r':
+ if i+1 < size && src[i+1] == '\n' {
+ i++
+ }
+ cnt++
+ case '\n':
+ cnt++
+ }
+ }
+ return cnt
+}
+
+func (p *Printer) isNewLineLastChar(s string) bool {
+ for i := len(s) - 1; i > 0; i-- {
+ c := s[i]
+ switch c {
+ case ' ':
+ continue
+ case '\n', '\r':
+ return true
+ }
+ break
+ }
+ return false
+}
+
+func (p *Printer) printBeforeTokens(tk *token.Token, minLine, extLine int) token.Tokens {
+ for {
+ if tk.Prev == nil {
+ break
+ }
+ if tk.Prev.Position.Line < minLine {
+ break
+ }
+ tk = tk.Prev
+ }
+ minTk := tk.Clone()
+ if minTk.Prev != nil {
+		// prepend the trailing white space of the previous token to minTk
+ prev := minTk.Prev
+ whiteSpaceLen := len(prev.Origin) - len(strings.TrimRight(prev.Origin, " "))
+ minTk.Origin = strings.Repeat(" ", whiteSpaceLen) + minTk.Origin
+ }
+ minTk.Origin = p.removeLeftSideNewLineChar(minTk.Origin)
+ tokens := token.Tokens{minTk}
+ tk = minTk.Next
+ for tk != nil && tk.Position.Line <= extLine {
+ clonedTk := tk.Clone()
+ tokens.Add(clonedTk)
+ tk = clonedTk.Next
+ }
+ lastTk := tokens[len(tokens)-1]
+ trimmedOrigin := p.removeRightSideWhiteSpaceChar(lastTk.Origin)
+ suffix := lastTk.Origin[len(trimmedOrigin):]
+ lastTk.Origin = trimmedOrigin
+
+ if lastTk.Next != nil && len(suffix) > 1 {
+ next := lastTk.Next.Clone()
+ // add suffix to header of next token
+ if suffix[0] == '\n' || suffix[0] == '\r' {
+ suffix = suffix[1:]
+ }
+ next.Origin = suffix + next.Origin
+ lastTk.Next = next
+ }
+ return tokens
+}
+
+func (p *Printer) printAfterTokens(tk *token.Token, maxLine int) token.Tokens {
+ tokens := token.Tokens{}
+ if tk == nil {
+ return tokens
+ }
+ if tk.Position.Line > maxLine {
+ return tokens
+ }
+ minTk := tk.Clone()
+ minTk.Origin = p.removeLeftSideNewLineChar(minTk.Origin)
+ tokens.Add(minTk)
+ tk = minTk.Next
+ for tk != nil && tk.Position.Line <= maxLine {
+ clonedTk := tk.Clone()
+ tokens.Add(clonedTk)
+ tk = clonedTk.Next
+ }
+ return tokens
+}
+
+func (p *Printer) setupErrorTokenFormat(annotateLine int, isColored bool) {
+ prefix := func(annotateLine, num int) string {
+ if annotateLine == num {
+ return fmt.Sprintf("> %2d | ", num)
+ }
+ return fmt.Sprintf(" %2d | ", num)
+ }
+ p.LineNumber = true
+ p.LineNumberFormat = func(num int) string {
+ if isColored {
+ fn := color.New(color.Bold, color.FgHiWhite).SprintFunc()
+ return fn(prefix(annotateLine, num))
+ }
+ return prefix(annotateLine, num)
+ }
+ if isColored {
+ p.setDefaultColorSet()
+ }
+}
+
+func (p *Printer) PrintErrorToken(tk *token.Token, isColored bool) string {
+ errToken := tk
+ curLine := tk.Position.Line
+ curExtLine := curLine + p.newLineCount(p.removeLeftSideNewLineChar(tk.Origin))
+ if p.isNewLineLastChar(tk.Origin) {
+		// if the last character ( excluding white space ) is a new line character, ignore it.
+ curExtLine--
+ }
+
+ minLine := int(math.Max(float64(curLine-3), 1))
+ maxLine := curExtLine + 3
+ p.setupErrorTokenFormat(curLine, isColored)
+
+ beforeTokens := p.printBeforeTokens(tk, minLine, curExtLine)
+ lastTk := beforeTokens[len(beforeTokens)-1]
+ afterTokens := p.printAfterTokens(lastTk.Next, maxLine)
+
+ beforeSource := p.PrintTokens(beforeTokens)
+ prefixSpaceNum := len(fmt.Sprintf(" %2d | ", curLine))
+ annotateLine := strings.Repeat(" ", prefixSpaceNum+errToken.Position.Column-1) + "^"
+ afterSource := p.PrintTokens(afterTokens)
+ return fmt.Sprintf("%s\n%s\n%s", beforeSource, annotateLine, afterSource)
+}
diff --git a/tmpmod/github.com/goccy/go-yaml/printer/printer_test.go b/tmpmod/github.com/goccy/go-yaml/printer/printer_test.go
new file mode 100644
index 00000000..a99d87a7
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/printer/printer_test.go
@@ -0,0 +1,223 @@
+package printer_test
+
+import (
+ "fmt"
+ "testing"
+
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/lexer"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/printer"
+)
+
+func Test_Printer(t *testing.T) {
+ yml := `---
+text: aaaa
+text2: aaaa
+ bbbb
+ cccc
+ dddd
+ eeee
+text3: ffff
+ gggg
+ hhhh
+ iiii
+ jjjj
+bool: true
+number: 10
+anchor: &x 1
+alias: *x
+`
+ t.Run("print starting from tokens[3]", func(t *testing.T) {
+ tokens := lexer.Tokenize(yml)
+ var p printer.Printer
+ actual := "\n" + p.PrintErrorToken(tokens[3], false)
+ expect := `
+ 1 | ---
+> 2 | text: aaaa
+ ^
+ 3 | text2: aaaa
+ 4 | bbbb
+ 5 | cccc
+ 6 | dddd
+ 7 | eeee
+ 8 | `
+ if actual != expect {
+ t.Fatalf("unexpected output: expect:[%s]\n actual:[%s]", expect, actual)
+ }
+ })
+ t.Run("print starting from tokens[4]", func(t *testing.T) {
+ tokens := lexer.Tokenize(yml)
+ var p printer.Printer
+ actual := "\n" + p.PrintErrorToken(tokens[4], false)
+ expect := `
+ 1 | ---
+ 2 | text: aaaa
+> 3 | text2: aaaa
+ 4 | bbbb
+ 5 | cccc
+ 6 | dddd
+ 7 | eeee
+ ^
+`
+ if actual != expect {
+ t.Fatalf("unexpected output: expect:[%s]\n actual:[%s]", expect, actual)
+ }
+ })
+ t.Run("print starting from tokens[6]", func(t *testing.T) {
+ tokens := lexer.Tokenize(yml)
+ var p printer.Printer
+ actual := "\n" + p.PrintErrorToken(tokens[6], false)
+ expect := `
+ 1 | ---
+ 2 | text: aaaa
+> 3 | text2: aaaa
+ 4 | bbbb
+ 5 | cccc
+ 6 | dddd
+ 7 | eeee
+ ^
+ 8 | text3: ffff
+ 9 | gggg
+ 10 | hhhh
+ 11 | iiii
+ 12 | jjjj
+ 13 | `
+ if actual != expect {
+ t.Fatalf("unexpected output: expect:[%s]\n actual:[%s]", expect, actual)
+ }
+ })
+ t.Run("print error token with document header", func(t *testing.T) {
+ tokens := lexer.Tokenize(`---
+a:
+ b:
+ c:
+ d: e
+ f: g
+ h: i
+
+---
+`)
+ expect := `
+ 3 | b:
+ 4 | c:
+ 5 | d: e
+> 6 | f: g
+ ^
+ 7 | h: i
+ 8 |
+ 9 | ---`
+ var p printer.Printer
+ actual := "\n" + p.PrintErrorToken(tokens[12], false)
+ if actual != expect {
+ t.Fatalf("unexpected output: expect:[%s]\n actual:[%s]", expect, actual)
+ }
+ })
+ t.Run("output with color", func(t *testing.T) {
+ t.Run("token6", func(t *testing.T) {
+ tokens := lexer.Tokenize(yml)
+ var p printer.Printer
+ t.Logf("\n%s", p.PrintErrorToken(tokens[6], true))
+ })
+ t.Run("token9", func(t *testing.T) {
+ tokens := lexer.Tokenize(yml)
+ var p printer.Printer
+ t.Logf("\n%s", p.PrintErrorToken(tokens[9], true))
+ })
+ t.Run("token12", func(t *testing.T) {
+ tokens := lexer.Tokenize(yml)
+ var p printer.Printer
+ t.Logf("\n%s", p.PrintErrorToken(tokens[12], true))
+ })
+ })
+ t.Run("print error message", func(t *testing.T) {
+ var p printer.Printer
+ src := "message"
+ msg := p.PrintErrorMessage(src, false)
+ if msg != src {
+ t.Fatal("unexpected result")
+ }
+ p.PrintErrorMessage(src, true)
+ })
+}
+
+func TestPrinter_Anchor(t *testing.T) {
+ expected := `
+anchor: &x 1
+alias: *x`
+ tokens := lexer.Tokenize(expected)
+ var p printer.Printer
+ got := p.PrintTokens(tokens)
+ if expected != got {
+ t.Fatalf("unexpected output: expect:[%s]\n actual:[%s]", expected, got)
+ }
+}
+
+func Test_Printer_Multiline(t *testing.T) {
+ yml := `
+text1: 'aaaa
+ bbbb
+ cccc'
+text2: "ffff
+ gggg
+ hhhh"
+text3: hello
+`
+ tc := []struct {
+ token int
+ want string
+ }{
+ {
+ token: 2,
+ want: `
+> 2 | text1: 'aaaa
+ 3 | bbbb
+ 4 | cccc'
+ ^
+ 5 | text2: "ffff
+ 6 | gggg
+ 7 | hhhh"`,
+ },
+ {token: 3,
+ want: `
+ 2 | text1: 'aaaa
+ 3 | bbbb
+ 4 | cccc'
+> 5 | text2: "ffff
+ 6 | gggg
+ 7 | hhhh"
+ ^
+ 8 | text3: hello`,
+ },
+ {token: 5,
+ want: `
+ 2 | text1: 'aaaa
+ 3 | bbbb
+ 4 | cccc'
+> 5 | text2: "ffff
+ 6 | gggg
+ 7 | hhhh"
+ ^
+ 8 | text3: hello`,
+ },
+ {token: 6,
+ want: `
+ 5 | text2: "ffff
+ 6 | gggg
+ 7 | hhhh"
+> 8 | text3: hello
+ ^
+`,
+ },
+ }
+ for _, tt := range tc {
+ name := fmt.Sprintf("print starting from tokens[%d]", tt.token)
+ t.Run(name, func(t *testing.T) {
+ tokens := lexer.Tokenize(yml)
+ var p printer.Printer
+ got := "\n" + p.PrintErrorToken(tokens[tt.token], false)
+ want := tt.want
+ if got != want {
+				t.Fatalf("PrintErrorToken() got: %s\n want:%s\n", got, want)
+ }
+ })
+ }
+}
diff --git a/tmpmod/github.com/goccy/go-yaml/scanner/context.go b/tmpmod/github.com/goccy/go-yaml/scanner/context.go
new file mode 100644
index 00000000..902f678d
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/scanner/context.go
@@ -0,0 +1,236 @@
+package scanner
+
+import (
+ "sync"
+
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/token"
+)
+
+const whitespace = ' '
+
+// Context context at scanning
+type Context struct {
+ idx int
+ size int
+ notSpaceCharPos int
+ notSpaceOrgCharPos int
+ src []rune
+ buf []rune
+ obuf []rune
+ tokens token.Tokens
+ isRawFolded bool
+ isLiteral bool
+ isFolded bool
+ isSingleLine bool
+ literalOpt string
+}
+
+var (
+ ctxPool = sync.Pool{
+ New: func() interface{} {
+ return createContext()
+ },
+ }
+)
+
+func createContext() *Context {
+ return &Context{
+ idx: 0,
+ tokens: token.Tokens{},
+ isSingleLine: true,
+ }
+}
+
+func newContext(src []rune) *Context {
+ ctx := ctxPool.Get().(*Context)
+ ctx.reset(src)
+ return ctx
+}
+
+func (c *Context) release() {
+ ctxPool.Put(c)
+}
+
+func (c *Context) reset(src []rune) {
+ c.idx = 0
+ c.size = len(src)
+ c.src = src
+ c.tokens = c.tokens[:0]
+ c.resetBuffer()
+ c.isRawFolded = false
+ c.isSingleLine = true
+ c.isLiteral = false
+ c.isFolded = false
+ c.literalOpt = ""
+}
+
+func (c *Context) resetBuffer() {
+ c.buf = c.buf[:0]
+ c.obuf = c.obuf[:0]
+ c.notSpaceCharPos = 0
+ c.notSpaceOrgCharPos = 0
+}
+
+func (c *Context) isSaveIndentMode() bool {
+ return c.isLiteral || c.isFolded || c.isRawFolded
+}
+
+func (c *Context) breakLiteral() {
+ c.isLiteral = false
+ c.isRawFolded = false
+ c.isFolded = false
+ c.literalOpt = ""
+}
+
+func (c *Context) addToken(tk *token.Token) {
+ if tk == nil {
+ return
+ }
+ c.tokens = append(c.tokens, tk)
+}
+
+func (c *Context) addBuf(r rune) {
+ if len(c.buf) == 0 && r == ' ' {
+ return
+ }
+ c.buf = append(c.buf, r)
+ if r != ' ' && r != '\t' {
+ c.notSpaceCharPos = len(c.buf)
+ }
+}
+
+func (c *Context) addOriginBuf(r rune) {
+ c.obuf = append(c.obuf, r)
+ if r != ' ' && r != '\t' {
+ c.notSpaceOrgCharPos = len(c.obuf)
+ }
+}
+
+func (c *Context) removeRightSpaceFromBuf() int {
+ trimmedBuf := c.obuf[:c.notSpaceOrgCharPos]
+ buflen := len(trimmedBuf)
+ diff := len(c.obuf) - buflen
+ if diff > 0 {
+ c.obuf = c.obuf[:buflen]
+ c.buf = c.bufferedSrc()
+ }
+ return diff
+}
+
+func (c *Context) isDocument() bool {
+ return c.isLiteral || c.isFolded || c.isRawFolded
+}
+
+func (c *Context) isEOS() bool {
+ return len(c.src)-1 <= c.idx
+}
+
+func (c *Context) isNextEOS() bool {
+ return len(c.src)-1 <= c.idx+1
+}
+
+func (c *Context) next() bool {
+ return c.idx < c.size
+}
+
+func (c *Context) source(s, e int) string {
+ return string(c.src[s:e])
+}
+
+func (c *Context) previousChar() rune {
+ if c.idx > 0 {
+ return c.src[c.idx-1]
+ }
+ return rune(0)
+}
+
+func (c *Context) currentChar() rune {
+ if c.size > c.idx {
+ return c.src[c.idx]
+ }
+ return rune(0)
+}
+
+func (c *Context) currentCharWithSkipWhitespace() rune {
+ idx := c.idx
+ for c.size > idx {
+ ch := c.src[idx]
+ if ch != whitespace {
+ return ch
+ }
+ idx++
+ }
+ return rune(0)
+}
+
+func (c *Context) nextChar() rune {
+ if c.size > c.idx+1 {
+ return c.src[c.idx+1]
+ }
+ return rune(0)
+}
+
+func (c *Context) repeatNum(r rune) int {
+ cnt := 0
+ for i := c.idx; i < c.size; i++ {
+ if c.src[i] == r {
+ cnt++
+ } else {
+ break
+ }
+ }
+ return cnt
+}
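+
+// For illustration only ( not part of upstream goccy/go-yaml ): repeatNum counts how many
+// times the given rune repeats starting at the current index; it is used, among other
+// things, to detect "---" ( document header ) and "..." ( document end ), e.g.
+//
+//	// with c.src = []rune("---\na: b") and c.idx = 0
+//	c.repeatNum('-') // => 3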
+
+func (c *Context) progress(num int) {
+ c.idx += num
+}
+
+func (c *Context) nextPos() int {
+ return c.idx + 1
+}
+
+func (c *Context) existsBuffer() bool {
+ return len(c.bufferedSrc()) != 0
+}
+
+func (c *Context) bufferedSrc() []rune {
+ src := c.buf[:c.notSpaceCharPos]
+ if c.isDocument() && c.literalOpt == "-" {
+ // remove end '\n' character and trailing empty lines
+ // https://yaml.org/spec/1.2.2/#8112-block-chomping-indicator
+ for {
+ if len(src) > 0 && src[len(src)-1] == '\n' {
+ src = src[:len(src)-1]
+ continue
+ }
+ break
+ }
+ }
+ return src
+}
+
+func (c *Context) bufferedToken(pos *token.Position) *token.Token {
+ if c.idx == 0 {
+ return nil
+ }
+ source := c.bufferedSrc()
+ if len(source) == 0 {
+ return nil
+ }
+ var tk *token.Token
+ if c.isDocument() {
+ tk = token.String(string(source), string(c.obuf), pos)
+ } else {
+ tk = token.New(string(source), string(c.obuf), pos)
+ }
+ c.resetBuffer()
+ return tk
+}
+
+func (c *Context) lastToken() *token.Token {
+ if len(c.tokens) != 0 {
+ return c.tokens[len(c.tokens)-1]
+ }
+ return nil
+}
diff --git a/tmpmod/github.com/goccy/go-yaml/scanner/scanner.go b/tmpmod/github.com/goccy/go-yaml/scanner/scanner.go
new file mode 100644
index 00000000..e195d269
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/scanner/scanner.go
@@ -0,0 +1,908 @@
+package scanner
+
+import (
+ "io"
+ "strings"
+
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/token"
+ "golang.org/x/xerrors"
+)
+
+// IndentState state for indent
+type IndentState int
+
+const (
+ // IndentStateEqual equals previous indent
+ IndentStateEqual IndentState = iota
+ // IndentStateUp more indent than previous
+ IndentStateUp
+ // IndentStateDown less indent than previous
+ IndentStateDown
+	// IndentStateKeep indicates the token does not start a line, so the indent is kept as-is
+ IndentStateKeep
+)
+
+// Scanner holds the scanner's internal state while processing a given text.
+// It can be allocated as part of another data structure but must be initialized via Init before use.
+type Scanner struct {
+ source []rune
+ sourcePos int
+ sourceSize int
+ line int
+ column int
+ offset int
+ prevIndentLevel int
+ prevIndentNum int
+ prevIndentColumn int
+ docStartColumn int
+ indentLevel int
+ indentNum int
+ isFirstCharAtLine bool
+ isAnchor bool
+ startedFlowSequenceNum int
+ startedFlowMapNum int
+ indentState IndentState
+ savedPos *token.Position
+}
+
+func (s *Scanner) pos() *token.Position {
+ return &token.Position{
+ Line: s.line,
+ Column: s.column,
+ Offset: s.offset,
+ IndentNum: s.indentNum,
+ IndentLevel: s.indentLevel,
+ }
+}
+
+func (s *Scanner) bufferedToken(ctx *Context) *token.Token {
+ if s.savedPos != nil {
+ tk := ctx.bufferedToken(s.savedPos)
+ s.savedPos = nil
+ return tk
+ }
+ line := s.line
+ column := s.column - len(ctx.buf)
+ level := s.indentLevel
+ if ctx.isSaveIndentMode() {
+ line -= s.newLineCount(ctx.buf)
+ column = strings.Index(string(ctx.obuf), string(ctx.buf)) + 1
+ // Since we are in a literal, folded or raw folded
+ // we can use the indent level from the last token.
+ last := ctx.lastToken()
+ if last != nil { // The last token should never be nil here.
+ level = last.Position.IndentLevel + 1
+ }
+ }
+ return ctx.bufferedToken(&token.Position{
+ Line: line,
+ Column: column,
+ Offset: s.offset - len(ctx.buf),
+ IndentNum: s.indentNum,
+ IndentLevel: level,
+ })
+}
+
+func (s *Scanner) progressColumn(ctx *Context, num int) {
+ s.column += num
+ s.offset += num
+ ctx.progress(num)
+}
+
+func (s *Scanner) progressLine(ctx *Context) {
+ s.column = 1
+ s.line++
+ s.offset++
+ s.indentNum = 0
+ s.isFirstCharAtLine = true
+ s.isAnchor = false
+ ctx.progress(1)
+}
+
+func (s *Scanner) isNeededKeepPreviousIndentNum(ctx *Context, c rune) bool {
+ if !s.isChangedToIndentStateUp() {
+ return false
+ }
+ if ctx.isDocument() {
+ return true
+ }
+ if c == '-' && ctx.existsBuffer() {
+ return true
+ }
+ return false
+}
+
+func (s *Scanner) isNewLineChar(c rune) bool {
+ if c == '\n' {
+ return true
+ }
+ if c == '\r' {
+ return true
+ }
+ return false
+}
+
+func (s *Scanner) newLineCount(src []rune) int {
+ size := len(src)
+ cnt := 0
+ for i := 0; i < size; i++ {
+ c := src[i]
+ switch c {
+ case '\r':
+ if i+1 < size && src[i+1] == '\n' {
+ i++
+ }
+ cnt++
+ case '\n':
+ cnt++
+ }
+ }
+ return cnt
+}
+
+func (s *Scanner) updateIndentState(ctx *Context) {
+ indentNumBasedIndentState := s.indentState
+ if s.prevIndentNum < s.indentNum {
+ s.indentLevel = s.prevIndentLevel + 1
+ indentNumBasedIndentState = IndentStateUp
+ } else if s.prevIndentNum == s.indentNum {
+ s.indentLevel = s.prevIndentLevel
+ indentNumBasedIndentState = IndentStateEqual
+ } else {
+ indentNumBasedIndentState = IndentStateDown
+ if s.prevIndentLevel > 0 {
+ s.indentLevel = s.prevIndentLevel - 1
+ }
+ }
+
+ if s.prevIndentColumn > 0 {
+ if s.prevIndentColumn < s.column {
+ s.indentState = IndentStateUp
+ } else if s.prevIndentColumn != s.column || indentNumBasedIndentState != IndentStateEqual {
+			// In the following case ( the current position is 'd' ), the variables become:
+			// - prevIndentColumn: 1 ( the column of 'a' )
+			// - indentNumBasedIndentState: IndentStateDown, because d's indentNum(1) is less than c's indentNum(3).
+			// Here s.prevIndentColumn(1) == s.column(1) is true, but we want to treat this as IndentStateDown.
+			// So we also look at the indentState derived from the prevIndentNum based logic above, and finally determine the indentState.
+ // ---
+ // a:
+ // b
+ // c
+ // d: e
+ // ^
+ s.indentState = IndentStateDown
+ } else {
+ s.indentState = IndentStateEqual
+ }
+ } else {
+ s.indentState = indentNumBasedIndentState
+ }
+}
+
+func (s *Scanner) updateIndent(ctx *Context, c rune) {
+ if s.isFirstCharAtLine && s.isNewLineChar(c) && ctx.isDocument() {
+ return
+ }
+ if s.isFirstCharAtLine && c == ' ' {
+ s.indentNum++
+ return
+ }
+ if !s.isFirstCharAtLine {
+ s.indentState = IndentStateKeep
+ return
+ }
+ s.updateIndentState(ctx)
+ s.isFirstCharAtLine = false
+ if s.isNeededKeepPreviousIndentNum(ctx, c) {
+ return
+ }
+ if s.indentState != IndentStateUp {
+ s.prevIndentColumn = 0
+ }
+ s.prevIndentNum = s.indentNum
+ s.prevIndentLevel = s.indentLevel
+}
+
+func (s *Scanner) isChangedToIndentStateDown() bool {
+ return s.indentState == IndentStateDown
+}
+
+func (s *Scanner) isChangedToIndentStateUp() bool {
+ return s.indentState == IndentStateUp
+}
+
+func (s *Scanner) isChangedToIndentStateEqual() bool {
+ return s.indentState == IndentStateEqual
+}
+
+func (s *Scanner) addBufferedTokenIfExists(ctx *Context) {
+ ctx.addToken(s.bufferedToken(ctx))
+}
+
+func (s *Scanner) breakLiteral(ctx *Context) {
+ s.docStartColumn = 0
+ ctx.breakLiteral()
+}
+
+func (s *Scanner) scanSingleQuote(ctx *Context) (tk *token.Token, pos int) {
+ ctx.addOriginBuf('\'')
+ srcpos := s.pos()
+ startIndex := ctx.idx + 1
+ src := ctx.src
+ size := len(src)
+ value := []rune{}
+ isFirstLineChar := false
+ isNewLine := false
+ for idx := startIndex; idx < size; idx++ {
+ if !isNewLine {
+ s.progressColumn(ctx, 1)
+ } else {
+ isNewLine = false
+ }
+ c := src[idx]
+ pos = idx + 1
+ ctx.addOriginBuf(c)
+ if s.isNewLineChar(c) {
+ value = append(value, ' ')
+ isFirstLineChar = true
+ isNewLine = true
+ s.progressLine(ctx)
+ continue
+ } else if c == ' ' && isFirstLineChar {
+ continue
+ } else if c != '\'' {
+ value = append(value, c)
+ isFirstLineChar = false
+ continue
+ }
+ if idx+1 < len(ctx.src) && ctx.src[idx+1] == '\'' {
+ // '' handle as ' character
+ value = append(value, c)
+ ctx.addOriginBuf(c)
+ idx++
+ continue
+ }
+ s.progressColumn(ctx, 1)
+ tk = token.SingleQuote(string(value), string(ctx.obuf), srcpos)
+ pos = idx - startIndex + 1
+ return
+ }
+ return
+}
+
+func hexToInt(b rune) int {
+ if b >= 'A' && b <= 'F' {
+ return int(b) - 'A' + 10
+ }
+ if b >= 'a' && b <= 'f' {
+ return int(b) - 'a' + 10
+ }
+ return int(b) - '0'
+}
+
+func hexRunesToInt(b []rune) int {
+ sum := 0
+ for i := 0; i < len(b); i++ {
+ sum += hexToInt(b[i]) << (uint(len(b)-i-1) * 4)
+ }
+ return sum
+}
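+
+// For illustration only ( not part of upstream goccy/go-yaml ): hexRunesToInt folds hex
+// digits into a single integer, which is how the \xXX, \uXXXX and \UXXXXXXXX escapes in
+// double-quoted scalars are decoded below, e.g.
+//
+//	hexRunesToInt([]rune("0A")) // => 10
+//	hexRunesToInt([]rune("1F")) // => 31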
+
+func (s *Scanner) scanDoubleQuote(ctx *Context) (tk *token.Token, pos int) {
+ ctx.addOriginBuf('"')
+ srcpos := s.pos()
+ startIndex := ctx.idx + 1
+ src := ctx.src
+ size := len(src)
+ value := []rune{}
+ isFirstLineChar := false
+ isNewLine := false
+ for idx := startIndex; idx < size; idx++ {
+ if !isNewLine {
+ s.progressColumn(ctx, 1)
+ } else {
+ isNewLine = false
+ }
+ c := src[idx]
+ pos = idx + 1
+ ctx.addOriginBuf(c)
+ if s.isNewLineChar(c) {
+ value = append(value, ' ')
+ isFirstLineChar = true
+ isNewLine = true
+ s.progressLine(ctx)
+ continue
+ } else if c == ' ' && isFirstLineChar {
+ continue
+ } else if c == '\\' {
+ isFirstLineChar = false
+ if idx+1 < size {
+ nextChar := src[idx+1]
+ switch nextChar {
+ case 'b':
+ ctx.addOriginBuf(nextChar)
+ value = append(value, '\b')
+ idx++
+ continue
+ case 'e':
+ ctx.addOriginBuf(nextChar)
+ value = append(value, '\x1B')
+ idx++
+ continue
+ case 'f':
+ ctx.addOriginBuf(nextChar)
+ value = append(value, '\f')
+ idx++
+ continue
+ case 'n':
+ ctx.addOriginBuf(nextChar)
+ value = append(value, '\n')
+ idx++
+ continue
+ case 'r':
+ ctx.addOriginBuf(nextChar)
+ value = append(value, '\r')
+ idx++
+ continue
+ case 'v':
+ ctx.addOriginBuf(nextChar)
+ value = append(value, '\v')
+ idx++
+ continue
+ case 'L': // LS (#x2028)
+ ctx.addOriginBuf(nextChar)
+ value = append(value, []rune{'\xE2', '\x80', '\xA8'}...)
+ idx++
+ continue
+ case 'N': // NEL (#x85)
+ ctx.addOriginBuf(nextChar)
+ value = append(value, []rune{'\xC2', '\x85'}...)
+ idx++
+ continue
+ case 'P': // PS (#x2029)
+ ctx.addOriginBuf(nextChar)
+ value = append(value, []rune{'\xE2', '\x80', '\xA9'}...)
+ idx++
+ continue
+ case '_': // #xA0
+ ctx.addOriginBuf(nextChar)
+ value = append(value, []rune{'\xC2', '\xA0'}...)
+ idx++
+ continue
+ case '"':
+ ctx.addOriginBuf(nextChar)
+ value = append(value, nextChar)
+ idx++
+ continue
+ case 'x':
+ if idx+3 >= size {
+ // TODO: need to return error
+ //err = xerrors.New("invalid escape character \\x")
+ return
+ }
+ codeNum := hexRunesToInt(src[idx+2 : idx+4])
+ value = append(value, rune(codeNum))
+ idx += 3
+ continue
+ case 'u':
+ if idx+5 >= size {
+ // TODO: need to return error
+ //err = xerrors.New("invalid escape character \\u")
+ return
+ }
+ codeNum := hexRunesToInt(src[idx+2 : idx+6])
+ value = append(value, rune(codeNum))
+ idx += 5
+ continue
+ case 'U':
+ if idx+9 >= size {
+ // TODO: need to return error
+ //err = xerrors.New("invalid escape character \\U")
+ return
+ }
+ codeNum := hexRunesToInt(src[idx+2 : idx+10])
+ value = append(value, rune(codeNum))
+ idx += 9
+ continue
+ case '\\':
+ ctx.addOriginBuf(nextChar)
+ idx++
+ }
+ }
+ value = append(value, c)
+ continue
+ } else if c != '"' {
+ value = append(value, c)
+ isFirstLineChar = false
+ continue
+ }
+ s.progressColumn(ctx, 1)
+ tk = token.DoubleQuote(string(value), string(ctx.obuf), srcpos)
+ pos = idx - startIndex + 1
+ return
+ }
+ return
+}
+
+func (s *Scanner) scanQuote(ctx *Context, ch rune) (tk *token.Token, pos int) {
+ if ch == '\'' {
+ return s.scanSingleQuote(ctx)
+ }
+ return s.scanDoubleQuote(ctx)
+}
+
+func (s *Scanner) isMergeKey(ctx *Context) bool {
+ if ctx.repeatNum('<') != 2 {
+ return false
+ }
+ src := ctx.src
+ size := len(src)
+ for idx := ctx.idx + 2; idx < size; idx++ {
+ c := src[idx]
+ if c == ' ' {
+ continue
+ }
+ if c != ':' {
+ return false
+ }
+ if idx+1 < size {
+ nc := src[idx+1]
+ if nc == ' ' || s.isNewLineChar(nc) {
+ return true
+ }
+ }
+ }
+ return false
+}
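+
+// For illustration only ( not part of upstream goccy/go-yaml ): isMergeKey reports whether
+// the scanner is positioned at a YAML merge key, i.e. "<<" followed ( after optional
+// spaces ) by ':' and then a space or newline, as in:
+//
+//	<<: *base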
+
+func (s *Scanner) scanTag(ctx *Context) (tk *token.Token, pos int) {
+ ctx.addOriginBuf('!')
+ ctx.progress(1) // skip '!' character
+ for idx, c := range ctx.src[ctx.idx:] {
+ pos = idx + 1
+ ctx.addOriginBuf(c)
+ switch c {
+ case ' ', '\n', '\r':
+ value := ctx.source(ctx.idx-1, ctx.idx+idx)
+ tk = token.Tag(value, string(ctx.obuf), s.pos())
+ pos = len([]rune(value))
+ return
+ }
+ }
+ return
+}
+
+func (s *Scanner) scanComment(ctx *Context) (tk *token.Token, pos int) {
+ ctx.addOriginBuf('#')
+ ctx.progress(1) // skip '#' character
+ for idx, c := range ctx.src[ctx.idx:] {
+ pos = idx + 1
+ ctx.addOriginBuf(c)
+ switch c {
+ case '\n', '\r':
+ if ctx.previousChar() == '\\' {
+ continue
+ }
+ value := ctx.source(ctx.idx, ctx.idx+idx)
+ tk = token.Comment(value, string(ctx.obuf), s.pos())
+ pos = len([]rune(value)) + 1
+ return
+ }
+ }
+ // document ends with comment.
+ value := string(ctx.src[ctx.idx:])
+ tk = token.Comment(value, string(ctx.obuf), s.pos())
+ pos = len([]rune(value)) + 1
+ return
+}
+
+func trimCommentFromLiteralOpt(text string) (string, error) {
+ idx := strings.Index(text, "#")
+ if idx < 0 {
+ return text, nil
+ }
+ if idx == 0 {
+ return "", xerrors.New("invalid literal header")
+ }
+ return text[:idx-1], nil
+}
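+
+// For illustration only ( not part of upstream goccy/go-yaml ): trimCommentFromLiteralOpt
+// strips a trailing comment from a literal ( '|' or '>' ) header option, e.g.
+//
+//	trimCommentFromLiteralOpt("- # chomp") // => "-", nil
+//	trimCommentFromLiteralOpt("# comment") // => "", error ( invalid literal header )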
+
+func (s *Scanner) scanLiteral(ctx *Context, c rune) {
+ ctx.addOriginBuf(c)
+ if ctx.isEOS() {
+ if ctx.isLiteral {
+ ctx.addBuf(c)
+ }
+ value := ctx.bufferedSrc()
+ ctx.addToken(token.String(string(value), string(ctx.obuf), s.pos()))
+ ctx.resetBuffer()
+ s.progressColumn(ctx, 1)
+ } else if s.isNewLineChar(c) {
+ if ctx.isLiteral {
+ ctx.addBuf(c)
+ } else {
+ ctx.addBuf(' ')
+ }
+ s.progressLine(ctx)
+ } else if s.isFirstCharAtLine && c == ' ' {
+ if 0 < s.docStartColumn && s.docStartColumn <= s.column {
+ ctx.addBuf(c)
+ }
+ s.progressColumn(ctx, 1)
+ } else {
+ if s.docStartColumn == 0 {
+ s.docStartColumn = s.column
+ }
+ ctx.addBuf(c)
+ s.progressColumn(ctx, 1)
+ }
+}
+
+func (s *Scanner) scanLiteralHeader(ctx *Context) (pos int, err error) {
+ header := ctx.currentChar()
+ ctx.addOriginBuf(header)
+ ctx.progress(1) // skip '|' or '>' character
+ for idx, c := range ctx.src[ctx.idx:] {
+ pos = idx
+ ctx.addOriginBuf(c)
+ switch c {
+ case '\n', '\r':
+ value := ctx.source(ctx.idx, ctx.idx+idx)
+ opt := strings.TrimRight(value, " ")
+ orgOptLen := len(opt)
+ opt, err = trimCommentFromLiteralOpt(opt)
+ if err != nil {
+ return
+ }
+ switch opt {
+ case "", "+", "-",
+ "0", "1", "2", "3", "4", "5", "6", "7", "8", "9":
+ hasComment := len(opt) < orgOptLen
+ if header == '|' {
+ if hasComment {
+ commentLen := orgOptLen - len(opt)
+ headerPos := strings.Index(string(ctx.obuf), "|")
+ litBuf := ctx.obuf[:len(ctx.obuf)-commentLen-headerPos]
+ commentBuf := ctx.obuf[len(litBuf):]
+ ctx.addToken(token.Literal("|"+opt, string(litBuf), s.pos()))
+ s.column += len(litBuf)
+ s.offset += len(litBuf)
+ commentHeader := strings.Index(value, "#")
+ ctx.addToken(token.Comment(string(value[commentHeader+1:]), string(commentBuf), s.pos()))
+ } else {
+ ctx.addToken(token.Literal("|"+opt, string(ctx.obuf), s.pos()))
+ }
+ ctx.isLiteral = true
+ } else if header == '>' {
+ if hasComment {
+ commentLen := orgOptLen - len(opt)
+ headerPos := strings.Index(string(ctx.obuf), ">")
+ foldedBuf := ctx.obuf[:len(ctx.obuf)-commentLen-headerPos]
+ commentBuf := ctx.obuf[len(foldedBuf):]
+ ctx.addToken(token.Folded(">"+opt, string(foldedBuf), s.pos()))
+ s.column += len(foldedBuf)
+ s.offset += len(foldedBuf)
+ commentHeader := strings.Index(value, "#")
+ ctx.addToken(token.Comment(string(value[commentHeader+1:]), string(commentBuf), s.pos()))
+ } else {
+ ctx.addToken(token.Folded(">"+opt, string(ctx.obuf), s.pos()))
+ }
+ ctx.isFolded = true
+ }
+ s.indentState = IndentStateKeep
+ ctx.resetBuffer()
+ ctx.literalOpt = opt
+ return
+ }
+ break
+ }
+ }
+ err = xerrors.New("invalid literal header")
+ return
+}
+
+func (s *Scanner) scanNewLine(ctx *Context, c rune) {
+ if len(ctx.buf) > 0 && s.savedPos == nil {
+ s.savedPos = s.pos()
+ s.savedPos.Column -= len(ctx.bufferedSrc())
+ }
+
+	// In the following case, the origin buffer has two unnecessary trailing spaces.
+	// So `removeRightSpaceFromBuf` removes them and the column number is fixed up accordingly.
+ // ---
+ // a:[space][space]
+ // b: c
+ removedNum := ctx.removeRightSpaceFromBuf()
+ if removedNum > 0 {
+ s.column -= removedNum
+ s.offset -= removedNum
+ if s.savedPos != nil {
+ s.savedPos.Column -= removedNum
+ }
+ }
+
+ if ctx.isEOS() {
+ s.addBufferedTokenIfExists(ctx)
+ } else if s.isAnchor {
+ s.addBufferedTokenIfExists(ctx)
+ }
+ ctx.addBuf(' ')
+ ctx.addOriginBuf(c)
+ ctx.isSingleLine = false
+ s.progressLine(ctx)
+}
+
+func (s *Scanner) scan(ctx *Context) (pos int) {
+ for ctx.next() {
+ pos = ctx.nextPos()
+ c := ctx.currentChar()
+ s.updateIndent(ctx, c)
+ if ctx.isDocument() {
+ if s.isChangedToIndentStateEqual() ||
+ s.isChangedToIndentStateDown() {
+ s.addBufferedTokenIfExists(ctx)
+ s.breakLiteral(ctx)
+ } else {
+ s.scanLiteral(ctx, c)
+ continue
+ }
+ } else if s.isChangedToIndentStateDown() {
+ s.addBufferedTokenIfExists(ctx)
+ } else if s.isChangedToIndentStateEqual() {
+			// if the first character is a new line character, the buffer is expected to be a raw folded literal
+ if len(ctx.obuf) > 0 && s.newLineCount(ctx.obuf) <= 1 {
+				// not a raw folded literal
+ s.addBufferedTokenIfExists(ctx)
+ }
+ }
+ switch c {
+ case '{':
+ if !ctx.existsBuffer() {
+ ctx.addOriginBuf(c)
+ ctx.addToken(token.MappingStart(string(ctx.obuf), s.pos()))
+ s.startedFlowMapNum++
+ s.progressColumn(ctx, 1)
+ return
+ }
+ case '}':
+ if !ctx.existsBuffer() || s.startedFlowMapNum > 0 {
+ ctx.addToken(s.bufferedToken(ctx))
+ ctx.addOriginBuf(c)
+ ctx.addToken(token.MappingEnd(string(ctx.obuf), s.pos()))
+ s.startedFlowMapNum--
+ s.progressColumn(ctx, 1)
+ return
+ }
+ case '.':
+ if s.indentNum == 0 && s.column == 1 && ctx.repeatNum('.') == 3 {
+ ctx.addToken(token.DocumentEnd(string(ctx.obuf)+"...", s.pos()))
+ s.progressColumn(ctx, 3)
+ pos += 2
+ return
+ }
+ case '<':
+ if s.isMergeKey(ctx) {
+ s.prevIndentColumn = s.column
+ ctx.addToken(token.MergeKey(string(ctx.obuf)+"<<", s.pos()))
+ s.progressColumn(ctx, 1)
+ pos++
+ return
+ }
+ case '-':
+ if s.indentNum == 0 && s.column == 1 && ctx.repeatNum('-') == 3 {
+ s.addBufferedTokenIfExists(ctx)
+ ctx.addToken(token.DocumentHeader(string(ctx.obuf)+"---", s.pos()))
+ s.progressColumn(ctx, 3)
+ pos += 2
+ return
+ }
+ if ctx.existsBuffer() && s.isChangedToIndentStateUp() {
+ // raw folded
+ ctx.isRawFolded = true
+ ctx.addBuf(c)
+ ctx.addOriginBuf(c)
+ s.progressColumn(ctx, 1)
+ continue
+ }
+ if ctx.existsBuffer() {
+ // '-' is literal
+ ctx.addBuf(c)
+ ctx.addOriginBuf(c)
+ s.progressColumn(ctx, 1)
+ continue
+ }
+ nc := ctx.nextChar()
+ if nc == ' ' || s.isNewLineChar(nc) {
+ s.addBufferedTokenIfExists(ctx)
+ ctx.addOriginBuf(c)
+ tk := token.SequenceEntry(string(ctx.obuf), s.pos())
+ s.prevIndentColumn = tk.Position.Column
+ ctx.addToken(tk)
+ s.progressColumn(ctx, 1)
+ return
+ }
+ case '[':
+ if !ctx.existsBuffer() {
+ ctx.addOriginBuf(c)
+ ctx.addToken(token.SequenceStart(string(ctx.obuf), s.pos()))
+ s.startedFlowSequenceNum++
+ s.progressColumn(ctx, 1)
+ return
+ }
+ case ']':
+ if !ctx.existsBuffer() || s.startedFlowSequenceNum > 0 {
+ s.addBufferedTokenIfExists(ctx)
+ ctx.addOriginBuf(c)
+ ctx.addToken(token.SequenceEnd(string(ctx.obuf), s.pos()))
+ s.startedFlowSequenceNum--
+ s.progressColumn(ctx, 1)
+ return
+ }
+ case ',':
+ if s.startedFlowSequenceNum > 0 || s.startedFlowMapNum > 0 {
+ s.addBufferedTokenIfExists(ctx)
+ ctx.addOriginBuf(c)
+ ctx.addToken(token.CollectEntry(string(ctx.obuf), s.pos()))
+ s.progressColumn(ctx, 1)
+ return
+ }
+ case ':':
+ nc := ctx.nextChar()
+ if s.startedFlowMapNum > 0 || nc == ' ' || s.isNewLineChar(nc) || ctx.isNextEOS() {
+ // mapping value
+ tk := s.bufferedToken(ctx)
+ if tk != nil {
+ s.prevIndentColumn = tk.Position.Column
+ ctx.addToken(tk)
+ } else if tk := ctx.lastToken(); tk != nil {
+ // If the map key is quoted, the buffer does not exist because it has already been cut into tokens.
+ // Therefore, we need to check the last token.
+ if tk.Indicator == token.QuotedScalarIndicator {
+ s.prevIndentColumn = tk.Position.Column
+ }
+ }
+ ctx.addToken(token.MappingValue(s.pos()))
+ s.progressColumn(ctx, 1)
+ return
+ }
+ case '|', '>':
+ if !ctx.existsBuffer() {
+ progress, err := s.scanLiteralHeader(ctx)
+ if err != nil {
+ // TODO: return a syntax error object
+ return
+ }
+ s.progressColumn(ctx, progress)
+ s.progressLine(ctx)
+ continue
+ }
+ case '!':
+ if !ctx.existsBuffer() {
+ token, progress := s.scanTag(ctx)
+ ctx.addToken(token)
+ s.progressColumn(ctx, progress)
+ if c := ctx.previousChar(); s.isNewLineChar(c) {
+ s.progressLine(ctx)
+ }
+ pos += progress
+ return
+ }
+ case '%':
+ if !ctx.existsBuffer() && s.indentNum == 0 {
+ ctx.addToken(token.Directive(string(ctx.obuf)+"%", s.pos()))
+ s.progressColumn(ctx, 1)
+ return
+ }
+ case '?':
+ nc := ctx.nextChar()
+ if !ctx.existsBuffer() && nc == ' ' {
+ ctx.addToken(token.MappingKey(s.pos()))
+ s.progressColumn(ctx, 1)
+ return
+ }
+ case '&':
+ if !ctx.existsBuffer() {
+ s.addBufferedTokenIfExists(ctx)
+ ctx.addOriginBuf(c)
+ ctx.addToken(token.Anchor(string(ctx.obuf), s.pos()))
+ s.progressColumn(ctx, 1)
+ s.isAnchor = true
+ return
+ }
+ case '*':
+ if !ctx.existsBuffer() {
+ s.addBufferedTokenIfExists(ctx)
+ ctx.addOriginBuf(c)
+ ctx.addToken(token.Alias(string(ctx.obuf), s.pos()))
+ s.progressColumn(ctx, 1)
+ return
+ }
+ case '#':
+ if !ctx.existsBuffer() || ctx.previousChar() == ' ' {
+ s.addBufferedTokenIfExists(ctx)
+ token, progress := s.scanComment(ctx)
+ ctx.addToken(token)
+ s.progressColumn(ctx, progress)
+ s.progressLine(ctx)
+ pos += progress
+ return
+ }
+ case '\'', '"':
+ if !ctx.existsBuffer() {
+ token, progress := s.scanQuote(ctx, c)
+ ctx.addToken(token)
+ pos += progress
+ // If the non-whitespace character immediately following the quote is ':', the quote should be treated as a map key.
+ // Therefore, do not return here; continue processing it as a normal map key.
+ if ctx.currentCharWithSkipWhitespace() == ':' {
+ continue
+ }
+ return
+ }
+ case '\r', '\n':
+ // It is safe to ignore a CR that is followed by LF and normalize it to LF, because of the following YAML 1.2 spec.
+ // > Line breaks inside scalar content must be normalized by the YAML processor. Each such line break must be parsed into a single line feed character.
+ // > Outside scalar content, YAML allows any line break to be used to terminate lines.
+ // > -- https://yaml.org/spec/1.2/spec.html
+ if c == '\r' && ctx.nextChar() == '\n' {
+ ctx.addOriginBuf('\r')
+ ctx.progress(1)
+ c = '\n'
+ }
+ s.scanNewLine(ctx, c)
+ continue
+ case ' ':
+ if ctx.isSaveIndentMode() || (!s.isAnchor && !s.isFirstCharAtLine) {
+ ctx.addBuf(c)
+ ctx.addOriginBuf(c)
+ s.progressColumn(ctx, 1)
+ continue
+ }
+ if s.isFirstCharAtLine {
+ s.progressColumn(ctx, 1)
+ ctx.addOriginBuf(c)
+ continue
+ }
+ s.addBufferedTokenIfExists(ctx)
+ pos-- // rescan the white space on the next scan so that it is added to the next buffer.
+ s.isAnchor = false
+ return
+ }
+ ctx.addBuf(c)
+ ctx.addOriginBuf(c)
+ s.progressColumn(ctx, 1)
+ }
+ s.addBufferedTokenIfExists(ctx)
+ return
+}
+
+// Init prepares the scanner s to tokenize the text src by setting the scanner at the beginning of src.
+func (s *Scanner) Init(text string) {
+ src := []rune(text)
+ s.source = src
+ s.sourcePos = 0
+ s.sourceSize = len(src)
+ s.line = 1
+ s.column = 1
+ s.offset = 1
+ s.prevIndentLevel = 0
+ s.prevIndentNum = 0
+ s.prevIndentColumn = 0
+ s.indentLevel = 0
+ s.indentNum = 0
+ s.isFirstCharAtLine = true
+}
+
+// Scan scans the next token and returns the token collection. The source end is indicated by io.EOF.
+func (s *Scanner) Scan() (token.Tokens, error) {
+ if s.sourcePos >= s.sourceSize {
+ return nil, io.EOF
+ }
+ ctx := newContext(s.source[s.sourcePos:])
+ defer ctx.release()
+ progress := s.scan(ctx)
+ s.sourcePos += progress
+ var tokens token.Tokens
+ tokens = append(tokens, ctx.tokens...)
+ return tokens, nil
+}
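
The `Init`/`Scan` pair above drives tokenization incrementally: `Init` resets the scanner over the source, and each `Scan` call returns the tokens produced by one pass until `io.EOF`. A minimal sketch of driving this vendored scanner directly; the `scanner` package name and import path are assumptions based on the vendored layout in this PR (runn itself goes through the higher-level yaml package):

```go
package main

import (
	"fmt"
	"io"

	// Assumed vendored path for the scanner package shown in this diff.
	"github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/scanner"
)

func main() {
	var s scanner.Scanner
	s.Init("a: 1\nb: [2, 3]\n")
	for {
		tks, err := s.Scan() // io.EOF once the source is exhausted
		if err == io.EOF {
			break
		}
		if err != nil {
			panic(err)
		}
		for _, tk := range tks {
			fmt.Printf("%-13s %q\n", tk.Type, tk.Value)
		}
	}
}
```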
diff --git a/tmpmod/github.com/goccy/go-yaml/stdlib_quote.go b/tmpmod/github.com/goccy/go-yaml/stdlib_quote.go
new file mode 100644
index 00000000..be50ae61
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/stdlib_quote.go
@@ -0,0 +1,103 @@
+// Copied and trimmed down from https://github.com/golang/go/blob/e3769299cd3484e018e0e2a6e1b95c2b18ce4f41/src/strconv/quote.go
+// We want to use the standard library's private "quoteWith" function rather than write our own so that we get robust unicode support.
+// Every private function called by quoteWith was copied.
+// There are 2 modifications to simplify the code:
+// 1. The unicode.IsPrint function was substituted for the custom implementation of IsPrint
+// 2. All code paths reachable only when ASCIIonly or graphicOnly are set to true were removed.
+
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package yaml
+
+import (
+ "unicode"
+ "unicode/utf8"
+)
+
+const (
+ lowerhex = "0123456789abcdef"
+)
+
+func quoteWith(s string, quote byte) string {
+ return string(appendQuotedWith(make([]byte, 0, 3*len(s)/2), s, quote))
+}
+
+func appendQuotedWith(buf []byte, s string, quote byte) []byte {
+ // Often called with big strings, so preallocate. If there's quoting,
+ // this is conservative but still helps a lot.
+ if cap(buf)-len(buf) < len(s) {
+ nBuf := make([]byte, len(buf), len(buf)+1+len(s)+1)
+ copy(nBuf, buf)
+ buf = nBuf
+ }
+ buf = append(buf, quote)
+ for width := 0; len(s) > 0; s = s[width:] {
+ r := rune(s[0])
+ width = 1
+ if r >= utf8.RuneSelf {
+ r, width = utf8.DecodeRuneInString(s)
+ }
+ if width == 1 && r == utf8.RuneError {
+ buf = append(buf, `\x`...)
+ buf = append(buf, lowerhex[s[0]>>4])
+ buf = append(buf, lowerhex[s[0]&0xF])
+ continue
+ }
+ buf = appendEscapedRune(buf, r, quote)
+ }
+ buf = append(buf, quote)
+ return buf
+}
+
+func appendEscapedRune(buf []byte, r rune, quote byte) []byte {
+ var runeTmp [utf8.UTFMax]byte
+ if r == rune(quote) || r == '\\' { // always backslashed
+ buf = append(buf, '\\')
+ buf = append(buf, byte(r))
+ return buf
+ }
+ if unicode.IsPrint(r) {
+ n := utf8.EncodeRune(runeTmp[:], r)
+ buf = append(buf, runeTmp[:n]...)
+ return buf
+ }
+ switch r {
+ case '\a':
+ buf = append(buf, `\a`...)
+ case '\b':
+ buf = append(buf, `\b`...)
+ case '\f':
+ buf = append(buf, `\f`...)
+ case '\n':
+ buf = append(buf, `\n`...)
+ case '\r':
+ buf = append(buf, `\r`...)
+ case '\t':
+ buf = append(buf, `\t`...)
+ case '\v':
+ buf = append(buf, `\v`...)
+ default:
+ switch {
+ case r < ' ':
+ buf = append(buf, `\x`...)
+ buf = append(buf, lowerhex[byte(r)>>4])
+ buf = append(buf, lowerhex[byte(r)&0xF])
+ case r > utf8.MaxRune:
+ r = 0xFFFD
+ fallthrough
+ case r < 0x10000:
+ buf = append(buf, `\u`...)
+ for s := 12; s >= 0; s -= 4 {
+ buf = append(buf, lowerhex[r>>uint(s)&0xF])
+ }
+ default:
+ buf = append(buf, `\U`...)
+ for s := 28; s >= 0; s -= 4 {
+ buf = append(buf, lowerhex[r>>uint(s)&0xF])
+ }
+ }
+ }
+ return buf
+}
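
`quoteWith` above is unexported; it mirrors the standard library quoting it was copied from, with `unicode.IsPrint` substituted in. For a rough feel of the escaping behavior, the original `strconv` API can be used as a stand-in (a sketch only; it is not the exact code path used here):

```go
package main

import (
	"fmt"
	"strconv"
)

func main() {
	// Printable runes are kept as-is; control characters are backslash-escaped.
	fmt.Println(strconv.Quote("héllo\tworld\x00")) // "héllo\tworld\x00"
}
```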
diff --git a/tmpmod/github.com/goccy/go-yaml/struct.go b/tmpmod/github.com/goccy/go-yaml/struct.go
new file mode 100644
index 00000000..a3da8ddd
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/struct.go
@@ -0,0 +1,130 @@
+package yaml
+
+import (
+ "reflect"
+ "strings"
+
+ "golang.org/x/xerrors"
+)
+
+const (
+ // StructTagName tag keyword for Marshal/Unmarshal
+ StructTagName = "yaml"
+)
+
+// StructField holds information for each field in a structure
+type StructField struct {
+ FieldName string
+ RenderName string
+ AnchorName string
+ AliasName string
+ IsAutoAnchor bool
+ IsAutoAlias bool
+ IsOmitEmpty bool
+ IsFlow bool
+ IsInline bool
+}
+
+func getTag(field reflect.StructField) string {
+ // If the struct tag `yaml` exists, use that. If no `yaml`
+ // tag exists but `json` does, use that and try our best to
+ // adhere to its rules
+ tag := field.Tag.Get(StructTagName)
+ if tag == "" {
+ tag = field.Tag.Get(`json`)
+ }
+ return tag
+}
+
+func structField(field reflect.StructField) *StructField {
+ tag := getTag(field)
+ fieldName := strings.ToLower(field.Name)
+ options := strings.Split(tag, ",")
+ if len(options) > 0 {
+ if options[0] != "" {
+ fieldName = options[0]
+ }
+ }
+ structField := &StructField{
+ FieldName: field.Name,
+ RenderName: fieldName,
+ }
+ if len(options) > 1 {
+ for _, opt := range options[1:] {
+ switch {
+ case opt == "omitempty":
+ structField.IsOmitEmpty = true
+ case opt == "flow":
+ structField.IsFlow = true
+ case opt == "inline":
+ structField.IsInline = true
+ case strings.HasPrefix(opt, "anchor"):
+ anchor := strings.Split(opt, "=")
+ if len(anchor) > 1 {
+ structField.AnchorName = anchor[1]
+ } else {
+ structField.IsAutoAnchor = true
+ }
+ case strings.HasPrefix(opt, "alias"):
+ alias := strings.Split(opt, "=")
+ if len(alias) > 1 {
+ structField.AliasName = alias[1]
+ } else {
+ structField.IsAutoAlias = true
+ }
+ default:
+ }
+ }
+ }
+ return structField
+}
+
+func isIgnoredStructField(field reflect.StructField) bool {
+ if field.PkgPath != "" && !field.Anonymous {
+ // private field
+ return true
+ }
+ tag := getTag(field)
+ if tag == "-" {
+ return true
+ }
+ return false
+}
+
+type StructFieldMap map[string]*StructField
+
+func (m StructFieldMap) isIncludedRenderName(name string) bool {
+ for _, v := range m {
+ if !v.IsInline && v.RenderName == name {
+ return true
+ }
+ }
+ return false
+}
+
+func (m StructFieldMap) hasMergeProperty() bool {
+ for _, v := range m {
+ if v.IsOmitEmpty && v.IsInline && v.IsAutoAlias {
+ return true
+ }
+ }
+ return false
+}
+
+func structFieldMap(structType reflect.Type) (StructFieldMap, error) {
+ structFieldMap := StructFieldMap{}
+ renderNameMap := map[string]struct{}{}
+ for i := 0; i < structType.NumField(); i++ {
+ field := structType.Field(i)
+ if isIgnoredStructField(field) {
+ continue
+ }
+ structField := structField(field)
+ if _, exists := renderNameMap[structField.RenderName]; exists {
+ return nil, xerrors.Errorf("duplicated struct field name %s", structField.RenderName)
+ }
+ structFieldMap[structField.FieldName] = structField
+ renderNameMap[structField.RenderName] = struct{}{}
+ }
+ return structFieldMap, nil
+}
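
`structField` above parses the comma-separated options of the `yaml` tag, falling back to the `json` tag when no `yaml` tag is present. A short sketch of the tag shapes it recognizes, assuming the vendored yaml package path; the struct and values are illustrative only:

```go
package main

import (
	"fmt"

	yaml "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml" // assumed vendored path
)

type Config struct {
	Name   string            `yaml:"name"`            // rename the key
	Debug  bool              `yaml:"debug,omitempty"` // drop zero values
	Labels map[string]string `yaml:"labels,flow"`     // encode in flow style
	Token  string            `yaml:"-"`               // ignore this field
	Extra  string            `json:"extra"`           // json tag used as a fallback
}

func main() {
	c := Config{Name: "app", Labels: map[string]string{"env": "dev"}, Extra: "x"}
	out, err := yaml.Marshal(c)
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out))
}
```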
diff --git a/tmpmod/github.com/goccy/go-yaml/testdata/anchor.yml b/tmpmod/github.com/goccy/go-yaml/testdata/anchor.yml
new file mode 100644
index 00000000..c016c7f6
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/testdata/anchor.yml
@@ -0,0 +1,3 @@
+a: &a
+ b: 1
+ c: hello
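
testdata/anchor.yml above only defines an anchor; at decode time an alias referring to it is resolved to the anchored node. A minimal sketch (vendored import path assumed):

```go
package main

import (
	"fmt"

	yaml "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml"
)

func main() {
	// Same shape as testdata/anchor.yml, plus an alias that reuses the anchored node.
	src := []byte("a: &a\n  b: 1\n  c: hello\nd: *a\n")
	var v map[string]map[string]interface{}
	if err := yaml.Unmarshal(src, &v); err != nil {
		panic(err)
	}
	fmt.Println(v["d"]["c"]) // hello
}
```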
diff --git a/tmpmod/github.com/goccy/go-yaml/token/token.go b/tmpmod/github.com/goccy/go-yaml/token/token.go
new file mode 100644
index 00000000..14d76220
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/token/token.go
@@ -0,0 +1,1070 @@
+package token
+
+import (
+ "fmt"
+ "strings"
+)
+
+// Character type for character
+type Character byte
+
+const (
+ // SequenceEntryCharacter character for sequence entry
+ SequenceEntryCharacter Character = '-'
+ // MappingKeyCharacter character for mapping key
+ MappingKeyCharacter Character = '?'
+ // MappingValueCharacter character for mapping value
+ MappingValueCharacter Character = ':'
+ // CollectEntryCharacter character for collect entry
+ CollectEntryCharacter Character = ','
+ // SequenceStartCharacter character for sequence start
+ SequenceStartCharacter Character = '['
+ // SequenceEndCharacter character for sequence end
+ SequenceEndCharacter Character = ']'
+ // MappingStartCharacter character for mapping start
+ MappingStartCharacter Character = '{'
+ // MappingEndCharacter character for mapping end
+ MappingEndCharacter Character = '}'
+ // CommentCharacter character for comment
+ CommentCharacter Character = '#'
+ // AnchorCharacter character for anchor
+ AnchorCharacter Character = '&'
+ // AliasCharacter character for alias
+ AliasCharacter Character = '*'
+ // TagCharacter character for tag
+ TagCharacter Character = '!'
+ // LiteralCharacter character for literal
+ LiteralCharacter Character = '|'
+ // FoldedCharacter character for folded
+ FoldedCharacter Character = '>'
+ // SingleQuoteCharacter character for single quote
+ SingleQuoteCharacter Character = '\''
+ // DoubleQuoteCharacter character for double quote
+ DoubleQuoteCharacter Character = '"'
+ // DirectiveCharacter character for directive
+ DirectiveCharacter Character = '%'
+ // SpaceCharacter character for space
+ SpaceCharacter Character = ' '
+ // LineBreakCharacter character for line break
+ LineBreakCharacter Character = '\n'
+)
+
+// Type type identifier for token
+type Type int
+
+const (
+ // UnknownType reserve for invalid type
+ UnknownType Type = iota
+ // DocumentHeaderType type for DocumentHeader token
+ DocumentHeaderType
+ // DocumentEndType type for DocumentEnd token
+ DocumentEndType
+ // SequenceEntryType type for SequenceEntry token
+ SequenceEntryType
+ // MappingKeyType type for MappingKey token
+ MappingKeyType
+ // MappingValueType type for MappingValue token
+ MappingValueType
+ // MergeKeyType type for MergeKey token
+ MergeKeyType
+ // CollectEntryType type for CollectEntry token
+ CollectEntryType
+ // SequenceStartType type for SequenceStart token
+ SequenceStartType
+ // SequenceEndType type for SequenceEnd token
+ SequenceEndType
+ // MappingStartType type for MappingStart token
+ MappingStartType
+ // MappingEndType type for MappingEnd token
+ MappingEndType
+ // CommentType type for Comment token
+ CommentType
+ // AnchorType type for Anchor token
+ AnchorType
+ // AliasType type for Alias token
+ AliasType
+ // TagType type for Tag token
+ TagType
+ // LiteralType type for Literal token
+ LiteralType
+ // FoldedType type for Folded token
+ FoldedType
+ // SingleQuoteType type for SingleQuote token
+ SingleQuoteType
+ // DoubleQuoteType type for DoubleQuote token
+ DoubleQuoteType
+ // DirectiveType type for Directive token
+ DirectiveType
+ // SpaceType type for Space token
+ SpaceType
+ // NullType type for Null token
+ NullType
+ // InfinityType type for Infinity token
+ InfinityType
+ // NanType type for Nan token
+ NanType
+ // IntegerType type for Integer token
+ IntegerType
+ // BinaryIntegerType type for BinaryInteger token
+ BinaryIntegerType
+ // OctetIntegerType type for OctetInteger token
+ OctetIntegerType
+ // HexIntegerType type for HexInteger token
+ HexIntegerType
+ // FloatType type for Float token
+ FloatType
+ // StringType type for String token
+ StringType
+ // BoolType type for Bool token
+ BoolType
+)
+
+// String type identifier to text
+func (t Type) String() string {
+ switch t {
+ case UnknownType:
+ return "Unknown"
+ case DocumentHeaderType:
+ return "DocumentHeader"
+ case DocumentEndType:
+ return "DocumentEnd"
+ case SequenceEntryType:
+ return "SequenceEntry"
+ case MappingKeyType:
+ return "MappingKey"
+ case MappingValueType:
+ return "MappingValue"
+ case MergeKeyType:
+ return "MergeKey"
+ case CollectEntryType:
+ return "CollectEntry"
+ case SequenceStartType:
+ return "SequenceStart"
+ case SequenceEndType:
+ return "SequenceEnd"
+ case MappingStartType:
+ return "MappingStart"
+ case MappingEndType:
+ return "MappingEnd"
+ case CommentType:
+ return "Comment"
+ case AnchorType:
+ return "Anchor"
+ case AliasType:
+ return "Alias"
+ case TagType:
+ return "Tag"
+ case LiteralType:
+ return "Literal"
+ case FoldedType:
+ return "Folded"
+ case SingleQuoteType:
+ return "SingleQuote"
+ case DoubleQuoteType:
+ return "DoubleQuote"
+ case DirectiveType:
+ return "Directive"
+ case SpaceType:
+ return "Space"
+ case StringType:
+ return "String"
+ case BoolType:
+ return "Bool"
+ case IntegerType:
+ return "Integer"
+ case BinaryIntegerType:
+ return "BinaryInteger"
+ case OctetIntegerType:
+ return "OctetInteger"
+ case HexIntegerType:
+ return "HexInteger"
+ case FloatType:
+ return "Float"
+ case NullType:
+ return "Null"
+ case InfinityType:
+ return "Infinity"
+ case NanType:
+ return "Nan"
+ }
+ return ""
+}
+
+// CharacterType type for character category
+type CharacterType int
+
+const (
+ // CharacterTypeIndicator type of indicator character
+ CharacterTypeIndicator CharacterType = iota
+ // CharacterTypeWhiteSpace type of white space character
+ CharacterTypeWhiteSpace
+ // CharacterTypeMiscellaneous type of miscellaneous character
+ CharacterTypeMiscellaneous
+ // CharacterTypeEscaped type of escaped character
+ CharacterTypeEscaped
+)
+
+// String character type identifier to text
+func (c CharacterType) String() string {
+ switch c {
+ case CharacterTypeIndicator:
+ return "Indicator"
+ case CharacterTypeWhiteSpace:
+ return "WhiteSpcae"
+ case CharacterTypeMiscellaneous:
+ return "Miscellaneous"
+ case CharacterTypeEscaped:
+ return "Escaped"
+ }
+ return ""
+}
+
+// Indicator type for indicator
+type Indicator int
+
+const (
+ // NotIndicator not indicator
+ NotIndicator Indicator = iota
+ // BlockStructureIndicator indicator for block structure ( '-', '?', ':' )
+ BlockStructureIndicator
+ // FlowCollectionIndicator indicator for flow collection ( '[', ']', '{', '}', ',' )
+ FlowCollectionIndicator
+ // CommentIndicator indicator for comment ( '#' )
+ CommentIndicator
+ // NodePropertyIndicator indicator for node property ( '!', '&', '*' )
+ NodePropertyIndicator
+ // BlockScalarIndicator indicator for block scalar ( '|', '>' )
+ BlockScalarIndicator
+ // QuotedScalarIndicator indicator for quoted scalar ( ''', '"' )
+ QuotedScalarIndicator
+ // DirectiveIndicator indicator for directive ( '%' )
+ DirectiveIndicator
+ // InvalidUseOfReservedIndicator indicator for invalid use of reserved keyword ( '@', '`' )
+ InvalidUseOfReservedIndicator
+)
+
+// String indicator to text
+func (i Indicator) String() string {
+ switch i {
+ case NotIndicator:
+ return "NotIndicator"
+ case BlockStructureIndicator:
+ return "BlockStructure"
+ case FlowCollectionIndicator:
+ return "FlowCollection"
+ case CommentIndicator:
+ return "Comment"
+ case NodePropertyIndicator:
+ return "NodeProperty"
+ case BlockScalarIndicator:
+ return "BlockScalar"
+ case QuotedScalarIndicator:
+ return "QuotedScalar"
+ case DirectiveIndicator:
+ return "Directive"
+ case InvalidUseOfReservedIndicator:
+ return "InvalidUseOfReserved"
+ }
+ return ""
+}
+
+var (
+ reservedNullKeywords = []string{
+ "null",
+ "Null",
+ "NULL",
+ "~",
+ }
+ reservedBoolKeywords = []string{
+ "true",
+ "True",
+ "TRUE",
+ "false",
+ "False",
+ "FALSE",
+ }
+ // For compatibility with other YAML 1.1 parsers
+ // Note that we use these solely for encoding the bool value with quotes.
+ // go-yaml should not treat these as reserved keywords at parsing time,
+ // as go-yaml is supposed to be compliant only with YAML 1.2.
+ reservedLegacyBoolKeywords = []string{
+ "y",
+ "Y",
+ "yes",
+ "Yes",
+ "YES",
+ "n",
+ "N",
+ "no",
+ "No",
+ "NO",
+ "on",
+ "On",
+ "ON",
+ "off",
+ "Off",
+ "OFF",
+ }
+ reservedInfKeywords = []string{
+ ".inf",
+ ".Inf",
+ ".INF",
+ "-.inf",
+ "-.Inf",
+ "-.INF",
+ }
+ reservedNanKeywords = []string{
+ ".nan",
+ ".NaN",
+ ".NAN",
+ }
+ reservedKeywordMap = map[string]func(string, string, *Position) *Token{}
+ // reservedEncKeywordMap is the keyword map used at encoding time.
+ // This is supposed to be a superset of reservedKeywordMap,
+ // and is used to quote legacy keywords present in YAML 1.1 or earlier for compatibility reasons,
+ // even though this library is supposed to be YAML 1.2-compliant.
+ reservedEncKeywordMap = map[string]func(string, string, *Position) *Token{}
+)
+
+func reservedKeywordToken(typ Type, value, org string, pos *Position) *Token {
+ return &Token{
+ Type: typ,
+ CharacterType: CharacterTypeMiscellaneous,
+ Indicator: NotIndicator,
+ Value: value,
+ Origin: org,
+ Position: pos,
+ }
+}
+
+func init() {
+ for _, keyword := range reservedNullKeywords {
+ reservedKeywordMap[keyword] = func(value, org string, pos *Position) *Token {
+ return reservedKeywordToken(NullType, value, org, pos)
+ }
+ }
+ for _, keyword := range reservedBoolKeywords {
+ f := func(value, org string, pos *Position) *Token {
+ return reservedKeywordToken(BoolType, value, org, pos)
+ }
+ reservedKeywordMap[keyword] = f
+ reservedEncKeywordMap[keyword] = f
+ }
+ for _, keyword := range reservedLegacyBoolKeywords {
+ reservedEncKeywordMap[keyword] = func(value, org string, pos *Position) *Token {
+ return reservedKeywordToken(BoolType, value, org, pos)
+ }
+ }
+ for _, keyword := range reservedInfKeywords {
+ reservedKeywordMap[keyword] = func(value, org string, pos *Position) *Token {
+ return reservedKeywordToken(InfinityType, value, org, pos)
+ }
+ }
+ for _, keyword := range reservedNanKeywords {
+ reservedKeywordMap[keyword] = func(value, org string, pos *Position) *Token {
+ return reservedKeywordToken(NanType, value, org, pos)
+ }
+ }
+}
+
+// ReservedTagKeyword type of reserved tag keyword
+type ReservedTagKeyword string
+
+const (
+ // IntegerTag `!!int` tag
+ IntegerTag ReservedTagKeyword = "!!int"
+ // FloatTag `!!float` tag
+ FloatTag ReservedTagKeyword = "!!float"
+ // NullTag `!!null` tag
+ NullTag ReservedTagKeyword = "!!null"
+ // SequenceTag `!!seq` tag
+ SequenceTag ReservedTagKeyword = "!!seq"
+ // MappingTag `!!map` tag
+ MappingTag ReservedTagKeyword = "!!map"
+ // StringTag `!!str` tag
+ StringTag ReservedTagKeyword = "!!str"
+ // BinaryTag `!!binary` tag
+ BinaryTag ReservedTagKeyword = "!!binary"
+ // OrderedMapTag `!!omap` tag
+ OrderedMapTag ReservedTagKeyword = "!!omap"
+ // SetTag `!!set` tag
+ SetTag ReservedTagKeyword = "!!set"
+ // TimestampTag `!!timestamp` tag
+ TimestampTag ReservedTagKeyword = "!!timestamp"
+)
+
+var (
+ // ReservedTagKeywordMap map for reserved tag keywords
+ ReservedTagKeywordMap = map[ReservedTagKeyword]func(string, string, *Position) *Token{
+ IntegerTag: func(value, org string, pos *Position) *Token {
+ return &Token{
+ Type: TagType,
+ CharacterType: CharacterTypeIndicator,
+ Indicator: NodePropertyIndicator,
+ Value: value,
+ Origin: org,
+ Position: pos,
+ }
+ },
+ FloatTag: func(value, org string, pos *Position) *Token {
+ return &Token{
+ Type: TagType,
+ CharacterType: CharacterTypeIndicator,
+ Indicator: NodePropertyIndicator,
+ Value: value,
+ Origin: org,
+ Position: pos,
+ }
+ },
+ NullTag: func(value, org string, pos *Position) *Token {
+ return &Token{
+ Type: TagType,
+ CharacterType: CharacterTypeIndicator,
+ Indicator: NodePropertyIndicator,
+ Value: value,
+ Origin: org,
+ Position: pos,
+ }
+ },
+ SequenceTag: func(value, org string, pos *Position) *Token {
+ return &Token{
+ Type: TagType,
+ CharacterType: CharacterTypeIndicator,
+ Indicator: NodePropertyIndicator,
+ Value: value,
+ Origin: org,
+ Position: pos,
+ }
+ },
+ MappingTag: func(value, org string, pos *Position) *Token {
+ return &Token{
+ Type: TagType,
+ CharacterType: CharacterTypeIndicator,
+ Indicator: NodePropertyIndicator,
+ Value: value,
+ Origin: org,
+ Position: pos,
+ }
+ },
+ StringTag: func(value, org string, pos *Position) *Token {
+ return &Token{
+ Type: TagType,
+ CharacterType: CharacterTypeIndicator,
+ Indicator: NodePropertyIndicator,
+ Value: value,
+ Origin: org,
+ Position: pos,
+ }
+ },
+ BinaryTag: func(value, org string, pos *Position) *Token {
+ return &Token{
+ Type: TagType,
+ CharacterType: CharacterTypeIndicator,
+ Indicator: NodePropertyIndicator,
+ Value: value,
+ Origin: org,
+ Position: pos,
+ }
+ },
+ OrderedMapTag: func(value, org string, pos *Position) *Token {
+ return &Token{
+ Type: TagType,
+ CharacterType: CharacterTypeIndicator,
+ Indicator: NodePropertyIndicator,
+ Value: value,
+ Origin: org,
+ Position: pos,
+ }
+ },
+ SetTag: func(value, org string, pos *Position) *Token {
+ return &Token{
+ Type: TagType,
+ CharacterType: CharacterTypeIndicator,
+ Indicator: NodePropertyIndicator,
+ Value: value,
+ Origin: org,
+ Position: pos,
+ }
+ },
+ TimestampTag: func(value, org string, pos *Position) *Token {
+ return &Token{
+ Type: TagType,
+ CharacterType: CharacterTypeIndicator,
+ Indicator: NodePropertyIndicator,
+ Value: value,
+ Origin: org,
+ Position: pos,
+ }
+ },
+ }
+)
+
+type numType int
+
+const (
+ numTypeNone numType = iota
+ numTypeBinary
+ numTypeOctet
+ numTypeHex
+ numTypeFloat
+)
+
+type numStat struct {
+ isNum bool
+ typ numType
+}
+
+func getNumberStat(str string) *numStat {
+ stat := &numStat{}
+ if str == "" {
+ return stat
+ }
+ if str == "-" || str == "." || str == "+" || str == "_" {
+ return stat
+ }
+ if str[0] == '_' {
+ return stat
+ }
+ dotFound := false
+ isNegative := false
+ isExponent := false
+ if str[0] == '-' {
+ isNegative = true
+ }
+ for idx, c := range str {
+ switch c {
+ case 'x':
+ if (isNegative && idx == 2) || (!isNegative && idx == 1) {
+ continue
+ }
+ case 'o':
+ if (isNegative && idx == 2) || (!isNegative && idx == 1) {
+ continue
+ }
+ case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
+ continue
+ case 'a', 'b', 'c', 'd', 'e', 'f', 'A', 'B', 'C', 'D', 'E', 'F':
+ if (len(str) > 2 && str[0] == '0' && str[1] == 'x') ||
+ (len(str) > 3 && isNegative && str[1] == '0' && str[2] == 'x') {
+ // hex number
+ continue
+ }
+ if c == 'b' && ((isNegative && idx == 2) || (!isNegative && idx == 1)) {
+ // binary number
+ continue
+ }
+ if (c == 'e' || c == 'E') && dotFound {
+ // exponent
+ isExponent = true
+ continue
+ }
+ case '.':
+ if dotFound {
+ // multiple dot
+ return stat
+ }
+ dotFound = true
+ continue
+ case '-':
+ if idx == 0 || isExponent {
+ continue
+ }
+ case '+':
+ if idx == 0 || isExponent {
+ continue
+ }
+ case '_':
+ continue
+ }
+ return stat
+ }
+ stat.isNum = true
+ switch {
+ case dotFound:
+ stat.typ = numTypeFloat
+ case strings.HasPrefix(str, "0b") || strings.HasPrefix(str, "-0b"):
+ stat.typ = numTypeBinary
+ case strings.HasPrefix(str, "0x") || strings.HasPrefix(str, "-0x"):
+ stat.typ = numTypeHex
+ case strings.HasPrefix(str, "0o") || strings.HasPrefix(str, "-0o"):
+ stat.typ = numTypeOctet
+ case (len(str) > 1 && str[0] == '0') || (len(str) > 1 && str[0] == '-' && str[1] == '0'):
+ stat.typ = numTypeOctet
+ }
+ return stat
+}
+
+func looksLikeTimeValue(value string) bool {
+ for i, c := range value {
+ switch c {
+ case ':', '1', '2', '3', '4', '5', '6', '7', '8', '9':
+ continue
+ case '0':
+ if i == 0 {
+ return false
+ }
+ continue
+ }
+ return false
+ }
+ return true
+}
+
+// IsNeedQuoted reports whether the passed string needs to be quoted
+func IsNeedQuoted(value string) bool {
+ if value == "" {
+ return true
+ }
+ if _, exists := reservedEncKeywordMap[value]; exists {
+ return true
+ }
+ if stat := getNumberStat(value); stat.isNum {
+ return true
+ }
+ first := value[0]
+ switch first {
+ case '*', '&', '[', '{', '}', ']', ',', '!', '|', '>', '%', '\'', '"', '@', ' ', '`':
+ return true
+ }
+ last := value[len(value)-1]
+ switch last {
+ case ':', ' ':
+ return true
+ }
+ if looksLikeTimeValue(value) {
+ return true
+ }
+ for i, c := range value {
+ switch c {
+ case '#', '\\':
+ return true
+ case ':':
+ if i+1 < len(value) && value[i+1] == ' ' {
+ return true
+ }
+ }
+ }
+ return false
+}
+
+// LiteralBlockHeader detects the literal block scalar header
+func LiteralBlockHeader(value string) string {
+ lbc := DetectLineBreakCharacter(value)
+
+ switch {
+ case !strings.Contains(value, lbc):
+ return ""
+ case strings.HasSuffix(value, fmt.Sprintf("%s%s", lbc, lbc)):
+ return "|+"
+ case strings.HasSuffix(value, lbc):
+ return "|"
+ default:
+ return "|-"
+ }
+}
+
+// New creates a reserved keyword token or a number token, and otherwise a string token
+func New(value string, org string, pos *Position) *Token {
+ fn := reservedKeywordMap[value]
+ if fn != nil {
+ return fn(value, org, pos)
+ }
+ if stat := getNumberStat(value); stat.isNum {
+ tk := &Token{
+ Type: IntegerType,
+ CharacterType: CharacterTypeMiscellaneous,
+ Indicator: NotIndicator,
+ Value: value,
+ Origin: org,
+ Position: pos,
+ }
+ switch stat.typ {
+ case numTypeFloat:
+ tk.Type = FloatType
+ case numTypeBinary:
+ tk.Type = BinaryIntegerType
+ case numTypeOctet:
+ tk.Type = OctetIntegerType
+ case numTypeHex:
+ tk.Type = HexIntegerType
+ }
+ return tk
+ }
+ return String(value, org, pos)
+}
+
+// Position type for position in YAML document
+type Position struct {
+ Line int
+ Column int
+ Offset int
+ IndentNum int
+ IndentLevel int
+}
+
+// String position to text
+func (p *Position) String() string {
+ return fmt.Sprintf("[level:%d,line:%d,column:%d,offset:%d]", p.IndentLevel, p.Line, p.Column, p.Offset)
+}
+
+// Token type for token
+type Token struct {
+ Type Type
+ CharacterType CharacterType
+ Indicator Indicator
+ Value string
+ Origin string
+ Position *Position
+ Next *Token
+ Prev *Token
+}
+
+// PreviousType previous token type
+func (t *Token) PreviousType() Type {
+ if t.Prev != nil {
+ return t.Prev.Type
+ }
+ return UnknownType
+}
+
+// NextType next token type
+func (t *Token) NextType() Type {
+ if t.Next != nil {
+ return t.Next.Type
+ }
+ return UnknownType
+}
+
+// AddColumn adds the given number of columns to the current column position
+func (t *Token) AddColumn(col int) {
+ if t == nil {
+ return
+ }
+ t.Position.Column += col
+}
+
+// Clone copies the token ( preserving the Prev/Next references )
+func (t *Token) Clone() *Token {
+ if t == nil {
+ return nil
+ }
+ copied := *t
+ if t.Position != nil {
+ pos := *(t.Position)
+ copied.Position = &pos
+ }
+ return &copied
+}
+
+// Tokens type of token collection
+type Tokens []*Token
+
+func (t *Tokens) add(tk *Token) {
+ tokens := *t
+ if len(tokens) == 0 {
+ tokens = append(tokens, tk)
+ } else {
+ last := tokens[len(tokens)-1]
+ last.Next = tk
+ tk.Prev = last
+ tokens = append(tokens, tk)
+ }
+ *t = tokens
+}
+
+// Add appends new tokens
+func (t *Tokens) Add(tks ...*Token) {
+ for _, tk := range tks {
+ t.add(tk)
+ }
+}
+
+// Dump dumps all token structures for debugging
+func (t Tokens) Dump() {
+ for _, tk := range t {
+ fmt.Printf("- %+v\n", tk)
+ }
+}
+
+// String create token for String
+func String(value string, org string, pos *Position) *Token {
+ return &Token{
+ Type: StringType,
+ CharacterType: CharacterTypeMiscellaneous,
+ Indicator: NotIndicator,
+ Value: value,
+ Origin: org,
+ Position: pos,
+ }
+}
+
+// SequenceEntry create token for SequenceEntry
+func SequenceEntry(org string, pos *Position) *Token {
+ return &Token{
+ Type: SequenceEntryType,
+ CharacterType: CharacterTypeIndicator,
+ Indicator: BlockStructureIndicator,
+ Value: string(SequenceEntryCharacter),
+ Origin: org,
+ Position: pos,
+ }
+}
+
+// MappingKey create token for MappingKey
+func MappingKey(pos *Position) *Token {
+ return &Token{
+ Type: MappingKeyType,
+ CharacterType: CharacterTypeIndicator,
+ Indicator: BlockStructureIndicator,
+ Value: string(MappingKeyCharacter),
+ Origin: string(MappingKeyCharacter),
+ Position: pos,
+ }
+}
+
+// MappingValue create token for MappingValue
+func MappingValue(pos *Position) *Token {
+ return &Token{
+ Type: MappingValueType,
+ CharacterType: CharacterTypeIndicator,
+ Indicator: BlockStructureIndicator,
+ Value: string(MappingValueCharacter),
+ Origin: string(MappingValueCharacter),
+ Position: pos,
+ }
+}
+
+// CollectEntry create token for CollectEntry
+func CollectEntry(org string, pos *Position) *Token {
+ return &Token{
+ Type: CollectEntryType,
+ CharacterType: CharacterTypeIndicator,
+ Indicator: FlowCollectionIndicator,
+ Value: string(CollectEntryCharacter),
+ Origin: org,
+ Position: pos,
+ }
+}
+
+// SequenceStart create token for SequenceStart
+func SequenceStart(org string, pos *Position) *Token {
+ return &Token{
+ Type: SequenceStartType,
+ CharacterType: CharacterTypeIndicator,
+ Indicator: FlowCollectionIndicator,
+ Value: string(SequenceStartCharacter),
+ Origin: org,
+ Position: pos,
+ }
+}
+
+// SequenceEnd create token for SequenceEnd
+func SequenceEnd(org string, pos *Position) *Token {
+ return &Token{
+ Type: SequenceEndType,
+ CharacterType: CharacterTypeIndicator,
+ Indicator: FlowCollectionIndicator,
+ Value: string(SequenceEndCharacter),
+ Origin: org,
+ Position: pos,
+ }
+}
+
+// MappingStart create token for MappingStart
+func MappingStart(org string, pos *Position) *Token {
+ return &Token{
+ Type: MappingStartType,
+ CharacterType: CharacterTypeIndicator,
+ Indicator: FlowCollectionIndicator,
+ Value: string(MappingStartCharacter),
+ Origin: org,
+ Position: pos,
+ }
+}
+
+// MappingEnd create token for MappingEnd
+func MappingEnd(org string, pos *Position) *Token {
+ return &Token{
+ Type: MappingEndType,
+ CharacterType: CharacterTypeIndicator,
+ Indicator: FlowCollectionIndicator,
+ Value: string(MappingEndCharacter),
+ Origin: org,
+ Position: pos,
+ }
+}
+
+// Comment create token for Comment
+func Comment(value string, org string, pos *Position) *Token {
+ return &Token{
+ Type: CommentType,
+ CharacterType: CharacterTypeIndicator,
+ Indicator: CommentIndicator,
+ Value: value,
+ Origin: org,
+ Position: pos,
+ }
+}
+
+// Anchor create token for Anchor
+func Anchor(org string, pos *Position) *Token {
+ return &Token{
+ Type: AnchorType,
+ CharacterType: CharacterTypeIndicator,
+ Indicator: NodePropertyIndicator,
+ Value: string(AnchorCharacter),
+ Origin: org,
+ Position: pos,
+ }
+}
+
+// Alias create token for Alias
+func Alias(org string, pos *Position) *Token {
+ return &Token{
+ Type: AliasType,
+ CharacterType: CharacterTypeIndicator,
+ Indicator: NodePropertyIndicator,
+ Value: string(AliasCharacter),
+ Origin: org,
+ Position: pos,
+ }
+}
+
+// Tag create token for Tag
+func Tag(value string, org string, pos *Position) *Token {
+ fn := ReservedTagKeywordMap[ReservedTagKeyword(value)]
+ if fn != nil {
+ return fn(value, org, pos)
+ }
+ return &Token{
+ Type: TagType,
+ CharacterType: CharacterTypeIndicator,
+ Indicator: NodePropertyIndicator,
+ Value: value,
+ Origin: org,
+ Position: pos,
+ }
+}
+
+// Literal create token for Literal
+func Literal(value string, org string, pos *Position) *Token {
+ return &Token{
+ Type: LiteralType,
+ CharacterType: CharacterTypeIndicator,
+ Indicator: BlockScalarIndicator,
+ Value: value,
+ Origin: org,
+ Position: pos,
+ }
+}
+
+// Folded create token for Folded
+func Folded(value string, org string, pos *Position) *Token {
+ return &Token{
+ Type: FoldedType,
+ CharacterType: CharacterTypeIndicator,
+ Indicator: BlockScalarIndicator,
+ Value: value,
+ Origin: org,
+ Position: pos,
+ }
+}
+
+// SingleQuote create token for SingleQuote
+func SingleQuote(value string, org string, pos *Position) *Token {
+ return &Token{
+ Type: SingleQuoteType,
+ CharacterType: CharacterTypeIndicator,
+ Indicator: QuotedScalarIndicator,
+ Value: value,
+ Origin: org,
+ Position: pos,
+ }
+}
+
+// DoubleQuote create token for DoubleQuote
+func DoubleQuote(value string, org string, pos *Position) *Token {
+ return &Token{
+ Type: DoubleQuoteType,
+ CharacterType: CharacterTypeIndicator,
+ Indicator: QuotedScalarIndicator,
+ Value: value,
+ Origin: org,
+ Position: pos,
+ }
+}
+
+// Directive create token for Directive
+func Directive(org string, pos *Position) *Token {
+ return &Token{
+ Type: DirectiveType,
+ CharacterType: CharacterTypeIndicator,
+ Indicator: DirectiveIndicator,
+ Value: string(DirectiveCharacter),
+ Origin: org,
+ Position: pos,
+ }
+}
+
+// Space create token for Space
+func Space(pos *Position) *Token {
+ return &Token{
+ Type: SpaceType,
+ CharacterType: CharacterTypeWhiteSpace,
+ Indicator: NotIndicator,
+ Value: string(SpaceCharacter),
+ Origin: string(SpaceCharacter),
+ Position: pos,
+ }
+}
+
+// MergeKey create token for MergeKey
+func MergeKey(org string, pos *Position) *Token {
+ return &Token{
+ Type: MergeKeyType,
+ CharacterType: CharacterTypeMiscellaneous,
+ Indicator: NotIndicator,
+ Value: "<<",
+ Origin: org,
+ Position: pos,
+ }
+}
+
+// DocumentHeader create token for DocumentHeader
+func DocumentHeader(org string, pos *Position) *Token {
+ return &Token{
+ Type: DocumentHeaderType,
+ CharacterType: CharacterTypeMiscellaneous,
+ Indicator: NotIndicator,
+ Value: "---",
+ Origin: org,
+ Position: pos,
+ }
+}
+
+// DocumentEnd create token for DocumentEnd
+func DocumentEnd(org string, pos *Position) *Token {
+ return &Token{
+ Type: DocumentEndType,
+ CharacterType: CharacterTypeMiscellaneous,
+ Indicator: NotIndicator,
+ Value: "...",
+ Origin: org,
+ Position: pos,
+ }
+}
+
+// DetectLineBreakCharacter detects the line break character used within a single scalar content scope.
+func DetectLineBreakCharacter(src string) string {
+ nc := strings.Count(src, "\n")
+ rc := strings.Count(src, "\r")
+ rnc := strings.Count(src, "\r\n")
+ switch {
+ case nc == rnc && rc == rnc:
+ return "\r\n"
+ case rc > nc:
+ return "\r"
+ default:
+ return "\n"
+ }
+}
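
`token.New` classifies a scalar as a reserved keyword token, a number token, or otherwise a string token, and `IsNeedQuoted` decides whether a value must be quoted when encoding. A small sketch using the vendored token package (import path assumed from the layout in this diff):

```go
package main

import (
	"fmt"

	"github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/token"
)

func main() {
	pos := &token.Position{Line: 1, Column: 1}
	for _, v := range []string{"null", "true", "0x1F", "3.14", "hello"} {
		tk := token.New(v, v, pos)
		fmt.Printf("%-5s -> %s\n", v, tk.Type) // Null, Bool, HexInteger, Float, String
	}
	fmt.Println(token.IsNeedQuoted("yes"))  // true: legacy YAML 1.1 bool keyword
	fmt.Println(token.IsNeedQuoted("a: b")) // true: contains ': '
	fmt.Println(token.IsNeedQuoted("abc"))  // false
}
```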
diff --git a/tmpmod/github.com/goccy/go-yaml/token/token_test.go b/tmpmod/github.com/goccy/go-yaml/token/token_test.go
new file mode 100644
index 00000000..258c8d3a
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/token/token_test.go
@@ -0,0 +1,138 @@
+package token_test
+
+import (
+ "testing"
+
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/token"
+)
+
+func TestToken(t *testing.T) {
+ pos := &token.Position{}
+ tokens := token.Tokens{
+ token.SequenceEntry("-", pos),
+ token.MappingKey(pos),
+ token.MappingValue(pos),
+ token.CollectEntry(",", pos),
+ token.SequenceStart("[", pos),
+ token.SequenceEnd("]", pos),
+ token.MappingStart("{", pos),
+ token.MappingEnd("}", pos),
+ token.Comment("#", "#", pos),
+ token.Anchor("&", pos),
+ token.Alias("*", pos),
+ token.Literal("|", "|", pos),
+ token.Folded(">", ">", pos),
+ token.SingleQuote("'", "'", pos),
+ token.DoubleQuote(`"`, `"`, pos),
+ token.Directive("%", pos),
+ token.Space(pos),
+ token.MergeKey("<<", pos),
+ token.DocumentHeader("---", pos),
+ token.DocumentEnd("...", pos),
+ token.New("1", "1", pos),
+ token.New("3.14", "3.14", pos),
+ token.New("-0b101010", "-0b101010", pos),
+ token.New("0xA", "0xA", pos),
+ token.New("685.230_15e+03", "685.230_15e+03", pos),
+ token.New("02472256", "02472256", pos),
+ token.New("0o2472256", "0o2472256", pos),
+ token.New("", "", pos),
+ token.New("_1", "_1", pos),
+ token.New("1.1.1.1", "1.1.1.1", pos),
+ token.New("+", "+", pos),
+ token.New("-", "-", pos),
+ token.New("_", "_", pos),
+ token.New("~", "~", pos),
+ token.New("true", "true", pos),
+ token.New("false", "false", pos),
+ token.New(".nan", ".nan", pos),
+ token.New(".inf", ".inf", pos),
+ token.New("-.inf", "-.inf", pos),
+ token.New("null", "null", pos),
+ token.Tag("!!null", "!!null", pos),
+ token.Tag("!!map", "!!map", pos),
+ token.Tag("!!str", "!!str", pos),
+ token.Tag("!!seq", "!!seq", pos),
+ token.Tag("!!binary", "!!binary", pos),
+ token.Tag("!!omap", "!!omap", pos),
+ token.Tag("!!set", "!!set", pos),
+ token.Tag("!!int", "!!int", pos),
+ token.Tag("!!float", "!!float", pos),
+ token.Tag("!hoge", "!hoge", pos),
+ }
+ tokens.Dump()
+ tokens.Add(token.New("hoge", "hoge", pos))
+ if tokens[len(tokens)-1].PreviousType() != token.TagType {
+ t.Fatal("invalid previous token type")
+ }
+ if tokens[0].PreviousType() != token.UnknownType {
+ t.Fatal("invalid previous token type")
+ }
+ if tokens[len(tokens)-2].NextType() != token.StringType {
+ t.Fatal("invalid next token type")
+ }
+ if tokens[len(tokens)-1].NextType() != token.UnknownType {
+ t.Fatal("invalid next token type")
+ }
+}
+
+func TestIsNeedQuoted(t *testing.T) {
+ needQuotedTests := []string{
+ "",
+ "true",
+ "1.234",
+ "1:1",
+ "hoge # comment",
+ "\\0",
+ "#a b",
+ "*a b",
+ "&a b",
+ "{a b",
+ "}a b",
+ "[a b",
+ "]a b",
+ ",a b",
+ "!a b",
+ "|a b",
+ ">a b",
+ ">a b",
+ "%a b",
+ `'a b`,
+ `"a b`,
+ "a:",
+ "a: b",
+ "y",
+ "Y",
+ "yes",
+ "Yes",
+ "YES",
+ "n",
+ "N",
+ "no",
+ "No",
+ "NO",
+ "on",
+ "On",
+ "ON",
+ "off",
+ "Off",
+ "OFF",
+ "@test",
+ " a",
+ " a ",
+ "a ",
+ }
+ for i, test := range needQuotedTests {
+ if !token.IsNeedQuoted(test) {
+ t.Fatalf("%d: failed to quoted judge for %s", i, test)
+ }
+ }
+ notNeedQuotedTests := []string{
+ "Hello World",
+ }
+ for i, test := range notNeedQuotedTests {
+ if token.IsNeedQuoted(test) {
+ t.Fatalf("%d: failed to quoted judge for %s", i, test)
+ }
+ }
+}
diff --git a/tmpmod/github.com/goccy/go-yaml/validate.go b/tmpmod/github.com/goccy/go-yaml/validate.go
new file mode 100644
index 00000000..20a2d6d9
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/validate.go
@@ -0,0 +1,13 @@
+package yaml
+
+// StructValidator needs to implement only the Struct method
+// ( see https://pkg.go.dev/github.com/go-playground/validator/v10#Validate.Struct )
+type StructValidator interface {
+ Struct(interface{}) error
+}
+
+// FieldError needs to implement only the StructField method
+// ( see https://pkg.go.dev/github.com/go-playground/validator/v10#FieldError )
+type FieldError interface {
+ StructField() string
+}
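
These two interfaces are the only contract the decoder needs from an external validator; the test file that follows wires in go-playground/validator through the `Validator` decode option. A minimal, non-test sketch of the same wiring (import paths follow the vendored layout in this diff):

```go
package main

import (
	"fmt"
	"strings"

	"github.com/go-playground/validator/v10"
	yaml "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml"
)

type Server struct {
	Host string `yaml:"host" validate:"required"`
	Port int    `yaml:"port" validate:"gte=1,lte=65535"`
}

func main() {
	var s Server
	dec := yaml.NewDecoder(
		strings.NewReader("host: example.com\nport: 99999\n"),
		yaml.Validator(validator.New()), // *validator.Validate satisfies StructValidator
		yaml.Strict(),                   // reject unknown fields
	)
	if err := dec.Decode(&s); err != nil {
		fmt.Println(err) // reports the failing field with its source position
	}
}
```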
diff --git a/tmpmod/github.com/goccy/go-yaml/validate_test.go b/tmpmod/github.com/goccy/go-yaml/validate_test.go
new file mode 100644
index 00000000..70336ab0
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/validate_test.go
@@ -0,0 +1,198 @@
+package yaml_test
+
+import (
+ "strings"
+ "testing"
+
+ "github.com/go-playground/validator/v10"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml"
+)
+
+func TestStructValidator(t *testing.T) {
+ type Inner struct {
+ Required string `validate:"required"`
+ Lt10 int `validate:"lt=10"`
+ }
+
+ cases := []struct {
+ TestName string
+ YAMLContent string
+ ExpectedErr string
+ Instance interface{}
+ }{
+ {
+ TestName: "Test Simple Validation",
+ YAMLContent: `---
+- name: john
+ age: 20
+- name: tom
+ age: -1
+- name: ken
+ age: 10`,
+ ExpectedErr: `[5:8] Key: 'Age' Error:Field validation for 'Age' failed on the 'gte' tag
+ 2 | - name: john
+ 3 | age: 20
+ 4 | - name: tom
+> 5 | age: -1
+ ^
+ 6 | - name: ken
+ 7 | age: 10`,
+ Instance: &[]struct {
+ Name string `yaml:"name" validate:"required"`
+ Age int `yaml:"age" validate:"gte=0,lt=120"`
+ }{},
+ },
+ {
+ TestName: "Test Missing Required Field",
+ YAMLContent: `---
+- name: john
+ age: 20
+- age: 10`,
+ ExpectedErr: `[4:1] Key: 'Name' Error:Field validation for 'Name' failed on the 'required' tag
+ 1 | ---
+ 2 | - name: john
+ 3 | age: 20
+> 4 | - age: 10
+ ^
+`,
+ Instance: &[]struct {
+ Name string `yaml:"name" validate:"required"`
+ Age int `yaml:"age" validate:"gte=0,lt=120"`
+ }{},
+ },
+ {
+ TestName: "Test Nested Validation Missing Internal Required",
+ YAMLContent: `---
+name: john
+age: 10
+addr:
+ number: seven`,
+ ExpectedErr: `[4:5] Key: 'State' Error:Field validation for 'State' failed on the 'required' tag
+ 1 | ---
+ 2 | name: john
+ 3 | age: 10
+> 4 | addr:
+ ^
+ 5 | number: seven`,
+ Instance: &struct {
+ Name string `yaml:"name" validate:"required"`
+ Age int `yaml:"age" validate:"gte=0,lt=120"`
+ Addr struct {
+ Number string `yaml:"number" validate:"required"`
+ State string `yaml:"state" validate:"required"`
+ } `yaml:"addr"`
+ }{},
+ },
+ {
+ TestName: "Test nested Validation with unknown field",
+ YAMLContent: `---
+name: john
+age: 20
+addr:
+ number: seven
+ state: washington
+ error: error
+`,
+ ExpectedErr: `[7:3] unknown field "error"
+ 4 | addr:
+ 5 | number: seven
+ 6 | state: washington
+> 7 | error: error
+ ^
+`,
+ Instance: &struct {
+ Name string `yaml:"name" validate:"required"`
+ Age int `yaml:"age" validate:"gte=0,lt=120"`
+ Addr *struct {
+ Number string `yaml:"number" validate:"required"`
+ State string `yaml:"state" validate:"required"`
+ } `yaml:"addr" validate:"required"`
+ }{},
+ },
+ {
+ TestName: "Test Validation with wrong field type",
+ YAMLContent: `---
+name: myDocument
+roles:
+ name: myRole
+ permissions:
+ - hello
+ - how
+ - are
+ - you
+ `,
+ ExpectedErr: `[4:7] mapping was used where sequence is expected
+ 1 | ---
+ 2 | name: myDocument
+ 3 | roles:
+> 4 | name: myRole
+ ^
+ 5 | permissions:
+ 6 | - hello
+ 7 | - how
+ 8 | `,
+ Instance: &struct {
+ Name string `yaml:"name"`
+ Roles []struct {
+ Name string `yaml:"name"`
+ Permissions []string `yaml:"permissions"`
+ } `yaml:"roles"`
+ }{},
+ },
+ {
+ TestName: "Test inline validation missing required",
+ YAMLContent: `---
+name: john
+age: 20
+`,
+ ExpectedErr: `Key: 'Inner.Required' Error:Field validation for 'Required' failed on the 'required' tag`,
+ Instance: &struct {
+ Name string `yaml:"name" validate:"required"`
+ Age int `yaml:"age" validate:"gte=0,lt=120"`
+ Inner `yaml:",inline"`
+ }{},
+ },
+ {
+ TestName: "Test inline validation field error",
+ YAMLContent: `---
+name: john
+age: 20
+required: present
+lt10: 20
+`,
+ ExpectedErr: `[5:7] Key: 'Inner.Lt10' Error:Field validation for 'Lt10' failed on the 'lt' tag
+ 2 | name: john
+ 3 | age: 20
+ 4 | required: present
+> 5 | lt10: 20
+ ^
+`,
+ Instance: &struct {
+ Name string `yaml:"name" validate:"required"`
+ Age int `yaml:"age" validate:"gte=0,lt=120"`
+ Inner `yaml:",inline"`
+ }{},
+ },
+ }
+
+ for _, tc := range cases {
+ tc := tc // NOTE: https://github.com/golang/go/wiki/CommonMistakes#using-goroutines-on-loop-iterator-variables
+ t.Run(tc.TestName, func(t *testing.T) {
+ validate := validator.New()
+ dec := yaml.NewDecoder(
+ strings.NewReader(tc.YAMLContent),
+ yaml.Validator(validate),
+ yaml.Strict(),
+ )
+ err := dec.Decode(tc.Instance)
+ switch {
+ case tc.ExpectedErr != "" && err == nil:
+ t.Fatal("expected error")
+ case tc.ExpectedErr == "" && err != nil:
+ t.Fatalf("unexpected error: %v", err)
+ case tc.ExpectedErr != "" && tc.ExpectedErr != err.Error():
+ t.Fatalf("expected `%s` but actual `%s`", tc.ExpectedErr, err.Error())
+ }
+ })
+ }
+}
diff --git a/tmpmod/github.com/goccy/go-yaml/yaml.go b/tmpmod/github.com/goccy/go-yaml/yaml.go
new file mode 100644
index 00000000..ac01ec76
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/yaml.go
@@ -0,0 +1,288 @@
+package yaml
+
+import (
+ "bytes"
+ "context"
+ "io"
+ "reflect"
+ "sync"
+
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/ast"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/internal/errors"
+ "golang.org/x/xerrors"
+)
+
+// BytesMarshaler interface may be implemented by types to customize their
+// behavior when being marshaled into a YAML document. The returned value
+// is marshaled in place of the original value implementing Marshaler.
+//
+// If an error is returned by MarshalYAML, the marshaling procedure stops
+// and returns with the provided error.
+type BytesMarshaler interface {
+ MarshalYAML() ([]byte, error)
+}
+
+// BytesMarshalerContext interface uses BytesMarshaler with context.Context.
+type BytesMarshalerContext interface {
+ MarshalYAML(context.Context) ([]byte, error)
+}
+
+// InterfaceMarshaler interface has MarshalYAML compatible with github.com/go-yaml/yaml package.
+type InterfaceMarshaler interface {
+ MarshalYAML() (interface{}, error)
+}
+
+// InterfaceMarshalerContext interface uses InterfaceMarshaler with context.Context.
+type InterfaceMarshalerContext interface {
+ MarshalYAML(context.Context) (interface{}, error)
+}
+
+// BytesUnmarshaler interface may be implemented by types to customize their
+// behavior when being unmarshaled from a YAML document.
+type BytesUnmarshaler interface {
+ UnmarshalYAML([]byte) error
+}
+
+// BytesUnmarshalerContext interface uses BytesUnmarshaler with context.Context.
+type BytesUnmarshalerContext interface {
+ UnmarshalYAML(context.Context, []byte) error
+}
+
+// InterfaceUnmarshaler interface has UnmarshalYAML compatible with github.com/go-yaml/yaml package.
+type InterfaceUnmarshaler interface {
+ UnmarshalYAML(func(interface{}) error) error
+}
+
+// InterfaceUnmarshalerContext interface uses InterfaceUnmarshaler with context.Context.
+type InterfaceUnmarshalerContext interface {
+ UnmarshalYAML(context.Context, func(interface{}) error) error
+}
+
+// MapItem is an item in a MapSlice.
+type MapItem struct {
+ Key, Value interface{}
+}
+
+// MapSlice encodes and decodes as a YAML map.
+// The order of keys is preserved when encoding and decoding.
+type MapSlice []MapItem
+
+// ToMap converts to map[interface{}]interface{}.
+func (s MapSlice) ToMap() map[interface{}]interface{} {
+ v := map[interface{}]interface{}{}
+ for _, item := range s {
+ v[item.Key] = item.Value
+ }
+ return v
+}
+
+// Marshal serializes the value provided into a YAML document. The structure
+// of the generated document will reflect the structure of the value itself.
+// Maps and pointers (to struct, string, int, etc) are accepted as the in value.
+//
+// Struct fields are only marshalled if they are exported (have an upper case
+// first letter), and are marshalled using the field name lowercased as the
+// default key. Custom keys may be defined via the "yaml" name in the field
+// tag: the content preceding the first comma is used as the key, and the
+// following comma-separated options are used to tweak the marshalling process.
+// Conflicting names result in a runtime error.
+//
+// The field tag format accepted is:
+//
+// `(...) yaml:"[<key>][,<flag1>[,<flag2>]]" (...)`
+//
+// The following flags are currently supported:
+//
+// omitempty Only include the field if it's not set to the zero
+// value for the type or to empty slices or maps.
+// Zero valued structs will be omitted if all their public
+// fields are zero, unless they implement an IsZero
+// method (see the IsZeroer interface type), in which
+// case the field will be included if that method returns true.
+//
+// flow Marshal using a flow style (useful for structs,
+// sequences and maps).
+//
+// inline Inline the field, which must be a struct or a map,
+// causing all of its fields or keys to be processed as if
+// they were part of the outer struct. For maps, keys must
+// not conflict with the yaml keys of other struct fields.
+//
+// anchor Marshal with an anchor. To define the anchor name explicitly, use the anchor=name style.
+// Otherwise, if only 'anchor' is given, the lowercased field name is used as the anchor name.
+//
+// alias Marshal with an alias. To define the alias name explicitly, use the alias=name style.
+// Otherwise, if the alias name is omitted and the field type is a pointer type,
+// the anchor name is assigned automatically from the same pointer address.
+//
+// In addition, if the key is "-", the field is ignored.
+//
+// For example:
+//
+// type T struct {
+// F int `yaml:"a,omitempty"`
+// B int
+// }
+// yaml.Marshal(&T{B: 2}) // Returns "b: 2\n"
+// yaml.Marshal(&T{F: 1}) // Returns "a: 1\nb: 0\n"
+func Marshal(v interface{}) ([]byte, error) {
+ return MarshalWithOptions(v)
+}
+
+// MarshalWithOptions serializes the value provided into a YAML document with EncodeOptions.
+func MarshalWithOptions(v interface{}, opts ...EncodeOption) ([]byte, error) {
+ return MarshalContext(context.Background(), v, opts...)
+}
+
+// MarshalContext serializes the value provided into a YAML document with context.Context and EncodeOptions.
+func MarshalContext(ctx context.Context, v interface{}, opts ...EncodeOption) ([]byte, error) {
+ var buf bytes.Buffer
+ if err := NewEncoder(&buf, opts...).EncodeContext(ctx, v); err != nil {
+ return nil, errors.Wrapf(err, "failed to marshal")
+ }
+ return buf.Bytes(), nil
+}
+
+// ValueToNode converts from a value to an ast.Node.
+func ValueToNode(v interface{}, opts ...EncodeOption) (ast.Node, error) {
+ var buf bytes.Buffer
+ node, err := NewEncoder(&buf, opts...).EncodeToNode(v)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to convert value to node")
+ }
+ return node, nil
+}
+
+// Unmarshal decodes the first document found within the in byte slice
+// and assigns decoded values into the out value.
+//
+// Struct fields are only unmarshalled if they are exported (have an
+// upper case first letter), and are unmarshalled using the field name
+// lowercased as the default key. Custom keys may be defined via the
+// "yaml" name in the field tag: the content preceding the first comma
+// is used as the key, and the following comma-separated options are
+// used to tweak the marshalling process (see Marshal).
+// Conflicting names result in a runtime error.
+//
+// For example:
+//
+// type T struct {
+// F int `yaml:"a,omitempty"`
+// B int
+// }
+// var t T
+// yaml.Unmarshal([]byte("a: 1\nb: 2"), &t)
+//
+// See the documentation of Marshal for the format of tags and a list of
+// supported tag options.
+func Unmarshal(data []byte, v interface{}) error {
+ return UnmarshalWithOptions(data, v)
+}
+
+// UnmarshalWithOptions decodes with DecodeOptions the first document found within the in byte slice
+// and assigns decoded values into the out value.
+func UnmarshalWithOptions(data []byte, v interface{}, opts ...DecodeOption) error {
+ return UnmarshalContext(context.Background(), data, v, opts...)
+}
+
+// UnmarshalContext decodes with context.Context and DecodeOptions.
+func UnmarshalContext(ctx context.Context, data []byte, v interface{}, opts ...DecodeOption) error {
+ dec := NewDecoder(bytes.NewBuffer(data), opts...)
+ if err := dec.DecodeContext(ctx, v); err != nil {
+ if err == io.EOF {
+ return nil
+ }
+ return errors.Wrapf(err, "failed to unmarshal")
+ }
+ return nil
+}
+
+// NodeToValue converts node to the value pointed to by v.
+func NodeToValue(node ast.Node, v interface{}, opts ...DecodeOption) error {
+ var buf bytes.Buffer
+ if err := NewDecoder(&buf, opts...).DecodeFromNode(node, v); err != nil {
+ return errors.Wrapf(err, "failed to convert node to value")
+ }
+ return nil
+}
+
+// FormatError is a utility function that takes advantage of the metadata
+// stored in the errors returned by this package's parser.
+//
+// If the second argument `colored` is true, the error message is colorized.
+// If the third argument `inclSource` is true, the error message will
+// contain snippets of the YAML source that was used.
+func FormatError(e error, colored, inclSource bool) string {
+ var pp errors.PrettyPrinter
+ if xerrors.As(e, &pp) {
+ var buf bytes.Buffer
+ pp.PrettyPrint(&errors.Sink{&buf}, colored, inclSource)
+ return buf.String()
+ }
+
+ return e.Error()
+}
+
+// YAMLToJSON converts YAML bytes to JSON.
+func YAMLToJSON(bytes []byte) ([]byte, error) {
+ var v interface{}
+ if err := UnmarshalWithOptions(bytes, &v, UseOrderedMap()); err != nil {
+ return nil, errors.Wrapf(err, "failed to unmarshal")
+ }
+ out, err := MarshalWithOptions(v, JSON())
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to marshal with json option")
+ }
+ return out, nil
+}
+
+// JSONToYAML converts JSON bytes to YAML.
+func JSONToYAML(bytes []byte) ([]byte, error) {
+ var v interface{}
+ if err := UnmarshalWithOptions(bytes, &v, UseOrderedMap()); err != nil {
+ return nil, errors.Wrapf(err, "failed to unmarshal from json bytes")
+ }
+ out, err := Marshal(v)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to marshal")
+ }
+ return out, nil
+}
+
+var (
+ globalCustomMarshalerMu sync.Mutex
+ globalCustomUnmarshalerMu sync.Mutex
+ globalCustomMarshalerMap = map[reflect.Type]func(interface{}) ([]byte, error){}
+ globalCustomUnmarshalerMap = map[reflect.Type]func(interface{}, []byte) error{}
+)
+
+// RegisterCustomMarshaler overrides any encoding process for the type specified in generics.
+// If you want to switch the behavior for each encoder, use `CustomMarshaler` defined as EncodeOption.
+//
+// NOTE: If type T implements MarshalYAML for pointer receiver, the type specified in RegisterCustomMarshaler must be *T.
+// If RegisterCustomMarshaler and CustomMarshaler of EncodeOption are specified for the same type,
+// the CustomMarshaler specified in EncodeOption takes precedence.
+func RegisterCustomMarshaler[T any](marshaler func(T) ([]byte, error)) {
+ globalCustomMarshalerMu.Lock()
+ defer globalCustomMarshalerMu.Unlock()
+
+ var typ T
+ globalCustomMarshalerMap[reflect.TypeOf(typ)] = func(v interface{}) ([]byte, error) {
+ return marshaler(v.(T))
+ }
+}
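+
+// A sketch mirroring TestRegisterCustomMarshaler in this package's tests:
+//
+//	type T struct {
+//		Foo []byte `yaml:"foo"`
+//	}
+//	yaml.RegisterCustomMarshaler[T](func(T) ([]byte, error) {
+//		return []byte(`"override"`), nil
+//	})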
+
+// RegisterCustomUnmarshaler overrides the decoding process for the type given as the type parameter.
+// To switch the behavior per decoder instead, use the `CustomUnmarshaler` DecodeOption.
+//
+// NOTE: If both RegisterCustomUnmarshaler and the CustomUnmarshaler DecodeOption are specified for the same type,
+// the CustomUnmarshaler given as a DecodeOption takes precedence.
+func RegisterCustomUnmarshaler[T any](unmarshaler func(*T, []byte) error) {
+ globalCustomUnmarshalerMu.Lock()
+ defer globalCustomUnmarshalerMu.Unlock()
+
+ var typ *T
+ globalCustomUnmarshalerMap[reflect.TypeOf(typ)] = func(v interface{}, b []byte) error {
+ return unmarshaler(v.(*T), b)
+ }
+}
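+
+// A sketch mirroring TestRegisterCustomUnmarshaler in this package's tests;
+// the registered function receives *T plus the raw YAML bytes for the node:
+//
+//	yaml.RegisterCustomUnmarshaler[T](func(v *T, _ []byte) error {
+//		v.Foo = []byte("override")
+//		return nil
+//	})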
diff --git a/tmpmod/github.com/goccy/go-yaml/yaml_test.go b/tmpmod/github.com/goccy/go-yaml/yaml_test.go
new file mode 100644
index 00000000..df44ee73
--- /dev/null
+++ b/tmpmod/github.com/goccy/go-yaml/yaml_test.go
@@ -0,0 +1,1284 @@
+package yaml_test
+
+import (
+ "bytes"
+ "strings"
+ "testing"
+
+ "github.com/google/go-cmp/cmp"
+ "golang.org/x/xerrors"
+
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml/ast"
+)
+
+func TestMarshal(t *testing.T) {
+ var v struct {
+ A int
+ B string
+ }
+ v.A = 1
+ v.B = "hello"
+ bytes, err := yaml.Marshal(v)
+ if err != nil {
+ t.Fatalf("%+v", err)
+ }
+ if string(bytes) != "a: 1\nb: hello\n" {
+ t.Fatal("failed to marshal")
+ }
+}
+
+func TestUnmarshal(t *testing.T) {
+ yml := `
+%YAML 1.2
+---
+a: 1
+b: c
+`
+ var v struct {
+ A int
+ B string
+ }
+ if err := yaml.Unmarshal([]byte(yml), &v); err != nil {
+ t.Fatalf("%+v", err)
+ }
+}
+
+type marshalTest struct{}
+
+func (t *marshalTest) MarshalYAML() ([]byte, error) {
+ return yaml.Marshal(yaml.MapSlice{
+ {
+ "a", 1,
+ },
+ {
+ "b", "hello",
+ },
+ {
+ "c", true,
+ },
+ {
+ "d", map[string]string{"x": "y"},
+ },
+ })
+}
+
+type marshalTest2 struct{}
+
+func (t *marshalTest2) MarshalYAML() (interface{}, error) {
+ return yaml.MapSlice{
+ {
+ "a", 2,
+ },
+ {
+ "b", "world",
+ },
+ {
+ "c", true,
+ },
+ }, nil
+}
+
+func TestMarshalYAML(t *testing.T) {
+ var v struct {
+ A *marshalTest
+ B *marshalTest2
+ }
+ v.A = &marshalTest{}
+ v.B = &marshalTest2{}
+ bytes, err := yaml.Marshal(v)
+ if err != nil {
+ t.Fatalf("failed to Marshal: %+v", err)
+ }
+ expect := `
+a:
+ a: 1
+ b: hello
+ c: true
+ d:
+ x: "y"
+b:
+ a: 2
+ b: world
+ c: true
+`
+ actual := "\n" + string(bytes)
+ if expect != actual {
+ t.Fatalf("failed to MarshalYAML expect:[%s], actual:[%s]", expect, actual)
+ }
+}
+
+type unmarshalTest struct {
+ a int
+ b string
+ c bool
+}
+
+func (t *unmarshalTest) UnmarshalYAML(b []byte) error {
+ if t.a != 0 {
+ return xerrors.New("unexpected field value to a")
+ }
+ if t.b != "" {
+ return xerrors.New("unexpected field value to b")
+ }
+ if t.c {
+ return xerrors.New("unexpected field value to c")
+ }
+ var v struct {
+ A int
+ B string
+ C bool
+ }
+ if err := yaml.Unmarshal(b, &v); err != nil {
+ return err
+ }
+ t.a = v.A
+ t.b = v.B
+ t.c = v.C
+ return nil
+}
+
+type unmarshalTest2 struct {
+ a int
+ b string
+ c bool
+}
+
+func (t *unmarshalTest2) UnmarshalYAML(unmarshal func(interface{}) error) error {
+ var v struct {
+ A int
+ B string
+ C bool
+ }
+ if t.a != 0 {
+ return xerrors.New("unexpected field value to a")
+ }
+ if t.b != "" {
+ return xerrors.New("unexpected field value to b")
+ }
+ if t.c {
+ return xerrors.New("unexpected field value to c")
+ }
+ if err := unmarshal(&v); err != nil {
+ return err
+ }
+ t.a = v.A
+ t.b = v.B
+ t.c = v.C
+ return nil
+}
+
+func TestUnmarshalYAML(t *testing.T) {
+ yml := `
+a:
+ a: 1
+ b: hello
+ c: true
+b:
+ a: 2
+ b: world
+ c: true
+`
+ var v struct {
+ A *unmarshalTest
+ B *unmarshalTest2
+ }
+ if err := yaml.Unmarshal([]byte(yml), &v); err != nil {
+ t.Fatalf("failed to Unmarshal: %+v", err)
+ }
+ if v.A == nil {
+ t.Fatal("failed to UnmarshalYAML")
+ }
+ if v.A.a != 1 {
+ t.Fatal("failed to UnmarshalYAML")
+ }
+ if v.A.b != "hello" {
+ t.Fatal("failed to UnmarshalYAML")
+ }
+ if !v.A.c {
+ t.Fatal("failed to UnmarshalYAML")
+ }
+ if v.B == nil {
+ t.Fatal("failed to UnmarshalYAML")
+ }
+ if v.B.a != 2 {
+ t.Fatal("failed to UnmarshalYAML")
+ }
+ if v.B.b != "world" {
+ t.Fatal("failed to UnmarshalYAML")
+ }
+ if !v.B.c {
+ t.Fatal("failed to UnmarshalYAML")
+ }
+}
+
+type ObjectMap map[string]*Object
+type ObjectDecl struct {
+ Name string `yaml:"-"`
+ *Object `yaml:",inline,anchor"`
+}
+
+func (m ObjectMap) MarshalYAML() (interface{}, error) {
+ newMap := map[string]*ObjectDecl{}
+ for k, v := range m {
+ newMap[k] = &ObjectDecl{Name: k, Object: v}
+ }
+ return newMap, nil
+}
+
+type rootObject struct {
+ Single ObjectMap `yaml:"single"`
+ Collection map[string][]*Object `yaml:"collection"`
+}
+
+type Object struct {
+ *Object `yaml:",omitempty,inline,alias"`
+ MapValue map[string]interface{} `yaml:",omitempty,inline"`
+}
+
+func TestInlineAnchorAndAlias(t *testing.T) {
+ yml := `---
+single:
+ default: &default
+ id: 1
+ name: john
+ user_1: &user_1
+ id: 1
+ name: ken
+ user_2: &user_2
+ <<: *default
+ id: 2
+collection:
+ defaults:
+ - *default
+ - <<: *default
+ - <<: *default
+ id: 2
+ users:
+ - <<: *user_1
+ - <<: *user_2
+ - <<: *user_1
+ id: 3
+ - <<: *user_1
+ id: 4
+ - <<: *user_1
+ id: 5
+`
+ var v rootObject
+ if err := yaml.Unmarshal([]byte(yml), &v); err != nil {
+ t.Fatal(err)
+ }
+ opt := yaml.MarshalAnchor(func(anchor *ast.AnchorNode, value interface{}) error {
+ if o, ok := value.(*ObjectDecl); ok {
+ return anchor.SetName(o.Name)
+ }
+ return nil
+ })
+ var buf bytes.Buffer
+ if err := yaml.NewEncoder(&buf, opt).Encode(v); err != nil {
+ t.Fatalf("%+v", err)
+ }
+ actual := "---\n" + buf.String()
+ if yml != actual {
+ t.Fatalf("failed to marshal: expected:[%s] actual:[%s]", yml, actual)
+ }
+}
+
+func TestMapSlice_Map(t *testing.T) {
+ yml := `
+a: b
+c: d
+`
+ var v yaml.MapSlice
+ if err := yaml.Unmarshal([]byte(yml), &v); err != nil {
+ t.Fatal(err)
+ }
+ m := v.ToMap()
+ if len(m) != 2 {
+ t.Fatal("failed to convert MapSlice to map")
+ }
+ if m["a"] != "b" {
+ t.Fatal("failed to convert MapSlice to map")
+ }
+ if m["c"] != "d" {
+ t.Fatal("failed to convert MapSlice to map")
+ }
+}
+
+func TestMarshalWithModifiedAnchorAlias(t *testing.T) {
+ yml := `
+a: &a 1
+b: *a
+`
+ var v struct {
+ A *int `yaml:"a,anchor"`
+ B *int `yaml:"b,alias"`
+ }
+ if err := yaml.Unmarshal([]byte(yml), &v); err != nil {
+ t.Fatal(err)
+ }
+ node, err := yaml.ValueToNode(v)
+ if err != nil {
+ t.Fatal(err)
+ }
+ anchors := ast.Filter(ast.AnchorType, node)
+ if len(anchors) != 1 {
+ t.Fatal("failed to filter node")
+ }
+ anchor := anchors[0].(*ast.AnchorNode)
+ if err := anchor.SetName("b"); err != nil {
+ t.Fatal(err)
+ }
+ aliases := ast.Filter(ast.AliasType, node)
+	if len(aliases) != 1 {
+ t.Fatal("failed to filter node")
+ }
+ alias := aliases[0].(*ast.AliasNode)
+ if err := alias.SetName("b"); err != nil {
+ t.Fatal(err)
+ }
+
+ expected := `
+a: &b 1
+b: *b`
+
+ actual := "\n" + node.String()
+ if expected != actual {
+ t.Fatalf("failed to marshal: expected:[%q] but got [%q]", expected, actual)
+ }
+}
+
+func Test_YAMLToJSON(t *testing.T) {
+ yml := `
+foo:
+ bar:
+ - a
+ - b
+ - c
+a: 1
+`
+ actual, err := yaml.YAMLToJSON([]byte(yml))
+ if err != nil {
+ t.Fatal(err)
+ }
+ expected := `{"foo": {"bar": ["a", "b", "c"]}, "a": 1}`
+ if expected+"\n" != string(actual) {
+ t.Fatalf("failed to convert yaml to json: expected [%q] but got [%q]", expected, actual)
+ }
+}
+
+func Test_JSONToYAML(t *testing.T) {
+ json := `{"foo": {"bar": ["a", "b", "c"]}, "a": 1}`
+ expected := `
+foo:
+ bar:
+ - a
+ - b
+ - c
+a: 1
+`
+ actual, err := yaml.JSONToYAML([]byte(json))
+ if err != nil {
+ t.Fatal(err)
+ }
+ if expected != "\n"+string(actual) {
+ t.Fatalf("failed to convert json to yaml: expected [%q] but got [%q]", expected, actual)
+ }
+}
+
+func Test_WithCommentOption(t *testing.T) {
+ t.Run("line comment", func(t *testing.T) {
+ v := struct {
+ Foo string `yaml:"foo"`
+ Bar map[string]interface{} `yaml:"bar"`
+ Baz struct {
+ X int `yaml:"x"`
+ } `yaml:"baz"`
+ }{
+ Foo: "aaa",
+ Bar: map[string]interface{}{"bbb": "ccc"},
+ Baz: struct {
+ X int `yaml:"x"`
+ }{X: 10},
+ }
+ b, err := yaml.MarshalWithOptions(v, yaml.WithComment(
+ yaml.CommentMap{
+ "$.foo": []*yaml.Comment{yaml.LineComment("foo comment")},
+ "$.bar": []*yaml.Comment{yaml.LineComment("bar comment")},
+ "$.bar.bbb": []*yaml.Comment{yaml.LineComment("bbb comment")},
+ "$.baz.x": []*yaml.Comment{yaml.LineComment("x comment")},
+ },
+ ))
+ if err != nil {
+ t.Fatal(err)
+ }
+ expected := `
+foo: aaa #foo comment
+bar: #bar comment
+ bbb: ccc #bbb comment
+baz:
+ x: 10 #x comment
+`
+ actual := "\n" + string(b)
+ if expected != actual {
+ t.Fatalf("expected:%s but got %s", expected, actual)
+ }
+ })
+ t.Run("line comment2", func(t *testing.T) {
+ v := struct {
+ Foo map[string]interface{} `yaml:"foo"`
+ }{
+ Foo: map[string]interface{}{
+ "bar": map[string]interface{}{
+ "baz": true,
+ },
+ },
+ }
+ b, err := yaml.MarshalWithOptions(v, yaml.WithComment(
+ yaml.CommentMap{
+ "$.foo.bar": []*yaml.Comment{yaml.HeadComment(" bar head comment"), yaml.LineComment(" bar line comment")},
+ "$.foo.bar.baz": []*yaml.Comment{yaml.LineComment(" baz line comment")},
+ },
+ ))
+ if err != nil {
+ t.Fatal(err)
+ }
+ expected := `
+foo:
+ # bar head comment
+ bar: # bar line comment
+ baz: true # baz line comment
+`
+ actual := "\n" + string(b)
+ if expected != actual {
+ t.Fatalf("expected:%s but got %s", expected, actual)
+ }
+ })
+ t.Run("single head comment", func(t *testing.T) {
+ v := struct {
+ Foo string `yaml:"foo"`
+ Bar map[string]interface{} `yaml:"bar"`
+ Baz struct {
+ X int `yaml:"x"`
+ } `yaml:"baz"`
+ }{
+ Foo: "aaa",
+ Bar: map[string]interface{}{"bbb": "ccc"},
+ Baz: struct {
+ X int `yaml:"x"`
+ }{X: 10},
+ }
+
+ b, err := yaml.MarshalWithOptions(v, yaml.WithComment(
+ yaml.CommentMap{
+ "$.foo": []*yaml.Comment{yaml.HeadComment("foo comment")},
+ "$.bar": []*yaml.Comment{yaml.HeadComment("bar comment")},
+ "$.bar.bbb": []*yaml.Comment{yaml.HeadComment("bbb comment")},
+ "$.baz.x": []*yaml.Comment{yaml.HeadComment("x comment")},
+ },
+ ))
+ if err != nil {
+ t.Fatal(err)
+ }
+ expected := `
+#foo comment
+foo: aaa
+#bar comment
+bar:
+ #bbb comment
+ bbb: ccc
+baz:
+ #x comment
+ x: 10
+`
+ actual := "\n" + string(b)
+ if expected != actual {
+ t.Fatalf("expected:%s but got %s", expected, actual)
+ }
+ })
+
+ t.Run("multiple head comment", func(t *testing.T) {
+ v := struct {
+ Foo string `yaml:"foo"`
+ Bar map[string]interface{} `yaml:"bar"`
+ Baz struct {
+ X int `yaml:"x"`
+ } `yaml:"baz"`
+ }{
+ Foo: "aaa",
+ Bar: map[string]interface{}{"bbb": "ccc"},
+ Baz: struct {
+ X int `yaml:"x"`
+ }{X: 10},
+ }
+
+ b, err := yaml.MarshalWithOptions(v, yaml.WithComment(
+ yaml.CommentMap{
+ "$.foo": []*yaml.Comment{
+ yaml.HeadComment(
+ "foo comment",
+ "foo comment2",
+ ),
+ },
+ "$.bar": []*yaml.Comment{
+ yaml.HeadComment(
+ "bar comment",
+ "bar comment2",
+ ),
+ },
+ "$.bar.bbb": []*yaml.Comment{
+ yaml.HeadComment(
+ "bbb comment",
+ "bbb comment2",
+ ),
+ },
+ "$.baz.x": []*yaml.Comment{
+ yaml.HeadComment(
+ "x comment",
+ "x comment2",
+ ),
+ },
+ },
+ ))
+ if err != nil {
+ t.Fatal(err)
+ }
+ expected := `
+#foo comment
+#foo comment2
+foo: aaa
+#bar comment
+#bar comment2
+bar:
+ #bbb comment
+ #bbb comment2
+ bbb: ccc
+baz:
+ #x comment
+ #x comment2
+ x: 10
+`
+ actual := "\n" + string(b)
+ if expected != actual {
+ t.Fatalf("expected:%s but got %s", expected, actual)
+ }
+ })
+ t.Run("foot comment", func(t *testing.T) {
+ v := struct {
+ Bar map[string]interface{} `yaml:"bar"`
+ Baz []int `yaml:"baz"`
+ }{
+ Bar: map[string]interface{}{"bbb": "ccc"},
+ Baz: []int{1, 2},
+ }
+
+ b, err := yaml.MarshalWithOptions(v, yaml.IndentSequence(true), yaml.WithComment(
+ yaml.CommentMap{
+ "$.bar.bbb": []*yaml.Comment{yaml.FootComment("ccc: ddd")},
+ "$.baz[1]": []*yaml.Comment{yaml.FootComment("- 3")},
+ "$.baz": []*yaml.Comment{yaml.FootComment(" foot comment", "foot comment2")},
+ },
+ ))
+ if err != nil {
+ t.Fatal(err)
+ }
+ expected := `
+bar:
+ bbb: ccc
+ #ccc: ddd
+baz:
+ - 1
+ - 2
+ #- 3
+# foot comment
+#foot comment2
+`
+ actual := "\n" + string(b)
+ if expected != actual {
+ t.Fatalf("expected:%s but got %s", expected, actual)
+ }
+ })
+
+ t.Run("combination", func(t *testing.T) {
+ v := struct {
+ Foo map[string]interface{} `yaml:"foo"`
+ O map[string]interface{} `yaml:"o"`
+ T map[string]bool `yaml:"t"`
+ Bar map[string]interface{} `yaml:"bar"`
+ Baz []int `yaml:"baz"`
+ Hoge map[string]interface{} `yaml:"hoge"`
+ }{
+ Foo: map[string]interface{}{
+ "a": map[string]interface{}{
+ "b": map[string]interface{}{
+ "c": "d",
+ },
+ },
+ },
+ O: map[string]interface{}{
+ "p": map[string]interface{}{
+ "q": map[string]interface{}{
+ "r": "s",
+ },
+ },
+ },
+ T: map[string]bool{
+ "u": true,
+ },
+ Bar: map[string]interface{}{"bbb": "ccc"},
+ Baz: []int{1, 2},
+ Hoge: map[string]interface{}{
+ "moga": true,
+ },
+ }
+
+ b, err := yaml.MarshalWithOptions(v, yaml.IndentSequence(true), yaml.WithComment(
+ yaml.CommentMap{
+ "$.foo": []*yaml.Comment{
+ yaml.HeadComment(" foo head comment", " foo head comment2"),
+ yaml.LineComment(" foo line comment"),
+ },
+ "$.foo.a": []*yaml.Comment{
+ yaml.HeadComment(" a head comment"),
+ yaml.LineComment(" a line comment"),
+ },
+ "$.foo.a.b": []*yaml.Comment{
+ yaml.HeadComment(" b head comment"),
+ yaml.LineComment(" b line comment"),
+ },
+ "$.foo.a.b.c": []*yaml.Comment{
+ yaml.LineComment(" c line comment"),
+ },
+ "$.o": []*yaml.Comment{
+ yaml.LineComment(" o line comment"),
+ },
+ "$.o.p": []*yaml.Comment{
+ yaml.HeadComment(" p head comment", " p head comment2"),
+ yaml.LineComment(" p line comment"),
+ },
+ "$.o.p.q": []*yaml.Comment{
+ yaml.HeadComment(" q head comment", " q head comment2"),
+ yaml.LineComment(" q line comment"),
+ },
+ "$.o.p.q.r": []*yaml.Comment{
+ yaml.LineComment(" r line comment"),
+ },
+ "$.t.u": []*yaml.Comment{
+ yaml.LineComment(" u line comment"),
+ },
+ "$.bar": []*yaml.Comment{
+ yaml.HeadComment(" bar head comment"),
+ yaml.LineComment(" bar line comment"),
+ },
+ "$.bar.bbb": []*yaml.Comment{
+ yaml.HeadComment(" bbb head comment"),
+ yaml.LineComment(" bbb line comment"),
+ yaml.FootComment(" bbb foot comment"),
+ },
+ "$.baz[0]": []*yaml.Comment{
+ yaml.HeadComment(" sequence head comment"),
+ yaml.LineComment(" sequence line comment"),
+ },
+ "$.baz[1]": []*yaml.Comment{
+ yaml.HeadComment(" sequence head comment2"),
+ yaml.LineComment(" sequence line comment2"),
+ yaml.FootComment(" sequence foot comment"),
+ },
+ "$.baz": []*yaml.Comment{
+ yaml.HeadComment(" baz head comment", " baz head comment2"),
+ yaml.LineComment(" baz line comment"),
+ yaml.FootComment(" baz foot comment"),
+ },
+ "$.hoge.moga": []*yaml.Comment{
+ yaml.LineComment(" moga line comment"),
+ yaml.FootComment(" moga foot comment"),
+ },
+ },
+ ))
+ if err != nil {
+ t.Fatal(err)
+ }
+ expected := `
+# foo head comment
+# foo head comment2
+foo: # foo line comment
+ # a head comment
+ a: # a line comment
+ # b head comment
+ b: # b line comment
+ c: d # c line comment
+o: # o line comment
+ # p head comment
+ # p head comment2
+ p: # p line comment
+ # q head comment
+ # q head comment2
+ q: # q line comment
+ r: s # r line comment
+t:
+ u: true # u line comment
+# bar head comment
+bar: # bar line comment
+ # bbb head comment
+ bbb: ccc # bbb line comment
+ # bbb foot comment
+# baz head comment
+# baz head comment2
+baz: # baz line comment
+ # sequence head comment
+ - 1 # sequence line comment
+ # sequence head comment2
+ - 2 # sequence line comment2
+ # sequence foot comment
+# baz foot comment
+hoge:
+ moga: true # moga line comment
+ # moga foot comment
+`
+ actual := "\n" + string(b)
+ if expected != actual {
+ t.Fatalf("expected:%s but got %s", expected, actual)
+ }
+ })
+
+}
+
+func Test_CommentToMapOption(t *testing.T) {
+ t.Run("line comment", func(t *testing.T) {
+ yml := `
+foo: aaa #foo comment
+bar: #bar comment
+ bbb: ccc #bbb comment
+baz:
+ x: 10 #x comment
+`
+ var (
+ v interface{}
+ cm = yaml.CommentMap{}
+ )
+ if err := yaml.UnmarshalWithOptions([]byte(yml), &v, yaml.CommentToMap(cm)); err != nil {
+ t.Fatal(err)
+ }
+ expected := []struct {
+ path string
+ comments []*yaml.Comment
+ }{
+ {
+ path: "$.foo",
+ comments: []*yaml.Comment{yaml.LineComment("foo comment")},
+ },
+ {
+ path: "$.bar",
+ comments: []*yaml.Comment{yaml.LineComment("bar comment")},
+ },
+ {
+ path: "$.bar.bbb",
+ comments: []*yaml.Comment{yaml.LineComment("bbb comment")},
+ },
+ {
+ path: "$.baz.x",
+ comments: []*yaml.Comment{yaml.LineComment("x comment")},
+ },
+ }
+ for _, exp := range expected {
+ comments := cm[exp.path]
+ if comments == nil {
+ t.Fatalf("failed to get path %s", exp.path)
+ }
+ if diff := cmp.Diff(exp.comments, comments); diff != "" {
+ t.Errorf("(-got, +want)\n%s", diff)
+ }
+ }
+ })
+ t.Run("line comment2", func(t *testing.T) {
+ yml := `
+foo:
+ bar: baz # comment`
+ var (
+ v interface{}
+ cm = yaml.CommentMap{}
+ )
+ if err := yaml.UnmarshalWithOptions([]byte(yml), &v, yaml.CommentToMap(cm)); err != nil {
+ t.Fatal(err)
+ }
+ expected := []struct {
+ path string
+ comments []*yaml.Comment
+ }{
+ {
+ path: "$.foo.bar",
+ comments: []*yaml.Comment{yaml.LineComment(" comment")},
+ },
+ }
+ for _, exp := range expected {
+ comments := cm[exp.path]
+ if comments == nil {
+ t.Fatalf("failed to get path %s", exp.path)
+ }
+ if diff := cmp.Diff(exp.comments, comments); diff != "" {
+ t.Errorf("(-got, +want)\n%s", diff)
+ }
+ }
+ })
+
+ t.Run("single head comment", func(t *testing.T) {
+ yml := `
+#foo comment
+foo: aaa
+#bar comment
+bar:
+ #bbb comment
+ bbb: ccc
+baz:
+ #x comment
+ x: 10
+`
+ var (
+ v interface{}
+ cm = yaml.CommentMap{}
+ )
+ if err := yaml.UnmarshalWithOptions([]byte(yml), &v, yaml.CommentToMap(cm)); err != nil {
+ t.Fatal(err)
+ }
+ expected := []struct {
+ path string
+ comments []*yaml.Comment
+ }{
+ {
+ path: "$.foo",
+ comments: []*yaml.Comment{yaml.HeadComment("foo comment")},
+ },
+ {
+ path: "$.bar",
+ comments: []*yaml.Comment{yaml.HeadComment("bar comment")},
+ },
+ {
+ path: "$.bar.bbb",
+ comments: []*yaml.Comment{yaml.HeadComment("bbb comment")},
+ },
+ {
+ path: "$.baz.x",
+ comments: []*yaml.Comment{yaml.HeadComment("x comment")},
+ },
+ }
+ for _, exp := range expected {
+ comments := cm[exp.path]
+ if comments == nil {
+ t.Fatalf("failed to get path %s", exp.path)
+ }
+ if diff := cmp.Diff(exp.comments, comments); diff != "" {
+ t.Errorf("(-got, +want)\n%s", diff)
+ }
+ }
+ })
+ t.Run("multiple head comments", func(t *testing.T) {
+ yml := `
+#foo comment
+#foo comment2
+foo: aaa
+#bar comment
+#bar comment2
+bar:
+ #bbb comment
+ #bbb comment2
+ bbb: ccc
+baz:
+ #x comment
+ #x comment2
+ x: 10
+`
+ var (
+ v interface{}
+ cm = yaml.CommentMap{}
+ )
+ if err := yaml.UnmarshalWithOptions([]byte(yml), &v, yaml.CommentToMap(cm)); err != nil {
+ t.Fatal(err)
+ }
+ expected := []struct {
+ path string
+ comments []*yaml.Comment
+ }{
+ {
+ path: "$.foo",
+ comments: []*yaml.Comment{
+ yaml.HeadComment(
+ "foo comment",
+ "foo comment2",
+ ),
+ },
+ },
+ {
+ path: "$.bar",
+ comments: []*yaml.Comment{
+ yaml.HeadComment(
+ "bar comment",
+ "bar comment2",
+ ),
+ },
+ },
+ {
+ path: "$.bar.bbb",
+ comments: []*yaml.Comment{
+ yaml.HeadComment(
+ "bbb comment",
+ "bbb comment2",
+ ),
+ },
+ },
+ {
+ path: "$.baz.x",
+ comments: []*yaml.Comment{
+ yaml.HeadComment(
+ "x comment",
+ "x comment2",
+ ),
+ },
+ },
+ }
+ for _, exp := range expected {
+ comments := cm[exp.path]
+ if comments == nil {
+ t.Fatalf("failed to get path %s", exp.path)
+ }
+ if diff := cmp.Diff(exp.comments, comments); diff != "" {
+ t.Errorf("(-got, +want)\n%s", diff)
+ }
+ }
+ })
+ t.Run("foot comment", func(t *testing.T) {
+ yml := `
+bar:
+ bbb: ccc
+ #ccc: ddd
+baz:
+ - 1
+ - 2
+ #- 3
+ # foot comment
+#foot comment2
+`
+ var (
+ v interface{}
+ cm = yaml.CommentMap{}
+ )
+ if err := yaml.UnmarshalWithOptions([]byte(yml), &v, yaml.CommentToMap(cm)); err != nil {
+ t.Fatal(err)
+ }
+ expected := []struct {
+ path string
+ comments []*yaml.Comment
+ }{
+ {
+ path: "$.bar.bbb",
+ comments: []*yaml.Comment{yaml.FootComment("ccc: ddd")},
+ },
+ {
+ path: "$.baz[1]",
+ comments: []*yaml.Comment{yaml.FootComment("- 3")},
+ },
+ {
+ path: "$.baz",
+ comments: []*yaml.Comment{yaml.FootComment(" foot comment", "foot comment2")},
+ },
+ }
+ for _, exp := range expected {
+ comments := cm[exp.path]
+ if comments == nil {
+ t.Fatalf("failed to get path %s", exp.path)
+ }
+ if diff := cmp.Diff(exp.comments, comments); diff != "" {
+ t.Errorf("(-got, +want)\n%s", diff)
+ }
+ }
+ })
+ t.Run("combination", func(t *testing.T) {
+ yml := `
+# foo head comment
+# foo head comment2
+foo: # foo line comment
+ # a head comment
+ a: # a line comment
+ # b head comment
+ b: # b line comment
+ c: d # c line comment
+o: # o line comment
+ # p head comment
+ # p head comment2
+ p: # p line comment
+ # q head comment
+ # q head comment2
+ q: # q line comment
+ r: s # r line comment
+t:
+ u: true # u line comment
+# bar head comment
+bar: # bar line comment
+ # bbb head comment
+ bbb: ccc # bbb line comment
+ # bbb foot comment
+# baz head comment
+# baz head comment2
+baz: # baz line comment
+ # sequence head comment
+ - 1 # sequence line comment
+ # sequence head comment2
+ - 2 # sequence line comment2
+ # sequence foot comment
+hoge:
+ moga: true # moga line comment
+ # moga foot comment
+# hoge foot comment
+`
+ var (
+ v interface{}
+ cm = yaml.CommentMap{}
+ )
+ if err := yaml.UnmarshalWithOptions([]byte(yml), &v, yaml.CommentToMap(cm)); err != nil {
+ t.Fatal(err)
+ }
+ expected := []struct {
+ path string
+ comments []*yaml.Comment
+ }{
+ {
+ path: "$.foo",
+ comments: []*yaml.Comment{
+ yaml.HeadComment(" foo head comment", " foo head comment2"),
+ yaml.LineComment(" foo line comment"),
+ },
+ },
+ {
+ path: "$.foo.a",
+ comments: []*yaml.Comment{
+ yaml.HeadComment(" a head comment"),
+ yaml.LineComment(" a line comment"),
+ },
+ },
+ {
+ path: "$.foo.a.b",
+ comments: []*yaml.Comment{
+ yaml.HeadComment(" b head comment"),
+ yaml.LineComment(" b line comment"),
+ },
+ },
+ {
+ path: "$.foo.a.b.c",
+ comments: []*yaml.Comment{
+ yaml.LineComment(" c line comment"),
+ },
+ },
+ {
+ path: "$.o",
+ comments: []*yaml.Comment{
+ yaml.LineComment(" o line comment"),
+ },
+ },
+ {
+ path: "$.o.p",
+ comments: []*yaml.Comment{
+ yaml.HeadComment(" p head comment", " p head comment2"),
+ yaml.LineComment(" p line comment"),
+ },
+ },
+ {
+ path: "$.o.p.q",
+ comments: []*yaml.Comment{
+ yaml.HeadComment(" q head comment", " q head comment2"),
+ yaml.LineComment(" q line comment"),
+ },
+ },
+ {
+ path: "$.o.p.q.r",
+ comments: []*yaml.Comment{
+ yaml.LineComment(" r line comment"),
+ },
+ },
+ {
+ path: "$.t.u",
+ comments: []*yaml.Comment{
+ yaml.LineComment(" u line comment"),
+ },
+ },
+ {
+ path: "$.bar",
+ comments: []*yaml.Comment{
+ yaml.HeadComment(" bar head comment"),
+ yaml.LineComment(" bar line comment"),
+ },
+ },
+ {
+ path: "$.bar.bbb",
+ comments: []*yaml.Comment{
+ yaml.HeadComment(" bbb head comment"),
+ yaml.LineComment(" bbb line comment"),
+ yaml.FootComment(" bbb foot comment"),
+ },
+ },
+ {
+ path: "$.baz[0]",
+ comments: []*yaml.Comment{
+ yaml.HeadComment(" sequence head comment"),
+ yaml.LineComment(" sequence line comment"),
+ },
+ },
+ {
+ path: "$.baz[1]",
+ comments: []*yaml.Comment{
+ yaml.HeadComment(" sequence head comment2"),
+ yaml.LineComment(" sequence line comment2"),
+ yaml.FootComment(" sequence foot comment"),
+ },
+ },
+ {
+ path: "$.baz",
+ comments: []*yaml.Comment{
+ yaml.HeadComment(" baz head comment", " baz head comment2"),
+ yaml.LineComment(" baz line comment"),
+ },
+ },
+ {
+ path: "$.hoge",
+ comments: []*yaml.Comment{
+ yaml.FootComment(" hoge foot comment"),
+ },
+ },
+ {
+ path: "$.hoge.moga",
+ comments: []*yaml.Comment{
+ yaml.LineComment(" moga line comment"),
+ yaml.FootComment(" moga foot comment"),
+ },
+ },
+ }
+ for _, exp := range expected {
+ comments := cm[exp.path]
+ if comments == nil {
+ t.Fatalf("failed to get path %s", exp.path)
+ }
+ if diff := cmp.Diff(exp.comments, comments); diff != "" {
+ t.Errorf("%s: (-got, +want)\n%s", exp.path, diff)
+ }
+ }
+ })
+}
+
+func TestCommentMapRoundTrip(t *testing.T) {
+	// Test that an unmarshal/marshal round trip retains comments.
+	// If expect is empty, source is used as the expected result.
+ tests := []struct {
+ name string
+ source string
+ expect string
+ encodeOptions []yaml.EncodeOption
+ }{
+ {
+ name: "simple map",
+ source: `
+# head
+a: 1 # line
+# foot
+`,
+ },
+ {
+ name: "nesting",
+ source: `
+- 1 # one
+- foo:
+ a: b
+ # c comment
+ c: d # d comment
+ "e#f": g # g comment
+ h.i: j # j comment
+ "k.#l": m # m comment
+`,
+ },
+ {
+ name: "single quotes",
+ source: `'a#b': c # c comment`,
+ encodeOptions: []yaml.EncodeOption{yaml.UseSingleQuote(true)},
+ },
+ {
+ name: "single quotes added in encode",
+ source: `a#b: c # c comment`,
+ encodeOptions: []yaml.EncodeOption{yaml.UseSingleQuote(true)},
+ expect: `'a#b': c # c comment`,
+ },
+ {
+ name: "double quotes quotes transformed to single quotes",
+ source: `"a#b": c # c comment`,
+ encodeOptions: []yaml.EncodeOption{yaml.UseSingleQuote(true)},
+ expect: `'a#b': c # c comment`,
+ },
+ {
+ name: "single quotes quotes transformed to double quotes",
+ source: `'a#b': c # c comment`,
+ expect: `"a#b": c # c comment`,
+ },
+ {
+ name: "single quotes removed",
+ source: `'a': b # b comment`,
+ expect: `a: b # b comment`,
+ },
+ {
+ name: "double quotes removed",
+ source: `"a": b # b comment`,
+ expect: `a: b # b comment`,
+ },
+ }
+ for _, test := range tests {
+ t.Run(test.name, func(t *testing.T) {
+ var val any
+ cm := yaml.CommentMap{}
+ source := strings.TrimSpace(test.source)
+ if err := yaml.UnmarshalWithOptions([]byte(source), &val, yaml.CommentToMap(cm)); err != nil {
+ t.Fatalf("%+v", err)
+ }
+ marshaled, err := yaml.MarshalWithOptions(val, append(test.encodeOptions, yaml.WithComment(cm))...)
+ if err != nil {
+ t.Fatalf("%+v", err)
+ }
+ got := strings.TrimSpace(string(marshaled))
+ expect := strings.TrimSpace(test.expect)
+ if expect == "" {
+ expect = source
+ }
+ if got != expect {
+ t.Fatalf("expected:\n%s\ngot:\n%s\n", expect, got)
+ }
+ })
+
+ }
+}
+
+func TestRegisterCustomMarshaler(t *testing.T) {
+ type T struct {
+ Foo []byte `yaml:"foo"`
+ }
+ yaml.RegisterCustomMarshaler[T](func(_ T) ([]byte, error) {
+ return []byte(`"override"`), nil
+ })
+ b, err := yaml.Marshal(&T{Foo: []byte("bar")})
+ if err != nil {
+ t.Fatal(err)
+ }
+ if !bytes.Equal(b, []byte("\"override\"\n")) {
+ t.Fatalf("failed to register custom marshaler. got: %q", b)
+ }
+}
+
+func TestRegisterCustomUnmarshaler(t *testing.T) {
+ type T struct {
+ Foo []byte `yaml:"foo"`
+ }
+ yaml.RegisterCustomUnmarshaler[T](func(v *T, _ []byte) error {
+ v.Foo = []byte("override")
+ return nil
+ })
+ var v T
+	if err := yaml.Unmarshal([]byte(`foo: "bar"`), &v); err != nil {
+ t.Fatal(err)
+ }
+ if !bytes.Equal(v.Foo, []byte("override")) {
+ t.Fatalf("failed to decode. got %q", v.Foo)
+ }
+}
diff --git a/vars.go b/vars.go
index 5de2115e..30d7b04e 100644
--- a/vars.go
+++ b/vars.go
@@ -11,7 +11,7 @@ import (
"github.com/bmatcuk/doublestar/v4"
"github.com/goccy/go-json"
- "github.com/goccy/go-yaml"
+ "github.com/k1LoW/runn/tmpmod/github.com/goccy/go-yaml"
)
const multiple = "*"