From 4c2a1fe28b3cfaada15f7ff1d2a4e39566a808a5 Mon Sep 17 00:00:00 2001 From: Chris Randles Date: Sat, 5 Oct 2024 16:51:10 -0400 Subject: [PATCH] feat: add lint error output --- go.mod | 5 + go.sum | 20 + pkg/cmd/lint.go | 127 + pkg/cmd/lint_test.go | 135 ++ pkg/cmd/validate.go | 25 +- vendor/github.com/fatih/color/LICENSE.md | 20 + vendor/github.com/fatih/color/README.md | 173 ++ vendor/github.com/fatih/color/color.go | 603 +++++ vendor/github.com/fatih/color/doc.go | 133 ++ vendor/github.com/goccy/go-yaml/.codecov.yml | 31 + vendor/github.com/goccy/go-yaml/CHANGELOG.md | 186 ++ vendor/github.com/goccy/go-yaml/LICENSE | 21 + vendor/github.com/goccy/go-yaml/Makefile | 19 + vendor/github.com/goccy/go-yaml/README.md | 370 +++ vendor/github.com/goccy/go-yaml/ast/ast.go | 2115 +++++++++++++++++ vendor/github.com/goccy/go-yaml/decode.go | 1796 ++++++++++++++ vendor/github.com/goccy/go-yaml/encode.go | 875 +++++++ vendor/github.com/goccy/go-yaml/error.go | 62 + .../goccy/go-yaml/internal/errors/error.go | 260 ++ .../github.com/goccy/go-yaml/lexer/lexer.go | 23 + vendor/github.com/goccy/go-yaml/option.go | 278 +++ .../goccy/go-yaml/parser/context.go | 192 ++ .../github.com/goccy/go-yaml/parser/parser.go | 743 ++++++ vendor/github.com/goccy/go-yaml/path.go | 814 +++++++ .../goccy/go-yaml/printer/printer.go | 352 +++ .../goccy/go-yaml/scanner/context.go | 236 ++ .../goccy/go-yaml/scanner/scanner.go | 903 +++++++ .../github.com/goccy/go-yaml/stdlib_quote.go | 103 + vendor/github.com/goccy/go-yaml/struct.go | 130 + .../github.com/goccy/go-yaml/token/token.go | 1070 +++++++++ vendor/github.com/goccy/go-yaml/validate.go | 13 + vendor/github.com/goccy/go-yaml/yaml.go | 288 +++ .../github.com/mattn/go-colorable/.travis.yml | 15 + vendor/github.com/mattn/go-colorable/LICENSE | 21 + .../github.com/mattn/go-colorable/README.md | 48 + .../mattn/go-colorable/colorable_appengine.go | 37 + .../mattn/go-colorable/colorable_others.go | 38 + .../mattn/go-colorable/colorable_windows.go | 1043 ++++++++ .../github.com/mattn/go-colorable/go.test.sh | 12 + .../mattn/go-colorable/noncolorable.go | 55 + vendor/github.com/mattn/go-isatty/.travis.yml | 14 + vendor/github.com/mattn/go-isatty/LICENSE | 9 + vendor/github.com/mattn/go-isatty/README.md | 50 + vendor/github.com/mattn/go-isatty/doc.go | 2 + vendor/github.com/mattn/go-isatty/go.test.sh | 12 + .../github.com/mattn/go-isatty/isatty_bsd.go | 18 + .../mattn/go-isatty/isatty_others.go | 15 + .../mattn/go-isatty/isatty_plan9.go | 22 + .../mattn/go-isatty/isatty_solaris.go | 22 + .../mattn/go-isatty/isatty_tcgets.go | 18 + .../mattn/go-isatty/isatty_windows.go | 125 + .../github.com/mattn/go-isatty/renovate.json | 8 + vendor/golang.org/x/xerrors/LICENSE | 27 + vendor/golang.org/x/xerrors/PATENTS | 22 + vendor/golang.org/x/xerrors/README | 2 + vendor/golang.org/x/xerrors/adaptor.go | 193 ++ vendor/golang.org/x/xerrors/codereview.cfg | 1 + vendor/golang.org/x/xerrors/doc.go | 23 + vendor/golang.org/x/xerrors/errors.go | 33 + vendor/golang.org/x/xerrors/fmt.go | 190 ++ vendor/golang.org/x/xerrors/format.go | 34 + vendor/golang.org/x/xerrors/frame.go | 56 + .../golang.org/x/xerrors/internal/internal.go | 8 + vendor/golang.org/x/xerrors/wrap.go | 112 + vendor/modules.txt | 23 + 65 files changed, 14422 insertions(+), 7 deletions(-) create mode 100644 pkg/cmd/lint.go create mode 100644 pkg/cmd/lint_test.go create mode 100644 vendor/github.com/fatih/color/LICENSE.md create mode 100644 vendor/github.com/fatih/color/README.md create mode 100644 
vendor/github.com/fatih/color/color.go create mode 100644 vendor/github.com/fatih/color/doc.go create mode 100644 vendor/github.com/goccy/go-yaml/.codecov.yml create mode 100644 vendor/github.com/goccy/go-yaml/CHANGELOG.md create mode 100644 vendor/github.com/goccy/go-yaml/LICENSE create mode 100644 vendor/github.com/goccy/go-yaml/Makefile create mode 100644 vendor/github.com/goccy/go-yaml/README.md create mode 100644 vendor/github.com/goccy/go-yaml/ast/ast.go create mode 100644 vendor/github.com/goccy/go-yaml/decode.go create mode 100644 vendor/github.com/goccy/go-yaml/encode.go create mode 100644 vendor/github.com/goccy/go-yaml/error.go create mode 100644 vendor/github.com/goccy/go-yaml/internal/errors/error.go create mode 100644 vendor/github.com/goccy/go-yaml/lexer/lexer.go create mode 100644 vendor/github.com/goccy/go-yaml/option.go create mode 100644 vendor/github.com/goccy/go-yaml/parser/context.go create mode 100644 vendor/github.com/goccy/go-yaml/parser/parser.go create mode 100644 vendor/github.com/goccy/go-yaml/path.go create mode 100644 vendor/github.com/goccy/go-yaml/printer/printer.go create mode 100644 vendor/github.com/goccy/go-yaml/scanner/context.go create mode 100644 vendor/github.com/goccy/go-yaml/scanner/scanner.go create mode 100644 vendor/github.com/goccy/go-yaml/stdlib_quote.go create mode 100644 vendor/github.com/goccy/go-yaml/struct.go create mode 100644 vendor/github.com/goccy/go-yaml/token/token.go create mode 100644 vendor/github.com/goccy/go-yaml/validate.go create mode 100644 vendor/github.com/goccy/go-yaml/yaml.go create mode 100644 vendor/github.com/mattn/go-colorable/.travis.yml create mode 100644 vendor/github.com/mattn/go-colorable/LICENSE create mode 100644 vendor/github.com/mattn/go-colorable/README.md create mode 100644 vendor/github.com/mattn/go-colorable/colorable_appengine.go create mode 100644 vendor/github.com/mattn/go-colorable/colorable_others.go create mode 100644 vendor/github.com/mattn/go-colorable/colorable_windows.go create mode 100644 vendor/github.com/mattn/go-colorable/go.test.sh create mode 100644 vendor/github.com/mattn/go-colorable/noncolorable.go create mode 100644 vendor/github.com/mattn/go-isatty/.travis.yml create mode 100644 vendor/github.com/mattn/go-isatty/LICENSE create mode 100644 vendor/github.com/mattn/go-isatty/README.md create mode 100644 vendor/github.com/mattn/go-isatty/doc.go create mode 100644 vendor/github.com/mattn/go-isatty/go.test.sh create mode 100644 vendor/github.com/mattn/go-isatty/isatty_bsd.go create mode 100644 vendor/github.com/mattn/go-isatty/isatty_others.go create mode 100644 vendor/github.com/mattn/go-isatty/isatty_plan9.go create mode 100644 vendor/github.com/mattn/go-isatty/isatty_solaris.go create mode 100644 vendor/github.com/mattn/go-isatty/isatty_tcgets.go create mode 100644 vendor/github.com/mattn/go-isatty/isatty_windows.go create mode 100644 vendor/github.com/mattn/go-isatty/renovate.json create mode 100644 vendor/golang.org/x/xerrors/LICENSE create mode 100644 vendor/golang.org/x/xerrors/PATENTS create mode 100644 vendor/golang.org/x/xerrors/README create mode 100644 vendor/golang.org/x/xerrors/adaptor.go create mode 100644 vendor/golang.org/x/xerrors/codereview.cfg create mode 100644 vendor/golang.org/x/xerrors/doc.go create mode 100644 vendor/golang.org/x/xerrors/errors.go create mode 100644 vendor/golang.org/x/xerrors/fmt.go create mode 100644 vendor/golang.org/x/xerrors/format.go create mode 100644 vendor/golang.org/x/xerrors/frame.go create mode 100644 
vendor/golang.org/x/xerrors/internal/internal.go create mode 100644 vendor/golang.org/x/xerrors/wrap.go diff --git a/go.mod b/go.mod index afebf871..16669390 100644 --- a/go.mod +++ b/go.mod @@ -6,6 +6,7 @@ toolchain go1.22.1 require ( github.com/evanphx/json-patch v5.6.0+incompatible + github.com/goccy/go-yaml v1.12.0 github.com/spf13/cobra v1.8.1 github.com/stretchr/testify v1.9.0 golang.org/x/exp v0.0.0-20230905200255-921286631fa9 @@ -29,6 +30,7 @@ require ( github.com/coreos/go-systemd/v22 v22.5.0 // indirect github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect github.com/emicklei/go-restful/v3 v3.11.0 // indirect + github.com/fatih/color v1.10.0 // indirect github.com/felixge/httpsnoop v1.0.4 // indirect github.com/fsnotify/fsnotify v1.7.0 // indirect github.com/fxamacker/cbor/v2 v2.7.0 // indirect @@ -52,6 +54,8 @@ require ( github.com/josharian/intern v1.0.0 // indirect github.com/json-iterator/go v1.1.12 // indirect github.com/mailru/easyjson v0.7.7 // indirect + github.com/mattn/go-colorable v0.1.8 // indirect + github.com/mattn/go-isatty v0.0.12 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect @@ -86,6 +90,7 @@ require ( golang.org/x/term v0.21.0 // indirect golang.org/x/text v0.16.0 // indirect golang.org/x/time v0.3.0 // indirect + golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 // indirect google.golang.org/genproto/googleapis/api v0.0.0-20240528184218-531527333157 // indirect google.golang.org/genproto/googleapis/rpc v0.0.0-20240701130421-f6361c86f094 // indirect google.golang.org/grpc v1.65.0 // indirect diff --git a/go.sum b/go.sum index 9df4a207..9ceb1744 100644 --- a/go.sum +++ b/go.sum @@ -28,6 +28,8 @@ github.com/emicklei/go-restful/v3 v3.11.0 h1:rAQeMHw1c7zTmncogyy8VvRZwtkmkZ4FxER github.com/emicklei/go-restful/v3 v3.11.0/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry+4bhvbpWn3mrbc= github.com/evanphx/json-patch v5.6.0+incompatible h1:jBYDEEiFBPxA0v50tFdvOzQQTCvpL6mnFh5mB2/l16U= github.com/evanphx/json-patch v5.6.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= +github.com/fatih/color v1.10.0 h1:s36xzo75JdqLaaWoiEHk767eHiwo0598uUxyfiPkDsg= +github.com/fatih/color v1.10.0/go.mod h1:ELkj/draVOlAH/xkhN6mQ50Qd0MPOk5AAr3maGEBuJM= github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA= @@ -49,8 +51,16 @@ github.com/go-openapi/jsonreference v0.20.2/go.mod h1:Bl1zwGIM8/wsvqjsOQLJ/SH+En github.com/go-openapi/swag v0.22.3/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14= github.com/go-openapi/swag v0.22.4 h1:QLMzNJnMGPRNDCbySlcj1x01tzU8/9LTTL9hZZZogBU= github.com/go-openapi/swag v0.22.4/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14= +github.com/go-playground/locales v0.13.0 h1:HyWk6mgj5qFqCT5fjGBuRArbVDfE4hi8+e8ceBS/t7Q= +github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8= +github.com/go-playground/universal-translator v0.17.0 h1:icxd5fm+REJzpZx7ZfpaD876Lmtgy7VtROAbHHXk8no= +github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA= +github.com/go-playground/validator/v10 v10.4.1 h1:pH2c5ADXtd66mxoE0Zm9SUhxE20r7aM3F26W0hOn+GE= +github.com/go-playground/validator/v10 
v10.4.1/go.mod h1:nlOn6nFhuKACm19sB/8EGNn9GlaMV7XkbRSipzJ0Ii4= github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1vB6EwHI= github.com/go-task/slim-sprig/v3 v3.0.0/go.mod h1:W848ghGpv3Qj3dhTPRyJypKRiqCdHZiAzKg9hl15HA8= +github.com/goccy/go-yaml v1.12.0 h1:/1WHjnMsI1dlIBQutrvSMGZRQufVO3asrHfTwfACoPM= +github.com/goccy/go-yaml v1.12.0/go.mod h1:wKnAMd44+9JAAnGQpWVEgBzGt3YuTaQ4uXoHvE4m7WU= github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= @@ -105,8 +115,14 @@ github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/leodido/go-urn v1.2.0 h1:hpXL4XnriNwQ/ABnpepYM/1vCLWNDfUNts8dX3xTG6Y= +github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII= github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= +github.com/mattn/go-colorable v0.1.8 h1:c1ghPdyEDarC70ftn0y+A/Ee++9zz8ljHG1b13eJ0s8= +github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY= +github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= @@ -225,6 +241,8 @@ golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M= golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.21.0 h1:rF+pYz3DAGSQAxAu1CbC7catZg4ebC4UIeIhKxBZvws= golang.org/x/sys v0.21.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= @@ -246,6 +264,8 @@ golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8T golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 h1:H2TDz8ibqkAF6YGhCdN3jS9O0/s90v0rJh3X/OLHEUk= +golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2/go.mod 
h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= google.golang.org/genproto v0.0.0-20230822172742-b8732ec3820d h1:VBu5YqKPv6XiJ199exd8Br+Aetz+o08F+PLMnwJQHAY= google.golang.org/genproto v0.0.0-20230822172742-b8732ec3820d/go.mod h1:yZTlhN0tQnXo3h00fuXNCxJdLdIdnVFVBaRJ5LWBbw4= google.golang.org/genproto/googleapis/api v0.0.0-20240528184218-531527333157 h1:7whR9kGa5LUwFtpLm2ArCEejtnxlGeLbAyjFY8sGNFw= diff --git a/pkg/cmd/lint.go b/pkg/cmd/lint.go new file mode 100644 index 00000000..49dfbf05 --- /dev/null +++ b/pkg/cmd/lint.go @@ -0,0 +1,127 @@ +package cmd + +import ( + "cmp" + "fmt" + "os" + "slices" + "strings" + + "github.com/goccy/go-yaml" + "github.com/goccy/go-yaml/parser" + "golang.org/x/exp/maps" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" +) + +// convert the given map of filenames to validation errors into a lint output format: '%f:%l:%c: %m' +// %f - file, %l - line, %c - column, %m - message +func lintMarshal(details map[string][]metav1.Status) ([]byte, error) { + const ( + nilValue = "" + ) + files := maps.Keys(details) + slices.Sort(files) + + results := []string{} +DETAILS: + for _, file := range files { + status := details[file] + causes := make(map[string][]metav1.StatusCause) + for _, s := range status { + if s.Status == metav1.StatusSuccess { + continue DETAILS // only lint errors + } + for _, c := range s.Details.Causes { + if c.Field == nilValue { + continue // no field to lookup/annotate + } + key := string(c.Type) + causes[key] = append(causes[key], c) + } + } + if len(causes) == 0 { + continue // nothing to do, no causes deemed problematic + } + b, err := os.ReadFile(file) + if err != nil { + return nil, err + } + // group causes by position, so that we can group them together in the same output line + errors := make(map[Position][]metav1.StatusCause) + for _, items := range causes { + for _, c := range items { + path, err := yaml.PathString(fmt.Sprintf("$.%s", c.Field)) + if err != nil { + return nil, err + } + position, err := getPosition(path, b) + if err != nil { + return nil, err + } + errors[position] = append(errors[position], c) + } + } + keys := maps.Keys(errors) + slices.SortFunc(keys, func(i, j Position) int { + return cmp.Or( + cmp.Compare(i.Line, j.Line), + cmp.Compare(i.Column, j.Column), + ) + }) + for _, position := range keys { + causes := errors[position] + messages := make(map[string][]string) + for _, c := range causes { + messages[c.Field] = append(messages[c.Field], fmt.Sprintf("(reason: %q; %s)", c.Type, c.Message)) + } + fieldMessages := []string{} + for field, msgs := range messages { + fieldMessages = append(fieldMessages, fmt.Sprintf("field %q: %s", field, strings.Join(msgs, ", "))) + } + le := LintError{ + File: file, + Line: position.Line, + Column: position.Column, + Message: strings.Join(fieldMessages, ", "), + } + results = append(results, le.String()) + } + } + return []byte(strings.Join(results, "\n")), nil +} + +type Position struct { + Line int + Column int +} + +type Reason struct { + Type string + Message string +} + +type LintError struct { + File string + Line int + Column int + Message string +} + +func (e LintError) String() string { + return fmt.Sprintf("%s:%d:%d: %s", e.File, e.Line, e.Column, e.Message) +} + +func getPosition(p *yaml.Path, source []byte) (Position, error) { + file, err := parser.ParseBytes([]byte(source), 0) + if err != nil { + return Position{}, err + } + node, err := p.FilterFile(file) + if err != nil { + return Position{}, err + } + return Position{ + Line: node.GetToken().Position.Line, + Column: 
node.GetToken().Position.Column, + }, nil +} diff --git a/pkg/cmd/lint_test.go b/pkg/cmd/lint_test.go new file mode 100644 index 00000000..6be1550b --- /dev/null +++ b/pkg/cmd/lint_test.go @@ -0,0 +1,135 @@ +package cmd + +import ( + "strings" + "testing" + + "github.com/stretchr/testify/require" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" +) + +func TestLintMarshal(t *testing.T) { + cases := []struct { + name string + input map[string][]metav1.Status + expected string + }{ + { + name: "empty", + input: map[string][]metav1.Status{}, + expected: ``, + }, + { + name: "success", + input: map[string][]metav1.Status{ + "file.yaml": { + {Status: metav1.StatusSuccess, Reason: "valid"}, + }, + }, + expected: ``, + }, + { + name: "single error, single cause", + input: map[string][]metav1.Status{ + "../../testcases/manifests/configmap.yaml": { + {Status: metav1.StatusFailure, Reason: "invalid", Details: &metav1.StatusDetails{ + Causes: []metav1.StatusCause{ + { + Type: "FailureType", + Field: "metadata.name", + Message: "name is required or invalid somehow", + }, + }, + }}, + }, + }, + expected: `../../testcases/manifests/configmap.yaml:8:9: field "metadata.name": (reason: "FailureType"; name is required or invalid somehow)`, + }, + { + name: "single error with ignored success", + input: map[string][]metav1.Status{ + "../../testcases/manifests/configmap.yaml": { + {Status: metav1.StatusSuccess, Reason: "valid"}, + }, + "../../testcases/manifests/apiservice.yaml": { + {Status: metav1.StatusFailure, Reason: "invalid", Details: &metav1.StatusDetails{ + Causes: []metav1.StatusCause{ + { + Type: "FailureType", + Field: "metadata.name", + Message: "name is required or invalid somehow but specific to apiservices", + }, + }, + }}, + }, + }, + expected: `../../testcases/manifests/apiservice.yaml:14:9: field "metadata.name": (reason: "FailureType"; name is required or invalid somehow but specific to apiservices)`, + }, + { + name: "multiple errors, multiple causes", + input: map[string][]metav1.Status{ + "../../testcases/manifests/configmap.yaml": { + {Status: metav1.StatusFailure, Reason: "invalid", Details: &metav1.StatusDetails{ + Causes: []metav1.StatusCause{ + { + Type: "FailureType", + Field: "metadata.name", + Message: "name is required or invalid somehow 1x1", + }, + { + Type: "FailureType", + Field: "metadata.finalizers", + Message: "something wrong with finalizers 1x2", + }, + }, + }}, + }, + "../../testcases/manifests/apiservice.yaml": { + {Status: metav1.StatusFailure, Reason: "invalid", Details: &metav1.StatusDetails{ + Causes: []metav1.StatusCause{ + { + Type: "FailureType", + Field: "metadata.name", + Message: "name is required or invalid somehow 2x1", + }, + { + Type: "FailureType", + Field: "metadata.name", + Message: "name is required or invalid somehow 2x2", + }, + }, + }}, + }, + }, + expected: strings.Join([]string{ + `../../testcases/manifests/apiservice.yaml:14:9: field "metadata.name": (reason: "FailureType"; name is required or invalid somehow 2x1), (reason: "FailureType"; name is required or invalid somehow 2x2)`, + `../../testcases/manifests/configmap.yaml:8:9: field "metadata.name": (reason: "FailureType"; name is required or invalid somehow 1x1)`, + `../../testcases/manifests/configmap.yaml:10:3: field "metadata.finalizers": (reason: "FailureType"; something wrong with finalizers 1x2)`, + }, "\n"), + }, + { + name: "single error of complex field", + input: map[string][]metav1.Status{ + "../../testcases/manifests/error_x_list_map_duplicate_key.yaml": { + {Status: 
metav1.StatusFailure, Reason: "invalid", Details: &metav1.StatusDetails{ + Causes: []metav1.StatusCause{ + { + Type: "FieldValueDuplicate", + Field: "spec.containers[0].ports[2]", + Message: `Duplicate value: map[string]interface{}{"key":"value"}`, + }, + }, + }}, + }, + }, + expected: `../../testcases/manifests/error_x_list_map_duplicate_key.yaml:51:19: field "spec.containers[0].ports[2]": (reason: "FieldValueDuplicate"; Duplicate value: map[string]interface{}{"key":"value"})`, + }, + } + for _, tc := range cases { + t.Run(tc.name, func(t *testing.T) { + actual, err := lintMarshal(tc.input) + require.NoError(t, err) + require.Equal(t, tc.expected, string(actual)) + }) + } +} diff --git a/pkg/cmd/validate.go b/pkg/cmd/validate.go index b175acc9..338a4830 100644 --- a/pkg/cmd/validate.go +++ b/pkg/cmd/validate.go @@ -33,6 +33,7 @@ type OutputFormat string const ( OutputHuman OutputFormat = "human" OutputJSON OutputFormat = "json" + OutputLint OutputFormat = "lint" ) // String is used both by fmt.Print and by Cobra in help text @@ -42,12 +43,12 @@ func (e *OutputFormat) String() string { // Set must have pointer receiver so it doesn't change the value of a copy func (e *OutputFormat) Set(v string) error { - switch v { - case "human", "json": + switch OutputFormat(v) { + case OutputHuman, OutputJSON, OutputLint: *e = OutputFormat(v) return nil default: - return fmt.Errorf(`must be one of "human", or "json"`) + return fmt.Errorf(`must be one of "human", "json", or "lint"`) } } @@ -82,7 +83,7 @@ func NewRootCommand() *cobra.Command { res.Flags().StringVarP(&invoked.localSchemasDir, "local-schemas", "", "", "--local-schemas=./path/to/schemas/dir. Path to a directory with format: /apis//.json for each group-version's schema.") res.Flags().StringSliceVarP(&invoked.localCRDsDir, "local-crds", "", []string{}, "--local-crds=./path/to/crds/dir. Paths to directories containing .yaml or .yml files for CRD definitions.") res.Flags().StringVarP(&invoked.schemaPatchesDir, "schema-patches", "", "", "Path to a directory with format: /apis//.json for each group-version's schema you wish to jsonpatch to the groupversion's final schema. Patches only apply if the schema exists") - res.Flags().VarP(&invoked.outputFormat, "output", "o", "Output format. Choice of: \"human\" or \"json\"") + res.Flags().VarP(&invoked.outputFormat, "output", "o", "Output format. 
Choice of: \"human\", \"json\", or \"lint\"") clientcmd.BindOverrideFlags(&invoked.kubeConfigOverrides, res.Flags(), clientcmd.RecommendedConfigOverrideFlags("kube-")) return res } @@ -245,9 +246,19 @@ func (c *commandFlags) Run(cmd *cobra.Command, args []string) error { hasError = hasError || err != nil } } - data, e := json.MarshalIndent(res, "", " ") - if e != nil { - return InternalError{fmt.Errorf("failed to render results into JSON: %w", e)} + var data []byte + if c.outputFormat == OutputLint { + var err error + data, err = lintMarshal(res) + if err != nil { + return InternalError{fmt.Errorf("failed to render results into lint format: %w", err)} + } + } else if c.outputFormat == OutputJSON { + var err error + data, err = json.MarshalIndent(res, "", " ") + if err != nil { + return InternalError{fmt.Errorf("failed to render results into JSON: %w", err)} + } } fmt.Fprintln(cmd.OutOrStdout(), string(data)) } diff --git a/vendor/github.com/fatih/color/LICENSE.md b/vendor/github.com/fatih/color/LICENSE.md new file mode 100644 index 00000000..25fdaf63 --- /dev/null +++ b/vendor/github.com/fatih/color/LICENSE.md @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2013 Fatih Arslan + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/fatih/color/README.md b/vendor/github.com/fatih/color/README.md new file mode 100644 index 00000000..d62e4024 --- /dev/null +++ b/vendor/github.com/fatih/color/README.md @@ -0,0 +1,173 @@ +# color [![](https://github.com/fatih/color/workflows/build/badge.svg)](https://github.com/fatih/color/actions) [![PkgGoDev](https://pkg.go.dev/badge/github.com/fatih/color)](https://pkg.go.dev/github.com/fatih/color) + +Color lets you use colorized outputs in terms of [ANSI Escape +Codes](http://en.wikipedia.org/wiki/ANSI_escape_code#Colors) in Go (Golang). It +has support for Windows too! The API can be used in several ways, pick one that +suits you. 
+ +![Color](https://user-images.githubusercontent.com/438920/96832689-03b3e000-13f4-11eb-9803-46f4c4de3406.jpg) + + +## Install + +```bash +go get github.com/fatih/color +``` + +## Examples + +### Standard colors + +```go +// Print with default helper functions +color.Cyan("Prints text in cyan.") + +// A newline will be appended automatically +color.Blue("Prints %s in blue.", "text") + +// These are using the default foreground colors +color.Red("We have red") +color.Magenta("And many others ..") + +``` + +### Mix and reuse colors + +```go +// Create a new color object +c := color.New(color.FgCyan).Add(color.Underline) +c.Println("Prints cyan text with an underline.") + +// Or just add them to New() +d := color.New(color.FgCyan, color.Bold) +d.Printf("This prints bold cyan %s\n", "too!.") + +// Mix up foreground and background colors, create new mixes! +red := color.New(color.FgRed) + +boldRed := red.Add(color.Bold) +boldRed.Println("This will print text in bold red.") + +whiteBackground := red.Add(color.BgWhite) +whiteBackground.Println("Red text with white background.") +``` + +### Use your own output (io.Writer) + +```go +// Use your own io.Writer output +color.New(color.FgBlue).Fprintln(myWriter, "blue color!") + +blue := color.New(color.FgBlue) +blue.Fprint(writer, "This will print text in blue.") +``` + +### Custom print functions (PrintFunc) + +```go +// Create a custom print function for convenience +red := color.New(color.FgRed).PrintfFunc() +red("Warning") +red("Error: %s", err) + +// Mix up multiple attributes +notice := color.New(color.Bold, color.FgGreen).PrintlnFunc() +notice("Don't forget this...") +``` + +### Custom fprint functions (FprintFunc) + +```go +blue := color.New(FgBlue).FprintfFunc() +blue(myWriter, "important notice: %s", stars) + +// Mix up with multiple attributes +success := color.New(color.Bold, color.FgGreen).FprintlnFunc() +success(myWriter, "Don't forget this...") +``` + +### Insert into noncolor strings (SprintFunc) + +```go +// Create SprintXxx functions to mix strings with other non-colorized strings: +yellow := color.New(color.FgYellow).SprintFunc() +red := color.New(color.FgRed).SprintFunc() +fmt.Printf("This is a %s and this is %s.\n", yellow("warning"), red("error")) + +info := color.New(color.FgWhite, color.BgGreen).SprintFunc() +fmt.Printf("This %s rocks!\n", info("package")) + +// Use helper functions +fmt.Println("This", color.RedString("warning"), "should be not neglected.") +fmt.Printf("%v %v\n", color.GreenString("Info:"), "an important message.") + +// Windows supported too! Just don't forget to change the output to color.Output +fmt.Fprintf(color.Output, "Windows support: %s", color.GreenString("PASS")) +``` + +### Plug into existing code + +```go +// Use handy standard colors +color.Set(color.FgYellow) + +fmt.Println("Existing text will now be in yellow") +fmt.Printf("This one %s\n", "too") + +color.Unset() // Don't forget to unset + +// You can mix up parameters +color.Set(color.FgMagenta, color.Bold) +defer color.Unset() // Use it in your function + +fmt.Println("All text will now be bold magenta.") +``` + +### Disable/Enable color + +There might be a case where you want to explicitly disable/enable color output. the +`go-isatty` package will automatically disable color output for non-tty output streams +(for example if the output were piped directly to `less`) + +`Color` has support to disable/enable colors both globally and for single color +definitions. For example suppose you have a CLI app and a `--no-color` bool flag. 
You +can easily disable the color output with: + +```go + +var flagNoColor = flag.Bool("no-color", false, "Disable color output") + +if *flagNoColor { + color.NoColor = true // disables colorized output +} +``` + +It also has support for single color definitions (local). You can +disable/enable color output on the fly: + +```go +c := color.New(color.FgCyan) +c.Println("Prints cyan text") + +c.DisableColor() +c.Println("This is printed without any color") + +c.EnableColor() +c.Println("This prints again cyan...") +``` + +## Todo + +* Save/Return previous values +* Evaluate fmt.Formatter interface + + +## Credits + + * [Fatih Arslan](https://github.com/fatih) + * Windows support via @mattn: [colorable](https://github.com/mattn/go-colorable) + +## License + +The MIT License (MIT) - see [`LICENSE.md`](https://github.com/fatih/color/blob/master/LICENSE.md) for more details + diff --git a/vendor/github.com/fatih/color/color.go b/vendor/github.com/fatih/color/color.go new file mode 100644 index 00000000..91c8e9f0 --- /dev/null +++ b/vendor/github.com/fatih/color/color.go @@ -0,0 +1,603 @@ +package color + +import ( + "fmt" + "io" + "os" + "strconv" + "strings" + "sync" + + "github.com/mattn/go-colorable" + "github.com/mattn/go-isatty" +) + +var ( + // NoColor defines if the output is colorized or not. It's dynamically set to + // false or true based on the stdout's file descriptor referring to a terminal + // or not. This is a global option and affects all colors. For more control + // over each color block use the methods DisableColor() individually. + NoColor = os.Getenv("TERM") == "dumb" || + (!isatty.IsTerminal(os.Stdout.Fd()) && !isatty.IsCygwinTerminal(os.Stdout.Fd())) + + // Output defines the standard output of the print functions. By default + // os.Stdout is used. + Output = colorable.NewColorableStdout() + + // Error defines a color supporting writer for os.Stderr. + Error = colorable.NewColorableStderr() + + // colorsCache is used to reduce the count of created Color objects and + // allows to reuse already created objects with required Attribute. + colorsCache = make(map[Attribute]*Color) + colorsCacheMu sync.Mutex // protects colorsCache +) + +// Color defines a custom color object which is defined by SGR parameters. +type Color struct { + params []Attribute + noColor *bool +} + +// Attribute defines a single SGR Code +type Attribute int + +const escape = "\x1b" + +// Base attributes +const ( + Reset Attribute = iota + Bold + Faint + Italic + Underline + BlinkSlow + BlinkRapid + ReverseVideo + Concealed + CrossedOut +) + +// Foreground text colors +const ( + FgBlack Attribute = iota + 30 + FgRed + FgGreen + FgYellow + FgBlue + FgMagenta + FgCyan + FgWhite +) + +// Foreground Hi-Intensity text colors +const ( + FgHiBlack Attribute = iota + 90 + FgHiRed + FgHiGreen + FgHiYellow + FgHiBlue + FgHiMagenta + FgHiCyan + FgHiWhite +) + +// Background text colors +const ( + BgBlack Attribute = iota + 40 + BgRed + BgGreen + BgYellow + BgBlue + BgMagenta + BgCyan + BgWhite +) + +// Background Hi-Intensity text colors +const ( + BgHiBlack Attribute = iota + 100 + BgHiRed + BgHiGreen + BgHiYellow + BgHiBlue + BgHiMagenta + BgHiCyan + BgHiWhite +) + +// New returns a newly created color object. +func New(value ...Attribute) *Color { + c := &Color{params: make([]Attribute, 0)} + c.Add(value...) + return c +} + +// Set sets the given parameters immediately. It will change the color of +// output with the given SGR parameters until color.Unset() is called. 
+func Set(p ...Attribute) *Color { + c := New(p...) + c.Set() + return c +} + +// Unset resets all escape attributes and clears the output. Usually should +// be called after Set(). +func Unset() { + if NoColor { + return + } + + fmt.Fprintf(Output, "%s[%dm", escape, Reset) +} + +// Set sets the SGR sequence. +func (c *Color) Set() *Color { + if c.isNoColorSet() { + return c + } + + fmt.Fprintf(Output, c.format()) + return c +} + +func (c *Color) unset() { + if c.isNoColorSet() { + return + } + + Unset() +} + +func (c *Color) setWriter(w io.Writer) *Color { + if c.isNoColorSet() { + return c + } + + fmt.Fprintf(w, c.format()) + return c +} + +func (c *Color) unsetWriter(w io.Writer) { + if c.isNoColorSet() { + return + } + + if NoColor { + return + } + + fmt.Fprintf(w, "%s[%dm", escape, Reset) +} + +// Add is used to chain SGR parameters. Use as many as parameters to combine +// and create custom color objects. Example: Add(color.FgRed, color.Underline). +func (c *Color) Add(value ...Attribute) *Color { + c.params = append(c.params, value...) + return c +} + +func (c *Color) prepend(value Attribute) { + c.params = append(c.params, 0) + copy(c.params[1:], c.params[0:]) + c.params[0] = value +} + +// Fprint formats using the default formats for its operands and writes to w. +// Spaces are added between operands when neither is a string. +// It returns the number of bytes written and any write error encountered. +// On Windows, users should wrap w with colorable.NewColorable() if w is of +// type *os.File. +func (c *Color) Fprint(w io.Writer, a ...interface{}) (n int, err error) { + c.setWriter(w) + defer c.unsetWriter(w) + + return fmt.Fprint(w, a...) +} + +// Print formats using the default formats for its operands and writes to +// standard output. Spaces are added between operands when neither is a +// string. It returns the number of bytes written and any write error +// encountered. This is the standard fmt.Print() method wrapped with the given +// color. +func (c *Color) Print(a ...interface{}) (n int, err error) { + c.Set() + defer c.unset() + + return fmt.Fprint(Output, a...) +} + +// Fprintf formats according to a format specifier and writes to w. +// It returns the number of bytes written and any write error encountered. +// On Windows, users should wrap w with colorable.NewColorable() if w is of +// type *os.File. +func (c *Color) Fprintf(w io.Writer, format string, a ...interface{}) (n int, err error) { + c.setWriter(w) + defer c.unsetWriter(w) + + return fmt.Fprintf(w, format, a...) +} + +// Printf formats according to a format specifier and writes to standard output. +// It returns the number of bytes written and any write error encountered. +// This is the standard fmt.Printf() method wrapped with the given color. +func (c *Color) Printf(format string, a ...interface{}) (n int, err error) { + c.Set() + defer c.unset() + + return fmt.Fprintf(Output, format, a...) +} + +// Fprintln formats using the default formats for its operands and writes to w. +// Spaces are always added between operands and a newline is appended. +// On Windows, users should wrap w with colorable.NewColorable() if w is of +// type *os.File. +func (c *Color) Fprintln(w io.Writer, a ...interface{}) (n int, err error) { + c.setWriter(w) + defer c.unsetWriter(w) + + return fmt.Fprintln(w, a...) +} + +// Println formats using the default formats for its operands and writes to +// standard output. Spaces are always added between operands and a newline is +// appended. 
It returns the number of bytes written and any write error +// encountered. This is the standard fmt.Print() method wrapped with the given +// color. +func (c *Color) Println(a ...interface{}) (n int, err error) { + c.Set() + defer c.unset() + + return fmt.Fprintln(Output, a...) +} + +// Sprint is just like Print, but returns a string instead of printing it. +func (c *Color) Sprint(a ...interface{}) string { + return c.wrap(fmt.Sprint(a...)) +} + +// Sprintln is just like Println, but returns a string instead of printing it. +func (c *Color) Sprintln(a ...interface{}) string { + return c.wrap(fmt.Sprintln(a...)) +} + +// Sprintf is just like Printf, but returns a string instead of printing it. +func (c *Color) Sprintf(format string, a ...interface{}) string { + return c.wrap(fmt.Sprintf(format, a...)) +} + +// FprintFunc returns a new function that prints the passed arguments as +// colorized with color.Fprint(). +func (c *Color) FprintFunc() func(w io.Writer, a ...interface{}) { + return func(w io.Writer, a ...interface{}) { + c.Fprint(w, a...) + } +} + +// PrintFunc returns a new function that prints the passed arguments as +// colorized with color.Print(). +func (c *Color) PrintFunc() func(a ...interface{}) { + return func(a ...interface{}) { + c.Print(a...) + } +} + +// FprintfFunc returns a new function that prints the passed arguments as +// colorized with color.Fprintf(). +func (c *Color) FprintfFunc() func(w io.Writer, format string, a ...interface{}) { + return func(w io.Writer, format string, a ...interface{}) { + c.Fprintf(w, format, a...) + } +} + +// PrintfFunc returns a new function that prints the passed arguments as +// colorized with color.Printf(). +func (c *Color) PrintfFunc() func(format string, a ...interface{}) { + return func(format string, a ...interface{}) { + c.Printf(format, a...) + } +} + +// FprintlnFunc returns a new function that prints the passed arguments as +// colorized with color.Fprintln(). +func (c *Color) FprintlnFunc() func(w io.Writer, a ...interface{}) { + return func(w io.Writer, a ...interface{}) { + c.Fprintln(w, a...) + } +} + +// PrintlnFunc returns a new function that prints the passed arguments as +// colorized with color.Println(). +func (c *Color) PrintlnFunc() func(a ...interface{}) { + return func(a ...interface{}) { + c.Println(a...) + } +} + +// SprintFunc returns a new function that returns colorized strings for the +// given arguments with fmt.Sprint(). Useful to put into or mix into other +// string. Windows users should use this in conjunction with color.Output, example: +// +// put := New(FgYellow).SprintFunc() +// fmt.Fprintf(color.Output, "This is a %s", put("warning")) +func (c *Color) SprintFunc() func(a ...interface{}) string { + return func(a ...interface{}) string { + return c.wrap(fmt.Sprint(a...)) + } +} + +// SprintfFunc returns a new function that returns colorized strings for the +// given arguments with fmt.Sprintf(). Useful to put into or mix into other +// string. Windows users should use this in conjunction with color.Output. +func (c *Color) SprintfFunc() func(format string, a ...interface{}) string { + return func(format string, a ...interface{}) string { + return c.wrap(fmt.Sprintf(format, a...)) + } +} + +// SprintlnFunc returns a new function that returns colorized strings for the +// given arguments with fmt.Sprintln(). Useful to put into or mix into other +// string. Windows users should use this in conjunction with color.Output. 
+func (c *Color) SprintlnFunc() func(a ...interface{}) string { + return func(a ...interface{}) string { + return c.wrap(fmt.Sprintln(a...)) + } +} + +// sequence returns a formatted SGR sequence to be plugged into a "\x1b[...m" +// an example output might be: "1;36" -> bold cyan +func (c *Color) sequence() string { + format := make([]string, len(c.params)) + for i, v := range c.params { + format[i] = strconv.Itoa(int(v)) + } + + return strings.Join(format, ";") +} + +// wrap wraps the s string with the colors attributes. The string is ready to +// be printed. +func (c *Color) wrap(s string) string { + if c.isNoColorSet() { + return s + } + + return c.format() + s + c.unformat() +} + +func (c *Color) format() string { + return fmt.Sprintf("%s[%sm", escape, c.sequence()) +} + +func (c *Color) unformat() string { + return fmt.Sprintf("%s[%dm", escape, Reset) +} + +// DisableColor disables the color output. Useful to not change any existing +// code and still being able to output. Can be used for flags like +// "--no-color". To enable back use EnableColor() method. +func (c *Color) DisableColor() { + c.noColor = boolPtr(true) +} + +// EnableColor enables the color output. Use it in conjunction with +// DisableColor(). Otherwise this method has no side effects. +func (c *Color) EnableColor() { + c.noColor = boolPtr(false) +} + +func (c *Color) isNoColorSet() bool { + // check first if we have user setted action + if c.noColor != nil { + return *c.noColor + } + + // if not return the global option, which is disabled by default + return NoColor +} + +// Equals returns a boolean value indicating whether two colors are equal. +func (c *Color) Equals(c2 *Color) bool { + if len(c.params) != len(c2.params) { + return false + } + + for _, attr := range c.params { + if !c2.attrExists(attr) { + return false + } + } + + return true +} + +func (c *Color) attrExists(a Attribute) bool { + for _, attr := range c.params { + if attr == a { + return true + } + } + + return false +} + +func boolPtr(v bool) *bool { + return &v +} + +func getCachedColor(p Attribute) *Color { + colorsCacheMu.Lock() + defer colorsCacheMu.Unlock() + + c, ok := colorsCache[p] + if !ok { + c = New(p) + colorsCache[p] = c + } + + return c +} + +func colorPrint(format string, p Attribute, a ...interface{}) { + c := getCachedColor(p) + + if !strings.HasSuffix(format, "\n") { + format += "\n" + } + + if len(a) == 0 { + c.Print(format) + } else { + c.Printf(format, a...) + } +} + +func colorString(format string, p Attribute, a ...interface{}) string { + c := getCachedColor(p) + + if len(a) == 0 { + return c.SprintFunc()(format) + } + + return c.SprintfFunc()(format, a...) +} + +// Black is a convenient helper function to print with black foreground. A +// newline is appended to format by default. +func Black(format string, a ...interface{}) { colorPrint(format, FgBlack, a...) } + +// Red is a convenient helper function to print with red foreground. A +// newline is appended to format by default. +func Red(format string, a ...interface{}) { colorPrint(format, FgRed, a...) } + +// Green is a convenient helper function to print with green foreground. A +// newline is appended to format by default. +func Green(format string, a ...interface{}) { colorPrint(format, FgGreen, a...) } + +// Yellow is a convenient helper function to print with yellow foreground. +// A newline is appended to format by default. +func Yellow(format string, a ...interface{}) { colorPrint(format, FgYellow, a...) 
} + +// Blue is a convenient helper function to print with blue foreground. A +// newline is appended to format by default. +func Blue(format string, a ...interface{}) { colorPrint(format, FgBlue, a...) } + +// Magenta is a convenient helper function to print with magenta foreground. +// A newline is appended to format by default. +func Magenta(format string, a ...interface{}) { colorPrint(format, FgMagenta, a...) } + +// Cyan is a convenient helper function to print with cyan foreground. A +// newline is appended to format by default. +func Cyan(format string, a ...interface{}) { colorPrint(format, FgCyan, a...) } + +// White is a convenient helper function to print with white foreground. A +// newline is appended to format by default. +func White(format string, a ...interface{}) { colorPrint(format, FgWhite, a...) } + +// BlackString is a convenient helper function to return a string with black +// foreground. +func BlackString(format string, a ...interface{}) string { return colorString(format, FgBlack, a...) } + +// RedString is a convenient helper function to return a string with red +// foreground. +func RedString(format string, a ...interface{}) string { return colorString(format, FgRed, a...) } + +// GreenString is a convenient helper function to return a string with green +// foreground. +func GreenString(format string, a ...interface{}) string { return colorString(format, FgGreen, a...) } + +// YellowString is a convenient helper function to return a string with yellow +// foreground. +func YellowString(format string, a ...interface{}) string { return colorString(format, FgYellow, a...) } + +// BlueString is a convenient helper function to return a string with blue +// foreground. +func BlueString(format string, a ...interface{}) string { return colorString(format, FgBlue, a...) } + +// MagentaString is a convenient helper function to return a string with magenta +// foreground. +func MagentaString(format string, a ...interface{}) string { + return colorString(format, FgMagenta, a...) +} + +// CyanString is a convenient helper function to return a string with cyan +// foreground. +func CyanString(format string, a ...interface{}) string { return colorString(format, FgCyan, a...) } + +// WhiteString is a convenient helper function to return a string with white +// foreground. +func WhiteString(format string, a ...interface{}) string { return colorString(format, FgWhite, a...) } + +// HiBlack is a convenient helper function to print with hi-intensity black foreground. A +// newline is appended to format by default. +func HiBlack(format string, a ...interface{}) { colorPrint(format, FgHiBlack, a...) } + +// HiRed is a convenient helper function to print with hi-intensity red foreground. A +// newline is appended to format by default. +func HiRed(format string, a ...interface{}) { colorPrint(format, FgHiRed, a...) } + +// HiGreen is a convenient helper function to print with hi-intensity green foreground. A +// newline is appended to format by default. +func HiGreen(format string, a ...interface{}) { colorPrint(format, FgHiGreen, a...) } + +// HiYellow is a convenient helper function to print with hi-intensity yellow foreground. +// A newline is appended to format by default. +func HiYellow(format string, a ...interface{}) { colorPrint(format, FgHiYellow, a...) } + +// HiBlue is a convenient helper function to print with hi-intensity blue foreground. A +// newline is appended to format by default. +func HiBlue(format string, a ...interface{}) { colorPrint(format, FgHiBlue, a...) 
} + +// HiMagenta is a convenient helper function to print with hi-intensity magenta foreground. +// A newline is appended to format by default. +func HiMagenta(format string, a ...interface{}) { colorPrint(format, FgHiMagenta, a...) } + +// HiCyan is a convenient helper function to print with hi-intensity cyan foreground. A +// newline is appended to format by default. +func HiCyan(format string, a ...interface{}) { colorPrint(format, FgHiCyan, a...) } + +// HiWhite is a convenient helper function to print with hi-intensity white foreground. A +// newline is appended to format by default. +func HiWhite(format string, a ...interface{}) { colorPrint(format, FgHiWhite, a...) } + +// HiBlackString is a convenient helper function to return a string with hi-intensity black +// foreground. +func HiBlackString(format string, a ...interface{}) string { + return colorString(format, FgHiBlack, a...) +} + +// HiRedString is a convenient helper function to return a string with hi-intensity red +// foreground. +func HiRedString(format string, a ...interface{}) string { return colorString(format, FgHiRed, a...) } + +// HiGreenString is a convenient helper function to return a string with hi-intensity green +// foreground. +func HiGreenString(format string, a ...interface{}) string { + return colorString(format, FgHiGreen, a...) +} + +// HiYellowString is a convenient helper function to return a string with hi-intensity yellow +// foreground. +func HiYellowString(format string, a ...interface{}) string { + return colorString(format, FgHiYellow, a...) +} + +// HiBlueString is a convenient helper function to return a string with hi-intensity blue +// foreground. +func HiBlueString(format string, a ...interface{}) string { return colorString(format, FgHiBlue, a...) } + +// HiMagentaString is a convenient helper function to return a string with hi-intensity magenta +// foreground. +func HiMagentaString(format string, a ...interface{}) string { + return colorString(format, FgHiMagenta, a...) +} + +// HiCyanString is a convenient helper function to return a string with hi-intensity cyan +// foreground. +func HiCyanString(format string, a ...interface{}) string { return colorString(format, FgHiCyan, a...) } + +// HiWhiteString is a convenient helper function to return a string with hi-intensity white +// foreground. +func HiWhiteString(format string, a ...interface{}) string { + return colorString(format, FgHiWhite, a...) +} diff --git a/vendor/github.com/fatih/color/doc.go b/vendor/github.com/fatih/color/doc.go new file mode 100644 index 00000000..cf1e9650 --- /dev/null +++ b/vendor/github.com/fatih/color/doc.go @@ -0,0 +1,133 @@ +/* +Package color is an ANSI color package to output colorized or SGR defined +output to the standard output. The API can be used in several way, pick one +that suits you. + +Use simple and default helper functions with predefined foreground colors: + + color.Cyan("Prints text in cyan.") + + // a newline will be appended automatically + color.Blue("Prints %s in blue.", "text") + + // More default foreground colors.. + color.Red("We have red") + color.Yellow("Yellow color too!") + color.Magenta("And many others ..") + + // Hi-intensity colors + color.HiGreen("Bright green color.") + color.HiBlack("Bright black means gray..") + color.HiWhite("Shiny white color!") + +However there are times where custom color mixes are required. Below are some +examples to create custom color objects and use the print functions of each +separate color object. 
+ + // Create a new color object + c := color.New(color.FgCyan).Add(color.Underline) + c.Println("Prints cyan text with an underline.") + + // Or just add them to New() + d := color.New(color.FgCyan, color.Bold) + d.Printf("This prints bold cyan %s\n", "too!.") + + + // Mix up foreground and background colors, create new mixes! + red := color.New(color.FgRed) + + boldRed := red.Add(color.Bold) + boldRed.Println("This will print text in bold red.") + + whiteBackground := red.Add(color.BgWhite) + whiteBackground.Println("Red text with White background.") + + // Use your own io.Writer output + color.New(color.FgBlue).Fprintln(myWriter, "blue color!") + + blue := color.New(color.FgBlue) + blue.Fprint(myWriter, "This will print text in blue.") + +You can create PrintXxx functions to simplify even more: + + // Create a custom print function for convenient + red := color.New(color.FgRed).PrintfFunc() + red("warning") + red("error: %s", err) + + // Mix up multiple attributes + notice := color.New(color.Bold, color.FgGreen).PrintlnFunc() + notice("don't forget this...") + +You can also FprintXxx functions to pass your own io.Writer: + + blue := color.New(FgBlue).FprintfFunc() + blue(myWriter, "important notice: %s", stars) + + // Mix up with multiple attributes + success := color.New(color.Bold, color.FgGreen).FprintlnFunc() + success(myWriter, don't forget this...") + + +Or create SprintXxx functions to mix strings with other non-colorized strings: + + yellow := New(FgYellow).SprintFunc() + red := New(FgRed).SprintFunc() + + fmt.Printf("this is a %s and this is %s.\n", yellow("warning"), red("error")) + + info := New(FgWhite, BgGreen).SprintFunc() + fmt.Printf("this %s rocks!\n", info("package")) + +Windows support is enabled by default. All Print functions work as intended. +However only for color.SprintXXX functions, user should use fmt.FprintXXX and +set the output to color.Output: + + fmt.Fprintf(color.Output, "Windows support: %s", color.GreenString("PASS")) + + info := New(FgWhite, BgGreen).SprintFunc() + fmt.Fprintf(color.Output, "this %s rocks!\n", info("package")) + +Using with existing code is possible. Just use the Set() method to set the +standard output to the given parameters. That way a rewrite of an existing +code is not required. + + // Use handy standard colors. + color.Set(color.FgYellow) + + fmt.Println("Existing text will be now in Yellow") + fmt.Printf("This one %s\n", "too") + + color.Unset() // don't forget to unset + + // You can mix up parameters + color.Set(color.FgMagenta, color.Bold) + defer color.Unset() // use it in your function + + fmt.Println("All text will be now bold magenta.") + +There might be a case where you want to disable color output (for example to +pipe the standard output of your app to somewhere else). `Color` has support to +disable colors both globally and for single color definition. For example +suppose you have a CLI app and a `--no-color` bool flag. You can easily disable +the color output with: + + var flagNoColor = flag.Bool("no-color", false, "Disable color output") + + if *flagNoColor { + color.NoColor = true // disables colorized output + } + +It also has support for single color definitions (local). 
You can +disable/enable color output on the fly: + + c := color.New(color.FgCyan) + c.Println("Prints cyan text") + + c.DisableColor() + c.Println("This is printed without any color") + + c.EnableColor() + c.Println("This prints again cyan...") +*/ +package color diff --git a/vendor/github.com/goccy/go-yaml/.codecov.yml b/vendor/github.com/goccy/go-yaml/.codecov.yml new file mode 100644 index 00000000..8364eea0 --- /dev/null +++ b/vendor/github.com/goccy/go-yaml/.codecov.yml @@ -0,0 +1,31 @@ +codecov: + require_ci_to_pass: yes + +coverage: + precision: 2 + round: down + range: "70...100" + + status: + project: + default: + target: 75% + threshold: 2% + patch: off + changes: no + +parsers: + gcov: + branch_detection: + conditional: yes + loop: yes + method: no + macro: no + +comment: + layout: "header,diff" + behavior: default + require_changes: no + +ignore: + - ast diff --git a/vendor/github.com/goccy/go-yaml/CHANGELOG.md b/vendor/github.com/goccy/go-yaml/CHANGELOG.md new file mode 100644 index 00000000..c8f820de --- /dev/null +++ b/vendor/github.com/goccy/go-yaml/CHANGELOG.md @@ -0,0 +1,186 @@ +# 1.11.2 - 2023-09-15 + +### Fix bugs + +- Fix quoted comments ( #370 ) +- Fix handle of space at start or last ( #376 ) +- Fix sequence with comment ( #390 ) + +# 1.11.1 - 2023-09-14 + +### Fix bugs + +- Handle `\r` in a double-quoted string the same as `\n` ( #372 ) +- Replace loop with n.Values = append(n.Values, target.Values...) ( #380 ) +- Skip encoding an inline field if it is null ( #386 ) +- Fix comment parsing with null value ( #388 ) + +# 1.11.0 - 2023-04-03 + +### Features + +- Supports dynamically switch encode and decode processing for a given type + +# 1.10.1 - 2023-03-28 + +### Features + +- Quote YAML 1.1 bools at encoding time for compatibility with other legacy parsers +- Add support of 32-bit architecture + +### Fix bugs + +- Don't trim all space characters in block style sequence +- Support strings starting with `@` + +# 1.10.0 - 2023-03-01 + +### Fix bugs + +Reversible conversion of comments was not working in various cases, which has been corrected. +**Breaking Change** exists in the comment map interface. However, if you are dealing with CommentMap directly, there is no problem. 
+ + +# 1.9.8 - 2022-12-19 + +### Fix feature + +- Append new line at the end of file ( #329 ) + +### Fix bugs + +- Fix custom marshaler ( #333, #334 ) +- Fix behavior when struct fields conflicted( #335 ) +- Fix position calculation for literal, folded and raw folded strings ( #330 ) + +# 1.9.7 - 2022-12-03 + +### Fix bugs + +- Fix handling of quoted map key ( #328 ) +- Fix resusing process of scanning context ( #322 ) + +## v1.9.6 - 2022-10-26 + +### New Features + +- Introduce MapKeyNode interface to limit node types for map key ( #312 ) + +### Fix bugs + +- Quote strings with special characters in flow mode ( #270 ) +- typeError implements PrettyPrinter interface ( #280 ) +- Fix incorrect const type ( #284 ) +- Fix large literals type inference on 32 bits ( #293 ) +- Fix UTF-8 characters ( #294 ) +- Fix decoding of unknown aliases ( #317 ) +- Fix stream encoder for insert a separator between each encoded document ( #318 ) + +### Update + +- Update golang.org/x/sys ( #289 ) +- Update Go version in CI ( #295 ) +- Add test cases for missing keys to struct literals ( #300 ) + +## v1.9.5 - 2022-01-12 + +### New Features + +* Add UseSingleQuote option ( #265 ) + +### Fix bugs + +* Preserve defaults while decoding nested structs ( #260 ) +* Fix minor typo in decodeInit error ( #264 ) +* Handle empty sequence entries ( #275 ) +* Fix encoding of sequence with multiline string ( #276 ) +* Fix encoding of BytesMarshaler type ( #277 ) +* Fix indentState logic for multi-line value ( #278 ) + +## v1.9.4 - 2021-10-12 + +### Fix bugs + +* Keep prev/next reference between tokens containing comments when filtering comment tokens ( #257 ) +* Supports escaping reserved keywords in PathBuilder ( #258 ) + +## v1.9.3 - 2021-09-07 + +### New Features + +* Support encoding and decoding `time.Duration` fields ( #246 ) +* Allow reserved characters for key name in YAMLPath ( #251 ) +* Support getting YAMLPath from ast.Node ( #252 ) +* Support CommentToMap option ( #253 ) + +### Fix bugs + +* Fix encoding nested sequences with `yaml.IndentSequence` ( #241 ) +* Fix error reporting on inline structs in strict mode ( #244, #245 ) +* Fix encoding of large floats ( #247 ) + +### Improve workflow + +* Migrate CI from CircleCI to GitHub Action ( #249 ) +* Add workflow for ycat ( #250 ) + +## v1.9.2 - 2021-07-26 + +### Support WithComment option ( #238 ) + +`yaml.WithComment` is a option for encoding with comment. +The position where you want to add a comment is represented by YAMLPath, and it is the key of `yaml.CommentMap`. +Also, you can select `Head` comment or `Line` comment as the comment type. + +## v1.9.1 - 2021-07-20 + +### Fix DecodeFromNode ( #237 ) + +- Fix YAML handling where anchor exists + +## v1.9.0 - 2021-07-19 + +### New features + +- Support encoding of comment node ( #233 ) +- Support `yaml.NodeToValue(ast.Node, interface{}, ...DecodeOption) error` ( #236 ) + - Can convert a AST node to a value directly + +### Fix decoder for comment + +- Fix parsing of literal with comment ( #234 ) + +### Rename API ( #235 ) + +- Rename `MarshalWithContext` to `MarshalContext` +- Rename `UnmarshalWithContext` to `UnmarshalContext` + +## v1.8.10 - 2021-07-02 + +### Fixed bugs + +- Fix searching anchor by alias name ( #212 ) +- Fixing Issue 186, scanner should account for newline characters when processing multi-line text. Without this source annotations line/column number (for this and all subsequent tokens) is inconsistent with plain text editors. e.g. https://github.com/goccy/go-yaml/issues/186. 
This addresses the issue specifically for single and double quote text only. ( #210 ) +- Add error for unterminated flow mapping node ( #213 ) +- Handle missing required field validation ( #221 ) +- Nicely format unexpected node type errors ( #229 ) +- Support to encode map which has defined type key ( #231 ) + +### New features + +- Support sequence indentation by EncodeOption ( #232 ) + +## v1.8.9 - 2021-03-01 + +### Fixed bugs + +- Fix origin buffer for DocumentHeader and DocumentEnd and Directive +- Fix origin buffer for anchor value +- Fix syntax error about map value +- Fix parsing MergeKey ('<<') characters +- Fix encoding of float value +- Fix incorrect column annotation when single or double quotes are used + +### New features + +- Support to encode/decode of ast.Node directly diff --git a/vendor/github.com/goccy/go-yaml/LICENSE b/vendor/github.com/goccy/go-yaml/LICENSE new file mode 100644 index 00000000..04485ce6 --- /dev/null +++ b/vendor/github.com/goccy/go-yaml/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2019 Masaaki Goshima + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/goccy/go-yaml/Makefile b/vendor/github.com/goccy/go-yaml/Makefile new file mode 100644 index 00000000..1b1d9239 --- /dev/null +++ b/vendor/github.com/goccy/go-yaml/Makefile @@ -0,0 +1,19 @@ +.PHONY: test +test: + go test -v -race ./... + +.PHONY: simple-test +simple-test: + go test -v ./... + +.PHONY: cover +cover: + go test -coverprofile=cover.out ./... + +.PHONY: cover-html +cover-html: cover + go tool cover -html=cover.out + +.PHONY: ycat/build +ycat/build: + go build -o ycat ./cmd/ycat diff --git a/vendor/github.com/goccy/go-yaml/README.md b/vendor/github.com/goccy/go-yaml/README.md new file mode 100644 index 00000000..94523491 --- /dev/null +++ b/vendor/github.com/goccy/go-yaml/README.md @@ -0,0 +1,370 @@ +# YAML support for the Go language + +[![PkgGoDev](https://pkg.go.dev/badge/github.com/goccy/go-yaml)](https://pkg.go.dev/github.com/goccy/go-yaml) +![Go](https://github.com/goccy/go-yaml/workflows/Go/badge.svg) +[![codecov](https://codecov.io/gh/goccy/go-yaml/branch/master/graph/badge.svg)](https://codecov.io/gh/goccy/go-yaml) +[![Go Report Card](https://goreportcard.com/badge/github.com/goccy/go-yaml)](https://goreportcard.com/report/github.com/goccy/go-yaml) + + + +# Why a new library? + +As of this writing, there already exists a de facto standard library for YAML processing for Go: [https://github.com/go-yaml/yaml](https://github.com/go-yaml/yaml). 
However we feel that some features are lacking, namely: + +- Pretty format for error notifications +- Direct manipulation of YAML abstract syntax tree +- Support for `Anchor` and `Alias` when marshaling +- Allow referencing elements declared in another file via anchors + +# Features + +- Pretty format for error notifications +- Supports `Scanner` or `Lexer` or `Parser` as public API +- Supports `Anchor` and `Alias` to Marshaler +- Allow referencing elements declared in another file via anchors +- Extract value or AST by YAMLPath ( YAMLPath is like a JSONPath ) + +# Installation + +```sh +go get -u github.com/goccy/go-yaml +``` + +# Synopsis + +## 1. Simple Encode/Decode + +Has an interface like `go-yaml/yaml` using `reflect` + +```go +var v struct { + A int + B string +} +v.A = 1 +v.B = "hello" +bytes, err := yaml.Marshal(v) +if err != nil { + //... +} +fmt.Println(string(bytes)) // "a: 1\nb: hello\n" +``` + +```go + yml := ` +%YAML 1.2 +--- +a: 1 +b: c +` +var v struct { + A int + B string +} +if err := yaml.Unmarshal([]byte(yml), &v); err != nil { + //... +} +``` + +To control marshal/unmarshal behavior, you can use the `yaml` tag. + +```go + yml := `--- +foo: 1 +bar: c +` +var v struct { + A int `yaml:"foo"` + B string `yaml:"bar"` +} +if err := yaml.Unmarshal([]byte(yml), &v); err != nil { + //... +} +``` + +For convenience, we also accept the `json` tag. Note that not all options from +the `json` tag will have significance when parsing YAML documents. If both +tags exist, `yaml` tag will take precedence. + +```go + yml := `--- +foo: 1 +bar: c +` +var v struct { + A int `json:"foo"` + B string `json:"bar"` +} +if err := yaml.Unmarshal([]byte(yml), &v); err != nil { + //... +} +``` + +For custom marshal/unmarshaling, implement either `Bytes` or `Interface` variant of marshaler/unmarshaler. The difference is that while `BytesMarshaler`/`BytesUnmarshaler` behaves like [`encoding/json`](https://pkg.go.dev/encoding/json) and `InterfaceMarshaler`/`InterfaceUnmarshaler` behaves like [`gopkg.in/yaml.v2`](https://pkg.go.dev/gopkg.in/yaml.v2). + +Semantically both are the same, but they differ in performance. Because indentation matters in YAML, you cannot simply accept a valid YAML fragment from a Marshaler, and expect it to work when it is attached to the parent container's serialized form. Therefore when we receive use the `BytesMarshaler`, which returns `[]byte`, we must decode it once to figure out how to make it work in the given context. If you use the `InterfaceMarshaler`, we can skip the decoding. + +If you are repeatedly marshaling complex objects, the latter is always better +performance wise. But if you are, for example, just providing a choice between +a config file format that is read only once, the former is probably easier to +code. + +## 2. Reference elements declared in another file + +`testdata` directory contains `anchor.yml` file: + +```shell +├── testdata +   └── anchor.yml +``` + +And `anchor.yml` is defined as follows: + +```yaml +a: &a + b: 1 + c: hello +``` + +Then, if `yaml.ReferenceDirs("testdata")` option is passed to `yaml.Decoder`, + `Decoder` tries to find the anchor definition from YAML files the under `testdata` directory. + +```go +buf := bytes.NewBufferString("a: *a\n") +dec := yaml.NewDecoder(buf, yaml.ReferenceDirs("testdata")) +var v struct { + A struct { + B int + C string + } +} +if err := dec.Decode(&v); err != nil { + //... +} +fmt.Printf("%+v\n", v) // {A:{B:1 C:hello}} +``` + +## 3. Encode with `Anchor` and `Alias` + +### 3.1. 
Explicitly declared `Anchor` name and `Alias` name + +If you want to use `anchor` or `alias`, you can define it as a struct tag. + +```go +type T struct { + A int + B string +} +var v struct { + C *T `yaml:"c,anchor=x"` + D *T `yaml:"d,alias=x"` +} +v.C = &T{A: 1, B: "hello"} +v.D = v.C +bytes, err := yaml.Marshal(v) +if err != nil { + panic(err) +} +fmt.Println(string(bytes)) +/* +c: &x + a: 1 + b: hello +d: *x +*/ +``` + +### 3.2. Implicitly declared `Anchor` and `Alias` names + +If you do not explicitly declare the anchor name, the default behavior is to +use the equivalent of `strings.ToLower($FieldName)` as the name of the anchor. + +If you do not explicitly declare the alias name AND the value is a pointer +to another element, we look up the anchor name by finding out which anchor +field the value is assigned to by looking up its pointer address. + +```go +type T struct { + I int + S string +} +var v struct { + A *T `yaml:"a,anchor"` + B *T `yaml:"b,anchor"` + C *T `yaml:"c,alias"` + D *T `yaml:"d,alias"` +} +v.A = &T{I: 1, S: "hello"} +v.B = &T{I: 2, S: "world"} +v.C = v.A // C has same pointer address to A +v.D = v.B // D has same pointer address to B +bytes, err := yaml.Marshal(v) +if err != nil { + //... +} +fmt.Println(string(bytes)) +/* +a: &a + i: 1 + s: hello +b: &b + i: 2 + s: world +c: *a +d: *b +*/ +``` + +### 3.3 MergeKey and Alias + +Merge key and alias ( `<<: *alias` ) can be used by embedding a structure with the `inline,alias` tag. + +```go +type Person struct { + *Person `yaml:",omitempty,inline,alias"` // embed Person type for default value + Name string `yaml:",omitempty"` + Age int `yaml:",omitempty"` +} +defaultPerson := &Person{ + Name: "John Smith", + Age: 20, +} +people := []*Person{ + { + Person: defaultPerson, // assign default value + Name: "Ken", // override Name property + Age: 10, // override Age property + }, + { + Person: defaultPerson, // assign default value only + }, +} +var doc struct { + Default *Person `yaml:"default,anchor"` + People []*Person `yaml:"people"` +} +doc.Default = defaultPerson +doc.People = people +bytes, err := yaml.Marshal(doc) +if err != nil { + //... +} +fmt.Println(string(bytes)) +/* +default: &default + name: John Smith + age: 20 +people: +- <<: *default + name: Ken + age: 10 +- <<: *default +*/ +``` + +## 4. Pretty Formatted Errors + +Error values produced during parsing have two extra features over regular +error values. + +First, by default, they contain extra information on the location of the error +from the source YAML document, to make it easier to find the error location. + +Second, the error messages can optionally be colorized. + +If you would like to control exactly how the output looks like, consider +using `yaml.FormatError`, which accepts two boolean values to +control turning these features on or off. + + + +## 5. Use YAMLPath + +```go +yml := ` +store: + book: + - author: john + price: 10 + - author: ken + price: 12 + bicycle: + color: red + price: 19.95 +` +path, err := yaml.PathString("$.store.book[*].author") +if err != nil { + //... +} +var authors []string +if err := path.Read(strings.NewReader(yml), &authors); err != nil { + //... 
+} +fmt.Println(authors) +// [john ken] +``` + +### 5.1 Print customized error with YAML source code + +```go +package main + +import ( + "fmt" + + "github.com/goccy/go-yaml" +) + +func main() { + yml := ` +a: 1 +b: "hello" +` + var v struct { + A int + B string + } + if err := yaml.Unmarshal([]byte(yml), &v); err != nil { + panic(err) + } + if v.A != 2 { + // output error with YAML source + path, err := yaml.PathString("$.a") + if err != nil { + panic(err) + } + source, err := path.AnnotateSource([]byte(yml), true) + if err != nil { + panic(err) + } + fmt.Printf("a value expected 2 but actual %d:\n%s\n", v.A, string(source)) + } +} +``` + +output result is the following: + + + + +# Tools + +## ycat + +print yaml file with color + +ycat + +### Installation + +```sh +go install github.com/goccy/go-yaml/cmd/ycat@latest +``` + +# Looking for Sponsors + +I'm looking for sponsors this library. This library is being developed as a personal project in my spare time. If you want a quick response or problem resolution when using this library in your project, please register as a [sponsor](https://github.com/sponsors/goccy). I will cooperate as much as possible. Of course, this library is developed as an MIT license, so you can use it freely for free. + +# License + +MIT diff --git a/vendor/github.com/goccy/go-yaml/ast/ast.go b/vendor/github.com/goccy/go-yaml/ast/ast.go new file mode 100644 index 00000000..b4d5ec41 --- /dev/null +++ b/vendor/github.com/goccy/go-yaml/ast/ast.go @@ -0,0 +1,2115 @@ +package ast + +import ( + "fmt" + "io" + "math" + "strconv" + "strings" + + "github.com/goccy/go-yaml/token" + "golang.org/x/xerrors" +) + +var ( + ErrInvalidTokenType = xerrors.New("invalid token type") + ErrInvalidAnchorName = xerrors.New("invalid anchor name") + ErrInvalidAliasName = xerrors.New("invalid alias name") +) + +// NodeType type identifier of node +type NodeType int + +const ( + // UnknownNodeType type identifier for default + UnknownNodeType NodeType = iota + // DocumentType type identifier for document node + DocumentType + // NullType type identifier for null node + NullType + // BoolType type identifier for boolean node + BoolType + // IntegerType type identifier for integer node + IntegerType + // FloatType type identifier for float node + FloatType + // InfinityType type identifier for infinity node + InfinityType + // NanType type identifier for nan node + NanType + // StringType type identifier for string node + StringType + // MergeKeyType type identifier for merge key node + MergeKeyType + // LiteralType type identifier for literal node + LiteralType + // MappingType type identifier for mapping node + MappingType + // MappingKeyType type identifier for mapping key node + MappingKeyType + // MappingValueType type identifier for mapping value node + MappingValueType + // SequenceType type identifier for sequence node + SequenceType + // AnchorType type identifier for anchor node + AnchorType + // AliasType type identifier for alias node + AliasType + // DirectiveType type identifier for directive node + DirectiveType + // TagType type identifier for tag node + TagType + // CommentType type identifier for comment node + CommentType + // CommentGroupType type identifier for comment group node + CommentGroupType +) + +// String node type identifier to text +func (t NodeType) String() string { + switch t { + case UnknownNodeType: + return "UnknownNode" + case DocumentType: + return "Document" + case NullType: + return "Null" + case BoolType: + return "Bool" + case IntegerType: + return 
"Integer" + case FloatType: + return "Float" + case InfinityType: + return "Infinity" + case NanType: + return "Nan" + case StringType: + return "String" + case MergeKeyType: + return "MergeKey" + case LiteralType: + return "Literal" + case MappingType: + return "Mapping" + case MappingKeyType: + return "MappingKey" + case MappingValueType: + return "MappingValue" + case SequenceType: + return "Sequence" + case AnchorType: + return "Anchor" + case AliasType: + return "Alias" + case DirectiveType: + return "Directive" + case TagType: + return "Tag" + case CommentType: + return "Comment" + case CommentGroupType: + return "CommentGroup" + } + return "" +} + +// String node type identifier to YAML Structure name +// based on https://yaml.org/spec/1.2/spec.html +func (t NodeType) YAMLName() string { + switch t { + case UnknownNodeType: + return "unknown" + case DocumentType: + return "document" + case NullType: + return "null" + case BoolType: + return "boolean" + case IntegerType: + return "int" + case FloatType: + return "float" + case InfinityType: + return "inf" + case NanType: + return "nan" + case StringType: + return "string" + case MergeKeyType: + return "merge key" + case LiteralType: + return "scalar" + case MappingType: + return "mapping" + case MappingKeyType: + return "key" + case MappingValueType: + return "value" + case SequenceType: + return "sequence" + case AnchorType: + return "anchor" + case AliasType: + return "alias" + case DirectiveType: + return "directive" + case TagType: + return "tag" + case CommentType: + return "comment" + case CommentGroupType: + return "comment" + } + return "" +} + +// Node type of node +type Node interface { + io.Reader + // String node to text + String() string + // GetToken returns token instance + GetToken() *token.Token + // Type returns type of node + Type() NodeType + // AddColumn add column number to child nodes recursively + AddColumn(int) + // SetComment set comment token to node + SetComment(*CommentGroupNode) error + // Comment returns comment token instance + GetComment() *CommentGroupNode + // GetPath returns YAMLPath for the current node + GetPath() string + // SetPath set YAMLPath for the current node + SetPath(string) + // MarshalYAML + MarshalYAML() ([]byte, error) + // already read length + readLen() int + // append read length + addReadLen(int) + // clean read length + clearLen() +} + +// MapKeyNode type for map key node +type MapKeyNode interface { + Node + // String node to text without comment + stringWithoutComment() string +} + +// ScalarNode type for scalar node +type ScalarNode interface { + MapKeyNode + GetValue() interface{} +} + +type BaseNode struct { + Path string + Comment *CommentGroupNode + read int +} + +func addCommentString(base string, node *CommentGroupNode) string { + return fmt.Sprintf("%s %s", base, node.String()) +} + +func (n *BaseNode) readLen() int { + return n.read +} + +func (n *BaseNode) clearLen() { + n.read = 0 +} + +func (n *BaseNode) addReadLen(len int) { + n.read += len +} + +// GetPath returns YAMLPath for the current node. +func (n *BaseNode) GetPath() string { + if n == nil { + return "" + } + return n.Path +} + +// SetPath set YAMLPath for the current node. 
+func (n *BaseNode) SetPath(path string) { + if n == nil { + return + } + n.Path = path +} + +// GetComment returns comment token instance +func (n *BaseNode) GetComment() *CommentGroupNode { + return n.Comment +} + +// SetComment set comment token +func (n *BaseNode) SetComment(node *CommentGroupNode) error { + n.Comment = node + return nil +} + +func min(a, b int) int { + if a < b { + return a + } + return b +} + +func readNode(p []byte, node Node) (int, error) { + s := node.String() + readLen := node.readLen() + remain := len(s) - readLen + if remain == 0 { + node.clearLen() + return 0, io.EOF + } + size := min(remain, len(p)) + for idx, b := range []byte(s[readLen : readLen+size]) { + p[idx] = byte(b) + } + node.addReadLen(size) + return size, nil +} + +// Null create node for null value +func Null(tk *token.Token) *NullNode { + return &NullNode{ + BaseNode: &BaseNode{}, + Token: tk, + } +} + +// Bool create node for boolean value +func Bool(tk *token.Token) *BoolNode { + b, _ := strconv.ParseBool(tk.Value) + return &BoolNode{ + BaseNode: &BaseNode{}, + Token: tk, + Value: b, + } +} + +// Integer create node for integer value +func Integer(tk *token.Token) *IntegerNode { + value := removeUnderScoreFromNumber(tk.Value) + switch tk.Type { + case token.BinaryIntegerType: + // skip two characters because binary token starts with '0b' + skipCharacterNum := 2 + negativePrefix := "" + if value[0] == '-' { + skipCharacterNum++ + negativePrefix = "-" + } + if len(negativePrefix) > 0 { + i, _ := strconv.ParseInt(negativePrefix+value[skipCharacterNum:], 2, 64) + return &IntegerNode{ + BaseNode: &BaseNode{}, + Token: tk, + Value: i, + } + } + i, _ := strconv.ParseUint(negativePrefix+value[skipCharacterNum:], 2, 64) + return &IntegerNode{ + BaseNode: &BaseNode{}, + Token: tk, + Value: i, + } + case token.OctetIntegerType: + // octet token starts with '0o' or '-0o' or '0' or '-0' + skipCharacterNum := 1 + negativePrefix := "" + if value[0] == '-' { + skipCharacterNum++ + if len(value) > 2 && value[2] == 'o' { + skipCharacterNum++ + } + negativePrefix = "-" + } else { + if value[1] == 'o' { + skipCharacterNum++ + } + } + if len(negativePrefix) > 0 { + i, _ := strconv.ParseInt(negativePrefix+value[skipCharacterNum:], 8, 64) + return &IntegerNode{ + BaseNode: &BaseNode{}, + Token: tk, + Value: i, + } + } + i, _ := strconv.ParseUint(value[skipCharacterNum:], 8, 64) + return &IntegerNode{ + BaseNode: &BaseNode{}, + Token: tk, + Value: i, + } + case token.HexIntegerType: + // hex token starts with '0x' or '-0x' + skipCharacterNum := 2 + negativePrefix := "" + if value[0] == '-' { + skipCharacterNum++ + negativePrefix = "-" + } + if len(negativePrefix) > 0 { + i, _ := strconv.ParseInt(negativePrefix+value[skipCharacterNum:], 16, 64) + return &IntegerNode{ + BaseNode: &BaseNode{}, + Token: tk, + Value: i, + } + } + i, _ := strconv.ParseUint(value[skipCharacterNum:], 16, 64) + return &IntegerNode{ + BaseNode: &BaseNode{}, + Token: tk, + Value: i, + } + } + if value[0] == '-' || value[0] == '+' { + i, _ := strconv.ParseInt(value, 10, 64) + return &IntegerNode{ + BaseNode: &BaseNode{}, + Token: tk, + Value: i, + } + } + i, _ := strconv.ParseUint(value, 10, 64) + return &IntegerNode{ + BaseNode: &BaseNode{}, + Token: tk, + Value: i, + } +} + +// Float create node for float value +func Float(tk *token.Token) *FloatNode { + f, _ := strconv.ParseFloat(removeUnderScoreFromNumber(tk.Value), 64) + return &FloatNode{ + BaseNode: &BaseNode{}, + Token: tk, + Value: f, + } +} + +// Infinity create node for .inf or -.inf 
value +func Infinity(tk *token.Token) *InfinityNode { + node := &InfinityNode{ + BaseNode: &BaseNode{}, + Token: tk, + } + switch tk.Value { + case ".inf", ".Inf", ".INF": + node.Value = math.Inf(0) + case "-.inf", "-.Inf", "-.INF": + node.Value = math.Inf(-1) + } + return node +} + +// Nan create node for .nan value +func Nan(tk *token.Token) *NanNode { + return &NanNode{ + BaseNode: &BaseNode{}, + Token: tk, + } +} + +// String create node for string value +func String(tk *token.Token) *StringNode { + return &StringNode{ + BaseNode: &BaseNode{}, + Token: tk, + Value: tk.Value, + } +} + +// Comment create node for comment +func Comment(tk *token.Token) *CommentNode { + return &CommentNode{ + BaseNode: &BaseNode{}, + Token: tk, + } +} + +func CommentGroup(comments []*token.Token) *CommentGroupNode { + nodes := []*CommentNode{} + for _, comment := range comments { + nodes = append(nodes, Comment(comment)) + } + return &CommentGroupNode{ + BaseNode: &BaseNode{}, + Comments: nodes, + } +} + +// MergeKey create node for merge key ( << ) +func MergeKey(tk *token.Token) *MergeKeyNode { + return &MergeKeyNode{ + BaseNode: &BaseNode{}, + Token: tk, + } +} + +// Mapping create node for map +func Mapping(tk *token.Token, isFlowStyle bool, values ...*MappingValueNode) *MappingNode { + node := &MappingNode{ + BaseNode: &BaseNode{}, + Start: tk, + IsFlowStyle: isFlowStyle, + Values: []*MappingValueNode{}, + } + node.Values = append(node.Values, values...) + return node +} + +// MappingValue create node for mapping value +func MappingValue(tk *token.Token, key MapKeyNode, value Node) *MappingValueNode { + return &MappingValueNode{ + BaseNode: &BaseNode{}, + Start: tk, + Key: key, + Value: value, + } +} + +// MappingKey create node for map key ( '?' ). +func MappingKey(tk *token.Token) *MappingKeyNode { + return &MappingKeyNode{ + BaseNode: &BaseNode{}, + Start: tk, + } +} + +// Sequence create node for sequence +func Sequence(tk *token.Token, isFlowStyle bool) *SequenceNode { + return &SequenceNode{ + BaseNode: &BaseNode{}, + Start: tk, + IsFlowStyle: isFlowStyle, + Values: []Node{}, + } +} + +func Anchor(tk *token.Token) *AnchorNode { + return &AnchorNode{ + BaseNode: &BaseNode{}, + Start: tk, + } +} + +func Alias(tk *token.Token) *AliasNode { + return &AliasNode{ + BaseNode: &BaseNode{}, + Start: tk, + } +} + +func Document(tk *token.Token, body Node) *DocumentNode { + return &DocumentNode{ + BaseNode: &BaseNode{}, + Start: tk, + Body: body, + } +} + +func Directive(tk *token.Token) *DirectiveNode { + return &DirectiveNode{ + BaseNode: &BaseNode{}, + Start: tk, + } +} + +func Literal(tk *token.Token) *LiteralNode { + return &LiteralNode{ + BaseNode: &BaseNode{}, + Start: tk, + } +} + +func Tag(tk *token.Token) *TagNode { + return &TagNode{ + BaseNode: &BaseNode{}, + Start: tk, + } +} + +// File contains all documents in YAML file +type File struct { + Name string + Docs []*DocumentNode +} + +// Read implements (io.Reader).Read +func (f *File) Read(p []byte) (int, error) { + for _, doc := range f.Docs { + n, err := doc.Read(p) + if err == io.EOF { + continue + } + return n, nil + } + return 0, io.EOF +} + +// String all documents to text +func (f *File) String() string { + docs := []string{} + for _, doc := range f.Docs { + docs = append(docs, doc.String()) + } + if len(docs) > 0 { + return strings.Join(docs, "\n") + "\n" + } else { + return "" + } +} + +// DocumentNode type of Document +type DocumentNode struct { + *BaseNode + Start *token.Token // position of DocumentHeader ( `---` ) + End 
*token.Token // position of DocumentEnd ( `...` ) + Body Node +} + +// Read implements (io.Reader).Read +func (d *DocumentNode) Read(p []byte) (int, error) { + return readNode(p, d) +} + +// Type returns DocumentNodeType +func (d *DocumentNode) Type() NodeType { return DocumentType } + +// GetToken returns token instance +func (d *DocumentNode) GetToken() *token.Token { + return d.Body.GetToken() +} + +// AddColumn add column number to child nodes recursively +func (d *DocumentNode) AddColumn(col int) { + if d.Body != nil { + d.Body.AddColumn(col) + } +} + +// String document to text +func (d *DocumentNode) String() string { + doc := []string{} + if d.Start != nil { + doc = append(doc, d.Start.Value) + } + doc = append(doc, d.Body.String()) + if d.End != nil { + doc = append(doc, d.End.Value) + } + return strings.Join(doc, "\n") +} + +// MarshalYAML encodes to a YAML text +func (d *DocumentNode) MarshalYAML() ([]byte, error) { + return []byte(d.String()), nil +} + +func removeUnderScoreFromNumber(num string) string { + return strings.ReplaceAll(num, "_", "") +} + +// NullNode type of null node +type NullNode struct { + *BaseNode + Token *token.Token +} + +// Read implements (io.Reader).Read +func (n *NullNode) Read(p []byte) (int, error) { + return readNode(p, n) +} + +// Type returns NullType +func (n *NullNode) Type() NodeType { return NullType } + +// GetToken returns token instance +func (n *NullNode) GetToken() *token.Token { + return n.Token +} + +// AddColumn add column number to child nodes recursively +func (n *NullNode) AddColumn(col int) { + n.Token.AddColumn(col) +} + +// GetValue returns nil value +func (n *NullNode) GetValue() interface{} { + return nil +} + +// String returns `null` text +func (n *NullNode) String() string { + if n.Comment != nil { + return addCommentString("null", n.Comment) + } + return n.stringWithoutComment() +} + +func (n *NullNode) stringWithoutComment() string { + return "null" +} + +// MarshalYAML encodes to a YAML text +func (n *NullNode) MarshalYAML() ([]byte, error) { + return []byte(n.String()), nil +} + +// IntegerNode type of integer node +type IntegerNode struct { + *BaseNode + Token *token.Token + Value interface{} // int64 or uint64 value +} + +// Read implements (io.Reader).Read +func (n *IntegerNode) Read(p []byte) (int, error) { + return readNode(p, n) +} + +// Type returns IntegerType +func (n *IntegerNode) Type() NodeType { return IntegerType } + +// GetToken returns token instance +func (n *IntegerNode) GetToken() *token.Token { + return n.Token +} + +// AddColumn add column number to child nodes recursively +func (n *IntegerNode) AddColumn(col int) { + n.Token.AddColumn(col) +} + +// GetValue returns int64 value +func (n *IntegerNode) GetValue() interface{} { + return n.Value +} + +// String int64 to text +func (n *IntegerNode) String() string { + if n.Comment != nil { + return addCommentString(n.Token.Value, n.Comment) + } + return n.stringWithoutComment() +} + +func (n *IntegerNode) stringWithoutComment() string { + return n.Token.Value +} + +// MarshalYAML encodes to a YAML text +func (n *IntegerNode) MarshalYAML() ([]byte, error) { + return []byte(n.String()), nil +} + +// FloatNode type of float node +type FloatNode struct { + *BaseNode + Token *token.Token + Precision int + Value float64 +} + +// Read implements (io.Reader).Read +func (n *FloatNode) Read(p []byte) (int, error) { + return readNode(p, n) +} + +// Type returns FloatType +func (n *FloatNode) Type() NodeType { return FloatType } + +// GetToken returns token instance 
+func (n *FloatNode) GetToken() *token.Token { + return n.Token +} + +// AddColumn add column number to child nodes recursively +func (n *FloatNode) AddColumn(col int) { + n.Token.AddColumn(col) +} + +// GetValue returns float64 value +func (n *FloatNode) GetValue() interface{} { + return n.Value +} + +// String float64 to text +func (n *FloatNode) String() string { + if n.Comment != nil { + return addCommentString(n.Token.Value, n.Comment) + } + return n.stringWithoutComment() +} + +func (n *FloatNode) stringWithoutComment() string { + return n.Token.Value +} + +// MarshalYAML encodes to a YAML text +func (n *FloatNode) MarshalYAML() ([]byte, error) { + return []byte(n.String()), nil +} + +// StringNode type of string node +type StringNode struct { + *BaseNode + Token *token.Token + Value string +} + +// Read implements (io.Reader).Read +func (n *StringNode) Read(p []byte) (int, error) { + return readNode(p, n) +} + +// Type returns StringType +func (n *StringNode) Type() NodeType { return StringType } + +// GetToken returns token instance +func (n *StringNode) GetToken() *token.Token { + return n.Token +} + +// AddColumn add column number to child nodes recursively +func (n *StringNode) AddColumn(col int) { + n.Token.AddColumn(col) +} + +// GetValue returns string value +func (n *StringNode) GetValue() interface{} { + return n.Value +} + +// escapeSingleQuote escapes s to a single quoted scalar. +// https://yaml.org/spec/1.2.2/#732-single-quoted-style +func escapeSingleQuote(s string) string { + var sb strings.Builder + growLen := len(s) + // s includes also one ' from the doubled pair + 2 + // opening and closing ' + strings.Count(s, "'") // ' added by ReplaceAll + sb.Grow(growLen) + sb.WriteString("'") + sb.WriteString(strings.ReplaceAll(s, "'", "''")) + sb.WriteString("'") + return sb.String() +} + +// String string value to text with quote or literal header if required +func (n *StringNode) String() string { + switch n.Token.Type { + case token.SingleQuoteType: + quoted := escapeSingleQuote(n.Value) + if n.Comment != nil { + return addCommentString(quoted, n.Comment) + } + return quoted + case token.DoubleQuoteType: + quoted := strconv.Quote(n.Value) + if n.Comment != nil { + return addCommentString(quoted, n.Comment) + } + return quoted + } + + lbc := token.DetectLineBreakCharacter(n.Value) + if strings.Contains(n.Value, lbc) { + // This block assumes that the line breaks in this inside scalar content and the Outside scalar content are the same. + // It works mostly, but inconsistencies occur if line break characters are mixed. 
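+ // The value is emitted as a block scalar: literal/folded header first, then each line re-indented to the token's column, with the trailing indented blank line trimmed off.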
+ header := token.LiteralBlockHeader(n.Value) + space := strings.Repeat(" ", n.Token.Position.Column-1) + values := []string{} + for _, v := range strings.Split(n.Value, lbc) { + values = append(values, fmt.Sprintf("%s %s", space, v)) + } + block := strings.TrimSuffix(strings.TrimSuffix(strings.Join(values, lbc), fmt.Sprintf("%s %s", lbc, space)), fmt.Sprintf(" %s", space)) + return fmt.Sprintf("%s%s%s", header, lbc, block) + } else if len(n.Value) > 0 && (n.Value[0] == '{' || n.Value[0] == '[') { + return fmt.Sprintf(`'%s'`, n.Value) + } + if n.Comment != nil { + return addCommentString(n.Value, n.Comment) + } + return n.Value +} + +func (n *StringNode) stringWithoutComment() string { + switch n.Token.Type { + case token.SingleQuoteType: + quoted := fmt.Sprintf(`'%s'`, n.Value) + return quoted + case token.DoubleQuoteType: + quoted := strconv.Quote(n.Value) + return quoted + } + + lbc := token.DetectLineBreakCharacter(n.Value) + if strings.Contains(n.Value, lbc) { + // This block assumes that the line breaks in this inside scalar content and the Outside scalar content are the same. + // It works mostly, but inconsistencies occur if line break characters are mixed. + header := token.LiteralBlockHeader(n.Value) + space := strings.Repeat(" ", n.Token.Position.Column-1) + values := []string{} + for _, v := range strings.Split(n.Value, lbc) { + values = append(values, fmt.Sprintf("%s %s", space, v)) + } + block := strings.TrimSuffix(strings.TrimSuffix(strings.Join(values, lbc), fmt.Sprintf("%s %s", lbc, space)), fmt.Sprintf(" %s", space)) + return fmt.Sprintf("%s%s%s", header, lbc, block) + } else if len(n.Value) > 0 && (n.Value[0] == '{' || n.Value[0] == '[') { + return fmt.Sprintf(`'%s'`, n.Value) + } + return n.Value +} + +// MarshalYAML encodes to a YAML text +func (n *StringNode) MarshalYAML() ([]byte, error) { + return []byte(n.String()), nil +} + +// LiteralNode type of literal node +type LiteralNode struct { + *BaseNode + Start *token.Token + Value *StringNode +} + +// Read implements (io.Reader).Read +func (n *LiteralNode) Read(p []byte) (int, error) { + return readNode(p, n) +} + +// Type returns LiteralType +func (n *LiteralNode) Type() NodeType { return LiteralType } + +// GetToken returns token instance +func (n *LiteralNode) GetToken() *token.Token { + return n.Start +} + +// AddColumn add column number to child nodes recursively +func (n *LiteralNode) AddColumn(col int) { + n.Start.AddColumn(col) + if n.Value != nil { + n.Value.AddColumn(col) + } +} + +// GetValue returns string value +func (n *LiteralNode) GetValue() interface{} { + return n.String() +} + +// String literal to text +func (n *LiteralNode) String() string { + origin := n.Value.GetToken().Origin + lit := strings.TrimRight(strings.TrimRight(origin, " "), "\n") + if n.Comment != nil { + return fmt.Sprintf("%s %s\n%s", n.Start.Value, n.Comment.String(), lit) + } + return fmt.Sprintf("%s\n%s", n.Start.Value, lit) +} + +func (n *LiteralNode) stringWithoutComment() string { + return n.String() +} + +// MarshalYAML encodes to a YAML text +func (n *LiteralNode) MarshalYAML() ([]byte, error) { + return []byte(n.String()), nil +} + +// MergeKeyNode type of merge key node +type MergeKeyNode struct { + *BaseNode + Token *token.Token +} + +// Read implements (io.Reader).Read +func (n *MergeKeyNode) Read(p []byte) (int, error) { + return readNode(p, n) +} + +// Type returns MergeKeyType +func (n *MergeKeyNode) Type() NodeType { return MergeKeyType } + +// GetToken returns token instance +func (n *MergeKeyNode) GetToken() 
*token.Token { + return n.Token +} + +// GetValue returns '<<' value +func (n *MergeKeyNode) GetValue() interface{} { + return n.Token.Value +} + +// String returns '<<' value +func (n *MergeKeyNode) String() string { + return n.stringWithoutComment() +} + +func (n *MergeKeyNode) stringWithoutComment() string { + return n.Token.Value +} + +// AddColumn add column number to child nodes recursively +func (n *MergeKeyNode) AddColumn(col int) { + n.Token.AddColumn(col) +} + +// MarshalYAML encodes to a YAML text +func (n *MergeKeyNode) MarshalYAML() ([]byte, error) { + return []byte(n.String()), nil +} + +// BoolNode type of boolean node +type BoolNode struct { + *BaseNode + Token *token.Token + Value bool +} + +// Read implements (io.Reader).Read +func (n *BoolNode) Read(p []byte) (int, error) { + return readNode(p, n) +} + +// Type returns BoolType +func (n *BoolNode) Type() NodeType { return BoolType } + +// GetToken returns token instance +func (n *BoolNode) GetToken() *token.Token { + return n.Token +} + +// AddColumn add column number to child nodes recursively +func (n *BoolNode) AddColumn(col int) { + n.Token.AddColumn(col) +} + +// GetValue returns boolean value +func (n *BoolNode) GetValue() interface{} { + return n.Value +} + +// String boolean to text +func (n *BoolNode) String() string { + if n.Comment != nil { + return addCommentString(n.Token.Value, n.Comment) + } + return n.stringWithoutComment() +} + +func (n *BoolNode) stringWithoutComment() string { + return n.Token.Value +} + +// MarshalYAML encodes to a YAML text +func (n *BoolNode) MarshalYAML() ([]byte, error) { + return []byte(n.String()), nil +} + +// InfinityNode type of infinity node +type InfinityNode struct { + *BaseNode + Token *token.Token + Value float64 +} + +// Read implements (io.Reader).Read +func (n *InfinityNode) Read(p []byte) (int, error) { + return readNode(p, n) +} + +// Type returns InfinityType +func (n *InfinityNode) Type() NodeType { return InfinityType } + +// GetToken returns token instance +func (n *InfinityNode) GetToken() *token.Token { + return n.Token +} + +// AddColumn add column number to child nodes recursively +func (n *InfinityNode) AddColumn(col int) { + n.Token.AddColumn(col) +} + +// GetValue returns math.Inf(0) or math.Inf(-1) +func (n *InfinityNode) GetValue() interface{} { + return n.Value +} + +// String infinity to text +func (n *InfinityNode) String() string { + if n.Comment != nil { + return addCommentString(n.Token.Value, n.Comment) + } + return n.stringWithoutComment() +} + +func (n *InfinityNode) stringWithoutComment() string { + return n.Token.Value +} + +// MarshalYAML encodes to a YAML text +func (n *InfinityNode) MarshalYAML() ([]byte, error) { + return []byte(n.String()), nil +} + +// NanNode type of nan node +type NanNode struct { + *BaseNode + Token *token.Token +} + +// Read implements (io.Reader).Read +func (n *NanNode) Read(p []byte) (int, error) { + return readNode(p, n) +} + +// Type returns NanType +func (n *NanNode) Type() NodeType { return NanType } + +// GetToken returns token instance +func (n *NanNode) GetToken() *token.Token { + return n.Token +} + +// AddColumn add column number to child nodes recursively +func (n *NanNode) AddColumn(col int) { + n.Token.AddColumn(col) +} + +// GetValue returns math.NaN() +func (n *NanNode) GetValue() interface{} { + return math.NaN() +} + +// String returns .nan +func (n *NanNode) String() string { + if n.Comment != nil { + return addCommentString(n.Token.Value, n.Comment) + } + return n.stringWithoutComment() +} + 
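+// stringWithoutComment returns the raw token text, ignoring any attached comment.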
+func (n *NanNode) stringWithoutComment() string { + return n.Token.Value +} + +// MarshalYAML encodes to a YAML text +func (n *NanNode) MarshalYAML() ([]byte, error) { + return []byte(n.String()), nil +} + +// MapNode interface of MappingValueNode / MappingNode +type MapNode interface { + MapRange() *MapNodeIter +} + +// MapNodeIter is an iterator for ranging over a MapNode +type MapNodeIter struct { + values []*MappingValueNode + idx int +} + +const ( + startRangeIndex = -1 +) + +// Next advances the map iterator and reports whether there is another entry. +// It returns false when the iterator is exhausted. +func (m *MapNodeIter) Next() bool { + m.idx++ + next := m.idx < len(m.values) + return next +} + +// Key returns the key of the iterator's current map node entry. +func (m *MapNodeIter) Key() MapKeyNode { + return m.values[m.idx].Key +} + +// Value returns the value of the iterator's current map node entry. +func (m *MapNodeIter) Value() Node { + return m.values[m.idx].Value +} + +// MappingNode type of mapping node +type MappingNode struct { + *BaseNode + Start *token.Token + End *token.Token + IsFlowStyle bool + Values []*MappingValueNode + FootComment *CommentGroupNode +} + +func (n *MappingNode) startPos() *token.Position { + if len(n.Values) == 0 { + return n.Start.Position + } + return n.Values[0].Key.GetToken().Position +} + +// Merge merge key/value of map. +func (n *MappingNode) Merge(target *MappingNode) { + keyToMapValueMap := map[string]*MappingValueNode{} + for _, value := range n.Values { + key := value.Key.String() + keyToMapValueMap[key] = value + } + column := n.startPos().Column - target.startPos().Column + target.AddColumn(column) + for _, value := range target.Values { + mapValue, exists := keyToMapValueMap[value.Key.String()] + if exists { + mapValue.Value = value.Value + } else { + n.Values = append(n.Values, value) + } + } +} + +// SetIsFlowStyle set value to IsFlowStyle field recursively. 
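+// The chosen style is propagated to every nested mapping and sequence value.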
+func (n *MappingNode) SetIsFlowStyle(isFlow bool) { + n.IsFlowStyle = isFlow + for _, value := range n.Values { + value.SetIsFlowStyle(isFlow) + } +} + +// Read implements (io.Reader).Read +func (n *MappingNode) Read(p []byte) (int, error) { + return readNode(p, n) +} + +// Type returns MappingType +func (n *MappingNode) Type() NodeType { return MappingType } + +// GetToken returns token instance +func (n *MappingNode) GetToken() *token.Token { + return n.Start +} + +// AddColumn add column number to child nodes recursively +func (n *MappingNode) AddColumn(col int) { + n.Start.AddColumn(col) + n.End.AddColumn(col) + for _, value := range n.Values { + value.AddColumn(col) + } +} + +func (n *MappingNode) flowStyleString(commentMode bool) string { + values := []string{} + for _, value := range n.Values { + values = append(values, strings.TrimLeft(value.String(), " ")) + } + mapText := fmt.Sprintf("{%s}", strings.Join(values, ", ")) + if commentMode && n.Comment != nil { + return addCommentString(mapText, n.Comment) + } + return mapText +} + +func (n *MappingNode) blockStyleString(commentMode bool) string { + values := []string{} + for _, value := range n.Values { + values = append(values, value.String()) + } + mapText := strings.Join(values, "\n") + if commentMode && n.Comment != nil { + value := values[0] + var spaceNum int + for i := 0; i < len(value); i++ { + if value[i] != ' ' { + break + } + spaceNum++ + } + comment := n.Comment.StringWithSpace(spaceNum) + return fmt.Sprintf("%s\n%s", comment, mapText) + } + return mapText +} + +// String mapping values to text +func (n *MappingNode) String() string { + if len(n.Values) == 0 { + if n.Comment != nil { + return addCommentString("{}", n.Comment) + } + return "{}" + } + + commentMode := true + if n.IsFlowStyle || len(n.Values) == 0 { + return n.flowStyleString(commentMode) + } + return n.blockStyleString(commentMode) +} + +// MapRange implements MapNode protocol +func (n *MappingNode) MapRange() *MapNodeIter { + return &MapNodeIter{ + idx: startRangeIndex, + values: n.Values, + } +} + +// MarshalYAML encodes to a YAML text +func (n *MappingNode) MarshalYAML() ([]byte, error) { + return []byte(n.String()), nil +} + +// MappingKeyNode type of tag node +type MappingKeyNode struct { + *BaseNode + Start *token.Token + Value Node +} + +// Read implements (io.Reader).Read +func (n *MappingKeyNode) Read(p []byte) (int, error) { + return readNode(p, n) +} + +// Type returns MappingKeyType +func (n *MappingKeyNode) Type() NodeType { return MappingKeyType } + +// GetToken returns token instance +func (n *MappingKeyNode) GetToken() *token.Token { + return n.Start +} + +// AddColumn add column number to child nodes recursively +func (n *MappingKeyNode) AddColumn(col int) { + n.Start.AddColumn(col) + if n.Value != nil { + n.Value.AddColumn(col) + } +} + +// String tag to text +func (n *MappingKeyNode) String() string { + return n.stringWithoutComment() +} + +func (n *MappingKeyNode) stringWithoutComment() string { + return fmt.Sprintf("%s %s", n.Start.Value, n.Value.String()) +} + +// MarshalYAML encodes to a YAML text +func (n *MappingKeyNode) MarshalYAML() ([]byte, error) { + return []byte(n.String()), nil +} + +// MappingValueNode type of mapping value +type MappingValueNode struct { + *BaseNode + Start *token.Token + Key MapKeyNode + Value Node + FootComment *CommentGroupNode +} + +// Replace replace value node. 
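+// The replacement's column is adjusted so it keeps the indentation of the value it replaces.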
+func (n *MappingValueNode) Replace(value Node) error { + column := n.Value.GetToken().Position.Column - value.GetToken().Position.Column + value.AddColumn(column) + n.Value = value + return nil +} + +// Read implements (io.Reader).Read +func (n *MappingValueNode) Read(p []byte) (int, error) { + return readNode(p, n) +} + +// Type returns MappingValueType +func (n *MappingValueNode) Type() NodeType { return MappingValueType } + +// GetToken returns token instance +func (n *MappingValueNode) GetToken() *token.Token { + return n.Start +} + +// AddColumn add column number to child nodes recursively +func (n *MappingValueNode) AddColumn(col int) { + n.Start.AddColumn(col) + if n.Key != nil { + n.Key.AddColumn(col) + } + if n.Value != nil { + n.Value.AddColumn(col) + } +} + +// SetIsFlowStyle set value to IsFlowStyle field recursively. +func (n *MappingValueNode) SetIsFlowStyle(isFlow bool) { + switch value := n.Value.(type) { + case *MappingNode: + value.SetIsFlowStyle(isFlow) + case *MappingValueNode: + value.SetIsFlowStyle(isFlow) + case *SequenceNode: + value.SetIsFlowStyle(isFlow) + } +} + +// String mapping value to text +func (n *MappingValueNode) String() string { + var text string + if n.Comment != nil { + text = fmt.Sprintf( + "%s\n%s", + n.Comment.StringWithSpace(n.Key.GetToken().Position.Column-1), + n.toString(), + ) + } else { + text = n.toString() + } + if n.FootComment != nil { + text += fmt.Sprintf("\n%s", n.FootComment.StringWithSpace(n.Key.GetToken().Position.Column-1)) + } + return text +} + +func (n *MappingValueNode) toString() string { + space := strings.Repeat(" ", n.Key.GetToken().Position.Column-1) + keyIndentLevel := n.Key.GetToken().Position.IndentLevel + valueIndentLevel := n.Value.GetToken().Position.IndentLevel + keyComment := n.Key.GetComment() + if _, ok := n.Value.(ScalarNode); ok { + return fmt.Sprintf("%s%s: %s", space, n.Key.String(), n.Value.String()) + } else if keyIndentLevel < valueIndentLevel { + if keyComment != nil { + return fmt.Sprintf( + "%s%s: %s\n%s", + space, + n.Key.stringWithoutComment(), + keyComment.String(), + n.Value.String(), + ) + } + return fmt.Sprintf("%s%s:\n%s", space, n.Key.String(), n.Value.String()) + } else if m, ok := n.Value.(*MappingNode); ok && (m.IsFlowStyle || len(m.Values) == 0) { + return fmt.Sprintf("%s%s: %s", space, n.Key.String(), n.Value.String()) + } else if s, ok := n.Value.(*SequenceNode); ok && (s.IsFlowStyle || len(s.Values) == 0) { + return fmt.Sprintf("%s%s: %s", space, n.Key.String(), n.Value.String()) + } else if _, ok := n.Value.(*AnchorNode); ok { + return fmt.Sprintf("%s%s: %s", space, n.Key.String(), n.Value.String()) + } else if _, ok := n.Value.(*AliasNode); ok { + return fmt.Sprintf("%s%s: %s", space, n.Key.String(), n.Value.String()) + } + if keyComment != nil { + return fmt.Sprintf( + "%s%s: %s\n%s", + space, + n.Key.stringWithoutComment(), + keyComment.String(), + n.Value.String(), + ) + } + if m, ok := n.Value.(*MappingNode); ok && m.Comment != nil { + return fmt.Sprintf( + "%s%s: %s", + space, + n.Key.String(), + strings.TrimLeft(n.Value.String(), " "), + ) + } + return fmt.Sprintf("%s%s:\n%s", space, n.Key.String(), n.Value.String()) +} + +// MapRange implements MapNode protocol +func (n *MappingValueNode) MapRange() *MapNodeIter { + return &MapNodeIter{ + idx: startRangeIndex, + values: []*MappingValueNode{n}, + } +} + +// MarshalYAML encodes to a YAML text +func (n *MappingValueNode) MarshalYAML() ([]byte, error) { + return []byte(n.String()), nil +} + +// ArrayNode interface of SequenceNode 
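+// Implementations expose their elements through ArrayRange.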
+type ArrayNode interface { + ArrayRange() *ArrayNodeIter +} + +// ArrayNodeIter is an iterator for ranging over a ArrayNode +type ArrayNodeIter struct { + values []Node + idx int +} + +// Next advances the array iterator and reports whether there is another entry. +// It returns false when the iterator is exhausted. +func (m *ArrayNodeIter) Next() bool { + m.idx++ + next := m.idx < len(m.values) + return next +} + +// Value returns the value of the iterator's current array entry. +func (m *ArrayNodeIter) Value() Node { + return m.values[m.idx] +} + +// Len returns length of array +func (m *ArrayNodeIter) Len() int { + return len(m.values) +} + +// SequenceNode type of sequence node +type SequenceNode struct { + *BaseNode + Start *token.Token + End *token.Token + IsFlowStyle bool + Values []Node + ValueHeadComments []*CommentGroupNode + FootComment *CommentGroupNode +} + +// Replace replace value node. +func (n *SequenceNode) Replace(idx int, value Node) error { + if len(n.Values) <= idx { + return xerrors.Errorf( + "invalid index for sequence: sequence length is %d, but specified %d index", + len(n.Values), idx, + ) + } + column := n.Values[idx].GetToken().Position.Column - value.GetToken().Position.Column + value.AddColumn(column) + n.Values[idx] = value + return nil +} + +// Merge merge sequence value. +func (n *SequenceNode) Merge(target *SequenceNode) { + column := n.Start.Position.Column - target.Start.Position.Column + target.AddColumn(column) + n.Values = append(n.Values, target.Values...) +} + +// SetIsFlowStyle set value to IsFlowStyle field recursively. +func (n *SequenceNode) SetIsFlowStyle(isFlow bool) { + n.IsFlowStyle = isFlow + for _, value := range n.Values { + switch value := value.(type) { + case *MappingNode: + value.SetIsFlowStyle(isFlow) + case *MappingValueNode: + value.SetIsFlowStyle(isFlow) + case *SequenceNode: + value.SetIsFlowStyle(isFlow) + } + } +} + +// Read implements (io.Reader).Read +func (n *SequenceNode) Read(p []byte) (int, error) { + return readNode(p, n) +} + +// Type returns SequenceType +func (n *SequenceNode) Type() NodeType { return SequenceType } + +// GetToken returns token instance +func (n *SequenceNode) GetToken() *token.Token { + return n.Start +} + +// AddColumn add column number to child nodes recursively +func (n *SequenceNode) AddColumn(col int) { + n.Start.AddColumn(col) + n.End.AddColumn(col) + for _, value := range n.Values { + value.AddColumn(col) + } +} + +func (n *SequenceNode) flowStyleString() string { + values := []string{} + for _, value := range n.Values { + values = append(values, value.String()) + } + return fmt.Sprintf("[%s]", strings.Join(values, ", ")) +} + +func (n *SequenceNode) blockStyleString() string { + space := strings.Repeat(" ", n.Start.Position.Column-1) + values := []string{} + if n.Comment != nil { + values = append(values, n.Comment.StringWithSpace(n.Start.Position.Column-1)) + } + + for idx, value := range n.Values { + valueStr := value.String() + splittedValues := strings.Split(valueStr, "\n") + trimmedFirstValue := strings.TrimLeft(splittedValues[0], " ") + diffLength := len(splittedValues[0]) - len(trimmedFirstValue) + if len(splittedValues) > 1 && value.Type() == StringType || value.Type() == LiteralType { + // If multi-line string, the space characters for indent have already been added, so delete them. 
+ prefix := space + " " + for i := 1; i < len(splittedValues); i++ { + splittedValues[i] = strings.TrimPrefix(splittedValues[i], prefix) + } + } + newValues := []string{trimmedFirstValue} + for i := 1; i < len(splittedValues); i++ { + if len(splittedValues[i]) <= diffLength { + // this line is \n or white space only + newValues = append(newValues, "") + continue + } + trimmed := splittedValues[i][diffLength:] + newValues = append(newValues, fmt.Sprintf("%s %s", space, trimmed)) + } + newValue := strings.Join(newValues, "\n") + if len(n.ValueHeadComments) == len(n.Values) && n.ValueHeadComments[idx] != nil { + values = append(values, n.ValueHeadComments[idx].StringWithSpace(n.Start.Position.Column-1)) + } + values = append(values, fmt.Sprintf("%s- %s", space, newValue)) + } + if n.FootComment != nil { + values = append(values, n.FootComment.StringWithSpace(n.Start.Position.Column-1)) + } + return strings.Join(values, "\n") +} + +// String sequence to text +func (n *SequenceNode) String() string { + if n.IsFlowStyle || len(n.Values) == 0 { + return n.flowStyleString() + } + return n.blockStyleString() +} + +// ArrayRange implements ArrayNode protocol +func (n *SequenceNode) ArrayRange() *ArrayNodeIter { + return &ArrayNodeIter{ + idx: startRangeIndex, + values: n.Values, + } +} + +// MarshalYAML encodes to a YAML text +func (n *SequenceNode) MarshalYAML() ([]byte, error) { + return []byte(n.String()), nil +} + +// AnchorNode type of anchor node +type AnchorNode struct { + *BaseNode + Start *token.Token + Name Node + Value Node +} + +func (n *AnchorNode) SetName(name string) error { + if n.Name == nil { + return ErrInvalidAnchorName + } + s, ok := n.Name.(*StringNode) + if !ok { + return ErrInvalidAnchorName + } + s.Value = name + return nil +} + +// Read implements (io.Reader).Read +func (n *AnchorNode) Read(p []byte) (int, error) { + return readNode(p, n) +} + +// Type returns AnchorType +func (n *AnchorNode) Type() NodeType { return AnchorType } + +// GetToken returns token instance +func (n *AnchorNode) GetToken() *token.Token { + return n.Start +} + +// AddColumn add column number to child nodes recursively +func (n *AnchorNode) AddColumn(col int) { + n.Start.AddColumn(col) + if n.Name != nil { + n.Name.AddColumn(col) + } + if n.Value != nil { + n.Value.AddColumn(col) + } +} + +// String anchor to text +func (n *AnchorNode) String() string { + value := n.Value.String() + if len(strings.Split(value, "\n")) > 1 { + return fmt.Sprintf("&%s\n%s", n.Name.String(), value) + } else if s, ok := n.Value.(*SequenceNode); ok && !s.IsFlowStyle { + return fmt.Sprintf("&%s\n%s", n.Name.String(), value) + } else if m, ok := n.Value.(*MappingNode); ok && !m.IsFlowStyle { + return fmt.Sprintf("&%s\n%s", n.Name.String(), value) + } + return fmt.Sprintf("&%s %s", n.Name.String(), value) +} + +// MarshalYAML encodes to a YAML text +func (n *AnchorNode) MarshalYAML() ([]byte, error) { + return []byte(n.String()), nil +} + +// AliasNode type of alias node +type AliasNode struct { + *BaseNode + Start *token.Token + Value Node +} + +func (n *AliasNode) SetName(name string) error { + if n.Value == nil { + return ErrInvalidAliasName + } + s, ok := n.Value.(*StringNode) + if !ok { + return ErrInvalidAliasName + } + s.Value = name + return nil +} + +// Read implements (io.Reader).Read +func (n *AliasNode) Read(p []byte) (int, error) { + return readNode(p, n) +} + +// Type returns AliasType +func (n *AliasNode) Type() NodeType { return AliasType } + +// GetToken returns token instance +func (n *AliasNode) GetToken() 
*token.Token { + return n.Start +} + +// AddColumn add column number to child nodes recursively +func (n *AliasNode) AddColumn(col int) { + n.Start.AddColumn(col) + if n.Value != nil { + n.Value.AddColumn(col) + } +} + +// String alias to text +func (n *AliasNode) String() string { + return fmt.Sprintf("*%s", n.Value.String()) +} + +// MarshalYAML encodes to a YAML text +func (n *AliasNode) MarshalYAML() ([]byte, error) { + return []byte(n.String()), nil +} + +// DirectiveNode type of directive node +type DirectiveNode struct { + *BaseNode + Start *token.Token + Value Node +} + +// Read implements (io.Reader).Read +func (n *DirectiveNode) Read(p []byte) (int, error) { + return readNode(p, n) +} + +// Type returns DirectiveType +func (n *DirectiveNode) Type() NodeType { return DirectiveType } + +// GetToken returns token instance +func (n *DirectiveNode) GetToken() *token.Token { + return n.Start +} + +// AddColumn add column number to child nodes recursively +func (n *DirectiveNode) AddColumn(col int) { + if n.Value != nil { + n.Value.AddColumn(col) + } +} + +// String directive to text +func (n *DirectiveNode) String() string { + return fmt.Sprintf("%s%s", n.Start.Value, n.Value.String()) +} + +// MarshalYAML encodes to a YAML text +func (n *DirectiveNode) MarshalYAML() ([]byte, error) { + return []byte(n.String()), nil +} + +// TagNode type of tag node +type TagNode struct { + *BaseNode + Start *token.Token + Value Node +} + +// Read implements (io.Reader).Read +func (n *TagNode) Read(p []byte) (int, error) { + return readNode(p, n) +} + +// Type returns TagType +func (n *TagNode) Type() NodeType { return TagType } + +// GetToken returns token instance +func (n *TagNode) GetToken() *token.Token { + return n.Start +} + +// AddColumn add column number to child nodes recursively +func (n *TagNode) AddColumn(col int) { + n.Start.AddColumn(col) + if n.Value != nil { + n.Value.AddColumn(col) + } +} + +// String tag to text +func (n *TagNode) String() string { + return fmt.Sprintf("%s %s", n.Start.Value, n.Value.String()) +} + +// MarshalYAML encodes to a YAML text +func (n *TagNode) MarshalYAML() ([]byte, error) { + return []byte(n.String()), nil +} + +// CommentNode type of comment node +type CommentNode struct { + *BaseNode + Token *token.Token +} + +// Read implements (io.Reader).Read +func (n *CommentNode) Read(p []byte) (int, error) { + return readNode(p, n) +} + +// Type returns TagType +func (n *CommentNode) Type() NodeType { return CommentType } + +// GetToken returns token instance +func (n *CommentNode) GetToken() *token.Token { return n.Token } + +// AddColumn add column number to child nodes recursively +func (n *CommentNode) AddColumn(col int) { + if n.Token == nil { + return + } + n.Token.AddColumn(col) +} + +// String comment to text +func (n *CommentNode) String() string { + return fmt.Sprintf("#%s", n.Token.Value) +} + +// MarshalYAML encodes to a YAML text +func (n *CommentNode) MarshalYAML() ([]byte, error) { + return []byte(n.String()), nil +} + +// CommentGroupNode type of comment node +type CommentGroupNode struct { + *BaseNode + Comments []*CommentNode +} + +// Read implements (io.Reader).Read +func (n *CommentGroupNode) Read(p []byte) (int, error) { + return readNode(p, n) +} + +// Type returns TagType +func (n *CommentGroupNode) Type() NodeType { return CommentType } + +// GetToken returns token instance +func (n *CommentGroupNode) GetToken() *token.Token { + if len(n.Comments) > 0 { + return n.Comments[0].Token + } + return nil +} + +// AddColumn add column number to 
child nodes recursively +func (n *CommentGroupNode) AddColumn(col int) { + for _, comment := range n.Comments { + comment.AddColumn(col) + } +} + +// String comment to text +func (n *CommentGroupNode) String() string { + values := []string{} + for _, comment := range n.Comments { + values = append(values, comment.String()) + } + return strings.Join(values, "\n") +} + +func (n *CommentGroupNode) StringWithSpace(col int) string { + values := []string{} + space := strings.Repeat(" ", col) + for _, comment := range n.Comments { + values = append(values, space+comment.String()) + } + return strings.Join(values, "\n") + +} + +// MarshalYAML encodes to a YAML text +func (n *CommentGroupNode) MarshalYAML() ([]byte, error) { + return []byte(n.String()), nil +} + +// Visitor has Visit method that is invokded for each node encountered by Walk. +// If the result visitor w is not nil, Walk visits each of the children of node with the visitor w, +// followed by a call of w.Visit(nil). +type Visitor interface { + Visit(Node) Visitor +} + +// Walk traverses an AST in depth-first order: It starts by calling v.Visit(node); node must not be nil. +// If the visitor w returned by v.Visit(node) is not nil, +// Walk is invoked recursively with visitor w for each of the non-nil children of node, +// followed by a call of w.Visit(nil). +func Walk(v Visitor, node Node) { + if v = v.Visit(node); v == nil { + return + } + + switch n := node.(type) { + case *CommentNode: + case *NullNode: + walkComment(v, n.BaseNode) + case *IntegerNode: + walkComment(v, n.BaseNode) + case *FloatNode: + walkComment(v, n.BaseNode) + case *StringNode: + walkComment(v, n.BaseNode) + case *MergeKeyNode: + walkComment(v, n.BaseNode) + case *BoolNode: + walkComment(v, n.BaseNode) + case *InfinityNode: + walkComment(v, n.BaseNode) + case *NanNode: + walkComment(v, n.BaseNode) + case *LiteralNode: + walkComment(v, n.BaseNode) + Walk(v, n.Value) + case *DirectiveNode: + walkComment(v, n.BaseNode) + Walk(v, n.Value) + case *TagNode: + walkComment(v, n.BaseNode) + Walk(v, n.Value) + case *DocumentNode: + walkComment(v, n.BaseNode) + Walk(v, n.Body) + case *MappingNode: + walkComment(v, n.BaseNode) + for _, value := range n.Values { + Walk(v, value) + } + case *MappingKeyNode: + walkComment(v, n.BaseNode) + Walk(v, n.Value) + case *MappingValueNode: + walkComment(v, n.BaseNode) + Walk(v, n.Key) + Walk(v, n.Value) + case *SequenceNode: + walkComment(v, n.BaseNode) + for _, value := range n.Values { + Walk(v, value) + } + case *AnchorNode: + walkComment(v, n.BaseNode) + Walk(v, n.Name) + Walk(v, n.Value) + case *AliasNode: + walkComment(v, n.BaseNode) + Walk(v, n.Value) + } +} + +func walkComment(v Visitor, base *BaseNode) { + if base == nil { + return + } + if base.Comment == nil { + return + } + Walk(v, base.Comment) +} + +type filterWalker struct { + typ NodeType + results []Node +} + +func (v *filterWalker) Visit(n Node) Visitor { + if v.typ == n.Type() { + v.results = append(v.results, n) + } + return v +} + +type parentFinder struct { + target Node +} + +func (f *parentFinder) walk(parent, node Node) Node { + if f.target == node { + return parent + } + switch n := node.(type) { + case *CommentNode: + return nil + case *NullNode: + return nil + case *IntegerNode: + return nil + case *FloatNode: + return nil + case *StringNode: + return nil + case *MergeKeyNode: + return nil + case *BoolNode: + return nil + case *InfinityNode: + return nil + case *NanNode: + return nil + case *LiteralNode: + return f.walk(node, n.Value) + case *DirectiveNode: + 
return f.walk(node, n.Value) + case *TagNode: + return f.walk(node, n.Value) + case *DocumentNode: + return f.walk(node, n.Body) + case *MappingNode: + for _, value := range n.Values { + if found := f.walk(node, value); found != nil { + return found + } + } + case *MappingKeyNode: + return f.walk(node, n.Value) + case *MappingValueNode: + if found := f.walk(node, n.Key); found != nil { + return found + } + return f.walk(node, n.Value) + case *SequenceNode: + for _, value := range n.Values { + if found := f.walk(node, value); found != nil { + return found + } + } + case *AnchorNode: + if found := f.walk(node, n.Name); found != nil { + return found + } + return f.walk(node, n.Value) + case *AliasNode: + return f.walk(node, n.Value) + } + return nil +} + +// Parent get parent node from child node. +func Parent(root, child Node) Node { + finder := &parentFinder{target: child} + return finder.walk(root, root) +} + +// Filter returns a list of nodes that match the given type. +func Filter(typ NodeType, node Node) []Node { + walker := &filterWalker{typ: typ} + Walk(walker, node) + return walker.results +} + +// FilterFile returns a list of nodes that match the given type. +func FilterFile(typ NodeType, file *File) []Node { + results := []Node{} + for _, doc := range file.Docs { + walker := &filterWalker{typ: typ} + Walk(walker, doc) + results = append(results, walker.results...) + } + return results +} + +type ErrInvalidMergeType struct { + dst Node + src Node +} + +func (e *ErrInvalidMergeType) Error() string { + return fmt.Sprintf("cannot merge %s into %s", e.src.Type(), e.dst.Type()) +} + +// Merge merge document, map, sequence node. +func Merge(dst Node, src Node) error { + if doc, ok := src.(*DocumentNode); ok { + src = doc.Body + } + err := &ErrInvalidMergeType{dst: dst, src: src} + switch dst.Type() { + case DocumentType: + node := dst.(*DocumentNode) + return Merge(node.Body, src) + case MappingType: + node := dst.(*MappingNode) + target, ok := src.(*MappingNode) + if !ok { + return err + } + node.Merge(target) + return nil + case SequenceType: + node := dst.(*SequenceNode) + target, ok := src.(*SequenceNode) + if !ok { + return err + } + node.Merge(target) + return nil + } + return err +} diff --git a/vendor/github.com/goccy/go-yaml/decode.go b/vendor/github.com/goccy/go-yaml/decode.go new file mode 100644 index 00000000..72af5e22 --- /dev/null +++ b/vendor/github.com/goccy/go-yaml/decode.go @@ -0,0 +1,1796 @@ +package yaml + +import ( + "bytes" + "context" + "encoding" + "encoding/base64" + "fmt" + "io" + "math" + "os" + "path/filepath" + "reflect" + "sort" + "strconv" + "time" + + "golang.org/x/xerrors" + + "github.com/goccy/go-yaml/ast" + "github.com/goccy/go-yaml/internal/errors" + "github.com/goccy/go-yaml/parser" + "github.com/goccy/go-yaml/token" +) + +// Decoder reads and decodes YAML values from an input stream. +type Decoder struct { + reader io.Reader + referenceReaders []io.Reader + anchorNodeMap map[string]ast.Node + anchorValueMap map[string]reflect.Value + customUnmarshalerMap map[reflect.Type]func(interface{}, []byte) error + toCommentMap CommentMap + opts []DecodeOption + referenceFiles []string + referenceDirs []string + isRecursiveDir bool + isResolvedReference bool + validator StructValidator + disallowUnknownField bool + disallowDuplicateKey bool + useOrderedMap bool + useJSONUnmarshaler bool + parsedFile *ast.File + streamIndex int +} + +// NewDecoder returns a new decoder that reads from r. 
+func NewDecoder(r io.Reader, opts ...DecodeOption) *Decoder { + return &Decoder{ + reader: r, + anchorNodeMap: map[string]ast.Node{}, + anchorValueMap: map[string]reflect.Value{}, + customUnmarshalerMap: map[reflect.Type]func(interface{}, []byte) error{}, + opts: opts, + referenceReaders: []io.Reader{}, + referenceFiles: []string{}, + referenceDirs: []string{}, + isRecursiveDir: false, + isResolvedReference: false, + disallowUnknownField: false, + disallowDuplicateKey: false, + useOrderedMap: false, + } +} + +func (d *Decoder) castToFloat(v interface{}) interface{} { + switch vv := v.(type) { + case int: + return float64(vv) + case int8: + return float64(vv) + case int16: + return float64(vv) + case int32: + return float64(vv) + case int64: + return float64(vv) + case uint: + return float64(vv) + case uint8: + return float64(vv) + case uint16: + return float64(vv) + case uint32: + return float64(vv) + case uint64: + return float64(vv) + case float32: + return float64(vv) + case float64: + return vv + case string: + // if error occurred, return zero value + f, _ := strconv.ParseFloat(vv, 64) + return f + } + return 0 +} + +func (d *Decoder) mergeValueNode(value ast.Node) ast.Node { + if value.Type() == ast.AliasType { + aliasNode := value.(*ast.AliasNode) + aliasName := aliasNode.Value.GetToken().Value + return d.anchorNodeMap[aliasName] + } + return value +} + +func (d *Decoder) mapKeyNodeToString(node ast.MapKeyNode) string { + key := d.nodeToValue(node) + if key == nil { + return "null" + } + if k, ok := key.(string); ok { + return k + } + return fmt.Sprint(key) +} + +func (d *Decoder) setToMapValue(node ast.Node, m map[string]interface{}) { + d.setPathToCommentMap(node) + switch n := node.(type) { + case *ast.MappingValueNode: + if n.Key.Type() == ast.MergeKeyType { + d.setToMapValue(d.mergeValueNode(n.Value), m) + } else { + key := d.mapKeyNodeToString(n.Key) + m[key] = d.nodeToValue(n.Value) + } + case *ast.MappingNode: + for _, value := range n.Values { + d.setToMapValue(value, m) + } + case *ast.AnchorNode: + anchorName := n.Name.GetToken().Value + d.anchorNodeMap[anchorName] = n.Value + } +} + +func (d *Decoder) setToOrderedMapValue(node ast.Node, m *MapSlice) { + switch n := node.(type) { + case *ast.MappingValueNode: + if n.Key.Type() == ast.MergeKeyType { + d.setToOrderedMapValue(d.mergeValueNode(n.Value), m) + } else { + key := d.mapKeyNodeToString(n.Key) + *m = append(*m, MapItem{Key: key, Value: d.nodeToValue(n.Value)}) + } + case *ast.MappingNode: + for _, value := range n.Values { + d.setToOrderedMapValue(value, m) + } + } +} + +func (d *Decoder) setPathToCommentMap(node ast.Node) { + if d.toCommentMap == nil { + return + } + d.addHeadOrLineCommentToMap(node) + d.addFootCommentToMap(node) +} + +func (d *Decoder) addHeadOrLineCommentToMap(node ast.Node) { + sequence, ok := node.(*ast.SequenceNode) + if ok { + d.addSequenceNodeCommentToMap(sequence) + return + } + commentGroup := node.GetComment() + if commentGroup == nil { + return + } + texts := []string{} + targetLine := node.GetToken().Position.Line + minCommentLine := math.MaxInt + for _, comment := range commentGroup.Comments { + if minCommentLine > comment.Token.Position.Line { + minCommentLine = comment.Token.Position.Line + } + texts = append(texts, comment.Token.Value) + } + if len(texts) == 0 { + return + } + commentPath := node.GetPath() + if minCommentLine < targetLine { + d.addCommentToMap(commentPath, HeadComment(texts...)) + } else { + d.addCommentToMap(commentPath, LineComment(texts[0])) + } +} + +func (d 
*Decoder) addSequenceNodeCommentToMap(node *ast.SequenceNode) { + if len(node.ValueHeadComments) != 0 { + for idx, headComment := range node.ValueHeadComments { + if headComment == nil { + continue + } + texts := make([]string, 0, len(headComment.Comments)) + for _, comment := range headComment.Comments { + texts = append(texts, comment.Token.Value) + } + if len(texts) != 0 { + d.addCommentToMap(node.Values[idx].GetPath(), HeadComment(texts...)) + } + } + } + firstElemHeadComment := node.GetComment() + if firstElemHeadComment != nil { + texts := make([]string, 0, len(firstElemHeadComment.Comments)) + for _, comment := range firstElemHeadComment.Comments { + texts = append(texts, comment.Token.Value) + } + if len(texts) != 0 { + d.addCommentToMap(node.Values[0].GetPath(), HeadComment(texts...)) + } + } +} + +func (d *Decoder) addFootCommentToMap(node ast.Node) { + var ( + footComment *ast.CommentGroupNode + footCommentPath string = node.GetPath() + ) + switch n := node.(type) { + case *ast.SequenceNode: + if len(n.Values) != 0 { + footCommentPath = n.Values[len(n.Values)-1].GetPath() + } + footComment = n.FootComment + case *ast.MappingNode: + footComment = n.FootComment + case *ast.MappingValueNode: + footComment = n.FootComment + } + if footComment == nil { + return + } + var texts []string + for _, comment := range footComment.Comments { + texts = append(texts, comment.Token.Value) + } + if len(texts) != 0 { + d.addCommentToMap(footCommentPath, FootComment(texts...)) + } +} + +func (d *Decoder) addCommentToMap(path string, comment *Comment) { + for _, c := range d.toCommentMap[path] { + if c.Position == comment.Position { + // already added same comment + return + } + } + d.toCommentMap[path] = append(d.toCommentMap[path], comment) + sort.Slice(d.toCommentMap[path], func(i, j int) bool { + return d.toCommentMap[path][i].Position < d.toCommentMap[path][j].Position + }) +} + +func (d *Decoder) nodeToValue(node ast.Node) interface{} { + d.setPathToCommentMap(node) + switch n := node.(type) { + case *ast.NullNode: + return nil + case *ast.StringNode: + return n.GetValue() + case *ast.IntegerNode: + return n.GetValue() + case *ast.FloatNode: + return n.GetValue() + case *ast.BoolNode: + return n.GetValue() + case *ast.InfinityNode: + return n.GetValue() + case *ast.NanNode: + return n.GetValue() + case *ast.TagNode: + switch token.ReservedTagKeyword(n.Start.Value) { + case token.TimestampTag: + t, _ := d.castToTime(n.Value) + return t + case token.IntegerTag: + i, _ := strconv.Atoi(fmt.Sprint(d.nodeToValue(n.Value))) + return i + case token.FloatTag: + return d.castToFloat(d.nodeToValue(n.Value)) + case token.NullTag: + return nil + case token.BinaryTag: + b, _ := base64.StdEncoding.DecodeString(d.nodeToValue(n.Value).(string)) + return b + case token.StringTag: + return d.nodeToValue(n.Value) + case token.MappingTag: + return d.nodeToValue(n.Value) + } + case *ast.AnchorNode: + anchorName := n.Name.GetToken().Value + anchorValue := d.nodeToValue(n.Value) + d.anchorNodeMap[anchorName] = n.Value + return anchorValue + case *ast.AliasNode: + aliasName := n.Value.GetToken().Value + node := d.anchorNodeMap[aliasName] + return d.nodeToValue(node) + case *ast.LiteralNode: + return n.Value.GetValue() + case *ast.MappingKeyNode: + return d.nodeToValue(n.Value) + case *ast.MappingValueNode: + if n.Key.Type() == ast.MergeKeyType { + value := d.mergeValueNode(n.Value) + if d.useOrderedMap { + m := MapSlice{} + d.setToOrderedMapValue(value, &m) + return m + } + m := map[string]interface{}{} + 
d.setToMapValue(value, m) + return m + } + key := d.mapKeyNodeToString(n.Key) + if d.useOrderedMap { + return MapSlice{{Key: key, Value: d.nodeToValue(n.Value)}} + } + return map[string]interface{}{ + key: d.nodeToValue(n.Value), + } + case *ast.MappingNode: + if d.useOrderedMap { + m := make(MapSlice, 0, len(n.Values)) + for _, value := range n.Values { + d.setToOrderedMapValue(value, &m) + } + return m + } + m := make(map[string]interface{}, len(n.Values)) + for _, value := range n.Values { + d.setToMapValue(value, m) + } + return m + case *ast.SequenceNode: + v := make([]interface{}, 0, len(n.Values)) + for _, value := range n.Values { + v = append(v, d.nodeToValue(value)) + } + return v + } + return nil +} + +func (d *Decoder) resolveAlias(node ast.Node) (ast.Node, error) { + switch n := node.(type) { + case *ast.MappingNode: + for idx, v := range n.Values { + value, err := d.resolveAlias(v) + if err != nil { + return nil, err + } + n.Values[idx] = value.(*ast.MappingValueNode) + } + case *ast.TagNode: + value, err := d.resolveAlias(n.Value) + if err != nil { + return nil, err + } + n.Value = value + case *ast.MappingKeyNode: + value, err := d.resolveAlias(n.Value) + if err != nil { + return nil, err + } + n.Value = value + case *ast.MappingValueNode: + if n.Key.Type() == ast.MergeKeyType && n.Value.Type() == ast.AliasType { + value, err := d.resolveAlias(n.Value) + if err != nil { + return nil, err + } + keyColumn := n.Key.GetToken().Position.Column + requiredColumn := keyColumn + 2 + value.AddColumn(requiredColumn) + n.Value = value + } else { + key, err := d.resolveAlias(n.Key) + if err != nil { + return nil, err + } + n.Key = key.(ast.MapKeyNode) + value, err := d.resolveAlias(n.Value) + if err != nil { + return nil, err + } + n.Value = value + } + case *ast.SequenceNode: + for idx, v := range n.Values { + value, err := d.resolveAlias(v) + if err != nil { + return nil, err + } + n.Values[idx] = value + } + case *ast.AliasNode: + aliasName := n.Value.GetToken().Value + node := d.anchorNodeMap[aliasName] + if node == nil { + return nil, xerrors.Errorf("cannot find anchor by alias name %s", aliasName) + } + return d.resolveAlias(node) + } + return node, nil +} + +func (d *Decoder) getMapNode(node ast.Node) (ast.MapNode, error) { + if _, ok := node.(*ast.NullNode); ok { + return nil, nil + } + if anchor, ok := node.(*ast.AnchorNode); ok { + mapNode, ok := anchor.Value.(ast.MapNode) + if ok { + return mapNode, nil + } + return nil, errUnexpectedNodeType(anchor.Value.Type(), ast.MappingType, node.GetToken()) + } + if alias, ok := node.(*ast.AliasNode); ok { + aliasName := alias.Value.GetToken().Value + node := d.anchorNodeMap[aliasName] + if node == nil { + return nil, xerrors.Errorf("cannot find anchor by alias name %s", aliasName) + } + mapNode, ok := node.(ast.MapNode) + if ok { + return mapNode, nil + } + return nil, errUnexpectedNodeType(node.Type(), ast.MappingType, node.GetToken()) + } + mapNode, ok := node.(ast.MapNode) + if !ok { + return nil, errUnexpectedNodeType(node.Type(), ast.MappingType, node.GetToken()) + } + return mapNode, nil +} + +func (d *Decoder) getArrayNode(node ast.Node) (ast.ArrayNode, error) { + if _, ok := node.(*ast.NullNode); ok { + return nil, nil + } + if anchor, ok := node.(*ast.AnchorNode); ok { + arrayNode, ok := anchor.Value.(ast.ArrayNode) + if ok { + return arrayNode, nil + } + + return nil, errUnexpectedNodeType(anchor.Value.Type(), ast.SequenceType, node.GetToken()) + } + if alias, ok := node.(*ast.AliasNode); ok { + aliasName := 
alias.Value.GetToken().Value + node := d.anchorNodeMap[aliasName] + if node == nil { + return nil, xerrors.Errorf("cannot find anchor by alias name %s", aliasName) + } + arrayNode, ok := node.(ast.ArrayNode) + if ok { + return arrayNode, nil + } + return nil, errUnexpectedNodeType(node.Type(), ast.SequenceType, node.GetToken()) + } + arrayNode, ok := node.(ast.ArrayNode) + if !ok { + return nil, errUnexpectedNodeType(node.Type(), ast.SequenceType, node.GetToken()) + } + return arrayNode, nil +} + +func (d *Decoder) fileToNode(f *ast.File) ast.Node { + for _, doc := range f.Docs { + if v := d.nodeToValue(doc.Body); v != nil { + return doc.Body + } + } + return nil +} + +func (d *Decoder) convertValue(v reflect.Value, typ reflect.Type, src ast.Node) (reflect.Value, error) { + if typ.Kind() != reflect.String { + if !v.Type().ConvertibleTo(typ) { + + // Special case for "strings -> floats" aka scientific notation + // If the destination type is a float and the source type is a string, check if we can + // use strconv.ParseFloat to convert the string to a float. + if (typ.Kind() == reflect.Float32 || typ.Kind() == reflect.Float64) && + v.Type().Kind() == reflect.String { + if f, err := strconv.ParseFloat(v.String(), 64); err == nil { + if typ.Kind() == reflect.Float32 { + return reflect.ValueOf(float32(f)), nil + } else if typ.Kind() == reflect.Float64 { + return reflect.ValueOf(f), nil + } + // else, fall through to the error below + } + } + return reflect.Zero(typ), errTypeMismatch(typ, v.Type(), src.GetToken()) + } + return v.Convert(typ), nil + } + // cast value to string + switch v.Type().Kind() { + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return reflect.ValueOf(fmt.Sprint(v.Int())), nil + case reflect.Float32, reflect.Float64: + return reflect.ValueOf(fmt.Sprint(v.Float())), nil + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return reflect.ValueOf(fmt.Sprint(v.Uint())), nil + case reflect.Bool: + return reflect.ValueOf(fmt.Sprint(v.Bool())), nil + } + if !v.Type().ConvertibleTo(typ) { + return reflect.Zero(typ), errTypeMismatch(typ, v.Type(), src.GetToken()) + } + return v.Convert(typ), nil +} + +type overflowError struct { + dstType reflect.Type + srcNum string +} + +func (e *overflowError) Error() string { + return fmt.Sprintf("cannot unmarshal %s into Go value of type %s ( overflow )", e.srcNum, e.dstType) +} + +func errOverflow(dstType reflect.Type, num string) *overflowError { + return &overflowError{dstType: dstType, srcNum: num} +} + +func errTypeMismatch(dstType, srcType reflect.Type, token *token.Token) *errors.TypeError { + return &errors.TypeError{DstType: dstType, SrcType: srcType, Token: token} +} + +type unknownFieldError struct { + err error +} + +func (e *unknownFieldError) Error() string { + return e.err.Error() +} + +func errUnknownField(msg string, tk *token.Token) *unknownFieldError { + return &unknownFieldError{err: errors.ErrSyntax(msg, tk)} +} + +func errUnexpectedNodeType(actual, expected ast.NodeType, tk *token.Token) error { + return errors.ErrSyntax(fmt.Sprintf("%s was used where %s is expected", actual.YAMLName(), expected.YAMLName()), tk) +} + +type duplicateKeyError struct { + err error +} + +func (e *duplicateKeyError) Error() string { + return e.err.Error() +} + +func errDuplicateKey(msg string, tk *token.Token) *duplicateKeyError { + return &duplicateKeyError{err: errors.ErrSyntax(msg, tk)} +} + +func (d *Decoder) deleteStructKeys(structType reflect.Type, 
unknownFields map[string]ast.Node) error { + if structType.Kind() == reflect.Ptr { + structType = structType.Elem() + } + structFieldMap, err := structFieldMap(structType) + if err != nil { + return errors.Wrapf(err, "failed to create struct field map") + } + + for j := 0; j < structType.NumField(); j++ { + field := structType.Field(j) + if isIgnoredStructField(field) { + continue + } + + structField, exists := structFieldMap[field.Name] + if !exists { + continue + } + + if structField.IsInline { + d.deleteStructKeys(field.Type, unknownFields) + } else { + delete(unknownFields, structField.RenderName) + } + } + return nil +} + +func (d *Decoder) lastNode(node ast.Node) ast.Node { + switch n := node.(type) { + case *ast.MappingNode: + if len(n.Values) > 0 { + return d.lastNode(n.Values[len(n.Values)-1]) + } + case *ast.MappingValueNode: + return d.lastNode(n.Value) + case *ast.SequenceNode: + if len(n.Values) > 0 { + return d.lastNode(n.Values[len(n.Values)-1]) + } + } + return node +} + +func (d *Decoder) unmarshalableDocument(node ast.Node) ([]byte, error) { + var err error + node, err = d.resolveAlias(node) + if err != nil { + return nil, err + } + doc := node.String() + last := d.lastNode(node) + if last != nil && last.Type() == ast.LiteralType { + doc += "\n" + } + return []byte(doc), nil +} + +func (d *Decoder) unmarshalableText(node ast.Node) ([]byte, bool, error) { + var err error + node, err = d.resolveAlias(node) + if err != nil { + return nil, false, err + } + if node.Type() == ast.AnchorType { + node = node.(*ast.AnchorNode).Value + } + switch n := node.(type) { + case *ast.StringNode: + return []byte(n.Value), true, nil + case *ast.LiteralNode: + return []byte(n.Value.GetToken().Value), true, nil + default: + scalar, ok := n.(ast.ScalarNode) + if ok { + return []byte(fmt.Sprint(scalar.GetValue())), true, nil + } + } + return nil, false, nil +} + +type jsonUnmarshaler interface { + UnmarshalJSON([]byte) error +} + +func (d *Decoder) existsTypeInCustomUnmarshalerMap(t reflect.Type) bool { + if _, exists := d.customUnmarshalerMap[t]; exists { + return true + } + + globalCustomUnmarshalerMu.Lock() + defer globalCustomUnmarshalerMu.Unlock() + if _, exists := globalCustomUnmarshalerMap[t]; exists { + return true + } + return false +} + +func (d *Decoder) unmarshalerFromCustomUnmarshalerMap(t reflect.Type) (func(interface{}, []byte) error, bool) { + if unmarshaler, exists := d.customUnmarshalerMap[t]; exists { + return unmarshaler, exists + } + + globalCustomUnmarshalerMu.Lock() + defer globalCustomUnmarshalerMu.Unlock() + if unmarshaler, exists := globalCustomUnmarshalerMap[t]; exists { + return unmarshaler, exists + } + return nil, false +} + +func (d *Decoder) canDecodeByUnmarshaler(dst reflect.Value) bool { + ptrValue := dst.Addr() + if d.existsTypeInCustomUnmarshalerMap(ptrValue.Type()) { + return true + } + iface := ptrValue.Interface() + switch iface.(type) { + case BytesUnmarshalerContext: + return true + case BytesUnmarshaler: + return true + case InterfaceUnmarshalerContext: + return true + case InterfaceUnmarshaler: + return true + case *time.Time: + return true + case *time.Duration: + return true + case encoding.TextUnmarshaler: + return true + case jsonUnmarshaler: + return d.useJSONUnmarshaler + } + return false +} + +func (d *Decoder) decodeByUnmarshaler(ctx context.Context, dst reflect.Value, src ast.Node) error { + ptrValue := dst.Addr() + if unmarshaler, exists := d.unmarshalerFromCustomUnmarshalerMap(ptrValue.Type()); exists { + b, err := 
d.unmarshalableDocument(src) + if err != nil { + return errors.Wrapf(err, "failed to UnmarshalYAML") + } + if err := unmarshaler(ptrValue.Interface(), b); err != nil { + return errors.Wrapf(err, "failed to UnmarshalYAML") + } + return nil + } + iface := ptrValue.Interface() + + if unmarshaler, ok := iface.(BytesUnmarshalerContext); ok { + b, err := d.unmarshalableDocument(src) + if err != nil { + return errors.Wrapf(err, "failed to UnmarshalYAML") + } + if err := unmarshaler.UnmarshalYAML(ctx, b); err != nil { + return errors.Wrapf(err, "failed to UnmarshalYAML") + } + return nil + } + + if unmarshaler, ok := iface.(BytesUnmarshaler); ok { + b, err := d.unmarshalableDocument(src) + if err != nil { + return errors.Wrapf(err, "failed to UnmarshalYAML") + } + if err := unmarshaler.UnmarshalYAML(b); err != nil { + return errors.Wrapf(err, "failed to UnmarshalYAML") + } + return nil + } + + if unmarshaler, ok := iface.(InterfaceUnmarshalerContext); ok { + if err := unmarshaler.UnmarshalYAML(ctx, func(v interface{}) error { + rv := reflect.ValueOf(v) + if rv.Type().Kind() != reflect.Ptr { + return errors.ErrDecodeRequiredPointerType + } + if err := d.decodeValue(ctx, rv.Elem(), src); err != nil { + return errors.Wrapf(err, "failed to decode value") + } + return nil + }); err != nil { + return errors.Wrapf(err, "failed to UnmarshalYAML") + } + return nil + } + + if unmarshaler, ok := iface.(InterfaceUnmarshaler); ok { + if err := unmarshaler.UnmarshalYAML(func(v interface{}) error { + rv := reflect.ValueOf(v) + if rv.Type().Kind() != reflect.Ptr { + return errors.ErrDecodeRequiredPointerType + } + if err := d.decodeValue(ctx, rv.Elem(), src); err != nil { + return errors.Wrapf(err, "failed to decode value") + } + return nil + }); err != nil { + return errors.Wrapf(err, "failed to UnmarshalYAML") + } + return nil + } + + if _, ok := iface.(*time.Time); ok { + return d.decodeTime(ctx, dst, src) + } + + if _, ok := iface.(*time.Duration); ok { + return d.decodeDuration(ctx, dst, src) + } + + if unmarshaler, isText := iface.(encoding.TextUnmarshaler); isText { + b, ok, err := d.unmarshalableText(src) + if err != nil { + return errors.Wrapf(err, "failed to UnmarshalText") + } + if ok { + if err := unmarshaler.UnmarshalText(b); err != nil { + return errors.Wrapf(err, "failed to UnmarshalText") + } + return nil + } + } + + if d.useJSONUnmarshaler { + if unmarshaler, ok := iface.(jsonUnmarshaler); ok { + b, err := d.unmarshalableDocument(src) + if err != nil { + return errors.Wrapf(err, "failed to UnmarshalJSON") + } + jsonBytes, err := YAMLToJSON(b) + if err != nil { + return errors.Wrapf(err, "failed to convert yaml to json") + } + jsonBytes = bytes.TrimRight(jsonBytes, "\n") + if err := unmarshaler.UnmarshalJSON(jsonBytes); err != nil { + return errors.Wrapf(err, "failed to UnmarshalJSON") + } + return nil + } + } + + return xerrors.Errorf("does not implemented Unmarshaler") +} + +var ( + astNodeType = reflect.TypeOf((*ast.Node)(nil)).Elem() +) + +func (d *Decoder) decodeValue(ctx context.Context, dst reflect.Value, src ast.Node) error { + if src.Type() == ast.AnchorType { + anchorName := src.(*ast.AnchorNode).Name.GetToken().Value + if _, exists := d.anchorValueMap[anchorName]; !exists { + d.anchorValueMap[anchorName] = dst + } + } + if d.canDecodeByUnmarshaler(dst) { + if err := d.decodeByUnmarshaler(ctx, dst, src); err != nil { + return errors.Wrapf(err, "failed to decode by unmarshaler") + } + return nil + } + valueType := dst.Type() + switch valueType.Kind() { + case reflect.Ptr: + if dst.IsNil() 
{ + return nil + } + if src.Type() == ast.NullType { + // set nil value to pointer + dst.Set(reflect.Zero(valueType)) + return nil + } + v := d.createDecodableValue(dst.Type()) + if err := d.decodeValue(ctx, v, src); err != nil { + return errors.Wrapf(err, "failed to decode ptr value") + } + dst.Set(d.castToAssignableValue(v, dst.Type())) + case reflect.Interface: + if dst.Type() == astNodeType { + dst.Set(reflect.ValueOf(src)) + return nil + } + v := reflect.ValueOf(d.nodeToValue(src)) + if v.IsValid() { + dst.Set(v) + } + case reflect.Map: + return d.decodeMap(ctx, dst, src) + case reflect.Array: + return d.decodeArray(ctx, dst, src) + case reflect.Slice: + if mapSlice, ok := dst.Addr().Interface().(*MapSlice); ok { + return d.decodeMapSlice(ctx, mapSlice, src) + } + return d.decodeSlice(ctx, dst, src) + case reflect.Struct: + if mapItem, ok := dst.Addr().Interface().(*MapItem); ok { + return d.decodeMapItem(ctx, mapItem, src) + } + return d.decodeStruct(ctx, dst, src) + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + v := d.nodeToValue(src) + switch vv := v.(type) { + case int64: + if !dst.OverflowInt(vv) { + dst.SetInt(vv) + return nil + } + case uint64: + if vv <= math.MaxInt64 && !dst.OverflowInt(int64(vv)) { + dst.SetInt(int64(vv)) + return nil + } + case float64: + if vv <= math.MaxInt64 && !dst.OverflowInt(int64(vv)) { + dst.SetInt(int64(vv)) + return nil + } + case string: // handle scientific notation + if i, err := strconv.ParseFloat(vv, 64); err == nil { + if 0 <= i && i <= math.MaxUint64 && !dst.OverflowInt(int64(i)) { + dst.SetInt(int64(i)) + return nil + } + } else { // couldn't be parsed as float + return errTypeMismatch(valueType, reflect.TypeOf(v), src.GetToken()) + } + default: + return errTypeMismatch(valueType, reflect.TypeOf(v), src.GetToken()) + } + return errOverflow(valueType, fmt.Sprint(v)) + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + v := d.nodeToValue(src) + switch vv := v.(type) { + case int64: + if 0 <= vv && !dst.OverflowUint(uint64(vv)) { + dst.SetUint(uint64(vv)) + return nil + } + case uint64: + if !dst.OverflowUint(vv) { + dst.SetUint(vv) + return nil + } + case float64: + if 0 <= vv && vv <= math.MaxUint64 && !dst.OverflowUint(uint64(vv)) { + dst.SetUint(uint64(vv)) + return nil + } + case string: // handle scientific notation + if i, err := strconv.ParseFloat(vv, 64); err == nil { + if 0 <= i && i <= math.MaxUint64 && !dst.OverflowUint(uint64(i)) { + dst.SetUint(uint64(i)) + return nil + } + } else { // couldn't be parsed as float + return errTypeMismatch(valueType, reflect.TypeOf(v), src.GetToken()) + } + + default: + return errTypeMismatch(valueType, reflect.TypeOf(v), src.GetToken()) + } + return errOverflow(valueType, fmt.Sprint(v)) + } + v := reflect.ValueOf(d.nodeToValue(src)) + if v.IsValid() { + convertedValue, err := d.convertValue(v, dst.Type(), src) + if err != nil { + return errors.Wrapf(err, "failed to convert value") + } + dst.Set(convertedValue) + } + return nil +} + +func (d *Decoder) createDecodableValue(typ reflect.Type) reflect.Value { + for { + if typ.Kind() == reflect.Ptr { + typ = typ.Elem() + continue + } + break + } + return reflect.New(typ).Elem() +} + +func (d *Decoder) castToAssignableValue(value reflect.Value, target reflect.Type) reflect.Value { + if target.Kind() != reflect.Ptr { + return value + } + maxTryCount := 5 + tryCount := 0 + for { + if tryCount > maxTryCount { + return value + } + if value.Type().AssignableTo(target) { + break + } + value = 
value.Addr() + tryCount++ + } + return value +} + +func (d *Decoder) createDecodedNewValue( + ctx context.Context, typ reflect.Type, defaultVal reflect.Value, node ast.Node, +) (reflect.Value, error) { + if node.Type() == ast.AliasType { + aliasName := node.(*ast.AliasNode).Value.GetToken().Value + newValue := d.anchorValueMap[aliasName] + if newValue.IsValid() { + return newValue, nil + } + } + if node.Type() == ast.NullType { + return reflect.Zero(typ), nil + } + newValue := d.createDecodableValue(typ) + for defaultVal.Kind() == reflect.Ptr { + defaultVal = defaultVal.Elem() + } + if defaultVal.IsValid() && defaultVal.Type().AssignableTo(newValue.Type()) { + newValue.Set(defaultVal) + } + if err := d.decodeValue(ctx, newValue, node); err != nil { + return newValue, errors.Wrapf(err, "failed to decode value") + } + return newValue, nil +} + +func (d *Decoder) keyToNodeMap(node ast.Node, ignoreMergeKey bool, getKeyOrValueNode func(*ast.MapNodeIter) ast.Node) (map[string]ast.Node, error) { + mapNode, err := d.getMapNode(node) + if err != nil { + return nil, errors.Wrapf(err, "failed to get map node") + } + keyMap := map[string]struct{}{} + keyToNodeMap := map[string]ast.Node{} + if mapNode == nil { + return keyToNodeMap, nil + } + mapIter := mapNode.MapRange() + for mapIter.Next() { + keyNode := mapIter.Key() + if keyNode.Type() == ast.MergeKeyType { + if ignoreMergeKey { + continue + } + mergeMap, err := d.keyToNodeMap(mapIter.Value(), ignoreMergeKey, getKeyOrValueNode) + if err != nil { + return nil, errors.Wrapf(err, "failed to get keyToNodeMap by MergeKey node") + } + for k, v := range mergeMap { + if err := d.validateDuplicateKey(keyMap, k, v); err != nil { + return nil, errors.Wrapf(err, "invalid struct key") + } + keyToNodeMap[k] = v + } + } else { + key, ok := d.nodeToValue(keyNode).(string) + if !ok { + return nil, errors.Wrapf(err, "failed to decode map key") + } + if err := d.validateDuplicateKey(keyMap, key, keyNode); err != nil { + return nil, errors.Wrapf(err, "invalid struct key") + } + keyToNodeMap[key] = getKeyOrValueNode(mapIter) + } + } + return keyToNodeMap, nil +} + +func (d *Decoder) keyToKeyNodeMap(node ast.Node, ignoreMergeKey bool) (map[string]ast.Node, error) { + m, err := d.keyToNodeMap(node, ignoreMergeKey, func(nodeMap *ast.MapNodeIter) ast.Node { return nodeMap.Key() }) + if err != nil { + return nil, errors.Wrapf(err, "failed to get keyToNodeMap") + } + return m, nil +} + +func (d *Decoder) keyToValueNodeMap(node ast.Node, ignoreMergeKey bool) (map[string]ast.Node, error) { + m, err := d.keyToNodeMap(node, ignoreMergeKey, func(nodeMap *ast.MapNodeIter) ast.Node { return nodeMap.Value() }) + if err != nil { + return nil, errors.Wrapf(err, "failed to get keyToNodeMap") + } + return m, nil +} + +func (d *Decoder) setDefaultValueIfConflicted(v reflect.Value, fieldMap StructFieldMap) error { + typ := v.Type() + if typ.Kind() != reflect.Struct { + return nil + } + embeddedStructFieldMap, err := structFieldMap(typ) + if err != nil { + return errors.Wrapf(err, "failed to get struct field map by embedded type") + } + for i := 0; i < typ.NumField(); i++ { + field := typ.Field(i) + if isIgnoredStructField(field) { + continue + } + structField := embeddedStructFieldMap[field.Name] + if !fieldMap.isIncludedRenderName(structField.RenderName) { + continue + } + // if declared same key name, set default value + fieldValue := v.Field(i) + if fieldValue.CanSet() { + fieldValue.Set(reflect.Zero(fieldValue.Type())) + } + } + return nil +} + +// This is a subset of the formats 
allowed by the regular expression +// defined at http://yaml.org/type/timestamp.html. +var allowedTimestampFormats = []string{ + "2006-1-2T15:4:5.999999999Z07:00", // RCF3339Nano with short date fields. + "2006-1-2t15:4:5.999999999Z07:00", // RFC3339Nano with short date fields and lower-case "t". + "2006-1-2 15:4:5.999999999", // space separated with no time zone + "2006-1-2", // date only +} + +func (d *Decoder) castToTime(src ast.Node) (time.Time, error) { + if src == nil { + return time.Time{}, nil + } + v := d.nodeToValue(src) + if t, ok := v.(time.Time); ok { + return t, nil + } + s, ok := v.(string) + if !ok { + return time.Time{}, errTypeMismatch(reflect.TypeOf(time.Time{}), reflect.TypeOf(v), src.GetToken()) + } + for _, format := range allowedTimestampFormats { + t, err := time.Parse(format, s) + if err != nil { + // invalid format + continue + } + return t, nil + } + return time.Time{}, nil +} + +func (d *Decoder) decodeTime(ctx context.Context, dst reflect.Value, src ast.Node) error { + t, err := d.castToTime(src) + if err != nil { + return errors.Wrapf(err, "failed to convert to time") + } + dst.Set(reflect.ValueOf(t)) + return nil +} + +func (d *Decoder) castToDuration(src ast.Node) (time.Duration, error) { + if src == nil { + return 0, nil + } + v := d.nodeToValue(src) + if t, ok := v.(time.Duration); ok { + return t, nil + } + s, ok := v.(string) + if !ok { + return 0, errTypeMismatch(reflect.TypeOf(time.Duration(0)), reflect.TypeOf(v), src.GetToken()) + } + t, err := time.ParseDuration(s) + if err != nil { + return 0, errors.Wrapf(err, "failed to parse duration") + } + return t, nil +} + +func (d *Decoder) decodeDuration(ctx context.Context, dst reflect.Value, src ast.Node) error { + t, err := d.castToDuration(src) + if err != nil { + return errors.Wrapf(err, "failed to convert to duration") + } + dst.Set(reflect.ValueOf(t)) + return nil +} + +// getMergeAliasName support single alias only +func (d *Decoder) getMergeAliasName(src ast.Node) string { + mapNode, err := d.getMapNode(src) + if err != nil { + return "" + } + if mapNode == nil { + return "" + } + mapIter := mapNode.MapRange() + for mapIter.Next() { + key := mapIter.Key() + value := mapIter.Value() + if key.Type() == ast.MergeKeyType && value.Type() == ast.AliasType { + return value.(*ast.AliasNode).Value.GetToken().Value + } + } + return "" +} + +func (d *Decoder) decodeStruct(ctx context.Context, dst reflect.Value, src ast.Node) error { + if src == nil { + return nil + } + structType := dst.Type() + srcValue := reflect.ValueOf(src) + srcType := srcValue.Type() + if srcType.Kind() == reflect.Ptr { + srcType = srcType.Elem() + srcValue = srcValue.Elem() + } + if structType == srcType { + // dst value implements ast.Node + dst.Set(srcValue) + return nil + } + structFieldMap, err := structFieldMap(structType) + if err != nil { + return errors.Wrapf(err, "failed to create struct field map") + } + ignoreMergeKey := structFieldMap.hasMergeProperty() + keyToNodeMap, err := d.keyToValueNodeMap(src, ignoreMergeKey) + if err != nil { + return errors.Wrapf(err, "failed to get keyToValueNodeMap") + } + var unknownFields map[string]ast.Node + if d.disallowUnknownField { + unknownFields, err = d.keyToKeyNodeMap(src, ignoreMergeKey) + if err != nil { + return errors.Wrapf(err, "failed to get keyToKeyNodeMap") + } + } + + aliasName := d.getMergeAliasName(src) + var foundErr error + + for i := 0; i < structType.NumField(); i++ { + field := structType.Field(i) + if isIgnoredStructField(field) { + continue + } + structField := 
structFieldMap[field.Name] + if structField.IsInline { + fieldValue := dst.FieldByName(field.Name) + if structField.IsAutoAlias { + if aliasName != "" { + newFieldValue := d.anchorValueMap[aliasName] + if newFieldValue.IsValid() { + fieldValue.Set(d.castToAssignableValue(newFieldValue, fieldValue.Type())) + } + } + continue + } + if !fieldValue.CanSet() { + return xerrors.Errorf("cannot set embedded type as unexported field %s.%s", field.PkgPath, field.Name) + } + if fieldValue.Type().Kind() == reflect.Ptr && src.Type() == ast.NullType { + // set nil value to pointer + fieldValue.Set(reflect.Zero(fieldValue.Type())) + continue + } + mapNode := ast.Mapping(nil, false) + for k, v := range keyToNodeMap { + key := &ast.StringNode{BaseNode: &ast.BaseNode{}, Value: k} + mapNode.Values = append(mapNode.Values, ast.MappingValue(nil, key, v)) + } + newFieldValue, err := d.createDecodedNewValue(ctx, fieldValue.Type(), fieldValue, mapNode) + if d.disallowUnknownField { + if err := d.deleteStructKeys(fieldValue.Type(), unknownFields); err != nil { + return errors.Wrapf(err, "cannot delete struct keys") + } + } + + if err != nil { + if foundErr != nil { + continue + } + var te *errors.TypeError + if xerrors.As(err, &te) { + if te.StructFieldName != nil { + fieldName := fmt.Sprintf("%s.%s", structType.Name(), *te.StructFieldName) + te.StructFieldName = &fieldName + } else { + fieldName := fmt.Sprintf("%s.%s", structType.Name(), field.Name) + te.StructFieldName = &fieldName + } + foundErr = te + continue + } else { + foundErr = err + } + continue + } + d.setDefaultValueIfConflicted(newFieldValue, structFieldMap) + fieldValue.Set(d.castToAssignableValue(newFieldValue, fieldValue.Type())) + continue + } + v, exists := keyToNodeMap[structField.RenderName] + if !exists { + continue + } + delete(unknownFields, structField.RenderName) + fieldValue := dst.FieldByName(field.Name) + if fieldValue.Type().Kind() == reflect.Ptr && src.Type() == ast.NullType { + // set nil value to pointer + fieldValue.Set(reflect.Zero(fieldValue.Type())) + continue + } + newFieldValue, err := d.createDecodedNewValue(ctx, fieldValue.Type(), fieldValue, v) + if err != nil { + if foundErr != nil { + continue + } + var te *errors.TypeError + if xerrors.As(err, &te) { + fieldName := fmt.Sprintf("%s.%s", structType.Name(), field.Name) + te.StructFieldName = &fieldName + foundErr = te + } else { + foundErr = err + } + continue + } + fieldValue.Set(d.castToAssignableValue(newFieldValue, fieldValue.Type())) + } + if foundErr != nil { + return errors.Wrapf(foundErr, "failed to decode value") + } + + // Ignore unknown fields when parsing an inline struct (recognized by a nil token). + // Unknown fields are expected (they could be fields from the parent struct). 
+ if len(unknownFields) != 0 && d.disallowUnknownField && src.GetToken() != nil { + for key, node := range unknownFields { + return errUnknownField(fmt.Sprintf(`unknown field "%s"`, key), node.GetToken()) + } + } + + if d.validator != nil { + if err := d.validator.Struct(dst.Interface()); err != nil { + ev := reflect.ValueOf(err) + if ev.Type().Kind() == reflect.Slice { + for i := 0; i < ev.Len(); i++ { + fieldErr, ok := ev.Index(i).Interface().(FieldError) + if !ok { + continue + } + fieldName := fieldErr.StructField() + structField, exists := structFieldMap[fieldName] + if !exists { + continue + } + node, exists := keyToNodeMap[structField.RenderName] + if exists { + // TODO: to make FieldError message cutomizable + return errors.ErrSyntax(fmt.Sprintf("%s", err), node.GetToken()) + } else if t := src.GetToken(); t != nil && t.Prev != nil && t.Prev.Prev != nil { + // A missing required field will not be in the keyToNodeMap + // the error needs to be associated with the parent of the source node + return errors.ErrSyntax(fmt.Sprintf("%s", err), t.Prev.Prev) + } + } + } + return err + } + } + return nil +} + +func (d *Decoder) decodeArray(ctx context.Context, dst reflect.Value, src ast.Node) error { + arrayNode, err := d.getArrayNode(src) + if err != nil { + return errors.Wrapf(err, "failed to get array node") + } + if arrayNode == nil { + return nil + } + iter := arrayNode.ArrayRange() + arrayValue := reflect.New(dst.Type()).Elem() + arrayType := dst.Type() + elemType := arrayType.Elem() + idx := 0 + + var foundErr error + for iter.Next() { + v := iter.Value() + if elemType.Kind() == reflect.Ptr && v.Type() == ast.NullType { + // set nil value to pointer + arrayValue.Index(idx).Set(reflect.Zero(elemType)) + } else { + dstValue, err := d.createDecodedNewValue(ctx, elemType, reflect.Value{}, v) + if err != nil { + if foundErr == nil { + foundErr = err + } + continue + } else { + arrayValue.Index(idx).Set(d.castToAssignableValue(dstValue, elemType)) + } + } + idx++ + } + dst.Set(arrayValue) + if foundErr != nil { + return errors.Wrapf(foundErr, "failed to decode value") + } + return nil +} + +func (d *Decoder) decodeSlice(ctx context.Context, dst reflect.Value, src ast.Node) error { + arrayNode, err := d.getArrayNode(src) + if err != nil { + return errors.Wrapf(err, "failed to get array node") + } + if arrayNode == nil { + return nil + } + iter := arrayNode.ArrayRange() + sliceType := dst.Type() + sliceValue := reflect.MakeSlice(sliceType, 0, iter.Len()) + elemType := sliceType.Elem() + + var foundErr error + for iter.Next() { + v := iter.Value() + if elemType.Kind() == reflect.Ptr && v.Type() == ast.NullType { + // set nil value to pointer + sliceValue = reflect.Append(sliceValue, reflect.Zero(elemType)) + continue + } + dstValue, err := d.createDecodedNewValue(ctx, elemType, reflect.Value{}, v) + if err != nil { + if foundErr == nil { + foundErr = err + } + continue + } + sliceValue = reflect.Append(sliceValue, d.castToAssignableValue(dstValue, elemType)) + } + dst.Set(sliceValue) + if foundErr != nil { + return errors.Wrapf(foundErr, "failed to decode value") + } + return nil +} + +func (d *Decoder) decodeMapItem(ctx context.Context, dst *MapItem, src ast.Node) error { + mapNode, err := d.getMapNode(src) + if err != nil { + return errors.Wrapf(err, "failed to get map node") + } + if mapNode == nil { + return nil + } + mapIter := mapNode.MapRange() + if !mapIter.Next() { + return nil + } + key := mapIter.Key() + value := mapIter.Value() + if key.Type() == ast.MergeKeyType { + if err := 
d.decodeMapItem(ctx, dst, value); err != nil { + return errors.Wrapf(err, "failed to decode map with merge key") + } + return nil + } + *dst = MapItem{ + Key: d.nodeToValue(key), + Value: d.nodeToValue(value), + } + return nil +} + +func (d *Decoder) validateDuplicateKey(keyMap map[string]struct{}, key interface{}, keyNode ast.Node) error { + k, ok := key.(string) + if !ok { + return nil + } + if d.disallowDuplicateKey { + if _, exists := keyMap[k]; exists { + return errDuplicateKey(fmt.Sprintf(`duplicate key "%s"`, k), keyNode.GetToken()) + } + } + keyMap[k] = struct{}{} + return nil +} + +func (d *Decoder) decodeMapSlice(ctx context.Context, dst *MapSlice, src ast.Node) error { + mapNode, err := d.getMapNode(src) + if err != nil { + return errors.Wrapf(err, "failed to get map node") + } + if mapNode == nil { + return nil + } + mapSlice := MapSlice{} + mapIter := mapNode.MapRange() + keyMap := map[string]struct{}{} + for mapIter.Next() { + key := mapIter.Key() + value := mapIter.Value() + if key.Type() == ast.MergeKeyType { + var m MapSlice + if err := d.decodeMapSlice(ctx, &m, value); err != nil { + return errors.Wrapf(err, "failed to decode map with merge key") + } + for _, v := range m { + if err := d.validateDuplicateKey(keyMap, v.Key, value); err != nil { + return errors.Wrapf(err, "invalid map key") + } + mapSlice = append(mapSlice, v) + } + continue + } + k := d.nodeToValue(key) + if err := d.validateDuplicateKey(keyMap, k, key); err != nil { + return errors.Wrapf(err, "invalid map key") + } + mapSlice = append(mapSlice, MapItem{ + Key: k, + Value: d.nodeToValue(value), + }) + } + *dst = mapSlice + return nil +} + +func (d *Decoder) decodeMap(ctx context.Context, dst reflect.Value, src ast.Node) error { + mapNode, err := d.getMapNode(src) + if err != nil { + return errors.Wrapf(err, "failed to get map node") + } + if mapNode == nil { + return nil + } + mapType := dst.Type() + mapValue := reflect.MakeMap(mapType) + keyType := mapValue.Type().Key() + valueType := mapValue.Type().Elem() + mapIter := mapNode.MapRange() + keyMap := map[string]struct{}{} + var foundErr error + for mapIter.Next() { + key := mapIter.Key() + value := mapIter.Value() + if key.Type() == ast.MergeKeyType { + if err := d.decodeMap(ctx, dst, value); err != nil { + return errors.Wrapf(err, "failed to decode map with merge key") + } + iter := dst.MapRange() + for iter.Next() { + if err := d.validateDuplicateKey(keyMap, iter.Key(), value); err != nil { + return errors.Wrapf(err, "invalid map key") + } + mapValue.SetMapIndex(iter.Key(), iter.Value()) + } + continue + } + + k := d.createDecodableValue(keyType) + if d.canDecodeByUnmarshaler(k) { + if err := d.decodeByUnmarshaler(ctx, k, key); err != nil { + return errors.Wrapf(err, "failed to decode by unmarshaler") + } + } else { + k = reflect.ValueOf(d.nodeToValue(key)) + if k.IsValid() && k.Type().ConvertibleTo(keyType) { + k = k.Convert(keyType) + } + } + + if k.IsValid() { + if err := d.validateDuplicateKey(keyMap, k.Interface(), key); err != nil { + return errors.Wrapf(err, "invalid map key") + } + } + if valueType.Kind() == reflect.Ptr && value.Type() == ast.NullType { + // set nil value to pointer + mapValue.SetMapIndex(k, reflect.Zero(valueType)) + continue + } + dstValue, err := d.createDecodedNewValue(ctx, valueType, reflect.Value{}, value) + if err != nil { + if foundErr == nil { + foundErr = err + } + } + if !k.IsValid() { + // expect nil key + mapValue.SetMapIndex(d.createDecodableValue(keyType), d.castToAssignableValue(dstValue, valueType)) + continue + 
} + mapValue.SetMapIndex(k, d.castToAssignableValue(dstValue, valueType)) + } + dst.Set(mapValue) + if foundErr != nil { + return errors.Wrapf(foundErr, "failed to decode value") + } + return nil +} + +func (d *Decoder) fileToReader(file string) (io.Reader, error) { + reader, err := os.Open(file) + if err != nil { + return nil, errors.Wrapf(err, "failed to open file") + } + return reader, nil +} + +func (d *Decoder) isYAMLFile(file string) bool { + ext := filepath.Ext(file) + if ext == ".yml" { + return true + } + if ext == ".yaml" { + return true + } + return false +} + +func (d *Decoder) readersUnderDir(dir string) ([]io.Reader, error) { + pattern := fmt.Sprintf("%s/*", dir) + matches, err := filepath.Glob(pattern) + if err != nil { + return nil, errors.Wrapf(err, "failed to get files by %s", pattern) + } + readers := []io.Reader{} + for _, match := range matches { + if !d.isYAMLFile(match) { + continue + } + reader, err := d.fileToReader(match) + if err != nil { + return nil, errors.Wrapf(err, "failed to get reader") + } + readers = append(readers, reader) + } + return readers, nil +} + +func (d *Decoder) readersUnderDirRecursive(dir string) ([]io.Reader, error) { + readers := []io.Reader{} + if err := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error { + if !d.isYAMLFile(path) { + return nil + } + reader, err := d.fileToReader(path) + if err != nil { + return errors.Wrapf(err, "failed to get reader") + } + readers = append(readers, reader) + return nil + }); err != nil { + return nil, errors.Wrapf(err, "interrupt walk in %s", dir) + } + return readers, nil +} + +func (d *Decoder) resolveReference() error { + for _, opt := range d.opts { + if err := opt(d); err != nil { + return errors.Wrapf(err, "failed to exec option") + } + } + for _, file := range d.referenceFiles { + reader, err := d.fileToReader(file) + if err != nil { + return errors.Wrapf(err, "failed to get reader") + } + d.referenceReaders = append(d.referenceReaders, reader) + } + for _, dir := range d.referenceDirs { + if !d.isRecursiveDir { + readers, err := d.readersUnderDir(dir) + if err != nil { + return errors.Wrapf(err, "failed to get readers from under the %s", dir) + } + d.referenceReaders = append(d.referenceReaders, readers...) + } else { + readers, err := d.readersUnderDirRecursive(dir) + if err != nil { + return errors.Wrapf(err, "failed to get readers from under the %s", dir) + } + d.referenceReaders = append(d.referenceReaders, readers...) 
+ } + } + for _, reader := range d.referenceReaders { + bytes, err := io.ReadAll(reader) + if err != nil { + return errors.Wrapf(err, "failed to read buffer") + } + + // assign new anchor definition to anchorMap + if _, err := d.parse(bytes); err != nil { + return errors.Wrapf(err, "failed to decode") + } + } + d.isResolvedReference = true + return nil +} + +func (d *Decoder) parse(bytes []byte) (*ast.File, error) { + var parseMode parser.Mode + if d.toCommentMap != nil { + parseMode = parser.ParseComments + } + f, err := parser.ParseBytes(bytes, parseMode) + if err != nil { + return nil, errors.Wrapf(err, "failed to parse yaml") + } + normalizedFile := &ast.File{} + for _, doc := range f.Docs { + // try to decode ast.Node to value and map anchor value to anchorMap + if v := d.nodeToValue(doc.Body); v != nil { + normalizedFile.Docs = append(normalizedFile.Docs, doc) + } + } + return normalizedFile, nil +} + +func (d *Decoder) isInitialized() bool { + return d.parsedFile != nil +} + +func (d *Decoder) decodeInit() error { + if !d.isResolvedReference { + if err := d.resolveReference(); err != nil { + return errors.Wrapf(err, "failed to resolve reference") + } + } + var buf bytes.Buffer + if _, err := io.Copy(&buf, d.reader); err != nil { + return errors.Wrapf(err, "failed to copy from reader") + } + file, err := d.parse(buf.Bytes()) + if err != nil { + return errors.Wrapf(err, "failed to decode") + } + d.parsedFile = file + return nil +} + +func (d *Decoder) decode(ctx context.Context, v reflect.Value) error { + if len(d.parsedFile.Docs) <= d.streamIndex { + return io.EOF + } + body := d.parsedFile.Docs[d.streamIndex].Body + if body == nil { + return nil + } + if err := d.decodeValue(ctx, v.Elem(), body); err != nil { + return errors.Wrapf(err, "failed to decode value") + } + d.streamIndex++ + return nil +} + +// Decode reads the next YAML-encoded value from its input +// and stores it in the value pointed to by v. +// +// See the documentation for Unmarshal for details about the +// conversion of YAML into a Go value. +func (d *Decoder) Decode(v interface{}) error { + return d.DecodeContext(context.Background(), v) +} + +// DecodeContext reads the next YAML-encoded value from its input +// and stores it in the value pointed to by v with context.Context. +func (d *Decoder) DecodeContext(ctx context.Context, v interface{}) error { + rv := reflect.ValueOf(v) + if rv.Type().Kind() != reflect.Ptr { + return errors.ErrDecodeRequiredPointerType + } + if d.isInitialized() { + if err := d.decode(ctx, rv); err != nil { + if err == io.EOF { + return err + } + return errors.Wrapf(err, "failed to decode") + } + return nil + } + if err := d.decodeInit(); err != nil { + return errors.Wrapf(err, "failed to decodeInit") + } + if err := d.decode(ctx, rv); err != nil { + if err == io.EOF { + return err + } + return errors.Wrapf(err, "failed to decode") + } + return nil +} + +// DecodeFromNode decodes node into the value pointed to by v. +func (d *Decoder) DecodeFromNode(node ast.Node, v interface{}) error { + return d.DecodeFromNodeContext(context.Background(), node, v) +} + +// DecodeFromNodeContext decodes node into the value pointed to by v with context.Context. 
+func (d *Decoder) DecodeFromNodeContext(ctx context.Context, node ast.Node, v interface{}) error { + rv := reflect.ValueOf(v) + if rv.Type().Kind() != reflect.Ptr { + return errors.ErrDecodeRequiredPointerType + } + if !d.isInitialized() { + if err := d.decodeInit(); err != nil { + return errors.Wrapf(err, "failed to decodInit") + } + } + // resolve references to the anchor on the same file + d.nodeToValue(node) + if err := d.decodeValue(ctx, rv.Elem(), node); err != nil { + return errors.Wrapf(err, "failed to decode value") + } + return nil +} diff --git a/vendor/github.com/goccy/go-yaml/encode.go b/vendor/github.com/goccy/go-yaml/encode.go new file mode 100644 index 00000000..3b9b2981 --- /dev/null +++ b/vendor/github.com/goccy/go-yaml/encode.go @@ -0,0 +1,875 @@ +package yaml + +import ( + "context" + "encoding" + "fmt" + "io" + "math" + "reflect" + "sort" + "strconv" + "strings" + "time" + + "github.com/goccy/go-yaml/ast" + "github.com/goccy/go-yaml/internal/errors" + "github.com/goccy/go-yaml/parser" + "github.com/goccy/go-yaml/printer" + "github.com/goccy/go-yaml/token" + "golang.org/x/xerrors" +) + +const ( + // DefaultIndentSpaces default number of space for indent + DefaultIndentSpaces = 2 +) + +// Encoder writes YAML values to an output stream. +type Encoder struct { + writer io.Writer + opts []EncodeOption + indent int + indentSequence bool + singleQuote bool + isFlowStyle bool + isJSONStyle bool + useJSONMarshaler bool + anchorCallback func(*ast.AnchorNode, interface{}) error + anchorPtrToNameMap map[uintptr]string + customMarshalerMap map[reflect.Type]func(interface{}) ([]byte, error) + useLiteralStyleIfMultiline bool + commentMap map[*Path][]*Comment + written bool + + line int + column int + offset int + indentNum int + indentLevel int +} + +// NewEncoder returns a new encoder that writes to w. +// The Encoder should be closed after use to flush all data to w. +func NewEncoder(w io.Writer, opts ...EncodeOption) *Encoder { + return &Encoder{ + writer: w, + opts: opts, + indent: DefaultIndentSpaces, + anchorPtrToNameMap: map[uintptr]string{}, + customMarshalerMap: map[reflect.Type]func(interface{}) ([]byte, error){}, + line: 1, + column: 1, + offset: 0, + } +} + +// Close closes the encoder by writing any remaining data. +// It does not write a stream terminating string "...". +func (e *Encoder) Close() error { + return nil +} + +// Encode writes the YAML encoding of v to the stream. +// If multiple items are encoded to the stream, +// the second and subsequent document will be preceded with a "---" document separator, +// but the first will not. +// +// See the documentation for Marshal for details about the conversion of Go values to YAML. +func (e *Encoder) Encode(v interface{}) error { + return e.EncodeContext(context.Background(), v) +} + +// EncodeContext writes the YAML encoding of v to the stream with context.Context. +func (e *Encoder) EncodeContext(ctx context.Context, v interface{}) error { + node, err := e.EncodeToNodeContext(ctx, v) + if err != nil { + return errors.Wrapf(err, "failed to encode to node") + } + if err := e.setCommentByCommentMap(node); err != nil { + return errors.Wrapf(err, "failed to set comment by comment map") + } + if !e.written { + e.written = true + } else { + // write document separator + e.writer.Write([]byte("---\n")) + } + var p printer.Printer + e.writer.Write(p.PrintNode(node)) + return nil +} + +// EncodeToNode convert v to ast.Node. 
+func (e *Encoder) EncodeToNode(v interface{}) (ast.Node, error) { + return e.EncodeToNodeContext(context.Background(), v) +} + +// EncodeToNodeContext convert v to ast.Node with context.Context. +func (e *Encoder) EncodeToNodeContext(ctx context.Context, v interface{}) (ast.Node, error) { + for _, opt := range e.opts { + if err := opt(e); err != nil { + return nil, errors.Wrapf(err, "failed to run option for encoder") + } + } + node, err := e.encodeValue(ctx, reflect.ValueOf(v), 1) + if err != nil { + return nil, errors.Wrapf(err, "failed to encode value") + } + return node, nil +} + +func (e *Encoder) setCommentByCommentMap(node ast.Node) error { + if e.commentMap == nil { + return nil + } + for path, comments := range e.commentMap { + n, err := path.FilterNode(node) + if err != nil { + return errors.Wrapf(err, "failed to filter node") + } + if n == nil { + continue + } + for _, comment := range comments { + commentTokens := []*token.Token{} + for _, text := range comment.Texts { + commentTokens = append(commentTokens, token.New(text, text, nil)) + } + commentGroup := ast.CommentGroup(commentTokens) + switch comment.Position { + case CommentHeadPosition: + if err := e.setHeadComment(node, n, commentGroup); err != nil { + return errors.Wrapf(err, "failed to set head comment") + } + case CommentLinePosition: + if err := e.setLineComment(node, n, commentGroup); err != nil { + return errors.Wrapf(err, "failed to set line comment") + } + case CommentFootPosition: + if err := e.setFootComment(node, n, commentGroup); err != nil { + return errors.Wrapf(err, "failed to set foot comment") + } + default: + return ErrUnknownCommentPositionType + } + } + } + return nil +} + +func (e *Encoder) setHeadComment(node ast.Node, filtered ast.Node, comment *ast.CommentGroupNode) error { + parent := ast.Parent(node, filtered) + if parent == nil { + return ErrUnsupportedHeadPositionType(node) + } + switch p := parent.(type) { + case *ast.MappingValueNode: + if err := p.SetComment(comment); err != nil { + return errors.Wrapf(err, "failed to set comment") + } + case *ast.MappingNode: + if err := p.SetComment(comment); err != nil { + return errors.Wrapf(err, "failed to set comment") + } + case *ast.SequenceNode: + if len(p.ValueHeadComments) == 0 { + p.ValueHeadComments = make([]*ast.CommentGroupNode, len(p.Values)) + } + var foundIdx int + for idx, v := range p.Values { + if v == filtered { + foundIdx = idx + break + } + } + p.ValueHeadComments[foundIdx] = comment + default: + return ErrUnsupportedHeadPositionType(node) + } + return nil +} + +func (e *Encoder) setLineComment(node ast.Node, filtered ast.Node, comment *ast.CommentGroupNode) error { + switch filtered.(type) { + case *ast.MappingValueNode, *ast.SequenceNode: + // Line comment cannot be set for mapping value node. 
+ // It should probably be set for the parent map node + if err := e.setLineCommentToParentMapNode(node, filtered, comment); err != nil { + return errors.Wrapf(err, "failed to set line comment to parent node") + } + default: + if err := filtered.SetComment(comment); err != nil { + return errors.Wrapf(err, "failed to set comment") + } + } + return nil +} + +func (e *Encoder) setLineCommentToParentMapNode(node ast.Node, filtered ast.Node, comment *ast.CommentGroupNode) error { + parent := ast.Parent(node, filtered) + if parent == nil { + return ErrUnsupportedLinePositionType(node) + } + switch p := parent.(type) { + case *ast.MappingValueNode: + if err := p.Key.SetComment(comment); err != nil { + return errors.Wrapf(err, "failed to set comment") + } + case *ast.MappingNode: + if err := p.SetComment(comment); err != nil { + return errors.Wrapf(err, "failed to set comment") + } + default: + return ErrUnsupportedLinePositionType(parent) + } + return nil +} + +func (e *Encoder) setFootComment(node ast.Node, filtered ast.Node, comment *ast.CommentGroupNode) error { + parent := ast.Parent(node, filtered) + if parent == nil { + return ErrUnsupportedFootPositionType(node) + } + switch n := parent.(type) { + case *ast.MappingValueNode: + n.FootComment = comment + case *ast.MappingNode: + n.FootComment = comment + case *ast.SequenceNode: + n.FootComment = comment + default: + return ErrUnsupportedFootPositionType(n) + } + return nil +} + +func (e *Encoder) encodeDocument(doc []byte) (ast.Node, error) { + f, err := parser.ParseBytes(doc, 0) + if err != nil { + return nil, errors.Wrapf(err, "failed to parse yaml") + } + for _, docNode := range f.Docs { + if docNode.Body != nil { + return docNode.Body, nil + } + } + return nil, nil +} + +func (e *Encoder) isInvalidValue(v reflect.Value) bool { + if !v.IsValid() { + return true + } + kind := v.Type().Kind() + if kind == reflect.Ptr && v.IsNil() { + return true + } + if kind == reflect.Interface && v.IsNil() { + return true + } + return false +} + +type jsonMarshaler interface { + MarshalJSON() ([]byte, error) +} + +func (e *Encoder) existsTypeInCustomMarshalerMap(t reflect.Type) bool { + if _, exists := e.customMarshalerMap[t]; exists { + return true + } + + globalCustomMarshalerMu.Lock() + defer globalCustomMarshalerMu.Unlock() + if _, exists := globalCustomMarshalerMap[t]; exists { + return true + } + return false +} + +func (e *Encoder) marshalerFromCustomMarshalerMap(t reflect.Type) (func(interface{}) ([]byte, error), bool) { + if marshaler, exists := e.customMarshalerMap[t]; exists { + return marshaler, exists + } + + globalCustomMarshalerMu.Lock() + defer globalCustomMarshalerMu.Unlock() + if marshaler, exists := globalCustomMarshalerMap[t]; exists { + return marshaler, exists + } + return nil, false +} + +func (e *Encoder) canEncodeByMarshaler(v reflect.Value) bool { + if !v.CanInterface() { + return false + } + if e.existsTypeInCustomMarshalerMap(v.Type()) { + return true + } + iface := v.Interface() + switch iface.(type) { + case BytesMarshalerContext: + return true + case BytesMarshaler: + return true + case InterfaceMarshalerContext: + return true + case InterfaceMarshaler: + return true + case time.Time: + return true + case time.Duration: + return true + case encoding.TextMarshaler: + return true + case jsonMarshaler: + return e.useJSONMarshaler + } + return false +} + +func (e *Encoder) encodeByMarshaler(ctx context.Context, v reflect.Value, column int) (ast.Node, error) { + iface := v.Interface() + + if marshaler, exists := 
e.marshalerFromCustomMarshalerMap(v.Type()); exists { + doc, err := marshaler(iface) + if err != nil { + return nil, errors.Wrapf(err, "failed to MarshalYAML") + } + node, err := e.encodeDocument(doc) + if err != nil { + return nil, errors.Wrapf(err, "failed to encode document") + } + return node, nil + } + + if marshaler, ok := iface.(BytesMarshalerContext); ok { + doc, err := marshaler.MarshalYAML(ctx) + if err != nil { + return nil, errors.Wrapf(err, "failed to MarshalYAML") + } + node, err := e.encodeDocument(doc) + if err != nil { + return nil, errors.Wrapf(err, "failed to encode document") + } + return node, nil + } + + if marshaler, ok := iface.(BytesMarshaler); ok { + doc, err := marshaler.MarshalYAML() + if err != nil { + return nil, errors.Wrapf(err, "failed to MarshalYAML") + } + node, err := e.encodeDocument(doc) + if err != nil { + return nil, errors.Wrapf(err, "failed to encode document") + } + return node, nil + } + + if marshaler, ok := iface.(InterfaceMarshalerContext); ok { + marshalV, err := marshaler.MarshalYAML(ctx) + if err != nil { + return nil, errors.Wrapf(err, "failed to MarshalYAML") + } + return e.encodeValue(ctx, reflect.ValueOf(marshalV), column) + } + + if marshaler, ok := iface.(InterfaceMarshaler); ok { + marshalV, err := marshaler.MarshalYAML() + if err != nil { + return nil, errors.Wrapf(err, "failed to MarshalYAML") + } + return e.encodeValue(ctx, reflect.ValueOf(marshalV), column) + } + + if t, ok := iface.(time.Time); ok { + return e.encodeTime(t, column), nil + } + + if t, ok := iface.(time.Duration); ok { + return e.encodeDuration(t, column), nil + } + + if marshaler, ok := iface.(encoding.TextMarshaler); ok { + doc, err := marshaler.MarshalText() + if err != nil { + return nil, errors.Wrapf(err, "failed to MarshalText") + } + node, err := e.encodeDocument(doc) + if err != nil { + return nil, errors.Wrapf(err, "failed to encode document") + } + return node, nil + } + + if e.useJSONMarshaler { + if marshaler, ok := iface.(jsonMarshaler); ok { + jsonBytes, err := marshaler.MarshalJSON() + if err != nil { + return nil, errors.Wrapf(err, "failed to MarshalJSON") + } + doc, err := JSONToYAML(jsonBytes) + if err != nil { + return nil, errors.Wrapf(err, "failed to convert json to yaml") + } + node, err := e.encodeDocument(doc) + if err != nil { + return nil, errors.Wrapf(err, "failed to encode document") + } + return node, nil + } + } + + return nil, xerrors.Errorf("does not implemented Marshaler") +} + +func (e *Encoder) encodeValue(ctx context.Context, v reflect.Value, column int) (ast.Node, error) { + if e.isInvalidValue(v) { + return e.encodeNil(), nil + } + if e.canEncodeByMarshaler(v) { + node, err := e.encodeByMarshaler(ctx, v, column) + if err != nil { + return nil, errors.Wrapf(err, "failed to encode by marshaler") + } + return node, nil + } + switch v.Type().Kind() { + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return e.encodeInt(v.Int()), nil + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + return e.encodeUint(v.Uint()), nil + case reflect.Float32: + return e.encodeFloat(v.Float(), 32), nil + case reflect.Float64: + return e.encodeFloat(v.Float(), 64), nil + case reflect.Ptr: + anchorName := e.anchorPtrToNameMap[v.Pointer()] + if anchorName != "" { + aliasName := anchorName + alias := ast.Alias(token.New("*", "*", e.pos(column))) + alias.Value = ast.String(token.New(aliasName, aliasName, e.pos(column))) + return alias, nil + } + return e.encodeValue(ctx, v.Elem(), column) + 
case reflect.Interface: + return e.encodeValue(ctx, v.Elem(), column) + case reflect.String: + return e.encodeString(v.String(), column), nil + case reflect.Bool: + return e.encodeBool(v.Bool()), nil + case reflect.Slice: + if mapSlice, ok := v.Interface().(MapSlice); ok { + return e.encodeMapSlice(ctx, mapSlice, column) + } + return e.encodeSlice(ctx, v) + case reflect.Array: + return e.encodeArray(ctx, v) + case reflect.Struct: + if v.CanInterface() { + if mapItem, ok := v.Interface().(MapItem); ok { + return e.encodeMapItem(ctx, mapItem, column) + } + if t, ok := v.Interface().(time.Time); ok { + return e.encodeTime(t, column), nil + } + } + return e.encodeStruct(ctx, v, column) + case reflect.Map: + return e.encodeMap(ctx, v, column), nil + default: + return nil, xerrors.Errorf("unknown value type %s", v.Type().String()) + } +} + +func (e *Encoder) pos(column int) *token.Position { + return &token.Position{ + Line: e.line, + Column: column, + Offset: e.offset, + IndentNum: e.indentNum, + IndentLevel: e.indentLevel, + } +} + +func (e *Encoder) encodeNil() *ast.NullNode { + value := "null" + return ast.Null(token.New(value, value, e.pos(e.column))) +} + +func (e *Encoder) encodeInt(v int64) *ast.IntegerNode { + value := fmt.Sprint(v) + return ast.Integer(token.New(value, value, e.pos(e.column))) +} + +func (e *Encoder) encodeUint(v uint64) *ast.IntegerNode { + value := fmt.Sprint(v) + return ast.Integer(token.New(value, value, e.pos(e.column))) +} + +func (e *Encoder) encodeFloat(v float64, bitSize int) ast.Node { + if v == math.Inf(0) { + value := ".inf" + return ast.Infinity(token.New(value, value, e.pos(e.column))) + } else if v == math.Inf(-1) { + value := "-.inf" + return ast.Infinity(token.New(value, value, e.pos(e.column))) + } else if math.IsNaN(v) { + value := ".nan" + return ast.Nan(token.New(value, value, e.pos(e.column))) + } + value := strconv.FormatFloat(v, 'g', -1, bitSize) + if !strings.Contains(value, ".") && !strings.Contains(value, "e") { + // append x.0 suffix to keep float value context + value = fmt.Sprintf("%s.0", value) + } + return ast.Float(token.New(value, value, e.pos(e.column))) +} + +func (e *Encoder) isNeedQuoted(v string) bool { + if e.isJSONStyle { + return true + } + if e.useLiteralStyleIfMultiline && strings.ContainsAny(v, "\n\r") { + return false + } + if e.isFlowStyle && strings.ContainsAny(v, `]},'"`) { + return true + } + if token.IsNeedQuoted(v) { + return true + } + return false +} + +func (e *Encoder) encodeString(v string, column int) *ast.StringNode { + if e.isNeedQuoted(v) { + if e.singleQuote { + v = quoteWith(v, '\'') + } else { + v = strconv.Quote(v) + } + } + return ast.String(token.New(v, v, e.pos(column))) +} + +func (e *Encoder) encodeBool(v bool) *ast.BoolNode { + value := fmt.Sprint(v) + return ast.Bool(token.New(value, value, e.pos(e.column))) +} + +func (e *Encoder) encodeSlice(ctx context.Context, value reflect.Value) (*ast.SequenceNode, error) { + if e.indentSequence { + e.column += e.indent + } + column := e.column + sequence := ast.Sequence(token.New("-", "-", e.pos(column)), e.isFlowStyle) + for i := 0; i < value.Len(); i++ { + node, err := e.encodeValue(ctx, value.Index(i), column) + if err != nil { + return nil, errors.Wrapf(err, "failed to encode value for slice") + } + sequence.Values = append(sequence.Values, node) + } + if e.indentSequence { + e.column -= e.indent + } + return sequence, nil +} + +func (e *Encoder) encodeArray(ctx context.Context, value reflect.Value) (*ast.SequenceNode, error) { + if e.indentSequence { + 
e.column += e.indent + } + column := e.column + sequence := ast.Sequence(token.New("-", "-", e.pos(column)), e.isFlowStyle) + for i := 0; i < value.Len(); i++ { + node, err := e.encodeValue(ctx, value.Index(i), column) + if err != nil { + return nil, errors.Wrapf(err, "failed to encode value for array") + } + sequence.Values = append(sequence.Values, node) + } + if e.indentSequence { + e.column -= e.indent + } + return sequence, nil +} + +func (e *Encoder) encodeMapItem(ctx context.Context, item MapItem, column int) (*ast.MappingValueNode, error) { + k := reflect.ValueOf(item.Key) + v := reflect.ValueOf(item.Value) + value, err := e.encodeValue(ctx, v, column) + if err != nil { + return nil, errors.Wrapf(err, "failed to encode MapItem") + } + if e.isMapNode(value) { + value.AddColumn(e.indent) + } + return ast.MappingValue( + token.New("", "", e.pos(column)), + e.encodeString(k.Interface().(string), column), + value, + ), nil +} + +func (e *Encoder) encodeMapSlice(ctx context.Context, value MapSlice, column int) (*ast.MappingNode, error) { + node := ast.Mapping(token.New("", "", e.pos(column)), e.isFlowStyle) + for _, item := range value { + value, err := e.encodeMapItem(ctx, item, column) + if err != nil { + return nil, errors.Wrapf(err, "failed to encode MapItem for MapSlice") + } + node.Values = append(node.Values, value) + } + return node, nil +} + +func (e *Encoder) isMapNode(node ast.Node) bool { + _, ok := node.(ast.MapNode) + return ok +} + +func (e *Encoder) encodeMap(ctx context.Context, value reflect.Value, column int) ast.Node { + node := ast.Mapping(token.New("", "", e.pos(column)), e.isFlowStyle) + keys := make([]interface{}, len(value.MapKeys())) + for i, k := range value.MapKeys() { + keys[i] = k.Interface() + } + sort.Slice(keys, func(i, j int) bool { + return fmt.Sprint(keys[i]) < fmt.Sprint(keys[j]) + }) + for _, key := range keys { + k := reflect.ValueOf(key) + v := value.MapIndex(k) + value, err := e.encodeValue(ctx, v, column) + if err != nil { + return nil + } + if e.isMapNode(value) { + value.AddColumn(e.indent) + } + node.Values = append(node.Values, ast.MappingValue( + nil, + e.encodeString(fmt.Sprint(key), column), + value, + )) + } + return node +} + +// IsZeroer is used to check whether an object is zero to determine +// whether it should be omitted when marshaling with the omitempty flag. +// One notable implementation is time.Time. 
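+// For example, a struct field declared as
+//
+//	Updated time.Time `yaml:"updated,omitempty"`
+//
+// is skipped by the encoder whenever Updated.IsZero() reports true.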
+type IsZeroer interface { + IsZero() bool +} + +func (e *Encoder) isZeroValue(v reflect.Value) bool { + kind := v.Kind() + if z, ok := v.Interface().(IsZeroer); ok { + if (kind == reflect.Ptr || kind == reflect.Interface) && v.IsNil() { + return true + } + return z.IsZero() + } + switch kind { + case reflect.String: + return len(v.String()) == 0 + case reflect.Interface, reflect.Ptr: + return v.IsNil() + case reflect.Slice: + return v.Len() == 0 + case reflect.Map: + return v.Len() == 0 + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return v.Int() == 0 + case reflect.Float32, reflect.Float64: + return v.Float() == 0 + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return v.Uint() == 0 + case reflect.Bool: + return !v.Bool() + case reflect.Struct: + vt := v.Type() + for i := v.NumField() - 1; i >= 0; i-- { + if vt.Field(i).PkgPath != "" { + continue // private field + } + if !e.isZeroValue(v.Field(i)) { + return false + } + } + return true + } + return false +} + +func (e *Encoder) encodeTime(v time.Time, column int) *ast.StringNode { + value := v.Format(time.RFC3339Nano) + if e.isJSONStyle { + value = strconv.Quote(value) + } + return ast.String(token.New(value, value, e.pos(column))) +} + +func (e *Encoder) encodeDuration(v time.Duration, column int) *ast.StringNode { + value := v.String() + if e.isJSONStyle { + value = strconv.Quote(value) + } + return ast.String(token.New(value, value, e.pos(column))) +} + +func (e *Encoder) encodeAnchor(anchorName string, value ast.Node, fieldValue reflect.Value, column int) (*ast.AnchorNode, error) { + anchorNode := ast.Anchor(token.New("&", "&", e.pos(column))) + anchorNode.Name = ast.String(token.New(anchorName, anchorName, e.pos(column))) + anchorNode.Value = value + if e.anchorCallback != nil { + if err := e.anchorCallback(anchorNode, fieldValue.Interface()); err != nil { + return nil, errors.Wrapf(err, "failed to marshal anchor") + } + if snode, ok := anchorNode.Name.(*ast.StringNode); ok { + anchorName = snode.Value + } + } + if fieldValue.Kind() == reflect.Ptr { + e.anchorPtrToNameMap[fieldValue.Pointer()] = anchorName + } + return anchorNode, nil +} + +func (e *Encoder) encodeStruct(ctx context.Context, value reflect.Value, column int) (ast.Node, error) { + node := ast.Mapping(token.New("", "", e.pos(column)), e.isFlowStyle) + structType := value.Type() + structFieldMap, err := structFieldMap(structType) + if err != nil { + return nil, errors.Wrapf(err, "failed to get struct field map") + } + hasInlineAnchorField := false + var inlineAnchorValue reflect.Value + for i := 0; i < value.NumField(); i++ { + field := structType.Field(i) + if isIgnoredStructField(field) { + continue + } + fieldValue := value.FieldByName(field.Name) + structField := structFieldMap[field.Name] + if structField.IsOmitEmpty && e.isZeroValue(fieldValue) { + // omit encoding + continue + } + ve := e + if !e.isFlowStyle && structField.IsFlow { + ve = &Encoder{} + *ve = *e + ve.isFlowStyle = true + } + value, err := ve.encodeValue(ctx, fieldValue, column) + if err != nil { + return nil, errors.Wrapf(err, "failed to encode value") + } + if e.isMapNode(value) { + value.AddColumn(e.indent) + } + var key ast.MapKeyNode = e.encodeString(structField.RenderName, column) + switch { + case structField.AnchorName != "": + anchorNode, err := e.encodeAnchor(structField.AnchorName, value, fieldValue, column) + if err != nil { + return nil, errors.Wrapf(err, "failed to encode anchor") + } + value = 
anchorNode + case structField.IsAutoAlias: + if fieldValue.Kind() != reflect.Ptr { + return nil, xerrors.Errorf( + "%s in struct is not pointer type. but required automatically alias detection", + structField.FieldName, + ) + } + anchorName := e.anchorPtrToNameMap[fieldValue.Pointer()] + if anchorName == "" { + return nil, xerrors.Errorf( + "cannot find anchor name from pointer address for automatically alias detection", + ) + } + aliasName := anchorName + alias := ast.Alias(token.New("*", "*", e.pos(column))) + alias.Value = ast.String(token.New(aliasName, aliasName, e.pos(column))) + value = alias + if structField.IsInline { + // if both used alias and inline, output `<<: *alias` + key = ast.MergeKey(token.New("<<", "<<", e.pos(column))) + } + case structField.AliasName != "": + aliasName := structField.AliasName + alias := ast.Alias(token.New("*", "*", e.pos(column))) + alias.Value = ast.String(token.New(aliasName, aliasName, e.pos(column))) + value = alias + if structField.IsInline { + // if both used alias and inline, output `<<: *alias` + key = ast.MergeKey(token.New("<<", "<<", e.pos(column))) + } + case structField.IsInline: + isAutoAnchor := structField.IsAutoAnchor + if !hasInlineAnchorField { + hasInlineAnchorField = isAutoAnchor + } + if isAutoAnchor { + inlineAnchorValue = fieldValue + } + mapNode, ok := value.(ast.MapNode) + if !ok { + // if an inline field is null, skip encoding it + if _, ok := value.(*ast.NullNode); ok { + continue + } + return nil, xerrors.Errorf("inline value is must be map or struct type") + } + mapIter := mapNode.MapRange() + for mapIter.Next() { + key := mapIter.Key() + value := mapIter.Value() + keyName := key.GetToken().Value + if structFieldMap.isIncludedRenderName(keyName) { + // if declared same key name, skip encoding this field + continue + } + key.AddColumn(-e.indent) + value.AddColumn(-e.indent) + node.Values = append(node.Values, ast.MappingValue(nil, key, value)) + } + continue + case structField.IsAutoAnchor: + anchorNode, err := e.encodeAnchor(structField.RenderName, value, fieldValue, column) + if err != nil { + return nil, errors.Wrapf(err, "failed to encode anchor") + } + value = anchorNode + } + node.Values = append(node.Values, ast.MappingValue(nil, key, value)) + } + if hasInlineAnchorField { + node.AddColumn(e.indent) + anchorName := "anchor" + anchorNode := ast.Anchor(token.New("&", "&", e.pos(column))) + anchorNode.Name = ast.String(token.New(anchorName, anchorName, e.pos(column))) + anchorNode.Value = node + if e.anchorCallback != nil { + if err := e.anchorCallback(anchorNode, value.Addr().Interface()); err != nil { + return nil, errors.Wrapf(err, "failed to marshal anchor") + } + if snode, ok := anchorNode.Name.(*ast.StringNode); ok { + anchorName = snode.Value + } + } + if inlineAnchorValue.Kind() == reflect.Ptr { + e.anchorPtrToNameMap[inlineAnchorValue.Pointer()] = anchorName + } + return anchorNode, nil + } + return node, nil +} diff --git a/vendor/github.com/goccy/go-yaml/error.go b/vendor/github.com/goccy/go-yaml/error.go new file mode 100644 index 00000000..163dcc55 --- /dev/null +++ b/vendor/github.com/goccy/go-yaml/error.go @@ -0,0 +1,62 @@ +package yaml + +import ( + "github.com/goccy/go-yaml/ast" + "golang.org/x/xerrors" +) + +var ( + ErrInvalidQuery = xerrors.New("invalid query") + ErrInvalidPath = xerrors.New("invalid path instance") + ErrInvalidPathString = xerrors.New("invalid path string") + ErrNotFoundNode = xerrors.New("node not found") + ErrUnknownCommentPositionType = xerrors.New("unknown comment position 
type") + ErrInvalidCommentMapValue = xerrors.New("invalid comment map value. it must be not nil value") +) + +func ErrUnsupportedHeadPositionType(node ast.Node) error { + return xerrors.Errorf("unsupported comment head position for %s", node.Type()) +} + +func ErrUnsupportedLinePositionType(node ast.Node) error { + return xerrors.Errorf("unsupported comment line position for %s", node.Type()) +} + +func ErrUnsupportedFootPositionType(node ast.Node) error { + return xerrors.Errorf("unsupported comment foot position for %s", node.Type()) +} + +// IsInvalidQueryError whether err is ErrInvalidQuery or not. +func IsInvalidQueryError(err error) bool { + return xerrors.Is(err, ErrInvalidQuery) +} + +// IsInvalidPathError whether err is ErrInvalidPath or not. +func IsInvalidPathError(err error) bool { + return xerrors.Is(err, ErrInvalidPath) +} + +// IsInvalidPathStringError whether err is ErrInvalidPathString or not. +func IsInvalidPathStringError(err error) bool { + return xerrors.Is(err, ErrInvalidPathString) +} + +// IsNotFoundNodeError whether err is ErrNotFoundNode or not. +func IsNotFoundNodeError(err error) bool { + return xerrors.Is(err, ErrNotFoundNode) +} + +// IsInvalidTokenTypeError whether err is ast.ErrInvalidTokenType or not. +func IsInvalidTokenTypeError(err error) bool { + return xerrors.Is(err, ast.ErrInvalidTokenType) +} + +// IsInvalidAnchorNameError whether err is ast.ErrInvalidAnchorName or not. +func IsInvalidAnchorNameError(err error) bool { + return xerrors.Is(err, ast.ErrInvalidAnchorName) +} + +// IsInvalidAliasNameError whether err is ast.ErrInvalidAliasName or not. +func IsInvalidAliasNameError(err error) bool { + return xerrors.Is(err, ast.ErrInvalidAliasName) +} diff --git a/vendor/github.com/goccy/go-yaml/internal/errors/error.go b/vendor/github.com/goccy/go-yaml/internal/errors/error.go new file mode 100644 index 00000000..7f1ea9af --- /dev/null +++ b/vendor/github.com/goccy/go-yaml/internal/errors/error.go @@ -0,0 +1,260 @@ +package errors + +import ( + "bytes" + "fmt" + "reflect" + + "github.com/goccy/go-yaml/printer" + "github.com/goccy/go-yaml/token" + "golang.org/x/xerrors" +) + +const ( + defaultColorize = false + defaultIncludeSource = true +) + +var ( + ErrDecodeRequiredPointerType = xerrors.New("required pointer type value") +) + +// Wrapf wrap error for stack trace +func Wrapf(err error, msg string, args ...interface{}) error { + return &wrapError{ + baseError: &baseError{}, + err: xerrors.Errorf(msg, args...), + nextErr: err, + frame: xerrors.Caller(1), + } +} + +// ErrSyntax create syntax error instance with message and token +func ErrSyntax(msg string, tk *token.Token) *syntaxError { + return &syntaxError{ + baseError: &baseError{}, + msg: msg, + token: tk, + frame: xerrors.Caller(1), + } +} + +type baseError struct { + state fmt.State + verb rune +} + +func (e *baseError) Error() string { + return "" +} + +func (e *baseError) chainStateAndVerb(err error) { + wrapErr, ok := err.(*wrapError) + if ok { + wrapErr.state = e.state + wrapErr.verb = e.verb + } + syntaxErr, ok := err.(*syntaxError) + if ok { + syntaxErr.state = e.state + syntaxErr.verb = e.verb + } +} + +type wrapError struct { + *baseError + err error + nextErr error + frame xerrors.Frame +} + +type FormatErrorPrinter struct { + xerrors.Printer + Colored bool + InclSource bool +} + +func (e *wrapError) As(target interface{}) bool { + err := e.nextErr + for { + if wrapErr, ok := err.(*wrapError); ok { + err = wrapErr.nextErr + continue + } + break + } + return xerrors.As(err, target) +} + 
+func (e *wrapError) Unwrap() error { + return e.nextErr +} + +func (e *wrapError) PrettyPrint(p xerrors.Printer, colored, inclSource bool) error { + return e.FormatError(&FormatErrorPrinter{Printer: p, Colored: colored, InclSource: inclSource}) +} + +func (e *wrapError) FormatError(p xerrors.Printer) error { + if _, ok := p.(*FormatErrorPrinter); !ok { + p = &FormatErrorPrinter{ + Printer: p, + Colored: defaultColorize, + InclSource: defaultIncludeSource, + } + } + if e.verb == 'v' && e.state.Flag('+') { + // print stack trace for debugging + p.Print(e.err, "\n") + e.frame.Format(p) + e.chainStateAndVerb(e.nextErr) + return e.nextErr + } + err := e.nextErr + for { + if wrapErr, ok := err.(*wrapError); ok { + err = wrapErr.nextErr + continue + } + break + } + e.chainStateAndVerb(err) + if fmtErr, ok := err.(xerrors.Formatter); ok { + fmtErr.FormatError(p) + } else { + p.Print(err) + } + return nil +} + +type wrapState struct { + org fmt.State +} + +func (s *wrapState) Write(b []byte) (n int, err error) { + return s.org.Write(b) +} + +func (s *wrapState) Width() (wid int, ok bool) { + return s.org.Width() +} + +func (s *wrapState) Precision() (prec int, ok bool) { + return s.org.Precision() +} + +func (s *wrapState) Flag(c int) bool { + // set true to 'printDetail' forced because when p.Detail() is false, xerrors.Printer no output any text + if c == '#' { + // ignore '#' keyword because xerrors.FormatError doesn't set true to printDetail. + // ( see https://github.com/golang/xerrors/blob/master/adaptor.go#L39-L43 ) + return false + } + return true +} + +func (e *wrapError) Format(state fmt.State, verb rune) { + e.state = state + e.verb = verb + xerrors.FormatError(e, &wrapState{org: state}, verb) +} + +func (e *wrapError) Error() string { + var buf bytes.Buffer + e.PrettyPrint(&Sink{&buf}, defaultColorize, defaultIncludeSource) + return buf.String() +} + +type syntaxError struct { + *baseError + msg string + token *token.Token + frame xerrors.Frame +} + +func (e *syntaxError) PrettyPrint(p xerrors.Printer, colored, inclSource bool) error { + return e.FormatError(&FormatErrorPrinter{Printer: p, Colored: colored, InclSource: inclSource}) +} + +func (e *syntaxError) FormatError(p xerrors.Printer) error { + var pp printer.Printer + + var colored, inclSource bool + if fep, ok := p.(*FormatErrorPrinter); ok { + colored = fep.Colored + inclSource = fep.InclSource + } + + pos := fmt.Sprintf("[%d:%d] ", e.token.Position.Line, e.token.Position.Column) + msg := pp.PrintErrorMessage(fmt.Sprintf("%s%s", pos, e.msg), colored) + if inclSource { + msg += "\n" + pp.PrintErrorToken(e.token, colored) + } + p.Print(msg) + + if e.verb == 'v' && e.state.Flag('+') { + // %+v + // print stack trace for debugging + e.frame.Format(p) + } + return nil +} + +type PrettyPrinter interface { + PrettyPrint(xerrors.Printer, bool, bool) error +} +type Sink struct{ *bytes.Buffer } + +func (es *Sink) Print(args ...interface{}) { + fmt.Fprint(es.Buffer, args...) +} + +func (es *Sink) Printf(f string, args ...interface{}) { + fmt.Fprintf(es.Buffer, f, args...) 
+} + +func (es *Sink) Detail() bool { + return false +} + +func (e *syntaxError) Error() string { + var buf bytes.Buffer + e.PrettyPrint(&Sink{&buf}, defaultColorize, defaultIncludeSource) + return buf.String() +} + +type TypeError struct { + DstType reflect.Type + SrcType reflect.Type + StructFieldName *string + Token *token.Token +} + +func (e *TypeError) Error() string { + if e.StructFieldName != nil { + return fmt.Sprintf("cannot unmarshal %s into Go struct field %s of type %s", e.SrcType, *e.StructFieldName, e.DstType) + } + return fmt.Sprintf("cannot unmarshal %s into Go value of type %s", e.SrcType, e.DstType) +} + +func (e *TypeError) PrettyPrint(p xerrors.Printer, colored, inclSource bool) error { + return e.FormatError(&FormatErrorPrinter{Printer: p, Colored: colored, InclSource: inclSource}) +} + +func (e *TypeError) FormatError(p xerrors.Printer) error { + var pp printer.Printer + + var colored, inclSource bool + if fep, ok := p.(*FormatErrorPrinter); ok { + colored = fep.Colored + inclSource = fep.InclSource + } + + pos := fmt.Sprintf("[%d:%d] ", e.Token.Position.Line, e.Token.Position.Column) + msg := pp.PrintErrorMessage(fmt.Sprintf("%s%s", pos, e.Error()), colored) + if inclSource { + msg += "\n" + pp.PrintErrorToken(e.Token, colored) + } + p.Print(msg) + + return nil +} diff --git a/vendor/github.com/goccy/go-yaml/lexer/lexer.go b/vendor/github.com/goccy/go-yaml/lexer/lexer.go new file mode 100644 index 00000000..3207f4f2 --- /dev/null +++ b/vendor/github.com/goccy/go-yaml/lexer/lexer.go @@ -0,0 +1,23 @@ +package lexer + +import ( + "io" + + "github.com/goccy/go-yaml/scanner" + "github.com/goccy/go-yaml/token" +) + +// Tokenize split to token instances from string +func Tokenize(src string) token.Tokens { + var s scanner.Scanner + s.Init(src) + var tokens token.Tokens + for { + subTokens, err := s.Scan() + if err == io.EOF { + break + } + tokens.Add(subTokens...) + } + return tokens +} diff --git a/vendor/github.com/goccy/go-yaml/option.go b/vendor/github.com/goccy/go-yaml/option.go new file mode 100644 index 00000000..eab5d43a --- /dev/null +++ b/vendor/github.com/goccy/go-yaml/option.go @@ -0,0 +1,278 @@ +package yaml + +import ( + "io" + "reflect" + + "github.com/goccy/go-yaml/ast" +) + +// DecodeOption functional option type for Decoder +type DecodeOption func(d *Decoder) error + +// ReferenceReaders pass to Decoder that reference to anchor defined by passed readers +func ReferenceReaders(readers ...io.Reader) DecodeOption { + return func(d *Decoder) error { + d.referenceReaders = append(d.referenceReaders, readers...) 
+ return nil + } +} + +// ReferenceFiles pass to Decoder that reference to anchor defined by passed files +func ReferenceFiles(files ...string) DecodeOption { + return func(d *Decoder) error { + d.referenceFiles = files + return nil + } +} + +// ReferenceDirs pass to Decoder that reference to anchor defined by files under the passed dirs +func ReferenceDirs(dirs ...string) DecodeOption { + return func(d *Decoder) error { + d.referenceDirs = dirs + return nil + } +} + +// RecursiveDir search yaml file recursively from passed dirs by ReferenceDirs option +func RecursiveDir(isRecursive bool) DecodeOption { + return func(d *Decoder) error { + d.isRecursiveDir = isRecursive + return nil + } +} + +// Validator set StructValidator instance to Decoder +func Validator(v StructValidator) DecodeOption { + return func(d *Decoder) error { + d.validator = v + return nil + } +} + +// Strict enable DisallowUnknownField and DisallowDuplicateKey +func Strict() DecodeOption { + return func(d *Decoder) error { + d.disallowUnknownField = true + d.disallowDuplicateKey = true + return nil + } +} + +// DisallowUnknownField causes the Decoder to return an error when the destination +// is a struct and the input contains object keys which do not match any +// non-ignored, exported fields in the destination. +func DisallowUnknownField() DecodeOption { + return func(d *Decoder) error { + d.disallowUnknownField = true + return nil + } +} + +// DisallowDuplicateKey causes an error when mapping keys that are duplicates +func DisallowDuplicateKey() DecodeOption { + return func(d *Decoder) error { + d.disallowDuplicateKey = true + return nil + } +} + +// UseOrderedMap can be interpreted as a map, +// and uses MapSlice ( ordered map ) aggressively if there is no type specification +func UseOrderedMap() DecodeOption { + return func(d *Decoder) error { + d.useOrderedMap = true + return nil + } +} + +// UseJSONUnmarshaler if neither `BytesUnmarshaler` nor `InterfaceUnmarshaler` is implemented +// and `UnmashalJSON([]byte)error` is implemented, convert the argument from `YAML` to `JSON` and then call it. +func UseJSONUnmarshaler() DecodeOption { + return func(d *Decoder) error { + d.useJSONUnmarshaler = true + return nil + } +} + +// CustomUnmarshaler overrides any decoding process for the type specified in generics. +// +// NOTE: If RegisterCustomUnmarshaler and CustomUnmarshaler of DecodeOption are specified for the same type, +// the CustomUnmarshaler specified in DecodeOption takes precedence. +func CustomUnmarshaler[T any](unmarshaler func(*T, []byte) error) DecodeOption { + return func(d *Decoder) error { + var typ *T + d.customUnmarshalerMap[reflect.TypeOf(typ)] = func(v interface{}, b []byte) error { + return unmarshaler(v.(*T), b) + } + return nil + } +} + +// EncodeOption functional option type for Encoder +type EncodeOption func(e *Encoder) error + +// Indent change indent number +func Indent(spaces int) EncodeOption { + return func(e *Encoder) error { + e.indent = spaces + return nil + } +} + +// IndentSequence causes sequence values to be indented the same value as Indent +func IndentSequence(indent bool) EncodeOption { + return func(e *Encoder) error { + e.indentSequence = indent + return nil + } +} + +// UseSingleQuote determines if single or double quotes should be preferred for strings. 
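+// For example, with UseSingleQuote(true) a string value such as "true" (which must be quoted so it
+// is not read back as a bool) would be emitted as 'true' rather than "true".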
+func UseSingleQuote(sq bool) EncodeOption { + return func(e *Encoder) error { + e.singleQuote = sq + return nil + } +} + +// Flow encoding by flow style +func Flow(isFlowStyle bool) EncodeOption { + return func(e *Encoder) error { + e.isFlowStyle = isFlowStyle + return nil + } +} + +// UseLiteralStyleIfMultiline causes encoding multiline strings with a literal syntax, +// no matter what characters they include +func UseLiteralStyleIfMultiline(useLiteralStyleIfMultiline bool) EncodeOption { + return func(e *Encoder) error { + e.useLiteralStyleIfMultiline = useLiteralStyleIfMultiline + return nil + } +} + +// JSON encode in JSON format +func JSON() EncodeOption { + return func(e *Encoder) error { + e.isJSONStyle = true + e.isFlowStyle = true + return nil + } +} + +// MarshalAnchor call back if encoder find an anchor during encoding +func MarshalAnchor(callback func(*ast.AnchorNode, interface{}) error) EncodeOption { + return func(e *Encoder) error { + e.anchorCallback = callback + return nil + } +} + +// UseJSONMarshaler if neither `BytesMarshaler` nor `InterfaceMarshaler` +// nor `encoding.TextMarshaler` is implemented and `MarshalJSON()([]byte, error)` is implemented, +// call `MarshalJSON` to convert the returned `JSON` to `YAML` for processing. +func UseJSONMarshaler() EncodeOption { + return func(e *Encoder) error { + e.useJSONMarshaler = true + return nil + } +} + +// CustomMarshaler overrides any encoding process for the type specified in generics. +// +// NOTE: If type T implements MarshalYAML for pointer receiver, the type specified in CustomMarshaler must be *T. +// If RegisterCustomMarshaler and CustomMarshaler of EncodeOption are specified for the same type, +// the CustomMarshaler specified in EncodeOption takes precedence. +func CustomMarshaler[T any](marshaler func(T) ([]byte, error)) EncodeOption { + return func(e *Encoder) error { + var typ T + e.customMarshalerMap[reflect.TypeOf(typ)] = func(v interface{}) ([]byte, error) { + return marshaler(v.(T)) + } + return nil + } +} + +// CommentPosition type of the position for comment. +type CommentPosition int + +const ( + CommentHeadPosition CommentPosition = CommentPosition(iota) + CommentLinePosition + CommentFootPosition +) + +func (p CommentPosition) String() string { + switch p { + case CommentHeadPosition: + return "Head" + case CommentLinePosition: + return "Line" + case CommentFootPosition: + return "Foot" + default: + return "" + } +} + +// LineComment create a one-line comment for CommentMap. +func LineComment(text string) *Comment { + return &Comment{ + Texts: []string{text}, + Position: CommentLinePosition, + } +} + +// HeadComment create a multiline comment for CommentMap. +func HeadComment(texts ...string) *Comment { + return &Comment{ + Texts: texts, + Position: CommentHeadPosition, + } +} + +// FootComment create a multiline comment for CommentMap. +func FootComment(texts ...string) *Comment { + return &Comment{ + Texts: texts, + Position: CommentFootPosition, + } +} + +// Comment raw data for comment. +type Comment struct { + Texts []string + Position CommentPosition +} + +// CommentMap map of the position of the comment and the comment information. +type CommentMap map[string][]*Comment + +// WithComment add a comment using the location and text information given in the CommentMap. 
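+// A rough sketch of the intended usage ("$.spec" is only an example path):
+//
+//	cm := yaml.CommentMap{"$.spec": []*yaml.Comment{yaml.HeadComment(" main settings")}}
+//	yaml.NewEncoder(w, yaml.WithComment(cm))
+//	// encoding then places "# main settings" on the line above the "spec" key.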
+func WithComment(cm CommentMap) EncodeOption { + return func(e *Encoder) error { + commentMap := map[*Path][]*Comment{} + for k, v := range cm { + path, err := PathString(k) + if err != nil { + return err + } + commentMap[path] = v + } + e.commentMap = commentMap + return nil + } +} + +// CommentToMap apply the position and content of comments in a YAML document to a CommentMap. +func CommentToMap(cm CommentMap) DecodeOption { + return func(d *Decoder) error { + if cm == nil { + return ErrInvalidCommentMapValue + } + d.toCommentMap = cm + return nil + } +} diff --git a/vendor/github.com/goccy/go-yaml/parser/context.go b/vendor/github.com/goccy/go-yaml/parser/context.go new file mode 100644 index 00000000..42cc4f8f --- /dev/null +++ b/vendor/github.com/goccy/go-yaml/parser/context.go @@ -0,0 +1,192 @@ +package parser + +import ( + "fmt" + "strings" + + "github.com/goccy/go-yaml/token" +) + +// context context at parsing +type context struct { + parent *context + idx int + size int + tokens token.Tokens + path string +} + +var pathSpecialChars = []string{ + "$", "*", ".", "[", "]", +} + +func containsPathSpecialChar(path string) bool { + for _, char := range pathSpecialChars { + if strings.Contains(path, char) { + return true + } + } + return false +} + +func normalizePath(path string) string { + if containsPathSpecialChar(path) { + return fmt.Sprintf("'%s'", path) + } + return path +} + +func (c *context) withChild(path string) *context { + ctx := c.copy() + path = normalizePath(path) + ctx.path += fmt.Sprintf(".%s", path) + return ctx +} + +func (c *context) withIndex(idx uint) *context { + ctx := c.copy() + ctx.path += fmt.Sprintf("[%d]", idx) + return ctx +} + +func (c *context) copy() *context { + return &context{ + parent: c, + idx: c.idx, + size: c.size, + tokens: append(token.Tokens{}, c.tokens...), + path: c.path, + } +} + +func (c *context) next() bool { + return c.idx < c.size +} + +func (c *context) previousToken() *token.Token { + if c.idx > 0 { + return c.tokens[c.idx-1] + } + return nil +} + +func (c *context) insertToken(idx int, tk *token.Token) { + if c.parent != nil { + c.parent.insertToken(idx, tk) + } + if c.size < idx { + return + } + if c.size == idx { + curToken := c.tokens[c.size-1] + tk.Next = curToken + curToken.Prev = tk + + c.tokens = append(c.tokens, tk) + c.size = len(c.tokens) + return + } + + curToken := c.tokens[idx] + tk.Next = curToken + curToken.Prev = tk + + c.tokens = append(c.tokens[:idx+1], c.tokens[idx:]...) 
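+	// the append above duplicated the element at idx and shifted the tail right by one,
+	// so overwrite the duplicated slot with the token being inserted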
+ c.tokens[idx] = tk + c.size = len(c.tokens) +} + +func (c *context) currentToken() *token.Token { + if c.idx >= c.size { + return nil + } + return c.tokens[c.idx] +} + +func (c *context) nextToken() *token.Token { + if c.idx+1 >= c.size { + return nil + } + return c.tokens[c.idx+1] +} + +func (c *context) afterNextToken() *token.Token { + if c.idx+2 >= c.size { + return nil + } + return c.tokens[c.idx+2] +} + +func (c *context) nextNotCommentToken() *token.Token { + for i := c.idx + 1; i < c.size; i++ { + tk := c.tokens[i] + if tk.Type == token.CommentType { + continue + } + return tk + } + return nil +} + +func (c *context) afterNextNotCommentToken() *token.Token { + notCommentTokenCount := 0 + for i := c.idx + 1; i < c.size; i++ { + tk := c.tokens[i] + if tk.Type == token.CommentType { + continue + } + notCommentTokenCount++ + if notCommentTokenCount == 2 { + return tk + } + } + return nil +} + +func (c *context) isCurrentCommentToken() bool { + tk := c.currentToken() + if tk == nil { + return false + } + return tk.Type == token.CommentType +} + +func (c *context) progressIgnoreComment(num int) { + if c.parent != nil { + c.parent.progressIgnoreComment(num) + } + if c.size <= c.idx+num { + c.idx = c.size + } else { + c.idx += num + } +} + +func (c *context) progress(num int) { + if c.isCurrentCommentToken() { + return + } + c.progressIgnoreComment(num) +} + +func newContext(tokens token.Tokens, mode Mode) *context { + filteredTokens := []*token.Token{} + if mode&ParseComments != 0 { + filteredTokens = tokens + } else { + for _, tk := range tokens { + if tk.Type == token.CommentType { + continue + } + // keep prev/next reference between tokens containing comments + // https://github.com/goccy/go-yaml/issues/254 + filteredTokens = append(filteredTokens, tk) + } + } + return &context{ + idx: 0, + size: len(filteredTokens), + tokens: token.Tokens(filteredTokens), + path: "$", + } +} diff --git a/vendor/github.com/goccy/go-yaml/parser/parser.go b/vendor/github.com/goccy/go-yaml/parser/parser.go new file mode 100644 index 00000000..2bec5fea --- /dev/null +++ b/vendor/github.com/goccy/go-yaml/parser/parser.go @@ -0,0 +1,743 @@ +package parser + +import ( + "fmt" + "os" + "strings" + + "github.com/goccy/go-yaml/ast" + "github.com/goccy/go-yaml/internal/errors" + "github.com/goccy/go-yaml/lexer" + "github.com/goccy/go-yaml/token" + "golang.org/x/xerrors" +) + +type parser struct{} + +func (p *parser) parseMapping(ctx *context) (*ast.MappingNode, error) { + mapTk := ctx.currentToken() + node := ast.Mapping(mapTk, true) + node.SetPath(ctx.path) + ctx.progress(1) // skip MappingStart token + for ctx.next() { + tk := ctx.currentToken() + if tk.Type == token.MappingEndType { + node.End = tk + return node, nil + } else if tk.Type == token.CollectEntryType { + ctx.progress(1) + continue + } + + value, err := p.parseMappingValue(ctx) + if err != nil { + return nil, errors.Wrapf(err, "failed to parse mapping value in mapping node") + } + mvnode, ok := value.(*ast.MappingValueNode) + if !ok { + return nil, errors.ErrSyntax("failed to parse flow mapping node", value.GetToken()) + } + node.Values = append(node.Values, mvnode) + ctx.progress(1) + } + return nil, errors.ErrSyntax("unterminated flow mapping", node.GetToken()) +} + +func (p *parser) parseSequence(ctx *context) (*ast.SequenceNode, error) { + node := ast.Sequence(ctx.currentToken(), true) + node.SetPath(ctx.path) + ctx.progress(1) // skip SequenceStart token + for ctx.next() { + tk := ctx.currentToken() + if tk.Type == token.SequenceEndType { + 
node.End = tk + break + } else if tk.Type == token.CollectEntryType { + ctx.progress(1) + continue + } + + value, err := p.parseToken(ctx.withIndex(uint(len(node.Values))), tk) + if err != nil { + return nil, errors.Wrapf(err, "failed to parse sequence value in flow sequence node") + } + node.Values = append(node.Values, value) + ctx.progress(1) + } + return node, nil +} + +func (p *parser) parseTag(ctx *context) (*ast.TagNode, error) { + tagToken := ctx.currentToken() + node := ast.Tag(tagToken) + node.SetPath(ctx.path) + ctx.progress(1) // skip tag token + var ( + value ast.Node + err error + ) + switch token.ReservedTagKeyword(tagToken.Value) { + case token.MappingTag, + token.OrderedMapTag: + value, err = p.parseMapping(ctx) + case token.IntegerTag, + token.FloatTag, + token.StringTag, + token.BinaryTag, + token.TimestampTag, + token.NullTag: + typ := ctx.currentToken().Type + if typ == token.LiteralType || typ == token.FoldedType { + value, err = p.parseLiteral(ctx) + } else { + value = p.parseScalarValue(ctx.currentToken()) + } + case token.SequenceTag, + token.SetTag: + err = errors.ErrSyntax(fmt.Sprintf("sorry, currently not supported %s tag", tagToken.Value), tagToken) + default: + // custom tag + value, err = p.parseToken(ctx, ctx.currentToken()) + } + if err != nil { + return nil, errors.Wrapf(err, "failed to parse tag value") + } + node.Value = value + return node, nil +} + +func (p *parser) removeLeftSideNewLineCharacter(src string) string { + // CR or LF or CRLF + return strings.TrimLeft(strings.TrimLeft(strings.TrimLeft(src, "\r"), "\n"), "\r\n") +} + +func (p *parser) existsNewLineCharacter(src string) bool { + if strings.Index(src, "\n") > 0 { + return true + } + if strings.Index(src, "\r") > 0 { + return true + } + return false +} + +func (p *parser) validateMapKey(tk *token.Token) error { + if tk.Type != token.StringType { + return nil + } + origin := p.removeLeftSideNewLineCharacter(tk.Origin) + if p.existsNewLineCharacter(origin) { + return errors.ErrSyntax("unexpected key name", tk) + } + return nil +} + +func (p *parser) createNullToken(base *token.Token) *token.Token { + pos := *(base.Position) + pos.Column++ + return token.New("null", "null", &pos) +} + +func (p *parser) parseMapValue(ctx *context, key ast.MapKeyNode, colonToken *token.Token) (ast.Node, error) { + node, err := p.createMapValueNode(ctx, key, colonToken) + if err != nil { + return nil, errors.Wrapf(err, "failed to create map value node") + } + if node != nil && node.GetPath() == "" { + node.SetPath(ctx.path) + } + return node, nil +} + +func (p *parser) createMapValueNode(ctx *context, key ast.MapKeyNode, colonToken *token.Token) (ast.Node, error) { + tk := ctx.currentToken() + if tk == nil { + nullToken := p.createNullToken(colonToken) + ctx.insertToken(ctx.idx, nullToken) + return ast.Null(nullToken), nil + } + var comment *ast.CommentGroupNode + if tk.Type == token.CommentType { + comment = p.parseCommentOnly(ctx) + if comment != nil { + comment.SetPath(ctx.withChild(key.GetToken().Value).path) + } + tk = ctx.currentToken() + } + if tk.Position.Column == key.GetToken().Position.Column && tk.Type == token.StringType { + // in this case, + // ---- + // key: + // next + + nullToken := p.createNullToken(colonToken) + ctx.insertToken(ctx.idx, nullToken) + nullNode := ast.Null(nullToken) + + if comment != nil { + nullNode.SetComment(comment) + } else { + // If there is a comment, it is already bound to the key node, + // so remove the comment from the key to bind it to the null value. 
+ keyComment := key.GetComment() + if keyComment != nil { + if err := key.SetComment(nil); err != nil { + return nil, err + } + nullNode.SetComment(keyComment) + } + } + return nullNode, nil + } + + if tk.Position.Column < key.GetToken().Position.Column { + // in this case, + // ---- + // key: + // next + nullToken := p.createNullToken(colonToken) + ctx.insertToken(ctx.idx, nullToken) + nullNode := ast.Null(nullToken) + if comment != nil { + nullNode.SetComment(comment) + } + return nullNode, nil + } + + value, err := p.parseToken(ctx, ctx.currentToken()) + if err != nil { + return nil, errors.Wrapf(err, "failed to parse mapping 'value' node") + } + if comment != nil { + value.SetComment(comment) + } + return value, nil +} + +func (p *parser) validateMapValue(ctx *context, key, value ast.Node) error { + keyColumn := key.GetToken().Position.Column + valueColumn := value.GetToken().Position.Column + if keyColumn != valueColumn { + return nil + } + if value.Type() != ast.StringType { + return nil + } + ntk := ctx.nextToken() + if ntk == nil || (ntk.Type != token.MappingValueType && ntk.Type != token.SequenceEntryType) { + return errors.ErrSyntax("could not found expected ':' token", value.GetToken()) + } + return nil +} + +func (p *parser) parseMappingValue(ctx *context) (ast.Node, error) { + key, err := p.parseMapKey(ctx) + if err != nil { + return nil, errors.Wrapf(err, "failed to parse map key") + } + keyText := key.GetToken().Value + key.SetPath(ctx.withChild(keyText).path) + if err := p.validateMapKey(key.GetToken()); err != nil { + return nil, errors.Wrapf(err, "validate mapping key error") + } + ctx.progress(1) // progress to mapping value token + tk := ctx.currentToken() // get mapping value token + if tk == nil { + return nil, errors.ErrSyntax("unexpected map", key.GetToken()) + } + ctx.progress(1) // progress to value token + if err := p.setSameLineCommentIfExists(ctx.withChild(keyText), key); err != nil { + return nil, errors.Wrapf(err, "failed to set same line comment to node") + } + if key.GetComment() != nil { + // if current token is comment, GetComment() is not nil. 
+ // then progress to value token + ctx.progressIgnoreComment(1) + } + + value, err := p.parseMapValue(ctx.withChild(keyText), key, tk) + if err != nil { + return nil, errors.Wrapf(err, "failed to parse map value") + } + if err := p.validateMapValue(ctx, key, value); err != nil { + return nil, errors.Wrapf(err, "failed to validate map value") + } + + mvnode := ast.MappingValue(tk, key, value) + mvnode.SetPath(ctx.withChild(keyText).path) + node := ast.Mapping(tk, false, mvnode) + node.SetPath(ctx.withChild(keyText).path) + + ntk := ctx.nextNotCommentToken() + antk := ctx.afterNextNotCommentToken() + for antk != nil && antk.Type == token.MappingValueType && + ntk.Position.Column == key.GetToken().Position.Column { + ctx.progressIgnoreComment(1) + value, err := p.parseToken(ctx, ctx.currentToken()) + if err != nil { + return nil, errors.Wrapf(err, "failed to parse mapping node") + } + switch value.Type() { + case ast.MappingType: + c := value.(*ast.MappingNode) + comment := c.GetComment() + for idx, v := range c.Values { + if idx == 0 && comment != nil { + if err := v.SetComment(comment); err != nil { + return nil, errors.Wrapf(err, "failed to set comment token to node") + } + } + node.Values = append(node.Values, v) + } + case ast.MappingValueType: + node.Values = append(node.Values, value.(*ast.MappingValueNode)) + default: + return nil, xerrors.Errorf("failed to parse mapping value node node is %s", value.Type()) + } + ntk = ctx.nextNotCommentToken() + antk = ctx.afterNextNotCommentToken() + } + if len(node.Values) == 1 { + mapKeyCol := mvnode.Key.GetToken().Position.Column + commentTk := ctx.nextToken() + if commentTk != nil && commentTk.Type == token.CommentType && mapKeyCol <= commentTk.Position.Column { + // If the comment is in the same or deeper column as the last element column in map value, + // treat it as a footer comment for the last element. + comment := p.parseFootComment(ctx, mapKeyCol) + mvnode.FootComment = comment + } + return mvnode, nil + } + mapCol := node.GetToken().Position.Column + commentTk := ctx.nextToken() + if commentTk != nil && commentTk.Type == token.CommentType && mapCol <= commentTk.Position.Column { + // If the comment is in the same or deeper column as the last element column in map value, + // treat it as a footer comment for the last element. 
+ comment := p.parseFootComment(ctx, mapCol) + node.FootComment = comment + } + return node, nil +} + +func (p *parser) parseSequenceEntry(ctx *context) (*ast.SequenceNode, error) { + tk := ctx.currentToken() + sequenceNode := ast.Sequence(tk, false) + sequenceNode.SetPath(ctx.path) + curColumn := tk.Position.Column + for tk.Type == token.SequenceEntryType { + ctx.progress(1) // skip sequence token + tk = ctx.currentToken() + if tk == nil { + return nil, errors.ErrSyntax("empty sequence entry", ctx.previousToken()) + } + var comment *ast.CommentGroupNode + if tk.Type == token.CommentType { + comment = p.parseCommentOnly(ctx) + tk = ctx.currentToken() + if tk.Type == token.SequenceEntryType { + ctx.progress(1) // skip sequence token + } + } + value, err := p.parseToken(ctx.withIndex(uint(len(sequenceNode.Values))), ctx.currentToken()) + if err != nil { + return nil, errors.Wrapf(err, "failed to parse sequence") + } + if comment != nil { + comment.SetPath(ctx.withIndex(uint(len(sequenceNode.Values))).path) + sequenceNode.ValueHeadComments = append(sequenceNode.ValueHeadComments, comment) + } else { + sequenceNode.ValueHeadComments = append(sequenceNode.ValueHeadComments, nil) + } + sequenceNode.Values = append(sequenceNode.Values, value) + tk = ctx.nextNotCommentToken() + if tk == nil { + break + } + if tk.Type != token.SequenceEntryType { + break + } + if tk.Position.Column != curColumn { + break + } + ctx.progressIgnoreComment(1) + } + commentTk := ctx.nextToken() + if commentTk != nil && commentTk.Type == token.CommentType && curColumn <= commentTk.Position.Column { + // If the comment is in the same or deeper column as the last element column in sequence value, + // treat it as a footer comment for the last element. + comment := p.parseFootComment(ctx, curColumn) + sequenceNode.FootComment = comment + } + return sequenceNode, nil +} + +func (p *parser) parseAnchor(ctx *context) (*ast.AnchorNode, error) { + tk := ctx.currentToken() + anchor := ast.Anchor(tk) + anchor.SetPath(ctx.path) + ntk := ctx.nextToken() + if ntk == nil { + return nil, errors.ErrSyntax("unexpected anchor. anchor name is undefined", tk) + } + ctx.progress(1) // skip anchor token + name, err := p.parseToken(ctx, ctx.currentToken()) + if err != nil { + return nil, errors.Wrapf(err, "failed to parser anchor name node") + } + anchor.Name = name + ntk = ctx.nextToken() + if ntk == nil { + return nil, errors.ErrSyntax("unexpected anchor. anchor value is undefined", ctx.currentToken()) + } + ctx.progress(1) + value, err := p.parseToken(ctx, ctx.currentToken()) + if err != nil { + return nil, errors.Wrapf(err, "failed to parser anchor name node") + } + anchor.Value = value + return anchor, nil +} + +func (p *parser) parseAlias(ctx *context) (*ast.AliasNode, error) { + tk := ctx.currentToken() + alias := ast.Alias(tk) + alias.SetPath(ctx.path) + ntk := ctx.nextToken() + if ntk == nil { + return nil, errors.ErrSyntax("unexpected alias. 
alias name is undefined", tk) + } + ctx.progress(1) // skip alias token + name, err := p.parseToken(ctx, ctx.currentToken()) + if err != nil { + return nil, errors.Wrapf(err, "failed to parser alias name node") + } + alias.Value = name + return alias, nil +} + +func (p *parser) parseMapKey(ctx *context) (ast.MapKeyNode, error) { + tk := ctx.currentToken() + if value := p.parseScalarValue(tk); value != nil { + return value, nil + } + switch tk.Type { + case token.MergeKeyType: + return ast.MergeKey(tk), nil + case token.MappingKeyType: + return p.parseMappingKey(ctx) + } + return nil, errors.ErrSyntax("unexpected mapping key", tk) +} + +func (p *parser) parseStringValue(tk *token.Token) *ast.StringNode { + switch tk.Type { + case token.StringType, + token.SingleQuoteType, + token.DoubleQuoteType: + return ast.String(tk) + } + return nil +} + +func (p *parser) parseScalarValueWithComment(ctx *context, tk *token.Token) (ast.ScalarNode, error) { + node := p.parseScalarValue(tk) + if node == nil { + return nil, nil + } + node.SetPath(ctx.path) + if p.isSameLineComment(ctx.nextToken(), node) { + ctx.progress(1) + if err := p.setSameLineCommentIfExists(ctx, node); err != nil { + return nil, errors.Wrapf(err, "failed to set same line comment to node") + } + } + return node, nil +} + +func (p *parser) parseScalarValue(tk *token.Token) ast.ScalarNode { + if node := p.parseStringValue(tk); node != nil { + return node + } + switch tk.Type { + case token.NullType: + return ast.Null(tk) + case token.BoolType: + return ast.Bool(tk) + case token.IntegerType, + token.BinaryIntegerType, + token.OctetIntegerType, + token.HexIntegerType: + return ast.Integer(tk) + case token.FloatType: + return ast.Float(tk) + case token.InfinityType: + return ast.Infinity(tk) + case token.NanType: + return ast.Nan(tk) + } + return nil +} + +func (p *parser) parseDirective(ctx *context) (*ast.DirectiveNode, error) { + node := ast.Directive(ctx.currentToken()) + ctx.progress(1) // skip directive token + value, err := p.parseToken(ctx, ctx.currentToken()) + if err != nil { + return nil, errors.Wrapf(err, "failed to parse directive value") + } + node.Value = value + ctx.progress(1) + tk := ctx.currentToken() + if tk == nil { + // Since current token is nil, use the previous token to specify + // the syntax error location. + return nil, errors.ErrSyntax("unexpected directive value. document not started", ctx.previousToken()) + } + if tk.Type != token.DocumentHeaderType { + return nil, errors.ErrSyntax("unexpected directive value. document not started", ctx.currentToken()) + } + return node, nil +} + +func (p *parser) parseLiteral(ctx *context) (*ast.LiteralNode, error) { + node := ast.Literal(ctx.currentToken()) + ctx.progress(1) // skip literal/folded token + + tk := ctx.currentToken() + var comment *ast.CommentGroupNode + if tk.Type == token.CommentType { + comment = p.parseCommentOnly(ctx) + comment.SetPath(ctx.path) + if err := node.SetComment(comment); err != nil { + return nil, errors.Wrapf(err, "failed to set comment to literal") + } + tk = ctx.currentToken() + } + value, err := p.parseToken(ctx, tk) + if err != nil { + return nil, errors.Wrapf(err, "failed to parse literal/folded value") + } + snode, ok := value.(*ast.StringNode) + if !ok { + return nil, errors.ErrSyntax("unexpected token. 
required string token", value.GetToken()) + } + node.Value = snode + return node, nil +} + +func (p *parser) isSameLineComment(tk *token.Token, node ast.Node) bool { + if tk == nil { + return false + } + if tk.Type != token.CommentType { + return false + } + return tk.Position.Line == node.GetToken().Position.Line +} + +func (p *parser) setSameLineCommentIfExists(ctx *context, node ast.Node) error { + tk := ctx.currentToken() + if !p.isSameLineComment(tk, node) { + return nil + } + comment := ast.CommentGroup([]*token.Token{tk}) + comment.SetPath(ctx.path) + if err := node.SetComment(comment); err != nil { + return errors.Wrapf(err, "failed to set comment token to ast.Node") + } + return nil +} + +func (p *parser) parseDocument(ctx *context) (*ast.DocumentNode, error) { + startTk := ctx.currentToken() + ctx.progress(1) // skip document header token + body, err := p.parseToken(ctx, ctx.currentToken()) + if err != nil { + return nil, errors.Wrapf(err, "failed to parse document body") + } + node := ast.Document(startTk, body) + if ntk := ctx.nextToken(); ntk != nil && ntk.Type == token.DocumentEndType { + node.End = ntk + ctx.progress(1) + } + return node, nil +} + +func (p *parser) parseCommentOnly(ctx *context) *ast.CommentGroupNode { + commentTokens := []*token.Token{} + for { + tk := ctx.currentToken() + if tk == nil { + break + } + if tk.Type != token.CommentType { + break + } + commentTokens = append(commentTokens, tk) + ctx.progressIgnoreComment(1) // skip comment token + } + return ast.CommentGroup(commentTokens) +} + +func (p *parser) parseFootComment(ctx *context, col int) *ast.CommentGroupNode { + commentTokens := []*token.Token{} + for { + ctx.progressIgnoreComment(1) + commentTokens = append(commentTokens, ctx.currentToken()) + + nextTk := ctx.nextToken() + if nextTk == nil { + break + } + if nextTk.Type != token.CommentType { + break + } + if col > nextTk.Position.Column { + break + } + } + return ast.CommentGroup(commentTokens) +} + +func (p *parser) parseComment(ctx *context) (ast.Node, error) { + group := p.parseCommentOnly(ctx) + node, err := p.parseToken(ctx, ctx.currentToken()) + if err != nil { + return nil, errors.Wrapf(err, "failed to parse node after comment") + } + if node == nil { + return group, nil + } + group.SetPath(node.GetPath()) + if err := node.SetComment(group); err != nil { + return nil, errors.Wrapf(err, "failed to set comment token to node") + } + return node, nil +} + +func (p *parser) parseMappingKey(ctx *context) (*ast.MappingKeyNode, error) { + keyTk := ctx.currentToken() + node := ast.MappingKey(keyTk) + node.SetPath(ctx.path) + ctx.progress(1) // skip mapping key token + value, err := p.parseToken(ctx.withChild(keyTk.Value), ctx.currentToken()) + if err != nil { + return nil, errors.Wrapf(err, "failed to parse map key") + } + node.Value = value + return node, nil +} + +func (p *parser) parseToken(ctx *context, tk *token.Token) (ast.Node, error) { + node, err := p.createNodeFromToken(ctx, tk) + if err != nil { + return nil, errors.Wrapf(err, "failed to create node from token") + } + if node != nil && node.GetPath() == "" { + node.SetPath(ctx.path) + } + return node, nil +} + +func (p *parser) createNodeFromToken(ctx *context, tk *token.Token) (ast.Node, error) { + if tk == nil { + return nil, nil + } + if tk.NextType() == token.MappingValueType { + node, err := p.parseMappingValue(ctx) + return node, err + } + node, err := p.parseScalarValueWithComment(ctx, tk) + if err != nil { + return nil, errors.Wrapf(err, "failed to parse scalar value") + } + if 
node != nil { + return node, nil + } + switch tk.Type { + case token.CommentType: + return p.parseComment(ctx) + case token.MappingKeyType: + return p.parseMappingKey(ctx) + case token.DocumentHeaderType: + return p.parseDocument(ctx) + case token.MappingStartType: + return p.parseMapping(ctx) + case token.SequenceStartType: + return p.parseSequence(ctx) + case token.SequenceEntryType: + return p.parseSequenceEntry(ctx) + case token.AnchorType: + return p.parseAnchor(ctx) + case token.AliasType: + return p.parseAlias(ctx) + case token.DirectiveType: + return p.parseDirective(ctx) + case token.TagType: + return p.parseTag(ctx) + case token.LiteralType, token.FoldedType: + return p.parseLiteral(ctx) + } + return nil, nil +} + +func (p *parser) parse(tokens token.Tokens, mode Mode) (*ast.File, error) { + ctx := newContext(tokens, mode) + file := &ast.File{Docs: []*ast.DocumentNode{}} + for ctx.next() { + node, err := p.parseToken(ctx, ctx.currentToken()) + if err != nil { + return nil, errors.Wrapf(err, "failed to parse") + } + ctx.progressIgnoreComment(1) + if node == nil { + continue + } + if doc, ok := node.(*ast.DocumentNode); ok { + file.Docs = append(file.Docs, doc) + } else { + file.Docs = append(file.Docs, ast.Document(nil, node)) + } + } + return file, nil +} + +type Mode uint + +const ( + ParseComments Mode = 1 << iota // parse comments and add them to AST +) + +// ParseBytes parse from byte slice, and returns ast.File +func ParseBytes(bytes []byte, mode Mode) (*ast.File, error) { + tokens := lexer.Tokenize(string(bytes)) + f, err := Parse(tokens, mode) + if err != nil { + return nil, errors.Wrapf(err, "failed to parse") + } + return f, nil +} + +// Parse parse from token instances, and returns ast.File +func Parse(tokens token.Tokens, mode Mode) (*ast.File, error) { + var p parser + f, err := p.parse(tokens, mode) + if err != nil { + return nil, errors.Wrapf(err, "failed to parse") + } + return f, nil +} + +// Parse parse from filename, and returns ast.File +func ParseFile(filename string, mode Mode) (*ast.File, error) { + file, err := os.ReadFile(filename) + if err != nil { + return nil, errors.Wrapf(err, "failed to read file: %s", filename) + } + f, err := ParseBytes(file, mode) + if err != nil { + return nil, errors.Wrapf(err, "failed to parse") + } + f.Name = filename + return f, nil +} diff --git a/vendor/github.com/goccy/go-yaml/path.go b/vendor/github.com/goccy/go-yaml/path.go new file mode 100644 index 00000000..b79c6669 --- /dev/null +++ b/vendor/github.com/goccy/go-yaml/path.go @@ -0,0 +1,814 @@ +package yaml + +import ( + "bytes" + "fmt" + "io" + "strconv" + "strings" + + "github.com/goccy/go-yaml/ast" + "github.com/goccy/go-yaml/internal/errors" + "github.com/goccy/go-yaml/parser" + "github.com/goccy/go-yaml/printer" +) + +// PathString create Path from string +// +// YAMLPath rule +// $ : the root object/element +// . : child operator +// .. : recursive descent +// [num] : object/element of array by number +// [*] : all objects/elements for array. +// +// If you want to use reserved characters such as `.` and `*` as a key name, +// enclose them in single quotation as follows ( $.foo.'bar.baz-*'.hoge ). +// If you want to use a single quote with reserved characters, escape it with `\` ( $.foo.'bar.baz\'s value'.hoge ). 
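An illustrative sketch of how the YAMLPath syntax described above is used through PathString and Path.Read as defined in this vendored file (the document text and key names below are hypothetical, not taken from the patch):

package main

import (
	"fmt"
	"strings"

	yaml "github.com/goccy/go-yaml"
)

func main() {
	// "$.store.name" selects the "name" key nested under "store".
	path, err := yaml.PathString("$.store.name")
	if err != nil {
		panic(err)
	}
	var name string
	// Read parses the YAML from the reader, filters it by the path,
	// and unmarshals the selected node into name.
	src := "store:\n  name: books\n  items: 2\n"
	if err := path.Read(strings.NewReader(src), &name); err != nil {
		panic(err)
	}
	fmt.Println(name) // books
}

Since Read re-parses the selected node's text through Unmarshal, the target can be any type Unmarshal accepts, not only a string.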
+func PathString(s string) (*Path, error) { + buf := []rune(s) + length := len(buf) + cursor := 0 + builder := &PathBuilder{} + for cursor < length { + c := buf[cursor] + switch c { + case '$': + builder = builder.Root() + cursor++ + case '.': + b, buf, c, err := parsePathDot(builder, buf, cursor) + if err != nil { + return nil, errors.Wrapf(err, "failed to parse path of dot") + } + length = len(buf) + builder = b + cursor = c + case '[': + b, buf, c, err := parsePathIndex(builder, buf, cursor) + if err != nil { + return nil, errors.Wrapf(err, "failed to parse path of index") + } + length = len(buf) + builder = b + cursor = c + default: + return nil, errors.Wrapf(ErrInvalidPathString, "invalid path at %d", cursor) + } + } + return builder.Build(), nil +} + +func parsePathRecursive(b *PathBuilder, buf []rune, cursor int) (*PathBuilder, []rune, int, error) { + length := len(buf) + cursor += 2 // skip .. characters + start := cursor + for ; cursor < length; cursor++ { + c := buf[cursor] + switch c { + case '$': + return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "specified '$' after '..' character") + case '*': + return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "specified '*' after '..' character") + case '.', '[': + goto end + case ']': + return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "specified ']' after '..' character") + } + } +end: + if start == cursor { + return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "not found recursive selector") + } + return b.Recursive(string(buf[start:cursor])), buf, cursor, nil +} + +func parsePathDot(b *PathBuilder, buf []rune, cursor int) (*PathBuilder, []rune, int, error) { + length := len(buf) + if cursor+1 < length && buf[cursor+1] == '.' { + b, buf, c, err := parsePathRecursive(b, buf, cursor) + if err != nil { + return nil, nil, 0, errors.Wrapf(err, "failed to parse path of recursive") + } + return b, buf, c, nil + } + cursor++ // skip . character + start := cursor + + // if started single quote, looking for end single quote char + if cursor < length && buf[cursor] == '\'' { + return parseQuotedKey(b, buf, cursor) + } + for ; cursor < length; cursor++ { + c := buf[cursor] + switch c { + case '$': + return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "specified '$' after '.' character") + case '*': + return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "specified '*' after '.' character") + case '.', '[': + goto end + case ']': + return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "specified ']' after '.' character") + } + } +end: + if start == cursor { + return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "cloud not find by empty key") + } + return b.child(string(buf[start:cursor])), buf, cursor, nil +} + +func parseQuotedKey(b *PathBuilder, buf []rune, cursor int) (*PathBuilder, []rune, int, error) { + cursor++ // skip single quote + start := cursor + length := len(buf) + var foundEndDelim bool + for ; cursor < length; cursor++ { + switch buf[cursor] { + case '\\': + buf = append(append([]rune{}, buf[:cursor]...), buf[cursor+1:]...) 
+ length = len(buf) + case '\'': + foundEndDelim = true + goto end + } + } +end: + if !foundEndDelim { + return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "could not find end delimiter for key") + } + if start == cursor { + return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "could not find by empty key") + } + selector := buf[start:cursor] + cursor++ + if cursor < length { + switch buf[cursor] { + case '$': + return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "specified '$' after '.' character") + case '*': + return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "specified '*' after '.' character") + case ']': + return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "specified ']' after '.' character") + } + } + return b.child(string(selector)), buf, cursor, nil +} + +func parsePathIndex(b *PathBuilder, buf []rune, cursor int) (*PathBuilder, []rune, int, error) { + length := len(buf) + cursor++ // skip '[' character + if length <= cursor { + return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "unexpected end of YAML Path") + } + c := buf[cursor] + switch c { + case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '*': + start := cursor + cursor++ + for ; cursor < length; cursor++ { + c := buf[cursor] + switch c { + case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + continue + } + break + } + if buf[cursor] != ']' { + return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "invalid character %s at %d", string(buf[cursor]), cursor) + } + numOrAll := string(buf[start:cursor]) + if numOrAll == "*" { + return b.IndexAll(), buf, cursor + 1, nil + } + num, err := strconv.ParseInt(numOrAll, 10, 64) + if err != nil { + return nil, nil, 0, errors.Wrapf(err, "failed to parse number") + } + return b.Index(uint(num)), buf, cursor + 1, nil + } + return nil, nil, 0, errors.Wrapf(ErrInvalidPathString, "invalid character %s at %d", c, cursor) +} + +// Path represent YAMLPath ( like a JSONPath ). +type Path struct { + node pathNode +} + +// String path to text. +func (p *Path) String() string { + return p.node.String() +} + +// Read decode from r and set extracted value by YAMLPath to v. +func (p *Path) Read(r io.Reader, v interface{}) error { + node, err := p.ReadNode(r) + if err != nil { + return errors.Wrapf(err, "failed to read node") + } + if err := Unmarshal([]byte(node.String()), v); err != nil { + return errors.Wrapf(err, "failed to unmarshal") + } + return nil +} + +// ReadNode create AST from r and extract node by YAMLPath. +func (p *Path) ReadNode(r io.Reader) (ast.Node, error) { + if p.node == nil { + return nil, ErrInvalidPath + } + var buf bytes.Buffer + if _, err := io.Copy(&buf, r); err != nil { + return nil, errors.Wrapf(err, "failed to copy from reader") + } + f, err := parser.ParseBytes(buf.Bytes(), 0) + if err != nil { + return nil, errors.Wrapf(err, "failed to parse yaml") + } + node, err := p.FilterFile(f) + if err != nil { + return nil, errors.Wrapf(err, "failed to filter from ast.File") + } + return node, nil +} + +// Filter filter from target by YAMLPath and set it to v. +func (p *Path) Filter(target, v interface{}) error { + b, err := Marshal(target) + if err != nil { + return errors.Wrapf(err, "failed to marshal target value") + } + if err := p.Read(bytes.NewBuffer(b), v); err != nil { + return errors.Wrapf(err, "failed to read") + } + return nil +} + +// FilterFile filter from ast.File by YAMLPath. 
+func (p *Path) FilterFile(f *ast.File) (ast.Node, error) { + for _, doc := range f.Docs { + node, err := p.FilterNode(doc.Body) + if err != nil { + return nil, errors.Wrapf(err, "failed to filter node by path ( %s )", p.node) + } + if node != nil { + return node, nil + } + } + return nil, errors.Wrapf(ErrNotFoundNode, "failed to find path ( %s )", p.node) +} + +// FilterNode filter from node by YAMLPath. +func (p *Path) FilterNode(node ast.Node) (ast.Node, error) { + n, err := p.node.filter(node) + if err != nil { + return nil, errors.Wrapf(err, "failed to filter node by path ( %s )", p.node) + } + return n, nil +} + +// MergeFromReader merge YAML text into ast.File. +func (p *Path) MergeFromReader(dst *ast.File, src io.Reader) error { + var buf bytes.Buffer + if _, err := io.Copy(&buf, src); err != nil { + return errors.Wrapf(err, "failed to copy from reader") + } + file, err := parser.ParseBytes(buf.Bytes(), 0) + if err != nil { + return errors.Wrapf(err, "failed to parse") + } + if err := p.MergeFromFile(dst, file); err != nil { + return errors.Wrapf(err, "failed to merge file") + } + return nil +} + +// MergeFromFile merge ast.File into ast.File. +func (p *Path) MergeFromFile(dst *ast.File, src *ast.File) error { + base, err := p.FilterFile(dst) + if err != nil { + return errors.Wrapf(err, "failed to filter file") + } + for _, doc := range src.Docs { + if err := ast.Merge(base, doc); err != nil { + return errors.Wrapf(err, "failed to merge") + } + } + return nil +} + +// MergeFromNode merge ast.Node into ast.File. +func (p *Path) MergeFromNode(dst *ast.File, src ast.Node) error { + base, err := p.FilterFile(dst) + if err != nil { + return errors.Wrapf(err, "failed to filter file") + } + if err := ast.Merge(base, src); err != nil { + return errors.Wrapf(err, "failed to merge") + } + return nil +} + +// ReplaceWithReader replace ast.File with io.Reader. +func (p *Path) ReplaceWithReader(dst *ast.File, src io.Reader) error { + var buf bytes.Buffer + if _, err := io.Copy(&buf, src); err != nil { + return errors.Wrapf(err, "failed to copy from reader") + } + file, err := parser.ParseBytes(buf.Bytes(), 0) + if err != nil { + return errors.Wrapf(err, "failed to parse") + } + if err := p.ReplaceWithFile(dst, file); err != nil { + return errors.Wrapf(err, "failed to replace file") + } + return nil +} + +// ReplaceWithFile replace ast.File with ast.File. +func (p *Path) ReplaceWithFile(dst *ast.File, src *ast.File) error { + for _, doc := range src.Docs { + if err := p.ReplaceWithNode(dst, doc); err != nil { + return errors.Wrapf(err, "failed to replace file by path ( %s )", p.node) + } + } + return nil +} + +// ReplaceNode replace ast.File with ast.Node. +func (p *Path) ReplaceWithNode(dst *ast.File, node ast.Node) error { + for _, doc := range dst.Docs { + if node.Type() == ast.DocumentType { + node = node.(*ast.DocumentNode).Body + } + if err := p.node.replace(doc.Body, node); err != nil { + return errors.Wrapf(err, "failed to replace node by path ( %s )", p.node) + } + } + return nil +} + +// AnnotateSource add annotation to passed source ( see section 5.1 in README.md ). +func (p *Path) AnnotateSource(source []byte, colored bool) ([]byte, error) { + file, err := parser.ParseBytes([]byte(source), 0) + if err != nil { + return nil, err + } + node, err := p.FilterFile(file) + if err != nil { + return nil, err + } + var pp printer.Printer + return []byte(pp.PrintErrorToken(node.GetToken(), colored)), nil +} + +// PathBuilder represent builder for YAMLPath. 
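The same kind of path can also be assembled programmatically with the PathBuilder declared below; a short hypothetical sketch (key names invented for the example):

package main

import (
	"fmt"

	yaml "github.com/goccy/go-yaml"
)

func main() {
	// Builds the path "$.store.items[0]" without parsing a path string.
	p := (&yaml.PathBuilder{}).Root().Child("store").Child("items").Index(0).Build()
	fmt.Println(p) // $.store.items[0]
}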
+type PathBuilder struct { + root *rootNode + node pathNode +} + +// Root add '$' to current path. +func (b *PathBuilder) Root() *PathBuilder { + root := newRootNode() + return &PathBuilder{root: root, node: root} +} + +// IndexAll add '[*]' to current path. +func (b *PathBuilder) IndexAll() *PathBuilder { + b.node = b.node.chain(newIndexAllNode()) + return b +} + +// Recursive add '..selector' to current path. +func (b *PathBuilder) Recursive(selector string) *PathBuilder { + b.node = b.node.chain(newRecursiveNode(selector)) + return b +} + +func (b *PathBuilder) containsReservedPathCharacters(path string) bool { + if strings.Contains(path, ".") { + return true + } + if strings.Contains(path, "*") { + return true + } + return false +} + +func (b *PathBuilder) enclosedSingleQuote(name string) bool { + return strings.HasPrefix(name, "'") && strings.HasSuffix(name, "'") +} + +func (b *PathBuilder) normalizeSelectorName(name string) string { + if b.enclosedSingleQuote(name) { + // already escaped name + return name + } + if b.containsReservedPathCharacters(name) { + escapedName := strings.ReplaceAll(name, `'`, `\'`) + return "'" + escapedName + "'" + } + return name +} + +func (b *PathBuilder) child(name string) *PathBuilder { + b.node = b.node.chain(newSelectorNode(name)) + return b +} + +// Child add '.name' to current path. +func (b *PathBuilder) Child(name string) *PathBuilder { + return b.child(b.normalizeSelectorName(name)) +} + +// Index add '[idx]' to current path. +func (b *PathBuilder) Index(idx uint) *PathBuilder { + b.node = b.node.chain(newIndexNode(idx)) + return b +} + +// Build build YAMLPath. +func (b *PathBuilder) Build() *Path { + return &Path{node: b.root} +} + +type pathNode interface { + fmt.Stringer + chain(pathNode) pathNode + filter(ast.Node) (ast.Node, error) + replace(ast.Node, ast.Node) error +} + +type basePathNode struct { + child pathNode +} + +func (n *basePathNode) chain(node pathNode) pathNode { + n.child = node + return node +} + +type rootNode struct { + *basePathNode +} + +func newRootNode() *rootNode { + return &rootNode{basePathNode: &basePathNode{}} +} + +func (n *rootNode) String() string { + s := "$" + if n.child != nil { + s += n.child.String() + } + return s +} + +func (n *rootNode) filter(node ast.Node) (ast.Node, error) { + if n.child == nil { + return node, nil + } + filtered, err := n.child.filter(node) + if err != nil { + return nil, errors.Wrapf(err, "failed to filter") + } + return filtered, nil +} + +func (n *rootNode) replace(node ast.Node, target ast.Node) error { + if n.child == nil { + return nil + } + if err := n.child.replace(node, target); err != nil { + return errors.Wrapf(err, "failed to replace") + } + return nil +} + +type selectorNode struct { + *basePathNode + selector string +} + +func newSelectorNode(selector string) *selectorNode { + return &selectorNode{ + basePathNode: &basePathNode{}, + selector: selector, + } +} + +func (n *selectorNode) filter(node ast.Node) (ast.Node, error) { + selector := n.selector + if len(selector) > 1 && selector[0] == '\'' && selector[len(selector)-1] == '\'' { + selector = selector[1 : len(selector)-1] + } + switch node.Type() { + case ast.MappingType: + for _, value := range node.(*ast.MappingNode).Values { + key := value.Key.GetToken().Value + if len(key) > 0 { + switch key[0] { + case '"': + var err error + key, err = strconv.Unquote(key) + if err != nil { + return nil, errors.Wrapf(err, "failed to unquote") + } + case '\'': + if len(key) > 1 && key[len(key)-1] == '\'' { + key = key[1 : 
len(key)-1] + } + } + } + if key == selector { + if n.child == nil { + return value.Value, nil + } + filtered, err := n.child.filter(value.Value) + if err != nil { + return nil, errors.Wrapf(err, "failed to filter") + } + return filtered, nil + } + } + case ast.MappingValueType: + value := node.(*ast.MappingValueNode) + key := value.Key.GetToken().Value + if key == selector { + if n.child == nil { + return value.Value, nil + } + filtered, err := n.child.filter(value.Value) + if err != nil { + return nil, errors.Wrapf(err, "failed to filter") + } + return filtered, nil + } + default: + return nil, errors.Wrapf(ErrInvalidQuery, "expected node type is map or map value. but got %s", node.Type()) + } + return nil, nil +} + +func (n *selectorNode) replaceMapValue(value *ast.MappingValueNode, target ast.Node) error { + key := value.Key.GetToken().Value + if key != n.selector { + return nil + } + if n.child == nil { + if err := value.Replace(target); err != nil { + return errors.Wrapf(err, "failed to replace") + } + } else { + if err := n.child.replace(value.Value, target); err != nil { + return errors.Wrapf(err, "failed to replace") + } + } + return nil +} + +func (n *selectorNode) replace(node ast.Node, target ast.Node) error { + switch node.Type() { + case ast.MappingType: + for _, value := range node.(*ast.MappingNode).Values { + if err := n.replaceMapValue(value, target); err != nil { + return errors.Wrapf(err, "failed to replace map value") + } + } + case ast.MappingValueType: + value := node.(*ast.MappingValueNode) + if err := n.replaceMapValue(value, target); err != nil { + return errors.Wrapf(err, "failed to replace map value") + } + default: + return errors.Wrapf(ErrInvalidQuery, "expected node type is map or map value. but got %s", node.Type()) + } + return nil +} + +func (n *selectorNode) String() string { + var builder PathBuilder + selector := builder.normalizeSelectorName(n.selector) + s := fmt.Sprintf(".%s", selector) + if n.child != nil { + s += n.child.String() + } + return s +} + +type indexNode struct { + *basePathNode + selector uint +} + +func newIndexNode(selector uint) *indexNode { + return &indexNode{ + basePathNode: &basePathNode{}, + selector: selector, + } +} + +func (n *indexNode) filter(node ast.Node) (ast.Node, error) { + if node.Type() != ast.SequenceType { + return nil, errors.Wrapf(ErrInvalidQuery, "expected sequence type node. but got %s", node.Type()) + } + sequence := node.(*ast.SequenceNode) + if n.selector >= uint(len(sequence.Values)) { + return nil, errors.Wrapf(ErrInvalidQuery, "expected index is %d. but got sequences has %d items", n.selector, sequence.Values) + } + value := sequence.Values[n.selector] + if n.child == nil { + return value, nil + } + filtered, err := n.child.filter(value) + if err != nil { + return nil, errors.Wrapf(err, "failed to filter") + } + return filtered, nil +} + +func (n *indexNode) replace(node ast.Node, target ast.Node) error { + if node.Type() != ast.SequenceType { + return errors.Wrapf(ErrInvalidQuery, "expected sequence type node. but got %s", node.Type()) + } + sequence := node.(*ast.SequenceNode) + if n.selector >= uint(len(sequence.Values)) { + return errors.Wrapf(ErrInvalidQuery, "expected index is %d. 
but got sequences has %d items", n.selector, sequence.Values) + } + if n.child == nil { + if err := sequence.Replace(int(n.selector), target); err != nil { + return errors.Wrapf(err, "failed to replace") + } + return nil + } + if err := n.child.replace(sequence.Values[n.selector], target); err != nil { + return errors.Wrapf(err, "failed to replace") + } + return nil +} + +func (n *indexNode) String() string { + s := fmt.Sprintf("[%d]", n.selector) + if n.child != nil { + s += n.child.String() + } + return s +} + +type indexAllNode struct { + *basePathNode +} + +func newIndexAllNode() *indexAllNode { + return &indexAllNode{ + basePathNode: &basePathNode{}, + } +} + +func (n *indexAllNode) String() string { + s := "[*]" + if n.child != nil { + s += n.child.String() + } + return s +} + +func (n *indexAllNode) filter(node ast.Node) (ast.Node, error) { + if node.Type() != ast.SequenceType { + return nil, errors.Wrapf(ErrInvalidQuery, "expected sequence type node. but got %s", node.Type()) + } + sequence := node.(*ast.SequenceNode) + if n.child == nil { + return sequence, nil + } + out := *sequence + out.Values = []ast.Node{} + for _, value := range sequence.Values { + filtered, err := n.child.filter(value) + if err != nil { + return nil, errors.Wrapf(err, "failed to filter") + } + out.Values = append(out.Values, filtered) + } + return &out, nil +} + +func (n *indexAllNode) replace(node ast.Node, target ast.Node) error { + if node.Type() != ast.SequenceType { + return errors.Wrapf(ErrInvalidQuery, "expected sequence type node. but got %s", node.Type()) + } + sequence := node.(*ast.SequenceNode) + if n.child == nil { + for idx := range sequence.Values { + if err := sequence.Replace(idx, target); err != nil { + return errors.Wrapf(err, "failed to replace") + } + } + return nil + } + for _, value := range sequence.Values { + if err := n.child.replace(value, target); err != nil { + return errors.Wrapf(err, "failed to replace") + } + } + return nil +} + +type recursiveNode struct { + *basePathNode + selector string +} + +func newRecursiveNode(selector string) *recursiveNode { + return &recursiveNode{ + basePathNode: &basePathNode{}, + selector: selector, + } +} + +func (n *recursiveNode) String() string { + s := fmt.Sprintf("..%s", n.selector) + if n.child != nil { + s += n.child.String() + } + return s +} + +func (n *recursiveNode) filterNode(node ast.Node) (*ast.SequenceNode, error) { + sequence := &ast.SequenceNode{BaseNode: &ast.BaseNode{}} + switch typedNode := node.(type) { + case *ast.MappingNode: + for _, value := range typedNode.Values { + seq, err := n.filterNode(value) + if err != nil { + return nil, errors.Wrapf(err, "failed to filter") + } + sequence.Values = append(sequence.Values, seq.Values...) + } + case *ast.MappingValueNode: + key := typedNode.Key.GetToken().Value + if n.selector == key { + sequence.Values = append(sequence.Values, typedNode.Value) + } + seq, err := n.filterNode(typedNode.Value) + if err != nil { + return nil, errors.Wrapf(err, "failed to filter") + } + sequence.Values = append(sequence.Values, seq.Values...) + case *ast.SequenceNode: + for _, value := range typedNode.Values { + seq, err := n.filterNode(value) + if err != nil { + return nil, errors.Wrapf(err, "failed to filter") + } + sequence.Values = append(sequence.Values, seq.Values...) 
+ } + } + return sequence, nil +} + +func (n *recursiveNode) filter(node ast.Node) (ast.Node, error) { + sequence, err := n.filterNode(node) + if err != nil { + return nil, errors.Wrapf(err, "failed to filter") + } + sequence.Start = node.GetToken() + return sequence, nil +} + +func (n *recursiveNode) replaceNode(node ast.Node, target ast.Node) error { + switch typedNode := node.(type) { + case *ast.MappingNode: + for _, value := range typedNode.Values { + if err := n.replaceNode(value, target); err != nil { + return errors.Wrapf(err, "failed to replace") + } + } + case *ast.MappingValueNode: + key := typedNode.Key.GetToken().Value + if n.selector == key { + if err := typedNode.Replace(target); err != nil { + return errors.Wrapf(err, "failed to replace") + } + } + if err := n.replaceNode(typedNode.Value, target); err != nil { + return errors.Wrapf(err, "failed to replace") + } + case *ast.SequenceNode: + for _, value := range typedNode.Values { + if err := n.replaceNode(value, target); err != nil { + return errors.Wrapf(err, "failed to replace") + } + } + } + return nil +} + +func (n *recursiveNode) replace(node ast.Node, target ast.Node) error { + if err := n.replaceNode(node, target); err != nil { + return errors.Wrapf(err, "failed to replace") + } + return nil +} diff --git a/vendor/github.com/goccy/go-yaml/printer/printer.go b/vendor/github.com/goccy/go-yaml/printer/printer.go new file mode 100644 index 00000000..d5e25dc9 --- /dev/null +++ b/vendor/github.com/goccy/go-yaml/printer/printer.go @@ -0,0 +1,352 @@ +package printer + +import ( + "fmt" + "math" + "strings" + + "github.com/fatih/color" + "github.com/goccy/go-yaml/ast" + "github.com/goccy/go-yaml/token" +) + +// Property additional property set for each the token +type Property struct { + Prefix string + Suffix string +} + +// PrintFunc returns property instance +type PrintFunc func() *Property + +// Printer create text from token collection or ast +type Printer struct { + LineNumber bool + LineNumberFormat func(num int) string + MapKey PrintFunc + Anchor PrintFunc + Alias PrintFunc + Bool PrintFunc + String PrintFunc + Number PrintFunc +} + +func defaultLineNumberFormat(num int) string { + return fmt.Sprintf("%2d | ", num) +} + +func (p *Printer) property(tk *token.Token) *Property { + prop := &Property{} + switch tk.PreviousType() { + case token.AnchorType: + if p.Anchor != nil { + return p.Anchor() + } + return prop + case token.AliasType: + if p.Alias != nil { + return p.Alias() + } + return prop + } + switch tk.NextType() { + case token.MappingValueType: + if p.MapKey != nil { + return p.MapKey() + } + return prop + } + switch tk.Type { + case token.BoolType: + if p.Bool != nil { + return p.Bool() + } + return prop + case token.AnchorType: + if p.Anchor != nil { + return p.Anchor() + } + return prop + case token.AliasType: + if p.Anchor != nil { + return p.Alias() + } + return prop + case token.StringType, token.SingleQuoteType, token.DoubleQuoteType: + if p.String != nil { + return p.String() + } + return prop + case token.IntegerType, token.FloatType: + if p.Number != nil { + return p.Number() + } + return prop + default: + } + return prop +} + +// PrintTokens create text from token collection +func (p *Printer) PrintTokens(tokens token.Tokens) string { + if len(tokens) == 0 { + return "" + } + if p.LineNumber { + if p.LineNumberFormat == nil { + p.LineNumberFormat = defaultLineNumberFormat + } + } + texts := []string{} + lineNumber := tokens[0].Position.Line + for _, tk := range tokens { + lines := 
strings.Split(tk.Origin, "\n") + prop := p.property(tk) + header := "" + if p.LineNumber { + header = p.LineNumberFormat(lineNumber) + } + if len(lines) == 1 { + line := prop.Prefix + lines[0] + prop.Suffix + if len(texts) == 0 { + texts = append(texts, header+line) + lineNumber++ + } else { + text := texts[len(texts)-1] + texts[len(texts)-1] = text + line + } + } else { + for idx, src := range lines { + if p.LineNumber { + header = p.LineNumberFormat(lineNumber) + } + line := prop.Prefix + src + prop.Suffix + if idx == 0 { + if len(texts) == 0 { + texts = append(texts, header+line) + lineNumber++ + } else { + text := texts[len(texts)-1] + texts[len(texts)-1] = text + line + } + } else { + texts = append(texts, fmt.Sprintf("%s%s", header, line)) + lineNumber++ + } + } + } + } + return strings.Join(texts, "\n") +} + +// PrintNode create text from ast.Node +func (p *Printer) PrintNode(node ast.Node) []byte { + return []byte(fmt.Sprintf("%+v\n", node)) +} + +const escape = "\x1b" + +func format(attr color.Attribute) string { + return fmt.Sprintf("%s[%dm", escape, attr) +} + +func (p *Printer) setDefaultColorSet() { + p.Bool = func() *Property { + return &Property{ + Prefix: format(color.FgHiMagenta), + Suffix: format(color.Reset), + } + } + p.Number = func() *Property { + return &Property{ + Prefix: format(color.FgHiMagenta), + Suffix: format(color.Reset), + } + } + p.MapKey = func() *Property { + return &Property{ + Prefix: format(color.FgHiCyan), + Suffix: format(color.Reset), + } + } + p.Anchor = func() *Property { + return &Property{ + Prefix: format(color.FgHiYellow), + Suffix: format(color.Reset), + } + } + p.Alias = func() *Property { + return &Property{ + Prefix: format(color.FgHiYellow), + Suffix: format(color.Reset), + } + } + p.String = func() *Property { + return &Property{ + Prefix: format(color.FgHiGreen), + Suffix: format(color.Reset), + } + } +} + +func (p *Printer) PrintErrorMessage(msg string, isColored bool) string { + if isColored { + return fmt.Sprintf("%s%s%s", + format(color.FgHiRed), + msg, + format(color.Reset), + ) + } + return msg +} + +func (p *Printer) removeLeftSideNewLineChar(src string) string { + return strings.TrimLeft(strings.TrimLeft(strings.TrimLeft(src, "\r"), "\n"), "\r\n") +} + +func (p *Printer) removeRightSideNewLineChar(src string) string { + return strings.TrimRight(strings.TrimRight(strings.TrimRight(src, "\r"), "\n"), "\r\n") +} + +func (p *Printer) removeRightSideWhiteSpaceChar(src string) string { + return p.removeRightSideNewLineChar(strings.TrimRight(src, " ")) +} + +func (p *Printer) newLineCount(s string) int { + src := []rune(s) + size := len(src) + cnt := 0 + for i := 0; i < size; i++ { + c := src[i] + switch c { + case '\r': + if i+1 < size && src[i+1] == '\n' { + i++ + } + cnt++ + case '\n': + cnt++ + } + } + return cnt +} + +func (p *Printer) isNewLineLastChar(s string) bool { + for i := len(s) - 1; i > 0; i-- { + c := s[i] + switch c { + case ' ': + continue + case '\n', '\r': + return true + } + break + } + return false +} + +func (p *Printer) printBeforeTokens(tk *token.Token, minLine, extLine int) token.Tokens { + for { + if tk.Prev == nil { + break + } + if tk.Prev.Position.Line < minLine { + break + } + tk = tk.Prev + } + minTk := tk.Clone() + if minTk.Prev != nil { + // add white spaces to minTk by prev token + prev := minTk.Prev + whiteSpaceLen := len(prev.Origin) - len(strings.TrimRight(prev.Origin, " ")) + minTk.Origin = strings.Repeat(" ", whiteSpaceLen) + minTk.Origin + } + minTk.Origin = 
p.removeLeftSideNewLineChar(minTk.Origin) + tokens := token.Tokens{minTk} + tk = minTk.Next + for tk != nil && tk.Position.Line <= extLine { + clonedTk := tk.Clone() + tokens.Add(clonedTk) + tk = clonedTk.Next + } + lastTk := tokens[len(tokens)-1] + trimmedOrigin := p.removeRightSideWhiteSpaceChar(lastTk.Origin) + suffix := lastTk.Origin[len(trimmedOrigin):] + lastTk.Origin = trimmedOrigin + + if lastTk.Next != nil && len(suffix) > 1 { + next := lastTk.Next.Clone() + // add suffix to header of next token + if suffix[0] == '\n' || suffix[0] == '\r' { + suffix = suffix[1:] + } + next.Origin = suffix + next.Origin + lastTk.Next = next + } + return tokens +} + +func (p *Printer) printAfterTokens(tk *token.Token, maxLine int) token.Tokens { + tokens := token.Tokens{} + if tk == nil { + return tokens + } + if tk.Position.Line > maxLine { + return tokens + } + minTk := tk.Clone() + minTk.Origin = p.removeLeftSideNewLineChar(minTk.Origin) + tokens.Add(minTk) + tk = minTk.Next + for tk != nil && tk.Position.Line <= maxLine { + clonedTk := tk.Clone() + tokens.Add(clonedTk) + tk = clonedTk.Next + } + return tokens +} + +func (p *Printer) setupErrorTokenFormat(annotateLine int, isColored bool) { + prefix := func(annotateLine, num int) string { + if annotateLine == num { + return fmt.Sprintf("> %2d | ", num) + } + return fmt.Sprintf(" %2d | ", num) + } + p.LineNumber = true + p.LineNumberFormat = func(num int) string { + if isColored { + fn := color.New(color.Bold, color.FgHiWhite).SprintFunc() + return fn(prefix(annotateLine, num)) + } + return prefix(annotateLine, num) + } + if isColored { + p.setDefaultColorSet() + } +} + +func (p *Printer) PrintErrorToken(tk *token.Token, isColored bool) string { + errToken := tk + curLine := tk.Position.Line + curExtLine := curLine + p.newLineCount(p.removeLeftSideNewLineChar(tk.Origin)) + if p.isNewLineLastChar(tk.Origin) { + // if last character ( exclude white space ) is new line character, ignore it. 
+ curExtLine-- + } + + minLine := int(math.Max(float64(curLine-3), 1)) + maxLine := curExtLine + 3 + p.setupErrorTokenFormat(curLine, isColored) + + beforeTokens := p.printBeforeTokens(tk, minLine, curExtLine) + lastTk := beforeTokens[len(beforeTokens)-1] + afterTokens := p.printAfterTokens(lastTk.Next, maxLine) + + beforeSource := p.PrintTokens(beforeTokens) + prefixSpaceNum := len(fmt.Sprintf(" %2d | ", curLine)) + annotateLine := strings.Repeat(" ", prefixSpaceNum+errToken.Position.Column-1) + "^" + afterSource := p.PrintTokens(afterTokens) + return fmt.Sprintf("%s\n%s\n%s", beforeSource, annotateLine, afterSource) +} diff --git a/vendor/github.com/goccy/go-yaml/scanner/context.go b/vendor/github.com/goccy/go-yaml/scanner/context.go new file mode 100644 index 00000000..3aaec561 --- /dev/null +++ b/vendor/github.com/goccy/go-yaml/scanner/context.go @@ -0,0 +1,236 @@ +package scanner + +import ( + "sync" + + "github.com/goccy/go-yaml/token" +) + +const whitespace = ' ' + +// Context context at scanning +type Context struct { + idx int + size int + notSpaceCharPos int + notSpaceOrgCharPos int + src []rune + buf []rune + obuf []rune + tokens token.Tokens + isRawFolded bool + isLiteral bool + isFolded bool + isSingleLine bool + literalOpt string +} + +var ( + ctxPool = sync.Pool{ + New: func() interface{} { + return createContext() + }, + } +) + +func createContext() *Context { + return &Context{ + idx: 0, + tokens: token.Tokens{}, + isSingleLine: true, + } +} + +func newContext(src []rune) *Context { + ctx := ctxPool.Get().(*Context) + ctx.reset(src) + return ctx +} + +func (c *Context) release() { + ctxPool.Put(c) +} + +func (c *Context) reset(src []rune) { + c.idx = 0 + c.size = len(src) + c.src = src + c.tokens = c.tokens[:0] + c.resetBuffer() + c.isRawFolded = false + c.isSingleLine = true + c.isLiteral = false + c.isFolded = false + c.literalOpt = "" +} + +func (c *Context) resetBuffer() { + c.buf = c.buf[:0] + c.obuf = c.obuf[:0] + c.notSpaceCharPos = 0 + c.notSpaceOrgCharPos = 0 +} + +func (c *Context) isSaveIndentMode() bool { + return c.isLiteral || c.isFolded || c.isRawFolded +} + +func (c *Context) breakLiteral() { + c.isLiteral = false + c.isRawFolded = false + c.isFolded = false + c.literalOpt = "" +} + +func (c *Context) addToken(tk *token.Token) { + if tk == nil { + return + } + c.tokens = append(c.tokens, tk) +} + +func (c *Context) addBuf(r rune) { + if len(c.buf) == 0 && r == ' ' { + return + } + c.buf = append(c.buf, r) + if r != ' ' && r != '\t' { + c.notSpaceCharPos = len(c.buf) + } +} + +func (c *Context) addOriginBuf(r rune) { + c.obuf = append(c.obuf, r) + if r != ' ' && r != '\t' { + c.notSpaceOrgCharPos = len(c.obuf) + } +} + +func (c *Context) removeRightSpaceFromBuf() int { + trimmedBuf := c.obuf[:c.notSpaceOrgCharPos] + buflen := len(trimmedBuf) + diff := len(c.obuf) - buflen + if diff > 0 { + c.obuf = c.obuf[:buflen] + c.buf = c.bufferedSrc() + } + return diff +} + +func (c *Context) isDocument() bool { + return c.isLiteral || c.isFolded || c.isRawFolded +} + +func (c *Context) isEOS() bool { + return len(c.src)-1 <= c.idx +} + +func (c *Context) isNextEOS() bool { + return len(c.src)-1 <= c.idx+1 +} + +func (c *Context) next() bool { + return c.idx < c.size +} + +func (c *Context) source(s, e int) string { + return string(c.src[s:e]) +} + +func (c *Context) previousChar() rune { + if c.idx > 0 { + return c.src[c.idx-1] + } + return rune(0) +} + +func (c *Context) currentChar() rune { + if c.size > c.idx { + return c.src[c.idx] + } + return rune(0) +} + +func 
(c *Context) currentCharWithSkipWhitespace() rune { + idx := c.idx + for c.size > idx { + ch := c.src[idx] + if ch != whitespace { + return ch + } + idx++ + } + return rune(0) +} + +func (c *Context) nextChar() rune { + if c.size > c.idx+1 { + return c.src[c.idx+1] + } + return rune(0) +} + +func (c *Context) repeatNum(r rune) int { + cnt := 0 + for i := c.idx; i < c.size; i++ { + if c.src[i] == r { + cnt++ + } else { + break + } + } + return cnt +} + +func (c *Context) progress(num int) { + c.idx += num +} + +func (c *Context) nextPos() int { + return c.idx + 1 +} + +func (c *Context) existsBuffer() bool { + return len(c.bufferedSrc()) != 0 +} + +func (c *Context) bufferedSrc() []rune { + src := c.buf[:c.notSpaceCharPos] + if c.isDocument() && c.literalOpt == "-" { + // remove end '\n' character and trailing empty lines + // https://yaml.org/spec/1.2.2/#8112-block-chomping-indicator + for { + if len(src) > 0 && src[len(src)-1] == '\n' { + src = src[:len(src)-1] + continue + } + break + } + } + return src +} + +func (c *Context) bufferedToken(pos *token.Position) *token.Token { + if c.idx == 0 { + return nil + } + source := c.bufferedSrc() + if len(source) == 0 { + return nil + } + var tk *token.Token + if c.isDocument() { + tk = token.String(string(source), string(c.obuf), pos) + } else { + tk = token.New(string(source), string(c.obuf), pos) + } + c.resetBuffer() + return tk +} + +func (c *Context) lastToken() *token.Token { + if len(c.tokens) != 0 { + return c.tokens[len(c.tokens)-1] + } + return nil +} diff --git a/vendor/github.com/goccy/go-yaml/scanner/scanner.go b/vendor/github.com/goccy/go-yaml/scanner/scanner.go new file mode 100644 index 00000000..77acb418 --- /dev/null +++ b/vendor/github.com/goccy/go-yaml/scanner/scanner.go @@ -0,0 +1,903 @@ +package scanner + +import ( + "io" + "strings" + + "golang.org/x/xerrors" + + "github.com/goccy/go-yaml/token" +) + +// IndentState state for indent +type IndentState int + +const ( + // IndentStateEqual equals previous indent + IndentStateEqual IndentState = iota + // IndentStateUp more indent than previous + IndentStateUp + // IndentStateDown less indent than previous + IndentStateDown + // IndentStateKeep uses not indent token + IndentStateKeep +) + +// Scanner holds the scanner's internal state while processing a given text. +// It can be allocated as part of another data structure but must be initialized via Init before use. +type Scanner struct { + source []rune + sourcePos int + sourceSize int + line int + column int + offset int + prevIndentLevel int + prevIndentNum int + prevIndentColumn int + docStartColumn int + indentLevel int + indentNum int + isFirstCharAtLine bool + isAnchor bool + startedFlowSequenceNum int + startedFlowMapNum int + indentState IndentState + savedPos *token.Position +} + +func (s *Scanner) pos() *token.Position { + return &token.Position{ + Line: s.line, + Column: s.column, + Offset: s.offset, + IndentNum: s.indentNum, + IndentLevel: s.indentLevel, + } +} + +func (s *Scanner) bufferedToken(ctx *Context) *token.Token { + if s.savedPos != nil { + tk := ctx.bufferedToken(s.savedPos) + s.savedPos = nil + return tk + } + line := s.line + column := s.column - len(ctx.buf) + level := s.indentLevel + if ctx.isSaveIndentMode() { + line -= s.newLineCount(ctx.buf) + column = strings.Index(string(ctx.obuf), string(ctx.buf)) + 1 + // Since we are in a literal, folded or raw folded + // we can use the indent level from the last token. + last := ctx.lastToken() + if last != nil { // The last token should never be nil here. 
+ level = last.Position.IndentLevel + 1 + } + } + return ctx.bufferedToken(&token.Position{ + Line: line, + Column: column, + Offset: s.offset - len(ctx.buf), + IndentNum: s.indentNum, + IndentLevel: level, + }) +} + +func (s *Scanner) progressColumn(ctx *Context, num int) { + s.column += num + s.offset += num + ctx.progress(num) +} + +func (s *Scanner) progressLine(ctx *Context) { + s.column = 1 + s.line++ + s.offset++ + s.indentNum = 0 + s.isFirstCharAtLine = true + s.isAnchor = false + ctx.progress(1) +} + +func (s *Scanner) isNeededKeepPreviousIndentNum(ctx *Context, c rune) bool { + if !s.isChangedToIndentStateUp() { + return false + } + if ctx.isDocument() { + return true + } + if c == '-' && ctx.existsBuffer() { + return true + } + return false +} + +func (s *Scanner) isNewLineChar(c rune) bool { + if c == '\n' { + return true + } + if c == '\r' { + return true + } + return false +} + +func (s *Scanner) newLineCount(src []rune) int { + size := len(src) + cnt := 0 + for i := 0; i < size; i++ { + c := src[i] + switch c { + case '\r': + if i+1 < size && src[i+1] == '\n' { + i++ + } + cnt++ + case '\n': + cnt++ + } + } + return cnt +} + +func (s *Scanner) updateIndentState(ctx *Context) { + indentNumBasedIndentState := s.indentState + if s.prevIndentNum < s.indentNum { + s.indentLevel = s.prevIndentLevel + 1 + indentNumBasedIndentState = IndentStateUp + } else if s.prevIndentNum == s.indentNum { + s.indentLevel = s.prevIndentLevel + indentNumBasedIndentState = IndentStateEqual + } else { + indentNumBasedIndentState = IndentStateDown + if s.prevIndentLevel > 0 { + s.indentLevel = s.prevIndentLevel - 1 + } + } + + if s.prevIndentColumn > 0 { + if s.prevIndentColumn < s.column { + s.indentState = IndentStateUp + } else if s.prevIndentColumn != s.column || indentNumBasedIndentState != IndentStateEqual { + // The following case ( current position is 'd' ), some variables becomes like here + // - prevIndentColumn: 1 of 'a' + // - indentNumBasedIndentState: IndentStateDown because d's indentNum(1) is less than c's indentNum(3). + // Therefore, s.prevIndentColumn(1) == s.column(1) is true, but we want to treat this as IndentStateDown. + // So, we look also current indentState value by the above prevIndentNum based logic, and determins finally indentState. 
+ // --- + // a: + // b + // c + // d: e + // ^ + s.indentState = IndentStateDown + } else { + s.indentState = IndentStateEqual + } + } else { + s.indentState = indentNumBasedIndentState + } +} + +func (s *Scanner) updateIndent(ctx *Context, c rune) { + if s.isFirstCharAtLine && s.isNewLineChar(c) && ctx.isDocument() { + return + } + if s.isFirstCharAtLine && c == ' ' { + s.indentNum++ + return + } + if !s.isFirstCharAtLine { + s.indentState = IndentStateKeep + return + } + s.updateIndentState(ctx) + s.isFirstCharAtLine = false + if s.isNeededKeepPreviousIndentNum(ctx, c) { + return + } + if s.indentState != IndentStateUp { + s.prevIndentColumn = 0 + } + s.prevIndentNum = s.indentNum + s.prevIndentLevel = s.indentLevel +} + +func (s *Scanner) isChangedToIndentStateDown() bool { + return s.indentState == IndentStateDown +} + +func (s *Scanner) isChangedToIndentStateUp() bool { + return s.indentState == IndentStateUp +} + +func (s *Scanner) isChangedToIndentStateEqual() bool { + return s.indentState == IndentStateEqual +} + +func (s *Scanner) addBufferedTokenIfExists(ctx *Context) { + ctx.addToken(s.bufferedToken(ctx)) +} + +func (s *Scanner) breakLiteral(ctx *Context) { + s.docStartColumn = 0 + ctx.breakLiteral() +} + +func (s *Scanner) scanSingleQuote(ctx *Context) (tk *token.Token, pos int) { + ctx.addOriginBuf('\'') + srcpos := s.pos() + startIndex := ctx.idx + 1 + src := ctx.src + size := len(src) + value := []rune{} + isFirstLineChar := false + isNewLine := false + for idx := startIndex; idx < size; idx++ { + if !isNewLine { + s.progressColumn(ctx, 1) + } else { + isNewLine = false + } + c := src[idx] + pos = idx + 1 + ctx.addOriginBuf(c) + if s.isNewLineChar(c) { + value = append(value, ' ') + isFirstLineChar = true + isNewLine = true + s.progressLine(ctx) + continue + } else if c == ' ' && isFirstLineChar { + continue + } else if c != '\'' { + value = append(value, c) + isFirstLineChar = false + continue + } + if idx+1 < len(ctx.src) && ctx.src[idx+1] == '\'' { + // '' handle as ' character + value = append(value, c) + ctx.addOriginBuf(c) + idx++ + continue + } + s.progressColumn(ctx, 1) + tk = token.SingleQuote(string(value), string(ctx.obuf), srcpos) + pos = idx - startIndex + 1 + return + } + return +} + +func hexToInt(b rune) int { + if b >= 'A' && b <= 'F' { + return int(b) - 'A' + 10 + } + if b >= 'a' && b <= 'f' { + return int(b) - 'a' + 10 + } + return int(b) - '0' +} + +func hexRunesToInt(b []rune) int { + sum := 0 + for i := 0; i < len(b); i++ { + sum += hexToInt(b[i]) << (uint(len(b)-i-1) * 4) + } + return sum +} + +func (s *Scanner) scanDoubleQuote(ctx *Context) (tk *token.Token, pos int) { + ctx.addOriginBuf('"') + srcpos := s.pos() + startIndex := ctx.idx + 1 + src := ctx.src + size := len(src) + value := []rune{} + isFirstLineChar := false + isNewLine := false + for idx := startIndex; idx < size; idx++ { + if !isNewLine { + s.progressColumn(ctx, 1) + } else { + isNewLine = false + } + c := src[idx] + pos = idx + 1 + ctx.addOriginBuf(c) + if s.isNewLineChar(c) { + value = append(value, ' ') + isFirstLineChar = true + isNewLine = true + s.progressLine(ctx) + continue + } else if c == ' ' && isFirstLineChar { + continue + } else if c == '\\' { + isFirstLineChar = false + if idx+1 >= size { + value = append(value, c) + continue + } + nextChar := src[idx+1] + progress := 0 + switch nextChar { + case 'b': + progress = 1 + ctx.addOriginBuf(nextChar) + value = append(value, '\b') + case 'e': + progress = 1 + ctx.addOriginBuf(nextChar) + value = append(value, '\x1B') + case 
'f': + progress = 1 + ctx.addOriginBuf(nextChar) + value = append(value, '\f') + case 'n': + progress = 1 + ctx.addOriginBuf(nextChar) + value = append(value, '\n') + case 'r': + progress = 1 + ctx.addOriginBuf(nextChar) + value = append(value, '\r') + case 'v': + progress = 1 + ctx.addOriginBuf(nextChar) + value = append(value, '\v') + case 'L': // LS (#x2028) + progress = 1 + ctx.addOriginBuf(nextChar) + value = append(value, []rune{'\xE2', '\x80', '\xA8'}...) + case 'N': // NEL (#x85) + progress = 1 + ctx.addOriginBuf(nextChar) + value = append(value, []rune{'\xC2', '\x85'}...) + case 'P': // PS (#x2029) + progress = 1 + ctx.addOriginBuf(nextChar) + value = append(value, []rune{'\xE2', '\x80', '\xA9'}...) + case '_': // #xA0 + progress = 1 + ctx.addOriginBuf(nextChar) + value = append(value, []rune{'\xC2', '\xA0'}...) + case '"': + progress = 1 + ctx.addOriginBuf(nextChar) + value = append(value, nextChar) + case 'x': + progress = 3 + if idx+progress >= size { + // TODO: need to return error + //err = xerrors.New("invalid escape character \\x") + return + } + codeNum := hexRunesToInt(src[idx+2 : idx+progress+1]) + value = append(value, rune(codeNum)) + case 'u': + progress = 5 + if idx+progress >= size { + // TODO: need to return error + //err = xerrors.New("invalid escape character \\u") + return + } + codeNum := hexRunesToInt(src[idx+2 : idx+progress+1]) + value = append(value, rune(codeNum)) + case 'U': + progress = 9 + if idx+progress >= size { + // TODO: need to return error + //err = xerrors.New("invalid escape character \\U") + return + } + codeNum := hexRunesToInt(src[idx+2 : idx+progress+1]) + value = append(value, rune(codeNum)) + case '\\': + progress = 1 + ctx.addOriginBuf(nextChar) + value = append(value, c) + default: + value = append(value, c) + } + idx += progress + s.progressColumn(ctx, progress) + continue + } else if c != '"' { + value = append(value, c) + isFirstLineChar = false + continue + } + s.progressColumn(ctx, 1) + tk = token.DoubleQuote(string(value), string(ctx.obuf), srcpos) + pos = idx - startIndex + 1 + return + } + return +} + +func (s *Scanner) scanQuote(ctx *Context, ch rune) (tk *token.Token, pos int) { + if ch == '\'' { + return s.scanSingleQuote(ctx) + } + return s.scanDoubleQuote(ctx) +} + +func (s *Scanner) isMergeKey(ctx *Context) bool { + if ctx.repeatNum('<') != 2 { + return false + } + src := ctx.src + size := len(src) + for idx := ctx.idx + 2; idx < size; idx++ { + c := src[idx] + if c == ' ' { + continue + } + if c != ':' { + return false + } + if idx+1 < size { + nc := src[idx+1] + if nc == ' ' || s.isNewLineChar(nc) { + return true + } + } + } + return false +} + +func (s *Scanner) scanTag(ctx *Context) (tk *token.Token, pos int) { + ctx.addOriginBuf('!') + ctx.progress(1) // skip '!' character + for idx, c := range ctx.src[ctx.idx:] { + pos = idx + 1 + ctx.addOriginBuf(c) + switch c { + case ' ', '\n', '\r': + value := ctx.source(ctx.idx-1, ctx.idx+idx) + tk = token.Tag(value, string(ctx.obuf), s.pos()) + pos = len([]rune(value)) + return + } + } + return +} + +func (s *Scanner) scanComment(ctx *Context) (tk *token.Token, pos int) { + ctx.addOriginBuf('#') + ctx.progress(1) // skip '#' character + for idx, c := range ctx.src[ctx.idx:] { + pos = idx + 1 + ctx.addOriginBuf(c) + switch c { + case '\n', '\r': + if ctx.previousChar() == '\\' { + continue + } + value := ctx.source(ctx.idx, ctx.idx+idx) + tk = token.Comment(value, string(ctx.obuf), s.pos()) + pos = len([]rune(value)) + 1 + return + } + } + // document ends with comment. 
+ value := string(ctx.src[ctx.idx:]) + tk = token.Comment(value, string(ctx.obuf), s.pos()) + pos = len([]rune(value)) + 1 + return +} + +func trimCommentFromLiteralOpt(text string) (string, error) { + idx := strings.Index(text, "#") + if idx < 0 { + return text, nil + } + if idx == 0 { + return "", xerrors.New("invalid literal header") + } + return text[:idx-1], nil +} + +func (s *Scanner) scanLiteral(ctx *Context, c rune) { + ctx.addOriginBuf(c) + if ctx.isEOS() { + if ctx.isLiteral { + ctx.addBuf(c) + } + value := ctx.bufferedSrc() + ctx.addToken(token.String(string(value), string(ctx.obuf), s.pos())) + ctx.resetBuffer() + s.progressColumn(ctx, 1) + } else if s.isNewLineChar(c) { + if ctx.isLiteral { + ctx.addBuf(c) + } else { + ctx.addBuf(' ') + } + s.progressLine(ctx) + } else if s.isFirstCharAtLine && c == ' ' { + if 0 < s.docStartColumn && s.docStartColumn <= s.column { + ctx.addBuf(c) + } + s.progressColumn(ctx, 1) + } else { + if s.docStartColumn == 0 { + s.docStartColumn = s.column + } + ctx.addBuf(c) + s.progressColumn(ctx, 1) + } +} + +func (s *Scanner) scanLiteralHeader(ctx *Context) (pos int, err error) { + header := ctx.currentChar() + ctx.addOriginBuf(header) + ctx.progress(1) // skip '|' or '>' character + for idx, c := range ctx.src[ctx.idx:] { + pos = idx + ctx.addOriginBuf(c) + switch c { + case '\n', '\r': + value := ctx.source(ctx.idx, ctx.idx+idx) + opt := strings.TrimRight(value, " ") + orgOptLen := len(opt) + opt, err = trimCommentFromLiteralOpt(opt) + if err != nil { + return + } + switch opt { + case "", "+", "-", + "0", "1", "2", "3", "4", "5", "6", "7", "8", "9": + hasComment := len(opt) < orgOptLen + if header == '|' { + if hasComment { + commentLen := orgOptLen - len(opt) + headerPos := strings.Index(string(ctx.obuf), "|") + litBuf := ctx.obuf[:len(ctx.obuf)-commentLen-headerPos] + commentBuf := ctx.obuf[len(litBuf):] + ctx.addToken(token.Literal("|"+opt, string(litBuf), s.pos())) + s.column += len(litBuf) + s.offset += len(litBuf) + commentHeader := strings.Index(value, "#") + ctx.addToken(token.Comment(string(value[commentHeader+1:]), string(commentBuf), s.pos())) + } else { + ctx.addToken(token.Literal("|"+opt, string(ctx.obuf), s.pos())) + } + ctx.isLiteral = true + } else if header == '>' { + if hasComment { + commentLen := orgOptLen - len(opt) + headerPos := strings.Index(string(ctx.obuf), ">") + foldedBuf := ctx.obuf[:len(ctx.obuf)-commentLen-headerPos] + commentBuf := ctx.obuf[len(foldedBuf):] + ctx.addToken(token.Folded(">"+opt, string(foldedBuf), s.pos())) + s.column += len(foldedBuf) + s.offset += len(foldedBuf) + commentHeader := strings.Index(value, "#") + ctx.addToken(token.Comment(string(value[commentHeader+1:]), string(commentBuf), s.pos())) + } else { + ctx.addToken(token.Folded(">"+opt, string(ctx.obuf), s.pos())) + } + ctx.isFolded = true + } + s.indentState = IndentStateKeep + ctx.resetBuffer() + ctx.literalOpt = opt + return + } + break + } + } + err = xerrors.New("invalid literal header") + return +} + +func (s *Scanner) scanNewLine(ctx *Context, c rune) { + if len(ctx.buf) > 0 && s.savedPos == nil { + s.savedPos = s.pos() + s.savedPos.Column -= len(ctx.bufferedSrc()) + } + + // if the following case, origin buffer has unnecessary two spaces. + // So, `removeRightSpaceFromOriginBuf` remove them, also fix column number too. 
+ // --- + // a:[space][space] + // b: c + removedNum := ctx.removeRightSpaceFromBuf() + if removedNum > 0 { + s.column -= removedNum + s.offset -= removedNum + if s.savedPos != nil { + s.savedPos.Column -= removedNum + } + } + + // There is no problem that we ignore CR which followed by LF and normalize it to LF, because of following YAML1.2 spec. + // > Line breaks inside scalar content must be normalized by the YAML processor. Each such line break must be parsed into a single line feed character. + // > Outside scalar content, YAML allows any line break to be used to terminate lines. + // > -- https://yaml.org/spec/1.2/spec.html + if c == '\r' && ctx.nextChar() == '\n' { + ctx.addOriginBuf('\r') + ctx.progress(1) + c = '\n' + } + + if ctx.isEOS() { + s.addBufferedTokenIfExists(ctx) + } else if s.isAnchor { + s.addBufferedTokenIfExists(ctx) + } + ctx.addBuf(' ') + ctx.addOriginBuf(c) + ctx.isSingleLine = false + s.progressLine(ctx) +} + +func (s *Scanner) scan(ctx *Context) (pos int) { + for ctx.next() { + pos = ctx.nextPos() + c := ctx.currentChar() + s.updateIndent(ctx, c) + if ctx.isDocument() { + if s.isChangedToIndentStateEqual() || + s.isChangedToIndentStateDown() { + s.addBufferedTokenIfExists(ctx) + s.breakLiteral(ctx) + } else { + s.scanLiteral(ctx, c) + continue + } + } else if s.isChangedToIndentStateDown() { + s.addBufferedTokenIfExists(ctx) + } else if s.isChangedToIndentStateEqual() { + // if first character is new line character, buffer expect to raw folded literal + if len(ctx.obuf) > 0 && s.newLineCount(ctx.obuf) <= 1 { + // doesn't raw folded literal + s.addBufferedTokenIfExists(ctx) + } + } + switch c { + case '{': + if !ctx.existsBuffer() { + ctx.addOriginBuf(c) + ctx.addToken(token.MappingStart(string(ctx.obuf), s.pos())) + s.startedFlowMapNum++ + s.progressColumn(ctx, 1) + return + } + case '}': + if !ctx.existsBuffer() || s.startedFlowMapNum > 0 { + ctx.addToken(s.bufferedToken(ctx)) + ctx.addOriginBuf(c) + ctx.addToken(token.MappingEnd(string(ctx.obuf), s.pos())) + s.startedFlowMapNum-- + s.progressColumn(ctx, 1) + return + } + case '.': + if s.indentNum == 0 && s.column == 1 && ctx.repeatNum('.') == 3 { + ctx.addToken(token.DocumentEnd(string(ctx.obuf)+"...", s.pos())) + s.progressColumn(ctx, 3) + pos += 2 + return + } + case '<': + if s.isMergeKey(ctx) { + s.prevIndentColumn = s.column + ctx.addToken(token.MergeKey(string(ctx.obuf)+"<<", s.pos())) + s.progressColumn(ctx, 1) + pos++ + return + } + case '-': + if s.indentNum == 0 && s.column == 1 && ctx.repeatNum('-') == 3 { + s.addBufferedTokenIfExists(ctx) + ctx.addToken(token.DocumentHeader(string(ctx.obuf)+"---", s.pos())) + s.progressColumn(ctx, 3) + pos += 2 + return + } + if ctx.existsBuffer() && s.isChangedToIndentStateUp() { + // raw folded + ctx.isRawFolded = true + ctx.addBuf(c) + ctx.addOriginBuf(c) + s.progressColumn(ctx, 1) + continue + } + if ctx.existsBuffer() { + // '-' is literal + ctx.addBuf(c) + ctx.addOriginBuf(c) + s.progressColumn(ctx, 1) + continue + } + nc := ctx.nextChar() + if nc == ' ' || s.isNewLineChar(nc) { + s.addBufferedTokenIfExists(ctx) + ctx.addOriginBuf(c) + tk := token.SequenceEntry(string(ctx.obuf), s.pos()) + s.prevIndentColumn = tk.Position.Column + ctx.addToken(tk) + s.progressColumn(ctx, 1) + return + } + case '[': + if !ctx.existsBuffer() { + ctx.addOriginBuf(c) + ctx.addToken(token.SequenceStart(string(ctx.obuf), s.pos())) + s.startedFlowSequenceNum++ + s.progressColumn(ctx, 1) + return + } + case ']': + if !ctx.existsBuffer() || s.startedFlowSequenceNum > 0 { + 
s.addBufferedTokenIfExists(ctx) + ctx.addOriginBuf(c) + ctx.addToken(token.SequenceEnd(string(ctx.obuf), s.pos())) + s.startedFlowSequenceNum-- + s.progressColumn(ctx, 1) + return + } + case ',': + if s.startedFlowSequenceNum > 0 || s.startedFlowMapNum > 0 { + s.addBufferedTokenIfExists(ctx) + ctx.addOriginBuf(c) + ctx.addToken(token.CollectEntry(string(ctx.obuf), s.pos())) + s.progressColumn(ctx, 1) + return + } + case ':': + nc := ctx.nextChar() + if s.startedFlowMapNum > 0 || nc == ' ' || s.isNewLineChar(nc) || ctx.isNextEOS() { + // mapping value + tk := s.bufferedToken(ctx) + if tk != nil { + s.prevIndentColumn = tk.Position.Column + ctx.addToken(tk) + } else if tk := ctx.lastToken(); tk != nil { + // If the map key is quote, the buffer does not exist because it has already been cut into tokens. + // Therefore, we need to check the last token. + if tk.Indicator == token.QuotedScalarIndicator { + s.prevIndentColumn = tk.Position.Column + } + } + ctx.addToken(token.MappingValue(s.pos())) + s.progressColumn(ctx, 1) + return + } + case '|', '>': + if !ctx.existsBuffer() { + progress, err := s.scanLiteralHeader(ctx) + if err != nil { + // TODO: returns syntax error object + return + } + s.progressColumn(ctx, progress) + s.progressLine(ctx) + continue + } + case '!': + if !ctx.existsBuffer() { + token, progress := s.scanTag(ctx) + ctx.addToken(token) + s.progressColumn(ctx, progress) + if c := ctx.previousChar(); s.isNewLineChar(c) { + s.progressLine(ctx) + } + pos += progress + return + } + case '%': + if !ctx.existsBuffer() && s.indentNum == 0 { + ctx.addToken(token.Directive(string(ctx.obuf)+"%", s.pos())) + s.progressColumn(ctx, 1) + return + } + case '?': + nc := ctx.nextChar() + if !ctx.existsBuffer() && nc == ' ' { + ctx.addToken(token.MappingKey(s.pos())) + s.progressColumn(ctx, 1) + return + } + case '&': + if !ctx.existsBuffer() { + s.addBufferedTokenIfExists(ctx) + ctx.addOriginBuf(c) + ctx.addToken(token.Anchor(string(ctx.obuf), s.pos())) + s.progressColumn(ctx, 1) + s.isAnchor = true + return + } + case '*': + if !ctx.existsBuffer() { + s.addBufferedTokenIfExists(ctx) + ctx.addOriginBuf(c) + ctx.addToken(token.Alias(string(ctx.obuf), s.pos())) + s.progressColumn(ctx, 1) + return + } + case '#': + if !ctx.existsBuffer() || ctx.previousChar() == ' ' { + s.addBufferedTokenIfExists(ctx) + token, progress := s.scanComment(ctx) + ctx.addToken(token) + s.progressColumn(ctx, progress) + s.progressLine(ctx) + pos += progress + return + } + case '\'', '"': + if !ctx.existsBuffer() { + token, progress := s.scanQuote(ctx, c) + ctx.addToken(token) + pos += progress + // If the non-whitespace character immediately following the quote is ':', the quote should be treated as a map key. + // Therefore, do not return and continue processing as a normal map key. + if ctx.currentCharWithSkipWhitespace() == ':' { + continue + } + return + } + case '\r', '\n': + s.scanNewLine(ctx, c) + continue + case ' ': + if ctx.isSaveIndentMode() || (!s.isAnchor && !s.isFirstCharAtLine) { + ctx.addBuf(c) + ctx.addOriginBuf(c) + s.progressColumn(ctx, 1) + continue + } + if s.isFirstCharAtLine { + s.progressColumn(ctx, 1) + ctx.addOriginBuf(c) + continue + } + s.addBufferedTokenIfExists(ctx) + pos-- // to rescan white space at next scanning for adding white space to next buffer. 
+ s.isAnchor = false + return + } + ctx.addBuf(c) + ctx.addOriginBuf(c) + s.progressColumn(ctx, 1) + } + s.addBufferedTokenIfExists(ctx) + return +} + +// Init prepares the scanner s to tokenize the text src by setting the scanner at the beginning of src. +func (s *Scanner) Init(text string) { + src := []rune(text) + s.source = src + s.sourcePos = 0 + s.sourceSize = len(src) + s.line = 1 + s.column = 1 + s.offset = 1 + s.prevIndentLevel = 0 + s.prevIndentNum = 0 + s.prevIndentColumn = 0 + s.indentLevel = 0 + s.indentNum = 0 + s.isFirstCharAtLine = true +} + +// Scan scans the next token and returns the token collection. The source end is indicated by io.EOF. +func (s *Scanner) Scan() (token.Tokens, error) { + if s.sourcePos >= s.sourceSize { + return nil, io.EOF + } + ctx := newContext(s.source[s.sourcePos:]) + defer ctx.release() + progress := s.scan(ctx) + s.sourcePos += progress + var tokens token.Tokens + tokens = append(tokens, ctx.tokens...) + return tokens, nil +} diff --git a/vendor/github.com/goccy/go-yaml/stdlib_quote.go b/vendor/github.com/goccy/go-yaml/stdlib_quote.go new file mode 100644 index 00000000..be50ae61 --- /dev/null +++ b/vendor/github.com/goccy/go-yaml/stdlib_quote.go @@ -0,0 +1,103 @@ +// Copied and trimmed down from https://github.com/golang/go/blob/e3769299cd3484e018e0e2a6e1b95c2b18ce4f41/src/strconv/quote.go +// We want to use the standard library's private "quoteWith" function rather than write our own so that we get robust unicode support. +// Every private function called by quoteWith was copied. +// There are 2 modifications to simplify the code: +// 1. The unicode.IsPrint function was substituted for the custom implementation of IsPrint +// 2. All code paths reachable only when ASCIIonly or grphicOnly are set to true were removed. + +// Copyright 2009 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package yaml + +import ( + "unicode" + "unicode/utf8" +) + +const ( + lowerhex = "0123456789abcdef" +) + +func quoteWith(s string, quote byte) string { + return string(appendQuotedWith(make([]byte, 0, 3*len(s)/2), s, quote)) +} + +func appendQuotedWith(buf []byte, s string, quote byte) []byte { + // Often called with big strings, so preallocate. If there's quoting, + // this is conservative but still helps a lot. + if cap(buf)-len(buf) < len(s) { + nBuf := make([]byte, len(buf), len(buf)+1+len(s)+1) + copy(nBuf, buf) + buf = nBuf + } + buf = append(buf, quote) + for width := 0; len(s) > 0; s = s[width:] { + r := rune(s[0]) + width = 1 + if r >= utf8.RuneSelf { + r, width = utf8.DecodeRuneInString(s) + } + if width == 1 && r == utf8.RuneError { + buf = append(buf, `\x`...) + buf = append(buf, lowerhex[s[0]>>4]) + buf = append(buf, lowerhex[s[0]&0xF]) + continue + } + buf = appendEscapedRune(buf, r, quote) + } + buf = append(buf, quote) + return buf +} + +func appendEscapedRune(buf []byte, r rune, quote byte) []byte { + var runeTmp [utf8.UTFMax]byte + if r == rune(quote) || r == '\\' { // always backslashed + buf = append(buf, '\\') + buf = append(buf, byte(r)) + return buf + } + if unicode.IsPrint(r) { + n := utf8.EncodeRune(runeTmp[:], r) + buf = append(buf, runeTmp[:n]...) + return buf + } + switch r { + case '\a': + buf = append(buf, `\a`...) + case '\b': + buf = append(buf, `\b`...) + case '\f': + buf = append(buf, `\f`...) + case '\n': + buf = append(buf, `\n`...) + case '\r': + buf = append(buf, `\r`...) + case '\t': + buf = append(buf, `\t`...) 
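For orientation, the Init/Scan pair defined above is normally driven in a loop until Scan reports io.EOF (the vendored lexer package wraps essentially this loop). A minimal sketch; the input document and the main wrapper are illustrative only:

package main

import (
	"fmt"
	"io"

	"github.com/goccy/go-yaml/scanner"
	"github.com/goccy/go-yaml/token"
)

func main() {
	var s scanner.Scanner
	s.Init("a: 1\nb: [2, 3]\n")
	var tks token.Tokens
	for {
		part, err := s.Scan()
		if err == io.EOF {
			break
		}
		if err != nil {
			return
		}
		tks.Add(part...) // keeps the Prev/Next links between tokens intact
	}
	tks.Dump() // prints each token with its type, value and position
	_ = fmt.Sprint
}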
+ case '\v': + buf = append(buf, `\v`...) + default: + switch { + case r < ' ': + buf = append(buf, `\x`...) + buf = append(buf, lowerhex[byte(r)>>4]) + buf = append(buf, lowerhex[byte(r)&0xF]) + case r > utf8.MaxRune: + r = 0xFFFD + fallthrough + case r < 0x10000: + buf = append(buf, `\u`...) + for s := 12; s >= 0; s -= 4 { + buf = append(buf, lowerhex[r>>uint(s)&0xF]) + } + default: + buf = append(buf, `\U`...) + for s := 28; s >= 0; s -= 4 { + buf = append(buf, lowerhex[r>>uint(s)&0xF]) + } + } + } + return buf +} diff --git a/vendor/github.com/goccy/go-yaml/struct.go b/vendor/github.com/goccy/go-yaml/struct.go new file mode 100644 index 00000000..a3da8ddd --- /dev/null +++ b/vendor/github.com/goccy/go-yaml/struct.go @@ -0,0 +1,130 @@ +package yaml + +import ( + "reflect" + "strings" + + "golang.org/x/xerrors" +) + +const ( + // StructTagName tag keyword for Marshal/Unmarshal + StructTagName = "yaml" +) + +// StructField information for each the field in structure +type StructField struct { + FieldName string + RenderName string + AnchorName string + AliasName string + IsAutoAnchor bool + IsAutoAlias bool + IsOmitEmpty bool + IsFlow bool + IsInline bool +} + +func getTag(field reflect.StructField) string { + // If struct tag `yaml` exist, use that. If no `yaml` + // exists, but `json` does, use that and try the best to + // adhere to its rules + tag := field.Tag.Get(StructTagName) + if tag == "" { + tag = field.Tag.Get(`json`) + } + return tag +} + +func structField(field reflect.StructField) *StructField { + tag := getTag(field) + fieldName := strings.ToLower(field.Name) + options := strings.Split(tag, ",") + if len(options) > 0 { + if options[0] != "" { + fieldName = options[0] + } + } + structField := &StructField{ + FieldName: field.Name, + RenderName: fieldName, + } + if len(options) > 1 { + for _, opt := range options[1:] { + switch { + case opt == "omitempty": + structField.IsOmitEmpty = true + case opt == "flow": + structField.IsFlow = true + case opt == "inline": + structField.IsInline = true + case strings.HasPrefix(opt, "anchor"): + anchor := strings.Split(opt, "=") + if len(anchor) > 1 { + structField.AnchorName = anchor[1] + } else { + structField.IsAutoAnchor = true + } + case strings.HasPrefix(opt, "alias"): + alias := strings.Split(opt, "=") + if len(alias) > 1 { + structField.AliasName = alias[1] + } else { + structField.IsAutoAlias = true + } + default: + } + } + } + return structField +} + +func isIgnoredStructField(field reflect.StructField) bool { + if field.PkgPath != "" && !field.Anonymous { + // private field + return true + } + tag := getTag(field) + if tag == "-" { + return true + } + return false +} + +type StructFieldMap map[string]*StructField + +func (m StructFieldMap) isIncludedRenderName(name string) bool { + for _, v := range m { + if !v.IsInline && v.RenderName == name { + return true + } + } + return false +} + +func (m StructFieldMap) hasMergeProperty() bool { + for _, v := range m { + if v.IsOmitEmpty && v.IsInline && v.IsAutoAlias { + return true + } + } + return false +} + +func structFieldMap(structType reflect.Type) (StructFieldMap, error) { + structFieldMap := StructFieldMap{} + renderNameMap := map[string]struct{}{} + for i := 0; i < structType.NumField(); i++ { + field := structType.Field(i) + if isIgnoredStructField(field) { + continue + } + structField := structField(field) + if _, exists := renderNameMap[structField.RenderName]; exists { + return nil, xerrors.Errorf("duplicated struct field name %s", structField.RenderName) + } + 
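To make the tag options parsed by getTag and structField above concrete, a hypothetical configuration struct might be tagged as follows; all type and field names here are invented for illustration, and the comments note which StructField flags each option sets:

package config

// Endpoint exists only so the anchor/alias fields below have a pointer type to refer to.
type Endpoint struct {
	Host string `yaml:"host"`
}

type Service struct {
	Name    string            `yaml:"name"`                 // RenderName "name"
	Ports   []int             `yaml:"ports,flow,omitempty"` // IsFlow and IsOmitEmpty
	Labels  map[string]string `yaml:",inline"`              // IsInline; rendered name stays "labels"
	Base    *Endpoint         `yaml:"base,anchor=base"`     // AnchorName "base"
	Mirror  *Endpoint         `yaml:"mirror,alias"`         // IsAutoAlias
	Debug   bool              `json:"debug"`                // no yaml tag, so getTag falls back to the json tag
	Skipped string            `yaml:"-"`                    // dropped by isIgnoredStructField
	private string            // unexported, also ignored
}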
structFieldMap[structField.FieldName] = structField + renderNameMap[structField.RenderName] = struct{}{} + } + return structFieldMap, nil +} diff --git a/vendor/github.com/goccy/go-yaml/token/token.go b/vendor/github.com/goccy/go-yaml/token/token.go new file mode 100644 index 00000000..14d76220 --- /dev/null +++ b/vendor/github.com/goccy/go-yaml/token/token.go @@ -0,0 +1,1070 @@ +package token + +import ( + "fmt" + "strings" +) + +// Character type for character +type Character byte + +const ( + // SequenceEntryCharacter character for sequence entry + SequenceEntryCharacter Character = '-' + // MappingKeyCharacter character for mapping key + MappingKeyCharacter Character = '?' + // MappingValueCharacter character for mapping value + MappingValueCharacter Character = ':' + // CollectEntryCharacter character for collect entry + CollectEntryCharacter Character = ',' + // SequenceStartCharacter character for sequence start + SequenceStartCharacter Character = '[' + // SequenceEndCharacter character for sequence end + SequenceEndCharacter Character = ']' + // MappingStartCharacter character for mapping start + MappingStartCharacter Character = '{' + // MappingEndCharacter character for mapping end + MappingEndCharacter Character = '}' + // CommentCharacter character for comment + CommentCharacter Character = '#' + // AnchorCharacter character for anchor + AnchorCharacter Character = '&' + // AliasCharacter character for alias + AliasCharacter Character = '*' + // TagCharacter character for tag + TagCharacter Character = '!' + // LiteralCharacter character for literal + LiteralCharacter Character = '|' + // FoldedCharacter character for folded + FoldedCharacter Character = '>' + // SingleQuoteCharacter character for single quote + SingleQuoteCharacter Character = '\'' + // DoubleQuoteCharacter character for double quote + DoubleQuoteCharacter Character = '"' + // DirectiveCharacter character for directive + DirectiveCharacter Character = '%' + // SpaceCharacter character for space + SpaceCharacter Character = ' ' + // LineBreakCharacter character for line break + LineBreakCharacter Character = '\n' +) + +// Type type identifier for token +type Type int + +const ( + // UnknownType reserve for invalid type + UnknownType Type = iota + // DocumentHeaderType type for DocumentHeader token + DocumentHeaderType + // DocumentEndType type for DocumentEnd token + DocumentEndType + // SequenceEntryType type for SequenceEntry token + SequenceEntryType + // MappingKeyType type for MappingKey token + MappingKeyType + // MappingValueType type for MappingValue token + MappingValueType + // MergeKeyType type for MergeKey token + MergeKeyType + // CollectEntryType type for CollectEntry token + CollectEntryType + // SequenceStartType type for SequenceStart token + SequenceStartType + // SequenceEndType type for SequenceEnd token + SequenceEndType + // MappingStartType type for MappingStart token + MappingStartType + // MappingEndType type for MappingEnd token + MappingEndType + // CommentType type for Comment token + CommentType + // AnchorType type for Anchor token + AnchorType + // AliasType type for Alias token + AliasType + // TagType type for Tag token + TagType + // LiteralType type for Literal token + LiteralType + // FoldedType type for Folded token + FoldedType + // SingleQuoteType type for SingleQuote token + SingleQuoteType + // DoubleQuoteType type for DoubleQuote token + DoubleQuoteType + // DirectiveType type for Directive token + DirectiveType + // SpaceType type for Space token + SpaceType + // 
NullType type for Null token + NullType + // InfinityType type for Infinity token + InfinityType + // NanType type for Nan token + NanType + // IntegerType type for Integer token + IntegerType + // BinaryIntegerType type for BinaryInteger token + BinaryIntegerType + // OctetIntegerType type for OctetInteger token + OctetIntegerType + // HexIntegerType type for HexInteger token + HexIntegerType + // FloatType type for Float token + FloatType + // StringType type for String token + StringType + // BoolType type for Bool token + BoolType +) + +// String type identifier to text +func (t Type) String() string { + switch t { + case UnknownType: + return "Unknown" + case DocumentHeaderType: + return "DocumentHeader" + case DocumentEndType: + return "DocumentEnd" + case SequenceEntryType: + return "SequenceEntry" + case MappingKeyType: + return "MappingKey" + case MappingValueType: + return "MappingValue" + case MergeKeyType: + return "MergeKey" + case CollectEntryType: + return "CollectEntry" + case SequenceStartType: + return "SequenceStart" + case SequenceEndType: + return "SequenceEnd" + case MappingStartType: + return "MappingStart" + case MappingEndType: + return "MappingEnd" + case CommentType: + return "Comment" + case AnchorType: + return "Anchor" + case AliasType: + return "Alias" + case TagType: + return "Tag" + case LiteralType: + return "Literal" + case FoldedType: + return "Folded" + case SingleQuoteType: + return "SingleQuote" + case DoubleQuoteType: + return "DoubleQuote" + case DirectiveType: + return "Directive" + case SpaceType: + return "Space" + case StringType: + return "String" + case BoolType: + return "Bool" + case IntegerType: + return "Integer" + case BinaryIntegerType: + return "BinaryInteger" + case OctetIntegerType: + return "OctetInteger" + case HexIntegerType: + return "HexInteger" + case FloatType: + return "Float" + case NullType: + return "Null" + case InfinityType: + return "Infinity" + case NanType: + return "Nan" + } + return "" +} + +// CharacterType type for character category +type CharacterType int + +const ( + // CharacterTypeIndicator type of indicator character + CharacterTypeIndicator CharacterType = iota + // CharacterTypeWhiteSpace type of white space character + CharacterTypeWhiteSpace + // CharacterTypeMiscellaneous type of miscellaneous character + CharacterTypeMiscellaneous + // CharacterTypeEscaped type of escaped character + CharacterTypeEscaped +) + +// String character type identifier to text +func (c CharacterType) String() string { + switch c { + case CharacterTypeIndicator: + return "Indicator" + case CharacterTypeWhiteSpace: + return "WhiteSpcae" + case CharacterTypeMiscellaneous: + return "Miscellaneous" + case CharacterTypeEscaped: + return "Escaped" + } + return "" +} + +// Indicator type for indicator +type Indicator int + +const ( + // NotIndicator not indicator + NotIndicator Indicator = iota + // BlockStructureIndicator indicator for block structure ( '-', '?', ':' ) + BlockStructureIndicator + // FlowCollectionIndicator indicator for flow collection ( '[', ']', '{', '}', ',' ) + FlowCollectionIndicator + // CommentIndicator indicator for comment ( '#' ) + CommentIndicator + // NodePropertyIndicator indicator for node property ( '!', '&', '*' ) + NodePropertyIndicator + // BlockScalarIndicator indicator for block scalar ( '|', '>' ) + BlockScalarIndicator + // QuotedScalarIndicator indicator for quoted scalar ( ''', '"' ) + QuotedScalarIndicator + // DirectiveIndicator indicator for directive ( '%' ) + DirectiveIndicator + // 
InvalidUseOfReservedIndicator indicator for invalid use of reserved keyword ( '@', '`' ) + InvalidUseOfReservedIndicator +) + +// String indicator to text +func (i Indicator) String() string { + switch i { + case NotIndicator: + return "NotIndicator" + case BlockStructureIndicator: + return "BlockStructure" + case FlowCollectionIndicator: + return "FlowCollection" + case CommentIndicator: + return "Comment" + case NodePropertyIndicator: + return "NodeProperty" + case BlockScalarIndicator: + return "BlockScalar" + case QuotedScalarIndicator: + return "QuotedScalar" + case DirectiveIndicator: + return "Directive" + case InvalidUseOfReservedIndicator: + return "InvalidUseOfReserved" + } + return "" +} + +var ( + reservedNullKeywords = []string{ + "null", + "Null", + "NULL", + "~", + } + reservedBoolKeywords = []string{ + "true", + "True", + "TRUE", + "false", + "False", + "FALSE", + } + // For compatibility with other YAML 1.1 parsers + // Note that we use these solely for encoding the bool value with quotes. + // go-yaml should not treat these as reserved keywords at parsing time. + // as go-yaml is supposed to be compliant only with YAML 1.2. + reservedLegacyBoolKeywords = []string{ + "y", + "Y", + "yes", + "Yes", + "YES", + "n", + "N", + "no", + "No", + "NO", + "on", + "On", + "ON", + "off", + "Off", + "OFF", + } + reservedInfKeywords = []string{ + ".inf", + ".Inf", + ".INF", + "-.inf", + "-.Inf", + "-.INF", + } + reservedNanKeywords = []string{ + ".nan", + ".NaN", + ".NAN", + } + reservedKeywordMap = map[string]func(string, string, *Position) *Token{} + // reservedEncKeywordMap contains is the keyword map used at encoding time. + // This is supposed to be a superset of reservedKeywordMap, + // and used to quote legacy keywords present in YAML 1.1 or lesser for compatibility reasons, + // even though this library is supposed to be YAML 1.2-compliant. 
+ reservedEncKeywordMap = map[string]func(string, string, *Position) *Token{} +) + +func reservedKeywordToken(typ Type, value, org string, pos *Position) *Token { + return &Token{ + Type: typ, + CharacterType: CharacterTypeMiscellaneous, + Indicator: NotIndicator, + Value: value, + Origin: org, + Position: pos, + } +} + +func init() { + for _, keyword := range reservedNullKeywords { + reservedKeywordMap[keyword] = func(value, org string, pos *Position) *Token { + return reservedKeywordToken(NullType, value, org, pos) + } + } + for _, keyword := range reservedBoolKeywords { + f := func(value, org string, pos *Position) *Token { + return reservedKeywordToken(BoolType, value, org, pos) + } + reservedKeywordMap[keyword] = f + reservedEncKeywordMap[keyword] = f + } + for _, keyword := range reservedLegacyBoolKeywords { + reservedEncKeywordMap[keyword] = func(value, org string, pos *Position) *Token { + return reservedKeywordToken(BoolType, value, org, pos) + } + } + for _, keyword := range reservedInfKeywords { + reservedKeywordMap[keyword] = func(value, org string, pos *Position) *Token { + return reservedKeywordToken(InfinityType, value, org, pos) + } + } + for _, keyword := range reservedNanKeywords { + reservedKeywordMap[keyword] = func(value, org string, pos *Position) *Token { + return reservedKeywordToken(NanType, value, org, pos) + } + } +} + +// ReservedTagKeyword type of reserved tag keyword +type ReservedTagKeyword string + +const ( + // IntegerTag `!!int` tag + IntegerTag ReservedTagKeyword = "!!int" + // FloatTag `!!float` tag + FloatTag ReservedTagKeyword = "!!float" + // NullTag `!!null` tag + NullTag ReservedTagKeyword = "!!null" + // SequenceTag `!!seq` tag + SequenceTag ReservedTagKeyword = "!!seq" + // MappingTag `!!map` tag + MappingTag ReservedTagKeyword = "!!map" + // StringTag `!!str` tag + StringTag ReservedTagKeyword = "!!str" + // BinaryTag `!!binary` tag + BinaryTag ReservedTagKeyword = "!!binary" + // OrderedMapTag `!!omap` tag + OrderedMapTag ReservedTagKeyword = "!!omap" + // SetTag `!!set` tag + SetTag ReservedTagKeyword = "!!set" + // TimestampTag `!!timestamp` tag + TimestampTag ReservedTagKeyword = "!!timestamp" +) + +var ( + // ReservedTagKeywordMap map for reserved tag keywords + ReservedTagKeywordMap = map[ReservedTagKeyword]func(string, string, *Position) *Token{ + IntegerTag: func(value, org string, pos *Position) *Token { + return &Token{ + Type: TagType, + CharacterType: CharacterTypeIndicator, + Indicator: NodePropertyIndicator, + Value: value, + Origin: org, + Position: pos, + } + }, + FloatTag: func(value, org string, pos *Position) *Token { + return &Token{ + Type: TagType, + CharacterType: CharacterTypeIndicator, + Indicator: NodePropertyIndicator, + Value: value, + Origin: org, + Position: pos, + } + }, + NullTag: func(value, org string, pos *Position) *Token { + return &Token{ + Type: TagType, + CharacterType: CharacterTypeIndicator, + Indicator: NodePropertyIndicator, + Value: value, + Origin: org, + Position: pos, + } + }, + SequenceTag: func(value, org string, pos *Position) *Token { + return &Token{ + Type: TagType, + CharacterType: CharacterTypeIndicator, + Indicator: NodePropertyIndicator, + Value: value, + Origin: org, + Position: pos, + } + }, + MappingTag: func(value, org string, pos *Position) *Token { + return &Token{ + Type: TagType, + CharacterType: CharacterTypeIndicator, + Indicator: NodePropertyIndicator, + Value: value, + Origin: org, + Position: pos, + } + }, + StringTag: func(value, org string, pos *Position) *Token { + return 
&Token{ + Type: TagType, + CharacterType: CharacterTypeIndicator, + Indicator: NodePropertyIndicator, + Value: value, + Origin: org, + Position: pos, + } + }, + BinaryTag: func(value, org string, pos *Position) *Token { + return &Token{ + Type: TagType, + CharacterType: CharacterTypeIndicator, + Indicator: NodePropertyIndicator, + Value: value, + Origin: org, + Position: pos, + } + }, + OrderedMapTag: func(value, org string, pos *Position) *Token { + return &Token{ + Type: TagType, + CharacterType: CharacterTypeIndicator, + Indicator: NodePropertyIndicator, + Value: value, + Origin: org, + Position: pos, + } + }, + SetTag: func(value, org string, pos *Position) *Token { + return &Token{ + Type: TagType, + CharacterType: CharacterTypeIndicator, + Indicator: NodePropertyIndicator, + Value: value, + Origin: org, + Position: pos, + } + }, + TimestampTag: func(value, org string, pos *Position) *Token { + return &Token{ + Type: TagType, + CharacterType: CharacterTypeIndicator, + Indicator: NodePropertyIndicator, + Value: value, + Origin: org, + Position: pos, + } + }, + } +) + +type numType int + +const ( + numTypeNone numType = iota + numTypeBinary + numTypeOctet + numTypeHex + numTypeFloat +) + +type numStat struct { + isNum bool + typ numType +} + +func getNumberStat(str string) *numStat { + stat := &numStat{} + if str == "" { + return stat + } + if str == "-" || str == "." || str == "+" || str == "_" { + return stat + } + if str[0] == '_' { + return stat + } + dotFound := false + isNegative := false + isExponent := false + if str[0] == '-' { + isNegative = true + } + for idx, c := range str { + switch c { + case 'x': + if (isNegative && idx == 2) || (!isNegative && idx == 1) { + continue + } + case 'o': + if (isNegative && idx == 2) || (!isNegative && idx == 1) { + continue + } + case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + continue + case 'a', 'b', 'c', 'd', 'e', 'f', 'A', 'B', 'C', 'D', 'E', 'F': + if (len(str) > 2 && str[0] == '0' && str[1] == 'x') || + (len(str) > 3 && isNegative && str[1] == '0' && str[2] == 'x') { + // hex number + continue + } + if c == 'b' && ((isNegative && idx == 2) || (!isNegative && idx == 1)) { + // binary number + continue + } + if (c == 'e' || c == 'E') && dotFound { + // exponent + isExponent = true + continue + } + case '.': + if dotFound { + // multiple dot + return stat + } + dotFound = true + continue + case '-': + if idx == 0 || isExponent { + continue + } + case '+': + if idx == 0 || isExponent { + continue + } + case '_': + continue + } + return stat + } + stat.isNum = true + switch { + case dotFound: + stat.typ = numTypeFloat + case strings.HasPrefix(str, "0b") || strings.HasPrefix(str, "-0b"): + stat.typ = numTypeBinary + case strings.HasPrefix(str, "0x") || strings.HasPrefix(str, "-0x"): + stat.typ = numTypeHex + case strings.HasPrefix(str, "0o") || strings.HasPrefix(str, "-0o"): + stat.typ = numTypeOctet + case (len(str) > 1 && str[0] == '0') || (len(str) > 1 && str[0] == '-' && str[1] == '0'): + stat.typ = numTypeOctet + } + return stat +} + +func looksLikeTimeValue(value string) bool { + for i, c := range value { + switch c { + case ':', '1', '2', '3', '4', '5', '6', '7', '8', '9': + continue + case '0': + if i == 0 { + return false + } + continue + } + return false + } + return true +} + +// IsNeedQuoted whether need quote for passed string or not +func IsNeedQuoted(value string) bool { + if value == "" { + return true + } + if _, exists := reservedEncKeywordMap[value]; exists { + return true + } + if stat := getNumberStat(value); 
stat.isNum { + return true + } + first := value[0] + switch first { + case '*', '&', '[', '{', '}', ']', ',', '!', '|', '>', '%', '\'', '"', '@', ' ', '`': + return true + } + last := value[len(value)-1] + switch last { + case ':', ' ': + return true + } + if looksLikeTimeValue(value) { + return true + } + for i, c := range value { + switch c { + case '#', '\\': + return true + case ':': + if i+1 < len(value) && value[i+1] == ' ' { + return true + } + } + } + return false +} + +// LiteralBlockHeader detect literal block scalar header +func LiteralBlockHeader(value string) string { + lbc := DetectLineBreakCharacter(value) + + switch { + case !strings.Contains(value, lbc): + return "" + case strings.HasSuffix(value, fmt.Sprintf("%s%s", lbc, lbc)): + return "|+" + case strings.HasSuffix(value, lbc): + return "|" + default: + return "|-" + } +} + +// New create reserved keyword token or number token and other string token +func New(value string, org string, pos *Position) *Token { + fn := reservedKeywordMap[value] + if fn != nil { + return fn(value, org, pos) + } + if stat := getNumberStat(value); stat.isNum { + tk := &Token{ + Type: IntegerType, + CharacterType: CharacterTypeMiscellaneous, + Indicator: NotIndicator, + Value: value, + Origin: org, + Position: pos, + } + switch stat.typ { + case numTypeFloat: + tk.Type = FloatType + case numTypeBinary: + tk.Type = BinaryIntegerType + case numTypeOctet: + tk.Type = OctetIntegerType + case numTypeHex: + tk.Type = HexIntegerType + } + return tk + } + return String(value, org, pos) +} + +// Position type for position in YAML document +type Position struct { + Line int + Column int + Offset int + IndentNum int + IndentLevel int +} + +// String position to text +func (p *Position) String() string { + return fmt.Sprintf("[level:%d,line:%d,column:%d,offset:%d]", p.IndentLevel, p.Line, p.Column, p.Offset) +} + +// Token type for token +type Token struct { + Type Type + CharacterType CharacterType + Indicator Indicator + Value string + Origin string + Position *Position + Next *Token + Prev *Token +} + +// PreviousType previous token type +func (t *Token) PreviousType() Type { + if t.Prev != nil { + return t.Prev.Type + } + return UnknownType +} + +// NextType next token type +func (t *Token) NextType() Type { + if t.Next != nil { + return t.Next.Type + } + return UnknownType +} + +// AddColumn append column number to current position of column +func (t *Token) AddColumn(col int) { + if t == nil { + return + } + t.Position.Column += col +} + +// Clone copy token ( preserve Prev/Next reference ) +func (t *Token) Clone() *Token { + if t == nil { + return nil + } + copied := *t + if t.Position != nil { + pos := *(t.Position) + copied.Position = &pos + } + return &copied +} + +// Tokens type of token collection +type Tokens []*Token + +func (t *Tokens) add(tk *Token) { + tokens := *t + if len(tokens) == 0 { + tokens = append(tokens, tk) + } else { + last := tokens[len(tokens)-1] + last.Next = tk + tk.Prev = last + tokens = append(tokens, tk) + } + *t = tokens +} + +// Add append new some tokens +func (t *Tokens) Add(tks ...*Token) { + for _, tk := range tks { + t.add(tk) + } +} + +// Dump dump all token structures for debugging +func (t Tokens) Dump() { + for _, tk := range t { + fmt.Printf("- %+v\n", tk) + } +} + +// String create token for String +func String(value string, org string, pos *Position) *Token { + return &Token{ + Type: StringType, + CharacterType: CharacterTypeMiscellaneous, + Indicator: NotIndicator, + Value: value, + Origin: org, + 
Position: pos, + } +} + +// SequenceEntry create token for SequenceEntry +func SequenceEntry(org string, pos *Position) *Token { + return &Token{ + Type: SequenceEntryType, + CharacterType: CharacterTypeIndicator, + Indicator: BlockStructureIndicator, + Value: string(SequenceEntryCharacter), + Origin: org, + Position: pos, + } +} + +// MappingKey create token for MappingKey +func MappingKey(pos *Position) *Token { + return &Token{ + Type: MappingKeyType, + CharacterType: CharacterTypeIndicator, + Indicator: BlockStructureIndicator, + Value: string(MappingKeyCharacter), + Origin: string(MappingKeyCharacter), + Position: pos, + } +} + +// MappingValue create token for MappingValue +func MappingValue(pos *Position) *Token { + return &Token{ + Type: MappingValueType, + CharacterType: CharacterTypeIndicator, + Indicator: BlockStructureIndicator, + Value: string(MappingValueCharacter), + Origin: string(MappingValueCharacter), + Position: pos, + } +} + +// CollectEntry create token for CollectEntry +func CollectEntry(org string, pos *Position) *Token { + return &Token{ + Type: CollectEntryType, + CharacterType: CharacterTypeIndicator, + Indicator: FlowCollectionIndicator, + Value: string(CollectEntryCharacter), + Origin: org, + Position: pos, + } +} + +// SequenceStart create token for SequenceStart +func SequenceStart(org string, pos *Position) *Token { + return &Token{ + Type: SequenceStartType, + CharacterType: CharacterTypeIndicator, + Indicator: FlowCollectionIndicator, + Value: string(SequenceStartCharacter), + Origin: org, + Position: pos, + } +} + +// SequenceEnd create token for SequenceEnd +func SequenceEnd(org string, pos *Position) *Token { + return &Token{ + Type: SequenceEndType, + CharacterType: CharacterTypeIndicator, + Indicator: FlowCollectionIndicator, + Value: string(SequenceEndCharacter), + Origin: org, + Position: pos, + } +} + +// MappingStart create token for MappingStart +func MappingStart(org string, pos *Position) *Token { + return &Token{ + Type: MappingStartType, + CharacterType: CharacterTypeIndicator, + Indicator: FlowCollectionIndicator, + Value: string(MappingStartCharacter), + Origin: org, + Position: pos, + } +} + +// MappingEnd create token for MappingEnd +func MappingEnd(org string, pos *Position) *Token { + return &Token{ + Type: MappingEndType, + CharacterType: CharacterTypeIndicator, + Indicator: FlowCollectionIndicator, + Value: string(MappingEndCharacter), + Origin: org, + Position: pos, + } +} + +// Comment create token for Comment +func Comment(value string, org string, pos *Position) *Token { + return &Token{ + Type: CommentType, + CharacterType: CharacterTypeIndicator, + Indicator: CommentIndicator, + Value: value, + Origin: org, + Position: pos, + } +} + +// Anchor create token for Anchor +func Anchor(org string, pos *Position) *Token { + return &Token{ + Type: AnchorType, + CharacterType: CharacterTypeIndicator, + Indicator: NodePropertyIndicator, + Value: string(AnchorCharacter), + Origin: org, + Position: pos, + } +} + +// Alias create token for Alias +func Alias(org string, pos *Position) *Token { + return &Token{ + Type: AliasType, + CharacterType: CharacterTypeIndicator, + Indicator: NodePropertyIndicator, + Value: string(AliasCharacter), + Origin: org, + Position: pos, + } +} + +// Tag create token for Tag +func Tag(value string, org string, pos *Position) *Token { + fn := ReservedTagKeywordMap[ReservedTagKeyword(value)] + if fn != nil { + return fn(value, org, pos) + } + return &Token{ + Type: TagType, + CharacterType: CharacterTypeIndicator, 
+ Indicator: NodePropertyIndicator, + Value: value, + Origin: org, + Position: pos, + } +} + +// Literal create token for Literal +func Literal(value string, org string, pos *Position) *Token { + return &Token{ + Type: LiteralType, + CharacterType: CharacterTypeIndicator, + Indicator: BlockScalarIndicator, + Value: value, + Origin: org, + Position: pos, + } +} + +// Folded create token for Folded +func Folded(value string, org string, pos *Position) *Token { + return &Token{ + Type: FoldedType, + CharacterType: CharacterTypeIndicator, + Indicator: BlockScalarIndicator, + Value: value, + Origin: org, + Position: pos, + } +} + +// SingleQuote create token for SingleQuote +func SingleQuote(value string, org string, pos *Position) *Token { + return &Token{ + Type: SingleQuoteType, + CharacterType: CharacterTypeIndicator, + Indicator: QuotedScalarIndicator, + Value: value, + Origin: org, + Position: pos, + } +} + +// DoubleQuote create token for DoubleQuote +func DoubleQuote(value string, org string, pos *Position) *Token { + return &Token{ + Type: DoubleQuoteType, + CharacterType: CharacterTypeIndicator, + Indicator: QuotedScalarIndicator, + Value: value, + Origin: org, + Position: pos, + } +} + +// Directive create token for Directive +func Directive(org string, pos *Position) *Token { + return &Token{ + Type: DirectiveType, + CharacterType: CharacterTypeIndicator, + Indicator: DirectiveIndicator, + Value: string(DirectiveCharacter), + Origin: org, + Position: pos, + } +} + +// Space create token for Space +func Space(pos *Position) *Token { + return &Token{ + Type: SpaceType, + CharacterType: CharacterTypeWhiteSpace, + Indicator: NotIndicator, + Value: string(SpaceCharacter), + Origin: string(SpaceCharacter), + Position: pos, + } +} + +// MergeKey create token for MergeKey +func MergeKey(org string, pos *Position) *Token { + return &Token{ + Type: MergeKeyType, + CharacterType: CharacterTypeMiscellaneous, + Indicator: NotIndicator, + Value: "<<", + Origin: org, + Position: pos, + } +} + +// DocumentHeader create token for DocumentHeader +func DocumentHeader(org string, pos *Position) *Token { + return &Token{ + Type: DocumentHeaderType, + CharacterType: CharacterTypeMiscellaneous, + Indicator: NotIndicator, + Value: "---", + Origin: org, + Position: pos, + } +} + +// DocumentEnd create token for DocumentEnd +func DocumentEnd(org string, pos *Position) *Token { + return &Token{ + Type: DocumentEndType, + CharacterType: CharacterTypeMiscellaneous, + Indicator: NotIndicator, + Value: "...", + Origin: org, + Position: pos, + } +} + +// DetectLineBreakCharacter detect line break character in only one inside scalar content scope. 
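The New constructor above routes a plain scalar through reservedKeywordMap and getNumberStat before falling back to String, which is what decides whether a value becomes a Null, Bool, Integer, Float, Infinity, Nan or String token. A small sketch of that classification plus IsNeedQuoted; the sample values and the main wrapper are illustrative only:

package main

import (
	"fmt"

	"github.com/goccy/go-yaml/token"
)

func main() {
	pos := &token.Position{Line: 1, Column: 1}
	for _, v := range []string{"null", "true", "42", "0x2A", "0o17", "3.14", ".inf", ".nan", "hello"} {
		tk := token.New(v, v, pos)
		fmt.Printf("%-6s -> %s\n", v, tk.Type)
	}
	// IsNeedQuoted reports whether the encoder must quote a plain scalar:
	// reserved words (including YAML 1.1 booleans such as "yes"), numbers,
	// and strings containing characters like '#' or ": ".
	fmt.Println(token.IsNeedQuoted("yes"), token.IsNeedQuoted("plain text")) // true false
}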
+func DetectLineBreakCharacter(src string) string { + nc := strings.Count(src, "\n") + rc := strings.Count(src, "\r") + rnc := strings.Count(src, "\r\n") + switch { + case nc == rnc && rc == rnc: + return "\r\n" + case rc > nc: + return "\r" + default: + return "\n" + } +} diff --git a/vendor/github.com/goccy/go-yaml/validate.go b/vendor/github.com/goccy/go-yaml/validate.go new file mode 100644 index 00000000..20a2d6d9 --- /dev/null +++ b/vendor/github.com/goccy/go-yaml/validate.go @@ -0,0 +1,13 @@ +package yaml + +// StructValidator need to implement Struct method only +// ( see https://pkg.go.dev/github.com/go-playground/validator/v10#Validate.Struct ) +type StructValidator interface { + Struct(interface{}) error +} + +// FieldError need to implement StructField method only +// ( see https://pkg.go.dev/github.com/go-playground/validator/v10#FieldError ) +type FieldError interface { + StructField() string +} diff --git a/vendor/github.com/goccy/go-yaml/yaml.go b/vendor/github.com/goccy/go-yaml/yaml.go new file mode 100644 index 00000000..25b1056f --- /dev/null +++ b/vendor/github.com/goccy/go-yaml/yaml.go @@ -0,0 +1,288 @@ +package yaml + +import ( + "bytes" + "context" + "io" + "reflect" + "sync" + + "github.com/goccy/go-yaml/ast" + "github.com/goccy/go-yaml/internal/errors" + "golang.org/x/xerrors" +) + +// BytesMarshaler interface may be implemented by types to customize their +// behavior when being marshaled into a YAML document. The returned value +// is marshaled in place of the original value implementing Marshaler. +// +// If an error is returned by MarshalYAML, the marshaling procedure stops +// and returns with the provided error. +type BytesMarshaler interface { + MarshalYAML() ([]byte, error) +} + +// BytesMarshalerContext interface use BytesMarshaler with context.Context. +type BytesMarshalerContext interface { + MarshalYAML(context.Context) ([]byte, error) +} + +// InterfaceMarshaler interface has MarshalYAML compatible with github.com/go-yaml/yaml package. +type InterfaceMarshaler interface { + MarshalYAML() (interface{}, error) +} + +// InterfaceMarshalerContext interface use InterfaceMarshaler with context.Context. +type InterfaceMarshalerContext interface { + MarshalYAML(context.Context) (interface{}, error) +} + +// BytesUnmarshaler interface may be implemented by types to customize their +// behavior when being unmarshaled from a YAML document. +type BytesUnmarshaler interface { + UnmarshalYAML([]byte) error +} + +// BytesUnmarshalerContext interface use BytesUnmarshaler with context.Context. +type BytesUnmarshalerContext interface { + UnmarshalYAML(context.Context, []byte) error +} + +// InterfaceUnmarshaler interface has UnmarshalYAML compatible with github.com/go-yaml/yaml package. +type InterfaceUnmarshaler interface { + UnmarshalYAML(func(interface{}) error) error +} + +// InterfaceUnmarshalerContext interface use InterfaceUnmarshaler with context.Context. +type InterfaceUnmarshalerContext interface { + UnmarshalYAML(context.Context, func(interface{}) error) error +} + +// MapItem is an item in a MapSlice. +type MapItem struct { + Key, Value interface{} +} + +// MapSlice encodes and decodes as a YAML map. +// The order of keys is preserved when encoding and decoding. +type MapSlice []MapItem + +// ToMap convert to map[interface{}]interface{}. 
+func (s MapSlice) ToMap() map[interface{}]interface{} { + v := map[interface{}]interface{}{} + for _, item := range s { + v[item.Key] = item.Value + } + return v +} + +// Marshal serializes the value provided into a YAML document. The structure +// of the generated document will reflect the structure of the value itself. +// Maps and pointers (to struct, string, int, etc) are accepted as the in value. +// +// Struct fields are only marshalled if they are exported (have an upper case +// first letter), and are marshalled using the field name lowercased as the +// default key. Custom keys may be defined via the "yaml" name in the field +// tag: the content preceding the first comma is used as the key, and the +// following comma-separated options are used to tweak the marshalling process. +// Conflicting names result in a runtime error. +// +// The field tag format accepted is: +// +// `(...) yaml:"[][,[,]]" (...)` +// +// The following flags are currently supported: +// +// omitempty Only include the field if it's not set to the zero +// value for the type or to empty slices or maps. +// Zero valued structs will be omitted if all their public +// fields are zero, unless they implement an IsZero +// method (see the IsZeroer interface type), in which +// case the field will be included if that method returns true. +// +// flow Marshal using a flow style (useful for structs, +// sequences and maps). +// +// inline Inline the field, which must be a struct or a map, +// causing all of its fields or keys to be processed as if +// they were part of the outer struct. For maps, keys must +// not conflict with the yaml keys of other struct fields. +// +// anchor Marshal with anchor. If want to define anchor name explicitly, use anchor=name style. +// Otherwise, if used 'anchor' name only, used the field name lowercased as the anchor name +// +// alias Marshal with alias. If want to define alias name explicitly, use alias=name style. +// Otherwise, If omitted alias name and the field type is pointer type, +// assigned anchor name automatically from same pointer address. +// +// In addition, if the key is "-", the field is ignored. +// +// For example: +// +// type T struct { +// F int `yaml:"a,omitempty"` +// B int +// } +// yaml.Marshal(&T{B: 2}) // Returns "b: 2\n" +// yaml.Marshal(&T{F: 1}) // Returns "a: 1\nb: 0\n" +func Marshal(v interface{}) ([]byte, error) { + return MarshalWithOptions(v) +} + +// MarshalWithOptions serializes the value provided into a YAML document with EncodeOptions. +func MarshalWithOptions(v interface{}, opts ...EncodeOption) ([]byte, error) { + return MarshalContext(context.Background(), v, opts...) +} + +// MarshalContext serializes the value provided into a YAML document with context.Context and EncodeOptions. +func MarshalContext(ctx context.Context, v interface{}, opts ...EncodeOption) ([]byte, error) { + var buf bytes.Buffer + if err := NewEncoder(&buf, opts...).EncodeContext(ctx, v); err != nil { + return nil, errors.Wrapf(err, "failed to marshal") + } + return buf.Bytes(), nil +} + +// ValueToNode convert from value to ast.Node. +func ValueToNode(v interface{}, opts ...EncodeOption) (ast.Node, error) { + var buf bytes.Buffer + node, err := NewEncoder(&buf, opts...).EncodeToNode(v) + if err != nil { + return nil, errors.Wrapf(err, "failed to convert value to node") + } + return node, nil +} + +// Unmarshal decodes the first document found within the in byte slice +// and assigns decoded values into the out value. 
+// +// Struct fields are only unmarshalled if they are exported (have an +// upper case first letter), and are unmarshalled using the field name +// lowercased as the default key. Custom keys may be defined via the +// "yaml" name in the field tag: the content preceding the first comma +// is used as the key, and the following comma-separated options are +// used to tweak the marshalling process (see Marshal). +// Conflicting names result in a runtime error. +// +// For example: +// +// type T struct { +// F int `yaml:"a,omitempty"` +// B int +// } +// var t T +// yaml.Unmarshal([]byte("a: 1\nb: 2"), &t) +// +// See the documentation of Marshal for the format of tags and a list of +// supported tag options. +func Unmarshal(data []byte, v interface{}) error { + return UnmarshalWithOptions(data, v) +} + +// UnmarshalWithOptions decodes with DecodeOptions the first document found within the in byte slice +// and assigns decoded values into the out value. +func UnmarshalWithOptions(data []byte, v interface{}, opts ...DecodeOption) error { + return UnmarshalContext(context.Background(), data, v, opts...) +} + +// UnmarshalContext decodes with context.Context and DecodeOptions. +func UnmarshalContext(ctx context.Context, data []byte, v interface{}, opts ...DecodeOption) error { + dec := NewDecoder(bytes.NewBuffer(data), opts...) + if err := dec.DecodeContext(ctx, v); err != nil { + if err == io.EOF { + return nil + } + return errors.Wrapf(err, "failed to unmarshal") + } + return nil +} + +// NodeToValue converts node to the value pointed to by v. +func NodeToValue(node ast.Node, v interface{}, opts ...DecodeOption) error { + var buf bytes.Buffer + if err := NewDecoder(&buf, opts...).DecodeFromNode(node, v); err != nil { + return errors.Wrapf(err, "failed to convert node to value") + } + return nil +} + +// FormatError is a utility function that takes advantage of the metadata +// stored in the errors returned by this package's parser. +// +// If the second argument `colored` is true, the error message is colorized. +// If the third argument `inclSource` is true, the error message will +// contain snippets of the YAML source that was used. +func FormatError(e error, colored, inclSource bool) string { + var pp errors.PrettyPrinter + if xerrors.As(e, &pp) { + var buf bytes.Buffer + pp.PrettyPrint(&errors.Sink{&buf}, colored, inclSource) + return buf.String() + } + + return e.Error() +} + +// YAMLToJSON convert YAML bytes to JSON. +func YAMLToJSON(bytes []byte) ([]byte, error) { + var v interface{} + if err := UnmarshalWithOptions(bytes, &v, UseOrderedMap()); err != nil { + return nil, errors.Wrapf(err, "failed to unmarshal") + } + out, err := MarshalWithOptions(v, JSON()) + if err != nil { + return nil, errors.Wrapf(err, "failed to marshal with json option") + } + return out, nil +} + +// JSONToYAML convert JSON bytes to YAML. 
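FormatError above is what turns a parser or decoder error into the annotated, optionally colorized message (when inclSource is true the offending YAML lines are included). A minimal sketch; the malformed document is an illustrative input and the exact message text depends on the parser:

package main

import (
	"fmt"

	yaml "github.com/goccy/go-yaml"
)

func main() {
	// Unterminated flow sequence, so Unmarshal is expected to fail.
	src := []byte("a: [1, 2\n")
	var v interface{}
	if err := yaml.Unmarshal(src, &v); err != nil {
		// colored=false, inclSource=true: plain text plus a snippet of the source.
		fmt.Println(yaml.FormatError(err, false, true))
	}
}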
+func JSONToYAML(bytes []byte) ([]byte, error) { + var v interface{} + if err := UnmarshalWithOptions(bytes, &v, UseOrderedMap()); err != nil { + return nil, errors.Wrapf(err, "failed to unmarshal from json bytes") + } + out, err := Marshal(v) + if err != nil { + return nil, errors.Wrapf(err, "failed to marshal") + } + return out, nil +} + +var ( + globalCustomMarshalerMu sync.Mutex + globalCustomUnmarshalerMu sync.Mutex + globalCustomMarshalerMap = map[reflect.Type]func(interface{}) ([]byte, error){} + globalCustomUnmarshalerMap = map[reflect.Type]func(interface{}, []byte) error{} +) + +// RegisterCustomMarshaler overrides any encoding process for the type specified in generics. +// If you want to switch the behavior for each encoder, use `CustomMarshaler` defined as EncodeOption. +// +// NOTE: If type T implements MarshalYAML for pointer receiver, the type specified in RegisterCustomMarshaler must be *T. +// If RegisterCustomMarshaler and CustomMarshaler of EncodeOption are specified for the same type, +// the CustomMarshaler specified in EncodeOption takes precedence. +func RegisterCustomMarshaler[T any](marshaler func(T) ([]byte, error)) { + globalCustomMarshalerMu.Lock() + defer globalCustomMarshalerMu.Unlock() + + var typ T + globalCustomMarshalerMap[reflect.TypeOf(typ)] = func(v interface{}) ([]byte, error) { + return marshaler(v.(T)) + } +} + +// RegisterCustomUnmarshaler overrides any decoding process for the type specified in generics. +// If you want to switch the behavior for each decoder, use `CustomUnmarshaler` defined as DecodeOption. +// +// NOTE: If RegisterCustomUnmarshaler and CustomUnmarshaler of DecodeOption are specified for the same type, +// the CustomUnmarshaler specified in DecodeOption takes precedence. +func RegisterCustomUnmarshaler[T any](unmarshaler func(*T, []byte) error) { + globalCustomUnmarshalerMu.Lock() + defer globalCustomUnmarshalerMu.Unlock() + + var typ *T + globalCustomUnmarshalerMap[reflect.TypeOf(typ)] = func(v interface{}, b []byte) error { + return unmarshaler(v.(*T), b) + } +} diff --git a/vendor/github.com/mattn/go-colorable/.travis.yml b/vendor/github.com/mattn/go-colorable/.travis.yml new file mode 100644 index 00000000..7942c565 --- /dev/null +++ b/vendor/github.com/mattn/go-colorable/.travis.yml @@ -0,0 +1,15 @@ +language: go +sudo: false +go: + - 1.13.x + - tip + +before_install: + - go get -t -v ./... + +script: + - ./go.test.sh + +after_success: + - bash <(curl -s https://codecov.io/bash) + diff --git a/vendor/github.com/mattn/go-colorable/LICENSE b/vendor/github.com/mattn/go-colorable/LICENSE new file mode 100644 index 00000000..91b5cef3 --- /dev/null +++ b/vendor/github.com/mattn/go-colorable/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 Yasuhiro Matsumoto + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/mattn/go-colorable/README.md b/vendor/github.com/mattn/go-colorable/README.md new file mode 100644 index 00000000..e055952b --- /dev/null +++ b/vendor/github.com/mattn/go-colorable/README.md @@ -0,0 +1,48 @@ +# go-colorable + +[![Build Status](https://travis-ci.org/mattn/go-colorable.svg?branch=master)](https://travis-ci.org/mattn/go-colorable) +[![Codecov](https://codecov.io/gh/mattn/go-colorable/branch/master/graph/badge.svg)](https://codecov.io/gh/mattn/go-colorable) +[![GoDoc](https://godoc.org/github.com/mattn/go-colorable?status.svg)](http://godoc.org/github.com/mattn/go-colorable) +[![Go Report Card](https://goreportcard.com/badge/mattn/go-colorable)](https://goreportcard.com/report/mattn/go-colorable) + +Colorable writer for windows. + +For example, most of logger packages doesn't show colors on windows. (I know we can do it with ansicon. But I don't want.) +This package is possible to handle escape sequence for ansi color on windows. + +## Too Bad! + +![](https://raw.githubusercontent.com/mattn/go-colorable/gh-pages/bad.png) + + +## So Good! + +![](https://raw.githubusercontent.com/mattn/go-colorable/gh-pages/good.png) + +## Usage + +```go +logrus.SetFormatter(&logrus.TextFormatter{ForceColors: true}) +logrus.SetOutput(colorable.NewColorableStdout()) + +logrus.Info("succeeded") +logrus.Warn("not correct") +logrus.Error("something error") +logrus.Fatal("panic") +``` + +You can compile above code on non-windows OSs. + +## Installation + +``` +$ go get github.com/mattn/go-colorable +``` + +# License + +MIT + +# Author + +Yasuhiro Matsumoto (a.k.a mattn) diff --git a/vendor/github.com/mattn/go-colorable/colorable_appengine.go b/vendor/github.com/mattn/go-colorable/colorable_appengine.go new file mode 100644 index 00000000..1f7806fe --- /dev/null +++ b/vendor/github.com/mattn/go-colorable/colorable_appengine.go @@ -0,0 +1,37 @@ +// +build appengine + +package colorable + +import ( + "io" + "os" + + _ "github.com/mattn/go-isatty" +) + +// NewColorable returns new instance of Writer which handles escape sequence. +func NewColorable(file *os.File) io.Writer { + if file == nil { + panic("nil passed instead of *os.File to NewColorable()") + } + + return file +} + +// NewColorableStdout returns new instance of Writer which handles escape sequence for stdout. +func NewColorableStdout() io.Writer { + return os.Stdout +} + +// NewColorableStderr returns new instance of Writer which handles escape sequence for stderr. +func NewColorableStderr() io.Writer { + return os.Stderr +} + +// EnableColorsStdout enable colors if possible. 
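Beyond the logrus example in the README above, the returned writer can be fed raw ANSI escape sequences directly; on Windows the Writer translates them into console attribute calls, and on other platforms it is a pass-through. A small sketch; the message and SGR codes are illustrative only:

package main

import (
	"fmt"

	"github.com/mattn/go-colorable"
)

func main() {
	out := colorable.NewColorableStdout()
	// \x1b[31m switches the foreground to red, \x1b[0m resets it.
	fmt.Fprintf(out, "\x1b[31merror:\x1b[0m something went wrong\n")
}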
+func EnableColorsStdout(enabled *bool) func() { + if enabled != nil { + *enabled = true + } + return func() {} +} diff --git a/vendor/github.com/mattn/go-colorable/colorable_others.go b/vendor/github.com/mattn/go-colorable/colorable_others.go new file mode 100644 index 00000000..08cbd1e0 --- /dev/null +++ b/vendor/github.com/mattn/go-colorable/colorable_others.go @@ -0,0 +1,38 @@ +// +build !windows +// +build !appengine + +package colorable + +import ( + "io" + "os" + + _ "github.com/mattn/go-isatty" +) + +// NewColorable returns new instance of Writer which handles escape sequence. +func NewColorable(file *os.File) io.Writer { + if file == nil { + panic("nil passed instead of *os.File to NewColorable()") + } + + return file +} + +// NewColorableStdout returns new instance of Writer which handles escape sequence for stdout. +func NewColorableStdout() io.Writer { + return os.Stdout +} + +// NewColorableStderr returns new instance of Writer which handles escape sequence for stderr. +func NewColorableStderr() io.Writer { + return os.Stderr +} + +// EnableColorsStdout enable colors if possible. +func EnableColorsStdout(enabled *bool) func() { + if enabled != nil { + *enabled = true + } + return func() {} +} diff --git a/vendor/github.com/mattn/go-colorable/colorable_windows.go b/vendor/github.com/mattn/go-colorable/colorable_windows.go new file mode 100644 index 00000000..41215d7f --- /dev/null +++ b/vendor/github.com/mattn/go-colorable/colorable_windows.go @@ -0,0 +1,1043 @@ +// +build windows +// +build !appengine + +package colorable + +import ( + "bytes" + "io" + "math" + "os" + "strconv" + "strings" + "sync" + "syscall" + "unsafe" + + "github.com/mattn/go-isatty" +) + +const ( + foregroundBlue = 0x1 + foregroundGreen = 0x2 + foregroundRed = 0x4 + foregroundIntensity = 0x8 + foregroundMask = (foregroundRed | foregroundBlue | foregroundGreen | foregroundIntensity) + backgroundBlue = 0x10 + backgroundGreen = 0x20 + backgroundRed = 0x40 + backgroundIntensity = 0x80 + backgroundMask = (backgroundRed | backgroundBlue | backgroundGreen | backgroundIntensity) + commonLvbUnderscore = 0x8000 + + cENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x4 +) + +const ( + genericRead = 0x80000000 + genericWrite = 0x40000000 +) + +const ( + consoleTextmodeBuffer = 0x1 +) + +type wchar uint16 +type short int16 +type dword uint32 +type word uint16 + +type coord struct { + x short + y short +} + +type smallRect struct { + left short + top short + right short + bottom short +} + +type consoleScreenBufferInfo struct { + size coord + cursorPosition coord + attributes word + window smallRect + maximumWindowSize coord +} + +type consoleCursorInfo struct { + size dword + visible int32 +} + +var ( + kernel32 = syscall.NewLazyDLL("kernel32.dll") + procGetConsoleScreenBufferInfo = kernel32.NewProc("GetConsoleScreenBufferInfo") + procSetConsoleTextAttribute = kernel32.NewProc("SetConsoleTextAttribute") + procSetConsoleCursorPosition = kernel32.NewProc("SetConsoleCursorPosition") + procFillConsoleOutputCharacter = kernel32.NewProc("FillConsoleOutputCharacterW") + procFillConsoleOutputAttribute = kernel32.NewProc("FillConsoleOutputAttribute") + procGetConsoleCursorInfo = kernel32.NewProc("GetConsoleCursorInfo") + procSetConsoleCursorInfo = kernel32.NewProc("SetConsoleCursorInfo") + procSetConsoleTitle = kernel32.NewProc("SetConsoleTitleW") + procGetConsoleMode = kernel32.NewProc("GetConsoleMode") + procSetConsoleMode = kernel32.NewProc("SetConsoleMode") + procCreateConsoleScreenBuffer = kernel32.NewProc("CreateConsoleScreenBuffer") 
+) + +// Writer provides colorable Writer to the console +type Writer struct { + out io.Writer + handle syscall.Handle + althandle syscall.Handle + oldattr word + oldpos coord + rest bytes.Buffer + mutex sync.Mutex +} + +// NewColorable returns new instance of Writer which handles escape sequence from File. +func NewColorable(file *os.File) io.Writer { + if file == nil { + panic("nil passed instead of *os.File to NewColorable()") + } + + if isatty.IsTerminal(file.Fd()) { + var mode uint32 + if r, _, _ := procGetConsoleMode.Call(file.Fd(), uintptr(unsafe.Pointer(&mode))); r != 0 && mode&cENABLE_VIRTUAL_TERMINAL_PROCESSING != 0 { + return file + } + var csbi consoleScreenBufferInfo + handle := syscall.Handle(file.Fd()) + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + return &Writer{out: file, handle: handle, oldattr: csbi.attributes, oldpos: coord{0, 0}} + } + return file +} + +// NewColorableStdout returns new instance of Writer which handles escape sequence for stdout. +func NewColorableStdout() io.Writer { + return NewColorable(os.Stdout) +} + +// NewColorableStderr returns new instance of Writer which handles escape sequence for stderr. +func NewColorableStderr() io.Writer { + return NewColorable(os.Stderr) +} + +var color256 = map[int]int{ + 0: 0x000000, + 1: 0x800000, + 2: 0x008000, + 3: 0x808000, + 4: 0x000080, + 5: 0x800080, + 6: 0x008080, + 7: 0xc0c0c0, + 8: 0x808080, + 9: 0xff0000, + 10: 0x00ff00, + 11: 0xffff00, + 12: 0x0000ff, + 13: 0xff00ff, + 14: 0x00ffff, + 15: 0xffffff, + 16: 0x000000, + 17: 0x00005f, + 18: 0x000087, + 19: 0x0000af, + 20: 0x0000d7, + 21: 0x0000ff, + 22: 0x005f00, + 23: 0x005f5f, + 24: 0x005f87, + 25: 0x005faf, + 26: 0x005fd7, + 27: 0x005fff, + 28: 0x008700, + 29: 0x00875f, + 30: 0x008787, + 31: 0x0087af, + 32: 0x0087d7, + 33: 0x0087ff, + 34: 0x00af00, + 35: 0x00af5f, + 36: 0x00af87, + 37: 0x00afaf, + 38: 0x00afd7, + 39: 0x00afff, + 40: 0x00d700, + 41: 0x00d75f, + 42: 0x00d787, + 43: 0x00d7af, + 44: 0x00d7d7, + 45: 0x00d7ff, + 46: 0x00ff00, + 47: 0x00ff5f, + 48: 0x00ff87, + 49: 0x00ffaf, + 50: 0x00ffd7, + 51: 0x00ffff, + 52: 0x5f0000, + 53: 0x5f005f, + 54: 0x5f0087, + 55: 0x5f00af, + 56: 0x5f00d7, + 57: 0x5f00ff, + 58: 0x5f5f00, + 59: 0x5f5f5f, + 60: 0x5f5f87, + 61: 0x5f5faf, + 62: 0x5f5fd7, + 63: 0x5f5fff, + 64: 0x5f8700, + 65: 0x5f875f, + 66: 0x5f8787, + 67: 0x5f87af, + 68: 0x5f87d7, + 69: 0x5f87ff, + 70: 0x5faf00, + 71: 0x5faf5f, + 72: 0x5faf87, + 73: 0x5fafaf, + 74: 0x5fafd7, + 75: 0x5fafff, + 76: 0x5fd700, + 77: 0x5fd75f, + 78: 0x5fd787, + 79: 0x5fd7af, + 80: 0x5fd7d7, + 81: 0x5fd7ff, + 82: 0x5fff00, + 83: 0x5fff5f, + 84: 0x5fff87, + 85: 0x5fffaf, + 86: 0x5fffd7, + 87: 0x5fffff, + 88: 0x870000, + 89: 0x87005f, + 90: 0x870087, + 91: 0x8700af, + 92: 0x8700d7, + 93: 0x8700ff, + 94: 0x875f00, + 95: 0x875f5f, + 96: 0x875f87, + 97: 0x875faf, + 98: 0x875fd7, + 99: 0x875fff, + 100: 0x878700, + 101: 0x87875f, + 102: 0x878787, + 103: 0x8787af, + 104: 0x8787d7, + 105: 0x8787ff, + 106: 0x87af00, + 107: 0x87af5f, + 108: 0x87af87, + 109: 0x87afaf, + 110: 0x87afd7, + 111: 0x87afff, + 112: 0x87d700, + 113: 0x87d75f, + 114: 0x87d787, + 115: 0x87d7af, + 116: 0x87d7d7, + 117: 0x87d7ff, + 118: 0x87ff00, + 119: 0x87ff5f, + 120: 0x87ff87, + 121: 0x87ffaf, + 122: 0x87ffd7, + 123: 0x87ffff, + 124: 0xaf0000, + 125: 0xaf005f, + 126: 0xaf0087, + 127: 0xaf00af, + 128: 0xaf00d7, + 129: 0xaf00ff, + 130: 0xaf5f00, + 131: 0xaf5f5f, + 132: 0xaf5f87, + 133: 0xaf5faf, + 134: 0xaf5fd7, + 135: 0xaf5fff, + 136: 0xaf8700, + 137: 0xaf875f, + 138: 
0xaf8787, + 139: 0xaf87af, + 140: 0xaf87d7, + 141: 0xaf87ff, + 142: 0xafaf00, + 143: 0xafaf5f, + 144: 0xafaf87, + 145: 0xafafaf, + 146: 0xafafd7, + 147: 0xafafff, + 148: 0xafd700, + 149: 0xafd75f, + 150: 0xafd787, + 151: 0xafd7af, + 152: 0xafd7d7, + 153: 0xafd7ff, + 154: 0xafff00, + 155: 0xafff5f, + 156: 0xafff87, + 157: 0xafffaf, + 158: 0xafffd7, + 159: 0xafffff, + 160: 0xd70000, + 161: 0xd7005f, + 162: 0xd70087, + 163: 0xd700af, + 164: 0xd700d7, + 165: 0xd700ff, + 166: 0xd75f00, + 167: 0xd75f5f, + 168: 0xd75f87, + 169: 0xd75faf, + 170: 0xd75fd7, + 171: 0xd75fff, + 172: 0xd78700, + 173: 0xd7875f, + 174: 0xd78787, + 175: 0xd787af, + 176: 0xd787d7, + 177: 0xd787ff, + 178: 0xd7af00, + 179: 0xd7af5f, + 180: 0xd7af87, + 181: 0xd7afaf, + 182: 0xd7afd7, + 183: 0xd7afff, + 184: 0xd7d700, + 185: 0xd7d75f, + 186: 0xd7d787, + 187: 0xd7d7af, + 188: 0xd7d7d7, + 189: 0xd7d7ff, + 190: 0xd7ff00, + 191: 0xd7ff5f, + 192: 0xd7ff87, + 193: 0xd7ffaf, + 194: 0xd7ffd7, + 195: 0xd7ffff, + 196: 0xff0000, + 197: 0xff005f, + 198: 0xff0087, + 199: 0xff00af, + 200: 0xff00d7, + 201: 0xff00ff, + 202: 0xff5f00, + 203: 0xff5f5f, + 204: 0xff5f87, + 205: 0xff5faf, + 206: 0xff5fd7, + 207: 0xff5fff, + 208: 0xff8700, + 209: 0xff875f, + 210: 0xff8787, + 211: 0xff87af, + 212: 0xff87d7, + 213: 0xff87ff, + 214: 0xffaf00, + 215: 0xffaf5f, + 216: 0xffaf87, + 217: 0xffafaf, + 218: 0xffafd7, + 219: 0xffafff, + 220: 0xffd700, + 221: 0xffd75f, + 222: 0xffd787, + 223: 0xffd7af, + 224: 0xffd7d7, + 225: 0xffd7ff, + 226: 0xffff00, + 227: 0xffff5f, + 228: 0xffff87, + 229: 0xffffaf, + 230: 0xffffd7, + 231: 0xffffff, + 232: 0x080808, + 233: 0x121212, + 234: 0x1c1c1c, + 235: 0x262626, + 236: 0x303030, + 237: 0x3a3a3a, + 238: 0x444444, + 239: 0x4e4e4e, + 240: 0x585858, + 241: 0x626262, + 242: 0x6c6c6c, + 243: 0x767676, + 244: 0x808080, + 245: 0x8a8a8a, + 246: 0x949494, + 247: 0x9e9e9e, + 248: 0xa8a8a8, + 249: 0xb2b2b2, + 250: 0xbcbcbc, + 251: 0xc6c6c6, + 252: 0xd0d0d0, + 253: 0xdadada, + 254: 0xe4e4e4, + 255: 0xeeeeee, +} + +// `\033]0;TITLESTR\007` +func doTitleSequence(er *bytes.Reader) error { + var c byte + var err error + + c, err = er.ReadByte() + if err != nil { + return err + } + if c != '0' && c != '2' { + return nil + } + c, err = er.ReadByte() + if err != nil { + return err + } + if c != ';' { + return nil + } + title := make([]byte, 0, 80) + for { + c, err = er.ReadByte() + if err != nil { + return err + } + if c == 0x07 || c == '\n' { + break + } + title = append(title, c) + } + if len(title) > 0 { + title8, err := syscall.UTF16PtrFromString(string(title)) + if err == nil { + procSetConsoleTitle.Call(uintptr(unsafe.Pointer(title8))) + } + } + return nil +} + +// returns Atoi(s) unless s == "" in which case it returns def +func atoiWithDefault(s string, def int) (int, error) { + if s == "" { + return def, nil + } + return strconv.Atoi(s) +} + +// Write writes data on console +func (w *Writer) Write(data []byte) (n int, err error) { + w.mutex.Lock() + defer w.mutex.Unlock() + var csbi consoleScreenBufferInfo + procGetConsoleScreenBufferInfo.Call(uintptr(w.handle), uintptr(unsafe.Pointer(&csbi))) + + handle := w.handle + + var er *bytes.Reader + if w.rest.Len() > 0 { + var rest bytes.Buffer + w.rest.WriteTo(&rest) + w.rest.Reset() + rest.Write(data) + er = bytes.NewReader(rest.Bytes()) + } else { + er = bytes.NewReader(data) + } + var bw [1]byte +loop: + for { + c1, err := er.ReadByte() + if err != nil { + break loop + } + if c1 != 0x1b { + bw[0] = c1 + w.out.Write(bw[:]) + continue + } + c2, err := er.ReadByte() + if err != nil { + 
break loop + } + + switch c2 { + case '>': + continue + case ']': + w.rest.WriteByte(c1) + w.rest.WriteByte(c2) + er.WriteTo(&w.rest) + if bytes.IndexByte(w.rest.Bytes(), 0x07) == -1 { + break loop + } + er = bytes.NewReader(w.rest.Bytes()[2:]) + err := doTitleSequence(er) + if err != nil { + break loop + } + w.rest.Reset() + continue + // https://github.com/mattn/go-colorable/issues/27 + case '7': + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + w.oldpos = csbi.cursorPosition + continue + case '8': + procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&w.oldpos))) + continue + case 0x5b: + // execute part after switch + default: + continue + } + + w.rest.WriteByte(c1) + w.rest.WriteByte(c2) + er.WriteTo(&w.rest) + + var buf bytes.Buffer + var m byte + for i, c := range w.rest.Bytes()[2:] { + if ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') || c == '@' { + m = c + er = bytes.NewReader(w.rest.Bytes()[2+i+1:]) + w.rest.Reset() + break + } + buf.Write([]byte(string(c))) + } + if m == 0 { + break loop + } + + switch m { + case 'A': + n, err = atoiWithDefault(buf.String(), 1) + if err != nil { + continue + } + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + csbi.cursorPosition.y -= short(n) + procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition))) + case 'B': + n, err = atoiWithDefault(buf.String(), 1) + if err != nil { + continue + } + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + csbi.cursorPosition.y += short(n) + procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition))) + case 'C': + n, err = atoiWithDefault(buf.String(), 1) + if err != nil { + continue + } + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + csbi.cursorPosition.x += short(n) + procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition))) + case 'D': + n, err = atoiWithDefault(buf.String(), 1) + if err != nil { + continue + } + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + csbi.cursorPosition.x -= short(n) + if csbi.cursorPosition.x < 0 { + csbi.cursorPosition.x = 0 + } + procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition))) + case 'E': + n, err = strconv.Atoi(buf.String()) + if err != nil { + continue + } + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + csbi.cursorPosition.x = 0 + csbi.cursorPosition.y += short(n) + procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition))) + case 'F': + n, err = strconv.Atoi(buf.String()) + if err != nil { + continue + } + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + csbi.cursorPosition.x = 0 + csbi.cursorPosition.y -= short(n) + procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition))) + case 'G': + n, err = strconv.Atoi(buf.String()) + if err != nil { + continue + } + if n < 1 { + n = 1 + } + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + csbi.cursorPosition.x = short(n - 1) + procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition))) + case 'H', 'f': + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + if 
buf.Len() > 0 { + token := strings.Split(buf.String(), ";") + switch len(token) { + case 1: + n1, err := strconv.Atoi(token[0]) + if err != nil { + continue + } + csbi.cursorPosition.y = short(n1 - 1) + case 2: + n1, err := strconv.Atoi(token[0]) + if err != nil { + continue + } + n2, err := strconv.Atoi(token[1]) + if err != nil { + continue + } + csbi.cursorPosition.x = short(n2 - 1) + csbi.cursorPosition.y = short(n1 - 1) + } + } else { + csbi.cursorPosition.y = 0 + } + procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition))) + case 'J': + n := 0 + if buf.Len() > 0 { + n, err = strconv.Atoi(buf.String()) + if err != nil { + continue + } + } + var count, written dword + var cursor coord + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + switch n { + case 0: + cursor = coord{x: csbi.cursorPosition.x, y: csbi.cursorPosition.y} + count = dword(csbi.size.x) - dword(csbi.cursorPosition.x) + dword(csbi.size.y-csbi.cursorPosition.y)*dword(csbi.size.x) + case 1: + cursor = coord{x: csbi.window.left, y: csbi.window.top} + count = dword(csbi.size.x) - dword(csbi.cursorPosition.x) + dword(csbi.window.top-csbi.cursorPosition.y)*dword(csbi.size.x) + case 2: + cursor = coord{x: csbi.window.left, y: csbi.window.top} + count = dword(csbi.size.x) - dword(csbi.cursorPosition.x) + dword(csbi.size.y-csbi.cursorPosition.y)*dword(csbi.size.x) + } + procFillConsoleOutputCharacter.Call(uintptr(handle), uintptr(' '), uintptr(count), *(*uintptr)(unsafe.Pointer(&cursor)), uintptr(unsafe.Pointer(&written))) + procFillConsoleOutputAttribute.Call(uintptr(handle), uintptr(csbi.attributes), uintptr(count), *(*uintptr)(unsafe.Pointer(&cursor)), uintptr(unsafe.Pointer(&written))) + case 'K': + n := 0 + if buf.Len() > 0 { + n, err = strconv.Atoi(buf.String()) + if err != nil { + continue + } + } + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + var cursor coord + var count, written dword + switch n { + case 0: + cursor = coord{x: csbi.cursorPosition.x, y: csbi.cursorPosition.y} + count = dword(csbi.size.x - csbi.cursorPosition.x) + case 1: + cursor = coord{x: csbi.window.left, y: csbi.cursorPosition.y} + count = dword(csbi.size.x - csbi.cursorPosition.x) + case 2: + cursor = coord{x: csbi.window.left, y: csbi.cursorPosition.y} + count = dword(csbi.size.x) + } + procFillConsoleOutputCharacter.Call(uintptr(handle), uintptr(' '), uintptr(count), *(*uintptr)(unsafe.Pointer(&cursor)), uintptr(unsafe.Pointer(&written))) + procFillConsoleOutputAttribute.Call(uintptr(handle), uintptr(csbi.attributes), uintptr(count), *(*uintptr)(unsafe.Pointer(&cursor)), uintptr(unsafe.Pointer(&written))) + case 'X': + n := 0 + if buf.Len() > 0 { + n, err = strconv.Atoi(buf.String()) + if err != nil { + continue + } + } + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + var cursor coord + var written dword + cursor = coord{x: csbi.cursorPosition.x, y: csbi.cursorPosition.y} + procFillConsoleOutputCharacter.Call(uintptr(handle), uintptr(' '), uintptr(n), *(*uintptr)(unsafe.Pointer(&cursor)), uintptr(unsafe.Pointer(&written))) + procFillConsoleOutputAttribute.Call(uintptr(handle), uintptr(csbi.attributes), uintptr(n), *(*uintptr)(unsafe.Pointer(&cursor)), uintptr(unsafe.Pointer(&written))) + case 'm': + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + attr := csbi.attributes + cs := buf.String() + if cs == "" { + 
procSetConsoleTextAttribute.Call(uintptr(handle), uintptr(w.oldattr)) + continue + } + token := strings.Split(cs, ";") + for i := 0; i < len(token); i++ { + ns := token[i] + if n, err = strconv.Atoi(ns); err == nil { + switch { + case n == 0 || n == 100: + attr = w.oldattr + case n == 4: + attr |= commonLvbUnderscore + case (1 <= n && n <= 3) || n == 5: + attr |= foregroundIntensity + case n == 7 || n == 27: + attr = + (attr &^ (foregroundMask | backgroundMask)) | + ((attr & foregroundMask) << 4) | + ((attr & backgroundMask) >> 4) + case n == 22: + attr &^= foregroundIntensity + case n == 24: + attr &^= commonLvbUnderscore + case 30 <= n && n <= 37: + attr &= backgroundMask + if (n-30)&1 != 0 { + attr |= foregroundRed + } + if (n-30)&2 != 0 { + attr |= foregroundGreen + } + if (n-30)&4 != 0 { + attr |= foregroundBlue + } + case n == 38: // set foreground color. + if i < len(token)-2 && (token[i+1] == "5" || token[i+1] == "05") { + if n256, err := strconv.Atoi(token[i+2]); err == nil { + if n256foreAttr == nil { + n256setup() + } + attr &= backgroundMask + attr |= n256foreAttr[n256%len(n256foreAttr)] + i += 2 + } + } else if len(token) == 5 && token[i+1] == "2" { + var r, g, b int + r, _ = strconv.Atoi(token[i+2]) + g, _ = strconv.Atoi(token[i+3]) + b, _ = strconv.Atoi(token[i+4]) + i += 4 + if r > 127 { + attr |= foregroundRed + } + if g > 127 { + attr |= foregroundGreen + } + if b > 127 { + attr |= foregroundBlue + } + } else { + attr = attr & (w.oldattr & backgroundMask) + } + case n == 39: // reset foreground color. + attr &= backgroundMask + attr |= w.oldattr & foregroundMask + case 40 <= n && n <= 47: + attr &= foregroundMask + if (n-40)&1 != 0 { + attr |= backgroundRed + } + if (n-40)&2 != 0 { + attr |= backgroundGreen + } + if (n-40)&4 != 0 { + attr |= backgroundBlue + } + case n == 48: // set background color. + if i < len(token)-2 && token[i+1] == "5" { + if n256, err := strconv.Atoi(token[i+2]); err == nil { + if n256backAttr == nil { + n256setup() + } + attr &= foregroundMask + attr |= n256backAttr[n256%len(n256backAttr)] + i += 2 + } + } else if len(token) == 5 && token[i+1] == "2" { + var r, g, b int + r, _ = strconv.Atoi(token[i+2]) + g, _ = strconv.Atoi(token[i+3]) + b, _ = strconv.Atoi(token[i+4]) + i += 4 + if r > 127 { + attr |= backgroundRed + } + if g > 127 { + attr |= backgroundGreen + } + if b > 127 { + attr |= backgroundBlue + } + } else { + attr = attr & (w.oldattr & foregroundMask) + } + case n == 49: // reset foreground color. 
+ attr &= foregroundMask + attr |= w.oldattr & backgroundMask + case 90 <= n && n <= 97: + attr = (attr & backgroundMask) + attr |= foregroundIntensity + if (n-90)&1 != 0 { + attr |= foregroundRed + } + if (n-90)&2 != 0 { + attr |= foregroundGreen + } + if (n-90)&4 != 0 { + attr |= foregroundBlue + } + case 100 <= n && n <= 107: + attr = (attr & foregroundMask) + attr |= backgroundIntensity + if (n-100)&1 != 0 { + attr |= backgroundRed + } + if (n-100)&2 != 0 { + attr |= backgroundGreen + } + if (n-100)&4 != 0 { + attr |= backgroundBlue + } + } + procSetConsoleTextAttribute.Call(uintptr(handle), uintptr(attr)) + } + } + case 'h': + var ci consoleCursorInfo + cs := buf.String() + if cs == "5>" { + procGetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci))) + ci.visible = 0 + procSetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci))) + } else if cs == "?25" { + procGetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci))) + ci.visible = 1 + procSetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci))) + } else if cs == "?1049" { + if w.althandle == 0 { + h, _, _ := procCreateConsoleScreenBuffer.Call(uintptr(genericRead|genericWrite), 0, 0, uintptr(consoleTextmodeBuffer), 0, 0) + w.althandle = syscall.Handle(h) + if w.althandle != 0 { + handle = w.althandle + } + } + } + case 'l': + var ci consoleCursorInfo + cs := buf.String() + if cs == "5>" { + procGetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci))) + ci.visible = 1 + procSetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci))) + } else if cs == "?25" { + procGetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci))) + ci.visible = 0 + procSetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci))) + } else if cs == "?1049" { + if w.althandle != 0 { + syscall.CloseHandle(w.althandle) + w.althandle = 0 + handle = w.handle + } + } + case 's': + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + w.oldpos = csbi.cursorPosition + case 'u': + procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&w.oldpos))) + } + } + + return len(data), nil +} + +type consoleColor struct { + rgb int + red bool + green bool + blue bool + intensity bool +} + +func (c consoleColor) foregroundAttr() (attr word) { + if c.red { + attr |= foregroundRed + } + if c.green { + attr |= foregroundGreen + } + if c.blue { + attr |= foregroundBlue + } + if c.intensity { + attr |= foregroundIntensity + } + return +} + +func (c consoleColor) backgroundAttr() (attr word) { + if c.red { + attr |= backgroundRed + } + if c.green { + attr |= backgroundGreen + } + if c.blue { + attr |= backgroundBlue + } + if c.intensity { + attr |= backgroundIntensity + } + return +} + +var color16 = []consoleColor{ + {0x000000, false, false, false, false}, + {0x000080, false, false, true, false}, + {0x008000, false, true, false, false}, + {0x008080, false, true, true, false}, + {0x800000, true, false, false, false}, + {0x800080, true, false, true, false}, + {0x808000, true, true, false, false}, + {0xc0c0c0, true, true, true, false}, + {0x808080, false, false, false, true}, + {0x0000ff, false, false, true, true}, + {0x00ff00, false, true, false, true}, + {0x00ffff, false, true, true, true}, + {0xff0000, true, false, false, true}, + {0xff00ff, true, false, true, true}, + {0xffff00, true, true, false, true}, + {0xffffff, true, true, true, true}, +} + +type hsv struct { + h, s, v float32 +} + +func (a hsv) dist(b hsv) 
float32 { + dh := a.h - b.h + switch { + case dh > 0.5: + dh = 1 - dh + case dh < -0.5: + dh = -1 - dh + } + ds := a.s - b.s + dv := a.v - b.v + return float32(math.Sqrt(float64(dh*dh + ds*ds + dv*dv))) +} + +func toHSV(rgb int) hsv { + r, g, b := float32((rgb&0xFF0000)>>16)/256.0, + float32((rgb&0x00FF00)>>8)/256.0, + float32(rgb&0x0000FF)/256.0 + min, max := minmax3f(r, g, b) + h := max - min + if h > 0 { + if max == r { + h = (g - b) / h + if h < 0 { + h += 6 + } + } else if max == g { + h = 2 + (b-r)/h + } else { + h = 4 + (r-g)/h + } + } + h /= 6.0 + s := max - min + if max != 0 { + s /= max + } + v := max + return hsv{h: h, s: s, v: v} +} + +type hsvTable []hsv + +func toHSVTable(rgbTable []consoleColor) hsvTable { + t := make(hsvTable, len(rgbTable)) + for i, c := range rgbTable { + t[i] = toHSV(c.rgb) + } + return t +} + +func (t hsvTable) find(rgb int) consoleColor { + hsv := toHSV(rgb) + n := 7 + l := float32(5.0) + for i, p := range t { + d := hsv.dist(p) + if d < l { + l, n = d, i + } + } + return color16[n] +} + +func minmax3f(a, b, c float32) (min, max float32) { + if a < b { + if b < c { + return a, c + } else if a < c { + return a, b + } else { + return c, b + } + } else { + if a < c { + return b, c + } else if b < c { + return b, a + } else { + return c, a + } + } +} + +var n256foreAttr []word +var n256backAttr []word + +func n256setup() { + n256foreAttr = make([]word, 256) + n256backAttr = make([]word, 256) + t := toHSVTable(color16) + for i, rgb := range color256 { + c := t.find(rgb) + n256foreAttr[i] = c.foregroundAttr() + n256backAttr[i] = c.backgroundAttr() + } +} + +// EnableColorsStdout enable colors if possible. +func EnableColorsStdout(enabled *bool) func() { + var mode uint32 + h := os.Stdout.Fd() + if r, _, _ := procGetConsoleMode.Call(h, uintptr(unsafe.Pointer(&mode))); r != 0 { + if r, _, _ = procSetConsoleMode.Call(h, uintptr(mode|cENABLE_VIRTUAL_TERMINAL_PROCESSING)); r != 0 { + if enabled != nil { + *enabled = true + } + return func() { + procSetConsoleMode.Call(h, uintptr(mode)) + } + } + } + if enabled != nil { + *enabled = true + } + return func() {} +} diff --git a/vendor/github.com/mattn/go-colorable/go.test.sh b/vendor/github.com/mattn/go-colorable/go.test.sh new file mode 100644 index 00000000..012162b0 --- /dev/null +++ b/vendor/github.com/mattn/go-colorable/go.test.sh @@ -0,0 +1,12 @@ +#!/usr/bin/env bash + +set -e +echo "" > coverage.txt + +for d in $(go list ./... | grep -v vendor); do + go test -race -coverprofile=profile.out -covermode=atomic "$d" + if [ -f profile.out ]; then + cat profile.out >> coverage.txt + rm profile.out + fi +done diff --git a/vendor/github.com/mattn/go-colorable/noncolorable.go b/vendor/github.com/mattn/go-colorable/noncolorable.go new file mode 100644 index 00000000..95f2c6be --- /dev/null +++ b/vendor/github.com/mattn/go-colorable/noncolorable.go @@ -0,0 +1,55 @@ +package colorable + +import ( + "bytes" + "io" +) + +// NonColorable holds writer but removes escape sequence. +type NonColorable struct { + out io.Writer +} + +// NewNonColorable returns new instance of Writer which removes escape sequence from Writer. 
+func NewNonColorable(w io.Writer) io.Writer { + return &NonColorable{out: w} +} + +// Write writes data on console +func (w *NonColorable) Write(data []byte) (n int, err error) { + er := bytes.NewReader(data) + var bw [1]byte +loop: + for { + c1, err := er.ReadByte() + if err != nil { + break loop + } + if c1 != 0x1b { + bw[0] = c1 + w.out.Write(bw[:]) + continue + } + c2, err := er.ReadByte() + if err != nil { + break loop + } + if c2 != 0x5b { + continue + } + + var buf bytes.Buffer + for { + c, err := er.ReadByte() + if err != nil { + break loop + } + if ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') || c == '@' { + break + } + buf.Write([]byte(string(c))) + } + } + + return len(data), nil +} diff --git a/vendor/github.com/mattn/go-isatty/.travis.yml b/vendor/github.com/mattn/go-isatty/.travis.yml new file mode 100644 index 00000000..604314dd --- /dev/null +++ b/vendor/github.com/mattn/go-isatty/.travis.yml @@ -0,0 +1,14 @@ +language: go +sudo: false +go: + - 1.13.x + - tip + +before_install: + - go get -t -v ./... + +script: + - ./go.test.sh + +after_success: + - bash <(curl -s https://codecov.io/bash) diff --git a/vendor/github.com/mattn/go-isatty/LICENSE b/vendor/github.com/mattn/go-isatty/LICENSE new file mode 100644 index 00000000..65dc692b --- /dev/null +++ b/vendor/github.com/mattn/go-isatty/LICENSE @@ -0,0 +1,9 @@ +Copyright (c) Yasuhiro MATSUMOTO + +MIT License (Expat) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/vendor/github.com/mattn/go-isatty/README.md b/vendor/github.com/mattn/go-isatty/README.md new file mode 100644 index 00000000..38418353 --- /dev/null +++ b/vendor/github.com/mattn/go-isatty/README.md @@ -0,0 +1,50 @@ +# go-isatty + +[![Godoc Reference](https://godoc.org/github.com/mattn/go-isatty?status.svg)](http://godoc.org/github.com/mattn/go-isatty) +[![Codecov](https://codecov.io/gh/mattn/go-isatty/branch/master/graph/badge.svg)](https://codecov.io/gh/mattn/go-isatty) +[![Coverage Status](https://coveralls.io/repos/github/mattn/go-isatty/badge.svg?branch=master)](https://coveralls.io/github/mattn/go-isatty?branch=master) +[![Go Report Card](https://goreportcard.com/badge/mattn/go-isatty)](https://goreportcard.com/report/mattn/go-isatty) + +isatty for golang + +## Usage + +```go +package main + +import ( + "fmt" + "github.com/mattn/go-isatty" + "os" +) + +func main() { + if isatty.IsTerminal(os.Stdout.Fd()) { + fmt.Println("Is Terminal") + } else if isatty.IsCygwinTerminal(os.Stdout.Fd()) { + fmt.Println("Is Cygwin/MSYS2 Terminal") + } else { + fmt.Println("Is Not Terminal") + } +} +``` + +## Installation + +``` +$ go get github.com/mattn/go-isatty +``` + +## License + +MIT + +## Author + +Yasuhiro Matsumoto (a.k.a mattn) + +## Thanks + +* k-takata: base idea for IsCygwinTerminal + + https://github.com/k-takata/go-iscygpty diff --git a/vendor/github.com/mattn/go-isatty/doc.go b/vendor/github.com/mattn/go-isatty/doc.go new file mode 100644 index 00000000..17d4f90e --- /dev/null +++ b/vendor/github.com/mattn/go-isatty/doc.go @@ -0,0 +1,2 @@ +// Package isatty implements interface to isatty +package isatty diff --git a/vendor/github.com/mattn/go-isatty/go.test.sh b/vendor/github.com/mattn/go-isatty/go.test.sh new file mode 100644 index 00000000..012162b0 --- /dev/null +++ b/vendor/github.com/mattn/go-isatty/go.test.sh @@ -0,0 +1,12 @@ +#!/usr/bin/env bash + +set -e +echo "" > coverage.txt + +for d in $(go list ./... | grep -v vendor); do + go test -race -coverprofile=profile.out -covermode=atomic "$d" + if [ -f profile.out ]; then + cat profile.out >> coverage.txt + rm profile.out + fi +done diff --git a/vendor/github.com/mattn/go-isatty/isatty_bsd.go b/vendor/github.com/mattn/go-isatty/isatty_bsd.go new file mode 100644 index 00000000..711f2880 --- /dev/null +++ b/vendor/github.com/mattn/go-isatty/isatty_bsd.go @@ -0,0 +1,18 @@ +// +build darwin freebsd openbsd netbsd dragonfly +// +build !appengine + +package isatty + +import "golang.org/x/sys/unix" + +// IsTerminal return true if the file descriptor is terminal. +func IsTerminal(fd uintptr) bool { + _, err := unix.IoctlGetTermios(int(fd), unix.TIOCGETA) + return err == nil +} + +// IsCygwinTerminal return true if the file descriptor is a cygwin or msys2 +// terminal. This is also always false on this environment. +func IsCygwinTerminal(fd uintptr) bool { + return false +} diff --git a/vendor/github.com/mattn/go-isatty/isatty_others.go b/vendor/github.com/mattn/go-isatty/isatty_others.go new file mode 100644 index 00000000..ff714a37 --- /dev/null +++ b/vendor/github.com/mattn/go-isatty/isatty_others.go @@ -0,0 +1,15 @@ +// +build appengine js nacl + +package isatty + +// IsTerminal returns true if the file descriptor is terminal which +// is always false on js and appengine classic which is a sandboxed PaaS. +func IsTerminal(fd uintptr) bool { + return false +} + +// IsCygwinTerminal() return true if the file descriptor is a cygwin or msys2 +// terminal. This is also always false on this environment. 
+func IsCygwinTerminal(fd uintptr) bool { + return false +} diff --git a/vendor/github.com/mattn/go-isatty/isatty_plan9.go b/vendor/github.com/mattn/go-isatty/isatty_plan9.go new file mode 100644 index 00000000..c5b6e0c0 --- /dev/null +++ b/vendor/github.com/mattn/go-isatty/isatty_plan9.go @@ -0,0 +1,22 @@ +// +build plan9 + +package isatty + +import ( + "syscall" +) + +// IsTerminal returns true if the given file descriptor is a terminal. +func IsTerminal(fd uintptr) bool { + path, err := syscall.Fd2path(int(fd)) + if err != nil { + return false + } + return path == "/dev/cons" || path == "/mnt/term/dev/cons" +} + +// IsCygwinTerminal return true if the file descriptor is a cygwin or msys2 +// terminal. This is also always false on this environment. +func IsCygwinTerminal(fd uintptr) bool { + return false +} diff --git a/vendor/github.com/mattn/go-isatty/isatty_solaris.go b/vendor/github.com/mattn/go-isatty/isatty_solaris.go new file mode 100644 index 00000000..bdd5c79a --- /dev/null +++ b/vendor/github.com/mattn/go-isatty/isatty_solaris.go @@ -0,0 +1,22 @@ +// +build solaris +// +build !appengine + +package isatty + +import ( + "golang.org/x/sys/unix" +) + +// IsTerminal returns true if the given file descriptor is a terminal. +// see: http://src.illumos.org/source/xref/illumos-gate/usr/src/lib/libbc/libc/gen/common/isatty.c +func IsTerminal(fd uintptr) bool { + var termio unix.Termio + err := unix.IoctlSetTermio(int(fd), unix.TCGETA, &termio) + return err == nil +} + +// IsCygwinTerminal return true if the file descriptor is a cygwin or msys2 +// terminal. This is also always false on this environment. +func IsCygwinTerminal(fd uintptr) bool { + return false +} diff --git a/vendor/github.com/mattn/go-isatty/isatty_tcgets.go b/vendor/github.com/mattn/go-isatty/isatty_tcgets.go new file mode 100644 index 00000000..31a1ca97 --- /dev/null +++ b/vendor/github.com/mattn/go-isatty/isatty_tcgets.go @@ -0,0 +1,18 @@ +// +build linux aix +// +build !appengine + +package isatty + +import "golang.org/x/sys/unix" + +// IsTerminal return true if the file descriptor is terminal. +func IsTerminal(fd uintptr) bool { + _, err := unix.IoctlGetTermios(int(fd), unix.TCGETS) + return err == nil +} + +// IsCygwinTerminal return true if the file descriptor is a cygwin or msys2 +// terminal. This is also always false on this environment. +func IsCygwinTerminal(fd uintptr) bool { + return false +} diff --git a/vendor/github.com/mattn/go-isatty/isatty_windows.go b/vendor/github.com/mattn/go-isatty/isatty_windows.go new file mode 100644 index 00000000..1fa86915 --- /dev/null +++ b/vendor/github.com/mattn/go-isatty/isatty_windows.go @@ -0,0 +1,125 @@ +// +build windows +// +build !appengine + +package isatty + +import ( + "errors" + "strings" + "syscall" + "unicode/utf16" + "unsafe" +) + +const ( + objectNameInfo uintptr = 1 + fileNameInfo = 2 + fileTypePipe = 3 +) + +var ( + kernel32 = syscall.NewLazyDLL("kernel32.dll") + ntdll = syscall.NewLazyDLL("ntdll.dll") + procGetConsoleMode = kernel32.NewProc("GetConsoleMode") + procGetFileInformationByHandleEx = kernel32.NewProc("GetFileInformationByHandleEx") + procGetFileType = kernel32.NewProc("GetFileType") + procNtQueryObject = ntdll.NewProc("NtQueryObject") +) + +func init() { + // Check if GetFileInformationByHandleEx is available. + if procGetFileInformationByHandleEx.Find() != nil { + procGetFileInformationByHandleEx = nil + } +} + +// IsTerminal return true if the file descriptor is terminal. 
+func IsTerminal(fd uintptr) bool { + var st uint32 + r, _, e := syscall.Syscall(procGetConsoleMode.Addr(), 2, fd, uintptr(unsafe.Pointer(&st)), 0) + return r != 0 && e == 0 +} + +// Check pipe name is used for cygwin/msys2 pty. +// Cygwin/MSYS2 PTY has a name like: +// \{cygwin,msys}-XXXXXXXXXXXXXXXX-ptyN-{from,to}-master +func isCygwinPipeName(name string) bool { + token := strings.Split(name, "-") + if len(token) < 5 { + return false + } + + if token[0] != `\msys` && + token[0] != `\cygwin` && + token[0] != `\Device\NamedPipe\msys` && + token[0] != `\Device\NamedPipe\cygwin` { + return false + } + + if token[1] == "" { + return false + } + + if !strings.HasPrefix(token[2], "pty") { + return false + } + + if token[3] != `from` && token[3] != `to` { + return false + } + + if token[4] != "master" { + return false + } + + return true +} + +// getFileNameByHandle use the undocomented ntdll NtQueryObject to get file full name from file handler +// since GetFileInformationByHandleEx is not avilable under windows Vista and still some old fashion +// guys are using Windows XP, this is a workaround for those guys, it will also work on system from +// Windows vista to 10 +// see https://stackoverflow.com/a/18792477 for details +func getFileNameByHandle(fd uintptr) (string, error) { + if procNtQueryObject == nil { + return "", errors.New("ntdll.dll: NtQueryObject not supported") + } + + var buf [4 + syscall.MAX_PATH]uint16 + var result int + r, _, e := syscall.Syscall6(procNtQueryObject.Addr(), 5, + fd, objectNameInfo, uintptr(unsafe.Pointer(&buf)), uintptr(2*len(buf)), uintptr(unsafe.Pointer(&result)), 0) + if r != 0 { + return "", e + } + return string(utf16.Decode(buf[4 : 4+buf[0]/2])), nil +} + +// IsCygwinTerminal() return true if the file descriptor is a cygwin or msys2 +// terminal. +func IsCygwinTerminal(fd uintptr) bool { + if procGetFileInformationByHandleEx == nil { + name, err := getFileNameByHandle(fd) + if err != nil { + return false + } + return isCygwinPipeName(name) + } + + // Cygwin/msys's pty is a pipe. + ft, _, e := syscall.Syscall(procGetFileType.Addr(), 1, fd, 0, 0) + if ft != fileTypePipe || e != 0 { + return false + } + + var buf [2 + syscall.MAX_PATH]uint16 + r, _, e := syscall.Syscall6(procGetFileInformationByHandleEx.Addr(), + 4, fd, fileNameInfo, uintptr(unsafe.Pointer(&buf)), + uintptr(len(buf)*2), 0, 0) + if r == 0 || e != 0 { + return false + } + + l := *(*uint32)(unsafe.Pointer(&buf)) + return isCygwinPipeName(string(utf16.Decode(buf[2 : 2+l/2]))) +} diff --git a/vendor/github.com/mattn/go-isatty/renovate.json b/vendor/github.com/mattn/go-isatty/renovate.json new file mode 100644 index 00000000..5ae9d96b --- /dev/null +++ b/vendor/github.com/mattn/go-isatty/renovate.json @@ -0,0 +1,8 @@ +{ + "extends": [ + "config:base" + ], + "postUpdateOptions": [ + "gomodTidy" + ] +} diff --git a/vendor/golang.org/x/xerrors/LICENSE b/vendor/golang.org/x/xerrors/LICENSE new file mode 100644 index 00000000..e4a47e17 --- /dev/null +++ b/vendor/golang.org/x/xerrors/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2019 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. 
+ * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/golang.org/x/xerrors/PATENTS b/vendor/golang.org/x/xerrors/PATENTS new file mode 100644 index 00000000..73309904 --- /dev/null +++ b/vendor/golang.org/x/xerrors/PATENTS @@ -0,0 +1,22 @@ +Additional IP Rights Grant (Patents) + +"This implementation" means the copyrightable works distributed by +Google as part of the Go project. + +Google hereby grants to You a perpetual, worldwide, non-exclusive, +no-charge, royalty-free, irrevocable (except as stated in this section) +patent license to make, have made, use, offer to sell, sell, import, +transfer and otherwise run, modify and propagate the contents of this +implementation of Go, where such license applies only to those patent +claims, both currently owned or controlled by Google and acquired in +the future, licensable by Google that are necessarily infringed by this +implementation of Go. This grant does not include claims that would be +infringed only as a consequence of further modification of this +implementation. If you or your agent or exclusive licensee institute or +order or agree to the institution of patent litigation against any +entity (including a cross-claim or counterclaim in a lawsuit) alleging +that this implementation of Go or any code incorporated within this +implementation of Go constitutes direct or contributory patent +infringement, or inducement of patent infringement, then any patent +rights granted to you under this License for this implementation of Go +shall terminate as of the date such litigation is filed. diff --git a/vendor/golang.org/x/xerrors/README b/vendor/golang.org/x/xerrors/README new file mode 100644 index 00000000..aac7867a --- /dev/null +++ b/vendor/golang.org/x/xerrors/README @@ -0,0 +1,2 @@ +This repository holds the transition packages for the new Go 1.13 error values. +See golang.org/design/29934-error-values. diff --git a/vendor/golang.org/x/xerrors/adaptor.go b/vendor/golang.org/x/xerrors/adaptor.go new file mode 100644 index 00000000..4317f248 --- /dev/null +++ b/vendor/golang.org/x/xerrors/adaptor.go @@ -0,0 +1,193 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package xerrors + +import ( + "bytes" + "fmt" + "io" + "reflect" + "strconv" +) + +// FormatError calls the FormatError method of f with an errors.Printer +// configured according to s and verb, and writes the result to s. +func FormatError(f Formatter, s fmt.State, verb rune) { + // Assuming this function is only called from the Format method, and given + // that FormatError takes precedence over Format, it cannot be called from + // any package that supports errors.Formatter. It is therefore safe to + // disregard that State may be a specific printer implementation and use one + // of our choice instead. + + // limitations: does not support printing error as Go struct. + + var ( + sep = " " // separator before next error + p = &state{State: s} + direct = true + ) + + var err error = f + + switch verb { + // Note that this switch must match the preference order + // for ordinary string printing (%#v before %+v, and so on). + + case 'v': + if s.Flag('#') { + if stringer, ok := err.(fmt.GoStringer); ok { + io.WriteString(&p.buf, stringer.GoString()) + goto exit + } + // proceed as if it were %v + } else if s.Flag('+') { + p.printDetail = true + sep = "\n - " + } + case 's': + case 'q', 'x', 'X': + // Use an intermediate buffer in the rare cases that precision, + // truncation, or one of the alternative verbs (q, x, and X) are + // specified. + direct = false + + default: + p.buf.WriteString("%!") + p.buf.WriteRune(verb) + p.buf.WriteByte('(') + switch { + case err != nil: + p.buf.WriteString(reflect.TypeOf(f).String()) + default: + p.buf.WriteString("") + } + p.buf.WriteByte(')') + io.Copy(s, &p.buf) + return + } + +loop: + for { + switch v := err.(type) { + case Formatter: + err = v.FormatError((*printer)(p)) + case fmt.Formatter: + v.Format(p, 'v') + break loop + default: + io.WriteString(&p.buf, v.Error()) + break loop + } + if err == nil { + break + } + if p.needColon || !p.printDetail { + p.buf.WriteByte(':') + p.needColon = false + } + p.buf.WriteString(sep) + p.inDetail = false + p.needNewline = false + } + +exit: + width, okW := s.Width() + prec, okP := s.Precision() + + if !direct || (okW && width > 0) || okP { + // Construct format string from State s. + format := []byte{'%'} + if s.Flag('-') { + format = append(format, '-') + } + if s.Flag('+') { + format = append(format, '+') + } + if s.Flag(' ') { + format = append(format, ' ') + } + if okW { + format = strconv.AppendInt(format, int64(width), 10) + } + if okP { + format = append(format, '.') + format = strconv.AppendInt(format, int64(prec), 10) + } + format = append(format, string(verb)...) + fmt.Fprintf(s, string(format), p.buf.String()) + } else { + io.Copy(s, &p.buf) + } +} + +var detailSep = []byte("\n ") + +// state tracks error printing state. It implements fmt.State. 
+type state struct { + fmt.State + buf bytes.Buffer + + printDetail bool + inDetail bool + needColon bool + needNewline bool +} + +func (s *state) Write(b []byte) (n int, err error) { + if s.printDetail { + if len(b) == 0 { + return 0, nil + } + if s.inDetail && s.needColon { + s.needNewline = true + if b[0] == '\n' { + b = b[1:] + } + } + k := 0 + for i, c := range b { + if s.needNewline { + if s.inDetail && s.needColon { + s.buf.WriteByte(':') + s.needColon = false + } + s.buf.Write(detailSep) + s.needNewline = false + } + if c == '\n' { + s.buf.Write(b[k:i]) + k = i + 1 + s.needNewline = true + } + } + s.buf.Write(b[k:]) + if !s.inDetail { + s.needColon = true + } + } else if !s.inDetail { + s.buf.Write(b) + } + return len(b), nil +} + +// printer wraps a state to implement an xerrors.Printer. +type printer state + +func (s *printer) Print(args ...interface{}) { + if !s.inDetail || s.printDetail { + fmt.Fprint((*state)(s), args...) + } +} + +func (s *printer) Printf(format string, args ...interface{}) { + if !s.inDetail || s.printDetail { + fmt.Fprintf((*state)(s), format, args...) + } +} + +func (s *printer) Detail() bool { + s.inDetail = true + return s.printDetail +} diff --git a/vendor/golang.org/x/xerrors/codereview.cfg b/vendor/golang.org/x/xerrors/codereview.cfg new file mode 100644 index 00000000..3f8b14b6 --- /dev/null +++ b/vendor/golang.org/x/xerrors/codereview.cfg @@ -0,0 +1 @@ +issuerepo: golang/go diff --git a/vendor/golang.org/x/xerrors/doc.go b/vendor/golang.org/x/xerrors/doc.go new file mode 100644 index 00000000..2ef99f5a --- /dev/null +++ b/vendor/golang.org/x/xerrors/doc.go @@ -0,0 +1,23 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package xerrors implements functions to manipulate errors. +// +// This package is based on the Go 2 proposal for error values: +// +// https://golang.org/design/29934-error-values +// +// These functions were incorporated into the standard library's errors package +// in Go 1.13: +// - Is +// - As +// - Unwrap +// +// Also, Errorf's %w verb was incorporated into fmt.Errorf. +// +// Use this package to get equivalent behavior in all supported Go versions. +// +// No other features of this package were included in Go 1.13, and at present +// there are no plans to include any of them. +package xerrors // import "golang.org/x/xerrors" diff --git a/vendor/golang.org/x/xerrors/errors.go b/vendor/golang.org/x/xerrors/errors.go new file mode 100644 index 00000000..e88d3772 --- /dev/null +++ b/vendor/golang.org/x/xerrors/errors.go @@ -0,0 +1,33 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package xerrors + +import "fmt" + +// errorString is a trivial implementation of error. +type errorString struct { + s string + frame Frame +} + +// New returns an error that formats as the given text. +// +// The returned error contains a Frame set to the caller's location and +// implements Formatter to show this information when printed with details. 
+func New(text string) error { + return &errorString{text, Caller(1)} +} + +func (e *errorString) Error() string { + return e.s +} + +func (e *errorString) Format(s fmt.State, v rune) { FormatError(e, s, v) } + +func (e *errorString) FormatError(p Printer) (next error) { + p.Print(e.s) + e.frame.Format(p) + return nil +} diff --git a/vendor/golang.org/x/xerrors/fmt.go b/vendor/golang.org/x/xerrors/fmt.go new file mode 100644 index 00000000..27a5d70b --- /dev/null +++ b/vendor/golang.org/x/xerrors/fmt.go @@ -0,0 +1,190 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package xerrors + +import ( + "fmt" + "strings" + "unicode" + "unicode/utf8" + + "golang.org/x/xerrors/internal" +) + +const percentBangString = "%!" + +// Errorf formats according to a format specifier and returns the string as a +// value that satisfies error. +// +// The returned error includes the file and line number of the caller when +// formatted with additional detail enabled. If the last argument is an error +// the returned error's Format method will return it if the format string ends +// with ": %s", ": %v", or ": %w". If the last argument is an error and the +// format string ends with ": %w", the returned error implements an Unwrap +// method returning it. +// +// If the format specifier includes a %w verb with an error operand in a +// position other than at the end, the returned error will still implement an +// Unwrap method returning the operand, but the error's Format method will not +// return the wrapped error. +// +// It is invalid to include more than one %w verb or to supply it with an +// operand that does not implement the error interface. The %w verb is otherwise +// a synonym for %v. +// +// Note that as of Go 1.13, the fmt.Errorf function will do error formatting, +// but it will not capture a stack backtrace. +func Errorf(format string, a ...interface{}) error { + format = formatPlusW(format) + // Support a ": %[wsv]" suffix, which works well with xerrors.Formatter. + wrap := strings.HasSuffix(format, ": %w") + idx, format2, ok := parsePercentW(format) + percentWElsewhere := !wrap && idx >= 0 + if !percentWElsewhere && (wrap || strings.HasSuffix(format, ": %s") || strings.HasSuffix(format, ": %v")) { + err := errorAt(a, len(a)-1) + if err == nil { + return &noWrapError{fmt.Sprintf(format, a...), nil, Caller(1)} + } + // TODO: this is not entirely correct. The error value could be + // printed elsewhere in format if it mixes numbered with unnumbered + // substitutions. With relatively small changes to doPrintf we can + // have it optionally ignore extra arguments and pass the argument + // list in its entirety. + msg := fmt.Sprintf(format[:len(format)-len(": %s")], a[:len(a)-1]...) + frame := Frame{} + if internal.EnableTrace { + frame = Caller(1) + } + if wrap { + return &wrapError{msg, err, frame} + } + return &noWrapError{msg, err, frame} + } + // Support %w anywhere. + // TODO: don't repeat the wrapped error's message when %w occurs in the middle. + msg := fmt.Sprintf(format2, a...) + if idx < 0 { + return &noWrapError{msg, nil, Caller(1)} + } + err := errorAt(a, idx) + if !ok || err == nil { + // Too many %ws or argument of %w is not an error. Approximate the Go + // 1.13 fmt.Errorf message. 
+ return &noWrapError{fmt.Sprintf("%sw(%s)", percentBangString, msg), nil, Caller(1)} + } + frame := Frame{} + if internal.EnableTrace { + frame = Caller(1) + } + return &wrapError{msg, err, frame} +} + +func errorAt(args []interface{}, i int) error { + if i < 0 || i >= len(args) { + return nil + } + err, ok := args[i].(error) + if !ok { + return nil + } + return err +} + +// formatPlusW is used to avoid the vet check that will barf at %w. +func formatPlusW(s string) string { + return s +} + +// Return the index of the only %w in format, or -1 if none. +// Also return a rewritten format string with %w replaced by %v, and +// false if there is more than one %w. +// TODO: handle "%[N]w". +func parsePercentW(format string) (idx int, newFormat string, ok bool) { + // Loosely copied from golang.org/x/tools/go/analysis/passes/printf/printf.go. + idx = -1 + ok = true + n := 0 + sz := 0 + var isW bool + for i := 0; i < len(format); i += sz { + if format[i] != '%' { + sz = 1 + continue + } + // "%%" is not a format directive. + if i+1 < len(format) && format[i+1] == '%' { + sz = 2 + continue + } + sz, isW = parsePrintfVerb(format[i:]) + if isW { + if idx >= 0 { + ok = false + } else { + idx = n + } + // "Replace" the last character, the 'w', with a 'v'. + p := i + sz - 1 + format = format[:p] + "v" + format[p+1:] + } + n++ + } + return idx, format, ok +} + +// Parse the printf verb starting with a % at s[0]. +// Return how many bytes it occupies and whether the verb is 'w'. +func parsePrintfVerb(s string) (int, bool) { + // Assume only that the directive is a sequence of non-letters followed by a single letter. + sz := 0 + var r rune + for i := 1; i < len(s); i += sz { + r, sz = utf8.DecodeRuneInString(s[i:]) + if unicode.IsLetter(r) { + return i + sz, r == 'w' + } + } + return len(s), false +} + +type noWrapError struct { + msg string + err error + frame Frame +} + +func (e *noWrapError) Error() string { + return fmt.Sprint(e) +} + +func (e *noWrapError) Format(s fmt.State, v rune) { FormatError(e, s, v) } + +func (e *noWrapError) FormatError(p Printer) (next error) { + p.Print(e.msg) + e.frame.Format(p) + return e.err +} + +type wrapError struct { + msg string + err error + frame Frame +} + +func (e *wrapError) Error() string { + return fmt.Sprint(e) +} + +func (e *wrapError) Format(s fmt.State, v rune) { FormatError(e, s, v) } + +func (e *wrapError) FormatError(p Printer) (next error) { + p.Print(e.msg) + e.frame.Format(p) + return e.err +} + +func (e *wrapError) Unwrap() error { + return e.err +} diff --git a/vendor/golang.org/x/xerrors/format.go b/vendor/golang.org/x/xerrors/format.go new file mode 100644 index 00000000..1bc9c26b --- /dev/null +++ b/vendor/golang.org/x/xerrors/format.go @@ -0,0 +1,34 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package xerrors + +// A Formatter formats error messages. +type Formatter interface { + error + + // FormatError prints the receiver's first error and returns the next error in + // the error chain, if any. + FormatError(p Printer) (next error) +} + +// A Printer formats error messages. +// +// The most common implementation of Printer is the one provided by package fmt +// during Printf (as of Go 1.13). Localization packages such as golang.org/x/text/message +// typically provide their own implementations. +type Printer interface { + // Print appends args to the message output. 
+ Print(args ...interface{}) + + // Printf writes a formatted string. + Printf(format string, args ...interface{}) + + // Detail reports whether error detail is requested. + // After the first call to Detail, all text written to the Printer + // is formatted as additional detail, or ignored when + // detail has not been requested. + // If Detail returns false, the caller can avoid printing the detail at all. + Detail() bool +} diff --git a/vendor/golang.org/x/xerrors/frame.go b/vendor/golang.org/x/xerrors/frame.go new file mode 100644 index 00000000..0de628ec --- /dev/null +++ b/vendor/golang.org/x/xerrors/frame.go @@ -0,0 +1,56 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package xerrors + +import ( + "runtime" +) + +// A Frame contains part of a call stack. +type Frame struct { + // Make room for three PCs: the one we were asked for, what it called, + // and possibly a PC for skipPleaseUseCallersFrames. See: + // https://go.googlesource.com/go/+/032678e0fb/src/runtime/extern.go#169 + frames [3]uintptr +} + +// Caller returns a Frame that describes a frame on the caller's stack. +// The argument skip is the number of frames to skip over. +// Caller(0) returns the frame for the caller of Caller. +func Caller(skip int) Frame { + var s Frame + runtime.Callers(skip+1, s.frames[:]) + return s +} + +// location reports the file, line, and function of a frame. +// +// The returned function may be "" even if file and line are not. +func (f Frame) location() (function, file string, line int) { + frames := runtime.CallersFrames(f.frames[:]) + if _, ok := frames.Next(); !ok { + return "", "", 0 + } + fr, ok := frames.Next() + if !ok { + return "", "", 0 + } + return fr.Function, fr.File, fr.Line +} + +// Format prints the stack as error detail. +// It should be called from an error's Format implementation +// after printing any other error detail. +func (f Frame) Format(p Printer) { + if p.Detail() { + function, file, line := f.location() + if function != "" { + p.Printf("%s\n ", function) + } + if file != "" { + p.Printf("%s:%d\n", file, line) + } + } +} diff --git a/vendor/golang.org/x/xerrors/internal/internal.go b/vendor/golang.org/x/xerrors/internal/internal.go new file mode 100644 index 00000000..89f4eca5 --- /dev/null +++ b/vendor/golang.org/x/xerrors/internal/internal.go @@ -0,0 +1,8 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package internal + +// EnableTrace indicates whether stack information should be recorded in errors. +var EnableTrace = true diff --git a/vendor/golang.org/x/xerrors/wrap.go b/vendor/golang.org/x/xerrors/wrap.go new file mode 100644 index 00000000..9842758c --- /dev/null +++ b/vendor/golang.org/x/xerrors/wrap.go @@ -0,0 +1,112 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package xerrors + +import ( + "reflect" +) + +// A Wrapper provides context around another error. +type Wrapper interface { + // Unwrap returns the next error in the error chain. + // If there is no next error, Unwrap returns nil. + Unwrap() error +} + +// Opaque returns an error with the same error formatting as err +// but that does not match err and cannot be unwrapped. 
+func Opaque(err error) error { + return noWrapper{err} +} + +type noWrapper struct { + error +} + +func (e noWrapper) FormatError(p Printer) (next error) { + if f, ok := e.error.(Formatter); ok { + return f.FormatError(p) + } + p.Print(e.error) + return nil +} + +// Unwrap returns the result of calling the Unwrap method on err, if err implements +// Unwrap. Otherwise, Unwrap returns nil. +// +// Deprecated: As of Go 1.13, use errors.Unwrap instead. +func Unwrap(err error) error { + u, ok := err.(Wrapper) + if !ok { + return nil + } + return u.Unwrap() +} + +// Is reports whether any error in err's chain matches target. +// +// An error is considered to match a target if it is equal to that target or if +// it implements a method Is(error) bool such that Is(target) returns true. +// +// Deprecated: As of Go 1.13, use errors.Is instead. +func Is(err, target error) bool { + if target == nil { + return err == target + } + + isComparable := reflect.TypeOf(target).Comparable() + for { + if isComparable && err == target { + return true + } + if x, ok := err.(interface{ Is(error) bool }); ok && x.Is(target) { + return true + } + // TODO: consider supporing target.Is(err). This would allow + // user-definable predicates, but also may allow for coping with sloppy + // APIs, thereby making it easier to get away with them. + if err = Unwrap(err); err == nil { + return false + } + } +} + +// As finds the first error in err's chain that matches the type to which target +// points, and if so, sets the target to its value and returns true. An error +// matches a type if it is assignable to the target type, or if it has a method +// As(interface{}) bool such that As(target) returns true. As will panic if target +// is not a non-nil pointer to a type which implements error or is of interface type. +// +// The As method should set the target to its value and return true if err +// matches the type to which target points. +// +// Deprecated: As of Go 1.13, use errors.As instead. 
+func As(err error, target interface{}) bool { + if target == nil { + panic("errors: target cannot be nil") + } + val := reflect.ValueOf(target) + typ := val.Type() + if typ.Kind() != reflect.Ptr || val.IsNil() { + panic("errors: target must be a non-nil pointer") + } + if e := typ.Elem(); e.Kind() != reflect.Interface && !e.Implements(errorType) { + panic("errors: *target must be interface or implement error") + } + targetType := typ.Elem() + for err != nil { + if reflect.TypeOf(err).AssignableTo(targetType) { + val.Elem().Set(reflect.ValueOf(err)) + return true + } + if x, ok := err.(interface{ As(interface{}) bool }); ok && x.As(target) { + return true + } + err = Unwrap(err) + } + return false +} + +var errorType = reflect.TypeOf((*error)(nil)).Elem() diff --git a/vendor/modules.txt b/vendor/modules.txt index 014f7596..79a0ed9e 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -36,6 +36,9 @@ github.com/emicklei/go-restful/v3/log # github.com/evanphx/json-patch v5.6.0+incompatible ## explicit github.com/evanphx/json-patch +# github.com/fatih/color v1.10.0 +## explicit; go 1.13 +github.com/fatih/color # github.com/felixge/httpsnoop v1.0.4 ## explicit; go 1.13 github.com/felixge/httpsnoop @@ -62,6 +65,16 @@ github.com/go-openapi/jsonreference/internal # github.com/go-openapi/swag v0.22.4 ## explicit; go 1.18 github.com/go-openapi/swag +# github.com/goccy/go-yaml v1.12.0 +## explicit; go 1.19 +github.com/goccy/go-yaml +github.com/goccy/go-yaml/ast +github.com/goccy/go-yaml/internal/errors +github.com/goccy/go-yaml/lexer +github.com/goccy/go-yaml/parser +github.com/goccy/go-yaml/printer +github.com/goccy/go-yaml/scanner +github.com/goccy/go-yaml/token # github.com/gogo/protobuf v1.3.2 ## explicit; go 1.15 github.com/gogo/protobuf/gogoproto @@ -148,6 +161,12 @@ github.com/json-iterator/go github.com/mailru/easyjson/buffer github.com/mailru/easyjson/jlexer github.com/mailru/easyjson/jwriter +# github.com/mattn/go-colorable v0.1.8 +## explicit; go 1.13 +github.com/mattn/go-colorable +# github.com/mattn/go-isatty v0.0.12 +## explicit; go 1.12 +github.com/mattn/go-isatty # github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd ## explicit github.com/modern-go/concurrent @@ -353,6 +372,10 @@ golang.org/x/text/width # golang.org/x/time v0.3.0 ## explicit golang.org/x/time/rate +# golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 +## explicit; go 1.17 +golang.org/x/xerrors +golang.org/x/xerrors/internal # google.golang.org/genproto/googleapis/api v0.0.0-20240528184218-531527333157 ## explicit; go 1.20 google.golang.org/genproto/googleapis/api